From a4790545e988ae81fadb4a803d1c067ec5ea5d10 Mon Sep 17 00:00:00 2001 From: Rob Young Date: Mon, 19 Feb 2024 10:14:51 +0000 Subject: [PATCH] Update dist --- dist/main/index.js | 116323 +++++++++++++++++++------------------- dist/main/index.js.map | 2 +- dist/main/licenses.txt | 737 +- dist/post/index.js | 116323 +++++++++++++++++++------------------- dist/post/index.js.map | 2 +- dist/post/licenses.txt | 737 +- 6 files changed, 116612 insertions(+), 117512 deletions(-) diff --git a/dist/main/index.js b/dist/main/index.js index a8f8005..c825a25 100644 --- a/dist/main/index.js +++ b/dist/main/index.js @@ -42,7 +42,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GoogleCloudStorageMutex = void 0; const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); -const storage_1 = __nccwpck_require__(8174); +const storage_1 = __nccwpck_require__(7577); const common_1 = __nccwpck_require__(4777); const TIME_PERIOD_PATTERN = /^(?:(\d+)m)?(?:(\d+)s)?$/; class TimePeriodError extends Error { @@ -770,7 +770,7 @@ class OidcClient { .catch(error => { throw new Error(`Failed to get ID Token. \n Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); + Error Message: ${error.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; if (!id_token) { @@ -1223,8 +1223,8 @@ class Context { var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; @@ -1242,7 +1242,8 @@ class Context { this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; @@ -1274,7 +1275,11 @@ exports.Context = Context; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1287,7 +1292,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1304,7 +1309,7 @@ exports.context = new Context.Context(); */ function getOctokit(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); - return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } exports.getOctokit = getOctokit; //# sourceMappingURL=github.js.map @@ -1318,7 +1323,11 @@ exports.getOctokit = getOctokit; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1331,13 +1340,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; const httpClient = __importStar(__nccwpck_require__(6255)); +const undici_1 = __nccwpck_require__(1773); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -1353,6 +1372,19 @@ function getProxyAgent(destinationUrl) { return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } @@ -1368,7 +1400,11 @@ exports.getApiBaseUrl = getApiBaseUrl; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1381,7 +1417,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1398,7 +1434,8 @@ const baseUrl = Utils.getApiBaseUrl(); exports.defaults = { baseUrl, request: { - agent: Utils.getProxyAgent(baseUrl) + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } }; exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); @@ -1518,7 +1555,11 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand /* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1531,7 +1572,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1550,6 +1591,7 @@ const http = __importStar(__nccwpck_require__(3685)); const https = __importStar(__nccwpck_require__(5687)); const pm = __importStar(__nccwpck_require__(9835)); const tunnel = __importStar(__nccwpck_require__(4294)); +const undici_1 = __nccwpck_require__(1773); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -1579,16 +1621,16 @@ var HttpCodes; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); +})(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com @@ -1639,6 +1681,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -1944,6 +1999,15 @@ class HttpClient { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; @@ -2043,6 +2107,30 @@ class HttpClient { } return agent; } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); @@ -2143,7 +2231,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -4062,13688 +4156,1178 @@ exports.callbackifyAll = callbackifyAll; /***/ }), -/***/ 1672: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 334: +/***/ ((module) => { "use strict"; -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AclRoleAccessorMethods = exports.Acl = void 0; -const promisify_1 = __nccwpck_require__(9203); -/** - * Attach functionality to a {@link Storage.acl} instance. This will add an - * object for each role group (owners, readers, and writers), with each object - * containing methods to add or delete a type of entity. - * - * As an example, here are a few methods that are created. - * - * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); - * - * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); - * - * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); - * - * @private - */ -class AclRoleAccessorMethods { - constructor() { - this.owners = {}; - this.readers = {}; - this.writers = {}; - /** - * An object of convenience methods to add or delete owner ACL permissions - * for a given entity. 
- * - * The supported methods include: - * - * - `myFile.acl.owners.addAllAuthenticatedUsers` - * - `myFile.acl.owners.deleteAllAuthenticatedUsers` - * - `myFile.acl.owners.addAllUsers` - * - `myFile.acl.owners.deleteAllUsers` - * - `myFile.acl.owners.addDomain` - * - `myFile.acl.owners.deleteDomain` - * - `myFile.acl.owners.addGroup` - * - `myFile.acl.owners.deleteGroup` - * - `myFile.acl.owners.addProject` - * - `myFile.acl.owners.deleteProject` - * - `myFile.acl.owners.addUser` - * - `myFile.acl.owners.deleteUser` - * - * @name Acl#owners - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * //- - * // Add a user as an owner of a file. - * //- - * const myBucket = gcs.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) - * {}); - * - * //- - * // For reference, the above command is the same as running the following. - * //- - * myFile.acl.add({ - * entity: 'user-email@example.com', - * role: gcs.acl.OWNER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.owners.addUser('email@example.com').then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - this.owners = {}; - /** - * An object of convenience methods to add or delete reader ACL permissions - * for a given entity. - * - * The supported methods include: - * - * - `myFile.acl.readers.addAllAuthenticatedUsers` - * - `myFile.acl.readers.deleteAllAuthenticatedUsers` - * - `myFile.acl.readers.addAllUsers` - * - `myFile.acl.readers.deleteAllUsers` - * - `myFile.acl.readers.addDomain` - * - `myFile.acl.readers.deleteDomain` - * - `myFile.acl.readers.addGroup` - * - `myFile.acl.readers.deleteGroup` - * - `myFile.acl.readers.addProject` - * - `myFile.acl.readers.deleteProject` - * - `myFile.acl.readers.addUser` - * - `myFile.acl.readers.deleteUser` - * - * @name Acl#readers - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * //- - * // Add a user as a reader of a file. - * //- - * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) - * {}); - * - * //- - * // For reference, the above command is the same as running the following. - * //- - * myFile.acl.add({ - * entity: 'user-email@example.com', - * role: gcs.acl.READER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.readers.addUser('email@example.com').then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - this.readers = {}; - /** - * An object of convenience methods to add or delete writer ACL permissions - * for a given entity. 
- * - * The supported methods include: - * - * - `myFile.acl.writers.addAllAuthenticatedUsers` - * - `myFile.acl.writers.deleteAllAuthenticatedUsers` - * - `myFile.acl.writers.addAllUsers` - * - `myFile.acl.writers.deleteAllUsers` - * - `myFile.acl.writers.addDomain` - * - `myFile.acl.writers.deleteDomain` - * - `myFile.acl.writers.addGroup` - * - `myFile.acl.writers.deleteGroup` - * - `myFile.acl.writers.addProject` - * - `myFile.acl.writers.deleteProject` - * - `myFile.acl.writers.addUser` - * - `myFile.acl.writers.deleteUser` - * - * @name Acl#writers - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * //- - * // Add a user as a writer of a file. - * //- - * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) - * {}); - * - * //- - * // For reference, the above command is the same as running the following. - * //- - * myFile.acl.add({ - * entity: 'user-email@example.com', - * role: gcs.acl.WRITER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.writers.addUser('email@example.com').then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - this.writers = {}; - AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 6762: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(5030); +var import_before_after_hook = __nccwpck_require__(3682); +var import_request = __nccwpck_require__(6234); +var import_graphql = __nccwpck_require__(8467); +var import_auth_token = __nccwpck_require__(334); + +// pkg/dist-src/version.js +var VERSION = "5.1.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; } - _assignAccessMethods(role) { - const accessMethods = AclRoleAccessorMethods.accessMethods; - const entities = AclRoleAccessorMethods.entities; - const roleGroup = role.toLowerCase() + 's'; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this[roleGroup] = entities.reduce((acc, entity) => { - const isPrefix = entity.charAt(entity.length - 1) === '-'; - accessMethods.forEach(accessMethod => { - let method = accessMethod + entity[0].toUpperCase() + entity.substr(1); - if (isPrefix) { - method = method.replace('-', ''); - } - // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the - // more complex API of specifying an `entity` and `role`. - // eslint-disable-next-line @typescript-eslint/no-explicit-any - acc[method] = (entityId, options, callback) => { - let apiEntity; - if (typeof options === 'function') { - callback = options; - options = {}; - } - if (isPrefix) { - apiEntity = entity + entityId; - } - else { - // If the entity is not a prefix, it is a special entity group - // that does not require further details. The accessor methods - // only accept a callback. - apiEntity = entity; - callback = entityId; - } - options = Object.assign({ - entity: apiEntity, - role, - }, options); - const args = [options]; - if (typeof callback === 'function') { - args.push(callback); - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - return this[accessMethod].apply(this, args); - }; - }); - return acc; - }, {}); + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; } -} -exports.AclRoleAccessorMethods = AclRoleAccessorMethods; -AclRoleAccessorMethods.accessMethods = ['add', 'delete']; -AclRoleAccessorMethods.entities = [ - // Special entity groups that do not require further specification. - 'allAuthenticatedUsers', - 'allUsers', - // Entity groups that require specification, e.g. `user-email@example.com`. - 'domain-', - 'group-', - 'project-', - 'user-', -]; -AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; -/** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. - * - * An ACL consists of one or more entries, where each entry grants permissions - * to an entity. Permissions define the actions that can be performed against an - * object or bucket (for example, `READ` or `WRITE`); the entity defines who the - * permission applies to (for example, a specific user or group of users). - * - * Where an `entity` value is accepted, we follow the format the Cloud Storage - * API expects. 
- * - * Refer to - * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls - * for the most up-to-date values. - * - * - `user-userId` - * - `user-email` - * - `group-groupId` - * - `group-email` - * - `domain-domain` - * - `project-team-projectId` - * - `allUsers` - * - `allAuthenticatedUsers` - * - * Examples: - * - * - The user "liz@example.com" would be `user-liz@example.com`. - * - The group "example@googlegroups.com" would be - * `group-example@googlegroups.com`. - * - To refer to all members of the Google Apps for Business domain - * "example.com", the entity would be `domain-example.com`. - * - * For more detailed information, see - * {@link http://goo.gl/6qBBPO| About Access Control Lists}. - * - * @constructor Acl - * @mixin - * @param {object} options Configuration options. - */ -class Acl extends AclRoleAccessorMethods { - constructor(options) { - super(); - this.pathPrefix = options.pathPrefix; - this.request_ = options.request; + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; } - /** - * @typedef {array} AddAclResponse - * @property {object} 0 The Acl Objects. - * @property {object} 1 The full API response. - */ - /** - * @callback AddAclCallback - * @param {?Error} err Request error, if any. - * @param {object} acl The Acl Objects. - * @param {object} apiResponse The full API response. - */ - /** - * Add access controls on a {@link Bucket} or {@link File}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} - * - * @param {object} options Configuration options. - * @param {string} options.entity Whose permissions will be added. - * @param {string} options.role Permissions allowed for the defined entity. - * See {@link https://cloud.google.com/storage/docs/access-control Access - * Control}. - * @param {number} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {AddAclCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * const options = { - * entity: 'user-useremail@example.com', - * role: gcs.acl.OWNER_ROLE - * }; - * - * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * // Here is how you would grant ownership permissions to a user on a - * specific - * // revision of a file. - * //- - * myFile.acl.add({ - * entity: 'user-useremail@example.com', - * role: gcs.acl.OWNER_ROLE, - * generation: 1 - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * myBucket.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_add_file_owner - * Example of adding an owner to a file: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_owner - * Example of adding an owner to a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_default_owner - * Example of adding a default owner to a bucket: - */ - add(options, callback) { - const query = {}; - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - this.request({ - method: 'POST', - uri: '', - qs: query, - maxRetries: 0, - json: { - entity: options.entity, - role: options.role.toUpperCase(), - }, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - callback(null, this.makeAclObject_(resp), resp); - }); - } - /** - * @typedef {array} RemoveAclResponse - * @property {object} 0 The full API response. - */ - /** - * @callback RemoveAclCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Delete access controls on a {@link Bucket} or {@link File}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} - * - * @param {object} options Configuration object. - * @param {string} options.entity Whose permissions will be revoked. - * @param {int} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {RemoveAclCallback} callback The callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * myBucket.acl.delete({ - * entity: 'user-useremail@example.com' - * }, function(err, apiResponse) {}); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * //- - * myFile.acl.delete({ - * entity: 'user-useremail@example.com', - * generation: 1 - * }, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * myFile.acl.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_owner - * Example of removing an owner from a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_default_owner - * Example of removing a default owner from a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_file_owner - * Example of removing an owner from a bucket: - */ - delete(options, callback) { - const query = {}; - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - this.request({ - method: 'DELETE', - uri: '/' + encodeURIComponent(options.entity), - qs: query, - }, (err, resp) => { - callback(err, resp); + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; } - /** - * @typedef {array} GetAclResponse - * @property {object|object[]} 0 Single or array of Acl Objects. - * @property {object} 1 The full API response. - */ - /** - * @callback GetAclCallback - * @param {?Error} err Request error, if any. - * @param {object|object[]} acl Single or array of Acl Objects. - * @param {object} apiResponse The full API response. - */ - /** - * Get access controls on a {@link Bucket} or {@link File}. If - * an entity is omitted, you will receive an array of all applicable access - * controls. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} - * - * @param {object|function} [options] Configuration options. If you want to - * receive a list of all access controls, pass the callback function as - * the only argument. - * @param {string} [options.entity] Whose permissions will be fetched. - * @param {number} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetAclCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * myBucket.acl.get({ - * entity: 'user-useremail@example.com' - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // Get all access controls. - * //- - * myBucket.acl.get(function(err, aclObjects, apiResponse) { - * // aclObjects = [ - * // { - * // entity: 'user-useremail@example.com', - * // role: 'owner' - * // } - * // ] - * }); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * //- - * myFile.acl.get({ - * entity: 'user-useremail@example.com', - * generation: 1 - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myBucket.acl.get().then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_print_file_acl - * Example of printing a file's ACL: - * - * @example include:samples/acl.js - * region_tag:storage_print_file_acl_for_user - * Example of printing a file's ACL for a specific user: - * - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl - * Example of printing a bucket's ACL: - * - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl_for_user - * Example of printing a bucket's ACL for a specific user: - */ - get(optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; - const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - let path = ''; - const query = {}; - if (options) { - path = '/' + encodeURIComponent(options.entity); - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - } - this.request({ - uri: path, - qs: query, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - let results; - if (resp.items) { - results = resp.items.map(this.makeAclObject_); - } - else { - results = this.makeAclObject_(resp); - } - callback(null, results, resp); - }); + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); } - /** - * @typedef {array} UpdateAclResponse - * @property {object} 0 The updated Acl Objects. - * @property {object} 1 The full API response. - */ - /** - * @callback UpdateAclCallback - * @param {?Error} err Request error, if any. - * @param {object} acl The updated Acl Objects. - * @param {object} apiResponse The full API response. - */ - /** - * Update access controls on a {@link Bucket} or {@link File}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} - * - * @param {object} options Configuration options. - * @param {string} options.entity Whose permissions will be updated. - * @param {string} options.role Permissions allowed for the defined entity. - * See {@link Storage.acl}. - * @param {number} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). 
- * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {UpdateAclCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * const options = { - * entity: 'user-useremail@example.com', - * role: gcs.acl.WRITER_ROLE - * }; - * - * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * //- - * myFile.acl.update({ - * entity: 'user-useremail@example.com', - * role: gcs.acl.WRITER_ROLE, - * generation: 1 - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.update(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - update(options, callback) { - const query = {}; - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - this.request({ - method: 'PUT', - uri: '/' + encodeURIComponent(options.entity), - qs: query, - json: { - role: options.role.toUpperCase(), - }, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - callback(null, this.makeAclObject_(resp), resp); - }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 9440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "9.0.4"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if 
(Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); } - /** - * Transform API responses to a consistent object format. - * - * @private - */ - makeAclObject_(accessControlObject) { - const obj = { - entity: accessControlObject.entity, - role: accessControlObject.role, - }; - if (accessControlObject.projectTeam) { - obj.projectTeam = accessControlObject.projectTeam; - } - return obj; + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; } - /** - * Patch requests up to the bucket's request object. - * - * @private - * - * @param {string} method Action. - * @param {string} path Request path. - * @param {*} query Request query object. - * @param {*} body Request body contents. - * @param {function} callback Callback function. - */ - request(reqOpts, callback) { - reqOpts.uri = this.pathPrefix + reqOpts.uri; - this.request_(reqOpts, callback); + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; } -exports.Acl = Acl; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Acl, { - exclude: ['request'], -}); -//# sourceMappingURL=acl.js.map -/***/ }), +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} -/***/ 8561: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} -"use strict"; +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const paginator_1 = __nccwpck_require__(6412); -const promisify_1 = __nccwpck_require__(9203); -const extend = __nccwpck_require__(8171); -const fs = __nccwpck_require__(7147); -const mime = __nccwpck_require__(3583); -const path = __nccwpck_require__(1017); -const pLimit = __nccwpck_require__(7684); -const util_1 = __nccwpck_require__(3837); -const retry = __nccwpck_require__(3415); -const util_2 = __nccwpck_require__(1862); -const acl_1 = __nccwpck_require__(1672); -const file_1 = __nccwpck_require__(5373); -const iam_1 = __nccwpck_require__(66); -const notification_1 = __nccwpck_require__(7523); -const storage_1 = __nccwpck_require__(346); -const signer_1 = __nccwpck_require__(9665); -const stream_1 = __nccwpck_require__(2781); -const url_1 = __nccwpck_require__(7310); -var BucketActionToHTTPMethod; -(function (BucketActionToHTTPMethod) { - BucketActionToHTTPMethod["list"] = "GET"; -})(BucketActionToHTTPMethod = exports.BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = {})); -var AvailableServiceObjectMethods; -(function (AvailableServiceObjectMethods) { - AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; - AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; -})(AvailableServiceObjectMethods = exports.AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = {})); -var BucketExceptionMessages; -(function (BucketExceptionMessages) { - BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; - BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination 
file must be specified."; - BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; - BucketExceptionMessages["CHANNEL_ADDRESS_REQUIRED"] = "An address is required to create a channel."; - BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; - BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; - BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; - BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; - BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; -})(BucketExceptionMessages = exports.BucketExceptionMessages || (exports.BucketExceptionMessages = {})); -/** - * @callback Crc32cGeneratorToStringCallback - * A method returning the CRC32C as a base64-encoded string. - * - * @returns {string} - * - * @example - * Hashing the string 'data' should return 'rth90Q==' - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.toString(); // 'rth90Q==' - * ``` - **/ -/** - * @callback Crc32cGeneratorValidateCallback - * A method validating a base64-encoded CRC32C string. - * - * @param {string} [value] base64-encoded CRC32C string to validate - * @returns {boolean} - * - * @example - * Should return `true` if the value matches, `false` otherwise - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.validate('DkjKuA=='); // false - * crc32c.validate('rth90Q=='); // true - * ``` - **/ -/** - * @callback Crc32cGeneratorUpdateCallback - * A method for passing `Buffer`s for CRC32C generation. - * - * @param {Buffer} [data] data to update CRC32C value with - * @returns {undefined} - * - * @example - * Hashing buffers from 'some ' and 'text\n' - * - * ```js - * const buffer1 = Buffer.from('some '); - * crc32c.update(buffer1); - * - * const buffer2 = Buffer.from('text\n'); - * crc32c.update(buffer2); - * - * crc32c.toString(); // 'DkjKuA==' - * ``` - **/ -/** - * @typedef {object} CRC32CValidator - * @property {Crc32cGeneratorToStringCallback} - * @property {Crc32cGeneratorValidateCallback} - * @property {Crc32cGeneratorUpdateCallback} - */ -/** - * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} - * - * @name Bucket#crc32cGenerator - * @type {CRC32CValidator} - */ -/** - * Get and set IAM policies for your bucket. - * - * @name Bucket#iam - * @mixes Iam - * - * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} - * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} - * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Get the IAM policy for your bucket. - * //- - * bucket.iam.getPolicy(function(err, policy) { - * console.log(policy); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.iam.getPolicy().then(function(data) { - * const policy = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/iam.js - * region_tag:storage_view_bucket_iam_members - * Example of retrieving a bucket's IAM policy: - * - * @example include:samples/iam.js - * region_tag:storage_add_bucket_iam_member - * Example of adding to a bucket's IAM policy: - * - * @example include:samples/iam.js - * region_tag:storage_remove_bucket_iam_member - * Example of removing from a bucket's IAM policy: - */ -/** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. - * - * An ACL consists of one or more entries, where each entry grants permissions - * to an entity. Permissions define the actions that can be performed against - * an object or bucket (for example, `READ` or `WRITE`); the entity defines - * who the permission applies to (for example, a specific user or group of - * users). - * - * The `acl` object on a Bucket instance provides methods to get you a list of - * the ACLs defined on your bucket, as well as set, update, and delete them. - * - * Buckets also have - * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} - * for all created files. Default ACLs specify permissions that all new - * objects added to the bucket will inherit by default. You can add, delete, - * get, and update entities and permissions for these as well with - * {@link Bucket#acl.default}. - * - * See {@link http://goo.gl/6qBBPO| About Access Control Lists} - * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} - * - * @name Bucket#acl - * @mixes Acl - * @property {Acl} default Cloud Storage Buckets have - * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} - * for all created files. You can add, delete, get, and update entities and - * permissions for these as well. The method signatures and examples are all - * the same, after only prefixing the method call with `default`. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Make a bucket's contents publicly readable. - * //- - * const myBucket = storage.bucket('my-bucket'); - * - * const options = { - * entity: 'allUsers', - * role: storage.acl.READER_ROLE - * }; - * - * myBucket.acl.add(options, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * myBucket.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl - * Example of printing a bucket's ACL: - * - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl_for_user - * Example of printing a bucket's ACL for a specific user: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_owner - * Example of adding an owner to a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_owner - * Example of removing an owner from a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_default_owner - * Example of adding a default owner to a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_default_owner - * Example of removing a default owner from a bucket: - */ -/** - * The API-formatted resource description of the bucket. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name Bucket#metadata - * @type {object} - */ -/** - * The bucket's name. - * @name Bucket#name - * @type {string} - */ -/** - * Get {@link File} objects for the files currently in the bucket as a - * readable object stream. - * - * @method Bucket#getFilesStream - * @param {GetFilesOptions} [query] Query object for listing files. - * @returns {ReadableStream} A readable stream that emits {@link File} instances. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getFilesStream() - * .on('error', console.error) - * .on('data', function(file) { - * // file is a File object. - * }) - * .on('end', function() { - * // All files retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * bucket.getFilesStream() - * .on('data', function(file) { - * this.end(); - * }); - * - * //- - * // If you're filtering files with a delimiter, you should use - * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to - * // preserve the `apiResponse` argument. - * //- - * const prefixes = []; - * - * function callback(err, files, nextQuery, apiResponse) { - * prefixes = prefixes.concat(apiResponse.prefixes); - * - * if (nextQuery) { - * bucket.getFiles(nextQuery, callback); - * } else { - * // prefixes = The finished array of prefixes. - * } - * } - * - * bucket.getFiles({ - * autoPaginate: false, - * delimiter: '/' - * }, callback); - * ``` - */ -/** - * Create a Bucket object to interact with a Cloud Storage bucket. - * - * @class - * @hideconstructor - * - * @param {Storage} storage A {@link Storage} instance. - * @param {string} name The name of the bucket. - * @param {object} [options] Configuration object. - * @param {string} [options.userProject] User project. 
- * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * ``` - */ -class Bucket extends nodejs_common_1.ServiceObject { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - getFilesStream(query) { - // placeholder body, overwritten in constructor - return new stream_1.Readable(); +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - constructor(storage, name, options) { - var _a, _b, _c, _d; - options = options || {}; - // Allow for "gs://"-style input, and strip any trailing slashes. - name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); - const requestQueryObject = {}; - if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { - requestQueryObject.ifGenerationMatch = - options.preconditionOpts.ifGenerationMatch; - } - if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { - requestQueryObject.ifGenerationNotMatch = - options.preconditionOpts.ifGenerationNotMatch; + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); } - if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { - requestQueryObject.ifMetagenerationMatch = - options.preconditionOpts.ifMetagenerationMatch; + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); } - if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) { - requestQueryObject.ifMetagenerationNotMatch = - options.preconditionOpts.ifMetagenerationNotMatch; + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); } - const userProject = options.userProject; - if (typeof userProject === 'string') { - requestQueryObject.userProject = userProject; + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } - const methods = { - /** - * Create a bucket. - * - * @method Bucket#create - * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. - * @param {CreateBucketCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * bucket.create(function(err, bucket, apiResponse) { - * if (!err) { - * // The bucket was created successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.create().then(function(data) { - * const bucket = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - create: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * IamDeleteBucketOptions Configuration options. - * @property {boolean} [ignoreNotFound = false] Ignore an error if - * the bucket does not exist. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} DeleteBucketResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteBucketCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Delete the bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} - * - * @method Bucket#delete - * @param {DeleteBucketOptions} [options] Configuration options. - * @param {boolean} [options.ignoreNotFound = false] Ignore an error if - * the bucket does not exist. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {DeleteBucketCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * bucket.delete(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/buckets.js - * region_tag:storage_delete_bucket - * Another example: - */ - delete: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} BucketExistsResponse - * @property {boolean} 0 Whether the {@link Bucket} exists. - */ - /** - * @callback BucketExistsCallback - * @param {?Error} err Request error, if any. - * @param {boolean} exists Whether the {@link Bucket} exists. - */ - /** - * Check if the bucket exists. - * - * @method Bucket#exists - * @param {BucketExistsOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {BucketExistsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.exists(function(err, exists) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.exists().then(function(data) { - * const exists = data[0]; - * }); - * ``` - */ - exists: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() - * @property {boolean} [autoCreate] Automatically create the object if - * it does not exist. Default: `false` - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} GetBucketResponse - * @property {Bucket} 0 The {@link Bucket}. - * @property {object} 1 The full API response. - */ - /** - * @callback GetBucketCallback - * @param {?Error} err Request error, if any. - * @param {Bucket} bucket The {@link Bucket}. - * @param {object} apiResponse The full API response. - */ - /** - * Get a bucket if it exists. - * - * You may optionally use this to "get or create" an object by providing - * an object with `autoCreate` set to `true`. Any extra configuration that - * is normally required for the `create` method must be contained within - * this object as well. - * - * @method Bucket#get - * @param {GetBucketOptions} [options] Configuration options. - * @param {boolean} [options.autoCreate] Automatically create the object if - * it does not exist. Default: `false` - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetBucketCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.get(function(err, bucket, apiResponse) { - * // `bucket.metadata` has been populated. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.get().then(function(data) { - * const bucket = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - get: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} GetBucketMetadataResponse - * @property {object} 0 The bucket metadata. - * @property {object} 1 The full API response. - */ - /** - * @callback GetBucketMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The bucket metadata. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Get the bucket's metadata. - * - * To set metadata, see {@link Bucket#setMetadata}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} - * - * @method Bucket#getMetadata - * @param {GetBucketMetadataOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getMetadata(function(err, metadata, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getMetadata().then(function(data) { - * const metadata = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/requesterPays.js - * region_tag:storage_get_requester_pays_status - * Example of retrieving the requester pays status of a bucket: - */ - getMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} SetBucketMetadataResponse - * @property {object} apiResponse The full API response. - */ - /** - * @callback SetBucketMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The bucket metadata. - */ - /** - * Set the bucket's metadata. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @method Bucket#setMetadata - * @param {object} metadata The metadata you wish to set. - * @param {SetBucketMetadataOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Set website metadata field on the bucket. - * //- - * const metadata = { - * website: { - * mainPageSuffix: 'http://example.com', - * notFoundPage: 'http://example.com/404.html' - * } - * }; - * - * bucket.setMetadata(metadata, function(err, apiResponse) {}); - * - * //- - * // Enable versioning for your bucket. 
- * //- - * bucket.setMetadata({ - * versioning: { - * enabled: true - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Enable KMS encryption for objects within this bucket. - * //- - * bucket.setMetadata({ - * encryption: { - * defaultKmsKeyName: 'projects/grape-spaceship-123/...' - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Set the default event-based hold value for new objects in this - * // bucket. - * //- - * bucket.setMetadata({ - * defaultEventBasedHold: true - * }, function(err, apiResponse) {}); - * - * //- - * // Remove object lifecycle rules. - * //- - * bucket.setMetadata({ - * lifecycle: null - * }, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setMetadata(metadata).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - }; - super({ - parent: storage, - baseUrl: '/b', - id: name, - createMethod: storage.createBucket.bind(storage), - methods, - }); - this.name = name; - this.storage = storage; - this.userProject = options.userProject; - this.acl = new acl_1.Acl({ - request: this.request.bind(this), - pathPrefix: '/acl', - }); - this.acl.default = new acl_1.Acl({ - request: this.request.bind(this), - pathPrefix: '/defaultObjectAcl', - }); - this.crc32cGenerator = - options.crc32cGenerator || this.storage.crc32cGenerator; - this.iam = new iam_1.Iam(this); - this.getFilesStream = paginator_1.paginator.streamify('getFiles'); - this.instanceRetryValue = storage.retryOptions.autoRetry; - this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } else { + return encodeReserved(literal); + } } - /** - * The bucket's Cloud Storage URI (`gs://`) - * - * @example - * ```ts - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * // `gs://my-bucket` - * const href = bucket.cloudStorageURI.href; - * ``` - */ - get cloudStorageURI() { - const uri = new url_1.URL('gs://'); - uri.host = this.name; - return uri; + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = 
previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } } - /** - * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). - * @property {boolean} [append=true] The new rules will be appended to any - * pre-existing rules. - */ - /** - * - * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects - * in this bucket. - * @property {string|object} action The action to be taken upon matching of - * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. - * **Note**: For configuring a raw-formatted rule object to be passed as `action` - * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. - * @property {object} condition Condition a bucket must meet before the - * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. - * @property {string} [storageClass] When using the `setStorageClass` - * action, provide this option to dictate which storage class the object - * should update to. Please see - * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. - */ - /** - * Add an object lifecycle management rule to the bucket. - * - * By default, an Object Lifecycle Management rule provided to this method - * will be included to the existing policy. To replace all existing rules, - * supply the `options` argument, setting `append` to `false`. - * - * To add multiple rules, pass a list to the `rule` parameter. Calling this - * function multiple times asynchronously does not guarantee that all rules - * are added correctly. - * - * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects - * in this bucket. - * @param {string|object} rule.action The action to be taken upon matching of - * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. - * **Note**: For configuring a raw-formatted rule object to be passed as `action` - * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. - * @param {object} rule.condition Condition a bucket must meet before the - * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. - * @param {string} [rule.storageClass] When using the `setStorageClass` - * action, provide this option to dictate which storage class the object - * should update to. - * @param {AddLifecycleRuleOptions} [options] Configuration object. - * @param {boolean} [options.append=true] Append the new rule to the existing - * policy. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Automatically have an object deleted from this bucket once it is 3 years - * // of age. 
- * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * age: 365 * 3 // Specified in days. - * } - * }, function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * const lifecycleRules = bucket.metadata.lifecycle.rule; - * - * // Iterate over the Object Lifecycle Management rules on this bucket. - * lifecycleRules.forEach(lifecycleRule => {}); - * }); - * - * //- - * // By default, the rule you provide will be added to the existing policy. - * // Optionally, you can disable this behavior to replace all of the - * // pre-existing rules. - * //- - * const options = { - * append: false - * }; - * - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * age: 365 * 3 // Specified in days. - * } - * }, options, function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * // All rules have been replaced with the new "delete" rule. - * - * // Iterate over the Object Lifecycle Management rules on this bucket. - * lifecycleRules.forEach(lifecycleRule => {}); - * }); - * - * //- - * // For objects created before 2018, "downgrade" the storage class. - * //- - * bucket.addLifecycleRule({ - * action: 'setStorageClass', - * storageClass: 'COLDLINE', - * condition: { - * createdBefore: new Date('2018') - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete objects created before 2016 which have the Coldline storage - * // class. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * matchesStorageClass: [ - * 'COLDLINE' - * ], - * createdBefore: new Date('2016') - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete object that has a noncurrent timestamp that is at least 100 days. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * daysSinceNoncurrentTime: 100 - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete object that has a noncurrent timestamp before 2020-01-01. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * noncurrentTimeBefore: new Date('2020-01-01') - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete object that has a customTime that is at least 100 days. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * daysSinceCustomTime: 100 - * } - * }, function(err, apiResponse) ()); - * - * //- - * // Delete object that has a customTime before 2020-01-01. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * customTimeBefore: new Date('2020-01-01') - * } - * }, function(err, apiResponse) {}); - * ``` - */ - addLifecycleRule(rule, optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - options = options || {}; - const rules = Array.isArray(rule) ? rule : [rule]; - const newLifecycleRules = rules.map(rule => { - if (typeof rule.action === 'object') { - // This is a raw-formatted rule object, the way the API expects. - // Just pass it through as-is. 
- return rule; - } - const apiFormattedRule = {}; - apiFormattedRule.condition = {}; - apiFormattedRule.action = { - type: rule.action.charAt(0).toUpperCase() + rule.action.slice(1), - }; - if (rule.storageClass) { - apiFormattedRule.action.storageClass = rule.storageClass; - } - for (const condition in rule.condition) { - if (rule.condition[condition] instanceof Date) { - apiFormattedRule.condition[condition] = rule.condition[condition] - .toISOString() - .replace(/T.+$/, ''); - } - else { - apiFormattedRule.condition[condition] = rule.condition[condition]; - } - } - return apiFormattedRule; - }); - if (options.append === false) { - this.setMetadata({ lifecycle: { rule: newLifecycleRules } }, options, callback); - return; - } - // The default behavior appends the previously-defined lifecycle rules with - // the new ones just passed in by the user. - this.getMetadata((err, metadata) => { - if (err) { - callback(err); - return; - } - const currentLifecycleRules = Array.isArray(metadata.lifecycle && metadata.lifecycle.rule) - ? metadata.lifecycle && metadata.lifecycle.rule - : []; - this.setMetadata({ - lifecycle: { - rule: currentLifecycleRules.concat(newLifecycleRules), - }, - }, options, callback); - }); + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } } - /** - * @typedef {object} CombineOptions - * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of - * the form - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, - * that will be used to encrypt the object. Overwrites the object - * metadata's `kms_key_name` value, if any. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback CombineCallback - * @param {?Error} err Request error, if any. - * @param {File} newFile The new {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} CombineResponse - * @property {File} 0 The new {@link File}. - * @property {object} 1 The full API response. - */ - /** - * Combine multiple files into one new file. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} - * - * @throws {Error} if a non-array is provided as sources argument. - * @throws {Error} if no sources are provided. - * @throws {Error} if no destination is provided. - * - * @param {string[]|File[]} sources The source files that will be - * combined. - * @param {string|File} destination The file you would like the - * source files combined into. - * @param {CombineOptions} [options] Configuration options. - * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of - * the form - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, - * that will be used to encrypt the object. Overwrites the object - * metadata's `kms_key_name` value, if any. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - - * @param {CombineCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const logBucket = storage.bucket('log-bucket'); - * - * const sources = [ - * logBucket.file('2013-logs.txt'), - * logBucket.file('2014-logs.txt') - * ]; - * - * const allLogs = logBucket.file('all-logs.txt'); - * - * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { - * // newFile === allLogs - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * logBucket.combine(sources, allLogs).then(function(data) { - * const newFile = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - combine(sources, destination, optionsOrCallback, callback) { - var _a; - if (!Array.isArray(sources) || sources.length === 0) { - throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); - } - if (!destination) { - throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); - } - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required - AvailableServiceObjectMethods.setMetadata, // Same as above - options); - const convertToFile = (file) => { - if (file instanceof file_1.File) { - return file; - } - return this.file(file); - }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - sources = sources.map(convertToFile); - const destinationFile = convertToFile(destination); - callback = callback || nodejs_common_1.util.noop; - if (!destinationFile.metadata.contentType) { - const destinationContentType = mime.contentType(destinationFile.name); - if (destinationContentType) { - destinationFile.metadata.contentType = destinationContentType; + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? 
{ request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 8467: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(6234); +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "7.0.2"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(6234); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(6234); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof 
query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 4193: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "9.1.5"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete 
response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] } + }; } - let maxRetries = this.storage.retryOptions.maxRetries; - if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === - undefined && - options.ifGenerationMatch === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - maxRetries = 0; - } - if (options.ifGenerationMatch === undefined) { - Object.assign(options, destinationFile.instancePreconditionOpts, options); - } - // Make the request from the destination File object. - destinationFile.request({ - method: 'POST', - uri: '/compose', - maxRetries, - json: { - destination: { - contentType: destinationFile.metadata.contentType, - }, - sourceObjects: sources.map(source => { - const sourceObject = { - name: source.name, - }; - if (source.metadata && source.metadata.generation) { - sourceObject.generation = source.metadata.generation; - } - return sourceObject; - }), - }, - qs: options, - }, (err, resp) => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - if (err) { - callback(err, null, resp); - return; - } - callback(null, destinationFile, resp); - }); + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; } - /** - * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. - * - * @typedef {object} CreateChannelConfig - * @property {string} address The address where notifications are - * delivered for this channel. - * @property {string} [delimiter] Returns results in a directory-like mode. 
- * @property {number} [maxResults] Maximum number of `items` plus `prefixes` - * to return in a single page of responses. - * @property {string} [pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @property {string} [prefix] Filter results to objects whose names begin - * with this prefix. - * @property {string} [projection=noAcl] Set of properties to return. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {boolean} [versions=false] If `true`, lists all versions of an object - * as distinct results. - */ - /** - * @typedef {object} CreateChannelOptions - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} CreateChannelResponse - * @property {Channel} 0 The new {@link Channel}. - * @property {object} 1 The full API response. - */ - /** - * @callback CreateChannelCallback - * @param {?Error} err Request error, if any. - * @param {Channel} channel The new {@link Channel}. - * @param {object} apiResponse The full API response. - */ - /** - * Create a channel that will be notified when objects in this bucket changes. - * - * @throws {Error} If an ID is not provided. - * @throws {Error} If an address is not provided. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} - * - * @param {string} id The ID of the channel to create. - * @param {CreateChannelConfig} config Configuration for creating channel. - * @param {string} config.address The address where notifications are - * delivered for this channel. - * @param {string} [config.delimiter] Returns results in a directory-like mode. - * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` - * to return in a single page of responses. - * @param {string} [config.pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @param {string} [config.prefix] Filter results to objects whose names begin - * with this prefix. - * @param {string} [config.projection=noAcl] Set of properties to return. - * @param {string} [config.userProject] The ID of the project which will be - * billed for the request. - * @param {boolean} [config.versions=false] If `true`, lists all versions of an object - * as distinct results. - * @param {CreateChannelOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {CreateChannelCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const id = 'new-channel-id'; - * - * const config = { - * address: 'https://...' - * }; - * - * bucket.createChannel(id, config, function(err, channel, apiResponse) { - * if (!err) { - * // Channel created successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.createChannel(id, config).then(function(data) { - * const channel = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - createChannel(id, config, optionsOrCallback, callback) { - if (typeof id !== 'string') { - throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); - } - if (typeof config.address !== 'string') { - throw new Error(BucketExceptionMessages.CHANNEL_ADDRESS_REQUIRED); - } - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.request({ - method: 'POST', - uri: '/o/watch', - json: Object.assign({ - id, - type: 'web_hook', - }, config), - qs: options, - }, (err, apiResponse) => { - if (err) { - callback(err, null, apiResponse); - return; - } - const resourceId = apiResponse.resourceId; - const channel = this.storage.channel(id, resourceId); - channel.metadata = apiResponse; - callback(null, channel, apiResponse); - }); + let earlyExit = false; + function done() { + earlyExit = true; } - /** - * Metadata to set for the Notification. - * - * @typedef {object} CreateNotificationOptions - * @property {object} [customAttributes] An optional list of additional - * attributes to attach to each Cloud PubSub message published for this - * notification subscription. - * @property {string[]} [eventTypes] If present, only send notifications about - * listed event types. If empty, sent notifications for all event types. - * @property {string} [objectNamePrefix] If present, only apply this - * notification configuration to object names that begin with this prefix. - * @property {string} [payloadFormat] The desired content of the Payload. - * Defaults to `JSON_API_V1`. - * - * Acceptable values are: - * - `JSON_API_V1` - * - * - `NONE` - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback CreateNotificationCallback - * @param {?Error} err Request error, if any. - * @param {Notification} notification The new {@link Notification}. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} CreateNotificationResponse - * @property {Notification} 0 The new {@link Notification}. - * @property {object} 1 The full API response. - */ - /** - * Creates a notification subscription for the bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} - * - * @param {Topic|string} topic The Cloud PubSub topic to which this - * subscription publishes. If the project ID is omitted, the current - * project ID will be used. - * - * Acceptable formats are: - * - `projects/grape-spaceship-123/topics/my-topic` - * - * - `my-topic` - * @param {CreateNotificationOptions} [options] Metadata to set for the - * notification. - * @param {object} [options.customAttributes] An optional list of additional - * attributes to attach to each Cloud PubSub message published for this - * notification subscription. - * @param {string[]} [options.eventTypes] If present, only send notifications about - * listed event types. If empty, sent notifications for all event types. - * @param {string} [options.objectNamePrefix] If present, only apply this - * notification configuration to object names that begin with this prefix. - * @param {string} [options.payloadFormat] The desired content of the Payload. - * Defaults to `JSON_API_V1`. 
- * - * Acceptable values are: - * - `JSON_API_V1` - * - * - `NONE` - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {CreateNotificationCallback} [callback] Callback function. - * @returns {Promise} - * @throws {Error} If a valid topic is not provided. - * @see Notification#create - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const callback = function(err, notification, apiResponse) { - * if (!err) { - * // The notification was created successfully. - * } - * }; - * - * myBucket.createNotification('my-topic', callback); - * - * //- - * // Configure the nofiication by providing Notification metadata. - * //- - * const metadata = { - * objectNamePrefix: 'prefix-' - * }; - * - * myBucket.createNotification('my-topic', metadata, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myBucket.createNotification('my-topic').then(function(data) { - * const notification = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/createNotification.js - * region_tag:storage_create_bucket_notifications - * Another example: - */ - createNotification(topic, optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - const topicIsObject = topic !== null && typeof topic === 'object'; - if (topicIsObject && nodejs_common_1.util.isCustomType(topic, 'pubsub/topic')) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - topic = topic.name; - } - if (typeof topic !== 'string') { - throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); - } - const body = Object.assign({ topic }, options); - if (body.topic.indexOf('projects') !== 0) { - body.topic = 'projects/{{projectId}}/topics/' + body.topic; - } - body.topic = '//pubsub.googleapis.com/' + body.topic; - if (!body.payloadFormat) { - body.payloadFormat = 'JSON_API_V1'; - } - const query = {}; - if (body.userProject) { - query.userProject = body.userProject; - delete body.userProject; - } - this.request({ - method: 'POST', - uri: '/notificationConfigs', - json: (0, util_2.convertObjKeysToSnakeCase)(body), - qs: query, - maxRetries: 0, //explicitly set this value since this is a non-idempotent function - }, (err, apiResponse) => { - if (err) { - callback(err, null, apiResponse); - return; - } - const notification = this.notification(apiResponse.id); - notification.metadata = apiResponse; - callback(null, notification, apiResponse); - }); - } - /** - * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} - * for all of the supported properties. - * @property {boolean} [force] Suppress errors until all files have been - * processed. - */ - /** - * @callback DeleteFilesCallback - * @param {?Error|?Error[]} err Request error, if any, or array of errors from - * files that were not able to be deleted. - * @param {object} [apiResponse] The full API response. - */ - /** - * Iterate over the bucket's files, calling `file.delete()` on each. - * - * This is not an atomic request. A delete attempt will be - * made for each file individually. Any one can fail, in which case only a - * portion of the files you intended to be deleted would have. - * - * Operations are performed in parallel, up to 10 at once. 
The first error - * breaks the loop and will execute the provided callback with it. Specify - * `{ force: true }` to suppress the errors until all files have had a chance - * to be processed. - * - * File preconditions cannot be passed to this function. It will not retry unless - * the idempotency strategy is set to retry always. - * - * The `query` object passed as the first argument will also be passed to - * {@link Bucket#getFiles}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} - * - * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} - * @param {boolean} [query.force] Suppress errors until all files have been - * processed. - * @param {DeleteFilesCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Delete all of the files in the bucket. - * //- - * bucket.deleteFiles(function(err) {}); - * - * //- - * // By default, if a file cannot be deleted, this method will stop deleting - * // files from your bucket. You can override this setting with `force: - * // true`. - * //- - * bucket.deleteFiles({ - * force: true - * }, function(errors) { - * // `errors`: - * // Array of errors if any occurred, otherwise null. - * }); - * - * //- - * // The first argument to this method acts as a query to - * // {@link Bucket#getFiles}. As an example, you can delete files - * // which match a prefix. - * //- - * bucket.deleteFiles({ - * prefix: 'images/' - * }, function(err) { - * if (!err) { - * // All files in the `images` directory have been deleted. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.deleteFiles().then(function() {}); - * ``` - */ - deleteFiles(queryOrCallback, callback) { - let query = {}; - if (typeof queryOrCallback === 'function') { - callback = queryOrCallback; - } - else if (queryOrCallback) { - query = queryOrCallback; - } - const MAX_PARALLEL_LIMIT = 10; - const MAX_QUEUE_SIZE = 1000; - const errors = []; - const deleteFile = (file) => { - return file.delete(query).catch(err => { - if (!query.force) { - throw err; - } - errors.push(err); - }); - }; - (async () => { - try { - let promises = []; - const limit = pLimit(MAX_PARALLEL_LIMIT); - const filesStream = this.getFilesStream(query); - for await (const curFile of filesStream) { - if (promises.length >= MAX_QUEUE_SIZE) { - await Promise.all(promises); - promises = []; - } - promises.push(limit(() => deleteFile(curFile)).catch(e => { - filesStream.destroy(); - throw e; - })); - } - await Promise.all(promises); - callback(errors.length > 0 ? errors : null); - } - catch (e) { - callback(e); - return; - } - })(); - } - /** - * @typedef {array} DeleteLabelsResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteLabelsCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata Bucket's metadata. - */ - /** - * Delete one or more labels from this bucket. - * - * @param {string|string[]} [labels] The labels to delete. If no labels are - * provided, all of the labels are removed. - * @param {DeleteLabelsCallback} [callback] Callback function. 
- * @param {DeleteLabelsOptions} [options] Options, including precondition options - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Delete all of the labels from this bucket. - * //- - * bucket.deleteLabels(function(err, apiResponse) {}); - * - * //- - * // Delete a single label. - * //- - * bucket.deleteLabels('labelone', function(err, apiResponse) {}); - * - * //- - * // Delete a specific set of labels. - * //- - * bucket.deleteLabels([ - * 'labelone', - * 'labeltwo' - * ], function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.deleteLabels().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { - let labels = new Array(); - let options = {}; - if (typeof labelsOrCallbackOrOptions === 'function') { - callback = labelsOrCallbackOrOptions; - } - else if (typeof labelsOrCallbackOrOptions === 'string') { - labels = [labelsOrCallbackOrOptions]; - } - else if (Array.isArray(labelsOrCallbackOrOptions)) { - labels = labelsOrCallbackOrOptions; - } - else if (labelsOrCallbackOrOptions) { - options = labelsOrCallbackOrOptions; - } - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - const deleteLabels = (labels) => { - const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { - nullLabelMap[labelKey] = null; - return nullLabelMap; - }, {}); - if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { - this.setLabels(nullLabelMap, options, callback); - } - else { - this.setLabels(nullLabelMap, callback); - } - }; - if (labels.length === 0) { - this.getLabels((err, labels) => { - if (err) { - callback(err); - return; - } - deleteLabels(Object.keys(labels)); - }); - } - else { - deleteLabels(labels); - } - } - /** - * @typedef {array} DisableRequesterPaysResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DisableRequesterPaysCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - *
- * Early Access Testers Only
- *
- * This feature is not yet widely-available.
- *
- * - * Disable `requesterPays` functionality from this bucket. - * - * @param {DisableRequesterPaysCallback} [callback] Callback function. - * @param {DisableRequesterPaysOptions} [options] Options, including precondition options - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.disableRequesterPays(function(err, apiResponse) { - * if (!err) { - * // requesterPays functionality disabled successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.disableRequesterPays().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/requesterPays.js - * region_tag:storage_disable_requester_pays - * Example of disabling requester pays: - */ - disableRequesterPays(optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.setMetadata({ - billing: { - requesterPays: false, - }, - }, options, callback); - } - /** - * Configuration object for enabling logging. - * - * @typedef {object} EnableLoggingOptions - * @property {string|Bucket} [bucket] The bucket for the log entries. By - * default, the current bucket is used. - * @property {string} prefix A unique prefix for log object names. - */ - /** - * Enable logging functionality for this bucket. This will make two API - * requests, first to grant Cloud Storage WRITE permission to the bucket, then - * to set the appropriate configuration on the Bucket's metadata. - * - * @param {EnableLoggingOptions} config Configuration options. - * @param {string|Bucket} [config.bucket] The bucket for the log entries. By - * default, the current bucket is used. - * @param {string} config.prefix A unique prefix for log object names. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * const config = { - * prefix: 'log' - * }; - * - * bucket.enableLogging(config, function(err, apiResponse) { - * if (!err) { - * // Logging functionality enabled successfully. - * } - * }); - * - * ``` - * @example - * Optionally, provide a destination bucket. - * ``` - * const config = { - * prefix: 'log', - * bucket: 'destination-bucket' - * }; - * - * bucket.enableLogging(config, function(err, apiResponse) {}); - * ``` - * - * @example - * If the callback is omitted, we'll return a Promise. - * ``` - * bucket.enableLogging(config).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - enableLogging(config, callback) { - if (!config || - typeof config === 'function' || - typeof config.prefix === 'undefined') { - throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); - } - const logBucket = config.bucket - ? config.bucket.id || config.bucket - : this.id; - const options = {}; - if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { - options.ifMetagenerationMatch = config.ifMetagenerationMatch; - } - if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { - options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; - } - (async () => { - try { - const [policy] = await this.iam.getPolicy(); - policy.bindings.push({ - members: ['group:cloud-storage-analytics@google.com'], - role: 'roles/storage.objectCreator', - }); - await this.iam.setPolicy(policy); - this.setMetadata({ - logging: { - logBucket, - logObjectPrefix: config.prefix, - }, - }, options, callback); - } - catch (e) { - callback(e); - return; - } - })(); - } - /** - * @typedef {array} EnableRequesterPaysResponse - * @property {object} 0 The full API response. - */ - /** - * @callback EnableRequesterPaysCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - *
- * Early Access Testers Only
- *
- * This feature is not yet widely-available.
- *
- * - * Enable `requesterPays` functionality for this bucket. This enables you, the - * bucket owner, to have the requesting user assume the charges for the access - * to your bucket and its contents. - * - * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] - * Callback function or precondition options. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.enableRequesterPays(function(err, apiResponse) { - * if (!err) { - * // requesterPays functionality enabled successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.enableRequesterPays().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/requesterPays.js - * region_tag:storage_enable_requester_pays - * Example of enabling requester pays: - */ - enableRequesterPays(optionsOrCallback, cb) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - cb = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.setMetadata({ - billing: { - requesterPays: true, - }, - }, options, cb); + results = results.concat( + mapFn ? mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; } - /** - * Create a {@link File} object. See {@link File} to see how to handle - * the different use cases you may have. - * - * @param {string} name The name of the file in this bucket. - * @param {FileOptions} [options] Configuration options. - * @param {string|number} [options.generation] Only use a specific revision of - * this file. - * @param {string} [options.encryptionKey] A custom encryption key. See - * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. - * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will - * be used to encrypt the object. Must be in the format: - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. - * KMS key ring must use the same location as the bucket. - * @param {string} [options.userProject] The ID of the project which will be - * billed for all requests made from File object. - * @returns {File} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const file = bucket.file('my-existing-file.png'); - * ``` - */ - file(name, options) { - if (!name) { - throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); - } - return new file_1.File(this, name, options); - } - /** - * @typedef {array} GetFilesResponse - * @property {File[]} 0 Array of {@link File} instances. - * @param {object} nextQuery 1 A query object to receive more results. - * @param {object} apiResponse 2 The full API response. - */ - /** - * @callback GetFilesCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files Array of {@link File} instances. - * @param {object} nextQuery A query object to receive more results. - * @param {object} apiResponse The full API response. - */ - /** - * Query object for listing files. - * - * @typedef {object} GetFilesOptions - * @property {boolean} [autoPaginate=true] Have pagination handled - * automatically. 
- * @property {string} [delimiter] Results will contain only objects whose - * names, aside from the prefix, do not contain delimiter. Objects whose - * names, aside from the prefix, contain delimiter will have their name - * truncated after the delimiter, returned in `apiResponse.prefixes`. - * Duplicate prefixes are omitted. - * @property {string} [endOffset] Filter results to objects whose names are - * lexicographically before endOffset. If startOffset is also set, the objects - * listed have names between startOffset (inclusive) and endOffset (exclusive). - * @property {boolean} [includeTrailingDelimiter] If true, objects that end in - * exactly one instance of delimiter have their metadata included in items[] - * in addition to the relevant part of the object name appearing in prefixes[]. - * @property {string} [prefix] Filter results to objects whose names begin - * with this prefix. - * @property {number} [maxApiCalls] Maximum number of API calls to make. - * @property {number} [maxResults] Maximum number of items plus prefixes to - * return per call. - * Note: By default will handle pagination automatically - * if more than 1 page worth of results are requested per call. - * When `autoPaginate` is set to `false` the smaller of `maxResults` - * or 1 page of results will be returned per call. - * @property {string} [pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @property {string} [startOffset] Filter results to objects whose names are - * lexicographically equal to or after startOffset. If endOffset is also set, - * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {boolean} [versions] If true, returns File objects scoped to - * their versions. - */ - /** - * Get {@link File} objects for the files currently in the bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} - * - * @param {GetFilesOptions} [query] Query object for listing files. - * @param {boolean} [query.autoPaginate=true] Have pagination handled - * automatically. - * @param {string} [query.delimiter] Results will contain only objects whose - * names, aside from the prefix, do not contain delimiter. Objects whose - * names, aside from the prefix, contain delimiter will have their name - * truncated after the delimiter, returned in `apiResponse.prefixes`. - * Duplicate prefixes are omitted. - * @param {string} [query.endOffset] Filter results to objects whose names are - * lexicographically before endOffset. If startOffset is also set, the objects - * listed have names between startOffset (inclusive) and endOffset (exclusive). - * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in - * exactly one instance of delimiter have their metadata included in items[] - * in addition to the relevant part of the object name appearing in prefixes[]. - * @param {string} [query.prefix] Filter results to objects whose names begin - * with this prefix. - * @param {number} [query.maxApiCalls] Maximum number of API calls to make. - * @param {number} [query.maxResults] Maximum number of items plus prefixes to - * return per call. - * Note: By default will handle pagination automatically - * if more than 1 page worth of results are requested per call. 
- * When `autoPaginate` is set to `false` the smaller of `maxResults` - * or 1 page of results will be returned per call. - * @param {string} [query.pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @param {string} [query.startOffset] Filter results to objects whose names are - * lexicographically equal to or after startOffset. If endOffset is also set, - * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). - * @param {string} [query.userProject] The ID of the project which will be - * billed for the request. - * @param {boolean} [query.versions] If true, returns File objects scoped to - * their versions. - * @param {GetFilesCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getFiles(function(err, files) { - * if (!err) { - * // files is an array of File objects. - * } - * }); - * - * //- - * // If your bucket has versioning enabled, you can get all of your files - * // scoped to their generation. - * //- - * bucket.getFiles({ - * versions: true - * }, function(err, files) { - * // Each file is scoped to its generation. - * }); - * - * //- - * // To control how many API requests are made and page through the results - * // manually, set `autoPaginate` to `false`. - * //- - * const callback = function(err, files, nextQuery, apiResponse) { - * if (nextQuery) { - * // More results exist. - * bucket.getFiles(nextQuery, callback); - * } - * - * // The `metadata` property is populated for you with the metadata at the - * // time of fetching. - * files[0].metadata; - * - * // However, in cases where you are concerned the metadata could have - * // changed, use the `getMetadata` method. - * files[0].getMetadata(function(err, metadata) {}); - * }; - * - * bucket.getFiles({ - * autoPaginate: false - * }, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getFiles().then(function(data) { - * const files = data[0]; - * }); - * - * ``` - * @example - *
- * Simulating a File System
- *
- * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
- *
- * Consider the following remote objects:
- *
- *   1. "a"
- *   2. "a/b/c/d"
- *   3. "b/d/e"
- *
- * Using a delimiter of `/` will return a single file, "a".
- *
- * `apiResponse.prefixes` will return the "sub-directories" that were found:
- *
- *   1. "a/"
- *   2. "b/"
- * ``` - * bucket.getFiles({ - * autoPaginate: false, - * delimiter: '/' - * }, function(err, files, nextQuery, apiResponse) { - * // files = [ - * // {File} // File object for file "a" - * // ] - * - * // apiResponse.prefixes = [ - * // 'a/', - * // 'b/' - * // ] - * }); - * ``` - * - * @example - * Using prefixes, it's now possible to simulate a file system with follow-up requests. - * ``` - * bucket.getFiles({ - * autoPaginate: false, - * delimiter: '/', - * prefix: 'a/' - * }, function(err, files, nextQuery, apiResponse) { - * // No files found within "directory" a. - * // files = [] - * - * // However, a "sub-directory" was found. - * // This prefix can be used to continue traversing the "file system". - * // apiResponse.prefixes = [ - * // 'a/b/' - * // ] - * }); - * ``` - * - * @example include:samples/files.js - * region_tag:storage_list_files - * Another example: - * - * @example include:samples/files.js - * region_tag:storage_list_files_with_prefix - * Example of listing files, filtered by a prefix: - */ - getFiles(queryOrCallback, callback) { - let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; - if (!callback) { - callback = queryOrCallback; - } - query = Object.assign({}, query); - this.request({ - uri: '/o', - qs: query, - }, (err, resp) => { - if (err) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback(err, null, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const files = itemsArray.map((file) => { - const options = {}; - if (query.versions) { - options.generation = file.generation; - } - if (file.kmsKeyName) { - options.kmsKeyName = file.kmsKeyName; - } - const fileInstance = this.file(file.name, options); - fileInstance.metadata = file; - return fileInstance; - }); - let nextQuery = null; - if (resp.nextPageToken) { - nextQuery = Object.assign({}, query, { - pageToken: resp.nextPageToken, - }); - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback(null, files, nextQuery, resp); - }); - } - /** - * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). - * @param {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} GetLabelsResponse - * @property {object} 0 Object of labels currently set on this bucket. - */ - /** - * @callback GetLabelsCallback - * @param {?Error} err Request error, if any. - * @param {object} labels Object of labels currently set on this bucket. - */ - /** - * Get the labels currently set on this bucket. - * - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetLabelsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getLabels(function(err, labels) { - * if (err) { - * // Error handling omitted. - * } - * - * // labels = { - * // label: 'labelValue', - * // ... - * // } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.getLabels().then(function(data) { - * const labels = data[0]; - * }); - * ``` - */ - getLabels(optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.getMetadata(options, (err, metadata) => { - if (err) { - callback(err, null); - return; - } - callback(null, metadata.labels || {}); - }); - } - /** - * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback GetNotificationsCallback - * @param {?Error} err Request error, if any. - * @param {Notification[]} notifications Array of {@link Notification} - * instances. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} GetNotificationsResponse - * @property {Notification[]} 0 Array of {@link Notification} instances. - * @property {object} 1 The full API response. - */ - /** - * Retrieves a list of notification subscriptions for a given bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} - * - * @param {GetNotificationsOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetNotificationsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * bucket.getNotifications(function(err, notifications, apiResponse) { - * if (!err) { - * // notifications is an array of Notification objects. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getNotifications().then(function(data) { - * const notifications = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/listNotifications.js - * region_tag:storage_list_bucket_notifications - * Another example: - */ - getNotifications(optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.request({ - uri: '/notificationConfigs', - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const notifications = itemsArray.map((notification) => { - const notificationInstance = this.notification(notification.id); - notificationInstance.metadata = notification; - return notificationInstance; - }); - callback(null, notifications, resp); - }); - } - /** - * @typedef {array} GetSignedUrlResponse - * @property {object} 0 The signed URL. - */ - /** - * @callback GetSignedUrlCallback - * @param {?Error} err Request error, if any. - * @param {object} url The signed URL. - */ - /** - * @typedef {object} GetBucketSignedUrlConfig - * @property {string} action Currently only supports "list" (HTTP: GET). - * @property {*} expires A timestamp when this link will expire. Any value - * given is passed to `new Date()`. - * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. - * @property {string} [version='v2'] The signing version to use, either - * 'v2' or 'v4'. 
- * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instaed of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @property {string} [cname] The cname for this bucket, i.e., - * "https://cdn.example.com". - * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} - * @property {object} [extensionHeaders] If these headers are used, the - * server will check to make sure that the client provides matching - * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} - * for the requirements of this feature, most notably: - * - The header name must be prefixed with `x-goog-` - * - The header name must be all lowercase - * - * Note: Multi-valued header passed as an array in the extensionHeaders - * object is converted into a string, delimited by `,` with - * no space. Requests made using the signed URL will need to - * delimit multi-valued headers using a single `,` as well, or - * else the server will report a mismatched signature. - * @property {object} [queryParams] Additional query parameters to include - * in the signed URL. - */ - /** - * Get a signed URL to allow limited time access to a bucket. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed URL. That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} - * - * @throws {Error} if an expiration timestamp from the past is given. - * - * @param {GetBucketSignedUrlConfig} config Configuration object. - * @param {string} config.action Currently only supports "list" (HTTP: GET). - * @param {*} config.expires A timestamp when this link will expire. Any value - * given is passed to `new Date()`. - * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. - * @param {string} [config.version='v2'] The signing version to use, either - * 'v2' or 'v4'. - * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instaed of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @param {string} [config.cname] The cname for this bucket, i.e., - * "https://cdn.example.com". - * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} - * @param {object} [config.extensionHeaders] If these headers are used, the - * server will check to make sure that the client provides matching - * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} - * for the requirements of this feature, most notably: - * - The header name must be prefixed with `x-goog-` - * - The header name must be all lowercase - * - * Note: Multi-valued header passed as an array in the extensionHeaders - * object is converted into a string, delimited by `,` with - * no space. Requests made using the signed URL will need to - * delimit multi-valued headers using a single `,` as well, or - * else the server will report a mismatched signature. - * @property {object} [config.queryParams] Additional query parameters to include - * in the signed URL. - * @param {GetSignedUrlCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * //- - * // Generate a URL that allows temporary access to list files in a bucket. - * //- - * const request = require('request'); - * - * const config = { - * action: 'list', - * expires: '03-17-2025' - * }; - * - * bucket.getSignedUrl(config, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The bucket is now available to be listed from this URL. - * request(url, function(err, resp) { - * // resp.statusCode = 200 - * }); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getSignedUrl(config).then(function(data) { - * const url = data[0]; - * }); - * ``` - */ - getSignedUrl(cfg, callback) { - const method = BucketActionToHTTPMethod[cfg.action]; - if (!method) { - throw new Error(storage_1.ExceptionMessages.INVALID_ACTION); - } - const signConfig = { - method, - expires: cfg.expires, - version: cfg.version, - cname: cfg.cname, - extensionHeaders: cfg.extensionHeaders || {}, - queryParams: cfg.queryParams || {}, - }; - if (!this.signer) { - this.signer = new signer_1.URLSigner(this.storage.authClient, this); - } - this.signer - .getSignedUrl(signConfig) - .then(signedUrl => callback(null, signedUrl), callback); - } - /** - * @callback BucketLockCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Lock a previously-defined retention policy. This will prevent changes to - * the policy. - * - * @throws {Error} if a metageneration is not provided. - * - * @param {number|string} metageneration The bucket's metageneration. This is - * accesssible from calling {@link File#getMetadata}. - * @param {BucketLockCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * const metageneration = 2; - * - * bucket.lock(metageneration, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.lock(metageneration).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - lock(metageneration, callback) { - const metatype = typeof metageneration; - if (metatype !== 'number' && metatype !== 'string') { - throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); - } - this.request({ - method: 'POST', - uri: '/lockRetentionPolicy', - qs: { - ifMetagenerationMatch: metageneration, - }, - }, callback); - } - /** - * @typedef {array} MakeBucketPrivateResponse - * @property {File[]} 0 List of files made private. - */ - /** - * @callback MakeBucketPrivateCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files List of files made private. - */ - /** - * @typedef {object} MakeBucketPrivateOptions - * @property {boolean} [includeFiles=false] Make each file in the bucket - * private. - * @property {Metadata} [metadata] Define custom metadata properties to define - * along with the operation. - * @property {boolean} [force] Queue errors occurred while making files - * private until all files have been processed. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Make the bucket listing private. - * - * You may also choose to make the contents of the bucket private by - * specifying `includeFiles: true`. This will automatically run - * {@link File#makePrivate} for every file in the bucket. - * - * When specifying `includeFiles: true`, use `force: true` to delay execution - * of your callback until all files have been processed. By default, the - * callback is executed after the first error. Use `force` to queue such - * errors until all files have been processed, after which they will be - * returned as an array as the first argument to your callback. - * - * NOTE: This may cause the process to be long-running and use a high number - * of requests. Use with caution. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @param {MakeBucketPrivateOptions} [options] Configuration options. - * @param {boolean} [options.includeFiles=false] Make each file in the bucket - * private. - * @param {Metadata} [options.metadata] Define custom metadata properties to define - * along with the operation. - * @param {boolean} [options.force] Queue errors occurred while making files - * private until all files have been processed. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {MakeBucketPrivateCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Make the bucket private. - * //- - * bucket.makePrivate(function(err) {}); - * - * //- - * // Make the bucket and its contents private. - * //- - * const opts = { - * includeFiles: true - * }; - * - * bucket.makePrivate(opts, function(err, files) { - * // `err`: - * // The first error to occur, otherwise null. - * // - * // `files`: - * // Array of files successfully made private in the bucket. - * }); - * - * //- - * // Make the bucket and its contents private, using force to suppress errors - * // until all files have been processed. 
- * //- - * const opts = { - * includeFiles: true, - * force: true - * }; - * - * bucket.makePrivate(opts, function(errors, files) { - * // `errors`: - * // Array of errors if any occurred, otherwise null. - * // - * // `files`: - * // Array of files successfully made private in the bucket. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.makePrivate(opts).then(function(data) { - * const files = data[0]; - * }); - * ``` - */ - makePrivate(optionsOrCallback, callback) { - var _a, _b, _c, _d; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - options.private = true; - const query = { - predefinedAcl: 'projectPrivate', - }; - if (options.userProject) { - query.userProject = options.userProject; - } - if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { - query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; - } - if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { - query.ifGenerationNotMatch = - options.preconditionOpts.ifGenerationNotMatch; - } - if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { - query.ifMetagenerationMatch = - options.preconditionOpts.ifMetagenerationMatch; - } - if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { - query.ifMetagenerationNotMatch = - options.preconditionOpts.ifMetagenerationNotMatch; - } - // You aren't allowed to set both predefinedAcl & acl properties on a bucket - // so acl must explicitly be nullified. - const metadata = extend({}, options.metadata, { acl: null }); - this.setMetadata(metadata, query, err => { - if (err) { - callback(err); - } - const internalCall = () => { - if (options.includeFiles) { - return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); - } - return Promise.resolve([]); - }; - internalCall() - .then(files => callback(null, files)) - .catch(callback); - }); - } - /** - * @typedef {object} MakeBucketPublicOptions - * @property {boolean} [includeFiles=false] Make each file in the bucket - * private. - * @property {boolean} [force] Queue errors occurred while making files - * private until all files have been processed. - */ - /** - * @callback MakeBucketPublicCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files List of files made public. - */ - /** - * @typedef {array} MakeBucketPublicResponse - * @property {File[]} 0 List of files made public. - */ - /** - * Make the bucket publicly readable. - * - * You may also choose to make the contents of the bucket publicly readable by - * specifying `includeFiles: true`. This will automatically run - * {@link File#makePublic} for every file in the bucket. - * - * When specifying `includeFiles: true`, use `force: true` to delay execution - * of your callback until all files have been processed. By default, the - * callback is executed after the first error. Use `force` to queue such - * errors until all files have been processed, after which they will be - * returned as an array as the first argument to your callback. - * - * NOTE: This may cause the process to be long-running and use a high number - * of requests. Use with caution. 
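For orientation, a minimal usage sketch of the `makePublic` call documented in this hunk, using the callback signature shown above. The `albums` bucket name is illustrative; `force: true` queues per-file errors instead of stopping at the first failure.

```js
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums'); // illustrative bucket name

bucket.makePublic({includeFiles: true, force: true}, (errors, files) => {
  // `errors`: array of errors if any occurred, otherwise null.
  // `files`: the File objects that were successfully made public.
  if (errors) {
    console.error(`${errors.length} file(s) could not be made public`);
  }
  console.log(`${files.length} file(s) made public`);
});
```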
- * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @param {MakeBucketPublicOptions} [options] Configuration options. - * @param {boolean} [options.includeFiles=false] Make each file in the bucket - * private. - * @param {boolean} [options.force] Queue errors occurred while making files - * private until all files have been processed. - * @param {MakeBucketPublicCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Make the bucket publicly readable. - * //- - * bucket.makePublic(function(err) {}); - * - * //- - * // Make the bucket and its contents publicly readable. - * //- - * const opts = { - * includeFiles: true - * }; - * - * bucket.makePublic(opts, function(err, files) { - * // `err`: - * // The first error to occur, otherwise null. - * // - * // `files`: - * // Array of files successfully made public in the bucket. - * }); - * - * //- - * // Make the bucket and its contents publicly readable, using force to - * // suppress errors until all files have been processed. - * //- - * const opts = { - * includeFiles: true, - * force: true - * }; - * - * bucket.makePublic(opts, function(errors, files) { - * // `errors`: - * // Array of errors if any occurred, otherwise null. - * // - * // `files`: - * // Array of files successfully made public in the bucket. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.makePublic(opts).then(function(data) { - * const files = data[0]; - * }); - * ``` - */ - makePublic(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const req = extend(true, { public: true }, options); - this.acl - .add({ - entity: 'allUsers', - role: 'READER', - }) - .then(() => { - return this.acl.default.add({ - entity: 'allUsers', - role: 'READER', - }); - }) - .then(() => { - if (req.includeFiles) { - return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); - } - return []; - }) - .then(files => callback(null, files), callback); - } - /** - * Get a reference to a Cloud Pub/Sub Notification. - * - * @param {string} id ID of notification. - * @returns {Notification} - * @see Notification - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const notification = bucket.notification('1'); - * ``` - */ - notification(id) { - if (!id) { - throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); - } - return new notification_1.Notification(this, id); - } - /** - * Remove an already-existing retention policy from this bucket, if it is not - * locked. - * - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @param {SetBucketMetadataOptions} [options] Options, including precondition options - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * bucket.removeRetentionPeriod(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.removeRetentionPeriod().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - removeRetentionPeriod(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.setMetadata({ - retentionPolicy: null, - }, options, callback); - } - /** - * Makes request and applies userProject query parameter if necessary. - * - * @private - * - * @param {object} reqOpts - The request options. - * @param {function} callback - The callback function. - */ - request(reqOpts, callback) { - if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { - reqOpts.qs = extend(reqOpts.qs, { userProject: this.userProject }); - } - return super.request(reqOpts, callback); - } - /** - * @typedef {array} SetLabelsResponse - * @property {object} 0 The bucket metadata. - */ - /** - * @callback SetLabelsCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The bucket metadata. - */ - /** - * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Set labels on the bucket. - * - * This makes an underlying call to {@link Bucket#setMetadata}, which - * is a PATCH request. This means an individual label can be overwritten, but - * unmentioned labels will not be touched. - * - * @param {object} labels Labels to set on the bucket. - * @param {SetLabelsOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {SetLabelsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * const labels = { - * labelone: 'labelonevalue', - * labeltwo: 'labeltwovalue' - * }; - * - * bucket.setLabels(labels, function(err, metadata) { - * if (!err) { - * // Labels set successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setLabels(labels).then(function(data) { - * const metadata = data[0]; - * }); - * ``` - */ - setLabels(labels, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - callback = callback || nodejs_common_1.util.noop; - this.setMetadata({ labels }, options, callback); - } - setMetadata(metadata, optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = - typeof optionsOrCallback === 'function' - ? optionsOrCallback - : cb; - this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); - super - .setMetadata(metadata, options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } - /** - * Lock all objects contained in the bucket, based on their creation time. Any - * attempt to overwrite or delete objects younger than the retention period - * will result in a `PERMISSION_DENIED` error. 
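For context, a short sketch (promise form) of how the retention APIs documented here might be used together: set a period with `setRetentionPeriod`, then lock it with `lock`, which needs the bucket's current metageneration from `getMetadata`. The bucket name and duration are illustrative, and locking is irreversible.

```js
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums'); // illustrative bucket name

const SIX_MONTHS_IN_SECONDS = 15780000;

async function applyAndLockRetentionPolicy() {
  // Create (or update) an unlocked retention policy.
  await bucket.setRetentionPeriod(SIX_MONTHS_IN_SECONDS);

  // lock() requires the bucket's current metageneration.
  const [metadata] = await bucket.getMetadata();

  // Once locked, the policy cannot be removed or shortened.
  await bucket.lock(metadata.metageneration);
}
```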
- * - * An unlocked retention policy can be modified or removed from the bucket via - * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A - * locked retention policy cannot be removed or shortened in duration for the - * lifetime of the bucket. Attempting to remove or decrease period of a locked - * retention policy will result in a `PERMISSION_DENIED` error. You can still - * increase the policy. - * - * @param {*} duration In seconds, the minimum retention time for all objects - * contained in this bucket. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @param {SetBucketMetadataCallback} [options] Options, including precondition options. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * const DURATION_SECONDS = 15780000; // 6 months. - * - * //- - * // Lock the objects in this bucket for 6 months. - * //- - * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setRetentionPeriod(duration, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.setMetadata({ - retentionPolicy: { - retentionPeriod: duration, - }, - }, options, callback); - } - /** - * - * @typedef {object} Cors - * @property {number} [maxAgeSeconds] The number of seconds the browser is - * allowed to make requests before it must repeat the preflight request. - * @property {string[]} [method] HTTP method allowed for cross origin resource - * sharing with this bucket. - * @property {string[]} [origin] an origin allowed for cross origin resource - * sharing with this bucket. - * @property {string[]} [responseHeader] A header allowed for cross origin - * resource sharing with this bucket. - */ - /** - * This can be used to set the CORS configuration on the bucket. - * - * The configuration will be overwritten with the value passed into this. - * - * @param {Cors[]} corsConfiguration The new CORS configuration to set - * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is - * allowed to make requests before it must repeat the preflight request. - * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource - * sharing with this bucket. - * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource - * sharing with this bucket. - * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin - * resource sharing with this bucket. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @param {SetBucketMetadataOptions} [options] Options, including precondition options. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour - * bucket.setCorsConfiguration(corsConfiguration); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.setMetadata({ - cors: corsConfiguration, - }, options, callback); - } - /** - * @typedef {object} SetBucketStorageClassOptions - * @property {string} [userProject] - The ID of the project which will be - * billed for the request. - */ - /** - * @callback SetBucketStorageClassCallback - * @param {?Error} err Request error, if any. - */ - /** - * Set the default storage class for new files in this bucket. - * - * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} - * - * @param {string} storageClass The new storage class. (`standard`, - * `nearline`, `coldline`, or `archive`). - * **Note:** The storage classes `multi_regional`, `regional`, and - * `durable_reduced_availability` are now legacy and will be deprecated in - * the future. - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] - The ID of the project which will be - * billed for the request. - * @param {SetStorageClassCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.setStorageClass('nearline', function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * // The storage class was updated successfully. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setStorageClass('nearline').then(function() {}); - * ``` - */ - setStorageClass(storageClass, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - // In case we get input like `storageClass`, convert to `storage_class`. - storageClass = storageClass - .replace(/-/g, '_') - .replace(/([a-z])([A-Z])/g, (_, low, up) => { - return low + '_' + up; - }) - .toUpperCase(); - this.setMetadata({ storageClass }, options, callback); - } - /** - * Set a user project to be billed for all requests made from this Bucket - * object and any files referenced from this Bucket object. - * - * @param {string} userProject The user project. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.setUserProject('grape-spaceship-123'); - * ``` - */ - setUserProject(userProject) { - this.userProject = userProject; - const methods = [ - 'create', - 'delete', - 'exists', - 'get', - 'getMetadata', - 'setMetadata', - ]; - methods.forEach(method => { - const methodConfig = this.methods[method]; - if (typeof methodConfig === 'object') { - if (typeof methodConfig.reqOpts === 'object') { - extend(methodConfig.reqOpts.qs, { userProject }); - } - else { - methodConfig.reqOpts = { - qs: { userProject }, - }; - } - } - }); - } - /** - * @typedef {object} UploadOptions Configuration options for Bucket#upload(). - * @property {string|File} [destination] The place to save - * your file. 
If given a string, the file will be uploaded to the bucket - * using the string as a filename. When given a File object, your local - * file will be uploaded to the File object's bucket and under the File - * object's name. Lastly, when this argument is omitted, the file is uploaded - * to your bucket using the name of the local file. - * @property {string} [encryptionKey] A custom encryption key. See - * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. - * @property {boolean} [gzip] Automatically gzip the file. This will set - * `options.metadata.contentEncoding` to `gzip`. - * @property {string} [kmsKeyName] The name of the Cloud KMS key that will - * be used to encrypt the object. Must be in the format: - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. - * @property {object} [metadata] See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. - * @property {string} [offset] The starting byte of the upload stream, for - * resuming an interrupted upload. Defaults to 0. - * @property {string} [predefinedAcl] Apply a predefined set of access - * controls to this object. - * - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. - * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @property {boolean} [private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @property {boolean} [public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @property {boolean} [resumable=true] Resumable uploads are automatically - * enabled and must be shut off explicitly by setting to false. - * @property {number} [timeout=60000] Set the HTTP request timeout in - * milliseconds. This option is not available for resumable uploads. - * Default: `60000` - * @property {string} [uri] The URI for an already-created resumable - * upload. See {@link File#createResumableUpload}. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string|boolean} [validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with an - * MD5 checksum for maximum reliability. CRC32c will provide better - * performance with less reliability. You may also choose to skip - * validation completely, however this is **not recommended**. - */ - /** - * @typedef {array} UploadResponse - * @property {object} 0 The uploaded {@link File}. - * @property {object} 1 The full API response. - */ - /** - * @callback UploadCallback - * @param {?Error} err Request error, if any. - * @param {object} file The uploaded {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * Upload a file to the bucket. This is a convenience method that wraps - * {@link File#createWriteStream}. 
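For orientation, a minimal sketch of the `upload` convenience method described here, using the promise form; the local path, destination name, and metadata values are illustrative only.

```js
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('albums'); // illustrative bucket name

async function uploadPhoto() {
  // Uploads the local file as "photos/zoo.png", gzip-compressed on the fly,
  // attaching custom object metadata.
  const [file] = await bucket.upload('/local/path/zoo.png', {
    destination: 'photos/zoo.png',
    gzip: true,
    metadata: {metadata: {event: 'Fall trip to the zoo'}},
  });
  console.log(`Uploaded ${file.name}`);
}
```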
- * - * Resumable uploads are enabled by default - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} - * - * @param {string} pathString The fully qualified path to the file you - * wish to upload to your bucket. - * @param {UploadOptions} [options] Configuration options. - * @param {string|File} [options.destination] The place to save - * your file. If given a string, the file will be uploaded to the bucket - * using the string as a filename. When given a File object, your local - * file will be uploaded to the File object's bucket and under the File - * object's name. Lastly, when this argument is omitted, the file is uploaded - * to your bucket using the name of the local file. - * @param {string} [options.encryptionKey] A custom encryption key. See - * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. - * @param {boolean} [options.gzip] Automatically gzip the file. This will set - * `options.metadata.contentEncoding` to `gzip`. - * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will - * be used to encrypt the object. Must be in the format: - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. - * @param {object} [options.metadata] See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. - * @param {string} [options.offset] The starting byte of the upload stream, for - * resuming an interrupted upload. Defaults to 0. - * @param {string} [options.predefinedAcl] Apply a predefined set of access - * controls to this object. - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. - * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @param {boolean} [options.private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @param {boolean} [options.public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @param {boolean} [options.resumable=true] Resumable uploads are automatically - * enabled and must be shut off explicitly by setting to false. - * @param {number} [options.timeout=60000] Set the HTTP request timeout in - * milliseconds. This option is not available for resumable uploads. - * Default: `60000` - * @param {string} [options.uri] The URI for an already-created resumable - * upload. See {@link File#createResumableUpload}. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {string|boolean} [options.validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with an - * MD5 checksum for maximum reliability. 
CRC32c will provide better - * performance with less reliability. You may also choose to skip - * validation completely, however this is **not recommended**. - * @param {UploadCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Upload a file from a local path. - * //- - * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { - * // Your bucket now contains: - * // - "image.png" (with the contents of `/local/path/image.png') - * - * // `file` is an instance of a File object that refers to your new file. - * }); - * - * - * //- - * // It's not always that easy. You will likely want to specify the filename - * // used when your new file lands in your bucket. - * // - * // You may also want to set metadata or customize other options. - * //- - * const options = { - * destination: 'new-image.png', - * validation: 'crc32c', - * metadata: { - * metadata: { - * event: 'Fall trip to the zoo' - * } - * } - * }; - * - * bucket.upload('local-image.png', options, function(err, file) { - * // Your bucket now contains: - * // - "new-image.png" (with the contents of `local-image.png') - * - * // `file` is an instance of a File object that refers to your new file. - * }); - * - * //- - * // You can also have a file gzip'd on the fly. - * //- - * bucket.upload('index.html', { gzip: true }, function(err, file) { - * // Your bucket now contains: - * // - "index.html" (automatically compressed with gzip) - * - * // Downloading the file with `file.download` will automatically decode - * the - * // file. - * }); - * - * //- - * // You may also re-use a File object, {File}, that references - * // the file you wish to create or overwrite. - * //- - * const options = { - * destination: bucket.file('existing-file.png'), - * resumable: false - * }; - * - * bucket.upload('local-img.png', options, function(err, newFile) { - * // Your bucket now contains: - * // - "existing-file.png" (with the contents of `local-img.png') - * - * // Note: - * // The `newFile` parameter is equal to `file`. - * }); - * - * //- - * // To use - * // - * // Customer-supplied Encryption Keys, provide the `encryptionKey` - * option. - * //- - * const crypto = require('crypto'); - * const encryptionKey = crypto.randomBytes(32); - * - * bucket.upload('img.png', { - * encryptionKey: encryptionKey - * }, function(err, newFile) { - * // `img.png` was uploaded with your custom encryption key. - * - * // `newFile` is already configured to use the encryption key when making - * // operations on the remote object. - * - * // However, to use your encryption key later, you must create a `File` - * // instance with the `key` supplied: - * const file = bucket.file('img.png', { - * encryptionKey: encryptionKey - * }); - * - * // Or with `file#setEncryptionKey`: - * const file = bucket.file('img.png'); - * file.setEncryptionKey(encryptionKey); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.upload('local-image.png').then(function(data) { - * const file = data[0]; - * }); - * - * To upload a file from a URL, use {@link File#createWriteStream}. 
- * - * ``` - * @example include:samples/files.js - * region_tag:storage_upload_file - * Another example: - * - * @example include:samples/encryption.js - * region_tag:storage_upload_encrypted_file - * Example of uploading an encrypted file: - */ - upload(pathString, optionsOrCallback, callback) { - var _a, _b; - const upload = (numberOfRetries) => { - const returnValue = retry(async (bail) => { - await new Promise((resolve, reject) => { - var _a, _b; - if (numberOfRetries === 0 && - ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { - newFile.storage.retryOptions.autoRetry = false; - } - const writable = newFile.createWriteStream(options); - if (options.onUploadProgress) { - writable.on('progress', options.onUploadProgress); - } - fs.createReadStream(pathString) - .on('error', bail) - .pipe(writable) - .on('error', err => { - if (this.storage.retryOptions.autoRetry && - this.storage.retryOptions.retryableErrorFn(err)) { - return reject(err); - } - else { - return bail(err); - } - }) - .on('finish', () => { - return resolve(); - }); - }); - }, { - retries: numberOfRetries, - factor: this.storage.retryOptions.retryDelayMultiplier, - maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, - maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds - }); - if (!callback) { - return returnValue; - } - else { - return returnValue - .then(() => { - if (callback) { - return callback(null, newFile, newFile.metadata); - } - }) - .catch(callback); - } - }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if (global['GCLOUD_SANDBOX_ENV']) { - return; - } - let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - options = Object.assign({ - metadata: {}, - }, options); - // Do not retry if precondition option ifGenerationMatch is not set - // because this is a file operation - let maxRetries = this.storage.retryOptions.maxRetries; - if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && - ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - maxRetries = 0; - } - let newFile; - if (options.destination instanceof file_1.File) { - newFile = options.destination; - } - else if (options.destination !== null && - typeof options.destination === 'string') { - // Use the string as the name of the file. - newFile = this.file(options.destination, { - encryptionKey: options.encryptionKey, - kmsKeyName: options.kmsKeyName, - preconditionOpts: this.instancePreconditionOpts, - }); - } - else { - // Resort to using the name of the incoming file. - const destination = path.basename(pathString); - newFile = this.file(destination, { - encryptionKey: options.encryptionKey, - kmsKeyName: options.kmsKeyName, - preconditionOpts: this.instancePreconditionOpts, - }); - } - upload(maxRetries); - } - /** - * @private - * - * @typedef {object} MakeAllFilesPublicPrivateOptions - * @property {boolean} [force] Suppress errors until all files have been - * processed. 
- * @property {boolean} [private] Make files private. - * @property {boolean} [public] Make files public. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @private - * - * @callback SetBucketMetadataCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files Files that were updated. - */ - /** - * @typedef {array} MakeAllFilesPublicPrivateResponse - * @property {File[]} 0 List of files affected. - */ - /** - * Iterate over all of a bucket's files, calling `file.makePublic()` (public) - * or `file.makePrivate()` (private) on each. - * - * Operations are performed in parallel, up to 10 at once. The first error - * breaks the loop, and will execute the provided callback with it. Specify - * `{ force: true }` to suppress the errors. - * - * @private - * - * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. - * @param {boolean} [options.force] Suppress errors until all files have been - * processed. - * @param {boolean} [options.private] Make files private. - * @param {boolean} [options.public] Make files public. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - - * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. - * - * @return {Promise} - */ - makeAllFilesPublicPrivate_(optionsOrCallback, callback) { - const MAX_PARALLEL_LIMIT = 10; - const errors = []; - const updatedFiles = []; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const processFile = async (file) => { - try { - await (options.public ? file.makePublic() : file.makePrivate(options)); - updatedFiles.push(file); - } - catch (e) { - if (!options.force) { - throw e; - } - errors.push(e); - } - }; - this.getFiles(options) - .then(([files]) => { - const limit = pLimit(MAX_PARALLEL_LIMIT); - const promises = files.map(file => { - return limit(() => processFile(file)); - }); - return Promise.all(promises); - }) - .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); - } - getId() { - return this.id; - } - disableAutoRetryConditionallyIdempotent_( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - coreOpts, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - methodType, localPreconditionOptions) { - var _a, _b; - if (typeof coreOpts === 'object' && - ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && - (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && - (methodType === AvailableServiceObjectMethods.setMetadata || - methodType === AvailableServiceObjectMethods.delete) && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) { - this.storage.retryOptions.autoRetry = false; - } - else if (this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - this.storage.retryOptions.autoRetry = false; - } - } -} -exports.Bucket = Bucket; -/*! Developer Documentation - * - * These methods can be auto-paginated. - */ -paginator_1.paginator.extend(Bucket, 'getFiles'); -/*! 
Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Bucket, { - exclude: ['cloudStorageURI', 'request', 'file', 'notification'], -}); -//# sourceMappingURL=bucket.js.map - -/***/ }), - -/***/ 8953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Channel = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const promisify_1 = __nccwpck_require__(9203); -/** - * Create a channel object to interact with a Cloud Storage channel. - * - * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} - * - * @class - * - * @param {string} id The ID of the channel. - * @param {string} resourceId The resource ID of the channel. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const channel = storage.channel('id', 'resource-id'); - * ``` - */ -class Channel extends nodejs_common_1.ServiceObject { - constructor(storage, id, resourceId) { - const config = { - parent: storage, - baseUrl: '/channels', - // An ID shouldn't be included in the API requests. - // RE: - // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 - id: '', - methods: { - // Only need `request`. - }, - }; - super(config); - this.metadata.id = id; - this.metadata.resourceId = resourceId; - } - /** - * @typedef {array} StopResponse - * @property {object} 0 The full API response. - */ - /** - * @callback StopCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Stop this channel. - * - * @param {StopCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const channel = storage.channel('id', 'resource-id'); - * channel.stop(function(err, apiResponse) { - * if (!err) { - * // Channel stopped successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * channel.stop().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - stop(callback) { - callback = callback || nodejs_common_1.util.noop; - this.request({ - method: 'POST', - uri: '/stop', - json: this.metadata, - }, (err, apiResponse) => { - callback(err, apiResponse); - }); - } -} -exports.Channel = Channel; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. 
- */ -(0, promisify_1.promisifyAll)(Channel); -//# sourceMappingURL=channel.js.map - -/***/ }), - -/***/ 4921: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); -}; -var _CRC32C_crc32c; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; -const fs_1 = __nccwpck_require__(7147); -/** - * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} - */ -const CRC32C_EXTENSIONS = [ - 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, - 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, - 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, - 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, - 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, - 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, - 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, - 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, - 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, - 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, - 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, - 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, - 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, - 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, - 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, - 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, - 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, - 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, - 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, - 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, - 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, - 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, - 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, - 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, - 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, - 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, - 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, - 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, - 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, - 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, - 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, - 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, - 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, - 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, - 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, - 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, - 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, - 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, - 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, - 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, - 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 
0x34f4f86a, 0xc69f7b69, - 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, - 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, -]; -exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; -const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); -exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; -const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); -exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; -const CRC32C_EXCEPTION_MESSAGES = { - INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, - INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, - INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, -}; -exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; -class CRC32C { - /** - * Constructs a new `CRC32C` object. - * - * Reconstruction is recommended via the `CRC32C.from` static method. - * - * @param initialValue An initial CRC32C value - a signed 32-bit integer. - */ - constructor(initialValue = 0) { - /** Current CRC32C value */ - _CRC32C_crc32c.set(this, 0); - __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); - } - /** - * Calculates a CRC32C from a provided buffer. - * - * Implementation inspired from: - * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} - * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} - * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} - * - * @param data The `Buffer` to generate the CRC32C from - */ - update(data) { - let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; - for (const d of data) { - const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; - current = tablePoly ^ (current >>> 8); - } - __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); - } - /** - * Validates a provided input to the current CRC32C value. - * - * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer - */ - validate(input) { - if (typeof input === 'number') { - return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); - } - else if (typeof input === 'string') { - return input === this.toString(); - } - else if (Buffer.isBuffer(input)) { - return Buffer.compare(input, this.toBuffer()) === 0; - } - else { - // `CRC32C`-like object - return input.toString() === this.toString(); - } - } - /** - * Returns a `Buffer` representation of the CRC32C value - */ - toBuffer() { - const buffer = Buffer.alloc(4); - buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); - return buffer; - } - /** - * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. - * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} - */ - toJSON() { - return this.toString(); - } - /** - * Returns a base64-encoded representation of the CRC32C value. 
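// Editorial sketch (not from the library docs): exercising the CRC32C class
// defined in this module. Assumes the package re-exports CRC32C from its
// top-level entry point, as recent @google-cloud/storage releases do.
const {CRC32C} = require('@google-cloud/storage');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('some '));
crc32c.update(Buffer.from('text\n'));

console.log(crc32c.toString());           // 'DkjKuA==' (base64 digest)
console.log(crc32c.validate('DkjKuA==')); // true: the digests match

// Rebuild a validator from a stored base64 digest and compare raw values.
const restored = CRC32C.from('DkjKuA==');
console.log(restored.valueOf() === crc32c.valueOf()); // true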
- * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} - */ - toString() { - return this.toBuffer().toString('base64'); - } - /** - * Returns the `number` representation of the CRC32C value as a signed 32-bit integer - * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} - */ - valueOf() { - return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); - } - /** - * Generates a `CRC32C` from a compatible buffer format. - * - * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` - */ - static fromBuffer(value) { - let buffer; - if (Buffer.isBuffer(value)) { - buffer = value; - } - else if ('buffer' in value) { - // `ArrayBufferView` - buffer = Buffer.from(value.buffer); - } - else { - // `ArrayBuffer` - buffer = Buffer.from(value); - } - if (buffer.byteLength !== 4) { - throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); - } - return new CRC32C(buffer.readInt32BE()); - } - static async fromFile(file) { - const crc32c = new CRC32C(); - await new Promise((resolve, reject) => { - (0, fs_1.createReadStream)(file) - .on('data', (d) => crc32c.update(d)) - .on('end', resolve) - .on('error', reject); - }); - return crc32c; - } - /** - * Generates a `CRC32C` from 4-byte base64-encoded data (string). - * - * @param value 4-byte base64-encoded data (string) - */ - static fromString(value) { - const buffer = Buffer.from(value, 'base64'); - if (buffer.byteLength !== 4) { - throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); - } - return this.fromBuffer(buffer); - } - /** - * Generates a `CRC32C` from a safe, unsigned 32-bit integer. - * - * @param value an unsigned 32-bit integer - */ - static fromNumber(value) { - if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { - throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); - } - return new CRC32C(value); - } - /** - * Generates a `CRC32C` from a variety of compatable types. - * Note: strings are treated as input, not as file paths to read from. - * - * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) - */ - static from(value) { - if (typeof value === 'number') { - return this.fromNumber(value); - } - else if (typeof value === 'string') { - return this.fromString(value); - } - else if ('byteLength' in value) { - // `ArrayBuffer` | `Buffer` | `ArrayBufferView` - return this.fromBuffer(value); - } - else { - // `CRC32CValidator`/`CRC32C`-like - return this.fromString(value.toString()); - } - } -} -exports.CRC32C = CRC32C; -_CRC32C_crc32c = new WeakMap(); -CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; -CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; -//# sourceMappingURL=crc32c.js.map - -/***/ }), - -/***/ 5373: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _File_instances, _File_validateIntegrity; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const promisify_1 = __nccwpck_require__(9203); -const compressible = __nccwpck_require__(6763); -const crypto = __nccwpck_require__(6113); -const extend = __nccwpck_require__(8171); -const fs = __nccwpck_require__(7147); -const mime = __nccwpck_require__(5377); -const resumableUpload = __nccwpck_require__(8807); -const stream_1 = __nccwpck_require__(2781); -const zlib = __nccwpck_require__(9796); -const storage_1 = __nccwpck_require__(346); -const bucket_1 = __nccwpck_require__(8561); -const acl_1 = __nccwpck_require__(1672); -const signer_1 = __nccwpck_require__(9665); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const duplexify = __nccwpck_require__(6599); -const util_1 = __nccwpck_require__(1862); -const hash_stream_validator_1 = __nccwpck_require__(8386); -const retry = __nccwpck_require__(3415); -var ActionToHTTPMethod; -(function (ActionToHTTPMethod) { - ActionToHTTPMethod["read"] = "GET"; - ActionToHTTPMethod["write"] = "PUT"; - ActionToHTTPMethod["delete"] = "DELETE"; - ActionToHTTPMethod["resumable"] = "POST"; -})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {})); -/** - * @private - */ -exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; -/** - * @private - */ -const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; -class RequestError extends Error { -} -exports.RequestError = RequestError; -const SEVEN_DAYS = 7 * 24 * 60 * 60; -var FileExceptionMessages; -(function (FileExceptionMessages) { - FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; - FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; - FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; - FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; - FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; - FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; - FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; - FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. 
To be sure the content is the same, you should download the file again."; - FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; - FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; -})(FileExceptionMessages = exports.FileExceptionMessages || (exports.FileExceptionMessages = {})); -/** - * A File object is created from your {@link Bucket} object using - * {@link Bucket#file}. - * - * @class - */ -class File extends nodejs_common_1.ServiceObject { - /** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. - * - * An ACL consists of one or more entries, where each entry grants permissions - * to an entity. Permissions define the actions that can be performed against - * an object or bucket (for example, `READ` or `WRITE`); the entity defines - * who the permission applies to (for example, a specific user or group of - * users). - * - * The `acl` object on a File instance provides methods to get you a list of - * the ACLs defined on your bucket, as well as set, update, and delete them. - * - * See {@link http://goo.gl/6qBBPO| About Access Control lists} - * - * @name File#acl - * @mixes Acl - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * //- - * // Make a file publicly readable. - * //- - * const options = { - * entity: 'allUsers', - * role: storage.acl.READER_ROLE - * }; - * - * file.acl.add(options, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - /** - * The API-formatted resource description of the file. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name File#metadata - * @type {object} - */ - /** - * The file's name. - * @name File#name - * @type {string} - */ - /** - * @callback Crc32cGeneratorToStringCallback - * A method returning the CRC32C as a base64-encoded string. - * - * @returns {string} - * - * @example - * Hashing the string 'data' should return 'rth90Q==' - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.toString(); // 'rth90Q==' - * ``` - **/ - /** - * @callback Crc32cGeneratorValidateCallback - * A method validating a base64-encoded CRC32C string. 
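// Editorial sketch (hypothetical wiring, not from the docs): supplying a
// custom CRC32C validator factory. The File constructor further down falls
// back to options.crc32cGenerator, so any factory returning an object with
// update(), validate() and toString() can be plugged in; the default shown in
// the crc32c module is simply () => new CRC32C().
const {Storage, CRC32C} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-bucket');

// Equivalent to the default generator: a fresh CRC32C per validation run.
const crc32cGenerator = () => new CRC32C();

const file = bucket.file('my-file', {crc32cGenerator});
// Downloads and uploads through `file` will now validate data integrity
// using validators produced by this factory.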
- * - * @param {string} [value] base64-encoded CRC32C string to validate - * @returns {boolean} - * - * @example - * Should return `true` if the value matches, `false` otherwise - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.validate('DkjKuA=='); // false - * crc32c.validate('rth90Q=='); // true - * ``` - **/ - /** - * @callback Crc32cGeneratorUpdateCallback - * A method for passing `Buffer`s for CRC32C generation. - * - * @param {Buffer} [data] data to update CRC32C value with - * @returns {undefined} - * - * @example - * Hashing buffers from 'some ' and 'text\n' - * - * ```js - * const buffer1 = Buffer.from('some '); - * crc32c.update(buffer1); - * - * const buffer2 = Buffer.from('text\n'); - * crc32c.update(buffer2); - * - * crc32c.toString(); // 'DkjKuA==' - * ``` - **/ - /** - * @typedef {object} CRC32CValidator - * @property {Crc32cGeneratorToStringCallback} - * @property {Crc32cGeneratorValidateCallback} - * @property {Crc32cGeneratorUpdateCallback} - */ - /** - * @callback Crc32cGeneratorCallback - * @returns {CRC32CValidator} - */ - /** - * @typedef {object} FileOptions Options passed to the File constructor. - * @property {string} [encryptionKey] A custom encryption key. - * @property {number} [generation] Generation to scope the file to. - * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this - * object, if the object is encrypted by such a key. Limited availability; - * usable only by enabled projects. - * @property {string} [userProject] The ID of the project which will be - * billed for all requests made from File object. - * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} - */ - /** - * Constructs a file object. - * - * @param {Bucket} bucket The Bucket instance this file is - * attached to. - * @param {string} name The name of the remote file. - * @param {FileOptions} [options] Configuration options. - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * ``` - */ - constructor(bucket, name, options = {}) { - var _a, _b; - const requestQueryObject = {}; - let generation; - if (options.generation !== null) { - if (typeof options.generation === 'string') { - generation = Number(options.generation); - } - else { - generation = options.generation; - } - if (!isNaN(generation)) { - requestQueryObject.generation = generation; - } - } - Object.assign(requestQueryObject, options.preconditionOpts); - const userProject = options.userProject || bucket.userProject; - if (typeof userProject === 'string') { - requestQueryObject.userProject = userProject; - } - const methods = { - /** - * @typedef {array} DeleteFileResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteFileCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Delete the file. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} - * - * @method File#delete - * @param {object} [options] Configuration options. - * @param {boolean} [options.ignoreNotFound = false] Ignore an error if - * the file does not exist. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. 
- * @param {DeleteFileCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * file.delete(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_delete_file - * Another example: - */ - delete: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} FileExistsResponse - * @property {boolean} 0 Whether the {@link File} exists. - */ - /** - * @callback FileExistsCallback - * @param {?Error} err Request error, if any. - * @param {boolean} exists Whether the {@link File} exists. - */ - /** - * Check if the file exists. - * - * @method File#exists - * @param {options} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {FileExistsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.exists(function(err, exists) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.exists().then(function(data) { - * const exists = data[0]; - * }); - * ``` - */ - exists: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} GetFileResponse - * @property {File} 0 The {@link File}. - * @property {object} 1 The full API response. - */ - /** - * @callback GetFileCallback - * @param {?Error} err Request error, if any. - * @param {File} file The {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * Get a file object and its metadata if it exists. - * - * @method File#get - * @param {options} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetFileCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.get(function(err, file, apiResponse) { - * // file.metadata` has been populated. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.get().then(function(data) { - * const file = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - get: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} GetFileMetadataResponse - * @property {object} 0 The {@link File} metadata. - * @property {object} 1 The full API response. - */ - /** - * @callback GetFileMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The {@link File} metadata. - * @param {object} apiResponse The full API response. - */ - /** - * Get the file's metadata. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} - * - * @method File#getMetadata - * @param {object} [options] Configuration options. 
- * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetFileMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.getMetadata(function(err, metadata, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.getMetadata().then(function(data) { - * const metadata = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_get_metadata - * Another example: - */ - getMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). - * @param {string} [userProject] The ID of the project which will be billed for the request. - */ - /** - * @callback SetFileMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} SetFileMetadataResponse - * @property {object} 0 The full API response. - */ - /** - * Merge the given metadata with the current remote file's metadata. This - * will set metadata if it was previously unset or update previously set - * metadata. To unset previously set metadata, set its value to null. - * - * You can set custom key/value pairs in the metadata key of the given - * object, however the other properties outside of this object must adhere - * to the {@link https://goo.gl/BOnnCK| official API documentation}. - * - * - * See the examples below for more information. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} - * - * @method File#setMetadata - * @param {object} [metadata] The metadata you wish to update. - * @param {SetFileMetadataOptions} [options] Configuration options. - * @param {SetFileMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * const metadata = { - * contentType: 'application/x-font-ttf', - * metadata: { - * my: 'custom', - * properties: 'go here' - * } - * }; - * - * file.setMetadata(metadata, function(err, apiResponse) {}); - * - * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } - * file.setMetadata({ - * metadata: { - * abc: '123', // will be set. - * unsetMe: null, // will be unset (deleted). - * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. - * } - * }, function(err, apiResponse) { - * // metadata should now be { abc: '123', hello: 'goodbye' } - * }); - * - * //- - * // Set a temporary hold on this file from its bucket's retention period - * // configuration. - * // - * file.setMetadata({ - * temporaryHold: true - * }, function(err, apiResponse) {}); - * - * //- - * // Alternatively, you may set a temporary hold. This will follow the - * // same behavior as an event-based hold, with the exception that the - * // bucket's retention policy will not renew for this file from the time - * // the hold is released. 
- * //- - * file.setMetadata({ - * eventBasedHold: true - * }, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.setMetadata(metadata).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - }; - super({ - parent: bucket, - baseUrl: '/o', - id: encodeURIComponent(name), - methods, - }); - _File_instances.add(this); - this.bucket = bucket; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this.storage = bucket.parent; - // @TODO Can this duplicate code from above be avoided? - if (options.generation !== null) { - let generation; - if (typeof options.generation === 'string') { - generation = Number(options.generation); - } - else { - generation = options.generation; - } - if (!isNaN(generation)) { - this.generation = generation; - } - } - this.kmsKeyName = options.kmsKeyName; - this.userProject = userProject; - this.name = name; - if (options.encryptionKey) { - this.setEncryptionKey(options.encryptionKey); - } - this.acl = new acl_1.Acl({ - request: this.request.bind(this), - pathPrefix: '/acl', - }); - this.crc32cGenerator = - options.crc32cGenerator || this.bucket.crc32cGenerator; - this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; - this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; - } - /** - * The object's Cloud Storage URI (`gs://`) - * - * @example - * ```ts - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('image.png'); - * - * // `gs://my-bucket/image.png` - * const href = file.cloudStorageURI.href; - * ``` - */ - get cloudStorageURI() { - const uri = this.bucket.cloudStorageURI; - uri.pathname = this.name; - return uri; - } - /** - * A helper method for determining if a request should be retried based on preconditions. - * This should only be used for methods where the idempotency is determined by - * `ifGenerationMatch` - * @private - * - * A request should not be retried under the following conditions: - * - if precondition option `ifGenerationMatch` is not set OR - * - if `idempotencyStrategy` is set to `RetryNever` - */ - shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { - var _a; - return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && - ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever); - } - /** - * @typedef {array} CopyResponse - * @property {File} 0 The copied {@link File}. - * @property {object} 1 The full API response. - */ - /** - * @callback CopyCallback - * @param {?Error} err Request error, if any. - * @param {File} copiedFile The copied {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} CopyOptions Configuration options for File#copy(). See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. - * @property {string} [cacheControl] The cacheControl setting for the new file. 
- * @property {string} [contentEncoding] The contentEncoding setting for the new file. - * @property {string} [contentType] The contentType setting for the new file. - * @property {string} [destinationKmsKeyName] Resource name of the Cloud - * KMS key, of the form - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, - * that will be used to encrypt the object. Overwrites the object - * metadata's `kms_key_name` value, if any. - * @property {Metadata} [metadata] Metadata to specify on the copied file. - * @property {string} [predefinedAcl] Set the ACL for the new file. - * @property {string} [token] A previously-returned `rewriteToken` from an - * unfinished rewrite request. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Copy this file to another file. By default, this will copy the file to the - * same bucket, but you can choose to copy it to another Bucket by providing - * a Bucket or File object or a URL starting with "gs://". - * The generation of the file will not be preserved. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} - * - * @throws {Error} If the destination file is not provided. - * - * @param {string|Bucket|File} destination Destination file. - * @param {CopyOptions} [options] Configuration options. See an - * @param {CopyCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // You can pass in a variety of types for the destination. - * // - * // For all of the below examples, assume we are working with the following - * // Bucket and File objects. - * //- - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('my-image.png'); - * - * //- - * // If you pass in a string for the destination, the file is copied to its - * // current bucket, under the new name provided. - * //- - * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { - * // `my-bucket` now contains: - * // - "my-image.png" - * // - "my-image-copy.png" - * - * // `copiedFile` is an instance of a File object that refers to your new - * // file. - * }); - * - * //- - * // If you pass in a string starting with "gs://" for the destination, the - * // file is copied to the other bucket and under the new name provided. - * //- - * const newLocation = 'gs://another-bucket/my-image-copy.png'; - * file.copy(newLocation, function(err, copiedFile, apiResponse) { - * // `my-bucket` still contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image-copy.png" - * - * // `copiedFile` is an instance of a File object that refers to your new - * // file. - * }); - * - * //- - * // If you pass in a Bucket object, the file will be copied to that bucket - * // using the same name. - * //- - * const anotherBucket = storage.bucket('another-bucket'); - * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { - * // `my-bucket` still contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image.png" - * - * // `copiedFile` is an instance of a File object that refers to your new - * // file. - * }); - * - * //- - * // If you pass in a File object, you have complete control over the new - * // bucket and filename. 
- * //- - * const anotherFile = anotherBucket.file('my-awesome-image.png'); - * file.copy(anotherFile, function(err, copiedFile, apiResponse) { - * // `my-bucket` still contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-awesome-image.png" - * - * // Note: - * // The `copiedFile` parameter is equal to `anotherFile`. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.copy(newLocation).then(function(data) { - * const newFile = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_copy_file - * Another example: - */ - copy(destination, optionsOrCallback, callback) { - var _a, _b; - const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); - if (!destination) { - throw noDestinationError; - } - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - options = extend(true, {}, options); - callback = callback || nodejs_common_1.util.noop; - let destBucket; - let destName; - let newFile; - if (typeof destination === 'string') { - const parsedDestination = GS_URL_REGEXP.exec(destination); - if (parsedDestination !== null && parsedDestination.length === 3) { - destBucket = this.storage.bucket(parsedDestination[1]); - destName = parsedDestination[2]; - } - else { - destBucket = this.bucket; - destName = destination; - } - } - else if (destination instanceof bucket_1.Bucket) { - destBucket = destination; - destName = this.name; - } - else if (destination instanceof File) { - destBucket = destination.bucket; - destName = destination.name; - newFile = destination; - } - else { - throw noDestinationError; - } - const query = {}; - if (this.generation !== undefined) { - query.sourceGeneration = this.generation; - } - if (options.token !== undefined) { - query.rewriteToken = options.token; - } - if (options.userProject !== undefined) { - query.userProject = options.userProject; - delete options.userProject; - } - if (options.predefinedAcl !== undefined) { - query.destinationPredefinedAcl = options.predefinedAcl; - delete options.predefinedAcl; - } - newFile = newFile || destBucket.file(destName); - const headers = {}; - if (this.encryptionKey !== undefined) { - headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; - headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; - headers['x-goog-copy-source-encryption-key-sha256'] = - this.encryptionKeyHash; - } - if (newFile.encryptionKey !== undefined) { - this.setEncryptionKey(newFile.encryptionKey); - } - else if (options.destinationKmsKeyName !== undefined) { - query.destinationKmsKeyName = options.destinationKmsKeyName; - delete options.destinationKmsKeyName; - } - else if (newFile.kmsKeyName !== undefined) { - query.destinationKmsKeyName = newFile.kmsKeyName; - } - if (query.destinationKmsKeyName) { - this.kmsKeyName = query.destinationKmsKeyName; - const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); - if (keyIndex > -1) { - this.interceptors.splice(keyIndex, 1); - } - } - if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { - this.storage.retryOptions.autoRetry = false; - } - if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) !== undefined) { - query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; - delete options.preconditionOpts; - } - this.request({ - method: 'POST', - uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, - qs: query, - json: options, - headers, - }, (err, resp) => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - if (err) { - callback(err, null, resp); - return; - } - if (resp.rewriteToken) { - const options = { - token: resp.rewriteToken, - }; - if (query.userProject) { - options.userProject = query.userProject; - } - if (query.destinationKmsKeyName) { - options.destinationKmsKeyName = query.destinationKmsKeyName; - } - this.copy(newFile, options, callback); - return; - } - callback(null, newFile, resp); - }); - } - /** - * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string|boolean} [validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with a - * CRC32c checksum. You may use MD5 if preferred, but that hash is not - * supported for composite objects. An error will be raised if MD5 is - * specified but is not available. You may also choose to skip validation - * completely, however this is **not recommended**. - * @property {number} [start] A byte offset to begin the file's download - * from. Default is 0. NOTE: Byte ranges are inclusive; that is, - * `options.start = 0` and `options.end = 999` represent the first 1000 - * bytes in a file or object. NOTE: when specifying a byte range, data - * integrity is not available. - * @property {number} [end] A byte offset to stop reading the file at. - * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and - * `options.end = 999` represent the first 1000 bytes in a file or object. - * NOTE: when specifying a byte range, data integrity is not available. - * @property {boolean} [decompress=true] Disable auto decompression of the - * received data. By default this option is set to `true`. - * Applicable in cases where the data was uploaded with - * `gzip: true` option. See {@link File#createWriteStream}. - */ - /** - * Create a readable stream to read the contents of the remote file. It can be - * piped to a writable stream or listened to for 'data' events to read a - * file's contents. - * - * In the unlikely event there is a mismatch between what you downloaded and - * the version in your Bucket, your error handler will receive an error with - * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best - * recourse is to try downloading the file again. - * - * NOTE: Readable streams will emit the `end` event when the file is fully - * downloaded. - * - * @param {CreateReadStreamOptions} [options] Configuration options. - * @returns {ReadableStream} - * - * @example - * ``` - * //- - * //
Downloading a File
- * // - * // The example below demonstrates how we can reference a remote file, then - * // pipe its contents to a local file. This is effectively creating a local - * // backup of your remote data. - * //- - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * const fs = require('fs'); - * const remoteFile = bucket.file('image.png'); - * const localFilename = '/Users/stephen/Photos/image.png'; - * - * remoteFile.createReadStream() - * .on('error', function(err) {}) - * .on('response', function(response) { - * // Server connected and responded with the specified status and headers. - * }) - * .on('end', function() { - * // The file is fully downloaded. - * }) - * .pipe(fs.createWriteStream(localFilename)); - * - * //- - * // To limit the downloaded data to only a byte range, pass an options - * // object. - * //- - * const logFile = myBucket.file('access_log'); - * logFile.createReadStream({ - * start: 10000, - * end: 20000 - * }) - * .on('error', function(err) {}) - * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); - * - * //- - * // To read a tail byte range, specify only `options.end` as a negative - * // number. - * //- - * const logFile = myBucket.file('access_log'); - * logFile.createReadStream({ - * end: -100 - * }) - * .on('error', function(err) {}) - * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); - * ``` - */ - createReadStream(options = {}) { - options = Object.assign({ decompress: true }, options); - const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; - const tailRequest = options.end < 0; - let validateStream = undefined; - const throughStream = new util_1.PassThroughShim(); - let crc32c = true; - let md5 = false; - if (typeof options.validation === 'string') { - const value = options.validation.toLowerCase().trim(); - crc32c = value === 'crc32c'; - md5 = value === 'md5'; - } - else if (options.validation === false) { - crc32c = false; - } - const shouldRunValidation = !rangeRequest && (crc32c || md5); - if (rangeRequest) { - if (typeof options.validation === 'string' || - options.validation === true) { - throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); - } - // Range requests can't receive data integrity checks. - crc32c = false; - md5 = false; - } - const onComplete = (err) => { - if (err) { - throughStream.destroy(err); - } - }; - // We listen to the response event from the request stream so that we - // can... - // - // 1) Intercept any data from going to the user if an error occurred. - // 2) Calculate the hashes from the http.IncomingMessage response - // stream, - // which will return the bytes from the source without decompressing - // gzip'd content. We then send it through decompressed, if - // applicable, to the user. - const onResponse = (err, _body, rawResponseStream) => { - if (err) { - // Get error message from the body. - this.getBufferFromReadable(rawResponseStream).then(body => { - err.message = body.toString('utf8'); - throughStream.destroy(err); - }); - return; - } - const headers = rawResponseStream.toJSON().headers; - const isCompressed = headers['content-encoding'] === 'gzip'; - const hashes = {}; - // The object is safe to validate if: - // 1. It was stored gzip and returned to us gzip OR - // 2. 
It was never stored as gzip - const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && - isCompressed) || - headers['x-goog-stored-content-encoding'] === 'identity'; - const transformStreams = []; - if (shouldRunValidation) { - // The x-goog-hash header should be set with a crc32c and md5 hash. - // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' - if (typeof headers['x-goog-hash'] === 'string') { - headers['x-goog-hash'] - .split(',') - .forEach((hashKeyValPair) => { - const delimiterIndex = hashKeyValPair.indexOf('='); - const hashType = hashKeyValPair.substr(0, delimiterIndex); - const hashValue = hashKeyValPair.substr(delimiterIndex + 1); - hashes[hashType] = hashValue; - }); - } - validateStream = new hash_stream_validator_1.HashStreamValidator({ - crc32c, - md5, - crc32cGenerator: this.crc32cGenerator, - crc32cExpected: hashes.crc32c, - md5Expected: hashes.md5, - }); - } - if (md5 && !hashes.md5) { - const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); - hashError.code = 'MD5_NOT_AVAILABLE'; - throughStream.destroy(hashError); - return; - } - if (safeToValidate && shouldRunValidation && validateStream) { - transformStreams.push(validateStream); - } - if (isCompressed && options.decompress) { - transformStreams.push(zlib.createGunzip()); - } - (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); - }; - // Authenticate the request, then pipe the remote API request to the stream - // returned to the user. - const makeRequest = () => { - const query = { alt: 'media' }; - if (this.generation) { - query.generation = this.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - const headers = { - 'Accept-Encoding': 'gzip', - 'Cache-Control': 'no-store', - }; - if (rangeRequest) { - const start = typeof options.start === 'number' ? options.start : '0'; - const end = typeof options.end === 'number' ? options.end : ''; - headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; - } - const reqOpts = { - uri: '', - headers, - qs: query, - }; - this.requestStream(reqOpts) - .on('error', err => { - throughStream.destroy(err); - }) - .on('response', res => { - throughStream.emit('response', res); - nodejs_common_1.util.handleResp(null, res, null, onResponse); - }) - .resume(); - }; - throughStream.on('reading', makeRequest); - return throughStream; - } - /** - * @callback CreateResumableUploadCallback - * @param {?Error} err Request error, if any. - * @param {string} uri The resumable upload's unique session URI. - */ - /** - * @typedef {array} CreateResumableUploadResponse - * @property {string} 0 The resumable upload's unique session URI. - */ - /** - * @typedef {object} CreateResumableUploadOptions - * @property {object} [metadata] Metadata to set on the file. - * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. - * @property {string} [origin] Origin header to set for the upload. - * @property {string} [predefinedAcl] Apply a predefined set of access - * controls to this object. - * - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. 
- * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @property {boolean} [private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @property {boolean} [public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string} [chunkSize] Create a separate request per chunk. This - * value is in bytes and should be a multiple of 256 KiB (2^18). - * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} - */ - /** - * Create a unique resumable upload session URI. This is the first step when - * performing a resumable upload. - * - * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} - * for more on how the entire process works. - * - *

<h4>Note</h4>

- * - * If you are just looking to perform a resumable upload without worrying - * about any of the details, see {@link File#createWriteStream}. Resumable - * uploads are performed by default. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} - * - * @param {CreateResumableUploadOptions} [options] Configuration options. - * @param {CreateResumableUploadCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * file.createResumableUpload(function(err, uri) { - * if (!err) { - * // `uri` can be used to PUT data to. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.createResumableUpload().then(function(data) { - * const uri = data[0]; - * }); - * ``` - */ - createResumableUpload(optionsOrCallback, callback) { - var _a, _b; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const retryOptions = this.storage.retryOptions; - if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && - ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - retryOptions.autoRetry = false; - } - resumableUpload.createURI({ - authClient: this.storage.authClient, - apiEndpoint: this.storage.apiEndpoint, - bucket: this.bucket.name, - customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), - file: this.name, - generation: this.generation, - key: this.encryptionKey, - kmsKeyName: this.kmsKeyName, - metadata: options.metadata, - offset: options.offset, - origin: options.origin, - predefinedAcl: options.predefinedAcl, - private: options.private, - public: options.public, - userProject: options.userProject || this.userProject, - retryOptions: retryOptions, - params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, - }, callback); - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - } - /** - * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). - * @property {string} [contentType] Alias for - * `options.metadata.contentType`. If set to `auto`, the file name is used - * to determine the contentType. - * @property {string|boolean} [gzip] If true, automatically gzip the file. - * If set to `auto`, the contentType is used to determine if the file - * should be gzipped. This will set `options.metadata.contentEncoding` to - * `gzip` if necessary. - * @property {object} [metadata] See the examples below or - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} - * for more details. - * @property {number} [offset] The starting byte of the upload stream, for - * resuming an interrupted upload. Defaults to 0. 
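A resumable session URI created by `createResumableUpload()` only opens the session; the object's bytes are sent to that URI separately. Below is a minimal sketch of one way to finish such a session for a small in-memory payload, assuming Node 18+ for the global `fetch()` and placeholder bucket/object names; for most workloads `createWriteStream()` or `save()` is the simpler route.

```
const {Storage} = require('@google-cloud/storage');

async function uploadViaSessionUri() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  // Step 1: create the session (this is what createResumableUpload() does).
  const [uri] = await file.createResumableUpload();

  // Step 2: for a payload small enough to send at once, a single PUT of the
  // whole body completes the session. Chunked/offset uploads would instead
  // send ranges with Content-Range headers.
  const body = Buffer.from('hello, resumable world');
  const res = await fetch(uri, {method: 'PUT', body});
  if (!res.ok) {
    throw new Error(`upload failed with status ${res.status}`);
  }
}

uploadViaSessionUri().catch(console.error);
```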
- * @property {string} [predefinedAcl] Apply a predefined set of access - * controls to this object. - * - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. - * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @property {boolean} [private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @property {boolean} [public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @property {boolean} [resumable] Force a resumable upload. NOTE: When - * working with streams, the file format and size is unknown until it's - * completely consumed. Because of this, it's best for you to be explicit - * for what makes sense given your input. - * @property {number} [timeout=60000] Set the HTTP request timeout in - * milliseconds. This option is not available for resumable uploads. - * Default: `60000` - * @property {string} [uri] The URI for an already-created resumable - * upload. See {@link File#createResumableUpload}. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string|boolean} [validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with a - * CRC32c checksum. You may use MD5 if preferred, but that hash is not - * supported for composite objects. An error will be raised if MD5 is - * specified but is not available. You may also choose to skip validation - * completely, however this is **not recommended**. In addition to specifying - * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will - * cause the server to perform validation in addition to client validation. - * NOTE: Validation is automatically skipped for objects that were - * uploaded using the `gzip` option and have already compressed content. - */ - /** - * Create a writable stream to overwrite the contents of the file in your - * bucket. - * - * A File object can also be used to create files for the first time. - * - * Resumable uploads are automatically enabled and must be shut off explicitly - * by setting `options.resumable` to `false`. - * - * - *

- * There is some overhead when using a resumable upload that can cause - * noticeable performance degradation while uploading a series of small - * files. When uploading files less than 10MB, it is recommended that the - * resumable feature is disabled. - *

- * - * NOTE: Writable streams will emit the `finish` event when the file is fully - * uploaded. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} - * - * @param {CreateWriteStreamOptions} [options] Configuration options. - * @returns {WritableStream} - * - * @example - * ``` - * const fs = require('fs'); - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * //

<h4>Uploading a File</h4>

- * // - * // Now, consider a case where we want to upload a file to your bucket. You - * // have the option of using {@link Bucket#upload}, but that is just - * // a convenience method which will do the following. - * //- - * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') - * .pipe(file.createWriteStream()) - * .on('error', function(err) {}) - * .on('finish', function() { - * // The file upload is complete. - * }); - * - * //- - * //

<h4>Uploading a File with gzip compression</h4>

- * //- - * fs.createReadStream('/Users/stephen/site/index.html') - * .pipe(file.createWriteStream({ gzip: true })) - * .on('error', function(err) {}) - * .on('finish', function() { - * // The file upload is complete. - * }); - * - * //- - * // Downloading the file with `createReadStream` will automatically decode - * // the file. - * //- - * - * //- - * //

<h4>Uploading a File with Metadata</h4>

- * // - * // One last case you may run into is when you want to upload a file to your - * // bucket and set its metadata at the same time. Like above, you can use - * // {@link Bucket#upload} to do this, which is just a wrapper around - * // the following. - * //- - * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') - * .pipe(file.createWriteStream({ - * metadata: { - * contentType: 'image/jpeg', - * metadata: { - * custom: 'metadata' - * } - * } - * })) - * .on('error', function(err) {}) - * .on('finish', function() { - * // The file upload is complete. - * }); - * ``` - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - createWriteStream(options = {}) { - options = extend(true, { metadata: {} }, options); - if (options.contentType) { - options.metadata.contentType = options.contentType; - } - if (!options.metadata.contentType || - options.metadata.contentType === 'auto') { - const detectedContentType = mime.getType(this.name); - if (detectedContentType) { - options.metadata.contentType = detectedContentType; - } - } - let gzip = options.gzip; - if (gzip === 'auto') { - gzip = compressible(options.metadata.contentType); - } - if (gzip) { - options.metadata.contentEncoding = 'gzip'; - } - let crc32c = true; - let md5 = false; - if (typeof options.validation === 'string') { - options.validation = options.validation.toLowerCase(); - crc32c = options.validation === 'crc32c'; - md5 = options.validation === 'md5'; - } - else if (options.validation === false) { - crc32c = false; - } - /** - * A callback for determining when the underlying pipeline is complete. - * It's possible the pipeline callback could error before the write stream - * calls `final` so by default this will destroy the write stream unless the - * write stream sets this callback via its `final` handler. - * @param error An optional error - */ - let pipelineCallback = error => { - writeStream.destroy(error || undefined); - }; - // A stream for consumer to write to - const writeStream = new stream_1.Writable({ - final(cb) { - // Set the pipeline callback to this callback so the pipeline's results - // can be populated to the consumer - pipelineCallback = cb; - emitStream.end(); - }, - write(chunk, encoding, cb) { - emitStream.write(chunk, encoding, cb); - }, - }); - const emitStream = new util_1.PassThroughShim(); - const hashCalculatingStream = new hash_stream_validator_1.HashStreamValidator({ - crc32c, - md5, - crc32cGenerator: this.crc32cGenerator, - updateHashesOnly: true, - }); - const fileWriteStream = duplexify(); - let fileWriteStreamMetadataReceived = false; - // Handing off emitted events to users - emitStream.on('reading', () => writeStream.emit('reading')); - emitStream.on('writing', () => writeStream.emit('writing')); - fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); - fileWriteStream.on('response', resp => writeStream.emit('response', resp)); - fileWriteStream.once('metadata', () => { - fileWriteStreamMetadataReceived = true; - }); - writeStream.on('writing', () => { - if (options.resumable === false) { - this.startSimpleUpload_(fileWriteStream, options); - } - else { - this.startResumableUpload_(fileWriteStream, options); - } - (0, stream_1.pipeline)(emitStream, gzip ? zlib.createGzip() : new stream_1.PassThrough(), hashCalculatingStream, fileWriteStream, async (e) => { - if (e) { - return pipelineCallback(e); - } - // We want to make sure we've received the metadata from the server in order - // to properly validate the object's integrity. 
Depending on the type of upload, - // the stream could close before the response is returned. - if (!fileWriteStreamMetadataReceived) { - try { - await new Promise((resolve, reject) => { - fileWriteStream.once('metadata', resolve); - fileWriteStream.once('error', reject); - }); - } - catch (e) { - return pipelineCallback(e); - } - } - try { - await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5 }); - pipelineCallback(); - } - catch (e) { - pipelineCallback(e); - } - }); - }); - return writeStream; - } - delete(optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_1.AvailableServiceObjectMethods.delete, options); - super - .delete(options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } - /** - * @typedef {array} DownloadResponse - * @property [0] The contents of a File. - */ - /** - * @callback DownloadCallback - * @param err Request error, if any. - * @param contents The contents of a File. - */ - /** - * Convenience method to download a file into memory or to a local - * destination. - * - * @param {object} [options] Configuration options. The arguments match those - * passed to {@link File#createReadStream}. - * @param {string} [options.destination] Local file path to write the file's - * contents to. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {DownloadCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Download a file into memory. The contents will be available as the - * second - * // argument in the demonstration below, `contents`. - * //- - * file.download(function(err, contents) {}); - * - * //- - * // Download a file to a local destination. - * //- - * file.download({ - * destination: '/Users/me/Desktop/file-backup.txt' - * }, function(err) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * file.download().then(function(data) { - * const contents = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_download_file - * Another example: - * - * @example include:samples/encryption.js - * region_tag:storage_download_encrypted_file - * Example of downloading an encrypted file: - * - * @example include:samples/requesterPays.js - * region_tag:storage_download_file_requester_pays - * Example of downloading a file where the requester pays: - */ - download(optionsOrCallback, cb) { - let options; - if (typeof optionsOrCallback === 'function') { - cb = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback; - } - let called = false; - const callback = ((...args) => { - if (!called) - cb(...args); - called = true; - }); - const destination = options.destination; - delete options.destination; - const fileStream = this.createReadStream(options); - let receivedData = false; - if (destination) { - fileStream - .on('error', callback) - .once('data', data => { - // We know that the file exists the server - now we can truncate/write to a file - receivedData = true; - const writable = fs.createWriteStream(destination); - writable.write(data); - fileStream - .pipe(writable) - .on('error', callback) - .on('finish', callback); - }) - .on('end', () => { - // In the case of an empty file no data will be received before the end event fires - if (!receivedData) { - fs.openSync(destination, 'w'); - callback(null, Buffer.alloc(0)); - } - }); - } - else { - this.getBufferFromReadable(fileStream) - .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) - .catch(callback); - } - } - /** - * The Storage API allows you to use a custom key for server-side encryption. - * - * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} - * - * @param {string|buffer} encryptionKey An AES-256 encryption key. 
- * @returns {File} - * - * @example - * ``` - * const crypto = require('crypto'); - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const encryptionKey = crypto.randomBytes(32); - * - * const fileWithCustomEncryption = myBucket.file('my-file'); - * fileWithCustomEncryption.setEncryptionKey(encryptionKey); - * - * const fileWithoutCustomEncryption = myBucket.file('my-file'); - * - * fileWithCustomEncryption.save('data', function(err) { - * // Try to download with the File object that hasn't had - * // `setEncryptionKey()` called: - * fileWithoutCustomEncryption.download(function(err) { - * // We will receive an error: - * // err.message === 'Bad Request' - * - * // Try again with the File object we called `setEncryptionKey()` on: - * fileWithCustomEncryption.download(function(err, contents) { - * // contents.toString() === 'data' - * }); - * }); - * }); - * - * ``` - * @example include:samples/encryption.js - * region_tag:storage_upload_encrypted_file - * Example of uploading an encrypted file: - * - * @example include:samples/encryption.js - * region_tag:storage_download_encrypted_file - * Example of downloading an encrypted file: - */ - setEncryptionKey(encryptionKey) { - this.encryptionKey = encryptionKey; - this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); - this.encryptionKeyHash = crypto - .createHash('sha256') - // eslint-disable-next-line @typescript-eslint/no-explicit-any - .update(this.encryptionKeyBase64, 'base64') - .digest('base64'); - this.encryptionKeyInterceptor = { - request: reqOpts => { - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; - reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; - reqOpts.headers['x-goog-encryption-key-sha256'] = - this.encryptionKeyHash; - return reqOpts; - }, - }; - this.interceptors.push(this.encryptionKeyInterceptor); - return this; - } - /** - * @typedef {array} GetExpirationDateResponse - * @property {date} 0 A Date object representing the earliest time this file's - * retention policy will expire. - */ - /** - * @callback GetExpirationDateCallback - * @param {?Error} err Request error, if any. - * @param {date} expirationDate A Date object representing the earliest time - * this file's retention policy will expire. - */ - /** - * If this bucket has a retention policy defined, use this method to get a - * Date object representing the earliest time this file will expire. - * - * @param {GetExpirationDateCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.getExpirationDate(function(err, expirationDate) { - * // expirationDate is a Date object. - * }); - * ``` - */ - getExpirationDate(callback) { - this.getMetadata((err, metadata, apiResponse) => { - if (err) { - callback(err, null, apiResponse); - return; - } - if (!metadata.retentionExpirationTime) { - const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); - callback(error, null, apiResponse); - return; - } - callback(null, new Date(metadata.retentionExpirationTime), apiResponse); - }); - } - /** - * @typedef {array} GenerateSignedPostPolicyV2Response - * @property {object} 0 The document policy. 
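The interceptor installed by `setEncryptionKey()` above derives all three customer-supplied-encryption headers from the key. A standalone sketch of that same derivation, assuming only `node:crypto` and a freshly generated 32-byte AES-256 key:

```
const crypto = require('crypto');

// 32 random bytes = an AES-256 key, as in the setEncryptionKey() example above.
const encryptionKey = crypto.randomBytes(32);

// The key travels base64-encoded; the hash header is the SHA-256 of the raw
// key bytes, also base64-encoded (the implementation decodes its stored
// base64 copy before hashing, which is equivalent).
const headers = {
  'x-goog-encryption-algorithm': 'AES256',
  'x-goog-encryption-key': encryptionKey.toString('base64'),
  'x-goog-encryption-key-sha256': crypto
    .createHash('sha256')
    .update(encryptionKey)
    .digest('base64'),
};

console.log(headers);
```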
- */ - /** - * @callback GenerateSignedPostPolicyV2Callback - * @param {?Error} err Request error, if any. - * @param {object} policy The document policy. - */ - /** - * Get a signed policy document to allow a user to upload data with a POST - * request. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed policy. That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} - * - * @throws {Error} If an expiration timestamp from the past is given. - * @throws {Error} If options.equals has an array with less or more than two - * members. - * @throws {Error} If options.startsWith has an array with less or more than two - * members. - * - * @param {object} options Configuration options. - * @param {array|array[]} [options.equals] Array of request parameters and - * their expected value (e.g. [['$', '']]). Values are - * translated into equality constraints in the conditions field of the - * policy document (e.g. ['eq', '$', '']). If only one - * equality condition is to be specified, options.equals can be a one- - * dimensional array (e.g. ['$', '']). - * @param {*} options.expires - A timestamp when this policy will expire. Any - * value given is passed to `new Date()`. - * @param {array|array[]} [options.startsWith] Array of request parameters and - * their expected prefixes (e.g. [['$', '']). Values are - * translated into starts-with constraints in the conditions field of the - * policy document (e.g. ['starts-with', '$', '']). If only - * one prefix condition is to be specified, options.startsWith can be a - * one- dimensional array (e.g. ['$', '']). - * @param {string} [options.acl] ACL for the object from possibly predefined - * ACLs. - * @param {string} [options.successRedirect] The URL to which the user client - * is redirected if the upload is successful. - * @param {string} [options.successStatus] - The status of the Google Storage - * response if the upload is successful (must be string). - * @param {object} [options.contentLengthRange] - * @param {number} [options.contentLengthRange.min] Minimum value for the - * request's content length. - * @param {number} [options.contentLengthRange.max] Maximum value for the - * request's content length. - * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * const options = { - * equals: ['$Content-Type', 'image/jpeg'], - * expires: '10-25-2022', - * contentLengthRange: { - * min: 0, - * max: 1024 - * } - * }; - * - * file.generateSignedPostPolicyV2(options, function(err, policy) { - * // policy.string: the policy document in plain text. - * // policy.base64: the policy document in base64. - * // policy.signature: the policy signature in base64. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * file.generateSignedPostPolicyV2(options).then(function(data) { - * const policy = data[0]; - * }); - * ``` - */ - generateSignedPostPolicyV2(optionsOrCallback, cb) { - const args = (0, util_1.normalize)(optionsOrCallback, cb); - let options = args.options; - const callback = args.callback; - const expires = new Date(options.expires); - if (isNaN(expires.getTime())) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); - } - if (expires.valueOf() < Date.now()) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); - } - options = Object.assign({}, options); - const conditions = [ - ['eq', '$key', this.name], - { - bucket: this.bucket.name, - }, - ]; - if (Array.isArray(options.equals)) { - if (!Array.isArray(options.equals[0])) { - options.equals = [options.equals]; - } - options.equals.forEach(condition => { - if (!Array.isArray(condition) || condition.length !== 2) { - throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); - } - conditions.push(['eq', condition[0], condition[1]]); - }); - } - if (Array.isArray(options.startsWith)) { - if (!Array.isArray(options.startsWith[0])) { - options.startsWith = [options.startsWith]; - } - options.startsWith.forEach(condition => { - if (!Array.isArray(condition) || condition.length !== 2) { - throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); - } - conditions.push(['starts-with', condition[0], condition[1]]); - }); - } - if (options.acl) { - conditions.push({ - acl: options.acl, - }); - } - if (options.successRedirect) { - conditions.push({ - success_action_redirect: options.successRedirect, - }); - } - if (options.successStatus) { - conditions.push({ - success_action_status: options.successStatus, - }); - } - if (options.contentLengthRange) { - const min = options.contentLengthRange.min; - const max = options.contentLengthRange.max; - if (typeof min !== 'number' || typeof max !== 'number') { - throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); - } - conditions.push(['content-length-range', min, max]); - } - const policy = { - expiration: expires.toISOString(), - conditions, - }; - const policyString = JSON.stringify(policy); - const policyBase64 = Buffer.from(policyString).toString('base64'); - this.storage.authClient.sign(policyBase64).then(signature => { - callback(null, { - string: policyString, - base64: policyBase64, - signature, - }); - }, err => { - callback(new signer_1.SigningError(err.message)); - }); - } - /** - * @typedef {object} SignedPostPolicyV4Output - * @property {string} url The request URL. - * @property {object} fields The form fields to include in the POST request. - */ - /** - * @typedef {array} GenerateSignedPostPolicyV4Response - * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. - */ - /** - * @callback GenerateSignedPostPolicyV4Callback - * @param {?Error} err Request error, if any. - * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. - */ - /** - * Get a v4 signed policy document to allow a user to upload data with a POST - * request. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed policy. 
That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} - * - * @param {object} options Configuration options. - * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any - * value given is passed to `new Date()`. - * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instead of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in - * the result, e.g. "https://cdn.example.com". - * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} - * to include in the signed policy. Any fields with key beginning with 'x-ignore-' - * will not be included in the policy to be signed. - * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} - * to include in the signed policy. All fields given in `config.fields` are - * automatically included in the conditions array, adding the same entry - * in both `fields` and `conditions` will result in duplicate entries. - * - * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * const options = { - * expires: '10-25-2022', - * conditions: [ - * ['eq', '$Content-Type', 'image/jpeg'], - * ['content-length-range', 0, 1024], - * ], - * fields: { - * acl: 'public-read', - * 'x-goog-meta-foo': 'bar', - * 'x-ignore-mykey': 'data' - * } - * }; - * - * file.generateSignedPostPolicyV4(options, function(err, response) { - * // response.url The request URL - * // response.fields The form fields (including the signature) to include - * // to be used to upload objects by HTML forms. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.generateSignedPostPolicyV4(options).then(function(data) { - * const response = data[0]; - * // response.url The request URL - * // response.fields The form fields (including the signature) to include - * // to be used to upload objects by HTML forms. 
- * }); - * ``` - */ - generateSignedPostPolicyV4(optionsOrCallback, cb) { - const args = (0, util_1.normalize)(optionsOrCallback, cb); - let options = args.options; - const callback = args.callback; - const expires = new Date(options.expires); - if (isNaN(expires.getTime())) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); - } - if (expires.valueOf() < Date.now()) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); - } - if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { - throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); - } - options = Object.assign({}, options); - let fields = Object.assign({}, options.fields); - const now = new Date(); - const nowISO = (0, util_1.formatAsUTCISO)(now, true); - const todayISO = (0, util_1.formatAsUTCISO)(now); - const sign = async () => { - const { client_email } = await this.storage.authClient.getCredentials(); - const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; - fields = { - ...fields, - bucket: this.bucket.name, - key: this.name, - 'x-goog-date': nowISO, - 'x-goog-credential': credential, - 'x-goog-algorithm': 'GOOG4-RSA-SHA256', - }; - const conditions = options.conditions || []; - Object.entries(fields).forEach(([key, value]) => { - if (!key.startsWith('x-ignore-')) { - conditions.push({ [key]: value }); - } - }); - delete fields.bucket; - const expiration = (0, util_1.formatAsUTCISO)(expires, true, '-', ':'); - const policy = { - conditions, - expiration, - }; - const policyString = (0, util_1.unicodeJSONStringify)(policy); - const policyBase64 = Buffer.from(policyString).toString('base64'); - try { - const signature = await this.storage.authClient.sign(policyBase64); - const signatureHex = Buffer.from(signature, 'base64').toString('hex'); - fields['policy'] = policyBase64; - fields['x-goog-signature'] = signatureHex; - let url; - if (options.virtualHostedStyle) { - url = `https://${this.bucket.name}.storage.googleapis.com/`; - } - else if (options.bucketBoundHostname) { - url = `${options.bucketBoundHostname}/`; - } - else { - url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`; - } - return { - url, - fields, - }; - } - catch (err) { - throw new signer_1.SigningError(err.message); - } - }; - sign().then(res => callback(null, res), callback); - } - /** - * @typedef {array} GetSignedUrlResponse - * @property {object} 0 The signed URL. - */ - /** - * @callback GetSignedUrlCallback - * @param {?Error} err Request error, if any. - * @param {object} url The signed URL. - */ - /** - * Get a signed URL to allow limited time access to the file. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed URL. That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} - * - * @throws {Error} if an expiration timestamp from the past is given. - * - * @param {object} config Configuration object. - * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or - * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). 
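The `{url, fields}` pair resolved by `generateSignedPostPolicyV4()` is meant to be turned into a multipart form POST. A small sketch of one way to do that from Node, assuming Node 18+ (global `fetch`, `FormData`, `Blob`) and placeholder names; a browser `<form>` with one hidden input per field works the same way.

```
const {Storage} = require('@google-cloud/storage');

async function postWithPolicy() {
  const file = new Storage().bucket('my-bucket').file('my-file');
  const [{url, fields}] = await file.generateSignedPostPolicyV4({
    expires: Date.now() + 10 * 60 * 1000, // ten minutes from now
  });

  // Every returned field (policy, x-goog-signature, key, ...) must be sent as
  // a form field; the object content conventionally goes last as "file".
  const form = new FormData();
  for (const [name, value] of Object.entries(fields)) {
    form.append(name, value);
  }
  form.append('file', new Blob(['uploaded via POST policy']));

  const res = await fetch(url, {method: 'POST', body: form});
  console.log('status:', res.status); // 204 unless success_action_status was set
}

postWithPolicy().catch(console.error);
```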
- * When using "resumable", the header `X-Goog-Resumable: start` has - * to be sent when making a request with the signed URL. - * @param {*} config.expires A timestamp when this link will expire. Any value - * given is passed to `new Date()`. - * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. - * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} - * @param {string} [config.version='v2'] The signing version to use, either - * 'v2' or 'v4'. - * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instaed of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @param {string} [config.cname] The cname for this bucket, i.e., - * "https://cdn.example.com". - * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like - * if you provide this, the client must provide this HTTP header with this same - * value in its request, so to if this parameter is not provided here, - * the client must not provide any value for this HTTP header in its request. - * @param {string} [config.contentType] Just like if you provide this, the client - * must provide this HTTP header with this same value in its request, so to if - * this parameter is not provided here, the client must not provide any value - * for this HTTP header in its request. - * @param {object} [config.extensionHeaders] If these headers are used, the - * server will check to make sure that the client provides matching - * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} - * for the requirements of this feature, most notably: - * - The header name must be prefixed with `x-goog-` - * - The header name must be all lowercase - * - * Note: Multi-valued header passed as an array in the extensionHeaders - * object is converted into a string, delimited by `,` with - * no space. Requests made using the signed URL will need to - * delimit multi-valued headers using a single `,` as well, or - * else the server will report a mismatched signature. - * @param {object} [config.queryParams] Additional query parameters to include - * in the signed URL. - * @param {string} [config.promptSaveAs] The filename to prompt the user to - * save the file as when the signed url is accessed. This is ignored if - * `config.responseDisposition` is set. - * @param {string} [config.responseDisposition] The - * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the - * signed url. - * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value - * given is passed to `new Date()`. - * Note: Use for 'v4' only. - * @param {string} [config.responseType] The response-content-type parameter - * of the signed url. - * @param {GetSignedUrlCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Generate a URL that allows temporary access to download your file. 
- * //- - * const request = require('request'); - * - * const config = { - * action: 'read', - * expires: '03-17-2025', - * }; - * - * file.getSignedUrl(config, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The file is now available to read from this URL. - * request(url, function(err, resp) { - * // resp.statusCode = 200 - * }); - * }); - * - * //- - * // Generate a URL that allows temporary access to download your file. - * // Access will begin at accessibleAt and end at expires. - * //- - * const request = require('request'); - * - * const config = { - * action: 'read', - * expires: '03-17-2025', - * accessibleAt: '03-13-2025' - * }; - * - * file.getSignedUrl(config, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. - * request(url, function(err, resp) { - * // resp.statusCode = 200 - * }); - * }); - * - * //- - * // Generate a URL to allow write permissions. This means anyone with this - * URL - * // can send a POST request with new data that will overwrite the file. - * //- - * file.getSignedUrl({ - * action: 'write', - * expires: '03-17-2025' - * }, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The file is now available to be written to. - * const writeStream = request.put(url); - * writeStream.end('New data'); - * - * writeStream.on('complete', function(resp) { - * // Confirm the new content was saved. - * file.download(function(err, fileContents) { - * console.log('Contents:', fileContents.toString()); - * // Contents: New data - * }); - * }); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.getSignedUrl(config).then(function(data) { - * const url = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_generate_signed_url - * Another example: - */ - getSignedUrl(cfg, callback) { - const method = ActionToHTTPMethod[cfg.action]; - if (!method) { - throw new Error(storage_1.ExceptionMessages.INVALID_ACTION); - } - const extensionHeaders = (0, util_1.objectKeyToLowercase)(cfg.extensionHeaders || {}); - if (cfg.action === 'resumable') { - extensionHeaders['x-goog-resumable'] = 'start'; - } - const queryParams = Object.assign({}, cfg.queryParams); - if (typeof cfg.responseType === 'string') { - queryParams['response-content-type'] = cfg.responseType; - } - if (typeof cfg.promptSaveAs === 'string') { - queryParams['response-content-disposition'] = - 'attachment; filename="' + cfg.promptSaveAs + '"'; - } - if (typeof cfg.responseDisposition === 'string') { - queryParams['response-content-disposition'] = cfg.responseDisposition; - } - if (this.generation) { - queryParams['generation'] = this.generation.toString(); - } - const signConfig = { - method, - expires: cfg.expires, - accessibleAt: cfg.accessibleAt, - extensionHeaders, - queryParams, - contentMd5: cfg.contentMd5, - contentType: cfg.contentType, - }; - if (cfg.cname) { - signConfig.cname = cfg.cname; - } - if (cfg.version) { - signConfig.version = cfg.version; - } - if (cfg.virtualHostedStyle) { - signConfig.virtualHostedStyle = cfg.virtualHostedStyle; - } - if (!this.signer) { - this.signer = new signer_1.URLSigner(this.storage.authClient, this.bucket, this); - } - this.signer - .getSignedUrl(signConfig) - .then(signedUrl => callback(null, signedUrl), callback); - } - /** - * @callback IsPublicCallback - * @param {?Error} err Request error, if any. 
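A promise-style sketch of `getSignedUrl()` as implemented above: generate a V4 read URL and fetch it without any credentials. It assumes Node 18+ for `fetch()` and placeholder bucket/object names.

```
const {Storage} = require('@google-cloud/storage');

async function readViaSignedUrl() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  const [url] = await file.getSignedUrl({
    version: 'v4',
    action: 'read',
    // V4 URLs are limited to seven days; use one hour here.
    expires: Date.now() + 60 * 60 * 1000,
  });

  // The URL is self-authorizing, so a plain fetch() is enough.
  const res = await fetch(url);
  const bytes = await res.arrayBuffer();
  console.log('status:', res.status, 'size:', bytes.byteLength);
}

readViaSignedUrl().catch(console.error);
```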
- * @param {boolean} resp Whether file is public or not. - */ - /** - * @typedef {array} IsPublicResponse - * @property {boolean} 0 Whether file is public or not. - */ - /** - * Check whether this file is public or not by sending - * a HEAD request without credentials. - * No errors from the server indicates that the current - * file is public. - * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} - * indicates that file is private. - * Any other non 403 error is propagated to user. - * - * @param {IsPublicCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Check whether the file is publicly accessible. - * //- - * file.isPublic(function(err, resp) { - * if (err) { - * console.error(err); - * return; - * } - * console.log(`the file ${file.id} is public: ${resp}`) ; - * }) - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.isPublic().then(function(data) { - * const resp = data[0]; - * }); - * ``` - */ - isPublic(callback) { - var _a; - // Build any custom headers based on the defined interceptors on the parent - // storage object and this object - const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || []; - const fileInterceptors = this.interceptors || []; - const allInterceptors = storageInterceptors.concat(fileInterceptors); - const headers = allInterceptors.reduce((acc, curInterceptor) => { - const currentHeaders = curInterceptor.request({ - uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, - }); - Object.assign(acc, currentHeaders.headers); - return acc; - }, {}); - nodejs_common_1.util.makeRequest({ - method: 'GET', - uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, - headers, - }, { - retryOptions: this.storage.retryOptions, - }, (err) => { - if (err) { - const apiError = err; - if (apiError.code === 403) { - callback(null, false); - } - else { - callback(err); - } - } - else { - callback(null, true); - } - }); - } - /** - * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). - * @property {Metadata} [metadata] Define custom metadata properties to define - * along with the operation. - * @property {boolean} [strict] If true, set the file to be private to - * only the owner user. Otherwise, it will be private to the project. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback MakeFilePrivateCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} MakeFilePrivateResponse - * @property {object} 0 The full API response. - */ - /** - * Make a file private to the project and remove all other permissions. - * Set `options.strict` to true to make the file private to only the owner. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} - * - * @param {MakeFilePrivateOptions} [options] Configuration options. - * @param {MakeFilePrivateCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Set the file private so only project maintainers can see and modify it. - * //- - * file.makePrivate(function(err) {}); - * - * //- - * // Set the file private so only the owner can see and modify it. - * //- - * file.makePrivate({ strict: true }, function(err) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.makePrivate().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - makePrivate(optionsOrCallback, callback) { - var _a, _b; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const query = { - predefinedAcl: options.strict ? 'private' : 'projectPrivate', - // eslint-disable-next-line @typescript-eslint/no-explicit-any - }; - if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { - query.ifMetagenerationMatch = - (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch; - delete options.preconditionOpts; - } - if (options.userProject) { - query.userProject = options.userProject; - } - // You aren't allowed to set both predefinedAcl & acl properties on a file, - // so acl must explicitly be nullified, destroying all previous acls on the - // file. - const metadata = extend({}, options.metadata, { acl: null }); - this.setMetadata(metadata, query, callback); - } - /** - * @typedef {array} MakeFilePublicResponse - * @property {object} 0 The full API response. - */ - /** - * @callback MakeFilePublicCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Set a file to be publicly readable and maintain all previous permissions. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} - * - * @param {MakeFilePublicCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.makePublic(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.makePublic().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_make_public - * Another example: - */ - makePublic(callback) { - callback = callback || nodejs_common_1.util.noop; - this.acl.add({ - entity: 'allUsers', - role: 'READER', - }, (err, acl, resp) => { - callback(err, resp); - }); - } - /** - * The public URL of this File - * Use {@link File#makePublic} to enable anonymous access via the returned URL. 
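Putting `makePublic()` and `publicUrl()` together: a short sketch that grants `allUsers` read access and then reads the object anonymously. It assumes the bucket still allows ACLs (uniform bucket-level access disabled), Node 18+ for `fetch()`, and placeholder names.

```
const {Storage} = require('@google-cloud/storage');

async function shareAnonymously() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  // Adds an allUsers READER ACL entry, as shown in makePublic() above.
  await file.makePublic();

  // e.g. "https://storage.googleapis.com/my-bucket/my-file"
  const url = file.publicUrl();
  const res = await fetch(url);
  console.log('anonymous read status:', res.status);
}

shareAnonymously().catch(console.error);
```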
- * - * @returns {string} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const file = bucket.file('my-file'); - * - * // publicUrl will be "https://storage.googleapis.com/albums/my-file" - * const publicUrl = file.publicUrl(); - * ``` - */ - publicUrl() { - return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; - } - /** - * @typedef {array} MoveResponse - * @property {File} 0 The destination File. - * @property {object} 1 The full API response. - */ - /** - * @callback MoveCallback - * @param {?Error} err Request error, if any. - * @param {?File} destinationFile The destination File. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} MoveOptions Configuration options for File#move(). See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. - * @param {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Move this file to another location. By default, this will rename the file - * and keep it in the same bucket, but you can choose to move it to another - * Bucket by providing a Bucket or File object or a URL beginning with - * "gs://". - * - * **Warning**: - * There is currently no atomic `move` method in the Cloud Storage API, - * so this method is a composition of {@link File#copy} (to the new - * location) and {@link File#delete} (from the old location). While - * unlikely, it is possible that an error returned to your callback could be - * triggered from either one of these API calls failing, which could leave a - * duplicate file lingering. The error message will indicate what operation - * has failed. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} - * - * @throws {Error} If the destination file is not provided. - * - * @param {string|Bucket|File} destination Destination file. - * @param {MoveCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * //- - * // You can pass in a variety of types for the destination. - * // - * // For all of the below examples, assume we are working with the following - * // Bucket and File objects. - * //- - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('my-image.png'); - * - * //- - * // If you pass in a string for the destination, the file is moved to its - * // current bucket, under the new name provided. - * //- - * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // but contains instead: - * // - "my-image-new.png" - * - * // `destinationFile` is an instance of a File object that refers to your - * // new file. - * }); - * - * //- - * // If you pass in a string starting with "gs://" for the destination, the - * // file is copied to the other bucket and under the new name provided. - * //- - * const newLocation = 'gs://another-bucket/my-image-new.png'; - * file.move(newLocation, function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image-new.png" - * - * // `destinationFile` is an instance of a File object that refers to your - * // new file. 
- * }); - * - * //- - * // If you pass in a Bucket object, the file will be moved to that bucket - * // using the same name. - * //- - * const anotherBucket = gcs.bucket('another-bucket'); - * - * file.move(anotherBucket, function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image.png" - * - * // `destinationFile` is an instance of a File object that refers to your - * // new file. - * }); - * - * //- - * // If you pass in a File object, you have complete control over the new - * // bucket and filename. - * //- - * const anotherFile = anotherBucket.file('my-awesome-image.png'); - * - * file.move(anotherFile, function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-awesome-image.png" - * - * // Note: - * // The `destinationFile` parameter is equal to `anotherFile`. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.move('my-image-new.png').then(function(data) { - * const destinationFile = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_move_file - * Another example: - */ - move(destination, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - callback = callback || nodejs_common_1.util.noop; - this.copy(destination, options, (err, destinationFile, copyApiResponse) => { - if (err) { - err.message = 'file#copy failed with an error - ' + err.message; - callback(err, null, copyApiResponse); - return; - } - if (this.name !== destinationFile.name || - this.bucket.name !== destinationFile.bucket.name) { - this.delete(options, (err, apiResponse) => { - if (err) { - err.message = 'file#delete failed with an error - ' + err.message; - callback(err, destinationFile, apiResponse); - return; - } - callback(null, destinationFile, copyApiResponse); - }); - } - else { - callback(null, destinationFile, copyApiResponse); - } - }); - } - /** - * @typedef {array} RenameResponse - * @property {File} 0 The destination File. - * @property {object} 1 The full API response. - */ - /** - * @callback RenameCallback - * @param {?Error} err Request error, if any. - * @param {?File} destinationFile The destination File. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} RenameOptions Configuration options for File#move(). See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. - * @param {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Rename this file. - * - * **Warning**: - * There is currently no atomic `rename` method in the Cloud Storage API, - * so this method is an alias of {@link File#move}, which in turn is a - * composition of {@link File#copy} (to the new location) and - * {@link File#delete} (from the old location). While - * unlikely, it is possible that an error returned to your callback could be - * triggered from either one of these API calls failing, which could leave a - * duplicate file lingering. The error message will indicate what operation - * has failed. - * - * @param {string|File} destinationFile Destination file. - * @param {RenameCallback} [callback] Callback function. 
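Because `move()` above is `copy()` followed by `delete()`, a failure can leave either the untouched source (copy failed) or a duplicate (delete failed), and the error-message prefix added by the implementation says which half failed. A sketch of handling both cases, with placeholder names:

```
const {Storage} = require('@google-cloud/storage');

async function moveAcrossBuckets() {
  const bucket = new Storage().bucket('my-bucket');
  try {
    const [destinationFile] = await bucket
      .file('my-image.png')
      .move('gs://another-bucket/my-image.png');
    console.log('moved to', destinationFile.name);
  } catch (err) {
    if (/file#delete failed/.test(err.message)) {
      // The copy succeeded but the source was not removed: both objects exist.
      console.error('duplicate left in my-bucket:', err.message);
    } else {
      // The copy itself failed: the source object is untouched.
      console.error('move did not happen:', err.message);
    }
  }
}

moveAcrossBuckets().catch(console.error);
```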
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // You can pass in a string or a File object. - * // - * // For all of the below examples, assume we are working with the following - * // Bucket and File objects. - * //- - * - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('my-image.png'); - * - * //- - * // You can pass in a string for the destinationFile. - * //- - * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // but contains instead: - * // - "renamed-image.png" - * - * // `renamedFile` is an instance of a File object that refers to your - * // renamed file. - * }); - * - * //- - * // You can pass in a File object. - * //- - * const anotherFile = anotherBucket.file('my-awesome-image.png'); - * - * file.rename(anotherFile, function(err, renamedFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * - * // Note: - * // The `renamedFile` parameter is equal to `anotherFile`. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.rename('my-renamed-image.png').then(function(data) { - * const renamedFile = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - rename(destinationFile, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - callback = callback || nodejs_common_1.util.noop; - this.move(destinationFile, options, callback); - } - /** - * Makes request and applies userProject query parameter if necessary. - * - * @private - * - * @param {object} reqOpts - The request options. - * @param {function} callback - The callback function. - */ - request(reqOpts, callback) { - return this.parent.request.call(this, reqOpts, callback); - } - /** - * @callback RotateEncryptionKeyCallback - * @extends CopyCallback - */ - /** - * @typedef RotateEncryptionKeyResponse - * @extends CopyResponse - */ - /** - * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options - * for File#rotateEncryptionKey(). - * If a string or Buffer is provided, it is interpreted as an AES-256, - * customer-supplied encryption key. If you'd like to use a Cloud KMS key - * name, you must specify an options object with the property name: - * `kmsKeyName`. - * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. - * @param {string} [options.kmsKeyName] A Cloud KMS key name. - */ - /** - * This method allows you to update the encryption key associated with this - * file. - * - * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} - * - * @param {RotateEncryptionKeyOptions} [options] - Configuration options. - * @param {RotateEncryptionKeyCallback} [callback] - * @returns {Promise} - * - * @example include:samples/encryption.js - * region_tag:storage_rotate_encryption_key - * Example of rotating the encryption key for this file: - */ - rotateEncryptionKey(optionsOrCallback, callback) { - var _a; - callback = - typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; - let options = {}; - if (typeof optionsOrCallback === 'string' || - optionsOrCallback instanceof Buffer) { - options = { - encryptionKey: optionsOrCallback, - }; - } - else if (typeof optionsOrCallback === 'object') { - options = optionsOrCallback; - } - const newFile = this.bucket.file(this.id, options); - const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined - ? { preconditionOpts: options.preconditionOpts } - : {}; - this.copy(newFile, copyOptions, callback); - } - /** - * @typedef {object} SaveOptions - * @extends CreateWriteStreamOptions - */ - /** - * @callback SaveCallback - * @param {?Error} err Request error, if any. - */ - /** - * Write strings or buffers to a file. - * - * *This is a convenience method which wraps {@link File#createWriteStream}.* - * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. - * - * Resumable uploads are automatically enabled and must be shut off explicitly - * by setting `options.resumable` to `false`. - * - * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. - * - *

- * There is some overhead when using a resumable upload that can cause - * noticeable performance degradation while uploading a series of small - * files. When uploading files less than 10MB, it is recommended that the - * resumable feature is disabled. - *

- * - * @param {string | Buffer} data The data to write to a file. - * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` - * parameter. - * @param {SaveCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * const contents = 'This is the contents of the file.'; - * - * file.save(contents, function(err) { - * if (!err) { - * // File written successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.save(contents).then(function() {}); - * ``` - */ - save(data, optionsOrCallback, callback) { - // tslint:enable:no-any - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - let maxRetries = this.storage.retryOptions.maxRetries; - if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { - maxRetries = 0; - } - const returnValue = retry(async (bail) => { - await new Promise((resolve, reject) => { - if (maxRetries === 0) { - this.storage.retryOptions.autoRetry = false; - } - const writable = this.createWriteStream(options) - .on('error', err => { - if (this.storage.retryOptions.autoRetry && - this.storage.retryOptions.retryableErrorFn(err)) { - return reject(err); - } - else { - return bail(err); - } - }) - .on('finish', () => { - return resolve(); - }); - if (options.onUploadProgress) { - writable.on('progress', options.onUploadProgress); - } - writable.end(data); - }); - }, { - retries: maxRetries, - factor: this.storage.retryOptions.retryDelayMultiplier, - maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, - maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds - }); - if (!callback) { - return returnValue; - } - else { - return returnValue - .then(() => { - if (callback) { - return callback(); - } - }) - .catch(callback); - } - } - setMetadata(metadata, optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = - typeof optionsOrCallback === 'function' - ? optionsOrCallback - : cb; - this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_1.AvailableServiceObjectMethods.setMetadata, options); - super - .setMetadata(metadata, options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } - /** - * @typedef {array} SetStorageClassResponse - * @property {object} 0 The full API response. - */ - /** - * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback SetStorageClassCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Set the storage class for this file. - * - * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} - * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} - * - * @param {string} storageClass The new storage class. 
(`standard`, - * `nearline`, `coldline`, or `archive`) - * **Note:** The storage classes `multi_regional` and `regional` - * are now legacy and will be deprecated in the future. - * @param {SetStorageClassOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {SetStorageClassCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * file.setStorageClass('nearline', function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * // The storage class was updated successfully. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.setStorageClass('nearline').then(function() {}); - * ``` - */ - setStorageClass(storageClass, optionsOrCallback, callback) { - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const req = extend(true, {}, options); - // In case we get input like `storageClass`, convert to `storage_class`. - req.storageClass = storageClass - .replace(/-/g, '_') - .replace(/([a-z])([A-Z])/g, (_, low, up) => { - return low + '_' + up; - }) - .toUpperCase(); - this.copy(this, req, (err, file, apiResponse) => { - if (err) { - callback(err, apiResponse); - return; - } - this.metadata = file.metadata; - callback(null, apiResponse); - }); - } - /** - * Set a user project to be billed for all requests made from this File - * object. - * - * @param {string} userProject The user project. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const file = bucket.file('my-file'); - * - * file.setUserProject('grape-spaceship-123'); - * ``` - */ - setUserProject(userProject) { - this.bucket.setUserProject.call(this, userProject); - } - /** - * This creates a resumable-upload upload stream. - * - * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. - * @param {object=} options - Configuration object. - * - * @private - */ - startResumableUpload_(dup, options) { - options = extend(true, { - metadata: {}, - }, options); - const retryOptions = this.storage.retryOptions; - if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { - retryOptions.autoRetry = false; - } - const uploadStream = resumableUpload.upload({ - authClient: this.storage.authClient, - apiEndpoint: this.storage.apiEndpoint, - bucket: this.bucket.name, - customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), - file: this.name, - generation: this.generation, - key: this.encryptionKey, - kmsKeyName: this.kmsKeyName, - metadata: options.metadata, - offset: options.offset, - predefinedAcl: options.predefinedAcl, - private: options.private, - public: options.public, - uri: options.uri, - userProject: options.userProject || this.userProject, - retryOptions: { ...retryOptions }, - params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, - chunkSize: options === null || options === void 0 ? 
void 0 : options.chunkSize, - }); - uploadStream - .on('response', resp => { - dup.emit('response', resp); - }) - .on('metadata', metadata => { - this.metadata = metadata; - dup.emit('metadata'); - }) - .on('finish', () => { - dup.emit('complete'); - }) - .on('progress', evt => dup.emit('progress', evt)); - dup.setWritable(uploadStream); - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - } - /** - * Takes a readable stream and pipes it to a remote file. Unlike - * `startResumableUpload_`, which uses the resumable upload technique, this - * method uses a simple upload (all or nothing). - * - * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. - * @param {object=} options - Configuration object. - * - * @private - */ - startSimpleUpload_(dup, options) { - options = extend(true, { - metadata: {}, - }, options); - const apiEndpoint = this.storage.apiEndpoint; - const bucketName = this.bucket.name; - const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; - const reqOpts = { - qs: { - name: this.name, - }, - uri: uri, - }; - if (this.generation !== undefined) { - reqOpts.qs.ifGenerationMatch = this.generation; - } - if (this.kmsKeyName !== undefined) { - reqOpts.qs.kmsKeyName = this.kmsKeyName; - } - if (typeof options.timeout === 'number') { - reqOpts.timeout = options.timeout; - } - if (options.userProject || this.userProject) { - reqOpts.qs.userProject = options.userProject || this.userProject; - } - if (options.predefinedAcl) { - reqOpts.qs.predefinedAcl = options.predefinedAcl; - } - else if (options.private) { - reqOpts.qs.predefinedAcl = 'private'; - } - else if (options.public) { - reqOpts.qs.predefinedAcl = 'publicRead'; - } - Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); - nodejs_common_1.util.makeWritableStream(dup, { - makeAuthenticatedRequest: (reqOpts) => { - this.request(reqOpts, (err, body, resp) => { - if (err) { - dup.destroy(err); - return; - } - this.metadata = body; - dup.emit('metadata', body); - dup.emit('response', resp); - dup.emit('complete'); - }); - }, - metadata: options.metadata, - request: reqOpts, - }); - } - disableAutoRetryConditionallyIdempotent_( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - coreOpts, methodType, localPreconditionOptions) { - var _a, _b, _c, _d; - if ((typeof coreOpts === 'object' && - ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && - (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && - methodType === bucket_1.AvailableServiceObjectMethods.delete && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - this.storage.retryOptions.autoRetry = false; - } - if ((typeof coreOpts === 'object' && - ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && - (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && - methodType === bucket_1.AvailableServiceObjectMethods.setMetadata && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - this.storage.retryOptions.autoRetry = false; - } - } - async getBufferFromReadable(readable) { - const buf = []; - for await (const chunk of readable) { - buf.push(chunk); - } - return Buffer.concat(buf); - } -} -exports.File = File; -_File_instances = new WeakSet(), _File_validateIntegrity = -/** - * - * @param hashCalculatingStream - * @param verify - * @returns {boolean} Returns `true` if valid, throws with error otherwise - */ -async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { - const metadata = this.metadata; - // If we're doing validation, assume the worst - let dataMismatch = !!(verify.crc32c || verify.md5); - if (verify.crc32c && metadata.crc32c) { - dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); - } - if (verify.md5 && metadata.md5Hash) { - dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); - } - if (dataMismatch) { - const errors = []; - let code = ''; - let message = ''; - try { - await this.delete(); - if (verify.md5 && !metadata.md5Hash) { - code = 'MD5_NOT_AVAILABLE'; - message = FileExceptionMessages.MD5_NOT_AVAILABLE; - } - else { - code = 'FILE_NO_UPLOAD'; - message = FileExceptionMessages.UPLOAD_MISMATCH; - } - } - catch (e) { - const error = e; - code = 'FILE_NO_UPLOAD_DELETE'; - message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; - errors.push(error); - } - const error = new RequestError(message); - error.code = code; - error.errors = errors; - throw error; - } - return true; -}; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(File, { - exclude: [ - 'cloudStorageURI', - 'publicUrl', - 'request', - 'save', - 'setEncryptionKey', - 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', - 'getBufferFromReadable', - ], -}); -//# sourceMappingURL=file.js.map - -/***/ }), - -/***/ 8386: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; -}; -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HashStreamValidator = void 0; -const crypto_1 = __nccwpck_require__(6113); -const stream_1 = __nccwpck_require__(2781); -const crc32c_1 = __nccwpck_require__(4921); -const file_1 = __nccwpck_require__(5373); -class HashStreamValidator extends stream_1.Transform { - constructor(options = {}) { - super(); - this.updateHashesOnly = false; - _HashStreamValidator_crc32cHash.set(this, undefined); - _HashStreamValidator_md5Hash.set(this, undefined); - _HashStreamValidator_md5Digest.set(this, ''); - this.crc32cEnabled = !!options.crc32c; - this.md5Enabled = !!options.md5; - this.updateHashesOnly = !!options.updateHashesOnly; - this.crc32cExpected = options.crc32cExpected; - this.md5Expected = options.md5Expected; - if (this.crc32cEnabled) { - const crc32cGenerator = options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; - __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); - } - if (this.md5Enabled) { - __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); - } - } - _flush(callback) { - if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { - __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); - } - if (this.updateHashesOnly) { - callback(); - return; - } - // If we're doing validation, assume the worst-- a data integrity - // mismatch. If not, these tests won't be performed, and we can assume - // the best. - // We must check if the server decompressed the data on serve because hash - // validation is not possible in this case. - let failed = this.crc32cEnabled || this.md5Enabled; - if (this.crc32cEnabled && this.crc32cExpected) { - failed = !this.test('crc32c', this.crc32cExpected); - } - if (this.md5Enabled && this.md5Expected) { - failed = !this.test('md5', this.md5Expected); - } - if (failed) { - const mismatchError = new file_1.RequestError(file_1.FileExceptionMessages.DOWNLOAD_MISMATCH); - mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; - callback(mismatchError); - } - else { - callback(); - } - } - _transform(chunk, encoding, callback) { - this.push(chunk, encoding); - try { - if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) - __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); - if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) - __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); - callback(); - } - catch (e) { - callback(e); - } - } - test(hash, sum) { - const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum; - if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { - return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); - } - if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { - return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; - } - return false; - } -} -exports.HashStreamValidator = HashStreamValidator; -_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); -//# sourceMappingURL=hash-stream-validator.js.map - -/***/ }), - -/***/ 9930: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HmacKey = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const storage_1 = __nccwpck_require__(346); -const promisify_1 = __nccwpck_require__(9203); -/** - * The API-formatted resource description of the HMAC key. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name HmacKey#metadata - * @type {object} - */ -/** - * An HmacKey object contains metadata of an HMAC key created from a - * service account through the {@link Storage} client using - * {@link Storage#createHmacKey}. - * - * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} - * - * @class - */ -class HmacKey extends nodejs_common_1.ServiceObject { - /** - * @typedef {object} HmacKeyOptions - * @property {string} [projectId] The project ID of the project that owns - * the service account of the requested HMAC key. If not provided, - * the project ID used to instantiate the Storage client will be used. - */ - /** - * Constructs an HmacKey object. - * - * Note: this only create a local reference to an HMAC key, to create - * an HMAC key, use {@link Storage#createHmacKey}. - * - * @param {Storage} storage The Storage instance this HMAC key is - * attached to. - * @param {string} accessId The unique accessId for this HMAC key. - * @param {HmacKeyOptions} options Constructor configurations. - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const hmacKey = storage.hmacKey('access-id'); - * ``` - */ - constructor(storage, accessId, options) { - const methods = { - /** - * @typedef {object} DeleteHmacKeyOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * @typedef {array} DeleteHmacKeyResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteHmacKeyCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Deletes an HMAC key. 
- * Key state must be set to `INACTIVE` prior to deletion. - * Caution: HMAC keys cannot be recovered once you delete them. - * - * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. - * - * @method HmacKey#delete - * @param {DeleteHmacKeyOptions} [options] Configuration options. - * @param {DeleteHmacKeyCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Delete HMAC key after making the key inactive. - * //- - * const hmacKey = storage.hmacKey('ACCESS_ID'); - * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * hmacKey.delete((err) => { - * if (err) { - * console.error(err); - * return; - * } - * // The HMAC key is deleted. - * }); - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * const hmacKey = storage.hmacKey('ACCESS_ID'); - * hmacKey - * .setMetadata({state: 'INACTIVE'}) - * .then(() => { - * return hmacKey.delete(); - * }); - * ``` - */ - delete: true, - /** - * @callback GetHmacKeyCallback - * @param {?Error} err Request error, if any. - * @param {HmacKey} hmacKey this {@link HmacKey} instance. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} GetHmacKeyResponse - * @property {HmacKey} 0 This {@link HmacKey} instance. - * @property {object} 1 The full API response. - */ - /** - * @typedef {object} GetHmacKeyOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * Retrieves and populate an HMAC key's metadata, and return - * this {@link HmacKey} instance. - * - * HmacKey.get() does not give the HMAC key secret, as - * it is only returned on creation. - * - * The authenticated user must have `storage.hmacKeys.get` permission - * for the project in which the key exists. - * - * @method HmacKey#get - * @param {GetHmacKeyOptions} [options] Configuration options. - * @param {GetHmacKeyCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Get the HmacKey's Metadata. - * //- - * storage.hmacKey('ACCESS_ID') - * .get((err, hmacKey) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * // do something with the returned HmacKey object. - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * storage.hmacKey('ACCESS_ID') - * .get() - * .then((data) => { - * const hmacKey = data[0]; - * }); - * ``` - */ - get: true, - /** - * @typedef {object} GetHmacKeyMetadataOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * Retrieves and populate an HMAC key's metadata, and return - * the HMAC key's metadata as an object. - * - * HmacKey.getMetadata() does not give the HMAC key secret, as - * it is only returned on creation. - * - * The authenticated user must have `storage.hmacKeys.get` permission - * for the project in which the key exists. - * - * @method HmacKey#getMetadata - * @param {GetHmacKeyMetadataOptions} [options] Configuration options. - * @param {HmacKeyMetadataCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Get the HmacKey's metadata and populate to the metadata property. - * //- - * storage.hmacKey('ACCESS_ID') - * .getMetadata((err, hmacKeyMetadata) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * console.log(hmacKeyMetadata); - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * storage.hmacKey('ACCESS_ID') - * .getMetadata() - * .then((data) => { - * const hmacKeyMetadata = data[0]; - * console.log(hmacKeyMetadata); - * }); - * ``` - */ - getMetadata: true, - /** - * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. - * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. - * @property {string} [etag] Include an etag from a previous get HMAC key request - * to perform safe read-modify-write. - */ - /** - * @typedef {object} SetHmacKeyMetadataOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * @callback HmacKeyMetadataCallback - * @param {?Error} err Request error, if any. - * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} HmacKeyMetadataResponse - * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. - * @property {object} 1 The full API response. - */ - /** - * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for - * valid states. - * - * @method HmacKey#setMetadata - * @param {SetHmacKeyMetadata} metadata The new metadata. - * @param {SetHmacKeyMetadataOptions} [options] Configuration options. - * @param {HmacKeyMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * const metadata = { - * state: 'INACTIVE', - * }; - * - * storage.hmacKey('ACCESS_ID') - * .setMetadata(metadata, (err, hmacKeyMetadata) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * console.log(hmacKeyMetadata); - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * storage.hmacKey('ACCESS_ID') - * .setMetadata(metadata) - * .then((data) => { - * const hmacKeyMetadata = data[0]; - * console.log(hmacKeyMetadata); - * }); - * ``` - */ - setMetadata: { - reqOpts: { - method: 'PUT', - }, - }, - }; - const projectId = (options && options.projectId) || storage.projectId; - super({ - parent: storage, - id: accessId, - baseUrl: `/projects/${projectId}/hmacKeys`, - methods, - }); - this.storage = storage; - this.instanceRetryValue = storage.retryOptions.autoRetry; - } - setMetadata(metadata, optionsOrCallback, cb) { - // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways - if (this.storage.retryOptions.idempotencyStrategy !== - storage_1.IdempotencyStrategy.RetryAlways) { - this.storage.retryOptions.autoRetry = false; - } - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = - typeof optionsOrCallback === 'function' - ? 
optionsOrCallback - : cb; - super - .setMetadata(metadata, options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } -} -exports.HmacKey = HmacKey; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(HmacKey); -//# sourceMappingURL=hmacKey.js.map - -/***/ }), - -/***/ 66: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Iam = exports.IAMExceptionMessages = void 0; -const promisify_1 = __nccwpck_require__(9203); -const util_1 = __nccwpck_require__(1862); -var IAMExceptionMessages; -(function (IAMExceptionMessages) { - IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; - IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; -})(IAMExceptionMessages = exports.IAMExceptionMessages || (exports.IAMExceptionMessages = {})); -/** - * Get and set IAM policies for your Cloud Storage bucket. - * - * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} - * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} - * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} - * - * @constructor Iam - * - * @param {Bucket} bucket The parent instance. - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * // bucket.iam - * ``` - */ -class Iam { - constructor(bucket) { - this.request_ = bucket.request.bind(bucket); - this.resourceId_ = 'buckets/' + bucket.getId(); - } - /** - * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). - * @property {number} [requestedPolicyVersion] The version of IAM policies to - * request. If a policy with a condition is requested without setting - * this, the server will return an error. This must be set to a value - * of 3 to retrieve IAM policies containing conditions. This is to - * prevent client code that isn't aware of IAM conditions from - * interpreting and modifying policies incorrectly. The service might - * return a policy with version lower than the one that was requested, - * based on the feature syntax in the policy fetched. - * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} GetPolicyResponse - * @property {Policy} 0 The policy. - * @property {object} 1 The full API response. 
- */ - /** - * @typedef {object} Policy - * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. - * @property {string} [policy.etag] Etags are used to perform a read-modify-write. - * @property {number} [policy.version] The syntax schema version of the Policy. - * To set an IAM policy with conditional binding, this field must be set to - * 3 or greater. - * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} - */ - /** - * @typedef {object} PolicyBinding - * @property {string} role Role that is assigned to members. - * @property {string[]} members Specifies the identities requesting access for the bucket. - * @property {Expr} [condition] The condition that is associated with this binding. - */ - /** - * @typedef {object} Expr - * @property {string} [title] An optional title for the expression, i.e. a - * short string describing its purpose. This can be used e.g. in UIs - * which allow to enter the expression. - * @property {string} [description] An optional description of the - * expression. This is a longer text which describes the expression, - * e.g. when hovered over it in a UI. - * @property {string} expression Textual representation of an expression in - * Common Expression Language syntax. The application context of the - * containing message determines which well-known feature set of CEL - * is supported.The condition that is associated with this binding. - * - * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions - */ - /** - * Get the IAM policy. - * - * @param {GetPolicyOptions} [options] Request options. - * @param {GetPolicyCallback} [callback] Callback function. - * @returns {Promise} - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * bucket.iam.getPolicy( - * {requestedPolicyVersion: 3}, - * function(err, policy, apiResponse) { - * - * }, - * ); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.iam.getPolicy({requestedPolicyVersion: 3}) - * .then(function(data) { - * const policy = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/iam.js - * region_tag:storage_view_bucket_iam_members - * Example of retrieving a bucket's IAM policy: - */ - getPolicy(optionsOrCallback, callback) { - const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); - const qs = {}; - if (options.userProject) { - qs.userProject = options.userProject; - } - if (options.requestedPolicyVersion !== null && - options.requestedPolicyVersion !== undefined) { - qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; - } - this.request_({ - uri: '/iam', - qs, - }, cb); - } - /** - * Set the IAM policy. - * - * @throws {Error} If no policy is provided. - * - * @param {Policy} policy The policy. - * @param {SetPolicyOptions} [options] Configuration options. - * @param {SetPolicyCallback} callback Callback function. 
- * @returns {Promise} - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} - * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * const myPolicy = { - * bindings: [ - * { - * role: 'roles/storage.admin', - * members: - * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] - * } - * ] - * }; - * - * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.iam.setPolicy(myPolicy).then(function(data) { - * const policy = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/iam.js - * region_tag:storage_add_bucket_iam_member - * Example of adding to a bucket's IAM policy: - * - * @example include:samples/iam.js - * region_tag:storage_remove_bucket_iam_member - * Example of removing from a bucket's IAM policy: - */ - setPolicy(policy, optionsOrCallback, callback) { - if (policy === null || typeof policy !== 'object') { - throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); - } - const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); - let maxRetries; - if (policy.etag === undefined) { - maxRetries = 0; - } - this.request_({ - method: 'PUT', - uri: '/iam', - maxRetries, - json: Object.assign({ - resourceId: this.resourceId_, - }, policy), - qs: options, - }, cb); - } - /** - * Test a set of permissions for a resource. - * - * @throws {Error} If permissions are not provided. - * - * @param {string|string[]} permissions The permission(s) to test for. - * @param {TestIamPermissionsOptions} [options] Configuration object. - * @param {TestIamPermissionsCallback} [callback] Callback function. - * @returns {Promise} - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * //- - * // Test a single permission. - * //- - * const test = 'storage.buckets.delete'; - * - * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { - * console.log(permissions); - * // { - * // "storage.buckets.delete": true - * // } - * }); - * - * //- - * // Test several permissions at once. - * //- - * const tests = [ - * 'storage.buckets.delete', - * 'storage.buckets.get' - * ]; - * - * bucket.iam.testPermissions(tests, function(err, permissions) { - * console.log(permissions); - * // { - * // "storage.buckets.delete": false, - * // "storage.buckets.get": true - * // } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.iam.testPermissions(test).then(function(data) { - * const permissions = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - testPermissions(permissions, optionsOrCallback, callback) { - if (!Array.isArray(permissions) && typeof permissions !== 'string') { - throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); - } - const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); - const permissionsArray = Array.isArray(permissions) - ? 
permissions - : [permissions]; - const req = Object.assign({ - permissions: permissionsArray, - }, options); - this.request_({ - uri: '/iam/testPermissions', - qs: req, - useQuerystring: true, - }, (err, resp) => { - if (err) { - cb(err, null, resp); - return; - } - const availablePermissions = Array.isArray(resp.permissions) - ? resp.permissions - : []; - const permissionsHash = permissionsArray.reduce((acc, permission) => { - acc[permission] = availablePermissions.indexOf(permission) > -1; - return acc; - }, {}); - cb(null, permissionsHash, resp); - }); - } -} -exports.Iam = Iam; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Iam); -//# sourceMappingURL=iam.js.map - -/***/ }), - -/***/ 8174: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Storage = exports.IdempotencyStrategy = exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = void 0; -var bucket_1 = __nccwpck_require__(8561); -Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_1.Bucket; } })); -__exportStar(__nccwpck_require__(4921), exports); -var channel_1 = __nccwpck_require__(8953); -Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_1.Channel; } })); -var file_1 = __nccwpck_require__(5373); -Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_1.File; } })); -__exportStar(__nccwpck_require__(8386), exports); -var hmacKey_1 = __nccwpck_require__(9930); -Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_1.HmacKey; } })); -var iam_1 = __nccwpck_require__(66); -Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_1.Iam; } })); -var notification_1 = __nccwpck_require__(7523); -Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_1.Notification; } })); -var storage_1 = __nccwpck_require__(346); -Object.defineProperty(exports, 
"IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_1.IdempotencyStrategy; } })); -Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_1.Storage; } })); -__exportStar(__nccwpck_require__(9904), exports); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 2881: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; -var service_1 = __nccwpck_require__(8440); -Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_1.Service; } })); -var service_object_1 = __nccwpck_require__(822); -Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_1.ServiceObject; } })); -var util_1 = __nccwpck_require__(2469); -Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_1.ApiError; } })); -Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_1.util; } })); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 822: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ServiceObject = void 0; -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const promisify_1 = __nccwpck_require__(9203); -const events_1 = __nccwpck_require__(2361); -const extend = __nccwpck_require__(8171); -const util_1 = __nccwpck_require__(2469); -/** - * ServiceObject is a base class, meant to be inherited from by a "service - * object," like a BigQuery dataset or Storage bucket. - * - * Most of the time, these objects share common functionality; they can be - * created or deleted, and you can get or set their metadata. - * - * By inheriting from this class, a service object will be extended with these - * shared behaviors. Note that any method can be overridden when the service - * object requires specific behavior. - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -class ServiceObject extends events_1.EventEmitter { - /* - * @constructor - * @alias module:common/service-object - * - * @private - * - * @param {object} config - Configuration object. - * @param {string} config.baseUrl - The base URL to make API requests to. - * @param {string} config.createMethod - The method which creates this object. - * @param {string=} config.id - The identifier of the object. For example, the - * name of a Storage bucket or Pub/Sub topic. - * @param {object=} config.methods - A map of each method name that should be inherited. - * @param {object} config.methods[].reqOpts - Default request options for this - * particular method. A common use case is when `setMetadata` requires a - * `PUT` method to override the default `PATCH`. 
- * @param {object} config.parent - The parent service instance. For example, an - * instance of Storage if the object is Bucket. - */ - constructor(config) { - super(); - this.metadata = {}; - this.baseUrl = config.baseUrl; - this.parent = config.parent; // Parent class. - this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). - this.createMethod = config.createMethod; - this.methods = config.methods || {}; - this.interceptors = []; - this.projectId = config.projectId; - if (config.methods) { - // This filters the ServiceObject instance (e.g. a "File") to only have - // the configured methods. We make a couple of exceptions for core- - // functionality ("request()" and "getRequestInterceptors()") - Object.getOwnPropertyNames(ServiceObject.prototype) - .filter(methodName => { - return ( - // All ServiceObjects need `request` and `getRequestInterceptors`. - // clang-format off - !/^request/.test(methodName) && - !/^getRequestInterceptors/.test(methodName) && - // clang-format on - // The ServiceObject didn't redefine the method. - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this[methodName] === - // eslint-disable-next-line @typescript-eslint/no-explicit-any - ServiceObject.prototype[methodName] && - // This method isn't wanted. - !config.methods[methodName]); - }) - .forEach(methodName => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this[methodName] = undefined; - }); - } - } - create(optionsOrCallback, callback) { - // eslint-disable-next-line @typescript-eslint/no-this-alias - const self = this; - const args = [this.id]; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - if (typeof optionsOrCallback === 'object') { - args.push(optionsOrCallback); - } - // Wrap the callback to return *this* instance of the object, not the - // newly-created one. - // tslint: disable-next-line no-any - function onCreate(...args) { - const [err, instance] = args; - if (!err) { - self.metadata = instance.metadata; - if (self.id && instance.metadata) { - self.id = instance.metadata.id; - } - args[1] = self; // replace the created `instance` with this one. - } - callback(...args); - } - args.push(onCreate); - // eslint-disable-next-line prefer-spread - this.createMethod.apply(null, args); - } - delete(optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const ignoreNotFound = options.ignoreNotFound; - delete options.ignoreNotFound; - const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; - const reqOpts = extend(true, { - method: 'DELETE', - uri: '', - }, methodConfig.reqOpts, { - qs: options, - }); - // The `request` method may have been overridden to hold any special - // behavior. Ensure we call the original `request` method. 
- ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => { - if (err) { - if (err.code === 404 && ignoreNotFound) { - err = null; - } - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback(err, ...args); - }); - } - exists(optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - this.get(options, err => { - if (err) { - if (err.code === 404) { - callback(null, false); - } - else { - callback(err); - } - return; - } - callback(null, true); - }); - } - get(optionsOrCallback, cb) { - // eslint-disable-next-line @typescript-eslint/no-this-alias - const self = this; - const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const options = Object.assign({}, opts); - const autoCreate = options.autoCreate && typeof this.create === 'function'; - delete options.autoCreate; - function onCreate(err, instance, apiResponse) { - if (err) { - if (err.code === 409) { - self.get(options, callback); - return; - } - callback(err, null, apiResponse); - return; - } - callback(null, instance, apiResponse); - } - this.getMetadata(options, (err, metadata) => { - if (err) { - if (err.code === 404 && autoCreate) { - const args = []; - if (Object.keys(options).length > 0) { - args.push(options); - } - args.push(onCreate); - self.create(...args); - return; - } - callback(err, null, metadata); - return; - } - callback(null, self, metadata); - }); - } - getMetadata(optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const methodConfig = (typeof this.methods.getMetadata === 'object' && - this.methods.getMetadata) || - {}; - const reqOpts = extend(true, { - uri: '', - }, methodConfig.reqOpts, { - qs: options, - }); - // The `request` method may have been overridden to hold any special - // behavior. Ensure we call the original `request` method. - ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { - this.metadata = body; - callback(err, this.metadata, res); - }); - } - /** - * Return the user's custom request interceptors. - */ - getRequestInterceptors() { - // Interceptors should be returned in the order they were assigned. - const localInterceptors = this.interceptors - .filter(interceptor => typeof interceptor.request === 'function') - .map(interceptor => interceptor.request); - return this.parent.getRequestInterceptors().concat(localInterceptors); - } - setMetadata(metadata, optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const methodConfig = (typeof this.methods.setMetadata === 'object' && - this.methods.setMetadata) || - {}; - const reqOpts = extend(true, {}, { - method: 'PATCH', - uri: '', - }, methodConfig.reqOpts, { - json: metadata, - qs: options, - }); - // The `request` method may have been overridden to hold any special - // behavior. Ensure we call the original `request` method. 
- ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { - this.metadata = body; - callback(err, this.metadata, res); - }); - } - request_(reqOpts, callback) { - reqOpts = extend(true, {}, reqOpts); - if (this.projectId) { - reqOpts.projectId = this.projectId; - } - const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; - const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; - if (isAbsoluteUrl) { - uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); - } - reqOpts.uri = uriComponents - .filter(x => x.trim()) // Limit to non-empty strings. - .map(uriComponent => { - const trimSlashesRegex = /^\/*|\/*$/g; - return uriComponent.replace(trimSlashesRegex, ''); - }) - .join('/'); - const childInterceptors = Array.isArray(reqOpts.interceptors_) - ? reqOpts.interceptors_ - : []; - const localInterceptors = [].slice.call(this.interceptors); - reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); - if (reqOpts.shouldReturnStream) { - return this.parent.requestStream(reqOpts); - } - this.parent.request(reqOpts, callback); - } - request(reqOpts, callback) { - this.request_(reqOpts, callback); - } - /** - * Make an authenticated API request. - * - * @param {object} reqOpts - Request options that are passed to `request`. - * @param {string} reqOpts.uri - A URI relative to the baseUrl. - */ - requestStream(reqOpts) { - const opts = extend(true, reqOpts, { shouldReturnStream: true }); - return this.request_(opts); - } -} -exports.ServiceObject = ServiceObject; -(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); -//# sourceMappingURL=service-object.js.map - -/***/ }), - -/***/ 8440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const extend = __nccwpck_require__(8171); -const uuid = __nccwpck_require__(5840); -const util_1 = __nccwpck_require__(2469); -exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; -class Service { - /** - * Service is a base class, meant to be inherited from by a "service," like - * BigQuery or Storage. - * - * This handles making authenticated requests by exposing a `makeReq_` - * function. - * - * @constructor - * @alias module:common/service - * - * @param {object} config - Configuration object. - * @param {string} config.baseUrl - The base URL to make API requests to. - * @param {string[]} config.scopes - The scopes required for the request. - * @param {object=} options - [Configuration object](#/docs). - */ - constructor(config, options = {}) { - this.baseUrl = config.baseUrl; - this.apiEndpoint = config.apiEndpoint; - this.timeout = options.timeout; - this.globalInterceptors = Array.isArray(options.interceptors_) - ? 
options.interceptors_ - : []; - this.interceptors = []; - this.packageJson = config.packageJson; - this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; - this.projectIdRequired = config.projectIdRequired !== false; - this.providedUserAgent = options.userAgent; - const reqCfg = extend({}, config, { - projectIdRequired: this.projectIdRequired, - projectId: this.projectId, - authClient: options.authClient, - credentials: options.credentials, - keyFile: options.keyFilename, - email: options.email, - token: options.token, - }); - this.makeAuthenticatedRequest = - util_1.util.makeAuthenticatedRequestFactory(reqCfg); - this.authClient = this.makeAuthenticatedRequest.authClient; - this.getCredentials = this.makeAuthenticatedRequest.getCredentials; - const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; - if (isCloudFunctionEnv) { - this.interceptors.push({ - request(reqOpts) { - reqOpts.forever = false; - return reqOpts; - }, - }); - } - } - /** - * Return the user's custom request interceptors. - */ - getRequestInterceptors() { - // Interceptors should be returned in the order they were assigned. - return [].slice - .call(this.globalInterceptors) - .concat(this.interceptors) - .filter(interceptor => typeof interceptor.request === 'function') - .map(interceptor => interceptor.request); - } - getProjectId(callback) { - if (!callback) { - return this.getProjectIdAsync(); - } - this.getProjectIdAsync().then(p => callback(null, p), callback); - } - async getProjectIdAsync() { - const projectId = await this.authClient.getProjectId(); - if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { - this.projectId = projectId; - } - return this.projectId; - } - request_(reqOpts, callback) { - reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout }); - const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; - const uriComponents = [this.baseUrl]; - if (this.projectIdRequired) { - if (reqOpts.projectId) { - uriComponents.push('projects'); - uriComponents.push(reqOpts.projectId); - } - else { - uriComponents.push('projects'); - uriComponents.push(this.projectId); - } - } - uriComponents.push(reqOpts.uri); - if (isAbsoluteUrl) { - uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); - } - reqOpts.uri = uriComponents - .map(uriComponent => { - const trimSlashesRegex = /^\/*|\/*$/g; - return uriComponent.replace(trimSlashesRegex, ''); - }) - .join('/') - // Some URIs have colon separators. - // Bad: https://.../projects/:list - // Good: https://.../projects:list - .replace(/\/:/g, ':'); - const requestInterceptors = this.getRequestInterceptors(); - const interceptorArray = Array.isArray(reqOpts.interceptors_) - ? 
reqOpts.interceptors_ - : []; - interceptorArray.forEach(interceptor => { - if (typeof interceptor.request === 'function') { - requestInterceptors.push(interceptor.request); - } - }); - requestInterceptors.forEach(requestInterceptor => { - reqOpts = requestInterceptor(reqOpts); - }); - delete reqOpts.interceptors_; - const pkg = this.packageJson; - let userAgent = util_1.util.getUserAgentFromPackageJson(pkg); - if (this.providedUserAgent) { - userAgent = `${this.providedUserAgent} ${userAgent}`; - } - reqOpts.headers = extend({}, reqOpts.headers, { - 'User-Agent': userAgent, - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`, - }); - if (reqOpts.shouldReturnStream) { - return this.makeAuthenticatedRequest(reqOpts); - } - else { - this.makeAuthenticatedRequest(reqOpts, callback); - } - } - /** - * Make an authenticated API request. - * - * @param {object} reqOpts - Request options that are passed to `request`. - * @param {string} reqOpts.uri - A URI relative to the baseUrl. - * @param {function} callback - The callback function passed to `request`. - */ - request(reqOpts, callback) { - Service.prototype.request_.call(this, reqOpts, callback); - } - /** - * Make an authenticated API request. - * - * @param {object} reqOpts - Request options that are passed to `request`. - * @param {string} reqOpts.uri - A URI relative to the baseUrl. - */ - requestStream(reqOpts) { - const opts = extend(true, reqOpts, { shouldReturnStream: true }); - return Service.prototype.request_.call(this, opts); - } -} -exports.Service = Service; -//# sourceMappingURL=service.js.map - -/***/ }), - -/***/ 2469: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0; -/*! - * @module common/util - */ -const projectify_1 = __nccwpck_require__(3497); -const ent = __nccwpck_require__(1151); -const extend = __nccwpck_require__(8171); -const google_auth_library_1 = __nccwpck_require__(810); -const retryRequest = __nccwpck_require__(3515); -const stream_1 = __nccwpck_require__(2781); -const teeny_request_1 = __nccwpck_require__(6886); -const uuid = __nccwpck_require__(5840); -const service_1 = __nccwpck_require__(8440); -const packageJson = __nccwpck_require__(5458); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const duplexify = __nccwpck_require__(6599); -const requestDefaults = { - timeout: 60000, - gzip: true, - forever: true, - pool: { - maxSockets: Infinity, - }, -}; -/** - * Default behavior: Automatically retry retriable server errors. - * - * @const {boolean} - * @private - */ -const AUTO_RETRY_DEFAULT = true; -/** - * Default behavior: Only attempt to retry retriable errors 3 times. 
- * - * @const {number} - * @private - */ -const MAX_RETRY_DEFAULT = 3; -/** - * Custom error type for API errors. - * - * @param {object} errorBody - Error object. - */ -class ApiError extends Error { - constructor(errorBodyOrMessage) { - super(); - if (typeof errorBodyOrMessage !== 'object') { - this.message = errorBodyOrMessage || ''; - return; - } - const errorBody = errorBodyOrMessage; - this.code = errorBody.code; - this.errors = errorBody.errors; - this.response = errorBody.response; - try { - this.errors = JSON.parse(this.response.body).error.errors; - } - catch (e) { - this.errors = errorBody.errors; - } - this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); - Error.captureStackTrace(this); - } - /** - * Pieces together an error message by combining all unique error messages - * returned from a single GoogleError - * - * @private - * - * @param {GoogleErrorBody} err The original error. - * @param {GoogleInnerError[]} [errors] Inner errors, if any. - * @returns {string} - */ - static createMultiErrorMessage(err, errors) { - const messages = new Set(); - if (err.message) { - messages.add(err.message); - } - if (errors && errors.length) { - errors.forEach(({ message }) => messages.add(message)); - } - else if (err.response && err.response.body) { - messages.add(ent.decode(err.response.body.toString())); - } - else if (!err.message) { - messages.add('A failure occurred during this request.'); - } - let messageArr = Array.from(messages); - if (messageArr.length > 1) { - messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); - messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); - messageArr.push('\n'); - } - return messageArr.join('\n'); - } -} -exports.ApiError = ApiError; -/** - * Custom error type for partial errors returned from the API. - * - * @param {object} b - Error object. - */ -class PartialFailureError extends Error { - constructor(b) { - super(); - const errorObject = b; - this.errors = errorObject.errors; - this.name = 'PartialFailureError'; - this.response = errorObject.response; - this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); - } -} -exports.PartialFailureError = PartialFailureError; -class Util { - constructor() { - this.ApiError = ApiError; - this.PartialFailureError = PartialFailureError; - } - /** - * No op. - * - * @example - * function doSomething(callback) { - * callback = callback || noop; - * } - */ - noop() { } - /** - * Uniformly process an API response. - * - * @param {*} err - Error value. - * @param {*} resp - Response value. - * @param {*} body - Body value. - * @param {function} callback - The callback function. - */ - handleResp(err, resp, body, callback) { - callback = callback || util.noop; - const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body)); - // Assign the parsed body to resp.body, even if { json: false } was passed - // as a request option. - // We assume that nobody uses the previously unparsed value of resp.body. - if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { - parsedResp.resp.body = parsedResp.body; - } - if (parsedResp.err && resp) { - parsedResp.err.response = resp; - } - callback(parsedResp.err, parsedResp.body, parsedResp.resp); - } - /** - * Sniff an incoming HTTP response message for errors. - * - * @param {object} httpRespMessage - An incoming HTTP response message from `request`. 
- * @return {object} parsedHttpRespMessage - The parsed response. - * @param {?error} parsedHttpRespMessage.err - An error detected. - * @param {object} parsedHttpRespMessage.resp - The original response object. - */ - parseHttpRespMessage(httpRespMessage) { - const parsedHttpRespMessage = { - resp: httpRespMessage, - }; - if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { - // Unknown error. Format according to ApiError standard. - parsedHttpRespMessage.err = new ApiError({ - errors: new Array(), - code: httpRespMessage.statusCode, - message: httpRespMessage.statusMessage, - response: httpRespMessage, - }); - } - return parsedHttpRespMessage; - } - /** - * Parse the response body from an HTTP request. - * - * @param {object} body - The response body. - * @return {object} parsedHttpRespMessage - The parsed response. - * @param {?error} parsedHttpRespMessage.err - An error detected. - * @param {object} parsedHttpRespMessage.body - The original body value provided - * will try to be JSON.parse'd. If it's successful, the parsed value will - * be returned here, otherwise the original value and an error will be returned. - */ - parseHttpRespBody(body) { - const parsedHttpRespBody = { - body, - }; - if (typeof body === 'string') { - try { - parsedHttpRespBody.body = JSON.parse(body); - } - catch (err) { - parsedHttpRespBody.body = body; - } - } - if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { - // Error from JSON API. - parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); - } - return parsedHttpRespBody; - } - /** - * Take a Duplexify stream, fetch an authenticated connection header, and - * create an outgoing writable stream. - * - * @param {Duplexify} dup - Duplexify stream. - * @param {object} options - Configuration object. - * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. - * @param {object} options.metadata - Metadata to send at the head of the request. - * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. - * @param {string=} options.request.method - Default: "POST". - * @param {string=} options.request.qs.uploadType - Default: "multipart". - * @param {string=} options.streamContentType - Default: "application/octet-stream". - * @param {function} onComplete - Callback, executed after the writable Request stream has completed. 
- */ - makeWritableStream(dup, options, onComplete) { - onComplete = onComplete || util.noop; - const writeStream = new ProgressStream(); - writeStream.on('progress', evt => dup.emit('progress', evt)); - dup.setWritable(writeStream); - const defaultReqOpts = { - method: 'POST', - qs: { - uploadType: 'multipart', - }, - timeout: 0, - maxRetries: 0, - }; - const metadata = options.metadata || {}; - const reqOpts = extend(true, defaultReqOpts, options.request, { - multipart: [ - { - 'Content-Type': 'application/json', - body: JSON.stringify(metadata), - }, - { - 'Content-Type': metadata.contentType || 'application/octet-stream', - body: writeStream, - }, - ], - }); - options.makeAuthenticatedRequest(reqOpts, { - onAuthenticated(err, authenticatedReqOpts) { - if (err) { - dup.destroy(err); - return; - } - requestDefaults.headers = util._getDefaultHeaders(); - const request = teeny_request_1.teenyRequest.defaults(requestDefaults); - request(authenticatedReqOpts, (err, resp, body) => { - util.handleResp(err, resp, body, (err, data) => { - if (err) { - dup.destroy(err); - return; - } - dup.emit('response', resp); - onComplete(data); - }); - }); - }, - }); - } - /** - * Returns true if the API request should be retried, given the error that was - * given the first time the request was attempted. This is used for rate limit - * related errors as well as intermittent server errors. - * - * @param {error} err - The API error to check if it is appropriate to retry. - * @return {boolean} True if the API request should be retried, false otherwise. - */ - shouldRetryRequest(err) { - if (err) { - if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { - return true; - } - if (err.errors) { - for (const e of err.errors) { - const reason = e.reason; - if (reason === 'rateLimitExceeded') { - return true; - } - if (reason === 'userRateLimitExceeded') { - return true; - } - if (reason && reason.includes('EAI_AGAIN')) { - return true; - } - } - } - } - return false; - } - /** - * Get a function for making authenticated requests. - * - * @param {object} config - Configuration object. - * @param {boolean=} config.autoRetry - Automatically retry requests if the - * response is related to rate limits or certain intermittent server - * errors. We will exponentially backoff subsequent requests by default. - * (default: true) - * @param {object=} config.credentials - Credentials object. - * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. - * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. - * @param {string=} config.email - Account email address, required for PEM/P12 usage. - * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) - * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. - * @param {array} config.scopes - Array of scopes required for the API. 
- */ - makeAuthenticatedRequestFactory(config) { - const googleAutoAuthConfig = extend({}, config); - if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) { - delete googleAutoAuthConfig.projectId; - } - let authClient; - if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { - // Use an existing `GoogleAuth` - authClient = googleAutoAuthConfig.authClient; - } - else { - // Pass an `AuthClient` to `GoogleAuth`, if available - const config = { - ...googleAutoAuthConfig, - authClient: googleAutoAuthConfig.authClient, - }; - authClient = new google_auth_library_1.GoogleAuth(config); - } - function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { - let stream; - let projectId; - const reqConfig = extend({}, config); - let activeRequest_; - if (!optionsOrCallback) { - stream = duplexify(); - reqConfig.stream = stream; - } - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; - const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; - async function setProjectId() { - projectId = await authClient.getProjectId(); - } - const onAuthenticated = async (err, authenticatedReqOpts) => { - const authLibraryError = err; - const autoAuthFailed = err && - err.message.indexOf('Could not load the default credentials') > -1; - if (autoAuthFailed) { - // Even though authentication failed, the API might not actually - // care. - authenticatedReqOpts = reqOpts; - } - if (!err || autoAuthFailed) { - try { - // Try with existing `projectId` value - authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); - err = null; - } - catch (e) { - if (e instanceof projectify_1.MissingProjectIdError) { - // A `projectId` was required, but we don't have one. - try { - // Attempt to get the `projectId` - await setProjectId(); - authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); - err = null; - } - catch (e) { - // Re-use the "Could not load the default credentials error" if - // auto auth failed. - err = err || e; - } - } - else { - // Some other error unrelated to missing `projectId` - err = err || e; - } - } - } - if (err) { - if (stream) { - stream.destroy(err); - } - else { - const fn = options && options.onAuthenticated - ? options.onAuthenticated - : callback; - fn(err); - } - return; - } - if (options && options.onAuthenticated) { - options.onAuthenticated(null, authenticatedReqOpts); - } - else { - activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { - if (apiResponseError && - apiResponseError.code === 401 && - authLibraryError) { - // Re-use the "Could not load the default credentials error" if - // the API request failed due to missing credentials. - apiResponseError = authLibraryError; - } - callback(apiResponseError, ...params); - }); - } - }; - const prepareRequest = async () => { - try { - const getProjectId = async () => { - if (config.projectId && - config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) { - // The user provided a project ID. We don't need to check with the - // auth client, it could be incorrect. - return config.projectId; - } - if (config.projectIdRequired === false) { - // A projectId is not required. Return the default. - return service_1.DEFAULT_PROJECT_ID_TOKEN; - } - return setProjectId(); - }; - const authorizeRequest = async () => { - if (reqConfig.customEndpoint && - !reqConfig.useAuthWithCustomEndpoint) { - // Using a custom API override. 
Do not use `google-auth-library` for - // authentication. (ex: connecting to a local Datastore server) - return reqOpts; - } - else { - return authClient.authorizeRequest(reqOpts); - } - }; - const [_projectId, authorizedReqOpts] = await Promise.all([ - getProjectId(), - authorizeRequest(), - ]); - if (_projectId) { - projectId = _projectId; - } - return onAuthenticated(null, authorizedReqOpts); - } - catch (e) { - return onAuthenticated(e); - } - }; - prepareRequest(); - if (stream) { - return stream; - } - return { - abort() { - setImmediate(() => { - if (activeRequest_) { - activeRequest_.abort(); - activeRequest_ = null; - } - }); - }, - }; - } - const mar = makeAuthenticatedRequest; - mar.getCredentials = authClient.getCredentials.bind(authClient); - mar.authClient = authClient; - return mar; - } - /** - * Make a request through the `retryRequest` module with built-in error - * handling and exponential back off. - * - * @param {object} reqOpts - Request options in the format `request` expects. - * @param {object=} config - Configuration object. - * @param {boolean=} config.autoRetry - Automatically retry requests if the - * response is related to rate limits or certain intermittent server - * errors. We will exponentially backoff subsequent requests by default. - * (default: true) - * @param {number=} config.maxRetries - Maximum number of automatic retries - * attempted before returning the error. (default: 3) - * @param {object=} config.request - HTTP module for request calls. - * @param {function} callback - The callback function. - */ - makeRequest(reqOpts, config, callback) { - var _a, _b, _c, _d, _e; - let autoRetryValue = AUTO_RETRY_DEFAULT; - if (config.autoRetry !== undefined) { - autoRetryValue = config.autoRetry; - } - else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { - autoRetryValue = config.retryOptions.autoRetry; - } - let maxRetryValue = MAX_RETRY_DEFAULT; - if (config.maxRetries !== undefined) { - maxRetryValue = config.maxRetries; - } - else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { - maxRetryValue = config.retryOptions.maxRetries; - } - requestDefaults.headers = this._getDefaultHeaders(); - const options = { - request: teeny_request_1.teenyRequest.defaults(requestDefaults), - retries: autoRetryValue !== false ? maxRetryValue : 0, - noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, - shouldRetryFn(httpRespMessage) { - var _a, _b; - const err = util.parseHttpRespMessage(httpRespMessage).err; - if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { - return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); - } - return err && util.shouldRetryRequest(err); - }, - maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, - retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, - totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, - }; - if (typeof reqOpts.maxRetries === 'number') { - options.retries = reqOpts.maxRetries; - options.noResponseRetries = reqOpts.maxRetries; - } - if (!config.stream) { - return retryRequest(reqOpts, options, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (err, response, body) => { - util.handleResp(err, response, body, callback); - }); - } - const dup = config.stream; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let requestStream; - const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; - if (isGetRequest) { - requestStream = retryRequest(reqOpts, options); - dup.setReadable(requestStream); - } - else { - // Streaming writable HTTP requests cannot be retried. - requestStream = options.request(reqOpts); - dup.setWritable(requestStream); - } - // Replay the Request events back to the stream. - requestStream - .on('error', dup.destroy.bind(dup)) - .on('response', dup.emit.bind(dup, 'response')) - .on('complete', dup.emit.bind(dup, 'complete')); - dup.abort = requestStream.abort; - return dup; - } - /** - * Decorate the options about to be made in a request. - * - * @param {object} reqOpts - The options to be passed to `request`. - * @param {string} projectId - The project ID. - * @return {object} reqOpts - The decorated reqOpts. - */ - decorateRequest(reqOpts, projectId) { - delete reqOpts.autoPaginate; - delete reqOpts.autoPaginateVal; - delete reqOpts.objectMode; - if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { - delete reqOpts.qs.autoPaginate; - delete reqOpts.qs.autoPaginateVal; - reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); - } - if (Array.isArray(reqOpts.multipart)) { - reqOpts.multipart = reqOpts.multipart.map(part => { - return (0, projectify_1.replaceProjectIdToken)(part, projectId); - }); - } - if (reqOpts.json !== null && typeof reqOpts.json === 'object') { - delete reqOpts.json.autoPaginate; - delete reqOpts.json.autoPaginateVal; - reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); - } - reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); - return reqOpts; - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - isCustomType(unknown, module) { - function getConstructorName(obj) { - return obj.constructor && obj.constructor.name.toLowerCase(); - } - const moduleNameParts = module.split('/'); - const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); - const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); - if (subModuleName && getConstructorName(unknown) !== subModuleName) { - return false; - } - let walkingModule = unknown; - // eslint-disable-next-line no-constant-condition - while (true) { - if (getConstructorName(walkingModule) === parentModuleName) { - return true; - } - walkingModule = walkingModule.parent; - if (!walkingModule) { - return false; - } - } - } - /** - * Create a properly-formatted User-Agent string from a package.json file. - * - * @param {object} packageJson - A module's package.json file. - * @return {string} userAgent - The formatted User-Agent string. - */ - getUserAgentFromPackageJson(packageJson) { - const hyphenatedPackageName = packageJson.name - .replace('@google-cloud', 'gcloud-node') // For legacy purposes. - .replace('/', '-'); // For UA spec-compliance purposes. 
- return hyphenatedPackageName + '/' + packageJson.version; - } - /** - * Given two parameters, figure out if this is either: - * - Just a callback function - * - An options object, and then a callback function - * @param optionsOrCallback An options object or callback. - * @param cb A potentially undefined callback. - */ - maybeOptionsOrCallback(optionsOrCallback, cb) { - return typeof optionsOrCallback === 'function' - ? [{}, optionsOrCallback] - : [optionsOrCallback, cb]; - } - _getDefaultHeaders() { - return { - 'User-Agent': util.getUserAgentFromPackageJson(packageJson), - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`, - }; - } -} -exports.Util = Util; -/** - * Basic Passthrough Stream that records the number of bytes read - * every time the cursor is moved. - */ -class ProgressStream extends stream_1.Transform { - constructor() { - super(...arguments); - this.bytesRead = 0; - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - _transform(chunk, encoding, callback) { - this.bytesRead += chunk.length; - this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); - this.push(chunk); - callback(); - } -} -const util = new Util(); -exports.util = util; -//# sourceMappingURL=util.js.map - -/***/ }), - -/***/ 7523: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Notification = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const promisify_1 = __nccwpck_require__(9203); -/** - * The API-formatted resource description of the notification. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name Notification#metadata - * @type {object} - */ -/** - * A Notification object is created from your {@link Bucket} object using - * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub - * notifications. - * - * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} - * - * @class - * @hideconstructor - * - * @param {Bucket} bucket The bucket instance this notification is attached to. - * @param {string} id The ID of the notification. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const notification = myBucket.notification('1'); - * ``` - */ -class Notification extends nodejs_common_1.ServiceObject { - constructor(bucket, id) { - const methods = { - /** - * Creates a notification subscription for the bucket. 
- * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} - * @method Notification#create - * - * @param {Topic|string} topic The Cloud PubSub topic to which this - * subscription publishes. If the project ID is omitted, the current - * project ID will be used. - * - * Acceptable formats are: - * - `projects/grape-spaceship-123/topics/my-topic` - * - * - `my-topic` - * @param {CreateNotificationRequest} [options] Metadata to set for - * the notification. - * @param {CreateNotificationCallback} [callback] Callback function. - * @returns {Promise} - * @throws {Error} If a valid topic is not provided. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.create(function(err, notification, apiResponse) { - * if (!err) { - * // The notification was created successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * notification.create().then(function(data) { - * const notification = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - create: true, - /** - * @typedef {array} NotificationExistsResponse - * @property {boolean} 0 Whether the notification exists or not. - */ - /** - * @callback NotificationExistsCallback - * @param {?Error} err Request error, if any. - * @param {boolean} exists Whether the notification exists or not. - */ - /** - * Check if the notification exists. - * - * @method Notification#exists - * @param {NotificationExistsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.exists(function(err, exists) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * notification.exists().then(function(data) { - * const exists = data[0]; - * }); - * ``` - */ - exists: true, - }; - super({ - parent: bucket, - baseUrl: '/notificationConfigs', - id: id.toString(), - createMethod: bucket.createNotification.bind(bucket), - methods, - }); - } - /** - * @typedef {array} DeleteNotificationResponse - * @property {object} 0 The full API response. - */ - /** - * Permanently deletes a notification subscription. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} - * - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {DeleteNotificationCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.delete(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * notification.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/deleteNotification.js - * region_tag:storage_delete_bucket_notification - * Another example: - */ - delete(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.request({ - method: 'DELETE', - uri: '', - qs: options, - }, callback || nodejs_common_1.util.noop); - } - /** - * Get a notification and its metadata if it exists. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} - * - * @param {object} [options] Configuration options. - * See {@link Bucket#createNotification} for create options. - * @param {boolean} [options.autoCreate] Automatically create the object if - * it does not exist. Default: `false`. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetNotificationCallback} [callback] Callback function. - * @return {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.get(function(err, notification, apiResponse) { - * // `notification.metadata` has been populated. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * notification.get().then(function(data) { - * const notification = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - get(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const autoCreate = options.autoCreate; - delete options.autoCreate; - const onCreate = (err, notification, apiResponse) => { - if (err) { - if (err.code === 409) { - this.get(options, callback); - return; - } - callback(err, null, apiResponse); - return; - } - callback(null, notification, apiResponse); - }; - this.getMetadata(options, (err, metadata) => { - if (err) { - if (err.code === 404 && autoCreate) { - const args = []; - if (Object.keys(options).length > 0) { - args.push(options); - } - args.push(onCreate); - // eslint-disable-next-line - this.create.apply(this, args); - return; - } - callback(err, null, metadata); - return; - } - callback(null, this, metadata); - }); - } - /** - * Get the notification's metadata. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} - * - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetNotificationMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.getMetadata(function(err, metadata, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * notification.getMetadata().then(function(data) { - * const metadata = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/getMetadataNotifications.js - * region_tag:storage_print_pubsub_bucket_notification - * Another example: - */ - getMetadata(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.request({ - uri: '', - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - this.metadata = resp; - callback(null, this.metadata, resp); - }); - } -} -exports.Notification = Notification; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Notification); -//# sourceMappingURL=notification.js.map - -/***/ }), - -/***/ 8807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; -const abort_controller_1 = __nccwpck_require__(1659); -const crypto_1 = __nccwpck_require__(6113); -const extend = __nccwpck_require__(8171); -const gaxios = __nccwpck_require__(9555); -const google_auth_library_1 = __nccwpck_require__(810); -const stream_1 = __nccwpck_require__(2781); -const retry = __nccwpck_require__(3415); -const uuid = __nccwpck_require__(5840); -const NOT_FOUND_STATUS_CODE = 404; -const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; -const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; -const packageJson = __nccwpck_require__(5458); -exports.PROTOCOL_REGEX = /^(\w*):\/\//; -class Upload extends stream_1.Writable { - constructor(cfg) { - super(); - this.numBytesWritten = 0; - this.numRetries = 0; - this.currentInvocationId = { - chunk: uuid.v4(), - uri: uuid.v4(), - offset: uuid.v4(), - }; - this.upstreamChunkBuffer = Buffer.alloc(0); - this.chunkBufferEncoding = undefined; - this.numChunksReadInRequest = 0; - /** - * A chunk used for caching the most recent upload chunk. - * We should not assume that the server received all bytes sent in the request. 
- * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload - */ - this.lastChunkSent = Buffer.alloc(0); - this.upstreamEnded = false; - cfg = cfg || {}; - if (!cfg.bucket || !cfg.file) { - throw new Error('A bucket and file name are required'); - } - cfg.authConfig = cfg.authConfig || {}; - cfg.authConfig.scopes = [ - 'https://www.googleapis.com/auth/devstorage.full_control', - ]; - this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); - this.apiEndpoint = 'https://storage.googleapis.com'; - if (cfg.apiEndpoint) { - this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); - if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) { - this.authClient = gaxios; - } - } - this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; - this.bucket = cfg.bucket; - const cacheKeyElements = [cfg.bucket, cfg.file]; - if (typeof cfg.generation === 'number') { - cacheKeyElements.push(`${cfg.generation}`); - } - this.cacheKey = cacheKeyElements.join('/'); - this.customRequestOptions = cfg.customRequestOptions || {}; - this.file = cfg.file; - this.generation = cfg.generation; - this.kmsKeyName = cfg.kmsKeyName; - this.metadata = cfg.metadata || {}; - this.offset = cfg.offset; - this.origin = cfg.origin; - this.params = cfg.params || {}; - this.userProject = cfg.userProject; - this.chunkSize = cfg.chunkSize; - this.retryOptions = cfg.retryOptions; - if (cfg.key) { - const base64Key = Buffer.from(cfg.key).toString('base64'); - this.encryption = { - key: base64Key, - hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), - }; - } - this.predefinedAcl = cfg.predefinedAcl; - if (cfg.private) - this.predefinedAcl = 'private'; - if (cfg.public) - this.predefinedAcl = 'publicRead'; - const autoRetry = cfg.retryOptions.autoRetry; - this.uriProvidedManually = !!cfg.uri; - this.uri = cfg.uri; - this.numBytesWritten = 0; - this.numRetries = 0; // counter for number of retries currently executed - if (!autoRetry) { - cfg.retryOptions.maxRetries = 0; - } - this.timeOfFirstRequest = Date.now(); - const contentLength = cfg.metadata - ? Number(cfg.metadata.contentLength) - : NaN; - this.contentLength = isNaN(contentLength) ? '*' : contentLength; - this.once('writing', () => { - if (this.uri) { - this.continueUploading(); - } - else { - this.createURI(err => { - if (err) { - return this.destroy(err); - } - this.startUploading(); - return; - }); - } - }); - } - /** - * Prevent 'finish' event until the upload has succeeded. - * - * @param fireFinishEvent The finish callback - */ - _final(fireFinishEvent = () => { }) { - this.upstreamEnded = true; - this.once('uploadFinished', fireFinishEvent); - process.nextTick(() => { - this.emit('upstreamFinished'); - // it's possible `_write` may not be called - namely for empty object uploads - this.emit('writing'); - }); - } - /** - * Handles incoming data from upstream - * - * @param chunk The chunk to append to the buffer - * @param encoding The encoding of the chunk - * @param readCallback A callback for when the buffer has been read downstream - */ - _write(chunk, encoding, readCallback = () => { }) { - // Backwards-compatible event - this.emit('writing'); - this.upstreamChunkBuffer = Buffer.concat([ - this.upstreamChunkBuffer, - typeof chunk === 'string' ? 
Buffer.from(chunk, encoding) : chunk, - ]); - this.chunkBufferEncoding = encoding; - this.once('readFromChunkBuffer', readCallback); - process.nextTick(() => this.emit('wroteToChunkBuffer')); - } - /** - * Prepends data back to the upstream chunk buffer. - * - * @param chunk The data to prepend - */ - unshiftChunkBuffer(chunk) { - this.upstreamChunkBuffer = Buffer.concat([chunk, this.upstreamChunkBuffer]); - } - /** - * Retrieves data from upstream's buffer. - * - * @param limit The maximum amount to return from the buffer. - * @returns The data requested. - */ - pullFromChunkBuffer(limit) { - const chunk = this.upstreamChunkBuffer.slice(0, limit); - this.upstreamChunkBuffer = this.upstreamChunkBuffer.slice(limit); - // notify upstream we've read from the buffer so it can potentially - // send more data down. - process.nextTick(() => this.emit('readFromChunkBuffer')); - return chunk; - } - /** - * A handler for determining if data is ready to be read from upstream. - * - * @returns If there will be more chunks to read in the future - */ - async waitForNextChunk() { - const willBeMoreChunks = await new Promise(resolve => { - // There's data available - it should be digested - if (this.upstreamChunkBuffer.byteLength) { - return resolve(true); - } - // The upstream writable ended, we shouldn't expect any more data. - if (this.upstreamEnded) { - return resolve(false); - } - // Nothing immediate seems to be determined. We need to prepare some - // listeners to determine next steps... - const wroteToChunkBufferCallback = () => { - removeListeners(); - return resolve(true); - }; - const upstreamFinishedCallback = () => { - removeListeners(); - // this should be the last chunk, if there's anything there - if (this.upstreamChunkBuffer.length) - return resolve(true); - return resolve(false); - }; - // Remove listeners when we're ready to callback. - const removeListeners = () => { - this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); - this.removeListener('upstreamFinished', upstreamFinishedCallback); - }; - // If there's data recently written it should be digested - this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); - // If the upstream finishes let's see if there's anything to grab - this.once('upstreamFinished', upstreamFinishedCallback); - }); - return willBeMoreChunks; - } - /** - * Reads data from upstream up to the provided `limit`. - * Ends when the limit has reached or no data is expected to be pushed from upstream. - * - * @param limit The most amount of data this iterator should return. `Infinity` by default. 
- * @param oneChunkMode Determines if one, exhaustive chunk is yielded for the iterator - */ - async *upstreamIterator(limit = Infinity, oneChunkMode) { - let completeChunk = Buffer.alloc(0); - // read from upstream chunk buffer - while (limit && (await this.waitForNextChunk())) { - // read until end or limit has been reached - const chunk = this.pullFromChunkBuffer(limit); - limit -= chunk.byteLength; - if (oneChunkMode) { - // return 1 chunk at the end of iteration - completeChunk = Buffer.concat([completeChunk, chunk]); - } - else { - // return many chunks throughout iteration - yield { - chunk, - encoding: this.chunkBufferEncoding, - }; - } - } - if (oneChunkMode) { - yield { - chunk: completeChunk, - encoding: this.chunkBufferEncoding, - }; - } - } - createURI(callback) { - if (!callback) { - return this.createURIAsync(); - } - this.createURIAsync().then(r => callback(null, r), callback); - } - async createURIAsync() { - const metadata = { ...this.metadata }; - const headers = {}; - // Delete content length and content type from metadata if they exist. - // These are headers and should not be sent as part of the metadata. - if (metadata.contentLength) { - headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); - delete metadata.contentLength; - } - if (metadata.contentType) { - headers['X-Upload-Content-Type'] = metadata.contentType; - delete metadata.contentType; - } - // Check if headers already exist before creating new ones - const reqOpts = { - method: 'POST', - url: [this.baseURI, this.bucket, 'o'].join('/'), - params: Object.assign({ - name: this.file, - uploadType: 'resumable', - }, this.params), - data: metadata, - headers: { - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`, - ...headers, - }, - }; - if (metadata.contentLength) { - reqOpts.headers['X-Upload-Content-Length'] = - metadata.contentLength.toString(); - } - if (metadata.contentType) { - reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; - } - if (typeof this.generation !== 'undefined') { - reqOpts.params.ifGenerationMatch = this.generation; - } - if (this.kmsKeyName) { - reqOpts.params.kmsKeyName = this.kmsKeyName; - } - if (this.predefinedAcl) { - reqOpts.params.predefinedAcl = this.predefinedAcl; - } - if (this.origin) { - reqOpts.headers.Origin = this.origin; - } - const uri = await retry(async (bail) => { - var _a, _b, _c; - try { - const res = await this.makeRequest(reqOpts); - // We have successfully got a URI we can now create a new invocation id - this.currentInvocationId.uri = uuid.v4(); - return res.headers.location; - } - catch (err) { - const e = err; - const apiError = { - code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, - name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, - message: (_c = e.response) === null || _c === void 0 ? 
void 0 : _c.statusText, - errors: [ - { - reason: e.code, - }, - ], - }; - if (this.retryOptions.maxRetries > 0 && - this.retryOptions.retryableErrorFn(apiError)) { - throw e; - } - else { - return bail(e); - } - } - }, { - retries: this.retryOptions.maxRetries, - factor: this.retryOptions.retryDelayMultiplier, - maxTimeout: this.retryOptions.maxRetryDelay * 1000, - maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds - }); - this.uri = uri; - this.offset = 0; - return uri; - } - async continueUploading() { - if (typeof this.offset === 'number') { - this.startUploading(); - return; - } - await this.getAndSetOffset(); - this.startUploading(); - } - async startUploading() { - const multiChunkMode = !!this.chunkSize; - let responseReceived = false; - this.numChunksReadInRequest = 0; - if (!this.offset) { - this.offset = 0; - } - // Check if the offset (server) is too far behind the current stream - if (this.offset < this.numBytesWritten) { - const delta = this.numBytesWritten - this.offset; - const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; - this.emit('error', new RangeError(message)); - return; - } - // Check if we should 'fast-forward' to the relevant data to upload - if (this.numBytesWritten < this.offset) { - // 'fast-forward' to the byte where we need to upload. - // only push data from the byte after the one we left off on - const fastForwardBytes = this.offset - this.numBytesWritten; - for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { - _chunk; // discard the data up until the point we want - } - this.numBytesWritten = this.offset; - } - let expectedUploadSize = undefined; - // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available - if (typeof this.contentLength === 'number') { - expectedUploadSize = this.contentLength - this.numBytesWritten; - } - // `expectedUploadSize` should be no more than the `chunkSize`. - // It's possible this is the last chunk request for a multiple - // chunk upload, thus smaller than the chunk size. - if (this.chunkSize) { - expectedUploadSize = expectedUploadSize - ? Math.min(this.chunkSize, expectedUploadSize) - : this.chunkSize; - } - // A queue for the upstream data - const upstreamQueue = this.upstreamIterator(expectedUploadSize, multiChunkMode // multi-chunk mode should return 1 chunk per request - ); - // The primary read stream for this request. This stream retrieves no more - // than the exact requested amount from upstream. 
- const requestStream = new stream_1.Readable({ - read: async () => { - // Don't attempt to retrieve data upstream if we already have a response - if (responseReceived) - requestStream.push(null); - const result = await upstreamQueue.next(); - if (result.value) { - this.numChunksReadInRequest++; - this.lastChunkSent = result.value.chunk; - this.numBytesWritten += result.value.chunk.byteLength; - this.emit('progress', { - bytesWritten: this.numBytesWritten, - contentLength: this.contentLength, - }); - requestStream.push(result.value.chunk, result.value.encoding); - } - if (result.done) { - requestStream.push(null); - } - }, - }); - const headers = { - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`, - }; - // If using multiple chunk upload, set appropriate header - if (multiChunkMode) { - // We need to know how much data is available upstream to set the `Content-Range` header. - const oneChunkIterator = this.upstreamIterator(expectedUploadSize, true); - const { value } = await oneChunkIterator.next(); - const bytesToUpload = value.chunk.byteLength; - // Important: we want to know if the upstream has ended and the queue is empty before - // unshifting data back into the queue. This way we will know if this is the last request or not. - const isLastChunkOfUpload = !(await this.waitForNextChunk()); - // Important: put the data back in the queue for the actual upload iterator - this.unshiftChunkBuffer(value.chunk); - let totalObjectSize = this.contentLength; - if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) { - // Let's let the server know this is the last chunk since - // we didn't know the content-length beforehand. - totalObjectSize = bytesToUpload + this.numBytesWritten; - } - // `- 1` as the ending byte is inclusive in the request. - const endingByte = bytesToUpload + this.numBytesWritten - 1; - // `Content-Length` for multiple chunk uploads is the size of the chunk, - // not the overall object - headers['Content-Length'] = bytesToUpload; - headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; - } - else { - headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; - } - const reqOpts = { - method: 'PUT', - url: this.uri, - headers, - body: requestStream, - }; - try { - const resp = await this.makeRequestStream(reqOpts); - if (resp) { - responseReceived = true; - this.responseHandler(resp); - } - } - catch (e) { - const err = e; - if (this.retryOptions.retryableErrorFn(err)) { - this.attemptDelayedRetry({ - status: NaN, - data: err, - }); - return; - } - this.destroy(err); - } - } - // Process the API response to look for errors that came in - // the response body. - responseHandler(resp) { - if (resp.data.error) { - this.destroy(resp.data.error); - return; - } - // At this point we can safely create a new id for the chunk - this.currentInvocationId.chunk = uuid.v4(); - const shouldContinueWithNextMultiChunkRequest = this.chunkSize && - resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && - resp.headers.range; - if (shouldContinueWithNextMultiChunkRequest) { - // Use the upper value in this header to determine where to start the next chunk. - // We should not assume that the server received all bytes sent in the request. 
- // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload - const range = resp.headers.range; - this.offset = Number(range.split('-')[1]) + 1; - // We should not assume that the server received all bytes sent in the request. - // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload - const missingBytes = this.numBytesWritten - this.offset; - if (missingBytes) { - const dataToPrependForResending = this.lastChunkSent.slice(-missingBytes); - // As multi-chunk uploads send one chunk per request and pulls one - // chunk into the pipeline, prepending the missing bytes back should - // be fine for the next request. - this.unshiftChunkBuffer(dataToPrependForResending); - this.numBytesWritten -= missingBytes; - this.lastChunkSent = Buffer.alloc(0); - } - // continue uploading next chunk - this.continueUploading(); - } - else if (!this.isSuccessfulResponse(resp.status)) { - const err = new Error('Upload failed'); - err.code = resp.status; - err.name = 'Upload failed'; - if (resp === null || resp === void 0 ? void 0 : resp.data) { - err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; - } - this.destroy(err); - } - else { - // remove the last chunk sent to free memory - this.lastChunkSent = Buffer.alloc(0); - if (resp && resp.data) { - resp.data.size = Number(resp.data.size); - } - this.emit('metadata', resp.data); - // Allow the object (Upload) to continue naturally so the user's - // "finish" event fires. - this.emit('uploadFinished'); - } - } - async getAndSetOffset() { - const opts = { - method: 'PUT', - url: this.uri, - headers: { - 'Content-Length': 0, - 'Content-Range': 'bytes */*', - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`, - }, - }; - try { - const resp = await this.makeRequest(opts); - // Successfully got the offset we can now create a new offset invocation id - this.currentInvocationId.offset = uuid.v4(); - if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { - if (resp.headers.range) { - const range = resp.headers.range; - this.offset = Number(range.split('-')[1]) + 1; - return; - } - } - this.offset = 0; - } - catch (e) { - const err = e; - if (this.retryOptions.retryableErrorFn(err)) { - this.attemptDelayedRetry({ - status: NaN, - data: err, - }); - return; - } - this.destroy(err); - } - } - async makeRequest(reqOpts) { - if (this.encryption) { - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; - reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); - reqOpts.headers['x-goog-encryption-key-sha256'] = - this.encryption.hash.toString(); - } - if (this.userProject) { - reqOpts.params = reqOpts.params || {}; - reqOpts.params.userProject = this.userProject; - } - // Let gaxios know we will handle a 308 error code ourselves. 
- reqOpts.validateStatus = (status) => { - return (this.isSuccessfulResponse(status) || - status === RESUMABLE_INCOMPLETE_STATUS_CODE); - }; - const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts); - const res = await this.authClient.request(combinedReqOpts); - if (res.data && res.data.error) { - throw res.data.error; - } - return res; - } - async makeRequestStream(reqOpts) { - const controller = new abort_controller_1.default(); - const errorCallback = () => controller.abort(); - this.once('error', errorCallback); - if (this.userProject) { - reqOpts.params = reqOpts.params || {}; - reqOpts.params.userProject = this.userProject; - } - reqOpts.signal = controller.signal; - reqOpts.validateStatus = () => true; - const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts); - const res = await this.authClient.request(combinedReqOpts); - const successfulRequest = this.onResponse(res); - this.removeListener('error', errorCallback); - return successfulRequest ? res : null; - } - /** - * @return {bool} is the request good? - */ - onResponse(resp) { - if (resp.status !== 200 && - this.retryOptions.retryableErrorFn({ - code: resp.status, - message: resp.statusText, - name: resp.statusText, - })) { - this.attemptDelayedRetry(resp); - return false; - } - this.emit('response', resp); - return true; - } - /** - * @param resp GaxiosResponse object from previous attempt - */ - attemptDelayedRetry(resp) { - if (this.numRetries < this.retryOptions.maxRetries) { - if (resp.status === NOT_FOUND_STATUS_CODE && - this.numChunksReadInRequest === 0) { - this.startUploading(); - } - else { - const retryDelay = this.getRetryDelay(); - if (retryDelay <= 0) { - this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); - return; - } - // Unshift the most recent chunk back in case it's needed for the next - // request. - this.numBytesWritten -= this.lastChunkSent.byteLength; - this.unshiftChunkBuffer(this.lastChunkSent); - this.lastChunkSent = Buffer.alloc(0); - // We don't know how much data has been received by the server. - // `continueUploading` will recheck the offset via `getAndSetOffset`. - // If `offset` < `numberBytesReceived` then we will raise a RangeError - // as we've streamed too much data that has been missed - this should - // not be the case for multi-chunk uploads as `lastChunkSent` is the - // body of the entire request. - this.offset = undefined; - setTimeout(this.continueUploading.bind(this), retryDelay); - } - this.numRetries++; - } - else { - this.destroy(new Error('Retry limit exceeded - ' + resp.data)); - } - } - /** - * @returns {number} the amount of time to wait before retrying the request - */ - getRetryDelay() { - const randomMs = Math.round(Math.random() * 1000); - const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * - 1000 + - randomMs; - const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - - (Date.now() - this.timeOfFirstRequest); - const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; - return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); - } - /* - * Prepare user-defined API endpoint for compatibility with our API. 
- */ - sanitizeEndpoint(url) { - if (!exports.PROTOCOL_REGEX.test(url)) { - url = `https://${url}`; - } - return url.replace(/\/+$/, ''); // Remove trailing slashes - } - /** - * Check if a given status code is 2xx - * - * @param status The status code to check - * @returns if the status is 2xx - */ - isSuccessfulResponse(status) { - return status >= 200 && status < 300; - } -} -exports.Upload = Upload; -function upload(cfg) { - return new Upload(cfg); -} -exports.upload = upload; -function createURI(cfg, callback) { - const up = new Upload(cfg); - if (!callback) { - return up.createURI(); - } - up.createURI().then(r => callback(null, r), callback); -} -exports.createURI = createURI; -//# sourceMappingURL=resumable-upload.js.map - -/***/ }), - -/***/ 9665: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; -const crypto = __nccwpck_require__(6113); -const url = __nccwpck_require__(7310); -const storage_1 = __nccwpck_require__(346); -const util_1 = __nccwpck_require__(1862); -var SignerExceptionMessages; -(function (SignerExceptionMessages) { - SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; - SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; - SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; -})(SignerExceptionMessages = exports.SignerExceptionMessages || (exports.SignerExceptionMessages = {})); -/* - * Default signing version for getSignedUrl is 'v2'. - */ -const DEFAULT_SIGNING_VERSION = 'v2'; -const SEVEN_DAYS = 7 * 24 * 60 * 60; -/** - * @const {string} - * @private - */ -exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; -class URLSigner { - constructor(authClient, bucket, file) { - this.bucket = bucket; - this.file = file; - this.authClient = authClient; - } - getSignedUrl(cfg) { - const expiresInSeconds = this.parseExpires(cfg.expires); - const method = cfg.method; - const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); - if (expiresInSeconds < accessibleAtInSeconds) { - throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); - } - let customHost; - // Default style is `path`. - const isVirtualHostedStyle = cfg.virtualHostedStyle || false; - if (cfg.cname) { - customHost = cfg.cname; - } - else if (isVirtualHostedStyle) { - customHost = `https://${this.bucket.name}.storage.googleapis.com`; - } - const secondsToMilliseconds = 1000; - const config = Object.assign({}, cfg, { - method, - expiration: expiresInSeconds, - accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), - bucket: this.bucket.name, - file: this.file ? 
(0, util_1.encodeURI)(this.file.name, false) : undefined, - }); - if (customHost) { - config.cname = customHost; - } - const version = cfg.version || DEFAULT_SIGNING_VERSION; - let promise; - if (version === 'v2') { - promise = this.getSignedUrlV2(config); - } - else if (version === 'v4') { - promise = this.getSignedUrlV4(config); - } - else { - throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); - } - return promise.then(query => { - query = Object.assign(query, cfg.queryParams); - const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST); - signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - signedUrl.search = (0, util_1.qsStringify)(query); - return signedUrl.href; - }); - } - getSignedUrlV2(config) { - const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); - const resourcePath = this.getResourcePath(false, config.bucket, config.file); - const blobToSign = [ - config.method, - config.contentMd5 || '', - config.contentType || '', - config.expiration, - canonicalHeadersString + resourcePath, - ].join('\n'); - const sign = async () => { - const authClient = this.authClient; - try { - const signature = await authClient.sign(blobToSign); - const credentials = await authClient.getCredentials(); - return { - GoogleAccessId: credentials.client_email, - Expires: config.expiration, - Signature: signature, - }; - } - catch (err) { - const error = err; - const signingErr = new SigningError(error.message); - signingErr.stack = error.stack; - throw signingErr; - } - }; - return sign(); - } - getSignedUrlV4(config) { - config.accessibleAt = config.accessibleAt - ? config.accessibleAt - : new Date(); - const millisecondsToSeconds = 1.0 / 1000.0; - const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; - // v4 limit expiration to be 7 days maximum - if (expiresPeriodInSeconds > SEVEN_DAYS) { - throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); - } - const extensionHeaders = Object.assign({}, config.extensionHeaders); - const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST); - extensionHeaders.host = fqdn.host; - if (config.contentMd5) { - extensionHeaders['content-md5'] = config.contentMd5; - } - if (config.contentType) { - extensionHeaders['content-type'] = config.contentType; - } - let contentSha256; - const sha256Header = extensionHeaders['x-goog-content-sha256']; - if (sha256Header) { - if (typeof sha256Header !== 'string' || - !/[A-Fa-f0-9]{40}/.test(sha256Header)) { - throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); - } - contentSha256 = sha256Header; - } - const signedHeaders = Object.keys(extensionHeaders) - .map(header => header.toLowerCase()) - .sort() - .join(';'); - const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); - const datestamp = (0, util_1.formatAsUTCISO)(config.accessibleAt); - const credentialScope = `${datestamp}/auto/storage/goog4_request`; - const sign = async () => { - const credentials = await this.authClient.getCredentials(); - const credential = `${credentials.client_email}/${credentialScope}`; - const dateISO = (0, util_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true); - const queryParams = { - 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', - 'X-Goog-Credential': credential, - 'X-Goog-Date': dateISO, - 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), - 'X-Goog-SignedHeaders': signedHeaders, - ...(config.queryParams || {}), - }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); - const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); - const hash = crypto - .createHash('sha256') - .update(canonicalRequest) - .digest('hex'); - const blobToSign = [ - 'GOOG4-RSA-SHA256', - dateISO, - credentialScope, - hash, - ].join('\n'); - try { - const signature = await this.authClient.sign(blobToSign); - const signatureHex = Buffer.from(signature, 'base64').toString('hex'); - const signedQuery = Object.assign({}, queryParams, { - 'X-Goog-Signature': signatureHex, - }); - return signedQuery; - } - catch (err) { - const error = err; - const signingErr = new SigningError(error.message); - signingErr.stack = error.stack; - throw signingErr; - } - }; - return sign(); - } - /** - * Create canonical headers for signing v4 url. - * - * The canonical headers for v4-signing a request demands header names are - * first lowercased, followed by sorting the header names. - * Then, construct the canonical headers part of the request: - * + ":" + Trim() + "\n" - * .. - * + ":" + Trim() + "\n" - * - * @param headers - * @private - */ - getCanonicalHeaders(headers) { - // Sort headers by their lowercased names - const sortedHeaders = (0, util_1.objectEntries)(headers) - // Convert header names to lowercase - .map(([headerName, value]) => [ - headerName.toLowerCase(), - value, - ]) - .sort((a, b) => a[0].localeCompare(b[0])); - return sortedHeaders - .filter(([, value]) => value !== undefined) - .map(([headerName, value]) => { - // - Convert Array (multi-valued header) into string, delimited by - // ',' (no space). - // - Trim leading and trailing spaces. - // - Convert sequential (2+) spaces into a single space - const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); - return `${headerName}:${canonicalValue}\n`; - }) - .join(''); - } - getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { - return [ - method, - path, - query, - headers, - signedHeaders, - contentSha256 || 'UNSIGNED-PAYLOAD', - ].join('\n'); - } - getCanonicalQueryParams(query) { - return (0, util_1.objectEntries)(query) - .map(([key, value]) => [(0, util_1.encodeURI)(key, true), (0, util_1.encodeURI)(value, true)]) - .sort((a, b) => (a[0] < b[0] ? -1 : 1)) - .map(([key, value]) => `${key}=${value}`) - .join('&'); - } - getResourcePath(cname, bucket, file) { - if (cname) { - return '/' + (file || ''); - } - else if (file) { - return `/${bucket}/${file}`; - } - else { - return `/${bucket}`; - } - } - parseExpires(expires, current = new Date()) { - const expiresInMSeconds = new Date(expires).valueOf(); - if (isNaN(expiresInMSeconds)) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); - } - if (expiresInMSeconds < current.valueOf()) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); - } - return Math.round(expiresInMSeconds / 1000); // The API expects seconds. 
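As a usage-level sketch of the signing flow implemented by `URLSigner` above: callers normally reach it through the public `File#getSignedUrl` wrapper rather than instantiating `URLSigner` directly. A minimal v4 example, assuming a bucket and object that already exist (names are placeholders):

```js
// Minimal sketch: obtain a v4 signed read URL via the public API that wraps
// URLSigner. Bucket and object names are placeholders.
const {Storage} = require('@google-cloud/storage');

async function signedReadUrl() {
  const storage = new Storage();
  const [url] = await storage
    .bucket('my-bucket')
    .file('my-object.txt')
    .getSignedUrl({
      version: 'v4',                         // selects getSignedUrlV4 above
      action: 'read',                        // mapped to the HTTP method being signed
      expires: Date.now() + 15 * 60 * 1000,  // must stay within the 7-day v4 limit
    });
  return url; // e.g. https://storage.googleapis.com/my-bucket/my-object.txt?X-Goog-Algorithm=...
}

signedReadUrl().then(console.log).catch(console.error);
```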
- } - parseAccessibleAt(accessibleAt) { - const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); - if (isNaN(accessibleAtInMSeconds)) { - throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); - } - return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. - } -} -exports.URLSigner = URLSigner; -/** - * Custom error type for errors related to getting signed errors and policies. - * - * @private - */ -class SigningError extends Error { - constructor() { - super(...arguments); - this.name = 'SigningError'; - } -} -exports.SigningError = SigningError; -//# sourceMappingURL=signer.js.map - -/***/ }), - -/***/ 346: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const paginator_1 = __nccwpck_require__(6412); -const promisify_1 = __nccwpck_require__(9203); -const stream_1 = __nccwpck_require__(2781); -const bucket_1 = __nccwpck_require__(8561); -const channel_1 = __nccwpck_require__(8953); -const file_1 = __nccwpck_require__(5373); -const util_1 = __nccwpck_require__(1862); -const hmacKey_1 = __nccwpck_require__(9930); -const crc32c_1 = __nccwpck_require__(4921); -var IdempotencyStrategy; -(function (IdempotencyStrategy) { - IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; - IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; - IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; -})(IdempotencyStrategy = exports.IdempotencyStrategy || (exports.IdempotencyStrategy = {})); -var ExceptionMessages; -(function (ExceptionMessages) { - ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; - ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; - ExceptionMessages["INVALID_ACTION"] = "The action is not provided or invalid."; -})(ExceptionMessages = exports.ExceptionMessages || (exports.ExceptionMessages = {})); -var StorageExceptionMessages; -(function (StorageExceptionMessages) { - StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; - StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; - StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; - StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; 
-})(StorageExceptionMessages = exports.StorageExceptionMessages || (exports.StorageExceptionMessages = {})); -exports.PROTOCOL_REGEX = /^(\w*):\/\//; -/** - * Default behavior: Automatically retry retriable server errors. - * - * @const {boolean} - */ -exports.AUTO_RETRY_DEFAULT = true; -/** - * Default behavior: Only attempt to retry retriable errors 3 times. - * - * @const {number} - */ -exports.MAX_RETRY_DEFAULT = 3; -/** - * Default behavior: Wait twice as long as previous retry before retrying. - * - * @const {number} - */ -exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; -/** - * Default behavior: If the operation doesn't succeed after 600 seconds, - * stop retrying. - * - * @const {number} - */ -exports.TOTAL_TIMEOUT_DEFAULT = 600; -/** - * Default behavior: Wait no more than 64 seconds between retries. - * - * @const {number} - */ -exports.MAX_RETRY_DELAY_DEFAULT = 64; -/** - * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. - * - * @const {enum} - * @private - */ -const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; -/** - * Returns true if the API request should be retried, given the error that was - * given the first time the request was attempted. - * @const - * @param {error} err - The API error to check if it is appropriate to retry. - * @return {boolean} True if the API request should be retried, false otherwise. - */ -const RETRYABLE_ERR_FN_DEFAULT = function (err) { - var _a; - const isConnectionProblem = (reason) => { - return (reason.includes('eai_again') || // DNS lookup error - reason === 'econnreset' || - reason === 'unexpected connection closure' || - reason === 'epipe'); - }; - if (err) { - if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { - return true; - } - if (typeof err.code === 'string') { - const reason = err.code.toLowerCase(); - if (isConnectionProblem(reason)) { - return true; - } - } - if (err.errors) { - for (const e of err.errors) { - const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); - if (reason && isConnectionProblem(reason)) { - return true; - } - } - } - } - return false; -}; -exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; -/*! Developer Documentation - * - * Invoke this method to create a new Storage object bound with pre-determined - * configuration options. For each object that can be created (e.g., a bucket), - * there is an equivalent static and instance method. While they are classes, - * they can be instantiated without use of the `new` keyword. - */ -/** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. 
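A small sketch of how the default predicate above (`RETRYABLE_ERR_FN_DEFAULT`) classifies errors. The error objects here are hand-made stand-ins for what the transport layer would pass in, and the predicate copies the logic shown above in condensed form.

```js
// Condensed copy of the default retryable-error classification above
// (the err.errors array walk is omitted here for brevity).
function isRetryable(err) {
  if (!err) return false;
  if ([408, 429, 500, 502, 503, 504].includes(err.code)) return true;
  if (typeof err.code === 'string') {
    const reason = err.code.toLowerCase();
    return (
      reason.includes('eai_again') ||   // DNS lookup error
      reason === 'econnreset' ||
      reason === 'unexpected connection closure' ||
      reason === 'epipe'
    );
  }
  return false;
}

console.log(isRetryable({code: 503}));          // true  - intermittent server error
console.log(isRetryable({code: 'ECONNRESET'})); // true  - connection problem
console.log(isRetryable({code: 404}));          // false - not retried by default
```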
- * - * This object provides constants to refer to the three permission levels that - * can be granted to an entity: - * - * - `gcs.acl.OWNER_ROLE` - ("OWNER") - * - `gcs.acl.READER_ROLE` - ("READER") - * - `gcs.acl.WRITER_ROLE` - ("WRITER") - * - * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} - * - * @name Storage#acl - * @type {object} - * @property {string} OWNER_ROLE - * @property {string} READER_ROLE - * @property {string} WRITER_ROLE - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const albums = storage.bucket('albums'); - * - * //- - * // Make all of the files currently in a bucket publicly readable. - * //- - * const options = { - * entity: 'allUsers', - * role: storage.acl.READER_ROLE - * }; - * - * albums.acl.add(options, function(err, aclObject) {}); - * - * //- - * // Make any new objects added to a bucket publicly readable. - * //- - * albums.acl.default.add(options, function(err, aclObject) {}); - * - * //- - * // Grant a user ownership permissions to a bucket. - * //- - * albums.acl.add({ - * entity: 'user-useremail@example.com', - * role: storage.acl.OWNER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * albums.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ -/** - * Get {@link Bucket} objects for all of the buckets in your project as - * a readable object stream. - * - * @method Storage#getBucketsStream - * @param {GetBucketsRequest} [query] Query object for listing buckets. - * @returns {ReadableStream} A readable stream that emits {@link Bucket} - * instances. - * - * @example - * ``` - * storage.getBucketsStream() - * .on('error', console.error) - * .on('data', function(bucket) { - * // bucket is a Bucket object. - * }) - * .on('end', function() { - * // All buckets retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * storage.getBucketsStream() - * .on('data', function(bucket) { - * this.end(); - * }); - * ``` - */ -/** - * Get {@link HmacKey} objects for all of the HMAC keys in the project in a - * readable object stream. - * - * @method Storage#getHmacKeysStream - * @param {GetHmacKeysOptions} [options] Configuration options. - * @returns {ReadableStream} A readable stream that emits {@link HmacKey} - * instances. - * - * @example - * ``` - * storage.getHmacKeysStream() - * .on('error', console.error) - * .on('data', function(hmacKey) { - * // hmacKey is an HmacKey object. - * }) - * .on('end', function() { - * // All HmacKey retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * storage.getHmacKeysStream() - * .on('data', function(bucket) { - * this.end(); - * }); - * ``` - */ -/** - *

- * ACLs
- * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share files with other users - * and allow other users to access your buckets and files. - * - * To learn more about ACLs, read this overview on - * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. - * - * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} - * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} - * - * @class - */ -class Storage extends nodejs_common_1.Service { - getBucketsStream() { - // placeholder body, overwritten in constructor - return new stream_1.Readable(); - } - getHmacKeysStream() { - // placeholder body, overwritten in constructor - return new stream_1.Readable(); - } - /** - * @callback Crc32cGeneratorToStringCallback - * A method returning the CRC32C as a base64-encoded string. - * - * @returns {string} - * - * @example - * Hashing the string 'data' should return 'rth90Q==' - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.toString(); // 'rth90Q==' - * ``` - **/ - /** - * @callback Crc32cGeneratorValidateCallback - * A method validating a base64-encoded CRC32C string. - * - * @param {string} [value] base64-encoded CRC32C string to validate - * @returns {boolean} - * - * @example - * Should return `true` if the value matches, `false` otherwise - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.validate('DkjKuA=='); // false - * crc32c.validate('rth90Q=='); // true - * ``` - **/ - /** - * @callback Crc32cGeneratorUpdateCallback - * A method for passing `Buffer`s for CRC32C generation. - * - * @param {Buffer} [data] data to update CRC32C value with - * @returns {undefined} - * - * @example - * Hashing buffers from 'some ' and 'text\n' - * - * ```js - * const buffer1 = Buffer.from('some '); - * crc32c.update(buffer1); - * - * const buffer2 = Buffer.from('text\n'); - * crc32c.update(buffer2); - * - * crc32c.toString(); // 'DkjKuA==' - * ``` - **/ - /** - * @typedef {object} CRC32CValidator - * @property {Crc32cGeneratorToStringCallback} - * @property {Crc32cGeneratorValidateCallback} - * @property {Crc32cGeneratorUpdateCallback} - */ - /** - * @callback Crc32cGeneratorCallback - * @returns {CRC32CValidator} - */ - /** - * @typedef {object} StorageOptions - * @property {string} [projectId] The project ID from the Google Developer's - * Console, e.g. 'grape-spaceship-123'. We will also check the environment - * variable `GCLOUD_PROJECT` for your project ID. If your app is running - * in an environment which supports {@link - * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application - * Application Default Credentials}, your project ID will be detected - * automatically. - * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key - * downloaded from the Google Developers Console. If you provide a path to - * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and - * .p12 require you to specify the `email` option as well. - * @property {string} [email] Account email address. Required when using a .pem - * or .p12 keyFilename. - * @property {object} [credentials] Credentials object. - * @property {string} [credentials.client_email] - * @property {string} [credentials.private_key] - * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors - * will be retried with exponential delay between them dictated by the formula - * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout - * has been reached. Retries will only happen if autoRetry is set to true. - * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the - * response is related to rate limits or certain intermittent server - * errors. We will exponentially backoff subsequent requests by default. - * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to - * increase the delay time between the completion of failed requests, and the - * initiation of the subsequent retrying request. - * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from - * when the initial request is sent, after which an error will - * be returned, regardless of the retrying attempts made meanwhile. - * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. - * When this value is reached, ``retryDelayMultiplier`` will no longer be used to - * increase delay time. - * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries - * attempted before returning the error. - * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given - * error should be retried and false otherwise. - * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration - * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - - * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - - * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never - * retry a conditionally idempotent operation. - * @property {string} [userAgent] The value to be prepended to the User-Agent - * header in API requests. - * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. - * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. - * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. - * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. - * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. - * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} - */ - /** - * Constructs the Storage client. 
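Tying the retry-related options documented above together, a hedged configuration sketch. All values are illustrative rather than recommendations, and it assumes the package's top-level export of `IdempotencyStrategy` alongside `Storage`.

```js
// Illustrative only: construct a client that overrides the documented retry
// defaults (autoRetry=true, maxRetries=3, retryDelayMultiplier=2,
// totalTimeout=600, maxRetryDelay=64).
const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');

const storage = new Storage({
  projectId: 'your-project-id',            // placeholder
  retryOptions: {
    autoRetry: true,
    maxRetries: 5,
    retryDelayMultiplier: 2,
    totalTimeout: 300,                     // seconds
    maxRetryDelay: 32,                     // seconds
    idempotencyStrategy: IdempotencyStrategy.RetryConditional,
  },
});
```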
- * - * @example - * Create a client that uses Application Default Credentials - * (ADC) - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * ``` - * - * @example - * Create a client with explicit credentials - * ``` - * const storage = new Storage({ - * projectId: 'your-project-id', - * keyFilename: '/path/to/keyfile.json' - * }); - * ``` - * - * @example - * Create a client with credentials passed - * by value as a JavaScript object - * ``` - * const storage = new Storage({ - * projectId: 'your-project-id', - * credentials: { - * type: 'service_account', - * project_id: 'xxxxxxx', - * private_key_id: 'xxxx', - * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', - * client_email: 'xxxx', - * client_id: 'xxx', - * auth_uri: 'https://accounts.google.com/o/oauth2/auth', - * token_uri: 'https://oauth2.googleapis.com/token', - * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', - * client_x509_cert_url: 'xxx', - * } - * }); - * ``` - * - * @example - * Create a client with credentials passed - * by loading a JSON file directly from disk - * ``` - * const storage = new Storage({ - * projectId: 'your-project-id', - * credentials: require('/path/to-keyfile.json') - * }); - * ``` - * - * @example - * Create a client with an `AuthClient` (e.g. `DownscopedClient`) - * ``` - * const {DownscopedClient} = require('google-auth-library'); - * const authClient = new DownscopedClient({...}); - * - * const storage = new Storage({authClient}); - * ``` - * - * Additional samples: - * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 - * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js - * - * @param {StorageOptions} [options] Configuration options. - */ - constructor(options = {}) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; - let apiEndpoint = 'https://storage.googleapis.com'; - let customEndpoint = false; - // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. - const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; - if (typeof EMULATOR_HOST === 'string') { - apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); - customEndpoint = true; - } - if (options.apiEndpoint) { - apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); - customEndpoint = true; - } - options = Object.assign({}, options, { apiEndpoint }); - // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. - const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; - const config = { - apiEndpoint: options.apiEndpoint, - retryOptions: { - autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined - ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry - : exports.AUTO_RETRY_DEFAULT, - maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) - ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries - : exports.MAX_RETRY_DEFAULT, - retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) - ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier - : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, - totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) - ? 
(_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout - : exports.TOTAL_TIMEOUT_DEFAULT, - maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) - ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay - : exports.MAX_RETRY_DELAY_DEFAULT, - retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) - ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn - : exports.RETRYABLE_ERR_FN_DEFAULT, - idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined - ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy - : IDEMPOTENCY_STRATEGY_DEFAULT, - }, - baseUrl, - customEndpoint, - useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, - projectIdRequired: false, - scopes: [ - 'https://www.googleapis.com/auth/iam', - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/devstorage.full_control', - ], - packageJson: __nccwpck_require__(5458), - }; - super(config, options); - /** - * Reference to {@link Storage.acl}. - * - * @name Storage#acl - * @see Storage.acl - */ - this.acl = Storage.acl; - this.crc32cGenerator = - options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; - this.retryOptions = config.retryOptions; - this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); - this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); - } - static sanitizeEndpoint(url) { - if (!exports.PROTOCOL_REGEX.test(url)) { - url = `https://${url}`; - } - return url.replace(/\/+$/, ''); // Remove trailing slashes - } - /** - * Get a reference to a Cloud Storage bucket. - * - * @param {string} name Name of the bucket. - * @param {object} [options] Configuration object. - * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to - * encrypt objects inserted into this bucket, if no encryption method is - * specified. - * @param {string} [options.userProject] User project to be billed for all - * requests made from this Bucket object. - * @returns {Bucket} - * @see Bucket - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const albums = storage.bucket('albums'); - * const photos = storage.bucket('photos'); - * ``` - */ - bucket(name, options) { - if (!name) { - throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); - } - return new bucket_1.Bucket(this, name, options); - } - /** - * Reference a channel to receive notifications about changes to your bucket. - * - * @param {string} id The ID of the channel. - * @param {string} resourceId The resource ID of the channel. - * @returns {Channel} - * @see Channel - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const channel = storage.channel('id', 'resource-id'); - * ``` - */ - channel(id, resourceId) { - return new channel_1.Channel(this, id, resourceId); - } - /** - * @typedef {array} CreateBucketResponse - * @property {Bucket} 0 The new {@link Bucket}. - * @property {object} 1 The full API response. - */ - /** - * @callback CreateBucketCallback - * @param {?Error} err Request error, if any. - * @param {Bucket} bucket The new {@link Bucket}. - * @param {object} apiResponse The full API response. 
- */ - /** - * Metadata to set for the bucket. - * - * @typedef {object} CreateBucketRequest - * @property {boolean} [archive=false] Specify the storage class as Archive. - * @property {object} [autoclass.enabled=false] Specify whether Autoclass is - * enabled for the bucket. - * @property {boolean} [coldline=false] Specify the storage class as Coldline. - * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. - * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. - * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. - * @property {boolean} [dra=false] Specify the storage class as Durable Reduced - * Availability. - * @property {string} [location] Specify the bucket's location. If specifying - * a dual-region, the `customPlacementConfig` property should be set in conjunction. - * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. - * @property {boolean} [multiRegional=false] Specify the storage class as - * Multi-Regional. - * @property {boolean} [nearline=false] Specify the storage class as Nearline. - * @property {boolean} [regional=false] Specify the storage class as Regional. - * @property {boolean} [requesterPays=false] **Early Access Testers Only** - * Force the use of the User Project metadata field to assign operational - * costs when an operation is made on a Bucket and its objects. - * @property {string} [rpo] For dual-region buckets, controls whether turbo - * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). - * @property {boolean} [standard=true] Specify the storage class as Standard. - * @property {string} [storageClass] The new storage class. (`standard`, - * `nearline`, `coldline`, or `archive`). - * **Note:** The storage classes `multi_regional`, `regional`, and - * `durable_reduced_availability` are now legacy and will be deprecated in - * the future. - * @property {Versioning} [versioning=undefined] Specify the versioning status. - * @property {string} [userProject] The ID of the project which will be billed - * for the request. - */ - /** - * Create a bucket. - * - * Cloud Storage uses a flat namespace, so you can't create a bucket with - * a name that is already in use. For more information, see - * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} - * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} - * - * @param {string} name Name of the bucket to create. - * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. - * @param {CreateBucketCallback} [callback] Callback function. - * @returns {Promise} - * @throws {Error} If a name is not provided. - * @see Bucket#create - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const callback = function(err, bucket, apiResponse) { - * // `bucket` is a Bucket object. - * }; - * - * storage.createBucket('new-bucket', callback); - * - * //- - * // Create a bucket in a specific location and region. See the - * // Official JSON API docs for complete details on the `location` - * option. 
- * // - * //- - * const metadata = { - * location: 'US-CENTRAL1', - * regional: true - * }; - * - * storage.createBucket('new-bucket', metadata, callback); - * - * //- - * // Create a bucket with a retention policy of 6 months. - * //- - * const metadata = { - * retentionPolicy: { - * retentionPeriod: 15780000 // 6 months in seconds. - * } - * }; - * - * storage.createBucket('new-bucket', metadata, callback); - * - * //- - * // Enable versioning on a new bucket. - * //- - * const metadata = { - * versioning: { - * enabled: true - * } - * }; - * - * storage.createBucket('new-bucket', metadata, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.createBucket('new-bucket').then(function(data) { - * const bucket = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/buckets.js - * region_tag:storage_create_bucket - * Another example: - */ - createBucket(name, metadataOrCallback, callback) { - if (!name) { - throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); - } - let metadata; - if (!callback) { - callback = metadataOrCallback; - metadata = {}; - } - else { - metadata = metadataOrCallback; - } - const body = { - ...metadata, - name, - }; - const storageClasses = { - archive: 'ARCHIVE', - coldline: 'COLDLINE', - dra: 'DURABLE_REDUCED_AVAILABILITY', - multiRegional: 'MULTI_REGIONAL', - nearline: 'NEARLINE', - regional: 'REGIONAL', - standard: 'STANDARD', - }; - const storageClassKeys = Object.keys(storageClasses); - for (const storageClass of storageClassKeys) { - if (body[storageClass]) { - if (metadata.storageClass && metadata.storageClass !== storageClass) { - throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); - } - body.storageClass = storageClasses[storageClass]; - delete body[storageClass]; - } - } - if (body.requesterPays) { - body.billing = { - requesterPays: body.requesterPays, - }; - delete body.requesterPays; - } - const query = { - project: this.projectId, - }; - if (body.userProject) { - query.userProject = body.userProject; - delete body.userProject; - } - this.request({ - method: 'POST', - uri: '/b', - qs: query, - json: body, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - const bucket = this.bucket(name); - bucket.metadata = resp; - callback(null, bucket, resp); - }); - } - /** - * @typedef {object} CreateHmacKeyOptions - * @property {string} [projectId] The project ID of the project that owns - * the service account of the requested HMAC key. If not provided, - * the project ID used to instantiate the Storage client will be used. - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * @typedef {object} HmacKeyMetadata - * @property {string} accessId The access id identifies which HMAC key was - * used to sign a request when authenticating with HMAC. - * @property {string} etag Used to perform a read-modify-write of the key. - * @property {string} id The resource name of the HMAC key. - * @property {string} projectId The project ID. - * @property {string} serviceAccountEmail The service account's email this - * HMAC key is created for. - * @property {string} state The state of this HMAC key. One of "ACTIVE", - * "INACTIVE" or "DELETED". - * @property {string} timeCreated The creation time of the HMAC key in - * RFC 3339 format. - * @property {string} [updated] The time this HMAC key was last updated in - * RFC 3339 format. 
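A short sketch of the storage-class shorthands handled in `createBucket` above: the boolean flag is translated into a `storageClass` value before the request body is sent, and supplying a conflicting explicit `storageClass` throws. Bucket names are placeholders.

```js
// Shorthand-to-storageClass mapping applied by createBucket above:
//   archive -> ARCHIVE, coldline -> COLDLINE, nearline -> NEARLINE,
//   dra -> DURABLE_REDUCED_AVAILABILITY, multiRegional -> MULTI_REGIONAL,
//   regional -> REGIONAL, standard -> STANDARD
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function createColdlineBuckets() {
  // Equivalent requests: the first uses the shorthand, the second is explicit.
  await storage.createBucket('my-coldline-bucket', {coldline: true, location: 'US'});
  await storage.createBucket('my-coldline-bucket-2', {storageClass: 'COLDLINE', location: 'US'});
  // Passing both `coldline: true` and a different `storageClass` throws, per
  // the guard in the implementation above.
}

createColdlineBuckets().catch(console.error);
```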
- */ - /** - * @typedef {array} CreateHmacKeyResponse - * @property {HmacKey} 0 The HmacKey instance created from API response. - * @property {string} 1 The HMAC key's secret used to access the XML API. - * @property {object} 3 The raw API response. - */ - /** - * @callback CreateHmacKeyCallback Callback function. - * @param {?Error} err Request error, if any. - * @param {HmacKey} hmacKey The HmacKey instance created from API response. - * @param {string} secret The HMAC key's secret used to access the XML API. - * @param {object} apiResponse The raw API response. - */ - /** - * Create an HMAC key associated with an service account to authenticate - * requests to the Cloud Storage XML API. - * - * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} - * - * @param {string} serviceAccountEmail The service account's email address - * with which the HMAC key is created for. - * @param {CreateHmacKeyCallback} [callback] Callback function. - * @return {Promise} - * - * @example - * ``` - * const {Storage} = require('google-cloud/storage'); - * const storage = new Storage(); - * - * // Replace with your service account's email address - * const serviceAccountEmail = - * 'my-service-account@appspot.gserviceaccount.com'; - * - * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { - * if (!err) { - * // Securely store the secret for use with the XML API. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.createHmacKey(serviceAccountEmail) - * .then((response) => { - * const hmacKey = response[0]; - * const secret = response[1]; - * // Securely store the secret for use with the XML API. - * }); - * ``` - */ - createHmacKey(serviceAccountEmail, optionsOrCb, cb) { - if (typeof serviceAccountEmail !== 'string') { - throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); - } - const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb); - const query = Object.assign({}, options, { serviceAccountEmail }); - const projectId = query.projectId || this.projectId; - delete query.projectId; - this.request({ - method: 'POST', - uri: `/projects/${projectId}/hmacKeys`, - qs: query, - maxRetries: 0, //explicitly set this value since this is a non-idempotent function - }, (err, resp) => { - if (err) { - callback(err, null, null, resp); - return; - } - const metadata = resp.metadata; - const hmacKey = this.hmacKey(metadata.accessId, { - projectId: metadata.projectId, - }); - hmacKey.metadata = resp.metadata; - callback(null, hmacKey, resp.secret, resp); - }); - } - /** - * Query object for listing buckets. - * - * @typedef {object} GetBucketsRequest - * @property {boolean} [autoPaginate=true] Have pagination handled - * automatically. - * @property {number} [maxApiCalls] Maximum number of API calls to make. - * @property {number} [maxResults] Maximum number of items plus prefixes to - * return per call. - * Note: By default will handle pagination automatically - * if more than 1 page worth of results are requested per call. - * When `autoPaginate` is set to `false` the smaller of `maxResults` - * or 1 page of results will be returned per call. - * @property {string} [pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @property {string} [userProject] The ID of the project which will be billed - * for the request. 
- */ - /** - * @typedef {array} GetBucketsResponse - * @property {Bucket[]} 0 Array of {@link Bucket} instances. - * @property {object} 1 nextQuery A query object to receive more results. - * @property {object} 2 The full API response. - */ - /** - * @callback GetBucketsCallback - * @param {?Error} err Request error, if any. - * @param {Bucket[]} buckets Array of {@link Bucket} instances. - * @param {object} nextQuery A query object to receive more results. - * @param {object} apiResponse The full API response. - */ - /** - * Get Bucket objects for all of the buckets in your project. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} - * - * @param {GetBucketsRequest} [query] Query object for listing buckets. - * @param {GetBucketsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * storage.getBuckets(function(err, buckets) { - * if (!err) { - * // buckets is an array of Bucket objects. - * } - * }); - * - * //- - * // To control how many API requests are made and page through the results - * // manually, set `autoPaginate` to `false`. - * //- - * const callback = function(err, buckets, nextQuery, apiResponse) { - * if (nextQuery) { - * // More results exist. - * storage.getBuckets(nextQuery, callback); - * } - * - * // The `metadata` property is populated for you with the metadata at the - * // time of fetching. - * buckets[0].metadata; - * - * // However, in cases where you are concerned the metadata could have - * // changed, use the `getMetadata` method. - * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); - * }; - * - * storage.getBuckets({ - * autoPaginate: false - * }, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.getBuckets().then(function(data) { - * const buckets = data[0]; - * }); - * - * ``` - * @example include:samples/buckets.js - * region_tag:storage_list_buckets - * Another example: - */ - getBuckets(optionsOrCallback, cb) { - const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb); - options.project = options.project || this.projectId; - this.request({ - uri: '/b', - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const buckets = itemsArray.map((bucket) => { - const bucketInstance = this.bucket(bucket.id); - bucketInstance.metadata = bucket; - return bucketInstance; - }); - const nextQuery = resp.nextPageToken - ? Object.assign({}, options, { pageToken: resp.nextPageToken }) - : null; - callback(null, buckets, nextQuery, resp); - }); - } - getHmacKeys(optionsOrCb, cb) { - const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb); - const query = Object.assign({}, options); - const projectId = query.projectId || this.projectId; - delete query.projectId; - this.request({ - uri: `/projects/${projectId}/hmacKeys`, - qs: query, - }, (err, resp) => { - if (err) { - callback(err, null, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const hmacKeys = itemsArray.map((hmacKey) => { - const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { - projectId: hmacKey.projectId, - }); - hmacKeyInstance.metadata = hmacKey; - return hmacKeyInstance; - }); - const nextQuery = resp.nextPageToken - ? 
Object.assign({}, options, { pageToken: resp.nextPageToken }) - : null; - callback(null, hmacKeys, nextQuery, resp); - }); - } - /** - * @typedef {array} GetServiceAccountResponse - * @property {object} 0 The service account resource. - * @property {object} 1 The full - * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. - */ - /** - * @callback GetServiceAccountCallback - * @param {?Error} err Request error, if any. - * @param {object} serviceAccount The serviceAccount resource. - * @param {string} serviceAccount.emailAddress The service account email - * address. - * @param {object} apiResponse The full - * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. - */ - /** - * Get the email address of this project's Google Cloud Storage service - * account. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} - * - * @param {object} [options] Configuration object. - * @param {string} [options.userProject] User project to be billed for this - * request. - * @param {GetServiceAccountCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { - * if (!err) { - * const serviceAccountEmail = serviceAccount.emailAddress; - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.getServiceAccount().then(function(data) { - * const serviceAccountEmail = data[0].emailAddress; - * const apiResponse = data[1]; - * }); - * ``` - */ - getServiceAccount(optionsOrCallback, cb) { - const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb); - this.request({ - uri: `/projects/${this.projectId}/serviceAccount`, - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - const camelCaseResponse = {}; - for (const prop in resp) { - // eslint-disable-next-line no-prototype-builtins - if (resp.hasOwnProperty(prop)) { - const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); - camelCaseResponse[camelCaseProp] = resp[prop]; - } - } - callback(null, camelCaseResponse, resp); - }); - } - /** - * Get a reference to an HmacKey object. - * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to - * retrieve and populate the metadata. - * - * To get a reference to an HMAC key that's not created for a service - * account in the same project used to instantiate the Storage client, - * supply the project's ID as `projectId` in the `options` argument. - * - * @param {string} accessId The HMAC key's access ID. - * @param {HmacKeyOptions} options HmacKey constructor options. - * @returns {HmacKey} - * @see HmacKey - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const hmacKey = storage.hmacKey('ACCESS_ID'); - * ``` - */ - hmacKey(accessId, options) { - if (!accessId) { - throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); - } - return new hmacKey_1.HmacKey(this, accessId, options); - } -} -exports.Storage = Storage; -/** - * {@link Bucket} class. 
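A one-line illustration of the snake_case to camelCase rewrite that `getServiceAccount` above applies to the raw API response keys before invoking the callback; the sample keys are made up.

```js
// Same transform as in getServiceAccount above: "email_address" -> "emailAddress".
const toCamel = prop => prop.replace(/_(\w)/g, (_, m) => m.toUpperCase());

console.log(toCamel('email_address')); // "emailAddress"
console.log(toCamel('kind'));          // "kind" (unchanged)
```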
- * - * @name Storage.Bucket - * @see Bucket - * @type {Constructor} - */ -Storage.Bucket = bucket_1.Bucket; -/** - * {@link Channel} class. - * - * @name Storage.Channel - * @see Channel - * @type {Constructor} - */ -Storage.Channel = channel_1.Channel; -/** - * {@link File} class. - * - * @name Storage.File - * @see File - * @type {Constructor} - */ -Storage.File = file_1.File; -/** - * {@link HmacKey} class. - * - * @name Storage.HmacKey - * @see HmacKey - * @type {Constructor} - */ -Storage.HmacKey = hmacKey_1.HmacKey; -Storage.acl = { - OWNER_ROLE: 'OWNER', - READER_ROLE: 'READER', - WRITER_ROLE: 'WRITER', -}; -/*! Developer Documentation - * - * These methods can be auto-paginated. - */ -paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Storage, { - exclude: ['bucket', 'channel', 'hmacKey'], -}); -//# sourceMappingURL=storage.js.map - -/***/ }), - -/***/ 9904: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TransferManager = void 0; -const pLimit = __nccwpck_require__(7684); -const path = __nccwpck_require__(1017); -const extend = __nccwpck_require__(8171); -const fs_1 = __nccwpck_require__(7147); -const crc32c_1 = __nccwpck_require__(4921); -/** - * Default number of concurrently executing promises to use when calling uploadManyFiles. - * @experimental - */ -const DEFAULT_PARALLEL_UPLOAD_LIMIT = 2; -/** - * Default number of concurrently executing promises to use when calling downloadManyFiles. - * @experimental - */ -const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 2; -/** - * Default number of concurrently executing promises to use when calling downloadFileInChunks. - * @experimental - */ -const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 2; -/** - * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. - * @experimental - */ -const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; -/** - * The chunk size in bytes to use when calling downloadFileInChunks. - * @experimental - */ -const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 10 * 1024 * 1024; -const EMPTY_REGEX = '(?:)'; -/** - * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. - * - * @class - * @hideconstructor - * - * @param {Bucket} bucket A {@link Bucket} instance - * @experimental - */ -class TransferManager { - constructor(bucket) { - this.bucket = bucket; - } - /** - * @typedef {object} UploadManyFilesOptions - * @property {number} [concurrencyLimit] The number of concurrently executing promises - * to use when uploading the files. 
- * @property {boolean} [skipIfExists] Do not upload the file if it already exists in - * the bucket. This will set the precondition ifGenerationMatch = 0. - * @property {string} [prefix] A prefix to append to all of the uploaded files. - * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through - * to each individual upload operation. - * @experimental - */ - /** - * Upload multiple files in parallel to the bucket. This is a convenience method - * that utilizes {@link Bucket#upload} to perform the upload. - * - * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. - * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. - * to be uploaded to the bucket - * @param {UploadManyFilesOptions} [options] Configuration options. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const transferManager = new TransferManager(bucket); - * - * //- - * // Upload multiple files in parallel. - * //- - * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); - * // Your bucket now contains: - * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') - * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') - * const response = await transferManager.uploadManyFiles('/local/directory'); - * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. - * ``` - * @experimental - */ - async uploadManyFiles(filePathsOrDirectory, options = {}) { - var _a; - if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { - options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; - } - else if (options.skipIfExists && - options.passthroughOptions === undefined) { - options.passthroughOptions = { - preconditionOpts: { - ifGenerationMatch: 0, - }, - }; - } - const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); - const promises = []; - let allPaths = []; - if (!Array.isArray(filePathsOrDirectory)) { - for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { - allPaths.push(curPath); - } - } - else { - allPaths = filePathsOrDirectory; - } - for (const filePath of allPaths) { - const stat = await fs_1.promises.lstat(filePath); - if (stat.isDirectory()) { - continue; - } - const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions); - passThroughOptionsCopy.destination = filePath; - if (options.prefix) { - passThroughOptionsCopy.destination = path.join(options.prefix, passThroughOptionsCopy.destination); - } - promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); - } - return Promise.all(promises); - } - /** - * @typedef {object} DownloadManyFilesOptions - * @property {number} [concurrencyLimit] The number of concurrently executing promises - * to use when downloading the files. - * @property {string} [prefix] A prefix to append to all of the downloaded files. - * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. - * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through - * to each individual download operation. 
- * @experimental - */ - /** - * Download multiple files in parallel to the local filesystem. This is a convenience method - * that utilizes {@link File#download} to perform the download. - * - * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If - * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. - * @param {DownloadManyFilesOptions} [options] Configuration options. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const transferManager = new TransferManager(bucket); - * - * //- - * // Download multiple files in parallel. - * //- - * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); - * // The following files have been downloaded: - * // - "file1.txt" (with the contents from my-bucket.file1.txt) - * // - "file2.txt" (with the contents from my-bucket.file2.txt) - * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); - * // The following files have been downloaded: - * // - "file1.txt" (with the contents from my-bucket.file1.txt) - * // - "file2.txt" (with the contents from my-bucket.file2.txt) - * const response = await transferManager.downloadManyFiles('test-folder'); - * // All files with GCS prefix of 'test-folder' have been downloaded. - * ``` - * @experimental - */ - async downloadManyFiles(filesOrFolder, options = {}) { - const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); - const promises = []; - let files = []; - if (!Array.isArray(filesOrFolder)) { - const directoryFiles = await this.bucket.getFiles({ - prefix: filesOrFolder, - }); - files = directoryFiles[0]; - } - else { - files = filesOrFolder.map(curFile => { - if (typeof curFile === 'string') { - return this.bucket.file(curFile); - } - return curFile; - }); - } - const stripRegexString = options.stripPrefix - ? `^${options.stripPrefix}` - : EMPTY_REGEX; - const regex = new RegExp(stripRegexString, 'g'); - for (const file of files) { - const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions); - if (options.prefix) { - passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); - } - if (options.stripPrefix) { - passThroughOptionsCopy.destination = file.name.replace(regex, ''); - } - promises.push(limit(() => file.download(passThroughOptionsCopy))); - } - return Promise.all(promises); - } - /** - * @typedef {object} DownloadFileInChunksOptions - * @property {number} [concurrencyLimit] The number of concurrently executing promises - * to use when downloading the file. - * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. - * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. - * @experimental - */ - /** - * Download a large file in chunks utilizing parallel download operations. This is a convenience method - * that utilizes {@link File#download} to perform the download. - * - * @param {object} [file | string] {@link File} to download. - * @param {DownloadFileInChunksOptions} [options] Configuration options. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const transferManager = new TransferManager(bucket); - * - * //- - * // Download a large file in chunks utilizing parallel operations. - * //- - * const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt'); - * // Your local directory now contains: - * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) - * ``` - * @experimental - */ - async downloadFileInChunks(fileOrName, options = {}) { - let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; - let limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); - const promises = []; - const file = typeof fileOrName === 'string' - ? this.bucket.file(fileOrName) - : fileOrName; - const fileInfo = await file.get(); - const size = parseInt(fileInfo[0].metadata.size); - // If the file size does not meet the threshold download it as a single chunk. - if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { - limit = pLimit(1); - chunkSize = size; - } - let start = 0; - const filePath = options.destination || path.basename(file.name); - const fileToWrite = await fs_1.promises.open(filePath, 'w+'); - while (start < size) { - const chunkStart = start; - let chunkEnd = start + chunkSize - 1; - chunkEnd = chunkEnd > size ? size : chunkEnd; - promises.push(limit(() => file.download({ start: chunkStart, end: chunkEnd }).then(resp => { - return fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); - }))); - start += chunkSize; - } - return new Promise((resolve, reject) => { - let results; - Promise.all(promises) - .then(data => { - results = data.map(result => result.buffer); - if (options.validation === 'crc32c') { - return crc32c_1.CRC32C.fromFile(filePath); - } - return; - }) - .then(() => { - resolve(results); - }) - .catch(e => { - reject(e); - }) - .finally(() => { - fileToWrite.close(); - }); - }); - } - async *getPathsFromDirectory(directory) { - const filesAndSubdirectories = await fs_1.promises.readdir(directory, { - withFileTypes: true, - }); - for (const curFileOrDirectory of filesAndSubdirectories) { - const fullPath = path.join(directory, curFileOrDirectory.name); - curFileOrDirectory.isDirectory() - ? yield* this.getPathsFromDirectory(fullPath) - : yield fullPath; - } - } -} -exports.TransferManager = TransferManager; -//# sourceMappingURL=transfer-manager.js.map - -/***/ }), - -/***/ 1862: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PassThroughShim = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; -const querystring = __nccwpck_require__(3477); -const stream_1 = __nccwpck_require__(2781); -function normalize(optionsOrCallback, cb) { - const options = (typeof optionsOrCallback === 'object' ? optionsOrCallback : {}); - const callback = (typeof optionsOrCallback === 'function' ? optionsOrCallback : cb); - return { options, callback }; -} -exports.normalize = normalize; -/** - * Flatten an object into an Array of arrays, [[key, value], ..]. - * Implements Object.entries() for Node.js <8 - * @internal - */ -function objectEntries(obj) { - return Object.keys(obj).map(key => [key, obj[key]]); -} -exports.objectEntries = objectEntries; -/** - * Encode `str` with encodeURIComponent, plus these - * reserved characters: `! * ' ( )`. - * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} - * - * @param {string} str The URI component to encode. - * @return {string} The encoded string. - */ -function fixedEncodeURIComponent(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); -} -exports.fixedEncodeURIComponent = fixedEncodeURIComponent; -/** - * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. - * - * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. - * - * @param {string} uri The URI to encode. - * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. - * @return {string} The encoded string. - */ -function encodeURI(uri, encodeSlash) { - // Split the string by `/`, and conditionally rejoin them with either - // %2F if encodeSlash is `true`, or '/' if `false`. - return uri - .split('/') - .map(fixedEncodeURIComponent) - .join(encodeSlash ? '%2F' : '/'); -} -exports.encodeURI = encodeURI; -/** - * Serialize an object to a URL query string using util.encodeURI(uri, true). - * @param {string} url The object to serialize. - * @return {string} Serialized string. - */ -function qsStringify(qs) { - return querystring.stringify(qs, '&', '=', { - encodeURIComponent: (component) => encodeURI(component, true), - }); -} -exports.qsStringify = qsStringify; -function objectKeyToLowercase(object) { - const newObj = {}; - for (let key of Object.keys(object)) { - const value = object[key]; - key = key.toLowerCase(); - newObj[key] = value; - } - return newObj; -} -exports.objectKeyToLowercase = objectKeyToLowercase; -/** - * JSON encode str, with unicode \u+ representation. - * @param {object} obj The object to encode. - * @return {string} Serialized string. - */ -function unicodeJSONStringify(obj) { - return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, (char) => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); -} -exports.unicodeJSONStringify = unicodeJSONStringify; -/** - * Converts the given objects keys to snake_case - * @param {object} obj object to convert keys to snake case. - * @returns {object} object with keys converted to snake case. 
- */ -function convertObjKeysToSnakeCase(obj) { - if (obj instanceof Date || obj instanceof RegExp) { - return obj; - } - if (Array.isArray(obj)) { - return obj.map(convertObjKeysToSnakeCase); - } - if (obj instanceof Object) { - return Object.keys(obj).reduce((acc, cur) => { - const s = cur[0].toLocaleLowerCase() + - cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { - return `_${p1.toLowerCase()}`; - }); - acc[s] = convertObjKeysToSnakeCase(obj[cur]); - return acc; - }, Object()); - } - return obj; -} -exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; -/** - * Formats the provided date object as a UTC ISO string. - * @param {Date} dateTimeToFormat date object to be formatted. - * @param {boolean} includeTime flag to include hours, minutes, seconds in output. - * @param {string} dateDelimiter delimiter between date components. - * @param {string} timeDelimiter delimiter between time components. - * @returns {string} UTC ISO format of provided date obect. - */ -function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { - const year = dateTimeToFormat.getUTCFullYear(); - const month = dateTimeToFormat.getUTCMonth() + 1; - const day = dateTimeToFormat.getUTCDate(); - const hour = dateTimeToFormat.getUTCHours(); - const minute = dateTimeToFormat.getUTCMinutes(); - const second = dateTimeToFormat.getUTCSeconds(); - let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month - .toString() - .padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; - if (includeTime) { - resultString = `${resultString}T${hour - .toString() - .padStart(2, '0')}${timeDelimiter}${minute - .toString() - .padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; - } - return resultString; -} -exports.formatAsUTCISO = formatAsUTCISO; -class PassThroughShim extends stream_1.PassThrough { - constructor() { - super(...arguments); - this.shouldEmitReading = true; - this.shouldEmitWriting = true; - } - _read(size) { - if (this.shouldEmitReading) { - this.emit('reading'); - this.shouldEmitReading = false; - } - super._read(size); - } - _write(chunk, encoding, callback) { - if (this.shouldEmitWriting) { - this.emit('writing'); - this.shouldEmitWriting = false; - } - // Per the nodejs documention, callback must be invoked on the next tick - process.nextTick(() => { - super._write(chunk, encoding, callback); - }); - } - _final(callback) { - // If the stream is empty (i.e. empty file) final will be invoked before _read / _write - // and we should still emit the proper events. - if (this.shouldEmitReading) { - this.emit('reading'); - this.shouldEmitReading = false; - } - if (this.shouldEmitWriting) { - this.emit('writing'); - this.shouldEmitWriting = false; - } - callback(null); - } -} -exports.PassThroughShim = PassThroughShim; -//# sourceMappingURL=util.js.map - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: 
console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. 
- * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); + return gather(octokit, results, iterator2, mapFn); }); } -const composePaginateRest = Object.assign(paginate, { +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { iterator }); -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET 
/projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET 
/repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET 
/orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; +// pkg/dist-src/paginating-endpoints.js function isPaginatingEndpoint(arg) { if (typeof arg === "string") { return paginatingEndpoints.includes(arg); @@ -17752,11 +5336,7 @@ function isPaginatingEndpoint(arg) { } } -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - +// pkg/dist-src/index.js function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { @@ -17765,202 +5345,390 @@ function paginateRest(octokit) { }; } paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 3044: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: 
all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} +// pkg/dist-src/version.js +var VERSION = "10.3.0"; -const Endpoints = { +// pkg/dist-src/generated/endpoints.js +var Endpoints = { actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET 
/repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + 
getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: 
["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - 
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT 
/orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], getFeeds: ["GET /feeds"], getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], listEventsForAuthenticatedUser: ["GET /users/{username}/events"], listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], listPublicEvents: ["GET /events"], listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], listPublicEventsForUser: ["GET /users/{username}/events/public"], listPublicOrgEvents: ["GET /orgs/{org}/events"], listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], listReposStarredByAuthenticatedUser: ["GET /user/starred"], listReposStarredByUser: ["GET /users/{username}/starred"], listReposWatchedByUser: ["GET /users/{username}/subscriptions"], @@ -17971,18 +5739,26 @@ const Endpoints = { markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", 
"addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], checkToken: ["POST /applications/{client_id}/token"], createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteInstallation: ["DELETE /app/installations/{installation_id}"], deleteToken: ["DELETE /applications/{client_id}/token"], @@ -17991,75 +5767,132 @@ const Endpoints = { getInstallation: ["GET /app/installations/{installation_id}"], getOrgInstallation: ["GET /orgs/{org}/installation"], getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], getUserInstallation: ["GET /users/{username}/installation"], getWebhookConfigForApp: ["GET /app/hook/config"], getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], listInstallations: ["GET /app/installations"], listInstallationsForAuthenticatedUser: ["GET /user/installations"], listPlans: ["GET /marketplace_listing/plans"], listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], resetToken: ["PATCH /applications/{client_id}/token"], 
revokeInstallationAccessToken: ["DELETE /installation/token"], scopeToken: ["POST /applications/{client_id}/token/scoped"], suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] }, checks: { create: ["POST /repos/{owner}/{repo}/check-runs"], createSuite: ["POST /repos/{owner}/{repo}/check-suites"], get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" 
+ ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { @@ -18067,82 +5900,190 @@ const Endpoints = { getConductCode: ["GET /codes_of_conduct/{key}"] }, codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - 
getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE 
/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] + listSelectedReposForOrgSecret: [ + "GET 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] }, dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] }, + emojis: { get: ["GET /emojis"] }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], @@ -18188,33 +6129,52 @@ const Endpoints = { getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", 
"removeRestrictionsForAuthenticatedUser"] - }], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] }, issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], createLabel: ["POST /repos/{owner}/{repo}/labels"], createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], @@ -18226,24 +6186,38 @@ const Endpoints = { listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], listMilestones: ["GET /repos/{owner}/{repo}/milestones"], lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + removeAllLabels: [ + 
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] }, licenses: { get: ["GET /licenses/{license}"], @@ -18252,65 +6226,159 @@ const Endpoints = { }, markdown: { render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] }, meta: { get: ["GET /meta"], + getAllVersions: ["GET /versions"], getOctocat: ["GET /octocat"], getZen: ["GET /zen"], root: ["GET /"] }, migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], listForAuthenticatedUser: ["GET /user/migrations"], listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], listReposForOrg: 
["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] }, orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], blockUser: ["PUT /orgs/{org}/blocks/{username}"], cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET 
/orgs/{org}/properties/schema/{custom_property_name}" + ], getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], @@ -18318,55 +6386,148 @@ const Endpoints = { listMembers: ["GET /orgs/{org}/members"], listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], removeMember: ["DELETE /orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: 
["POST /orgs/{org}/personal-access-tokens"], updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] }, packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + 
getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], listPackagesForAuthenticatedUser: ["GET /user/packages"], listPackagesForOrganization: ["GET /orgs/{org}/packages"], listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] }, projects: { addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], @@ -18381,7 +6542,9 @@ const Endpoints = { get: ["GET /projects/{project_id}"], getCard: ["GET /projects/columns/cards/{card_id}"], getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], listCards: ["GET /projects/columns/{column_id}/cards"], listCollaborators: ["GET /projects/{project_id}/collaborators"], listColumns: ["GET /projects/{project_id}/columns"], @@ -18390,7 +6553,9 @@ const Endpoints = { 
listForUser: ["GET /users/{username}/projects"], moveCard: ["POST /projects/columns/cards/{card_id}/moves"], moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], update: ["PATCH /projects/{project_id}"], updateCard: ["PATCH /projects/columns/cards/{card_id}"], updateColumn: ["PATCH /projects/columns/{column_id}"] @@ -18398,191 +6563,411 @@ const Endpoints = { pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] }, + rateLimit: { get: ["GET /rate_limit"] }, reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] }, repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkAutomatedSecurityFixes: [ + "GET 
/repos/{owner}/{repo}/automated-security-fixes" + ], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - 
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], downloadTarballArchive: ["GET 
/repos/{owner}/{repo}/tarball/{ref}"], downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + 
], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + 
getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], listCommits: ["GET /repos/{owner}/{repo}/commits"], listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], listDeployments: ["GET /repos/{owner}/{repo}/deployments"], listForAuthenticatedUser: ["GET /user/repos"], listForOrg: ["GET /orgs/{org}/repos"], @@ -18593,67 +6978,117 @@ const Endpoints = { listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], listReleases: ["GET /repos/{owner}/{repo}/releases"], listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: 
["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], transfer: ["POST /repos/{owner}/{repo}/transfer"], update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH 
/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] }, search: { code: ["GET /search/code"], @@ -18665,390 +7100,596 @@ const Endpoints = { users: ["GET /search/users"] }, secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH 
/repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] }, teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], list: ["GET /orgs/{org}/teams"], listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE 
/orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], follow: ["PUT /user/following/{username}"], getAuthenticated: 
["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + 
listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; +var endpoints_default = Endpoints; -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { method, url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } - return newMethods; } - function decorate(octokit, scope, methodName, defaults, decorations) { const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = 
requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - + let options = requestWithDefaults.endpoint.merge(...args); if (decorations.mapToData) { options = Object.assign({}, options, { data: options[decorations.mapToData], - [decorations.mapToData]: undefined + [decorations.mapToData]: void 0 }); return requestWithDefaults(options); } - if (decorations.renamed) { const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); } - if (decorations.deprecated) { octokit.log.warn(decorations.deprecated); } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; } - - delete options[name]; + delete options2[name]; } } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - + return requestWithDefaults(options2); + } return requestWithDefaults(...args); } - return Object.assign(withDecorations, requestWithDefaults); } +// pkg/dist-src/index.js function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); + const api = endpointsToMethods(octokit); return { rest: api }; } restEndpointMethods.VERSION = VERSION; function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { + const api = endpointsToMethods(octokit); + return { + ...api, rest: api - }); + }; } legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(8932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - + super(message); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } - this.name = "HttpError"; this.status = statusCode; let headers; - if ("headers" in options && typeof options.headers !== "undefined") { headers = options.headers; } - if ("response" in options) { this.response = options.response; headers = options.response.headers; - } // redact request credentials without mutating original request options - - + } const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) }); } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL 
query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; Object.defineProperty(this, "code", { get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); return statusCode; } - }); Object.defineProperty(this, "headers", { get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." + ) + ); return headers || {}; } - }); } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 6234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(9440); +var import_universal_user_agent = __nccwpck_require__(5030); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +// pkg/dist-src/version.js +var VERSION = "8.2.0"; -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} -const VERSION = "5.6.3"; +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(537); +// pkg/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } +// pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { + var _a, _b, _c; const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } - let headers = {}; let status; let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { url = response.url; status = response.status; - for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } - if ("deprecation" in headers) { const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}` + ); } - if (status === 204 || status === 205) { return; - } // GitHub API returns 200 for HEAD requests - - + } if (requestOptions.method === "HEAD") { if (status < 400) { return; } - - throw new requestError.RequestError(response.statusText, status, { + throw new import_request_error.RequestError(response.statusText, status, { response: { url, status, headers, - data: undefined + data: void 0 }, request: requestOptions }); } - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { + throw new import_request_error.RequestError("Not modified", status, { response: { url, status, @@ -19058,10 +7699,9 @@ function fetchWrapper(requestOptions) { request: requestOptions }); } - if (status >= 400) { const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { + const error = new import_request_error.RequestError(toErrorMessage(data), status, { response: { url, status, @@ -19072,87 +7712,93 @@ function fetchWrapper(requestOptions) { }); throw error; } - - return getResponseData(response); - }).then(data => { + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { return { status, url, headers, data }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { request: requestOptions }); }); } - async function getResponseData(response) { const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json(); + return response.json().catch(() => response.text()).catch(() => ""); } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { return response.text(); } - return getBufferResponse(response); } - function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } if ("message" in data) { if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; } - - return data.message; - } // istanbul ignore next - just in case - - + return `${data.message}${suffix}`; + } return `Unknown error: ${JSON.stringify(data)}`; } +// pkg/dist-src/with-defaults.js function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); + return 
fetchWrapper(endpoint2.parse(endpointOptions)); } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); - return endpointOptions.request.hook(request, endpointOptions); + return endpointOptions.request.hook(request2, endpointOptions); }; - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); } -const request = withDefaults(endpoint.endpoint, { +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } }); - -exports.request = request; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), @@ -19593,7 +8239,7 @@ module.exports = arrify; /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { // Packages -var retrier = __nccwpck_require__(4347); +var retrier = __nccwpck_require__(1604); function retry(fn, opts) { function run(resolve, reject) { @@ -20003,7 +8649,7 @@ function removeHook(state, name, method) { 'use strict'; /* - * bignumber.js v9.1.1 + * bignumber.js v9.1.2 * A JavaScript library for arbitrary-precision arithmetic. * https://github.com/MikeMcl/bignumber.js * Copyright (c) 2022 Michael Mclaughlin @@ -20638,7 +9284,7 @@ function removeHook(state, name, method) { * arguments {number|string|BigNumber} */ BigNumber.maximum = BigNumber.max = function () { - return maxOrMin(arguments, P.lt); + return maxOrMin(arguments, -1); }; @@ -20648,7 +9294,7 @@ function removeHook(state, name, method) { * arguments {number|string|BigNumber} */ BigNumber.minimum = BigNumber.min = function () { - return maxOrMin(arguments, P.gt); + return maxOrMin(arguments, 1); }; @@ -21292,24 +9938,20 @@ function removeHook(state, name, method) { // Handle BigNumber.max and BigNumber.min. - function maxOrMin(args, method) { - var n, + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, i = 1, - m = new BigNumber(args[0]); + x = new BigNumber(args[0]); for (; i < args.length; i++) { - n = new BigNumber(args[i]); - - // If any number is NaN, return NaN. - if (!n.s) { - m = n; - break; - } else if (method.call(m, n)) { - m = n; + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; } } - return m; + return x; } @@ -21428,7 +10070,7 @@ function removeHook(state, name, method) { n = xc[ni = 0]; // Get the rounding digit at index j of n. - rd = n / pows10[d - j - 1] % 10 | 0; + rd = mathfloor(n / pows10[d - j - 1] % 10); } else { ni = mathceil((i + 1) / LOG_BASE); @@ -21459,7 +10101,7 @@ function removeHook(state, name, method) { j = i - LOG_BASE + d; // Get the rounding digit at index j of n. - rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0; + rd = j < 0 ? 
0 : mathfloor(n / pows10[d - j - 1] % 10); } } @@ -22929,2684 +11571,8328 @@ function removeHook(state, name, method) { /***/ }), -/***/ 9239: +/***/ 9239: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*jshint node:true */ + +var Buffer = (__nccwpck_require__(4300).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(4300).SlowBuffer); + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. + if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; + + +/***/ }), + +/***/ 6763: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*! + * compressible + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Jeremiah Senkpiel + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __nccwpck_require__(7426) + +/** + * Module variables. + * @private + */ + +var COMPRESSIBLE_TYPE_REGEXP = /^text\/|\+(?:json|text|xml)$/i +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ + +/** + * Module exports. + * @public + */ + +module.exports = compressible + +/** + * Checks if a type is compressible. + * + * @param {string} type + * @return {Boolean} compressible + * @public + */ + +function compressible (type) { + if (!type || typeof type !== 'string') { + return false + } + + // strip parameters + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && match[1].toLowerCase() + var data = db[mime] + + // return database information + if (data && data.compressible !== undefined) { + return data.compressible + } + + // fallback to regexp or unknown + return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined +} + + +/***/ }), + +/***/ 8222: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. 
+ */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(6243)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 6243: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(900); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 8237: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(8222); +} else { + module.exports = __nccwpck_require__(4874); +} + + +/***/ }), + +/***/ 4874: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(6224); +const util = __nccwpck_require__(3837); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(9318); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(6243)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + +/***/ }), + +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 6599: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var stream = __nccwpck_require__(1642) +var eos = __nccwpck_require__(1205) +var inherits = __nccwpck_require__(4124) +var shift = __nccwpck_require__(6121) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 1728: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Buffer = (__nccwpck_require__(1867).Buffer); + +var getParamBytesForAlg = __nccwpck_require__(528); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if 
(signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; + + +/***/ }), + +/***/ 528: +/***/ ((module) => { + +"use strict"; + + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; + + +/***/ }), + +/***/ 1205: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(1223); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 5771: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var punycode = __nccwpck_require__(5477); +var entities = __nccwpck_require__(2077); + +module.exports = decode; + +function decode (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + + return str.replace(/&(#?[^;\W]+;?)/g, function (_, match) { + var m; + if (m = /^#(\d+);?$/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 10) ]); + } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 16) ]); + } else { + // named entity + var hasSemi = /;$/.test(match); + var withoutSemi = hasSemi ? match.replace(/;$/, '') : match; + var target = entities[withoutSemi] || (hasSemi && entities[match]); + + if (typeof target === 'number') { + return punycode.ucs2.encode([ target ]); + } else if (typeof target === 'string') { + return target; + } else { + return '&' + match; + } + } + }); +} + + +/***/ }), + +/***/ 6521: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var punycode = __nccwpck_require__(5477); +var revEntities = __nccwpck_require__(3123); + +module.exports = encode; + +function encode (str, opts) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + if (!opts) opts = {}; + + var numeric = true; + if (opts.named) numeric = false; + if (opts.numeric !== undefined) numeric = opts.numeric; + + var special = opts.special || { + '"': true, "'": true, + '<': true, '>': true, + '&': true + }; + + var codePoints = punycode.ucs2.decode(str); + var chars = []; + for (var i = 0; i < codePoints.length; i++) { + var cc = codePoints[i]; + var c = punycode.ucs2.encode([ cc ]); + var e = revEntities[cc]; + if (e && (cc >= 127 || special[c]) && !numeric) { + chars.push('&' + (/;$/.test(e) ? 
e : e + ';')); + } + else if (cc < 32 || cc >= 127 || special[c]) { + chars.push('&#' + cc + ';'); + } + else { + chars.push(c); + } + } + return chars.join(''); +} + + +/***/ }), + +/***/ 1151: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +exports.encode = __nccwpck_require__(6521); +exports.decode = __nccwpck_require__(5771); + + +/***/ }), + +/***/ 4697: +/***/ ((module, exports) => { + +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. 
+ * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. 
+Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. 
+ * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. 
+ * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + + +/***/ }), + +/***/ 8171: +/***/ ((module) => { + +"use strict"; + + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. + var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; + + +/***/ }), + +/***/ 2603: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(1739); +const XMLParser = __nccwpck_require__(2380); +const XMLBuilder = __nccwpck_require__(660); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 8280: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. + * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 1739: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(8280); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 660: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/*jshint node:true */ -var Buffer = (__nccwpck_require__(4300).Buffer); 
// browserify -var SlowBuffer = (__nccwpck_require__(4300).SlowBuffer); +//parse Empty Node as self closing node +const buildFromOrderedJs = __nccwpck_require__(2462); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } -module.exports = bufferEq; + this.processTextOrObjNode = processTextOrObjNode -function bufferEq(a, b) { + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} - // shortcutting on type is necessary for correctness - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - return false; +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; } +}; - // buffer sizes should be well-known information, so despite this - // shortcutting, it doesn't leak any information about the *contents* of the - // buffers. - if (a.length !== b.length) { - return false; +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup ){ + listTagVal += this.j2x(item, level + 1).val; + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, '', level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } } + return {attrStr: attrStr, val: val}; +}; - var c = 0; - for (var i = 0; i < a.length; i++) { - /*jshint bitwise:false */ - c |= a[i] ^ b[i]; // XOR +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); } - return c === 0; } -bufferEq.install = function() { - Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { - return bufferEq(this, that); - }; -}; +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ -var origBufEqual = Buffer.prototype.equal; -var origSlowBufEqual = SlowBuffer.prototype.equal; -bufferEq.restore = function() { - Buffer.prototype.equal = origBufEqual; - SlowBuffer.prototype.equal = origSlowBufEqual; + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { + +const EOL = "\n"; + +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; + + +/***/ }), + +/***/ 6072: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(8280); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 2821: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); }; +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; /***/ }), -/***/ 6763: +/***/ 5832: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/*! 
- * compressible - * Copyright(c) 2013 Jonathan Ong - * Copyright(c) 2014 Jeremiah Senkpiel - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ +///@ts-check + +const util = __nccwpck_require__(8280); +const xmlNode = __nccwpck_require__(7462); +const readDocType = __nccwpck_require__(6072); +const toNumber = __nccwpck_require__(4526); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} /** - * Module dependencies. 
- * @private + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} -var db = __nccwpck_require__(7426) +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const 
nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node 
+ if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( 
this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic /** - * Module variables. - * @private + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName */ - -var COMPRESSIBLE_TYPE_REGEXP = /^text\/|\+(?:json|text|xml)$/i -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} /** - * Module exports. - * @public + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, 
">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 2380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(2821); +const OrderedObjParser = __nccwpck_require__(5832); +const { prettify} = __nccwpck_require__(2882); +const validator = __nccwpck_require__(1739); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 2882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; -module.exports = compressible /** - * Checks if a type is compressible. 
- * - * @param {string} type - * @return {Boolean} compressible - * @public + * + * @param {array} node + * @param {any} options + * @returns */ +function prettify(node, options){ + return compress( node, options); +} -function compressible (type) { - if (!type || typeof type !== 'string') { - return false +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} - // strip parameters - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && match[1].toLowerCase() - var data = db[mime] +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} - // return database information - if (data && data.compressible !== undefined) { - return data.compressible +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } } +} - // fallback to regexp or unknown - return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; } +exports.prettify = prettify; + + +/***/ }), + +/***/ 7462: +/***/ ((module) => { + +"use strict"; -/***/ }), +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; -/***/ 8222: -/***/ ((module, exports, __nccwpck_require__) => { -/* eslint-env browser */ +module.exports = XmlNode; -/** - * This is the web browser implementation of `debug()`. - */ +/***/ }), -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.storage = localstorage(); -exports.destroy = (() => { - let warned = false; +/***/ 6129: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - return () => { - if (!warned) { - warned = true; - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } - }; -})(); +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +const url_1 = __nccwpck_require__(7310); +const util_1 = __nccwpck_require__(1980); +const extend_1 = __importDefault(__nccwpck_require__(8171)); /** - * Colors. + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. 
+ * + * @see {@link GaxiosError[Symbol.hasInstance]} */ - -exports.colors = [ - '#0000CC', - '#0000FF', - '#0033CC', - '#0033FF', - '#0066CC', - '#0066FF', - '#0099CC', - '#0099FF', - '#00CC00', - '#00CC33', - '#00CC66', - '#00CC99', - '#00CCCC', - '#00CCFF', - '#3300CC', - '#3300FF', - '#3333CC', - '#3333FF', - '#3366CC', - '#3366FF', - '#3399CC', - '#3399FF', - '#33CC00', - '#33CC33', - '#33CC66', - '#33CC99', - '#33CCCC', - '#33CCFF', - '#6600CC', - '#6600FF', - '#6633CC', - '#6633FF', - '#66CC00', - '#66CC33', - '#9900CC', - '#9900FF', - '#9933CC', - '#9933FF', - '#99CC00', - '#99CC33', - '#CC0000', - '#CC0033', - '#CC0066', - '#CC0099', - '#CC00CC', - '#CC00FF', - '#CC3300', - '#CC3333', - '#CC3366', - '#CC3399', - '#CC33CC', - '#CC33FF', - '#CC6600', - '#CC6633', - '#CC9900', - '#CC9933', - '#CCCC00', - '#CCCC33', - '#FF0000', - '#FF0033', - '#FF0066', - '#FF0099', - '#FF00CC', - '#FF00FF', - '#FF3300', - '#FF3333', - '#FF3366', - '#FF3399', - '#FF33CC', - '#FF33FF', - '#FF6600', - '#FF6633', - '#FF9900', - '#FF9933', - '#FFCC00', - '#FFCC33' -]; - +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. 
+ } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} /** - * Currently only WebKit-based Web Inspectors, Firefox >= v31, - * and the Firebug extension (any Firefox version) are known - * to support "%c" CSS customizations. + * An experimental error redactor. * - * TODO: add a `localStorage` variable to explicitly enable/disable colors + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/.test(text) || /assertion=/.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL(data.config.url || ''); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +exports.defaultErrorRedactor = defaultErrorRedactor; +//# sourceMappingURL=common.js.map -// eslint-disable-next-line complexity -function useColors() { - // NB: In an Electron preload script, document will be defined but not fully - // initialized. Since we know we're in Chrome, we'll just detect this case - // explicitly - if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { - return true; - } +/***/ }), - // Internet Explorer and Edge do not support colors. - if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } +/***/ 8133: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - // Is webkit? 
http://stackoverflow.com/a/16459606/376773 - // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 - return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || - // Is firebug? http://stackoverflow.com/a/398120/376773 - (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || - // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || - // Double check webkit in userAgent just in case we are in a worker - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(8171)); +const https_1 = __nccwpck_require__(5687); +const node_fetch_1 = __importDefault(__nccwpck_require__(467)); +const querystring_1 = __importDefault(__nccwpck_require__(3477)); +const is_stream_1 = __importDefault(__nccwpck_require__(1554)); +const url_1 = __nccwpck_require__(7310); +const common_1 = __nccwpck_require__(6129); +const retry_1 = __nccwpck_require__(1052); +const https_proxy_agent_1 = __nccwpck_require__(4522); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +let HttpsProxyAgent; +function loadProxy() { + var _a, _b, _c, _d; + const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? 
void 0 : _d.http_proxy); + if (proxy) { + HttpsProxyAgent = https_proxy_agent_1.HttpsProxyAgent; + } + return proxy; +} +loadProxy(); +function skipProxy(url) { + var _a; + const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy; + if (!noProxyEnv) { + return false; + } + const noProxyUrls = noProxyEnv.split(','); + const parsedURL = new url_1.URL(url); + return !!noProxyUrls.find(url => { + if (url.startsWith('*.') || url.startsWith('.')) { + url = url.replace(/^\*\./, '.'); + return parsedURL.hostname.endsWith(url); + } + else { + return url === parsedURL.origin || url === parsedURL.hostname; + } + }); +} +// Figure out if we should be using a proxy. Only if it's required, load +// the https-proxy-agent module as it adds startup cost. +function getProxy(url) { + // If there is a match between the no_proxy env variables and the url, then do not proxy + if (skipProxy(url)) { + return undefined; + // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy + } + else { + return loadProxy(); + } +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + this.agentCache = new Map(); + this.defaults = defaults || {}; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = this.validateOpts(opts); + return this._request(opts); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _a; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_a = err.config) === null || _a === void 0 ? 
void 0 : _a.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * Validates the options, and merges them with defaults. + * @param opts The original options passed from the client. + */ + validateOpts(options) { + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = getProxy(opts.url); + if (proxy) { + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + // Proxy is being used in conjunction with mTLS. 
+ if (opts.cert && opts.key) { + const parsedURL = new url_1.URL(proxy); + opts.agent = new HttpsProxyAgent({ + port: parsedURL.port, + host: parsedURL.host, + protocol: parsedURL.protocol, + cert: opts.cert, + key: opts.key, + }); + } + else { + opts.agent = new HttpsProxyAgent(proxy); + } + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS: + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } } +exports.Gaxios = Gaxios; +//# sourceMappingURL=gaxios.js.map -/** - * Colorize log arguments if enabled. - * - * @api public - */ - -function formatArgs(args) { - args[0] = (this.useColors ? '%c' : '') + - this.namespace + - (this.useColors ? ' %c' : ' ') + - args[0] + - (this.useColors ? 
'%c ' : ' ') + - '+' + module.exports.humanize(this.diff); - - if (!this.useColors) { - return; - } - - const c = 'color: ' + this.color; - args.splice(1, 0, c, 'color: inherit'); +/***/ }), - // The final "%c" is somewhat tricky, because there could be other - // arguments passed either before or after the %c, so we need to - // figure out the correct index to insert the CSS into - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, match => { - if (match === '%%') { - return; - } - index++; - if (match === '%c') { - // We only are interested in the *last* %c - // (the user may have provided their own) - lastC = index; - } - }); +/***/ 9555: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - args.splice(lastC, 0, c); -} +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +const gaxios_1 = __nccwpck_require__(8133); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(6129); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); /** - * Invokes `console.debug()` when available. - * No-op when `console.debug` is not a "function". - * If `console.debug` is not available, falls back - * to `console.log`. - * - * @api public + * The default instance used when the `request` method is directly + * invoked. */ -exports.log = console.debug || console.log || (() => {}); - +exports.instance = new gaxios_1.Gaxios(); /** - * Save `namespaces`. - * - * @param {String} namespaces - * @api private + * Make an HTTP request using the given options. + * @param opts Options for the request */ -function save(namespaces) { - try { - if (namespaces) { - exports.storage.setItem('debug', namespaces); - } else { - exports.storage.removeItem('debug'); - } - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +async function request(opts) { + return exports.instance.request(opts); } +exports.request = request; +//# sourceMappingURL=index.js.map -/** - * Load `namespaces`. - * - * @return {String} returns the previously persisted debug modes - * @api private - */ -function load() { - let r; - try { - r = exports.storage.getItem('debug'); - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } - - // If debug isn't set in LS, and we're in Electron, try to load $DEBUG - if (!r && typeof process !== 'undefined' && 'env' in process) { - r = process.env.DEBUG; - } +/***/ }), - return r; -} +/***/ 1052: +/***/ ((__unused_webpack_module, exports) => { -/** - * Localstorage attempts to return the localstorage. - * - * This is necessary because safari throws - * when a user disables cookies/localstorage - * and you attempt to access it. 
- * - * @return {LocalStorage} - * @api private - */ +"use strict"; -function localstorage() { - try { - // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context - // The Browser also has localStorage in the global context. - return localStorage; - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = void 0; +async function getRetryConfig(err) { + var _a; + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((2^c - 1 / 2) * 1000) + const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? 
config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; } - -module.exports = __nccwpck_require__(6243)(exports); - -const {formatters} = module.exports; - +exports.getRetryConfig = getRetryConfig; /** - * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. */ - -formatters.j = function (v) { - try { - return JSON.stringify(v); - } catch (error) { - return '[UnexpectedJSONParseError]: ' + error.message; - } -}; - - -/***/ }), - -/***/ 6243: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} /** - * This is the common logic for both the Node.js and web browser - * implementations of `debug()`. + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +//# sourceMappingURL=retry.js.map -function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = __nccwpck_require__(900); - createDebug.destroy = destroy; - - Object.keys(env).forEach(key => { - createDebug[key] = env[key]; - }); - - /** - * The currently active debug mode names, and names to skip. - */ - - createDebug.names = []; - createDebug.skips = []; - - /** - * Map of special "%n" handling functions, for the debug "format" argument. - * - * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
- */ - createDebug.formatters = {}; - - /** - * Selects a color for a debug namespace - * @param {String} namespace The namespace string for the debug instance to be colored - * @return {Number|String} An ANSI color code for the given namespace - * @api private - */ - function selectColor(namespace) { - let hash = 0; - - for (let i = 0; i < namespace.length; i++) { - hash = ((hash << 5) - hash) + namespace.charCodeAt(i); - hash |= 0; // Convert to 32bit integer - } - - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; - } - createDebug.selectColor = selectColor; - - /** - * Create a debugger with the given `namespace`. - * - * @param {String} namespace - * @return {Function} - * @api public - */ - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - - function debug(...args) { - // Disabled? - if (!debug.enabled) { - return; - } - - const self = debug; - - // Set `diff` timestamp - const curr = Number(new Date()); - const ms = curr - (prevTime || curr); - self.diff = ms; - self.prev = prevTime; - self.curr = curr; - prevTime = curr; - - args[0] = createDebug.coerce(args[0]); - - if (typeof args[0] !== 'string') { - // Anything else let's inspect with %O - args.unshift('%O'); - } - - // Apply any `formatters` transformations - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - // If we encounter an escaped % then don't increase the array index - if (match === '%%') { - return '%'; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === 'function') { - const val = args[index]; - match = formatter.call(self, val); - - // Now we need to remove `args[index]` since it's inlined in the `format` - args.splice(index, 1); - index--; - } - return match; - }); - - // Apply env-specific formatting (colors, etc.) - createDebug.formatArgs.call(self, args); - - const logFn = self.log || createDebug.log; - logFn.apply(self, args); - } - - debug.namespace = namespace; - debug.useColors = createDebug.useColors(); - debug.color = createDebug.selectColor(namespace); - debug.extend = extend; - debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. - - Object.defineProperty(debug, 'enabled', { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); - } - - return enabledCache; - }, - set: v => { - enableOverride = v; - } - }); - - // Env-specific initialization logic for debug instances - if (typeof createDebug.init === 'function') { - createDebug.init(debug); - } - - return debug; - } - - function extend(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; - } - - /** - * Enables a debug mode by namespaces. This can include modes - * separated by a colon and wildcards. - * - * @param {String} namespaces - * @api public - */ - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - - createDebug.names = []; - createDebug.skips = []; - - let i; - const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); - const len = split.length; - - for (i = 0; i < len; i++) { - if (!split[i]) { - // ignore empty strings - continue; - } - - namespaces = split[i].replace(/\*/g, '.*?'); - - if (namespaces[0] === '-') { - createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); - } else { - createDebug.names.push(new RegExp('^' + namespaces + '$')); - } - } - } - - /** - * Disable debug output. - * - * @return {String} namespaces - * @api public - */ - function disable() { - const namespaces = [ - ...createDebug.names.map(toNamespace), - ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) - ].join(','); - createDebug.enable(''); - return namespaces; - } - - /** - * Returns true if the given mode name is enabled, false otherwise. - * - * @param {String} name - * @return {Boolean} - * @api public - */ - function enabled(name) { - if (name[name.length - 1] === '*') { - return true; - } - - let i; - let len; - - for (i = 0, len = createDebug.skips.length; i < len; i++) { - if (createDebug.skips[i].test(name)) { - return false; - } - } +/***/ }), - for (i = 0, len = createDebug.names.length; i < len; i++) { - if (createDebug.names[i].test(name)) { - return true; - } - } +/***/ 1980: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return false; - } +"use strict"; - /** - * Convert regexp to namespace - * - * @param {RegExp} regxep - * @return {String} namespace - * @api private - */ - function toNamespace(regexp) { - return regexp.toString() - .substring(2, regexp.toString().length - 2) - .replace(/\.\*\?$/, '*'); - } +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6318); +//# sourceMappingURL=util.js.map - /** - * Coerce `val`. - * - * @param {Mixed} val - * @return {Mixed} - * @api private - */ - function coerce(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; - } +/***/ }), - /** - * XXX DO NOT USE. This is a temporary stub function. - * XXX It WILL be removed in the next major release. - */ - function destroy() { - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } +/***/ 9910: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - createDebug.enable(createDebug.load()); +"use strict"; - return createDebug; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); } - -module.exports = setup; - +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map /***/ }), -/***/ 8237: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 7863: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -/** - * Detect Electron renderer / nwjs process, which is node, but we should - * treat as a browser. - */ +"use strict"; -if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { - module.exports = __nccwpck_require__(8222); -} else { - module.exports = __nccwpck_require__(4874); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const http = __importStar(__nccwpck_require__(3685)); +__exportStar(__nccwpck_require__(9910), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + if (socket instanceof http.Agent) { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, cb); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 
'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } } - +exports.Agent = Agent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 4874: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 4522: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(1808)); +const tls = __importStar(__nccwpck_require__(4404)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const agent_base_1 = __nccwpck_require__(7863); +const url_1 = __nccwpck_require__(7310); +const parse_proxy_response_1 = __nccwpck_require__(7224); +const debug = (0, debug_1.default)('https-proxy-agent'); /** - * Module dependencies. + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + const servername = this.connectOpts.servername || this.connectOpts.host; + socket = tls.connect({ + ...this.connectOpts, + servername: servername && net.isIP(servername) ? undefined : servername, + }); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? undefined : servername, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. 
+ s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map -const tty = __nccwpck_require__(6224); -const util = __nccwpck_require__(3837); +/***/ }), -/** - * This is the Node.js implementation of `debug()`. - */ +/***/ 7224: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -exports.init = init; -exports.log = log; -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.destroy = util.deprecate( - () => {}, - 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' -); +"use strict"; -/** - * Colors. - */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + 
current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map -exports.colors = [6, 2, 3, 4, 5, 1]; +/***/ }), -try { - // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) - // eslint-disable-next-line import/no-extraneous-dependencies - const supportsColor = __nccwpck_require__(9318); +/***/ 1904: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } -} catch (error) { - // Swallow - we only care if `supports-color` is available; it doesn't have to be. -} +"use strict"; /** - * Build up the default `inspectOpts` object from the environment variables. + * Copyright 2022 Google LLC * - * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - -exports.inspectOpts = Object.keys(process.env).filter(key => { - return /^debug_/i.test(key); -}).reduce((obj, key) => { - // Camel-case - const prop = key - .substring(6) - .toLowerCase() - .replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); - - // Coerce string value into JS value - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === 'null') { - val = null; - } else { - val = Number(val); - } - - obj[prop] = val; - return obj; -}, {}); - +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); /** - * Is stdout a TTY? Colored output is enabled when `true`. + * Known paths unique to Google Compute Engine Linux instances */ - -function useColors() { - return 'colors' in exports.inspectOpts ? 
- Boolean(exports.inspectOpts.colors) : - tty.isatty(process.stderr.fd); +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; } - +exports.isGoogleCloudServerless = isGoogleCloudServerless; /** - * Adds ANSI color escape codes if enabled. + * Determines if the process is running on a Linux Google Compute Engine instance. * - * @api public - */ - -function formatArgs(args) { - const {namespace: name, useColors} = this; - - if (useColors) { - const c = this.color; - const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); - const prefix = ` ${colorCode};1m${name} \u001B[0m`; - - args[0] = prefix + args[0].split('\n').join('\n' + prefix); - args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); - } else { - args[0] = getDate() + name + ' ' + args[0]; - } -} - -function getDate() { - if (exports.inspectOpts.hideDate) { - return ''; - } - return new Date().toISOString() + ' '; -} - -/** - * Invokes `util.format()` with the specified arguments and writes to stderr. + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. */ - -function log(...args) { - return process.stderr.write(util.format(...args) + '\n'); +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } } - +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; /** - * Save `namespaces`. + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. * - * @param {String} namespaces - * @api private + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. */ -function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; - } else { - // If you set a process.env field to null or undefined, it gets cast to the - // string 'null' or 'undefined'. 
Just delete instead. - delete process.env.DEBUG; - } +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; } - +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; /** - * Load `namespaces`. + * Determines if the process is running on a Google Compute Engine instance. * - * @return {String} returns the previously persisted debug modes - * @api private + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. */ - -function load() { - return process.env.DEBUG; +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); } - +exports.isGoogleComputeEngine = isGoogleComputeEngine; /** - * Init logic for `debug` instances. + * Determines if the process is running on Google Cloud Platform. * - * Create a new `inspectOpts` object in case `useColors` is set - * differently for a particular `debug` instance. + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. */ - -function init(debug) { - debug.inspectOpts = {}; - - const keys = Object.keys(exports.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; - } +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); } +exports.detectGCPResidency = detectGCPResidency; +//# sourceMappingURL=gcp-residency.js.map -module.exports = __nccwpck_require__(6243)(exports); - -const {formatters} = module.exports; +/***/ }), -/** - * Map %o to `util.inspect()`, all on a single line. - */ +/***/ 3563: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -formatters.o = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts) - .split('\n') - .map(str => str.trim()) - .join(' '); -}; +"use strict"; /** - * Map %O to `util.inspect()`, allowing multiple lines if needed. + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT */ - -formatters.O = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); +exports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const jsonBigint = __nccwpck_require__(5031); +const gcp_residency_1 = __nccwpck_require__(1904); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 6599: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var stream = __nccwpck_require__(1642) -var eos = __nccwpck_require__(1205) -var inherits = __nccwpck_require__(4124) -var shift = __nccwpck_require__(6121) - -var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) - ? Buffer.from([0]) - : new Buffer([0]) - -var onuncork = function(self, fn) { - if (self._corked) self.once('uncork', fn) - else fn() -} - -var autoDestroy = function (self, err) { - if (self._autoDestroy) self.destroy(err) -} - -var destroyer = function(self, end) { - return function(err) { - if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) - else if (end && !self._ended) self.end() - } -} - -var end = function(ws, fn) { - if (!ws) return fn() - if (ws._writableState && ws._writableState.finished) return fn() - if (ws._writableState) return ws.end(fn) - ws.end() - fn() -} - -var noop = function() {} - -var toStreams2 = function(rs) { - return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) -} - -var Duplexify = function(writable, readable, opts) { - if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) - stream.Duplex.call(this, opts) - - this._writable = null - this._readable = null - this._readable2 = null - - this._autoDestroy = !opts || opts.autoDestroy !== false - this._forwardDestroy = !opts || opts.destroy !== false - this._forwardEnd = !opts || opts.end !== false - this._corked = 1 // start corked - this._ondrain = null - this._drained = false - this._forwarding = false - this._unwrite = null - this._unread = null - this._ended = false - - this.destroyed = false - - if (writable) this.setWritable(writable) - if (readable) this.setReadable(readable) -} - -inherits(Duplexify, stream.Duplex) - -Duplexify.obj = function(writable, readable, opts) { - if (!opts) opts = {} - opts.objectMode = true - opts.highWaterMark = 16 - return new Duplexify(writable, readable, opts) -} - -Duplexify.prototype.cork = function() { - if (++this._corked === 1) this.emit('cork') -} - -Duplexify.prototype.uncork = function() { - if (this._corked && --this._corked === 0) this.emit('uncork') -} - -Duplexify.prototype.setWritable = function(writable) { - if (this._unwrite) this._unwrite() - - if (this.destroyed) { - if (writable && writable.destroy) writable.destroy() - return - } - - if (writable === null || writable === false) { - this.end() - return - } - - var self = this - var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) - - var ondrain = function() { - var ondrain = self._ondrain - self._ondrain = null - if (ondrain) ondrain() - } - - var clear = function() { - self._writable.removeListener('drain', ondrain) - unend() - } - - if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks - - this._writable = writable - this._writable.on('drain', ondrain) - this._unwrite = clear - - this.uncork() // always uncork setWritable -} - -Duplexify.prototype.setReadable = function(readable) { - if (this._unread) this._unread() - - if (this.destroyed) { - if (readable && readable.destroy) readable.destroy() - return - } - - if (readable === null || readable === false) { - this.push(null) - this.resume() - return - } - - var self = this - var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) - - var onreadable = function() { - self._forward() - } - - var onend = function() { - self.push(null) - } - - var clear = function() { - self._readable2.removeListener('readable', onreadable) - self._readable2.removeListener('end', onend) - unend() - } - - this._drained = true - this._readable = readable - this._readable2 = readable._readableState ? 
readable : toStreams2(readable) - this._readable2.on('readable', onreadable) - this._readable2.on('end', onend) - this._unread = clear - - this._forward() -} - -Duplexify.prototype._read = function() { - this._drained = true - this._forward() + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; } - -Duplexify.prototype._forward = function() { - if (this._forwarding || !this._readable2 || !this._drained) return - this._forwarding = true - - var data - - while (this._drained && (data = shift(this._readable2)) !== null) { - if (this.destroyed) continue - this._drained = this.push(data) - } - - this._forwarding = false +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); } - -Duplexify.prototype.destroy = function(err, cb) { - if (!cb) cb = noop - if (this.destroyed) return cb(null) - this.destroyed = true - - var self = this - process.nextTick(function() { - self._destroy(err) - cb(null) - }) +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + try { + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const res = await requestMethod({ + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + } + else if (!res.data) { + throw new Error('Invalid response from the metadata service'); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; + } + catch (e) { + const err = e; + if (err.response && err.response.status !== 200) { + err.message = `Unsuccessful response status code. 
${err.message}`; + } + throw e; + } } - -Duplexify.prototype._destroy = function(err) { - if (err) { - var ondrain = this._ondrain - this._ondrain = null - if (ondrain) ondrain(err) - else this.emit('error', err) - } - - if (this._forwardDestroy) { - if (this._readable && this._readable.destroy) this._readable.destroy() - if (this._writable && this._writable.destroy) this._writable.destroy() - } - - this.emit('close') +async function fastFailMetadataRequest(options) { + const secondaryOptions = { + ...options, + url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. + // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); } - -Duplexify.prototype._write = function(data, enc, cb) { - if (this.destroyed) return - if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) - if (data === SIGNAL_FLUSH) return this._finish(cb) - if (!this._writable) return cb() - - if (this._writable.write(data) === false) this._ondrain = cb - else if (!this.destroyed) cb() +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); } - -Duplexify.prototype._finish = function(cb) { - var self = this - this.emit('preend') - onuncork(this, function() { - end(self._forwardEnd && self._writable, function() { - // haxx to not emit prefinish twice - if (self._writableState.prefinished === false) self._writableState.prefinished = true - self.emit('prefinish') - onuncork(self, cb) - }) - }) +exports.instance = instance; +/** + * Obtain metadata for the current GCP project. 
+ * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); } - -Duplexify.prototype.end = function(data, enc, cb) { - if (typeof data === 'function') return this.end(null, null, data) - if (typeof enc === 'function') return this.end(data, null, enc) - this._ended = true - if (data) this.write(data) - if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) - return stream.Writable.prototype.end.call(this, cb) +exports.project = project; +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe_domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); } - -module.exports = Duplexify - - -/***/ }), - -/***/ 1728: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var Buffer = (__nccwpck_require__(1867).Buffer); - -var getParamBytesForAlg = __nccwpck_require__(528); - -var MAX_OCTET = 0x80, - CLASS_UNIVERSAL = 0, - PRIMITIVE_BIT = 0x20, - TAG_SEQ = 0x10, - TAG_INT = 0x02, - ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), - ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); - -function base64Url(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); +exports.universe = universe; +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; } - -function signatureAsBuffer(signature) { - if (Buffer.isBuffer(signature)) { - return signature; - } else if ('string' === typeof signature) { - return Buffer.from(signature, 'base64'); - } - - throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +exports.bulk = bulk; +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? 
Number(process.env.DETECT_GCP_RETRIES) + : 0; } - -function derToJose(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); - - // the DER encoded param should at most be the param size, plus a padding - // zero, since due to being a signed integer - var maxEncodedParamLength = paramBytes + 1; - - var inputLength = signature.length; - - var offset = 0; - if (signature[offset++] !== ENCODED_TAG_SEQ) { - throw new Error('Could not find expected "seq"'); - } - - var seqLength = signature[offset++]; - if (seqLength === (MAX_OCTET | 1)) { - seqLength = signature[offset++]; - } - - if (inputLength - offset < seqLength) { - throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); - } - - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "r"'); - } - - var rLength = signature[offset++]; - - if (inputLength - offset - 2 < rLength) { - throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); - } - - if (maxEncodedParamLength < rLength) { - throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } - - var rOffset = offset; - offset += rLength; - - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "s"'); - } - - var sLength = signature[offset++]; - - if (inputLength - offset !== sLength) { - throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); - } - - if (maxEncodedParamLength < sLength) { - throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } - - var sOffset = offset; - offset += sLength; - - if (offset !== inputLength) { - throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); - } - - var rPadding = paramBytes - rLength, - sPadding = paramBytes - sLength; - - var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); - - for (offset = 0; offset < rPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); - - offset = paramBytes; - - for (var o = offset; offset < o + sPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); - - dst = dst.toString('base64'); - dst = base64Url(dst); - - return dst; +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. 
+ if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } } - -function countPadding(buf, start, stop) { - var padding = 0; - while (start + padding < stop && buf[start + padding] === 0) { - ++padding; - } - - var needsSign = buf[start + padding] >= MAX_OCTET; - if (needsSign) { - --padding; - } - - return padding; +exports.isAvailable = isAvailable; +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; } - -function joseToDer(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); - - var signatureBytes = signature.length; - if (signatureBytes !== paramBytes * 2) { - throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); - } - - var rPadding = countPadding(signature, 0, paramBytes); - var sPadding = countPadding(signature, paramBytes, signature.length); - var rLength = paramBytes - rPadding; - var sLength = paramBytes - sPadding; - - var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; - - var shortLength = rsBytes < MAX_OCTET; - - var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); - - var offset = 0; - dst[offset++] = ENCODED_TAG_SEQ; - if (shortLength) { - // Bit 8 has value "0" - // bits 7-1 give the length. - dst[offset++] = rsBytes; - } else { - // Bit 8 of first octet has value "1" - // bits 7-1 give the number of additional length octets. - dst[offset++] = MAX_OCTET | 1; - // length, base 256 - dst[offset++] = rsBytes & 0xff; - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = rLength; - if (rPadding < 0) { - dst[offset++] = 0; - offset += signature.copy(dst, offset, 0, paramBytes); - } else { - offset += signature.copy(dst, offset, rPadding, paramBytes); - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = sLength; - if (sPadding < 0) { - dst[offset++] = 0; - signature.copy(dst, offset, paramBytes); - } else { - signature.copy(dst, offset, paramBytes + sPadding); - } - - return dst; +exports.resetIsAvailableCache = resetIsAvailableCache; +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. 
+ * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; } - -module.exports = { - derToJose: derToJose, - joseToDer: joseToDer -}; - - -/***/ }), - -/***/ 528: -/***/ ((module) => { - -"use strict"; - - -function getParamSize(keySize) { - var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); - return result; +exports.getGCPResidency = getGCPResidency; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); } - -var paramBytesForAlg = { - ES256: getParamSize(256), - ES384: getParamSize(384), - ES512: getParamSize(521) -}; - -function getParamBytesForAlg(alg) { - var paramBytes = paramBytesForAlg[alg]; - if (paramBytes) { - return paramBytes; - } - - throw new Error('Unknown algorithm "' + alg + '"'); +exports.setGCPResidency = setGCPResidency; +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; } - -module.exports = getParamBytesForAlg; - +exports.requestTimeout = requestTimeout; +__exportStar(__nccwpck_require__(1904), exports); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 1205: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var once = __nccwpck_require__(1223); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); +/***/ 4627: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; +"use strict"; -module.exports = eos; +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(2361); +const transporters_1 = __nccwpck_require__(2649); +const util_1 = __nccwpck_require__(8905); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? 
_a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. + */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } +} +exports.AuthClient = AuthClient; /***/ }), -/***/ 5771: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var punycode = __nccwpck_require__(5477); -var entities = __nccwpck_require__(2077); +/***/ 1569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = decode; +"use strict"; -function decode (str) { - if (typeof str !== 'string') { - throw new TypeError('Expected a String'); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(1754); +const baseexternalclient_1 = __nccwpck_require__(7391); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. 
+ * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + this.environmentId = options.credential_source.environment_id; + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + this.regionUrl = options.credential_source.region_url; + // This is only required if AWS security credentials are not available in + // environment variables. + this.securityCredentialsUrl = options.credential_source.url; + this.regionalCredVerificationUrl = + options.credential_source.regional_cred_verification_url; + this.imdsV2SessionTokenUrl = + options.credential_source.imdsv2_session_token_url; + this.awsRequestSigner = null; + this.region = ''; + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); } - - return str.replace(/&(#?[^;\W]+;?)/g, function (_, match) { - var m; - if (m = /^#(\d+);?$/.exec(match)) { - return punycode.ucs2.encode([ parseInt(m[1], 10) ]); - } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) { - return punycode.ucs2.encode([ parseInt(m[1], 16) ]); - } else { - // named entity - var hasSemi = /;$/.test(match); - var withoutSemi = hasSemi ? match.replace(/;$/, '') : match; - var target = entities[withoutSemi] || (hasSemi && entities[match]); - - if (typeof target === 'number') { - return punycode.ucs2.encode([ target ]); - } else if (typeof target === 'string') { - return target; - } else { - return '&' + match; + validateEnvironmentId() { + var _a; + const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this uses a serialized AWS signed request to the STS GetCallerIdentity + * endpoint. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + * @return A promise that resolves with the external subject token. 
+ */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + const metadataHeaders = {}; + // Only retrieve the IMDSv2 session token if both the security credentials and region are + // not retrievable through the environment. + // The credential config contains all the URLs by default but clients may be running this + // where the metadata server is not available and returning the credentials through the environment. + // Removing this check may break them. + if (this.shouldUseMetadataServer() && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await this.getImdsV2SessionToken(); } + this.region = await this.getAwsRegion(metadataHeaders); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (this.securityCredentialsFromEnv) { + return this.securityCredentialsFromEnv; + } + // Since the role on a VM can change, we don't need to cache it. + const roleName = await this.getAwsRoleName(metadataHeaders); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + }, this.region); } - }); + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. + const options = await this.awsRequestSigner.getRequestOptions({ + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } + /** + * @return A promise that resolves with the IMDSv2 Session Token. + */ + async getImdsV2SessionToken() { + const opts = { + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the current AWS region. 
+ */ + async getAwsRegion(headers) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (this.regionFromEnv) { + return this.regionFromEnv; + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ + async getAwsRoleName(headers) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ + async getAwsSecurityCredentials(roleName, headers) { + const response = await this.transporter.request({ + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; + } + shouldUseMetadataServer() { + // The metadata server must be used when either the AWS region or AWS security + // credentials cannot be retrieved through their defined environment variables. + return !this.regionFromEnv || !this.securityCredentialsFromEnv; + } + get regionFromEnv() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. + return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); + } + get securityCredentialsFromEnv() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; + } } +exports.AwsClient = AwsClient; +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; /***/ }), -/***/ 6521: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var punycode = __nccwpck_require__(5477); -var revEntities = __nccwpck_require__(3123); +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(8043); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? 
{ 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. + * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} -module.exports = encode; -function encode (str, opts) { - if (typeof str !== 'string') { - throw new TypeError('Expected a String'); - } - if (!opts) opts = {}; +/***/ }), - var numeric = true; - if (opts.named) numeric = false; - if (opts.numeric !== undefined) numeric = opts.numeric; +/***/ 7391: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var special = opts.special || { - '"': true, "'": true, - '<': true, '>': true, - '&': true - }; +"use strict"; - var codePoints = punycode.ucs2.decode(str); - var chars = []; - for (var i = 0; i < codePoints.length; i++) { - var cc = codePoints[i]; - var c = punycode.ucs2.encode([ cc ]); - var e = revEntities[cc]; - if (e && (cc >= 127 || special[c]) && !numeric) { - chars.push('&' + (/;$/.test(e) ? e : e + ';')); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(2781); +const authclient_1 = __nccwpck_require__(4627); +const sts = __nccwpck_require__(6308); +const util_1 = __nccwpck_require__(8905); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. + * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(1402); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(4627); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super({ ...options, ...additionalOptions }); + const opts = (0, util_1.originalOrCamelOptions)(options); + if (opts.get('type') !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); } - else if (cc < 32 || cc >= 127 || special[c]) { - chars.push('&#' + cc + ';'); + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = opts.get('token_url'); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; } else { - chars.push(c); + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; } + this.projectNumber = this.getProjectNumber(this.audience); + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. 
+ */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. 
+ */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. 
+ this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedAccessToken; + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? 
this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; } - return chars.join(''); } +exports.BaseExternalAccountClient = BaseExternalAccountClient; /***/ }), -/***/ 1151: +/***/ 6875: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.encode = __nccwpck_require__(6521); -exports.decode = __nccwpck_require__(5771); +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const gcpMetadata = __nccwpck_require__(3563); +const oauth2client_1 = __nccwpck_require__(3936); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. 
+ */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; /***/ }), -/***/ 4697: -/***/ ((module, exports) => { +/***/ 6270: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/** - * @author Toru Nagashima - * @copyright 2015 Toru Nagashima. All rights reserved. - * See LICENSE file in root directory for full license. - */ - +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); - -/** - * @typedef {object} PrivateData - * @property {EventTarget} eventTarget The event target. - * @property {{type:string}} event The original event object. - * @property {number} eventPhase The current event phase. - * @property {EventTarget|null} currentTarget The current event target. - * @property {boolean} canceled The flag to prevent default. - * @property {boolean} stopped The flag to stop propagation. - * @property {boolean} immediateStopped The flag to stop propagation immediately. - * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. - * @property {number} timeStamp The unix time. - * @private - */ - +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(2781); +const authclient_1 = __nccwpck_require__(4627); +const sts = __nccwpck_require__(6308); /** - * Private data for event wrappers. - * @type {WeakMap} - * @private + * The required token exchange grant_type: rfc8693#section-2.1 */ -const privateData = new WeakMap(); - +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; /** - * Cache for wrapper classes. 
- * @type {WeakMap} - * @private + * The requested token exchange requested_token_type: rfc8693#section-2.1 */ -const wrappers = new WeakMap(); - +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** - * Get private data. - * @param {Event} event The event object to get private data. - * @returns {PrivateData} The private data of the event. - * @private + * The requested token exchange subject_token_type: rfc8693#section-2.1 */ -function pd(event) { - const retv = privateData.get(event); - console.assert( - retv != null, - "'this' is expected an Event object, but got", - event - ); - return retv -} - +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** - * https://dom.spec.whatwg.org/#set-the-canceled-flag - * @param data {PrivateData} private data. + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. */ -function setCancelFlag(data) { - if (data.passiveListener != null) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error( - "Unable to preventDefault inside passive event listener invocation.", - data.passiveListener - ); - } - return - } - if (!data.event.cancelable) { - return - } - - data.canceled = true; - if (typeof data.event.preventDefault === "function") { - data.event.preventDefault(); - } -} - +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; /** - * @see https://dom.spec.whatwg.org/#interface-event - * @private + * Offset to take into account network delays and server clock skews. */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; /** - * The event wrapper. - * @constructor - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Event|{type:string}} event The original event to wrap. + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. */ -function Event(eventTarget, event) { - privateData.set(this, { - eventTarget, - event, - eventPhase: 2, - currentTarget: eventTarget, - canceled: false, - stopped: false, - immediateStopped: false, - passiveListener: null, - timeStamp: event.timeStamp || Date.now(), - }); - - // https://heycam.github.io/webidl/#Unforgeable - Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); - - // Define accessors - const keys = Object.keys(event); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in this)) { - Object.defineProperty(this, key, defineRedirectDescriptor(key)); - } - } -} - -// Should be enumerable, but class methods are not enumerable. -Event.prototype = { - /** - * The type of this event. - * @type {string} - */ - get type() { - return pd(this).event.type - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get target() { - return pd(this).eventTarget - }, - - /** - * The target of this event. 
- * @type {EventTarget} - */ - get currentTarget() { - return pd(this).currentTarget - }, - +class DownscopedClient extends authclient_1.AuthClient { /** - * @returns {EventTarget[]} The composed path of this event. + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. */ - composedPath() { - const currentTarget = pd(this).currentTarget; - if (currentTarget == null) { - return [] + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); } - return [currentTarget] - }, - - /** - * Constant of NONE. - * @type {number} - */ - get NONE() { - return 0 - }, - - /** - * Constant of CAPTURING_PHASE. - * @type {number} - */ - get CAPTURING_PHASE() { - return 1 - }, - - /** - * Constant of AT_TARGET. - * @type {number} - */ - get AT_TARGET() { - return 2 - }, - - /** - * Constant of BUBBLING_PHASE. - * @type {number} - */ - get BUBBLING_PHASE() { - return 3 - }, - - /** - * The target of this event. - * @type {number} - */ - get eventPhase() { - return pd(this).eventPhase - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopPropagation() { - const data = pd(this); - - data.stopped = true; - if (typeof data.event.stopPropagation === "function") { - data.event.stopPropagation(); + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); } - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopImmediatePropagation() { - const data = pd(this); - - data.stopped = true; - data.immediateStopped = true; - if (typeof data.event.stopImmediatePropagation === "function") { - data.event.stopImmediatePropagation(); + // Check at least one permission should be defined in each Access Boundary + // Rule. 
+ for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } } - }, - - /** - * The flag to be bubbling. - * @type {boolean} - */ - get bubbles() { - return Boolean(pd(this).event.bubbles) - }, - - /** - * The flag to be cancelable. - * @type {boolean} - */ - get cancelable() { - return Boolean(pd(this).event.cancelable) - }, - - /** - * Cancel this event. - * @returns {void} - */ - preventDefault() { - setCancelFlag(pd(this)); - }, - + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } /** - * The flag to indicate cancellation state. - * @type {boolean} + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. */ - get defaultPrevented() { - return pd(this).canceled - }, - + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } /** - * The flag to be composed. - * @type {boolean} + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } */ - get composed() { - return Boolean(pd(this).event.composed) - }, - + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } /** - * The unix time of this event. - * @type {number} + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. 
*/ - get timeStamp() { - return pd(this).timeStamp - }, - + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } /** - * The target of this event. - * @type {EventTarget} - * @deprecated + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. */ - get srcElement() { - return pd(this).eventTarget - }, - + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. 
+ this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } /** - * The flag to stop event bubbling. - * @type {boolean} - * @deprecated + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. */ - get cancelBubble() { - return pd(this).stopped - }, - set cancelBubble(value) { - if (!value) { - return - } - const data = pd(this); - - data.stopped = true; - if (typeof data.event.cancelBubble === "boolean") { - data.event.cancelBubble = true; - } - }, + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; - /** - * The flag to indicate cancellation state. - * @type {boolean} - * @deprecated - */ - get returnValue() { - return !pd(this).canceled - }, - set returnValue(value) { - if (!value) { - setCancelFlag(pd(this)); - } - }, - /** - * Initialize this event object. But do nothing under event dispatching. - * @param {string} type The event type. - * @param {boolean} [bubbles=false] The flag to be possible to bubble up. - * @param {boolean} [cancelable=false] The flag to be possible to cancel. - * @deprecated - */ - initEvent() { - // Do nothing. - }, -}; +/***/ }), -// `constructor` is not enumerable. -Object.defineProperty(Event.prototype, "constructor", { - value: Event, - configurable: true, - writable: true, -}); +/***/ 1380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// Ensure `event instanceof window.Event` is `true`. -if (typeof window !== "undefined" && typeof window.Event !== "undefined") { - Object.setPrototypeOf(Event.prototype, window.Event.prototype); +"use strict"; - // Make association for wrappers. - wrappers.set(window.Event.prototype, Event); +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEnv = exports.clear = exports.GCPEnv = void 0; +const gcpMetadata = __nccwpck_require__(3563); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; } - -/** - * Get the property descriptor to redirect a given property. 
- * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to redirect the property. - * @private - */ -function defineRedirectDescriptor(key) { - return { - get() { - return pd(this).event[key] - }, - set(value) { - pd(this).event[key] = value; - }, - configurable: true, - enumerable: true, +exports.clear = clear; +async function getEnv() { + if (envPromise) { + return envPromise; } + envPromise = getEnvMemoized(); + return envPromise; } - -/** - * Get the property descriptor to call a given method property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to call the method property. - * @private - */ -function defineCallDescriptor(key) { - return { - value() { - const event = pd(this).event; - return event[key].apply(event, arguments) - }, - configurable: true, - enumerable: true, +exports.getEnv = getEnv; +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); } - /** - * Define new wrapper class. - * @param {Function} BaseEvent The base wrapper class. - * @param {Object} proto The prototype of the original event. - * @returns {Function} The defined wrapper class. - * @private + * This check only verifies that the environment is running knative. + * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. */ -function defineWrapper(BaseEvent, proto) { - const keys = Object.keys(proto); - if (keys.length === 0) { - return BaseEvent +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; } - - /** CustomEvent */ - function CustomEvent(eventTarget, event) { - BaseEvent.call(this, eventTarget, event); + catch (e) { + return false; } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} - CustomEvent.prototype = Object.create(BaseEvent.prototype, { - constructor: { value: CustomEvent, configurable: true, writable: true }, - }); - // Define accessors. - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in BaseEvent.prototype)) { - const descriptor = Object.getOwnPropertyDescriptor(proto, key); - const isFunc = typeof descriptor.value === "function"; - Object.defineProperty( - CustomEvent.prototype, - key, - isFunc - ? defineCallDescriptor(key) - : defineRedirectDescriptor(key) - ); - } - } +/***/ }), - return CustomEvent -} +/***/ 8749: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; /** - * Get the wrapper class of a given prototype. - * @param {Object} proto The prototype of the original event to get its wrapper. - * @returns {Function} The wrapper class. - * @private + * Defines the response of a 3rd party executable run by the pluggable auth client. */ -function getWrapper(proto) { - if (proto == null || proto === Object.prototype) { - return Event +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. + if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. 
+ if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } } - - let wrapper = wrappers.get(proto); - if (wrapper == null) { - wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); - wrappers.set(proto, wrapper); + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); } - return wrapper -} - -/** - * Wrap a given event to management a dispatching. - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Object} event The event to wrap. - * @returns {Event} The wrapper instance. - * @private - */ -function wrapEvent(eventTarget, event) { - const Wrapper = getWrapper(Object.getPrototypeOf(event)); - return new Wrapper(eventTarget, event) } - +exports.ExecutableResponse = ExecutableResponse; /** - * Get the immediateStopped flag of a given event. - * @param {Event} event The event to get. - * @returns {boolean} The flag to stop propagation immediately. - * @private + * An error thrown by the ExecutableResponse class. */ -function isStopped(event) { - return pd(event).immediateStopped +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } } - +exports.ExecutableResponseError = ExecutableResponseError; /** - * Set the current event phase of a given event. - * @param {Event} event The event to set current target. - * @param {number} eventPhase New event phase. - * @returns {void} - * @private + * An error thrown when the 'version' field in an executable response is missing or invalid. */ -function setEventPhase(event, eventPhase) { - pd(event).eventPhase = eventPhase; +class InvalidVersionFieldError extends ExecutableResponseError { } - +exports.InvalidVersionFieldError = InvalidVersionFieldError; /** - * Set the current target of a given event. - * @param {Event} event The event to set current target. - * @param {EventTarget|null} currentTarget New current target. - * @returns {void} - * @private + * An error thrown when the 'success' field in an executable response is missing or invalid. */ -function setCurrentTarget(event, currentTarget) { - pd(event).currentTarget = currentTarget; +class InvalidSuccessFieldError extends ExecutableResponseError { } - +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; /** - * Set a passive listener of a given event. - * @param {Event} event The event to set current target. - * @param {Function|null} passiveListener New passive listener. - * @returns {void} - * @private + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. 
*/ -function setPassiveListener(event, passiveListener) { - pd(event).passiveListener = passiveListener; +class InvalidExpirationTimeFieldError extends ExecutableResponseError { } - -/** - * @typedef {object} ListenerNode - * @property {Function} listener - * @property {1|2|3} listenerType - * @property {boolean} passive - * @property {boolean} once - * @property {ListenerNode|null} next - * @private - */ - -/** - * @type {WeakMap>} - * @private - */ -const listenersMap = new WeakMap(); - -// Listener types -const CAPTURE = 1; -const BUBBLE = 2; -const ATTRIBUTE = 3; - +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; /** - * Check whether a given value is an object or not. - * @param {any} x The value to check. - * @returns {boolean} `true` if the value is an object. + * An error thrown when the 'token_type' field in an executable response is missing or invalid. */ -function isObject(x) { - return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +class InvalidTokenTypeFieldError extends ExecutableResponseError { } - +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; /** - * Get listeners. - * @param {EventTarget} eventTarget The event target to get. - * @returns {Map} The listeners. - * @private + * An error thrown when the 'code' field in an executable response is missing or invalid. */ -function getListeners(eventTarget) { - const listeners = listenersMap.get(eventTarget); - if (listeners == null) { - throw new TypeError( - "'this' is expected an EventTarget object, but got another value." - ) - } - return listeners +class InvalidCodeFieldError extends ExecutableResponseError { } - +exports.InvalidCodeFieldError = InvalidCodeFieldError; /** - * Get the property descriptor for the event attribute of a given event. - * @param {string} eventName The event name to get property descriptor. - * @returns {PropertyDescriptor} The property descriptor. - * @private + * An error thrown when the 'message' field in an executable response is missing or invalid. */ -function defineEventAttributeDescriptor(eventName) { - return { - get() { - const listeners = getListeners(this); - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - return node.listener - } - node = node.next; - } - return null - }, - - set(listener) { - if (typeof listener !== "function" && !isObject(listener)) { - listener = null; // eslint-disable-line no-param-reassign - } - const listeners = getListeners(this); - - // Traverse to the tail while removing old value. - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - // Remove old value. - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - node = node.next; - } - - // Add new value. - if (listener !== null) { - const newNode = { - listener, - listenerType: ATTRIBUTE, - passive: false, - once: false, - next: null, - }; - if (prev === null) { - listeners.set(eventName, newNode); - } else { - prev.next = newNode; - } - } - }, - configurable: true, - enumerable: true, - } +class InvalidMessageFieldError extends ExecutableResponseError { } - +exports.InvalidMessageFieldError = InvalidMessageFieldError; /** - * Define an event attribute (e.g. `eventTarget.onclick`). 
- * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. - * @param {string} eventName The event name to define. - * @returns {void} + * An error thrown when the subject token in an executable response is missing or invalid. */ -function defineEventAttribute(eventTargetPrototype, eventName) { - Object.defineProperty( - eventTargetPrototype, - `on${eventName}`, - defineEventAttributeDescriptor(eventName) - ); +class InvalidSubjectTokenError extends ExecutableResponseError { } +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; -/** - * Define a custom EventTarget with event attributes. - * @param {string[]} eventNames Event names for event attributes. - * @returns {EventTarget} The custom EventTarget. - * @private - */ -function defineCustomEventTarget(eventNames) { - /** CustomEventTarget */ - function CustomEventTarget() { - EventTarget.call(this); - } - CustomEventTarget.prototype = Object.create(EventTarget.prototype, { - constructor: { - value: CustomEventTarget, - configurable: true, - writable: true, - }, - }); +/***/ }), - for (let i = 0; i < eventNames.length; ++i) { - defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); - } +/***/ 8765: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return CustomEventTarget -} +"use strict"; -/** - * EventTarget. - * - * - This is constructor if no arguments. - * - This is a function which returns a CustomEventTarget constructor if there are arguments. - * - * For example: - * - * class A extends EventTarget {} - * class B extends EventTarget("message") {} - * class C extends EventTarget("message", "error") {} - * class D extends EventTarget(["message", "error"]) {} +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(4627); +const oauth2common_1 = __nccwpck_require__(9510); +const gaxios_1 = __nccwpck_require__(9555); +const stream = __nccwpck_require__(2781); +const baseexternalclient_1 = __nccwpck_require__(7391); +/** + * The credentials JSON file type for external account authorized user clients. */ -function EventTarget() { - /*eslint-disable consistent-return */ - if (this instanceof EventTarget) { - listenersMap.set(this, new Map()); - return - } - if (arguments.length === 1 && Array.isArray(arguments[0])) { - return defineCustomEventTarget(arguments[0]) +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. 
+ * @param transporter The transporter to use for the refresh request.
+ * @param clientAuthentication The client authentication credentials to use
+ * for the refresh request.
+ */
+ constructor(url, transporter, clientAuthentication) {
+ super(clientAuthentication);
+ this.url = url;
+ this.transporter = transporter;
+ }
+ /**
+ * Requests a new access token from the token_url endpoint using the provided
+ * refresh token.
+ * @param refreshToken The refresh token to use to generate a new access token.
+ * @param additionalHeaders Optional additional headers to pass along the
+ * request.
+ * @return A promise that resolves with the token refresh response containing
+ * the requested access token and its expiration time.
+ */
+ async refreshToken(refreshToken, additionalHeaders) {
+ const values = new URLSearchParams({
+ grant_type: 'refresh_token',
+ refresh_token: refreshToken,
+ });
+ const headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ ...additionalHeaders,
+ };
+ const opts = {
+ url: this.url,
+ method: 'POST',
+ headers,
+ data: values.toString(),
+ responseType: 'json',
+ };
+ // Apply OAuth client authentication.
+ this.applyClientAuthenticationOptions(opts);
+ try {
+ const response = await this.transporter.request(opts);
+ // Successful response.
+ const tokenRefreshResponse = response.data;
+ tokenRefreshResponse.res = response;
+ return tokenRefreshResponse;
+ }
+ catch (error) {
+ // Translate error to OAuthError.
+ if (error instanceof gaxios_1.GaxiosError && error.response) {
+ throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, 
+ // Preserve other fields from the original error.
+ error);
+ }
+ // Request could fail before the server responds.
+ throw error;
+ }
+ }
+}
+/**
+ * External Account Authorized User Client. This is used for OAuth2 credentials
+ * sourced using external identities through Workforce Identity Federation.
+ * Obtaining the initial access and refresh token can be done through the
+ * Google Cloud CLI.
+ */
+class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient {
+ /**
+ * Instantiates an ExternalAccountAuthorizedUserClient instance using the
+ * provided JSON object loaded from a credentials file.
+ * An error is thrown if the credential is not valid.
+ * @param options The external account authorized user option object typically
+ * from the external account authorized user JSON credential file.
+ * @param additionalOptions **DEPRECATED, all options are available in the
+ * `options` parameter.** Optional additional behavior customization options.
+ * These currently customize expiration threshold time and whether to retry
+ * on 401/403 API request errors.
*/ - addEventListener(eventName, listener, options) { - if (listener == null) { - return + constructor(options, additionalOptions) { + super({ ...options, ...additionalOptions }); + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler(options.token_url, this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; } - if (typeof listener !== "function" && !isObject(listener)) { - throw new TypeError("'listener' should be a function or an object.") + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; } - - const listeners = getListeners(this); - const optionsIsObj = isObject(options); - const capture = optionsIsObj - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - const newNode = { - listener, - listenerType, - passive: optionsIsObj && Boolean(options.passive), - once: optionsIsObj && Boolean(options.once), - next: null, + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, }; - - // Set it as the first node if the first node is null. - let node = listeners.get(eventName); - if (node === undefined) { - listeners.set(eventName, newNode); - return + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); } - - // Traverse to the tail while checking duplication.. - let prev = null; - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - // Should ignore duplication. - return - } - prev = node; - node = node.next; + else { + return this.requestAsync(opts); } - - // Add it. - prev.next = newNode; - }, - + } /** - * Remove a given listener from this event target. - * @param {string} eventName The event name to remove. - * @param {Function} listener The listener to remove. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. */ - removeEventListener(eventName, listener, options) { - if (listener == null) { - return + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); } - - const listeners = getListeners(this); - const capture = isObject(options) - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); } - return } - - prev = node; - node = node.next; + throw e; } - }, - + return response; + } /** - * Dispatch a given event. - * @param {Event|{type:string}} event The event to dispatch. - * @returns {boolean} `false` if canceled. + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. */ - dispatchEvent(event) { - if (event == null || typeof event.type !== "string") { - throw new TypeError('"event.type" should be a string.') - } - - // If listeners aren't registered, terminate. - const listeners = getListeners(this); - const eventName = event.type; - let node = listeners.get(eventName); - if (node == null) { - return true - } - - // Since we cannot rewrite several properties, so wrap object. - const wrappedEvent = wrapEvent(this, event); - - // This doesn't process capturing phase and bubbling phase. - // This isn't participating in a tree. - let prev = null; - while (node != null) { - // Remove this listener if it's once - if (node.once) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - // Call this listener - setPassiveListener( - wrappedEvent, - node.passive ? 
node.listener : null - ); - if (typeof node.listener === "function") { - try { - node.listener.call(this, wrappedEvent); - } catch (err) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error(err); - } - } - } else if ( - node.listenerType !== ATTRIBUTE && - typeof node.listener.handleEvent === "function" - ) { - node.listener.handleEvent(wrappedEvent); - } - - // Break if `event.stopImmediatePropagation` was called. - if (isStopped(wrappedEvent)) { - break - } - - node = node.next; + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; } - setPassiveListener(wrappedEvent, null); - setEventPhase(wrappedEvent, 0); - setCurrentTarget(wrappedEvent, null); - - return !wrappedEvent.defaultPrevented - }, -}; - -// `constructor` is not enumerable. -Object.defineProperty(EventTarget.prototype, "constructor", { - value: EventTarget, - configurable: true, - writable: true, -}); - -// Ensure `eventTarget instanceof window.EventTarget` is `true`. -if ( - typeof window !== "undefined" && - typeof window.EventTarget !== "undefined" -) { - Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } } - -exports.defineEventAttribute = defineEventAttribute; -exports.EventTarget = EventTarget; -exports["default"] = EventTarget; - -module.exports = EventTarget -module.exports.EventTarget = module.exports["default"] = EventTarget -module.exports.defineEventAttribute = defineEventAttribute -//# sourceMappingURL=event-target-shim.js.map - - -/***/ }), - -/***/ 8171: -/***/ ((module) => { - -"use strict"; - - -var hasOwn = Object.prototype.hasOwnProperty; -var toStr = Object.prototype.toString; -var defineProperty = Object.defineProperty; -var gOPD = Object.getOwnPropertyDescriptor; - -var isArray = function isArray(arr) { - if (typeof Array.isArray === 'function') { - return Array.isArray(arr); - } - - return toStr.call(arr) === '[object Array]'; -}; - -var isPlainObject = function isPlainObject(obj) { - if (!obj || toStr.call(obj) !== '[object Object]') { - return false; - } - - var hasOwnConstructor = hasOwn.call(obj, 'constructor'); - var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); - // Not own constructor property must be Object - if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { - return false; - } - - // Own properties are enumerated firstly, so to speed up, - // if last one is own, then all properties are own. 
- var key; - for (key in obj) { /**/ } - - return typeof key === 'undefined' || hasOwn.call(obj, key); -}; - -// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target -var setProperty = function setProperty(target, options) { - if (defineProperty && options.name === '__proto__') { - defineProperty(target, options.name, { - enumerable: true, - configurable: true, - value: options.newValue, - writable: true - }); - } else { - target[options.name] = options.newValue; - } -}; - -// Return undefined instead of __proto__ if '__proto__' is not an own property -var getProperty = function getProperty(obj, name) { - if (name === '__proto__') { - if (!hasOwn.call(obj, name)) { - return void 0; - } else if (gOPD) { - // In early versions of node, obj['__proto__'] is buggy when obj has - // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. - return gOPD(obj, name).value; - } - } - - return obj[name]; -}; - -module.exports = function extend() { - var options, name, src, copy, copyIsArray, clone; - var target = arguments[0]; - var i = 1; - var length = arguments.length; - var deep = false; - - // Handle a deep copy situation - if (typeof target === 'boolean') { - deep = target; - target = arguments[1] || {}; - // skip the boolean and the target - i = 2; - } - if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { - target = {}; - } - - for (; i < length; ++i) { - options = arguments[i]; - // Only deal with non-null/undefined values - if (options != null) { - // Extend the base object - for (name in options) { - src = getProperty(target, name); - copy = getProperty(options, name); - - // Prevent never-ending loop - if (target !== copy) { - // Recurse if we're merging plain objects or arrays - if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { - if (copyIsArray) { - copyIsArray = false; - clone = src && isArray(src) ? src : []; - } else { - clone = src && isPlainObject(src) ? 
src : {}; - } - - // Never move original objects, clone them - setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); - - // Don't bring in undefined values - } else if (typeof copy !== 'undefined') { - setProperty(target, { name: name, newValue: copy }); - } - } - } - } - } - - // Return the modified object - return target; -}; - - -/***/ }), - -/***/ 1917: -/***/ (function() { - -(function(scope) {'use strict'; -function B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.byteOffset,r.byteLength),f.toString(e)}var w=function(r){return Buffer.from(r)};function h(r){for(var e=0,f=Math.min(256*256,r.length+1),n=new Uint16Array(f),i=[],o=0;;){var t=e=f-1){var s=n.subarray(0,o),m=s;if(i.push(String.fromCharCode.apply(null,m)),!t)return i.join("");r=r.subarray(e),e=0,o=0}var a=r[e++];if((a&128)===0)n[o++]=a;else if((a&224)===192){var d=r[e++]&63;n[o++]=(a&31)<<6|d}else if((a&240)===224){var d=r[e++]&63,l=r[e++]&63;n[o++]=(a&31)<<12|d<<6|l}else if((a&248)===240){var d=r[e++]&63,l=r[e++]&63,R=r[e++]&63,c=(a&7)<<18|d<<12|l<<6|R;c>65535&&(c-=65536,n[o++]=c>>>10&1023|55296,c=56320|c&1023),n[o++]=c}}}function F(r){for(var e=0,f=r.length,n=0,i=Math.max(32,f+(f>>>1)+7),o=new Uint8Array(i>>>3<<3);e=55296&&t<=56319){if(e=55296&&t<=56319)continue}if(n+4>o.length){i+=8,i*=1+e/r.length*2,i=i>>>3<<3;var m=new Uint8Array(i);m.set(o),o=m}if((t&4294967168)===0){o[n++]=t;continue}else if((t&4294965248)===0)o[n++]=t>>>6&31|192;else if((t&4294901760)===0)o[n++]=t>>>12&15|224,o[n++]=t>>>6&63|128;else if((t&4292870144)===0)o[n++]=t>>>18&7|240,o[n++]=t>>>12&63|128,o[n++]=t>>>6&63|128;else continue;o[n++]=t&63|128}return o.slice?o.slice(0,n):o.subarray(0,n)}var u="Failed to ",p=function(r,e,f){if(r)throw new Error("".concat(u).concat(e,": the '").concat(f,"' option is unsupported."))};var x=typeof Buffer=="function"&&Buffer.from;var A=x?w:F;function v(){this.encoding="utf-8"}v.prototype.encode=function(r,e){return p(e&&e.stream,"encode","stream"),A(r)};function U(r){var e;try{var f=new Blob([r],{type:"text/plain;charset=UTF-8"});e=URL.createObjectURL(f);var n=new XMLHttpRequest;return n.open("GET",e,!1),n.send(),n.responseText}finally{e&&URL.revokeObjectURL(e)}}var O=!x&&typeof Blob=="function"&&typeof URL=="function"&&typeof URL.createObjectURL=="function",S=["utf-8","utf8","unicode-1-1-utf-8"],T=h;x?T=B:O&&(T=function(r){try{return U(r)}catch(e){return h(r)}});var y="construct 'TextDecoder'",E="".concat(u," ").concat(y,": the ");function g(r,e){p(e&&e.fatal,y,"fatal"),r=r||"utf-8";var f;if(x?f=Buffer.isEncoding(r):f=S.indexOf(r.toLowerCase())!==-1,!f)throw new RangeError("".concat(E," encoding label provided ('").concat(r,"') is invalid."));this.encoding=r,this.fatal=!1,this.ignoreBOM=!1}g.prototype.decode=function(r,e){p(e&&e.stream,"decode","stream");var f;return r instanceof Uint8Array?f=r:r.buffer instanceof ArrayBuffer?f=new Uint8Array(r.buffer):f=new Uint8Array(r),T(f,this.encoding)};scope.TextEncoder=scope.TextEncoder||v;scope.TextDecoder=scope.TextDecoder||g; -}(typeof window !== 'undefined' ? window : (typeof global !== 'undefined' ? 
global : this))); +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; /***/ }), -/***/ 6129: -/***/ ((__unused_webpack_module, exports) => { +/***/ 4381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2021 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -25614,945 +19900,1106 @@ function B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.by // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GaxiosError = void 0; -/* eslint-disable @typescript-eslint/no-explicit-any */ -class GaxiosError extends Error { - constructor(message, options, response) { - super(message); - this.response = response; - this.config = options; - this.code = response.status.toString(); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(7391); +const identitypoolclient_1 = __nccwpck_require__(117); +const awsclient_1 = __nccwpck_require__(1569); +const pluggable_auth_client_1 = __nccwpck_require__(4782); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? 
void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } } } -exports.GaxiosError = GaxiosError; -//# sourceMappingURL=common.js.map +exports.ExternalAccountClient = ExternalAccountClient; + /***/ }), -/***/ 8133: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 695: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2019 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Gaxios = void 0; -const extend_1 = __importDefault(__nccwpck_require__(8171)); -const https_1 = __nccwpck_require__(5687); -const node_fetch_1 = __importDefault(__nccwpck_require__(467)); -const querystring_1 = __importDefault(__nccwpck_require__(3477)); -const is_stream_1 = __importDefault(__nccwpck_require__(1554)); -const url_1 = __nccwpck_require__(7310); -const common_1 = __nccwpck_require__(6129); -const retry_1 = __nccwpck_require__(1052); -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fetch = hasFetch() ? window.fetch : node_fetch_1.default; -function hasWindow() { - return typeof window !== 'undefined' && !!window; -} -function hasFetch() { - return hasWindow() && !!window.fetch; -} -function hasBuffer() { - return typeof Buffer !== 'undefined'; -} -function hasHeader(options, header) { - return !!getHeader(options, header); -} -function getHeader(options, header) { - header = header.toLowerCase(); - for (const key of Object.keys((options === null || options === void 0 ? 
void 0 : options.headers) || {})) { - if (header === key.toLowerCase()) { - return options.headers[key]; +exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(2081); +const fs = __nccwpck_require__(7147); +const gcpMetadata = __nccwpck_require__(3563); +const os = __nccwpck_require__(2037); +const path = __nccwpck_require__(1017); +const crypto_1 = __nccwpck_require__(8043); +const transporters_1 = __nccwpck_require__(2649); +const computeclient_1 = __nccwpck_require__(6875); +const idtokenclient_1 = __nccwpck_require__(298); +const envDetect_1 = __nccwpck_require__(1380); +const jwtclient_1 = __nccwpck_require__(3959); +const refreshclient_1 = __nccwpck_require__(8790); +const impersonated_1 = __nccwpck_require__(1103); +const externalclient_1 = __nccwpck_require__(4381); +const baseexternalclient_1 = __nccwpck_require__(7391); +const authclient_1 = __nccwpck_require__(4627); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(8765); +const util_1 = __nccwpck_require__(8905); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +const GoogleAuthExceptionMessages = { + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisify unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. + * + * @param opts + */ + constructor(opts) { + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + this.clientOptions = {}; + opts = opts || {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.jsonContent = opts.credentials || null; + this.clientOptions = opts.clientOptions || {}; + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; } } - return undefined; -} -let HttpsProxyAgent; -function loadProxy() { - var _a, _b, _c, _d; - const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) || - ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? 
void 0 : _b.https_proxy) || - ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) || - ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy); - if (proxy) { - HttpsProxyAgent = __nccwpck_require__(7219); + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; } - return proxy; -} -loadProxy(); -function skipProxy(url) { - var _a; - const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy; - if (!noProxyEnv) { - return false; + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } } - const noProxyUrls = noProxyEnv.split(','); - const parsedURL = new url_1.URL(url); - return !!noProxyUrls.find(url => { - if (url.startsWith('*.') || url.startsWith('.')) { - url = url.replace(/^\*\./, '.'); - return parsedURL.hostname.endsWith(url); + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /* + * A private method for finding and caching a projectId. + * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; } else { - return url === parsedURL.origin || url === parsedURL.hostname; + throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); } - }); -} -// Figure out if we should be using a proxy. Only if it's required, load -// the https-proxy-agent module as it adds startup cost. 
-function getProxy(url) { - // If there is a match between the no_proxy env variables and the url, then do not proxy - if (skipProxy(url)) { - return undefined; - // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy } - else { - return loadProxy(); + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe_domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. + if (this.cachedCredential) { + return await this.prepareAndCacheADC(this.cachedCredential); + } + // Since this is a 'new' ADC to cache we will use the environment variable + // if it's available. We prefer this value over the value from ADC. + const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. 
+ credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + // set universe domain for Compute client + if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) { + options.universeDomain = + await this.getUniverseDomainFromMetadataServer(); + } + options.scopes = this.getAnyScopes(); + return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); + } + throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); + } + async prepareAndCacheADC(credential, quotaProjectIdOverride) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. 
+ if (location) {
+ location = path.join(location, 'gcloud', 'application_default_credentials.json');
+ if (!fs.existsSync(location)) {
+ location = null;
+ }
+ }
+ // The file does not exist.
+ if (!location) {
+ return null;
+ }
+ // The file seems to exist. Try to use it.
+ const client = await this._getApplicationCredentialsFromFilePath(location, options);
+ return client;
+ }
+ /**
+ * Attempts to load default credentials from a file at the given path.
+ * @param filePath The path to the file to read.
+ * @returns Promise that resolves with the OAuth2Client
+ * @api private
+ */
+ async _getApplicationCredentialsFromFilePath(filePath, options = {}) {
+ // Make sure the path looks like a string.
+ if (!filePath || filePath.length === 0) {
+ throw new Error('The file path is invalid.');
+ }
+ // Make sure there is a file at the path. lstatSync will throw if there is
+ // nothing there.
+ try {
+ // Resolve path to actual file in case of symlink. Expect a thrown error
+ // if not resolvable.
+ filePath = fs.realpathSync(filePath);
+ if (!fs.lstatSync(filePath).isFile()) {
+ throw new Error();
+ }
+ }
+ catch (err) {
+ if (err instanceof Error) {
+ err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`;
+ }
+ throw err;
+ }
+ // Now open a read stream on the file, and parse it.
+ const readStream = fs.createReadStream(filePath);
+ return this.fromStream(readStream, options);
+ }
+ /**
+ * Create a credentials instance using the given impersonated input options.
+ * @param json The impersonated input object.
+ * @returns JWT or UserRefresh Client with data
+ */
+ fromImpersonatedJSON(json) {
+ var _a, _b, _c, _d, _e;
+ if (!json) {
+ throw new Error('Must pass in a JSON object containing an impersonated refresh token');
+ }
+ if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {
+ throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`);
+ }
+ if (!json.source_credentials) {
+ throw new Error('The incoming JSON object does not contain a source_credentials field');
+ }
+ if (!json.service_account_impersonation_url) {
+ throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field');
+ }
+ // Create source client for impersonation
+ const sourceClient = new refreshclient_1.UserRefreshClient();
+ sourceClient.fromJSON(json.source_credentials);
+ if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) {
+ /**
+ * Prevents DOS attacks.
+ * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85}
+ **/
+ throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`);
+ }
+ // Extract service account from service_account_impersonation_url
+ const targetPrincipal = (_c = (_b = /(?<target>[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target;
+ if (!targetPrincipal) {
+ throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`);
+ }
+ const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : [];
+ const client = new impersonated_1.Impersonated({
+ ...json,
+ delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [],
+ sourceClient: sourceClient,
+ targetPrincipal: targetPrincipal,
+ targetScopes: Array.isArray(targetScopes) ? 
targetScopes : [targetScopes], + }); + return client; + } + /** + * Create a credentials instance using the given input options. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. + this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(s); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. 
+ * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options) { + options = options || {}; + const client = new jwtclient_1.JWT(options); + client.fromAPIKey(apiKey); + return client; } -} -class Gaxios { /** - * The Gaxios class is responsible for making HTTP requests. - * @param defaults The default set of options to be used for this instance. + * Determines whether the current operating system is Windows. + * @api private */ - constructor(defaults) { - this.agentCache = new Map(); - this.defaults = defaults || {}; + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; } /** - * Perform an HTTP request with the given options. - * @param opts Set of HTTP options that will be used for this HTTP request. + * Run the Google Cloud SDK command that prints the default project ID */ - async request(opts = {}) { - opts = this.validateOpts(opts); - return this._request(opts); + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); } - async _defaultAdapter(opts) { - const fetchImpl = opts.fetchImplementation || fetch; - const res = (await fetchImpl(opts.url, opts)); - const data = await this.getResponseData(opts, res); - return this.translateResponse(opts, res, data); + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); } /** - * Internal, retryable version of the `request` method. - * @param opts Set of HTTP options that will be used for this HTTP request. + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private */ - async _request(opts = {}) { - try { - let translatedResponse; - if (opts.adapter) { - translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); - } - else { - translatedResponse = await this._defaultAdapter(opts); - } - if (!opts.validateStatus(translatedResponse.status)) { - throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); - } - return translatedResponse; + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; } - catch (e) { - const err = e; - err.config = opts; - const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); - if (shouldRetry && config) { - err.config.retryConfig.currentRetryAttempt = - config.retryConfig.currentRetryAttempt; - return this._request(err.config); + // Ensure the projectId is loaded from the keyFile if available. 
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; } - throw err; + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; } } - async getResponseData(opts, res) { - switch (opts.responseType) { - case 'stream': - return res.body; - case 'json': { - let data = await res.text(); - try { - data = JSON.parse(data); - } - catch (_a) { - // continue - } - return data; - } - case 'arraybuffer': - return res.arrayBuffer(); - case 'blob': - return res.blob(); - default: - return res.text(); + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); } /** - * Validates the options, and merges them with defaults. - * @param opts The original options passed from the client. + * Gets the Compute Engine project ID if it can be inferred. */ - validateOpts(options) { - const opts = (0, extend_1.default)(true, {}, this.defaults, options); - if (!opts.url) { - throw new Error('URL is required.'); + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; } - // baseUrl has been deprecated, remove in 2.0 - const baseUrl = opts.baseUrl || opts.baseURL; - if (baseUrl) { - opts.url = baseUrl + opts.url; + catch (e) { + // Ignore any errors + return null; } - opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; - if (opts.params && Object.keys(opts.params).length > 0) { - let additionalQueryParams = opts.paramsSerializer(opts.params); - if (additionalQueryParams.startsWith('?')) { - additionalQueryParams = additionalQueryParams.slice(1); - } - const prefix = opts.url.includes('?') ? '&' : '?'; - opts.url = opts.url + prefix + additionalQueryParams; + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); } - if (typeof options.maxContentLength === 'number') { - opts.size = options.maxContentLength; + else { + return this.getCredentialsAsync(); } - if (typeof options.maxRedirects === 'number') { - opts.follow = options.maxRedirects; + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; } - opts.headers = opts.headers || {}; - if (opts.data) { - const isFormData = typeof FormData === 'undefined' - ? false - : (opts === null || opts === void 0 ? 
void 0 : opts.data) instanceof FormData; - if (is_stream_1.default.readable(opts.data)) { - opts.body = opts.data; - } - else if (hasBuffer() && Buffer.isBuffer(opts.data)) { - // Do not attempt to JSON.stringify() a Buffer: - opts.body = opts.data; - if (!hasHeader(opts, 'Content-Type')) { - opts.headers['Content-Type'] = 'application/json'; - } - } - else if (typeof opts.data === 'object') { - // If www-form-urlencoded content type has been set, but data is - // provided as an object, serialize the content using querystring: - if (!isFormData) { - if (getHeader(opts, 'content-type') === - 'application/x-www-form-urlencoded') { - opts.body = opts.paramsSerializer(opts.data); - } - else { - // } else if (!(opts.data instanceof FormData)) { - if (!hasHeader(opts, 'Content-Type')) { - opts.headers['Content-Type'] = 'application/json'; - } - opts.body = JSON.stringify(opts.data); - } - } - } - else { - opts.body = opts.data; + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; } } - opts.validateStatus = opts.validateStatus || this.validateStatus; - opts.responseType = opts.responseType || 'json'; - if (!opts.headers['Accept'] && opts.responseType === 'json') { - opts.headers['Accept'] = 'application/json'; + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; } - opts.method = opts.method || 'GET'; - const proxy = getProxy(opts.url); - if (proxy) { - if (this.agentCache.has(proxy)) { - opts.agent = this.agentCache.get(proxy); - } - else { - // Proxy is being used in conjunction with mTLS. - if (opts.cert && opts.key) { - const parsedURL = new url_1.URL(proxy); - opts.agent = new HttpsProxyAgent({ - port: parsedURL.port, - host: parsedURL.host, - protocol: parsedURL.protocol, - cert: opts.cert, - key: opts.key, - }); - } - else { - opts.agent = new HttpsProxyAgent(proxy); - } - this.agentCache.set(proxy, opts.agent); - } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; } - else if (opts.cert && opts.key) { - // Configure client for mTLS: - if (this.agentCache.has(opts.key)) { - opts.agent = this.agentCache.get(opts.key); + throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. 
+ */ + async getClient() { + if (!this.cachedCredential) { + if (this.jsonContent) { + this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + await this.fromStreamAsync(stream, this.clientOptions); } else { - opts.agent = new https_1.Agent({ - cert: opts.cert, - key: opts.key, - }); - this.agentCache.set(opts.key, opts.agent); + await this.getApplicationDefaultAsync(this.clientOptions); } } - return opts; + return this.cachedCredential; } /** - * By default, throw for any non-2xx status code - * @param status status code from the HTTP response + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. */ - validateStatus(status) { - return status >= 200 && status < 300; + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } /** - * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) - * @param params key value pars to encode + * Automatically obtain application default credentials, and return + * an access token for making requests. */ - paramsSerializer(params) { - return querystring_1.default.stringify(params); - } - translateResponse(opts, res, data) { - // headers need to be converted from a map to an obj - const headers = {}; - res.headers.forEach((value, key) => { - headers[key] = value; - }); - return { - config: opts, - data: data, - headers, - status: res.status, - statusText: res.statusText, - // XMLHttpRequestLike - request: { - responseURL: res.url, - }, - }; - } -} -exports.Gaxios = Gaxios; -//# sourceMappingURL=gaxios.js.map - -/***/ }), - -/***/ 9555: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2018 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; -const gaxios_1 = __nccwpck_require__(8133); -Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); -var common_1 = __nccwpck_require__(6129); -Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); -/** - * The default instance used when the `request` method is directly - * invoked. - */ -exports.instance = new gaxios_1.Gaxios(); -/** - * Make an HTTP request using the given options. 
- * @param opts Options for the request - */ -async function request(opts) { - return exports.instance.request(opts); -} -exports.request = request; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 1052: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2018 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRetryConfig = void 0; -async function getRetryConfig(err) { - var _a; - let config = getConfig(err); - if (!err || !err.config || (!config && !err.config.retry)) { - return { shouldRetry: false }; - } - config = config || {}; - config.currentRetryAttempt = config.currentRetryAttempt || 0; - config.retry = - config.retry === undefined || config.retry === null ? 3 : config.retry; - config.httpMethodsToRetry = config.httpMethodsToRetry || [ - 'GET', - 'HEAD', - 'PUT', - 'OPTIONS', - 'DELETE', - ]; - config.noResponseRetries = - config.noResponseRetries === undefined || config.noResponseRetries === null - ? 2 - : config.noResponseRetries; - // If this wasn't in the list of status codes where we want - // to automatically retry, return. - const retryRanges = [ - // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes - // 1xx - Retry (Informational, request still processing) - // 2xx - Do not retry (Success) - // 3xx - Do not retry (Redirect) - // 4xx - Do not retry (Client errors) - // 429 - Retry ("Too Many Requests") - // 5xx - Retry (Server errors) - [100, 199], - [429, 429], - [500, 599], - ]; - config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; - // Put the config back into the err - err.config.retryConfig = config; - // Determine if we should retry the request - const shouldRetryFn = config.shouldRetry || shouldRetryRequest; - if (!(await shouldRetryFn(err))) { - return { shouldRetry: false, config: err.config }; - } - // Calculate time to wait with exponential backoff. - // If this is the first retry, look for a configured retryDelay. - const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; - // Formula: retryDelay + ((2^c - 1 / 2) * 1000) - const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; - // We're going to retry! Incremenent the counter. - err.config.retryConfig.currentRetryAttempt += 1; - // Create a promise that invokes the retry after the backOffDelay - const backoff = config.retryBackoff - ? 
config.retryBackoff(err, delay) - : new Promise(resolve => { - setTimeout(resolve, delay); - }); - // Notify the user if they added an `onRetryAttempt` handler - if (config.onRetryAttempt) { - config.onRetryAttempt(err); + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; } - // Return the promise in which recalls Gaxios to retry the request - await backoff; - return { shouldRetry: true, config: err.config }; -} -exports.getRetryConfig = getRetryConfig; -/** - * Determine based on config if we should retry the request. - * @param err The GaxiosError passed to the interceptor. - */ -function shouldRetryRequest(err) { - const config = getConfig(err); - // node-fetch raises an AbortError if signaled: - // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal - if (err.name === 'AbortError') { - return false; + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); } - // If there's no config, or retries are disabled, return. - if (!config || config.retry === 0) { - return false; + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; } - // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) - if (!err.response && - (config.currentRetryAttempt || 0) >= config.noResponseRetries) { - return false; + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); } - // Only retry with configured HttpMethods. - if (!err.config.method || - config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { - return false; + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); } - // If this wasn't in the list of status codes where we want - // to automatically retry, return. - if (err.response && err.response.status) { - let isInRange = false; - for (const [min, max] of config.statusCodesToRetry) { - const status = err.response.status; - if (status >= min && status <= max) { - isInRange = true; - break; - } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; } - if (!isInRange) { - return false; + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); } + return this.signBlob(crypto, creds.client_email, data, endpoint); } - // If we are out of retry attempts, return - config.currentRetryAttempt = config.currentRetryAttempt || 0; - if (config.currentRetryAttempt >= config.retry) { - return false; + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + }); + return res.data.signedBlob; } - return true; } +exports.GoogleAuth = GoogleAuth; /** - * Acquire the raxConfig object from an GaxiosError if available. - * @param err The Gaxios error with a config object. + * Export DefaultTransporter as a static property of the class. */ -function getConfig(err) { - if (err && err.config && err.config.retryConfig) { - return err.config.retryConfig; - } - return; -} -//# sourceMappingURL=retry.js.map +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; + /***/ }), -/***/ 1904: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9735: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; -const fs_1 = __nccwpck_require__(7147); -const os_1 = __nccwpck_require__(2037); -/** - * Known paths unique to Google Compute Engine Linux instances - */ -exports.GCE_LINUX_BIOS_PATHS = { - BIOS_DATE: '/sys/class/dmi/id/bios_date', - BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', -}; -const GCE_MAC_ADDRESS_REGEX = /^42:01/; -/** - * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). - * - * Uses the: - * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. - * - * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. - */ -function isGoogleCloudServerless() { +exports.IAMAuth = void 0; +class IAMAuth { /** - * `CLOUD_RUN_JOB` is used for Cloud Run Jobs - * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - * `FUNCTION_NAME` is used in older Cloud Functions environments: - * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * IAM credentials. * - * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: - * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + * @param selector the iam authority selector + * @param token the token + * @constructor */ - const isGFEnvironment = process.env.CLOUD_RUN_JOB || - process.env.FUNCTION_NAME || - process.env.K_SERVICE; - return !!isGFEnvironment; -} -exports.isGoogleCloudServerless = isGoogleCloudServerless; -/** - * Determines if the process is running on a Linux Google Compute Engine instance. - * - * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. - */ -function isGoogleComputeEngineLinux() { - if ((0, os_1.platform)() !== 'linux') - return false; - try { - // ensure this file exist - (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); - // ensure this file exist and matches - const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); - return /Google/.test(biosVendor); - } - catch (_a) { - return false; + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; } -} -exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; -/** - * Determines if the process is running on a Google Compute Engine instance with a known - * MAC address. - * - * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. - */ -function isGoogleComputeEngineMACAddress() { - const interfaces = (0, os_1.networkInterfaces)(); - for (const item of Object.values(interfaces)) { - if (!item) - continue; - for (const { mac } of item) { - if (GCE_MAC_ADDRESS_REGEX.test(mac)) { - return true; - } - } + /** + * Acquire the HTTP headers required to make an authenticated request. 
+ */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; } - return false; -} -exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; -/** - * Determines if the process is running on a Google Compute Engine instance. - * - * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. - */ -function isGoogleComputeEngine() { - return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); -} -exports.isGoogleComputeEngine = isGoogleComputeEngine; -/** - * Determines if the process is running on Google Cloud Platform. - * - * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. - */ -function detectGCPResidency() { - return isGoogleCloudServerless() || isGoogleComputeEngine(); } -exports.detectGCPResidency = detectGCPResidency; -//# sourceMappingURL=gcp-residency.js.map +exports.IAMAuth = IAMAuth; + /***/ }), -/***/ 3563: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 117: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. - * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.requestTimeout = exports.setGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.project = exports.instance = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const jsonBigint = __nccwpck_require__(5031); -const gcp_residency_1 = __nccwpck_require__(1904); -exports.BASE_PATH = '/computeMetadata/v1'; -exports.HOST_ADDRESS = 'http://169.254.169.254'; -exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; -exports.HEADER_NAME = 'Metadata-Flavor'; -exports.HEADER_VALUE = 'Google'; -exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +exports.IdentityPoolClient = void 0; +const fs = __nccwpck_require__(7147); +const util_1 = __nccwpck_require__(3837); +const baseexternalclient_1 = __nccwpck_require__(7391); +const util_2 = __nccwpck_require__(8905); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); /** - * Returns the base URL while taking into account the GCE_METADATA_HOST - * environment variable if it exists. - * - * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. */ -function getBaseUrl(baseUrl) { - if (!baseUrl) { - baseUrl = - process.env.GCE_METADATA_IP || - process.env.GCE_METADATA_HOST || - exports.HOST_ADDRESS; - } - // If no scheme is provided default to HTTP: - if (!/^https?:\/\//.test(baseUrl)) { - baseUrl = `http://${baseUrl}`; - } - return new URL(exports.BASE_PATH, baseUrl).href; -} -// Accepts an options object passed from the user to the API. In previous -// versions of the API, it referred to a `Request` or an `Axios` request -// options object. Now it refers to an object with very limited property -// names. This is here to help ensure users don't pass invalid options when -// they upgrade from 0.4 to 0.5 to 0.8. -function validate(options) { - Object.keys(options).forEach(key => { - switch (key) { - case 'params': - case 'property': - case 'headers': - break; - case 'qs': - throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); - default: - throw new Error(`'${key}' is not a valid configuration option.`); +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. 
+ * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_2.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const credentialSourceOpts = (0, util_2.originalOrCamelOptions)(credentialSource); + this.file = credentialSourceOpts.get('file'); + this.url = credentialSourceOpts.get('url'); + this.headers = credentialSourceOpts.get('headers'); + if (this.file && this.url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); } - }); -} -async function metadataAccessor(type, options, noResponseRetries = 3, fastFail = false) { - options = options || {}; - if (typeof options === 'string') { - options = { property: options }; - } - let property = ''; - if (typeof options === 'object' && options.property) { - property = '/' + options.property; - } - validate(options); - try { - const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; - const res = await requestMethod({ - url: `${getBaseUrl()}/${type}${property}`, - headers: Object.assign({}, exports.HEADERS, options.headers), - retryConfig: { noResponseRetries }, - params: options.params, - responseType: 'text', - timeout: requestTimeout(), - }); - // NOTE: node.js converts all incoming headers to lower case. - if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { - throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + else if (this.file && !this.url) { + this.credentialSourceType = 'file'; } - else if (!res.data) { - throw new Error('Invalid response from the metadata service'); + else if (!this.file && this.url) { + this.credentialSourceType = 'url'; } - if (typeof res.data === 'string') { - try { - return jsonBigint.parse(res.data); - } - catch (_a) { - /* ignore */ - } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + const formatOpts = (0, util_2.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. + this.formatType = formatOpts.get('type') || 'text'; + this.formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (this.formatType !== 'json' && this.formatType !== 'text') { + throw new Error(`Invalid credential_source format "${this.formatType}"`); + } + if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); } - return res.data; } - catch (e) { - const err = e; - if (err.response && err.response.status !== 200) { - err.message = `Unsuccessful response status code. ${err.message}`; + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this either retrieves the local credential from a file location (k8s + * workload) or by sending a GET request to a local metadata server (Azure + * workloads). + * @return A promise that resolves with the external subject token. 
+ */ + async retrieveSubjectToken() { + if (this.file) { + return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName); } - throw e; + return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers); } -} -async function fastFailMetadataRequest(options) { - const secondaryOptions = { - ...options, - url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), - }; - // We race a connection between DNS/IP to metadata server. There are a couple - // reasons for this: - // - // 1. the DNS is slow in some GCP environments; by checking both, we might - // detect the runtime environment signficantly faster. - // 2. we can't just check the IP, which is tarpitted and slow to respond - // on a user's local machine. - // - // Additional logic has been added to make sure that we don't create an - // unhandled rejection in scenarios where a failure happens sometime - // after a success. - // - // Note, however, if a failure happens prior to a success, a rejection should - // occur, this is for folks running locally. - // - let responded = false; - const r1 = (0, gaxios_1.request)(options) - .then(res => { - responded = true; - return res; - }) - .catch(err => { - if (responded) { - return r2; + /** + * Looks up the external subject token in the file path provided and + * resolves with that token. + * @param file The file path where the external credential is located. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @return A promise that resolves with the external subject token. + */ + async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = await realpath(filePath); + if (!(await lstat(filePath)).isFile()) { + throw new Error(); + } } - else { - responded = true; + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } throw err; } - }); - const r2 = (0, gaxios_1.request)(secondaryOptions) - .then(res => { - responded = true; - return res; - }) - .catch(err => { - if (responded) { - return r1; + let subjectToken; + const rawText = await readFile(filePath, { encoding: 'utf8' }); + if (formatType === 'text') { + subjectToken = rawText; } - else { - responded = true; - throw err; + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[formatSubjectTokenFieldName]; } - }); - return Promise.race([r1, r2]); -} -/** - * Obtain metadata for the current GCE instance - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function instance(options) { - return metadataAccessor('instance', options); -} -exports.instance = instance; -/** - * Obtain metadata for the current GCP Project. - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function project(options) { - return metadataAccessor('project', options); -} -exports.project = project; -/* - * How many times should we retry detecting GCP environment. 
- */ -function detectGCPAvailableRetries() { - return process.env.DETECT_GCP_RETRIES - ? Number(process.env.DETECT_GCP_RETRIES) - : 0; -} -let cachedIsAvailableResponse; -/** - * Determine if the metadata server is currently available. - */ -async function isAvailable() { - try { - // If a user is instantiating several GCP libraries at the same time, - // this may result in multiple calls to isAvailable(), to detect the - // runtime environment. We use the same promise for each of these calls - // to reduce the network load. - if (cachedIsAvailableResponse === undefined) { - cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), - // If the default HOST_ADDRESS has been overridden, we should not - // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in - // a non-GCP environment): - !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); } - await cachedIsAvailableResponse; - return true; + return subjectToken; } - catch (e) { - const err = e; - if (process.env.DEBUG_AUTH) { - console.info(err); - } - if (err.type === 'request-timeout') { - // If running in a GCP environment, metadata endpoint should return - // within ms. - return false; - } - if (err.response && err.response.status === 404) { - return false; - } - else { - if (!(err.response && err.response.status === 404) && - // A warning is emitted if we see an unexpected err.code, or err.code - // is not populated: - (!err.code || - ![ - 'EHOSTDOWN', - 'EHOSTUNREACH', - 'ENETUNREACH', - 'ENOENT', - 'ENOTFOUND', - 'ECONNREFUSED', - ].includes(err.code))) { - let code = 'UNKNOWN'; - if (err.code) - code = err.code; - process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); - } - // Failure to resolve the metadata service means that it is not available. - return false; + /** + * Sends a GET request to the URL provided and resolves with the returned + * external subject token. + * @param url The URL to call to retrieve the subject token. This is typically + * a local metadata server. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @param headers The optional additional headers to send with the request to + * the metadata server url. + * @return A promise that resolves with the external subject token. + */ + async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) { + const opts = { + url, + method: 'GET', + headers, + responseType: formatType, + }; + let subjectToken; + if (formatType === 'text') { + const response = await this.transporter.request(opts); + subjectToken = response.data; } + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const response = await this.transporter.request(opts); + subjectToken = response.data[formatSubjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; } } -exports.isAvailable = isAvailable; -/** - * reset the memoized isAvailable() lookup. - */ -function resetIsAvailableCache() { - cachedIsAvailableResponse = undefined; -} -exports.resetIsAvailableCache = resetIsAvailableCache; -/** - * A cache for the detected GCP Residency. 
- */ -exports.gcpResidencyCache = null; -/** - * Sets the detected GCP Residency. - * Useful for forcing metadata server detection behavior. - * - * Set `null` to autodetect the environment (default behavior). - */ -function setGCPResidency(value = null) { - exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); -} -exports.setGCPResidency = setGCPResidency; -/** - * Obtain the timeout for requests to the metadata server. - * - * In certain environments and conditions requests can take longer than - * the default timeout to complete. This function will determine the - * appropriate timeout based on the environment. - * - * @returns {number} a request timeout duration in milliseconds. - */ -function requestTimeout() { - // Detecting the residency can be resource-intensive. Let's cache the result. - if (exports.gcpResidencyCache === null) { - exports.gcpResidencyCache = (0, gcp_residency_1.detectGCPResidency)(); - } - return exports.gcpResidencyCache ? 0 : 3000; -} -exports.requestTimeout = requestTimeout; -__exportStar(__nccwpck_require__(1904), exports); -//# sourceMappingURL=index.js.map +exports.IdentityPoolClient = IdentityPoolClient; + /***/ }), -/***/ 4627: +/***/ 298: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2012 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26566,53 +21013,235 @@ __exportStar(__nccwpck_require__(1904), exports); // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AuthClient = void 0; -const events_1 = __nccwpck_require__(2361); -const transporters_1 = __nccwpck_require__(2649); -class AuthClient extends events_1.EventEmitter { - constructor() { - super(...arguments); - this.transporter = new transporters_1.DefaultTransporter(); - this.credentials = {}; - this.eagerRefreshThresholdMillis = 5 * 60 * 1000; - this.forceRefreshOnFailure = false; +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } } +} +exports.IdTokenClient = IdTokenClient; + + +/***/ }), + +/***/ 1103: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +const gaxios_1 = __nccwpck_require__(9555); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { /** - * Sets the auth credentials. + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. 
+ * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. */ - setCredentials(credentials) { - this.credentials = credentials; + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; } /** - * Append additional headers, e.g., x-goog-user-project, shared across the - * classes inheriting AuthClient. This method should be used by any method - * that overrides getRequestMetadataAsync(), which is a shared helper for - * setting request information in both gRPC and HTTP API calls. + * Signs some bytes. * - * @param headers object to append additional headers to. + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * @return denoting the keyyID and signedBlob in base64 string */ - addSharedMetadataHeaders(headers) { - // quota_project_id, stored in application_default_credentials.json, is set in - // the x-goog-user-project header, to indicate an alternate account for - // billing and quota: - if (!headers['x-goog-user-project'] && // don't override a value the user sets. - this.quotaProjectId) { - headers['x-goog-user-project'] = this.quotaProjectId; + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. 
+ * @param refreshToken Unused parameter + */ + async refreshToken(refreshToken) { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; } - return headers; + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data.token; } } -exports.AuthClient = AuthClient; -//# sourceMappingURL=authclient.js.map +exports.Impersonated = Impersonated; + /***/ }), -/***/ 1569: +/***/ 8740: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2015 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26626,254 +21255,193 @@ exports.AuthClient = AuthClient; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AwsClient = void 0; -const awsrequestsigner_1 = __nccwpck_require__(1754); -const baseexternalclient_1 = __nccwpck_require__(7391); -/** - * AWS external account client. This is used for AWS workloads, where - * AWS STS GetCallerIdentity serialized signed requests are exchanged for - * GCP access token. 
- */ -class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(4636); +const util_1 = __nccwpck_require__(8905); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { /** - * Instantiates an AwsClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid AWS credential. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - this.environmentId = options.credential_source.environment_id; - // This is only required if the AWS region is not available in the - // AWS_REGION or AWS_DEFAULT_REGION environment variables. - this.regionUrl = options.credential_source.region_url; - // This is only required if AWS security credentials are not available in - // environment variables. - this.securityCredentialsUrl = options.credential_source.url; - this.regionalCredVerificationUrl = - options.credential_source.regional_cred_verification_url; - this.imdsV2SessionTokenUrl = - options.credential_source.imdsv2_session_token_url; - this.awsRequestSigner = null; - this.region = ''; - // data validators - this.validateEnvironmentId(); - this.validateMetadataServerURLs(); + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; } - validateEnvironmentId() { - var _a; - const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\d+)$/); - if (!match || !this.regionalCredVerificationUrl) { - throw new Error('No valid AWS "credential_source" provided'); + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; } - else if (parseInt(match[2], 10) !== 1) { - throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + else if (typeof scopes === 'string') { + cacheKey = url ? 
`${url}_${scopes}` : scopes; } - } - validateMetadataServerURLs() { - this.validateMetadataURL(this.regionUrl, 'region_url'); - this.validateMetadataURL(this.securityCredentialsUrl, 'url'); - this.validateMetadataURL(this.imdsV2SessionTokenUrl, 'imdsv2_session_token_url'); - } - validateMetadataURL(value, prop) { - if (!value) - return; - const url = new URL(value); - if (url.hostname !== AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS && - url.hostname !== `[${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}]`) { - throw new RangeError(`Invalid host "${url.hostname}" for "${prop}". Expecting ${AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS} or ${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}.`); + if (!cacheKey) { + throw Error('Scopes or url must be provided'); } + return cacheKey; } /** - * Triggered when an external subject token is needed to be exchanged for a - * GCP access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this uses a serialized AWS signed request to the STS GetCallerIdentity - * endpoint. - * The logic is summarized as: - * 1. If imdsv2_session_token_url is provided in the credential source, then - * fetch the aws session token and include it in the headers of the - * metadata requests. This is a requirement for IDMSv2 but optional - * for IDMSv1. - * 2. Retrieve AWS region from availability-zone. - * 3a. Check AWS credentials in environment variables. If not found, get - * from security-credentials endpoint. - * 3b. Get AWS credentials from security-credentials endpoint. In order - * to retrieve this, the AWS role needs to be determined by calling - * security-credentials endpoint without any argument. Then the - * credentials can be retrieved via: security-credentials/role_name - * 4. Generate the signed request to AWS STS GetCallerIdentity action. - * 5. Inject x-goog-cloud-target-resource into header and serialize the - * signed request. This will be the subject-token to pass to GCP STS. - * @return A promise that resolves with the external subject token. + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. */ - async retrieveSubjectToken() { - // Initialize AWS request signer if not already initialized. - if (!this.awsRequestSigner) { - const metadataHeaders = {}; - if (this.imdsV2SessionTokenUrl) { - metadataHeaders['x-aws-ec2-metadata-token'] = - await this.getImdsV2SessionToken(); - } - this.region = await this.getAwsRegion(metadataHeaders); - this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { - // Check environment variables for permanent credentials first. - // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html - if (process.env['AWS_ACCESS_KEY_ID'] && - process.env['AWS_SECRET_ACCESS_KEY']) { - return { - accessKeyId: process.env['AWS_ACCESS_KEY_ID'], - secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], - // This is normally not available for permanent credentials. 
- token: process.env['AWS_SESSION_TOKEN'], - }; + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); } - // Since the role on a VM can change, we don't need to cache it. - const roleName = await this.getAwsRoleName(metadataHeaders); - // Temporary credentials typically last for several hours. - // Expiration is returned in response. - // Consider future optimization of this logic to cache AWS tokens - // until their natural expiration. - const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders); - return { - accessKeyId: awsCreds.AccessKeyId, - secretAccessKey: awsCreds.SecretAccessKey, - token: awsCreds.Token, - }; - }, this.region); + } } - // Generate signed request to AWS STS GetCallerIdentity API. - // Use the required regional endpoint. Otherwise, the request will fail. - const options = await this.awsRequestSigner.getRequestOptions({ - url: this.regionalCredVerificationUrl.replace('{region}', this.region), - method: 'POST', + const header = this.keyId + ? { ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, }); - // The GCP STS endpoint expects the headers to be formatted as: - // [ - // {key: 'x-amz-date', value: '...'}, - // {key: 'Authorization', value: '...'}, - // ... - // ] - // And then serialized as: - // encodeURIComponent(JSON.stringify({ - // url: '...', - // method: 'POST', - // headers: [{key: 'x-amz-date', value: '...'}, ...] - // })) - const reformattedHeader = []; - const extendedHeaders = Object.assign({ - // The full, canonical resource name of the workload identity pool - // provider, with or without the HTTPS prefix. - // Including this header as part of the signature is recommended to - // ensure data integrity. - 'x-goog-cloud-target-resource': this.audience, - }, options.headers); - // Reformat header to GCP STS expected format. - for (const key in extendedHeaders) { - reformattedHeader.push({ - key, - value: extendedHeaders[key], - }); - } - // Serialize the reformatted signed request. 
- return encodeURIComponent(JSON.stringify({ - url: options.url, - method: options.method, - headers: reformattedHeader, - })); + return headers; } /** - * @return A promise that resolves with the IMDSv2 Session Token. + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. */ - async getImdsV2SessionToken() { - const opts = { - url: this.imdsV2SessionTokenUrl, - method: 'PUT', - responseType: 'text', - headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, - }; - const response = await this.transporter.request(opts); - return response.data; + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; } /** - * @param headers The headers to be used in the metadata request. - * @return A promise that resolves with the current AWS region. + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. */ - async getAwsRegion(headers) { - // Priority order for region determination: - // AWS_REGION > AWS_DEFAULT_REGION > metadata server. - if (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']) { - return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']); + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); } - if (!this.regionUrl) { - throw new Error('Unable to determine AWS region due to missing ' + - '"options.credential_source.region_url"'); + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); } - const opts = { - url: this.regionUrl, - method: 'GET', - responseType: 'text', - headers: headers, - }; - const response = await this.transporter.request(opts); - // Remove last character. For example, if us-east-2b is returned, - // the region would be us-east-2. - return response.data.substr(0, response.data.length - 1); + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; } - /** - * @param headers The headers to be used in the metadata request. - * @return A promise that resolves with the assigned role to the current - * AWS VM. This is needed for calling the security-credentials endpoint. - */ - async getAwsRoleName(headers) { - if (!this.securityCredentialsUrl) { - throw new Error('Unable to determine AWS role name due to missing ' + - '"options.credential_source.url"'); + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); } - const opts = { - url: this.securityCredentialsUrl, - method: 'GET', - responseType: 'text', - headers: headers, - }; - const response = await this.transporter.request(opts); - return response.data; } - /** - * Retrieves the temporary AWS credentials by calling the security-credentials - * endpoint as specified in the `credential_source` object. - * @param roleName The role attached to the current VM. - * @param headers The headers to be used in the metadata request. - * @return A promise that resolves with the temporary AWS credentials - * needed for creating the GetCallerIdentity signed request. 
- */ - async getAwsSecurityCredentials(roleName, headers) { - const response = await this.transporter.request({ - url: `${this.securityCredentialsUrl}/${roleName}`, - responseType: 'json', - headers: headers, + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); }); - return response.data; } } -exports.AwsClient = AwsClient; -AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; -AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; -//# sourceMappingURL=awsclient.js.map +exports.JWTAccess = JWTAccess; + /***/ }), -/***/ 1754: +/***/ 3959: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2013 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26887,210 +21455,346 @@ AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AwsRequestSigner = void 0; -const crypto_1 = __nccwpck_require__(8043); -/** AWS Signature Version 4 signing algorithm identifier. */ -const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; -/** - * The termination string for the AWS credential scope value as defined in - * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html - */ -const AWS_REQUEST_TYPE = 'aws4_request'; -/** - * Implements an AWS API request signer based on the AWS Signature Version 4 - * signing process. - * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html - */ -class AwsRequestSigner { +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(6031); +const jwtaccess_1 = __nccwpck_require__(8740); +const oauth2client_1 = __nccwpck_require__(3936); +const authclient_1 = __nccwpck_require__(4627); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } /** - * Instantiates an AWS API request signer used to send authenticated signed - * requests to AWS APIs based on the AWS Signature Version 4 signing process. - * This also provides a mechanism to generate the signed request without - * sending it. - * @param getCredentials A mechanism to retrieve AWS security credentials - * when needed. - * @param region The AWS region to use. + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. 
+ * @return The cloned instance. */ - constructor(getCredentials, region) { - this.getCredentials = getCredentials; - this.region = region; - this.crypto = (0, crypto_1.createCrypto)(); + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; } /** - * Generates the signed request for the provided HTTP request for calling - * an AWS API. This follows the steps described at: - * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html - * @param amzOptions The AWS request options that need to be signed. - * @return A promise that resolves with the GaxiosOptions containing the - * signed HTTP request parameters. + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. */ - async getRequestOptions(amzOptions) { - if (!amzOptions.url) { - throw new Error('"url" is required in "amzOptions"'); + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); } - // Stringify JSON requests. This will be set in the request body of the - // generated signed request. - const requestPayloadData = typeof amzOptions.data === 'object' - ? JSON.stringify(amzOptions.data) - : amzOptions.data; - const url = amzOptions.url; - const method = amzOptions.method || 'GET'; - const requestPayload = amzOptions.body || requestPayloadData; - const additionalAmzHeaders = amzOptions.headers; - const awsSecurityCredentials = await this.getCredentials(); - const uri = new URL(url); - const headerMap = await generateAuthenticationHeaderMap({ - crypto: this.crypto, - host: uri.host, - canonicalUri: uri.pathname, - canonicalQuerystring: uri.search.substr(1), - method, - region: this.region, - securityCredentials: awsSecurityCredentials, - requestPayload, - additionalAmzHeaders, + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? 
scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, }); - // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. - const headers = Object.assign( - // Add x-amz-date if available. - headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { - Authorization: headerMap.authorizationHeader, - host: uri.host, - }, additionalAmzHeaders || {}); - if (awsSecurityCredentials.token) { - Object.assign(headers, { - 'x-amz-security-token': awsSecurityCredentials.token, - }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); } - const awsSignedReq = { - url, - method: method, - headers, + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. + */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, }; - if (typeof requestPayload !== 'undefined') { - awsSignedReq.body = requestPayload; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); } - return awsSignedReq; + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. 
+ */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); } } -exports.AwsRequestSigner = AwsRequestSigner; -/** - * Creates the HMAC-SHA256 hash of the provided message using the - * provided key. - * - * @param crypto The crypto instance used to facilitate cryptographic - * operations. - * @param key The HMAC-SHA256 key to use. - * @param msg The message to hash. - * @return The computed hash bytes. - */ -async function sign(crypto, key, msg) { - return await crypto.signWithHmacSha256(key, msg); -} -/** - * Calculates the signing key used to calculate the signature for - * AWS Signature Version 4 based on: - * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html - * - * @param crypto The crypto instance used to facilitate cryptographic - * operations. - * @param key The AWS secret access key. - * @param dateStamp The '%Y%m%d' date format. - * @param region The AWS region. - * @param serviceName The AWS service name, eg. sts. - * @return The signing key bytes. - */ -async function getSigningKey(crypto, key, dateStamp, region, serviceName) { - const kDate = await sign(crypto, `AWS4${key}`, dateStamp); - const kRegion = await sign(crypto, kDate, region); - const kService = await sign(crypto, kRegion, serviceName); - const kSigning = await sign(crypto, kService, 'aws4_request'); - return kSigning; -} -/** - * Generates the authentication header map needed for generating the AWS - * Signature Version 4 signed request. 
- * - * @param option The options needed to compute the authentication header map. - * @return The AWS authentication header map which constitutes of the following - * components: amz-date, authorization header and canonical query string. - */ -async function generateAuthenticationHeaderMap(options) { - const additionalAmzHeaders = options.additionalAmzHeaders || {}; - const requestPayload = options.requestPayload || ''; - // iam.amazonaws.com host => iam service. - // sts.us-east-2.amazonaws.com => sts service. - const serviceName = options.host.split('.')[0]; - const now = new Date(); - // Format: '%Y%m%dT%H%M%SZ'. - const amzDate = now - .toISOString() - .replace(/[-:]/g, '') - .replace(/\.[0-9]+/, ''); - // Format: '%Y%m%d'. - const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); - // Change all additional headers to be lower case. - const reformattedAdditionalAmzHeaders = {}; - Object.keys(additionalAmzHeaders).forEach(key => { - reformattedAdditionalAmzHeaders[key.toLowerCase()] = - additionalAmzHeaders[key]; - }); - // Add AWS token if available. - if (options.securityCredentials.token) { - reformattedAdditionalAmzHeaders['x-amz-security-token'] = - options.securityCredentials.token; +exports.JWT = JWT; + + +/***/ }), + +/***/ 4524: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; } - // Header keys need to be sorted alphabetically. - const amzHeaders = Object.assign({ - host: options.host, - }, - // Previously the date was not fixed with x-amz- and could be provided manually. - // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req - reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); - let canonicalHeaders = ''; - const signedHeadersList = Object.keys(amzHeaders).sort(); - signedHeadersList.forEach(key => { - canonicalHeaders += `${key}:${amzHeaders[key]}\n`; - }); - const signedHeaders = signedHeadersList.join(';'); - const payloadHash = await options.crypto.sha256DigestHex(requestPayload); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html - const canonicalRequest = `${options.method}\n` + - `${options.canonicalUri}\n` + - `${options.canonicalQuerystring}\n` + - `${canonicalHeaders}\n` + - `${signedHeaders}\n` + - `${payloadHash}`; - const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; - // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html - const stringToSign = `${AWS_ALGORITHM}\n` + - `${amzDate}\n` + - `${credentialScope}\n` + - (await options.crypto.sha256DigestHex(canonicalRequest)); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html - const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); - const signature = await sign(options.crypto, signingKey, stringToSign); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html - const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + - `${credentialScope}, SignedHeaders=${signedHeaders}, ` + - `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; - return { - // Do not return x-amz-date if date is available. - amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, - authorizationHeader, - canonicalQuerystring: options.canonicalQuerystring, - }; } -//# sourceMappingURL=awsrequestsigner.js.map +exports.LoginTicket = LoginTicket; + /***/ }), -/***/ 7391: +/***/ 3936: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27104,231 +21808,396 @@ async function generateAuthenticationHeaderMap(options) { // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaseExternalAccountClient = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const querystring = __nccwpck_require__(3477); const stream = __nccwpck_require__(2781); +const formatEcdsa = __nccwpck_require__(1728); +const crypto_1 = __nccwpck_require__(8043); const authclient_1 = __nccwpck_require__(4627); -const sts = __nccwpck_require__(6308); -/** - * The required token exchange grant_type: rfc8693#section-2.1 - */ -const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; -/** - * The requested token exchange requested_token_type: rfc8693#section-2.1 - */ -const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** The default OAuth scope to request when none is provided. */ -const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; -/** The google apis domain pattern. 
*/ -const GOOGLE_APIS_DOMAIN_PATTERN = '\\.googleapis\\.com$'; -/** The variable portion pattern in a Google APIs domain. */ -const VARIABLE_PORTION_PATTERN = '[^\\.\\s\\/\\\\]+'; -/** Default impersonated token lifespan in seconds.*/ -const DEFAULT_TOKEN_LIFESPAN = 3600; -/** - * Offset to take into account network delays and server clock skews. - */ -exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; -/** - * The credentials JSON file type for external account clients. - * There are 3 types of JSON configs: - * 1. authorized_user => Google end user credential - * 2. service_account => Google service account credential - * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) - */ -exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; -/** Cloud resource manager URL used to retrieve project information. */ -exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; -/** The workforce audience pattern. */ -const WORKFORCE_AUDIENCE_PATTERN = '//iam.googleapis.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; -/** - * Base external account client. This is used to instantiate AuthClients for - * exchanging external account credentials for GCP access token and authorizing - * requests to GCP APIs. - * The base class implements common logic for exchanging various type of - * external credentials for GCP access token. The logic of determining and - * retrieving the external credential based on the environment and - * credential_source will be left for the subclasses. - */ -class BaseExternalAccountClient extends authclient_1.AuthClient { +const loginticket_1 = __nccwpck_require__(4524); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } /** - * Instantiate a BaseExternalAccountClient instance using the provided JSON - * object loaded from an external account credentials file. 
- * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. */ - constructor(options, additionalOptions) { - var _a, _b; - super(); - if (options.type !== exports.EXTERNAL_ACCOUNT_TYPE) { - throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + - `received "${options.type}"`); - } - this.clientAuth = options.client_id - ? { - confidentialClientType: 'basic', - clientId: options.client_id, - clientSecret: options.client_secret, - } - : undefined; - if (!this.validateGoogleAPIsUrl('sts', options.token_url)) { - throw new Error(`"${options.token_url}" is not a valid token url.`); - } - this.stsCredential = new sts.StsCredentials(options.token_url, this.clientAuth); - // Default OAuth scope. This could be overridden via public property. - this.scopes = [DEFAULT_OAUTH_SCOPE]; - this.cachedAccessToken = null; - this.audience = options.audience; - this.subjectTokenType = options.subject_token_type; - this.quotaProjectId = options.quota_project_id; - this.workforcePoolUserProject = options.workforce_pool_user_project; - const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); - if (this.workforcePoolUserProject && - !this.audience.match(workforceAudiencePattern)) { - throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + - 'credentials.'); + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); } - if (typeof options.service_account_impersonation_url !== 'undefined' && - !this.validateGoogleAPIsUrl('iamcredentials', options.service_account_impersonation_url)) { - throw new Error(`"${options.service_account_impersonation_url}" is ` + - 'not a valid service account impersonation url.'); + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); } - this.serviceAccountImpersonationUrl = - options.service_account_impersonation_url; - this.serviceAccountImpersonationLifetime = - (_b = (_a = options.service_account_impersonation) === null || _a === void 0 ? void 0 : _a.token_lifetime_seconds) !== null && _b !== void 0 ? _b : DEFAULT_TOKEN_LIFESPAN; - // As threshold could be zero, - // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the - // zero value. - if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { - this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET; + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. 
+ throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); } else { - this.eagerRefreshThresholdMillis = additionalOptions - .eagerRefreshThresholdMillis; + return this.getTokenAsync(options); } - this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); - this.projectId = null; - this.projectNumber = this.getProjectNumber(this.audience); } - /** The service account email to be impersonated, if available. */ - getServiceAccountEmail() { - var _a; - if (this.serviceAccountImpersonationUrl) { - // Parse email from URL. The formal looks as follows: - // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken - const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; - const result = re.exec(this.serviceAccountImpersonationUrl); - return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const values = { + code: options.code, + client_id: options.client_id || this._clientId, + client_secret: this._clientSecret, + redirect_uri: options.redirect_uri || this.redirectUri, + grant_type: 'authorization_code', + code_verifier: options.codeVerifier, + }; + const res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(values), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; } - return null; + this.emit('tokens', tokens); + return { tokens, res }; } /** - * Provides a mechanism to inject GCP access tokens directly. - * When the provided credential expires, a new credential, using the - * external account options, is retrieved. 
- * @param credentials The Credentials object to set on the current client. + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private */ - setCredentials(credentials) { - super.setCredentials(credentials); - this.cachedAccessToken = credentials; + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; } - /** - * @return A promise that resolves with the current GCP access token - * response. If the current credential is expired, a new one is retrieved. - */ - async getAccessToken() { - // If cached access token is unavailable or expired, force refresh. - if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { - await this.refreshAccessTokenAsync(); + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); } - // Return GCP access token in GetAccessTokenResponse format. - return { - token: this.cachedAccessToken.access_token, - res: this.cachedAccessToken.res, + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', }; + let res; + try { + // request for new token + res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } } /** - * The main authentication interface. It takes an optional url which when + * The main authentication interface. It takes an optional url which when * present is the endpoint being accessed, and returns a Promise which * resolves with authorization header fields. * - * The result has the form: + * In OAuth2Client, the result has the form: * { Authorization: 'Bearer ' } + * @param url The optional url being authorized */ - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, + Authorization: credentials.token_type + ' ' + tokens.access_token, }; - return this.addSharedMetadataHeaders(headers); + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; } - request(opts, callback) { + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. 
+ * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + this.transporter + .request(opts) + .then(r => callback(null, r), callback); } else { - return this.requestAsync(opts); + return this.transporter.request(opts); } } - /** - * @return A promise that resolves with the project ID corresponding to the - * current workload identity pool or current workforce pool if - * determinable. For workforce pool credential, it returns the project ID - * corresponding to the workforcePoolUserProject. - * This is introduced to match the current pattern of using the Auth - * library: - * const projectId = await auth.getProjectId(); - * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; - * const res = await client.request({ url }); - * The resource may not have permission - * (resourcemanager.projects.get) to call this API or the required - * scopes may not be selected: - * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes - */ - async getProjectId() { - const projectNumber = this.projectNumber || this.workforcePoolUserProject; - if (this.projectId) { - // Return previously determined project ID. - return this.projectId; + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); } - else if (projectNumber) { - // Preferable not to use request() to avoid retrial policies. - const headers = await this.getRequestHeaders(); - const response = await this.transporter.request({ - headers, - url: `${exports.CLOUD_RESOURCE_MANAGER}${projectNumber}`, - responseType: 'json', + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); }); - this.projectId = response.data.projectId; - return this.projectId; } - return null; + else { + return this.requestAsync(opts); + } } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param retry Whether the current attempt is a retry after a failed attempt. - * @return A promise that resolves with the successful response. 
- */ async requestAsync(opts, retry = false) { - let response; + let r2; try { - const requestHeaders = await this.getRequestHeaders(); + const r = await this.getRequestMetadataAsync(opts.url); opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; } - response = await this.transporter.request(opts); + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); } catch (e) { const res = e.response; @@ -27338,884 +22207,357 @@ class BaseExternalAccountClient extends authclient_1.AuthClient { // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream - // - forceRefreshOnFailure is true + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. + // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; - if (!retry && + if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!retry && isAuthErr && !isReadableStream && - this.forceRefreshOnFailure) { - await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); } } throw e; } - return response; + return r2; } - /** - * Forces token refresh, even if unexpired tokens are currently cached. 
- * External credentials are exchanged for GCP access tokens via the token - * exchange endpoint and other settings provided in the client options - * object. - * If the service_account_impersonation_url is provided, an additional - * step to exchange the external account GCP access token for a service - * account impersonated token is performed. - * @return A promise that resolves with the fresh GCP access tokens. - */ - async refreshAccessTokenAsync() { - // Retrieve the external credential. - const subjectToken = await this.retrieveSubjectToken(); - // Construct the STS credentials options. - const stsCredentialsOptions = { - grantType: STS_GRANT_TYPE, - audience: this.audience, - requestedTokenType: STS_REQUEST_TOKEN_TYPE, - subjectToken, - subjectTokenType: this.subjectTokenType, - // generateAccessToken requires the provided access token to have - // scopes: - // https://www.googleapis.com/auth/iam or - // https://www.googleapis.com/auth/cloud-platform - // The new service account access token scopes will match the user - // provided ones. - scope: this.serviceAccountImpersonationUrl - ? [DEFAULT_OAUTH_SCOPE] - : this.getScopesArray(), - }; - // Exchange the external credentials for a GCP access token. - // Client auth is prioritized over passing the workforcePoolUserProject - // parameter for STS token exchange. - const additionalOptions = !this.clientAuth && this.workforcePoolUserProject - ? { userProject: this.workforcePoolUserProject } - : undefined; - const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, additionalOptions); - if (this.serviceAccountImpersonationUrl) { - this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); } - else if (stsResponse.expires_in) { - // Save response in cached access token. - this.cachedAccessToken = { - access_token: stsResponse.access_token, - expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, - res: stsResponse.res, - }; + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); } else { - // Save response in cached access token. - this.cachedAccessToken = { - access_token: stsResponse.access_token, - res: stsResponse.res, - }; + return this.verifyIdTokenAsync(options); } - // Save credentials. - this.credentials = {}; - Object.assign(this.credentials, this.cachedAccessToken); - delete this.credentials.res; - // Trigger tokens event to notify external listeners. - this.emit('tokens', { - refresh_token: null, - expiry_date: this.cachedAccessToken.expiry_date, - access_token: this.cachedAccessToken.access_token, - token_type: 'Bearer', - id_token: null, - }); - // Return the cached access token. - return this.cachedAccessToken; } - /** - * Returns the workload identity pool project number if it is determinable - * from the audience resource name. - * @param audience The STS audience used to determine the project number. - * @return The project number associated with the workload identity pool, if - * this can be determined from the STS audience field. Otherwise, null is - * returned. 
- */ - getProjectNumber(audience) { - // STS audience pattern: - // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... - const match = audience.match(/\/projects\/([^/]+)/); - if (!match) { - return null; + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); } - return match[1]; + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; } /** - * Exchanges an external account GCP access token for a service - * account impersonated access token using iamcredentials - * GenerateAccessToken API. - * @param token The access token to exchange for a service account access - * token. - * @return A promise that resolves with the service account impersonated - * credentials response. + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. */ - async getImpersonatedAccessToken(token) { - const opts = { - url: this.serviceAccountImpersonationUrl, + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ method: 'POST', headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${token}`, - }, - data: { - scope: this.getScopesArray(), - lifetime: this.serviceAccountImpersonationLifetime + 's', + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, }, - responseType: 'json', - }; - const response = await this.transporter.request(opts); - const successResponse = response.data; - return { - access_token: successResponse.accessToken, - // Convert from ISO format to timestamp. - expiry_date: new Date(successResponse.expireTime).getTime(), - res: response, - }; - } - /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param accessToken The credentials to check for expiration. - * @return Whether the credentials are expired or not. - */ - isExpired(accessToken) { - const now = new Date().getTime(); - return accessToken.expiry_date - ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis - : false; + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; } - /** - * @return The list of scopes for the requested GCP access token. - */ - getScopesArray() { - // Since scopes can be provided as string or array, the type should - // be normalized. - if (typeof this.scopes === 'string') { - return [this.scopes]; - } - else if (typeof this.scopes === 'undefined') { - return [DEFAULT_OAUTH_SCOPE]; + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); } else { - return this.scopes; - } - } - /** - * Checks whether Google APIs URL is valid. - * @param apiName The apiName of url. - * @param url The Google API URL to validate. - * @return Whether the URL is valid or not. - */ - validateGoogleAPIsUrl(apiName, url) { - let parsedUrl; - // Return false if error is thrown during parsing URL. 
- try { - parsedUrl = new URL(url); - } - catch (e) { - return false; - } - const urlDomain = parsedUrl.hostname; - // Check the protocol is https. - if (parsedUrl.protocol !== 'https:') { - return false; - } - const googleAPIsDomainPatterns = [ - new RegExp('^' + - VARIABLE_PORTION_PATTERN + - '\\.' + - apiName + - GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + apiName + GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + - apiName + - '\\.' + - VARIABLE_PORTION_PATTERN + - GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + - VARIABLE_PORTION_PATTERN + - '\\-' + - apiName + - GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + - apiName + - '\\-' + - VARIABLE_PORTION_PATTERN + - '\\.p' + - GOOGLE_APIS_DOMAIN_PATTERN), - ]; - for (const googleAPIsDomainPattern of googleAPIsDomainPatterns) { - if (urlDomain.match(googleAPIsDomainPattern)) { - return true; - } + return this.getFederatedSignonCertsAsync(); } - return false; - } -} -exports.BaseExternalAccountClient = BaseExternalAccountClient; -//# sourceMappingURL=baseexternalclient.js.map - -/***/ }), - -/***/ 6875: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2013 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Compute = void 0; -const arrify = __nccwpck_require__(1546); -const gaxios_1 = __nccwpck_require__(9555); -const gcpMetadata = __nccwpck_require__(3563); -const oauth2client_1 = __nccwpck_require__(3936); -class Compute extends oauth2client_1.OAuth2Client { - /** - * Google Compute Engine service account credentials. - * - * Retrieve access token from the metadata server. - * See: https://developers.google.com/compute/docs/authentication - */ - constructor(options = {}) { - super(options); - // Start with an expired refresh token, which will automatically be - // refreshed before the first API call is made. - this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; - this.serviceAccountEmail = options.serviceAccountEmail || 'default'; - this.scopes = arrify(options.scopes); } - /** - * Refreshes the access token. - * @param refreshToken Unused parameter - */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; - let data; - try { - const instanceOptions = { - property: tokenPath, - }; - if (this.scopes.length > 0) { - instanceOptions.params = { - scopes: this.scopes.join(','), - }; - } - data = await gcpMetadata.instance(instanceOptions); - } - catch (e) { - if (e instanceof gaxios_1.GaxiosError) { - e.message = `Could not refresh access token: ${e.message}`; - this.wrapError(e); - } - throw e; + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? 
CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; } - const tokens = data; - if (data && data.expires_in) { - tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; - delete tokens.expires_in; + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); } - this.emit('tokens', tokens); - return { tokens, res: null }; - } - /** - * Fetches an ID token. - * @param targetAudience the audience for the fetched ID token. - */ - async fetchIdToken(targetAudience) { - const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + - `?format=full&audience=${targetAudience}`; - let idToken; try { - const instanceOptions = { - property: idTokenPath, - }; - idToken = await gcpMetadata.instance(instanceOptions); + res = await this.transporter.request({ url }); } catch (e) { if (e instanceof Error) { - e.message = `Could not fetch ID token: ${e.message}`; - } - throw e; - } - return idToken; - } - wrapError(e) { - const res = e.response; - if (res && res.status) { - e.code = res.status.toString(); - if (res.status === 403) { - e.message = - 'A Forbidden error was returned while attempting to retrieve an access ' + - 'token for the Compute Engine built-in service account. This may be because the Compute ' + - 'Engine instance does not have the correct permission scopes specified: ' + - e.message; - } - else if (res.status === 404) { - e.message = - 'A Not Found error was returned while attempting to retrieve an access' + - 'token for the Compute Engine built-in service account. This may be because the Compute ' + - 'Engine instance does not have any permission scopes specified: ' + - e.message; - } - } - } -} -exports.Compute = Compute; -//# sourceMappingURL=computeclient.js.map - -/***/ }), - -/***/ 6270: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
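// A minimal sketch, not taken from the bundle itself, of the certificate-cache
// expiry logic shown above: getFederatedSignonCertsAsync parses the
// `cache-control: max-age=<seconds>` response header and converts it into an
// absolute expiry Date, after which the certificates are fetched again. The
// helper name computeCertificateExpiry is illustrative and not a library API.
function computeCertificateExpiry(cacheControl, now = new Date()) {
    // max-age is given in seconds; convert it to milliseconds for Date math.
    const match = /max-age=([0-9]*)/.exec(cacheControl || '');
    if (!match || match.length !== 2) {
        return null; // no usable max-age header: treat the cache as already expired
    }
    const cacheAgeMillis = Number(match[1]) * 1000;
    return new Date(now.getTime() + cacheAgeMillis);
}
// Example: computeCertificateExpiry('public, max-age=22605') returns a Date a
// little over six hours in the future; a missing header yields null.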
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; -const stream = __nccwpck_require__(2781); -const authclient_1 = __nccwpck_require__(4627); -const sts = __nccwpck_require__(6308); -/** - * The required token exchange grant_type: rfc8693#section-2.1 - */ -const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; -/** - * The requested token exchange requested_token_type: rfc8693#section-2.1 - */ -const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** - * The requested token exchange subject_token_type: rfc8693#section-2.1 - */ -const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** The STS access token exchange end point. */ -const STS_ACCESS_TOKEN_URL = 'https://sts.googleapis.com/v1/token'; -/** - * The maximum number of access boundary rules a Credential Access Boundary - * can contain. - */ -exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; -/** - * Offset to take into account network delays and server clock skews. - */ -exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; -/** - * Defines a set of Google credentials that are downscoped from an existing set - * of Google OAuth2 credentials. This is useful to restrict the Identity and - * Access Management (IAM) permissions that a short-lived credential can use. - * The common pattern of usage is to have a token broker with elevated access - * generate these downscoped credentials from higher access source credentials - * and pass the downscoped short-lived access tokens to a token consumer via - * some secure authenticated channel for limited access to Google Cloud Storage - * resources. - */ -class DownscopedClient extends authclient_1.AuthClient { - /** - * Instantiates a downscoped client object using the provided source - * AuthClient and credential access boundary rules. - * To downscope permissions of a source AuthClient, a Credential Access - * Boundary that specifies which resources the new credential can access, as - * well as an upper bound on the permissions that are available on each - * resource, has to be defined. A downscoped client can then be instantiated - * using the source AuthClient and the Credential Access Boundary. - * @param authClient The source AuthClient to be downscoped based on the - * provided Credential Access Boundary rules. - * @param credentialAccessBoundary The Credential Access Boundary which - * contains a list of access boundary rules. Each rule contains information - * on the resource that the rule applies to, the upper bound of the - * permissions that are available on that resource and an optional - * condition to further restrict permissions. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. - * @param quotaProjectId Optional quota project id for setting up in the - * x-goog-user-project header. - */ - constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { - super(); - this.authClient = authClient; - this.credentialAccessBoundary = credentialAccessBoundary; - // Check 1-10 Access Boundary Rules are defined within Credential Access - // Boundary. 
- if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { - throw new Error('At least one access boundary rule needs to be defined.'); - } - else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > - exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { - throw new Error('The provided access boundary has more than ' + - `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); - } - // Check at least one permission should be defined in each Access Boundary - // Rule. - for (const rule of credentialAccessBoundary.accessBoundary - .accessBoundaryRules) { - if (rule.availablePermissions.length === 0) { - throw new Error('At least one permission should be defined in access boundary rules.'); - } - } - this.stsCredential = new sts.StsCredentials(STS_ACCESS_TOKEN_URL); - this.cachedDownscopedAccessToken = null; - // As threshold could be zero, - // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the - // zero value. - if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { - this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET; - } - else { - this.eagerRefreshThresholdMillis = additionalOptions - .eagerRefreshThresholdMillis; + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; } - this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); - this.quotaProjectId = quotaProjectId; - } - /** - * Provides a mechanism to inject Downscoped access tokens directly. - * The expiry_date field is required to facilitate determination of the token - * expiration which would make it easier for the token consumer to handle. - * @param credentials The Credentials object to set on the current client. - */ - setCredentials(credentials) { - if (!credentials.expiry_date) { - throw new Error('The access token expiry_date field is missing in the provided ' + - 'credentials.'); + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } } - super.setCredentials(credentials); - this.cachedDownscopedAccessToken = credentials; - } - async getAccessToken() { - // If the cached access token is unavailable or expired, force refresh. - // The Downscoped access token will be returned in - // DownscopedAccessTokenResponse format. - if (!this.cachedDownscopedAccessToken || - this.isExpired(this.cachedDownscopedAccessToken)) { - await this.refreshAccessTokenAsync(); + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); } - // Return Downscoped access token in DownscopedAccessTokenResponse format. - return { - token: this.cachedDownscopedAccessToken.access_token, - expirationTime: this.cachedDownscopedAccessToken.expiry_date, - res: this.cachedDownscopedAccessToken.res, - }; - } - /** - * The main authentication interface. 
It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * The result has the form: - * { Authorization: 'Bearer ' } - */ - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); - const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, - }; - return this.addSharedMetadataHeaders(headers); + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; } - request(opts, callback) { + getIapPublicKeys(callback) { if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); } else { - return this.requestAsync(opts); + return this.getIapPublicKeysAsync(); } } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param retry Whether the current attempt is a retry after a failed attempt. - * @return A promise that resolves with the successful response. - */ - async requestAsync(opts, retry = false) { - let response; + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); try { - const requestHeaders = await this.getRequestHeaders(); - opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; - } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; - } - response = await this.transporter.request(opts); + res = await this.transporter.request({ url }); } catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - forceRefreshOnFailure is true - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!retry && - isAuthErr && - !isReadableStream && - this.forceRefreshOnFailure) { - await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); - } + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; } throw e; } - return response; + return { pubkeys: res.data, res }; } - /** - * Forces token refresh, even if unexpired tokens are currently cached. - * GCP access tokens are retrieved from authclient object/source credential. - * Then GCP access tokens are exchanged for downscoped access tokens via the - * token exchange endpoint. - * @return A promise that resolves with the fresh downscoped access token. - */ - async refreshAccessTokenAsync() { - var _a; - // Retrieve GCP access token from source credential. - const subjectToken = (await this.authClient.getAccessToken()).token; - // Construct the STS credentials options. 
- const stsCredentialsOptions = { - grantType: STS_GRANT_TYPE, - requestedTokenType: STS_REQUEST_TOKEN_TYPE, - subjectToken: subjectToken, - subjectTokenType: STS_SUBJECT_TOKEN_TYPE, - }; - // Exchange the source AuthClient access token for a Downscoped access - // token. - const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); - /** - * The STS endpoint will only return the expiration time for the downscoped - * access token if the original access token represents a service account. - * The downscoped token's expiration time will always match the source - * credential expiration. When no expires_in is returned, we can copy the - * source credential's expiration time. - */ - const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; - const expiryDate = stsResponse.expires_in - ? new Date().getTime() + stsResponse.expires_in * 1000 - : sourceCredExpireDate; - // Save response in cached access token. - this.cachedDownscopedAccessToken = { - access_token: stsResponse.access_token, - expiry_date: expiryDate, - res: stsResponse.res, - }; - // Save credentials. - this.credentials = {}; - Object.assign(this.credentials, this.cachedDownscopedAccessToken); - delete this.credentials.res; - // Trigger tokens event to notify external listeners. - this.emit('tokens', { - refresh_token: null, - expiry_date: this.cachedDownscopedAccessToken.expiry_date, - access_token: this.cachedDownscopedAccessToken.access_token, - token_type: 'Bearer', - id_token: null, - }); - // Return the cached access token. - return this.cachedDownscopedAccessToken; + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); } /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param downscopedAccessToken The credentials to check for expiration. - * @return Whether the credentials are expired or not. + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. */ - isExpired(downscopedAccessToken) { - const now = new Date().getTime(); - return downscopedAccessToken.expiry_date - ? now >= - downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis - : false; - } -} -exports.DownscopedClient = DownscopedClient; -//# sourceMappingURL=downscopedclient.js.map - -/***/ }), - -/***/ 1380: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2018 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEnv = exports.clear = exports.GCPEnv = void 0; -const gcpMetadata = __nccwpck_require__(3563); -var GCPEnv; -(function (GCPEnv) { - GCPEnv["APP_ENGINE"] = "APP_ENGINE"; - GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; - GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; - GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; - GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; - GCPEnv["NONE"] = "NONE"; -})(GCPEnv = exports.GCPEnv || (exports.GCPEnv = {})); -let envPromise; -function clear() { - envPromise = undefined; -} -exports.clear = clear; -async function getEnv() { - if (envPromise) { - return envPromise; - } - envPromise = getEnvMemoized(); - return envPromise; -} -exports.getEnv = getEnv; -async function getEnvMemoized() { - let env = GCPEnv.NONE; - if (isAppEngine()) { - env = GCPEnv.APP_ENGINE; - } - else if (isCloudFunction()) { - env = GCPEnv.CLOUD_FUNCTIONS; - } - else if (await isComputeEngine()) { - if (await isKubernetesEngine()) { - env = GCPEnv.KUBERNETES_ENGINE; + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; } - else if (isCloudRun()) { - env = GCPEnv.CLOUD_RUN; + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); } - else { - env = GCPEnv.COMPUTE_ENGINE; + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); } - } - else { - env = GCPEnv.NONE; - } - return env; -} -function isAppEngine() { - return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); -} -function isCloudFunction() { - return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); -} -/** - * This check only verifies that the environment is running knative. - * This must be run *after* checking for Kubernetes, otherwise it will - * return a false positive. - */ -function isCloudRun() { - return !!process.env.K_CONFIGURATION; -} -async function isKubernetesEngine() { - try { - await gcpMetadata.instance('attributes/cluster-name'); - return true; - } - catch (e) { - return false; - } -} -async function isComputeEngine() { - return gcpMetadata.isAvailable(); -} -//# sourceMappingURL=envDetect.js.map - -/***/ }), - -/***/ 8749: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; -const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; -const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; -const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; -/** - * Defines the response of a 3rd party executable run by the pluggable auth client. - */ -class ExecutableResponse { - /** - * Instantiates an ExecutableResponse instance using the provided JSON object - * from the output of the executable. - * @param responseJson Response from a 3rd party executable, loaded from a - * run of the executable or a cached output file. - */ - constructor(responseJson) { - // Check that the required fields exist in the json response. - if (!responseJson.version) { - throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; } - if (responseJson.success === undefined) { - throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); } - this.version = responseJson.version; - this.success = responseJson.success; - // Validate required fields for a successful response. - if (this.success) { - this.expirationTime = responseJson.expiration_time; - this.tokenType = responseJson.token_type; - // Validate token type field. - if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && - this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && - this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { - throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + - `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; } - // Validate subject token. 
- if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { - if (!responseJson.saml_response) { - throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); - } - this.subjectToken = responseJson.saml_response; + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; } else { - if (!responseJson.id_token) { - throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + - `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); - } - this.subjectToken = responseJson.id_token; - } - } - else { - // Both code and message must be provided for unsuccessful responses. - if (!responseJson.code) { - throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + audVerified = aud === requiredAudience; } - if (!responseJson.message) { - throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + if (!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); } - this.errorCode = responseJson.code; - this.errorMessage = responseJson.message; } + return new loginticket_1.LoginTicket(envelope, payload); } /** - * @return A boolean representing if the response has a valid token. Returns - * true when the response was successful and the token is not expired. 
- */ - isValid() { - return !this.isExpired() && this.success; - } - /** - * @return A boolean representing if the response is expired. Returns true if the - * provided timeout has passed. + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. */ - isExpired() { - return (this.expirationTime !== undefined && - this.expirationTime < Math.round(Date.now() / 1000)); - } -} -exports.ExecutableResponse = ExecutableResponse; -/** - * An error thrown by the ExecutableResponse class. - */ -class ExecutableResponseError extends Error { - constructor(message) { - super(message); - Object.setPrototypeOf(this, new.target.prototype); - } -} -exports.ExecutableResponseError = ExecutableResponseError; -/** - * An error thrown when the 'version' field in an executable response is missing or invalid. - */ -class InvalidVersionFieldError extends ExecutableResponseError { -} -exports.InvalidVersionFieldError = InvalidVersionFieldError; -/** - * An error thrown when the 'success' field in an executable response is missing or invalid. - */ -class InvalidSuccessFieldError extends ExecutableResponseError { -} -exports.InvalidSuccessFieldError = InvalidSuccessFieldError; -/** - * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. - */ -class InvalidExpirationTimeFieldError extends ExecutableResponseError { -} -exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; -/** - * An error thrown when the 'token_type' field in an executable response is missing or invalid. - */ -class InvalidTokenTypeFieldError extends ExecutableResponseError { + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } } -exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +exports.OAuth2Client = OAuth2Client; /** - * An error thrown when the 'code' field in an executable response is missing or invalid. + * @deprecated use instance's {@link OAuth2Client.endpoints} */ -class InvalidCodeFieldError extends ExecutableResponseError { -} -exports.InvalidCodeFieldError = InvalidCodeFieldError; +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; /** - * An error thrown when the 'message' field in an executable response is missing or invalid. + * Clock skew - five minutes in seconds */ -class InvalidMessageFieldError extends ExecutableResponseError { -} -exports.InvalidMessageFieldError = InvalidMessageFieldError; +OAuth2Client.CLOCK_SKEW_SECS_ = 300; /** - * An error thrown when the subject token in an executable response is missing or invalid. 
+ * The default max Token Lifetime is one day in seconds */ -class InvalidSubjectTokenError extends ExecutableResponseError { -} -exports.InvalidSubjectTokenError = InvalidSubjectTokenError; -//# sourceMappingURL=executable-response.js.map +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; + /***/ }), -/***/ 4381: +/***/ 9510: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -28234,64 +22576,176 @@ exports.InvalidSubjectTokenError = InvalidSubjectTokenError; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ExternalAccountClient = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const identitypoolclient_1 = __nccwpck_require__(117); -const awsclient_1 = __nccwpck_require__(1569); -const pluggable_auth_client_1 = __nccwpck_require__(4782); +exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; +const querystring = __nccwpck_require__(3477); +const crypto_1 = __nccwpck_require__(8043); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; /** - * Dummy class with no constructor. Developers are expected to use fromJSON. + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. */ -class ExternalAccountClient { - constructor() { - throw new Error('ExternalAccountClients should be initialized via: ' + - 'ExternalAccountClient.fromJSON(), ' + - 'directly via explicit constructors, eg. ' + - 'new AwsClient(options), new IdentityPoolClient(options), new' + - 'PluggableAuthClientOptions, or via ' + - 'new GoogleAuth(options).getClient()'); +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); } /** - * This static method will instantiate the - * corresponding type of external account credential depending on the - * underlying credential source. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. - * @return A BaseExternalAccountClient instance or null if the options - * provided do not correspond to an external account credential. + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. */ - static fromJSON(options, additionalOptions) { - var _a, _b; - if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - if ((_a = options.credential_source) === null || _a === void 0 ? 
void 0 : _a.environment_id) { - return new awsclient_1.AwsClient(options, additionalOptions); - } - else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { - return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. 
+ let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } } else { - return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); } } - else { - return null; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); } + return newError; } -exports.ExternalAccountClient = ExternalAccountClient; -//# sourceMappingURL=externalclient.js.map +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; + /***/ }), -/***/ 695: +/***/ 4782: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2019 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -28305,736 +22759,961 @@ exports.ExternalAccountClient = ExternalAccountClient; // See the License for the specific language governing permissions and // limitations under the License. 
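// A minimal sketch, not taken from the bundle itself, of the two confidential-client
// authentication modes that OAuthClientAuthHandler applies above. Only the basic-auth
// header case and the application/json request-body case are shown; the function name
// applyClientAuth and the shape of the `auth` object are illustrative assumptions.
function applyClientAuth(opts, auth) {
    if (auth.confidentialClientType === 'basic') {
        // Basic auth: base64("clientId:clientSecret") goes into the Authorization header.
        const encoded = Buffer.from(`${auth.clientId}:${auth.clientSecret || ''}`).toString('base64');
        opts.headers = Object.assign({}, opts.headers, {Authorization: `Basic ${encoded}`});
    }
    else if (auth.confidentialClientType === 'request-body') {
        // Request-body auth: client_id/client_secret are merged into the JSON payload.
        opts.data = Object.assign({}, opts.data, {
            client_id: auth.clientId,
            client_secret: auth.clientSecret || '',
        });
    }
    return opts;
}
// Example:
// applyClientAuth({method: 'POST', headers: {}, data: {grant_type: 'urn:ietf:params:oauth:grant-type:token-exchange'}},
//                 {confidentialClientType: 'basic', clientId: 'id', clientSecret: 'secret'});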
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; -const child_process_1 = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -const gcpMetadata = __nccwpck_require__(3563); -const os = __nccwpck_require__(2037); -const path = __nccwpck_require__(1017); -const crypto_1 = __nccwpck_require__(8043); -const transporters_1 = __nccwpck_require__(2649); -const computeclient_1 = __nccwpck_require__(6875); -const idtokenclient_1 = __nccwpck_require__(298); -const envDetect_1 = __nccwpck_require__(1380); -const jwtclient_1 = __nccwpck_require__(3959); -const refreshclient_1 = __nccwpck_require__(8790); -const impersonated_1 = __nccwpck_require__(1103); -const externalclient_1 = __nccwpck_require__(4381); +exports.PluggableAuthClient = exports.ExecutableError = void 0; const baseexternalclient_1 = __nccwpck_require__(7391); -exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; -const GoogleAuthExceptionMessages = { - NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + - 'To learn more about authentication and Google APIs, visit: \n' + - 'https://cloud.google.com/docs/authentication/getting-started', -}; -class GoogleAuth { - constructor(opts) { - /** - * Caches a value indicating whether the auth layer is running on Google - * Compute Engine. - * @private - */ - this.checkIsGCE = undefined; - // To save the contents of the JSON credential file - this.jsonContent = null; - this.cachedCredential = null; - opts = opts || {}; - this._cachedProjectId = opts.projectId || null; - this.cachedCredential = opts.authClient || null; - this.keyFilename = opts.keyFilename || opts.keyFile; - this.scopes = opts.scopes; - this.jsonContent = opts.credentials || null; - this.clientOptions = opts.clientOptions; - } - // Note: this properly is only public to satisify unit tests. - // https://github.com/Microsoft/TypeScript/issues/5228 - get isGCE() { - return this.checkIsGCE; - } - // GAPIC client libraries should always use self-signed JWTs. The following - // variables are set on the JWT client in order to indicate the type of library, - // and sign the JWT with the correct audience and scopes (if not supplied). - setGapicJWTValues(client) { - client.defaultServicePath = this.defaultServicePath; - client.useJWTAccessWithScope = this.useJWTAccessWithScope; - client.defaultScopes = this.defaultScopes; - } - getProjectId(callback) { - if (callback) { - this.getProjectIdAsync().then(r => callback(null, r), callback); - } - else { - return this.getProjectIdAsync(); - } - } - /** - * A temporary method for internal `getProjectId` usages where `null` is - * acceptable. In a future major release, `getProjectId` should return `null` - * (as the `Promise` base signature describes) and this private - * method should be removed. - * - * @returns Promise that resolves with project id (or `null`) - */ - async getProjectIdOptional() { - try { - return await this.getProjectId(); - } - catch (e) { - if (e instanceof Error && - e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { - return null; - } - else { - throw e; - } - } - } - /* - * A private method for finding and caching a projectId. 
- * - * Supports environments in order of precedence: - * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable - * - GOOGLE_APPLICATION_CREDENTIALS JSON file - * - Cloud SDK: `gcloud config config-helper --format json` - * - GCE project ID from metadata server - * - * @returns projectId - */ - async findAndCacheProjectId() { - let projectId = null; - projectId || (projectId = await this.getProductionProjectId()); - projectId || (projectId = await this.getFileProjectId()); - projectId || (projectId = await this.getDefaultServiceProjectId()); - projectId || (projectId = await this.getGCEProjectId()); - projectId || (projectId = await this.getExternalAccountClientProjectId()); - if (projectId) { - this._cachedProjectId = projectId; - return projectId; - } - else { - throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); - } - } - async getProjectIdAsync() { - if (this._cachedProjectId) { - return this._cachedProjectId; - } - if (!this._findProjectIdPromise) { - this._findProjectIdPromise = this.findAndCacheProjectId(); - } - return this._findProjectIdPromise; - } - /** - * @returns Any scopes (user-specified or default scopes specified by the - * client library) that need to be set on the current Auth client. - */ - getAnyScopes() { - return this.scopes || this.defaultScopes; - } - getApplicationDefault(optionsOrCallback = {}, callback) { - let options; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else { - options = optionsOrCallback; - } - if (callback) { - this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); - } - else { - return this.getApplicationDefaultAsync(options); - } - } - async getApplicationDefaultAsync(options = {}) { - // If we've already got a cached credential, return it. - // This will also preserve one's configured quota project, in case they - // set one directly on the credential previously. - if (this.cachedCredential) { - return await this.prepareAndCacheADC(this.cachedCredential); - } - // Since this is a 'new' ADC to cache we will use the environment variable - // if it's available. We prefer this value over the value from ADC. - const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; - let credential; - // Check for the existence of a local environment variable pointing to the - // location of the credential file. This is typically used in local - // developer scenarios. - credential = - await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); - if (credential) { - if (credential instanceof jwtclient_1.JWT) { - credential.scopes = this.scopes; - } - else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { - credential.scopes = this.getAnyScopes(); - } - return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); - } - // Look in the well-known credential file location. - credential = await this._tryGetApplicationCredentialsFromWellKnownFile(options); - if (credential) { - if (credential instanceof jwtclient_1.JWT) { - credential.scopes = this.scopes; - } - else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { - credential.scopes = this.getAnyScopes(); - } - return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); - } - // Determine if we're running on GCE. 
- let isGCE; - try { - isGCE = await this._checkIsGCE(); - } - catch (e) { - if (e instanceof Error) { - e.message = `Unexpected error determining execution environment: ${e.message}`; - } - throw e; - } - if (!isGCE) { - // We failed to find the default credentials. Bail out with an error. - throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); - } - // For GCE, just return a default ComputeClient. It will take care of - // the rest. - options.scopes = this.getAnyScopes(); - return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); - } - async prepareAndCacheADC(credential, quotaProjectIdOverride) { - const projectId = await this.getProjectIdOptional(); - if (quotaProjectIdOverride) { - credential.quotaProjectId = quotaProjectIdOverride; - } - this.cachedCredential = credential; - return { credential, projectId }; - } - /** - * Determines whether the auth layer is running on Google Compute Engine. - * @returns A promise that resolves with the boolean. - * @api private - */ - async _checkIsGCE() { - if (this.checkIsGCE === undefined) { - this.checkIsGCE = await gcpMetadata.isAvailable(); - } - return this.checkIsGCE; +const executable_response_1 = __nccwpck_require__(8749); +const pluggable_auth_handler_1 = __nccwpck_require__(8941); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ *
+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration.
+ *
+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
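For illustration only, a credential configuration that routes ADC to this client could look
roughly like the following; every value is a placeholder, and only the executable-specific
fields (command, timeout_millis, output_file) correspond directly to the fields read by the
constructor below:

  {
    "type": "external_account",
    "audience": "//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-provider",
    "subject_token_type": "urn:ietf:params:oauth:token-type:id_token",
    "token_url": "https://sts.googleapis.com/v1/token",
    "credential_source": {
      "executable": {
        "command": "/path/to/token/helper --json",
        "timeout_millis": 30000,
        "output_file": "/path/to/cached/response.json"
      }
    }
  }
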
Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { /** - * Attempts to load default credentials from the environment variable path.. - * @returns Promise that resolves with the OAuth2Client or null. - * @api private + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. */ - async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { - const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || - process.env['google_application_credentials']; - if (!credentialsPath || credentialsPath.length === 0) { - return null; - } - try { - return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); } - catch (e) { - if (e instanceof Error) { - e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; - } - throw e; + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); } - } - /** - * Attempts to load default credentials from a well-known file location - * @return Promise that resolves with the OAuth2Client or null. - * @api private - */ - async _tryGetApplicationCredentialsFromWellKnownFile(options) { - // First, figure out the location of the file, depending upon the OS type. - let location = null; - if (this._isWindows()) { - // Windows - location = process.env['APPDATA']; + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; } else { - // Linux or Mac - const home = process.env['HOME']; - if (home) { - location = path.join(home, '.config'); - } - } - // If we found the root path, expand it. - if (location) { - location = path.join(location, 'gcloud', 'application_default_credentials.json'); - if (!fs.existsSync(location)) { - location = null; + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); } } - // The file does not exist. - if (!location) { - return null; - } - // The file seems to exist. Try to use it. 
- const client = await this._getApplicationCredentialsFromFilePath(location, options); - return client; + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; } /** - * Attempts to load default credentials from a file at the given path.. - * @param filePath The path to the file to read. - * @returns Promise that resolves with the OAuth2Client - * @api private + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. */ - async _getApplicationCredentialsFromFilePath(filePath, options = {}) { - // Make sure the path looks like a string. - if (!filePath || filePath.length === 0) { - throw new Error('The file path is invalid.'); + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); } - // Make sure there is a file at the path. lstatSync will throw if there is - // nothing there. - try { - // Resolve path to actual file in case of symlink. Expect a thrown error - // if not resolvable. - filePath = fs.realpathSync(filePath); - if (!fs.lstatSync(filePath).isFile()) { - throw new Error(); - } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); } - catch (err) { - if (err instanceof Error) { - err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + // If no response from output file, call the executable. + if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); } - throw err; - } - // Now open a read stream on the file, and parse it. - const readStream = fs.createReadStream(filePath); - return this.fromStream(readStream, options); - } - /** - * Create a credentials instance using a given impersonated input options. - * @param json The impersonated input object. 
- * @returns JWT or UserRefresh Client with data - */ - fromImpersonatedJSON(json) { - var _a, _b, _c, _d; - if (!json) { - throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); } - if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); } - if (!json.source_credentials) { - throw new Error('The incoming JSON object does not contain a source_credentials field'); + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); } - if (!json.service_account_impersonation_url) { - throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } } - // Create source client for impersonation - const sourceClient = new refreshclient_1.UserRefreshClient(json.source_credentials.client_id, json.source_credentials.client_secret, json.source_credentials.refresh_token); - // Extreact service account from service_account_impersonation_url - const targetPrincipal = (_b = (_a = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _a === void 0 ? void 0 : _a.groups) === null || _b === void 0 ? void 0 : _b.target; - if (!targetPrincipal) { - throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); } - const targetScopes = (_c = this.getAnyScopes()) !== null && _c !== void 0 ? _c : []; - const client = new impersonated_1.Impersonated({ - delegates: (_d = json.delegates) !== null && _d !== void 0 ? _d : [], - sourceClient: sourceClient, - targetPrincipal: targetPrincipal, - targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], - }); - return client; + // Return subject token from response. + return executableResponse.subjectToken; } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 8941: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(4782); +const executable_response_1 = __nccwpck_require__(8749); +const childProcess = __nccwpck_require__(2081); +const fs = __nccwpck_require__(7147); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { /** - * Create a credentials instance using the given input options. - * @param json The input object. - * @param options The JWT or UserRefresh options for the client - * @returns JWT or UserRefresh Client with data + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. */ - fromJSON(json, options) { - let client; - if (!json) { - throw new Error('Must pass in a JSON object containing the Google auth settings.'); - } - options = options || {}; - if (json.type === 'authorized_user') { - client = new refreshclient_1.UserRefreshClient(options); - client.fromJSON(json); - } - else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - client = this.fromImpersonatedJSON(json); - } - else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - client = externalclient_1.ExternalAccountClient.fromJSON(json, options); - client.scopes = this.getAnyScopes(); + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); } - else { - options.scopes = this.scopes; - client = new jwtclient_1.JWT(options); - this.setGapicJWTValues(client); - client.fromJSON(json); + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); } - return client; + this.outputFile = options.outputFile; } /** - * Return a JWT or UserRefreshClient from JavaScript object, caching both the - * object used to instantiate and the client. - * @param json The input object. - * @param options The JWT or UserRefresh options for the client - * @returns JWT or UserRefresh Client with data + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. */ - _cacheClientFromJSON(json, options) { - let client; - // create either a UserRefreshClient or JWT client. 
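// [Editor's sketch, not part of the generated bundle] How the client above wires up the
// handler: the constructor options and the Map passed to retrieveResponseFromExecutable
// mirror this hunk; the concrete command, timeout and file path are placeholders.
const handler = new PluggableAuthHandler({
    command: '/opt/bin/get-subject-token --format json',
    timeoutMillis: 30000,
    outputFile: '/tmp/subject-token-cache.json',
});
const envMap = new Map([
    ['GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', 'example-audience'],
    ['GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', 'urn:ietf:params:oauth:token-type:jwt'],
    ['GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'],
]);
// handler.retrieveResponseFromExecutable(envMap)
//     .then(response => console.log(response.subjectToken));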
- options = options || {}; - if (json.type === 'authorized_user') { - client = new refreshclient_1.UserRefreshClient(options); - client.fromJSON(json); - } - else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - client = this.fromImpersonatedJSON(json); - } - else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - client = externalclient_1.ExternalAccountClient.fromJSON(json, options); - client.scopes = this.getAnyScopes(); - } - else { - options.scopes = this.scopes; - client = new jwtclient_1.JWT(options); - this.setGapicJWTValues(client); - client.fromJSON(json); - } - // cache both raw data used to instantiate client and client itself. - this.jsonContent = json; - this.cachedCredential = client; - return client; - } - fromStream(inputStream, optionsOrCallback = {}, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else { - options = optionsOrCallback; - } - if (callback) { - this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); - } - else { - return this.fromStreamAsync(inputStream, options); - } - } - fromStreamAsync(inputStream, options) { + retrieveResponseFromExecutable(envMap) { return new Promise((resolve, reject) => { - if (!inputStream) { - throw new Error('Must pass in a stream containing the Google auth settings.'); - } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. 
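// [Editor's note, hedged] For orientation, a successful response printed by the
// executable is JSON along these lines; only the expiration_time key is named explicitly
// in this hunk, the remaining keys are assumptions inferred from the parsed
// ExecutableResponse fields (version, success, subject token, expiration):
//   {"version": 1, "success": true,
//    "token_type": "urn:ietf:params:oauth:token-type:id_token",
//    "id_token": "<subject token>", "expiration_time": 1620499962}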
try { - const data = JSON.parse(s); - const r = this._cacheClientFromJSON(data, options); - return resolve(r); + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); } - catch (err) { - // If we failed parsing this.keyFileName, assume that it - // is a PEM or p12 certificate: - if (!this.keyFilename) - throw err; - const client = new jwtclient_1.JWT({ - ...this.clientOptions, - keyFile: this.keyFilename, - }); - this.cachedCredential = client; - this.setGapicJWTValues(client); - return resolve(client); + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); } } - catch (err) { - return reject(err); + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); } }); }); } /** - * Create a credentials instance using the given API key string. - * @param apiKey The API key string - * @param options An optional options object. - * @returns A JWT loaded from the key - */ - fromAPIKey(apiKey, options) { - options = options || {}; - const client = new jwtclient_1.JWT(options); - client.fromAPIKey(apiKey); - return client; - } - /** - * Determines whether the current operating system is Windows. - * @api private + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. */ - _isWindows() { - const sys = os.platform(); - if (sys && sys.length >= 3) { - if (sys.substring(0, 3).toLowerCase() === 'win') { - return true; - } + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; } - return false; - } - /** - * Run the Google Cloud SDK command that prints the default project ID - */ - async getDefaultServiceProjectId() { - return new Promise(resolve => { - (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { - if (!err && stdout) { - try { - const projectId = JSON.parse(stdout).configuration.properties.core.project; - resolve(projectId); - return; - } - catch (e) { - // ignore errors - } - } - resolve(null); - }); + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. + if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } } /** - * Loads the project id from environment variables. 
- * @api private - */ - getProductionProjectId() { - return (process.env['GCLOUD_PROJECT'] || - process.env['GOOGLE_CLOUD_PROJECT'] || - process.env['gcloud_project'] || - process.env['google_cloud_project']); - } - /** - * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. - * @api private + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. */ - async getFileProjectId() { - if (this.cachedCredential) { - // Try to read the project ID from the cached credentials file - return this.cachedCredential.projectId; + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); } - // Ensure the projectId is loaded from the keyFile if available. - if (this.keyFilename) { - const creds = await this.getClient(); - if (creds && creds.projectId) { - return creds.projectId; + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); } } - // Try to load a credentials file and read its project ID - const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); - if (r) { - return r.projectId; - } - else { - return null; - } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 8790: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; } /** - * Gets the project ID from external account client if available. + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
*/ - async getExternalAccountClientProjectId() { - if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - return null; - } - const creds = await this.getClient(); - // Do not suppress the underlying error, as the error could contain helpful - // information for debugging and fixing. This is especially true for - // external account creds as in order to get the project ID, the following - // operations have to succeed: - // 1. Valid credentials file should be supplied. - // 2. Ability to retrieve access tokens from STS token exchange API. - // 3. Ability to exchange for service account impersonated credentials (if - // enabled). - // 4. Ability to get project info using the access token from step 2 or 3. - // Without surfacing the error, it is harder for developers to determine - // which step went wrong. - return await creds.getProjectId(); + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); } /** - * Gets the Compute Engine project ID if it can be inferred. + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. */ - async getGCEProjectId() { - try { - const r = await gcpMetadata.project('project-id'); - return r; + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); } - catch (e) { - // Ignore any errors - return null; + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; } - getCredentials(callback) { + fromStream(inputStream, callback) { if (callback) { - this.getCredentialsAsync().then(r => callback(null, r), callback); + this.fromStreamAsync(inputStream).then(() => callback(), callback); } else { - return this.getCredentialsAsync(); + return this.fromStreamAsync(inputStream); } } - async getCredentialsAsync() { - const client = await this.getClient(); - if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { - const serviceAccountEmail = client.getServiceAccountEmail(); - if (serviceAccountEmail) { - return { client_email: serviceAccountEmail }; + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); } - } - if (this.jsonContent) { - const credential = { - client_email: this.jsonContent.client_email, - private_key: this.jsonContent.private_key, - }; - return credential; - } - const isGCE = await this._checkIsGCE(); - if (!isGCE) { - throw new Error('Unknown error.'); - } - // For GCE, return the service account details from the metadata server - // NOTE: The trailing '/' at the end of service-accounts/ is very important! 
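// [Editor's sketch, not part of the generated bundle] The authorized_user JSON shape
// accepted by UserRefreshClient.fromJSON above; every field shown is validated by that
// method, and the values are placeholders.
const refreshClient = new UserRefreshClient();
refreshClient.fromJSON({
    type: 'authorized_user',
    client_id: 'example-id.apps.googleusercontent.com',
    client_secret: 'example-secret',
    refresh_token: 'example-refresh-token',
});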
- // The GCF metadata server doesn't respect querystring params if this / is - // not included. - const data = await gcpMetadata.instance({ - property: 'service-accounts/', - params: { recursive: 'true' }, + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); }); - if (!data || !data.default || !data.default.email) { - throw new Error('Failure from metadata server.'); - } - return { client_email: data.default.email }; + } +} +exports.UserRefreshClient = UserRefreshClient; + + +/***/ }), + +/***/ 6308: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const querystring = __nccwpck_require__(3477); +const transporters_1 = __nccwpck_require__(2649); +const oauth2common_1 = __nccwpck_require__(9510); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); } /** - * Automatically obtain a client based on the provided configuration. If no - * options were passed, use Application Default Credentials. + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. 
*/ - async getClient() { - if (!this.cachedCredential) { - if (this.jsonContent) { - this._cacheClientFromJSON(this.jsonContent, this.clientOptions); - } - else if (this.keyFilename) { - const filePath = path.resolve(this.keyFilename); - const stream = fs.createReadStream(filePath); - await this.fromStreamAsync(stream, this.clientOptions); + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; } - else { - await this.getApplicationDefaultAsync(this.clientOptions); + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); } + // Request could fail before the server responds. + throw error; } - return this.cachedCredential; } - /** - * Creates a client which will fetch an ID token for authorization. - * @param targetAudience the audience for the fetched ID token. - * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. - */ - async getIdTokenClient(targetAudience) { - const client = await this.getClient(); - if (!('fetchIdToken' in client)) { - throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); +} +exports.StsCredentials = StsCredentials; + + +/***/ }), + +/***/ 4693: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(6463); +const crypto_1 = __nccwpck_require__(8043); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); } - return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } - /** - * Automatically obtain application default credentials, and return - * an access token for making requests. - */ - async getAccessToken() { - const client = await this.getClient(); - return (await client.getAccessToken()).token; + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); } - /** - * Obtain the HTTP headers that will provide authorization for a given - * request. - */ - async getRequestHeaders(url) { - const client = await this.getClient(); - return client.getRequestHeaders(url); + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. 
+ const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; } /** - * Obtain credentials for a request, then attach the appropriate headers to - * the request options. - * @param opts Axios or Request options on which to attach the headers + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. */ - async authorizeRequest(opts) { - opts = opts || {}; - const url = opts.url || opts.uri; - const client = await this.getClient(); - const headers = await client.getRequestHeaders(url); - opts.headers = Object.assign(opts.headers || {}, headers); - return opts; + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); } /** - * Automatically obtain application default credentials, and make an - * HTTP request using the given options. - * @param opts Axios request options for the HTTP request. + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - async request(opts) { - const client = await this.getClient(); - return client.request(opts); + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; + + +/***/ }), + +/***/ 8043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
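// [Editor's sketch, not part of the generated bundle] BrowserCrypto above and NodeCrypto
// below expose the same surface, so callers can pick one at runtime; hasBrowserCrypto and
// NodeCrypto are defined later in this hunk, and the input string is a placeholder.
// const c = hasBrowserCrypto() ? new BrowserCrypto() : new NodeCrypto();
// await c.sha256DigestBase64('hello');   // base64-encoded SHA-256 digest of 'hello'
// c.encodeBase64StringUtf8('hello');     // 'aGVsbG8='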
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; +const crypto_1 = __nccwpck_require__(4693); +const crypto_2 = __nccwpck_require__(757); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +exports.createCrypto = createCrypto; +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +exports.hasBrowserCrypto = hasBrowserCrypto; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} +exports.fromArrayBufferToHex = fromArrayBufferToHex; + + +/***/ }), + +/***/ 757: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6113); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); } /** - * Determine the compute environment in which the code is running. + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. 
*/ - getEnv() { - return (0, envDetect_1.getEnv)(); + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); } /** - * Sign the given data with the current private key, or go out - * to the IAM API to sign it. - * @param data The data to be signed. + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. */ - async sign(data) { - const client = await this.getClient(); - const crypto = (0, crypto_1.createCrypto)(); - if (client instanceof jwtclient_1.JWT && client.key) { - const sign = await crypto.sign(client.key, data); - return sign; - } - const creds = await this.getCredentials(); - if (!creds.client_email) { - throw new Error('Cannot sign data without `client_email`.'); - } - return this.signBlob(crypto, creds.client_email, data); - } - async signBlob(crypto, emailOrUniqueId, data) { - const url = 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + - `${emailOrUniqueId}:signBlob`; - const res = await this.request({ - method: 'POST', - url, - data: { - payload: crypto.encodeBase64StringUtf8(data), - }, - }); - return res.data.signedBlob; + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); } } -exports.GoogleAuth = GoogleAuth; +exports.NodeCrypto = NodeCrypto; /** - * Export DefaultTransporter as a static property of the class. + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. */ -GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; -//# sourceMappingURL=googleauth.js.map +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + /***/ }), -/***/ 9735: +/***/ 810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(695); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +exports.gcpMetadata = __nccwpck_require__(3563); +var authclient_1 = __nccwpck_require__(4627); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(6875); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(1380); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(9735); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(298); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(8740); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(3959); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(1103); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(3936); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +var loginticket_1 = __nccwpck_require__(4524); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(8790); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(1569); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var identitypoolclient_1 = __nccwpck_require__(117); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(4381); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(7391); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { 
return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(6270); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(4782); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +var transporters_1 = __nccwpck_require__(2649); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 6608: /***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2014 Google LLC +// Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29048,42 +23727,36 @@ GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IAMAuth = void 0; -class IAMAuth { - /** - * IAM credentials. - * - * @param selector the iam authority selector - * @param token the token - * @constructor - */ - constructor(selector, token) { - this.selector = selector; - this.token = token; - this.selector = selector; - this.token = token; - } - /** - * Acquire the HTTP headers required to make an authenticated request. - */ - getRequestHeaders() { - return { - 'x-goog-iam-authority-selector': this.selector, - 'x-goog-iam-authorization-token': this.token, - }; +exports.validate = void 0; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } } } -exports.IAMAuth = IAMAuth; -//# sourceMappingURL=iam.js.map +exports.validate = validate; + /***/ }), -/***/ 117: +/***/ 2649: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29096,159 +23769,111 @@ exports.IAMAuth = IAMAuth; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
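// [Editor's sketch, not part of the generated bundle] The option validation added
// earlier in this hunk rejects legacy request-style keys in favour of their Gaxios/Axios
// equivalents; the URLs are placeholders.
// validate({ url: 'https://example.com', params: { alt: 'json' } });  // passes
// validate({ uri: 'https://example.com' });  // throws: use 'url' instead of 'uri'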
-var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IdentityPoolClient = void 0; -const fs = __nccwpck_require__(7147); -const util_1 = __nccwpck_require__(3837); -const baseexternalclient_1 = __nccwpck_require__(7391); -// fs.readfile is undefined in browser karma tests causing -// `npm run browser-test` to fail as test.oauth2.ts imports this file via -// src/index.ts. -// Fallback to void function to avoid promisify throwing a TypeError. -const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); -const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); -const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); -/** - * Defines the Url-sourced and file-sourced external account clients mainly - * used for K8s and Azure workloads. - */ -class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const options_1 = __nccwpck_require__(6608); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(1402); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } /** - * Instantiate an IdentityPoolClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid file-sourced or - * url-sourced credential or a workforce pool user project is provided - * with a non workforce audience. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. */ - constructor(options, additionalOptions) { - var _a, _b; - super(options, additionalOptions); - this.file = options.credential_source.file; - this.url = options.credential_source.url; - this.headers = options.credential_source.headers; - if (!this.file && !this.url) { - throw new Error('No valid Identity Pool "credential_source" provided'); - } - // Text is the default format type. - this.formatType = ((_a = options.credential_source.format) === null || _a === void 0 ? void 0 : _a.type) || 'text'; - this.formatSubjectTokenFieldName = - (_b = options.credential_source.format) === null || _b === void 0 ? 
void 0 : _b.subject_token_field_name; - if (this.formatType !== 'json' && this.formatType !== 'text') { - throw new Error(`Invalid credential_source format "${this.formatType}"`); - } - if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) { - throw new Error('Missing subject_token_field_name for JSON credential_source format'); + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } } + return opts; } /** - * Triggered when a external subject token is needed to be exchanged for a GCP - * access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this either retrieves the local credential from a file location (k8s - * workload) or by sending a GET request to a local metadata server (Azure - * workloads). - * @return A promise that resolves with the external subject token. + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. */ - async retrieveSubjectToken() { - if (this.file) { - return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName); - } - return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers); + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; } /** - * Looks up the external subject token in the file path provided and - * resolves with that token. - * @param file The file path where the external credential is located. - * @param formatType The token file or URL response type (JSON or text). - * @param formatSubjectTokenFieldName For JSON response types, this is the - * subject_token field name. For Azure, this is access_token. For text - * response types, this is ignored. - * @return A promise that resolves with the external subject token. + * Changes the error to include details from the body. */ - async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) { - // Make sure there is a file at the path. lstatSync will throw if there is - // nothing there. - try { - // Resolve path to actual file in case of symlink. Expect a thrown error - // if not resolvable. - filePath = await realpath(filePath); - if (!(await lstat(filePath)).isFile()) { - throw new Error(); + processError(e) { + const res = e.response; + const err = e; + const body = res ? 
res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; } - } - catch (err) { - if (err instanceof Error) { - err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; } - throw err; - } - let subjectToken; - const rawText = await readFile(filePath, { encoding: 'utf8' }); - if (formatType === 'text') { - subjectToken = rawText; - } - else if (formatType === 'json' && formatSubjectTokenFieldName) { - const json = JSON.parse(rawText); - subjectToken = json[formatSubjectTokenFieldName]; - } - if (!subjectToken) { - throw new Error('Unable to parse the subject_token from the credential_source file'); - } - return subjectToken; - } - /** - * Sends a GET request to the URL provided and resolves with the returned - * external subject token. - * @param url The URL to call to retrieve the subject token. This is typically - * a local metadata server. - * @param formatType The token file or URL response type (JSON or text). - * @param formatSubjectTokenFieldName For JSON response types, this is the - * subject_token field name. For Azure, this is access_token. For text - * response types, this is ignored. - * @param headers The optional additional headers to send with the request to - * the metadata server url. - * @return A promise that resolves with the external subject token. - */ - async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) { - const opts = { - url, - method: 'GET', - headers, - responseType: formatType, - }; - let subjectToken; - if (formatType === 'text') { - const response = await this.transporter.request(opts); - subjectToken = response.data; - } - else if (formatType === 'json' && formatSubjectTokenFieldName) { - const response = await this.transporter.request(opts); - subjectToken = response.data[formatSubjectTokenFieldName]; } - if (!subjectToken) { - throw new Error('Unable to parse the subject_token from the credential_source URL'); + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; } - return subjectToken; + return err; } } -exports.IdentityPoolClient = IdentityPoolClient; -//# sourceMappingURL=identitypoolclient.js.map +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + /***/ }), -/***/ 298: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 8905: +/***/ (function(__unused_webpack_module, exports) { "use strict"; -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29261,42203 +23886,32735 @@ exports.IdentityPoolClient = IdentityPoolClient; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
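// [Editor's note, hedged] The error flattening performed by DefaultTransporter.processError
// above, for an assumed 4xx response body:
//   body = { error: { code: 403, message: 'forbidden', errors: [{ message: 'forbidden' }] } }
//   => err.message === 'forbidden', err.code === 403, err.errors === body.error.errors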
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IdTokenClient = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -class IdTokenClient extends oauth2client_1.OAuth2Client { - /** - * Google ID Token client - * - * Retrieve access token from the metadata server. - * See: https://developers.google.com/compute/docs/authentication - */ - constructor(options) { - super(); - this.targetAudience = options.targetAudience; - this.idTokenProvider = options.idTokenProvider; - } - async getRequestMetadataAsync( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - url) { - if (!this.credentials.id_token || - (this.credentials.expiry_date || 0) < Date.now()) { - const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); - this.credentials = { - id_token: idToken, - expiry_date: this.getIdTokenExpiryDate(idToken), - }; - } - const headers = { - Authorization: 'Bearer ' + this.credentials.id_token, - }; - return { headers }; - } - getIdTokenExpiryDate(idToken) { - const payloadB64 = idToken.split('.')[1]; - if (payloadB64) { - const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); - return payload.exp * 1000; - } - } -} -exports.IdTokenClient = IdTokenClient; -//# sourceMappingURL=idtokenclient.js.map - -/***/ }), - -/***/ 1103: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - +exports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0; /** - * Copyright 2021 Google LLC + * Returns the camel case of a provided string. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * @remarks * - * http://www.apache.org/licenses/LICENSE-2.0 + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -const gaxios_1 = __nccwpck_require__(9555); -exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; -class Impersonated extends oauth2client_1.OAuth2Client { +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +exports.snakeToCamel = snakeToCamel; +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. 
+ * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { /** - * Impersonated service account credentials. - * - * Create a new access token by impersonating another service account. * - * Impersonated Credentials allowing credentials issued to a user or - * service account to impersonate another. The source project using - * Impersonated Credentials must enable the "IAMCredentials" API. - * Also, the target service account must grant the orginating principal - * the "Service Account Token Creator" IAM role. - * - * @param {object} options - The configuration object. - * @param {object} [options.sourceClient] the source credential used as to - * acquire the impersonated credentials. - * @param {string} [options.targetPrincipal] the service account to - * impersonate. - * @param {string[]} [options.delegates] the chained list of delegates - * required to grant the final access_token. If set, the sequence of - * identities must have "Service Account Token Creator" capability granted to - * the preceding identity. For example, if set to [serviceAccountB, - * serviceAccountC], the sourceCredential must have the Token Creator role on - * serviceAccountB. serviceAccountB must have the Token Creator on - * serviceAccountC. Finally, C must have Token Creator on target_principal. - * If left unset, sourceCredential must have that role on targetPrincipal. - * @param {string[]} [options.targetScopes] scopes to request during the - * authorization grant. - * @param {number} [options.lifetime] number of seconds the delegated - * credential should be valid for up to 3600 seconds by default, or 43,200 - * seconds by extending the token's lifetime, see: - * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth - * @param {string} [options.endpoint] api endpoint override. + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available */ - constructor(options = {}) { - var _a, _b, _c, _d, _e, _f; - super(options); - this.credentials = { - expiry_date: 1, - refresh_token: 'impersonated-placeholder', - }; - this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); - this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; - this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; - this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; - this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; - this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; } - /** - * Refreshes the access token. 
- * @param refreshToken Unused parameter - */ - async refreshToken(refreshToken) { - var _a, _b, _c, _d, _e, _f; - try { - await this.sourceClient.getAccessToken(); - const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; - const u = `${this.endpoint}/v1/${name}:generateAccessToken`; - const body = { - delegates: this.delegates, - scope: this.targetScopes, - lifetime: this.lifetime + 's', - }; - const res = await this.sourceClient.request({ - url: u, - data: body, - method: 'POST', - }); - const tokenResponse = res.data; - this.credentials.access_token = tokenResponse.accessToken; - this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); - return { - tokens: this.credentials, - res, - }; - } - catch (error) { - if (!(error instanceof Error)) - throw error; - let status = 0; - let message = ''; - if (error instanceof gaxios_1.GaxiosError) { - status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; - message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; - } - if (status && message) { - error.message = `${status}: unable to impersonate: ${message}`; - throw error; - } - else { - error.message = `unable to impersonate: ${error}`; - throw error; - } - } + return { get }; +} +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; } /** - * Generates an OpenID Connect ID token for a service account. + * Add an item to the cache. * - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. * - * @param targetAudience the audience for the fetched ID token. - * @param options the for the request - * @return an OpenID Connect ID token + * @param key the key to retrieve */ - async fetchIdToken(targetAudience, options) { - var _a; - await this.sourceClient.getAccessToken(); - const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; - const u = `${this.endpoint}/v1/${name}:generateIdToken`; - const body = { - delegates: this.delegates, - audience: targetAudience, - includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? 
_a : true, - }; - const res = await this.sourceClient.request({ - url: u, - data: body, - method: 'POST', - }); - return res.data.token; + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; } } -exports.Impersonated = Impersonated; -//# sourceMappingURL=impersonated.js.map +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; + /***/ }), -/***/ 8740: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6031: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -// Copyright 2015 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JWTAccess = void 0; +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(7147); +const gaxios_1 = __nccwpck_require__(9555); const jws = __nccwpck_require__(4636); -const LRU = __nccwpck_require__(7129); -const DEFAULT_HEADER = { - alg: 'RS256', - typ: 'JWT', -}; -class JWTAccess { +const path = __nccwpck_require__(1017); +const util_1 = __nccwpck_require__(3837); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } /** - * JWTAccess service account credentials. - * - * Create a new access token by using the credential to create a new JWT token - * that's recognized as the access token. + * Create a GoogleToken. * - * @param email the service account email address. - * @param key the private key that will be used to sign the token. - * @param keyId the ID of the private key used to sign the token. + * @param options Configuration object. */ - constructor(email, key, keyId, eagerRefreshThresholdMillis) { - this.cache = new LRU({ - max: 500, - maxAge: 60 * 60 * 1000, - }); - this.email = email; - this.key = key; - this.keyId = keyId; - this.eagerRefreshThresholdMillis = - eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); } /** - * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * Returns whether the token has expired. * - * @param url The URI being authorized. - * @param scopes The scope or scopes being authorized - * @returns A string that returns the cached key. + * @return true if the token has expired, false otherwise. */ - getCachedKey(url, scopes) { - let cacheKey = url; - if (scopes && Array.isArray(scopes) && scopes.length) { - cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; - } - else if (typeof scopes === 'string') { - cacheKey = url ? 
`${url}_${scopes}` : scopes; + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; } - if (!cacheKey) { - throw Error('Scopes or url must be provided'); + else { + return true; } - return cacheKey; } /** - * Get a non-expired access token, after refreshing if necessary. + * Returns whether the token will expire within eagerRefreshThresholdMillis * - * @param url The URI being authorized. - * @param additionalClaims An object with a set of additional claims to - * include in the payload. - * @returns An object that includes the authorization header. + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. */ - getRequestHeaders(url, additionalClaims, scopes) { - // Return cached authorization headers, unless we are within - // eagerRefreshThresholdMillis ms of them expiring: - const key = this.getCachedKey(url, scopes); - const cachedToken = this.cache.get(key); - const now = Date.now(); - if (cachedToken && - cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { - return cachedToken.headers; - } - const iat = Math.floor(Date.now() / 1000); - const exp = JWTAccess.getExpirationTime(iat); - let defaultClaims; - // Turn scopes into space-separated string - if (Array.isArray(scopes)) { - scopes = scopes.join(' '); - } - // If scopes are specified, sign with scopes - if (scopes) { - defaultClaims = { - iss: this.email, - sub: this.email, - scope: scopes, - exp, - iat, - }; + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; } else { - defaultClaims = { - iss: this.email, - sub: this.email, - aud: url, - exp, - iat, - }; + return true; } - // if additionalClaims are provided, ensure they do not collide with - // other required claims. - if (additionalClaims) { - for (const claim in defaultClaims) { - if (additionalClaims[claim]) { - throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); - } - } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; } - const header = this.keyId - ? { ...DEFAULT_HEADER, kid: this.keyId } - : DEFAULT_HEADER; - const payload = Object.assign(defaultClaims, additionalClaims); - // Sign the jwt and add it to the cache - const signedJWT = jws.sign({ header, payload, secret: this.key }); - const headers = { Authorization: `Bearer ${signedJWT}` }; - this.cache.set(key, { - expiration: exp * 1000, - headers, - }); - return headers; + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); } /** - * Returns an expiration time for the JWT token. - * - * @param iat The issued at time for the JWT. - * @returns An expiration time for the JWT. + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. 
+ * @returns an object with privateKey and clientEmail properties */ - static getExpirationTime(iat) { - const exp = iat + 3600; // 3600 seconds = 1 hour - return exp; + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } } - /** - * Create a JWTAccess credentials instance using the given input options. - * @param json The input object. - */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the service account auth settings.'); + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; } - if (!json.client_email) { - throw new Error('The incoming JSON object does not contain a client_email field'); + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); } - if (!json.private_key) { - throw new Error('The incoming JSON object does not contain a private_key field'); + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function 
_GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. + */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; } - // Extract the relevant information from the json key file. - this.email = json.client_email; - this.key = json.private_key; - this.keyId = json.private_key_id; - this.projectId = json.project_id; + throw e; } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 1621: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; + + +/***/ }), + +/***/ 7492: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(1808)); +const tls_1 = __importDefault(__nccwpck_require__(4404)); +const url_1 = __importDefault(__nccwpck_require__(7310)); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const once_1 = __importDefault(__nccwpck_require__(1040)); +const agent_base_1 = __nccwpck_require__(9690); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); } else { - return this.fromStreamAsync(inputStream); + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; } - fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - reject(new Error('Must pass in a stream containing the service account auth settings.')); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; } - let s = ''; - inputStream - .setEncoding('utf8') - .on('data', chunk => (s += chunk)) - .on('error', reject) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - resolve(); + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); } - catch (err) { - reject(err); + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); } - }); + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; }); } } -exports.JWTAccess = JWTAccess; -//# sourceMappingURL=jwtaccess.js.map +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 3764: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(7492)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 3959: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5098: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -// Copyright 2013 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JWT = void 0; -const gtoken_1 = __nccwpck_require__(6031); -const jwtaccess_1 = __nccwpck_require__(8740); -const oauth2client_1 = __nccwpck_require__(3936); -class JWT extends oauth2client_1.OAuth2Client { - constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { - const opts = optionsOrEmail && typeof optionsOrEmail === 'object' - ? optionsOrEmail - : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; - super({ - eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis, - forceRefreshOnFailure: opts.forceRefreshOnFailure, - }); - this.email = opts.email; - this.keyFile = opts.keyFile; - this.key = opts.key; - this.keyId = opts.keyId; - this.scopes = opts.scopes; - this.subject = opts.subject; - this.additionalClaims = opts.additionalClaims; - this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; - } - /** - * Creates a copy of the credential with the specified scopes. - * @param scopes List of requested scopes or a single scope. - * @return The cloned instance. - */ - createScoped(scopes) { - return new JWT({ - email: this.email, - keyFile: this.keyFile, - key: this.key, - keyId: this.keyId, - scopes, - subject: this.subject, - additionalClaims: this.additionalClaims, - }); - } - /** - * Obtains the metadata to be sent with the request. - * - * @param url the URI being authorized. - */ - async getRequestMetadataAsync(url) { - url = this.defaultServicePath ? 
`https://${this.defaultServicePath}/` : url; - const useSelfSignedJWT = (!this.hasUserScopes() && url) || - (this.useJWTAccessWithScope && this.hasAnyScopes()); - if (!this.apiKey && useSelfSignedJWT) { - if (this.additionalClaims && - this.additionalClaims.target_audience) { - const { tokens } = await this.refreshToken(); - return { - headers: this.addSharedMetadataHeaders({ - Authorization: `Bearer ${tokens.id_token}`, - }), - }; - } - else { - // no scopes have been set, but a uri has been provided. Use JWTAccess - // credentials. - if (!this.access) { - this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); - } - let scopes; - if (this.hasUserScopes()) { - scopes = this.scopes; - } - else if (!url) { - scopes = this.defaultScopes; - } - const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, - // Scopes take precedent over audience for signing, - // so we only provide them if useJWTAccessWithScope is on - this.useJWTAccessWithScope ? scopes : undefined); - return { headers: this.addSharedMetadataHeaders(headers) }; - } - } - else if (this.hasAnyScopes() || this.apiKey) { - return super.getRequestMetadataAsync(url); +const net_1 = __importDefault(__nccwpck_require__(1808)); +const tls_1 = __importDefault(__nccwpck_require__(4404)); +const url_1 = __importDefault(__nccwpck_require__(7310)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const agent_base_1 = __nccwpck_require__(9690); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); } else { - // If no audience, apiKey, or scopes are provided, we should not attempt - // to populate any headers: - return { headers: {} }; + opts = _opts; } - } - /** - * Fetches an ID token. - * @param targetAudience the audience for the fetched ID token. - */ - async fetchIdToken(targetAudience) { - // Create a new gToken for fetching an ID token - const gtoken = new gtoken_1.GoogleToken({ - iss: this.email, - sub: this.subject, - scope: this.scopes || this.defaultScopes, - keyFile: this.keyFile, - key: this.key, - additionalClaims: { target_audience: targetAudience }, - transporter: this.transporter, - }); - await gtoken.getToken({ - forceRefresh: true, - }); - if (!gtoken.idToken) { - throw new Error('Unknown error: Failed to fetch ID token'); + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); } - return gtoken.idToken; - } - /** - * Determine if there are currently scopes available. 
- */ - hasUserScopes() { - if (!this.scopes) { - return false; + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); } - return this.scopes.length > 0; - } - /** - * Are there any default or user scopes defined. - */ - hasAnyScopes() { - if (this.scopes && this.scopes.length > 0) - return true; - if (this.defaultScopes && this.defaultScopes.length > 0) - return true; - return false; - } - authorize(callback) { - if (callback) { - this.authorizeAsync().then(r => callback(null, r), callback); + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; } - else { - return this.authorizeAsync(); + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; } - } - async authorizeAsync() { - const result = await this.refreshToken(); - if (!result) { - throw new Error('No result returned'); + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; } - this.credentials = result.tokens; - this.credentials.refresh_token = 'jwt-placeholder'; - this.key = this.gtoken.key; - this.email = this.gtoken.iss; - return result.tokens; + this.proxy = proxy; } /** - * Refreshes the access token. - * @param refreshToken ignored - * @private + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - const gtoken = this.createGToken(); - const token = await gtoken.getToken({ - forceRefresh: this.isTokenExpiring(), + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. 
+ let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; }); - const tokens = { - access_token: token.access_token, - token_type: 'Bearer', - expiry_date: gtoken.expiresAt, - id_token: gtoken.idToken, - }; - this.emit('tokens', tokens); - return { res: null, tokens }; } - /** - * Create a gToken if it doesn't already exist. - */ - createGToken() { - if (!this.gtoken) { - this.gtoken = new gtoken_1.GoogleToken({ - iss: this.email, - sub: this.subject, - scope: this.scopes || this.defaultScopes, - keyFile: this.keyFile, - key: this.key, - additionalClaims: this.additionalClaims, - transporter: this.transporter, - }); +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; } - return this.gtoken; } - /** - * Create a JWT credentials instance using the given input options. - * @param json The input object. - */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the service account auth settings.'); + return ret; +} +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 7219: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(5098)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 595: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); } - if (!json.client_email) { - throw new Error('The incoming JSON object does not contain a client_email field'); + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); } - if (!json.private_key) { - throw new Error('The incoming JSON object does not contain a private_key field'); + function onclose(err) { + debug('onclose had error %o', err); } - // Extract the relevant information from the json key file. - this.email = json.client_email; - this.key = json.private_key; - this.keyId = json.private_key_id; - this.projectId = json.project_id; - this.quotaProjectId = json.quota_project_id; - } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); + function onend() { + debug('onend'); } - else { - return this.fromStreamAsync(inputStream); + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); } - } - fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - throw new Error('Must pass in a stream containing the service account auth settings.'); + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - resolve(); - } - catch (e) { - reject(e); - } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered }); - }); - } - /** - * Creates a JWT credentials instance using an API Key for authentication. 
- * @param apiKey The API Key in string form. - */ - fromAPIKey(apiKey) { - if (typeof apiKey !== 'string') { - throw new Error('Must provide an API Key string.'); - } - this.apiKey = apiKey; - } - /** - * Using the key or keyFile on the JWT client, obtain an object that contains - * the key and the client email. - */ - async getCredentials() { - if (this.key) { - return { private_key: this.key, client_email: this.email }; - } - else if (this.keyFile) { - const gtoken = this.createGToken(); - const creds = await gtoken.getCredentials(this.keyFile); - return { private_key: creds.privateKey, client_email: creds.clientEmail }; } - throw new Error('A key or a keyFile must be provided to getCredentials.'); - } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 4124: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(3837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(8544); } -exports.JWT = JWT; -//# sourceMappingURL=jwtclient.js.map -/***/ }), -/***/ 4524: -/***/ ((__unused_webpack_module, exports) => { +/***/ }), -"use strict"; +/***/ 8544: +/***/ ((module) => { -// Copyright 2014 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.LoginTicket = void 0; -class LoginTicket { - /** - * Create a simple class to extract user ID from an ID Token - * - * @param {string} env Envelope of the jwt - * @param {TokenPayload} pay Payload of the jwt - * @constructor - */ - constructor(env, pay) { - this.envelope = env; - this.payload = pay; - } - getEnvelope() { - return this.envelope; - } - getPayload() { - return this.payload; - } - /** - * Create a simple class to extract user ID from an ID Token - * - * @return The user ID - */ - getUserId() { - const payload = this.getPayload(); - if (payload && payload.sub) { - return payload.sub; +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true } - return null; + }) } - /** - * Returns attributes from the login ticket. This can contain - * various information about the user session. 
- * - * @return The envelope and payload - */ - getAttributes() { - return { envelope: this.getEnvelope(), payload: this.getPayload() }; + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor } + } } -exports.LoginTicket = LoginTicket; -//# sourceMappingURL=loginticket.js.map + /***/ }), -/***/ 3936: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1554: +/***/ ((module) => { "use strict"; -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const querystring = __nccwpck_require__(3477); -const stream = __nccwpck_require__(2781); -const formatEcdsa = __nccwpck_require__(1728); -const crypto_1 = __nccwpck_require__(8043); -const authclient_1 = __nccwpck_require__(4627); -const loginticket_1 = __nccwpck_require__(4524); -var CodeChallengeMethod; -(function (CodeChallengeMethod) { - CodeChallengeMethod["Plain"] = "plain"; - CodeChallengeMethod["S256"] = "S256"; -})(CodeChallengeMethod = exports.CodeChallengeMethod || (exports.CodeChallengeMethod = {})); -var CertificateFormat; -(function (CertificateFormat) { - CertificateFormat["PEM"] = "PEM"; - CertificateFormat["JWK"] = "JWK"; -})(CertificateFormat = exports.CertificateFormat || (exports.CertificateFormat = {})); -class OAuth2Client extends authclient_1.AuthClient { - constructor(optionsOrClientId, clientSecret, redirectUri) { - super(); - this.certificateCache = {}; - this.certificateExpiry = null; - this.certificateCacheFormat = CertificateFormat.PEM; - this.refreshTokenPromises = new Map(); - const opts = optionsOrClientId && typeof optionsOrClientId === 'object' - ? optionsOrClientId - : { clientId: optionsOrClientId, clientSecret, redirectUri }; - this._clientId = opts.clientId; - this._clientSecret = opts.clientSecret; - this.redirectUri = opts.redirectUri; - this.eagerRefreshThresholdMillis = - opts.eagerRefreshThresholdMillis || 5 * 60 * 1000; - this.forceRefreshOnFailure = !!opts.forceRefreshOnFailure; - } - /** - * Generates URL for consent page landing. - * @param opts Options. - * @return URL to consent page. 
- */ - generateAuthUrl(opts = {}) { - if (opts.code_challenge_method && !opts.code_challenge) { - throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); - } - opts.response_type = opts.response_type || 'code'; - opts.client_id = opts.client_id || this._clientId; - opts.redirect_uri = opts.redirect_uri || this.redirectUri; - // Allow scopes to be passed either as array or a string - if (Array.isArray(opts.scope)) { - opts.scope = opts.scope.join(' '); - } - const rootUrl = OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_; - return (rootUrl + - '?' + - querystring.stringify(opts)); - } - generateCodeVerifier() { - // To make the code compatible with browser SubtleCrypto we need to make - // this method async. - throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); - } - /** - * Convenience method to automatically generate a code_verifier, and its - * resulting SHA256. If used, this must be paired with a S256 - * code_challenge_method. - * - * For a full example see: - * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js - */ - async generateCodeVerifierAsync() { - // base64 encoding uses 6 bits per character, and we want to generate128 - // characters. 6*128/8 = 96. - const crypto = (0, crypto_1.createCrypto)(); - const randomString = crypto.randomBytesBase64(96); - // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ - // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just - // swapping out a few chars. - const codeVerifier = randomString - .replace(/\+/g, '~') - .replace(/=/g, '_') - .replace(/\//g, '-'); - // Generate the base64 encoded SHA256 - const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); - // We need to use base64UrlEncoding instead of standard base64 - const codeChallenge = unencodedCodeChallenge - .split('=')[0] - .replace(/\+/g, '-') - .replace(/\//g, '_'); - return { codeVerifier, codeChallenge }; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; + + +/***/ }), + +/***/ 5031: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var json_stringify = (__nccwpck_require__(8574).stringify); +var json_parse = __nccwpck_require__(9099); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify } - getToken(codeOrOptions, callback) { - const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; - if (callback) { - this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); - } - else { - return this.getTokenAsync(options); - } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; + + +/***/ }), + +/***/ 9099: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. +*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. 
+ + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; } - async getTokenAsync(options) { - const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_; - const values = { - code: options.code, - client_id: options.client_id || this._clientId, - client_secret: this._clientSecret, - redirect_uri: options.redirect_uri || this.redirectUri, - grant_type: 'authorization_code', - code_verifier: options.codeVerifier, - }; - const res = await this.transporter.request({ - method: 'POST', - url, - data: querystring.stringify(values), - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - }); - const tokens = res.data; - if (res.data && res.data.expires_in) { - tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res }; + if (options.storeAsString === true) { + _options.storeAsString = true; } - /** - * Refreshes the access token. - * @param refresh_token Existing refresh token. - * @private - */ - async refreshToken(refreshToken) { - if (!refreshToken) { - return this.refreshTokenNoCache(refreshToken); - } - // If a request to refresh using the same token has started, - // return the same promise. - if (this.refreshTokenPromises.has(refreshToken)) { - return this.refreshTokenPromises.get(refreshToken); - } - const p = this.refreshTokenNoCache(refreshToken).then(r => { - this.refreshTokenPromises.delete(refreshToken); - return r; - }, e => { - this.refreshTokenPromises.delete(refreshToken); - throw e; - }); - this.refreshTokenPromises.set(refreshToken, p); - return p; + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? 
options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } } - async refreshTokenNoCache(refreshToken) { - var _a; - if (!refreshToken) { - throw new Error('No refresh token is set.'); - } - const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_; - const data = { - refresh_token: refreshToken, - client_id: this._clientId, - client_secret: this._clientSecret, - grant_type: 'refresh_token', - }; - let res; - try { - // request for new token - res = await this.transporter.request({ - method: 'POST', - url, - data: querystring.stringify(data), - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - }); - } - catch (e) { - if (e instanceof gaxios_1.GaxiosError && - e.message === 'invalid_grant' && - ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && - /ReAuth/i.test(e.response.data.error_description)) { - e.message = JSON.stringify(e.response.data); - } - throw e; - } - const tokens = res.data; - // TODO: de-duplicate this code from a few spots - if (res.data && res.data.expires_in) { - tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res }; + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } } - refreshAccessToken(callback) { - if (callback) { - this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); - } - else { - return this.refreshAccessTokenAsync(); + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. 
+ + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; } - } - async refreshAccessTokenAsync() { - const r = await this.refreshToken(this.credentials.refresh_token); - const tokens = r.tokens; - tokens.refresh_token = this.credentials.refresh_token; - this.credentials = tokens; - return { credentials: this.credentials, res: r.res }; - } - getAccessToken(callback) { - if (callback) { - this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); } - else { - return this.getAccessTokenAsync(); + while (ch >= '0' && ch <= '9') { + string += ch; + next(); } - } - async getAccessTokenAsync() { - const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); - if (shouldRefresh) { - if (!this.credentials.refresh_token) { - if (this.refreshHandler) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - return { token: this.credentials.access_token }; - } - } - else { - throw new Error('No refresh token or refresh handler callback is set.'); + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(7558); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; } - const r = await this.refreshAccessTokenAsync(); - if (!r.credentials || (r.credentials && !r.credentials.access_token)) { - throw new Error('Could not refresh access token.'); - } - return { token: r.credentials.access_token, res: r.res }; + startAt = at; + } } - else { - return { token: this.credentials.access_token }; + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. 
+ + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array } - } - /** - * The main authentication interface. It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * In OAuth2Client, the result has the form: - * { Authorization: 'Bearer ' } - * @param url The optional url being authorized - */ - async getRequestHeaders(url) { - const headers = (await this.getRequestMetadataAsync(url)).headers; - return headers; - } - async getRequestMetadataAsync( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - url) { - const thisCreds = this.credentials; - if (!thisCreds.access_token && - !thisCreds.refresh_token && - !this.apiKey && - !this.refreshHandler) { - throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); } - if (thisCreds.access_token && !this.isTokenExpiring()) { - thisCreds.token_type = thisCreds.token_type || 'Bearer'; - const headers = { - Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers) }; + } + error('Bad array'); + }, + object = function () { + // Parse an object value. + + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object } - // If refreshHandler exists, call processAndValidateRefreshHandler(). - if (this.refreshHandler) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - const headers = { - Authorization: 'Bearer ' + this.credentials.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers) }; + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); } - } - if (this.apiKey) { - return { headers: { 'X-Goog-Api-Key': this.apiKey } }; - } - let r = null; - let tokens = null; - try { - r = await this.refreshToken(thisCreds.refresh_token); - tokens = r.tokens; - } - catch (err) { - const e = err; - if (e.response && - (e.response.status === 403 || e.response.status === 404)) { - e.message = `Could not refresh access token: ${e.message}`; + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); } - throw e; + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); } - const credentials = this.credentials; - credentials.token_type = credentials.token_type || 'Bearer'; - tokens.refresh_token = credentials.refresh_token; - this.credentials = tokens; - const headers = { - Authorization: credentials.token_type + ' ' + tokens.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; - } - /** - * Generates an URL to revoke the given token. - * @param token The existing token to be revoked. - */ - static getRevokeTokenUrl(token) { - const parameters = querystring.stringify({ token }); - return `${OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_}?${parameters}`; + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); } - revokeToken(token, callback) { - const opts = { - url: OAuth2Client.getRevokeTokenUrl(token), - method: 'POST', - }; - if (callback) { - this.transporter - .request(opts) - .then(r => callback(null, r), callback); - } - else { - return this.transporter.request(opts); - } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); } - revokeCredentials(callback) { - if (callback) { - this.revokeCredentialsAsync().then(res => callback(null, res), callback); - } - else { - return this.revokeCredentialsAsync(); - } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. 
+ + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; + + +/***/ }), + +/***/ 8574: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = __nccwpck_require__(7558); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. 
+ + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; } - async revokeCredentialsAsync() { - const token = this.credentials.access_token; - this.credentials = {}; - if (token) { - return this.revokeToken(token); - } - else { - throw new Error('No access token to revoke.'); - } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? 
'"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; } - request(opts, callback) { - if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); } - else { - return this.requestAsync(opts); + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); } - } - async requestAsync(opts, retry = false) { - let r2; - try { - const r = await this.getRequestMetadataAsync(opts.url); - opts.headers = opts.headers || {}; - if (r.headers && r.headers['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; - } - if (r.headers && r.headers.Authorization) { - opts.headers.Authorization = r.headers.Authorization; + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); } - if (this.apiKey) { - opts.headers['X-Goog-Api-Key'] = this.apiKey; + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; } - r2 = await this.transporter.request(opts); - } - catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - An access_token and refresh_token were available, but either no - // expiry_date was available or the forceRefreshOnFailure flag is set. - // The absent expiry_date case can happen when developers stash the - // access_token and refresh_token for later use, but the access_token - // fails on the first try because it's expired. Some developers may - // choose to enable forceRefreshOnFailure to mitigate time-related - // errors. - // Or the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. 
- // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - No refresh_token was available - // - An access_token and a refreshHandler callback were available, but - // either no expiry_date was available or the forceRefreshOnFailure - // flag is set. The access_token fails on the first try because it's - // expired. Some developers may choose to enable forceRefreshOnFailure - // to mitigate time-related errors. - const mayRequireRefresh = this.credentials && - this.credentials.access_token && - this.credentials.refresh_token && - (!this.credentials.expiry_date || this.forceRefreshOnFailure); - const mayRequireRefreshWithNoRefreshToken = this.credentials && - this.credentials.access_token && - !this.credentials.refresh_token && - (!this.credentials.expiry_date || this.forceRefreshOnFailure) && - this.refreshHandler; - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) { - await this.refreshAccessTokenAsync(); - return this.requestAsync(opts, true); + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; } - else if (!retry && - isAuthErr && - !isReadableStream && - mayRequireRefreshWithNoRefreshToken) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } } - return this.requestAsync(opts, true); } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); } - throw e; - } - return r2; - } - verifyIdToken(options, callback) { - // This function used to accept two arguments instead of an options object. - // Check the types to help users upgrade with less pain. - // This check can be removed after a 2.0 release. 
- if (callback && typeof callback !== 'function') { - throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); - } - if (callback) { - this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); - } - else { - return this.verifyIdTokenAsync(options); - } - } - async verifyIdTokenAsync(options) { - if (!options.idToken) { - throw new Error('The verifyIdToken method requires an ID Token'); - } - const response = await this.getFederatedSignonCertsAsync(); - const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, OAuth2Client.ISSUERS_, options.maxExpiry); - return login; - } - /** - * Obtains information about the provisioned access token. Especially useful - * if you want to check the scopes that were provisioned to a given token. - * - * @param accessToken Required. The Access Token for which you want to get - * user info. - */ - async getTokenInfo(accessToken) { - const { data } = await this.transporter.request({ - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - Authorization: `Bearer ${accessToken}`, - }, - url: OAuth2Client.GOOGLE_TOKEN_INFO_URL, - }); - const info = Object.assign({ - expiry_date: new Date().getTime() + data.expires_in * 1000, - scopes: data.scope.split(' '), - }, data); - delete info.expires_in; - delete info.scope; - return info; - } - getFederatedSignonCerts(callback) { - if (callback) { - this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); - } - else { - return this.getFederatedSignonCertsAsync(); + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; } } - async getFederatedSignonCertsAsync() { - const nowTime = new Date().getTime(); - const format = (0, crypto_1.hasBrowserCrypto)() - ? CertificateFormat.JWK - : CertificateFormat.PEM; - if (this.certificateExpiry && - nowTime < this.certificateExpiry.getTime() && - this.certificateCacheFormat === format) { - return { certs: this.certificateCache, format }; - } - let res; - let url; - switch (format) { - case CertificateFormat.PEM: - url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_; - break; - case CertificateFormat.JWK: - url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_; - break; - default: - throw new Error(`Unsupported certificate format ${format}`); - } - try { - res = await this.transporter.request({ url }); - } - catch (e) { - if (e instanceof Error) { - e.message = `Failed to retrieve verification certificates: ${e.message}`; + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. 
+ + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; } - throw e; - } - const cacheControl = res ? res.headers['cache-control'] : undefined; - let cacheAge = -1; - if (cacheControl) { - const pattern = new RegExp('max-age=([0-9]*)'); - const regexResult = pattern.exec(cacheControl); - if (regexResult && regexResult.length === 2) { - // Cache results with max-age (in seconds) - cacheAge = Number(regexResult[1]) * 1000; // milliseconds + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); } - } - let certificates = {}; - switch (format) { - case CertificateFormat.PEM: - certificates = res.data; - break; - case CertificateFormat.JWK: - for (const key of res.data.keys) { - certificates[key.kid] = key; - } - break; - default: - throw new Error(`Unsupported certificate format ${format}`); - } - const now = new Date(); - this.certificateExpiry = - cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); - this.certificateCache = certificates; - this.certificateCacheFormat = format; - return { certs: certificates, format, res }; + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } +}()); + + +/***/ }), + +/***/ 6010: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var bufferEqual = __nccwpck_require__(9239); +var Buffer = (__nccwpck_require__(1867).Buffer); +var crypto = __nccwpck_require__(6113); +var formatEcdsa = __nccwpck_require__(1728); +var util = __nccwpck_require__(3837); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
+var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. 
+ var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; + + +/***/ }), + +/***/ 4636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(3334); +var VerifyStream = __nccwpck_require__(5522); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; + + +/***/ }), + +/***/ 1868: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { + +/*global module, process*/ +var Buffer = (__nccwpck_require__(1867).Buffer); +var Stream = __nccwpck_require__(2781); +var util = __nccwpck_require__(3837); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; + + +/***/ }), + +/***/ 3334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(1867).Buffer); +var DataStream = __nccwpck_require__(1868); +var jwa = __nccwpck_require__(6010); +var Stream = __nccwpck_require__(2781); +var toString = __nccwpck_require__(5292); +var util = __nccwpck_require__(3837); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + 
+SignStream.sign = jwsSign; + +module.exports = SignStream; + + +/***/ }), + +/***/ 5292: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(4300).Buffer); + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; + + +/***/ }), + +/***/ 5522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(1867).Buffer); +var DataStream = __nccwpck_require__(1868); +var jwa = __nccwpck_require__(6010); +var Stream = __nccwpck_require__(2781); +var toString = __nccwpck_require__(5292); +var util = __nccwpck_require__(3837); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + 
return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; + + +/***/ }), + +/***/ 7426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = __nccwpck_require__(3765) + + +/***/ }), + +/***/ 3583: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __nccwpck_require__(7426) +var extname = (__nccwpck_require__(1017).extname) + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. 
+ * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return } - getIapPublicKeys(callback) { - if (callback) { - this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); - } - else { - return this.getIapPublicKeysAsync(); + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue } + } + + // set the extension -> mime + types[extension] = type } - async getIapPublicKeysAsync() { - let res; - const url = OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_; - try { - res = await this.transporter.request({ url }); - } - catch (e) { - if (e instanceof Error) { - e.message = `Failed to retrieve verification certificates: ${e.message}`; - } - throw e; - } - return { pubkeys: res.data, res }; + }) +} + + +/***/ }), + +/***/ 6038: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' 
+ ); + } + + this._types[ext] = type; } - verifySignedJwtWithCerts() { - // To make the code compatible with browser SubtleCrypto we need to make - // this method async. - throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); } - /** - * Verify the id token is signed with the correct certificate - * and is from the correct audience. - * @param jwt The jwt to verify (The ID Token in this case). - * @param certs The array of certs to test the jwt against. - * @param requiredAudience The audience to test the jwt against. - * @param issuers The allowed issuers of the jwt (Optional). - * @param maxExpiry The max expiry the certificate can be (Optional). - * @return Returns a promise resolving to LoginTicket on verification. - */ - async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { - const crypto = (0, crypto_1.createCrypto)(); - if (!maxExpiry) { - maxExpiry = OAuth2Client.MAX_TOKEN_LIFETIME_SECS_; - } - const segments = jwt.split('.'); - if (segments.length !== 3) { - throw new Error('Wrong number of segments in token: ' + jwt); - } - const signed = segments[0] + '.' + segments[1]; - let signature = segments[2]; - let envelope; - let payload; - try { - envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); - } - catch (err) { - if (err instanceof Error) { - err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; - } - throw err; - } - if (!envelope) { - throw new Error("Can't parse token envelope: " + segments[0]); - } - try { - payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); - } - catch (err) { - if (err instanceof Error) { - err.message = `Can't parse token payload '${segments[0]}`; - } - throw err; - } - if (!payload) { - throw new Error("Can't parse token payload: " + segments[1]); - } - if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { - // If this is not present, then there's no reason to attempt verification - throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); - } - const cert = certs[envelope.kid]; - if (envelope.alg === 'ES256') { - signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); - } - const verified = await crypto.verify(cert, signed, signature); - if (!verified) { - throw new Error('Invalid token signature: ' + jwt); - } - if (!payload.iat) { - throw new Error('No issue time in token: ' + JSON.stringify(payload)); - } - if (!payload.exp) { - throw new Error('No expiration time in token: ' + JSON.stringify(payload)); - } - const iat = Number(payload.iat); - if (isNaN(iat)) - throw new Error('iat field using invalid format'); - const exp = Number(payload.exp); - if (isNaN(exp)) - throw new Error('exp field using invalid format'); - const now = new Date().getTime() / 1000; - if (exp >= now + maxExpiry) { - throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); - } - const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; - const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; - if (now < earliest) { - throw new Error('Token used too early, ' + - now + - ' < ' + - earliest + - ': ' + - JSON.stringify(payload)); - } - if (now > latest) { - throw new Error('Token used too late, ' + - now + - ' > ' + - latest + - ': ' + - JSON.stringify(payload)); - } - if (issuers && 
issuers.indexOf(payload.iss) < 0) { - throw new Error('Invalid issuer, expected one of [' + - issuers + - '], but got ' + - payload.iss); - } - // Check the audience matches if we have one - if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { - const aud = payload.aud; - let audVerified = false; - // If the requiredAudience is an array, check if it contains token - // audience - if (requiredAudience.constructor === Array) { - audVerified = requiredAudience.indexOf(aud) > -1; - } - else { - audVerified = aud === requiredAudience; - } - if (!audVerified) { - throw new Error('Wrong recipient, payload audience != requiredAudience'); - } - } - return new loginticket_1.LoginTicket(envelope, payload); + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; + + +/***/ }), + +/***/ 9994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +let Mime = __nccwpck_require__(6038); +module.exports = new Mime(__nccwpck_require__(3114), __nccwpck_require__(8809)); + + +/***/ }), + +/***/ 8809: +/***/ ((module) => { + +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"applic
ation/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"]
,"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"appli
cation/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":[
"pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"appli
cation/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-mssched
ule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"]
,"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + +/***/ }), + +/***/ 3114: +/***/ ((module) => { + +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + +/***/ }), + +/***/ 900: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + +/***/ }), + +/***/ 467: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(__nccwpck_require__(2781)); +var http = _interopDefault(__nccwpck_require__(3685)); +var Url = _interopDefault(__nccwpck_require__(7310)); +var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); +var https = _interopDefault(__nccwpck_require__(5687)); +var zlib = _interopDefault(__nccwpck_require__(9796)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, 
systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = (__nccwpck_require__(2877).convert); +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports["default"] = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), + +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + 
Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 7684: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(5185); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = new Queue(); + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.size > 0) { + queue.dequeue()(); + } + }; + + const run = async (fn, resolve, ...args) => { + activeCount++; + + const result = (async () => fn(...args))(); + + resolve(result); + + try { + await result; + } catch {} + + next(); + }; + + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); + + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. + await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); + } + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); + }); + + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); + + return generator; +}; + +module.exports = pLimit; + + +/***/ }), + +/***/ 7214: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) } - /** - * Returns a promise that resolves with AccessTokenResponse type if - * refreshHandler is defined. - * If not, nothing is returned. - */ - async processAndValidateRefreshHandler() { - if (this.refreshHandler) { - const accessTokenResponse = await this.refreshHandler(); - if (!accessTokenResponse.access_token) { - throw new Error('No access token is returned by the refreshHandler callback.'); - } - return accessTokenResponse; - } - return; + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); } - /** - * Returns true if a token is expired or will expire within - * eagerRefreshThresholdMillismilliseconds. - * If there is no expiry time, assumes the token is not expired or expiring. 
- */ - isTokenExpiring() { - const expiryDate = this.credentials.expiry_date; - return expiryDate - ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis - : false; + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; } + } else { + return `of ${thing} ${String(expected)}`; + } } -exports.OAuth2Client = OAuth2Client; -OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; -/** - * The base URL for auth endpoints. - */ -OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_ = 'https://accounts.google.com/o/oauth2/v2/auth'; -/** - * The base endpoint for token retrieval. - */ -OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_ = 'https://oauth2.googleapis.com/token'; -/** - * The base endpoint to revoke tokens. - */ -OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_ = 'https://oauth2.googleapis.com/revoke'; -/** - * Google Sign on certificates in PEM format. - */ -OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v1/certs'; -/** - * Google Sign on certificates in JWK format. - */ -OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v3/certs'; -/** - * Google Sign on certificates in JWK format. - */ -OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_ = 'https://www.gstatic.com/iap/verify/public_key'; -/** - * Clock skew - five minutes in seconds - */ -OAuth2Client.CLOCK_SKEW_SECS_ = 300; -/** - * Max Token Lifetime is one day in seconds - */ -OAuth2Client.MAX_TOKEN_LIFETIME_SECS_ = 86400; -/** - * The allowed oauth token issuers. - */ -OAuth2Client.ISSUERS_ = [ - 'accounts.google.com', - 'https://accounts.google.com', -]; -//# sourceMappingURL=oauth2client.js.map + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.q = codes; + /***/ }), -/***/ 9510: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2021 Google LLC +// Copyright Joyent, Inc. and other Node contributors. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// http://www.apache.org/licenses/LICENSE-2.0 +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; -const querystring = __nccwpck_require__(3477); -const crypto_1 = __nccwpck_require__(8043); -/** List of HTTP methods that accept request bodies. */ -const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; -/** - * Abstract class for handling client authentication in OAuth-based - * operations. - * When request-body client authentication is used, only application/json and - * application/x-www-form-urlencoded content types for HTTP methods that support - * request bodies are supported. - */ -class OAuthClientAuthHandler { - /** - * Instantiates an OAuth client authentication handler. - * @param clientAuthentication The client auth credentials. - */ - constructor(clientAuthentication) { - this.clientAuthentication = clientAuthentication; - this.crypto = (0, crypto_1.createCrypto)(); - } - /** - * Applies client authentication on the OAuth request's headers or POST - * body but does not process the request. - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - * @param bearerToken The optional bearer token to use for authentication. - * When this is used, no client authentication credentials are needed. - */ - applyClientAuthenticationOptions(opts, bearerToken) { - // Inject authenticated header. - this.injectAuthenticatedHeaders(opts, bearerToken); - // Inject authenticated request body. - if (!bearerToken) { - this.injectAuthenticatedRequestBody(opts); - } +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
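// --- Illustrative aside (a minimal sketch, not part of the bundled readable-stream code):
// the same "readable plus writable" idea expressed with Node's public 'stream' API.
// The class and variable names below are invented for this example only.
// The readable half emits a fixed list of chunks; the writable half only counts the
// bytes it receives. The two halves are independent, which is what distinguishes a
// Duplex stream from a Transform stream.
const { Duplex: StdDuplex } = require('stream');

class CountingDuplex extends StdDuplex {
  constructor(options) {
    super(options);
    this.pending = ['a', 'b', 'c'];
    this.bytesWritten = 0;
  }
  _read() {
    // Emit the next queued chunk, or signal EOF on the readable side with push(null).
    this.push(this.pending.length ? this.pending.shift() : null);
  }
  _write(chunk, encoding, callback) {
    // Writable half: record how many bytes were handed to us, then ack the write.
    this.bytesWritten += chunk.length;
    callback();
  }
}

const example = new CountingDuplex();
example.on('data', chunk => console.log('readable side:', chunk.toString()));
example.end('hello', () => console.log('writable side received', example.bytesWritten, 'bytes'));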
+ + + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +const Readable = __nccwpck_require__(1433); +const Writable = __nccwpck_require__(6993); +__nccwpck_require__(4124)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + const keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + const method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); } - /** - * Applies client authentication on the request's header if either - * basic authentication or bearer token authentication is selected. - * - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - * @param bearerToken The optional bearer token to use for authentication. - * When this is used, no client authentication credentials are needed. - */ - injectAuthenticatedHeaders(opts, bearerToken) { - var _a; - // Bearer token prioritized higher than basic Auth. - if (bearerToken) { - opts.headers = opts.headers || {}; - Object.assign(opts.headers, { - Authorization: `Bearer ${bearerToken}}`, - }); - } - else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { - opts.headers = opts.headers || {}; - const clientId = this.clientAuthentication.clientId; - const clientSecret = this.clientAuthentication.clientSecret || ''; - const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); - Object.assign(opts.headers, { - Authorization: `Basic ${base64EncodedCreds}`, - }); - } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; } - /** - * Applies client authentication on the request's body if request-body - * client authentication is selected. - * - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - */ - injectAuthenticatedRequestBody(opts) { - var _a; - if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { - const method = (opts.method || 'GET').toUpperCase(); - // Inject authenticated request body. - if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { - // Get content-type. - let contentType; - const headers = opts.headers || {}; - for (const key in headers) { - if (key.toLowerCase() === 'content-type' && headers[key]) { - contentType = headers[key].toLowerCase(); - break; - } - } - if (contentType === 'application/x-www-form-urlencoded') { - opts.data = opts.data || ''; - const data = querystring.parse(opts.data); - Object.assign(data, { - client_id: this.clientAuthentication.clientId, - client_secret: this.clientAuthentication.clientSecret || '', - }); - opts.data = querystring.stringify(data); - } - else if (contentType === 'application/json') { - opts.data = opts.data || {}; - Object.assign(opts.data, { - client_id: this.clientAuthentication.clientId, - client_secret: this.clientAuthentication.clientSecret || '', - }); - } - else { - throw new Error(`${contentType} content-types are not supported with ` + - `${this.clientAuthentication.confidentialClientType} ` + - 'client authentication'); - } - } - else { - throw new Error(`${method} HTTP method does not support ` + - `${this.clientAuthentication.confidentialClientType} ` + - 'client authentication'); - } - } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 1542: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +const Transform = __nccwpck_require__(4415); +__nccwpck_require__(4124)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 1433: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +const EE = (__nccwpck_require__(2361).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(2387); +/**/ + +const Buffer = (__nccwpck_require__(4300).Buffer); +const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +const debugUtil = __nccwpck_require__(3837); +let debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; } -exports.OAuthClientAuthHandler = OAuthClientAuthHandler; -/** - * Converts an OAuth error response to a native JavaScript Error. - * @param resp The OAuth error response to convert to a native Error object. - * @param err The optional original error. If provided, the error properties - * will be copied to the new error. - * @return The converted native Error object. 
- */ -function getErrorFromOAuthErrorResponse(resp, err) { - // Error response. - const errorCode = resp.error; - const errorDescription = resp.error_description; - const errorUri = resp.error_uri; - let message = `Error code ${errorCode}`; - if (typeof errorDescription !== 'undefined') { - message += `: ${errorDescription}`; - } - if (typeof errorUri !== 'undefined') { - message += ` - ${errorUri}`; - } - const newError = new Error(message); - // Copy properties from original error to newly generated error. - if (err) { - const keys = Object.keys(err); - if (err.stack) { - // Copy error.stack if available. - keys.push('stack'); - } - keys.forEach(key => { - // Do not overwrite the message field. - if (key !== 'message') { - Object.defineProperty(newError, key, { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - value: err[key], - writable: false, - enumerable: true, - }); - } - }); - } - return newError; +/**/ + +const BufferList = __nccwpck_require__(6522); +const destroyImpl = __nccwpck_require__(7049); +const _require = __nccwpck_require__(9948), + getHighWaterMark = _require.getHighWaterMark; +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +let StringDecoder; +let createReadableStreamAsyncIterator; +let from; +__nccwpck_require__(4124)(Readable, Stream); +const errorOrDestroy = destroyImpl.errorOrDestroy; +const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } -exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; -//# sourceMappingURL=oauth2common.js.map +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(1359); + options = options || {}; -/***/ }), + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; -/***/ 4782: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; -"use strict"; + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PluggableAuthClient = exports.ExecutableError = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const executable_response_1 = __nccwpck_require__(8749); -const pluggable_auth_handler_1 = __nccwpck_require__(8941); -/** - * Error thrown from the executable run by PluggableAuthClient. - */ -class ExecutableError extends Error { - constructor(message, code) { - super(`The executable failed with exit code: ${code} and error message: ${message}.`); - this.code = code; - Object.setPrototypeOf(this, new.target.prototype); - } + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } } -exports.ExecutableError = ExecutableError; -/** - * The default executable timeout when none is provided, in milliseconds. - */ -const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; -/** - * The minimum allowed executable timeout in milliseconds. - */ -const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; -/** - * The maximum allowed executable timeout in milliseconds. - */ -const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; -/** - * The environment variable to check to see if executable can be run. - * Value must be set to '1' for the executable to run. - */ -const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; -/** - * The maximum currently supported executable version. - */ -const MAXIMUM_EXECUTABLE_VERSION = 1; -/** - * PluggableAuthClient enables the exchange of workload identity pool external credentials for - * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These - * scripts/executables are completely independent of the Google Cloud Auth libraries. These - * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token - * to be exchanged for a Google access token. - * - *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable - * must be set to '1'. This is for security reasons. - * - *

Both OIDC and SAML are supported. The executable must adhere to a specific response format - * defined below. - * - *

The executable must print out the 3rd party token to STDOUT in JSON format. When an - * output_file is specified in the credential configuration, the executable must also handle writing the - * JSON response to this file. - * - *

- * OIDC response sample:
- * {
- *   "version": 1,
- *   "success": true,
- *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
- *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
- *   "expiration_time": 1620433341
- * }
- *
- * SAML2 response sample:
- * {
- *   "version": 1,
- *   "success": true,
- *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
- *   "saml_response": "...",
- *   "expiration_time": 1620433341
- * }
- *
- * Error response sample:
- * {
- *   "version": 1,
- *   "success": false,
- *   "code": "401",
- *   "message": "Error message."
- * }
- * 
- * - *

The "expiration_time" field in the JSON response is only required for successful - * responses when an output file was specified in the credential configuration - * - *

The auth libraries will populate certain environment variables that will be accessible by the - * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE, - * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and - * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE. - * - *
Please see this repositories README for a complete executable request/response specification. - */ -class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { - /** - * Instantiates a PluggableAuthClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid pluggable auth credential. - * @param options The external account options object typically loaded from - * the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. - */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - if (!options.credential_source.executable) { - throw new Error('No valid Pluggable Auth "credential_source" provided.'); - } - this.command = options.credential_source.executable.command; - if (!this.command) { - throw new Error('No valid Pluggable Auth "credential_source" provided.'); - } - // Check if the provided timeout exists and if it is valid. - if (options.credential_source.executable.timeout_millis === undefined) { - this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; - } - else { - this.timeoutMillis = options.credential_source.executable.timeout_millis; - if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || - this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { - throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + - `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); - } - } - this.outputFile = options.credential_source.executable.output_file; - this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ - command: this.command, - timeoutMillis: this.timeoutMillis, - outputFile: this.outputFile, - }); +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(1359); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + const isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + if (this._readableState === undefined) { + return false; } - /** - * Triggered when an external subject token is needed to be exchanged for a - * GCP access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this calls a user provided executable which returns the subject token. - * The logic is summarized as: - * 1. Validated that the executable is allowed to run. The - * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to - * 1 for security reasons. - * 2. If an output file is specified by the user, check the file location - * for a response. If the file exists and contains a valid response, - * return the subject token from the file. - * 3. 
Call the provided executable and return response. - * @return A promise that resolves with the external subject token. - */ - async retrieveSubjectToken() { - // Check if the executable is allowed to run. - if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { - throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + - 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + - 'Variable to 1.'); - } - let executableResponse = undefined; - // Try to get cached executable response from output file. - if (this.outputFile) { - executableResponse = await this.handler.retrieveCachedResponse(); - } - // If no response from output file, call the executable. - if (!executableResponse) { - // Set up environment map with required values for the executable. - const envMap = new Map(); - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); - // Always set to 0 because interactive mode is not supported. - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); - if (this.outputFile) { - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); - } - const serviceAccountEmail = this.getServiceAccountEmail(); - if (serviceAccountEmail) { - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); - } - executableResponse = await this.handler.retrieveResponseFromExecutable(envMap); - } - if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { - throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); - } - // Check that response was successful. - if (!executableResponse.success) { - throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); - } - // Check that response contains expiration time if output file was specified. - if (this.outputFile) { - if (!executableResponse.expirationTime) { - throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); - } - } - // Check that response is not expired. - if (executableResponse.isExpired()) { - throw new Error('Executable response is expired.'); + return this._readableState.destroyed; + }, + set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); } - // Return subject token from response. - return executableResponse.subjectToken; + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); + const decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + let p = this._readableState.buffer.head; + let content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +const MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; } -exports.PluggableAuthClient = PluggableAuthClient; -//# sourceMappingURL=pluggable-auth-client.js.map -/***/ }), +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} -/***/ 8941: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; -"use strict"; + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PluggableAuthHandler = void 0; -const pluggable_auth_client_1 = __nccwpck_require__(4782); -const executable_response_1 = __nccwpck_require__(8749); -const childProcess = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -/** - * A handler used to retrieve 3rd party token responses from user defined - * executables and cached file output for the PluggableAuthClient class. - */ -class PluggableAuthHandler { - /** - * Instantiates a PluggableAuthHandler instance using the provided - * PluggableAuthHandlerOptions object. - */ - constructor(options) { - if (!options.command) { - throw new Error('No command provided.'); - } - this.commandComponents = PluggableAuthHandler.parseCommand(options.command); - this.timeoutMillis = options.timeoutMillis; - if (!this.timeoutMillis) { - throw new Error('No timeoutMillis provided.'); - } - this.outputFile = options.outputFile; - } - /** - * Calls user provided executable to get a 3rd party subject token and - * returns the response. - * @param envMap a Map of additional Environment Variables required for - * the executable. - * @return A promise that resolves with the executable response. - */ - retrieveResponseFromExecutable(envMap) { - return new Promise((resolve, reject) => { - // Spawn process to run executable using added environment variables. - const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { - env: { ...process.env, ...Object.fromEntries(envMap) }, - }); - let output = ''; - // Append stdout to output as executable runs. - child.stdout.on('data', (data) => { - output += data; - }); - // Append stderr as executable runs. - child.stderr.on('data', (err) => { - output += err; - }); - // Set up a timeout to end the child process and throw an error. - const timeout = setTimeout(() => { - // Kill child process and remove listeners so 'close' event doesn't get - // read after child process is killed. - child.removeAllListeners(); - child.kill(); - return reject(new Error('The executable failed to finish within the timeout specified.')); - }, this.timeoutMillis); - child.on('close', (code) => { - // Cancel timeout if executable closes before timeout is reached. - clearTimeout(timeout); - if (code === 0) { - // If the executable completed successfully, try to return the parsed response. - try { - const responseJson = JSON.parse(output); - const response = new executable_response_1.ExecutableResponse(responseJson); - return resolve(response); - } - catch (error) { - if (error instanceof executable_response_1.ExecutableResponseError) { - return reject(error); - } - return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); - } - } - else { - return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); - } - }); - }); - } - /** - * Checks user provided output file for response from previous run of - * executable and return the response if it exists, is formatted correctly, and is not expired. - */ - async retrieveCachedResponse() { - if (!this.outputFile || this.outputFile.length === 0) { - return undefined; - } - let filePath; - try { - filePath = await fs.promises.realpath(this.outputFile); - } - catch (_a) { - // If file path cannot be resolved, return undefined. - return undefined; - } - if (!(await fs.promises.lstat(filePath)).isFile()) { - // If path does not lead to file, return undefined. 
- return undefined; - } - const responseString = await fs.promises.readFile(filePath, { - encoding: 'utf8', - }); - if (responseString === '') { - return undefined; - } - try { - const responseJson = JSON.parse(responseString); - const response = new executable_response_1.ExecutableResponse(responseJson); - // Check if response is successful and unexpired. - if (response.isValid()) { - return new executable_response_1.ExecutableResponse(responseJson); - } - return undefined; - } - catch (error) { - if (error instanceof executable_response_1.ExecutableResponseError) { - throw error; - } - throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); - } + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. 
+ if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; } - /** - * Parses given command string into component array, splitting on spaces unless - * spaces are between quotation marks. - */ - static parseCommand(command) { - // Split the command into components by splitting on spaces, - // unless spaces are contained in quotation marks. - const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); - if (!components) { - throw new Error(`Provided command: "${command}" could not be parsed.`); - } - // Remove quotation marks from the beginning and end of each component if they are present. - for (let i = 0; i < components.length; i++) { - if (components[i][0] === '"' && components[i].slice(-1) === '"') { - components[i] = components[i].slice(1, -1); - } - } - return components; + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); } + } } -exports.PluggableAuthHandler = PluggableAuthHandler; -//# sourceMappingURL=pluggable-auth-handler.js.map -/***/ }), +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } -/***/ 8790: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} -"use strict"; +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. 
+ // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + const len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} -// Copyright 2015 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UserRefreshClient = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -class UserRefreshClient extends oauth2client_1.OAuth2Client { - constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { - const opts = optionsOrClientId && typeof optionsOrClientId === 'object' - ? optionsOrClientId - : { - clientId: optionsOrClientId, - clientSecret, - refreshToken, - eagerRefreshThresholdMillis, - forceRefreshOnFailure, - }; - super({ - clientId: opts.clientId, - clientSecret: opts.clientSecret, - eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis, - forceRefreshOnFailure: opts.forceRefreshOnFailure, - }); - this._refreshToken = opts.refreshToken; - this.credentials.refresh_token = opts.refreshToken; - } - /** - * Refreshes the access token. - * @param refreshToken An ignored refreshToken.. - * @param callback Optional callback. - */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - return super.refreshTokenNoCache(this._refreshToken); - } - /** - * Create a UserRefreshClient credentials instance using the given input - * options. - * @param json The input object. 
- */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the user refresh token'); - } - if (json.type !== 'authorized_user') { - throw new Error('The incoming JSON object does not have the "authorized_user" type'); - } - if (!json.client_id) { - throw new Error('The incoming JSON object does not contain a client_id field'); - } - if (!json.client_secret) { - throw new Error('The incoming JSON object does not contain a client_secret field'); - } - if (!json.refresh_token) { - throw new Error('The incoming JSON object does not contain a refresh_token field'); - } - this._clientId = json.client_id; - this._clientSecret = json.client_secret; - this._refreshToken = json.refresh_token; - this.credentials.refresh_token = json.refresh_token; - this.quotaProjectId = json.quota_project_id; - } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); - } - else { - return this.fromStreamAsync(inputStream); - } +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } } - async fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - return reject(new Error('Must pass in a stream containing the user refresh token.')); - } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - return resolve(); - } - catch (err) { - return reject(err); - } - }); - }); + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. 
+ var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); } -} -exports.UserRefreshClient = UserRefreshClient; -//# sourceMappingURL=refreshclient.js.map + } -/***/ }), + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } -/***/ 6308: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); -"use strict"; + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.StsCredentials = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const querystring = __nccwpck_require__(3477); -const transporters_1 = __nccwpck_require__(2649); -const oauth2common_1 = __nccwpck_require__(9510); -/** - * Implements the OAuth 2.0 token exchange based on - * https://tools.ietf.org/html/rfc8693 - */ -class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { - /** - * Initializes an STS credentials instance. - * @param tokenExchangeEndpoint The token exchange endpoint. - * @param clientAuthentication The client authentication credentials if - * available. - */ - constructor(tokenExchangeEndpoint, clientAuthentication) { - super(clientAuthentication); - this.tokenExchangeEndpoint = tokenExchangeEndpoint; - this.transporter = new transporters_1.DefaultTransporter(); - } - /** - * Exchanges the provided token for another type of token based on the - * rfc8693 spec. - * @param stsCredentialsOptions The token exchange options used to populate - * the token exchange request. - * @param additionalHeaders Optional additional headers to pass along the - * request. - * @param options Optional additional GCP-specific non-spec defined options - * to send with the request. - * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` - * @return A promise that resolves with the token exchange response containing - * the requested token and its expiration time. - */ - async exchangeToken(stsCredentialsOptions, additionalHeaders, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - options) { - var _a, _b, _c; - const values = { - grant_type: stsCredentialsOptions.grantType, - resource: stsCredentialsOptions.resource, - audience: stsCredentialsOptions.audience, - scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), - requested_token_type: stsCredentialsOptions.requestedTokenType, - subject_token: stsCredentialsOptions.subjectToken, - subject_token_type: stsCredentialsOptions.subjectTokenType, - actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, - actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, - // Non-standard GCP-specific options. - options: options && JSON.stringify(options), - }; - // Remove undefined fields. - Object.keys(values).forEach(key => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if (typeof values[key] === 'undefined') { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - delete values[key]; - } - }); - const headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - }; - // Inject additional STS headers if available. - Object.assign(headers, additionalHeaders || {}); - const opts = { - url: this.tokenExchangeEndpoint, - method: 'POST', - headers, - data: querystring.stringify(values), - responseType: 'json', - }; - // Apply OAuth client authentication. - this.applyClientAuthenticationOptions(opts); - try { - const response = await this.transporter.request(opts); - // Successful response. - const stsSuccessfulResponse = response.data; - stsSuccessfulResponse.res = response; - return stsSuccessfulResponse; - } - catch (error) { - // Translate error to OAuthError. 
- if (error instanceof gaxios_1.GaxiosError && error.response) { - throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, - // Preserve other fields from the original error. - error); - } - // Request could fail before the server responds. - throw error; - } + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); } + }; } -exports.StsCredentials = StsCredentials; -//# sourceMappingURL=stscredentials.js.map +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; -/***/ }), + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; -/***/ 4693: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; -"use strict"; + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -/* global window */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BrowserCrypto = void 0; -// This file implements crypto functions we need using in-browser -// SubtleCrypto interface `window.crypto.subtle`. -const base64js = __nccwpck_require__(6463); -// Not all browsers support `TextEncoder`. The following `require` will -// provide a fast UTF8-only replacement for those browsers that don't support -// text encoding natively. -// eslint-disable-next-line node/no-unsupported-features/node-builtins -if (typeof process === 'undefined' && typeof TextEncoder === 'undefined') { - __nccwpck_require__(1917); -} -const crypto_1 = __nccwpck_require__(8043); -class BrowserCrypto { - constructor() { - if (typeof window === 'undefined' || - window.crypto === undefined || - window.crypto.subtle === undefined) { - throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); - } - } - async sha256DigestBase64(str) { - // SubtleCrypto digest() method is async, so we must make - // this method async as well. - // To calculate SHA256 digest using SubtleCrypto, we first - // need to convert an input string to an ArrayBuffer: - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const inputBuffer = new TextEncoder().encode(str); - // Result is ArrayBuffer as well. 
- const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); - return base64js.fromByteArray(new Uint8Array(outputBuffer)); - } - randomBytesBase64(count) { - const array = new Uint8Array(count); - window.crypto.getRandomValues(array); - return base64js.fromByteArray(array); + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn); + const state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } } - static padBase64(base64) { - // base64js requires padding, so let's add some '=' - while (base64.length % 4 !== 0) { - base64 += '='; - } - return base64; + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + const res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + const state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. 
+ state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + const state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + stream.on('end', () => { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) this.push(chunk); } - async verify(pubkey, data, signature) { - const algo = { - name: 'RSASSA-PKCS1-v1_5', - hash: { name: 'SHA-256' }, - }; - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const dataArray = new TextEncoder().encode(data); - const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); - const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); - // SubtleCrypto's verify method is async so we must make - // this method async as well. - const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); - return result; + this.push(null); + }); + stream.on('data', chunk => { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); } - async sign(privateKey, data) { - const algo = { - name: 'RSASSA-PKCS1-v1_5', - hash: { name: 'SHA-256' }, + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. 
+ for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); }; - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const dataArray = new TextEncoder().encode(data); - const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); - // SubtleCrypto's sign method is async so we must make - // this method async as well. - const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); - return base64js.fromByteArray(new Uint8Array(result)); - } - decodeBase64StringUtf8(base64) { - const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const result = new TextDecoder().decode(uint8array); - return result; - } - encodeBase64StringUtf8(text) { - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const uint8array = new TextEncoder().encode(text); - const result = base64js.fromByteArray(uint8array); - return result; + }(i); } - /** - * Computes the SHA-256 hash of the provided string. - * @param str The plain text string to hash. - * @return A promise that resolves with the SHA-256 hash of the provided - * string in hexadecimal encoding. - */ - async sha256DigestHex(str) { - // SubtleCrypto digest() method is async, so we must make - // this method async as well. - // To calculate SHA256 digest using SubtleCrypto, we first - // need to convert an input string to an ArrayBuffer: - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const inputBuffer = new TextEncoder().encode(str); - // Result is ArrayBuffer as well. - const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); - return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = n => { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); } - /** - * Computes the HMAC hash of a message using the provided crypto key and the - * SHA-256 algorithm. - * @param key The secret crypto key in utf-8 or ArrayBuffer format. - * @param msg The plain text message. - * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer - * format. - */ - async signWithHmacSha256(key, msg) { - // Convert key, if provided in ArrayBuffer format, to string. - const rawKey = typeof key === 'string' - ? 
key - : String.fromCharCode(...new Uint16Array(key)); - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const enc = new TextEncoder(); - const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { - name: 'HMAC', - hash: { - name: 'SHA-256', - }, - }, false, ['sign']); - return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(3306); } + return createReadableStreamAsyncIterator(this); + }; } -exports.BrowserCrypto = BrowserCrypto; -//# sourceMappingURL=crypto.js.map - -/***/ }), +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); -/***/ 8043: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._readableState.length; + } +}); -"use strict"; +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -/* global window */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; -const crypto_1 = __nccwpck_require__(4693); -const crypto_2 = __nccwpck_require__(757); -function createCrypto() { - if (hasBrowserCrypto()) { - return new crypto_1.BrowserCrypto(); + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + const wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } } - return new crypto_2.NodeCrypto(); + } } -exports.createCrypto = createCrypto; -function hasBrowserCrypto() { - return (typeof window !== 'undefined' && - typeof window.crypto !== 'undefined' && - typeof window.crypto.subtle !== 'undefined'); +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(9082); + } + return from(Readable, iterable, opts); + }; } -exports.hasBrowserCrypto = hasBrowserCrypto; -/** - * Converts an ArrayBuffer to a hexadecimal string. - * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. - * @return The hexadecimal encoding of the ArrayBuffer. - */ -function fromArrayBufferToHex(arrayBuffer) { - // Convert buffer to byte array. - const byteArray = Array.from(new Uint8Array(arrayBuffer)); - // Convert bytes to hex string. - return byteArray - .map(byte => { - return byte.toString(16).padStart(2, '0'); - }) - .join(''); +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; } -exports.fromArrayBufferToHex = fromArrayBufferToHex; -//# sourceMappingURL=crypto.js.map /***/ }), -/***/ 757: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 4415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2019 Google LLC +// Copyright Joyent, Inc. and other Node contributors. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// http://www.apache.org/licenses/LICENSE-2.0 +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeCrypto = void 0; -const crypto = __nccwpck_require__(6113); -class NodeCrypto { - async sha256DigestBase64(str) { - return crypto.createHash('sha256').update(str).digest('base64'); - } - randomBytesBase64(count) { - return crypto.randomBytes(count).toString('base64'); - } - async verify(pubkey, data, signature) { - const verifier = crypto.createVerify('sha256'); - verifier.update(data); - verifier.end(); - return verifier.verify(pubkey, signature, 'base64'); - } - async sign(privateKey, data) { - const signer = crypto.createSign('RSA-SHA256'); - signer.update(data); - signer.end(); - return signer.sign(privateKey, 'base64'); - } - decodeBase64StringUtf8(base64) { - return Buffer.from(base64, 'base64').toString('utf-8'); - } - encodeBase64StringUtf8(text) { - return Buffer.from(text, 'utf-8').toString('base64'); - } - /** - * Computes the SHA-256 hash of the provided string. - * @param str The plain text string to hash. - * @return A promise that resolves with the SHA-256 hash of the provided - * string in hexadecimal encoding. - */ - async sha256DigestHex(str) { - return crypto.createHash('sha256').update(str).digest('hex'); - } - /** - * Computes the HMAC hash of a message using the provided crypto key and the - * SHA-256 algorithm. - * @param key The secret crypto key in utf-8 or ArrayBuffer format. - * @param msg The plain text message. - * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer - * format. - */ - async signWithHmacSha256(key, msg) { - const cryptoKey = typeof key === 'string' ? key : toBuffer(key); - return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); - } -} -exports.NodeCrypto = NodeCrypto; -/** - * Converts a Node.js Buffer to an ArrayBuffer. - * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer - * @param buffer The Buffer input to covert. - * @return The ArrayBuffer representation of the input. - */ -function toArrayBuffer(buffer) { - return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); -} -/** - * Converts an ArrayBuffer to a Node.js Buffer. - * @param arrayBuffer The ArrayBuffer input to covert. - * @return The Buffer representation of the input. - */ -function toBuffer(arrayBuffer) { - return Buffer.from(arrayBuffer); -} -//# sourceMappingURL=crypto.js.map - -/***/ }), - -/***/ 810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.AuthClient = void 0; -// Copyright 2017 Google LLC +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. // -// http://www.apache.org/licenses/LICENSE-2.0 +// Here's how this works: // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
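// Illustrative sketch (not part of the patch): the zlib case mentioned in the
// transform-stream comment above, using Node's built-in modules -- several
// plain-text writes go in, and a compressed chunk comes out of the readable
// side some time later.
const zlib = require('zlib');

const gzip = zlib.createGzip();          // a built-in Transform stream
const chunks = [];
gzip.on('data', (c) => chunks.push(c));  // compressed output, emitted later
gzip.on('end', () => {
  console.log('compressed bytes:', Buffer.concat(chunks).length);
});

gzip.write('first plain-text write\n');
gzip.write('second plain-text write\n');
gzip.end();                              // flush remaining output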
-const googleauth_1 = __nccwpck_require__(695); -Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); -var authclient_1 = __nccwpck_require__(4627); -Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); -var computeclient_1 = __nccwpck_require__(6875); -Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); -var envDetect_1 = __nccwpck_require__(1380); -Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); -var iam_1 = __nccwpck_require__(9735); -Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); -var idtokenclient_1 = __nccwpck_require__(298); -Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); -var jwtaccess_1 = __nccwpck_require__(8740); -Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); -var jwtclient_1 = __nccwpck_require__(3959); -Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); -var impersonated_1 = __nccwpck_require__(1103); -Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); -var oauth2client_1 = __nccwpck_require__(3936); -Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); -Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); -var loginticket_1 = __nccwpck_require__(4524); -Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); -var refreshclient_1 = __nccwpck_require__(8790); -Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); -var awsclient_1 = __nccwpck_require__(1569); -Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); -var identitypoolclient_1 = __nccwpck_require__(117); -Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); -var externalclient_1 = __nccwpck_require__(4381); -Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); -var baseexternalclient_1 = __nccwpck_require__(7391); -Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); -var downscopedclient_1 = __nccwpck_require__(6270); -Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); -var pluggable_auth_client_1 = __nccwpck_require__(4782); -Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); -var transporters_1 = __nccwpck_require__(2649); -Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { 
return transporters_1.DefaultTransporter; } })); -const auth = new googleauth_1.GoogleAuth(); -exports.auth = auth; -//# sourceMappingURL=index.js.map +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. -/***/ }), -/***/ 6608: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; +module.exports = Transform; +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +const Duplex = __nccwpck_require__(1359); +__nccwpck_require__(4124)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; -// Copyright 2017 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} +function prefinish() { + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush((er, data) => { + done(this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. // -// http://www.apache.org/licenses/LICENSE-2.0 +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validate = void 0; -// Accepts an options object passed from the user to the API. In the -// previous version of the API, it referred to a `Request` options object. -// Now it refers to an Axiox Request Config object. This is here to help -// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function validate(options) { - const vpairs = [ - { invalid: 'uri', expected: 'url' }, - { invalid: 'json', expected: 'data' }, - { invalid: 'qs', expected: 'params' }, - ]; - for (const pair of vpairs) { - if (options[pair.invalid]) { - const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; - throw new Error(e); - } - } +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. 
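// Illustrative sketch (not part of the patch): a minimal Transform subclass
// following the _transform contract described in the comments above --
// push() forwards transformed output to the readable side, cb() signals that
// the written chunk has been consumed. It uses Node's public 'stream' module,
// which mirrors the interface of the bundled readable-stream copy in this hunk.
const { Transform } = require('stream');

class Upcase extends Transform {
  _transform(chunk, encoding, cb) {
    this.push(chunk.toString().toUpperCase()); // zero or more push() calls
    cb();                                      // done with this chunk
  }
}

const up = new Upcase();
up.on('data', (c) => process.stdout.write(c));
up.write('hello ');
up.end('world\n');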
+Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, err2 => { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); } -exports.validate = validate; -//# sourceMappingURL=options.js.map /***/ }), -/***/ 2649: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6993: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2019 Google LLC +// Copyright Joyent, Inc. and other Node contributors. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// http://www.apache.org/licenses/LICENSE-2.0 +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultTransporter = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const options_1 = __nccwpck_require__(6608); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const pkg = __nccwpck_require__(1402); -const PRODUCT_NAME = 'google-api-nodejs-client'; -class DefaultTransporter { - /** - * Configures request options before making a request. - * @param opts GaxiosOptions options. - * @return Configured options. 
- */ - configure(opts = {}) { - opts.headers = opts.headers || {}; - if (typeof window === 'undefined') { - // set transporter user agent if not in browser - const uaValue = opts.headers['User-Agent']; - if (!uaValue) { - opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; - } - else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { - opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; - } - // track google-auth-library-nodejs version: - const authVersion = `auth/${pkg.version}`; - if (opts.headers['x-goog-api-client'] && - !opts.headers['x-goog-api-client'].includes(authVersion)) { - opts.headers['x-goog-api-client'] = `${opts.headers['x-goog-api-client']} ${authVersion}`; - } - else if (!opts.headers['x-goog-api-client']) { - const nodeVersion = process.version.replace(/^v/, ''); - opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion} ${authVersion}`; - } - } - return opts; - } - request(opts, callback) { - // ensure the user isn't passing in request-style options - opts = this.configure(opts); - try { - (0, options_1.validate)(opts); - } - catch (e) { - if (callback) { - return callback(e); - } - else { - throw e; - } - } - if (callback) { - (0, gaxios_1.request)(opts).then(r => { - callback(null, r); - }, e => { - callback(this.processError(e)); - }); - } - else { - return (0, gaxios_1.request)(opts).catch(e => { - throw this.processError(e); - }); - } - } - /** - * Changes the error to include details from the body. - */ - processError(e) { - const res = e.response; - const err = e; - const body = res ? res.data : null; - if (res && body && body.error && res.status !== 200) { - if (typeof body.error === 'string') { - err.message = body.error; - err.code = res.status.toString(); - } - else if (Array.isArray(body.error.errors)) { - err.message = body.error.errors - .map((err2) => err2.message) - .join('\n'); - err.code = body.error.code; - err.errors = body.error.errors; - } - else { - err.message = body.error.message; - err.code = body.error.code || res.status; - } - } - else if (res && res.status >= 400) { - // Consider all 4xx and 5xx responses errors. - err.message = body; - err.code = res.status.toString(); - } - return err; - } -} -exports.DefaultTransporter = DefaultTransporter; -/** - * Default user agent. - */ -DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; -//# sourceMappingURL=transporters.js.map +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -/***/ }), +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. -/***/ 2098: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. 
- * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getPem = void 0; -const fs = __nccwpck_require__(7147); -const forge = __nccwpck_require__(7655); -const util_1 = __nccwpck_require__(3837); -const readFile = (0, util_1.promisify)(fs.readFile); -function getPem(filename, callback) { - if (callback) { - getPemAsync(filename) - .then(pem => callback(null, pem)) - .catch(err => callback(err, null)); - } - else { - return getPemAsync(filename); - } +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; } -exports.getPem = getPem; -function getPemAsync(filename) { - return readFile(filename, { encoding: 'base64' }).then(keyp12 => { - return convertToPem(keyp12); - }); + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + this.next = null; + this.entry = null; + this.finish = () => { + onCorkedFinish(this, state); + }; } -/** - * Converts a P12 in base64 encoding to a pem. - * @param p12base64 String containing base64 encoded p12. - * @returns a string containing the pem. - */ -function convertToPem(p12base64) { - const p12Der = forge.util.decode64(p12base64); - const p12Asn1 = forge.asn1.fromDer(p12Der); - const p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, 'notasecret'); - const bags = p12.getBags({ friendlyName: 'privatekey' }); - if (bags.friendlyName) { - const privateKey = bags.friendlyName[0].key; - const pem = forge.pki.privateKeyToPem(privateKey); - return pem.replace(/\r\n/g, '\n'); - } - else { - throw new Error('Unable to get friendly name.'); - } +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +const internalUtil = { + deprecate: __nccwpck_require__(7127) +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(2387); +/**/ + +const Buffer = (__nccwpck_require__(4300).Buffer); +const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); } -//# sourceMappingURL=index.js.map +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +const destroyImpl = __nccwpck_require__(7049); +const _require = __nccwpck_require__(9948), + getHighWaterMark = _require.getHighWaterMark; +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +const errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(4124)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(1359); + options = options || {}; -/***/ }), + // Duplex streams are both readable and writable, but share + // the same options object. 
+ // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; -/***/ 6031: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; -"use strict"; + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. - * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleToken = void 0; -const fs = __nccwpck_require__(7147); -const gaxios_1 = __nccwpck_require__(9555); -const jws = __nccwpck_require__(4636); -const path = __nccwpck_require__(1017); -const util_1 = __nccwpck_require__(3837); -const readFile = fs.readFile - ? (0, util_1.promisify)(fs.readFile) - : async () => { - // if running in the web-browser, fs.readFile may not have been shimmed. - throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); - }; -const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; -const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; -class ErrorWithCode extends Error { - constructor(message, code) { - super(message); - this.code = code; - } + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. 
+ this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); } -let getPem; -class GoogleToken { - /** - * Create a GoogleToken. - * - * @param options Configuration object. - */ - constructor(options) { - this.transporter = { - request: opts => (0, gaxios_1.request)(opts), - }; - this.configure(options); - } - get accessToken() { - return this.rawToken ? this.rawToken.access_token : undefined; - } - get idToken() { - return this.rawToken ? this.rawToken.id_token : undefined; - } - get tokenType() { - return this.rawToken ? this.rawToken.token_type : undefined; - } - get refreshToken() { - return this.rawToken ? this.rawToken.refresh_token : undefined; - } - /** - * Returns whether the token has expired. - * - * @return true if the token has expired, false otherwise. - */ - hasExpired() { - const now = new Date().getTime(); - if (this.rawToken && this.expiresAt) { - return now >= this.expiresAt; - } - else { - return true; - } - } - /** - * Returns whether the token will expire within eagerRefreshThresholdMillis - * - * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. - */ - isTokenExpiring() { - var _a; - const now = new Date().getTime(); - const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; - if (this.rawToken && this.expiresAt) { - return this.expiresAt <= now + eagerRefreshThresholdMillis; - } - else { - return true; - } - } - getToken(callback, opts = {}) { - if (typeof callback === 'object') { - opts = callback; - callback = undefined; - } - opts = Object.assign({ - forceRefresh: false, - }, opts); - if (callback) { - const cb = callback; - this.getTokenAsync(opts).then(t => cb(null, t), callback); - return; - } - return this.getTokenAsync(opts); - } - /** - * Given a keyFile, extract the key and client email if available - * @param keyFile Path to a json, pem, or p12 file that contains the key. 
- * @returns an object with privateKey and clientEmail properties - */ - async getCredentials(keyFile) { - const ext = path.extname(keyFile); - switch (ext) { - case '.json': { - const key = await readFile(keyFile, 'utf8'); - const body = JSON.parse(key); - const privateKey = body.private_key; - const clientEmail = body.client_email; - if (!privateKey || !clientEmail) { - throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); - } - return { privateKey, clientEmail }; - } - case '.der': - case '.crt': - case '.pem': { - const privateKey = await readFile(keyFile, 'utf8'); - return { privateKey }; - } - case '.p12': - case '.pfx': { - // NOTE: The loading of `google-p12-pem` is deferred for performance - // reasons. The `node-forge` npm module in `google-p12-pem` adds a fair - // bit time to overall module loading, and is likely not frequently - // used. In a future release, p12 support will be entirely removed. - if (!getPem) { - getPem = (await Promise.resolve().then(() => __nccwpck_require__(2098))).getPem; - } - const privateKey = await getPem(keyFile); - return { privateKey }; - } - default: - throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + - 'Current supported extensions are *.json, *.pem, and *.p12.', 'UNKNOWN_CERTIFICATE_TYPE'); - } - } - async getTokenAsync(opts) { - if (this.inFlightRequest && !opts.forceRefresh) { - return this.inFlightRequest; - } - try { - return await (this.inFlightRequest = this.getTokenAsyncInner(opts)); - } - finally { - this.inFlightRequest = undefined; - } - } - async getTokenAsyncInner(opts) { - if (this.isTokenExpiring() === false && opts.forceRefresh === false) { - return Promise.resolve(this.rawToken); - } - if (!this.key && !this.keyFile) { - throw new Error('No key or keyFile set.'); - } - if (!this.key && this.keyFile) { - const creds = await this.getCredentials(this.keyFile); - this.key = creds.privateKey; - this.iss = creds.clientEmail || this.iss; - if (!creds.clientEmail) { - this.ensureEmail(); - } - } - return this.requestToken(); - } - ensureEmail() { - if (!this.iss) { - throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); - } - } - revokeToken(callback) { - if (callback) { - this.revokeTokenAsync().then(() => callback(), callback); - return; - } - return this.revokeTokenAsync(); - } - async revokeTokenAsync() { - if (!this.accessToken) { - throw new Error('No token to revoke.'); - } - const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; - await this.transporter.request({ url }); - this.configure({ - email: this.iss, - sub: this.sub, - key: this.key, - keyFile: this.keyFile, - scope: this.scope, - additionalClaims: this.additionalClaims, - }); - } - /** - * Configure the GoogleToken for re-use. - * @param {object} options Configuration object. - */ - configure(options = {}) { - this.keyFile = options.keyFile; - this.key = options.key; - this.rawToken = undefined; - this.iss = options.email || options.iss; - this.sub = options.sub; - this.additionalClaims = options.additionalClaims; - if (typeof options.scope === 'object') { - this.scope = options.scope.join(' '); - } - else { - this.scope = options.scope; - } - this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; - if (options.transporter) { - this.transporter = options.transporter; - } - } - /** - * Request the token from Google. 
- */ - async requestToken() { - var _a, _b; - const iat = Math.floor(new Date().getTime() / 1000); - const additionalClaims = this.additionalClaims || {}; - const payload = Object.assign({ - iss: this.iss, - scope: this.scope, - aud: GOOGLE_TOKEN_URL, - exp: iat + 3600, - iat, - sub: this.sub, - }, additionalClaims); - const signedJWT = jws.sign({ - header: { alg: 'RS256' }, - payload, - secret: this.key, - }); - try { - const r = await this.transporter.request({ - method: 'POST', - url: GOOGLE_TOKEN_URL, - data: { - grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', - assertion: signedJWT, - }, - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - responseType: 'json', - }); - this.rawToken = r.data; - this.expiresAt = - r.data.expires_in === null || r.data.expires_in === undefined - ? undefined - : (iat + r.data.expires_in) * 1000; - return this.rawToken; - } - catch (e) { - this.rawToken = undefined; - this.tokenExpires = undefined; - const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) - ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data - : {}; - if (body.error) { - const desc = body.error_description - ? `: ${body.error_description}` - : ''; - e.message = `${body.error}${desc}`; - } - throw e; - } +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; } -exports.GoogleToken = GoogleToken; -//# sourceMappingURL=index.js.map +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(1359); -/***/ }), + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. -/***/ 1621: -/***/ ((module) => { + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. -"use strict"; + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + const isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + // legacy. 
+ this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} -module.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf('--'); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); }; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} - -/***/ }), - -/***/ 7492: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; +Writable.prototype.cork = function () { + this._writableState.corked++; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(1808)); -const tls_1 = __importDefault(__nccwpck_require__(4404)); -const url_1 = __importDefault(__nccwpck_require__(7310)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const once_1 = __importDefault(__nccwpck_require__(1040)); -const agent_base_1 = __nccwpck_require__(9690); -const debug = (0, debug_1.default)('http-proxy-agent'); -function isHTTPS(protocol) { - return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; -} -/** - * The `HttpProxyAgent` implements an HTTP Agent subclass that connects - * to the specified "HTTP proxy server" in order to proxy HTTP requests. - * - * @api public - */ -class HttpProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('Creating new HttpProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; - } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - const parsed = url_1.default.parse(req.path); - if (!parsed.protocol) { - parsed.protocol = 'http:'; - } - if (!parsed.hostname) { - parsed.hostname = opts.hostname || opts.host || null; - } - if (parsed.port == null && typeof opts.port) { - parsed.port = String(opts.port); - } - if (parsed.port === '80') { - // if port is 80, then we can remove the port so that the - // ":80" portion is not on the produced URL - parsed.port = ''; - } - // Change the `http.ClientRequest` instance's "path" field - // to the absolute path of the URL that will be requested. - req.path = url_1.default.format(parsed); - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); - } - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - // At this point, the http ClientRequest's internal `_header` field - // might have already been set. If this is the case then we'll need - // to re-generate the string since we just changed the `req.path`. 
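// Illustrative sketch (not part of the patch): a minimal consumer of the
// Writable interface added in this hunk -- supply a write(chunk, encoding, cb)
// implementation and the stream handles buffering; a false return from write()
// followed by the 'drain' event is the backpressure signal driven by the
// highWaterMark/needDrain state above. Uses Node's public 'stream' module,
// which mirrors the bundled copy's interface.
const { Writable } = require('stream');

const sink = new Writable({
  highWaterMark: 16,             // tiny buffer so backpressure kicks in quickly
  write(chunk, encoding, cb) {
    setTimeout(() => cb(), 10);  // pretend this is a slow async destination
  }
});

let i = 0;
const total = 100;
function writeMore() {
  while (i < total) {
    const ok = sink.write(`line ${i++}\n`);
    if (!ok) {                          // over highWaterMark: wait for 'drain'
      sink.once('drain', writeMore);
      return;
    }
  }
  sink.end();
}
writeMore();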
- if (req._header) { - let first; - let endOfHeaders; - debug('Regenerating stored HTTP header string for request'); - req._header = null; - req._implicitHeader(); - if (req.output && req.output.length > 0) { - // Node < 12 - debug('Patching connection write() output buffer with updated header'); - first = req.output[0]; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.output[0] = req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.output); - } - else if (req.outputData && req.outputData.length > 0) { - // Node >= 12 - debug('Patching connection write() output buffer with updated header'); - first = req.outputData[0].data; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.outputData[0].data = - req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.outputData[0].data); - } - } - // Wait for the socket's `connect` event, so that this `callback()` - // function throws instead of the `http` request machinery. This is - // important for i.e. `PacProxyAgent` which determines a failed proxy - // connection via the `callback()` function throwing. - yield (0, once_1.default)(socket, 'connect'); - return socket; - }); - } -} -exports["default"] = HttpProxyAgent; -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 3764: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } }; -const agent_1 = __importDefault(__nccwpck_require__(7492)); -function createHttpProxyAgent(opts) { - return new agent_1.default(opts); +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; } -(function (createHttpProxyAgent) { - createHttpProxyAgent.HttpProxyAgent = agent_1.default; - createHttpProxyAgent.prototype = agent_1.default.prototype; -})(createHttpProxyAgent || (createHttpProxyAgent = {})); -module.exports = createHttpProxyAgent; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 5098: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(1808)); -const tls_1 = __importDefault(__nccwpck_require__(4404)); -const url_1 = __importDefault(__nccwpck_require__(7310)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const agent_base_1 = __nccwpck_require__(9690); -const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); -const debug = debug_1.default('https-proxy-agent:agent'); -/** - * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to - * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. - * - * Outgoing HTTP requests are first tunneled through the proxy server using the - * `CONNECT` HTTP request method to establish a connection to the proxy server, - * and then the proxy server connects to the destination target and issues the - * HTTP request from the proxy server. - * - * `https:` requests have their socket connection upgraded to TLS once - * the connection to the proxy server has been established. 
- * - * @api public - */ -class HttpsProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('creating new HttpsProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - // ALPN is supported by Node.js >= v5. - // attempt to negotiate http/1.1 for proxy servers that support http/2 - if (this.secureProxy && !('ALPNProtocols' in proxy)) { - proxy.ALPNProtocols = ['http 1.1']; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - const headers = Object.assign({}, proxy.headers); - const hostname = `${opts.host}:${opts.port}`; - let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; - } - // The `Host` header should only include the port - // number when it is not the default port. - let { host, port, secureEndpoint } = opts; - if (!isDefaultPort(port, secureEndpoint)) { - host += `:${port}`; - } - headers.Host = host; - headers.Connection = 'close'; - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r\n`; - } - const proxyResponsePromise = parse_proxy_response_1.default(socket); - socket.write(`${payload}\r\n`); - const { statusCode, buffered } = yield proxyResponsePromise; - if (statusCode === 200) { - req.once('socket', resume); - if (opts.secureEndpoint) { - // The proxy is connecting to a TLS server, so upgrade - // this socket connection to a TLS connection. 
- debug('Upgrading socket connection to TLS'); - const servername = opts.servername || opts.host; - return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, - servername })); - } - return socket; - } - // Some other status code that's not 200... need to re-play the HTTP - // header "data" events onto the socket once the HTTP machinery is - // attached so that the node core `http` can parse and handle the - // error status code. - // Close the original socket, and a new "fake" socket is returned - // instead, so that the proxy doesn't get the HTTP request - // written to it (which may contain `Authorization` headers or other - // sensitive data). - // - // See: https://hackerone.com/reports/541502 - socket.destroy(); - const fakeSocket = new net_1.default.Socket({ writable: false }); - fakeSocket.readable = true; - // Need to wait for the "socket" event to re-play the "data" events. - req.once('socket', (s) => { - debug('replaying proxy buffer for failed request'); - assert_1.default(s.listenerCount('data') > 0); - // Replay the "buffered" Buffer onto the fake `socket`, since at - // this point the HTTP module machinery has been hooked up for - // the user. - s.push(buffered); - s.push(null); - }); - return fakeSocket; - }); + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk, + encoding, + isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; } -exports["default"] = HttpsProxyAgent; -function resume(socket) { - socket.resume(); +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; } -function isDefaultPort(port, secure) { - return Boolean((!secure && port === 80) || (secure && port === 443)); +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } } -function isHTTPS(protocol) { - return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; } -function omit(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); } - return ret; + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); } -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 7219: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(5098)); -function createHttpsProxyAgent(opts) { - return new agent_1.default(opts); +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } } -(function (createHttpsProxyAgent) { - createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; - createHttpsProxyAgent.prototype = agent_1.default.prototype; -})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); -module.exports = createHttpsProxyAgent; -//# sourceMappingURL=index.js.map -/***/ }), +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); -/***/ 595: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 
1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); -"use strict"; + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; + // ignore unnecessary end() calls. + if (!state.ending) endWritable(this, state, cb); + return this; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); -function parseProxyResponse(socket) { - return new Promise((resolve, reject) => { - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... 
- let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once('readable', read); - } - function cleanup() { - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('close', onclose); - socket.removeListener('readable', read); - } - function onclose(err) { - debug('onclose had error %o', err); - } - function onend() { - debug('onend'); - } - function onerror(err) { - cleanup(); - debug('onerror %o', err); - reject(err); - } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf('\r\n\r\n'); - if (endOfHeaders === -1) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - read(); - return; - } - const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); - const statusCode = +firstLine.split(' ')[1]; - debug('got proxy server response: %o', firstLine); - resolve({ - statusCode, - buffered - }); - } - socket.on('error', onerror); - socket.on('close', onclose); - socket.on('end', onend); - read(); - }); +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; } -exports["default"] = parseProxyResponse; -//# sourceMappingURL=parse-proxy-response.js.map - -/***/ }), - -/***/ 4124: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -try { - var util = __nccwpck_require__(3837); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __nccwpck_require__(8544); +function callFinal(stream, state) { + stream._final(err => { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); } - - -/***/ }), - -/***/ 8544: -/***/ ((module) => { - -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of 
duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + const rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } } } + return need; } - - -/***/ }), - -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; } - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; } - // Most likely a plain Object - return true; + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; } - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 1554: -/***/ ((module) => { - -"use strict"; - - -const isStream = stream => - stream !== null && - typeof stream === 'object' && - typeof stream.pipe === 'function'; - -isStream.writable = stream => - isStream(stream) && - stream.writable !== false && - typeof stream._write === 'function' && - typeof stream._writableState === 'object'; - -isStream.readable = stream => - isStream(stream) && - stream.readable !== false && - typeof stream._read === 'function' && - typeof stream._readableState === 'object'; - -isStream.duplex = stream => - isStream.writable(stream) && - isStream.readable(stream); - -isStream.transform = stream => - isStream.duplex(stream) && - typeof stream._transform === 'function'; - -module.exports = isStream; - - -/***/ }), - -/***/ 5031: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var json_stringify = (__nccwpck_require__(8574).stringify); -var json_parse = __nccwpck_require__(9099); - -module.exports = function(options) { - return { - parse: json_parse(options), - stringify: json_stringify +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; } -}; -//create the default method members with no options applied for backwards compatibility -module.exports.parse = json_parse(); -module.exports.stringify = json_stringify; - - -/***/ }), - -/***/ 9099: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var BigNumber = null; - -// regexpxs extracted from -// (c) BSD-3-Clause -// 
https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors - -const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; -const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; - -/* - json_parse.js - 2012-06-20 - - Public Domain. - - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. - - This file creates a json_parse function. - During create you can (optionally) specify some behavioural switches - - require('json-bigint')(options) - - The optional options parameter holds switches that drive certain - aspects of the parsing process: - * options.strict = true will warn about duplicate-key usage in the json. - The default (strict = false) will silently ignore those and overwrite - values for keys that are in duplicate use. - - The resulting function follows this signature: - json_parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. - - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. - - Example: - - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. - - myData = json_parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); - - This is a reference implementation. You are free to copy, modify, or - redistribute. - - This code should be minified before deployment. - See http://javascript.crockford.com/jsmin.html - - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. -*/ -/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, - hasOwnProperty, message, n, name, prototype, push, r, t, text -*/ + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; -var json_parse = function (options) { - 'use strict'; +/***/ }), - // This is a function that can parse a JSON text, producing a JavaScript - // data structure. It is a simple, recursive descent parser. It does not use - // eval or regular expressions, so it can be used as a model for implementing - // a JSON parser in other languages. +/***/ 3306: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // We are defining the function inside of another function to avoid creating - // global variables. 
+"use strict"; - // Default options one can override by passing options to the parse() - var _options = { - strict: false, // not being strict means do not generate syntax errors for "duplicate key" - storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string - alwaysParseAsBig: false, // toggles whether all numbers should be Big - useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js - protoAction: 'error', - constructorAction: 'error', - }; - // If there are options, then use them to override the default _options - if (options !== undefined && options !== null) { - if (options.strict === true) { - _options.strict = true; - } - if (options.storeAsString === true) { - _options.storeAsString = true; +const finished = __nccwpck_require__(6080); +const kLastResolve = Symbol('lastResolve'); +const kLastReject = Symbol('lastReject'); +const kError = Symbol('error'); +const kEnded = Symbol('ended'); +const kLastPromise = Symbol('lastPromise'); +const kHandlePromise = Symbol('handlePromise'); +const kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value, + done + }; +} +function readAndResolve(iter) { + const resolve = iter[kLastResolve]; + if (resolve !== null) { + const data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); } - _options.alwaysParseAsBig = - options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; - _options.useNativeBigInt = - options.useNativeBigInt === true ? options.useNativeBigInt : false; - - if (typeof options.constructorAction !== 'undefined') { - if ( - options.constructorAction === 'error' || - options.constructorAction === 'ignore' || - options.constructorAction === 'preserve' - ) { - _options.constructorAction = options.constructorAction; - } else { - throw new Error( - `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` - ); + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return (resolve, reject) => { + lastPromise.then(() => { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +const AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ + get stream() { + return this[kStream]; + }, + next() { + // if we have detected an error in the meanwhile + // reject straight away + const error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise((resolve, reject) => { + process.nextTick(() => { + if (this[kError]) { + reject(this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); } - if (typeof options.protoAction !== 'undefined') { - if ( - options.protoAction === 'error' || - options.protoAction === 'ignore' || - options.protoAction === 'preserve' - ) { - _options.protoAction = options.protoAction; - } else { - throw new Error( - `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` - ); + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + const lastPromise = this[kLastPromise]; + let promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + const data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); } + promise = new Promise(this[kHandlePromise]); } + this[kLastPromise] = promise; + return promise; + }, + [Symbol.asyncIterator]() { + return this; + }, + return() { + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise((resolve, reject) => { + this[kStream].destroy(null, err => { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); } - - var at, // The index of the current character - ch, // The current character - escapee = { - '"': '"', - '\\': '\\', - '/': '/', - b: '\b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', +}, AsyncIteratorPrototype); +const createReadableStreamAsyncIterator = stream => { + const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, { + [kStream]: { + value: stream, + writable: true }, - text, - error = function (m) { - // Call error when something is wrong. - - throw { - name: 'SyntaxError', - message: m, - at: at, - text: text, - }; + [kLastResolve]: { + value: null, + writable: true }, - next = function (c) { - // If a c parameter is provided, verify that it matches the current character. - - if (c && c !== ch) { - error("Expected '" + c + "' instead of '" + ch + "'"); - } - - // Get the next character. When there are no more characters, - // return the empty string. - - ch = text.charAt(at); - at += 1; - return ch; + [kLastReject]: { + value: null, + writable: true }, - number = function () { - // Parse a number value. 
- - var number, - string = ''; - - if (ch === '-') { - string = '-'; - next('-'); - } - while (ch >= '0' && ch <= '9') { - string += ch; - next(); - } - if (ch === '.') { - string += '.'; - while (next() && ch >= '0' && ch <= '9') { - string += ch; - } - } - if (ch === 'e' || ch === 'E') { - string += ch; - next(); - if (ch === '-' || ch === '+') { - string += ch; - next(); - } - while (ch >= '0' && ch <= '9') { - string += ch; - next(); + [kError]: { + value: null, + writable: true + }, + [kEnded]: { + value: stream._readableState.endEmitted, + writable: true + }, + // the function passed to new Promise + // is cached so we avoid allocating a new + // closure at every run + [kHandlePromise]: { + value: (resolve, reject) => { + const data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; } + }, + writable: true + } + }); + iterator[kLastPromise] = null; + finished(stream, err => { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + const reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); } - number = +string; - if (!isFinite(number)) { - error('Bad number'); - } else { - if (BigNumber == null) BigNumber = __nccwpck_require__(7558); - //if (number > 9007199254740992 || number < -9007199254740992) - // Bignumber has stricter check: everything with length > 15 digits disallowed - if (string.length > 15) - return _options.storeAsString - ? string - : _options.useNativeBigInt - ? BigInt(string) - : new BigNumber(string); - else - return !_options.alwaysParseAsBig - ? number - : _options.useNativeBigInt - ? BigInt(number) - : new BigNumber(number); - } - }, - string = function () { - // Parse a string value. + iterator[kError] = err; + return; + } + const resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; - var hex, - i, - string = '', - uffff; +/***/ }), - // When parsing for string values, we must look for " and \ characters. +/***/ 6522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (ch === '"') { - var startAt = at; - while (next()) { - if (ch === '"') { - if (at - 1 > startAt) string += text.substring(startAt, at - 1); - next(); - return string; - } - if (ch === '\\') { - if (at - 1 > startAt) string += text.substring(startAt, at - 1); - next(); - if (ch === 'u') { - uffff = 0; - for (i = 0; i < 4; i += 1) { - hex = parseInt(next(), 16); - if (!isFinite(hex)) { - break; - } - uffff = uffff * 16 + hex; - } - string += String.fromCharCode(uffff); - } else if (typeof escapee[ch] === 'string') { - string += escapee[ch]; - } else { - break; - } - startAt = at; - } - } - } - error('Bad string'); - }, - white = function () { - // Skip whitespace. +"use strict"; - while (ch && ch <= ' ') { - next(); - } - }, - word = function () { - // true, false, or null. 
- switch (ch) { - case 't': - next('t'); - next('r'); - next('u'); - next('e'); - return true; - case 'f': - next('f'); - next('a'); - next('l'); - next('s'); - next('e'); - return false; - case 'n': - next('n'); - next('u'); - next('l'); - next('l'); - return null; - } - error("Unexpected '" + ch + "'"); - }, - value, // Place holder for the value function. - array = function () { - // Parse an array value. +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const _require = __nccwpck_require__(4300), + Buffer = _require.Buffer; +const _require2 = __nccwpck_require__(3837), + inspect = _require2.inspect; +const custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = class BufferList { + constructor() { + this.head = null; + this.tail = null; + this.length = 0; + } + push(v) { + const entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + unshift(v) { + const entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + shift() { + if (this.length === 0) return; + const ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + clear() { + this.head = this.tail = null; + this.length = 0; + } + join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + concat(n) { + if (this.length === 0) return Buffer.alloc(0); + const ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } - var array = []; + // Consumes a specified amount of bytes or characters from the buffered data. 
+ consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + first() { + return this.head.data; + } - if (ch === '[') { - next('['); - white(); - if (ch === ']') { - next(']'); - return array; // empty array + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + const str = p.data; + const nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); } - while (ch) { - array.push(value()); - white(); - if (ch === ']') { - next(']'); - return array; - } - next(','); - white(); + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + const buf = p.data; + const nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); } + break; } - error('Bad array'); - }, - object = function () { - // Parse an object value. + ++c; + } + this.length -= c; + return ret; + } - var key, - object = Object.create(null); + // Make sure the linked list only shows the minimal necessary information. + [custom](_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } +}; - if (ch === '{') { - next('{'); - white(); - if (ch === '}') { - next('}'); - return object; // empty object - } - while (ch) { - key = string(); - white(); - next(':'); - if ( - _options.strict === true && - Object.hasOwnProperty.call(object, key) - ) { - error('Duplicate key "' + key + '"'); - } +/***/ }), - if (suspectProtoRx.test(key) === true) { - if (_options.protoAction === 'error') { - error('Object contains forbidden prototype property'); - } else if (_options.protoAction === 'ignore') { - value(); - } else { - object[key] = value(); - } - } else if (suspectConstructorRx.test(key) === true) { - if (_options.constructorAction === 'error') { - error('Object contains forbidden constructor property'); - } else if (_options.constructorAction === 'ignore') { - value(); - } else { - object[key] = value(); - } - } else { - object[key] = value(); - } +/***/ 7049: +/***/ ((module) => { - white(); - if (ch === '}') { - next('}'); - return object; - } - next(','); - white(); - } - } - error('Bad object'); - }; +"use strict"; - value = function () { - // Parse a JSON value. It could be an object, an array, a string, a number, - // or a word. 
- white(); - switch (ch) { - case '{': - return object(); - case '[': - return array(); - case '"': - return string(); - case '-': - return number(); - default: - return ch >= '0' && ch <= '9' ? number() : word(); +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + const readableDestroyed = this._readableState && this._readableState.destroyed; + const writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } } - }; + return this; + } - // Return the json_parse function. It will have access to all of the above - // functions and variables. + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks - return function (source, reviver) { - var result; + if (this._readableState) { + this._readableState.destroyed = true; + } - text = source + ''; - at = 0; - ch = ' '; - result = value(); - white(); - if (ch) { - error('Syntax error'); + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, err => { + if (!cb && err) { + if (!this._writableState) { + process.nextTick(emitErrorAndCloseNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, this, err); + } else { + process.nextTick(emitCloseNT, this); + } + } else if (cb) { + process.nextTick(emitCloseNT, this); + cb(err); + } else { + process.nextTick(emitCloseNT, this); } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. - // If there is a reviver function, we recursively walk the new structure, - // passing each name/value pair to the reviver function for possible - // transformation, starting with a temporary root object that holds the result - // in an empty key. If there is not a reviver function, we simply return the - // result. - - return typeof reviver === 'function' - ? 
(function walk(holder, key) { - var k, - v, - value = holder[key]; - if (value && typeof value === 'object') { - Object.keys(value).forEach(function (k) { - v = walk(value, k); - if (v !== undefined) { - value[k] = v; - } else { - delete value[k]; - } - }); - } - return reviver.call(holder, key, value); - })({ '': result }, '') - : result; - }; + const rState = stream._readableState; + const wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy, + undestroy, + errorOrDestroy }; -module.exports = json_parse; - - /***/ }), -/***/ 8574: +/***/ 6080: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var BigNumber = __nccwpck_require__(7558); - -/* - json2.js - 2013-05-26 - - Public Domain. - - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. - - See http://www.JSON.org/js.html - - - This code should be minified before deployment. - See http://javascript.crockford.com/jsmin.html +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. - This file creates a global JSON object containing two methods: stringify - and parse. +const ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(7214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE); +function once(callback) { + let called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + let readable = opts.readable || opts.readable !== false && stream.readable; + let writable = opts.writable || opts.writable !== false && stream.writable; + const onlegacyfinish = () => { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + const onfinish = () => { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + const onend = () => { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + const onerror = err => { + callback.call(stream, err); + }; + const onclose = () => { + let err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + const onrequest = () => { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); 
+ stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; - JSON.stringify(value, replacer, space) - value any JavaScript value, usually an object or array. +/***/ }), - replacer an optional parameter that determines how object - values are stringified for objects. It can be a - function or an array of strings. +/***/ 9082: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - space an optional parameter that specifies the indentation - of nested structures. If it is omitted, the text will - be packed without extra whitespace. If it is a number, - it will specify the number of spaces to indent at each - level. If it is a string (such as '\t' or ' '), - it contains the characters used to indent at each level. +"use strict"; - This method produces a JSON text from a JavaScript value. - When an object value is found, if the object contains a toJSON - method, its toJSON method will be called and the result will be - stringified. A toJSON method does not serialize: it returns the - value represented by the name/value pair that should be serialized, - or undefined if nothing should be serialized. The toJSON method - will be passed the key associated with the value, and this will be - bound to the value +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const ERR_INVALID_ARG_TYPE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE); +function from(Readable, iterable, opts) { + let iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + const readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + let reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + const _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; - For example, this would serialize Dates as ISO strings. +/***/ }), - Date.prototype.toJSON = function (key) { - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } +/***/ 6989: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return this.getUTCFullYear() + '-' + - f(this.getUTCMonth() + 1) + '-' + - f(this.getUTCDate()) + 'T' + - f(this.getUTCHours()) + ':' + - f(this.getUTCMinutes()) + ':' + - f(this.getUTCSeconds()) + 'Z'; - }; +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). - You can provide an optional replacer method. It will be passed the - key and value of each member, with this bound to the containing - object. The value that is returned from your method will be - serialized. If your method returns undefined, then the member will - be excluded from the serialization. - If the replacer parameter is an array of strings, then it will be - used to select the members to be serialized. It filters the results - such that only members with keys listed in the replacer array are - stringified. 
- Values that do not have JSON representations, such as undefined or - functions, will not be serialized. Such values in objects will be - dropped; in arrays they will be replaced with null. You can use - a replacer function to replace those with JSON values. - JSON.stringify(undefined) returns undefined. +let eos; +function once(callback) { + let called = false; + return function () { + if (called) return; + called = true; + callback(...arguments); + }; +} +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + let closed = false; + stream.on('close', () => { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(6080); + eos(stream, { + readable: reading, + writable: writing + }, err => { + if (err) return callback(err); + closed = true; + callback(); + }); + let destroyed = false; + return err => { + if (closed) return; + if (destroyed) return; + destroyed = true; - The optional space parameter produces a stringification of the - value that is filled with line breaks and indentation to make it - easier to read. + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + const callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + let error; + const destroys = streams.map(function (stream, i) { + const reading = i < streams.length - 1; + const writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; - If the space parameter is a non-empty string, then that string will - be used for indentation. If the space parameter is a number, then - the indentation will be that many spaces. +/***/ }), - Example: +/***/ 9948: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - text = JSON.stringify(['e', {pluribus: 'unum'}]); - // text is '["e",{"pluribus":"unum"}]' +"use strict"; - text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); - // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' +const ERR_INVALID_OPT_VALUE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE); +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + const name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } - text = JSON.stringify([new Date()], function (key, value) { - return this[key] instanceof Date ? - 'Date(' + this[key] + ')' : value; - }); - // text is '["Date(---current time---)"]' + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark +}; +/***/ }), - JSON.parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. +/***/ 2387: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. +module.exports = __nccwpck_require__(2781); - Example: - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. +/***/ }), - myData = JSON.parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); +/***/ 1642: +/***/ ((module, exports, __nccwpck_require__) => { - myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { - var d; - if (typeof value === 'string' && - value.slice(0, 5) === 'Date(' && - value.slice(-1) === ')') { - d = new Date(value.slice(5, -1)); - if (d) { - return d; - } - } - return value; - }); +var Stream = __nccwpck_require__(2781); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(1433); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(6993); + exports.Duplex = __nccwpck_require__(1359); + exports.Transform = __nccwpck_require__(4415); + exports.PassThrough = __nccwpck_require__(1542); + exports.finished = __nccwpck_require__(6080); + exports.pipeline = __nccwpck_require__(6989); +} - This is a reference implementation. You are free to copy, modify, or - redistribute. -*/ +/***/ }), -/*jslint evil: true, regexp: true */ +/***/ 3515: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, - call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, - getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, - lastIndex, length, parse, prototype, push, replace, slice, stringify, - test, toJSON, toString, valueOf -*/ +"use strict"; -// Create a JSON object only if one does not already exist. We create the -// methods in a closure to avoid creating global variables. 
+const {PassThrough} = __nccwpck_require__(2781); +const extend = __nccwpck_require__(8171); -var JSON = module.exports; +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} -(function () { - 'use strict'; +const DEFAULTS = { + objectMode: false, + retries: 2, - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, - var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - gap, - indent, - meta = { // table of character substitutions - '\b': '\\b', - '\t': '\\t', - '\n': '\\n', - '\f': '\\f', - '\r': '\\r', - '"' : '\\"', - '\\': '\\\\' - }, - rep; + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, - function quote(string) { + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; -// If the string contains no control characters, no quote characters, and no -// backslash characters, then we can safely slap some quotes around it. -// Otherwise we must also replace the offending characters with safe escape -// sequences. + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); - escapable.lastIndex = 0; - return escapable.test(string) ? '"' + string.replace(escapable, function (a) { - var c = meta[a]; - return typeof c === 'string' - ? c - : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); - }) + '"' : '"' + string + '"'; + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. + return true; + } } + }, +}; +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } - function str(key, holder) { - -// Produce a string from holder[key]. - - var i, // The loop counter. - k, // The member key. - v, // The member value. - length, - mind = gap, - partial, - value = holder[key], - isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); - -// If the value has a toJSON method, call it to obtain a replacement value. 
- - if (value && typeof value === 'object' && - typeof value.toJSON === 'function') { - value = value.toJSON(key); - } + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; -// If we were called with a replacer function, then call the replacer to -// obtain a replacement value. + if (typeof opts === 'function') { + callback = opts; + } - if (typeof rep === 'function') { - value = rep.call(holder, key, value); - } + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); -// What happens next depends on the value's type. + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } - switch (typeof value) { - case 'string': - if (isBigNumber) { - return value; - } else { - return quote(value); - } + let currentRetryAttempt = opts.currentRetryAttempt; - case 'number': + let numNoResponseAttempts = 0; + let streamResponseHandled = false; -// JSON numbers must be finite. Encode non-finite numbers as null. + let retryStream; + let requestStream; + let delayStream; - return isFinite(value) ? String(value) : 'null'; + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; - case 'boolean': - case 'null': - case 'bigint': + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } -// If the value is a boolean or null, convert it to a string. Note: -// typeof null does not produce 'null'. The case is included here in -// the remote chance that this gets fixed someday. + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } - return String(value); + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } -// If the type is 'object', we might be dealing with an object or an array or -// null. + function resetStreams() { + delayStream = null; - case 'object': + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); -// Due to a specification blunder in ECMAScript, typeof null is 'object', -// so watch out for that case. + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } - if (!value) { - return 'null'; - } + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); -// Make an array to hold the partial results of stringifying this object value. + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } - gap += indent; - partial = []; + if (streamMode) { + streamResponseHandled = false; -// Is the value an array? + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); - if (Object.prototype.toString.apply(value) === '[object Array]') { + setImmediate(() => { + retryStream.emit('request'); + }); -// The value is an array. Stringify every element. Use null as a placeholder -// for non-JSON values. + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. 
+ .on('error', err => { + if (streamResponseHandled) { + return; + } - length = value.length; - for (i = 0; i < length; i += 1) { - partial[i] = str(i, value) || 'null'; - } + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } -// Join all of the elements together, separated with commas, and wrap them in -// brackets. + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); - v = partial.length === 0 - ? '[]' - : gap - ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' - : '[' + partial.join(',') + ']'; - gap = mind; - return v; - } + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } -// If the replacer is an array, use it to select the members to be stringified. + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } - if (rep && typeof rep === 'object') { - length = rep.length; - for (i = 0; i < length; i += 1) { - if (typeof rep[i] === 'string') { - k = rep[i]; - v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - } - } - } else { + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); -// Otherwise, iterate through all of the keys in the object. + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } - Object.keys(value).forEach(function(k) { - var v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - }); - } + setTimeout(makeRequest, nextRetryDelay); + } -// Join all of the member texts together, separated with commas, -// and wrap them in braces. + function onResponse(err, response, body) { + // An error such as DNS resolution. + if (err) { + numNoResponseAttempts++; - v = partial.length === 0 - ? '{}' - : gap - ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' - : '{' + partial.join(',') + '}'; - gap = mind; - return v; + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); } - } - -// If the JSON object does not yet have a stringify method, give it one. - - if (typeof JSON.stringify !== 'function') { - JSON.stringify = function (value, replacer, space) { + } -// The stringify method takes a value and an optional replacer, and an optional -// space parameter, and returns a JSON text. The replacer can be a function -// that can replace values, or an array of strings that will select the keys. -// A default replacer method can be provided. Use of the space parameter can -// produce text that is more easily readable. + return; + } - var i; - gap = ''; - indent = ''; + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? 
currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } -// If the space parameter is a number, make an indent string containing that -// many spaces. + // No more attempts need to be made, just continue on. + if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} - if (typeof space === 'number') { - for (i = 0; i < space; i += 1) { - indent += ' '; - } +module.exports = retryRequest; -// If the space parameter is a string, it will be used as the indent string. +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; - } else if (typeof space === 'string') { - indent = space; - } + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; -// If there is a replacer, it must be a function or an array. -// Otherwise, throw an error. + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; - rep = replacer; - if (replacer && typeof replacer !== 'function' && - (typeof replacer !== 'object' || - typeof replacer.length !== 'number')) { - throw new Error('JSON.stringify'); - } + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); -// Make a fake root object containing our value under the key of ''. -// Return the result of stringifying the value. + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} - return str('', {'': value}); - }; - } -}()); +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; /***/ }), -/***/ 6010: +/***/ 1604: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var bufferEqual = __nccwpck_require__(9239); -var Buffer = (__nccwpck_require__(1867).Buffer); -var crypto = __nccwpck_require__(6113); -var formatEcdsa = __nccwpck_require__(1728); -var util = __nccwpck_require__(3837); - -var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
-var MSG_INVALID_SECRET = 'secret must be a string or buffer'; -var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; -var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; - -var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; -if (supportsKeyObjects) { - MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; - MSG_INVALID_SECRET += 'or a KeyObject'; -} +module.exports = __nccwpck_require__(6244); -function checkIsPublicKey(key) { - if (Buffer.isBuffer(key)) { - return; - } +/***/ }), - if (typeof key === 'string') { - return; - } +/***/ 6244: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +var RetryOperation = __nccwpck_require__(5369); - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; - if (typeof key.type !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); } - if (typeof key.asymmetricKeyType !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; } - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_VERIFIER_KEY); + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); } -}; -function checkIsPrivateKey(key) { - if (Buffer.isBuffer(key)) { - return; + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); } - if (typeof key === 'string') { - return; + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); } - if (typeof key === 'object') { - return; - } + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); - throw typeError(MSG_INVALID_SIGNER_KEY); + return timeouts; }; -function checkIsSecretKey(key) { - if (Buffer.isBuffer(key)) { - return; - } +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? 
(Math.random() + 1) + : 1; - if (typeof key === 'string') { - return key; - } + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_SECRET); - } + return timeout; +}; - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_SECRET); +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; } - if (key.type !== 'secret') { - throw typeError(MSG_INVALID_SECRET); + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } } - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_SECRET); - } -} + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; -function fromBase64(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); -function toBase64(base64url) { - base64url = base64url.toString(); + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); - var padding = 4 - base64url.length % 4; - if (padding !== 4) { - for (var i = 0; i < padding; ++i) { - base64url += '='; - } + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; } +}; - return base64url - .replace(/\-/g, '+') - .replace(/_/g, '/'); -} - -function typeError(template) { - var args = [].slice.call(arguments, 1); - var errMsg = util.format.bind(util, template).apply(null, args); - return new TypeError(errMsg); -} -function bufferOrString(obj) { - return Buffer.isBuffer(obj) || typeof obj === 'string'; -} +/***/ }), -function normalizeInput(thing) { - if (!bufferOrString(thing)) - thing = JSON.stringify(thing); - return thing; -} +/***/ 5369: +/***/ ((module) => { -function createHmacSigner(bits) { - return function sign(thing, secret) { - checkIsSecretKey(secret); - thing = normalizeInput(thing); - var hmac = crypto.createHmac('sha' + bits, secret); - var sig = (hmac.update(thing), hmac.digest('base64')) - return fromBase64(sig); +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; } -} -function createHmacVerifier(bits) { - return function verify(thing, signature, secret) { - var computedSig = createHmacSigner(bits)(thing, secret); - return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); - } -} + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; -function createKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - // Even though we are specifying "RSA" here, this works with ECDSA - // keys as well. 
- var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); - return fromBase64(sig); + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); } } +module.exports = RetryOperation; -function createKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify(publicKey, signature, 'base64'); - } +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); } -function createPSSKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign({ - key: privateKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, 'base64')); - return fromBase64(sig); +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); } -} - -function createPSSKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify({ - key: publicKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, signature, 'base64'); + if (this._timer) { + clearTimeout(this._timer); } -} - -function createECDSASigner(bits) { - var inner = createKeySigner(bits); - return function sign() { - var signature = inner.apply(null, arguments); - signature = formatEcdsa.derToJose(signature, 'ES' + bits); - return signature; - }; -} - -function createECDSAVerifer(bits) { - var inner = createKeyVerifier(bits); - return function verify(thing, signature, publicKey) { - signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); - var result = inner(thing, signature, publicKey); - return result; - }; -} -function createNoneSigner() { - return function sign() { - return ''; - } -} + this._timeouts = []; + this._cachedTimeouts = null; +}; -function createNoneVerifier() { - return function verify(thing, signature) { - return signature === ''; +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); } -} -module.exports = function jwa(algorithm) { - var signerFactories = { - hs: createHmacSigner, - rs: createKeySigner, - ps: createPSSKeySigner, - es: createECDSASigner, - none: createNoneSigner, + if (!err) { + return false; } - var verifierFactories = { - hs: createHmacVerifier, - rs: createKeyVerifier, - ps: createPSSKeyVerifier, - es: createECDSAVerifer, - none: createNoneVerifier, + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; } - var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); - if (!match) - throw typeError(MSG_INVALID_ALGORITHM, algorithm); - var algo = (match[1] || match[3]).toLowerCase(); - var bits = match[2]; - return { - sign: signerFactories[algo](bits), - verify: verifierFactories[algo](bits), + 
this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } } -}; + var self = this; + this._timer = setTimeout(function() { + self._attempts++; -/***/ }), + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); -/***/ 4636: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (self._options.unref) { + self._timeout.unref(); + } + } -/*global exports*/ -var SignStream = __nccwpck_require__(3334); -var VerifyStream = __nccwpck_require__(5522); + self._fn(self._attempts); + }, timeout); -var ALGORITHMS = [ - 'HS256', 'HS384', 'HS512', - 'RS256', 'RS384', 'RS512', - 'PS256', 'PS384', 'PS512', - 'ES256', 'ES384', 'ES512' -]; + if (this._options.unref) { + this._timer.unref(); + } -exports.ALGORITHMS = ALGORITHMS; -exports.sign = SignStream.sign; -exports.verify = VerifyStream.verify; -exports.decode = VerifyStream.decode; -exports.isValid = VerifyStream.isValid; -exports.createSign = function createSign(opts) { - return new SignStream(opts); -}; -exports.createVerify = function createVerify(opts) { - return new VerifyStream(opts); + return true; }; +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; -/***/ }), - -/***/ 1868: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*global module, process*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var Stream = __nccwpck_require__(2781); -var util = __nccwpck_require__(3837); - -function DataStream(data) { - this.buffer = null; - this.writable = true; - this.readable = true; - - // No input - if (!data) { - this.buffer = Buffer.alloc(0); - return this; - } - - // Stream - if (typeof data.pipe === 'function') { - this.buffer = Buffer.alloc(0); - data.pipe(this); - return this; + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } } - // Buffer or String - // or Object (assumedly a passworded key) - if (data.length || typeof data === 'object') { - this.buffer = data; - this.writable = false; - process.nextTick(function () { - this.emit('end', data); - this.readable = false; - this.emit('close'); - }.bind(this)); - return this; + var self = this; + if (this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); } - throw new TypeError('Unexpected data type ('+ typeof data + ')'); -} -util.inherits(DataStream, Stream); + this._operationStart = new Date().getTime(); -DataStream.prototype.write = function write(data) { - this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); - this.emit('data', data); + this._fn(this._attempts); }; -DataStream.prototype.end = function end(data) { - if (data) - this.write(data); - this.emit('end', data); - this.emit('close'); - this.writable = false; - this.readable = false; +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); }; -module.exports = DataStream; - - -/***/ }), - -/***/ 3334: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*global module*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var DataStream = 
__nccwpck_require__(1868); -var jwa = __nccwpck_require__(6010); -var Stream = __nccwpck_require__(2781); -var toString = __nccwpck_require__(5292); -var util = __nccwpck_require__(3837); - -function base64url(string, encoding) { - return Buffer - .from(string, encoding) - .toString('base64') - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} - -function jwsSecuredInput(header, payload, encoding) { - encoding = encoding || 'utf8'; - var encodedHeader = base64url(toString(header), 'binary'); - var encodedPayload = base64url(toString(payload), encoding); - return util.format('%s.%s', encodedHeader, encodedPayload); -} - -function jwsSign(opts) { - var header = opts.header; - var payload = opts.payload; - var secretOrKey = opts.secret || opts.privateKey; - var encoding = opts.encoding; - var algo = jwa(header.alg); - var securedInput = jwsSecuredInput(header, payload, encoding); - var signature = algo.sign(securedInput, secretOrKey); - return util.format('%s.%s', securedInput, signature); -} - -function SignStream(opts) { - var secret = opts.secret||opts.privateKey||opts.key; - var secretStream = new DataStream(secret); - this.readable = true; - this.header = opts.header; - this.encoding = opts.encoding; - this.secret = this.privateKey = this.key = secretStream; - this.payload = new DataStream(opts.payload); - this.secret.once('close', function () { - if (!this.payload.writable && this.readable) - this.sign(); - }.bind(this)); - - this.payload.once('close', function () { - if (!this.secret.writable && this.readable) - this.sign(); - }.bind(this)); -} -util.inherits(SignStream, Stream); +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; -SignStream.prototype.sign = function sign() { - try { - var signature = jwsSign({ - header: this.header, - payload: this.payload.buffer, - secret: this.secret.buffer, - encoding: this.encoding - }); - this.emit('done', signature); - this.emit('data', signature); - this.emit('end'); - this.readable = false; - return signature; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); - } +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; }; -SignStream.sign = jwsSign; +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; -module.exports = SignStream; +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + var counts = {}; + var mainError = null; + var mainErrorCount = 0; -/***/ }), + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; -/***/ 5292: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + counts[message] = count; -/*global module*/ -var Buffer = (__nccwpck_require__(4300).Buffer); + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } -module.exports = function toString(obj) { - if (typeof obj === 'string') - return obj; - if (typeof obj === 'number' || Buffer.isBuffer(obj)) - return obj.toString(); - return JSON.stringify(obj); + return mainError; }; /***/ }), -/***/ 5522: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1867: +/***/ ((module, exports, __nccwpck_require__) => { -/*global module*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var DataStream = 
__nccwpck_require__(1868); -var jwa = __nccwpck_require__(6010); -var Stream = __nccwpck_require__(2781); -var toString = __nccwpck_require__(5292); -var util = __nccwpck_require__(3837); -var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(4300) +var Buffer = buffer.Buffer -function isObject(thing) { - return Object.prototype.toString.call(thing) === '[object Object]'; +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } } - -function safeJsonParse(thing) { - if (isObject(thing)) - return thing; - try { return JSON.parse(thing); } - catch (e) { return undefined; } +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer } -function headerFromJWS(jwsSig) { - var encodedHeader = jwsSig.split('.', 1)[0]; - return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) } -function securedInputFromJWS(jwsSig) { - return jwsSig.split('.', 2).join('.'); -} +SafeBuffer.prototype = Object.create(Buffer.prototype) -function signatureFromJWS(jwsSig) { - return jwsSig.split('.')[2]; +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) } -function payloadFromJWS(jwsSig, encoding) { - encoding = encoding || 'utf8'; - var payload = jwsSig.split('.')[1]; - return Buffer.from(payload, 'base64').toString(encoding); +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf } -function isValidJws(string) { - return JWS_REGEX.test(string) && !!headerFromJWS(string); +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) } -function jwsVerify(jwsSig, algorithm, secretOrKey) { - if (!algorithm) { - var err = new Error("Missing algorithm parameter for jws.verify"); - err.code = "MISSING_ALGORITHM"; - throw err; +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') } - jwsSig = toString(jwsSig); - var signature = signatureFromJWS(jwsSig); - var securedInput = securedInputFromJWS(jwsSig); - var algo = jwa(algorithm); - return algo.verify(securedInput, signature, secretOrKey); + return buffer.SlowBuffer(size) } -function jwsDecode(jwsSig, opts) { - opts = opts || {}; - jwsSig = toString(jwsSig); - if (!isValidJws(jwsSig)) - return null; +/***/ }), - var header = headerFromJWS(jwsSig); +/***/ 9626: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!header) - return null; +"use strict"; - var payload = payloadFromJWS(jwsSig); - if (header.typ === 'JWT' || opts.json) - payload = JSON.parse(payload, opts.encoding); - 
return { - header: header, - payload: payload, - signature: signatureFromJWS(jwsSig) - }; -} +var stubs = __nccwpck_require__(1099) -function VerifyStream(opts) { - opts = opts || {}; - var secretOrKey = opts.secret||opts.publicKey||opts.key; - var secretStream = new DataStream(secretOrKey); - this.readable = true; - this.algorithm = opts.algorithm; - this.encoding = opts.encoding; - this.secret = this.publicKey = this.key = secretStream; - this.signature = new DataStream(opts.signature); - this.secret.once('close', function () { - if (!this.signature.writable && this.readable) - this.verify(); - }.bind(this)); +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this - this.signature.once('close', function () { - if (!this.secret.writable && this.readable) - this.verify(); - }.bind(this)); -} -util.inherits(VerifyStream, Stream); -VerifyStream.prototype.verify = function verify() { - try { - var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); - var obj = jwsDecode(this.signature.buffer, this.encoding); - this.emit('done', valid, obj); - this.emit('data', valid); - this.emit('end'); - this.readable = false; - return valid; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); + var cfg = { + callthrough: true, + calls: 1 } -}; -VerifyStream.decode = jwsDecode; -VerifyStream.isValid = isValidJws; -VerifyStream.verify = jwsVerify; + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) -module.exports = VerifyStream; + return stream +} + +module.exports = StreamEvents /***/ }), -/***/ 7129: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6121: +/***/ ((module) => { -"use strict"; +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } + + return state.buffer[0].length + } + return state.length +} -// A linked list to keep track of recently-used-ness -const Yallist = __nccwpck_require__(665) -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') +/***/ }), -const naiveLength = () => 1 +/***/ 4841: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. // -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. 
-class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - if (!options) - options = {} - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } +/**/ - // resize the cache when the max changes. 
- set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') +var Buffer = (__nccwpck_require__(1867).Buffer); +/**/ - this[MAX] = mL || Infinity - trim(this) +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; } - get max () { - return this[MAX] +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.s = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; } - get allowStale () { - return this[ALLOW_STALE] + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') +StringDecoder.prototype.end = utf8End; - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; - // resize the cache when the lengthCalculator changes. 
- set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; } - trim(this) + return nb; } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } + return 0; +} - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } } } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. 
+function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } } + return r; } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} - keys () { - return this[LRU_LIST].toArray().map(k => k.key) +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); } + return r; +} - values () { - return this[LRU_LIST].toArray().map(k => k.value) +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; } + return buf.toString('base64', i, buf.length - n); +} - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} - dumpLru () { - return this[LRU_LIST] - } +/***/ }), - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] +/***/ 4526: +/***/ ((module) => { - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; +// const octRegex = /0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } +//polyfill +if (!Number.parseInt && window.parseInt) { + Number.parseInt = window.parseInt; +} +if (!Number.parseFloat && window.parseFloat) { + Number.parseFloat = window.parseFloat; +} - const node = this[CACHE].get(key) - const item = node.value + +const consider = { + hex : true, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + // const options = Object.assign({}, consider); + // if(opt.leadingZeros === false){ + // options.leadingZeros = false; + // }else if(opt.hex === false){ + // options.hex = false; + // } + + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + // if(trimmedStr === "0.0") return 0; + // else if(trimmedStr === "+0.0") return 0; + // else if(trimmedStr === "-0.0") return -0; + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if (options.hex && hexRegex.test(trimmedStr)) { + return Number.parseInt(trimmedStr, 16); + // } else if (options.parseOct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + const eNotation = match[4] || match[6]; + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(eNotation){ //given number has enotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + // const decimalPart = match[5].substr(1); + // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + + + // const p = numStr.indexOf("."); + // const givenIntPart = numStr.substr(0,p); + // const givenDecPart = numStr.substr(p+1); + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + // if(numTrimmedByZeros === numStr){ + // if(options.leadingZeros) return num; + // else return str; + // }else return str; + if(numTrimmedByZeros === numStr) return num; + else if(sign+numTrimmedByZeros === numStr) return num; + else return str; + } - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } + if(trimmedStr === numStr) return num; + else if(trimmedStr === sign+numStr) return num; + // else{ + // //number with +/- sign + // trimmedStr.test(/[-+][0-9]); - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true + // } + return str; + } + // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; + + }else{ //non-numeric string + return str; + } } +} - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. - if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; } + return numStr; +} +module.exports = toNumber - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - peek (key) { - return get(this, key, false) - } +/***/ }), - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null +/***/ 1099: +/***/ ((module) => { - del(this, node) - return node.value - } +"use strict"; - del (key) { - del(this, this[CACHE].get(key)) - } - load (arr) { - // reset the cache - this.reset() +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } + if (!stub) { + stub = cfg + cfg = {} } - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} + stub = stub || function() {} -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} + cfg.callthrough = cfg.callthrough || false + 
cfg.calls = cfg.calls || 0 -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false + var norevert = cfg.calls === 0 - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} + var cached = obj[method].bind(obj) -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev - } - } -} + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) + if (cfg.callthrough) + returnVal = cached.apply(obj, args) - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} + returnVal = stub.apply(obj, args) || returnVal -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} + if (!norevert && --cfg.calls === 0) + obj[method] = cached -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined + return returnVal } - if (hit) - fn.call(thisp, hit.value, hit.key, self) } -module.exports = LRUCache - /***/ }), -/***/ 7426: +/***/ 9318: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/*! - * mime-db - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015-2022 Douglas Christopher Wilson - * MIT Licensed - */ - -/** - * Module exports. - */ - -module.exports = __nccwpck_require__(3765) - +"use strict"; -/***/ }), +const os = __nccwpck_require__(2037); +const tty = __nccwpck_require__(6224); +const hasFlag = __nccwpck_require__(1621); -/***/ 3583: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +const {env} = process; -"use strict"; -/*! - * mime-types - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else { + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} +function translateLevel(level) { + if (level === 0) { + return false; + } -/** - * Module dependencies. - * @private - */ + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} -var db = __nccwpck_require__(7426) -var extname = (__nccwpck_require__(1017).extname) +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } -/** - * Module variables. 
- * @private - */ + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ -var TEXT_TYPE_REGEXP = /^text\//i + if (hasFlag('color=256')) { + return 2; + } -/** - * Module exports. - * @public - */ + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } -exports.charset = charset -exports.charsets = { lookup: charset } -exports.contentType = contentType -exports.extension = extension -exports.extensions = Object.create(null) -exports.lookup = lookup -exports.types = Object.create(null) + const min = forceColor || 0; -// Populate the extensions/types maps -populateMaps(exports.extensions, exports.types) + if (env.TERM === 'dumb') { + return min; + } -/** - * Get the default charset for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } -function charset (type) { - if (!type || typeof type !== 'string') { - return false - } + return 1; + } - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && db[match[1].toLowerCase()] + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } - if (mime && mime.charset) { - return mime.charset - } + return min; + } - // default text/* to utf-8 - if (match && TEXT_TYPE_REGEXP.test(match[1])) { - return 'UTF-8' - } + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } - return false -} + if (env.COLORTERM === 'truecolor') { + return 3; + } -/** - * Create a full Content-Type header given a MIME type or extension. - * - * @param {string} str - * @return {boolean|string} - */ + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); -function contentType (str) { - // TODO: should this even be in this module? - if (!str || typeof str !== 'string') { - return false - } + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } - var mime = str.indexOf('/') === -1 - ? exports.lookup(str) - : str + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } - if (!mime) { - return false - } + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } - // TODO: use content-type or other module - if (mime.indexOf('charset') === -1) { - var charset = exports.charset(mime) - if (charset) mime += '; charset=' + charset.toLowerCase() - } + if ('COLORTERM' in env) { + return 1; + } - return mime + return min; } -/** - * Get the default extension for a MIME type. 
- * - * @param {string} type - * @return {boolean|string} - */ +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} -function extension (type) { - if (!type || typeof type !== 'string') { - return false - } +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - // get extensions - var exts = match && exports.extensions[match[1].toLowerCase()] +/***/ }), - if (!exts || !exts.length) { - return false - } +/***/ 4920: +/***/ ((__unused_webpack_module, exports) => { - return exts[0] -} +"use strict"; /** - * Lookup the MIME type for a file path/extension. + * @license + * Copyright 2020 Google LLC * - * @param {string} path - * @return {boolean|string} + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - -function lookup (path) { - if (!path || typeof path !== 'string') { - return false - } - - // get the extension ("ext" or ".ext" or full path) - var extension = extname('x.' + path) - .toLowerCase() - .substr(1) - - if (!extension) { - return false - } - - return exports.types[extension] || false +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. + * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } } - +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; /** - * Populate the extensions and types maps. - * @private + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. 
*/ - -function populateMaps (extensions, types) { - // source preference (least -> most) - var preference = ['nginx', 'apache', undefined, 'iana'] - - Object.keys(db).forEach(function forEachMimeType (type) { - var mime = db[type] - var exts = mime.extensions - - if (!exts || !exts.length) { - return +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); } - - // mime -> extensions - extensions[type] = exts - - // extension -> mime - for (var i = 0; i < exts.length; i++) { - var extension = exts[i] - - if (types[extension]) { - var from = preference.indexOf(db[types[extension]].source) - var to = preference.indexOf(mime.source) - - if (types[extension] !== 'application/octet-stream' && - (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { - // skip the remapping - continue + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; } - } - - // set the extension -> mime - types[extension] = type + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; } - }) } - +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map /***/ }), -/***/ 6038: -/***/ ((module) => { +/***/ 446: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -/** - * @param typeMap [Object] Map of MIME type -> Array[extensions] - * @param ... - */ -function Mime() { - this._types = Object.create(null); - this._extensions = Object.create(null); - - for (let i = 0; i < arguments.length; i++) { - this.define(arguments[i]); - } - - this.define = this.define.bind(this); - this.getType = this.getType.bind(this); - this.getExtension = this.getExtension.bind(this); -} - /** - * Define mimetype -> extension mappings. Each key is a mime-type that maps - * to an array of extensions associated with the type. The first extension is - * used as the default extension for the type. - * - * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * @license + * Copyright 2019 Google LLC * - * If a type declares an extension that has already been defined, an error will - * be thrown. To suppress this error and force the extension to be associated - * with the new type, pass `force`=true. Alternatively, you may prefix the - * extension with "*" to map the type to extension, without mapping the - * extension to the type. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * http://www.apache.org/licenses/LICENSE-2.0 * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(3685); +const https_1 = __nccwpck_require__(5687); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(7310); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. 
* - * @param map (Object) type definitions - * @param force (Boolean) if true, force overriding of existing definitions + * @param uri The request uri + * @returns {boolean} */ -Mime.prototype.define = function(typeMap, force) { - for (let type in typeMap) { - let extensions = typeMap[type].map(function(t) { - return t.toLowerCase(); - }); - type = type.toLowerCase(); - - for (let i = 0; i < extensions.length; i++) { - const ext = extensions[i]; - - // '*' prefix = not the preferred type for this extension. So fixup the - // extension, and skip it. - if (ext[0] === '*') { - continue; - } - - if (!force && (ext in this._types)) { - throw new Error( - 'Attempt to change mapping for "' + ext + - '" extension from "' + this._types[ext] + '" to "' + type + - '". Pass `force=true` to allow this, otherwise remove "' + ext + - '" from the list of extensions for "' + type + '".' - ); - } - - this._types[ext] = type; +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; } - - // Use first extension as default - if (force || !this._extensions[type]) { - const ext = extensions[0]; - this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } } - } -}; - -/** - * Lookup a mime type based on extension - */ -Mime.prototype.getType = function(path) { - path = String(path); - let last = path.replace(/^.*[/\\]/, '').toLowerCase(); - let ext = last.replace(/^.*\./, '').toLowerCase(); - - let hasPath = last.length < path.length; - let hasDot = ext.length < last.length - 1; - - return (hasDot || !hasPath) && this._types[ext] || null; -}; - + return true; +} /** - * Return file extension associated with a mime type + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} */ -Mime.prototype.getExtension = function(type) { - type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; - return type && this._extensions[type.toLowerCase()] || null; -}; - -module.exports = Mime; - +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? __nccwpck_require__(3764) + : __nccwpck_require__(7219); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? 
http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map /***/ }), -/***/ 5377: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6886: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -let Mime = __nccwpck_require__(6038); -module.exports = new Mime(__nccwpck_require__(3114), __nccwpck_require__(8809)); - - -/***/ }), - -/***/ 8809: -/***/ ((module) => { - -module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":[
"uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs"
:["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.n
oblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.s
hana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"appli
cation/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo
","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fl
y"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; - -/***/ }), - -/***/ 3114: -/***/ ((module) => { - -module.exports = {"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application
/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["
wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":
["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; - -/***/ }), - -/***/ 900: -/***/ ((module) => { - -/** - * Helpers. - */ - -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; -var w = d * 7; -var y = d * 365.25; - /** - * Parse or format the given `val`. + * @license + * Copyright 2018 Google LLC * - * Options: + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * - `long` verbose formatting [false] + * http://www.apache.org/licenses/LICENSE-2.0 * - * @param {String|Number} val - * @param {Object} [options] - * @throws {Error} throw an error if val is not a non-empty string or a number - * @return {String|Number} - * @api public + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - -module.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === 'string' && val.length > 0) { - return parse(val); - } else if (type === 'number' && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - 'val is not a non-empty string or a valid number. val=' + - JSON.stringify(val) - ); -}; - +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(467); +const stream_1 = __nccwpck_require__(2781); +const uuid = __nccwpck_require__(7835); +const agents_1 = __nccwpck_require__(446); +const TeenyStatistics_1 = __nccwpck_require__(4920); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(9626); +class RequestError extends Error { +} +exports.RequestError = RequestError; /** - * Parse the given `str` and return milliseconds. 
- * - * @param {String} str - * @return {Number} - * @api private + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options */ - -function parse(str) { - str = String(str); - if (str.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type = (match[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'yrs': - case 'yr': - case 'y': - return n * y; - case 'weeks': - case 'week': - case 'w': - return n * w; - case 'days': - case 'day': - case 'd': - return n * d; - case 'hours': - case 'hour': - case 'hrs': - case 'hr': - case 'h': - return n * h; - case 'minutes': - case 'minute': - case 'mins': - case 'min': - case 'm': - return n * m; - case 'seconds': - case 'second': - case 'secs': - case 'sec': - case 's': - return n * s; - case 'milliseconds': - case 'millisecond': - case 'msecs': - case 'msec': - case 'ms': - return n; - default: - return undefined; - } +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(3477); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; } - +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; /** - * Short format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private + * Single instance of an interface for keeping track of things. */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map -function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + 'd'; +/***/ }), + +/***/ 7835: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; } - if (msAbs >= h) { - return Math.round(ms / h) + 'h'; +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; } - if (msAbs >= m) { - return Math.round(ms / m) + 'm'; +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; } - if (msAbs >= s) { - return Math.round(ms / s) + 's'; +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; } - return ms + 'ms'; -} - -/** - * Long format for `ms`. 
- * - * @param {Number} ms - * @return {String} - * @api private - */ - -function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, 'day'); +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; } - if (msAbs >= h) { - return plural(ms, msAbs, h, 'hour'); +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; } - if (msAbs >= m) { - return plural(ms, msAbs, m, 'minute'); +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; } - if (msAbs >= s) { - return plural(ms, msAbs, s, 'second'); +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; } - return ms + ' ms'; -} - -/** - * Pluralization helper. - */ - -function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); -} +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +var _v = _interopRequireDefault(__nccwpck_require__(618)); -/***/ }), +var _v2 = _interopRequireDefault(__nccwpck_require__(6888)); -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { +var _v3 = _interopRequireDefault(__nccwpck_require__(1186)); -"use strict"; +var _v4 = _interopRequireDefault(__nccwpck_require__(2385)); +var _nil = _interopRequireDefault(__nccwpck_require__(5879)); -Object.defineProperty(exports, "__esModule", ({ value: true })); +var _version = _interopRequireDefault(__nccwpck_require__(694)); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); -var Stream = _interopDefault(__nccwpck_require__(2781)); -var http = _interopDefault(__nccwpck_require__(3685)); -var Url = _interopDefault(__nccwpck_require__(7310)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(5687)); -var zlib = _interopDefault(__nccwpck_require__(9796)); +var _stringify = _interopRequireDefault(__nccwpck_require__(5802)); -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js +var _parse = _interopRequireDefault(__nccwpck_require__(893)); -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); +/***/ }), -class Blob { - constructor() { - this[TYPE] = ''; +/***/ 9576: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const blobParts = arguments[0]; - const options = arguments[1]; +"use strict"; - const buffers = []; - let size = 0; - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - this[BUFFER] = Buffer.concat(buffers); +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } + return _crypto.default.createHash('md5').update(bytes).digest(); } -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } +var _default = md5; +exports["default"] = _default; - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} +/***/ }), -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +/***/ 4974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -let convert; -try { - convert = (__nccwpck_require__(2877).convert); -} catch (e) {} +"use strict"; -const INTERNALS = Symbol('Body internals'); -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = 
Stream.PassThrough; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; +/***/ }), - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} +/***/ 5879: +/***/ ((__unused_webpack_module, exports) => { -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, +"use strict"; - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, +/***/ }), - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; +/***/ 893: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, +"use strict"; - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - /** - * Decode response as buffer (non-spec api) - * - 
* @return Promise - */ - buffer() { - return consumeBody.call(this); - }, +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ -/** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ - this[INTERNALS].disturbed = true; + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
- if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - let body = this.body; + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +var _default = parse; +exports["default"] = _default; - // body is blob - if (isBlob(body)) { - body = body.stream(); - } +/***/ }), - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } +/***/ 7658: +/***/ ((__unused_webpack_module, exports) => { - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +"use strict"; - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - return new Body.Promise(function (resolve, reject) { - let resTimeout; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } +/***/ }), - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); +/***/ 2042: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } +"use strict"; - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - accumBytes += chunk.length; - accum.push(chunk); - }); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; - body.on('end', function () { - if (abort) { - return; - } +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - clearTimeout(resTimeout); +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }
- try {
- resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
- });
-}

+const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate

-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }

+let poolPtr = rnds8Pool.length;

- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;

+function rng() {
+ if (poolPtr > rnds8Pool.length - 16) {
+ _crypto.default.randomFillSync(rnds8Pool);

- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }

+ poolPtr = 0;
+ }

- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();

+ return rnds8Pool.slice(poolPtr, poolPtr += 16);
+}

- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
- }
-
- // html4
- if (!res && str) {
- res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
- if (res) {
- res = /charset=(.*)/i.exec(res.pop());
- }
- }

- // xml
- if (!res && str) {
- res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
- }

- // found charset
- if (res) {
- charset = res.pop();

+/***/ }),

- // prevent decode issues when sites use incorrect encoding
- // ref: https://hsivonen.fi/encoding-menu/
- if (charset === 'gb2312' || charset === 'gbk') {
- charset = 'gb18030';
- }
- }

+/***/ 6723:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

- // turn raw buffers into a single utf-8 buffer
- return convert(buffer, 'UTF-8', charset).toString();
-}

+"use strict";

-/**
- * Detect a URLSearchParams object
- * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
- *
- * @param Object obj Object to detect by type or brand
- * @return String
- */
-function isURLSearchParams(obj) {
- // Duck-typing as a necessary condition.
- if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
- return false;
- }

+Object.defineProperty(exports, "__esModule", ({
+ value: true
+}));
+exports["default"] = void 0;

+var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

+function sha1(bytes) {
+ if (Array.isArray(bytes)) {
+ bytes = Buffer.from(bytes);
+ } else if (typeof bytes === 'string') {
+ bytes = Buffer.from(bytes, 'utf8');
+ }

- // Brand-checking and more duck-typing as optional condition. 
- return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; + return _crypto.default.createHash('sha1').update(bytes).digest(); } -/** - * Check if `obj` is a W3C `Blob` object (which `File` inherits from) - * @param {*} obj - * @return {boolean} - */ -function isBlob(obj) { - return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); -} +var _default = sha1; +exports["default"] = _default; -/** - * Clone body given Res/Req instance - * - * @param Mixed instance Response or Request instance - * @return Mixed - */ -function clone(instance) { - let p1, p2; - let body = instance.body; +/***/ }), - // don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } +/***/ 5802: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if (body instanceof Stream && typeof body.getBoundary !== 'function') { - // tee instance body - p1 = new PassThrough(); - p2 = new PassThrough(); - body.pipe(p1); - body.pipe(p2); - // set instance body to teed body and return the other teed body - instance[INTERNALS].body = p1; - body = p2; - } +"use strict"; - return body; -} -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is present. - * - * @param Mixed instance Any options.body input - */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. 
- * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes - * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ -function getTotalBytes(instance) { - const body = instance.body; +const byteToHex = []; +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; } -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. - * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } + return uuid; } -// expose Promise -Body.Promise = global.Promise; - -/** - * headers.js - * - * Headers class offers convenient helpers - */ +var _default = stringify; +exports["default"] = _default; -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +/***/ }), -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} +/***/ 618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} +"use strict"; -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. 
- *
- * @param String name Header name
- * @return String|Undefined
- */
-function find(map, name) {
- name = name.toLowerCase();
- for (const key in map) {
- if (key.toLowerCase() === name) {
- return key;
- }
- }
- return undefined;
-}

+Object.defineProperty(exports, "__esModule", ({
+ value: true
+}));
+exports["default"] = void 0;

-const MAP = Symbol('map');
-class Headers {
- /**
- * Headers class
- *
- * @param Object headers Response headers
- * @return Void
- */
- constructor() {
- let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

+var _rng = _interopRequireDefault(__nccwpck_require__(2042));

- this[MAP] = Object.create(null);

+var _stringify = __nccwpck_require__(5802);

- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);

+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }

+// **`v1()` - Generate time-based UUID**
+//
+// Inspired by https://github.com/LiosK/UUID.js
+// and http://docs.python.org/library/uuid.html
+let _nodeId;

- return;
- }

+let _clockseq; // Previous uuid creation time

- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }

+let _lastMSecs = 0;
+let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details

- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }

+function v1(options, buf, offset) {
+ let i = buf && offset || 0;
+ const b = buf || new Array(16);
+ options = options || {};
+ let node = options.node || _nodeId;
+ let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
+ // specified. We do this lazily to minimize issues related to insufficient
+ // system entropy. See #189

- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }

+ if (node == null || clockseq == null) {
+ const seedBytes = options.random || (options.rng || _rng.default)();

- return this[MAP][key].join(', ');
- }

- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : undefined; + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? key : name] = [value]; - } + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. 
- * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); + msecs += 12219292800000; // `time_low` -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` -const INTERNAL = Symbol('internal'); + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; + b[i++] = clockseq & 0xff; // `node` - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } - this[INTERNAL].index = index + 1; + return buf || (0, _stringify.unsafeStringify)(b); +} - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); +var _default = v1; +exports["default"] = _default; -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); +/***/ }), -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); +/***/ 6888: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // http.request() only supports string as Host header. 
This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } +"use strict"; - return obj; -} -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -const INTERNALS$1 = Symbol('Response internals'); +var _v = _interopRequireDefault(__nccwpck_require__(8392)); -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; +var _md = _interopRequireDefault(__nccwpck_require__(9576)); -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - Body.call(this, body, opts); +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; - const status = opts.status || 200; - const headers = new Headers(opts.headers); +/***/ }), - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } +/***/ 8392: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } +"use strict"; - get url() { - return this[INTERNALS$1].url || ''; - } - get status() { - return this[INTERNALS$1].status; - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } +var _stringify = __nccwpck_require__(5802); - get redirected() { - return this[INTERNALS$1].counter > 0; - } +var _parse = _interopRequireDefault(__nccwpck_require__(893)); - get statusText() { - return this[INTERNALS$1].statusText; - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - get headers() { - return this[INTERNALS$1].headers; - } +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; } -Body.mixIn(Response.prototype); +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + if (typeof value === 'string') { + value = stringToBytes(value); + } -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + if (buf) { + offset = offset || 0; -/** - * Check if a value is an instance of Request. 
- * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} + return buf; + } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) - let parsedURL; - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; +/***/ }), - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); +/***/ 1186: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const headers = new Headers(init.headers || input.headers || {}); +"use strict"; - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } +var _native = _interopRequireDefault(__nccwpck_require__(4974)); - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; +var _rng = _interopRequireDefault(__nccwpck_require__(2042)); - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } +var _stringify = __nccwpck_require__(5802); - get method() { - return this[INTERNALS$2].method; - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } - get headers() { - return this[INTERNALS$2].headers; - } + options = options || {}; - get redirect() { - return this[INTERNALS$2].redirect; - } + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - get signal() { - return this[INTERNALS$2].signal; - } - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided -Body.mixIn(Request.prototype); + if (buf) { + offset = offset || 0; -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); + return buf; + } -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); + return (0, _stringify.unsafeStringify)(rnds); +} - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } +var _default = v4; +exports["default"] = _default; - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } +/***/ }), - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } +/***/ 2385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } +"use strict"; - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } +var _v = _interopRequireDefault(__nccwpck_require__(8392)); - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } +var _sha = _interopRequireDefault(__nccwpck_require__(6723)); - if (!headers.has('Connection') && !agent) { - 
headers.set('Connection', 'close'); - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} +/***/ }), -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ +/***/ 4875: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); +"use strict"; - this.type = 'aborted'; - this.message = message; - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; +var _regex = _interopRequireDefault(__nccwpck_require__(7658)); -const URL$1 = Url.URL || whatwgUrl.URL; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; +var _default = validate; +exports["default"] = _default; - return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); -}; +/***/ }), -/** - * isSameProtocol reports whether the two provided URLs use the same protocol. - * - * Both domains must already be in canonical form. - * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = function isSameProtocol(destination, original) { - const orig = new URL$1(original).protocol; - const dest = new URL$1(destination).protocol; +/***/ 694: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return orig === dest; -}; +"use strict"; -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - Body.Promise = fetch.Promise; +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } - let response = null; + return parseInt(uuid.slice(14, 15), 16); +} - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - destroyStream(request.body, error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; +var _default = version; +exports["default"] = _default; - if (signal && signal.aborted) { - abort(); - return; - } +/***/ }), - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; +/***/ 4256: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // send request - const req = send(options); - let reqTimeout; +"use strict"; - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } +var punycode = __nccwpck_require__(5477); +var mappingTable = __nccwpck_require__(2020); - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} - if (response && response.body) { - destroyStream(response.body, err); - } +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; - finalize(); - }); + while (start <= end) { + var mid = Math.floor((start + end) / 2); - fixResponseChunkedTransferBadEnding(req, function (err) { - if (signal && signal.aborted) { - return; - } + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } - if (response && response.body) { - destroyStream(response.body, err); - } - }); + return null; +} - /* c8 ignore next 18 */ - if (parseInt(process.version.substring(1)) < 14) { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. 
- req.on('socket', function (s) { - s.addListener('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = s.listenerCount('data') > 0; +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; - // if end happened before close but the socket didn't emit an error, do it now - if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', err); - } - }); - }); - } +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} - req.on('response', function (res) { - clearTimeout(reqTimeout); +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; - const headers = createHeadersLenient(res.headers); + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } + processed += String.fromCodePoint(codePoint); + break; + } + } - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + return { + string: processed, + error: hasError + }; +} - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } +var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } + var error = false; - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } + return { + label: label, + error: error + }; +} - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); + return { + string: labels.join("."), + error: result.error + }; +} - // HTTP-network fetch step 12.1.1.4: handle content codings +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } + if (result.error) return null; + return labels.join("."); +}; - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - raw.on('end', function () { - // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
- if (!response) { - response = new Response(body, response_options); - resolve(response); - } - }); - return; - } +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } + return { + domain: result.string, + error: result.error + }; +}; - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - writeToStream(req, request); - }); -} -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - let socket; - request.on('socket', function (s) { - socket = s; - }); +/***/ }), - request.on('response', function (response) { - const headers = response.headers; +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { - response.once('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = socket.listenerCount('data') > 0; +module.exports = __nccwpck_require__(4219); - if (hasDataListener && !hadError) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(err); - } - }); - } - }); -} -function destroyStream(stream, err) { - if (stream.destroy) { - stream.destroy(err); - } else { - // node < 8 - stream.emit('error', err); - stream.end(); - } -} +/***/ }), -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// expose Promise -fetch.Promise = global.Promise; +"use strict"; -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports["default"] = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); -/***/ }), -/***/ 7994: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; -/** - * Advanced Encryption Standard (AES) implementation. - * - * This implementation is based on the public domain library 'jscrypto' which - * was written by: - * - * Emily Stark (estark@stanford.edu) - * Mike Hamburg (mhamburg@stanford.edu) - * Dan Boneh (dabo@cs.stanford.edu) - * - * Parts of this code are based on the OpenSSL implementation of AES: - * http://www.openssl.org - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
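The tr46 block added above exposes three things: toASCII, toUnicode, and PROCESSING_OPTIONS. A hedged usage sketch, assuming the module is addressed by its npm name 'tr46' rather than its bundled webpack id, and using an illustrative domain; the expected outputs follow from the mapping/validation/punycode steps shown above but are not asserted by the patch itself:

  // Sketch only: the require path and sample domain are assumptions for illustration.
  var tr46 = require('tr46');

  // Map + validate + punycode-encode each label; returns null if an error was flagged.
  var ascii = tr46.toASCII('Bücher.example', false, tr46.PROCESSING_OPTIONS.TRANSITIONAL, false);
  console.log(ascii); // expected: 'xn--bcher-kva.example'

  // Decode back to Unicode form, reporting whether validation failed.
  console.log(tr46.toUnicode('xn--bcher-kva.example', false));
  // expected: { domain: 'bücher.example', error: false }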
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7088); -__nccwpck_require__(873); -__nccwpck_require__(8339); -/* AES API */ -module.exports = forge.aes = forge.aes || {}; +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('AES-', key); - * cipher.start({iv: iv}); - * - * Creates an AES cipher object to encrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as a string of bytes, an array of bytes, - * a byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.startEncrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: false, - mode: mode - }); - cipher.start(iv); - return cipher; -}; +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('AES-', key); - * - * Creates an AES cipher object to encrypt data using the given symmetric key. - * - * The key may be given as a string of bytes, an array of bytes, a - * byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.createEncryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: false, - mode: mode - }); -}; +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('AES-', key); - * decipher.start({iv: iv}); - * - * Creates an AES cipher object to decrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as a string of bytes, an array of bytes, - * a byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.startDecrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: true, - mode: mode - }); - cipher.start(iv); - return cipher; -}; +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('AES-', key); - * - * Creates an AES cipher object to decrypt data using the given symmetric key. - * - * The key may be given as a string of bytes, an array of bytes, a - * byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. 
- * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.createDecryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: true, - mode: mode - }); -}; -/** - * Creates a new AES cipher algorithm object. - * - * @param name the name of the algorithm. - * @param mode the mode factory function. - * - * @return the AES algorithm object. - */ -forge.aes.Algorithm = function(name, mode) { - if(!init) { - initialize(); - } +function TunnelingAgent(options) { var self = this; - self.name = name; - self.mode = new mode({ - blockSize: 16, - cipher: { - encrypt: function(inBlock, outBlock) { - return _updateBlock(self._w, inBlock, outBlock, false); - }, - decrypt: function(inBlock, outBlock) { - return _updateBlock(self._w, inBlock, outBlock, true); + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; } } + socket.destroy(); + self.removeSocket(socket); }); - self._init = false; -}; +} +util.inherits(TunnelingAgent, events.EventEmitter); -/** - * Initializes this AES algorithm by expanding its key. - * - * @param options the options to use. - * key the key to use with this algorithm. - * decrypt true if the algorithm should be initialized for decryption, - * false for encryption. - */ -forge.aes.Algorithm.prototype.initialize = function(options) { - if(this._init) { +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); return; } - var key = options.key; - var tmp; + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); - /* Note: The key may be a string of bytes, an array of bytes, a byte - buffer, or an array of 32-bit integers. If the key is in bytes, then - it must be 16, 24, or 32 bytes in length. If it is in 32-bit - integers, it must be 4, 6, or 8 integers long. 
*/ + function onFree() { + self.emit('free', socket, options); + } - if(typeof key === 'string' && - (key.length === 16 || key.length === 24 || key.length === 32)) { - // convert key string into byte buffer - key = forge.util.createBuffer(key); - } else if(forge.util.isArray(key) && - (key.length === 16 || key.length === 24 || key.length === 32)) { - // convert key integer array into byte buffer - tmp = key; - key = forge.util.createBuffer(); - for(var i = 0; i < tmp.length; ++i) { - key.putByte(tmp[i]); + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); } - } + }); +}; - // convert key byte buffer into 32-bit integer array - if(!forge.util.isArray(key)) { - tmp = key; - key = []; +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); - // key lengths of 16, 24, 32 bytes allowed - var len = tmp.length(); - if(len === 16 || len === 24 || len === 32) { - len = len >>> 2; - for(var i = 0; i < len; ++i) { - key.push(tmp.getInt32()); - } + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; } - - // key must be an array of 32-bit integers by now - if(!forge.util.isArray(key) || - !(key.length === 4 || key.length === 6 || key.length === 8)) { - throw new Error('Invalid key parameter.'); + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); } - // encryption operation is always used for these modes - var mode = this.mode.name; - var encryptOp = (['CFB', 'OFB', 'CTR', 'GCM'].indexOf(mode) !== -1); + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); - // do key expansion - this._w = _expandKey(key, options.decrypt && !encryptOp); - this._init = true; -}; + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } -/** - * Expands a key. Typically only used for testing. - * - * @param key the symmetric key to expand, as an array of 32-bit words. - * @param decrypt true to expand for decryption, false for encryption. - * - * @return the expanded key. - */ -forge.aes._expandKey = function(key, decrypt) { - if(!init) { - initialize(); + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); } - return _expandKey(key, decrypt); -}; -/** - * Updates a single block. Typically only used for testing. - * - * @param w the expanded key to use. - * @param input an array of block-size 32-bit words. - * @param output an array of block-size 32-bit words. - * @param decrypt true to decrypt, false to encrypt. 
- */ -forge.aes._updateBlock = _updateBlock; + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); -/** Register AES algorithms **/ + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } -registerAlgorithm('AES-ECB', forge.cipher.modes.ecb); -registerAlgorithm('AES-CBC', forge.cipher.modes.cbc); -registerAlgorithm('AES-CFB', forge.cipher.modes.cfb); -registerAlgorithm('AES-OFB', forge.cipher.modes.ofb); -registerAlgorithm('AES-CTR', forge.cipher.modes.ctr); -registerAlgorithm('AES-GCM', forge.cipher.modes.gcm); + function onError(cause) { + connectReq.removeAllListeners(); -function registerAlgorithm(name, mode) { - var factory = function() { - return new forge.aes.Algorithm(name, mode); - }; - forge.cipher.registerAlgorithm(name, factory); -} + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; -/** AES implementation **/ +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); -var init = false; // not yet initialized -var Nb = 4; // number of words comprising the state (AES = 4) -var sbox; // non-linear substitution table used in key expansion -var isbox; // inversion of sbox -var rcon; // round constant word array -var mix; // mix-columns table -var imix; // inverse mix-columns table + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; -/** - * Performs initialization, ie: precomputes tables to optimize for speed. - * - * One way to understand how AES works is to imagine that 'addition' and - * 'multiplication' are interfaces that require certain mathematical - * properties to hold true (ie: they are associative) but they might have - * different implementations and produce different kinds of results ... - * provided that their mathematical properties remain true. AES defines - * its own methods of addition and multiplication but keeps some important - * properties the same, ie: associativity and distributivity. The - * explanation below tries to shed some light on how AES defines addition - * and multiplication of bytes and 32-bit words in order to perform its - * encryption and decryption algorithms. 
- * - * The basics: - * - * The AES algorithm views bytes as binary representations of polynomials - * that have either 1 or 0 as the coefficients. It defines the addition - * or subtraction of two bytes as the XOR operation. It also defines the - * multiplication of two bytes as a finite field referred to as GF(2^8) - * (Note: 'GF' means "Galois Field" which is a field that contains a finite - * number of elements so GF(2^8) has 256 elements). - * - * This means that any two bytes can be represented as binary polynomials; - * when they multiplied together and modularly reduced by an irreducible - * polynomial of the 8th degree, the results are the field GF(2^8). The - * specific irreducible polynomial that AES uses in hexadecimal is 0x11b. - * This multiplication is associative with 0x01 as the identity: - * - * (b * 0x01 = GF(b, 0x01) = b). - * - * The operation GF(b, 0x02) can be performed at the byte level by left - * shifting b once and then XOR'ing it (to perform the modular reduction) - * with 0x11b if b is >= 128. Repeated application of the multiplication - * of 0x02 can be used to implement the multiplication of any two bytes. - * - * For instance, multiplying 0x57 and 0x13, denoted as GF(0x57, 0x13), can - * be performed by factoring 0x13 into 0x01, 0x02, and 0x10. Then these - * factors can each be multiplied by 0x57 and then added together. To do - * the multiplication, values for 0x57 multiplied by each of these 3 factors - * can be precomputed and stored in a table. To add them, the values from - * the table are XOR'd together. - * - * AES also defines addition and multiplication of words, that is 4-byte - * numbers represented as polynomials of 3 degrees where the coefficients - * are the values of the bytes. - * - * The word [a0, a1, a2, a3] is a polynomial a3x^3 + a2x^2 + a1x + a0. - * - * Addition is performed by XOR'ing like powers of x. Multiplication - * is performed in two steps, the first is an algebriac expansion as - * you would do normally (where addition is XOR). But the result is - * a polynomial larger than 3 degrees and thus it cannot fit in a word. So - * next the result is modularly reduced by an AES-specific polynomial of - * degree 4 which will always produce a polynomial of less than 4 degrees - * such that it will fit in a word. In AES, this polynomial is x^4 + 1. - * - * The modular product of two polynomials 'a' and 'b' is thus: - * - * d(x) = d3x^3 + d2x^2 + d1x + d0 - * with - * d0 = GF(a0, b0) ^ GF(a3, b1) ^ GF(a2, b2) ^ GF(a1, b3) - * d1 = GF(a1, b0) ^ GF(a0, b1) ^ GF(a3, b2) ^ GF(a2, b3) - * d2 = GF(a2, b0) ^ GF(a1, b1) ^ GF(a0, b2) ^ GF(a3, b3) - * d3 = GF(a3, b0) ^ GF(a2, b1) ^ GF(a1, b2) ^ GF(a0, b3) - * - * As a matrix: - * - * [d0] = [a0 a3 a2 a1][b0] - * [d1] [a1 a0 a3 a2][b1] - * [d2] [a2 a1 a0 a3][b2] - * [d3] [a3 a2 a1 a0][b3] - * - * Special polynomials defined by AES (0x02 == {02}): - * a(x) = {03}x^3 + {01}x^2 + {01}x + {02} - * a^-1(x) = {0b}x^3 + {0d}x^2 + {09}x + {0e}. - * - * These polynomials are used in the MixColumns() and InverseMixColumns() - * operations, respectively, to cause each element in the state to affect - * the output (referred to as diffusing). - * - * RotWord() uses: a0 = a1 = a2 = {00} and a3 = {01}, which is the - * polynomial x3. - * - * The ShiftRows() method modifies the last 3 rows in the state (where - * the state is 4 words with 4 bytes per word) by shifting bytes cyclically. - * The 1st byte in the second row is moved to the end of the row. 
The 1st - * and 2nd bytes in the third row are moved to the end of the row. The 1st, - * 2nd, and 3rd bytes are moved in the fourth row. - * - * More details on how AES arithmetic works: - * - * In the polynomial representation of binary numbers, XOR performs addition - * and subtraction and multiplication in GF(2^8) denoted as GF(a, b) - * corresponds with the multiplication of polynomials modulo an irreducible - * polynomial of degree 8. In other words, for AES, GF(a, b) will multiply - * polynomial 'a' with polynomial 'b' and then do a modular reduction by - * an AES-specific irreducible polynomial of degree 8. - * - * A polynomial is irreducible if its only divisors are one and itself. For - * the AES algorithm, this irreducible polynomial is: - * - * m(x) = x^8 + x^4 + x^3 + x + 1, - * - * or {01}{1b} in hexadecimal notation, where each coefficient is a bit: - * 100011011 = 283 = 0x11b. - * - * For example, GF(0x57, 0x83) = 0xc1 because - * - * 0x57 = 87 = 01010111 = x^6 + x^4 + x^2 + x + 1 - * 0x85 = 131 = 10000101 = x^7 + x + 1 - * - * (x^6 + x^4 + x^2 + x + 1) * (x^7 + x + 1) - * = x^13 + x^11 + x^9 + x^8 + x^7 + - * x^7 + x^5 + x^3 + x^2 + x + - * x^6 + x^4 + x^2 + x + 1 - * = x^13 + x^11 + x^9 + x^8 + x^6 + x^5 + x^4 + x^3 + 1 = y - * y modulo (x^8 + x^4 + x^3 + x + 1) - * = x^7 + x^6 + 1. - * - * The modular reduction by m(x) guarantees the result will be a binary - * polynomial of less than degree 8, so that it can fit in a byte. - * - * The operation to multiply a binary polynomial b with x (the polynomial - * x in binary representation is 00000010) is: - * - * b_7x^8 + b_6x^7 + b_5x^6 + b_4x^5 + b_3x^4 + b_2x^3 + b_1x^2 + b_0x^1 - * - * To get GF(b, x) we must reduce that by m(x). If b_7 is 0 (that is the - * most significant bit is 0 in b) then the result is already reduced. If - * it is 1, then we can reduce it by subtracting m(x) via an XOR. - * - * It follows that multiplication by x (00000010 or 0x02) can be implemented - * by performing a left shift followed by a conditional bitwise XOR with - * 0x1b. This operation on bytes is denoted by xtime(). Multiplication by - * higher powers of x can be implemented by repeated application of xtime(). - * - * By adding intermediate results, multiplication by any constant can be - * implemented. For instance: - * - * GF(0x57, 0x13) = 0xfe because: - * - * xtime(b) = (b & 128) ? (b << 1 ^ 0x11b) : (b << 1) - * - * Note: We XOR with 0x11b instead of 0x1b because in javascript our - * datatype for b can be larger than 1 byte, so a left shift will not - * automatically eliminate bits that overflow a byte ... by XOR'ing the - * overflow bit with 1 (the extra one from 0x11b) we zero it out. - * - * GF(0x57, 0x02) = xtime(0x57) = 0xae - * GF(0x57, 0x04) = xtime(0xae) = 0x47 - * GF(0x57, 0x08) = xtime(0x47) = 0x8e - * GF(0x57, 0x10) = xtime(0x8e) = 0x07 - * - * GF(0x57, 0x13) = GF(0x57, (0x01 ^ 0x02 ^ 0x10)) - * - * And by the distributive property (since XOR is addition and GF() is - * multiplication): - * - * = GF(0x57, 0x01) ^ GF(0x57, 0x02) ^ GF(0x57, 0x10) - * = 0x57 ^ 0xae ^ 0x07 - * = 0xfe. - */ -function initialize() { - init = true; +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); - /* Populate the Rcon table. 
These are the values given by - [x^(i-1),{00},{00},{00}] where x^(i-1) are powers of x (and x = 0x02) - in the field of GF(2^8), where i starts at 1. + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} - rcon[0] = [0x00, 0x00, 0x00, 0x00] - rcon[1] = [0x01, 0x00, 0x00, 0x00] 2^(1-1) = 2^0 = 1 - rcon[2] = [0x02, 0x00, 0x00, 0x00] 2^(2-1) = 2^1 = 2 - ... - rcon[9] = [0x1B, 0x00, 0x00, 0x00] 2^(9-1) = 2^8 = 0x1B - rcon[10] = [0x36, 0x00, 0x00, 0x00] 2^(10-1) = 2^9 = 0x36 - We only store the first byte because it is the only one used. - */ - rcon = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36]; - - // compute xtime table which maps i onto GF(i, 0x02) - var xtime = new Array(256); - for(var i = 0; i < 128; ++i) { - xtime[i] = i << 1; - xtime[i + 128] = (i + 128) << 1 ^ 0x11B; - } - - // compute all other tables - sbox = new Array(256); - isbox = new Array(256); - mix = new Array(4); - imix = new Array(4); - for(var i = 0; i < 4; ++i) { - mix[i] = new Array(256); - imix[i] = new Array(256); - } - var e = 0, ei = 0, e2, e4, e8, sx, sx2, me, ime; - for(var i = 0; i < 256; ++i) { - /* We need to generate the SubBytes() sbox and isbox tables so that - we can perform byte substitutions. This requires us to traverse - all of the elements in GF, find their multiplicative inverses, - and apply to each the following affine transformation: - - bi' = bi ^ b(i + 4) mod 8 ^ b(i + 5) mod 8 ^ b(i + 6) mod 8 ^ - b(i + 7) mod 8 ^ ci - for 0 <= i < 8, where bi is the ith bit of the byte, and ci is the - ith bit of a byte c with the value {63} or {01100011}. - - It is possible to traverse every possible value in a Galois field - using what is referred to as a 'generator'. There are many - generators (128 out of 256): 3,5,6,9,11,82 to name a few. To fully - traverse GF we iterate 255 times, multiplying by our generator - each time. - - On each iteration we can determine the multiplicative inverse for - the current element. - - Suppose there is an element in GF 'e'. For a given generator 'g', - e = g^x. The multiplicative inverse of e is g^(255 - x). It turns - out that if use the inverse of a generator as another generator - it will produce all of the corresponding multiplicative inverses - at the same time. For this reason, we choose 5 as our inverse - generator because it only requires 2 multiplies and 1 add and its - inverse, 82, requires relatively few operations as well. - - In order to apply the affine transformation, the multiplicative - inverse 'ei' of 'e' can be repeatedly XOR'd (4 times) with a - bit-cycling of 'ei'. To do this 'ei' is first stored in 's' and - 'x'. Then 's' is left shifted and the high bit of 's' is made the - low bit. The resulting value is stored in 's'. Then 'x' is XOR'd - with 's' and stored in 'x'. On each subsequent iteration the same - operation is performed. When 4 iterations are complete, 'x' is - XOR'd with 'c' (0x63) and the transformed value is stored in 'x'. - For example: - - s = 01000001 - x = 01000001 - - iteration 1: s = 10000010, x ^= s - iteration 2: s = 00000101, x ^= s - iteration 3: s = 00001010, x ^= s - iteration 4: s = 00010100, x ^= s - x ^= 0x63 - - This can be done with a loop where s = (s << 1) | (s >> 7). However, - it can also be done by using a single 16-bit (in this case 32-bit) - number 'sx'. Since XOR is an associative operation, we can set 'sx' - to 'ei' and then XOR it with 'sx' left-shifted 1,2,3, and 4 times. 
- The most significant bits will flow into the high 8 bit positions - and be correctly XOR'd with one another. All that remains will be - to cycle the high 8 bits by XOR'ing them all with the lower 8 bits - afterwards. - - At the same time we're populating sbox and isbox we can precompute - the multiplication we'll need to do to do MixColumns() later. - */ - - // apply affine transformation - sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4); - sx = (sx >> 8) ^ (sx & 255) ^ 0x63; - - // update tables - sbox[e] = sx; - isbox[sx] = e; - - /* Mixing columns is done using matrix multiplication. The columns - that are to be mixed are each a single word in the current state. - The state has Nb columns (4 columns). Therefore each column is a - 4 byte word. So to mix the columns in a single column 'c' where - its rows are r0, r1, r2, and r3, we use the following matrix - multiplication: - - [2 3 1 1]*[r0,c]=[r'0,c] - [1 2 3 1] [r1,c] [r'1,c] - [1 1 2 3] [r2,c] [r'2,c] - [3 1 1 2] [r3,c] [r'3,c] - - r0, r1, r2, and r3 are each 1 byte of one of the words in the - state (a column). To do matrix multiplication for each mixed - column c' we multiply the corresponding row from the left matrix - with the corresponding column from the right matrix. In total, we - get 4 equations: - - r0,c' = 2*r0,c + 3*r1,c + 1*r2,c + 1*r3,c - r1,c' = 1*r0,c + 2*r1,c + 3*r2,c + 1*r3,c - r2,c' = 1*r0,c + 1*r1,c + 2*r2,c + 3*r3,c - r3,c' = 3*r0,c + 1*r1,c + 1*r2,c + 2*r3,c - - As usual, the multiplication is as previously defined and the - addition is XOR. In order to optimize mixing columns we can store - the multiplication results in tables. If you think of the whole - column as a word (it might help to visualize by mentally rotating - the equations above by counterclockwise 90 degrees) then you can - see that it would be useful to map the multiplications performed on - each byte (r0, r1, r2, r3) onto a word as well. For instance, we - could map 2*r0,1*r0,1*r0,3*r0 onto a word by storing 2*r0 in the - highest 8 bits and 3*r0 in the lowest 8 bits (with the other two - respectively in the middle). This means that a table can be - constructed that uses r0 as an index to the word. We can do the - same with r1, r2, and r3, creating a total of 4 tables. - - To construct a full c', we can just look up each byte of c in - their respective tables and XOR the results together. - - Also, to build each table we only have to calculate the word - for 2,1,1,3 for every byte ... which we can do on each iteration - of this loop since we will iterate over every byte. After we have - calculated 2,1,1,3 we can get the results for the other tables - by cycling the byte at the end to the beginning. For instance - we can take the result of table 2,1,1,3 and produce table 3,2,1,1 - by moving the right most byte to the left most position just like - how you can imagine the 3 moved out of 2,1,1,3 and to the front - to produce 3,2,1,1. - - There is another optimization in that the same multiples of - the current element we need in order to advance our generator - to the next iteration can be reused in performing the 2,1,1,3 - calculation. We also calculate the inverse mix column tables, - with e,9,d,b being the inverse of 2,1,1,3. - - When we're done, and we need to actually mix columns, the first - byte of each state word should be put through mix[0] (2,1,1,3), - the second through mix[1] (3,2,1,1) and so forth. Then they should - be XOR'd together to produce the fully mixed column. 
- */ - - // calculate mix and imix table values - sx2 = xtime[sx]; - e2 = xtime[e]; - e4 = xtime[e2]; - e8 = xtime[e4]; - me = - (sx2 << 24) ^ // 2 - (sx << 16) ^ // 1 - (sx << 8) ^ // 1 - (sx ^ sx2); // 3 - ime = - (e2 ^ e4 ^ e8) << 24 ^ // E (14) - (e ^ e8) << 16 ^ // 9 - (e ^ e4 ^ e8) << 8 ^ // D (13) - (e ^ e2 ^ e8); // B (11) - // produce each of the mix tables by rotating the 2,1,1,3 value - for(var n = 0; n < 4; ++n) { - mix[n][e] = me; - imix[n][sx] = ime; - // cycle the right most byte to the left most position - // ie: 2,1,1,3 becomes 3,2,1,1 - me = me << 24 | me >>> 8; - ime = ime << 24 | ime >>> 8; - } - - // get next element and inverse - if(e === 0) { - // 1 is the inverse of 1 - e = ei = 1; - } else { - // e = 2e + 2*2*2*(10e)) = multiply e by 82 (chosen generator) - // ei = ei + 2*2*ei = multiply ei by 5 (inverse generator) - e = e2 ^ xtime[xtime[xtime[e2 ^ e8]]]; - ei ^= xtime[xtime[ei]]; - } +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } + return host; // for v0.11 or later } -/** - * Generates a key schedule using the AES key expansion algorithm. - * - * The AES algorithm takes the Cipher Key, K, and performs a Key Expansion - * routine to generate a key schedule. The Key Expansion generates a total - * of Nb*(Nr + 1) words: the algorithm requires an initial set of Nb words, - * and each of the Nr rounds requires Nb words of key data. The resulting - * key schedule consists of a linear array of 4-byte words, denoted [wi ], - * with i in the range 0 <= i < Nb(Nr + 1). - * - * KeyExpansion(byte key[4*Nk], word w[Nb*(Nr+1)], Nk) - * AES-128 (Nb=4, Nk=4, Nr=10) - * AES-192 (Nb=4, Nk=6, Nr=12) - * AES-256 (Nb=4, Nk=8, Nr=14) - * Note: Nr=Nk+6. - * - * Nb is the number of columns (32-bit words) comprising the State (or - * number of bytes in a block). For AES, Nb=4. - * - * @param key the key to schedule (as an array of 32-bit words). - * @param decrypt true to modify the key schedule to decrypt, false not to. - * - * @return the generated key schedule. - */ -function _expandKey(key, decrypt) { - // copy the key's words to initialize the key schedule - var w = key.slice(0); - - /* RotWord() will rotate a word, moving the first byte to the last - byte's position (shifting the other bytes left). - - We will be getting the value of Rcon at i / Nk. 'i' will iterate - from Nk to (Nb * Nr+1). Nk = 4 (4 byte key), Nb = 4 (4 words in - a block), Nr = Nk + 6 (10). Therefore 'i' will iterate from - 4 to 44 (exclusive). Each time we iterate 4 times, i / Nk will - increase by 1. We use a counter iNk to keep track of this. - */ - - // go through the rounds expanding the key - var temp, iNk = 1; - var Nk = w.length; - var Nr1 = Nk + 6 + 1; - var end = Nb * Nr1; - for(var i = Nk; i < end; ++i) { - temp = w[i - 1]; - if(i % Nk === 0) { - // temp = SubWord(RotWord(temp)) ^ Rcon[i / Nk] - temp = - sbox[temp >>> 16 & 255] << 24 ^ - sbox[temp >>> 8 & 255] << 16 ^ - sbox[temp & 255] << 8 ^ - sbox[temp >>> 24] ^ (rcon[iNk] << 24); - iNk++; - } else if(Nk > 6 && (i % Nk === 4)) { - // temp = SubWord(temp) - temp = - sbox[temp >>> 24] << 24 ^ - sbox[temp >>> 16 & 255] << 16 ^ - sbox[temp >>> 8 & 255] << 8 ^ - sbox[temp & 255]; - } - w[i] = w[i - Nk] ^ temp; - } - - /* When we are updating a cipher block we always use the code path for - encryption whether we are decrypting or not (to shorten code and - simplify the generation of look up tables). 
However, because there - are differences in the decryption algorithm, other than just swapping - in different look up tables, we must transform our key schedule to - account for these changes: - - 1. The decryption algorithm gets its key rounds in reverse order. - 2. The decryption algorithm adds the round key before mixing columns - instead of afterwards. - - We don't need to modify our key schedule to handle the first case, - we can just traverse the key schedule in reverse order when decrypting. - - The second case requires a little work. - - The tables we built for performing rounds will take an input and then - perform SubBytes() and MixColumns() or, for the decrypt version, - InvSubBytes() and InvMixColumns(). But the decrypt algorithm requires - us to AddRoundKey() before InvMixColumns(). This means we'll need to - apply some transformations to the round key to inverse-mix its columns - so they'll be correct for moving AddRoundKey() to after the state has - had its columns inverse-mixed. - - To inverse-mix the columns of the state when we're decrypting we use a - lookup table that will apply InvSubBytes() and InvMixColumns() at the - same time. However, the round key's bytes are not inverse-substituted - in the decryption algorithm. To get around this problem, we can first - substitute the bytes in the round key so that when we apply the - transformation via the InvSubBytes()+InvMixColumns() table, it will - undo our substitution leaving us with the original value that we - want -- and then inverse-mix that value. - - This change will correctly alter our key schedule so that we can XOR - each round key with our already transformed decryption state. This - allows us to use the same code path as the encryption algorithm. - - We make one more change to the decryption key. Since the decryption - algorithm runs in reverse from the encryption algorithm, we reverse - the order of the round keys to avoid having to iterate over the key - schedule backwards when running the encryption algorithm later in - decryption mode. In addition to reversing the order of the round keys, - we also swap each round key's 2nd and 4th rows. See the comments - section where rounds are performed for more details about why this is - done. These changes are done inline with the other substitution - described above. 
- */ - if(decrypt) { - var tmp; - var m0 = imix[0]; - var m1 = imix[1]; - var m2 = imix[2]; - var m3 = imix[3]; - var wnew = w.slice(0); - end = w.length; - for(var i = 0, wi = end - Nb; i < end; i += Nb, wi -= Nb) { - // do not sub the first or last round key (round keys are Nb - // words) as no column mixing is performed before they are added, - // but do change the key order - if(i === 0 || i === (end - Nb)) { - wnew[i] = w[wi]; - wnew[i + 1] = w[wi + 3]; - wnew[i + 2] = w[wi + 2]; - wnew[i + 3] = w[wi + 1]; - } else { - // substitute each round key byte because the inverse-mix - // table will inverse-substitute it (effectively cancel the - // substitution because round key bytes aren't sub'd in - // decryption mode) and swap indexes 3 and 1 - for(var n = 0; n < Nb; ++n) { - tmp = w[wi + n]; - wnew[i + (3&-n)] = - m0[sbox[tmp >>> 24]] ^ - m1[sbox[tmp >>> 16 & 255]] ^ - m2[sbox[tmp >>> 8 & 255]] ^ - m3[sbox[tmp & 255]]; +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; } } } - w = wnew; } - - return w; + return target; } -/** - * Updates a single block (16 bytes) using AES. The update will either - * encrypt or decrypt the block. - * - * @param w the key schedule. - * @param input the input block (an array of 32-bit words). - * @param output the updated output block. - * @param decrypt true to decrypt the block, false to encrypt it. - */ -function _updateBlock(w, input, output, decrypt) { - /* - Cipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)]) - begin - byte state[4,Nb] - state = in - AddRoundKey(state, w[0, Nb-1]) - for round = 1 step 1 to Nr-1 - SubBytes(state) - ShiftRows(state) - MixColumns(state) - AddRoundKey(state, w[round*Nb, (round+1)*Nb-1]) - end for - SubBytes(state) - ShiftRows(state) - AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - out = state - end - - InvCipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)]) - begin - byte state[4,Nb] - state = in - AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - for round = Nr-1 step -1 downto 1 - InvShiftRows(state) - InvSubBytes(state) - AddRoundKey(state, w[round*Nb, (round+1)*Nb-1]) - InvMixColumns(state) - end for - InvShiftRows(state) - InvSubBytes(state) - AddRoundKey(state, w[0, Nb-1]) - out = state - end - */ - // Encrypt: AddRoundKey(state, w[0, Nb-1]) - // Decrypt: AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - var Nr = w.length / 4 - 1; - var m0, m1, m2, m3, sub; - if(decrypt) { - m0 = imix[0]; - m1 = imix[1]; - m2 = imix[2]; - m3 = imix[3]; - sub = isbox; - } else { - m0 = mix[0]; - m1 = mix[1]; - m2 = mix[2]; - m3 = mix[3]; - sub = sbox; - } - var a, b, c, d, a2, b2, c2; - a = input[0] ^ w[0]; - b = input[decrypt ? 3 : 1] ^ w[1]; - c = input[2] ^ w[2]; - d = input[decrypt ? 1 : 3] ^ w[3]; - var i = 3; - - /* In order to share code we follow the encryption algorithm when both - encrypting and decrypting. To account for the changes required in the - decryption algorithm, we use different lookup tables when decrypting - and use a modified key schedule to account for the difference in the - order of transformations applied when performing rounds. We also get - key rounds in reverse order (relative to encryption). */ - for(var round = 1; round < Nr; ++round) { - /* As described above, we'll be using table lookups to perform the - column mixing. 
Each column is stored as a word in the state (the - array 'input' has one column as a word at each index). In order to - mix a column, we perform these transformations on each row in c, - which is 1 byte in each word. The new column for c0 is c'0: - - m0 m1 m2 m3 - r0,c'0 = 2*r0,c0 + 3*r1,c0 + 1*r2,c0 + 1*r3,c0 - r1,c'0 = 1*r0,c0 + 2*r1,c0 + 3*r2,c0 + 1*r3,c0 - r2,c'0 = 1*r0,c0 + 1*r1,c0 + 2*r2,c0 + 3*r3,c0 - r3,c'0 = 3*r0,c0 + 1*r1,c0 + 1*r2,c0 + 2*r3,c0 - - So using mix tables where c0 is a word with r0 being its upper - 8 bits and r3 being its lower 8 bits: - - m0[c0 >> 24] will yield this word: [2*r0,1*r0,1*r0,3*r0] - ... - m3[c0 & 255] will yield this word: [1*r3,1*r3,3*r3,2*r3] - - Therefore to mix the columns in each word in the state we - do the following (& 255 omitted for brevity): - c'0,r0 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - c'0,r1 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - c'0,r2 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - c'0,r3 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - - However, before mixing, the algorithm requires us to perform - ShiftRows(). The ShiftRows() transformation cyclically shifts the - last 3 rows of the state over different offsets. The first row - (r = 0) is not shifted. - - s'_r,c = s_r,(c + shift(r, Nb) mod Nb - for 0 < r < 4 and 0 <= c < Nb and - shift(1, 4) = 1 - shift(2, 4) = 2 - shift(3, 4) = 3. - - This causes the first byte in r = 1 to be moved to the end of - the row, the first 2 bytes in r = 2 to be moved to the end of - the row, the first 3 bytes in r = 3 to be moved to the end of - the row: - - r1: [c0 c1 c2 c3] => [c1 c2 c3 c0] - r2: [c0 c1 c2 c3] [c2 c3 c0 c1] - r3: [c0 c1 c2 c3] [c3 c0 c1 c2] - - We can make these substitutions inline with our column mixing to - generate an updated set of equations to produce each word in the - state (note the columns have changed positions): - - c0 c1 c2 c3 => c0 c1 c2 c3 - c0 c1 c2 c3 c1 c2 c3 c0 (cycled 1 byte) - c0 c1 c2 c3 c2 c3 c0 c1 (cycled 2 bytes) - c0 c1 c2 c3 c3 c0 c1 c2 (cycled 3 bytes) - - Therefore: - - c'0 = 2*r0,c0 + 3*r1,c1 + 1*r2,c2 + 1*r3,c3 - c'0 = 1*r0,c0 + 2*r1,c1 + 3*r2,c2 + 1*r3,c3 - c'0 = 1*r0,c0 + 1*r1,c1 + 2*r2,c2 + 3*r3,c3 - c'0 = 3*r0,c0 + 1*r1,c1 + 1*r2,c2 + 2*r3,c3 - - c'1 = 2*r0,c1 + 3*r1,c2 + 1*r2,c3 + 1*r3,c0 - c'1 = 1*r0,c1 + 2*r1,c2 + 3*r2,c3 + 1*r3,c0 - c'1 = 1*r0,c1 + 1*r1,c2 + 2*r2,c3 + 3*r3,c0 - c'1 = 3*r0,c1 + 1*r1,c2 + 1*r2,c3 + 2*r3,c0 - - ... and so forth for c'2 and c'3. The important distinction is - that the columns are cycling, with c0 being used with the m0 - map when calculating c0, but c1 being used with the m0 map when - calculating c1 ... and so forth. - - When performing the inverse we transform the mirror image and - skip the bottom row, instead of the top one, and move upwards: - - c3 c2 c1 c0 => c0 c3 c2 c1 (cycled 3 bytes) *same as encryption - c3 c2 c1 c0 c1 c0 c3 c2 (cycled 2 bytes) - c3 c2 c1 c0 c2 c1 c0 c3 (cycled 1 byte) *same as encryption - c3 c2 c1 c0 c3 c2 c1 c0 - - If you compare the resulting matrices for ShiftRows()+MixColumns() - and for InvShiftRows()+InvMixColumns() the 2nd and 4th columns are - different (in encrypt mode vs. decrypt mode). So in order to use - the same code to handle both encryption and decryption, we will - need to do some mapping. 
- - If in encryption mode we let a=c0, b=c1, c=c2, d=c3, and r be - a row number in the state, then the resulting matrix in encryption - mode for applying the above transformations would be: - - r1: a b c d - r2: b c d a - r3: c d a b - r4: d a b c - - If we did the same in decryption mode we would get: - - r1: a d c b - r2: b a d c - r3: c b a d - r4: d c b a - - If instead we swap d and b (set b=c3 and d=c1), then we get: - - r1: a b c d - r2: d a b c - r3: c d a b - r4: b c d a - - Now the 1st and 3rd rows are the same as the encryption matrix. All - we need to do then to make the mapping exactly the same is to swap - the 2nd and 4th rows when in decryption mode. To do this without - having to do it on each iteration, we swapped the 2nd and 4th rows - in the decryption key schedule. We also have to do the swap above - when we first pull in the input and when we set the final output. */ - a2 = - m0[a >>> 24] ^ - m1[b >>> 16 & 255] ^ - m2[c >>> 8 & 255] ^ - m3[d & 255] ^ w[++i]; - b2 = - m0[b >>> 24] ^ - m1[c >>> 16 & 255] ^ - m2[d >>> 8 & 255] ^ - m3[a & 255] ^ w[++i]; - c2 = - m0[c >>> 24] ^ - m1[d >>> 16 & 255] ^ - m2[a >>> 8 & 255] ^ - m3[b & 255] ^ w[++i]; - d = - m0[d >>> 24] ^ - m1[a >>> 16 & 255] ^ - m2[b >>> 8 & 255] ^ - m3[c & 255] ^ w[++i]; - a = a2; - b = b2; - c = c2; +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); } - - /* - Encrypt: - SubBytes(state) - ShiftRows(state) - AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - - Decrypt: - InvShiftRows(state) - InvSubBytes(state) - AddRoundKey(state, w[0, Nb-1]) - */ - // Note: rows are shifted inline - output[0] = - (sub[a >>> 24] << 24) ^ - (sub[b >>> 16 & 255] << 16) ^ - (sub[c >>> 8 & 255] << 8) ^ - (sub[d & 255]) ^ w[++i]; - output[decrypt ? 3 : 1] = - (sub[b >>> 24] << 24) ^ - (sub[c >>> 16 & 255] << 16) ^ - (sub[d >>> 8 & 255] << 8) ^ - (sub[a & 255]) ^ w[++i]; - output[2] = - (sub[c >>> 24] << 24) ^ - (sub[d >>> 16 & 255] << 16) ^ - (sub[a >>> 8 & 255] << 8) ^ - (sub[b & 255]) ^ w[++i]; - output[decrypt ? 1 : 3] = - (sub[d >>> 24] << 24) ^ - (sub[a >>> 16 & 255] << 16) ^ - (sub[b >>> 8 & 255] << 8) ^ - (sub[c & 255]) ^ w[++i]; +} else { + debug = function() {}; } +exports.debug = debug; // for test -/** - * Deprecated. Instead, use: - * - * forge.cipher.createCipher('AES-', key); - * forge.cipher.createDecipher('AES-', key); - * - * Creates a deprecated AES cipher object. This object's mode will default to - * CBC (cipher-block-chaining). - * - * The key and iv may be given as a string of bytes, an array of bytes, a - * byte buffer, or an array of 32-bit words. - * - * @param options the options to use. - * key the symmetric key to use. - * output the buffer to write to. - * decrypt true for decryption, false for encryption. - * mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. 
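The tunnel agent added above works by issuing an HTTP CONNECT to a forward proxy and handing the resulting socket to the real request, wrapping it in TLS for the httpsOverHttp/httpsOverHttps variants. A minimal usage sketch against that API; the proxy host/port and target URL are placeholder assumptions:

  // Sketch only: 'tunnel' package name and proxy address are illustrative assumptions.
  var https = require('https');
  var tunnel = require('tunnel');

  // HTTPS destination reached through a plain-HTTP proxy via CONNECT.
  var agent = tunnel.httpsOverHttp({
    proxy: { host: 'proxy.example.com', port: 3128 }
  });

  https.get({
    host: 'api.github.com',
    path: '/',
    agent: agent,
    headers: { 'User-Agent': 'tunnel-demo' }
  }, function (res) {
    console.log('status:', res.statusCode);
  });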
- */ -function _createCipher(options) { - options = options || {}; - var mode = (options.mode || 'CBC').toUpperCase(); - var algorithm = 'AES-' + mode; - var cipher; - if(options.decrypt) { - cipher = forge.cipher.createDecipher(algorithm, options.key); - } else { - cipher = forge.cipher.createCipher(algorithm, options.key); - } +/***/ }), - // backwards compatible start API - var start = cipher.start; - cipher.start = function(iv, options) { - // backwards compatibility: support second arg as output buffer - var output = null; - if(options instanceof forge.util.ByteBuffer) { - output = options; - options = {}; - } - options = options || {}; - options.output = output; - options.iv = iv; - start.call(cipher, options); - }; +/***/ 1773: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return cipher; -} +"use strict"; -/***/ }), +const Client = __nccwpck_require__(3598) +const Dispatcher = __nccwpck_require__(412) +const errors = __nccwpck_require__(8045) +const Pool = __nccwpck_require__(4634) +const BalancedPool = __nccwpck_require__(7931) +const Agent = __nccwpck_require__(7890) +const util = __nccwpck_require__(3983) +const { InvalidArgumentError } = errors +const api = __nccwpck_require__(4059) +const buildConnector = __nccwpck_require__(2067) +const MockClient = __nccwpck_require__(8687) +const MockAgent = __nccwpck_require__(6771) +const MockPool = __nccwpck_require__(6193) +const mockErrors = __nccwpck_require__(888) +const ProxyAgent = __nccwpck_require__(7858) +const RetryHandler = __nccwpck_require__(2286) +const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892) +const DecoratorHandler = __nccwpck_require__(6930) +const RedirectHandler = __nccwpck_require__(2860) +const createRedirectInterceptor = __nccwpck_require__(8861) + +let hasCrypto +try { + __nccwpck_require__(6113) + hasCrypto = true +} catch { + hasCrypto = false +} -/***/ 1449: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.assign(Dispatcher.prototype, api) -/** - * A Javascript implementation of AES Cipher Suites for TLS. - * - * @author Dave Longley - * - * Copyright (c) 2009-2015 Digital Bazaar, Inc. - * - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9167); +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler -var tls = module.exports = forge.tls; +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor -/** - * Supported cipher suites. 
- */ -tls.CipherSuites['TLS_RSA_WITH_AES_128_CBC_SHA'] = { - id: [0x00, 0x2f], - name: 'TLS_RSA_WITH_AES_128_CBC_SHA', - initSecurityParameters: function(sp) { - sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes; - sp.cipher_type = tls.CipherType.block; - sp.enc_key_length = 16; - sp.block_length = 16; - sp.fixed_iv_length = 16; - sp.record_iv_length = 16; - sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1; - sp.mac_length = 20; - sp.mac_key_length = 20; - }, - initConnectionState: initConnectionState -}; -tls.CipherSuites['TLS_RSA_WITH_AES_256_CBC_SHA'] = { - id: [0x00, 0x35], - name: 'TLS_RSA_WITH_AES_256_CBC_SHA', - initSecurityParameters: function(sp) { - sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes; - sp.cipher_type = tls.CipherType.block; - sp.enc_key_length = 32; - sp.block_length = 16; - sp.fixed_iv_length = 16; - sp.record_iv_length = 16; - sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1; - sp.mac_length = 20; - sp.mac_key_length = 20; - }, - initConnectionState: initConnectionState -}; +module.exports.buildConnector = buildConnector +module.exports.errors = errors -function initConnectionState(state, c, sp) { - var client = (c.entity === forge.tls.ConnectionEnd.client); +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null + } - // cipher setup - state.read.cipherState = { - init: false, - cipher: forge.cipher.createDecipher('AES-CBC', client ? - sp.keys.server_write_key : sp.keys.client_write_key), - iv: client ? sp.keys.server_write_IV : sp.keys.client_write_IV - }; - state.write.cipherState = { - init: false, - cipher: forge.cipher.createCipher('AES-CBC', client ? - sp.keys.client_write_key : sp.keys.server_write_key), - iv: client ? sp.keys.client_write_IV : sp.keys.server_write_IV - }; - state.read.cipherFunction = decrypt_aes_cbc_sha1; - state.write.cipherFunction = encrypt_aes_cbc_sha1; + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') + } - // MAC setup - state.read.macLength = state.write.macLength = sp.mac_length; - state.read.macFunction = state.write.macFunction = tls.hmac_sha1; -} + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -/** - * Encrypts the TLSCompressed record into a TLSCipherText record using AES - * in CBC mode. - * - * @param record the TLSCompressed record to encrypt. - * @param s the ConnectionState to use. - * - * @return true on success, false on failure. - */ -function encrypt_aes_cbc_sha1(record, s) { - var rval = false; - - // append MAC to fragment, update sequence number - var mac = s.macFunction(s.macKey, s.sequenceNumber, record); - record.fragment.putBytes(mac); - s.updateSequenceNumber(); - - // TLS 1.1+ use an explicit IV every time to protect against CBC attacks - var iv; - if(record.version.minor === tls.Versions.TLS_1_0.minor) { - // use the pre-generated IV when initializing for TLS 1.0, otherwise use - // the residue from the previous encryption - iv = s.cipherState.init ? 
null : s.cipherState.iv; - } else { - iv = forge.random.getBytesSync(16); - } + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } - s.cipherState.init = true; + let path = opts.path + if (!opts.path.startsWith('/')) { + path = `/${path}` + } - // start cipher - var cipher = s.cipherState.cipher; - cipher.start({iv: iv}); + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? url : {} + } - // TLS 1.1+ write IV into output - if(record.version.minor >= tls.Versions.TLS_1_1.minor) { - cipher.output.putBytes(iv); - } + url = util.parseURL(url) + } - // do encryption (default padding is appropriate) - cipher.update(record.fragment); - if(cipher.finish(encrypt_aes_cbc_sha1_padding)) { - // set record fragment to encrypted output - record.fragment = cipher.output; - record.length = record.fragment.length(); - rval = true; - } + const { agent, dispatcher = getGlobalDispatcher() } = opts - return rval; -} + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') + } -/** - * Handles padding for aes_cbc_sha1 in encrypt mode. - * - * @param blockSize the block size. - * @param input the input buffer. - * @param decrypt true in decrypt mode, false in encrypt mode. - * - * @return true on success, false on failure. - */ -function encrypt_aes_cbc_sha1_padding(blockSize, input, decrypt) { - /* The encrypted data length (TLSCiphertext.length) is one more than the sum - of SecurityParameters.block_length, TLSCompressed.length, - SecurityParameters.mac_length, and padding_length. - - The padding may be any length up to 255 bytes long, as long as it results in - the TLSCiphertext.length being an integral multiple of the block length. - Lengths longer than necessary might be desirable to frustrate attacks on a - protocol based on analysis of the lengths of exchanged messages. Each uint8 - in the padding data vector must be filled with the padding length value. - - The padding length should be such that the total size of the - GenericBlockCipher structure is a multiple of the cipher's block length. - Legal values range from zero to 255, inclusive. This length specifies the - length of the padding field exclusive of the padding_length field itself. - - This is slightly different from PKCS#7 because the padding value is 1 - less than the actual number of padding bytes if you include the - padding_length uint8 itself as a padding byte. */ - if(!decrypt) { - // get the number of padding bytes required to reach the blockSize and - // subtract 1 for the padding value (to make room for the padding_length - // uint8) - var padding = blockSize - (input.length() % blockSize); - input.fillWithByte(padding - 1, padding); + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 'PUT' : 'GET') + }, handler) } - return true; } -/** - * Handles padding for aes_cbc_sha1 in decrypt mode. - * - * @param blockSize the block size. - * @param output the output buffer. - * @param decrypt true in decrypt mode, false in encrypt mode. - * - * @return true on success, false on failure. - */ -function decrypt_aes_cbc_sha1_padding(blockSize, output, decrypt) { - var rval = true; - if(decrypt) { - /* The last byte in the output specifies the number of padding bytes not - including itself. 
Each of the padding bytes has the same value as that - last byte (known as the padding_length). Here we check all padding - bytes to ensure they have the value of padding_length even if one of - them is bad in order to ward-off timing attacks. */ - var len = output.length(); - var paddingLength = output.last(); - for(var i = len - 1 - paddingLength; i < len - 1; ++i) { - rval = rval && (output.at(i) == paddingLength); - } - if(rval) { - // trim off padding bytes and last padding length byte - output.truncate(paddingLength + 1); +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher + +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = (__nccwpck_require__(4881).fetch) } - } - return rval; -} -/** - * Decrypts a TLSCipherText record into a TLSCompressed record using - * AES in CBC mode. - * - * @param record the TLSCipherText record to decrypt. - * @param s the ConnectionState to use. - * - * @return true on success, false on failure. - */ -function decrypt_aes_cbc_sha1(record, s) { - var rval = false; - - var iv; - if(record.version.minor === tls.Versions.TLS_1_0.minor) { - // use pre-generated IV when initializing for TLS 1.0, otherwise use the - // residue from the previous decryption - iv = s.cipherState.init ? null : s.cipherState.iv; - } else { - // TLS 1.1+ use an explicit IV every time to protect against CBC attacks - // that is appended to the record fragment - iv = record.fragment.getBytes(16); + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } + + throw err + } } + module.exports.Headers = __nccwpck_require__(554).Headers + module.exports.Response = __nccwpck_require__(7823).Response + module.exports.Request = __nccwpck_require__(8359).Request + module.exports.FormData = __nccwpck_require__(2015).FormData + module.exports.File = __nccwpck_require__(8511).File + module.exports.FileReader = __nccwpck_require__(1446).FileReader - s.cipherState.init = true; + const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246) - // start cipher - var cipher = s.cipherState.cipher; - cipher.start({iv: iv}); + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin - // do decryption - cipher.update(record.fragment); - rval = cipher.finish(decrypt_aes_cbc_sha1_padding); + const { CacheStorage } = __nccwpck_require__(7907) + const { kConstruct } = __nccwpck_require__(9174) - // even if decryption fails, keep going to minimize timing attacks + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. 
+ module.exports.caches = new CacheStorage(kConstruct) +} - // decrypted data: - // first (len - 20) bytes = application data - // last 20 bytes = MAC - var macLen = s.macLength; +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724) - // create a random MAC to check against should the mac length check fail - // Note: do this regardless of the failure to keep timing consistent - var mac = forge.random.getBytesSync(macLen); + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie - // get fragment and mac - var len = cipher.output.length(); - if(len >= macLen) { - record.fragment = cipher.output.getBytes(len - macLen); - mac = cipher.output.getBytes(macLen); - } else { - // bad data, but get bytes anyway to try to keep timing consistent - record.fragment = cipher.output.getBytes(); - } - record.fragment = forge.util.createBuffer(record.fragment); - record.length = record.fragment.length(); + const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) - // see if data integrity checks out, update sequence number - var mac2 = s.macFunction(s.macKey, s.sequenceNumber, record); - s.updateSequenceNumber(); - rval = compareMacs(s.macKey, mac, mac2) && rval; - return rval; + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType } -/** - * Safely compare two MACs. This function will compare two MACs in a way - * that protects against timing attacks. - * - * TODO: Expose elsewhere as a utility API. - * - * See: https://www.nccgroup.trust/us/about-us/newsroom-and-events/blog/2011/february/double-hmac-verification/ - * - * @param key the MAC key to use. - * @param mac1 as a binary-encoded string of bytes. - * @param mac2 as a binary-encoded string of bytes. - * - * @return true if the MACs are the same, false if not. - */ -function compareMacs(key, mac1, mac2) { - var hmac = forge.hmac.create(); +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = __nccwpck_require__(4284) - hmac.start('SHA1', key); - hmac.update(mac1); - mac1 = hmac.digest().getBytes(); + module.exports.WebSocket = WebSocket +} - hmac.start(null, null); - hmac.update(mac2); - mac2 = hmac.digest().getBytes(); +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) - return mac1 === mac2; -} +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors /***/ }), -/***/ 9414: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7890: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Copyright (c) 2019 Digital Bazaar, Inc. 
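A minimal usage sketch of the undici entry point this patch bundles (the module that wires request/stream/pipeline/connect/upgrade around a global dispatcher); the URL and connection count are illustrative, not taken from the patch.

const { request, setGlobalDispatcher, Agent } = require('undici')

async function main () {
  // optional: swap the default global dispatcher for a tuned Agent
  setGlobalDispatcher(new Agent({ connections: 10 }))

  // request() resolves with status code, headers and a readable body
  const { statusCode, headers, body } = await request('https://example.com')
  console.log(statusCode, headers['content-type'])
  console.log(await body.text())
}

main().catch(console.error)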
- */ +"use strict"; -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -var asn1 = forge.asn1; - -exports.privateKeyValidator = { - // PrivateKeyInfo - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // Version (INTEGER) - name: 'PrivateKeyInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyVersion' - }, { - // privateKeyAlgorithm - name: 'PrivateKeyInfo.privateKeyAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'privateKeyOid' - }] - }, { - // PrivateKey - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'privateKey' - }] -}; -exports.publicKeyValidator = { - name: 'SubjectPublicKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'subjectPublicKeyInfo', - value: [{ - name: 'SubjectPublicKeyInfo.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'publicKeyOid' - }] - }, - // capture group for ed25519PublicKey - { - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - composed: true, - captureBitStringValue: 'ed25519PublicKey' - } - // FIXME: this is capture group for rsaPublicKey, use it in this API or - // discard? - /* { - // subjectPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - value: [{ - // RSAPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - captureAsn1: 'rsaPublicKey' - }] - } */ - ] -}; +const { InvalidArgumentError } = __nccwpck_require__(8045) +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(2785) +const DispatcherBase = __nccwpck_require__(4839) +const Pool = __nccwpck_require__(4634) +const Client = __nccwpck_require__(3598) +const util = __nccwpck_require__(3983) +const createRedirectInterceptor = __nccwpck_require__(8861) +const { WeakRef, FinalizationRegistry } = __nccwpck_require__(6436)() +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') -/***/ }), +function defaultFactory (origin, opts) { + return opts && opts.connections === 1 + ? new Client(origin, opts) + : new Pool(origin, opts) +} -/***/ 9549: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() -/** - * Javascript implementation of Abstract Syntax Notation Number One. - * - * @author Dave Longley - * - * Copyright (c) 2010-2015 Digital Bazaar, Inc. 
- * - * An API for storing data using the Abstract Syntax Notation Number One - * format using DER (Distinguished Encoding Rules) encoding. This encoding is - * commonly used to store data for PKI, i.e. X.509 Certificates, and this - * implementation exists for that purpose. - * - * Abstract Syntax Notation Number One (ASN.1) is used to define the abstract - * syntax of information without restricting the way the information is encoded - * for transmission. It provides a standard that allows for open systems - * communication. ASN.1 defines the syntax of information data and a number of - * simple data types as well as a notation for describing them and specifying - * values for them. - * - * The RSA algorithm creates public and private keys that are often stored in - * X.509 or PKCS#X formats -- which use ASN.1 (encoded in DER format). This - * class provides the most basic functionality required to store and load DSA - * keys that are encoded according to ASN.1. - * - * The most common binary encodings for ASN.1 are BER (Basic Encoding Rules) - * and DER (Distinguished Encoding Rules). DER is just a subset of BER that - * has stricter requirements for how data must be encoded. - * - * Each ASN.1 structure has a tag (a byte identifying the ASN.1 structure type) - * and a byte array for the value of this ASN1 structure which may be data or a - * list of ASN.1 structures. - * - * Each ASN.1 structure using BER is (Tag-Length-Value): - * - * | byte 0 | bytes X | bytes Y | - * |--------|---------|---------- - * | tag | length | value | - * - * ASN.1 allows for tags to be of "High-tag-number form" which allows a tag to - * be two or more octets, but that is not supported by this class. A tag is - * only 1 byte. Bits 1-5 give the tag number (ie the data type within a - * particular 'class'), 6 indicates whether or not the ASN.1 value is - * constructed from other ASN.1 values, and bits 7 and 8 give the 'class'. If - * bits 7 and 8 are both zero, the class is UNIVERSAL. If only bit 7 is set, - * then the class is APPLICATION. If only bit 8 is set, then the class is - * CONTEXT_SPECIFIC. If both bits 7 and 8 are set, then the class is PRIVATE. - * The tag numbers for the data types for the class UNIVERSAL are listed below: - * - * UNIVERSAL 0 Reserved for use by the encoding rules - * UNIVERSAL 1 Boolean type - * UNIVERSAL 2 Integer type - * UNIVERSAL 3 Bitstring type - * UNIVERSAL 4 Octetstring type - * UNIVERSAL 5 Null type - * UNIVERSAL 6 Object identifier type - * UNIVERSAL 7 Object descriptor type - * UNIVERSAL 8 External type and Instance-of type - * UNIVERSAL 9 Real type - * UNIVERSAL 10 Enumerated type - * UNIVERSAL 11 Embedded-pdv type - * UNIVERSAL 12 UTF8String type - * UNIVERSAL 13 Relative object identifier type - * UNIVERSAL 14-15 Reserved for future editions - * UNIVERSAL 16 Sequence and Sequence-of types - * UNIVERSAL 17 Set and Set-of types - * UNIVERSAL 18-22, 25-30 Character string types - * UNIVERSAL 23-24 Time types - * - * The length of an ASN.1 structure is specified after the tag identifier. - * There is a definite form and an indefinite form. The indefinite form may - * be used if the encoding is constructed and not all immediately available. - * The indefinite form is encoded using a length byte with only the 8th bit - * set. The end of the constructed object is marked using end-of-contents - * octets (two zero bytes). 
- * - * The definite form looks like this: - * - * The length may take up 1 or more bytes, it depends on the length of the - * value of the ASN.1 structure. DER encoding requires that if the ASN.1 - * structure has a value that has a length greater than 127, more than 1 byte - * will be used to store its length, otherwise just one byte will be used. - * This is strict. - * - * In the case that the length of the ASN.1 value is less than 127, 1 octet - * (byte) is used to store the "short form" length. The 8th bit has a value of - * 0 indicating the length is "short form" and not "long form" and bits 7-1 - * give the length of the data. (The 8th bit is the left-most, most significant - * bit: also known as big endian or network format). - * - * In the case that the length of the ASN.1 value is greater than 127, 2 to - * 127 octets (bytes) are used to store the "long form" length. The first - * byte's 8th bit is set to 1 to indicate the length is "long form." Bits 7-1 - * give the number of additional octets. All following octets are in base 256 - * with the most significant digit first (typical big-endian binary unsigned - * integer storage). So, for instance, if the length of a value was 257, the - * first byte would be set to: - * - * 10000010 = 130 = 0x82. - * - * This indicates there are 2 octets (base 256) for the length. The second and - * third bytes (the octets just mentioned) would store the length in base 256: - * - * octet 2: 00000001 = 1 * 256^1 = 256 - * octet 3: 00000001 = 1 * 256^0 = 1 - * total = 257 - * - * The algorithm for converting a js integer value of 257 to base-256 is: - * - * var value = 257; - * var bytes = []; - * bytes[0] = (value >>> 8) & 0xFF; // most significant byte first - * bytes[1] = value & 0xFF; // least significant byte last - * - * On the ASN.1 UNIVERSAL Object Identifier (OID) type: - * - * An OID can be written like: "value1.value2.value3...valueN" - * - * The DER encoding rules: - * - * The first byte has the value 40 * value1 + value2. - * The following bytes, if any, encode the remaining values. Each value is - * encoded in base 128, most significant digit first (big endian), with as - * few digits as possible, and the most significant bit of each byte set - * to 1 except the last in each value's encoding. For example: Given the - * OID "1.2.840.113549", its DER encoding is (remember each byte except the - * last one in each encoding is OR'd with 0x80): - * - * byte 1: 40 * 1 + 2 = 42 = 0x2A. - * bytes 2-3: 128 * 6 + 72 = 840 = 6 72 = 6 72 = 0x0648 = 0x8648 - * bytes 4-6: 16384 * 6 + 128 * 119 + 13 = 6 119 13 = 0x06770D = 0x86F70D - * - * The final value is: 0x2A864886F70D. - * The full OID (including ASN.1 tag and length of 6 bytes) is: - * 0x06062A864886F70D - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(1925); + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } -/* ASN.1 API */ -var asn1 = module.exports = forge.asn1 = forge.asn1 || {}; + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } -/** - * ASN.1 classes. - */ -asn1.Class = { - UNIVERSAL: 0x00, - APPLICATION: 0x40, - CONTEXT_SPECIFIC: 0x80, - PRIVATE: 0xC0 -}; + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } -/** - * ASN.1 types. 
Not all types are supported by this implementation, only - * those necessary to implement a simple PKI are implemented. - */ -asn1.Type = { - NONE: 0, - BOOLEAN: 1, - INTEGER: 2, - BITSTRING: 3, - OCTETSTRING: 4, - NULL: 5, - OID: 6, - ODESC: 7, - EXTERNAL: 8, - REAL: 9, - ENUMERATED: 10, - EMBEDDED: 11, - UTF8: 12, - ROID: 13, - SEQUENCE: 16, - SET: 17, - PRINTABLESTRING: 19, - IA5STRING: 22, - UTCTIME: 23, - GENERALIZEDTIME: 24, - BMPSTRING: 30 -}; + if (connect && typeof connect !== 'function') { + connect = { ...connect } + } -/** - * Creates a new asn1 object. - * - * @param tagClass the tag class for the object. - * @param type the data type (tag number) for the object. - * @param constructed true if the asn1 object is in constructed form. - * @param value the value for the object, if it is not constructed. - * @param [options] the options to use: - * [bitStringContents] the plain BIT STRING content including padding - * byte. - * - * @return the asn1 object. - */ -asn1.create = function(tagClass, type, constructed, value, options) { - /* An asn1 object has a tagClass, a type, a constructed flag, and a - value. The value's type depends on the constructed flag. If - constructed, it will contain a list of other asn1 objects. If not, - it will contain the ASN.1 value as an array of bytes formatted - according to the ASN.1 data type. */ - - // remove undefined values - if(forge.util.isArray(value)) { - var tmp = []; - for(var i = 0; i < value.length; ++i) { - if(value[i] !== undefined) { - tmp.push(value[i]); + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] + + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) } - } - value = tmp; - } + }) - var obj = { - tagClass: tagClass, - type: type, - constructed: constructed, - composed: constructed || forge.util.isArray(value), - value: value - }; - if(options && 'bitStringContents' in options) { - // TODO: copy byte buffer if it's a buffer not a string - obj.bitStringContents = options.bitStringContents; - // TODO: add readonly flag to avoid this overhead - // save copy to detect changes - obj.original = asn1.copy(obj); - } - return obj; -}; + const agent = this -/** - * Copies an asn1 object. - * - * @param obj the asn1 object. - * @param [options] copy options: - * [excludeBitStringContents] true to not copy bitStringContents - * - * @return the a copy of the asn1 object. 
- */ -asn1.copy = function(obj, options) { - var copy; + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } - if(forge.util.isArray(obj)) { - copy = []; - for(var i = 0; i < obj.length; ++i) { - copy.push(asn1.copy(obj[i], options)); + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) } - return copy; - } - if(typeof obj === 'string') { - // TODO: copy byte buffer if it's a buffer not a string - return obj; + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) + } } - copy = { - tagClass: obj.tagClass, - type: obj.type, - constructed: obj.constructed, - composed: obj.composed, - value: asn1.copy(obj.value, options) - }; - if(options && !options.excludeBitStringContents) { - // TODO: copy byte buffer if it's a buffer not a string - copy.bitStringContents = obj.bitStringContents; + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] + } + } + return ret } - return copy; -}; -/** - * Compares asn1 objects for equality. - * - * Note this function does not run in constant time. - * - * @param obj1 the first asn1 object. - * @param obj2 the second asn1 object. - * @param [options] compare options: - * [includeBitStringContents] true to compare bitStringContents - * - * @return true if the asn1 objects are equal. - */ -asn1.equals = function(obj1, obj2, options) { - if(forge.util.isArray(obj1)) { - if(!forge.util.isArray(obj2)) { - return false; + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') } - if(obj1.length !== obj2.length) { - return false; + + const ref = this[kClients].get(key) + + let dispatcher = ref ? 
ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) } - for(var i = 0; i < obj1.length; ++i) { - if(!asn1.equals(obj1[i], obj2[i])) { - return false; + + return dispatcher.dispatch(opts, handler) + } + + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) } } - return true; - } - if(typeof obj1 !== typeof obj2) { - return false; + await Promise.all(closePromises) } - if(typeof obj1 === 'string') { - return obj1 === obj2; + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) + } + } + + await Promise.all(destroyPromises) } +} + +module.exports = Agent + - var equal = obj1.tagClass === obj2.tagClass && - obj1.type === obj2.type && - obj1.constructed === obj2.constructed && - obj1.composed === obj2.composed && - asn1.equals(obj1.value, obj2.value); - if(options && options.includeBitStringContents) { - equal = equal && (obj1.bitStringContents === obj2.bitStringContents); +/***/ }), + +/***/ 7032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { addAbortListener } = __nccwpck_require__(3983) +const { RequestAbortedError } = __nccwpck_require__(8045) + +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') + +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) } +} - return equal; -}; +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null -/** - * Gets the length of a BER-encoded ASN.1 value. - * - * In case the length is not specified, undefined is returned. - * - * @param b the BER-encoded ASN.1 byte buffer, starting with the first - * length byte. - * - * @return the length of the BER-encoded ASN.1 value or undefined. - */ -asn1.getBerValueLength = function(b) { - // TODO: move this function and related DER/BER functions to a der.js - // file; better abstract ASN.1 away from der/ber. - var b2 = b.getByte(); - if(b2 === 0x80) { - return undefined; + if (!signal) { + return } - // see if the length is "short form" or "long form" (bit 8 set) - var length; - var longForm = b2 & 0x80; - if(!longForm) { - // length is just the first byte - length = b2; - } else { - // the number of bytes the length is specified in bits 7 through 1 - // and each length byte is in big-endian base-256 - length = b.getInt((b2 & 0x7F) << 3); + if (signal.aborted) { + abort(self) + return } - return length; -}; -/** - * Check if the byte buffer has enough bytes. Throws an Error if not. - * - * @param bytes the byte buffer to parse from. - * @param remaining the bytes remaining in the current parsing state. - * @param n the number of bytes the buffer must have. 
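A sketch of how the undici Agent added by this patch is typically used: passed per call via the dispatcher option, it lazily creates and caches one pooled client per origin (the WeakRef/FinalizationRegistry bookkeeping above lets idle origins be collected). Hostnames and option values here are illustrative.

const { Agent, request } = require('undici')

const agent = new Agent({ connections: 4, maxRedirections: 2 })

async function fetchBoth () {
  // both calls share the Agent; each origin gets its own pooled client
  const a = await request('https://example.com', { dispatcher: agent })
  const b = await request('https://example.org', { dispatcher: agent })
  console.log(a.statusCode, b.statusCode)
  await a.body.text()
  await b.body.text()
  await agent.close()
}

fetchBoth().catch(console.error)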
- */ -function _checkBufferLength(bytes, remaining, n) { - if(n > remaining) { - var error = new Error('Too few bytes to parse DER.'); - error.available = bytes.length(); - error.remaining = remaining; - error.requested = n; - throw error; + self[kSignal] = signal + self[kListener] = () => { + abort(self) } + + addAbortListener(self[kSignal], self[kListener]) } -/** - * Gets the length of a BER-encoded ASN.1 value. - * - * In case the length is not specified, undefined is returned. - * - * @param bytes the byte buffer to parse from. - * @param remaining the bytes remaining in the current parsing state. - * - * @return the length of the BER-encoded ASN.1 value or undefined. - */ -var _getValueLength = function(bytes, remaining) { - // TODO: move this function and related DER/BER functions to a der.js - // file; better abstract ASN.1 away from der/ber. - // fromDer already checked that this byte exists - var b2 = bytes.getByte(); - remaining--; - if(b2 === 0x80) { - return undefined; +function removeSignal (self) { + if (!self[kSignal]) { + return } - // see if the length is "short form" or "long form" (bit 8 set) - var length; - var longForm = b2 & 0x80; - if(!longForm) { - // length is just the first byte - length = b2; + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) } else { - // the number of bytes the length is specified in bits 7 through 1 - // and each length byte is in big-endian base-256 - var longFormBytes = b2 & 0x7F; - _checkBufferLength(bytes, remaining, longFormBytes); - length = bytes.getInt(longFormBytes << 3); + self[kSignal].removeListener('abort', self[kListener]) } - // FIXME: this will only happen for 32 bit getInt with high bit set - if(length < 0) { - throw new Error('Negative length: ' + length); - } - return length; -}; -/** - * Parses an asn1 object from a byte buffer in DER format. - * - * @param bytes the byte buffer to parse from. - * @param [strict] true to be strict when checking value lengths, false to - * allow truncated values (default: true). - * @param [options] object with options or boolean strict flag - * [strict] true to be strict when checking value lengths, false to - * allow truncated values (default: true). - * [parseAllBytes] true to ensure all bytes are parsed - * (default: true) - * [decodeBitStrings] true to attempt to decode the content of - * BIT STRINGs (not OCTET STRINGs) using strict mode. Note that - * without schema support to understand the data context this can - * erroneously decode values that happen to be valid ASN.1. This - * flag will be deprecated or removed as soon as schema support is - * available. (default: true) - * - * @throws Will throw an error for various malformed input conditions. - * - * @return the parsed asn1 object. 
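A sketch of what the addSignal/removeSignal helpers above enable at the call site: cancelling an in-flight undici request through the standard signal option. The timeout wrapper and its values are illustrative.

const { request } = require('undici')

async function requestWithTimeout (url, ms) {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort(), ms)
  try {
    const { statusCode, body } = await request(url, { signal: controller.signal })
    return { statusCode, text: await body.text() }
  } catch (err) {
    // an aborted request rejects; the helpers above detach the listener afterwards
    console.error('request failed or was aborted:', err.message)
    throw err
  } finally {
    clearTimeout(timer)
  }
}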
- */ -asn1.fromDer = function(bytes, options) { - if(options === undefined) { - options = { - strict: true, - parseAllBytes: true, - decodeBitStrings: true - }; - } - if(typeof options === 'boolean') { - options = { - strict: options, - parseAllBytes: true, - decodeBitStrings: true - }; - } - if(!('strict' in options)) { - options.strict = true; - } - if(!('parseAllBytes' in options)) { - options.parseAllBytes = true; - } - if(!('decodeBitStrings' in options)) { - options.decodeBitStrings = true; - } + self[kSignal] = null + self[kListener] = null +} - // wrap in buffer if needed - if(typeof bytes === 'string') { - bytes = forge.util.createBuffer(bytes); - } +module.exports = { + addSignal, + removeSignal +} - var byteCount = bytes.length(); - var value = _fromDer(bytes, bytes.length(), 0, options); - if(options.parseAllBytes && bytes.length() !== 0) { - var error = new Error('Unparsed DER bytes remain after ASN.1 parsing.'); - error.byteCount = byteCount; - error.remaining = bytes.length(); - throw error; - } - return value; -}; -/** - * Internal function to parse an asn1 object from a byte buffer in DER format. - * - * @param bytes the byte buffer to parse from. - * @param remaining the number of bytes remaining for this chunk. - * @param depth the current parsing depth. - * @param options object with same options as fromDer(). - * - * @return the parsed asn1 object. - */ -function _fromDer(bytes, remaining, depth, options) { - // temporary storage for consumption calculations - var start; - - // minimum length for ASN.1 DER structure is 2 - _checkBufferLength(bytes, remaining, 2); - - // get the first byte - var b1 = bytes.getByte(); - // consumed one byte - remaining--; - - // get the tag class - var tagClass = (b1 & 0xC0); - - // get the type (bits 1-5) - var type = b1 & 0x1F; - - // get the variable value length and adjust remaining bytes - start = bytes.length(); - var length = _getValueLength(bytes, remaining); - remaining -= start - bytes.length(); - - // ensure there are enough bytes to get the value - if(length !== undefined && length > remaining) { - if(options.strict) { - var error = new Error('Too few bytes to read ASN.1 value.'); - error.available = bytes.length(); - error.remaining = remaining; - error.requested = length; - throw error; - } - // Note: be lenient with truncated values and use remaining state bytes - length = remaining; - } - - // value storage - var value; - // possible BIT STRING contents storage - var bitStringContents; - - // constructed flag is bit 6 (32 = 0x20) of the first byte - var constructed = ((b1 & 0x20) === 0x20); - if(constructed) { - // parse child asn1 objects from the value - value = []; - if(length === undefined) { - // asn1 object of indefinite length, read until end tag - for(;;) { - _checkBufferLength(bytes, remaining, 2); - if(bytes.bytes(2) === String.fromCharCode(0, 0)) { - bytes.getBytes(2); - remaining -= 2; - break; - } - start = bytes.length(); - value.push(_fromDer(bytes, remaining, depth + 1, options)); - remaining -= start - bytes.length(); - } - } else { - // parsing asn1 object of definite length - while(length > 0) { - start = bytes.length(); - value.push(_fromDer(bytes, length, depth + 1, options)); - remaining -= start - bytes.length(); - length -= start - bytes.length(); - } - } - } +/***/ }), - // if a BIT STRING, save the contents including padding - if(value === undefined && tagClass === asn1.Class.UNIVERSAL && - type === asn1.Type.BITSTRING) { - bitStringContents = bytes.bytes(length); - } - - // determine if a 
non-constructed value should be decoded as a composed - // value that contains other ASN.1 objects. BIT STRINGs (and OCTET STRINGs) - // can be used this way. - if(value === undefined && options.decodeBitStrings && - tagClass === asn1.Class.UNIVERSAL && - // FIXME: OCTET STRINGs not yet supported here - // .. other parts of forge expect to decode OCTET STRINGs manually - (type === asn1.Type.BITSTRING /*|| type === asn1.Type.OCTETSTRING*/) && - length > 1) { - // save read position - var savedRead = bytes.read; - var savedRemaining = remaining; - var unused = 0; - if(type === asn1.Type.BITSTRING) { - /* The first octet gives the number of bits by which the length of the - bit string is less than the next multiple of eight (this is called - the "number of unused bits"). - - The second and following octets give the value of the bit string - converted to an octet string. */ - _checkBufferLength(bytes, remaining, 1); - unused = bytes.getByte(); - remaining--; - } - // if all bits are used, maybe the BIT/OCTET STRING holds ASN.1 objs - if(unused === 0) { - try { - // attempt to parse child asn1 object from the value - // (stored in array to signal composed value) - start = bytes.length(); - var subOptions = { - // enforce strict mode to avoid parsing ASN.1 from plain data - strict: true, - decodeBitStrings: true - }; - var composed = _fromDer(bytes, remaining, depth + 1, subOptions); - var used = start - bytes.length(); - remaining -= used; - if(type == asn1.Type.BITSTRING) { - used++; - } +/***/ 9744: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // if the data all decoded and the class indicates UNIVERSAL or - // CONTEXT_SPECIFIC then assume we've got an encapsulated ASN.1 object - var tc = composed.tagClass; - if(used === length && - (tc === asn1.Class.UNIVERSAL || tc === asn1.Class.CONTEXT_SPECIFIC)) { - value = [composed]; - } - } catch(ex) { - } - } - if(value === undefined) { - // restore read position - bytes.read = savedRead; - remaining = savedRemaining; - } - } +"use strict"; - if(value === undefined) { - // asn1 not constructed or composed, get raw value - // TODO: do DER to OID conversion and vice-versa in .toDer? - if(length === undefined) { - if(options.strict) { - throw new Error('Non-constructed ASN.1 object of indefinite length.'); - } - // be lenient and use remaining state bytes - length = remaining; - } +const { AsyncResource } = __nccwpck_require__(852) +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { addSignal, removeSignal } = __nccwpck_require__(7032) - if(type === asn1.Type.BMPSTRING) { - value = ''; - for(; length > 0; length -= 2) { - _checkBufferLength(bytes, remaining, 2); - value += String.fromCharCode(bytes.getInt16()); - remaining -= 2; - } - } else { - value = bytes.getBytes(length); - remaining -= length; +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - } - // add BIT STRING contents if available - var asn1Options = bitStringContents === undefined ? 
null : { - bitStringContents: bitStringContents - }; + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - // create and return asn1 object - return asn1.create(tagClass, type, constructed, value, asn1Options); -} + const { signal, opaque, responseHeaders } = opts -/** - * Converts the given asn1 object to a buffer of bytes in DER format. - * - * @param asn1 the asn1 object to convert to bytes. - * - * @return the buffer of bytes. - */ -asn1.toDer = function(obj) { - var bytes = forge.util.createBuffer(); + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } - // build the first byte - var b1 = obj.tagClass | obj.type; + super('UNDICI_CONNECT') - // for storing the ASN.1 value - var value = forge.util.createBuffer(); + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null - // use BIT STRING contents if available and data not changed - var useBitStringContents = false; - if('bitStringContents' in obj) { - useBitStringContents = true; - if(obj.original) { - useBitStringContents = asn1.equals(obj, obj.original); - } + addSignal(this, signal) } - if(useBitStringContents) { - value.putBytes(obj.bitStringContents); - } else if(obj.composed) { - // if composed, use each child asn1 object's DER bytes as value - // turn on 6th bit (0x20 = 32) to indicate asn1 is constructed - // from other asn1 objects - if(obj.constructed) { - b1 |= 0x20; - } else { - // type is a bit string, add unused bits of 0x00 - value.putByte(0x00); + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() } - // add all of the child DER bytes together - for(var i = 0; i < obj.value.length; ++i) { - if(obj.value[i] !== undefined) { - value.putBuffer(asn1.toDer(obj.value[i])); - } - } - } else { - // use asn1.value directly - if(obj.type === asn1.Type.BMPSTRING) { - for(var i = 0; i < obj.value.length; ++i) { - value.putInt16(obj.value.charCodeAt(i)); - } - } else { - // ensure integer is minimally-encoded - // TODO: should all leading bytes be stripped vs just one? - // .. ex '00 00 01' => '01'? 
- if(obj.type === asn1.Type.INTEGER && - obj.value.length > 1 && - // leading 0x00 for positive integer - ((obj.value.charCodeAt(0) === 0 && - (obj.value.charCodeAt(1) & 0x80) === 0) || - // leading 0xFF for negative integer - (obj.value.charCodeAt(0) === 0xFF && - (obj.value.charCodeAt(1) & 0x80) === 0x80))) { - value.putBytes(obj.value.substr(1)); - } else { - value.putBytes(obj.value); - } - } + this.abort = abort + this.context = context } - // add tag byte - bytes.putByte(b1); + onHeaders () { + throw new SocketError('bad connect', null) + } - // use "short form" encoding - if(value.length() <= 127) { - // one byte describes the length - // bit 8 = 0 and bits 7-1 = length - bytes.putByte(value.length() & 0x7F); - } else { - // use "long form" encoding - // 2 to 127 bytes describe the length - // first byte: bit 8 = 1 and bits 7-1 = # of additional bytes - // other bytes: length in base 256, big-endian - var len = value.length(); - var lenBytes = ''; - do { - lenBytes += String.fromCharCode(len & 0xFF); - len = len >>> 8; - } while(len > 0); - - // set first byte to # bytes used to store the length and turn on - // bit 8 to indicate long-form length is used - bytes.putByte(lenBytes.length | 0x80); - - // concatenate length bytes in reverse since they were generated - // little endian and we need big endian - for(var i = lenBytes.length - 1; i >= 0; --i) { - bytes.putByte(lenBytes.charCodeAt(i)); - } - } - - // concatenate value bytes - bytes.putBuffer(value); - return bytes; -}; + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this -/** - * Converts an OID dot-separated string to a byte buffer. The byte buffer - * contains only the DER-encoded value, not any tag or length bytes. - * - * @param oid the OID dot-separated string. - * - * @return the byte buffer. - */ -asn1.oidToDer = function(oid) { - // split OID into individual values - var values = oid.split('.'); - var bytes = forge.util.createBuffer(); - - // first byte is 40 * value1 + value2 - bytes.putByte(40 * parseInt(values[0], 10) + parseInt(values[1], 10)); - // other bytes are each value in base 128 with 8th bit set except for - // the last byte for each value - var last, valueBytes, value, b; - for(var i = 2; i < values.length; ++i) { - // produce value bytes in reverse because we don't know how many - // bytes it will take to store the value - last = true; - valueBytes = []; - value = parseInt(values[i], 10); - do { - b = value & 0x7F; - value = value >>> 7; - // if value is not last, then turn on 8th bit - if(!last) { - b |= 0x80; - } - valueBytes.push(b); - last = false; - } while(value > 0); + removeSignal(this) - // add value bytes in reverse (needs to be in big endian) - for(var n = valueBytes.length - 1; n >= 0; --n) { - bytes.putByte(valueBytes[n]); + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) } - return bytes; -}; + onError (err) { + const { callback, opaque } = this -/** - * Converts a DER-encoded byte buffer to an OID dot-separated string. The - * byte buffer should contain only the DER-encoded value, not any tag or - * length bytes. - * - * @param bytes the byte buffer. - * - * @return the OID dot-separated string. 
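A short sketch of the node-forge ASN.1 helpers this patch drops from the bundle, round-tripping the OID worked through in the removed comments and DER-encoding a small SEQUENCE; values are only for illustration.

const forge = require('node-forge')
const asn1 = forge.asn1

// OID "1.2.840.113549" encodes to 2a 86 48 86 f7 0d, per the comments above
const oidDer = asn1.oidToDer('1.2.840.113549')
console.log(oidDer.toHex())               // '2a864886f70d'
console.log(asn1.derToOid(oidDer.copy())) // '1.2.840.113549'

// a SEQUENCE holding that OID and a NULL, encoded to DER and parsed back
const seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
  asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidDer.getBytes()),
  asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')
])
const der = asn1.toDer(seq)
console.log(der.toHex())
console.log(asn1.prettyPrint(asn1.fromDer(der.getBytes())))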
- */ -asn1.derToOid = function(bytes) { - var oid; - - // wrap in buffer if needed - if(typeof bytes === 'string') { - bytes = forge.util.createBuffer(bytes); - } - - // first byte is 40 * value1 + value2 - var b = bytes.getByte(); - oid = Math.floor(b / 40) + '.' + (b % 40); - - // other bytes are each value in base 128 with 8th bit set except for - // the last byte for each value - var value = 0; - while(bytes.length() > 0) { - b = bytes.getByte(); - value = value << 7; - // not the last byte for the value - if(b & 0x80) { - value += b & 0x7F; - } else { - // last byte - oid += '.' + (value + b); - value = 0; + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } } +} - return oid; -}; +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } -/** - * Converts a UTCTime value to a date. - * - * Note: GeneralizedTime has 4 digits for the year and is used for X.509 - * dates past 2049. Parsing that structure hasn't been implemented yet. - * - * @param utc the UTCTime value to convert. - * - * @return the date. - */ -asn1.utcTimeToDate = function(utc) { - /* The following formats can be used: - - YYMMDDhhmmZ - YYMMDDhhmm+hh'mm' - YYMMDDhhmm-hh'mm' - YYMMDDhhmmssZ - YYMMDDhhmmss+hh'mm' - YYMMDDhhmmss-hh'mm' - - Where: - - YY is the least significant two digits of the year - MM is the month (01 to 12) - DD is the day (01 to 31) - hh is the hour (00 to 23) - mm are the minutes (00 to 59) - ss are the seconds (00 to 59) - Z indicates that local time is GMT, + indicates that local time is - later than GMT, and - indicates that local time is earlier than GMT - hh' is the absolute value of the offset from GMT in hours - mm' is the absolute value of the offset from GMT in minutes */ - var date = new Date(); - - // if YY >= 50 use 19xx, if YY < 50 use 20xx - var year = parseInt(utc.substr(0, 2), 10); - year = (year >= 50) ? 1900 + year : 2000 + year; - var MM = parseInt(utc.substr(2, 2), 10) - 1; // use 0-11 for month - var DD = parseInt(utc.substr(4, 2), 10); - var hh = parseInt(utc.substr(6, 2), 10); - var mm = parseInt(utc.substr(8, 2), 10); - var ss = 0; - - // not just YYMMDDhhmmZ - if(utc.length > 11) { - // get character after minutes - var c = utc.charAt(10); - var end = 10; - - // see if seconds are present - if(c !== '+' && c !== '-') { - // get seconds - ss = parseInt(utc.substr(10, 2), 10); - end += 2; - } - } - - // update date - date.setUTCFullYear(year, MM, DD); - date.setUTCHours(hh, mm, ss, 0); - - if(end) { - // get +/- after end of time - c = utc.charAt(end); - if(c === '+' || c === '-') { - // get hours+minutes offset - var hhoffset = parseInt(utc.substr(end + 1, 2), 10); - var mmoffset = parseInt(utc.substr(end + 4, 2), 10); - - // calculate offset in milliseconds - var offset = hhoffset * 60 + mmoffset; - offset *= 60000; - - // apply offset - if(c === '+') { - date.setTime(+date - offset); - } else { - date.setTime(+date + offset); - } + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } +} - return date; -}; +module.exports = connect -/** - * Converts a GeneralizedTime value to a date. 
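A sketch of the CONNECT helper whose handler the patch adds above; it only succeeds against a server or proxy that answers HTTP CONNECT, and localhost:3000 is purely illustrative.

const { connect } = require('undici')

async function openTunnel () {
  // resolves with the status line and the raw tunnelled socket
  const { statusCode, socket } = await connect('http://localhost:3000')
  console.log('CONNECT answered with', statusCode)
  socket.end()
}

openTunnel().catch(console.error)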
- * - * @param gentime the GeneralizedTime value to convert. - * - * @return the date. - */ -asn1.generalizedTimeToDate = function(gentime) { - /* The following formats can be used: - - YYYYMMDDHHMMSS - YYYYMMDDHHMMSS.fff - YYYYMMDDHHMMSSZ - YYYYMMDDHHMMSS.fffZ - YYYYMMDDHHMMSS+hh'mm' - YYYYMMDDHHMMSS.fff+hh'mm' - YYYYMMDDHHMMSS-hh'mm' - YYYYMMDDHHMMSS.fff-hh'mm' - - Where: - - YYYY is the year - MM is the month (01 to 12) - DD is the day (01 to 31) - hh is the hour (00 to 23) - mm are the minutes (00 to 59) - ss are the seconds (00 to 59) - .fff is the second fraction, accurate to three decimal places - Z indicates that local time is GMT, + indicates that local time is - later than GMT, and - indicates that local time is earlier than GMT - hh' is the absolute value of the offset from GMT in hours - mm' is the absolute value of the offset from GMT in minutes */ - var date = new Date(); - - var YYYY = parseInt(gentime.substr(0, 4), 10); - var MM = parseInt(gentime.substr(4, 2), 10) - 1; // use 0-11 for month - var DD = parseInt(gentime.substr(6, 2), 10); - var hh = parseInt(gentime.substr(8, 2), 10); - var mm = parseInt(gentime.substr(10, 2), 10); - var ss = parseInt(gentime.substr(12, 2), 10); - var fff = 0; - var offset = 0; - var isUTC = false; - - if(gentime.charAt(gentime.length - 1) === 'Z') { - isUTC = true; - } - - var end = gentime.length - 5, c = gentime.charAt(end); - if(c === '+' || c === '-') { - // get hours+minutes offset - var hhoffset = parseInt(gentime.substr(end + 1, 2), 10); - var mmoffset = parseInt(gentime.substr(end + 4, 2), 10); - - // calculate offset in milliseconds - offset = hhoffset * 60 + mmoffset; - offset *= 60000; - - // apply offset - if(c === '+') { - offset *= -1; - } - - isUTC = true; - } - - // check for second fraction - if(gentime.charAt(14) === '.') { - fff = parseFloat(gentime.substr(14), 10) * 1000; - } - - if(isUTC) { - date.setUTCFullYear(YYYY, MM, DD); - date.setUTCHours(hh, mm, ss, fff); - - // apply offset - date.setTime(+date + offset); - } else { - date.setFullYear(YYYY, MM, DD); - date.setHours(hh, mm, ss, fff); - } - return date; -}; +/***/ }), -/** - * Converts a date to a UTCTime value. - * - * Note: GeneralizedTime has 4 digits for the year and is used for X.509 - * dates past 2049. Converting to a GeneralizedTime hasn't been - * implemented yet. - * - * @param date the date to convert. - * - * @return the UTCTime value. 
- */ -asn1.dateToUtcTime = function(date) { - // TODO: validate; currently assumes proper format - if(typeof date === 'string') { - return date; - } +/***/ 8752: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var rval = ''; +"use strict"; - // create format YYMMDDhhmmssZ - var format = []; - format.push(('' + date.getUTCFullYear()).substr(2)); - format.push('' + (date.getUTCMonth() + 1)); - format.push('' + date.getUTCDate()); - format.push('' + date.getUTCHours()); - format.push('' + date.getUTCMinutes()); - format.push('' + date.getUTCSeconds()); - // ensure 2 digits are used for each format entry - for(var i = 0; i < format.length; ++i) { - if(format[i].length < 2) { - rval += '0'; - } - rval += format[i]; - } - rval += 'Z'; +const { + Readable, + Duplex, + PassThrough +} = __nccwpck_require__(2781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) +const assert = __nccwpck_require__(9491) - return rval; -}; +const kResume = Symbol('resume') -/** - * Converts a date to a GeneralizedTime value. - * - * @param date the date to convert. - * - * @return the GeneralizedTime value as a string. - */ -asn1.dateToGeneralizedTime = function(date) { - // TODO: validate; currently assumes proper format - if(typeof date === 'string') { - return date; - } +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) - var rval = ''; + this[kResume] = null + } - // create format YYYYMMDDHHMMSSZ - var format = []; - format.push('' + date.getUTCFullYear()); - format.push('' + (date.getUTCMonth() + 1)); - format.push('' + date.getUTCDate()); - format.push('' + date.getUTCHours()); - format.push('' + date.getUTCMinutes()); - format.push('' + date.getUTCSeconds()); + _read () { + const { [kResume]: resume } = this - // ensure 2 digits are used for each format entry - for(var i = 0; i < format.length; ++i) { - if(format[i].length < 2) { - rval += '0'; + if (resume) { + this[kResume] = null + resume() } - rval += format[i]; } - rval += 'Z'; - return rval; -}; + _destroy (err, callback) { + this._read() -/** - * Converts a javascript integer to a DER-encoded byte buffer to be used - * as the value for an INTEGER type. - * - * @param x the integer. - * - * @return the byte buffer. - */ -asn1.integerToDer = function(x) { - var rval = forge.util.createBuffer(); - if(x >= -0x80 && x < 0x80) { - return rval.putSignedInt(x, 8); - } - if(x >= -0x8000 && x < 0x8000) { - return rval.putSignedInt(x, 16); - } - if(x >= -0x800000 && x < 0x800000) { - return rval.putSignedInt(x, 24); + callback(err) } - if(x >= -0x80000000 && x < 0x80000000) { - return rval.putSignedInt(x, 32); - } - var error = new Error('Integer too large; max is 32-bits.'); - error.integer = x; - throw error; -}; +} -/** - * Converts a DER-encoded byte buffer to a javascript integer. This is - * typically used to decode the value of an INTEGER type. - * - * @param bytes the byte buffer. - * - * @return the integer. 
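A sketch of the date and integer DER helpers removed above, showing the UTCTime format and the base-256 arithmetic (257 = 0x01 0x01) that the deleted comments walk through; the concrete date is illustrative.

const forge = require('node-forge')
const asn1 = forge.asn1

// UTCTime uses a two-digit year: >= 50 is read as 19xx, < 50 as 20xx
const when = new Date(Date.UTC(2030, 0, 2, 3, 4, 5))
const utc = asn1.dateToUtcTime(when)
console.log(utc)                                   // '300102030405Z'
console.log(asn1.utcTimeToDate(utc).toISOString()) // '2030-01-02T03:04:05.000Z'

// INTEGER 257 takes two content bytes, 0x01 0x01
const intDer = asn1.integerToDer(257)
console.log(intDer.toHex())          // '0101'
console.log(asn1.derToInteger(intDer)) // 257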
- */ -asn1.derToInteger = function(bytes) { - // wrap in buffer if needed - if(typeof bytes === 'string') { - bytes = forge.util.createBuffer(bytes); +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume } - var n = bytes.length() * 8; - if(n > 32) { - throw new Error('Integer too large; max is 32-bits.'); + _read () { + this[kResume]() } - return bytes.getSignedInt(n); -}; -/** - * Validates that the given ASN.1 object is at least a super set of the - * given ASN.1 structure. Only tag classes and types are checked. An - * optional map may also be provided to capture ASN.1 values while the - * structure is checked. - * - * To capture an ASN.1 value, set an object in the validator's 'capture' - * parameter to the key to use in the capture map. To capture the full - * ASN.1 object, specify 'captureAsn1'. To capture BIT STRING bytes, including - * the leading unused bits counter byte, specify 'captureBitStringContents'. - * To capture BIT STRING bytes, without the leading unused bits counter byte, - * specify 'captureBitStringValue'. - * - * Objects in the validator may set a field 'optional' to true to indicate - * that it isn't necessary to pass validation. - * - * @param obj the ASN.1 object to validate. - * @param v the ASN.1 structure validator. - * @param capture an optional map to capture values in. - * @param errors an optional array for storing validation errors. - * - * @return true on success, false on failure. - */ -asn1.validate = function(obj, v, capture, errors) { - var rval = false; - - // ensure tag class and type are the same if specified - if((obj.tagClass === v.tagClass || typeof(v.tagClass) === 'undefined') && - (obj.type === v.type || typeof(v.type) === 'undefined')) { - // ensure constructed flag is the same if specified - if(obj.constructed === v.constructed || - typeof(v.constructed) === 'undefined') { - rval = true; - - // handle sub values - if(v.value && forge.util.isArray(v.value)) { - var j = 0; - for(var i = 0; rval && i < v.value.length; ++i) { - rval = v.value[i].optional || false; - if(obj.value[j]) { - rval = asn1.validate(obj.value[j], v.value[i], capture, errors); - if(rval) { - ++j; - } else if(v.value[i].optional) { - rval = true; - } - } - if(!rval && errors) { - errors.push( - '[' + v.name + '] ' + - 'Tag class "' + v.tagClass + '", type "' + - v.type + '" expected value length "' + - v.value.length + '", got "' + - obj.value.length + '"'); - } - } - } + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } - if(rval && capture) { - if(v.capture) { - capture[v.capture] = obj.value; - } - if(v.captureAsn1) { - capture[v.captureAsn1] = obj; - } - if(v.captureBitStringContents && 'bitStringContents' in obj) { - capture[v.captureBitStringContents] = obj.bitStringContents; - } - if(v.captureBitStringValue && 'bitStringContents' in obj) { - var value; - if(obj.bitStringContents.length < 2) { - capture[v.captureBitStringValue] = ''; - } else { - // FIXME: support unused bits with data shifting - var unused = obj.bitStringContents.charCodeAt(0); - if(unused !== 0) { - throw new Error( - 'captureBitStringValue only supported for zero unused bits'); - } - capture[v.captureBitStringValue] = obj.bitStringContents.slice(1); - } - } - } - } else if(errors) { - errors.push( - '[' + v.name + '] ' + - 'Expected constructed "' + v.constructed + '", got "' + - obj.constructed + '"'); - } - } else if(errors) { - if(obj.tagClass !== v.tagClass) 
{ - errors.push( - '[' + v.name + '] ' + - 'Expected tag class "' + v.tagClass + '", got "' + - obj.tagClass + '"'); - } - if(obj.type !== v.type) { - errors.push( - '[' + v.name + '] ' + - 'Expected type "' + v.type + '", got "' + obj.type + '"'); - } - } - return rval; -}; + callback(err) + } +} -// regex for testing for non-latin characters -var _nonLatinRegex = /[^\\u0000-\\u00ff]/; +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -/** - * Pretty prints an ASN.1 object to a string. - * - * @param obj the object to write out. - * @param level the level in the tree. - * @param indentation the indentation to use. - * - * @return the string. - */ -asn1.prettyPrint = function(obj, level, indentation) { - var rval = ''; - - // set default level and indentation - level = level || 0; - indentation = indentation || 2; - - // start new line for deep levels - if(level > 0) { - rval += '\n'; - } - - // create indent - var indent = ''; - for(var i = 0; i < level * indentation; ++i) { - indent += ' '; - } - - // print class:type - rval += indent + 'Tag: '; - switch(obj.tagClass) { - case asn1.Class.UNIVERSAL: - rval += 'Universal:'; - break; - case asn1.Class.APPLICATION: - rval += 'Application:'; - break; - case asn1.Class.CONTEXT_SPECIFIC: - rval += 'Context-Specific:'; - break; - case asn1.Class.PRIVATE: - rval += 'Private:'; - break; - } - - if(obj.tagClass === asn1.Class.UNIVERSAL) { - rval += obj.type; - - // known types - switch(obj.type) { - case asn1.Type.NONE: - rval += ' (None)'; - break; - case asn1.Type.BOOLEAN: - rval += ' (Boolean)'; - break; - case asn1.Type.INTEGER: - rval += ' (Integer)'; - break; - case asn1.Type.BITSTRING: - rval += ' (Bit string)'; - break; - case asn1.Type.OCTETSTRING: - rval += ' (Octet string)'; - break; - case asn1.Type.NULL: - rval += ' (Null)'; - break; - case asn1.Type.OID: - rval += ' (Object Identifier)'; - break; - case asn1.Type.ODESC: - rval += ' (Object Descriptor)'; - break; - case asn1.Type.EXTERNAL: - rval += ' (External or Instance of)'; - break; - case asn1.Type.REAL: - rval += ' (Real)'; - break; - case asn1.Type.ENUMERATED: - rval += ' (Enumerated)'; - break; - case asn1.Type.EMBEDDED: - rval += ' (Embedded PDV)'; - break; - case asn1.Type.UTF8: - rval += ' (UTF8)'; - break; - case asn1.Type.ROID: - rval += ' (Relative Object Identifier)'; - break; - case asn1.Type.SEQUENCE: - rval += ' (Sequence)'; - break; - case asn1.Type.SET: - rval += ' (Set)'; - break; - case asn1.Type.PRINTABLESTRING: - rval += ' (Printable String)'; - break; - case asn1.Type.IA5String: - rval += ' (IA5String (ASCII))'; - break; - case asn1.Type.UTCTIME: - rval += ' (UTC time)'; - break; - case asn1.Type.GENERALIZEDTIME: - rval += ' (Generalized time)'; - break; - case asn1.Type.BMPSTRING: - rval += ' (BMP String)'; - break; + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') } - } else { - rval += obj.type; - } - rval += '\n'; - rval += indent + 'Constructed: ' + obj.constructed + '\n'; + const { signal, method, opaque, onInfo, responseHeaders } = opts - if(obj.composed) { - var subvalues = 0; - var sub = ''; - for(var i = 0; i < obj.value.length; ++i) { - if(obj.value[i] !== undefined) { - subvalues += 1; - sub += asn1.prettyPrint(obj.value[i], level + 1, indentation); - if((i + 1) < obj.value.length) { - sub += ','; - } - } + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener 
!== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } - rval += indent + 'Sub values: ' + subvalues + sub; - } else { - rval += indent + 'Value: '; - if(obj.type === asn1.Type.OID) { - var oid = asn1.derToOid(obj.value); - rval += oid; - if(forge.pki && forge.pki.oids) { - if(oid in forge.pki.oids) { - rval += ' (' + forge.pki.oids[oid] + ') '; - } - } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') } - if(obj.type === asn1.Type.INTEGER) { - try { - rval += asn1.derToInteger(obj.value); - } catch(ex) { - rval += '0x' + forge.util.bytesToHex(obj.value); - } - } else if(obj.type === asn1.Type.BITSTRING) { - // TODO: shift bits as needed to display without padding - if(obj.value.length > 1) { - // remove unused bits field - rval += '0x' + forge.util.bytesToHex(obj.value.slice(1)); - } else { - rval += '(none)'; - } - // show unused bit count - if(obj.value.length > 0) { - var unused = obj.value.charCodeAt(0); - if(unused == 1) { - rval += ' (1 unused bit shown)'; - } else if(unused > 1) { - rval += ' (' + unused + ' unused bits shown)'; - } - } - } else if(obj.type === asn1.Type.OCTETSTRING) { - if(!_nonLatinRegex.test(obj.value)) { - rval += '(' + obj.value + ') '; - } - rval += '0x' + forge.util.bytesToHex(obj.value); - } else if(obj.type === asn1.Type.UTF8) { - try { - rval += forge.util.decodeUtf8(obj.value); - } catch(e) { - if(e.message === 'URI malformed') { - rval += - '0x' + forge.util.bytesToHex(obj.value) + ' (malformed UTF8)'; - } else { - throw e; - } - } - } else if(obj.type === asn1.Type.PRINTABLESTRING || - obj.type === asn1.Type.IA5String) { - rval += obj.value; - } else if(_nonLatinRegex.test(obj.value)) { - rval += '0x' + forge.util.bytesToHex(obj.value); - } else if(obj.value.length === 0) { - rval += '[null]'; - } else { - rval += obj.value; + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') } - } - return rval; -}; + super('UNDICI_PIPELINE') + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null -/***/ }), + this.req = new PipelineRequest().on('error', util.nop) -/***/ 2300: -/***/ ((module) => { + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this -/** - * Base-N/Base-X encoding/decoding functions. - * - * Original implementation from base-x: - * https://github.com/cryptocoinjs/base-x - * - * Which is MIT licensed: - * - * The MIT License (MIT) - * - * Copyright base-x contributors (c) 2016 - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - */ -var api = {}; -module.exports = api; + if (body && body.resume) { + body.resume() + } + }, + write: (chunk, encoding, callback) => { + const { req } = this + + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback + } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this -// baseN alphabet indexes -var _reverseAlphabets = {}; + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() + } -/** - * BaseN-encodes a Uint8Array using the given alphabet. - * - * @param input the Uint8Array to encode. - * @param maxline the maximum number of encoded characters per line to use, - * defaults to none. - * - * @return the baseN-encoded output string. - */ -api.encode = function(input, alphabet, maxline) { - if(typeof alphabet !== 'string') { - throw new TypeError('"alphabet" must be a string.'); - } - if(maxline !== undefined && typeof maxline !== 'number') { - throw new TypeError('"maxline" must be a number.'); - } + if (abort && err) { + abort() + } - var output = ''; + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) - if(!(input instanceof Uint8Array)) { - // assume forge byte buffer - output = _encodeWithByteBuffer(input, alphabet); - } else { - var i = 0; - var base = alphabet.length; - var first = alphabet.charAt(0); - var digits = [0]; - for(i = 0; i < input.length; ++i) { - for(var j = 0, carry = input[i]; j < digits.length; ++j) { - carry += digits[j] << 8; - digits[j] = carry % base; - carry = (carry / base) | 0; - } + removeSignal(this) - while(carry > 0) { - digits.push(carry % base); - carry = (carry / base) | 0; + callback(err) } - } + }).on('prefinish', () => { + const { req } = this - // deal with leading zeros - for(i = 0; input[i] === 0 && i < input.length - 1; ++i) { - output += first; - } - // convert digits to a string - for(i = digits.length - 1; i >= 0; --i) { - output += alphabet[digits[i]]; - } - } + // Node < 15 does not call _final in same tick. + req.push(null) + }) - if(maxline) { - var regex = new RegExp('.{1,' + maxline + '}', 'g'); - output = output.match(regex).join('\r\n'); + this.res = null + + addSignal(this, signal) } - return output; -}; + onConnect (abort, context) { + const { ret, res } = this -/** - * Decodes a baseN-encoded (using the given alphabet) string to a - * Uint8Array. - * - * @param input the baseN-encoded input string. - * - * @return the Uint8Array. 
- */ -api.decode = function(input, alphabet) { - if(typeof input !== 'string') { - throw new TypeError('"input" must be a string.'); - } - if(typeof alphabet !== 'string') { - throw new TypeError('"alphabet" must be a string.'); - } + assert(!res, 'pipeline cannot be retried') - var table = _reverseAlphabets[alphabet]; - if(!table) { - // compute reverse alphabet - table = _reverseAlphabets[alphabet] = []; - for(var i = 0; i < alphabet.length; ++i) { - table[alphabet.charCodeAt(i)] = i; + if (ret.destroyed) { + throw new RequestAbortedError() } + + this.abort = abort + this.context = context } - // remove whitespace characters - input = input.replace(/\s/g, ''); + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this - var base = alphabet.length; - var first = alphabet.charAt(0); - var bytes = [0]; - for(var i = 0; i < input.length; i++) { - var value = table[input.charCodeAt(i)]; - if(value === undefined) { - return; + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return } - for(var j = 0, carry = value; j < bytes.length; ++j) { - carry += bytes[j] * base; - bytes[j] = carry & 0xff; - carry >>= 8; + this.res = new PipelineResponse(resume) + + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err } - while(carry > 0) { - bytes.push(carry & 0xff); - carry >>= 8; + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') } - } - // deal with leading zeros - for(var k = 0; input[k] === first && k < input.length - 1; ++k) { - bytes.push(0); - } + body + .on('data', (chunk) => { + const { ret, body } = this - if(typeof Buffer !== 'undefined') { - return Buffer.from(bytes.reverse()); - } + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this - return new Uint8Array(bytes.reverse()); -}; + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this -function _encodeWithByteBuffer(input, alphabet) { - var i = 0; - var base = alphabet.length; - var first = alphabet.charAt(0); - var digits = [0]; - for(i = 0; i < input.length(); ++i) { - for(var j = 0, carry = input.at(i); j < digits.length; ++j) { - carry += digits[j] << 8; - digits[j] = carry % base; - carry = (carry / base) | 0; - } + ret.push(null) + }) + .on('close', () => { + const { ret } = this - while(carry > 0) { - digits.push(carry % base); - carry = (carry / base) | 0; - } + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) + + this.body = body } - var output = ''; + onData (chunk) { + const { res } = this + return res.push(chunk) + } - // deal with leading zeros - for(i = 0; input.at(i) === 0 && i < input.length() - 1; ++i) { - output += first; + onComplete (trailers) { + const { res } = this + res.push(null) } - // convert digits to a string - for(i = digits.length - 1; i >= 0; --i) { - output += alphabet[digits[i]]; + + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) } +} - return output; +function pipeline (opts, handler) { + try { + const pipelineHandler = new 
PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } } +module.exports = pipeline + /***/ }), -/***/ 7088: +/***/ 5448: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Cipher base API. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); +"use strict"; -module.exports = forge.cipher = forge.cipher || {}; -// registered algorithms -forge.cipher.algorithms = forge.cipher.algorithms || {}; +const Readable = __nccwpck_require__(3858) +const { + InvalidArgumentError, + RequestAbortedError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) -/** - * Creates a cipher object that can be used to encrypt data using the given - * algorithm and key. The algorithm may be provided as a string value for a - * previously registered algorithm or it may be given as a cipher algorithm - * API object. - * - * @param algorithm the algorithm to use, either a string or an algorithm API - * object. - * @param key the key to use, as a binary-encoded string of bytes or a - * byte buffer. - * - * @return the cipher. - */ -forge.cipher.createCipher = function(algorithm, key) { - var api = algorithm; - if(typeof api === 'string') { - api = forge.cipher.getAlgorithm(api); - if(api) { - api = api(); +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - } - if(!api) { - throw new Error('Unsupported algorithm: ' + algorithm); - } - // assume block cipher - return new forge.cipher.BlockCipher({ - algorithm: api, - key: key, - decrypt: false - }); -}; + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts -/** - * Creates a decipher object that can be used to decrypt data using the given - * algorithm and key. The algorithm may be provided as a string value for a - * previously registered algorithm or it may be given as a cipher algorithm - * API object. - * - * @param algorithm the algorithm to use, either a string or an algorithm API - * object. - * @param key the key to use, as a binary-encoded string of bytes or a - * byte buffer. - * - * @return the cipher. 
- */ -forge.cipher.createDecipher = function(algorithm, key) { - var api = algorithm; - if(typeof api === 'string') { - api = forge.cipher.getAlgorithm(api); - if(api) { - api = api(); + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_REQUEST') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) } + + addSignal(this, signal) } - if(!api) { - throw new Error('Unsupported algorithm: ' + algorithm); + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context } - // assume block cipher - return new forge.cipher.BlockCipher({ - algorithm: api, - key: key, - decrypt: true - }); -}; + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this -/** - * Registers an algorithm by name. If the name was already registered, the - * algorithm API object will be overwritten. - * - * @param name the name of the algorithm. - * @param algorithm the algorithm API object. - */ -forge.cipher.registerAlgorithm = function(name, algorithm) { - name = name.toUpperCase(); - forge.cipher.algorithms[name] = algorithm; -}; + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) -/** - * Gets a registered algorithm by name. - * - * @param name the name of the algorithm. - * - * @return the algorithm, if found, null if not. - */ -forge.cipher.getAlgorithm = function(name) { - name = name.toUpperCase(); - if(name in forge.cipher.algorithms) { - return forge.cipher.algorithms[name]; - } - return null; -}; + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } -var BlockCipher = forge.cipher.BlockCipher = function(options) { - this.algorithm = options.algorithm; - this.mode = this.algorithm.mode; - this.blockSize = this.mode.blockSize; - this._finish = false; - this._input = null; - this.output = null; - this._op = options.decrypt ? this.mode.decrypt : this.mode.encrypt; - this._decrypt = options.decrypt; - this.algorithm.initialize(options); -}; + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) -/** - * Starts or restarts the encryption or decryption process, whichever - * was previously configured. 
- * - * For non-GCM mode, the IV may be a binary-encoded string of bytes, an array - * of bytes, a byte buffer, or an array of 32-bit integers. If the IV is in - * bytes, then it must be Nb (16) bytes in length. If the IV is given in as - * 32-bit integers, then it must be 4 integers long. - * - * Note: an IV is not required or used in ECB mode. - * - * For GCM-mode, the IV must be given as a binary-encoded string of bytes or - * a byte buffer. The number of bytes should be 12 (96 bits) as recommended - * by NIST SP-800-38D but another length may be given. - * - * @param options the options to use: - * iv the initialization vector to use as a binary-encoded string of - * bytes, null to reuse the last ciphered block from a previous - * update() (this "residue" method is for legacy support only). - * additionalData additional authentication data as a binary-encoded - * string of bytes, for 'GCM' mode, (default: none). - * tagLength desired length of authentication tag, in bits, for - * 'GCM' mode (0-128, default: 128). - * tag the authentication tag to check if decrypting, as a - * binary-encoded string of bytes. - * output the output the buffer to write to, null to create one. - */ -BlockCipher.prototype.start = function(options) { - options = options || {}; - var opts = {}; - for(var key in options) { - opts[key] = options[key]; + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } + } } - opts.decrypt = this._decrypt; - this._finish = false; - this._input = forge.util.createBuffer(); - this.output = options.output || forge.util.createBuffer(); - this.mode.start(opts); -}; -/** - * Updates the next block according to the cipher mode. - * - * @param input the buffer to read from. - */ -BlockCipher.prototype.update = function(input) { - if(input) { - // input given, so empty it into the input buffer - this._input.putBuffer(input); + onData (chunk) { + const { res } = this + return res.push(chunk) } - // do cipher operation until it needs more input and not finished - while(!this._op.call(this.mode, this._input, this.output, this._finish) && - !this._finish) {} + onComplete (trailers) { + const { res } = this - // free consumed memory from input buffer - this._input.compact(); -}; + removeSignal(this) -/** - * Finishes encrypting or decrypting. - * - * @param pad a padding function to use in CBC mode, null for default, - * signature(blockSize, buffer, decrypt). - * - * @return true if successful, false on error. 
- */ -BlockCipher.prototype.finish = function(pad) { - // backwards-compatibility w/deprecated padding API - // Note: will overwrite padding functions even after another start() call - if(pad && (this.mode.name === 'ECB' || this.mode.name === 'CBC')) { - this.mode.pad = function(input) { - return pad(this.blockSize, input, false); - }; - this.mode.unpad = function(output) { - return pad(this.blockSize, output, true); - }; + util.parseHeaders(trailers, this.trailers) + + res.push(null) } - // build options for padding and afterFinish functions - var options = {}; - options.decrypt = this._decrypt; + onError (err) { + const { res, callback, body, opaque } = this - // get # of bytes that won't fill a block - options.overflow = this._input.length() % this.blockSize; + removeSignal(this) - if(!this._decrypt && this.mode.pad) { - if(!this.mode.pad(this._input, options)) { - return false; + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } - } - // do final update - this._finish = true; - this.update(); + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. + queueMicrotask(() => { + util.destroy(res, err) + }) + } - if(this._decrypt && this.mode.unpad) { - if(!this.mode.unpad(this.output, options)) { - return false; + if (body) { + this.body = null + util.destroy(body, err) } } +} - if(this.mode.afterFinish) { - if(!this.mode.afterFinish(this.output, options)) { - return false; +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } +} - return true; -}; +module.exports = request +module.exports.RequestHandler = RequestHandler /***/ }), -/***/ 873: +/***/ 5395: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Supported cipher modes. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); - -forge.cipher = forge.cipher || {}; - -// supported cipher modes -var modes = module.exports = forge.cipher.modes = forge.cipher.modes || {}; +"use strict"; -/** Electronic codebook (ECB) (Don't use this; it's not secure) **/ -modes.ecb = function(options) { - options = options || {}; - this.name = 'ECB'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = new Array(this._ints); - this._outBlock = new Array(this._ints); -}; +const { finished, PassThrough } = __nccwpck_require__(2781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) -modes.ecb.prototype.start = function(options) {}; +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -modes.ecb.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts - // get next block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - } + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } - // write output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i]); - } -}; + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } -modes.ecb.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } - // get next block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - } + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } - // decrypt block - this.cipher.decrypt(this._inBlock, this._outBlock); + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } - // write output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i]); + addSignal(this, signal) } -}; -modes.ecb.prototype.pad = function(input, options) { - // add PKCS#7 padding to block (each pad byte is the - // value of the number of pad bytes) - var padding = 
(input.length() === this.blockSize ? - this.blockSize : (this.blockSize - input.length())); - input.fillWithByte(padding, padding); - return true; -}; - -modes.ecb.prototype.unpad = function(output, options) { - // check for error: input data not a multiple of blockSize - if(options.overflow > 0) { - return false; - } + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } - // ensure padding byte count is valid - var len = output.length(); - var count = output.at(len - 1); - if(count > (this.blockSize << 2)) { - return false; + this.abort = abort + this.context = context } - // trim off padding bytes - output.truncate(count); - return true; -}; - -/** Cipher-block Chaining (CBC) **/ - -modes.cbc = function(options) { - options = options || {}; - this.name = 'CBC'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = new Array(this._ints); - this._outBlock = new Array(this._ints); -}; - -modes.cbc.prototype.start = function(options) { - // Note: legacy support for using IV residue (has security flaws) - // if IV is null, reuse block from previous processing - if(options.iv === null) { - // must have a previous block - if(!this._prev) { - throw new Error('Invalid IV parameter.'); - } - this._iv = this._prev.slice(0); - } else if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } else { - // save IV as "previous" block - this._iv = transformIV(options.iv, this.blockSize); - this._prev = this._iv.slice(0); - } -}; + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this -modes.cbc.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - // get next block - // CBC XOR's IV (or previous block) with plaintext - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._prev[i] ^ input.getInt32(); - } + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + this.factory = null - // write output, save previous block - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i]); - } - this._prev = this._outBlock; -}; + let res -modes.cbc.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() - // get next block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - } + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } - // decrypt block - this.cipher.decrypt(this._inBlock, this._outBlock); + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) - // write output, save previous ciphered block - // CBC XOR's IV (or previous block) with ciphertext - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._prev[i] ^ this._outBlock[i]); - } - this._prev = this._inBlock.slice(0); -}; + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } -modes.cbc.prototype.pad = function(input, options) { - // add PKCS#7 padding to block (each pad byte is the - // value of the number of pad bytes) - var padding = (input.length() === this.blockSize ? - this.blockSize : (this.blockSize - input.length())); - input.fillWithByte(padding, padding); - return true; -}; + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this -modes.cbc.prototype.unpad = function(output, options) { - // check for error: input data not a multiple of blockSize - if(options.overflow > 0) { - return false; - } + this.res = null + if (err || !res.readable) { + util.destroy(res, err) + } - // ensure padding byte count is valid - var len = output.length(); - var count = output.at(len - 1); - if(count > (this.blockSize << 2)) { - return false; - } + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) - // trim off padding bytes - output.truncate(count); - return true; -}; + if (err) { + abort() + } + }) + } -/** Cipher feedback (CFB) **/ + res.on('drain', resume) -modes.cfb = function(options) { - options = options || {}; - this.name = 'CFB'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = null; - this._outBlock = new Array(this._ints); - this._partialBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; -}; + this.res = res -modes.cfb.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } - // use IV as first input - this._iv = transformIV(options.iv, this.blockSize); - this._inBlock = this._iv.slice(0); - this._partialBytes = 0; -}; + const needDrain = res.writableNeedDrain !== undefined + ? 
res.writableNeedDrain + : res._writableState && res._writableState.needDrain -modes.cfb.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; + return needDrain !== true } - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + onData (chunk) { + const { res } = this - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output, write input as output - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32() ^ this._outBlock[i]; - output.putInt32(this._inBlock[i]); - } - return; + return res ? res.write(chunk) : true } - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; - } + onComplete (trailers) { + const { res } = this - // XOR input with output, write input as partial output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialBlock[i] = input.getInt32() ^ this._outBlock[i]; - this._partialOutput.putInt32(this._partialBlock[i]); - } + removeSignal(this) - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; - } else { - // block complete, update input block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._partialBlock[i]; + if (!res) { + return } - } - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); - } + this.trailers = util.parseHeaders(trailers) - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; + res.end() } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; -}; + onError (err) { + const { res, callback, opaque, body } = this -modes.cfb.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; - } + removeSignal(this) - // encrypt block (CFB always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); + this.factory = null - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output, write input as output - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - output.putInt32(this._inBlock[i] ^ this._outBlock[i]); + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } - return; - } - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; + if (body) { + this.body = null + util.destroy(body, err) + } } +} - // XOR input with output, write input as partial output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialBlock[i] = input.getInt32(); - this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]); +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) } - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; - } else { - // block complete, update input block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._partialBlock[i]; + try { + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } +} - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); - } +module.exports = stream - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; - } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; -}; +/***/ }), -/** Output feedback (OFB) **/ +/***/ 6923: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -modes.ofb = function(options) { - options = options || {}; - this.name = 'OFB'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = null; - this._outBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; -}; +"use strict"; -modes.ofb.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } - // use IV as first input - this._iv = transformIV(options.iv, this.blockSize); - this._inBlock = this._iv.slice(0); - this._partialBytes = 0; -}; -modes.ofb.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(input.length() === 0) { - return true; - } +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) +const { AsyncResource } = __nccwpck_require__(852) +const util = __nccwpck_require__(3983) +const { addSignal, removeSignal } = __nccwpck_require__(7032) +const assert = __nccwpck_require__(9491) - // encrypt block (OFB always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output and update next input - for(var i = 0; i < this._ints; ++i) { - output.putInt32(input.getInt32() ^ this._outBlock[i]); - this._inBlock[i] = this._outBlock[i]; + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') } - return; - } - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; - } + const { signal, opaque, responseHeaders } = opts - // XOR input with output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_UPGRADE') + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + 
this.callback = callback + this.abort = null + this.context = null + + addSignal(this, signal) } - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; - } else { - // block complete, update input block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._outBlock[i]; + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() } - } - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); + this.abort = abort + this.context = null } - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; + onHeaders () { + throw new SocketError('bad upgrade', null) } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; -}; - -modes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt; + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this -/** Counter (CTR) **/ + assert.strictEqual(statusCode, 101) -modes.ctr = function(options) { - options = options || {}; - this.name = 'CTR'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = null; - this._outBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; -}; + removeSignal(this) -modes.ctr.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) } - // use IV as first input - this._iv = transformIV(options.iv, this.blockSize); - this._inBlock = this._iv.slice(0); - this._partialBytes = 0; -}; -modes.ctr.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; - } + onError (err) { + const { callback, opaque } = this - // encrypt block (CTR always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); + removeSignal(this) - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(input.getInt32() ^ this._outBlock[i]); - } - } else { - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } + } +} - // XOR input with output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); - } +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); - } +module.exports = upgrade - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; - } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; - } +/***/ }), - // block complete, increment counter (input block) - inc32(this._inBlock); -}; +/***/ 4059: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -modes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt; +"use strict"; -/** Galois/Counter Mode (GCM) **/ -modes.gcm = function(options) { - options = options || {}; - this.name = 'GCM'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = new Array(this._ints); - this._outBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; - - // R is actually this value concatenated with 120 more zero bits, but - // we only XOR against R so the other zeros have no effect -- we just - // apply this value to the first integer in a block - this._R = 0xE1000000; -}; +module.exports.request = __nccwpck_require__(5448) +module.exports.stream = __nccwpck_require__(5395) +module.exports.pipeline = __nccwpck_require__(8752) +module.exports.upgrade = __nccwpck_require__(6923) +module.exports.connect = __nccwpck_require__(9744) -modes.gcm.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } - // ensure IV is a byte buffer - var iv = forge.util.createBuffer(options.iv); - // no ciphered data processed yet - this._cipherLength = 0; +/***/ }), - // default additional data is none - var additionalData; - if('additionalData' in options) { - additionalData = forge.util.createBuffer(options.additionalData); - } else { - additionalData = forge.util.createBuffer(); - } +/***/ 3858: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // default tag length is 128 bits - if('tagLength' in options) { - this._tagLength = options.tagLength; - } else { - this._tagLength = 128; - } +"use strict"; +// Ported from https://github.com/nodejs/undici/pull/907 - // if tag is given, ensure tag matches tag length - this._tag = null; - if(options.decrypt) { - // save tag to check later - this._tag = forge.util.createBuffer(options.tag).getBytes(); - if(this._tag.length !== (this._tagLength / 8)) { - throw new Error('Authentication tag does not match tag length.'); - } - } - // create tmp storage for hash calculation - this._hashBlock = new Array(this._ints); - // no tag generated yet - this.tag = null; +const assert = __nccwpck_require__(9491) +const { Readable } = __nccwpck_require__(2781) +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(8045) +const util = 
__nccwpck_require__(3983) +const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3983) - // generate hash subkey - // (apply block cipher to "zero" block) - this._hashSubkey = new Array(this._ints); - this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey); +let Blob - // generate table M - // use 4-bit tables (32 component decomposition of a 16 byte value) - // 8-bit tables take more space and are known to have security - // vulnerabilities (in native implementations) - this.componentBits = 4; - this._m = this.generateHashTable(this._hashSubkey, this.componentBits); +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') - // Note: support IV length different from 96 bits? (only supporting - // 96 bits is recommended by NIST SP-800-38D) - // generate J_0 - var ivLength = iv.length(); - if(ivLength === 12) { - // 96-bit IV - this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1]; - } else { - // IV is NOT 96-bits - this._j0 = [0, 0, 0, 0]; - while(iv.length() > 0) { - this._j0 = this.ghash( - this._hashSubkey, this._j0, - [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]); - } - this._j0 = this.ghash( - this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8))); - } - - // generate ICB (initial counter block) - this._inBlock = this._j0.slice(0); - inc32(this._inBlock); - this._partialBytes = 0; - - // consume authentication data - additionalData = forge.util.createBuffer(additionalData); - // save additional data length as a BE 64-bit number - this._aDataLength = from64To32(additionalData.length() * 8); - // pad additional data to 128 bit (16 byte) block size - var overflow = additionalData.length() % this.blockSize; - if(overflow) { - additionalData.fillWithByte(0, this.blockSize - overflow); - } - this._s = [0, 0, 0, 0]; - while(additionalData.length() > 0) { - this._s = this.ghash(this._hashSubkey, this._s, [ - additionalData.getInt32(), - additionalData.getInt32(), - additionalData.getInt32(), - additionalData.getInt32() - ]); - } -}; +const noop = () => {} -modes.gcm.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; - } +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. 
+ }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + this._readableState.dataEmitted = false - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i] ^= input.getInt32()); - } - this._cipherLength += this.blockSize; - } else { - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; - } - - // XOR input with output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); - } - - if(partialBytes <= 0 || finish) { - // handle overflow prior to hashing - if(finish) { - // get block overflow - var overflow = inputLength % this.blockSize; - this._cipherLength += overflow; - // truncate for hash function - this._partialOutput.truncate(this.blockSize - overflow); - } else { - this._cipherLength += this.blockSize; - } + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType - // get output block for hashing - for(var i = 0; i < this._ints; ++i) { - this._outBlock[i] = this._partialOutput.getInt32(); - } - this._partialOutput.read -= this.blockSize; + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false + } + + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this } - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() } - if(partialBytes > 0 && !finish) { - // block still incomplete, restore input buffer, get partial output, - // and return early - input.read -= this.blockSize; - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; + if (err) { + this[kAbort]() } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; + return super.destroy(err) } - // update hash block S - this._s = this.ghash(this._hashSubkey, this._s, this._outBlock); - - // increment counter (input block) - inc32(this._inBlock); -}; - -modes.gcm.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - var inputLength = input.length(); - if(inputLength < this.blockSize && !(finish && inputLength > 0)) { - return true; + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true + } + return super.emit(ev, ...args) } - // encrypt block (GCM always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); - - // increment counter (input block) - inc32(this._inBlock); + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } - // update hash block S - this._hashBlock[0] = input.getInt32(); - this._hashBlock[1] = input.getInt32(); - this._hashBlock[2] = input.getInt32(); - this._hashBlock[3] = input.getInt32(); - this._s = this.ghash(this._hashSubkey, this._s, 
this._hashBlock); + addListener (ev, ...args) { + return this.on(ev, ...args) + } - // XOR hash input with output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i] ^ this._hashBlock[i]); + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret } - // increment cipher data length - if(inputLength < this.blockSize) { - this._cipherLength += inputLength % this.blockSize; - } else { - this._cipherLength += this.blockSize; + removeListener (ev, ...args) { + return this.off(ev, ...args) } -}; -modes.gcm.prototype.afterFinish = function(output, options) { - var rval = true; + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? super.push(chunk) : true + } + return super.push(chunk) + } - // handle overflow - if(options.decrypt && options.overflow) { - output.truncate(this.blockSize - options.overflow); + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') } - // handle authentication tag - this.tag = forge.util.createBuffer(); + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') + } - // concatenate additional data length with cipher length - var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8)); + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') + } - // include lengths in hash - this._s = this.ghash(this._hashSubkey, this._s, lengths); + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } - // do GCTR(J_0, S) - var tag = []; - this.cipher.encrypt(this._j0, tag); - for(var i = 0; i < this._ints; ++i) { - this.tag.putInt32(this._s[i] ^ tag[i]); + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. + throw new NotSupportedError() } - // trim tag to length - this.tag.truncate(this.tag.length() % (this._tagLength / 8)); + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } - // check authentication tag - if(options.decrypt && this.tag.bytes() !== this._tag) { - rval = false; + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] } - return rval; -}; + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal -/** - * See NIST SP-800-38D 6.3 (Algorithm 1). This function performs Galois - * field multiplication. The field, GF(2^128), is defined by the polynomial: - * - * x^128 + x^7 + x^2 + x + 1 - * - * Which is represented in little-endian binary form as: 11100001 (0xe1). When - * the value of a coefficient is 1, a bit is set. The value R, is the - * concatenation of this value and 120 zero bits, yielding a 128-bit value - * which matches the block size. - * - * This function will multiply two elements (vectors of bytes), X and Y, in - * the field GF(2^128). The result is initialized to zero. 
For each bit of - * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd) - * by the current value of Y. For each bit, the value of Y will be raised by - * a power of x (multiplied by the polynomial x). This can be achieved by - * shifting Y once to the right. If the current value of Y, prior to being - * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial. - * Otherwise, we must divide by R after shifting to find the remainder. - * - * @param x the first block to multiply by the second. - * @param y the second block to multiply by the first. - * - * @return the block result of the multiplication. - */ -modes.gcm.prototype.multiply = function(x, y) { - var z_i = [0, 0, 0, 0]; - var v_i = y.slice(0); + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') + } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) + } + } - // calculate Z_128 (block has 128 bits) - for(var i = 0; i < 128; ++i) { - // if x_i is 0, Z_{i+1} = Z_i (unchanged) - // else Z_{i+1} = Z_i ^ V_i - // get x_i by finding 32-bit int position, then left shift 1 by remainder - var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32)); - if(x_i) { - z_i[0] ^= v_i[0]; - z_i[1] ^= v_i[1]; - z_i[2] ^= v_i[2]; - z_i[3] ^= v_i[3]; + if (this.closed) { + return Promise.resolve(null) } - // if LSB(V_i) is 1, V_i = V_i >> 1 - // else V_i = (V_i >> 1) ^ R - this.pow(v_i, v_i); + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) } +} - return z_i; -}; +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} -modes.gcm.prototype.pow = function(x, out) { - // if LSB(x) is 1, x = x >>> 1 - // else x = (x >>> 1) ^ R - var lsb = x[3] & 1; +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} - // always do x >>> 1: - // starting with the rightmost integer, shift each integer to the right - // one bit, pulling in the bit from the integer to the left as its top - // most bit (do this for the last 3 integers) - for(var i = 3; i > 0; --i) { - out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31); +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') } - // shift the first integer normally - out[0] = x[0] >>> 1; - // if lsb was not set, then polynomial had a degree of 127 and doesn't - // need to divided; otherwise, XOR with R to find the remainder; we only - // need to XOR the first integer since R technically ends w/120 zero bits - if(lsb) { - out[0] ^= this._R; - } -}; + assert(!stream[kConsume]) -modes.gcm.prototype.tableMultiply = function(x) { - // assumes 4-bit tables are used - var z = [0, 0, 0, 0]; - for(var i = 0; i < 32; ++i) { - var idx = (i / 8) | 0; - var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF; - var ah = this._m[i][x_i]; - z[0] ^= ah[0]; - z[1] ^= ah[1]; - z[2] ^= ah[2]; - z[3] ^= ah[3]; - } - return z; -}; + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } -/** - * A continuing version of the GHASH algorithm that operates on a single - * block. The hash block, last hash value (Ym) and the new block to hash - * are given. - * - * @param h the hash block. - * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash. - * @param x the block to hash. - * - * @return the hashed value (Ym). - */ -modes.gcm.prototype.ghash = function(h, y, x) { - y[0] ^= x[0]; - y[1] ^= x[1]; - y[2] ^= x[2]; - y[3] ^= x[3]; - return this.tableMultiply(y); - //return this.multiply(y, h); -}; + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) -/** - * Precomputes a table for multiplying against the hash subkey. This - * mechanism provides a substantial speed increase over multiplication - * performed without a table. The table-based multiplication this table is - * for solves X * H by multiplying each component of X by H and then - * composing the results together using XOR. - * - * This function can be used to generate tables with different bit sizes - * for the components, however, this implementation assumes there are - * 32 components of X (which is a 16 byte vector), therefore each component - * takes 4-bits (so the table is constructed with bits=4). - * - * @param h the hash subkey. - * @param bits the bit size for a component. 
- */ -modes.gcm.prototype.generateHashTable = function(h, bits) { - // TODO: There are further optimizations that would use only the - // first table M_0 (or some variant) along with a remainder table; - // this can be explored in the future - var multiplier = 8 / bits; - var perInt = 4 * multiplier; - var size = 16 * multiplier; - var m = new Array(size); - for(var i = 0; i < size; ++i) { - var tmp = [0, 0, 0, 0]; - var idx = (i / perInt) | 0; - var shft = ((perInt - 1 - (i % perInt)) * bits); - tmp[idx] = (1 << (bits - 1)) << shft; - m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits); - } - return m; -}; + process.nextTick(consumeStart, stream[kConsume]) + }) +} -/** - * Generates a table for multiplying against the hash subkey for one - * particular component (out of all possible component values). - * - * @param mid the pre-multiplied value for the middle key of the table. - * @param bits the bit size for a component. - */ -modes.gcm.prototype.generateSubHashTable = function(mid, bits) { - // compute the table quickly by minimizing the number of - // POW operations -- they only need to be performed for powers of 2, - // all other entries can be composed from those powers using XOR - var size = 1 << bits; - var half = size >>> 1; - var m = new Array(size); - m[half] = mid.slice(0); - var i = half >>> 1; - while(i > 0) { - // raise m0[2 * i] and store in m0[i] - this.pow(m[2 * i], m[i] = []); - i >>= 1; - } - i = 2; - while(i < half) { - for(var j = 1; j < i; ++j) { - var m_i = m[i]; - var m_j = m[j]; - m[i + j] = [ - m_i[0] ^ m_j[0], - m_i[1] ^ m_j[1], - m_i[2] ^ m_j[2], - m_i[3] ^ m_j[3] - ]; - } - i *= 2; - } - m[0] = [0, 0, 0, 0]; - /* Note: We could avoid storing these by doing composition during multiply - calculate top half using composition by speed is preferred. 
*/ - for(i = half + 1; i < size; ++i) { - var c = m[i ^ half]; - m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]]; - } - return m; -}; +function consumeStart (consume) { + if (consume.body === null) { + return + } -/** Utility functions */ + const { _readableState: state } = consume.stream -function transformIV(iv, blockSize) { - if(typeof iv === 'string') { - // convert iv string into byte buffer - iv = forge.util.createBuffer(iv); + for (const chunk of state.buffer) { + consumePush(consume, chunk) } - if(forge.util.isArray(iv) && iv.length > 4) { - // convert iv byte array into byte buffer - var tmp = iv; - iv = forge.util.createBuffer(); - for(var i = 0; i < tmp.length; ++i) { - iv.putByte(tmp[i]); - } + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) } - if(iv.length() < blockSize) { - throw new Error( - 'Invalid IV length; got ' + iv.length() + - ' bytes and expected ' + blockSize + ' bytes.'); + consume.stream.resume() + + while (consume.stream.read() != null) { + // Loop } +} - if(!forge.util.isArray(iv)) { - // convert iv byte buffer into 32-bit integer array - var ints = []; - var blocks = blockSize / 4; - for(var i = 0; i < blocks; ++i) { - ints.push(iv.getInt32()); +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume + + try { + if (type === 'text') { + resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) + + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength + } + + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = (__nccwpck_require__(4300).Blob) + } + resolve(new Blob(body, { type: stream[kContentType] })) } - iv = ints; - } - return iv; + consumeFinish(consume) + } catch (err) { + stream.destroy(err) + } } -function inc32(block) { - // increment last 32 bits of block only - block[block.length - 1] = (block[block.length - 1] + 1) & 0xFFFFFFFF; +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) } -function from64To32(num) { - // convert 64-bit number to two BE Int32s - return [(num / 0x100000000) | 0, num & 0xFFFFFFFF]; +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } + + if (err) { + consume.reject(err) + } else { + consume.resolve() + } + + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null } /***/ }), -/***/ 7157: +/***/ 7474: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * DES (Data Encryption Standard) implementation. - * - * This implementation supports DES as well as 3DES-EDE in ECB and CBC mode. - * It is based on the BSD-licensed implementation by Paul Tero: - * - * Paul Tero, July 2001 - * http://www.tero.co.uk/des/ - * - * Optimised for performance with large blocks by - * Michael Hayworth, November 2001 - * http://www.netdealing.com - * - * THIS SOFTWARE IS PROVIDED "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * @author Stefan Siegl - * @author Dave Longley - * - * Copyright (c) 2012 Stefan Siegl - * Copyright (c) 2012-2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7088); -__nccwpck_require__(873); -__nccwpck_require__(8339); - -/* DES API */ -module.exports = forge.des = forge.des || {}; - -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('DES-', key); - * cipher.start({iv: iv}); - * - * Creates an DES cipher object to encrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as binary-encoded strings of bytes or - * byte buffers. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC' if IV is - * given, 'ECB' if null). - * - * @return the cipher. - */ -forge.des.startEncrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: false, - mode: mode || (iv === null ? 'ECB' : 'CBC') - }); - cipher.start(iv); - return cipher; -}; - -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('DES-', key); - * - * Creates an DES cipher object to encrypt data using the given symmetric key. - * - * The key may be given as a binary-encoded string of bytes or a byte buffer. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.des.createEncryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: false, - mode: mode - }); -}; +const assert = __nccwpck_require__(9491) +const { + ResponseStatusCodeError +} = __nccwpck_require__(8045) +const { toUSVString } = __nccwpck_require__(3983) -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('DES-', key); - * decipher.start({iv: iv}); - * - * Creates an DES cipher object to decrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as binary-encoded strings of bytes or - * byte buffers. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC' if IV is - * given, 'ECB' if null). - * - * @return the cipher. - */ -forge.des.startDecrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: true, - mode: mode || (iv === null ? 
'ECB' : 'CBC') - }); - cipher.start(iv); - return cipher; -}; +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('DES-', key); - * - * Creates an DES cipher object to decrypt data using the given symmetric key. - * - * The key may be given as a binary-encoded string of bytes or a byte buffer. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.des.createDecryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: true, - mode: mode - }); -}; + let chunks = [] + let limit = 0 -/** - * Creates a new DES cipher algorithm object. - * - * @param name the name of the algorithm. - * @param mode the mode factory function. - * - * @return the DES algorithm object. - */ -forge.des.Algorithm = function(name, mode) { - var self = this; - self.name = name; - self.mode = new mode({ - blockSize: 8, - cipher: { - encrypt: function(inBlock, outBlock) { - return _updateBlock(self._keys, inBlock, outBlock, false); - }, - decrypt: function(inBlock, outBlock) { - return _updateBlock(self._keys, inBlock, outBlock, true); - } + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break } - }); - self._init = false; -}; + } -/** - * Initializes this DES algorithm by expanding its key. - * - * @param options the options to use. - * key the key to use with this algorithm. - * decrypt true if the algorithm should be initialized for decryption, - * false for encryption. - */ -forge.des.Algorithm.prototype.initialize = function(options) { - if(this._init) { - return; + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return } - var key = forge.util.createBuffer(options.key); - if(this.name.indexOf('3DES') === 0) { - if(key.length() !== 24) { - throw new Error('Invalid Triple-DES key size: ' + key.length() * 8); + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return } + } catch (err) { + // Process in a fallback if error } - // do key expansion to 16 or 48 subkeys (single or triple DES) - this._keys = _createKeys(key); - this._init = true; -}; + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers)) +} -/** Register DES algorithms **/ +module.exports = { getResolveErrorBodyCallback } -registerAlgorithm('DES-ECB', forge.cipher.modes.ecb); -registerAlgorithm('DES-CBC', forge.cipher.modes.cbc); -registerAlgorithm('DES-CFB', forge.cipher.modes.cfb); -registerAlgorithm('DES-OFB', forge.cipher.modes.ofb); -registerAlgorithm('DES-CTR', forge.cipher.modes.ctr); -registerAlgorithm('3DES-ECB', forge.cipher.modes.ecb); -registerAlgorithm('3DES-CBC', forge.cipher.modes.cbc); -registerAlgorithm('3DES-CFB', forge.cipher.modes.cfb); -registerAlgorithm('3DES-OFB', forge.cipher.modes.ofb); -registerAlgorithm('3DES-CTR', forge.cipher.modes.ctr); +/***/ }), -function registerAlgorithm(name, mode) { - var factory = function() { - return new forge.des.Algorithm(name, mode); - }; - forge.cipher.registerAlgorithm(name, factory); -} +/***/ 7931: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** DES implementation **/ +"use strict"; -var spfunction1 = [0x1010400,0,0x10000,0x1010404,0x1010004,0x10404,0x4,0x10000,0x400,0x1010400,0x1010404,0x400,0x1000404,0x1010004,0x1000000,0x4,0x404,0x1000400,0x1000400,0x10400,0x10400,0x1010000,0x1010000,0x1000404,0x10004,0x1000004,0x1000004,0x10004,0,0x404,0x10404,0x1000000,0x10000,0x1010404,0x4,0x1010000,0x1010400,0x1000000,0x1000000,0x400,0x1010004,0x10000,0x10400,0x1000004,0x400,0x4,0x1000404,0x10404,0x1010404,0x10004,0x1010000,0x1000404,0x1000004,0x404,0x10404,0x1010400,0x404,0x1000400,0x1000400,0,0x10004,0x10400,0,0x1010004]; -var spfunction2 = [-0x7fef7fe0,-0x7fff8000,0x8000,0x108020,0x100000,0x20,-0x7fefffe0,-0x7fff7fe0,-0x7fffffe0,-0x7fef7fe0,-0x7fef8000,-0x80000000,-0x7fff8000,0x100000,0x20,-0x7fefffe0,0x108000,0x100020,-0x7fff7fe0,0,-0x80000000,0x8000,0x108020,-0x7ff00000,0x100020,-0x7fffffe0,0,0x108000,0x8020,-0x7fef8000,-0x7ff00000,0x8020,0,0x108020,-0x7fefffe0,0x100000,-0x7fff7fe0,-0x7ff00000,-0x7fef8000,0x8000,-0x7ff00000,-0x7fff8000,0x20,-0x7fef7fe0,0x108020,0x20,0x8000,-0x80000000,0x8020,-0x7fef8000,0x100000,-0x7fffffe0,0x100020,-0x7fff7fe0,-0x7fffffe0,0x100020,0x108000,0,-0x7fff8000,0x8020,-0x80000000,-0x7fefffe0,-0x7fef7fe0,0x108000]; -var spfunction3 = [0x208,0x8020200,0,0x8020008,0x8000200,0,0x20208,0x8000200,0x20008,0x8000008,0x8000008,0x20000,0x8020208,0x20008,0x8020000,0x208,0x8000000,0x8,0x8020200,0x200,0x20200,0x8020000,0x8020008,0x20208,0x8000208,0x20200,0x20000,0x8000208,0x8,0x8020208,0x200,0x8000000,0x8020200,0x8000000,0x20008,0x208,0x20000,0x8020200,0x8000200,0,0x200,0x20008,0x8020208,0x8000200,0x8000008,0x200,0,0x8020008,0x8000208,0x20000,0x8000000,0x8020208,0x8,0x20208,0x20200,0x8000008,0x8020000,0x8000208,0x208,0x8020000,0x20208,0x8,0x8020008,0x20200]; -var spfunction4 = [0x802001,0x2081,0x2081,0x80,0x802080,0x800081,0x800001,0x2001,0,0x802000,0x802000,0x802081,0x81,0,0x800080,0x800001,0x1,0x2000,0x800000,0x802001,0x80,0x800000,0x2001,0x2080,0x800081,0x1,0x2080,0x800080,0x2000,0x802080,0x802081,0x81,0x800080,0x800001,0x802000,0x802081,0x81,0,0,0x802000,0x2080,0x800080,0x800081,0x1,0x802001,0x2081,0x2081,0x80,0x802081,0x81,0x1,0x2000,0x800001,0x2001,0x802080,0x800081,0x2001,0x2080,0x800000,0x802001,0x80,0x800000,0x2000,0x802080]; -var spfunction5 = 
[0x100,0x2080100,0x2080000,0x42000100,0x80000,0x100,0x40000000,0x2080000,0x40080100,0x80000,0x2000100,0x40080100,0x42000100,0x42080000,0x80100,0x40000000,0x2000000,0x40080000,0x40080000,0,0x40000100,0x42080100,0x42080100,0x2000100,0x42080000,0x40000100,0,0x42000000,0x2080100,0x2000000,0x42000000,0x80100,0x80000,0x42000100,0x100,0x2000000,0x40000000,0x2080000,0x42000100,0x40080100,0x2000100,0x40000000,0x42080000,0x2080100,0x40080100,0x100,0x2000000,0x42080000,0x42080100,0x80100,0x42000000,0x42080100,0x2080000,0,0x40080000,0x42000000,0x80100,0x2000100,0x40000100,0x80000,0,0x40080000,0x2080100,0x40000100]; -var spfunction6 = [0x20000010,0x20400000,0x4000,0x20404010,0x20400000,0x10,0x20404010,0x400000,0x20004000,0x404010,0x400000,0x20000010,0x400010,0x20004000,0x20000000,0x4010,0,0x400010,0x20004010,0x4000,0x404000,0x20004010,0x10,0x20400010,0x20400010,0,0x404010,0x20404000,0x4010,0x404000,0x20404000,0x20000000,0x20004000,0x10,0x20400010,0x404000,0x20404010,0x400000,0x4010,0x20000010,0x400000,0x20004000,0x20000000,0x4010,0x20000010,0x20404010,0x404000,0x20400000,0x404010,0x20404000,0,0x20400010,0x10,0x4000,0x20400000,0x404010,0x4000,0x400010,0x20004010,0,0x20404000,0x20000000,0x400010,0x20004010]; -var spfunction7 = [0x200000,0x4200002,0x4000802,0,0x800,0x4000802,0x200802,0x4200800,0x4200802,0x200000,0,0x4000002,0x2,0x4000000,0x4200002,0x802,0x4000800,0x200802,0x200002,0x4000800,0x4000002,0x4200000,0x4200800,0x200002,0x4200000,0x800,0x802,0x4200802,0x200800,0x2,0x4000000,0x200800,0x4000000,0x200800,0x200000,0x4000802,0x4000802,0x4200002,0x4200002,0x2,0x200002,0x4000000,0x4000800,0x200000,0x4200800,0x802,0x200802,0x4200800,0x802,0x4000002,0x4200802,0x4200000,0x200800,0,0x2,0x4200802,0,0x200802,0x4200000,0x800,0x4000002,0x4000800,0x800,0x200002]; -var spfunction8 = [0x10001040,0x1000,0x40000,0x10041040,0x10000000,0x10001040,0x40,0x10000000,0x40040,0x10040000,0x10041040,0x41000,0x10041000,0x41040,0x1000,0x40,0x10040000,0x10000040,0x10001000,0x1040,0x41000,0x40040,0x10040040,0x10041000,0x1040,0,0,0x10040040,0x10000040,0x10001000,0x41040,0x40000,0x41040,0x40000,0x10041000,0x1000,0x40,0x10040040,0x1000,0x41040,0x10001000,0x40,0x10000040,0x10040000,0x10040040,0x10000000,0x40000,0x10001040,0,0x10041040,0x40040,0x10000040,0x10040000,0x10001000,0x10001040,0,0x10041040,0x41000,0x41000,0x1040,0x1040,0x40040,0x10000000,0x10041000]; -/** - * Create necessary sub keys. - * - * @param key the 64-bit or 192-bit key. - * - * @return the expanded keys. 
- */ -function _createKeys(key) { - var pc2bytes0 = [0,0x4,0x20000000,0x20000004,0x10000,0x10004,0x20010000,0x20010004,0x200,0x204,0x20000200,0x20000204,0x10200,0x10204,0x20010200,0x20010204], - pc2bytes1 = [0,0x1,0x100000,0x100001,0x4000000,0x4000001,0x4100000,0x4100001,0x100,0x101,0x100100,0x100101,0x4000100,0x4000101,0x4100100,0x4100101], - pc2bytes2 = [0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808,0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808], - pc2bytes3 = [0,0x200000,0x8000000,0x8200000,0x2000,0x202000,0x8002000,0x8202000,0x20000,0x220000,0x8020000,0x8220000,0x22000,0x222000,0x8022000,0x8222000], - pc2bytes4 = [0,0x40000,0x10,0x40010,0,0x40000,0x10,0x40010,0x1000,0x41000,0x1010,0x41010,0x1000,0x41000,0x1010,0x41010], - pc2bytes5 = [0,0x400,0x20,0x420,0,0x400,0x20,0x420,0x2000000,0x2000400,0x2000020,0x2000420,0x2000000,0x2000400,0x2000020,0x2000420], - pc2bytes6 = [0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002,0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002], - pc2bytes7 = [0,0x10000,0x800,0x10800,0x20000000,0x20010000,0x20000800,0x20010800,0x20000,0x30000,0x20800,0x30800,0x20020000,0x20030000,0x20020800,0x20030800], - pc2bytes8 = [0,0x40000,0,0x40000,0x2,0x40002,0x2,0x40002,0x2000000,0x2040000,0x2000000,0x2040000,0x2000002,0x2040002,0x2000002,0x2040002], - pc2bytes9 = [0,0x10000000,0x8,0x10000008,0,0x10000000,0x8,0x10000008,0x400,0x10000400,0x408,0x10000408,0x400,0x10000400,0x408,0x10000408], - pc2bytes10 = [0,0x20,0,0x20,0x100000,0x100020,0x100000,0x100020,0x2000,0x2020,0x2000,0x2020,0x102000,0x102020,0x102000,0x102020], - pc2bytes11 = [0,0x1000000,0x200,0x1000200,0x200000,0x1200000,0x200200,0x1200200,0x4000000,0x5000000,0x4000200,0x5000200,0x4200000,0x5200000,0x4200200,0x5200200], - pc2bytes12 = [0,0x1000,0x8000000,0x8001000,0x80000,0x81000,0x8080000,0x8081000,0x10,0x1010,0x8000010,0x8001010,0x80010,0x81010,0x8080010,0x8081010], - pc2bytes13 = [0,0x4,0x100,0x104,0,0x4,0x100,0x104,0x1,0x5,0x101,0x105,0x1,0x5,0x101,0x105]; - - // how many iterations (1 for des, 3 for triple des) - // changed by Paul 16/6/2007 to use Triple DES for 9+ byte keys - var iterations = key.length() > 8 ? 
3 : 1; - - // stores the return keys - var keys = []; +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = __nccwpck_require__(8045) +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = __nccwpck_require__(3198) +const Pool = __nccwpck_require__(4634) +const { kUrl, kInterceptors } = __nccwpck_require__(2785) +const { parseOrigin } = __nccwpck_require__(3983) +const kFactory = Symbol('factory') - // now define the left shifts which need to be done - var shifts = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0]; +const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') - var n = 0, tmp; - for(var j = 0; j < iterations; j++) { - var left = key.getInt32(); - var right = key.getInt32(); +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} - tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; - right ^= tmp; - left ^= (tmp << 4); +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} - tmp = ((right >>> -16) ^ left) & 0x0000ffff; - left ^= tmp; - right ^= (tmp << -16); +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { + super() - tmp = ((left >>> 2) ^ right) & 0x33333333; - right ^= tmp; - left ^= (tmp << 2); + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 - tmp = ((right >>> -16) ^ left) & 0x0000ffff; - left ^= tmp; - right ^= (tmp << -16); + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } - tmp = ((right >>> 8) ^ left) & 0x00ff00ff; - left ^= tmp; - right ^= (tmp << 8); + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? 
opts.interceptors.BalancedPool + : [] + this[kFactory] = factory - // right needs to be shifted and OR'd with last four bits of left - tmp = (left << 8) | ((right >>> 20) & 0x000000f0); + for (const upstream of upstreams) { + this.addUpstream(upstream) + } + this._updateBalancedPoolStats() + } - // left needs to be put upside down - left = ((right << 24) | ((right << 8) & 0xff0000) | - ((right >>> 8) & 0xff00) | ((right >>> 24) & 0xf0)); - right = tmp; + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin - // now go through and perform these shifts on the left and right keys - for(var i = 0; i < shifts.length; ++i) { - //shift the keys either one or two bits to the left - if(shifts[i]) { - left = (left << 2) | (left >>> 26); - right = (right << 2) | (right >>> 26); - } else { - left = (left << 1) | (left >>> 27); - right = (right << 1) | (right >>> 27); - } - left &= -0xf; - right &= -0xf; - - // now apply PC-2, in such a way that E is easier when encrypting or - // decrypting this conversion will look like PC-2 except only the last 6 - // bits of each byte are used rather than 48 consecutive bits and the - // order of lines will be according to how the S selection functions will - // be applied: S2, S4, S6, S8, S1, S3, S5, S7 - var lefttmp = ( - pc2bytes0[left >>> 28] | pc2bytes1[(left >>> 24) & 0xf] | - pc2bytes2[(left >>> 20) & 0xf] | pc2bytes3[(left >>> 16) & 0xf] | - pc2bytes4[(left >>> 12) & 0xf] | pc2bytes5[(left >>> 8) & 0xf] | - pc2bytes6[(left >>> 4) & 0xf]); - var righttmp = ( - pc2bytes7[right >>> 28] | pc2bytes8[(right >>> 24) & 0xf] | - pc2bytes9[(right >>> 20) & 0xf] | pc2bytes10[(right >>> 16) & 0xf] | - pc2bytes11[(right >>> 12) & 0xf] | pc2bytes12[(right >>> 8) & 0xf] | - pc2bytes13[(right >>> 4) & 0xf]); - tmp = ((righttmp >>> 16) ^ lefttmp) & 0x0000ffff; - keys[n++] = lefttmp ^ tmp; - keys[n++] = righttmp ^ (tmp << 16); + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this } - } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) - return keys; -} + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) -/** - * Updates a single block (1 byte) using DES. The update will either - * encrypt or decrypt the block. - * - * @param keys the expanded keys. - * @param input the input block (an array of 32-bit words). - * @param output the updated output block. - * @param decrypt true to decrypt the block, false to encrypt it. - */ -function _updateBlock(keys, input, output, decrypt) { - // set up loops for single or triple DES - var iterations = keys.length === 32 ? 3 : 9; - var looping; - if(iterations === 3) { - looping = decrypt ? [30, -2, -2] : [0, 32, 2]; - } else { - looping = (decrypt ? - [94, 62, -2, 32, 64, 2, 30, -2, -2] : - [0, 32, 2, 62, 30, -2, 64, 96, 2]); - } + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + }) - var tmp; + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. 
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + } + }) - var left = input[0]; - var right = input[1]; + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] + } - // first each 64 bit chunk of the message must be permuted according to IP - tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; - right ^= tmp; - left ^= (tmp << 4); + this._updateBalancedPoolStats() - tmp = ((left >>> 16) ^ right) & 0x0000ffff; - right ^= tmp; - left ^= (tmp << 16); + return this + } - tmp = ((right >>> 2) ^ left) & 0x33333333; - left ^= tmp; - right ^= (tmp << 2); + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + } - tmp = ((right >>> 8) ^ left) & 0x00ff00ff; - left ^= tmp; - right ^= (tmp << 8); + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) - // rotate left 1 bit - left = ((left << 1) | (left >>> 31)); - right = ((right << 1) | (right >>> 31)); + if (pool) { + this[kRemoveClient](pool) + } - for(var j = 0; j < iterations; j += 3) { - var endloop = looping[j + 1]; - var loopinc = looping[j + 2]; + return this + } - // now go through and perform the encryption or decryption - for(var i = looping[j]; i != endloop; i += loopinc) { - var right1 = right ^ keys[i]; - var right2 = ((right >>> 4) | (right << 28)) ^ keys[i + 1]; + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) + } - // passing these bytes through the S selection functions - tmp = left; - left = right; - right = tmp ^ ( - spfunction2[(right1 >>> 24) & 0x3f] | - spfunction4[(right1 >>> 16) & 0x3f] | - spfunction6[(right1 >>> 8) & 0x3f] | - spfunction8[right1 & 0x3f] | - spfunction1[(right2 >>> 24) & 0x3f] | - spfunction3[(right2 >>> 16) & 0x3f] | - spfunction5[(right2 >>> 8) & 0x3f] | - spfunction7[right2 & 0x3f]); + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. 
+ if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() } - // unreverse left and right - tmp = left; - left = right; - right = tmp; - } - // rotate right 1 bit - left = ((left >>> 1) | (left << 31)); - right = ((right >>> 1) | (right << 31)); + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) - // now perform IP-1, which is IP in the opposite direction - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + if (!dispatcher) { + return + } - tmp = ((right >>> 8) ^ left) & 0x00ff00ff; - left ^= tmp; - right ^= (tmp << 8); + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) - tmp = ((right >>> 2) ^ left) & 0x33333333; - left ^= tmp; - right ^= (tmp << 2); + if (allClientsBusy) { + return + } - tmp = ((left >>> 16) ^ right) & 0x0000ffff; - right ^= tmp; - left ^= (tmp << 16); + let counter = 0 - tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; - right ^= tmp; - left ^= (tmp << 4); + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) - output[0] = left; - output[1] = right; -} + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] -/** - * Deprecated. Instead, use: - * - * forge.cipher.createCipher('DES-', key); - * forge.cipher.createDecipher('DES-', key); - * - * Creates a deprecated DES cipher object. This object's mode will default to - * CBC (cipher-block-chaining). - * - * The key may be given as a binary-encoded string of bytes or a byte buffer. - * - * @param options the options to use. - * key the symmetric key to use (64 or 192 bits). - * output the buffer to write to. - * decrypt true for decryption, false for encryption. - * mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -function _createCipher(options) { - options = options || {}; - var mode = (options.mode || 'CBC').toUpperCase(); - var algorithm = 'DES-' + mode; + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] + } - var cipher; - if(options.decrypt) { - cipher = forge.cipher.createDecipher(algorithm, options.key); - } else { - cipher = forge.cipher.createCipher(algorithm, options.key); - } + // decrease the current weight every `this[kClients].length`. + if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. 
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] - // backwards compatible start API - var start = cipher.start; - cipher.start = function(iv, options) { - // backwards compatibility: support second arg as output buffer - var output = null; - if(options instanceof forge.util.ByteBuffer) { - output = options; - options = {}; + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } + } + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool + } } - options = options || {}; - options.output = output; - options.iv = iv; - start.call(cipher, options); - }; - return cipher; + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] + } } +module.exports = BalancedPool + /***/ }), -/***/ 0: +/***/ 6101: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * JavaScript implementation of Ed25519. - * - * Copyright (c) 2017-2019 Digital Bazaar, Inc. - * - * This implementation is based on the most excellent TweetNaCl which is - * in the public domain. Many thanks to its contributors: - * - * https://github.com/dchest/tweetnacl-js - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7052); -__nccwpck_require__(7821); -__nccwpck_require__(9542); -__nccwpck_require__(8339); -var asn1Validator = __nccwpck_require__(9414); -var publicKeyValidator = asn1Validator.publicKeyValidator; -var privateKeyValidator = asn1Validator.privateKeyValidator; - -if(typeof BigInteger === 'undefined') { - var BigInteger = forge.jsbn.BigInteger; -} - -var ByteBuffer = forge.util.ByteBuffer; -var NativeBuffer = typeof Buffer === 'undefined' ? Uint8Array : Buffer; - -/* - * Ed25519 algorithms, see RFC 8032: - * https://tools.ietf.org/html/rfc8032 - */ -forge.pki = forge.pki || {}; -module.exports = forge.pki.ed25519 = forge.ed25519 = forge.ed25519 || {}; -var ed25519 = forge.ed25519; - -ed25519.constants = {}; -ed25519.constants.PUBLIC_KEY_BYTE_LENGTH = 32; -ed25519.constants.PRIVATE_KEY_BYTE_LENGTH = 64; -ed25519.constants.SEED_BYTE_LENGTH = 32; -ed25519.constants.SIGN_BYTE_LENGTH = 64; -ed25519.constants.HASH_BYTE_LENGTH = 64; - -ed25519.generateKeyPair = function(options) { - options = options || {}; - var seed = options.seed; - if(seed === undefined) { - // generate seed - seed = forge.random.getBytesSync(ed25519.constants.SEED_BYTE_LENGTH); - } else if(typeof seed === 'string') { - if(seed.length !== ed25519.constants.SEED_BYTE_LENGTH) { - throw new TypeError( - '"seed" must be ' + ed25519.constants.SEED_BYTE_LENGTH + - ' bytes in length.'); - } - } else if(!(seed instanceof Uint8Array)) { - throw new TypeError( - '"seed" must be a node.js Buffer, Uint8Array, or a binary string.'); - } +"use strict"; - seed = messageToNativeBuffer({message: seed, encoding: 'binary'}); - var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); - var sk = new NativeBuffer(ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); - for(var i = 0; i < 32; ++i) { - sk[i] = seed[i]; - } - crypto_sign_keypair(pk, sk); - return {publicKey: pk, privateKey: sk}; -}; +const { kConstruct } = __nccwpck_require__(9174) +const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(2396) +const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3983) +const { kHeadersList } = __nccwpck_require__(2785) +const { webidl } = __nccwpck_require__(1744) +const { Response, cloneResponse } = __nccwpck_require__(7823) +const { Request } = 
__nccwpck_require__(8359) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) +const { fetching } = __nccwpck_require__(4881) +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(2538) +const assert = __nccwpck_require__(9491) +const { getGlobalDispatcher } = __nccwpck_require__(1892) /** - * Converts a private key from a RFC8410 ASN.1 encoding. - * - * @param obj - The asn1 representation of a private key. - * - * @returns {Object} keyInfo - The key information. - * @returns {Buffer|Uint8Array} keyInfo.privateKeyBytes - 32 private key bytes. + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options */ -ed25519.privateKeyFromAsn1 = function(obj) { - var capture = {}; - var errors = []; - var valid = forge.asn1.validate(obj, privateKeyValidator, capture, errors); - if(!valid) { - var error = new Error('Invalid Key.'); - error.errors = errors; - throw error; - } - var oid = forge.asn1.derToOid(capture.privateKeyOid); - var ed25519Oid = forge.oids.EdDSA25519; - if(oid !== ed25519Oid) { - throw new Error('Invalid OID "' + oid + '"; OID must be "' + - ed25519Oid + '".'); - } - var privateKey = capture.privateKey; - // manually extract the private key bytes from nested octet string, see FIXME: - // https://github.com/digitalbazaar/forge/blob/master/lib/asn1.js#L542 - var privateKeyBytes = messageToNativeBuffer({ - message: forge.asn1.fromDer(privateKey).value, - encoding: 'binary' - }); - // TODO: RFC8410 specifies a format for encoding the public key bytes along - // with the private key bytes. `publicKeyBytes` can be returned in the - // future. https://tools.ietf.org/html/rfc8410#section-10.3 - return {privateKeyBytes: privateKeyBytes}; -}; /** - * Converts a public key from a RFC8410 ASN.1 encoding. - * - * @param obj - The asn1 representation of a public key. - * - * @return {Buffer|Uint8Array} - 32 public key bytes. 
+ * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList */ -ed25519.publicKeyFromAsn1 = function(obj) { - // get SubjectPublicKeyInfo - var capture = {}; - var errors = []; - var valid = forge.asn1.validate(obj, publicKeyValidator, capture, errors); - if(!valid) { - var error = new Error('Invalid Key.'); - error.errors = errors; - throw error; - } - var oid = forge.asn1.derToOid(capture.publicKeyOid); - var ed25519Oid = forge.oids.EdDSA25519; - if(oid !== ed25519Oid) { - throw new Error('Invalid OID "' + oid + '"; OID must be "' + - ed25519Oid + '".'); - } - var publicKeyBytes = capture.ed25519PublicKey; - if(publicKeyBytes.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) { - throw new Error('Key length is invalid.'); - } - return messageToNativeBuffer({ - message: publicKeyBytes, - encoding: 'binary' - }); -}; -ed25519.publicKeyFromPrivateKey = function(options) { - options = options || {}; - var privateKey = messageToNativeBuffer({ - message: options.privateKey, encoding: 'binary' - }); - if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) { - throw new TypeError( - '"options.privateKey" must have a byte length of ' + - ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); - } +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList - var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); - for(var i = 0; i < pk.length; ++i) { - pk[i] = privateKey[32 + i]; - } - return pk; -}; + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } -ed25519.sign = function(options) { - options = options || {}; - var msg = messageToNativeBuffer(options); - var privateKey = messageToNativeBuffer({ - message: options.privateKey, - encoding: 'binary' - }); - if(privateKey.length === ed25519.constants.SEED_BYTE_LENGTH) { - var keyPair = ed25519.generateKeyPair({seed: privateKey}); - privateKey = keyPair.privateKey; - } else if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) { - throw new TypeError( - '"options.privateKey" must have a byte length of ' + - ed25519.constants.SEED_BYTE_LENGTH + ' or ' + - ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); + this.#relevantRequestResponseList = arguments[1] } - var signedMsg = new NativeBuffer( - ed25519.constants.SIGN_BYTE_LENGTH + msg.length); - crypto_sign(signedMsg, msg, msg.length, privateKey); + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) - var sig = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH); - for(var i = 0; i < sig.length; ++i) { - sig[i] = signedMsg[i]; - } - return sig; -}; + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) -ed25519.verify = function(options) { - options = options || {}; - var msg = messageToNativeBuffer(options); - if(options.signature === undefined) { - throw new TypeError( - '"options.signature" must be a node.js Buffer, a Uint8Array, a forge ' + - 'ByteBuffer, or a binary string.'); - } - var sig = messageToNativeBuffer({ - message: options.signature, - encoding: 'binary' - }); - if(sig.length !== ed25519.constants.SIGN_BYTE_LENGTH) { - throw new TypeError( - '"options.signature" must have a byte length of ' + - ed25519.constants.SIGN_BYTE_LENGTH); - } - var publicKey = messageToNativeBuffer({ - message: options.publicKey, - 
encoding: 'binary' - }); - if(publicKey.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) { - throw new TypeError( - '"options.publicKey" must have a byte length of ' + - ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); - } + const p = await this.matchAll(request, options) - var sm = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length); - var m = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length); - var i; - for(i = 0; i < ed25519.constants.SIGN_BYTE_LENGTH; ++i) { - sm[i] = sig[i]; - } - for(i = 0; i < msg.length; ++i) { - sm[i + ed25519.constants.SIGN_BYTE_LENGTH] = msg[i]; - } - return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); -}; + if (p.length === 0) { + return + } -function messageToNativeBuffer(options) { - var message = options.message; - if(message instanceof Uint8Array || message instanceof NativeBuffer) { - return message; + return p[0] } - var encoding = options.encoding; - if(message === undefined) { - if(options.md) { - // TODO: more rigorous validation that `md` is a MessageDigest - message = options.md.digest().getBytes(); - encoding = 'binary'; - } else { - throw new TypeError('"options.message" or "options.md" not specified.'); - } - } - - if(typeof message === 'string' && !encoding) { - throw new TypeError('"options.encoding" must be "binary" or "utf8".'); - } - - if(typeof message === 'string') { - if(typeof Buffer !== 'undefined') { - return Buffer.from(message, encoding); - } - message = new ByteBuffer(message, encoding); - } else if(!(message instanceof ByteBuffer)) { - throw new TypeError( - '"options.message" must be a node.js Buffer, a Uint8Array, a forge ' + - 'ByteBuffer, or a string with "options.encoding" specifying its ' + - 'encoding.'); - } - - // convert to native buffer - var buffer = new NativeBuffer(message.length()); - for(var i = 0; i < buffer.length; ++i) { - buffer[i] = message.at(i); - } - return buffer; -} - -var gf0 = gf(); -var gf1 = gf([1]); -var D = gf([ - 0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, - 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]); -var D2 = gf([ - 0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, - 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]); -var X = gf([ - 0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, - 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]); -var Y = gf([ - 0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, - 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]); -var L = new Float64Array([ - 0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, - 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); -var I = gf([ - 0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, - 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); - -// TODO: update forge buffer implementation to use `Buffer` or `Uint8Array`, -// whichever is available, to improve performance -function sha512(msg, msgLen) { - // Note: `out` and `msg` are NativeBuffer - var md = forge.md.sha512.create(); - var buffer = new ByteBuffer(msg); - md.update(buffer.getBytes(msgLen), 'binary'); - var hash = md.digest().getBytes(); - if(typeof Buffer !== 'undefined') { - return Buffer.from(hash, 'binary'); - } - var out = new NativeBuffer(ed25519.constants.HASH_BYTE_LENGTH); - for(var i = 0; i < 64; ++i) { - out[i] = hash.charCodeAt(i); - } - return out; -} + async matchAll (request = undefined, options = {}) { + 
webidl.brandCheck(this, Cache) -function crypto_sign_keypair(pk, sk) { - var p = [gf(), gf(), gf(), gf()]; - var i; + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) - var d = sha512(sk, 32); - d[0] &= 248; - d[31] &= 127; - d[31] |= 64; + // 1. + let r = null - scalarbase(p, d); - pack(pk, p); + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] - for(i = 0; i < 32; ++i) { - sk[i + 32] = pk[i]; - } - return 0; -} + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] + } + } -// Note: difference from C - smlen returned, not passed as argument. -function crypto_sign(sm, m, n, sk) { - var i, j, x = new Float64Array(64); - var p = [gf(), gf(), gf(), gf()]; + // 5. + // 5.1 + const responses = [] - var d = sha512(sk, 32); - d[0] &= 248; - d[31] &= 127; - d[31] |= 64; + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) - var smlen = n + 64; - for(i = 0; i < n; ++i) { - sm[64 + i] = m[i]; - } - for(i = 0; i < 32; ++i) { - sm[32 + i] = d[32 + i]; - } + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) + } + } - var r = sha512(sm.subarray(32), n + 32); - reduce(r); - scalarbase(p, r); - pack(sm, p); + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! - for(i = 32; i < 64; ++i) { - sm[i] = sk[i]; - } - var h = sha512(sm, n + 64); - reduce(h); + // 5.5.1 + const responseList = [] - for(i = 32; i < 64; ++i) { - x[i] = 0; - } - for(i = 0; i < 32; ++i) { - x[i] = r[i]; - } - for(i = 0; i < 32; ++i) { - for(j = 0; j < 32; j++) { - x[i + j] += h[i] * d[j]; + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + + responseList.push(responseObject) } + + // 6. + return Object.freeze(responseList) } - modL(sm.subarray(32), x); - return smlen; -} + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) -function crypto_sign_open(m, sm, n, pk) { - var i, mlen; - var t = new NativeBuffer(32); - var p = [gf(), gf(), gf(), gf()], - q = [gf(), gf(), gf(), gf()]; + request = webidl.converters.RequestInfo(request) - mlen = -1; - if(n < 64) { - return -1; - } + // 1. + const requests = [request] - if(unpackneg(q, pk)) { - return -1; - } + // 2. + const responseArrayPromise = this.addAll(requests) - for(i = 0; i < n; ++i) { - m[i] = sm[i]; - } - for(i = 0; i < 32; ++i) { - m[i + 32] = pk[i]; + // 3. 
+ return await responseArrayPromise } - var h = sha512(m, n); - reduce(h); - scalarmult(p, q, h); - scalarbase(q, sm.subarray(32)); - add(p, q); - pack(t, p); - - n -= 64; - if(crypto_verify_32(sm, 0, t, 0)) { - for(i = 0; i < n; ++i) { - m[i] = 0; - } - return -1; - } + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) - for(i = 0; i < n; ++i) { - m[i] = sm[i + 64]; - } - mlen = n; - return mlen; -} + requests = webidl.converters['sequence'](requests) -function modL(r, x) { - var carry, i, j, k; - for(i = 63; i >= 32; --i) { - carry = 0; - for(j = i - 32, k = i - 12; j < k; ++j) { - x[j] += carry - 16 * x[i] * L[j - (i - 32)]; - carry = (x[j] + 128) >> 8; - x[j] -= carry * 256; - } - x[j] += carry; - x[i] = 0; - } - carry = 0; - for(j = 0; j < 32; ++j) { - x[j] += carry - (x[31] >> 4) * L[j]; - carry = x[j] >> 8; - x[j] &= 255; - } - for(j = 0; j < 32; ++j) { - x[j] -= carry * L[j]; - } - for(i = 0; i < 32; ++i) { - x[i + 1] += x[i] >> 8; - r[i] = x[i] & 255; - } -} + // 1. + const responsePromises = [] -function reduce(r) { - var x = new Float64Array(64); - for(var i = 0; i < 64; ++i) { - x[i] = r[i]; - r[i] = 0; - } - modL(r, x); -} + // 2. + const requestList = [] -function add(p, q) { - var a = gf(), b = gf(), c = gf(), - d = gf(), e = gf(), f = gf(), - g = gf(), h = gf(), t = gf(); + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } - Z(a, p[1], p[0]); - Z(t, q[1], q[0]); - M(a, a, t); - A(b, p[0], p[1]); - A(t, q[0], q[1]); - M(b, b, t); - M(c, p[3], q[3]); - M(c, c, D2); - M(d, p[2], q[2]); - A(d, d, d); - Z(e, b, a); - Z(f, d, c); - A(g, d, c); - A(h, b, a); + // 3.1 + const r = request[kState] - M(p[0], e, f); - M(p[1], h, g); - M(p[2], g, f); - M(p[3], e, h); -} + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } + } -function cswap(p, q, b) { - for(var i = 0; i < 4; ++i) { - sel25519(p[i], q[i], b); - } -} + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] -function pack(r, p) { - var tx = gf(), ty = gf(), zi = gf(); - inv25519(zi, p[2]); - M(tx, p[0], zi); - M(ty, p[1], zi); - pack25519(r, ty); - r[31] ^= par25519(tx) << 7; -} + // 5. 
+ for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] -function pack25519(o, n) { - var i, j, b; - var m = gf(), t = gf(); - for(i = 0; i < 16; ++i) { - t[i] = n[i]; - } - car25519(t); - car25519(t); - car25519(t); - for(j = 0; j < 2; ++j) { - m[0] = t[0] - 0xffed; - for(i = 1; i < 15; ++i) { - m[i] = t[i] - 0xffff - ((m[i - 1] >> 16) & 1); - m[i-1] &= 0xffff; - } - m[15] = t[15] - 0x7fff - ((m[14] >> 16) & 1); - b = (m[15] >> 16) & 1; - m[14] &= 0xffff; - sel25519(t, m, 1 - b); - } - for (i = 0; i < 16; i++) { - o[2 * i] = t[i] & 0xff; - o[2 * i + 1] = t[i] >> 8; - } -} - -function unpackneg(r, p) { - var t = gf(), chk = gf(), num = gf(), - den = gf(), den2 = gf(), den4 = gf(), - den6 = gf(); - - set25519(r[2], gf1); - unpack25519(r[1], p); - S(num, r[1]); - M(den, num, D); - Z(num, num, r[2]); - A(den, r[2], den); - - S(den2, den); - S(den4, den2); - M(den6, den4, den2); - M(t, den6, num); - M(t, t, den); - - pow2523(t, t); - M(t, t, num); - M(t, t, den); - M(t, t, den); - M(r[0], t, den); - - S(chk, r[0]); - M(chk, chk, den); - if(neq25519(chk, num)) { - M(r[0], r[0], I); - } - - S(chk, r[0]); - M(chk, chk, den); - if(neq25519(chk, num)) { - return -1; - } + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } - if(par25519(r[0]) === (p[31] >> 7)) { - Z(r[0], gf0, r[0]); - } + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' + + // 5.5 + requestList.push(r) + + // 5.6 + const responsePromise = createDeferredPromise() + + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. + // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) + + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) + + for (const controller of fetchControllers) { + controller.abort() + } - M(r[3], r[0], r[1]); - return 0; -} + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } -function unpack25519(o, n) { - var i; - for(i = 0; i < 16; ++i) { - o[i] = n[2 * i] + (n[2 * i + 1] << 8); - } - o[15] &= 0x7fff; -} + // 2. + responsePromise.resolve(response) + } + })) -function pow2523(o, i) { - var c = gf(); - var a; - for(a = 0; a < 16; ++a) { - c[a] = i[a]; - } - for(a = 250; a >= 0; --a) { - S(c, c); - if(a !== 1) { - M(c, c, i); + // 5.8 + responsePromises.push(responsePromise.promise) } - } - for(a = 0; a < 16; ++a) { - o[a] = c[a]; - } -} - -function neq25519(a, b) { - var c = new NativeBuffer(32); - var d = new NativeBuffer(32); - pack25519(c, a); - pack25519(d, b); - return crypto_verify_32(c, 0, d, 0); -} -function crypto_verify_32(x, xi, y, yi) { - return vn(x, xi, y, yi, 32); -} + // 6. 
+ const p = Promise.all(responsePromises) -function vn(x, xi, y, yi, n) { - var i, d = 0; - for(i = 0; i < n; ++i) { - d |= x[xi + i] ^ y[yi + i]; - } - return (1 & ((d - 1) >>> 8)) - 1; -} + // 7. + const responses = await p -function par25519(a) { - var d = new NativeBuffer(32); - pack25519(d, a); - return d[0] & 1; -} + // 7.1 + const operations = [] -function scalarmult(p, q, s) { - var b, i; - set25519(p[0], gf0); - set25519(p[1], gf1); - set25519(p[2], gf1); - set25519(p[3], gf0); - for(i = 255; i >= 0; --i) { - b = (s[(i / 8)|0] >> (i & 7)) & 1; - cswap(p, q, b); - add(q, p); - add(p, p); - cswap(p, q, b); - } -} + // 7.2 + let index = 0 -function scalarbase(p, s) { - var q = [gf(), gf(), gf(), gf()]; - set25519(q[0], X); - set25519(q[1], Y); - set25519(q[2], gf1); - M(q[3], X, Y); - scalarmult(p, q, s); -} + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 + } -function set25519(r, a) { - var i; - for(i = 0; i < 16; i++) { - r[i] = a[i] | 0; - } -} + operations.push(operation) // 7.3.5 -function inv25519(o, i) { - var c = gf(); - var a; - for(a = 0; a < 16; ++a) { - c[a] = i[a]; - } - for(a = 253; a >= 0; --a) { - S(c, c); - if(a !== 2 && a !== 4) { - M(c, c, i); + index++ // 7.3.6 } - } - for(a = 0; a < 16; ++a) { - o[a] = c[a]; - } -} -function car25519(o) { - var i, v, c = 1; - for(i = 0; i < 16; ++i) { - v = o[i] + c + 65535; - c = Math.floor(v / 65536); - o[i] = v - c * 65536; - } - o[0] += c - 1 + 37 * (c - 1); -} + // 7.5 + const cacheJobPromise = createDeferredPromise() -function sel25519(p, q, b) { - var t, c = ~(b - 1); - for(var i = 0; i < 16; ++i) { - t = c & (p[i] ^ q[i]); - p[i] ^= t; - q[i] ^= t; - } -} + // 7.6.1 + let errorData = null -function gf(init) { - var i, r = new Float64Array(16); - if(init) { - for(i = 0; i < init.length; ++i) { - r[i] = init[i]; + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - } - return r; -} -function A(o, a, b) { - for(var i = 0; i < 16; ++i) { - o[i] = a[i] + b[i]; - } -} - -function Z(o, a, b) { - for(var i = 0; i < 16; ++i) { - o[i] = a[i] - b[i]; - } -} - -function S(o, a) { - M(o, a, a); -} - -function M(o, a, b) { - var v, c, - t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, - t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, - t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, - t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, - b0 = b[0], - b1 = b[1], - b2 = b[2], - b3 = b[3], - b4 = b[4], - b5 = b[5], - b6 = b[6], - b7 = b[7], - b8 = b[8], - b9 = b[9], - b10 = b[10], - b11 = b[11], - b12 = b[12], - b13 = b[13], - b14 = b[14], - b15 = b[15]; - - v = a[0]; - t0 += v * b0; - t1 += v * b1; - t2 += v * b2; - t3 += v * b3; - t4 += v * b4; - t5 += v * b5; - t6 += v * b6; - t7 += v * b7; - t8 += v * b8; - t9 += v * b9; - t10 += v * b10; - t11 += v * b11; - t12 += v * b12; - t13 += v * b13; - t14 += v * b14; - t15 += v * b15; - v = a[1]; - t1 += v * b0; - t2 += v * b1; - t3 += v * b2; - t4 += v * b3; - t5 += v * b4; - t6 += v * b5; - t7 += v * b6; - t8 += v * b7; - t9 += v * b8; - t10 += v * b9; - t11 += v * b10; - t12 += v * b11; - t13 += v * b12; - t14 += v * b13; - t15 += v * b14; - t16 += v * b15; - v = a[2]; - t2 += v * b0; - t3 += v * b1; - t4 += v * b2; - t5 += v * b3; - t6 += v * b4; - t7 += v * b5; - t8 += v * b6; - t9 += v * b7; - t10 += v * b8; - t11 += v * b9; 
- t12 += v * b10; - t13 += v * b11; - t14 += v * b12; - t15 += v * b13; - t16 += v * b14; - t17 += v * b15; - v = a[3]; - t3 += v * b0; - t4 += v * b1; - t5 += v * b2; - t6 += v * b3; - t7 += v * b4; - t8 += v * b5; - t9 += v * b6; - t10 += v * b7; - t11 += v * b8; - t12 += v * b9; - t13 += v * b10; - t14 += v * b11; - t15 += v * b12; - t16 += v * b13; - t17 += v * b14; - t18 += v * b15; - v = a[4]; - t4 += v * b0; - t5 += v * b1; - t6 += v * b2; - t7 += v * b3; - t8 += v * b4; - t9 += v * b5; - t10 += v * b6; - t11 += v * b7; - t12 += v * b8; - t13 += v * b9; - t14 += v * b10; - t15 += v * b11; - t16 += v * b12; - t17 += v * b13; - t18 += v * b14; - t19 += v * b15; - v = a[5]; - t5 += v * b0; - t6 += v * b1; - t7 += v * b2; - t8 += v * b3; - t9 += v * b4; - t10 += v * b5; - t11 += v * b6; - t12 += v * b7; - t13 += v * b8; - t14 += v * b9; - t15 += v * b10; - t16 += v * b11; - t17 += v * b12; - t18 += v * b13; - t19 += v * b14; - t20 += v * b15; - v = a[6]; - t6 += v * b0; - t7 += v * b1; - t8 += v * b2; - t9 += v * b3; - t10 += v * b4; - t11 += v * b5; - t12 += v * b6; - t13 += v * b7; - t14 += v * b8; - t15 += v * b9; - t16 += v * b10; - t17 += v * b11; - t18 += v * b12; - t19 += v * b13; - t20 += v * b14; - t21 += v * b15; - v = a[7]; - t7 += v * b0; - t8 += v * b1; - t9 += v * b2; - t10 += v * b3; - t11 += v * b4; - t12 += v * b5; - t13 += v * b6; - t14 += v * b7; - t15 += v * b8; - t16 += v * b9; - t17 += v * b10; - t18 += v * b11; - t19 += v * b12; - t20 += v * b13; - t21 += v * b14; - t22 += v * b15; - v = a[8]; - t8 += v * b0; - t9 += v * b1; - t10 += v * b2; - t11 += v * b3; - t12 += v * b4; - t13 += v * b5; - t14 += v * b6; - t15 += v * b7; - t16 += v * b8; - t17 += v * b9; - t18 += v * b10; - t19 += v * b11; - t20 += v * b12; - t21 += v * b13; - t22 += v * b14; - t23 += v * b15; - v = a[9]; - t9 += v * b0; - t10 += v * b1; - t11 += v * b2; - t12 += v * b3; - t13 += v * b4; - t14 += v * b5; - t15 += v * b6; - t16 += v * b7; - t17 += v * b8; - t18 += v * b9; - t19 += v * b10; - t20 += v * b11; - t21 += v * b12; - t22 += v * b13; - t23 += v * b14; - t24 += v * b15; - v = a[10]; - t10 += v * b0; - t11 += v * b1; - t12 += v * b2; - t13 += v * b3; - t14 += v * b4; - t15 += v * b5; - t16 += v * b6; - t17 += v * b7; - t18 += v * b8; - t19 += v * b9; - t20 += v * b10; - t21 += v * b11; - t22 += v * b12; - t23 += v * b13; - t24 += v * b14; - t25 += v * b15; - v = a[11]; - t11 += v * b0; - t12 += v * b1; - t13 += v * b2; - t14 += v * b3; - t15 += v * b4; - t16 += v * b5; - t17 += v * b6; - t18 += v * b7; - t19 += v * b8; - t20 += v * b9; - t21 += v * b10; - t22 += v * b11; - t23 += v * b12; - t24 += v * b13; - t25 += v * b14; - t26 += v * b15; - v = a[12]; - t12 += v * b0; - t13 += v * b1; - t14 += v * b2; - t15 += v * b3; - t16 += v * b4; - t17 += v * b5; - t18 += v * b6; - t19 += v * b7; - t20 += v * b8; - t21 += v * b9; - t22 += v * b10; - t23 += v * b11; - t24 += v * b12; - t25 += v * b13; - t26 += v * b14; - t27 += v * b15; - v = a[13]; - t13 += v * b0; - t14 += v * b1; - t15 += v * b2; - t16 += v * b3; - t17 += v * b4; - t18 += v * b5; - t19 += v * b6; - t20 += v * b7; - t21 += v * b8; - t22 += v * b9; - t23 += v * b10; - t24 += v * b11; - t25 += v * b12; - t26 += v * b13; - t27 += v * b14; - t28 += v * b15; - v = a[14]; - t14 += v * b0; - t15 += v * b1; - t16 += v * b2; - t17 += v * b3; - t18 += v * b4; - t19 += v * b5; - t20 += v * b6; - t21 += v * b7; - t22 += v * b8; - t23 += v * b9; - t24 += v * b10; - t25 += v * b11; - t26 += v * b12; - t27 += v * b13; - t28 += v * b14; - t29 
+= v * b15; - v = a[15]; - t15 += v * b0; - t16 += v * b1; - t17 += v * b2; - t18 += v * b3; - t19 += v * b4; - t20 += v * b5; - t21 += v * b6; - t22 += v * b7; - t23 += v * b8; - t24 += v * b9; - t25 += v * b10; - t26 += v * b11; - t27 += v * b12; - t28 += v * b13; - t29 += v * b14; - t30 += v * b15; - - t0 += 38 * t16; - t1 += 38 * t17; - t2 += 38 * t18; - t3 += 38 * t19; - t4 += 38 * t20; - t5 += 38 * t21; - t6 += 38 * t22; - t7 += 38 * t23; - t8 += 38 * t24; - t9 += 38 * t25; - t10 += 38 * t26; - t11 += 38 * t27; - t12 += 38 * t28; - t13 += 38 * t29; - t14 += 38 * t30; - // t15 left as is - - // first car - c = 1; - v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; - v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; - v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; - v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; - v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; - v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; - v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; - v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; - v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; - v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; - v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; - v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; - v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; - v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; - v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; - v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; - t0 += c-1 + 37 * (c-1); - - // second car - c = 1; - v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; - v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; - v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; - v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; - v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; - v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; - v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; - v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; - v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; - v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; - v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; - v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; - v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; - v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; - v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; - v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; - t0 += c-1 + 37 * (c-1); - - o[ 0] = t0; - o[ 1] = t1; - o[ 2] = t2; - o[ 3] = t3; - o[ 4] = t4; - o[ 5] = t5; - o[ 6] = t6; - o[ 7] = t7; - o[ 8] = t8; - o[ 9] = t9; - o[10] = t10; - o[11] = t11; - o[12] = t12; - o[13] = t13; - o[14] = t14; - o[15] = t15; -} + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) + // 7.7 + return cacheJobPromise.promise + } -/***/ }), + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) -/***/ 
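The addAll tail added above (steps 7.5-7.7) commits the batch synchronously and then settles a deferred promise from a microtask; the put, delete and keys methods later in this hunk repeat the same shape. A minimal sketch of that pattern, with a hand-rolled stand-in for the createDeferredPromise helper the bundle imports (illustrative only, not the bundled source):

// Illustrative reimplementation of the deferred-promise helper used by the
// added Cache methods; the real helper lives elsewhere in this bundle.
function createDeferredPromise () {
  let resolve, reject
  const promise = new Promise((res, rej) => {
    resolve = res
    reject = rej
  })
  return { promise, resolve, reject }
}

// Mirrors addAll steps 7.5-7.7: run the batch synchronously, then settle the
// job promise from a microtask so callers always observe async completion.
function commitBatch (runBatch) {
  const cacheJobPromise = createDeferredPromise()
  let errorData = null
  try {
    runBatch()
  } catch (e) {
    errorData = e
  }
  queueMicrotask(() => {
    if (errorData === null) {
      cacheJobPromise.resolve(undefined)
    } else {
      cacheJobPromise.reject(errorData)
    }
  })
  return cacheJobPromise.promise
}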
9177: -/***/ ((module) => { + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) -/** - * Node.js module for Forge. - * - * @author Dave Longley - * - * Copyright 2011-2016 Digital Bazaar, Inc. - */ -module.exports = { - // default options - options: { - usePureJavaScript: false - } -}; + // 1. + let innerRequest = null + // 2. + if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] + } -/***/ }), + // 4. + if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Expected an http/s scheme when method is not GET' + }) + } -/***/ 5104: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 5. + const innerResponse = response[kState] -/** - * Hash-based Message Authentication Code implementation. Requires a message - * digest object that can be obtained, for example, from forge.md.sha1 or - * forge.md.md5. - * - * @author Dave Longley - * - * Copyright (c) 2010-2012 Digital Bazaar, Inc. All rights reserved. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + // 6. + if (innerResponse.status === 206) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got 206 status' + }) + } -/* HMAC API */ -var hmac = module.exports = forge.hmac = forge.hmac || {}; + // 7. + if (innerResponse.headersList.contains('vary')) { + // 7.1. + const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) -/** - * Creates an HMAC object that uses the given message digest object. - * - * @return an HMAC object. - */ -hmac.create = function() { - // the hmac key to use - var _key = null; + // 7.2. + for (const fieldValue of fieldValues) { + // 7.2.1 + if (fieldValue === '*') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got * vary field value' + }) + } + } + } - // the message digest to use - var _md = null; + // 8. + if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Response body is locked or disturbed' + }) + } - // the inner padding - var _ipadding = null; + // 9. + const clonedResponse = cloneResponse(innerResponse) - // the outer padding - var _opadding = null; + // 10. + const bodyReadPromise = createDeferredPromise() - // hmac context - var ctx = {}; + // 11. + if (innerResponse.body != null) { + // 11.1 + const stream = innerResponse.body.stream - /** - * Starts or restarts the HMAC with the given key and message digest. - * - * @param md the message digest to use, null to reuse the previous one, - * a string to use builtin 'sha1', 'md5', 'sha256'. - * @param key the key to use as a string, array of bytes, byte buffer, - * or null to reuse the previous key. 
- */ - ctx.start = function(md, key) { - if(md !== null) { - if(typeof md === 'string') { - // create builtin message digest - md = md.toLowerCase(); - if(md in forge.md.algorithms) { - _md = forge.md.algorithms[md].create(); - } else { - throw new Error('Unknown hash algorithm "' + md + '"'); - } - } else { - // store message digest - _md = md; - } - } + // 11.2 + const reader = stream.getReader() - if(key === null) { - // reuse previous key - key = _key; + // 11.3 + readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) } else { - if(typeof key === 'string') { - // convert string into byte buffer - key = forge.util.createBuffer(key); - } else if(forge.util.isArray(key)) { - // convert byte array into byte buffer - var tmp = key; - key = forge.util.createBuffer(); - for(var i = 0; i < tmp.length; ++i) { - key.putByte(tmp[i]); - } - } + bodyReadPromise.resolve(undefined) + } - // if key is longer than blocksize, hash it - var keylen = key.length(); - if(keylen > _md.blockLength) { - _md.start(); - _md.update(key.bytes()); - key = _md.digest(); - } + // 12. + /** @type {CacheBatchOperation[]} */ + const operations = [] - // mix key into inner and outer padding - // ipadding = [0x36 * blocksize] ^ key - // opadding = [0x5C * blocksize] ^ key - _ipadding = forge.util.createBuffer(); - _opadding = forge.util.createBuffer(); - keylen = key.length(); - for(var i = 0; i < keylen; ++i) { - var tmp = key.at(i); - _ipadding.putByte(0x36 ^ tmp); - _opadding.putByte(0x5C ^ tmp); - } + // 13. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 14. + request: innerRequest, // 15. + response: clonedResponse // 16. + } - // if key is shorter than blocksize, add additional padding - if(keylen < _md.blockLength) { - var tmp = _md.blockLength - keylen; - for(var i = 0; i < tmp; ++i) { - _ipadding.putByte(0x36); - _opadding.putByte(0x5C); - } - } - _key = key; - _ipadding = _ipadding.bytes(); - _opadding = _opadding.bytes(); + // 17. + operations.push(operation) + + // 19. + const bytes = await bodyReadPromise.promise + + if (clonedResponse.body != null) { + clonedResponse.body.source = bytes } - // digest is done like so: hash(opadding | hash(ipadding | message)) + // 19.1 + const cacheJobPromise = createDeferredPromise() - // prepare to do inner hash - // hash(ipadding | message) - _md.start(); - _md.update(_ipadding); - }; + // 19.2.1 + let errorData = null - /** - * Updates the HMAC with the given message bytes. - * - * @param bytes the bytes to update with. - */ - ctx.update = function(bytes) { - _md.update(bytes); - }; + // 19.2.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } - /** - * Produces the Message Authentication Code (MAC). - * - * @return a byte buffer containing the digest value. 
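The put() steps added above (10, 11 and 19) drain the response body once and attach the collected bytes to the stored clone so it can be replayed on later matches. A simplified sketch of what a readAllBytes-style helper does with the reader obtained in step 11.2 (an assumed reimplementation, not the bundle's own util):

// Read every chunk from a ReadableStream reader and return the joined bytes,
// roughly what put() relies on before setting clonedResponse.body.source.
async function readAllBytes (reader) {
  const chunks = []
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    chunks.push(value)
  }
  return Buffer.concat(chunks)   // Node Buffer; the bundle targets Node
}

// Usage shape: const bytes = await readAllBytes(response.body.getReader())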
- */ - ctx.getMac = function() { - // digest is done like so: hash(opadding | hash(ipadding | message)) - // here we do the outer hashing - var inner = _md.digest().bytes(); - _md.start(); - _md.update(_opadding); - _md.update(inner); - return _md.digest(); - }; - // alias for getMac - ctx.digest = ctx.getMac; + // 19.2.3 + queueMicrotask(() => { + // 19.2.3.1 + if (errorData === null) { + cacheJobPromise.resolve() + } else { // 19.2.3.2 + cacheJobPromise.reject(errorData) + } + }) - return ctx; -}; + return cacheJobPromise.promise + } + async delete (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) -/***/ }), + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) -/***/ 7655: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * @type {Request} + */ + let r = null -/** - * Node.js module for Forge. - * - * @author Dave Longley - * - * Copyright 2011-2016 Digital Bazaar, Inc. - */ -module.exports = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(1449); -__nccwpck_require__(9549); -__nccwpck_require__(7088); -__nccwpck_require__(7157); -__nccwpck_require__(0); -__nccwpck_require__(5104); -__nccwpck_require__(5173); -__nccwpck_require__(9994); -__nccwpck_require__(1145); -__nccwpck_require__(3339); -__nccwpck_require__(1611); -__nccwpck_require__(154); -__nccwpck_require__(7014); -__nccwpck_require__(466); -__nccwpck_require__(4829); -__nccwpck_require__(6924); -__nccwpck_require__(6861); -__nccwpck_require__(4467); -__nccwpck_require__(4376); -__nccwpck_require__(7821); -__nccwpck_require__(9965); -__nccwpck_require__(4280); -__nccwpck_require__(9167); -__nccwpck_require__(8339); + if (request instanceof Request) { + r = request[kState] + if (r.method !== 'GET' && !options.ignoreMethod) { + return false + } + } else { + assert(typeof request === 'string') -/***/ }), + r = new Request(request)[kState] + } -/***/ 7052: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** @type {CacheBatchOperation[]} */ + const operations = [] -// Copyright (c) 2005 Tom Wu -// All Rights Reserved. -// See "LICENSE" for details. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'delete', + request: r, + options + } -// Basic JavaScript BN library - subset useful for RSA encryption. + operations.push(operation) -/* -Licensing (LICENSE) -------------------- + const cacheJobPromise = createDeferredPromise() -This software is covered under the following copyright: -*/ -/* - * Copyright (c) 2003-2005 Tom Wu - * All Rights Reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, - * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY - * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. 
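For reference, the forge.hmac module dropped from the bundle above exposes the start/update/getMac flow its doc comments describe. A short usage sketch against node-forge (illustrative, not part of this patch):

// Sketch of the removed forge.hmac API, per the doc comments above.
const forge = require('node-forge')

const hmac = forge.hmac.create()
hmac.start('sha256', 'secret-key')        // builtin digest name plus a string key
hmac.update('message to authenticate')
console.log(hmac.digest().toHex())        // digest() is an alias for getMac()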
- * - * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL, - * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER - * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF - * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT - * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - * - * In addition, the following condition applies: - * - * All redistributions must retain an intact copy of this copyright notice - * and disclaimer. - */ -/* -Address all questions regarding this license to: + let errorData = null + let requestResponses - Tom Wu - tjw@cs.Stanford.EDU -*/ -var forge = __nccwpck_require__(9177); - -module.exports = forge.jsbn = forge.jsbn || {}; - -// Bits per digit -var dbits; - -// JavaScript engine analysis -var canary = 0xdeadbeefcafe; -var j_lm = ((canary&0xffffff)==0xefcafe); - -// (public) Constructor -function BigInteger(a,b,c) { - this.data = []; - if(a != null) - if("number" == typeof a) this.fromNumber(a,b,c); - else if(b == null && "string" != typeof a) this.fromString(a,256); - else this.fromString(a,b); -} -forge.jsbn.BigInteger = BigInteger; - -// return new, unset BigInteger -function nbi() { return new BigInteger(null); } - -// am: Compute w_j += (x*this_i), propagate carries, -// c is initial carry, returns final carry. -// c < 3*dvalue, x < 2*dvalue, this_i < dvalue -// We need to select the fastest one that works in this environment. - -// am1: use a single mult and divide to get the high bits, -// max digit bits should be 26 because -// max internal value = 2*dvalue^2-2*dvalue (< 2^53) -function am1(i,x,w,j,c,n) { - while(--n >= 0) { - var v = x*this.data[i++]+w.data[j]+c; - c = Math.floor(v/0x4000000); - w.data[j++] = v&0x3ffffff; - } - return c; -} -// am2 avoids a big mult-and-extract completely. -// Max digit bits should be <= 30 because we do bitwise ops -// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31) -function am2(i,x,w,j,c,n) { - var xl = x&0x7fff, xh = x>>15; - while(--n >= 0) { - var l = this.data[i]&0x7fff; - var h = this.data[i++]>>15; - var m = xh*l+h*xl; - l = xl*l+((m&0x7fff)<<15)+w.data[j]+(c&0x3fffffff); - c = (l>>>30)+(m>>>15)+xh*h+(c>>>30); - w.data[j++] = l&0x3fffffff; - } - return c; -} -// Alternately, set max digit bits to 28 since some -// browsers slow down when dealing with 32-bit numbers. 
-function am3(i,x,w,j,c,n) { - var xl = x&0x3fff, xh = x>>14; - while(--n >= 0) { - var l = this.data[i]&0x3fff; - var h = this.data[i++]>>14; - var m = xh*l+h*xl; - l = xl*l+((m&0x3fff)<<14)+w.data[j]+c; - c = (l>>28)+(m>>14)+xh*h; - w.data[j++] = l&0xfffffff; - } - return c; -} - -// node.js (no browser) -if(typeof(navigator) === 'undefined') -{ - BigInteger.prototype.am = am3; - dbits = 28; -} else if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) { - BigInteger.prototype.am = am2; - dbits = 30; -} else if(j_lm && (navigator.appName != "Netscape")) { - BigInteger.prototype.am = am1; - dbits = 26; -} else { // Mozilla/Netscape seems to prefer am3 - BigInteger.prototype.am = am3; - dbits = 28; -} - -BigInteger.prototype.DB = dbits; -BigInteger.prototype.DM = ((1<= 0; --i) r.data[i] = this.data[i]; - r.t = this.t; - r.s = this.s; -} - -// (protected) set from integer value x, -DV <= x < DV -function bnpFromInt(x) { - this.t = 1; - this.s = (x<0)?-1:0; - if(x > 0) this.data[0] = x; - else if(x < -1) this.data[0] = x+this.DV; - else this.t = 0; -} - -// return bigint initialized to value -function nbv(i) { var r = nbi(); r.fromInt(i); return r; } - -// (protected) set from string and radix -function bnpFromString(s,b) { - var k; - if(b == 16) k = 4; - else if(b == 8) k = 3; - else if(b == 256) k = 8; // byte array - else if(b == 2) k = 1; - else if(b == 32) k = 5; - else if(b == 4) k = 2; - else { this.fromRadix(s,b); return; } - this.t = 0; - this.s = 0; - var i = s.length, mi = false, sh = 0; - while(--i >= 0) { - var x = (k==8)?s[i]&0xff:intAt(s,i); - if(x < 0) { - if(s.charAt(i) == "-") mi = true; - continue; + try { + requestResponses = this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - mi = false; - if(sh == 0) - this.data[this.t++] = x; - else if(sh+k > this.DB) { - this.data[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh)); - } else - this.data[this.t-1] |= x<= this.DB) sh -= this.DB; - } - if(k == 8 && (s[0]&0x80) != 0) { - this.s = -1; - if(sh > 0) this.data[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this.data[this.t-1] == c) --this.t; -} - -// (public) return string representation in given radix -function bnToString(b) { - if(this.s < 0) return "-"+this.negate().toString(b); - var k; - if(b == 16) k = 4; - else if(b == 8) k = 3; - else if(b == 2) k = 1; - else if(b == 32) k = 5; - else if(b == 4) k = 2; - else return this.toRadix(b); - var km = (1< 0) { - if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); } - while(i >= 0) { - if(p < k) { - d = (this.data[i]&((1<>(p+=this.DB-k); + + queueMicrotask(() => { + if (errorData === null) { + cacheJobPromise.resolve(!!requestResponses?.length) } else { - d = (this.data[i]>>(p-=k))&km; - if(p <= 0) { p += this.DB; --i; } + cacheJobPromise.reject(errorData) } - if(d > 0) m = true; - if(m) r += int2char(d); - } - } - return m?r:"0"; -} - -// (public) -this -function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; } + }) -// (public) |this| -function bnAbs() { return (this.s<0)?this.negate():this; } + return cacheJobPromise.promise + } -// (public) return + if this > a, - if this < a, 0 if equal -function bnCompareTo(a) { - var r = this.s-a.s; - if(r != 0) return r; - var i = this.t; - r = i-a.t; - if(r != 0) return (this.s<0)?-r:r; - while(--i >= 0) if((r=this.data[i]-a.data[i]) != 0) return r; - return 0; -} + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys + * @param {any} request + * @param {import('../../types/cache').CacheQueryOptions} 
options + * @returns {readonly Request[]} + */ + async keys (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) -// returns bit length of the integer x -function nbits(x) { - var r = 1, t; - if((t=x>>>16) != 0) { x = t; r += 16; } - if((t=x>>8) != 0) { x = t; r += 8; } - if((t=x>>4) != 0) { x = t; r += 4; } - if((t=x>>2) != 0) { x = t; r += 2; } - if((t=x>>1) != 0) { x = t; r += 1; } - return r; -} + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) -// (public) return the number of bits in "this" -function bnBitLength() { - if(this.t <= 0) return 0; - return this.DB*(this.t-1)+nbits(this.data[this.t-1]^(this.s&this.DM)); -} + // 1. + let r = null -// (protected) r = this << n*DB -function bnpDLShiftTo(n,r) { - var i; - for(i = this.t-1; i >= 0; --i) r.data[i+n] = this.data[i]; - for(i = n-1; i >= 0; --i) r.data[i] = 0; - r.t = this.t+n; - r.s = this.s; -} - -// (protected) r = this >> n*DB -function bnpDRShiftTo(n,r) { - for(var i = n; i < this.t; ++i) r.data[i-n] = this.data[i]; - r.t = Math.max(this.t-n,0); - r.s = this.s; -} - -// (protected) r = this << n -function bnpLShiftTo(n,r) { - var bs = n%this.DB; - var cbs = this.DB-bs; - var bm = (1<= 0; --i) { - r.data[i+ds+1] = (this.data[i]>>cbs)|c; - c = (this.data[i]&bm)<= 0; --i) r.data[i] = 0; - r.data[ds] = c; - r.t = this.t+ds+1; - r.s = this.s; - r.clamp(); -} - -// (protected) r = this >> n -function bnpRShiftTo(n,r) { - r.s = this.s; - var ds = Math.floor(n/this.DB); - if(ds >= this.t) { r.t = 0; return; } - var bs = n%this.DB; - var cbs = this.DB-bs; - var bm = (1<>bs; - for(var i = ds+1; i < this.t; ++i) { - r.data[i-ds-1] |= (this.data[i]&bm)<>bs; - } - if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<>= this.DB; - } - if(a.t < this.t) { - c -= a.s; - while(i < this.t) { - c += this.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c += this.s; - } else { - c += this.s; - while(i < a.t) { - c -= a.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c -= a.s; - } - r.s = (c<0)?-1:0; - if(c < -1) r.data[i++] = this.DV+c; - else if(c > 0) r.data[i++] = c; - r.t = i; - r.clamp(); -} - -// (protected) r = this * a, r != this,a (HAC 14.12) -// "this" should be the larger one if appropriate. -function bnpMultiplyTo(a,r) { - var x = this.abs(), y = a.abs(); - var i = x.t; - r.t = i+y.t; - while(--i >= 0) r.data[i] = 0; - for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t); - r.s = 0; - r.clamp(); - if(this.s != a.s) BigInteger.ZERO.subTo(r,r); -} - -// (protected) r = this^2, r != this (HAC 14.16) -function bnpSquareTo(r) { - var x = this.abs(); - var i = r.t = 2*x.t; - while(--i >= 0) r.data[i] = 0; - for(i = 0; i < x.t-1; ++i) { - var c = x.am(i,x.data[i],r,2*i,0,1); - if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) { - r.data[i+x.t] -= x.DV; - r.data[i+x.t+1] = 1; - } - } - if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1); - r.s = 0; - r.clamp(); -} - -// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20) -// r != q, this != m. q or r may be null. 
-function bnpDivRemTo(m,q,r) { - var pm = m.abs(); - if(pm.t <= 0) return; - var pt = this.abs(); - if(pt.t < pm.t) { - if(q != null) q.fromInt(0); - if(r != null) this.copyTo(r); - return; - } - if(r == null) r = nbi(); - var y = nbi(), ts = this.s, ms = m.s; - var nsh = this.DB-nbits(pm.data[pm.t-1]); // normalize modulus - if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); } - var ys = y.t; - var y0 = y.data[ys-1]; - if(y0 == 0) return; - var yt = y0*(1<1)?y.data[ys-2]>>this.F2:0); - var d1 = this.FV/yt, d2 = (1<= 0) { - r.data[r.t++] = 1; - r.subTo(t,r); - } - BigInteger.ONE.dlShiftTo(ys,t); - t.subTo(y,y); // "negative" y so we can replace sub with am later - while(y.t < ys) y.data[y.t++] = 0; - while(--j >= 0) { - // Estimate quotient digit - var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2); - if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out - y.dlShiftTo(j,t); - r.subTo(t,r); - while(r.data[i] < --qd) r.subTo(t,r); - } - } - if(q != null) { - r.drShiftTo(ys,q); - if(ts != ms) BigInteger.ZERO.subTo(q,q); - } - r.t = ys; - r.clamp(); - if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder - if(ts < 0) BigInteger.ZERO.subTo(r,r); -} - -// (public) this mod a -function bnMod(a) { - var r = nbi(); - this.abs().divRemTo(a,null,r); - if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r); - return r; -} + // 2. + if (request !== undefined) { + // 2.1 + if (request instanceof Request) { + // 2.1.1 + r = request[kState] -// Modular reduction using "classic" algorithm -function Classic(m) { this.m = m; } -function cConvert(x) { - if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m); - else return x; -} -function cRevert(x) { return x; } -function cReduce(x) { x.divRemTo(this.m,null,x); } -function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } -function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); } - -Classic.prototype.convert = cConvert; -Classic.prototype.revert = cRevert; -Classic.prototype.reduce = cReduce; -Classic.prototype.mulTo = cMulTo; -Classic.prototype.sqrTo = cSqrTo; - -// (protected) return "-1/this % 2^DB"; useful for Mont. reduction -// justification: -// xy == 1 (mod m) -// xy = 1+km -// xy(2-xy) = (1+km)(1-km) -// x[y(2-xy)] = 1-k^2m^2 -// x[y(2-xy)] == 1 (mod m^2) -// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2 -// should reduce x and y(2-xy) by m^2 at each step to keep size bounded. -// JS multiply "overflows" differently from C/C++, so care is needed here. 
-function bnpInvDigit() { - if(this.t < 1) return 0; - var x = this.data[0]; - if((x&1) == 0) return 0; - var y = x&3; // y == 1/x mod 2^2 - y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4 - y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8 - y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16 - // last step - calculate inverse mod DV directly; - // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints - y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits - // we really want the negative inverse, and -DV < y < DV - return (y>0)?this.DV-y:-y; -} - -// Montgomery reduction -function Montgomery(m) { - this.m = m; - this.mp = m.invDigit(); - this.mpl = this.mp&0x7fff; - this.mph = this.mp>>15; - this.um = (1<<(m.DB-15))-1; - this.mt2 = 2*m.t; -} - -// xR mod m -function montConvert(x) { - var r = nbi(); - x.abs().dlShiftTo(this.m.t,r); - r.divRemTo(this.m,null,r); - if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r); - return r; -} + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { // 2.2 + r = new Request(request)[kState] + } + } -// x/R mod m -function montRevert(x) { - var r = nbi(); - x.copyTo(r); - this.reduce(r); - return r; -} + // 4. + const promise = createDeferredPromise() -// x = x/R mod m (HAC 14.32) -function montReduce(x) { - while(x.t <= this.mt2) // pad x so am has enough room later - x.data[x.t++] = 0; - for(var i = 0; i < this.m.t; ++i) { - // faster way of calculating u0 = x.data[i]*mp mod DV - var j = x.data[i]&0x7fff; - var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM; - // use am to combine the multiply-shift-add into one call - j = i+this.m.t; - x.data[j] += this.m.am(0,u0,x,i,0,this.m.t); - // propagate carry - while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; } - } - x.clamp(); - x.drShiftTo(this.m.t,x); - if(x.compareTo(this.m) >= 0) x.subTo(this.m,x); -} - -// r = "x^2/R mod m"; x != r -function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); } - -// r = "xy/R mod m"; x,y != r -function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } - -Montgomery.prototype.convert = montConvert; -Montgomery.prototype.revert = montRevert; -Montgomery.prototype.reduce = montReduce; -Montgomery.prototype.mulTo = montMulTo; -Montgomery.prototype.sqrTo = montSqrTo; - -// (protected) true iff this is even -function bnpIsEven() { return ((this.t>0)?(this.data[0]&1):this.s) == 0; } - -// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79) -function bnpExp(e,z) { - if(e > 0xffffffff || e < 1) return BigInteger.ONE; - var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1; - g.copyTo(r); - while(--i >= 0) { - z.sqrTo(r,r2); - if((e&(1< 0) z.mulTo(r2,g,r); - else { var t = r; r = r2; r2 = t; } - } - return z.revert(r); -} - -// (public) this^e % m, 0 <= e < 2^32 -function bnModPowInt(e,m) { - var z; - if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m); - return this.exp(e,z); -} - -// protected -BigInteger.prototype.copyTo = bnpCopyTo; -BigInteger.prototype.fromInt = bnpFromInt; -BigInteger.prototype.fromString = bnpFromString; -BigInteger.prototype.clamp = bnpClamp; -BigInteger.prototype.dlShiftTo = bnpDLShiftTo; -BigInteger.prototype.drShiftTo = bnpDRShiftTo; -BigInteger.prototype.lShiftTo = bnpLShiftTo; -BigInteger.prototype.rShiftTo = bnpRShiftTo; -BigInteger.prototype.subTo = bnpSubTo; -BigInteger.prototype.multiplyTo = bnpMultiplyTo; -BigInteger.prototype.squareTo = bnpSquareTo; 
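The "justification" comment above invDigit() is the usual Newton/Hensel lifting argument: for odd x, if y ≡ 1/x (mod 2^k) then y·(2 − x·y) ≡ 1/x (mod 2^(2k)), so each step doubles the precision from 2 bits up to a full digit. A worked check with BigInt, purely for illustration:

// Verify the precision-doubling step the removed invDigit() relies on.
const x = 0x3ffffffn | 1n        // any odd value works
let y = x & 3n                   // odd numbers are their own inverse mod 4
for (const bits of [4n, 8n, 16n, 32n]) {
  const mod = 1n << bits
  y = (y * (2n - (x * y) % mod)) % mod
  if (y < 0n) y += mod           // keep the representative in [0, mod)
}
console.log((x * y) % (1n << 32n) === 1n)   // true: y is 1/x mod 2^32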
-BigInteger.prototype.divRemTo = bnpDivRemTo; -BigInteger.prototype.invDigit = bnpInvDigit; -BigInteger.prototype.isEven = bnpIsEven; -BigInteger.prototype.exp = bnpExp; - -// public -BigInteger.prototype.toString = bnToString; -BigInteger.prototype.negate = bnNegate; -BigInteger.prototype.abs = bnAbs; -BigInteger.prototype.compareTo = bnCompareTo; -BigInteger.prototype.bitLength = bnBitLength; -BigInteger.prototype.mod = bnMod; -BigInteger.prototype.modPowInt = bnModPowInt; - -// "constants" -BigInteger.ZERO = nbv(0); -BigInteger.ONE = nbv(1); - -// jsbn2 lib - -//Copyright (c) 2005-2009 Tom Wu -//All Rights Reserved. -//See "LICENSE" for details (See jsbn.js for LICENSE). - -//Extended JavaScript BN functions, required for RSA private ops. - -//Version 1.1: new BigInteger("0", 10) returns "proper" zero - -//(public) -function bnClone() { var r = nbi(); this.copyTo(r); return r; } - -//(public) return value as integer -function bnIntValue() { -if(this.s < 0) { - if(this.t == 1) return this.data[0]-this.DV; - else if(this.t == 0) return -1; -} else if(this.t == 1) return this.data[0]; -else if(this.t == 0) return 0; -// assumes 16 < DB < 32 -return ((this.data[1]&((1<<(32-this.DB))-1))<>24; } - -//(public) return value as short (assumes DB>=16) -function bnShortValue() { return (this.t==0)?this.s:(this.data[0]<<16)>>16; } - -//(protected) return x s.t. r^x < DV -function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); } - -//(public) 0 if this == 0, 1 if this > 0 -function bnSigNum() { -if(this.s < 0) return -1; -else if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0; -else return 1; -} - -//(protected) convert to radix string -function bnpToRadix(b) { -if(b == null) b = 10; -if(this.signum() == 0 || b < 2 || b > 36) return "0"; -var cs = this.chunkSize(b); -var a = Math.pow(b,cs); -var d = nbv(a), y = nbi(), z = nbi(), r = ""; -this.divRemTo(d,y,z); -while(y.signum() > 0) { - r = (a+z.intValue()).toString(b).substr(1) + r; - y.divRemTo(d,y,z); -} -return z.intValue().toString(b) + r; -} - -//(protected) convert from radix string -function bnpFromRadix(s,b) { -this.fromInt(0); -if(b == null) b = 10; -var cs = this.chunkSize(b); -var d = Math.pow(b,cs), mi = false, j = 0, w = 0; -for(var i = 0; i < s.length; ++i) { - var x = intAt(s,i); - if(x < 0) { - if(s.charAt(i) == "-" && this.signum() == 0) mi = true; - continue; - } - w = b*w+x; - if(++j >= cs) { - this.dMultiply(d); - this.dAddOffset(w,0); - j = 0; - w = 0; - } -} -if(j > 0) { - this.dMultiply(Math.pow(b,j)); - this.dAddOffset(w,0); -} -if(mi) BigInteger.ZERO.subTo(this,this); -} - -//(protected) alternate constructor -function bnpFromNumber(a,b,c) { -if("number" == typeof b) { - // new BigInteger(int,int,RNG) - if(a < 2) this.fromInt(1); - else { - this.fromNumber(a,c); - if(!this.testBit(a-1)) // force MSB set - this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this); - if(this.isEven()) this.dAddOffset(1,0); // force odd - while(!this.isProbablePrime(b)) { - this.dAddOffset(2,0); - if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this); - } - } -} else { - // new BigInteger(int,RNG) - var x = new Array(), t = a&7; - x.length = (a>>3)+1; - b.nextBytes(x); - if(t > 0) x[0] &= ((1< 0) { - if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p) - r[k++] = d|(this.s<<(this.DB-p)); - while(i >= 0) { - if(p < 8) { - d = (this.data[i]&((1<>(p+=this.DB-8); - } else { - d = (this.data[i]>>(p-=8))&0xff; - if(p <= 0) { p += this.DB; --i; } - } - if((d&0x80) != 0) d |= -256; - if(k == 
0 && (this.s&0x80) != (d&0x80)) ++k; - if(k > 0 || d != this.s) r[k++] = d; - } -} -return r; -} - -function bnEquals(a) { return(this.compareTo(a)==0); } -function bnMin(a) { return(this.compareTo(a)<0)?this:a; } -function bnMax(a) { return(this.compareTo(a)>0)?this:a; } - -//(protected) r = this op a (bitwise) -function bnpBitwiseTo(a,op,r) { -var i, f, m = Math.min(a.t,this.t); -for(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]); -if(a.t < this.t) { - f = a.s&this.DM; - for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f); - r.t = this.t; -} else { - f = this.s&this.DM; - for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]); - r.t = a.t; -} -r.s = op(this.s,a.s); -r.clamp(); -} + // 5. + // 5.1 + const requests = [] -//(public) this & a -function op_and(x,y) { return x&y; } -function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; } + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + // 5.2.1.1 + requests.push(requestResponse[0]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + // 5.3.2.1 + requests.push(requestResponse[0]) + } + } -//(public) this | a -function op_or(x,y) { return x|y; } -function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; } + // 5.4 + queueMicrotask(() => { + // 5.4.1 + const requestList = [] -//(public) this ^ a -function op_xor(x,y) { return x^y; } -function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; } - -//(public) this & ~a -function op_andnot(x,y) { return x&~y; } -function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; } - -//(public) ~this -function bnNot() { -var r = nbi(); -for(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i]; -r.t = this.t; -r.s = ~this.s; -return r; -} - -//(public) this << n -function bnShiftLeft(n) { -var r = nbi(); -if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r); -return r; -} - -//(public) this >> n -function bnShiftRight(n) { -var r = nbi(); -if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r); -return r; -} - -//return index of lowest 1-bit in x, x < 2^31 -function lbit(x) { -if(x == 0) return -1; -var r = 0; -if((x&0xffff) == 0) { x >>= 16; r += 16; } -if((x&0xff) == 0) { x >>= 8; r += 8; } -if((x&0xf) == 0) { x >>= 4; r += 4; } -if((x&3) == 0) { x >>= 2; r += 2; } -if((x&1) == 0) ++r; -return r; -} - -//(public) returns index of lowest 1-bit (or -1 if none) -function bnGetLowestSetBit() { -for(var i = 0; i < this.t; ++i) - if(this.data[i] != 0) return i*this.DB+lbit(this.data[i]); -if(this.s < 0) return this.t*this.DB; -return -1; -} - -//return number of 1 bits in x -function cbit(x) { -var r = 0; -while(x != 0) { x &= x-1; ++r; } -return r; -} - -//(public) return number of set bits -function bnBitCount() { -var r = 0, x = this.s&this.DM; -for(var i = 0; i < this.t; ++i) r += cbit(this.data[i]^x); -return r; -} - -//(public) true iff nth bit is set -function bnTestBit(n) { -var j = Math.floor(n/this.DB); -if(j >= this.t) return(this.s!=0); -return((this.data[j]&(1<<(n%this.DB)))!=0); -} - -//(protected) this op (1<>= this.DB; -} -if(a.t < this.t) { - c += a.s; - while(i < this.t) { - c += this.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c += this.s; -} else { - c += this.s; - while(i < a.t) { - c += a.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c += a.s; -} -r.s = (c<0)?-1:0; -if(c > 0) r.data[i++] = c; -else 
if(c < -1) r.data[i++] = this.DV+c; -r.t = i; -r.clamp(); -} - -//(public) this + a -function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; } - -//(public) this - a -function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; } - -//(public) this * a -function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; } - -//(public) this / a -function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; } - -//(public) this % a -function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; } - -//(public) [this/a,this%a] -function bnDivideAndRemainder(a) { -var q = nbi(), r = nbi(); -this.divRemTo(a,q,r); -return new Array(q,r); -} - -//(protected) this *= n, this >= 0, 1 < n < DV -function bnpDMultiply(n) { -this.data[this.t] = this.am(0,n-1,this,0,0,this.t); -++this.t; -this.clamp(); -} - -//(protected) this += n << w words, this >= 0 -function bnpDAddOffset(n,w) { -if(n == 0) return; -while(this.t <= w) this.data[this.t++] = 0; -this.data[w] += n; -while(this.data[w] >= this.DV) { - this.data[w] -= this.DV; - if(++w >= this.t) this.data[this.t++] = 0; - ++this.data[w]; -} -} - -//A "null" reducer -function NullExp() {} -function nNop(x) { return x; } -function nMulTo(x,y,r) { x.multiplyTo(y,r); } -function nSqrTo(x,r) { x.squareTo(r); } - -NullExp.prototype.convert = nNop; -NullExp.prototype.revert = nNop; -NullExp.prototype.mulTo = nMulTo; -NullExp.prototype.sqrTo = nSqrTo; - -//(public) this^e -function bnPow(e) { return this.exp(e,new NullExp()); } - -//(protected) r = lower n words of "this * a", a.t <= n -//"this" should be the larger one if appropriate. -function bnpMultiplyLowerTo(a,n,r) { -var i = Math.min(this.t+a.t,n); -r.s = 0; // assumes a,this >= 0 -r.t = i; -while(i > 0) r.data[--i] = 0; -var j; -for(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t); -for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i); -r.clamp(); -} - -//(protected) r = "this * a" without lower n words, n > 0 -//"this" should be the larger one if appropriate. 
-function bnpMultiplyUpperTo(a,n,r) { ---n; -var i = r.t = this.t+a.t-n; -r.s = 0; // assumes a,this >= 0 -while(--i >= 0) r.data[i] = 0; -for(i = Math.max(n-this.t,0); i < a.t; ++i) - r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n); -r.clamp(); -r.drShiftTo(1,r); -} - -//Barrett modular reduction -function Barrett(m) { -// setup Barrett -this.r2 = nbi(); -this.q3 = nbi(); -BigInteger.ONE.dlShiftTo(2*m.t,this.r2); -this.mu = this.r2.divide(m); -this.m = m; -} - -function barrettConvert(x) { -if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m); -else if(x.compareTo(this.m) < 0) return x; -else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; } -} - -function barrettRevert(x) { return x; } - -//x = x mod m (HAC 14.42) -function barrettReduce(x) { -x.drShiftTo(this.m.t-1,this.r2); -if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); } -this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3); -this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2); -while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1); -x.subTo(this.r2,x); -while(x.compareTo(this.m) >= 0) x.subTo(this.m,x); -} - -//r = x^2 mod m; x != r -function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); } - -//r = x*y mod m; x,y != r -function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } - -Barrett.prototype.convert = barrettConvert; -Barrett.prototype.revert = barrettRevert; -Barrett.prototype.reduce = barrettReduce; -Barrett.prototype.mulTo = barrettMulTo; -Barrett.prototype.sqrTo = barrettSqrTo; - -//(public) this^e % m (HAC 14.85) -function bnModPow(e,m) { -var i = e.bitLength(), k, r = nbv(1), z; -if(i <= 0) return r; -else if(i < 18) k = 1; -else if(i < 48) k = 3; -else if(i < 144) k = 4; -else if(i < 768) k = 5; -else k = 6; -if(i < 8) - z = new Classic(m); -else if(m.isEven()) - z = new Barrett(m); -else - z = new Montgomery(m); - -// precomputation -var g = new Array(), n = 3, k1 = k-1, km = (1< 1) { - var g2 = nbi(); - z.sqrTo(g[1],g2); - while(n <= km) { - g[n] = nbi(); - z.mulTo(g2,g[n-2],g[n]); - n += 2; - } -} - -var j = e.t-1, w, is1 = true, r2 = nbi(), t; -i = nbits(e.data[j])-1; -while(j >= 0) { - if(i >= k1) w = (e.data[j]>>(i-k1))&km; - else { - w = (e.data[j]&((1<<(i+1))-1))<<(k1-i); - if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1); - } - - n = k; - while((w&1) == 0) { w >>= 1; --n; } - if((i -= n) < 0) { i += this.DB; --j; } - if(is1) { // ret == 1, don't bother squaring or multiplying it - g[w].copyTo(r); - is1 = false; - } else { - while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; } - if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; } - z.mulTo(r2,g[w],r); - } - - while(j >= 0 && (e.data[j]&(1< 0) { - x.rShiftTo(g,x); - y.rShiftTo(g,y); -} -while(x.signum() > 0) { - if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x); - if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y); - if(x.compareTo(y) >= 0) { - x.subTo(y,x); - x.rShiftTo(1,x); - } else { - y.subTo(x,y); - y.rShiftTo(1,y); - } -} -if(g > 0) y.lShiftTo(g,y); -return y; -} - -//(protected) this % n, n < 2^26 -function bnpModInt(n) { -if(n <= 0) return 0; -var d = this.DV%n, r = (this.s<0)?n-1:0; -if(this.t > 0) - if(d == 0) r = this.data[0]%n; - else for(var i = this.t-1; i >= 0; --i) r = (d*r+this.data[i])%n; -return r; -} - -//(public) 1/this % m (HAC 14.61) -function bnModInverse(m) { -var ac = m.isEven(); -if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO; -var u = m.clone(), v = this.clone(); -var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1); -while(u.signum() != 0) { - while(u.isEven()) { - 
u.rShiftTo(1,u); - if(ac) { - if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); } - a.rShiftTo(1,a); - } else if(!b.isEven()) b.subTo(m,b); - b.rShiftTo(1,b); - } - while(v.isEven()) { - v.rShiftTo(1,v); - if(ac) { - if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); } - c.rShiftTo(1,c); - } else if(!d.isEven()) d.subTo(m,d); - d.rShiftTo(1,d); - } - if(u.compareTo(v) >= 0) { - u.subTo(v,u); - if(ac) a.subTo(c,a); - b.subTo(d,b); - } else { - v.subTo(u,v); - if(ac) c.subTo(a,c); - d.subTo(b,d); - } -} -if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO; -if(d.compareTo(m) >= 0) return d.subtract(m); -if(d.signum() < 0) d.addTo(m,d); else return d; -if(d.signum() < 0) return d.add(m); else return d; -} - -var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509]; -var lplim = (1<<26)/lowprimes[lowprimes.length-1]; - -//(public) test primality with certainty >= 1-.5^t -function bnIsProbablePrime(t) { -var i, x = this.abs(); -if(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) { - for(i = 0; i < lowprimes.length; ++i) - if(x.data[0] == lowprimes[i]) return true; - return false; -} -if(x.isEven()) return false; -i = 1; -while(i < lowprimes.length) { - var m = lowprimes[i], j = i+1; - while(j < lowprimes.length && m < lplim) m *= lowprimes[j++]; - m = x.modInt(m); - while(i < j) if(m%lowprimes[i++] == 0) return false; -} -return x.millerRabin(t); -} - -//(protected) true if probably prime (HAC 4.24, Miller-Rabin) -function bnpMillerRabin(t) { -var n1 = this.subtract(BigInteger.ONE); -var k = n1.getLowestSetBit(); -if(k <= 0) return false; -var r = n1.shiftRight(k); -var prng = bnGetPrng(); -var a; -for(var i = 0; i < t; ++i) { - // select witness 'a' at random from between 1 and n1 - do { - a = new BigInteger(this.bitLength(), prng); - } - while(a.compareTo(BigInteger.ONE) <= 0 || a.compareTo(n1) >= 0); - var y = a.modPow(r,this); - if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) { - var j = 1; - while(j++ < k && y.compareTo(n1) != 0) { - y = y.modPowInt(2,this); - if(y.compareTo(BigInteger.ONE) == 0) return false; - } - if(y.compareTo(n1) != 0) return false; - } -} -return true; -} - -// get pseudo random number generator -function bnGetPrng() { - // create prng with api that matches BigInteger secure random - return { - // x is an array to fill with bytes - nextBytes: function(x) { - for(var i = 0; i < x.length; ++i) { - x[i] = Math.floor(Math.random() * 0x0100); + // 5.4.2.1 + requestList.push(requestObject) } - } - }; -} -//protected -BigInteger.prototype.chunkSize = bnpChunkSize; -BigInteger.prototype.toRadix = bnpToRadix; -BigInteger.prototype.fromRadix = bnpFromRadix; -BigInteger.prototype.fromNumber = bnpFromNumber; -BigInteger.prototype.bitwiseTo = bnpBitwiseTo; -BigInteger.prototype.changeBit = bnpChangeBit; -BigInteger.prototype.addTo = bnpAddTo; -BigInteger.prototype.dMultiply = bnpDMultiply; -BigInteger.prototype.dAddOffset = bnpDAddOffset; -BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo; -BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo; -BigInteger.prototype.modInt = bnpModInt; -BigInteger.prototype.millerRabin = bnpMillerRabin; - -//public -BigInteger.prototype.clone = bnClone; 
-BigInteger.prototype.intValue = bnIntValue; -BigInteger.prototype.byteValue = bnByteValue; -BigInteger.prototype.shortValue = bnShortValue; -BigInteger.prototype.signum = bnSigNum; -BigInteger.prototype.toByteArray = bnToByteArray; -BigInteger.prototype.equals = bnEquals; -BigInteger.prototype.min = bnMin; -BigInteger.prototype.max = bnMax; -BigInteger.prototype.and = bnAnd; -BigInteger.prototype.or = bnOr; -BigInteger.prototype.xor = bnXor; -BigInteger.prototype.andNot = bnAndNot; -BigInteger.prototype.not = bnNot; -BigInteger.prototype.shiftLeft = bnShiftLeft; -BigInteger.prototype.shiftRight = bnShiftRight; -BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit; -BigInteger.prototype.bitCount = bnBitCount; -BigInteger.prototype.testBit = bnTestBit; -BigInteger.prototype.setBit = bnSetBit; -BigInteger.prototype.clearBit = bnClearBit; -BigInteger.prototype.flipBit = bnFlipBit; -BigInteger.prototype.add = bnAdd; -BigInteger.prototype.subtract = bnSubtract; -BigInteger.prototype.multiply = bnMultiply; -BigInteger.prototype.divide = bnDivide; -BigInteger.prototype.remainder = bnRemainder; -BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder; -BigInteger.prototype.modPow = bnModPow; -BigInteger.prototype.modInverse = bnModInverse; -BigInteger.prototype.pow = bnPow; -BigInteger.prototype.gcd = bnGCD; -BigInteger.prototype.isProbablePrime = bnIsProbablePrime; - -//BigInteger interfaces not implemented in jsbn: - -//BigInteger(int signum, byte[] magnitude) -//double doubleValue() -//float floatValue() -//int hashCode() -//long longValue() -//static BigInteger valueOf(long val) + // 5.4.3 + promise.resolve(Object.freeze(requestList)) + }) + return promise.promise + } -/***/ }), + /** + * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm + * @param {CacheBatchOperation[]} operations + * @returns {requestResponseList} + */ + #batchCacheOperations (operations) { + // 1. + const cache = this.#relevantRequestResponseList -/***/ 5173: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 2. + const backupCache = [...cache] -/** - * Javascript implementation of RSA-KEM. - * - * @author Lautaro Cozzani Rodriguez - * @author Dave Longley - * - * Copyright (c) 2014 Lautaro Cozzani - * Copyright (c) 2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(7821); -__nccwpck_require__(7052); + // 3. + const addedItems = [] -module.exports = forge.kem = forge.kem || {}; + // 4.1 + const resultList = [] -var BigInteger = forge.jsbn.BigInteger; + try { + // 4.2 + for (const operation of operations) { + // 4.2.1 + if (operation.type !== 'delete' && operation.type !== 'put') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'operation type does not match "delete" or "put"' + }) + } -/** - * The API for the RSA Key Encapsulation Mechanism (RSA-KEM) from ISO 18033-2. - */ -forge.kem.rsa = {}; + // 4.2.2 + if (operation.type === 'delete' && operation.response != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'delete operation should not have an associated response' + }) + } -/** - * Creates an RSA KEM API object for generating a secret asymmetric key. - * - * The symmetric key may be generated via a call to 'encrypt', which will - * produce a ciphertext to be transmitted to the recipient and a key to be - * kept secret. 
The ciphertext is a parameter to be passed to 'decrypt' which - * will produce the same secret key for the recipient to use to decrypt a - * message that was encrypted with the secret key. - * - * @param kdf the KDF API to use (eg: new forge.kem.kdf1()). - * @param options the options to use. - * [prng] a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". - */ -forge.kem.rsa.create = function(kdf, options) { - options = options || {}; - var prng = options.prng || forge.random; + // 4.2.3 + if (this.#queryCache(operation.request, operation.options, addedItems).length) { + throw new DOMException('???', 'InvalidStateError') + } - var kem = {}; + // 4.2.4 + let requestResponses - /** - * Generates a secret key and its encapsulation. - * - * @param publicKey the RSA public key to encrypt with. - * @param keyLength the length, in bytes, of the secret key to generate. - * - * @return an object with: - * encapsulation: the ciphertext for generating the secret key, as a - * binary-encoded string of bytes. - * key: the secret key to use for encrypting a message. - */ - kem.encrypt = function(publicKey, keyLength) { - // generate a random r where 1 < r < n - var byteLength = Math.ceil(publicKey.n.bitLength() / 8); - var r; - do { - r = new BigInteger( - forge.util.bytesToHex(prng.getBytesSync(byteLength)), - 16).mod(publicKey.n); - } while(r.compareTo(BigInteger.ONE) <= 0); - - // prepend r with zeros - r = forge.util.hexToBytes(r.toString(16)); - var zeros = byteLength - r.length; - if(zeros > 0) { - r = forge.util.fillString(String.fromCharCode(0), zeros) + r; - } - - // encrypt the random - var encapsulation = publicKey.encrypt(r, 'NONE'); - - // generate the secret key - var key = kdf.generate(r, keyLength); - - return {encapsulation: encapsulation, key: key}; - }; + // 4.2.5 + if (operation.type === 'delete') { + // 4.2.5.1 + requestResponses = this.#queryCache(operation.request, operation.options) - /** - * Decrypts an encapsulated secret key. - * - * @param privateKey the RSA private key to decrypt with. - * @param encapsulation the ciphertext for generating the secret key, as - * a binary-encoded string of bytes. - * @param keyLength the length, in bytes, of the secret key to generate. - * - * @return the secret key as a binary-encoded string of bytes. - */ - kem.decrypt = function(privateKey, encapsulation, keyLength) { - // decrypt the encapsulation and generate the secret key - var r = privateKey.decrypt(encapsulation, 'NONE'); - return kdf.generate(r, keyLength); - }; + // TODO: the spec is wrong, this is needed to pass WPTs + if (requestResponses.length === 0) { + return [] + } - return kem; -}; + // 4.2.5.2 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) -// TODO: add forge.kem.kdf.create('KDF1', {md: ..., ...}) API? + // 4.2.5.2.1 + cache.splice(idx, 1) + } + } else if (operation.type === 'put') { // 4.2.6 + // 4.2.6.1 + if (operation.response == null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'put operation should have an associated response' + }) + } -/** - * Creates a key derivation API object that implements KDF1 per ISO 18033-2. - * - * @param md the hash API to use. - * @param [digestLength] an optional digest length that must be positive and - * less than or equal to md.digestLength. - * - * @return a KDF1 API object. 
- */ -forge.kem.kdf1 = function(md, digestLength) { - _createKDF(this, md, 0, digestLength || md.digestLength); -}; + // 4.2.6.2 + const r = operation.request -/** - * Creates a key derivation API object that implements KDF2 per ISO 18033-2. - * - * @param md the hash API to use. - * @param [digestLength] an optional digest length that must be positive and - * less than or equal to md.digestLength. - * - * @return a KDF2 API object. - */ -forge.kem.kdf2 = function(md, digestLength) { - _createKDF(this, md, 1, digestLength || md.digestLength); -}; + // 4.2.6.3 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'expected http or https scheme' + }) + } -/** - * Creates a KDF1 or KDF2 API object. - * - * @param md the hash API to use. - * @param counterStart the starting index for the counter. - * @param digestLength the digest length to use. - * - * @return the KDF API object. - */ -function _createKDF(kdf, md, counterStart, digestLength) { - /** - * Generate a key of the specified length. - * - * @param x the binary-encoded byte string to generate a key from. - * @param length the number of bytes to generate (the size of the key). - * - * @return the key as a binary-encoded string. - */ - kdf.generate = function(x, length) { - var key = new forge.util.ByteBuffer(); + // 4.2.6.4 + if (r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'not get method' + }) + } - // run counter from counterStart to ceil(length / Hash.len) - var k = Math.ceil(length / digestLength) + counterStart; + // 4.2.6.5 + if (operation.options != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'options must not be defined' + }) + } - var c = new forge.util.ByteBuffer(); - for(var i = counterStart; i < k; ++i) { - // I2OSP(i, 4): convert counter to an octet string of 4 octets - c.putInt32(i); + // 4.2.6.6 + requestResponses = this.#queryCache(operation.request) - // digest 'x' and the counter and add the result to the key - md.start(); - md.update(x + c.getBytes()); - var hash = md.digest(); - key.putBytes(hash.getBytes(digestLength)); - } + // 4.2.6.7 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) - // truncate to the correct key length - key.truncate(key.length() - length); - return key.getBytes(); - }; -} + // 4.2.6.7.1 + cache.splice(idx, 1) + } + // 4.2.6.8 + cache.push([operation.request, operation.response]) -/***/ }), + // 4.2.6.10 + addedItems.push([operation.request, operation.response]) + } -/***/ 9994: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 4.2.7 + resultList.push([operation.request, operation.response]) + } -/** - * Cross-browser support for logging in a web application. - * - * @author David I. Lehn - * - * Copyright (c) 2008-2013 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); + // 4.3 + return resultList + } catch (e) { // 5. + // 5.1 + this.#relevantRequestResponseList.length = 0 -/* LOG API */ -module.exports = forge.log = forge.log || {}; + // 5.2 + this.#relevantRequestResponseList = backupCache -/** - * Application logging system. - * - * Each logger level available as it's own function of the form: - * forge.log.level(category, args...) - * The category is an arbitrary string, and the args are the same as - * Firebug's console.log API. 
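The RSA-KEM module (forge.kem) removed above documents its own encrypt/decrypt flow in the doc comments. A brief sketch of that flow with node-forge, assuming its usual pki.rsa.generateKeyPair API and a deliberately small key to keep the illustration fast (not part of this patch):

// Sketch of the removed forge.kem API, following the doc comments above.
const forge = require('node-forge')

const { publicKey, privateKey } = forge.pki.rsa.generateKeyPair({ bits: 1024 })
const kdf = new forge.kem.kdf1(forge.md.sha1.create())   // KDF1 per ISO 18033-2
const kem = forge.kem.rsa.create(kdf)

// Sender: derive a 16-byte secret key plus the ciphertext that encapsulates it.
const { encapsulation, key } = kem.encrypt(publicKey, 16)

// Recipient: recover the same secret key from the encapsulation.
const recovered = kem.decrypt(privateKey, encapsulation, 16)
console.log(recovered === key)   // true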
By default the call will be output as: - * 'LEVEL [category] , args[1], ...' - * This enables proper % formatting via the first argument. - * Each category is enabled by default but can be enabled or disabled with - * the setCategoryEnabled() function. - */ -// list of known levels -forge.log.levels = [ - 'none', 'error', 'warning', 'info', 'debug', 'verbose', 'max']; -// info on the levels indexed by name: -// index: level index -// name: uppercased display name -var sLevelInfo = {}; -// list of loggers -var sLoggers = []; -/** - * Standard console logger. If no console support is enabled this will - * remain null. Check before using. - */ -var sConsoleLogger = null; + // 5.3 + throw e + } + } -// logger flags -/** - * Lock the level at the current value. Used in cases where user config may - * set the level such that only critical messages are seen but more verbose - * messages are needed for debugging or other purposes. - */ -forge.log.LEVEL_LOCKED = (1 << 1); -/** - * Always call log function. By default, the logging system will check the - * message level against logger.level before calling the log function. This - * flag allows the function to do its own check. - */ -forge.log.NO_LEVEL_CHECK = (1 << 2); -/** - * Perform message interpolation with the passed arguments. "%" style - * fields in log messages will be replaced by arguments as needed. Some - * loggers, such as Firebug, may do this automatically. The original log - * message will be available as 'message' and the interpolated version will - * be available as 'fullMessage'. - */ -forge.log.INTERPOLATE = (1 << 3); + /** + * @see https://w3c.github.io/ServiceWorker/#query-cache + * @param {any} requestQuery + * @param {import('../../types/cache').CacheQueryOptions} options + * @param {requestResponseList} targetStorage + * @returns {requestResponseList} + */ + #queryCache (requestQuery, options, targetStorage) { + /** @type {requestResponseList} */ + const resultList = [] -// setup each log level -for(var i = 0; i < forge.log.levels.length; ++i) { - var level = forge.log.levels[i]; - sLevelInfo[level] = { - index: i, - name: level.toUpperCase() - }; -} + const storage = targetStorage ?? this.#relevantRequestResponseList -/** - * Message logger. Will dispatch a message to registered loggers as needed. 
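The #batchCacheOperations implementation added in this hunk snapshots the request/response list before applying the batch and, if any operation throws, restores the snapshot and rethrows (steps 2, 4.3 and 5.1-5.3 above). A compact sketch of that commit-or-rollback shape, simplified from the bundled code (it restores in place, whereas the bundle reassigns its private field):

// Simplified shape of the added batch rollback handling.
function applyBatch (cache, operations, applyOne) {
  const backupCache = [...cache]          // step 2: snapshot before mutating
  const resultList = []
  try {
    for (const operation of operations) {
      resultList.push(applyOne(cache, operation))   // mutate the live list
    }
    return resultList                     // step 4.3: commit
  } catch (e) {
    cache.length = 0                      // steps 5.1-5.2: restore the snapshot
    cache.push(...backupCache)
    throw e                               // step 5.3: surface the failure
  }
}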
- * - * @param message message object - */ -forge.log.logMessage = function(message) { - var messageLevelIndex = sLevelInfo[message.level].index; - for(var i = 0; i < sLoggers.length; ++i) { - var logger = sLoggers[i]; - if(logger.flags & forge.log.NO_LEVEL_CHECK) { - logger.f(message); - } else { - // get logger level - var loggerLevelIndex = sLevelInfo[logger.level].index; - // check level - if(messageLevelIndex <= loggerLevelIndex) { - // message critical enough, call logger - logger.f(logger, message); + for (const requestResponse of storage) { + const [cachedRequest, cachedResponse] = requestResponse + if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { + resultList.push(requestResponse) } } - } -}; -/** - * Sets the 'standard' key on a message object to: - * "LEVEL [category] " + message - * - * @param message a message log object - */ -forge.log.prepareStandard = function(message) { - if(!('standard' in message)) { - message.standard = - sLevelInfo[message.level].name + - //' ' + +message.timestamp + - ' [' + message.category + '] ' + - message.message; + return resultList } -}; -/** - * Sets the 'full' key on a message object to the original message - * interpolated via % formatting with the message arguments. - * - * @param message a message log object. - */ -forge.log.prepareFull = function(message) { - if(!('full' in message)) { - // copy args and insert message at the front - var args = [message.message]; - args = args.concat([] || 0); - // format the message - message.full = forge.util.format.apply(this, args); - } -}; + /** + * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm + * @param {any} requestQuery + * @param {any} request + * @param {any | null} response + * @param {import('../../types/cache').CacheQueryOptions | undefined} options + * @returns {boolean} + */ + #requestMatchesCachedItem (requestQuery, request, response = null, options) { + // if (options?.ignoreMethod === false && request.method === 'GET') { + // return false + // } -/** - * Applies both preparseStandard() and prepareFull() to a message object and - * store result in 'standardFull'. - * - * @param message a message log object. - */ -forge.log.prepareStandardFull = function(message) { - if(!('standardFull' in message)) { - // FIXME implement 'standardFull' logging - forge.log.prepareStandard(message); - message.standardFull = message.standard; - } -}; + const queryURL = new URL(requestQuery.url) -// create log level functions -if(true) { - // levels for which we want functions - var levels = ['error', 'warning', 'info', 'debug', 'verbose']; - for(var i = 0; i < levels.length; ++i) { - // wrap in a function to ensure proper level var is passed - (function(level) { - // create function for this level - forge.log[level] = function(category, message/*, args...*/) { - // convert arguments to real array, remove category and message - var args = Array.prototype.slice.call(arguments).slice(2); - // create message object - // Note: interpolation and standard formatting is done lazily - var msg = { - timestamp: new Date(), - level: level, - category: category, - message: message, - 'arguments': args - /*standard*/ - /*full*/ - /*fullMessage*/ - }; - // process this message - forge.log.logMessage(msg); - }; - })(levels[i]); - } -} + const cachedURL = new URL(request.url) -/** - * Creates a new logger with specified custom logging function. 
- * - * The logging function has a signature of: - * function(logger, message) - * logger: current logger - * message: object: - * level: level id - * category: category - * message: string message - * arguments: Array of extra arguments - * fullMessage: interpolated message and arguments if INTERPOLATE flag set - * - * @param logFunction a logging function which takes a log message object - * as a parameter. - * - * @return a logger object. - */ -forge.log.makeLogger = function(logFunction) { - var logger = { - flags: 0, - f: logFunction - }; - forge.log.setLevel(logger, 'none'); - return logger; -}; + if (options?.ignoreSearch) { + cachedURL.search = '' -/** - * Sets the current log level on a logger. - * - * @param logger the target logger. - * @param level the new maximum log level as a string. - * - * @return true if set, false if not. - */ -forge.log.setLevel = function(logger, level) { - var rval = false; - if(logger && !(logger.flags & forge.log.LEVEL_LOCKED)) { - for(var i = 0; i < forge.log.levels.length; ++i) { - var aValidLevel = forge.log.levels[i]; - if(level == aValidLevel) { - // set level - logger.level = level; - rval = true; - break; - } + queryURL.search = '' } - } - return rval; -}; + if (!urlEquals(queryURL, cachedURL, true)) { + return false + } -/** - * Locks the log level at its current value. - * - * @param logger the target logger. - * @param lock boolean lock value, default to true. - */ -forge.log.lock = function(logger, lock) { - if(typeof lock === 'undefined' || lock) { - logger.flags |= forge.log.LEVEL_LOCKED; - } else { - logger.flags &= ~forge.log.LEVEL_LOCKED; - } -}; + if ( + response == null || + options?.ignoreVary || + !response.headersList.contains('vary') + ) { + return true + } -/** - * Adds a logger. - * - * @param logger the logger object. 
- */ -forge.log.addLogger = function(logger) { - sLoggers.push(logger); -}; + const fieldValues = getFieldValues(response.headersList.get('vary')) -// setup the console logger if possible, else create fake console.log -if(typeof(console) !== 'undefined' && 'log' in console) { - var logger; - if(console.error && console.warn && console.info && console.debug) { - // looks like Firebug-style logging is available - // level handlers map - var levelHandlers = { - error: console.error, - warning: console.warn, - info: console.info, - debug: console.debug, - verbose: console.debug - }; - var f = function(logger, message) { - forge.log.prepareStandard(message); - var handler = levelHandlers[message.level]; - // prepend standard message and concat args - var args = [message.standard]; - args = args.concat(message['arguments'].slice()); - // apply to low-level console function - handler.apply(console, args); - }; - logger = forge.log.makeLogger(f); - } else { - // only appear to have basic console.log - var f = function(logger, message) { - forge.log.prepareStandardFull(message); - console.log(message.standardFull); - }; - logger = forge.log.makeLogger(f); + for (const fieldValue of fieldValues) { + if (fieldValue === '*') { + return false + } + + const requestValue = request.headersList.get(fieldValue) + const queryValue = requestQuery.headersList.get(fieldValue) + + // If one has the header and the other doesn't, or one has + // a different value than the other, return false + if (requestValue !== queryValue) { + return false + } + } + + return true } - forge.log.setLevel(logger, 'debug'); - forge.log.addLogger(logger); - sConsoleLogger = logger; -} else { - // define fake console.log to avoid potential script errors on - // browsers that do not have console logging - console = { - log: function() {} - }; } -/* - * Check for logging control query vars in current URL. - * - * console.level= - * Set's the console log level by name. Useful to override defaults and - * allow more verbose logging before a user config is loaded. - * - * console.lock= - * Lock the console log level at whatever level it is set at. This is run - * after console.level is processed. Useful to force a level of verbosity - * that could otherwise be limited by a user config. 
- */ -if(sConsoleLogger !== null && - typeof window !== 'undefined' && window.location -) { - var query = new URL(window.location.href).searchParams; - if(query.has('console.level')) { - // set with last value - forge.log.setLevel( - sConsoleLogger, query.get('console.level').slice(-1)[0]); - } - if(query.has('console.lock')) { - // set with last value - var lock = query.get('console.lock').slice(-1)[0]; - if(lock == 'true') { - forge.log.lock(sConsoleLogger); - } +Object.defineProperties(Cache.prototype, { + [Symbol.toStringTag]: { + value: 'Cache', + configurable: true + }, + match: kEnumerableProperty, + matchAll: kEnumerableProperty, + add: kEnumerableProperty, + addAll: kEnumerableProperty, + put: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +const cacheQueryOptionConverters = [ + { + key: 'ignoreSearch', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreMethod', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreVary', + converter: webidl.converters.boolean, + defaultValue: false } -} - -// provide public access to console logger -forge.log.consoleLogger = sConsoleLogger; +] +webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) -/***/ }), +webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ + ...cacheQueryOptionConverters, + { + key: 'cacheName', + converter: webidl.converters.DOMString + } +]) -/***/ 1145: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +webidl.converters.Response = webidl.interfaceConverter(Response) -/** - * Node.js module for all known Forge message digests. - * - * @author Dave Longley - * - * Copyright 2011-2017 Digital Bazaar, Inc. - */ -module.exports = __nccwpck_require__(6231); +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.RequestInfo +) -__nccwpck_require__(6594); -__nccwpck_require__(279); -__nccwpck_require__(4086); -__nccwpck_require__(9542); +module.exports = { + Cache +} /***/ }), -/***/ 6231: +/***/ 7907: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Node.js module for Forge message digests. - * - * @author Dave Longley - * - * Copyright 2011-2017 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); +"use strict"; -module.exports = forge.md = forge.md || {}; -forge.md.algorithms = forge.md.algorithms || {}; +const { kConstruct } = __nccwpck_require__(9174) +const { Cache } = __nccwpck_require__(6101) +const { webidl } = __nccwpck_require__(1744) +const { kEnumerableProperty } = __nccwpck_require__(3983) -/***/ }), +class CacheStorage { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map + * @type {Map { + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } + } -/** - * Message Digest Algorithm 5 with 128-bit digest (MD5) implementation. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + async match (request, options = {}) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' }) -var md5 = module.exports = forge.md5 = forge.md5 || {}; -forge.md.md5 = forge.md.algorithms.md5 = md5; + request = webidl.converters.RequestInfo(request) + options = webidl.converters.MultiCacheQueryOptions(options) -/** - * Creates an MD5 message digest object. - * - * @return a message digest object. - */ -md5.create = function() { - // do initialization as necessary - if(!_initialized) { - _init(); - } - - // MD5 state contains four 32-bit integers - var _state = null; - - // input buffer - var _input = forge.util.createBuffer(); - - // used for word storage - var _w = new Array(16); - - // message digest object - var md = { - algorithm: 'md5', - blockLength: 64, - digestLength: 16, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 8 - }; + // 1. + if (options.cacheName != null) { + // 1.1.1.1 + if (this.#caches.has(options.cacheName)) { + // 1.1.1.1.1 + const cacheList = this.#caches.get(options.cacheName) + const cache = new Cache(kConstruct, cacheList) + + return await cache.match(request, options) + } + } else { // 2. + // 2.2 + for (const cacheList of this.#caches.values()) { + const cache = new Cache(kConstruct, cacheList) + + // 2.2.1.2 + const response = await cache.match(request, options) + + if (response !== undefined) { + return response + } + } + } + } /** - * Starts the digest. - * - * @return this digest object. + * @see https://w3c.github.io/ServiceWorker/#cache-storage-has + * @param {string} cacheName + * @returns {Promise} */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength64 for backwards-compatibility) - md.fullMessageLength = md.messageLength64 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _state = { - h0: 0x67452301, - h1: 0xEFCDAB89, - h2: 0x98BADCFE, - h3: 0x10325476 - }; - return md; - }; - // start digest automatically for first time - md.start(); + async has (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1.1 + // 2.2 + return this.#caches.has(cacheName) + } /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. 
+ * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open + * @param {string} cacheName + * @returns {Promise} */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); - } + async open (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = (len[1] / 0x100000000) >>> 0; - } + cacheName = webidl.converters.DOMString(cacheName) - // add bytes to input buffer - _input.putBytes(msg); + // 2.1 + if (this.#caches.has(cacheName)) { + // await caches.open('v1') !== await caches.open('v1') - // process bytes - _update(_state, _w, _input); + // 2.1.1 + const cache = this.#caches.get(cacheName) - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); + // 2.1.1.1 + return new Cache(kConstruct, cache) } - return md; - }; + // 2.2 + const cache = [] + + // 2.3 + this.#caches.set(cacheName, cache) + + // 2.4 + return new Cache(kConstruct, cache) + } /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. + * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete + * @param {string} cacheName + * @returns {Promise} */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate MD5 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 448 mod 512. In other words, - the data to be digested must be a multiple of 512 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 8 bytes (64 - bits), that means that the last segment of the data must have 56 bytes - (448 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 448 mod 512 because - 512 - 128 = 448. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 448 mod 512, then 512 padding bits must be added. 
*/ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in little-endian order; since length - // is stored in bytes we multiply by 8 and add carry - var bits, carry = 0; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - bits = md.fullMessageLength[i] * 8 + carry; - carry = (bits / 0x100000000) >>> 0; - finalBlock.putInt32Le(bits >>> 0); - } - - var s2 = { - h0: _state.h0, - h1: _state.h1, - h2: _state.h2, - h3: _state.h3 - }; - _update(s2, _w, finalBlock); - var rval = forge.util.createBuffer(); - rval.putInt32Le(s2.h0); - rval.putInt32Le(s2.h1); - rval.putInt32Le(s2.h2); - rval.putInt32Le(s2.h3); - return rval; - }; - - return md; -}; + async delete (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) -// padding, constant tables for calculating md5 -var _padding = null; -var _g = null; -var _r = null; -var _k = null; -var _initialized = false; + cacheName = webidl.converters.DOMString(cacheName) -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + return this.#caches.delete(cacheName) + } - // g values - _g = [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 1, 6, 11, 0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12, - 5, 8, 11, 14, 1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 2, - 0, 7, 14, 5, 12, 3, 10, 1, 8, 15, 6, 13, 4, 11, 2, 9]; + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys + * @returns {string[]} + */ + async keys () { + webidl.brandCheck(this, CacheStorage) - // rounds table - _r = [ - 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, - 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, - 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, - 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21]; + // 2.1 + const keys = this.#caches.keys() - // get the result of abs(sin(i + 1)) as a 32-bit integer - _k = new Array(64); - for(var i = 0; i < 64; ++i) { - _k[i] = Math.floor(Math.abs(Math.sin(i + 1)) * 0x100000000); + // 2.2 + return [...keys] } - - // now initialized - _initialized = true; } -/** - * Updates an MD5 state with the given byte buffer. - * - * @param s the MD5 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. 
- */ -function _update(s, w, bytes) { - // consume 512 bit (64 byte) chunks - var t, a, b, c, d, f, r, i; - var len = bytes.length(); - while(len >= 64) { - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - - // round 1 - for(i = 0; i < 16; ++i) { - w[i] = bytes.getInt32Le(); - f = d ^ (b & (c ^ d)); - t = (a + f + _k[i] + w[i]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - // round 2 - for(; i < 32; ++i) { - f = c ^ (d & (b ^ c)); - t = (a + f + _k[i] + w[_g[i]]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - // round 3 - for(; i < 48; ++i) { - f = b ^ c ^ d; - t = (a + f + _k[i] + w[_g[i]]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - // round 4 - for(; i < 64; ++i) { - f = c ^ (b | ~d); - t = (a + f + _k[i] + w[_g[i]]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - - len -= 64; - } +Object.defineProperties(CacheStorage.prototype, { + [Symbol.toStringTag]: { + value: 'CacheStorage', + configurable: true + }, + match: kEnumerableProperty, + has: kEnumerableProperty, + open: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +module.exports = { + CacheStorage } /***/ }), -/***/ 7973: +/***/ 9174: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Node.js module for Forge mask generation functions. - * - * @author Stefan Siegl - * - * Copyright 2012 Stefan Siegl - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(3339); +"use strict"; + -module.exports = forge.mgf = forge.mgf || {}; -forge.mgf.mgf1 = forge.mgf1; +module.exports = { + kConstruct: (__nccwpck_require__(2785).kConstruct) +} /***/ }), -/***/ 3339: +/***/ 2396: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + +const assert = __nccwpck_require__(9491) +const { URLSerializer } = __nccwpck_require__(685) +const { isValidHeaderName } = __nccwpck_require__(2538) + /** - * Javascript implementation of mask generation function MGF1. - * - * @author Stefan Siegl - * @author Dave Longley - * - * Copyright (c) 2012 Stefan Siegl - * Copyright (c) 2014 Digital Bazaar, Inc. + * @see https://url.spec.whatwg.org/#concept-url-equals + * @param {URL} A + * @param {URL} B + * @param {boolean | undefined} excludeFragment + * @returns {boolean} */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); +function urlEquals (A, B, excludeFragment = false) { + const serializedA = URLSerializer(A, excludeFragment) + + const serializedB = URLSerializer(B, excludeFragment) -forge.mgf = forge.mgf || {}; -var mgf1 = module.exports = forge.mgf.mgf1 = forge.mgf1 = forge.mgf1 || {}; + return serializedA === serializedB +} /** - * Creates a MGF1 mask generation function object. - * - * @param md the message digest API to use (eg: forge.md.sha1.create()). - * - * @return a mask generation function object. + * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 + * @param {string} header */ -mgf1.create = function(md) { - var mgf = { - /** - * Generate mask of specified length. - * - * @param {String} seed The seed for mask generation. - * @param maskLen Number of bytes to generate. - * @return {String} The generated mask. 
- */ - generate: function(seed, maskLen) { - /* 2. Let T be the empty octet string. */ - var t = new forge.util.ByteBuffer(); - - /* 3. For counter from 0 to ceil(maskLen / hLen), do the following: */ - var len = Math.ceil(maskLen / md.digestLength); - for(var i = 0; i < len; i++) { - /* a. Convert counter to an octet string C of length 4 octets */ - var c = new forge.util.ByteBuffer(); - c.putInt32(i); - - /* b. Concatenate the hash of the seed mgfSeed and C to the octet - * string T: */ - md.start(); - md.update(seed + c.getBytes()); - t.putBuffer(md.digest()); - } +function fieldValues (header) { + assert(header !== null) + + const values = [] - /* Output the leading maskLen octets of T as the octet string mask. */ - t.truncate(t.length() - maskLen); - return t.getBytes(); + for (let value of header.split(',')) { + value = value.trim() + + if (!value.length) { + continue + } else if (!isValidHeaderName(value)) { + continue } - }; - return mgf; -}; + values.push(value) + } + + return values +} + +module.exports = { + urlEquals, + fieldValues +} /***/ }), -/***/ 1925: +/***/ 3598: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Object IDs for ASN.1. - * - * @author Dave Longley - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); - -forge.pki = forge.pki || {}; -var oids = module.exports = forge.pki.oids = forge.oids = forge.oids || {}; - -// set id to name mapping and name to id mapping -function _IN(id, name) { - oids[id] = name; - oids[name] = id; -} -// set id to name mapping only -function _I_(id, name) { - oids[id] = name; -} - -// algorithm OIDs -_IN('1.2.840.113549.1.1.1', 'rsaEncryption'); -// Note: md2 & md4 not implemented -//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption'); -//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption'); -_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption'); -_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption'); -_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP'); -_IN('1.2.840.113549.1.1.8', 'mgf1'); -_IN('1.2.840.113549.1.1.9', 'pSpecified'); -_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS'); -_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption'); -_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption'); -_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption'); -// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519 -_IN('1.3.101.112', 'EdDSA25519'); - -_IN('1.2.840.10040.4.3', 'dsa-with-sha1'); - -_IN('1.3.14.3.2.7', 'desCBC'); - -_IN('1.3.14.3.2.26', 'sha1'); -// Deprecated equivalent of sha1WithRSAEncryption -_IN('1.3.14.3.2.29', 'sha1WithRSASignature'); -_IN('2.16.840.1.101.3.4.2.1', 'sha256'); -_IN('2.16.840.1.101.3.4.2.2', 'sha384'); -_IN('2.16.840.1.101.3.4.2.3', 'sha512'); -_IN('2.16.840.1.101.3.4.2.4', 'sha224'); -_IN('2.16.840.1.101.3.4.2.5', 'sha512-224'); -_IN('2.16.840.1.101.3.4.2.6', 'sha512-256'); -_IN('1.2.840.113549.2.2', 'md2'); -_IN('1.2.840.113549.2.5', 'md5'); - -// pkcs#7 content types -_IN('1.2.840.113549.1.7.1', 'data'); -_IN('1.2.840.113549.1.7.2', 'signedData'); -_IN('1.2.840.113549.1.7.3', 'envelopedData'); -_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData'); -_IN('1.2.840.113549.1.7.5', 'digestedData'); -_IN('1.2.840.113549.1.7.6', 'encryptedData'); - -// pkcs#9 oids -_IN('1.2.840.113549.1.9.1', 'emailAddress'); -_IN('1.2.840.113549.1.9.2', 'unstructuredName'); -_IN('1.2.840.113549.1.9.3', 'contentType'); -_IN('1.2.840.113549.1.9.4', 'messageDigest'); -_IN('1.2.840.113549.1.9.5', 'signingTime'); -_IN('1.2.840.113549.1.9.6', 'counterSignature'); 
-_IN('1.2.840.113549.1.9.7', 'challengePassword'); -_IN('1.2.840.113549.1.9.8', 'unstructuredAddress'); -_IN('1.2.840.113549.1.9.14', 'extensionRequest'); - -_IN('1.2.840.113549.1.9.20', 'friendlyName'); -_IN('1.2.840.113549.1.9.21', 'localKeyId'); -_IN('1.2.840.113549.1.9.22.1', 'x509Certificate'); - -// pkcs#12 safe bags -_IN('1.2.840.113549.1.12.10.1.1', 'keyBag'); -_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag'); -_IN('1.2.840.113549.1.12.10.1.3', 'certBag'); -_IN('1.2.840.113549.1.12.10.1.4', 'crlBag'); -_IN('1.2.840.113549.1.12.10.1.5', 'secretBag'); -_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag'); - -// password-based-encryption for pkcs#12 -_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2'); -_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2'); - -_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4'); -_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4'); -_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC'); -_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC'); -_IN('1.2.840.113549.1.12.1.5', 'pbeWithSHAAnd128BitRC2-CBC'); -_IN('1.2.840.113549.1.12.1.6', 'pbewithSHAAnd40BitRC2-CBC'); - -// hmac OIDs -_IN('1.2.840.113549.2.7', 'hmacWithSHA1'); -_IN('1.2.840.113549.2.8', 'hmacWithSHA224'); -_IN('1.2.840.113549.2.9', 'hmacWithSHA256'); -_IN('1.2.840.113549.2.10', 'hmacWithSHA384'); -_IN('1.2.840.113549.2.11', 'hmacWithSHA512'); - -// symmetric key algorithm oids -_IN('1.2.840.113549.3.7', 'des-EDE3-CBC'); -_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC'); -_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC'); -_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC'); - -// certificate issuer/subject OIDs -_IN('2.5.4.3', 'commonName'); -_IN('2.5.4.4', 'surname'); -_IN('2.5.4.5', 'serialNumber'); -_IN('2.5.4.6', 'countryName'); -_IN('2.5.4.7', 'localityName'); -_IN('2.5.4.8', 'stateOrProvinceName'); -_IN('2.5.4.9', 'streetAddress'); -_IN('2.5.4.10', 'organizationName'); -_IN('2.5.4.11', 'organizationalUnitName'); -_IN('2.5.4.12', 'title'); -_IN('2.5.4.13', 'description'); -_IN('2.5.4.15', 'businessCategory'); -_IN('2.5.4.17', 'postalCode'); -_IN('2.5.4.42', 'givenName'); -_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName'); -_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName'); - -// X.509 extension OIDs -_IN('2.16.840.1.113730.1.1', 'nsCertType'); -_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used -_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35 -_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15 -_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32 -_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15 -_I_('2.5.29.5', 'policyMapping'); // deprecated use .33 -_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30 -_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17 -_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18 -_I_('2.5.29.9', 'subjectDirectoryAttributes'); -_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19 -_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30 -_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36 -_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19 -_IN('2.5.29.14', 'subjectKeyIdentifier'); -_IN('2.5.29.15', 'keyUsage'); -_I_('2.5.29.16', 'privateKeyUsagePeriod'); -_IN('2.5.29.17', 'subjectAltName'); -_IN('2.5.29.18', 'issuerAltName'); -_IN('2.5.29.19', 'basicConstraints'); -_I_('2.5.29.20', 'cRLNumber'); -_I_('2.5.29.21', 'cRLReason'); -_I_('2.5.29.22', 'expirationDate'); 
-_I_('2.5.29.23', 'instructionCode'); -_I_('2.5.29.24', 'invalidityDate'); -_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31 -_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28 -_I_('2.5.29.27', 'deltaCRLIndicator'); -_I_('2.5.29.28', 'issuingDistributionPoint'); -_I_('2.5.29.29', 'certificateIssuer'); -_I_('2.5.29.30', 'nameConstraints'); -_IN('2.5.29.31', 'cRLDistributionPoints'); -_IN('2.5.29.32', 'certificatePolicies'); -_I_('2.5.29.33', 'policyMappings'); -_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36 -_IN('2.5.29.35', 'authorityKeyIdentifier'); -_I_('2.5.29.36', 'policyConstraints'); -_IN('2.5.29.37', 'extKeyUsage'); -_I_('2.5.29.46', 'freshestCRL'); -_I_('2.5.29.54', 'inhibitAnyPolicy'); - -// extKeyUsage purposes -_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList'); -_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess'); -_IN('1.3.6.1.5.5.7.3.1', 'serverAuth'); -_IN('1.3.6.1.5.5.7.3.2', 'clientAuth'); -_IN('1.3.6.1.5.5.7.3.3', 'codeSigning'); -_IN('1.3.6.1.5.5.7.3.4', 'emailProtection'); -_IN('1.3.6.1.5.5.7.3.8', 'timeStamping'); +"use strict"; +// @ts-check + + + +/* global WebAssembly */ + +const assert = __nccwpck_require__(9491) +const net = __nccwpck_require__(1808) +const http = __nccwpck_require__(3685) +const { pipeline } = __nccwpck_require__(2781) +const util = __nccwpck_require__(3983) +const timers = __nccwpck_require__(9459) +const Request = __nccwpck_require__(2905) +const DispatcherBase = __nccwpck_require__(4839) +const { + RequestContentLengthMismatchError, + ResponseContentLengthMismatchError, + InvalidArgumentError, + RequestAbortedError, + HeadersTimeoutError, + HeadersOverflowError, + SocketError, + InformationalError, + BodyTimeoutError, + HTTPParserError, + ResponseExceededMaxSizeError, + ClientDestroyedError +} = __nccwpck_require__(8045) +const buildConnector = __nccwpck_require__(2067) +const { + kUrl, + kReset, + kServerName, + kClient, + kBusy, + kParser, + kConnect, + kBlocking, + kResuming, + kRunning, + kPending, + kSize, + kWriting, + kQueue, + kConnected, + kConnecting, + kNeedDrain, + kNoRef, + kKeepAliveDefaultTimeout, + kHostHeader, + kPendingIdx, + kRunningIdx, + kError, + kPipelining, + kSocket, + kKeepAliveTimeoutValue, + kMaxHeadersSize, + kKeepAliveMaxTimeout, + kKeepAliveTimeoutThreshold, + kHeadersTimeout, + kBodyTimeout, + kStrictContentLength, + kConnector, + kMaxRedirections, + kMaxRequests, + kCounter, + kClose, + kDestroy, + kDispatch, + kInterceptors, + kLocalAddress, + kMaxResponseSize, + kHTTPConnVersion, + // HTTP2 + kHost, + kHTTP2Session, + kHTTP2SessionState, + kHTTP2BuildRequest, + kHTTP2CopyHeaders, + kHTTP1BuildRequest +} = __nccwpck_require__(2785) + +/** @type {import('http2')} */ +let http2 +try { + http2 = __nccwpck_require__(5158) +} catch { + // @ts-ignore + http2 = { constants: {} } +} +const { + constants: { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_EXPECT, + HTTP2_HEADER_STATUS + } +} = http2 -/***/ }), +// Experimental +let h2ExperimentalWarned = false -/***/ 1281: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const FastBuffer = Buffer[Symbol.species] -/** - * Password-based encryption functions. - * - * @author Dave Longley - * @author Stefan Siegl - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. 
- * Copyright (c) 2012 Stefan Siegl - * - * An EncryptedPrivateKeyInfo: - * - * EncryptedPrivateKeyInfo ::= SEQUENCE { - * encryptionAlgorithm EncryptionAlgorithmIdentifier, - * encryptedData EncryptedData } - * - * EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier - * - * EncryptedData ::= OCTET STRING - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9549); -__nccwpck_require__(7157); -__nccwpck_require__(6231); -__nccwpck_require__(1925); -__nccwpck_require__(1611); -__nccwpck_require__(154); -__nccwpck_require__(7821); -__nccwpck_require__(9965); -__nccwpck_require__(3921); -__nccwpck_require__(8339); - -if(typeof BigInteger === 'undefined') { - var BigInteger = forge.jsbn.BigInteger; -} - -// shortcut for asn.1 API -var asn1 = forge.asn1; - -/* Password-based encryption implementation. */ -var pki = forge.pki = forge.pki || {}; -module.exports = pki.pbe = forge.pbe = forge.pbe || {}; -var oids = pki.oids; - -// validator for an EncryptedPrivateKeyInfo structure -// Note: Currently only works w/algorithm params -var encryptedPrivateKeyValidator = { - name: 'EncryptedPrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encryptionOid' - }, { - name: 'AlgorithmIdentifier.parameters', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'encryptionParams' - }] - }, { - // encryptedData - name: 'EncryptedPrivateKeyInfo.encryptedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'encryptedData' - }] -}; +const kClosedResolve = Symbol('kClosedResolve') -// validator for a PBES2Algorithms structure -// Note: Currently only works w/PBKDF2 + AES encryption schemes -var PBES2AlgorithmsValidator = { - name: 'PBES2Algorithms', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.keyDerivationFunc', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.keyDerivationFunc.oid', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'kdfOid' - }, { - name: 'PBES2Algorithms.params', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.params.salt', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'kdfSalt' - }, { - name: 'PBES2Algorithms.params.iterationCount', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'kdfIterationCount' - }, { - name: 'PBES2Algorithms.params.keyLength', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - optional: true, - capture: 'keyLength' - }, { - // prf - name: 'PBES2Algorithms.params.prf', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - value: [{ - name: 'PBES2Algorithms.params.prf.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'prfOid' - }] - }] - }] - }, { - name: 'PBES2Algorithms.encryptionScheme', - tagClass: 
asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.encryptionScheme.oid', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encOid' - }, { - name: 'PBES2Algorithms.encryptionScheme.iv', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'encIv' - }] - }] -}; +const channels = {} -var pkcs12PbeParamsValidator = { - name: 'pkcs-12PbeParams', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'pkcs-12PbeParams.salt', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'salt' - }, { - name: 'pkcs-12PbeParams.iterations', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'iterations' - }] -}; +try { + const diagnosticsChannel = __nccwpck_require__(7643) + channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') + channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') + channels.connectError = diagnosticsChannel.channel('undici:client:connectError') + channels.connected = diagnosticsChannel.channel('undici:client:connected') +} catch { + channels.sendHeaders = { hasSubscribers: false } + channels.beforeConnect = { hasSubscribers: false } + channels.connectError = { hasSubscribers: false } + channels.connected = { hasSubscribers: false } +} /** - * Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo. - * - * PBES2Algorithms ALGORITHM-IDENTIFIER ::= - * { {PBES2-params IDENTIFIED BY id-PBES2}, ...} - * - * id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13} - * - * PBES2-params ::= SEQUENCE { - * keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, - * encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} - * } - * - * PBES2-KDFs ALGORITHM-IDENTIFIER ::= - * { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... } - * - * PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... } - * - * PBKDF2-params ::= SEQUENCE { - * salt CHOICE { - * specified OCTET STRING, - * otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} - * }, - * iterationCount INTEGER (1..MAX), - * keyLength INTEGER (1..MAX) OPTIONAL, - * prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 - * } - * - * @param obj the ASN.1 PrivateKeyInfo object. - * @param password the password to encrypt with. - * @param options: - * algorithm the encryption algorithm to use - * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'. - * count the iteration count to use. - * saltSize the salt size to use. - * prfAlgorithm the PRF message digest algorithm to use - * ('sha1', 'sha224', 'sha256', 'sha384', 'sha512') - * - * @return the ASN.1 EncryptedPrivateKeyInfo. 
+ * @type {import('../types/client').default} */ -pki.encryptPrivateKeyInfo = function(obj, password, options) { - // set default options - options = options || {}; - options.saltSize = options.saltSize || 8; - options.count = options.count || 2048; - options.algorithm = options.algorithm || 'aes128'; - options.prfAlgorithm = options.prfAlgorithm || 'sha1'; - - // generate PBE params - var salt = forge.random.getBytesSync(options.saltSize); - var count = options.count; - var countBytes = asn1.integerToDer(count); - var dkLen; - var encryptionAlgorithm; - var encryptedData; - if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') { - // do PBES2 - var ivLen, encOid, cipherFn; - switch(options.algorithm) { - case 'aes128': - dkLen = 16; - ivLen = 16; - encOid = oids['aes128-CBC']; - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes192': - dkLen = 24; - ivLen = 16; - encOid = oids['aes192-CBC']; - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes256': - dkLen = 32; - ivLen = 16; - encOid = oids['aes256-CBC']; - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'des': - dkLen = 8; - ivLen = 8; - encOid = oids['desCBC']; - cipherFn = forge.des.createEncryptionCipher; - break; - default: - var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.'); - error.algorithm = options.algorithm; - throw error; +class Client extends DispatcherBase { + /** + * + * @param {string|URL} url + * @param {import('../types/client').Client.Options} options + */ + constructor (url, { + interceptors, + maxHeaderSize, + headersTimeout, + socketTimeout, + requestTimeout, + connectTimeout, + bodyTimeout, + idleTimeout, + keepAlive, + keepAliveTimeout, + maxKeepAliveTimeout, + keepAliveMaxTimeout, + keepAliveTimeoutThreshold, + socketPath, + pipelining, + tls, + strictContentLength, + maxCachedSessions, + maxRedirections, + connect, + maxRequestsPerClient, + localAddress, + maxResponseSize, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + // h2 + allowH2, + maxConcurrentStreams + } = {}) { + super() + + if (keepAlive !== undefined) { + throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') } - // get PRF message digest - var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase(); - var md = prfAlgorithmToMessageDigest(prfAlgorithm); - - // encrypt private key using pbe SHA-1 and AES/DES - var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md); - var iv = forge.random.getBytesSync(ivLen); - var cipher = cipherFn(dk); - cipher.start(iv); - cipher.update(asn1.toDer(obj)); - cipher.finish(); - encryptedData = cipher.output.getBytes(); - - // get PBKDF2-params - var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm); - - encryptionAlgorithm = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oids['pkcs5PBES2']).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // keyDerivationFunc - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()), - // PBKDF2-params - params - ]), - // encryptionScheme - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(encOid).getBytes()), - // iv - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv) - ]) - ]) - ]); - } 
else if(options.algorithm === '3des') { - // Do PKCS12 PBE - dkLen = 24; - - var saltBytes = new forge.util.ByteBuffer(salt); - var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen); - var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen); - var cipher = forge.des.createEncryptionCipher(dk); - cipher.start(iv); - cipher.update(asn1.toDer(obj)); - cipher.finish(); - encryptedData = cipher.output.getBytes(); - - encryptionAlgorithm = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()), - // pkcs-12PbeParams - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // salt - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt), - // iteration count - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - countBytes.getBytes()) - ]) - ]); - } else { - var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.'); - error.algorithm = options.algorithm; - throw error; - } - - // EncryptedPrivateKeyInfo - var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // encryptionAlgorithm - encryptionAlgorithm, - // encryptedData - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData) - ]); - return rval; -}; + if (socketTimeout !== undefined) { + throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } -/** - * Decrypts a ASN.1 PrivateKeyInfo object. - * - * @param obj the ASN.1 EncryptedPrivateKeyInfo object. - * @param password the password to decrypt with. - * - * @return the ASN.1 PrivateKeyInfo on success, null on failure. - */ -pki.decryptPrivateKeyInfo = function(obj, password) { - var rval = null; + if (requestTimeout !== undefined) { + throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } - // get PBE params - var capture = {}; - var errors = []; - if(!asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors)) { - var error = new Error('Cannot read encrypted private key. ' + - 'ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); - error.errors = errors; - throw error; - } + if (idleTimeout !== undefined) { + throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + } - // get cipher - var oid = asn1.derToOid(capture.encryptionOid); - var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password); + if (maxKeepAliveTimeout !== undefined) { + throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } - // get encrypted data - var encrypted = forge.util.createBuffer(capture.encryptedData); + if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { + throw new InvalidArgumentError('invalid maxHeaderSize') + } - cipher.update(encrypted); - if(cipher.finish()) { - rval = asn1.fromDer(cipher.output); - } + if (socketPath != null && typeof socketPath !== 'string') { + throw new InvalidArgumentError('invalid socketPath') + } - return rval; -}; + if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { + throw new InvalidArgumentError('invalid connectTimeout') + } -/** - * Converts a EncryptedPrivateKeyInfo to PEM format. - * - * @param epki the EncryptedPrivateKeyInfo. - * @param maxline the maximum characters per line, defaults to 64. 
- * - * @return the PEM-formatted encrypted private key. - */ -pki.encryptedPrivateKeyToPem = function(epki, maxline) { - // convert to DER, then PEM-encode - var msg = { - type: 'ENCRYPTED PRIVATE KEY', - body: asn1.toDer(epki).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveTimeout') + } -/** - * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption - * is not performed. - * - * @param pem the EncryptedPrivateKeyInfo in PEM-format. - * - * @return the ASN.1 EncryptedPrivateKeyInfo. - */ -pki.encryptedPrivateKeyFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; + if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + } - if(msg.type !== 'ENCRYPTED PRIVATE KEY') { - var error = new Error('Could not convert encrypted private key from PEM; ' + - 'PEM header type is "ENCRYPTED PRIVATE KEY".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert encrypted private key from PEM; ' + - 'PEM is encrypted.'); - } + if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { + throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') + } - // convert DER to ASN.1 object - return asn1.fromDer(msg.body); -}; + if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') + } -/** - * Encrypts an RSA private key. By default, the key will be wrapped in - * a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo. - * This is the standard, preferred way to encrypt a private key. - * - * To produce a non-standard PEM-encrypted private key that uses encapsulated - * headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL - * private key encryption), set the 'legacy' option to true. Note: Using this - * option will cause the iteration count to be forced to 1. - * - * Note: The 'des' algorithm is supported, but it is not considered to be - * secure because it only uses a single 56-bit key. If possible, it is highly - * recommended that a different algorithm be used. - * - * @param rsaKey the RSA key to encrypt. - * @param password the password to use. - * @param options: - * algorithm: the encryption algorithm to use - * ('aes128', 'aes192', 'aes256', '3des', 'des'). - * count: the iteration count to use. - * saltSize: the salt size to use. - * legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated - * headers (DEK-Info) private key. - * - * @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo. 
- */ -pki.encryptRsaPrivateKey = function(rsaKey, password, options) { - // standard PKCS#8 - options = options || {}; - if(!options.legacy) { - // encrypt PrivateKeyInfo - var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey)); - rval = pki.encryptPrivateKeyInfo(rval, password, options); - return pki.encryptedPrivateKeyToPem(rval); - } - - // legacy non-PKCS#8 - var algorithm; - var iv; - var dkLen; - var cipherFn; - switch(options.algorithm) { - case 'aes128': - algorithm = 'AES-128-CBC'; - dkLen = 16; - iv = forge.random.getBytesSync(16); - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes192': - algorithm = 'AES-192-CBC'; - dkLen = 24; - iv = forge.random.getBytesSync(16); - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes256': - algorithm = 'AES-256-CBC'; - dkLen = 32; - iv = forge.random.getBytesSync(16); - cipherFn = forge.aes.createEncryptionCipher; - break; - case '3des': - algorithm = 'DES-EDE3-CBC'; - dkLen = 24; - iv = forge.random.getBytesSync(8); - cipherFn = forge.des.createEncryptionCipher; - break; - case 'des': - algorithm = 'DES-CBC'; - dkLen = 8; - iv = forge.random.getBytesSync(8); - cipherFn = forge.des.createEncryptionCipher; - break; - default: - var error = new Error('Could not encrypt RSA private key; unsupported ' + - 'encryption algorithm "' + options.algorithm + '".'); - error.algorithm = options.algorithm; - throw error; - } - - // encrypt private key using OpenSSL legacy key derivation - var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen); - var cipher = cipherFn(dk); - cipher.start(iv); - cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey))); - cipher.finish(); - - var msg = { - type: 'RSA PRIVATE KEY', - procType: { - version: '4', - type: 'ENCRYPTED' - }, - dekInfo: { - algorithm: algorithm, - parameters: forge.util.bytesToHex(iv).toUpperCase() - }, - body: cipher.output.getBytes() - }; - return forge.pem.encode(msg); -}; + if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') + } -/** - * Decrypts an RSA private key. - * - * @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt. - * @param password the password to use. - * - * @return the RSA key on success, null on failure. 
- */ -pki.decryptRsaPrivateKey = function(pem, password) { - var rval = null; - - var msg = forge.pem.decode(pem)[0]; - - if(msg.type !== 'ENCRYPTED PRIVATE KEY' && - msg.type !== 'PRIVATE KEY' && - msg.type !== 'RSA PRIVATE KEY') { - var error = new Error('Could not convert private key from PEM; PEM header type ' + - 'is not "ENCRYPTED PRIVATE KEY", "PRIVATE KEY", or "RSA PRIVATE KEY".'); - error.headerType = error; - throw error; - } - - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - var dkLen; - var cipherFn; - switch(msg.dekInfo.algorithm) { - case 'DES-CBC': - dkLen = 8; - cipherFn = forge.des.createDecryptionCipher; - break; - case 'DES-EDE3-CBC': - dkLen = 24; - cipherFn = forge.des.createDecryptionCipher; - break; - case 'AES-128-CBC': - dkLen = 16; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'AES-192-CBC': - dkLen = 24; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'AES-256-CBC': - dkLen = 32; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'RC2-40-CBC': - dkLen = 5; - cipherFn = function(key) { - return forge.rc2.createDecryptionCipher(key, 40); - }; - break; - case 'RC2-64-CBC': - dkLen = 8; - cipherFn = function(key) { - return forge.rc2.createDecryptionCipher(key, 64); - }; - break; - case 'RC2-128-CBC': - dkLen = 16; - cipherFn = function(key) { - return forge.rc2.createDecryptionCipher(key, 128); - }; - break; - default: - var error = new Error('Could not decrypt private key; unsupported ' + - 'encryption algorithm "' + msg.dekInfo.algorithm + '".'); - error.algorithm = msg.dekInfo.algorithm; - throw error; + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') } - // use OpenSSL legacy key derivation - var iv = forge.util.hexToBytes(msg.dekInfo.parameters); - var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen); - var cipher = cipherFn(dk); - cipher.start(iv); - cipher.update(forge.util.createBuffer(msg.body)); - if(cipher.finish()) { - rval = cipher.output.getBytes(); - } else { - return rval; + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') } - } else { - rval = msg.body; - } - if(msg.type === 'ENCRYPTED PRIVATE KEY') { - rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password); - } else { - // decryption already performed above - rval = asn1.fromDer(rval); + if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { + throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + } + + if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { + throw new InvalidArgumentError('localAddress must be valid string IP address') + } + + if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { + throw new InvalidArgumentError('maxResponseSize must be a positive number') + } + + if ( + autoSelectFamilyAttemptTimeout != null && + (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) + ) { + throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') + } + + // h2 + if (allowH2 != null && typeof allowH2 !== 'boolean') { + throw new InvalidArgumentError('allowH2 must be a valid boolean value') + } + + if (maxConcurrentStreams != null && (typeof 
maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { + throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) + ? interceptors.Client + : [createRedirectInterceptor({ maxRedirections })] + this[kUrl] = util.parseOrigin(url) + this[kConnector] = connect + this[kSocket] = null + this[kPipelining] = pipelining != null ? pipelining : 1 + this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize + this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout + this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout + this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold + this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] + this[kServerName] = null + this[kLocalAddress] = localAddress != null ? localAddress : null + this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` + this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 + this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 + this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength + this[kMaxRedirections] = maxRedirections + this[kMaxRequests] = maxRequestsPerClient + this[kClosedResolve] = null + this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 + this[kHTTPConnVersion] = 'h1' + + // HTTP/2 + this[kHTTP2Session] = null + this[kHTTP2SessionState] = !allowH2 + ? null + : { + // streams: null, // Fixed queue of streams - For future support of `push` + openStreams: 0, // Keep track of them to decide wether or not unref the session + maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + } + this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` + + // kQueue is built up of 3 sections separated by + // the kRunningIdx and kPendingIdx indices. + // | complete | running | pending | + // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length + // kRunningIdx points to the first running element. + // kPendingIdx points to the first pending element. + // This implements a fast queue with an amortized + // time of O(1). 
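/*
 * Editor's note: a minimal, self-contained sketch of the head-index queue idea
 * that the comment block above describes. It is illustrative only and is not
 * part of the bundled undici Client; the names below (FastQueue, push, shift,
 * size) are hypothetical, and the compaction threshold is an arbitrary choice.
 */
class FastQueue {
  constructor () {
    this.items = []       // analogous to this[kQueue]
    this.headIdx = 0      // analogous to this[kRunningIdx]: first unconsumed element
  }

  push (item) {           // enqueue: O(1)
    this.items.push(item)
  }

  shift () {              // dequeue: amortized O(1), avoids Array#shift's O(n) cost
    if (this.headIdx === this.items.length) return undefined
    const item = this.items[this.headIdx++]
    // Occasionally drop the already-consumed prefix instead of shifting on every call.
    if (this.headIdx > 256 && this.headIdx * 2 > this.items.length) {
      this.items = this.items.slice(this.headIdx)
      this.headIdx = 0
    }
    return item
  }

  get size () {           // analogous to this[kSize]
    return this.items.length - this.headIdx
  }
}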
+ + this[kQueue] = [] + this[kRunningIdx] = 0 + this[kPendingIdx] = 0 + } + + get pipelining () { + return this[kPipelining] + } + + set pipelining (value) { + this[kPipelining] = value + resume(this, true) + } + + get [kPending] () { + return this[kQueue].length - this[kPendingIdx] + } + + get [kRunning] () { + return this[kPendingIdx] - this[kRunningIdx] + } + + get [kSize] () { + return this[kQueue].length - this[kRunningIdx] + } + + get [kConnected] () { + return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed + } + + get [kBusy] () { + const socket = this[kSocket] + return ( + (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || + (this[kSize] >= (this[kPipelining] || 1)) || + this[kPending] > 0 + ) } - if(rval !== null) { - rval = pki.privateKeyFromAsn1(rval); + /* istanbul ignore: only used for test */ + [kConnect] (cb) { + connect(this) + this.once('connect', cb) } - return rval; -}; + [kDispatch] (opts, handler) { + const origin = opts.origin || this[kUrl].origin -/** - * Derives a PKCS#12 key. - * - * @param password the password to derive the key material from, null or - * undefined for none. - * @param salt the salt, as a ByteBuffer, to use. - * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC). - * @param iter the iteration count. - * @param n the number of bytes to derive from the password. - * @param md the message digest to use, defaults to SHA-1. - * - * @return a ByteBuffer with the bytes derived from the password. - */ -pki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) { - var j, l; - - if(typeof md === 'undefined' || md === null) { - if(!('sha1' in forge.md)) { - throw new Error('"sha1" hash algorithm unavailable.'); - } - md = forge.md.sha1.create(); - } - - var u = md.digestLength; - var v = md.blockLength; - var result = new forge.util.ByteBuffer(); - - /* Convert password to Unicode byte buffer + trailing 0-byte. */ - var passBuf = new forge.util.ByteBuffer(); - if(password !== null && password !== undefined) { - for(l = 0; l < password.length; l++) { - passBuf.putInt16(password.charCodeAt(l)); - } - passBuf.putInt16(0); - } - - /* Length of salt and password in BYTES. */ - var p = passBuf.length(); - var s = salt.length(); - - /* 1. Construct a string, D (the "diversifier"), by concatenating - v copies of ID. */ - var D = new forge.util.ByteBuffer(); - D.fillWithByte(id, v); - - /* 2. Concatenate copies of the salt together to create a string S of length - v * ceil(s / v) bytes (the final copy of the salt may be trunacted - to create S). - Note that if the salt is the empty string, then so is S. */ - var Slen = v * Math.ceil(s / v); - var S = new forge.util.ByteBuffer(); - for(l = 0; l < Slen; l++) { - S.putByte(salt.at(l % s)); - } - - /* 3. Concatenate copies of the password together to create a string P of - length v * ceil(p / v) bytes (the final copy of the password may be - truncated to create P). - Note that if the password is the empty string, then so is P. */ - var Plen = v * Math.ceil(p / v); - var P = new forge.util.ByteBuffer(); - for(l = 0; l < Plen; l++) { - P.putByte(passBuf.at(l % p)); - } - - /* 4. Set I=S||P to be the concatenation of S and P. */ - var I = S; - I.putBuffer(P); - - /* 5. Set c=ceil(n / u). */ - var c = Math.ceil(n / u); - - /* 6. For i=1, 2, ..., c, do the following: */ - for(var i = 1; i <= c; i++) { - /* a) Set Ai=H^r(D||I). (l.e. 
the rth hash of D||I, H(H(H(...H(D||I)))) */ - var buf = new forge.util.ByteBuffer(); - buf.putBytes(D.bytes()); - buf.putBytes(I.bytes()); - for(var round = 0; round < iter; round++) { - md.start(); - md.update(buf.getBytes()); - buf = md.digest(); - } - - /* b) Concatenate copies of Ai to create a string B of length v bytes (the - final copy of Ai may be truncated to create B). */ - var B = new forge.util.ByteBuffer(); - for(l = 0; l < v; l++) { - B.putByte(buf.at(l % u)); - } - - /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks, - where k=ceil(s / v) + ceil(p / v), modify I by setting - Ij=(Ij+B+1) mod 2v for each j. */ - var k = Math.ceil(s / v) + Math.ceil(p / v); - var Inew = new forge.util.ByteBuffer(); - for(j = 0; j < k; j++) { - var chunk = new forge.util.ByteBuffer(I.getBytes(v)); - var x = 0x1ff; - for(l = B.length() - 1; l >= 0; l--) { - x = x >> 8; - x += B.at(l) + chunk.at(l); - chunk.setAt(l, x & 0xff); - } - Inew.putBuffer(chunk); + const request = this[kHTTPConnVersion] === 'h2' + ? Request[kHTTP2BuildRequest](origin, opts, handler) + : Request[kHTTP1BuildRequest](origin, opts, handler) + + this[kQueue].push(request) + if (this[kResuming]) { + // Do nothing. + } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { + // Wait a tick in case stream/iterator is ended in the same tick. + this[kResuming] = 1 + process.nextTick(resume, this) + } else { + resume(this, true) } - I = Inew; - /* Add Ai to A. */ - result.putBuffer(buf); - } + if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { + this[kNeedDrain] = 2 + } - result.truncate(result.length() - n); - return result; -}; + return this[kNeedDrain] < 2 + } -/** - * Get new Forge cipher object instance. - * - * @param oid the OID (in string notation). - * @param params the ASN.1 params object. - * @param password the password to decrypt with. - * - * @return new cipher object instance. - */ -pki.pbe.getCipher = function(oid, params, password) { - switch(oid) { - case pki.oids['pkcs5PBES2']: - return pki.pbe.getCipherForPBES2(oid, params, password); - - case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']: - case pki.oids['pbewithSHAAnd40BitRC2-CBC']: - return pki.pbe.getCipherForPKCS12PBE(oid, params, password); - - default: - var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.'); - error.oid = oid; - error.supportedOids = [ - 'pkcs5PBES2', - 'pbeWithSHAAnd3-KeyTripleDES-CBC', - 'pbewithSHAAnd40BitRC2-CBC' - ]; - throw error; + async [kClose] () { + // TODO: for H2 we need to gracefully flush the remaining enqueued + // request and close each stream. + return new Promise((resolve) => { + if (!this[kSize]) { + resolve(null) + } else { + this[kClosedResolve] = resolve + } + }) } -}; -/** - * Get new Forge cipher object instance according to PBES2 params block. - * - * The returned cipher instance is already started using the IV - * from PBES2 parameter block. - * - * @param oid the PKCS#5 PBKDF2 OID (in string notation). - * @param params the ASN.1 PBES2-params object. - * @param password the password to decrypt with. - * - * @return new cipher object instance. - */ -pki.pbe.getCipherForPBES2 = function(oid, params, password) { - // get PBE params - var capture = {}; - var errors = []; - if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) { - var error = new Error('Cannot read password-based-encryption algorithm ' + - 'parameters. 
ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); - error.errors = errors; - throw error; - } - - // check oids - oid = asn1.derToOid(capture.kdfOid); - if(oid !== pki.oids['pkcs5PBKDF2']) { - var error = new Error('Cannot read encrypted private key. ' + - 'Unsupported key derivation function OID.'); - error.oid = oid; - error.supportedOids = ['pkcs5PBKDF2']; - throw error; - } - oid = asn1.derToOid(capture.encOid); - if(oid !== pki.oids['aes128-CBC'] && - oid !== pki.oids['aes192-CBC'] && - oid !== pki.oids['aes256-CBC'] && - oid !== pki.oids['des-EDE3-CBC'] && - oid !== pki.oids['desCBC']) { - var error = new Error('Cannot read encrypted private key. ' + - 'Unsupported encryption scheme OID.'); - error.oid = oid; - error.supportedOids = [ - 'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC']; - throw error; - } - - // set PBE params - var salt = capture.kdfSalt; - var count = forge.util.createBuffer(capture.kdfIterationCount); - count = count.getInt(count.length() << 3); - var dkLen; - var cipherFn; - switch(pki.oids[oid]) { - case 'aes128-CBC': - dkLen = 16; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'aes192-CBC': - dkLen = 24; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'aes256-CBC': - dkLen = 32; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'des-EDE3-CBC': - dkLen = 24; - cipherFn = forge.des.createDecryptionCipher; - break; - case 'desCBC': - dkLen = 8; - cipherFn = forge.des.createDecryptionCipher; - break; - } - - // get PRF message digest - var md = prfOidToMessageDigest(capture.prfOid); - - // decrypt private key using pbe with chosen PRF and AES/DES - var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md); - var iv = capture.encIv; - var cipher = cipherFn(dk); - cipher.start(iv); - - return cipher; -}; + async [kDestroy] (err) { + return new Promise((resolve) => { + const requests = this[kQueue].splice(this[kPendingIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } -/** - * Get new Forge cipher object instance for PKCS#12 PBE. - * - * The returned cipher instance is already started using the key & IV - * derived from the provided password and PKCS#12 PBE salt. - * - * @param oid The PKCS#12 PBE OID (in string notation). - * @param params The ASN.1 PKCS#12 PBE-params object. - * @param password The password to decrypt with. - * - * @return the new cipher object instance. - */ -pki.pbe.getCipherForPKCS12PBE = function(oid, params, password) { - // get PBE params - var capture = {}; - var errors = []; - if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) { - var error = new Error('Cannot read password-based-encryption algorithm ' + - 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); - error.errors = errors; - throw error; - } - - var salt = forge.util.createBuffer(capture.salt); - var count = forge.util.createBuffer(capture.iterations); - count = count.getInt(count.length() << 3); - - var dkLen, dIvLen, cipherFn; - switch(oid) { - case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']: - dkLen = 24; - dIvLen = 8; - cipherFn = forge.des.startDecrypting; - break; + const callback = () => { + if (this[kClosedResolve]) { + // TODO (fix): Should we error here with ClientDestroyedError? 
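+          // kClosedResolve holds the resolver of a pending close() promise
+          // (set in [kClose] above), so settling it here lets a caller that is
+          // awaiting close() finish even though the client is being destroyed.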
+ this[kClosedResolve]() + this[kClosedResolve] = null + } + resolve() + } - case pki.oids['pbewithSHAAnd40BitRC2-CBC']: - dkLen = 5; - dIvLen = 8; - cipherFn = function(key, iv) { - var cipher = forge.rc2.createDecryptionCipher(key, 40); - cipher.start(iv, null); - return cipher; - }; - break; + if (this[kHTTP2Session] != null) { + util.destroy(this[kHTTP2Session], err) + this[kHTTP2Session] = null + this[kHTTP2SessionState] = null + } - default: - var error = new Error('Cannot read PKCS #12 PBE data block. Unsupported OID.'); - error.oid = oid; - throw error; + if (!this[kSocket]) { + queueMicrotask(callback) + } else { + util.destroy(this[kSocket].on('close', callback), err) + } + + resume(this) + }) } +} - // get PRF message digest - var md = prfOidToMessageDigest(capture.prfOid); - var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md); - md.start(); - var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md); +function onHttp2SessionError (err) { + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') - return cipherFn(key, iv); -}; + this[kSocket][kError] = err -/** - * OpenSSL's legacy key derivation function. - * - * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html - * - * @param password the password to derive the key from. - * @param salt the salt to use, null for none. - * @param dkLen the number of bytes needed for the derived key. - * @param [options] the options to use: - * [md] an optional message digest object to use. - */ -pki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) { - if(typeof md === 'undefined' || md === null) { - if(!('md5' in forge.md)) { - throw new Error('"md5" hash algorithm unavailable.'); - } - md = forge.md.md5.create(); - } - if(salt === null) { - salt = ''; - } - var digests = [hash(md, password + salt)]; - for(var length = 16, i = 1; length < dkLen; ++i, length += 16) { - digests.push(hash(md, digests[i - 1] + password + salt)); + onError(this[kClient], err) +} + +function onHttp2FrameError (type, code, id) { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + + if (id === 0) { + this[kSocket][kError] = err + onError(this[kClient], err) } - return digests.join('').substr(0, dkLen); -}; +} -function hash(md, bytes) { - return md.start().update(bytes).digest().getBytes(); +function onHttp2SessionEnd () { + util.destroy(this, new SocketError('other side closed')) + util.destroy(this[kSocket], new SocketError('other side closed')) } -function prfOidToMessageDigest(prfOid) { - // get PRF algorithm, default to SHA-1 - var prfAlgorithm; - if(!prfOid) { - prfAlgorithm = 'hmacWithSHA1'; - } else { - prfAlgorithm = pki.oids[asn1.derToOid(prfOid)]; - if(!prfAlgorithm) { - var error = new Error('Unsupported PRF OID.'); - error.oid = prfOid; - error.supported = [ - 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384', - 'hmacWithSHA512']; - throw error; +function onHTTP2GoAway (code) { + const client = this[kClient] + const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) + client[kSocket] = null + client[kHTTP2Session] = null + + if (client.destroyed) { + assert(this[kPending] === 0) + + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) } + } else if (client[kRunning] > 0) { + // Fail head of pipeline. 
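+    // Only the request at kRunningIdx is errored; the remaining queued
+    // requests stay in place and are picked up again by the resume() call
+    // below, after kPendingIdx has been rewound to kRunningIdx.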
+ const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) } - return prfAlgorithmToMessageDigest(prfAlgorithm); -} - -function prfAlgorithmToMessageDigest(prfAlgorithm) { - var factory = forge.md; - switch(prfAlgorithm) { - case 'hmacWithSHA224': - factory = forge.md.sha512; - case 'hmacWithSHA1': - case 'hmacWithSHA256': - case 'hmacWithSHA384': - case 'hmacWithSHA512': - prfAlgorithm = prfAlgorithm.substr(8).toLowerCase(); - break; - default: - var error = new Error('Unsupported PRF algorithm.'); - error.algorithm = prfAlgorithm; - error.supported = [ - 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384', - 'hmacWithSHA512']; - throw error; - } - if(!factory || !(prfAlgorithm in factory)) { - throw new Error('Unknown hash algorithm: ' + prfAlgorithm); - } - return factory[prfAlgorithm].create(); -} - -function createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) { - var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // salt - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt), - // iteration count - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - countBytes.getBytes()) - ]); - // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm - if(prfAlgorithm !== 'hmacWithSHA1') { - params.value.push( - // key length - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(dkLen.toString(16))), - // AlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ])); - } - return params; + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', + client[kUrl], + [client], + err + ) + + resume(client) } +const constants = __nccwpck_require__(953) +const createRedirectInterceptor = __nccwpck_require__(8861) +const EMPTY_BUF = Buffer.alloc(0) -/***/ }), +async function lazyllhttp () { + const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined -/***/ 1611: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let mod + try { + mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64')) + } catch (e) { + /* istanbul ignore next */ -/** - * Password-Based Key-Derivation Function #2 implementation. - * - * See RFC 2898 for details. - * - * @author Dave Longley - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(5104); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + // We could check if the error was caused by the simd option not + // being enabled, but the occurring of this other error + // * https://github.com/emscripten-core/emscripten/issues/11495 + // got me to remove that check to avoid breaking Node 12. 
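+    // Whatever the cause, fall back to compiling the alternative llhttp wasm
+    // build below (presumably the non-SIMD one).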
+ mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64')) + } + + return await WebAssembly.instantiate(mod, { + env: { + /* eslint-disable camelcase */ -var pkcs5 = forge.pkcs5 = forge.pkcs5 || {}; + wasm_on_url: (p, at, len) => { + /* istanbul ignore next */ + return 0 + }, + wasm_on_status: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_begin: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageBegin() || 0 + }, + wasm_on_header_field: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_header_value: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 + }, + wasm_on_body: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_complete: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageComplete() || 0 + } -var crypto; -if(forge.util.isNodejs && !forge.options.usePureJavaScript) { - crypto = __nccwpck_require__(6113); + /* eslint-enable camelcase */ + } + }) } -/** - * Derives a key from a password. - * - * @param p the password as a binary-encoded string of bytes. - * @param s the salt as a binary-encoded string of bytes. - * @param c the iteration count, a positive integer. - * @param dkLen the intended length, in bytes, of the derived key, - * (max: 2^32 - 1) * hash length of the PRF. - * @param [md] the message digest (or algorithm identifier as a string) to use - * in the PRF, defaults to SHA-1. - * @param [callback(err, key)] presence triggers asynchronous version, called - * once the operation completes. - * - * @return the derived key, as a binary-encoded string of bytes, for the - * synchronous version (if no callback is specified). 
- */ -module.exports = forge.pbkdf2 = pkcs5.pbkdf2 = function( - p, s, c, dkLen, md, callback) { - if(typeof md === 'function') { - callback = md; - md = null; - } - - // use native implementation if possible and not disabled, note that - // some node versions only support SHA-1, others allow digest to be changed - if(forge.util.isNodejs && !forge.options.usePureJavaScript && - crypto.pbkdf2 && (md === null || typeof md !== 'object') && - (crypto.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) { - if(typeof md !== 'string') { - // default prf to SHA-1 - md = 'sha1'; - } - p = Buffer.from(p, 'binary'); - s = Buffer.from(s, 'binary'); - if(!callback) { - if(crypto.pbkdf2Sync.length === 4) { - return crypto.pbkdf2Sync(p, s, c, dkLen).toString('binary'); +let llhttpInstance = null +let llhttpPromise = lazyllhttp() +llhttpPromise.catch() + +let currentParser = null +let currentBufferRef = null +let currentBufferSize = 0 +let currentBufferPtr = null + +const TIMEOUT_HEADERS = 1 +const TIMEOUT_BODY = 2 +const TIMEOUT_IDLE = 3 + +class Parser { + constructor (client, socket, { exports }) { + assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) + + this.llhttp = exports + this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) + this.client = client + this.socket = socket + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + this.statusCode = null + this.statusText = '' + this.upgrade = false + this.headers = [] + this.headersSize = 0 + this.headersMaxSize = client[kMaxHeadersSize] + this.shouldKeepAlive = false + this.paused = false + this.resume = this.resume.bind(this) + + this.bytesRead = 0 + + this.keepAlive = '' + this.contentLength = '' + this.connection = '' + this.maxResponseSize = client[kMaxResponseSize] + } + + setTimeout (value, type) { + this.timeoutType = type + if (value !== this.timeoutValue) { + timers.clearTimeout(this.timeout) + if (value) { + this.timeout = timers.setTimeout(onParserTimeout, value, this) + // istanbul ignore else: only for jest + if (this.timeout.unref) { + this.timeout.unref() + } + } else { + this.timeout = null + } + this.timeoutValue = value + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - return crypto.pbkdf2Sync(p, s, c, dkLen, md).toString('binary'); } - if(crypto.pbkdf2Sync.length === 4) { - return crypto.pbkdf2(p, s, c, dkLen, function(err, key) { - if(err) { - return callback(err); - } - callback(null, key.toString('binary')); - }); + } + + resume () { + if (this.socket.destroyed || !this.paused) { + return } - return crypto.pbkdf2(p, s, c, dkLen, md, function(err, key) { - if(err) { - return callback(err); + + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_resume(this.ptr) + + assert(this.timeoutType === TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - callback(null, key.toString('binary')); - }); - } + } - if(typeof md === 'undefined' || md === null) { - // default prf to SHA-1 - md = 'sha1'; + this.paused = false + this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. 
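+    // Any data that was unshifted onto the socket while paused has now been
+    // pushed back through llhttp; readMore() resumes draining the socket.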
+ this.readMore() } - if(typeof md === 'string') { - if(!(md in forge.md.algorithms)) { - throw new Error('Unknown hash algorithm: ' + md); + + readMore () { + while (!this.paused && this.ptr) { + const chunk = this.socket.read() + if (chunk === null) { + break + } + this.execute(chunk) } - md = forge.md[md].create(); } - var hLen = md.digestLength; + execute (data) { + assert(this.ptr != null) + assert(currentParser == null) + assert(!this.paused) - /* 1. If dkLen > (2^32 - 1) * hLen, output "derived key too long" and - stop. */ - if(dkLen > (0xFFFFFFFF * hLen)) { - var err = new Error('Derived key is too long.'); - if(callback) { - return callback(err); - } - throw err; - } + const { socket, llhttp } = this - /* 2. Let len be the number of hLen-octet blocks in the derived key, - rounding up, and let r be the number of octets in the last - block: - - len = CEIL(dkLen / hLen), - r = dkLen - (len - 1) * hLen. */ - var len = Math.ceil(dkLen / hLen); - var r = dkLen - (len - 1) * hLen; - - /* 3. For each block of the derived key apply the function F defined - below to the password P, the salt S, the iteration count c, and - the block index to compute the block: - - T_1 = F(P, S, c, 1), - T_2 = F(P, S, c, 2), - ... - T_len = F(P, S, c, len), - - where the function F is defined as the exclusive-or sum of the - first c iterates of the underlying pseudorandom function PRF - applied to the password P and the concatenation of the salt S - and the block index i: - - F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c - - where - - u_1 = PRF(P, S || INT(i)), - u_2 = PRF(P, u_1), - ... - u_c = PRF(P, u_{c-1}). - - Here, INT(i) is a four-octet encoding of the integer i, most - significant octet first. */ - var prf = forge.hmac.create(); - prf.start(md, p); - var dk = ''; - var xor, u_c, u_c1; - - // sync version - if(!callback) { - for(var i = 1; i <= len; ++i) { - // PRF(P, S || INT(i)) (first iteration) - prf.start(null, null); - prf.update(s); - prf.update(forge.util.int32ToBytes(i)); - xor = u_c1 = prf.digest().getBytes(); - - // PRF(P, u_{c-1}) (other iterations) - for(var j = 2; j <= c; ++j) { - prf.start(null, null); - prf.update(u_c1); - u_c = prf.digest().getBytes(); - // F(p, s, c, i) - xor = forge.util.xorBytes(xor, u_c, hLen); - u_c1 = u_c; + if (data.length > currentBufferSize) { + if (currentBufferPtr) { + llhttp.free(currentBufferPtr) } + currentBufferSize = Math.ceil(data.length / 4096) * 4096 + currentBufferPtr = llhttp.malloc(currentBufferSize) + } + + new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) - /* 4. Concatenate the blocks and extract the first dkLen octets to - produce a derived key DK: + // Call `execute` on the wasm parser. + // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, + // and finally the length of bytes to parse. + // The return value is an error code or `constants.ERROR.OK`. + try { + let ret + + try { + currentBufferRef = data + currentParser = this + ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) + /* eslint-disable-next-line no-useless-catch */ + } catch (err) { + /* istanbul ignore next: difficult to make a test case for */ + throw err + } finally { + currentParser = null + currentBufferRef = null + } - DK = T_1 || T_2 || ... || T_len<0..r-1> */ - dk += (i < len) ? 
xor : xor.substr(0, r); + const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr + + if (ret === constants.ERROR.PAUSED_UPGRADE) { + this.onUpgrade(data.slice(offset)) + } else if (ret === constants.ERROR.PAUSED) { + this.paused = true + socket.unshift(data.slice(offset)) + } else if (ret !== constants.ERROR.OK) { + const ptr = llhttp.llhttp_get_error_reason(this.ptr) + let message = '' + /* istanbul ignore else: difficult to make a test case for */ + if (ptr) { + const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) + message = + 'Response does not match the HTTP/1.1 protocol (' + + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + + ')' + } + throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) + } + } catch (err) { + util.destroy(socket, err) } - /* 5. Output the derived key DK. */ - return dk; } - // async version - var i = 1, j; - function outer() { - if(i > len) { - // done - return callback(null, dk); - } + destroy () { + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_free(this.ptr) + this.ptr = null - // PRF(P, S || INT(i)) (first iteration) - prf.start(null, null); - prf.update(s); - prf.update(forge.util.int32ToBytes(i)); - xor = u_c1 = prf.digest().getBytes(); + timers.clearTimeout(this.timeout) + this.timeout = null + this.timeoutValue = null + this.timeoutType = null - // PRF(P, u_{c-1}) (other iterations) - j = 2; - inner(); + this.paused = false } - function inner() { - if(j <= c) { - prf.start(null, null); - prf.update(u_c1); - u_c = prf.digest().getBytes(); - // F(p, s, c, i) - xor = forge.util.xorBytes(xor, u_c, hLen); - u_c1 = u_c; - ++j; - return forge.util.setImmediate(inner); - } + onStatus (buf) { + this.statusText = buf.toString() + } - /* 4. Concatenate the blocks and extract the first dkLen octets to - produce a derived key DK: + onMessageBegin () { + const { socket, client } = this - DK = T_1 || T_2 || ... || T_len<0..r-1> */ - dk += (i < len) ? xor : xor.substr(0, r); + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } - ++i; - outer(); + const request = client[kQueue][client[kRunningIdx]] + if (!request) { + return -1 + } } - outer(); -}; + onHeaderField (buf) { + const len = this.headers.length + if ((len & 1) === 0) { + this.headers.push(buf) + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } -/***/ }), + this.trackHeader(buf.length) + } -/***/ 154: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + onHeaderValue (buf) { + let len = this.headers.length -/** - * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms. - * - * See: RFC 1421. - * - * @author Dave Longley - * - * Copyright (c) 2013-2014 Digital Bazaar, Inc. - * - * A Forge PEM object has the following fields: - * - * type: identifies the type of message (eg: "RSA PRIVATE KEY"). - * - * procType: identifies the type of processing performed on the message, - * it has two subfields: version and type, eg: 4,ENCRYPTED. - * - * contentDomain: identifies the type of content in the message, typically - * only uses the value: "RFC822". - * - * dekInfo: identifies the message encryption algorithm and mode and includes - * any parameters for the algorithm, it has two subfields: algorithm and - * parameters, eg: DES-CBC,F8143EDE5960C597. - * - * headers: contains all other PEM encapsulated headers -- where order is - * significant (for pairing data like recipient ID + key info). 
- * - * body: the binary-encoded body. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); + if ((len & 1) === 1) { + this.headers.push(buf) + len += 1 + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } -// shortcut for pem API -var pem = module.exports = forge.pem = forge.pem || {}; + const key = this.headers[len - 2] + if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { + this.keepAlive += buf.toString() + } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { + this.connection += buf.toString() + } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { + this.contentLength += buf.toString() + } -/** - * Encodes (serializes) the given PEM object. - * - * @param msg the PEM message object to encode. - * @param options the options to use: - * maxline the maximum characters per line for the body, (default: 64). - * - * @return the PEM-formatted string. - */ -pem.encode = function(msg, options) { - options = options || {}; - var rval = '-----BEGIN ' + msg.type + '-----\r\n'; - - // encode special headers - var header; - if(msg.procType) { - header = { - name: 'Proc-Type', - values: [String(msg.procType.version), msg.procType.type] - }; - rval += foldHeader(header); - } - if(msg.contentDomain) { - header = {name: 'Content-Domain', values: [msg.contentDomain]}; - rval += foldHeader(header); + this.trackHeader(buf.length) } - if(msg.dekInfo) { - header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]}; - if(msg.dekInfo.parameters) { - header.values.push(msg.dekInfo.parameters); + + trackHeader (len) { + this.headersSize += len + if (this.headersSize >= this.headersMaxSize) { + util.destroy(this.socket, new HeadersOverflowError()) } - rval += foldHeader(header); } - if(msg.headers) { - // encode all other headers - for(var i = 0; i < msg.headers.length; ++i) { - rval += foldHeader(msg.headers[i]); + onUpgrade (head) { + const { upgrade, client, socket, headers, statusCode } = this + + assert(upgrade) + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(!socket.destroyed) + assert(socket === client[kSocket]) + assert(!this.paused) + assert(request.upgrade || request.method === 'CONNECT') + + this.statusCode = null + this.statusText = '' + this.shouldKeepAlive = null + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + socket.unshift(head) + + socket[kParser].destroy() + socket[kParser] = null + + socket[kClient] = null + socket[kError] = null + socket + .removeListener('error', onSocketError) + .removeListener('readable', onSocketReadable) + .removeListener('end', onSocketEnd) + .removeListener('close', onSocketClose) + + client[kSocket] = null + client[kQueue][client[kRunningIdx]++] = null + client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) + + try { + request.onUpgrade(statusCode, headers, socket) + } catch (err) { + util.destroy(socket, err) } - } - // terminate header - if(msg.procType) { - rval += '\r\n'; + resume(client) } - // add body - rval += forge.util.encode64(msg.body, options.maxline || 64) + '\r\n'; - - rval += '-----END ' + msg.type + '-----\r\n'; - return rval; -}; + onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { + const { client, socket, headers, statusText } = this -/** - * Decodes (deserializes) all PEM messages found in the given string. - * - * @param str the PEM-formatted string to decode. 
- * - * @return the PEM message objects in an array. - */ -pem.decode = function(str) { - var rval = []; - - // split string into PEM messages (be lenient w/EOF on BEGIN line) - var rMessage = /\s*-----BEGIN ([A-Z0-9- ]+)-----\r?\n?([\x21-\x7e\s]+?(?:\r?\n\r?\n))?([:A-Za-z0-9+\/=\s]+?)-----END \1-----/g; - var rHeader = /([\x21-\x7e]+):\s*([\x21-\x7e\s^:]+)/; - var rCRLF = /\r?\n/; - var match; - while(true) { - match = rMessage.exec(str); - if(!match) { - break; + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 } - // accept "NEW CERTIFICATE REQUEST" as "CERTIFICATE REQUEST" - // https://datatracker.ietf.org/doc/html/rfc7468#section-7 - var type = match[1]; - if(type === 'NEW CERTIFICATE REQUEST') { - type = 'CERTIFICATE REQUEST'; + const request = client[kQueue][client[kRunningIdx]] + + /* istanbul ignore next: difficult to make a test case for */ + if (!request) { + return -1 } - var msg = { - type: type, - procType: null, - contentDomain: null, - dekInfo: null, - headers: [], - body: forge.util.decode64(match[3]) - }; - rval.push(msg); + assert(!this.upgrade) + assert(this.statusCode < 200) - // no headers - if(!match[2]) { - continue; + if (statusCode === 100) { + util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) + return -1 + } + + /* this can only happen if server is misbehaving */ + if (upgrade && !request.upgrade) { + util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) + return -1 } - // parse headers - var lines = match[2].split(rCRLF); - var li = 0; - while(match && li < lines.length) { - // get line, trim any rhs whitespace - var line = lines[li].replace(/\s+$/, ''); + assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) - // RFC2822 unfold any following folded lines - for(var nl = li + 1; nl < lines.length; ++nl) { - var next = lines[nl]; - if(!/\s/.test(next[0])) { - break; - } - line += next; - li = nl; + this.statusCode = statusCode + this.shouldKeepAlive = ( + shouldKeepAlive || + // Override llhttp value which does not allow keepAlive for HEAD. + (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + ) + + if (this.statusCode >= 200) { + const bodyTimeout = request.bodyTimeout != null + ? request.bodyTimeout + : client[kBodyTimeout] + this.setTimeout(bodyTimeout, TIMEOUT_BODY) + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } + } - // parse header - match = line.match(rHeader); - if(match) { - var header = {name: match[1], values: []}; - var values = match[2].split(','); - for(var vi = 0; vi < values.length; ++vi) { - header.values.push(ltrim(values[vi])); - } - - // Proc-Type must be the first header - if(!msg.procType) { - if(header.name !== 'Proc-Type') { - throw new Error('Invalid PEM formatted message. The first ' + - 'encapsulated header must be "Proc-Type".'); - } else if(header.values.length !== 2) { - throw new Error('Invalid PEM formatted message. The "Proc-Type" ' + - 'header must have two subfields.'); - } - msg.procType = {version: values[0], type: values[1]}; - } else if(!msg.contentDomain && header.name === 'Content-Domain') { - // special-case Content-Domain - msg.contentDomain = values[0] || ''; - } else if(!msg.dekInfo && header.name === 'DEK-Info') { - // special-case DEK-Info - if(header.values.length === 0) { - throw new Error('Invalid PEM formatted message. 
The "DEK-Info" ' + - 'header must have at least one subfield.'); - } - msg.dekInfo = {algorithm: values[0], parameters: values[1] || null}; + if (request.method === 'CONNECT') { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + if (upgrade) { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (this.shouldKeepAlive && client[kPipelining]) { + const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null + + if (keepAliveTimeout != null) { + const timeout = Math.min( + keepAliveTimeout - client[kKeepAliveTimeoutThreshold], + client[kKeepAliveMaxTimeout] + ) + if (timeout <= 0) { + socket[kReset] = true } else { - msg.headers.push(header); + client[kKeepAliveTimeoutValue] = timeout } + } else { + client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] } + } else { + // Stop more requests from being dispatched. + socket[kReset] = true + } - ++li; + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { + return -1 } - if(msg.procType === 'ENCRYPTED' && !msg.dekInfo) { - throw new Error('Invalid PEM formatted message. The "DEK-Info" ' + - 'header must be present if "Proc-Type" is "ENCRYPTED".'); + if (request.method === 'HEAD') { + return 1 + } + + if (statusCode < 200) { + return 1 + } + + if (socket[kBlocking]) { + socket[kBlocking] = false + resume(client) } - } - if(rval.length === 0) { - throw new Error('Invalid PEM formatted message.'); + return pause ? constants.ERROR.PAUSED : 0 } - return rval; -}; + onBody (buf) { + const { client, socket, statusCode, maxResponseSize } = this -function foldHeader(header) { - var rval = header.name + ': '; + if (socket.destroyed) { + return -1 + } - // ensure values with CRLF are folded - var values = []; - var insertSpace = function(match, $1) { - return ' ' + $1; - }; - for(var i = 0; i < header.values.length; ++i) { - values.push(header.values[i].replace(/^(\S+\r\n)/, insertSpace)); - } - rval += values.join(',') + '\r\n'; - - // do folding - var length = 0; - var candidate = -1; - for(var i = 0; i < rval.length; ++i, ++length) { - if(length > 65 && candidate !== -1) { - var insert = rval[candidate]; - if(insert === ',') { - ++candidate; - rval = rval.substr(0, candidate) + '\r\n ' + rval.substr(candidate); - } else { - rval = rval.substr(0, candidate) + - '\r\n' + insert + rval.substr(candidate + 1); + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - length = (i - candidate - 1); - candidate = -1; - ++i; - } else if(rval[i] === ' ' || rval[i] === '\t' || rval[i] === ',') { - candidate = i; + } + + assert(statusCode >= 200) + + if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { + util.destroy(socket, new ResponseExceededMaxSizeError()) + return -1 + } + + this.bytesRead += buf.length + + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED } } - return rval; -} + onMessageComplete () { + const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + + if (socket.destroyed && (!statusCode || shouldKeepAlive)) { + return -1 + } + + if (upgrade) { + return + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + 
assert(statusCode >= 100) + + this.statusCode = null + this.statusText = '' + this.bytesRead = 0 + this.contentLength = '' + this.keepAlive = '' + this.connection = '' + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (statusCode < 200) { + return + } -function ltrim(str) { - return str.replace(/^\s+/, ''); + /* istanbul ignore next: should be handled by llhttp? */ + if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { + util.destroy(socket, new ResponseContentLengthMismatchError()) + return -1 + } + + request.onComplete(headers) + + client[kQueue][client[kRunningIdx]++] = null + + if (socket[kWriting]) { + assert.strictEqual(client[kRunning], 0) + // Response completed before request. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (!shouldKeepAlive) { + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (socket[kReset] && client[kRunning] === 0) { + // Destroy socket once all requests have completed. + // The request at the tail of the pipeline is the one + // that requested reset and no further requests should + // have been queued since then. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (client[kPipelining] === 1) { + // We must wait a full event loop cycle to reuse this socket to make sure + // that non-spec compliant servers are not closing the connection even if they + // said they won't. + setImmediate(resume, client) + } else { + resume(client) + } + } } +function onParserTimeout (parser) { + const { socket, timeoutType, client } = parser -/***/ }), + /* istanbul ignore else */ + if (timeoutType === TIMEOUT_HEADERS) { + if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { + assert(!parser.paused, 'cannot be paused while waiting for headers') + util.destroy(socket, new HeadersTimeoutError()) + } + } else if (timeoutType === TIMEOUT_BODY) { + if (!parser.paused) { + util.destroy(socket, new BodyTimeoutError()) + } + } else if (timeoutType === TIMEOUT_IDLE) { + assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) + util.destroy(socket, new InformationalError('socket idle timeout')) + } +} -/***/ 7014: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function onSocketReadable () { + const { [kParser]: parser } = this + if (parser) { + parser.readMore() + } +} -/** - * Partial implementation of PKCS#1 v2.2: RSA-OEAP - * - * Modified but based on the following MIT and BSD licensed code: - * - * https://github.com/kjur/jsjws/blob/master/rsa.js: - * - * The 'jsjws'(JSON Web Signature JavaScript Library) License - * - * Copyright (c) 2012 Kenji Urushima - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - * THE SOFTWARE. - * - * http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain: - * - * RSAES-OAEP.js - * $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $ - * JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002) - * Copyright (C) Ellis Pritchard, Guardian Unlimited 2003. - * Contact: ellis@nukinetics.com - * Distributed under the BSD License. - * - * Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125 - * - * @author Evan Jones (http://evanjones.ca/) - * @author Dave Longley - * - * Copyright (c) 2013-2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(7821); -__nccwpck_require__(279); +function onSocketError (err) { + const { [kClient]: client, [kParser]: parser } = this -// shortcut for PKCS#1 API -var pkcs1 = module.exports = forge.pkcs1 = forge.pkcs1 || {}; + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') -/** - * Encode the given RSAES-OAEP message (M) using key, with optional label (L) - * and seed. - * - * This method does not perform RSA encryption, it only encodes the message - * using RSAES-OAEP. - * - * @param key the RSA key to use. - * @param message the message to encode. - * @param options the options to use: - * label an optional label to use. - * seed the seed to use. - * md the message digest object to use, undefined for SHA-1. - * mgf1 optional mgf1 parameters: - * md the message digest object to use for MGF1. - * - * @return the encoded message bytes. - */ -pkcs1.encode_rsa_oaep = function(key, message, options) { - // parse arguments - var label; - var seed; - var md; - var mgf1Md; - // legacy args (label, seed, md) - if(typeof options === 'string') { - label = options; - seed = arguments[3] || undefined; - md = arguments[4] || undefined; - } else if(options) { - label = options.label || undefined; - seed = options.seed || undefined; - md = options.md || undefined; - if(options.mgf1 && options.mgf1.md) { - mgf1Md = options.mgf1.md; - } - } - - // default OAEP to SHA-1 message digest - if(!md) { - md = forge.md.sha1.create(); - } else { - md.start(); + if (client[kHTTPConnVersion] !== 'h2') { + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded + // to the user. + if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so for as a valid response. 
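+      // Completing the message here and returning early means the reset is
+      // not surfaced as an error for this request.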
+ parser.onMessageComplete() + return + } } - // default MGF-1 to same as OAEP - if(!mgf1Md) { - mgf1Md = md; - } + this[kError] = err - // compute length in bytes and check output - var keyLength = Math.ceil(key.n.bitLength() / 8); - var maxLength = keyLength - 2 * md.digestLength - 2; - if(message.length > maxLength) { - var error = new Error('RSAES-OAEP input message length is too long.'); - error.length = message.length; - error.maxLength = maxLength; - throw error; - } + onError(this[kClient], err) +} - if(!label) { - label = ''; - } - md.update(label, 'raw'); - var lHash = md.digest(); +function onError (client, err) { + if ( + client[kRunning] === 0 && + err.code !== 'UND_ERR_INFO' && + err.code !== 'UND_ERR_SOCKET' + ) { + // Error is not caused by running request and not a recoverable + // socket error. + + assert(client[kPendingIdx] === client[kRunningIdx]) - var PS = ''; - var PS_length = maxLength - message.length; - for(var i = 0; i < PS_length; i++) { - PS += '\x00'; + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + assert(client[kSize] === 0) } +} - var DB = lHash.getBytes() + PS + '\x01' + message; +function onSocketEnd () { + const { [kParser]: parser, [kClient]: client } = this - if(!seed) { - seed = forge.random.getBytes(md.digestLength); - } else if(seed.length !== md.digestLength) { - var error = new Error('Invalid RSAES-OAEP seed. The seed length must ' + - 'match the digest length.'); - error.seedLength = seed.length; - error.digestLength = md.digestLength; - throw error; + if (client[kHTTPConnVersion] !== 'h2') { + if (parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return + } } - var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md); - var maskedDB = forge.util.xorBytes(DB, dbMask, DB.length); + util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +} - var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md); - var maskedSeed = forge.util.xorBytes(seed, seedMask, seed.length); +function onSocketClose () { + const { [kClient]: client, [kParser]: parser } = this - // return encoded message - return '\x00' + maskedSeed + maskedDB; -}; + if (client[kHTTPConnVersion] === 'h1' && parser) { + if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + } -/** - * Decode the given RSAES-OAEP encoded message (EM) using key, with optional - * label (L). - * - * This method does not perform RSA decryption, it only decodes the message - * using RSAES-OAEP. - * - * @param key the RSA key to use. - * @param em the encoded message to decode. - * @param options the options to use: - * label an optional label to use. - * md the message digest object to use for OAEP, undefined for SHA-1. - * mgf1 optional mgf1 parameters: - * md the message digest object to use for MGF1. - * - * @return the decoded message bytes. 
- */ -pkcs1.decode_rsa_oaep = function(key, em, options) { - // parse args - var label; - var md; - var mgf1Md; - // legacy args - if(typeof options === 'string') { - label = options; - md = arguments[3] || undefined; - } else if(options) { - label = options.label || undefined; - md = options.md || undefined; - if(options.mgf1 && options.mgf1.md) { - mgf1Md = options.mgf1.md; - } - } - - // compute length in bytes - var keyLength = Math.ceil(key.n.bitLength() / 8); - - if(em.length !== keyLength) { - var error = new Error('RSAES-OAEP encoded message length is invalid.'); - error.length = em.length; - error.expectedLength = keyLength; - throw error; - } - - // default OAEP to SHA-1 message digest - if(md === undefined) { - md = forge.md.sha1.create(); - } else { - md.start(); + this[kParser].destroy() + this[kParser] = null } - // default MGF-1 to same as OAEP - if(!mgf1Md) { - mgf1Md = md; - } + const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) - if(keyLength < 2 * md.digestLength + 2) { - throw new Error('RSAES-OAEP key is too short for the hash function.'); - } + client[kSocket] = null - if(!label) { - label = ''; - } - md.update(label, 'raw'); - var lHash = md.digest().getBytes(); + if (client.destroyed) { + assert(client[kPending] === 0) - // split the message into its parts - var y = em.charAt(0); - var maskedSeed = em.substring(1, md.digestLength + 1); - var maskedDB = em.substring(1 + md.digestLength); + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { + // Fail head of pipeline. + const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null - var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md); - var seed = forge.util.xorBytes(maskedSeed, seedMask, maskedSeed.length); + errorRequest(client, request, err) + } - var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md); - var db = forge.util.xorBytes(maskedDB, dbMask, maskedDB.length); + client[kPendingIdx] = client[kRunningIdx] - var lHashPrime = db.substring(0, md.digestLength); + assert(client[kRunning] === 0) - // constant time check that all values match what is expected - var error = (y !== '\x00'); + client.emit('disconnect', client[kUrl], [client], err) - // constant time check lHash vs lHashPrime - for(var i = 0; i < md.digestLength; ++i) { - error |= (lHash.charAt(i) !== lHashPrime.charAt(i)); - } + resume(client) +} - // "constant time" find the 0x1 byte separating the padding (zeros) from the - // message - // TODO: It must be possible to do this in a better/smarter way? - var in_ps = 1; - var index = md.digestLength; - for(var j = md.digestLength; j < db.length; j++) { - var code = db.charCodeAt(j); +async function connect (client) { + assert(!client[kConnecting]) + assert(!client[kSocket]) - var is_0 = (code & 0x1) ^ 0x1; + let { host, hostname, protocol, port } = client[kUrl] - // non-zero if not 0 or 1 in the ps section - var error_mask = in_ps ? 
0xfffe : 0x0000; - error |= (code & error_mask); + // Resolve ipv6 + if (hostname[0] === '[') { + const idx = hostname.indexOf(']') - // latch in_ps to zero after we find 0x1 - in_ps = in_ps & is_0; - index += in_ps; - } + assert(idx !== -1) + const ip = hostname.substring(1, idx) - if(error || db.charCodeAt(index) !== 0x1) { - throw new Error('Invalid RSAES-OAEP padding.'); + assert(net.isIP(ip)) + hostname = ip } - return db.substring(index + 1); -}; + client[kConnecting] = true -function rsa_mgf1(seed, maskLength, hash) { - // default to SHA-1 message digest - if(!hash) { - hash = forge.md.sha1.create(); - } - var t = ''; - var count = Math.ceil(maskLength / hash.digestLength); - for(var i = 0; i < count; ++i) { - var c = String.fromCharCode( - (i >> 24) & 0xFF, (i >> 16) & 0xFF, (i >> 8) & 0xFF, i & 0xFF); - hash.start(); - hash.update(seed + c); - t += hash.digest().getBytes(); + if (channels.beforeConnect.hasSubscribers) { + channels.beforeConnect.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector] + }) } - return t.substring(0, maskLength); -} + try { + const socket = await new Promise((resolve, reject) => { + client[kConnector]({ + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, (err, socket) => { + if (err) { + reject(err) + } else { + resolve(socket) + } + }) + }) -/***/ }), + if (client.destroyed) { + util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) + return + } -/***/ 466: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + client[kConnecting] = false -/** - * Javascript implementation of PKCS#12. - * - * @author Dave Longley - * @author Stefan Siegl - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - * Copyright (c) 2012 Stefan Siegl - * - * The ASN.1 representation of PKCS#12 is as follows - * (see ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12-tc1.pdf for details) - * - * PFX ::= SEQUENCE { - * version INTEGER {v3(3)}(v3,...), - * authSafe ContentInfo, - * macData MacData OPTIONAL - * } - * - * MacData ::= SEQUENCE { - * mac DigestInfo, - * macSalt OCTET STRING, - * iterations INTEGER DEFAULT 1 - * } - * Note: The iterations default is for historical reasons and its use is - * deprecated. A higher value, like 1024, is recommended. - * - * DigestInfo is defined in PKCS#7 as follows: - * - * DigestInfo ::= SEQUENCE { - * digestAlgorithm DigestAlgorithmIdentifier, - * digest Digest - * } - * - * DigestAlgorithmIdentifier ::= AlgorithmIdentifier - * - * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters - * for the algorithm, if any. In the case of SHA1 there is none. 
- * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * Digest ::= OCTET STRING - * - * - * ContentInfo ::= SEQUENCE { - * contentType ContentType, - * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL - * } - * - * ContentType ::= OBJECT IDENTIFIER - * - * AuthenticatedSafe ::= SEQUENCE OF ContentInfo - * -- Data if unencrypted - * -- EncryptedData if password-encrypted - * -- EnvelopedData if public key-encrypted - * - * - * SafeContents ::= SEQUENCE OF SafeBag - * - * SafeBag ::= SEQUENCE { - * bagId BAG-TYPE.&id ({PKCS12BagSet}) - * bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}), - * bagAttributes SET OF PKCS12Attribute OPTIONAL - * } - * - * PKCS12Attribute ::= SEQUENCE { - * attrId ATTRIBUTE.&id ({PKCS12AttrSet}), - * attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId}) - * } -- This type is compatible with the X.500 type 'Attribute' - * - * PKCS12AttrSet ATTRIBUTE ::= { - * friendlyName | -- from PKCS #9 - * localKeyId, -- from PKCS #9 - * ... -- Other attributes are allowed - * } - * - * CertBag ::= SEQUENCE { - * certId BAG-TYPE.&id ({CertTypes}), - * certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId}) - * } - * - * x509Certificate BAG-TYPE ::= {OCTET STRING IDENTIFIED BY {certTypes 1}} - * -- DER-encoded X.509 certificate stored in OCTET STRING - * - * sdsiCertificate BAG-TYPE ::= {IA5String IDENTIFIED BY {certTypes 2}} - * -- Base64-encoded SDSI certificate stored in IA5String - * - * CertTypes BAG-TYPE ::= { - * x509Certificate | - * sdsiCertificate, - * ... -- For future extensions - * } - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(5104); -__nccwpck_require__(1925); -__nccwpck_require__(266); -__nccwpck_require__(1281); -__nccwpck_require__(7821); -__nccwpck_require__(3921); -__nccwpck_require__(279); -__nccwpck_require__(8339); -__nccwpck_require__(8180); - -// shortcut for asn.1 & PKI API -var asn1 = forge.asn1; -var pki = forge.pki; - -// shortcut for PKCS#12 API -var p12 = module.exports = forge.pkcs12 = forge.pkcs12 || {}; - -var contentInfoValidator = { - name: 'ContentInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, // a ContentInfo - constructed: true, - value: [{ - name: 'ContentInfo.contentType', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'contentType' - }, { - name: 'ContentInfo.content', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - constructed: true, - captureAsn1: 'content' - }] -}; + assert(socket) -var pfxValidator = { - name: 'PFX', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PFX.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, - contentInfoValidator, { - name: 'PFX.macData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - captureAsn1: 'mac', - value: [{ - name: 'PFX.macData.mac', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, // DigestInfo - constructed: true, - value: [{ - name: 'PFX.macData.mac.digestAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, // DigestAlgorithmIdentifier - constructed: true, - value: [{ - name: 'PFX.macData.mac.digestAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'macAlgorithm' - }, { - name: 
'PFX.macData.mac.digestAlgorithm.parameters', - tagClass: asn1.Class.UNIVERSAL, - captureAsn1: 'macAlgorithmParameters' - }] - }, { - name: 'PFX.macData.mac.digest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'macDigest' - }] - }, { - name: 'PFX.macData.macSalt', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'macSalt' - }, { - name: 'PFX.macData.iterations', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - optional: true, - capture: 'macIterations' - }] - }] -}; + const isH2 = socket.alpnProtocol === 'h2' + if (isH2) { + if (!h2ExperimentalWarned) { + h2ExperimentalWarned = true + process.emitWarning('H2 support is experimental, expect them to change at any time.', { + code: 'UNDICI-H2' + }) + } -var safeBagValidator = { - name: 'SafeBag', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SafeBag.bagId', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'bagId' - }, { - name: 'SafeBag.bagValue', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - constructed: true, - captureAsn1: 'bagValue' - }, { - name: 'SafeBag.bagAttributes', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - optional: true, - capture: 'bagAttributes' - }] -}; + const session = http2.connect(client[kUrl], { + createConnection: () => socket, + peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams + }) -var attributeValidator = { - name: 'Attribute', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'Attribute.attrId', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'oid' - }, { - name: 'Attribute.attrValues', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - capture: 'values' - }] -}; + client[kHTTPConnVersion] = 'h2' + session[kClient] = client + session[kSocket] = socket + session.on('error', onHttp2SessionError) + session.on('frameError', onHttp2FrameError) + session.on('end', onHttp2SessionEnd) + session.on('goaway', onHTTP2GoAway) + session.on('close', onSocketClose) + session.unref() + + client[kHTTP2Session] = session + socket[kHTTP2Session] = session + } else { + if (!llhttpInstance) { + llhttpInstance = await llhttpPromise + llhttpPromise = null + } -var certBagValidator = { - name: 'CertBag', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'CertBag.certId', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'certId' - }, { - name: 'CertBag.certValue', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - constructed: true, - /* So far we only support X.509 certificates (which are wrapped in - an OCTET STRING, hence hard code that here). 
*/ - value: [{ - name: 'CertBag.certValue[0]', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.OCTETSTRING, - constructed: false, - capture: 'cert' - }] - }] -}; + socket[kNoRef] = false + socket[kWriting] = false + socket[kReset] = false + socket[kBlocking] = false + socket[kParser] = new Parser(client, socket, llhttpInstance) + } + + socket[kCounter] = 0 + socket[kMaxRequests] = client[kMaxRequests] + socket[kClient] = client + socket[kError] = null + + socket + .on('error', onSocketError) + .on('readable', onSocketReadable) + .on('end', onSocketEnd) + .on('close', onSocketClose) + + client[kSocket] = socket + + if (channels.connected.hasSubscribers) { + channels.connected.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + socket + }) + } + client.emit('connect', client[kUrl], [client]) + } catch (err) { + if (client.destroyed) { + return + } -/** - * Search SafeContents structure for bags with matching attributes. - * - * The search can optionally be narrowed by a certain bag type. - * - * @param safeContents the SafeContents structure to search in. - * @param attrName the name of the attribute to compare against. - * @param attrValue the attribute value to search for. - * @param [bagType] bag type to narrow search by. - * - * @return an array of matching bags. - */ -function _getBagsByAttribute(safeContents, attrName, attrValue, bagType) { - var result = []; + client[kConnecting] = false - for(var i = 0; i < safeContents.length; i++) { - for(var j = 0; j < safeContents[i].safeBags.length; j++) { - var bag = safeContents[i].safeBags[j]; - if(bagType !== undefined && bag.type !== bagType) { - continue; - } - // only filter by bag type, no attribute specified - if(attrName === null) { - result.push(bag); - continue; - } - if(bag.attributes[attrName] !== undefined && - bag.attributes[attrName].indexOf(attrValue) >= 0) { - result.push(bag); + if (channels.connectError.hasSubscribers) { + channels.connectError.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + error: err + }) + } + + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + assert(client[kRunning] === 0) + while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { + const request = client[kQueue][client[kPendingIdx]++] + errorRequest(client, request, err) } + } else { + onError(client, err) } + + client.emit('connectionError', client[kUrl], [client], err) } - return result; + resume(client) } -/** - * Converts a PKCS#12 PFX in ASN.1 notation into a PFX object. - * - * @param obj The PKCS#12 PFX in ASN.1 notation. - * @param strict true to use strict DER decoding, false not to (default: true). - * @param {String} password Password to decrypt with (optional). - * - * @return PKCS#12 PFX object. 
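When ALPN negotiates h2, the branch added above hands the already-connected TLS socket to node:http2 through createConnection instead of letting http2 dial its own connection. A reduced sketch of that pattern, assuming `tlsSocket` is an established tls.TLSSocket whose alpnProtocol came back as 'h2' (the function and variable names here are illustrative):

'use strict'
const http2 = require('http2')

// Attach an HTTP/2 client session to an existing TLS socket, mirroring the
// h2 branch above.
function attachH2Session (origin, tlsSocket) {
  const session = http2.connect(origin, {
    // Reuse the socket we already have instead of opening a new one.
    createConnection: () => tlsSocket
  })
  session.on('error', (err) => {
    // Session-level errors; the bundle above routes these back to the client.
    console.error('h2 session error', err)
  })
  // Do not keep the event loop alive for an idle session.
  session.unref()
  return session
}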
- */ -p12.pkcs12FromAsn1 = function(obj, strict, password) { - // handle args - if(typeof strict === 'string') { - password = strict; - strict = true; - } else if(strict === undefined) { - strict = true; +function emitDrain (client) { + client[kNeedDrain] = 0 + client.emit('drain', client[kUrl], [client]) +} + +function resume (client, sync) { + if (client[kResuming] === 2) { + return } - // validate PFX and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, pfxValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#12 PFX. ' + - 'ASN.1 object is not an PKCS#12 PFX.'); - error.errors = error; - throw error; + client[kResuming] = 2 + + _resume(client, sync) + client[kResuming] = 0 + + if (client[kRunningIdx] > 256) { + client[kQueue].splice(0, client[kRunningIdx]) + client[kPendingIdx] -= client[kRunningIdx] + client[kRunningIdx] = 0 } +} - var pfx = { - version: capture.version.charCodeAt(0), - safeContents: [], +function _resume (client, sync) { + while (true) { + if (client.destroyed) { + assert(client[kPending] === 0) + return + } - /** - * Gets bags with matching attributes. - * - * @param filter the attributes to filter by: - * [localKeyId] the localKeyId to search for. - * [localKeyIdHex] the localKeyId in hex to search for. - * [friendlyName] the friendly name to search for. - * [bagType] bag type to narrow each attribute search by. - * - * @return a map of attribute type to an array of matching bags or, if no - * attribute was given but a bag type, the map key will be the - * bag type. - */ - getBags: function(filter) { - var rval = {}; - - var localKeyId; - if('localKeyId' in filter) { - localKeyId = filter.localKeyId; - } else if('localKeyIdHex' in filter) { - localKeyId = forge.util.hexToBytes(filter.localKeyIdHex); - } + if (client[kClosedResolve] && !client[kSize]) { + client[kClosedResolve]() + client[kClosedResolve] = null + return + } - // filter on bagType only - if(localKeyId === undefined && !('friendlyName' in filter) && - 'bagType' in filter) { - rval[filter.bagType] = _getBagsByAttribute( - pfx.safeContents, null, null, filter.bagType); - } + const socket = client[kSocket] - if(localKeyId !== undefined) { - rval.localKeyId = _getBagsByAttribute( - pfx.safeContents, 'localKeyId', - localKeyId, filter.bagType); + if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { + if (client[kSize] === 0) { + if (!socket[kNoRef] && socket.unref) { + socket.unref() + socket[kNoRef] = true + } + } else if (socket[kNoRef] && socket.ref) { + socket.ref() + socket[kNoRef] = false } - if('friendlyName' in filter) { - rval.friendlyName = _getBagsByAttribute( - pfx.safeContents, 'friendlyName', - filter.friendlyName, filter.bagType); + + if (client[kSize] === 0) { + if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + } + } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { + if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { + const request = client[kQueue][client[kRunningIdx]] + const headersTimeout = request.headersTimeout != null + ? request.headersTimeout + : client[kHeadersTimeout] + socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + } } + } - return rval; - }, + if (client[kBusy]) { + client[kNeedDrain] = 2 + } else if (client[kNeedDrain] === 2) { + if (sync) { + client[kNeedDrain] = 1 + process.nextTick(emitDrain, client) + } else { + emitDrain(client) + } + continue + } - /** - * DEPRECATED: use getBags() instead. 
- * - * Get bags with matching friendlyName attribute. - * - * @param friendlyName the friendly name to search for. - * @param [bagType] bag type to narrow search by. - * - * @return an array of bags with matching friendlyName attribute. - */ - getBagsByFriendlyName: function(friendlyName, bagType) { - return _getBagsByAttribute( - pfx.safeContents, 'friendlyName', friendlyName, bagType); - }, + if (client[kPending] === 0) { + return + } - /** - * DEPRECATED: use getBags() instead. - * - * Get bags with matching localKeyId attribute. - * - * @param localKeyId the localKeyId to search for. - * @param [bagType] bag type to narrow search by. - * - * @return an array of bags with matching localKeyId attribute. - */ - getBagsByLocalKeyId: function(localKeyId, bagType) { - return _getBagsByAttribute( - pfx.safeContents, 'localKeyId', localKeyId, bagType); + if (client[kRunning] >= (client[kPipelining] || 1)) { + return } - }; - if(capture.version.charCodeAt(0) !== 3) { - var error = new Error('PKCS#12 PFX of version other than 3 not supported.'); - error.version = capture.version.charCodeAt(0); - throw error; - } + const request = client[kQueue][client[kPendingIdx]] - if(asn1.derToOid(capture.contentType) !== pki.oids.data) { - var error = new Error('Only PKCS#12 PFX in password integrity mode supported.'); - error.oid = asn1.derToOid(capture.contentType); - throw error; - } + if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { + if (client[kRunning] > 0) { + return + } - var data = capture.content.value[0]; - if(data.tagClass !== asn1.Class.UNIVERSAL || - data.type !== asn1.Type.OCTETSTRING) { - throw new Error('PKCS#12 authSafe content data is not an OCTET STRING.'); - } - data = _decodePkcs7Data(data); + client[kServerName] = request.servername - // check for MAC - if(capture.mac) { - var md = null; - var macKeyBytes = 0; - var macAlgorithm = asn1.derToOid(capture.macAlgorithm); - switch(macAlgorithm) { - case pki.oids.sha1: - md = forge.md.sha1.create(); - macKeyBytes = 20; - break; - case pki.oids.sha256: - md = forge.md.sha256.create(); - macKeyBytes = 32; - break; - case pki.oids.sha384: - md = forge.md.sha384.create(); - macKeyBytes = 48; - break; - case pki.oids.sha512: - md = forge.md.sha512.create(); - macKeyBytes = 64; - break; - case pki.oids.md5: - md = forge.md.md5.create(); - macKeyBytes = 16; - break; + if (socket && socket.servername !== request.servername) { + util.destroy(socket, new InformationalError('servername changed')) + return + } } - if(md === null) { - throw new Error('PKCS#12 uses unsupported MAC algorithm: ' + macAlgorithm); + + if (client[kConnecting]) { + return } - // verify MAC (iterations default to 1) - var macSalt = new forge.util.ByteBuffer(capture.macSalt); - var macIterations = (('macIterations' in capture) ? - parseInt(forge.util.bytesToHex(capture.macIterations), 16) : 1); - var macKey = p12.generateKey( - password, macSalt, 3, macIterations, macKeyBytes, md); - var mac = forge.hmac.create(); - mac.start(md, macKey); - mac.update(data.value); - var macValue = mac.getMac(); - if(macValue.getBytes() !== capture.macDigest) { - throw new Error('PKCS#12 MAC could not be verified. Invalid password?'); + if (!socket && !client[kHTTP2Session]) { + connect(client) + return } - } - _decodeAuthenticatedSafe(pfx, data.value, strict, password); - return pfx; -}; + if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { + return + } -/** - * Decodes PKCS#7 Data. 
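For context on the PKCS#12 API carried by the old bundle (and removed by this diff), here is a usage sketch of pkcs12FromAsn1 and the getBags filter documented above; node-forge is assumed to be available as a normal dependency, and `p12Der` is assumed to be a DER byte string (for example the output of forge.util.decode64):

'use strict'
const forge = require('node-forge')

// Decode a DER-encoded PFX and return its certificate bags, using the
// getBags({ bagType }) filter described in the removed code above.
function listCertBags (p12Der, password) {
  const p12Asn1 = forge.asn1.fromDer(p12Der)
  // Signature as shown above: (obj, strict, password); strict defaults to true.
  const p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, true, password)
  const byType = p12.getBags({ bagType: forge.pki.oids.certBag })
  // With only bagType given, the result map is keyed by that bag type OID.
  return byType[forge.pki.oids.certBag] || []
}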
PKCS#7 (RFC 2315) defines "Data" as an OCTET STRING, - * but it is sometimes an OCTET STRING that is composed/constructed of chunks, - * each its own OCTET STRING. This is BER-encoding vs. DER-encoding. This - * function transforms this corner-case into the usual simple, - * non-composed/constructed OCTET STRING. - * - * This function may be moved to ASN.1 at some point to better deal with - * more BER-encoding issues, should they arise. - * - * @param data the ASN.1 Data object to transform. - */ -function _decodePkcs7Data(data) { - // handle special case of "chunked" data content: an octet string composed - // of other octet strings - if(data.composed || data.constructed) { - var value = forge.util.createBuffer(); - for(var i = 0; i < data.value.length; ++i) { - value.putBytes(data.value[i].value); + if (client[kRunning] > 0 && !request.idempotent) { + // Non-idempotent request cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { + // Don't dispatch an upgrade until all preceding requests have completed. + // A misbehaving server might upgrade the connection before all pipelined + // request has completed. + return + } + + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && + (util.isStream(request.body) || util.isAsyncIterable(request.body))) { + // Request with stream or iterator body can error while other requests + // are inflight and indirectly error those as well. + // Ensure this doesn't happen by waiting for inflight + // to complete before dispatching. + + // Request with stream or iterator body cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (!request.aborted && write(client, request)) { + client[kPendingIdx]++ + } else { + client[kQueue].splice(client[kPendingIdx], 1) } - data.composed = data.constructed = false; - data.value = value.getBytes(); } - return data; } -/** - * Decode PKCS#12 AuthenticatedSafe (BER encoded) into PFX object. - * - * The AuthenticatedSafe is a BER-encoded SEQUENCE OF ContentInfo. - * - * @param pfx The PKCS#12 PFX object to fill. - * @param {String} authSafe BER-encoded AuthenticatedSafe. - * @param strict true to use strict DER decoding, false not to. - * @param {String} password Password to decrypt with (optional). 
- */ -function _decodeAuthenticatedSafe(pfx, authSafe, strict, password) { - authSafe = asn1.fromDer(authSafe, strict); /* actually it's BER encoded */ +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} - if(authSafe.tagClass !== asn1.Class.UNIVERSAL || - authSafe.type !== asn1.Type.SEQUENCE || - authSafe.constructed !== true) { - throw new Error('PKCS#12 AuthenticatedSafe expected to be a ' + - 'SEQUENCE OF ContentInfo'); +function write (client, request) { + if (client[kHTTPConnVersion] === 'h2') { + writeH2(client, client[kHTTP2Session], request) + return } - for(var i = 0; i < authSafe.value.length; i++) { - var contentInfo = authSafe.value[i]; + const { body, method, path, host, upgrade, headers, blocking, reset } = request - // validate contentInfo and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(contentInfo, contentInfoValidator, capture, errors)) { - var error = new Error('Cannot read ContentInfo.'); - error.errors = errors; - throw error; - } + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 - var obj = { - encrypted: false - }; - var safeContents = null; - var data = capture.content.value[0]; - switch(asn1.derToOid(capture.contentType)) { - case pki.oids.data: - if(data.tagClass !== asn1.Class.UNIVERSAL || - data.type !== asn1.Type.OCTETSTRING) { - throw new Error('PKCS#12 SafeContents Data is not an OCTET STRING.'); - } - safeContents = _decodePkcs7Data(data).value; - break; - case pki.oids.encryptedData: - safeContents = _decryptSafeContents(data, password); - obj.encrypted = true; - break; - default: - var error = new Error('Unsupported PKCS#12 contentType.'); - error.contentType = asn1.derToOid(capture.contentType); - throw error; - } + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. - obj.safeBags = _decodeSafeContents(safeContents, strict, password); - pfx.safeContents.push(obj); - } -} + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) -/** - * Decrypt PKCS#7 EncryptedData structure. - * - * @param data ASN.1 encoded EncryptedContentInfo object. - * @param password The user-provided password. - * - * @return The decrypted SafeContents (ASN.1 object). - */ -function _decryptSafeContents(data, password) { - var capture = {}; - var errors = []; - if(!asn1.validate( - data, forge.pkcs7.asn1.encryptedDataValidator, capture, errors)) { - var error = new Error('Cannot read EncryptedContentInfo.'); - error.errors = errors; - throw error; + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. 
+ body.read(0) } - var oid = asn1.derToOid(capture.contentType); - if(oid !== pki.oids.data) { - var error = new Error( - 'PKCS#12 EncryptedContentInfo ContentType is not Data.'); - error.oid = oid; - throw error; + const bodyLength = util.bodyLength(body) + + let contentLength = bodyLength + + if (contentLength === null) { + contentLength = request.contentLength } - // get cipher - oid = asn1.derToOid(capture.encAlgorithm); - var cipher = pki.pbe.getCipher(oid, capture.encParameter, password); + if (contentLength === 0 && !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } - // get encrypted data - var encryptedContentAsn1 = _decodePkcs7Data(capture.encryptedContentAsn1); - var encrypted = forge.util.createBuffer(encryptedContentAsn1.value); + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } - cipher.update(encrypted); - if(!cipher.finish()) { - throw new Error('Failed to decrypt PKCS#12 SafeContents.'); + process.emitWarning(new RequestContentLengthMismatchError()) } - return cipher.output.getBytes(); -} + const socket = client[kSocket] -/** - * Decode PKCS#12 SafeContents (BER-encoded) into array of Bag objects. - * - * The safeContents is a BER-encoded SEQUENCE OF SafeBag. - * - * @param {String} safeContents BER-encoded safeContents. - * @param strict true to use strict DER decoding, false not to. - * @param {String} password Password to decrypt with (optional). - * - * @return {Array} Array of Bag objects. - */ -function _decodeSafeContents(safeContents, strict, password) { - // if strict and no safe contents, return empty safes - if(!strict && safeContents.length === 0) { - return []; + try { + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + + util.destroy(socket, new InformationalError('aborted')) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false } - // actually it's BER-encoded - safeContents = asn1.fromDer(safeContents, strict); + if (method === 'HEAD') { + // https://github.com/mcollina/undici/issues/258 + // Close after a HEAD request to interop with misbehaving servers + // that may send a body in the response. - if(safeContents.tagClass !== asn1.Class.UNIVERSAL || - safeContents.type !== asn1.Type.SEQUENCE || - safeContents.constructed !== true) { - throw new Error( - 'PKCS#12 SafeContents expected to be a SEQUENCE OF SafeBag.'); + socket[kReset] = true } - var res = []; - for(var i = 0; i < safeContents.value.length; i++) { - var safeBag = safeContents.value[i]; + if (upgrade || method === 'CONNECT') { + // On CONNECT or upgrade, block pipeline from dispatching further + // requests on this connection. 
- // validate SafeBag and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(safeBag, safeBagValidator, capture, errors)) { - var error = new Error('Cannot read SafeBag.'); - error.errors = errors; - throw error; - } + socket[kReset] = true + } - /* Create bag object and push to result array. */ - var bag = { - type: asn1.derToOid(capture.bagId), - attributes: _decodeBagAttributes(capture.bagAttributes) - }; - res.push(bag); - - var validator, decoder; - var bagAsn1 = capture.bagValue.value[0]; - switch(bag.type) { - case pki.oids.pkcs8ShroudedKeyBag: - /* bagAsn1 has a EncryptedPrivateKeyInfo, which we need to decrypt. - Afterwards we can handle it like a keyBag, - which is a PrivateKeyInfo. */ - bagAsn1 = pki.decryptPrivateKeyInfo(bagAsn1, password); - if(bagAsn1 === null) { - throw new Error( - 'Unable to decrypt PKCS#8 ShroudedKeyBag, wrong password?'); - } - - /* fall through */ - case pki.oids.keyBag: - /* A PKCS#12 keyBag is a simple PrivateKeyInfo as understood by our - PKI module, hence we don't have to do validation/capturing here, - just pass what we already got. */ - try { - bag.key = pki.privateKeyFromAsn1(bagAsn1); - } catch(e) { - // ignore unknown key type, pass asn1 value - bag.key = null; - bag.asn1 = bagAsn1; - } - continue; /* Nothing more to do. */ - - case pki.oids.certBag: - /* A PKCS#12 certBag can wrap both X.509 and sdsi certificates. - Therefore put the SafeBag content through another validator to - capture the fields. Afterwards check & store the results. */ - validator = certBagValidator; - decoder = function() { - if(asn1.derToOid(capture.certId) !== pki.oids.x509Certificate) { - var error = new Error( - 'Unsupported certificate type, only X.509 supported.'); - error.oid = asn1.derToOid(capture.certId); - throw error; - } + if (reset != null) { + socket[kReset] = reset + } - // true=produce cert hash - var certAsn1 = asn1.fromDer(capture.cert, strict); - try { - bag.cert = pki.certificateFromAsn1(certAsn1, true); - } catch(e) { - // ignore unknown cert type, pass asn1 value - bag.cert = null; - bag.asn1 = certAsn1; - } - }; - break; + if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { + socket[kReset] = true + } - default: - var error = new Error('Unsupported PKCS#12 SafeBag type.'); - error.oid = bag.type; - throw error; - } + if (blocking) { + socket[kBlocking] = true + } - /* Validate SafeBag value (i.e. CertBag, etc.) and capture data if needed. */ - if(validator !== undefined && - !asn1.validate(bagAsn1, validator, capture, errors)) { - var error = new Error('Cannot read PKCS#12 ' + validator.name); - error.errors = errors; - throw error; - } + let header = `${method} ${path} HTTP/1.1\r\n` - /* Call decoder function from above to store the results. */ - decoder(); + if (typeof host === 'string') { + header += `host: ${host}\r\n` + } else { + header += client[kHostHeader] } - return res; -} + if (upgrade) { + header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` + } else if (client[kPipelining] && !socket[kReset]) { + header += 'connection: keep-alive\r\n' + } else { + header += 'connection: close\r\n' + } -/** - * Decode PKCS#12 SET OF PKCS12Attribute into JavaScript object. - * - * @param attributes SET OF PKCS12Attribute (ASN.1 object). - * - * @return the decoded attributes. 
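The header-building branch added above assembles the HTTP/1.1 request head by hand: request line, host, then a connection header chosen from upgrade, keep-alive, or close. A simplified sketch of the same shape, with the client/socket state flags reduced to plain booleans for illustration:

'use strict'

// Build an HTTP/1.1 request head the way the write() path above does.
function buildRequestHead ({ method, path, host, upgrade, keepAlive }) {
  let header = `${method} ${path} HTTP/1.1\r\n`
  header += `host: ${host}\r\n`
  if (upgrade) {
    header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n`
  } else if (keepAlive) {
    header += 'connection: keep-alive\r\n'
  } else {
    header += 'connection: close\r\n'
  }
  return header
}

// buildRequestHead({ method: 'GET', path: '/', host: 'example.com', keepAlive: true })
// -> 'GET / HTTP/1.1\r\nhost: example.com\r\nconnection: keep-alive\r\n'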
- */ -function _decodeBagAttributes(attributes) { - var decodedAttrs = {}; - - if(attributes !== undefined) { - for(var i = 0; i < attributes.length; ++i) { - var capture = {}; - var errors = []; - if(!asn1.validate(attributes[i], attributeValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#12 BagAttribute.'); - error.errors = errors; - throw error; - } + if (headers) { + header += headers + } - var oid = asn1.derToOid(capture.oid); - if(pki.oids[oid] === undefined) { - // unsupported attribute type, ignore. - continue; - } + if (channels.sendHeaders.hasSubscribers) { + channels.sendHeaders.publish({ request, headers: header, socket }) + } - decodedAttrs[pki.oids[oid]] = []; - for(var j = 0; j < capture.values.length; ++j) { - decodedAttrs[pki.oids[oid]].push(capture.values[j].value); - } + /* istanbul ignore else: assertion */ + if (!body || bodyLength === 0) { + if (contentLength === 0) { + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + assert(contentLength === null, 'no body must not have content length') + socket.write(`${header}\r\n`, 'latin1') + } + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(body) + socket.uncork() + request.onBodySent(body) + request.onRequestSent() + if (!expectsPayload) { + socket[kReset] = true + } + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) + } else { + writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) } + } else if (util.isStream(body)) { + writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) + } else if (util.isIterable(body)) { + writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) + } else { + assert(false) } - return decodedAttrs; + return true } -/** - * Wraps a private key and certificate in a PKCS#12 PFX wrapper. If a - * password is provided then the private key will be encrypted. - * - * An entire certificate chain may also be included. To do this, pass - * an array for the "cert" parameter where the first certificate is - * the one that is paired with the private key and each subsequent one - * verifies the previous one. The certificates may be in PEM format or - * have been already parsed by Forge. - * - * @todo implement password-based-encryption for the whole package - * - * @param key the private key. - * @param cert the certificate (may be an array of certificates in order - * to specify a certificate chain). - * @param password the password to use, null for none. - * @param options: - * algorithm the encryption algorithm to use - * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'. - * count the iteration count to use. - * saltSize the salt size to use. - * useMac true to include a MAC, false not to, defaults to true. - * localKeyId the local key ID to use, in hex. - * friendlyName the friendly name to use. - * generateLocalKeyId true to generate a random local key ID, - * false not to, defaults to true. - * - * @return the PKCS#12 PFX ASN.1 object. 
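The write() path above only advertises a content-length when the method is expected to carry a payload (RFC 7230 §3.3.2) or when a body is actually present; otherwise the header is dropped entirely. A small sketch of that decision, separate from the bundle's internal state:

'use strict'

// Decide which content-length (if any) to serialize for a request,
// following the rule applied in the write() path above.
function resolveContentLength (method, bodyLength) {
  const expectsPayload = method === 'PUT' || method === 'POST' || method === 'PATCH'
  if (bodyLength === 0 && !expectsPayload) {
    // RFC 7230 §3.3.2: do not advertise a body the method does not anticipate.
    return null
  }
  return bodyLength
}

// resolveContentLength('GET', 0)   -> null  (header omitted)
// resolveContentLength('POST', 0)  -> 0     (explicit "content-length: 0")
// resolveContentLength('PUT', 128) -> 128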
- */ -p12.toPkcs12Asn1 = function(key, cert, password, options) { - // set default options - options = options || {}; - options.saltSize = options.saltSize || 8; - options.count = options.count || 2048; - options.algorithm = options.algorithm || options.encAlgorithm || 'aes128'; - if(!('useMac' in options)) { - options.useMac = true; - } - if(!('localKeyId' in options)) { - options.localKeyId = null; - } - if(!('generateLocalKeyId' in options)) { - options.generateLocalKeyId = true; - } - - var localKeyId = options.localKeyId; - var bagAttrs; - if(localKeyId !== null) { - localKeyId = forge.util.hexToBytes(localKeyId); - } else if(options.generateLocalKeyId) { - // use SHA-1 of paired cert, if available - if(cert) { - var pairedCert = forge.util.isArray(cert) ? cert[0] : cert; - if(typeof pairedCert === 'string') { - pairedCert = pki.certificateFromPem(pairedCert); +function writeH2 (client, session, request) { + const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request + + let headers + if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) + else headers = reqHeaders + + if (upgrade) { + errorRequest(client, request, new Error('Upgrade not supported for H2')) + return false + } + + try { + // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? + request.onConnect((err) => { + if (request.aborted || request.completed) { + return } - var sha1 = forge.md.sha1.create(); - sha1.update(asn1.toDer(pki.certificateToAsn1(pairedCert)).getBytes()); - localKeyId = sha1.digest().getBytes(); - } else { - // FIXME: consider using SHA-1 of public key (which can be generated - // from private key components), see: cert.generateSubjectKeyIdentifier - // generate random bytes - localKeyId = forge.random.getBytes(20); - } - } - - var attrs = []; - if(localKeyId !== null) { - attrs.push( - // localKeyID - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // attrId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.localKeyId).getBytes()), - // attrValues - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - localKeyId) - ]) - ])); - } - if('friendlyName' in options) { - attrs.push( - // friendlyName - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // attrId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.friendlyName).getBytes()), - // attrValues - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BMPSTRING, false, - options.friendlyName) - ]) - ])); + + errorRequest(client, request, err || new RequestAbortedError()) + }) + } catch (err) { + errorRequest(client, request, err) } - if(attrs.length > 0) { - bagAttrs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); + if (request.aborted) { + return false } - // collect contents for AuthenticatedSafe - var contents = []; + /** @type {import('node:http2').ClientHttp2Stream} */ + let stream + const h2State = client[kHTTP2SessionState] - // create safe bag(s) for certificate chain - var chain = []; - if(cert !== null) { - if(forge.util.isArray(cert)) { - chain = cert; - } else { - chain = [cert]; - } - } - - var certSafeBags = []; - for(var i = 0; i < chain.length; ++i) { - // convert cert from PEM as necessary - cert = chain[i]; - if(typeof cert === 'string') { - cert = pki.certificateFromPem(cert); - } - - // SafeBag - var 
certBagAttrs = (i === 0) ? bagAttrs : undefined; - var certAsn1 = pki.certificateToAsn1(cert); - var certSafeBag = - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // bagId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.certBag).getBytes()), - // bagValue - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // CertBag - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // certId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.x509Certificate).getBytes()), - // certValue (x509Certificate) - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(certAsn1).getBytes()) - ])])]), - // bagAttributes (OPTIONAL) - certBagAttrs - ]); - certSafeBags.push(certSafeBag); - } - - if(certSafeBags.length > 0) { - // SafeContents - var certSafeContents = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, certSafeBags); - - // ContentInfo - var certCI = - // PKCS#7 ContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // contentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - // OID for the content type is 'data' - asn1.oidToDer(pki.oids.data).getBytes()), - // content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(certSafeContents).getBytes()) - ]) - ]); - contents.push(certCI); - } - - // create safe contents for private key - var keyBag = null; - if(key !== null) { - // SafeBag - var pkAsn1 = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(key)); - if(password === null) { - // no encryption - keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // bagId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.keyBag).getBytes()), - // bagValue - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // PrivateKeyInfo - pkAsn1 - ]), - // bagAttributes (OPTIONAL) - bagAttrs - ]); + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] + headers[HTTP2_HEADER_METHOD] = method + + if (method === 'CONNECT') { + session.ref() + // we are already connected, streams are pending, first request + // will create a new stream. 
We trigger a request to create the stream and wait until + // `ready` event is triggered + // We disabled endStream to allow the user to write to the stream + stream = session.request(headers, { endStream: false, signal }) + + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream) + ++h2State.openStreams } else { - // encrypted PrivateKeyInfo - keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // bagId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.pkcs8ShroudedKeyBag).getBytes()), - // bagValue - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // EncryptedPrivateKeyInfo - pki.encryptPrivateKeyInfo(pkAsn1, password, options) - ]), - // bagAttributes (OPTIONAL) - bagAttrs - ]); - } - - // SafeContents - var keySafeContents = - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [keyBag]); - - // ContentInfo - var keyCI = - // PKCS#7 ContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // contentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - // OID for the content type is 'data' - asn1.oidToDer(pki.oids.data).getBytes()), - // content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(keySafeContents).getBytes()) - ]) - ]); - contents.push(keyCI); - } - - // create AuthenticatedSafe by stringing together the contents - var safe = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, contents); - - var macData; - if(options.useMac) { - // MacData - var sha1 = forge.md.sha1.create(); - var macSalt = new forge.util.ByteBuffer( - forge.random.getBytes(options.saltSize)); - var count = options.count; - // 160-bit key - var key = p12.generateKey(password, macSalt, 3, count, 20); - var mac = forge.hmac.create(); - mac.start(sha1, key); - mac.update(asn1.toDer(safe).getBytes()); - var macValue = mac.getMac(); - macData = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // mac DigestInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // digestAlgorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm = SHA-1 - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.sha1).getBytes()), - // parameters = Null - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // digest - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, - false, macValue.getBytes()) - ]), - // macSalt OCTET STRING - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, macSalt.getBytes()), - // iterations INTEGER (XXX: Only support count < 65536) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(count).getBytes() - ) - ]); - } - - // PFX - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version (3) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(3).getBytes()), - // PKCS#7 ContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // contentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - // OID for the content type is 'data' - asn1.oidToDer(pki.oids.data).getBytes()), - // content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(safe).getBytes()) - ]) - ]), - macData - ]); -}; + stream.once('ready', () => { + request.onUpgrade(null, null, stream) + ++h2State.openStreams 
+ }) + } -/** - * Derives a PKCS#12 key. - * - * @param password the password to derive the key material from, null or - * undefined for none. - * @param salt the salt, as a ByteBuffer, to use. - * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC). - * @param iter the iteration count. - * @param n the number of bytes to derive from the password. - * @param md the message digest to use, defaults to SHA-1. - * - * @return a ByteBuffer with the bytes derived from the password. - */ -p12.generateKey = forge.pbe.generatePkcs12Key; + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) session.unref() + }) + return true + } -/***/ }), + // https://tools.ietf.org/html/rfc7540#section-8.3 + // :path and :scheme headers must be omited when sending CONNECT -/***/ 4829: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + headers[HTTP2_HEADER_PATH] = path + headers[HTTP2_HEADER_SCHEME] = 'https' -/** - * Javascript implementation of PKCS#7 v1.5. - * - * @author Stefan Siegl - * @author Dave Longley - * - * Copyright (c) 2012 Stefan Siegl - * Copyright (c) 2012-2015 Digital Bazaar, Inc. - * - * Currently this implementation only supports ContentType of EnvelopedData, - * EncryptedData, or SignedData at the root level. The top level elements may - * contain only a ContentInfo of ContentType Data, i.e. plain data. Further - * nesting is not (yet) supported. - * - * The Forge validators for PKCS #7's ASN.1 structures are available from - * a separate file pkcs7asn1.js, since those are referenced from other - * PKCS standards like PKCS #12. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9549); -__nccwpck_require__(7157); -__nccwpck_require__(1925); -__nccwpck_require__(154); -__nccwpck_require__(266); -__nccwpck_require__(7821); -__nccwpck_require__(8339); -__nccwpck_require__(8180); - -// shortcut for ASN.1 API -var asn1 = forge.asn1; - -// shortcut for PKCS#7 API -var p7 = module.exports = forge.pkcs7 = forge.pkcs7 || {}; + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 -/** - * Converts a PKCS#7 message from PEM format. - * - * @param pem the PEM-formatted PKCS#7 message. - * - * @return the PKCS#7 message. - */ -p7.messageFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. - if(msg.type !== 'PKCS7') { - var error = new Error('Could not convert PKCS#7 message from PEM; PEM ' + - 'header type is not "PKCS#7".'); - error.headerType = msg.type; - throw error; + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. 
+ body.read(0) } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert PKCS#7 message from PEM; PEM is encrypted.'); + + let contentLength = util.bodyLength(body) + + if (contentLength == null) { + contentLength = request.contentLength } - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body); + if (contentLength === 0 || !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. - return p7.messageFromAsn1(obj); -}; + contentLength = null + } -/** - * Converts a PKCS#7 message to PEM format. - * - * @param msg The PKCS#7 message object - * @param maxline The maximum characters per line, defaults to 64. - * - * @return The PEM-formatted PKCS#7 message. - */ -p7.messageToPem = function(msg, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var pemObj = { - type: 'PKCS7', - body: asn1.toDer(msg.toAsn1()).getBytes() - }; - return forge.pem.encode(pemObj, {maxline: maxline}); -}; + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } -/** - * Converts a PKCS#7 message from an ASN.1 object. - * - * @param obj the ASN.1 representation of a ContentInfo. - * - * @return the PKCS#7 message. - */ -p7.messageFromAsn1 = function(obj) { - // validate root level ContentInfo and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, p7.asn1.contentInfoValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 message. ' + - 'ASN.1 object is not an PKCS#7 ContentInfo.'); - error.errors = errors; - throw error; - } - - var contentType = asn1.derToOid(capture.contentType); - var msg; - - switch(contentType) { - case forge.pki.oids.envelopedData: - msg = p7.createEnvelopedData(); - break; + process.emitWarning(new RequestContentLengthMismatchError()) + } - case forge.pki.oids.encryptedData: - msg = p7.createEncryptedData(); - break; + if (contentLength != null) { + assert(body, 'no body must not have content length') + headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` + } - case forge.pki.oids.signedData: - msg = p7.createSignedData(); - break; + session.ref() - default: - throw new Error('Cannot read PKCS#7 message. ContentType with OID ' + - contentType + ' is not (yet) supported.'); + const shouldEndStream = method === 'GET' || method === 'HEAD' + if (expectContinue) { + headers[HTTP2_HEADER_EXPECT] = '100-continue' + stream = session.request(headers, { endStream: shouldEndStream, signal }) + + stream.once('continue', writeBodyH2) + } else { + stream = session.request(headers, { + endStream: shouldEndStream, + signal + }) + writeBodyH2() } - msg.fromAsn1(capture.content.value[0]); - return msg; -}; + // Increment counter as we have new several streams open + ++h2State.openStreams -p7.createSignedData = function() { - var msg = null; - msg = { - type: forge.pki.oids.signedData, - version: 1, - certificates: [], - crls: [], - // TODO: add json-formatted signer stuff here? 
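writeH2 above translates the request target into HTTP/2 pseudo-headers, and per RFC 7540 §8.3 it omits :path and :scheme for CONNECT. A sketch of that mapping using node:http2's header-name constants (the helper name and the https assumption are mine):

'use strict'
const { constants } = require('http2')
const {
  HTTP2_HEADER_AUTHORITY,
  HTTP2_HEADER_METHOD,
  HTTP2_HEADER_PATH,
  HTTP2_HEADER_SCHEME
} = constants

// Translate an HTTP/1-style request target into h2 pseudo-headers,
// mirroring the mapping performed in writeH2 above.
function toH2Headers ({ method, path, host }, extraHeaders = {}) {
  const headers = { ...extraHeaders }
  headers[HTTP2_HEADER_AUTHORITY] = host
  headers[HTTP2_HEADER_METHOD] = method
  if (method !== 'CONNECT') {
    // RFC 7540 §8.3: CONNECT carries only :method and :authority.
    headers[HTTP2_HEADER_PATH] = path
    headers[HTTP2_HEADER_SCHEME] = 'https'
  }
  return headers
}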
- signers: [], - // populated during sign() - digestAlgorithmIdentifiers: [], - contentInfo: null, - signerInfos: [], - - fromAsn1: function(obj) { - // validate SignedData content block and capture data. - _fromAsn1(msg, obj, p7.asn1.signedDataValidator); - msg.certificates = []; - msg.crls = []; - msg.digestAlgorithmIdentifiers = []; - msg.contentInfo = null; - msg.signerInfos = []; - - if(msg.rawCapture.certificates) { - var certs = msg.rawCapture.certificates.value; - for(var i = 0; i < certs.length; ++i) { - msg.certificates.push(forge.pki.certificateFromAsn1(certs[i])); - } - } + stream.once('response', headers => { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers - // TODO: parse crls - }, + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { + stream.pause() + } + }) - toAsn1: function() { - // degenerate case with no content - if(!msg.contentInfo) { - msg.sign(); - } + stream.once('end', () => { + request.onComplete([]) + }) - var certs = []; - for(var i = 0; i < msg.certificates.length; ++i) { - certs.push(forge.pki.certificateToAsn1(msg.certificates[i])); - } + stream.on('data', (chunk) => { + if (request.onData(chunk) === false) { + stream.pause() + } + }) - var crls = []; - // TODO: implement CRLs - - // [0] SignedData - var signedData = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(msg.version).getBytes()), - // DigestAlgorithmIdentifiers - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SET, true, - msg.digestAlgorithmIdentifiers), - // ContentInfo - msg.contentInfo - ]) - ]); - if(certs.length > 0) { - // [0] IMPLICIT ExtendedCertificatesAndCertificates OPTIONAL - signedData.value[0].value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, certs)); - } - if(crls.length > 0) { - // [1] IMPLICIT CertificateRevocationLists OPTIONAL - signedData.value[0].value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, crls)); - } - // SignerInfos - signedData.value[0].value.push( - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, - msg.signerInfos)); - - // ContentInfo - return asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // ContentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(msg.type).getBytes()), - // [0] SignedData - signedData - ]); - }, + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) { + session.unref() + } + }) - /** - * Add (another) entity to list of signers. - * - * Note: If authenticatedAttributes are provided, then, per RFC 2315, - * they must include at least two attributes: content type and - * message digest. The message digest attribute value will be - * auto-calculated during signing and will be ignored if provided. - * - * Here's an example of providing these two attributes: - * - * forge.pkcs7.createSignedData(); - * p7.addSigner({ - * issuer: cert.issuer.attributes, - * serialNumber: cert.serialNumber, - * key: privateKey, - * digestAlgorithm: forge.pki.oids.sha1, - * authenticatedAttributes: [{ - * type: forge.pki.oids.contentType, - * value: forge.pki.oids.data - * }, { - * type: forge.pki.oids.messageDigest - * }] - * }); - * - * TODO: Support [subjectKeyIdentifier] as signer's ID. 
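The addSigner documentation above spells out the two authenticated attributes that must accompany any custom attribute set. Assembled into a usage sketch of the removed PKCS#7 API (node-forge assumed installed; `certPem` and `keyPem` assumed to be a matching certificate and RSA private key in PEM form):

'use strict'
const forge = require('node-forge')

// Produce a PKCS#7 SignedData PEM for `payload`, following the addSigner
// contract documented above.
function signPayload (certPem, keyPem, payload) {
  const cert = forge.pki.certificateFromPem(certPem)
  const p7 = forge.pkcs7.createSignedData()
  p7.content = forge.util.createBuffer(payload, 'utf8')
  p7.addCertificate(cert)
  p7.addSigner({
    key: forge.pki.privateKeyFromPem(keyPem),
    certificate: cert,
    digestAlgorithm: forge.pki.oids.sha256,
    // Per RFC 2315 (and the validation above), content-type and
    // message-digest must both be present; the digest value is
    // auto-calculated during sign().
    authenticatedAttributes: [
      { type: forge.pki.oids.contentType, value: forge.pki.oids.data },
      { type: forge.pki.oids.messageDigest }
    ]
  })
  p7.sign()
  return forge.pkcs7.messageToPem(p7)
}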
- * - * @param signer the signer information: - * key the signer's private key. - * [certificate] a certificate containing the public key - * associated with the signer's private key; use this option as - * an alternative to specifying signer.issuer and - * signer.serialNumber. - * [issuer] the issuer attributes (eg: cert.issuer.attributes). - * [serialNumber] the signer's certificate's serial number in - * hexadecimal (eg: cert.serialNumber). - * [digestAlgorithm] the message digest OID, as a string, to use - * (eg: forge.pki.oids.sha1). - * [authenticatedAttributes] an optional array of attributes - * to also sign along with the content. - */ - addSigner: function(signer) { - var issuer = signer.issuer; - var serialNumber = signer.serialNumber; - if(signer.certificate) { - var cert = signer.certificate; - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); - } - issuer = cert.issuer.attributes; - serialNumber = cert.serialNumber; - } - var key = signer.key; - if(!key) { - throw new Error( - 'Could not add PKCS#7 signer; no private key specified.'); - } - if(typeof key === 'string') { - key = forge.pki.privateKeyFromPem(key); - } + stream.once('error', function (err) { + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) - // ensure OID known for digest algorithm - var digestAlgorithm = signer.digestAlgorithm || forge.pki.oids.sha1; - switch(digestAlgorithm) { - case forge.pki.oids.sha1: - case forge.pki.oids.sha256: - case forge.pki.oids.sha384: - case forge.pki.oids.sha512: - case forge.pki.oids.md5: - break; - default: - throw new Error( - 'Could not add PKCS#7 signer; unknown message digest algorithm: ' + - digestAlgorithm); - } + stream.once('frameError', (type, code) => { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + errorRequest(client, request, err) - // if authenticatedAttributes is present, then the attributes - // must contain at least PKCS #9 content-type and message-digest - var authenticatedAttributes = signer.authenticatedAttributes || []; - if(authenticatedAttributes.length > 0) { - var contentType = false; - var messageDigest = false; - for(var i = 0; i < authenticatedAttributes.length; ++i) { - var attr = authenticatedAttributes[i]; - if(!contentType && attr.type === forge.pki.oids.contentType) { - contentType = true; - if(messageDigest) { - break; - } - continue; - } - if(!messageDigest && attr.type === forge.pki.oids.messageDigest) { - messageDigest = true; - if(contentType) { - break; - } - continue; - } - } + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) - if(!contentType || !messageDigest) { - throw new Error('Invalid signer.authenticatedAttributes. 
If ' + - 'signer.authenticatedAttributes is specified, then it must ' + - 'contain at least two attributes, PKCS #9 content-type and ' + - 'PKCS #9 message-digest.'); - } + // stream.on('aborted', () => { + // // TODO(HTTP/2): Support aborted + // }) + + // stream.on('timeout', () => { + // // TODO(HTTP/2): Support timeout + // }) + + // stream.on('push', headers => { + // // TODO(HTTP/2): Suppor push + // }) + + // stream.on('trailers', headers => { + // // TODO(HTTP/2): Support trailers + // }) + + return true + + function writeBodyH2 () { + /* istanbul ignore else: assertion */ + if (!body) { + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + stream.cork() + stream.write(body) + stream.uncork() + stream.end() + request.onBodySent(body) + request.onRequestSent() + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ + client, + request, + contentLength, + h2stream: stream, + expectsPayload, + body: body.stream(), + socket: client[kSocket], + header: '' + }) + } else { + writeBlob({ + body, + client, + request, + contentLength, + expectsPayload, + h2stream: stream, + header: '', + socket: client[kSocket] + }) } + } else if (util.isStream(body)) { + writeStream({ + body, + client, + request, + contentLength, + expectsPayload, + socket: client[kSocket], + h2stream: stream, + header: '' + }) + } else if (util.isIterable(body)) { + writeIterable({ + body, + client, + request, + contentLength, + expectsPayload, + header: '', + h2stream: stream, + socket: client[kSocket] + }) + } else { + assert(false) + } + } +} - msg.signers.push({ - key: key, - version: 1, - issuer: issuer, - serialNumber: serialNumber, - digestAlgorithm: digestAlgorithm, - signatureAlgorithm: forge.pki.oids.rsaEncryption, - signature: null, - authenticatedAttributes: authenticatedAttributes, - unauthenticatedAttributes: [] - }); - }, - - /** - * Signs the content. - * @param options Options to apply when signing: - * [detached] boolean. If signing should be done in detached mode. Defaults to false. 
- */ - sign: function(options) { - options = options || {}; - // auto-generate content info - if(typeof msg.content !== 'object' || msg.contentInfo === null) { - // use Data ContentInfo - msg.contentInfo = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // ContentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(forge.pki.oids.data).getBytes()) - ]); - - // add actual content, if present - if('content' in msg) { - var content; - if(msg.content instanceof forge.util.ByteBuffer) { - content = msg.content.bytes(); - } else if(typeof msg.content === 'string') { - content = forge.util.encodeUtf8(msg.content); - } +function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') - if (options.detached) { - msg.detachedContent = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, content); - } else { - msg.contentInfo.value.push( - // [0] EXPLICIT content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - content) - ])); - } + if (client[kHTTPConnVersion] === 'h2') { + // For HTTP/2, is enough to pipe the stream + const pipe = pipeline( + body, + h2stream, + (err) => { + if (err) { + util.destroy(body, err) + util.destroy(h2stream, err) + } else { + request.onRequestSent() } } + ) - // no signers, return early (degenerate case for certificate container) - if(msg.signers.length === 0) { - return; - } + pipe.on('data', onPipeData) + pipe.once('end', () => { + pipe.removeListener('data', onPipeData) + util.destroy(pipe) + }) - // generate digest algorithm identifiers - var mds = addDigestAlgorithmIds(); + function onPipeData (chunk) { + request.onBodySent(chunk) + } - // generate signerInfos - addSignerInfos(mds); - }, + return + } - verify: function() { - throw new Error('PKCS#7 signature verification not yet implemented.'); - }, + let finished = false - /** - * Add a certificate. - * - * @param cert the certificate to add. - */ - addCertificate: function(cert) { - // convert from PEM - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + + const onData = function (chunk) { + if (finished) { + return + } + + try { + if (!writer.write(chunk) && this.pause) { + this.pause() } - msg.certificates.push(cert); - }, + } catch (err) { + util.destroy(this, err) + } + } + const onDrain = function () { + if (finished) { + return + } - /** - * Add a certificate revokation list. - * - * @param crl the certificate revokation list to add. 
- */ - addCertificateRevokationList: function(crl) { - throw new Error('PKCS#7 CRL support not yet implemented.'); + if (body.resume) { + body.resume() + } + } + const onAbort = function () { + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) + } + const onFinished = function (err) { + if (finished) { + return } - }; - return msg; - function addDigestAlgorithmIds() { - var mds = {}; + finished = true - for(var i = 0; i < msg.signers.length; ++i) { - var signer = msg.signers[i]; - var oid = signer.digestAlgorithm; - if(!(oid in mds)) { - // content digest - mds[oid] = forge.md[forge.pki.oids[oid]].create(); - } - if(signer.authenticatedAttributes.length === 0) { - // no custom attributes to digest; use content message digest - signer.md = mds[oid]; - } else { - // custom attributes to be digested; use own message digest - // TODO: optimize to just copy message digest state if that - // feature is ever supported with message digests - signer.md = forge.md[forge.pki.oids[oid]].create(); + assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) + + socket + .off('drain', onDrain) + .off('error', onFinished) + + body + .removeListener('data', onData) + .removeListener('end', onFinished) + .removeListener('error', onFinished) + .removeListener('close', onAbort) + + if (!err) { + try { + writer.end() + } catch (er) { + err = er } } - // add unique digest algorithm identifiers - msg.digestAlgorithmIdentifiers = []; - for(var oid in mds) { - msg.digestAlgorithmIdentifiers.push( - // AlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oid).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ])); + writer.destroy(err) + + if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { + util.destroy(body, err) + } else { + util.destroy(body) } + } - return mds; + body + .on('data', onData) + .on('end', onFinished) + .on('error', onFinished) + .on('close', onAbort) + + if (body.resume) { + body.resume() } - function addSignerInfos(mds) { - var content; + socket + .on('drain', onDrain) + .on('error', onFinished) +} + +async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength === body.size, 'blob body must have content length') + + const isH2 = client[kHTTPConnVersion] === 'h2' + try { + if (contentLength != null && contentLength !== body.size) { + throw new RequestContentLengthMismatchError() + } - if (msg.detachedContent) { - // Signature has been made in detached mode. 
- content = msg.detachedContent; + const buffer = Buffer.from(await body.arrayBuffer()) + + if (isH2) { + h2stream.cork() + h2stream.write(buffer) + h2stream.uncork() } else { - // Note: ContentInfo is a SEQUENCE with 2 values, second value is - // the content field and is optional for a ContentInfo but required here - // since signers are present - // get ContentInfo content - content = msg.contentInfo.value[1]; - // skip [0] EXPLICIT content wrapper - content = content.value[0]; + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(buffer) + socket.uncork() } - if(!content) { - throw new Error( - 'Could not sign PKCS#7 message; there is no content to sign.'); + request.onBodySent(buffer) + request.onRequestSent() + + if (!expectsPayload) { + socket[kReset] = true } - // get ContentInfo content type - var contentType = asn1.derToOid(msg.contentInfo.value[0].value); + resume(client) + } catch (err) { + util.destroy(isH2 ? h2stream : socket, err) + } +} - // serialize content - var bytes = asn1.toDer(content); +async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') + + let callback = null + function onDrain () { + if (callback) { + const cb = callback + callback = null + cb() + } + } - // skip identifier and length per RFC 2315 9.3 - // skip identifier (1 byte) - bytes.getByte(); - // read and discard length bytes - asn1.getBerValueLength(bytes); - bytes = bytes.getBytes(); + const waitForDrain = () => new Promise((resolve, reject) => { + assert(callback === null) - // digest content DER value bytes - for(var oid in mds) { - mds[oid].start().update(bytes); + if (socket[kError]) { + reject(socket[kError]) + } else { + callback = resolve } + }) - // sign content - var signingTime = new Date(); - for(var i = 0; i < msg.signers.length; ++i) { - var signer = msg.signers[i]; + if (client[kHTTPConnVersion] === 'h2') { + h2stream + .on('close', onDrain) + .on('drain', onDrain) - if(signer.authenticatedAttributes.length === 0) { - // if ContentInfo content type is not "Data", then - // authenticatedAttributes must be present per RFC 2315 - if(contentType !== forge.pki.oids.data) { - throw new Error( - 'Invalid signer; authenticatedAttributes must be present ' + - 'when the ContentInfo content type is not PKCS#7 Data.'); + try { + // It's up to the user to somehow abort the async iterable. 
+ for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] } - } else { - // process authenticated attributes - // [0] IMPLICIT - signer.authenticatedAttributesAsn1 = asn1.create( - asn1.Class.CONTEXT_SPECIFIC, 0, true, []); - - // per RFC 2315, attributes are to be digested using a SET container - // not the above [0] IMPLICIT container - var attrsAsn1 = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SET, true, []); - - for(var ai = 0; ai < signer.authenticatedAttributes.length; ++ai) { - var attr = signer.authenticatedAttributes[ai]; - if(attr.type === forge.pki.oids.messageDigest) { - // use content message digest as value - attr.value = mds[signer.digestAlgorithm].digest(); - } else if(attr.type === forge.pki.oids.signingTime) { - // auto-populate signing time if not already set - if(!attr.value) { - attr.value = signingTime; - } - } - // convert to ASN.1 and push onto Attributes SET (for signing) and - // onto authenticatedAttributesAsn1 to complete SignedData ASN.1 - // TODO: optimize away duplication - attrsAsn1.value.push(_attributeToAsn1(attr)); - signer.authenticatedAttributesAsn1.value.push(_attributeToAsn1(attr)); + const res = h2stream.write(chunk) + request.onBodySent(chunk) + if (!res) { + await waitForDrain() } + } + } catch (err) { + h2stream.destroy(err) + } finally { + request.onRequestSent() + h2stream.end() + h2stream + .off('close', onDrain) + .off('drain', onDrain) + } + + return + } + + socket + .on('close', onDrain) + .on('drain', onDrain) - // DER-serialize and digest SET OF attributes only - bytes = asn1.toDer(attrsAsn1).getBytes(); - signer.md.start().update(bytes); + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] } - // sign digest - signer.signature = signer.key.sign(signer.md, 'RSASSA-PKCS1-V1_5'); + if (!writer.write(chunk)) { + await waitForDrain() + } } - // add signer info - msg.signerInfos = _signersToAsn1(msg.signers); + writer.end() + } catch (err) { + writer.destroy(err) + } finally { + socket + .off('close', onDrain) + .off('drain', onDrain) } -}; +} -/** - * Creates an empty PKCS#7 message of type EncryptedData. - * - * @return the message. - */ -p7.createEncryptedData = function() { - var msg = null; - msg = { - type: forge.pki.oids.encryptedData, - version: 0, - encryptedContent: { - algorithm: forge.pki.oids['aes256-CBC'] - }, +class AsyncWriter { + constructor ({ socket, request, contentLength, client, expectsPayload, header }) { + this.socket = socket + this.request = request + this.contentLength = contentLength + this.client = client + this.bytesWritten = 0 + this.expectsPayload = expectsPayload + this.header = header - /** - * Reads an EncryptedData content block (in ASN.1 format) - * - * @param obj The ASN.1 representation of the EncryptedData content block - */ - fromAsn1: function(obj) { - // Validate EncryptedData content block and capture data. 
- _fromAsn1(msg, obj, p7.asn1.encryptedDataValidator); - }, + socket[kWriting] = true + } - /** - * Decrypt encrypted content - * - * @param key The (symmetric) key as a byte buffer - */ - decrypt: function(key) { - if(key !== undefined) { - msg.encryptedContent.key = key; - } - _decryptContent(msg); + write (chunk) { + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return false + } + + const len = Buffer.byteLength(chunk) + if (!len) { + return true + } + + // We should defer writing chunks. + if (contentLength !== null && bytesWritten + len > contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } + + process.emitWarning(new RequestContentLengthMismatchError()) } - }; - return msg; -}; -/** - * Creates an empty PKCS#7 message of type EnvelopedData. - * - * @return the message. - */ -p7.createEnvelopedData = function() { - var msg = null; - msg = { - type: forge.pki.oids.envelopedData, - version: 0, - recipients: [], - encryptedContent: { - algorithm: forge.pki.oids['aes256-CBC'] - }, + socket.cork() - /** - * Reads an EnvelopedData content block (in ASN.1 format) - * - * @param obj the ASN.1 representation of the EnvelopedData content block. - */ - fromAsn1: function(obj) { - // validate EnvelopedData content block and capture data - var capture = _fromAsn1(msg, obj, p7.asn1.envelopedDataValidator); - msg.recipients = _recipientsFromAsn1(capture.recipientInfos.value); - }, + if (bytesWritten === 0) { + if (!expectsPayload) { + socket[kReset] = true + } - toAsn1: function() { - // ContentInfo - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // ContentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(msg.type).getBytes()), - // [0] EnvelopedData - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(msg.version).getBytes()), - // RecipientInfos - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, - _recipientsToAsn1(msg.recipients)), - // EncryptedContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, - _encryptedContentToAsn1(msg.encryptedContent)) - ]) - ]) - ]); - }, + if (contentLength === null) { + socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') + } else { + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + } + } - /** - * Find recipient by X.509 certificate's issuer. - * - * @param cert the certificate with the issuer to look for. - * - * @return the recipient object. 
- */ - findRecipient: function(cert) { - var sAttr = cert.issuer.attributes; + if (contentLength === null) { + socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') + } - for(var i = 0; i < msg.recipients.length; ++i) { - var r = msg.recipients[i]; - var rAttr = r.issuer; + this.bytesWritten += len - if(r.serialNumber !== cert.serialNumber) { - continue; - } + const ret = socket.write(chunk) - if(rAttr.length !== sAttr.length) { - continue; - } + socket.uncork() - var match = true; - for(var j = 0; j < sAttr.length; ++j) { - if(rAttr[j].type !== sAttr[j].type || - rAttr[j].value !== sAttr[j].value) { - match = false; - break; - } - } + request.onBodySent(chunk) - if(match) { - return r; + if (!ret) { + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() } } + } - return null; - }, + return ret + } - /** - * Decrypt enveloped content - * - * @param recipient The recipient object related to the private key - * @param privKey The (RSA) private key object - */ - decrypt: function(recipient, privKey) { - if(msg.encryptedContent.key === undefined && recipient !== undefined && - privKey !== undefined) { - switch(recipient.encryptedContent.algorithm) { - case forge.pki.oids.rsaEncryption: - case forge.pki.oids.desCBC: - var key = privKey.decrypt(recipient.encryptedContent.content); - msg.encryptedContent.key = forge.util.createBuffer(key); - break; - - default: - throw new Error('Unsupported asymmetric cipher, ' + - 'OID ' + recipient.encryptedContent.algorithm); - } - } + end () { + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this + request.onRequestSent() - _decryptContent(msg); - }, + socket[kWriting] = false - /** - * Add (another) entity to list of recipients. - * - * @param cert The certificate of the entity to add. - */ - addRecipient: function(cert) { - msg.recipients.push({ - version: 0, - issuer: cert.issuer.attributes, - serialNumber: cert.serialNumber, - encryptedContent: { - // We simply assume rsaEncryption here, since forge.pki only - // supports RSA so far. If the PKI module supports other - // ciphers one day, we need to modify this one as well. - algorithm: forge.pki.oids.rsaEncryption, - key: cert.publicKey - } - }); - }, + if (socket[kError]) { + throw socket[kError] + } - /** - * Encrypt enveloped content. - * - * This function supports two optional arguments, cipher and key, which - * can be used to influence symmetric encryption. Unless cipher is - * provided, the cipher specified in encryptedContent.algorithm is used - * (defaults to AES-256-CBC). If no key is provided, encryptedContent.key - * is (re-)used. If that one's not set, a random key will be generated - * automatically. - * - * @param [key] The key to be used for symmetric encryption. - * @param [cipher] The OID of the symmetric cipher to use. 
- */ - encrypt: function(key, cipher) { - // Part 1: Symmetric encryption - if(msg.encryptedContent.content === undefined) { - cipher = cipher || msg.encryptedContent.algorithm; - key = key || msg.encryptedContent.key; - - var keyLen, ivLen, ciphFn; - switch(cipher) { - case forge.pki.oids['aes128-CBC']: - keyLen = 16; - ivLen = 16; - ciphFn = forge.aes.createEncryptionCipher; - break; - - case forge.pki.oids['aes192-CBC']: - keyLen = 24; - ivLen = 16; - ciphFn = forge.aes.createEncryptionCipher; - break; - - case forge.pki.oids['aes256-CBC']: - keyLen = 32; - ivLen = 16; - ciphFn = forge.aes.createEncryptionCipher; - break; - - case forge.pki.oids['des-EDE3-CBC']: - keyLen = 24; - ivLen = 8; - ciphFn = forge.des.createEncryptionCipher; - break; - - default: - throw new Error('Unsupported symmetric cipher, OID ' + cipher); - } - - if(key === undefined) { - key = forge.util.createBuffer(forge.random.getBytes(keyLen)); - } else if(key.length() != keyLen) { - throw new Error('Symmetric key has wrong length; ' + - 'got ' + key.length() + ' bytes, expected ' + keyLen + '.'); - } - - // Keep a copy of the key & IV in the object, so the caller can - // use it for whatever reason. - msg.encryptedContent.algorithm = cipher; - msg.encryptedContent.key = key; - msg.encryptedContent.parameter = forge.util.createBuffer( - forge.random.getBytes(ivLen)); - - var ciph = ciphFn(key); - ciph.start(msg.encryptedContent.parameter.copy()); - ciph.update(msg.content); - - // The finish function does PKCS#7 padding by default, therefore - // no action required by us. - if(!ciph.finish()) { - throw new Error('Symmetric encryption failed.'); - } - - msg.encryptedContent.content = ciph.output; - } + if (socket.destroyed) { + return + } - // Part 2: asymmetric encryption for each recipient - for(var i = 0; i < msg.recipients.length; ++i) { - var recipient = msg.recipients[i]; + if (bytesWritten === 0) { + if (expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. - // Nothing to do, encryption already done. - if(recipient.encryptedContent.content !== undefined) { - continue; - } + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + socket.write(`${header}\r\n`, 'latin1') + } + } else if (contentLength === null) { + socket.write('\r\n0\r\n\r\n', 'latin1') + } - switch(recipient.encryptedContent.algorithm) { - case forge.pki.oids.rsaEncryption: - recipient.encryptedContent.content = - recipient.encryptedContent.key.encrypt( - msg.encryptedContent.key.data); - break; + if (contentLength !== null && bytesWritten !== contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } else { + process.emitWarning(new RequestContentLengthMismatchError()) + } + } - default: - throw new Error('Unsupported asymmetric cipher, OID ' + - recipient.encryptedContent.algorithm); - } + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() } } - }; - return msg; -}; -/** - * Converts a single recipient from an ASN.1 object. - * - * @param obj the ASN.1 RecipientInfo. - * - * @return the recipient object. 
- */ -function _recipientFromAsn1(obj) { - // validate EnvelopedData content block and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, p7.asn1.recipientInfoValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 RecipientInfo. ' + - 'ASN.1 object is not an PKCS#7 RecipientInfo.'); - error.errors = errors; - throw error; + resume(client) } - return { - version: capture.version.charCodeAt(0), - issuer: forge.pki.RDNAttributesAsArray(capture.issuer), - serialNumber: forge.util.createBuffer(capture.serial).toHex(), - encryptedContent: { - algorithm: asn1.derToOid(capture.encAlgorithm), - parameter: capture.encParameter ? capture.encParameter.value : undefined, - content: capture.encKey - } - }; -} + destroy (err) { + const { socket, client } = this -/** - * Converts a single recipient object to an ASN.1 object. - * - * @param obj the recipient object. - * - * @return the ASN.1 RecipientInfo. - */ -function _recipientToAsn1(obj) { - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(obj.version).getBytes()), - // IssuerAndSerialNumber - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Name - forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}), - // Serial - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(obj.serialNumber)) - ]), - // KeyEncryptionAlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(obj.encryptedContent.algorithm).getBytes()), - // Parameter, force NULL, only RSA supported for now. - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // EncryptedKey - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - obj.encryptedContent.content) - ]); -} + socket[kWriting] = false -/** - * Map a set of RecipientInfo ASN.1 objects to recipient objects. - * - * @param infos an array of ASN.1 representations RecipientInfo (i.e. SET OF). - * - * @return an array of recipient objects. - */ -function _recipientsFromAsn1(infos) { - var ret = []; - for(var i = 0; i < infos.length; ++i) { - ret.push(_recipientFromAsn1(infos[i])); + if (err) { + assert(client[kRunning] <= 1, 'pipeline should only contain this request') + util.destroy(socket, err) + } } - return ret; } -/** - * Map an array of recipient objects to ASN.1 RecipientInfo objects. - * - * @param recipients an array of recipientInfo objects. - * - * @return an array of ASN.1 RecipientInfos. - */ -function _recipientsToAsn1(recipients) { - var ret = []; - for(var i = 0; i < recipients.length; ++i) { - ret.push(_recipientToAsn1(recipients[i])); +function errorRequest (client, request, err) { + try { + request.onError(err) + assert(request.aborted) + } catch (err) { + client.emit('error', err) } - return ret; } -/** - * Converts a single signer from an ASN.1 object. - * - * @param obj the ASN.1 representation of a SignerInfo. - * - * @return the signer object. - */ -function _signerFromAsn1(obj) { - // validate EnvelopedData content block and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, p7.asn1.signerInfoValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 SignerInfo. 
' + - 'ASN.1 object is not an PKCS#7 SignerInfo.'); - error.errors = errors; - throw error; - } - - var rval = { - version: capture.version.charCodeAt(0), - issuer: forge.pki.RDNAttributesAsArray(capture.issuer), - serialNumber: forge.util.createBuffer(capture.serial).toHex(), - digestAlgorithm: asn1.derToOid(capture.digestAlgorithm), - signatureAlgorithm: asn1.derToOid(capture.signatureAlgorithm), - signature: capture.signature, - authenticatedAttributes: [], - unauthenticatedAttributes: [] - }; +module.exports = Client - // TODO: convert attributes - var authenticatedAttributes = capture.authenticatedAttributes || []; - var unauthenticatedAttributes = capture.unauthenticatedAttributes || []; - return rval; -} +/***/ }), -/** - * Converts a single signerInfo object to an ASN.1 object. - * - * @param obj the signerInfo object. - * - * @return the ASN.1 representation of a SignerInfo. - */ -function _signerToAsn1(obj) { - // SignerInfo - var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(obj.version).getBytes()), - // issuerAndSerialNumber - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // name - forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}), - // serial - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(obj.serialNumber)) - ]), - // digestAlgorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(obj.digestAlgorithm).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]) - ]); +/***/ 6436: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // authenticatedAttributes (OPTIONAL) - if(obj.authenticatedAttributesAsn1) { - // add ASN.1 previously generated during signing - rval.value.push(obj.authenticatedAttributesAsn1); - } +"use strict"; - // digestEncryptionAlgorithm - rval.value.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(obj.signatureAlgorithm).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ])); - // encryptedDigest - rval.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, obj.signature)); +/* istanbul ignore file: only for Node 12 */ - // unauthenticatedAttributes (OPTIONAL) - if(obj.unauthenticatedAttributes.length > 0) { - // [1] IMPLICIT - var attrsAsn1 = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, []); - for(var i = 0; i < obj.unauthenticatedAttributes.length; ++i) { - var attr = obj.unauthenticatedAttributes[i]; - attrsAsn1.values.push(_attributeToAsn1(attr)); - } - rval.value.push(attrsAsn1); - } +const { kConnected, kSize } = __nccwpck_require__(2785) - return rval; -} +class CompatWeakRef { + constructor (value) { + this.value = value + } -/** - * Map a set of SignerInfo ASN.1 objects to an array of signer objects. - * - * @param signerInfoAsn1s an array of ASN.1 SignerInfos (i.e. SET OF). - * - * @return an array of signers objects. - */ -function _signersFromAsn1(signerInfoAsn1s) { - var ret = []; - for(var i = 0; i < signerInfoAsn1s.length; ++i) { - ret.push(_signerFromAsn1(signerInfoAsn1s[i])); + deref () { + return this.value[kConnected] === 0 && this.value[kSize] === 0 + ? 
undefined + : this.value } - return ret; } -/** - * Map an array of signer objects to ASN.1 objects. - * - * @param signers an array of signer objects. - * - * @return an array of ASN.1 SignerInfos. - */ -function _signersToAsn1(signers) { - var ret = []; - for(var i = 0; i < signers.length; ++i) { - ret.push(_signerToAsn1(signers[i])); +class CompatFinalizer { + constructor (finalizer) { + this.finalizer = finalizer } - return ret; -} -/** - * Convert an attribute object to an ASN.1 Attribute. - * - * @param attr the attribute object. - * - * @return the ASN.1 Attribute. - */ -function _attributeToAsn1(attr) { - var value; - - // TODO: generalize to support more attributes - if(attr.type === forge.pki.oids.contentType) { - value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.value).getBytes()); - } else if(attr.type === forge.pki.oids.messageDigest) { - value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - attr.value.bytes()); - } else if(attr.type === forge.pki.oids.signingTime) { - /* Note per RFC 2985: Dates between 1 January 1950 and 31 December 2049 - (inclusive) MUST be encoded as UTCTime. Any dates with year values - before 1950 or after 2049 MUST be encoded as GeneralizedTime. [Further,] - UTCTime values MUST be expressed in Greenwich Mean Time (Zulu) and MUST - include seconds (i.e., times are YYMMDDHHMMSSZ), even where the - number of seconds is zero. Midnight (GMT) must be represented as - "YYMMDD000000Z". */ - // TODO: make these module-level constants - var jan_1_1950 = new Date('1950-01-01T00:00:00Z'); - var jan_1_2050 = new Date('2050-01-01T00:00:00Z'); - var date = attr.value; - if(typeof date === 'string') { - // try to parse date - var timestamp = Date.parse(date); - if(!isNaN(timestamp)) { - date = new Date(timestamp); - } else if(date.length === 13) { - // YYMMDDHHMMSSZ (13 chars for UTCTime) - date = asn1.utcTimeToDate(date); - } else { - // assume generalized time - date = asn1.generalizedTimeToDate(date); - } + register (dispatcher, key) { + if (dispatcher.on) { + dispatcher.on('disconnect', () => { + if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { + this.finalizer(key) + } + }) } - - if(date >= jan_1_1950 && date < jan_1_2050) { - value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false, - asn1.dateToUtcTime(date)); - } else { - value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false, - asn1.dateToGeneralizedTime(date)); - } - } - - // TODO: expose as common API call - // create a RelativeDistinguishedName set - // each value in the set is an AttributeTypeAndValue first - // containing the type (an OID) and second the value - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AttributeType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.type).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - // AttributeValue - value - ]) - ]); -} - -/** - * Map messages encrypted content to ASN.1 objects. - * - * @param ec The encryptedContent object of the message. - * - * @return ASN.1 representation of the encryptedContent object (SEQUENCE). 
- */ -function _encryptedContentToAsn1(ec) { - return [ - // ContentType, always Data for the moment - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(forge.pki.oids.data).getBytes()), - // ContentEncryptionAlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(ec.algorithm).getBytes()), - // Parameters (IV) - !ec.parameter ? - undefined : - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - ec.parameter.getBytes()) - ]), - // [0] EncryptedContent - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - ec.content.getBytes()) - ]) - ]; + } } -/** - * Reads the "common part" of an PKCS#7 content block (in ASN.1 format) - * - * This function reads the "common part" of the PKCS#7 content blocks - * EncryptedData and EnvelopedData, i.e. version number and symmetrically - * encrypted content block. - * - * The result of the ASN.1 validate and capture process is returned - * to allow the caller to extract further data, e.g. the list of recipients - * in case of a EnvelopedData object. - * - * @param msg the PKCS#7 object to read the data to. - * @param obj the ASN.1 representation of the content block. - * @param validator the ASN.1 structure validator object to use. - * - * @return the value map captured by validator object. - */ -function _fromAsn1(msg, obj, validator) { - var capture = {}; - var errors = []; - if(!asn1.validate(obj, validator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 message. ' + - 'ASN.1 object is not a supported PKCS#7 message.'); - error.errors = error; - throw error; - } - - // Check contentType, so far we only support (raw) Data. - var contentType = asn1.derToOid(capture.contentType); - if(contentType !== forge.pki.oids.data) { - throw new Error('Unsupported PKCS#7 message. 
' + - 'Only wrapped ContentType Data supported.'); - } - - if(capture.encryptedContent) { - var content = ''; - if(forge.util.isArray(capture.encryptedContent)) { - for(var i = 0; i < capture.encryptedContent.length; ++i) { - if(capture.encryptedContent[i].type !== asn1.Type.OCTETSTRING) { - throw new Error('Malformed PKCS#7 message, expecting encrypted ' + - 'content constructed of only OCTET STRING objects.'); - } - content += capture.encryptedContent[i].value; - } - } else { - content = capture.encryptedContent; +module.exports = function () { + // FIXME: remove workaround when the Node bug is fixed + // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 + if (process.env.NODE_V8_COVERAGE) { + return { + WeakRef: CompatWeakRef, + FinalizationRegistry: CompatFinalizer } - msg.encryptedContent = { - algorithm: asn1.derToOid(capture.encAlgorithm), - parameter: forge.util.createBuffer(capture.encParameter.value), - content: forge.util.createBuffer(content) - }; } - - if(capture.content) { - var content = ''; - if(forge.util.isArray(capture.content)) { - for(var i = 0; i < capture.content.length; ++i) { - if(capture.content[i].type !== asn1.Type.OCTETSTRING) { - throw new Error('Malformed PKCS#7 message, expecting ' + - 'content constructed of only OCTET STRING objects.'); - } - content += capture.content[i].value; - } - } else { - content = capture.content; - } - msg.content = forge.util.createBuffer(content); + return { + WeakRef: global.WeakRef || CompatWeakRef, + FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer } - - msg.version = capture.version.charCodeAt(0); - msg.rawCapture = capture; - - return capture; } -/** - * Decrypt the symmetrically encrypted content block of the PKCS#7 message. - * - * Decryption is skipped in case the PKCS#7 message object already has a - * (decrypted) content attribute. The algorithm, key and cipher parameters - * (probably the iv) are taken from the encryptedContent attribute of the - * message object. - * - * @param The PKCS#7 message object. 
- */ -function _decryptContent(msg) { - if(msg.encryptedContent.key === undefined) { - throw new Error('Symmetric key not available.'); - } - if(msg.content === undefined) { - var ciph; +/***/ }), - switch(msg.encryptedContent.algorithm) { - case forge.pki.oids['aes128-CBC']: - case forge.pki.oids['aes192-CBC']: - case forge.pki.oids['aes256-CBC']: - ciph = forge.aes.createDecryptionCipher(msg.encryptedContent.key); - break; +/***/ 663: +/***/ ((module) => { - case forge.pki.oids['desCBC']: - case forge.pki.oids['des-EDE3-CBC']: - ciph = forge.des.createDecryptionCipher(msg.encryptedContent.key); - break; +"use strict"; - default: - throw new Error('Unsupported symmetric cipher, OID ' + - msg.encryptedContent.algorithm); - } - ciph.start(msg.encryptedContent.parameter); - ciph.update(msg.encryptedContent.content); - if(!ciph.finish()) { - throw new Error('Symmetric decryption failed.'); - } +// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size +const maxAttributeValueSize = 1024 - msg.content = ciph.output; - } +// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size +const maxNameValuePairSize = 4096 + +module.exports = { + maxAttributeValueSize, + maxNameValuePairSize } /***/ }), -/***/ 266: +/***/ 1724: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + +const { parseSetCookie } = __nccwpck_require__(4408) +const { stringify, getHeadersList } = __nccwpck_require__(3121) +const { webidl } = __nccwpck_require__(1744) +const { Headers } = __nccwpck_require__(554) + /** - * Javascript implementation of ASN.1 validators for PKCS#7 v1.5. - * - * @author Dave Longley - * @author Stefan Siegl - * - * Copyright (c) 2012-2015 Digital Bazaar, Inc. - * Copyright (c) 2012 Stefan Siegl - * - * The ASN.1 representation of PKCS#7 is as follows - * (see RFC #2315 for details, http://www.ietf.org/rfc/rfc2315.txt): - * - * A PKCS#7 message consists of a ContentInfo on root level, which may - * contain any number of further ContentInfo nested into it. 
- * - * ContentInfo ::= SEQUENCE { - * contentType ContentType, - * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL - * } - * - * ContentType ::= OBJECT IDENTIFIER - * - * EnvelopedData ::= SEQUENCE { - * version Version, - * recipientInfos RecipientInfos, - * encryptedContentInfo EncryptedContentInfo - * } - * - * EncryptedData ::= SEQUENCE { - * version Version, - * encryptedContentInfo EncryptedContentInfo - * } - * - * id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) - * us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 } - * - * SignedData ::= SEQUENCE { - * version INTEGER, - * digestAlgorithms DigestAlgorithmIdentifiers, - * contentInfo ContentInfo, - * certificates [0] IMPLICIT Certificates OPTIONAL, - * crls [1] IMPLICIT CertificateRevocationLists OPTIONAL, - * signerInfos SignerInfos - * } - * - * SignerInfos ::= SET OF SignerInfo - * - * SignerInfo ::= SEQUENCE { - * version Version, - * issuerAndSerialNumber IssuerAndSerialNumber, - * digestAlgorithm DigestAlgorithmIdentifier, - * authenticatedAttributes [0] IMPLICIT Attributes OPTIONAL, - * digestEncryptionAlgorithm DigestEncryptionAlgorithmIdentifier, - * encryptedDigest EncryptedDigest, - * unauthenticatedAttributes [1] IMPLICIT Attributes OPTIONAL - * } - * - * EncryptedDigest ::= OCTET STRING - * - * Attributes ::= SET OF Attribute - * - * Attribute ::= SEQUENCE { - * attrType OBJECT IDENTIFIER, - * attrValues SET OF AttributeValue - * } - * - * AttributeValue ::= ANY - * - * Version ::= INTEGER - * - * RecipientInfos ::= SET OF RecipientInfo - * - * EncryptedContentInfo ::= SEQUENCE { - * contentType ContentType, - * contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier, - * encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL - * } - * - * ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier - * - * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters - * for the algorithm, if any. In the case of AES and DES3, there is only one, - * the IV. 
- * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * EncryptedContent ::= OCTET STRING - * - * RecipientInfo ::= SEQUENCE { - * version Version, - * issuerAndSerialNumber IssuerAndSerialNumber, - * keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier, - * encryptedKey EncryptedKey - * } - * - * IssuerAndSerialNumber ::= SEQUENCE { - * issuer Name, - * serialNumber CertificateSerialNumber - * } - * - * CertificateSerialNumber ::= INTEGER - * - * KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier - * - * EncryptedKey ::= OCTET STRING + * @typedef {Object} Cookie + * @property {string} name + * @property {string} value + * @property {Date|number|undefined} expires + * @property {number|undefined} maxAge + * @property {string|undefined} domain + * @property {string|undefined} path + * @property {boolean|undefined} secure + * @property {boolean|undefined} httpOnly + * @property {'Strict'|'Lax'|'None'} sameSite + * @property {string[]} unparsed */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(8339); - -// shortcut for ASN.1 API -var asn1 = forge.asn1; - -// shortcut for PKCS#7 API -var p7v = module.exports = forge.pkcs7asn1 = forge.pkcs7asn1 || {}; -forge.pkcs7 = forge.pkcs7 || {}; -forge.pkcs7.asn1 = p7v; - -var contentInfoValidator = { - name: 'ContentInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'ContentInfo.ContentType', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'contentType' - }, { - name: 'ContentInfo.content', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - captureAsn1: 'content' - }] -}; -p7v.contentInfoValidator = contentInfoValidator; - -var encryptedContentInfoValidator = { - name: 'EncryptedContentInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedContentInfo.contentType', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'contentType' - }, { - name: 'EncryptedContentInfo.contentEncryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedContentInfo.contentEncryptionAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encAlgorithm' - }, { - name: 'EncryptedContentInfo.contentEncryptionAlgorithm.parameter', - tagClass: asn1.Class.UNIVERSAL, - captureAsn1: 'encParameter' - }] - }, { - name: 'EncryptedContentInfo.encryptedContent', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - /* The PKCS#7 structure output by OpenSSL somewhat differs from what - * other implementations do generate. - * - * OpenSSL generates a structure like this: - * SEQUENCE { - * ... - * [0] - * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38 - * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45 - * ... - * } - * - * Whereas other implementations (and this PKCS#7 module) generate: - * SEQUENCE { - * ... - * [0] { - * OCTET STRING - * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38 - * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45 - * ... - * } - * } - * - * In order to support both, we just capture the context specific - * field here. The OCTET STRING bit is removed below. 
- */ - capture: 'encryptedContent', - captureAsn1: 'encryptedContentAsn1' - }] -}; - -p7v.envelopedDataValidator = { - name: 'EnvelopedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EnvelopedData.Version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, { - name: 'EnvelopedData.RecipientInfos', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - captureAsn1: 'recipientInfos' - }].concat(encryptedContentInfoValidator) -}; -p7v.encryptedDataValidator = { - name: 'EncryptedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedData.Version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }].concat(encryptedContentInfoValidator) -}; +/** + * @param {Headers} headers + * @returns {Record} + */ +function getCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) -var signerValidator = { - name: 'SignerInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignerInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false - }, { - name: 'SignerInfo.issuerAndSerialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignerInfo.issuerAndSerialNumber.issuer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'issuer' - }, { - name: 'SignerInfo.issuerAndSerialNumber.serialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'serial' - }] - }, { - name: 'SignerInfo.digestAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignerInfo.digestAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'digestAlgorithm' - }, { - name: 'SignerInfo.digestAlgorithm.parameter', - tagClass: asn1.Class.UNIVERSAL, - constructed: false, - captureAsn1: 'digestParameter', - optional: true - }] - }, { - name: 'SignerInfo.authenticatedAttributes', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - capture: 'authenticatedAttributes' - }, { - name: 'SignerInfo.digestEncryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - capture: 'signatureAlgorithm' - }, { - name: 'SignerInfo.encryptedDigest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'signature' - }, { - name: 'SignerInfo.unauthenticatedAttributes', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - constructed: true, - optional: true, - capture: 'unauthenticatedAttributes' - }] -}; + webidl.brandCheck(headers, Headers, { strict: false }) -p7v.signedDataValidator = { - name: 'SignedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignedData.Version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, { - name: 'SignedData.DigestAlgorithms', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - captureAsn1: 'digestAlgorithms' - }, - contentInfoValidator, - { - name: 'SignedData.Certificates', - tagClass: 
asn1.Class.CONTEXT_SPECIFIC, - type: 0, - optional: true, - captureAsn1: 'certificates' - }, { - name: 'SignedData.CertificateRevocationLists', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - optional: true, - captureAsn1: 'crls' - }, { - name: 'SignedData.SignerInfos', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - capture: 'signerInfos', - optional: true, - value: [signerValidator] - }] -}; + const cookie = headers.get('cookie') + const out = {} -p7v.recipientInfoValidator = { - name: 'RecipientInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'RecipientInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, { - name: 'RecipientInfo.issuerAndSerial', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'RecipientInfo.issuerAndSerial.issuer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'issuer' - }, { - name: 'RecipientInfo.issuerAndSerial.serialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'serial' - }] - }, { - name: 'RecipientInfo.keyEncryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'RecipientInfo.keyEncryptionAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encAlgorithm' - }, { - name: 'RecipientInfo.keyEncryptionAlgorithm.parameter', - tagClass: asn1.Class.UNIVERSAL, - constructed: false, - captureAsn1: 'encParameter', - optional: true - }] - }, { - name: 'RecipientInfo.encryptedKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'encKey' - }] -}; + if (!cookie) { + return out + } + for (const piece of cookie.split(';')) { + const [name, ...value] = piece.split('=') -/***/ }), + out[name.trim()] = value.join('=') + } -/***/ 6924: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return out +} /** - * Javascript implementation of a basic Public Key Infrastructure, including - * support for RSA public and private keys. - * - * @author Dave Longley - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. + * @param {Headers} headers + * @param {string} name + * @param {{ path?: string, domain?: string }|undefined} attributes + * @returns {void} */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(1925); -__nccwpck_require__(1281); -__nccwpck_require__(154); -__nccwpck_require__(1611); -__nccwpck_require__(466); -__nccwpck_require__(4376); -__nccwpck_require__(3921); -__nccwpck_require__(8339); -__nccwpck_require__(8180); - -// shortcut for asn.1 API -var asn1 = forge.asn1; - -/* Public Key Infrastructure (PKI) implementation. 
*/ -var pki = module.exports = forge.pki = forge.pki || {}; +function deleteCookie (headers, name, attributes) { + webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + name = webidl.converters.DOMString(name) + attributes = webidl.converters.DeleteCookieAttributes(attributes) + + // Matches behavior of + // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 + setCookie(headers, { + name, + value: '', + expires: new Date(0), + ...attributes + }) +} /** - * NOTE: THIS METHOD IS DEPRECATED. Use pem.decode() instead. - * - * Converts PEM-formatted data to DER. - * - * @param pem the PEM-formatted data. - * - * @return the DER-formatted data. + * @param {Headers} headers + * @returns {Cookie[]} */ -pki.pemToDer = function(pem) { - var msg = forge.pem.decode(pem)[0]; - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert PEM to DER; PEM is encrypted.'); +function getSetCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookies = getHeadersList(headers).cookies + + if (!cookies) { + return [] } - return forge.util.createBuffer(msg.body); -}; + + // In older versions of undici, cookies is a list of name:value. + return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair)) +} /** - * Converts an RSA private key from PEM format. - * - * @param pem the PEM-formatted private key. - * - * @return the private key. + * @param {Headers} headers + * @param {Cookie} cookie + * @returns {void} */ -pki.privateKeyFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; +function setCookie (headers, cookie) { + webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) - if(msg.type !== 'PRIVATE KEY' && msg.type !== 'RSA PRIVATE KEY') { - var error = new Error('Could not convert private key from PEM; PEM ' + - 'header type is not "PRIVATE KEY" or "RSA PRIVATE KEY".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert private key from PEM; PEM is encrypted.'); + webidl.brandCheck(headers, Headers, { strict: false }) + + cookie = webidl.converters.Cookie(cookie) + + const str = stringify(cookie) + + if (str) { + headers.append('Set-Cookie', stringify(cookie)) } +} - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body); +webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + } +]) - return pki.privateKeyFromAsn1(obj); -}; +webidl.converters.Cookie = webidl.dictionaryConverter([ + { + converter: webidl.converters.DOMString, + key: 'name' + }, + { + converter: webidl.converters.DOMString, + key: 'value' + }, + { + converter: webidl.nullableConverter((value) => { + if (typeof value === 'number') { + return webidl.converters['unsigned long long'](value) + } -/** - * Converts an RSA private key to PEM format. - * - * @param key the private key. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted private key. 
- */ -pki.privateKeyToPem = function(key, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'RSA PRIVATE KEY', - body: asn1.toDer(pki.privateKeyToAsn1(key)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + return new Date(value) + }), + key: 'expires', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters['long long']), + key: 'maxAge', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'secure', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'httpOnly', + defaultValue: null + }, + { + converter: webidl.converters.USVString, + key: 'sameSite', + allowedValues: ['Strict', 'Lax', 'None'] + }, + { + converter: webidl.sequenceConverter(webidl.converters.DOMString), + key: 'unparsed', + defaultValue: [] + } +]) -/** - * Converts a PrivateKeyInfo to PEM format. - * - * @param pki the PrivateKeyInfo. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted private key. - */ -pki.privateKeyInfoToPem = function(pki, maxline) { - // convert to DER, then PEM-encode - var msg = { - type: 'PRIVATE KEY', - body: asn1.toDer(pki).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; +module.exports = { + getCookies, + deleteCookie, + getSetCookies, + setCookie +} /***/ }), -/***/ 6861: +/***/ 4408: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + +const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(663) +const { isCTLExcludingHtab } = __nccwpck_require__(3121) +const { collectASequenceOfCodePointsFast } = __nccwpck_require__(685) +const assert = __nccwpck_require__(9491) + /** - * Prime number generation API. - * - * @author Dave Longley - * - * Copyright (c) 2014 Digital Bazaar, Inc. + * @description Parses the field-value attributes of a set-cookie header string. + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} header + * @returns if the header is invalid, null will be returned */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(7052); -__nccwpck_require__(7821); +function parseSetCookie (header) { + // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F + // character (CTL characters excluding HTAB): Abort these steps and + // ignore the set-cookie-string entirely. + if (isCTLExcludingHtab(header)) { + return null + } -(function() { + let nameValuePair = '' + let unparsedAttributes = '' + let name = '' + let value = '' -// forge.prime already defined -if(forge.prime) { - module.exports = forge.prime; - return; -} + // 2. If the set-cookie-string contains a %x3B (";") character: + if (header.includes(';')) { + // 1. The name-value-pair string consists of the characters up to, + // but not including, the first %x3B (";"), and the unparsed- + // attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question). + const position = { position: 0 } + + nameValuePair = collectASequenceOfCodePointsFast(';', header, position) + unparsedAttributes = header.slice(position.position) + } else { + // Otherwise: + + // 1. 
The name-value-pair string consists of all the characters + // contained in the set-cookie-string, and the unparsed- + // attributes is the empty string. + nameValuePair = header + } + + // 3. If the name-value-pair string lacks a %x3D ("=") character, then + // the name string is empty, and the value string is the value of + // name-value-pair. + if (!nameValuePair.includes('=')) { + value = nameValuePair + } else { + // Otherwise, the name string consists of the characters up to, but + // not including, the first %x3D ("=") character, and the (possibly + // empty) value string consists of the characters after the first + // %x3D ("=") character. + const position = { position: 0 } + name = collectASequenceOfCodePointsFast( + '=', + nameValuePair, + position + ) + value = nameValuePair.slice(position.position + 1) + } -/* PRIME API */ -var prime = module.exports = forge.prime = forge.prime || {}; + // 4. Remove any leading or trailing WSP characters from the name + // string and the value string. + name = name.trim() + value = value.trim() -var BigInteger = forge.jsbn.BigInteger; + // 5. If the sum of the lengths of the name string and the value string + // is more than 4096 octets, abort these steps and ignore the set- + // cookie-string entirely. + if (name.length + value.length > maxNameValuePairSize) { + return null + } -// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 -var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; -var THIRTY = new BigInteger(null); -THIRTY.fromInt(30); -var op_or = function(x, y) {return x|y;}; + // 6. The cookie-name is the name string, and the cookie-value is the + // value string. + return { + name, value, ...parseUnparsedAttributes(unparsedAttributes) + } +} /** - * Generates a random probable prime with the given number of bits. - * - * Alternative algorithms can be specified by name as a string or as an - * object with custom options like so: - * - * { - * name: 'PRIMEINC', - * options: { - * maxBlockTime: , - * millerRabinTests: , - * workerScript: , - * workers: . - * workLoad: the size of the work load, ie: number of possible prime - * numbers for each web worker to check per work assignment, - * (default: 100). - * } - * } - * - * @param bits the number of bits for the prime number. - * @param options the options to use. - * [algorithm] the algorithm to use (default: 'PRIMEINC'). - * [prng] a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". - * - * @return callback(err, num) called once the operation completes. + * Parses the remaining attributes of a set-cookie header + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} unparsedAttributes + * @param {[Object.]={}} cookieAttributeList */ -prime.generateProbablePrime = function(bits, options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; +function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { + // 1. If the unparsed-attributes string is empty, skip the rest of + // these steps. 
+ if (unparsedAttributes.length === 0) { + return cookieAttributeList } - options = options || {}; - // default to PRIMEINC algorithm - var algorithm = options.algorithm || 'PRIMEINC'; - if(typeof algorithm === 'string') { - algorithm = {name: algorithm}; - } - algorithm.options = algorithm.options || {}; - - // create prng with api that matches BigInteger secure random - var prng = options.prng || forge.random; - var rng = { - // x is an array to fill with bytes - nextBytes: function(x) { - var b = prng.getBytesSync(x.length); - for(var i = 0; i < x.length; ++i) { - x[i] = b.charCodeAt(i); - } - } - }; + // 2. Discard the first character of the unparsed-attributes (which + // will be a %x3B (";") character). + assert(unparsedAttributes[0] === ';') + unparsedAttributes = unparsedAttributes.slice(1) - if(algorithm.name === 'PRIMEINC') { - return primeincFindPrime(bits, rng, algorithm.options, callback); - } + let cookieAv = '' - throw new Error('Invalid prime generation algorithm: ' + algorithm.name); -}; + // 3. If the remaining unparsed-attributes contains a %x3B (";") + // character: + if (unparsedAttributes.includes(';')) { + // 1. Consume the characters of the unparsed-attributes up to, but + // not including, the first %x3B (";") character. + cookieAv = collectASequenceOfCodePointsFast( + ';', + unparsedAttributes, + { position: 0 } + ) + unparsedAttributes = unparsedAttributes.slice(cookieAv.length) + } else { + // Otherwise: -function primeincFindPrime(bits, rng, options, callback) { - if('workers' in options) { - return primeincFindPrimeWithWorkers(bits, rng, options, callback); + // 1. Consume the remainder of the unparsed-attributes. + cookieAv = unparsedAttributes + unparsedAttributes = '' } - return primeincFindPrimeWithoutWorkers(bits, rng, options, callback); -} -function primeincFindPrimeWithoutWorkers(bits, rng, options, callback) { - // initialize random number - var num = generateRandom(bits, rng); + // Let the cookie-av string be the characters consumed in this step. - /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The - number we are given is always aligned at 30k + 1. Each time the number is - determined not to be prime we add to get to the next 'i', eg: if the number - was at 30k + 1 we add 6. */ - var deltaIdx = 0; + let attributeName = '' + let attributeValue = '' - // get required number of MR tests - var mrTests = getMillerRabinTests(num.bitLength()); - if('millerRabinTests' in options) { - mrTests = options.millerRabinTests; - } + // 4. If the cookie-av string contains a %x3D ("=") character: + if (cookieAv.includes('=')) { + // 1. The (possibly empty) attribute-name string consists of the + // characters up to, but not including, the first %x3D ("=") + // character, and the (possibly empty) attribute-value string + // consists of the characters after the first %x3D ("=") + // character. + const position = { position: 0 } - // find prime nearest to 'num' for maxBlockTime ms - // 10 ms gives 5ms of leeway for other calculations before dropping - // below 60fps (1000/60 == 16.67), but in reality, the number will - // likely be higher due to an 'atomic' big int modPow - var maxBlockTime = 10; - if('maxBlockTime' in options) { - maxBlockTime = options.maxBlockTime; - } + attributeName = collectASequenceOfCodePointsFast( + '=', + cookieAv, + position + ) + attributeValue = cookieAv.slice(position.position + 1) + } else { + // Otherwise: - _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback); -} + // 1. 
The attribute-name string consists of the entire cookie-av + // string, and the attribute-value string is empty. + attributeName = cookieAv + } + + // 5. Remove any leading or trailing WSP characters from the attribute- + // name string and the attribute-value string. + attributeName = attributeName.trim() + attributeValue = attributeValue.trim() + + // 6. If the attribute-value is longer than 1024 octets, ignore the + // cookie-av string and return to Step 1 of this algorithm. + if (attributeValue.length > maxAttributeValueSize) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 7. Process the attribute-name and attribute-value according to the + // requirements in the following subsections. (Notice that + // attributes with unrecognized attribute-names are ignored.) + const attributeNameLowercase = attributeName.toLowerCase() + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 + // If the attribute-name case-insensitively matches the string + // "Expires", the user agent MUST process the cookie-av as follows. + if (attributeNameLowercase === 'expires') { + // 1. Let the expiry-time be the result of parsing the attribute-value + // as cookie-date (see Section 5.1.1). + const expiryTime = new Date(attributeValue) + + // 2. If the attribute-value failed to parse as a cookie date, ignore + // the cookie-av. + + cookieAttributeList.expires = expiryTime + } else if (attributeNameLowercase === 'max-age') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 + // If the attribute-name case-insensitively matches the string "Max- + // Age", the user agent MUST process the cookie-av as follows. + + // 1. If the first character of the attribute-value is not a DIGIT or a + // "-" character, ignore the cookie-av. + const charCode = attributeValue.charCodeAt(0) + + if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 2. If the remainder of attribute-value contains a non-DIGIT + // character, ignore the cookie-av. + if (!/^\d+$/.test(attributeValue)) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 3. Let delta-seconds be the attribute-value converted to an integer. + const deltaSeconds = Number(attributeValue) + + // 4. Let cookie-age-limit be the maximum age of the cookie (which + // SHOULD be 400 days or less, see Section 4.1.2.2). + + // 5. Set delta-seconds to the smaller of its present value and cookie- + // age-limit. + // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) + + // 6. If delta-seconds is less than or equal to zero (0), let expiry- + // time be the earliest representable date and time. Otherwise, let + // the expiry-time be the current date and time plus delta-seconds + // seconds. + // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds + + // 7. Append an attribute to the cookie-attribute-list with an + // attribute-name of Max-Age and an attribute-value of expiry-time. + cookieAttributeList.maxAge = deltaSeconds + } else if (attributeNameLowercase === 'domain') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 + // If the attribute-name case-insensitively matches the string "Domain", + // the user agent MUST process the cookie-av as follows. + + // 1. Let cookie-domain be the attribute-value. + let cookieDomain = attributeValue + + // 2. 
If cookie-domain starts with %x2E ("."), let cookie-domain be + // cookie-domain without its leading %x2E ("."). + if (cookieDomain[0] === '.') { + cookieDomain = cookieDomain.slice(1) + } + + // 3. Convert the cookie-domain to lower case. + cookieDomain = cookieDomain.toLowerCase() + + // 4. Append an attribute to the cookie-attribute-list with an + // attribute-name of Domain and an attribute-value of cookie-domain. + cookieAttributeList.domain = cookieDomain + } else if (attributeNameLowercase === 'path') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 + // If the attribute-name case-insensitively matches the string "Path", + // the user agent MUST process the cookie-av as follows. + + // 1. If the attribute-value is empty or if the first character of the + // attribute-value is not %x2F ("/"): + let cookiePath = '' + if (attributeValue.length === 0 || attributeValue[0] !== '/') { + // 1. Let cookie-path be the default-path. + cookiePath = '/' + } else { + // Otherwise: -function _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) { - var start = +new Date(); - do { - // overflow, regenerate random number - if(num.bitLength() > bits) { - num = generateRandom(bits, rng); - } - // do primality test - if(num.isProbablePrime(mrTests)) { - return callback(null, num); + // 1. Let cookie-path be the attribute-value. + cookiePath = attributeValue } - // get next potential prime - num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); - } while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime)); - // keep trying later - forge.util.setImmediate(function() { - _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback); - }); -} + // 2. Append an attribute to the cookie-attribute-list with an + // attribute-name of Path and an attribute-value of cookie-path. + cookieAttributeList.path = cookiePath + } else if (attributeNameLowercase === 'secure') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 + // If the attribute-name case-insensitively matches the string "Secure", + // the user agent MUST append an attribute to the cookie-attribute-list + // with an attribute-name of Secure and an empty attribute-value. -// NOTE: This algorithm is indeterminate in nature because workers -// run in parallel looking at different segments of numbers. Even if this -// algorithm is run twice with the same input from a predictable RNG, it -// may produce different outputs. 
-function primeincFindPrimeWithWorkers(bits, rng, options, callback) { - // web workers unavailable - if(typeof Worker === 'undefined') { - return primeincFindPrimeWithoutWorkers(bits, rng, options, callback); - } - - // initialize random number - var num = generateRandom(bits, rng); - - // use web workers to generate keys - var numWorkers = options.workers; - var workLoad = options.workLoad || 100; - var range = workLoad * 30 / 8; - var workerScript = options.workerScript || 'forge/prime.worker.js'; - if(numWorkers === -1) { - return forge.util.estimateCores(function(err, cores) { - if(err) { - // default to 2 - cores = 2; - } - numWorkers = cores - 1; - generate(); - }); - } - generate(); + cookieAttributeList.secure = true + } else if (attributeNameLowercase === 'httponly') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 + // If the attribute-name case-insensitively matches the string + // "HttpOnly", the user agent MUST append an attribute to the cookie- + // attribute-list with an attribute-name of HttpOnly and an empty + // attribute-value. - function generate() { - // require at least 1 worker - numWorkers = Math.max(1, numWorkers); + cookieAttributeList.httpOnly = true + } else if (attributeNameLowercase === 'samesite') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 + // If the attribute-name case-insensitively matches the string + // "SameSite", the user agent MUST process the cookie-av as follows: - // TODO: consider optimizing by starting workers outside getPrime() ... - // note that in order to clean up they will have to be made internally - // asynchronous which may actually be slower + // 1. Let enforcement be "Default". + let enforcement = 'Default' - // start workers immediately - var workers = []; - for(var i = 0; i < numWorkers; ++i) { - // FIXME: fix path or use blob URLs - workers[i] = new Worker(workerScript); + const attributeValueLowercase = attributeValue.toLowerCase() + // 2. If cookie-av's attribute-value is a case-insensitive match for + // "None", set enforcement to "None". + if (attributeValueLowercase.includes('none')) { + enforcement = 'None' } - var running = numWorkers; - // listen for requests from workers and assign ranges to find prime - for(var i = 0; i < numWorkers; ++i) { - workers[i].addEventListener('message', workerMessage); + // 3. If cookie-av's attribute-value is a case-insensitive match for + // "Strict", set enforcement to "Strict". + if (attributeValueLowercase.includes('strict')) { + enforcement = 'Strict' } - /* Note: The distribution of random numbers is unknown. Therefore, each - web worker is continuously allocated a range of numbers to check for a - random number until one is found. + // 4. If cookie-av's attribute-value is a case-insensitive match for + // "Lax", set enforcement to "Lax". + if (attributeValueLowercase.includes('lax')) { + enforcement = 'Lax' + } - Every 30 numbers will be checked just 8 times, because prime numbers - have the form: + // 5. Append an attribute to the cookie-attribute-list with an + // attribute-name of "SameSite" and an attribute-value of + // enforcement. 
+ cookieAttributeList.sameSite = enforcement + } else { + cookieAttributeList.unparsed ??= [] - 30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this) + cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) + } - Therefore, if we want a web worker to run N checks before asking for - a new range of numbers, each range must contain N*30/8 numbers. + // 8. Return to Step 1 of this algorithm. + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) +} - For 100 checks (workLoad), this is a range of 375. */ +module.exports = { + parseSetCookie, + parseUnparsedAttributes +} - var found = false; - function workerMessage(e) { - // ignore message, prime already found - if(found) { - return; - } - --running; - var data = e.data; - if(data.found) { - // terminate all workers - for(var i = 0; i < workers.length; ++i) { - workers[i].terminate(); - } - found = true; - return callback(null, new BigInteger(data.prime, 16)); - } +/***/ }), - // overflow, regenerate random number - if(num.bitLength() > bits) { - num = generateRandom(bits, rng); - } +/***/ 3121: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // assign new range to check - var hex = num.toString(16); +"use strict"; - // start prime search - e.target.postMessage({ - hex: hex, - workLoad: workLoad - }); - num.dAddOffset(range, 0); +const assert = __nccwpck_require__(9491) +const { kHeadersList } = __nccwpck_require__(2785) + +function isCTLExcludingHtab (value) { + if (value.length === 0) { + return false + } + + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + (code >= 0x00 || code <= 0x08) || + (code >= 0x0A || code <= 0x1F) || + code === 0x7F + ) { + return false } } } /** - * Generates a random number using the given number of bits and RNG. - * - * @param bits the number of bits for the number. - * @param rng the random number generator to use. - * - * @return the random number. + CHAR = + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + * @param {string} name */ -function generateRandom(bits, rng) { - var num = new BigInteger(bits, rng); - // force MSB set - var bits1 = bits - 1; - if(!num.testBit(bits1)) { - num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num); +function validateCookieName (name) { + for (const char of name) { + const code = char.charCodeAt(0) + + if ( + (code <= 0x20 || code > 0x7F) || + char === '(' || + char === ')' || + char === '>' || + char === '<' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' + ) { + throw new Error('Invalid cookie name') + } } - // align number on 30k+1 boundary - num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0); - return num; } /** - * Returns the required number of Miller-Rabin tests to generate a - * prime with an error probability of (1/2)^80. - * - * See Handbook of Applied Cryptography Chapter 4, Table 4.4. - * - * @param bits the bit size. - * - * @return the required number of iterations. 
+ cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + ; US-ASCII characters excluding CTLs, + ; whitespace DQUOTE, comma, semicolon, + ; and backslash + * @param {string} value */ -function getMillerRabinTests(bits) { - if(bits <= 100) return 27; - if(bits <= 150) return 18; - if(bits <= 200) return 15; - if(bits <= 250) return 12; - if(bits <= 300) return 9; - if(bits <= 350) return 8; - if(bits <= 400) return 7; - if(bits <= 500) return 6; - if(bits <= 600) return 5; - if(bits <= 800) return 4; - if(bits <= 1250) return 3; - return 2; -} - -})(); - - -/***/ }), +function validateCookieValue (value) { + for (const char of value) { + const code = char.charCodeAt(0) -/***/ 4467: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if ( + code < 0x21 || // exclude CTLs (0-31) + code === 0x22 || + code === 0x2C || + code === 0x3B || + code === 0x5C || + code > 0x7E // non-ascii + ) { + throw new Error('Invalid header value') + } + } +} /** - * A javascript implementation of a cryptographically-secure - * Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed - * here though the use of SHA-256 is not enforced; when generating an - * a PRNG context, the hashing algorithm and block cipher used for - * the generator are specified via a plugin. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. + * path-value = + * @param {string} path */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); +function validateCookiePath (path) { + for (const char of path) { + const code = char.charCodeAt(0) -var _crypto = null; -if(forge.util.isNodejs && !forge.options.usePureJavaScript && - !process.versions['node-webkit']) { - _crypto = __nccwpck_require__(6113); + if (code < 0x21 || char === ';') { + throw new Error('Invalid cookie path') + } + } } -/* PRNG API */ -var prng = module.exports = forge.prng = forge.prng || {}; - /** - * Creates a new PRNG context. - * - * A PRNG plugin must be passed in that will provide: - * - * 1. A function that initializes the key and seed of a PRNG context. It - * will be given a 16 byte key and a 16 byte seed. Any key expansion - * or transformation of the seed from a byte string into an array of - * integers (or similar) should be performed. - * 2. The cryptographic function used by the generator. It takes a key and - * a seed. - * 3. A seed increment function. It takes the seed and returns seed + 1. - * 4. An api to create a message digest. - * - * For an example, see random.js. - * - * @param plugin the PRNG plugin to use. + * I have no idea why these values aren't allowed to be honest, + * but Deno tests these. - Khafra + * @param {string} domain */ -prng.create = function(plugin) { - var ctx = { - plugin: plugin, - key: null, - seed: null, - time: null, - // number of reseeds so far - reseeds: 0, - // amount of data generated so far - generated: 0, - // no initial key bytes - keyBytes: '' - }; - - // create 32 entropy pools (each is a message digest) - var md = plugin.md; - var pools = new Array(32); - for(var i = 0; i < 32; ++i) { - pools[i] = md.create(); +function validateCookieDomain (domain) { + if ( + domain.startsWith('-') || + domain.endsWith('.') || + domain.endsWith('-') + ) { + throw new Error('Invalid cookie domain') } - ctx.pools = pools; - - // entropy pools are written to cyclically, starting at index 0 - ctx.pool = 0; - - /** - * Generates random bytes. 
The bytes may be generated synchronously or - * asynchronously. Web workers must use the asynchronous interface or - * else the behavior is undefined. - * - * @param count the number of random bytes to generate. - * @param [callback(err, bytes)] called once the operation completes. - * - * @return count random bytes as a string. - */ - ctx.generate = function(count, callback) { - // do synchronously - if(!callback) { - return ctx.generateSync(count); - } - - // simple generator using counter-based CBC - var cipher = ctx.plugin.cipher; - var increment = ctx.plugin.increment; - var formatKey = ctx.plugin.formatKey; - var formatSeed = ctx.plugin.formatSeed; - var b = forge.util.createBuffer(); - - // paranoid deviation from Fortuna: - // reset key for every request to protect previously - // generated random bytes should the key be discovered; - // there is no 100ms based reseeding because of this - // forced reseed for every `generate` call - ctx.key = null; - - generate(); - - function generate(err) { - if(err) { - return callback(err); - } - - // sufficient bytes generated - if(b.length() >= count) { - return callback(null, b.getBytes(count)); - } - - // if amount of data generated is greater than 1 MiB, trigger reseed - if(ctx.generated > 0xfffff) { - ctx.key = null; - } - - if(ctx.key === null) { - // prevent stack overflow - return forge.util.nextTick(function() { - _reseed(generate); - }); - } - - // generate the random bytes - var bytes = cipher(ctx.key, ctx.seed); - ctx.generated += bytes.length; - b.putBytes(bytes); - - // generate bytes for a new key and seed - ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed))); - ctx.seed = formatSeed(cipher(ctx.key, ctx.seed)); - - forge.util.setImmediate(generate); - } - }; +} - /** - * Generates random bytes synchronously. - * - * @param count the number of random bytes to generate. - * - * @return count random bytes as a string. 
- */ - ctx.generateSync = function(count) { - // simple generator using counter-based CBC - var cipher = ctx.plugin.cipher; - var increment = ctx.plugin.increment; - var formatKey = ctx.plugin.formatKey; - var formatSeed = ctx.plugin.formatSeed; - - // paranoid deviation from Fortuna: - // reset key for every request to protect previously - // generated random bytes should the key be discovered; - // there is no 100ms based reseeding because of this - // forced reseed for every `generateSync` call - ctx.key = null; - - var b = forge.util.createBuffer(); - while(b.length() < count) { - // if amount of data generated is greater than 1 MiB, trigger reseed - if(ctx.generated > 0xfffff) { - ctx.key = null; - } +/** + * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 + * @param {number|Date} date + IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT + ; fixed length/zone/capitalization subset of the format + ; see Section 3.3 of [RFC5322] - if(ctx.key === null) { - _reseedSync(); - } + day-name = %x4D.6F.6E ; "Mon", case-sensitive + / %x54.75.65 ; "Tue", case-sensitive + / %x57.65.64 ; "Wed", case-sensitive + / %x54.68.75 ; "Thu", case-sensitive + / %x46.72.69 ; "Fri", case-sensitive + / %x53.61.74 ; "Sat", case-sensitive + / %x53.75.6E ; "Sun", case-sensitive + date1 = day SP month SP year + ; e.g., 02 Jun 1982 - // generate the random bytes - var bytes = cipher(ctx.key, ctx.seed); - ctx.generated += bytes.length; - b.putBytes(bytes); + day = 2DIGIT + month = %x4A.61.6E ; "Jan", case-sensitive + / %x46.65.62 ; "Feb", case-sensitive + / %x4D.61.72 ; "Mar", case-sensitive + / %x41.70.72 ; "Apr", case-sensitive + / %x4D.61.79 ; "May", case-sensitive + / %x4A.75.6E ; "Jun", case-sensitive + / %x4A.75.6C ; "Jul", case-sensitive + / %x41.75.67 ; "Aug", case-sensitive + / %x53.65.70 ; "Sep", case-sensitive + / %x4F.63.74 ; "Oct", case-sensitive + / %x4E.6F.76 ; "Nov", case-sensitive + / %x44.65.63 ; "Dec", case-sensitive + year = 4DIGIT - // generate bytes for a new key and seed - ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed))); - ctx.seed = formatSeed(cipher(ctx.key, ctx.seed)); - } + GMT = %x47.4D.54 ; "GMT", case-sensitive - return b.getBytes(count); - }; + time-of-day = hour ":" minute ":" second + ; 00:00:00 - 23:59:60 (leap second) - /** - * Private function that asynchronously reseeds a generator. - * - * @param callback(err) called once the operation completes. - */ - function _reseed(callback) { - if(ctx.pools[0].messageLength >= 32) { - _seed(); - return callback(); - } - // not enough seed data... - var needed = (32 - ctx.pools[0].messageLength) << 5; - ctx.seedFile(needed, function(err, bytes) { - if(err) { - return callback(err); - } - ctx.collect(bytes); - _seed(); - callback(); - }); + hour = 2DIGIT + minute = 2DIGIT + second = 2DIGIT + */ +function toIMFDate (date) { + if (typeof date === 'number') { + date = new Date(date) } - /** - * Private function that synchronously reseeds a generator. - */ - function _reseedSync() { - if(ctx.pools[0].messageLength >= 32) { - return _seed(); - } - // not enough seed data... - var needed = (32 - ctx.pools[0].messageLength) << 5; - ctx.collect(ctx.seedFileSync(needed)); - _seed(); - } + const days = [ + 'Sun', 'Mon', 'Tue', 'Wed', + 'Thu', 'Fri', 'Sat' + ] - /** - * Private function that seeds a generator once enough bytes are available. - */ - function _seed() { - // update reseed count - ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 
0 : ctx.reseeds + 1; - - // goal is to update `key` via: - // key = hash(key + s) - // where 's' is all collected entropy from selected pools, then... - - // create a plugin-based message digest - var md = ctx.plugin.md.create(); - - // consume current key bytes - md.update(ctx.keyBytes); - - // digest the entropy of pools whose index k meet the - // condition 'n mod 2^k == 0' where n is the number of reseeds - var _2powK = 1; - for(var k = 0; k < 32; ++k) { - if(ctx.reseeds % _2powK === 0) { - md.update(ctx.pools[k].digest().getBytes()); - ctx.pools[k].start(); - } - _2powK = _2powK << 1; - } + const months = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] - // get digest for key bytes - ctx.keyBytes = md.digest().getBytes(); + const dayName = days[date.getUTCDay()] + const day = date.getUTCDate().toString().padStart(2, '0') + const month = months[date.getUTCMonth()] + const year = date.getUTCFullYear() + const hour = date.getUTCHours().toString().padStart(2, '0') + const minute = date.getUTCMinutes().toString().padStart(2, '0') + const second = date.getUTCSeconds().toString().padStart(2, '0') - // paranoid deviation from Fortuna: - // update `seed` via `seed = hash(key)` - // instead of initializing to zero once and only - // ever incrementing it - md.start(); - md.update(ctx.keyBytes); - var seedBytes = md.digest().getBytes(); + return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` +} - // update state - ctx.key = ctx.plugin.formatKey(ctx.keyBytes); - ctx.seed = ctx.plugin.formatSeed(seedBytes); - ctx.generated = 0; +/** + max-age-av = "Max-Age=" non-zero-digit *DIGIT + ; In practice, both expires-av and max-age-av + ; are limited to dates representable by the + ; user agent. + * @param {number} maxAge + */ +function validateCookieMaxAge (maxAge) { + if (maxAge < 0) { + throw new Error('Invalid cookie max-age') } +} - /** - * The built-in default seedFile. This seedFile is used when entropy - * is needed immediately. - * - * @param needed the number of bytes that are needed. - * - * @return the random bytes. - */ - function defaultSeedFile(needed) { - // use window.crypto.getRandomValues strong source of entropy if available - var getRandomValues = null; - var globalScope = forge.util.globalScope; - var _crypto = globalScope.crypto || globalScope.msCrypto; - if(_crypto && _crypto.getRandomValues) { - getRandomValues = function(arr) { - return _crypto.getRandomValues(arr); - }; - } - - var b = forge.util.createBuffer(); - if(getRandomValues) { - while(b.length() < needed) { - // max byte length is 65536 before QuotaExceededError is thrown - // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues - var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4); - var entropy = new Uint32Array(Math.floor(count)); - try { - getRandomValues(entropy); - for(var i = 0; i < entropy.length; ++i) { - b.putInt32(entropy[i]); - } - } catch(e) { - /* only ignore QuotaExceededError */ - if(!(typeof QuotaExceededError !== 'undefined' && - e instanceof QuotaExceededError)) { - throw e; - } - } - } - } - - // be sad and add some weak random data - if(b.length() < needed) { - /* Draws from Park-Miller "minimal standard" 31 bit PRNG, - implemented with David G. Carta's optimization: with 32 bit math - and without division (Public Domain). 
*/ - var hi, lo, next; - var seed = Math.floor(Math.random() * 0x010000); - while(b.length() < needed) { - lo = 16807 * (seed & 0xFFFF); - hi = 16807 * (seed >> 16); - lo += (hi & 0x7FFF) << 16; - lo += hi >> 15; - lo = (lo & 0x7FFFFFFF) + (lo >> 31); - seed = lo & 0xFFFFFFFF; - - // consume lower 3 bytes of seed - for(var i = 0; i < 3; ++i) { - // throw in more pseudo random - next = seed >>> (i << 3); - next ^= Math.floor(Math.random() * 0x0100); - b.putByte(next & 0xFF); - } - } - } - - return b.getBytes(needed); - } - // initialize seed file APIs - if(_crypto) { - // use nodejs async API - ctx.seedFile = function(needed, callback) { - _crypto.randomBytes(needed, function(err, bytes) { - if(err) { - return callback(err); - } - callback(null, bytes.toString()); - }); - }; - // use nodejs sync API - ctx.seedFileSync = function(needed) { - return _crypto.randomBytes(needed).toString(); - }; - } else { - ctx.seedFile = function(needed, callback) { - try { - callback(null, defaultSeedFile(needed)); - } catch(e) { - callback(e); - } - }; - ctx.seedFileSync = defaultSeedFile; +/** + * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * @param {import('./index').Cookie} cookie + */ +function stringify (cookie) { + if (cookie.name.length === 0) { + return null } - /** - * Adds entropy to a prng ctx's accumulator. - * - * @param bytes the bytes of entropy as a string. - */ - ctx.collect = function(bytes) { - // iterate over pools distributing entropy cyclically - var count = bytes.length; - for(var i = 0; i < count; ++i) { - ctx.pools[ctx.pool].update(bytes.substr(i, 1)); - ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1; - } - }; + validateCookieName(cookie.name) + validateCookieValue(cookie.value) - /** - * Collects an integer of n bits. - * - * @param i the integer entropy. - * @param n the number of bits in the integer. - */ - ctx.collectInt = function(i, n) { - var bytes = ''; - for(var x = 0; x < n; x += 8) { - bytes += String.fromCharCode((i >> x) & 0xFF); - } - ctx.collect(bytes); - }; + const out = [`${cookie.name}=${cookie.value}`] - /** - * Registers a Web Worker to receive immediate entropy from the main thread. - * This method is required until Web Workers can access the native crypto - * API. This method should be called twice for each created worker, once in - * the main thread, and once in the worker itself. - * - * @param worker the worker to register. - */ - ctx.registerWorker = function(worker) { - // worker receives random bytes - if(worker === self) { - ctx.seedFile = function(needed, callback) { - function listener(e) { - var data = e.data; - if(data.forge && data.forge.prng) { - self.removeEventListener('message', listener); - callback(data.forge.prng.err, data.forge.prng.bytes); - } - } - self.addEventListener('message', listener); - self.postMessage({forge: {prng: {needed: needed}}}); - }; - } else { - // main thread sends random bytes upon request - var listener = function(e) { - var data = e.data; - if(data.forge && data.forge.prng) { - ctx.seedFile(data.forge.prng.needed, function(err, bytes) { - worker.postMessage({forge: {prng: {err: err, bytes: bytes}}}); - }); - } - }; - // TODO: do we need to remove the event listener when the worker dies? 
- worker.addEventListener('message', listener); - } - }; + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 + if (cookie.name.startsWith('__Secure-')) { + cookie.secure = true + } - return ctx; -}; + if (cookie.name.startsWith('__Host-')) { + cookie.secure = true + cookie.domain = null + cookie.path = '/' + } + if (cookie.secure) { + out.push('Secure') + } -/***/ }), + if (cookie.httpOnly) { + out.push('HttpOnly') + } -/***/ 4376: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof cookie.maxAge === 'number') { + validateCookieMaxAge(cookie.maxAge) + out.push(`Max-Age=${cookie.maxAge}`) + } -/** - * Javascript implementation of PKCS#1 PSS signature padding. - * - * @author Stefan Siegl - * - * Copyright (c) 2012 Stefan Siegl - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7821); -__nccwpck_require__(8339); + if (cookie.domain) { + validateCookieDomain(cookie.domain) + out.push(`Domain=${cookie.domain}`) + } -// shortcut for PSS API -var pss = module.exports = forge.pss = forge.pss || {}; + if (cookie.path) { + validateCookiePath(cookie.path) + out.push(`Path=${cookie.path}`) + } -/** - * Creates a PSS signature scheme object. - * - * There are several ways to provide a salt for encoding: - * - * 1. Specify the saltLength only and the built-in PRNG will generate it. - * 2. Specify the saltLength and a custom PRNG with 'getBytesSync' defined that - * will be used. - * 3. Specify the salt itself as a forge.util.ByteBuffer. - * - * @param options the options to use: - * md the message digest object to use, a forge md instance. - * mgf the mask generation function to use, a forge mgf instance. - * [saltLength] the length of the salt in octets. - * [prng] the pseudo-random number generator to use to produce a salt. - * [salt] the salt to use when encoding. - * - * @return a signature scheme object. 
- */ -pss.create = function(options) { - // backwards compatibility w/legacy args: hash, mgf, sLen - if(arguments.length === 3) { - options = { - md: arguments[0], - mgf: arguments[1], - saltLength: arguments[2] - }; + if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { + out.push(`Expires=${toIMFDate(cookie.expires)}`) + } + + if (cookie.sameSite) { + out.push(`SameSite=${cookie.sameSite}`) } - var hash = options.md; - var mgf = options.mgf; - var hLen = hash.digestLength; + for (const part of cookie.unparsed) { + if (!part.includes('=')) { + throw new Error('Invalid unparsed') + } + + const [key, ...value] = part.split('=') - var salt_ = options.salt || null; - if(typeof salt_ === 'string') { - // assume binary-encoded string - salt_ = forge.util.createBuffer(salt_); + out.push(`${key.trim()}=${value.join('=')}`) } - var sLen; - if('saltLength' in options) { - sLen = options.saltLength; - } else if(salt_ !== null) { - sLen = salt_.length(); - } else { - throw new Error('Salt length not specified or specific salt not given.'); + return out.join('; ') +} + +let kHeadersListNode + +function getHeadersList (headers) { + if (headers[kHeadersList]) { + return headers[kHeadersList] } - if(salt_ !== null && salt_.length() !== sLen) { - throw new Error('Given salt length does not match length of given salt.'); + if (!kHeadersListNode) { + kHeadersListNode = Object.getOwnPropertySymbols(headers).find( + (symbol) => symbol.description === 'headers list' + ) + + assert(kHeadersListNode, 'Headers cannot be parsed') } - var prng = options.prng || forge.random; + const headersList = headers[kHeadersListNode] + assert(headersList) + + return headersList +} - var pssobj = {}; +module.exports = { + isCTLExcludingHtab, + stringify, + getHeadersList +} - /** - * Encodes a PSS signature. - * - * This function implements EMSA-PSS-ENCODE as per RFC 3447, section 9.1.1. - * - * @param md the message digest object with the hash to sign. - * @param modsBits the length of the RSA modulus in bits. - * - * @return the encoded message as a binary-encoded string of length - * ceil((modBits - 1) / 8). - */ - pssobj.encode = function(md, modBits) { - var i; - var emBits = modBits - 1; - var emLen = Math.ceil(emBits / 8); - /* 2. Let mHash = Hash(M), an octet string of length hLen. */ - var mHash = md.digest().getBytes(); +/***/ }), - /* 3. If emLen < hLen + sLen + 2, output "encoding error" and stop. */ - if(emLen < hLen + sLen + 2) { - throw new Error('Message is too long to encrypt.'); - } +/***/ 2067: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /* 4. Generate a random octet string salt of length sLen; if sLen = 0, - * then salt is the empty string. */ - var salt; - if(salt_ === null) { - salt = prng.getBytesSync(sLen); - } else { - salt = salt_.bytes(); - } - - /* 5. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt; */ - var m_ = new forge.util.ByteBuffer(); - m_.fillWithByte(0, 8); - m_.putBytes(mHash); - m_.putBytes(salt); - - /* 6. Let H = Hash(M'), an octet string of length hLen. */ - hash.start(); - hash.update(m_.getBytes()); - var h = hash.digest().getBytes(); - - /* 7. Generate an octet string PS consisting of emLen - sLen - hLen - 2 - * zero octets. The length of PS may be 0. */ - var ps = new forge.util.ByteBuffer(); - ps.fillWithByte(0, emLen - sLen - hLen - 2); - - /* 8. Let DB = PS || 0x01 || salt; DB is an octet string of length - * emLen - hLen - 1. */ - ps.putByte(0x01); - ps.putBytes(salt); - var db = ps.getBytes(); - - /* 9. 
Let dbMask = MGF(H, emLen - hLen - 1). */ - var maskLen = emLen - hLen - 1; - var dbMask = mgf.generate(h, maskLen); - - /* 10. Let maskedDB = DB \xor dbMask. */ - var maskedDB = ''; - for(i = 0; i < maskLen; i++) { - maskedDB += String.fromCharCode(db.charCodeAt(i) ^ dbMask.charCodeAt(i)); - } - - /* 11. Set the leftmost 8emLen - emBits bits of the leftmost octet in - * maskedDB to zero. */ - var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF; - maskedDB = String.fromCharCode(maskedDB.charCodeAt(0) & ~mask) + - maskedDB.substr(1); - - /* 12. Let EM = maskedDB || H || 0xbc. - * 13. Output EM. */ - return maskedDB + h + String.fromCharCode(0xbc); - }; +"use strict"; - /** - * Verifies a PSS signature. - * - * This function implements EMSA-PSS-VERIFY as per RFC 3447, section 9.1.2. - * - * @param mHash the message digest hash, as a binary-encoded string, to - * compare against the signature. - * @param em the encoded message, as a binary-encoded string - * (RSA decryption result). - * @param modsBits the length of the RSA modulus in bits. - * - * @return true if the signature was verified, false if not. - */ - pssobj.verify = function(mHash, em, modBits) { - var i; - var emBits = modBits - 1; - var emLen = Math.ceil(emBits / 8); - /* c. Convert the message representative m to an encoded message EM - * of length emLen = ceil((modBits - 1) / 8) octets, where modBits - * is the length in bits of the RSA modulus n */ - em = em.substr(-emLen); +const net = __nccwpck_require__(1808) +const assert = __nccwpck_require__(9491) +const util = __nccwpck_require__(3983) +const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(8045) + +let tls // include tls conditionally since it is not always available - /* 3. If emLen < hLen + sLen + 2, output "inconsistent" and stop. */ - if(emLen < hLen + sLen + 2) { - throw new Error('Inconsistent parameters to PSS signature verification.'); +// TODO: session re-use does not wait for the first +// connection to resolve the session and might therefore +// resolve the same servername multiple times even when +// re-use is enabled. + +let SessionCache +// FIXME: remove workaround when the Node bug is fixed +// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 +if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { + SessionCache = class WeakSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + this._sessionRegistry = new global.FinalizationRegistry((key) => { + if (this._sessionCache.size < this._maxCachedSessions) { + return + } + + const ref = this._sessionCache.get(key) + if (ref !== undefined && ref.deref() === undefined) { + this._sessionCache.delete(key) + } + }) } - /* 4. If the rightmost octet of EM does not have hexadecimal value - * 0xbc, output "inconsistent" and stop. */ - if(em.charCodeAt(emLen - 1) !== 0xbc) { - throw new Error('Encoded message does not end in 0xBC.'); + get (sessionKey) { + const ref = this._sessionCache.get(sessionKey) + return ref ? ref.deref() : null } - /* 5. Let maskedDB be the leftmost emLen - hLen - 1 octets of EM, and - * let H be the next hLen octets. 
*/ - var maskLen = emLen - hLen - 1; - var maskedDB = em.substr(0, maskLen); - var h = em.substr(maskLen, hLen); + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + this._sessionCache.set(sessionKey, new WeakRef(session)) + this._sessionRegistry.register(session, sessionKey) + } + } +} else { + SessionCache = class SimpleSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + } - /* 6. If the leftmost 8emLen - emBits bits of the leftmost octet in - * maskedDB are not all equal to zero, output "inconsistent" and stop. */ - var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF; - if((maskedDB.charCodeAt(0) & mask) !== 0) { - throw new Error('Bits beyond keysize not zero as expected.'); + get (sessionKey) { + return this._sessionCache.get(sessionKey) } - /* 7. Let dbMask = MGF(H, emLen - hLen - 1). */ - var dbMask = mgf.generate(h, maskLen); + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + if (this._sessionCache.size >= this._maxCachedSessions) { + // remove the oldest session + const { value: oldestKey } = this._sessionCache.keys().next() + this._sessionCache.delete(oldestKey) + } - /* 8. Let DB = maskedDB \xor dbMask. */ - var db = ''; - for(i = 0; i < maskLen; i++) { - db += String.fromCharCode(maskedDB.charCodeAt(i) ^ dbMask.charCodeAt(i)); + this._sessionCache.set(sessionKey, session) } + } +} - /* 9. Set the leftmost 8emLen - emBits bits of the leftmost octet - * in DB to zero. */ - db = String.fromCharCode(db.charCodeAt(0) & ~mask) + db.substr(1); +function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { + if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { + throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + } - /* 10. If the emLen - hLen - sLen - 2 leftmost octets of DB are not zero - * or if the octet at position emLen - hLen - sLen - 1 (the leftmost - * position is "position 1") does not have hexadecimal value 0x01, - * output "inconsistent" and stop. */ - var checkLen = emLen - hLen - sLen - 2; - for(i = 0; i < checkLen; i++) { - if(db.charCodeAt(i) !== 0x00) { - throw new Error('Leftmost octets not zero as expected'); + const options = { path: socketPath, ...opts } + const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) + timeout = timeout == null ? 10e3 : timeout + allowH2 = allowH2 != null ? allowH2 : false + return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { + let socket + if (protocol === 'https:') { + if (!tls) { + tls = __nccwpck_require__(4404) } + servername = servername || options.servername || util.getServerName(host) || null + + const sessionKey = servername || hostname + const session = sessionCache.get(sessionKey) || null + + assert(sessionKey) + + socket = tls.connect({ + highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... + ...options, + servername, + session, + localAddress, + // TODO(HTTP/2): Add support for h2c + ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], + socket: httpSocket, // upgrade socket connection + port: port || 443, + host: hostname + }) + + socket + .on('session', function (session) { + // TODO (fix): Can a session become invalid once established? Don't think so? 
+ sessionCache.set(sessionKey, session) + }) + } else { + assert(!httpSocket, 'httpSocket can only be sent on TLS update') + socket = net.connect({ + highWaterMark: 64 * 1024, // Same as nodejs fs streams. + ...options, + localAddress, + port: port || 80, + host: hostname + }) } - if(db.charCodeAt(checkLen) !== 0x01) { - throw new Error('Inconsistent PSS signature, 0x01 marker not found'); + // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket + if (options.keepAlive == null || options.keepAlive) { + const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay + socket.setKeepAlive(true, keepAliveInitialDelay) } - /* 11. Let salt be the last sLen octets of DB. */ - var salt = db.substr(-sLen); + const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) - /* 12. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt */ - var m_ = new forge.util.ByteBuffer(); - m_.fillWithByte(0, 8); - m_.putBytes(mHash); - m_.putBytes(salt); + socket + .setNoDelay(true) + .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { + cancelTimeout() - /* 13. Let H' = Hash(M'), an octet string of length hLen. */ - hash.start(); - hash.update(m_.getBytes()); - var h_ = hash.digest().getBytes(); + if (callback) { + const cb = callback + callback = null + cb(null, this) + } + }) + .on('error', function (err) { + cancelTimeout() - /* 14. If H = H', output "consistent." Otherwise, output "inconsistent." */ - return h === h_; - }; + if (callback) { + const cb = callback + callback = null + cb(err) + } + }) - return pssobj; -}; + return socket + } +} + +function setupTimeout (onConnectTimeout, timeout) { + if (!timeout) { + return () => {} + } + + let s1 = null + let s2 = null + const timeoutId = setTimeout(() => { + // setImmediate is added to make sure that we priotorise socket error events over timeouts + s1 = setImmediate(() => { + if (process.platform === 'win32') { + // Windows needs an extra setImmediate probably due to implementation differences in the socket logic + s2 = setImmediate(() => onConnectTimeout()) + } else { + onConnectTimeout() + } + }) + }, timeout) + return () => { + clearTimeout(timeoutId) + clearImmediate(s1) + clearImmediate(s2) + } +} + +function onConnectTimeout (socket) { + util.destroy(socket, new ConnectTimeoutError()) +} + +module.exports = buildConnector /***/ }), -/***/ 7821: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 8045: +/***/ ((module) => { -/** - * An API for getting cryptographically-secure random bytes. The bytes are - * generated using the Fortuna algorithm devised by Bruce Schneier and - * Niels Ferguson. - * - * Getting strong random bytes is not yet easy to do in javascript. The only - * truish random entropy that can be collected is from the mouse, keyboard, or - * from timing with respect to page loads, etc. This generator makes a poor - * attempt at providing random bytes when those sources haven't yet provided - * enough entropy to initially seed or to reseed the PRNG. - * - * @author Dave Longley - * - * Copyright (c) 2009-2014 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(4086); -__nccwpck_require__(4467); -__nccwpck_require__(8339); - -(function() { - -// forge.random already defined -if(forge.random && forge.random.getBytes) { - module.exports = forge.random; - return; -} - -(function(jQuery) { - -// the default prng plugin, uses AES-128 -var prng_aes = {}; -var _prng_aes_output = new Array(4); -var _prng_aes_buffer = forge.util.createBuffer(); -prng_aes.formatKey = function(key) { - // convert the key into 32-bit integers - var tmp = forge.util.createBuffer(key); - key = new Array(4); - key[0] = tmp.getInt32(); - key[1] = tmp.getInt32(); - key[2] = tmp.getInt32(); - key[3] = tmp.getInt32(); - - // return the expanded key - return forge.aes._expandKey(key, false); -}; -prng_aes.formatSeed = function(seed) { - // convert seed into 32-bit integers - var tmp = forge.util.createBuffer(seed); - seed = new Array(4); - seed[0] = tmp.getInt32(); - seed[1] = tmp.getInt32(); - seed[2] = tmp.getInt32(); - seed[3] = tmp.getInt32(); - return seed; -}; -prng_aes.cipher = function(key, seed) { - forge.aes._updateBlock(key, seed, _prng_aes_output, false); - _prng_aes_buffer.putInt32(_prng_aes_output[0]); - _prng_aes_buffer.putInt32(_prng_aes_output[1]); - _prng_aes_buffer.putInt32(_prng_aes_output[2]); - _prng_aes_buffer.putInt32(_prng_aes_output[3]); - return _prng_aes_buffer.getBytes(); -}; -prng_aes.increment = function(seed) { - // FIXME: do we care about carry or signed issues? - ++seed[3]; - return seed; -}; -prng_aes.md = forge.md.sha256; +"use strict"; -/** - * Creates a new PRNG. - */ -function spawnPrng() { - var ctx = forge.prng.create(prng_aes); - /** - * Gets random bytes. If a native secure crypto API is unavailable, this - * method tries to make the bytes more unpredictable by drawing from data that - * can be collected from the user of the browser, eg: mouse movement. - * - * If a callback is given, this method will be called asynchronously. - * - * @param count the number of random bytes to get. - * @param [callback(err, bytes)] called once the operation completes. - * - * @return the random bytes in a string. - */ - ctx.getBytes = function(count, callback) { - return ctx.generate(count, callback); - }; +class UndiciError extends Error { + constructor (message) { + super(message) + this.name = 'UndiciError' + this.code = 'UND_ERR' + } +} - /** - * Gets random bytes asynchronously. If a native secure crypto API is - * unavailable, this method tries to make the bytes more unpredictable by - * drawing from data that can be collected from the user of the browser, - * eg: mouse movement. - * - * @param count the number of random bytes to get. - * - * @return the random bytes in a string. 
- */ - ctx.getBytesSync = function(count) { - return ctx.generate(count); - }; +class ConnectTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ConnectTimeoutError) + this.name = 'ConnectTimeoutError' + this.message = message || 'Connect Timeout Error' + this.code = 'UND_ERR_CONNECT_TIMEOUT' + } +} - return ctx; +class HeadersTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersTimeoutError) + this.name = 'HeadersTimeoutError' + this.message = message || 'Headers Timeout Error' + this.code = 'UND_ERR_HEADERS_TIMEOUT' + } } -// create default prng context -var _ctx = spawnPrng(); +class HeadersOverflowError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersOverflowError) + this.name = 'HeadersOverflowError' + this.message = message || 'Headers Overflow Error' + this.code = 'UND_ERR_HEADERS_OVERFLOW' + } +} -// add other sources of entropy only if window.crypto.getRandomValues is not -// available -- otherwise this source will be automatically used by the prng -var getRandomValues = null; -var globalScope = forge.util.globalScope; -var _crypto = globalScope.crypto || globalScope.msCrypto; -if(_crypto && _crypto.getRandomValues) { - getRandomValues = function(arr) { - return _crypto.getRandomValues(arr); - }; +class BodyTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, BodyTimeoutError) + this.name = 'BodyTimeoutError' + this.message = message || 'Body Timeout Error' + this.code = 'UND_ERR_BODY_TIMEOUT' + } } -if(forge.options.usePureJavaScript || - (!forge.util.isNodejs && !getRandomValues)) { - // if this is a web worker, do not use weak entropy, instead register to - // receive strong entropy asynchronously from the main thread - if(typeof window === 'undefined' || window.document === undefined) { - // FIXME: +class ResponseStatusCodeError extends UndiciError { + constructor (message, statusCode, headers, body) { + super(message) + Error.captureStackTrace(this, ResponseStatusCodeError) + this.name = 'ResponseStatusCodeError' + this.message = message || 'Response Status Code Error' + this.code = 'UND_ERR_RESPONSE_STATUS_CODE' + this.body = body + this.status = statusCode + this.statusCode = statusCode + this.headers = headers } +} - // get load time entropy - _ctx.collectInt(+new Date(), 32); +class InvalidArgumentError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidArgumentError) + this.name = 'InvalidArgumentError' + this.message = message || 'Invalid Argument Error' + this.code = 'UND_ERR_INVALID_ARG' + } +} - // add some entropy from navigator object - if(typeof(navigator) !== 'undefined') { - var _navBytes = ''; - for(var key in navigator) { - try { - if(typeof(navigator[key]) == 'string') { - _navBytes += navigator[key]; - } - } catch(e) { - /* Some navigator keys might not be accessible, e.g. the geolocation - attribute throws an exception if touched in Mozilla chrome:// - context. +class InvalidReturnValueError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidReturnValueError) + this.name = 'InvalidReturnValueError' + this.message = message || 'Invalid Return Value Error' + this.code = 'UND_ERR_INVALID_RETURN_VALUE' + } +} - Silently ignore this and just don't use this as a source of - entropy. 
*/ - } - } - _ctx.collect(_navBytes); - _navBytes = null; +class RequestAbortedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestAbortedError) + this.name = 'AbortError' + this.message = message || 'Request aborted' + this.code = 'UND_ERR_ABORTED' } +} - // add mouse and keyboard collectors if jquery is available - if(jQuery) { - // set up mouse entropy capture - jQuery().mousemove(function(e) { - // add mouse coords - _ctx.collectInt(e.clientX, 16); - _ctx.collectInt(e.clientY, 16); - }); +class InformationalError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InformationalError) + this.name = 'InformationalError' + this.message = message || 'Request information' + this.code = 'UND_ERR_INFO' + } +} - // set up keyboard entropy capture - jQuery().keypress(function(e) { - _ctx.collectInt(e.charCode, 8); - }); +class RequestContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestContentLengthMismatchError) + this.name = 'RequestContentLengthMismatchError' + this.message = message || 'Request body length does not match content-length header' + this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' } } -/* Random API */ -if(!forge.random) { - forge.random = _ctx; -} else { - // extend forge.random with _ctx - for(var key in _ctx) { - forge.random[key] = _ctx[key]; +class ResponseContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseContentLengthMismatchError) + this.name = 'ResponseContentLengthMismatchError' + this.message = message || 'Response body length does not match content-length header' + this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' } } -// expose spawn PRNG -forge.random.createInstance = spawnPrng; +class ClientDestroyedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientDestroyedError) + this.name = 'ClientDestroyedError' + this.message = message || 'The client is destroyed' + this.code = 'UND_ERR_DESTROYED' + } +} -module.exports = forge.random; +class ClientClosedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientClosedError) + this.name = 'ClientClosedError' + this.message = message || 'The client is closed' + this.code = 'UND_ERR_CLOSED' + } +} -})(typeof(jQuery) !== 'undefined' ? 
jQuery : null); +class SocketError extends UndiciError { + constructor (message, socket) { + super(message) + Error.captureStackTrace(this, SocketError) + this.name = 'SocketError' + this.message = message || 'Socket error' + this.code = 'UND_ERR_SOCKET' + this.socket = socket + } +} -})(); +class NotSupportedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'NotSupportedError' + this.message = message || 'Not supported error' + this.code = 'UND_ERR_NOT_SUPPORTED' + } +} +class BalancedPoolMissingUpstreamError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'MissingUpstreamError' + this.message = message || 'No upstream has been added to the BalancedPool' + this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' + } +} -/***/ }), +class HTTPParserError extends Error { + constructor (message, code, data) { + super(message) + Error.captureStackTrace(this, HTTPParserError) + this.name = 'HTTPParserError' + this.code = code ? `HPE_${code}` : undefined + this.data = data ? data.toString() : undefined + } +} -/***/ 9965: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class ResponseExceededMaxSizeError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseExceededMaxSizeError) + this.name = 'ResponseExceededMaxSizeError' + this.message = message || 'Response content exceeded max size' + this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' + } +} -/** - * RC2 implementation. - * - * @author Stefan Siegl - * - * Copyright (c) 2012 Stefan Siegl - * - * Information on the RC2 cipher is available from RFC #2268, - * http://www.ietf.org/rfc/rfc2268.txt - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); - -var piTable = [ - 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d, - 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2, - 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32, - 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82, - 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc, - 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26, - 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03, - 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7, - 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a, - 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec, - 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39, - 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31, - 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9, - 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9, - 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e, - 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad -]; +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + 
super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} -var s = [1, 2, 3, 5]; +module.exports = { + HTTPParserError, + UndiciError, + HeadersTimeoutError, + HeadersOverflowError, + BodyTimeoutError, + RequestContentLengthMismatchError, + ConnectTimeoutError, + ResponseStatusCodeError, + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError, + ClientDestroyedError, + ClientClosedError, + InformationalError, + SocketError, + NotSupportedError, + ResponseContentLengthMismatchError, + BalancedPoolMissingUpstreamError, + ResponseExceededMaxSizeError, + RequestRetryError +} -/** - * Rotate a word left by given number of bits. - * - * Bits that are shifted out on the left are put back in on the right - * hand side. - * - * @param word The word to shift left. - * @param bits The number of bits to shift by. - * @return The rotated word. - */ -var rol = function(word, bits) { - return ((word << bits) & 0xffff) | ((word & 0xffff) >> (16 - bits)); -}; -/** - * Rotate a word right by given number of bits. - * - * Bits that are shifted out on the right are put back in on the left - * hand side. - * - * @param word The word to shift right. - * @param bits The number of bits to shift by. - * @return The rotated word. - */ -var ror = function(word, bits) { - return ((word & 0xffff) >> bits) | ((word << (16 - bits)) & 0xffff); -}; +/***/ }), -/* RC2 API */ -module.exports = forge.rc2 = forge.rc2 || {}; +/***/ 2905: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Perform RC2 key expansion as per RFC #2268, section 2. - * - * @param key variable-length user key (between 1 and 128 bytes) - * @param effKeyBits number of effective key bits (default: 128) - * @return the expanded RC2 key (ByteBuffer of 128 bytes) - */ -forge.rc2.expandKey = function(key, effKeyBits) { - if(typeof key === 'string') { - key = forge.util.createBuffer(key); - } - effKeyBits = effKeyBits || 128; - - /* introduce variables that match the names used in RFC #2268 */ - var L = key; - var T = key.length(); - var T1 = effKeyBits; - var T8 = Math.ceil(T1 / 8); - var TM = 0xff >> (T1 & 0x07); - var i; +"use strict"; - for(i = T; i < 128; i++) { - L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]); - } - L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]); +const { + InvalidArgumentError, + NotSupportedError +} = __nccwpck_require__(8045) +const assert = __nccwpck_require__(9491) +const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(2785) +const util = __nccwpck_require__(3983) - for(i = 127 - T8; i >= 0; i--) { - L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]); - } +// tokenRegExp and headerCharRegex have been lifted from +// https://github.com/nodejs/node/blob/main/lib/_http_common.js - return L; -}; +/** + * Verifies that the given val is a valid HTTP token + * per the rules defined in RFC 7230 + * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + */ +const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ /** - * Creates a RC2 cipher object. - * - * @param key the symmetric key to use (as base for key generation). - * @param bits the number of effective key bits. - * @param encrypt false for decryption, true for encryption. - * - * @return the cipher. 
+ * Matches if val contains an invalid field-vchar + * field-value = *( field-content / obs-fold ) + * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] + * field-vchar = VCHAR / obs-text */ -var createCipher = function(key, bits, encrypt) { - var _finish = false, _input = null, _output = null, _iv = null; - var mixRound, mashRound; - var i, j, K = []; +const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - /* Expand key and fill into K[] Array */ - key = forge.rc2.expandKey(key, bits); - for(i = 0; i < 64; i++) { - K.push(key.getInt16Le()); - } +// Verifies that a given path is valid does not contain control chars \x00 to \x20 +const invalidPathRegex = /[^\u0021-\u00ff]/ - if(encrypt) { - /** - * Perform one mixing round "in place". - * - * @param R Array of four words to perform mixing on. - */ - mixRound = function(R) { - for(i = 0; i < 4; i++) { - R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) + - ((~R[(i + 3) % 4]) & R[(i + 1) % 4]); - R[i] = rol(R[i], s[i]); - j++; - } - }; +const kHandler = Symbol('handler') - /** - * Perform one mashing round "in place". - * - * @param R Array of four words to perform mashing on. - */ - mashRound = function(R) { - for(i = 0; i < 4; i++) { - R[i] += K[R[(i + 3) % 4] & 63]; - } - }; - } else { - /** - * Perform one r-mixing round "in place". - * - * @param R Array of four words to perform mixing on. - */ - mixRound = function(R) { - for(i = 3; i >= 0; i--) { - R[i] = ror(R[i], s[i]); - R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) + - ((~R[(i + 3) % 4]) & R[(i + 1) % 4]); - j--; - } - }; +const channels = {} - /** - * Perform one r-mashing round "in place". - * - * @param R Array of four words to perform mashing on. - */ - mashRound = function(R) { - for(i = 3; i >= 0; i--) { - R[i] -= K[R[(i + 3) % 4] & 63]; - } - }; - } +let extractBody - /** - * Run the specified cipher execution plan. - * - * This function takes four words from the input buffer, applies the IV on - * it (if requested) and runs the provided execution plan. - * - * The plan must be put together in form of a array of arrays. Where the - * outer one is simply a list of steps to perform and the inner one needs - * to have two elements: the first one telling how many rounds to perform, - * the second one telling what to do (i.e. the function to call). - * - * @param {Array} plan The plan to execute. 
- */ - var runPlan = function(plan) { - var R = []; +try { + const diagnosticsChannel = __nccwpck_require__(7643) + channels.create = diagnosticsChannel.channel('undici:request:create') + channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') + channels.headers = diagnosticsChannel.channel('undici:request:headers') + channels.trailers = diagnosticsChannel.channel('undici:request:trailers') + channels.error = diagnosticsChannel.channel('undici:request:error') +} catch { + channels.create = { hasSubscribers: false } + channels.bodySent = { hasSubscribers: false } + channels.headers = { hasSubscribers: false } + channels.trailers = { hasSubscribers: false } + channels.error = { hasSubscribers: false } +} - /* Get data from input buffer and fill the four words into R */ - for(i = 0; i < 4; i++) { - var val = _input.getInt16Le(); +class Request { + constructor (origin, { + path, + method, + body, + headers, + query, + idempotent, + blocking, + upgrade, + headersTimeout, + bodyTimeout, + reset, + throwOnError, + expectContinue + }, handler) { + if (typeof path !== 'string') { + throw new InvalidArgumentError('path must be a string') + } else if ( + path[0] !== '/' && + !(path.startsWith('http://') || path.startsWith('https://')) && + method !== 'CONNECT' + ) { + throw new InvalidArgumentError('path must be an absolute URL or start with a slash') + } else if (invalidPathRegex.exec(path) !== null) { + throw new InvalidArgumentError('invalid request path') + } - if(_iv !== null) { - if(encrypt) { - /* We're encrypting, apply the IV first. */ - val ^= _iv.getInt16Le(); - } else { - /* We're decryption, keep cipher text for next block. */ - _iv.putInt16Le(val); - } - } + if (typeof method !== 'string') { + throw new InvalidArgumentError('method must be a string') + } else if (tokenRegExp.exec(method) === null) { + throw new InvalidArgumentError('invalid request method') + } - R.push(val & 0xffff); + if (upgrade && typeof upgrade !== 'string') { + throw new InvalidArgumentError('upgrade must be a string') } - /* Reset global "j" variable as per spec. */ - j = encrypt ? 0 : 63; + if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('invalid headersTimeout') + } - /* Run execution plan. */ - for(var ptr = 0; ptr < plan.length; ptr++) { - for(var ctr = 0; ctr < plan[ptr][0]; ctr++) { - plan[ptr][1](R); - } + if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('invalid bodyTimeout') } - /* Write back result to output buffer. */ - for(i = 0; i < 4; i++) { - if(_iv !== null) { - if(encrypt) { - /* We're encrypting in CBC-mode, feed back encrypted bytes into - IV buffer to carry it forward to next block. */ - _iv.putInt16Le(R[i]); - } else { - R[i] ^= _iv.getInt16Le(); - } - } + if (reset != null && typeof reset !== 'boolean') { + throw new InvalidArgumentError('invalid reset') + } - _output.putInt16Le(R[i]); + if (expectContinue != null && typeof expectContinue !== 'boolean') { + throw new InvalidArgumentError('invalid expectContinue') } - }; - /* Create cipher object */ - var cipher = null; - cipher = { - /** - * Starts or restarts the encryption or decryption process, whichever - * was previously configured. - * - * To use the cipher in CBC mode, iv may be given either as a string - * of bytes, or as a byte buffer. For ECB mode, give null as iv. - * - * @param iv the initialization vector to use, null for ECB mode. 
- * @param output the output the buffer to write to, null to create one. - */ - start: function(iv, output) { - if(iv) { - /* CBC mode */ - if(typeof iv === 'string') { - iv = forge.util.createBuffer(iv); - } - } + this.headersTimeout = headersTimeout - _finish = false; - _input = forge.util.createBuffer(); - _output = output || new forge.util.createBuffer(); - _iv = iv; + this.bodyTimeout = bodyTimeout - cipher.output = _output; - }, + this.throwOnError = throwOnError === true - /** - * Updates the next block. - * - * @param input the buffer to read from. - */ - update: function(input) { - if(!_finish) { - // not finishing, so fill the input buffer with more input - _input.putBuffer(input); - } + this.method = method - while(_input.length() >= 8) { - runPlan([ - [ 5, mixRound ], - [ 1, mashRound ], - [ 6, mixRound ], - [ 1, mashRound ], - [ 5, mixRound ] - ]); - } - }, + this.abort = null - /** - * Finishes encrypting or decrypting. - * - * @param pad a padding function to use, null for PKCS#7 padding, - * signature(blockSize, buffer, decrypt). - * - * @return true if successful, false on error. - */ - finish: function(pad) { - var rval = true; + if (body == null) { + this.body = null + } else if (util.isStream(body)) { + this.body = body - if(encrypt) { - if(pad) { - rval = pad(8, _input, !encrypt); - } else { - // add PKCS#7 padding to block (each pad byte is the - // value of the number of pad bytes) - var padding = (_input.length() === 8) ? 8 : (8 - _input.length()); - _input.fillWithByte(padding, padding); + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) } + this.body.on('end', this.endHandler) } - if(rval) { - // do final update - _finish = true; - cipher.update(); - } - - if(!encrypt) { - // check for error: input data not a multiple of block size - rval = (_input.length() === 0); - if(rval) { - if(pad) { - rval = pad(8, _output, !encrypt); - } else { - // ensure padding byte count is valid - var len = _output.length(); - var count = _output.at(len - 1); - - if(count > len) { - rval = false; - } else { - // trim off padding bytes - _output.truncate(count); - } - } + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err } } - - return rval; + this.body.on('error', this.errorHandler) + } else if (util.isBuffer(body)) { + this.body = body.byteLength ? body : null + } else if (ArrayBuffer.isView(body)) { + this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null + } else if (body instanceof ArrayBuffer) { + this.body = body.byteLength ? Buffer.from(body) : null + } else if (typeof body === 'string') { + this.body = body.length ? Buffer.from(body) : null + } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { + this.body = body + } else { + throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') } - }; - return cipher; -}; + this.completed = false -/** - * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the - * given symmetric key. The output will be stored in the 'output' member - * of the returned cipher. - * - * The key and iv may be given as a string of bytes or a byte buffer. - * The cipher is initialized to use 128 effective key bits. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. 
- * @param output the buffer to write to, null to create one. - * - * @return the cipher. - */ -forge.rc2.startEncrypting = function(key, iv, output) { - var cipher = forge.rc2.createEncryptionCipher(key, 128); - cipher.start(iv, output); - return cipher; -}; + this.aborted = false -/** - * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the - * given symmetric key. - * - * The key may be given as a string of bytes or a byte buffer. - * - * To start encrypting call start() on the cipher with an iv and optional - * output buffer. - * - * @param key the symmetric key to use. - * - * @return the cipher. - */ -forge.rc2.createEncryptionCipher = function(key, bits) { - return createCipher(key, bits, true); -}; + this.upgrade = upgrade || null -/** - * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the - * given symmetric key. The output will be stored in the 'output' member - * of the returned cipher. - * - * The key and iv may be given as a string of bytes or a byte buffer. - * The cipher is initialized to use 128 effective key bits. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * - * @return the cipher. - */ -forge.rc2.startDecrypting = function(key, iv, output) { - var cipher = forge.rc2.createDecryptionCipher(key, 128); - cipher.start(iv, output); - return cipher; -}; + this.path = query ? util.buildURL(path, query) : path -/** - * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the - * given symmetric key. - * - * The key may be given as a string of bytes or a byte buffer. - * - * To start decrypting call start() on the cipher with an iv and optional - * output buffer. - * - * @param key the symmetric key to use. - * - * @return the cipher. - */ -forge.rc2.createDecryptionCipher = function(key, bits) { - return createCipher(key, bits, false); -}; + this.origin = origin + this.idempotent = idempotent == null + ? method === 'HEAD' || method === 'GET' + : idempotent -/***/ }), + this.blocking = blocking == null ? false : blocking -/***/ 3921: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.reset = reset == null ? null : reset -/** - * Javascript implementation of basic RSA algorithms. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - * - * The only algorithm currently supported for PKI is RSA. - * - * An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo - * ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier - * and a subjectPublicKey of type bit string. - * - * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters - * for the algorithm, if any. In the case of RSA, there aren't any. 
- * - * SubjectPublicKeyInfo ::= SEQUENCE { - * algorithm AlgorithmIdentifier, - * subjectPublicKey BIT STRING - * } - * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * For an RSA public key, the subjectPublicKey is: - * - * RSAPublicKey ::= SEQUENCE { - * modulus INTEGER, -- n - * publicExponent INTEGER -- e - * } - * - * PrivateKeyInfo ::= SEQUENCE { - * version Version, - * privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, - * privateKey PrivateKey, - * attributes [0] IMPLICIT Attributes OPTIONAL - * } - * - * Version ::= INTEGER - * PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier - * PrivateKey ::= OCTET STRING - * Attributes ::= SET OF Attribute - * - * An RSA private key as the following structure: - * - * RSAPrivateKey ::= SEQUENCE { - * version Version, - * modulus INTEGER, -- n - * publicExponent INTEGER, -- e - * privateExponent INTEGER, -- d - * prime1 INTEGER, -- p - * prime2 INTEGER, -- q - * exponent1 INTEGER, -- d mod (p-1) - * exponent2 INTEGER, -- d mod (q-1) - * coefficient INTEGER -- (inverse of q) mod p - * } - * - * Version ::= INTEGER - * - * The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1 - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(7052); -__nccwpck_require__(1925); -__nccwpck_require__(7014); -__nccwpck_require__(6861); -__nccwpck_require__(7821); -__nccwpck_require__(8339); + this.host = null -if(typeof BigInteger === 'undefined') { - var BigInteger = forge.jsbn.BigInteger; -} + this.contentLength = null -var _crypto = forge.util.isNodejs ? __nccwpck_require__(6113) : null; + this.contentType = null -// shortcut for asn.1 API -var asn1 = forge.asn1; + this.headers = '' -// shortcut for util API -var util = forge.util; + // Only for H2 + this.expectContinue = expectContinue != null ? expectContinue : false -/* - * RSA encryption and decryption, see RFC 2313. 
- */ -forge.pki = forge.pki || {}; -module.exports = forge.pki.rsa = forge.rsa = forge.rsa || {}; -var pki = forge.pki; - -// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 -var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; - -// validator for a PrivateKeyInfo structure -var privateKeyValidator = { - // PrivateKeyInfo - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // Version (INTEGER) - name: 'PrivateKeyInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyVersion' - }, { - // privateKeyAlgorithm - name: 'PrivateKeyInfo.privateKeyAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'privateKeyOid' - }] - }, { - // PrivateKey - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'privateKey' - }] -}; + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(this, headers[i], headers[i + 1]) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(this, key, headers[key]) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } -// validator for an RSA private key -var rsaPrivateKeyValidator = { - // RSAPrivateKey - name: 'RSAPrivateKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // Version (INTEGER) - name: 'RSAPrivateKey.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyVersion' - }, { - // modulus (n) - name: 'RSAPrivateKey.modulus', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyModulus' - }, { - // publicExponent (e) - name: 'RSAPrivateKey.publicExponent', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPublicExponent' - }, { - // privateExponent (d) - name: 'RSAPrivateKey.privateExponent', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPrivateExponent' - }, { - // prime1 (p) - name: 'RSAPrivateKey.prime1', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPrime1' - }, { - // prime2 (q) - name: 'RSAPrivateKey.prime2', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPrime2' - }, { - // exponent1 (d mod (p-1)) - name: 'RSAPrivateKey.exponent1', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyExponent1' - }, { - // exponent2 (d mod (q-1)) - name: 'RSAPrivateKey.exponent2', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyExponent2' - }, { - // coefficient ((inverse of q) mod p) - name: 'RSAPrivateKey.coefficient', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyCoefficient' - }] -}; + if 
(util.isFormDataLike(this.body)) { + if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { + throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') + } -// validator for an RSA public key -var rsaPublicKeyValidator = { - // RSAPublicKey - name: 'RSAPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // modulus (n) - name: 'RSAPublicKey.modulus', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'publicKeyModulus' - }, { - // publicExponent (e) - name: 'RSAPublicKey.exponent', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'publicKeyExponent' - }] -}; + if (!extractBody) { + extractBody = (__nccwpck_require__(1472).extractBody) + } -// validator for an SubjectPublicKeyInfo structure -// Note: Currently only works with an RSA public key -var publicKeyValidator = forge.pki.rsa.publicKeyValidator = { - name: 'SubjectPublicKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'subjectPublicKeyInfo', - value: [{ - name: 'SubjectPublicKeyInfo.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'publicKeyOid' - }] - }, { - // subjectPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - value: [{ - // RSAPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - captureAsn1: 'rsaPublicKey' - }] - }] -}; + const [bodyStream, contentType] = extractBody(body) + if (this.contentType == null) { + this.contentType = contentType + this.headers += `content-type: ${contentType}\r\n` + } + this.body = bodyStream.stream + this.contentLength = bodyStream.length + } else if (util.isBlobLike(body) && this.contentType == null && body.type) { + this.contentType = body.type + this.headers += `content-type: ${body.type}\r\n` + } -// validator for a DigestInfo structure -var digestInfoValidator = { - name: 'DigestInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'DigestInfo.DigestAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'algorithmIdentifier' - }, { - // NULL paramters - name: 'DigestInfo.DigestAlgorithm.parameters', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.NULL, - // captured only to check existence for md2 and md5 - capture: 'parameters', - optional: true, - constructed: false - }] - }, { - // digest - name: 'DigestInfo.digest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'digest' - }] -}; + util.validateHandler(handler, method, upgrade) -/** - * Wrap digest in DigestInfo object. - * - * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447. 
- * - * DigestInfo ::= SEQUENCE { - * digestAlgorithm DigestAlgorithmIdentifier, - * digest Digest - * } - * - * DigestAlgorithmIdentifier ::= AlgorithmIdentifier - * Digest ::= OCTET STRING - * - * @param md the message digest object with the hash to sign. - * - * @return the encoded message (ready for RSA encrytion) - */ -var emsaPkcs1v15encode = function(md) { - // get the oid for the algorithm - var oid; - if(md.algorithm in pki.oids) { - oid = pki.oids[md.algorithm]; - } else { - var error = new Error('Unknown message digest algorithm.'); - error.algorithm = md.algorithm; - throw error; - } - var oidBytes = asn1.oidToDer(oid).getBytes(); - - // create the digest info - var digestInfo = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var digestAlgorithm = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - digestAlgorithm.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidBytes)); - digestAlgorithm.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')); - var digest = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, - false, md.digest().getBytes()); - digestInfo.value.push(digestAlgorithm); - digestInfo.value.push(digest); - - // encode digest info - return asn1.toDer(digestInfo).getBytes(); -}; + this.servername = util.getServerName(this.host) -/** - * Performs x^c mod n (RSA encryption or decryption operation). - * - * @param x the number to raise and mod. - * @param key the key to use. - * @param pub true if the key is public, false if private. - * - * @return the result of x^c mod n. - */ -var _modPow = function(x, key, pub) { - if(pub) { - return x.modPow(key.e, key.n); - } + this[kHandler] = handler - if(!key.p || !key.q) { - // allow calculation without CRT params (slow) - return x.modPow(key.d, key.n); + if (channels.create.hasSubscribers) { + channels.create.publish({ request: this }) + } } - // pre-compute dP, dQ, and qInv if necessary - if(!key.dP) { - key.dP = key.d.mod(key.p.subtract(BigInteger.ONE)); - } - if(!key.dQ) { - key.dQ = key.d.mod(key.q.subtract(BigInteger.ONE)); - } - if(!key.qInv) { - key.qInv = key.q.modInverse(key.p); + onBodySent (chunk) { + if (this[kHandler].onBodySent) { + try { + return this[kHandler].onBodySent(chunk) + } catch (err) { + this.abort(err) + } + } } - /* Chinese remainder theorem (CRT) states: - - Suppose n1, n2, ..., nk are positive integers which are pairwise - coprime (n1 and n2 have no common factors other than 1). For any - integers x1, x2, ..., xk there exists an integer x solving the - system of simultaneous congruences (where ~= means modularly - congruent so a ~= b mod n means a mod n = b mod n): - - x ~= x1 mod n1 - x ~= x2 mod n2 - ... - x ~= xk mod nk + onRequestSent () { + if (channels.bodySent.hasSubscribers) { + channels.bodySent.publish({ request: this }) + } - This system of congruences has a single simultaneous solution x - between 0 and n - 1. Furthermore, each xk solution and x itself - is congruent modulo the product n = n1*n2*...*nk. - So x1 mod n = x2 mod n = xk mod n = x mod n. + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) + } + } + } - The single simultaneous solution x can be solved with the following - equation: + onConnect (abort) { + assert(!this.aborted) + assert(!this.completed) - x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni. 
+ if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) + } + } - Where x is less than n, xi = x mod ni. + onHeaders (statusCode, headers, resume, statusText) { + assert(!this.aborted) + assert(!this.completed) - For RSA we are only concerned with k = 2. The modulus n = pq, where - p and q are coprime. The RSA decryption algorithm is: + if (channels.headers.hasSubscribers) { + channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) + } - y = x^d mod n + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } + } - Given the above: + onData (chunk) { + assert(!this.aborted) + assert(!this.completed) - x1 = x^d mod p - r1 = n/p = q - s1 = q^-1 mod p - x2 = x^d mod q - r2 = n/q = p - s2 = p^-1 mod q + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } + } - So y = (x1r1s1 + x2r2s2) mod n - = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n + onUpgrade (statusCode, headers, socket) { + assert(!this.aborted) + assert(!this.completed) - According to Fermat's Little Theorem, if the modulus P is prime, - for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P. - Since A is not divisible by P it follows that if: - N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore: + return this[kHandler].onUpgrade(statusCode, headers, socket) + } - A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort - to calculate). In order to calculate x^d mod p more quickly the - exponent d mod (p - 1) is stored in the RSA private key (the same - is done for x^d mod q). These values are referred to as dP and dQ - respectively. Therefore we now have: + onComplete (trailers) { + this.onFinally() - y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n + assert(!this.aborted) - Since we'll be reducing x^dP by modulo p (same for q) we can also - reduce x by p (and q respectively) before hand. Therefore, let + this.completed = true + if (channels.trailers.hasSubscribers) { + channels.trailers.publish({ request: this, trailers }) + } - xp = ((x mod p)^dP mod p), and - xq = ((x mod q)^dQ mod q), yielding: + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? + this.onError(err) + } + } - y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n + onError (error) { + this.onFinally() - This can be further reduced to a simple algorithm that only - requires 1 inverse (the q inverse is used) to be used and stored. - The algorithm is called Garner's algorithm. If qInv is the - inverse of q, we simply calculate: + if (channels.error.hasSubscribers) { + channels.error.publish({ request: this, error }) + } - y = (qInv*(xp - xq) mod p) * q + xq + if (this.aborted) { + return + } + this.aborted = true - However, there are two further complications. First, we need to - ensure that xp > xq to prevent signed BigIntegers from being used - so we add p until this is true (since we will be mod'ing with - p anyway). Then, there is a known timing attack on algorithms - using the CRT. To mitigate this risk, "cryptographic blinding" - should be used. This requires simply generating a random number r - between 0 and n-1 and its inverse and multiplying x by r^e before - calculating y and then multiplying y by r^-1 afterwards. Note that - r must be coprime with n (gcd(r, n) === 1) in order to have an - inverse. 
- */ + return this[kHandler].onError(error) + } - // cryptographic blinding - var r; - do { - r = new BigInteger( - forge.util.bytesToHex(forge.random.getBytes(key.n.bitLength() / 8)), - 16); - } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger.ONE)); - x = x.multiply(r.modPow(key.e, key.n)).mod(key.n); + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } - // calculate xp and xq - var xp = x.mod(key.p).modPow(key.dP, key.p); - var xq = x.mod(key.q).modPow(key.dQ, key.q); + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null + } + } - // xp must be larger than xq to avoid signed bit usage - while(xp.compareTo(xq) < 0) { - xp = xp.add(key.p); + // TODO: adjust to support H2 + addHeader (key, value) { + processHeader(this, key, value) + return this } - // do last step - var y = xp.subtract(xq) - .multiply(key.qInv).mod(key.p) - .multiply(key.q).add(xq); + static [kHTTP1BuildRequest] (origin, opts, handler) { + // TODO: Migrate header parsing here, to make Requests + // HTTP agnostic + return new Request(origin, opts, handler) + } - // remove effect of random for cryptographic blinding - y = y.multiply(r.modInverse(key.n)).mod(key.n); + static [kHTTP2BuildRequest] (origin, opts, handler) { + const headers = opts.headers + opts = { ...opts, headers: null } - return y; -}; + const request = new Request(origin, opts, handler) -/** - * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or - * 'encrypt' on a public key object instead. - * - * Performs RSA encryption. - * - * The parameter bt controls whether to put padding bytes before the - * message passed in. Set bt to either true or false to disable padding - * completely (in order to handle e.g. EMSA-PSS encoding seperately before), - * signaling whether the encryption operation is a public key operation - * (i.e. encrypting data) or not, i.e. private key operation (data signing). - * - * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01 - * (for signing) or 0x02 (for encryption). The key operation mode (private - * or public) is derived from this flag in that case). - * - * @param m the message to encrypt as a byte string. - * @param key the RSA key to use. - * @param bt for PKCS#1 v1.5 padding, the block type to use - * (0x01 for private key, 0x02 for public), - * to disable padding: true = public key, false = private key. - * - * @return the encrypted bytes as a string. 
- */ -pki.rsa.encrypt = function(m, key, bt) { - var pub = bt; - var eb; + request.headers = {} - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(request, headers[i], headers[i + 1], true) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(request, key, headers[key], true) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } - if(bt !== false && bt !== true) { - // legacy, default to PKCS#1 v1.5 padding - pub = (bt === 0x02); - eb = _encodePkcs1_v1_5(m, key, bt); - } else { - eb = forge.util.createBuffer(); - eb.putBytes(m); + return request } - // load encryption block as big integer 'x' - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var x = new BigInteger(eb.toHex(), 16); + static [kHTTP2CopyHeaders] (raw) { + const rawHeaders = raw.split('\r\n') + const headers = {} - // do RSA encryption - var y = _modPow(x, key, pub); + for (const header of rawHeaders) { + const [key, value] = header.split(': ') - // convert y into the encrypted data byte string, if y is shorter in - // bytes than k, then prepend zero bytes to fill up ed - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var yhex = y.toString(16); - var ed = forge.util.createBuffer(); - var zeros = k - Math.ceil(yhex.length / 2); - while(zeros > 0) { - ed.putByte(0x00); - --zeros; - } - ed.putBytes(forge.util.hexToBytes(yhex)); - return ed.getBytes(); -}; + if (value == null || value.length === 0) continue -/** - * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or - * 'verify' on a public key object instead. - * - * Performs RSA decryption. - * - * The parameter ml controls whether to apply PKCS#1 v1.5 padding - * or not. Set ml = false to disable padding removal completely - * (in order to handle e.g. EMSA-PSS later on) and simply pass back - * the RSA encryption block. - * - * @param ed the encrypted data to decrypt in as a byte string. - * @param key the RSA key to use. - * @param pub true for a public key operation, false for private. - * @param ml the message length, if known, false to disable padding. - * - * @return the decrypted message as a byte string. 
- */ -pki.rsa.decrypt = function(ed, key, pub, ml) { - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); + if (headers[key]) headers[key] += `,${value}` + else headers[key] = value + } - // error if the length of the encrypted data ED is not k - if(ed.length !== k) { - var error = new Error('Encrypted message length is invalid.'); - error.length = ed.length; - error.expected = k; - throw error; + return headers } +} - // convert encrypted data into a big integer - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var y = new BigInteger(forge.util.createBuffer(ed).toHex(), 16); - - // y must be less than the modulus or it wasn't the result of - // a previous mod operation (encryption) using that modulus - if(y.compareTo(key.n) >= 0) { - throw new Error('Encrypted message is invalid.'); +function processHeaderValue (key, val, skipAppend) { + if (val && typeof val === 'object') { + throw new InvalidArgumentError(`invalid ${key} header`) } - // do RSA decryption - var x = _modPow(y, key, pub); + val = val != null ? `${val}` : '' - // create the encryption block, if x is shorter in bytes than k, then - // prepend zero bytes to fill up eb - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var xhex = x.toString(16); - var eb = forge.util.createBuffer(); - var zeros = k - Math.ceil(xhex.length / 2); - while(zeros > 0) { - eb.putByte(0x00); - --zeros; + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) } - eb.putBytes(forge.util.hexToBytes(xhex)); - if(ml !== false) { - // legacy, default to PKCS#1 v1.5 padding - return _decodePkcs1_v1_5(eb.getBytes(), key, pub); - } - - // return message - return eb.getBytes(); -}; - -/** - * Creates an RSA key-pair generation state object. It is used to allow - * key-generation to be performed in steps. It also allows for a UI to - * display progress updates. - * - * @param bits the size for the private key in bits, defaults to 2048. - * @param e the public exponent to use, defaults to 65537 (0x10001). - * @param [options] the options to use. - * prng a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". - * algorithm the algorithm to use (default: 'PRIMEINC'). - * - * @return the state object to use to generate the key-pair. - */ -pki.rsa.createKeyPairGenerationState = function(bits, e, options) { - // TODO: migrate step-based prime generation code to forge.prime + return skipAppend ? 
val : `${key}: ${val}\r\n` +} - // set default bits - if(typeof(bits) === 'string') { - bits = parseInt(bits, 10); +function processHeader (request, key, val, skipAppend = false) { + if (val && (typeof val === 'object' && !Array.isArray(val))) { + throw new InvalidArgumentError(`invalid ${key} header`) + } else if (val === undefined) { + return } - bits = bits || 2048; - - // create prng with api that matches BigInteger secure random - options = options || {}; - var prng = options.prng || forge.random; - var rng = { - // x is an array to fill with bytes - nextBytes: function(x) { - var b = prng.getBytesSync(x.length); - for(var i = 0; i < x.length; ++i) { - x[i] = b.charCodeAt(i); - } - } - }; - var algorithm = options.algorithm || 'PRIMEINC'; - - // create PRIMEINC algorithm state - var rval; - if(algorithm === 'PRIMEINC') { - rval = { - algorithm: algorithm, - state: 0, - bits: bits, - rng: rng, - eInt: e || 65537, - e: new BigInteger(null), - p: null, - q: null, - qBits: bits >> 1, - pBits: bits - (bits >> 1), - pqState: 0, - num: null, - keys: null - }; - rval.e.fromInt(rval.eInt); + if ( + request.host === null && + key.length === 4 && + key.toLowerCase() === 'host' + ) { + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + // Consumed by Client + request.host = val + } else if ( + request.contentLength === null && + key.length === 14 && + key.toLowerCase() === 'content-length' + ) { + request.contentLength = parseInt(val, 10) + if (!Number.isFinite(request.contentLength)) { + throw new InvalidArgumentError('invalid content-length header') + } + } else if ( + request.contentType === null && + key.length === 12 && + key.toLowerCase() === 'content-type' + ) { + request.contentType = val + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } else if ( + key.length === 17 && + key.toLowerCase() === 'transfer-encoding' + ) { + throw new InvalidArgumentError('invalid transfer-encoding header') + } else if ( + key.length === 10 && + key.toLowerCase() === 'connection' + ) { + const value = typeof val === 'string' ? val.toLowerCase() : null + if (value !== 'close' && value !== 'keep-alive') { + throw new InvalidArgumentError('invalid connection header') + } else if (value === 'close') { + request.reset = true + } + } else if ( + key.length === 10 && + key.toLowerCase() === 'keep-alive' + ) { + throw new InvalidArgumentError('invalid keep-alive header') + } else if ( + key.length === 7 && + key.toLowerCase() === 'upgrade' + ) { + throw new InvalidArgumentError('invalid upgrade header') + } else if ( + key.length === 6 && + key.toLowerCase() === 'expect' + ) { + throw new NotSupportedError('expect header not supported') + } else if (tokenRegExp.exec(key) === null) { + throw new InvalidArgumentError('invalid header key') } else { - throw new Error('Invalid key generation algorithm: ' + algorithm); - } - - return rval; -}; - -/** - * Attempts to runs the key-generation algorithm for at most n seconds - * (approximately) using the given state. When key-generation has completed, - * the keys will be stored in state.keys. - * - * To use this function to update a UI while generating a key or to prevent - * causing browser lockups/warnings, set "n" to a value other than 0. 
A - * simple pattern for generating a key and showing a progress indicator is: - * - * var state = pki.rsa.createKeyPairGenerationState(2048); - * var step = function() { - * // step key-generation, run algorithm for 100 ms, repeat - * if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) { - * setTimeout(step, 1); - * } else { - * // key-generation complete - * // TODO: turn off progress indicator here - * // TODO: use the generated key-pair in "state.keys" - * } - * }; - * // TODO: turn on progress indicator here - * setTimeout(step, 0); - * - * @param state the state to use. - * @param n the maximum number of milliseconds to run the algorithm for, 0 - * to run the algorithm to completion. - * - * @return true if the key-generation completed, false if not. - */ -pki.rsa.stepKeyPairGenerationState = function(state, n) { - // set default algorithm if not set - if(!('algorithm' in state)) { - state.algorithm = 'PRIMEINC'; - } - - // TODO: migrate step-based prime generation code to forge.prime - // TODO: abstract as PRIMEINC algorithm - - // do key generation (based on Tom Wu's rsa.js, see jsbn.js license) - // with some minor optimizations and designed to run in steps - - // local state vars - var THIRTY = new BigInteger(null); - THIRTY.fromInt(30); - var deltaIdx = 0; - var op_or = function(x, y) {return x | y;}; - - // keep stepping until time limit is reached or done - var t1 = +new Date(); - var t2; - var total = 0; - while(state.keys === null && (n <= 0 || total < n)) { - // generate p or q - if(state.state === 0) { - /* Note: All primes are of the form: - - 30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i - - When we generate a random number, we always align it at 30k + 1. Each - time the number is determined not to be prime we add to get to the - next 'i', eg: if the number was at 30k + 1 we add 6. */ - var bits = (state.p === null) ? state.pBits : state.qBits; - var bits1 = bits - 1; - - // get a random number - if(state.pqState === 0) { - state.num = new BigInteger(bits, state.rng); - // force MSB set - if(!state.num.testBit(bits1)) { - state.num.bitwiseTo( - BigInteger.ONE.shiftLeft(bits1), op_or, state.num); - } - // align number on 30k+1 boundary - state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0); - deltaIdx = 0; - - ++state.pqState; - } else if(state.pqState === 1) { - // try to make the number a prime - if(state.num.bitLength() > bits) { - // overflow, try again - state.pqState = 0; - // do primality test - } else if(state.num.isProbablePrime( - _getMillerRabinTests(state.num.bitLength()))) { - ++state.pqState; - } else { - // get next potential prime - state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); - } - } else if(state.pqState === 2) { - // ensure number is coprime with e - state.pqState = - (state.num.subtract(BigInteger.ONE).gcd(state.e) - .compareTo(BigInteger.ONE) === 0) ? 
3 : 0; - } else if(state.pqState === 3) { - // store p or q - state.pqState = 0; - if(state.p === null) { - state.p = state.num; + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + if (skipAppend) { + if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` + else request.headers[key] = processHeaderValue(key, val[i], skipAppend) } else { - state.q = state.num; - } - - // advance state if both p and q are ready - if(state.p !== null && state.q !== null) { - ++state.state; + request.headers += processHeaderValue(key, val[i]) } - state.num = null; } - } else if(state.state === 1) { - // ensure p is larger than q (swap them if not) - if(state.p.compareTo(state.q) < 0) { - state.num = state.p; - state.p = state.q; - state.q = state.num; - } - ++state.state; - } else if(state.state === 2) { - // compute phi: (p - 1)(q - 1) (Euler's totient function) - state.p1 = state.p.subtract(BigInteger.ONE); - state.q1 = state.q.subtract(BigInteger.ONE); - state.phi = state.p1.multiply(state.q1); - ++state.state; - } else if(state.state === 3) { - // ensure e and phi are coprime - if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) === 0) { - // phi and e are coprime, advance - ++state.state; - } else { - // phi and e aren't coprime, so generate a new p and q - state.p = null; - state.q = null; - state.state = 0; - } - } else if(state.state === 4) { - // create n, ensure n is has the right number of bits - state.n = state.p.multiply(state.q); - - // ensure n is right number of bits - if(state.n.bitLength() === state.bits) { - // success, advance - ++state.state; - } else { - // failed, get new q - state.q = null; - state.state = 0; - } - } else if(state.state === 5) { - // set keys - var d = state.e.modInverse(state.phi); - state.keys = { - privateKey: pki.rsa.setPrivateKey( - state.n, state.e, d, state.p, state.q, - d.mod(state.p1), d.mod(state.q1), - state.q.modInverse(state.p)), - publicKey: pki.rsa.setPublicKey(state.n, state.e) - }; + } else { + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) } - - // update timing - t2 = +new Date(); - total += t2 - t1; - t1 = t2; } +} - return state.keys !== null; -}; +module.exports = Request -/** - * Generates an RSA public-private key pair in a single call. - * - * To generate a key-pair in steps (to allow for progress updates and to - * prevent blocking or warnings in slow browsers) then use the key-pair - * generation state functions. - * - * To generate a key-pair asynchronously (either through web-workers, if - * available, or by breaking up the work on the main thread), pass a - * callback function. - * - * @param [bits] the size for the private key in bits, defaults to 2048. - * @param [e] the public exponent to use, defaults to 65537. - * @param [options] options for key-pair generation, if given then 'bits' - * and 'e' must *not* be given: - * bits the size for the private key in bits, (default: 2048). - * e the public exponent to use, (default: 65537 (0x10001)). - * workerScript the worker script URL. - * workers the number of web workers (if supported) to use, - * (default: 2). - * workLoad the size of the work load, ie: number of possible prime - * numbers for each web worker to check per work assignment, - * (default: 100). - * prng a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". Disables use of native APIs. 
- * algorithm the algorithm to use (default: 'PRIMEINC'). - * @param [callback(err, keypair)] called once the operation completes. - * - * @return an object with privateKey and publicKey properties. - */ -pki.rsa.generateKeyPair = function(bits, e, options, callback) { - // (bits), (options), (callback) - if(arguments.length === 1) { - if(typeof bits === 'object') { - options = bits; - bits = undefined; - } else if(typeof bits === 'function') { - callback = bits; - bits = undefined; - } - } else if(arguments.length === 2) { - // (bits, e), (bits, options), (bits, callback), (options, callback) - if(typeof bits === 'number') { - if(typeof e === 'function') { - callback = e; - e = undefined; - } else if(typeof e !== 'number') { - options = e; - e = undefined; - } - } else { - options = bits; - callback = e; - bits = undefined; - e = undefined; - } - } else if(arguments.length === 3) { - // (bits, e, options), (bits, e, callback), (bits, options, callback) - if(typeof e === 'number') { - if(typeof options === 'function') { - callback = options; - options = undefined; - } - } else { - callback = options; - options = e; - e = undefined; - } - } - options = options || {}; - if(bits === undefined) { - bits = options.bits || 2048; - } - if(e === undefined) { - e = options.e || 0x10001; - } - - // use native code if permitted, available, and parameters are acceptable - if(!forge.options.usePureJavaScript && !options.prng && - bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) { - if(callback) { - // try native async - if(_detectNodeCrypto('generateKeyPair')) { - return _crypto.generateKeyPair('rsa', { - modulusLength: bits, - publicExponent: e, - publicKeyEncoding: { - type: 'spki', - format: 'pem' - }, - privateKeyEncoding: { - type: 'pkcs8', - format: 'pem' - } - }, function(err, pub, priv) { - if(err) { - return callback(err); - } - callback(null, { - privateKey: pki.privateKeyFromPem(priv), - publicKey: pki.publicKeyFromPem(pub) - }); - }); - } - if(_detectSubtleCrypto('generateKey') && - _detectSubtleCrypto('exportKey')) { - // use standard native generateKey - return util.globalScope.crypto.subtle.generateKey({ - name: 'RSASSA-PKCS1-v1_5', - modulusLength: bits, - publicExponent: _intToUint8Array(e), - hash: {name: 'SHA-256'} - }, true /* key can be exported*/, ['sign', 'verify']) - .then(function(pair) { - return util.globalScope.crypto.subtle.exportKey( - 'pkcs8', pair.privateKey); - // avoiding catch(function(err) {...}) to support IE <= 8 - }).then(undefined, function(err) { - callback(err); - }).then(function(pkcs8) { - if(pkcs8) { - var privateKey = pki.privateKeyFromAsn1( - asn1.fromDer(forge.util.createBuffer(pkcs8))); - callback(null, { - privateKey: privateKey, - publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e) - }); - } - }); - } - if(_detectSubtleMsCrypto('generateKey') && - _detectSubtleMsCrypto('exportKey')) { - var genOp = util.globalScope.msCrypto.subtle.generateKey({ - name: 'RSASSA-PKCS1-v1_5', - modulusLength: bits, - publicExponent: _intToUint8Array(e), - hash: {name: 'SHA-256'} - }, true /* key can be exported*/, ['sign', 'verify']); - genOp.oncomplete = function(e) { - var pair = e.target.result; - var exportOp = util.globalScope.msCrypto.subtle.exportKey( - 'pkcs8', pair.privateKey); - exportOp.oncomplete = function(e) { - var pkcs8 = e.target.result; - var privateKey = pki.privateKeyFromAsn1( - asn1.fromDer(forge.util.createBuffer(pkcs8))); - callback(null, { - privateKey: privateKey, - publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e) - }); 
- }; - exportOp.onerror = function(err) { - callback(err); - }; - }; - genOp.onerror = function(err) { - callback(err); - }; - return; - } - } else { - // try native sync - if(_detectNodeCrypto('generateKeyPairSync')) { - var keypair = _crypto.generateKeyPairSync('rsa', { - modulusLength: bits, - publicExponent: e, - publicKeyEncoding: { - type: 'spki', - format: 'pem' - }, - privateKeyEncoding: { - type: 'pkcs8', - format: 'pem' - } - }); - return { - privateKey: pki.privateKeyFromPem(keypair.privateKey), - publicKey: pki.publicKeyFromPem(keypair.publicKey) - }; - } - } - } - // use JavaScript implementation - var state = pki.rsa.createKeyPairGenerationState(bits, e, options); - if(!callback) { - pki.rsa.stepKeyPairGenerationState(state, 0); - return state.keys; - } - _generateKeyPair(state, options, callback); -}; +/***/ }), + +/***/ 2785: +/***/ ((module) => { + +module.exports = { + kClose: Symbol('close'), + kDestroy: Symbol('destroy'), + kDispatch: Symbol('dispatch'), + kUrl: Symbol('url'), + kWriting: Symbol('writing'), + kResuming: Symbol('resuming'), + kQueue: Symbol('queue'), + kConnect: Symbol('connect'), + kConnecting: Symbol('connecting'), + kHeadersList: Symbol('headers list'), + kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), + kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), + kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), + kKeepAliveTimeoutValue: Symbol('keep alive timeout'), + kKeepAlive: Symbol('keep alive'), + kHeadersTimeout: Symbol('headers timeout'), + kBodyTimeout: Symbol('body timeout'), + kServerName: Symbol('server name'), + kLocalAddress: Symbol('local address'), + kHost: Symbol('host'), + kNoRef: Symbol('no ref'), + kBodyUsed: Symbol('used'), + kRunning: Symbol('running'), + kBlocking: Symbol('blocking'), + kPending: Symbol('pending'), + kSize: Symbol('size'), + kBusy: Symbol('busy'), + kQueued: Symbol('queued'), + kFree: Symbol('free'), + kConnected: Symbol('connected'), + kClosed: Symbol('closed'), + kNeedDrain: Symbol('need drain'), + kReset: Symbol('reset'), + kDestroyed: Symbol.for('nodejs.stream.destroyed'), + kMaxHeadersSize: Symbol('max headers size'), + kRunningIdx: Symbol('running index'), + kPendingIdx: Symbol('pending index'), + kError: Symbol('error'), + kClients: Symbol('clients'), + kClient: Symbol('client'), + kParser: Symbol('parser'), + kOnDestroyed: Symbol('destroy callbacks'), + kPipelining: Symbol('pipelining'), + kSocket: Symbol('socket'), + kHostHeader: Symbol('host header'), + kConnector: Symbol('connector'), + kStrictContentLength: Symbol('strict content length'), + kMaxRedirections: Symbol('maxRedirections'), + kMaxRequests: Symbol('maxRequestsPerClient'), + kProxy: Symbol('proxy agent options'), + kCounter: Symbol('socket request counter'), + kInterceptors: Symbol('dispatch interceptors'), + kMaxResponseSize: Symbol('max response size'), + kHTTP2Session: Symbol('http2Session'), + kHTTP2SessionState: Symbol('http2Session state'), + kHTTP2BuildRequest: Symbol('http2 build request'), + kHTTP1BuildRequest: Symbol('http1 build request'), + kHTTP2CopyHeaders: Symbol('http2 copy headers'), + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') +} + -/** - * Sets an RSA public key from BigIntegers modulus and exponent. - * - * @param n the modulus. - * @param e the exponent. - * - * @return the public key. 
- */ -pki.setRsaPublicKey = pki.rsa.setPublicKey = function(n, e) { - var key = { - n: n, - e: e - }; +/***/ }), - /** - * Encrypts the given data with this public key. Newer applications - * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for - * legacy applications. - * - * @param data the byte string to encrypt. - * @param scheme the encryption scheme to use: - * 'RSAES-PKCS1-V1_5' (default), - * 'RSA-OAEP', - * 'RAW', 'NONE', or null to perform raw RSA encryption, - * an object with an 'encode' property set to a function - * with the signature 'function(data, key)' that returns - * a binary-encoded string representing the encoded data. - * @param schemeOptions any scheme-specific options. - * - * @return the encrypted byte string. - */ - key.encrypt = function(data, scheme, schemeOptions) { - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); - } else if(scheme === undefined) { - scheme = 'RSAES-PKCS1-V1_5'; - } +/***/ 3983: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if(scheme === 'RSAES-PKCS1-V1_5') { - scheme = { - encode: function(m, key, pub) { - return _encodePkcs1_v1_5(m, key, 0x02).getBytes(); - } - }; - } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') { - scheme = { - encode: function(m, key) { - return forge.pkcs1.encode_rsa_oaep(key, m, schemeOptions); - } - }; - } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) { - scheme = {encode: function(e) {return e;}}; - } else if(typeof scheme === 'string') { - throw new Error('Unsupported encryption scheme: "' + scheme + '".'); - } +"use strict"; - // do scheme-based encoding then rsa encryption - var e = scheme.encode(data, key, true); - return pki.rsa.encrypt(e, key, true); - }; - /** - * Verifies the given signature against the given digest. - * - * PKCS#1 supports multiple (currently two) signature schemes: - * RSASSA-PKCS1-V1_5 and RSASSA-PSS. - * - * By default this implementation uses the "old scheme", i.e. - * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the - * signature is an OCTET STRING that holds a DigestInfo. - * - * DigestInfo ::= SEQUENCE { - * digestAlgorithm DigestAlgorithmIdentifier, - * digest Digest - * } - * DigestAlgorithmIdentifier ::= AlgorithmIdentifier - * Digest ::= OCTET STRING - * - * To perform PSS signature verification, provide an instance - * of Forge PSS object as the scheme parameter. - * - * @param digest the message digest hash to compare against the signature, - * as a binary-encoded string. - * @param signature the signature to verify, as a binary-encoded string. - * @param scheme signature verification scheme to use: - * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5, - * a Forge PSS object for RSASSA-PSS, - * 'NONE' or null for none, DigestInfo will not be expected, but - * PKCS#1 v1.5 padding will still be used. - * @param options optional verify options - * _parseAllDigestBytes testing flag to control parsing of all - * digest bytes. Unsupported and not for general usage. - * (default: true) - * - * @return true if the signature was verified, false if not. 
- */ - key.verify = function(digest, signature, scheme, options) { - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); - } else if(scheme === undefined) { - scheme = 'RSASSA-PKCS1-V1_5'; - } - if(options === undefined) { - options = { - _parseAllDigestBytes: true - }; - } - if(!('_parseAllDigestBytes' in options)) { - options._parseAllDigestBytes = true; - } +const assert = __nccwpck_require__(9491) +const { kDestroyed, kBodyUsed } = __nccwpck_require__(2785) +const { IncomingMessage } = __nccwpck_require__(3685) +const stream = __nccwpck_require__(2781) +const net = __nccwpck_require__(1808) +const { InvalidArgumentError } = __nccwpck_require__(8045) +const { Blob } = __nccwpck_require__(4300) +const nodeUtil = __nccwpck_require__(3837) +const { stringify } = __nccwpck_require__(3477) - if(scheme === 'RSASSA-PKCS1-V1_5') { - scheme = { - verify: function(digest, d) { - // remove padding - d = _decodePkcs1_v1_5(d, key, true); - // d is ASN.1 BER-encoded DigestInfo - var obj = asn1.fromDer(d, { - parseAllBytes: options._parseAllDigestBytes - }); +const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) - // validate DigestInfo - var capture = {}; - var errors = []; - if(!asn1.validate(obj, digestInfoValidator, capture, errors)) { - var error = new Error( - 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' + - 'DigestInfo value.'); - error.errors = errors; - throw error; - } - // check hash algorithm identifier - // see PKCS1-v1-5DigestAlgorithms in RFC 8017 - // FIXME: add support to vaidator for strict value choices - var oid = asn1.derToOid(capture.algorithmIdentifier); - if(!(oid === forge.oids.md2 || - oid === forge.oids.md5 || - oid === forge.oids.sha1 || - oid === forge.oids.sha224 || - oid === forge.oids.sha256 || - oid === forge.oids.sha384 || - oid === forge.oids.sha512 || - oid === forge.oids['sha512-224'] || - oid === forge.oids['sha512-256'])) { - var error = new Error( - 'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.'); - error.oid = oid; - throw error; - } +function nop () {} - // special check for md2 and md5 that NULL parameters exist - if(oid === forge.oids.md2 || oid === forge.oids.md5) { - if(!('parameters' in capture)) { - throw new Error( - 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' + - 'DigestInfo value. 
' + - 'Missing algorithm identifer NULL parameters.'); - } - } +function isStream (obj) { + return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +} - // compare the given digest to the decrypted one - return digest === capture.digest; - } - }; - } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) { - scheme = { - verify: function(digest, d) { - // remove padding - d = _decodePkcs1_v1_5(d, key, true); - return digest === d; - } - }; - } +// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) +function isBlobLike (object) { + return (Blob && object instanceof Blob) || ( + object && + typeof object === 'object' && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + /^(Blob|File)$/.test(object[Symbol.toStringTag]) + ) +} - // do rsa decryption w/o any decoding, then verify -- which does decoding - var d = pki.rsa.decrypt(signature, key, true, false); - return scheme.verify(digest, d, key.n.bitLength()); - }; +function buildURL (url, queryParams) { + if (url.includes('?') || url.includes('#')) { + throw new Error('Query params cannot be passed when url already contains "?" or "#".') + } - return key; -}; + const stringified = stringify(queryParams) -/** - * Sets an RSA private key from BigIntegers modulus, exponent, primes, - * prime exponents, and modular multiplicative inverse. - * - * @param n the modulus. - * @param e the public exponent. - * @param d the private exponent ((inverse of e) mod n). - * @param p the first prime. - * @param q the second prime. - * @param dP exponent1 (d mod (p-1)). - * @param dQ exponent2 (d mod (q-1)). - * @param qInv ((inverse of q) mod p) - * - * @return the private key. - */ -pki.setRsaPrivateKey = pki.rsa.setPrivateKey = function( - n, e, d, p, q, dP, dQ, qInv) { - var key = { - n: n, - e: e, - d: d, - p: p, - q: q, - dP: dP, - dQ: dQ, - qInv: qInv - }; + if (stringified) { + url += '?' + stringified + } - /** - * Decrypts the given data with this private key. The decryption scheme - * must match the one used to encrypt the data. - * - * @param data the byte string to decrypt. - * @param scheme the decryption scheme to use: - * 'RSAES-PKCS1-V1_5' (default), - * 'RSA-OAEP', - * 'RAW', 'NONE', or null to perform raw RSA decryption. - * @param schemeOptions any scheme-specific options. - * - * @return the decrypted byte string. 
- */ - key.decrypt = function(data, scheme, schemeOptions) { - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); - } else if(scheme === undefined) { - scheme = 'RSAES-PKCS1-V1_5'; - } + return url +} - // do rsa decryption w/o any decoding - var d = pki.rsa.decrypt(data, key, false, false); +function parseURL (url) { + if (typeof url === 'string') { + url = new URL(url) - if(scheme === 'RSAES-PKCS1-V1_5') { - scheme = {decode: _decodePkcs1_v1_5}; - } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') { - scheme = { - decode: function(d, key) { - return forge.pkcs1.decode_rsa_oaep(key, d, schemeOptions); - } - }; - } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) { - scheme = {decode: function(d) {return d;}}; - } else { - throw new Error('Unsupported encryption scheme: "' + scheme + '".'); + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') } - // decode according to scheme - return scheme.decode(d, key, false); - }; + return url + } - /** - * Signs the given digest, producing a signature. - * - * PKCS#1 supports multiple (currently two) signature schemes: - * RSASSA-PKCS1-V1_5 and RSASSA-PSS. - * - * By default this implementation uses the "old scheme", i.e. - * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide - * an instance of Forge PSS object as the scheme parameter. - * - * @param md the message digest object with the hash to sign. - * @param scheme the signature scheme to use: - * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5, - * a Forge PSS object for RSASSA-PSS, - * 'NONE' or null for none, DigestInfo will not be used but - * PKCS#1 v1.5 padding will still be used. - * - * @return the signature as a byte string. - */ - key.sign = function(md, scheme) { - /* Note: The internal implementation of RSA operations is being - transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy - code like the use of an encoding block identifier 'bt' will eventually - be removed. 
*/ + if (!url || typeof url !== 'object') { + throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + } - // private key operation - var bt = false; + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); + if (!(url instanceof URL)) { + if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { + throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') } - if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') { - scheme = {encode: emsaPkcs1v15encode}; - bt = 0x01; - } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) { - scheme = {encode: function() {return md;}}; - bt = 0x01; + if (url.path != null && typeof url.path !== 'string') { + throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') } - // encode and then encrypt - var d = scheme.encode(md, key.n.bitLength()); - return pki.rsa.encrypt(d, key, bt); - }; + if (url.pathname != null && typeof url.pathname !== 'string') { + throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') + } - return key; -}; + if (url.hostname != null && typeof url.hostname !== 'string') { + throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') + } -/** - * Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object. - * - * @param rsaKey the ASN.1 RSAPrivateKey. - * - * @return the ASN.1 PrivateKeyInfo. - */ -pki.wrapRsaPrivateKey = function(rsaKey) { - // PrivateKeyInfo - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version (0) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(0).getBytes()), - // privateKeyAlgorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.rsaEncryption).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // PrivateKey - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(rsaKey).getBytes()) - ]); -}; + if (url.origin != null && typeof url.origin !== 'string') { + throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') + } -/** - * Converts a private key from an ASN.1 object. - * - * @param obj the ASN.1 representation of a PrivateKeyInfo containing an - * RSAPrivateKey or an RSAPrivateKey. - * - * @return the private key. - */ -pki.privateKeyFromAsn1 = function(obj) { - // get PrivateKeyInfo - var capture = {}; - var errors = []; - if(asn1.validate(obj, privateKeyValidator, capture, errors)) { - obj = asn1.fromDer(forge.util.createBuffer(capture.privateKey)); - } - - // get RSAPrivateKey - capture = {}; - errors = []; - if(!asn1.validate(obj, rsaPrivateKeyValidator, capture, errors)) { - var error = new Error('Cannot read private key. ' + - 'ASN.1 object does not contain an RSAPrivateKey.'); - error.errors = errors; - throw error; - } - - // Note: Version is currently ignored. 
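Aside (illustration only, not part of the patch): the removed forge hunk above defines key.sign(md, scheme) and key.verify(digest, signature, scheme) with RSASSA-PKCS1-v1_5 as the default scheme. A minimal sketch of how that vendored API is normally driven, assuming node-forge is available as an ordinary dependency rather than through the bundled __nccwpck_require__ ids:

const forge = require('node-forge')

// Throwaway 2048-bit key pair (synchronous generation path).
const { privateKey, publicKey } = forge.pki.rsa.generateKeyPair({ bits: 2048 })

// sign() defaults to RSASSA-PKCS1-V1_5: the hash is wrapped in a DigestInfo,
// padded with 0xFF bytes (block type 0x01) and encrypted with the private key.
const md = forge.md.sha256.create()
md.update('hello world', 'utf8')
const signature = privateKey.sign(md)

// verify() decrypts the signature, strips the PKCS#1 v1.5 padding, parses the
// DigestInfo and compares its digest with the one the caller supplies.
console.log(publicKey.verify(md.digest().bytes(), signature)) // true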
- // capture.privateKeyVersion - // FIXME: inefficient, get a BigInteger that uses byte strings - var n, e, d, p, q, dP, dQ, qInv; - n = forge.util.createBuffer(capture.privateKeyModulus).toHex(); - e = forge.util.createBuffer(capture.privateKeyPublicExponent).toHex(); - d = forge.util.createBuffer(capture.privateKeyPrivateExponent).toHex(); - p = forge.util.createBuffer(capture.privateKeyPrime1).toHex(); - q = forge.util.createBuffer(capture.privateKeyPrime2).toHex(); - dP = forge.util.createBuffer(capture.privateKeyExponent1).toHex(); - dQ = forge.util.createBuffer(capture.privateKeyExponent2).toHex(); - qInv = forge.util.createBuffer(capture.privateKeyCoefficient).toHex(); - - // set private key - return pki.setRsaPrivateKey( - new BigInteger(n, 16), - new BigInteger(e, 16), - new BigInteger(d, 16), - new BigInteger(p, 16), - new BigInteger(q, 16), - new BigInteger(dP, 16), - new BigInteger(dQ, 16), - new BigInteger(qInv, 16)); -}; + const port = url.port != null + ? url.port + : (url.protocol === 'https:' ? 443 : 80) + let origin = url.origin != null + ? url.origin + : `${url.protocol}//${url.hostname}:${port}` + let path = url.path != null + ? url.path + : `${url.pathname || ''}${url.search || ''}` -/** - * Converts a private key to an ASN.1 RSAPrivateKey. - * - * @param key the private key. - * - * @return the ASN.1 representation of an RSAPrivateKey. - */ -pki.privateKeyToAsn1 = pki.privateKeyToRSAPrivateKey = function(key) { - // RSAPrivateKey - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version (0 = only 2 primes, 1 multiple primes) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(0).getBytes()), - // modulus (n) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.n)), - // publicExponent (e) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.e)), - // privateExponent (d) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.d)), - // privateKeyPrime1 (p) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.p)), - // privateKeyPrime2 (q) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.q)), - // privateKeyExponent1 (dP) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.dP)), - // privateKeyExponent2 (dQ) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.dQ)), - // coefficient (qInv) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.qInv)) - ]); -}; + if (origin.endsWith('/')) { + origin = origin.substring(0, origin.length - 1) + } -/** - * Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey. - * - * @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey. - * - * @return the public key. - */ -pki.publicKeyFromAsn1 = function(obj) { - // get SubjectPublicKeyInfo - var capture = {}; - var errors = []; - if(asn1.validate(obj, publicKeyValidator, capture, errors)) { - // get oid - var oid = asn1.derToOid(capture.publicKeyOid); - if(oid !== pki.oids.rsaEncryption) { - var error = new Error('Cannot read public key. 
Unknown OID.'); - error.oid = oid; - throw error; + if (path && !path.startsWith('/')) { + path = `/${path}` } - obj = capture.rsaPublicKey; + // new URL(path, origin) is unsafe when `path` contains an absolute URL + // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: + // If first parameter is a relative URL, second param is required, and will be used as the base URL. + // If first parameter is an absolute URL, a given second param will be ignored. + url = new URL(origin + path) } - // get RSA params - errors = []; - if(!asn1.validate(obj, rsaPublicKeyValidator, capture, errors)) { - var error = new Error('Cannot read public key. ' + - 'ASN.1 object does not contain an RSAPublicKey.'); - error.errors = errors; - throw error; + return url +} + +function parseOrigin (url) { + url = parseURL(url) + + if (url.pathname !== '/' || url.search || url.hash) { + throw new InvalidArgumentError('invalid url') } - // FIXME: inefficient, get a BigInteger that uses byte strings - var n = forge.util.createBuffer(capture.publicKeyModulus).toHex(); - var e = forge.util.createBuffer(capture.publicKeyExponent).toHex(); + return url +} - // set public key - return pki.setRsaPublicKey( - new BigInteger(n, 16), - new BigInteger(e, 16)); -}; +function getHostname (host) { + if (host[0] === '[') { + const idx = host.indexOf(']') -/** - * Converts a public key to an ASN.1 SubjectPublicKeyInfo. - * - * @param key the public key. - * - * @return the asn1 representation of a SubjectPublicKeyInfo. - */ -pki.publicKeyToAsn1 = pki.publicKeyToSubjectPublicKeyInfo = function(key) { - // SubjectPublicKeyInfo - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.rsaEncryption).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // subjectPublicKey - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [ - pki.publicKeyToRSAPublicKey(key) - ]) - ]); -}; + assert(idx !== -1) + return host.substring(1, idx) + } -/** - * Converts a public key to an ASN.1 RSAPublicKey. - * - * @param key the public key. - * - * @return the asn1 representation of a RSAPublicKey. - */ -pki.publicKeyToRSAPublicKey = function(key) { - // RSAPublicKey - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // modulus (n) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.n)), - // publicExponent (e) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.e)) - ]); -}; + const idx = host.indexOf(':') + if (idx === -1) return host -/** - * Encodes a message using PKCS#1 v1.5 padding. - * - * @param m the message to encode. - * @param key the RSA key to use. - * @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02 - * (for encryption). - * - * @return the padded byte buffer. 
- */ -function _encodePkcs1_v1_5(m, key, bt) { - var eb = forge.util.createBuffer(); + return host.substring(0, idx) +} + +// IP addresses are not valid server names per RFC6066 +// > Currently, the only server names supported are DNS hostnames +function getServerName (host) { + if (!host) { + return null + } - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); + assert.strictEqual(typeof host, 'string') - /* use PKCS#1 v1.5 padding */ - if(m.length > (k - 11)) { - var error = new Error('Message is too long for PKCS#1 v1.5 padding.'); - error.length = m.length; - error.max = k - 11; - throw error; + const servername = getHostname(host) + if (net.isIP(servername)) { + return '' } - /* A block type BT, a padding string PS, and the data D shall be - formatted into an octet string EB, the encryption block: + return servername +} - EB = 00 || BT || PS || 00 || D +function deepClone (obj) { + return JSON.parse(JSON.stringify(obj)) +} - The block type BT shall be a single octet indicating the structure of - the encryption block. For this version of the document it shall have - value 00, 01, or 02. For a private-key operation, the block type - shall be 00 or 01. For a public-key operation, it shall be 02. +function isAsyncIterable (obj) { + return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') +} - The padding string PS shall consist of k-3-||D|| octets. For block - type 00, the octets shall have value 00; for block type 01, they - shall have value FF; and for block type 02, they shall be - pseudorandomly generated and nonzero. This makes the length of the - encryption block EB equal to k. */ +function isIterable (obj) { + return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) +} - // build the encryption block - eb.putByte(0x00); - eb.putByte(bt); +function bodyLength (body) { + if (body == null) { + return 0 + } else if (isStream(body)) { + const state = body._readableState + return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) + ? state.length + : null + } else if (isBlobLike(body)) { + return body.size != null ? body.size : null + } else if (isBuffer(body)) { + return body.byteLength + } - // create the padding - var padNum = k - 3 - m.length; - var padByte; - // private key op - if(bt === 0x00 || bt === 0x01) { - padByte = (bt === 0x00) ? 
0x00 : 0xFF; - for(var i = 0; i < padNum; ++i) { - eb.putByte(padByte); - } - } else { - // public key op - // pad with random non-zero values - while(padNum > 0) { - var numZeros = 0; - var padBytes = forge.random.getBytes(padNum); - for(var i = 0; i < padNum; ++i) { - padByte = padBytes.charCodeAt(i); - if(padByte === 0) { - ++numZeros; - } else { - eb.putByte(padByte); - } - } - padNum = numZeros; + return null +} + +function isDestroyed (stream) { + return !stream || !!(stream.destroyed || stream[kDestroyed]) +} + +function isReadableAborted (stream) { + const state = stream && stream._readableState + return isDestroyed(stream) && state && !state.endEmitted +} + +function destroy (stream, err) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { + return + } + + if (typeof stream.destroy === 'function') { + if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { + // See: https://github.com/nodejs/node/pull/38505/files + stream.socket = null } + + stream.destroy(err) + } else if (err) { + process.nextTick((stream, err) => { + stream.emit('error', err) + }, stream, err) } - // zero followed by message - eb.putByte(0x00); - eb.putBytes(m); + if (stream.destroyed !== true) { + stream[kDestroyed] = true + } +} - return eb; +const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ +function parseKeepAliveTimeout (val) { + const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) + return m ? parseInt(m[1], 10) * 1000 : null } -/** - * Decodes a message using PKCS#1 v1.5 padding. - * - * @param em the message to decode. - * @param key the RSA key to use. - * @param pub true if the key is a public key, false if it is private. - * @param ml the message length, if specified. - * - * @return the decoded bytes. - */ -function _decodePkcs1_v1_5(em, key, pub, ml) { - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); - - /* It is an error if any of the following conditions occurs: - - 1. The encryption block EB cannot be parsed unambiguously. - 2. The padding string PS consists of fewer than eight octets - or is inconsisent with the block type BT. - 3. The decryption process is a public-key operation and the block - type BT is not 00 or 01, or the decryption process is a - private-key operation and the block type is not 02. 
- */ +function parseHeaders (headers, obj = {}) { + // For H2 support + if (!Array.isArray(headers)) return headers - // parse the encryption block - var eb = forge.util.createBuffer(em); - var first = eb.getByte(); - var bt = eb.getByte(); - if(first !== 0x00 || - (pub && bt !== 0x00 && bt !== 0x01) || - (!pub && bt != 0x02) || - (pub && bt === 0x00 && typeof(ml) === 'undefined')) { - throw new Error('Encryption block is invalid.'); - } - - var padNum = 0; - if(bt === 0x00) { - // check all padding bytes for 0x00 - padNum = k - 3 - ml; - for(var i = 0; i < padNum; ++i) { - if(eb.getByte() !== 0x00) { - throw new Error('Encryption block is invalid.'); - } - } - } else if(bt === 0x01) { - // find the first byte that isn't 0xFF, should be after all padding - padNum = 0; - while(eb.length() > 1) { - if(eb.getByte() !== 0xFF) { - --eb.read; - break; + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i].toString().toLowerCase() + let val = obj[key] + + if (!val) { + if (Array.isArray(headers[i + 1])) { + obj[key] = headers[i + 1].map(x => x.toString('utf8')) + } else { + obj[key] = headers[i + 1].toString('utf8') } - ++padNum; - } - } else if(bt === 0x02) { - // look for 0x00 byte - padNum = 0; - while(eb.length() > 1) { - if(eb.getByte() === 0x00) { - --eb.read; - break; + } else { + if (!Array.isArray(val)) { + val = [val] + obj[key] = val } - ++padNum; + val.push(headers[i + 1].toString('utf8')) } } - // zero must be 0x00 and padNum must be (k - 3 - message length) - var zero = eb.getByte(); - if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) { - throw new Error('Encryption block is invalid.'); + // See https://github.com/nodejs/node/pull/46528 + if ('content-length' in obj && 'content-disposition' in obj) { + obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') } - return eb.getBytes(); + return obj } -/** - * Runs the key-generation algorithm asynchronously, either in the background - * via Web Workers, or using the main thread and setImmediate. - * - * @param state the key-pair generation state. - * @param [options] options for key-pair generation: - * workerScript the worker script URL. - * workers the number of web workers (if supported) to use, - * (default: 2, -1 to use estimated cores minus one). - * workLoad the size of the work load, ie: number of possible prime - * numbers for each web worker to check per work assignment, - * (default: 100). - * @param callback(err, keypair) called once the operation completes. 
- */ -function _generateKeyPair(state, options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; +function parseRawHeaders (headers) { + const ret = [] + let hasContentLength = false + let contentDispositionIdx = -1 - var opts = { - algorithm: { - name: options.algorithm || 'PRIMEINC', - options: { - workers: options.workers || 2, - workLoad: options.workLoad || 100, - workerScript: options.workerScript - } + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0].toString() + const val = headers[n + 1].toString('utf8') + + if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { + ret.push(key, val) + hasContentLength = true + } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { + contentDispositionIdx = ret.push(key, val) - 1 + } else { + ret.push(key, val) } - }; - if('prng' in options) { - opts.prng = options.prng; } - generate(); + // See https://github.com/nodejs/node/pull/46528 + if (hasContentLength && contentDispositionIdx !== -1) { + ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + } - function generate() { - // find p and then q (done in series to simplify) - getPrime(state.pBits, function(err, num) { - if(err) { - return callback(err); - } - state.p = num; - if(state.q !== null) { - return finish(err, state.q); - } - getPrime(state.qBits, finish); - }); + return ret +} + +function isBuffer (buffer) { + // See, https://github.com/mcollina/undici/pull/319 + return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) +} + +function validateHandler (handler, method, upgrade) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') } - function getPrime(bits, callback) { - forge.prime.generateProbablePrime(bits, opts, callback); + if (typeof handler.onConnect !== 'function') { + throw new InvalidArgumentError('invalid onConnect method') } - function finish(err, num) { - if(err) { - return callback(err); - } + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } - // set q - state.q = num; + if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { + throw new InvalidArgumentError('invalid onBodySent method') + } - // ensure p is larger than q (swap them if not) - if(state.p.compareTo(state.q) < 0) { - var tmp = state.p; - state.p = state.q; - state.q = tmp; + if (upgrade || method === 'CONNECT') { + if (typeof handler.onUpgrade !== 'function') { + throw new InvalidArgumentError('invalid onUpgrade method') } - - // ensure p is coprime with e - if(state.p.subtract(BigInteger.ONE).gcd(state.e) - .compareTo(BigInteger.ONE) !== 0) { - state.p = null; - generate(); - return; + } else { + if (typeof handler.onHeaders !== 'function') { + throw new InvalidArgumentError('invalid onHeaders method') } - // ensure q is coprime with e - if(state.q.subtract(BigInteger.ONE).gcd(state.e) - .compareTo(BigInteger.ONE) !== 0) { - state.q = null; - getPrime(state.qBits, finish); - return; + if (typeof handler.onData !== 'function') { + throw new InvalidArgumentError('invalid onData method') } - // compute phi: (p - 1)(q - 1) (Euler's totient function) - state.p1 = state.p.subtract(BigInteger.ONE); - state.q1 = state.q.subtract(BigInteger.ONE); - state.phi = state.p1.multiply(state.q1); - - // ensure e and phi are coprime - 
if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) !== 0) { - // phi and e aren't coprime, so generate a new p and q - state.p = state.q = null; - generate(); - return; + if (typeof handler.onComplete !== 'function') { + throw new InvalidArgumentError('invalid onComplete method') } + } +} - // create n, ensure n is has the right number of bits - state.n = state.p.multiply(state.q); - if(state.n.bitLength() !== state.bits) { - // failed, get new q - state.q = null; - getPrime(state.qBits, finish); - return; - } +// A body is disturbed if it has been read from and it cannot +// be re-used without losing state or data. +function isDisturbed (body) { + return !!(body && ( + stream.isDisturbed + ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? + : body[kBodyUsed] || + body.readableDidRead || + (body._readableState && body._readableState.dataEmitted) || + isReadableAborted(body) + )) +} - // set keys - var d = state.e.modInverse(state.phi); - state.keys = { - privateKey: pki.rsa.setPrivateKey( - state.n, state.e, d, state.p, state.q, - d.mod(state.p1), d.mod(state.q1), - state.q.modInverse(state.p)), - publicKey: pki.rsa.setPublicKey(state.n, state.e) - }; +function isErrored (body) { + return !!(body && ( + stream.isErrored + ? stream.isErrored(body) + : /state: 'errored'/.test(nodeUtil.inspect(body) + ))) +} - callback(null, state.keys); - } +function isReadable (body) { + return !!(body && ( + stream.isReadable + ? stream.isReadable(body) + : /state: 'readable'/.test(nodeUtil.inspect(body) + ))) } -/** - * Converts a positive BigInteger into 2's-complement big-endian bytes. - * - * @param b the big integer to convert. - * - * @return the bytes. - */ -function _bnToBytes(b) { - // prepend 0x00 if first byte >= 0x80 - var hex = b.toString(16); - if(hex[0] >= '8') { - hex = '00' + hex; - } - var bytes = forge.util.hexToBytes(hex); - - // ensure integer is minimally-encoded - if(bytes.length > 1 && - // leading 0x00 for positive integer - ((bytes.charCodeAt(0) === 0 && - (bytes.charCodeAt(1) & 0x80) === 0) || - // leading 0xFF for negative integer - (bytes.charCodeAt(0) === 0xFF && - (bytes.charCodeAt(1) & 0x80) === 0x80))) { - return bytes.substr(1); +function getSocketInfo (socket) { + return { + localAddress: socket.localAddress, + localPort: socket.localPort, + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteFamily, + timeout: socket.timeout, + bytesWritten: socket.bytesWritten, + bytesRead: socket.bytesRead } - return bytes; } -/** - * Returns the required number of Miller-Rabin tests to generate a - * prime with an error probability of (1/2)^80. - * - * See Handbook of Applied Cryptography Chapter 4, Table 4.4. - * - * @param bits the bit size. - * - * @return the required number of iterations. - */ -function _getMillerRabinTests(bits) { - if(bits <= 100) return 27; - if(bits <= 150) return 18; - if(bits <= 200) return 15; - if(bits <= 250) return 12; - if(bits <= 300) return 9; - if(bits <= 350) return 8; - if(bits <= 400) return 7; - if(bits <= 500) return 6; - if(bits <= 600) return 5; - if(bits <= 800) return 4; - if(bits <= 1250) return 3; - return 2; +async function * convertIterableToBuffer (iterable) { + for await (const chunk of iterable) { + yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + } } -/** - * Performs feature detection on the Node crypto interface. - * - * @param fn the feature (function) to detect. - * - * @return true if detected, false if not. 
- */ -function _detectNodeCrypto(fn) { - return forge.util.isNodejs && typeof _crypto[fn] === 'function'; -} +let ReadableStream +function ReadableStreamFrom (iterable) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) + } -/** - * Performs feature detection on the SubtleCrypto interface. - * - * @param fn the feature (function) to detect. - * - * @return true if detected, false if not. - */ -function _detectSubtleCrypto(fn) { - return (typeof util.globalScope !== 'undefined' && - typeof util.globalScope.crypto === 'object' && - typeof util.globalScope.crypto.subtle === 'object' && - typeof util.globalScope.crypto.subtle[fn] === 'function'); -} + if (ReadableStream.from) { + return ReadableStream.from(convertIterableToBuffer(iterable)) + } -/** - * Performs feature detection on the deprecated Microsoft Internet Explorer - * outdated SubtleCrypto interface. This function should only be used after - * checking for the modern, standard SubtleCrypto interface. - * - * @param fn the feature (function) to detect. - * - * @return true if detected, false if not. - */ -function _detectSubtleMsCrypto(fn) { - return (typeof util.globalScope !== 'undefined' && - typeof util.globalScope.msCrypto === 'object' && - typeof util.globalScope.msCrypto.subtle === 'object' && - typeof util.globalScope.msCrypto.subtle[fn] === 'function'); + let iterator + return new ReadableStream( + { + async start () { + iterator = iterable[Symbol.asyncIterator]() + }, + async pull (controller) { + const { done, value } = await iterator.next() + if (done) { + queueMicrotask(() => { + controller.close() + }) + } else { + const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) + controller.enqueue(new Uint8Array(buf)) + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + } + }, + 0 + ) } -function _intToUint8Array(x) { - var bytes = forge.util.hexToBytes(x.toString(16)); - var buffer = new Uint8Array(bytes.length); - for(var i = 0; i < bytes.length; ++i) { - buffer[i] = bytes.charCodeAt(i); +// The chunk should be a FormData instance and contains +// all the required methods. 
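Aside (illustration only, not part of the patch): the comment above belongs to the isFormDataLike helper that the patch adds immediately below. A short usage sketch, assuming a spec-compliant global FormData is available (Node 18+ ships undici's):

const fd = new FormData()
fd.append('field', 'value')

// append/delete/get/getAll/has/set are all functions and Symbol.toStringTag
// reports 'FormData', so the duck-type check passes.
console.log(isFormDataLike(fd)) // true

// A partial look-alike without the full method set (and without the tag)
// is rejected, which keeps arbitrary objects out of the multipart encoder.
console.log(isFormDataLike({ append () {}, get () {} })) // false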
+function isFormDataLike (object) { + return ( + object && + typeof object === 'object' && + typeof object.append === 'function' && + typeof object.delete === 'function' && + typeof object.get === 'function' && + typeof object.getAll === 'function' && + typeof object.has === 'function' && + typeof object.set === 'function' && + object[Symbol.toStringTag] === 'FormData' + ) +} + +function throwIfAborted (signal) { + if (!signal) { return } + if (typeof signal.throwIfAborted === 'function') { + signal.throwIfAborted() + } else { + if (signal.aborted) { + // DOMException not available < v17.0.0 + const err = new Error('The operation was aborted') + err.name = 'AbortError' + throw err + } } - return buffer; } -function _privateKeyFromJwk(jwk) { - if(jwk.kty !== 'RSA') { - throw new Error( - 'Unsupported key algorithm "' + jwk.kty + '"; algorithm must be "RSA".'); +function addAbortListener (signal, listener) { + if ('addEventListener' in signal) { + signal.addEventListener('abort', listener, { once: true }) + return () => signal.removeEventListener('abort', listener) } - return pki.setRsaPrivateKey( - _base64ToBigInt(jwk.n), - _base64ToBigInt(jwk.e), - _base64ToBigInt(jwk.d), - _base64ToBigInt(jwk.p), - _base64ToBigInt(jwk.q), - _base64ToBigInt(jwk.dp), - _base64ToBigInt(jwk.dq), - _base64ToBigInt(jwk.qi)); + signal.addListener('abort', listener) + return () => signal.removeListener('abort', listener) } -function _publicKeyFromJwk(jwk) { - if(jwk.kty !== 'RSA') { - throw new Error('Key algorithm must be "RSA".'); +const hasToWellFormed = !!String.prototype.toWellFormed + +/** + * @param {string} val + */ +function toUSVString (val) { + if (hasToWellFormed) { + return `${val}`.toWellFormed() + } else if (nodeUtil.toUSVString) { + return nodeUtil.toUSVString(val) } - return pki.setRsaPublicKey( - _base64ToBigInt(jwk.n), - _base64ToBigInt(jwk.e)); + + return `${val}` +} + +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? parseInt(m[3]) : null + } + : null } -function _base64ToBigInt(b64) { - return new BigInteger(forge.util.bytesToHex(forge.util.decode64(b64)), 16); +const kEnumerableProperty = Object.create(null) +kEnumerableProperty.enumerable = true + +module.exports = { + kEnumerableProperty, + nop, + isDisturbed, + isErrored, + isReadable, + toUSVString, + isReadableAborted, + isBlobLike, + parseOrigin, + parseURL, + getServerName, + isStream, + isIterable, + isAsyncIterable, + isDestroyed, + parseRawHeaders, + parseHeaders, + parseKeepAliveTimeout, + destroy, + bodyLength, + deepClone, + ReadableStreamFrom, + isBuffer, + validateHandler, + getSocketInfo, + isFormDataLike, + buildURL, + throwIfAborted, + addAbortListener, + parseRangeHeader, + nodeMajor, + nodeMinor, + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] } /***/ }), -/***/ 279: +/***/ 4839: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Secure Hash Algorithm with 160-bit digest (SHA-1) implementation. - * - * @author Dave Longley - * - * Copyright (c) 2010-2015 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); +"use strict"; -var sha1 = module.exports = forge.sha1 = forge.sha1 || {}; -forge.md.sha1 = forge.md.algorithms.sha1 = sha1; -/** - * Creates a SHA-1 message digest object. - * - * @return a message digest object. - */ -sha1.create = function() { - // do initialization as necessary - if(!_initialized) { - _init(); - } - - // SHA-1 state contains five 32-bit integers - var _state = null; - - // input buffer - var _input = forge.util.createBuffer(); - - // used for word storage - var _w = new Array(80); - - // message digest object - var md = { - algorithm: 'sha1', - blockLength: 64, - digestLength: 20, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 8 - }; +const Dispatcher = __nccwpck_require__(412) +const { + ClientDestroyedError, + ClientClosedError, + InvalidArgumentError +} = __nccwpck_require__(8045) +const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(2785) - /** - * Starts the digest. - * - * @return this digest object. - */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength64 for backwards-compatibility) - md.fullMessageLength = md.messageLength64 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _state = { - h0: 0x67452301, - h1: 0xEFCDAB89, - h2: 0x98BADCFE, - h3: 0x10325476, - h4: 0xC3D2E1F0 - }; - return md; - }; - // start digest automatically for first time - md.start(); +const kDestroyed = Symbol('destroyed') +const kClosed = Symbol('closed') +const kOnDestroyed = Symbol('onDestroyed') +const kOnClosed = Symbol('onClosed') +const kInterceptedDispatch = Symbol('Intercepted Dispatch') - /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. 
- */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); - } +class DispatcherBase extends Dispatcher { + constructor () { + super() - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); - } + this[kDestroyed] = false + this[kOnDestroyed] = null + this[kClosed] = false + this[kOnClosed] = [] + } - // add bytes to input buffer - _input.putBytes(msg); + get destroyed () { + return this[kDestroyed] + } + + get closed () { + return this[kClosed] + } - // process bytes - _update(_state, _w, _input); + get interceptors () { + return this[kInterceptors] + } - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); + set interceptors (newInterceptors) { + if (newInterceptors) { + for (let i = newInterceptors.length - 1; i >= 0; i--) { + const interceptor = this[kInterceptors][i] + if (typeof interceptor !== 'function') { + throw new InvalidArgumentError('interceptor must be an function') + } + } } - return md; - }; + this[kInterceptors] = newInterceptors + } - /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate SHA-1 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 448 mod 512. In other words, - the data to be digested must be a multiple of 512 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 8 bytes (64 - bits), that means that the last segment of the data must have 56 bytes - (448 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 448 mod 512 because - 512 - 128 = 448. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 448 mod 512, then 512 padding bits must be added. 
*/ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 and add carry from next int - var next, carry; - var bits = md.fullMessageLength[0] * 8; - for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { - next = md.fullMessageLength[i + 1] * 8; - carry = (next / 0x100000000) >>> 0; - bits += carry; - finalBlock.putInt32(bits >>> 0); - bits = next >>> 0; - } - finalBlock.putInt32(bits); - - var s2 = { - h0: _state.h0, - h1: _state.h1, - h2: _state.h2, - h3: _state.h3, - h4: _state.h4 - }; - _update(s2, _w, finalBlock); - var rval = forge.util.createBuffer(); - rval.putInt32(s2.h0); - rval.putInt32(s2.h1); - rval.putInt32(s2.h2); - rval.putInt32(s2.h3); - rval.putInt32(s2.h4); - return rval; - }; + close (callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.close((err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } - return md; -}; + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -// sha-1 padding bytes not initialized yet -var _padding = null; -var _initialized = false; + if (this[kDestroyed]) { + queueMicrotask(() => callback(new ClientDestroyedError(), null)) + return + } -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + if (this[kClosed]) { + if (this[kOnClosed]) { + this[kOnClosed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } - // now initialized - _initialized = true; -} + this[kClosed] = true + this[kOnClosed].push(callback) -/** - * Updates a SHA-1 state with the given byte buffer. - * - * @param s the SHA-1 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. 
- */ -function _update(s, w, bytes) { - // consume 512 bit (64 byte) chunks - var t, a, b, c, d, e, f, i; - var len = bytes.length(); - while(len >= 64) { - // the w array will be populated with sixteen 32-bit big-endian words - // and then extended into 80 32-bit words according to SHA-1 algorithm - // and for 32-79 using Max Locktyukhin's optimization - - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - e = s.h4; - - // round 1 - for(i = 0; i < 16; ++i) { - t = bytes.getInt32(); - w[i] = t; - f = d ^ (b & (c ^ d)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - for(; i < 20; ++i) { - t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); - t = (t << 1) | (t >>> 31); - w[i] = t; - f = d ^ (b & (c ^ d)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - // round 2 - for(; i < 32; ++i) { - t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); - t = (t << 1) | (t >>> 31); - w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - for(; i < 40; ++i) { - t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); - t = (t << 2) | (t >>> 30); - w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - // round 3 - for(; i < 60; ++i) { - t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); - t = (t << 2) | (t >>> 30); - w[i] = t; - f = (b & c) | (d & (b ^ c)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - // round 4 - for(; i < 80; ++i) { - t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); - t = (t << 2) | (t >>> 30); - w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - s.h4 = (s.h4 + e) | 0; - - len -= 64; + const onClosed = () => { + const callbacks = this[kOnClosed] + this[kOnClosed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kClose]() + .then(() => this.destroy()) + .then(() => { + queueMicrotask(onClosed) + }) } -} + destroy (err, callback) { + if (typeof err === 'function') { + callback = err + err = null + } -/***/ }), + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.destroy(err, (err, data) => { + return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data) + }) + }) + } -/***/ 4086: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -/** - * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation. - * - * See FIPS 180-2 for details. 
- * - * @author Dave Longley - * - * Copyright (c) 2010-2015 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + if (this[kDestroyed]) { + if (this[kOnDestroyed]) { + this[kOnDestroyed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } -var sha256 = module.exports = forge.sha256 = forge.sha256 || {}; -forge.md.sha256 = forge.md.algorithms.sha256 = sha256; + if (!err) { + err = new ClientDestroyedError() + } -/** - * Creates a SHA-256 message digest object. - * - * @return a message digest object. - */ -sha256.create = function() { - // do initialization as necessary - if(!_initialized) { - _init(); - } - - // SHA-256 state contains eight 32-bit integers - var _state = null; - - // input buffer - var _input = forge.util.createBuffer(); - - // used for word storage - var _w = new Array(64); - - // message digest object - var md = { - algorithm: 'sha256', - blockLength: 64, - digestLength: 32, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 8 - }; + this[kDestroyed] = true + this[kOnDestroyed] = this[kOnDestroyed] || [] + this[kOnDestroyed].push(callback) - /** - * Starts the digest. - * - * @return this digest object. - */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength64 for backwards-compatibility) - md.fullMessageLength = md.messageLength64 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _state = { - h0: 0x6A09E667, - h1: 0xBB67AE85, - h2: 0x3C6EF372, - h3: 0xA54FF53A, - h4: 0x510E527F, - h5: 0x9B05688C, - h6: 0x1F83D9AB, - h7: 0x5BE0CD19 - }; - return md; - }; - // start digest automatically for first time - md.start(); + const onDestroyed = () => { + const callbacks = this[kOnDestroyed] + this[kOnDestroyed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } - /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. - */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); + // Should not error. 
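Aside (illustration only, not part of the patch): close() and destroy() on this DispatcherBase accept either a node-style callback or no callback at all, in which case they return a promise that simply wraps the callback form. A hedged sketch against undici's public Agent, which extends this base class (assumes undici is installed normally):

const { Agent } = require('undici')

// Promise style: no callback supplied, so close() returns a Promise.
const agent = new Agent()
agent.close().then(() => console.log('agent closed'))

// Callback style: destroy(err, callback). The Error becomes the reason handed
// to any in-flight handlers; the callback itself receives (null, null).
const other = new Agent()
other.destroy(new Error('shutting down'), (err, data) => {
  console.log('agent destroyed', err, data) // null null
})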
+ this[kDestroy](err).then(() => { + queueMicrotask(onDestroyed) + }) + } + + [kInterceptedDispatch] (opts, handler) { + if (!this[kInterceptors] || this[kInterceptors].length === 0) { + this[kInterceptedDispatch] = this[kDispatch] + return this[kDispatch](opts, handler) + } + + let dispatch = this[kDispatch].bind(this) + for (let i = this[kInterceptors].length - 1; i >= 0; i--) { + dispatch = this[kInterceptors][i](dispatch) } + this[kInterceptedDispatch] = dispatch + return dispatch(opts, handler) + } - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); + dispatch (opts, handler) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') } - // add bytes to input buffer - _input.putBytes(msg); + try { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object.') + } + + if (this[kDestroyed] || this[kOnDestroyed]) { + throw new ClientDestroyedError() + } + + if (this[kClosed]) { + throw new ClientClosedError() + } + + return this[kInterceptedDispatch](opts, handler) + } catch (err) { + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } - // process bytes - _update(_state, _w, _input); + handler.onError(err) - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); + return false } + } +} - return md; - }; +module.exports = DispatcherBase - /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate SHA-256 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 448 mod 512. In other words, - the data to be digested must be a multiple of 512 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 8 bytes (64 - bits), that means that the last segment of the data must have 56 bytes - (448 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 448 mod 512 because - 512 - 128 = 448. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 448 mod 512, then 512 padding bits must be added. 
*/ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 and add carry from next int - var next, carry; - var bits = md.fullMessageLength[0] * 8; - for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { - next = md.fullMessageLength[i + 1] * 8; - carry = (next / 0x100000000) >>> 0; - bits += carry; - finalBlock.putInt32(bits >>> 0); - bits = next >>> 0; - } - finalBlock.putInt32(bits); - - var s2 = { - h0: _state.h0, - h1: _state.h1, - h2: _state.h2, - h3: _state.h3, - h4: _state.h4, - h5: _state.h5, - h6: _state.h6, - h7: _state.h7 - }; - _update(s2, _w, finalBlock); - var rval = forge.util.createBuffer(); - rval.putInt32(s2.h0); - rval.putInt32(s2.h1); - rval.putInt32(s2.h2); - rval.putInt32(s2.h3); - rval.putInt32(s2.h4); - rval.putInt32(s2.h5); - rval.putInt32(s2.h6); - rval.putInt32(s2.h7); - return rval; - }; - return md; -}; +/***/ }), -// sha-256 padding bytes not initialized yet -var _padding = null; -var _initialized = false; +/***/ 412: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// table of constants -var _k = null; +"use strict"; -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 64); - - // create K table for SHA-256 - _k = [ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, - 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, - 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, - 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, - 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, - 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, - 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, - 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, - 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2]; - - // now initialized - _initialized = true; -} -/** - * Updates a SHA-256 state with the given byte buffer. - * - * @param s the SHA-256 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. 
- */ -function _update(s, w, bytes) { - // consume 512 bit (64 byte) chunks - var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h; - var len = bytes.length(); - while(len >= 64) { - // the w array will be populated with sixteen 32-bit big-endian words - // and then extended into 64 32-bit words according to SHA-256 - for(i = 0; i < 16; ++i) { - w[i] = bytes.getInt32(); - } - for(; i < 64; ++i) { - // XOR word 2 words ago rot right 17, rot right 19, shft right 10 - t1 = w[i - 2]; - t1 = - ((t1 >>> 17) | (t1 << 15)) ^ - ((t1 >>> 19) | (t1 << 13)) ^ - (t1 >>> 10); - // XOR word 15 words ago rot right 7, rot right 18, shft right 3 - t2 = w[i - 15]; - t2 = - ((t2 >>> 7) | (t2 << 25)) ^ - ((t2 >>> 18) | (t2 << 14)) ^ - (t2 >>> 3); - // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32 - w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0; - } - - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - e = s.h4; - f = s.h5; - g = s.h6; - h = s.h7; - - // round function - for(i = 0; i < 64; ++i) { - // Sum1(e) - s1 = - ((e >>> 6) | (e << 26)) ^ - ((e >>> 11) | (e << 21)) ^ - ((e >>> 25) | (e << 7)); - // Ch(e, f, g) (optimized the same way as SHA-1) - ch = g ^ (e & (f ^ g)); - // Sum0(a) - s0 = - ((a >>> 2) | (a << 30)) ^ - ((a >>> 13) | (a << 19)) ^ - ((a >>> 22) | (a << 10)); - // Maj(a, b, c) (optimized the same way as SHA-1) - maj = (a & b) | (c & (a ^ b)); - - // main algorithm - t1 = h + s1 + ch + _k[i] + w[i]; - t2 = s0 + maj; - h = g; - g = f; - f = e; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - // can't truncate with `| 0` - e = (d + t1) >>> 0; - d = c; - c = b; - b = a; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - // can't truncate with `| 0` - a = (t1 + t2) >>> 0; - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - s.h4 = (s.h4 + e) | 0; - s.h5 = (s.h5 + f) | 0; - s.h6 = (s.h6 + g) | 0; - s.h7 = (s.h7 + h) | 0; - len -= 64; +const EventEmitter = __nccwpck_require__(2361) + +class Dispatcher extends EventEmitter { + dispatch () { + throw new Error('not implemented') + } + + close () { + throw new Error('not implemented') + } + + destroy () { + throw new Error('not implemented') } } +module.exports = Dispatcher + /***/ }), -/***/ 9542: +/***/ 1472: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Secure Hash Algorithm with a 1024-bit block size implementation. - * - * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For - * SHA-256 (block size 512 bits), see sha256.js. - * - * See FIPS 180-4 for details. - * - * @author Dave Longley - * - * Copyright (c) 2014-2015 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); +"use strict"; -var sha512 = module.exports = forge.sha512 = forge.sha512 || {}; -// SHA-512 -forge.md.sha512 = forge.md.algorithms.sha512 = sha512; +const Busboy = __nccwpck_require__(727) +const util = __nccwpck_require__(3983) +const { + ReadableStreamFrom, + isBlobLike, + isReadableStreamLike, + readableStreamClose, + createDeferredPromise, + fullyReadBody +} = __nccwpck_require__(2538) +const { FormData } = __nccwpck_require__(2015) +const { kState } = __nccwpck_require__(5861) +const { webidl } = __nccwpck_require__(1744) +const { DOMException, structuredClone } = __nccwpck_require__(1037) +const { Blob, File: NativeFile } = __nccwpck_require__(4300) +const { kBodyUsed } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { isErrored } = __nccwpck_require__(3983) +const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) +const { File: UndiciFile } = __nccwpck_require__(8511) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) + +let ReadableStream = globalThis.ReadableStream + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() + +// https://fetch.spec.whatwg.org/#concept-bodyinit-extract +function extractBody (object, keepalive = false) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) + } + + // 1. Let stream be null. + let stream = null + + // 2. If object is a ReadableStream object, then set stream to object. + if (object instanceof ReadableStream) { + stream = object + } else if (isBlobLike(object)) { + // 3. Otherwise, if object is a Blob object, set stream to the + // result of running object’s get stream. + stream = object.stream() + } else { + // 4. Otherwise, set stream to a new ReadableStream object, and set + // up stream. + stream = new ReadableStream({ + async pull (controller) { + controller.enqueue( + typeof source === 'string' ? textEncoder.encode(source) : source + ) + queueMicrotask(() => readableStreamClose(controller)) + }, + start () {}, + type: undefined + }) + } -// SHA-384 -var sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {}; -sha384.create = function() { - return sha512.create('SHA-384'); -}; -forge.md.sha384 = forge.md.algorithms.sha384 = sha384; + // 5. Assert: stream is a ReadableStream object. + assert(isReadableStreamLike(stream)) + + // 6. Let action be null. + let action = null + + // 7. Let source be null. + let source = null + + // 8. Let length be null. + let length = null + + // 9. Let type be null. + let type = null + + // 10. Switch on object: + if (typeof object === 'string') { + // Set source to the UTF-8 encoding of object. + // Note: setting source to a Uint8Array here breaks some mocking assumptions. + source = object + + // Set type to `text/plain;charset=UTF-8`. 
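// Editorial aside (illustration only, not part of the patch): for a plain string
// body the branch below keeps the string as `source`, tags it as UTF-8 text and
// lets the stream set up above encode it lazily. The demo name here is
// hypothetical and exists only for this sketch:
const demoBytes = new TextEncoder().encode('hello')
// demoBytes is Uint8Array(5) [104, 101, 108, 108, 111]; Buffer.byteLength('hello')
// yields the same 5, which is what later ends up in `length`.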
+ type = 'text/plain;charset=UTF-8' + } else if (object instanceof URLSearchParams) { + // URLSearchParams + + // spec says to run application/x-www-form-urlencoded on body.list + // this is implemented in Node.js as apart of an URLSearchParams instance toString method + // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 + // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 + + // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. + source = object.toString() + + // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. + type = 'application/x-www-form-urlencoded;charset=UTF-8' + } else if (isArrayBuffer(object)) { + // BufferSource/ArrayBuffer + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.slice()) + } else if (ArrayBuffer.isView(object)) { + // BufferSource/ArrayBufferView + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) + } else if (util.isFormDataLike(object)) { + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const prefix = `--${boundary}\r\nContent-Disposition: form-data` + + /*! formdata-polyfill. MIT License. Jimmy Wärting */ + const escape = (str) => + str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') + const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') + + // Set action to this step: run the multipart/form-data + // encoding algorithm, with object’s entry list and UTF-8. + // - This ensures that the body is immutable and can't be changed afterwords + // - That the content-length is calculated in advance. + // - And that all parts are pre-encoded and ready to be sent. + + const blobParts = [] + const rn = new Uint8Array([13, 10]) // '\r\n' + length = 0 + let hasUnknownSizeValue = false + + for (const [name, value] of object) { + if (typeof value === 'string') { + const chunk = textEncoder.encode(prefix + + `; name="${escape(normalizeLinefeeds(name))}"` + + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) + blobParts.push(chunk) + length += chunk.byteLength + } else { + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + + `Content-Type: ${ + value.type || 'application/octet-stream' + }\r\n\r\n`) + blobParts.push(chunk, value, rn) + if (typeof value.size === 'number') { + length += chunk.byteLength + value.size + rn.byteLength + } else { + hasUnknownSizeValue = true + } + } + } -// SHA-512/256 -forge.sha512.sha256 = forge.sha512.sha256 || { - create: function() { - return sha512.create('SHA-512/256'); - } -}; -forge.md['sha512/256'] = forge.md.algorithms['sha512/256'] = - forge.sha512.sha256; + const chunk = textEncoder.encode(`--${boundary}--`) + blobParts.push(chunk) + length += chunk.byteLength + if (hasUnknownSizeValue) { + length = null + } -// SHA-512/224 -forge.sha512.sha224 = forge.sha512.sha224 || { - create: function() { - return sha512.create('SHA-512/224'); - } -}; -forge.md['sha512/224'] = forge.md.algorithms['sha512/224'] = - forge.sha512.sha224; + // Set source to object. + source = object -/** - * Creates a SHA-2 message digest object. 
- * - * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224, - * SHA-512/256). - * - * @return a message digest object. - */ -sha512.create = function(algorithm) { - // do initialization as necessary - if(!_initialized) { - _init(); + action = async function * () { + for (const part of blobParts) { + if (part.stream) { + yield * part.stream() + } else { + yield part + } + } + } + + // Set type to `multipart/form-data; boundary=`, + // followed by the multipart/form-data boundary string generated + // by the multipart/form-data encoding algorithm. + type = 'multipart/form-data; boundary=' + boundary + } else if (isBlobLike(object)) { + // Blob + + // Set source to object. + source = object + + // Set length to object’s size. + length = object.size + + // If object’s type attribute is not the empty byte sequence, set + // type to its value. + if (object.type) { + type = object.type + } + } else if (typeof object[Symbol.asyncIterator] === 'function') { + // If keepalive is true, then throw a TypeError. + if (keepalive) { + throw new TypeError('keepalive') + } + + // If object is disturbed or locked, then throw a TypeError. + if (util.isDisturbed(object) || object.locked) { + throw new TypeError( + 'Response body object should not be disturbed or locked' + ) + } + + stream = + object instanceof ReadableStream ? object : ReadableStreamFrom(object) } - if(typeof algorithm === 'undefined') { - algorithm = 'SHA-512'; + // 11. If source is a byte sequence, then set action to a + // step that returns source and length to source’s length. + if (typeof source === 'string' || util.isBuffer(source)) { + length = Buffer.byteLength(source) } - if(!(algorithm in _states)) { - throw new Error('Invalid SHA-512 algorithm: ' + algorithm); + // 12. If action is non-null, then run these steps in in parallel: + if (action != null) { + // Run action. + let iterator + stream = new ReadableStream({ + async start () { + iterator = action(object)[Symbol.asyncIterator]() + }, + async pull (controller) { + const { value, done } = await iterator.next() + if (done) { + // When running action is done, close stream. + queueMicrotask(() => { + controller.close() + }) + } else { + // Whenever one or more bytes are available and stream is not errored, + // enqueue a Uint8Array wrapping an ArrayBuffer containing the available + // bytes into stream. + if (!isErrored(stream)) { + controller.enqueue(new Uint8Array(value)) + } + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + }, + type: undefined + }) } - // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints) - var _state = _states[algorithm]; - var _h = null; + // 13. Let body be a body whose stream is stream, source is source, + // and length is length. + const body = { stream, source, length } - // input buffer - var _input = forge.util.createBuffer(); + // 14. Return (body, type). 
+ return [body, type] +} - // used for 64-bit word storage - var _w = new Array(80); - for(var wi = 0; wi < 80; ++wi) { - _w[wi] = new Array(2); +// https://fetch.spec.whatwg.org/#bodyinit-safely-extract +function safelyExtractBody (object, keepalive = false) { + if (!ReadableStream) { + // istanbul ignore next + ReadableStream = (__nccwpck_require__(5356).ReadableStream) } - // determine digest length by algorithm name (default) - var digestLength = 64; - switch(algorithm) { - case 'SHA-384': - digestLength = 48; - break; - case 'SHA-512/256': - digestLength = 32; - break; - case 'SHA-512/224': - digestLength = 28; - break; + // To safely extract a body and a `Content-Type` value from + // a byte sequence or BodyInit object object, run these steps: + + // 1. If object is a ReadableStream object, then: + if (object instanceof ReadableStream) { + // Assert: object is neither disturbed nor locked. + // istanbul ignore next + assert(!util.isDisturbed(object), 'The body has already been consumed.') + // istanbul ignore next + assert(!object.locked, 'The stream is locked.') } - // message digest object - var md = { - // SHA-512 => sha512 - algorithm: algorithm.replace('-', '').toLowerCase(), - blockLength: 128, - digestLength: digestLength, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 16 - }; + // 2. Return the results of extracting object. + return extractBody(object, keepalive) +} - /** - * Starts the digest. - * - * @return this digest object. - */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength128 for backwards-compatibility) - md.fullMessageLength = md.messageLength128 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _h = new Array(_state.length); - for(var i = 0; i < _state.length; ++i) { - _h[i] = _state[i].slice(0); - } - return md; - }; - // start digest automatically for first time - md.start(); +function cloneBody (body) { + // To clone a body body, run these steps: - /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. - */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); - } + // https://fetch.spec.whatwg.org/#concept-body-clone - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); - } + // 1. Let « out1, out2 » be the result of teeing body’s stream. + const [out1, out2] = body.stream.tee() + const out2Clone = structuredClone(out2, { transfer: [out2] }) + // This, for whatever reasons, unrefs out2Clone which allows + // the process to exit by itself. 
+ const [, finalClone] = out2Clone.tee() - // add bytes to input buffer - _input.putBytes(msg); + // 2. Set body’s stream to out1. + body.stream = out1 - // process bytes - _update(_h, _w, _input); + // 3. Return a body whose stream is out2 and other members are copied from body. + return { + stream: finalClone, + length: body.length, + source: body.source + } +} - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); - } +async function * consumeBody (body) { + if (body) { + if (isUint8Array(body)) { + yield body + } else { + const stream = body.stream - return md; - }; + if (util.isDisturbed(stream)) { + throw new TypeError('The body has already been consumed.') + } - /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate SHA-512 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 896 mod 1024. In other words, - the data to be digested must be a multiple of 1024 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 16 bytes (128 - bits), that means that the last segment of the data must have 112 bytes - (896 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 896 mod 1024 because - 1024 - 128 = 896. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 896 mod 1024, then 1024 padding bits must be added. */ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 and add carry from next int - var next, carry; - var bits = md.fullMessageLength[0] * 8; - for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { - next = md.fullMessageLength[i + 1] * 8; - carry = (next / 0x100000000) >>> 0; - bits += carry; - finalBlock.putInt32(bits >>> 0); - bits = next >>> 0; - } - finalBlock.putInt32(bits); - - var h = new Array(_h.length); - for(var i = 0; i < _h.length; ++i) { - h[i] = _h[i].slice(0); - } - _update(h, _w, finalBlock); - var rval = forge.util.createBuffer(); - var hlen; - if(algorithm === 'SHA-512') { - hlen = h.length; - } else if(algorithm === 'SHA-384') { - hlen = h.length - 2; - } else { - hlen = h.length - 4; - } - for(var i = 0; i < hlen; ++i) { - rval.putInt32(h[i][0]); - if(i !== hlen - 1 || algorithm !== 'SHA-512/224') { - rval.putInt32(h[i][1]); + if (stream.locked) { + throw new TypeError('The stream is locked.') } + + // Compat. 
+ stream[kBodyUsed] = true + + yield * stream } - return rval; - }; + } +} - return md; -}; +function throwIfAborted (state) { + if (state.aborted) { + throw new DOMException('The operation was aborted.', 'AbortError') + } +} -// sha-512 padding bytes not initialized yet -var _padding = null; -var _initialized = false; +function bodyMixinMethods (instance) { + const methods = { + blob () { + // The blob() method steps are to return the result of + // running consume body with this and the following step + // given a byte sequence bytes: return a Blob whose + // contents are bytes and whose type attribute is this’s + // MIME type. + return specConsumeBody(this, (bytes) => { + let mimeType = bodyMimeType(this) -// table of constants -var _k = null; + if (mimeType === 'failure') { + mimeType = '' + } else if (mimeType) { + mimeType = serializeAMimeType(mimeType) + } -// initial hash states -var _states = null; + // Return a Blob whose contents are bytes and type attribute + // is mimeType. + return new Blob([bytes], { type: mimeType }) + }, instance) + }, -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 128); - - // create K table for SHA-512 - _k = [ - [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd], - [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc], - [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019], - [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118], - [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe], - [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2], - [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1], - [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694], - [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3], - [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65], - [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483], - [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5], - [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210], - [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4], - [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725], - [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70], - [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926], - [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df], - [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8], - [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b], - [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001], - [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30], - [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910], - [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8], - [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53], - [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8], - [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb], - [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3], - [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60], - [0x84c87814, 0xa1f0ab72], [0x8cc70208, 0x1a6439ec], - [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9], - [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b], - [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207], - [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178], - [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6], - [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b], - [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493], - [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c], - [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a], - [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817] - ]; - - // initial hash states - _states = {}; - _states['SHA-512'] = [ - [0x6a09e667, 
0xf3bcc908], - [0xbb67ae85, 0x84caa73b], - [0x3c6ef372, 0xfe94f82b], - [0xa54ff53a, 0x5f1d36f1], - [0x510e527f, 0xade682d1], - [0x9b05688c, 0x2b3e6c1f], - [0x1f83d9ab, 0xfb41bd6b], - [0x5be0cd19, 0x137e2179] - ]; - _states['SHA-384'] = [ - [0xcbbb9d5d, 0xc1059ed8], - [0x629a292a, 0x367cd507], - [0x9159015a, 0x3070dd17], - [0x152fecd8, 0xf70e5939], - [0x67332667, 0xffc00b31], - [0x8eb44a87, 0x68581511], - [0xdb0c2e0d, 0x64f98fa7], - [0x47b5481d, 0xbefa4fa4] - ]; - _states['SHA-512/256'] = [ - [0x22312194, 0xFC2BF72C], - [0x9F555FA3, 0xC84C64C2], - [0x2393B86B, 0x6F53B151], - [0x96387719, 0x5940EABD], - [0x96283EE2, 0xA88EFFE3], - [0xBE5E1E25, 0x53863992], - [0x2B0199FC, 0x2C85B8AA], - [0x0EB72DDC, 0x81C52CA2] - ]; - _states['SHA-512/224'] = [ - [0x8C3D37C8, 0x19544DA2], - [0x73E19966, 0x89DCD4D6], - [0x1DFAB7AE, 0x32FF9C82], - [0x679DD514, 0x582F9FCF], - [0x0F6D2B69, 0x7BD44DA8], - [0x77E36F73, 0x04C48942], - [0x3F9D85A8, 0x6A1D36C8], - [0x1112E6AD, 0x91D692A1] - ]; - - // now initialized - _initialized = true; -} + arrayBuffer () { + // The arrayBuffer() method steps are to return the result + // of running consume body with this and the following step + // given a byte sequence bytes: return a new ArrayBuffer + // whose contents are bytes. + return specConsumeBody(this, (bytes) => { + return new Uint8Array(bytes).buffer + }, instance) + }, -/** - * Updates a SHA-512 state with the given byte buffer. - * - * @param s the SHA-512 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. - */ -function _update(s, w, bytes) { - // consume 512 bit (128 byte) chunks - var t1_hi, t1_lo; - var t2_hi, t2_lo; - var s0_hi, s0_lo; - var s1_hi, s1_lo; - var ch_hi, ch_lo; - var maj_hi, maj_lo; - var a_hi, a_lo; - var b_hi, b_lo; - var c_hi, c_lo; - var d_hi, d_lo; - var e_hi, e_lo; - var f_hi, f_lo; - var g_hi, g_lo; - var h_hi, h_lo; - var i, hi, lo, w2, w7, w15, w16; - var len = bytes.length(); - while(len >= 128) { - // the w array will be populated with sixteen 64-bit big-endian words - // and then extended into 64 64-bit words according to SHA-512 - for(i = 0; i < 16; ++i) { - w[i][0] = bytes.getInt32() >>> 0; - w[i][1] = bytes.getInt32() >>> 0; - } - for(; i < 80; ++i) { - // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x) - w2 = w[i - 2]; - hi = w2[0]; - lo = w2[1]; - - // high bits - t1_hi = ( - ((hi >>> 19) | (lo << 13)) ^ // ROTR 19 - ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29) - (hi >>> 6)) >>> 0; // SHR 6 - // low bits - t1_lo = ( - ((hi << 13) | (lo >>> 19)) ^ // ROTR 19 - ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29) - ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6 - - // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x) - w15 = w[i - 15]; - hi = w15[0]; - lo = w15[1]; - - // high bits - t2_hi = ( - ((hi >>> 1) | (lo << 31)) ^ // ROTR 1 - ((hi >>> 8) | (lo << 24)) ^ // ROTR 8 - (hi >>> 7)) >>> 0; // SHR 7 - // low bits - t2_lo = ( - ((hi << 31) | (lo >>> 1)) ^ // ROTR 1 - ((hi << 24) | (lo >>> 8)) ^ // ROTR 8 - ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7 - - // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow) - w7 = w[i - 7]; - w16 = w[i - 16]; - lo = (t1_lo + w7[1] + t2_lo + w16[1]); - w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] + - ((lo / 0x100000000) >>> 0)) >>> 0; - w[i][1] = lo >>> 0; - } - - // initialize hash value for this chunk - a_hi = s[0][0]; - a_lo = s[0][1]; - b_hi = s[1][0]; - b_lo = s[1][1]; - c_hi = s[2][0]; - c_lo = s[2][1]; - d_hi = s[3][0]; - d_lo = s[3][1]; - e_hi = 
s[4][0]; - e_lo = s[4][1]; - f_hi = s[5][0]; - f_lo = s[5][1]; - g_hi = s[6][0]; - g_lo = s[6][1]; - h_hi = s[7][0]; - h_lo = s[7][1]; - - // round function - for(i = 0; i < 80; ++i) { - // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e) - s1_hi = ( - ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14 - ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18 - ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9) - s1_lo = ( - ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14 - ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18 - ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9) - - // Ch(e, f, g) (optimized the same way as SHA-1) - ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0; - ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0; - - // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a) - s0_hi = ( - ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28 - ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2) - ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7) - s0_lo = ( - ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28 - ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2) - ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7) - - // Maj(a, b, c) (optimized the same way as SHA-1) - maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0; - maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0; - - // main algorithm - // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow) - lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]); - t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] + - ((lo / 0x100000000) >>> 0)) >>> 0; - t1_lo = lo >>> 0; - - // t2 = s0 + maj modulo 2^64 (carry lo overflow) - lo = s0_lo + maj_lo; - t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - t2_lo = lo >>> 0; - - h_hi = g_hi; - h_lo = g_lo; - - g_hi = f_hi; - g_lo = f_lo; - - f_hi = e_hi; - f_lo = e_lo; - - // e = (d + t1) modulo 2^64 (carry lo overflow) - lo = d_lo + t1_lo; - e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - e_lo = lo >>> 0; - - d_hi = c_hi; - d_lo = c_lo; - - c_hi = b_hi; - c_lo = b_lo; - - b_hi = a_hi; - b_lo = a_lo; - - // a = (t1 + t2) modulo 2^64 (carry lo overflow) - lo = t1_lo + t2_lo; - a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - a_lo = lo >>> 0; - } - - // update hash state (additional modulo 2^64) - lo = s[0][1] + a_lo; - s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[0][1] = lo >>> 0; - - lo = s[1][1] + b_lo; - s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[1][1] = lo >>> 0; - - lo = s[2][1] + c_lo; - s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[2][1] = lo >>> 0; - - lo = s[3][1] + d_lo; - s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[3][1] = lo >>> 0; - - lo = s[4][1] + e_lo; - s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[4][1] = lo >>> 0; - - lo = s[5][1] + f_lo; - s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[5][1] = lo >>> 0; + text () { + // The text() method steps are to return the result of running + // consume body with this and UTF-8 decode. + return specConsumeBody(this, utf8DecodeBytes, instance) + }, + + json () { + // The json() method steps are to return the result of running + // consume body with this and parse JSON from bytes. 
+ return specConsumeBody(this, parseJSONFromBytes, instance) + }, + + async formData () { + webidl.brandCheck(this, instance) + + throwIfAborted(this[kState]) + + const contentType = this.headers.get('Content-Type') + + // If mimeType’s essence is "multipart/form-data", then: + if (/multipart\/form-data/.test(contentType)) { + const headers = {} + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value + + const responseFormData = new FormData() + + let busboy + + try { + busboy = new Busboy({ + headers, + preservePath: true + }) + } catch (err) { + throw new DOMException(`${err}`, 'AbortError') + } + + busboy.on('field', (name, value) => { + responseFormData.append(name, value) + }) + busboy.on('file', (name, value, filename, encoding, mimeType) => { + const chunks = [] + + if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { + let base64chunk = '' + + value.on('data', (chunk) => { + base64chunk += chunk.toString().replace(/[\r\n]/gm, '') + + const end = base64chunk.length - base64chunk.length % 4 + chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) + + base64chunk = base64chunk.slice(end) + }) + value.on('end', () => { + chunks.push(Buffer.from(base64chunk, 'base64')) + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } else { + value.on('data', (chunk) => { + chunks.push(chunk) + }) + value.on('end', () => { + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } + }) + + const busboyResolve = new Promise((resolve, reject) => { + busboy.on('finish', resolve) + busboy.on('error', (err) => reject(new TypeError(err))) + }) + + if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) + busboy.end() + await busboyResolve + + return responseFormData + } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: + + // 1. Let entries be the result of parsing bytes. + let entries + try { + let text = '' + // application/x-www-form-urlencoded parser will keep the BOM. + // https://url.spec.whatwg.org/#concept-urlencoded-parser + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + + for await (const chunk of consumeBody(this[kState].body)) { + if (!isUint8Array(chunk)) { + throw new TypeError('Expected Uint8Array chunk') + } + text += streamingDecoder.decode(chunk, { stream: true }) + } + text += streamingDecoder.decode() + entries = new URLSearchParams(text) + } catch (err) { + // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. + // 2. If entries is failure, then throw a TypeError. + throw Object.assign(new TypeError(), { cause: err }) + } - lo = s[6][1] + g_lo; - s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[6][1] = lo >>> 0; + // 3. Return a new FormData object whose entries are entries. + const formData = new FormData() + for (const [name, value] of entries) { + formData.append(name, value) + } + return formData + } else { + // Wait a tick before checking if the request has been aborted. + // Otherwise, a TypeError can be thrown when an AbortError should. + await Promise.resolve() - lo = s[7][1] + h_lo; - s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[7][1] = lo >>> 0; + throwIfAborted(this[kState]) - len -= 128; + // Otherwise, throw a TypeError. 
+ throw webidl.errors.exception({ + header: `${instance.name}.formData`, + message: 'Could not parse content as FormData.' + }) + } + } } -} - -/***/ }), - -/***/ 4280: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/** - * Functions to output keys in SSH-friendly formats. - * - * This is part of the Forge project which may be used under the terms of - * either the BSD License or the GNU General Public License (GPL) Version 2. - * - * See: https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE - * - * @author https://github.com/shellac - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(5104); -__nccwpck_require__(6594); -__nccwpck_require__(279); -__nccwpck_require__(8339); + return methods +} -var ssh = module.exports = forge.ssh = forge.ssh || {}; +function mixinBody (prototype) { + Object.assign(prototype.prototype, bodyMixinMethods(prototype)) +} /** - * Encodes (and optionally encrypts) a private RSA key as a Putty PPK file. - * - * @param privateKey the key. - * @param passphrase a passphrase to protect the key (falsy for no encryption). - * @param comment a comment to include in the key file. - * - * @return the PPK file as a string. + * @see https://fetch.spec.whatwg.org/#concept-body-consume-body + * @param {Response|Request} object + * @param {(value: unknown) => unknown} convertBytesToJSValue + * @param {Response|Request} instance */ -ssh.privateKeyToPutty = function(privateKey, passphrase, comment) { - comment = comment || ''; - passphrase = passphrase || ''; - var algorithm = 'ssh-rsa'; - var encryptionAlgorithm = (passphrase === '') ? 'none' : 'aes256-cbc'; - - var ppk = 'PuTTY-User-Key-File-2: ' + algorithm + '\r\n'; - ppk += 'Encryption: ' + encryptionAlgorithm + '\r\n'; - ppk += 'Comment: ' + comment + '\r\n'; - - // public key into buffer for ppk - var pubbuffer = forge.util.createBuffer(); - _addStringToBuffer(pubbuffer, algorithm); - _addBigIntegerToBuffer(pubbuffer, privateKey.e); - _addBigIntegerToBuffer(pubbuffer, privateKey.n); - - // write public key - var pub = forge.util.encode64(pubbuffer.bytes(), 64); - var length = Math.floor(pub.length / 66) + 1; // 66 = 64 + \r\n - ppk += 'Public-Lines: ' + length + '\r\n'; - ppk += pub; - - // private key into a buffer - var privbuffer = forge.util.createBuffer(); - _addBigIntegerToBuffer(privbuffer, privateKey.d); - _addBigIntegerToBuffer(privbuffer, privateKey.p); - _addBigIntegerToBuffer(privbuffer, privateKey.q); - _addBigIntegerToBuffer(privbuffer, privateKey.qInv); - - // optionally encrypt the private key - var priv; - if(!passphrase) { - // use the unencrypted buffer - priv = forge.util.encode64(privbuffer.bytes(), 64); - } else { - // encrypt RSA key using passphrase - var encLen = privbuffer.length() + 16 - 1; - encLen -= encLen % 16; +async function specConsumeBody (object, convertBytesToJSValue, instance) { + webidl.brandCheck(object, instance) - // pad private key with sha1-d data -- needs to be a multiple of 16 - var padding = _sha1(privbuffer.bytes()); + throwIfAborted(object[kState]) - padding.truncate(padding.length() - encLen + privbuffer.length()); - privbuffer.putBuffer(padding); - - var aeskey = forge.util.createBuffer(); - aeskey.putBuffer(_sha1('\x00\x00\x00\x00', passphrase)); - aeskey.putBuffer(_sha1('\x00\x00\x00\x01', passphrase)); + // 1. If object is unusable, then return a promise rejected + // with a TypeError. 
+ if (bodyUnusable(object[kState].body)) { + throw new TypeError('Body is unusable') + } - // encrypt some bytes using CBC mode - // key is 40 bytes, so truncate *by* 8 bytes - var cipher = forge.aes.createEncryptionCipher(aeskey.truncate(8), 'CBC'); - cipher.start(forge.util.createBuffer().fillWithByte(0, 16)); - cipher.update(privbuffer.copy()); - cipher.finish(); - var encrypted = cipher.output; + // 2. Let promise be a new promise. + const promise = createDeferredPromise() - // Note: this appears to differ from Putty -- is forge wrong, or putty? - // due to padding we finish as an exact multiple of 16 - encrypted.truncate(16); // all padding + // 3. Let errorSteps given error be to reject promise with error. + const errorSteps = (error) => promise.reject(error) - priv = forge.util.encode64(encrypted.bytes(), 64); + // 4. Let successSteps given a byte sequence data be to resolve + // promise with the result of running convertBytesToJSValue + // with data. If that threw an exception, then run errorSteps + // with that exception. + const successSteps = (data) => { + try { + promise.resolve(convertBytesToJSValue(data)) + } catch (e) { + errorSteps(e) + } } - // output private key - length = Math.floor(priv.length / 66) + 1; // 64 + \r\n - ppk += '\r\nPrivate-Lines: ' + length + '\r\n'; - ppk += priv; - - // MAC - var mackey = _sha1('putty-private-key-file-mac-key', passphrase); - - var macbuffer = forge.util.createBuffer(); - _addStringToBuffer(macbuffer, algorithm); - _addStringToBuffer(macbuffer, encryptionAlgorithm); - _addStringToBuffer(macbuffer, comment); - macbuffer.putInt32(pubbuffer.length()); - macbuffer.putBuffer(pubbuffer); - macbuffer.putInt32(privbuffer.length()); - macbuffer.putBuffer(privbuffer); + // 5. If object’s body is null, then run successSteps with an + // empty byte sequence. + if (object[kState].body == null) { + successSteps(new Uint8Array()) + return promise.promise + } - var hmac = forge.hmac.create(); - hmac.start('sha1', mackey); - hmac.update(macbuffer.bytes()); + // 6. Otherwise, fully read object’s body given successSteps, + // errorSteps, and object’s relevant global object. + await fullyReadBody(object[kState].body, successSteps, errorSteps) - ppk += '\r\nPrivate-MAC: ' + hmac.digest().toHex() + '\r\n'; + // 7. Return promise. + return promise.promise +} - return ppk; -}; +// https://fetch.spec.whatwg.org/#body-unusable +function bodyUnusable (body) { + // An object including the Body interface mixin is + // said to be unusable if its body is non-null and + // its body’s stream is disturbed or locked. + return body != null && (body.stream.locked || util.isDisturbed(body.stream)) +} /** - * Encodes a public RSA key as an OpenSSH file. - * - * @param key the key. - * @param comment a comment. - * - * @return the public key in OpenSSH format. + * @see https://encoding.spec.whatwg.org/#utf-8-decode + * @param {Buffer} buffer */ -ssh.publicKeyToOpenSSH = function(key, comment) { - var type = 'ssh-rsa'; - comment = comment || ''; - - var buffer = forge.util.createBuffer(); - _addStringToBuffer(buffer, type); - _addBigIntegerToBuffer(buffer, key.e); - _addBigIntegerToBuffer(buffer, key.n); +function utf8DecodeBytes (buffer) { + if (buffer.length === 0) { + return '' + } - return type + ' ' + forge.util.encode64(buffer.bytes()) + ' ' + comment; -}; + // 1. Let buffer be the result of peeking three bytes from + // ioQueue, converted to a byte sequence. -/** - * Encodes a private RSA key as an OpenSSH file. - * - * @param key the key. 
- * @param passphrase a passphrase to protect the key (falsy for no encryption). - * - * @return the public key in OpenSSH format. - */ -ssh.privateKeyToOpenSSH = function(privateKey, passphrase) { - if(!passphrase) { - return forge.pki.privateKeyToPem(privateKey); + // 2. If buffer is 0xEF 0xBB 0xBF, then read three + // bytes from ioQueue. (Do nothing with those bytes.) + if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { + buffer = buffer.subarray(3) } - // OpenSSH private key is just a legacy format, it seems - return forge.pki.encryptRsaPrivateKey(privateKey, passphrase, - {legacy: true, algorithm: 'aes128'}); -}; -/** - * Gets the SSH fingerprint for the given public key. - * - * @param options the options to use. - * [md] the message digest object to use (defaults to forge.md.md5). - * [encoding] an alternative output encoding, such as 'hex' - * (defaults to none, outputs a byte buffer). - * [delimiter] the delimiter to use between bytes for 'hex' encoded - * output, eg: ':' (defaults to none). - * - * @return the fingerprint as a byte buffer or other encoding based on options. - */ -ssh.getPublicKeyFingerprint = function(key, options) { - options = options || {}; - var md = options.md || forge.md.md5.create(); - - var type = 'ssh-rsa'; - var buffer = forge.util.createBuffer(); - _addStringToBuffer(buffer, type); - _addBigIntegerToBuffer(buffer, key.e); - _addBigIntegerToBuffer(buffer, key.n); - - // hash public key bytes - md.start(); - md.update(buffer.getBytes()); - var digest = md.digest(); - if(options.encoding === 'hex') { - var hex = digest.toHex(); - if(options.delimiter) { - return hex.match(/.{2}/g).join(options.delimiter); - } - return hex; - } else if(options.encoding === 'binary') { - return digest.getBytes(); - } else if(options.encoding) { - throw new Error('Unknown encoding "' + options.encoding + '".'); - } - return digest; -}; + // 3. Process a queue with an instance of UTF-8’s + // decoder, ioQueue, output, and "replacement". + const output = textDecoder.decode(buffer) -/** - * Adds len(val) then val to a buffer. - * - * @param buffer the buffer to add to. - * @param val a big integer. - */ -function _addBigIntegerToBuffer(buffer, val) { - var hexVal = val.toString(16); - // ensure 2s complement +ve - if(hexVal[0] >= '8') { - hexVal = '00' + hexVal; - } - var bytes = forge.util.hexToBytes(hexVal); - buffer.putInt32(bytes.length); - buffer.putBytes(bytes); + // 4. Return output. + return output } /** - * Adds len(val) then val to a buffer. - * - * @param buffer the buffer to add to. - * @param val a string. + * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value + * @param {Uint8Array} bytes */ -function _addStringToBuffer(buffer, val) { - buffer.putInt32(val.length); - buffer.putString(val); +function parseJSONFromBytes (bytes) { + return JSON.parse(utf8DecodeBytes(bytes)) } /** - * Hashes the arguments into one value using SHA-1. - * - * @return the sha1 hash of the provided arguments. 
+ * @see https://fetch.spec.whatwg.org/#concept-body-mime-type + * @param {import('./response').Response|import('./request').Request} object */ -function _sha1() { - var sha = forge.md.sha1.create(); - var num = arguments.length; - for (var i = 0; i < num; ++i) { - sha.update(arguments[i]); +function bodyMimeType (object) { + const { headersList } = object[kState] + const contentType = headersList.get('content-type') + + if (contentType === null) { + return 'failure' } - return sha.digest(); + + return parseMIMEType(contentType) +} + +module.exports = { + extractBody, + safelyExtractBody, + cloneBody, + mixinBody } /***/ }), -/***/ 9167: +/***/ 1037: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * A Javascript implementation of Transport Layer Security (TLS). - * - * @author Dave Longley - * - * Copyright (c) 2009-2014 Digital Bazaar, Inc. - * - * The TLS Handshake Protocol involves the following steps: - * - * - Exchange hello messages to agree on algorithms, exchange random values, - * and check for session resumption. - * - * - Exchange the necessary cryptographic parameters to allow the client and - * server to agree on a premaster secret. - * - * - Exchange certificates and cryptographic information to allow the client - * and server to authenticate themselves. - * - * - Generate a master secret from the premaster secret and exchanged random - * values. - * - * - Provide security parameters to the record layer. - * - * - Allow the client and server to verify that their peer has calculated the - * same security parameters and that the handshake occurred without tampering - * by an attacker. - * - * Up to 4 different messages may be sent during a key exchange. The server - * certificate, the server key exchange, the client certificate, and the - * client key exchange. - * - * A typical handshake (from the client's perspective). - * - * 1. Client sends ClientHello. - * 2. Client receives ServerHello. - * 3. Client receives optional Certificate. - * 4. Client receives optional ServerKeyExchange. - * 5. Client receives ServerHelloDone. - * 6. Client sends optional Certificate. - * 7. Client sends ClientKeyExchange. - * 8. Client sends optional CertificateVerify. - * 9. Client sends ChangeCipherSpec. - * 10. Client sends Finished. - * 11. Client receives ChangeCipherSpec. - * 12. Client receives Finished. - * 13. Client sends/receives application data. - * - * To reuse an existing session: - * - * 1. Client sends ClientHello with session ID for reuse. - * 2. Client receives ServerHello with same session ID if reusing. - * 3. Client receives ChangeCipherSpec message if reusing. - * 4. Client receives Finished. - * 5. Client sends ChangeCipherSpec. - * 6. Client sends Finished. - * - * Note: Client ignores HelloRequest if in the middle of a handshake. - * - * Record Layer: - * - * The record layer fragments information blocks into TLSPlaintext records - * carrying data in chunks of 2^14 bytes or less. Client message boundaries are - * not preserved in the record layer (i.e., multiple client messages of the - * same ContentType MAY be coalesced into a single TLSPlaintext record, or a - * single message MAY be fragmented across several records). - * - * struct { - * uint8 major; - * uint8 minor; - * } ProtocolVersion; - * - * struct { - * ContentType type; - * ProtocolVersion version; - * uint16 length; - * opaque fragment[TLSPlaintext.length]; - * } TLSPlaintext; - * - * type: - * The higher-level protocol used to process the enclosed fragment. 
- * - * version: - * The version of the protocol being employed. TLS Version 1.2 uses version - * {3, 3}. TLS Version 1.0 uses version {3, 1}. Note that a client that - * supports multiple versions of TLS may not know what version will be - * employed before it receives the ServerHello. - * - * length: - * The length (in bytes) of the following TLSPlaintext.fragment. The length - * MUST NOT exceed 2^14 = 16384 bytes. - * - * fragment: - * The application data. This data is transparent and treated as an - * independent block to be dealt with by the higher-level protocol specified - * by the type field. - * - * Implementations MUST NOT send zero-length fragments of Handshake, Alert, or - * ChangeCipherSpec content types. Zero-length fragments of Application data - * MAY be sent as they are potentially useful as a traffic analysis - * countermeasure. - * - * Note: Data of different TLS record layer content types MAY be interleaved. - * Application data is generally of lower precedence for transmission than - * other content types. However, records MUST be delivered to the network in - * the same order as they are protected by the record layer. Recipients MUST - * receive and process interleaved application layer traffic during handshakes - * subsequent to the first one on a connection. - * - * struct { - * ContentType type; // same as TLSPlaintext.type - * ProtocolVersion version;// same as TLSPlaintext.version - * uint16 length; - * opaque fragment[TLSCompressed.length]; - * } TLSCompressed; - * - * length: - * The length (in bytes) of the following TLSCompressed.fragment. - * The length MUST NOT exceed 2^14 + 1024. - * - * fragment: - * The compressed form of TLSPlaintext.fragment. - * - * Note: A CompressionMethod.null operation is an identity operation; no fields - * are altered. In this implementation, since no compression is supported, - * uncompressed records are always the same as compressed records. - * - * Encryption Information: - * - * The encryption and MAC functions translate a TLSCompressed structure into a - * TLSCiphertext. The decryption functions reverse the process. The MAC of the - * record also includes a sequence number so that missing, extra, or repeated - * messages are detectable. - * - * struct { - * ContentType type; - * ProtocolVersion version; - * uint16 length; - * select (SecurityParameters.cipher_type) { - * case stream: GenericStreamCipher; - * case block: GenericBlockCipher; - * case aead: GenericAEADCipher; - * } fragment; - * } TLSCiphertext; - * - * type: - * The type field is identical to TLSCompressed.type. - * - * version: - * The version field is identical to TLSCompressed.version. - * - * length: - * The length (in bytes) of the following TLSCiphertext.fragment. - * The length MUST NOT exceed 2^14 + 2048. - * - * fragment: - * The encrypted form of TLSCompressed.fragment, with the MAC. - * - * Note: Only CBC Block Ciphers are supported by this implementation. - * - * The TLSCompressed.fragment structures are converted to/from block - * TLSCiphertext.fragment structures. - * - * struct { - * opaque IV[SecurityParameters.record_iv_length]; - * block-ciphered struct { - * opaque content[TLSCompressed.length]; - * opaque MAC[SecurityParameters.mac_length]; - * uint8 padding[GenericBlockCipher.padding_length]; - * uint8 padding_length; - * }; - * } GenericBlockCipher; - * - * The MAC is generated as described in Section 6.2.3.1. - * - * IV: - * The Initialization Vector (IV) SHOULD be chosen at random, and MUST be - * unpredictable. 
Note that in versions of TLS prior to 1.1, there was no - * IV field, and the last ciphertext block of the previous record (the "CBC - * residue") was used as the IV. This was changed to prevent the attacks - * described in [CBCATT]. For block ciphers, the IV length is of length - * SecurityParameters.record_iv_length, which is equal to the - * SecurityParameters.block_size. - * - * padding: - * Padding that is added to force the length of the plaintext to be an - * integral multiple of the block cipher's block length. The padding MAY be - * any length up to 255 bytes, as long as it results in the - * TLSCiphertext.length being an integral multiple of the block length. - * Lengths longer than necessary might be desirable to frustrate attacks on - * a protocol that are based on analysis of the lengths of exchanged - * messages. Each uint8 in the padding data vector MUST be filled with the - * padding length value. The receiver MUST check this padding and MUST use - * the bad_record_mac alert to indicate padding errors. - * - * padding_length: - * The padding length MUST be such that the total size of the - * GenericBlockCipher structure is a multiple of the cipher's block length. - * Legal values range from zero to 255, inclusive. This length specifies the - * length of the padding field exclusive of the padding_length field itself. - * - * The encrypted data length (TLSCiphertext.length) is one more than the sum of - * SecurityParameters.block_length, TLSCompressed.length, - * SecurityParameters.mac_length, and padding_length. - * - * Example: If the block length is 8 bytes, the content length - * (TLSCompressed.length) is 61 bytes, and the MAC length is 20 bytes, then the - * length before padding is 82 bytes (this does not include the IV. Thus, the - * padding length modulo 8 must be equal to 6 in order to make the total length - * an even multiple of 8 bytes (the block length). The padding length can be - * 6, 14, 22, and so on, through 254. If the padding length were the minimum - * necessary, 6, the padding would be 6 bytes, each containing the value 6. - * Thus, the last 8 octets of the GenericBlockCipher before block encryption - * would be xx 06 06 06 06 06 06 06, where xx is the last octet of the MAC. - * - * Note: With block ciphers in CBC mode (Cipher Block Chaining), it is critical - * that the entire plaintext of the record be known before any ciphertext is - * transmitted. Otherwise, it is possible for the attacker to mount the attack - * described in [CBCATT]. - * - * Implementation note: Canvel et al. [CBCTIME] have demonstrated a timing - * attack on CBC padding based on the time required to compute the MAC. In - * order to defend against this attack, implementations MUST ensure that - * record processing time is essentially the same whether or not the padding - * is correct. In general, the best way to do this is to compute the MAC even - * if the padding is incorrect, and only then reject the packet. For instance, - * if the pad appears to be incorrect, the implementation might assume a - * zero-length pad and then compute the MAC. This leaves a small timing - * channel, since MAC performance depends, to some extent, on the size of the - * data fragment, but it is not believed to be large enough to be exploitable, - * due to the large block size of existing MACs and the small size of the - * timing signal. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(5104); -__nccwpck_require__(6594); -__nccwpck_require__(154); -__nccwpck_require__(6924); -__nccwpck_require__(7821); -__nccwpck_require__(279); -__nccwpck_require__(8339); - -/** - * Generates pseudo random bytes by mixing the result of two hash functions, - * MD5 and SHA-1. - * - * prf_TLS1(secret, label, seed) = - * P_MD5(S1, label + seed) XOR P_SHA-1(S2, label + seed); - * - * Each P_hash function functions as follows: - * - * P_hash(secret, seed) = HMAC_hash(secret, A(1) + seed) + - * HMAC_hash(secret, A(2) + seed) + - * HMAC_hash(secret, A(3) + seed) + ... - * A() is defined as: - * A(0) = seed - * A(i) = HMAC_hash(secret, A(i-1)) - * - * The '+' operator denotes concatenation. - * - * As many iterations A(N) as are needed are performed to generate enough - * pseudo random byte output. If an iteration creates more data than is - * necessary, then it is truncated. - * - * Therefore: - * A(1) = HMAC_hash(secret, A(0)) - * = HMAC_hash(secret, seed) - * A(2) = HMAC_hash(secret, A(1)) - * = HMAC_hash(secret, HMAC_hash(secret, seed)) - * - * Therefore: - * P_hash(secret, seed) = - * HMAC_hash(secret, HMAC_hash(secret, A(0)) + seed) + - * HMAC_hash(secret, HMAC_hash(secret, A(1)) + seed) + - * ... - * - * Therefore: - * P_hash(secret, seed) = - * HMAC_hash(secret, HMAC_hash(secret, seed) + seed) + - * HMAC_hash(secret, HMAC_hash(secret, HMAC_hash(secret, seed)) + seed) + - * ... - * - * @param secret the secret to use. - * @param label the label to use. - * @param seed the seed value to use. - * @param length the number of bytes to generate. - * - * @return the pseudo random bytes in a byte buffer. - */ -var prf_TLS1 = function(secret, label, seed, length) { - var rval = forge.util.createBuffer(); - - /* For TLS 1.0, the secret is split in half, into two secrets of equal - length. If the secret has an odd length then the last byte of the first - half will be the same as the first byte of the second. The length of the - two secrets is half of the secret rounded up. 
*/ - var idx = (secret.length >> 1); - var slen = idx + (secret.length & 1); - var s1 = secret.substr(0, slen); - var s2 = secret.substr(idx, slen); - var ai = forge.util.createBuffer(); - var hmac = forge.hmac.create(); - seed = label + seed; - - // determine the number of iterations that must be performed to generate - // enough output bytes, md5 creates 16 byte hashes, sha1 creates 20 - var md5itr = Math.ceil(length / 16); - var sha1itr = Math.ceil(length / 20); - - // do md5 iterations - hmac.start('MD5', s1); - var md5bytes = forge.util.createBuffer(); - ai.putBytes(seed); - for(var i = 0; i < md5itr; ++i) { - // HMAC_hash(secret, A(i-1)) - hmac.start(null, null); - hmac.update(ai.getBytes()); - ai.putBuffer(hmac.digest()); - - // HMAC_hash(secret, A(i) + seed) - hmac.start(null, null); - hmac.update(ai.bytes() + seed); - md5bytes.putBuffer(hmac.digest()); - } - - // do sha1 iterations - hmac.start('SHA1', s2); - var sha1bytes = forge.util.createBuffer(); - ai.clear(); - ai.putBytes(seed); - for(var i = 0; i < sha1itr; ++i) { - // HMAC_hash(secret, A(i-1)) - hmac.start(null, null); - hmac.update(ai.getBytes()); - ai.putBuffer(hmac.digest()); - - // HMAC_hash(secret, A(i) + seed) - hmac.start(null, null); - hmac.update(ai.bytes() + seed); - sha1bytes.putBuffer(hmac.digest()); - } - - // XOR the md5 bytes with the sha1 bytes - rval.putBytes(forge.util.xorBytes( - md5bytes.getBytes(), sha1bytes.getBytes(), length)); - - return rval; -}; - -/** - * Generates pseudo random bytes using a SHA256 algorithm. For TLS 1.2. - * - * @param secret the secret to use. - * @param label the label to use. - * @param seed the seed value to use. - * @param length the number of bytes to generate. - * - * @return the pseudo random bytes in a byte buffer. - */ -var prf_sha256 = function(secret, label, seed, length) { - // FIXME: implement me for TLS 1.2 -}; - -/** - * Gets a MAC for a record using the SHA-1 hash algorithm. - * - * @param key the mac key. - * @param state the sequence number (array of two 32-bit integers). - * @param record the record. - * - * @return the sha-1 hash (20 bytes) for the given record. - */ -var hmac_sha1 = function(key, seqNum, record) { - /* MAC is computed like so: - HMAC_hash( - key, seqNum + - TLSCompressed.type + - TLSCompressed.version + - TLSCompressed.length + - TLSCompressed.fragment) - */ - var hmac = forge.hmac.create(); - hmac.start('SHA1', key); - var b = forge.util.createBuffer(); - b.putInt32(seqNum[0]); - b.putInt32(seqNum[1]); - b.putByte(record.type); - b.putByte(record.version.major); - b.putByte(record.version.minor); - b.putInt16(record.length); - b.putBytes(record.fragment.bytes()); - hmac.update(b.getBytes()); - return hmac.digest().getBytes(); -}; +"use strict"; -/** - * Compresses the TLSPlaintext record into a TLSCompressed record using the - * deflate algorithm. - * - * @param c the TLS connection. - * @param record the TLSPlaintext record to compress. - * @param s the ConnectionState to use. - * - * @return true on success, false on failure. 
- */ -var deflate = function(c, record, s) { - var rval = false; +const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(1267) + +const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) + +const nullBodyStatus = [101, 204, 205, 304] + +const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) + +// https://fetch.spec.whatwg.org/#block-bad-port +const badPorts = [ + '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', + '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', + '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', + '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', + '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', + '10080' +] + +const badPortsSet = new Set(badPorts) + +// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies +const referrerPolicy = [ + '', + 'no-referrer', + 'no-referrer-when-downgrade', + 'same-origin', + 'origin', + 'strict-origin', + 'origin-when-cross-origin', + 'strict-origin-when-cross-origin', + 'unsafe-url' +] +const referrerPolicySet = new Set(referrerPolicy) + +const requestRedirect = ['follow', 'manual', 'error'] + +const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) + +const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + +const requestCredentials = ['omit', 'same-origin', 'include'] + +const requestCache = [ + 'default', + 'no-store', + 'reload', + 'no-cache', + 'force-cache', + 'only-if-cached' +] + +// https://fetch.spec.whatwg.org/#request-body-header-name +const requestBodyHeader = [ + 'content-encoding', + 'content-language', + 'content-location', + 'content-type', + // See https://github.com/nodejs/undici/issues/2021 + // 'Content-Length' is a forbidden header name, which is typically + // removed in the Headers implementation. However, undici doesn't + // filter out headers, so we add it here. + 'content-length' +] + +// https://fetch.spec.whatwg.org/#enumdef-requestduplex +const requestDuplex = [ + 'half' +] + +// http://fetch.spec.whatwg.org/#forbidden-method +const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) + +const subresource = [ + 'audio', + 'audioworklet', + 'font', + 'image', + 'manifest', + 'paintworklet', + 'script', + 'style', + 'track', + 'video', + 'xslt', + '' +] +const subresourceSet = new Set(subresource) + +/** @type {globalThis['DOMException']} */ +const DOMException = globalThis.DOMException ?? (() => { + // DOMException was only made a global in Node v17.0.0, + // but fetch supports >= v16.8. try { - var bytes = c.deflate(record.fragment.getBytes()); - record.fragment = forge.util.createBuffer(bytes); - record.length = bytes.length; - rval = true; - } catch(ex) { - // deflate error, fail out + atob('~') + } catch (err) { + return Object.getPrototypeOf(err).constructor } +})() - return rval; -}; +let channel -/** - * Decompresses the TLSCompressed record into a TLSPlaintext record using the - * deflate algorithm. - * - * @param c the TLS connection. - * @param record the TLSCompressed record to decompress. - * @param s the ConnectionState to use. 
- * - * @return true on success, false on failure. - */ -var inflate = function(c, record, s) { - var rval = false; +/** @type {globalThis['structuredClone']} */ +const structuredClone = + globalThis.structuredClone ?? + // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js + // structuredClone was added in v17.0.0, but fetch supports v16.8 + function structuredClone (value, options = undefined) { + if (arguments.length === 0) { + throw new TypeError('missing argument') + } - try { - var bytes = c.inflate(record.fragment.getBytes()); - record.fragment = forge.util.createBuffer(bytes); - record.length = bytes.length; - rval = true; - } catch(ex) { - // inflate error, fail out + if (!channel) { + channel = new MessageChannel() + } + channel.port1.unref() + channel.port2.unref() + channel.port1.postMessage(value, options?.transfer) + return receiveMessageOnPort(channel.port2).message } - return rval; -}; - -/** - * Reads a TLS variable-length vector from a byte buffer. - * - * Variable-length vectors are defined by specifying a subrange of legal - * lengths, inclusively, using the notation . When these are - * encoded, the actual length precedes the vector's contents in the byte - * stream. The length will be in the form of a number consuming as many bytes - * as required to hold the vector's specified maximum (ceiling) length. A - * variable-length vector with an actual length field of zero is referred to - * as an empty vector. - * - * @param b the byte buffer. - * @param lenBytes the number of bytes required to store the length. - * - * @return the resulting byte buffer. - */ -var readVector = function(b, lenBytes) { - var len = 0; - switch(lenBytes) { - case 1: - len = b.getByte(); - break; - case 2: - len = b.getInt16(); - break; - case 3: - len = b.getInt24(); - break; - case 4: - len = b.getInt32(); - break; - } - - // read vector bytes into a new buffer - return forge.util.createBuffer(b.getBytes(len)); -}; - -/** - * Writes a TLS variable-length vector to a byte buffer. - * - * @param b the byte buffer. - * @param lenBytes the number of bytes required to store the length. - * @param v the byte buffer vector. - */ -var writeVector = function(b, lenBytes, v) { - // encode length at the start of the vector, where the number of bytes for - // the length is the maximum number of bytes it would take to encode the - // vector's ceiling - b.putInt(v.length(), lenBytes << 3); - b.putBuffer(v); -}; - -/** - * The tls implementation. - */ -var tls = {}; +module.exports = { + DOMException, + structuredClone, + subresource, + forbiddenMethods, + requestBodyHeader, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + redirectStatus, + corsSafeListedMethods, + nullBodyStatus, + safeMethods, + badPorts, + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet +} -/** - * Version: TLS 1.2 = 3.3, TLS 1.1 = 3.2, TLS 1.0 = 3.1. Both TLS 1.1 and - * TLS 1.2 were still too new (ie: openSSL didn't implement them) at the time - * of this implementation so TLS 1.0 was implemented instead. - */ -tls.Versions = { - TLS_1_0: {major: 3, minor: 1}, - TLS_1_1: {major: 3, minor: 2}, - TLS_1_2: {major: 3, minor: 3} -}; -tls.SupportedVersions = [ - tls.Versions.TLS_1_1, - tls.Versions.TLS_1_0 -]; -tls.Version = tls.SupportedVersions[0]; -/** - * Maximum fragment size. 
True maximum is 16384, but we fragment before that - * to allow for unusual small increases during compression. - */ -tls.MaxFragment = 16384 - 1024; +/***/ }), -/** - * Whether this entity is considered the "client" or "server". - * enum { server, client } ConnectionEnd; - */ -tls.ConnectionEnd = { - server: 0, - client: 1 -}; +/***/ 685: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Pseudo-random function algorithm used to generate keys from the master - * secret. - * enum { tls_prf_sha256 } PRFAlgorithm; - */ -tls.PRFAlgorithm = { - tls_prf_sha256: 0 -}; +const assert = __nccwpck_require__(9491) +const { atob } = __nccwpck_require__(4300) +const { isomorphicDecode } = __nccwpck_require__(2538) -/** - * Bulk encryption algorithms. - * enum { null, rc4, des3, aes } BulkCipherAlgorithm; - */ -tls.BulkCipherAlgorithm = { - none: null, - rc4: 0, - des3: 1, - aes: 2 -}; +const encoder = new TextEncoder() /** - * Cipher types. - * enum { stream, block, aead } CipherType; + * @see https://mimesniff.spec.whatwg.org/#http-token-code-point */ -tls.CipherType = { - stream: 0, - block: 1, - aead: 2 -}; - +const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ +const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line /** - * MAC (Message Authentication Code) algorithms. - * enum { null, hmac_md5, hmac_sha1, hmac_sha256, - * hmac_sha384, hmac_sha512} MACAlgorithm; + * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point */ -tls.MACAlgorithm = { - none: null, - hmac_md5: 0, - hmac_sha1: 1, - hmac_sha256: 2, - hmac_sha384: 3, - hmac_sha512: 4 -}; +const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line -/** - * Compression algorithms. - * enum { null(0), deflate(1), (255) } CompressionMethod; - */ -tls.CompressionMethod = { - none: 0, - deflate: 1 -}; +// https://fetch.spec.whatwg.org/#data-url-processor +/** @param {URL} dataURL */ +function dataURLProcessor (dataURL) { + // 1. Assert: dataURL’s scheme is "data". + assert(dataURL.protocol === 'data:') -/** - * TLS record content types. - * enum { - * change_cipher_spec(20), alert(21), handshake(22), - * application_data(23), (255) - * } ContentType; - */ -tls.ContentType = { - change_cipher_spec: 20, - alert: 21, - handshake: 22, - application_data: 23, - heartbeat: 24 -}; + // 2. Let input be the result of running the URL + // serializer on dataURL with exclude fragment + // set to true. + let input = URLSerializer(dataURL, true) -/** - * TLS handshake types. - * enum { - * hello_request(0), client_hello(1), server_hello(2), - * certificate(11), server_key_exchange (12), - * certificate_request(13), server_hello_done(14), - * certificate_verify(15), client_key_exchange(16), - * finished(20), (255) - * } HandshakeType; - */ -tls.HandshakeType = { - hello_request: 0, - client_hello: 1, - server_hello: 2, - certificate: 11, - server_key_exchange: 12, - certificate_request: 13, - server_hello_done: 14, - certificate_verify: 15, - client_key_exchange: 16, - finished: 20 -}; + // 3. Remove the leading "data:" string from input. + input = input.slice(5) -/** - * TLS Alert Protocol. 
- * - * enum { warning(1), fatal(2), (255) } AlertLevel; - * - * enum { - * close_notify(0), - * unexpected_message(10), - * bad_record_mac(20), - * decryption_failed(21), - * record_overflow(22), - * decompression_failure(30), - * handshake_failure(40), - * bad_certificate(42), - * unsupported_certificate(43), - * certificate_revoked(44), - * certificate_expired(45), - * certificate_unknown(46), - * illegal_parameter(47), - * unknown_ca(48), - * access_denied(49), - * decode_error(50), - * decrypt_error(51), - * export_restriction(60), - * protocol_version(70), - * insufficient_security(71), - * internal_error(80), - * user_canceled(90), - * no_renegotiation(100), - * (255) - * } AlertDescription; - * - * struct { - * AlertLevel level; - * AlertDescription description; - * } Alert; - */ -tls.Alert = {}; -tls.Alert.Level = { - warning: 1, - fatal: 2 -}; -tls.Alert.Description = { - close_notify: 0, - unexpected_message: 10, - bad_record_mac: 20, - decryption_failed: 21, - record_overflow: 22, - decompression_failure: 30, - handshake_failure: 40, - bad_certificate: 42, - unsupported_certificate: 43, - certificate_revoked: 44, - certificate_expired: 45, - certificate_unknown: 46, - illegal_parameter: 47, - unknown_ca: 48, - access_denied: 49, - decode_error: 50, - decrypt_error: 51, - export_restriction: 60, - protocol_version: 70, - insufficient_security: 71, - internal_error: 80, - user_canceled: 90, - no_renegotiation: 100 -}; + // 4. Let position point at the start of input. + const position = { position: 0 } -/** - * TLS Heartbeat Message types. - * enum { - * heartbeat_request(1), - * heartbeat_response(2), - * (255) - * } HeartbeatMessageType; - */ -tls.HeartbeatMessageType = { - heartbeat_request: 1, - heartbeat_response: 2 -}; + // 5. Let mimeType be the result of collecting a + // sequence of code points that are not equal + // to U+002C (,), given position. + let mimeType = collectASequenceOfCodePointsFast( + ',', + input, + position + ) -/** - * Supported cipher suites. - */ -tls.CipherSuites = {}; + // 6. Strip leading and trailing ASCII whitespace + // from mimeType. + // Undici implementation note: we need to store the + // length because if the mimetype has spaces removed, + // the wrong amount will be sliced from the input in + // step #9 + const mimeTypeLength = mimeType.length + mimeType = removeASCIIWhitespace(mimeType, true, true) -/** - * Gets a supported cipher suite from its 2 byte ID. - * - * @param twoBytes two bytes in a string. - * - * @return the matching supported cipher suite or null. - */ -tls.getCipherSuite = function(twoBytes) { - var rval = null; - for(var key in tls.CipherSuites) { - var cs = tls.CipherSuites[key]; - if(cs.id[0] === twoBytes.charCodeAt(0) && - cs.id[1] === twoBytes.charCodeAt(1)) { - rval = cs; - break; - } + // 7. If position is past the end of input, then + // return failure + if (position.position >= input.length) { + return 'failure' } - return rval; -}; -/** - * Called when an unexpected record is encountered. - * - * @param c the connection. - * @param record the record. - */ -tls.handleUnexpected = function(c, record) { - // if connection is client and closed, ignore unexpected messages - var ignore = (!c.open && c.entity === tls.ConnectionEnd.client); - if(!ignore) { - c.error(c, { - message: 'Unexpected message. Received TLS record out of order.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.unexpected_message - } - }); - } -}; + // 8. Advance position by 1. 
+ position.position++ -/** - * Called when a client receives a HelloRequest record. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleHelloRequest = function(c, record, length) { - // ignore renegotiation requests from the server during a handshake, but - // if handshaking, send a warning alert that renegotation is denied - if(!c.handshaking && c.handshakes > 0) { - // send alert warning - tls.queue(c, tls.createAlert(c, { - level: tls.Alert.Level.warning, - description: tls.Alert.Description.no_renegotiation - })); - tls.flush(c); - } + // 9. Let encodedBody be the remainder of input. + const encodedBody = input.slice(mimeTypeLength + 1) - // continue - c.process(); -}; + // 10. Let body be the percent-decoding of encodedBody. + let body = stringPercentDecode(encodedBody) -/** - * Parses a hello message from a ClientHello or ServerHello record. - * - * @param record the record to parse. - * - * @return the parsed message. - */ -tls.parseHelloMessage = function(c, record, length) { - var msg = null; - - var client = (c.entity === tls.ConnectionEnd.client); - - // minimum of 38 bytes in message - if(length < 38) { - c.error(c, { - message: client ? - 'Invalid ServerHello message. Message too short.' : - 'Invalid ClientHello message. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } else { - // use 'remaining' to calculate # of remaining bytes in the message - var b = record.fragment; - var remaining = b.length(); - msg = { - version: { - major: b.getByte(), - minor: b.getByte() - }, - random: forge.util.createBuffer(b.getBytes(32)), - session_id: readVector(b, 1), - extensions: [] - }; - if(client) { - msg.cipher_suite = b.getBytes(2); - msg.compression_method = b.getByte(); - } else { - msg.cipher_suites = readVector(b, 2); - msg.compression_methods = readVector(b, 1); - } - - // read extensions if there are any bytes left in the message - remaining = length - (remaining - b.length()); - if(remaining > 0) { - // parse extensions - var exts = readVector(b, 2); - while(exts.length() > 0) { - msg.extensions.push({ - type: [exts.getByte(), exts.getByte()], - data: readVector(exts, 2) - }); - } + // 11. If mimeType ends with U+003B (;), followed by + // zero or more U+0020 SPACE, followed by an ASCII + // case-insensitive match for "base64", then: + if (/;(\u0020){0,}base64$/i.test(mimeType)) { + // 1. Let stringBody be the isomorphic decode of body. + const stringBody = isomorphicDecode(body) - // TODO: make extension support modular - if(!client) { - for(var i = 0; i < msg.extensions.length; ++i) { - var ext = msg.extensions[i]; - - // support SNI extension - if(ext.type[0] === 0x00 && ext.type[1] === 0x00) { - // get server name list - var snl = readVector(ext.data, 2); - while(snl.length() > 0) { - // read server name type - var snType = snl.getByte(); - - // only HostName type (0x00) is known, break out if - // another type is detected - if(snType !== 0x00) { - break; - } + // 2. Set body to the forgiving-base64 decode of + // stringBody. + body = forgivingBase64(stringBody) - // add host name to server name list - c.session.extensions.server_name.serverNameList.push( - readVector(snl, 2).getBytes()); - } - } - } - } + // 3. If body is failure, then return failure. 
+ if (body === 'failure') { + return 'failure' } - // version already set, do not allow version change - if(c.session.version) { - if(msg.version.major !== c.session.version.major || - msg.version.minor !== c.session.version.minor) { - return c.error(c, { - message: 'TLS version change is disallowed during renegotiation.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.protocol_version - } - }); - } - } + // 4. Remove the last 6 code points from mimeType. + mimeType = mimeType.slice(0, -6) - // get the chosen (ServerHello) cipher suite - if(client) { - // FIXME: should be checking configured acceptable cipher suites - c.session.cipherSuite = tls.getCipherSuite(msg.cipher_suite); - } else { - // get a supported preferred (ClientHello) cipher suite - // choose the first supported cipher suite - var tmp = forge.util.createBuffer(msg.cipher_suites.bytes()); - while(tmp.length() > 0) { - // FIXME: should be checking configured acceptable suites - // cipher suites take up 2 bytes - c.session.cipherSuite = tls.getCipherSuite(tmp.getBytes(2)); - if(c.session.cipherSuite !== null) { - break; - } - } - } + // 5. Remove trailing U+0020 SPACE code points from mimeType, + // if any. + mimeType = mimeType.replace(/(\u0020)+$/, '') - // cipher suite not supported - if(c.session.cipherSuite === null) { - return c.error(c, { - message: 'No cipher suites in common.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.handshake_failure - }, - cipherSuite: forge.util.bytesToHex(msg.cipher_suite) - }); - } + // 6. Remove the last U+003B (;) code point from mimeType. + mimeType = mimeType.slice(0, -1) + } - // TODO: handle compression methods - if(client) { - c.session.compressionMethod = msg.compression_method; - } else { - // no compression - c.session.compressionMethod = tls.CompressionMethod.none; - } + // 12. If mimeType starts with U+003B (;), then prepend + // "text/plain" to mimeType. + if (mimeType.startsWith(';')) { + mimeType = 'text/plain' + mimeType } - return msg; -}; + // 13. Let mimeTypeRecord be the result of parsing + // mimeType. + let mimeTypeRecord = parseMIMEType(mimeType) -/** - * Creates security parameters for the given connection based on the given - * hello message. - * - * @param c the TLS connection. - * @param msg the hello message. - */ -tls.createSecurityParameters = function(c, msg) { - /* Note: security params are from TLS 1.2, some values like prf_algorithm - are ignored for TLS 1.0/1.1 and the builtin as specified in the spec is - used. */ - - // TODO: handle other options from server when more supported - - // get client and server randoms - var client = (c.entity === tls.ConnectionEnd.client); - var msgRandom = msg.random.bytes(); - var cRandom = client ? c.session.sp.client_random : msgRandom; - var sRandom = client ? msgRandom : tls.createRandom().getBytes(); - - // create new security parameters - c.session.sp = { - entity: c.entity, - prf_algorithm: tls.PRFAlgorithm.tls_prf_sha256, - bulk_cipher_algorithm: null, - cipher_type: null, - enc_key_length: null, - block_length: null, - fixed_iv_length: null, - record_iv_length: null, - mac_algorithm: null, - mac_length: null, - mac_key_length: null, - compression_algorithm: c.session.compressionMethod, - pre_master_secret: null, - master_secret: null, - client_random: cRandom, - server_random: sRandom - }; -}; + // 14. If mimeTypeRecord is failure, then set + // mimeTypeRecord to text/plain;charset=US-ASCII. 
+ if (mimeTypeRecord === 'failure') { + mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + } + + // 15. Return a new data: URL struct whose MIME + // type is mimeTypeRecord and body is body. + // https://fetch.spec.whatwg.org/#data-url-struct + return { mimeType: mimeTypeRecord, body } +} +// https://url.spec.whatwg.org/#concept-url-serializer /** - * Called when a client receives a ServerHello record. - * - * When a ServerHello message will be sent: - * The server will send this message in response to a client hello message - * when it was able to find an acceptable set of algorithms. If it cannot - * find such a match, it will respond with a handshake failure alert. - * - * uint24 length; - * struct { - * ProtocolVersion server_version; - * Random random; - * SessionID session_id; - * CipherSuite cipher_suite; - * CompressionMethod compression_method; - * select(extensions_present) { - * case false: - * struct {}; - * case true: - * Extension extensions<0..2^16-1>; - * }; - * } ServerHello; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. + * @param {URL} url + * @param {boolean} excludeFragment */ -tls.handleServerHello = function(c, record, length) { - var msg = tls.parseHelloMessage(c, record, length); - if(c.fail) { - return; - } - - // ensure server version is compatible - if(msg.version.minor <= c.version.minor) { - c.version.minor = msg.version.minor; - } else { - return c.error(c, { - message: 'Incompatible TLS version.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.protocol_version - } - }); +function URLSerializer (url, excludeFragment = false) { + if (!excludeFragment) { + return url.href } - // indicate session version has been set - c.session.version = c.version; + const href = url.href + const hashLength = url.hash.length - // get the session ID from the message - var sessionId = msg.session_id.bytes(); + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) +} - // if the session ID is not blank and matches the cached one, resume - // the session - if(sessionId.length > 0 && sessionId === c.session.id) { - // resuming session, expect a ChangeCipherSpec next - c.expect = SCC; - c.session.resuming = true; +// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points +/** + * @param {(char: string) => boolean} condition + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePoints (condition, input, position) { + // 1. Let result be the empty string. + let result = '' - // get new server random - c.session.sp.server_random = msg.random.bytes(); - } else { - // not resuming, expect a server Certificate message next - c.expect = SCE; - c.session.resuming = false; + // 2. While position doesn’t point past the end of input and the + // code point at position within input meets the condition condition: + while (position.position < input.length && condition(input[position.position])) { + // 1. Append that code point to the end of result. + result += input[position.position] - // create new security parameters - tls.createSecurityParameters(c, msg); + // 2. Advance position by 1. + position.position++ } - // set new session ID - c.session.id = sessionId; - - // continue - c.process(); -}; + // 3. Return result. + return result +} /** - * Called when a server receives a ClientHello record. 
- * - * When a ClientHello message will be sent: - * When a client first connects to a server it is required to send the - * client hello as its first message. The client can also send a client - * hello in response to a hello request or on its own initiative in order - * to renegotiate the security parameters in an existing connection. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. + * A faster collectASequenceOfCodePoints that only works when comparing a single character. + * @param {string} char + * @param {string} input + * @param {{ position: number }} position */ -tls.handleClientHello = function(c, record, length) { - var msg = tls.parseHelloMessage(c, record, length); - if(c.fail) { - return; - } - - // get the session ID from the message - var sessionId = msg.session_id.bytes(); +function collectASequenceOfCodePointsFast (char, input, position) { + const idx = input.indexOf(char, position.position) + const start = position.position - // see if the given session ID is in the cache - var session = null; - if(c.sessionCache) { - session = c.sessionCache.getSession(sessionId); - if(session === null) { - // session ID not found - sessionId = ''; - } else if(session.version.major !== msg.version.major || - session.version.minor > msg.version.minor) { - // if session version is incompatible with client version, do not resume - session = null; - sessionId = ''; - } - } - - // no session found to resume, generate a new session ID - if(sessionId.length === 0) { - sessionId = forge.random.getBytes(32); + if (idx === -1) { + position.position = input.length + return input.slice(start) } - // update session - c.session.id = sessionId; - c.session.clientHelloVersion = msg.version; - c.session.sp = {}; - if(session) { - // use version and security parameters from resumed session - c.version = c.session.version = session.version; - c.session.sp = session.sp; - } else { - // use highest compatible minor version - var version; - for(var i = 1; i < tls.SupportedVersions.length; ++i) { - version = tls.SupportedVersions[i]; - if(version.minor <= msg.version.minor) { - break; - } - } - c.version = {major: version.major, minor: version.minor}; - c.session.version = c.version; - } + position.position = idx + return input.slice(start, position.position) +} - // if a session is set, resume it - if(session !== null) { - // resuming session, expect a ChangeCipherSpec next - c.expect = CCC; - c.session.resuming = true; +// https://url.spec.whatwg.org/#string-percent-decode +/** @param {string} input */ +function stringPercentDecode (input) { + // 1. Let bytes be the UTF-8 encoding of input. + const bytes = encoder.encode(input) - // get new client random - c.session.sp.client_random = msg.random.bytes(); - } else { - // not resuming, expect a Certificate or ClientKeyExchange - c.expect = (c.verifyClient !== false) ? CCE : CKE; - c.session.resuming = false; + // 2. Return the percent-decoding of bytes. + return percentDecode(bytes) +} - // create new security parameters - tls.createSecurityParameters(c, msg); - } +// https://url.spec.whatwg.org/#percent-decode +/** @param {Uint8Array} input */ +function percentDecode (input) { + // 1. Let output be an empty byte sequence. + /** @type {number[]} */ + const output = [] - // connection now open - c.open = true; + // 2. 
For each byte byte in input: + for (let i = 0; i < input.length; i++) { + const byte = input[i] - // queue server hello - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createServerHello(c) - })); + // 1. If byte is not 0x25 (%), then append byte to output. + if (byte !== 0x25) { + output.push(byte) - if(c.session.resuming) { - // queue change cipher spec message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.change_cipher_spec, - data: tls.createChangeCipherSpec() - })); + // 2. Otherwise, if byte is 0x25 (%) and the next two bytes + // after byte in input are not in the ranges + // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), + // and 0x61 (a) to 0x66 (f), all inclusive, append byte + // to output. + } else if ( + byte === 0x25 && + !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) + ) { + output.push(0x25) - // create pending state - c.state.pending = tls.createConnectionState(c); + // 3. Otherwise: + } else { + // 1. Let bytePoint be the two bytes after byte in input, + // decoded, and then interpreted as hexadecimal number. + const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) + const bytePoint = Number.parseInt(nextTwoBytes, 16) + + // 2. Append a byte whose value is bytePoint to output. + output.push(bytePoint) + + // 3. Skip the next two bytes in input. + i += 2 + } + } + + // 3. Return output. + return Uint8Array.from(output) +} + +// https://mimesniff.spec.whatwg.org/#parse-a-mime-type +/** @param {string} input */ +function parseMIMEType (input) { + // 1. Remove any leading and trailing HTTP whitespace + // from input. + input = removeHTTPWhitespace(input, true, true) + + // 2. Let position be a position variable for input, + // initially pointing at the start of input. + const position = { position: 0 } + + // 3. Let type be the result of collecting a sequence + // of code points that are not U+002F (/) from + // input, given position. + const type = collectASequenceOfCodePointsFast( + '/', + input, + position + ) + + // 4. If type is the empty string or does not solely + // contain HTTP token code points, then return failure. + // https://mimesniff.spec.whatwg.org/#http-token-code-point + if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { + return 'failure' + } + + // 5. If position is past the end of input, then return + // failure + if (position.position > input.length) { + return 'failure' + } + + // 6. Advance position by 1. (This skips past U+002F (/).) + position.position++ + + // 7. Let subtype be the result of collecting a sequence of + // code points that are not U+003B (;) from input, given + // position. + let subtype = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 8. Remove any trailing HTTP whitespace from subtype. + subtype = removeHTTPWhitespace(subtype, false, true) + + // 9. If subtype is the empty string or does not solely + // contain HTTP token code points, then return failure. + if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { + return 'failure' + } + + const typeLowercase = type.toLowerCase() + const subtypeLowercase = subtype.toLowerCase() + + // 10. Let mimeType be a new MIME type record whose type + // is type, in ASCII lowercase, and subtype is subtype, + // in ASCII lowercase. 
+ // https://mimesniff.spec.whatwg.org/#mime-type + const mimeType = { + type: typeLowercase, + subtype: subtypeLowercase, + /** @type {Map} */ + parameters: new Map(), + // https://mimesniff.spec.whatwg.org/#mime-type-essence + essence: `${typeLowercase}/${subtypeLowercase}` + } + + // 11. While position is not past the end of input: + while (position.position < input.length) { + // 1. Advance position by 1. (This skips past U+003B (;).) + position.position++ + + // 2. Collect a sequence of code points that are HTTP + // whitespace from input given position. + collectASequenceOfCodePoints( + // https://fetch.spec.whatwg.org/#http-whitespace + char => HTTP_WHITESPACE_REGEX.test(char), + input, + position + ) - // change current write state to pending write state - c.state.current.write = c.state.pending.write; + // 3. Let parameterName be the result of collecting a + // sequence of code points that are not U+003B (;) + // or U+003D (=) from input, given position. + let parameterName = collectASequenceOfCodePoints( + (char) => char !== ';' && char !== '=', + input, + position + ) - // queue finished - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createFinished(c) - })); - } else { - // queue server certificate - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificate(c) - })); + // 4. Set parameterName to parameterName, in ASCII + // lowercase. + parameterName = parameterName.toLowerCase() - if(!c.fail) { - // queue server key exchange - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createServerKeyExchange(c) - })); - - // request client certificate if set - if(c.verifyClient !== false) { - // queue certificate request - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificateRequest(c) - })); + // 5. If position is not past the end of input, then: + if (position.position < input.length) { + // 1. If the code point at position within input is + // U+003B (;), then continue. + if (input[position.position] === ';') { + continue } - // queue server hello done - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createServerHelloDone(c) - })); + // 2. Advance position by 1. (This skips past U+003D (=).) + position.position++ } - } - // send records - tls.flush(c); + // 6. If position is past the end of input, then break. + if (position.position > input.length) { + break + } - // continue - c.process(); -}; + // 7. Let parameterValue be null. + let parameterValue = null -/** - * Called when a client receives a Certificate record. - * - * When this message will be sent: - * The server must send a certificate whenever the agreed-upon key exchange - * method is not an anonymous one. This message will always immediately - * follow the server hello message. - * - * Meaning of this message: - * The certificate type must be appropriate for the selected cipher suite's - * key exchange algorithm, and is generally an X.509v3 certificate. It must - * contain a key which matches the key exchange method, as follows. Unless - * otherwise specified, the signing algorithm for the certificate must be - * the same as the algorithm for the certificate key. Unless otherwise - * specified, the public key may be of any length. - * - * opaque ASN.1Cert<1..2^24-1>; - * struct { - * ASN.1Cert certificate_list<1..2^24-1>; - * } Certificate; - * - * @param c the connection. - * @param record the record. 
- * @param length the length of the handshake message. - */ -tls.handleCertificate = function(c, record, length) { - // minimum of 3 bytes in message - if(length < 3) { - return c.error(c, { - message: 'Invalid Certificate message. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } + // 8. If the code point at position within input is + // U+0022 ("), then: + if (input[position.position] === '"') { + // 1. Set parameterValue to the result of collecting + // an HTTP quoted string from input, given position + // and the extract-value flag. + parameterValue = collectAnHTTPQuotedString(input, position, true) - var b = record.fragment; - var msg = { - certificate_list: readVector(b, 3) - }; + // 2. Collect a sequence of code points that are not + // U+003B (;) from input, given position. + collectASequenceOfCodePointsFast( + ';', + input, + position + ) - /* The sender's certificate will be first in the list (chain), each - subsequent one that follows will certify the previous one, but root - certificates (self-signed) that specify the certificate authority may - be omitted under the assumption that clients must already possess it. */ - var cert, asn1; - var certs = []; - try { - while(msg.certificate_list.length() > 0) { - // each entry in msg.certificate_list is a vector with 3 len bytes - cert = readVector(msg.certificate_list, 3); - asn1 = forge.asn1.fromDer(cert); - cert = forge.pki.certificateFromAsn1(asn1, true); - certs.push(cert); - } - } catch(ex) { - return c.error(c, { - message: 'Could not parse certificate list.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.bad_certificate - } - }); - } + // 9. Otherwise: + } else { + // 1. Set parameterValue to the result of collecting + // a sequence of code points that are not U+003B (;) + // from input, given position. + parameterValue = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 2. Remove any trailing HTTP whitespace from parameterValue. + parameterValue = removeHTTPWhitespace(parameterValue, false, true) - // ensure at least 1 certificate was provided if in client-mode - // or if verifyClient was set to true to require a certificate - // (as opposed to 'optional') - var client = (c.entity === tls.ConnectionEnd.client); - if((client || c.verifyClient === true) && certs.length === 0) { - // error, no certificate - c.error(c, { - message: client ? - 'No server certificate provided.' : - 'No client certificate provided.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter + // 3. If parameterValue is the empty string, then continue. + if (parameterValue.length === 0) { + continue } - }); - } else if(certs.length === 0) { - // no certs to verify - // expect a ServerKeyExchange or ClientKeyExchange message next - c.expect = client ? SKE : CKE; - } else { - // save certificate in session - if(client) { - c.session.serverCertificate = certs[0]; - } else { - c.session.clientCertificate = certs[0]; } - if(tls.verifyCertificateChain(c, certs)) { - // expect a ServerKeyExchange or ClientKeyExchange message next - c.expect = client ? SKE : CKE; + // 10. 
If all of the following are true + // - parameterName is not the empty string + // - parameterName solely contains HTTP token code points + // - parameterValue solely contains HTTP quoted-string token code points + // - mimeType’s parameters[parameterName] does not exist + // then set mimeType’s parameters[parameterName] to parameterValue. + if ( + parameterName.length !== 0 && + HTTP_TOKEN_CODEPOINTS.test(parameterName) && + (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && + !mimeType.parameters.has(parameterName) + ) { + mimeType.parameters.set(parameterName, parameterValue) } } - // continue - c.process(); -}; - -/** - * Called when a client receives a ServerKeyExchange record. - * - * When this message will be sent: - * This message will be sent immediately after the server certificate - * message (or the server hello message, if this is an anonymous - * negotiation). - * - * The server key exchange message is sent by the server only when the - * server certificate message (if sent) does not contain enough data to - * allow the client to exchange a premaster secret. - * - * Meaning of this message: - * This message conveys cryptographic information to allow the client to - * communicate the premaster secret: either an RSA public key to encrypt - * the premaster secret with, or a Diffie-Hellman public key with which the - * client can complete a key exchange (with the result being the premaster - * secret.) - * - * enum { - * dhe_dss, dhe_rsa, dh_anon, rsa, dh_dss, dh_rsa - * } KeyExchangeAlgorithm; - * - * struct { - * opaque dh_p<1..2^16-1>; - * opaque dh_g<1..2^16-1>; - * opaque dh_Ys<1..2^16-1>; - * } ServerDHParams; - * - * struct { - * select(KeyExchangeAlgorithm) { - * case dh_anon: - * ServerDHParams params; - * case dhe_dss: - * case dhe_rsa: - * ServerDHParams params; - * digitally-signed struct { - * opaque client_random[32]; - * opaque server_random[32]; - * ServerDHParams params; - * } signed_params; - * case rsa: - * case dh_dss: - * case dh_rsa: - * struct {}; - * }; - * } ServerKeyExchange; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleServerKeyExchange = function(c, record, length) { - // this implementation only supports RSA, no Diffie-Hellman support - // so any length > 0 is invalid - if(length > 0) { - return c.error(c, { - message: 'Invalid key parameters. Only RSA is supported.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.unsupported_certificate - } - }); - } - - // expect an optional CertificateRequest message next - c.expect = SCR; + // 12. Return mimeType. + return mimeType +} - // continue - c.process(); -}; +// https://infra.spec.whatwg.org/#forgiving-base64-decode +/** @param {string} data */ +function forgivingBase64 (data) { + // 1. Remove all ASCII whitespace from data. + data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line -/** - * Called when a client receives a ClientKeyExchange record. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleClientKeyExchange = function(c, record, length) { - // this implementation only supports RSA, no Diffie-Hellman support - // so any length < 48 is invalid - if(length < 48) { - return c.error(c, { - message: 'Invalid key parameters. 
Only RSA is supported.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.unsupported_certificate - } - }); + // 2. If data’s code point length divides by 4 leaving + // no remainder, then: + if (data.length % 4 === 0) { + // 1. If data ends with one or two U+003D (=) code points, + // then remove them from data. + data = data.replace(/=?=$/, '') } - var b = record.fragment; - var msg = { - enc_pre_master_secret: readVector(b, 2).getBytes() - }; - - // do rsa decryption - var privateKey = null; - if(c.getPrivateKey) { - try { - privateKey = c.getPrivateKey(c, c.session.serverCertificate); - privateKey = forge.pki.privateKeyFromPem(privateKey); - } catch(ex) { - c.error(c, { - message: 'Could not get private key.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } + // 3. If data’s code point length divides by 4 leaving + // a remainder of 1, then return failure. + if (data.length % 4 === 1) { + return 'failure' } - if(privateKey === null) { - return c.error(c, { - message: 'No private key set.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); + // 4. If data contains a code point that is not one of + // U+002B (+) + // U+002F (/) + // ASCII alphanumeric + // then return failure. + if (/[^+/0-9A-Za-z]/.test(data)) { + return 'failure' } - try { - // decrypt 48-byte pre-master secret - var sp = c.session.sp; - sp.pre_master_secret = privateKey.decrypt(msg.enc_pre_master_secret); - - // ensure client hello version matches first 2 bytes - var version = c.session.clientHelloVersion; - if(version.major !== sp.pre_master_secret.charCodeAt(0) || - version.minor !== sp.pre_master_secret.charCodeAt(1)) { - // error, do not send alert (see BLEI attack below) - throw new Error('TLS version rollback attack detected.'); - } - } catch(ex) { - /* Note: Daniel Bleichenbacher [BLEI] can be used to attack a - TLS server which is using PKCS#1 encoded RSA, so instead of - failing here, we generate 48 random bytes and use that as - the pre-master secret. */ - sp.pre_master_secret = forge.random.getBytes(48); - } - - // expect a CertificateVerify message if a Certificate was received that - // does not have fixed Diffie-Hellman params, otherwise expect - // ChangeCipherSpec - c.expect = CCC; - if(c.session.clientCertificate !== null) { - // only RSA support, so expect CertificateVerify - // TODO: support Diffie-Hellman - c.expect = CCV; - } - - // continue - c.process(); -}; + const binary = atob(data) + const bytes = new Uint8Array(binary.length) -/** - * Called when a client receives a CertificateRequest record. - * - * When this message will be sent: - * A non-anonymous server can optionally request a certificate from the - * client, if appropriate for the selected cipher suite. This message, if - * sent, will immediately follow the Server Key Exchange message (if it is - * sent; otherwise, the Server Certificate message). 
- * - * enum { - * rsa_sign(1), dss_sign(2), rsa_fixed_dh(3), dss_fixed_dh(4), - * rsa_ephemeral_dh_RESERVED(5), dss_ephemeral_dh_RESERVED(6), - * fortezza_dms_RESERVED(20), (255) - * } ClientCertificateType; - * - * opaque DistinguishedName<1..2^16-1>; - * - * struct { - * ClientCertificateType certificate_types<1..2^8-1>; - * SignatureAndHashAlgorithm supported_signature_algorithms<2^16-1>; - * DistinguishedName certificate_authorities<0..2^16-1>; - * } CertificateRequest; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleCertificateRequest = function(c, record, length) { - // minimum of 3 bytes in message - if(length < 3) { - return c.error(c, { - message: 'Invalid CertificateRequest. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); + for (let byte = 0; byte < binary.length; byte++) { + bytes[byte] = binary.charCodeAt(byte) } - // TODO: TLS 1.2+ has different format including - // SignatureAndHashAlgorithm after cert types - var b = record.fragment; - var msg = { - certificate_types: readVector(b, 1), - certificate_authorities: readVector(b, 2) - }; - - // save certificate request in session - c.session.certificateRequest = msg; - - // expect a ServerHelloDone message next - c.expect = SHD; - - // continue - c.process(); -}; + return bytes +} +// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string +// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string /** - * Called when a server receives a CertificateVerify record. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. + * @param {string} input + * @param {{ position: number }} position + * @param {boolean?} extractValue */ -tls.handleCertificateVerify = function(c, record, length) { - if(length < 2) { - return c.error(c, { - message: 'Invalid CertificateVerify. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } +function collectAnHTTPQuotedString (input, position, extractValue) { + // 1. Let positionStart be position. + const positionStart = position.position - // rewind to get full bytes for message so it can be manually - // digested below (special case for CertificateVerify messages because - // they must be digested *after* handling as opposed to all others) - var b = record.fragment; - b.read -= 4; - var msgBytes = b.bytes(); - b.read += 4; + // 2. Let value be the empty string. + let value = '' - var msg = { - signature: readVector(b, 2).getBytes() - }; + // 3. Assert: the code point at position within input + // is U+0022 ("). + assert(input[position.position] === '"') - // TODO: add support for DSA + // 4. Advance position by 1. + position.position++ - // generate data to verify - var verify = forge.util.createBuffer(); - verify.putBuffer(c.session.md5.digest()); - verify.putBuffer(c.session.sha1.digest()); - verify = verify.getBytes(); + // 5. While true: + while (true) { + // 1. Append the result of collecting a sequence of code points + // that are not U+0022 (") or U+005C (\) from input, given + // position, to value. 
+ value += collectASequenceOfCodePoints( + (char) => char !== '"' && char !== '\\', + input, + position + ) - try { - var cert = c.session.clientCertificate; - /*b = forge.pki.rsa.decrypt( - msg.signature, cert.publicKey, true, verify.length); - if(b !== verify) {*/ - if(!cert.publicKey.verify(verify, msg.signature, 'NONE')) { - throw new Error('CertificateVerify signature does not match.'); - } - - // digest message now that it has been handled - c.session.md5.update(msgBytes); - c.session.sha1.update(msgBytes); - } catch(ex) { - return c.error(c, { - message: 'Bad signature in CertificateVerify.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.handshake_failure - } - }); - } + // 2. If position is past the end of input, then break. + if (position.position >= input.length) { + break + } - // expect ChangeCipherSpec - c.expect = CCC; + // 3. Let quoteOrBackslash be the code point at position within + // input. + const quoteOrBackslash = input[position.position] - // continue - c.process(); -}; + // 4. Advance position by 1. + position.position++ -/** - * Called when a client receives a ServerHelloDone record. - * - * When this message will be sent: - * The server hello done message is sent by the server to indicate the end - * of the server hello and associated messages. After sending this message - * the server will wait for a client response. - * - * Meaning of this message: - * This message means that the server is done sending messages to support - * the key exchange, and the client can proceed with its phase of the key - * exchange. - * - * Upon receipt of the server hello done message the client should verify - * that the server provided a valid certificate if required and check that - * the server hello parameters are acceptable. - * - * struct {} ServerHelloDone; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleServerHelloDone = function(c, record, length) { - // len must be 0 bytes - if(length > 0) { - return c.error(c, { - message: 'Invalid ServerHelloDone message. Invalid length.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.record_overflow + // 5. If quoteOrBackslash is U+005C (\), then: + if (quoteOrBackslash === '\\') { + // 1. If position is past the end of input, then append + // U+005C (\) to value and break. + if (position.position >= input.length) { + value += '\\' + break } - }); - } - if(c.serverCertificate === null) { - // no server certificate was provided - var error = { - message: 'No server certificate provided. Not enough security.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.insufficient_security - } - }; + // 2. Append the code point at position within input to value. + value += input[position.position] - // call application callback - var depth = 0; - var ret = c.verify(c, error.alert.description, depth, []); - if(ret !== true) { - // check for custom alert info - if(ret || ret === 0) { - // set custom message and alert description - if(typeof ret === 'object' && !forge.util.isArray(ret)) { - if(ret.message) { - error.message = ret.message; - } - if(ret.alert) { - error.alert.description = ret.alert; - } - } else if(typeof ret === 'number') { - // set custom alert description - error.alert.description = ret; - } - } + // 3. Advance position by 1. + position.position++ + + // 6. Otherwise: + } else { + // 1. 
Assert: quoteOrBackslash is U+0022 ("). + assert(quoteOrBackslash === '"') - // send error - return c.error(c, error); + // 2. Break. + break } } - // create client certificate message if requested - if(c.session.certificateRequest !== null) { - record = tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificate(c) - }); - tls.queue(c, record); + // 6. If the extract-value flag is set, then return value. + if (extractValue) { + return value } - // create client key exchange message - record = tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createClientKeyExchange(c) - }); - tls.queue(c, record); - - // expect no messages until the following callback has been called - c.expect = SER; - - // create callback to handle client signature (for client-certs) - var callback = function(c, signature) { - if(c.session.certificateRequest !== null && - c.session.clientCertificate !== null) { - // create certificate verify message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificateVerify(c, signature) - })); - } - - // create change cipher spec message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.change_cipher_spec, - data: tls.createChangeCipherSpec() - })); + // 7. Return the code points from positionStart to position, + // inclusive, within input. + return input.slice(positionStart, position.position) +} - // create pending state - c.state.pending = tls.createConnectionState(c); +/** + * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +function serializeAMimeType (mimeType) { + assert(mimeType !== 'failure') + const { parameters, essence } = mimeType - // change current write state to pending write state - c.state.current.write = c.state.pending.write; + // 1. Let serialization be the concatenation of mimeType’s + // type, U+002F (/), and mimeType’s subtype. + let serialization = essence - // create finished message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createFinished(c) - })); + // 2. For each name → value of mimeType’s parameters: + for (let [name, value] of parameters.entries()) { + // 1. Append U+003B (;) to serialization. + serialization += ';' - // expect a server ChangeCipherSpec message next - c.expect = SCC; + // 2. Append name to serialization. + serialization += name - // send records - tls.flush(c); + // 3. Append U+003D (=) to serialization. + serialization += '=' - // continue - c.process(); - }; + // 4. If value does not solely contain HTTP token code + // points or value is the empty string, then: + if (!HTTP_TOKEN_CODEPOINTS.test(value)) { + // 1. Precede each occurence of U+0022 (") or + // U+005C (\) in value with U+005C (\). + value = value.replace(/(\\|")/g, '\\$1') - // if there is no certificate request or no client certificate, do - // callback immediately - if(c.session.certificateRequest === null || - c.session.clientCertificate === null) { - return callback(c, null); + // 2. Prepend U+0022 (") to value. + value = '"' + value + + // 3. Append U+0022 (") to value. + value += '"' + } + + // 5. Append value to serialization. + serialization += value } - // otherwise get the client signature - tls.getClientSignature(c, callback); -}; + // 3. Return serialization. + return serialization +} /** - * Called when a ChangeCipherSpec record is received. - * - * @param c the connection. - * @param record the record. 
+ * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} char */ -tls.handleChangeCipherSpec = function(c, record) { - if(record.fragment.getByte() !== 0x01) { - return c.error(c, { - message: 'Invalid ChangeCipherSpec message received.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } - - // create pending state if: - // 1. Resuming session in client mode OR - // 2. NOT resuming session in server mode - var client = (c.entity === tls.ConnectionEnd.client); - if((c.session.resuming && client) || (!c.session.resuming && !client)) { - c.state.pending = tls.createConnectionState(c); - } +function isHTTPWhiteSpace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === ' ' +} - // change current read state to pending read state - c.state.current.read = c.state.pending.read; +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} str + */ +function removeHTTPWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 - // clear pending state if: - // 1. NOT resuming session in client mode OR - // 2. resuming a session in server mode - if((!c.session.resuming && client) || (c.session.resuming && !client)) { - c.state.pending = null; + if (leading) { + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); } - // expect a Finished record next - c.expect = client ? SFI : CFI; + if (trailing) { + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); + } - // continue - c.process(); -}; + return str.slice(lead, trail + 1) +} /** - * Called when a Finished record is received. - * - * When this message will be sent: - * A finished message is always sent immediately after a change - * cipher spec message to verify that the key exchange and - * authentication processes were successful. It is essential that a - * change cipher spec message be received between the other - * handshake messages and the Finished message. - * - * Meaning of this message: - * The finished message is the first protected with the just- - * negotiated algorithms, keys, and secrets. Recipients of finished - * messages must verify that the contents are correct. Once a side - * has sent its Finished message and received and validated the - * Finished message from its peer, it may begin to send and receive - * application data over the connection. - * - * struct { - * opaque verify_data[verify_data_length]; - * } Finished; - * - * verify_data - * PRF(master_secret, finished_label, Hash(handshake_messages)) - * [0..verify_data_length-1]; - * - * finished_label - * For Finished messages sent by the client, the string - * "client finished". For Finished messages sent by the server, the - * string "server finished". - * - * verify_data_length depends on the cipher suite. If it is not specified - * by the cipher suite, then it is 12. Versions of TLS < 1.2 always used - * 12 bytes. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. 
+ * @see https://infra.spec.whatwg.org/#ascii-whitespace + * @param {string} char */ -tls.handleFinished = function(c, record, length) { - // rewind to get full bytes for message so it can be manually - // digested below (special case for Finished messages because they - // must be digested *after* handling as opposed to all others) - var b = record.fragment; - b.read -= 4; - var msgBytes = b.bytes(); - b.read += 4; - - // message contains only verify_data - var vd = record.fragment.getBytes(); - - // ensure verify data is correct - b = forge.util.createBuffer(); - b.putBuffer(c.session.md5.digest()); - b.putBuffer(c.session.sha1.digest()); - - // set label based on entity type - var client = (c.entity === tls.ConnectionEnd.client); - var label = client ? 'server finished' : 'client finished'; - - // TODO: determine prf function and verify length for TLS 1.2 - var sp = c.session.sp; - var vdl = 12; - var prf = prf_TLS1; - b = prf(sp.master_secret, label, b.getBytes(), vdl); - if(b.getBytes() !== vd) { - return c.error(c, { - message: 'Invalid verify_data in Finished message.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.decrypt_error - } - }); - } - - // digest finished message now that it has been handled - c.session.md5.update(msgBytes); - c.session.sha1.update(msgBytes); - - // resuming session as client or NOT resuming session as server - if((c.session.resuming && client) || (!c.session.resuming && !client)) { - // create change cipher spec message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.change_cipher_spec, - data: tls.createChangeCipherSpec() - })); +function isASCIIWhitespace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' +} - // change current write state to pending write state, clear pending - c.state.current.write = c.state.pending.write; - c.state.pending = null; +/** + * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace + */ +function removeASCIIWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 - // create finished message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createFinished(c) - })); + if (leading) { + for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); } - // expect application data next - c.expect = client ? SAD : CAD; + if (trailing) { + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); + } - // handshake complete - c.handshaking = false; - ++c.handshakes; + return str.slice(lead, trail + 1) +} - // save access to peer certificate - c.peerCertificate = client ? - c.session.serverCertificate : c.session.clientCertificate; +module.exports = { + dataURLProcessor, + URLSerializer, + collectASequenceOfCodePoints, + collectASequenceOfCodePointsFast, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString, + serializeAMimeType +} - // send records - tls.flush(c); - // now connected - c.isConnected = true; - c.connected(c); +/***/ }), - // continue - c.process(); -}; +/***/ 8511: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Called when an Alert record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleAlert = function(c, record) { - // read alert - var b = record.fragment; - var alert = { - level: b.getByte(), - description: b.getByte() - }; +"use strict"; - // TODO: consider using a table? 
- // get appropriate message - var msg; - switch(alert.description) { - case tls.Alert.Description.close_notify: - msg = 'Connection closed.'; - break; - case tls.Alert.Description.unexpected_message: - msg = 'Unexpected message.'; - break; - case tls.Alert.Description.bad_record_mac: - msg = 'Bad record MAC.'; - break; - case tls.Alert.Description.decryption_failed: - msg = 'Decryption failed.'; - break; - case tls.Alert.Description.record_overflow: - msg = 'Record overflow.'; - break; - case tls.Alert.Description.decompression_failure: - msg = 'Decompression failed.'; - break; - case tls.Alert.Description.handshake_failure: - msg = 'Handshake failure.'; - break; - case tls.Alert.Description.bad_certificate: - msg = 'Bad certificate.'; - break; - case tls.Alert.Description.unsupported_certificate: - msg = 'Unsupported certificate.'; - break; - case tls.Alert.Description.certificate_revoked: - msg = 'Certificate revoked.'; - break; - case tls.Alert.Description.certificate_expired: - msg = 'Certificate expired.'; - break; - case tls.Alert.Description.certificate_unknown: - msg = 'Certificate unknown.'; - break; - case tls.Alert.Description.illegal_parameter: - msg = 'Illegal parameter.'; - break; - case tls.Alert.Description.unknown_ca: - msg = 'Unknown certificate authority.'; - break; - case tls.Alert.Description.access_denied: - msg = 'Access denied.'; - break; - case tls.Alert.Description.decode_error: - msg = 'Decode error.'; - break; - case tls.Alert.Description.decrypt_error: - msg = 'Decrypt error.'; - break; - case tls.Alert.Description.export_restriction: - msg = 'Export restriction.'; - break; - case tls.Alert.Description.protocol_version: - msg = 'Unsupported protocol version.'; - break; - case tls.Alert.Description.insufficient_security: - msg = 'Insufficient security.'; - break; - case tls.Alert.Description.internal_error: - msg = 'Internal error.'; - break; - case tls.Alert.Description.user_canceled: - msg = 'User canceled.'; - break; - case tls.Alert.Description.no_renegotiation: - msg = 'Renegotiation not supported.'; - break; - default: - msg = 'Unknown error.'; - break; - } - - // close connection on close_notify, not an error - if(alert.description === tls.Alert.Description.close_notify) { - return c.close(); - } - - // call error handler - c.error(c, { - message: msg, - send: false, - // origin is the opposite end - origin: (c.entity === tls.ConnectionEnd.client) ? 'server' : 'client', - alert: alert - }); - // continue - c.process(); -}; +const { Blob, File: NativeFile } = __nccwpck_require__(4300) +const { types } = __nccwpck_require__(3837) +const { kState } = __nccwpck_require__(5861) +const { isBlobLike } = __nccwpck_require__(2538) +const { webidl } = __nccwpck_require__(1744) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +const { kEnumerableProperty } = __nccwpck_require__(3983) +const encoder = new TextEncoder() + +class File extends Blob { + constructor (fileBits, fileName, options = {}) { + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + + fileBits = webidl.converters['sequence'](fileBits) + fileName = webidl.converters.USVString(fileName) + options = webidl.converters.FilePropertyBag(options) + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. 
+ // Note: Blob handles this for us + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // 2. Convert every character in t to ASCII lowercase. + let t = options.type + let d + + // eslint-disable-next-line no-labels + substep: { + if (t) { + t = parseMIMEType(t) + + if (t === 'failure') { + t = '' + // eslint-disable-next-line no-labels + break substep + } + + t = serializeAMimeType(t).toLowerCase() + } -/** - * Called when a Handshake record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleHandshake = function(c, record) { - // get the handshake type and message length - var b = record.fragment; - var type = b.getByte(); - var length = b.getInt24(); - - // see if the record fragment doesn't yet contain the full message - if(length > b.length()) { - // cache the record, clear its fragment, and reset the buffer read - // pointer before the type and length were read - c.fragmented = record; - record.fragment = forge.util.createBuffer(); - b.read -= 4; - - // continue - return c.process(); - } - - // full message now available, clear cache, reset read pointer to - // before type and length - c.fragmented = null; - b.read -= 4; - - // save the handshake bytes for digestion after handler is found - // (include type and length of handshake msg) - var bytes = b.bytes(length + 4); - - // restore read pointer - b.read += 4; - - // handle expected message - if(type in hsTable[c.entity][c.expect]) { - // initialize server session - if(c.entity === tls.ConnectionEnd.server && !c.open && !c.fail) { - c.handshaking = true; - c.session = { - version: null, - extensions: { - server_name: { - serverNameList: [] - } - }, - cipherSuite: null, - compressionMethod: null, - serverCertificate: null, - clientCertificate: null, - md5: forge.md.md5.create(), - sha1: forge.md.sha1.create() - }; + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + d = options.lastModified } - /* Update handshake messages digest. Finished and CertificateVerify - messages are not digested here. They can't be digested as part of - the verify_data that they contain. These messages are manually - digested in their handlers. HelloRequest messages are simply never - included in the handshake message digest according to spec. */ - if(type !== tls.HandshakeType.hello_request && - type !== tls.HandshakeType.certificate_verify && - type !== tls.HandshakeType.finished) { - c.session.md5.update(bytes); - c.session.sha1.update(bytes); + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. 
+ + super(processBlobParts(fileBits, options), { type: t }) + this[kState] = { + name: n, + lastModified: d, + type: t } + } - // handle specific handshake type record - hsTable[c.entity][c.expect][type](c, record, length); - } else { - // unexpected record - tls.handleUnexpected(c, record); + get name () { + webidl.brandCheck(this, File) + + return this[kState].name } -}; -/** - * Called when an ApplicationData record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleApplicationData = function(c, record) { - // buffer data, notify that its ready - c.data.putBuffer(record.fragment); - c.dataReady(c); + get lastModified () { + webidl.brandCheck(this, File) - // continue - c.process(); -}; + return this[kState].lastModified + } -/** - * Called when a Heartbeat record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleHeartbeat = function(c, record) { - // get the heartbeat type and payload - var b = record.fragment; - var type = b.getByte(); - var length = b.getInt16(); - var payload = b.getBytes(length); - - if(type === tls.HeartbeatMessageType.heartbeat_request) { - // discard request during handshake or if length is too large - if(c.handshaking || length > payload.length) { - // continue - return c.process(); - } - // retransmit payload - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.heartbeat, - data: tls.createHeartbeat( - tls.HeartbeatMessageType.heartbeat_response, payload) - })); - tls.flush(c); - } else if(type === tls.HeartbeatMessageType.heartbeat_response) { - // check payload against expected payload, discard heartbeat if no match - if(payload !== c.expectedHeartbeatPayload) { - // continue - return c.process(); - } + get type () { + webidl.brandCheck(this, File) - // notify that a valid heartbeat was received - if(c.heartbeatReceived) { - c.heartbeatReceived(c, forge.util.createBuffer(payload)); - } + return this[kState].type } +} - // continue - c.process(); -}; +class FileLike { + constructor (blobLike, fileName, options = {}) { + // TODO: argument idl type check -/** - * The transistional state tables for receiving TLS records. It maps the - * current TLS engine state and a received record to a function to handle the - * record and update the state. - * - * For instance, if the current state is SHE, then the TLS engine is expecting - * a ServerHello record. Once a record is received, the handler function is - * looked up using the state SHE and the record's content type. - * - * The resulting function will either be an error handler or a record handler. - * The function will take whatever action is appropriate and update the state - * for the next record. - * - * The states are all based on possible server record types. Note that the - * client will never specifically expect to receive a HelloRequest or an alert - * from the server so there is no state that reflects this. These messages may - * occur at any time. - * - * There are two tables for mapping states because there is a second tier of - * types for handshake messages. Once a record with a content type of handshake - * is received, the handshake record handler will look up the handshake type in - * the secondary map to get its appropriate handler. 
- * - * Valid message orders are as follows: - * - * =======================FULL HANDSHAKE====================== - * Client Server - * - * ClientHello --------> - * ServerHello - * Certificate* - * ServerKeyExchange* - * CertificateRequest* - * <-------- ServerHelloDone - * Certificate* - * ClientKeyExchange - * CertificateVerify* - * [ChangeCipherSpec] - * Finished --------> - * [ChangeCipherSpec] - * <-------- Finished - * Application Data <-------> Application Data - * - * =====================SESSION RESUMPTION===================== - * Client Server - * - * ClientHello --------> - * ServerHello - * [ChangeCipherSpec] - * <-------- Finished - * [ChangeCipherSpec] - * Finished --------> - * Application Data <-------> Application Data - */ -// client expect states (indicate which records are expected to be received) -var SHE = 0; // rcv server hello -var SCE = 1; // rcv server certificate -var SKE = 2; // rcv server key exchange -var SCR = 3; // rcv certificate request -var SHD = 4; // rcv server hello done -var SCC = 5; // rcv change cipher spec -var SFI = 6; // rcv finished -var SAD = 7; // rcv application data -var SER = 8; // not expecting any messages at this point - -// server expect states -var CHE = 0; // rcv client hello -var CCE = 1; // rcv client certificate -var CKE = 2; // rcv client key exchange -var CCV = 3; // rcv certificate verify -var CCC = 4; // rcv change cipher spec -var CFI = 5; // rcv finished -var CAD = 6; // rcv application data -var CER = 7; // not expecting any messages at this point - -// map client current expect state and content type to function -var __ = tls.handleUnexpected; -var R0 = tls.handleChangeCipherSpec; -var R1 = tls.handleAlert; -var R2 = tls.handleHandshake; -var R3 = tls.handleApplicationData; -var R4 = tls.handleHeartbeat; -var ctTable = []; -ctTable[tls.ConnectionEnd.client] = [ -// CC,AL,HS,AD,HB -/*SHE*/[__,R1,R2,__,R4], -/*SCE*/[__,R1,R2,__,R4], -/*SKE*/[__,R1,R2,__,R4], -/*SCR*/[__,R1,R2,__,R4], -/*SHD*/[__,R1,R2,__,R4], -/*SCC*/[R0,R1,__,__,R4], -/*SFI*/[__,R1,R2,__,R4], -/*SAD*/[__,R1,R2,R3,R4], -/*SER*/[__,R1,R2,__,R4] -]; + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + + // 2. Let n be the fileName argument to the constructor. + const n = fileName -// map server current expect state and content type to function -ctTable[tls.ConnectionEnd.server] = [ -// CC,AL,HS,AD -/*CHE*/[__,R1,R2,__,R4], -/*CCE*/[__,R1,R2,__,R4], -/*CKE*/[__,R1,R2,__,R4], -/*CCV*/[__,R1,R2,__,R4], -/*CCC*/[R0,R1,__,__,R4], -/*CFI*/[__,R1,R2,__,R4], -/*CAD*/[__,R1,R2,R3,R4], -/*CER*/[__,R1,R2,__,R4] -]; + // 3. 
Process FilePropertyBag dictionary argument by running the following + // substeps: -// map client current expect state and handshake type to function -var H0 = tls.handleHelloRequest; -var H1 = tls.handleServerHello; -var H2 = tls.handleCertificate; -var H3 = tls.handleServerKeyExchange; -var H4 = tls.handleCertificateRequest; -var H5 = tls.handleServerHelloDone; -var H6 = tls.handleFinished; -var hsTable = []; -hsTable[tls.ConnectionEnd.client] = [ -// HR,01,SH,03,04,05,06,07,08,09,10,SC,SK,CR,HD,15,CK,17,18,19,FI -/*SHE*/[__,__,H1,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*SCE*/[H0,__,__,__,__,__,__,__,__,__,__,H2,H3,H4,H5,__,__,__,__,__,__], -/*SKE*/[H0,__,__,__,__,__,__,__,__,__,__,__,H3,H4,H5,__,__,__,__,__,__], -/*SCR*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,H4,H5,__,__,__,__,__,__], -/*SHD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,H5,__,__,__,__,__,__], -/*SCC*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*SFI*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6], -/*SAD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*SER*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__] -]; + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // TODO + const t = options.type -// map server current expect state and handshake type to function -// Note: CAD[CH] does not map to FB because renegotation is prohibited -var H7 = tls.handleClientHello; -var H8 = tls.handleClientKeyExchange; -var H9 = tls.handleCertificateVerify; -hsTable[tls.ConnectionEnd.server] = [ -// 01,CH,02,03,04,05,06,07,08,09,10,CC,12,13,14,CV,CK,17,18,19,FI -/*CHE*/[__,H7,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*CCE*/[__,__,__,__,__,__,__,__,__,__,__,H2,__,__,__,__,__,__,__,__,__], -/*CKE*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H8,__,__,__,__], -/*CCV*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H9,__,__,__,__,__], -/*CCC*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*CFI*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6], -/*CAD*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*CER*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__] -]; + // 2. Convert every character in t to ASCII lowercase. + // TODO -/** - * Generates the master_secret and keys using the given security parameters. - * - * The security parameters for a TLS connection state are defined as such: - * - * struct { - * ConnectionEnd entity; - * PRFAlgorithm prf_algorithm; - * BulkCipherAlgorithm bulk_cipher_algorithm; - * CipherType cipher_type; - * uint8 enc_key_length; - * uint8 block_length; - * uint8 fixed_iv_length; - * uint8 record_iv_length; - * MACAlgorithm mac_algorithm; - * uint8 mac_length; - * uint8 mac_key_length; - * CompressionMethod compression_algorithm; - * opaque master_secret[48]; - * opaque client_random[32]; - * opaque server_random[32]; - * } SecurityParameters; - * - * Note that this definition is from TLS 1.2. In TLS 1.0 some of these - * parameters are ignored because, for instance, the PRFAlgorithm is a - * builtin-fixed algorithm combining iterations of MD5 and SHA-1 in TLS 1.0. - * - * The Record Protocol requires an algorithm to generate keys required by the - * current connection state. 
- * - * The master secret is expanded into a sequence of secure bytes, which is then - * split to a client write MAC key, a server write MAC key, a client write - * encryption key, and a server write encryption key. In TLS 1.0 a client write - * IV and server write IV are also generated. Each of these is generated from - * the byte sequence in that order. Unused values are empty. In TLS 1.2, some - * AEAD ciphers may additionally require a client write IV and a server write - * IV (see Section 6.2.3.3). - * - * When keys, MAC keys, and IVs are generated, the master secret is used as an - * entropy source. - * - * To generate the key material, compute: - * - * master_secret = PRF(pre_master_secret, "master secret", - * ClientHello.random + ServerHello.random) - * - * key_block = PRF(SecurityParameters.master_secret, - * "key expansion", - * SecurityParameters.server_random + - * SecurityParameters.client_random); - * - * until enough output has been generated. Then, the key_block is - * partitioned as follows: - * - * client_write_MAC_key[SecurityParameters.mac_key_length] - * server_write_MAC_key[SecurityParameters.mac_key_length] - * client_write_key[SecurityParameters.enc_key_length] - * server_write_key[SecurityParameters.enc_key_length] - * client_write_IV[SecurityParameters.fixed_iv_length] - * server_write_IV[SecurityParameters.fixed_iv_length] - * - * In TLS 1.2, the client_write_IV and server_write_IV are only generated for - * implicit nonce techniques as described in Section 3.2.1 of [AEAD]. This - * implementation uses TLS 1.0 so IVs are generated. - * - * Implementation note: The currently defined cipher suite which requires the - * most material is AES_256_CBC_SHA256. It requires 2 x 32 byte keys and 2 x 32 - * byte MAC keys, for a total 128 bytes of key material. In TLS 1.0 it also - * requires 2 x 16 byte IVs, so it actually takes 160 bytes of key material. - * - * @param c the connection. - * @param sp the security parameters to use. - * - * @return the security keys. - */ -tls.generateKeys = function(c, sp) { - // TLS_RSA_WITH_AES_128_CBC_SHA (required to be compliant with TLS 1.2) & - // TLS_RSA_WITH_AES_256_CBC_SHA are the only cipher suites implemented - // at present + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + const d = options.lastModified ?? Date.now() - // TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA is required to be compliant with - // TLS 1.0 but we don't care right now because AES is better and we have - // an implementation for it + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. 
- // TODO: TLS 1.2 implementation - /* - // determine the PRF - var prf; - switch(sp.prf_algorithm) { - case tls.PRFAlgorithm.tls_prf_sha256: - prf = prf_sha256; - break; - default: - // should never happen - throw new Error('Invalid PRF'); + this[kState] = { + blobLike, + name: n, + type: t, + lastModified: d + } } - */ - - // TLS 1.0/1.1 implementation - var prf = prf_TLS1; - // concatenate server and client random - var random = sp.client_random + sp.server_random; + stream (...args) { + webidl.brandCheck(this, FileLike) - // only create master secret if session is new - if(!c.session.resuming) { - // create master secret, clean up pre-master secret - sp.master_secret = prf( - sp.pre_master_secret, 'master secret', random, 48).bytes(); - sp.pre_master_secret = null; + return this[kState].blobLike.stream(...args) } - // generate the amount of key material needed - random = sp.server_random + sp.client_random; - var length = 2 * sp.mac_key_length + 2 * sp.enc_key_length; + arrayBuffer (...args) { + webidl.brandCheck(this, FileLike) - // include IV for TLS/1.0 - var tls10 = (c.version.major === tls.Versions.TLS_1_0.major && - c.version.minor === tls.Versions.TLS_1_0.minor); - if(tls10) { - length += 2 * sp.fixed_iv_length; + return this[kState].blobLike.arrayBuffer(...args) } - var km = prf(sp.master_secret, 'key expansion', random, length); - // split the key material into the MAC and encryption keys - var rval = { - client_write_MAC_key: km.getBytes(sp.mac_key_length), - server_write_MAC_key: km.getBytes(sp.mac_key_length), - client_write_key: km.getBytes(sp.enc_key_length), - server_write_key: km.getBytes(sp.enc_key_length) - }; + slice (...args) { + webidl.brandCheck(this, FileLike) - // include TLS 1.0 IVs - if(tls10) { - rval.client_write_IV = km.getBytes(sp.fixed_iv_length); - rval.server_write_IV = km.getBytes(sp.fixed_iv_length); + return this[kState].blobLike.slice(...args) } - return rval; -}; + text (...args) { + webidl.brandCheck(this, FileLike) -/** - * Creates a new initialized TLS connection state. A connection state has - * a read mode and a write mode. - * - * compression state: - * The current state of the compression algorithm. - * - * cipher state: - * The current state of the encryption algorithm. This will consist of the - * scheduled key for that connection. For stream ciphers, this will also - * contain whatever state information is necessary to allow the stream to - * continue to encrypt or decrypt data. - * - * MAC key: - * The MAC key for the connection. - * - * sequence number: - * Each connection state contains a sequence number, which is maintained - * separately for read and write states. The sequence number MUST be set to - * zero whenever a connection state is made the active state. Sequence - * numbers are of type uint64 and may not exceed 2^64-1. Sequence numbers do - * not wrap. If a TLS implementation would need to wrap a sequence number, - * it must renegotiate instead. A sequence number is incremented after each - * record: specifically, the first record transmitted under a particular - * connection state MUST use sequence number 0. - * - * @param c the connection. - * - * @return the new initialized TLS connection state. 
- */ -tls.createConnectionState = function(c) { - var client = (c.entity === tls.ConnectionEnd.client); - - var createMode = function() { - var mode = { - // two 32-bit numbers, first is most significant - sequenceNumber: [0, 0], - macKey: null, - macLength: 0, - macFunction: null, - cipherState: null, - cipherFunction: function(record) {return true;}, - compressionState: null, - compressFunction: function(record) {return true;}, - updateSequenceNumber: function() { - if(mode.sequenceNumber[1] === 0xFFFFFFFF) { - mode.sequenceNumber[1] = 0; - ++mode.sequenceNumber[0]; - } else { - ++mode.sequenceNumber[1]; - } - } - }; - return mode; - }; - var state = { - read: createMode(), - write: createMode() - }; + return this[kState].blobLike.text(...args) + } - // update function in read mode will decrypt then decompress a record - state.read.update = function(c, record) { - if(!state.read.cipherFunction(record, state.read)) { - c.error(c, { - message: 'Could not decrypt record or bad MAC.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - // doesn't matter if decryption failed or MAC was - // invalid, return the same error so as not to reveal - // which one occurred - description: tls.Alert.Description.bad_record_mac - } - }); - } else if(!state.read.compressFunction(c, record, state.read)) { - c.error(c, { - message: 'Could not decompress record.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.decompression_failure - } - }); - } - return !c.fail; - }; + get size () { + webidl.brandCheck(this, FileLike) - // update function in write mode will compress then encrypt a record - state.write.update = function(c, record) { - if(!state.write.compressFunction(c, record, state.write)) { - // error, but do not send alert since it would require - // compression as well - c.error(c, { - message: 'Could not compress record.', - send: false, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } else if(!state.write.cipherFunction(record, state.write)) { - // error, but do not send alert since it would require - // encryption as well - c.error(c, { - message: 'Could not encrypt record.', - send: false, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } - return !c.fail; - }; + return this[kState].blobLike.size + } - // handle security parameters - if(c.session) { - var sp = c.session.sp; - c.session.cipherSuite.initSecurityParameters(sp); + get type () { + webidl.brandCheck(this, FileLike) - // generate keys - sp.keys = tls.generateKeys(c, sp); - state.read.macKey = client ? - sp.keys.server_write_MAC_key : sp.keys.client_write_MAC_key; - state.write.macKey = client ? - sp.keys.client_write_MAC_key : sp.keys.server_write_MAC_key; + return this[kState].blobLike.type + } - // cipher suite setup - c.session.cipherSuite.initConnectionState(state, c, sp); + get name () { + webidl.brandCheck(this, FileLike) - // compression setup - switch(sp.compression_algorithm) { - case tls.CompressionMethod.none: - break; - case tls.CompressionMethod.deflate: - state.read.compressFunction = inflate; - state.write.compressFunction = deflate; - break; - default: - throw new Error('Unsupported compression algorithm.'); - } + return this[kState].name } - return state; -}; + get lastModified () { + webidl.brandCheck(this, FileLike) -/** - * Creates a Random structure. 
- * - * struct { - * uint32 gmt_unix_time; - * opaque random_bytes[28]; - * } Random; - * - * gmt_unix_time: - * The current time and date in standard UNIX 32-bit format (seconds since - * the midnight starting Jan 1, 1970, UTC, ignoring leap seconds) according - * to the sender's internal clock. Clocks are not required to be set - * correctly by the basic TLS protocol; higher-level or application - * protocols may define additional requirements. Note that, for historical - * reasons, the data element is named using GMT, the predecessor of the - * current worldwide time base, UTC. - * random_bytes: - * 28 bytes generated by a secure random number generator. - * - * @return the Random structure as a byte array. - */ -tls.createRandom = function() { - // get UTC milliseconds - var d = new Date(); - var utc = +d + d.getTimezoneOffset() * 60000; - var rval = forge.util.createBuffer(); - rval.putInt32(utc); - rval.putBytes(forge.random.getBytes(28)); - return rval; -}; + return this[kState].lastModified + } -/** - * Creates a TLS record with the given type and data. - * - * @param c the connection. - * @param options: - * type: the record type. - * data: the plain text data in a byte buffer. - * - * @return the created record. - */ -tls.createRecord = function(c, options) { - if(!options.data) { - return null; + get [Symbol.toStringTag] () { + return 'File' } - var record = { - type: options.type, - version: { - major: c.version.major, - minor: c.version.minor - }, - length: options.data.length(), - fragment: options.data - }; - return record; -}; +} -/** - * Creates a TLS alert record. - * - * @param c the connection. - * @param alert: - * level: the TLS alert level. - * description: the TLS alert description. - * - * @return the created alert record. - */ -tls.createAlert = function(c, alert) { - var b = forge.util.createBuffer(); - b.putByte(alert.level); - b.putByte(alert.description); - return tls.createRecord(c, { - type: tls.ContentType.alert, - data: b - }); -}; +Object.defineProperties(File.prototype, { + [Symbol.toStringTag]: { + value: 'File', + configurable: true + }, + name: kEnumerableProperty, + lastModified: kEnumerableProperty +}) -/* The structure of a TLS handshake message. - * - * struct { - * HandshakeType msg_type; // handshake type - * uint24 length; // bytes in message - * select(HandshakeType) { - * case hello_request: HelloRequest; - * case client_hello: ClientHello; - * case server_hello: ServerHello; - * case certificate: Certificate; - * case server_key_exchange: ServerKeyExchange; - * case certificate_request: CertificateRequest; - * case server_hello_done: ServerHelloDone; - * case certificate_verify: CertificateVerify; - * case client_key_exchange: ClientKeyExchange; - * case finished: Finished; - * } body; - * } Handshake; - */ +webidl.converters.Blob = webidl.interfaceConverter(Blob) -/** - * Creates a ClientHello message. 
- * - * opaque SessionID<0..32>; - * enum { null(0), deflate(1), (255) } CompressionMethod; - * uint8 CipherSuite[2]; - * - * struct { - * ProtocolVersion client_version; - * Random random; - * SessionID session_id; - * CipherSuite cipher_suites<2..2^16-2>; - * CompressionMethod compression_methods<1..2^8-1>; - * select(extensions_present) { - * case false: - * struct {}; - * case true: - * Extension extensions<0..2^16-1>; - * }; - * } ClientHello; - * - * The extension format for extended client hellos and server hellos is: - * - * struct { - * ExtensionType extension_type; - * opaque extension_data<0..2^16-1>; - * } Extension; - * - * Here: - * - * - "extension_type" identifies the particular extension type. - * - "extension_data" contains information specific to the particular - * extension type. - * - * The extension types defined in this document are: - * - * enum { - * server_name(0), max_fragment_length(1), - * client_certificate_url(2), trusted_ca_keys(3), - * truncated_hmac(4), status_request(5), (65535) - * } ExtensionType; - * - * @param c the connection. - * - * @return the ClientHello byte buffer. - */ -tls.createClientHello = function(c) { - // save hello version - c.session.clientHelloVersion = { - major: c.version.major, - minor: c.version.minor - }; +webidl.converters.BlobPart = function (V, opts) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } - // create supported cipher suites - var cipherSuites = forge.util.createBuffer(); - for(var i = 0; i < c.cipherSuites.length; ++i) { - var cs = c.cipherSuites[i]; - cipherSuites.putByte(cs.id[0]); - cipherSuites.putByte(cs.id[1]); - } - var cSuites = cipherSuites.length(); - - // create supported compression methods, null always supported, but - // also support deflate if connection has inflate and deflate methods - var compressionMethods = forge.util.createBuffer(); - compressionMethods.putByte(tls.CompressionMethod.none); - // FIXME: deflate support disabled until issues with raw deflate data - // without zlib headers are resolved - /* - if(c.inflate !== null && c.deflate !== null) { - compressionMethods.putByte(tls.CompressionMethod.deflate); + if ( + ArrayBuffer.isView(V) || + types.isAnyArrayBuffer(V) + ) { + return webidl.converters.BufferSource(V, opts) + } } - */ - var cMethods = compressionMethods.length(); - - // create TLS SNI (server name indication) extension if virtual host - // has been specified, see RFC 3546 - var extensions = forge.util.createBuffer(); - if(c.virtualHost) { - // create extension struct - var ext = forge.util.createBuffer(); - ext.putByte(0x00); // type server_name (ExtensionType is 2 bytes) - ext.putByte(0x00); - - /* In order to provide the server name, clients MAY include an - * extension of type "server_name" in the (extended) client hello. 
- * The "extension_data" field of this extension SHALL contain - * "ServerNameList" where: - * - * struct { - * NameType name_type; - * select(name_type) { - * case host_name: HostName; - * } name; - * } ServerName; - * - * enum { - * host_name(0), (255) - * } NameType; - * - * opaque HostName<1..2^16-1>; - * - * struct { - * ServerName server_name_list<1..2^16-1> - * } ServerNameList; - */ - var serverName = forge.util.createBuffer(); - serverName.putByte(0x00); // type host_name - writeVector(serverName, 2, forge.util.createBuffer(c.virtualHost)); - - // ServerNameList is in extension_data - var snList = forge.util.createBuffer(); - writeVector(snList, 2, serverName); - writeVector(ext, 2, snList); - extensions.putBuffer(ext); - } - var extLength = extensions.length(); - if(extLength > 0) { - // add extension vector length - extLength += 2; - } - - // determine length of the handshake message - // cipher suites and compression methods size will need to be - // updated if more get added to the list - var sessionId = c.session.id; - var length = - sessionId.length + 1 + // session ID vector - 2 + // version (major + minor) - 4 + 28 + // random time and random bytes - 2 + cSuites + // cipher suites vector - 1 + cMethods + // compression methods vector - extLength; // extensions vector - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.client_hello); - rval.putInt24(length); // handshake length - rval.putByte(c.version.major); // major version - rval.putByte(c.version.minor); // minor version - rval.putBytes(c.session.sp.client_random); // random time + bytes - writeVector(rval, 1, forge.util.createBuffer(sessionId)); - writeVector(rval, 2, cipherSuites); - writeVector(rval, 1, compressionMethods); - if(extLength > 0) { - writeVector(rval, 2, extensions); - } - return rval; -}; -/** - * Creates a ServerHello message. - * - * @param c the connection. - * - * @return the ServerHello byte buffer. - */ -tls.createServerHello = function(c) { - // determine length of the handshake message - var sessionId = c.session.id; - var length = - sessionId.length + 1 + // session ID vector - 2 + // version (major + minor) - 4 + 28 + // random time and random bytes - 2 + // chosen cipher suite - 1; // chosen compression method - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.server_hello); - rval.putInt24(length); // handshake length - rval.putByte(c.version.major); // major version - rval.putByte(c.version.minor); // minor version - rval.putBytes(c.session.sp.server_random); // random time + bytes - writeVector(rval, 1, forge.util.createBuffer(sessionId)); - rval.putByte(c.session.cipherSuite.id[0]); - rval.putByte(c.session.cipherSuite.id[1]); - rval.putByte(c.session.compressionMethod); - return rval; -}; + return webidl.converters.USVString(V, opts) +} -/** - * Creates a Certificate message. - * - * When this message will be sent: - * This is the first message the client can send after receiving a server - * hello done message and the first message the server can send after - * sending a ServerHello. This client message is only sent if the server - * requests a certificate. If no suitable certificate is available, the - * client should send a certificate message containing no certificates. If - * client authentication is required by the server for the handshake to - * continue, it may respond with a fatal handshake failure alert. 
- * - * opaque ASN.1Cert<1..2^24-1>; - * - * struct { - * ASN.1Cert certificate_list<0..2^24-1>; - * } Certificate; - * - * @param c the connection. - * - * @return the Certificate byte buffer. - */ -tls.createCertificate = function(c) { - // TODO: check certificate request to ensure types are supported - - // get a certificate (a certificate as a PEM string) - var client = (c.entity === tls.ConnectionEnd.client); - var cert = null; - if(c.getCertificate) { - var hint; - if(client) { - hint = c.session.certificateRequest; - } else { - hint = c.session.extensions.server_name.serverNameList; +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.BlobPart +) + +// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag +webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ + { + key: 'lastModified', + converter: webidl.converters['long long'], + get defaultValue () { + return Date.now() } - cert = c.getCertificate(c, hint); - } + }, + { + key: 'type', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'endings', + converter: (value) => { + value = webidl.converters.DOMString(value) + value = value.toLowerCase() - // buffer to hold certificate list - var certList = forge.util.createBuffer(); - if(cert !== null) { - try { - // normalize cert to a chain of certificates - if(!forge.util.isArray(cert)) { - cert = [cert]; + if (value !== 'native') { + value = 'transparent' } - var asn1 = null; - for(var i = 0; i < cert.length; ++i) { - var msg = forge.pem.decode(cert[i])[0]; - if(msg.type !== 'CERTIFICATE' && - msg.type !== 'X509 CERTIFICATE' && - msg.type !== 'TRUSTED CERTIFICATE') { - var error = new Error('Could not convert certificate from PEM; PEM ' + - 'header type is not "CERTIFICATE", "X509 CERTIFICATE", or ' + - '"TRUSTED CERTIFICATE".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert certificate from PEM; PEM is encrypted.'); - } - var der = forge.util.createBuffer(msg.body); - if(asn1 === null) { - asn1 = forge.asn1.fromDer(der.bytes(), false); - } + return value + }, + defaultValue: 'transparent' + } +]) - // certificate entry is itself a vector with 3 length bytes - var certBuffer = forge.util.createBuffer(); - writeVector(certBuffer, 3, der); +/** + * @see https://www.w3.org/TR/FileAPI/#process-blob-parts + * @param {(NodeJS.TypedArray|Blob|string)[]} parts + * @param {{ type: string, endings: string }} options + */ +function processBlobParts (parts, options) { + // 1. Let bytes be an empty sequence of bytes. + /** @type {NodeJS.TypedArray[]} */ + const bytes = [] - // add cert vector to cert list vector - certList.putBuffer(certBuffer); + // 2. For each element in parts: + for (const element of parts) { + // 1. If element is a USVString, run the following substeps: + if (typeof element === 'string') { + // 1. Let s be element. + let s = element + + // 2. If the endings member of options is "native", set s + // to the result of converting line endings to native + // of element. + if (options.endings === 'native') { + s = convertLineEndingsNative(s) } - // save certificate - cert = forge.pki.certificateFromAsn1(asn1); - if(client) { - c.session.clientCertificate = cert; + // 3. Append the result of UTF-8 encoding s to bytes. + bytes.push(encoder.encode(s)) + } else if ( + types.isAnyArrayBuffer(element) || + types.isTypedArray(element) + ) { + // 2. 
If element is a BufferSource, get a copy of the + // bytes held by the buffer source, and append those + // bytes to bytes. + if (!element.buffer) { // ArrayBuffer + bytes.push(new Uint8Array(element)) } else { - c.session.serverCertificate = cert; + bytes.push( + new Uint8Array(element.buffer, element.byteOffset, element.byteLength) + ) } - } catch(ex) { - return c.error(c, { - message: 'Could not send certificate list.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.bad_certificate - } - }); + } else if (isBlobLike(element)) { + // 3. If element is a Blob, append the bytes it represents + // to bytes. + bytes.push(element) } } - // determine length of the handshake message - var length = 3 + certList.length(); // cert list vector - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.certificate); - rval.putInt24(length); - writeVector(rval, 3, certList); - return rval; -}; - -/** - * Creates a ClientKeyExchange message. - * - * When this message will be sent: - * This message is always sent by the client. It will immediately follow the - * client certificate message, if it is sent. Otherwise it will be the first - * message sent by the client after it receives the server hello done - * message. - * - * Meaning of this message: - * With this message, the premaster secret is set, either though direct - * transmission of the RSA-encrypted secret, or by the transmission of - * Diffie-Hellman parameters which will allow each side to agree upon the - * same premaster secret. When the key exchange method is DH_RSA or DH_DSS, - * client certification has been requested, and the client was able to - * respond with a certificate which contained a Diffie-Hellman public key - * whose parameters (group and generator) matched those specified by the - * server in its certificate, this message will not contain any data. - * - * Meaning of this message: - * If RSA is being used for key agreement and authentication, the client - * generates a 48-byte premaster secret, encrypts it using the public key - * from the server's certificate or the temporary RSA key provided in a - * server key exchange message, and sends the result in an encrypted - * premaster secret message. This structure is a variant of the client - * key exchange message, not a message in itself. - * - * struct { - * select(KeyExchangeAlgorithm) { - * case rsa: EncryptedPreMasterSecret; - * case diffie_hellman: ClientDiffieHellmanPublic; - * } exchange_keys; - * } ClientKeyExchange; - * - * struct { - * ProtocolVersion client_version; - * opaque random[46]; - * } PreMasterSecret; - * - * struct { - * public-key-encrypted PreMasterSecret pre_master_secret; - * } EncryptedPreMasterSecret; - * - * A public-key-encrypted element is encoded as a vector <0..2^16-1>. - * - * @param c the connection. - * - * @return the ClientKeyExchange byte buffer. 
- */ -tls.createClientKeyExchange = function(c) { - // create buffer to encrypt - var b = forge.util.createBuffer(); - - // add highest client-supported protocol to help server avoid version - // rollback attacks - b.putByte(c.session.clientHelloVersion.major); - b.putByte(c.session.clientHelloVersion.minor); - - // generate and add 46 random bytes - b.putBytes(forge.random.getBytes(46)); - - // save pre-master secret - var sp = c.session.sp; - sp.pre_master_secret = b.getBytes(); - - // RSA-encrypt the pre-master secret - var key = c.session.serverCertificate.publicKey; - b = key.encrypt(sp.pre_master_secret); - - /* Note: The encrypted pre-master secret will be stored in a - public-key-encrypted opaque vector that has the length prefixed using - 2 bytes, so include those 2 bytes in the handshake message length. This - is done as a minor optimization instead of calling writeVector(). */ - - // determine length of the handshake message - var length = b.length + 2; - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.client_key_exchange); - rval.putInt24(length); - // add vector length bytes - rval.putInt16(b.length); - rval.putBytes(b); - return rval; -}; + // 3. Return bytes. + return bytes +} /** - * Creates a ServerKeyExchange message. - * - * @param c the connection. - * - * @return the ServerKeyExchange byte buffer. + * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native + * @param {string} s */ -tls.createServerKeyExchange = function(c) { - // this implementation only supports RSA, no Diffie-Hellman support, - // so this record is empty - - // determine length of the handshake message - var length = 0; +function convertLineEndingsNative (s) { + // 1. Let native line ending be be the code point U+000A LF. + let nativeLineEnding = '\n' - // build record fragment - var rval = forge.util.createBuffer(); - if(length > 0) { - rval.putByte(tls.HandshakeType.server_key_exchange); - rval.putInt24(length); + // 2. If the underlying platform’s conventions are to + // represent newlines as a carriage return and line feed + // sequence, set native line ending to the code point + // U+000D CR followed by the code point U+000A LF. + if (process.platform === 'win32') { + nativeLineEnding = '\r\n' } - return rval; -}; -/** - * Gets the signed data used to verify a client-side certificate. See - * tls.createCertificateVerify() for details. - * - * @param c the connection. - * @param callback the callback to call once the signed data is ready. 
- */ -tls.getClientSignature = function(c, callback) { - // generate data to RSA encrypt - var b = forge.util.createBuffer(); - b.putBuffer(c.session.md5.digest()); - b.putBuffer(c.session.sha1.digest()); - b = b.getBytes(); - - // create default signing function as necessary - c.getSignature = c.getSignature || function(c, b, callback) { - // do rsa encryption, call callback - var privateKey = null; - if(c.getPrivateKey) { - try { - privateKey = c.getPrivateKey(c, c.session.clientCertificate); - privateKey = forge.pki.privateKeyFromPem(privateKey); - } catch(ex) { - c.error(c, { - message: 'Could not get private key.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } - } - if(privateKey === null) { - c.error(c, { - message: 'No private key set.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } else { - b = privateKey.sign(b, null); - } - callback(c, b); - }; + return s.replace(/\r?\n/g, nativeLineEnding) +} - // get client signature - c.getSignature(c, b, callback); -}; +// If this function is moved to ./util.js, some tools (such as +// rollup) will warn about circular dependencies. See: +// https://github.com/nodejs/undici/issues/1629 +function isFileLike (object) { + return ( + (NativeFile && object instanceof NativeFile) || + object instanceof File || ( + object && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + object[Symbol.toStringTag] === 'File' + ) + ) +} -/** - * Creates a CertificateVerify message. - * - * Meaning of this message: - * This structure conveys the client's Diffie-Hellman public value - * (Yc) if it was not already included in the client's certificate. - * The encoding used for Yc is determined by the enumerated - * PublicValueEncoding. This structure is a variant of the client - * key exchange message, not a message in itself. - * - * When this message will be sent: - * This message is used to provide explicit verification of a client - * certificate. This message is only sent following a client - * certificate that has signing capability (i.e. all certificates - * except those containing fixed Diffie-Hellman parameters). When - * sent, it will immediately follow the client key exchange message. - * - * struct { - * Signature signature; - * } CertificateVerify; - * - * CertificateVerify.signature.md5_hash - * MD5(handshake_messages); - * - * Certificate.signature.sha_hash - * SHA(handshake_messages); - * - * Here handshake_messages refers to all handshake messages sent or - * received starting at client hello up to but not including this - * message, including the type and length fields of the handshake - * messages. - * - * select(SignatureAlgorithm) { - * case anonymous: struct { }; - * case rsa: - * digitally-signed struct { - * opaque md5_hash[16]; - * opaque sha_hash[20]; - * }; - * case dsa: - * digitally-signed struct { - * opaque sha_hash[20]; - * }; - * } Signature; - * - * In digital signing, one-way hash functions are used as input for a - * signing algorithm. A digitally-signed element is encoded as an opaque - * vector <0..2^16-1>, where the length is specified by the signing - * algorithm and key. - * - * In RSA signing, a 36-byte structure of two hashes (one SHA and one - * MD5) is signed (encrypted with the private key). It is encoded with - * PKCS #1 block type 0 or type 1 as described in [PKCS1]. 
- * - * In DSS, the 20 bytes of the SHA hash are run directly through the - * Digital Signing Algorithm with no additional hashing. - * - * @param c the connection. - * @param signature the signature to include in the message. - * - * @return the CertificateVerify byte buffer. - */ -tls.createCertificateVerify = function(c, signature) { - /* Note: The signature will be stored in a "digitally-signed" opaque - vector that has the length prefixed using 2 bytes, so include those - 2 bytes in the handshake message length. This is done as a minor - optimization instead of calling writeVector(). */ - - // determine length of the handshake message - var length = signature.length + 2; - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.certificate_verify); - rval.putInt24(length); - // add vector length bytes - rval.putInt16(signature.length); - rval.putBytes(signature); - return rval; -}; +module.exports = { File, FileLike, isFileLike } -/** - * Creates a CertificateRequest message. - * - * @param c the connection. - * - * @return the CertificateRequest byte buffer. - */ -tls.createCertificateRequest = function(c) { - // TODO: support other certificate types - var certTypes = forge.util.createBuffer(); - - // common RSA certificate type - certTypes.putByte(0x01); - - // add distinguished names from CA store - var cAs = forge.util.createBuffer(); - for(var key in c.caStore.certs) { - var cert = c.caStore.certs[key]; - var dn = forge.pki.distinguishedNameToAsn1(cert.subject); - var byteBuffer = forge.asn1.toDer(dn); - cAs.putInt16(byteBuffer.length()); - cAs.putBuffer(byteBuffer); - } - - // TODO: TLS 1.2+ has a different format - - // determine length of the handshake message - var length = - 1 + certTypes.length() + - 2 + cAs.length(); - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.certificate_request); - rval.putInt24(length); - writeVector(rval, 1, certTypes); - writeVector(rval, 2, cAs); - return rval; -}; -/** - * Creates a ServerHelloDone message. - * - * @param c the connection. - * - * @return the ServerHelloDone byte buffer. - */ -tls.createServerHelloDone = function(c) { - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.server_hello_done); - rval.putInt24(0); - return rval; -}; +/***/ }), -/** - * Creates a ChangeCipherSpec message. - * - * The change cipher spec protocol exists to signal transitions in - * ciphering strategies. The protocol consists of a single message, - * which is encrypted and compressed under the current (not the pending) - * connection state. The message consists of a single byte of value 1. - * - * struct { - * enum { change_cipher_spec(1), (255) } type; - * } ChangeCipherSpec; - * - * @return the ChangeCipherSpec byte buffer. - */ -tls.createChangeCipherSpec = function() { - var rval = forge.util.createBuffer(); - rval.putByte(0x01); - return rval; -}; +/***/ 2015: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Creates a Finished message. - * - * struct { - * opaque verify_data[12]; - * } Finished; - * - * verify_data - * PRF(master_secret, finished_label, MD5(handshake_messages) + - * SHA-1(handshake_messages)) [0..11]; - * - * finished_label - * For Finished messages sent by the client, the string "client - * finished". For Finished messages sent by the server, the - * string "server finished". 
- * - * handshake_messages - * All of the data from all handshake messages up to but not - * including this message. This is only data visible at the - * handshake layer and does not include record layer headers. - * This is the concatenation of all the Handshake structures as - * defined in 7.4 exchanged thus far. - * - * @param c the connection. - * - * @return the Finished byte buffer. - */ -tls.createFinished = function(c) { - // generate verify_data - var b = forge.util.createBuffer(); - b.putBuffer(c.session.md5.digest()); - b.putBuffer(c.session.sha1.digest()); - - // TODO: determine prf function and verify length for TLS 1.2 - var client = (c.entity === tls.ConnectionEnd.client); - var sp = c.session.sp; - var vdl = 12; - var prf = prf_TLS1; - var label = client ? 'client finished' : 'server finished'; - b = prf(sp.master_secret, label, b.getBytes(), vdl); - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.finished); - rval.putInt24(b.length()); - rval.putBuffer(b); - return rval; -}; +"use strict"; -/** - * Creates a HeartbeatMessage (See RFC 6520). - * - * struct { - * HeartbeatMessageType type; - * uint16 payload_length; - * opaque payload[HeartbeatMessage.payload_length]; - * opaque padding[padding_length]; - * } HeartbeatMessage; - * - * The total length of a HeartbeatMessage MUST NOT exceed 2^14 or - * max_fragment_length when negotiated as defined in [RFC6066]. - * - * type: The message type, either heartbeat_request or heartbeat_response. - * - * payload_length: The length of the payload. - * - * payload: The payload consists of arbitrary content. - * - * padding: The padding is random content that MUST be ignored by the - * receiver. The length of a HeartbeatMessage is TLSPlaintext.length - * for TLS and DTLSPlaintext.length for DTLS. Furthermore, the - * length of the type field is 1 byte, and the length of the - * payload_length is 2. Therefore, the padding_length is - * TLSPlaintext.length - payload_length - 3 for TLS and - * DTLSPlaintext.length - payload_length - 3 for DTLS. The - * padding_length MUST be at least 16. - * - * The sender of a HeartbeatMessage MUST use a random padding of at - * least 16 bytes. The padding of a received HeartbeatMessage message - * MUST be ignored. - * - * If the payload_length of a received HeartbeatMessage is too large, - * the received HeartbeatMessage MUST be discarded silently. - * - * @param c the connection. - * @param type the tls.HeartbeatMessageType. - * @param payload the heartbeat data to send as the payload. - * @param [payloadLength] the payload length to use, defaults to the - * actual payload length. - * - * @return the HeartbeatRequest byte buffer. - */ -tls.createHeartbeat = function(type, payload, payloadLength) { - if(typeof payloadLength === 'undefined') { - payloadLength = payload.length; - } - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(type); // heartbeat message type - rval.putInt16(payloadLength); // payload length - rval.putBytes(payload); // payload - // padding - var plaintextLength = rval.length(); - var paddingLength = Math.max(16, plaintextLength - payloadLength - 3); - rval.putBytes(forge.random.getBytes(paddingLength)); - return rval; -}; -/** - * Fragments, compresses, encrypts, and queues a record for delivery. - * - * @param c the connection. - * @param record the record to queue. 
- */ -tls.queue = function(c, record) { - // error during record creation - if(!record) { - return; - } +const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(2538) +const { kState } = __nccwpck_require__(5861) +const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(8511) +const { webidl } = __nccwpck_require__(1744) +const { Blob, File: NativeFile } = __nccwpck_require__(4300) - if(record.fragment.length() === 0) { - if(record.type === tls.ContentType.handshake || - record.type === tls.ContentType.alert || - record.type === tls.ContentType.change_cipher_spec) { - // Empty handshake, alert of change cipher spec messages are not allowed per the TLS specification and should not be sent. - return; +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile + +// https://xhr.spec.whatwg.org/#formdata +class FormData { + constructor (form) { + if (form !== undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'FormData constructor', + argument: 'Argument 1', + types: ['undefined'] + }) } - } - // if the record is a handshake record, update handshake hashes - if(record.type === tls.ContentType.handshake) { - var bytes = record.fragment.bytes(); - c.session.md5.update(bytes); - c.session.sha1.update(bytes); - bytes = null; + this[kState] = [] } - // handle record fragmentation - var records; - if(record.fragment.length() <= tls.MaxFragment) { - records = [record]; - } else { - // fragment data as long as it is too long - records = []; - var data = record.fragment.bytes(); - while(data.length > tls.MaxFragment) { - records.push(tls.createRecord(c, { - type: record.type, - data: forge.util.createBuffer(data.slice(0, tls.MaxFragment)) - })); - data = data.slice(tls.MaxFragment); - } - // add last record - if(data.length > 0) { - records.push(tls.createRecord(c, { - type: record.type, - data: forge.util.createBuffer(data) - })); - } - } - - // compress and encrypt all fragmented records - for(var i = 0; i < records.length && !c.fail; ++i) { - // update the record using current write state - var rec = records[i]; - var s = c.state.current.write; - if(s.update(c, rec)) { - // store record - c.records.push(rec); + append (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" + ) } - } -}; -/** - * Flushes all queued records to the output buffer and calls the - * tlsDataReady() handler on the given connection. - * - * @param c the connection. - * - * @return true on success, false on failure. - */ -tls.flush = function(c) { - for(var i = 0; i < c.records.length; ++i) { - var record = c.records[i]; - - // add record header and fragment - c.tlsData.putByte(record.type); - c.tlsData.putByte(record.version.major); - c.tlsData.putByte(record.version.minor); - c.tlsData.putInt16(record.fragment.length()); - c.tlsData.putBuffer(c.records[i].fragment); - } - c.records = []; - return c.tlsDataReady(c); -}; + // 1. Let value be value if given; otherwise blobValue. -/** - * Maps a pki.certificateError to a tls.Alert.Description. - * - * @param error the error to map. - * - * @return the alert description. 
- */ -var _certErrorToAlertDesc = function(error) { - switch(error) { - case true: - return true; - case forge.pki.certificateError.bad_certificate: - return tls.Alert.Description.bad_certificate; - case forge.pki.certificateError.unsupported_certificate: - return tls.Alert.Description.unsupported_certificate; - case forge.pki.certificateError.certificate_revoked: - return tls.Alert.Description.certificate_revoked; - case forge.pki.certificateError.certificate_expired: - return tls.Alert.Description.certificate_expired; - case forge.pki.certificateError.certificate_unknown: - return tls.Alert.Description.certificate_unknown; - case forge.pki.certificateError.unknown_ca: - return tls.Alert.Description.unknown_ca; - default: - return tls.Alert.Description.bad_certificate; + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? webidl.converters.USVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with + // name, value, and filename if given. + const entry = makeEntry(name, value, filename) + + // 3. Append entry to this’s entry list. + this[kState].push(entry) } -}; -/** - * Maps a tls.Alert.Description to a pki.certificateError. - * - * @param desc the alert description. - * - * @return the certificate error. - */ -var _alertDescToCertError = function(desc) { - switch(desc) { - case true: - return true; - case tls.Alert.Description.bad_certificate: - return forge.pki.certificateError.bad_certificate; - case tls.Alert.Description.unsupported_certificate: - return forge.pki.certificateError.unsupported_certificate; - case tls.Alert.Description.certificate_revoked: - return forge.pki.certificateError.certificate_revoked; - case tls.Alert.Description.certificate_expired: - return forge.pki.certificateError.certificate_expired; - case tls.Alert.Description.certificate_unknown: - return forge.pki.certificateError.certificate_unknown; - case tls.Alert.Description.unknown_ca: - return forge.pki.certificateError.unknown_ca; - default: - return forge.pki.certificateError.bad_certificate; + delete (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + + name = webidl.converters.USVString(name) + + // The delete(name) method steps are to remove all entries whose name + // is name from this’s entry list. + this[kState] = this[kState].filter(entry => entry.name !== name) } -}; -/** - * Verifies a certificate chain against the given connection's - * Certificate Authority store. - * - * @param c the TLS connection. - * @param chain the certificate chain to verify, with the root or highest - * authority at the end. - * - * @return true if successful, false if not. - */ -tls.verifyCertificateChain = function(c, chain) { - try { - // Make a copy of c.verifyOptions so that we can modify options.verify - // without modifying c.verifyOptions. 
- var options = {}; - for (var key in c.verifyOptions) { - options[key] = c.verifyOptions[key]; - } - - options.verify = function(vfd, depth, chain) { - // convert pki.certificateError to tls alert description - var desc = _certErrorToAlertDesc(vfd); - - // call application callback - var ret = c.verify(c, vfd, depth, chain); - if(ret !== true) { - if(typeof ret === 'object' && !forge.util.isArray(ret)) { - // throw custom error - var error = new Error('The application rejected the certificate.'); - error.send = true; - error.alert = { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.bad_certificate - }; - if(ret.message) { - error.message = ret.message; - } - if(ret.alert) { - error.alert.description = ret.alert; - } - throw error; - } + get (name) { + webidl.brandCheck(this, FormData) - // convert tls alert description to pki.certificateError - if(ret !== vfd) { - ret = _alertDescToCertError(ret); - } - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) - return ret; - }; + name = webidl.converters.USVString(name) - // verify chain - forge.pki.verifyCertificateChain(c.caStore, chain, options); - } catch(ex) { - // build tls error if not already customized - var err = ex; - if(typeof err !== 'object' || forge.util.isArray(err)) { - err = { - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: _certErrorToAlertDesc(ex) - } - }; - } - if(!('send' in err)) { - err.send = true; - } - if(!('alert' in err)) { - err.alert = { - level: tls.Alert.Level.fatal, - description: _certErrorToAlertDesc(err.error) - }; + // 1. If there is no entry whose name is name in this’s entry list, + // then return null. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx === -1) { + return null } - // send error - c.error(c, err); + // 2. Return the value of the first entry whose name is name from + // this’s entry list. + return this[kState][idx].value } - return !c.fail; -}; + getAll (name) { + webidl.brandCheck(this, FormData) -/** - * Creates a new TLS session cache. - * - * @param cache optional map of session ID to cached session. - * @param capacity the maximum size for the cache (default: 100). - * - * @return the new TLS session cache. - */ -tls.createSessionCache = function(cache, capacity) { - var rval = null; + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) - // assume input is already a session cache object - if(cache && cache.getSession && cache.setSession && cache.order) { - rval = cache; - } else { - // create cache - rval = {}; - rval.cache = cache || {}; - rval.capacity = Math.max(capacity || 100, 1); - rval.order = []; - - // store order for sessions, delete session overflow - for(var key in cache) { - if(rval.order.length <= capacity) { - rval.order.push(key); - } else { - delete cache[key]; - } - } + name = webidl.converters.USVString(name) - // get a session from a session ID (or get any session) - rval.getSession = function(sessionId) { - var session = null; - var key = null; + // 1. If there is no entry whose name is name in this’s entry list, + // then return the empty list. + // 2. Return the values of all entries whose name is name, in order, + // from this’s entry list. 
+ return this[kState] + .filter((entry) => entry.name === name) + .map((entry) => entry.value) + } - // if session ID provided, use it - if(sessionId) { - key = forge.util.bytesToHex(sessionId); - } else if(rval.order.length > 0) { - // get first session from cache - key = rval.order[0]; - } + has (name) { + webidl.brandCheck(this, FormData) - if(key !== null && key in rval.cache) { - // get cached session and remove from cache - session = rval.cache[key]; - delete rval.cache[key]; - for(var i in rval.order) { - if(rval.order[i] === key) { - rval.order.splice(i, 1); - break; - } - } - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) - return session; - }; + name = webidl.converters.USVString(name) - // set a session in the cache - rval.setSession = function(sessionId, session) { - // remove session from cache if at capacity - if(rval.order.length === rval.capacity) { - var key = rval.order.shift(); - delete rval.cache[key]; - } - // add session to cache - var key = forge.util.bytesToHex(sessionId); - rval.order.push(key); - rval.cache[key] = session; - }; + // The has(name) method steps are to return true if there is an entry + // whose name is name in this’s entry list; otherwise false. + return this[kState].findIndex((entry) => entry.name === name) !== -1 } - return rval; -}; + set (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) -/** - * Creates a new TLS connection. - * - * See public createConnection() docs for more details. - * - * @param options the options for this connection. - * - * @return the new TLS connection. - */ -tls.createConnection = function(options) { - var caStore = null; - if(options.caStore) { - // if CA store is an array, convert it to a CA store object - if(forge.util.isArray(options.caStore)) { - caStore = forge.pki.createCaStore(options.caStore); + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // The set(name, value) and set(name, blobValue, filename) method steps + // are: + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? toUSVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with name, value, and + // filename if given. + const entry = makeEntry(name, value, filename) + + // 3. If there are entries in this’s entry list whose name is name, then + // replace the first such entry with entry and remove the others. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx !== -1) { + this[kState] = [ + ...this[kState].slice(0, idx), + entry, + ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) + ] } else { - caStore = options.caStore; + // 4. Otherwise, append entry to this’s entry list. + this[kState].push(entry) } - } else { - // create empty CA store - caStore = forge.pki.createCaStore(); - } - - // setup default cipher suites - var cipherSuites = options.cipherSuites || null; - if(cipherSuites === null) { - cipherSuites = []; - for(var key in tls.CipherSuites) { - cipherSuites.push(tls.CipherSuites[key]); - } - } - - // set default entity - var entity = (options.server || false) ? 
- tls.ConnectionEnd.server : tls.ConnectionEnd.client; - - // create session cache if requested - var sessionCache = options.sessionCache ? - tls.createSessionCache(options.sessionCache) : null; - - // create TLS connection - var c = { - version: {major: tls.Version.major, minor: tls.Version.minor}, - entity: entity, - sessionId: options.sessionId, - caStore: caStore, - sessionCache: sessionCache, - cipherSuites: cipherSuites, - connected: options.connected, - virtualHost: options.virtualHost || null, - verifyClient: options.verifyClient || false, - verify: options.verify || function(cn, vfd, dpth, cts) {return vfd;}, - verifyOptions: options.verifyOptions || {}, - getCertificate: options.getCertificate || null, - getPrivateKey: options.getPrivateKey || null, - getSignature: options.getSignature || null, - input: forge.util.createBuffer(), - tlsData: forge.util.createBuffer(), - data: forge.util.createBuffer(), - tlsDataReady: options.tlsDataReady, - dataReady: options.dataReady, - heartbeatReceived: options.heartbeatReceived, - closed: options.closed, - error: function(c, ex) { - // set origin if not set - ex.origin = ex.origin || - ((c.entity === tls.ConnectionEnd.client) ? 'client' : 'server'); - - // send TLS alert - if(ex.send) { - tls.queue(c, tls.createAlert(c, ex.alert)); - tls.flush(c); - } + } - // error is fatal by default - var fatal = (ex.fatal !== false); - if(fatal) { - // set fail flag - c.fail = true; - } + entries () { + webidl.brandCheck(this, FormData) - // call error handler first - options.error(c, ex); + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key+value' + ) + } - if(fatal) { - // fatal error, close connection, do not clear fail - c.close(false); - } - }, - deflate: options.deflate || null, - inflate: options.inflate || null - }; + keys () { + webidl.brandCheck(this, FormData) - /** - * Resets a closed TLS connection for reuse. Called in c.close(). - * - * @param clearFail true to clear the fail flag (default: true). - */ - c.reset = function(clearFail) { - c.version = {major: tls.Version.major, minor: tls.Version.minor}; - c.record = null; - c.session = null; - c.peerCertificate = null; - c.state = { - pending: null, - current: null - }; - c.expect = (c.entity === tls.ConnectionEnd.client) ? SHE : CHE; - c.fragmented = null; - c.records = []; - c.open = false; - c.handshakes = 0; - c.handshaking = false; - c.isConnected = false; - c.fail = !(clearFail || typeof(clearFail) === 'undefined'); - c.input.clear(); - c.tlsData.clear(); - c.data.clear(); - c.state.current = tls.createConnectionState(c); - }; + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key' + ) + } - // do initial reset of connection - c.reset(); + values () { + webidl.brandCheck(this, FormData) - /** - * Updates the current TLS engine state based on the given record. - * - * @param c the TLS connection. - * @param record the TLS record to act on. 
- */ - var _update = function(c, record) { - // get record handler (align type in table by subtracting lowest) - var aligned = record.type - tls.ContentType.change_cipher_spec; - var handlers = ctTable[c.entity][c.expect]; - if(aligned in handlers) { - handlers[aligned](c, record); - } else { - // unexpected record - tls.handleUnexpected(c, record); - } - }; + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'value' + ) + } /** - * Reads the record header and initializes the next record on the given - * connection. - * - * @param c the TLS connection with the next record. - * - * @return 0 if the input data could be processed, otherwise the - * number of bytes required for data to be processed. + * @param {(value: string, key: string, self: FormData) => void} callbackFn + * @param {unknown} thisArg */ - var _readRecordHeader = function(c) { - var rval = 0; + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, FormData) - // get input buffer and its length - var b = c.input; - var len = b.length(); + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) - // need at least 5 bytes to initialize a record - if(len < 5) { - rval = 5 - len; - } else { - // enough bytes for header - // initialize record - c.record = { - type: b.getByte(), - version: { - major: b.getByte(), - minor: b.getByte() - }, - length: b.getInt16(), - fragment: forge.util.createBuffer(), - ready: false - }; + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." + ) + } - // check record version - var compatibleVersion = (c.record.version.major === c.version.major); - if(compatibleVersion && c.session && c.session.version) { - // session version already set, require same minor version - compatibleVersion = (c.record.version.minor === c.version.minor); - } - if(!compatibleVersion) { - c.error(c, { - message: 'Incompatible TLS version.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.protocol_version - } - }); - } + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) } + } +} - return rval; - }; +FormData.prototype[Symbol.iterator] = FormData.prototype.entries - /** - * Reads the next record's contents and appends its message to any - * previously fragmented message. - * - * @param c the TLS connection with the next record. - * - * @return 0 if the input data could be processed, otherwise the - * number of bytes required for data to be processed. 
- */ - var _readRecord = function(c) { - var rval = 0; - - // ensure there is enough input data to get the entire record - var b = c.input; - var len = b.length(); - if(len < c.record.length) { - // not enough data yet, return how much is required - rval = c.record.length - len; - } else { - // there is enough data to parse the pending record - // fill record fragment and compact input buffer - c.record.fragment.putBytes(b.getBytes(c.record.length)); - b.compact(); - - // update record using current read state - var s = c.state.current.read; - if(s.update(c, c.record)) { - // see if there is a previously fragmented message that the - // new record's message fragment should be appended to - if(c.fragmented !== null) { - // if the record type matches a previously fragmented - // record, append the record fragment to it - if(c.fragmented.type === c.record.type) { - // concatenate record fragments - c.fragmented.fragment.putBuffer(c.record.fragment); - c.record = c.fragmented; - } else { - // error, invalid fragmented record - c.error(c, { - message: 'Invalid fragmented record.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: - tls.Alert.Description.unexpected_message - } - }); - } - } +Object.defineProperties(FormData.prototype, { + [Symbol.toStringTag]: { + value: 'FormData', + configurable: true + } +}) + +/** + * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry + * @param {string} name + * @param {string|Blob} value + * @param {?string} filename + * @returns + */ +function makeEntry (name, value, filename) { + // 1. Set name to the result of converting name into a scalar value string. + // "To convert a string into a scalar value string, replace any surrogates + // with U+FFFD." + // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end + name = Buffer.from(name).toString('utf8') - // record is now ready - c.record.ready = true; + // 2. If value is a string, then set value to the result of converting + // value into a scalar value string. + if (typeof value === 'string') { + value = Buffer.from(value).toString('utf8') + } else { + // 3. Otherwise: + + // 1. If value is not a File object, then set value to a new File object, + // representing the same bytes, whose name attribute value is "blob" + if (!isFileLike(value)) { + value = value instanceof Blob + ? new File([value], 'blob', { type: value.type }) + : new FileLike(value, 'blob', { type: value.type }) + } + + // 2. If filename is given, then set value to a new File object, + // representing the same bytes, whose name attribute is filename. + if (filename !== undefined) { + /** @type {FilePropertyBag} */ + const options = { + type: value.type, + lastModified: value.lastModified } + + value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile + ? new File([value], filename, options) + : new FileLike(value, filename, options) } + } - return rval; - }; + // 4. Return an entry whose name is name and whose value is value. + return { name, value } +} - /** - * Performs a handshake using the TLS Handshake Protocol, as a client. - * - * This method should only be called if the connection is in client mode. - * - * @param sessionId the session ID to use, null to start a new one. 
- */ - c.handshake = function(sessionId) { - // error to call this in non-client mode - if(c.entity !== tls.ConnectionEnd.client) { - // not fatal error - c.error(c, { - message: 'Cannot initiate handshake as a server.', - fatal: false - }); - } else if(c.handshaking) { - // handshake is already in progress, fail but not fatal error - c.error(c, { - message: 'Handshake already in progress.', - fatal: false - }); - } else { - // clear fail flag on reuse - if(c.fail && !c.open && c.handshakes === 0) { - c.fail = false; - } +module.exports = { FormData } - // now handshaking - c.handshaking = true; - // default to blank (new session) - sessionId = sessionId || ''; +/***/ }), - // if a session ID was specified, try to find it in the cache - var session = null; - if(sessionId.length > 0) { - if(c.sessionCache) { - session = c.sessionCache.getSession(sessionId); - } +/***/ 1246: +/***/ ((module) => { - // matching session not found in cache, clear session ID - if(session === null) { - sessionId = ''; - } - } +"use strict"; - // no session given, grab a session from the cache, if available - if(sessionId.length === 0 && c.sessionCache) { - session = c.sessionCache.getSession(); - if(session !== null) { - sessionId = session.id; - } - } - // set up session - c.session = { - id: sessionId, - version: null, - cipherSuite: null, - compressionMethod: null, - serverCertificate: null, - certificateRequest: null, - clientCertificate: null, - sp: {}, - md5: forge.md.md5.create(), - sha1: forge.md.sha1.create() - }; +// In case of breaking changes, increase the version +// number to avoid conflicts. +const globalOrigin = Symbol.for('undici.globalOrigin.1') - // use existing session information - if(session) { - // only update version on connection, session version not yet set - c.version = session.version; - c.session.sp = session.sp; - } +function getGlobalOrigin () { + return globalThis[globalOrigin] +} - // generate new client random - c.session.sp.client_random = tls.createRandom().getBytes(); +function setGlobalOrigin (newOrigin) { + if (newOrigin === undefined) { + Object.defineProperty(globalThis, globalOrigin, { + value: undefined, + writable: true, + enumerable: false, + configurable: false + }) - // connection now open - c.open = true; + return + } - // send hello - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createClientHello(c) - })); - tls.flush(c); - } - }; + const parsedURL = new URL(newOrigin) - /** - * Called when TLS protocol data has been received from somewhere and should - * be processed by the TLS engine. - * - * @param data the TLS protocol data, as a string, to process. - * - * @return 0 if the data could be processed, otherwise the number of bytes - * required for data to be processed. 
- */ - c.process = function(data) { - var rval = 0; + if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { + throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) + } - // buffer input data - if(data) { - c.input.putBytes(data); - } + Object.defineProperty(globalThis, globalOrigin, { + value: parsedURL, + writable: true, + enumerable: false, + configurable: false + }) +} - // process next record if no failure, process will be called after - // each record is handled (since handling can be asynchronous) - if(!c.fail) { - // reset record if ready and now empty - if(c.record !== null && - c.record.ready && c.record.fragment.isEmpty()) { - c.record = null; - } +module.exports = { + getGlobalOrigin, + setGlobalOrigin +} - // if there is no pending record, try to read record header - if(c.record === null) { - rval = _readRecordHeader(c); - } - // read the next record (if record not yet ready) - if(!c.fail && c.record !== null && !c.record.ready) { - rval = _readRecord(c); - } +/***/ }), - // record ready to be handled, update engine state - if(!c.fail && c.record !== null && c.record.ready) { - _update(c, c.record); - } - } +/***/ 554: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return rval; - }; +"use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch - /** - * Requests that application data be packaged into a TLS record. The - * tlsDataReady handler will be called when the TLS record(s) have been - * prepared. - * - * @param data the application data, as a raw 'binary' encoded string, to - * be sent; to send utf-16/utf-8 string data, use the return value - * of util.encodeUtf8(str). - * - * @return true on success, false on failure. - */ - c.prepare = function(data) { - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.application_data, - data: forge.util.createBuffer(data) - })); - return tls.flush(c); - }; - /** - * Requests that a heartbeat request be packaged into a TLS record for - * transmission. The tlsDataReady handler will be called when TLS record(s) - * have been prepared. - * - * When a heartbeat response has been received, the heartbeatReceived - * handler will be called with the matching payload. This handler can - * be used to clear a retransmission timer, etc. - * - * @param payload the heartbeat data to send as the payload in the message. - * @param [payloadLength] the payload length to use, defaults to the - * actual payload length. - * - * @return true on success, false on failure. - */ - c.prepareHeartbeatRequest = function(payload, payloadLength) { - if(payload instanceof forge.util.ByteBuffer) { - payload = payload.bytes(); - } - if(typeof payloadLength === 'undefined') { - payloadLength = payload.length; - } - c.expectedHeartbeatPayload = payload; - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.heartbeat, - data: tls.createHeartbeat( - tls.HeartbeatMessageType.heartbeat_request, payload, payloadLength) - })); - return tls.flush(c); - }; - /** - * Closes the connection (sends a close_notify alert). - * - * @param clearFail true to clear the fail flag (default: true). 
- */ - c.close = function(clearFail) { - // save session if connection didn't fail - if(!c.fail && c.sessionCache && c.session) { - // only need to preserve session ID, version, and security params - var session = { - id: c.session.id, - version: c.session.version, - sp: c.session.sp - }; - session.sp.keys = null; - c.sessionCache.setSession(session.id, session); - } +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) +const { kGuard } = __nccwpck_require__(5861) +const { kEnumerableProperty } = __nccwpck_require__(3983) +const { + makeIterator, + isValidHeaderName, + isValidHeaderValue +} = __nccwpck_require__(2538) +const { webidl } = __nccwpck_require__(1744) +const assert = __nccwpck_require__(9491) + +const kHeadersMap = Symbol('headers map') +const kHeadersSortedMap = Symbol('headers map sorted') - if(c.open) { - // connection no longer open, clear input - c.open = false; - c.input.clear(); +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} - // if connected or handshaking, send an alert - if(c.isConnected || c.handshaking) { - c.isConnected = c.handshaking = false; +/** + * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize + * @param {string} potentialValue + */ +function headerValueNormalize (potentialValue) { + // To normalize a byte sequence potentialValue, remove + // any leading and trailing HTTP whitespace bytes from + // potentialValue. + let i = 0; let j = potentialValue.length - // send close_notify alert - tls.queue(c, tls.createAlert(c, { - level: tls.Alert.Level.warning, - description: tls.Alert.Description.close_notify - })); - tls.flush(c); - } + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i - // call handler - c.closed(c); - } + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) +} - // reset TLS connection, do not clear fail flag - c.reset(clearFail); - }; +function fill (headers, object) { + // To fill a Headers object headers with a given object object, run these steps: - return c; -}; + // 1. If object is a sequence, then for each header in object: + // Note: webidl conversion to array has already been done. + if (Array.isArray(object)) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] + // 1. If header does not contain exactly two items, then throw a TypeError. + if (header.length !== 2) { + throw webidl.errors.exception({ + header: 'Headers constructor', + message: `expected name/value pair to be length 2, found ${header.length}.` + }) + } -/* TLS API */ -module.exports = forge.tls = forge.tls || {}; + // 2. Append (header’s first item, header’s second item) to headers. + appendHeader(headers, header[0], header[1]) + } + } else if (typeof object === 'object' && object !== null) { + // Note: null should throw -// expose non-functions -for(var key in tls) { - if(typeof tls[key] !== 'function') { - forge.tls[key] = tls[key]; + // 2. 
Otherwise, object is a record, then for each key → value in object, + // append (key, value) to headers + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) + } + } else { + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) } } -// expose prf_tls1 for testing -forge.tls.prf_tls1 = prf_TLS1; +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) -// expose sha1 hmac method -forge.tls.hmac_sha1 = hmac_sha1; + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } -// expose session cache creation -forge.tls.createSessionCache = tls.createSessionCache; + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } -/** - * Creates a new TLS connection. This does not make any assumptions about the - * transport layer that TLS is working on top of, ie: it does not assume there - * is a TCP/IP connection or establish one. A TLS connection is totally - * abstracted away from the layer is runs on top of, it merely establishes a - * secure channel between a client" and a "server". - * - * A TLS connection contains 4 connection states: pending read and write, and - * current read and write. - * - * At initialization, the current read and write states will be null. Only once - * the security parameters have been set and the keys have been generated can - * the pending states be converted into current states. Current states will be - * updated for each record processed. - * - * A custom certificate verify callback may be provided to check information - * like the common name on the server's certificate. It will be called for - * every certificate in the chain. It has the following signature: - * - * variable func(c, certs, index, preVerify) - * Where: - * c The TLS connection - * verified Set to true if certificate was verified, otherwise the alert - * tls.Alert.Description for why the certificate failed. - * depth The current index in the chain, where 0 is the server's cert. - * certs The certificate chain, *NOTE* if the server was anonymous then - * the chain will be empty. - * - * The function returns true on success and on failure either the appropriate - * tls.Alert.Description or an object with 'alert' set to the appropriate - * tls.Alert.Description and 'message' set to a custom error message. If true - * is not returned then the connection will abort using, in order of - * availability, first the returned alert description, second the preVerify - * alert description, and lastly the default 'bad_certificate'. 
- * - * There are three callbacks that can be used to make use of client-side - * certificates where each takes the TLS connection as the first parameter: - * - * getCertificate(conn, hint) - * The second parameter is a hint as to which certificate should be - * returned. If the connection entity is a client, then the hint will be - * the CertificateRequest message from the server that is part of the - * TLS protocol. If the connection entity is a server, then it will be - * the servername list provided via an SNI extension the ClientHello, if - * one was provided (empty array if not). The hint can be examined to - * determine which certificate to use (advanced). Most implementations - * will just return a certificate. The return value must be a - * PEM-formatted certificate or an array of PEM-formatted certificates - * that constitute a certificate chain, with the first in the array/chain - * being the client's certificate. - * getPrivateKey(conn, certificate) - * The second parameter is an forge.pki X.509 certificate object that - * is associated with the requested private key. The return value must - * be a PEM-formatted private key. - * getSignature(conn, bytes, callback) - * This callback can be used instead of getPrivateKey if the private key - * is not directly accessible in javascript or should not be. For - * instance, a secure external web service could provide the signature - * in exchange for appropriate credentials. The second parameter is a - * string of bytes to be signed that are part of the TLS protocol. These - * bytes are used to verify that the private key for the previously - * provided client-side certificate is accessible to the client. The - * callback is a function that takes 2 parameters, the TLS connection - * and the RSA encrypted (signed) bytes as a string. This callback must - * be called once the signature is ready. - * - * @param options the options for this connection: - * server: true if the connection is server-side, false for client. - * sessionId: a session ID to reuse, null for a new connection. - * caStore: an array of certificates to trust. - * sessionCache: a session cache to use. - * cipherSuites: an optional array of cipher suites to use, - * see tls.CipherSuites. - * connected: function(conn) called when the first handshake completes. - * virtualHost: the virtual server name to use in a TLS SNI extension. - * verifyClient: true to require a client certificate in server mode, - * 'optional' to request one, false not to (default: false). - * verify: a handler used to custom verify certificates in the chain. - * verifyOptions: an object with options for the certificate chain validation. - * See documentation of pki.verifyCertificateChain for possible options. - * verifyOptions.verify is ignored. If you wish to specify a verify handler - * use the verify key. - * getCertificate: an optional callback used to get a certificate or - * a chain of certificates (as an array). - * getPrivateKey: an optional callback used to get a private key. - * getSignature: an optional callback used to get a signature. - * tlsDataReady: function(conn) called when TLS protocol data has been - * prepared and is ready to be used (typically sent over a socket - * connection to its destination), read from conn.tlsData buffer. - * dataReady: function(conn) called when application data has - * been parsed from a TLS record and should be consumed by the - * application, read from conn.data buffer. - * closed: function(conn) called when the connection has been closed. 
- * error: function(conn, error) called when there was an error. - * deflate: function(inBytes) if provided, will deflate TLS records using - * the deflate algorithm if the server supports it. - * inflate: function(inBytes) if provided, will inflate TLS records using - * the deflate algorithm if the server supports it. - * - * @return the new TLS connection. - */ -forge.tls.createConnection = tls.createConnection; + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) -/***/ }), + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} -/***/ 8339: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class HeadersList { + /** @type {[string, string][]|null} */ + cookies = null -/** - * Utility functions for web applications. - * - * @author Dave Longley - * - * Copyright (c) 2010-2018 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -var baseN = __nccwpck_require__(2300); - -/* Utilities API */ -var util = module.exports = forge.util = forge.util || {}; - -// define setImmediate and nextTick -(function() { - // use native nextTick (unless we're in webpack) - // webpack (or better node-libs-browser polyfill) sets process.browser. - // this way we can detect webpack properly - if(typeof process !== 'undefined' && process.nextTick && !process.browser) { - util.nextTick = process.nextTick; - if(typeof setImmediate === 'function') { - util.setImmediate = setImmediate; + constructor (init) { + if (init instanceof HeadersList) { + this[kHeadersMap] = new Map(init[kHeadersMap]) + this[kHeadersSortedMap] = init[kHeadersSortedMap] + this.cookies = init.cookies === null ? null : [...init.cookies] } else { - // polyfill setImmediate with nextTick, older versions of node - // (those w/o setImmediate) won't totally starve IO - util.setImmediate = util.nextTick; + this[kHeadersMap] = new Map(init) + this[kHeadersSortedMap] = null } - return; } - // polyfill nextTick with native setImmediate - if(typeof setImmediate === 'function') { - util.setImmediate = function() { return setImmediate.apply(undefined, arguments); }; - util.nextTick = function(callback) { - return setImmediate(callback); - }; - return; + // https://fetch.spec.whatwg.org/#header-list-contains + contains (name) { + // A header list list contains a header name name if list + // contains a header whose name is a byte-case-insensitive + // match for name. + name = name.toLowerCase() + + return this[kHeadersMap].has(name) } - /* Note: A polyfill upgrade pattern is used here to allow combining - polyfills. For example, MutationObserver is fast, but blocks UI updates, - so it needs to allow UI updates periodically, so it falls back on - postMessage or setTimeout. 
*/ + clear () { + this[kHeadersMap].clear() + this[kHeadersSortedMap] = null + this.cookies = null + } - // polyfill with setTimeout - util.setImmediate = function(callback) { - setTimeout(callback, 0); - }; + // https://fetch.spec.whatwg.org/#concept-header-list-append + append (name, value) { + this[kHeadersSortedMap] = null - // upgrade polyfill to use postMessage - if(typeof window !== 'undefined' && - typeof window.postMessage === 'function') { - var msg = 'forge.setImmediate'; - var callbacks = []; - util.setImmediate = function(callback) { - callbacks.push(callback); - // only send message when one hasn't been sent in - // the current turn of the event loop - if(callbacks.length === 1) { - window.postMessage(msg, '*'); - } - }; - function handler(event) { - if(event.source === window && event.data === msg) { - event.stopPropagation(); - var copy = callbacks.slice(); - callbacks.length = 0; - copy.forEach(function(callback) { - callback(); - }); - } - } - window.addEventListener('message', handler, true); - } + // 1. If list contains name, then set name to the first such + // header’s name. + const lowercaseName = name.toLowerCase() + const exists = this[kHeadersMap].get(lowercaseName) - // upgrade polyfill to use MutationObserver - if(typeof MutationObserver !== 'undefined') { - // polyfill with MutationObserver - var now = Date.now(); - var attr = true; - var div = document.createElement('div'); - var callbacks = []; - new MutationObserver(function() { - var copy = callbacks.slice(); - callbacks.length = 0; - copy.forEach(function(callback) { - callback(); - }); - }).observe(div, {attributes: true}); - var oldSetImmediate = util.setImmediate; - util.setImmediate = function(callback) { - if(Date.now() - now > 15) { - now = Date.now(); - oldSetImmediate(callback); - } else { - callbacks.push(callback); - // only trigger observer when it hasn't been triggered in - // the current turn of the event loop - if(callbacks.length === 1) { - div.setAttribute('a', attr = !attr); - } - } - }; - } + // 2. Append (name, value) to list. + if (exists) { + const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' + this[kHeadersMap].set(lowercaseName, { + name: exists.name, + value: `${exists.value}${delimiter}${value}` + }) + } else { + this[kHeadersMap].set(lowercaseName, { name, value }) + } - util.nextTick = util.setImmediate; -})(); + if (lowercaseName === 'set-cookie') { + this.cookies ??= [] + this.cookies.push(value) + } + } -// check if running under Node.js -util.isNodejs = - typeof process !== 'undefined' && process.versions && process.versions.node; + // https://fetch.spec.whatwg.org/#concept-header-list-set + set (name, value) { + this[kHeadersSortedMap] = null + const lowercaseName = name.toLowerCase() + if (lowercaseName === 'set-cookie') { + this.cookies = [value] + } -// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while -// it will point to `window` in the main thread. -// To remain compatible with older browsers, we fall back to 'window' if 'self' -// is not available. -util.globalScope = (function() { - if(util.isNodejs) { - return global; + // 1. If list contains name, then set the value of + // the first such header to value and remove the + // others. + // 2. Otherwise, append header (name, value) to list. + this[kHeadersMap].set(lowercaseName, { name, value }) } - return typeof self === 'undefined' ? 
window : self; -})(); + // https://fetch.spec.whatwg.org/#concept-header-list-delete + delete (name) { + this[kHeadersSortedMap] = null -// define isArray -util.isArray = Array.isArray || function(x) { - return Object.prototype.toString.call(x) === '[object Array]'; -}; + name = name.toLowerCase() -// define isArrayBuffer -util.isArrayBuffer = function(x) { - return typeof ArrayBuffer !== 'undefined' && x instanceof ArrayBuffer; -}; + if (name === 'set-cookie') { + this.cookies = null + } -// define isArrayBufferView -util.isArrayBufferView = function(x) { - return x && util.isArrayBuffer(x.buffer) && x.byteLength !== undefined; -}; + this[kHeadersMap].delete(name) + } -/** - * Ensure a bits param is 8, 16, 24, or 32. Used to validate input for - * algorithms where bit manipulation, JavaScript limitations, and/or algorithm - * design only allow for byte operations of a limited size. - * - * @param n number of bits. - * - * Throw Error if n invalid. - */ -function _checkBitsParam(n) { - if(!(n === 8 || n === 16 || n === 24 || n === 32)) { - throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n); + // https://fetch.spec.whatwg.org/#concept-header-list-get + get (name) { + const value = this[kHeadersMap].get(name.toLowerCase()) + + // 1. If list does not contain name, then return null. + // 2. Return the values of all headers in list whose name + // is a byte-case-insensitive match for name, + // separated from each other by 0x2C 0x20, in order. + return value === undefined ? null : value.value } -} -// TODO: set ByteBuffer to best available backing -util.ByteBuffer = ByteStringBuffer; + * [Symbol.iterator] () { + // use the lowercased name + for (const [name, { value }] of this[kHeadersMap]) { + yield [name, value] + } + } -/** Buffer w/BinaryString backing */ + get entries () { + const headers = {} -/** - * Constructor for a binary string backed byte buffer. - * - * @param [b] the bytes to wrap (either encoded as string, one byte per - * character, or as an ArrayBuffer or Typed Array). - */ -function ByteStringBuffer(b) { - // TODO: update to match DataBuffer API - - // the data in this buffer - this.data = ''; - // the pointer for reading from this buffer - this.read = 0; - - if(typeof b === 'string') { - this.data = b; - } else if(util.isArrayBuffer(b) || util.isArrayBufferView(b)) { - if(typeof Buffer !== 'undefined' && b instanceof Buffer) { - this.data = b.toString('binary'); - } else { - // convert native buffer to forge buffer - // FIXME: support native buffers internally instead - var arr = new Uint8Array(b); - try { - this.data = String.fromCharCode.apply(null, arr); - } catch(e) { - for(var i = 0; i < arr.length; ++i) { - this.putByte(arr[i]); - } + if (this[kHeadersMap].size) { + for (const { name, value } of this[kHeadersMap].values()) { + headers[name] = value } } - } else if(b instanceof ByteStringBuffer || - (typeof b === 'object' && typeof b.data === 'string' && - typeof b.read === 'number')) { - // copy existing buffer - this.data = b.data; - this.read = b.read; - } - - // used for v8 optimization - this._constructedStringLength = 0; -} -util.ByteStringBuffer = ByteStringBuffer; - -/* Note: This is an optimization for V8-based browsers. When V8 concatenates - a string, the strings are only joined logically using a "cons string" or - "constructed/concatenated string". These containers keep references to one - another and can result in very large memory usage. 
For example, if a 2MB - string is constructed by concatenating 4 bytes together at a time, the - memory usage will be ~44MB; so ~22x increase. The strings are only joined - together when an operation requiring their joining takes place, such as - substr(). This function is called when adding data to this buffer to ensure - these types of strings are periodically joined to reduce the memory - footprint. */ -var _MAX_CONSTRUCTED_STRING_LENGTH = 4096; -util.ByteStringBuffer.prototype._optimizeConstructedString = function(x) { - this._constructedStringLength += x; - if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) { - // this substr() should cause the constructed string to join - this.data.substr(0, 1); - this._constructedStringLength = 0; + + return headers } -}; +} -/** - * Gets the number of bytes in this buffer. - * - * @return the number of bytes in this buffer. - */ -util.ByteStringBuffer.prototype.length = function() { - return this.data.length - this.read; -}; +// https://fetch.spec.whatwg.org/#headers-class +class Headers { + constructor (init = undefined) { + if (init === kConstruct) { + return + } + this[kHeadersList] = new HeadersList() -/** - * Gets whether or not this buffer is empty. - * - * @return true if this buffer is empty, false if not. - */ -util.ByteStringBuffer.prototype.isEmpty = function() { - return this.length() <= 0; -}; + // The new Headers(init) constructor steps are: -/** - * Puts a byte in this buffer. - * - * @param b the byte to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putByte = function(b) { - return this.putBytes(String.fromCharCode(b)); -}; + // 1. Set this’s guard to "none". + this[kGuard] = 'none' -/** - * Puts a byte in this buffer N times. - * - * @param b the byte to put. - * @param n the number of bytes of value b to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.fillWithByte = function(b, n) { - b = String.fromCharCode(b); - var d = this.data; - while(n > 0) { - if(n & 1) { - d += b; - } - n >>>= 1; - if(n > 0) { - b += b; + // 2. If init is given, then fill this with init. + if (init !== undefined) { + init = webidl.converters.HeadersInit(init) + fill(this, init) } } - this.data = d; - this._optimizeConstructedString(n); - return this; -}; -/** - * Puts bytes in this buffer. - * - * @param bytes the bytes (as a binary encoded string) to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putBytes = function(bytes) { - this.data += bytes; - this._optimizeConstructedString(bytes.length); - return this; -}; + // https://fetch.spec.whatwg.org/#dom-headers-append + append (name, value) { + webidl.brandCheck(this, Headers) -/** - * Puts a UTF-16 encoded string into this buffer. - * - * @param str the string to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putString = function(str) { - return this.putBytes(util.encodeUtf8(str)); -}; + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) -/** - * Puts a 16-bit integer in this buffer in big-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt16 = function(i) { - return this.putBytes( - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) -/** - * Puts a 24-bit integer in this buffer in big-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. 
- */ -util.ByteStringBuffer.prototype.putInt24 = function(i) { - return this.putBytes( - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + return appendHeader(this, name, value) + } -/** - * Puts a 32-bit integer in this buffer in big-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt32 = function(i) { - return this.putBytes( - String.fromCharCode(i >> 24 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + // https://fetch.spec.whatwg.org/#dom-headers-delete + delete (name) { + webidl.brandCheck(this, Headers) -/** - * Puts a 16-bit integer in this buffer in little-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt16Le = function(i) { - return this.putBytes( - String.fromCharCode(i & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF)); -}; + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) -/** - * Puts a 24-bit integer in this buffer in little-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt24Le = function(i) { - return this.putBytes( - String.fromCharCode(i & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF)); -}; + name = webidl.converters.ByteString(name) -/** - * Puts a 32-bit integer in this buffer in little-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt32Le = function(i) { - return this.putBytes( - String.fromCharCode(i & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 24 & 0xFF)); -}; + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.delete', + value: name, + type: 'header name' + }) + } -/** - * Puts an n-bit integer in this buffer in big-endian order. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt = function(i, n) { - _checkBitsParam(n); - var bytes = ''; - do { - n -= 8; - bytes += String.fromCharCode((i >> n) & 0xFF); - } while(n > 0); - return this.putBytes(bytes); -}; + // 2. If this’s guard is "immutable", then throw a TypeError. + // 3. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 4. Otherwise, if this’s guard is "request-no-cors", name + // is not a no-CORS-safelisted request-header name, and + // name is not a privileged no-CORS request-header name, + // return. + // 5. Otherwise, if this’s guard is "response" and name is + // a forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 6. If this’s header list does not contain name, then + // return. + if (!this[kHeadersList].contains(name)) { + return + } -/** - * Puts a signed n-bit integer in this buffer in big-endian order. Two's - * complement representation is used. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return this buffer. 
- */ -util.ByteStringBuffer.prototype.putSignedInt = function(i, n) { - // putInt checks n - if(i < 0) { - i += 2 << (n - 1); + // 7. Delete name from this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this. + this[kHeadersList].delete(name) } - return this.putInt(i, n); -}; -/** - * Puts the given buffer into this buffer. - * - * @param buffer the buffer to put into this one. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putBuffer = function(buffer) { - return this.putBytes(buffer.getBytes()); -}; - -/** - * Gets a byte from this buffer and advances the read pointer by 1. - * - * @return the byte. - */ -util.ByteStringBuffer.prototype.getByte = function() { - return this.data.charCodeAt(this.read++); -}; + // https://fetch.spec.whatwg.org/#dom-headers-get + get (name) { + webidl.brandCheck(this, Headers) -/** - * Gets a uint16 from this buffer in big-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.ByteStringBuffer.prototype.getInt16 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 8 ^ - this.data.charCodeAt(this.read + 1)); - this.read += 2; - return rval; -}; + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) -/** - * Gets a uint24 from this buffer in big-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.ByteStringBuffer.prototype.getInt24 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 16 ^ - this.data.charCodeAt(this.read + 1) << 8 ^ - this.data.charCodeAt(this.read + 2)); - this.read += 3; - return rval; -}; + name = webidl.converters.ByteString(name) -/** - * Gets a uint32 from this buffer in big-endian order and advances the read - * pointer by 4. - * - * @return the word. - */ -util.ByteStringBuffer.prototype.getInt32 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 24 ^ - this.data.charCodeAt(this.read + 1) << 16 ^ - this.data.charCodeAt(this.read + 2) << 8 ^ - this.data.charCodeAt(this.read + 3)); - this.read += 4; - return rval; -}; + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.get', + value: name, + type: 'header name' + }) + } -/** - * Gets a uint16 from this buffer in little-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.ByteStringBuffer.prototype.getInt16Le = function() { - var rval = ( - this.data.charCodeAt(this.read) ^ - this.data.charCodeAt(this.read + 1) << 8); - this.read += 2; - return rval; -}; + // 2. Return the result of getting name from this’s header + // list. + return this[kHeadersList].get(name) + } -/** - * Gets a uint24 from this buffer in little-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.ByteStringBuffer.prototype.getInt24Le = function() { - var rval = ( - this.data.charCodeAt(this.read) ^ - this.data.charCodeAt(this.read + 1) << 8 ^ - this.data.charCodeAt(this.read + 2) << 16); - this.read += 3; - return rval; -}; + // https://fetch.spec.whatwg.org/#dom-headers-has + has (name) { + webidl.brandCheck(this, Headers) -/** - * Gets a uint32 from this buffer in little-endian order and advances the read - * pointer by 4. - * - * @return the word. 
- */ -util.ByteStringBuffer.prototype.getInt32Le = function() { - var rval = ( - this.data.charCodeAt(this.read) ^ - this.data.charCodeAt(this.read + 1) << 8 ^ - this.data.charCodeAt(this.read + 2) << 16 ^ - this.data.charCodeAt(this.read + 3) << 24); - this.read += 4; - return rval; -}; + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) -/** - * Gets an n-bit integer from this buffer in big-endian order and advances the - * read pointer by ceil(n/8). - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.ByteStringBuffer.prototype.getInt = function(n) { - _checkBitsParam(n); - var rval = 0; - do { - // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits. - rval = (rval << 8) + this.data.charCodeAt(this.read++); - n -= 8; - } while(n > 0); - return rval; -}; + name = webidl.converters.ByteString(name) -/** - * Gets a signed n-bit integer from this buffer in big-endian order, using - * two's complement, and advances the read pointer by n/8. - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.ByteStringBuffer.prototype.getSignedInt = function(n) { - // getInt checks n - var x = this.getInt(n); - var max = 2 << (n - 2); - if(x >= max) { - x -= max << 1; - } - return x; -}; + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.has', + value: name, + type: 'header name' + }) + } -/** - * Reads bytes out as a binary encoded string and clears them from the - * buffer. Note that the resulting string is binary encoded (in node.js this - * encoding is referred to as `binary`, it is *not* `utf8`). - * - * @param count the number of bytes to read, undefined or null for all. - * - * @return a binary encoded string of bytes. - */ -util.ByteStringBuffer.prototype.getBytes = function(count) { - var rval; - if(count) { - // read count bytes - count = Math.min(this.length(), count); - rval = this.data.slice(this.read, this.read + count); - this.read += count; - } else if(count === 0) { - rval = ''; - } else { - // read all bytes, optimize to only copy when needed - rval = (this.read === 0) ? this.data : this.data.slice(this.read); - this.clear(); + // 2. Return true if this’s header list contains name; + // otherwise false. + return this[kHeadersList].contains(name) } - return rval; -}; -/** - * Gets a binary encoded string of the bytes from this buffer without - * modifying the read pointer. - * - * @param count the number of bytes to get, omit to get all. - * - * @return a string full of binary encoded characters. - */ -util.ByteStringBuffer.prototype.bytes = function(count) { - return (typeof(count) === 'undefined' ? - this.data.slice(this.read) : - this.data.slice(this.read, this.read + count)); -}; + // https://fetch.spec.whatwg.org/#dom-headers-set + set (name, value) { + webidl.brandCheck(this, Headers) -/** - * Gets a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * - * @return the byte. - */ -util.ByteStringBuffer.prototype.at = function(i) { - return this.data.charCodeAt(this.read + i); -}; + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) -/** - * Puts a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * @param b the byte to put. - * - * @return this buffer. 
- */ -util.ByteStringBuffer.prototype.setAt = function(i, b) { - this.data = this.data.substr(0, this.read + i) + - String.fromCharCode(b) + - this.data.substr(this.read + i + 1); - return this; -}; + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) -/** - * Gets the last byte without modifying the read pointer. - * - * @return the last byte. - */ -util.ByteStringBuffer.prototype.last = function() { - return this.data.charCodeAt(this.data.length - 1); -}; + // 1. Normalize value. + value = headerValueNormalize(value) -/** - * Creates a copy of this buffer. - * - * @return the copy. - */ -util.ByteStringBuffer.prototype.copy = function() { - var c = util.createBuffer(this.data); - c.read = this.read; - return c; -}; + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value, + type: 'header value' + }) + } -/** - * Compacts this buffer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.compact = function() { - if(this.read > 0) { - this.data = this.data.slice(this.read); - this.read = 0; + // 3. If this’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 5. Otherwise, if this’s guard is "request-no-cors" and + // name/value is not a no-CORS-safelisted request-header, + // return. + // 6. Otherwise, if this’s guard is "response" and name is a + // forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 7. Set (name, value) in this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this + this[kHeadersList].set(name, value) } - return this; -}; -/** - * Clears this buffer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.clear = function() { - this.data = ''; - this.read = 0; - return this; -}; + // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie + getSetCookie () { + webidl.brandCheck(this, Headers) -/** - * Shortens this buffer by triming bytes off of the end of this buffer. - * - * @param count the number of bytes to trim off. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.truncate = function(count) { - var len = Math.max(0, this.length() - count); - this.data = this.data.substr(this.read, len); - this.read = 0; - return this; -}; + // 1. If this’s header list does not contain `Set-Cookie`, then return « ». + // 2. Return the values of all headers in this’s header list whose name is + // a byte-case-insensitive match for `Set-Cookie`, in order. -/** - * Converts this buffer to a hexadecimal string. - * - * @return a hexadecimal string. - */ -util.ByteStringBuffer.prototype.toHex = function() { - var rval = ''; - for(var i = this.read; i < this.data.length; ++i) { - var b = this.data.charCodeAt(i); - if(b < 16) { - rval += '0'; + const list = this[kHeadersList].cookies + + if (list) { + return [...list] } - rval += b.toString(16); + + return [] } - return rval; -}; -/** - * Converts this buffer to a UTF-16 string (standard JavaScript string). 
- * - * @return a UTF-16 string. - */ -util.ByteStringBuffer.prototype.toString = function() { - return util.decodeUtf8(this.bytes()); -}; + // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine + get [kHeadersSortedMap] () { + if (this[kHeadersList][kHeadersSortedMap]) { + return this[kHeadersList][kHeadersSortedMap] + } -/** End Buffer w/BinaryString backing */ + // 1. Let headers be an empty list of headers with the key being the name + // and value the value. + const headers = [] -/** Buffer w/UInt8Array backing */ + // 2. Let names be the result of convert header names to a sorted-lowercase + // set with all the names of the headers in list. + const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) + const cookies = this[kHeadersList].cookies -/** - * FIXME: Experimental. Do not use yet. - * - * Constructor for an ArrayBuffer-backed byte buffer. - * - * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a - * TypedArray. - * - * If a string is given, its encoding should be provided as an option, - * otherwise it will default to 'binary'. A 'binary' string is encoded such - * that each character is one byte in length and size. - * - * If an ArrayBuffer, DataView, or TypedArray is given, it will be used - * *directly* without any copying. Note that, if a write to the buffer requires - * more space, the buffer will allocate a new backing ArrayBuffer to - * accommodate. The starting read and write offsets for the buffer may be - * given as options. - * - * @param [b] the initial bytes for this buffer. - * @param options the options to use: - * [readOffset] the starting read offset to use (default: 0). - * [writeOffset] the starting write offset to use (default: the - * length of the first parameter). - * [growSize] the minimum amount, in bytes, to grow the buffer by to - * accommodate writes (default: 1024). - * [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the - * first parameter, if it is a string (default: 'binary'). - */ -function DataBuffer(b, options) { - // default options - options = options || {}; + // 3. For each name of names: + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] + // 1. If name is `set-cookie`, then: + if (name === 'set-cookie') { + // 1. Let values be a list of all values of headers in list whose name + // is a byte-case-insensitive match for name, in order. + + // 2. For each value of values: + // 1. Append (name, value) to headers. + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) + } + } else { + // 2. Otherwise: - // pointers for read from/write to buffer - this.read = options.readOffset || 0; - this.growSize = options.growSize || 1024; + // 1. Let value be the result of getting name from list. - var isArrayBuffer = util.isArrayBuffer(b); - var isArrayBufferView = util.isArrayBufferView(b); - if(isArrayBuffer || isArrayBufferView) { - // use ArrayBuffer directly - if(isArrayBuffer) { - this.data = new DataView(b); - } else { - // TODO: adjust read/write offset based on the type of view - // or specify that this must be done in the options ... that the - // offsets are byte-based - this.data = new DataView(b.buffer, b.byteOffset, b.byteLength); - } - this.write = ('writeOffset' in options ? - options.writeOffset : this.data.byteLength); - return; - } + // 2. Assert: value is non-null. 
+ assert(value !== null) - // initialize to empty array buffer and add any given bytes using putBytes - this.data = new DataView(new ArrayBuffer(0)); - this.write = 0; + // 3. Append (name, value) to headers. + headers.push([name, value]) + } + } - if(b !== null && b !== undefined) { - this.putBytes(b); - } + this[kHeadersList][kHeadersSortedMap] = headers - if('writeOffset' in options) { - this.write = options.writeOffset; + // 4. Return headers. + return headers } -} -util.DataBuffer = DataBuffer; -/** - * Gets the number of bytes in this buffer. - * - * @return the number of bytes in this buffer. - */ -util.DataBuffer.prototype.length = function() { - return this.write - this.read; -}; + keys () { + webidl.brandCheck(this, Headers) -/** - * Gets whether or not this buffer is empty. - * - * @return true if this buffer is empty, false if not. - */ -util.DataBuffer.prototype.isEmpty = function() { - return this.length() <= 0; -}; + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } -/** - * Ensures this buffer has enough empty space to accommodate the given number - * of bytes. An optional parameter may be given that indicates a minimum - * amount to grow the buffer if necessary. If the parameter is not given, - * the buffer will be grown by some previously-specified default amount - * or heuristic. - * - * @param amount the number of bytes to accommodate. - * @param [growSize] the minimum amount, in bytes, to grow the buffer by if - * necessary. - */ -util.DataBuffer.prototype.accommodate = function(amount, growSize) { - if(this.length() >= amount) { - return this; + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key' + ) } - growSize = Math.max(growSize || this.growSize, amount); - - // grow buffer - var src = new Uint8Array( - this.data.buffer, this.data.byteOffset, this.data.byteLength); - var dst = new Uint8Array(this.length() + growSize); - dst.set(src); - this.data = new DataView(dst.buffer); - - return this; -}; -/** - * Puts a byte in this buffer. - * - * @param b the byte to put. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putByte = function(b) { - this.accommodate(1); - this.data.setUint8(this.write++, b); - return this; -}; + values () { + webidl.brandCheck(this, Headers) -/** - * Puts a byte in this buffer N times. - * - * @param b the byte to put. - * @param n the number of bytes of value b to put. - * - * @return this buffer. - */ -util.DataBuffer.prototype.fillWithByte = function(b, n) { - this.accommodate(n); - for(var i = 0; i < n; ++i) { - this.data.setUint8(b); - } - return this; -}; + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } -/** - * Puts bytes in this buffer. The bytes may be given as a string, an - * ArrayBuffer, a DataView, or a TypedArray. - * - * @param bytes the bytes to put. - * @param [encoding] the encoding for the first parameter ('binary', 'utf8', - * 'utf16', 'hex'), if it is a string (default: 'binary'). - * - * @return this buffer. 
- */ -util.DataBuffer.prototype.putBytes = function(bytes, encoding) { - if(util.isArrayBufferView(bytes)) { - var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength); - var len = src.byteLength - src.byteOffset; - this.accommodate(len); - var dst = new Uint8Array(this.data.buffer, this.write); - dst.set(src); - this.write += len; - return this; + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'value' + ) } - if(util.isArrayBuffer(bytes)) { - var src = new Uint8Array(bytes); - this.accommodate(src.byteLength); - var dst = new Uint8Array(this.data.buffer); - dst.set(src, this.write); - this.write += src.byteLength; - return this; - } + entries () { + webidl.brandCheck(this, Headers) - // bytes is a util.DataBuffer or equivalent - if(bytes instanceof util.DataBuffer || - (typeof bytes === 'object' && - typeof bytes.read === 'number' && typeof bytes.write === 'number' && - util.isArrayBufferView(bytes.data))) { - var src = new Uint8Array(bytes.data.byteLength, bytes.read, bytes.length()); - this.accommodate(src.byteLength); - var dst = new Uint8Array(bytes.data.byteLength, this.write); - dst.set(src); - this.write += src.byteLength; - return this; - } + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } - if(bytes instanceof util.ByteStringBuffer) { - // copy binary string and process as the same as a string parameter below - bytes = bytes.data; - encoding = 'binary'; + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key+value' + ) } - // string conversion - encoding = encoding || 'binary'; - if(typeof bytes === 'string') { - var view; + /** + * @param {(value: string, key: string, self: Headers) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, Headers) - // decode from string - if(encoding === 'hex') { - this.accommodate(Math.ceil(bytes.length / 2)); - view = new Uint8Array(this.data.buffer, this.write); - this.write += util.binary.hex.decode(bytes, view, this.write); - return this; - } - if(encoding === 'base64') { - this.accommodate(Math.ceil(bytes.length / 4) * 3); - view = new Uint8Array(this.data.buffer, this.write); - this.write += util.binary.base64.decode(bytes, view, this.write); - return this; - } + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) - // encode text as UTF-8 bytes - if(encoding === 'utf8') { - // encode as UTF-8 then decode string as raw binary - bytes = util.encodeUtf8(bytes); - encoding = 'binary'; + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." 
+ ) } - // decode string as raw binary - if(encoding === 'binary' || encoding === 'raw') { - // one byte per character - this.accommodate(bytes.length); - view = new Uint8Array(this.data.buffer, this.write); - this.write += util.binary.raw.decode(view); - return this; + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) } + } - // encode text as UTF-16 bytes - if(encoding === 'utf16') { - // two bytes per character - this.accommodate(bytes.length * 2); - view = new Uint16Array(this.data.buffer, this.write); - this.write += util.text.utf16.encode(view); - return this; - } + [Symbol.for('nodejs.util.inspect.custom')] () { + webidl.brandCheck(this, Headers) - throw new Error('Invalid encoding: ' + encoding); + return this[kHeadersList] } +} - throw Error('Invalid parameter: ' + bytes); -}; +Headers.prototype[Symbol.iterator] = Headers.prototype.entries -/** - * Puts the given buffer into this buffer. - * - * @param buffer the buffer to put into this one. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putBuffer = function(buffer) { - this.putBytes(buffer); - buffer.clear(); - return this; -}; +Object.defineProperties(Headers.prototype, { + append: kEnumerableProperty, + delete: kEnumerableProperty, + get: kEnumerableProperty, + has: kEnumerableProperty, + set: kEnumerableProperty, + getSetCookie: kEnumerableProperty, + keys: kEnumerableProperty, + values: kEnumerableProperty, + entries: kEnumerableProperty, + forEach: kEnumerableProperty, + [Symbol.iterator]: { enumerable: false }, + [Symbol.toStringTag]: { + value: 'Headers', + configurable: true + } +}) -/** - * Puts a string into this buffer. - * - * @param str the string to put. - * @param [encoding] the encoding for the string (default: 'utf16'). - * - * @return this buffer. - */ -util.DataBuffer.prototype.putString = function(str) { - return this.putBytes(str, 'utf16'); -}; +webidl.converters.HeadersInit = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (V[Symbol.iterator]) { + return webidl.converters['sequence>'](V) + } -/** - * Puts a 16-bit integer in this buffer in big-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt16 = function(i) { - this.accommodate(2); - this.data.setInt16(this.write, i); - this.write += 2; - return this; -}; + return webidl.converters['record'](V) + } -/** - * Puts a 24-bit integer in this buffer in big-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt24 = function(i) { - this.accommodate(3); - this.data.setInt16(this.write, i >> 8 & 0xFFFF); - this.data.setInt8(this.write, i >> 16 & 0xFF); - this.write += 3; - return this; -}; + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) +} -/** - * Puts a 32-bit integer in this buffer in big-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt32 = function(i) { - this.accommodate(4); - this.data.setInt32(this.write, i); - this.write += 4; - return this; -}; +module.exports = { + fill, + Headers, + HeadersList +} -/** - * Puts a 16-bit integer in this buffer in little-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. 
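/*
 * A minimal usage sketch, separate from the generated bundle above, of the WHATWG
 * Headers semantics the undici code in this hunk implements: set() normalizes values
 * and validates names, getSetCookie() returns every Set-Cookie value uncombined, and
 * iteration walks the sorted, lowercased header list. Assumes a recent Node 18+/20
 * runtime where Headers (this same undici implementation) is available as a global.
 */
const h = new Headers()
h.set('Content-Type', '  text/plain  ')  // leading/trailing whitespace is normalized away
h.append('Set-Cookie', 'a=1')
h.append('Set-Cookie', 'b=2')

console.log(h.get('content-type'))       // 'text/plain' (header names are case-insensitive)
console.log(h.has('content-type'))       // true
console.log(h.getSetCookie())            // [ 'a=1', 'b=2' ] (cookies are never combined)

// entries()/for-of use the sort-and-combine list: sorted lowercase names,
// with one entry per Set-Cookie value.
for (const [name, value] of h) {
  console.log(name, value)
}

// Invalid header names are rejected with a TypeError, as in Headers.prototype.has/set.
try {
  h.set('bad name\n', 'x')
} catch (err) {
  console.log(err instanceof TypeError)  // true
}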
- */ -util.DataBuffer.prototype.putInt16Le = function(i) { - this.accommodate(2); - this.data.setInt16(this.write, i, true); - this.write += 2; - return this; -}; -/** - * Puts a 24-bit integer in this buffer in little-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt24Le = function(i) { - this.accommodate(3); - this.data.setInt8(this.write, i >> 16 & 0xFF); - this.data.setInt16(this.write, i >> 8 & 0xFFFF, true); - this.write += 3; - return this; -}; +/***/ }), -/** - * Puts a 32-bit integer in this buffer in little-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt32Le = function(i) { - this.accommodate(4); - this.data.setInt32(this.write, i, true); - this.write += 4; - return this; -}; +/***/ 4881: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Puts an n-bit integer in this buffer in big-endian order. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt = function(i, n) { - _checkBitsParam(n); - this.accommodate(n / 8); - do { - n -= 8; - this.data.setInt8(this.write++, (i >> n) & 0xFF); - } while(n > 0); - return this; -}; +"use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch + + + +const { + Response, + makeNetworkError, + makeAppropriateNetworkError, + filterResponse, + makeResponse +} = __nccwpck_require__(7823) +const { Headers } = __nccwpck_require__(554) +const { Request, makeRequest } = __nccwpck_require__(8359) +const zlib = __nccwpck_require__(9796) +const { + bytesMatch, + makePolicyContainer, + clonePolicyContainer, + requestBadPort, + TAOCheck, + appendRequestOriginHeader, + responseLocationURL, + requestCurrentURL, + setRequestReferrerPolicyOnRedirect, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + createOpaqueTimingInfo, + appendFetchMetadata, + corsCheck, + crossOriginResourcePolicyCheck, + determineRequestsReferrer, + coarsenedSharedCurrentTime, + createDeferredPromise, + isBlobLike, + sameOrigin, + isCancelled, + isAborted, + isErrorLike, + fullyReadBody, + readableStreamClose, + isomorphicEncode, + urlIsLocal, + urlIsHttpHttpsScheme, + urlHasHttpsScheme +} = __nccwpck_require__(2538) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) +const assert = __nccwpck_require__(9491) +const { safelyExtractBody } = __nccwpck_require__(1472) +const { + redirectStatusSet, + nullBodyStatus, + safeMethodsSet, + requestBodyHeader, + subresourceSet, + DOMException +} = __nccwpck_require__(1037) +const { kHeadersList } = __nccwpck_require__(2785) +const EE = __nccwpck_require__(2361) +const { Readable, pipeline } = __nccwpck_require__(2781) +const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3983) +const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(685) +const { TransformStream } = __nccwpck_require__(5356) +const { getGlobalDispatcher } = __nccwpck_require__(1892) +const { webidl } = __nccwpck_require__(1744) +const { STATUS_CODES } = __nccwpck_require__(3685) +const GET_OR_HEAD = ['GET', 'HEAD'] + +/** @type {import('buffer').resolveObjectURL} */ +let resolveObjectURL +let ReadableStream = globalThis.ReadableStream + +class Fetch extends EE { + constructor (dispatcher) { + super() + + this.dispatcher = dispatcher + this.connection = null + this.dump = false + this.state = 'ongoing' + // 2 terminated 
listeners get added per request, + // but only 1 gets removed. If there are 20 redirects, + // 21 listeners will be added. + // See https://github.com/nodejs/undici/issues/1711 + // TODO (fix): Find and fix root cause for leaked listener. + this.setMaxListeners(21) + } + + terminate (reason) { + if (this.state !== 'ongoing') { + return + } -/** - * Puts a signed n-bit integer in this buffer in big-endian order. Two's - * complement representation is used. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putSignedInt = function(i, n) { - _checkBitsParam(n); - this.accommodate(n / 8); - if(i < 0) { - i += 2 << (n - 1); + this.state = 'terminated' + this.connection?.destroy(reason) + this.emit('terminated', reason) } - return this.putInt(i, n); -}; -/** - * Gets a byte from this buffer and advances the read pointer by 1. - * - * @return the byte. - */ -util.DataBuffer.prototype.getByte = function() { - return this.data.getInt8(this.read++); -}; + // https://fetch.spec.whatwg.org/#fetch-controller-abort + abort (error) { + if (this.state !== 'ongoing') { + return + } -/** - * Gets a uint16 from this buffer in big-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.DataBuffer.prototype.getInt16 = function() { - var rval = this.data.getInt16(this.read); - this.read += 2; - return rval; -}; + // 1. Set controller’s state to "aborted". + this.state = 'aborted' -/** - * Gets a uint24 from this buffer in big-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.DataBuffer.prototype.getInt24 = function() { - var rval = ( - this.data.getInt16(this.read) << 8 ^ - this.data.getInt8(this.read + 2)); - this.read += 3; - return rval; -}; + // 2. Let fallbackError be an "AbortError" DOMException. + // 3. Set error to fallbackError if it is not given. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } -/** - * Gets a uint32 from this buffer in big-endian order and advances the read - * pointer by 4. - * - * @return the word. - */ -util.DataBuffer.prototype.getInt32 = function() { - var rval = this.data.getInt32(this.read); - this.read += 4; - return rval; -}; + // 4. Let serializedError be StructuredSerialize(error). + // If that threw an exception, catch it, and let + // serializedError be StructuredSerialize(fallbackError). -/** - * Gets a uint16 from this buffer in little-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.DataBuffer.prototype.getInt16Le = function() { - var rval = this.data.getInt16(this.read, true); - this.read += 2; - return rval; -}; + // 5. Set controller’s serialized abort reason to serializedError. + this.serializedAbortReason = error -/** - * Gets a uint24 from this buffer in little-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.DataBuffer.prototype.getInt24Le = function() { - var rval = ( - this.data.getInt8(this.read) ^ - this.data.getInt16(this.read + 1, true) << 8); - this.read += 3; - return rval; -}; + this.connection?.destroy(error) + this.emit('terminated', error) + } +} -/** - * Gets a uint32 from this buffer in little-endian order and advances the read - * pointer by 4. - * - * @return the word. 
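/*
 * A short sketch, assuming Node 18+ where fetch/AbortController are globals backed by
 * this bundle and using a placeholder URL: aborting the request's signal takes the
 * abort path implemented by the Fetch controller above, and the fetch() promise
 * rejects with the signal's reason, falling back to an "AbortError" DOMException
 * when no reason is given.
 */
const controller = new AbortController()

fetch('https://example.com/slow-endpoint', { signal: controller.signal })
  .catch((err) => {
    // With controller.abort() and no explicit reason, the rejection is the
    // fallback AbortError DOMException seen in abort()/abortFetch.
    console.log(err.name)  // 'AbortError'
  })

// Abort immediately; controller.abort(new Error('why')) would reject with that reason instead.
controller.abort()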
- */ -util.DataBuffer.prototype.getInt32Le = function() { - var rval = this.data.getInt32(this.read, true); - this.read += 4; - return rval; -}; +// https://fetch.spec.whatwg.org/#fetch-method +function fetch (input, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) -/** - * Gets an n-bit integer from this buffer in big-endian order and advances the - * read pointer by n/8. - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.DataBuffer.prototype.getInt = function(n) { - _checkBitsParam(n); - var rval = 0; - do { - // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits. - rval = (rval << 8) + this.data.getInt8(this.read++); - n -= 8; - } while(n > 0); - return rval; -}; + // 1. Let p be a new promise. + const p = createDeferredPromise() -/** - * Gets a signed n-bit integer from this buffer in big-endian order, using - * two's complement, and advances the read pointer by n/8. - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.DataBuffer.prototype.getSignedInt = function(n) { - // getInt checks n - var x = this.getInt(n); - var max = 2 << (n - 2); - if(x >= max) { - x -= max << 1; - } - return x; -}; + // 2. Let requestObject be the result of invoking the initial value of + // Request as constructor with input and init as arguments. If this throws + // an exception, reject p with it and return p. + let requestObject -/** - * Reads bytes out as a binary encoded string and clears them from the - * buffer. - * - * @param count the number of bytes to read, undefined or null for all. - * - * @return a binary encoded string of bytes. - */ -util.DataBuffer.prototype.getBytes = function(count) { - // TODO: deprecate this method, it is poorly named and - // this.toString('binary') replaces it - // add a toTypedArray()/toArrayBuffer() function - var rval; - if(count) { - // read count bytes - count = Math.min(this.length(), count); - rval = this.data.slice(this.read, this.read + count); - this.read += count; - } else if(count === 0) { - rval = ''; - } else { - // read all bytes, optimize to only copy when needed - rval = (this.read === 0) ? this.data : this.data.slice(this.read); - this.clear(); + try { + requestObject = new Request(input, init) + } catch (e) { + p.reject(e) + return p.promise } - return rval; -}; - -/** - * Gets a binary encoded string of the bytes from this buffer without - * modifying the read pointer. - * - * @param count the number of bytes to get, omit to get all. - * - * @return a string full of binary encoded characters. - */ -util.DataBuffer.prototype.bytes = function(count) { - // TODO: deprecate this method, it is poorly named, add "getString()" - return (typeof(count) === 'undefined' ? - this.data.slice(this.read) : - this.data.slice(this.read, this.read + count)); -}; -/** - * Gets a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * - * @return the byte. - */ -util.DataBuffer.prototype.at = function(i) { - return this.data.getUint8(this.read + i); -}; + // 3. Let request be requestObject’s request. + const request = requestObject[kState] -/** - * Puts a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * @param b the byte to put. - * - * @return this buffer. - */ -util.DataBuffer.prototype.setAt = function(i, b) { - this.data.setUint8(i, b); - return this; -}; + // 4. 
If requestObject’s signal’s aborted flag is set, then: + if (requestObject.signal.aborted) { + // 1. Abort the fetch() call with p, request, null, and + // requestObject’s signal’s abort reason. + abortFetch(p, request, null, requestObject.signal.reason) -/** - * Gets the last byte without modifying the read pointer. - * - * @return the last byte. - */ -util.DataBuffer.prototype.last = function() { - return this.data.getUint8(this.write - 1); -}; + // 2. Return p. + return p.promise + } -/** - * Creates a copy of this buffer. - * - * @return the copy. - */ -util.DataBuffer.prototype.copy = function() { - return new util.DataBuffer(this); -}; + // 5. Let globalObject be request’s client’s global object. + const globalObject = request.client.globalObject -/** - * Compacts this buffer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.compact = function() { - if(this.read > 0) { - var src = new Uint8Array(this.data.buffer, this.read); - var dst = new Uint8Array(src.byteLength); - dst.set(src); - this.data = new DataView(dst); - this.write -= this.read; - this.read = 0; + // 6. If globalObject is a ServiceWorkerGlobalScope object, then set + // request’s service-workers mode to "none". + if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { + request.serviceWorkers = 'none' } - return this; -}; -/** - * Clears this buffer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.clear = function() { - this.data = new DataView(new ArrayBuffer(0)); - this.read = this.write = 0; - return this; -}; + // 7. Let responseObject be null. + let responseObject = null -/** - * Shortens this buffer by triming bytes off of the end of this buffer. - * - * @param count the number of bytes to trim off. - * - * @return this buffer. - */ -util.DataBuffer.prototype.truncate = function(count) { - this.write = Math.max(0, this.length() - count); - this.read = Math.min(this.read, this.write); - return this; -}; + // 8. Let relevantRealm be this’s relevant Realm. + const relevantRealm = null -/** - * Converts this buffer to a hexadecimal string. - * - * @return a hexadecimal string. - */ -util.DataBuffer.prototype.toHex = function() { - var rval = ''; - for(var i = this.read; i < this.data.byteLength; ++i) { - var b = this.data.getUint8(i); - if(b < 16) { - rval += '0'; - } - rval += b.toString(16); - } - return rval; -}; + // 9. Let locallyAborted be false. + let locallyAborted = false -/** - * Converts this buffer to a string, using the given encoding. If no - * encoding is given, 'utf8' (UTF-8) is used. - * - * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex', - * 'base64' (default: 'utf8'). - * - * @return a string representation of the bytes in this buffer. - */ -util.DataBuffer.prototype.toString = function(encoding) { - var view = new Uint8Array(this.data, this.read, this.length()); - encoding = encoding || 'utf8'; + // 10. Let controller be null. + let controller = null - // encode to string - if(encoding === 'binary' || encoding === 'raw') { - return util.binary.raw.encode(view); - } - if(encoding === 'hex') { - return util.binary.hex.encode(view); - } - if(encoding === 'base64') { - return util.binary.base64.encode(view); - } + // 11. Add the following abort steps to requestObject’s signal: + addAbortListener( + requestObject.signal, + () => { + // 1. Set locallyAborted to true. 
+ locallyAborted = true - // decode to text - if(encoding === 'utf8') { - return util.text.utf8.decode(view); - } - if(encoding === 'utf16') { - return util.text.utf16.decode(view); - } + // 2. Assert: controller is non-null. + assert(controller != null) - throw new Error('Invalid encoding: ' + encoding); -}; + // 3. Abort controller with requestObject’s signal’s abort reason. + controller.abort(requestObject.signal.reason) -/** End Buffer w/UInt8Array backing */ + // 4. Abort the fetch() call with p, request, responseObject, + // and requestObject’s signal’s abort reason. + abortFetch(p, request, responseObject, requestObject.signal.reason) + } + ) -/** - * Creates a buffer that stores bytes. A value may be given to populate the - * buffer with data. This value can either be string of encoded bytes or a - * regular string of characters. When passing a string of binary encoded - * bytes, the encoding `raw` should be given. This is also the default. When - * passing a string of characters, the encoding `utf8` should be given. - * - * @param [input] a string with encoded bytes to store in the buffer. - * @param [encoding] (default: 'raw', other: 'utf8'). - */ -util.createBuffer = function(input, encoding) { - // TODO: deprecate, use new ByteBuffer() instead - encoding = encoding || 'raw'; - if(input !== undefined && encoding === 'utf8') { - input = util.encodeUtf8(input); - } - return new util.ByteBuffer(input); -}; + // 12. Let handleFetchDone given response response be to finalize and + // report timing with response, globalObject, and "fetch". + const handleFetchDone = (response) => + finalizeAndReportTiming(response, 'fetch') -/** - * Fills a string with a particular value. If you want the string to be a byte - * string, pass in String.fromCharCode(theByte). - * - * @param c the character to fill the string with, use String.fromCharCode - * to fill the string with a byte value. - * @param n the number of characters of value c to fill with. - * - * @return the filled string. - */ -util.fillString = function(c, n) { - var s = ''; - while(n > 0) { - if(n & 1) { - s += c; - } - n >>>= 1; - if(n > 0) { - c += c; + // 13. Set controller to the result of calling fetch given request, + // with processResponseEndOfBody set to handleFetchDone, and processResponse + // given response being these substeps: + + const processResponse = (response) => { + // 1. If locallyAborted is true, terminate these substeps. + if (locallyAborted) { + return Promise.resolve() } - } - return s; -}; -/** - * Performs a per byte XOR between two byte strings and returns the result as a - * string of bytes. - * - * @param s1 first string of bytes. - * @param s2 second string of bytes. - * @param n the number of bytes to XOR. - * - * @return the XOR'd result. - */ -util.xorBytes = function(s1, s2, n) { - var s3 = ''; - var b = ''; - var t = ''; - var i = 0; - var c = 0; - for(; n > 0; --n, ++i) { - b = s1.charCodeAt(i) ^ s2.charCodeAt(i); - if(c >= 10) { - s3 += t; - t = ''; - c = 0; - } - t += String.fromCharCode(b); - ++c; - } - s3 += t; - return s3; -}; + // 2. If response’s aborted flag is set, then: + if (response.aborted) { + // 1. Let deserializedError be the result of deserialize a serialized + // abort reason given controller’s serialized abort reason and + // relevantRealm. -/** - * Converts a hex string into a 'binary' encoded string of bytes. - * - * @param hex the hexadecimal string to convert. - * - * @return the binary-encoded string of bytes. 
- */ -util.hexToBytes = function(hex) { - // TODO: deprecate: "Deprecated. Use util.binary.hex.decode instead." - var rval = ''; - var i = 0; - if(hex.length & 1 == 1) { - // odd number of characters, convert first character alone - i = 1; - rval += String.fromCharCode(parseInt(hex[0], 16)); - } - // convert 2 characters (1 byte) at a time - for(; i < hex.length; i += 2) { - rval += String.fromCharCode(parseInt(hex.substr(i, 2), 16)); - } - return rval; -}; + // 2. Abort the fetch() call with p, request, responseObject, and + // deserializedError. -/** - * Converts a 'binary' encoded string of bytes to hex. - * - * @param bytes the byte string to convert. - * - * @return the string of hexadecimal characters. - */ -util.bytesToHex = function(bytes) { - // TODO: deprecate: "Deprecated. Use util.binary.hex.encode instead." - return util.createBuffer(bytes).toHex(); -}; + abortFetch(p, request, responseObject, controller.serializedAbortReason) + return Promise.resolve() + } -/** - * Converts an 32-bit integer to 4-big-endian byte string. - * - * @param i the integer. - * - * @return the byte string. - */ -util.int32ToBytes = function(i) { - return ( - String.fromCharCode(i >> 24 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + // 3. If response is a network error, then reject p with a TypeError + // and terminate these substeps. + if (response.type === 'error') { + p.reject( + Object.assign(new TypeError('fetch failed'), { cause: response.error }) + ) + return Promise.resolve() + } -// base64 characters, reverse mapping -var _base64 = - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; -var _base64Idx = [ -/*43 -43 = 0*/ -/*'+', 1, 2, 3,'/' */ - 62, -1, -1, -1, 63, + // 4. Set responseObject to the result of creating a Response object, + // given response, "immutable", and relevantRealm. + responseObject = new Response() + responseObject[kState] = response + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm -/*'0','1','2','3','4','5','6','7','8','9' */ - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + // 5. Resolve p with responseObject. + p.resolve(responseObject) + } -/*15, 16, 17,'=', 19, 20, 21 */ - -1, -1, -1, 64, -1, -1, -1, + controller = fetching({ + request, + processResponseEndOfBody: handleFetchDone, + processResponse, + dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici + }) -/*65 - 43 = 22*/ -/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + // 14. Return p. + return p.promise +} -/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */ - 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, +// https://fetch.spec.whatwg.org/#finalize-and-report-timing +function finalizeAndReportTiming (response, initiatorType = 'other') { + // 1. If response is an aborted network error, then return. + if (response.type === 'error' && response.aborted) { + return + } -/*91 - 43 = 48 */ -/*48, 49, 50, 51, 52, 53 */ - -1, -1, -1, -1, -1, -1, + // 2. If response’s URL list is null or empty, then return. + if (!response.urlList?.length) { + return + } -/*97 - 43 = 54*/ -/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */ - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + // 3. Let originalURL be response’s URL list[0]. 
+ const originalURL = response.urlList[0] -/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */ - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51 -]; + // 4. Let timingInfo be response’s timing info. + let timingInfo = response.timingInfo -// base58 characters (Bitcoin alphabet) -var _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'; + // 5. Let cacheState be response’s cache state. + let cacheState = response.cacheState -/** - * Base64 encodes a 'binary' encoded string of bytes. - * - * @param input the binary encoded string of bytes to base64-encode. - * @param maxline the maximum number of encoded characters per line to use, - * defaults to none. - * - * @return the base64-encoded output. - */ -util.encode64 = function(input, maxline) { - // TODO: deprecate: "Deprecated. Use util.binary.base64.encode instead." - var line = ''; - var output = ''; - var chr1, chr2, chr3; - var i = 0; - while(i < input.length) { - chr1 = input.charCodeAt(i++); - chr2 = input.charCodeAt(i++); - chr3 = input.charCodeAt(i++); - - // encode 4 character group - line += _base64.charAt(chr1 >> 2); - line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4)); - if(isNaN(chr2)) { - line += '=='; - } else { - line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6)); - line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63); - } + // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. + if (!urlIsHttpHttpsScheme(originalURL)) { + return + } - if(maxline && line.length > maxline) { - output += line.substr(0, maxline) + '\r\n'; - line = line.substr(maxline); - } + // 7. If timingInfo is null, then return. + if (timingInfo === null) { + return } - output += line; - return output; -}; -/** - * Base64 decodes a string into a 'binary' encoded string of bytes. - * - * @param input the base64-encoded input. - * - * @return the binary encoded string. - */ -util.decode64 = function(input) { - // TODO: deprecate: "Deprecated. Use util.binary.base64.decode instead." - - // remove all non-base64 characters - input = input.replace(/[^A-Za-z0-9\+\/\=]/g, ''); - - var output = ''; - var enc1, enc2, enc3, enc4; - var i = 0; - - while(i < input.length) { - enc1 = _base64Idx[input.charCodeAt(i++) - 43]; - enc2 = _base64Idx[input.charCodeAt(i++) - 43]; - enc3 = _base64Idx[input.charCodeAt(i++) - 43]; - enc4 = _base64Idx[input.charCodeAt(i++) - 43]; - - output += String.fromCharCode((enc1 << 2) | (enc2 >> 4)); - if(enc3 !== 64) { - // decoded at least 2 bytes - output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2)); - if(enc4 !== 64) { - // decoded 3 bytes - output += String.fromCharCode(((enc3 & 3) << 6) | enc4); - } - } + // 8. If response’s timing allow passed flag is not set, then: + if (!response.timingAllowPassed) { + // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. + timingInfo = createOpaqueTimingInfo({ + startTime: timingInfo.startTime + }) + + // 2. Set cacheState to the empty string. + cacheState = '' } - return output; -}; + // 9. Set timingInfo’s end time to the coarsened shared current time + // given global’s relevant settings object’s cross-origin isolated + // capability. + // TODO: given global’s relevant settings object’s cross-origin isolated + // capability? + timingInfo.endTime = coarsenedSharedCurrentTime() -/** - * Encodes the given string of characters (a standard JavaScript - * string) as a binary encoded string where the bytes represent - * a UTF-8 encoded string of characters. 
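/*
 * Sketch of observing the timing info finalized above (assumption: Node >= 18.2,
 * matching the nodeMajor/nodeMinor guard in markResourceTiming, and a placeholder
 * URL). Each completed fetch is reported via performance.markResourceTiming and
 * surfaces as a 'resource' performance entry whose initiatorType is 'fetch'.
 */
const { PerformanceObserver } = require('node:perf_hooks')

const obs = new PerformanceObserver((items) => {
  for (const entry of items.getEntries()) {
    // entry.name is the request URL; duration spans start time to end time.
    console.log(entry.initiatorType, entry.name, entry.duration)
  }
  obs.disconnect()
})
obs.observe({ entryTypes: ['resource'] })

fetch('https://example.com/').then((res) => res.arrayBuffer())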
Non-ASCII characters will be - * encoded as multiple bytes according to UTF-8. - * - * @param str a standard string of characters to encode. - * - * @return the binary encoded string. - */ -util.encodeUtf8 = function(str) { - return unescape(encodeURIComponent(str)); -}; + // 10. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo -/** - * Decodes a binary encoded string that contains bytes that - * represent a UTF-8 encoded string of characters -- into a - * string of characters (a standard JavaScript string). - * - * @param str the binary encoded string to decode. - * - * @return the resulting standard string of characters. - */ -util.decodeUtf8 = function(str) { - return decodeURIComponent(escape(str)); -}; + // 11. Mark resource timing for timingInfo, originalURL, initiatorType, + // global, and cacheState. + markResourceTiming( + timingInfo, + originalURL, + initiatorType, + globalThis, + cacheState + ) +} -// binary encoding/decoding tools -// FIXME: Experimental. Do not use yet. -util.binary = { - raw: {}, - hex: {}, - base64: {}, - base58: {}, - baseN : { - encode: baseN.encode, - decode: baseN.decode +// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing +function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { + if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { + performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) } -}; +} -/** - * Encodes a Uint8Array as a binary-encoded string. This encoding uses - * a value between 0 and 255 for each character. - * - * @param bytes the Uint8Array to encode. - * - * @return the binary-encoded string. - */ -util.binary.raw.encode = function(bytes) { - return String.fromCharCode.apply(null, bytes); -}; +// https://fetch.spec.whatwg.org/#abort-fetch +function abortFetch (p, request, responseObject, error) { + // Note: AbortSignal.reason was added in node v17.2.0 + // which would give us an undefined error to reject with. + // Remove this once node v16 is no longer supported. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } -/** - * Decodes a binary-encoded string to a Uint8Array. This encoding uses - * a value between 0 and 255 for each character. - * - * @param str the binary-encoded string to decode. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.binary.raw.decode = function(str, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(str.length); + // 1. Reject promise with error. + p.reject(error) + + // 2. If request’s body is not null and is readable, then cancel request’s + // body with error. + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) } - offset = offset || 0; - var j = offset; - for(var i = 0; i < str.length; ++i) { - out[j++] = str.charCodeAt(i); + + // 3. If responseObject is null, then return. + if (responseObject == null) { + return } - return output ? (j - offset) : out; -}; -/** - * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or - * ByteBuffer as a string of hexadecimal characters. - * - * @param bytes the bytes to convert. 
- * - * @return the string of hexadecimal characters. - */ -util.binary.hex.encode = util.bytesToHex; + // 4. Let response be responseObject’s response. + const response = responseObject[kState] -/** - * Decodes a hex-encoded string to a Uint8Array. - * - * @param hex the hexadecimal string to convert. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.binary.hex.decode = function(hex, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(Math.ceil(hex.length / 2)); - } - offset = offset || 0; - var i = 0, j = offset; - if(hex.length & 1) { - // odd number of characters, convert first character alone - i = 1; - out[j++] = parseInt(hex[0], 16); - } - // convert 2 characters (1 byte) at a time - for(; i < hex.length; i += 2) { - out[j++] = parseInt(hex.substr(i, 2), 16); - } - return output ? (j - offset) : out; -}; + // 5. If response’s body is not null and is readable, then error response’s + // body with error. + if (response.body != null && isReadable(response.body?.stream)) { + response.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } +} -/** - * Base64-encodes a Uint8Array. - * - * @param input the Uint8Array to encode. - * @param maxline the maximum number of encoded characters per line to use, - * defaults to none. - * - * @return the base64-encoded output string. - */ -util.binary.base64.encode = function(input, maxline) { - var line = ''; - var output = ''; - var chr1, chr2, chr3; - var i = 0; - while(i < input.byteLength) { - chr1 = input[i++]; - chr2 = input[i++]; - chr3 = input[i++]; - - // encode 4 character group - line += _base64.charAt(chr1 >> 2); - line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4)); - if(isNaN(chr2)) { - line += '=='; - } else { - line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6)); - line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63); - } +// https://fetch.spec.whatwg.org/#fetching +function fetching ({ + request, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseEndOfBody, + processResponseConsumeBody, + useParallelQueue = false, + dispatcher // undici +}) { + // 1. Let taskDestination be null. + let taskDestination = null + + // 2. Let crossOriginIsolatedCapability be false. + let crossOriginIsolatedCapability = false + + // 3. If request’s client is non-null, then: + if (request.client != null) { + // 1. Set taskDestination to request’s client’s global object. + taskDestination = request.client.globalObject + + // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin + // isolated capability. + crossOriginIsolatedCapability = + request.client.crossOriginIsolatedCapability + } + + // 4. If useParallelQueue is true, then set taskDestination to the result of + // starting a new parallel queue. + // TODO + + // 5. Let timingInfo be a new fetch timing info whose start time and + // post-redirect start time are the coarsened shared current time given + // crossOriginIsolatedCapability. 
+ const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) + const timingInfo = createOpaqueTimingInfo({ + startTime: currenTime + }) - if(maxline && line.length > maxline) { - output += line.substr(0, maxline) + '\r\n'; - line = line.substr(maxline); + // 6. Let fetchParams be a new fetch params whose + // request is request, + // timing info is timingInfo, + // process request body chunk length is processRequestBodyChunkLength, + // process request end-of-body is processRequestEndOfBody, + // process response is processResponse, + // process response consume body is processResponseConsumeBody, + // process response end-of-body is processResponseEndOfBody, + // task destination is taskDestination, + // and cross-origin isolated capability is crossOriginIsolatedCapability. + const fetchParams = { + controller: new Fetch(dispatcher), + request, + timingInfo, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseConsumeBody, + processResponseEndOfBody, + taskDestination, + crossOriginIsolatedCapability + } + + // 7. If request’s body is a byte sequence, then set request’s body to + // request’s body as a body. + // NOTE: Since fetching is only called from fetch, body should already be + // extracted. + assert(!request.body || request.body.stream) + + // 8. If request’s window is "client", then set request’s window to request’s + // client, if request’s client’s global object is a Window object; otherwise + // "no-window". + if (request.window === 'client') { + // TODO: What if request.client is null? + request.window = + request.client?.globalObject?.constructor?.name === 'Window' + ? request.client + : 'no-window' + } + + // 9. If request’s origin is "client", then set request’s origin to request’s + // client’s origin. + if (request.origin === 'client') { + // TODO: What if request.client is null? + request.origin = request.client?.origin + } + + // 10. If all of the following conditions are true: + // TODO + + // 11. If request’s policy container is "client", then: + if (request.policyContainer === 'client') { + // 1. If request’s client is non-null, then set request’s policy + // container to a clone of request’s client’s policy container. [HTML] + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer + ) + } else { + // 2. Otherwise, set request’s policy container to a new policy + // container. + request.policyContainer = makePolicyContainer() } } - output += line; - return output; -}; -/** - * Decodes a base64-encoded string to a Uint8Array. - * - * @param input the base64-encoded input string. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. 
- */ -util.binary.base64.decode = function(input, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(Math.ceil(input.length / 4) * 3); - } - - // remove all non-base64 characters - input = input.replace(/[^A-Za-z0-9\+\/\=]/g, ''); - - offset = offset || 0; - var enc1, enc2, enc3, enc4; - var i = 0, j = offset; - - while(i < input.length) { - enc1 = _base64Idx[input.charCodeAt(i++) - 43]; - enc2 = _base64Idx[input.charCodeAt(i++) - 43]; - enc3 = _base64Idx[input.charCodeAt(i++) - 43]; - enc4 = _base64Idx[input.charCodeAt(i++) - 43]; - - out[j++] = (enc1 << 2) | (enc2 >> 4); - if(enc3 !== 64) { - // decoded at least 2 bytes - out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2); - if(enc4 !== 64) { - // decoded 3 bytes - out[j++] = ((enc3 & 3) << 6) | enc4; - } - } - } + // 12. If request’s header list does not contain `Accept`, then: + if (!request.headersList.contains('accept')) { + // 1. Let value be `*/*`. + const value = '*/*' - // make sure result is the exact decoded length - return output ? (j - offset) : out.subarray(0, j); -}; + // 2. A user agent should set value to the first matching statement, if + // any, switching on request’s destination: + // "document" + // "frame" + // "iframe" + // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` + // "image" + // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` + // "style" + // `text/css,*/*;q=0.1` + // TODO -// add support for base58 encoding/decoding with Bitcoin alphabet -util.binary.base58.encode = function(input, maxline) { - return util.binary.baseN.encode(input, _base58, maxline); -}; -util.binary.base58.decode = function(input, maxline) { - return util.binary.baseN.decode(input, _base58, maxline); -}; + // 3. Append `Accept`/value to request’s header list. + request.headersList.append('accept', value) + } -// text encoding/decoding tools -// FIXME: Experimental. Do not use yet. -util.text = { - utf8: {}, - utf16: {} -}; + // 13. If request’s header list does not contain `Accept-Language`, then + // user agents should append `Accept-Language`/an appropriate value to + // request’s header list. + if (!request.headersList.contains('accept-language')) { + request.headersList.append('accept-language', '*') + } -/** - * Encodes the given string as UTF-8 in a Uint8Array. - * - * @param str the string to encode. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.text.utf8.encode = function(str, output, offset) { - str = util.encodeUtf8(str); - var out = output; - if(!out) { - out = new Uint8Array(str.length); - } - offset = offset || 0; - var j = offset; - for(var i = 0; i < str.length; ++i) { - out[j++] = str.charCodeAt(i); - } - return output ? (j - offset) : out; -}; + // 14. If request’s priority is null, then use request’s initiator and + // destination appropriately in setting request’s priority to a + // user-agent-defined object. + if (request.priority === null) { + // TODO + } -/** - * Decodes the UTF-8 contents from a Uint8Array. - * - * @param bytes the Uint8Array to decode. - * - * @return the resulting string. - */ -util.text.utf8.decode = function(bytes) { - return util.decodeUtf8(String.fromCharCode.apply(null, bytes)); -}; + // 15. 
If request is a subresource request, then: + if (subresourceSet.has(request.destination)) { + // TODO + } -/** - * Encodes the given string as UTF-16 in a Uint8Array. - * - * @param str the string to encode. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.text.utf16.encode = function(str, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(str.length * 2); - } - var view = new Uint16Array(out.buffer); - offset = offset || 0; - var j = offset; - var k = offset; - for(var i = 0; i < str.length; ++i) { - view[k++] = str.charCodeAt(i); - j += 2; - } - return output ? (j - offset) : out; -}; + // 16. Run main fetch given fetchParams. + mainFetch(fetchParams) + .catch(err => { + fetchParams.controller.terminate(err) + }) -/** - * Decodes the UTF-16 contents from a Uint8Array. - * - * @param bytes the Uint8Array to decode. - * - * @return the resulting string. - */ -util.text.utf16.decode = function(bytes) { - return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer)); -}; + // 17. Return fetchParam's controller + return fetchParams.controller +} -/** - * Deflates the given data using a flash interface. - * - * @param api the flash interface. - * @param bytes the data. - * @param raw true to return only raw deflate data, false to include zlib - * header and trailer. - * - * @return the deflated data as a string. - */ -util.deflate = function(api, bytes, raw) { - bytes = util.decode64(api.deflate(util.encode64(bytes)).rval); +// https://fetch.spec.whatwg.org/#concept-main-fetch +async function mainFetch (fetchParams, recursive = false) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - // strip zlib header and trailer if necessary - if(raw) { - // zlib header is 2 bytes (CMF,FLG) where FLG indicates that - // there is a 4-byte DICT (alder-32) block before the data if - // its 5th bit is set - var start = 2; - var flg = bytes.charCodeAt(1); - if(flg & 0x20) { - start = 6; - } - // zlib trailer is 4 bytes of adler-32 - bytes = bytes.substring(start, bytes.length - 4); + // 2. Let response be null. + let response = null + + // 3. If request’s local-URLs-only flag is set and request’s current URL is + // not local, then set response to a network error. + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { + response = makeNetworkError('local URLs only') } - return bytes; -}; + // 4. Run report Content Security Policy violations for request. + // TODO -/** - * Inflates the given data using a flash interface. - * - * @param api the flash interface. - * @param bytes the data. - * @param raw true if the incoming data has no zlib header or trailer and is - * raw DEFLATE data. - * - * @return the inflated data as a string, null on error. - */ -util.inflate = function(api, bytes, raw) { - // TODO: add zlib header and trailer if necessary/possible - var rval = api.inflate(util.encode64(bytes)).rval; - return (rval === null) ? null : util.decode64(rval); -}; + // 5. Upgrade request to a potentially trustworthy URL, if appropriate. + tryUpgradeRequestToAPotentiallyTrustworthyURL(request) -/** - * Sets a storage object. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param obj the storage object, null to remove. 
- */ -var _setStorageObject = function(api, id, obj) { - if(!api) { - throw new Error('WebStorage not available.'); + // 6. If should request be blocked due to a bad port, should fetching request + // be blocked as mixed content, or should request be blocked by Content + // Security Policy returns blocked, then set response to a network error. + if (requestBadPort(request) === 'blocked') { + response = makeNetworkError('bad port') } + // TODO: should fetching request be blocked as mixed content? + // TODO: should request be blocked by Content Security Policy? - var rval; - if(obj === null) { - rval = api.removeItem(id); - } else { - // json-encode and base64-encode object - obj = util.encode64(JSON.stringify(obj)); - rval = api.setItem(id, obj); + // 7. If request’s referrer policy is the empty string, then set request’s + // referrer policy to request’s policy container’s referrer policy. + if (request.referrerPolicy === '') { + request.referrerPolicy = request.policyContainer.referrerPolicy } - // handle potential flash error - if(typeof(rval) !== 'undefined' && rval.rval !== true) { - var error = new Error(rval.error.message); - error.id = rval.error.id; - error.name = rval.error.name; - throw error; + // 8. If request’s referrer is not "no-referrer", then set request’s + // referrer to the result of invoking determine request’s referrer. + if (request.referrer !== 'no-referrer') { + request.referrer = determineRequestsReferrer(request) } -}; -/** - * Gets a storage object. - * - * @param api the storage interface. - * @param id the storage ID to use. - * - * @return the storage object entry or null if none exists. - */ -var _getStorageObject = function(api, id) { - if(!api) { - throw new Error('WebStorage not available.'); - } - - // get the existing entry - var rval = api.getItem(id); - - /* Note: We check api.init because we can't do (api == localStorage) - on IE because of "Class doesn't support Automation" exception. Only - the flash api has an init method so this works too, but we need a - better solution in the future. */ - - // flash returns item wrapped in an object, handle special case - if(api.init) { - if(rval.rval === null) { - if(rval.error) { - var error = new Error(rval.error.message); - error.id = rval.error.id; - error.name = rval.error.name; - throw error; + // 9. Set request’s current URL’s scheme to "https" if all of the following + // conditions are true: + // - request’s current URL’s scheme is "http" + // - request’s current URL’s host is a domain + // - Matching request’s current URL’s host per Known HSTS Host Domain Name + // Matching results in either a superdomain match with an asserted + // includeSubDomains directive or a congruent match (with or without an + // asserted includeSubDomains directive). [HSTS] + // TODO + + // 10. If recursive is false, then run the remaining steps in parallel. + // TODO + + // 11. 
If response is null, then set response to the result of running + // the steps corresponding to the first matching statement: + if (response === null) { + response = await (async () => { + const currentURL = requestCurrentURL(request) + + if ( + // - request’s current URL’s origin is same origin with request’s origin, + // and request’s response tainting is "basic" + (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || + // request’s current URL’s scheme is "data" + (currentURL.protocol === 'data:') || + // - request’s mode is "navigate" or "websocket" + (request.mode === 'navigate' || request.mode === 'websocket') + ) { + // 1. Set request’s response tainting to "basic". + request.responseTainting = 'basic' + + // 2. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) } - // no error, but also no item - rval = null; - } else { - rval = rval.rval; - } - } - // handle decoding - if(rval !== null) { - // base64-decode and json-decode data - rval = JSON.parse(util.decode64(rval)); - } + // request’s mode is "same-origin" + if (request.mode === 'same-origin') { + // 1. Return a network error. + return makeNetworkError('request mode cannot be "same-origin"') + } - return rval; -}; + // request’s mode is "no-cors" + if (request.mode === 'no-cors') { + // 1. If request’s redirect mode is not "follow", then return a network + // error. + if (request.redirect !== 'follow') { + return makeNetworkError( + 'redirect mode cannot be "follow" for "no-cors" request' + ) + } -/** - * Stores an item in local storage. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param key the key for the item. - * @param data the data for the item (any javascript object/primitive). - */ -var _setItem = function(api, id, key, data) { - // get storage object - var obj = _getStorageObject(api, id); - if(obj === null) { - // create a new storage object - obj = {}; - } - // update key - obj[key] = data; - - // set storage object - _setStorageObject(api, id, obj); -}; + // 2. Set request’s response tainting to "opaque". + request.responseTainting = 'opaque' -/** - * Gets an item from local storage. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param key the key for the item. - * - * @return the item. - */ -var _getItem = function(api, id, key) { - // get storage object - var rval = _getStorageObject(api, id); - if(rval !== null) { - // return data at key - rval = (key in rval) ? rval[key] : null; - } + // 3. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } - return rval; -}; + // request’s current URL’s scheme is not an HTTP(S) scheme + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { + // Return a network error. + return makeNetworkError('URL scheme must be a HTTP(S) scheme') + } -/** - * Removes an item from local storage. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param key the key for the item. 
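/*
 * A simplified paraphrase, not the bundled implementation, of the response-tainting
 * decision in mainFetch step 11 above: "basic" for same-origin requests, data: URLs
 * and navigate/websocket modes; a network error for mode "same-origin"; "opaque" for
 * "no-cors" (which also requires redirect mode "follow"); otherwise "cors" for HTTP(S)
 * URLs, which then go through HTTP fetch.
 */
function decideResponseTainting (request, currentURL, isSameOrigin) {
  if (
    (isSameOrigin && request.responseTainting === 'basic') ||
    currentURL.protocol === 'data:' ||
    request.mode === 'navigate' ||
    request.mode === 'websocket'
  ) {
    return 'basic'                                   // handled by scheme fetch
  }
  if (request.mode === 'same-origin') {
    return 'network error'                           // request mode cannot be "same-origin"
  }
  if (request.mode === 'no-cors') {
    return request.redirect !== 'follow' ? 'network error' : 'opaque'
  }
  if (currentURL.protocol !== 'http:' && currentURL.protocol !== 'https:') {
    return 'network error'                           // non-HTTP(S) scheme
  }
  return 'cors'                                      // falls through to HTTP fetch
}

// e.g. a cross-origin GET in the default mode ends up tainted as "cors":
console.log(decideResponseTainting(
  { responseTainting: 'basic', mode: 'cors', redirect: 'follow' },
  new URL('https://api.example.com/data'),
  false
))  // 'cors'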
- */ -var _removeItem = function(api, id, key) { - // get storage object - var obj = _getStorageObject(api, id); - if(obj !== null && key in obj) { - // remove key - delete obj[key]; - - // see if entry has no keys remaining - var empty = true; - for(var prop in obj) { - empty = false; - break; + // - request’s use-CORS-preflight flag is set + // - request’s unsafe-request flag is set and either request’s method is + // not a CORS-safelisted method or CORS-unsafe request-header names with + // request’s header list is not empty + // 1. Set request’s response tainting to "cors". + // 2. Let corsWithPreflightResponse be the result of running HTTP fetch + // given fetchParams and true. + // 3. If corsWithPreflightResponse is a network error, then clear cache + // entries using request. + // 4. Return corsWithPreflightResponse. + // TODO + + // Otherwise + // 1. Set request’s response tainting to "cors". + request.responseTainting = 'cors' + + // 2. Return the result of running HTTP fetch given fetchParams. + return await httpFetch(fetchParams) + })() + } + + // 12. If recursive is true, then return response. + if (recursive) { + return response + } + + // 13. If response is not a network error and response is not a filtered + // response, then: + if (response.status !== 0 && !response.internalResponse) { + // If request’s response tainting is "cors", then: + if (request.responseTainting === 'cors') { + // 1. Let headerNames be the result of extracting header list values + // given `Access-Control-Expose-Headers` and response’s header list. + // TODO + // 2. If request’s credentials mode is not "include" and headerNames + // contains `*`, then set response’s CORS-exposed header-name list to + // all unique header names in response’s header list. + // TODO + // 3. Otherwise, if headerNames is not null or failure, then set + // response’s CORS-exposed header-name list to headerNames. + // TODO + } + + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (request.responseTainting === 'basic') { + response = filterResponse(response, 'basic') + } else if (request.responseTainting === 'cors') { + response = filterResponse(response, 'cors') + } else if (request.responseTainting === 'opaque') { + response = filterResponse(response, 'opaque') + } else { + assert(false) + } + } + + // 14. Let internalResponse be response, if response is a network error, + // and response’s internal response otherwise. + let internalResponse = + response.status === 0 ? response : response.internalResponse + + // 15. If internalResponse’s URL list is empty, then set it to a clone of + // request’s URL list. + if (internalResponse.urlList.length === 0) { + internalResponse.urlList.push(...request.urlList) + } + + // 16. If request’s timing allow failed flag is unset, then set + // internalResponse’s timing allow passed flag. + if (!request.timingAllowFailed) { + response.timingAllowPassed = true + } + + // 17. If response is not a network error and any of the following returns + // blocked + // - should internalResponse to request be blocked as mixed content + // - should internalResponse to request be blocked by Content Security Policy + // - should internalResponse to request be blocked due to its MIME type + // - should internalResponse to request be blocked due to nosniff + // TODO + + // 18. 
If response’s type is "opaque", internalResponse’s status is 206, + // internalResponse’s range-requested flag is set, and request’s header + // list does not contain `Range`, then set response and internalResponse + // to a network error. + if ( + response.type === 'opaque' && + internalResponse.status === 206 && + internalResponse.rangeRequested && + !request.headers.contains('range') + ) { + response = internalResponse = makeNetworkError() + } + + // 19. If response is not a network error and either request’s method is + // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, + // set internalResponse’s body to null and disregard any enqueuing toward + // it (if any). + if ( + response.status !== 0 && + (request.method === 'HEAD' || + request.method === 'CONNECT' || + nullBodyStatus.includes(internalResponse.status)) + ) { + internalResponse.body = null + fetchParams.controller.dump = true + } + + // 20. If request’s integrity metadata is not the empty string, then: + if (request.integrity) { + // 1. Let processBodyError be this step: run fetch finale given fetchParams + // and a network error. + const processBodyError = (reason) => + fetchFinale(fetchParams, makeNetworkError(reason)) + + // 2. If request’s response tainting is "opaque", or response’s body is null, + // then run processBodyError and abort these steps. + if (request.responseTainting === 'opaque' || response.body == null) { + processBodyError(response.error) + return } - if(empty) { - // remove entry entirely if no keys are left - obj = null; + + // 3. Let processBody given bytes be these steps: + const processBody = (bytes) => { + // 1. If bytes do not match request’s integrity metadata, + // then run processBodyError and abort these steps. [SRI] + if (!bytesMatch(bytes, request.integrity)) { + processBodyError('integrity mismatch') + return + } + + // 2. Set response’s body to bytes as a body. + response.body = safelyExtractBody(bytes)[0] + + // 3. Run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) } - // set storage object - _setStorageObject(api, id, obj); + // 4. Fully read response’s body given processBody and processBodyError. + await fullyReadBody(response.body, processBody, processBodyError) + } else { + // 21. Otherwise, run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) } -}; +} -/** - * Clears the local disk storage identified by the given ID. - * - * @param api the storage interface. - * @param id the storage ID to use. - */ -var _clearItems = function(api, id) { - _setStorageObject(api, id, null); -}; +// https://fetch.spec.whatwg.org/#concept-scheme-fetch +// given a fetch params fetchParams +function schemeFetch (fetchParams) { + // Note: since the connection is destroyed on redirect, which sets fetchParams to a + // cancelled state, we do not want this condition to trigger *unless* there have been + // no redirects. See https://github.com/nodejs/undici/issues/1776 + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) + } -/** - * Calls a storage function. - * - * @param func the function to call. - * @param args the arguments for the function. - * @param location the location argument. - * - * @return the return value from the function. - */ -var _callStorageFunction = function(func, args, location) { - var rval = null; + // 2. 
Let request be fetchParams’s request. + const { request } = fetchParams - // default storage types - if(typeof(location) === 'undefined') { - location = ['web', 'flash']; - } + const { protocol: scheme } = requestCurrentURL(request) - // apply storage types in order of preference - var type; - var done = false; - var exception = null; - for(var idx in location) { - type = location[idx]; - try { - if(type === 'flash' || type === 'both') { - if(args[0] === null) { - throw new Error('Flash local storage not available.'); - } - rval = func.apply(this, args); - done = (type === 'flash'); + // 3. Switch on request’s current URL’s scheme and run the associated steps: + switch (scheme) { + case 'about:': { + // If request’s current URL’s path is the string "blank", then return a new response + // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », + // and body is the empty byte sequence as a body. + + // Otherwise, return a network error. + return Promise.resolve(makeNetworkError('about scheme is not supported')) + } + case 'blob:': { + if (!resolveObjectURL) { + resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL) } - if(type === 'web' || type === 'both') { - args[0] = localStorage; - rval = func.apply(this, args); - done = true; + + // 1. Let blobURLEntry be request’s current URL’s blob URL entry. + const blobURLEntry = requestCurrentURL(request) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + // Buffer.resolveObjectURL does not ignore URL queries. + if (blobURLEntry.search.length !== 0) { + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) } - } catch(ex) { - exception = ex; - } - if(done) { - break; - } - } - if(!done) { - throw exception; - } + const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) - return rval; -}; + // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s + // object is not a Blob object, then return a network error. + if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { + return Promise.resolve(makeNetworkError('invalid method')) + } -/** - * Stores an item on local disk. - * - * The available types of local storage include 'flash', 'web', and 'both'. - * - * The type 'flash' refers to flash local storage (SharedObject). In order - * to use flash local storage, the 'api' parameter must be valid. The type - * 'web' refers to WebStorage, if supported by the browser. The type 'both' - * refers to storing using both 'flash' and 'web', not just one or the - * other. - * - * The location array should list the storage types to use in order of - * preference: - * - * ['flash']: flash only storage - * ['web']: web only storage - * ['both']: try to store in both - * ['flash','web']: store in flash first, but if not available, 'web' - * ['web','flash']: store in web first, but if not available, 'flash' - * - * The location array defaults to: ['web', 'flash'] - * - * @param api the flash interface, null to use only WebStorage. - * @param id the storage ID to use. - * @param key the key for the item. - * @param data the data for the item (any javascript object/primitive). - * @param location an array with the preferred types of storage to use. - */ -util.setItem = function(api, id, key, data, location) { - _callStorageFunction(_setItem, arguments, location); -}; + // 3. 
Let bodyWithType be the result of safely extracting blobURLEntry’s object. + const bodyWithType = safelyExtractBody(blobURLEntryObject) -/** - * Gets an item on local disk. - * - * Set setItem() for details on storage types. - * - * @param api the flash interface, null to use only WebStorage. - * @param id the storage ID to use. - * @param key the key for the item. - * @param location an array with the preferred types of storage to use. - * - * @return the item. - */ -util.getItem = function(api, id, key, location) { - return _callStorageFunction(_getItem, arguments, location); -}; + // 4. Let body be bodyWithType’s body. + const body = bodyWithType[0] -/** - * Removes an item on local disk. - * - * Set setItem() for details on storage types. - * - * @param api the flash interface. - * @param id the storage ID to use. - * @param key the key for the item. - * @param location an array with the preferred types of storage to use. - */ -util.removeItem = function(api, id, key, location) { - _callStorageFunction(_removeItem, arguments, location); -}; + // 5. Let length be body’s length, serialized and isomorphic encoded. + const length = isomorphicEncode(`${body.length}`) -/** - * Clears the local disk storage identified by the given ID. - * - * Set setItem() for details on storage types. - * - * @param api the flash interface if flash is available. - * @param id the storage ID to use. - * @param location an array with the preferred types of storage to use. - */ -util.clearItems = function(api, id, location) { - _callStorageFunction(_clearItems, arguments, location); -}; + // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. + const type = bodyWithType[1] ?? '' -/** - * Check if an object is empty. - * - * Taken from: - * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937 - * - * @param object the object to check. - */ -util.isEmpty = function(obj) { - for(var prop in obj) { - if(obj.hasOwnProperty(prop)) { - return false; + // 7. Return a new response whose status message is `OK`, header list is + // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. + const response = makeResponse({ + statusText: 'OK', + headersList: [ + ['content-length', { name: 'Content-Length', value: length }], + ['content-type', { name: 'Content-Type', value: type }] + ] + }) + + response.body = body + + return Promise.resolve(response) } - } - return true; -}; + case 'data:': { + // 1. Let dataURLStruct be the result of running the + // data: URL processor on request’s current URL. + const currentURL = requestCurrentURL(request) + const dataURLStruct = dataURLProcessor(currentURL) -/** - * Format with simple printf-style interpolation. - * - * %%: literal '%' - * %s,%o: convert next argument into a string. - * - * @param format the string to format. - * @param ... arguments to interpolate into the format string. 
- */ -util.format = function(format) { - var re = /%./g; - // current match - var match; - // current part - var part; - // current arg index - var argi = 0; - // collected parts to recombine later - var parts = []; - // last index found - var last = 0; - // loop while matches remain - while((match = re.exec(format))) { - part = format.substring(last, re.lastIndex - 2); - // don't add empty strings (ie, parts between %s%s) - if(part.length > 0) { - parts.push(part); - } - last = re.lastIndex; - // switch on % code - var code = match[0][1]; - switch(code) { - case 's': - case 'o': - // check if enough arguments were given - if(argi < arguments.length) { - parts.push(arguments[argi++ + 1]); - } else { - parts.push(''); + // 2. If dataURLStruct is failure, then return a + // network error. + if (dataURLStruct === 'failure') { + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) } - break; - // FIXME: do proper formating for numbers, etc - //case 'f': - //case 'd': - case '%': - parts.push('%'); - break; - default: - parts.push('<%' + code + '?>'); + + // 3. Let mimeType be dataURLStruct’s MIME type, serialized. + const mimeType = serializeAMimeType(dataURLStruct.mimeType) + + // 4. Return a response whose status message is `OK`, + // header list is « (`Content-Type`, mimeType) », + // and body is dataURLStruct’s body as a body. + return Promise.resolve(makeResponse({ + statusText: 'OK', + headersList: [ + ['content-type', { name: 'Content-Type', value: mimeType }] + ], + body: safelyExtractBody(dataURLStruct.body)[0] + })) + } + case 'file:': { + // For now, unfortunate as it is, file URLs are left as an exercise for the reader. + // When in doubt, return a network error. + return Promise.resolve(makeNetworkError('not implemented... yet...')) + } + case 'http:': + case 'https:': { + // Return the result of running HTTP fetch given fetchParams. + + return httpFetch(fetchParams) + .catch((err) => makeNetworkError(err)) + } + default: { + return Promise.resolve(makeNetworkError('unknown scheme')) } } - // add trailing part of format string - parts.push(format.substring(last)); - return parts.join(''); -}; +} -/** - * Formats a number. - * - * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/ - */ -util.formatNumber = function(number, decimals, dec_point, thousands_sep) { - // http://kevin.vanzonneveld.net - // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) - // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net) - // + bugfix by: Michael White (http://crestidg.com) - // + bugfix by: Benjamin Lupton - // + bugfix by: Allan Jensen (http://www.winternet.no) - // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) - // * example 1: number_format(1234.5678, 2, '.', ''); - // * returns 1: 1234.57 - - var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals; - var d = dec_point === undefined ? ',' : dec_point; - var t = thousands_sep === undefined ? - '.' : thousands_sep, s = n < 0 ? '-' : ''; - var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + ''; - var j = (i.length > 3) ? i.length % 3 : 0; - return s + (j ? i.substr(0, j) + t : '') + - i.substr(j).replace(/(\d{3})(?=\d)/g, '$1' + t) + - (c ? d + Math.abs(n - i).toFixed(c).slice(2) : ''); -}; +// https://fetch.spec.whatwg.org/#finalize-response +function finalizeResponse (fetchParams, response) { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true -/** - * Formats a byte size. 
- * - * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/ - */ -util.formatSize = function(size) { - if(size >= 1073741824) { - size = util.formatNumber(size / 1073741824, 2, '.', '') + ' GiB'; - } else if(size >= 1048576) { - size = util.formatNumber(size / 1048576, 2, '.', '') + ' MiB'; - } else if(size >= 1024) { - size = util.formatNumber(size / 1024, 0) + ' KiB'; - } else { - size = util.formatNumber(size, 0) + ' bytes'; + // 2, If fetchParams’s process response done is not null, then queue a fetch + // task to run fetchParams’s process response done given response, with + // fetchParams’s task destination. + if (fetchParams.processResponseDone != null) { + queueMicrotask(() => fetchParams.processResponseDone(response)) } - return size; -}; +} -/** - * Converts an IPv4 or IPv6 string representation into bytes (in network order). - * - * @param ip the IPv4 or IPv6 address to convert. - * - * @return the 4-byte IPv6 or 16-byte IPv6 address or null if the address can't - * be parsed. - */ -util.bytesFromIP = function(ip) { - if(ip.indexOf('.') !== -1) { - return util.bytesFromIPv4(ip); - } - if(ip.indexOf(':') !== -1) { - return util.bytesFromIPv6(ip); - } - return null; -}; +// https://fetch.spec.whatwg.org/#fetch-finale +function fetchFinale (fetchParams, response) { + // 1. If response is a network error, then: + if (response.type === 'error') { + // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». + response.urlList = [fetchParams.request.urlList[0]] -/** - * Converts an IPv4 string representation into bytes (in network order). - * - * @param ip the IPv4 address to convert. - * - * @return the 4-byte address or null if the address can't be parsed. - */ -util.bytesFromIPv4 = function(ip) { - ip = ip.split('.'); - if(ip.length !== 4) { - return null; - } - var b = util.createBuffer(); - for(var i = 0; i < ip.length; ++i) { - var num = parseInt(ip[i], 10); - if(isNaN(num)) { - return null; - } - b.putByte(num); + // 2. Set response’s timing info to the result of creating an opaque timing + // info for fetchParams’s timing info. + response.timingInfo = createOpaqueTimingInfo({ + startTime: fetchParams.timingInfo.startTime + }) } - return b.getBytes(); -}; -/** - * Converts an IPv6 string representation into bytes (in network order). - * - * @param ip the IPv6 address to convert. - * - * @return the 16-byte address or null if the address can't be parsed. - */ -util.bytesFromIPv6 = function(ip) { - var blanks = 0; - ip = ip.split(':').filter(function(e) { - if(e.length === 0) ++blanks; - return true; - }); - var zeros = (8 - ip.length + blanks) * 2; - var b = util.createBuffer(); - for(var i = 0; i < 8; ++i) { - if(!ip[i] || ip[i].length === 0) { - b.fillWithByte(0, zeros); - zeros = 0; - continue; - } - var bytes = util.hexToBytes(ip[i]); - if(bytes.length < 2) { - b.putByte(0); + // 2. Let processResponseEndOfBody be the following steps: + const processResponseEndOfBody = () => { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true + + // If fetchParams’s process response end-of-body is not null, + // then queue a fetch task to run fetchParams’s process response + // end-of-body given response with fetchParams’s task destination. 
+ if (fetchParams.processResponseEndOfBody != null) { + queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) } - b.putBytes(bytes); } - return b.getBytes(); -}; -/** - * Converts 4-bytes into an IPv4 string representation or 16-bytes into - * an IPv6 string representation. The bytes must be in network order. - * - * @param bytes the bytes to convert. - * - * @return the IPv4 or IPv6 string representation if 4 or 16 bytes, - * respectively, are given, otherwise null. - */ -util.bytesToIP = function(bytes) { - if(bytes.length === 4) { - return util.bytesToIPv4(bytes); + // 3. If fetchParams’s process response is non-null, then queue a fetch task + // to run fetchParams’s process response given response, with fetchParams’s + // task destination. + if (fetchParams.processResponse != null) { + queueMicrotask(() => fetchParams.processResponse(response)) } - if(bytes.length === 16) { - return util.bytesToIPv6(bytes); - } - return null; -}; -/** - * Converts 4-bytes into an IPv4 string representation. The bytes must be - * in network order. - * - * @param bytes the bytes to convert. - * - * @return the IPv4 string representation or null for an invalid # of bytes. - */ -util.bytesToIPv4 = function(bytes) { - if(bytes.length !== 4) { - return null; - } - var ip = []; - for(var i = 0; i < bytes.length; ++i) { - ip.push(bytes.charCodeAt(i)); - } - return ip.join('.'); -}; + // 4. If response’s body is null, then run processResponseEndOfBody. + if (response.body == null) { + processResponseEndOfBody() + } else { + // 5. Otherwise: -/** - * Converts 16-bytes into an IPv16 string representation. The bytes must be - * in network order. - * - * @param bytes the bytes to convert. - * - * @return the IPv16 string representation or null for an invalid # of bytes. - */ -util.bytesToIPv6 = function(bytes) { - if(bytes.length !== 16) { - return null; - } - var ip = []; - var zeroGroups = []; - var zeroMaxGroup = 0; - for(var i = 0; i < bytes.length; i += 2) { - var hex = util.bytesToHex(bytes[i] + bytes[i + 1]); - // canonicalize zero representation - while(hex[0] === '0' && hex !== '0') { - hex = hex.substr(1); - } - if(hex === '0') { - var last = zeroGroups[zeroGroups.length - 1]; - var idx = ip.length; - if(!last || idx !== last.end + 1) { - zeroGroups.push({start: idx, end: idx}); - } else { - last.end = idx; - if((last.end - last.start) > - (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) { - zeroMaxGroup = zeroGroups.length - 1; - } - } + // 1. Let transformStream be a new a TransformStream. + + // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, + // enqueues chunk in transformStream. + const identityTransformAlgorithm = (chunk, controller) => { + controller.enqueue(chunk) } - ip.push(hex); - } - if(zeroGroups.length > 0) { - var group = zeroGroups[zeroMaxGroup]; - // only shorten group of length > 0 - if(group.end - group.start > 0) { - ip.splice(group.start, group.end - group.start + 1, ''); - if(group.start === 0) { - ip.unshift(''); + + // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm + // and flushAlgorithm set to processResponseEndOfBody. + const transformStream = new TransformStream({ + start () {}, + transform: identityTransformAlgorithm, + flush: processResponseEndOfBody + }, { + size () { + return 1 } - if(group.end === 7) { - ip.push(''); + }, { + size () { + return 1 } - } - } - return ip.join(':'); -}; + }) -/** - * Estimates the number of processes that can be run concurrently. 
If - * creating Web Workers, keep in mind that the main JavaScript process needs - * its own core. - * - * @param options the options to use: - * update true to force an update (not use the cached value). - * @param callback(err, max) called once the operation completes. - */ -util.estimateCores = function(options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - if('cores' in util && !options.update) { - return callback(null, util.cores); - } - if(typeof navigator !== 'undefined' && - 'hardwareConcurrency' in navigator && - navigator.hardwareConcurrency > 0) { - util.cores = navigator.hardwareConcurrency; - return callback(null, util.cores); - } - if(typeof Worker === 'undefined') { - // workers not available - util.cores = 1; - return callback(null, util.cores); - } - if(typeof Blob === 'undefined') { - // can't estimate, default to 2 - util.cores = 2; - return callback(null, util.cores); - } - - // create worker concurrency estimation code as blob - var blobUrl = URL.createObjectURL(new Blob(['(', - function() { - self.addEventListener('message', function(e) { - // run worker for 4 ms - var st = Date.now(); - var et = st + 4; - while(Date.now() < et); - self.postMessage({st: st, et: et}); - }); - }.toString(), - ')()'], {type: 'application/javascript'})); - - // take 5 samples using 16 workers - sample([], 5, 16); - - function sample(max, samples, numWorkers) { - if(samples === 0) { - // get overlap average - var avg = Math.floor(max.reduce(function(avg, x) { - return avg + x; - }, 0) / max.length); - util.cores = Math.max(1, avg); - URL.revokeObjectURL(blobUrl); - return callback(null, util.cores); - } - map(numWorkers, function(err, results) { - max.push(reduce(numWorkers, results)); - sample(max, samples - 1, numWorkers); - }); + // 4. Set response’s body to the result of piping response’s body through transformStream. + response.body = { stream: response.body.stream.pipeThrough(transformStream) } } - function map(numWorkers, callback) { - var workers = []; - var results = []; - for(var i = 0; i < numWorkers; ++i) { - var worker = new Worker(blobUrl); - worker.addEventListener('message', function(e) { - results.push(e.data); - if(results.length === numWorkers) { - for(var i = 0; i < numWorkers; ++i) { - workers[i].terminate(); - } - callback(null, results); - } - }); - workers.push(worker); - } - for(var i = 0; i < numWorkers; ++i) { - workers[i].postMessage(i); - } - } + // 6. If fetchParams’s process response consume body is non-null, then: + if (fetchParams.processResponseConsumeBody != null) { + // 1. Let processBody given nullOrBytes be this step: run fetchParams’s + // process response consume body given response and nullOrBytes. + const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) - function reduce(numWorkers, results) { - // find overlapping time windows - var overlaps = []; - for(var n = 0; n < numWorkers; ++n) { - var r1 = results[n]; - var overlap = overlaps[n] = []; - for(var i = 0; i < numWorkers; ++i) { - if(n === i) { - continue; - } - var r2 = results[i]; - if((r1.st > r2.st && r1.st < r2.et) || - (r2.st > r1.st && r2.st < r1.et)) { - overlap.push(i); - } - } + // 2. Let processBodyError be this step: run fetchParams’s process + // response consume body given response and failure. + const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + + // 3. 
If response’s body is null, then queue a fetch task to run processBody + // given null, with fetchParams’s task destination. + if (response.body == null) { + queueMicrotask(() => processBody(null)) + } else { + // 4. Otherwise, fully read response’s body given processBody, processBodyError, + // and fetchParams’s task destination. + return fullyReadBody(response.body, processBody, processBodyError) } - // get maximum overlaps ... don't include overlapping worker itself - // as the main JS process was also being scheduled during the work and - // would have to be subtracted from the estimate anyway - return overlaps.reduce(function(max, overlap) { - return Math.max(max, overlap.length); - }, 0); + return Promise.resolve() } -}; +} +// https://fetch.spec.whatwg.org/#http-fetch +async function httpFetch (fetchParams) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request -/***/ }), + // 2. Let response be null. + let response = null -/***/ 8180: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 3. Let actualResponse be null. + let actualResponse = null -/** - * Javascript implementation of X.509 and related components (such as - * Certification Signing Requests) of a Public Key Infrastructure. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - * - * The ASN.1 representation of an X.509v3 certificate is as follows - * (see RFC 2459): - * - * Certificate ::= SEQUENCE { - * tbsCertificate TBSCertificate, - * signatureAlgorithm AlgorithmIdentifier, - * signatureValue BIT STRING - * } - * - * TBSCertificate ::= SEQUENCE { - * version [0] EXPLICIT Version DEFAULT v1, - * serialNumber CertificateSerialNumber, - * signature AlgorithmIdentifier, - * issuer Name, - * validity Validity, - * subject Name, - * subjectPublicKeyInfo SubjectPublicKeyInfo, - * issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL, - * -- If present, version shall be v2 or v3 - * subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL, - * -- If present, version shall be v2 or v3 - * extensions [3] EXPLICIT Extensions OPTIONAL - * -- If present, version shall be v3 - * } - * - * Version ::= INTEGER { v1(0), v2(1), v3(2) } - * - * CertificateSerialNumber ::= INTEGER - * - * Name ::= CHOICE { - * // only one possible choice for now - * RDNSequence - * } - * - * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName - * - * RelativeDistinguishedName ::= SET OF AttributeTypeAndValue - * - * AttributeTypeAndValue ::= SEQUENCE { - * type AttributeType, - * value AttributeValue - * } - * AttributeType ::= OBJECT IDENTIFIER - * AttributeValue ::= ANY DEFINED BY AttributeType - * - * Validity ::= SEQUENCE { - * notBefore Time, - * notAfter Time - * } - * - * Time ::= CHOICE { - * utcTime UTCTime, - * generalTime GeneralizedTime - * } - * - * UniqueIdentifier ::= BIT STRING - * - * SubjectPublicKeyInfo ::= SEQUENCE { - * algorithm AlgorithmIdentifier, - * subjectPublicKey BIT STRING - * } - * - * Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension - * - * Extension ::= SEQUENCE { - * extnID OBJECT IDENTIFIER, - * critical BOOLEAN DEFAULT FALSE, - * extnValue OCTET STRING - * } - * - * The only key algorithm currently supported for PKI is RSA. - * - * RSASSA-PSS signatures are described in RFC 3447 and RFC 4055. 
- * - * PKCS#10 v1.7 describes certificate signing requests: - * - * CertificationRequestInfo: - * - * CertificationRequestInfo ::= SEQUENCE { - * version INTEGER { v1(0) } (v1,...), - * subject Name, - * subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }}, - * attributes [0] Attributes{{ CRIAttributes }} - * } - * - * Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }} - * - * CRIAttributes ATTRIBUTE ::= { - * ... -- add any locally defined attributes here -- } - * - * Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE { - * type ATTRIBUTE.&id({IOSet}), - * values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type}) - * } - * - * CertificationRequest ::= SEQUENCE { - * certificationRequestInfo CertificationRequestInfo, - * signatureAlgorithm AlgorithmIdentifier{{ SignatureAlgorithms }}, - * signature BIT STRING - * } - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9549); -__nccwpck_require__(7157); -__nccwpck_require__(6231); -__nccwpck_require__(7973); -__nccwpck_require__(1925); -__nccwpck_require__(154); -__nccwpck_require__(4376); -__nccwpck_require__(3921); -__nccwpck_require__(8339); - -// shortcut for asn.1 API -var asn1 = forge.asn1; - -/* Public Key Infrastructure (PKI) implementation. */ -var pki = module.exports = forge.pki = forge.pki || {}; -var oids = pki.oids; - -// short name OID mappings -var _shortNames = {}; -_shortNames['CN'] = oids['commonName']; -_shortNames['commonName'] = 'CN'; -_shortNames['C'] = oids['countryName']; -_shortNames['countryName'] = 'C'; -_shortNames['L'] = oids['localityName']; -_shortNames['localityName'] = 'L'; -_shortNames['ST'] = oids['stateOrProvinceName']; -_shortNames['stateOrProvinceName'] = 'ST'; -_shortNames['O'] = oids['organizationName']; -_shortNames['organizationName'] = 'O'; -_shortNames['OU'] = oids['organizationalUnitName']; -_shortNames['organizationalUnitName'] = 'OU'; -_shortNames['E'] = oids['emailAddress']; -_shortNames['emailAddress'] = 'E'; - -// validator for an SubjectPublicKeyInfo structure -// Note: Currently only works with an RSA public key -var publicKeyValidator = forge.pki.rsa.publicKeyValidator; - -// validator for an X.509v3 certificate -var x509CertificateValidator = { - name: 'Certificate', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'Certificate.TBSCertificate', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'tbsCertificate', - value: [{ - name: 'Certificate.TBSCertificate.version', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - value: [{ - name: 'Certificate.TBSCertificate.version.integer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'certVersion' - }] - }, { - name: 'Certificate.TBSCertificate.serialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'certSerialNumber' - }, { - name: 'Certificate.TBSCertificate.signature', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'Certificate.TBSCertificate.signature.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'certinfoSignatureOid' - }, { - name: 'Certificate.TBSCertificate.signature.parameters', - tagClass: asn1.Class.UNIVERSAL, - optional: true, - captureAsn1: 'certinfoSignatureParams' - }] - }, { - name: 'Certificate.TBSCertificate.issuer', - 
tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certIssuer' - }, { - name: 'Certificate.TBSCertificate.validity', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - // Note: UTC and generalized times may both appear so the capture - // names are based on their detected order, the names used below - // are only for the common case, which validity time really means - // "notBefore" and which means "notAfter" will be determined by order - value: [{ - // notBefore (Time) (UTC time case) - name: 'Certificate.TBSCertificate.validity.notBefore (utc)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.UTCTIME, - constructed: false, - optional: true, - capture: 'certValidity1UTCTime' - }, { - // notBefore (Time) (generalized time case) - name: 'Certificate.TBSCertificate.validity.notBefore (generalized)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.GENERALIZEDTIME, - constructed: false, - optional: true, - capture: 'certValidity2GeneralizedTime' - }, { - // notAfter (Time) (only UTC time is supported) - name: 'Certificate.TBSCertificate.validity.notAfter (utc)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.UTCTIME, - constructed: false, - optional: true, - capture: 'certValidity3UTCTime' - }, { - // notAfter (Time) (only UTC time is supported) - name: 'Certificate.TBSCertificate.validity.notAfter (generalized)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.GENERALIZEDTIME, - constructed: false, - optional: true, - capture: 'certValidity4GeneralizedTime' - }] - }, { - // Name (subject) (RDNSequence) - name: 'Certificate.TBSCertificate.subject', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certSubject' - }, - // SubjectPublicKeyInfo - publicKeyValidator, - { - // issuerUniqueID (optional) - name: 'Certificate.TBSCertificate.issuerUniqueID', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - constructed: true, - optional: true, - value: [{ - name: 'Certificate.TBSCertificate.issuerUniqueID.id', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - // TODO: support arbitrary bit length ids - captureBitStringValue: 'certIssuerUniqueId' - }] - }, { - // subjectUniqueID (optional) - name: 'Certificate.TBSCertificate.subjectUniqueID', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 2, - constructed: true, - optional: true, - value: [{ - name: 'Certificate.TBSCertificate.subjectUniqueID.id', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - // TODO: support arbitrary bit length ids - captureBitStringValue: 'certSubjectUniqueId' - }] - }, { - // Extensions (optional) - name: 'Certificate.TBSCertificate.extensions', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 3, - constructed: true, - captureAsn1: 'certExtensions', - optional: true - }] - }, { - // AlgorithmIdentifier (signature algorithm) - name: 'Certificate.signatureAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // algorithm - name: 'Certificate.signatureAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'certSignatureOid' - }, { - name: 'Certificate.TBSCertificate.signature.parameters', - tagClass: asn1.Class.UNIVERSAL, - optional: true, - captureAsn1: 'certSignatureParams' - }] - }, { - // SignatureValue - name: 'Certificate.signatureValue', - tagClass: asn1.Class.UNIVERSAL, - type: 
asn1.Type.BITSTRING, - constructed: false, - captureBitStringValue: 'certSignature' - }] -}; + // 4. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo -var rsassaPssParameterValidator = { - name: 'rsapss', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'rsapss.hashAlgorithm', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - value: [{ - name: 'rsapss.hashAlgorithm.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.SEQUENCE, - constructed: true, - optional: true, - value: [{ - name: 'rsapss.hashAlgorithm.AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'hashOid' - /* parameter block omitted, for SHA1 NULL anyhow. */ - }] - }] - }, { - name: 'rsapss.maskGenAlgorithm', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - constructed: true, - value: [{ - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.SEQUENCE, - constructed: true, - optional: true, - value: [{ - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'maskGenOid' - }, { - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'maskGenHashOid' - /* parameter block omitted, for SHA1 NULL anyhow. */ - }] - }] - }] - }, { - name: 'rsapss.saltLength', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 2, - optional: true, - value: [{ - name: 'rsapss.saltLength.saltLength', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.INTEGER, - constructed: false, - capture: 'saltLength' - }] - }, { - name: 'rsapss.trailerField', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 3, - optional: true, - value: [{ - name: 'rsapss.trailer.trailer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.INTEGER, - constructed: false, - capture: 'trailer' - }] - }] -}; + // 5. 
If request’s service-workers mode is "all", then: + if (request.serviceWorkers === 'all') { + // TODO + } -// validator for a CertificationRequestInfo structure -var certificationRequestInfoValidator = { - name: 'CertificationRequestInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certificationRequestInfo', - value: [{ - name: 'CertificationRequestInfo.integer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'certificationRequestInfoVersion' - }, { - // Name (subject) (RDNSequence) - name: 'CertificationRequestInfo.subject', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certificationRequestInfoSubject' - }, - // SubjectPublicKeyInfo - publicKeyValidator, - { - name: 'CertificationRequestInfo.attributes', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - capture: 'certificationRequestInfoAttributes', - value: [{ - name: 'CertificationRequestInfo.attributes', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'CertificationRequestInfo.attributes.type', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false - }, { - name: 'CertificationRequestInfo.attributes.value', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true - }] - }] - }] -}; + // 6. If response is null, then: + if (response === null) { + // 1. If makeCORSPreflight is true and one of these conditions is true: + // TODO -// validator for a CertificationRequest structure -var certificationRequestValidator = { - name: 'CertificationRequest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'csr', - value: [ - certificationRequestInfoValidator, { - // AlgorithmIdentifier (signature algorithm) - name: 'CertificationRequest.signatureAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // algorithm - name: 'CertificationRequest.signatureAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'csrSignatureOid' - }, { - name: 'CertificationRequest.signatureAlgorithm.parameters', - tagClass: asn1.Class.UNIVERSAL, - optional: true, - captureAsn1: 'csrSignatureParams' - }] - }, { - // signature - name: 'CertificationRequest.signature', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - captureBitStringValue: 'csrSignature' + // 2. If request’s redirect mode is "follow", then set request’s + // service-workers mode to "none". + if (request.redirect === 'follow') { + request.serviceWorkers = 'none' } - ] -}; -/** - * Converts an RDNSequence of ASN.1 DER-encoded RelativeDistinguishedName - * sets into an array with objects that have type and value properties. - * - * @param rdn the RDNSequence to convert. - * @param md a message digest to append type and value to if provided. 
- */ -pki.RDNAttributesAsArray = function(rdn, md) { - var rval = []; - - // each value in 'rdn' in is a SET of RelativeDistinguishedName - var set, attr, obj; - for(var si = 0; si < rdn.value.length; ++si) { - // get the RelativeDistinguishedName set - set = rdn.value[si]; - - // each value in the SET is an AttributeTypeAndValue sequence - // containing first a type (an OID) and second a value (defined by - // the OID) - for(var i = 0; i < set.value.length; ++i) { - obj = {}; - attr = set.value[i]; - obj.type = asn1.derToOid(attr.value[0].value); - obj.value = attr.value[1].value; - obj.valueTagClass = attr.value[1].type; - // if the OID is known, get its name and short name - if(obj.type in oids) { - obj.name = oids[obj.type]; - if(obj.name in _shortNames) { - obj.shortName = _shortNames[obj.name]; - } - } - if(md) { - md.update(obj.type); - md.update(obj.value); - } - rval.push(obj); + // 3. Set response and actualResponse to the result of running + // HTTP-network-or-cache fetch given fetchParams. + actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) + + // 4. If request’s response tainting is "cors" and a CORS check + // for request and response returns failure, then return a network error. + if ( + request.responseTainting === 'cors' && + corsCheck(request, response) === 'failure' + ) { + return makeNetworkError('cors failure') + } + + // 5. If the TAO check for request and response returns failure, then set + // request’s timing allow failed flag. + if (TAOCheck(request, response) === 'failure') { + request.timingAllowFailed = true + } + } + + // 7. If either request’s response tainting or response’s type + // is "opaque", and the cross-origin resource policy check with + // request’s origin, request’s client, request’s destination, + // and actualResponse returns blocked, then return a network error. + if ( + (request.responseTainting === 'opaque' || response.type === 'opaque') && + crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, + actualResponse + ) === 'blocked' + ) { + return makeNetworkError('blocked') + } + + // 8. If actualResponse’s status is a redirect status, then: + if (redirectStatusSet.has(actualResponse.status)) { + // 1. If actualResponse’s status is not 303, request’s body is not null, + // and the connection uses HTTP/2, then user agents may, and are even + // encouraged to, transmit an RST_STREAM frame. + // See, https://github.com/whatwg/fetch/issues/1288 + if (request.redirect !== 'manual') { + fetchParams.controller.connection.destroy() + } + + // 2. Switch on request’s redirect mode: + if (request.redirect === 'error') { + // Set response to a network error. + response = makeNetworkError('unexpected redirect') + } else if (request.redirect === 'manual') { + // Set response to an opaque-redirect filtered response whose internal + // response is actualResponse. + // NOTE(spec): On the web this would return an `opaqueredirect` response, + // but that doesn't make sense server side. + // See https://github.com/nodejs/undici/issues/1193. + response = actualResponse + } else if (request.redirect === 'follow') { + // Set response to the result of running HTTP-redirect fetch given + // fetchParams and response. + response = await httpRedirectFetch(fetchParams, response) + } else { + assert(false) } } - return rval; -}; + // 9. Set response’s timing info to timingInfo. 
+ response.timingInfo = timingInfo -/** - * Converts ASN.1 CRIAttributes into an array with objects that have type and - * value properties. - * - * @param attributes the CRIAttributes to convert. - */ -pki.CRIAttributesAsArray = function(attributes) { - var rval = []; - - // each value in 'attributes' in is a SEQUENCE with an OID and a SET - for(var si = 0; si < attributes.length; ++si) { - // get the attribute sequence - var seq = attributes[si]; - - // each value in the SEQUENCE containing first a type (an OID) and - // second a set of values (defined by the OID) - var type = asn1.derToOid(seq.value[0].value); - var values = seq.value[1].value; - for(var vi = 0; vi < values.length; ++vi) { - var obj = {}; - obj.type = type; - obj.value = values[vi].value; - obj.valueTagClass = values[vi].type; - // if the OID is known, get its name and short name - if(obj.type in oids) { - obj.name = oids[obj.type]; - if(obj.name in _shortNames) { - obj.shortName = _shortNames[obj.name]; - } - } - // parse extensions - if(obj.type === oids.extensionRequest) { - obj.extensions = []; - for(var ei = 0; ei < obj.value.length; ++ei) { - obj.extensions.push(pki.certificateExtensionFromAsn1(obj.value[ei])); - } - } - rval.push(obj); + // 10. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-redirect-fetch +function httpRedirectFetch (fetchParams, response) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let actualResponse be response, if response is not a filtered response, + // and response’s internal response otherwise. + const actualResponse = response.internalResponse + ? response.internalResponse + : response + + // 3. Let locationURL be actualResponse’s location URL given request’s current + // URL’s fragment. + let locationURL + + try { + locationURL = responseLocationURL( + actualResponse, + requestCurrentURL(request).hash + ) + + // 4. If locationURL is null, then return response. + if (locationURL == null) { + return response } + } catch (err) { + // 5. If locationURL is failure, then return a network error. + return Promise.resolve(makeNetworkError(err)) } - return rval; -}; - -/** - * Gets an issuer or subject attribute from its name, type, or short name. - * - * @param obj the issuer or subject object. - * @param options a short name string or an object with: - * shortName the short name for the attribute. - * name the name for the attribute. - * type the type for the attribute. - * - * @return the attribute. - */ -function _getAttribute(obj, options) { - if(typeof options === 'string') { - options = {shortName: options}; + // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network + // error. + if (!urlIsHttpHttpsScheme(locationURL)) { + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) } - var rval = null; - var attr; - for(var i = 0; rval === null && i < obj.attributes.length; ++i) { - attr = obj.attributes[i]; - if(options.type && options.type === attr.type) { - rval = attr; - } else if(options.name && options.name === attr.name) { - rval = attr; - } else if(options.shortName && options.shortName === attr.shortName) { - rval = attr; - } + // 7. If request’s redirect count is 20, then return a network error. + if (request.redirectCount === 20) { + return Promise.resolve(makeNetworkError('redirect count exceeded')) } - return rval; -} -/** - * Converts signature parameters from ASN.1 structure. - * - * Currently only RSASSA-PSS supported. 
The PKCS#1 v1.5 signature scheme had - * no parameters. - * - * RSASSA-PSS-params ::= SEQUENCE { - * hashAlgorithm [0] HashAlgorithm DEFAULT - * sha1Identifier, - * maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT - * mgf1SHA1Identifier, - * saltLength [2] INTEGER DEFAULT 20, - * trailerField [3] INTEGER DEFAULT 1 - * } - * - * HashAlgorithm ::= AlgorithmIdentifier - * - * MaskGenAlgorithm ::= AlgorithmIdentifier - * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * @param oid The OID specifying the signature algorithm - * @param obj The ASN.1 structure holding the parameters - * @param fillDefaults Whether to use return default values where omitted - * @return signature parameter object - */ -var _readSignatureParameters = function(oid, obj, fillDefaults) { - var params = {}; + // 8. Increase request’s redirect count by 1. + request.redirectCount += 1 - if(oid !== oids['RSASSA-PSS']) { - return params; + // 9. If request’s mode is "cors", locationURL includes credentials, and + // request’s origin is not same origin with locationURL’s origin, then return + // a network error. + if ( + request.mode === 'cors' && + (locationURL.username || locationURL.password) && + !sameOrigin(request, locationURL) + ) { + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) } - if(fillDefaults) { - params = { - hash: { - algorithmOid: oids['sha1'] - }, - mgf: { - algorithmOid: oids['mgf1'], - hash: { - algorithmOid: oids['sha1'] - } - }, - saltLength: 20 - }; + // 10. If request’s response tainting is "cors" and locationURL includes + // credentials, then return a network error. + if ( + request.responseTainting === 'cors' && + (locationURL.username || locationURL.password) + ) { + return Promise.resolve(makeNetworkError( + 'URL cannot contain credentials for request mode "cors"' + )) } - var capture = {}; - var errors = []; - if(!asn1.validate(obj, rsassaPssParameterValidator, capture, errors)) { - var error = new Error('Cannot read RSASSA-PSS parameter block.'); - error.errors = errors; - throw error; + // 11. If actualResponse’s status is not 303, request’s body is non-null, + // and request’s body’s source is null, then return a network error. + if ( + actualResponse.status !== 303 && + request.body != null && + request.body.source == null + ) { + return Promise.resolve(makeNetworkError()) } - if(capture.hashOid !== undefined) { - params.hash = params.hash || {}; - params.hash.algorithmOid = asn1.derToOid(capture.hashOid); + // 12. If one of the following is true + // - actualResponse’s status is 301 or 302 and request’s method is `POST` + // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` + if ( + ([301, 302].includes(actualResponse.status) && request.method === 'POST') || + (actualResponse.status === 303 && + !GET_OR_HEAD.includes(request.method)) + ) { + // then: + // 1. Set request’s method to `GET` and request’s body to null. + request.method = 'GET' + request.body = null + + // 2. For each headerName of request-body-header name, delete headerName from + // request’s header list. + for (const headerName of requestBodyHeader) { + request.headersList.delete(headerName) + } } - if(capture.maskGenOid !== undefined) { - params.mgf = params.mgf || {}; - params.mgf.algorithmOid = asn1.derToOid(capture.maskGenOid); - params.mgf.hash = params.mgf.hash || {}; - params.mgf.hash.algorithmOid = asn1.derToOid(capture.maskGenHashOid); + // 13. 
If request’s current URL’s origin is not same origin with locationURL’s + // origin, then for each headerName of CORS non-wildcard request-header name, + // delete headerName from request’s header list. + if (!sameOrigin(requestCurrentURL(request), locationURL)) { + // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name + request.headersList.delete('authorization') + + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. + request.headersList.delete('cookie') + request.headersList.delete('host') } - if(capture.saltLength !== undefined) { - params.saltLength = capture.saltLength.charCodeAt(0); + // 14. If request’s body is non-null, then set request’s body to the first return + // value of safely extracting request’s body’s source. + if (request.body != null) { + assert(request.body.source != null) + request.body = safelyExtractBody(request.body.source)[0] } - return params; -}; + // 15. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo -/** - * Create signature digest for OID. - * - * @param options - * signatureOid: the OID specifying the signature algorithm. - * type: a human readable type for error messages - * @return a created md instance. throws if unknown oid. - */ -var _createSignatureDigest = function(options) { - switch(oids[options.signatureOid]) { - case 'sha1WithRSAEncryption': - // deprecated alias - case 'sha1WithRSASignature': - return forge.md.sha1.create(); - case 'md5WithRSAEncryption': - return forge.md.md5.create(); - case 'sha256WithRSAEncryption': - return forge.md.sha256.create(); - case 'sha384WithRSAEncryption': - return forge.md.sha384.create(); - case 'sha512WithRSAEncryption': - return forge.md.sha512.create(); - case 'RSASSA-PSS': - return forge.md.sha256.create(); - default: - var error = new Error( - 'Could not compute ' + options.type + ' digest. ' + - 'Unknown signature OID.'); - error.signatureOid = options.signatureOid; - throw error; + // 16. Set timingInfo’s redirect end time and post-redirect start time to the + // coarsened shared current time given fetchParams’s cross-origin isolated + // capability. + timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = + coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) + + // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s + // redirect start time to timingInfo’s start time. + if (timingInfo.redirectStartTime === 0) { + timingInfo.redirectStartTime = timingInfo.startTime } -}; -/** - * Verify signature on certificate or CSR. - * - * @param options: - * certificate the certificate or CSR to verify. - * md the signature digest. - * signature the signature - * @return a created md instance. throws if unknown oid. - */ -var _verifySignature = function(options) { - var cert = options.certificate; - var scheme; - - switch(cert.signatureOid) { - case oids.sha1WithRSAEncryption: - // deprecated alias - case oids.sha1WithRSASignature: - /* use PKCS#1 v1.5 padding scheme */ - break; - case oids['RSASSA-PSS']: - var hash, mgf; - - /* initialize mgf */ - hash = oids[cert.signatureParameters.mgf.hash.algorithmOid]; - if(hash === undefined || forge.md[hash] === undefined) { - var error = new Error('Unsupported MGF hash function.'); - error.oid = cert.signatureParameters.mgf.hash.algorithmOid; - error.name = hash; - throw error; - } + // 18. 
Append locationURL to request’s URL list. + request.urlList.push(locationURL) - mgf = oids[cert.signatureParameters.mgf.algorithmOid]; - if(mgf === undefined || forge.mgf[mgf] === undefined) { - var error = new Error('Unsupported MGF function.'); - error.oid = cert.signatureParameters.mgf.algorithmOid; - error.name = mgf; - throw error; - } + // 19. Invoke set request’s referrer policy on redirect on request and + // actualResponse. + setRequestReferrerPolicyOnRedirect(request, actualResponse) - mgf = forge.mgf[mgf].create(forge.md[hash].create()); + // 20. Return the result of running main fetch given fetchParams and true. + return mainFetch(fetchParams, true) +} + +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +async function httpNetworkOrCacheFetch ( + fetchParams, + isAuthenticationFetch = false, + isNewConnectionFetch = false +) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - /* initialize hash function */ - hash = oids[cert.signatureParameters.hash.algorithmOid]; - if(hash === undefined || forge.md[hash] === undefined) { - var error = new Error('Unsupported RSASSA-PSS hash function.'); - error.oid = cert.signatureParameters.hash.algorithmOid; - error.name = hash; - throw error; - } + // 2. Let httpFetchParams be null. + let httpFetchParams = null - scheme = forge.pss.create( - forge.md[hash].create(), mgf, cert.signatureParameters.saltLength - ); - break; - } + // 3. Let httpRequest be null. + let httpRequest = null - // verify signature on cert using public key - return cert.publicKey.verify( - options.md.digest().getBytes(), options.signature, scheme - ); -}; + // 4. Let response be null. + let response = null -/** - * Converts an X.509 certificate from PEM format. - * - * Note: If the certificate is to be verified then compute hash should - * be set to true. This will scan the TBSCertificate part of the ASN.1 - * object while it is converted so it doesn't need to be converted back - * to ASN.1-DER-encoding later. - * - * @param pem the PEM-formatted certificate. - * @param computeHash true to compute the hash for verification. - * @param strict true to be strict when checking ASN.1 value lengths, false to - * allow truncated values (default: true). - * - * @return the certificate. - */ -pki.certificateFromPem = function(pem, computeHash, strict) { - var msg = forge.pem.decode(pem)[0]; - - if(msg.type !== 'CERTIFICATE' && - msg.type !== 'X509 CERTIFICATE' && - msg.type !== 'TRUSTED CERTIFICATE') { - var error = new Error( - 'Could not convert certificate from PEM; PEM header type ' + - 'is not "CERTIFICATE", "X509 CERTIFICATE", or "TRUSTED CERTIFICATE".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error( - 'Could not convert certificate from PEM; PEM is encrypted.'); - } + // 5. Let storedResponse be null. + // TODO: cache - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body, strict); + // 6. Let httpCache be null. + const httpCache = null - return pki.certificateFromAsn1(obj, computeHash); -}; + // 7. Let the revalidatingFlag be unset. + const revalidatingFlag = false -/** - * Converts an X.509 certificate to PEM format. - * - * @param cert the certificate. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted certificate. 
- */ -pki.certificateToPem = function(cert, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'CERTIFICATE', - body: asn1.toDer(pki.certificateToAsn1(cert)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 8. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If request’s window is "no-window" and request’s redirect mode is + // "error", then set httpFetchParams to fetchParams and httpRequest to + // request. + if (request.window === 'no-window' && request.redirect === 'error') { + httpFetchParams = fetchParams + httpRequest = request + } else { + // Otherwise: + + // 1. Set httpRequest to a clone of request. + httpRequest = makeRequest(request) + + // 2. Set httpFetchParams to a copy of fetchParams. + httpFetchParams = { ...fetchParams } + + // 3. Set httpFetchParams’s request to httpRequest. + httpFetchParams.request = httpRequest + } + + // 3. Let includeCredentials be true if one of + const includeCredentials = + request.credentials === 'include' || + (request.credentials === 'same-origin' && + request.responseTainting === 'basic') -/** - * Converts an RSA public key from PEM format. - * - * @param pem the PEM-formatted public key. - * - * @return the public key. - */ -pki.publicKeyFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; + // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s + // body is non-null; otherwise null. + const contentLength = httpRequest.body ? httpRequest.body.length : null - if(msg.type !== 'PUBLIC KEY' && msg.type !== 'RSA PUBLIC KEY') { - var error = new Error('Could not convert public key from PEM; PEM header ' + - 'type is not "PUBLIC KEY" or "RSA PUBLIC KEY".'); - error.headerType = msg.type; - throw error; + // 5. Let contentLengthHeaderValue be null. + let contentLengthHeaderValue = null + + // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or + // `PUT`, then set contentLengthHeaderValue to `0`. + if ( + httpRequest.body == null && + ['POST', 'PUT'].includes(httpRequest.method) + ) { + contentLengthHeaderValue = '0' } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert public key from PEM; PEM is encrypted.'); + + // 7. If contentLength is non-null, then set contentLengthHeaderValue to + // contentLength, serialized and isomorphic encoded. + if (contentLength != null) { + contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) } - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body); + // 8. If contentLengthHeaderValue is non-null, then append + // `Content-Length`/contentLengthHeaderValue to httpRequest’s header + // list. + if (contentLengthHeaderValue != null) { + httpRequest.headersList.append('content-length', contentLengthHeaderValue) + } - return pki.publicKeyFromAsn1(obj); -}; + // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, + // contentLengthHeaderValue) to httpRequest’s header list. -/** - * Converts an RSA public key to PEM format (using a SubjectPublicKeyInfo). - * - * @param key the public key. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted public key. - */ -pki.publicKeyToPem = function(key, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'PUBLIC KEY', - body: asn1.toDer(pki.publicKeyToAsn1(key)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 10. 
If contentLength is non-null and httpRequest’s keepalive is true, + // then: + if (contentLength != null && httpRequest.keepalive) { + // NOTE: keepalive is a noop outside of browser context. + } -/** - * Converts an RSA public key to PEM format (using an RSAPublicKey). - * - * @param key the public key. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted public key. - */ -pki.publicKeyToRSAPublicKeyPem = function(key, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'RSA PUBLIC KEY', - body: asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 11. If httpRequest’s referrer is a URL, then append + // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, + // to httpRequest’s header list. + if (httpRequest.referrer instanceof URL) { + httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) + } -/** - * Gets a fingerprint for the given public key. - * - * @param options the options to use. - * [md] the message digest object to use (defaults to forge.md.sha1). - * [type] the type of fingerprint, such as 'RSAPublicKey', - * 'SubjectPublicKeyInfo' (defaults to 'RSAPublicKey'). - * [encoding] an alternative output encoding, such as 'hex' - * (defaults to none, outputs a byte buffer). - * [delimiter] the delimiter to use between bytes for 'hex' encoded - * output, eg: ':' (defaults to none). - * - * @return the fingerprint as a byte buffer or other encoding based on options. - */ -pki.getPublicKeyFingerprint = function(key, options) { - options = options || {}; - var md = options.md || forge.md.sha1.create(); - var type = options.type || 'RSAPublicKey'; + // 12. Append a request `Origin` header for httpRequest. + appendRequestOriginHeader(httpRequest) - var bytes; - switch(type) { - case 'RSAPublicKey': - bytes = asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes(); - break; - case 'SubjectPublicKeyInfo': - bytes = asn1.toDer(pki.publicKeyToAsn1(key)).getBytes(); - break; - default: - throw new Error('Unknown fingerprint type "' + options.type + '".'); + // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] + appendFetchMetadata(httpRequest) + + // 14. If httpRequest’s header list does not contain `User-Agent`, then + // user agents should append `User-Agent`/default `User-Agent` value to + // httpRequest’s header list. + if (!httpRequest.headersList.contains('user-agent')) { + httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') } - // hash public key bytes - md.start(); - md.update(bytes); - var digest = md.digest(); - if(options.encoding === 'hex') { - var hex = digest.toHex(); - if(options.delimiter) { - return hex.match(/.{2}/g).join(options.delimiter); - } - return hex; - } else if(options.encoding === 'binary') { - return digest.getBytes(); - } else if(options.encoding) { - throw new Error('Unknown encoding "' + options.encoding + '".'); + // 15. If httpRequest’s cache mode is "default" and httpRequest’s header + // list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set + // httpRequest’s cache mode to "no-store". 
+ if ( + httpRequest.cache === 'default' && + (httpRequest.headersList.contains('if-modified-since') || + httpRequest.headersList.contains('if-none-match') || + httpRequest.headersList.contains('if-unmodified-since') || + httpRequest.headersList.contains('if-match') || + httpRequest.headersList.contains('if-range')) + ) { + httpRequest.cache = 'no-store' } - return digest; -}; -/** - * Converts a PKCS#10 certification request (CSR) from PEM format. - * - * Note: If the certification request is to be verified then compute hash - * should be set to true. This will scan the CertificationRequestInfo part of - * the ASN.1 object while it is converted so it doesn't need to be converted - * back to ASN.1-DER-encoding later. - * - * @param pem the PEM-formatted certificate. - * @param computeHash true to compute the hash for verification. - * @param strict true to be strict when checking ASN.1 value lengths, false to - * allow truncated values (default: true). - * - * @return the certification request (CSR). - */ -pki.certificationRequestFromPem = function(pem, computeHash, strict) { - var msg = forge.pem.decode(pem)[0]; + // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent + // no-cache cache-control header modification flag is unset, and + // httpRequest’s header list does not contain `Cache-Control`, then append + // `Cache-Control`/`max-age=0` to httpRequest’s header list. + if ( + httpRequest.cache === 'no-cache' && + !httpRequest.preventNoCacheCacheControlHeaderModification && + !httpRequest.headersList.contains('cache-control') + ) { + httpRequest.headersList.append('cache-control', 'max-age=0') + } + + // 17. If httpRequest’s cache mode is "no-store" or "reload", then: + if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { + // 1. If httpRequest’s header list does not contain `Pragma`, then append + // `Pragma`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('pragma')) { + httpRequest.headersList.append('pragma', 'no-cache') + } - if(msg.type !== 'CERTIFICATE REQUEST') { - var error = new Error('Could not convert certification request from PEM; ' + - 'PEM header type is not "CERTIFICATE REQUEST".'); - error.headerType = msg.type; - throw error; + // 2. If httpRequest’s header list does not contain `Cache-Control`, + // then append `Cache-Control`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('cache-control')) { + httpRequest.headersList.append('cache-control', 'no-cache') + } } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert certification request from PEM; ' + - 'PEM is encrypted.'); + + // 18. If httpRequest’s header list contains `Range`, then append + // `Accept-Encoding`/`identity` to httpRequest’s header list. + if (httpRequest.headersList.contains('range')) { + httpRequest.headersList.append('accept-encoding', 'identity') } - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body, strict); + // 19. Modify httpRequest’s header list per HTTP. Do not append a given + // header if httpRequest’s header list contains that header’s name. 
+ // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 + if (!httpRequest.headersList.contains('accept-encoding')) { + if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { + httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') + } else { + httpRequest.headersList.append('accept-encoding', 'gzip, deflate') + } + } - return pki.certificationRequestFromAsn1(obj, computeHash); -}; + httpRequest.headersList.delete('host') -/** - * Converts a PKCS#10 certification request (CSR) to PEM format. - * - * @param csr the certification request. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted certification request. - */ -pki.certificationRequestToPem = function(csr, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'CERTIFICATE REQUEST', - body: asn1.toDer(pki.certificationRequestToAsn1(csr)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 20. If includeCredentials is true, then: + if (includeCredentials) { + // 1. If the user agent is not configured to block cookies for httpRequest + // (see section 7 of [COOKIES]), then: + // TODO: credentials + // 2. If httpRequest’s header list does not contain `Authorization`, then: + // TODO: credentials + } -/** - * Creates an empty X.509v3 RSA certificate. - * - * @return the certificate. - */ -pki.createCertificate = function() { - var cert = {}; - cert.version = 0x02; - cert.serialNumber = '00'; - cert.signatureOid = null; - cert.signature = null; - cert.siginfo = {}; - cert.siginfo.algorithmOid = null; - cert.validity = {}; - cert.validity.notBefore = new Date(); - cert.validity.notAfter = new Date(); - - cert.issuer = {}; - cert.issuer.getField = function(sn) { - return _getAttribute(cert.issuer, sn); - }; - cert.issuer.addField = function(attr) { - _fillMissingFields([attr]); - cert.issuer.attributes.push(attr); - }; - cert.issuer.attributes = []; - cert.issuer.hash = null; + // 21. If there’s a proxy-authentication entry, use it as appropriate. + // TODO: proxy-authentication - cert.subject = {}; - cert.subject.getField = function(sn) { - return _getAttribute(cert.subject, sn); - }; - cert.subject.addField = function(attr) { - _fillMissingFields([attr]); - cert.subject.attributes.push(attr); - }; - cert.subject.attributes = []; - cert.subject.hash = null; + // 22. Set httpCache to the result of determining the HTTP cache + // partition, given httpRequest. + // TODO: cache - cert.extensions = []; - cert.publicKey = null; - cert.md = null; + // 23. If httpCache is null, then set httpRequest’s cache mode to + // "no-store". + if (httpCache == null) { + httpRequest.cache = 'no-store' + } - /** - * Sets the subject of this certificate. - * - * @param attrs the array of subject attributes to use. - * @param uniqueId an optional a unique ID to use. - */ - cert.setSubject = function(attrs, uniqueId) { - // set new attributes, clear hash - _fillMissingFields(attrs); - cert.subject.attributes = attrs; - delete cert.subject.uniqueId; - if(uniqueId) { - // TODO: support arbitrary bit length ids - cert.subject.uniqueId = uniqueId; - } - cert.subject.hash = null; - }; + // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", + // then: + if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { + // TODO: cache + } - /** - * Sets the issuer of this certificate. - * - * @param attrs the array of issuer attributes to use. - * @param uniqueId an optional a unique ID to use. 
- */ - cert.setIssuer = function(attrs, uniqueId) { - // set new attributes, clear hash - _fillMissingFields(attrs); - cert.issuer.attributes = attrs; - delete cert.issuer.uniqueId; - if(uniqueId) { - // TODO: support arbitrary bit length ids - cert.issuer.uniqueId = uniqueId; - } - cert.issuer.hash = null; - }; + // 9. If aborted, then return the appropriate network error for fetchParams. + // TODO - /** - * Sets the extensions of this certificate. - * - * @param exts the array of extensions to use. - */ - cert.setExtensions = function(exts) { - for(var i = 0; i < exts.length; ++i) { - _fillMissingExtensionFields(exts[i], {cert: cert}); + // 10. If response is null, then: + if (response == null) { + // 1. If httpRequest’s cache mode is "only-if-cached", then return a + // network error. + if (httpRequest.mode === 'only-if-cached') { + return makeNetworkError('only if cached') } - // set new extensions - cert.extensions = exts; - }; - /** - * Gets an extension by its name or id. - * - * @param options the name to use or an object with: - * name the name to use. - * id the id to use. - * - * @return the extension or null if not found. - */ - cert.getExtension = function(options) { - if(typeof options === 'string') { - options = {name: options}; - } - - var rval = null; - var ext; - for(var i = 0; rval === null && i < cert.extensions.length; ++i) { - ext = cert.extensions[i]; - if(options.id && ext.id === options.id) { - rval = ext; - } else if(options.name && ext.name === options.name) { - rval = ext; - } - } - return rval; - }; + // 2. Let forwardResponse be the result of running HTTP-network fetch + // given httpFetchParams, includeCredentials, and isNewConnectionFetch. + const forwardResponse = await httpNetworkFetch( + httpFetchParams, + includeCredentials, + isNewConnectionFetch + ) - /** - * Signs this certificate using the given private key. - * - * @param key the private key to sign with. - * @param md the message digest object to use (defaults to forge.md.sha1). - */ - cert.sign = function(key, md) { - // TODO: get signature OID from private key - cert.md = md || forge.md.sha1.create(); - var algorithmOid = oids[cert.md.algorithm + 'WithRSAEncryption']; - if(!algorithmOid) { - var error = new Error('Could not compute certificate digest. ' + - 'Unknown message digest algorithm OID.'); - error.algorithm = cert.md.algorithm; - throw error; + // 3. If httpRequest’s method is unsafe and forwardResponse’s status is + // in the range 200 to 399, inclusive, invalidate appropriate stored + // responses in httpCache, as per the "Invalidation" chapter of HTTP + // Caching, and set storedResponse to null. [HTTP-CACHING] + if ( + !safeMethodsSet.has(httpRequest.method) && + forwardResponse.status >= 200 && + forwardResponse.status <= 399 + ) { + // TODO: cache } - cert.signatureOid = cert.siginfo.algorithmOid = algorithmOid; - // get TBSCertificate, convert to DER - cert.tbsCertificate = pki.getTBSCertificate(cert); - var bytes = asn1.toDer(cert.tbsCertificate); + // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, + // then: + if (revalidatingFlag && forwardResponse.status === 304) { + // TODO: cache + } - // digest and sign - cert.md.update(bytes.getBytes()); - cert.signature = key.sign(cert.md); - }; + // 5. If response is null, then: + if (response == null) { + // 1. Set response to forwardResponse. + response = forwardResponse - /** - * Attempts verify the signature on the passed certificate using this - * certificate's public key. 
- * - * @param child the certificate to verify. - * - * @return true if verified, false if not. - */ - cert.verify = function(child) { - var rval = false; - - if(!cert.issued(child)) { - var issuer = child.issuer; - var subject = cert.subject; - var error = new Error( - 'The parent certificate did not issue the given child ' + - 'certificate; the child certificate\'s issuer does not match the ' + - 'parent\'s subject.'); - error.expectedIssuer = subject.attributes; - error.actualIssuer = issuer.attributes; - throw error; + // 2. Store httpRequest and forwardResponse in httpCache, as per the + // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] + // TODO: cache } + } - var md = child.md; - if(md === null) { - // create digest for OID signature types - md = _createSignatureDigest({ - signatureOid: child.signatureOid, - type: 'certificate' - }); + // 11. Set response’s URL list to a clone of httpRequest’s URL list. + response.urlList = [...httpRequest.urlList] - // produce DER formatted TBSCertificate and digest it - var tbsCertificate = child.tbsCertificate || pki.getTBSCertificate(child); - var bytes = asn1.toDer(tbsCertificate); - md.update(bytes.getBytes()); - } + // 12. If httpRequest’s header list contains `Range`, then set response’s + // range-requested flag. + if (httpRequest.headersList.contains('range')) { + response.rangeRequested = true + } - if(md !== null) { - rval = _verifySignature({ - certificate: cert, md: md, signature: child.signature - }); + // 13. Set response’s request-includes-credentials to includeCredentials. + response.requestIncludesCredentials = includeCredentials + + // 14. If response’s status is 401, httpRequest’s response tainting is not + // "cors", includeCredentials is true, and request’s window is an environment + // settings object, then: + // TODO + + // 15. If response’s status is 407, then: + if (response.status === 407) { + // 1. If request’s window is "no-window", then return a network error. + if (request.window === 'no-window') { + return makeNetworkError() } - return rval; - }; + // 2. ??? - /** - * Returns true if this certificate's issuer matches the passed - * certificate's subject. Note that no signature check is performed. - * - * @param parent the certificate to check. - * - * @return true if this certificate's issuer matches the passed certificate's - * subject. - */ - cert.isIssuer = function(parent) { - var rval = false; - - var i = cert.issuer; - var s = parent.subject; - - // compare hashes if present - if(i.hash && s.hash) { - rval = (i.hash === s.hash); - } else if(i.attributes.length === s.attributes.length) { - // all attributes are the same so issuer matches subject - rval = true; - var iattr, sattr; - for(var n = 0; rval && n < i.attributes.length; ++n) { - iattr = i.attributes[n]; - sattr = s.attributes[n]; - if(iattr.type !== sattr.type || iattr.value !== sattr.value) { - // attribute mismatch - rval = false; - } - } + // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) } - return rval; - }; + // 4. Prompt the end user as appropriate in request’s window and store + // the result as a proxy-authentication entry. [HTTP-AUTH] + // TODO: Invoke some kind of callback? - /** - * Returns true if this certificate's subject matches the issuer of the - * given certificate). Note that not signature check is performed. - * - * @param child the certificate to check. 
- * - * @return true if this certificate's subject matches the passed - * certificate's issuer. - */ - cert.issued = function(child) { - return child.isIssuer(cert); - }; + // 5. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams. + // TODO + return makeNetworkError('proxy authentication required') + } - /** - * Generates the subjectKeyIdentifier for this certificate as byte buffer. - * - * @return the subjectKeyIdentifier for this certificate as byte buffer. - */ - cert.generateSubjectKeyIdentifier = function() { - /* See: 4.2.1.2 section of the the RFC3280, keyIdentifier is either: - - (1) The keyIdentifier is composed of the 160-bit SHA-1 hash of the - value of the BIT STRING subjectPublicKey (excluding the tag, - length, and number of unused bits). - - (2) The keyIdentifier is composed of a four bit type field with - the value 0100 followed by the least significant 60 bits of the - SHA-1 hash of the value of the BIT STRING subjectPublicKey - (excluding the tag, length, and number of unused bit string bits). - */ - - // skipping the tag, length, and number of unused bits is the same - // as just using the RSAPublicKey (for RSA keys, which are the - // only ones supported) - return pki.getPublicKeyFingerprint(cert.publicKey, {type: 'RSAPublicKey'}); - }; + // 16. If all of the following are true + if ( + // response’s status is 421 + response.status === 421 && + // isNewConnectionFetch is false + !isNewConnectionFetch && + // request’s body is null, or request’s body is non-null and request’s body’s source is non-null + (request.body == null || request.body.source != null) + ) { + // then: - /** - * Verifies the subjectKeyIdentifier extension value for this certificate - * against its public key. If no extension is found, false will be - * returned. - * - * @return true if verified, false if not. - */ - cert.verifySubjectKeyIdentifier = function() { - var oid = oids['subjectKeyIdentifier']; - for(var i = 0; i < cert.extensions.length; ++i) { - var ext = cert.extensions[i]; - if(ext.id === oid) { - var ski = cert.generateSubjectKeyIdentifier().getBytes(); - return (forge.util.hexToBytes(ext.subjectKeyIdentifier) === ski); - } + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) } - return false; - }; - return cert; -}; + // 2. Set response to the result of running HTTP-network-or-cache + // fetch given fetchParams, isAuthenticationFetch, and true. -/** - * Converts an X.509v3 RSA certificate from an ASN.1 object. - * - * Note: If the certificate is to be verified then compute hash should - * be set to true. There is currently no implementation for converting - * a certificate back to ASN.1 so the TBSCertificate part of the ASN.1 - * object needs to be scanned before the cert object is created. - * - * @param obj the asn1 representation of an X.509v3 RSA certificate. - * @param computeHash true to compute the hash for verification. - * - * @return the certificate. - */ -pki.certificateFromAsn1 = function(obj, computeHash) { - // validate certificate and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, x509CertificateValidator, capture, errors)) { - var error = new Error('Cannot read X.509 certificate. 
' + - 'ASN.1 object is not an X509v3 Certificate.'); - error.errors = errors; - throw error; - } - - // get oid - var oid = asn1.derToOid(capture.publicKeyOid); - if(oid !== pki.oids.rsaEncryption) { - throw new Error('Cannot read public key. OID is not RSA.'); - } - - // create certificate - var cert = pki.createCertificate(); - cert.version = capture.certVersion ? - capture.certVersion.charCodeAt(0) : 0; - var serial = forge.util.createBuffer(capture.certSerialNumber); - cert.serialNumber = serial.toHex(); - cert.signatureOid = forge.asn1.derToOid(capture.certSignatureOid); - cert.signatureParameters = _readSignatureParameters( - cert.signatureOid, capture.certSignatureParams, true); - cert.siginfo.algorithmOid = forge.asn1.derToOid(capture.certinfoSignatureOid); - cert.siginfo.parameters = _readSignatureParameters(cert.siginfo.algorithmOid, - capture.certinfoSignatureParams, false); - cert.signature = capture.certSignature; - - var validity = []; - if(capture.certValidity1UTCTime !== undefined) { - validity.push(asn1.utcTimeToDate(capture.certValidity1UTCTime)); - } - if(capture.certValidity2GeneralizedTime !== undefined) { - validity.push(asn1.generalizedTimeToDate( - capture.certValidity2GeneralizedTime)); - } - if(capture.certValidity3UTCTime !== undefined) { - validity.push(asn1.utcTimeToDate(capture.certValidity3UTCTime)); - } - if(capture.certValidity4GeneralizedTime !== undefined) { - validity.push(asn1.generalizedTimeToDate( - capture.certValidity4GeneralizedTime)); - } - if(validity.length > 2) { - throw new Error('Cannot read notBefore/notAfter validity times; more ' + - 'than two times were provided in the certificate.'); - } - if(validity.length < 2) { - throw new Error('Cannot read notBefore/notAfter validity times; they ' + - 'were not provided as either UTCTime or GeneralizedTime.'); - } - cert.validity.notBefore = validity[0]; - cert.validity.notAfter = validity[1]; - - // keep TBSCertificate to preserve signature when exporting - cert.tbsCertificate = capture.tbsCertificate; - - if(computeHash) { - // create digest for OID signature type - cert.md = _createSignatureDigest({ - signatureOid: cert.signatureOid, - type: 'certificate' - }); + // TODO (spec): The spec doesn't specify this but we need to cancel + // the active response before we can start a new one. 
+ // https://github.com/whatwg/fetch/issues/1293 + fetchParams.controller.connection.destroy() - // produce DER formatted TBSCertificate and digest it - var bytes = asn1.toDer(cert.tbsCertificate); - cert.md.update(bytes.getBytes()); + response = await httpNetworkOrCacheFetch( + fetchParams, + isAuthenticationFetch, + true + ) } - // handle issuer, build issuer message digest - var imd = forge.md.sha1.create(); - var ibytes = asn1.toDer(capture.certIssuer); - imd.update(ibytes.getBytes()); - cert.issuer.getField = function(sn) { - return _getAttribute(cert.issuer, sn); - }; - cert.issuer.addField = function(attr) { - _fillMissingFields([attr]); - cert.issuer.attributes.push(attr); - }; - cert.issuer.attributes = pki.RDNAttributesAsArray(capture.certIssuer); - if(capture.certIssuerUniqueId) { - cert.issuer.uniqueId = capture.certIssuerUniqueId; - } - cert.issuer.hash = imd.digest().toHex(); - - // handle subject, build subject message digest - var smd = forge.md.sha1.create(); - var sbytes = asn1.toDer(capture.certSubject); - smd.update(sbytes.getBytes()); - cert.subject.getField = function(sn) { - return _getAttribute(cert.subject, sn); - }; - cert.subject.addField = function(attr) { - _fillMissingFields([attr]); - cert.subject.attributes.push(attr); - }; - cert.subject.attributes = pki.RDNAttributesAsArray(capture.certSubject); - if(capture.certSubjectUniqueId) { - cert.subject.uniqueId = capture.certSubjectUniqueId; + // 17. If isAuthenticationFetch is true, then create an authentication entry + if (isAuthenticationFetch) { + // TODO } - cert.subject.hash = smd.digest().toHex(); - // handle extensions - if(capture.certExtensions) { - cert.extensions = pki.certificateExtensionsFromAsn1(capture.certExtensions); - } else { - cert.extensions = []; + // 18. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-network-fetch +async function httpNetworkFetch ( + fetchParams, + includeCredentials = false, + forceNewConnection = false +) { + assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) + + fetchParams.controller.connection = { + abort: null, + destroyed: false, + destroy (err) { + if (!this.destroyed) { + this.destroyed = true + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } + } } - // convert RSA public key from ASN.1 - cert.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo); + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - return cert; -}; + // 2. Let response be null. + let response = null -/** - * Converts an ASN.1 extensions object (with extension sequences as its - * values) into an array of extension objects with types and values. 
- * - * Supported extensions: - * - * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 } - * KeyUsage ::= BIT STRING { - * digitalSignature (0), - * nonRepudiation (1), - * keyEncipherment (2), - * dataEncipherment (3), - * keyAgreement (4), - * keyCertSign (5), - * cRLSign (6), - * encipherOnly (7), - * decipherOnly (8) - * } - * - * id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 } - * BasicConstraints ::= SEQUENCE { - * cA BOOLEAN DEFAULT FALSE, - * pathLenConstraint INTEGER (0..MAX) OPTIONAL - * } - * - * subjectAltName EXTENSION ::= { - * SYNTAX GeneralNames - * IDENTIFIED BY id-ce-subjectAltName - * } - * - * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName - * - * GeneralName ::= CHOICE { - * otherName [0] INSTANCE OF OTHER-NAME, - * rfc822Name [1] IA5String, - * dNSName [2] IA5String, - * x400Address [3] ORAddress, - * directoryName [4] Name, - * ediPartyName [5] EDIPartyName, - * uniformResourceIdentifier [6] IA5String, - * IPAddress [7] OCTET STRING, - * registeredID [8] OBJECT IDENTIFIER - * } - * - * OTHER-NAME ::= TYPE-IDENTIFIER - * - * EDIPartyName ::= SEQUENCE { - * nameAssigner [0] DirectoryString {ub-name} OPTIONAL, - * partyName [1] DirectoryString {ub-name} - * } - * - * @param exts the extensions ASN.1 with extension sequences to parse. - * - * @return the array. - */ -pki.certificateExtensionsFromAsn1 = function(exts) { - var rval = []; - for(var i = 0; i < exts.value.length; ++i) { - // get extension sequence - var extseq = exts.value[i]; - for(var ei = 0; ei < extseq.value.length; ++ei) { - rval.push(pki.certificateExtensionFromAsn1(extseq.value[ei])); - } - } + // 3. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo - return rval; -}; + // 4. Let httpCache be the result of determining the HTTP cache partition, + // given request. + // TODO: cache + const httpCache = null -/** - * Parses a single certificate extension from ASN.1. - * - * @param ext the extension in ASN.1 format. - * - * @return the parsed extension as an object. - */ -pki.certificateExtensionFromAsn1 = function(ext) { - // an extension has: - // [0] extnID OBJECT IDENTIFIER - // [1] critical BOOLEAN DEFAULT FALSE - // [2] extnValue OCTET STRING - var e = {}; - e.id = asn1.derToOid(ext.value[0].value); - e.critical = false; - if(ext.value[1].type === asn1.Type.BOOLEAN) { - e.critical = (ext.value[1].value.charCodeAt(0) !== 0x00); - e.value = ext.value[2].value; - } else { - e.value = ext.value[1].value; - } - // if the oid is known, get its name - if(e.id in oids) { - e.name = oids[e.id]; - - // handle key usage - if(e.name === 'keyUsage') { - // get value as BIT STRING - var ev = asn1.fromDer(e.value); - var b2 = 0x00; - var b3 = 0x00; - if(ev.value.length > 1) { - // skip first byte, just indicates unused bits which - // will be padded with 0s anyway - // get bytes with flag bits - b2 = ev.value.charCodeAt(1); - b3 = ev.value.length > 2 ? 
ev.value.charCodeAt(2) : 0; - } - // set flags - e.digitalSignature = (b2 & 0x80) === 0x80; - e.nonRepudiation = (b2 & 0x40) === 0x40; - e.keyEncipherment = (b2 & 0x20) === 0x20; - e.dataEncipherment = (b2 & 0x10) === 0x10; - e.keyAgreement = (b2 & 0x08) === 0x08; - e.keyCertSign = (b2 & 0x04) === 0x04; - e.cRLSign = (b2 & 0x02) === 0x02; - e.encipherOnly = (b2 & 0x01) === 0x01; - e.decipherOnly = (b3 & 0x80) === 0x80; - } else if(e.name === 'basicConstraints') { - // handle basic constraints - // get value as SEQUENCE - var ev = asn1.fromDer(e.value); - // get cA BOOLEAN flag (defaults to false) - if(ev.value.length > 0 && ev.value[0].type === asn1.Type.BOOLEAN) { - e.cA = (ev.value[0].value.charCodeAt(0) !== 0x00); - } else { - e.cA = false; - } - // get path length constraint - var value = null; - if(ev.value.length > 0 && ev.value[0].type === asn1.Type.INTEGER) { - value = ev.value[0].value; - } else if(ev.value.length > 1) { - value = ev.value[1].value; - } - if(value !== null) { - e.pathLenConstraint = asn1.derToInteger(value); - } - } else if(e.name === 'extKeyUsage') { - // handle extKeyUsage - // value is a SEQUENCE of OIDs - var ev = asn1.fromDer(e.value); - for(var vi = 0; vi < ev.value.length; ++vi) { - var oid = asn1.derToOid(ev.value[vi].value); - if(oid in oids) { - e[oids[oid]] = true; - } else { - e[oid] = true; - } - } - } else if(e.name === 'nsCertType') { - // handle nsCertType - // get value as BIT STRING - var ev = asn1.fromDer(e.value); - var b2 = 0x00; - if(ev.value.length > 1) { - // skip first byte, just indicates unused bits which - // will be padded with 0s anyway - // get bytes with flag bits - b2 = ev.value.charCodeAt(1); - } - // set flags - e.client = (b2 & 0x80) === 0x80; - e.server = (b2 & 0x40) === 0x40; - e.email = (b2 & 0x20) === 0x20; - e.objsign = (b2 & 0x10) === 0x10; - e.reserved = (b2 & 0x08) === 0x08; - e.sslCA = (b2 & 0x04) === 0x04; - e.emailCA = (b2 & 0x02) === 0x02; - e.objCA = (b2 & 0x01) === 0x01; - } else if( - e.name === 'subjectAltName' || - e.name === 'issuerAltName') { - // handle subjectAltName/issuerAltName - e.altNames = []; - - // ev is a SYNTAX SEQUENCE - var gn; - var ev = asn1.fromDer(e.value); - for(var n = 0; n < ev.value.length; ++n) { - // get GeneralName - gn = ev.value[n]; - - var altName = { - type: gn.type, - value: gn.value - }; - e.altNames.push(altName); - - // Note: Support for types 1,2,6,7,8 - switch(gn.type) { - // rfc822Name - case 1: - // dNSName - case 2: - // uniformResourceIdentifier (URI) - case 6: - break; - // IPAddress - case 7: - // convert to IPv4/IPv6 string representation - altName.ip = forge.util.bytesToIP(gn.value); - break; - // registeredID - case 8: - altName.oid = asn1.derToOid(gn.value); - break; - default: - // unsupported - } - } - } else if(e.name === 'subjectKeyIdentifier') { - // value is an OCTETSTRING w/the hash of the key-type specific - // public key structure (eg: RSAPublicKey) - var ev = asn1.fromDer(e.value); - e.subjectKeyIdentifier = forge.util.bytesToHex(ev.value); - } + // 5. If httpCache is null, then set request’s cache mode to "no-store". + if (httpCache == null) { + request.cache = 'no-store' } - return e; -}; -/** - * Converts a PKCS#10 certification request (CSR) from an ASN.1 object. - * - * Note: If the certification request is to be verified then compute hash - * should be set to true. 
There is currently no implementation for converting - * a certificate back to ASN.1 so the CertificationRequestInfo part of the - * ASN.1 object needs to be scanned before the csr object is created. - * - * @param obj the asn1 representation of a PKCS#10 certification request (CSR). - * @param computeHash true to compute the hash for verification. - * - * @return the certification request (CSR). - */ -pki.certificationRequestFromAsn1 = function(obj, computeHash) { - // validate certification request and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, certificationRequestValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#10 certificate request. ' + - 'ASN.1 object is not a PKCS#10 CertificationRequest.'); - error.errors = errors; - throw error; - } - - // get oid - var oid = asn1.derToOid(capture.publicKeyOid); - if(oid !== pki.oids.rsaEncryption) { - throw new Error('Cannot read public key. OID is not RSA.'); - } - - // create certification request - var csr = pki.createCertificationRequest(); - csr.version = capture.csrVersion ? capture.csrVersion.charCodeAt(0) : 0; - csr.signatureOid = forge.asn1.derToOid(capture.csrSignatureOid); - csr.signatureParameters = _readSignatureParameters( - csr.signatureOid, capture.csrSignatureParams, true); - csr.siginfo.algorithmOid = forge.asn1.derToOid(capture.csrSignatureOid); - csr.siginfo.parameters = _readSignatureParameters( - csr.siginfo.algorithmOid, capture.csrSignatureParams, false); - csr.signature = capture.csrSignature; - - // keep CertificationRequestInfo to preserve signature when exporting - csr.certificationRequestInfo = capture.certificationRequestInfo; - - if(computeHash) { - // create digest for OID signature type - csr.md = _createSignatureDigest({ - signatureOid: csr.signatureOid, - type: 'certification request' - }); + // 6. Let networkPartitionKey be the result of determining the network + // partition key given request. + // TODO + + // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise + // "no". + const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars - // produce DER formatted CertificationRequestInfo and digest it - var bytes = asn1.toDer(csr.certificationRequestInfo); - csr.md.update(bytes.getBytes()); + // 8. Switch on request’s mode: + if (request.mode === 'websocket') { + // Let connection be the result of obtaining a WebSocket connection, + // given request’s current URL. + // TODO + } else { + // Let connection be the result of obtaining a connection, given + // networkPartitionKey, request’s current URL’s origin, + // includeCredentials, and forceNewConnection. + // TODO } - // handle subject, build subject message digest - var smd = forge.md.sha1.create(); - csr.subject.getField = function(sn) { - return _getAttribute(csr.subject, sn); - }; - csr.subject.addField = function(attr) { - _fillMissingFields([attr]); - csr.subject.attributes.push(attr); - }; - csr.subject.attributes = pki.RDNAttributesAsArray( - capture.certificationRequestInfoSubject, smd); - csr.subject.hash = smd.digest().toHex(); + // 9. Run these steps, but abort when the ongoing fetch is terminated: - // convert RSA public key from ASN.1 - csr.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo); + // 1. If connection is failure, then return a network error. 
- // convert attributes from ASN.1 - csr.getAttribute = function(sn) { - return _getAttribute(csr, sn); - }; - csr.addAttribute = function(attr) { - _fillMissingFields([attr]); - csr.attributes.push(attr); - }; - csr.attributes = pki.CRIAttributesAsArray( - capture.certificationRequestInfoAttributes || []); + // 2. Set timingInfo’s final connection timing info to the result of + // calling clamp and coarsen connection timing info with connection’s + // timing info, timingInfo’s post-redirect start time, and fetchParams’s + // cross-origin isolated capability. - return csr; -}; + // 3. If connection is not an HTTP/2 connection, request’s body is non-null, + // and request’s body’s source is null, then append (`Transfer-Encoding`, + // `chunked`) to request’s header list. -/** - * Creates an empty certification request (a CSR or certificate signing - * request). Once created, its public key and attributes can be set and then - * it can be signed. - * - * @return the empty certification request. - */ -pki.createCertificationRequest = function() { - var csr = {}; - csr.version = 0x00; - csr.signatureOid = null; - csr.signature = null; - csr.siginfo = {}; - csr.siginfo.algorithmOid = null; - - csr.subject = {}; - csr.subject.getField = function(sn) { - return _getAttribute(csr.subject, sn); - }; - csr.subject.addField = function(attr) { - _fillMissingFields([attr]); - csr.subject.attributes.push(attr); - }; - csr.subject.attributes = []; - csr.subject.hash = null; + // 4. Set timingInfo’s final network-request start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated + // capability. - csr.publicKey = null; - csr.attributes = []; - csr.getAttribute = function(sn) { - return _getAttribute(csr, sn); - }; - csr.addAttribute = function(attr) { - _fillMissingFields([attr]); - csr.attributes.push(attr); - }; - csr.md = null; + // 5. Set response to the result of making an HTTP request over connection + // using request with the following caveats: - /** - * Sets the subject of this certification request. - * - * @param attrs the array of subject attributes to use. - */ - csr.setSubject = function(attrs) { - // set new attributes - _fillMissingFields(attrs); - csr.subject.attributes = attrs; - csr.subject.hash = null; - }; + // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] + // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] - /** - * Sets the attributes of this certification request. - * - * @param attrs the array of attributes to use. - */ - csr.setAttributes = function(attrs) { - // set new attributes - _fillMissingFields(attrs); - csr.attributes = attrs; - }; + // - If request’s body is non-null, and request’s body’s source is null, + // then the user agent may have a buffer of up to 64 kibibytes and store + // a part of request’s body in that buffer. If the user agent reads from + // request’s body beyond that buffer’s size and the user agent needs to + // resend request, then instead return a network error. - /** - * Signs this certification request using the given private key. - * - * @param key the private key to sign with. - * @param md the message digest object to use (defaults to forge.md.sha1). - */ - csr.sign = function(key, md) { - // TODO: get signature OID from private key - csr.md = md || forge.md.sha1.create(); - var algorithmOid = oids[csr.md.algorithm + 'WithRSAEncryption']; - if(!algorithmOid) { - var error = new Error('Could not compute certification request digest. 
' + - 'Unknown message digest algorithm OID.'); - error.algorithm = csr.md.algorithm; - throw error; - } - csr.signatureOid = csr.siginfo.algorithmOid = algorithmOid; + // - Set timingInfo’s final network-response start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated capability, + // immediately after the user agent’s HTTP parser receives the first byte + // of the response (e.g., frame header bytes for HTTP/2 or response status + // line for HTTP/1.x). - // get CertificationRequestInfo, convert to DER - csr.certificationRequestInfo = pki.getCertificationRequestInfo(csr); - var bytes = asn1.toDer(csr.certificationRequestInfo); + // - Wait until all the headers are transmitted. - // digest and sign - csr.md.update(bytes.getBytes()); - csr.signature = key.sign(csr.md); - }; + // - Any responses whose status is in the range 100 to 199, inclusive, + // and is not 101, are to be ignored, except for the purposes of setting + // timingInfo’s final network-response start time above. - /** - * Attempts verify the signature on the passed certification request using - * its public key. - * - * A CSR that has been exported to a file in PEM format can be verified using - * OpenSSL using this command: - * - * openssl req -in -verify -noout -text - * - * @return true if verified, false if not. - */ - csr.verify = function() { - var rval = false; - - var md = csr.md; - if(md === null) { - md = _createSignatureDigest({ - signatureOid: csr.signatureOid, - type: 'certification request' - }); + // - If request’s header list contains `Transfer-Encoding`/`chunked` and + // response is transferred via HTTP/1.0 or older, then return a network + // error. - // produce DER formatted CertificationRequestInfo and digest it - var cri = csr.certificationRequestInfo || - pki.getCertificationRequestInfo(csr); - var bytes = asn1.toDer(cri); - md.update(bytes.getBytes()); - } + // - If the HTTP request results in a TLS client certificate dialog, then: - if(md !== null) { - rval = _verifySignature({ - certificate: csr, md: md, signature: csr.signature - }); - } + // 1. If request’s window is an environment settings object, make the + // dialog available in request’s window. - return rval; - }; + // 2. Otherwise, return a network error. - return csr; -}; + // To transmit request’s body body, run these steps: + let requestBody = null + // 1. If body is null and fetchParams’s process request end-of-body is + // non-null, then queue a fetch task given fetchParams’s process request + // end-of-body and fetchParams’s task destination. + if (request.body == null && fetchParams.processRequestEndOfBody) { + queueMicrotask(() => fetchParams.processRequestEndOfBody()) + } else if (request.body != null) { + // 2. Otherwise, if body is non-null: -/** - * Converts an X.509 subject or issuer to an ASN.1 RDNSequence. - * - * @param obj the subject or issuer (distinguished name). - * - * @return the ASN.1 RDNSequence. - */ -function _dnToAsn1(obj) { - // create an empty RDNSequence - var rval = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - - // iterate over attributes - var attr, set; - var attrs = obj.attributes; - for(var i = 0; i < attrs.length; ++i) { - attr = attrs[i]; - var value = attr.value; - - // reuse tag class for attribute value if available - var valueTagClass = asn1.Type.PRINTABLESTRING; - if('valueTagClass' in attr) { - valueTagClass = attr.valueTagClass; - - if(valueTagClass === asn1.Type.UTF8) { - value = forge.util.encodeUtf8(value); + // 1. 
Let processBodyChunk given bytes be these steps: + const processBodyChunk = async function * (bytes) { + // 1. If the ongoing fetch is terminated, then abort these steps. + if (isCancelled(fetchParams)) { + return } - // FIXME: handle more encodings - } - // create a RelativeDistinguishedName set - // each value in the set is an AttributeTypeAndValue first - // containing the type (an OID) and second the value - set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AttributeType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.type).getBytes()), - // AttributeValue - asn1.create(asn1.Class.UNIVERSAL, valueTagClass, false, value) - ]) - ]); - rval.value.push(set); - } + // 2. Run this step in parallel: transmit bytes. + yield bytes - return rval; -} + // 3. If fetchParams’s process request body is non-null, then run + // fetchParams’s process request body given bytes’s length. + fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) + } -/** - * Gets all printable attributes (typically of an issuer or subject) in a - * simplified JSON format for display. - * - * @param attrs the attributes. - * - * @return the JSON for display. - */ -function _getAttributesAsJson(attrs) { - var rval = {}; - for(var i = 0; i < attrs.length; ++i) { - var attr = attrs[i]; - if(attr.shortName && ( - attr.valueTagClass === asn1.Type.UTF8 || - attr.valueTagClass === asn1.Type.PRINTABLESTRING || - attr.valueTagClass === asn1.Type.IA5STRING)) { - var value = attr.value; - if(attr.valueTagClass === asn1.Type.UTF8) { - value = forge.util.encodeUtf8(attr.value); - } - if(!(attr.shortName in rval)) { - rval[attr.shortName] = value; - } else if(forge.util.isArray(rval[attr.shortName])) { - rval[attr.shortName].push(value); - } else { - rval[attr.shortName] = [rval[attr.shortName], value]; + // 2. Let processEndOfBody be these steps: + const processEndOfBody = () => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return } - } - } - return rval; -} -/** - * Fills in missing fields in attributes. - * - * @param attrs the attributes to fill missing fields in. - */ -function _fillMissingFields(attrs) { - var attr; - for(var i = 0; i < attrs.length; ++i) { - attr = attrs[i]; - - // populate missing name - if(typeof attr.name === 'undefined') { - if(attr.type && attr.type in pki.oids) { - attr.name = pki.oids[attr.type]; - } else if(attr.shortName && attr.shortName in _shortNames) { - attr.name = pki.oids[_shortNames[attr.shortName]]; + // 2. If fetchParams’s process request end-of-body is non-null, + // then run fetchParams’s process request end-of-body. + if (fetchParams.processRequestEndOfBody) { + fetchParams.processRequestEndOfBody() } } - // populate missing type (OID) - if(typeof attr.type === 'undefined') { - if(attr.name && attr.name in pki.oids) { - attr.type = pki.oids[attr.name]; - } else { - var error = new Error('Attribute type not specified.'); - error.attribute = attr; - throw error; + // 3. Let processBodyError given e be these steps: + const processBodyError = (e) => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return } - } - // populate missing shortname - if(typeof attr.shortName === 'undefined') { - if(attr.name && attr.name in _shortNames) { - attr.shortName = _shortNames[attr.name]; + // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. 
+ if (e.name === 'AbortError') { + fetchParams.controller.abort() + } else { + fetchParams.controller.terminate(e) } } - // convert extensions to value - if(attr.type === oids.extensionRequest) { - attr.valueConstructed = true; - attr.valueTagClass = asn1.Type.SEQUENCE; - if(!attr.value && attr.extensions) { - attr.value = []; - for(var ei = 0; ei < attr.extensions.length; ++ei) { - attr.value.push(pki.certificateExtensionToAsn1( - _fillMissingExtensionFields(attr.extensions[ei]))); + // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, + // processBodyError, and fetchParams’s task destination. + requestBody = (async function * () { + try { + for await (const bytes of request.body.stream) { + yield * processBodyChunk(bytes) } + processEndOfBody() + } catch (err) { + processBodyError(err) } + })() + } + + try { + // socket is only provided for websockets + const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) + + if (socket) { + response = makeResponse({ status, statusText, headersList, socket }) + } else { + const iterator = body[Symbol.asyncIterator]() + fetchParams.controller.next = () => iterator.next() + + response = makeResponse({ status, statusText, headersList }) } + } catch (err) { + // 10. If aborted, then: + if (err.name === 'AbortError') { + // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. + fetchParams.controller.connection.destroy() - if(typeof attr.value === 'undefined') { - var error = new Error('Attribute value not specified.'); - error.attribute = attr; - throw error; + // 2. Return the appropriate network error for fetchParams. + return makeAppropriateNetworkError(fetchParams, err) } + + return makeNetworkError(err) } -} -/** - * Fills in missing fields in certificate extensions. - * - * @param e the extension. - * @param [options] the options to use. - * [cert] the certificate the extensions are for. - * - * @return the extension. - */ -function _fillMissingExtensionFields(e, options) { - options = options || {}; + // 11. Let pullAlgorithm be an action that resumes the ongoing fetch + // if it is suspended. + const pullAlgorithm = () => { + fetchParams.controller.resume() + } - // populate missing name - if(typeof e.name === 'undefined') { - if(e.id && e.id in pki.oids) { - e.name = pki.oids[e.id]; - } + // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s + // controller with reason, given reason. + const cancelAlgorithm = (reason) => { + fetchParams.controller.abort(reason) } - // populate missing id - if(typeof e.id === 'undefined') { - if(e.name && e.name in pki.oids) { - e.id = pki.oids[e.name]; - } else { - var error = new Error('Extension ID not specified.'); - error.extension = e; - throw error; - } + // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by + // the user agent. + // TODO + + // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object + // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. + // TODO + + // 15. Let stream be a new ReadableStream. + // 16. Set up stream with pullAlgorithm set to pullAlgorithm, + // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to + // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. 
+ if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) } - if(typeof e.value !== 'undefined') { - return e; - } - - // handle missing value: - - // value is a BIT STRING - if(e.name === 'keyUsage') { - // build flags - var unused = 0; - var b2 = 0x00; - var b3 = 0x00; - if(e.digitalSignature) { - b2 |= 0x80; - unused = 7; - } - if(e.nonRepudiation) { - b2 |= 0x40; - unused = 6; - } - if(e.keyEncipherment) { - b2 |= 0x20; - unused = 5; - } - if(e.dataEncipherment) { - b2 |= 0x10; - unused = 4; - } - if(e.keyAgreement) { - b2 |= 0x08; - unused = 3; - } - if(e.keyCertSign) { - b2 |= 0x04; - unused = 2; - } - if(e.cRLSign) { - b2 |= 0x02; - unused = 1; - } - if(e.encipherOnly) { - b2 |= 0x01; - unused = 0; - } - if(e.decipherOnly) { - b3 |= 0x80; - unused = 7; - } - - // create bit string - var value = String.fromCharCode(unused); - if(b3 !== 0) { - value += String.fromCharCode(b2) + String.fromCharCode(b3); - } else if(b2 !== 0) { - value += String.fromCharCode(b2); - } - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value); - } else if(e.name === 'basicConstraints') { - // basicConstraints is a SEQUENCE - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - // cA BOOLEAN flag defaults to false - if(e.cA) { - e.value.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false, - String.fromCharCode(0xFF))); - } - if('pathLenConstraint' in e) { - e.value.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(e.pathLenConstraint).getBytes())); - } - } else if(e.name === 'extKeyUsage') { - // extKeyUsage is a SEQUENCE of OIDs - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var seq = e.value.value; - for(var key in e) { - if(e[key] !== true) { - continue; + const stream = new ReadableStream( + { + async start (controller) { + fetchParams.controller.controller = controller + }, + async pull (controller) { + await pullAlgorithm(controller) + }, + async cancel (reason) { + await cancelAlgorithm(reason) } - // key is name in OID map - if(key in oids) { - seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, - false, asn1.oidToDer(oids[key]).getBytes())); - } else if(key.indexOf('.') !== -1) { - // assume key is an OID - seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, - false, asn1.oidToDer(key).getBytes())); + }, + { + highWaterMark: 0, + size () { + return 1 } } - } else if(e.name === 'nsCertType') { - // nsCertType is a BIT STRING - // build flags - var unused = 0; - var b2 = 0x00; - - if(e.client) { - b2 |= 0x80; - unused = 7; - } - if(e.server) { - b2 |= 0x40; - unused = 6; - } - if(e.email) { - b2 |= 0x20; - unused = 5; - } - if(e.objsign) { - b2 |= 0x10; - unused = 4; - } - if(e.reserved) { - b2 |= 0x08; - unused = 3; - } - if(e.sslCA) { - b2 |= 0x04; - unused = 2; - } - if(e.emailCA) { - b2 |= 0x02; - unused = 1; - } - if(e.objCA) { - b2 |= 0x01; - unused = 0; - } - - // create bit string - var value = String.fromCharCode(unused); - if(b2 !== 0) { - value += String.fromCharCode(b2); - } - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value); - } else if(e.name === 'subjectAltName' || e.name === 'issuerAltName') { - // SYNTAX SEQUENCE - e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - - var altName; - for(var n = 0; n < e.altNames.length; ++n) { - altName = e.altNames[n]; - var value = altName.value; - // handle IP - if(altName.type === 7 && altName.ip) 
{ - value = forge.util.bytesFromIP(altName.ip); - if(value === null) { - var error = new Error( - 'Extension "ip" value is not a valid IPv4 or IPv6 address.'); - error.extension = e; - throw error; - } - } else if(altName.type === 8) { - // handle OID - if(altName.oid) { - value = asn1.oidToDer(asn1.oidToDer(altName.oid)); - } else { - // deprecated ... convert value to OID - value = asn1.oidToDer(value); + ) + + // 17. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. Set response’s body to a new body whose stream is stream. + response.body = { stream } + + // 2. If response is not a network error and request’s cache mode is + // not "no-store", then update response in httpCache for request. + // TODO + + // 3. If includeCredentials is true and the user agent is not configured + // to block cookies for request (see section 7 of [COOKIES]), then run the + // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on + // the value of each header whose name is a byte-case-insensitive match for + // `Set-Cookie` in response’s header list, if any, and request’s current URL. + // TODO + + // 18. If aborted, then: + // TODO + + // 19. Run these steps in parallel: + + // 1. Run these steps, but abort when fetchParams is canceled: + fetchParams.controller.on('terminated', onAborted) + fetchParams.controller.resume = async () => { + // 1. While true + while (true) { + // 1-3. See onData... + + // 4. Set bytes to the result of handling content codings given + // codings and bytes. + let bytes + let isFailure + try { + const { done, value } = await fetchParams.controller.next() + + if (isAborted(fetchParams)) { + break } - } - e.value.value.push(asn1.create( - asn1.Class.CONTEXT_SPECIFIC, altName.type, false, - value)); - } - } else if(e.name === 'nsComment' && options.cert) { - // sanity check value is ASCII (req'd) and not too big - if(!(/^[\x00-\x7F]*$/.test(e.comment)) || - (e.comment.length < 1) || (e.comment.length > 128)) { - throw new Error('Invalid "nsComment" content.'); - } - // IA5STRING opaque comment - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.IA5STRING, false, e.comment); - } else if(e.name === 'subjectKeyIdentifier' && options.cert) { - var ski = options.cert.generateSubjectKeyIdentifier(); - e.subjectKeyIdentifier = ski.toHex(); - // OCTETSTRING w/digest - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, ski.getBytes()); - } else if(e.name === 'authorityKeyIdentifier' && options.cert) { - // SYNTAX SEQUENCE - e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var seq = e.value.value; - - if(e.keyIdentifier) { - var keyIdentifier = (e.keyIdentifier === true ? - options.cert.generateSubjectKeyIdentifier().getBytes() : - e.keyIdentifier); - seq.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, false, keyIdentifier)); - } - - if(e.authorityCertIssuer) { - var authorityCertIssuer = [ - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 4, true, [ - _dnToAsn1(e.authorityCertIssuer === true ? - options.cert.issuer : e.authorityCertIssuer) - ]) - ]; - seq.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, authorityCertIssuer)); - } - - if(e.serialNumber) { - var serialNumber = forge.util.hexToBytes(e.serialNumber === true ? 
- options.cert.serialNumber : e.serialNumber); - seq.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, false, serialNumber)); - } - } else if(e.name === 'cRLDistributionPoints') { - e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var seq = e.value.value; - - // Create sub SEQUENCE of DistributionPointName - var subSeq = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - - // Create fullName CHOICE - var fullNameGeneralNames = asn1.create( - asn1.Class.CONTEXT_SPECIFIC, 0, true, []); - var altName; - for(var n = 0; n < e.altNames.length; ++n) { - altName = e.altNames[n]; - var value = altName.value; - // handle IP - if(altName.type === 7 && altName.ip) { - value = forge.util.bytesFromIP(altName.ip); - if(value === null) { - var error = new Error( - 'Extension "ip" value is not a valid IPv4 or IPv6 address.'); - error.extension = e; - throw error; - } - } else if(altName.type === 8) { - // handle OID - if(altName.oid) { - value = asn1.oidToDer(asn1.oidToDer(altName.oid)); + + bytes = done ? undefined : value + } catch (err) { + if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { + // zlib doesn't like empty streams. + bytes = undefined } else { - // deprecated ... convert value to OID - value = asn1.oidToDer(value); + bytes = err + + // err may be propagated from the result of calling readablestream.cancel, + // which might not be an error. https://github.com/nodejs/undici/issues/2009 + isFailure = true } } - fullNameGeneralNames.value.push(asn1.create( - asn1.Class.CONTEXT_SPECIFIC, altName.type, false, - value)); - } - // Add to the parent SEQUENCE - subSeq.value.push(asn1.create( - asn1.Class.CONTEXT_SPECIFIC, 0, true, [fullNameGeneralNames])); - seq.push(subSeq); - } + if (bytes === undefined) { + // 2. Otherwise, if the bytes transmission for response’s message + // body is done normally and stream is readable, then close + // stream, finalize response for fetchParams and response, and + // abort these in-parallel steps. + readableStreamClose(fetchParams.controller.controller) - // ensure value has been defined by now - if(typeof e.value === 'undefined') { - var error = new Error('Extension value not specified.'); - error.extension = e; - throw error; - } + finalizeResponse(fetchParams, response) - return e; -} + return + } -/** - * Convert signature parameters object to ASN.1 - * - * @param {String} oid Signature algorithm OID - * @param params The signature parametrs object - * @return ASN.1 object representing signature parameters - */ -function _signatureParametersToAsn1(oid, params) { - switch(oid) { - case oids['RSASSA-PSS']: - var parts = []; - - if(params.hash.algorithmOid !== undefined) { - parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(params.hash.algorithmOid).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]) - ])); + // 5. Increase timingInfo’s decoded body size by bytes’s length. + timingInfo.decodedBodySize += bytes?.byteLength ?? 0 + + // 6. If bytes is failure, then terminate fetchParams’s controller. 
+ if (isFailure) { + fetchParams.controller.terminate(bytes) + return } - if(params.mgf.algorithmOid !== undefined) { - parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(params.mgf.algorithmOid).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(params.mgf.hash.algorithmOid).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]) - ]) - ])); + // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes + // into stream. + fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + + // 8. If stream is errored, then terminate the ongoing fetch. + if (isErrored(stream)) { + fetchParams.controller.terminate() + return } - if(params.saltLength !== undefined) { - parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(params.saltLength).getBytes()) - ])); + // 9. If stream doesn’t need more data ask the user agent to suspend + // the ongoing fetch. + if (!fetchParams.controller.controller.desiredSize) { + return } + } + } - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, parts); + // 2. If aborted, then: + function onAborted (reason) { + // 2. If fetchParams is aborted, then: + if (isAborted(fetchParams)) { + // 1. Set response’s aborted flag. + response.aborted = true - default: - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, ''); + // 2. If stream is readable, then error stream with the result of + // deserialize a serialized abort reason given fetchParams’s + // controller’s serialized abort reason and an + // implementation-defined realm. + if (isReadable(stream)) { + fetchParams.controller.controller.error( + fetchParams.controller.serializedAbortReason + ) + } + } else { + // 3. Otherwise, if stream is readable, error stream with a TypeError. + if (isReadable(stream)) { + fetchParams.controller.controller.error(new TypeError('terminated', { + cause: isErrorLike(reason) ? reason : undefined + })) + } + } + + // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. + // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. + fetchParams.controller.connection.destroy() } -} -/** - * Converts a certification request's attributes to an ASN.1 set of - * CRIAttributes. - * - * @param csr certification request. - * - * @return the ASN.1 set of CRIAttributes. - */ -function _CRIAttributesToAsn1(csr) { - // create an empty context-specific container - var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, []); + // 20. Return response. + return response - // no attributes, return empty container - if(csr.attributes.length === 0) { - return rval; - } + async function dispatch ({ body }) { + const url = requestCurrentURL(request) + /** @type {import('../..').Agent} */ + const agent = fetchParams.controller.dispatcher - // each attribute has a sequence with a type and a set of values - var attrs = csr.attributes; - for(var i = 0; i < attrs.length; ++i) { - var attr = attrs[i]; - var value = attr.value; + return new Promise((resolve, reject) => agent.dispatch( + { + path: url.pathname + url.search, + origin: url.origin, + method: request.method, + body: fetchParams.controller.dispatcher.isMockActive ? 
request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, + maxRedirections: 0, + upgrade: request.mode === 'websocket' ? 'websocket' : undefined + }, + { + body: null, + abort: null, - // reuse tag class for attribute value if available - var valueTagClass = asn1.Type.UTF8; - if('valueTagClass' in attr) { - valueTagClass = attr.valueTagClass; - } - if(valueTagClass === asn1.Type.UTF8) { - value = forge.util.encodeUtf8(value); - } - var valueConstructed = false; - if('valueConstructed' in attr) { - valueConstructed = attr.valueConstructed; - } - // FIXME: handle more encodings + onConnect (abort) { + // TODO (fix): Do we need connection here? + const { connection } = fetchParams.controller - // create a RelativeDistinguishedName set - // each value in the set is an AttributeTypeAndValue first - // containing the type (an OID) and second the value - var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AttributeType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.type).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - // AttributeValue - asn1.create( - asn1.Class.UNIVERSAL, valueTagClass, valueConstructed, value) - ]) - ]); - rval.value.push(seq); - } + if (connection.destroyed) { + abort(new DOMException('The operation was aborted.', 'AbortError')) + } else { + fetchParams.controller.on('terminated', abort) + this.abort = connection.abort = abort + } + }, - return rval; -} + onHeaders (status, headersList, resume, statusText) { + if (status < 200) { + return + } -var jan_1_1950 = new Date('1950-01-01T00:00:00Z'); -var jan_1_2050 = new Date('2050-01-01T00:00:00Z'); + let codings = [] + let location = '' + + const headers = new Headers() + + // For H2, the headers are a plain JS object + // We distinguish between them and iterate accordingly + if (Array.isArray(headersList)) { + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()) + } else if (key.toLowerCase() === 'location') { + location = val + } -/** - * Converts a Date object to ASN.1 - * Handles the different format before and after 1st January 2050 - * - * @param date date object. - * - * @return the ASN.1 object representing the date. - */ -function _dateToAsn1(date) { - if(date >= jan_1_1950 && date < jan_1_2050) { - return asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false, - asn1.dateToUtcTime(date)); - } else { - return asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false, - asn1.dateToGeneralizedTime(date)); - } -} + headers[kHeadersList].append(key, val) + } + } else { + const keys = Object.keys(headersList) + for (const key of keys) { + const val = headersList[key] + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() + } else if (key.toLowerCase() === 'location') { + location = val + } -/** - * Gets the ASN.1 TBSCertificate part of an X.509v3 certificate. - * - * @param cert the certificate. - * - * @return the asn1 TBSCertificate. 
- */ -pki.getTBSCertificate = function(cert) { - // TBSCertificate - var notBefore = _dateToAsn1(cert.validity.notBefore); - var notAfter = _dateToAsn1(cert.validity.notAfter); - var tbs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // integer - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(cert.version).getBytes()) - ]), - // serialNumber - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(cert.serialNumber)), - // signature - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(cert.siginfo.algorithmOid).getBytes()), - // parameters - _signatureParametersToAsn1( - cert.siginfo.algorithmOid, cert.siginfo.parameters) - ]), - // issuer - _dnToAsn1(cert.issuer), - // validity - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - notBefore, - notAfter - ]), - // subject - _dnToAsn1(cert.subject), - // SubjectPublicKeyInfo - pki.publicKeyToAsn1(cert.publicKey) - ]); + headers[kHeadersList].append(key, val) + } + } - if(cert.issuer.uniqueId) { - // issuerUniqueID (optional) - tbs.value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - // TODO: support arbitrary bit length ids - String.fromCharCode(0x00) + - cert.issuer.uniqueId - ) - ]) - ); - } - if(cert.subject.uniqueId) { - // subjectUniqueID (optional) - tbs.value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - // TODO: support arbitrary bit length ids - String.fromCharCode(0x00) + - cert.subject.uniqueId - ) - ]) - ); - } + this.body = new Readable({ read: resume }) + + const decoders = [] + + const willFollow = request.redirect === 'follow' && + location && + redirectStatusSet.has(status) + + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding + if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { + for (const coding of codings) { + // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 + if (coding === 'x-gzip' || coding === 'gzip') { + decoders.push(zlib.createGunzip({ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) + } else if (coding === 'deflate') { + decoders.push(zlib.createInflate()) + } else if (coding === 'br') { + decoders.push(zlib.createBrotliDecompress()) + } else { + decoders.length = 0 + break + } + } + } - if(cert.extensions.length > 0) { - // extensions (optional) - tbs.value.push(pki.certificateExtensionsToAsn1(cert.extensions)); - } + resolve({ + status, + statusText, + headersList: headers[kHeadersList], + body: decoders.length + ? pipeline(this.body, ...decoders, () => { }) + : this.body.on('error', () => {}) + }) - return tbs; -}; + return true + }, -/** - * Gets the ASN.1 CertificationRequestInfo part of a - * PKCS#10 CertificationRequest. - * - * @param csr the certification request. - * - * @return the asn1 CertificationRequestInfo. 
- */ -pki.getCertificationRequestInfo = function(csr) { - // CertificationRequestInfo - var cri = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(csr.version).getBytes()), - // subject - _dnToAsn1(csr.subject), - // SubjectPublicKeyInfo - pki.publicKeyToAsn1(csr.publicKey), - // attributes - _CRIAttributesToAsn1(csr) - ]); + onData (chunk) { + if (fetchParams.controller.dump) { + return + } - return cri; -}; + // 1. If one or more bytes have been transmitted from response’s + // message body, then: -/** - * Converts a DistinguishedName (subject or issuer) to an ASN.1 object. - * - * @param dn the DistinguishedName. - * - * @return the asn1 representation of a DistinguishedName. - */ -pki.distinguishedNameToAsn1 = function(dn) { - return _dnToAsn1(dn); -}; + // 1. Let bytes be the transmitted bytes. + const bytes = chunk -/** - * Converts an X.509v3 RSA certificate to an ASN.1 object. - * - * @param cert the certificate. - * - * @return the asn1 representation of an X.509v3 RSA certificate. - */ -pki.certificateToAsn1 = function(cert) { - // prefer cached TBSCertificate over generating one - var tbsCertificate = cert.tbsCertificate || pki.getTBSCertificate(cert); - - // Certificate - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // TBSCertificate - tbsCertificate, - // AlgorithmIdentifier (signature algorithm) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(cert.signatureOid).getBytes()), - // parameters - _signatureParametersToAsn1(cert.signatureOid, cert.signatureParameters) - ]), - // SignatureValue - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - String.fromCharCode(0x00) + cert.signature) - ]); -}; + // 2. Let codings be the result of extracting header list values + // given `Content-Encoding` and response’s header list. + // See pullAlgorithm. -/** - * Converts X.509v3 certificate extensions to ASN.1. - * - * @param exts the extensions to convert. - * - * @return the extensions in ASN.1 format. - */ -pki.certificateExtensionsToAsn1 = function(exts) { - // create top-level extension container - var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 3, true, []); + // 3. Increase timingInfo’s encoded body size by bytes’s length. + timingInfo.encodedBodySize += bytes.byteLength - // create extension sequence (stores a sequence for each extension) - var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - rval.value.push(seq); + // 4. See pullAlgorithm... - for(var i = 0; i < exts.length; ++i) { - seq.value.push(pki.certificateExtensionToAsn1(exts[i])); - } + return this.body.push(bytes) + }, - return rval; -}; + onComplete () { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } -/** - * Converts a single certificate extension to ASN.1. - * - * @param ext the extension to convert. - * - * @return the extension in ASN.1 format. 
- */ -pki.certificateExtensionToAsn1 = function(ext) { - // create a sequence for each extension - var extseq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + fetchParams.controller.ended = true - // extnID (OID) - extseq.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(ext.id).getBytes())); + this.body.push(null) + }, - // critical defaults to false - if(ext.critical) { - // critical BOOLEAN DEFAULT FALSE - extseq.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false, - String.fromCharCode(0xFF))); - } + onError (error) { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } - var value = ext.value; - if(typeof ext.value !== 'string') { - // value is asn.1 - value = asn1.toDer(value).getBytes(); - } + this.body?.destroy(error) - // extnValue (OCTET STRING) - extseq.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, value)); + fetchParams.controller.terminate(error) - return extseq; -}; + reject(error) + }, -/** - * Converts a PKCS#10 certification request to an ASN.1 object. - * - * @param csr the certification request. - * - * @return the asn1 representation of a certification request. - */ -pki.certificationRequestToAsn1 = function(csr) { - // prefer cached CertificationRequestInfo over generating one - var cri = csr.certificationRequestInfo || - pki.getCertificationRequestInfo(csr); - - // Certificate - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // CertificationRequestInfo - cri, - // AlgorithmIdentifier (signature algorithm) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(csr.signatureOid).getBytes()), - // parameters - _signatureParametersToAsn1(csr.signatureOid, csr.signatureParameters) - ]), - // signature - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - String.fromCharCode(0x00) + csr.signature) - ]); -}; + onUpgrade (status, headersList, socket) { + if (status !== 101) { + return + } -/** - * Creates a CA store. - * - * @param certs an optional array of certificate objects or PEM-formatted - * certificate strings to add to the CA store. - * - * @return the CA store. - */ -pki.createCaStore = function(certs) { - // create CA store - var caStore = { - // stored certificates - certs: {} - }; + const headers = new Headers() - /** - * Gets the certificate that issued the passed certificate or its - * 'parent'. - * - * @param cert the certificate to get the parent for. - * - * @return the parent certificate or null if none was found. - */ - caStore.getIssuer = function(cert) { - var rval = getBySubject(cert.issuer); - - // see if there are multiple matches - /*if(forge.util.isArray(rval)) { - // TODO: resolve multiple matches by checking - // authorityKey/subjectKey/issuerUniqueID/other identifiers, etc. - // FIXME: or alternatively do authority key mapping - // if possible (X.509v1 certs can't work?) - throw new Error('Resolving multiple issuer matches not implemented yet.'); - }*/ - - return rval; - }; + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') - /** - * Adds a trusted certificate to the store. - * - * @param cert the certificate to add as a trusted certificate (either a - * pki.certificate object or a PEM-formatted certificate). 
- */ - caStore.addCertificate = function(cert) { - // convert from pem if necessary - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); - } + headers[kHeadersList].append(key, val) + } - ensureSubjectHasHash(cert.subject); + resolve({ + status, + statusText: STATUS_CODES[status], + headersList: headers[kHeadersList], + socket + }) - if(!caStore.hasCertificate(cert)) { // avoid duplicate certificates in store - if(cert.subject.hash in caStore.certs) { - // subject hash already exists, append to array - var tmp = caStore.certs[cert.subject.hash]; - if(!forge.util.isArray(tmp)) { - tmp = [tmp]; + return true } - tmp.push(cert); - caStore.certs[cert.subject.hash] = tmp; - } else { - caStore.certs[cert.subject.hash] = cert; } - } - }; + )) + } +} - /** - * Checks to see if the given certificate is in the store. - * - * @param cert the certificate to check (either a pki.certificate or a - * PEM-formatted certificate). - * - * @return true if the certificate is in the store, false if not. - */ - caStore.hasCertificate = function(cert) { - // convert from pem if necessary - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); - } +module.exports = { + fetch, + Fetch, + fetching, + finalizeAndReportTiming +} - var match = getBySubject(cert.subject); - if(!match) { - return false; + +/***/ }), + +/***/ 8359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* globals AbortController */ + + + +const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(1472) +const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(554) +const { FinalizationRegistry } = __nccwpck_require__(6436)() +const util = __nccwpck_require__(3983) +const { + isValidHTTPToken, + sameOrigin, + normalizeMethod, + makePolicyContainer, + normalizeMethodRecord +} = __nccwpck_require__(2538) +const { + forbiddenMethodsSet, + corsSafeListedMethodsSet, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + requestDuplex +} = __nccwpck_require__(1037) +const { kEnumerableProperty } = util +const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861) +const { webidl } = __nccwpck_require__(1744) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { URLSerializer } = __nccwpck_require__(685) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361) + +let TransformStream = globalThis.TransformStream + +const kAbortController = Symbol('abortController') + +const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { + signal.removeEventListener('abort', abort) +}) + +// https://fetch.spec.whatwg.org/#request-class +class Request { + // https://fetch.spec.whatwg.org/#dom-request + constructor (input, init = {}) { + if (input === kConstruct) { + return } - if(!forge.util.isArray(match)) { - match = [match]; + + webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) + + input = webidl.converters.RequestInfo(input) + init = webidl.converters.RequestInit(init) + + // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object + this[kRealm] = { + settingsObject: { + baseUrl: getGlobalOrigin(), + get origin () { + return this.baseUrl?.origin + }, + policyContainer: makePolicyContainer() + } } - // compare DER-encoding of certificates - var der1 = 
asn1.toDer(pki.certificateToAsn1(cert)).getBytes(); - for(var i = 0; i < match.length; ++i) { - var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes(); - if(der1 === der2) { - return true; + + // 1. Let request be null. + let request = null + + // 2. Let fallbackMode be null. + let fallbackMode = null + + // 3. Let baseURL be this’s relevant settings object’s API base URL. + const baseUrl = this[kRealm].settingsObject.baseUrl + + // 4. Let signal be null. + let signal = null + + // 5. If input is a string, then: + if (typeof input === 'string') { + // 1. Let parsedURL be the result of parsing input with baseURL. + // 2. If parsedURL is failure, then throw a TypeError. + let parsedURL + try { + parsedURL = new URL(input, baseUrl) + } catch (err) { + throw new TypeError('Failed to parse URL from ' + input, { cause: err }) } + + // 3. If parsedURL includes credentials, then throw a TypeError. + if (parsedURL.username || parsedURL.password) { + throw new TypeError( + 'Request cannot be constructed from a URL that includes credentials: ' + + input + ) + } + + // 4. Set request to a new request whose URL is parsedURL. + request = makeRequest({ urlList: [parsedURL] }) + + // 5. Set fallbackMode to "cors". + fallbackMode = 'cors' + } else { + // 6. Otherwise: + + // 7. Assert: input is a Request object. + assert(input instanceof Request) + + // 8. Set request to input’s request. + request = input[kState] + + // 9. Set signal to input’s signal. + signal = input[kSignal] } - return false; - }; - /** - * Lists all of the certificates kept in the store. - * - * @return an array of all of the pki.certificate objects in the store. - */ - caStore.listAllCertificates = function() { - var certList = []; - - for(var hash in caStore.certs) { - if(caStore.certs.hasOwnProperty(hash)) { - var value = caStore.certs[hash]; - if(!forge.util.isArray(value)) { - certList.push(value); - } else { - for(var i = 0; i < value.length; ++i) { - certList.push(value[i]); - } - } + // 7. Let origin be this’s relevant settings object’s origin. + const origin = this[kRealm].settingsObject.origin + + // 8. Let window be "client". + let window = 'client' + + // 9. If request’s window is an environment settings object and its origin + // is same origin with origin, then set window to request’s window. + if ( + request.window?.constructor?.name === 'EnvironmentSettingsObject' && + sameOrigin(request.window, origin) + ) { + window = request.window + } + + // 10. If init["window"] exists and is non-null, then throw a TypeError. + if (init.window != null) { + throw new TypeError(`'window' option '${window}' must be null`) + } + + // 11. If init["window"] exists, then set window to "no-window". + if ('window' in init) { + window = 'no-window' + } + + // 12. Set request to a new request with the following properties: + request = makeRequest({ + // URL request’s URL. + // undici implementation note: this is set as the first item in request's urlList in makeRequest + // method request’s method. + method: request.method, + // header list A copy of request’s header list. + // undici implementation note: headersList is cloned in makeRequest + headersList: request.headersList, + // unsafe-request flag Set. + unsafeRequest: request.unsafeRequest, + // client This’s relevant settings object. + client: this[kRealm].settingsObject, + // window window. + window, + // priority request’s priority. + priority: request.priority, + // origin request’s origin. 
The propagation of the origin is only significant for navigation requests + // being handled by a service worker. In this scenario a request can have an origin that is different + // from the current client. + origin: request.origin, + // referrer request’s referrer. + referrer: request.referrer, + // referrer policy request’s referrer policy. + referrerPolicy: request.referrerPolicy, + // mode request’s mode. + mode: request.mode, + // credentials mode request’s credentials mode. + credentials: request.credentials, + // cache mode request’s cache mode. + cache: request.cache, + // redirect mode request’s redirect mode. + redirect: request.redirect, + // integrity metadata request’s integrity metadata. + integrity: request.integrity, + // keepalive request’s keepalive. + keepalive: request.keepalive, + // reload-navigation flag request’s reload-navigation flag. + reloadNavigation: request.reloadNavigation, + // history-navigation flag request’s history-navigation flag. + historyNavigation: request.historyNavigation, + // URL list A clone of request’s URL list. + urlList: [...request.urlList] + }) + + const initHasKey = Object.keys(init).length !== 0 + + // 13. If init is not empty, then: + if (initHasKey) { + // 1. If request’s mode is "navigate", then set it to "same-origin". + if (request.mode === 'navigate') { + request.mode = 'same-origin' } + + // 2. Unset request’s reload-navigation flag. + request.reloadNavigation = false + + // 3. Unset request’s history-navigation flag. + request.historyNavigation = false + + // 4. Set request’s origin to "client". + request.origin = 'client' + + // 5. Set request’s referrer to "client" + request.referrer = 'client' + + // 6. Set request’s referrer policy to the empty string. + request.referrerPolicy = '' + + // 7. Set request’s URL to request’s current URL. + request.url = request.urlList[request.urlList.length - 1] + + // 8. Set request’s URL list to « request’s URL ». + request.urlList = [request.url] } - return certList; - }; + // 14. If init["referrer"] exists, then: + if (init.referrer !== undefined) { + // 1. Let referrer be init["referrer"]. + const referrer = init.referrer - /** - * Removes a certificate from the store. - * - * @param cert the certificate to remove (either a pki.certificate or a - * PEM-formatted certificate). - * - * @return the certificate that was removed or null if the certificate - * wasn't in store. - */ - caStore.removeCertificate = function(cert) { - var result; + // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". + if (referrer === '') { + request.referrer = 'no-referrer' + } else { + // 1. Let parsedReferrer be the result of parsing referrer with + // baseURL. + // 2. If parsedReferrer is failure, then throw a TypeError. + let parsedReferrer + try { + parsedReferrer = new URL(referrer, baseUrl) + } catch (err) { + throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) + } - // convert from pem if necessary - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); + // 3. If one of the following is true + // - parsedReferrer’s scheme is "about" and path is the string "client" + // - parsedReferrer’s origin is not same origin with origin + // then set request’s referrer to "client". + if ( + (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || + (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + ) { + request.referrer = 'client' + } else { + // 4. 
Otherwise, set request’s referrer to parsedReferrer. + request.referrer = parsedReferrer + } + } } - ensureSubjectHasHash(cert.subject); - if(!caStore.hasCertificate(cert)) { - return null; + + // 15. If init["referrerPolicy"] exists, then set request’s referrer policy + // to it. + if (init.referrerPolicy !== undefined) { + request.referrerPolicy = init.referrerPolicy } - var match = getBySubject(cert.subject); + // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. + let mode + if (init.mode !== undefined) { + mode = init.mode + } else { + mode = fallbackMode + } - if(!forge.util.isArray(match)) { - result = caStore.certs[cert.subject.hash]; - delete caStore.certs[cert.subject.hash]; - return result; + // 17. If mode is "navigate", then throw a TypeError. + if (mode === 'navigate') { + throw webidl.errors.exception({ + header: 'Request constructor', + message: 'invalid request mode navigate.' + }) } - // compare DER-encoding of certificates - var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes(); - for(var i = 0; i < match.length; ++i) { - var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes(); - if(der1 === der2) { - result = match[i]; - match.splice(i, 1); - } + // 18. If mode is non-null, set request’s mode to mode. + if (mode != null) { + request.mode = mode } - if(match.length === 0) { - delete caStore.certs[cert.subject.hash]; + + // 19. If init["credentials"] exists, then set request’s credentials mode + // to it. + if (init.credentials !== undefined) { + request.credentials = init.credentials } - return result; - }; + // 18. If init["cache"] exists, then set request’s cache mode to it. + if (init.cache !== undefined) { + request.cache = init.cache + } - function getBySubject(subject) { - ensureSubjectHasHash(subject); - return caStore.certs[subject.hash] || null; - } + // 21. If request’s cache mode is "only-if-cached" and request’s mode is + // not "same-origin", then throw a TypeError. + if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { + throw new TypeError( + "'only-if-cached' can be set only with 'same-origin' mode" + ) + } - function ensureSubjectHasHash(subject) { - // produce subject hash if it doesn't exist - if(!subject.hash) { - var md = forge.md.sha1.create(); - subject.attributes = pki.RDNAttributesAsArray(_dnToAsn1(subject), md); - subject.hash = md.digest().toHex(); + // 22. If init["redirect"] exists, then set request’s redirect mode to it. + if (init.redirect !== undefined) { + request.redirect = init.redirect } - } - // auto-add passed in certs - if(certs) { - // parse PEM-formatted certificates as necessary - for(var i = 0; i < certs.length; ++i) { - var cert = certs[i]; - caStore.addCertificate(cert); + // 23. If init["integrity"] exists, then set request’s integrity metadata to it. + if (init.integrity != null) { + request.integrity = String(init.integrity) } - } - return caStore; -}; + // 24. If init["keepalive"] exists, then set request’s keepalive to it. + if (init.keepalive !== undefined) { + request.keepalive = Boolean(init.keepalive) + } -/** - * Certificate verification errors, based on TLS. - */ -pki.certificateError = { - bad_certificate: 'forge.pki.BadCertificate', - unsupported_certificate: 'forge.pki.UnsupportedCertificate', - certificate_revoked: 'forge.pki.CertificateRevoked', - certificate_expired: 'forge.pki.CertificateExpired', - certificate_unknown: 'forge.pki.CertificateUnknown', - unknown_ca: 'forge.pki.UnknownCertificateAuthority' -}; + // 25. 
If init["method"] exists, then: + if (init.method !== undefined) { + // 1. Let method be init["method"]. + let method = init.method -/** - * Verifies a certificate chain against the given Certificate Authority store - * with an optional custom verify callback. - * - * @param caStore a certificate store to verify against. - * @param chain the certificate chain to verify, with the root or highest - * authority at the end (an array of certificates). - * @param options a callback to be called for every certificate in the chain or - * an object with: - * verify a callback to be called for every certificate in the - * chain - * validityCheckDate the date against which the certificate - * validity period should be checked. Pass null to not check - * the validity period. By default, the current date is used. - * - * The verify callback has the following signature: - * - * verified - Set to true if certificate was verified, otherwise the - * pki.certificateError for why the certificate failed. - * depth - The current index in the chain, where 0 is the end point's cert. - * certs - The certificate chain, *NOTE* an empty chain indicates an anonymous - * end point. - * - * The function returns true on success and on failure either the appropriate - * pki.certificateError or an object with 'error' set to the appropriate - * pki.certificateError and 'message' set to a custom error message. - * - * @return true if successful, error thrown if not. - */ -pki.verifyCertificateChain = function(caStore, chain, options) { - /* From: RFC3280 - Internet X.509 Public Key Infrastructure Certificate - Section 6: Certification Path Validation - See inline parentheticals related to this particular implementation. - - The primary goal of path validation is to verify the binding between - a subject distinguished name or a subject alternative name and subject - public key, as represented in the end entity certificate, based on the - public key of the trust anchor. This requires obtaining a sequence of - certificates that support that binding. That sequence should be provided - in the passed 'chain'. The trust anchor should be in the given CA - store. The 'end entity' certificate is the certificate provided by the - end point (typically a server) and is the first in the chain. - - To meet this goal, the path validation process verifies, among other - things, that a prospective certification path (a sequence of n - certificates or a 'chain') satisfies the following conditions: - - (a) for all x in {1, ..., n-1}, the subject of certificate x is - the issuer of certificate x+1; - - (b) certificate 1 is issued by the trust anchor; - - (c) certificate n is the certificate to be validated; and - - (d) for all x in {1, ..., n}, the certificate was valid at the - time in question. - - Note that here 'n' is index 0 in the chain and 1 is the last certificate - in the chain and it must be signed by a certificate in the connection's - CA store. - - The path validation process also determines the set of certificate - policies that are valid for this path, based on the certificate policies - extension, policy mapping extension, policy constraints extension, and - inhibit any-policy extension. - - Note: Policy mapping extension not supported (Not Required). - - Note: If the certificate has an unsupported critical extension, then it - must be rejected. - - Note: A certificate is self-issued if the DNs that appear in the subject - and issuer fields are identical and are not empty. 
- - The path validation algorithm assumes the following seven inputs are - provided to the path processing logic. What this specific implementation - will use is provided parenthetically: - - (a) a prospective certification path of length n (the 'chain') - (b) the current date/time: ('now'). - (c) user-initial-policy-set: A set of certificate policy identifiers - naming the policies that are acceptable to the certificate user. - The user-initial-policy-set contains the special value any-policy - if the user is not concerned about certificate policy - (Not implemented. Any policy is accepted). - (d) trust anchor information, describing a CA that serves as a trust - anchor for the certification path. The trust anchor information - includes: - - (1) the trusted issuer name, - (2) the trusted public key algorithm, - (3) the trusted public key, and - (4) optionally, the trusted public key parameters associated - with the public key. - - (Trust anchors are provided via certificates in the CA store). - - The trust anchor information may be provided to the path processing - procedure in the form of a self-signed certificate. The trusted anchor - information is trusted because it was delivered to the path processing - procedure by some trustworthy out-of-band procedure. If the trusted - public key algorithm requires parameters, then the parameters are - provided along with the trusted public key (No parameters used in this - implementation). - - (e) initial-policy-mapping-inhibit, which indicates if policy mapping is - allowed in the certification path. - (Not implemented, no policy checking) - - (f) initial-explicit-policy, which indicates if the path must be valid - for at least one of the certificate policies in the user-initial- - policy-set. - (Not implemented, no policy checking) - - (g) initial-any-policy-inhibit, which indicates whether the - anyPolicy OID should be processed if it is included in a - certificate. - (Not implemented, so any policy is valid provided that it is - not marked as critical) */ - - /* Basic Path Processing: - - For each certificate in the 'chain', the following is checked: - - 1. The certificate validity period includes the current time. - 2. The certificate was signed by its parent (where the parent is either - the next in the chain or from the CA store). Allow processing to - continue to the next step if no parent is found but the certificate is - in the CA store. - 3. TODO: The certificate has not been revoked. - 4. The certificate issuer name matches the parent's subject name. - 5. TODO: If the certificate is self-issued and not the final certificate - in the chain, skip this step, otherwise verify that the subject name - is within one of the permitted subtrees of X.500 distinguished names - and that each of the alternative names in the subjectAltName extension - (critical or non-critical) is within one of the permitted subtrees for - that name type. - 6. TODO: If the certificate is self-issued and not the final certificate - in the chain, skip this step, otherwise verify that the subject name - is not within one of the excluded subtrees for X.500 distinguished - names and none of the subjectAltName extension names are excluded for - that name type. - 7. The other steps in the algorithm for basic path processing involve - handling the policy extension which is not presently supported in this - implementation. Instead, if a critical policy extension is found, the - certificate is rejected as not supported. - 8. 
If the certificate is not the first or if its the only certificate in - the chain (having no parent from the CA store or is self-signed) and it - has a critical key usage extension, verify that the keyCertSign bit is - set. If the key usage extension exists, verify that the basic - constraints extension exists. If the basic constraints extension exists, - verify that the cA flag is set. If pathLenConstraint is set, ensure that - the number of certificates that precede in the chain (come earlier - in the chain as implemented below), excluding the very first in the - chain (typically the end-entity one), isn't greater than the - pathLenConstraint. This constraint limits the number of intermediate - CAs that may appear below a CA before only end-entity certificates - may be issued. */ - - // if a verify callback is passed as the third parameter, package it within - // the options object. This is to support a legacy function signature that - // expected the verify callback as the third parameter. - if(typeof options === 'function') { - options = {verify: options}; - } - options = options || {}; + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } - // copy cert chain references to another array to protect against changes - // in verify callback - chain = chain.slice(0); - var certs = chain.slice(0); - - var validityCheckDate = options.validityCheckDate; - // if no validityCheckDate is specified, default to the current date. Make - // sure to maintain the value null because it indicates that the validity - // period should not be checked. - if(typeof validityCheckDate === 'undefined') { - validityCheckDate = new Date(); - } - - // verify each cert in the chain using its parent, where the parent - // is either the next in the chain or from the CA store - var first = true; - var error = null; - var depth = 0; - do { - var cert = chain.shift(); - var parent = null; - var selfSigned = false; - - if(validityCheckDate) { - // 1. check valid time - if(validityCheckDate < cert.validity.notBefore || - validityCheckDate > cert.validity.notAfter) { - error = { - message: 'Certificate is not valid yet or has expired.', - error: pki.certificateError.certificate_expired, - notBefore: cert.validity.notBefore, - notAfter: cert.validity.notAfter, - // TODO: we might want to reconsider renaming 'now' to - // 'validityCheckDate' should this API be changed in the future. - now: validityCheckDate - }; + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) } + + // 3. Normalize method. + method = normalizeMethodRecord[method] ?? normalizeMethod(method) + + // 4. Set request’s method to method. + request.method = method } - // 2. verify with parent from chain or CA store - if(error === null) { - parent = chain[0] || caStore.getIssuer(cert); - if(parent === null) { - // check for self-signed cert - if(cert.isIssuer(cert)) { - selfSigned = true; - parent = cert; - } + // 26. If init["signal"] exists, then set signal to it. + if (init.signal !== undefined) { + signal = init.signal + } + + // 27. Set this’s request to request. + this[kState] = request + + // 28. Set this’s signal to a new AbortSignal object with this’s relevant + // Realm. 
+ // TODO: could this be simplified with AbortSignal.any + // (https://dom.spec.whatwg.org/#dom-abortsignal-any) + const ac = new AbortController() + this[kSignal] = ac.signal + this[kSignal][kRealm] = this[kRealm] + + // 29. If signal is not null, then make this’s signal follow signal. + if (signal != null) { + if ( + !signal || + typeof signal.aborted !== 'boolean' || + typeof signal.addEventListener !== 'function' + ) { + throw new TypeError( + "Failed to construct 'Request': member signal is not of type AbortSignal." + ) } - if(parent) { - // FIXME: current CA store implementation might have multiple - // certificates where the issuer can't be determined from the - // certificate (happens rarely with, eg: old certificates) so normalize - // by always putting parents into an array - // TODO: there's may be an extreme degenerate case currently uncovered - // where an old intermediate certificate seems to have a matching parent - // but none of the parents actually verify ... but the intermediate - // is in the CA and it should pass this check; needs investigation - var parents = parent; - if(!forge.util.isArray(parents)) { - parents = [parents]; - } - - // try to verify with each possible parent (typically only one) - var verified = false; - while(!verified && parents.length > 0) { - parent = parents.shift(); - try { - verified = parent.verify(cert); - } catch(ex) { - // failure to verify, don't care why, try next one + if (signal.aborted) { + ac.abort(signal.reason) + } else { + // Keep a strong ref to ac while request object + // is alive. This is needed to prevent AbortController + // from being prematurely garbage collected. + // See, https://github.com/nodejs/undici/issues/1926. + this[kAbortController] = ac + + const acRef = new WeakRef(ac) + const abort = function () { + const ac = acRef.deref() + if (ac !== undefined) { + ac.abort(this.reason) } } - if(!verified) { - error = { - message: 'Certificate signature is invalid.', - error: pki.certificateError.bad_certificate - }; - } - } + // Third-party AbortControllers may not work with these. + // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. + try { + // If the max amount of listeners is equal to the default, increase it + // This is only available in node >= v19.9.0 + if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { + setMaxListeners(100, signal) + } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { + setMaxListeners(100, signal) + } + } catch {} - if(error === null && (!parent || selfSigned) && - !caStore.hasCertificate(cert)) { - // no parent issuer and certificate itself is not trusted - error = { - message: 'Certificate is not trusted.', - error: pki.certificateError.unknown_ca - }; + util.addAbortListener(signal, abort) + requestFinalizer.register(ac, { signal, abort }) } } - // TODO: 3. check revoked + // 30. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is request’s header list and guard is + // "request". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kHeadersList] = request.headersList + this[kHeaders][kGuard] = 'request' + this[kHeaders][kRealm] = this[kRealm] - // 4. check for matching issuer/subject - if(error === null && parent && !cert.isIssuer(parent)) { - // parent is not issuer - error = { - message: 'Certificate issuer is invalid.', - error: pki.certificateError.bad_certificate - }; + // 31. 
If this’s request’s mode is "no-cors", then: + if (mode === 'no-cors') { + // 1. If this’s request’s method is not a CORS-safelisted method, + // then throw a TypeError. + if (!corsSafeListedMethodsSet.has(request.method)) { + throw new TypeError( + `'${request.method} is unsupported in no-cors mode.` + ) + } + + // 2. Set this’s headers’s guard to "request-no-cors". + this[kHeaders][kGuard] = 'request-no-cors' } - // 5. TODO: check names with permitted names tree + // 32. If init is not empty, then: + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] + // 1. Let headers be a copy of this’s headers and its associated header + // list. + // 2. If init["headers"] exists, then set headers to init["headers"]. + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) - // 6. TODO: check names against excluded names tree + // 3. Empty this’s headers’s header list. + headersList.clear() - // 7. check for unsupported critical extensions - if(error === null) { - // supported extensions - var se = { - keyUsage: true, - basicConstraints: true - }; - for(var i = 0; error === null && i < cert.extensions.length; ++i) { - var ext = cert.extensions[i]; - if(ext.critical && !(ext.name in se)) { - error = { - message: - 'Certificate has an unsupported critical extension.', - error: pki.certificateError.unsupported_certificate - }; + // 4. If headers is a Headers object, then for each header in its header + // list, append header’s name/header’s value to this’s headers. + if (headers instanceof HeadersList) { + for (const [key, val] of headers) { + headersList.append(key, val) } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies + } else { + // 5. Otherwise, fill this’s headers with headers. + fillHeaders(this[kHeaders], headers) } } - // 8. check for CA if cert is not first or is the only certificate - // remaining in chain with no parent or is self-signed - if(error === null && - (!first || (chain.length === 0 && (!parent || selfSigned)))) { - // first check keyUsage extension and then basic constraints - var bcExt = cert.getExtension('basicConstraints'); - var keyUsageExt = cert.getExtension('keyUsage'); - if(keyUsageExt !== null) { - // keyCertSign must be true and there must be a basic - // constraints extension - if(!keyUsageExt.keyCertSign || bcExt === null) { - // bad certificate - error = { - message: - 'Certificate keyUsage or basicConstraints conflict ' + - 'or indicate that the certificate is not a CA. 
' + - 'If the certificate is the only one in the chain or ' + - 'isn\'t the first then the certificate must be a ' + - 'valid CA.', - error: pki.certificateError.bad_certificate - }; - } - } - // basic constraints cA flag must be set - if(error === null && bcExt !== null && !bcExt.cA) { - // bad certificate - error = { - message: - 'Certificate basicConstraints indicates the certificate ' + - 'is not a CA.', - error: pki.certificateError.bad_certificate - }; - } - // if error is not null and keyUsage is available, then we know it - // has keyCertSign and there is a basic constraints extension too, - // which means we can check pathLenConstraint (if it exists) - if(error === null && keyUsageExt !== null && - 'pathLenConstraint' in bcExt) { - // pathLen is the maximum # of intermediate CA certs that can be - // found between the current certificate and the end-entity (depth 0) - // certificate; this number does not include the end-entity (depth 0, - // last in the chain) even if it happens to be a CA certificate itself - var pathLen = depth - 1; - if(pathLen > bcExt.pathLenConstraint) { - // pathLenConstraint violated, bad certificate - error = { - message: - 'Certificate basicConstraints pathLenConstraint violated.', - error: pki.certificateError.bad_certificate - }; - } + // 33. Let inputBody be input’s request’s body if input is a Request + // object; otherwise null. + const inputBody = input instanceof Request ? input[kState].body : null + + // 34. If either init["body"] exists and is non-null or inputBody is + // non-null, and request’s method is `GET` or `HEAD`, then throw a + // TypeError. + if ( + (init.body != null || inputBody != null) && + (request.method === 'GET' || request.method === 'HEAD') + ) { + throw new TypeError('Request with GET/HEAD method cannot have body.') + } + + // 35. Let initBody be null. + let initBody = null + + // 36. If init["body"] exists and is non-null, then: + if (init.body != null) { + // 1. Let Content-Type be null. + // 2. Set initBody and Content-Type to the result of extracting + // init["body"], with keepalive set to request’s keepalive. + const [extractedBody, contentType] = extractBody( + init.body, + request.keepalive + ) + initBody = extractedBody + + // 3, If Content-Type is non-null and this’s headers’s header list does + // not contain `Content-Type`, then append `Content-Type`/Content-Type to + // this’s headers. + if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + this[kHeaders].append('content-type', contentType) } } - // call application callback - var vfd = (error === null) ? true : error.error; - var ret = options.verify ? options.verify(vfd, depth, certs) : vfd; - if(ret === true) { - // clear any set error - error = null; - } else { - // if passed basic tests, set default message and alert - if(vfd === true) { - error = { - message: 'The application rejected the certificate.', - error: pki.certificateError.bad_certificate - }; + // 37. Let inputOrInitBody be initBody if it is non-null; otherwise + // inputBody. + const inputOrInitBody = initBody ?? inputBody + + // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is + // null, then: + if (inputOrInitBody != null && inputOrInitBody.source == null) { + // 1. If initBody is non-null and init["duplex"] does not exist, + // then throw a TypeError. 
+ if (initBody != null && init.duplex == null) { + throw new TypeError('RequestInit: duplex option is required when sending a body.') } - // check for custom error info - if(ret || ret === 0) { - // set custom message and error - if(typeof ret === 'object' && !forge.util.isArray(ret)) { - if(ret.message) { - error.message = ret.message; - } - if(ret.error) { - error.error = ret.error; - } - } else if(typeof ret === 'string') { - // set custom error - error.error = ret; - } + // 2. If this’s request’s mode is neither "same-origin" nor "cors", + // then throw a TypeError. + if (request.mode !== 'same-origin' && request.mode !== 'cors') { + throw new TypeError( + 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' + ) } - // throw error - throw error; + // 3. Set this’s request’s use-CORS-preflight flag. + request.useCORSPreflightFlag = true } - // no longer first cert in chain - first = false; - ++depth; - } while(chain.length > 0); + // 39. Let finalBody be inputOrInitBody. + let finalBody = inputOrInitBody - return true; -}; + // 40. If initBody is null and inputBody is non-null, then: + if (initBody == null && inputBody != null) { + // 1. If input is unusable, then throw a TypeError. + if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { + throw new TypeError( + 'Cannot construct a Request with a Request object that has already been used.' + ) + } + // 2. Set finalBody to the result of creating a proxy for inputBody. + if (!TransformStream) { + TransformStream = (__nccwpck_require__(5356).TransformStream) + } -/***/ }), + // https://streams.spec.whatwg.org/#readablestream-create-a-proxy + const identityTransform = new TransformStream() + inputBody.stream.pipeThrough(identityTransform) + finalBody = { + source: inputBody.source, + length: inputBody.length, + stream: identityTransform.readable + } + } -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 41. Set this’s request’s body to finalBody. + this[kState].body = finalBody + } -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) + // Returns request’s HTTP method, which is "GET" by default. + get method () { + webidl.brandCheck(this, Request) -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) + // The method getter steps are to return this’s request’s method. + return this[kState].method + } - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) + // Returns the URL of request as a string. + get url () { + webidl.brandCheck(this, Request) -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) + // The url getter steps are to return this’s request’s URL, serialized. + return URLSerializer(this[kState].url) } - f.called = false - return f -} -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) + // Returns a Headers object consisting of the headers associated with request. + // Note that headers added in the network layer by the user agent will not + // be accounted for in this object, e.g., the "Host" header. 
+ get headers () { + webidl.brandCheck(this, Request) + + // The headers getter steps are to return this’s headers. + return this[kHeaders] } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} + // Returns the kind of resource requested by request, e.g., "document" + // or "script". + get destination () { + webidl.brandCheck(this, Request) -/***/ }), + // The destination getter are to return this’s request’s destination. + return this[kState].destination + } -/***/ 7684: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Returns the referrer of request. Its value can be a same-origin URL if + // explicitly set in init, the empty string to indicate no referrer, and + // "about:client" when defaulting to the global’s default. This is used + // during fetching to determine the value of the `Referer` header of the + // request being made. + get referrer () { + webidl.brandCheck(this, Request) -"use strict"; + // 1. If this’s request’s referrer is "no-referrer", then return the + // empty string. + if (this[kState].referrer === 'no-referrer') { + return '' + } -const Queue = __nccwpck_require__(5185); + // 2. If this’s request’s referrer is "client", then return + // "about:client". + if (this[kState].referrer === 'client') { + return 'about:client' + } -const pLimit = concurrency => { - if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { - throw new TypeError('Expected `concurrency` to be a number from 1 and up'); - } + // Return this’s request’s referrer, serialized. + return this[kState].referrer.toString() + } - const queue = new Queue(); - let activeCount = 0; + // Returns the referrer policy associated with request. + // This is used during fetching to compute the value of the request’s + // referrer. + get referrerPolicy () { + webidl.brandCheck(this, Request) - const next = () => { - activeCount--; + // The referrerPolicy getter steps are to return this’s request’s referrer policy. + return this[kState].referrerPolicy + } - if (queue.size > 0) { - queue.dequeue()(); - } - }; + // Returns the mode associated with request, which is a string indicating + // whether the request will use CORS, or will be restricted to same-origin + // URLs. + get mode () { + webidl.brandCheck(this, Request) - const run = async (fn, resolve, ...args) => { - activeCount++; + // The mode getter steps are to return this’s request’s mode. + return this[kState].mode + } - const result = (async () => fn(...args))(); + // Returns the credentials mode associated with request, + // which is a string indicating whether credentials will be sent with the + // request always, never, or only when sent to a same-origin URL. + get credentials () { + // The credentials getter steps are to return this’s request’s credentials mode. + return this[kState].credentials + } - resolve(result); + // Returns the cache mode associated with request, + // which is a string indicating how the request will + // interact with the browser’s cache when fetching. + get cache () { + webidl.brandCheck(this, Request) - try { - await result; - } catch {} + // The cache getter steps are to return this’s request’s cache mode. + return this[kState].cache + } - next(); - }; + // Returns the redirect mode associated with request, + // which is a string indicating how redirects for the + // request will be handled during fetching. A request + // will follow redirects by default. 
+ get redirect () { + webidl.brandCheck(this, Request) - const enqueue = (fn, resolve, ...args) => { - queue.enqueue(run.bind(null, fn, resolve, ...args)); + // The redirect getter steps are to return this’s request’s redirect mode. + return this[kState].redirect + } - (async () => { - // This function needs to wait until the next microtask before comparing - // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously - // when the run function is dequeued and called. The comparison in the if-statement - // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. - await Promise.resolve(); + // Returns request’s subresource integrity metadata, which is a + // cryptographic hash of the resource being fetched. Its value + // consists of multiple hashes separated by whitespace. [SRI] + get integrity () { + webidl.brandCheck(this, Request) - if (activeCount < concurrency && queue.size > 0) { - queue.dequeue()(); - } - })(); - }; + // The integrity getter steps are to return this’s request’s integrity + // metadata. + return this[kState].integrity + } - const generator = (fn, ...args) => new Promise(resolve => { - enqueue(fn, resolve, ...args); - }); + // Returns a boolean indicating whether or not request can outlive the + // global in which it was created. + get keepalive () { + webidl.brandCheck(this, Request) - Object.defineProperties(generator, { - activeCount: { - get: () => activeCount - }, - pendingCount: { - get: () => queue.size - }, - clearQueue: { - value: () => { - queue.clear(); - } - } - }); + // The keepalive getter steps are to return this’s request’s keepalive. + return this[kState].keepalive + } - return generator; -}; + // Returns a boolean indicating whether or not request is for a reload + // navigation. + get isReloadNavigation () { + webidl.brandCheck(this, Request) -module.exports = pLimit; + // The isReloadNavigation getter steps are to return true if this’s + // request’s reload-navigation flag is set; otherwise false. + return this[kState].reloadNavigation + } + // Returns a boolean indicating whether or not request is for a history + // navigation (a.k.a. back-foward navigation). + get isHistoryNavigation () { + webidl.brandCheck(this, Request) -/***/ }), + // The isHistoryNavigation getter steps are to return true if this’s request’s + // history-navigation flag is set; otherwise false. + return this[kState].historyNavigation + } -/***/ 7214: -/***/ ((module) => { + // Returns the signal associated with request, which is an AbortSignal + // object indicating whether or not request has been aborted, and its + // abort event handler. + get signal () { + webidl.brandCheck(this, Request) -"use strict"; + // The signal getter steps are to return this’s signal. + return this[kSignal] + } + get body () { + webidl.brandCheck(this, Request) -const codes = {}; + return this[kState].body ? this[kState].body.stream : null + } -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error + get bodyUsed () { + webidl.brandCheck(this, Request) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) } - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } + get duplex () { + webidl.brandCheck(this, Request) + + return 'half' } - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); + // Returns a clone of request. 
+ clone () { + webidl.brandCheck(this, Request) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || this.body?.locked) { + throw new TypeError('unusable') } - } - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; + // 2. Let clonedRequest be the result of cloning this’s request. + const clonedRequest = cloneRequest(this[kState]) - codes[code] = NodeError; -} + // 3. Let clonedRequestObject be the result of creating a Request object, + // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. + const clonedRequestObject = new Request(kConstruct) + clonedRequestObject[kState] = clonedRequest + clonedRequestObject[kRealm] = this[kRealm] + clonedRequestObject[kHeaders] = new Headers(kConstruct) + clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList + clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one of ${thing} ${expected[0]} or ${expected[1]}`; + // 4. Make clonedRequestObject’s signal follow this’s signal. + const ac = new AbortController() + if (this.signal.aborted) { + ac.abort(this.signal.reason) } else { - return `of ${thing} ${expected[0]}`; + util.addAbortListener( + this.signal, + () => { + ac.abort(this.signal.reason) + } + ) } - } else { - return `of ${thing} ${String(expected)}`; - } + clonedRequestObject[kSignal] = ac.signal + + // 4. Return clonedRequestObject. + return clonedRequestObject + } +} + +mixinBody(Request) + +function makeRequest (init) { + // https://fetch.spec.whatwg.org/#requests + const request = { + method: 'GET', + localURLsOnly: false, + unsafeRequest: false, + body: null, + client: null, + reservedClient: null, + replacesClientId: '', + window: 'client', + keepalive: false, + serviceWorkers: 'all', + initiator: '', + destination: '', + priority: null, + origin: 'client', + policyContainer: 'client', + referrer: 'client', + referrerPolicy: '', + mode: 'no-cors', + useCORSPreflightFlag: false, + credentials: 'same-origin', + useCredentials: false, + cache: 'default', + redirect: 'follow', + integrity: '', + cryptoGraphicsNonceMetadata: '', + parserMetadata: '', + reloadNavigation: false, + historyNavigation: false, + userActivation: false, + taintedOrigin: false, + redirectCount: 0, + responseTainting: 'basic', + preventNoCacheCacheControlHeaderModification: false, + done: false, + timingAllowFailed: false, + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList() + } + request.url = request.urlList[0] + return request +} + +// https://fetch.spec.whatwg.org/#concept-request-clone +function cloneRequest (request) { + // To clone a request request, run these steps: + + // 1. Let newRequest be a copy of request, except for its body. + const newRequest = makeRequest({ ...request, body: null }) + + // 2. If request’s body is non-null, set newRequest’s body to the + // result of cloning request’s body. + if (request.body != null) { + newRequest.body = cloneBody(request.body) + } + + // 3. Return newRequest. 
+ return newRequest } -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; -} +Object.defineProperties(Request.prototype, { + method: kEnumerableProperty, + url: kEnumerableProperty, + headers: kEnumerableProperty, + redirect: kEnumerableProperty, + clone: kEnumerableProperty, + signal: kEnumerableProperty, + duplex: kEnumerableProperty, + destination: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + isHistoryNavigation: kEnumerableProperty, + isReloadNavigation: kEnumerableProperty, + keepalive: kEnumerableProperty, + integrity: kEnumerableProperty, + cache: kEnumerableProperty, + credentials: kEnumerableProperty, + attribute: kEnumerableProperty, + referrerPolicy: kEnumerableProperty, + referrer: kEnumerableProperty, + mode: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Request', + configurable: true + } +}) -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; -} +webidl.converters.Request = webidl.interfaceConverter( + Request +) -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; +// https://fetch.spec.whatwg.org/#requestinfo +webidl.converters.RequestInfo = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) } - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; + if (V instanceof Request) { + return webidl.converters.Request(V) } + + return webidl.converters.USVString(V) } -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } +webidl.converters.AbortSignal = webidl.interfaceConverter( + AbortSignal +) - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 
'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; +// https://fetch.spec.whatwg.org/#requestinit +webidl.converters.RequestInit = webidl.dictionaryConverter([ + { + key: 'method', + converter: webidl.converters.ByteString + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + }, + { + key: 'body', + converter: webidl.nullableConverter( + webidl.converters.BodyInit + ) + }, + { + key: 'referrer', + converter: webidl.converters.USVString + }, + { + key: 'referrerPolicy', + converter: webidl.converters.DOMString, + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy + allowedValues: referrerPolicy + }, + { + key: 'mode', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#concept-request-mode + allowedValues: requestMode + }, + { + key: 'credentials', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcredentials + allowedValues: requestCredentials + }, + { + key: 'cache', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcache + allowedValues: requestCache + }, + { + key: 'redirect', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestredirect + allowedValues: requestRedirect + }, + { + key: 'integrity', + converter: webidl.converters.DOMString + }, + { + key: 'keepalive', + converter: webidl.converters.boolean + }, + { + key: 'signal', + converter: webidl.nullableConverter( + (signal) => webidl.converters.AbortSignal( + signal, + { strict: false } + ) + ) + }, + { + key: 'window', + converter: webidl.converters.any + }, + { + key: 'duplex', + converter: webidl.converters.DOMString, + allowedValues: requestDuplex } +]) - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.q = codes; +module.exports = { Request, makeRequest } /***/ }), -/***/ 1359: +/***/ 7823: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. +const { Headers, HeadersList, fill } = __nccwpck_require__(554) +const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(1472) +const util = __nccwpck_require__(3983) +const { kEnumerableProperty } = util +const { + isValidReasonPhrase, + isCancelled, + isAborted, + isBlobLike, + serializeJavascriptValueToJSONString, + isErrorLike, + isomorphicEncode +} = __nccwpck_require__(2538) +const { + redirectStatusSet, + nullBodyStatus, + DOMException +} = __nccwpck_require__(1037) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) +const { webidl } = __nccwpck_require__(1744) +const { FormData } = __nccwpck_require__(2015) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { URLSerializer } = __nccwpck_require__(685) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { types } = __nccwpck_require__(3837) + +const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream) +const textEncoder = new TextEncoder('utf-8') + +// https://fetch.spec.whatwg.org/#response-class +class Response { + // Creates network error Response. + static error () { + // TODO + const relevantRealm = { settingsObject: {} } + + // The static error() method steps are to return the result of creating a + // Response object, given a new network error, "immutable", and this’s + // relevant Realm. + const responseObject = new Response() + responseObject[kState] = makeNetworkError() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response-json + static json (data, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) + + if (init !== null) { + init = webidl.converters.ResponseInit(init) + } + + // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. + const bytes = textEncoder.encode( + serializeJavascriptValueToJSONString(data) + ) + // 2. Let body be the result of extracting bytes. 
+ const body = extractBody(bytes) -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) keys.push(key); - return keys; -}; -/**/ + // 3. Let responseObject be the result of creating a Response object, given a new response, + // "response", and this’s relevant Realm. + const relevantRealm = { settingsObject: {} } + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'response' + responseObject[kHeaders][kRealm] = relevantRealm -module.exports = Duplex; -const Readable = __nccwpck_require__(1433); -const Writable = __nccwpck_require__(6993); -__nccwpck_require__(4124)(Duplex, Readable); -{ - // Allow the keys array to be GC'ed. - const keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - const method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). + initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) + + // 5. Return responseObject. + return responseObject } -} -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); + + // Creates a redirect Response that redirects to url with status status. + static redirect (url, status = 302) { + const relevantRealm = { settingsObject: {} } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) + + url = webidl.converters.USVString(url) + status = webidl.converters['unsigned short'](status) + + // 1. Let parsedURL be the result of parsing url with current settings + // object’s API base URL. + // 2. If parsedURL is failure, then throw a TypeError. + // TODO: base-URL? + let parsedURL + try { + parsedURL = new URL(url, getGlobalOrigin()) + } catch (err) { + throw Object.assign(new TypeError('Failed to parse URL from ' + url), { + cause: err + }) } - } -} -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._writableState.length; - } -}); -// the no-half-open enforcer -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; + // 3. If status is not a redirect status, then throw a RangeError. + if (!redirectStatusSet.has(status)) { + throw new RangeError('Invalid status code ' + status) + } - // no more data can be written. 
- // But allow more writes to happen in this tick. - process.nextTick(onEndNT, this); -} -function onEndNT(self) { - self.end(); -} -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; + // 4. Let responseObject be the result of creating a Response object, + // given a new response, "immutable", and this’s relevant Realm. + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Set responseObject’s response’s status to status. + responseObject[kState].status = status + + // 6. Let value be parsedURL, serialized and isomorphic encoded. + const value = isomorphicEncode(URLSerializer(parsedURL)) + + // 7. Append `Location`/value to responseObject’s response’s header list. + responseObject[kState].headersList.append('location', value) + + // 8. Return responseObject. + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response + constructor (body = null, init = {}) { + if (body !== null) { + body = webidl.converters.BodyInit(body) } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; + + init = webidl.converters.ResponseInit(init) + + // TODO + this[kRealm] = { settingsObject: {} } + + // 1. Set this’s response to a new response. + this[kState] = makeResponse({}) + + // 2. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is this’s response’s header list and guard + // is "response". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kGuard] = 'response' + this[kHeaders][kHeadersList] = this[kState].headersList + this[kHeaders][kRealm] = this[kRealm] + + // 3. Let bodyWithType be null. + let bodyWithType = null + + // 4. If body is non-null, then set bodyWithType to the result of extracting body. + if (body != null) { + const [extractedBody, type] = extractBody(body) + bodyWithType = { body: extractedBody, type } } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; + // 5. Perform initialize a response given this, init, and bodyWithType. + initializeResponse(this, init, bodyWithType) } -}); -/***/ }), + // Returns response’s type, e.g., "cors". + get type () { + webidl.brandCheck(this, Response) -/***/ 1542: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // The type getter steps are to return this’s response’s type. + return this[kState].type + } -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // Returns response’s URL, if it has one; otherwise the empty string. + get url () { + webidl.brandCheck(this, Response) + + const urlList = this[kState].urlList + + // The url getter steps are to return the empty string if this’s + // response’s URL is null; otherwise this’s response’s URL, + // serialized with exclude fragment set to true. + const url = urlList[urlList.length - 1] ?? null + + if (url === null) { + return '' + } -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. + return URLSerializer(url, true) + } + // Returns whether response was obtained through a redirect. + get redirected () { + webidl.brandCheck(this, Response) + // The redirected getter steps are to return true if this’s response’s URL + // list has more than one item; otherwise false. + return this[kState].urlList.length > 1 + } -module.exports = PassThrough; -const Transform = __nccwpck_require__(4415); -__nccwpck_require__(4124)(PassThrough, Transform); -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); -} -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; + // Returns response’s status. + get status () { + webidl.brandCheck(this, Response) -/***/ }), + // The status getter steps are to return this’s response’s status. + return this[kState].status + } -/***/ 1433: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Returns whether response’s status is an ok status. + get ok () { + webidl.brandCheck(this, Response) -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // The ok getter steps are to return true if this’s response’s status is an + // ok status; otherwise false. + return this[kState].status >= 200 && this[kState].status <= 299 + } + // Returns response’s status message. + get statusText () { + webidl.brandCheck(this, Response) + // The statusText getter steps are to return this’s response’s status + // message. + return this[kState].statusText + } -module.exports = Readable; + // Returns response’s headers as Headers. + get headers () { + webidl.brandCheck(this, Response) -/**/ -var Duplex; -/**/ + // The headers getter steps are to return this’s headers. + return this[kHeaders] + } -Readable.ReadableState = ReadableState; + get body () { + webidl.brandCheck(this, Response) -/**/ -const EE = (__nccwpck_require__(2361).EventEmitter); -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ + return this[kState].body ? this[kState].body.stream : null + } -/**/ -var Stream = __nccwpck_require__(2387); -/**/ + get bodyUsed () { + webidl.brandCheck(this, Response) -const Buffer = (__nccwpck_require__(4300).Buffer); -const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } -/**/ -const debugUtil = __nccwpck_require__(3837); -let debug; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function debug() {}; -} -/**/ + // Returns a clone of response. + clone () { + webidl.brandCheck(this, Response) -const BufferList = __nccwpck_require__(6522); -const destroyImpl = __nccwpck_require__(7049); -const _require = __nccwpck_require__(9948), - getHighWaterMark = _require.getHighWaterMark; -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || (this.body && this.body.locked)) { + throw webidl.errors.exception({ + header: 'Response.clone', + message: 'Body has already been consumed.' + }) + } -// Lazy loaded to improve the startup performance. 
-let StringDecoder; -let createReadableStreamAsyncIterator; -let from; -__nccwpck_require__(4124)(Readable, Stream); -const errorOrDestroy = destroyImpl.errorOrDestroy; -const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + // 2. Let clonedResponse be the result of cloning this’s response. + const clonedResponse = cloneResponse(this[kState]) - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; + // 3. Return the result of creating a Response object, given + // clonedResponse, this’s headers’s guard, and this’s relevant Realm. + const clonedResponseObject = new Response() + clonedResponseObject[kState] = clonedResponse + clonedResponseObject[kRealm] = this[kRealm] + clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList + clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + return clonedResponseObject + } } -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(1359); - options = options || {}; - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; +mixinBody(Response) - // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; +Object.defineProperties(Response.prototype, { + type: kEnumerableProperty, + url: kEnumerableProperty, + status: kEnumerableProperty, + ok: kEnumerableProperty, + redirected: kEnumerableProperty, + statusText: kEnumerableProperty, + headers: kEnumerableProperty, + clone: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Response', + configurable: true + } +}) - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); +Object.defineProperties(Response, { + json: kEnumerableProperty, + redirect: kEnumerableProperty, + error: kEnumerableProperty +}) - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; +// https://fetch.spec.whatwg.org/#concept-response-clone +function cloneResponse (response) { + // To clone a response response, run these steps: + + // 1. If response is a filtered response, then return a new identical + // filtered response whose internal response is a clone of response’s + // internal response. + if (response.internalResponse) { + return filterResponse( + cloneResponse(response.internalResponse), + response.type + ) + } - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; + // 2. Let newResponse be a copy of response, except for its body. + const newResponse = makeResponse({ ...response, body: null }) - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; + // 3. If response’s body is non-null, then set newResponse’s body to the + // result of cloning response’s body. + if (response.body != null) { + newResponse.body = cloneBody(response.body) + } - // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; + // 4. Return newResponse. + return newResponse +} - // Should .destroy() be called after 'end' (and potentially 'finish') - this.autoDestroy = !!options.autoDestroy; +function makeResponse (init) { + return { + aborted: false, + rangeRequested: false, + timingAllowPassed: false, + requestIncludesCredentials: false, + type: 'default', + status: 200, + timingInfo: null, + cacheState: '', + statusText: '', + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList(), + urlList: init.urlList ? [...init.urlList] : [] + } +} + +function makeNetworkError (reason) { + const isError = isErrorLike(reason) + return makeResponse({ + type: 'error', + status: 0, + error: isError + ? 
reason + : new Error(reason ? String(reason) : reason), + aborted: reason && reason.name === 'AbortError' + }) +} - // has it been destroyed - this.destroyed = false; +function makeFilteredResponse (response, state) { + state = { + internalResponse: response, + ...state + } - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; + return new Proxy(response, { + get (target, p) { + return p in state ? state[p] : target[p] + }, + set (target, p, value) { + assert(!(p in state)) + target[p] = value + return true + } + }) +} - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; +// https://fetch.spec.whatwg.org/#concept-filtered-response +function filterResponse (response, type) { + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (type === 'basic') { + // A basic filtered response is a filtered response whose type is "basic" + // and header list excludes any headers in internal response’s header list + // whose name is a forbidden response-header name. - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; + // Note: undici does not implement forbidden response-header names + return makeFilteredResponse(response, { + type: 'basic', + headersList: response.headersList + }) + } else if (type === 'cors') { + // A CORS filtered response is a filtered response whose type is "cors" + // and header list excludes any headers in internal response’s header + // list whose name is not a CORS-safelisted response-header name, given + // internal response’s CORS-exposed header-name list. + + // Note: undici does not implement CORS-safelisted response-header names + return makeFilteredResponse(response, { + type: 'cors', + headersList: response.headersList + }) + } else if (type === 'opaque') { + // An opaque filtered response is a filtered response whose type is + // "opaque", URL list is the empty list, status is 0, status message + // is the empty byte sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaque', + urlList: Object.freeze([]), + status: 0, + statusText: '', + body: null + }) + } else if (type === 'opaqueredirect') { + // An opaque-redirect filtered response is a filtered response whose type + // is "opaqueredirect", status is 0, status message is the empty byte + // sequence, header list is empty, and body is null. 
+ + return makeFilteredResponse(response, { + type: 'opaqueredirect', + status: 0, + statusText: '', + headersList: [], + body: null + }) + } else { + assert(false) } } -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - if (!(this instanceof Readable)) return new Readable(options); - // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - const isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); +// https://fetch.spec.whatwg.org/#appropriate-network-error +function makeAppropriateNetworkError (fetchParams, err = null) { + // 1. Assert: fetchParams is canceled. + assert(isCancelled(fetchParams)) - // legacy - this.readable = true; - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } - Stream.call(this); + // 2. Return an aborted network error if fetchParams is aborted; + // otherwise return a network error. + return isAborted(fetchParams) + ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) + : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) } -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; +// https://whatpr.org/fetch/1392.html#initialize-a-response +function initializeResponse (response, init, body) { + // 1. If init["status"] is not in the range 200 to 599, inclusive, then + // throw a RangeError. + if (init.status !== null && (init.status < 200 || init.status > 599)) { + throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - skipChunkCheck = true; + // 2. If init["statusText"] does not match the reason-phrase token production, + // then throw a TypeError. 
+ if ('statusText' in init && init.statusText != null) { + // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: + // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) + if (!isValidReasonPhrase(String(init.statusText))) { + throw new TypeError('Invalid statusText') } - } else { - skipChunkCheck = true; } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } + // 3. Set response’s response’s status to init["status"]. + if ('status' in init && init.status != null) { + response[kState].status = init.status } - // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - return !state.ended && (state.length < state.highWaterMark || state.length === 0); -} -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); + // 4. Set response’s response’s status message to init["statusText"]. + if ('statusText' in init && init.statusText != null) { + response[kState].statusText = init.statusText } - maybeReadMore(stream, state); -} -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + + // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. + if ('headers' in init && init.headers != null) { + fill(response[kHeaders], init.headers) + } + + // 6. If body was given, then: + if (body) { + // 1. If response's status is a null body status, then throw a TypeError. 
+ if (nullBodyStatus.includes(response.status)) { + throw webidl.errors.exception({ + header: 'Response constructor', + message: 'Invalid response status code ' + response.status + }) + } + + // 2. Set response's body to body's body. + response[kState].body = body.body + + // 3. If body's type is non-null and response's header list does not contain + // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. + if (body.type != null && !response[kState].headersList.contains('Content-Type')) { + response[kState].headersList.append('content-type', body.type) + } } - return er; } -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - const decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; - // If setEncoding(null), decoder.encoding equals utf8 - this._readableState.encoding = this._readableState.decoder.encoding; +webidl.converters.ReadableStream = webidl.interfaceConverter( + ReadableStream +) - // Iterate over current buffer to convert already stored Buffers: - let p = this._readableState.buffer.head; - let content = ''; - while (p !== null) { - content += decoder.write(p.data); - p = p.next; +webidl.converters.FormData = webidl.interfaceConverter( + FormData +) + +webidl.converters.URLSearchParams = webidl.interfaceConverter( + URLSearchParams +) + +// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit +webidl.converters.XMLHttpRequestBodyInit = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) } - this._readableState.buffer.clear(); - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; -// Don't raise the hwm > 1GB -const MAX_HWM = 0x40000000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) } - return n; + + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + return webidl.converters.BufferSource(V) + } + + if (util.isFormDataLike(V)) { + return webidl.converters.FormData(V, { strict: false }) + } + + if (V instanceof URLSearchParams) { + return webidl.converters.URLSearchParams(V) + } + + return webidl.converters.DOMString(V) } -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; +// https://fetch.spec.whatwg.org/#bodyinit +webidl.converters.BodyInit = function (V) { + if (V instanceof ReadableStream) { + return webidl.converters.ReadableStream(V) } - // If we're asking for more than the current hwm, then raise the hwm. 
- if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; + + // Note: the spec doesn't include async iterables, + // this is an undici extension. + if (V?.[Symbol.asyncIterator]) { + return V } - return state.length; + + return webidl.converters.XMLHttpRequestBodyInit(V) } -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; +webidl.converters.ResponseInit = webidl.dictionaryConverter([ + { + key: 'status', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + }, + { + key: 'statusText', + converter: webidl.converters.ByteString, + defaultValue: '' + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + } +]) - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; +module.exports = { + makeNetworkError, + makeResponse, + makeAppropriateNetworkError, + filterResponse, + Response, + cloneResponse +} + + +/***/ }), + +/***/ 5861: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kUrl: Symbol('url'), + kHeaders: Symbol('headers'), + kSignal: Symbol('signal'), + kState: Symbol('state'), + kGuard: Symbol('guard'), + kRealm: Symbol('realm') +} + + +/***/ }), + +/***/ 2538: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(1037) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { performance } = __nccwpck_require__(4074) +const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983) +const assert = __nccwpck_require__(9491) +const { isUint8Array } = __nccwpck_require__(9830) + +// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable +/** @type {import('crypto')|undefined} */ +let crypto + +try { + crypto = __nccwpck_require__(6113) +} catch { + +} + +function responseURL (response) { + // https://fetch.spec.whatwg.org/#responses + // A response has an associated URL. It is a pointer to the last URL + // in response’s URL list and null if response’s URL list is empty. + const urlList = response.urlList + const length = urlList.length + return length === 0 ? null : urlList[length - 1].toString() +} + +// https://fetch.spec.whatwg.org/#concept-response-location-url +function responseLocationURL (response, requestFragment) { + // 1. If response’s status is not a redirect status, then return null. + if (!redirectStatusSet.has(response.status)) { + return null } - n = howMuchToRead(n, state); - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; + // 2. Let location be the result of extracting header list values given + // `Location` and response’s header list. 
+ let location = response.headersList.get('location') + + // 3. If location is a header value, then set location to the result of + // parsing location with response’s URL. + if (location !== null && isValidHeaderValue(location)) { + location = new URL(location, responseURL(response)) } - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. + // 4. If location is a URL whose fragment is null, then set location’s + // fragment to requestFragment. + if (location && !location.hash) { + location.hash = requestFragment + } - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); + // 5. Return location. + return location +} - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); +/** @returns {URL} */ +function requestCurrentURL (request) { + return request.urlList[request.urlList.length - 1] +} + +function requestBadPort (request) { + // 1. Let url be request’s current URL. + const url = requestCurrentURL(request) + + // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, + // then return blocked. + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { + return 'blocked' } - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); + // 3. Return allowed. + return 'allowed' +} + +function isErrorLike (object) { + return object instanceof Error || ( + object?.constructor?.name === 'Error' || + object?.constructor?.name === 'DOMException' + ) +} + +// Check whether |statusText| is a ByteString and +// matches the Reason-Phrase token production. 
+// RFC 2616: https://tools.ietf.org/html/rfc2616 +// RFC 7230: https://tools.ietf.org/html/rfc7230 +// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" +// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 +function isValidReasonPhrase (statusText) { + for (let i = 0; i < statusText.length; ++i) { + const c = statusText.charCodeAt(i) + if ( + !( + ( + c === 0x09 || // HTAB + (c >= 0x20 && c <= 0x7e) || // SP / VCHAR + (c >= 0x80 && c <= 0xff) + ) // obs-text + ) + ) { + return false + } } - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; + return true +} + +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } +} + +/** + * @param {string} characters + */ +function isValidHTTPToken (characters) { + if (characters.length === 0) { + return false + } + for (let i = 0; i < characters.length; ++i) { + if (!isTokenCharCode(characters.charCodeAt(i))) { + return false + } } - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; + return true +} - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ +function isValidHeaderName (potentialValue) { + return isValidHTTPToken(potentialValue) +} + +/** + * @see https://fetch.spec.whatwg.org/#header-value + * @param {string} potentialValue + */ +function isValidHeaderValue (potentialValue) { + // - Has no leading or trailing HTTP tab or space bytes. + // - Contains no 0x00 (NUL) or HTTP newline bytes. + if ( + potentialValue.startsWith('\t') || + potentialValue.startsWith(' ') || + potentialValue.endsWith('\t') || + potentialValue.endsWith(' ') + ) { + return false } - if (ret !== null) this.emit('data', ret); - return ret; -}; -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } + + if ( + potentialValue.includes('\0') || + potentialValue.includes('\r') || + potentialValue.includes('\n') + ) { + return false } - state.ended = true; - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. 
- state.needReadable = false; - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); + + return true +} + +// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect +function setRequestReferrerPolicyOnRedirect (request, actualResponse) { + // Given a request request and a response actualResponse, this algorithm + // updates request’s referrer policy according to the Referrer-Policy + // header (if any) in actualResponse. + + // 1. Let policy be the result of executing § 8.1 Parse a referrer policy + // from a Referrer-Policy header on actualResponse. + + // 8.1 Parse a referrer policy from a Referrer-Policy header + // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. + const { headersList } = actualResponse + // 2. Let policy be the empty string. + // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. + // 4. Return policy. + const policyHeader = (headersList.get('referrer-policy') ?? '').split(',') + + // Note: As the referrer-policy can contain multiple policies + // separated by comma, we need to loop through all of them + // and pick the first valid one. + // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy + let policy = '' + if (policyHeader.length > 0) { + // The right-most policy takes precedence. + // The left-most policy is the fallback. + for (let i = policyHeader.length; i !== 0; i--) { + const token = policyHeader[i - 1].trim() + if (referrerPolicyTokens.has(token)) { + policy = token + break + } } } -} -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); + // 2. If policy is not the empty string, then set request’s referrer policy to policy. + if (policy !== '') { + request.referrerPolicy = policy } } -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } - // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); +// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check +function crossOriginResourcePolicyCheck () { + // TODO + return 'allowed' } -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); +// https://fetch.spec.whatwg.org/#concept-cors-check +function corsCheck () { + // TODO + return 'success' +} + +// https://fetch.spec.whatwg.org/#concept-tao-check +function TAOCheck () { + // TODO + return 'success' +} + +function appendFetchMetadata (httpRequest) { + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + + // 1. Assert: r’s url is a potentially trustworthy URL. + // TODO + + // 2. Let header be a Structured Header whose value is a token. + let header = null + + // 3. Set header’s value to r’s mode. + header = httpRequest.mode + + // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. + httpRequest.headersList.set('sec-fetch-mode', header) + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header + // TODO +} + +// https://fetch.spec.whatwg.org/#append-a-request-origin-header +function appendRequestOriginHeader (request) { + // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. + let serializedOrigin = request.origin + + // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. + if (request.responseTainting === 'cors' || request.mode === 'websocket') { + if (serializedOrigin) { + request.headersList.append('origin', serializedOrigin) + } + + // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: + } else if (request.method !== 'GET' && request.method !== 'HEAD') { + // 1. Switch on request’s referrer policy: + switch (request.referrerPolicy) { + case 'no-referrer': + // Set serializedOrigin to `null`. + serializedOrigin = null + break + case 'no-referrer-when-downgrade': + case 'strict-origin': + case 'strict-origin-when-cross-origin': + // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { + serializedOrigin = null + } + break + case 'same-origin': + // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. + if (!sameOrigin(request, requestCurrentURL(request))) { + serializedOrigin = null + } + break + default: + // Do nothing. + } + + if (serializedOrigin) { + // 2. Append (`Origin`, serializedOrigin) to request’s header list. + request.headersList.append('origin', serializedOrigin) + } } } -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. 
Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - const len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break; + +function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { + // TODO + return performance.now() +} + +// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info +function createOpaqueTimingInfo (timingInfo) { + return { + startTime: timingInfo.startTime ?? 0, + redirectStartTime: 0, + redirectEndTime: 0, + postRedirectStartTime: timingInfo.startTime ?? 0, + finalServiceWorkerStartTime: 0, + finalNetworkResponseStartTime: 0, + finalNetworkRequestStartTime: 0, + endTime: 0, + encodedBodySize: 0, + decodedBodySize: 0, + finalConnectionTimingInfo: null + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#policy-container +function makePolicyContainer () { + // Note: the fetch spec doesn't make use of embedder policy or CSP list + return { + referrerPolicy: 'strict-origin-when-cross-origin' } - state.readingMore = false; } -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; +// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container +function clonePolicyContainer (policyContainer) { + return { + referrerPolicy: policyContainer.referrerPolicy } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } +} + +// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer +function determineRequestsReferrer (request) { + // 1. Let policy be request's referrer policy. 
+ const policy = request.referrerPolicy + + // Note: policy cannot (shouldn't) be null or an empty string. + assert(policy) + + // 2. Let environment be request’s client. + + let referrerSource = null + + // 3. Switch on request’s referrer: + if (request.referrer === 'client') { + // Note: node isn't a browser and doesn't implement document/iframes, + // so we bypass this step and replace it with our own. + + const globalOrigin = getGlobalOrigin() + + if (!globalOrigin || globalOrigin.origin === 'null') { + return 'no-referrer' } - } - function onend() { - debug('onend'); - dest.end(); + + // note: we need to clone it as it's mutated + referrerSource = new URL(globalOrigin) + } else if (request.referrer instanceof URL) { + // Let referrerSource be request’s referrer. + referrerSource = request.referrer } - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; + // 4. Let request’s referrerURL be the result of stripping referrerSource for + // use as a referrer. + let referrerURL = stripURLForReferrer(referrerSource) - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + // 5. Let referrerOrigin be the result of stripping referrerSource for use as + // a referrer, with the origin-only flag set to true. + const referrerOrigin = stripURLForReferrer(referrerSource, true) + + // 6. If the result of serializing referrerURL is a string whose length is + // greater than 4096, set referrerURL to referrerOrigin. + if (referrerURL.toString().length > 4096) { + referrerURL = referrerOrigin } - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; + + const areSameOrigin = sameOrigin(request, referrerURL) + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && + !isURLPotentiallyTrustworthy(request.url) + + // 8. Execute the switch statements corresponding to the value of policy: + switch (policy) { + case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) + case 'unsafe-url': return referrerURL + case 'same-origin': + return areSameOrigin ? 
referrerOrigin : 'no-referrer' + case 'origin-when-cross-origin': + return areSameOrigin ? referrerURL : referrerOrigin + case 'strict-origin-when-cross-origin': { + const currentURL = requestCurrentURL(request) + + // 1. If the origin of referrerURL and the origin of request’s current + // URL are the same, then return referrerURL. + if (sameOrigin(referrerURL, currentURL)) { + return referrerURL } - src.pause(); + + // 2. If referrerURL is a potentially trustworthy URL and request’s + // current URL is not a potentially trustworthy URL, then return no + // referrer. + if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { + return 'no-referrer' + } + + // 3. Return referrerOrigin. + return referrerOrigin } + case 'strict-origin': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + case 'no-referrer-when-downgrade': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + + default: // eslint-disable-line + return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin } +} - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); +/** + * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url + * @param {URL} url + * @param {boolean|undefined} originOnly + */ +function stripURLForReferrer (url, originOnly) { + // 1. Assert: url is a URL. + assert(url instanceof URL) + + // 2. If url’s scheme is a local scheme, then return no referrer. + if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { + return 'no-referrer' } - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); + // 3. Set url’s username to the empty string. + url.username = '' - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); + // 4. Set url’s password to the empty string. + url.password = '' + + // 5. Set url’s fragment to null. + url.hash = '' + + // 6. If the origin-only flag is true, then: + if (originOnly) { + // 1. Set url’s path to « the empty string ». + url.pathname = '' + + // 2. Set url’s query to null. + url.search = '' } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); + + // 7. Return url. + return url +} + +function isURLPotentiallyTrustworthy (url) { + if (!(url instanceof URL)) { + return false } - dest.once('finish', onfinish); - function unpipe() { - debug('unpipe'); - src.unpipe(dest); + + // If child of about, return true + if (url.href === 'about:blank' || url.href === 'about:srcdoc') { + return true } - // tell the dest that it's being piped to - dest.emit('pipe', src); + // If scheme is data, return true + if (url.protocol === 'data:') return true - // start the flow if it hasn't been started already. 
- if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - return dest; -}; -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); + // If file, return true + if (url.protocol === 'file:') return true + + return isOriginPotentiallyTrustworthy(url.origin) + + function isOriginPotentiallyTrustworthy (origin) { + // If origin is explicitly null, return false + if (origin == null || origin === 'null') return false + + const originAsURL = new URL(origin) + + // If secure, return true + if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { + return true } - }; -} -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; + // If localhost or variants, return true + if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || + (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || + (originAsURL.hostname.endsWith('.localhost'))) { + return true + } - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; + // If any other, return false + return false + } +} - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist + * @param {Uint8Array} bytes + * @param {string} metadataList + */ +function bytesMatch (bytes, metadataList) { + // If node is not built with OpenSSL support, we cannot check + // a request's integrity, so allow it by default (the spec will + // allow requests if an invalid hash is given, as precedence). + /* istanbul ignore if: only if node is built with --without-ssl */ + if (crypto === undefined) { + return true } - // slow case. multiple pipe destinations. + // 1. Let parsedMetadata be the result of parsing metadataList. + const parsedMetadata = parseMetadata(metadataList) - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - return this; + // 2. If parsedMetadata is no metadata, return true. + if (parsedMetadata === 'no metadata') { + return true } - // try to find the right one. - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; + // 3. If parsedMetadata is the empty set, return true. 
+ if (parsedMetadata.length === 0) { + return true + } -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - const res = Stream.prototype.on.call(this, ev, fn); - const state = this._readableState; - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; + // 4. Let metadata be the result of getting the strongest + // metadata from parsedMetadata. + const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) + // get the strongest algorithm + const strongest = list[0].algo + // get all entries that use the strongest algorithm; ignore weaker + const metadata = list.filter((item) => item.algo === strongest) - // Try start flowing on next tick if stream isn't explicitly paused - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); - } + // 5. For each item in metadata: + for (const item of metadata) { + // 1. Let algorithm be the alg component of item. + const algorithm = item.algo + + // 2. Let expectedValue be the val component of item. + let expectedValue = item.hash + + // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e + // "be liberal with padding". This is annoying, and it's not even in the spec. + + if (expectedValue.endsWith('==')) { + expectedValue = expectedValue.slice(0, -2) + } + + // 3. Let actualValue be the result of applying algorithm to bytes. + let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') + + if (actualValue.endsWith('==')) { + actualValue = actualValue.slice(0, -2) + } + + // 4. If actualValue is a case-sensitive match for expectedValue, + // return true. + if (actualValue === expectedValue) { + return true + } + + let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') + + if (actualBase64URL.endsWith('==')) { + actualBase64URL = actualBase64URL.slice(0, -2) + } + + if (actualBase64URL === expectedValue) { + return true } } - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; -Readable.prototype.removeListener = function (ev, fn) { - const res = Stream.prototype.removeListener.call(this, ev, fn); - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - return res; -}; -Readable.prototype.removeAllListeners = function (ev) { - const res = Stream.prototype.removeAllListeners.apply(this, arguments); - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. 
This means that calling
-    // resume within the same tick will have no
-    // effect.
-    process.nextTick(updateReadableListening, this);
+
+  // 6. Return false.
+  return false
+}
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+  // 1. Let result be the empty set.
+  /** @type {{ algo: string, hash: string }[]} */
+  const result = []
+
+  // 2. Let empty be equal to true.
+  let empty = true
+
+  const supportedHashes = crypto.getHashes()
+
+  // 3. For each token returned by splitting metadata on spaces:
+  for (const token of metadata.split(' ')) {
+    // 1. Set empty to false.
+    empty = false
+
+    // 2. Parse token as a hash-with-options.
+    const parsedToken = parseHashWithOptions.exec(token)
+
+    // 3. If token does not parse, continue to the next token.
+    if (parsedToken === null || parsedToken.groups === undefined) {
+      // Note: Chromium blocks the request at this point, but Firefox
+      // gives a warning that an invalid integrity was given. The
+      // correct behavior is to ignore these, and subsequently not
+      // check the integrity of the resource.
+      continue
+    }
+
+    // 4. Let algorithm be the hash-algo component of token.
+    const algorithm = parsedToken.groups.algo
+
+    // 5. If algorithm is a hash function recognized by the user
+    //    agent, add the parsed token to result.
+    if (supportedHashes.includes(algorithm.toLowerCase())) {
+      result.push(parsedToken.groups)
+    }
   }
-  return res;
-};
-function updateReadableListening(self) {
-  const state = self._readableState;
-  state.readableListening = self.listenerCount('readable') > 0;
-  if (state.resumeScheduled && !state.paused) {
-    // flowing needs to be set to true now, otherwise
-    // the upcoming resume will not flow.
-    state.flowing = true;

-    // crude way to check if we should resume
-  } else if (self.listenerCount('data') > 0) {
-    self.resume();
+  // 4. Return no metadata if empty is true, otherwise return result.
+  if (empty === true) {
+    return 'no metadata'
   }
+
+  return result
 }
-function nReadingNextTick(self) {
-  debug('readable nexttick read 0');
-  self.read(0);
+
+// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
+function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
+  // TODO
 }

-// pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function () {
-  var state = this._readableState;
-  if (!state.flowing) {
-    debug('resume');
-    // we flow only if there is no one listening
-    // for readable, but we still have to call
-    // resume()
-    state.flowing = !state.readableListening;
-    resume(this, state);
+/**
+ * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}
+ * @param {URL} A
+ * @param {URL} B
+ */
+function sameOrigin (A, B) {
+  // 1. If A and B are the same opaque origin, then return true.
+  if (A.origin === B.origin && A.origin === 'null') {
+    return true
   }
-  state.paused = false;
-  return this;
-};
-function resume(stream, state) {
-  if (!state.resumeScheduled) {
-    state.resumeScheduled = true;
-    process.nextTick(resume_, stream, state);
+
+  // 2. 
If A and B are both tuple origins and their schemes, + // hosts, and port are identical, then return true. + if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { + return true } + + // 3. Return false. + return false } -function resume_(stream, state) { - debug('resume', state.reading); - if (!state.reading) { - stream.read(0); - } - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); + +function createDeferredPromise () { + let res + let rej + const promise = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) + + return { promise, resolve: res, reject: rej } } -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - this._readableState.paused = true; - return this; -}; -function flow(stream) { - const state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null); + +function isAborted (fetchParams) { + return fetchParams.controller.state === 'aborted' } -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - stream.on('end', () => { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) this.push(chunk); - } - this.push(null); - }); - stream.on('data', chunk => { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); +function isCancelled (fetchParams) { + return fetchParams.controller.state === 'aborted' || + fetchParams.controller.state === 'terminated' +} + +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ +function normalizeMethod (method) { + return normalizeMethodRecord[method.toLowerCase()] ?? method +} - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - var ret = this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); +// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string +function serializeJavascriptValueToJSONString (value) { + // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). + const result = JSON.stringify(value) - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); - } + // 2. If result is undefined, then throw a TypeError. 
+ if (result === undefined) { + throw new TypeError('Value is not JSON serializable') } - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + // 3. Assert: result is a string. + assert(typeof result === 'string') + + // 4. Return result. + return result +} + +// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object +const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + +/** + * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + * @param {() => unknown[]} iterator + * @param {string} name name of the instance + * @param {'key'|'value'|'key+value'} kind + */ +function makeIterator (iterator, name, kind) { + const object = { + index: 0, + kind, + target: iterator } - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = n => { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); + const i = { + next () { + // 1. Let interface be the interface for which the iterator prototype object exists. + + // 2. Let thisValue be the this value. + + // 3. Let object be ? ToObject(thisValue). + + // 4. If object is a platform object, then perform a security + // check, passing: + + // 5. If object is not a default iterator object for interface, + // then throw a TypeError. + if (Object.getPrototypeOf(this) !== i) { + throw new TypeError( + `'next' called on an object that does not implement interface ${name} Iterator.` + ) + } + + // 6. Let index be object’s index. + // 7. Let kind be object’s kind. + // 8. Let values be object’s target's value pairs to iterate over. + const { index, kind, target } = object + const values = target() + + // 9. Let len be the length of values. + const len = values.length + + // 10. If index is greater than or equal to len, then return + // CreateIterResultObject(undefined, true). + if (index >= len) { + return { value: undefined, done: true } + } + + // 11. Let pair be the entry in values at index index. + const pair = values[index] + + // 12. Set object’s index to index + 1. + object.index = index + 1 + + // 13. Return the iterator result for pair and kind. + return iteratorResult(pair, kind) + }, + // The class string of an iterator prototype object for a given interface is the + // result of concatenating the identifier of the interface and the string " Iterator". + [Symbol.toStringTag]: `${name} Iterator` + } + + // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. + Object.setPrototypeOf(i, esIteratorPrototype) + // esIteratorPrototype needs to be the prototype of i + // which is the prototype of an empty object. Yes, it's confusing. + return Object.setPrototypeOf({}, i) +} + +// https://webidl.spec.whatwg.org/#iterator-result +function iteratorResult (pair, kind) { + let result + + // 1. Let result be a value determined by the value of kind: + switch (kind) { + case 'key': { + // 1. Let idlKey be pair’s key. + // 2. Let key be the result of converting idlKey to an + // ECMAScript value. + // 3. result is key. + result = pair[0] + break } - }; - return this; -}; -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = __nccwpck_require__(3306); + case 'value': { + // 1. Let idlValue be pair’s value. + // 2. 
Let value be the result of converting idlValue to + // an ECMAScript value. + // 3. result is value. + result = pair[1] + break } - return createReadableStreamAsyncIterator(this); - }; + case 'key+value': { + // 1. Let idlKey be pair’s key. + // 2. Let idlValue be pair’s value. + // 3. Let key be the result of converting idlKey to an + // ECMAScript value. + // 4. Let value be the result of converting idlValue to + // an ECMAScript value. + // 5. Let array be ! ArrayCreate(2). + // 6. Call ! CreateDataProperty(array, "0", key). + // 7. Call ! CreateDataProperty(array, "1", value). + // 8. result is array. + result = pair + break + } + } + + // 2. Return CreateIterResultObject(result, false). + return { value: result, done: false } } -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; + +/** + * @see https://fetch.spec.whatwg.org/#body-fully-read + */ +async function fullyReadBody (body, processBody, processBodyError) { + // 1. If taskDestination is null, then set taskDestination to + // the result of starting a new parallel queue. + + // 2. Let successSteps given a byte sequence bytes be to queue a + // fetch task to run processBody given bytes, with taskDestination. + const successSteps = processBody + + // 3. Let errorSteps be to queue a fetch task to run processBodyError, + // with taskDestination. + const errorSteps = processBodyError + + // 4. Let reader be the result of getting a reader for body’s stream. + // If that threw an exception, then run errorSteps with that + // exception and return. + let reader + + try { + reader = body.stream.getReader() + } catch (e) { + errorSteps(e) + return } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; + + // 5. Read all bytes from reader, given successSteps and errorSteps. + try { + const result = await readAllBytes(reader) + successSteps(result) + } catch (e) { + errorSteps(e) } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } +} + +/** @type {ReadableStream} */ +let ReadableStream = globalThis.ReadableStream + +function isReadableStreamLike (stream) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) } -}); -// exposed for testing purposes only. 
-Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._readableState.length; + return stream instanceof ReadableStream || ( + stream[Symbol.toStringTag] === 'ReadableStream' && + typeof stream.tee === 'function' + ) +} + +const MAXIMUM_ARGUMENT_LENGTH = 65535 + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-decode + * @param {number[]|Uint8Array} input + */ +function isomorphicDecode (input) { + // 1. To isomorphic decode a byte sequence input, return a string whose code point + // length is equal to input’s length and whose code points have the same values + // as the values of input’s bytes, in the same order. + + if (input.length < MAXIMUM_ARGUMENT_LENGTH) { + return String.fromCharCode(...input) } -}); -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); + return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +} + +/** + * @param {ReadableStreamController} controller + */ +function readableStreamClose (controller) { + try { + controller.close() + } catch (err) { + // TODO: add comment explaining why this error occurs. + if (!err.message.includes('Controller is already closed')) { + throw err + } } - return ret; } -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-encode + * @param {string} input + */ +function isomorphicEncode (input) { + // 1. Assert: input contains no code points greater than U+00FF. + for (let i = 0; i < input.length; i++) { + assert(input.charCodeAt(i) <= 0xFF) } + + // 2. Return a byte sequence whose length is equal to input’s code + // point length and whose bytes have the same values as the + // values of input’s code points, in the same order + return input } -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); - // Check that we didn't get one last unshift. 
- if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - const wState = stream._writableState; - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); - } +/** + * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes + * @see https://streams.spec.whatwg.org/#read-loop + * @param {ReadableStreamDefaultReader} reader + */ +async function readAllBytes (reader) { + const bytes = [] + let byteLength = 0 + + while (true) { + const { done, value: chunk } = await reader.read() + + if (done) { + // 1. Call successSteps with bytes. + return Buffer.concat(bytes, byteLength) + } + + // 1. If chunk is not a Uint8Array object, call failureSteps + // with a TypeError and abort these steps. + if (!isUint8Array(chunk)) { + throw new TypeError('Received non-Uint8Array chunk') } + + // 2. Append the bytes represented by chunk to bytes. + bytes.push(chunk) + byteLength += chunk.length + + // 3. Read-loop given reader, bytes, successSteps, and failureSteps. } } -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = __nccwpck_require__(9082); - } - return from(Readable, iterable, opts); - }; + +/** + * @see https://fetch.spec.whatwg.org/#is-local + * @param {URL} url + */ +function urlIsLocal (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' } -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; + +/** + * @param {string|URL} url + */ +function urlHasHttpsScheme (url) { + if (typeof url === 'string') { + return url.startsWith('https:') } - return -1; + + return url.protocol === 'https:' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-scheme + * @param {URL} url + */ +function urlIsHttpHttpsScheme (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'http:' || protocol === 'https:' +} + +/** + * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. 
+ */ +const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) + +module.exports = { + isAborted, + isCancelled, + createDeferredPromise, + ReadableStreamFrom, + toUSVString, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + coarsenedSharedCurrentTime, + determineRequestsReferrer, + makePolicyContainer, + clonePolicyContainer, + appendFetchMetadata, + appendRequestOriginHeader, + TAOCheck, + corsCheck, + crossOriginResourcePolicyCheck, + createOpaqueTimingInfo, + setRequestReferrerPolicyOnRedirect, + isValidHTTPToken, + requestBadPort, + requestCurrentURL, + responseURL, + responseLocationURL, + isBlobLike, + isURLPotentiallyTrustworthy, + isValidReasonPhrase, + sameOrigin, + normalizeMethod, + serializeJavascriptValueToJSONString, + makeIterator, + isValidHeaderName, + isValidHeaderValue, + hasOwn, + isErrorLike, + fullyReadBody, + bytesMatch, + isReadableStreamLike, + readableStreamClose, + isomorphicEncode, + isomorphicDecode, + urlIsLocal, + urlHasHttpsScheme, + urlIsHttpHttpsScheme, + readAllBytes, + normalizeMethodRecord } + /***/ }), -/***/ 4415: +/***/ 1744: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. 
If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. +const { types } = __nccwpck_require__(3837) +const { hasOwn, toUSVString } = __nccwpck_require__(2538) +/** @type {import('../../types/webidl').Webidl} */ +const webidl = {} +webidl.converters = {} +webidl.util = {} +webidl.errors = {} -module.exports = Transform; -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; -const Duplex = __nccwpck_require__(1359); -__nccwpck_require__(4124)(Transform, Duplex); -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } - ts.writechunk = null; - ts.writecb = null; - if (data != null) - // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } +webidl.errors.exception = function (message) { + return new TypeError(`${message.header}: ${message.message}`) } -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; +webidl.errors.conversionFailed = function (context) { + const plural = context.types.length === 1 ? '' : ' one of' + const message = + `${context.argument} could not be converted to` + + `${plural}: ${context.types.join(', ')}.` - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } + return webidl.errors.exception({ + header: context.prefix, + message + }) +} - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); +webidl.errors.invalidArgument = function (context) { + return webidl.errors.exception({ + header: context.prefix, + message: `"${context.value}" is an invalid ${context.type}.` + }) } -function prefinish() { - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush((er, data) => { - done(this, er, data); - }); + +// https://webidl.spec.whatwg.org/#implements +webidl.brandCheck = function (V, I, opts = undefined) { + if (opts?.strict !== false && !(V instanceof I)) { + throw new TypeError('Illegal invocation') } else { - done(this, null, null); + return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] } } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); +webidl.argumentLengthCheck = function ({ length }, min, ctx) { + if (length < min) { + throw webidl.errors.exception({ + message: `${min} argument${min !== 1 ? 's' : ''} required, ` + + `but${length ? ' only' : ''} ${length} found.`, + ...ctx + }) } -}; +} -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. 
- ts.needTransform = true; - } -}; -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, err2 => { - cb(err2); - }); -}; -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) - // single equals check for both `null` and `undefined` - stream.push(data); +webidl.illegalConstructor = function () { + throw webidl.errors.exception({ + header: 'TypeError', + message: 'Illegal constructor' + }) +} - // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); +// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +webidl.util.Type = function (V) { + switch (typeof V) { + case 'undefined': return 'Undefined' + case 'boolean': return 'Boolean' + case 'string': return 'String' + case 'symbol': return 'Symbol' + case 'number': return 'Number' + case 'bigint': return 'BigInt' + case 'function': + case 'object': { + if (V === null) { + return 'Null' + } + + return 'Object' + } + } } -/***/ }), +// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint +webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { + let upperBound + let lowerBound -/***/ 6993: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 1. If bitLength is 64, then: + if (bitLength === 64) { + // 1. Let upperBound be 2^53 − 1. + upperBound = Math.pow(2, 53) - 1 -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // 2. If signedness is "unsigned", then let lowerBound be 0. + if (signedness === 'unsigned') { + lowerBound = 0 + } else { + // 3. Otherwise let lowerBound be −2^53 + 1. + lowerBound = Math.pow(-2, 53) + 1 + } + } else if (signedness === 'unsigned') { + // 2. Otherwise, if signedness is "unsigned", then: -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. + // 1. Let lowerBound be 0. + lowerBound = 0 + // 2. Let upperBound be 2^bitLength − 1. + upperBound = Math.pow(2, bitLength) - 1 + } else { + // 3. Otherwise: + // 1. Let lowerBound be -2^bitLength − 1. 
+ lowerBound = Math.pow(-2, bitLength) - 1 -module.exports = Writable; + // 2. Let upperBound be 2^bitLength − 1 − 1. + upperBound = Math.pow(2, bitLength - 1) - 1 + } -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} + // 4. Let x be ? ToNumber(V). + let x = Number(V) -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - this.next = null; - this.entry = null; - this.finish = () => { - onCorkedFinish(this, state); - }; -} -/* */ + // 5. If x is −0, then set x to +0. + if (x === 0) { + x = 0 + } -/**/ -var Duplex; -/**/ + // 6. If the conversion is to an IDL type associated + // with the [EnforceRange] extended attribute, then: + if (opts.enforceRange === true) { + // 1. If x is NaN, +∞, or −∞, then throw a TypeError. + if ( + Number.isNaN(x) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Could not convert ${V} to an integer.` + }) + } -Writable.WritableState = WritableState; + // 2. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) -/**/ -const internalUtil = { - deprecate: __nccwpck_require__(7127) -}; -/**/ + // 3. If x < lowerBound or x > upperBound, then + // throw a TypeError. + if (x < lowerBound || x > upperBound) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` + }) + } -/**/ -var Stream = __nccwpck_require__(2387); -/**/ + // 4. Return x. + return x + } -const Buffer = (__nccwpck_require__(4300).Buffer); -const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} -const destroyImpl = __nccwpck_require__(7049); -const _require = __nccwpck_require__(9948), - getHighWaterMark = _require.getHighWaterMark; -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; -const errorOrDestroy = destroyImpl.errorOrDestroy; -__nccwpck_require__(4124)(Writable, Stream); -function nop() {} -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(1359); - options = options || {}; + // 7. If x is not NaN and the conversion is to an IDL + // type associated with the [Clamp] extended + // attribute, then: + if (!Number.isNaN(x) && opts.clamp === true) { + // 1. Set x to min(max(x, lowerBound), upperBound). + x = Math.min(Math.max(x, lowerBound), upperBound) - // Duplex streams are both readable and writable, but share - // the same options object. 
- // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + // 2. Round x to the nearest integer, choosing the + // even integer if it lies halfway between two, + // and choosing +0 rather than −0. + if (Math.floor(x) % 2 === 0) { + x = Math.floor(x) + } else { + x = Math.ceil(x) + } + + // 3. Return x. + return x + } + + // 8. If x is NaN, +0, +∞, or −∞, then return +0. + if ( + Number.isNaN(x) || + (x === 0 && Object.is(0, x)) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + return 0 + } + + // 9. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 10. Set x to x modulo 2^bitLength. + x = x % Math.pow(2, bitLength) - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, + // then return x − 2^bitLength. + if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { + return x - Math.pow(2, bitLength) + } - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + // 12. Otherwise, return x. + return x +} - // if _final has been called - this.finalCalled = false; +// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart +webidl.util.IntegerPart = function (n) { + // 1. Let r be floor(abs(n)). + const r = Math.floor(Math.abs(n)) - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; + // 2. If n < 0, then return -1 × r. + if (n < 0) { + return -1 * r + } - // has it been destroyed - this.destroyed = false; + // 3. Otherwise, return r. + return r +} - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; +// https://webidl.spec.whatwg.org/#es-sequence +webidl.sequenceConverter = function (converter) { + return (V) => { + // 1. If Type(V) is not Object, throw a TypeError. + if (webidl.util.Type(V) !== 'Object') { + throw webidl.errors.exception({ + header: 'Sequence', + message: `Value of type ${webidl.util.Type(V)} is not an Object.` + }) + } - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; + // 2. Let method be ? GetMethod(V, @@iterator). + /** @type {Generator} */ + const method = V?.[Symbol.iterator]?.() + const seq = [] - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; + // 3. If method is undefined, throw a TypeError. 
+ if ( + method === undefined || + typeof method.next !== 'function' + ) { + throw webidl.errors.exception({ + header: 'Sequence', + message: 'Object is not an iterator.' + }) + } - // a flag to see when we're in the middle of a write. - this.writing = false; + // https://webidl.spec.whatwg.org/#create-sequence-from-iterable + while (true) { + const { done, value } = method.next() - // when true all writes will be buffered until .uncork() call - this.corked = 0; + if (done) { + break + } - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; + seq.push(converter(value)) + } - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; + return seq + } +} - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; +// https://webidl.spec.whatwg.org/#es-to-record +webidl.recordConverter = function (keyConverter, valueConverter) { + return (O) => { + // 1. If Type(O) is not Object, throw a TypeError. + if (webidl.util.Type(O) !== 'Object') { + throw webidl.errors.exception({ + header: 'Record', + message: `Value of type ${webidl.util.Type(O)} is not an Object.` + }) + } - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; + // 2. Let result be a new empty instance of record. + const result = {} - // the amount that is being written when _write is called. - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; + if (!types.isProxy(O)) { + // Object.keys only returns enumerable properties + const keys = Object.keys(O) - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; + for (const key of keys) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } - // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; + // 5. Return result. + return result + } - // Should .destroy() be called after 'finish' (and potentially 'end') - this.autoDestroy = !!options.autoDestroy; + // 3. Let keys be ? O.[[OwnPropertyKeys]](). + const keys = Reflect.ownKeys(O) - // count buffered requests - this.bufferedRequestCount = 0; + // 4. For each key of keys. + for (const key of keys) { + // 1. Let desc be ? O.[[GetOwnProperty]](key). 
+ const desc = Reflect.getOwnPropertyDescriptor(O, key) - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; + // 2. If desc is not undefined and desc.[[Enumerable]] is true: + if (desc?.enumerable) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + } + + // 5. Return result. + return result } - return out; -}; -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); +} -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. -var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; +webidl.interfaceConverter = function (i) { + return (V, opts = {}) => { + if (opts.strict !== false && !(V instanceof i)) { + throw webidl.errors.exception({ + header: i.name, + message: `Expected ${V} to be an instance of ${i.name}.` + }) } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; + + return V + } } -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. +webidl.dictionaryConverter = function (converters) { + return (dictionary) => { + const type = webidl.util.Type(dictionary) + const dict = {} - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. + if (type === 'Null' || type === 'Undefined') { + return dict + } else if (type !== 'Object') { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` + }) + } - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 - const isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); + for (const options of converters) { + const { key, defaultValue, required, converter } = options - // legacy. 
- this.writable = true; - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; + if (required === true) { + if (!hasOwn(dictionary, key)) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Missing required key "${key}".` + }) + } + } + + let value = dictionary[key] + const hasDefault = hasOwn(options, 'defaultValue') + + // Only use defaultValue if value is undefined and + // a defaultValue options was provided. + if (hasDefault && value !== null) { + value = value ?? defaultValue + } + + // A key can be optional and have no default value. + // When this happens, do not perform a conversion, + // and do not assign the key a value. + if (required || hasDefault || value !== undefined) { + value = converter(value) + + if ( + options.allowedValues && + !options.allowedValues.includes(value) + ) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` + }) + } + + dict[key] = value + } + } + + return dict } - Stream.call(this); } -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); - // TODO: defer error events consistently everywhere, not just the cb - errorOrDestroy(stream, er); - process.nextTick(cb, er); +webidl.nullableConverter = function (converter) { + return (V) => { + if (V === null) { + return V + } + + return converter(V) + } } -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. -function validChunk(stream, state, chunk, cb) { - var er; - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); +// https://webidl.spec.whatwg.org/#es-DOMString +webidl.converters.DOMString = function (V, opts = {}) { + // 1. If V is null and the conversion is to an IDL type + // associated with the [LegacyNullToEmptyString] + // extended attribute, then return the DOMString value + // that represents the empty string. + if (V === null && opts.legacyNullToEmptyString) { + return '' } - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; + + // 2. Let x be ? ToString(V). + if (typeof V === 'symbol') { + throw new TypeError('Could not convert argument of type symbol to string.') } - return true; + + // 3. Return the IDL DOMString value that represents the + // same sequence of code units as the one the + // ECMAScript String value x represents. + return String(V) } -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); + +// https://webidl.spec.whatwg.org/#es-ByteString +webidl.converters.ByteString = function (V) { + // 1. Let x be ? ToString(V). + // Note: DOMString converter perform ? 
ToString(V) + const x = webidl.converters.DOMString(V) + + // 2. If the value of any element of x is greater than + // 255, then throw a TypeError. + for (let index = 0; index < x.length; index++) { + if (x.charCodeAt(index) > 255) { + throw new TypeError( + 'Cannot convert argument to a ByteString because the character at ' + + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` + ) + } } - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + + // 3. Return an IDL ByteString value whose length is the + // length of x, and where the value of each element is + // the value of the corresponding element of x. + return x +} + +// https://webidl.spec.whatwg.org/#es-USVString +webidl.converters.USVString = toUSVString + +// https://webidl.spec.whatwg.org/#es-boolean +webidl.converters.boolean = function (V) { + // 1. Let x be the result of computing ToBoolean(V). + const x = Boolean(V) + + // 2. Return the IDL boolean value that is the one that represents + // the same truth value as the ECMAScript Boolean value x. + return x +} + +// https://webidl.spec.whatwg.org/#es-any +webidl.converters.any = function (V) { + return V +} + +// https://webidl.spec.whatwg.org/#es-long-long +webidl.converters['long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "signed"). + const x = webidl.util.ConvertToInt(V, 64, 'signed') + + // 2. Return the IDL long long value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long-long +webidl.converters['unsigned long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). + const x = webidl.util.ConvertToInt(V, 64, 'unsigned') + + // 2. Return the IDL unsigned long long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long +webidl.converters['unsigned long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). + const x = webidl.util.ConvertToInt(V, 32, 'unsigned') + + // 2. Return the IDL unsigned long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-short +webidl.converters['unsigned short'] = function (V, opts) { + // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). + const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) + + // 2. Return the IDL unsigned short value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#idl-ArrayBuffer +webidl.converters.ArrayBuffer = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have an + // [[ArrayBufferData]] internal slot, then throw a + // TypeError. + // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances + // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances + if ( + webidl.util.Type(V) !== 'Object' || + !types.isAnyArrayBuffer(V) + ) { + throw webidl.errors.conversionFailed({ + prefix: `${V}`, + argument: `${V}`, + types: ['ArrayBuffer'] + }) } - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + + // 2. 
If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V) is true, then throw a + // TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) } - return ret; -}; -Writable.prototype.cork = function () { - this._writableState.corked++; -}; -Writable.prototype.uncork = function () { - var state = this._writableState; - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V) is true, then throw a + // TypeError. + // Note: resizable ArrayBuffers are currently a proposal. + + // 4. Return the IDL ArrayBuffer value that is a + // reference to the same object as V. + return V +} + +webidl.converters.TypedArray = function (V, T, opts = {}) { + // 1. Let T be the IDL type V is being converted to. + + // 2. If Type(V) is not Object, or V does not have a + // [[TypedArrayName]] internal slot with a value + // equal to T’s name, then throw a TypeError. + if ( + webidl.util.Type(V) !== 'Object' || + !types.isTypedArray(V) || + V.constructor.name !== T.name + ) { + throw webidl.errors.conversionFailed({ + prefix: `${T.name}`, + argument: `${V}`, + types: [T.name] + }) } -}; -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); + + // 3. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) } -}); -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); + + // 4. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable array buffers are currently a proposal + + // 5. Return the IDL value of type T that is a reference + // to the same object as V. + return V +} + +webidl.converters.DataView = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have a + // [[DataView]] internal slot, then throw a TypeError. 
+ if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { + throw webidl.errors.exception({ + header: 'DataView', + message: 'Object is not a DataView.' + }) } - return chunk; + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, + // then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable ArrayBuffers are currently a proposal + + // 4. Return the IDL DataView value that is a reference + // to the same object as V. + return V } -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; + +// https://webidl.spec.whatwg.org/#BufferSource +webidl.converters.BufferSource = function (V, opts = {}) { + if (types.isAnyArrayBuffer(V)) { + return webidl.converters.ArrayBuffer(V, opts) } -}); -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } + if (types.isTypedArray(V)) { + return webidl.converters.TypedArray(V, V.constructor) } - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk, - encoding, - isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); + + if (types.isDataView(V)) { + return webidl.converters.DataView(V, opts) } - return ret; + + throw new TypeError(`Could not convert ${V} to a BufferSource.`) } -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.ByteString +) + +webidl.converters['sequence>'] = webidl.sequenceConverter( + webidl.converters['sequence'] +) + +webidl.converters['record'] = webidl.recordConverter( + webidl.converters.ByteString, + webidl.converters.ByteString +) + +module.exports = { + webidl } -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - process.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - process.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); + + +/***/ }), + +/***/ 4854: +/***/ ((module) => { + +"use strict"; + + +/** + * @see https://encoding.spec.whatwg.org/#concept-encoding-get + * @param {string|undefined} label + */ +function getEncoding (label) { + if (!label) { + return 'failure' + } + + // 1. Remove any leading and trailing ASCII whitespace from label. + // 2. If label is an ASCII case-insensitive match for any of the + // labels listed in the table below, then return the + // corresponding encoding; otherwise return failure. 
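  // Editor's note: the following lines are an illustrative annotation only and are
  // not part of the bundled undici source being added by this patch. Per the logic
  // below, label matching is whitespace-tolerant and case-insensitive, and any
  // unrecognised label falls through to 'failure', e.g. (hypothetical calls):
  //   getEncoding('  UTF8 ')     // -> 'UTF-8'
  //   getEncoding('latin1')      // -> 'windows-1252'
  //   getEncoding('not-a-label') // -> 'failure'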
+ switch (label.trim().toLowerCase()) { + case 'unicode-1-1-utf-8': + case 'unicode11utf8': + case 'unicode20utf8': + case 'utf-8': + case 'utf8': + case 'x-unicode20utf8': + return 'UTF-8' + case '866': + case 'cp866': + case 'csibm866': + case 'ibm866': + return 'IBM866' + case 'csisolatin2': + case 'iso-8859-2': + case 'iso-ir-101': + case 'iso8859-2': + case 'iso88592': + case 'iso_8859-2': + case 'iso_8859-2:1987': + case 'l2': + case 'latin2': + return 'ISO-8859-2' + case 'csisolatin3': + case 'iso-8859-3': + case 'iso-ir-109': + case 'iso8859-3': + case 'iso88593': + case 'iso_8859-3': + case 'iso_8859-3:1988': + case 'l3': + case 'latin3': + return 'ISO-8859-3' + case 'csisolatin4': + case 'iso-8859-4': + case 'iso-ir-110': + case 'iso8859-4': + case 'iso88594': + case 'iso_8859-4': + case 'iso_8859-4:1988': + case 'l4': + case 'latin4': + return 'ISO-8859-4' + case 'csisolatincyrillic': + case 'cyrillic': + case 'iso-8859-5': + case 'iso-ir-144': + case 'iso8859-5': + case 'iso88595': + case 'iso_8859-5': + case 'iso_8859-5:1988': + return 'ISO-8859-5' + case 'arabic': + case 'asmo-708': + case 'csiso88596e': + case 'csiso88596i': + case 'csisolatinarabic': + case 'ecma-114': + case 'iso-8859-6': + case 'iso-8859-6-e': + case 'iso-8859-6-i': + case 'iso-ir-127': + case 'iso8859-6': + case 'iso88596': + case 'iso_8859-6': + case 'iso_8859-6:1987': + return 'ISO-8859-6' + case 'csisolatingreek': + case 'ecma-118': + case 'elot_928': + case 'greek': + case 'greek8': + case 'iso-8859-7': + case 'iso-ir-126': + case 'iso8859-7': + case 'iso88597': + case 'iso_8859-7': + case 'iso_8859-7:1987': + case 'sun_eu_greek': + return 'ISO-8859-7' + case 'csiso88598e': + case 'csisolatinhebrew': + case 'hebrew': + case 'iso-8859-8': + case 'iso-8859-8-e': + case 'iso-ir-138': + case 'iso8859-8': + case 'iso88598': + case 'iso_8859-8': + case 'iso_8859-8:1988': + case 'visual': + return 'ISO-8859-8' + case 'csiso88598i': + case 'iso-8859-8-i': + case 'logical': + return 'ISO-8859-8-I' + case 'csisolatin6': + case 'iso-8859-10': + case 'iso-ir-157': + case 'iso8859-10': + case 'iso885910': + case 'l6': + case 'latin6': + return 'ISO-8859-10' + case 'iso-8859-13': + case 'iso8859-13': + case 'iso885913': + return 'ISO-8859-13' + case 'iso-8859-14': + case 'iso8859-14': + case 'iso885914': + return 'ISO-8859-14' + case 'csisolatin9': + case 'iso-8859-15': + case 'iso8859-15': + case 'iso885915': + case 'iso_8859-15': + case 'l9': + return 'ISO-8859-15' + case 'iso-8859-16': + return 'ISO-8859-16' + case 'cskoi8r': + case 'koi': + case 'koi8': + case 'koi8-r': + case 'koi8_r': + return 'KOI8-R' + case 'koi8-ru': + case 'koi8-u': + return 'KOI8-U' + case 'csmacintosh': + case 'mac': + case 'macintosh': + case 'x-mac-roman': + return 'macintosh' + case 'iso-8859-11': + case 'iso8859-11': + case 'iso885911': + case 'tis-620': + case 'windows-874': + return 'windows-874' + case 'cp1250': + case 'windows-1250': + case 'x-cp1250': + return 'windows-1250' + case 'cp1251': + case 'windows-1251': + case 'x-cp1251': + return 'windows-1251' + case 'ansi_x3.4-1968': + case 'ascii': + case 'cp1252': + case 'cp819': + case 'csisolatin1': + case 'ibm819': + case 'iso-8859-1': + case 'iso-ir-100': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'iso_8859-1:1987': + case 'l1': + case 'latin1': + case 'us-ascii': + case 'windows-1252': + case 'x-cp1252': + return 'windows-1252' + case 'cp1253': + case 'windows-1253': + case 'x-cp1253': + return 'windows-1253' + case 'cp1254': + case 'csisolatin5': + 
case 'iso-8859-9': + case 'iso-ir-148': + case 'iso8859-9': + case 'iso88599': + case 'iso_8859-9': + case 'iso_8859-9:1989': + case 'l5': + case 'latin5': + case 'windows-1254': + case 'x-cp1254': + return 'windows-1254' + case 'cp1255': + case 'windows-1255': + case 'x-cp1255': + return 'windows-1255' + case 'cp1256': + case 'windows-1256': + case 'x-cp1256': + return 'windows-1256' + case 'cp1257': + case 'windows-1257': + case 'x-cp1257': + return 'windows-1257' + case 'cp1258': + case 'windows-1258': + case 'x-cp1258': + return 'windows-1258' + case 'x-mac-cyrillic': + case 'x-mac-ukrainian': + return 'x-mac-cyrillic' + case 'chinese': + case 'csgb2312': + case 'csiso58gb231280': + case 'gb2312': + case 'gb_2312': + case 'gb_2312-80': + case 'gbk': + case 'iso-ir-58': + case 'x-gbk': + return 'GBK' + case 'gb18030': + return 'gb18030' + case 'big5': + case 'big5-hkscs': + case 'cn-big5': + case 'csbig5': + case 'x-x-big5': + return 'Big5' + case 'cseucpkdfmtjapanese': + case 'euc-jp': + case 'x-euc-jp': + return 'EUC-JP' + case 'csiso2022jp': + case 'iso-2022-jp': + return 'ISO-2022-JP' + case 'csshiftjis': + case 'ms932': + case 'ms_kanji': + case 'shift-jis': + case 'shift_jis': + case 'sjis': + case 'windows-31j': + case 'x-sjis': + return 'Shift_JIS' + case 'cseuckr': + case 'csksc56011987': + case 'euc-kr': + case 'iso-ir-149': + case 'korean': + case 'ks_c_5601-1987': + case 'ks_c_5601-1989': + case 'ksc5601': + case 'ksc_5601': + case 'windows-949': + return 'EUC-KR' + case 'csiso2022kr': + case 'hz-gb-2312': + case 'iso-2022-cn': + case 'iso-2022-cn-ext': + case 'iso-2022-kr': + case 'replacement': + return 'replacement' + case 'unicodefffe': + case 'utf-16be': + return 'UTF-16BE' + case 'csunicode': + case 'iso-10646-ucs-2': + case 'ucs-2': + case 'unicode': + case 'unicodefeff': + case 'utf-16': + case 'utf-16le': + return 'UTF-16LE' + case 'x-user-defined': + return 'x-user-defined' + default: return 'failure' } } -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; + +module.exports = { + getEncoding } -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - if (sync) { - process.nextTick(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); + + +/***/ }), + +/***/ 1446: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} = __nccwpck_require__(7530) +const { + kState, + kError, + kResult, + kEvents, + kAborted +} = __nccwpck_require__(9054) +const { webidl } = __nccwpck_require__(1744) +const { kEnumerableProperty } = __nccwpck_require__(3983) + +class FileReader extends EventTarget { + constructor () { + super() + + this[kState] = 'empty' + this[kResult] = null + this[kError] = null + this[kEvents] = { + loadend: null, + error: null, + abort: null, + load: null, + progress: null, + loadstart: null } } -} -function afterWrite(stream, state, finished, cb) { - if (!finished) 
onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer + * @param {import('buffer').Blob} blob + */ + readAsArrayBuffer (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsArrayBuffer(blob) method, when invoked, + // must initiate a read operation for blob with ArrayBuffer. + readOperation(this, blob, 'ArrayBuffer') } -} -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); + /** + * @see https://w3c.github.io/FileAPI/#readAsBinaryString + * @param {import('buffer').Blob} blob + */ + readAsBinaryString (blob) { + webidl.brandCheck(this, FileReader) - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; - } - } - if (entry === null) state.lastBufferedRequest = null; + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsBinaryString(blob) method, when invoked, + // must initiate a read operation for blob with BinaryString. 
+ readOperation(this, blob, 'BinaryString') } - state.bufferedRequest = entry; - state.bufferProcessing = false; -} -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); -}; -Writable.prototype._writev = null; -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + + /** + * @see https://w3c.github.io/FileAPI/#readAsDataText + * @param {import('buffer').Blob} blob + * @param {string?} encoding + */ + readAsText (blob, encoding = undefined) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + if (encoding !== undefined) { + encoding = webidl.converters.DOMString(encoding) + } + + // The readAsText(blob, encoding) method, when invoked, + // must initiate a read operation for blob with Text and encoding. + readOperation(this, blob, 'Text', encoding) } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL + * @param {import('buffer').Blob} blob + */ + readAsDataURL (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsDataURL(blob) method, when invoked, must + // initiate a read operation for blob with DataURL. + readOperation(this, blob, 'DataURL') } - // ignore unnecessary end() calls. - if (!state.ending) endWritable(this, state, cb); - return this; -}; -Object.defineProperty(Writable.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._writableState.length; - } -}); -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} -function callFinal(stream, state) { - stream._final(err => { - state.pendingcb--; - if (err) { - errorOrDestroy(stream, err); + /** + * @see https://w3c.github.io/FileAPI/#dfn-abort + */ + abort () { + // 1. If this's state is "empty" or if this's state is + // "done" set this's result to null and terminate + // this algorithm. + if (this[kState] === 'empty' || this[kState] === 'done') { + this[kResult] = null + return } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.pendingcb++; - state.finalCalled = true; - process.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); + + // 2. If this's state is "loading" set this's state to + // "done" and set this's result to null. + if (this[kState] === 'loading') { + this[kState] = 'done' + this[kResult] = null + } + + // 3. If there are any tasks from this on the file reading + // task source in an affiliated task queue, then remove + // those tasks from that task queue. + this[kAborted] = true + + // 4. 
Terminate the algorithm for the read method being processed. + // TODO + + // 5. Fire a progress event called abort at this. + fireAProgressEvent('abort', this) + + // 6. If this's state is not "loading", fire a progress + // event called loadend at this. + if (this[kState] !== 'loading') { + fireAProgressEvent('loadend', this) } } -} -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well - const rState = stream._readableState; - if (!rState || rState.autoDestroy && rState.endEmitted) { - stream.destroy(); - } - } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate + */ + get readyState () { + webidl.brandCheck(this, FileReader) + + switch (this[kState]) { + case 'empty': return this.EMPTY + case 'loading': return this.LOADING + case 'done': return this.DONE } } - return need; -} -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-result + */ + get result () { + webidl.brandCheck(this, FileReader) + + // The result attribute’s getter, when invoked, must return + // this's result. + return this[kResult] } - state.ended = true; - stream.writable = false; -} -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-error + */ + get error () { + webidl.brandCheck(this, FileReader) + + // The error attribute’s getter, when invoked, must return + // this's error. + return this[kError] } - // reuse the free corkReq. 
- state.corkedRequestsFree.next = corkReq; -} -Object.defineProperty(Writable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; + get onloadend () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadend + } + + set onloadend (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadend) { + this.removeEventListener('loadend', this[kEvents].loadend) } - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; + if (typeof fn === 'function') { + this[kEvents].loadend = fn + this.addEventListener('loadend', fn) + } else { + this[kEvents].loadend = null + } } -}); -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - cb(err); -}; -/***/ }), + get onerror () { + webidl.brandCheck(this, FileReader) -/***/ 3306: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return this[kEvents].error + } -"use strict"; + set onerror (fn) { + webidl.brandCheck(this, FileReader) + if (this[kEvents].error) { + this.removeEventListener('error', this[kEvents].error) + } -const finished = __nccwpck_require__(6080); -const kLastResolve = Symbol('lastResolve'); -const kLastReject = Symbol('lastReject'); -const kError = Symbol('error'); -const kEnded = Symbol('ended'); -const kLastPromise = Symbol('lastPromise'); -const kHandlePromise = Symbol('handlePromise'); -const kStream = Symbol('stream'); -function createIterResult(value, done) { - return { - value, - done - }; -} -function readAndResolve(iter) { - const resolve = iter[kLastResolve]; - if (resolve !== null) { - const data = iter[kStream].read(); - // we defer if data is null - // we can be expecting either 'end' or - // 'error' - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); + if (typeof fn === 'function') { + this[kEvents].error = fn + this.addEventListener('error', fn) + } else { + this[kEvents].error = null } } -} -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); -} -function wrapForNext(lastPromise, iter) { - return (resolve, reject) => { - lastPromise.then(() => { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } - iter[kHandlePromise](resolve, reject); - }, reject); - }; -} -const AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ - get stream() { - return this[kStream]; - }, - next() { - // if we have detected an error in the meanwhile - // reject straight away - const error = this[kError]; - if (error !== null) { - return Promise.reject(error); + + get onloadstart () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadstart + } + + set onloadstart (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadstart) { + this.removeEventListener('loadstart', this[kEvents].loadstart) } - if (this[kEnded]) { - return 
Promise.resolve(createIterResult(undefined, true)); + + if (typeof fn === 'function') { + this[kEvents].loadstart = fn + this.addEventListener('loadstart', fn) + } else { + this[kEvents].loadstart = null } - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. - return new Promise((resolve, reject) => { - process.nextTick(() => { - if (this[kError]) { - reject(this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); + } + + get onprogress () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].progress + } + + set onprogress (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].progress) { + this.removeEventListener('progress', this[kEvents].progress) } - // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - const lastPromise = this[kLastPromise]; - let promise; - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); + if (typeof fn === 'function') { + this[kEvents].progress = fn + this.addEventListener('progress', fn) } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - const data = this[kStream].read(); - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } - promise = new Promise(this[kHandlePromise]); + this[kEvents].progress = null } - this[kLastPromise] = promise; - return promise; - }, - [Symbol.asyncIterator]() { - return this; - }, - return() { - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise((resolve, reject) => { - this[kStream].destroy(null, err => { - if (err) { - reject(err); - return; - } - resolve(createIterResult(undefined, true)); - }); - }); } -}, AsyncIteratorPrototype); -const createReadableStreamAsyncIterator = stream => { - const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, { - [kStream]: { - value: stream, - writable: true - }, - [kLastResolve]: { - value: null, - writable: true - }, - [kLastReject]: { - value: null, - writable: true - }, - [kError]: { - value: null, - writable: true - }, - [kEnded]: { - value: stream._readableState.endEmitted, - writable: true - }, - // the function passed to new Promise - // is cached so we avoid allocating a new - // closure at every run - [kHandlePromise]: { - value: (resolve, reject) => { - const data = iterator[kStream].read(); - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true + + get onload () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].load + } + + set onload (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].load) { + this.removeEventListener('load', this[kEvents].load) } - }); - iterator[kLastPromise] = null; - finished(stream, err => { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - const reject = iterator[kLastReject]; - // reject if we are waiting for data in the Promise - // returned by next() and store the error - if (reject !== null) { - 
iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } - iterator[kError] = err; - return; + + if (typeof fn === 'function') { + this[kEvents].load = fn + this.addEventListener('load', fn) + } else { + this[kEvents].load = null } - const resolve = iterator[kLastResolve]; - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); + } + + get onabort () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].abort + } + + set onabort (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].abort) { + this.removeEventListener('abort', this[kEvents].abort) } - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; -}; -module.exports = createReadableStreamAsyncIterator; + + if (typeof fn === 'function') { + this[kEvents].abort = fn + this.addEventListener('abort', fn) + } else { + this[kEvents].abort = null + } + } +} + +// https://w3c.github.io/FileAPI/#dom-filereader-empty +FileReader.EMPTY = FileReader.prototype.EMPTY = 0 +// https://w3c.github.io/FileAPI/#dom-filereader-loading +FileReader.LOADING = FileReader.prototype.LOADING = 1 +// https://w3c.github.io/FileAPI/#dom-filereader-done +FileReader.DONE = FileReader.prototype.DONE = 2 + +Object.defineProperties(FileReader.prototype, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors, + readAsArrayBuffer: kEnumerableProperty, + readAsBinaryString: kEnumerableProperty, + readAsText: kEnumerableProperty, + readAsDataURL: kEnumerableProperty, + abort: kEnumerableProperty, + readyState: kEnumerableProperty, + result: kEnumerableProperty, + error: kEnumerableProperty, + onloadstart: kEnumerableProperty, + onprogress: kEnumerableProperty, + onload: kEnumerableProperty, + onabort: kEnumerableProperty, + onerror: kEnumerableProperty, + onloadend: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'FileReader', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(FileReader, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors +}) + +module.exports = { + FileReader +} + /***/ }), -/***/ 6522: +/***/ 5504: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -const _require = __nccwpck_require__(4300), - Buffer = _require.Buffer; -const _require2 = __nccwpck_require__(3837), - inspect = _require2.inspect; -const custom = inspect && inspect.custom || 'inspect'; -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} -module.exports = class BufferList { - constructor() { - this.head = null; - this.tail = null; - this.length = 0; - } - push(v) { - const entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - unshift(v) { - const entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - shift() { - if (this.length === 0) return; - const ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; +const { webidl } = __nccwpck_require__(1744) + +const kState = Symbol('ProgressEvent state') + +/** + * @see https://xhr.spec.whatwg.org/#progressevent + */ +class ProgressEvent extends Event { + constructor (type, eventInitDict = {}) { + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) + + super(type, eventInitDict) + + this[kState] = { + lengthComputable: eventInitDict.lengthComputable, + loaded: eventInitDict.loaded, + total: eventInitDict.total + } } - clear() { - this.head = this.tail = null; - this.length = 0; + + get lengthComputable () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].lengthComputable } - join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) ret += s + p.data; - return ret; + + get loaded () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].loaded } - concat(n) { - if (this.length === 0) return Buffer.alloc(0); - const ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; + + get total () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].total } +} - // Consumes a specified amount of bytes or characters from the buffered data. - consume(n, hasStrings) { - var ret; - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. 
- ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } - return ret; +webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ + { + key: 'lengthComputable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'loaded', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'total', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false } - first() { - return this.head.data; +]) + +module.exports = { + ProgressEvent +} + + +/***/ }), + +/***/ 9054: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kState: Symbol('FileReader state'), + kResult: Symbol('FileReader result'), + kError: Symbol('FileReader error'), + kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), + kEvents: Symbol('FileReader events'), + kAborted: Symbol('FileReader aborted') +} + + +/***/ }), + +/***/ 7530: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + kState, + kError, + kResult, + kAborted, + kLastProgressEventFired +} = __nccwpck_require__(9054) +const { ProgressEvent } = __nccwpck_require__(5504) +const { getEncoding } = __nccwpck_require__(4854) +const { DOMException } = __nccwpck_require__(1037) +const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(685) +const { types } = __nccwpck_require__(3837) +const { StringDecoder } = __nccwpck_require__(1576) +const { btoa } = __nccwpck_require__(4300) + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +/** + * @see https://w3c.github.io/FileAPI/#readOperation + * @param {import('./filereader').FileReader} fr + * @param {import('buffer').Blob} blob + * @param {string} type + * @param {string?} encodingName + */ +function readOperation (fr, blob, type, encodingName) { + // 1. If fr’s state is "loading", throw an InvalidStateError + // DOMException. + if (fr[kState] === 'loading') { + throw new DOMException('Invalid state', 'InvalidStateError') } - // Consumes a specified amount of characters from the buffered data. - _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - const str = p.data; - const nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); + // 2. Set fr’s state to "loading". + fr[kState] = 'loading' + + // 3. Set fr’s result to null. + fr[kResult] = null + + // 4. Set fr’s error to null. + fr[kError] = null + + // 5. Let stream be the result of calling get stream on blob. + /** @type {import('stream/web').ReadableStream} */ + const stream = blob.stream() + + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader() + + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const bytes = [] + + // 8. Let chunkPromise be the result of reading a chunk from + // stream with reader. 
+ let chunkPromise = reader.read() + + // 9. Let isFirstChunk be true. + let isFirstChunk = true + + // 10. In parallel, while true: + // Note: "In parallel" just means non-blocking + // Note 2: readOperation itself cannot be async as double + // reading the body would then reject the promise, instead + // of throwing an error. + ;(async () => { + while (!fr[kAborted]) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const { done, value } = await chunkPromise + + // 2. If chunkPromise is fulfilled, and isFirstChunk is + // true, queue a task to fire a progress event called + // loadstart at fr. + if (isFirstChunk && !fr[kAborted]) { + queueMicrotask(() => { + fireAProgressEvent('loadstart', fr) + }) + } + + // 3. Set isFirstChunk to false. + isFirstChunk = false + + // 4. If chunkPromise is fulfilled with an object whose + // done property is false and whose value property is + // a Uint8Array object, run these steps: + if (!done && types.isUint8Array(value)) { + // 1. Let bs be the byte sequence represented by the + // Uint8Array object. + + // 2. Append bs to bytes. + bytes.push(value) + + // 3. If roughly 50ms have passed since these steps + // were last invoked, queue a task to fire a + // progress event called progress at fr. + if ( + ( + fr[kLastProgressEventFired] === undefined || + Date.now() - fr[kLastProgressEventFired] >= 50 + ) && + !fr[kAborted] + ) { + fr[kLastProgressEventFired] = Date.now() + queueMicrotask(() => { + fireAProgressEvent('progress', fr) + }) + } + + // 4. Set chunkPromise to the result of reading a + // chunk from stream with reader. + chunkPromise = reader.read() + } else if (done) { + // 5. Otherwise, if chunkPromise is fulfilled with an + // object whose done property is true, queue a task + // to run the following steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Let result be the result of package data given + // bytes, type, blob’s type, and encodingName. + try { + const result = packageData(bytes, type, blob.type, encodingName) + + // 4. Else: + + if (fr[kAborted]) { + return + } + + // 1. Set fr’s result to result. + fr[kResult] = result + + // 2. Fire a progress event called load at the fr. + fireAProgressEvent('load', fr) + } catch (error) { + // 3. If package data threw an exception error: + + // 1. Set fr’s error to error. + fr[kError] = error + + // 2. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + } + + // 5. If fr’s state is not "loading", fire a progress + // event called loadend at the fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break } - break; + } catch (error) { + if (fr[kAborted]) { + return + } + + // 6. Otherwise, if chunkPromise is rejected with an + // error error, queue a task to run the following + // steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Set fr’s error to error. + fr[kError] = error + + // 3. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + + // 4. If fr’s state is not "loading", fire a progress + // event called loadend at fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break } - ++c; } - this.length -= c; - return ret; - } + })() +} - // Consumes a specified amount of bytes from the buffered data. 
- _getBuffer(n) { - const ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - const buf = p.data; - const nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - break; +/** + * @see https://w3c.github.io/FileAPI/#fire-a-progress-event + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e The name of the event + * @param {import('./filereader').FileReader} reader + */ +function fireAProgressEvent (e, reader) { + // The progress event e does not bubble. e.bubbles must be false + // The progress event e is NOT cancelable. e.cancelable must be false + const event = new ProgressEvent(e, { + bubbles: false, + cancelable: false + }) + + reader.dispatchEvent(event) +} + +/** + * @see https://w3c.github.io/FileAPI/#blob-package-data + * @param {Uint8Array[]} bytes + * @param {string} type + * @param {string?} mimeType + * @param {string?} encodingName + */ +function packageData (bytes, type, mimeType, encodingName) { + // 1. A Blob has an associated package data algorithm, given + // bytes, a type, a optional mimeType, and a optional + // encodingName, which switches on type and runs the + // associated steps: + + switch (type) { + case 'DataURL': { + // 1. Return bytes as a DataURL [RFC2397] subject to + // the considerations below: + // * Use mimeType as part of the Data URL if it is + // available in keeping with the Data URL + // specification [RFC2397]. + // * If mimeType is not available return a Data URL + // without a media-type. [RFC2397]. + + // https://datatracker.ietf.org/doc/html/rfc2397#section-3 + // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + // mediatype := [ type "/" subtype ] *( ";" parameter ) + // data := *urlchar + // parameter := attribute "=" value + let dataURL = 'data:' + + const parsed = parseMIMEType(mimeType || 'application/octet-stream') + + if (parsed !== 'failure') { + dataURL += serializeAMimeType(parsed) } - ++c; - } - this.length -= c; - return ret; - } - // Make sure the linked list only shows the minimal necessary information. - [custom](_, options) { - return inspect(this, _objectSpread(_objectSpread({}, options), {}, { - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - })); - } -}; + dataURL += ';base64,' -/***/ }), + const decoder = new StringDecoder('latin1') -/***/ 7049: -/***/ ((module) => { + for (const chunk of bytes) { + dataURL += btoa(decoder.write(chunk)) + } -"use strict"; + dataURL += btoa(decoder.end()) + return dataURL + } + case 'Text': { + // 1. Let encoding be failure + let encoding = 'failure' -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - const readableDestroyed = this._readableState && this._readableState.destroyed; - const writableDestroyed = this._writableState && this._writableState.destroyed; - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); + // 2. If the encodingName is present, set encoding to the + // result of getting an encoding from encodingName. 
+ if (encodingName) { + encoding = getEncoding(encodingName) + } + + // 3. If encoding is failure, and mimeType is present: + if (encoding === 'failure' && mimeType) { + // 1. Let type be the result of parse a MIME type + // given mimeType. + const type = parseMIMEType(mimeType) + + // 2. If type is not failure, set encoding to the result + // of getting an encoding from type’s parameters["charset"]. + if (type !== 'failure') { + encoding = getEncoding(type.parameters.get('charset')) + } + } + + // 4. If encoding is failure, then set encoding to UTF-8. + if (encoding === 'failure') { + encoding = 'UTF-8' } + + // 5. Decode bytes using fallback encoding encoding, and + // return the result. + return decode(bytes, encoding) } - return this; - } + case 'ArrayBuffer': { + // Return a new ArrayBuffer whose contents are bytes. + const sequence = combineByteSequences(bytes) - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks + return sequence.buffer + } + case 'BinaryString': { + // Return bytes as a binary string, in which every byte + // is represented by a code unit of equal value [0..255]. + let binaryString = '' - if (this._readableState) { - this._readableState.destroyed = true; - } + const decoder = new StringDecoder('latin1') - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; - } - this._destroy(err || null, err => { - if (!cb && err) { - if (!this._writableState) { - process.nextTick(emitErrorAndCloseNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, this, err); - } else { - process.nextTick(emitCloseNT, this); + for (const chunk of bytes) { + binaryString += decoder.write(chunk) } - } else if (cb) { - process.nextTick(emitCloseNT, this); - cb(err); - } else { - process.nextTick(emitCloseNT, this); + + binaryString += decoder.end() + + return binaryString } - }); - return this; -} -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); -} -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); -} -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; } - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; +} + +/** + * @see https://encoding.spec.whatwg.org/#decode + * @param {Uint8Array[]} ioQueue + * @param {string} encoding + */ +function decode (ioQueue, encoding) { + const bytes = combineByteSequences(ioQueue) + + // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. + const BOMEncoding = BOMSniffing(bytes) + + let slice = 0 + + // 2. If BOMEncoding is non-null: + if (BOMEncoding !== null) { + // 1. Set encoding to BOMEncoding. + encoding = BOMEncoding + + // 2. Read three bytes from ioQueue, if BOMEncoding is + // UTF-8; otherwise read two bytes. + // (Do nothing with those bytes.) + slice = BOMEncoding === 'UTF-8' ? 
3 : 2 } + + // 3. Process a queue with an instance of encoding’s + // decoder, ioQueue, output, and "replacement". + + // 4. Return output. + + const sliced = bytes.slice(slice) + return new TextDecoder(encoding).decode(sliced) } -function emitErrorNT(self, err) { - self.emit('error', err); + +/** + * @see https://encoding.spec.whatwg.org/#bom-sniff + * @param {Uint8Array} ioQueue + */ +function BOMSniffing (ioQueue) { + // 1. Let BOM be the result of peeking 3 bytes from ioQueue, + // converted to a byte sequence. + const [a, b, c] = ioQueue + + // 2. For each of the rows in the table below, starting with + // the first one and going down, if BOM starts with the + // bytes given in the first column, then return the + // encoding given in the cell in the second column of that + // row. Otherwise, return null. + if (a === 0xEF && b === 0xBB && c === 0xBF) { + return 'UTF-8' + } else if (a === 0xFE && b === 0xFF) { + return 'UTF-16BE' + } else if (a === 0xFF && b === 0xFE) { + return 'UTF-16LE' + } + + return null } -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. - const rState = stream._readableState; - const wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +/** + * @param {Uint8Array[]} sequences + */ +function combineByteSequences (sequences) { + const size = sequences.reduce((a, b) => { + return a + b.byteLength + }, 0) + + let offset = 0 + + return sequences.reduce((a, b) => { + a.set(b, offset) + offset += b.byteLength + return a + }, new Uint8Array(size)) } + module.exports = { - destroy, - undestroy, - errorOrDestroy -}; + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} + /***/ }), -/***/ 6080: +/***/ 1892: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). +// We include a version number for the Dispatcher API. In case of breaking changes, +// this version number must be increased to avoid conflicts. 
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1') +const { InvalidArgumentError } = __nccwpck_require__(8045) +const Agent = __nccwpck_require__(7890) -const ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(7214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE); -function once(callback) { - let called = false; - return function () { - if (called) return; - called = true; - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - callback.apply(this, args); - }; -} -function noop() {} -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; +if (getGlobalDispatcher() === undefined) { + setGlobalDispatcher(new Agent()) } -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - let readable = opts.readable || opts.readable !== false && stream.readable; - let writable = opts.writable || opts.writable !== false && stream.writable; - const onlegacyfinish = () => { - if (!stream.writable) onfinish(); - }; - var writableEnded = stream._writableState && stream._writableState.finished; - const onfinish = () => { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; - var readableEnded = stream._readableState && stream._readableState.endEmitted; - const onend = () => { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; - const onerror = err => { - callback.call(stream, err); - }; - const onclose = () => { - let err; - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; - const onrequest = () => { - stream.req.on('finish', onfinish); - }; - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); + +function setGlobalDispatcher (agent) { + if (!agent || typeof agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument agent must implement Agent') } - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; + Object.defineProperty(globalThis, globalDispatcher, { + value: agent, + writable: true, + enumerable: false, + configurable: false + }) } -module.exports = eos; + +function getGlobalDispatcher () { + return globalThis[globalDispatcher] +} + +module.exports = { + setGlobalDispatcher, + getGlobalDispatcher +} + /***/ }), -/***/ 9082: 
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6930: +/***/ ((module) => { "use strict"; -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -const ERR_INVALID_ARG_TYPE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE); -function from(Readable, iterable, opts) { - let iterator; - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - const readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); - // Reading boolean to protect against _read - // being called before last iteration completion. 
- let reading = false; - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; - function next() { - return _next2.apply(this, arguments); +module.exports = class DecoratorHandler { + constructor (handler) { + this.handler = handler } - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - const _yield$iterator$next = yield iterator.next(), - value = _yield$iterator$next.value, - done = _yield$iterator$next.done; - if (done) { - readable.push(null); - } else if (readable.push(yield value)) { - next(); - } else { - reading = false; - } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); + + onConnect (...args) { + return this.handler.onConnect(...args) + } + + onError (...args) { + return this.handler.onError(...args) + } + + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } + + onHeaders (...args) { + return this.handler.onHeaders(...args) + } + + onData (...args) { + return this.handler.onData(...args) + } + + onComplete (...args) { + return this.handler.onComplete(...args) + } + + onBodySent (...args) { + return this.handler.onBodySent(...args) } - return readable; } -module.exports = from; + /***/ }), -/***/ 6989: +/***/ 2860: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). +const util = __nccwpck_require__(3983) +const { kBodyUsed } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { InvalidArgumentError } = __nccwpck_require__(8045) +const EE = __nccwpck_require__(2361) -let eos; -function once(callback) { - let called = false; - return function () { - if (called) return; - called = true; - callback(...arguments); - }; -} -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - let closed = false; - stream.on('close', () => { - closed = true; - }); - if (eos === undefined) eos = __nccwpck_require__(6080); - eos(stream, { - readable: reading, - writable: writing - }, err => { - if (err) return callback(err); - closed = true; - callback(); - }); - let destroyed = false; - return err => { - if (closed) return; - if (destroyed) return; - destroyed = true; +const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] - // request.destroy just do .end - .abort is what we want - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; +const kBody = Symbol('body') + +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false + } + + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] + } } -function call(fn) { - fn(); + +class RedirectHandler { + constructor (dispatch, maxRedirections, opts, handler) { + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive 
number') + } + + util.validateHandler(handler, opts.method, opts.upgrade) + + this.dispatch = dispatch + this.location = null + this.abort = null + this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy + this.maxRedirections = maxRedirections + this.handler = handler + this.history = [] + + if (util.isStream(this.opts.body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? + if (util.bodyLength(this.opts.body) === 0) { + this.opts.body + .on('data', function () { + assert(false) + }) + } + + if (typeof this.opts.body.readableDidRead !== 'boolean') { + this.opts.body[kBodyUsed] = false + EE.prototype.on.call(this.opts.body, 'data', function () { + this[kBodyUsed] = true + }) + } + } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. + this.opts.body = new BodyAsyncIterable(this.opts.body) + } else if ( + this.opts.body && + typeof this.opts.body !== 'string' && + !ArrayBuffer.isView(this.opts.body) && + util.isIterable(this.opts.body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? + this.opts.body = new BodyAsyncIterable(this.opts.body) + } + } + + onConnect (abort) { + this.abort = abort + this.handler.onConnect(abort, { history: this.history }) + } + + onUpgrade (statusCode, headers, socket) { + this.handler.onUpgrade(statusCode, headers, socket) + } + + onError (error) { + this.handler.onError(error) + } + + onHeaders (statusCode, headers, resume, statusText) { + this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) + ? null + : parseLocation(statusCode, headers) + + if (this.opts.origin) { + this.history.push(new URL(this.opts.path, this.opts.origin)) + } + + if (!this.location) { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } + + const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) + const path = search ? `${pathname}${search}` : pathname + + // Remove headers referring to the original URL. + // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. + // https://tools.ietf.org/html/rfc7231#section-6.4 + this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) + this.opts.path = path + this.opts.origin = origin + this.opts.maxRedirections = 0 + this.opts.query = null + + // https://tools.ietf.org/html/rfc7231#section-6.4.4 + // In case of HTTP 303, always replace method to be either HEAD or GET + if (statusCode === 303 && this.opts.method !== 'HEAD') { + this.opts.method = 'GET' + this.opts.body = null + } + } + + onData (chunk) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response bodies. + + Redirection is used to serve the requested resource from another URL, so it is assumes that + no body is generated (and thus can be ignored). Even though generating a body is not prohibited. 
+ + For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually + (which means it's optional and not mandated) contain just an hyperlink to the value of + the Location response header, so the body can be ignored safely. + + For status 300, which is "Multiple Choices", the spec mentions both generating a Location + response header AND a response body with the other possible location to follow. + Since the spec explicitily chooses not to specify a format for such body and leave it to + servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. + */ + } else { + return this.handler.onData(chunk) + } + } + + onComplete (trailers) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections + and neither are useful if present. + + See comment on onData method above for more detailed informations. + */ + + this.location = null + this.abort = null + + this.dispatch(this.opts, this) + } else { + this.handler.onComplete(trailers) + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) { + this.handler.onBodySent(chunk) + } + } } -function pipe(from, to) { - return from.pipe(to); + +function parseLocation (statusCode, headers) { + if (redirectableStatusCodes.indexOf(statusCode) === -1) { + return null + } + + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toString().toLowerCase() === 'location') { + return headers[i + 1] + } + } } -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); + +// https://tools.ietf.org/html/rfc7231#section-6.4.4 +function shouldRemoveHeader (header, removeContent, unknownOrigin) { + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) +} + +// https://tools.ietf.org/html/rfc7231#section-6.4 +function cleanRequestHeaders (headers, removeContent, unknownOrigin) { + const ret = [] + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { + ret.push(headers[i], headers[i + 1]) + } + } + } else if (headers && typeof headers === 'object') { + for (const key of Object.keys(headers)) { + if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { + ret.push(key, headers[key]) + } + } + } else { + assert(headers == null, 'headers must be an object or an array') + } + return ret } -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; + +module.exports = RedirectHandler + + +/***/ }), + +/***/ 2286: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(9491) + +const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785) +const { RequestRetryError } = __nccwpck_require__(8045) +const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3983) + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + 
return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? [ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) } - const callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } } - let error; - const destroys = streams.map(function (stream, i) { - const reading = i < streams.length - 1; - const writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); - }); - }); - return streams.reduce(pipe); -} -module.exports = pipeline; -/***/ }), + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } -/***/ 9948: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state -"use strict"; + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } -const ERR_INVALID_OPT_VALUE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE); -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; -} -function getHighWaterMark(state, options, duplexKey, isDuplex) { - const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - const name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return } - return Math.floor(hwm); - } - // Default value - return state.objectMode ? 16 : 16 * 1024; -} -module.exports = { - getHighWaterMark -}; + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } -/***/ }), + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } -/***/ 2387: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } -module.exports = __nccwpck_require__(2781); + const retryTimeout = + retryAfterHeader > 0 + ? Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + state.currentTimeout = retryTimeout -/***/ }), + setTimeout(() => cb(null), retryTimeout) + } -/***/ 1642: -/***/ ((module, exports, __nccwpck_require__) => { + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) -var Stream = __nccwpck_require__(2781); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(1433); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(6993); - exports.Duplex = __nccwpck_require__(1359); - exports.Transform = __nccwpck_require__(4415); - exports.PassThrough = __nccwpck_require__(1542); - exports.finished = __nccwpck_require__(6080); - exports.pipeline = __nccwpck_require__(6989); -} + this.retryCount += 1 + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } -/***/ }), + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null -/***/ 3515: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (statusCode !== 206) { + return true + } -"use strict"; + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } -const {PassThrough} = __nccwpck_require__(2781); -const debug = __nccwpck_require__(8237)('retry-request'); -const extend = __nccwpck_require__(8171); + const { start, size, end 
= size } = contentRange -const DEFAULTS = { - objectMode: false, - retries: 2, + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') - /* - The maximum time to delay in seconds. If retryDelayMultiplier results in a - delay greater than maxRetryDelay, retries should delay by maxRetryDelay - seconds instead. - */ - maxRetryDelay: 64, + this.resume = resume + return true + } - /* - The multiplier by which to increase the delay time between the completion of - failed requests, and the initiation of the subsequent retrying request. - */ - retryDelayMultiplier: 2, + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) - /* - The length of time to keep retrying in seconds. The last sleep period will - be shortened as necessary, so that the last retry runs at deadline (and not - considerably beyond it). The total time starting from when the initial - request is sent, after which an error will be returned, regardless of the - retrying attempts made meanwhile. - */ - totalTimeout: 600, + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } - noResponseRetries: 2, - currentRetryAttempt: 0, - shouldRetryFn: function (response) { - const retryRanges = [ - // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes - // 1xx - Retry (Informational, request still processing) - // 2xx - Do not retry (Success) - // 3xx - Do not retry (Redirect) - // 4xx - Do not retry (Client errors) - // 429 - Retry ("Too Many Requests") - // 5xx - Retry (Server errors) - [100, 199], - [429, 429], - [500, 599], - ]; + const { start, size, end = size } = range - const statusCode = response.statusCode; - debug(`Response status: ${statusCode}`); + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) - let range; - while ((range = retryRanges.shift())) { - if (statusCode >= range[0] && statusCode <= range[1]) { - // Not a successful status or redirect. - return true; + this.start = start + this.end = end } - } - }, -}; -function retryRequest(requestOpts, opts, callback) { - const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } - if (typeof opts === 'function') { - callback = opts; - } + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) - const manualCurrentRetryAttemptWasSet = - opts && typeof opts.currentRetryAttempt === 'number'; - opts = extend({}, DEFAULTS, opts); + this.resume = resume + this.etag = headers.etag != null ? 
headers.etag : null - if (typeof opts.request === 'undefined') { - try { - // eslint-disable-next-line node/no-unpublished-require - opts.request = __nccwpck_require__(8418); - } catch (e) { - throw new Error('A request library must be provided to retry-request.'); + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) } - } - let currentRetryAttempt = opts.currentRetryAttempt; + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) - let numNoResponseAttempts = 0; - let streamResponseHandled = false; + this.abort(err) - let retryStream; - let requestStream; - let delayStream; + return false + } - let activeRequest; - const retryRequest = { - abort: function () { - if (activeRequest && activeRequest.abort) { - activeRequest.abort(); - } - }, - }; + onData (chunk) { + this.start += chunk.length - if (streamMode) { - retryStream = new PassThrough({objectMode: opts.objectMode}); - retryStream.abort = resetStreams; + return this.handler.onData(chunk) } - const timeOfFirstRequest = Date.now(); - if (currentRetryAttempt > 0) { - retryAfterDelay(currentRetryAttempt); - } else { - makeRequest(); + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) } - if (streamMode) { - return retryStream; - } else { - return retryRequest; - } + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } - function resetStreams() { - delayStream = null; + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) - if (requestStream) { - requestStream.abort && requestStream.abort(); - requestStream.cancel && requestStream.cancel(); + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } - if (requestStream.destroy) { - requestStream.destroy(); - } else if (requestStream.end) { - requestStream.end(); + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? ''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) } } } +} - function makeRequest() { - currentRetryAttempt++; - debug(`Current retry attempt: ${currentRetryAttempt}`); - - if (streamMode) { - streamResponseHandled = false; +module.exports = RetryHandler - delayStream = new PassThrough({objectMode: opts.objectMode}); - requestStream = opts.request(requestOpts); - setImmediate(() => { - retryStream.emit('request'); - }); +/***/ }), - requestStream - // gRPC via google-cloud-node can emit an `error` as well as a `response` - // Whichever it emits, we run with-- we can't run with both. That's what - // is up with the `streamResponseHandled` tracking. 
- .on('error', err => { - if (streamResponseHandled) { - return; - } +/***/ 8861: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - streamResponseHandled = true; - onResponse(err); - }) - .on('response', (resp, body) => { - if (streamResponseHandled) { - return; - } +"use strict"; - streamResponseHandled = true; - onResponse(null, resp, body); - }) - .on('complete', retryStream.emit.bind(retryStream, 'complete')); - requestStream.pipe(delayStream); - } else { - activeRequest = opts.request(requestOpts, onResponse); - } - } +const RedirectHandler = __nccwpck_require__(2860) - function retryAfterDelay(currentRetryAttempt) { - if (streamMode) { - resetStreams(); - } +function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { + return (dispatch) => { + return function Intercept (opts, handler) { + const { maxRedirections = defaultMaxRedirections } = opts - const nextRetryDelay = getNextRetryDelay({ - maxRetryDelay: opts.maxRetryDelay, - retryDelayMultiplier: opts.retryDelayMultiplier, - retryNumber: currentRetryAttempt, - timeOfFirstRequest, - totalTimeout: opts.totalTimeout, - }); - debug(`Next retry delay: ${nextRetryDelay}`); + if (!maxRedirections) { + return dispatch(opts, handler) + } - if (nextRetryDelay <= 0) { - numNoResponseAttempts = opts.noResponseRetries + 1; - return; + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) + opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. + return dispatch(opts, redirectHandler) } - - setTimeout(makeRequest, nextRetryDelay); } +} - function onResponse(err, response, body) { - // An error such as DNS resolution. - if (err) { - numNoResponseAttempts++; +module.exports = createRedirectInterceptor - if (numNoResponseAttempts <= opts.noResponseRetries) { - retryAfterDelay(numNoResponseAttempts); - } else { - if (streamMode) { - retryStream.emit('error', err); - retryStream.end(); - } else { - callback(err, response, body); - } - } - return; - } +/***/ }), - // Send the response to see if we should try again. - // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts - // the very first request sent as the first "retry". It is only accurate - // when a user provides their own "currentRetryAttempt" option at - // instantiation. - const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet - ? currentRetryAttempt - : currentRetryAttempt - 1; - if ( - adjustedCurrentRetryAttempt < opts.retries && - opts.shouldRetryFn(response) - ) { - retryAfterDelay(currentRetryAttempt); - return; - } +/***/ 953: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // No more attempts need to be made, just continue on. 
- if (streamMode) { - retryStream.emit('response', response); - delayStream.pipe(retryStream); - requestStream.on('error', err => { - retryStream.destroy(err); - }); - } else { - callback(err, response, body); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; +const utils_1 = __nccwpck_require__(1891); +// C headers +var ERROR; +(function (ERROR) { + ERROR[ERROR["OK"] = 0] = "OK"; + ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; + ERROR[ERROR["STRICT"] = 2] = "STRICT"; + ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; + ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; + ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; + ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; + ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; + ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; + ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; + ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; + ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; + ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; + ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; + ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; + ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; + ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; + ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; + ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; + ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; + ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; + ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; + ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; + ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; + ERROR[ERROR["USER"] = 24] = "USER"; +})(ERROR = exports.ERROR || (exports.ERROR = {})); +var TYPE; +(function (TYPE) { + TYPE[TYPE["BOTH"] = 0] = "BOTH"; + TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; + TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; +})(TYPE = exports.TYPE || (exports.TYPE = {})); +var FLAGS; +(function (FLAGS) { + FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; + FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; + FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; + FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; + FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; + FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; + FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; + FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; + // 1 << 8 is unused + FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; +})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); +var LENIENT_FLAGS; +(function (LENIENT_FLAGS) { + LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; + LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; + LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; 
+})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); +var METHODS; +(function (METHODS) { + METHODS[METHODS["DELETE"] = 0] = "DELETE"; + METHODS[METHODS["GET"] = 1] = "GET"; + METHODS[METHODS["HEAD"] = 2] = "HEAD"; + METHODS[METHODS["POST"] = 3] = "POST"; + METHODS[METHODS["PUT"] = 4] = "PUT"; + /* pathological */ + METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; + METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; + METHODS[METHODS["TRACE"] = 7] = "TRACE"; + /* WebDAV */ + METHODS[METHODS["COPY"] = 8] = "COPY"; + METHODS[METHODS["LOCK"] = 9] = "LOCK"; + METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; + METHODS[METHODS["MOVE"] = 11] = "MOVE"; + METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; + METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; + METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; + METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; + METHODS[METHODS["BIND"] = 16] = "BIND"; + METHODS[METHODS["REBIND"] = 17] = "REBIND"; + METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; + METHODS[METHODS["ACL"] = 19] = "ACL"; + /* subversion */ + METHODS[METHODS["REPORT"] = 20] = "REPORT"; + METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; + METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; + METHODS[METHODS["MERGE"] = 23] = "MERGE"; + /* upnp */ + METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; + METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; + METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; + METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; + /* RFC-5789 */ + METHODS[METHODS["PATCH"] = 28] = "PATCH"; + METHODS[METHODS["PURGE"] = 29] = "PURGE"; + /* CalDAV */ + METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; + /* RFC-2068, section 19.6.1.2 */ + METHODS[METHODS["LINK"] = 31] = "LINK"; + METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; + /* icecast */ + METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; + /* RFC-7540, section 11.6 */ + METHODS[METHODS["PRI"] = 34] = "PRI"; + /* RFC-2326 RTSP */ + METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; + METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; + METHODS[METHODS["SETUP"] = 37] = "SETUP"; + METHODS[METHODS["PLAY"] = 38] = "PLAY"; + METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; + METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; + METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; + METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; + METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; + METHODS[METHODS["RECORD"] = 44] = "RECORD"; + /* RAOP */ + METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; +})(METHODS = exports.METHODS || (exports.METHODS = {})); +exports.METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + METHODS.PROPFIND, + METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + // TODO(indutny): should we allow it with HTTP? 
+ METHODS.SOURCE, +]; +exports.METHODS_ICE = [ + METHODS.SOURCE, +]; +exports.METHODS_RTSP = [ + METHODS.OPTIONS, + METHODS.DESCRIBE, + METHODS.ANNOUNCE, + METHODS.SETUP, + METHODS.PLAY, + METHODS.PAUSE, + METHODS.TEARDOWN, + METHODS.GET_PARAMETER, + METHODS.SET_PARAMETER, + METHODS.REDIRECT, + METHODS.RECORD, + METHODS.FLUSH, + // For AirPlay + METHODS.GET, + METHODS.POST, +]; +exports.METHOD_MAP = utils_1.enumToMap(METHODS); +exports.H_METHOD_MAP = {}; +Object.keys(exports.METHOD_MAP).forEach((key) => { + if (/^H/.test(key)) { + exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; } - } -} +}); +var FINISH; +(function (FINISH) { + FINISH[FINISH["SAFE"] = 0] = "SAFE"; + FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; + FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; +})(FINISH = exports.FINISH || (exports.FINISH = {})); +exports.ALPHA = []; +for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { + // Upper case + exports.ALPHA.push(String.fromCharCode(i)); + // Lower case + exports.ALPHA.push(String.fromCharCode(i + 0x20)); +} +exports.NUM_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; +exports.HEX_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, + A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, + a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, +}; +exports.NUM = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; +exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); +exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; +exports.USERINFO_CHARS = exports.ALPHANUM + .concat(exports.MARK) + .concat(['%', ';', ':', '&', '=', '+', '$', ',']); +// TODO(indutny): use RFC +exports.STRICT_URL_CHAR = [ + '!', '"', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', '-', '.', '/', + ':', ';', '<', '=', '>', + '@', '[', '\\', ']', '^', '_', + '`', + '{', '|', '}', '~', +].concat(exports.ALPHANUM); +exports.URL_CHAR = exports.STRICT_URL_CHAR + .concat(['\t', '\f']); +// All characters with 0x80 bit set to 1 +for (let i = 0x80; i <= 0xff; i++) { + exports.URL_CHAR.push(i); +} +exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); +/* Tokens as defined by rfc 2616. Also lowercases them. + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" 
| "=" + * | "{" | "}" | SP | HT + */ +exports.STRICT_TOKEN = [ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +].concat(exports.ALPHANUM); +exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +exports.HEADER_CHARS = ['\t']; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + exports.HEADER_CHARS.push(i); + } +} +// ',' = \x44 +exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); +exports.MAJOR = exports.NUM_MAP; +exports.MINOR = exports.MAJOR; +var HEADER_STATE; +(function (HEADER_STATE) { + HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; + HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; + HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; + HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; + HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; + HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; + HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; +})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); +exports.SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; +//# sourceMappingURL=constants.js.map -module.exports = retryRequest; +/***/ }), -function getNextRetryDelay(config) { - const { - maxRetryDelay, - retryDelayMultiplier, - retryNumber, - timeOfFirstRequest, - totalTimeout, - } = config; +/***/ 1145: +/***/ ((module) => { - const maxRetryDelayMs = maxRetryDelay * 1000; - const totalTimeoutMs = totalTimeout * 1000; +module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQ
DAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQF
qIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIg
FqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBA
WohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0A
AEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASE
QDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZ
IBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcI
AAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzb
AQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECA
AQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5A
A2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqI
gNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtB
ACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAs
gBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQ
BBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpb
iBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9O
X05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' - const jitter = Math.floor(Math.random() * 1000); - const calculatedNextRetryDelay = - Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; - const maxAllowableDelayMs = - totalTimeoutMs - (Date.now() - timeOfFirstRequest); +/***/ }), - return Math.min( - calculatedNextRetryDelay, - maxAllowableDelayMs, - maxRetryDelayMs - ); -} +/***/ 5627: +/***/ ((module) => { -module.exports.getNextRetryDelay = getNextRetryDelay; +module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQ
DAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAE
iECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQC
ANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgA
SIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEB
aiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRA
MpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohAS
AEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAI
RAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIE
IAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCE
QIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4
CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBe
EGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABB
f0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQ
gBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIA
IhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvb
nMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBI
UEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' /***/ }), -/***/ 4347: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1891: +/***/ ((__unused_webpack_module, exports) => { -module.exports = __nccwpck_require__(6244); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.enumToMap = void 0; +function enumToMap(obj) { + const res = {}; + Object.keys(obj).forEach((key) => { + const value = obj[key]; + if (typeof value === 'number') { + res[key] = value; + } + }); + return res; +} +exports.enumToMap = enumToMap; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 6244: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6771: +/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { -var RetryOperation = __nccwpck_require__(5369); +"use strict"; -exports.operation = function(options) { - var timeouts = exports.timeouts(options); - return new RetryOperation(timeouts, { - forever: options && (options.forever || options.retries === Infinity), - unref: options && options.unref, - maxRetryTime: options && options.maxRetryTime - }); -}; -exports.timeouts = function(options) { - if (options instanceof Array) { - return [].concat(options); +const { kClients } = __nccwpck_require__(2785) +const Agent = __nccwpck_require__(7890) +const { + kAgent, + kMockAgentSet, + kMockAgentGet, + kDispatches, + kIsMockActive, + kNetConnect, + kGetNetConnect, + kOptions, + kFactory +} = __nccwpck_require__(4347) +const MockClient = __nccwpck_require__(8687) +const MockPool = __nccwpck_require__(6193) +const { matchValue, buildMockOptions } = __nccwpck_require__(9323) +const { InvalidArgumentError, UndiciError } = __nccwpck_require__(8045) +const Dispatcher = __nccwpck_require__(412) +const Pluralizer = __nccwpck_require__(8891) +const PendingInterceptorsFormatter = __nccwpck_require__(6823) + +class FakeWeakRef { + constructor (value) { + this.value = value } - var opts = { - retries: 10, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: Infinity, - randomize: false - }; - for (var key in options) { - opts[key] = options[key]; + deref () { + return this.value } +} - if (opts.minTimeout > opts.maxTimeout) { - throw new Error('minTimeout is greater than maxTimeout'); - } +class MockAgent extends Dispatcher { + constructor (opts) { + super(opts) - var timeouts = []; - for (var i = 0; i < opts.retries; i++) { - timeouts.push(this.createTimeout(i, opts)); - } + this[kNetConnect] = true + this[kIsMockActive] = true - if (options && options.forever && !timeouts.length) { - timeouts.push(this.createTimeout(i, opts)); + // Instantiate Agent and encapsulate + if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + const agent = opts && opts.agent ? opts.agent : new Agent(opts) + this[kAgent] = agent + + this[kClients] = agent[kClients] + this[kOptions] = buildMockOptions(opts) } - // sort the array numerically ascending - timeouts.sort(function(a,b) { - return a - b; - }); + get (origin) { + let dispatcher = this[kMockAgentGet](origin) - return timeouts; -}; + if (!dispatcher) { + dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + } + return dispatcher + } -exports.createTimeout = function(attempt, opts) { - var random = (opts.randomize) - ? 
(Math.random() + 1) - : 1; + dispatch (opts, handler) { + // Call MockAgent.get to perform additional setup before dispatching as normal + this.get(opts.origin) + return this[kAgent].dispatch(opts, handler) + } - var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); - timeout = Math.min(timeout, opts.maxTimeout); + async close () { + await this[kAgent].close() + this[kClients].clear() + } - return timeout; -}; + deactivate () { + this[kIsMockActive] = false + } -exports.wrap = function(obj, options, methods) { - if (options instanceof Array) { - methods = options; - options = null; + activate () { + this[kIsMockActive] = true } - if (!methods) { - methods = []; - for (var key in obj) { - if (typeof obj[key] === 'function') { - methods.push(key); + enableNetConnect (matcher) { + if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { + if (Array.isArray(this[kNetConnect])) { + this[kNetConnect].push(matcher) + } else { + this[kNetConnect] = [matcher] } + } else if (typeof matcher === 'undefined') { + this[kNetConnect] = true + } else { + throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') } } - for (var i = 0; i < methods.length; i++) { - var method = methods[i]; - var original = obj[method]; + disableNetConnect () { + this[kNetConnect] = false + } - obj[method] = function retryWrapper(original) { - var op = exports.operation(options); - var args = Array.prototype.slice.call(arguments, 1); - var callback = args.pop(); + // This is required to bypass issues caused by using global symbols - see: + // https://github.com/nodejs/undici/issues/1447 + get isMockActive () { + return this[kIsMockActive] + } - args.push(function(err) { - if (op.retry(err)) { - return; - } - if (err) { - arguments[0] = op.mainError(); - } - callback.apply(this, arguments); - }); + [kMockAgentSet] (origin, dispatcher) { + this[kClients].set(origin, new FakeWeakRef(dispatcher)) + } - op.attempt(function() { - original.apply(obj, args); - }); - }.bind(obj, original); - obj[method].options = options; + [kFactory] (origin) { + const mockOptions = Object.assign({ agent: this }, this[kOptions]) + return this[kOptions] && this[kOptions].connections === 1 + ? 
new MockClient(origin, mockOptions) + : new MockPool(origin, mockOptions) } -}; + [kMockAgentGet] (origin) { + // First check if we can immediately find it + const ref = this[kClients].get(origin) + if (ref) { + return ref.deref() + } -/***/ }), + // If the origin is not a string create a dummy parent pool and return to user + if (typeof origin !== 'string') { + const dispatcher = this[kFactory]('http://localhost:9999') + this[kMockAgentSet](origin, dispatcher) + return dispatcher + } -/***/ 5369: -/***/ ((module) => { + // If we match, create a pool and assign the same dispatches + for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { + const nonExplicitDispatcher = nonExplicitRef.deref() + if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { + const dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] + return dispatcher + } + } + } -function RetryOperation(timeouts, options) { - // Compatibility for the old (timeouts, retryForever) signature - if (typeof options === 'boolean') { - options = { forever: options }; + [kGetNetConnect] () { + return this[kNetConnect] } - this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); - this._timeouts = timeouts; - this._options = options || {}; - this._maxRetryTime = options && options.maxRetryTime || Infinity; - this._fn = null; - this._errors = []; - this._attempts = 1; - this._operationTimeout = null; - this._operationTimeoutCb = null; - this._timeout = null; - this._operationStart = null; - this._timer = null; + pendingInterceptors () { + const mockAgentClients = this[kClients] - if (this._options.forever) { - this._cachedTimeouts = this._timeouts.slice(0); + return Array.from(mockAgentClients.entries()) + .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) + .filter(({ pending }) => pending) } -} -module.exports = RetryOperation; -RetryOperation.prototype.reset = function() { - this._attempts = 1; - this._timeouts = this._originalTimeouts.slice(0); -} + assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { + const pending = this.pendingInterceptors() -RetryOperation.prototype.stop = function() { - if (this._timeout) { - clearTimeout(this._timeout); - } - if (this._timer) { - clearTimeout(this._timer); - } + if (pending.length === 0) { + return + } - this._timeouts = []; - this._cachedTimeouts = null; -}; + const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) -RetryOperation.prototype.retry = function(err) { - if (this._timeout) { - clearTimeout(this._timeout); - } + throw new UndiciError(` +${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: - if (!err) { - return false; - } - var currentTime = new Date().getTime(); - if (err && currentTime - this._operationStart >= this._maxRetryTime) { - this._errors.push(err); - this._errors.unshift(new Error('RetryOperation timeout occurred')); - return false; +${pendingInterceptorsFormatter.format(pending)} +`.trim()) } +} - this._errors.push(err); +module.exports = MockAgent - var timeout = this._timeouts.shift(); - if (timeout === undefined) { - if (this._cachedTimeouts) { - // retry forever, only keep last error - this._errors.splice(0, this._errors.length - 1); - timeout = this._cachedTimeouts.slice(-1); - } else { - return false; - } - } - var self = this; - this._timer = setTimeout(function() { 
- self._attempts++; +/***/ }), + +/***/ 8687: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; - if (self._operationTimeoutCb) { - self._timeout = setTimeout(function() { - self._operationTimeoutCb(self._attempts); - }, self._operationTimeout); - if (self._options.unref) { - self._timeout.unref(); - } +const { promisify } = __nccwpck_require__(3837) +const Client = __nccwpck_require__(3598) +const { buildMockDispatch } = __nccwpck_require__(9323) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(4347) +const { MockInterceptor } = __nccwpck_require__(410) +const Symbols = __nccwpck_require__(2785) +const { InvalidArgumentError } = __nccwpck_require__(8045) + +/** + * MockClient provides an API that extends the Client to influence the mockDispatches. + */ +class MockClient extends Client { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') } - self._fn(self._attempts); - }, timeout); + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) - if (this._options.unref) { - this._timer.unref(); + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] } - return true; -}; - -RetryOperation.prototype.attempt = function(fn, timeoutOps) { - this._fn = fn; + get [Symbols.kConnected] () { + return this[kConnected] + } - if (timeoutOps) { - if (timeoutOps.timeout) { - this._operationTimeout = timeoutOps.timeout; - } - if (timeoutOps.cb) { - this._operationTimeoutCb = timeoutOps.cb; - } + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) } - var self = this; - if (this._operationTimeoutCb) { - this._timeout = setTimeout(function() { - self._operationTimeoutCb(); - }, self._operationTimeout); + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) } +} - this._operationStart = new Date().getTime(); +module.exports = MockClient - this._fn(this._attempts); -}; -RetryOperation.prototype.try = function(fn) { - console.log('Using RetryOperation.try() is deprecated'); - this.attempt(fn); -}; +/***/ }), -RetryOperation.prototype.start = function(fn) { - console.log('Using RetryOperation.start() is deprecated'); - this.attempt(fn); -}; +/***/ 888: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -RetryOperation.prototype.start = RetryOperation.prototype.try; +"use strict"; -RetryOperation.prototype.errors = function() { - return this._errors; -}; -RetryOperation.prototype.attempts = function() { - return this._attempts; -}; +const { UndiciError } = __nccwpck_require__(8045) -RetryOperation.prototype.mainError = function() { - if (this._errors.length === 0) { - return null; +class MockNotMatchedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, MockNotMatchedError) + this.name = 'MockNotMatchedError' + this.message = message || 'The request does not match any registered mock dispatches' + this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' } +} - var counts = {}; - var mainError = null; - var mainErrorCount = 0; +module.exports = { + MockNotMatchedError +} - for (var i = 0; i < this._errors.length; i++) { - var error = this._errors[i]; - var message = error.message; - var count = (counts[message] || 0) + 1; - counts[message] = count; +/***/ }), - if (count >= mainErrorCount) { - mainError = error; - mainErrorCount = count; +/***/ 410: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(9323) +const { + kDispatches, + kDispatchKey, + kDefaultHeaders, + kDefaultTrailers, + kContentLength, + kMockDispatch +} = __nccwpck_require__(4347) +const { InvalidArgumentError } = __nccwpck_require__(8045) +const { buildURL } = __nccwpck_require__(3983) + +/** + * Defines the scope API for an interceptor reply + */ +class MockScope { + constructor (mockDispatch) { + this[kMockDispatch] = mockDispatch + } + + /** + * Delay a reply by a set amount in ms. + */ + delay (waitInMs) { + if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { + throw new InvalidArgumentError('waitInMs must be a valid integer > 0') } + + this[kMockDispatch].delay = waitInMs + return this } - return mainError; -}; + /** + * For a defined reply, never mark as consumed. + */ + persist () { + this[kMockDispatch].persist = true + return this + } + /** + * Allow one to define a reply for a set amount of matching requests. 
+ */ + times (repeatTimes) { + if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { + throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') + } -/***/ }), + this[kMockDispatch].times = repeatTimes + return this + } +} -/***/ 1867: -/***/ ((module, exports, __nccwpck_require__) => { +/** + * Defines an interceptor for a Mock + */ +class MockInterceptor { + constructor (opts, mockDispatches) { + if (typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object') + } + if (typeof opts.path === 'undefined') { + throw new InvalidArgumentError('opts.path must be defined') + } + if (typeof opts.method === 'undefined') { + opts.method = 'GET' + } + // See https://github.com/nodejs/undici/issues/1245 + // As per RFC 3986, clients are not supposed to send URI + // fragments to servers when they retrieve a document, + if (typeof opts.path === 'string') { + if (opts.query) { + opts.path = buildURL(opts.path, opts.query) + } else { + // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 + const parsedURL = new URL(opts.path, 'data://') + opts.path = parsedURL.pathname + parsedURL.search + } + } + if (typeof opts.method === 'string') { + opts.method = opts.method.toUpperCase() + } -/*! safe-buffer. MIT License. Feross Aboukhadijeh */ -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(4300) -var Buffer = buffer.Buffer + this[kDispatchKey] = buildKey(opts) + this[kDispatches] = mockDispatches + this[kDefaultHeaders] = {} + this[kDefaultTrailers] = {} + this[kContentLength] = false + } -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] + createMockScopeDispatchData (statusCode, data, responseOptions = {}) { + const responseData = getResponseData(data) + const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} + const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } + const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } + + return { statusCode, data, headers, trailers } } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} + validateReplyParameters (statusCode, data, responseOptions) { + if (typeof statusCode === 'undefined') { + throw new InvalidArgumentError('statusCode must be defined') + } + if (typeof data === 'undefined') { + throw new InvalidArgumentError('data must be defined') + } + if (typeof responseOptions !== 'object') { + throw new InvalidArgumentError('responseOptions must be an object') + } + } -SafeBuffer.prototype = Object.create(Buffer.prototype) + /** + * Mock an undici request with a defined reply. + */ + reply (replyData) { + // Values of reply aren't available right now as they + // can only be available when the reply callback is invoked. + if (typeof replyData === 'function') { + // We'll first wrap the provided callback in another function, + // this function will properly resolve the data from the callback + // when invoked. + const wrappedDefaultsCallback = (opts) => { + // Our reply options callback contains the parameter for statusCode, data and options. 
+ const resolvedData = replyData(opts) + + // Check if it is in the right format + if (typeof resolvedData !== 'object') { + throw new InvalidArgumentError('reply options callback must return an object') + } + + const { statusCode, data = '', responseOptions = {} } = resolvedData + this.validateReplyParameters(statusCode, data, responseOptions) + // Since the values can be obtained immediately we return them + // from this higher order function that will be resolved later. + return { + ...this.createMockScopeDispatchData(statusCode, data, responseOptions) + } + } -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) + // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) + return new MockScope(newMockDispatch) + } -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') + // We can have either one or three parameters, if we get here, + // we should have 1-3 parameters. So we spread the arguments of + // this function to obtain the parameters, since replyData will always + // just be the statusCode. + const [statusCode, data = '', responseOptions = {}] = [...arguments] + this.validateReplyParameters(statusCode, data, responseOptions) + + // Send in-already provided data like usual + const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) + return new MockScope(newMockDispatch) } - return Buffer(arg, encodingOrOffset, length) -} -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + /** + * Mock an undici request with a defined error. 
+ */ + replyWithError (error) { + if (typeof error === 'undefined') { + throw new InvalidArgumentError('error must be defined') + } + + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) + return new MockScope(newMockDispatch) } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) + + /** + * Set default reply headers on the interceptor for subsequent replies + */ + defaultReplyHeaders (headers) { + if (typeof headers === 'undefined') { + throw new InvalidArgumentError('headers must be defined') } - } else { - buf.fill(0) + + this[kDefaultHeaders] = headers + return this } - return buf -} -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + /** + * Set default reply trailers on the interceptor for subsequent replies + */ + defaultReplyTrailers (trailers) { + if (typeof trailers === 'undefined') { + throw new InvalidArgumentError('trailers must be defined') + } + + this[kDefaultTrailers] = trailers + return this } - return Buffer(size) -} -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + /** + * Set reply content length header for replies on the interceptor + */ + replyContentLength () { + this[kContentLength] = true + return this } - return buffer.SlowBuffer(size) } +module.exports.MockInterceptor = MockInterceptor +module.exports.MockScope = MockScope + /***/ }), -/***/ 9626: +/***/ 6193: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var stubs = __nccwpck_require__(1099) +const { promisify } = __nccwpck_require__(3837) +const Pool = __nccwpck_require__(4634) +const { buildMockDispatch } = __nccwpck_require__(9323) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(4347) +const { MockInterceptor } = __nccwpck_require__(410) +const Symbols = __nccwpck_require__(2785) +const { InvalidArgumentError } = __nccwpck_require__(8045) -/* - * StreamEvents can be used 2 ways: - * - * 1: - * function MyStream() { - * require('stream-events').call(this) - * } - * - * 2: - * require('stream-events')(myStream) +/** + * MockPool provides an API that extends the Pool to influence the mockDispatches. */ -function StreamEvents(stream) { - stream = stream || this +class MockPool extends Pool { + constructor (origin, opts) { + super(origin, opts) - var cfg = { - callthrough: true, - calls: 1 + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] } - stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) - stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + get [Symbols.kConnected] () { + return this[kConnected] + } - return stream + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } } -module.exports = StreamEvents +module.exports = MockPool /***/ }), -/***/ 6121: +/***/ 4347: /***/ ((module) => { -module.exports = shift - -function shift (stream) { - var rs = stream._readableState - if (!rs) return null - return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) -} - -function getStateLength (state) { - if (state.buffer.length) { - // Since node 6.3.0 state.buffer is a BufferList not an array - if (state.buffer.head) { - return state.buffer.head.data.length - } +"use strict"; - return state.buffer[0].length - } - return state.length +module.exports = { + kAgent: Symbol('agent'), + kOptions: Symbol('options'), + kFactory: Symbol('factory'), + kDispatches: Symbol('dispatches'), + kDispatchKey: Symbol('dispatch key'), + kDefaultHeaders: Symbol('default headers'), + kDefaultTrailers: Symbol('default trailers'), + kContentLength: Symbol('content length'), + kMockAgent: Symbol('mock agent'), + kMockAgentSet: Symbol('mock agent set'), + kMockAgentGet: Symbol('mock agent get'), + kMockDispatch: Symbol('mock dispatch'), + kClose: Symbol('close'), + kOriginalClose: Symbol('original agent close'), + kOrigin: Symbol('origin'), + kIsMockActive: Symbol('is mock active'), + kNetConnect: Symbol('net connect'), + kGetNetConnect: Symbol('get net connect'), + kConnected: Symbol('connected') } /***/ }), -/***/ 4841: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9323: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+const { MockNotMatchedError } = __nccwpck_require__(888) +const { + kDispatches, + kMockAgent, + kOriginalDispatch, + kOrigin, + kGetNetConnect +} = __nccwpck_require__(4347) +const { buildURL, nop } = __nccwpck_require__(3983) +const { STATUS_CODES } = __nccwpck_require__(3685) +const { + types: { + isPromise + } +} = __nccwpck_require__(3837) -/**/ +function matchValue (match, value) { + if (typeof match === 'string') { + return match === value + } + if (match instanceof RegExp) { + return match.test(value) + } + if (typeof match === 'function') { + return match(value) === true + } + return false +} -var Buffer = (__nccwpck_require__(1867).Buffer); -/**/ +function lowerCaseEntries (headers) { + return Object.fromEntries( + Object.entries(headers).map(([headerName, headerValue]) => { + return [headerName.toLocaleLowerCase(), headerValue] + }) + ) +} -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; +/** + * @param {import('../../index').Headers|string[]|Record} headers + * @param {string} key + */ +function getHeaderByName (headers, key) { + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { + return headers[i + 1] + } + } + + return undefined + } else if (typeof headers.get === 'function') { + return headers.get(key) + } else { + return lowerCaseEntries(headers)[key.toLocaleLowerCase()] } -}; +} -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; +/** @param {string[]} headers */ +function buildHeadersFromArray (headers) { // fetch HeadersList + const clone = headers.slice() + const entries = [] + for (let index = 0; index < clone.length; index += 2) { + entries.push([clone[index], clone[index + 1]]) + } + return Object.fromEntries(entries) +} + +function matchHeaders (mockDispatch, headers) { + if (typeof mockDispatch.headers === 'function') { + if (Array.isArray(headers)) { // fetch HeadersList + headers = buildHeadersFromArray(headers) } + return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) + } + if (typeof mockDispatch.headers === 'undefined') { + return true + } + if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { + return false } -}; -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} + for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { + const headerValue = getHeaderByName(headers, matchHeaderName) -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. -exports.s = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; + if (!matchValue(matchHeaderValue, headerValue)) { + return false + } } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); + return true } -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; +function safeUrl (path) { + if (typeof path !== 'string') { + return path } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; - -StringDecoder.prototype.end = utf8End; -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; + const pathSegments = path.split('?') -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); + if (pathSegments.length !== 2) { + return path } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; + const qp = new URLSearchParams(pathSegments.pop()) + qp.sort() + return [...pathSegments, qp.toString()].join('?') } -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. 
-function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; +function matchKey (mockDispatch, { path, method, body, headers }) { + const pathMatch = matchValue(mockDispatch.path, path) + const methodMatch = matchValue(mockDispatch.method, method) + const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true + const headersMatch = matchHeaders(mockDispatch, headers) + return pathMatch && methodMatch && bodyMatch && headersMatch +} + +function getResponseData (data) { + if (Buffer.isBuffer(data)) { + return data + } else if (typeof data === 'object') { + return JSON.stringify(data) + } else { + return data.toString() } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; +} + +function getMockDispatch (mockDispatches, key) { + const basePath = key.query ? buildURL(key.path, key.query) : key.path + const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath + + // Match path + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; - } - return nb; + + // Match method + matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) } - return 0; -} -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; + // Match body + matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } + + // Match headers + matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? 
JSON.stringify(key.headers) : key.headers}'`) + } + + return matchedMockDispatches[0] +} + +function addMockDispatch (mockDispatches, key, data) { + const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } + const replyData = typeof data === 'function' ? { callback: data } : { ...data } + const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } + mockDispatches.push(newMockDispatch) + return newMockDispatch +} + +function deleteMockDispatch (mockDispatches, key) { + const index = mockDispatches.findIndex(dispatch => { + if (!dispatch.consumed) { + return false } + return matchKey(dispatch, key) + }) + if (index !== -1) { + mockDispatches.splice(index, 1) } } -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); +function buildKey (opts) { + const { path, method, body, headers, query } = opts + return { + path, + method, + body, + headers, + query } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; } -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); +function generateKeyValues (data) { + return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ + ...keyValuePairs, + Buffer.from(`${key}`), + Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) + ], []) } -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; +/** + * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status + * @param {number} statusCode + */ +function getStatusText (statusCode) { + return STATUS_CODES[statusCode] || 'unknown' } -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. 
-function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } +async function getResponse (body) { + const buffers = [] + for await (const data of body) { + buffers.push(data) + } + return Buffer.concat(buffers).toString('utf8') +} + +/** + * Mock dispatch function used to simulate undici dispatches + */ +function mockDispatch (opts, handler) { + // Get mock dispatch from built key + const key = buildKey(opts) + const mockDispatch = getMockDispatch(this[kDispatches], key) + + mockDispatch.timesInvoked++ + + // Here's where we resolve a callback if a callback is present for the dispatch data. + if (mockDispatch.data.callback) { + mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } + } + + // Parse mockDispatch data + const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch + const { timesInvoked, times } = mockDispatch + + // If it's used up and not persistent, mark as consumed + mockDispatch.consumed = !persist && timesInvoked >= times + mockDispatch.pending = timesInvoked < times + + // If specified, trigger dispatch error + if (error !== null) { + deleteMockDispatch(this[kDispatches], key) + handler.onError(error) + return true + } + + // Handle the request with a delay if necessary + if (typeof delay === 'number' && delay > 0) { + setTimeout(() => { + handleReply(this[kDispatches]) + }, delay) + } else { + handleReply(this[kDispatches]) + } + + function handleReply (mockDispatches, _data = data) { + // fetch's HeadersList is a 1D string array + const optsHeaders = Array.isArray(opts.headers) + ? buildHeadersFromArray(opts.headers) + : opts.headers + const body = typeof _data === 'function' + ? _data({ ...opts, headers: optsHeaders }) + : _data + + // util.types.isPromise is likely needed for jest. + if (isPromise(body)) { + // If handleReply is asynchronous, throwing an error + // in the callback will reject the promise, rather than + // synchronously throw the error, which breaks some tests. + // Rather, we wait for the callback to resolve if it is a + // promise, and then re-run handleReply with the new body. + body.then((newData) => handleReply(mockDispatches, newData)) + return } - return r; + + const responseData = getResponseData(body) + const responseHeaders = generateKeyValues(headers) + const responseTrailers = generateKeyValues(trailers) + + handler.abort = nop + handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) + handler.onData(Buffer.from(responseData)) + handler.onComplete(responseTrailers) + deleteMockDispatch(mockDispatches, key) } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); + + function resume () {} + + return true } -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? 
this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); +function buildMockDispatch () { + const agent = this[kMockAgent] + const origin = this[kOrigin] + const originalDispatch = this[kOriginalDispatch] + + return function dispatch (opts, handler) { + if (agent.isMockActive) { + try { + mockDispatch.call(this, opts, handler) + } catch (error) { + if (error instanceof MockNotMatchedError) { + const netConnect = agent[kGetNetConnect]() + if (netConnect === false) { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) + } + if (checkNetConnect(netConnect, origin)) { + originalDispatch.call(this, opts, handler) + } else { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) + } + } else { + throw error + } + } + } else { + originalDispatch.call(this, opts, handler) + } } - return r; } -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; +function checkNetConnect (netConnect, origin) { + const url = new URL(origin) + if (netConnect === true) { + return true + } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { + return true } - return buf.toString('base64', i, buf.length - n); + return false } -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; +function buildMockOptions (opts) { + if (opts) { + const { agent, ...mockOptions } = opts + return mockOptions + } } -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); +module.exports = { + getResponseData, + getMockDispatch, + addMockDispatch, + deleteMockDispatch, + buildKey, + generateKeyValues, + matchValue, + getResponse, + getStatusText, + mockDispatch, + buildMockDispatch, + checkNetConnect, + buildMockOptions, + getHeaderByName } -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} /***/ }), -/***/ 1099: -/***/ ((module) => { +/***/ 6823: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = function stubs(obj, method, cfg, stub) { - if (!obj || !method || !obj[method]) - throw new Error('You must provide an object and a key for an existing method') +const { Transform } = __nccwpck_require__(2781) +const { Console } = __nccwpck_require__(6206) - if (!stub) { - stub = cfg - cfg = {} +/** + * Gets the output of `console.table(…)` as a string. 
+ */ +module.exports = class PendingInterceptorsFormatter { + constructor ({ disableColors } = {}) { + this.transform = new Transform({ + transform (chunk, _enc, cb) { + cb(null, chunk) + } + }) + + this.logger = new Console({ + stdout: this.transform, + inspectOptions: { + colors: !disableColors && !process.env.CI + } + }) } - stub = stub || function() {} + format (pendingInterceptors) { + const withPrettyHeaders = pendingInterceptors.map( + ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + Method: method, + Origin: origin, + Path: path, + 'Status code': statusCode, + Persistent: persist ? '✅' : '❌', + Invocations: timesInvoked, + Remaining: persist ? Infinity : times - timesInvoked + })) - cfg.callthrough = cfg.callthrough || false - cfg.calls = cfg.calls || 0 + this.logger.table(withPrettyHeaders) + return this.transform.read().toString() + } +} - var norevert = cfg.calls === 0 - var cached = obj[method].bind(obj) +/***/ }), - obj[method] = function() { - var args = [].slice.call(arguments) - var returnVal +/***/ 8891: +/***/ ((module) => { - if (cfg.callthrough) - returnVal = cached.apply(obj, args) +"use strict"; - returnVal = stub.apply(obj, args) || returnVal - if (!norevert && --cfg.calls === 0) - obj[method] = cached +const singulars = { + pronoun: 'it', + is: 'is', + was: 'was', + this: 'this' +} - return returnVal +const plurals = { + pronoun: 'they', + is: 'are', + was: 'were', + this: 'these' +} + +module.exports = class Pluralizer { + constructor (singular, plural) { + this.singular = singular + this.plural = plural + } + + pluralize (count) { + const one = count === 1 + const keys = one ? singulars : plurals + const noun = one ? this.singular : this.plural + return { ...keys, count, noun } } } /***/ }), -/***/ 9318: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 8266: +/***/ ((module) => { "use strict"; +/* eslint-disable */ -const os = __nccwpck_require__(2037); -const tty = __nccwpck_require__(6224); -const hasFlag = __nccwpck_require__(1621); - -const {env} = process; -let forceColor; -if (hasFlag('no-color') || - hasFlag('no-colors') || - hasFlag('color=false') || - hasFlag('color=never')) { - forceColor = 0; -} else if (hasFlag('color') || - hasFlag('colors') || - hasFlag('color=true') || - hasFlag('color=always')) { - forceColor = 1; -} -if ('FORCE_COLOR' in env) { - if (env.FORCE_COLOR === 'true') { - forceColor = 1; - } else if (env.FORCE_COLOR === 'false') { - forceColor = 0; - } else { - forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); - } -} +// Extracted from node/lib/internal/fixed_queue.js -function translateLevel(level) { - if (level === 0) { - return false; - } +// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. +const kSize = 2048; +const kMask = kSize - 1; - return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 - }; -} +// The FixedQueue is implemented as a singly-linked list of fixed-size +// circular buffers. It looks something like this: +// +// head tail +// | | +// v v +// +-----------+ <-----\ +-----------+ <------\ +-----------+ +// | [null] | \----- | next | \------- | next | +// +-----------+ +-----------+ +-----------+ +// | item | <-- bottom | item | <-- bottom | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | bottom --> | item | +// | item | | item | | item | +// | ... | | ... | | ... 
| +// | item | | item | | item | +// | item | | item | | item | +// | [empty] | <-- top | item | | item | +// | [empty] | | item | | item | +// | [empty] | | [empty] | <-- top top --> | [empty] | +// +-----------+ +-----------+ +-----------+ +// +// Or, if there is only one circular buffer, it looks something +// like either of these: +// +// head tail head tail +// | | | | +// v v v v +// +-----------+ +-----------+ +// | [null] | | [null] | +// +-----------+ +-----------+ +// | [empty] | | item | +// | [empty] | | item | +// | item | <-- bottom top --> | [empty] | +// | item | | [empty] | +// | [empty] | <-- top bottom --> | item | +// | [empty] | | item | +// +-----------+ +-----------+ +// +// Adding a value means moving `top` forward by one, removing means +// moving `bottom` forward by one. After reaching the end, the queue +// wraps around. +// +// When `top === bottom` the current queue is empty and when +// `top + 1 === bottom` it's full. This wastes a single space of storage +// but allows much quicker checks. -function supportsColor(haveStream, streamIsTTY) { - if (forceColor === 0) { - return 0; - } +class FixedCircularBuffer { + constructor() { + this.bottom = 0; + this.top = 0; + this.list = new Array(kSize); + this.next = null; + } - if (hasFlag('color=16m') || - hasFlag('color=full') || - hasFlag('color=truecolor')) { - return 3; - } + isEmpty() { + return this.top === this.bottom; + } - if (hasFlag('color=256')) { - return 2; - } + isFull() { + return ((this.top + 1) & kMask) === this.bottom; + } - if (haveStream && !streamIsTTY && forceColor === undefined) { - return 0; - } + push(data) { + this.list[this.top] = data; + this.top = (this.top + 1) & kMask; + } - const min = forceColor || 0; + shift() { + const nextItem = this.list[this.bottom]; + if (nextItem === undefined) + return null; + this.list[this.bottom] = undefined; + this.bottom = (this.bottom + 1) & kMask; + return nextItem; + } +} - if (env.TERM === 'dumb') { - return min; - } +module.exports = class FixedQueue { + constructor() { + this.head = this.tail = new FixedCircularBuffer(); + } - if (process.platform === 'win32') { - // Windows 10 build 10586 is the first Windows release that supports 256 colors. - // Windows 10 build 14931 is the first release that supports 16m/TrueColor. - const osRelease = os.release().split('.'); - if ( - Number(osRelease[0]) >= 10 && - Number(osRelease[2]) >= 10586 - ) { - return Number(osRelease[2]) >= 14931 ? 3 : 2; - } + isEmpty() { + return this.head.isEmpty(); + } - return 1; - } + push(data) { + if (this.head.isFull()) { + // Head is full: Creates a new queue, sets the old queue's `.next` to it, + // and sets it as the new main queue. + this.head = this.head.next = new FixedCircularBuffer(); + } + this.head.push(data); + } - if ('CI' in env) { - if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { - return 1; - } + shift() { + const tail = this.tail; + const next = tail.shift(); + if (tail.isEmpty() && tail.next !== null) { + // If there is another queue, it forms the new tail. + this.tail = tail.next; + } + return next; + } +}; - return min; - } - if ('TEAMCITY_VERSION' in env) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 
1 : 0; - } +/***/ }), - if (env.COLORTERM === 'truecolor') { - return 3; - } +/***/ 3198: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if ('TERM_PROGRAM' in env) { - const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); +"use strict"; - switch (env.TERM_PROGRAM) { - case 'iTerm.app': - return version >= 3 ? 3 : 2; - case 'Apple_Terminal': - return 2; - // No default - } - } - if (/-256(color)?$/i.test(env.TERM)) { - return 2; - } +const DispatcherBase = __nccwpck_require__(4839) +const FixedQueue = __nccwpck_require__(8266) +const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(2785) +const PoolStats = __nccwpck_require__(9689) - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { - return 1; - } +const kClients = Symbol('clients') +const kNeedDrain = Symbol('needDrain') +const kQueue = Symbol('queue') +const kClosedResolve = Symbol('closed resolve') +const kOnDrain = Symbol('onDrain') +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kGetDispatcher = Symbol('get dispatcher') +const kAddClient = Symbol('add client') +const kRemoveClient = Symbol('remove client') +const kStats = Symbol('stats') - if ('COLORTERM' in env) { - return 1; - } +class PoolBase extends DispatcherBase { + constructor () { + super() - return min; -} + this[kQueue] = new FixedQueue() + this[kClients] = [] + this[kQueued] = 0 -function getSupportLevel(stream) { - const level = supportsColor(stream, stream && stream.isTTY); - return translateLevel(level); -} + const pool = this -module.exports = { - supportsColor: getSupportLevel, - stdout: translateLevel(supportsColor(true, tty.isatty(1))), - stderr: translateLevel(supportsColor(true, tty.isatty(2))) -}; + this[kOnDrain] = function onDrain (origin, targets) { + const queue = pool[kQueue] + let needDrain = false -/***/ }), + while (!needDrain) { + const item = queue.shift() + if (!item) { + break + } + pool[kQueued]-- + needDrain = !this.dispatch(item.opts, item.handler) + } -/***/ 4920: -/***/ ((__unused_webpack_module, exports) => { + this[kNeedDrain] = needDrain -"use strict"; + if (!this[kNeedDrain] && pool[kNeedDrain]) { + pool[kNeedDrain] = false + pool.emit('drain', origin, [pool, ...targets]) + } -/** - * @license - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; -/** - * @class TeenyStatisticsWarning - * @extends Error - * @description While an error, is used for emitting warnings when - * meeting certain configured thresholds. 
- * @see process.emitWarning - */ -class TeenyStatisticsWarning extends Error { - /** - * @param {string} message - */ - constructor(message) { - super(message); - this.threshold = 0; - this.type = ''; - this.value = 0; - this.name = this.constructor.name; - Error.captureStackTrace(this, this.constructor); + if (pool[kClosedResolve] && queue.isEmpty()) { + Promise + .all(pool[kClients].map(c => c.close())) + .then(pool[kClosedResolve]) + } } -} -exports.TeenyStatisticsWarning = TeenyStatisticsWarning; -TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; -/** - * @class TeenyStatistics - * @description Maintain various statistics internal to teeny-request. Tracking - * is not automatic and must be instrumented within teeny-request. - */ -class TeenyStatistics { - /** - * @param {TeenyStatisticsOptions} [opts] - */ - constructor(opts) { - /** - * @type {number} - * @private - * @default 0 - */ - this._concurrentRequests = 0; - /** - * @type {boolean} - * @private - * @default false - */ - this._didConcurrentRequestWarn = false; - this._options = TeenyStatistics._prepareOptions(opts); + + this[kOnConnect] = (origin, targets) => { + pool.emit('connect', origin, [pool, ...targets]) } - /** - * Returns a copy of the current options. - * @return {TeenyStatisticsOptions} - */ - getOptions() { - return Object.assign({}, this._options); + + this[kOnDisconnect] = (origin, targets, err) => { + pool.emit('disconnect', origin, [pool, ...targets], err) } - /** - * Change configured statistics options. This will not preserve unspecified - * options that were previously specified, i.e. this is a reset of options. - * @param {TeenyStatisticsOptions} [opts] - * @returns {TeenyStatisticsConfig} The previous options. - * @see _prepareOptions - */ - setOptions(opts) { - const oldOpts = this._options; - this._options = TeenyStatistics._prepareOptions(opts); - return oldOpts; + + this[kOnConnectionError] = (origin, targets, err) => { + pool.emit('connectionError', origin, [pool, ...targets], err) } - /** - * @readonly - * @return {TeenyStatisticsCounters} - */ - get counters() { - return { - concurrentRequests: this._concurrentRequests, - }; + + this[kStats] = new PoolStats(this) + } + + get [kBusy] () { + return this[kNeedDrain] + } + + get [kConnected] () { + return this[kClients].filter(client => client[kConnected]).length + } + + get [kFree] () { + return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length + } + + get [kPending] () { + let ret = this[kQueued] + for (const { [kPending]: pending } of this[kClients]) { + ret += pending } - /** - * @description Should call this right before making a request. - */ - requestStarting() { - this._concurrentRequests++; - if (this._options.concurrentRequests > 0 && - this._concurrentRequests >= this._options.concurrentRequests && - !this._didConcurrentRequestWarn) { - this._didConcurrentRequestWarn = true; - const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + - this._concurrentRequests + - ' requests in-flight, which exceeds the configured threshold of ' + - this._options.concurrentRequests + - '. 
Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + - 'variable or the concurrentRequests option of teeny-request to ' + - 'increase or disable (0) this warning.'); - warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; - warning.value = this._concurrentRequests; - warning.threshold = this._options.concurrentRequests; - process.emitWarning(warning); - } + return ret + } + + get [kRunning] () { + let ret = 0 + for (const { [kRunning]: running } of this[kClients]) { + ret += running } - /** - * @description When using `requestStarting`, call this after the request - * has finished. - */ - requestFinished() { - // TODO negative? - this._concurrentRequests--; + return ret + } + + get [kSize] () { + let ret = this[kQueued] + for (const { [kSize]: size } of this[kClients]) { + ret += size } - /** - * Configuration Precedence: - * 1. Dependency inversion via defined option. - * 2. Global numeric environment variable. - * 3. Built-in default. - * This will not preserve unspecified options previously specified. - * @param {TeenyStatisticsOptions} [opts] - * @returns {TeenyStatisticsOptions} - * @private - */ - static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { - let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; - const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); - if (diConcurrentRequests !== undefined) { - concurrentRequests = diConcurrentRequests; - } - else if (!Number.isNaN(envConcurrentRequests)) { - concurrentRequests = envConcurrentRequests; - } - return { concurrentRequests }; + return ret + } + + get stats () { + return this[kStats] + } + + async [kClose] () { + if (this[kQueue].isEmpty()) { + return Promise.all(this[kClients].map(c => c.close())) + } else { + return new Promise((resolve) => { + this[kClosedResolve] = resolve + }) } -} -exports.TeenyStatistics = TeenyStatistics; -/** - * @description A default threshold representing when to warn about excessive - * in-flight/concurrent requests. - * @type {number} - * @static - * @readonly - * @default 5000 - */ -TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; -//# sourceMappingURL=TeenyStatistics.js.map + } -/***/ }), + async [kDestroy] (err) { + while (true) { + const item = this[kQueue].shift() + if (!item) { + break + } + item.handler.onError(err) + } -/***/ 446: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return Promise.all(this[kClients].map(c => c.destroy(err))) + } -"use strict"; + [kDispatch] (opts, handler) { + const dispatcher = this[kGetDispatcher]() -/** - * @license - * Copyright 2019 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getAgent = exports.pool = void 0; -const http_1 = __nccwpck_require__(3685); -const https_1 = __nccwpck_require__(5687); -// eslint-disable-next-line node/no-deprecated-api -const url_1 = __nccwpck_require__(7310); -exports.pool = new Map(); -/** - * Determines if a proxy should be considered based on the environment. - * - * @param uri The request uri - * @returns {boolean} - */ -function shouldUseProxyForURI(uri) { - const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; - if (!noProxyEnv) { - return true; - } - const givenURI = new URL(uri); - for (const noProxyRaw of noProxyEnv.split(',')) { - const noProxy = noProxyRaw.trim(); - if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { - return false; - } - else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { - const noProxyWildcard = noProxy.replace(/^\*\./, '.'); - if (givenURI.hostname.endsWith(noProxyWildcard)) { - return false; - } - } - } - return true; -} -/** - * Returns a custom request Agent if one is found, otherwise returns undefined - * which will result in the global http(s) Agent being used. - * @private - * @param {string} uri The request uri - * @param {Options} reqOpts The request options - * @returns {HttpAnyAgent|undefined} - */ -function getAgent(uri, reqOpts) { - const isHttp = uri.startsWith('http://'); - const proxy = reqOpts.proxy || - process.env.HTTP_PROXY || - process.env.http_proxy || - process.env.HTTPS_PROXY || - process.env.https_proxy; - const poolOptions = Object.assign({}, reqOpts.pool); - const manuallyProvidedProxy = !!reqOpts.proxy; - const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); - if (proxy && shouldUseProxy) { - // tslint:disable-next-line variable-name - const Agent = isHttp - ? __nccwpck_require__(3764) - : __nccwpck_require__(7219); - const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; - return new Agent(proxyOpts); + if (!dispatcher) { + this[kNeedDrain] = true + this[kQueue].push({ opts, handler }) + this[kQueued]++ + } else if (!dispatcher.dispatch(opts, handler)) { + dispatcher[kNeedDrain] = true + this[kNeedDrain] = !this[kGetDispatcher]() } - let key = isHttp ? 'http' : 'https'; - if (reqOpts.forever) { - key += ':forever'; - if (!exports.pool.has(key)) { - // tslint:disable-next-line variable-name - const Agent = isHttp ? 
http_1.Agent : https_1.Agent; - exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + + return !this[kNeedDrain] + } + + [kAddClient] (client) { + client + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].push(client) + + if (this[kNeedDrain]) { + process.nextTick(() => { + if (this[kNeedDrain]) { + this[kOnDrain](client[kUrl], [this, client]) } + }) } - return exports.pool.get(key); + + return this + } + + [kRemoveClient] (client) { + client.close(() => { + const idx = this[kClients].indexOf(client) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + }) + + this[kNeedDrain] = this[kClients].some(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + } } -exports.getAgent = getAgent; -//# sourceMappingURL=agents.js.map + +module.exports = { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} + /***/ }), -/***/ 6886: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9689: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(2785) +const kPool = Symbol('pool') -/** - * @license - * Copyright 2018 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.teenyRequest = exports.RequestError = void 0; -const node_fetch_1 = __nccwpck_require__(467); -const stream_1 = __nccwpck_require__(2781); -const uuid = __nccwpck_require__(7835); -const agents_1 = __nccwpck_require__(446); -const TeenyStatistics_1 = __nccwpck_require__(4920); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const streamEvents = __nccwpck_require__(9626); -class RequestError extends Error { +class PoolStats { + constructor (pool) { + this[kPool] = pool + } + + get connected () { + return this[kPool][kConnected] + } + + get free () { + return this[kPool][kFree] + } + + get pending () { + return this[kPool][kPending] + } + + get queued () { + return this[kPool][kQueued] + } + + get running () { + return this[kPool][kRunning] + } + + get size () { + return this[kPool][kSize] + } } -exports.RequestError = RequestError; -/** - * Convert options from Request to Fetch format - * @private - * @param reqOpts Request options - */ -function requestToFetchOptions(reqOpts) { - const options = { - method: reqOpts.method || 'GET', - ...(reqOpts.timeout && { timeout: reqOpts.timeout }), - ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), - }; - if (typeof reqOpts.json === 'object') { - // Add Content-type: application/json header - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['Content-Type'] = 'application/json'; - // Set body to JSON representation of value - options.body = JSON.stringify(reqOpts.json); - } - else { - if (Buffer.isBuffer(reqOpts.body)) { - options.body = reqOpts.body; - } - else if (typeof reqOpts.body !== 'string') { - options.body = JSON.stringify(reqOpts.body); - } - else { - options.body = reqOpts.body; - } + +module.exports = PoolStats + + +/***/ }), + +/***/ 4634: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kGetDispatcher +} = __nccwpck_require__(3198) +const Client = __nccwpck_require__(3598) +const { + InvalidArgumentError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { kUrl, kInterceptors } = __nccwpck_require__(2785) +const buildConnector = __nccwpck_require__(2067) + +const kOptions = Symbol('options') +const kConnections = Symbol('connections') +const kFactory = Symbol('factory') + +function defaultFactory (origin, opts) { + return new Client(origin, opts) +} + +class Pool extends PoolBase { + constructor (origin, { + connections, + factory = defaultFactory, + connect, + connectTimeout, + tls, + maxCachedSessions, + socketPath, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + allowH2, + ...options + } = {}) { + super() + + if (connections != null && (!Number.isFinite(connections) || connections < 0)) { + throw new InvalidArgumentError('invalid connections') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? 
{ autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - options.headers = reqOpts.headers; - let uri = (reqOpts.uri || - reqOpts.url); - if (!uri) { - throw new Error('Missing uri or url in reqOpts.'); + + this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) + ? options.interceptors.Pool + : [] + this[kConnections] = connections || null + this[kUrl] = util.parseOrigin(origin) + this[kOptions] = { ...util.deepClone(options), connect, allowH2 } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kFactory] = factory + } + + [kGetDispatcher] () { + let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) + + if (dispatcher) { + return dispatcher } - if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const qs = __nccwpck_require__(3477); - const params = qs.stringify(reqOpts.qs); - uri = uri + '?' + params; + + if (!this[kConnections] || this[kClients].length < this[kConnections]) { + dispatcher = this[kFactory](this[kUrl], this[kOptions]) + this[kAddClient](dispatcher) } - options.agent = (0, agents_1.getAgent)(uri, reqOpts); - return { uri, options }; + + return dispatcher + } } -/** - * Convert a response from `fetch` to `request` format. - * @private - * @param opts The `request` options used to create the request. - * @param res The Fetch response - * @returns A `request` response object - */ -function fetchToRequestResponse(opts, res) { - const request = {}; - request.agent = opts.agent || false; - request.headers = (opts.headers || {}); - request.href = res.url; - // headers need to be converted from a map to an obj - const resHeaders = {}; - res.headers.forEach((value, key) => (resHeaders[key] = value)); - const response = Object.assign(res.body, { - statusCode: res.status, - statusMessage: res.statusText, - request, - body: res.body, - headers: resHeaders, - toJSON: () => ({ headers: resHeaders }), - }); - return response; + +module.exports = Pool + + +/***/ }), + +/***/ 7858: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(2785) +const { URL } = __nccwpck_require__(7310) +const Agent = __nccwpck_require__(7890) +const Pool = __nccwpck_require__(4634) +const DispatcherBase = __nccwpck_require__(4839) +const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(8045) +const buildConnector = __nccwpck_require__(2067) + +const kAgent = Symbol('proxy agent') +const kClient = Symbol('proxy client') +const kProxyHeaders = Symbol('proxy headers') +const kRequestTls = Symbol('request tls settings') +const kProxyTls = Symbol('proxy tls settings') +const kConnectEndpoint = Symbol('connect endpoint function') + +function defaultProtocolPort (protocol) { + return protocol === 'https:' ? 
443 : 80 } -/** - * Create POST body from two parts as multipart/related content-type - * @private - * @param boundary - * @param multipart - */ -function createMultipartStream(boundary, multipart) { - const finale = `--${boundary}--`; - const stream = new stream_1.PassThrough(); - for (const part of multipart) { - const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; - stream.write(preamble); - if (typeof part.body === 'string') { - stream.write(part.body); - stream.write('\r\n'); - } - else { - part.body.pipe(stream, { end: false }); - part.body.on('end', () => { - stream.write('\r\n'); - stream.write(finale); - stream.end(); - }); - } - } - return stream; + +function buildProxyOptions (opts) { + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + return { + uri: opts.uri, + protocol: opts.protocol || 'https' + } } -function teenyRequest(reqOpts, callback) { - const { uri, options } = requestToFetchOptions(reqOpts); - const multipart = reqOpts.multipart; - if (reqOpts.multipart && multipart.length === 2) { - if (!callback) { - // TODO: add support for multipart uploads through streaming - throw new Error('Multipart without callback is not implemented.'); - } - const boundary = uuid.v4(); - options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; - options.body = createMultipartStream(boundary, multipart); - // Multipart upload - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - const header = res.headers.get('content-type'); - const response = fetchToRequestResponse(options, res); - const body = response.body; - if (header === 'application/json' || - header === 'application/json; charset=utf-8') { - res.json().then(json => { - response.body = json; - callback(null, response, json); - }, (err) => { - callback(err, response, body); - }); - return; - } - res.text().then(text => { - response.body = text; - callback(null, response, text); - }, err => { - callback(err, response, body); - }); - }, err => { - teenyRequest.stats.requestFinished(); - callback(err, null, null); - }); - return; + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class ProxyAgent extends DispatcherBase { + constructor (opts) { + super(opts) + this[kProxy] = buildProxyOptions(opts) + this[kAgent] = new Agent(opts) + this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) + ? 
opts.interceptors.ProxyAgent + : [] + + if (typeof opts === 'string') { + opts = { uri: opts } } - if (callback === undefined) { - // Stream mode - const requestStream = streamEvents(new stream_1.PassThrough()); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let responseStream; - requestStream.once('reading', () => { - if (responseStream) { - (0, stream_1.pipeline)(responseStream, requestStream, () => { }); - } - else { - requestStream.once('response', () => { - (0, stream_1.pipeline)(responseStream, requestStream, () => { }); - }); - } - }); - options.compress = false; - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - responseStream = res.body; - responseStream.on('error', (err) => { - requestStream.emit('error', err); - }); - const response = fetchToRequestResponse(options, res); - requestStream.emit('response', response); - }, err => { - teenyRequest.stats.requestFinished(); - requestStream.emit('error', err); - }); - // fetch doesn't supply the raw HTTP stream, instead it - // returns a PassThrough piped from the HTTP response - // stream. - return requestStream; + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') } - // GET or POST with callback - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - const header = res.headers.get('content-type'); - const response = fetchToRequestResponse(options, res); - const body = response.body; - if (header === 'application/json' || - header === 'application/json; charset=utf-8') { - if (response.statusCode === 204) { - // Probably a DELETE - callback(null, response, body); - return; + + const { clientFactory = defaultFactory } = opts + + if (typeof clientFactory !== 'function') { + throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') + } + + this[kRequestTls] = opts.requestTls + this[kProxyTls] = opts.proxyTls + this[kProxyHeaders] = opts.headers || {} + + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + + if (opts.auth && opts.token) { + throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') + } else if (opts.auth) { + /* @deprecated in favour of opts.token */ + this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` + } else if (opts.token) { + this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` + } + + const connect = buildConnector({ ...opts.proxyTls }) + this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) + this[kClient] = clientFactory(resolvedUrl, { connect }) + this[kAgent] = new Agent({ + ...opts, + connect: async (opts, callback) => { + let requestedHost = opts.host + if (!opts.port) { + requestedHost += `:${defaultProtocolPort(opts.protocol)}` + } + try { + const { socket, statusCode } = await this[kClient].connect({ + origin, + port, + path: requestedHost, + signal: opts.signal, + headers: { + ...this[kProxyHeaders], + host } - res.json().then(json => { - response.body = json; - callback(null, response, json); - }, err => { - callback(err, response, body); - }); - return; + }) + if (statusCode !== 200) { + socket.on('error', () => {}).destroy() + callback(new 
RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) + } + if (opts.protocol !== 'https:') { + callback(null, socket) + return + } + let servername + if (this[kRequestTls]) { + servername = this[kRequestTls].servername + } else { + servername = opts.servername + } + this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) + } catch (err) { + callback(err) } - res.text().then(text => { - const response = fetchToRequestResponse(options, res); - response.body = text; - callback(null, response, text); - }, err => { - callback(err, response, body); - }); - }, err => { - teenyRequest.stats.requestFinished(); - callback(err, null, null); - }); - return; -} -exports.teenyRequest = teenyRequest; -teenyRequest.defaults = (defaults) => { - return (reqOpts, callback) => { - const opts = { ...defaults, ...reqOpts }; - if (callback === undefined) { - return teenyRequest(opts); + } + }) + } + + dispatch (opts, handler) { + const { host } = new URL(opts.origin) + const headers = buildHeaders(opts.headers) + throwIfProxyAuthIsSent(headers) + return this[kAgent].dispatch( + { + ...opts, + headers: { + ...headers, + host } - teenyRequest(opts, callback); - }; -}; + }, + handler + ) + } + + async [kClose] () { + await this[kAgent].close() + await this[kClient].close() + } + + async [kDestroy] () { + await this[kAgent].destroy() + await this[kClient].destroy() + } +} + /** - * Single instance of an interface for keeping track of things. + * @param {string[] | Record} headers + * @returns {Record} */ -teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); -teenyRequest.resetStats = () => { - teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); -}; -//# sourceMappingURL=index.js.map +function buildHeaders (headers) { + // When using undici.fetch, the headers list is stored + // as an array. + if (Array.isArray(headers)) { + /** @type {Record} */ + const headersPair = {} + + for (let i = 0; i < headers.length; i += 2) { + headersPair[headers[i]] = headers[i + 1] + } + + return headersPair + } + + return headers +} + +/** + * @param {Record} headers + * + * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers + * Nevertheless, it was changed and to avoid a security vulnerability by end users + * this check was created. 
+ * It should be removed in the next major version for performance reasons + */ +function throwIfProxyAuthIsSent (headers) { + const existProxyAuth = headers && Object.keys(headers) + .find((key) => key.toLowerCase() === 'proxy-authorization') + if (existProxyAuth) { + throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') + } +} + +module.exports = ProxyAgent + /***/ }), -/***/ 7835: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9459: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; +let fastNow = Date.now() +let fastNowTimeout + +const fastTimers = [] + +function onTimeout () { + fastNow = Date.now() + + let len = fastTimers.length + let idx = 0 + while (idx < len) { + const timer = fastTimers[idx] + + if (timer.state === 0) { + timer.state = fastNow + timer.delay + } else if (timer.state > 0 && fastNow >= timer.state) { + timer.state = -1 + timer.callback(timer.opaque) + } + + if (timer.state === -1) { + timer.state = -2 + if (idx !== len - 1) { + fastTimers[idx] = fastTimers.pop() + } else { + fastTimers.pop() + } + len -= 1 + } else { + idx += 1 + } } -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; + + if (fastTimers.length > 0) { + refreshTimeout() } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; +} + +function refreshTimeout () { + if (fastNowTimeout && fastNowTimeout.refresh) { + fastNowTimeout.refresh() + } else { + clearTimeout(fastNowTimeout) + fastNowTimeout = setTimeout(onTimeout, 1e3) + if (fastNowTimeout.unref) { + fastNowTimeout.unref() + } } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; +} + +class Timeout { + constructor (callback, delay, opaque) { + this.callback = callback + this.delay = delay + this.opaque = opaque + + // -2 not in timer list + // -1 in timer list but inactive + // 0 in timer list waiting for time + // > 0 in timer list waiting for time to expire + this.state = -2 + + this.refresh() } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; + + refresh () { + if (this.state === -2) { + fastTimers.push(this) + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + } + + this.state = 0 } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; + + clear () { + this.state = -1 } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; +} + +module.exports = { + setTimeout (callback, delay, opaque) { + return delay < 1e3 + ? 
setTimeout(callback, delay, opaque) + : new Timeout(callback, delay, opaque) + }, + clearTimeout (timeout) { + if (timeout instanceof Timeout) { + timeout.clear() + } else { + clearTimeout(timeout) + } } -})); +} -var _v = _interopRequireDefault(__nccwpck_require__(618)); -var _v2 = _interopRequireDefault(__nccwpck_require__(6888)); +/***/ }), -var _v3 = _interopRequireDefault(__nccwpck_require__(1186)); +/***/ 5354: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var _v4 = _interopRequireDefault(__nccwpck_require__(2385)); +"use strict"; -var _nil = _interopRequireDefault(__nccwpck_require__(5879)); -var _version = _interopRequireDefault(__nccwpck_require__(694)); +const diagnosticsChannel = __nccwpck_require__(7643) +const { uid, states } = __nccwpck_require__(9188) +const { + kReadyState, + kSentClose, + kByteParser, + kReceivedClose +} = __nccwpck_require__(7578) +const { fireEvent, failWebsocketConnection } = __nccwpck_require__(5515) +const { CloseEvent } = __nccwpck_require__(2611) +const { makeRequest } = __nccwpck_require__(8359) +const { fetching } = __nccwpck_require__(4881) +const { Headers } = __nccwpck_require__(554) +const { getGlobalDispatcher } = __nccwpck_require__(1892) +const { kHeadersList } = __nccwpck_require__(2785) + +const channels = {} +channels.open = diagnosticsChannel.channel('undici:websocket:open') +channels.close = diagnosticsChannel.channel('undici:websocket:close') +channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6113) +} catch { + +} + +/** + * @see https://websockets.spec.whatwg.org/#concept-websocket-establish + * @param {URL} url + * @param {string|string[]} protocols + * @param {import('./websocket').WebSocket} ws + * @param {(response: any) => void} onEstablish + * @param {Partial} options + */ +function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { + // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s + // scheme is "ws", and to "https" otherwise. + const requestURL = url + + requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:' + + // 2. Let request be a new request, whose URL is requestURL, client is client, + // service-workers mode is "none", referrer is "no-referrer", mode is + // "websocket", credentials mode is "include", cache mode is "no-store" , + // and redirect mode is "error". + const request = makeRequest({ + urlList: [requestURL], + serviceWorkers: 'none', + referrer: 'no-referrer', + mode: 'websocket', + credentials: 'include', + cache: 'no-store', + redirect: 'error' + }) -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); + // Note: undici extension, allow setting custom headers. + if (options.headers) { + const headersList = new Headers(options.headers)[kHeadersList] + + request.headersList = headersList + } + + // 3. Append (`Upgrade`, `websocket`) to request’s header list. + // 4. Append (`Connection`, `Upgrade`) to request’s header list. + // Note: both of these are handled by undici currently. + // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 + + // 5. Let keyValue be a nonce consisting of a randomly selected + // 16-byte value that has been forgiving-base64-encoded and + // isomorphic encoded. + const keyValue = crypto.randomBytes(16).toString('base64') + + // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s + // header list. 
+ request.headersList.append('sec-websocket-key', keyValue) + + // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s + // header list. + request.headersList.append('sec-websocket-version', '13') + + // 8. For each protocol in protocols, combine + // (`Sec-WebSocket-Protocol`, protocol) in request’s header + // list. + for (const protocol of protocols) { + request.headersList.append('sec-websocket-protocol', protocol) + } + + // 9. Let permessageDeflate be a user-agent defined + // "permessage-deflate" extension header value. + // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 + // TODO: enable once permessage-deflate is supported + const permessageDeflate = '' // 'permessage-deflate; 15' + + // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to + // request’s header list. + // request.headersList.append('sec-websocket-extensions', permessageDeflate) + + // 11. Fetch request with useParallelQueue set to true, and + // processResponse given response being these steps: + const controller = fetching({ + request, + useParallelQueue: true, + dispatcher: options.dispatcher ?? getGlobalDispatcher(), + processResponse (response) { + // 1. If response is a network error or its status is not 101, + // fail the WebSocket connection. + if (response.type === 'error' || response.status !== 101) { + failWebsocketConnection(ws, 'Received network error or non-101 status code.') + return + } -var _stringify = _interopRequireDefault(__nccwpck_require__(5802)); + // 2. If protocols is not the empty list and extracting header + // list values given `Sec-WebSocket-Protocol` and response’s + // header list results in null, failure, or the empty byte + // sequence, then fail the WebSocket connection. + if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Server did not respond with sent protocols.') + return + } -var _parse = _interopRequireDefault(__nccwpck_require__(893)); + // 3. Follow the requirements stated step 2 to step 6, inclusive, + // of the last set of steps in section 4.1 of The WebSocket + // Protocol to validate response. This either results in fail + // the WebSocket connection or the WebSocket connection is + // established. + + // 2. If the response lacks an |Upgrade| header field or the |Upgrade| + // header field contains a value that is not an ASCII case- + // insensitive match for the value "websocket", the client MUST + // _Fail the WebSocket Connection_. + if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { + failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') + return + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // 3. If the response lacks a |Connection| header field or the + // |Connection| header field doesn't contain a token that is an + // ASCII case-insensitive match for the value "Upgrade", the client + // MUST _Fail the WebSocket Connection_. + if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { + failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') + return + } -/***/ }), + // 4. 
If the response lacks a |Sec-WebSocket-Accept| header field or + // the |Sec-WebSocket-Accept| contains a value other than the + // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- + // Key| (as a string, not base64-decoded) with the string "258EAFA5- + // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and + // trailing whitespace, the client MUST _Fail the WebSocket + // Connection_. + const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') + const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') + if (secWSAccept !== digest) { + failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') + return + } -/***/ 9576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // 5. If the response includes a |Sec-WebSocket-Extensions| header + // field and this header field indicates the use of an extension + // that was not present in the client's handshake (the server has + // indicated an extension not requested by the client), the client + // MUST _Fail the WebSocket Connection_. (The parsing of this + // header field to determine which extensions are requested is + // discussed in Section 9.1.) + const secExtension = response.headersList.get('Sec-WebSocket-Extensions') + + if (secExtension !== null && secExtension !== permessageDeflate) { + failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') + return + } -"use strict"; + // 6. If the response includes a |Sec-WebSocket-Protocol| header field + // and this header field indicates the use of a subprotocol that was + // not present in the client's handshake (the server has indicated a + // subprotocol not requested by the client), the client MUST _Fail + // the WebSocket Connection_. + const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') + if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') + return + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + response.socket.on('data', onSocketData) + response.socket.on('close', onSocketClose) + response.socket.on('error', onSocketError) -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + if (channels.open.hasSubscribers) { + channels.open.publish({ + address: response.socket.address(), + protocol: secProtocol, + extensions: secExtension + }) + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + onEstablish(response) + } + }) -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + return controller +} + +/** + * @param {Buffer} chunk + */ +function onSocketData (chunk) { + if (!this.ws[kByteParser].write(chunk)) { + this.pause() + } +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 + */ +function onSocketClose () { + const { ws } = this + + // If the TCP connection was closed after the + // WebSocket closing handshake was completed, the WebSocket connection + // is said to have been closed _cleanly_. + const wasClean = ws[kSentClose] && ws[kReceivedClose] + + let code = 1005 + let reason = '' + + const result = ws[kByteParser].closingInfo + + if (result) { + code = result.code ?? 
1005 + reason = result.reason + } else if (!ws[kSentClose]) { + // If _The WebSocket + // Connection is Closed_ and no Close control frame was received by the + // endpoint (such as could occur if the underlying transport connection + // is lost), _The WebSocket Connection Close Code_ is considered to be + // 1006. + code = 1006 + } + + // 1. Change the ready state to CLOSED (3). + ws[kReadyState] = states.CLOSED + + // 2. If the user agent was required to fail the WebSocket + // connection, or if the WebSocket connection was closed + // after being flagged as full, fire an event named error + // at the WebSocket object. + // TODO + + // 3. Fire an event named close at the WebSocket object, + // using CloseEvent, with the wasClean attribute + // initialized to true if the connection closed cleanly + // and false otherwise, the code attribute initialized to + // the WebSocket connection close code, and the reason + // attribute initialized to the result of applying UTF-8 + // decode without BOM to the WebSocket connection close + // reason. + fireEvent('close', ws, CloseEvent, { + wasClean, code, reason + }) + + if (channels.close.hasSubscribers) { + channels.close.publish({ + websocket: ws, + code, + reason + }) + } +} + +function onSocketError (error) { + const { ws } = this + + ws[kReadyState] = states.CLOSING + + if (channels.socketError.hasSubscribers) { + channels.socketError.publish(error) } - return _crypto.default.createHash('md5').update(bytes).digest(); + this.destroy() +} + +module.exports = { + establishWebSocketConnection } -var _default = md5; -exports["default"] = _default; /***/ }), -/***/ 4974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9188: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +// This is a Globally Unique Identifier unique used +// to validate that the endpoint accepts websocket +// connections. +// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 +const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +const states = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3 +} -var _default = { - randomUUID: _crypto.default.randomUUID -}; -exports["default"] = _default; +const opcodes = { + CONTINUATION: 0x0, + TEXT: 0x1, + BINARY: 0x2, + CLOSE: 0x8, + PING: 0x9, + PONG: 0xA +} -/***/ }), +const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 -/***/ 5879: -/***/ ((__unused_webpack_module, exports) => { +const parserStates = { + INFO: 0, + PAYLOADLENGTH_16: 2, + PAYLOADLENGTH_64: 3, + READ_DATA: 4 +} -"use strict"; +const emptyBuffer = Buffer.allocUnsafe(0) +module.exports = { + uid, + staticPropertyDescriptors, + states, + opcodes, + maxUnsigned16Bit, + parserStates, + emptyBuffer +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; /***/ }), -/***/ 893: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 2611: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const { webidl } = __nccwpck_require__(1744) +const { kEnumerableProperty } = __nccwpck_require__(3983) +const { MessagePort } = __nccwpck_require__(1267) -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); +/** + * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent + */ +class MessageEvent extends Event { + #eventInit -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.MessageEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict } - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + get data () { + webidl.brandCheck(this, MessageEvent) - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ + return this.#eventInit.data + } - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ + get origin () { + webidl.brandCheck(this, MessageEvent) - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ return this.#eventInit.origin + } - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + get lastEventId () { + webidl.brandCheck(this, MessageEvent) - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} + return this.#eventInit.lastEventId + } -var _default = parse; -exports["default"] = _default; + get source () { + webidl.brandCheck(this, MessageEvent) -/***/ }), + return this.#eventInit.source + } -/***/ 7658: -/***/ ((__unused_webpack_module, exports) => { + get ports () { + webidl.brandCheck(this, MessageEvent) -"use strict"; + if (!Object.isFrozen(this.#eventInit.ports)) { + Object.freeze(this.#eventInit.ports) + } + return this.#eventInit.ports + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; + initMessageEvent ( + type, + bubbles = false, + cancelable = false, + data = null, + origin = '', + lastEventId = '', + source = null, + ports = [] + ) { + webidl.brandCheck(this, MessageEvent) -/***/ }), + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) -/***/ 2042: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return new MessageEvent(type, { + bubbles, cancelable, data, origin, lastEventId, source, ports + }) + } +} -"use strict"; +/** + * @see https://websockets.spec.whatwg.org/#the-closeevent-interface + */ +class CloseEvent extends Event { + #eventInit + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.CloseEventInit(eventInitDict) -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + super(type, eventInitDict) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + this.#eventInit = eventInitDict + } -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + get wasClean () { + webidl.brandCheck(this, CloseEvent) -let poolPtr = rnds8Pool.length; + return this.#eventInit.wasClean + } -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); + get code () { + webidl.brandCheck(this, CloseEvent) - poolPtr = 0; + return this.#eventInit.code } - return rnds8Pool.slice(poolPtr, poolPtr += 16); + get reason () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.reason + } } -/***/ }), +// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface +class ErrorEvent extends Event { + #eventInit -/***/ 6723: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + constructor (type, eventInitDict) { + webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) -"use strict"; + super(type, eventInitDict) + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? 
{}) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + this.#eventInit = eventInitDict + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + get message () { + webidl.brandCheck(this, ErrorEvent) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + return this.#eventInit.message + } -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + get filename () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.filename } - return _crypto.default.createHash('sha1').update(bytes).digest(); -} + get lineno () { + webidl.brandCheck(this, ErrorEvent) -var _default = sha1; -exports["default"] = _default; + return this.#eventInit.lineno + } -/***/ }), + get colno () { + webidl.brandCheck(this, ErrorEvent) -/***/ 5802: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return this.#eventInit.colno + } -"use strict"; + get error () { + webidl.brandCheck(this, ErrorEvent) + return this.#eventInit.error + } +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -exports.unsafeStringify = unsafeStringify; +Object.defineProperties(MessageEvent.prototype, { + [Symbol.toStringTag]: { + value: 'MessageEvent', + configurable: true + }, + data: kEnumerableProperty, + origin: kEnumerableProperty, + lastEventId: kEnumerableProperty, + source: kEnumerableProperty, + ports: kEnumerableProperty, + initMessageEvent: kEnumerableProperty +}) -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); +Object.defineProperties(CloseEvent.prototype, { + [Symbol.toStringTag]: { + value: 'CloseEvent', + configurable: true + }, + reason: kEnumerableProperty, + code: kEnumerableProperty, + wasClean: kEnumerableProperty +}) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +Object.defineProperties(ErrorEvent.prototype, { + [Symbol.toStringTag]: { + value: 'ErrorEvent', + configurable: true + }, + message: kEnumerableProperty, + filename: kEnumerableProperty, + lineno: kEnumerableProperty, + colno: kEnumerableProperty, + error: kEnumerableProperty +}) -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; +webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).slice(1)); -} +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.MessagePort +) -function unsafeStringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 - return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); -} +const eventInit = [ + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +] -function stringify(arr, offset = 0) { - const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +webidl.converters.MessageEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'data', + converter: webidl.converters.any, + defaultValue: null + }, + { + key: 'origin', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lastEventId', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'source', + // Node doesn't implement WindowProxy or ServiceWorker, so the only + // valid value for source is a MessagePort. + converter: webidl.nullableConverter(webidl.converters.MessagePort), + defaultValue: null + }, + { + key: 'ports', + converter: webidl.converters['sequence'], + get defaultValue () { + return [] + } + } +]) - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); +webidl.converters.CloseEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'wasClean', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'code', + converter: webidl.converters['unsigned short'], + defaultValue: 0 + }, + { + key: 'reason', + converter: webidl.converters.USVString, + defaultValue: '' } +]) - return uuid; +webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'message', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'filename', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lineno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'colno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'error', + converter: webidl.converters.any + } +]) + +module.exports = { + MessageEvent, + CloseEvent, + ErrorEvent } -var _default = stringify; -exports["default"] = _default; /***/ }), -/***/ 618: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5444: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const { maxUnsigned16Bit } = __nccwpck_require__(9188) -var _rng = _interopRequireDefault(__nccwpck_require__(2042)); +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6113) +} catch { -var _stringify = __nccwpck_require__(5802); +} 
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +class WebsocketFrameSend { + /** + * @param {Buffer|undefined} data + */ + constructor (data) { + this.frameData = data + this.maskKey = crypto.randomBytes(4) + } -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; + createFrame (opcode) { + const bodyLength = this.frameData?.byteLength ?? 0 -let _clockseq; // Previous uuid creation time + /** @type {number} */ + let payloadLength = bodyLength // 0-125 + let offset = 6 + if (bodyLength > maxUnsigned16Bit) { + offset += 8 // payload length is next 8 bytes + payloadLength = 127 + } else if (bodyLength > 125) { + offset += 2 // payload length is next 2 bytes + payloadLength = 126 + } -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + const buffer = Buffer.allocUnsafe(bodyLength + offset) -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 + // Clear first 2 bytes, everything else is overwritten + buffer[0] = buffer[1] = 0 + buffer[0] |= 0x80 // FIN + buffer[0] = (buffer[0] & 0xF0) + opcode // opcode - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); + /*! ws. MIT License. Einar Otto Stangvik */ + buffer[offset - 4] = this.maskKey[0] + buffer[offset - 3] = this.maskKey[1] + buffer[offset - 2] = this.maskKey[2] + buffer[offset - 1] = this.maskKey[3] - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } + buffer[1] = payloadLength - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + if (payloadLength === 126) { + buffer.writeUInt16BE(bodyLength, 2) + } else if (payloadLength === 127) { + // Clear extended payload length + buffer[2] = buffer[3] = 0 + buffer.writeUIntBE(bodyLength, 4, 6) } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + buffer[1] |= 0x80 // MASK - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock + // mask body + for (let i = 0; i < bodyLength; i++) { + buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] + } - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + return buffer + } +} - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression +module.exports = { + WebsocketFrameSend +} - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval +/***/ }), - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested +/***/ 1688: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch +const { Writable } = __nccwpck_require__(2781) +const diagnosticsChannel = __nccwpck_require__(7643) +const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(9188) +const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(7578) +const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(5515) +const { WebsocketFrameSend } = __nccwpck_require__(5444) - msecs += 12219292800000; // `time_low` +// This code was influenced by ws released under the MIT license. +// Copyright (c) 2011 Einar Otto Stangvik +// Copyright (c) 2013 Arnout Kazemier and contributors +// Copyright (c) 2016 Luigi Pinca and contributors - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` +const channels = {} +channels.ping = diagnosticsChannel.channel('undici:websocket:ping') +channels.pong = diagnosticsChannel.channel('undici:websocket:pong') - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` +class ByteParser extends Writable { + #buffers = [] + #byteOffset = 0 - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + #state = parserStates.INFO - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + #info = {} + #fragments = [] - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + constructor (ws) { + super() - b[i++] = clockseq & 0xff; // `node` + this.ws = ws + } - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; + /** + * @param {Buffer} chunk + * @param {() => void} callback + */ + _write (chunk, _, callback) { + this.#buffers.push(chunk) + this.#byteOffset += chunk.length + + this.run(callback) } - return buf || (0, _stringify.unsafeStringify)(b); -} + /** + * Runs whenever a new chunk is received. + * Callback is called whenever there are no more chunks buffering, + * or not enough bytes are buffered to parse. + */ + run (callback) { + while (true) { + if (this.#state === parserStates.INFO) { + // If there aren't enough bytes to parse the payload length, etc. 
+ if (this.#byteOffset < 2) { + return callback() + } -var _default = v1; -exports["default"] = _default; + const buffer = this.consume(2) -/***/ }), + this.#info.fin = (buffer[0] & 0x80) !== 0 + this.#info.opcode = buffer[0] & 0x0F -/***/ 6888: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // If we receive a fragmented message, we use the type of the first + // frame to parse the full message as binary/text, when it's terminated + this.#info.originalOpcode ??= this.#info.opcode -"use strict"; + this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { + // Only text and binary frames can be fragmented + failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') + return + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + const payloadLength = buffer[1] & 0x7F -var _v = _interopRequireDefault(__nccwpck_require__(8392)); + if (payloadLength <= 125) { + this.#info.payloadLength = payloadLength + this.#state = parserStates.READ_DATA + } else if (payloadLength === 126) { + this.#state = parserStates.PAYLOADLENGTH_16 + } else if (payloadLength === 127) { + this.#state = parserStates.PAYLOADLENGTH_64 + } -var _md = _interopRequireDefault(__nccwpck_require__(9576)); + if (this.#info.fragmented && payloadLength > 125) { + // A fragmented frame can't be fragmented itself + failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') + return + } else if ( + (this.#info.opcode === opcodes.PING || + this.#info.opcode === opcodes.PONG || + this.#info.opcode === opcodes.CLOSE) && + payloadLength > 125 + ) { + // Control frames can have a payload length of 125 bytes MAX + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') + return + } else if (this.#info.opcode === opcodes.CLOSE) { + if (payloadLength === 1) { + failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') + return + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + const body = this.consume(payloadLength) -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; + this.#info.closeInfo = this.parseCloseBody(false, body) -/***/ }), + if (!this.ws[kSentClose]) { + // If an endpoint receives a Close frame and did not previously send a + // Close frame, the endpoint MUST send a Close frame in response. (When + // sending a Close frame in response, the endpoint typically echos the + // status code it received.) + const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(this.#info.closeInfo.code, 0) + const closeFrame = new WebsocketFrameSend(body) -/***/ 8392: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this.ws[kResponse].socket.write( + closeFrame.createFrame(opcodes.CLOSE), + (err) => { + if (!err) { + this.ws[kSentClose] = true + } + } + ) + } -"use strict"; + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. 
+ this.ws[kReadyState] = states.CLOSING + this.ws[kReceivedClose] = true + this.end() -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports.URL = exports.DNS = void 0; -exports["default"] = v35; + return + } else if (this.#info.opcode === opcodes.PING) { + // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in + // response, unless it already received a Close frame. + // A Pong frame sent in response to a Ping frame must have identical + // "Application data" -var _stringify = __nccwpck_require__(5802); + const body = this.consume(payloadLength) -var _parse = _interopRequireDefault(__nccwpck_require__(893)); + if (!this.ws[kReceivedClose]) { + const frame = new WebsocketFrameSend(body) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape + if (channels.ping.hasSubscribers) { + channels.ping.publish({ + payload: body + }) + } + } - const bytes = []; + this.#state = parserStates.INFO - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } else if (this.#info.opcode === opcodes.PONG) { + // A Pong frame MAY be sent unsolicited. This serves as a + // unidirectional heartbeat. A response to an unsolicited Pong frame is + // not expected. - return bytes; -} + const body = this.consume(payloadLength) -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; + if (channels.pong.hasSubscribers) { + channels.pong.publish({ + payload: body + }) + } -function v35(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - var _namespace; + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } + } else if (this.#state === parserStates.PAYLOADLENGTH_16) { + if (this.#byteOffset < 2) { + return callback() + } - if (typeof value === 'string') { - value = stringToBytes(value); - } + const buffer = this.consume(2) - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } + this.#info.payloadLength = buffer.readUInt16BE(0) + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.PAYLOADLENGTH_64) { + if (this.#byteOffset < 8) { + return callback() + } - if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` + const buffer = this.consume(8) + const upper = buffer.readUInt32BE(0) + // 2^31 is the maxinimum bytes an arraybuffer can contain + // on 32-bit systems. Although, on 64-bit systems, this is + // 2^53-1 bytes. 
+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e + if (upper > 2 ** 31 - 1) { + failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') + return + } - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; + const lower = buffer.readUInt32BE(4) - if (buf) { - offset = offset || 0; + this.#info.payloadLength = (upper << 8) + lower + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.READ_DATA) { + if (this.#byteOffset < this.#info.payloadLength) { + // If there is still more data in this chunk that needs to be read + return callback() + } else if (this.#byteOffset >= this.#info.payloadLength) { + // If the server sent multiple frames in a single chunk - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; + const body = this.consume(this.#info.payloadLength) + + this.#fragments.push(body) + + // If the frame is unfragmented, or a fragmented frame was terminated, + // a message was received + if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { + const fullMessage = Buffer.concat(this.#fragments) + + websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) + + this.#info = {} + this.#fragments.length = 0 + } + + this.#state = parserStates.INFO + } } - return buf; + if (this.#byteOffset > 0) { + continue + } else { + callback() + break + } } + } - return (0, _stringify.unsafeStringify)(bytes); - } // Function#name is not settable on some platforms (#270) + /** + * Take n bytes from the buffered Buffers + * @param {number} n + * @returns {Buffer|null} + */ + consume (n) { + if (n > this.#byteOffset) { + return null + } else if (n === 0) { + return emptyBuffer + } + if (this.#buffers[0].length === n) { + this.#byteOffset -= this.#buffers[0].length + return this.#buffers.shift() + } - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support + const buffer = Buffer.allocUnsafe(n) + let offset = 0 + while (offset !== n) { + const next = this.#buffers[0] + const { length } = next - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} + if (length + offset === n) { + buffer.set(this.#buffers.shift(), offset) + break + } else if (length + offset > n) { + buffer.set(next.subarray(0, n - offset), offset) + this.#buffers[0] = next.subarray(n - offset) + break + } else { + buffer.set(this.#buffers.shift(), offset) + offset += next.length + } + } -/***/ }), + this.#byteOffset -= n -/***/ 1186: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return buffer + } -"use strict"; + parseCloseBody (onlyCode, data) { + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 + /** @type {number|undefined} */ + let code + if (data.length >= 2) { + // _The WebSocket Connection Close Code_ is + // defined as the status code (Section 7.4) contained in the first Close + // control frame received by the application + code = data.readUInt16BE(0) + } -Object.defineProperty(exports, "__esModule", 
({ - value: true -})); -exports["default"] = void 0; + if (onlyCode) { + if (!isValidStatusCode(code)) { + return null + } -var _native = _interopRequireDefault(__nccwpck_require__(4974)); + return { code } + } -var _rng = _interopRequireDefault(__nccwpck_require__(2042)); + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 + /** @type {Buffer} */ + let reason = data.subarray(2) -var _stringify = __nccwpck_require__(5802); + // Remove BOM + if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { + reason = reason.subarray(3) + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (code !== undefined && !isValidStatusCode(code)) { + return null + } -function v4(options, buf, offset) { - if (_native.default.randomUUID && !buf && !options) { - return _native.default.randomUUID(); + try { + // TODO: optimize this + reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) + } catch { + return null + } + + return { code, reason } } - options = options || {}; + get closingInfo () { + return this.#info.closeInfo + } +} - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` +module.exports = { + ByteParser +} - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided +/***/ }), - if (buf) { - offset = offset || 0; +/***/ 7578: +/***/ ((module) => { - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } +"use strict"; - return buf; - } - return (0, _stringify.unsafeStringify)(rnds); +module.exports = { + kWebSocketURL: Symbol('url'), + kReadyState: Symbol('ready state'), + kController: Symbol('controller'), + kResponse: Symbol('response'), + kBinaryType: Symbol('binary type'), + kSentClose: Symbol('sent close'), + kReceivedClose: Symbol('received close'), + kByteParser: Symbol('byte parser') } -var _default = v4; -exports["default"] = _default; /***/ }), -/***/ 2385: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5515: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(7578) +const { states, opcodes } = __nccwpck_require__(9188) +const { MessageEvent, ErrorEvent } = __nccwpck_require__(2611) -var _v = _interopRequireDefault(__nccwpck_require__(8392)); +/* globals Blob */ -var _sha = _interopRequireDefault(__nccwpck_require__(6723)); +/** + * @param {import('./websocket').WebSocket} ws + */ +function isEstablished (ws) { + // If the server's response is validated as provided for above, it is + // said that _The WebSocket Connection is Established_ and that the + // WebSocket Connection is in the OPEN state. + return ws[kReadyState] === states.OPEN +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosing (ws) { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. 
+ return ws[kReadyState] === states.CLOSING +} -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosed (ws) { + return ws[kReadyState] === states.CLOSED +} -/***/ }), +/** + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e + * @param {EventTarget} target + * @param {EventInit | undefined} eventInitDict + */ +function fireEvent (e, target, eventConstructor = Event, eventInitDict) { + // 1. If eventConstructor is not given, then let eventConstructor be Event. -/***/ 4875: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // 2. Let event be the result of creating an event given eventConstructor, + // in the relevant realm of target. + // 3. Initialize event’s type attribute to e. + const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap -"use strict"; + // 4. Initialize any other IDL attributes of event as described in the + // invocation of this algorithm. + // 5. Return the result of dispatching event at target, with legacy target + // override flag set if set. + target.dispatchEvent(event) +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @param {import('./websocket').WebSocket} ws + * @param {number} type Opcode + * @param {Buffer} data application data + */ +function websocketMessageReceived (ws, type, data) { + // 1. If ready state is not OPEN (1), then return. + if (ws[kReadyState] !== states.OPEN) { + return + } -var _regex = _interopRequireDefault(__nccwpck_require__(7658)); + // 2. Let dataForEvent be determined by switching on type and binary type: + let dataForEvent -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (type === opcodes.TEXT) { + // -> type indicates that the data is Text + // a new DOMString containing data + try { + dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) + } catch { + failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') + return + } + } else if (type === opcodes.BINARY) { + if (ws[kBinaryType] === 'blob') { + // -> type indicates that the data is Binary and binary type is "blob" + // a new Blob object, created in the relevant Realm of the WebSocket + // object, that represents data as its raw data + dataForEvent = new Blob([data]) + } else { + // -> type indicates that the data is Binary and binary type is "arraybuffer" + // a new ArrayBuffer object, created in the relevant Realm of the + // WebSocket object, whose contents are data + dataForEvent = new Uint8Array(data).buffer + } + } -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); + // 3. Fire an event named message at the WebSocket object, using MessageEvent, + // with the origin attribute initialized to the serialization of the WebSocket + // object’s url's origin, and the data attribute initialized to dataForEvent. 
+ fireEvent('message', ws, MessageEvent, { + origin: ws[kWebSocketURL].origin, + data: dataForEvent + }) } -var _default = validate; -exports["default"] = _default; +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455 + * @see https://datatracker.ietf.org/doc/html/rfc2616 + * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 + * @param {string} protocol + */ +function isValidSubprotocol (protocol) { + // If present, this value indicates one + // or more comma-separated subprotocol the client wishes to speak, + // ordered by preference. The elements that comprise this value + // MUST be non-empty strings with characters in the range U+0021 to + // U+007E not including separator characters as defined in + // [RFC2616] and MUST all be unique strings. + if (protocol.length === 0) { + return false + } -/***/ }), + for (const char of protocol) { + const code = char.charCodeAt(0) -/***/ 694: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if ( + code < 0x21 || + code > 0x7E || + char === '(' || + char === ')' || + char === '<' || + char === '>' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' || + code === 32 || // SP + code === 9 // HT + ) { + return false + } + } -"use strict"; + return true +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 + * @param {number} code + */ +function isValidStatusCode (code) { + if (code >= 1000 && code < 1015) { + return ( + code !== 1004 && // reserved + code !== 1005 && // "MUST NOT be set as a status code" + code !== 1006 // "MUST NOT be set as a status code" + ) + } + return code >= 3000 && code <= 4999 +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/** + * @param {import('./websocket').WebSocket} ws + * @param {string|undefined} reason + */ +function failWebsocketConnection (ws, reason) { + const { [kController]: controller, [kResponse]: response } = ws -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); + controller.abort() -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + if (response?.socket && !response.socket.destroyed) { + response.socket.destroy() + } -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); + if (reason) { + fireEvent('error', ws, ErrorEvent, { + error: new Error(reason) + }) } +} - return parseInt(uuid.slice(14, 15), 16); +module.exports = { + isEstablished, + isClosing, + isClosed, + fireEvent, + isValidSubprotocol, + isValidStatusCode, + failWebsocketConnection, + websocketMessageReceived } -var _default = version; -exports["default"] = _default; /***/ }), -/***/ 4256: +/***/ 4284: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var punycode = __nccwpck_require__(5477); -var mappingTable = __nccwpck_require__(2020); +const { webidl } = __nccwpck_require__(1744) +const { DOMException } = __nccwpck_require__(1037) +const { URLSerializer } = __nccwpck_require__(685) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(9188) +const { + kWebSocketURL, + kReadyState, + kController, + kBinaryType, + kResponse, + kSentClose, + kByteParser +} = __nccwpck_require__(7578) +const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(5515) +const { establishWebSocketConnection } = __nccwpck_require__(5354) +const { WebsocketFrameSend } = __nccwpck_require__(5444) +const { ByteParser } = __nccwpck_require__(1688) +const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3983) +const { getGlobalDispatcher } = __nccwpck_require__(1892) +const { types } = __nccwpck_require__(3837) + +let experimentalWarned = false + +// https://websockets.spec.whatwg.org/#interface-definition +class WebSocket extends EventTarget { + #events = { + open: null, + error: null, + close: null, + message: null + } + + #bufferedAmount = 0 + #protocol = '' + #extensions = '' -var PROCESSING_OPTIONS = { - TRANSITIONAL: 0, - NONTRANSITIONAL: 1 -}; + /** + * @param {string} url + * @param {string|string[]} protocols + */ + constructor (url, protocols = []) { + super() -function normalize(str) { // fix bug in v8 - return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); -} + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) -function findStatus(val) { - var start = 0; - var end = mappingTable.length - 1; + if (!experimentalWarned) { + experimentalWarned = true + process.emitWarning('WebSockets are experimental, expect them to change at any time.', { + code: 'UNDICI-WS' + }) + } - while (start <= end) { - var mid = Math.floor((start + end) / 2); + const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) - var target = mappingTable[mid]; - if (target[0][0] <= val && target[0][1] >= val) { - return target; - } else if (target[0][0] > val) { - end = mid - 1; - } else { - start = mid + 1; - } - } + url = webidl.converters.USVString(url) + protocols = options.protocols - return null; -} + // 1. Let baseURL be this's relevant settings object's API base URL. + const baseURL = getGlobalOrigin() -var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. 
+ let urlRecord -function countSymbols(string) { - return string - // replace every surrogate pair with a BMP symbol - .replace(regexAstralSymbols, '_') - // then get the length - .length; -} + try { + urlRecord = new URL(url, baseURL) + } catch (e) { + // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. + throw new DOMException(e, 'SyntaxError') + } -function mapChars(domain_name, useSTD3, processing_option) { - var hasError = false; - var processed = ""; + // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". + if (urlRecord.protocol === 'http:') { + urlRecord.protocol = 'ws:' + } else if (urlRecord.protocol === 'https:') { + // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". + urlRecord.protocol = 'wss:' + } - var len = countSymbols(domain_name); - for (var i = 0; i < len; ++i) { - var codePoint = domain_name.codePointAt(i); - var status = findStatus(codePoint); + // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. + if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { + throw new DOMException( + `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, + 'SyntaxError' + ) + } - switch (status[1]) { - case "disallowed": - hasError = true; - processed += String.fromCodePoint(codePoint); - break; - case "ignored": - break; - case "mapped": - processed += String.fromCodePoint.apply(String, status[2]); - break; - case "deviation": - if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { - processed += String.fromCodePoint.apply(String, status[2]); - } else { - processed += String.fromCodePoint(codePoint); - } - break; - case "valid": - processed += String.fromCodePoint(codePoint); - break; - case "disallowed_STD3_mapped": - if (useSTD3) { - hasError = true; - processed += String.fromCodePoint(codePoint); - } else { - processed += String.fromCodePoint.apply(String, status[2]); - } - break; - case "disallowed_STD3_valid": - if (useSTD3) { - hasError = true; - } + // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" + // DOMException. + if (urlRecord.hash || urlRecord.href.endsWith('#')) { + throw new DOMException('Got fragment', 'SyntaxError') + } - processed += String.fromCodePoint(codePoint); - break; + // 8. If protocols is a string, set protocols to a sequence consisting + // of just that string. + if (typeof protocols === 'string') { + protocols = [protocols] } - } - return { - string: processed, - error: hasError - }; -} + // 9. If any of the values in protocols occur more than once or otherwise + // fail to match the requirements for elements that comprise the value + // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket + // protocol, then throw a "SyntaxError" DOMException. 
+ if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } -var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } -function validateLabel(label, processing_option) { - if (label.substr(0, 4) === "xn--") { - label = punycode.toUnicode(label); - processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; - } + // 10. Set this's url to urlRecord. + this[kWebSocketURL] = new URL(urlRecord.href) - var error = false; + // 11. Let client be this's relevant settings object. 
- if (normalize(label) !== label || - (label[3] === "-" && label[4] === "-") || - label[0] === "-" || label[label.length - 1] === "-" || - label.indexOf(".") !== -1 || - label.search(combiningMarksRegex) === 0) { - error = true; - } + // 12. Run this step in parallel: - var len = countSymbols(label); - for (var i = 0; i < len; ++i) { - var status = findStatus(label.codePointAt(i)); - if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || - (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && - status[1] !== "valid" && status[1] !== "deviation")) { - error = true; - break; - } - } + // 1. Establish a WebSocket connection given urlRecord, protocols, + // and client. + this[kController] = establishWebSocketConnection( + urlRecord, + protocols, + this, + (response) => this.#onConnectionEstablished(response), + options + ) - return { - label: label, - error: error - }; -} + // Each WebSocket object has an associated ready state, which is a + // number representing the state of the connection. Initially it must + // be CONNECTING (0). + this[kReadyState] = WebSocket.CONNECTING -function processing(domain_name, useSTD3, processing_option) { - var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); + // The extensions attribute must initially return the empty string. - var labels = result.string.split("."); - for (var i = 0; i < labels.length; ++i) { - try { - var validation = validateLabel(labels[i]); - labels[i] = validation.label; - result.error = result.error || validation.error; - } catch(e) { - result.error = true; - } + // The protocol attribute must initially return the empty string. + + // Each WebSocket object has an associated binary type, which is a + // BinaryType. Initially it must be "blob". + this[kBinaryType] = 'blob' } - return { - string: labels.join("."), - error: result.error - }; -} + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-close + * @param {number|undefined} code + * @param {string|undefined} reason + */ + close (code = undefined, reason = undefined) { + webidl.brandCheck(this, WebSocket) -module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { - var result = processing(domain_name, useSTD3, processing_option); - var labels = result.string.split("."); - labels = labels.map(function(l) { - try { - return punycode.toASCII(l); - } catch(e) { - result.error = true; - return l; + if (code !== undefined) { + code = webidl.converters['unsigned short'](code, { clamp: true }) } - }); - if (verifyDnsLength) { - var total = labels.slice(0, labels.length - 1).join(".").length; - if (total.length > 253 || total.length === 0) { - result.error = true; + if (reason !== undefined) { + reason = webidl.converters.USVString(reason) } - for (var i=0; i < labels.length; ++i) { - if (labels.length > 63 || labels.length === 0) { - result.error = true; - break; + // 1. If code is present, but is neither an integer equal to 1000 nor an + // integer in the range 3000 to 4999, inclusive, throw an + // "InvalidAccessError" DOMException. + if (code !== undefined) { + if (code !== 1000 && (code < 3000 || code > 4999)) { + throw new DOMException('invalid code', 'InvalidAccessError') } } - } - if (result.error) return null; - return labels.join("."); -}; + let reasonByteLength = 0 -module.exports.toUnicode = function(domain_name, useSTD3) { - var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + // 2. 
If reason is present, then run these substeps: + if (reason !== undefined) { + // 1. Let reasonBytes be the result of encoding reason. + // 2. If reasonBytes is longer than 123 bytes, then throw a + // "SyntaxError" DOMException. + reasonByteLength = Buffer.byteLength(reason) - return { - domain: result.string, - error: result.error - }; -}; + if (reasonByteLength > 123) { + throw new DOMException( + `Reason must be less than 123 bytes; received ${reasonByteLength}`, + 'SyntaxError' + ) + } + } -module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; + // 3. Run the first matching steps from the following list: + if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(this)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). + failWebsocketConnection(this, 'Connection was closed before it was established.') + this[kReadyState] = WebSocket.CLOSING + } else if (!isClosing(this)) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + + const frame = new WebsocketFrameSend() + + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket -/***/ }), + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + this[kSentClose] = true + } + }) -/***/ 4294: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + this[kReadyState] = WebSocket.CLOSING + } + } -module.exports = __nccwpck_require__(4219); + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-send + * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data + */ + send (data) { + webidl.brandCheck(this, WebSocket) + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) -/***/ }), + data = webidl.converters.WebSocketSendData(data) -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // 1. 
If this's ready state is CONNECTING, then throw an + // "InvalidStateError" DOMException. + if (this[kReadyState] === WebSocket.CONNECTING) { + throw new DOMException('Sent before connected.', 'InvalidStateError') + } -"use strict"; + // 2. Run the appropriate set of steps from the following list: + // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + if (!isEstablished(this) || isClosing(this)) { + return + } -var net = __nccwpck_require__(1808); -var tls = __nccwpck_require__(4404); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var events = __nccwpck_require__(2361); -var assert = __nccwpck_require__(9491); -var util = __nccwpck_require__(3837); + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + // If data is a string + if (typeof data === 'string') { + // If the WebSocket connection is established and the WebSocket + // closing handshake has not yet started, then the user agent + // must send a WebSocket Message comprised of the data argument + // using a text frame opcode; if the data cannot be sent, e.g. + // because it would need to be buffered but the buffer is full, + // the user agent must flag the WebSocket as full and then close + // the WebSocket connection. Any invocation of this method with a + // string argument that does not throw an exception must increase + // the bufferedAmount attribute by the number of bytes needed to + // express the argument as UTF-8. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.TEXT) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (types.isArrayBuffer(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need + // to be buffered but the buffer is full, the user agent must flag + // the WebSocket as full and then close the WebSocket connection. + // The data to be sent is the data stored in the buffer described + // by the ArrayBuffer object. Any invocation of this method with an + // ArrayBuffer argument that does not throw an exception must + // increase the bufferedAmount attribute by the length of the + // ArrayBuffer in bytes. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (ArrayBuffer.isView(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The + // data to be sent is the data stored in the section of the buffer + // described by the ArrayBuffer object that data references. 
Any + // invocation of this method with this kind of argument that does + // not throw an exception must increase the bufferedAmount attribute + // by the length of data’s buffer in bytes. + + const ab = Buffer.from(data, data.byteOffset, data.byteLength) + + const frame = new WebsocketFrameSend(ab) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += ab.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= ab.byteLength + }) + } else if (isBlobLike(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The data + // to be sent is the raw data represented by the Blob object. Any + // invocation of this method with a Blob argument that does not throw + // an exception must increase the bufferedAmount attribute by the size + // of the Blob object’s raw data, in bytes. + + const frame = new WebsocketFrameSend() + + data.arrayBuffer().then((ab) => { + const value = Buffer.from(ab) + frame.frameData = value + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + }) + } + } + get readyState () { + webidl.brandCheck(this, WebSocket) -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; + // The readyState getter steps are to return this's ready state. + return this[kReadyState] + } + get bufferedAmount () { + webidl.brandCheck(this, WebSocket) -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} + return this.#bufferedAmount + } -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + get url () { + webidl.brandCheck(this, WebSocket) -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} + // The url getter steps are to return this's url, serialized. + return URLSerializer(this[kWebSocketURL]) + } -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + get extensions () { + webidl.brandCheck(this, WebSocket) + return this.#extensions + } -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; + get protocol () { + webidl.brandCheck(this, WebSocket) - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. 
- self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); + return this.#protocol + } -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + get onopen () { + webidl.brandCheck(this, WebSocket) - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; + return this.#events.open } - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); + set onopen (fn) { + webidl.brandCheck(this, WebSocket) - function onFree() { - self.emit('free', socket, options); + if (this.#events.open) { + this.removeEventListener('open', this.#events.open) } - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); + if (typeof fn === 'function') { + this.#events.open = fn + this.addEventListener('open', fn) + } else { + this.#events.open = null } - }); -}; + } -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + get onerror () { + webidl.brandCheck(this, WebSocket) - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port + return this.#events.error + } + + set onerror (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.error) { + this.removeEventListener('error', this.#events.error) + } + + if (typeof fn === 'function') { + this.#events.error = fn + this.addEventListener('error', fn) + } else { + this.#events.error = null } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); + + get onclose () { + webidl.brandCheck(this, WebSocket) + + return this.#events.close } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + set onclose (fn) { + webidl.brandCheck(this, WebSocket) - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; + if (this.#events.close) { + this.removeEventListener('close', this.#events.close) + } + + if (typeof fn === 'function') { + this.#events.close = fn + this.addEventListener('close', fn) + } else { + this.#events.close = null + } } - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); + get onmessage () { + webidl.brandCheck(this, WebSocket) + + return this.#events.message } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + set onmessage (fn) { + webidl.brandCheck(this, WebSocket) - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + if (this.#events.message) { + this.removeEventListener('message', this.#events.message) } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + + if (typeof fn === 'function') { + this.#events.message = fn + this.addEventListener('message', fn) + } else { + this.#events.message = null } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); } - function onError(cause) { - connectReq.removeAllListeners(); + get binaryType () { + webidl.brandCheck(this, WebSocket) - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); + return this[kBinaryType] } -}; -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); + set binaryType (type) { + webidl.brandCheck(this, WebSocket) - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); + if (type !== 'blob' && type !== 'arraybuffer') { + this[kBinaryType] = 'blob' + } else { + this[kBinaryType] = type + } } -}; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); + /** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + */ + #onConnectionEstablished (response) { + // processResponse is called when the "response’s header list has been received and initialized." + // once this happens, the connection is open + this[kResponse] = response + + const parser = new ByteParser(this) + parser.on('drain', function onParserDrain () { + this.ws[kResponse].socket.resume() + }) - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} + response.socket.ws = this + this[kByteParser] = parser + + // 1. Change the ready state to OPEN (1). + this[kReadyState] = states.OPEN + + // 2. 
Change the extensions attribute’s value to the extensions in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 + const extensions = response.headersList.get('sec-websocket-extensions') + + if (extensions !== null) { + this.#extensions = extensions + } + + // 3. Change the protocol attribute’s value to the subprotocol in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 + const protocol = response.headersList.get('sec-websocket-protocol') + + if (protocol !== null) { + this.#protocol = protocol + } + + // 4. Fire an event named open at the WebSocket object. + fireEvent('open', this) + } +} + +// https://websockets.spec.whatwg.org/#dom-websocket-connecting +WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING +// https://websockets.spec.whatwg.org/#dom-websocket-open +WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN +// https://websockets.spec.whatwg.org/#dom-websocket-closing +WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING +// https://websockets.spec.whatwg.org/#dom-websocket-closed +WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED + +Object.defineProperties(WebSocket.prototype, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors, + url: kEnumerableProperty, + readyState: kEnumerableProperty, + bufferedAmount: kEnumerableProperty, + onopen: kEnumerableProperty, + onerror: kEnumerableProperty, + onclose: kEnumerableProperty, + close: kEnumerableProperty, + onmessage: kEnumerableProperty, + binaryType: kEnumerableProperty, + send: kEnumerableProperty, + extensions: kEnumerableProperty, + protocol: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'WebSocket', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(WebSocket, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors +}) +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.DOMString +) -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; +webidl.converters['DOMString or sequence'] = function (V) { + if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { + return webidl.converters['sequence'](V) } - return host; // for v0.11 or later + + return webidl.converters.DOMString(V) } -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } +// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 +webidl.converters.WebSocketInit = webidl.dictionaryConverter([ + { + key: 'protocols', + converter: webidl.converters['DOMString or sequence'], + get defaultValue () { + return [] + } + }, + { + key: 'dispatcher', + converter: (V) => V, + get defaultValue () { + return getGlobalDispatcher() } + }, + { + key: 'headers', + converter: webidl.nullableConverter(webidl.converters.HeadersInit) } - return target; +]) + +webidl.converters['DOMString or sequence or WebSocketInit'] = 
function (V) { + if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { + return webidl.converters.WebSocketInit(V) + } + + return { protocols: webidl.converters['DOMString or sequence'](V) } } +webidl.converters.WebSocketSendData = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + return webidl.converters.BufferSource(V) } - console.error.apply(console, args); } -} else { - debug = function() {}; + + return webidl.converters.USVString(V) +} + +module.exports = { + WebSocket } -exports.debug = debug; // for test /***/ }), @@ -71475,7 +56632,7 @@ function getUserAgent() { return navigator.userAgent; } - if (typeof process === "object" && "version" in process) { + if (typeof process === "object" && process.version !== undefined) { return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; } @@ -74080,788 +59237,15690 @@ module.exports.parseURL = function (input, options) { /***/ }), -/***/ 3185: -/***/ ((module) => { +/***/ 3185: +/***/ ((module) => { + +"use strict"; + + +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + + + +/***/ }), + +/***/ 2940: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), + +/***/ 5185: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. + this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. 
+ // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + +/***/ }), + +/***/ 2877: +/***/ ((module) => { + +module.exports = eval("require")("encoding"); + + +/***/ }), + +/***/ 9491: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 852: +/***/ ((module) => { + +"use strict"; +module.exports = require("async_hooks"); + +/***/ }), + +/***/ 4300: +/***/ ((module) => { + +"use strict"; +module.exports = require("buffer"); + +/***/ }), + +/***/ 2081: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 6206: +/***/ ((module) => { + +"use strict"; +module.exports = require("console"); + +/***/ }), + +/***/ 6113: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + +/***/ 7643: +/***/ ((module) => { + +"use strict"; +module.exports = require("diagnostics_channel"); + +/***/ }), + +/***/ 2361: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 7147: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 3685: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 5158: +/***/ ((module) => { + +"use strict"; +module.exports = require("http2"); + +/***/ }), + +/***/ 5687: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 1808: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 5673: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 4492: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:stream"); + +/***/ }), + +/***/ 7261: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + +/***/ 2037: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 1017: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 4074: +/***/ ((module) => { + +"use strict"; +module.exports = require("perf_hooks"); + +/***/ }), + +/***/ 5477: +/***/ ((module) => { + +"use strict"; +module.exports = require("punycode"); + +/***/ }), + +/***/ 3477: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + +/***/ 2781: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 5356: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream/web"); + +/***/ }), + +/***/ 1576: +/***/ ((module) => { + +"use strict"; +module.exports = require("string_decoder"); + +/***/ }), + +/***/ 4404: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 6224: +/***/ ((module) => { + +"use strict"; 
+module.exports = require("tty"); + +/***/ }), + +/***/ 7310: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 3837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 9830: +/***/ ((module) => { + +"use strict"; +module.exports = require("util/types"); + +/***/ }), + +/***/ 1267: +/***/ ((module) => { + +"use strict"; +module.exports = require("worker_threads"); + +/***/ }), + +/***/ 9796: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + +/***/ }), + +/***/ 2960: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(4492).Writable) +const inherits = (__nccwpck_require__(7261).inherits) + +const StreamSearch = __nccwpck_require__(1142) + +const PartStream = __nccwpck_require__(1620) +const HeaderParser = __nccwpck_require__(2032) + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + 
+Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer + + +/***/ }), + +/***/ 2032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = (__nccwpck_require__(5673).EventEmitter) +const inherits = (__nccwpck_require__(7261).inherits) +const getLimit = __nccwpck_require__(1467) + +const StreamSearch = __nccwpck_require__(1142) + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser + + +/***/ }), + +/***/ 1620: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const inherits = (__nccwpck_require__(7261).inherits) +const ReadableStream = (__nccwpck_require__(4492).Readable) + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream + + +/***/ }), + +/***/ 1142: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = (__nccwpck_require__(5673).EventEmitter) +const inherits = (__nccwpck_require__(7261).inherits) + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH + + +/***/ }), + +/***/ 727: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(4492).Writable) +const { inherits } = __nccwpck_require__(7261) +const Dicer = __nccwpck_require__(2960) + +const MultipartParser = __nccwpck_require__(2183) +const UrlencodedParser = __nccwpck_require__(8306) +const parseParams = __nccwpck_require__(1854) + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports["default"] = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer + + +/***/ }), + +/***/ 2183: 
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = __nccwpck_require__(4492) +const { inherits } = __nccwpck_require__(7261) + +const Dicer = __nccwpck_require__(2960) + +const parseParams = __nccwpck_require__(1854) +const decodeText = __nccwpck_require__(4619) +const basename = __nccwpck_require__(8647) +const getLimit = __nccwpck_require__(1467) + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) { + contype = 
parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (!boy._events.file) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart + + +/***/ }), + +/***/ 8306: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Decoder = __nccwpck_require__(7100) +const decodeText = __nccwpck_require__(4619) +const getLimit = __nccwpck_require__(1467) + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } 
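+ // when idxamp === p, the '&' opened this slice: no new key bytes were read here, so the key buffered by earlier write() calls is used as-is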
else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
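+ // a three-byte escape such as '%20' decodes to a single character, so the decoded value is re-measured below before the field is marked truncated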
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded + + +/***/ }), + +/***/ 7100: +/***/ ((module) => { + +"use strict"; + + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder + + +/***/ }), + +/***/ 8647: +/***/ ((module) => { + +"use strict"; + + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? '' : path) +} + + +/***/ }), + +/***/ 4619: +/***/ (function(module) { + +"use strict"; + + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? 
+ case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch (e) { } + } + return typeof data === 'string' + ? data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText + + +/***/ }), + +/***/ 1467: +/***/ ((module) => { + +"use strict"; + + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} + + +/***/ }), + +/***/ 1854: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* eslint-disable object-property-newline */ + + +const decodeText = __nccwpck_require__(4619) + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': 
'\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + 
'%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state 
= STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams + + +/***/ }), + +/***/ 2989: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = __nccwpck_require__(9203); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. 
ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. 
+ * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); + + +/***/ }), + +/***/ 3973: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(4052); +const paginator_1 = __nccwpck_require__(6412); +const promisify_1 = __nccwpck_require__(9203); +const fs = __importStar(__nccwpck_require__(7147)); +const mime = __importStar(__nccwpck_require__(3583)); +const path = __importStar(__nccwpck_require__(1017)); +const p_limit_1 = __importDefault(__nccwpck_require__(7684)); +const util_1 = __nccwpck_require__(3837); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const util_js_1 = __nccwpck_require__(9258); +const acl_js_1 = __nccwpck_require__(2989); +const file_js_1 = __nccwpck_require__(4713); +const iam_js_1 = __nccwpck_require__(352); +const notification_js_1 = __nccwpck_require__(1178); +const storage_js_1 = __nccwpck_require__(6007); +const signer_js_1 = __nccwpck_require__(9019); +const stream_1 = __nccwpck_require__(2781); +const url_1 = __nccwpck_require__(7310); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. 
+ * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. 
You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. 
+ * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. 
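// Editor's aside (illustrative sketch, not part of this patch): the
// "get or create" behavior described above, in promise form. The bucket name
// 'albums' is a hypothetical placeholder; any create-time configuration is
// passed in the same options object alongside `autoCreate` (see the note
// that continues below).
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

storage.bucket('albums').get({autoCreate: true}).then(function(data) {
  const bucket = data[0]; // Fetched if it already existed, created otherwise.
  const apiResponse = data[1];
});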
Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. + */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. 
+ * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. + */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. 
+ * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. 
+ * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. + this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? 
void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime.contentType(destinationFile.name); + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [projection=noAcl] Set of properties to return. 
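// Editor's aside (illustrative sketch, not part of this patch): Bucket#combine,
// documented above, also accepts plain object names for `sources` and for
// `destination`, and returns a promise when the callback is omitted. The
// bucket and file names are hypothetical placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const logBucket = storage.bucket('log-bucket');

logBucket.combine(['2013-logs.txt', '2014-logs.txt'], 'all-logs.txt')
  .then(function(data) {
    const newFile = data[0]; // File object for the composed 'all-logs.txt'.
    const apiResponse = data[1];
  });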
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = '//pubsub.googleapis.com/' + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_js_1.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. + * + * File preconditions cannot be passed to this function. 
It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. + * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. 
+ * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *

+ *   Early Access Testers Only
+ *
+ *   This feature is not yet widely-available.
+ *
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ *   Early Access Testers Only
+ *
+ *   This feature is not yet widely-available.
+ *
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. 
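// Editor's aside (illustrative sketch, not part of this patch): the
// `matchGlob` query option described above filters the listing server-side
// by a glob pattern. The bucket name and pattern are hypothetical placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

storage.bucket('albums').getFiles({matchGlob: '**/*.png'}).then(function(data) {
  const files = data[0]; // Only objects whose names match the glob.
});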
+ * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Currently only supports "list" (HTTP: GET). + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. + * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. 
+ * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. 
+ * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. 
If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. 
+ * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. 
CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. + * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. 
+ * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. 
+ const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification'], +}); + + +/***/ }), + +/***/ 2665: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Channel = void 0; +const index_js_1 = __nccwpck_require__(4052); +const promisify_1 = __nccwpck_require__(9203); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. + */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); + + +/***/ }), + +/***/ 5810: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = __nccwpck_require__(7147); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 
0x34f4f86a, 0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => crc32c.update(d)) + .on('end', resolve) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; + + +/***/ }), + +/***/ 4713: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(4052); +const promisify_1 = __nccwpck_require__(9203); +const compressible_1 = __importDefault(__nccwpck_require__(6763)); +const crypto = __importStar(__nccwpck_require__(6113)); +const fs = __importStar(__nccwpck_require__(7147)); +const mime_1 = __importDefault(__nccwpck_require__(9994)); +const resumableUpload = __importStar(__nccwpck_require__(2851)); +const stream_1 = __nccwpck_require__(2781); +const zlib = __importStar(__nccwpck_require__(9796)); +const storage_js_1 = __nccwpck_require__(6007); +const bucket_js_1 = __nccwpck_require__(3973); +const acl_js_1 = __nccwpck_require__(2989); +const signer_js_1 = __nccwpck_require__(9019); +const util_js_1 = __nccwpck_require__(8064); +const duplexify_1 = __importDefault(__nccwpck_require__(6599)); +const util_js_2 = __nccwpck_require__(9258); +const crc32c_js_1 = __nccwpck_require__(5810); +const hash_stream_validator_js_1 = __nccwpck_require__(725); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @private + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +class RequestError extends Error { +} 
+exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. + * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. 
+ * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. 
+ * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? + if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. + * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //

<h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *

<h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. + * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. 
Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

<p class="notice"> + * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + * </p>
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

<h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

<h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

<h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //

<h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = (0, compressible_1.default)(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + // We know that the file exists the server - now we can truncate/write to a file + receivedData = true; + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', callback) + .on('finish', callback); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. 
+ */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } + conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. 
That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. 
+ * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.googleapis.com/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + }; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. 
+ * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). 
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
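+ * For illustration, a minimal sketch of uploading a small payload with the
+ * resumable behavior switched off (this assumes `file` is a File instance,
+ * as in the example below):
+ *
+ * ```
+ * // Hypothetical small upload: a single multipart request avoids the
+ * // extra round-trips of a resumable session.
+ * file.save('a few bytes of data', {resumable: false}, function(err) {
+ *   if (!err) {
+ *     // File written successfully.
+ *   }
+ * });
+ * ```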
+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || Buffer.isBuffer(data)) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + if (metadata.retention !== undefined) { + options.overrideUnlockedRetention = true; + } + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const uploadStream = resumableUpload.upload({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }); + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + ], +}); + + +/***/ }), + +/***/ 725: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashStreamValidator = void 0; +const crypto_1 = __nccwpck_require__(6113); +const stream_1 = __nccwpck_require__(2781); +const crc32c_js_1 = __nccwpck_require__(5810); +const file_js_1 = __nccwpck_require__(4713); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. 
+ let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); + + +/***/ }), + +/***/ 4654: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HmacKey = void 0; +const index_js_1 = __nccwpck_require__(4052); +const storage_js_1 = __nccwpck_require__(6007); +const promisify_1 = __nccwpck_require__(9203); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. 
+ * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. 
+ * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); + + +/***/ }), + +/***/ 352: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = __nccwpck_require__(9203); +const util_js_1 = __nccwpck_require__(9258); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). 
+ * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. + * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Iam); + + +/***/ }), + +/***/ 7577: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = __nccwpck_require__(4052); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); +var storage_js_1 = __nccwpck_require__(6007); +Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); +Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); +var bucket_js_1 = __nccwpck_require__(3973); +Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); +__exportStar(__nccwpck_require__(5810), exports); +var channel_js_1 = __nccwpck_require__(2665); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); +var file_js_1 = __nccwpck_require__(4713); +Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); +__exportStar(__nccwpck_require__(725), exports); +var hmacKey_js_1 = __nccwpck_require__(4654); +Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); +var iam_js_1 = __nccwpck_require__(352); +Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); +var notification_js_1 = __nccwpck_require__(1178); +Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); +__exportStar(__nccwpck_require__(5594), exports); + + +/***/ }), + +/***/ 4052: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.ApiError = 
exports.ServiceObject = exports.Service = void 0; +var service_js_1 = __nccwpck_require__(7370); +Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); +var service_object_js_1 = __nccwpck_require__(3409); +Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); +var util_js_1 = __nccwpck_require__(8064); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); +Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); + + +/***/ }), + +/***/ 3409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = __nccwpck_require__(9203); +const events_1 = __nccwpck_require__(2361); +const util_js_1 = __nccwpck_require__(8064); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). + this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. 
a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); + + +/***/ }), + +/***/ 7370: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +const uuid = __importStar(__nccwpck_require__(5840)); +const util_js_1 = __nccwpck_require__(8064); +const util_js_2 = __nccwpck_require__(9258); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. 
+ * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + const reqCfg = { + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + token: options.token, + }; + this.makeAuthenticatedRequest = + util_js_1.util.makeAuthenticatedRequestFactory(reqCfg); + this.authClient = this.makeAuthenticatedRequest.authClient; + this.getCredentials = this.makeAuthenticatedRequest.getCredentials; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? 
reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; + + +/***/ }), + +/***/ 8064: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = __nccwpck_require__(3497); +const ent = __importStar(__nccwpck_require__(1151)); +const google_auth_library_1 = __nccwpck_require__(810); +const retry_request_1 = __importDefault(__nccwpck_require__(3515)); +const stream_1 = __nccwpck_require__(2781); +const teeny_request_1 = __nccwpck_require__(6886); +const uuid = __importStar(__nccwpck_require__(5840)); +const service_js_1 = __nccwpck_require__(7370); +const util_js_1 = __nccwpck_require__(9258); +const duplexify_1 = __importDefault(__nccwpck_require__(6599)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. 
+ * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(ent.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. 
+ * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. 
+ */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. + */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` to `GoogleAuth`, if available + const config = { + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + }; + authClient = new google_auth_library_1.GoogleAuth(config); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. 
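+ // (`MissingProjectIdError` is thrown by `@google-cloud/projectify` when a
+ // `{{projectId}}` placeholder in the request cannot be resolved yet.)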
+ try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. + return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. 
+ */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. 
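+ *
+ * Illustrative sketch (an assumption, not build output): `{{projectId}}`
+ * placeholders in the request options are replaced here via
+ * `@google-cloud/projectify`.
+ *
+ * @example
+ * ```
+ * util.decorateRequest(
+ *   {uri: 'https://storage.googleapis.com/storage/v1/projects/{{projectId}}/hmacKeys'},
+ *   'my-project'
+ * );
+ * // => {uri: 'https://storage.googleapis.com/storage/v1/projects/my-project/hmacKeys'}
+ * ```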
+ */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? [{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; + + +/***/ }), + +/***/ 1178: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = void 0; +const index_js_1 = __nccwpck_require__(4052); +const promisify_1 = __nccwpck_require__(9203); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. 
+ * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); + + +/***/ }), + +/***/ 2851: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; +const abort_controller_1 = __importDefault(__nccwpck_require__(1659)); +const crypto_1 = __nccwpck_require__(6113); +const gaxios = __importStar(__nccwpck_require__(9555)); +const google_auth_library_1 = __nccwpck_require__(810); +const stream_1 = __nccwpck_require__(2781); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const uuid = __importStar(__nccwpck_require__(5840)); +const util_js_1 = __nccwpck_require__(9258); +const util_js_2 = __nccwpck_require__(8064); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. 
+ * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + this.apiEndpoint = 'https://storage.googleapis.com'; + if (cfg.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) { + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. 
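+ * (The event is deferred until `responseHandler` has observed a successful
+ * response and emitted `uploadFinished`.)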
+ * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. + */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. 
We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. + * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. 
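+ // Illustrative note (an assumption about the public wrapper, not part of
+ // this generated file): callers usually reach this code path through
+ // `File#createResumableUpload()` or `File#createWriteStream()` in
+ // `@google-cloud/storage`, e.g.
+ //   const [uri] = await storage.bucket('my-bucket').file('my-file').createResumableUpload();
+ // The returned session URI can later be passed back in as `uri` to resume.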
+ if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. + this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. 
Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. + const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. 
+ __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. + const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. 
+ this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. + this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
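+ // (HTTP 308 is how the resumable-upload protocol reports "Resume Incomplete";
+ // it is expected mid-session and must not be treated as a failure here.)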
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error('Retry limit exceeded - ' + resp.data)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +exports.upload = upload; +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +exports.createURI = createURI; +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} +exports.checkUploadStatus = checkUploadStatus; + + +/***/ }), + +/***/ 9019: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(__nccwpck_require__(6113)); +const url = __importStar(__nccwpck_require__(7310)); +const storage_js_1 = __nccwpck_require__(6007); +const util_js_1 = __nccwpck_require__(9258); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @private + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(authClient, bucket, file) { + this.bucket = bucket; + this.file = file; + this.authClient = authClient; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.googleapis.com`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? (0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + const authClient = this.authClient; + try { + const signature = await authClient.sign(blobToSign); + const credentials = await authClient.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST); + extensionHeaders.host = fqdn.host; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + const credentials = await this.authClient.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.authClient.sign(blobToSign); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. + // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. 
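The v2 and v4 signing paths implemented above are normally reached through the public `File#getSignedUrl` method rather than by constructing `URLSigner` directly. A minimal sketch, assuming Application Default Credentials and placeholder bucket/object names:

```js
// Minimal sketch: 'my-bucket' and 'report.pdf' are placeholders, and
// Application Default Credentials are assumed to be available.
const {Storage} = require('@google-cloud/storage');

async function v4ReadUrl() {
  const storage = new Storage();
  const [url] = await storage
    .bucket('my-bucket')
    .file('report.pdf')
    .getSignedUrl({
      version: 'v4',                        // selects the GOOG4-RSA-SHA256 path above
      action: 'read',                       // determines the HTTP method that gets signed
      expires: Date.now() + 15 * 60 * 1000, // must stay within the SEVEN_DAYS v4 limit
    });
  return url;
}
```

The `virtualHostedStyle` and `cname` options handled above can be passed through the same options object.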
+ } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; + + +/***/ }), + +/***/ 6007: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = __nccwpck_require__(4052); +const paginator_1 = __nccwpck_require__(6412); +const promisify_1 = __nccwpck_require__(9203); +const stream_1 = __nccwpck_require__(2781); +const bucket_js_1 = __nccwpck_require__(3973); +const channel_js_1 = __nccwpck_require__(2665); +const file_js_1 = __nccwpck_require__(4713); +const util_js_1 = __nccwpck_require__(9258); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const hmacKey_js_1 = __nccwpck_require__(4654); +const crc32c_js_1 = __nccwpck_require__(5810); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an 
HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. 
+ * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. + * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
<h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
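Before the credential-focused examples below, note that the retry defaults defined earlier (`AUTO_RETRY_DEFAULT`, `MAX_RETRY_DEFAULT`, `RETRY_DELAY_MULTIPLIER_DEFAULT`, `TOTAL_TIMEOUT_DEFAULT`, `MAX_RETRY_DELAY_DEFAULT`, `RETRYABLE_ERR_FN_DEFAULT`) are only fallbacks; each has a matching `retryOptions` field on the constructor. A hedged sketch with illustrative values, not recommended settings:

```js
// Sketch only: the numeric values are illustrative, not recommendations.
const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');

const storage = new Storage({
  retryOptions: {
    autoRetry: true,          // overrides AUTO_RETRY_DEFAULT
    maxRetries: 5,            // default is MAX_RETRY_DEFAULT (3)
    retryDelayMultiplier: 2,  // exponential backoff factor between attempts
    totalTimeout: 300,        // stop retrying 300 seconds after the first request
    maxRetryDelay: 32,        // cap, in seconds, on the delay between attempts
    idempotencyStrategy: IdempotencyStrategy.RetryConditional,
  },
});
```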
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + let apiEndpoint = 'https://storage.googleapis.com'; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? 
(_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. 
+ * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. + * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] **Early Access Testers Only** + * Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. 
+ * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. 
+ * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. + * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. 
+ * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? 
Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. 
+ * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); + + +/***/ }), + +/***/ 5594: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; }; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; - -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; }; - - - -/***/ }), - -/***/ 2940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = __nccwpck_require__(4713); +const p_limit_1 = __importDefault(__nccwpck_require__(7684)); +const path = __importStar(__nccwpck_require__(1017)); +const fs_1 = __nccwpck_require__(7147); +const crc32c_js_1 = __nccwpck_require__(5810); +const google_auth_library_1 = __nccwpck_require__(810); +const fast_xml_parser_1 = __nccwpck_require__(2603); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const crypto_1 = __nccwpck_require__(6113); +const util_js_1 = __nccwpck_require__(8064); +const util_js_2 = __nccwpck_require__(9258); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. 
+ * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; } - return ret - } } - - -/***/ }), - -/***/ 4091: -/***/ ((module) => { - -"use strict"; - -module.exports = function (Yallist) { - Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. 
+ * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
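Taken together, `initiateUpload`, `uploadPart`, `completeUpload`, and the abort path documented here make up one multipart upload cycle. Since `XMLMultiPartUploadHelper` is private, the following is only a conceptual sketch of how a chunked-upload driver could compose those methods; the sequential read-stream chunking is an assumption for illustration (the real transfer manager fans parts out through `p-limit`):

```js
// Conceptual sketch only: XMLMultiPartUploadHelper is private to this library,
// and the sequential chunking below is an illustrative assumption.
const {createReadStream} = require('fs');

async function runMultiPartUpload(helper, filePath, chunkSizeBytes) {
  await helper.initiateUpload();          // POST ?uploads, stores the uploadId
  try {
    let partNumber = 1;
    const stream = createReadStream(filePath, {highWaterMark: chunkSizeBytes});
    for await (const chunk of stream) {
      // PUT ?partNumber=N&uploadId=..., optionally validated with Content-MD5
      await helper.uploadPart(partNumber++, chunk, 'md5');
    }
    return await helper.completeUpload(); // POST ?uploadId=... with the parts XML
  } catch (e) {
    await helper.abortUpload();           // DELETE ?uploadId=..., invalidates the upload
    throw e;
  }
}
```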
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. 
+ * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = filePath + .split(path.sep) + .join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. 
+ * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + promises.push(limit(() => file.download(passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. 
+ * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = (0, p_limit_1.default)(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. + * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. 
If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. + for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? 
yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } } - } } +exports.TransferManager = TransferManager; /***/ }), -/***/ 665: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 9258: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -module.exports = Yallist - -Yallist.Node = Node -Yallist.create = Yallist - -function Yallist (list) { - var self = this - if (!(self instanceof Yallist)) { - self = new Yallist() - } - - self.tail = null - self.head = null - self.length = 0 - - if (list && typeof list.forEach === 'function') { - list.forEach(function (item) { - self.push(item) - }) - } else if (arguments.length > 0) { - for (var i = 0, l = arguments.length; i < l; i++) { - self.push(arguments[i]) - } - } - - return self -} - -Yallist.prototype.removeNode = function (node) { - if (node.list !== this) { - throw new Error('removing node which does not belong to this list') - } - - var next = node.next - var prev = node.prev - - if (next) { - next.prev = prev - } - - if (prev) { - prev.next = next - } - - if (node === this.head) { - this.head = next - } - if (node === this.tail) { - this.tail = prev - } - - node.list.length-- - node.next = null - node.prev = null - node.list = null - - return next -} - -Yallist.prototype.unshiftNode = function (node) { - if (node === this.head) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var head = this.head - node.list = this - node.next = head - if (head) { - head.prev = node - } - - this.head = node - if (!this.tail) { - this.tail = node - } - this.length++ -} - -Yallist.prototype.pushNode = function (node) { - if (node === this.tail) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var tail = this.tail - node.list = this - node.prev = tail - if (tail) { - tail.next = node - } - this.tail = node - if (!this.head) { - this.head = node - } - this.length++ -} - -Yallist.prototype.push = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - push(this, arguments[i]) +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; } - return this.length + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.PassThroughShim = exports.getModuleFormat = exports.getDirName = exports.getUserAgentString = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; +const path = __importStar(__nccwpck_require__(1017)); +const querystring = __importStar(__nccwpck_require__(3477)); +const stream_1 = __nccwpck_require__(2781); +const url = __importStar(__nccwpck_require__(7310)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; } - -Yallist.prototype.unshift = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - unshift(this, arguments[i]) - } - return this.length +exports.normalize = normalize; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); } - -Yallist.prototype.pop = function () { - if (!this.tail) { - return undefined - } - - var res = this.tail.value - this.tail = this.tail.prev - if (this.tail) { - this.tail.next = null - } else { - this.head = null - } - this.length-- - return res +exports.objectEntries = objectEntries; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); } - -Yallist.prototype.shift = function () { - if (!this.head) { - return undefined - } - - var res = this.head.value - this.head = this.head.next - if (this.head) { - this.head.prev = null - } else { - this.tail = null - } - this.length-- - return res +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. 
+ */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? '%2F' : '/'); } - -Yallist.prototype.forEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.head, i = 0; walker !== null; i++) { - fn.call(thisp, walker.value, i, this) - walker = walker.next - } +exports.encodeURI = encodeURI; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); } - -Yallist.prototype.forEachReverse = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { - fn.call(thisp, walker.value, i, this) - walker = walker.prev +exports.qsStringify = qsStringify; +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; } + return newObj; } - -Yallist.prototype.get = function (n) { - for (var i = 0, walker = this.head; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.next - } - if (i === n && walker !== null) { - return walker.value - } +exports.objectKeyToLowercase = objectKeyToLowercase; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); } - -Yallist.prototype.getReverse = function (n) { - for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.prev - } - if (i === n && walker !== null) { - return walker.value +exports.unicodeJSONStringify = unicodeJSONStringify; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. 
+ */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; } -} - -Yallist.prototype.map = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.head; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.next + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); } - return res -} - -Yallist.prototype.mapReverse = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.tail; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.prev + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); } - return res + return obj; } - -Yallist.prototype.reduce = function (fn, initial) { - var acc - var walker = this.head - if (arguments.length > 1) { - acc = initial - } else if (this.head) { - walker = this.head.next - acc = this.head.value - } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = 0; walker !== null; i++) { - acc = fn(acc, walker.value, i) - walker = walker.next - } - - return acc +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; } - -Yallist.prototype.reduceReverse = function (fn, initial) { - var acc - var walker = this.tail - if (arguments.length > 1) { - acc = initial - } else if (this.tail) { - walker = this.tail.prev - acc = this.tail.value +exports.formatAsUTCISO = formatAsUTCISO; +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. 
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = this.length - 1; walker !== null; i--) { - acc = fn(acc, walker.value, i) - walker = walker.prev - } - - return acc -} - -Yallist.prototype.toArray = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.head; walker !== null; i++) { - arr[i] = walker.value - walker = walker.next - } - return arr -} - -Yallist.prototype.toArrayReverse = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.tail; walker !== null; i++) { - arr[i] = walker.value - walker = walker.prev - } - return arr -} - -Yallist.prototype.slice = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = 0, walker = this.head; walker !== null && i < from; i++) { - walker = walker.next - } - for (; walker !== null && i < to; i++, walker = walker.next) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.sliceReverse = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { - walker = walker.prev - } - for (; walker !== null && i > from; i--, walker = walker.prev) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.splice = function (start, deleteCount, ...nodes) { - if (start > this.length) { - start = this.length - 1 - } - if (start < 0) { - start = this.length + start; - } - - for (var i = 0, walker = this.head; walker !== null && i < start; i++) { - walker = walker.next + return `gl-node/${process.versions.node}`; } - - var ret = [] - for (var i = 0; walker && i < deleteCount; i++) { - ret.push(walker.value) - walker = this.removeNode(walker) - } - if (walker === null) { - walker = this.tail - } - - if (walker !== this.head && walker !== this.tail) { - walker = walker.prev - } - - for (var i = 0; i < nodes.length; i++) { - walker = insert(this, walker, nodes[i]) - } - return ret; } - -Yallist.prototype.reverse = function () { - var head = this.head - var tail = this.tail - for (var walker = head; walker !== null; walker = walker.prev) { - var p = walker.prev - walker.prev = walker.next - walker.next = p - } - this.head = tail - this.tail = head - return this +exports.getRuntimeTrackingString = getRuntimeTrackingString; +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. 
+ */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; } - -function insert (self, node, value) { - var inserted = node === self.head ? - new Node(value, null, node, self) : - new Node(value, node, node.next, self) - - if (inserted.next === null) { - self.tail = inserted - } - if (inserted.prev === null) { - self.head = inserted +exports.getUserAgentString = getUserAgentString; +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; } - - self.length++ - - return inserted + return dirToUse; } - -function push (self, item) { - self.tail = new Node(item, self.tail, null, self) - if (!self.head) { - self.head = self.tail - } - self.length++ +exports.getDirName = getDirName; +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; } - -function unshift (self, item) { - self.head = new Node(item, null, self.head, self) - if (!self.tail) { - self.tail = self.head +exports.getModuleFormat = getModuleFormat; +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; } - self.length++ -} - -function Node (value, prev, next, list) { - if (!(this instanceof Node)) { - return new Node(value, prev, next, list) + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); } - - this.list = list - this.value = value - - if (prev) { - prev.next = this - this.prev = prev - } else { - this.prev = null + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); } - - if (next) { - next.prev = this - this.next = next - } else { - this.next = null + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); } } - -try { - // add if support for Symbol.iterator is present - __nccwpck_require__(4091)(Yallist) -} catch (er) {} +exports.PassThroughShim = PassThroughShim; /***/ }), -/***/ 5185: -/***/ ((module) => { - -class Node { - /// value; - /// next; - - constructor(value) { - this.value = value; - - // TODO: Remove this when targeting Node.js 12. - this.next = undefined; - } -} - -class Queue { - // TODO: Use private class fields when targeting Node.js 12. 
- // #_head; - // #_tail; - // #_size; - - constructor() { - this.clear(); - } - - enqueue(value) { - const node = new Node(value); - - if (this._head) { - this._tail.next = node; - this._tail = node; - } else { - this._head = node; - this._tail = node; - } - - this._size++; - } - - dequeue() { - const current = this._head; - if (!current) { - return; - } - - this._head = this._head.next; - this._size--; - return current.value; - } - - clear() { - this._head = undefined; - this._tail = undefined; - this._size = 0; - } +/***/ 8568: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - get size() { - return this._size; - } +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. - * [Symbol.iterator]() { - let current = this._head; +/* eslint-disable node/no-missing-require */ - while (current) { - yield current.value; - current = current.next; - } - } +function getPackageJSON() { + return __nccwpck_require__(5458); } -module.exports = Queue; - - -/***/ }), - -/***/ 2877: -/***/ ((module) => { - -module.exports = eval("require")("encoding"); - - -/***/ }), - -/***/ 8418: -/***/ ((module) => { - -module.exports = eval("require")("request"); - - -/***/ }), - -/***/ 9491: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 4300: -/***/ ((module) => { - -"use strict"; -module.exports = require("buffer"); - -/***/ }), - -/***/ 2081: -/***/ ((module) => { - -"use strict"; -module.exports = require("child_process"); - -/***/ }), - -/***/ 6113: -/***/ ((module) => { - -"use strict"; -module.exports = require("crypto"); - -/***/ }), - -/***/ 2361: -/***/ ((module) => { - -"use strict"; -module.exports = require("events"); - -/***/ }), - -/***/ 7147: -/***/ ((module) => { - -"use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 3685: -/***/ ((module) => { - -"use strict"; -module.exports = require("http"); - -/***/ }), - -/***/ 5687: -/***/ ((module) => { - -"use strict"; -module.exports = require("https"); - -/***/ }), - -/***/ 1808: -/***/ ((module) => { - -"use strict"; -module.exports = require("net"); - -/***/ }), - -/***/ 2037: -/***/ ((module) => { - -"use strict"; -module.exports = require("os"); - -/***/ }), - -/***/ 1017: -/***/ ((module) => { - -"use strict"; -module.exports = require("path"); - -/***/ }), - -/***/ 5477: -/***/ ((module) => { - -"use strict"; -module.exports = require("punycode"); - -/***/ }), - -/***/ 3477: -/***/ ((module) => { - -"use strict"; -module.exports = require("querystring"); - -/***/ }), - -/***/ 2781: -/***/ ((module) => { - -"use strict"; -module.exports = require("stream"); - -/***/ }), - -/***/ 4404: -/***/ ((module) => { - -"use strict"; -module.exports = require("tls"); - -/***/ }), - -/***/ 6224: -/***/ ((module) => { - -"use strict"; -module.exports = require("tty"); - -/***/ }), - -/***/ 7310: -/***/ ((module) => { - -"use strict"; -module.exports = require("url"); - -/***/ }), - -/***/ 3837: -/***/ ((module) => { +exports.getPackageJSON = 
getPackageJSON; -"use strict"; -module.exports = require("util"); /***/ }), -/***/ 9796: +/***/ 5458: /***/ ((module) => { "use strict"; -module.exports = require("zlib"); +module.exports = JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.7.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- --sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha 
build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"^4.0.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","compressible":"^2.0.12","duplexify":"^4.0.0","ent":"^2.2.0","fast-xml-parser":"^4.3.0","gaxios":"^6.0.2","google-auth-library":"^9.0.0","mime":"^3.0.0","mime-types":"^2.0.8","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/compressible":"^2.0.0","@types/ent":"^2.2.1","@types/mime":"^3.0.0","@types/mime-types":"^2.1.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^20.4.4","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^8.0.0","form-data":"^4.0.0","gapic-tools":"^0.2.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^4.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.4.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^17.0.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); /***/ }), -/***/ 5458: +/***/ 2077: /***/ ((module) => { "use strict"; -module.exports = JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"6.9.4","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=12"},"repository":"googleapis/nodejs-storage","main":"./build/src/index.js","types":"./build/src/index.d.ts","files":["build/src","!build/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"predocs":"npm run compile","docs":"jsdoc -c .jsdoc.js","system-test":"mocha build/system-test --timeout 600000 --exit","conformance-test":"mocha --parallel build/conformance-test/ --require build/conformance-test/globalHooks.js","preconformance-test":"npm run compile","presystem-test":"npm run compile","test":"c8 mocha build/test","pretest":"npm run compile","lint":"gts check","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","all-test":"npm test && npm run system-test && npm run samples-test","check":"gts check","clean":"gts clean","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","docs-test":"linkinator docs","predocs-test":"npm run docs","benchwrapper":"node bin/benchwrapper.js","prelint":"cd samples; npm link ../; npm install","precompile":"gts 
clean"},"dependencies":{"@google-cloud/paginator":"^3.0.7","@google-cloud/projectify":"^3.0.0","@google-cloud/promisify":"^3.0.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","compressible":"^2.0.12","duplexify":"^4.0.0","ent":"^2.2.0","extend":"^3.0.2","gaxios":"^5.0.0","google-auth-library":"^8.0.1","mime":"^3.0.0","mime-types":"^2.0.8","p-limit":"^3.0.1","retry-request":"^5.0.0","teeny-request":"^8.0.0","uuid":"^8.0.0"},"devDependencies":{"@google-cloud/pubsub":"^3.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/compressible":"^2.0.0","@types/ent":"^2.2.1","@types/extend":"^3.0.0","@types/mime":"^3.0.0","@types/mime-types":"^2.1.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^18.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^10.0.0","@types/tmp":"0.2.3","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^7.0.0","form-data":"^4.0.0","gts":"^3.1.0","jsdoc":"^4.0.0","jsdoc-fresh":"^2.0.0","jsdoc-region-tag":"^2.0.0","linkinator":"^4.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.3.0","node-fetch":"^2.6.7","proxyquire":"^2.1.3","sinon":"^15.0.0","tmp":"^0.2.0","typescript":"^4.6.4","yargs":"^17.3.1"}}'); +module.exports = JSON.parse('{"Aacute;":"Á","Aacute":"Á","aacute;":"á","aacute":"á","Abreve;":"Ă","abreve;":"ă","ac;":"∾","acd;":"∿","acE;":"∾̳","Acirc;":"Â","Acirc":"Â","acirc;":"â","acirc":"â","acute;":"´","acute":"´","Acy;":"А","acy;":"а","AElig;":"Æ","AElig":"Æ","aelig;":"æ","aelig":"æ","af;":"⁡","Afr;":"𝔄","afr;":"𝔞","Agrave;":"À","Agrave":"À","agrave;":"à","agrave":"à","alefsym;":"ℵ","aleph;":"ℵ","Alpha;":"Α","alpha;":"α","Amacr;":"Ā","amacr;":"ā","amalg;":"⨿","AMP;":"&","AMP":"&","amp;":"&","amp":"&","And;":"⩓","and;":"∧","andand;":"⩕","andd;":"⩜","andslope;":"⩘","andv;":"⩚","ang;":"∠","ange;":"⦤","angle;":"∠","angmsd;":"∡","angmsdaa;":"⦨","angmsdab;":"⦩","angmsdac;":"⦪","angmsdad;":"⦫","angmsdae;":"⦬","angmsdaf;":"⦭","angmsdag;":"⦮","angmsdah;":"⦯","angrt;":"∟","angrtvb;":"⊾","angrtvbd;":"⦝","angsph;":"∢","angst;":"Å","angzarr;":"⍼","Aogon;":"Ą","aogon;":"ą","Aopf;":"𝔸","aopf;":"𝕒","ap;":"≈","apacir;":"⩯","apE;":"⩰","ape;":"≊","apid;":"≋","apos;":"\'","ApplyFunction;":"⁡","approx;":"≈","approxeq;":"≊","Aring;":"Å","Aring":"Å","aring;":"å","aring":"å","Ascr;":"𝒜","ascr;":"𝒶","Assign;":"≔","ast;":"*","asymp;":"≈","asympeq;":"≍","Atilde;":"Ã","Atilde":"Ã","atilde;":"ã","atilde":"ã","Auml;":"Ä","Auml":"Ä","auml;":"ä","auml":"ä","awconint;":"∳","awint;":"⨑","backcong;":"≌","backepsilon;":"϶","backprime;":"‵","backsim;":"∽","backsimeq;":"⋍","Backslash;":"∖","Barv;":"⫧","barvee;":"⊽","Barwed;":"⌆","barwed;":"⌅","barwedge;":"⌅","bbrk;":"⎵","bbrktbrk;":"⎶","bcong;":"≌","Bcy;":"Б","bcy;":"б","bdquo;":"„","becaus;":"∵","Because;":"∵","because;":"∵","bemptyv;":"⦰","bepsi;":"϶","bernou;":"ℬ","Bernoullis;":"ℬ","Beta;":"Β","beta;":"β","beth;":"ℶ","between;":"≬","Bfr;":"𝔅","bfr;":"𝔟","bigcap;":"⋂","bigcirc;":"◯","bigcup;":"⋃","bigodot;":"⨀","bigoplus;":"⨁","bigotimes;":"⨂","bigsqcup;":"⨆","bigstar;":"★","bigtriangledown;":"▽","bigtriangleup;":"△","biguplus;":"⨄","bigvee;":"⋁","bigwedge;":"⋀","bkarow;":"⤍","blacklozenge;":"⧫","blacksquare;":"▪","blacktriangle;":"▴","blacktriangledown;":"▾","blacktriangleleft;":"◂","blacktriangleright;":"▸","blank;":"␣","blk12;":"▒","blk14;":"░","blk34;":"▓","block;":"█","bne;":"=⃥","bnequiv;":"≡⃥","bNot;":"⫭","bnot;":"⌐","Bopf;":"𝔹","bopf;":"𝕓","bot;":"⊥","bottom;":"⊥","bowtie;":"⋈","boxbox;":"⧉","boxDL;":"╗","boxDl;":"
╖","boxdL;":"╕","boxdl;":"┐","boxDR;":"╔","boxDr;":"╓","boxdR;":"╒","boxdr;":"┌","boxH;":"═","boxh;":"─","boxHD;":"╦","boxHd;":"╤","boxhD;":"╥","boxhd;":"┬","boxHU;":"╩","boxHu;":"╧","boxhU;":"╨","boxhu;":"┴","boxminus;":"⊟","boxplus;":"⊞","boxtimes;":"⊠","boxUL;":"╝","boxUl;":"╜","boxuL;":"╛","boxul;":"┘","boxUR;":"╚","boxUr;":"╙","boxuR;":"╘","boxur;":"└","boxV;":"║","boxv;":"│","boxVH;":"╬","boxVh;":"╫","boxvH;":"╪","boxvh;":"┼","boxVL;":"╣","boxVl;":"╢","boxvL;":"╡","boxvl;":"┤","boxVR;":"╠","boxVr;":"╟","boxvR;":"╞","boxvr;":"├","bprime;":"‵","Breve;":"˘","breve;":"˘","brvbar;":"¦","brvbar":"¦","Bscr;":"ℬ","bscr;":"𝒷","bsemi;":"⁏","bsim;":"∽","bsime;":"⋍","bsol;":"\\\\","bsolb;":"⧅","bsolhsub;":"⟈","bull;":"•","bullet;":"•","bump;":"≎","bumpE;":"⪮","bumpe;":"≏","Bumpeq;":"≎","bumpeq;":"≏","Cacute;":"Ć","cacute;":"ć","Cap;":"⋒","cap;":"∩","capand;":"⩄","capbrcup;":"⩉","capcap;":"⩋","capcup;":"⩇","capdot;":"⩀","CapitalDifferentialD;":"ⅅ","caps;":"∩︀","caret;":"⁁","caron;":"ˇ","Cayleys;":"ℭ","ccaps;":"⩍","Ccaron;":"Č","ccaron;":"č","Ccedil;":"Ç","Ccedil":"Ç","ccedil;":"ç","ccedil":"ç","Ccirc;":"Ĉ","ccirc;":"ĉ","Cconint;":"∰","ccups;":"⩌","ccupssm;":"⩐","Cdot;":"Ċ","cdot;":"ċ","cedil;":"¸","cedil":"¸","Cedilla;":"¸","cemptyv;":"⦲","cent;":"¢","cent":"¢","CenterDot;":"·","centerdot;":"·","Cfr;":"ℭ","cfr;":"𝔠","CHcy;":"Ч","chcy;":"ч","check;":"✓","checkmark;":"✓","Chi;":"Χ","chi;":"χ","cir;":"○","circ;":"ˆ","circeq;":"≗","circlearrowleft;":"↺","circlearrowright;":"↻","circledast;":"⊛","circledcirc;":"⊚","circleddash;":"⊝","CircleDot;":"⊙","circledR;":"®","circledS;":"Ⓢ","CircleMinus;":"⊖","CirclePlus;":"⊕","CircleTimes;":"⊗","cirE;":"⧃","cire;":"≗","cirfnint;":"⨐","cirmid;":"⫯","cirscir;":"⧂","ClockwiseContourIntegral;":"∲","CloseCurlyDoubleQuote;":"”","CloseCurlyQuote;":"’","clubs;":"♣","clubsuit;":"♣","Colon;":"∷","colon;":":","Colone;":"⩴","colone;":"≔","coloneq;":"≔","comma;":",","commat;":"@","comp;":"∁","compfn;":"∘","complement;":"∁","complexes;":"ℂ","cong;":"≅","congdot;":"⩭","Congruent;":"≡","Conint;":"∯","conint;":"∮","ContourIntegral;":"∮","Copf;":"ℂ","copf;":"𝕔","coprod;":"∐","Coproduct;":"∐","COPY;":"©","COPY":"©","copy;":"©","copy":"©","copysr;":"℗","CounterClockwiseContourIntegral;":"∳","crarr;":"↵","Cross;":"⨯","cross;":"✗","Cscr;":"𝒞","cscr;":"𝒸","csub;":"⫏","csube;":"⫑","csup;":"⫐","csupe;":"⫒","ctdot;":"⋯","cudarrl;":"⤸","cudarrr;":"⤵","cuepr;":"⋞","cuesc;":"⋟","cularr;":"↶","cularrp;":"⤽","Cup;":"⋓","cup;":"∪","cupbrcap;":"⩈","CupCap;":"≍","cupcap;":"⩆","cupcup;":"⩊","cupdot;":"⊍","cupor;":"⩅","cups;":"∪︀","curarr;":"↷","curarrm;":"⤼","curlyeqprec;":"⋞","curlyeqsucc;":"⋟","curlyvee;":"⋎","curlywedge;":"⋏","curren;":"¤","curren":"¤","curvearrowleft;":"↶","curvearrowright;":"↷","cuvee;":"⋎","cuwed;":"⋏","cwconint;":"∲","cwint;":"∱","cylcty;":"⌭","Dagger;":"‡","dagger;":"†","daleth;":"ℸ","Darr;":"↡","dArr;":"⇓","darr;":"↓","dash;":"‐","Dashv;":"⫤","dashv;":"⊣","dbkarow;":"⤏","dblac;":"˝","Dcaron;":"Ď","dcaron;":"ď","Dcy;":"Д","dcy;":"д","DD;":"ⅅ","dd;":"ⅆ","ddagger;":"‡","ddarr;":"⇊","DDotrahd;":"⤑","ddotseq;":"⩷","deg;":"°","deg":"°","Del;":"∇","Delta;":"Δ","delta;":"δ","demptyv;":"⦱","dfisht;":"⥿","Dfr;":"𝔇","dfr;":"𝔡","dHar;":"⥥","dharl;":"⇃","dharr;":"⇂","DiacriticalAcute;":"´","DiacriticalDot;":"˙","DiacriticalDoubleAcute;":"˝","DiacriticalGrave;":"`","DiacriticalTilde;":"˜","diam;":"⋄","Diamond;":"⋄","diamond;":"⋄","diamondsuit;":"♦","diams;":"♦","die;":"¨","DifferentialD;":"ⅆ","digamma;":"ϝ","disin;":"⋲","div;":"÷","divide;":"÷","divide":"÷","divideontimes;":"⋇","divo
nx;":"⋇","DJcy;":"Ђ","djcy;":"ђ","dlcorn;":"⌞","dlcrop;":"⌍","dollar;":"$","Dopf;":"𝔻","dopf;":"𝕕","Dot;":"¨","dot;":"˙","DotDot;":"⃜","doteq;":"≐","doteqdot;":"≑","DotEqual;":"≐","dotminus;":"∸","dotplus;":"∔","dotsquare;":"⊡","doublebarwedge;":"⌆","DoubleContourIntegral;":"∯","DoubleDot;":"¨","DoubleDownArrow;":"⇓","DoubleLeftArrow;":"⇐","DoubleLeftRightArrow;":"⇔","DoubleLeftTee;":"⫤","DoubleLongLeftArrow;":"⟸","DoubleLongLeftRightArrow;":"⟺","DoubleLongRightArrow;":"⟹","DoubleRightArrow;":"⇒","DoubleRightTee;":"⊨","DoubleUpArrow;":"⇑","DoubleUpDownArrow;":"⇕","DoubleVerticalBar;":"∥","DownArrow;":"↓","Downarrow;":"⇓","downarrow;":"↓","DownArrowBar;":"⤓","DownArrowUpArrow;":"⇵","DownBreve;":"̑","downdownarrows;":"⇊","downharpoonleft;":"⇃","downharpoonright;":"⇂","DownLeftRightVector;":"⥐","DownLeftTeeVector;":"⥞","DownLeftVector;":"↽","DownLeftVectorBar;":"⥖","DownRightTeeVector;":"⥟","DownRightVector;":"⇁","DownRightVectorBar;":"⥗","DownTee;":"⊤","DownTeeArrow;":"↧","drbkarow;":"⤐","drcorn;":"⌟","drcrop;":"⌌","Dscr;":"𝒟","dscr;":"𝒹","DScy;":"Ѕ","dscy;":"ѕ","dsol;":"⧶","Dstrok;":"Đ","dstrok;":"đ","dtdot;":"⋱","dtri;":"▿","dtrif;":"▾","duarr;":"⇵","duhar;":"⥯","dwangle;":"⦦","DZcy;":"Џ","dzcy;":"џ","dzigrarr;":"⟿","Eacute;":"É","Eacute":"É","eacute;":"é","eacute":"é","easter;":"⩮","Ecaron;":"Ě","ecaron;":"ě","ecir;":"≖","Ecirc;":"Ê","Ecirc":"Ê","ecirc;":"ê","ecirc":"ê","ecolon;":"≕","Ecy;":"Э","ecy;":"э","eDDot;":"⩷","Edot;":"Ė","eDot;":"≑","edot;":"ė","ee;":"ⅇ","efDot;":"≒","Efr;":"𝔈","efr;":"𝔢","eg;":"⪚","Egrave;":"È","Egrave":"È","egrave;":"è","egrave":"è","egs;":"⪖","egsdot;":"⪘","el;":"⪙","Element;":"∈","elinters;":"⏧","ell;":"ℓ","els;":"⪕","elsdot;":"⪗","Emacr;":"Ē","emacr;":"ē","empty;":"∅","emptyset;":"∅","EmptySmallSquare;":"◻","emptyv;":"∅","EmptyVerySmallSquare;":"▫","emsp;":" ","emsp13;":" ","emsp14;":" ","ENG;":"Ŋ","eng;":"ŋ","ensp;":" 
","Eogon;":"Ę","eogon;":"ę","Eopf;":"𝔼","eopf;":"𝕖","epar;":"⋕","eparsl;":"⧣","eplus;":"⩱","epsi;":"ε","Epsilon;":"Ε","epsilon;":"ε","epsiv;":"ϵ","eqcirc;":"≖","eqcolon;":"≕","eqsim;":"≂","eqslantgtr;":"⪖","eqslantless;":"⪕","Equal;":"⩵","equals;":"=","EqualTilde;":"≂","equest;":"≟","Equilibrium;":"⇌","equiv;":"≡","equivDD;":"⩸","eqvparsl;":"⧥","erarr;":"⥱","erDot;":"≓","Escr;":"ℰ","escr;":"ℯ","esdot;":"≐","Esim;":"⩳","esim;":"≂","Eta;":"Η","eta;":"η","ETH;":"Ð","ETH":"Ð","eth;":"ð","eth":"ð","Euml;":"Ë","Euml":"Ë","euml;":"ë","euml":"ë","euro;":"€","excl;":"!","exist;":"∃","Exists;":"∃","expectation;":"ℰ","ExponentialE;":"ⅇ","exponentiale;":"ⅇ","fallingdotseq;":"≒","Fcy;":"Ф","fcy;":"ф","female;":"♀","ffilig;":"ffi","fflig;":"ff","ffllig;":"ffl","Ffr;":"𝔉","ffr;":"𝔣","filig;":"fi","FilledSmallSquare;":"◼","FilledVerySmallSquare;":"▪","fjlig;":"fj","flat;":"♭","fllig;":"fl","fltns;":"▱","fnof;":"ƒ","Fopf;":"𝔽","fopf;":"𝕗","ForAll;":"∀","forall;":"∀","fork;":"⋔","forkv;":"⫙","Fouriertrf;":"ℱ","fpartint;":"⨍","frac12;":"½","frac12":"½","frac13;":"⅓","frac14;":"¼","frac14":"¼","frac15;":"⅕","frac16;":"⅙","frac18;":"⅛","frac23;":"⅔","frac25;":"⅖","frac34;":"¾","frac34":"¾","frac35;":"⅗","frac38;":"⅜","frac45;":"⅘","frac56;":"⅚","frac58;":"⅝","frac78;":"⅞","frasl;":"⁄","frown;":"⌢","Fscr;":"ℱ","fscr;":"𝒻","gacute;":"ǵ","Gamma;":"Γ","gamma;":"γ","Gammad;":"Ϝ","gammad;":"ϝ","gap;":"⪆","Gbreve;":"Ğ","gbreve;":"ğ","Gcedil;":"Ģ","Gcirc;":"Ĝ","gcirc;":"ĝ","Gcy;":"Г","gcy;":"г","Gdot;":"Ġ","gdot;":"ġ","gE;":"≧","ge;":"≥","gEl;":"⪌","gel;":"⋛","geq;":"≥","geqq;":"≧","geqslant;":"⩾","ges;":"⩾","gescc;":"⪩","gesdot;":"⪀","gesdoto;":"⪂","gesdotol;":"⪄","gesl;":"⋛︀","gesles;":"⪔","Gfr;":"𝔊","gfr;":"𝔤","Gg;":"⋙","gg;":"≫","ggg;":"⋙","gimel;":"ℷ","GJcy;":"Ѓ","gjcy;":"ѓ","gl;":"≷","gla;":"⪥","glE;":"⪒","glj;":"⪤","gnap;":"⪊","gnapprox;":"⪊","gnE;":"≩","gne;":"⪈","gneq;":"⪈","gneqq;":"≩","gnsim;":"⋧","Gopf;":"𝔾","gopf;":"𝕘","grave;":"`","GreaterEqual;":"≥","GreaterEqualLess;":"⋛","GreaterFullEqual;":"≧","GreaterGreater;":"⪢","GreaterLess;":"≷","GreaterSlantEqual;":"⩾","GreaterTilde;":"≳","Gscr;":"𝒢","gscr;":"ℊ","gsim;":"≳","gsime;":"⪎","gsiml;":"⪐","GT;":">","GT":">","Gt;":"≫","gt;":">","gt":">","gtcc;":"⪧","gtcir;":"⩺","gtdot;":"⋗","gtlPar;":"⦕","gtquest;":"⩼","gtrapprox;":"⪆","gtrarr;":"⥸","gtrdot;":"⋗","gtreqless;":"⋛","gtreqqless;":"⪌","gtrless;":"≷","gtrsim;":"≳","gvertneqq;":"≩︀","gvnE;":"≩︀","Hacek;":"ˇ","hairsp;":" 
","half;":"½","hamilt;":"ℋ","HARDcy;":"Ъ","hardcy;":"ъ","hArr;":"⇔","harr;":"↔","harrcir;":"⥈","harrw;":"↭","Hat;":"^","hbar;":"ℏ","Hcirc;":"Ĥ","hcirc;":"ĥ","hearts;":"♥","heartsuit;":"♥","hellip;":"…","hercon;":"⊹","Hfr;":"ℌ","hfr;":"𝔥","HilbertSpace;":"ℋ","hksearow;":"⤥","hkswarow;":"⤦","hoarr;":"⇿","homtht;":"∻","hookleftarrow;":"↩","hookrightarrow;":"↪","Hopf;":"ℍ","hopf;":"𝕙","horbar;":"―","HorizontalLine;":"─","Hscr;":"ℋ","hscr;":"𝒽","hslash;":"ℏ","Hstrok;":"Ħ","hstrok;":"ħ","HumpDownHump;":"≎","HumpEqual;":"≏","hybull;":"⁃","hyphen;":"‐","Iacute;":"Í","Iacute":"Í","iacute;":"í","iacute":"í","ic;":"⁣","Icirc;":"Î","Icirc":"Î","icirc;":"î","icirc":"î","Icy;":"И","icy;":"и","Idot;":"İ","IEcy;":"Е","iecy;":"е","iexcl;":"¡","iexcl":"¡","iff;":"⇔","Ifr;":"ℑ","ifr;":"𝔦","Igrave;":"Ì","Igrave":"Ì","igrave;":"ì","igrave":"ì","ii;":"ⅈ","iiiint;":"⨌","iiint;":"∭","iinfin;":"⧜","iiota;":"℩","IJlig;":"IJ","ijlig;":"ij","Im;":"ℑ","Imacr;":"Ī","imacr;":"ī","image;":"ℑ","ImaginaryI;":"ⅈ","imagline;":"ℐ","imagpart;":"ℑ","imath;":"ı","imof;":"⊷","imped;":"Ƶ","Implies;":"⇒","in;":"∈","incare;":"℅","infin;":"∞","infintie;":"⧝","inodot;":"ı","Int;":"∬","int;":"∫","intcal;":"⊺","integers;":"ℤ","Integral;":"∫","intercal;":"⊺","Intersection;":"⋂","intlarhk;":"⨗","intprod;":"⨼","InvisibleComma;":"⁣","InvisibleTimes;":"⁢","IOcy;":"Ё","iocy;":"ё","Iogon;":"Į","iogon;":"į","Iopf;":"𝕀","iopf;":"𝕚","Iota;":"Ι","iota;":"ι","iprod;":"⨼","iquest;":"¿","iquest":"¿","Iscr;":"ℐ","iscr;":"𝒾","isin;":"∈","isindot;":"⋵","isinE;":"⋹","isins;":"⋴","isinsv;":"⋳","isinv;":"∈","it;":"⁢","Itilde;":"Ĩ","itilde;":"ĩ","Iukcy;":"І","iukcy;":"і","Iuml;":"Ï","Iuml":"Ï","iuml;":"ï","iuml":"ï","Jcirc;":"Ĵ","jcirc;":"ĵ","Jcy;":"Й","jcy;":"й","Jfr;":"𝔍","jfr;":"𝔧","jmath;":"ȷ","Jopf;":"𝕁","jopf;":"𝕛","Jscr;":"𝒥","jscr;":"𝒿","Jsercy;":"Ј","jsercy;":"ј","Jukcy;":"Є","jukcy;":"є","Kappa;":"Κ","kappa;":"κ","kappav;":"ϰ","Kcedil;":"Ķ","kcedil;":"ķ","Kcy;":"К","kcy;":"к","Kfr;":"𝔎","kfr;":"𝔨","kgreen;":"ĸ","KHcy;":"Х","khcy;":"х","KJcy;":"Ќ","kjcy;":"ќ","Kopf;":"𝕂","kopf;":"𝕜","Kscr;":"𝒦","kscr;":"𝓀","lAarr;":"⇚","Lacute;":"Ĺ","lacute;":"ĺ","laemptyv;":"⦴","lagran;":"ℒ","Lambda;":"Λ","lambda;":"λ","Lang;":"⟪","lang;":"⟨","langd;":"⦑","langle;":"⟨","lap;":"⪅","Laplacetrf;":"ℒ","laquo;":"«","laquo":"«","Larr;":"↞","lArr;":"⇐","larr;":"←","larrb;":"⇤","larrbfs;":"⤟","larrfs;":"⤝","larrhk;":"↩","larrlp;":"↫","larrpl;":"⤹","larrsim;":"⥳","larrtl;":"↢","lat;":"⪫","lAtail;":"⤛","latail;":"⤙","late;":"⪭","lates;":"⪭︀","lBarr;":"⤎","lbarr;":"⤌","lbbrk;":"❲","lbrace;":"{","lbrack;":"[","lbrke;":"⦋","lbrksld;":"⦏","lbrkslu;":"⦍","Lcaron;":"Ľ","lcaron;":"ľ","Lcedil;":"Ļ","lcedil;":"ļ","lceil;":"⌈","lcub;":"{","Lcy;":"Л","lcy;":"л","ldca;":"⤶","ldquo;":"“","ldquor;":"„","ldrdhar;":"⥧","ldrushar;":"⥋","ldsh;":"↲","lE;":"≦","le;":"≤","LeftAngleBracket;":"⟨","LeftArrow;":"←","Leftarrow;":"⇐","leftarrow;":"←","LeftArrowBar;":"⇤","LeftArrowRightArrow;":"⇆","leftarrowtail;":"↢","LeftCeiling;":"⌈","LeftDoubleBracket;":"⟦","LeftDownTeeVector;":"⥡","LeftDownVector;":"⇃","LeftDownVectorBar;":"⥙","LeftFloor;":"⌊","leftharpoondown;":"↽","leftharpoonup;":"↼","leftleftarrows;":"⇇","LeftRightArrow;":"↔","Leftrightarrow;":"⇔","leftrightarrow;":"↔","leftrightarrows;":"⇆","leftrightharpoons;":"⇋","leftrightsquigarrow;":"↭","LeftRightVector;":"⥎","LeftTee;":"⊣","LeftTeeArrow;":"↤","LeftTeeVector;":"⥚","leftthreetimes;":"⋋","LeftTriangle;":"⊲","LeftTriangleBar;":"⧏","LeftTriangleEqual;":"⊴","LeftUpDownVector;":"⥑","LeftUpTeeVector;":"⥠","LeftUpVector;":"↿","LeftUpVectorBar;":
"⥘","LeftVector;":"↼","LeftVectorBar;":"⥒","lEg;":"⪋","leg;":"⋚","leq;":"≤","leqq;":"≦","leqslant;":"⩽","les;":"⩽","lescc;":"⪨","lesdot;":"⩿","lesdoto;":"⪁","lesdotor;":"⪃","lesg;":"⋚︀","lesges;":"⪓","lessapprox;":"⪅","lessdot;":"⋖","lesseqgtr;":"⋚","lesseqqgtr;":"⪋","LessEqualGreater;":"⋚","LessFullEqual;":"≦","LessGreater;":"≶","lessgtr;":"≶","LessLess;":"⪡","lesssim;":"≲","LessSlantEqual;":"⩽","LessTilde;":"≲","lfisht;":"⥼","lfloor;":"⌊","Lfr;":"𝔏","lfr;":"𝔩","lg;":"≶","lgE;":"⪑","lHar;":"⥢","lhard;":"↽","lharu;":"↼","lharul;":"⥪","lhblk;":"▄","LJcy;":"Љ","ljcy;":"љ","Ll;":"⋘","ll;":"≪","llarr;":"⇇","llcorner;":"⌞","Lleftarrow;":"⇚","llhard;":"⥫","lltri;":"◺","Lmidot;":"Ŀ","lmidot;":"ŀ","lmoust;":"⎰","lmoustache;":"⎰","lnap;":"⪉","lnapprox;":"⪉","lnE;":"≨","lne;":"⪇","lneq;":"⪇","lneqq;":"≨","lnsim;":"⋦","loang;":"⟬","loarr;":"⇽","lobrk;":"⟦","LongLeftArrow;":"⟵","Longleftarrow;":"⟸","longleftarrow;":"⟵","LongLeftRightArrow;":"⟷","Longleftrightarrow;":"⟺","longleftrightarrow;":"⟷","longmapsto;":"⟼","LongRightArrow;":"⟶","Longrightarrow;":"⟹","longrightarrow;":"⟶","looparrowleft;":"↫","looparrowright;":"↬","lopar;":"⦅","Lopf;":"𝕃","lopf;":"𝕝","loplus;":"⨭","lotimes;":"⨴","lowast;":"∗","lowbar;":"_","LowerLeftArrow;":"↙","LowerRightArrow;":"↘","loz;":"◊","lozenge;":"◊","lozf;":"⧫","lpar;":"(","lparlt;":"⦓","lrarr;":"⇆","lrcorner;":"⌟","lrhar;":"⇋","lrhard;":"⥭","lrm;":"‎","lrtri;":"⊿","lsaquo;":"‹","Lscr;":"ℒ","lscr;":"𝓁","Lsh;":"↰","lsh;":"↰","lsim;":"≲","lsime;":"⪍","lsimg;":"⪏","lsqb;":"[","lsquo;":"‘","lsquor;":"‚","Lstrok;":"Ł","lstrok;":"ł","LT;":"<","LT":"<","Lt;":"≪","lt;":"<","lt":"<","ltcc;":"⪦","ltcir;":"⩹","ltdot;":"⋖","lthree;":"⋋","ltimes;":"⋉","ltlarr;":"⥶","ltquest;":"⩻","ltri;":"◃","ltrie;":"⊴","ltrif;":"◂","ltrPar;":"⦖","lurdshar;":"⥊","luruhar;":"⥦","lvertneqq;":"≨︀","lvnE;":"≨︀","macr;":"¯","macr":"¯","male;":"♂","malt;":"✠","maltese;":"✠","Map;":"⤅","map;":"↦","mapsto;":"↦","mapstodown;":"↧","mapstoleft;":"↤","mapstoup;":"↥","marker;":"▮","mcomma;":"⨩","Mcy;":"М","mcy;":"м","mdash;":"—","mDDot;":"∺","measuredangle;":"∡","MediumSpace;":" ","Mellintrf;":"ℳ","Mfr;":"𝔐","mfr;":"𝔪","mho;":"℧","micro;":"µ","micro":"µ","mid;":"∣","midast;":"*","midcir;":"⫰","middot;":"·","middot":"·","minus;":"−","minusb;":"⊟","minusd;":"∸","minusdu;":"⨪","MinusPlus;":"∓","mlcp;":"⫛","mldr;":"…","mnplus;":"∓","models;":"⊧","Mopf;":"𝕄","mopf;":"𝕞","mp;":"∓","Mscr;":"ℳ","mscr;":"𝓂","mstpos;":"∾","Mu;":"Μ","mu;":"μ","multimap;":"⊸","mumap;":"⊸","nabla;":"∇","Nacute;":"Ń","nacute;":"ń","nang;":"∠⃒","nap;":"≉","napE;":"⩰̸","napid;":"≋̸","napos;":"ʼn","napprox;":"≉","natur;":"♮","natural;":"♮","naturals;":"ℕ","nbsp;":" ","nbsp":" 
","nbump;":"≎̸","nbumpe;":"≏̸","ncap;":"⩃","Ncaron;":"Ň","ncaron;":"ň","Ncedil;":"Ņ","ncedil;":"ņ","ncong;":"≇","ncongdot;":"⩭̸","ncup;":"⩂","Ncy;":"Н","ncy;":"н","ndash;":"–","ne;":"≠","nearhk;":"⤤","neArr;":"⇗","nearr;":"↗","nearrow;":"↗","nedot;":"≐̸","NegativeMediumSpace;":"​","NegativeThickSpace;":"​","NegativeThinSpace;":"​","NegativeVeryThinSpace;":"​","nequiv;":"≢","nesear;":"⤨","nesim;":"≂̸","NestedGreaterGreater;":"≫","NestedLessLess;":"≪","NewLine;":"\\n","nexist;":"∄","nexists;":"∄","Nfr;":"𝔑","nfr;":"𝔫","ngE;":"≧̸","nge;":"≱","ngeq;":"≱","ngeqq;":"≧̸","ngeqslant;":"⩾̸","nges;":"⩾̸","nGg;":"⋙̸","ngsim;":"≵","nGt;":"≫⃒","ngt;":"≯","ngtr;":"≯","nGtv;":"≫̸","nhArr;":"⇎","nharr;":"↮","nhpar;":"⫲","ni;":"∋","nis;":"⋼","nisd;":"⋺","niv;":"∋","NJcy;":"Њ","njcy;":"њ","nlArr;":"⇍","nlarr;":"↚","nldr;":"‥","nlE;":"≦̸","nle;":"≰","nLeftarrow;":"⇍","nleftarrow;":"↚","nLeftrightarrow;":"⇎","nleftrightarrow;":"↮","nleq;":"≰","nleqq;":"≦̸","nleqslant;":"⩽̸","nles;":"⩽̸","nless;":"≮","nLl;":"⋘̸","nlsim;":"≴","nLt;":"≪⃒","nlt;":"≮","nltri;":"⋪","nltrie;":"⋬","nLtv;":"≪̸","nmid;":"∤","NoBreak;":"⁠","NonBreakingSpace;":" ","Nopf;":"ℕ","nopf;":"𝕟","Not;":"⫬","not;":"¬","not":"¬","NotCongruent;":"≢","NotCupCap;":"≭","NotDoubleVerticalBar;":"∦","NotElement;":"∉","NotEqual;":"≠","NotEqualTilde;":"≂̸","NotExists;":"∄","NotGreater;":"≯","NotGreaterEqual;":"≱","NotGreaterFullEqual;":"≧̸","NotGreaterGreater;":"≫̸","NotGreaterLess;":"≹","NotGreaterSlantEqual;":"⩾̸","NotGreaterTilde;":"≵","NotHumpDownHump;":"≎̸","NotHumpEqual;":"≏̸","notin;":"∉","notindot;":"⋵̸","notinE;":"⋹̸","notinva;":"∉","notinvb;":"⋷","notinvc;":"⋶","NotLeftTriangle;":"⋪","NotLeftTriangleBar;":"⧏̸","NotLeftTriangleEqual;":"⋬","NotLess;":"≮","NotLessEqual;":"≰","NotLessGreater;":"≸","NotLessLess;":"≪̸","NotLessSlantEqual;":"⩽̸","NotLessTilde;":"≴","NotNestedGreaterGreater;":"⪢̸","NotNestedLessLess;":"⪡̸","notni;":"∌","notniva;":"∌","notnivb;":"⋾","notnivc;":"⋽","NotPrecedes;":"⊀","NotPrecedesEqual;":"⪯̸","NotPrecedesSlantEqual;":"⋠","NotReverseElement;":"∌","NotRightTriangle;":"⋫","NotRightTriangleBar;":"⧐̸","NotRightTriangleEqual;":"⋭","NotSquareSubset;":"⊏̸","NotSquareSubsetEqual;":"⋢","NotSquareSuperset;":"⊐̸","NotSquareSupersetEqual;":"⋣","NotSubset;":"⊂⃒","NotSubsetEqual;":"⊈","NotSucceeds;":"⊁","NotSucceedsEqual;":"⪰̸","NotSucceedsSlantEqual;":"⋡","NotSucceedsTilde;":"≿̸","NotSuperset;":"⊃⃒","NotSupersetEqual;":"⊉","NotTilde;":"≁","NotTildeEqual;":"≄","NotTildeFullEqual;":"≇","NotTildeTilde;":"≉","NotVerticalBar;":"∤","npar;":"∦","nparallel;":"∦","nparsl;":"⫽⃥","npart;":"∂̸","npolint;":"⨔","npr;":"⊀","nprcue;":"⋠","npre;":"⪯̸","nprec;":"⊀","npreceq;":"⪯̸","nrArr;":"⇏","nrarr;":"↛","nrarrc;":"⤳̸","nrarrw;":"↝̸","nRightarrow;":"⇏","nrightarrow;":"↛","nrtri;":"⋫","nrtrie;":"⋭","nsc;":"⊁","nsccue;":"⋡","nsce;":"⪰̸","Nscr;":"𝒩","nscr;":"𝓃","nshortmid;":"∤","nshortparallel;":"∦","nsim;":"≁","nsime;":"≄","nsimeq;":"≄","nsmid;":"∤","nspar;":"∦","nsqsube;":"⋢","nsqsupe;":"⋣","nsub;":"⊄","nsubE;":"⫅̸","nsube;":"⊈","nsubset;":"⊂⃒","nsubseteq;":"⊈","nsubseteqq;":"⫅̸","nsucc;":"⊁","nsucceq;":"⪰̸","nsup;":"⊅","nsupE;":"⫆̸","nsupe;":"⊉","nsupset;":"⊃⃒","nsupseteq;":"⊉","nsupseteqq;":"⫆̸","ntgl;":"≹","Ntilde;":"Ñ","Ntilde":"Ñ","ntilde;":"ñ","ntilde":"ñ","ntlg;":"≸","ntriangleleft;":"⋪","ntrianglelefteq;":"⋬","ntriangleright;":"⋫","ntrianglerighteq;":"⋭","Nu;":"Ν","nu;":"ν","num;":"#","numero;":"№","numsp;":" 
","nvap;":"≍⃒","nVDash;":"⊯","nVdash;":"⊮","nvDash;":"⊭","nvdash;":"⊬","nvge;":"≥⃒","nvgt;":">⃒","nvHarr;":"⤄","nvinfin;":"⧞","nvlArr;":"⤂","nvle;":"≤⃒","nvlt;":"<⃒","nvltrie;":"⊴⃒","nvrArr;":"⤃","nvrtrie;":"⊵⃒","nvsim;":"∼⃒","nwarhk;":"⤣","nwArr;":"⇖","nwarr;":"↖","nwarrow;":"↖","nwnear;":"⤧","Oacute;":"Ó","Oacute":"Ó","oacute;":"ó","oacute":"ó","oast;":"⊛","ocir;":"⊚","Ocirc;":"Ô","Ocirc":"Ô","ocirc;":"ô","ocirc":"ô","Ocy;":"О","ocy;":"о","odash;":"⊝","Odblac;":"Ő","odblac;":"ő","odiv;":"⨸","odot;":"⊙","odsold;":"⦼","OElig;":"Œ","oelig;":"œ","ofcir;":"⦿","Ofr;":"𝔒","ofr;":"𝔬","ogon;":"˛","Ograve;":"Ò","Ograve":"Ò","ograve;":"ò","ograve":"ò","ogt;":"⧁","ohbar;":"⦵","ohm;":"Ω","oint;":"∮","olarr;":"↺","olcir;":"⦾","olcross;":"⦻","oline;":"‾","olt;":"⧀","Omacr;":"Ō","omacr;":"ō","Omega;":"Ω","omega;":"ω","Omicron;":"Ο","omicron;":"ο","omid;":"⦶","ominus;":"⊖","Oopf;":"𝕆","oopf;":"𝕠","opar;":"⦷","OpenCurlyDoubleQuote;":"“","OpenCurlyQuote;":"‘","operp;":"⦹","oplus;":"⊕","Or;":"⩔","or;":"∨","orarr;":"↻","ord;":"⩝","order;":"ℴ","orderof;":"ℴ","ordf;":"ª","ordf":"ª","ordm;":"º","ordm":"º","origof;":"⊶","oror;":"⩖","orslope;":"⩗","orv;":"⩛","oS;":"Ⓢ","Oscr;":"𝒪","oscr;":"ℴ","Oslash;":"Ø","Oslash":"Ø","oslash;":"ø","oslash":"ø","osol;":"⊘","Otilde;":"Õ","Otilde":"Õ","otilde;":"õ","otilde":"õ","Otimes;":"⨷","otimes;":"⊗","otimesas;":"⨶","Ouml;":"Ö","Ouml":"Ö","ouml;":"ö","ouml":"ö","ovbar;":"⌽","OverBar;":"‾","OverBrace;":"⏞","OverBracket;":"⎴","OverParenthesis;":"⏜","par;":"∥","para;":"¶","para":"¶","parallel;":"∥","parsim;":"⫳","parsl;":"⫽","part;":"∂","PartialD;":"∂","Pcy;":"П","pcy;":"п","percnt;":"%","period;":".","permil;":"‰","perp;":"⊥","pertenk;":"‱","Pfr;":"𝔓","pfr;":"𝔭","Phi;":"Φ","phi;":"φ","phiv;":"ϕ","phmmat;":"ℳ","phone;":"☎","Pi;":"Π","pi;":"π","pitchfork;":"⋔","piv;":"ϖ","planck;":"ℏ","planckh;":"ℎ","plankv;":"ℏ","plus;":"+","plusacir;":"⨣","plusb;":"⊞","pluscir;":"⨢","plusdo;":"∔","plusdu;":"⨥","pluse;":"⩲","PlusMinus;":"±","plusmn;":"±","plusmn":"±","plussim;":"⨦","plustwo;":"⨧","pm;":"±","Poincareplane;":"ℌ","pointint;":"⨕","Popf;":"ℙ","popf;":"𝕡","pound;":"£","pound":"£","Pr;":"⪻","pr;":"≺","prap;":"⪷","prcue;":"≼","prE;":"⪳","pre;":"⪯","prec;":"≺","precapprox;":"⪷","preccurlyeq;":"≼","Precedes;":"≺","PrecedesEqual;":"⪯","PrecedesSlantEqual;":"≼","PrecedesTilde;":"≾","preceq;":"⪯","precnapprox;":"⪹","precneqq;":"⪵","precnsim;":"⋨","precsim;":"≾","Prime;":"″","prime;":"′","primes;":"ℙ","prnap;":"⪹","prnE;":"⪵","prnsim;":"⋨","prod;":"∏","Product;":"∏","profalar;":"⌮","profline;":"⌒","profsurf;":"⌓","prop;":"∝","Proportion;":"∷","Proportional;":"∝","propto;":"∝","prsim;":"≾","prurel;":"⊰","Pscr;":"𝒫","pscr;":"𝓅","Psi;":"Ψ","psi;":"ψ","puncsp;":" 
","Qfr;":"𝔔","qfr;":"𝔮","qint;":"⨌","Qopf;":"ℚ","qopf;":"𝕢","qprime;":"⁗","Qscr;":"𝒬","qscr;":"𝓆","quaternions;":"ℍ","quatint;":"⨖","quest;":"?","questeq;":"≟","QUOT;":"\\"","QUOT":"\\"","quot;":"\\"","quot":"\\"","rAarr;":"⇛","race;":"∽̱","Racute;":"Ŕ","racute;":"ŕ","radic;":"√","raemptyv;":"⦳","Rang;":"⟫","rang;":"⟩","rangd;":"⦒","range;":"⦥","rangle;":"⟩","raquo;":"»","raquo":"»","Rarr;":"↠","rArr;":"⇒","rarr;":"→","rarrap;":"⥵","rarrb;":"⇥","rarrbfs;":"⤠","rarrc;":"⤳","rarrfs;":"⤞","rarrhk;":"↪","rarrlp;":"↬","rarrpl;":"⥅","rarrsim;":"⥴","Rarrtl;":"⤖","rarrtl;":"↣","rarrw;":"↝","rAtail;":"⤜","ratail;":"⤚","ratio;":"∶","rationals;":"ℚ","RBarr;":"⤐","rBarr;":"⤏","rbarr;":"⤍","rbbrk;":"❳","rbrace;":"}","rbrack;":"]","rbrke;":"⦌","rbrksld;":"⦎","rbrkslu;":"⦐","Rcaron;":"Ř","rcaron;":"ř","Rcedil;":"Ŗ","rcedil;":"ŗ","rceil;":"⌉","rcub;":"}","Rcy;":"Р","rcy;":"р","rdca;":"⤷","rdldhar;":"⥩","rdquo;":"”","rdquor;":"”","rdsh;":"↳","Re;":"ℜ","real;":"ℜ","realine;":"ℛ","realpart;":"ℜ","reals;":"ℝ","rect;":"▭","REG;":"®","REG":"®","reg;":"®","reg":"®","ReverseElement;":"∋","ReverseEquilibrium;":"⇋","ReverseUpEquilibrium;":"⥯","rfisht;":"⥽","rfloor;":"⌋","Rfr;":"ℜ","rfr;":"𝔯","rHar;":"⥤","rhard;":"⇁","rharu;":"⇀","rharul;":"⥬","Rho;":"Ρ","rho;":"ρ","rhov;":"ϱ","RightAngleBracket;":"⟩","RightArrow;":"→","Rightarrow;":"⇒","rightarrow;":"→","RightArrowBar;":"⇥","RightArrowLeftArrow;":"⇄","rightarrowtail;":"↣","RightCeiling;":"⌉","RightDoubleBracket;":"⟧","RightDownTeeVector;":"⥝","RightDownVector;":"⇂","RightDownVectorBar;":"⥕","RightFloor;":"⌋","rightharpoondown;":"⇁","rightharpoonup;":"⇀","rightleftarrows;":"⇄","rightleftharpoons;":"⇌","rightrightarrows;":"⇉","rightsquigarrow;":"↝","RightTee;":"⊢","RightTeeArrow;":"↦","RightTeeVector;":"⥛","rightthreetimes;":"⋌","RightTriangle;":"⊳","RightTriangleBar;":"⧐","RightTriangleEqual;":"⊵","RightUpDownVector;":"⥏","RightUpTeeVector;":"⥜","RightUpVector;":"↾","RightUpVectorBar;":"⥔","RightVector;":"⇀","RightVectorBar;":"⥓","ring;":"˚","risingdotseq;":"≓","rlarr;":"⇄","rlhar;":"⇌","rlm;":"‏","rmoust;":"⎱","rmoustache;":"⎱","rnmid;":"⫮","roang;":"⟭","roarr;":"⇾","robrk;":"⟧","ropar;":"⦆","Ropf;":"ℝ","ropf;":"𝕣","roplus;":"⨮","rotimes;":"⨵","RoundImplies;":"⥰","rpar;":")","rpargt;":"⦔","rppolint;":"⨒","rrarr;":"⇉","Rrightarrow;":"⇛","rsaquo;":"›","Rscr;":"ℛ","rscr;":"𝓇","Rsh;":"↱","rsh;":"↱","rsqb;":"]","rsquo;":"’","rsquor;":"’","rthree;":"⋌","rtimes;":"⋊","rtri;":"▹","rtrie;":"⊵","rtrif;":"▸","rtriltri;":"⧎","RuleDelayed;":"⧴","ruluhar;":"⥨","rx;":"℞","Sacute;":"Ś","sacute;":"ś","sbquo;":"‚","Sc;":"⪼","sc;":"≻","scap;":"⪸","Scaron;":"Š","scaron;":"š","sccue;":"≽","scE;":"⪴","sce;":"⪰","Scedil;":"Ş","scedil;":"ş","Scirc;":"Ŝ","scirc;":"ŝ","scnap;":"⪺","scnE;":"⪶","scnsim;":"⋩","scpolint;":"⨓","scsim;":"≿","Scy;":"С","scy;":"с","sdot;":"⋅","sdotb;":"⊡","sdote;":"⩦","searhk;":"⤥","seArr;":"⇘","searr;":"↘","searrow;":"↘","sect;":"§","sect":"§","semi;":";","seswar;":"⤩","setminus;":"∖","setmn;":"∖","sext;":"✶","Sfr;":"𝔖","sfr;":"𝔰","sfrown;":"⌢","sharp;":"♯","SHCHcy;":"Щ","shchcy;":"щ","SHcy;":"Ш","shcy;":"ш","ShortDownArrow;":"↓","ShortLeftArrow;":"←","shortmid;":"∣","shortparallel;":"∥","ShortRightArrow;":"→","ShortUpArrow;":"↑","shy;":"­","shy":"­","Sigma;":"Σ","sigma;":"σ","sigmaf;":"ς","sigmav;":"ς","sim;":"∼","simdot;":"⩪","sime;":"≃","simeq;":"≃","simg;":"⪞","simgE;":"⪠","siml;":"⪝","simlE;":"⪟","simne;":"≆","simplus;":"⨤","simrarr;":"⥲","slarr;":"←","SmallCircle;":"∘","smallsetminus;":"∖","smashp;":"⨳","smeparsl;":"⧤","smid;":"∣","smile;":"⌣","smt;":"⪪","smt
e;":"⪬","smtes;":"⪬︀","SOFTcy;":"Ь","softcy;":"ь","sol;":"/","solb;":"⧄","solbar;":"⌿","Sopf;":"𝕊","sopf;":"𝕤","spades;":"♠","spadesuit;":"♠","spar;":"∥","sqcap;":"⊓","sqcaps;":"⊓︀","sqcup;":"⊔","sqcups;":"⊔︀","Sqrt;":"√","sqsub;":"⊏","sqsube;":"⊑","sqsubset;":"⊏","sqsubseteq;":"⊑","sqsup;":"⊐","sqsupe;":"⊒","sqsupset;":"⊐","sqsupseteq;":"⊒","squ;":"□","Square;":"□","square;":"□","SquareIntersection;":"⊓","SquareSubset;":"⊏","SquareSubsetEqual;":"⊑","SquareSuperset;":"⊐","SquareSupersetEqual;":"⊒","SquareUnion;":"⊔","squarf;":"▪","squf;":"▪","srarr;":"→","Sscr;":"𝒮","sscr;":"𝓈","ssetmn;":"∖","ssmile;":"⌣","sstarf;":"⋆","Star;":"⋆","star;":"☆","starf;":"★","straightepsilon;":"ϵ","straightphi;":"ϕ","strns;":"¯","Sub;":"⋐","sub;":"⊂","subdot;":"⪽","subE;":"⫅","sube;":"⊆","subedot;":"⫃","submult;":"⫁","subnE;":"⫋","subne;":"⊊","subplus;":"⪿","subrarr;":"⥹","Subset;":"⋐","subset;":"⊂","subseteq;":"⊆","subseteqq;":"⫅","SubsetEqual;":"⊆","subsetneq;":"⊊","subsetneqq;":"⫋","subsim;":"⫇","subsub;":"⫕","subsup;":"⫓","succ;":"≻","succapprox;":"⪸","succcurlyeq;":"≽","Succeeds;":"≻","SucceedsEqual;":"⪰","SucceedsSlantEqual;":"≽","SucceedsTilde;":"≿","succeq;":"⪰","succnapprox;":"⪺","succneqq;":"⪶","succnsim;":"⋩","succsim;":"≿","SuchThat;":"∋","Sum;":"∑","sum;":"∑","sung;":"♪","Sup;":"⋑","sup;":"⊃","sup1;":"¹","sup1":"¹","sup2;":"²","sup2":"²","sup3;":"³","sup3":"³","supdot;":"⪾","supdsub;":"⫘","supE;":"⫆","supe;":"⊇","supedot;":"⫄","Superset;":"⊃","SupersetEqual;":"⊇","suphsol;":"⟉","suphsub;":"⫗","suplarr;":"⥻","supmult;":"⫂","supnE;":"⫌","supne;":"⊋","supplus;":"⫀","Supset;":"⋑","supset;":"⊃","supseteq;":"⊇","supseteqq;":"⫆","supsetneq;":"⊋","supsetneqq;":"⫌","supsim;":"⫈","supsub;":"⫔","supsup;":"⫖","swarhk;":"⤦","swArr;":"⇙","swarr;":"↙","swarrow;":"↙","swnwar;":"⤪","szlig;":"ß","szlig":"ß","Tab;":"\\t","target;":"⌖","Tau;":"Τ","tau;":"τ","tbrk;":"⎴","Tcaron;":"Ť","tcaron;":"ť","Tcedil;":"Ţ","tcedil;":"ţ","Tcy;":"Т","tcy;":"т","tdot;":"⃛","telrec;":"⌕","Tfr;":"𝔗","tfr;":"𝔱","there4;":"∴","Therefore;":"∴","therefore;":"∴","Theta;":"Θ","theta;":"θ","thetasym;":"ϑ","thetav;":"ϑ","thickapprox;":"≈","thicksim;":"∼","ThickSpace;":"  ","thinsp;":" ","ThinSpace;":" 
","thkap;":"≈","thksim;":"∼","THORN;":"Þ","THORN":"Þ","thorn;":"þ","thorn":"þ","Tilde;":"∼","tilde;":"˜","TildeEqual;":"≃","TildeFullEqual;":"≅","TildeTilde;":"≈","times;":"×","times":"×","timesb;":"⊠","timesbar;":"⨱","timesd;":"⨰","tint;":"∭","toea;":"⤨","top;":"⊤","topbot;":"⌶","topcir;":"⫱","Topf;":"𝕋","topf;":"𝕥","topfork;":"⫚","tosa;":"⤩","tprime;":"‴","TRADE;":"™","trade;":"™","triangle;":"▵","triangledown;":"▿","triangleleft;":"◃","trianglelefteq;":"⊴","triangleq;":"≜","triangleright;":"▹","trianglerighteq;":"⊵","tridot;":"◬","trie;":"≜","triminus;":"⨺","TripleDot;":"⃛","triplus;":"⨹","trisb;":"⧍","tritime;":"⨻","trpezium;":"⏢","Tscr;":"𝒯","tscr;":"𝓉","TScy;":"Ц","tscy;":"ц","TSHcy;":"Ћ","tshcy;":"ћ","Tstrok;":"Ŧ","tstrok;":"ŧ","twixt;":"≬","twoheadleftarrow;":"↞","twoheadrightarrow;":"↠","Uacute;":"Ú","Uacute":"Ú","uacute;":"ú","uacute":"ú","Uarr;":"↟","uArr;":"⇑","uarr;":"↑","Uarrocir;":"⥉","Ubrcy;":"Ў","ubrcy;":"ў","Ubreve;":"Ŭ","ubreve;":"ŭ","Ucirc;":"Û","Ucirc":"Û","ucirc;":"û","ucirc":"û","Ucy;":"У","ucy;":"у","udarr;":"⇅","Udblac;":"Ű","udblac;":"ű","udhar;":"⥮","ufisht;":"⥾","Ufr;":"𝔘","ufr;":"𝔲","Ugrave;":"Ù","Ugrave":"Ù","ugrave;":"ù","ugrave":"ù","uHar;":"⥣","uharl;":"↿","uharr;":"↾","uhblk;":"▀","ulcorn;":"⌜","ulcorner;":"⌜","ulcrop;":"⌏","ultri;":"◸","Umacr;":"Ū","umacr;":"ū","uml;":"¨","uml":"¨","UnderBar;":"_","UnderBrace;":"⏟","UnderBracket;":"⎵","UnderParenthesis;":"⏝","Union;":"⋃","UnionPlus;":"⊎","Uogon;":"Ų","uogon;":"ų","Uopf;":"𝕌","uopf;":"𝕦","UpArrow;":"↑","Uparrow;":"⇑","uparrow;":"↑","UpArrowBar;":"⤒","UpArrowDownArrow;":"⇅","UpDownArrow;":"↕","Updownarrow;":"⇕","updownarrow;":"↕","UpEquilibrium;":"⥮","upharpoonleft;":"↿","upharpoonright;":"↾","uplus;":"⊎","UpperLeftArrow;":"↖","UpperRightArrow;":"↗","Upsi;":"ϒ","upsi;":"υ","upsih;":"ϒ","Upsilon;":"Υ","upsilon;":"υ","UpTee;":"⊥","UpTeeArrow;":"↥","upuparrows;":"⇈","urcorn;":"⌝","urcorner;":"⌝","urcrop;":"⌎","Uring;":"Ů","uring;":"ů","urtri;":"◹","Uscr;":"𝒰","uscr;":"𝓊","utdot;":"⋰","Utilde;":"Ũ","utilde;":"ũ","utri;":"▵","utrif;":"▴","uuarr;":"⇈","Uuml;":"Ü","Uuml":"Ü","uuml;":"ü","uuml":"ü","uwangle;":"⦧","vangrt;":"⦜","varepsilon;":"ϵ","varkappa;":"ϰ","varnothing;":"∅","varphi;":"ϕ","varpi;":"ϖ","varpropto;":"∝","vArr;":"⇕","varr;":"↕","varrho;":"ϱ","varsigma;":"ς","varsubsetneq;":"⊊︀","varsubsetneqq;":"⫋︀","varsupsetneq;":"⊋︀","varsupsetneqq;":"⫌︀","vartheta;":"ϑ","vartriangleleft;":"⊲","vartriangleright;":"⊳","Vbar;":"⫫","vBar;":"⫨","vBarv;":"⫩","Vcy;":"В","vcy;":"в","VDash;":"⊫","Vdash;":"⊩","vDash;":"⊨","vdash;":"⊢","Vdashl;":"⫦","Vee;":"⋁","vee;":"∨","veebar;":"⊻","veeeq;":"≚","vellip;":"⋮","Verbar;":"‖","verbar;":"|","Vert;":"‖","vert;":"|","VerticalBar;":"∣","VerticalLine;":"|","VerticalSeparator;":"❘","VerticalTilde;":"≀","VeryThinSpace;":" 
","Vfr;":"𝔙","vfr;":"𝔳","vltri;":"⊲","vnsub;":"⊂⃒","vnsup;":"⊃⃒","Vopf;":"𝕍","vopf;":"𝕧","vprop;":"∝","vrtri;":"⊳","Vscr;":"𝒱","vscr;":"𝓋","vsubnE;":"⫋︀","vsubne;":"⊊︀","vsupnE;":"⫌︀","vsupne;":"⊋︀","Vvdash;":"⊪","vzigzag;":"⦚","Wcirc;":"Ŵ","wcirc;":"ŵ","wedbar;":"⩟","Wedge;":"⋀","wedge;":"∧","wedgeq;":"≙","weierp;":"℘","Wfr;":"𝔚","wfr;":"𝔴","Wopf;":"𝕎","wopf;":"𝕨","wp;":"℘","wr;":"≀","wreath;":"≀","Wscr;":"𝒲","wscr;":"𝓌","xcap;":"⋂","xcirc;":"◯","xcup;":"⋃","xdtri;":"▽","Xfr;":"𝔛","xfr;":"𝔵","xhArr;":"⟺","xharr;":"⟷","Xi;":"Ξ","xi;":"ξ","xlArr;":"⟸","xlarr;":"⟵","xmap;":"⟼","xnis;":"⋻","xodot;":"⨀","Xopf;":"𝕏","xopf;":"𝕩","xoplus;":"⨁","xotime;":"⨂","xrArr;":"⟹","xrarr;":"⟶","Xscr;":"𝒳","xscr;":"𝓍","xsqcup;":"⨆","xuplus;":"⨄","xutri;":"△","xvee;":"⋁","xwedge;":"⋀","Yacute;":"Ý","Yacute":"Ý","yacute;":"ý","yacute":"ý","YAcy;":"Я","yacy;":"я","Ycirc;":"Ŷ","ycirc;":"ŷ","Ycy;":"Ы","ycy;":"ы","yen;":"¥","yen":"¥","Yfr;":"𝔜","yfr;":"𝔶","YIcy;":"Ї","yicy;":"ї","Yopf;":"𝕐","yopf;":"𝕪","Yscr;":"𝒴","yscr;":"𝓎","YUcy;":"Ю","yucy;":"ю","Yuml;":"Ÿ","yuml;":"ÿ","yuml":"ÿ","Zacute;":"Ź","zacute;":"ź","Zcaron;":"Ž","zcaron;":"ž","Zcy;":"З","zcy;":"з","Zdot;":"Ż","zdot;":"ż","zeetrf;":"ℨ","ZeroWidthSpace;":"​","Zeta;":"Ζ","zeta;":"ζ","Zfr;":"ℨ","zfr;":"𝔷","ZHcy;":"Ж","zhcy;":"ж","zigrarr;":"⇝","Zopf;":"ℤ","zopf;":"𝕫","Zscr;":"𝒵","zscr;":"𝓏","zwj;":"‍","zwnj;":"‌"}'); /***/ }), -/***/ 2077: +/***/ 3123: /***/ ((module) => { "use strict"; -module.exports = JSON.parse('{"Aacute;":"Á","Aacute":"Á","aacute;":"á","aacute":"á","Abreve;":"Ă","abreve;":"ă","ac;":"∾","acd;":"∿","acE;":"∾̳","Acirc;":"Â","Acirc":"Â","acirc;":"â","acirc":"â","acute;":"´","acute":"´","Acy;":"А","acy;":"а","AElig;":"Æ","AElig":"Æ","aelig;":"æ","aelig":"æ","af;":"⁡","Afr;":"𝔄","afr;":"𝔞","Agrave;":"À","Agrave":"À","agrave;":"à","agrave":"à","alefsym;":"ℵ","aleph;":"ℵ","Alpha;":"Α","alpha;":"α","Amacr;":"Ā","amacr;":"ā","amalg;":"⨿","AMP;":"&","AMP":"&","amp;":"&","amp":"&","And;":"⩓","and;":"∧","andand;":"⩕","andd;":"⩜","andslope;":"⩘","andv;":"⩚","ang;":"∠","ange;":"⦤","angle;":"∠","angmsd;":"∡","angmsdaa;":"⦨","angmsdab;":"⦩","angmsdac;":"⦪","angmsdad;":"⦫","angmsdae;":"⦬","angmsdaf;":"⦭","angmsdag;":"⦮","angmsdah;":"⦯","angrt;":"∟","angrtvb;":"⊾","angrtvbd;":"⦝","angsph;":"∢","angst;":"Å","angzarr;":"⍼","Aogon;":"Ą","aogon;":"ą","Aopf;":"𝔸","aopf;":"𝕒","ap;":"≈","apacir;":"⩯","apE;":"⩰","ape;":"≊","apid;":"≋","apos;":"\'","ApplyFunction;":"⁡","approx;":"≈","approxeq;":"≊","Aring;":"Å","Aring":"Å","aring;":"å","aring":"å","Ascr;":"𝒜","ascr;":"𝒶","Assign;":"≔","ast;":"*","asymp;":"≈","asympeq;":"≍","Atilde;":"Ã","Atilde":"Ã","atilde;":"ã","atilde":"ã","Auml;":"Ä","Auml":"Ä","auml;":"ä","auml":"ä","awconint;":"∳","awint;":"⨑","backcong;":"≌","backepsilon;":"϶","backprime;":"‵","backsim;":"∽","backsimeq;":"⋍","Backslash;":"∖","Barv;":"⫧","barvee;":"⊽","Barwed;":"⌆","barwed;":"⌅","barwedge;":"⌅","bbrk;":"⎵","bbrktbrk;":"⎶","bcong;":"≌","Bcy;":"Б","bcy;":"б","bdquo;":"„","becaus;":"∵","Because;":"∵","because;":"∵","bemptyv;":"⦰","bepsi;":"϶","bernou;":"ℬ","Bernoullis;":"ℬ","Beta;":"Β","beta;":"β","beth;":"ℶ","between;":"≬","Bfr;":"𝔅","bfr;":"𝔟","bigcap;":"⋂","bigcirc;":"◯","bigcup;":"⋃","bigodot;":"⨀","bigoplus;":"⨁","bigotimes;":"⨂","bigsqcup;":"⨆","bigstar;":"★","bigtriangledown;":"▽","bigtriangleup;":"△","biguplus;":"⨄","bigvee;":"⋁","bigwedge;":"⋀","bkarow;":"⤍","blacklozenge;":"⧫","blacksquare;":"▪","blacktriangle;":"▴","blacktriangledown;":"▾","blacktriangleleft;":"◂","blacktriangleright;":"▸","blank;":"␣","blk12;":"▒","blk14;":"░","bl
k34;":"▓","block;":"█","bne;":"=⃥","bnequiv;":"≡⃥","bNot;":"⫭","bnot;":"⌐","Bopf;":"𝔹","bopf;":"𝕓","bot;":"⊥","bottom;":"⊥","bowtie;":"⋈","boxbox;":"⧉","boxDL;":"╗","boxDl;":"╖","boxdL;":"╕","boxdl;":"┐","boxDR;":"╔","boxDr;":"╓","boxdR;":"╒","boxdr;":"┌","boxH;":"═","boxh;":"─","boxHD;":"╦","boxHd;":"╤","boxhD;":"╥","boxhd;":"┬","boxHU;":"╩","boxHu;":"╧","boxhU;":"╨","boxhu;":"┴","boxminus;":"⊟","boxplus;":"⊞","boxtimes;":"⊠","boxUL;":"╝","boxUl;":"╜","boxuL;":"╛","boxul;":"┘","boxUR;":"╚","boxUr;":"╙","boxuR;":"╘","boxur;":"└","boxV;":"║","boxv;":"│","boxVH;":"╬","boxVh;":"╫","boxvH;":"╪","boxvh;":"┼","boxVL;":"╣","boxVl;":"╢","boxvL;":"╡","boxvl;":"┤","boxVR;":"╠","boxVr;":"╟","boxvR;":"╞","boxvr;":"├","bprime;":"‵","Breve;":"˘","breve;":"˘","brvbar;":"¦","brvbar":"¦","Bscr;":"ℬ","bscr;":"𝒷","bsemi;":"⁏","bsim;":"∽","bsime;":"⋍","bsol;":"\\\\","bsolb;":"⧅","bsolhsub;":"⟈","bull;":"•","bullet;":"•","bump;":"≎","bumpE;":"⪮","bumpe;":"≏","Bumpeq;":"≎","bumpeq;":"≏","Cacute;":"Ć","cacute;":"ć","Cap;":"⋒","cap;":"∩","capand;":"⩄","capbrcup;":"⩉","capcap;":"⩋","capcup;":"⩇","capdot;":"⩀","CapitalDifferentialD;":"ⅅ","caps;":"∩︀","caret;":"⁁","caron;":"ˇ","Cayleys;":"ℭ","ccaps;":"⩍","Ccaron;":"Č","ccaron;":"č","Ccedil;":"Ç","Ccedil":"Ç","ccedil;":"ç","ccedil":"ç","Ccirc;":"Ĉ","ccirc;":"ĉ","Cconint;":"∰","ccups;":"⩌","ccupssm;":"⩐","Cdot;":"Ċ","cdot;":"ċ","cedil;":"¸","cedil":"¸","Cedilla;":"¸","cemptyv;":"⦲","cent;":"¢","cent":"¢","CenterDot;":"·","centerdot;":"·","Cfr;":"ℭ","cfr;":"𝔠","CHcy;":"Ч","chcy;":"ч","check;":"✓","checkmark;":"✓","Chi;":"Χ","chi;":"χ","cir;":"○","circ;":"ˆ","circeq;":"≗","circlearrowleft;":"↺","circlearrowright;":"↻","circledast;":"⊛","circledcirc;":"⊚","circleddash;":"⊝","CircleDot;":"⊙","circledR;":"®","circledS;":"Ⓢ","CircleMinus;":"⊖","CirclePlus;":"⊕","CircleTimes;":"⊗","cirE;":"⧃","cire;":"≗","cirfnint;":"⨐","cirmid;":"⫯","cirscir;":"⧂","ClockwiseContourIntegral;":"∲","CloseCurlyDoubleQuote;":"”","CloseCurlyQuote;":"’","clubs;":"♣","clubsuit;":"♣","Colon;":"∷","colon;":":","Colone;":"⩴","colone;":"≔","coloneq;":"≔","comma;":",","commat;":"@","comp;":"∁","compfn;":"∘","complement;":"∁","complexes;":"ℂ","cong;":"≅","congdot;":"⩭","Congruent;":"≡","Conint;":"∯","conint;":"∮","ContourIntegral;":"∮","Copf;":"ℂ","copf;":"𝕔","coprod;":"∐","Coproduct;":"∐","COPY;":"©","COPY":"©","copy;":"©","copy":"©","copysr;":"℗","CounterClockwiseContourIntegral;":"∳","crarr;":"↵","Cross;":"⨯","cross;":"✗","Cscr;":"𝒞","cscr;":"𝒸","csub;":"⫏","csube;":"⫑","csup;":"⫐","csupe;":"⫒","ctdot;":"⋯","cudarrl;":"⤸","cudarrr;":"⤵","cuepr;":"⋞","cuesc;":"⋟","cularr;":"↶","cularrp;":"⤽","Cup;":"⋓","cup;":"∪","cupbrcap;":"⩈","CupCap;":"≍","cupcap;":"⩆","cupcup;":"⩊","cupdot;":"⊍","cupor;":"⩅","cups;":"∪︀","curarr;":"↷","curarrm;":"⤼","curlyeqprec;":"⋞","curlyeqsucc;":"⋟","curlyvee;":"⋎","curlywedge;":"⋏","curren;":"¤","curren":"¤","curvearrowleft;":"↶","curvearrowright;":"↷","cuvee;":"⋎","cuwed;":"⋏","cwconint;":"∲","cwint;":"∱","cylcty;":"⌭","Dagger;":"‡","dagger;":"†","daleth;":"ℸ","Darr;":"↡","dArr;":"⇓","darr;":"↓","dash;":"‐","Dashv;":"⫤","dashv;":"⊣","dbkarow;":"⤏","dblac;":"˝","Dcaron;":"Ď","dcaron;":"ď","Dcy;":"Д","dcy;":"д","DD;":"ⅅ","dd;":"ⅆ","ddagger;":"‡","ddarr;":"⇊","DDotrahd;":"⤑","ddotseq;":"⩷","deg;":"°","deg":"°","Del;":"∇","Delta;":"Δ","delta;":"δ","demptyv;":"⦱","dfisht;":"⥿","Dfr;":"𝔇","dfr;":"𝔡","dHar;":"⥥","dharl;":"⇃","dharr;":"⇂","DiacriticalAcute;":"´","DiacriticalDot;":"˙","DiacriticalDoubleAcute;":"˝","DiacriticalGrave;":"`","DiacriticalTilde;":"˜","diam;":"⋄","Diamond;":
"⋄","diamond;":"⋄","diamondsuit;":"♦","diams;":"♦","die;":"¨","DifferentialD;":"ⅆ","digamma;":"ϝ","disin;":"⋲","div;":"÷","divide;":"÷","divide":"÷","divideontimes;":"⋇","divonx;":"⋇","DJcy;":"Ђ","djcy;":"ђ","dlcorn;":"⌞","dlcrop;":"⌍","dollar;":"$","Dopf;":"𝔻","dopf;":"𝕕","Dot;":"¨","dot;":"˙","DotDot;":"⃜","doteq;":"≐","doteqdot;":"≑","DotEqual;":"≐","dotminus;":"∸","dotplus;":"∔","dotsquare;":"⊡","doublebarwedge;":"⌆","DoubleContourIntegral;":"∯","DoubleDot;":"¨","DoubleDownArrow;":"⇓","DoubleLeftArrow;":"⇐","DoubleLeftRightArrow;":"⇔","DoubleLeftTee;":"⫤","DoubleLongLeftArrow;":"⟸","DoubleLongLeftRightArrow;":"⟺","DoubleLongRightArrow;":"⟹","DoubleRightArrow;":"⇒","DoubleRightTee;":"⊨","DoubleUpArrow;":"⇑","DoubleUpDownArrow;":"⇕","DoubleVerticalBar;":"∥","DownArrow;":"↓","Downarrow;":"⇓","downarrow;":"↓","DownArrowBar;":"⤓","DownArrowUpArrow;":"⇵","DownBreve;":"̑","downdownarrows;":"⇊","downharpoonleft;":"⇃","downharpoonright;":"⇂","DownLeftRightVector;":"⥐","DownLeftTeeVector;":"⥞","DownLeftVector;":"↽","DownLeftVectorBar;":"⥖","DownRightTeeVector;":"⥟","DownRightVector;":"⇁","DownRightVectorBar;":"⥗","DownTee;":"⊤","DownTeeArrow;":"↧","drbkarow;":"⤐","drcorn;":"⌟","drcrop;":"⌌","Dscr;":"𝒟","dscr;":"𝒹","DScy;":"Ѕ","dscy;":"ѕ","dsol;":"⧶","Dstrok;":"Đ","dstrok;":"đ","dtdot;":"⋱","dtri;":"▿","dtrif;":"▾","duarr;":"⇵","duhar;":"⥯","dwangle;":"⦦","DZcy;":"Џ","dzcy;":"џ","dzigrarr;":"⟿","Eacute;":"É","Eacute":"É","eacute;":"é","eacute":"é","easter;":"⩮","Ecaron;":"Ě","ecaron;":"ě","ecir;":"≖","Ecirc;":"Ê","Ecirc":"Ê","ecirc;":"ê","ecirc":"ê","ecolon;":"≕","Ecy;":"Э","ecy;":"э","eDDot;":"⩷","Edot;":"Ė","eDot;":"≑","edot;":"ė","ee;":"ⅇ","efDot;":"≒","Efr;":"𝔈","efr;":"𝔢","eg;":"⪚","Egrave;":"È","Egrave":"È","egrave;":"è","egrave":"è","egs;":"⪖","egsdot;":"⪘","el;":"⪙","Element;":"∈","elinters;":"⏧","ell;":"ℓ","els;":"⪕","elsdot;":"⪗","Emacr;":"Ē","emacr;":"ē","empty;":"∅","emptyset;":"∅","EmptySmallSquare;":"◻","emptyv;":"∅","EmptyVerySmallSquare;":"▫","emsp;":" ","emsp13;":" ","emsp14;":" ","ENG;":"Ŋ","eng;":"ŋ","ensp;":" 
","Eogon;":"Ę","eogon;":"ę","Eopf;":"𝔼","eopf;":"𝕖","epar;":"⋕","eparsl;":"⧣","eplus;":"⩱","epsi;":"ε","Epsilon;":"Ε","epsilon;":"ε","epsiv;":"ϵ","eqcirc;":"≖","eqcolon;":"≕","eqsim;":"≂","eqslantgtr;":"⪖","eqslantless;":"⪕","Equal;":"⩵","equals;":"=","EqualTilde;":"≂","equest;":"≟","Equilibrium;":"⇌","equiv;":"≡","equivDD;":"⩸","eqvparsl;":"⧥","erarr;":"⥱","erDot;":"≓","Escr;":"ℰ","escr;":"ℯ","esdot;":"≐","Esim;":"⩳","esim;":"≂","Eta;":"Η","eta;":"η","ETH;":"Ð","ETH":"Ð","eth;":"ð","eth":"ð","Euml;":"Ë","Euml":"Ë","euml;":"ë","euml":"ë","euro;":"€","excl;":"!","exist;":"∃","Exists;":"∃","expectation;":"ℰ","ExponentialE;":"ⅇ","exponentiale;":"ⅇ","fallingdotseq;":"≒","Fcy;":"Ф","fcy;":"ф","female;":"♀","ffilig;":"ffi","fflig;":"ff","ffllig;":"ffl","Ffr;":"𝔉","ffr;":"𝔣","filig;":"fi","FilledSmallSquare;":"◼","FilledVerySmallSquare;":"▪","fjlig;":"fj","flat;":"♭","fllig;":"fl","fltns;":"▱","fnof;":"ƒ","Fopf;":"𝔽","fopf;":"𝕗","ForAll;":"∀","forall;":"∀","fork;":"⋔","forkv;":"⫙","Fouriertrf;":"ℱ","fpartint;":"⨍","frac12;":"½","frac12":"½","frac13;":"⅓","frac14;":"¼","frac14":"¼","frac15;":"⅕","frac16;":"⅙","frac18;":"⅛","frac23;":"⅔","frac25;":"⅖","frac34;":"¾","frac34":"¾","frac35;":"⅗","frac38;":"⅜","frac45;":"⅘","frac56;":"⅚","frac58;":"⅝","frac78;":"⅞","frasl;":"⁄","frown;":"⌢","Fscr;":"ℱ","fscr;":"𝒻","gacute;":"ǵ","Gamma;":"Γ","gamma;":"γ","Gammad;":"Ϝ","gammad;":"ϝ","gap;":"⪆","Gbreve;":"Ğ","gbreve;":"ğ","Gcedil;":"Ģ","Gcirc;":"Ĝ","gcirc;":"ĝ","Gcy;":"Г","gcy;":"г","Gdot;":"Ġ","gdot;":"ġ","gE;":"≧","ge;":"≥","gEl;":"⪌","gel;":"⋛","geq;":"≥","geqq;":"≧","geqslant;":"⩾","ges;":"⩾","gescc;":"⪩","gesdot;":"⪀","gesdoto;":"⪂","gesdotol;":"⪄","gesl;":"⋛︀","gesles;":"⪔","Gfr;":"𝔊","gfr;":"𝔤","Gg;":"⋙","gg;":"≫","ggg;":"⋙","gimel;":"ℷ","GJcy;":"Ѓ","gjcy;":"ѓ","gl;":"≷","gla;":"⪥","glE;":"⪒","glj;":"⪤","gnap;":"⪊","gnapprox;":"⪊","gnE;":"≩","gne;":"⪈","gneq;":"⪈","gneqq;":"≩","gnsim;":"⋧","Gopf;":"𝔾","gopf;":"𝕘","grave;":"`","GreaterEqual;":"≥","GreaterEqualLess;":"⋛","GreaterFullEqual;":"≧","GreaterGreater;":"⪢","GreaterLess;":"≷","GreaterSlantEqual;":"⩾","GreaterTilde;":"≳","Gscr;":"𝒢","gscr;":"ℊ","gsim;":"≳","gsime;":"⪎","gsiml;":"⪐","GT;":">","GT":">","Gt;":"≫","gt;":">","gt":">","gtcc;":"⪧","gtcir;":"⩺","gtdot;":"⋗","gtlPar;":"⦕","gtquest;":"⩼","gtrapprox;":"⪆","gtrarr;":"⥸","gtrdot;":"⋗","gtreqless;":"⋛","gtreqqless;":"⪌","gtrless;":"≷","gtrsim;":"≳","gvertneqq;":"≩︀","gvnE;":"≩︀","Hacek;":"ˇ","hairsp;":" 
","half;":"½","hamilt;":"ℋ","HARDcy;":"Ъ","hardcy;":"ъ","hArr;":"⇔","harr;":"↔","harrcir;":"⥈","harrw;":"↭","Hat;":"^","hbar;":"ℏ","Hcirc;":"Ĥ","hcirc;":"ĥ","hearts;":"♥","heartsuit;":"♥","hellip;":"…","hercon;":"⊹","Hfr;":"ℌ","hfr;":"𝔥","HilbertSpace;":"ℋ","hksearow;":"⤥","hkswarow;":"⤦","hoarr;":"⇿","homtht;":"∻","hookleftarrow;":"↩","hookrightarrow;":"↪","Hopf;":"ℍ","hopf;":"𝕙","horbar;":"―","HorizontalLine;":"─","Hscr;":"ℋ","hscr;":"𝒽","hslash;":"ℏ","Hstrok;":"Ħ","hstrok;":"ħ","HumpDownHump;":"≎","HumpEqual;":"≏","hybull;":"⁃","hyphen;":"‐","Iacute;":"Í","Iacute":"Í","iacute;":"í","iacute":"í","ic;":"⁣","Icirc;":"Î","Icirc":"Î","icirc;":"î","icirc":"î","Icy;":"И","icy;":"и","Idot;":"İ","IEcy;":"Е","iecy;":"е","iexcl;":"¡","iexcl":"¡","iff;":"⇔","Ifr;":"ℑ","ifr;":"𝔦","Igrave;":"Ì","Igrave":"Ì","igrave;":"ì","igrave":"ì","ii;":"ⅈ","iiiint;":"⨌","iiint;":"∭","iinfin;":"⧜","iiota;":"℩","IJlig;":"IJ","ijlig;":"ij","Im;":"ℑ","Imacr;":"Ī","imacr;":"ī","image;":"ℑ","ImaginaryI;":"ⅈ","imagline;":"ℐ","imagpart;":"ℑ","imath;":"ı","imof;":"⊷","imped;":"Ƶ","Implies;":"⇒","in;":"∈","incare;":"℅","infin;":"∞","infintie;":"⧝","inodot;":"ı","Int;":"∬","int;":"∫","intcal;":"⊺","integers;":"ℤ","Integral;":"∫","intercal;":"⊺","Intersection;":"⋂","intlarhk;":"⨗","intprod;":"⨼","InvisibleComma;":"⁣","InvisibleTimes;":"⁢","IOcy;":"Ё","iocy;":"ё","Iogon;":"Į","iogon;":"į","Iopf;":"𝕀","iopf;":"𝕚","Iota;":"Ι","iota;":"ι","iprod;":"⨼","iquest;":"¿","iquest":"¿","Iscr;":"ℐ","iscr;":"𝒾","isin;":"∈","isindot;":"⋵","isinE;":"⋹","isins;":"⋴","isinsv;":"⋳","isinv;":"∈","it;":"⁢","Itilde;":"Ĩ","itilde;":"ĩ","Iukcy;":"І","iukcy;":"і","Iuml;":"Ï","Iuml":"Ï","iuml;":"ï","iuml":"ï","Jcirc;":"Ĵ","jcirc;":"ĵ","Jcy;":"Й","jcy;":"й","Jfr;":"𝔍","jfr;":"𝔧","jmath;":"ȷ","Jopf;":"𝕁","jopf;":"𝕛","Jscr;":"𝒥","jscr;":"𝒿","Jsercy;":"Ј","jsercy;":"ј","Jukcy;":"Є","jukcy;":"є","Kappa;":"Κ","kappa;":"κ","kappav;":"ϰ","Kcedil;":"Ķ","kcedil;":"ķ","Kcy;":"К","kcy;":"к","Kfr;":"𝔎","kfr;":"𝔨","kgreen;":"ĸ","KHcy;":"Х","khcy;":"х","KJcy;":"Ќ","kjcy;":"ќ","Kopf;":"𝕂","kopf;":"𝕜","Kscr;":"𝒦","kscr;":"𝓀","lAarr;":"⇚","Lacute;":"Ĺ","lacute;":"ĺ","laemptyv;":"⦴","lagran;":"ℒ","Lambda;":"Λ","lambda;":"λ","Lang;":"⟪","lang;":"⟨","langd;":"⦑","langle;":"⟨","lap;":"⪅","Laplacetrf;":"ℒ","laquo;":"«","laquo":"«","Larr;":"↞","lArr;":"⇐","larr;":"←","larrb;":"⇤","larrbfs;":"⤟","larrfs;":"⤝","larrhk;":"↩","larrlp;":"↫","larrpl;":"⤹","larrsim;":"⥳","larrtl;":"↢","lat;":"⪫","lAtail;":"⤛","latail;":"⤙","late;":"⪭","lates;":"⪭︀","lBarr;":"⤎","lbarr;":"⤌","lbbrk;":"❲","lbrace;":"{","lbrack;":"[","lbrke;":"⦋","lbrksld;":"⦏","lbrkslu;":"⦍","Lcaron;":"Ľ","lcaron;":"ľ","Lcedil;":"Ļ","lcedil;":"ļ","lceil;":"⌈","lcub;":"{","Lcy;":"Л","lcy;":"л","ldca;":"⤶","ldquo;":"“","ldquor;":"„","ldrdhar;":"⥧","ldrushar;":"⥋","ldsh;":"↲","lE;":"≦","le;":"≤","LeftAngleBracket;":"⟨","LeftArrow;":"←","Leftarrow;":"⇐","leftarrow;":"←","LeftArrowBar;":"⇤","LeftArrowRightArrow;":"⇆","leftarrowtail;":"↢","LeftCeiling;":"⌈","LeftDoubleBracket;":"⟦","LeftDownTeeVector;":"⥡","LeftDownVector;":"⇃","LeftDownVectorBar;":"⥙","LeftFloor;":"⌊","leftharpoondown;":"↽","leftharpoonup;":"↼","leftleftarrows;":"⇇","LeftRightArrow;":"↔","Leftrightarrow;":"⇔","leftrightarrow;":"↔","leftrightarrows;":"⇆","leftrightharpoons;":"⇋","leftrightsquigarrow;":"↭","LeftRightVector;":"⥎","LeftTee;":"⊣","LeftTeeArrow;":"↤","LeftTeeVector;":"⥚","leftthreetimes;":"⋋","LeftTriangle;":"⊲","LeftTriangleBar;":"⧏","LeftTriangleEqual;":"⊴","LeftUpDownVector;":"⥑","LeftUpTeeVector;":"⥠","LeftUpVector;":"↿","LeftUpVectorBar;":
"⥘","LeftVector;":"↼","LeftVectorBar;":"⥒","lEg;":"⪋","leg;":"⋚","leq;":"≤","leqq;":"≦","leqslant;":"⩽","les;":"⩽","lescc;":"⪨","lesdot;":"⩿","lesdoto;":"⪁","lesdotor;":"⪃","lesg;":"⋚︀","lesges;":"⪓","lessapprox;":"⪅","lessdot;":"⋖","lesseqgtr;":"⋚","lesseqqgtr;":"⪋","LessEqualGreater;":"⋚","LessFullEqual;":"≦","LessGreater;":"≶","lessgtr;":"≶","LessLess;":"⪡","lesssim;":"≲","LessSlantEqual;":"⩽","LessTilde;":"≲","lfisht;":"⥼","lfloor;":"⌊","Lfr;":"𝔏","lfr;":"𝔩","lg;":"≶","lgE;":"⪑","lHar;":"⥢","lhard;":"↽","lharu;":"↼","lharul;":"⥪","lhblk;":"▄","LJcy;":"Љ","ljcy;":"љ","Ll;":"⋘","ll;":"≪","llarr;":"⇇","llcorner;":"⌞","Lleftarrow;":"⇚","llhard;":"⥫","lltri;":"◺","Lmidot;":"Ŀ","lmidot;":"ŀ","lmoust;":"⎰","lmoustache;":"⎰","lnap;":"⪉","lnapprox;":"⪉","lnE;":"≨","lne;":"⪇","lneq;":"⪇","lneqq;":"≨","lnsim;":"⋦","loang;":"⟬","loarr;":"⇽","lobrk;":"⟦","LongLeftArrow;":"⟵","Longleftarrow;":"⟸","longleftarrow;":"⟵","LongLeftRightArrow;":"⟷","Longleftrightarrow;":"⟺","longleftrightarrow;":"⟷","longmapsto;":"⟼","LongRightArrow;":"⟶","Longrightarrow;":"⟹","longrightarrow;":"⟶","looparrowleft;":"↫","looparrowright;":"↬","lopar;":"⦅","Lopf;":"𝕃","lopf;":"𝕝","loplus;":"⨭","lotimes;":"⨴","lowast;":"∗","lowbar;":"_","LowerLeftArrow;":"↙","LowerRightArrow;":"↘","loz;":"◊","lozenge;":"◊","lozf;":"⧫","lpar;":"(","lparlt;":"⦓","lrarr;":"⇆","lrcorner;":"⌟","lrhar;":"⇋","lrhard;":"⥭","lrm;":"‎","lrtri;":"⊿","lsaquo;":"‹","Lscr;":"ℒ","lscr;":"𝓁","Lsh;":"↰","lsh;":"↰","lsim;":"≲","lsime;":"⪍","lsimg;":"⪏","lsqb;":"[","lsquo;":"‘","lsquor;":"‚","Lstrok;":"Ł","lstrok;":"ł","LT;":"<","LT":"<","Lt;":"≪","lt;":"<","lt":"<","ltcc;":"⪦","ltcir;":"⩹","ltdot;":"⋖","lthree;":"⋋","ltimes;":"⋉","ltlarr;":"⥶","ltquest;":"⩻","ltri;":"◃","ltrie;":"⊴","ltrif;":"◂","ltrPar;":"⦖","lurdshar;":"⥊","luruhar;":"⥦","lvertneqq;":"≨︀","lvnE;":"≨︀","macr;":"¯","macr":"¯","male;":"♂","malt;":"✠","maltese;":"✠","Map;":"⤅","map;":"↦","mapsto;":"↦","mapstodown;":"↧","mapstoleft;":"↤","mapstoup;":"↥","marker;":"▮","mcomma;":"⨩","Mcy;":"М","mcy;":"м","mdash;":"—","mDDot;":"∺","measuredangle;":"∡","MediumSpace;":" ","Mellintrf;":"ℳ","Mfr;":"𝔐","mfr;":"𝔪","mho;":"℧","micro;":"µ","micro":"µ","mid;":"∣","midast;":"*","midcir;":"⫰","middot;":"·","middot":"·","minus;":"−","minusb;":"⊟","minusd;":"∸","minusdu;":"⨪","MinusPlus;":"∓","mlcp;":"⫛","mldr;":"…","mnplus;":"∓","models;":"⊧","Mopf;":"𝕄","mopf;":"𝕞","mp;":"∓","Mscr;":"ℳ","mscr;":"𝓂","mstpos;":"∾","Mu;":"Μ","mu;":"μ","multimap;":"⊸","mumap;":"⊸","nabla;":"∇","Nacute;":"Ń","nacute;":"ń","nang;":"∠⃒","nap;":"≉","napE;":"⩰̸","napid;":"≋̸","napos;":"ʼn","napprox;":"≉","natur;":"♮","natural;":"♮","naturals;":"ℕ","nbsp;":" ","nbsp":" 
","nbump;":"≎̸","nbumpe;":"≏̸","ncap;":"⩃","Ncaron;":"Ň","ncaron;":"ň","Ncedil;":"Ņ","ncedil;":"ņ","ncong;":"≇","ncongdot;":"⩭̸","ncup;":"⩂","Ncy;":"Н","ncy;":"н","ndash;":"–","ne;":"≠","nearhk;":"⤤","neArr;":"⇗","nearr;":"↗","nearrow;":"↗","nedot;":"≐̸","NegativeMediumSpace;":"​","NegativeThickSpace;":"​","NegativeThinSpace;":"​","NegativeVeryThinSpace;":"​","nequiv;":"≢","nesear;":"⤨","nesim;":"≂̸","NestedGreaterGreater;":"≫","NestedLessLess;":"≪","NewLine;":"\\n","nexist;":"∄","nexists;":"∄","Nfr;":"𝔑","nfr;":"𝔫","ngE;":"≧̸","nge;":"≱","ngeq;":"≱","ngeqq;":"≧̸","ngeqslant;":"⩾̸","nges;":"⩾̸","nGg;":"⋙̸","ngsim;":"≵","nGt;":"≫⃒","ngt;":"≯","ngtr;":"≯","nGtv;":"≫̸","nhArr;":"⇎","nharr;":"↮","nhpar;":"⫲","ni;":"∋","nis;":"⋼","nisd;":"⋺","niv;":"∋","NJcy;":"Њ","njcy;":"њ","nlArr;":"⇍","nlarr;":"↚","nldr;":"‥","nlE;":"≦̸","nle;":"≰","nLeftarrow;":"⇍","nleftarrow;":"↚","nLeftrightarrow;":"⇎","nleftrightarrow;":"↮","nleq;":"≰","nleqq;":"≦̸","nleqslant;":"⩽̸","nles;":"⩽̸","nless;":"≮","nLl;":"⋘̸","nlsim;":"≴","nLt;":"≪⃒","nlt;":"≮","nltri;":"⋪","nltrie;":"⋬","nLtv;":"≪̸","nmid;":"∤","NoBreak;":"⁠","NonBreakingSpace;":" ","Nopf;":"ℕ","nopf;":"𝕟","Not;":"⫬","not;":"¬","not":"¬","NotCongruent;":"≢","NotCupCap;":"≭","NotDoubleVerticalBar;":"∦","NotElement;":"∉","NotEqual;":"≠","NotEqualTilde;":"≂̸","NotExists;":"∄","NotGreater;":"≯","NotGreaterEqual;":"≱","NotGreaterFullEqual;":"≧̸","NotGreaterGreater;":"≫̸","NotGreaterLess;":"≹","NotGreaterSlantEqual;":"⩾̸","NotGreaterTilde;":"≵","NotHumpDownHump;":"≎̸","NotHumpEqual;":"≏̸","notin;":"∉","notindot;":"⋵̸","notinE;":"⋹̸","notinva;":"∉","notinvb;":"⋷","notinvc;":"⋶","NotLeftTriangle;":"⋪","NotLeftTriangleBar;":"⧏̸","NotLeftTriangleEqual;":"⋬","NotLess;":"≮","NotLessEqual;":"≰","NotLessGreater;":"≸","NotLessLess;":"≪̸","NotLessSlantEqual;":"⩽̸","NotLessTilde;":"≴","NotNestedGreaterGreater;":"⪢̸","NotNestedLessLess;":"⪡̸","notni;":"∌","notniva;":"∌","notnivb;":"⋾","notnivc;":"⋽","NotPrecedes;":"⊀","NotPrecedesEqual;":"⪯̸","NotPrecedesSlantEqual;":"⋠","NotReverseElement;":"∌","NotRightTriangle;":"⋫","NotRightTriangleBar;":"⧐̸","NotRightTriangleEqual;":"⋭","NotSquareSubset;":"⊏̸","NotSquareSubsetEqual;":"⋢","NotSquareSuperset;":"⊐̸","NotSquareSupersetEqual;":"⋣","NotSubset;":"⊂⃒","NotSubsetEqual;":"⊈","NotSucceeds;":"⊁","NotSucceedsEqual;":"⪰̸","NotSucceedsSlantEqual;":"⋡","NotSucceedsTilde;":"≿̸","NotSuperset;":"⊃⃒","NotSupersetEqual;":"⊉","NotTilde;":"≁","NotTildeEqual;":"≄","NotTildeFullEqual;":"≇","NotTildeTilde;":"≉","NotVerticalBar;":"∤","npar;":"∦","nparallel;":"∦","nparsl;":"⫽⃥","npart;":"∂̸","npolint;":"⨔","npr;":"⊀","nprcue;":"⋠","npre;":"⪯̸","nprec;":"⊀","npreceq;":"⪯̸","nrArr;":"⇏","nrarr;":"↛","nrarrc;":"⤳̸","nrarrw;":"↝̸","nRightarrow;":"⇏","nrightarrow;":"↛","nrtri;":"⋫","nrtrie;":"⋭","nsc;":"⊁","nsccue;":"⋡","nsce;":"⪰̸","Nscr;":"𝒩","nscr;":"𝓃","nshortmid;":"∤","nshortparallel;":"∦","nsim;":"≁","nsime;":"≄","nsimeq;":"≄","nsmid;":"∤","nspar;":"∦","nsqsube;":"⋢","nsqsupe;":"⋣","nsub;":"⊄","nsubE;":"⫅̸","nsube;":"⊈","nsubset;":"⊂⃒","nsubseteq;":"⊈","nsubseteqq;":"⫅̸","nsucc;":"⊁","nsucceq;":"⪰̸","nsup;":"⊅","nsupE;":"⫆̸","nsupe;":"⊉","nsupset;":"⊃⃒","nsupseteq;":"⊉","nsupseteqq;":"⫆̸","ntgl;":"≹","Ntilde;":"Ñ","Ntilde":"Ñ","ntilde;":"ñ","ntilde":"ñ","ntlg;":"≸","ntriangleleft;":"⋪","ntrianglelefteq;":"⋬","ntriangleright;":"⋫","ntrianglerighteq;":"⋭","Nu;":"Ν","nu;":"ν","num;":"#","numero;":"№","numsp;":" 
","nvap;":"≍⃒","nVDash;":"⊯","nVdash;":"⊮","nvDash;":"⊭","nvdash;":"⊬","nvge;":"≥⃒","nvgt;":">⃒","nvHarr;":"⤄","nvinfin;":"⧞","nvlArr;":"⤂","nvle;":"≤⃒","nvlt;":"<⃒","nvltrie;":"⊴⃒","nvrArr;":"⤃","nvrtrie;":"⊵⃒","nvsim;":"∼⃒","nwarhk;":"⤣","nwArr;":"⇖","nwarr;":"↖","nwarrow;":"↖","nwnear;":"⤧","Oacute;":"Ó","Oacute":"Ó","oacute;":"ó","oacute":"ó","oast;":"⊛","ocir;":"⊚","Ocirc;":"Ô","Ocirc":"Ô","ocirc;":"ô","ocirc":"ô","Ocy;":"О","ocy;":"о","odash;":"⊝","Odblac;":"Ő","odblac;":"ő","odiv;":"⨸","odot;":"⊙","odsold;":"⦼","OElig;":"Œ","oelig;":"œ","ofcir;":"⦿","Ofr;":"𝔒","ofr;":"𝔬","ogon;":"˛","Ograve;":"Ò","Ograve":"Ò","ograve;":"ò","ograve":"ò","ogt;":"⧁","ohbar;":"⦵","ohm;":"Ω","oint;":"∮","olarr;":"↺","olcir;":"⦾","olcross;":"⦻","oline;":"‾","olt;":"⧀","Omacr;":"Ō","omacr;":"ō","Omega;":"Ω","omega;":"ω","Omicron;":"Ο","omicron;":"ο","omid;":"⦶","ominus;":"⊖","Oopf;":"𝕆","oopf;":"𝕠","opar;":"⦷","OpenCurlyDoubleQuote;":"“","OpenCurlyQuote;":"‘","operp;":"⦹","oplus;":"⊕","Or;":"⩔","or;":"∨","orarr;":"↻","ord;":"⩝","order;":"ℴ","orderof;":"ℴ","ordf;":"ª","ordf":"ª","ordm;":"º","ordm":"º","origof;":"⊶","oror;":"⩖","orslope;":"⩗","orv;":"⩛","oS;":"Ⓢ","Oscr;":"𝒪","oscr;":"ℴ","Oslash;":"Ø","Oslash":"Ø","oslash;":"ø","oslash":"ø","osol;":"⊘","Otilde;":"Õ","Otilde":"Õ","otilde;":"õ","otilde":"õ","Otimes;":"⨷","otimes;":"⊗","otimesas;":"⨶","Ouml;":"Ö","Ouml":"Ö","ouml;":"ö","ouml":"ö","ovbar;":"⌽","OverBar;":"‾","OverBrace;":"⏞","OverBracket;":"⎴","OverParenthesis;":"⏜","par;":"∥","para;":"¶","para":"¶","parallel;":"∥","parsim;":"⫳","parsl;":"⫽","part;":"∂","PartialD;":"∂","Pcy;":"П","pcy;":"п","percnt;":"%","period;":".","permil;":"‰","perp;":"⊥","pertenk;":"‱","Pfr;":"𝔓","pfr;":"𝔭","Phi;":"Φ","phi;":"φ","phiv;":"ϕ","phmmat;":"ℳ","phone;":"☎","Pi;":"Π","pi;":"π","pitchfork;":"⋔","piv;":"ϖ","planck;":"ℏ","planckh;":"ℎ","plankv;":"ℏ","plus;":"+","plusacir;":"⨣","plusb;":"⊞","pluscir;":"⨢","plusdo;":"∔","plusdu;":"⨥","pluse;":"⩲","PlusMinus;":"±","plusmn;":"±","plusmn":"±","plussim;":"⨦","plustwo;":"⨧","pm;":"±","Poincareplane;":"ℌ","pointint;":"⨕","Popf;":"ℙ","popf;":"𝕡","pound;":"£","pound":"£","Pr;":"⪻","pr;":"≺","prap;":"⪷","prcue;":"≼","prE;":"⪳","pre;":"⪯","prec;":"≺","precapprox;":"⪷","preccurlyeq;":"≼","Precedes;":"≺","PrecedesEqual;":"⪯","PrecedesSlantEqual;":"≼","PrecedesTilde;":"≾","preceq;":"⪯","precnapprox;":"⪹","precneqq;":"⪵","precnsim;":"⋨","precsim;":"≾","Prime;":"″","prime;":"′","primes;":"ℙ","prnap;":"⪹","prnE;":"⪵","prnsim;":"⋨","prod;":"∏","Product;":"∏","profalar;":"⌮","profline;":"⌒","profsurf;":"⌓","prop;":"∝","Proportion;":"∷","Proportional;":"∝","propto;":"∝","prsim;":"≾","prurel;":"⊰","Pscr;":"𝒫","pscr;":"𝓅","Psi;":"Ψ","psi;":"ψ","puncsp;":" 
","Qfr;":"𝔔","qfr;":"𝔮","qint;":"⨌","Qopf;":"ℚ","qopf;":"𝕢","qprime;":"⁗","Qscr;":"𝒬","qscr;":"𝓆","quaternions;":"ℍ","quatint;":"⨖","quest;":"?","questeq;":"≟","QUOT;":"\\"","QUOT":"\\"","quot;":"\\"","quot":"\\"","rAarr;":"⇛","race;":"∽̱","Racute;":"Ŕ","racute;":"ŕ","radic;":"√","raemptyv;":"⦳","Rang;":"⟫","rang;":"⟩","rangd;":"⦒","range;":"⦥","rangle;":"⟩","raquo;":"»","raquo":"»","Rarr;":"↠","rArr;":"⇒","rarr;":"→","rarrap;":"⥵","rarrb;":"⇥","rarrbfs;":"⤠","rarrc;":"⤳","rarrfs;":"⤞","rarrhk;":"↪","rarrlp;":"↬","rarrpl;":"⥅","rarrsim;":"⥴","Rarrtl;":"⤖","rarrtl;":"↣","rarrw;":"↝","rAtail;":"⤜","ratail;":"⤚","ratio;":"∶","rationals;":"ℚ","RBarr;":"⤐","rBarr;":"⤏","rbarr;":"⤍","rbbrk;":"❳","rbrace;":"}","rbrack;":"]","rbrke;":"⦌","rbrksld;":"⦎","rbrkslu;":"⦐","Rcaron;":"Ř","rcaron;":"ř","Rcedil;":"Ŗ","rcedil;":"ŗ","rceil;":"⌉","rcub;":"}","Rcy;":"Р","rcy;":"р","rdca;":"⤷","rdldhar;":"⥩","rdquo;":"”","rdquor;":"”","rdsh;":"↳","Re;":"ℜ","real;":"ℜ","realine;":"ℛ","realpart;":"ℜ","reals;":"ℝ","rect;":"▭","REG;":"®","REG":"®","reg;":"®","reg":"®","ReverseElement;":"∋","ReverseEquilibrium;":"⇋","ReverseUpEquilibrium;":"⥯","rfisht;":"⥽","rfloor;":"⌋","Rfr;":"ℜ","rfr;":"𝔯","rHar;":"⥤","rhard;":"⇁","rharu;":"⇀","rharul;":"⥬","Rho;":"Ρ","rho;":"ρ","rhov;":"ϱ","RightAngleBracket;":"⟩","RightArrow;":"→","Rightarrow;":"⇒","rightarrow;":"→","RightArrowBar;":"⇥","RightArrowLeftArrow;":"⇄","rightarrowtail;":"↣","RightCeiling;":"⌉","RightDoubleBracket;":"⟧","RightDownTeeVector;":"⥝","RightDownVector;":"⇂","RightDownVectorBar;":"⥕","RightFloor;":"⌋","rightharpoondown;":"⇁","rightharpoonup;":"⇀","rightleftarrows;":"⇄","rightleftharpoons;":"⇌","rightrightarrows;":"⇉","rightsquigarrow;":"↝","RightTee;":"⊢","RightTeeArrow;":"↦","RightTeeVector;":"⥛","rightthreetimes;":"⋌","RightTriangle;":"⊳","RightTriangleBar;":"⧐","RightTriangleEqual;":"⊵","RightUpDownVector;":"⥏","RightUpTeeVector;":"⥜","RightUpVector;":"↾","RightUpVectorBar;":"⥔","RightVector;":"⇀","RightVectorBar;":"⥓","ring;":"˚","risingdotseq;":"≓","rlarr;":"⇄","rlhar;":"⇌","rlm;":"‏","rmoust;":"⎱","rmoustache;":"⎱","rnmid;":"⫮","roang;":"⟭","roarr;":"⇾","robrk;":"⟧","ropar;":"⦆","Ropf;":"ℝ","ropf;":"𝕣","roplus;":"⨮","rotimes;":"⨵","RoundImplies;":"⥰","rpar;":")","rpargt;":"⦔","rppolint;":"⨒","rrarr;":"⇉","Rrightarrow;":"⇛","rsaquo;":"›","Rscr;":"ℛ","rscr;":"𝓇","Rsh;":"↱","rsh;":"↱","rsqb;":"]","rsquo;":"’","rsquor;":"’","rthree;":"⋌","rtimes;":"⋊","rtri;":"▹","rtrie;":"⊵","rtrif;":"▸","rtriltri;":"⧎","RuleDelayed;":"⧴","ruluhar;":"⥨","rx;":"℞","Sacute;":"Ś","sacute;":"ś","sbquo;":"‚","Sc;":"⪼","sc;":"≻","scap;":"⪸","Scaron;":"Š","scaron;":"š","sccue;":"≽","scE;":"⪴","sce;":"⪰","Scedil;":"Ş","scedil;":"ş","Scirc;":"Ŝ","scirc;":"ŝ","scnap;":"⪺","scnE;":"⪶","scnsim;":"⋩","scpolint;":"⨓","scsim;":"≿","Scy;":"С","scy;":"с","sdot;":"⋅","sdotb;":"⊡","sdote;":"⩦","searhk;":"⤥","seArr;":"⇘","searr;":"↘","searrow;":"↘","sect;":"§","sect":"§","semi;":";","seswar;":"⤩","setminus;":"∖","setmn;":"∖","sext;":"✶","Sfr;":"𝔖","sfr;":"𝔰","sfrown;":"⌢","sharp;":"♯","SHCHcy;":"Щ","shchcy;":"щ","SHcy;":"Ш","shcy;":"ш","ShortDownArrow;":"↓","ShortLeftArrow;":"←","shortmid;":"∣","shortparallel;":"∥","ShortRightArrow;":"→","ShortUpArrow;":"↑","shy;":"­","shy":"­","Sigma;":"Σ","sigma;":"σ","sigmaf;":"ς","sigmav;":"ς","sim;":"∼","simdot;":"⩪","sime;":"≃","simeq;":"≃","simg;":"⪞","simgE;":"⪠","siml;":"⪝","simlE;":"⪟","simne;":"≆","simplus;":"⨤","simrarr;":"⥲","slarr;":"←","SmallCircle;":"∘","smallsetminus;":"∖","smashp;":"⨳","smeparsl;":"⧤","smid;":"∣","smile;":"⌣","smt;":"⪪","smt
e;":"⪬","smtes;":"⪬︀","SOFTcy;":"Ь","softcy;":"ь","sol;":"/","solb;":"⧄","solbar;":"⌿","Sopf;":"𝕊","sopf;":"𝕤","spades;":"♠","spadesuit;":"♠","spar;":"∥","sqcap;":"⊓","sqcaps;":"⊓︀","sqcup;":"⊔","sqcups;":"⊔︀","Sqrt;":"√","sqsub;":"⊏","sqsube;":"⊑","sqsubset;":"⊏","sqsubseteq;":"⊑","sqsup;":"⊐","sqsupe;":"⊒","sqsupset;":"⊐","sqsupseteq;":"⊒","squ;":"□","Square;":"□","square;":"□","SquareIntersection;":"⊓","SquareSubset;":"⊏","SquareSubsetEqual;":"⊑","SquareSuperset;":"⊐","SquareSupersetEqual;":"⊒","SquareUnion;":"⊔","squarf;":"▪","squf;":"▪","srarr;":"→","Sscr;":"𝒮","sscr;":"𝓈","ssetmn;":"∖","ssmile;":"⌣","sstarf;":"⋆","Star;":"⋆","star;":"☆","starf;":"★","straightepsilon;":"ϵ","straightphi;":"ϕ","strns;":"¯","Sub;":"⋐","sub;":"⊂","subdot;":"⪽","subE;":"⫅","sube;":"⊆","subedot;":"⫃","submult;":"⫁","subnE;":"⫋","subne;":"⊊","subplus;":"⪿","subrarr;":"⥹","Subset;":"⋐","subset;":"⊂","subseteq;":"⊆","subseteqq;":"⫅","SubsetEqual;":"⊆","subsetneq;":"⊊","subsetneqq;":"⫋","subsim;":"⫇","subsub;":"⫕","subsup;":"⫓","succ;":"≻","succapprox;":"⪸","succcurlyeq;":"≽","Succeeds;":"≻","SucceedsEqual;":"⪰","SucceedsSlantEqual;":"≽","SucceedsTilde;":"≿","succeq;":"⪰","succnapprox;":"⪺","succneqq;":"⪶","succnsim;":"⋩","succsim;":"≿","SuchThat;":"∋","Sum;":"∑","sum;":"∑","sung;":"♪","Sup;":"⋑","sup;":"⊃","sup1;":"¹","sup1":"¹","sup2;":"²","sup2":"²","sup3;":"³","sup3":"³","supdot;":"⪾","supdsub;":"⫘","supE;":"⫆","supe;":"⊇","supedot;":"⫄","Superset;":"⊃","SupersetEqual;":"⊇","suphsol;":"⟉","suphsub;":"⫗","suplarr;":"⥻","supmult;":"⫂","supnE;":"⫌","supne;":"⊋","supplus;":"⫀","Supset;":"⋑","supset;":"⊃","supseteq;":"⊇","supseteqq;":"⫆","supsetneq;":"⊋","supsetneqq;":"⫌","supsim;":"⫈","supsub;":"⫔","supsup;":"⫖","swarhk;":"⤦","swArr;":"⇙","swarr;":"↙","swarrow;":"↙","swnwar;":"⤪","szlig;":"ß","szlig":"ß","Tab;":"\\t","target;":"⌖","Tau;":"Τ","tau;":"τ","tbrk;":"⎴","Tcaron;":"Ť","tcaron;":"ť","Tcedil;":"Ţ","tcedil;":"ţ","Tcy;":"Т","tcy;":"т","tdot;":"⃛","telrec;":"⌕","Tfr;":"𝔗","tfr;":"𝔱","there4;":"∴","Therefore;":"∴","therefore;":"∴","Theta;":"Θ","theta;":"θ","thetasym;":"ϑ","thetav;":"ϑ","thickapprox;":"≈","thicksim;":"∼","ThickSpace;":"  ","thinsp;":" ","ThinSpace;":" 
","thkap;":"≈","thksim;":"∼","THORN;":"Þ","THORN":"Þ","thorn;":"þ","thorn":"þ","Tilde;":"∼","tilde;":"˜","TildeEqual;":"≃","TildeFullEqual;":"≅","TildeTilde;":"≈","times;":"×","times":"×","timesb;":"⊠","timesbar;":"⨱","timesd;":"⨰","tint;":"∭","toea;":"⤨","top;":"⊤","topbot;":"⌶","topcir;":"⫱","Topf;":"𝕋","topf;":"𝕥","topfork;":"⫚","tosa;":"⤩","tprime;":"‴","TRADE;":"™","trade;":"™","triangle;":"▵","triangledown;":"▿","triangleleft;":"◃","trianglelefteq;":"⊴","triangleq;":"≜","triangleright;":"▹","trianglerighteq;":"⊵","tridot;":"◬","trie;":"≜","triminus;":"⨺","TripleDot;":"⃛","triplus;":"⨹","trisb;":"⧍","tritime;":"⨻","trpezium;":"⏢","Tscr;":"𝒯","tscr;":"𝓉","TScy;":"Ц","tscy;":"ц","TSHcy;":"Ћ","tshcy;":"ћ","Tstrok;":"Ŧ","tstrok;":"ŧ","twixt;":"≬","twoheadleftarrow;":"↞","twoheadrightarrow;":"↠","Uacute;":"Ú","Uacute":"Ú","uacute;":"ú","uacute":"ú","Uarr;":"↟","uArr;":"⇑","uarr;":"↑","Uarrocir;":"⥉","Ubrcy;":"Ў","ubrcy;":"ў","Ubreve;":"Ŭ","ubreve;":"ŭ","Ucirc;":"Û","Ucirc":"Û","ucirc;":"û","ucirc":"û","Ucy;":"У","ucy;":"у","udarr;":"⇅","Udblac;":"Ű","udblac;":"ű","udhar;":"⥮","ufisht;":"⥾","Ufr;":"𝔘","ufr;":"𝔲","Ugrave;":"Ù","Ugrave":"Ù","ugrave;":"ù","ugrave":"ù","uHar;":"⥣","uharl;":"↿","uharr;":"↾","uhblk;":"▀","ulcorn;":"⌜","ulcorner;":"⌜","ulcrop;":"⌏","ultri;":"◸","Umacr;":"Ū","umacr;":"ū","uml;":"¨","uml":"¨","UnderBar;":"_","UnderBrace;":"⏟","UnderBracket;":"⎵","UnderParenthesis;":"⏝","Union;":"⋃","UnionPlus;":"⊎","Uogon;":"Ų","uogon;":"ų","Uopf;":"𝕌","uopf;":"𝕦","UpArrow;":"↑","Uparrow;":"⇑","uparrow;":"↑","UpArrowBar;":"⤒","UpArrowDownArrow;":"⇅","UpDownArrow;":"↕","Updownarrow;":"⇕","updownarrow;":"↕","UpEquilibrium;":"⥮","upharpoonleft;":"↿","upharpoonright;":"↾","uplus;":"⊎","UpperLeftArrow;":"↖","UpperRightArrow;":"↗","Upsi;":"ϒ","upsi;":"υ","upsih;":"ϒ","Upsilon;":"Υ","upsilon;":"υ","UpTee;":"⊥","UpTeeArrow;":"↥","upuparrows;":"⇈","urcorn;":"⌝","urcorner;":"⌝","urcrop;":"⌎","Uring;":"Ů","uring;":"ů","urtri;":"◹","Uscr;":"𝒰","uscr;":"𝓊","utdot;":"⋰","Utilde;":"Ũ","utilde;":"ũ","utri;":"▵","utrif;":"▴","uuarr;":"⇈","Uuml;":"Ü","Uuml":"Ü","uuml;":"ü","uuml":"ü","uwangle;":"⦧","vangrt;":"⦜","varepsilon;":"ϵ","varkappa;":"ϰ","varnothing;":"∅","varphi;":"ϕ","varpi;":"ϖ","varpropto;":"∝","vArr;":"⇕","varr;":"↕","varrho;":"ϱ","varsigma;":"ς","varsubsetneq;":"⊊︀","varsubsetneqq;":"⫋︀","varsupsetneq;":"⊋︀","varsupsetneqq;":"⫌︀","vartheta;":"ϑ","vartriangleleft;":"⊲","vartriangleright;":"⊳","Vbar;":"⫫","vBar;":"⫨","vBarv;":"⫩","Vcy;":"В","vcy;":"в","VDash;":"⊫","Vdash;":"⊩","vDash;":"⊨","vdash;":"⊢","Vdashl;":"⫦","Vee;":"⋁","vee;":"∨","veebar;":"⊻","veeeq;":"≚","vellip;":"⋮","Verbar;":"‖","verbar;":"|","Vert;":"‖","vert;":"|","VerticalBar;":"∣","VerticalLine;":"|","VerticalSeparator;":"❘","VerticalTilde;":"≀","VeryThinSpace;":" 
","Vfr;":"𝔙","vfr;":"𝔳","vltri;":"⊲","vnsub;":"⊂⃒","vnsup;":"⊃⃒","Vopf;":"𝕍","vopf;":"𝕧","vprop;":"∝","vrtri;":"⊳","Vscr;":"𝒱","vscr;":"𝓋","vsubnE;":"⫋︀","vsubne;":"⊊︀","vsupnE;":"⫌︀","vsupne;":"⊋︀","Vvdash;":"⊪","vzigzag;":"⦚","Wcirc;":"Ŵ","wcirc;":"ŵ","wedbar;":"⩟","Wedge;":"⋀","wedge;":"∧","wedgeq;":"≙","weierp;":"℘","Wfr;":"𝔚","wfr;":"𝔴","Wopf;":"𝕎","wopf;":"𝕨","wp;":"℘","wr;":"≀","wreath;":"≀","Wscr;":"𝒲","wscr;":"𝓌","xcap;":"⋂","xcirc;":"◯","xcup;":"⋃","xdtri;":"▽","Xfr;":"𝔛","xfr;":"𝔵","xhArr;":"⟺","xharr;":"⟷","Xi;":"Ξ","xi;":"ξ","xlArr;":"⟸","xlarr;":"⟵","xmap;":"⟼","xnis;":"⋻","xodot;":"⨀","Xopf;":"𝕏","xopf;":"𝕩","xoplus;":"⨁","xotime;":"⨂","xrArr;":"⟹","xrarr;":"⟶","Xscr;":"𝒳","xscr;":"𝓍","xsqcup;":"⨆","xuplus;":"⨄","xutri;":"△","xvee;":"⋁","xwedge;":"⋀","Yacute;":"Ý","Yacute":"Ý","yacute;":"ý","yacute":"ý","YAcy;":"Я","yacy;":"я","Ycirc;":"Ŷ","ycirc;":"ŷ","Ycy;":"Ы","ycy;":"ы","yen;":"¥","yen":"¥","Yfr;":"𝔜","yfr;":"𝔶","YIcy;":"Ї","yicy;":"ї","Yopf;":"𝕐","yopf;":"𝕪","Yscr;":"𝒴","yscr;":"𝓎","YUcy;":"Ю","yucy;":"ю","Yuml;":"Ÿ","yuml;":"ÿ","yuml":"ÿ","Zacute;":"Ź","zacute;":"ź","Zcaron;":"Ž","zcaron;":"ž","Zcy;":"З","zcy;":"з","Zdot;":"Ż","zdot;":"ż","zeetrf;":"ℨ","ZeroWidthSpace;":"​","Zeta;":"Ζ","zeta;":"ζ","Zfr;":"ℨ","zfr;":"𝔷","ZHcy;":"Ж","zhcy;":"ж","zigrarr;":"⇝","Zopf;":"ℤ","zopf;":"𝕫","Zscr;":"𝒵","zscr;":"𝓏","zwj;":"‍","zwnj;":"‌"}'); +module.exports = JSON.parse('{"9":"Tab;","10":"NewLine;","33":"excl;","34":"quot;","35":"num;","36":"dollar;","37":"percnt;","38":"amp;","39":"apos;","40":"lpar;","41":"rpar;","42":"midast;","43":"plus;","44":"comma;","46":"period;","47":"sol;","58":"colon;","59":"semi;","60":"lt;","61":"equals;","62":"gt;","63":"quest;","64":"commat;","91":"lsqb;","92":"bsol;","93":"rsqb;","94":"Hat;","95":"UnderBar;","96":"grave;","123":"lcub;","124":"VerticalLine;","125":"rcub;","160":"NonBreakingSpace;","161":"iexcl;","162":"cent;","163":"pound;","164":"curren;","165":"yen;","166":"brvbar;","167":"sect;","168":"uml;","169":"copy;","170":"ordf;","171":"laquo;","172":"not;","173":"shy;","174":"reg;","175":"strns;","176":"deg;","177":"pm;","178":"sup2;","179":"sup3;","180":"DiacriticalAcute;","181":"micro;","182":"para;","183":"middot;","184":"Cedilla;","185":"sup1;","186":"ordm;","187":"raquo;","188":"frac14;","189":"half;","190":"frac34;","191":"iquest;","192":"Agrave;","193":"Aacute;","194":"Acirc;","195":"Atilde;","196":"Auml;","197":"Aring;","198":"AElig;","199":"Ccedil;","200":"Egrave;","201":"Eacute;","202":"Ecirc;","203":"Euml;","204":"Igrave;","205":"Iacute;","206":"Icirc;","207":"Iuml;","208":"ETH;","209":"Ntilde;","210":"Ograve;","211":"Oacute;","212":"Ocirc;","213":"Otilde;","214":"Ouml;","215":"times;","216":"Oslash;","217":"Ugrave;","218":"Uacute;","219":"Ucirc;","220":"Uuml;","221":"Yacute;","222":"THORN;","223":"szlig;","224":"agrave;","225":"aacute;","226":"acirc;","227":"atilde;","228":"auml;","229":"aring;","230":"aelig;","231":"ccedil;","232":"egrave;","233":"eacute;","234":"ecirc;","235":"euml;","236":"igrave;","237":"iacute;","238":"icirc;","239":"iuml;","240":"eth;","241":"ntilde;","242":"ograve;","243":"oacute;","244":"ocirc;","245":"otilde;","246":"ouml;","247":"divide;","248":"oslash;","249":"ugrave;","250":"uacute;","251":"ucirc;","252":"uuml;","253":"yacute;","254":"thorn;","255":"yuml;","256":"Amacr;","257":"amacr;","258":"Abreve;","259":"abreve;","260":"Aogon;","261":"aogon;","262":"Cacute;","263":"cacute;","264":"Ccirc;","265":"ccirc;","266":"Cdot;","267":"cdot;","268":"Ccaron;","269":"ccaron;","270":"Dcaron;","271":"dcaron;","
272":"Dstrok;","273":"dstrok;","274":"Emacr;","275":"emacr;","278":"Edot;","279":"edot;","280":"Eogon;","281":"eogon;","282":"Ecaron;","283":"ecaron;","284":"Gcirc;","285":"gcirc;","286":"Gbreve;","287":"gbreve;","288":"Gdot;","289":"gdot;","290":"Gcedil;","292":"Hcirc;","293":"hcirc;","294":"Hstrok;","295":"hstrok;","296":"Itilde;","297":"itilde;","298":"Imacr;","299":"imacr;","302":"Iogon;","303":"iogon;","304":"Idot;","305":"inodot;","306":"IJlig;","307":"ijlig;","308":"Jcirc;","309":"jcirc;","310":"Kcedil;","311":"kcedil;","312":"kgreen;","313":"Lacute;","314":"lacute;","315":"Lcedil;","316":"lcedil;","317":"Lcaron;","318":"lcaron;","319":"Lmidot;","320":"lmidot;","321":"Lstrok;","322":"lstrok;","323":"Nacute;","324":"nacute;","325":"Ncedil;","326":"ncedil;","327":"Ncaron;","328":"ncaron;","329":"napos;","330":"ENG;","331":"eng;","332":"Omacr;","333":"omacr;","336":"Odblac;","337":"odblac;","338":"OElig;","339":"oelig;","340":"Racute;","341":"racute;","342":"Rcedil;","343":"rcedil;","344":"Rcaron;","345":"rcaron;","346":"Sacute;","347":"sacute;","348":"Scirc;","349":"scirc;","350":"Scedil;","351":"scedil;","352":"Scaron;","353":"scaron;","354":"Tcedil;","355":"tcedil;","356":"Tcaron;","357":"tcaron;","358":"Tstrok;","359":"tstrok;","360":"Utilde;","361":"utilde;","362":"Umacr;","363":"umacr;","364":"Ubreve;","365":"ubreve;","366":"Uring;","367":"uring;","368":"Udblac;","369":"udblac;","370":"Uogon;","371":"uogon;","372":"Wcirc;","373":"wcirc;","374":"Ycirc;","375":"ycirc;","376":"Yuml;","377":"Zacute;","378":"zacute;","379":"Zdot;","380":"zdot;","381":"Zcaron;","382":"zcaron;","402":"fnof;","437":"imped;","501":"gacute;","567":"jmath;","710":"circ;","711":"Hacek;","728":"breve;","729":"dot;","730":"ring;","731":"ogon;","732":"tilde;","733":"DiacriticalDoubleAcute;","785":"DownBreve;","913":"Alpha;","914":"Beta;","915":"Gamma;","916":"Delta;","917":"Epsilon;","918":"Zeta;","919":"Eta;","920":"Theta;","921":"Iota;","922":"Kappa;","923":"Lambda;","924":"Mu;","925":"Nu;","926":"Xi;","927":"Omicron;","928":"Pi;","929":"Rho;","931":"Sigma;","932":"Tau;","933":"Upsilon;","934":"Phi;","935":"Chi;","936":"Psi;","937":"Omega;","945":"alpha;","946":"beta;","947":"gamma;","948":"delta;","949":"epsilon;","950":"zeta;","951":"eta;","952":"theta;","953":"iota;","954":"kappa;","955":"lambda;","956":"mu;","957":"nu;","958":"xi;","959":"omicron;","960":"pi;","961":"rho;","962":"varsigma;","963":"sigma;","964":"tau;","965":"upsilon;","966":"phi;","967":"chi;","968":"psi;","969":"omega;","977":"vartheta;","978":"upsih;","981":"varphi;","982":"varpi;","988":"Gammad;","989":"gammad;","1008":"varkappa;","1009":"varrho;","1013":"varepsilon;","1014":"bepsi;","1025":"IOcy;","1026":"DJcy;","1027":"GJcy;","1028":"Jukcy;","1029":"DScy;","1030":"Iukcy;","1031":"YIcy;","1032":"Jsercy;","1033":"LJcy;","1034":"NJcy;","1035":"TSHcy;","1036":"KJcy;","1038":"Ubrcy;","1039":"DZcy;","1040":"Acy;","1041":"Bcy;","1042":"Vcy;","1043":"Gcy;","1044":"Dcy;","1045":"IEcy;","1046":"ZHcy;","1047":"Zcy;","1048":"Icy;","1049":"Jcy;","1050":"Kcy;","1051":"Lcy;","1052":"Mcy;","1053":"Ncy;","1054":"Ocy;","1055":"Pcy;","1056":"Rcy;","1057":"Scy;","1058":"Tcy;","1059":"Ucy;","1060":"Fcy;","1061":"KHcy;","1062":"TScy;","1063":"CHcy;","1064":"SHcy;","1065":"SHCHcy;","1066":"HARDcy;","1067":"Ycy;","1068":"SOFTcy;","1069":"Ecy;","1070":"YUcy;","1071":"YAcy;","1072":"acy;","1073":"bcy;","1074":"vcy;","1075":"gcy;","1076":"dcy;","1077":"iecy;","1078":"zhcy;","1079":"zcy;","1080":"icy;","1081":"jcy;","1082":"kcy;","1083":"lcy;","1084":"mcy;","108
5":"ncy;","1086":"ocy;","1087":"pcy;","1088":"rcy;","1089":"scy;","1090":"tcy;","1091":"ucy;","1092":"fcy;","1093":"khcy;","1094":"tscy;","1095":"chcy;","1096":"shcy;","1097":"shchcy;","1098":"hardcy;","1099":"ycy;","1100":"softcy;","1101":"ecy;","1102":"yucy;","1103":"yacy;","1105":"iocy;","1106":"djcy;","1107":"gjcy;","1108":"jukcy;","1109":"dscy;","1110":"iukcy;","1111":"yicy;","1112":"jsercy;","1113":"ljcy;","1114":"njcy;","1115":"tshcy;","1116":"kjcy;","1118":"ubrcy;","1119":"dzcy;","8194":"ensp;","8195":"emsp;","8196":"emsp13;","8197":"emsp14;","8199":"numsp;","8200":"puncsp;","8201":"ThinSpace;","8202":"VeryThinSpace;","8203":"ZeroWidthSpace;","8204":"zwnj;","8205":"zwj;","8206":"lrm;","8207":"rlm;","8208":"hyphen;","8211":"ndash;","8212":"mdash;","8213":"horbar;","8214":"Vert;","8216":"OpenCurlyQuote;","8217":"rsquor;","8218":"sbquo;","8220":"OpenCurlyDoubleQuote;","8221":"rdquor;","8222":"ldquor;","8224":"dagger;","8225":"ddagger;","8226":"bullet;","8229":"nldr;","8230":"mldr;","8240":"permil;","8241":"pertenk;","8242":"prime;","8243":"Prime;","8244":"tprime;","8245":"bprime;","8249":"lsaquo;","8250":"rsaquo;","8254":"OverBar;","8257":"caret;","8259":"hybull;","8260":"frasl;","8271":"bsemi;","8279":"qprime;","8287":"MediumSpace;","8288":"NoBreak;","8289":"ApplyFunction;","8290":"it;","8291":"InvisibleComma;","8364":"euro;","8411":"TripleDot;","8412":"DotDot;","8450":"Copf;","8453":"incare;","8458":"gscr;","8459":"Hscr;","8460":"Poincareplane;","8461":"quaternions;","8462":"planckh;","8463":"plankv;","8464":"Iscr;","8465":"imagpart;","8466":"Lscr;","8467":"ell;","8469":"Nopf;","8470":"numero;","8471":"copysr;","8472":"wp;","8473":"primes;","8474":"rationals;","8475":"Rscr;","8476":"Rfr;","8477":"Ropf;","8478":"rx;","8482":"trade;","8484":"Zopf;","8487":"mho;","8488":"Zfr;","8489":"iiota;","8492":"Bscr;","8493":"Cfr;","8495":"escr;","8496":"expectation;","8497":"Fscr;","8499":"phmmat;","8500":"oscr;","8501":"aleph;","8502":"beth;","8503":"gimel;","8504":"daleth;","8517":"DD;","8518":"DifferentialD;","8519":"exponentiale;","8520":"ImaginaryI;","8531":"frac13;","8532":"frac23;","8533":"frac15;","8534":"frac25;","8535":"frac35;","8536":"frac45;","8537":"frac16;","8538":"frac56;","8539":"frac18;","8540":"frac38;","8541":"frac58;","8542":"frac78;","8592":"slarr;","8593":"uparrow;","8594":"srarr;","8595":"ShortDownArrow;","8596":"leftrightarrow;","8597":"varr;","8598":"UpperLeftArrow;","8599":"UpperRightArrow;","8600":"searrow;","8601":"swarrow;","8602":"nleftarrow;","8603":"nrightarrow;","8605":"rightsquigarrow;","8606":"twoheadleftarrow;","8607":"Uarr;","8608":"twoheadrightarrow;","8609":"Darr;","8610":"leftarrowtail;","8611":"rightarrowtail;","8612":"mapstoleft;","8613":"UpTeeArrow;","8614":"RightTeeArrow;","8615":"mapstodown;","8617":"larrhk;","8618":"rarrhk;","8619":"looparrowleft;","8620":"rarrlp;","8621":"leftrightsquigarrow;","8622":"nleftrightarrow;","8624":"lsh;","8625":"rsh;","8626":"ldsh;","8627":"rdsh;","8629":"crarr;","8630":"curvearrowleft;","8631":"curvearrowright;","8634":"olarr;","8635":"orarr;","8636":"lharu;","8637":"lhard;","8638":"upharpoonright;","8639":"upharpoonleft;","8640":"RightVector;","8641":"rightharpoondown;","8642":"RightDownVector;","8643":"LeftDownVector;","8644":"rlarr;","8645":"UpArrowDownArrow;","8646":"lrarr;","8647":"llarr;","8648":"uuarr;","8649":"rrarr;","8650":"downdownarrows;","8651":"ReverseEquilibrium;","8652":"rlhar;","8653":"nLeftarrow;","8654":"nLeftrightarrow;","8655":"nRightarrow;","8656":"Leftarrow;","8657":"Uparrow;","8658":"Rightarrow;"
,"8659":"Downarrow;","8660":"Leftrightarrow;","8661":"vArr;","8662":"nwArr;","8663":"neArr;","8664":"seArr;","8665":"swArr;","8666":"Lleftarrow;","8667":"Rrightarrow;","8669":"zigrarr;","8676":"LeftArrowBar;","8677":"RightArrowBar;","8693":"duarr;","8701":"loarr;","8702":"roarr;","8703":"hoarr;","8704":"forall;","8705":"complement;","8706":"PartialD;","8707":"Exists;","8708":"NotExists;","8709":"varnothing;","8711":"nabla;","8712":"isinv;","8713":"notinva;","8715":"SuchThat;","8716":"NotReverseElement;","8719":"Product;","8720":"Coproduct;","8721":"sum;","8722":"minus;","8723":"mp;","8724":"plusdo;","8726":"ssetmn;","8727":"lowast;","8728":"SmallCircle;","8730":"Sqrt;","8733":"vprop;","8734":"infin;","8735":"angrt;","8736":"angle;","8737":"measuredangle;","8738":"angsph;","8739":"VerticalBar;","8740":"nsmid;","8741":"spar;","8742":"nspar;","8743":"wedge;","8744":"vee;","8745":"cap;","8746":"cup;","8747":"Integral;","8748":"Int;","8749":"tint;","8750":"oint;","8751":"DoubleContourIntegral;","8752":"Cconint;","8753":"cwint;","8754":"cwconint;","8755":"CounterClockwiseContourIntegral;","8756":"therefore;","8757":"because;","8758":"ratio;","8759":"Proportion;","8760":"minusd;","8762":"mDDot;","8763":"homtht;","8764":"Tilde;","8765":"bsim;","8766":"mstpos;","8767":"acd;","8768":"wreath;","8769":"nsim;","8770":"esim;","8771":"TildeEqual;","8772":"nsimeq;","8773":"TildeFullEqual;","8774":"simne;","8775":"NotTildeFullEqual;","8776":"TildeTilde;","8777":"NotTildeTilde;","8778":"approxeq;","8779":"apid;","8780":"bcong;","8781":"CupCap;","8782":"HumpDownHump;","8783":"HumpEqual;","8784":"esdot;","8785":"eDot;","8786":"fallingdotseq;","8787":"risingdotseq;","8788":"coloneq;","8789":"eqcolon;","8790":"eqcirc;","8791":"cire;","8793":"wedgeq;","8794":"veeeq;","8796":"trie;","8799":"questeq;","8800":"NotEqual;","8801":"equiv;","8802":"NotCongruent;","8804":"leq;","8805":"GreaterEqual;","8806":"LessFullEqual;","8807":"GreaterFullEqual;","8808":"lneqq;","8809":"gneqq;","8810":"NestedLessLess;","8811":"NestedGreaterGreater;","8812":"twixt;","8813":"NotCupCap;","8814":"NotLess;","8815":"NotGreater;","8816":"NotLessEqual;","8817":"NotGreaterEqual;","8818":"lsim;","8819":"gtrsim;","8820":"NotLessTilde;","8821":"NotGreaterTilde;","8822":"lg;","8823":"gtrless;","8824":"ntlg;","8825":"ntgl;","8826":"Precedes;","8827":"Succeeds;","8828":"PrecedesSlantEqual;","8829":"SucceedsSlantEqual;","8830":"prsim;","8831":"succsim;","8832":"nprec;","8833":"nsucc;","8834":"subset;","8835":"supset;","8836":"nsub;","8837":"nsup;","8838":"SubsetEqual;","8839":"supseteq;","8840":"nsubseteq;","8841":"nsupseteq;","8842":"subsetneq;","8843":"supsetneq;","8845":"cupdot;","8846":"uplus;","8847":"SquareSubset;","8848":"SquareSuperset;","8849":"SquareSubsetEqual;","8850":"SquareSupersetEqual;","8851":"SquareIntersection;","8852":"SquareUnion;","8853":"oplus;","8854":"ominus;","8855":"otimes;","8856":"osol;","8857":"odot;","8858":"ocir;","8859":"oast;","8861":"odash;","8862":"plusb;","8863":"minusb;","8864":"timesb;","8865":"sdotb;","8866":"vdash;","8867":"LeftTee;","8868":"top;","8869":"UpTee;","8871":"models;","8872":"vDash;","8873":"Vdash;","8874":"Vvdash;","8875":"VDash;","8876":"nvdash;","8877":"nvDash;","8878":"nVdash;","8879":"nVDash;","8880":"prurel;","8882":"vltri;","8883":"vrtri;","8884":"trianglelefteq;","8885":"trianglerighteq;","8886":"origof;","8887":"imof;","8888":"mumap;","8889":"hercon;","8890":"intercal;","8891":"veebar;","8893":"barvee;","8894":"angrtvb;","8895":"lrtri;","8896":"xwedge;","8897":"xvee;","8898":"xcap;","8899
":"xcup;","8900":"diamond;","8901":"sdot;","8902":"Star;","8903":"divonx;","8904":"bowtie;","8905":"ltimes;","8906":"rtimes;","8907":"lthree;","8908":"rthree;","8909":"bsime;","8910":"cuvee;","8911":"cuwed;","8912":"Subset;","8913":"Supset;","8914":"Cap;","8915":"Cup;","8916":"pitchfork;","8917":"epar;","8918":"ltdot;","8919":"gtrdot;","8920":"Ll;","8921":"ggg;","8922":"LessEqualGreater;","8923":"gtreqless;","8926":"curlyeqprec;","8927":"curlyeqsucc;","8928":"nprcue;","8929":"nsccue;","8930":"nsqsube;","8931":"nsqsupe;","8934":"lnsim;","8935":"gnsim;","8936":"prnsim;","8937":"succnsim;","8938":"ntriangleleft;","8939":"ntriangleright;","8940":"ntrianglelefteq;","8941":"ntrianglerighteq;","8942":"vellip;","8943":"ctdot;","8944":"utdot;","8945":"dtdot;","8946":"disin;","8947":"isinsv;","8948":"isins;","8949":"isindot;","8950":"notinvc;","8951":"notinvb;","8953":"isinE;","8954":"nisd;","8955":"xnis;","8956":"nis;","8957":"notnivc;","8958":"notnivb;","8965":"barwedge;","8966":"doublebarwedge;","8968":"LeftCeiling;","8969":"RightCeiling;","8970":"lfloor;","8971":"RightFloor;","8972":"drcrop;","8973":"dlcrop;","8974":"urcrop;","8975":"ulcrop;","8976":"bnot;","8978":"profline;","8979":"profsurf;","8981":"telrec;","8982":"target;","8988":"ulcorner;","8989":"urcorner;","8990":"llcorner;","8991":"lrcorner;","8994":"sfrown;","8995":"ssmile;","9005":"cylcty;","9006":"profalar;","9014":"topbot;","9021":"ovbar;","9023":"solbar;","9084":"angzarr;","9136":"lmoustache;","9137":"rmoustache;","9140":"tbrk;","9141":"UnderBracket;","9142":"bbrktbrk;","9180":"OverParenthesis;","9181":"UnderParenthesis;","9182":"OverBrace;","9183":"UnderBrace;","9186":"trpezium;","9191":"elinters;","9251":"blank;","9416":"oS;","9472":"HorizontalLine;","9474":"boxv;","9484":"boxdr;","9488":"boxdl;","9492":"boxur;","9496":"boxul;","9500":"boxvr;","9508":"boxvl;","9516":"boxhd;","9524":"boxhu;","9532":"boxvh;","9552":"boxH;","9553":"boxV;","9554":"boxdR;","9555":"boxDr;","9556":"boxDR;","9557":"boxdL;","9558":"boxDl;","9559":"boxDL;","9560":"boxuR;","9561":"boxUr;","9562":"boxUR;","9563":"boxuL;","9564":"boxUl;","9565":"boxUL;","9566":"boxvR;","9567":"boxVr;","9568":"boxVR;","9569":"boxvL;","9570":"boxVl;","9571":"boxVL;","9572":"boxHd;","9573":"boxhD;","9574":"boxHD;","9575":"boxHu;","9576":"boxhU;","9577":"boxHU;","9578":"boxvH;","9579":"boxVh;","9580":"boxVH;","9600":"uhblk;","9604":"lhblk;","9608":"block;","9617":"blk14;","9618":"blk12;","9619":"blk34;","9633":"square;","9642":"squf;","9643":"EmptyVerySmallSquare;","9645":"rect;","9646":"marker;","9649":"fltns;","9651":"xutri;","9652":"utrif;","9653":"utri;","9656":"rtrif;","9657":"triangleright;","9661":"xdtri;","9662":"dtrif;","9663":"triangledown;","9666":"ltrif;","9667":"triangleleft;","9674":"lozenge;","9675":"cir;","9708":"tridot;","9711":"xcirc;","9720":"ultri;","9721":"urtri;","9722":"lltri;","9723":"EmptySmallSquare;","9724":"FilledSmallSquare;","9733":"starf;","9734":"star;","9742":"phone;","9792":"female;","9794":"male;","9824":"spadesuit;","9827":"clubsuit;","9829":"heartsuit;","9830":"diams;","9834":"sung;","9837":"flat;","9838":"natural;","9839":"sharp;","10003":"checkmark;","10007":"cross;","10016":"maltese;","10038":"sext;","10072":"VerticalSeparator;","10098":"lbbrk;","10099":"rbbrk;","10184":"bsolhsub;","10185":"suphsol;","10214":"lobrk;","10215":"robrk;","10216":"LeftAngleBracket;","10217":"RightAngleBracket;","10218":"Lang;","10219":"Rang;","10220":"loang;","10221":"roang;","10229":"xlarr;","10230":"xrarr;","10231":"xharr;","10232":"xlArr;","10233":"xrArr;","1
0234":"xhArr;","10236":"xmap;","10239":"dzigrarr;","10498":"nvlArr;","10499":"nvrArr;","10500":"nvHarr;","10501":"Map;","10508":"lbarr;","10509":"rbarr;","10510":"lBarr;","10511":"rBarr;","10512":"RBarr;","10513":"DDotrahd;","10514":"UpArrowBar;","10515":"DownArrowBar;","10518":"Rarrtl;","10521":"latail;","10522":"ratail;","10523":"lAtail;","10524":"rAtail;","10525":"larrfs;","10526":"rarrfs;","10527":"larrbfs;","10528":"rarrbfs;","10531":"nwarhk;","10532":"nearhk;","10533":"searhk;","10534":"swarhk;","10535":"nwnear;","10536":"toea;","10537":"tosa;","10538":"swnwar;","10547":"rarrc;","10549":"cudarrr;","10550":"ldca;","10551":"rdca;","10552":"cudarrl;","10553":"larrpl;","10556":"curarrm;","10557":"cularrp;","10565":"rarrpl;","10568":"harrcir;","10569":"Uarrocir;","10570":"lurdshar;","10571":"ldrushar;","10574":"LeftRightVector;","10575":"RightUpDownVector;","10576":"DownLeftRightVector;","10577":"LeftUpDownVector;","10578":"LeftVectorBar;","10579":"RightVectorBar;","10580":"RightUpVectorBar;","10581":"RightDownVectorBar;","10582":"DownLeftVectorBar;","10583":"DownRightVectorBar;","10584":"LeftUpVectorBar;","10585":"LeftDownVectorBar;","10586":"LeftTeeVector;","10587":"RightTeeVector;","10588":"RightUpTeeVector;","10589":"RightDownTeeVector;","10590":"DownLeftTeeVector;","10591":"DownRightTeeVector;","10592":"LeftUpTeeVector;","10593":"LeftDownTeeVector;","10594":"lHar;","10595":"uHar;","10596":"rHar;","10597":"dHar;","10598":"luruhar;","10599":"ldrdhar;","10600":"ruluhar;","10601":"rdldhar;","10602":"lharul;","10603":"llhard;","10604":"rharul;","10605":"lrhard;","10606":"UpEquilibrium;","10607":"ReverseUpEquilibrium;","10608":"RoundImplies;","10609":"erarr;","10610":"simrarr;","10611":"larrsim;","10612":"rarrsim;","10613":"rarrap;","10614":"ltlarr;","10616":"gtrarr;","10617":"subrarr;","10619":"suplarr;","10620":"lfisht;","10621":"rfisht;","10622":"ufisht;","10623":"dfisht;","10629":"lopar;","10630":"ropar;","10635":"lbrke;","10636":"rbrke;","10637":"lbrkslu;","10638":"rbrksld;","10639":"lbrksld;","10640":"rbrkslu;","10641":"langd;","10642":"rangd;","10643":"lparlt;","10644":"rpargt;","10645":"gtlPar;","10646":"ltrPar;","10650":"vzigzag;","10652":"vangrt;","10653":"angrtvbd;","10660":"ange;","10661":"range;","10662":"dwangle;","10663":"uwangle;","10664":"angmsdaa;","10665":"angmsdab;","10666":"angmsdac;","10667":"angmsdad;","10668":"angmsdae;","10669":"angmsdaf;","10670":"angmsdag;","10671":"angmsdah;","10672":"bemptyv;","10673":"demptyv;","10674":"cemptyv;","10675":"raemptyv;","10676":"laemptyv;","10677":"ohbar;","10678":"omid;","10679":"opar;","10681":"operp;","10683":"olcross;","10684":"odsold;","10686":"olcir;","10687":"ofcir;","10688":"olt;","10689":"ogt;","10690":"cirscir;","10691":"cirE;","10692":"solb;","10693":"bsolb;","10697":"boxbox;","10701":"trisb;","10702":"rtriltri;","10703":"LeftTriangleBar;","10704":"RightTriangleBar;","10716":"iinfin;","10717":"infintie;","10718":"nvinfin;","10723":"eparsl;","10724":"smeparsl;","10725":"eqvparsl;","10731":"lozf;","10740":"RuleDelayed;","10742":"dsol;","10752":"xodot;","10753":"xoplus;","10754":"xotime;","10756":"xuplus;","10758":"xsqcup;","10764":"qint;","10765":"fpartint;","10768":"cirfnint;","10769":"awint;","10770":"rppolint;","10771":"scpolint;","10772":"npolint;","10773":"pointint;","10774":"quatint;","10775":"intlarhk;","10786":"pluscir;","10787":"plusacir;","10788":"simplus;","10789":"plusdu;","10790":"plussim;","10791":"plustwo;","10793":"mcomma;","10794":"minusdu;","10797":"loplus;","10798":"roplus;","10799":"Cross;","10800":"tim
esd;","10801":"timesbar;","10803":"smashp;","10804":"lotimes;","10805":"rotimes;","10806":"otimesas;","10807":"Otimes;","10808":"odiv;","10809":"triplus;","10810":"triminus;","10811":"tritime;","10812":"iprod;","10815":"amalg;","10816":"capdot;","10818":"ncup;","10819":"ncap;","10820":"capand;","10821":"cupor;","10822":"cupcap;","10823":"capcup;","10824":"cupbrcap;","10825":"capbrcup;","10826":"cupcup;","10827":"capcap;","10828":"ccups;","10829":"ccaps;","10832":"ccupssm;","10835":"And;","10836":"Or;","10837":"andand;","10838":"oror;","10839":"orslope;","10840":"andslope;","10842":"andv;","10843":"orv;","10844":"andd;","10845":"ord;","10847":"wedbar;","10854":"sdote;","10858":"simdot;","10861":"congdot;","10862":"easter;","10863":"apacir;","10864":"apE;","10865":"eplus;","10866":"pluse;","10867":"Esim;","10868":"Colone;","10869":"Equal;","10871":"eDDot;","10872":"equivDD;","10873":"ltcir;","10874":"gtcir;","10875":"ltquest;","10876":"gtquest;","10877":"LessSlantEqual;","10878":"GreaterSlantEqual;","10879":"lesdot;","10880":"gesdot;","10881":"lesdoto;","10882":"gesdoto;","10883":"lesdotor;","10884":"gesdotol;","10885":"lessapprox;","10886":"gtrapprox;","10887":"lneq;","10888":"gneq;","10889":"lnapprox;","10890":"gnapprox;","10891":"lesseqqgtr;","10892":"gtreqqless;","10893":"lsime;","10894":"gsime;","10895":"lsimg;","10896":"gsiml;","10897":"lgE;","10898":"glE;","10899":"lesges;","10900":"gesles;","10901":"eqslantless;","10902":"eqslantgtr;","10903":"elsdot;","10904":"egsdot;","10905":"el;","10906":"eg;","10909":"siml;","10910":"simg;","10911":"simlE;","10912":"simgE;","10913":"LessLess;","10914":"GreaterGreater;","10916":"glj;","10917":"gla;","10918":"ltcc;","10919":"gtcc;","10920":"lescc;","10921":"gescc;","10922":"smt;","10923":"lat;","10924":"smte;","10925":"late;","10926":"bumpE;","10927":"preceq;","10928":"succeq;","10931":"prE;","10932":"scE;","10933":"prnE;","10934":"succneqq;","10935":"precapprox;","10936":"succapprox;","10937":"prnap;","10938":"succnapprox;","10939":"Pr;","10940":"Sc;","10941":"subdot;","10942":"supdot;","10943":"subplus;","10944":"supplus;","10945":"submult;","10946":"supmult;","10947":"subedot;","10948":"supedot;","10949":"subseteqq;","10950":"supseteqq;","10951":"subsim;","10952":"supsim;","10955":"subsetneqq;","10956":"supsetneqq;","10959":"csub;","10960":"csup;","10961":"csube;","10962":"csupe;","10963":"subsup;","10964":"supsub;","10965":"subsub;","10966":"supsup;","10967":"suphsub;","10968":"supdsub;","10969":"forkv;","10970":"topfork;","10971":"mlcp;","10980":"DoubleLeftTee;","10982":"Vdashl;","10983":"Barv;","10984":"vBar;","10985":"vBarv;","10987":"Vbar;","10988":"Not;","10989":"bNot;","10990":"rnmid;","10991":"cirmid;","10992":"midcir;","10993":"topcir;","10994":"nhpar;","10995":"parsim;","11005":"parsl;","64256":"fflig;","64257":"filig;","64258":"fllig;","64259":"ffilig;","64260":"ffllig;"}'); /***/ }), -/***/ 3123: +/***/ 6318: /***/ ((module) => { "use strict"; -module.exports = 
JSON.parse('{"9":"Tab;","10":"NewLine;","33":"excl;","34":"quot;","35":"num;","36":"dollar;","37":"percnt;","38":"amp;","39":"apos;","40":"lpar;","41":"rpar;","42":"midast;","43":"plus;","44":"comma;","46":"period;","47":"sol;","58":"colon;","59":"semi;","60":"lt;","61":"equals;","62":"gt;","63":"quest;","64":"commat;","91":"lsqb;","92":"bsol;","93":"rsqb;","94":"Hat;","95":"UnderBar;","96":"grave;","123":"lcub;","124":"VerticalLine;","125":"rcub;","160":"NonBreakingSpace;","161":"iexcl;","162":"cent;","163":"pound;","164":"curren;","165":"yen;","166":"brvbar;","167":"sect;","168":"uml;","169":"copy;","170":"ordf;","171":"laquo;","172":"not;","173":"shy;","174":"reg;","175":"strns;","176":"deg;","177":"pm;","178":"sup2;","179":"sup3;","180":"DiacriticalAcute;","181":"micro;","182":"para;","183":"middot;","184":"Cedilla;","185":"sup1;","186":"ordm;","187":"raquo;","188":"frac14;","189":"half;","190":"frac34;","191":"iquest;","192":"Agrave;","193":"Aacute;","194":"Acirc;","195":"Atilde;","196":"Auml;","197":"Aring;","198":"AElig;","199":"Ccedil;","200":"Egrave;","201":"Eacute;","202":"Ecirc;","203":"Euml;","204":"Igrave;","205":"Iacute;","206":"Icirc;","207":"Iuml;","208":"ETH;","209":"Ntilde;","210":"Ograve;","211":"Oacute;","212":"Ocirc;","213":"Otilde;","214":"Ouml;","215":"times;","216":"Oslash;","217":"Ugrave;","218":"Uacute;","219":"Ucirc;","220":"Uuml;","221":"Yacute;","222":"THORN;","223":"szlig;","224":"agrave;","225":"aacute;","226":"acirc;","227":"atilde;","228":"auml;","229":"aring;","230":"aelig;","231":"ccedil;","232":"egrave;","233":"eacute;","234":"ecirc;","235":"euml;","236":"igrave;","237":"iacute;","238":"icirc;","239":"iuml;","240":"eth;","241":"ntilde;","242":"ograve;","243":"oacute;","244":"ocirc;","245":"otilde;","246":"ouml;","247":"divide;","248":"oslash;","249":"ugrave;","250":"uacute;","251":"ucirc;","252":"uuml;","253":"yacute;","254":"thorn;","255":"yuml;","256":"Amacr;","257":"amacr;","258":"Abreve;","259":"abreve;","260":"Aogon;","261":"aogon;","262":"Cacute;","263":"cacute;","264":"Ccirc;","265":"ccirc;","266":"Cdot;","267":"cdot;","268":"Ccaron;","269":"ccaron;","270":"Dcaron;","271":"dcaron;","272":"Dstrok;","273":"dstrok;","274":"Emacr;","275":"emacr;","278":"Edot;","279":"edot;","280":"Eogon;","281":"eogon;","282":"Ecaron;","283":"ecaron;","284":"Gcirc;","285":"gcirc;","286":"Gbreve;","287":"gbreve;","288":"Gdot;","289":"gdot;","290":"Gcedil;","292":"Hcirc;","293":"hcirc;","294":"Hstrok;","295":"hstrok;","296":"Itilde;","297":"itilde;","298":"Imacr;","299":"imacr;","302":"Iogon;","303":"iogon;","304":"Idot;","305":"inodot;","306":"IJlig;","307":"ijlig;","308":"Jcirc;","309":"jcirc;","310":"Kcedil;","311":"kcedil;","312":"kgreen;","313":"Lacute;","314":"lacute;","315":"Lcedil;","316":"lcedil;","317":"Lcaron;","318":"lcaron;","319":"Lmidot;","320":"lmidot;","321":"Lstrok;","322":"lstrok;","323":"Nacute;","324":"nacute;","325":"Ncedil;","326":"ncedil;","327":"Ncaron;","328":"ncaron;","329":"napos;","330":"ENG;","331":"eng;","332":"Omacr;","333":"omacr;","336":"Odblac;","337":"odblac;","338":"OElig;","339":"oelig;","340":"Racute;","341":"racute;","342":"Rcedil;","343":"rcedil;","344":"Rcaron;","345":"rcaron;","346":"Sacute;","347":"sacute;","348":"Scirc;","349":"scirc;","350":"Scedil;","351":"scedil;","352":"Scaron;","353":"scaron;","354":"Tcedil;","355":"tcedil;","356":"Tcaron;","357":"tcaron;","358":"Tstrok;","359":"tstrok;","360":"Utilde;","361":"utilde;","362":"Umacr;","363":"umacr;","364":"Ubreve;","365":"ubreve;","366":"Uring;","367":"uring;","368":"Udbla
c;","369":"udblac;","370":"Uogon;","371":"uogon;","372":"Wcirc;","373":"wcirc;","374":"Ycirc;","375":"ycirc;","376":"Yuml;","377":"Zacute;","378":"zacute;","379":"Zdot;","380":"zdot;","381":"Zcaron;","382":"zcaron;","402":"fnof;","437":"imped;","501":"gacute;","567":"jmath;","710":"circ;","711":"Hacek;","728":"breve;","729":"dot;","730":"ring;","731":"ogon;","732":"tilde;","733":"DiacriticalDoubleAcute;","785":"DownBreve;","913":"Alpha;","914":"Beta;","915":"Gamma;","916":"Delta;","917":"Epsilon;","918":"Zeta;","919":"Eta;","920":"Theta;","921":"Iota;","922":"Kappa;","923":"Lambda;","924":"Mu;","925":"Nu;","926":"Xi;","927":"Omicron;","928":"Pi;","929":"Rho;","931":"Sigma;","932":"Tau;","933":"Upsilon;","934":"Phi;","935":"Chi;","936":"Psi;","937":"Omega;","945":"alpha;","946":"beta;","947":"gamma;","948":"delta;","949":"epsilon;","950":"zeta;","951":"eta;","952":"theta;","953":"iota;","954":"kappa;","955":"lambda;","956":"mu;","957":"nu;","958":"xi;","959":"omicron;","960":"pi;","961":"rho;","962":"varsigma;","963":"sigma;","964":"tau;","965":"upsilon;","966":"phi;","967":"chi;","968":"psi;","969":"omega;","977":"vartheta;","978":"upsih;","981":"varphi;","982":"varpi;","988":"Gammad;","989":"gammad;","1008":"varkappa;","1009":"varrho;","1013":"varepsilon;","1014":"bepsi;","1025":"IOcy;","1026":"DJcy;","1027":"GJcy;","1028":"Jukcy;","1029":"DScy;","1030":"Iukcy;","1031":"YIcy;","1032":"Jsercy;","1033":"LJcy;","1034":"NJcy;","1035":"TSHcy;","1036":"KJcy;","1038":"Ubrcy;","1039":"DZcy;","1040":"Acy;","1041":"Bcy;","1042":"Vcy;","1043":"Gcy;","1044":"Dcy;","1045":"IEcy;","1046":"ZHcy;","1047":"Zcy;","1048":"Icy;","1049":"Jcy;","1050":"Kcy;","1051":"Lcy;","1052":"Mcy;","1053":"Ncy;","1054":"Ocy;","1055":"Pcy;","1056":"Rcy;","1057":"Scy;","1058":"Tcy;","1059":"Ucy;","1060":"Fcy;","1061":"KHcy;","1062":"TScy;","1063":"CHcy;","1064":"SHcy;","1065":"SHCHcy;","1066":"HARDcy;","1067":"Ycy;","1068":"SOFTcy;","1069":"Ecy;","1070":"YUcy;","1071":"YAcy;","1072":"acy;","1073":"bcy;","1074":"vcy;","1075":"gcy;","1076":"dcy;","1077":"iecy;","1078":"zhcy;","1079":"zcy;","1080":"icy;","1081":"jcy;","1082":"kcy;","1083":"lcy;","1084":"mcy;","1085":"ncy;","1086":"ocy;","1087":"pcy;","1088":"rcy;","1089":"scy;","1090":"tcy;","1091":"ucy;","1092":"fcy;","1093":"khcy;","1094":"tscy;","1095":"chcy;","1096":"shcy;","1097":"shchcy;","1098":"hardcy;","1099":"ycy;","1100":"softcy;","1101":"ecy;","1102":"yucy;","1103":"yacy;","1105":"iocy;","1106":"djcy;","1107":"gjcy;","1108":"jukcy;","1109":"dscy;","1110":"iukcy;","1111":"yicy;","1112":"jsercy;","1113":"ljcy;","1114":"njcy;","1115":"tshcy;","1116":"kjcy;","1118":"ubrcy;","1119":"dzcy;","8194":"ensp;","8195":"emsp;","8196":"emsp13;","8197":"emsp14;","8199":"numsp;","8200":"puncsp;","8201":"ThinSpace;","8202":"VeryThinSpace;","8203":"ZeroWidthSpace;","8204":"zwnj;","8205":"zwj;","8206":"lrm;","8207":"rlm;","8208":"hyphen;","8211":"ndash;","8212":"mdash;","8213":"horbar;","8214":"Vert;","8216":"OpenCurlyQuote;","8217":"rsquor;","8218":"sbquo;","8220":"OpenCurlyDoubleQuote;","8221":"rdquor;","8222":"ldquor;","8224":"dagger;","8225":"ddagger;","8226":"bullet;","8229":"nldr;","8230":"mldr;","8240":"permil;","8241":"pertenk;","8242":"prime;","8243":"Prime;","8244":"tprime;","8245":"bprime;","8249":"lsaquo;","8250":"rsaquo;","8254":"OverBar;","8257":"caret;","8259":"hybull;","8260":"frasl;","8271":"bsemi;","8279":"qprime;","8287":"MediumSpace;","8288":"NoBreak;","8289":"ApplyFunction;","8290":"it;","8291":"InvisibleComma;","8364":"euro;","8411":"TripleDot;","8412":"DotDot;","
8450":"Copf;","8453":"incare;","8458":"gscr;","8459":"Hscr;","8460":"Poincareplane;","8461":"quaternions;","8462":"planckh;","8463":"plankv;","8464":"Iscr;","8465":"imagpart;","8466":"Lscr;","8467":"ell;","8469":"Nopf;","8470":"numero;","8471":"copysr;","8472":"wp;","8473":"primes;","8474":"rationals;","8475":"Rscr;","8476":"Rfr;","8477":"Ropf;","8478":"rx;","8482":"trade;","8484":"Zopf;","8487":"mho;","8488":"Zfr;","8489":"iiota;","8492":"Bscr;","8493":"Cfr;","8495":"escr;","8496":"expectation;","8497":"Fscr;","8499":"phmmat;","8500":"oscr;","8501":"aleph;","8502":"beth;","8503":"gimel;","8504":"daleth;","8517":"DD;","8518":"DifferentialD;","8519":"exponentiale;","8520":"ImaginaryI;","8531":"frac13;","8532":"frac23;","8533":"frac15;","8534":"frac25;","8535":"frac35;","8536":"frac45;","8537":"frac16;","8538":"frac56;","8539":"frac18;","8540":"frac38;","8541":"frac58;","8542":"frac78;","8592":"slarr;","8593":"uparrow;","8594":"srarr;","8595":"ShortDownArrow;","8596":"leftrightarrow;","8597":"varr;","8598":"UpperLeftArrow;","8599":"UpperRightArrow;","8600":"searrow;","8601":"swarrow;","8602":"nleftarrow;","8603":"nrightarrow;","8605":"rightsquigarrow;","8606":"twoheadleftarrow;","8607":"Uarr;","8608":"twoheadrightarrow;","8609":"Darr;","8610":"leftarrowtail;","8611":"rightarrowtail;","8612":"mapstoleft;","8613":"UpTeeArrow;","8614":"RightTeeArrow;","8615":"mapstodown;","8617":"larrhk;","8618":"rarrhk;","8619":"looparrowleft;","8620":"rarrlp;","8621":"leftrightsquigarrow;","8622":"nleftrightarrow;","8624":"lsh;","8625":"rsh;","8626":"ldsh;","8627":"rdsh;","8629":"crarr;","8630":"curvearrowleft;","8631":"curvearrowright;","8634":"olarr;","8635":"orarr;","8636":"lharu;","8637":"lhard;","8638":"upharpoonright;","8639":"upharpoonleft;","8640":"RightVector;","8641":"rightharpoondown;","8642":"RightDownVector;","8643":"LeftDownVector;","8644":"rlarr;","8645":"UpArrowDownArrow;","8646":"lrarr;","8647":"llarr;","8648":"uuarr;","8649":"rrarr;","8650":"downdownarrows;","8651":"ReverseEquilibrium;","8652":"rlhar;","8653":"nLeftarrow;","8654":"nLeftrightarrow;","8655":"nRightarrow;","8656":"Leftarrow;","8657":"Uparrow;","8658":"Rightarrow;","8659":"Downarrow;","8660":"Leftrightarrow;","8661":"vArr;","8662":"nwArr;","8663":"neArr;","8664":"seArr;","8665":"swArr;","8666":"Lleftarrow;","8667":"Rrightarrow;","8669":"zigrarr;","8676":"LeftArrowBar;","8677":"RightArrowBar;","8693":"duarr;","8701":"loarr;","8702":"roarr;","8703":"hoarr;","8704":"forall;","8705":"complement;","8706":"PartialD;","8707":"Exists;","8708":"NotExists;","8709":"varnothing;","8711":"nabla;","8712":"isinv;","8713":"notinva;","8715":"SuchThat;","8716":"NotReverseElement;","8719":"Product;","8720":"Coproduct;","8721":"sum;","8722":"minus;","8723":"mp;","8724":"plusdo;","8726":"ssetmn;","8727":"lowast;","8728":"SmallCircle;","8730":"Sqrt;","8733":"vprop;","8734":"infin;","8735":"angrt;","8736":"angle;","8737":"measuredangle;","8738":"angsph;","8739":"VerticalBar;","8740":"nsmid;","8741":"spar;","8742":"nspar;","8743":"wedge;","8744":"vee;","8745":"cap;","8746":"cup;","8747":"Integral;","8748":"Int;","8749":"tint;","8750":"oint;","8751":"DoubleContourIntegral;","8752":"Cconint;","8753":"cwint;","8754":"cwconint;","8755":"CounterClockwiseContourIntegral;","8756":"therefore;","8757":"because;","8758":"ratio;","8759":"Proportion;","8760":"minusd;","8762":"mDDot;","8763":"homtht;","8764":"Tilde;","8765":"bsim;","8766":"mstpos;","8767":"acd;","8768":"wreath;","8769":"nsim;","8770":"esim;","8771":"TildeEqual;","8772":"nsimeq;","8773":"TildeFullEqu
al;","8774":"simne;","8775":"NotTildeFullEqual;","8776":"TildeTilde;","8777":"NotTildeTilde;","8778":"approxeq;","8779":"apid;","8780":"bcong;","8781":"CupCap;","8782":"HumpDownHump;","8783":"HumpEqual;","8784":"esdot;","8785":"eDot;","8786":"fallingdotseq;","8787":"risingdotseq;","8788":"coloneq;","8789":"eqcolon;","8790":"eqcirc;","8791":"cire;","8793":"wedgeq;","8794":"veeeq;","8796":"trie;","8799":"questeq;","8800":"NotEqual;","8801":"equiv;","8802":"NotCongruent;","8804":"leq;","8805":"GreaterEqual;","8806":"LessFullEqual;","8807":"GreaterFullEqual;","8808":"lneqq;","8809":"gneqq;","8810":"NestedLessLess;","8811":"NestedGreaterGreater;","8812":"twixt;","8813":"NotCupCap;","8814":"NotLess;","8815":"NotGreater;","8816":"NotLessEqual;","8817":"NotGreaterEqual;","8818":"lsim;","8819":"gtrsim;","8820":"NotLessTilde;","8821":"NotGreaterTilde;","8822":"lg;","8823":"gtrless;","8824":"ntlg;","8825":"ntgl;","8826":"Precedes;","8827":"Succeeds;","8828":"PrecedesSlantEqual;","8829":"SucceedsSlantEqual;","8830":"prsim;","8831":"succsim;","8832":"nprec;","8833":"nsucc;","8834":"subset;","8835":"supset;","8836":"nsub;","8837":"nsup;","8838":"SubsetEqual;","8839":"supseteq;","8840":"nsubseteq;","8841":"nsupseteq;","8842":"subsetneq;","8843":"supsetneq;","8845":"cupdot;","8846":"uplus;","8847":"SquareSubset;","8848":"SquareSuperset;","8849":"SquareSubsetEqual;","8850":"SquareSupersetEqual;","8851":"SquareIntersection;","8852":"SquareUnion;","8853":"oplus;","8854":"ominus;","8855":"otimes;","8856":"osol;","8857":"odot;","8858":"ocir;","8859":"oast;","8861":"odash;","8862":"plusb;","8863":"minusb;","8864":"timesb;","8865":"sdotb;","8866":"vdash;","8867":"LeftTee;","8868":"top;","8869":"UpTee;","8871":"models;","8872":"vDash;","8873":"Vdash;","8874":"Vvdash;","8875":"VDash;","8876":"nvdash;","8877":"nvDash;","8878":"nVdash;","8879":"nVDash;","8880":"prurel;","8882":"vltri;","8883":"vrtri;","8884":"trianglelefteq;","8885":"trianglerighteq;","8886":"origof;","8887":"imof;","8888":"mumap;","8889":"hercon;","8890":"intercal;","8891":"veebar;","8893":"barvee;","8894":"angrtvb;","8895":"lrtri;","8896":"xwedge;","8897":"xvee;","8898":"xcap;","8899":"xcup;","8900":"diamond;","8901":"sdot;","8902":"Star;","8903":"divonx;","8904":"bowtie;","8905":"ltimes;","8906":"rtimes;","8907":"lthree;","8908":"rthree;","8909":"bsime;","8910":"cuvee;","8911":"cuwed;","8912":"Subset;","8913":"Supset;","8914":"Cap;","8915":"Cup;","8916":"pitchfork;","8917":"epar;","8918":"ltdot;","8919":"gtrdot;","8920":"Ll;","8921":"ggg;","8922":"LessEqualGreater;","8923":"gtreqless;","8926":"curlyeqprec;","8927":"curlyeqsucc;","8928":"nprcue;","8929":"nsccue;","8930":"nsqsube;","8931":"nsqsupe;","8934":"lnsim;","8935":"gnsim;","8936":"prnsim;","8937":"succnsim;","8938":"ntriangleleft;","8939":"ntriangleright;","8940":"ntrianglelefteq;","8941":"ntrianglerighteq;","8942":"vellip;","8943":"ctdot;","8944":"utdot;","8945":"dtdot;","8946":"disin;","8947":"isinsv;","8948":"isins;","8949":"isindot;","8950":"notinvc;","8951":"notinvb;","8953":"isinE;","8954":"nisd;","8955":"xnis;","8956":"nis;","8957":"notnivc;","8958":"notnivb;","8965":"barwedge;","8966":"doublebarwedge;","8968":"LeftCeiling;","8969":"RightCeiling;","8970":"lfloor;","8971":"RightFloor;","8972":"drcrop;","8973":"dlcrop;","8974":"urcrop;","8975":"ulcrop;","8976":"bnot;","8978":"profline;","8979":"profsurf;","8981":"telrec;","8982":"target;","8988":"ulcorner;","8989":"urcorner;","8990":"llcorner;","8991":"lrcorner;","8994":"sfrown;","8995":"ssmile;","9005":"cylcty;","9006":"profalar;","901
4":"topbot;","9021":"ovbar;","9023":"solbar;","9084":"angzarr;","9136":"lmoustache;","9137":"rmoustache;","9140":"tbrk;","9141":"UnderBracket;","9142":"bbrktbrk;","9180":"OverParenthesis;","9181":"UnderParenthesis;","9182":"OverBrace;","9183":"UnderBrace;","9186":"trpezium;","9191":"elinters;","9251":"blank;","9416":"oS;","9472":"HorizontalLine;","9474":"boxv;","9484":"boxdr;","9488":"boxdl;","9492":"boxur;","9496":"boxul;","9500":"boxvr;","9508":"boxvl;","9516":"boxhd;","9524":"boxhu;","9532":"boxvh;","9552":"boxH;","9553":"boxV;","9554":"boxdR;","9555":"boxDr;","9556":"boxDR;","9557":"boxdL;","9558":"boxDl;","9559":"boxDL;","9560":"boxuR;","9561":"boxUr;","9562":"boxUR;","9563":"boxuL;","9564":"boxUl;","9565":"boxUL;","9566":"boxvR;","9567":"boxVr;","9568":"boxVR;","9569":"boxvL;","9570":"boxVl;","9571":"boxVL;","9572":"boxHd;","9573":"boxhD;","9574":"boxHD;","9575":"boxHu;","9576":"boxhU;","9577":"boxHU;","9578":"boxvH;","9579":"boxVh;","9580":"boxVH;","9600":"uhblk;","9604":"lhblk;","9608":"block;","9617":"blk14;","9618":"blk12;","9619":"blk34;","9633":"square;","9642":"squf;","9643":"EmptyVerySmallSquare;","9645":"rect;","9646":"marker;","9649":"fltns;","9651":"xutri;","9652":"utrif;","9653":"utri;","9656":"rtrif;","9657":"triangleright;","9661":"xdtri;","9662":"dtrif;","9663":"triangledown;","9666":"ltrif;","9667":"triangleleft;","9674":"lozenge;","9675":"cir;","9708":"tridot;","9711":"xcirc;","9720":"ultri;","9721":"urtri;","9722":"lltri;","9723":"EmptySmallSquare;","9724":"FilledSmallSquare;","9733":"starf;","9734":"star;","9742":"phone;","9792":"female;","9794":"male;","9824":"spadesuit;","9827":"clubsuit;","9829":"heartsuit;","9830":"diams;","9834":"sung;","9837":"flat;","9838":"natural;","9839":"sharp;","10003":"checkmark;","10007":"cross;","10016":"maltese;","10038":"sext;","10072":"VerticalSeparator;","10098":"lbbrk;","10099":"rbbrk;","10184":"bsolhsub;","10185":"suphsol;","10214":"lobrk;","10215":"robrk;","10216":"LeftAngleBracket;","10217":"RightAngleBracket;","10218":"Lang;","10219":"Rang;","10220":"loang;","10221":"roang;","10229":"xlarr;","10230":"xrarr;","10231":"xharr;","10232":"xlArr;","10233":"xrArr;","10234":"xhArr;","10236":"xmap;","10239":"dzigrarr;","10498":"nvlArr;","10499":"nvrArr;","10500":"nvHarr;","10501":"Map;","10508":"lbarr;","10509":"rbarr;","10510":"lBarr;","10511":"rBarr;","10512":"RBarr;","10513":"DDotrahd;","10514":"UpArrowBar;","10515":"DownArrowBar;","10518":"Rarrtl;","10521":"latail;","10522":"ratail;","10523":"lAtail;","10524":"rAtail;","10525":"larrfs;","10526":"rarrfs;","10527":"larrbfs;","10528":"rarrbfs;","10531":"nwarhk;","10532":"nearhk;","10533":"searhk;","10534":"swarhk;","10535":"nwnear;","10536":"toea;","10537":"tosa;","10538":"swnwar;","10547":"rarrc;","10549":"cudarrr;","10550":"ldca;","10551":"rdca;","10552":"cudarrl;","10553":"larrpl;","10556":"curarrm;","10557":"cularrp;","10565":"rarrpl;","10568":"harrcir;","10569":"Uarrocir;","10570":"lurdshar;","10571":"ldrushar;","10574":"LeftRightVector;","10575":"RightUpDownVector;","10576":"DownLeftRightVector;","10577":"LeftUpDownVector;","10578":"LeftVectorBar;","10579":"RightVectorBar;","10580":"RightUpVectorBar;","10581":"RightDownVectorBar;","10582":"DownLeftVectorBar;","10583":"DownRightVectorBar;","10584":"LeftUpVectorBar;","10585":"LeftDownVectorBar;","10586":"LeftTeeVector;","10587":"RightTeeVector;","10588":"RightUpTeeVector;","10589":"RightDownTeeVector;","10590":"DownLeftTeeVector;","10591":"DownRightTeeVector;","10592":"LeftUpTeeVector;","10593":"LeftDownTeeVector;","10594":"lHar;
","10595":"uHar;","10596":"rHar;","10597":"dHar;","10598":"luruhar;","10599":"ldrdhar;","10600":"ruluhar;","10601":"rdldhar;","10602":"lharul;","10603":"llhard;","10604":"rharul;","10605":"lrhard;","10606":"UpEquilibrium;","10607":"ReverseUpEquilibrium;","10608":"RoundImplies;","10609":"erarr;","10610":"simrarr;","10611":"larrsim;","10612":"rarrsim;","10613":"rarrap;","10614":"ltlarr;","10616":"gtrarr;","10617":"subrarr;","10619":"suplarr;","10620":"lfisht;","10621":"rfisht;","10622":"ufisht;","10623":"dfisht;","10629":"lopar;","10630":"ropar;","10635":"lbrke;","10636":"rbrke;","10637":"lbrkslu;","10638":"rbrksld;","10639":"lbrksld;","10640":"rbrkslu;","10641":"langd;","10642":"rangd;","10643":"lparlt;","10644":"rpargt;","10645":"gtlPar;","10646":"ltrPar;","10650":"vzigzag;","10652":"vangrt;","10653":"angrtvbd;","10660":"ange;","10661":"range;","10662":"dwangle;","10663":"uwangle;","10664":"angmsdaa;","10665":"angmsdab;","10666":"angmsdac;","10667":"angmsdad;","10668":"angmsdae;","10669":"angmsdaf;","10670":"angmsdag;","10671":"angmsdah;","10672":"bemptyv;","10673":"demptyv;","10674":"cemptyv;","10675":"raemptyv;","10676":"laemptyv;","10677":"ohbar;","10678":"omid;","10679":"opar;","10681":"operp;","10683":"olcross;","10684":"odsold;","10686":"olcir;","10687":"ofcir;","10688":"olt;","10689":"ogt;","10690":"cirscir;","10691":"cirE;","10692":"solb;","10693":"bsolb;","10697":"boxbox;","10701":"trisb;","10702":"rtriltri;","10703":"LeftTriangleBar;","10704":"RightTriangleBar;","10716":"iinfin;","10717":"infintie;","10718":"nvinfin;","10723":"eparsl;","10724":"smeparsl;","10725":"eqvparsl;","10731":"lozf;","10740":"RuleDelayed;","10742":"dsol;","10752":"xodot;","10753":"xoplus;","10754":"xotime;","10756":"xuplus;","10758":"xsqcup;","10764":"qint;","10765":"fpartint;","10768":"cirfnint;","10769":"awint;","10770":"rppolint;","10771":"scpolint;","10772":"npolint;","10773":"pointint;","10774":"quatint;","10775":"intlarhk;","10786":"pluscir;","10787":"plusacir;","10788":"simplus;","10789":"plusdu;","10790":"plussim;","10791":"plustwo;","10793":"mcomma;","10794":"minusdu;","10797":"loplus;","10798":"roplus;","10799":"Cross;","10800":"timesd;","10801":"timesbar;","10803":"smashp;","10804":"lotimes;","10805":"rotimes;","10806":"otimesas;","10807":"Otimes;","10808":"odiv;","10809":"triplus;","10810":"triminus;","10811":"tritime;","10812":"iprod;","10815":"amalg;","10816":"capdot;","10818":"ncup;","10819":"ncap;","10820":"capand;","10821":"cupor;","10822":"cupcap;","10823":"capcup;","10824":"cupbrcap;","10825":"capbrcup;","10826":"cupcup;","10827":"capcap;","10828":"ccups;","10829":"ccaps;","10832":"ccupssm;","10835":"And;","10836":"Or;","10837":"andand;","10838":"oror;","10839":"orslope;","10840":"andslope;","10842":"andv;","10843":"orv;","10844":"andd;","10845":"ord;","10847":"wedbar;","10854":"sdote;","10858":"simdot;","10861":"congdot;","10862":"easter;","10863":"apacir;","10864":"apE;","10865":"eplus;","10866":"pluse;","10867":"Esim;","10868":"Colone;","10869":"Equal;","10871":"eDDot;","10872":"equivDD;","10873":"ltcir;","10874":"gtcir;","10875":"ltquest;","10876":"gtquest;","10877":"LessSlantEqual;","10878":"GreaterSlantEqual;","10879":"lesdot;","10880":"gesdot;","10881":"lesdoto;","10882":"gesdoto;","10883":"lesdotor;","10884":"gesdotol;","10885":"lessapprox;","10886":"gtrapprox;","10887":"lneq;","10888":"gneq;","10889":"lnapprox;","10890":"gnapprox;","10891":"lesseqqgtr;","10892":"gtreqqless;","10893":"lsime;","10894":"gsime;","10895":"lsimg;","10896":"gsiml;","10897":"lgE;","10898":"glE;","10899":
"lesges;","10900":"gesles;","10901":"eqslantless;","10902":"eqslantgtr;","10903":"elsdot;","10904":"egsdot;","10905":"el;","10906":"eg;","10909":"siml;","10910":"simg;","10911":"simlE;","10912":"simgE;","10913":"LessLess;","10914":"GreaterGreater;","10916":"glj;","10917":"gla;","10918":"ltcc;","10919":"gtcc;","10920":"lescc;","10921":"gescc;","10922":"smt;","10923":"lat;","10924":"smte;","10925":"late;","10926":"bumpE;","10927":"preceq;","10928":"succeq;","10931":"prE;","10932":"scE;","10933":"prnE;","10934":"succneqq;","10935":"precapprox;","10936":"succapprox;","10937":"prnap;","10938":"succnapprox;","10939":"Pr;","10940":"Sc;","10941":"subdot;","10942":"supdot;","10943":"subplus;","10944":"supplus;","10945":"submult;","10946":"supmult;","10947":"subedot;","10948":"supedot;","10949":"subseteqq;","10950":"supseteqq;","10951":"subsim;","10952":"supsim;","10955":"subsetneqq;","10956":"supsetneqq;","10959":"csub;","10960":"csup;","10961":"csube;","10962":"csupe;","10963":"subsup;","10964":"supsub;","10965":"subsub;","10966":"supsup;","10967":"suphsub;","10968":"supdsub;","10969":"forkv;","10970":"topfork;","10971":"mlcp;","10980":"DoubleLeftTee;","10982":"Vdashl;","10983":"Barv;","10984":"vBar;","10985":"vBarv;","10987":"Vbar;","10988":"Not;","10989":"bNot;","10990":"rnmid;","10991":"cirmid;","10992":"midcir;","10993":"topcir;","10994":"nhpar;","10995":"parsim;","11005":"parsl;","64256":"fflig;","64257":"filig;","64258":"fllig;","64259":"ffilig;","64260":"ffllig;"}'); +module.exports = JSON.parse('{"name":"gaxios","version":"6.2.0","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"^1.1.9","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^9.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"^5.0.0","linkinator":"^4.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^21.0.0","sinon":"^17.0.0","stream-browserify":"^3.0.0","tmp":"0.2.1","ts-loader":"^8.0.0","typescript":"^5.1.6","uuid":"^9.0.0","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9"}}'); /***/ }), @@ -74869,7 +74928,7 @@ module.exports = JSON.parse('{"9":"Tab;","10":"NewLine;","33":"excl;","34":"quot /***/ ((module) => { "use strict"; -module.exports = JSON.parse('{"name":"google-auth-library","version":"8.7.0","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=12"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"arrify":"^2.0.0","base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","fast-text-encoding":"^1.0.0","gaxios":"^5.0.0","gcp-metadata":"^5.0.0","gtoken":"^6.1.0","jws":"^4.0.0","lru-cache":"^6.0.0"},"devDependencies":{"@compodoc/compodoc":"^1.1.7","@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/lru-cache":"^5.0.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^16.0.0","@types/sinon":"^10.0.0","@types/tmp":"^0.2.0","assert-rejects":"^1.0.0","c8":"^7.0.0","chai":"^4.2.0","codecov":"^3.0.2","execa":"^5.0.0","gts":"^3.1.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.3.7","karma-webpack":"^5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^18.0.0","sinon":"^14.0.0","tmp":"^0.2.0","ts-loader":"^8.0.0","typescript":"^4.6.3","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile","docs":"compodoc src/","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts 
clean"},"license":"Apache-2.0"}'); +module.exports = JSON.parse('{"name":"google-auth-library","version":"9.6.3","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@compodoc/compodoc":"^1.1.7","@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^10.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","codecov":"^3.0.2","execa":"^5.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^21.0.0","sinon":"^15.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"compodoc src/","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); /***/ }), diff --git a/dist/main/index.js.map b/dist/main/index.js.map index 853599b..186bf61 100644 --- a/dist/main/index.js.map +++ b/dist/main/index.js.map @@ -1 +1 @@ 
-{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9oBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9sBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACz5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3mGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjSA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9mCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACllCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC72FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACt2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpHA;AACA;AACA;;;;;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/tBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7vBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1bA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/XA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;;;;;;;;ACHA;;;;;;;ACAA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
[machine-generated source map "mappings" payload (base64-VLQ) for the bundled dist output omitted — no human-editable content]
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC55DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzrIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3lFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzqGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjgCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5LA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/nBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACrBA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChRA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC
vBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnEA;;;;;;;;AAAA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;
;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://throttle-action/./lib/locks.js","../webpack://throttle-action/./lib/main.js","../webpack://throttle-action/./node_modules/@actions/core/lib/command.js","../webpack://throttle-action/./node_modules/@actions/core/lib/core.js","../webpack://throttle-action/./node_modules/@actions/core/lib/file-command.js","../webpack://throttle-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://throttle-action/./node_modules/@actions/core/lib/path-utils.js","../webpack://throttle-action/./node_modules/@actions/core/lib/summary.js","../webpack://throttle-action/./node_modules/@actions/core/lib/utils.js","../webpack://throttle-action/./node_modules/@actions/github/lib/context.js","../webpack://throttle-action/./node_modules/@actions/github/lib/github.js","../webpack://throttle-action/./node_modules/@actions/github/lib/internal/utils.js","../webpack://throttle-action/./node_modules/@actions/github/lib/utils.js","../webpack://throttle-action/./node_modules/@actions/http-client/lib/auth.js","../webpack://throttle-action/./node_modules/@actions/http-client/lib/index.js","../webpack://throttle-action/./node_modules/@actions/http-client/lib/proxy.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/operation.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/service-object.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/service.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/util.js","../webpack://throttle-action/./node_modules/@google-cloud/paginator/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/paginator/build/src/resource-stream.js","../webpack://throttle-action/./node_modules/@google-cloud/projectify/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/promisify/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/acl.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/bucket.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/channel.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/crc32c.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/file.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/hash-stream-validator.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/hmacKey.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/iam.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/index.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/service.js","../webpack://t
hrottle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/util.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/notification.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/resumable-upload.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/signer.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/storage.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/transfer-manager.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/util.js","../webpack://throttle-action/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/core/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/request/dist-node/index.js","../webpack://throttle-action/./node_modules/@tootallnate/once/dist/index.js","../webpack://throttle-action/./node_modules/abort-controller/dist/abort-controller.js","../webpack://throttle-action/./node_modules/agent-base/dist/src/index.js","../webpack://throttle-action/./node_modules/agent-base/dist/src/promisify.js","../webpack://throttle-action/./node_modules/arrify/index.js","../webpack://throttle-action/./node_modules/async-retry/lib/index.js","../webpack://throttle-action/./node_modules/base64-js/index.js","../webpack://throttle-action/./node_modules/before-after-hook/index.js","../webpack://throttle-action/./node_modules/before-after-hook/lib/add.js","../webpack://throttle-action/./node_modules/before-after-hook/lib/register.js","../webpack://throttle-action/./node_modules/before-after-hook/lib/remove.js","../webpack://throttle-action/./node_modules/bignumber.js/bignumber.js","../webpack://throttle-action/./node_modules/buffer-equal-constant-time/index.js","../webpack://throttle-action/./node_modules/compressible/index.js","../webpack://throttle-action/./node_modules/debug/src/browser.js","../webpack://throttle-action/./node_modules/debug/src/common.js","../webpack://throttle-action/./node_modules/debug/src/index.js","../webpack://throttle-action/./node_modules/debug/src/node.js","../webpack://throttle-action/./node_modules/deprecation/dist-node/index.js","../webpack://throttle-action/./node_modules/duplexify/index.js","../webpack://throttle-action/./node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js","../webpack://throttle-action/./node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js","../webpack://throttle-action/./node_modules/end-of-stream/index.js","../webpack://throttle-action/./node_modules/ent/decode.js","../webpack://throttle-action/./node_modules/ent/encode.js","../webpack://throttle-action/./node_modules/ent/index.js","../webpack://throttle-action/./node_modules/event-target-shim/dist/event-target-shim.js","../webpack://throttle-action/./node_modules/extend/index.js","../webpack://throttle-action/./node_modules/fast-text-encoding/text.min.js","../webpack://throttle-action/./node_modules/gaxios/build/src/common.js","../webpack://throttle-action/./no
de_modules/gaxios/build/src/gaxios.js","../webpack://throttle-action/./node_modules/gaxios/build/src/index.js","../webpack://throttle-action/./node_modules/gaxios/build/src/retry.js","../webpack://throttle-action/./node_modules/gcp-metadata/build/src/gcp-residency.js","../webpack://throttle-action/./node_modules/gcp-metadata/build/src/index.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/authclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/awsclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/awsrequestsigner.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/baseexternalclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/computeclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/downscopedclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/envDetect.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/executable-response.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/externalclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/googleauth.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/iam.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/identitypoolclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/idtokenclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/impersonated.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/jwtaccess.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/jwtclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/loginticket.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/oauth2client.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/oauth2common.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/refreshclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/stscredentials.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/crypto/browser/crypto.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/crypto/crypto.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/crypto/node/crypto.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/index.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/options.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/transporters.js","../webpack://throttle-action/./node_modules/google-p12-pem/build/src/index.js","../webpack://throttle-action/./node_modules/gtoken/build/src/index.js","../webpack://throttle-action/./node_modules/has-flag/index.js","../webpack://throttle-action/./node_modules/http-proxy-agent/dist/agent.js","../webpack://throttle-action/./node_modules/http-proxy-agent/dist/index.js","../webpack://throttle-a
ction/./node_modules/https-proxy-agent/dist/agent.js","../webpack://throttle-action/./node_modules/https-proxy-agent/dist/index.js","../webpack://throttle-action/./node_modules/https-proxy-agent/dist/parse-proxy-response.js","../webpack://throttle-action/./node_modules/inherits/inherits.js","../webpack://throttle-action/./node_modules/inherits/inherits_browser.js","../webpack://throttle-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://throttle-action/./node_modules/is-stream/index.js","../webpack://throttle-action/./node_modules/json-bigint/index.js","../webpack://throttle-action/./node_modules/json-bigint/lib/parse.js","../webpack://throttle-action/./node_modules/json-bigint/lib/stringify.js","../webpack://throttle-action/./node_modules/jwa/index.js","../webpack://throttle-action/./node_modules/jws/index.js","../webpack://throttle-action/./node_modules/jws/lib/data-stream.js","../webpack://throttle-action/./node_modules/jws/lib/sign-stream.js","../webpack://throttle-action/./node_modules/jws/lib/tostring.js","../webpack://throttle-action/./node_modules/jws/lib/verify-stream.js","../webpack://throttle-action/./node_modules/lru-cache/index.js","../webpack://throttle-action/./node_modules/mime-db/index.js","../webpack://throttle-action/./node_modules/mime-types/index.js","../webpack://throttle-action/./node_modules/mime/Mime.js","../webpack://throttle-action/./node_modules/mime/index.js","../webpack://throttle-action/./node_modules/mime/types/other.js","../webpack://throttle-action/./node_modules/mime/types/standard.js","../webpack://throttle-action/./node_modules/ms/index.js","../webpack://throttle-action/./node_modules/node-fetch/lib/index.js","../webpack://throttle-action/./node_modules/node-forge/lib/aes.js","../webpack://throttle-action/./node_modules/node-forge/lib/aesCipherSuites.js","../webpack://throttle-action/./node_modules/node-forge/lib/asn1-validator.js","../webpack://throttle-action/./node_modules/node-forge/lib/asn1.js","../webpack://throttle-action/./node_modules/node-forge/lib/baseN.js","../webpack://throttle-action/./node_modules/node-forge/lib/cipher.js","../webpack://throttle-action/./node_modules/node-forge/lib/cipherModes.js","../webpack://throttle-action/./node_modules/node-forge/lib/des.js","../webpack://throttle-action/./node_modules/node-forge/lib/ed25519.js","../webpack://throttle-action/./node_modules/node-forge/lib/forge.js","../webpack://throttle-action/./node_modules/node-forge/lib/hmac.js","../webpack://throttle-action/./node_modules/node-forge/lib/index.js","../webpack://throttle-action/./node_modules/node-forge/lib/jsbn.js","../webpack://throttle-action/./node_modules/node-forge/lib/kem.js","../webpack://throttle-action/./node_modules/node-forge/lib/log.js","../webpack://throttle-action/./node_modules/node-forge/lib/md.all.js","../webpack://throttle-action/./node_modules/node-forge/lib/md.js","../webpack://throttle-action/./node_modules/node-forge/lib/md5.js","../webpack://throttle-action/./node_modules/node-forge/lib/mgf.js","../webpack://throttle-action/./node_modules/node-forge/lib/mgf1.js","../webpack://throttle-action/./node_modules/node-forge/lib/oids.js","../webpack://throttle-action/./node_modules/node-forge/lib/pbe.js","../webpack://throttle-action/./node_modules/node-forge/lib/pbkdf2.js","../webpack://throttle-action/./node_modules/node-forge/lib/pem.js","../webpack://throttle-action/./node_modules/node-forge/lib/pkcs1.js","../webpack://throttle-action/./node_modules/node-forge/lib/pkcs12.js","../webpack://throttle-actio
n/./node_modules/node-forge/lib/pkcs7.js","../webpack://throttle-action/./node_modules/node-forge/lib/pkcs7asn1.js","../webpack://throttle-action/./node_modules/node-forge/lib/pki.js","../webpack://throttle-action/./node_modules/node-forge/lib/prime.js","../webpack://throttle-action/./node_modules/node-forge/lib/prng.js","../webpack://throttle-action/./node_modules/node-forge/lib/pss.js","../webpack://throttle-action/./node_modules/node-forge/lib/random.js","../webpack://throttle-action/./node_modules/node-forge/lib/rc2.js","../webpack://throttle-action/./node_modules/node-forge/lib/rsa.js","../webpack://throttle-action/./node_modules/node-forge/lib/sha1.js","../webpack://throttle-action/./node_modules/node-forge/lib/sha256.js","../webpack://throttle-action/./node_modules/node-forge/lib/sha512.js","../webpack://throttle-action/./node_modules/node-forge/lib/ssh.js","../webpack://throttle-action/./node_modules/node-forge/lib/tls.js","../webpack://throttle-action/./node_modules/node-forge/lib/util.js","../webpack://throttle-action/./node_modules/node-forge/lib/x509.js","../webpack://throttle-action/./node_modules/once/once.js","../webpack://throttle-action/./node_modules/p-limit/index.js","../webpack://throttle-action/./node_modules/readable-stream/errors.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_duplex.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_passthrough.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_readable.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_transform.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_writable.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/async_iterator.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/buffer_list.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/destroy.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/end-of-stream.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/from.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/pipeline.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/state.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/stream.js","../webpack://throttle-action/./node_modules/readable-stream/readable.js","../webpack://throttle-action/./node_modules/retry-request/index.js","../webpack://throttle-action/./node_modules/retry/index.js","../webpack://throttle-action/./node_modules/retry/lib/retry.js","../webpack://throttle-action/./node_modules/retry/lib/retry_operation.js","../webpack://throttle-action/./node_modules/safe-buffer/index.js","../webpack://throttle-action/./node_modules/stream-events/index.js","../webpack://throttle-action/./node_modules/stream-shift/index.js","../webpack://throttle-action/./node_modules/string_decoder/lib/string_decoder.js","../webpack://throttle-action/./node_modules/stubs/index.js","../webpack://throttle-action/./node_modules/supports-color/index.js","../webpack://throttle-action/./node_modules/teeny-request/build/src/TeenyStatistics.js","../webpack://throttle-action/./node_modules/teeny-request/build/src/agents.js","../webpack://throttle-action/./node_modules/teeny-request/build/src/index.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/
uuid/dist/index.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/md5.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/native.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/nil.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/parse.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/regex.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/rng.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/sha1.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/stringify.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v1.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v3.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v35.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v4.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v5.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/validate.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/version.js","../webpack://throttle-action/./node_modules/tr46/index.js","../webpack://throttle-action/./node_modules/tunnel/index.js","../webpack://throttle-action/./node_modules/tunnel/lib/tunnel.js","../webpack://throttle-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://throttle-action/./node_modules/util-deprecate/node.js","../webpack://throttle-action/./node_modules/uuid/dist/index.js","../webpack://throttle-action/./node_modules/uuid/dist/md5.js","../webpack://throttle-action/./node_modules/uuid/dist/nil.js","../webpack://throttle-action/./node_modules/uuid/dist/parse.js","../webpack://throttle-action/./node_modules/uuid/dist/regex.js","../webpack://throttle-action/./node_modules/uuid/dist/rng.js","../webpack://throttle-action/./node_modules/uuid/dist/sha1.js","../webpack://throttle-action/./node_modules/uuid/dist/stringify.js","../webpack://throttle-action/./node_modules/uuid/dist/v1.js","../webpack://throttle-action/./node_modules/uuid/dist/v3.js","../webpack://throttle-action/./node_modules/uuid/dist/v35.js","../webpack://throttle-action/./node_modules/uuid/dist/v4.js","../webpack://throttle-action/./node_modules/uuid/dist/v5.js","../webpack://throttle-action/./node_modules/uuid/dist/validate.js","../webpack://throttle-action/./node_modules/uuid/dist/version.js","../webpack://throttle-action/./node_modules/webidl-conversions/lib/index.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/URL-impl.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/URL.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/public-api.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/utils.js","../webpack://throttle-action/./node_modules/wrappy/wrappy.js","../webpack://throttle-action/./node_modules/yallist/iterator.js","../webpack://throttle-action/./node_modules/yallist/yallist.js","../webpack://throttle-action/./node_modules/yocto-queue/index.js","../webpack://throttle-action/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://throttle-action/external node-commonjs 
\"assert\"","../webpack://throttle-action/external node-commonjs \"buffer\"","../webpack://throttle-action/external node-commonjs \"child_process\"","../webpack://throttle-action/external node-commonjs \"crypto\"","../webpack://throttle-action/external node-commonjs \"events\"","../webpack://throttle-action/external node-commonjs \"fs\"","../webpack://throttle-action/external node-commonjs \"http\"","../webpack://throttle-action/external node-commonjs \"https\"","../webpack://throttle-action/external node-commonjs \"net\"","../webpack://throttle-action/external node-commonjs \"os\"","../webpack://throttle-action/external node-commonjs \"path\"","../webpack://throttle-action/external node-commonjs \"punycode\"","../webpack://throttle-action/external node-commonjs \"querystring\"","../webpack://throttle-action/external node-commonjs \"stream\"","../webpack://throttle-action/external node-commonjs \"tls\"","../webpack://throttle-action/external node-commonjs \"tty\"","../webpack://throttle-action/external node-commonjs \"url\"","../webpack://throttle-action/external node-commonjs \"util\"","../webpack://throttle-action/external node-commonjs \"zlib\"","../webpack://throttle-action/webpack/bootstrap","../webpack://throttle-action/webpack/runtime/compat","../webpack://throttle-action/webpack/before-startup","../webpack://throttle-action/webpack/startup","../webpack://throttle-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleCloudStorageMutex = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst storage_1 = require(\"@google-cloud/storage\");\nconst common_1 = require(\"@google-cloud/common\");\nconst TIME_PERIOD_PATTERN = /^(?:(\\d+)m)?(?:(\\d+)s)?$/;\nclass TimePeriodError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'TimePeriodError';\n }\n}\n/**\n * Parse a number of microseconds from a time string.\n *\n */\nfunction parseTimePeriod(period) {\n const match = period.match(TIME_PERIOD_PATTERN);\n if (match === null) {\n throw new TimePeriodError(`Invalid time period string: ${period}`);\n }\n else {\n const minutes = match[1] || 0;\n const seconds = match[2] || 0;\n return Number(minutes) * 60 + Number(seconds);\n }\n}\n/**\n * Generate a default identifier this mutex\n *\n * The identifier is used to verify that this invocation is the owner of the\n * mutex before releasing it.\n */\nfunction defaultMutexIdentifier() {\n const context = github.context;\n return `${context.repo.owner}/${context.repo.repo}/${context.workflow}/${context.runNumber}/${context.job}`;\n}\nconst INITIAL_SLEEP = 500;\nclass GoogleCloudStorageMutex {\n constructor(bucket, name, options = {}) {\n this.file = new storage_1.File(new storage_1.Bucket(new storage_1.Storage(), bucket), name, {\n generation: 0\n });\n if (options.timeout === null || options.timeout === undefined) {\n this.timeout = 5 * 60 * 1000; // 5 minutes\n }\n else if (typeof options.timeout === 'string') {\n this.timeout = parseTimePeriod(options.timeout) * 1000;\n }\n else {\n this.timeout = options.timeout;\n }\n if (options.identifier === null || options.identifier === undefined) {\n this.identifier = defaultMutexIdentifier();\n }\n else {\n this.identifier = options.identifier;\n }\n }\n acquire() {\n return __awaiter(this, void 0, void 0, function* () {\n const start = Date.now();\n let sleep = INITIAL_SLEEP;\n for (;;) {\n try {\n yield this.file.save(this.identifier, { resumable: false });\n core.info('Acquired lock');\n return;\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 412) {\n const delta = Date.now() - start;\n if (delta < this.timeout) {\n core.info(`Cannot acquire lock, sleeping for ${sleep / 1000.0}s`);\n yield new Promise(resolve => setTimeout(resolve, sleep));\n sleep = sleep * 2;\n continue;\n }\n }\n core.info('Cannot acquire lock, raising');\n throw e;\n }\n }\n });\n }\n release() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const data = yield this.file.download();\n if (data[0].toString() === this.identifier) {\n yield this.file.delete();\n }\n else {\n core.info(`Locked by someone else (${data[0]} !== ${this.identifier})`);\n }\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 404) {\n // no lock to release\n }\n else {\n throw e;\n }\n }\n });\n }\n}\nexports.GoogleCloudStorageMutex = GoogleCloudStorageMutex;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst locks_1 = require(\"./locks\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const mutex = new locks_1.GoogleCloudStorageMutex(core.getInput('bucket'), core.getInput('filename'), { timeout: core.getInput('timeout') });\n yield mutex.acquire();\n }\n catch (error) {\n if (error instanceof Error) {\n core.error(error.message);\n }\n else {\n core.error(`Unknown error ${error}`);\n }\n core.setFailed('Failed to acquire lock');\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 
'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options, ...additionalPlugins) {\n const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);\n return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nexports.defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = exports.Operation = void 0;\n/**\n * @type {module:common/operation}\n * @private\n */\nvar operation_1 = require(\"./operation\");\nObject.defineProperty(exports, \"Operation\", { enumerable: true, get: function () { return operation_1.Operation; } });\n/**\n * @type {module:common/service}\n * @private\n */\nvar service_1 = require(\"./service\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_1.Service; } });\n/**\n * @type {module:common/serviceObject}\n * @private\n */\nvar service_object_1 = require(\"./service-object\");\nObject.defineProperty(exports, \"ServiceObject\", { enumerable: true, get: function () { return service_object_1.ServiceObject; } });\n/**\n * @type {module:common/util}\n * @private\n */\nvar util_1 = require(\"./util\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_1.ApiError; } 
});\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_1.util; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Operation = void 0;\n/*!\n * @module common/operation\n */\nconst service_object_1 = require(\"./service-object\");\nconst util_1 = require(\"util\");\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass Operation extends service_object_1.ServiceObject {\n /**\n * An Operation object allows you to interact with APIs that take longer to\n * process things.\n *\n * @constructor\n * @alias module:common/operation\n *\n * @param {object} config - Configuration object.\n * @param {module:common/service|module:common/serviceObject|module:common/grpcService|module:common/grpcServiceObject} config.parent - The parent object.\n */\n constructor(config) {\n const methods = {\n /**\n * Checks to see if an operation exists.\n */\n exists: true,\n /**\n * Retrieves the operation.\n */\n get: true,\n /**\n * Retrieves metadata for the operation.\n */\n getMetadata: {\n reqOpts: {\n name: config.id,\n },\n },\n };\n config = Object.assign({\n baseUrl: '',\n }, config);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n config.methods = (config.methods || methods);\n super(config);\n this.completeListeners = 0;\n this.hasActiveListeners = false;\n this.listenForEvents_();\n }\n /**\n * Wraps the `complete` and `error` events in a Promise.\n *\n * @return {Promise}\n */\n promise() {\n return new Promise((resolve, reject) => {\n this.on('error', reject).on('complete', (metadata) => {\n resolve([metadata]);\n });\n });\n }\n /**\n * Begin listening for events on the operation. This method keeps track of how\n * many \"complete\" listeners are registered and removed, making sure polling\n * is handled automatically.\n *\n * As long as there is one active \"complete\" listener, the connection is open.\n * When there are no more listeners, the polling stops.\n *\n * @private\n */\n listenForEvents_() {\n this.on('newListener', (event) => {\n if (event === 'complete') {\n this.completeListeners++;\n if (!this.hasActiveListeners) {\n this.hasActiveListeners = true;\n this.startPolling_();\n }\n }\n });\n this.on('removeListener', (event) => {\n if (event === 'complete' && --this.completeListeners === 0) {\n this.hasActiveListeners = false;\n }\n });\n }\n /**\n * Poll for a status update. Returns null for an incomplete\n * status, and metadata for a complete status.\n *\n * @private\n */\n poll_(callback) {\n this.getMetadata((err, body) => {\n if (err || body.error) {\n callback(err || body.error);\n return;\n }\n if (!body.done) {\n callback(null);\n return;\n }\n callback(null, body);\n });\n }\n /**\n * Poll `getMetadata` to check the operation's status. 
This runs a loop to\n * ping the API on an interval.\n *\n * Note: This method is automatically called once a \"complete\" event handler\n * is registered on the operation.\n *\n * @private\n */\n async startPolling_() {\n if (!this.hasActiveListeners) {\n return;\n }\n try {\n const metadata = await (0, util_1.promisify)(this.poll_.bind(this))();\n if (!metadata) {\n setTimeout(this.startPolling_.bind(this), this.pollIntervalMs || 500);\n return;\n }\n this.emit('complete', metadata);\n }\n catch (err) {\n this.emit('error', err);\n }\n }\n}\nexports.Operation = Operation;\n//# sourceMappingURL=operation.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * @module common/service-object\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst arrify = require(\"arrify\");\nconst events_1 = require(\"events\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.pollIntervalMs = config.pollIntervalMs;\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. 
a \"File\") to only have\n // the configured methods. We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = extend(true, {\n method: 'DELETE',\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n callback(err, ...args);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = extend(true, {\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = extend(true, {}, {\n method: 'PATCH',\n uri: '',\n }, methodConfig.reqOpts, {\n json: metadata,\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts);\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = arrify(reqOpts.interceptors_);\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n//# sourceMappingURL=service-object.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\n/*!\n * @module common/service\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = arrify(options.interceptors_);\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired 
!== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = extend({}, config, {\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n });\n this.makeAuthenticatedRequest =\n util_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout });\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n arrify(reqOpts.interceptors_).forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = util_1.util.getUserAgentFromPackageJson(pkg);\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = extend({}, reqOpts.headers, {\n 'User-Agent': userAgent,\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version}`,\n });\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, 
reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n//# sourceMappingURL=service.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = require(\"ent\");\nconst extend = require(\"extend\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retryRequest = require(\"retry-request\");\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst service_1 = require(\"./service\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = 
Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body));\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. 
If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = extend(true, defaultReqOpts, options.request, {\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n });\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted. 
This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = extend({}, config);\n if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = extend({}, config);\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = duplexify();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e, _f, _g;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined &&\n ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n throw new ApiError('autoRetry is deprecated. Use retryOptions.autoRetry instead.');\n }\n else if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries && ((_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)) {\n throw new ApiError('maxRetries is deprecated. Use retryOptions.maxRetries instead.');\n }\n else if (config.maxRetries) {\n maxRetryValue = config.maxRetries;\n }\n else if ((_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.maxRetryDelay,\n retryDelayMultiplier: (_f = config.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier,\n totalTimeout: (_g = config.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return retryRequest(reqOpts, options, (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = retryRequest(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Create a properly-formatted User-Agent string from a package.json file.\n *\n * @param {object} packageJson - A module's package.json file.\n * @return {string} userAgent - The formatted User-Agent string.\n */\n getUserAgentFromPackageJson(packageJson) {\n const hyphenatedPackageName = packageJson.name\n .replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + packageJson.version;\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object 
or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? [{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n//# sourceMappingURL=util.js.map","\"use strict\";\n/*!\n * Copyright 2015 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = exports.paginator = exports.Paginator = void 0;\n/*!\n * @module common/paginator\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst resource_stream_1 = require(\"./resource-stream\");\nObject.defineProperty(exports, \"ResourceStream\", { enumerable: true, get: function () { return resource_stream_1.ResourceStream; } });\n/*! Developer Documentation\n *\n * paginator is used to auto-paginate `nextQuery` methods as well as\n * streamifying them.\n *\n * Before:\n *\n * search.query('done=true', function(err, results, nextQuery) {\n * search.query(nextQuery, function(err, results, nextQuery) {});\n * });\n *\n * After:\n *\n * search.query('done=true', function(err, results) {});\n *\n * Methods to extend should be written to accept callbacks and return a\n * `nextQuery`.\n */\nclass Paginator {\n /**\n * Cache the original method, then overwrite it on the Class's prototype.\n *\n * @param {function} Class - The parent class of the methods to extend.\n * @param {string|string[]} methodNames - Name(s) of the methods to extend.\n */\n // tslint:disable-next-line:variable-name\n extend(Class, methodNames) {\n methodNames = arrify(methodNames);\n methodNames.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n // map the original method to a private member\n Class.prototype[methodName + '_'] = originalMethod;\n // overwrite the original to auto-paginate\n /* eslint-disable @typescript-eslint/no-explicit-any */\n Class.prototype[methodName] = function (...args) {\n const parsedArguments = paginator.parseArguments_(args);\n return paginator.run_(parsedArguments, originalMethod.bind(this));\n };\n });\n }\n /**\n * Wraps paginated API calls in a readable object stream.\n *\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. 
The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {string} methodName - Name of the method to streamify.\n * @return {function} - Wrapped function.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n streamify(methodName) {\n return function (\n /* eslint-disable @typescript-eslint/no-explicit-any */\n ...args) {\n const parsedArguments = paginator.parseArguments_(args);\n const originalMethod = this[methodName + '_'] || this[methodName];\n return paginator.runAsStream_(parsedArguments, originalMethod.bind(this));\n };\n }\n /**\n * Parse a pseudo-array `arguments` for a query and callback.\n *\n * @param {array} args - The original `arguments` pseduo-array that the original\n * method received.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n parseArguments_(args) {\n let query;\n let autoPaginate = true;\n let maxApiCalls = -1;\n let maxResults = -1;\n let callback;\n const firstArgument = args[0];\n const lastArgument = args[args.length - 1];\n if (typeof firstArgument === 'function') {\n callback = firstArgument;\n }\n else {\n query = firstArgument;\n }\n if (typeof lastArgument === 'function') {\n callback = lastArgument;\n }\n if (typeof query === 'object') {\n query = extend(true, {}, query);\n // Check if the user only asked for a certain amount of results.\n if (query.maxResults && typeof query.maxResults === 'number') {\n // `maxResults` is used API-wide.\n maxResults = query.maxResults;\n }\n else if (typeof query.pageSize === 'number') {\n // `pageSize` is Pub/Sub's `maxResults`.\n maxResults = query.pageSize;\n }\n if (query.maxApiCalls && typeof query.maxApiCalls === 'number') {\n maxApiCalls = query.maxApiCalls;\n delete query.maxApiCalls;\n }\n // maxResults is the user specified limit.\n if (maxResults !== -1 || query.autoPaginate === false) {\n autoPaginate = false;\n }\n }\n const parsedArguments = {\n query: query || {},\n autoPaginate,\n maxApiCalls,\n maxResults,\n callback,\n };\n parsedArguments.streamOptions = extend(true, {}, parsedArguments.query);\n delete parsedArguments.streamOptions.autoPaginate;\n delete parsedArguments.streamOptions.maxResults;\n delete parsedArguments.streamOptions.pageSize;\n return parsedArguments;\n }\n /**\n * This simply checks to see if `autoPaginate` is set or not, if it's true\n * then we buffer all results, otherwise simply call the original method.\n *\n * @param {array} parsedArguments - Parsed arguments from the original method\n * call.\n * @param {object=|string=} parsedArguments.query - Query object. 
This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n */\n run_(parsedArguments, originalMethod) {\n const query = parsedArguments.query;\n const callback = parsedArguments.callback;\n if (!parsedArguments.autoPaginate) {\n return originalMethod(query, callback);\n }\n const results = new Array();\n const promise = new Promise((resolve, reject) => {\n paginator\n .runAsStream_(parsedArguments, originalMethod)\n .on('error', reject)\n .on('data', (data) => results.push(data))\n .on('end', () => resolve(results));\n });\n if (!callback) {\n return promise.then(results => [results]);\n }\n promise.then(results => callback(null, results), (err) => callback(err));\n }\n /**\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {object=|string=} parsedArguments.query - Query object. This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n * @return {stream} - Readable object stream.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n runAsStream_(parsedArguments, originalMethod) {\n return new resource_stream_1.ResourceStream(parsedArguments, originalMethod);\n }\n}\nexports.Paginator = Paginator;\nconst paginator = new Paginator();\nexports.paginator = paginator;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*!\n * Copyright 2019 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = void 0;\nconst stream_1 = require(\"stream\");\nclass ResourceStream extends stream_1.Transform {\n constructor(args, requestFn) {\n const options = Object.assign({ objectMode: true }, args.streamOptions);\n super(options);\n this._ended = false;\n this._maxApiCalls = args.maxApiCalls === -1 ? 
Infinity : args.maxApiCalls;\n this._nextQuery = args.query;\n this._reading = false;\n this._requestFn = requestFn;\n this._requestsMade = 0;\n this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults;\n }\n /* eslint-disable @typescript-eslint/no-explicit-any */\n end(...args) {\n this._ended = true;\n return super.end(...args);\n }\n _read() {\n if (this._reading) {\n return;\n }\n this._reading = true;\n // Wrap in a try/catch to catch input linting errors, e.g.\n // an invalid BigQuery query. These errors are thrown in an\n // async fashion, which makes them un-catchable by the user.\n try {\n this._requestFn(this._nextQuery, (err, results, nextQuery) => {\n if (err) {\n this.destroy(err);\n return;\n }\n this._nextQuery = nextQuery;\n if (this._resultsToSend !== Infinity) {\n results = results.splice(0, this._resultsToSend);\n this._resultsToSend -= results.length;\n }\n let more = true;\n for (const result of results) {\n if (this._ended) {\n break;\n }\n more = this.push(result);\n }\n const isFinished = !this._nextQuery || this._resultsToSend < 1;\n const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls;\n if (isFinished || madeMaxCalls) {\n this.end();\n }\n if (more && !this._ended) {\n setImmediate(() => this._read());\n }\n this._reading = false;\n });\n }\n catch (e) {\n this.destroy(e);\n }\n }\n}\nexports.ResourceStream = ResourceStream;\n//# sourceMappingURL=resource-stream.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MissingProjectIdError = exports.replaceProjectIdToken = void 0;\nconst stream_1 = require(\"stream\");\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/**\n * Populate the `{{projectId}}` placeholder.\n *\n * @throws {Error} If a projectId is required, but one is not provided.\n *\n * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped.\n * @param {string} projectId - A projectId. 
If not provided\n * @return {*} - The original argument with all placeholders populated.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction replaceProjectIdToken(value, projectId) {\n if (Array.isArray(value)) {\n value = value.map(v => replaceProjectIdToken(v, projectId));\n }\n if (value !== null &&\n typeof value === 'object' &&\n !(value instanceof Buffer) &&\n !(value instanceof stream_1.Stream) &&\n typeof value.hasOwnProperty === 'function') {\n for (const opt in value) {\n // eslint-disable-next-line no-prototype-builtins\n if (value.hasOwnProperty(opt)) {\n value[opt] = replaceProjectIdToken(value[opt], projectId);\n }\n }\n }\n if (typeof value === 'string' &&\n value.indexOf('{{projectId}}') > -1) {\n if (!projectId || projectId === '{{projectId}}') {\n throw new MissingProjectIdError();\n }\n value = value.replace(/{{projectId}}/g, projectId);\n }\n return value;\n}\nexports.replaceProjectIdToken = replaceProjectIdToken;\n/**\n * Custom error type for missing project ID errors.\n */\nclass MissingProjectIdError extends Error {\n constructor() {\n super(...arguments);\n this.message = `Sorry, we cannot connect to Cloud Services without a project\n ID. You may specify one with an environment variable named\n \"GOOGLE_CLOUD_PROJECT\".`.replace(/ +/g, ' ');\n }\n}\nexports.MissingProjectIdError = MissingProjectIdError;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/* eslint-disable prefer-rest-params */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0;\n/**\n * Wraps a callback style function to conditionally return a promise.\n *\n * @param {function} originalMethod - The method to promisify.\n * @param {object=} options - Promise options.\n * @param {boolean} options.singular - Resolve the promise with single arg instead of an array.\n * @return {function} wrapped\n */\nfunction promisify(originalMethod, options) {\n if (originalMethod.promisified_) {\n return originalMethod;\n }\n options = options || {};\n const slice = Array.prototype.slice;\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n let last;\n for (last = arguments.length - 1; last >= 0; last--) {\n const arg = arguments[last];\n if (typeof arg === 'undefined') {\n continue; // skip trailing undefined.\n }\n if (typeof arg !== 'function') {\n break; // non-callback last argument found.\n }\n return originalMethod.apply(this, arguments);\n }\n // peel trailing undefined.\n const args = slice.call(arguments, 0, last + 1);\n // tslint:disable-next-line:variable-name\n let PromiseCtor = Promise;\n // Because dedupe will likely create a single install of\n // @google-cloud/common to be shared amongst all modules, we need to\n // localize it at the Service level.\n if (this && this.Promise) {\n PromiseCtor = this.Promise;\n }\n return new PromiseCtor((resolve, reject) => {\n // tslint:disable-next-line:no-any\n args.push((...args) => {\n const callbackArgs = slice.call(args);\n const err = callbackArgs.shift();\n if (err) {\n return reject(err);\n }\n if (options.singular && callbackArgs.length === 1) {\n resolve(callbackArgs[0]);\n }\n else {\n resolve(callbackArgs);\n }\n });\n originalMethod.apply(this, args);\n });\n };\n wrapper.promisified_ = true;\n return wrapper;\n}\nexports.promisify = promisify;\n/**\n * Promisifies certain Class methods. 
This will not promisify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\n// tslint:disable-next-line:variable-name\nfunction promisifyAll(Class, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.promisified_) {\n Class.prototype[methodName] = exports.promisify(originalMethod, options);\n }\n });\n}\nexports.promisifyAll = promisifyAll;\n/**\n * Wraps a promisy type function to conditionally call a callback function.\n *\n * @param {function} originalMethod - The method to callbackify.\n * @param {object=} options - Callback options.\n * @param {boolean} options.singular - Pass to the callback a single arg instead of an array.\n * @return {function} wrapped\n */\nfunction callbackify(originalMethod) {\n if (originalMethod.callbackified_) {\n return originalMethod;\n }\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n if (typeof arguments[arguments.length - 1] !== 'function') {\n return originalMethod.apply(this, arguments);\n }\n const cb = Array.prototype.pop.call(arguments);\n originalMethod.apply(this, arguments).then(\n // tslint:disable-next-line:no-any\n (res) => {\n res = Array.isArray(res) ? res : [res];\n cb(null, ...res);\n }, (err) => cb(err));\n };\n wrapper.callbackified_ = true;\n return wrapper;\n}\nexports.callbackify = callbackify;\n/**\n * Callbackifies certain Class methods. 
This will not callbackify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\nfunction callbackifyAll(\n// tslint:disable-next-line:variable-name\nClass, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.callbackified_) {\n Class.prototype[methodName] = exports.callbackify(originalMethod);\n }\n });\n}\nexports.callbackifyAll = callbackifyAll;\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AclRoleAccessorMethods = exports.Acl = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Attach functionality to a {@link Storage.acl} instance. 
This will add an\n * object for each role group (owners, readers, and writers), with each object\n * containing methods to add or delete a type of entity.\n *\n * As an example, here are a few methods that are created.\n *\n * myBucket.acl.readers.deleteGroup('groupId', function(err) {});\n *\n * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {});\n *\n * myBucket.acl.writers.addDomain('example.com', function(err, acl) {});\n *\n * @private\n */\nclass AclRoleAccessorMethods {\n constructor() {\n this.owners = {};\n this.readers = {};\n this.writers = {};\n /**\n * An object of convenience methods to add or delete owner ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.owners.addAllAuthenticatedUsers`\n * - `myFile.acl.owners.deleteAllAuthenticatedUsers`\n * - `myFile.acl.owners.addAllUsers`\n * - `myFile.acl.owners.deleteAllUsers`\n * - `myFile.acl.owners.addDomain`\n * - `myFile.acl.owners.deleteDomain`\n * - `myFile.acl.owners.addGroup`\n * - `myFile.acl.owners.deleteGroup`\n * - `myFile.acl.owners.addProject`\n * - `myFile.acl.owners.deleteProject`\n * - `myFile.acl.owners.addUser`\n * - `myFile.acl.owners.deleteUser`\n *\n * @name Acl#owners\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as an owner of a file.\n * //-\n * const myBucket = gcs.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n * myFile.acl.owners.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.owners.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.owners = {};\n /**\n * An object of convenience methods to add or delete reader ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.readers.addAllAuthenticatedUsers`\n * - `myFile.acl.readers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.readers.addAllUsers`\n * - `myFile.acl.readers.deleteAllUsers`\n * - `myFile.acl.readers.addDomain`\n * - `myFile.acl.readers.deleteDomain`\n * - `myFile.acl.readers.addGroup`\n * - `myFile.acl.readers.deleteGroup`\n * - `myFile.acl.readers.addProject`\n * - `myFile.acl.readers.deleteProject`\n * - `myFile.acl.readers.addUser`\n * - `myFile.acl.readers.deleteUser`\n *\n * @name Acl#readers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a reader of a file.\n * //-\n * myFile.acl.readers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.READER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.readers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = 
data[1];\n * });\n * ```\n */\n this.readers = {};\n /**\n * An object of convenience methods to add or delete writer ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.writers.addAllAuthenticatedUsers`\n * - `myFile.acl.writers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.writers.addAllUsers`\n * - `myFile.acl.writers.deleteAllUsers`\n * - `myFile.acl.writers.addDomain`\n * - `myFile.acl.writers.deleteDomain`\n * - `myFile.acl.writers.addGroup`\n * - `myFile.acl.writers.deleteGroup`\n * - `myFile.acl.writers.addProject`\n * - `myFile.acl.writers.deleteProject`\n * - `myFile.acl.writers.addUser`\n * - `myFile.acl.writers.deleteUser`\n *\n * @name Acl#writers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a writer of a file.\n * //-\n * myFile.acl.writers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.WRITER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.writers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.writers = {};\n AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this));\n }\n _assignAccessMethods(role) {\n const accessMethods = AclRoleAccessorMethods.accessMethods;\n const entities = AclRoleAccessorMethods.entities;\n const roleGroup = role.toLowerCase() + 's';\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[roleGroup] = entities.reduce((acc, entity) => {\n const isPrefix = entity.charAt(entity.length - 1) === '-';\n accessMethods.forEach(accessMethod => {\n let method = accessMethod + entity[0].toUpperCase() + entity.substr(1);\n if (isPrefix) {\n method = method.replace('-', '');\n }\n // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the\n // more complex API of specifying an `entity` and `role`.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n acc[method] = (entityId, options, callback) => {\n let apiEntity;\n if (typeof options === 'function') {\n callback = options;\n options = {};\n }\n if (isPrefix) {\n apiEntity = entity + entityId;\n }\n else {\n // If the entity is not a prefix, it is a special entity group\n // that does not require further details. The accessor methods\n // only accept a callback.\n apiEntity = entity;\n callback = entityId;\n }\n options = Object.assign({\n entity: apiEntity,\n role,\n }, options);\n const args = [options];\n if (typeof callback === 'function') {\n args.push(callback);\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return this[accessMethod].apply(this, args);\n };\n });\n return acc;\n }, {});\n }\n}\nexports.AclRoleAccessorMethods = AclRoleAccessorMethods;\nAclRoleAccessorMethods.accessMethods = ['add', 'delete'];\nAclRoleAccessorMethods.entities = [\n // Special entity groups that do not require further specification.\n 'allAuthenticatedUsers',\n 'allUsers',\n // Entity groups that require specification, e.g. 
`user-email@example.com`.\n 'domain-',\n 'group-',\n 'project-',\n 'user-',\n];\nAclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER'];\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against an\n * object or bucket (for example, `READ` or `WRITE`); the entity defines who the\n * permission applies to (for example, a specific user or group of users).\n *\n * Where an `entity` value is accepted, we follow the format the Cloud Storage\n * API expects.\n *\n * Refer to\n * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls\n * for the most up-to-date values.\n *\n * - `user-userId`\n * - `user-email`\n * - `group-groupId`\n * - `group-email`\n * - `domain-domain`\n * - `project-team-projectId`\n * - `allUsers`\n * - `allAuthenticatedUsers`\n *\n * Examples:\n *\n * - The user \"liz@example.com\" would be `user-liz@example.com`.\n * - The group \"example@googlegroups.com\" would be\n * `group-example@googlegroups.com`.\n * - To refer to all members of the Google Apps for Business domain\n * \"example.com\", the entity would be `domain-example.com`.\n *\n * For more detailed information, see\n * {@link http://goo.gl/6qBBPO| About Access Control Lists}.\n *\n * @constructor Acl\n * @mixin\n * @param {object} options Configuration options.\n */\nclass Acl extends AclRoleAccessorMethods {\n constructor(options) {\n super();\n this.pathPrefix = options.pathPrefix;\n this.request_ = options.request;\n }\n /**\n * @typedef {array} AddAclResponse\n * @property {object} 0 The Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback AddAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Add access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be added.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link https://cloud.google.com/storage/docs/access-control Access\n * Control}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {AddAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * // Here is how 
you would grant ownership permissions to a user on a\n * specific\n * // revision of a file.\n * //-\n * myFile.acl.add({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_add_file_owner\n * Example of adding an owner to a file:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n */\n add(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'POST',\n uri: '',\n qs: query,\n maxRetries: 0,\n json: {\n entity: options.entity,\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * @typedef {array} RemoveAclResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback RemoveAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation}\n *\n * @param {object} options Configuration object.\n * @param {string} options.entity Whose permissions will be revoked.\n * @param {int} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {RemoveAclCallback} callback The callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.delete({\n * entity: 'user-useremail@example.com'\n * }, function(err, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.delete({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_file_owner\n * Example of removing an owner from a bucket:\n */\n delete(options, callback) {\n const query 
= {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'DELETE',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n }, (err, resp) => {\n callback(err, resp);\n });\n }\n /**\n * @typedef {array} GetAclResponse\n * @property {object|object[]} 0 Single or array of Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object|object[]} acl Single or array of Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get access controls on a {@link Bucket} or {@link File}. If\n * an entity is omitted, you will receive an array of all applicable access\n * controls.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation}\n *\n * @param {object|function} [options] Configuration options. If you want to\n * receive a list of all access controls, pass the callback function as\n * the only argument.\n * @param {string} [options.entity] Whose permissions will be fetched.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.get({\n * entity: 'user-useremail@example.com'\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // Get all access controls.\n * //-\n * myBucket.acl.get(function(err, aclObjects, apiResponse) {\n * // aclObjects = [\n * // {\n * // entity: 'user-useremail@example.com',\n * // role: 'owner'\n * // }\n * // ]\n * });\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.get({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.get().then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl\n * Example of printing a file's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl_for_user\n * Example of printing a file's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n */\n get(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb;\n let path = '';\n const query = {};\n if (options) {\n path = '/' + encodeURIComponent(options.entity);\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n }\n this.request({\n uri: path,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n let results;\n if (resp.items) {\n results = resp.items.map(this.makeAclObject_);\n }\n else {\n results = this.makeAclObject_(resp);\n }\n callback(null, results, resp);\n });\n }\n /**\n * @typedef {array} UpdateAclResponse\n * @property {object} 0 The updated Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UpdateAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The updated Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Update access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be updated.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link Storage.acl}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {UpdateAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE\n * };\n *\n * myBucket.acl.update(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.update({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.update(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n update(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'PUT',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n json: {\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * Transform API responses to a consistent object format.\n *\n * @private\n */\n makeAclObject_(accessControlObject) {\n const obj = {\n entity: accessControlObject.entity,\n role: accessControlObject.role,\n };\n if (accessControlObject.projectTeam) {\n obj.projectTeam = accessControlObject.projectTeam;\n }\n return obj;\n }\n /**\n * Patch requests up 
to the bucket's request object.\n *\n * @private\n *\n * @param {string} method Action.\n * @param {string} path Request path.\n * @param {*} query Request query object.\n * @param {*} body Request body contents.\n * @param {function} callback Callback function.\n */\n request(reqOpts, callback) {\n reqOpts.uri = this.pathPrefix + reqOpts.uri;\n this.request_(reqOpts, callback);\n }\n}\nexports.Acl = Acl;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Acl, {\n exclude: ['request'],\n});\n//# sourceMappingURL=acl.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst extend = require(\"extend\");\nconst fs = require(\"fs\");\nconst mime = require(\"mime-types\");\nconst path = require(\"path\");\nconst pLimit = require(\"p-limit\");\nconst util_1 = require(\"util\");\nconst retry = require(\"async-retry\");\nconst util_2 = require(\"./util\");\nconst acl_1 = require(\"./acl\");\nconst file_1 = require(\"./file\");\nconst iam_1 = require(\"./iam\");\nconst notification_1 = require(\"./notification\");\nconst storage_1 = require(\"./storage\");\nconst signer_1 = require(\"./signer\");\nconst stream_1 = require(\"stream\");\nconst url_1 = require(\"url\");\nvar BucketActionToHTTPMethod;\n(function (BucketActionToHTTPMethod) {\n BucketActionToHTTPMethod[\"list\"] = \"GET\";\n})(BucketActionToHTTPMethod = exports.BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = {}));\nvar AvailableServiceObjectMethods;\n(function (AvailableServiceObjectMethods) {\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"setMetadata\"] = 0] = \"setMetadata\";\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"delete\"] = 1] = \"delete\";\n})(AvailableServiceObjectMethods = exports.AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = {}));\nvar BucketExceptionMessages;\n(function (BucketExceptionMessages) {\n BucketExceptionMessages[\"PROVIDE_SOURCE_FILE\"] = \"You must provide at least one source file.\";\n BucketExceptionMessages[\"DESTINATION_FILE_NOT_SPECIFIED\"] = \"A destination file must be specified.\";\n BucketExceptionMessages[\"CHANNEL_ID_REQUIRED\"] = \"An ID is required to create a channel.\";\n BucketExceptionMessages[\"CHANNEL_ADDRESS_REQUIRED\"] = \"An address is required to create a channel.\";\n BucketExceptionMessages[\"TOPIC_NAME_REQUIRED\"] = \"A valid topic name is required.\";\n BucketExceptionMessages[\"CONFIGURATION_OBJECT_PREFIX_REQUIRED\"] = \"A configuration object with 
a prefix is required.\";\n BucketExceptionMessages[\"SPECIFY_FILE_NAME\"] = \"A file name must be specified.\";\n BucketExceptionMessages[\"METAGENERATION_NOT_PROVIDED\"] = \"A metageneration must be provided.\";\n BucketExceptionMessages[\"SUPPLY_NOTIFICATION_ID\"] = \"You must supply a notification ID.\";\n})(BucketExceptionMessages = exports.BucketExceptionMessages || (exports.BucketExceptionMessages = {}));\n/**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n/**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n/**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n/**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n/**\n * A function that generates a CRC32C Validator. Defaults to {@link CRC32C}\n *\n * @name Bucket#crc32cGenerator\n * @type {CRC32CValidator}\n */\n/**\n * Get and set IAM policies for your bucket.\n *\n * @name Bucket#iam\n * @mixes Iam\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Get the IAM policy for your bucket.\n * //-\n * bucket.iam.getPolicy(function(err, policy) {\n * console.log(policy);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy().then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. 
ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a Bucket instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * Buckets also have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. Default ACLs specify permissions that all new\n * objects added to the bucket will inherit by default. You can add, delete,\n * get, and update entities and permissions for these as well with\n * {@link Bucket#acl.default}.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control Lists}\n * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs}\n *\n * @name Bucket#acl\n * @mixes Acl\n * @property {Acl} default Cloud Storage Buckets have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. You can add, delete, get, and update entities and\n * permissions for these as well. The method signatures and examples are all\n * the same, after only prefixing the method call with `default`.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Make a bucket's contents publicly readable.\n * //-\n * const myBucket = storage.bucket('my-bucket');\n *\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n */\n/**\n * The API-formatted resource description of the bucket.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. 
To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Bucket#metadata\n * @type {object}\n */\n/**\n * The bucket's name.\n * @name Bucket#name\n * @type {string}\n */\n/**\n * Get {@link File} objects for the files currently in the bucket as a\n * readable object stream.\n *\n * @method Bucket#getFilesStream\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @returns {ReadableStream} A readable stream that emits {@link File} instances.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFilesStream()\n * .on('error', console.error)\n * .on('data', function(file) {\n * // file is a File object.\n * })\n * .on('end', function() {\n * // All files retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * bucket.getFilesStream()\n * .on('data', function(file) {\n * this.end();\n * });\n *\n * //-\n * // If you're filtering files with a delimiter, you should use\n * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to\n * // preserve the `apiResponse` argument.\n * //-\n * const prefixes = [];\n *\n * function callback(err, files, nextQuery, apiResponse) {\n * prefixes = prefixes.concat(apiResponse.prefixes);\n *\n * if (nextQuery) {\n * bucket.getFiles(nextQuery, callback);\n * } else {\n * // prefixes = The finished array of prefixes.\n * }\n * }\n *\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, callback);\n * ```\n */\n/**\n * Create a Bucket object to interact with a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Storage} storage A {@link Storage} instance.\n * @param {string} name The name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * ```\n */\nclass Bucket extends nodejs_common_1.ServiceObject {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n getFilesStream(query) {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n constructor(storage, name, options) {\n var _a, _b, _c, _d;\n options = options || {};\n // Allow for \"gs://\"-style input, and strip any trailing slashes.\n name = name.replace(/^gs:\\/\\//, '').replace(/\\/+$/, '');\n const requestQueryObject = {};\n if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n requestQueryObject.ifGenerationMatch =\n options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n requestQueryObject.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n requestQueryObject.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n requestQueryObject.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n const userProject = options.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * Create a bucket.\n *\n * @method Bucket#create\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.create(function(err, bucket, apiResponse) {\n * if (!err) {\n * // The bucket was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.create().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * IamDeleteBucketOptions Configuration options.\n * @property {boolean} [ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} DeleteBucketResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation}\n *\n * @method Bucket#delete\n * @param {DeleteBucketOptions} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_delete_bucket\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} BucketExistsResponse\n * @property {boolean} 0 Whether the {@link Bucket} exists.\n */\n /**\n * @callback BucketExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link Bucket} exists.\n */\n /**\n * Check if the bucket exists.\n *\n * @method Bucket#exists\n * @param {BucketExistsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {BucketExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * 
const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get()\n * @property {boolean} [autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetBucketResponse\n * @property {Bucket} 0 The {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The {@link Bucket}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a bucket if it exists.\n *\n * You may optionally use this to \"get or create\" an object by providing\n * an object with `autoCreate` set to `true`. Any extra configuration that\n * is normally required for the `create` method must be contained within\n * this object as well.\n *\n * @method Bucket#get\n * @param {GetBucketOptions} [options] Configuration options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.get(function(err, bucket, apiResponse) {\n * // `bucket.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.get().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetBucketMetadataResponse\n * @property {object} 0 The bucket metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Get the bucket's metadata.\n *\n * To set metadata, see {@link Bucket#setMetadata}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation}\n *\n * @method Bucket#getMetadata\n * @param {GetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is 
omitted, we'll return a Promise.\n * //-\n * bucket.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_get_requester_pays_status\n * Example of retrieving the requester pays status of a bucket:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} SetBucketMetadataResponse\n * @property {object} apiResponse The full API response.\n */\n /**\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * Set the bucket's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @method Bucket#setMetadata\n * @param {object} metadata The metadata you wish to set.\n * @param {SetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Set website metadata field on the bucket.\n * //-\n * const metadata = {\n * website: {\n * mainPageSuffix: 'http://example.com',\n * notFoundPage: 'http://example.com/404.html'\n * }\n * };\n *\n * bucket.setMetadata(metadata, function(err, apiResponse) {});\n *\n * //-\n * // Enable versioning for your bucket.\n * //-\n * bucket.setMetadata({\n * versioning: {\n * enabled: true\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Enable KMS encryption for objects within this bucket.\n * //-\n * bucket.setMetadata({\n * encryption: {\n * defaultKmsKeyName: 'projects/grape-spaceship-123/...'\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Set the default event-based hold value for new objects in this\n * // bucket.\n * //-\n * bucket.setMetadata({\n * defaultEventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Remove object lifecycle rules.\n * //-\n * bucket.setMetadata({\n * lifecycle: null\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: storage,\n baseUrl: '/b',\n id: name,\n createMethod: storage.createBucket.bind(storage),\n methods,\n });\n this.name = name;\n this.storage = storage;\n this.userProject = options.userProject;\n this.acl = new acl_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.acl.default = new acl_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/defaultObjectAcl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.storage.crc32cGenerator;\n this.iam = new iam_1.Iam(this);\n this.getFilesStream = paginator_1.paginator.streamify('getFiles');\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n this.instancePreconditionOpts = options === null || 
options === void 0 ? void 0 : options.preconditionOpts;\n }\n /**\n * The bucket's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * // `gs://my-bucket`\n * const href = bucket.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = new url_1.URL('gs://');\n uri.host = this.name;\n return uri;\n }\n /**\n * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule().\n * @property {boolean} [append=true] The new rules will be appended to any\n * pre-existing rules.\n */\n /**\n *\n * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects\n * in this bucket.\n * @property {string|object} action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @property {object} condition Condition a bucket must meet before the\n * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @property {string} [storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to. Please see\n * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions.\n */\n /**\n * Add an object lifecycle management rule to the bucket.\n *\n * By default, an Object Lifecycle Management rule provided to this method\n * will be included to the existing policy. To replace all existing rules,\n * supply the `options` argument, setting `append` to `false`.\n *\n * To add multiple rules, pass a list to the `rule` parameter. Calling this\n * function multiple times asynchronously does not guarantee that all rules\n * are added correctly.\n *\n * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects\n * in this bucket.\n * @param {string|object} rule.action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @param {object} rule.condition Condition a bucket must meet before the\n * action occurson the bucket. 
Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @param {string} [rule.storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to.\n * @param {AddLifecycleRuleOptions} [options] Configuration object.\n * @param {boolean} [options.append=true] Append the new rule to the existing\n * policy.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Automatically have an object deleted from this bucket once it is 3 years\n * // of age.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * const lifecycleRules = bucket.metadata.lifecycle.rule;\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // By default, the rule you provide will be added to the existing policy.\n * // Optionally, you can disable this behavior to replace all of the\n * // pre-existing rules.\n * //-\n * const options = {\n * append: false\n * };\n *\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, options, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // All rules have been replaced with the new \"delete\" rule.\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // For objects created before 2018, \"downgrade\" the storage class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'setStorageClass',\n * storageClass: 'COLDLINE',\n * condition: {\n * createdBefore: new Date('2018')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete objects created before 2016 which have the Coldline storage\n * // class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * matchesStorageClass: [\n * 'COLDLINE'\n * ],\n * createdBefore: new Date('2016')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceNoncurrentTime: 100\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * noncurrentTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a customTime that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceCustomTime: 100\n * }\n * }, function(err, apiResponse) ());\n *\n * //-\n * // Delete object that has a customTime before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * customTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n * ```\n */\n addLifecycleRule(rule, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback 
=== 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n options = options || {};\n const rules = Array.isArray(rule) ? rule : [rule];\n const newLifecycleRules = rules.map(rule => {\n if (typeof rule.action === 'object') {\n // This is a raw-formatted rule object, the way the API expects.\n // Just pass it through as-is.\n return rule;\n }\n const apiFormattedRule = {};\n apiFormattedRule.condition = {};\n apiFormattedRule.action = {\n type: rule.action.charAt(0).toUpperCase() + rule.action.slice(1),\n };\n if (rule.storageClass) {\n apiFormattedRule.action.storageClass = rule.storageClass;\n }\n for (const condition in rule.condition) {\n if (rule.condition[condition] instanceof Date) {\n apiFormattedRule.condition[condition] = rule.condition[condition]\n .toISOString()\n .replace(/T.+$/, '');\n }\n else {\n apiFormattedRule.condition[condition] = rule.condition[condition];\n }\n }\n return apiFormattedRule;\n });\n if (options.append === false) {\n this.setMetadata({ lifecycle: { rule: newLifecycleRules } }, options, callback);\n return;\n }\n // The default behavior appends the previously-defined lifecycle rules with\n // the new ones just passed in by the user.\n this.getMetadata((err, metadata) => {\n if (err) {\n callback(err);\n return;\n }\n const currentLifecycleRules = Array.isArray(metadata.lifecycle && metadata.lifecycle.rule)\n ? metadata.lifecycle && metadata.lifecycle.rule\n : [];\n this.setMetadata({\n lifecycle: {\n rule: currentLifecycleRules.concat(newLifecycleRules),\n },\n }, options, callback);\n });\n }\n /**\n * @typedef {object} CombineOptions\n * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CombineCallback\n * @param {?Error} err Request error, if any.\n * @param {File} newFile The new {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CombineResponse\n * @property {File} 0 The new {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * Combine multiple files into one new file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation}\n *\n * @throws {Error} if a non-array is provided as sources argument.\n * @throws {Error} if no sources are provided.\n * @throws {Error} if no destination is provided.\n *\n * @param {string[]|File[]} sources The source files that will be\n * combined.\n * @param {string|File} destination The file you would like the\n * source files combined into.\n * @param {CombineOptions} [options] Configuration options.\n * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. 
Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {CombineCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const logBucket = storage.bucket('log-bucket');\n *\n * const sources = [\n * logBucket.file('2013-logs.txt'),\n * logBucket.file('2014-logs.txt')\n * ];\n *\n * const allLogs = logBucket.file('all-logs.txt');\n *\n * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) {\n * // newFile === allLogs\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * logBucket.combine(sources, allLogs).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n combine(sources, destination, optionsOrCallback, callback) {\n var _a;\n if (!Array.isArray(sources) || sources.length === 0) {\n throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE);\n }\n if (!destination) {\n throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required\n AvailableServiceObjectMethods.setMetadata, // Same as above\n options);\n const convertToFile = (file) => {\n if (file instanceof file_1.File) {\n return file;\n }\n return this.file(file);\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n sources = sources.map(convertToFile);\n const destinationFile = convertToFile(destination);\n callback = callback || nodejs_common_1.util.noop;\n if (!destinationFile.metadata.contentType) {\n const destinationContentType = mime.contentType(destinationFile.name);\n if (destinationContentType) {\n destinationFile.metadata.contentType = destinationContentType;\n }\n }\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) ===\n undefined &&\n options.ifGenerationMatch === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n if (options.ifGenerationMatch === undefined) {\n Object.assign(options, destinationFile.instancePreconditionOpts, options);\n }\n // Make the request from the destination File object.\n destinationFile.request({\n method: 'POST',\n uri: '/compose',\n maxRetries,\n json: {\n destination: {\n contentType: destinationFile.metadata.contentType,\n },\n sourceObjects: sources.map(source => {\n const sourceObject = {\n name: source.name,\n };\n if (source.metadata && source.metadata.generation) {\n sourceObject.generation = source.metadata.generation;\n }\n return sourceObject;\n }),\n },\n qs: options,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, destinationFile, resp);\n });\n }\n /**\n * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}.\n *\n * @typedef {object} CreateChannelConfig\n * @property {string} address The address where notifications are\n * delivered for this channel.\n * @property {string} [delimiter] Returns results in a directory-like mode.\n * @property {number} [maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {string} [projection=noAcl] Set of properties to return.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions=false] If `true`, lists all versions of an object\n * as distinct results.\n */\n /**\n * @typedef {object} CreateChannelOptions\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} CreateChannelResponse\n * @property {Channel} 0 The new {@link Channel}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateChannelCallback\n * @param {?Error} err Request error, if any.\n * @param {Channel} channel The new {@link Channel}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Create a channel that will be notified when objects in this bucket changes.\n *\n * @throws {Error} If an ID is not provided.\n * @throws {Error} If an address is not provided.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation}\n *\n * @param {string} id The ID of the channel to create.\n * @param {CreateChannelConfig} config Configuration for creating channel.\n * @param {string} config.address The address where notifications are\n * delivered for this channel.\n * @param {string} [config.delimiter] Returns results in a directory-like mode.\n * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @param {string} [config.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} [config.prefix] Filter results 
to objects whose names begin\n * with this prefix.\n * @param {string} [config.projection=noAcl] Set of properties to return.\n * @param {string} [config.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [config.versions=false] If `true`, lists all versions of an object\n * as distinct results.\n * @param {CreateChannelOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateChannelCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const id = 'new-channel-id';\n *\n * const config = {\n * address: 'https://...'\n * };\n *\n * bucket.createChannel(id, config, function(err, channel, apiResponse) {\n * if (!err) {\n * // Channel created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.createChannel(id, config).then(function(data) {\n * const channel = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n createChannel(id, config, optionsOrCallback, callback) {\n if (typeof id !== 'string') {\n throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED);\n }\n if (typeof config.address !== 'string') {\n throw new Error(BucketExceptionMessages.CHANNEL_ADDRESS_REQUIRED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n method: 'POST',\n uri: '/o/watch',\n json: Object.assign({\n id,\n type: 'web_hook',\n }, config),\n qs: options,\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const resourceId = apiResponse.resourceId;\n const channel = this.storage.channel(id, resourceId);\n channel.metadata = apiResponse;\n callback(null, channel, apiResponse);\n });\n }\n /**\n * Metadata to set for the Notification.\n *\n * @typedef {object} CreateNotificationOptions\n * @property {object} [customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @property {string[]} [eventTypes] If present, only send notifications about\n * listed event types. 
If empty, sent notifications for all event types.\n * @property {string} [objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @property {string} [payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CreateNotificationCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification} notification The new {@link Notification}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CreateNotificationResponse\n * @property {Notification} 0 The new {@link Notification}.\n * @property {object} 1 The full API response.\n */\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationOptions} [options] Metadata to set for the\n * notification.\n * @param {object} [options.customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @param {string[]} [options.eventTypes] If present, only send notifications about\n * listed event types. If empty, sent notifications for all event types.\n * @param {string} [options.objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @param {string} [options.payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n * @see Notification#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const callback = function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * };\n *\n * myBucket.createNotification('my-topic', callback);\n *\n * //-\n * // Configure the nofiication by providing Notification metadata.\n * //-\n * const metadata = {\n * objectNamePrefix: 'prefix-'\n * };\n *\n * myBucket.createNotification('my-topic', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.createNotification('my-topic').then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/createNotification.js\n * region_tag:storage_create_bucket_notifications\n * Another example:\n */\n createNotification(topic, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n 
const topicIsObject = topic !== null && typeof topic === 'object';\n if (topicIsObject && nodejs_common_1.util.isCustomType(topic, 'pubsub/topic')) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n topic = topic.name;\n }\n if (typeof topic !== 'string') {\n throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED);\n }\n const body = Object.assign({ topic }, options);\n if (body.topic.indexOf('projects') !== 0) {\n body.topic = 'projects/{{projectId}}/topics/' + body.topic;\n }\n body.topic = '//pubsub.googleapis.com/' + body.topic;\n if (!body.payloadFormat) {\n body.payloadFormat = 'JSON_API_V1';\n }\n const query = {};\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n this.request({\n method: 'POST',\n uri: '/notificationConfigs',\n json: (0, util_2.convertObjKeysToSnakeCase)(body),\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const notification = this.notification(apiResponse.id);\n notification.metadata = apiResponse;\n callback(null, notification, apiResponse);\n });\n }\n /**\n * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles}\n * for all of the supported properties.\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n */\n /**\n * @callback DeleteFilesCallback\n * @param {?Error|?Error[]} err Request error, if any, or array of errors from\n * files that were not able to be deleted.\n * @param {object} [apiResponse] The full API response.\n */\n /**\n * Iterate over the bucket's files, calling `file.delete()` on each.\n *\n * This is not an atomic request. A delete attempt will be\n * made for each file individually. Any one can fail, in which case only a\n * portion of the files you intended to be deleted would have.\n *\n * Operations are performed in parallel, up to 10 at once. The first error\n * breaks the loop and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors until all files have had a chance\n * to be processed.\n *\n * File preconditions cannot be passed to this function. It will not retry unless\n * the idempotency strategy is set to retry always.\n *\n * The `query` object passed as the first argument will also be passed to\n * {@link Bucket#getFiles}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles}\n * @param {boolean} [query.force] Suppress errors until all files have been\n * processed.\n * @param {DeleteFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the files in the bucket.\n * //-\n * bucket.deleteFiles(function(err) {});\n *\n * //-\n * // By default, if a file cannot be deleted, this method will stop deleting\n * // files from your bucket. You can override this setting with `force:\n * // true`.\n * //-\n * bucket.deleteFiles({\n * force: true\n * }, function(errors) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * });\n *\n * //-\n * // The first argument to this method acts as a query to\n * // {@link Bucket#getFiles}. 
As an example, you can delete files\n * // which match a prefix.\n * //-\n * bucket.deleteFiles({\n * prefix: 'images/'\n * }, function(err) {\n * if (!err) {\n * // All files in the `images` directory have been deleted.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteFiles().then(function() {});\n * ```\n */\n deleteFiles(queryOrCallback, callback) {\n let query = {};\n if (typeof queryOrCallback === 'function') {\n callback = queryOrCallback;\n }\n else if (queryOrCallback) {\n query = queryOrCallback;\n }\n const MAX_PARALLEL_LIMIT = 10;\n const MAX_QUEUE_SIZE = 1000;\n const errors = [];\n const deleteFile = (file) => {\n return file.delete(query).catch(err => {\n if (!query.force) {\n throw err;\n }\n errors.push(err);\n });\n };\n (async () => {\n try {\n let promises = [];\n const limit = pLimit(MAX_PARALLEL_LIMIT);\n const filesStream = this.getFilesStream(query);\n for await (const curFile of filesStream) {\n if (promises.length >= MAX_QUEUE_SIZE) {\n await Promise.all(promises);\n promises = [];\n }\n promises.push(limit(() => deleteFile(curFile)).catch(e => {\n filesStream.destroy();\n throw e;\n }));\n }\n await Promise.all(promises);\n callback(errors.length > 0 ? errors : null);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @typedef {array} DeleteLabelsResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata Bucket's metadata.\n */\n /**\n * Delete one or more labels from this bucket.\n *\n * @param {string|string[]} [labels] The labels to delete. If no labels are\n * provided, all of the labels are removed.\n * @param {DeleteLabelsCallback} [callback] Callback function.\n * @param {DeleteLabelsOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the labels from this bucket.\n * //-\n * bucket.deleteLabels(function(err, apiResponse) {});\n *\n * //-\n * // Delete a single label.\n * //-\n * bucket.deleteLabels('labelone', function(err, apiResponse) {});\n *\n * //-\n * // Delete a specific set of labels.\n * //-\n * bucket.deleteLabels([\n * 'labelone',\n * 'labeltwo'\n * ], function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteLabels().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) {\n let labels = new Array();\n let options = {};\n if (typeof labelsOrCallbackOrOptions === 'function') {\n callback = labelsOrCallbackOrOptions;\n }\n else if (typeof labelsOrCallbackOrOptions === 'string') {\n labels = [labelsOrCallbackOrOptions];\n }\n else if (Array.isArray(labelsOrCallbackOrOptions)) {\n labels = labelsOrCallbackOrOptions;\n }\n else if (labelsOrCallbackOrOptions) {\n options = labelsOrCallbackOrOptions;\n }\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n const deleteLabels = (labels) => {\n const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => {\n nullLabelMap[labelKey] = null;\n return nullLabelMap;\n }, {});\n if ((options === null || options === void 0 ? 
void 0 : options.ifMetagenerationMatch) !== undefined) {\n this.setLabels(nullLabelMap, options, callback);\n }\n else {\n this.setLabels(nullLabelMap, callback);\n }\n };\n if (labels.length === 0) {\n this.getLabels((err, labels) => {\n if (err) {\n callback(err);\n return;\n }\n deleteLabels(Object.keys(labels));\n });\n }\n else {\n deleteLabels(labels);\n }\n }\n /**\n * @typedef {array} DisableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DisableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only\n *\n * This feature is not yet widely-available.
\n *\n * Disable `requesterPays` functionality from this bucket.\n *\n * @param {DisableRequesterPaysCallback} [callback] Callback function.\n * @param {DisableRequesterPaysOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.disableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality disabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.disableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_disable_requester_pays\n * Example of disabling requester pays:\n */\n disableRequesterPays(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: false,\n },\n }, options, callback);\n }\n /**\n * Configuration object for enabling logging.\n *\n * @typedef {object} EnableLoggingOptions\n * @property {string|Bucket} [bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @property {string} prefix A unique prefix for log object names.\n */\n /**\n * Enable logging functionality for this bucket. This will make two API\n * requests, first to grant Cloud Storage WRITE permission to the bucket, then\n * to set the appropriate configuration on the Bucket's metadata.\n *\n * @param {EnableLoggingOptions} config Configuration options.\n * @param {string|Bucket} [config.bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @param {string} config.prefix A unique prefix for log object names.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const config = {\n * prefix: 'log'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {\n * if (!err) {\n * // Logging functionality enabled successfully.\n * }\n * });\n *\n * ```\n * @example\n * Optionally, provide a destination bucket.\n * ```\n * const config = {\n * prefix: 'log',\n * bucket: 'destination-bucket'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {});\n * ```\n *\n * @example\n * If the callback is omitted, we'll return a Promise.\n * ```\n * bucket.enableLogging(config).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n enableLogging(config, callback) {\n if (!config ||\n typeof config === 'function' ||\n typeof config.prefix === 'undefined') {\n throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED);\n }\n const logBucket = config.bucket\n ? config.bucket.id || config.bucket\n : this.id;\n const options = {};\n if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) {\n options.ifMetagenerationMatch = config.ifMetagenerationMatch;\n }\n if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) {\n options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch;\n }\n (async () => {\n try {\n const [policy] = await this.iam.getPolicy();\n policy.bindings.push({\n members: ['group:cloud-storage-analytics@google.com'],\n role: 'roles/storage.objectCreator',\n });\n await this.iam.setPolicy(policy);\n this.setMetadata({\n logging: {\n logBucket,\n logObjectPrefix: config.prefix,\n },\n }, options, callback);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @typedef {array} EnableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback EnableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only\n *\n * This feature is not yet widely-available.
\n *\n * Enable `requesterPays` functionality for this bucket. This enables you, the\n * bucket owner, to have the requesting user assume the charges for the access\n * to your bucket and its contents.\n *\n * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback]\n * Callback function or precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.enableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality enabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.enableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_enable_requester_pays\n * Example of enabling requester pays:\n */\n enableRequesterPays(optionsOrCallback, cb) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: true,\n },\n }, options, cb);\n }\n /**\n * Create a {@link File} object. See {@link File} to see how to handle\n * the different use cases you may have.\n *\n * @param {string} name The name of the file in this bucket.\n * @param {FileOptions} [options] Configuration options.\n * @param {string|number} [options.generation] Only use a specific revision of\n * this file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * KMS key ring must use the same location as the bucket.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @returns {File}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-existing-file.png');\n * ```\n */\n file(name, options) {\n if (!name) {\n throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME);\n }\n return new file_1.File(this, name, options);\n }\n /**\n * @typedef {array} GetFilesResponse\n * @property {File[]} 0 Array of {@link File} instances.\n * @param {object} nextQuery 1 A query object to receive more results.\n * @param {object} apiResponse 2 The full API response.\n */\n /**\n * @callback GetFilesCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Array of {@link File} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Query object for listing files.\n *\n * @typedef {object} GetFilesOptions\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {string} [delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. 
Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @property {string} [endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {boolean} [includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions] If true, returns File objects scoped to\n * their versions.\n */\n /**\n * Get {@link File} objects for the files currently in the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation}\n *\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @param {boolean} [query.autoPaginate=true] Have pagination handled\n * automatically.\n * @param {string} [query.delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @param {string} [query.endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. 
If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @param {string} [query.prefix] Filter results to objects whose names begin\n * with this prefix.\n * @param {number} [query.maxApiCalls] Maximum number of API calls to make.\n * @param {number} [query.maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @param {string} [query.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} [query.startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {string} [query.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [query.versions] If true, returns File objects scoped to\n * their versions.\n * @param {GetFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFiles(function(err, files) {\n * if (!err) {\n * // files is an array of File objects.\n * }\n * });\n *\n * //-\n * // If your bucket has versioning enabled, you can get all of your files\n * // scoped to their generation.\n * //-\n * bucket.getFiles({\n * versions: true\n * }, function(err, files) {\n * // Each file is scoped to its generation.\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, files, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * bucket.getFiles(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * files[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * files[0].getMetadata(function(err, metadata) {});\n * };\n *\n * bucket.getFiles({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getFiles().then(function(data) {\n * const files = data[0];\n * });\n *\n * ```\n * @example\n *
Simulating a File System\n *\n * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.\n *\n * Consider the following remote objects:\n *\n *   1. \"a\"\n *   2. \"a/b/c/d\"\n *   3. \"b/d/e\"\n *\n * Using a delimiter of `/` will return a single file, \"a\".\n *\n * `apiResponse.prefixes` will return the \"sub-directories\" that were found:\n *\n *   1. \"a/\"\n *   2. \"b/\"
\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // files = [\n * // {File} // File object for file \"a\"\n * // ]\n *\n * // apiResponse.prefixes = [\n * // 'a/',\n * // 'b/'\n * // ]\n * });\n * ```\n *\n * @example\n * Using prefixes, it's now possible to simulate a file system with follow-up requests.\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/',\n * prefix: 'a/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // No files found within \"directory\" a.\n * // files = []\n *\n * // However, a \"sub-directory\" was found.\n * // This prefix can be used to continue traversing the \"file system\".\n * // apiResponse.prefixes = [\n * // 'a/b/'\n * // ]\n * });\n * ```\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files\n * Another example:\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files_with_prefix\n * Example of listing files, filtered by a prefix:\n */\n getFiles(queryOrCallback, callback) {\n let query = typeof queryOrCallback === 'object' ? queryOrCallback : {};\n if (!callback) {\n callback = queryOrCallback;\n }\n query = Object.assign({}, query);\n this.request({\n uri: '/o',\n qs: query,\n }, (err, resp) => {\n if (err) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const files = itemsArray.map((file) => {\n const options = {};\n if (query.versions) {\n options.generation = file.generation;\n }\n if (file.kmsKeyName) {\n options.kmsKeyName = file.kmsKeyName;\n }\n const fileInstance = this.file(file.name, options);\n fileInstance.metadata = file;\n return fileInstance;\n });\n let nextQuery = null;\n if (resp.nextPageToken) {\n nextQuery = Object.assign({}, query, {\n pageToken: resp.nextPageToken,\n });\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(null, files, nextQuery, resp);\n });\n }\n /**\n * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels().\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetLabelsResponse\n * @property {object} 0 Object of labels currently set on this bucket.\n */\n /**\n * @callback GetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} labels Object of labels currently set on this bucket.\n */\n /**\n * Get the labels currently set on this bucket.\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getLabels(function(err, labels) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // labels = {\n * // label: 'labelValue',\n * // ...\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getLabels().then(function(data) {\n * const labels = data[0];\n * });\n * ```\n */\n getLabels(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = 
optionsOrCallback;\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n callback(err, null);\n return;\n }\n callback(null, metadata.labels || {});\n });\n }\n /**\n * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback GetNotificationsCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification[]} notifications Array of {@link Notification}\n * instances.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetNotificationsResponse\n * @property {Notification[]} 0 Array of {@link Notification} instances.\n * @property {object} 1 The full API response.\n */\n /**\n * Retrieves a list of notification subscriptions for a given bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list}\n *\n * @param {GetNotificationsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.getNotifications(function(err, notifications, apiResponse) {\n * if (!err) {\n * // notifications is an array of Notification objects.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getNotifications().then(function(data) {\n * const notifications = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/listNotifications.js\n * region_tag:storage_list_bucket_notifications\n * Another example:\n */\n getNotifications(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n uri: '/notificationConfigs',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const notifications = itemsArray.map((notification) => {\n const notificationInstance = this.notification(notification.id);\n notificationInstance.metadata = notification;\n return notificationInstance;\n });\n callback(null, notifications, resp);\n });\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * @typedef {object} GetBucketSignedUrlConfig\n * @property {string} action Currently only supports \"list\" (HTTP: GET).\n * @property {*} expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @property {string} [version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). 
Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @property {string} [cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @property {object} [extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [queryParams] Additional query parameters to include\n * in the signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to a bucket.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {GetBucketSignedUrlConfig} config Configuration object.\n * @param {string} config.action Currently only supports \"list\" (HTTP: GET).\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * //-\n * // Generate a URL that allows temporary access to list files in a bucket.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'list',\n * expires: '03-17-2025'\n * };\n *\n * bucket.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The bucket is now available to be listed from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n * ```\n */\n getSignedUrl(cfg, callback) {\n const method = BucketActionToHTTPMethod[cfg.action];\n if (!method) {\n throw new Error(storage_1.ExceptionMessages.INVALID_ACTION);\n }\n const signConfig = {\n method,\n expires: cfg.expires,\n version: cfg.version,\n cname: cfg.cname,\n extensionHeaders: cfg.extensionHeaders || {},\n queryParams: cfg.queryParams || {},\n };\n if (!this.signer) {\n this.signer = new signer_1.URLSigner(this.storage.authClient, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback BucketLockCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Lock a previously-defined retention policy. This will prevent changes to\n * the policy.\n *\n * @throws {Error} if a metageneration is not provided.\n *\n * @param {number|string} metageneration The bucket's metageneration. 
This is\n * accesssible from calling {@link File#getMetadata}.\n * @param {BucketLockCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const metageneration = 2;\n *\n * bucket.lock(metageneration, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.lock(metageneration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n lock(metageneration, callback) {\n const metatype = typeof metageneration;\n if (metatype !== 'number' && metatype !== 'string') {\n throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED);\n }\n this.request({\n method: 'POST',\n uri: '/lockRetentionPolicy',\n qs: {\n ifMetagenerationMatch: metageneration,\n },\n }, callback);\n }\n /**\n * @typedef {array} MakeBucketPrivateResponse\n * @property {File[]} 0 List of files made private.\n */\n /**\n * @callback MakeBucketPrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made private.\n */\n /**\n * @typedef {object} MakeBucketPrivateOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Make the bucket listing private.\n *\n * You may also choose to make the contents of the bucket private by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePrivate} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {Metadata} [options.metadata] Define custom metadata properties to define\n * along with the operation.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {MakeBucketPrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket private.\n * //-\n * bucket.makePrivate(function(err) {});\n *\n * //-\n * // Make the bucket and its contents private.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePrivate(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents private, using force to suppress errors\n * // until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePrivate(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePrivate(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b, _c, _d;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options.private = true;\n const query = {\n predefinedAcl: 'projectPrivate',\n };\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n query.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n query.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n query.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a bucket\n // so acl must explicitly be nullified.\n const metadata = extend({}, options.metadata, { acl: null });\n this.setMetadata(metadata, query, err => {\n if (err) {\n callback(err);\n }\n const internalCall = () => {\n if (options.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options);\n }\n return Promise.resolve([]);\n };\n internalCall()\n .then(files => callback(null, files))\n .catch(callback);\n });\n }\n /**\n * @typedef {object} MakeBucketPublicOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n */\n /**\n * @callback MakeBucketPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made public.\n */\n /**\n * @typedef {array} MakeBucketPublicResponse\n * @property {File[]} 0 List of files made public.\n */\n /**\n * Make the bucket publicly readable.\n *\n * You may also choose to make the contents of the bucket publicly readable by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePublic} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPublicOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {MakeBucketPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket publicly readable.\n * //-\n * bucket.makePublic(function(err) {});\n *\n * //-\n * // Make the bucket and its contents publicly readable.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePublic(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents publicly readable, using force to\n * // suppress errors until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePublic(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePublic(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePublic(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n const req = extend(true, { public: true }, options);\n this.acl\n .add({\n entity: 'allUsers',\n role: 'READER',\n })\n .then(() => {\n return this.acl.default.add({\n entity: 'allUsers',\n role: 'READER',\n });\n })\n .then(() => {\n if (req.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req);\n }\n return [];\n })\n .then(files => callback(null, files), callback);\n }\n /**\n * Get a reference to a Cloud Pub/Sub Notification.\n *\n * @param {string} id ID of notification.\n * @returns {Notification}\n * @see Notification\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const notification = bucket.notification('1');\n * ```\n */\n notification(id) {\n if (!id) {\n throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID);\n }\n return new notification_1.Notification(this, id);\n }\n /**\n * Remove an already-existing retention policy from this bucket, if it is not\n * locked.\n *\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * bucket.removeRetentionPeriod(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.removeRetentionPeriod().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n removeRetentionPeriod(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: null,\n }, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) {\n reqOpts.qs = extend(reqOpts.qs, { userProject: this.userProject });\n }\n return super.request(reqOpts, callback);\n }\n /**\n * @typedef {array} SetLabelsResponse\n * @property {object} 0 The bucket metadata.\n */\n /**\n * @callback SetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Set labels on the bucket.\n *\n * This makes an underlying call to {@link Bucket#setMetadata}, which\n * is a PATCH request. 
This means an individual label can be overwritten, but\n * unmentioned labels will not be touched.\n *\n * @param {object} labels Labels to set on the bucket.\n * @param {SetLabelsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const labels = {\n * labelone: 'labelonevalue',\n * labeltwo: 'labeltwovalue'\n * };\n *\n * bucket.setLabels(labels, function(err, metadata) {\n * if (!err) {\n * // Labels set successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setLabels(labels).then(function(data) {\n * const metadata = data[0];\n * });\n * ```\n */\n setLabels(labels, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || nodejs_common_1.util.noop;\n this.setMetadata({ labels }, options, callback);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options);\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * Lock all objects contained in the bucket, based on their creation time. Any\n * attempt to overwrite or delete objects younger than the retention period\n * will result in a `PERMISSION_DENIED` error.\n *\n * An unlocked retention policy can be modified or removed from the bucket via\n * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A\n * locked retention policy cannot be removed or shortened in duration for the\n * lifetime of the bucket. Attempting to remove or decrease period of a locked\n * retention policy will result in a `PERMISSION_DENIED` error. You can still\n * increase the policy.\n *\n * @param {*} duration In seconds, the minimum retention time for all objects\n * contained in this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataCallback} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const DURATION_SECONDS = 15780000; // 6 months.\n *\n * //-\n * // Lock the objects in this bucket for 6 months.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setRetentionPeriod(duration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: {\n retentionPeriod: duration,\n },\n }, options, callback);\n }\n /**\n *\n * @typedef {object} Cors\n * @property {number} [maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @property {string[]} [method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n */\n /**\n * This can be used to set the CORS configuration on the bucket.\n *\n * The configuration will be overwritten with the value passed into this.\n *\n * @param {Cors[]} corsConfiguration The new CORS configuration to set\n * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour\n * bucket.setCorsConfiguration(corsConfiguration);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setCorsConfiguration(corsConfiguration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n cors: corsConfiguration,\n }, options, callback);\n }\n /**\n * @typedef {object} SetBucketStorageClassOptions\n * @property {string} [userProject] - The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetBucketStorageClassCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Set the default storage class for new files in this bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. 
(`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] - The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n // In case we get input like `storageClass`, convert to `storage_class`.\n storageClass = storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase();\n this.setMetadata({ storageClass }, options, callback);\n }\n /**\n * Set a user project to be billed for all requests made from this Bucket\n * object and any files referenced from this Bucket object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.userProject = userProject;\n const methods = [\n 'create',\n 'delete',\n 'exists',\n 'get',\n 'getMetadata',\n 'setMetadata',\n ];\n methods.forEach(method => {\n const methodConfig = this.methods[method];\n if (typeof methodConfig === 'object') {\n if (typeof methodConfig.reqOpts === 'object') {\n extend(methodConfig.reqOpts.qs, { userProject });\n }\n else {\n methodConfig.reqOpts = {\n qs: { userProject },\n };\n }\n }\n });\n }\n /**\n * @typedef {object} UploadOptions Configuration options for Bucket#upload().\n * @property {string|File} [destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @property {string} [encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @property {boolean} [gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @property {string} [kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. 
Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @property {object} [metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @property {string} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n */\n /**\n * @typedef {array} UploadResponse\n * @property {object} 0 The uploaded {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UploadCallback\n * @param {?Error} err Request error, if any.\n * @param {object} file The uploaded {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Upload a file to the bucket. This is a convenience method that wraps\n * {@link File#createWriteStream}.\n *\n * Resumable uploads are enabled by default\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}\n *\n * @param {string} pathString The fully qualified path to the file you\n * wish to upload to your bucket.\n * @param {UploadOptions} [options] Configuration options.\n * @param {string|File} [options.destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. 
Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {boolean} [options.gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @param {object} [options.metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @param {string} [options.offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @param {string} [options.predefinedAcl] Apply a predefined set of access\n * controls to this object.\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @param {boolean} [options.private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @param {boolean} [options.public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @param {boolean} [options.resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @param {number} [options.timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @param {string} [options.uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {string|boolean} [options.validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n * @param {UploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Upload a file from a local path.\n * //-\n * bucket.upload('/local/path/image.png', function(err, file, apiResponse) {\n * // Your bucket now contains:\n * // - \"image.png\" (with the contents of `/local/path/image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n *\n * //-\n * // It's not always that easy. 
You will likely want to specify the filename\n * // used when your new file lands in your bucket.\n * //\n * // You may also want to set metadata or customize other options.\n * //-\n * const options = {\n * destination: 'new-image.png',\n * validation: 'crc32c',\n * metadata: {\n * metadata: {\n * event: 'Fall trip to the zoo'\n * }\n * }\n * };\n *\n * bucket.upload('local-image.png', options, function(err, file) {\n * // Your bucket now contains:\n * // - \"new-image.png\" (with the contents of `local-image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n * //-\n * // You can also have a file gzip'd on the fly.\n * //-\n * bucket.upload('index.html', { gzip: true }, function(err, file) {\n * // Your bucket now contains:\n * // - \"index.html\" (automatically compressed with gzip)\n *\n * // Downloading the file with `file.download` will automatically decode\n * the\n * // file.\n * });\n *\n * //-\n * // You may also re-use a File object, {File}, that references\n * // the file you wish to create or overwrite.\n * //-\n * const options = {\n * destination: bucket.file('existing-file.png'),\n * resumable: false\n * };\n *\n * bucket.upload('local-img.png', options, function(err, newFile) {\n * // Your bucket now contains:\n * // - \"existing-file.png\" (with the contents of `local-img.png')\n *\n * // Note:\n * // The `newFile` parameter is equal to `file`.\n * });\n *\n * //-\n * // To use\n * // \n * // Customer-supplied Encryption Keys, provide the `encryptionKey`\n * option.\n * //-\n * const crypto = require('crypto');\n * const encryptionKey = crypto.randomBytes(32);\n *\n * bucket.upload('img.png', {\n * encryptionKey: encryptionKey\n * }, function(err, newFile) {\n * // `img.png` was uploaded with your custom encryption key.\n *\n * // `newFile` is already configured to use the encryption key when making\n * // operations on the remote object.\n *\n * // However, to use your encryption key later, you must create a `File`\n * // instance with the `key` supplied:\n * const file = bucket.file('img.png', {\n * encryptionKey: encryptionKey\n * });\n *\n * // Or with `file#setEncryptionKey`:\n * const file = bucket.file('img.png');\n * file.setEncryptionKey(encryptionKey);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.upload('local-image.png').then(function(data) {\n * const file = data[0];\n * });\n *\n * To upload a file from a URL, use {@link File#createWriteStream}.\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_upload_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n */\n upload(pathString, optionsOrCallback, callback) {\n var _a, _b;\n const upload = (numberOfRetries) => {\n const returnValue = retry(async (bail) => {\n await new Promise((resolve, reject) => {\n var _a, _b;\n if (numberOfRetries === 0 &&\n ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? 
void 0 : _b.autoRetry)) {\n newFile.storage.retryOptions.autoRetry = false;\n }\n const writable = newFile.createWriteStream(options);\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n fs.createReadStream(pathString)\n .on('error', bail)\n .pipe(writable)\n .on('error', err => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n else {\n return bail(err);\n }\n })\n .on('finish', () => {\n return resolve();\n });\n });\n }, {\n retries: numberOfRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback(null, newFile, newFile.metadata);\n }\n })\n .catch(callback);\n }\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (global['GCLOUD_SANDBOX_ENV']) {\n return;\n }\n let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options = Object.assign({\n metadata: {},\n }, options);\n // Do not retry if precondition option ifGenerationMatch is not set\n // because this is a file operation\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n let newFile;\n if (options.destination instanceof file_1.File) {\n newFile = options.destination;\n }\n else if (options.destination !== null &&\n typeof options.destination === 'string') {\n // Use the string as the name of the file.\n newFile = this.file(options.destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n else {\n // Resort to using the name of the incoming file.\n const destination = path.basename(pathString);\n newFile = this.file(destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n upload(maxRetries);\n }\n /**\n * @private\n *\n * @typedef {object} MakeAllFilesPublicPrivateOptions\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n * @property {boolean} [private] Make files private.\n * @property {boolean} [public] Make files public.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @private\n *\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Files that were updated.\n */\n /**\n * @typedef {array} MakeAllFilesPublicPrivateResponse\n * @property {File[]} 0 List of files affected.\n */\n /**\n * Iterate over all of a bucket's files, calling `file.makePublic()` (public)\n * or `file.makePrivate()` (private) on each.\n *\n * Operations are performed 
in parallel, up to 10 at once. The first error\n * breaks the loop, and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors.\n *\n * @private\n *\n * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.force] Suppress errors until all files have been\n * processed.\n * @param {boolean} [options.private] Make files private.\n * @param {boolean} [options.public] Make files public.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {MakeAllFilesPublicPrivateCallback} callback Callback function.\n *\n * @return {Promise}\n */\n makeAllFilesPublicPrivate_(optionsOrCallback, callback) {\n const MAX_PARALLEL_LIMIT = 10;\n const errors = [];\n const updatedFiles = [];\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const processFile = async (file) => {\n try {\n await (options.public ? file.makePublic() : file.makePrivate(options));\n updatedFiles.push(file);\n }\n catch (e) {\n if (!options.force) {\n throw e;\n }\n errors.push(e);\n }\n };\n this.getFiles(options)\n .then(([files]) => {\n const limit = pLimit(MAX_PARALLEL_LIMIT);\n const promises = files.map(file => {\n return limit(() => processFile(file));\n });\n return Promise.all(promises);\n })\n .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles));\n }\n getId() {\n return this.id;\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n methodType, localPreconditionOptions) {\n var _a, _b;\n if (typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n (methodType === AvailableServiceObjectMethods.setMetadata ||\n methodType === AvailableServiceObjectMethods.delete) &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) {\n this.storage.retryOptions.autoRetry = false;\n }\n else if (this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n}\nexports.Bucket = Bucket;\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Bucket, 'getFiles');\n/*! 
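The `upload` implementation above only leaves automatic retries enabled when the operation is conditionally idempotent: under the default `RetryConditional` strategy, `maxRetries` is forced to 0 unless an `ifGenerationMatch` precondition is supplied (and it is always 0 under `RetryNever`). A minimal sketch of keeping an upload retryable; the bucket name, local path, and generation value are placeholders, not part of the bundled code:

```js
// Placeholder names: 'my-bucket' and the local path are illustrative only.
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-bucket');

// ifGenerationMatch: 0 asks the API to create the object only if it does not
// already exist, which makes the upload idempotent, so the conditional retry
// logic above keeps numberOfRetries > 0.
bucket.upload('/local/path/image.png', {
  destination: 'image.png',
  preconditionOpts: {ifGenerationMatch: 0},
}, (err, file, apiResponse) => {
  if (err) {
    // Error handling omitted.
  }
  // `file` refers to the newly created object.
});
```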
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Bucket, {\n exclude: ['cloudStorageURI', 'request', 'file', 'notification'],\n});\n//# sourceMappingURL=bucket.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Channel = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Create a channel object to interact with a Cloud Storage channel.\n *\n * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification}\n *\n * @class\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\nclass Channel extends nodejs_common_1.ServiceObject {\n constructor(storage, id, resourceId) {\n const config = {\n parent: storage,\n baseUrl: '/channels',\n // An ID shouldn't be included in the API requests.\n // RE:\n // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145\n id: '',\n methods: {\n // Only need `request`.\n },\n };\n super(config);\n this.metadata.id = id;\n this.metadata.resourceId = resourceId;\n }\n /**\n * @typedef {array} StopResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback StopCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Stop this channel.\n *\n * @param {StopCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * channel.stop(function(err, apiResponse) {\n * if (!err) {\n * // Channel stopped successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * channel.stop().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n stop(callback) {\n callback = callback || nodejs_common_1.util.noop;\n this.request({\n method: 'POST',\n uri: '/stop',\n json: this.metadata,\n }, (err, apiResponse) => {\n callback(err, apiResponse);\n });\n }\n}\nexports.Channel = Channel;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Channel);\n//# sourceMappingURL=channel.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _CRC32C_crc32c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0;\nconst fs_1 = require(\"fs\");\n/**\n * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c}\n */\nconst CRC32C_EXTENSIONS = [\n 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c,\n 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,\n 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,\n 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,\n 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc,\n 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,\n 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,\n 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,\n 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,\n 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,\n 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf,\n 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,\n 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f,\n 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,\n 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,\n 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,\n 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e,\n 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,\n 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e,\n 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,\n 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,\n 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,\n 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4,\n 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,\n 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b,\n 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,\n 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,\n 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,\n 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975,\n 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,\n 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905,\n 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,\n 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,\n 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,\n 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,\n 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,\n 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78,\n 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,\n 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,\n 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,\n 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69,\n 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,\n 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,\n];\nexports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nconst CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS);\nexports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\nconst CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C();\nexports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR;\nconst CRC32C_EXCEPTION_MESSAGES = {\n INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`,\n};\nexports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES;\nclass CRC32C {\n /**\n * Constructs a new `CRC32C` object.\n *\n * Reconstruction is recommended via the `CRC32C.from` static method.\n *\n * @param initialValue An initial CRC32C value - a signed 32-bit integer.\n */\n constructor(initialValue = 0) {\n /** Current CRC32C value */\n _CRC32C_crc32c.set(this, 0);\n __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, \"f\");\n }\n /**\n * Calculates a CRC32C from a provided buffer.\n *\n * Implementation inspired from:\n * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c}\n * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c}\n * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage}\n *\n * @param data The `Buffer` to generate the CRC32C from\n */\n update(data) {\n let current = __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\") ^ 0xffffffff;\n for (const d of data) {\n const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff];\n current = tablePoly ^ (current >>> 8);\n }\n __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, \"f\");\n }\n /**\n * Validates a provided input to the current CRC32C value.\n *\n * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer\n */\n validate(input) {\n if (typeof input === 'number') {\n return input === __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n else if (typeof input === 'string') {\n return input === this.toString();\n }\n else if (Buffer.isBuffer(input)) {\n return Buffer.compare(input, this.toBuffer()) === 0;\n }\n else {\n // `CRC32C`-like object\n return input.toString() === this.toString();\n }\n }\n /**\n * Returns a `Buffer` representation of the CRC32C value\n */\n toBuffer() {\n const buffer = Buffer.alloc(4);\n buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, \"f\"));\n return buffer;\n }\n /**\n * Returns a JSON-compatible, base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`}\n */\n toJSON() {\n return this.toString();\n }\n /**\n * Returns a base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`}\n */\n toString() {\n return this.toBuffer().toString('base64');\n }\n /**\n * Returns the `number` representation of the CRC32C value 
as a signed 32-bit integer\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`}\n */\n valueOf() {\n return __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n /**\n * Generates a `CRC32C` from a compatible buffer format.\n *\n * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`\n */\n static fromBuffer(value) {\n let buffer;\n if (Buffer.isBuffer(value)) {\n buffer = value;\n }\n else if ('buffer' in value) {\n // `ArrayBufferView`\n buffer = Buffer.from(value.buffer);\n }\n else {\n // `ArrayBuffer`\n buffer = Buffer.from(value);\n }\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength));\n }\n return new CRC32C(buffer.readInt32BE());\n }\n static async fromFile(file) {\n const crc32c = new CRC32C();\n await new Promise((resolve, reject) => {\n (0, fs_1.createReadStream)(file)\n .on('data', (d) => crc32c.update(d))\n .on('end', resolve)\n .on('error', reject);\n });\n return crc32c;\n }\n /**\n * Generates a `CRC32C` from 4-byte base64-encoded data (string).\n *\n * @param value 4-byte base64-encoded data (string)\n */\n static fromString(value) {\n const buffer = Buffer.from(value, 'base64');\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength));\n }\n return this.fromBuffer(buffer);\n }\n /**\n * Generates a `CRC32C` from a safe, unsigned 32-bit integer.\n *\n * @param value an unsigned 32-bit integer\n */\n static fromNumber(value) {\n if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value));\n }\n return new CRC32C(value);\n }\n /**\n * Generates a `CRC32C` from a variety of compatable types.\n * Note: strings are treated as input, not as file paths to read from.\n *\n * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string)\n */\n static from(value) {\n if (typeof value === 'number') {\n return this.fromNumber(value);\n }\n else if (typeof value === 'string') {\n return this.fromString(value);\n }\n else if ('byteLength' in value) {\n // `ArrayBuffer` | `Buffer` | `ArrayBufferView`\n return this.fromBuffer(value);\n }\n else {\n // `CRC32CValidator`/`CRC32C`-like\n return this.fromString(value.toString());\n }\n }\n}\nexports.CRC32C = CRC32C;\n_CRC32C_crc32c = new WeakMap();\nCRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nCRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\n//# sourceMappingURL=crc32c.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? 
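The `CRC32C` helper above is table-driven and exposes `update`, `validate`, `toString`, `valueOf`, and the `CRC32C.from` factory. A small usage sketch, assuming the class is re-exported from the package entry point (otherwise require it from the bundled module directly); the expected base64 value is the one quoted in the library's own callback docs:

```js
// Assumes CRC32C is re-exported by @google-cloud/storage's main entry point.
const {CRC32C} = require('@google-cloud/storage');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('some '));
crc32c.update(Buffer.from('text\n'));

console.log(crc32c.toString());           // 'DkjKuA==' (base64 of the 4-byte value)
console.log(crc32c.validate('DkjKuA==')); // true

// CRC32C.from accepts a signed 32-bit integer, a 4-byte buffer, or 4-byte
// base64 data; all of them round-trip to the same value.
const copy = CRC32C.from(crc32c.valueOf());
console.log(copy.validate(crc32c.toBuffer())); // true
```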
receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _File_instances, _File_validateIntegrity;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst compressible = require(\"compressible\");\nconst crypto = require(\"crypto\");\nconst extend = require(\"extend\");\nconst fs = require(\"fs\");\nconst mime = require(\"mime\");\nconst resumableUpload = require(\"./resumable-upload\");\nconst stream_1 = require(\"stream\");\nconst zlib = require(\"zlib\");\nconst storage_1 = require(\"./storage\");\nconst bucket_1 = require(\"./bucket\");\nconst acl_1 = require(\"./acl\");\nconst signer_1 = require(\"./signer\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst util_1 = require(\"./util\");\nconst hash_stream_validator_1 = require(\"./hash-stream-validator\");\nconst retry = require(\"async-retry\");\nvar ActionToHTTPMethod;\n(function (ActionToHTTPMethod) {\n ActionToHTTPMethod[\"read\"] = \"GET\";\n ActionToHTTPMethod[\"write\"] = \"PUT\";\n ActionToHTTPMethod[\"delete\"] = \"DELETE\";\n ActionToHTTPMethod[\"resumable\"] = \"POST\";\n})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {}));\n/**\n * @private\n */\nexports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com';\n/**\n * @private\n */\nconst GS_URL_REGEXP = /^gs:\\/\\/([a-z0-9_.-]+)\\/(.+)$/;\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\nvar FileExceptionMessages;\n(function (FileExceptionMessages) {\n FileExceptionMessages[\"EXPIRATION_TIME_NA\"] = \"An expiration time is not available.\";\n FileExceptionMessages[\"DESTINATION_NO_NAME\"] = \"Destination file should have a name.\";\n FileExceptionMessages[\"INVALID_VALIDATION_FILE_RANGE\"] = \"Cannot use validation with file ranges (start/end).\";\n FileExceptionMessages[\"MD5_NOT_AVAILABLE\"] = \"MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.\";\n FileExceptionMessages[\"EQUALS_CONDITION_TWO_ELEMENTS\"] = \"Equals condition must be an array of 2 elements.\";\n FileExceptionMessages[\"STARTS_WITH_TWO_ELEMENTS\"] = \"StartsWith condition must be an array of 2 elements.\";\n FileExceptionMessages[\"CONTENT_LENGTH_RANGE_MIN_MAX\"] = \"ContentLengthRange must have numeric min & max fields.\";\n FileExceptionMessages[\"DOWNLOAD_MISMATCH\"] = \"The downloaded data did not match the data from the server. 
To be sure the content is the same, you should download the file again.\";\n FileExceptionMessages[\"UPLOAD_MISMATCH_DELETE_FAIL\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, we attempted to delete the file, but it was not successful.\\n To be sure the content is the same, you should try removing the file manually,\\n then uploading the file again.\\n \\n\\nThe delete attempt failed with this message:\\n\\n \";\n FileExceptionMessages[\"UPLOAD_MISMATCH\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, the file has been deleted.\\n To be sure the content is the same, you should try uploading the file again.\";\n})(FileExceptionMessages = exports.FileExceptionMessages || (exports.FileExceptionMessages = {}));\n/**\n * A File object is created from your {@link Bucket} object using\n * {@link Bucket#file}.\n *\n * @class\n */\nclass File extends nodejs_common_1.ServiceObject {\n /**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a File instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control lists}\n *\n * @name File#acl\n * @mixes Acl\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * //-\n * // Make a file publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * file.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n /**\n * The API-formatted resource description of the file.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. 
To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name File#metadata\n * @type {object}\n */\n /**\n * The file's name.\n * @name File#name\n * @type {string}\n */\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} FileOptions Options passed to the File constructor.\n * @property {string} [encryptionKey] A custom encryption key.\n * @property {number} [generation] Generation to scope the file to.\n * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this\n * object, if the object is encrypted by such a key. Limited availability;\n * usable only by enabled projects.\n * @property {string} [userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. 
Defaults to {@link CRC32C}\n */\n /**\n * Constructs a file object.\n *\n * @param {Bucket} bucket The Bucket instance this file is\n * attached to.\n * @param {string} name The name of the remote file.\n * @param {FileOptions} [options] Configuration options.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * ```\n */\n constructor(bucket, name, options = {}) {\n var _a, _b;\n const requestQueryObject = {};\n let generation;\n if (options.generation !== null) {\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n requestQueryObject.generation = generation;\n }\n }\n Object.assign(requestQueryObject, options.preconditionOpts);\n const userProject = options.userProject || bucket.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * @typedef {array} DeleteFileResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteFileCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @method File#delete\n * @param {object} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the file does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_delete_file\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} FileExistsResponse\n * @property {boolean} 0 Whether the {@link File} exists.\n */\n /**\n * @callback FileExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link File} exists.\n */\n /**\n * Check if the file exists.\n *\n * @method File#exists\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {FileExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} 
GetFileResponse\n * @property {File} 0 The {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileCallback\n * @param {?Error} err Request error, if any.\n * @param {File} file The {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a file object and its metadata if it exists.\n *\n * @method File#get\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.get(function(err, file, apiResponse) {\n * // file.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.get().then(function(data) {\n * const file = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetFileMetadataResponse\n * @property {object} 0 The {@link File} metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The {@link File} metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get the file's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation}\n *\n * @method File#getMetadata\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_get_metadata\n * Another example:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata().\n * @param {string} [userProject] The ID of the project which will be billed for the request.\n */\n /**\n * @callback SetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} SetFileMetadataResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Merge the given metadata with the current remote file's metadata. This\n * will set metadata if it was previously unset or update previously set\n * metadata. 
To unset previously set metadata, set its value to null.\n *\n * You can set custom key/value pairs in the metadata key of the given\n * object, however the other properties outside of this object must adhere\n * to the {@link https://goo.gl/BOnnCK| official API documentation}.\n *\n *\n * See the examples below for more information.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @method File#setMetadata\n * @param {object} [metadata] The metadata you wish to update.\n * @param {SetFileMetadataOptions} [options] Configuration options.\n * @param {SetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * const metadata = {\n * contentType: 'application/x-font-ttf',\n * metadata: {\n * my: 'custom',\n * properties: 'go here'\n * }\n * };\n *\n * file.setMetadata(metadata, function(err, apiResponse) {});\n *\n * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' }\n * file.setMetadata({\n * metadata: {\n * abc: '123', // will be set.\n * unsetMe: null, // will be unset (deleted).\n * hello: 'goodbye' // will be updated from 'world' to 'goodbye'.\n * }\n * }, function(err, apiResponse) {\n * // metadata should now be { abc: '123', hello: 'goodbye' }\n * });\n *\n * //-\n * // Set a temporary hold on this file from its bucket's retention period\n * // configuration.\n * //\n * file.setMetadata({\n * temporaryHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Alternatively, you may set a temporary hold. This will follow the\n * // same behavior as an event-based hold, with the exception that the\n * // bucket's retention policy will not renew for this file from the time\n * // the hold is released.\n * //-\n * file.setMetadata({\n * eventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: bucket,\n baseUrl: '/o',\n id: encodeURIComponent(name),\n methods,\n });\n _File_instances.add(this);\n this.bucket = bucket;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.storage = bucket.parent;\n // @TODO Can this duplicate code from above be avoided?\n if (options.generation !== null) {\n let generation;\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n this.generation = generation;\n }\n }\n this.kmsKeyName = options.kmsKeyName;\n this.userProject = userProject;\n this.name = name;\n if (options.encryptionKey) {\n this.setEncryptionKey(options.encryptionKey);\n }\n this.acl = new acl_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.bucket.crc32cGenerator;\n this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry;\n this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts;\n }\n /**\n * The object's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('image.png');\n *\n * // `gs://my-bucket/image.png`\n * const href = file.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = this.bucket.cloudStorageURI;\n uri.pathname = this.name;\n return uri;\n }\n /**\n * A helper method for determining if a request should be retried based on preconditions.\n * This should only be used for methods where the idempotency is determined by\n * `ifGenerationMatch`\n * @private\n *\n * A request should not be retried under the following conditions:\n * - if precondition option `ifGenerationMatch` is not set OR\n * - if `idempotencyStrategy` is set to `RetryNever`\n */\n shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) {\n var _a;\n return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined &&\n ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever);\n }\n /**\n * @typedef {array} CopyResponse\n * @property {File} 0 The copied {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CopyCallback\n * @param {?Error} err Request error, if any.\n * @param {File} copiedFile The copied {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} CopyOptions Configuration options for File#copy(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @property {string} [cacheControl] The cacheControl setting for the new file.\n * @property {string} [contentEncoding] The contentEncoding setting for the new file.\n * @property {string} [contentType] The contentType setting for the new file.\n * @property {string} [destinationKmsKeyName] Resource name of the Cloud\n * KMS key, of the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {Metadata} [metadata] Metadata to specify on the copied file.\n * @property {string} [predefinedAcl] Set the ACL for the new file.\n * @property {string} [token] A previously-returned `rewriteToken` from an\n * unfinished rewrite request.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Copy this file to another file. By default, this will copy the file to the\n * same bucket, but you can choose to copy it to another Bucket by providing\n * a Bucket or File object or a URL starting with \"gs://\".\n * The generation of the file will not be preserved.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {CopyOptions} [options] Configuration options. 
See an\n * @param {CopyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is copied to its\n * // current bucket, under the new name provided.\n * //-\n * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {\n * // `my-bucket` now contains:\n * // - \"my-image.png\"\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-copy.png';\n * file.copy(newLocation, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be copied to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = storage.bucket('another-bucket');\n * file.copy(anotherBucket, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n * file.copy(anotherFile, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `copiedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.copy(newLocation).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_copy_file\n * Another example:\n */\n copy(destination, optionsOrCallback, callback) {\n var _a, _b;\n const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME);\n if (!destination) {\n throw noDestinationError;\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n options = extend(true, {}, options);\n callback = callback || nodejs_common_1.util.noop;\n let destBucket;\n let destName;\n let newFile;\n if (typeof destination === 'string') {\n const parsedDestination = GS_URL_REGEXP.exec(destination);\n if (parsedDestination !== null && parsedDestination.length === 3) {\n destBucket = this.storage.bucket(parsedDestination[1]);\n destName = parsedDestination[2];\n }\n else {\n destBucket = 
this.bucket;\n destName = destination;\n }\n }\n else if (destination instanceof bucket_1.Bucket) {\n destBucket = destination;\n destName = this.name;\n }\n else if (destination instanceof File) {\n destBucket = destination.bucket;\n destName = destination.name;\n newFile = destination;\n }\n else {\n throw noDestinationError;\n }\n const query = {};\n if (this.generation !== undefined) {\n query.sourceGeneration = this.generation;\n }\n if (options.token !== undefined) {\n query.rewriteToken = options.token;\n }\n if (options.userProject !== undefined) {\n query.userProject = options.userProject;\n delete options.userProject;\n }\n if (options.predefinedAcl !== undefined) {\n query.destinationPredefinedAcl = options.predefinedAcl;\n delete options.predefinedAcl;\n }\n newFile = newFile || destBucket.file(destName);\n const headers = {};\n if (this.encryptionKey !== undefined) {\n headers['x-goog-copy-source-encryption-algorithm'] = 'AES256';\n headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64;\n headers['x-goog-copy-source-encryption-key-sha256'] =\n this.encryptionKeyHash;\n }\n if (newFile.encryptionKey !== undefined) {\n this.setEncryptionKey(newFile.encryptionKey);\n }\n else if (options.destinationKmsKeyName !== undefined) {\n query.destinationKmsKeyName = options.destinationKmsKeyName;\n delete options.destinationKmsKeyName;\n }\n else if (newFile.kmsKeyName !== undefined) {\n query.destinationKmsKeyName = newFile.kmsKeyName;\n }\n if (query.destinationKmsKeyName) {\n this.kmsKeyName = query.destinationKmsKeyName;\n const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor);\n if (keyIndex > -1) {\n this.interceptors.splice(keyIndex, 1);\n }\n }\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n this.storage.retryOptions.autoRetry = false;\n }\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) {\n query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch;\n delete options.preconditionOpts;\n }\n this.request({\n method: 'POST',\n uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`,\n qs: query,\n json: options,\n headers,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n if (resp.rewriteToken) {\n const options = {\n token: resp.rewriteToken,\n };\n if (query.userProject) {\n options.userProject = query.userProject;\n }\n if (query.destinationKmsKeyName) {\n options.destinationKmsKeyName = query.destinationKmsKeyName;\n }\n this.copy(newFile, options, callback);\n return;\n }\n callback(null, newFile, resp);\n });\n }\n /**\n * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**.\n * @property {number} [start] A byte offset to begin the file's download\n * from. 
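As with uploads, `copy` only stays automatically retryable when `shouldRetryBasedOnPreconditionAndIdempotencyStrat` sees an `ifGenerationMatch` precondition; the rewrite loop above re-issues the request itself while the API keeps returning a `rewriteToken`. A hedged sketch; the bucket and object names are placeholders:

```js
// Placeholder names throughout; ifGenerationMatch: 0 means "only copy if the
// destination object does not exist yet".
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('my-image.png');

file.copy('gs://another-bucket/my-image-copy.png', {
  preconditionOpts: {ifGenerationMatch: 0},
}, (err, copiedFile, apiResponse) => {
  if (err) {
    // Error handling omitted.
  }
  // `copiedFile` refers to the new object in `another-bucket`.
});
```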
Default is 0. NOTE: Byte ranges are inclusive; that is,\n * `options.start = 0` and `options.end = 999` represent the first 1000\n * bytes in a file or object. NOTE: when specifying a byte range, data\n * integrity is not available.\n * @property {number} [end] A byte offset to stop reading the file at.\n * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and\n * `options.end = 999` represent the first 1000 bytes in a file or object.\n * NOTE: when specifying a byte range, data integrity is not available.\n * @property {boolean} [decompress=true] Disable auto decompression of the\n * received data. By default this option is set to `true`.\n * Applicable in cases where the data was uploaded with\n * `gzip: true` option. See {@link File#createWriteStream}.\n */\n /**\n * Create a readable stream to read the contents of the remote file. It can be\n * piped to a writable stream or listened to for 'data' events to read a\n * file's contents.\n *\n * In the unlikely event there is a mismatch between what you downloaded and\n * the version in your Bucket, your error handler will receive an error with\n * code \"CONTENT_DOWNLOAD_MISMATCH\". If you receive this error, the best\n * recourse is to try downloading the file again.\n *\n * NOTE: Readable streams will emit the `end` event when the file is fully\n * downloaded.\n *\n * @param {CreateReadStreamOptions} [options] Configuration options.\n * @returns {ReadableStream}\n *\n * @example\n * ```\n * //-\n * //
Downloading a File
\n * //\n * // The example below demonstrates how we can reference a remote file, then\n * // pipe its contents to a local file. This is effectively creating a local\n * // backup of your remote data.\n * //-\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const fs = require('fs');\n * const remoteFile = bucket.file('image.png');\n * const localFilename = '/Users/stephen/Photos/image.png';\n *\n * remoteFile.createReadStream()\n * .on('error', function(err) {})\n * .on('response', function(response) {\n * // Server connected and responded with the specified status and headers.\n * })\n * .on('end', function() {\n * // The file is fully downloaded.\n * })\n * .pipe(fs.createWriteStream(localFilename));\n *\n * //-\n * // To limit the downloaded data to only a byte range, pass an options\n * // object.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * start: 10000,\n * end: 20000\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n *\n * //-\n * // To read a tail byte range, specify only `options.end` as a negative\n * // number.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * end: -100\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n * ```\n */\n createReadStream(options = {}) {\n options = Object.assign({ decompress: true }, options);\n const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number';\n const tailRequest = options.end < 0;\n let validateStream = undefined;\n const throughStream = new util_1.PassThroughShim();\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n const value = options.validation.toLowerCase().trim();\n crc32c = value === 'crc32c';\n md5 = value === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n }\n const shouldRunValidation = !rangeRequest && (crc32c || md5);\n if (rangeRequest) {\n if (typeof options.validation === 'string' ||\n options.validation === true) {\n throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE);\n }\n // Range requests can't receive data integrity checks.\n crc32c = false;\n md5 = false;\n }\n const onComplete = (err) => {\n if (err) {\n throughStream.destroy(err);\n }\n };\n // We listen to the response event from the request stream so that we\n // can...\n //\n // 1) Intercept any data from going to the user if an error occurred.\n // 2) Calculate the hashes from the http.IncomingMessage response\n // stream,\n // which will return the bytes from the source without decompressing\n // gzip'd content. We then send it through decompressed, if\n // applicable, to the user.\n const onResponse = (err, _body, rawResponseStream) => {\n if (err) {\n // Get error message from the body.\n this.getBufferFromReadable(rawResponseStream).then(body => {\n err.message = body.toString('utf8');\n throughStream.destroy(err);\n });\n return;\n }\n const headers = rawResponseStream.toJSON().headers;\n const isCompressed = headers['content-encoding'] === 'gzip';\n const hashes = {};\n // The object is safe to validate if:\n // 1. It was stored gzip and returned to us gzip OR\n // 2. 
It was never stored as gzip\n const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' &&\n isCompressed) ||\n headers['x-goog-stored-content-encoding'] === 'identity';\n const transformStreams = [];\n if (shouldRunValidation) {\n // The x-goog-hash header should be set with a crc32c and md5 hash.\n // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx'\n if (typeof headers['x-goog-hash'] === 'string') {\n headers['x-goog-hash']\n .split(',')\n .forEach((hashKeyValPair) => {\n const delimiterIndex = hashKeyValPair.indexOf('=');\n const hashType = hashKeyValPair.substr(0, delimiterIndex);\n const hashValue = hashKeyValPair.substr(delimiterIndex + 1);\n hashes[hashType] = hashValue;\n });\n }\n validateStream = new hash_stream_validator_1.HashStreamValidator({\n crc32c,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n crc32cExpected: hashes.crc32c,\n md5Expected: hashes.md5,\n });\n }\n if (md5 && !hashes.md5) {\n const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE);\n hashError.code = 'MD5_NOT_AVAILABLE';\n throughStream.destroy(hashError);\n return;\n }\n if (safeToValidate && shouldRunValidation && validateStream) {\n transformStreams.push(validateStream);\n }\n if (isCompressed && options.decompress) {\n transformStreams.push(zlib.createGunzip());\n }\n (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete);\n };\n // Authenticate the request, then pipe the remote API request to the stream\n // returned to the user.\n const makeRequest = () => {\n const query = { alt: 'media' };\n if (this.generation) {\n query.generation = this.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n const headers = {\n 'Accept-Encoding': 'gzip',\n 'Cache-Control': 'no-store',\n };\n if (rangeRequest) {\n const start = typeof options.start === 'number' ? options.start : '0';\n const end = typeof options.end === 'number' ? options.end : '';\n headers.Range = `bytes=${tailRequest ? 
end : `${start}-${end}`}`;\n }\n const reqOpts = {\n uri: '',\n headers,\n qs: query,\n };\n this.requestStream(reqOpts)\n .on('error', err => {\n throughStream.destroy(err);\n })\n .on('response', res => {\n throughStream.emit('response', res);\n nodejs_common_1.util.handleResp(null, res, null, onResponse);\n })\n .resume();\n };\n throughStream.on('reading', makeRequest);\n return throughStream;\n }\n /**\n * @callback CreateResumableUploadCallback\n * @param {?Error} err Request error, if any.\n * @param {string} uri The resumable upload's unique session URI.\n */\n /**\n * @typedef {array} CreateResumableUploadResponse\n * @property {string} 0 The resumable upload's unique session URI.\n */\n /**\n * @typedef {object} CreateResumableUploadOptions\n * @property {object} [metadata] Metadata to set on the file.\n * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload.\n * @property {string} [origin] Origin header to set for the upload.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string} [chunkSize] Create a separate request per chunk. This\n * value is in bytes and should be a multiple of 256 KiB (2^18).\n * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.}\n */\n /**\n * Create a unique resumable upload session URI. This is the first step when\n * performing a resumable upload.\n *\n * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n * for more on how the entire process works.\n *\n *
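// A minimal sketch of the tail byte-range read described in the createReadStream() JSDoc
// above. The bucket, object, and local path are illustrative assumptions; note that data
// integrity validation is skipped for range requests, as the implementation above enforces.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const logFile = new Storage().bucket('my-bucket').file('access_log');
logFile
  .createReadStream({end: -100}) // a negative `end` requests only the last 100 bytes
  .on('error', err => console.error(err))
  .pipe(fs.createWriteStream('/tmp/access_log.tail'));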
Note
\n *\n * If you are just looking to perform a resumable upload without worrying\n * about any of the details, see {@link File#createWriteStream}. Resumable\n * uploads are performed by default.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n *\n * @param {CreateResumableUploadOptions} [options] Configuration options.\n * @param {CreateResumableUploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.createResumableUpload(function(err, uri) {\n * if (!err) {\n * // `uri` can be used to PUT data to.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.createResumableUpload().then(function(data) {\n * const uri = data[0];\n * });\n * ```\n */\n createResumableUpload(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const retryOptions = this.storage.retryOptions;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n retryOptions.autoRetry = false;\n }\n resumableUpload.createURI({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n origin: options.origin,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n userProject: options.userProject || this.userProject,\n retryOptions: retryOptions,\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n }, callback);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().\n * @property {string} [contentType] Alias for\n * `options.metadata.contentType`. If set to `auto`, the file name is used\n * to determine the contentType.\n * @property {string|boolean} [gzip] If true, automatically gzip the file.\n * If set to `auto`, the contentType is used to determine if the file\n * should be gzipped. This will set `options.metadata.contentEncoding` to\n * `gzip` if necessary.\n * @property {object} [metadata] See the examples below or\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}\n * for more details.\n * @property {number} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. 
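// A minimal sketch of createResumableUpload() as documented above: the resolved `uri` is a
// session URL that data can be PUT to. The single fetch() PUT below (Node 18+ global fetch)
// is an assumption about how a caller might use that URI, not part of this library.
const {Storage} = require('@google-cloud/storage');
const file = new Storage().bucket('my-bucket').file('my-file');
file.createResumableUpload().then(async ([uri]) => {
  // Upload the whole (small) payload in one request against the session URI.
  await fetch(uri, {method: 'PUT', body: 'hello world'});
});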
Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable] Force a resumable upload. NOTE: When\n * working with streams, the file format and size is unknown until it's\n * completely consumed. Because of this, it's best for you to be explicit\n * for what makes sense given your input.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**. In addition to specifying\n * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will\n * cause the server to perform validation in addition to client validation.\n * NOTE: Validation is automatically skipped for objects that were\n * uploaded using the `gzip` option and have already compressed content.\n */\n /**\n * Create a writable stream to overwrite the contents of the file in your\n * bucket.\n *\n * A File object can also be used to create files for the first time.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n *\n *
\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n *
\n *\n * NOTE: Writable streams will emit the `finish` event when the file is fully\n * uploaded.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}\n *\n * @param {CreateWriteStreamOptions} [options] Configuration options.\n * @returns {WritableStream}\n *\n * @example\n * ```\n * const fs = require('fs');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * //
Uploading a File
\n * //\n * // Now, consider a case where we want to upload a file to your bucket. You\n * // have the option of using {@link Bucket#upload}, but that is just\n * // a convenience method which will do the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream())\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * //
Uploading a File with gzip compression
\n * //-\n * fs.createReadStream('/Users/stephen/site/index.html')\n * .pipe(file.createWriteStream({ gzip: true }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * // Downloading the file with `createReadStream` will automatically decode\n * // the file.\n * //-\n *\n * //-\n * //
Uploading a File with Metadata
\n * //\n * // One last case you may run into is when you want to upload a file to your\n * // bucket and set its metadata at the same time. Like above, you can use\n * // {@link Bucket#upload} to do this, which is just a wrapper around\n * // the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream({\n * metadata: {\n * contentType: 'image/jpeg',\n * metadata: {\n * custom: 'metadata'\n * }\n * }\n * }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n * ```\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n createWriteStream(options = {}) {\n options = extend(true, { metadata: {} }, options);\n if (options.contentType) {\n options.metadata.contentType = options.contentType;\n }\n if (!options.metadata.contentType ||\n options.metadata.contentType === 'auto') {\n const detectedContentType = mime.getType(this.name);\n if (detectedContentType) {\n options.metadata.contentType = detectedContentType;\n }\n }\n let gzip = options.gzip;\n if (gzip === 'auto') {\n gzip = compressible(options.metadata.contentType);\n }\n if (gzip) {\n options.metadata.contentEncoding = 'gzip';\n }\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n options.validation = options.validation.toLowerCase();\n crc32c = options.validation === 'crc32c';\n md5 = options.validation === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n }\n /**\n * A callback for determining when the underlying pipeline is complete.\n * It's possible the pipeline callback could error before the write stream\n * calls `final` so by default this will destroy the write stream unless the\n * write stream sets this callback via its `final` handler.\n * @param error An optional error\n */\n let pipelineCallback = error => {\n writeStream.destroy(error || undefined);\n };\n // A stream for consumer to write to\n const writeStream = new stream_1.Writable({\n final(cb) {\n // Set the pipeline callback to this callback so the pipeline's results\n // can be populated to the consumer\n pipelineCallback = cb;\n emitStream.end();\n },\n write(chunk, encoding, cb) {\n emitStream.write(chunk, encoding, cb);\n },\n });\n const emitStream = new util_1.PassThroughShim();\n const hashCalculatingStream = new hash_stream_validator_1.HashStreamValidator({\n crc32c,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n updateHashesOnly: true,\n });\n const fileWriteStream = duplexify();\n let fileWriteStreamMetadataReceived = false;\n // Handing off emitted events to users\n emitStream.on('reading', () => writeStream.emit('reading'));\n emitStream.on('writing', () => writeStream.emit('writing'));\n fileWriteStream.on('progress', evt => writeStream.emit('progress', evt));\n fileWriteStream.on('response', resp => writeStream.emit('response', resp));\n fileWriteStream.once('metadata', () => {\n fileWriteStreamMetadataReceived = true;\n });\n writeStream.on('writing', () => {\n if (options.resumable === false) {\n this.startSimpleUpload_(fileWriteStream, options);\n }\n else {\n this.startResumableUpload_(fileWriteStream, options);\n }\n (0, stream_1.pipeline)(emitStream, gzip ? zlib.createGzip() : new stream_1.PassThrough(), hashCalculatingStream, fileWriteStream, async (e) => {\n if (e) {\n return pipelineCallback(e);\n }\n // We want to make sure we've received the metadata from the server in order\n // to properly validate the object's integrity. 
Depending on the type of upload,\n // the stream could close before the response is returned.\n if (!fileWriteStreamMetadataReceived) {\n try {\n await new Promise((resolve, reject) => {\n fileWriteStream.once('metadata', resolve);\n fileWriteStream.once('error', reject);\n });\n }\n catch (e) {\n return pipelineCallback(e);\n }\n }\n try {\n await __classPrivateFieldGet(this, _File_instances, \"m\", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5 });\n pipelineCallback();\n }\n catch (e) {\n pipelineCallback(e);\n }\n });\n });\n return writeStream;\n }\n delete(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_1.AvailableServiceObjectMethods.delete, options);\n super\n .delete(options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} DownloadResponse\n * @property [0] The contents of a File.\n */\n /**\n * @callback DownloadCallback\n * @param err Request error, if any.\n * @param contents The contents of a File.\n */\n /**\n * Convenience method to download a file into memory or to a local\n * destination.\n *\n * @param {object} [options] Configuration options. The arguments match those\n * passed to {@link File#createReadStream}.\n * @param {string} [options.destination] Local file path to write the file's\n * contents to.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DownloadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Download a file into memory. 
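// A minimal sketch of the small-file case called out in the createWriteStream() notes above:
// resumable uploads are disabled to avoid their per-upload overhead, and `gzip: true` stores
// the object with `Content-Encoding: gzip`. The local path and object name are assumptions.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const small = new Storage().bucket('my-bucket').file('notes.txt');
fs.createReadStream('/tmp/notes.txt')
  .pipe(small.createWriteStream({resumable: false, gzip: true}))
  .on('error', err => console.error(err))
  .on('finish', () => {
    // The upload is complete; createReadStream() will transparently gunzip on download.
  });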
The contents will be available as the\n * second\n * // argument in the demonstration below, `contents`.\n * //-\n * file.download(function(err, contents) {});\n *\n * //-\n * // Download a file to a local destination.\n * //-\n * file.download({\n * destination: '/Users/me/Desktop/file-backup.txt'\n * }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.download().then(function(data) {\n * const contents = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_download_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n *\n * @example include:samples/requesterPays.js\n * region_tag:storage_download_file_requester_pays\n * Example of downloading a file where the requester pays:\n */\n download(optionsOrCallback, cb) {\n let options;\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n options = {};\n }\n else {\n options = optionsOrCallback;\n }\n let called = false;\n const callback = ((...args) => {\n if (!called)\n cb(...args);\n called = true;\n });\n const destination = options.destination;\n delete options.destination;\n const fileStream = this.createReadStream(options);\n let receivedData = false;\n if (destination) {\n fileStream\n .on('error', callback)\n .once('data', data => {\n // We know that the file exists the server - now we can truncate/write to a file\n receivedData = true;\n const writable = fs.createWriteStream(destination);\n writable.write(data);\n fileStream\n .pipe(writable)\n .on('error', callback)\n .on('finish', callback);\n })\n .on('end', () => {\n // In the case of an empty file no data will be received before the end event fires\n if (!receivedData) {\n fs.openSync(destination, 'w');\n callback(null, Buffer.alloc(0));\n }\n });\n }\n else {\n this.getBufferFromReadable(fileStream)\n .then(contents => callback === null || callback === void 0 ? 
void 0 : callback(null, contents))\n .catch(callback);\n }\n }\n /**\n * The Storage API allows you to use a custom key for server-side encryption.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {string|buffer} encryptionKey An AES-256 encryption key.\n * @returns {File}\n *\n * @example\n * ```\n * const crypto = require('crypto');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const encryptionKey = crypto.randomBytes(32);\n *\n * const fileWithCustomEncryption = myBucket.file('my-file');\n * fileWithCustomEncryption.setEncryptionKey(encryptionKey);\n *\n * const fileWithoutCustomEncryption = myBucket.file('my-file');\n *\n * fileWithCustomEncryption.save('data', function(err) {\n * // Try to download with the File object that hasn't had\n * // `setEncryptionKey()` called:\n * fileWithoutCustomEncryption.download(function(err) {\n * // We will receive an error:\n * // err.message === 'Bad Request'\n *\n * // Try again with the File object we called `setEncryptionKey()` on:\n * fileWithCustomEncryption.download(function(err, contents) {\n * // contents.toString() === 'data'\n * });\n * });\n * });\n *\n * ```\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n */\n setEncryptionKey(encryptionKey) {\n this.encryptionKey = encryptionKey;\n this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64');\n this.encryptionKeyHash = crypto\n .createHash('sha256')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .update(this.encryptionKeyBase64, 'base64')\n .digest('base64');\n this.encryptionKeyInterceptor = {\n request: reqOpts => {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64;\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryptionKeyHash;\n return reqOpts;\n },\n };\n this.interceptors.push(this.encryptionKeyInterceptor);\n return this;\n }\n /**\n * @typedef {array} GetExpirationDateResponse\n * @property {date} 0 A Date object representing the earliest time this file's\n * retention policy will expire.\n */\n /**\n * @callback GetExpirationDateCallback\n * @param {?Error} err Request error, if any.\n * @param {date} expirationDate A Date object representing the earliest time\n * this file's retention policy will expire.\n */\n /**\n * If this bucket has a retention policy defined, use this method to get a\n * Date object representing the earliest time this file will expire.\n *\n * @param {GetExpirationDateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getExpirationDate(function(err, expirationDate) {\n * // expirationDate is a Date object.\n * });\n * ```\n */\n getExpirationDate(callback) {\n this.getMetadata((err, metadata, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n if (!metadata.retentionExpirationTime) {\n const error = new 
Error(FileExceptionMessages.EXPIRATION_TIME_NA);\n callback(error, null, apiResponse);\n return;\n }\n callback(null, new Date(metadata.retentionExpirationTime), apiResponse);\n });\n }\n /**\n * @typedef {array} GenerateSignedPostPolicyV2Response\n * @property {object} 0 The document policy.\n */\n /**\n * @callback GenerateSignedPostPolicyV2Callback\n * @param {?Error} err Request error, if any.\n * @param {object} policy The document policy.\n */\n /**\n * Get a signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process}\n *\n * @throws {Error} If an expiration timestamp from the past is given.\n * @throws {Error} If options.equals has an array with less or more than two\n * members.\n * @throws {Error} If options.startsWith has an array with less or more than two\n * members.\n *\n * @param {object} options Configuration options.\n * @param {array|array[]} [options.equals] Array of request parameters and\n * their expected value (e.g. [['$', '']]). Values are\n * translated into equality constraints in the conditions field of the\n * policy document (e.g. ['eq', '$', '']). If only one\n * equality condition is to be specified, options.equals can be a one-\n * dimensional array (e.g. ['$', '']).\n * @param {*} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {array|array[]} [options.startsWith] Array of request parameters and\n * their expected prefixes (e.g. [['$', '']). Values are\n * translated into starts-with constraints in the conditions field of the\n * policy document (e.g. ['starts-with', '$', '']). If only\n * one prefix condition is to be specified, options.startsWith can be a\n * one- dimensional array (e.g. 
['$', '']).\n * @param {string} [options.acl] ACL for the object from possibly predefined\n * ACLs.\n * @param {string} [options.successRedirect] The URL to which the user client\n * is redirected if the upload is successful.\n * @param {string} [options.successStatus] - The status of the Google Storage\n * response if the upload is successful (must be string).\n * @param {object} [options.contentLengthRange]\n * @param {number} [options.contentLengthRange.min] Minimum value for the\n * request's content length.\n * @param {number} [options.contentLengthRange.max] Maximum value for the\n * request's content length.\n * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * equals: ['$Content-Type', 'image/jpeg'],\n * expires: '10-25-2022',\n * contentLengthRange: {\n * min: 0,\n * max: 1024\n * }\n * };\n *\n * file.generateSignedPostPolicyV2(options, function(err, policy) {\n * // policy.string: the policy document in plain text.\n * // policy.base64: the policy document in base64.\n * // policy.signature: the policy signature in base64.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV2(options).then(function(data) {\n * const policy = data[0];\n * });\n * ```\n */\n generateSignedPostPolicyV2(optionsOrCallback, cb) {\n const args = (0, util_1.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n options = Object.assign({}, options);\n const conditions = [\n ['eq', '$key', this.name],\n {\n bucket: this.bucket.name,\n },\n ];\n if (Array.isArray(options.equals)) {\n if (!Array.isArray(options.equals[0])) {\n options.equals = [options.equals];\n }\n options.equals.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS);\n }\n conditions.push(['eq', condition[0], condition[1]]);\n });\n }\n if (Array.isArray(options.startsWith)) {\n if (!Array.isArray(options.startsWith[0])) {\n options.startsWith = [options.startsWith];\n }\n options.startsWith.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS);\n }\n conditions.push(['starts-with', condition[0], condition[1]]);\n });\n }\n if (options.acl) {\n conditions.push({\n acl: options.acl,\n });\n }\n if (options.successRedirect) {\n conditions.push({\n success_action_redirect: options.successRedirect,\n });\n }\n if (options.successStatus) {\n conditions.push({\n success_action_status: options.successStatus,\n });\n }\n if (options.contentLengthRange) {\n const min = options.contentLengthRange.min;\n const max = options.contentLengthRange.max;\n if (typeof min !== 'number' || typeof max !== 'number') {\n throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX);\n }\n conditions.push(['content-length-range', min, max]);\n }\n const policy = {\n expiration: expires.toISOString(),\n 
conditions,\n };\n const policyString = JSON.stringify(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n this.storage.authClient.sign(policyBase64).then(signature => {\n callback(null, {\n string: policyString,\n base64: policyBase64,\n signature,\n });\n }, err => {\n callback(new signer_1.SigningError(err.message));\n });\n }\n /**\n * @typedef {object} SignedPostPolicyV4Output\n * @property {string} url The request URL.\n * @property {object} fields The form fields to include in the POST request.\n */\n /**\n * @typedef {array} GenerateSignedPostPolicyV4Response\n * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields.\n */\n /**\n * @callback GenerateSignedPostPolicyV4Callback\n * @param {?Error} err Request error, if any.\n * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields.\n */\n /**\n * Get a v4 signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference}\n *\n * @param {object} options Configuration options.\n * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instead of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in\n * the result, e.g. \"https://cdn.example.com\".\n * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument}\n * to include in the signed policy. Any fields with key beginning with 'x-ignore-'\n * will not be included in the policy to be signed.\n * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document}\n * to include in the signed policy. 
All fields given in `config.fields` are\n * automatically included in the conditions array, adding the same entry\n * in both `fields` and `conditions` will result in duplicate entries.\n *\n * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * expires: '10-25-2022',\n * conditions: [\n * ['eq', '$Content-Type', 'image/jpeg'],\n * ['content-length-range', 0, 1024],\n * ],\n * fields: {\n * acl: 'public-read',\n * 'x-goog-meta-foo': 'bar',\n * 'x-ignore-mykey': 'data'\n * }\n * };\n *\n * file.generateSignedPostPolicyV4(options, function(err, response) {\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV4(options).then(function(data) {\n * const response = data[0];\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n * ```\n */\n generateSignedPostPolicyV4(optionsOrCallback, cb) {\n const args = (0, util_1.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n options = Object.assign({}, options);\n let fields = Object.assign({}, options.fields);\n const now = new Date();\n const nowISO = (0, util_1.formatAsUTCISO)(now, true);\n const todayISO = (0, util_1.formatAsUTCISO)(now);\n const sign = async () => {\n const { client_email } = await this.storage.authClient.getCredentials();\n const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`;\n fields = {\n ...fields,\n bucket: this.bucket.name,\n key: this.name,\n 'x-goog-date': nowISO,\n 'x-goog-credential': credential,\n 'x-goog-algorithm': 'GOOG4-RSA-SHA256',\n };\n const conditions = options.conditions || [];\n Object.entries(fields).forEach(([key, value]) => {\n if (!key.startsWith('x-ignore-')) {\n conditions.push({ [key]: value });\n }\n });\n delete fields.bucket;\n const expiration = (0, util_1.formatAsUTCISO)(expires, true, '-', ':');\n const policy = {\n conditions,\n expiration,\n };\n const policyString = (0, util_1.unicodeJSONStringify)(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n try {\n const signature = await this.storage.authClient.sign(policyBase64);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n fields['policy'] = policyBase64;\n fields['x-goog-signature'] = signatureHex;\n let url;\n if (options.virtualHostedStyle) {\n url = `https://${this.bucket.name}.storage.googleapis.com/`;\n }\n else if (options.bucketBoundHostname) {\n url = `${options.bucketBoundHostname}/`;\n }\n else {\n url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`;\n }\n return 
{\n url,\n fields,\n };\n }\n catch (err) {\n throw new signer_1.SigningError(err.message);\n }\n };\n sign().then(res => callback(null, res), callback);\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to the file.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {object} config Configuration object.\n * @param {string} config.action \"read\" (HTTP: GET), \"write\" (HTTP: PUT), or\n * \"delete\" (HTTP: DELETE), \"resumable\" (HTTP: POST).\n * When using \"resumable\", the header `X-Goog-Resumable: start` has\n * to be sent when making a request with the signed URL.\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like\n * if you provide this, the client must provide this HTTP header with this same\n * value in its request, so to if this parameter is not provided here,\n * the client must not provide any value for this HTTP header in its request.\n * @param {string} [config.contentType] Just like if you provide this, the client\n * must provide this HTTP header with this same value in its request, so to if\n * this parameter is not provided here, the client must not provide any value\n * for this HTTP header in its request.\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. 
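// A minimal sketch of consuming the output of generateSignedPostPolicyV4() documented above:
// the resolved object carries `url` and `fields`, and every field (including the signature)
// must accompany the POST. Using global FormData/fetch (Node 18+) and sending the payload as
// the trailing 'file' field are assumptions about the caller, not requirements of this code.
const {Storage} = require('@google-cloud/storage');
const target = new Storage().bucket('my-bucket').file('my-file');
target.generateSignedPostPolicyV4({expires: Date.now() + 10 * 60 * 1000}).then(async ([policy]) => {
  const form = new FormData();
  for (const [name, value] of Object.entries(policy.fields)) form.append(name, value);
  form.append('file', 'hello world'); // the object payload
  await fetch(policy.url, {method: 'POST', body: form});
});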
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @param {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {string} [config.promptSaveAs] The filename to prompt the user to\n * save the file as when the signed url is accessed. This is ignored if\n * `config.responseDisposition` is set.\n * @param {string} [config.responseDisposition] The\n * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the\n * signed url.\n * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value\n * given is passed to `new Date()`.\n * Note: Use for 'v4' only.\n * @param {string} [config.responseType] The response-content-type parameter\n * of the signed url.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to read from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * // Access will begin at accessibleAt and end at expires.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * accessibleAt: '03-13-2025'\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL to allow write permissions. 
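// A minimal sketch of a v4 read URL built with getSignedUrl() as configured above. The JSDoc
// examples use the `request` package; fetching the URL with Node 18+'s global fetch() is an
// assumption here, as are the bucket and object names.
const {Storage} = require('@google-cloud/storage');
const asset = new Storage().bucket('my-bucket').file('my-file');
asset
  .getSignedUrl({version: 'v4', action: 'read', expires: Date.now() + 15 * 60 * 1000})
  .then(async ([url]) => {
    const resp = await fetch(url); // anyone holding the URL can read until it expires
    console.log(resp.status);
  });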
This means anyone with this\n * URL\n * // can send a POST request with new data that will overwrite the file.\n * //-\n * file.getSignedUrl({\n * action: 'write',\n * expires: '03-17-2025'\n * }, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to be written to.\n * const writeStream = request.put(url);\n * writeStream.end('New data');\n *\n * writeStream.on('complete', function(resp) {\n * // Confirm the new content was saved.\n * file.download(function(err, fileContents) {\n * console.log('Contents:', fileContents.toString());\n * // Contents: New data\n * });\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_generate_signed_url\n * Another example:\n */\n getSignedUrl(cfg, callback) {\n const method = ActionToHTTPMethod[cfg.action];\n if (!method) {\n throw new Error(storage_1.ExceptionMessages.INVALID_ACTION);\n }\n const extensionHeaders = (0, util_1.objectKeyToLowercase)(cfg.extensionHeaders || {});\n if (cfg.action === 'resumable') {\n extensionHeaders['x-goog-resumable'] = 'start';\n }\n const queryParams = Object.assign({}, cfg.queryParams);\n if (typeof cfg.responseType === 'string') {\n queryParams['response-content-type'] = cfg.responseType;\n }\n if (typeof cfg.promptSaveAs === 'string') {\n queryParams['response-content-disposition'] =\n 'attachment; filename=\"' + cfg.promptSaveAs + '\"';\n }\n if (typeof cfg.responseDisposition === 'string') {\n queryParams['response-content-disposition'] = cfg.responseDisposition;\n }\n if (this.generation) {\n queryParams['generation'] = this.generation.toString();\n }\n const signConfig = {\n method,\n expires: cfg.expires,\n accessibleAt: cfg.accessibleAt,\n extensionHeaders,\n queryParams,\n contentMd5: cfg.contentMd5,\n contentType: cfg.contentType,\n };\n if (cfg.cname) {\n signConfig.cname = cfg.cname;\n }\n if (cfg.version) {\n signConfig.version = cfg.version;\n }\n if (cfg.virtualHostedStyle) {\n signConfig.virtualHostedStyle = cfg.virtualHostedStyle;\n }\n if (!this.signer) {\n this.signer = new signer_1.URLSigner(this.storage.authClient, this.bucket, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback IsPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} resp Whether file is public or not.\n */\n /**\n * @typedef {array} IsPublicResponse\n * @property {boolean} 0 Whether file is public or not.\n */\n /**\n * Check whether this file is public or not by sending\n * a HEAD request without credentials.\n * No errors from the server indicates that the current\n * file is public.\n * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden}\n * indicates that file is private.\n * Any other non 403 error is propagated to user.\n *\n * @param {IsPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Check whether the file is publicly accessible.\n * //-\n * file.isPublic(function(err, resp) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * console.log(`the 
file ${file.id} is public: ${resp}`) ;\n * })\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.isPublic().then(function(data) {\n * const resp = data[0];\n * });\n * ```\n */\n isPublic(callback) {\n var _a;\n // Build any custom headers based on the defined interceptors on the parent\n // storage object and this object\n const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || [];\n const fileInterceptors = this.interceptors || [];\n const allInterceptors = storageInterceptors.concat(fileInterceptors);\n const headers = allInterceptors.reduce((acc, curInterceptor) => {\n const currentHeaders = curInterceptor.request({\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n });\n Object.assign(acc, currentHeaders.headers);\n return acc;\n }, {});\n nodejs_common_1.util.makeRequest({\n method: 'GET',\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n headers,\n }, {\n retryOptions: this.storage.retryOptions,\n }, (err) => {\n if (err) {\n const apiError = err;\n if (apiError.code === 403) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n }\n else {\n callback(null, true);\n }\n });\n }\n /**\n * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate().\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [strict] If true, set the file to be private to\n * only the owner user. Otherwise, it will be private to the project.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback MakeFilePrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} MakeFilePrivateResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Make a file private to the project and remove all other permissions.\n * Set `options.strict` to true to make the file private to only the owner.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @param {MakeFilePrivateOptions} [options] Configuration options.\n * @param {MakeFilePrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Set the file private so only project maintainers can see and modify it.\n * //-\n * file.makePrivate(function(err) {});\n *\n * //-\n * // Set the file private so only the owner can see and modify it.\n * //-\n * file.makePrivate({ strict: true }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePrivate().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const query = {\n predefinedAcl: options.strict ? 'private' : 'projectPrivate',\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n };\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifMetagenerationMatch) !== undefined) {\n query.ifMetagenerationMatch =\n (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch;\n delete options.preconditionOpts;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a file,\n // so acl must explicitly be nullified, destroying all previous acls on the\n // file.\n const metadata = extend({}, options.metadata, { acl: null });\n this.setMetadata(metadata, query, callback);\n }\n /**\n * @typedef {array} MakeFilePublicResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback MakeFilePublicCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set a file to be publicly readable and maintain all previous permissions.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {MakeFilePublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.makePublic(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePublic().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_make_public\n * Another example:\n */\n makePublic(callback) {\n callback = callback || nodejs_common_1.util.noop;\n this.acl.add({\n entity: 'allUsers',\n role: 'READER',\n }, (err, acl, resp) => {\n callback(err, resp);\n });\n }\n /**\n * The public URL of this File\n * Use {@link File#makePublic} to enable anonymous access via the returned URL.\n *\n * @returns {string}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * // publicUrl will be \"https://storage.googleapis.com/albums/my-file\"\n * const publicUrl = file.publicUrl();\n * ```\n */\n publicUrl() {\n return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`;\n }\n /**\n * @typedef {array} MoveResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback MoveCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} MoveOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Move this file to another location. 
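// A minimal sketch pairing makePublic() with publicUrl(), both documented above. The bucket
// and object names mirror the JSDoc example; publicUrl() performs no request, it only formats
// `${apiEndpoint}/${bucket}/${encodeURIComponent(name)}` as in the implementation above.
const {Storage} = require('@google-cloud/storage');
const photo = new Storage().bucket('albums').file('my-file');
photo.makePublic().then(() => {
  // Anonymous reads are now allowed via the public URL.
  console.log(photo.publicUrl()); // e.g. https://storage.googleapis.com/albums/my-file
});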
By default, this will rename the file\n * and keep it in the same bucket, but you can choose to move it to another\n * Bucket by providing a Bucket or File object or a URL beginning with\n * \"gs://\".\n *\n * **Warning**:\n * There is currently no atomic `move` method in the Cloud Storage API,\n * so this method is a composition of {@link File#copy} (to the new\n * location) and {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. The error message will indicate what operation\n * has failed.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {MoveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is moved to its\n * // current bucket, under the new name provided.\n * //-\n * file.move('my-image-new.png', function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-new.png';\n * file.move(newLocation, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be moved to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = gcs.bucket('another-bucket');\n *\n * file.move(anotherBucket, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.move(anotherFile, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `destinationFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * 
file.move('my-image-new.png').then(function(data) {\n * const destinationFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_move_file\n * Another example:\n */\n move(destination, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || nodejs_common_1.util.noop;\n this.copy(destination, options, (err, destinationFile, copyApiResponse) => {\n if (err) {\n err.message = 'file#copy failed with an error - ' + err.message;\n callback(err, null, copyApiResponse);\n return;\n }\n if (this.name !== destinationFile.name ||\n this.bucket.name !== destinationFile.bucket.name) {\n this.delete(options, (err, apiResponse) => {\n if (err) {\n err.message = 'file#delete failed with an error - ' + err.message;\n callback(err, destinationFile, apiResponse);\n return;\n }\n callback(null, destinationFile, copyApiResponse);\n });\n }\n else {\n callback(null, destinationFile, copyApiResponse);\n }\n });\n }\n /**\n * @typedef {array} RenameResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback RenameCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} RenameOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Rename this file.\n *\n * **Warning**:\n * There is currently no atomic `rename` method in the Cloud Storage API,\n * so this method is an alias of {@link File#move}, which in turn is a\n * composition of {@link File#copy} (to the new location) and\n * {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. 
The error message will indicate what operation\n * has failed.\n *\n * @param {string|File} destinationFile Destination file.\n * @param {RenameCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a string or a File object.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n *\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // You can pass in a string for the destinationFile.\n * //-\n * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"renamed-image.png\"\n *\n * // `renamedFile` is an instance of a File object that refers to your\n * // renamed file.\n * });\n *\n * //-\n * // You can pass in a File object.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.rename(anotherFile, function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n *\n * // Note:\n * // The `renamedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.rename('my-renamed-image.png').then(function(data) {\n * const renamedFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n rename(destinationFile, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || nodejs_common_1.util.noop;\n this.move(destinationFile, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n return this.parent.request.call(this, reqOpts, callback);\n }\n /**\n * @callback RotateEncryptionKeyCallback\n * @extends CopyCallback\n */\n /**\n * @typedef RotateEncryptionKeyResponse\n * @extends CopyResponse\n */\n /**\n * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options\n * for File#rotateEncryptionKey().\n * If a string or Buffer is provided, it is interpreted as an AES-256,\n * customer-supplied encryption key. If you'd like to use a Cloud KMS key\n * name, you must specify an options object with the property name:\n * `kmsKeyName`.\n * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key.\n * @param {string} [options.kmsKeyName] A Cloud KMS key name.\n */\n /**\n * This method allows you to update the encryption key associated with this\n * file.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {RotateEncryptionKeyOptions} [options] - Configuration options.\n * @param {RotateEncryptionKeyCallback} [callback]\n * @returns {Promise}\n *\n * @example include:samples/encryption.js\n * region_tag:storage_rotate_encryption_key\n * Example of rotating the encryption key for this file:\n */\n rotateEncryptionKey(optionsOrCallback, callback) {\n var _a;\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n let options = {};\n if (typeof optionsOrCallback === 'string' ||\n optionsOrCallback instanceof Buffer) {\n options = {\n encryptionKey: optionsOrCallback,\n };\n }\n else if (typeof optionsOrCallback === 'object') {\n options = optionsOrCallback;\n }\n const newFile = this.bucket.file(this.id, options);\n const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined\n ? { preconditionOpts: options.preconditionOpts }\n : {};\n this.copy(newFile, copyOptions, callback);\n }\n /**\n * @typedef {object} SaveOptions\n * @extends CreateWriteStreamOptions\n */\n /**\n * @callback SaveCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Write strings or buffers to a file.\n *\n * *This is a convenience method which wraps {@link File#createWriteStream}.*\n * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff.\n *\n *
\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n *
\n *\n * @param {string | Buffer} data The data to write to a file.\n * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options`\n * parameter.\n * @param {SaveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const contents = 'This is the contents of the file.';\n *\n * file.save(contents, function(err) {\n * if (!err) {\n * // File written successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.save(contents).then(function() {});\n * ```\n */\n save(data, optionsOrCallback, callback) {\n // tslint:enable:no-any\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n let maxRetries = this.storage.retryOptions.maxRetries;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n maxRetries = 0;\n }\n const returnValue = retry(async (bail) => {\n await new Promise((resolve, reject) => {\n if (maxRetries === 0) {\n this.storage.retryOptions.autoRetry = false;\n }\n const writable = this.createWriteStream(options)\n .on('error', err => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n else {\n return bail(err);\n }\n })\n .on('finish', () => {\n return resolve();\n });\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n writable.end(data);\n });\n }, {\n retries: maxRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback();\n }\n })\n .catch(callback);\n }\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_1.AvailableServiceObjectMethods.setMetadata, options);\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} SetStorageClassResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetStorageClassCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set the storage class for this file.\n *\n * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. 
(`standard`,\n * `nearline`, `coldline`, or `archive`)\n * **Note:** The storage classes `multi_regional` and `regional`\n * are now legacy and will be deprecated in the future.\n * @param {SetStorageClassOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * file.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n const req = extend(true, {}, options);\n // In case we get input like `storageClass`, convert to `storage_class`.\n req.storageClass = storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase();\n this.copy(this, req, (err, file, apiResponse) => {\n if (err) {\n callback(err, apiResponse);\n return;\n }\n this.metadata = file.metadata;\n callback(null, apiResponse);\n });\n }\n /**\n * Set a user project to be billed for all requests made from this File\n * object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * file.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.bucket.setUserProject.call(this, userProject);\n }\n /**\n * This creates a resumable-upload upload stream.\n *\n * @param {Duplexify} stream - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startResumableUpload_(dup, options) {\n options = extend(true, {\n metadata: {},\n }, options);\n const retryOptions = this.storage.retryOptions;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n retryOptions.autoRetry = false;\n }\n const uploadStream = resumableUpload.upload({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n uri: options.uri,\n userProject: options.userProject || this.userProject,\n retryOptions: { ...retryOptions },\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n chunkSize: options === null || options === void 0 ? 
void 0 : options.chunkSize,\n });\n uploadStream\n .on('response', resp => {\n dup.emit('response', resp);\n })\n .on('metadata', metadata => {\n this.metadata = metadata;\n dup.emit('metadata');\n })\n .on('finish', () => {\n dup.emit('complete');\n })\n .on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(uploadStream);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * Takes a readable stream and pipes it to a remote file. Unlike\n * `startResumableUpload_`, which uses the resumable upload technique, this\n * method uses a simple upload (all or nothing).\n *\n * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startSimpleUpload_(dup, options) {\n options = extend(true, {\n metadata: {},\n }, options);\n const apiEndpoint = this.storage.apiEndpoint;\n const bucketName = this.bucket.name;\n const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`;\n const reqOpts = {\n qs: {\n name: this.name,\n },\n uri: uri,\n };\n if (this.generation !== undefined) {\n reqOpts.qs.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName !== undefined) {\n reqOpts.qs.kmsKeyName = this.kmsKeyName;\n }\n if (typeof options.timeout === 'number') {\n reqOpts.timeout = options.timeout;\n }\n if (options.userProject || this.userProject) {\n reqOpts.qs.userProject = options.userProject || this.userProject;\n }\n if (options.predefinedAcl) {\n reqOpts.qs.predefinedAcl = options.predefinedAcl;\n }\n else if (options.private) {\n reqOpts.qs.predefinedAcl = 'private';\n }\n else if (options.public) {\n reqOpts.qs.predefinedAcl = 'publicRead';\n }\n Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts);\n nodejs_common_1.util.makeWritableStream(dup, {\n makeAuthenticatedRequest: (reqOpts) => {\n this.request(reqOpts, (err, body, resp) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n this.metadata = body;\n dup.emit('metadata', body);\n dup.emit('response', resp);\n dup.emit('complete');\n });\n },\n metadata: options.metadata,\n request: reqOpts,\n });\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, methodType, localPreconditionOptions) {\n var _a, _b, _c, _d;\n if ((typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined &&\n methodType === bucket_1.AvailableServiceObjectMethods.delete &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n if ((typeof coreOpts === 'object' &&\n ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n methodType === bucket_1.AvailableServiceObjectMethods.setMetadata &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n async getBufferFromReadable(readable) {\n const buf = [];\n for await (const chunk of readable) {\n buf.push(chunk);\n }\n return Buffer.concat(buf);\n }\n}\nexports.File = File;\n_File_instances = new WeakSet(), _File_validateIntegrity = \n/**\n *\n * @param hashCalculatingStream\n * @param verify\n * @returns {boolean} Returns `true` if valid, throws with error otherwise\n */\nasync function _File_validateIntegrity(hashCalculatingStream, verify = {}) {\n const metadata = this.metadata;\n // If we're doing validation, assume the worst\n let dataMismatch = !!(verify.crc32c || verify.md5);\n if (verify.crc32c && metadata.crc32c) {\n dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c);\n }\n if (verify.md5 && metadata.md5Hash) {\n dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash);\n }\n if (dataMismatch) {\n const errors = [];\n let code = '';\n let message = '';\n try {\n await this.delete();\n if (verify.md5 && !metadata.md5Hash) {\n code = 'MD5_NOT_AVAILABLE';\n message = FileExceptionMessages.MD5_NOT_AVAILABLE;\n }\n else {\n code = 'FILE_NO_UPLOAD';\n message = FileExceptionMessages.UPLOAD_MISMATCH;\n }\n }\n catch (e) {\n const error = e;\n code = 'FILE_NO_UPLOAD_DELETE';\n message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`;\n errors.push(error);\n }\n const error = new RequestError(message);\n error.code = code;\n error.errors = errors;\n throw error;\n }\n return true;\n};\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(File, {\n exclude: [\n 'cloudStorageURI',\n 'publicUrl',\n 'request',\n 'save',\n 'setEncryptionKey',\n 'shouldRetryBasedOnPreconditionAndIdempotencyStrat',\n 'getBufferFromReadable',\n ],\n});\n//# sourceMappingURL=file.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HashStreamValidator = void 0;\nconst crypto_1 = require(\"crypto\");\nconst stream_1 = require(\"stream\");\nconst crc32c_1 = require(\"./crc32c\");\nconst file_1 = require(\"./file\");\nclass HashStreamValidator extends stream_1.Transform {\n constructor(options = {}) {\n super();\n this.updateHashesOnly = false;\n _HashStreamValidator_crc32cHash.set(this, undefined);\n _HashStreamValidator_md5Hash.set(this, undefined);\n _HashStreamValidator_md5Digest.set(this, '');\n this.crc32cEnabled = !!options.crc32c;\n this.md5Enabled = !!options.md5;\n this.updateHashesOnly = !!options.updateHashesOnly;\n this.crc32cExpected = options.crc32cExpected;\n this.md5Expected = options.md5Expected;\n if (this.crc32cEnabled) {\n const crc32cGenerator = options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), \"f\");\n }\n if (this.md5Enabled) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), \"f\");\n }\n }\n _flush(callback) {\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").digest('base64'), \"f\");\n }\n if (this.updateHashesOnly) {\n callback();\n return;\n }\n // If we're doing validation, assume the worst-- a data integrity\n // mismatch. If not, these tests won't be performed, and we can assume\n // the best.\n // We must check if the server decompressed the data on serve because hash\n // validation is not possible in this case.\n let failed = this.crc32cEnabled || this.md5Enabled;\n if (this.crc32cEnabled && this.crc32cExpected) {\n failed = !this.test('crc32c', this.crc32cExpected);\n }\n if (this.md5Enabled && this.md5Expected) {\n failed = !this.test('md5', this.md5Expected);\n }\n if (failed) {\n const mismatchError = new file_1.RequestError(file_1.FileExceptionMessages.DOWNLOAD_MISMATCH);\n mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';\n callback(mismatchError);\n }\n else {\n callback();\n }\n }\n _transform(chunk, encoding, callback) {\n this.push(chunk, encoding);\n try {\n if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").update(chunk);\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").update(chunk);\n callback();\n }\n catch (e) {\n callback(e);\n }\n }\n test(hash, sum) {\n const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum;\n if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").validate(check);\n }\n if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, \"f\") === check;\n }\n return false;\n }\n}\nexports.HashStreamValidator = HashStreamValidator;\n_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap();\n//# sourceMappingURL=hash-stream-validator.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HmacKey = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst storage_1 = require(\"./storage\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the HMAC key.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name HmacKey#metadata\n * @type {object}\n */\n/**\n * An HmacKey object contains metadata of an HMAC key created from a\n * service account through the {@link Storage} client using\n * {@link Storage#createHmacKey}.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @class\n */\nclass HmacKey extends nodejs_common_1.ServiceObject {\n /**\n * @typedef {object} HmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. 
If not provided,\n * the project ID used to instantiate the Storage client will be used.\n */\n /**\n * Constructs an HmacKey object.\n *\n * Note: this only create a local reference to an HMAC key, to create\n * an HMAC key, use {@link Storage#createHmacKey}.\n *\n * @param {Storage} storage The Storage instance this HMAC key is\n * attached to.\n * @param {string} accessId The unique accessId for this HMAC key.\n * @param {HmacKeyOptions} options Constructor configurations.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('access-id');\n * ```\n */\n constructor(storage, accessId, options) {\n const methods = {\n /**\n * @typedef {object} DeleteHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {array} DeleteHmacKeyResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Deletes an HMAC key.\n * Key state must be set to `INACTIVE` prior to deletion.\n * Caution: HMAC keys cannot be recovered once you delete them.\n *\n * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists.\n *\n * @method HmacKey#delete\n * @param {DeleteHmacKeyOptions} [options] Configuration options.\n * @param {DeleteHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Delete HMAC key after making the key inactive.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * hmacKey.delete((err) => {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * // The HMAC key is deleted.\n * });\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey\n * .setMetadata({state: 'INACTIVE'})\n * .then(() => {\n * return hmacKey.delete();\n * });\n * ```\n */\n delete: true,\n /**\n * @callback GetHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey this {@link HmacKey} instance.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetHmacKeyResponse\n * @property {HmacKey} 0 This {@link HmacKey} instance.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} GetHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * this {@link HmacKey} instance.\n *\n * HmacKey.get() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#get\n * @param {GetHmacKeyOptions} [options] Configuration options.\n * @param {GetHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's Metadata.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * 
.get((err, hmacKey) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * // do something with the returned HmacKey object.\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .get()\n * .then((data) => {\n * const hmacKey = data[0];\n * });\n * ```\n */\n get: true,\n /**\n * @typedef {object} GetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * the HMAC key's metadata as an object.\n *\n * HmacKey.getMetadata() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#getMetadata\n * @param {GetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's metadata and populate to the metadata property.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata((err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata()\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n getMetadata: true,\n /**\n * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update.\n * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'.\n * @property {string} [etag] Include an etag from a previous get HMAC key request\n * to perform safe read-modify-write.\n */\n /**\n * @typedef {object} SetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @callback HmacKeyMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} HmacKeyMetadataResponse\n * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object.\n * @property {object} 1 The full API response.\n */\n /**\n * Updates the state of an HMAC key. 
See {@link SetHmacKeyMetadata} for\n * valid states.\n *\n * @method HmacKey#setMetadata\n * @param {SetHmacKeyMetadata} metadata The new metadata.\n * @param {SetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * const metadata = {\n * state: 'INACTIVE',\n * };\n *\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata)\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n method: 'PUT',\n },\n },\n };\n const projectId = (options && options.projectId) || storage.projectId;\n super({\n parent: storage,\n id: accessId,\n baseUrl: `/projects/${projectId}/hmacKeys`,\n methods,\n });\n this.storage = storage;\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways\n if (this.storage.retryOptions.idempotencyStrategy !==\n storage_1.IdempotencyStrategy.RetryAlways) {\n this.storage.retryOptions.autoRetry = false;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n}\nexports.HmacKey = HmacKey;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(HmacKey);\n//# sourceMappingURL=hmacKey.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Iam = exports.IAMExceptionMessages = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst util_1 = require(\"./util\");\nvar IAMExceptionMessages;\n(function (IAMExceptionMessages) {\n IAMExceptionMessages[\"POLICY_OBJECT_REQUIRED\"] = \"A policy object is required.\";\n IAMExceptionMessages[\"PERMISSIONS_REQUIRED\"] = \"Permissions are required.\";\n})(IAMExceptionMessages = exports.IAMExceptionMessages || (exports.IAMExceptionMessages = {}));\n/**\n * Get and set IAM policies for your Cloud Storage bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @constructor Iam\n *\n * @param {Bucket} bucket The parent instance.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * // bucket.iam\n * ```\n */\nclass Iam {\n constructor(bucket) {\n this.request_ = bucket.request.bind(bucket);\n this.resourceId_ = 'buckets/' + bucket.getId();\n }\n /**\n * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy().\n * @property {number} [requestedPolicyVersion] The version of IAM policies to\n * request. If a policy with a condition is requested without setting\n * this, the server will return an error. This must be set to a value\n * of 3 to retrieve IAM policies containing conditions. This is to\n * prevent client code that isn't aware of IAM conditions from\n * interpreting and modifying policies incorrectly. 
The service might\n * return a policy with version lower than the one that was requested,\n * based on the feature syntax in the policy fetched.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetPolicyResponse\n * @property {Policy} 0 The policy.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} Policy\n * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles.\n * @property {string} [policy.etag] Etags are used to perform a read-modify-write.\n * @property {number} [policy.version] The syntax schema version of the Policy.\n * To set an IAM policy with conditional binding, this field must be set to\n * 3 or greater.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n */\n /**\n * @typedef {object} PolicyBinding\n * @property {string} role Role that is assigned to members.\n * @property {string[]} members Specifies the identities requesting access for the bucket.\n * @property {Expr} [condition] The condition that is associated with this binding.\n */\n /**\n * @typedef {object} Expr\n * @property {string} [title] An optional title for the expression, i.e. a\n * short string describing its purpose. This can be used e.g. in UIs\n * which allow to enter the expression.\n * @property {string} [description] An optional description of the\n * expression. This is a longer text which describes the expression,\n * e.g. when hovered over it in a UI.\n * @property {string} expression Textual representation of an expression in\n * Common Expression Language syntax. The application context of the\n * containing message determines which well-known feature set of CEL\n * is supported.The condition that is associated with this binding.\n *\n * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions\n */\n /**\n * Get the IAM policy.\n *\n * @param {GetPolicyOptions} [options] Request options.\n * @param {GetPolicyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.iam.getPolicy(\n * {requestedPolicyVersion: 3},\n * function(err, policy, apiResponse) {\n *\n * },\n * );\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy({requestedPolicyVersion: 3})\n * .then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n */\n getPolicy(optionsOrCallback, callback) {\n const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback);\n const qs = {};\n if (options.userProject) {\n qs.userProject = options.userProject;\n }\n if (options.requestedPolicyVersion !== null &&\n options.requestedPolicyVersion !== undefined) {\n qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion;\n }\n this.request_({\n uri: '/iam',\n qs,\n }, cb);\n }\n /**\n * Set the IAM policy.\n *\n * @throws {Error} If no policy is provided.\n *\n * @param {Policy} policy The 
policy.\n * @param {SetPolicyOptions} [options] Configuration options.\n * @param {SetPolicyCallback} callback Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const myPolicy = {\n * bindings: [\n * {\n * role: 'roles/storage.admin',\n * members:\n * ['serviceAccount:myotherproject@appspot.gserviceaccount.com']\n * }\n * ]\n * };\n *\n * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.setPolicy(myPolicy).then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n setPolicy(policy, optionsOrCallback, callback) {\n if (policy === null || typeof policy !== 'object') {\n throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED);\n }\n const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback);\n let maxRetries;\n if (policy.etag === undefined) {\n maxRetries = 0;\n }\n this.request_({\n method: 'PUT',\n uri: '/iam',\n maxRetries,\n json: Object.assign({\n resourceId: this.resourceId_,\n }, policy),\n qs: options,\n }, cb);\n }\n /**\n * Test a set of permissions for a resource.\n *\n * @throws {Error} If permissions are not provided.\n *\n * @param {string|string[]} permissions The permission(s) to test for.\n * @param {TestIamPermissionsOptions} [options] Configuration object.\n * @param {TestIamPermissionsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * //-\n * // Test a single permission.\n * //-\n * const test = 'storage.buckets.delete';\n *\n * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": true\n * // }\n * });\n *\n * //-\n * // Test several permissions at once.\n * //-\n * const tests = [\n * 'storage.buckets.delete',\n * 'storage.buckets.get'\n * ];\n *\n * bucket.iam.testPermissions(tests, function(err, permissions) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": false,\n * // \"storage.buckets.get\": true\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.testPermissions(test).then(function(data) {\n * const permissions = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n testPermissions(permissions, optionsOrCallback, callback) {\n if (!Array.isArray(permissions) && typeof permissions !== 'string') {\n throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED);\n }\n const { options, callback: cb } = (0, 
util_1.normalize)(optionsOrCallback, callback);\n const permissionsArray = Array.isArray(permissions)\n ? permissions\n : [permissions];\n const req = Object.assign({\n permissions: permissionsArray,\n }, options);\n this.request_({\n uri: '/iam/testPermissions',\n qs: req,\n useQuerystring: true,\n }, (err, resp) => {\n if (err) {\n cb(err, null, resp);\n return;\n }\n const availablePermissions = Array.isArray(resp.permissions)\n ? resp.permissions\n : [];\n const permissionsHash = permissionsArray.reduce((acc, permission) => {\n acc[permission] = availablePermissions.indexOf(permission) > -1;\n return acc;\n }, {});\n cb(null, permissionsHash, resp);\n });\n }\n}\nexports.Iam = Iam;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Iam);\n//# sourceMappingURL=iam.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Storage = exports.IdempotencyStrategy = exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = void 0;\nvar bucket_1 = require(\"./bucket\");\nObject.defineProperty(exports, \"Bucket\", { enumerable: true, get: function () { return bucket_1.Bucket; } });\n__exportStar(require(\"./crc32c\"), exports);\nvar channel_1 = require(\"./channel\");\nObject.defineProperty(exports, \"Channel\", { enumerable: true, get: function () { return channel_1.Channel; } });\nvar file_1 = require(\"./file\");\nObject.defineProperty(exports, \"File\", { enumerable: true, get: function () { return file_1.File; } });\n__exportStar(require(\"./hash-stream-validator\"), exports);\nvar hmacKey_1 = require(\"./hmacKey\");\nObject.defineProperty(exports, \"HmacKey\", { enumerable: true, get: function () { return hmacKey_1.HmacKey; } });\nvar iam_1 = require(\"./iam\");\nObject.defineProperty(exports, \"Iam\", { enumerable: true, get: function () { return iam_1.Iam; } });\nvar notification_1 = require(\"./notification\");\nObject.defineProperty(exports, \"Notification\", { enumerable: true, get: function () { return notification_1.Notification; } });\nvar storage_1 = require(\"./storage\");\nObject.defineProperty(exports, 
\"IdempotencyStrategy\", { enumerable: true, get: function () { return storage_1.IdempotencyStrategy; } });\nObject.defineProperty(exports, \"Storage\", { enumerable: true, get: function () { return storage_1.Storage; } });\n__exportStar(require(\"./transfer-manager\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0;\nvar service_1 = require(\"./service\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_1.Service; } });\nvar service_object_1 = require(\"./service-object\");\nObject.defineProperty(exports, \"ServiceObject\", { enumerable: true, get: function () { return service_object_1.ServiceObject; } });\nvar util_1 = require(\"./util\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_1.ApiError; } });\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_1.util; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst events_1 = require(\"events\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. 
For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. a \"File\") to only have\n // the configured methods. We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n if (self.id && instance.metadata) {\n self.id = instance.metadata.id;\n }\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = extend(true, {\n method: 'DELETE',\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(err, ...args);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = extend(true, {\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = extend(true, {}, {\n method: 'PATCH',\n uri: '',\n }, methodConfig.reqOpts, {\n json: metadata,\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts);\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = Array.isArray(reqOpts.interceptors_)\n ? reqOpts.interceptors_\n : [];\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n//# sourceMappingURL=service-object.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst extend = require(\"extend\");\nconst uuid = require(\"uuid\");\nconst util_1 = require(\"./util\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = Array.isArray(options.interceptors_)\n ? 
options.interceptors_\n : [];\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired !== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = extend({}, config, {\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n });\n this.makeAuthenticatedRequest =\n util_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout });\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n const interceptorArray = Array.isArray(reqOpts.interceptors_)\n ? 
reqOpts.interceptors_\n : [];\n interceptorArray.forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = util_1.util.getUserAgentFromPackageJson(pkg);\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = extend({}, reqOpts.headers, {\n 'User-Agent': userAgent,\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`,\n });\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n//# sourceMappingURL=service.js.map","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = require(\"ent\");\nconst extend = require(\"extend\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retryRequest = require(\"retry-request\");\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst uuid = require(\"uuid\");\nconst service_1 = require(\"./service\");\nconst packageJson = require('../../../package.json');\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error 
object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body));\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} 
parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = extend(true, defaultReqOpts, options.request, {\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n });\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n requestDefaults.headers = util._getDefaultHeaders();\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be 
retried, given the error that was\n * given the first time the request was attempted. This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = extend({}, config);\n if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = extend({}, config);\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = duplexify();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries !== undefined) {\n maxRetryValue = config.maxRetries;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n requestDefaults.headers = this._getDefaultHeaders();\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay,\n retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier,\n totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n options.noResponseRetries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return retryRequest(reqOpts, options, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = retryRequest(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Create a properly-formatted User-Agent string from a package.json file.\n *\n * @param {object} packageJson - A module's package.json file.\n * @return {string} userAgent - The formatted User-Agent string.\n */\n getUserAgentFromPackageJson(packageJson) {\n const hyphenatedPackageName = packageJson.name\n .replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + packageJson.version;\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a 
callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? [{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n _getDefaultHeaders() {\n return {\n 'User-Agent': util.getUserAgentFromPackageJson(packageJson),\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`,\n };\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n//# sourceMappingURL=util.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Notification = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the notification.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Notification#metadata\n * @type {object}\n */\n/**\n * A Notification object is created from your {@link Bucket} object using\n * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub\n * notifications.\n *\n * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage}\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket The bucket instance this notification is attached to.\n * @param {string} id The ID of the notification.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const notification = myBucket.notification('1');\n * ```\n */\nclass Notification extends nodejs_common_1.ServiceObject {\n constructor(bucket, id) {\n const methods = {\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n * @method Notification#create\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. 
If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationRequest} [options] Metadata to set for\n * the notification.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.create(function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.create().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: true,\n /**\n * @typedef {array} NotificationExistsResponse\n * @property {boolean} 0 Whether the notification exists or not.\n */\n /**\n * @callback NotificationExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the notification exists or not.\n */\n /**\n * Check if the notification exists.\n *\n * @method Notification#exists\n * @param {NotificationExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: true,\n };\n super({\n parent: bucket,\n baseUrl: '/notificationConfigs',\n id: id.toString(),\n createMethod: bucket.createNotification.bind(bucket),\n methods,\n });\n }\n /**\n * @typedef {array} DeleteNotificationResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Permanently deletes a notification subscription.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/deleteNotification.js\n * region_tag:storage_delete_bucket_notification\n * Another example:\n */\n delete(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.request({\n method: 'DELETE',\n uri: '',\n qs: options,\n }, callback || nodejs_common_1.util.noop);\n }\n /**\n * Get a notification and its metadata if it exists.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * See {@link Bucket#createNotification} for create options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. Default: `false`.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.get(function(err, notification, apiResponse) {\n * // `notification.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.get().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const autoCreate = options.autoCreate;\n delete options.autoCreate;\n const onCreate = (err, notification, apiResponse) => {\n if (err) {\n if (err.code === 409) {\n this.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, notification, apiResponse);\n };\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n // eslint-disable-next-line\n this.create.apply(this, args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, this, metadata);\n });\n }\n /**\n * Get the notification's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/getMetadataNotifications.js\n * region_tag:storage_print_pubsub_bucket_notification\n * Another example:\n */\n getMetadata(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.request({\n uri: '',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n this.metadata = resp;\n callback(null, this.metadata, resp);\n });\n }\n}\nexports.Notification = Notification;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Notification);\n//# sourceMappingURL=notification.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;\nconst abort_controller_1 = require(\"abort-controller\");\nconst crypto_1 = require(\"crypto\");\nconst extend = require(\"extend\");\nconst gaxios = require(\"gaxios\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst stream_1 = require(\"stream\");\nconst retry = require(\"async-retry\");\nconst uuid = require(\"uuid\");\nconst NOT_FOUND_STATUS_CODE = 404;\nconst RESUMABLE_INCOMPLETE_STATUS_CODE = 308;\nconst DEFAULT_API_ENDPOINT_REGEX = /.*\\.googleapis\\.com/;\nconst packageJson = require('../../package.json');\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\nclass Upload extends stream_1.Writable {\n constructor(cfg) {\n super();\n this.numBytesWritten = 0;\n this.numRetries = 0;\n this.currentInvocationId = {\n chunk: uuid.v4(),\n uri: uuid.v4(),\n offset: uuid.v4(),\n };\n this.upstreamChunkBuffer = Buffer.alloc(0);\n this.chunkBufferEncoding = undefined;\n this.numChunksReadInRequest = 0;\n /**\n * A chunk used for caching the most recent upload chunk.\n * We should not assume that the server received all bytes sent in the request.\n * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n */\n this.lastChunkSent = Buffer.alloc(0);\n this.upstreamEnded = false;\n cfg = cfg || {};\n if (!cfg.bucket || !cfg.file) {\n throw new Error('A bucket and file name are required');\n }\n cfg.authConfig = cfg.authConfig || {};\n cfg.authConfig.scopes = [\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ];\n this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig);\n this.apiEndpoint = 'https://storage.googleapis.com';\n if (cfg.apiEndpoint) {\n this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint);\n if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) {\n this.authClient = gaxios;\n }\n }\n this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`;\n this.bucket = cfg.bucket;\n const cacheKeyElements = [cfg.bucket, cfg.file];\n if (typeof cfg.generation === 'number') {\n cacheKeyElements.push(`${cfg.generation}`);\n }\n this.cacheKey = cacheKeyElements.join('/');\n this.customRequestOptions = cfg.customRequestOptions || {};\n this.file = cfg.file;\n this.generation = cfg.generation;\n this.kmsKeyName = cfg.kmsKeyName;\n this.metadata = 
cfg.metadata || {};\n this.offset = cfg.offset;\n this.origin = cfg.origin;\n this.params = cfg.params || {};\n this.userProject = cfg.userProject;\n this.chunkSize = cfg.chunkSize;\n this.retryOptions = cfg.retryOptions;\n if (cfg.key) {\n const base64Key = Buffer.from(cfg.key).toString('base64');\n this.encryption = {\n key: base64Key,\n hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'),\n };\n }\n this.predefinedAcl = cfg.predefinedAcl;\n if (cfg.private)\n this.predefinedAcl = 'private';\n if (cfg.public)\n this.predefinedAcl = 'publicRead';\n const autoRetry = cfg.retryOptions.autoRetry;\n this.uriProvidedManually = !!cfg.uri;\n this.uri = cfg.uri;\n this.numBytesWritten = 0;\n this.numRetries = 0; // counter for number of retries currently executed\n if (!autoRetry) {\n cfg.retryOptions.maxRetries = 0;\n }\n this.timeOfFirstRequest = Date.now();\n const contentLength = cfg.metadata\n ? Number(cfg.metadata.contentLength)\n : NaN;\n this.contentLength = isNaN(contentLength) ? '*' : contentLength;\n this.once('writing', () => {\n if (this.uri) {\n this.continueUploading();\n }\n else {\n this.createURI(err => {\n if (err) {\n return this.destroy(err);\n }\n this.startUploading();\n return;\n });\n }\n });\n }\n /**\n * Prevent 'finish' event until the upload has succeeded.\n *\n * @param fireFinishEvent The finish callback\n */\n _final(fireFinishEvent = () => { }) {\n this.upstreamEnded = true;\n this.once('uploadFinished', fireFinishEvent);\n process.nextTick(() => {\n this.emit('upstreamFinished');\n // it's possible `_write` may not be called - namely for empty object uploads\n this.emit('writing');\n });\n }\n /**\n * Handles incoming data from upstream\n *\n * @param chunk The chunk to append to the buffer\n * @param encoding The encoding of the chunk\n * @param readCallback A callback for when the buffer has been read downstream\n */\n _write(chunk, encoding, readCallback = () => { }) {\n // Backwards-compatible event\n this.emit('writing');\n this.upstreamChunkBuffer = Buffer.concat([\n this.upstreamChunkBuffer,\n typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk,\n ]);\n this.chunkBufferEncoding = encoding;\n this.once('readFromChunkBuffer', readCallback);\n process.nextTick(() => this.emit('wroteToChunkBuffer'));\n }\n /**\n * Prepends data back to the upstream chunk buffer.\n *\n * @param chunk The data to prepend\n */\n unshiftChunkBuffer(chunk) {\n this.upstreamChunkBuffer = Buffer.concat([chunk, this.upstreamChunkBuffer]);\n }\n /**\n * Retrieves data from upstream's buffer.\n *\n * @param limit The maximum amount to return from the buffer.\n * @returns The data requested.\n */\n pullFromChunkBuffer(limit) {\n const chunk = this.upstreamChunkBuffer.slice(0, limit);\n this.upstreamChunkBuffer = this.upstreamChunkBuffer.slice(limit);\n // notify upstream we've read from the buffer so it can potentially\n // send more data down.\n process.nextTick(() => this.emit('readFromChunkBuffer'));\n return chunk;\n }\n /**\n * A handler for determining if data is ready to be read from upstream.\n *\n * @returns If there will be more chunks to read in the future\n */\n async waitForNextChunk() {\n const willBeMoreChunks = await new Promise(resolve => {\n // There's data available - it should be digested\n if (this.upstreamChunkBuffer.byteLength) {\n return resolve(true);\n }\n // The upstream writable ended, we shouldn't expect any more data.\n if (this.upstreamEnded) {\n return resolve(false);\n }\n // Nothing immediate seems to be determined. 
We need to prepare some\n // listeners to determine next steps...\n const wroteToChunkBufferCallback = () => {\n removeListeners();\n return resolve(true);\n };\n const upstreamFinishedCallback = () => {\n removeListeners();\n // this should be the last chunk, if there's anything there\n if (this.upstreamChunkBuffer.length)\n return resolve(true);\n return resolve(false);\n };\n // Remove listeners when we're ready to callback.\n const removeListeners = () => {\n this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback);\n this.removeListener('upstreamFinished', upstreamFinishedCallback);\n };\n // If there's data recently written it should be digested\n this.once('wroteToChunkBuffer', wroteToChunkBufferCallback);\n // If the upstream finishes let's see if there's anything to grab\n this.once('upstreamFinished', upstreamFinishedCallback);\n });\n return willBeMoreChunks;\n }\n /**\n * Reads data from upstream up to the provided `limit`.\n * Ends when the limit has reached or no data is expected to be pushed from upstream.\n *\n * @param limit The most amount of data this iterator should return. `Infinity` by default.\n * @param oneChunkMode Determines if one, exhaustive chunk is yielded for the iterator\n */\n async *upstreamIterator(limit = Infinity, oneChunkMode) {\n let completeChunk = Buffer.alloc(0);\n // read from upstream chunk buffer\n while (limit && (await this.waitForNextChunk())) {\n // read until end or limit has been reached\n const chunk = this.pullFromChunkBuffer(limit);\n limit -= chunk.byteLength;\n if (oneChunkMode) {\n // return 1 chunk at the end of iteration\n completeChunk = Buffer.concat([completeChunk, chunk]);\n }\n else {\n // return many chunks throughout iteration\n yield {\n chunk,\n encoding: this.chunkBufferEncoding,\n };\n }\n }\n if (oneChunkMode) {\n yield {\n chunk: completeChunk,\n encoding: this.chunkBufferEncoding,\n };\n }\n }\n createURI(callback) {\n if (!callback) {\n return this.createURIAsync();\n }\n this.createURIAsync().then(r => callback(null, r), callback);\n }\n async createURIAsync() {\n const metadata = { ...this.metadata };\n const headers = {};\n // Delete content length and content type from metadata if they exist.\n // These are headers and should not be sent as part of the metadata.\n if (metadata.contentLength) {\n headers['X-Upload-Content-Length'] = metadata.contentLength.toString();\n delete metadata.contentLength;\n }\n if (metadata.contentType) {\n headers['X-Upload-Content-Type'] = metadata.contentType;\n delete metadata.contentType;\n }\n // Check if headers already exist before creating new ones\n const reqOpts = {\n method: 'POST',\n url: [this.baseURI, this.bucket, 'o'].join('/'),\n params: Object.assign({\n name: this.file,\n uploadType: 'resumable',\n }, this.params),\n data: metadata,\n headers: {\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`,\n ...headers,\n },\n };\n if (metadata.contentLength) {\n reqOpts.headers['X-Upload-Content-Length'] =\n metadata.contentLength.toString();\n }\n if (metadata.contentType) {\n reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType;\n }\n if (typeof this.generation !== 'undefined') {\n reqOpts.params.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName) {\n reqOpts.params.kmsKeyName = this.kmsKeyName;\n }\n if (this.predefinedAcl) {\n reqOpts.params.predefinedAcl = this.predefinedAcl;\n }\n if (this.origin) {\n reqOpts.headers.Origin = this.origin;\n }\n const 
uri = await retry(async (bail) => {\n var _a, _b, _c;\n try {\n const res = await this.makeRequest(reqOpts);\n // We have successfully got a URI we can now create a new invocation id\n this.currentInvocationId.uri = uuid.v4();\n return res.headers.location;\n }\n catch (err) {\n const e = err;\n const apiError = {\n code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status,\n name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText,\n message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText,\n errors: [\n {\n reason: e.code,\n },\n ],\n };\n if (this.retryOptions.maxRetries > 0 &&\n this.retryOptions.retryableErrorFn(apiError)) {\n throw e;\n }\n else {\n return bail(e);\n }\n }\n }, {\n retries: this.retryOptions.maxRetries,\n factor: this.retryOptions.retryDelayMultiplier,\n maxTimeout: this.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n this.uri = uri;\n this.offset = 0;\n return uri;\n }\n async continueUploading() {\n if (typeof this.offset === 'number') {\n this.startUploading();\n return;\n }\n await this.getAndSetOffset();\n this.startUploading();\n }\n async startUploading() {\n const multiChunkMode = !!this.chunkSize;\n let responseReceived = false;\n this.numChunksReadInRequest = 0;\n if (!this.offset) {\n this.offset = 0;\n }\n // Check if the offset (server) is too far behind the current stream\n if (this.offset < this.numBytesWritten) {\n const delta = this.numBytesWritten - this.offset;\n const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`;\n this.emit('error', new RangeError(message));\n return;\n }\n // Check if we should 'fast-forward' to the relevant data to upload\n if (this.numBytesWritten < this.offset) {\n // 'fast-forward' to the byte where we need to upload.\n // only push data from the byte after the one we left off on\n const fastForwardBytes = this.offset - this.numBytesWritten;\n for await (const _chunk of this.upstreamIterator(fastForwardBytes)) {\n _chunk; // discard the data up until the point we want\n }\n this.numBytesWritten = this.offset;\n }\n let expectedUploadSize = undefined;\n // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available\n if (typeof this.contentLength === 'number') {\n expectedUploadSize = this.contentLength - this.numBytesWritten;\n }\n // `expectedUploadSize` should be no more than the `chunkSize`.\n // It's possible this is the last chunk request for a multiple\n // chunk upload, thus smaller than the chunk size.\n if (this.chunkSize) {\n expectedUploadSize = expectedUploadSize\n ? Math.min(this.chunkSize, expectedUploadSize)\n : this.chunkSize;\n }\n // A queue for the upstream data\n const upstreamQueue = this.upstreamIterator(expectedUploadSize, multiChunkMode // multi-chunk mode should return 1 chunk per request\n );\n // The primary read stream for this request. 
This stream retrieves no more\n // than the exact requested amount from upstream.\n const requestStream = new stream_1.Readable({\n read: async () => {\n // Don't attempt to retrieve data upstream if we already have a response\n if (responseReceived)\n requestStream.push(null);\n const result = await upstreamQueue.next();\n if (result.value) {\n this.numChunksReadInRequest++;\n this.lastChunkSent = result.value.chunk;\n this.numBytesWritten += result.value.chunk.byteLength;\n this.emit('progress', {\n bytesWritten: this.numBytesWritten,\n contentLength: this.contentLength,\n });\n requestStream.push(result.value.chunk, result.value.encoding);\n }\n if (result.done) {\n requestStream.push(null);\n }\n },\n });\n const headers = {\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`,\n };\n // If using multiple chunk upload, set appropriate header\n if (multiChunkMode) {\n // We need to know how much data is available upstream to set the `Content-Range` header.\n const oneChunkIterator = this.upstreamIterator(expectedUploadSize, true);\n const { value } = await oneChunkIterator.next();\n const bytesToUpload = value.chunk.byteLength;\n // Important: we want to know if the upstream has ended and the queue is empty before\n // unshifting data back into the queue. This way we will know if this is the last request or not.\n const isLastChunkOfUpload = !(await this.waitForNextChunk());\n // Important: put the data back in the queue for the actual upload iterator\n this.unshiftChunkBuffer(value.chunk);\n let totalObjectSize = this.contentLength;\n if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) {\n // Let's let the server know this is the last chunk since\n // we didn't know the content-length beforehand.\n totalObjectSize = bytesToUpload + this.numBytesWritten;\n }\n // `- 1` as the ending byte is inclusive in the request.\n const endingByte = bytesToUpload + this.numBytesWritten - 1;\n // `Content-Length` for multiple chunk uploads is the size of the chunk,\n // not the overall object\n headers['Content-Length'] = bytesToUpload;\n headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`;\n }\n else {\n headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`;\n }\n const reqOpts = {\n method: 'PUT',\n url: this.uri,\n headers,\n body: requestStream,\n };\n try {\n const resp = await this.makeRequestStream(reqOpts);\n if (resp) {\n responseReceived = true;\n this.responseHandler(resp);\n }\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n // Process the API response to look for errors that came in\n // the response body.\n responseHandler(resp) {\n if (resp.data.error) {\n this.destroy(resp.data.error);\n return;\n }\n // At this point we can safely create a new id for the chunk\n this.currentInvocationId.chunk = uuid.v4();\n const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&\n resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&\n resp.headers.range;\n if (shouldContinueWithNextMultiChunkRequest) {\n // Use the upper value in this header to determine where to start the next chunk.\n // We should not assume that the server received all bytes sent in the request.\n // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const range = resp.headers.range;\n this.offset = 
Number(range.split('-')[1]) + 1;\n // We should not assume that the server received all bytes sent in the request.\n // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const missingBytes = this.numBytesWritten - this.offset;\n if (missingBytes) {\n const dataToPrependForResending = this.lastChunkSent.slice(-missingBytes);\n // As multi-chunk uploads send one chunk per request and pulls one\n // chunk into the pipeline, prepending the missing bytes back should\n // be fine for the next request.\n this.unshiftChunkBuffer(dataToPrependForResending);\n this.numBytesWritten -= missingBytes;\n this.lastChunkSent = Buffer.alloc(0);\n }\n // continue uploading next chunk\n this.continueUploading();\n }\n else if (!this.isSuccessfulResponse(resp.status)) {\n const err = new Error('Upload failed');\n err.code = resp.status;\n err.name = 'Upload failed';\n if (resp === null || resp === void 0 ? void 0 : resp.data) {\n err.errors = [resp === null || resp === void 0 ? void 0 : resp.data];\n }\n this.destroy(err);\n }\n else {\n // remove the last chunk sent to free memory\n this.lastChunkSent = Buffer.alloc(0);\n if (resp && resp.data) {\n resp.data.size = Number(resp.data.size);\n }\n this.emit('metadata', resp.data);\n // Allow the object (Upload) to continue naturally so the user's\n // \"finish\" event fires.\n this.emit('uploadFinished');\n }\n }\n async getAndSetOffset() {\n const opts = {\n method: 'PUT',\n url: this.uri,\n headers: {\n 'Content-Length': 0,\n 'Content-Range': 'bytes */*',\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`,\n },\n };\n try {\n const resp = await this.makeRequest(opts);\n // Successfully got the offset we can now create a new offset invocation id\n this.currentInvocationId.offset = uuid.v4();\n if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {\n if (resp.headers.range) {\n const range = resp.headers.range;\n this.offset = Number(range.split('-')[1]) + 1;\n return;\n }\n }\n this.offset = 0;\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n async makeRequest(reqOpts) {\n if (this.encryption) {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString();\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryption.hash.toString();\n }\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n // Let gaxios know we will handle a 308 error code ourselves.\n reqOpts.validateStatus = (status) => {\n return (this.isSuccessfulResponse(status) ||\n status === RESUMABLE_INCOMPLETE_STATUS_CODE);\n };\n const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts);\n const res = await this.authClient.request(combinedReqOpts);\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n return res;\n }\n async makeRequestStream(reqOpts) {\n const controller = new abort_controller_1.default();\n const errorCallback = () => controller.abort();\n this.once('error', errorCallback);\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n reqOpts.signal = controller.signal;\n reqOpts.validateStatus = () => true;\n const combinedReqOpts = 
extend(true, {}, this.customRequestOptions, reqOpts);\n const res = await this.authClient.request(combinedReqOpts);\n const successfulRequest = this.onResponse(res);\n this.removeListener('error', errorCallback);\n return successfulRequest ? res : null;\n }\n /**\n * @return {bool} is the request good?\n */\n onResponse(resp) {\n if (resp.status !== 200 &&\n this.retryOptions.retryableErrorFn({\n code: resp.status,\n message: resp.statusText,\n name: resp.statusText,\n })) {\n this.attemptDelayedRetry(resp);\n return false;\n }\n this.emit('response', resp);\n return true;\n }\n /**\n * @param resp GaxiosResponse object from previous attempt\n */\n attemptDelayedRetry(resp) {\n if (this.numRetries < this.retryOptions.maxRetries) {\n if (resp.status === NOT_FOUND_STATUS_CODE &&\n this.numChunksReadInRequest === 0) {\n this.startUploading();\n }\n else {\n const retryDelay = this.getRetryDelay();\n if (retryDelay <= 0) {\n this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`));\n return;\n }\n // Unshift the most recent chunk back in case it's needed for the next\n // request.\n this.numBytesWritten -= this.lastChunkSent.byteLength;\n this.unshiftChunkBuffer(this.lastChunkSent);\n this.lastChunkSent = Buffer.alloc(0);\n // We don't know how much data has been received by the server.\n // `continueUploading` will recheck the offset via `getAndSetOffset`.\n // If `offset` < `numberBytesReceived` then we will raise a RangeError\n // as we've streamed too much data that has been missed - this should\n // not be the case for multi-chunk uploads as `lastChunkSent` is the\n // body of the entire request.\n this.offset = undefined;\n setTimeout(this.continueUploading.bind(this), retryDelay);\n }\n this.numRetries++;\n }\n else {\n this.destroy(new Error('Retry limit exceeded - ' + resp.data));\n }\n }\n /**\n * @returns {number} the amount of time to wait before retrying the request\n */\n getRetryDelay() {\n const randomMs = Math.round(Math.random() * 1000);\n const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) *\n 1000 +\n randomMs;\n const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 -\n (Date.now() - this.timeOfFirstRequest);\n const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000;\n return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs);\n }\n /*\n * Prepare user-defined API endpoint for compatibility with our API.\n */\n sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Check if a given status code is 2xx\n *\n * @param status The status code to check\n * @returns if the status is 2xx\n */\n isSuccessfulResponse(status) {\n return status >= 200 && status < 300;\n }\n}\nexports.Upload = Upload;\nfunction upload(cfg) {\n return new Upload(cfg);\n}\nexports.upload = upload;\nfunction createURI(cfg, callback) {\n const up = new Upload(cfg);\n if (!callback) {\n return up.createURI();\n }\n up.createURI().then(r => callback(null, r), callback);\n}\nexports.createURI = createURI;\n//# sourceMappingURL=resumable-upload.js.map","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed 
under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0;\nconst crypto = require(\"crypto\");\nconst url = require(\"url\");\nconst storage_1 = require(\"./storage\");\nconst util_1 = require(\"./util\");\nvar SignerExceptionMessages;\n(function (SignerExceptionMessages) {\n SignerExceptionMessages[\"ACCESSIBLE_DATE_INVALID\"] = \"The accessible at date provided was invalid.\";\n SignerExceptionMessages[\"EXPIRATION_BEFORE_ACCESSIBLE_DATE\"] = \"An expiration date cannot be before accessible date.\";\n SignerExceptionMessages[\"X_GOOG_CONTENT_SHA256\"] = \"The header X-Goog-Content-SHA256 must be a hexadecimal string.\";\n})(SignerExceptionMessages = exports.SignerExceptionMessages || (exports.SignerExceptionMessages = {}));\n/*\n * Default signing version for getSignedUrl is 'v2'.\n */\nconst DEFAULT_SIGNING_VERSION = 'v2';\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\n/**\n * @const {string}\n * @private\n */\nexports.PATH_STYLED_HOST = 'https://storage.googleapis.com';\nclass URLSigner {\n constructor(authClient, bucket, file) {\n this.bucket = bucket;\n this.file = file;\n this.authClient = authClient;\n }\n getSignedUrl(cfg) {\n const expiresInSeconds = this.parseExpires(cfg.expires);\n const method = cfg.method;\n const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt);\n if (expiresInSeconds < accessibleAtInSeconds) {\n throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE);\n }\n let customHost;\n // Default style is `path`.\n const isVirtualHostedStyle = cfg.virtualHostedStyle || false;\n if (cfg.cname) {\n customHost = cfg.cname;\n }\n else if (isVirtualHostedStyle) {\n customHost = `https://${this.bucket.name}.storage.googleapis.com`;\n }\n const secondsToMilliseconds = 1000;\n const config = Object.assign({}, cfg, {\n method,\n expiration: expiresInSeconds,\n accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds),\n bucket: this.bucket.name,\n file: this.file ? (0, util_1.encodeURI)(this.file.name, false) : undefined,\n });\n if (customHost) {\n config.cname = customHost;\n }\n const version = cfg.version || DEFAULT_SIGNING_VERSION;\n let promise;\n if (version === 'v2') {\n promise = this.getSignedUrlV2(config);\n }\n else if (version === 'v4') {\n promise = this.getSignedUrlV4(config);\n }\n else {\n throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`);\n }\n return promise.then(query => {\n query = Object.assign(query, cfg.queryParams);\n const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n signedUrl.search = (0, util_1.qsStringify)(query);\n return signedUrl.href;\n });\n }\n getSignedUrlV2(config) {\n const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {});\n const resourcePath = this.getResourcePath(false, config.bucket, config.file);\n const blobToSign = [\n config.method,\n config.contentMd5 || '',\n config.contentType || '',\n config.expiration,\n canonicalHeadersString + resourcePath,\n ].join('\\n');\n const sign = async () => {\n const authClient = this.authClient;\n try {\n const signature = await authClient.sign(blobToSign);\n const credentials = await authClient.getCredentials();\n return {\n GoogleAccessId: credentials.client_email,\n Expires: config.expiration,\n Signature: signature,\n };\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n getSignedUrlV4(config) {\n config.accessibleAt = config.accessibleAt\n ? config.accessibleAt\n : new Date();\n const millisecondsToSeconds = 1.0 / 1000.0;\n const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds;\n // v4 limit expiration to be 7 days maximum\n if (expiresPeriodInSeconds > SEVEN_DAYS) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n const extensionHeaders = Object.assign({}, config.extensionHeaders);\n const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n extensionHeaders.host = fqdn.host;\n if (config.contentMd5) {\n extensionHeaders['content-md5'] = config.contentMd5;\n }\n if (config.contentType) {\n extensionHeaders['content-type'] = config.contentType;\n }\n let contentSha256;\n const sha256Header = extensionHeaders['x-goog-content-sha256'];\n if (sha256Header) {\n if (typeof sha256Header !== 'string' ||\n !/[A-Fa-f0-9]{40}/.test(sha256Header)) {\n throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256);\n }\n contentSha256 = sha256Header;\n }\n const signedHeaders = Object.keys(extensionHeaders)\n .map(header => header.toLowerCase())\n .sort()\n .join(';');\n const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders);\n const datestamp = (0, util_1.formatAsUTCISO)(config.accessibleAt);\n const credentialScope = `${datestamp}/auto/storage/goog4_request`;\n const sign = async () => {\n const credentials = await this.authClient.getCredentials();\n const credential = `${credentials.client_email}/${credentialScope}`;\n const dateISO = (0, util_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true);\n const queryParams = {\n 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256',\n 'X-Goog-Credential': credential,\n 'X-Goog-Date': dateISO,\n 'X-Goog-Expires': expiresPeriodInSeconds.toString(10),\n 'X-Goog-SignedHeaders': signedHeaders,\n ...(config.queryParams || {}),\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const canonicalQueryParams = this.getCanonicalQueryParams(queryParams);\n const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256);\n const hash = crypto\n .createHash('sha256')\n .update(canonicalRequest)\n .digest('hex');\n const blobToSign = [\n 'GOOG4-RSA-SHA256',\n dateISO,\n credentialScope,\n hash,\n ].join('\\n');\n try {\n const signature = await this.authClient.sign(blobToSign);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n const signedQuery = Object.assign({}, queryParams, {\n 'X-Goog-Signature': signatureHex,\n });\n return signedQuery;\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n /**\n * Create canonical headers for signing v4 url.\n *\n * The canonical headers for v4-signing a request demands header names are\n * first lowercased, followed by sorting the header names.\n * Then, construct the canonical headers part of the request:\n * + \":\" + Trim() + \"\\n\"\n * ..\n * + \":\" + Trim() + \"\\n\"\n *\n * @param headers\n * @private\n */\n getCanonicalHeaders(headers) {\n // Sort headers by their lowercased names\n const sortedHeaders = (0, util_1.objectEntries)(headers)\n // Convert header names to lowercase\n .map(([headerName, value]) => [\n headerName.toLowerCase(),\n value,\n ])\n .sort((a, b) => a[0].localeCompare(b[0]));\n return sortedHeaders\n .filter(([, value]) => value !== undefined)\n .map(([headerName, value]) => {\n // - Convert Array (multi-valued header) into string, delimited by\n // ',' (no space).\n // - Trim leading and trailing spaces.\n // - Convert sequential (2+) spaces into a single space\n const canonicalValue = `${value}`.trim().replace(/\\s{2,}/g, ' ');\n return `${headerName}:${canonicalValue}\\n`;\n })\n .join('');\n }\n getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) {\n return [\n method,\n path,\n query,\n headers,\n signedHeaders,\n contentSha256 || 'UNSIGNED-PAYLOAD',\n ].join('\\n');\n }\n getCanonicalQueryParams(query) {\n return (0, util_1.objectEntries)(query)\n .map(([key, value]) => [(0, util_1.encodeURI)(key, true), (0, util_1.encodeURI)(value, true)])\n .sort((a, b) => (a[0] < b[0] ? 
-1 : 1))\n .map(([key, value]) => `${key}=${value}`)\n .join('&');\n }\n getResourcePath(cname, bucket, file) {\n if (cname) {\n return '/' + (file || '');\n }\n else if (file) {\n return `/${bucket}/${file}`;\n }\n else {\n return `/${bucket}`;\n }\n }\n parseExpires(expires, current = new Date()) {\n const expiresInMSeconds = new Date(expires).valueOf();\n if (isNaN(expiresInMSeconds)) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expiresInMSeconds < current.valueOf()) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n return Math.round(expiresInMSeconds / 1000); // The API expects seconds.\n }\n parseAccessibleAt(accessibleAt) {\n const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf();\n if (isNaN(accessibleAtInMSeconds)) {\n throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID);\n }\n return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds.\n }\n}\nexports.URLSigner = URLSigner;\n/**\n * Custom error type for errors related to getting signed errors and policies.\n *\n * @private\n */\nclass SigningError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'SigningError';\n }\n}\nexports.SigningError = SigningError;\n//# sourceMappingURL=signer.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst stream_1 = require(\"stream\");\nconst bucket_1 = require(\"./bucket\");\nconst channel_1 = require(\"./channel\");\nconst file_1 = require(\"./file\");\nconst util_1 = require(\"./util\");\nconst hmacKey_1 = require(\"./hmacKey\");\nconst crc32c_1 = require(\"./crc32c\");\nvar IdempotencyStrategy;\n(function (IdempotencyStrategy) {\n IdempotencyStrategy[IdempotencyStrategy[\"RetryAlways\"] = 0] = \"RetryAlways\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryConditional\"] = 1] = \"RetryConditional\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryNever\"] = 2] = \"RetryNever\";\n})(IdempotencyStrategy = exports.IdempotencyStrategy || (exports.IdempotencyStrategy = {}));\nvar ExceptionMessages;\n(function (ExceptionMessages) {\n ExceptionMessages[\"EXPIRATION_DATE_INVALID\"] = \"The expiration date provided was invalid.\";\n ExceptionMessages[\"EXPIRATION_DATE_PAST\"] = \"An expiration date cannot be in the past.\";\n ExceptionMessages[\"INVALID_ACTION\"] = \"The action is not provided or 
invalid.\";\n})(ExceptionMessages = exports.ExceptionMessages || (exports.ExceptionMessages = {}));\nvar StorageExceptionMessages;\n(function (StorageExceptionMessages) {\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED\"] = \"A bucket name is needed to use Cloud Storage.\";\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED_CREATE\"] = \"A name is required to create a bucket.\";\n StorageExceptionMessages[\"HMAC_SERVICE_ACCOUNT\"] = \"The first argument must be a service account email to create an HMAC key.\";\n StorageExceptionMessages[\"HMAC_ACCESS_ID\"] = \"An access ID is needed to create an HmacKey object.\";\n})(StorageExceptionMessages = exports.StorageExceptionMessages || (exports.StorageExceptionMessages = {}));\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n */\nexports.AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DEFAULT = 3;\n/**\n * Default behavior: Wait twice as long as previous retry before retrying.\n *\n * @const {number}\n */\nexports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2;\n/**\n * Default behavior: If the operation doesn't succeed after 600 seconds,\n * stop retrying.\n *\n * @const {number}\n */\nexports.TOTAL_TIMEOUT_DEFAULT = 600;\n/**\n * Default behavior: Wait no more than 64 seconds between retries.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DELAY_DEFAULT = 64;\n/**\n * Default behavior: Retry conditionally idempotent operations if correct preconditions are set.\n *\n * @const {enum}\n * @private\n */\nconst IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional;\n/**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted.\n * @const\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\nconst RETRYABLE_ERR_FN_DEFAULT = function (err) {\n var _a;\n const isConnectionProblem = (reason) => {\n return (reason.includes('eai_again') || // DNS lookup error\n reason === 'econnreset' ||\n reason === 'unexpected connection closure' ||\n reason === 'epipe');\n };\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (typeof err.code === 'string') {\n const reason = err.code.toLowerCase();\n if (isConnectionProblem(reason)) {\n return true;\n }\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase();\n if (reason && isConnectionProblem(reason)) {\n return true;\n }\n }\n }\n }\n return false;\n};\nexports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT;\n/*! Developer Documentation\n *\n * Invoke this method to create a new Storage object bound with pre-determined\n * configuration options. For each object that can be created (e.g., a bucket),\n * there is an equivalent static and instance method. While they are classes,\n * they can be instantiated without use of the `new` keyword.\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. 
ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * This object provides constants to refer to the three permission levels that\n * can be granted to an entity:\n *\n * - `gcs.acl.OWNER_ROLE` - (\"OWNER\")\n * - `gcs.acl.READER_ROLE` - (\"READER\")\n * - `gcs.acl.WRITER_ROLE` - (\"WRITER\")\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists}\n *\n * @name Storage#acl\n * @type {object}\n * @property {string} OWNER_ROLE\n * @property {string} READER_ROLE\n * @property {string} WRITER_ROLE\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n *\n * //-\n * // Make all of the files currently in a bucket publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * albums.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // Make any new objects added to a bucket publicly readable.\n * //-\n * albums.acl.default.add(options, function(err, aclObject) {});\n *\n * //-\n * // Grant a user ownership permissions to a bucket.\n * //-\n * albums.acl.add({\n * entity: 'user-useremail@example.com',\n * role: storage.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * albums.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n/**\n * Get {@link Bucket} objects for all of the buckets in your project as\n * a readable object stream.\n *\n * @method Storage#getBucketsStream\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @returns {ReadableStream} A readable stream that emits {@link Bucket}\n * instances.\n *\n * @example\n * ```\n * storage.getBucketsStream()\n * .on('error', console.error)\n * .on('data', function(bucket) {\n * // bucket is a Bucket object.\n * })\n * .on('end', function() {\n * // All buckets retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getBucketsStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n * Get {@link HmacKey} objects for all of the HMAC keys in the project in a\n * readable object stream.\n *\n * @method Storage#getHmacKeysStream\n * @param {GetHmacKeysOptions} [options] Configuration options.\n * @returns {ReadableStream} A readable stream that emits {@link HmacKey}\n * instances.\n *\n * @example\n * ```\n * storage.getHmacKeysStream()\n * .on('error', console.error)\n * .on('data', function(hmacKey) {\n * // hmacKey is an HmacKey object.\n * })\n * .on('end', function() {\n * // All HmacKey retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getHmacKeysStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n *
ACLs
\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share files with other users\n * and allow other users to access your buckets and files.\n *\n * To learn more about ACLs, read this overview on\n * {@link https://cloud.google.com/storage/docs/access-control| Access Control}.\n *\n * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview}\n * See {@link https://cloud.google.com/storage/docs/access-control| Access Control}\n *\n * @class\n */\nclass Storage extends nodejs_common_1.Service {\n getBucketsStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n getHmacKeysStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} StorageOptions\n * @property {string} [projectId] The project ID from the Google Developer's\n * Console, e.g. 'grape-spaceship-123'. We will also check the environment\n * variable `GCLOUD_PROJECT` for your project ID. If your app is running\n * in an environment which supports {@link\n * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application\n * Application Default Credentials}, your project ID will be detected\n * automatically.\n * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key\n * downloaded from the Google Developers Console. If you provide a path to\n * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and\n * .p12 require you to specify the `email` option as well.\n * @property {string} [email] Account email address. Required when using a .pem\n * or .p12 keyFilename.\n * @property {object} [credentials] Credentials object.\n * @property {string} [credentials.client_email]\n * @property {string} [credentials.private_key]\n * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors\n * will be retried with exponential delay between them dictated by the formula\n * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout\n * has been reached. Retries will only happen if autoRetry is set to true.\n * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to\n * increase the delay time between the completion of failed requests, and the\n * initiation of the subsequent retrying request.\n * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from\n * when the initial request is sent, after which an error will\n * be returned, regardless of the retrying attempts made meanwhile.\n * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests.\n * When this value is reached, ``retryDelayMultiplier`` will no longer be used to\n * increase delay time.\n * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries\n * attempted before returning the error.\n * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given\n * error should be retried and false otherwise.\n * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration\n * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways -\n * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional -\n * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never\n * retry a conditionally idempotent operation.\n * @property {string} [userAgent] The value to be prepended to the User-Agent\n * header in API requests.\n * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one.\n * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out.\n * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned.\n * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests.\n * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. 
Defaults to {@link CRC32C}\n */\n /**\n * Constructs the Storage client.\n *\n * @example\n * Create a client that uses Application Default Credentials\n * (ADC)\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * ```\n *\n * @example\n * Create a client with explicit credentials\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * keyFilename: '/path/to/keyfile.json'\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by value as a JavaScript object\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: {\n * type: 'service_account',\n * project_id: 'xxxxxxx',\n * private_key_id: 'xxxx',\n * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\\n-----END PRIVATE KEY-----\\n',\n * client_email: 'xxxx',\n * client_id: 'xxx',\n * auth_uri: 'https://accounts.google.com/o/oauth2/auth',\n * token_uri: 'https://oauth2.googleapis.com/token',\n * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs',\n * client_x509_cert_url: 'xxx',\n * }\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by loading a JSON file directly from disk\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: require('/path/to-keyfile.json')\n * });\n * ```\n *\n * @example\n * Create a client with an `AuthClient` (e.g. `DownscopedClient`)\n * ```\n * const {DownscopedClient} = require('google-auth-library');\n * const authClient = new DownscopedClient({...});\n *\n * const storage = new Storage({authClient});\n * ```\n *\n * Additional samples:\n * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1\n * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js\n *\n * @param {StorageOptions} [options] Configuration options.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p;\n let apiEndpoint = 'https://storage.googleapis.com';\n let customEndpoint = false;\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST;\n if (typeof EMULATOR_HOST === 'string') {\n apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST);\n customEndpoint = true;\n }\n if (options.apiEndpoint) {\n apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint);\n customEndpoint = true;\n }\n options = Object.assign({}, options, { apiEndpoint });\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`;\n const config = {\n apiEndpoint: options.apiEndpoint,\n retryOptions: {\n autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined\n ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry\n : exports.AUTO_RETRY_DEFAULT,\n maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)\n ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries\n : exports.MAX_RETRY_DEFAULT,\n retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier)\n ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier\n : exports.RETRY_DELAY_MULTIPLIER_DEFAULT,\n totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout)\n ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout\n : exports.TOTAL_TIMEOUT_DEFAULT,\n maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay)\n ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay\n : exports.MAX_RETRY_DELAY_DEFAULT,\n retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn)\n ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn\n : exports.RETRYABLE_ERR_FN_DEFAULT,\n idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined\n ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy\n : IDEMPOTENCY_STRATEGY_DEFAULT,\n },\n baseUrl,\n customEndpoint,\n useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint,\n projectIdRequired: false,\n scopes: [\n 'https://www.googleapis.com/auth/iam',\n 'https://www.googleapis.com/auth/cloud-platform',\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ],\n packageJson: require('../../package.json'),\n };\n super(config, options);\n /**\n * Reference to {@link Storage.acl}.\n *\n * @name Storage#acl\n * @see Storage.acl\n */\n this.acl = Storage.acl;\n this.crc32cGenerator =\n options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n this.retryOptions = config.retryOptions;\n this.getBucketsStream = paginator_1.paginator.streamify('getBuckets');\n this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys');\n }\n static sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Get a reference to a Cloud Storage bucket.\n *\n * @param {string} name Name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to\n * encrypt objects inserted into this bucket, if no encryption method is\n * specified.\n * @param {string} [options.userProject] User project to be billed for all\n * requests made from this Bucket object.\n * @returns {Bucket}\n * @see Bucket\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n * const photos = storage.bucket('photos');\n * ```\n */\n bucket(name, options) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED);\n }\n return new bucket_1.Bucket(this, name, options);\n }\n /**\n * Reference a channel to receive notifications about changes to your bucket.\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n * @returns {Channel}\n * @see Channel\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\n channel(id, resourceId) {\n return new channel_1.Channel(this, id, resourceId);\n }\n /**\n * @typedef {array} CreateBucketResponse\n * @property {Bucket} 0 The new {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The new {@link Bucket}.\n * @param {object} 
apiResponse The full API response.\n */\n /**\n * Metadata to set for the bucket.\n *\n * @typedef {object} CreateBucketRequest\n * @property {boolean} [archive=false] Specify the storage class as Archive.\n * @property {object} [autoclass.enabled=false] Specify whether Autoclass is\n * enabled for the bucket.\n * @property {boolean} [coldline=false] Specify the storage class as Coldline.\n * @property {Cors[]} [cors=[]] Specify the CORS configuration to use.\n * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [dra=false] Specify the storage class as Durable Reduced\n * Availability.\n * @property {string} [location] Specify the bucket's location. If specifying\n * a dual-region, the `customPlacementConfig` property should be set in conjunction.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [multiRegional=false] Specify the storage class as\n * Multi-Regional.\n * @property {boolean} [nearline=false] Specify the storage class as Nearline.\n * @property {boolean} [regional=false] Specify the storage class as Regional.\n * @property {boolean} [requesterPays=false] **Early Access Testers Only**\n * Force the use of the User Project metadata field to assign operational\n * costs when an operation is made on a Bucket and its objects.\n * @property {string} [rpo] For dual-region buckets, controls whether turbo\n * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`).\n * @property {boolean} [standard=true] Specify the storage class as Standard.\n * @property {string} [storageClass] The new storage class. (`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @property {Versioning} [versioning=undefined] Specify the versioning status.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * Create a bucket.\n *\n * Cloud Storage uses a flat namespace, so you can't create a bucket with\n * a name that is already in use. For more information, see\n * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} name Name of the bucket to create.\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a name is not provided.\n * @see Bucket#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const callback = function(err, bucket, apiResponse) {\n * // `bucket` is a Bucket object.\n * };\n *\n * storage.createBucket('new-bucket', callback);\n *\n * //-\n * // Create a bucket in a specific location and region. 
See the \n * // Official JSON API docs for complete details on the `location`\n * option.\n * // \n * //-\n * const metadata = {\n * location: 'US-CENTRAL1',\n * regional: true\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Create a bucket with a retention policy of 6 months.\n * //-\n * const metadata = {\n * retentionPolicy: {\n * retentionPeriod: 15780000 // 6 months in seconds.\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Enable versioning on a new bucket.\n * //-\n * const metadata = {\n * versioning: {\n * enabled: true\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createBucket('new-bucket').then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_create_bucket\n * Another example:\n */\n createBucket(name, metadataOrCallback, callback) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE);\n }\n let metadata;\n if (!callback) {\n callback = metadataOrCallback;\n metadata = {};\n }\n else {\n metadata = metadataOrCallback;\n }\n const body = {\n ...metadata,\n name,\n };\n const storageClasses = {\n archive: 'ARCHIVE',\n coldline: 'COLDLINE',\n dra: 'DURABLE_REDUCED_AVAILABILITY',\n multiRegional: 'MULTI_REGIONAL',\n nearline: 'NEARLINE',\n regional: 'REGIONAL',\n standard: 'STANDARD',\n };\n const storageClassKeys = Object.keys(storageClasses);\n for (const storageClass of storageClassKeys) {\n if (body[storageClass]) {\n if (metadata.storageClass && metadata.storageClass !== storageClass) {\n throw new Error(`Both \\`${storageClass}\\` and \\`storageClass\\` were provided.`);\n }\n body.storageClass = storageClasses[storageClass];\n delete body[storageClass];\n }\n }\n if (body.requesterPays) {\n body.billing = {\n requesterPays: body.requesterPays,\n };\n delete body.requesterPays;\n }\n const query = {\n project: this.projectId,\n };\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n this.request({\n method: 'POST',\n uri: '/b',\n qs: query,\n json: body,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const bucket = this.bucket(name);\n bucket.metadata = resp;\n callback(null, bucket, resp);\n });\n }\n /**\n * @typedef {object} CreateHmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. If not provided,\n * the project ID used to instantiate the Storage client will be used.\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {object} HmacKeyMetadata\n * @property {string} accessId The access id identifies which HMAC key was\n * used to sign a request when authenticating with HMAC.\n * @property {string} etag Used to perform a read-modify-write of the key.\n * @property {string} id The resource name of the HMAC key.\n * @property {string} projectId The project ID.\n * @property {string} serviceAccountEmail The service account's email this\n * HMAC key is created for.\n * @property {string} state The state of this HMAC key. 
One of \"ACTIVE\",\n * \"INACTIVE\" or \"DELETED\".\n * @property {string} timeCreated The creation time of the HMAC key in\n * RFC 3339 format.\n * @property {string} [updated] The time this HMAC key was last updated in\n * RFC 3339 format.\n */\n /**\n * @typedef {array} CreateHmacKeyResponse\n * @property {HmacKey} 0 The HmacKey instance created from API response.\n * @property {string} 1 The HMAC key's secret used to access the XML API.\n * @property {object} 3 The raw API response.\n */\n /**\n * @callback CreateHmacKeyCallback Callback function.\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey The HmacKey instance created from API response.\n * @param {string} secret The HMAC key's secret used to access the XML API.\n * @param {object} apiResponse The raw API response.\n */\n /**\n * Create an HMAC key associated with an service account to authenticate\n * requests to the Cloud Storage XML API.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @param {string} serviceAccountEmail The service account's email address\n * with which the HMAC key is created for.\n * @param {CreateHmacKeyCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('google-cloud/storage');\n * const storage = new Storage();\n *\n * // Replace with your service account's email address\n * const serviceAccountEmail =\n * 'my-service-account@appspot.gserviceaccount.com';\n *\n * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) {\n * if (!err) {\n * // Securely store the secret for use with the XML API.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createHmacKey(serviceAccountEmail)\n * .then((response) => {\n * const hmacKey = response[0];\n * const secret = response[1];\n * // Securely store the secret for use with the XML API.\n * });\n * ```\n */\n createHmacKey(serviceAccountEmail, optionsOrCb, cb) {\n if (typeof serviceAccountEmail !== 'string') {\n throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT);\n }\n const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options, { serviceAccountEmail });\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n method: 'POST',\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const metadata = resp.metadata;\n const hmacKey = this.hmacKey(metadata.accessId, {\n projectId: metadata.projectId,\n });\n hmacKey.metadata = resp.metadata;\n callback(null, hmacKey, resp.secret, resp);\n });\n }\n /**\n * Query object for listing buckets.\n *\n * @typedef {object} GetBucketsRequest\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the 
larger set of results to view.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * @typedef {array} GetBucketsResponse\n * @property {Bucket[]} 0 Array of {@link Bucket} instances.\n * @property {object} 1 nextQuery A query object to receive more results.\n * @property {object} 2 The full API response.\n */\n /**\n * @callback GetBucketsCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket[]} buckets Array of {@link Bucket} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get Bucket objects for all of the buckets in your project.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation}\n *\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @param {GetBucketsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * storage.getBuckets(function(err, buckets) {\n * if (!err) {\n * // buckets is an array of Bucket objects.\n * }\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, buckets, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * storage.getBuckets(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * buckets[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * buckets[0].getMetadata(function(err, metadata, apiResponse) {});\n * };\n *\n * storage.getBuckets({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getBuckets().then(function(data) {\n * const buckets = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_list_buckets\n * Another example:\n */\n getBuckets(optionsOrCallback, cb) {\n const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb);\n options.project = options.project || this.projectId;\n this.request({\n uri: '/b',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const buckets = itemsArray.map((bucket) => {\n const bucketInstance = this.bucket(bucket.id);\n bucketInstance.metadata = bucket;\n return bucketInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, buckets, nextQuery, resp);\n });\n }\n getHmacKeys(optionsOrCb, cb) {\n const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options);\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? 
resp.items : [];\n const hmacKeys = itemsArray.map((hmacKey) => {\n const hmacKeyInstance = this.hmacKey(hmacKey.accessId, {\n projectId: hmacKey.projectId,\n });\n hmacKeyInstance.metadata = hmacKey;\n return hmacKeyInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, hmacKeys, nextQuery, resp);\n });\n }\n /**\n * @typedef {array} GetServiceAccountResponse\n * @property {object} 0 The service account resource.\n * @property {object} 1 The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * @callback GetServiceAccountCallback\n * @param {?Error} err Request error, if any.\n * @param {object} serviceAccount The serviceAccount resource.\n * @param {string} serviceAccount.emailAddress The service account email\n * address.\n * @param {object} apiResponse The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * Get the email address of this project's Google Cloud Storage service\n * account.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource}\n *\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project to be billed for this\n * request.\n * @param {GetServiceAccountCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * storage.getServiceAccount(function(err, serviceAccount, apiResponse) {\n * if (!err) {\n * const serviceAccountEmail = serviceAccount.emailAddress;\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getServiceAccount().then(function(data) {\n * const serviceAccountEmail = data[0].emailAddress;\n * const apiResponse = data[1];\n * });\n * ```\n */\n getServiceAccount(optionsOrCallback, cb) {\n const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb);\n this.request({\n uri: `/projects/${this.projectId}/serviceAccount`,\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const camelCaseResponse = {};\n for (const prop in resp) {\n // eslint-disable-next-line no-prototype-builtins\n if (resp.hasOwnProperty(prop)) {\n const camelCaseProp = prop.replace(/_(\\w)/g, (_, match) => match.toUpperCase());\n camelCaseResponse[camelCaseProp] = resp[prop];\n }\n }\n callback(null, camelCaseResponse, resp);\n });\n }\n /**\n * Get a reference to an HmacKey object.\n * Note: this does not fetch the HMAC key's metadata. 
Use HmacKey#get() to\n * retrieve and populate the metadata.\n *\n * To get a reference to an HMAC key that's not created for a service\n * account in the same project used to instantiate the Storage client,\n * supply the project's ID as `projectId` in the `options` argument.\n *\n * @param {string} accessId The HMAC key's access ID.\n * @param {HmacKeyOptions} options HmacKey constructor options.\n * @returns {HmacKey}\n * @see HmacKey\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * ```\n */\n hmacKey(accessId, options) {\n if (!accessId) {\n throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID);\n }\n return new hmacKey_1.HmacKey(this, accessId, options);\n }\n}\nexports.Storage = Storage;\n/**\n * {@link Bucket} class.\n *\n * @name Storage.Bucket\n * @see Bucket\n * @type {Constructor}\n */\nStorage.Bucket = bucket_1.Bucket;\n/**\n * {@link Channel} class.\n *\n * @name Storage.Channel\n * @see Channel\n * @type {Constructor}\n */\nStorage.Channel = channel_1.Channel;\n/**\n * {@link File} class.\n *\n * @name Storage.File\n * @see File\n * @type {Constructor}\n */\nStorage.File = file_1.File;\n/**\n * {@link HmacKey} class.\n *\n * @name Storage.HmacKey\n * @see HmacKey\n * @type {Constructor}\n */\nStorage.HmacKey = hmacKey_1.HmacKey;\nStorage.acl = {\n OWNER_ROLE: 'OWNER',\n READER_ROLE: 'READER',\n WRITER_ROLE: 'WRITER',\n};\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']);\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Storage, {\n exclude: ['bucket', 'channel', 'hmacKey'],\n});\n//# sourceMappingURL=storage.js.map","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. 
All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TransferManager = void 0;\nconst pLimit = require(\"p-limit\");\nconst path = require(\"path\");\nconst extend = require(\"extend\");\nconst fs_1 = require(\"fs\");\nconst crc32c_1 = require(\"./crc32c\");\n/**\n * Default number of concurrently executing promises to use when calling uploadManyFiles.\n * @experimental\n */\nconst DEFAULT_PARALLEL_UPLOAD_LIMIT = 2;\n/**\n * Default number of concurrently executing promises to use when calling downloadManyFiles.\n * @experimental\n */\nconst DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 2;\n/**\n * Default number of concurrently executing promises to use when calling downloadFileInChunks.\n * @experimental\n */\nconst DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 2;\n/**\n * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks.\n * @experimental\n */\nconst DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024;\n/**\n * The chunk size in bytes to use when calling downloadFileInChunks.\n * @experimental\n */\nconst DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 10 * 1024 * 1024;\nconst EMPTY_REGEX = '(?:)';\n/**\n * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket A {@link Bucket} instance\n * @experimental\n */\nclass TransferManager {\n constructor(bucket) {\n this.bucket = bucket;\n }\n /**\n * @typedef {object} UploadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when uploading the files.\n * @property {boolean} [skipIfExists] Do not upload the file if it already exists in\n * the bucket. This will set the precondition ifGenerationMatch = 0.\n * @property {string} [prefix] A prefix to append to all of the uploaded files.\n * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through\n * to each individual upload operation.\n * @experimental\n */\n /**\n * Upload multiple files in parallel to the bucket. 
This is a convenience method\n * that utilizes {@link Bucket#upload} to perform the upload.\n *\n * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name.\n * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list.\n * to be uploaded to the bucket\n * @param {UploadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Upload multiple files in parallel.\n * //-\n * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']);\n * // Your bucket now contains:\n * // - \"local/path/file1.txt\" (with the contents of '/local/path/file1.txt')\n * // - \"local/path/file2.txt\" (with the contents of '/local/path/file2.txt')\n * const response = await transferManager.uploadManyFiles('/local/directory');\n * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure.\n * ```\n * @experimental\n */\n async uploadManyFiles(filePathsOrDirectory, options = {}) {\n var _a;\n if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) {\n options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0;\n }\n else if (options.skipIfExists &&\n options.passthroughOptions === undefined) {\n options.passthroughOptions = {\n preconditionOpts: {\n ifGenerationMatch: 0,\n },\n };\n }\n const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT);\n const promises = [];\n let allPaths = [];\n if (!Array.isArray(filePathsOrDirectory)) {\n for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) {\n allPaths.push(curPath);\n }\n }\n else {\n allPaths = filePathsOrDirectory;\n }\n for (const filePath of allPaths) {\n const stat = await fs_1.promises.lstat(filePath);\n if (stat.isDirectory()) {\n continue;\n }\n const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions);\n passThroughOptionsCopy.destination = filePath;\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.join(options.prefix, passThroughOptionsCopy.destination);\n }\n promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the files.\n * @property {string} [prefix] A prefix to append to all of the downloaded files.\n * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files.\n * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through\n * to each individual download operation.\n * @experimental\n */\n /**\n * Download multiple files in parallel to the local filesystem. This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. 
If\n * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded.\n * @param {DownloadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download multiple files in parallel.\n * //-\n * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles('test-folder');\n * // All files with GCS prefix of 'test-folder' have been downloaded.\n * ```\n * @experimental\n */\n async downloadManyFiles(filesOrFolder, options = {}) {\n const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT);\n const promises = [];\n let files = [];\n if (!Array.isArray(filesOrFolder)) {\n const directoryFiles = await this.bucket.getFiles({\n prefix: filesOrFolder,\n });\n files = directoryFiles[0];\n }\n else {\n files = filesOrFolder.map(curFile => {\n if (typeof curFile === 'string') {\n return this.bucket.file(curFile);\n }\n return curFile;\n });\n }\n const stripRegexString = options.stripPrefix\n ? `^${options.stripPrefix}`\n : EMPTY_REGEX;\n const regex = new RegExp(stripRegexString, 'g');\n for (const file of files) {\n const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions);\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name);\n }\n if (options.stripPrefix) {\n passThroughOptionsCopy.destination = file.name.replace(regex, '');\n }\n promises.push(limit(() => file.download(passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadFileInChunksOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the file.\n * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.\n * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete.\n * @experimental\n */\n /**\n * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {object} [file | string] {@link File} to download.\n * @param {DownloadFileInChunksOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download a large file in chunks utilizing parallel operations.\n * //-\n * const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt');\n * // Your local directory now contains:\n * // - \"large-file.txt\" (with the contents from my-bucket.large-file.txt)\n * ```\n * @experimental\n */\n async downloadFileInChunks(fileOrName, options = {}) {\n let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;\n let limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT);\n const promises = [];\n const file = typeof fileOrName === 'string'\n ? this.bucket.file(fileOrName)\n : fileOrName;\n const fileInfo = await file.get();\n const size = parseInt(fileInfo[0].metadata.size);\n // If the file size does not meet the threshold download it as a single chunk.\n if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) {\n limit = pLimit(1);\n chunkSize = size;\n }\n let start = 0;\n const filePath = options.destination || path.basename(file.name);\n const fileToWrite = await fs_1.promises.open(filePath, 'w+');\n while (start < size) {\n const chunkStart = start;\n let chunkEnd = start + chunkSize - 1;\n chunkEnd = chunkEnd > size ? size : chunkEnd;\n promises.push(limit(() => file.download({ start: chunkStart, end: chunkEnd }).then(resp => {\n return fileToWrite.write(resp[0], 0, resp[0].length, chunkStart);\n })));\n start += chunkSize;\n }\n return new Promise((resolve, reject) => {\n let results;\n Promise.all(promises)\n .then(data => {\n results = data.map(result => result.buffer);\n if (options.validation === 'crc32c') {\n return crc32c_1.CRC32C.fromFile(filePath);\n }\n return;\n })\n .then(() => {\n resolve(results);\n })\n .catch(e => {\n reject(e);\n })\n .finally(() => {\n fileToWrite.close();\n });\n });\n }\n async *getPathsFromDirectory(directory) {\n const filesAndSubdirectories = await fs_1.promises.readdir(directory, {\n withFileTypes: true,\n });\n for (const curFileOrDirectory of filesAndSubdirectories) {\n const fullPath = path.join(directory, curFileOrDirectory.name);\n curFileOrDirectory.isDirectory()\n ? 
yield* this.getPathsFromDirectory(fullPath)\n : yield fullPath;\n }\n }\n}\nexports.TransferManager = TransferManager;\n//# sourceMappingURL=transfer-manager.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PassThroughShim = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0;\nconst querystring = require(\"querystring\");\nconst stream_1 = require(\"stream\");\nfunction normalize(optionsOrCallback, cb) {\n const options = (typeof optionsOrCallback === 'object' ? optionsOrCallback : {});\n const callback = (typeof optionsOrCallback === 'function' ? optionsOrCallback : cb);\n return { options, callback };\n}\nexports.normalize = normalize;\n/**\n * Flatten an object into an Array of arrays, [[key, value], ..].\n * Implements Object.entries() for Node.js <8\n * @internal\n */\nfunction objectEntries(obj) {\n return Object.keys(obj).map(key => [key, obj[key]]);\n}\nexports.objectEntries = objectEntries;\n/**\n * Encode `str` with encodeURIComponent, plus these\n * reserved characters: `! * ' ( )`.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent}\n *\n * @param {string} str The URI component to encode.\n * @return {string} The encoded string.\n */\nfunction fixedEncodeURIComponent(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase());\n}\nexports.fixedEncodeURIComponent = fixedEncodeURIComponent;\n/**\n * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent.\n *\n * Encode every byte except `A-Z a-Z 0-9 ~ - . _`.\n *\n * @param {string} uri The URI to encode.\n * @param [boolean=false] encodeSlash If `true`, the \"/\" character is not encoded.\n * @return {string} The encoded string.\n */\nfunction encodeURI(uri, encodeSlash) {\n // Split the string by `/`, and conditionally rejoin them with either\n // %2F if encodeSlash is `true`, or '/' if `false`.\n return uri\n .split('/')\n .map(fixedEncodeURIComponent)\n .join(encodeSlash ? 
'%2F' : '/');\n}\nexports.encodeURI = encodeURI;\n/**\n * Serialize an object to a URL query string using util.encodeURI(uri, true).\n * @param {string} url The object to serialize.\n * @return {string} Serialized string.\n */\nfunction qsStringify(qs) {\n return querystring.stringify(qs, '&', '=', {\n encodeURIComponent: (component) => encodeURI(component, true),\n });\n}\nexports.qsStringify = qsStringify;\nfunction objectKeyToLowercase(object) {\n const newObj = {};\n for (let key of Object.keys(object)) {\n const value = object[key];\n key = key.toLowerCase();\n newObj[key] = value;\n }\n return newObj;\n}\nexports.objectKeyToLowercase = objectKeyToLowercase;\n/**\n * JSON encode str, with unicode \\u+ representation.\n * @param {object} obj The object to encode.\n * @return {string} Serialized string.\n */\nfunction unicodeJSONStringify(obj) {\n return JSON.stringify(obj).replace(/[\\u0080-\\uFFFF]/g, (char) => '\\\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4));\n}\nexports.unicodeJSONStringify = unicodeJSONStringify;\n/**\n * Converts the given objects keys to snake_case\n * @param {object} obj object to convert keys to snake case.\n * @returns {object} object with keys converted to snake case.\n */\nfunction convertObjKeysToSnakeCase(obj) {\n if (obj instanceof Date || obj instanceof RegExp) {\n return obj;\n }\n if (Array.isArray(obj)) {\n return obj.map(convertObjKeysToSnakeCase);\n }\n if (obj instanceof Object) {\n return Object.keys(obj).reduce((acc, cur) => {\n const s = cur[0].toLocaleLowerCase() +\n cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => {\n return `_${p1.toLowerCase()}`;\n });\n acc[s] = convertObjKeysToSnakeCase(obj[cur]);\n return acc;\n }, Object());\n }\n return obj;\n}\nexports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase;\n/**\n * Formats the provided date object as a UTC ISO string.\n * @param {Date} dateTimeToFormat date object to be formatted.\n * @param {boolean} includeTime flag to include hours, minutes, seconds in output.\n * @param {string} dateDelimiter delimiter between date components.\n * @param {string} timeDelimiter delimiter between time components.\n * @returns {string} UTC ISO format of provided date obect.\n */\nfunction formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') {\n const year = dateTimeToFormat.getUTCFullYear();\n const month = dateTimeToFormat.getUTCMonth() + 1;\n const day = dateTimeToFormat.getUTCDate();\n const hour = dateTimeToFormat.getUTCHours();\n const minute = dateTimeToFormat.getUTCMinutes();\n const second = dateTimeToFormat.getUTCSeconds();\n let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month\n .toString()\n .padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`;\n if (includeTime) {\n resultString = `${resultString}T${hour\n .toString()\n .padStart(2, '0')}${timeDelimiter}${minute\n .toString()\n .padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`;\n }\n return resultString;\n}\nexports.formatAsUTCISO = formatAsUTCISO;\nclass PassThroughShim extends stream_1.PassThrough {\n constructor() {\n super(...arguments);\n this.shouldEmitReading = true;\n this.shouldEmitWriting = true;\n }\n _read(size) {\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n super._read(size);\n }\n _write(chunk, encoding, callback) {\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n // Per the nodejs documention, callback must 
be invoked on the next tick\n process.nextTick(() => {\n super._write(chunk, encoding, callback);\n });\n }\n _final(callback) {\n // If the stream is empty (i.e. empty file) final will be invoked before _read / _write\n // and we should still emit the proper events.\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n callback(null);\n }\n}\nexports.PassThroughShim = PassThroughShim;\n//# sourceMappingURL=util.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? \"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = 
new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.21.3\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n enumerableOnly && (symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n })), keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = null != arguments[i] ? arguments[i] : {};\n i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/audit-log\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /enterprises/{enterprise}/settings/billing/advanced-security\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/audit-log\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET 
/orgs/{org}/external-groups\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/settings/billing/advanced-security\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", 
\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET 
/users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n 
enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForEnterprise: [\"GET /enterprises/{enterprise}/actions/cache/usage\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET 
/orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubAdvancedSecurityBillingGhe: [\"GET /enterprises/{enterprise}/settings/billing/advanced-security\"],\n getGithubAdvancedSecurityBillingOrg: [\"GET /orgs/{org}/settings/billing/advanced-security\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET 
/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST 
/user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n getServerStatistics: [\"GET /enterprise-installation/{enterprise_or_org}/server-statistics\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: 
[\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE 
/repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n 
listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n 
updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n 
createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n 
reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n 
compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n 
downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n 
renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.16.2\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n 
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n 
Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction once(emitter, name, { signal } = {}) {\n return new Promise((resolve, reject) => {\n function cleanup() {\n signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);\n emitter.removeListener(name, onEvent);\n emitter.removeListener('error', onError);\n }\n function onEvent(...args) {\n cleanup();\n resolve(args);\n }\n function onError(err) {\n cleanup();\n reject(err);\n }\n signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup);\n emitter.on(name, onEvent);\n emitter.on('error', onError);\n });\n}\nexports.default = once;\n//# sourceMappingURL=index.js.map","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? 
\"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? \"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nconst events_1 = require(\"events\");\nconst debug_1 = __importDefault(require(\"debug\"));\nconst promisify_1 = __importDefault(require(\"./promisify\"));\nconst debug = debug_1.default('agent-base');\nfunction isAgent(v) {\n return Boolean(v) && typeof v.addRequest === 'function';\n}\nfunction isSecureEndpoint() {\n const { stack } = new Error();\n if (typeof stack !== 'string')\n return false;\n return stack.split('\\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1);\n}\nfunction createAgent(callback, opts) {\n return new createAgent.Agent(callback, opts);\n}\n(function (createAgent) {\n /**\n * Base `http.Agent` implementation.\n * No pooling/keep-alive is implemented by default.\n *\n * @param {Function} callback\n * @api public\n */\n class Agent extends events_1.EventEmitter {\n constructor(callback, _opts) {\n super();\n let opts = _opts;\n if (typeof callback === 'function') {\n this.callback = callback;\n }\n else if (callback) {\n opts = callback;\n }\n // Timeout for the socket to be returned from the callback\n this.timeout = null;\n if (opts && typeof opts.timeout === 'number') {\n this.timeout = opts.timeout;\n }\n // These aren't actually used by `agent-base`, but are required\n // for the TypeScript definition files in `@types/node` :/\n this.maxFreeSockets = 1;\n this.maxSockets = 1;\n this.maxTotalSockets = Infinity;\n this.sockets = {};\n this.freeSockets = {};\n this.requests = {};\n this.options = {};\n }\n get defaultPort() {\n if (typeof this.explicitDefaultPort === 'number') {\n return this.explicitDefaultPort;\n }\n return isSecureEndpoint() ? 443 : 80;\n }\n set defaultPort(v) {\n this.explicitDefaultPort = v;\n }\n get protocol() {\n if (typeof this.explicitProtocol === 'string') {\n return this.explicitProtocol;\n }\n return isSecureEndpoint() ? 'https:' : 'http:';\n }\n set protocol(v) {\n this.explicitProtocol = v;\n }\n callback(req, opts, fn) {\n throw new Error('\"agent-base\" has no default implementation, you must subclass and override `callback()`');\n }\n /**\n * Called by node-core's \"_http_client.js\" module when creating\n * a new HTTP request with this Agent instance.\n *\n * @api public\n */\n addRequest(req, _opts) {\n const opts = Object.assign({}, _opts);\n if (typeof opts.secureEndpoint !== 'boolean') {\n opts.secureEndpoint = isSecureEndpoint();\n }\n if (opts.host == null) {\n opts.host = 'localhost';\n }\n if (opts.port == null) {\n opts.port = opts.secureEndpoint ? 443 : 80;\n }\n if (opts.protocol == null) {\n opts.protocol = opts.secureEndpoint ? 'https:' : 'http:';\n }\n if (opts.host && opts.path) {\n // If both a `host` and `path` are specified then it's most\n // likely the result of a `url.parse()` call... we need to\n // remove the `path` portion so that `net.connect()` doesn't\n // attempt to open that as a unix socket file.\n delete opts.path;\n }\n delete opts.agent;\n delete opts.hostname;\n delete opts._defaultAgent;\n delete opts.defaultPort;\n delete opts.createConnection;\n // Hint to use \"Connection: close\"\n // XXX: non-documented `http` module API :(\n req._last = true;\n req.shouldKeepAlive = false;\n let timedOut = false;\n let timeoutId = null;\n const timeoutMs = opts.timeout || this.timeout;\n const onerror = (err) => {\n if (req._hadError)\n return;\n req.emit('error', err);\n // For Safety. 
Some additional errors might fire later on\n // and we need to make sure we don't double-fire the error event.\n req._hadError = true;\n };\n const ontimeout = () => {\n timeoutId = null;\n timedOut = true;\n const err = new Error(`A \"socket\" was not created for HTTP request before ${timeoutMs}ms`);\n err.code = 'ETIMEOUT';\n onerror(err);\n };\n const callbackError = (err) => {\n if (timedOut)\n return;\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n onerror(err);\n };\n const onsocket = (socket) => {\n if (timedOut)\n return;\n if (timeoutId != null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n if (isAgent(socket)) {\n // `socket` is actually an `http.Agent` instance, so\n // relinquish responsibility for this `req` to the Agent\n // from here on\n debug('Callback returned another Agent instance %o', socket.constructor.name);\n socket.addRequest(req, opts);\n return;\n }\n if (socket) {\n socket.once('free', () => {\n this.freeSocket(socket, opts);\n });\n req.onSocket(socket);\n return;\n }\n const err = new Error(`no Duplex stream was returned to agent-base for \\`${req.method} ${req.path}\\``);\n onerror(err);\n };\n if (typeof this.callback !== 'function') {\n onerror(new Error('`callback` is not defined'));\n return;\n }\n if (!this.promisifiedCallback) {\n if (this.callback.length >= 3) {\n debug('Converting legacy callback function to promise');\n this.promisifiedCallback = promisify_1.default(this.callback);\n }\n else {\n this.promisifiedCallback = this.callback;\n }\n }\n if (typeof timeoutMs === 'number' && timeoutMs > 0) {\n timeoutId = setTimeout(ontimeout, timeoutMs);\n }\n if ('port' in opts && typeof opts.port !== 'number') {\n opts.port = Number(opts.port);\n }\n try {\n debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`);\n Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError);\n }\n catch (err) {\n Promise.reject(err).catch(callbackError);\n }\n }\n freeSocket(socket, opts) {\n debug('Freeing socket %o %o', socket.constructor.name, opts);\n socket.destroy();\n }\n destroy() {\n debug('Destroying agent %o', this.constructor.name);\n }\n }\n createAgent.Agent = Agent;\n // So that `instanceof` works correctly\n createAgent.prototype = createAgent.Agent.prototype;\n})(createAgent || (createAgent = {}));\nmodule.exports = createAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction promisify(fn) {\n return function (req, opts) {\n return new Promise((resolve, reject) => {\n fn.call(this, req, opts, (err, rtn) => {\n if (err) {\n reject(err);\n }\n else {\n resolve(rtn);\n }\n });\n });\n };\n}\nexports.default = promisify;\n//# sourceMappingURL=promisify.js.map","'use strict';\n\nconst arrify = value => {\n\tif (value === null || value === undefined) {\n\t\treturn [];\n\t}\n\n\tif (Array.isArray(value)) {\n\t\treturn value;\n\t}\n\n\tif (typeof value === 'string') {\n\t\treturn [value];\n\t}\n\n\tif (typeof value[Symbol.iterator] === 'function') {\n\t\treturn [...value];\n\t}\n\n\treturn [value];\n};\n\nmodule.exports = arrify;\n","// Packages\nvar retrier = require('retry');\n\nfunction retry(fn, opts) {\n function run(resolve, reject) {\n var options = opts || {};\n var op;\n\n // Default `randomize` to true\n if (!('randomize' in options)) {\n options.randomize = true;\n }\n\n op = retrier.operation(options);\n\n // We allow the user to abort retrying\n // this makes sense in the 
cases where\n // knowledge is obtained that retrying\n // would be futile (e.g.: auth errors)\n\n function bail(err) {\n reject(err || new Error('Aborted'));\n }\n\n function onError(err, num) {\n if (err.bail) {\n bail(err);\n return;\n }\n\n if (!op.retry(err)) {\n reject(op.mainError());\n } else if (options.onRetry) {\n options.onRetry(err, num);\n }\n }\n\n function runAttempt(num) {\n var val;\n\n try {\n val = fn(bail, num);\n } catch (err) {\n onError(err, num);\n return;\n }\n\n Promise.resolve(val)\n .then(resolve)\n .catch(function catchIt(err) {\n onError(err, num);\n });\n }\n\n op.attempt(runAttempt);\n }\n\n return new Promise(run);\n}\n\nmodule.exports = retry;\n","'use strict'\n\nexports.byteLength = byteLength\nexports.toByteArray = toByteArray\nexports.fromByteArray = fromByteArray\n\nvar lookup = []\nvar revLookup = []\nvar Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array\n\nvar code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\nfor (var i = 0, len = code.length; i < len; ++i) {\n lookup[i] = code[i]\n revLookup[code.charCodeAt(i)] = i\n}\n\n// Support decoding URL-safe base64 strings, as Node.js does.\n// See: https://en.wikipedia.org/wiki/Base64#URL_applications\nrevLookup['-'.charCodeAt(0)] = 62\nrevLookup['_'.charCodeAt(0)] = 63\n\nfunction getLens (b64) {\n var len = b64.length\n\n if (len % 4 > 0) {\n throw new Error('Invalid string. Length must be a multiple of 4')\n }\n\n // Trim off extra bytes after placeholder bytes are found\n // See: https://github.com/beatgammit/base64-js/issues/42\n var validLen = b64.indexOf('=')\n if (validLen === -1) validLen = len\n\n var placeHoldersLen = validLen === len\n ? 0\n : 4 - (validLen % 4)\n\n return [validLen, placeHoldersLen]\n}\n\n// base64 is 4/3 + up to two characters of the original data\nfunction byteLength (b64) {\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction _byteLength (b64, validLen, placeHoldersLen) {\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction toByteArray (b64) {\n var tmp\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n\n var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))\n\n var curByte = 0\n\n // if there are placeholders, only get up to the last complete 4 chars\n var len = placeHoldersLen > 0\n ? 
validLen - 4\n : validLen\n\n var i\n for (i = 0; i < len; i += 4) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 18) |\n (revLookup[b64.charCodeAt(i + 1)] << 12) |\n (revLookup[b64.charCodeAt(i + 2)] << 6) |\n revLookup[b64.charCodeAt(i + 3)]\n arr[curByte++] = (tmp >> 16) & 0xFF\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 2) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 2) |\n (revLookup[b64.charCodeAt(i + 1)] >> 4)\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 1) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 10) |\n (revLookup[b64.charCodeAt(i + 1)] << 4) |\n (revLookup[b64.charCodeAt(i + 2)] >> 2)\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n return arr\n}\n\nfunction tripletToBase64 (num) {\n return lookup[num >> 18 & 0x3F] +\n lookup[num >> 12 & 0x3F] +\n lookup[num >> 6 & 0x3F] +\n lookup[num & 0x3F]\n}\n\nfunction encodeChunk (uint8, start, end) {\n var tmp\n var output = []\n for (var i = start; i < end; i += 3) {\n tmp =\n ((uint8[i] << 16) & 0xFF0000) +\n ((uint8[i + 1] << 8) & 0xFF00) +\n (uint8[i + 2] & 0xFF)\n output.push(tripletToBase64(tmp))\n }\n return output.join('')\n}\n\nfunction fromByteArray (uint8) {\n var tmp\n var len = uint8.length\n var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes\n var parts = []\n var maxChunkLength = 16383 // must be multiple of 3\n\n // go through the array every three bytes, we'll deal with trailing stuff later\n for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {\n parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))\n }\n\n // pad the end with zeros, but make sure to not forget the extra bytes\n if (extraBytes === 1) {\n tmp = uint8[len - 1]\n parts.push(\n lookup[tmp >> 2] +\n lookup[(tmp << 4) & 0x3F] +\n '=='\n )\n } else if (extraBytes === 2) {\n tmp = (uint8[len - 2] << 8) + uint8[len - 1]\n parts.push(\n lookup[tmp >> 10] +\n lookup[(tmp >> 4) & 0x3F] +\n lookup[(tmp << 2) & 0x3F] +\n '='\n )\n }\n\n return parts.join('')\n}\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n",";(function (globalObject) {\r\n 'use strict';\r\n\r\n/*\r\n * bignumber.js v9.1.1\r\n * A JavaScript library for arbitrary-precision arithmetic.\r\n * https://github.com/MikeMcl/bignumber.js\r\n * Copyright (c) 2022 Michael Mclaughlin \r\n * MIT Licensed.\r\n *\r\n * BigNumber.prototype methods | BigNumber methods\r\n * |\r\n * absoluteValue abs | clone\r\n * comparedTo | config set\r\n * decimalPlaces dp | DECIMAL_PLACES\r\n * dividedBy div | ROUNDING_MODE\r\n * dividedToIntegerBy idiv | EXPONENTIAL_AT\r\n * exponentiatedBy pow | RANGE\r\n * integerValue | CRYPTO\r\n * isEqualTo eq | MODULO_MODE\r\n * isFinite | POW_PRECISION\r\n * isGreaterThan gt | FORMAT\r\n * isGreaterThanOrEqualTo gte | ALPHABET\r\n * isInteger | isBigNumber\r\n * isLessThan lt | maximum max\r\n * isLessThanOrEqualTo lte | minimum min\r\n * isNaN | random\r\n * isNegative | sum\r\n * isPositive |\r\n * isZero |\r\n * minus |\r\n * modulo mod |\r\n * multipliedBy times |\r\n * negated |\r\n * plus |\r\n * precision sd |\r\n * shiftedBy |\r\n * squareRoot sqrt |\r\n * toExponential |\r\n * toFixed |\r\n * toFormat |\r\n * toFraction |\r\n * toJSON |\r\n * toNumber |\r\n * toPrecision |\r\n * toString |\r\n * valueOf |\r\n *\r\n 
*/\r\n\r\n\r\n var BigNumber,\r\n isNumeric = /^-?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:e[+-]?\\d+)?$/i,\r\n mathceil = Math.ceil,\r\n mathfloor = Math.floor,\r\n\r\n bignumberError = '[BigNumber Error] ',\r\n tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ',\r\n\r\n BASE = 1e14,\r\n LOG_BASE = 14,\r\n MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1\r\n // MAX_INT32 = 0x7fffffff, // 2^31 - 1\r\n POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13],\r\n SQRT_BASE = 1e7,\r\n\r\n // EDITABLE\r\n // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and\r\n // the arguments to toExponential, toFixed, toFormat, and toPrecision.\r\n MAX = 1E9; // 0 to MAX_INT32\r\n\r\n\r\n /*\r\n * Create and return a BigNumber constructor.\r\n */\r\n function clone(configObject) {\r\n var div, convertBase, parseNumeric,\r\n P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null },\r\n ONE = new BigNumber(1),\r\n\r\n\r\n //----------------------------- EDITABLE CONFIG DEFAULTS -------------------------------\r\n\r\n\r\n // The default values below must be integers within the inclusive ranges stated.\r\n // The values can also be changed at run-time using BigNumber.set.\r\n\r\n // The maximum number of decimal places for operations involving division.\r\n DECIMAL_PLACES = 20, // 0 to MAX\r\n\r\n // The rounding mode used when rounding to the above decimal places, and when using\r\n // toExponential, toFixed, toFormat and toPrecision, and round (default value).\r\n // UP 0 Away from zero.\r\n // DOWN 1 Towards zero.\r\n // CEIL 2 Towards +Infinity.\r\n // FLOOR 3 Towards -Infinity.\r\n // HALF_UP 4 Towards nearest neighbour. If equidistant, up.\r\n // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down.\r\n // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour.\r\n // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity.\r\n // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity.\r\n ROUNDING_MODE = 4, // 0 to 8\r\n\r\n // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS]\r\n\r\n // The exponent value at and beneath which toString returns exponential notation.\r\n // Number type: -7\r\n TO_EXP_NEG = -7, // 0 to -MAX\r\n\r\n // The exponent value at and above which toString returns exponential notation.\r\n // Number type: 21\r\n TO_EXP_POS = 21, // 0 to MAX\r\n\r\n // RANGE : [MIN_EXP, MAX_EXP]\r\n\r\n // The minimum exponent value, beneath which underflow to zero occurs.\r\n // Number type: -324 (5e-324)\r\n MIN_EXP = -1e7, // -1 to -MAX\r\n\r\n // The maximum exponent value, above which overflow to Infinity occurs.\r\n // Number type: 308 (1.7976931348623157e+308)\r\n // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow.\r\n MAX_EXP = 1e7, // 1 to MAX\r\n\r\n // Whether to use cryptographically-secure random number generation, if available.\r\n CRYPTO = false, // true or false\r\n\r\n // The modulo mode used when calculating the modulus: a mod n.\r\n // The quotient (q = a / n) is calculated according to the corresponding rounding mode.\r\n // The remainder (r) is calculated as: r = a - n * q.\r\n //\r\n // UP 0 The remainder is positive if the dividend is negative, else is negative.\r\n // DOWN 1 The remainder has the same sign as the dividend.\r\n // This modulo mode is commonly known as 'truncated division' and is\r\n // equivalent to (a % n) in JavaScript.\r\n // FLOOR 3 The remainder has the same sign as the divisor (Python %).\r\n // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function.\r\n // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)).\r\n // The remainder is always positive.\r\n //\r\n // The truncated division, floored division, Euclidian division and IEEE 754 remainder\r\n // modes are commonly used for the modulus operation.\r\n // Although the other rounding modes can also be used, they may not give useful results.\r\n MODULO_MODE = 1, // 0 to 9\r\n\r\n // The maximum number of significant digits of the result of the exponentiatedBy operation.\r\n // If POW_PRECISION is 0, there will be unlimited significant digits.\r\n POW_PRECISION = 0, // 0 to MAX\r\n\r\n // The format specification used by the BigNumber.prototype.toFormat method.\r\n FORMAT = {\r\n prefix: '',\r\n groupSize: 3,\r\n secondaryGroupSize: 0,\r\n groupSeparator: ',',\r\n decimalSeparator: '.',\r\n fractionGroupSize: 0,\r\n fractionGroupSeparator: '\\xA0', // non-breaking space\r\n suffix: ''\r\n },\r\n\r\n // The alphabet used for base conversion. It must be at least 2 characters long, with no '+',\r\n // '-', '.', whitespace, or repeated character.\r\n // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_'\r\n ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz',\r\n alphabetHasNormalDecimalDigits = true;\r\n\r\n\r\n //------------------------------------------------------------------------------------------\r\n\r\n\r\n // CONSTRUCTOR\r\n\r\n\r\n /*\r\n * The BigNumber constructor and exported function.\r\n * Create and return a new instance of a BigNumber object.\r\n *\r\n * v {number|string|BigNumber} A numeric value.\r\n * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive.\r\n */\r\n function BigNumber(v, b) {\r\n var alphabet, c, caseChanged, e, i, isNum, len, str,\r\n x = this;\r\n\r\n // Enable constructor call without `new`.\r\n if (!(x instanceof BigNumber)) return new BigNumber(v, b);\r\n\r\n if (b == null) {\r\n\r\n if (v && v._isBigNumber === true) {\r\n x.s = v.s;\r\n\r\n if (!v.c || v.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else if (v.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = v.e;\r\n x.c = v.c.slice();\r\n }\r\n\r\n return;\r\n }\r\n\r\n if ((isNum = typeof v == 'number') && v * 0 == 0) {\r\n\r\n // Use `1 / n` to handle minus zero also.\r\n x.s = 1 / v < 0 ? (v = -v, -1) : 1;\r\n\r\n // Fast path for integers, where n < 2147483648 (2**31).\r\n if (v === ~~v) {\r\n for (e = 0, i = v; i >= 10; i /= 10, e++);\r\n\r\n if (e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else {\r\n x.e = e;\r\n x.c = [v];\r\n }\r\n\r\n return;\r\n }\r\n\r\n str = String(v);\r\n } else {\r\n\r\n if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum);\r\n\r\n x.s = str.charCodeAt(0) == 45 ? 
(str = str.slice(1), -1) : 1;\r\n }\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n\r\n // Exponential form?\r\n if ((i = str.search(/e/i)) > 0) {\r\n\r\n // Determine exponent.\r\n if (e < 0) e = i;\r\n e += +str.slice(i + 1);\r\n str = str.substring(0, i);\r\n } else if (e < 0) {\r\n\r\n // Integer.\r\n e = str.length;\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n\r\n // Allow exponential notation to be used with base 10 argument, while\r\n // also rounding to DECIMAL_PLACES as with other bases.\r\n if (b == 10 && alphabetHasNormalDecimalDigits) {\r\n x = new BigNumber(v);\r\n return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE);\r\n }\r\n\r\n str = String(v);\r\n\r\n if (isNum = typeof v == 'number') {\r\n\r\n // Avoid potential interpretation of Infinity and NaN as base 44+ values.\r\n if (v * 0 != 0) return parseNumeric(x, str, isNum, b);\r\n\r\n x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (BigNumber.DEBUG && str.replace(/^0\\.0*|\\./, '').length > 15) {\r\n throw Error\r\n (tooManyDigits + v);\r\n }\r\n } else {\r\n x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1;\r\n }\r\n\r\n alphabet = ALPHABET.slice(0, b);\r\n e = i = 0;\r\n\r\n // Check that str is a valid base b number.\r\n // Don't use RegExp, so alphabet can contain special characters.\r\n for (len = str.length; i < len; i++) {\r\n if (alphabet.indexOf(c = str.charAt(i)) < 0) {\r\n if (c == '.') {\r\n\r\n // If '.' is not the first character and it has not be found before.\r\n if (i > e) {\r\n e = len;\r\n continue;\r\n }\r\n } else if (!caseChanged) {\r\n\r\n // Allow e.g. 
hexadecimal 'FF' as well as 'ff'.\r\n if (str == str.toUpperCase() && (str = str.toLowerCase()) ||\r\n str == str.toLowerCase() && (str = str.toUpperCase())) {\r\n caseChanged = true;\r\n i = -1;\r\n e = 0;\r\n continue;\r\n }\r\n }\r\n\r\n return parseNumeric(x, String(v), isNum, b);\r\n }\r\n }\r\n\r\n // Prevent later check for length on converted number.\r\n isNum = false;\r\n str = convertBase(str, b, 10, x.s);\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n else e = str.length;\r\n }\r\n\r\n // Determine leading zeros.\r\n for (i = 0; str.charCodeAt(i) === 48; i++);\r\n\r\n // Determine trailing zeros.\r\n for (len = str.length; str.charCodeAt(--len) === 48;);\r\n\r\n if (str = str.slice(i, ++len)) {\r\n len -= i;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (isNum && BigNumber.DEBUG &&\r\n len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) {\r\n throw Error\r\n (tooManyDigits + (x.s * v));\r\n }\r\n\r\n // Overflow?\r\n if ((e = e - i - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n x.c = x.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = e;\r\n x.c = [];\r\n\r\n // Transform base\r\n\r\n // e is the base 10 exponent.\r\n // i is where to slice str to get the first element of the coefficient array.\r\n i = (e + 1) % LOG_BASE;\r\n if (e < 0) i += LOG_BASE; // i < 1\r\n\r\n if (i < len) {\r\n if (i) x.c.push(+str.slice(0, i));\r\n\r\n for (len -= LOG_BASE; i < len;) {\r\n x.c.push(+str.slice(i, i += LOG_BASE));\r\n }\r\n\r\n i = LOG_BASE - (str = str.slice(i)).length;\r\n } else {\r\n i -= len;\r\n }\r\n\r\n for (; i--; str += '0');\r\n x.c.push(+str);\r\n }\r\n } else {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n\r\n // CONSTRUCTOR PROPERTIES\r\n\r\n\r\n BigNumber.clone = clone;\r\n\r\n BigNumber.ROUND_UP = 0;\r\n BigNumber.ROUND_DOWN = 1;\r\n BigNumber.ROUND_CEIL = 2;\r\n BigNumber.ROUND_FLOOR = 3;\r\n BigNumber.ROUND_HALF_UP = 4;\r\n BigNumber.ROUND_HALF_DOWN = 5;\r\n BigNumber.ROUND_HALF_EVEN = 6;\r\n BigNumber.ROUND_HALF_CEIL = 7;\r\n BigNumber.ROUND_HALF_FLOOR = 8;\r\n BigNumber.EUCLID = 9;\r\n\r\n\r\n /*\r\n * Configure infrequently-changing library-wide settings.\r\n *\r\n * Accept an object with the following optional properties (if the value of a property is\r\n * a number, it must be an integer within the inclusive range stated):\r\n *\r\n * DECIMAL_PLACES {number} 0 to MAX\r\n * ROUNDING_MODE {number} 0 to 8\r\n * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX]\r\n * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX]\r\n * CRYPTO {boolean} true or false\r\n * MODULO_MODE {number} 0 to 9\r\n * POW_PRECISION {number} 0 to MAX\r\n * ALPHABET {string} A string of two or more unique characters which does\r\n * not contain '.'.\r\n * FORMAT {object} An object with some of the following properties:\r\n * prefix {string}\r\n * groupSize {number}\r\n * secondaryGroupSize {number}\r\n * groupSeparator {string}\r\n * decimalSeparator {string}\r\n * fractionGroupSize {number}\r\n * fractionGroupSeparator {string}\r\n * suffix {string}\r\n *\r\n * (The values assigned to the above FORMAT object properties are not checked for validity.)\r\n *\r\n * E.g.\r\n * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 })\r\n *\r\n * Ignore properties/parameters set to null or undefined, except for ALPHABET.\r\n *\r\n * Return an object with the properties 
current values.\r\n */\r\n BigNumber.config = BigNumber.set = function (obj) {\r\n var p, v;\r\n\r\n if (obj != null) {\r\n\r\n if (typeof obj == 'object') {\r\n\r\n // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n DECIMAL_PLACES = v;\r\n }\r\n\r\n // ROUNDING_MODE {number} Integer, 0 to 8 inclusive.\r\n // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 8, p);\r\n ROUNDING_MODE = v;\r\n }\r\n\r\n // EXPONENTIAL_AT {number|number[]}\r\n // Integer, -MAX to MAX inclusive or\r\n // [integer -MAX to 0 inclusive, 0 to MAX inclusive].\r\n // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, 0, p);\r\n intCheck(v[1], 0, MAX, p);\r\n TO_EXP_NEG = v[0];\r\n TO_EXP_POS = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v);\r\n }\r\n }\r\n\r\n // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or\r\n // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive].\r\n // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}'\r\n if (obj.hasOwnProperty(p = 'RANGE')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, -1, p);\r\n intCheck(v[1], 1, MAX, p);\r\n MIN_EXP = v[0];\r\n MAX_EXP = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n if (v) {\r\n MIN_EXP = -(MAX_EXP = v < 0 ? 
-v : v);\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' cannot be zero: ' + v);\r\n }\r\n }\r\n }\r\n\r\n // CRYPTO {boolean} true or false.\r\n // '[BigNumber Error] CRYPTO not true or false: {v}'\r\n // '[BigNumber Error] crypto unavailable'\r\n if (obj.hasOwnProperty(p = 'CRYPTO')) {\r\n v = obj[p];\r\n if (v === !!v) {\r\n if (v) {\r\n if (typeof crypto != 'undefined' && crypto &&\r\n (crypto.getRandomValues || crypto.randomBytes)) {\r\n CRYPTO = v;\r\n } else {\r\n CRYPTO = !v;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n } else {\r\n CRYPTO = v;\r\n }\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' not true or false: ' + v);\r\n }\r\n }\r\n\r\n // MODULO_MODE {number} Integer, 0 to 9 inclusive.\r\n // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'MODULO_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 9, p);\r\n MODULO_MODE = v;\r\n }\r\n\r\n // POW_PRECISION {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'POW_PRECISION')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n POW_PRECISION = v;\r\n }\r\n\r\n // FORMAT {object}\r\n // '[BigNumber Error] FORMAT not an object: {v}'\r\n if (obj.hasOwnProperty(p = 'FORMAT')) {\r\n v = obj[p];\r\n if (typeof v == 'object') FORMAT = v;\r\n else throw Error\r\n (bignumberError + p + ' not an object: ' + v);\r\n }\r\n\r\n // ALPHABET {string}\r\n // '[BigNumber Error] ALPHABET invalid: {v}'\r\n if (obj.hasOwnProperty(p = 'ALPHABET')) {\r\n v = obj[p];\r\n\r\n // Disallow if less than two characters,\r\n // or if it contains '+', '-', '.', whitespace, or a repeated character.\r\n if (typeof v == 'string' && !/^.?$|[+\\-.\\s]|(.).*\\1/.test(v)) {\r\n alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789';\r\n ALPHABET = v;\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' invalid: ' + v);\r\n }\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Object expected: {v}'\r\n throw Error\r\n (bignumberError + 'Object expected: ' + obj);\r\n }\r\n }\r\n\r\n return {\r\n DECIMAL_PLACES: DECIMAL_PLACES,\r\n ROUNDING_MODE: ROUNDING_MODE,\r\n EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS],\r\n RANGE: [MIN_EXP, MAX_EXP],\r\n CRYPTO: CRYPTO,\r\n MODULO_MODE: MODULO_MODE,\r\n POW_PRECISION: POW_PRECISION,\r\n FORMAT: FORMAT,\r\n ALPHABET: ALPHABET\r\n };\r\n };\r\n\r\n\r\n /*\r\n * Return true if v is a BigNumber instance, otherwise return false.\r\n *\r\n * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed.\r\n *\r\n * v {any}\r\n *\r\n * '[BigNumber Error] Invalid BigNumber: {v}'\r\n */\r\n BigNumber.isBigNumber = function (v) {\r\n if (!v || v._isBigNumber !== true) return false;\r\n if (!BigNumber.DEBUG) return true;\r\n\r\n var i, n,\r\n c = v.c,\r\n e = v.e,\r\n s = v.s;\r\n\r\n out: if ({}.toString.call(c) == '[object Array]') {\r\n\r\n if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) {\r\n\r\n // If the first element is zero, the BigNumber value must be zero.\r\n if (c[0] === 0) {\r\n if (e === 0 && c.length === 1) return true;\r\n break out;\r\n }\r\n\r\n // Calculate number of digits that c[0] should have, based on the exponent.\r\n i = (e + 1) % LOG_BASE;\r\n if (i < 1) i += LOG_BASE;\r\n\r\n // Calculate number of digits of c[0].\r\n //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) {\r\n if (String(c[0]).length == i) {\r\n\r\n for (i = 0; i < 
c.length; i++) {\r\n n = c[i];\r\n if (n < 0 || n >= BASE || n !== mathfloor(n)) break out;\r\n }\r\n\r\n // Last element cannot be zero, unless it is the only element.\r\n if (n !== 0) return true;\r\n }\r\n }\r\n\r\n // Infinity/NaN\r\n } else if (c === null && e === null && (s === null || s === 1 || s === -1)) {\r\n return true;\r\n }\r\n\r\n throw Error\r\n (bignumberError + 'Invalid BigNumber: ' + v);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the maximum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.maximum = BigNumber.max = function () {\r\n return maxOrMin(arguments, P.lt);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the minimum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.minimum = BigNumber.min = function () {\r\n return maxOrMin(arguments, P.gt);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber with a random value equal to or greater than 0 and less than 1,\r\n * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing\r\n * zeros are produced).\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}'\r\n * '[BigNumber Error] crypto unavailable'\r\n */\r\n BigNumber.random = (function () {\r\n var pow2_53 = 0x20000000000000;\r\n\r\n // Return a 53 bit integer n, where 0 <= n < 9007199254740992.\r\n // Check if Math.random() produces more than 32 bits of randomness.\r\n // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits.\r\n // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1.\r\n var random53bitInt = (Math.random() * pow2_53) & 0x1fffff\r\n ? function () { return mathfloor(Math.random() * pow2_53); }\r\n : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) +\r\n (Math.random() * 0x800000 | 0); };\r\n\r\n return function (dp) {\r\n var a, b, e, k, v,\r\n i = 0,\r\n c = [],\r\n rand = new BigNumber(ONE);\r\n\r\n if (dp == null) dp = DECIMAL_PLACES;\r\n else intCheck(dp, 0, MAX);\r\n\r\n k = mathceil(dp / LOG_BASE);\r\n\r\n if (CRYPTO) {\r\n\r\n // Browsers supporting crypto.getRandomValues.\r\n if (crypto.getRandomValues) {\r\n\r\n a = crypto.getRandomValues(new Uint32Array(k *= 2));\r\n\r\n for (; i < k;) {\r\n\r\n // 53 bits:\r\n // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2)\r\n // 11111 11111111 11111111 11111111 11100000 00000000 00000000\r\n // ((Math.pow(2, 32) - 1) >>> 11).toString(2)\r\n // 11111 11111111 11111111\r\n // 0x20000 is 2^21.\r\n v = a[i] * 0x20000 + (a[i + 1] >>> 11);\r\n\r\n // Rejection sampling:\r\n // 0 <= v < 9007199254740992\r\n // Probability that v >= 9e15, is\r\n // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 
1 in 1251\r\n if (v >= 9e15) {\r\n b = crypto.getRandomValues(new Uint32Array(2));\r\n a[i] = b[0];\r\n a[i + 1] = b[1];\r\n } else {\r\n\r\n // 0 <= v <= 8999999999999999\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 2;\r\n }\r\n }\r\n i = k / 2;\r\n\r\n // Node.js supporting crypto.randomBytes.\r\n } else if (crypto.randomBytes) {\r\n\r\n // buffer\r\n a = crypto.randomBytes(k *= 7);\r\n\r\n for (; i < k;) {\r\n\r\n // 0x1000000000000 is 2^48, 0x10000000000 is 2^40\r\n // 0x100000000 is 2^32, 0x1000000 is 2^24\r\n // 11111 11111111 11111111 11111111 11111111 11111111 11111111\r\n // 0 <= v < 9007199254740992\r\n v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) +\r\n (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) +\r\n (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6];\r\n\r\n if (v >= 9e15) {\r\n crypto.randomBytes(7).copy(a, i);\r\n } else {\r\n\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 7;\r\n }\r\n }\r\n i = k / 7;\r\n } else {\r\n CRYPTO = false;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n }\r\n\r\n // Use Math.random.\r\n if (!CRYPTO) {\r\n\r\n for (; i < k;) {\r\n v = random53bitInt();\r\n if (v < 9e15) c[i++] = v % 1e14;\r\n }\r\n }\r\n\r\n k = c[--i];\r\n dp %= LOG_BASE;\r\n\r\n // Convert trailing digits to zeros according to dp.\r\n if (k && dp) {\r\n v = POWS_TEN[LOG_BASE - dp];\r\n c[i] = mathfloor(k / v) * v;\r\n }\r\n\r\n // Remove trailing elements which are zero.\r\n for (; c[i] === 0; c.pop(), i--);\r\n\r\n // Zero?\r\n if (i < 0) {\r\n c = [e = 0];\r\n } else {\r\n\r\n // Remove leading elements which are zero and adjust exponent accordingly.\r\n for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE);\r\n\r\n // Count the digits of the first element of c to determine leading zeros, and...\r\n for (i = 1, v = c[0]; v >= 10; v /= 10, i++);\r\n\r\n // adjust the exponent accordingly.\r\n if (i < LOG_BASE) e -= LOG_BASE - i;\r\n }\r\n\r\n rand.e = e;\r\n rand.c = c;\r\n return rand;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the sum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.sum = function () {\r\n var i = 1,\r\n args = arguments,\r\n sum = new BigNumber(args[0]);\r\n for (; i < args.length;) sum = sum.plus(args[i++]);\r\n return sum;\r\n };\r\n\r\n\r\n // PRIVATE FUNCTIONS\r\n\r\n\r\n // Called by BigNumber and BigNumber.prototype.toString.\r\n convertBase = (function () {\r\n var decimal = '0123456789';\r\n\r\n /*\r\n * Convert string of baseIn to an array of numbers of baseOut.\r\n * Eg. toBaseOut('255', 10, 16) returns [15, 15].\r\n * Eg. 
toBaseOut('ff', 16, 10) returns [2, 5, 5].\r\n */\r\n function toBaseOut(str, baseIn, baseOut, alphabet) {\r\n var j,\r\n arr = [0],\r\n arrL,\r\n i = 0,\r\n len = str.length;\r\n\r\n for (; i < len;) {\r\n for (arrL = arr.length; arrL--; arr[arrL] *= baseIn);\r\n\r\n arr[0] += alphabet.indexOf(str.charAt(i++));\r\n\r\n for (j = 0; j < arr.length; j++) {\r\n\r\n if (arr[j] > baseOut - 1) {\r\n if (arr[j + 1] == null) arr[j + 1] = 0;\r\n arr[j + 1] += arr[j] / baseOut | 0;\r\n arr[j] %= baseOut;\r\n }\r\n }\r\n }\r\n\r\n return arr.reverse();\r\n }\r\n\r\n // Convert a numeric string of baseIn to a numeric string of baseOut.\r\n // If the caller is toString, we are converting from base 10 to baseOut.\r\n // If the caller is BigNumber, we are converting from baseIn to base 10.\r\n return function (str, baseIn, baseOut, sign, callerIsToString) {\r\n var alphabet, d, e, k, r, x, xc, y,\r\n i = str.indexOf('.'),\r\n dp = DECIMAL_PLACES,\r\n rm = ROUNDING_MODE;\r\n\r\n // Non-integer.\r\n if (i >= 0) {\r\n k = POW_PRECISION;\r\n\r\n // Unlimited precision.\r\n POW_PRECISION = 0;\r\n str = str.replace('.', '');\r\n y = new BigNumber(baseIn);\r\n x = y.pow(str.length - i);\r\n POW_PRECISION = k;\r\n\r\n // Convert str as if an integer, then restore the fraction part by dividing the\r\n // result by its base raised to a power.\r\n\r\n y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'),\r\n 10, baseOut, decimal);\r\n y.e = y.c.length;\r\n }\r\n\r\n // Convert the number as integer.\r\n\r\n xc = toBaseOut(str, baseIn, baseOut, callerIsToString\r\n ? (alphabet = ALPHABET, decimal)\r\n : (alphabet = decimal, ALPHABET));\r\n\r\n // xc now represents str as an integer and converted to baseOut. e is the exponent.\r\n e = k = xc.length;\r\n\r\n // Remove trailing zeros.\r\n for (; xc[--k] == 0; xc.pop());\r\n\r\n // Zero?\r\n if (!xc[0]) return alphabet.charAt(0);\r\n\r\n // Does str represent an integer? If so, no need for the division.\r\n if (i < 0) {\r\n --e;\r\n } else {\r\n x.c = xc;\r\n x.e = e;\r\n\r\n // The sign is needed for correct rounding.\r\n x.s = sign;\r\n x = div(x, y, dp, rm, baseOut);\r\n xc = x.c;\r\n r = x.r;\r\n e = x.e;\r\n }\r\n\r\n // xc now represents str converted to baseOut.\r\n\r\n // THe index of the rounding digit.\r\n d = e + dp + 1;\r\n\r\n // The rounding digit: the digit to the right of the digit that may be rounded up.\r\n i = xc[d];\r\n\r\n // Look at the rounding digits and mode to determine whether to round up.\r\n\r\n k = baseOut / 2;\r\n r = r || d < 0 || xc[d + 1] != null;\r\n\r\n r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n // If the index of the rounding digit is not greater than zero, or xc represents\r\n // zero, then the result of the base conversion is zero or, if rounding up, a value\r\n // such as 0.00001.\r\n if (d < 1 || !xc[0]) {\r\n\r\n // 1^-dp or 0\r\n str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0);\r\n } else {\r\n\r\n // Truncate xc to the required number of decimal places.\r\n xc.length = d;\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n // Rounding up may mean the previous digit has to be rounded up and so on.\r\n for (--baseOut; ++xc[--d] > baseOut;) {\r\n xc[d] = 0;\r\n\r\n if (!d) {\r\n ++e;\r\n xc = [1].concat(xc);\r\n }\r\n }\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (k = xc.length; !xc[--k];);\r\n\r\n // E.g. 
[4, 11, 15] becomes 4bf.\r\n for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++]));\r\n\r\n // Add leading zeros, decimal point and trailing zeros as required.\r\n str = toFixedPoint(str, e, alphabet.charAt(0));\r\n }\r\n\r\n // The caller will add the sign.\r\n return str;\r\n };\r\n })();\r\n\r\n\r\n // Perform division in the specified base. Called by div and convertBase.\r\n div = (function () {\r\n\r\n // Assume non-zero x and k.\r\n function multiply(x, k, base) {\r\n var m, temp, xlo, xhi,\r\n carry = 0,\r\n i = x.length,\r\n klo = k % SQRT_BASE,\r\n khi = k / SQRT_BASE | 0;\r\n\r\n for (x = x.slice(); i--;) {\r\n xlo = x[i] % SQRT_BASE;\r\n xhi = x[i] / SQRT_BASE | 0;\r\n m = khi * xlo + xhi * klo;\r\n temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry;\r\n carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi;\r\n x[i] = temp % base;\r\n }\r\n\r\n if (carry) x = [carry].concat(x);\r\n\r\n return x;\r\n }\r\n\r\n function compare(a, b, aL, bL) {\r\n var i, cmp;\r\n\r\n if (aL != bL) {\r\n cmp = aL > bL ? 1 : -1;\r\n } else {\r\n\r\n for (i = cmp = 0; i < aL; i++) {\r\n\r\n if (a[i] != b[i]) {\r\n cmp = a[i] > b[i] ? 1 : -1;\r\n break;\r\n }\r\n }\r\n }\r\n\r\n return cmp;\r\n }\r\n\r\n function subtract(a, b, aL, base) {\r\n var i = 0;\r\n\r\n // Subtract b from a.\r\n for (; aL--;) {\r\n a[aL] -= i;\r\n i = a[aL] < b[aL] ? 1 : 0;\r\n a[aL] = i * base + a[aL] - b[aL];\r\n }\r\n\r\n // Remove leading zeros.\r\n for (; !a[0] && a.length > 1; a.splice(0, 1));\r\n }\r\n\r\n // x: dividend, y: divisor.\r\n return function (x, y, dp, rm, base) {\r\n var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0,\r\n yL, yz,\r\n s = x.s == y.s ? 1 : -1,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n // Either NaN, Infinity or 0?\r\n if (!xc || !xc[0] || !yc || !yc[0]) {\r\n\r\n return new BigNumber(\r\n\r\n // Return NaN if either NaN, or both Infinity or 0.\r\n !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN :\r\n\r\n // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0.\r\n xc && xc[0] == 0 || !yc ? 
s * 0 : s / 0\r\n );\r\n }\r\n\r\n q = new BigNumber(s);\r\n qc = q.c = [];\r\n e = x.e - y.e;\r\n s = dp + e + 1;\r\n\r\n if (!base) {\r\n base = BASE;\r\n e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE);\r\n s = s / LOG_BASE | 0;\r\n }\r\n\r\n // Result exponent may be one less then the current value of e.\r\n // The coefficients of the BigNumbers from convertBase may have trailing zeros.\r\n for (i = 0; yc[i] == (xc[i] || 0); i++);\r\n\r\n if (yc[i] > (xc[i] || 0)) e--;\r\n\r\n if (s < 0) {\r\n qc.push(1);\r\n more = true;\r\n } else {\r\n xL = xc.length;\r\n yL = yc.length;\r\n i = 0;\r\n s += 2;\r\n\r\n // Normalise xc and yc so highest order digit of yc is >= base / 2.\r\n\r\n n = mathfloor(base / (yc[0] + 1));\r\n\r\n // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1.\r\n // if (n > 1 || n++ == 1 && yc[0] < base / 2) {\r\n if (n > 1) {\r\n yc = multiply(yc, n, base);\r\n xc = multiply(xc, n, base);\r\n yL = yc.length;\r\n xL = xc.length;\r\n }\r\n\r\n xi = yL;\r\n rem = xc.slice(0, yL);\r\n remL = rem.length;\r\n\r\n // Add zeros to make remainder as long as divisor.\r\n for (; remL < yL; rem[remL++] = 0);\r\n yz = yc.slice();\r\n yz = [0].concat(yz);\r\n yc0 = yc[0];\r\n if (yc[1] >= base / 2) yc0++;\r\n // Not necessary, but to prevent trial digit n > base, when using base 3.\r\n // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15;\r\n\r\n do {\r\n n = 0;\r\n\r\n // Compare divisor and remainder.\r\n cmp = compare(yc, rem, yL, remL);\r\n\r\n // If divisor < remainder.\r\n if (cmp < 0) {\r\n\r\n // Calculate trial digit, n.\r\n\r\n rem0 = rem[0];\r\n if (yL != remL) rem0 = rem0 * base + (rem[1] || 0);\r\n\r\n // n is how many times the divisor goes into the current remainder.\r\n n = mathfloor(rem0 / yc0);\r\n\r\n // Algorithm:\r\n // product = divisor multiplied by trial digit (n).\r\n // Compare product and remainder.\r\n // If product is greater than remainder:\r\n // Subtract divisor from product, decrement trial digit.\r\n // Subtract product from remainder.\r\n // If product was less than remainder at the last compare:\r\n // Compare new remainder and divisor.\r\n // If remainder is greater than divisor:\r\n // Subtract divisor from remainder, increment trial digit.\r\n\r\n if (n > 1) {\r\n\r\n // n may be > base only when base is 3.\r\n if (n >= base) n = base - 1;\r\n\r\n // product = divisor * trial digit.\r\n prod = multiply(yc, n, base);\r\n prodL = prod.length;\r\n remL = rem.length;\r\n\r\n // Compare product and remainder.\r\n // If product > remainder then trial digit n too high.\r\n // n is 1 too high about 5% of the time, and is not known to have\r\n // ever been more than 1 too high.\r\n while (compare(prod, rem, prodL, remL) == 1) {\r\n n--;\r\n\r\n // Subtract divisor from product.\r\n subtract(prod, yL < prodL ? 
yz : yc, prodL, base);\r\n prodL = prod.length;\r\n cmp = 1;\r\n }\r\n } else {\r\n\r\n // n is 0 or 1, cmp is -1.\r\n // If n is 0, there is no need to compare yc and rem again below,\r\n // so change cmp to 1 to avoid it.\r\n // If n is 1, leave cmp as -1, so yc and rem are compared again.\r\n if (n == 0) {\r\n\r\n // divisor < remainder, so n must be at least 1.\r\n cmp = n = 1;\r\n }\r\n\r\n // product = divisor\r\n prod = yc.slice();\r\n prodL = prod.length;\r\n }\r\n\r\n if (prodL < remL) prod = [0].concat(prod);\r\n\r\n // Subtract product from remainder.\r\n subtract(rem, prod, remL, base);\r\n remL = rem.length;\r\n\r\n // If product was < remainder.\r\n if (cmp == -1) {\r\n\r\n // Compare divisor and new remainder.\r\n // If divisor < new remainder, subtract divisor from remainder.\r\n // Trial digit n too low.\r\n // n is 1 too low about 5% of the time, and very rarely 2 too low.\r\n while (compare(yc, rem, yL, remL) < 1) {\r\n n++;\r\n\r\n // Subtract divisor from remainder.\r\n subtract(rem, yL < remL ? yz : yc, remL, base);\r\n remL = rem.length;\r\n }\r\n }\r\n } else if (cmp === 0) {\r\n n++;\r\n rem = [0];\r\n } // else cmp === 1 and n will be 0\r\n\r\n // Add the next digit, n, to the result array.\r\n qc[i++] = n;\r\n\r\n // Update the remainder.\r\n if (rem[0]) {\r\n rem[remL++] = xc[xi] || 0;\r\n } else {\r\n rem = [xc[xi]];\r\n remL = 1;\r\n }\r\n } while ((xi++ < xL || rem[0] != null) && s--);\r\n\r\n more = rem[0] != null;\r\n\r\n // Leading zero?\r\n if (!qc[0]) qc.splice(0, 1);\r\n }\r\n\r\n if (base == BASE) {\r\n\r\n // To calculate q.e, first get the number of digits of qc[0].\r\n for (i = 1, s = qc[0]; s >= 10; s /= 10, i++);\r\n\r\n round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more);\r\n\r\n // Caller is convertBase.\r\n } else {\r\n q.e = e;\r\n q.r = +more;\r\n }\r\n\r\n return q;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a string representing the value of BigNumber n in fixed-point or exponential\r\n * notation rounded to the specified decimal places or significant digits.\r\n *\r\n * n: a BigNumber.\r\n * i: the index of the last digit required (i.e. the digit that may be rounded up).\r\n * rm: the rounding mode.\r\n * id: 1 (toExponential) or 2 (toPrecision).\r\n */\r\n function format(n, i, rm, id) {\r\n var c0, e, ne, len, str;\r\n\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n if (!n.c) return n.toString();\r\n\r\n c0 = n.c[0];\r\n ne = n.e;\r\n\r\n if (i == null) {\r\n str = coeffToString(n.c);\r\n str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS)\r\n ? 
toExponential(str, ne)\r\n : toFixedPoint(str, ne, '0');\r\n } else {\r\n n = round(new BigNumber(n), i, rm);\r\n\r\n // n.e may have changed if the value was rounded up.\r\n e = n.e;\r\n\r\n str = coeffToString(n.c);\r\n len = str.length;\r\n\r\n // toPrecision returns exponential notation if the number of significant digits\r\n // specified is less than the number of digits necessary to represent the integer\r\n // part of the value in fixed-point notation.\r\n\r\n // Exponential notation.\r\n if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) {\r\n\r\n // Append zeros?\r\n for (; len < i; str += '0', len++);\r\n str = toExponential(str, e);\r\n\r\n // Fixed-point notation.\r\n } else {\r\n i -= ne;\r\n str = toFixedPoint(str, e, '0');\r\n\r\n // Append zeros?\r\n if (e + 1 > len) {\r\n if (--i > 0) for (str += '.'; i--; str += '0');\r\n } else {\r\n i += e - len;\r\n if (i > 0) {\r\n if (e + 1 == len) str += '.';\r\n for (; i--; str += '0');\r\n }\r\n }\r\n }\r\n }\r\n\r\n return n.s < 0 && c0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // Handle BigNumber.max and BigNumber.min.\r\n function maxOrMin(args, method) {\r\n var n,\r\n i = 1,\r\n m = new BigNumber(args[0]);\r\n\r\n for (; i < args.length; i++) {\r\n n = new BigNumber(args[i]);\r\n\r\n // If any number is NaN, return NaN.\r\n if (!n.s) {\r\n m = n;\r\n break;\r\n } else if (method.call(m, n)) {\r\n m = n;\r\n }\r\n }\r\n\r\n return m;\r\n }\r\n\r\n\r\n /*\r\n * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP.\r\n * Called by minus, plus and times.\r\n */\r\n function normalise(n, c, e) {\r\n var i = 1,\r\n j = c.length;\r\n\r\n // Remove trailing zeros.\r\n for (; !c[--j]; c.pop());\r\n\r\n // Calculate the base 10 exponent. First get the number of digits of c[0].\r\n for (j = c[0]; j >= 10; j /= 10, i++);\r\n\r\n // Overflow?\r\n if ((e = i + e * LOG_BASE - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n n.c = n.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n n.c = [n.e = 0];\r\n } else {\r\n n.e = e;\r\n n.c = c;\r\n }\r\n\r\n return n;\r\n }\r\n\r\n\r\n // Handle values that fail the validity test in BigNumber.\r\n parseNumeric = (function () {\r\n var basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i,\r\n dotAfter = /^([^.]+)\\.$/,\r\n dotBefore = /^\\.([^.]+)$/,\r\n isInfinityOrNaN = /^-?(Infinity|NaN)$/,\r\n whitespaceOrPlus = /^\\s*\\+(?=[\\w.])|^\\s+|\\s+$/g;\r\n\r\n return function (x, str, isNum, b) {\r\n var base,\r\n s = isNum ? str : str.replace(whitespaceOrPlus, '');\r\n\r\n // No exception on ±Infinity or NaN.\r\n if (isInfinityOrNaN.test(s)) {\r\n x.s = isNaN(s) ? null : s < 0 ? -1 : 1;\r\n } else {\r\n if (!isNum) {\r\n\r\n // basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i\r\n s = s.replace(basePrefix, function (m, p1, p2) {\r\n base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8;\r\n return !b || b == base ? p1 : m;\r\n });\r\n\r\n if (b) {\r\n base = b;\r\n\r\n // E.g. '1.' to '1', '.1' to '0.1'\r\n s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1');\r\n }\r\n\r\n if (str != s) return new BigNumber(s, base);\r\n }\r\n\r\n // '[BigNumber Error] Not a number: {n}'\r\n // '[BigNumber Error] Not a base {b} number: {n}'\r\n if (BigNumber.DEBUG) {\r\n throw Error\r\n (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str);\r\n }\r\n\r\n // NaN\r\n x.s = null;\r\n }\r\n\r\n x.c = x.e = null;\r\n }\r\n })();\r\n\r\n\r\n /*\r\n * Round x to sd significant digits using rounding mode rm. 
Check for over/under-flow.\r\n * If r is truthy, it is known that there are more digits after the rounding digit.\r\n */\r\n function round(x, sd, rm, r) {\r\n var d, i, j, k, n, ni, rd,\r\n xc = x.c,\r\n pows10 = POWS_TEN;\r\n\r\n // if x is not Infinity or NaN...\r\n if (xc) {\r\n\r\n // rd is the rounding digit, i.e. the digit after the digit that may be rounded up.\r\n // n is a base 1e14 number, the value of the element of array x.c containing rd.\r\n // ni is the index of n within x.c.\r\n // d is the number of digits of n.\r\n // i is the index of rd within n including leading zeros.\r\n // j is the actual index of rd within n (if < 0, rd is a leading zero).\r\n out: {\r\n\r\n // Get the number of digits of the first element of xc.\r\n for (d = 1, k = xc[0]; k >= 10; k /= 10, d++);\r\n i = sd - d;\r\n\r\n // If the rounding digit is in the first element of xc...\r\n if (i < 0) {\r\n i += LOG_BASE;\r\n j = sd;\r\n n = xc[ni = 0];\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = n / pows10[d - j - 1] % 10 | 0;\r\n } else {\r\n ni = mathceil((i + 1) / LOG_BASE);\r\n\r\n if (ni >= xc.length) {\r\n\r\n if (r) {\r\n\r\n // Needed by sqrt.\r\n for (; xc.length <= ni; xc.push(0));\r\n n = rd = 0;\r\n d = 1;\r\n i %= LOG_BASE;\r\n j = i - LOG_BASE + 1;\r\n } else {\r\n break out;\r\n }\r\n } else {\r\n n = k = xc[ni];\r\n\r\n // Get the number of digits of n.\r\n for (d = 1; k >= 10; k /= 10, d++);\r\n\r\n // Get the index of rd within n.\r\n i %= LOG_BASE;\r\n\r\n // Get the index of rd within n, adjusted for leading zeros.\r\n // The number of leading zeros of n is given by LOG_BASE - d.\r\n j = i - LOG_BASE + d;\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0;\r\n }\r\n }\r\n\r\n r = r || sd < 0 ||\r\n\r\n // Are there any non-zero digits after the rounding digit?\r\n // The expression n % pows10[d - j - 1] returns all digits of n to the right\r\n // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714.\r\n xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]);\r\n\r\n r = rm < 4\r\n ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 &&\r\n\r\n // Check whether the digit to the left of the rounding digit is odd.\r\n ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n if (sd < 1 || !xc[0]) {\r\n xc.length = 0;\r\n\r\n if (r) {\r\n\r\n // Convert sd to decimal places.\r\n sd -= x.e + 1;\r\n\r\n // 1, 0.1, 0.01, 0.001, 0.0001 etc.\r\n xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE];\r\n x.e = -sd || 0;\r\n } else {\r\n\r\n // Zero.\r\n xc[0] = x.e = 0;\r\n }\r\n\r\n return x;\r\n }\r\n\r\n // Remove excess digits.\r\n if (i == 0) {\r\n xc.length = ni;\r\n k = 1;\r\n ni--;\r\n } else {\r\n xc.length = ni + 1;\r\n k = pows10[LOG_BASE - i];\r\n\r\n // E.g. 56700 becomes 56000 if 7 is the rounding digit.\r\n // j > 0 means i > number of leading zeros of n.\r\n xc[ni] = j > 0 ? 
mathfloor(n / pows10[d - j] % pows10[j]) * k : 0;\r\n }\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n for (; ;) {\r\n\r\n // If the digit to be rounded up is in the first element of xc...\r\n if (ni == 0) {\r\n\r\n // i will be the length of xc[0] before k is added.\r\n for (i = 1, j = xc[0]; j >= 10; j /= 10, i++);\r\n j = xc[0] += k;\r\n for (k = 1; j >= 10; j /= 10, k++);\r\n\r\n // if i != k the length has increased.\r\n if (i != k) {\r\n x.e++;\r\n if (xc[0] == BASE) xc[0] = 1;\r\n }\r\n\r\n break;\r\n } else {\r\n xc[ni] += k;\r\n if (xc[ni] != BASE) break;\r\n xc[ni--] = 0;\r\n k = 1;\r\n }\r\n }\r\n }\r\n\r\n // Remove trailing zeros.\r\n for (i = xc.length; xc[--i] === 0; xc.pop());\r\n }\r\n\r\n // Overflow? Infinity.\r\n if (x.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n\r\n // Underflow? Zero.\r\n } else if (x.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n return x;\r\n }\r\n\r\n\r\n function valueOf(n) {\r\n var str,\r\n e = n.e;\r\n\r\n if (e === null) return n.toString();\r\n\r\n str = coeffToString(n.c);\r\n\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(str, e)\r\n : toFixedPoint(str, e, '0');\r\n\r\n return n.s < 0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // PROTOTYPE/INSTANCE METHODS\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the absolute value of this BigNumber.\r\n */\r\n P.absoluteValue = P.abs = function () {\r\n var x = new BigNumber(this);\r\n if (x.s < 0) x.s = 1;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * Return\r\n * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * -1 if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * 0 if they have the same value,\r\n * or null if the value of either is NaN.\r\n */\r\n P.comparedTo = function (y, b) {\r\n return compare(this, new BigNumber(y, b));\r\n };\r\n\r\n\r\n /*\r\n * If dp is undefined or null or true or false, return the number of decimal places of the\r\n * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n *\r\n * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * [dp] {number} Decimal places: integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.decimalPlaces = P.dp = function (dp, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), dp + x.e + 1, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE;\r\n\r\n // Subtract the number of trailing zeros of the last number.\r\n if (v = c[v]) for (; v % 10 == 0; v /= 10, n--);\r\n if (n < 0) n = 0;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * n / 0 = I\r\n * n / N = N\r\n * n / I = 0\r\n * 0 / n = 0\r\n * 0 / 0 = N\r\n * 0 / N = N\r\n * 0 / I = 0\r\n * N / n = N\r\n * N / 0 = N\r\n * N / N = N\r\n * N / I = N\r\n * I / n = I\r\n * I / 0 = I\r\n * I / N = N\r\n * I / I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber divided by the value of\r\n * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.dividedBy = P.div = function (y, b) {\r\n return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the integer part of dividing the value of this\r\n * BigNumber by the value of BigNumber(y, b).\r\n */\r\n P.dividedToIntegerBy = P.idiv = function (y, b) {\r\n return div(this, new BigNumber(y, b), 0, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the value of this BigNumber exponentiated by n.\r\n *\r\n * If m is present, return the result modulo m.\r\n * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE.\r\n * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE.\r\n *\r\n * The modular power operation works efficiently when x, n, and m are integers, otherwise it\r\n * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0.\r\n *\r\n * n {number|string|BigNumber} The exponent. An integer.\r\n * [m] {number|string|BigNumber} The modulus.\r\n *\r\n * '[BigNumber Error] Exponent not an integer: {n}'\r\n */\r\n P.exponentiatedBy = P.pow = function (n, m) {\r\n var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y,\r\n x = this;\r\n\r\n n = new BigNumber(n);\r\n\r\n // Allow NaN and ±Infinity, but not other non-integers.\r\n if (n.c && !n.isInteger()) {\r\n throw Error\r\n (bignumberError + 'Exponent not an integer: ' + valueOf(n));\r\n }\r\n\r\n if (m != null) m = new BigNumber(m);\r\n\r\n // Exponent of MAX_SAFE_INTEGER is 15.\r\n nIsBig = n.e > 14;\r\n\r\n // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0.\r\n if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) {\r\n\r\n // The sign of the result of pow when x is negative depends on the evenness of n.\r\n // If +n overflows to ±Infinity, the evenness of n would be not be known.\r\n y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n)));\r\n return m ? y.mod(m) : y;\r\n }\r\n\r\n nIsNeg = n.s < 0;\r\n\r\n if (m) {\r\n\r\n // x % m returns NaN if abs(m) is zero, or m is NaN.\r\n if (m.c ? 
!m.c[0] : !m.s) return new BigNumber(NaN);\r\n\r\n isModExp = !nIsNeg && x.isInteger() && m.isInteger();\r\n\r\n if (isModExp) x = x.mod(m);\r\n\r\n // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15.\r\n // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15.\r\n } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0\r\n // [1, 240000000]\r\n ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7\r\n // [80000000000000] [99999750000000]\r\n : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) {\r\n\r\n // If x is negative and n is odd, k = -0, else k = 0.\r\n k = x.s < 0 && isOdd(n) ? -0 : 0;\r\n\r\n // If x >= 1, k = ±Infinity.\r\n if (x.e > -1) k = 1 / k;\r\n\r\n // If n is negative return ±0, else return ±Infinity.\r\n return new BigNumber(nIsNeg ? 1 / k : k);\r\n\r\n } else if (POW_PRECISION) {\r\n\r\n // Truncating each coefficient array to a length of k after each multiplication\r\n // equates to truncating significant digits to POW_PRECISION + [28, 41],\r\n // i.e. there will be a minimum of 28 guard digits retained.\r\n k = mathceil(POW_PRECISION / LOG_BASE + 2);\r\n }\r\n\r\n if (nIsBig) {\r\n half = new BigNumber(0.5);\r\n if (nIsNeg) n.s = 1;\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = Math.abs(+valueOf(n));\r\n nIsOdd = i % 2;\r\n }\r\n\r\n y = new BigNumber(ONE);\r\n\r\n // Performs 54 loop iterations for n of 9007199254740991.\r\n for (; ;) {\r\n\r\n if (nIsOdd) {\r\n y = y.times(x);\r\n if (!y.c) break;\r\n\r\n if (k) {\r\n if (y.c.length > k) y.c.length = k;\r\n } else if (isModExp) {\r\n y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (i) {\r\n i = mathfloor(i / 2);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n } else {\r\n n = n.times(half);\r\n round(n, n.e + 1, 1);\r\n\r\n if (n.e > 14) {\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = +valueOf(n);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n }\r\n }\r\n\r\n x = x.times(x);\r\n\r\n if (k) {\r\n if (x.c && x.c.length > k) x.c.length = k;\r\n } else if (isModExp) {\r\n x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (isModExp) return y;\r\n if (nIsNeg) y = ONE.div(y);\r\n\r\n return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer\r\n * using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}'\r\n */\r\n P.integerValue = function (rm) {\r\n var n = new BigNumber(this);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n return round(n, n.e + 1, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isEqualTo = P.eq = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is a finite number, otherwise return false.\r\n */\r\n P.isFinite = function () {\r\n return !!this.c;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isGreaterThan = P.gt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isGreaterThanOrEqualTo = P.gte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0;\r\n\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is an integer, otherwise return false.\r\n */\r\n P.isInteger = function () {\r\n return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isLessThan = P.lt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isLessThanOrEqualTo = P.lte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is NaN, otherwise return false.\r\n */\r\n P.isNaN = function () {\r\n return !this.s;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is negative, otherwise return false.\r\n */\r\n P.isNegative = function () {\r\n return this.s < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is positive, otherwise return false.\r\n */\r\n P.isPositive = function () {\r\n return this.s > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is 0 or -0, otherwise return false.\r\n */\r\n P.isZero = function () {\r\n return !!this.c && this.c[0] == 0;\r\n };\r\n\r\n\r\n /*\r\n * n - 0 = n\r\n * n - N = N\r\n * n - I = -I\r\n * 0 - n = -n\r\n * 0 - 0 = 0\r\n * 0 - N = N\r\n * 0 - I = -I\r\n * N - n = N\r\n * N - 0 = N\r\n * N - N = N\r\n * N - I = N\r\n * I - n = I\r\n * I - 0 = I\r\n * I - N = N\r\n * I - I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber minus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.minus = function (y, b) {\r\n var i, j, t, xLTy,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.plus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return 
xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN);\r\n\r\n // Either zero?\r\n if (!xc[0] || !yc[0]) {\r\n\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x :\r\n\r\n // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity\r\n ROUNDING_MODE == 3 ? -0 : 0);\r\n }\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Determine which is the bigger number.\r\n if (a = xe - ye) {\r\n\r\n if (xLTy = a < 0) {\r\n a = -a;\r\n t = xc;\r\n } else {\r\n ye = xe;\r\n t = yc;\r\n }\r\n\r\n t.reverse();\r\n\r\n // Prepend zeros to equalise exponents.\r\n for (b = a; b--; t.push(0));\r\n t.reverse();\r\n } else {\r\n\r\n // Exponents equal. Check digit by digit.\r\n j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b;\r\n\r\n for (a = b = 0; b < j; b++) {\r\n\r\n if (xc[b] != yc[b]) {\r\n xLTy = xc[b] < yc[b];\r\n break;\r\n }\r\n }\r\n }\r\n\r\n // x < y? Point xc to the array of the bigger number.\r\n if (xLTy) {\r\n t = xc;\r\n xc = yc;\r\n yc = t;\r\n y.s = -y.s;\r\n }\r\n\r\n b = (j = yc.length) - (i = xc.length);\r\n\r\n // Append zeros to xc if shorter.\r\n // No need to add zeros to yc if shorter as subtract only needs to start at yc.length.\r\n if (b > 0) for (; b--; xc[i++] = 0);\r\n b = BASE - 1;\r\n\r\n // Subtract yc from xc.\r\n for (; j > a;) {\r\n\r\n if (xc[--j] < yc[j]) {\r\n for (i = j; i && !xc[--i]; xc[i] = b);\r\n --xc[i];\r\n xc[j] += BASE;\r\n }\r\n\r\n xc[j] -= yc[j];\r\n }\r\n\r\n // Remove leading zeros and adjust exponent accordingly.\r\n for (; xc[0] == 0; xc.splice(0, 1), --ye);\r\n\r\n // Zero?\r\n if (!xc[0]) {\r\n\r\n // Following IEEE 754 (2008) 6.3,\r\n // n - n = +0 but n - n = -0 when rounding towards -Infinity.\r\n y.s = ROUNDING_MODE == 3 ? -1 : 1;\r\n y.c = [y.e = 0];\r\n return y;\r\n }\r\n\r\n // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity\r\n // for finite x and y.\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * n % 0 = N\r\n * n % N = N\r\n * n % I = n\r\n * 0 % n = 0\r\n * -0 % n = -0\r\n * 0 % 0 = N\r\n * 0 % N = N\r\n * 0 % I = 0\r\n * N % n = N\r\n * N % 0 = N\r\n * N % N = N\r\n * N % I = N\r\n * I % n = N\r\n * I % 0 = N\r\n * I % N = N\r\n * I % I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber modulo the value of\r\n * BigNumber(y, b). 
The result depends on the value of MODULO_MODE.\r\n */\r\n P.modulo = P.mod = function (y, b) {\r\n var q, s,\r\n x = this;\r\n\r\n y = new BigNumber(y, b);\r\n\r\n // Return NaN if x is Infinity or NaN, or y is NaN or zero.\r\n if (!x.c || !y.s || y.c && !y.c[0]) {\r\n return new BigNumber(NaN);\r\n\r\n // Return x if y is Infinity or x is zero.\r\n } else if (!y.c || x.c && !x.c[0]) {\r\n return new BigNumber(x);\r\n }\r\n\r\n if (MODULO_MODE == 9) {\r\n\r\n // Euclidian division: q = sign(y) * floor(x / abs(y))\r\n // r = x - qy where 0 <= r < abs(y)\r\n s = y.s;\r\n y.s = 1;\r\n q = div(x, y, 0, 3);\r\n y.s = s;\r\n q.s *= s;\r\n } else {\r\n q = div(x, y, 0, MODULO_MODE);\r\n }\r\n\r\n y = x.minus(q.times(y));\r\n\r\n // To match JavaScript %, ensure sign of zero is sign of dividend.\r\n if (!y.c[0] && MODULO_MODE == 1) y.s = x.s;\r\n\r\n return y;\r\n };\r\n\r\n\r\n /*\r\n * n * 0 = 0\r\n * n * N = N\r\n * n * I = I\r\n * 0 * n = 0\r\n * 0 * 0 = 0\r\n * 0 * N = N\r\n * 0 * I = N\r\n * N * n = N\r\n * N * 0 = N\r\n * N * N = N\r\n * N * I = N\r\n * I * n = I\r\n * I * 0 = N\r\n * I * N = N\r\n * I * I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value\r\n * of BigNumber(y, b).\r\n */\r\n P.multipliedBy = P.times = function (y, b) {\r\n var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc,\r\n base, sqrtBase,\r\n x = this,\r\n xc = x.c,\r\n yc = (y = new BigNumber(y, b)).c;\r\n\r\n // Either NaN, ±Infinity or ±0?\r\n if (!xc || !yc || !xc[0] || !yc[0]) {\r\n\r\n // Return NaN if either is NaN, or one is 0 and the other is Infinity.\r\n if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) {\r\n y.c = y.e = y.s = null;\r\n } else {\r\n y.s *= x.s;\r\n\r\n // Return ±Infinity if either is ±Infinity.\r\n if (!xc || !yc) {\r\n y.c = y.e = null;\r\n\r\n // Return ±0 if either is ±0.\r\n } else {\r\n y.c = [0];\r\n y.e = 0;\r\n }\r\n }\r\n\r\n return y;\r\n }\r\n\r\n e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE);\r\n y.s *= x.s;\r\n xcL = xc.length;\r\n ycL = yc.length;\r\n\r\n // Ensure xc points to longer array and xcL to its length.\r\n if (xcL < ycL) {\r\n zc = xc;\r\n xc = yc;\r\n yc = zc;\r\n i = xcL;\r\n xcL = ycL;\r\n ycL = i;\r\n }\r\n\r\n // Initialise the result array with zeros.\r\n for (i = xcL + ycL, zc = []; i--; zc.push(0));\r\n\r\n base = BASE;\r\n sqrtBase = SQRT_BASE;\r\n\r\n for (i = ycL; --i >= 0;) {\r\n c = 0;\r\n ylo = yc[i] % sqrtBase;\r\n yhi = yc[i] / sqrtBase | 0;\r\n\r\n for (k = xcL, j = i + k; j > i;) {\r\n xlo = xc[--k] % sqrtBase;\r\n xhi = xc[k] / sqrtBase | 0;\r\n m = yhi * xlo + xhi * ylo;\r\n xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c;\r\n c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi;\r\n zc[j--] = xlo % base;\r\n }\r\n\r\n zc[j] = c;\r\n }\r\n\r\n if (c) {\r\n ++e;\r\n } else {\r\n zc.splice(0, 1);\r\n }\r\n\r\n return normalise(y, zc, e);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber negated,\r\n * i.e. 
multiplied by -1.\r\n */\r\n P.negated = function () {\r\n var x = new BigNumber(this);\r\n x.s = -x.s || null;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * n + 0 = n\r\n * n + N = N\r\n * n + I = I\r\n * 0 + n = n\r\n * 0 + 0 = 0\r\n * 0 + N = N\r\n * 0 + I = I\r\n * N + n = N\r\n * N + 0 = N\r\n * N + N = N\r\n * N + I = N\r\n * I + n = I\r\n * I + 0 = I\r\n * I + N = N\r\n * I + I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber plus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.plus = function (y, b) {\r\n var t,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.minus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Return ±Infinity if either ±Infinity.\r\n if (!xc || !yc) return new BigNumber(a / 0);\r\n\r\n // Either zero?\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0);\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts.\r\n if (a = xe - ye) {\r\n if (a > 0) {\r\n ye = xe;\r\n t = yc;\r\n } else {\r\n a = -a;\r\n t = xc;\r\n }\r\n\r\n t.reverse();\r\n for (; a--; t.push(0));\r\n t.reverse();\r\n }\r\n\r\n a = xc.length;\r\n b = yc.length;\r\n\r\n // Point xc to the longer array, and b to the shorter length.\r\n if (a - b < 0) {\r\n t = yc;\r\n yc = xc;\r\n xc = t;\r\n b = a;\r\n }\r\n\r\n // Only start adding at yc.length - 1 as the further digits of xc can be ignored.\r\n for (a = 0; b;) {\r\n a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0;\r\n xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE;\r\n }\r\n\r\n if (a) {\r\n xc = [a].concat(xc);\r\n ++ye;\r\n }\r\n\r\n // No need to check for zero, as +x + +y != 0 && -x + -y != 0\r\n // ye = MAX_EXP + 1 possible\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * If sd is undefined or null or true or false, return the number of significant digits of\r\n * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n * If sd is true include integer-part trailing zeros in the count.\r\n *\r\n * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive.\r\n * boolean: whether to count integer-part trailing zeros: true or false.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.precision = P.sd = function (sd, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (sd != null && sd !== !!sd) {\r\n intCheck(sd, 1, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), sd, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n v = c.length - 1;\r\n n = v * LOG_BASE + 1;\r\n\r\n if (v = c[v]) {\r\n\r\n // Subtract the number of trailing zeros of the last element.\r\n for (; v % 10 == 0; v /= 10, n--);\r\n\r\n // Add the number of digits of the first element.\r\n for (v = c[0]; v >= 10; v /= 10, n++);\r\n }\r\n\r\n if (sd && x.e + 1 > n) n = x.e + 1;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber shifted by k places\r\n * (powers of 10). Shift to the right if n > 0, and to the left if n < 0.\r\n *\r\n * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}'\r\n */\r\n P.shiftedBy = function (k) {\r\n intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER);\r\n return this.times('1e' + k);\r\n };\r\n\r\n\r\n /*\r\n * sqrt(-n) = N\r\n * sqrt(N) = N\r\n * sqrt(-I) = N\r\n * sqrt(I) = I\r\n * sqrt(0) = 0\r\n * sqrt(-0) = -0\r\n *\r\n * Return a new BigNumber whose value is the square root of the value of this BigNumber,\r\n * rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.squareRoot = P.sqrt = function () {\r\n var m, n, r, rep, t,\r\n x = this,\r\n c = x.c,\r\n s = x.s,\r\n e = x.e,\r\n dp = DECIMAL_PLACES + 4,\r\n half = new BigNumber('0.5');\r\n\r\n // Negative/NaN/Infinity/zero?\r\n if (s !== 1 || !c || !c[0]) {\r\n return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0);\r\n }\r\n\r\n // Initial estimate.\r\n s = Math.sqrt(+valueOf(x));\r\n\r\n // Math.sqrt underflow/overflow?\r\n // Pass x to Math.sqrt as integer, then adjust the exponent of the result.\r\n if (s == 0 || s == 1 / 0) {\r\n n = coeffToString(c);\r\n if ((n.length + e) % 2 == 0) n += '0';\r\n s = Math.sqrt(+n);\r\n e = bitFloor((e + 1) / 2) - (e < 0 || e % 2);\r\n\r\n if (s == 1 / 0) {\r\n n = '5e' + e;\r\n } else {\r\n n = s.toExponential();\r\n n = n.slice(0, n.indexOf('e') + 1) + e;\r\n }\r\n\r\n r = new BigNumber(n);\r\n } else {\r\n r = new BigNumber(s + '');\r\n }\r\n\r\n // Check for zero.\r\n // r could be zero if MIN_EXP is changed after the this value was created.\r\n // This would cause a division by zero (x/t) and hence Infinity below, which would cause\r\n // coeffToString to throw.\r\n if (r.c[0]) {\r\n e = r.e;\r\n s = e + dp;\r\n if (s < 3) s = 0;\r\n\r\n // Newton-Raphson iteration.\r\n for (; ;) {\r\n t = r;\r\n r = half.times(t.plus(div(x, t, dp, 1)));\r\n\r\n if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) {\r\n\r\n // The exponent of r may here be one less than the final result exponent,\r\n // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits\r\n // are indexed correctly.\r\n if (r.e < e) --s;\r\n n = n.slice(s - 3, s + 1);\r\n\r\n // The 4th rounding digit may be in error by -1 so if the 4 rounding digits\r\n // are 9999 or 4999 (i.e. 
approaching a rounding boundary) continue the\r\n // iteration.\r\n if (n == '9999' || !rep && n == '4999') {\r\n\r\n // On the first iteration only, check to see if rounding up gives the\r\n // exact result as the nines may infinitely repeat.\r\n if (!rep) {\r\n round(t, t.e + DECIMAL_PLACES + 2, 0);\r\n\r\n if (t.times(t).eq(x)) {\r\n r = t;\r\n break;\r\n }\r\n }\r\n\r\n dp += 4;\r\n s += 4;\r\n rep = 1;\r\n } else {\r\n\r\n // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact\r\n // result. If not, then there are further digits and m will be truthy.\r\n if (!+n || !+n.slice(1) && n.charAt(0) == '5') {\r\n\r\n // Truncate to the first rounding digit.\r\n round(r, r.e + DECIMAL_PLACES + 2, 1);\r\n m = !r.times(r).eq(x);\r\n }\r\n\r\n break;\r\n }\r\n }\r\n }\r\n }\r\n\r\n return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in exponential notation and\r\n * rounded using ROUNDING_MODE to dp fixed decimal places.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toExponential = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp++;\r\n }\r\n return format(this, dp, rm, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounding\r\n * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * Note: as with JavaScript's number type, (-0).toFixed(0) is '0',\r\n * but e.g. (-0.00001).toFixed(0) is '-0'.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toFixed = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp = dp + this.e + 1;\r\n }\r\n return format(this, dp, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounded\r\n * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties\r\n * of the format or FORMAT object (see BigNumber.set).\r\n *\r\n * The formatting object may contain some or all of the properties shown below.\r\n *\r\n * FORMAT = {\r\n * prefix: '',\r\n * groupSize: 3,\r\n * secondaryGroupSize: 0,\r\n * groupSeparator: ',',\r\n * decimalSeparator: '.',\r\n * fractionGroupSize: 0,\r\n * fractionGroupSeparator: '\\xA0', // non-breaking space\r\n * suffix: ''\r\n * };\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n * [format] {object} Formatting options. 
See FORMAT pbject above.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n * '[BigNumber Error] Argument not an object: {format}'\r\n */\r\n P.toFormat = function (dp, rm, format) {\r\n var str,\r\n x = this;\r\n\r\n if (format == null) {\r\n if (dp != null && rm && typeof rm == 'object') {\r\n format = rm;\r\n rm = null;\r\n } else if (dp && typeof dp == 'object') {\r\n format = dp;\r\n dp = rm = null;\r\n } else {\r\n format = FORMAT;\r\n }\r\n } else if (typeof format != 'object') {\r\n throw Error\r\n (bignumberError + 'Argument not an object: ' + format);\r\n }\r\n\r\n str = x.toFixed(dp, rm);\r\n\r\n if (x.c) {\r\n var i,\r\n arr = str.split('.'),\r\n g1 = +format.groupSize,\r\n g2 = +format.secondaryGroupSize,\r\n groupSeparator = format.groupSeparator || '',\r\n intPart = arr[0],\r\n fractionPart = arr[1],\r\n isNeg = x.s < 0,\r\n intDigits = isNeg ? intPart.slice(1) : intPart,\r\n len = intDigits.length;\r\n\r\n if (g2) {\r\n i = g1;\r\n g1 = g2;\r\n g2 = i;\r\n len -= i;\r\n }\r\n\r\n if (g1 > 0 && len > 0) {\r\n i = len % g1 || g1;\r\n intPart = intDigits.substr(0, i);\r\n for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1);\r\n if (g2 > 0) intPart += groupSeparator + intDigits.slice(i);\r\n if (isNeg) intPart = '-' + intPart;\r\n }\r\n\r\n str = fractionPart\r\n ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize)\r\n ? fractionPart.replace(new RegExp('\\\\d{' + g2 + '}\\\\B', 'g'),\r\n '$&' + (format.fractionGroupSeparator || ''))\r\n : fractionPart)\r\n : intPart;\r\n }\r\n\r\n return (format.prefix || '') + str + (format.suffix || '');\r\n };\r\n\r\n\r\n /*\r\n * Return an array of two BigNumbers representing the value of this BigNumber as a simple\r\n * fraction with an integer numerator and an integer denominator.\r\n * The denominator will be a positive non-zero value less than or equal to the specified\r\n * maximum denominator. If a maximum denominator is not specified, the denominator will be\r\n * the lowest value necessary to represent the number exactly.\r\n *\r\n * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator.\r\n *\r\n * '[BigNumber Error] Argument {not an integer|out of range} : {md}'\r\n */\r\n P.toFraction = function (md) {\r\n var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s,\r\n x = this,\r\n xc = x.c;\r\n\r\n if (md != null) {\r\n n = new BigNumber(md);\r\n\r\n // Throw if md is less than one or is not an integer, unless it is Infinity.\r\n if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) {\r\n throw Error\r\n (bignumberError + 'Argument ' +\r\n (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n));\r\n }\r\n }\r\n\r\n if (!xc) return new BigNumber(x);\r\n\r\n d = new BigNumber(ONE);\r\n n1 = d0 = new BigNumber(ONE);\r\n d1 = n0 = new BigNumber(ONE);\r\n s = coeffToString(xc);\r\n\r\n // Determine initial denominator.\r\n // d is a power of 10 and the minimum max denominator that specifies the value exactly.\r\n e = d.e = s.length - x.e - 1;\r\n d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp];\r\n md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n;\r\n\r\n exp = MAX_EXP;\r\n MAX_EXP = 1 / 0;\r\n n = new BigNumber(s);\r\n\r\n // n0 = d1 = 0\r\n n0.c[0] = 0;\r\n\r\n for (; ;) {\r\n q = div(n, d, 0, 1);\r\n d2 = d0.plus(q.times(d1));\r\n if (d2.comparedTo(md) == 1) break;\r\n d0 = d1;\r\n d1 = d2;\r\n n1 = n0.plus(q.times(d2 = n1));\r\n n0 = d2;\r\n d = n.minus(q.times(d2 = d));\r\n n = d2;\r\n }\r\n\r\n d2 = div(md.minus(d0), d1, 0, 1);\r\n n0 = n0.plus(d2.times(n1));\r\n d0 = d0.plus(d2.times(d1));\r\n n0.s = n1.s = x.s;\r\n e = e * 2;\r\n\r\n // Determine which fraction is closer to x, n0/d0 or n1/d1\r\n r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo(\r\n div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0];\r\n\r\n MAX_EXP = exp;\r\n\r\n return r;\r\n };\r\n\r\n\r\n /*\r\n * Return the value of this BigNumber converted to a number primitive.\r\n */\r\n P.toNumber = function () {\r\n return +valueOf(this);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber rounded to sd significant digits\r\n * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits\r\n * necessary to represent the integer part of the value in fixed-point notation, then use\r\n * exponential notation.\r\n *\r\n * [sd] {number} Significant digits. Integer, 1 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.toPrecision = function (sd, rm) {\r\n if (sd != null) intCheck(sd, 1, MAX);\r\n return format(this, sd, rm, 2);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in base b, or base 10 if b is\r\n * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and\r\n * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent\r\n * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than\r\n * TO_EXP_NEG, return exponential notation.\r\n *\r\n * [b] {number} Integer, 2 to ALPHABET.length inclusive.\r\n *\r\n * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n */\r\n P.toString = function (b) {\r\n var str,\r\n n = this,\r\n s = n.s,\r\n e = n.e;\r\n\r\n // Infinity or NaN?\r\n if (e === null) {\r\n if (s) {\r\n str = 'Infinity';\r\n if (s < 0) str = '-' + str;\r\n } else {\r\n str = 'NaN';\r\n }\r\n } else {\r\n if (b == null) {\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(coeffToString(n.c), e)\r\n : toFixedPoint(coeffToString(n.c), e, '0');\r\n } else if (b === 10 && alphabetHasNormalDecimalDigits) {\r\n n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE);\r\n str = toFixedPoint(coeffToString(n.c), n.e, '0');\r\n } else {\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true);\r\n }\r\n\r\n if (s < 0 && n.c[0]) str = '-' + str;\r\n }\r\n\r\n return str;\r\n };\r\n\r\n\r\n /*\r\n * Return as toString, but do not accept a base argument, and include the minus sign for\r\n * negative zero.\r\n */\r\n P.valueOf = P.toJSON = function () {\r\n return valueOf(this);\r\n };\r\n\r\n\r\n P._isBigNumber = true;\r\n\r\n if (configObject != null) BigNumber.set(configObject);\r\n\r\n return BigNumber;\r\n }\r\n\r\n\r\n // PRIVATE HELPER FUNCTIONS\r\n\r\n // These functions don't need access to variables,\r\n // e.g. 
DECIMAL_PLACES, in the scope of the `clone` function above.\r\n\r\n\r\n function bitFloor(n) {\r\n var i = n | 0;\r\n return n > 0 || n === i ? i : i - 1;\r\n }\r\n\r\n\r\n // Return a coefficient array as a string of base 10 digits.\r\n function coeffToString(a) {\r\n var s, z,\r\n i = 1,\r\n j = a.length,\r\n r = a[0] + '';\r\n\r\n for (; i < j;) {\r\n s = a[i++] + '';\r\n z = LOG_BASE - s.length;\r\n for (; z--; s = '0' + s);\r\n r += s;\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (j = r.length; r.charCodeAt(--j) === 48;);\r\n\r\n return r.slice(0, j + 1 || 1);\r\n }\r\n\r\n\r\n // Compare the value of BigNumbers x and y.\r\n function compare(x, y) {\r\n var a, b,\r\n xc = x.c,\r\n yc = y.c,\r\n i = x.s,\r\n j = y.s,\r\n k = x.e,\r\n l = y.e;\r\n\r\n // Either NaN?\r\n if (!i || !j) return null;\r\n\r\n a = xc && !xc[0];\r\n b = yc && !yc[0];\r\n\r\n // Either zero?\r\n if (a || b) return a ? b ? 0 : -j : i;\r\n\r\n // Signs differ?\r\n if (i != j) return i;\r\n\r\n a = i < 0;\r\n b = k == l;\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1;\r\n\r\n // Compare exponents.\r\n if (!b) return k > l ^ a ? 1 : -1;\r\n\r\n j = (k = xc.length) < (l = yc.length) ? k : l;\r\n\r\n // Compare digit by digit.\r\n for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1;\r\n\r\n // Compare lengths.\r\n return k == l ? 0 : k > l ^ a ? 1 : -1;\r\n }\r\n\r\n\r\n /*\r\n * Check that n is a primitive number, an integer, and in range, otherwise throw.\r\n */\r\n function intCheck(n, min, max, name) {\r\n if (n < min || n > max || n !== mathfloor(n)) {\r\n throw Error\r\n (bignumberError + (name || 'Argument') + (typeof n == 'number'\r\n ? n < min || n > max ? ' out of range: ' : ' not an integer: '\r\n : ' not a primitive number: ') + String(n));\r\n }\r\n }\r\n\r\n\r\n // Assumes finite n.\r\n function isOdd(n) {\r\n var k = n.c.length - 1;\r\n return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0;\r\n }\r\n\r\n\r\n function toExponential(str, e) {\r\n return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) +\r\n (e < 0 ? 'e' : 'e+') + e;\r\n }\r\n\r\n\r\n function toFixedPoint(str, e, z) {\r\n var len, zs;\r\n\r\n // Negative exponent?\r\n if (e < 0) {\r\n\r\n // Prepend zeros.\r\n for (zs = z + '.'; ++e; zs += z);\r\n str = zs + str;\r\n\r\n // Positive exponent\r\n } else {\r\n len = str.length;\r\n\r\n // Append zeros.\r\n if (++e > len) {\r\n for (zs = z, e -= len; --e; zs += z);\r\n str += zs;\r\n } else if (e < len) {\r\n str = str.slice(0, e) + '.' + str.slice(e);\r\n }\r\n }\r\n\r\n return str;\r\n }\r\n\r\n\r\n // EXPORT\r\n\r\n\r\n BigNumber = clone();\r\n BigNumber['default'] = BigNumber.BigNumber = BigNumber;\r\n\r\n // AMD.\r\n if (typeof define == 'function' && define.amd) {\r\n define(function () { return BigNumber; });\r\n\r\n // Node.js and other environments that support module.exports.\r\n } else if (typeof module != 'undefined' && module.exports) {\r\n module.exports = BigNumber;\r\n\r\n // Browser.\r\n } else {\r\n if (!globalObject) {\r\n globalObject = typeof self != 'undefined' && self ? 
self : window;\r\n }\r\n\r\n globalObject.BigNumber = BigNumber;\r\n }\r\n})(this);\r\n","/*jshint node:true */\n'use strict';\nvar Buffer = require('buffer').Buffer; // browserify\nvar SlowBuffer = require('buffer').SlowBuffer;\n\nmodule.exports = bufferEq;\n\nfunction bufferEq(a, b) {\n\n // shortcutting on type is necessary for correctness\n if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {\n return false;\n }\n\n // buffer sizes should be well-known information, so despite this\n // shortcutting, it doesn't leak any information about the *contents* of the\n // buffers.\n if (a.length !== b.length) {\n return false;\n }\n\n var c = 0;\n for (var i = 0; i < a.length; i++) {\n /*jshint bitwise:false */\n c |= a[i] ^ b[i]; // XOR\n }\n return c === 0;\n}\n\nbufferEq.install = function() {\n Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) {\n return bufferEq(this, that);\n };\n};\n\nvar origBufEqual = Buffer.prototype.equal;\nvar origSlowBufEqual = SlowBuffer.prototype.equal;\nbufferEq.restore = function() {\n Buffer.prototype.equal = origBufEqual;\n SlowBuffer.prototype.equal = origSlowBufEqual;\n};\n","/*!\n * compressible\n * Copyright(c) 2013 Jonathan Ong\n * Copyright(c) 2014 Jeremiah Senkpiel\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\n\n/**\n * Module variables.\n * @private\n */\n\nvar COMPRESSIBLE_TYPE_REGEXP = /^text\\/|\\+(?:json|text|xml)$/i\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = compressible\n\n/**\n * Checks if a type is compressible.\n *\n * @param {string} type\n * @return {Boolean} compressible\n * @public\n */\n\nfunction compressible (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // strip parameters\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && match[1].toLowerCase()\n var data = db[mime]\n\n // return database information\n if (data && data.compressible !== undefined) {\n return data.compressible\n }\n\n // fallback to regexp or unknown\n return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined\n}\n","/* eslint-env browser */\n\n/**\n * This is the web browser implementation of `debug()`.\n */\n\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.storage = localstorage();\nexports.destroy = (() => {\n\tlet warned = false;\n\n\treturn () => {\n\t\tif (!warned) {\n\t\t\twarned = true;\n\t\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.');\n\t\t}\n\t};\n})();\n\n/**\n * Colors.\n */\n\nexports.colors = [\n\t'#0000CC',\n\t'#0000FF',\n\t'#0033CC',\n\t'#0033FF',\n\t'#0066CC',\n\t'#0066FF',\n\t'#0099CC',\n\t'#0099FF',\n\t'#00CC00',\n\t'#00CC33',\n\t'#00CC66',\n\t'#00CC99',\n\t'#00CCCC',\n\t'#00CCFF',\n\t'#3300CC',\n\t'#3300FF',\n\t'#3333CC',\n\t'#3333FF',\n\t'#3366CC',\n\t'#3366FF',\n\t'#3399CC',\n\t'#3399FF',\n\t'#33CC00',\n\t'#33CC33',\n\t'#33CC66',\n\t'#33CC99',\n\t'#33CCCC',\n\t'#33CCFF',\n\t'#6600CC',\n\t'#6600FF',\n\t'#6633CC',\n\t'#6633FF',\n\t'#66CC00',\n\t'#66CC33',\n\t'#9900CC',\n\t'#9900FF',\n\t'#9933CC',\n\t'#9933FF',\n\t'#99CC00',\n\t'#99CC33',\n\t'#CC0000',\n\t'#CC0033',\n\t'#CC0066',\n\t'#CC0099',\n\t'#CC00CC',\n\t'#CC00FF',\n\t'#CC3300',\n\t'#CC3333',\n\t'#CC3366',\n\t'#CC3399',\n\t'#CC33CC',\n\t'#CC33FF',\n\t'#CC6600',\n\t'#CC6633',\n\t'#CC9900',\n\t'#CC9933',\n\t'#CCCC00',\n\t'#CCCC33',\n\t'#FF0000',\n\t'#FF0033',\n\t'#FF0066',\n\t'#FF0099',\n\t'#FF00CC',\n\t'#FF00FF',\n\t'#FF3300',\n\t'#FF3333',\n\t'#FF3366',\n\t'#FF3399',\n\t'#FF33CC',\n\t'#FF33FF',\n\t'#FF6600',\n\t'#FF6633',\n\t'#FF9900',\n\t'#FF9933',\n\t'#FFCC00',\n\t'#FFCC33'\n];\n\n/**\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\n * and the Firebug extension (any Firefox version) are known\n * to support \"%c\" CSS customizations.\n *\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\n */\n\n// eslint-disable-next-line complexity\nfunction useColors() {\n\t// NB: In an Electron preload script, document will be defined but not fully\n\t// initialized. Since we know we're in Chrome, we'll just detect this case\n\t// explicitly\n\tif (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {\n\t\treturn true;\n\t}\n\n\t// Internet Explorer and Edge do not support colors.\n\tif (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\\/(\\d+)/)) {\n\t\treturn false;\n\t}\n\n\t// Is webkit? http://stackoverflow.com/a/16459606/376773\n\t// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\n\treturn (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\n\t\t// Is firebug? http://stackoverflow.com/a/398120/376773\n\t\t(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\n\t\t// Is firefox >= v31?\n\t\t// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\/(\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\n\t\t// Double check webkit in userAgent just in case we are in a worker\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\/(\\d+)/));\n}\n\n/**\n * Colorize log arguments if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\targs[0] = (this.useColors ? '%c' : '') +\n\t\tthis.namespace +\n\t\t(this.useColors ? ' %c' : ' ') +\n\t\targs[0] +\n\t\t(this.useColors ? 
'%c ' : ' ') +\n\t\t'+' + module.exports.humanize(this.diff);\n\n\tif (!this.useColors) {\n\t\treturn;\n\t}\n\n\tconst c = 'color: ' + this.color;\n\targs.splice(1, 0, c, 'color: inherit');\n\n\t// The final \"%c\" is somewhat tricky, because there could be other\n\t// arguments passed either before or after the %c, so we need to\n\t// figure out the correct index to insert the CSS into\n\tlet index = 0;\n\tlet lastC = 0;\n\targs[0].replace(/%[a-zA-Z%]/g, match => {\n\t\tif (match === '%%') {\n\t\t\treturn;\n\t\t}\n\t\tindex++;\n\t\tif (match === '%c') {\n\t\t\t// We only are interested in the *last* %c\n\t\t\t// (the user may have provided their own)\n\t\t\tlastC = index;\n\t\t}\n\t});\n\n\targs.splice(lastC, 0, c);\n}\n\n/**\n * Invokes `console.debug()` when available.\n * No-op when `console.debug` is not a \"function\".\n * If `console.debug` is not available, falls back\n * to `console.log`.\n *\n * @api public\n */\nexports.log = console.debug || console.log || (() => {});\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\ttry {\n\t\tif (namespaces) {\n\t\t\texports.storage.setItem('debug', namespaces);\n\t\t} else {\n\t\t\texports.storage.removeItem('debug');\n\t\t}\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\nfunction load() {\n\tlet r;\n\ttry {\n\t\tr = exports.storage.getItem('debug');\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n\n\t// If debug isn't set in LS, and we're in Electron, try to load $DEBUG\n\tif (!r && typeof process !== 'undefined' && 'env' in process) {\n\t\tr = process.env.DEBUG;\n\t}\n\n\treturn r;\n}\n\n/**\n * Localstorage attempts to return the localstorage.\n *\n * This is necessary because safari throws\n * when a user disables cookies/localstorage\n * and you attempt to access it.\n *\n * @return {LocalStorage}\n * @api private\n */\n\nfunction localstorage() {\n\ttry {\n\t\t// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context\n\t\t// The Browser also has localStorage in the global context.\n\t\treturn localStorage;\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.\n */\n\nformatters.j = function (v) {\n\ttry {\n\t\treturn JSON.stringify(v);\n\t} catch (error) {\n\t\treturn '[UnexpectedJSONParseError]: ' + error.message;\n\t}\n};\n","\n/**\n * This is the common logic for both the Node.js and web browser\n * implementations of `debug()`.\n */\n\nfunction setup(env) {\n\tcreateDebug.debug = createDebug;\n\tcreateDebug.default = createDebug;\n\tcreateDebug.coerce = coerce;\n\tcreateDebug.disable = disable;\n\tcreateDebug.enable = enable;\n\tcreateDebug.enabled = enabled;\n\tcreateDebug.humanize = require('ms');\n\tcreateDebug.destroy = destroy;\n\n\tObject.keys(env).forEach(key => {\n\t\tcreateDebug[key] = env[key];\n\t});\n\n\t/**\n\t* The currently active debug mode names, and names to skip.\n\t*/\n\n\tcreateDebug.names = [];\n\tcreateDebug.skips = [];\n\n\t/**\n\t* Map of special \"%n\" handling functions, for the debug \"format\" argument.\n\t*\n\t* Valid key names are a single, lower or upper-case letter, 
i.e. \"n\" and \"N\".\n\t*/\n\tcreateDebug.formatters = {};\n\n\t/**\n\t* Selects a color for a debug namespace\n\t* @param {String} namespace The namespace string for the debug instance to be colored\n\t* @return {Number|String} An ANSI color code for the given namespace\n\t* @api private\n\t*/\n\tfunction selectColor(namespace) {\n\t\tlet hash = 0;\n\n\t\tfor (let i = 0; i < namespace.length; i++) {\n\t\t\thash = ((hash << 5) - hash) + namespace.charCodeAt(i);\n\t\t\thash |= 0; // Convert to 32bit integer\n\t\t}\n\n\t\treturn createDebug.colors[Math.abs(hash) % createDebug.colors.length];\n\t}\n\tcreateDebug.selectColor = selectColor;\n\n\t/**\n\t* Create a debugger with the given `namespace`.\n\t*\n\t* @param {String} namespace\n\t* @return {Function}\n\t* @api public\n\t*/\n\tfunction createDebug(namespace) {\n\t\tlet prevTime;\n\t\tlet enableOverride = null;\n\t\tlet namespacesCache;\n\t\tlet enabledCache;\n\n\t\tfunction debug(...args) {\n\t\t\t// Disabled?\n\t\t\tif (!debug.enabled) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst self = debug;\n\n\t\t\t// Set `diff` timestamp\n\t\t\tconst curr = Number(new Date());\n\t\t\tconst ms = curr - (prevTime || curr);\n\t\t\tself.diff = ms;\n\t\t\tself.prev = prevTime;\n\t\t\tself.curr = curr;\n\t\t\tprevTime = curr;\n\n\t\t\targs[0] = createDebug.coerce(args[0]);\n\n\t\t\tif (typeof args[0] !== 'string') {\n\t\t\t\t// Anything else let's inspect with %O\n\t\t\t\targs.unshift('%O');\n\t\t\t}\n\n\t\t\t// Apply any `formatters` transformations\n\t\t\tlet index = 0;\n\t\t\targs[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {\n\t\t\t\t// If we encounter an escaped % then don't increase the array index\n\t\t\t\tif (match === '%%') {\n\t\t\t\t\treturn '%';\n\t\t\t\t}\n\t\t\t\tindex++;\n\t\t\t\tconst formatter = createDebug.formatters[format];\n\t\t\t\tif (typeof formatter === 'function') {\n\t\t\t\t\tconst val = args[index];\n\t\t\t\t\tmatch = formatter.call(self, val);\n\n\t\t\t\t\t// Now we need to remove `args[index]` since it's inlined in the `format`\n\t\t\t\t\targs.splice(index, 1);\n\t\t\t\t\tindex--;\n\t\t\t\t}\n\t\t\t\treturn match;\n\t\t\t});\n\n\t\t\t// Apply env-specific formatting (colors, etc.)\n\t\t\tcreateDebug.formatArgs.call(self, args);\n\n\t\t\tconst logFn = self.log || createDebug.log;\n\t\t\tlogFn.apply(self, args);\n\t\t}\n\n\t\tdebug.namespace = namespace;\n\t\tdebug.useColors = createDebug.useColors();\n\t\tdebug.color = createDebug.selectColor(namespace);\n\t\tdebug.extend = extend;\n\t\tdebug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.\n\n\t\tObject.defineProperty(debug, 'enabled', {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false,\n\t\t\tget: () => {\n\t\t\t\tif (enableOverride !== null) {\n\t\t\t\t\treturn enableOverride;\n\t\t\t\t}\n\t\t\t\tif (namespacesCache !== createDebug.namespaces) {\n\t\t\t\t\tnamespacesCache = createDebug.namespaces;\n\t\t\t\t\tenabledCache = createDebug.enabled(namespace);\n\t\t\t\t}\n\n\t\t\t\treturn enabledCache;\n\t\t\t},\n\t\t\tset: v => {\n\t\t\t\tenableOverride = v;\n\t\t\t}\n\t\t});\n\n\t\t// Env-specific initialization logic for debug instances\n\t\tif (typeof createDebug.init === 'function') {\n\t\t\tcreateDebug.init(debug);\n\t\t}\n\n\t\treturn debug;\n\t}\n\n\tfunction extend(namespace, delimiter) {\n\t\tconst newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);\n\t\tnewDebug.log = this.log;\n\t\treturn newDebug;\n\t}\n\n\t/**\n\t* Enables a debug mode by namespaces. 
This can include modes\n\t* separated by a colon and wildcards.\n\t*\n\t* @param {String} namespaces\n\t* @api public\n\t*/\n\tfunction enable(namespaces) {\n\t\tcreateDebug.save(namespaces);\n\t\tcreateDebug.namespaces = namespaces;\n\n\t\tcreateDebug.names = [];\n\t\tcreateDebug.skips = [];\n\n\t\tlet i;\n\t\tconst split = (typeof namespaces === 'string' ? namespaces : '').split(/[\\s,]+/);\n\t\tconst len = split.length;\n\n\t\tfor (i = 0; i < len; i++) {\n\t\t\tif (!split[i]) {\n\t\t\t\t// ignore empty strings\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tnamespaces = split[i].replace(/\\*/g, '.*?');\n\n\t\t\tif (namespaces[0] === '-') {\n\t\t\t\tcreateDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));\n\t\t\t} else {\n\t\t\t\tcreateDebug.names.push(new RegExp('^' + namespaces + '$'));\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t* Disable debug output.\n\t*\n\t* @return {String} namespaces\n\t* @api public\n\t*/\n\tfunction disable() {\n\t\tconst namespaces = [\n\t\t\t...createDebug.names.map(toNamespace),\n\t\t\t...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)\n\t\t].join(',');\n\t\tcreateDebug.enable('');\n\t\treturn namespaces;\n\t}\n\n\t/**\n\t* Returns true if the given mode name is enabled, false otherwise.\n\t*\n\t* @param {String} name\n\t* @return {Boolean}\n\t* @api public\n\t*/\n\tfunction enabled(name) {\n\t\tif (name[name.length - 1] === '*') {\n\t\t\treturn true;\n\t\t}\n\n\t\tlet i;\n\t\tlet len;\n\n\t\tfor (i = 0, len = createDebug.skips.length; i < len; i++) {\n\t\t\tif (createDebug.skips[i].test(name)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\tfor (i = 0, len = createDebug.names.length; i < len; i++) {\n\t\t\tif (createDebug.names[i].test(name)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\treturn false;\n\t}\n\n\t/**\n\t* Convert regexp to namespace\n\t*\n\t* @param {RegExp} regxep\n\t* @return {String} namespace\n\t* @api private\n\t*/\n\tfunction toNamespace(regexp) {\n\t\treturn regexp.toString()\n\t\t\t.substring(2, regexp.toString().length - 2)\n\t\t\t.replace(/\\.\\*\\?$/, '*');\n\t}\n\n\t/**\n\t* Coerce `val`.\n\t*\n\t* @param {Mixed} val\n\t* @return {Mixed}\n\t* @api private\n\t*/\n\tfunction coerce(val) {\n\t\tif (val instanceof Error) {\n\t\t\treturn val.stack || val.message;\n\t\t}\n\t\treturn val;\n\t}\n\n\t/**\n\t* XXX DO NOT USE. This is a temporary stub function.\n\t* XXX It WILL be removed in the next major release.\n\t*/\n\tfunction destroy() {\n\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');\n\t}\n\n\tcreateDebug.enable(createDebug.load());\n\n\treturn createDebug;\n}\n\nmodule.exports = setup;\n","/**\n * Detect Electron renderer / nwjs process, which is node, but we should\n * treat as a browser.\n */\n\nif (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {\n\tmodule.exports = require('./browser.js');\n} else {\n\tmodule.exports = require('./node.js');\n}\n","/**\n * Module dependencies.\n */\n\nconst tty = require('tty');\nconst util = require('util');\n\n/**\n * This is the Node.js implementation of `debug()`.\n */\n\nexports.init = init;\nexports.log = log;\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.destroy = util.deprecate(\n\t() => {},\n\t'Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'\n);\n\n/**\n * Colors.\n */\n\nexports.colors = [6, 2, 3, 4, 5, 1];\n\ntry {\n\t// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)\n\t// eslint-disable-next-line import/no-extraneous-dependencies\n\tconst supportsColor = require('supports-color');\n\n\tif (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {\n\t\texports.colors = [\n\t\t\t20,\n\t\t\t21,\n\t\t\t26,\n\t\t\t27,\n\t\t\t32,\n\t\t\t33,\n\t\t\t38,\n\t\t\t39,\n\t\t\t40,\n\t\t\t41,\n\t\t\t42,\n\t\t\t43,\n\t\t\t44,\n\t\t\t45,\n\t\t\t56,\n\t\t\t57,\n\t\t\t62,\n\t\t\t63,\n\t\t\t68,\n\t\t\t69,\n\t\t\t74,\n\t\t\t75,\n\t\t\t76,\n\t\t\t77,\n\t\t\t78,\n\t\t\t79,\n\t\t\t80,\n\t\t\t81,\n\t\t\t92,\n\t\t\t93,\n\t\t\t98,\n\t\t\t99,\n\t\t\t112,\n\t\t\t113,\n\t\t\t128,\n\t\t\t129,\n\t\t\t134,\n\t\t\t135,\n\t\t\t148,\n\t\t\t149,\n\t\t\t160,\n\t\t\t161,\n\t\t\t162,\n\t\t\t163,\n\t\t\t164,\n\t\t\t165,\n\t\t\t166,\n\t\t\t167,\n\t\t\t168,\n\t\t\t169,\n\t\t\t170,\n\t\t\t171,\n\t\t\t172,\n\t\t\t173,\n\t\t\t178,\n\t\t\t179,\n\t\t\t184,\n\t\t\t185,\n\t\t\t196,\n\t\t\t197,\n\t\t\t198,\n\t\t\t199,\n\t\t\t200,\n\t\t\t201,\n\t\t\t202,\n\t\t\t203,\n\t\t\t204,\n\t\t\t205,\n\t\t\t206,\n\t\t\t207,\n\t\t\t208,\n\t\t\t209,\n\t\t\t214,\n\t\t\t215,\n\t\t\t220,\n\t\t\t221\n\t\t];\n\t}\n} catch (error) {\n\t// Swallow - we only care if `supports-color` is available; it doesn't have to be.\n}\n\n/**\n * Build up the default `inspectOpts` object from the environment variables.\n *\n * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js\n */\n\nexports.inspectOpts = Object.keys(process.env).filter(key => {\n\treturn /^debug_/i.test(key);\n}).reduce((obj, key) => {\n\t// Camel-case\n\tconst prop = key\n\t\t.substring(6)\n\t\t.toLowerCase()\n\t\t.replace(/_([a-z])/g, (_, k) => {\n\t\t\treturn k.toUpperCase();\n\t\t});\n\n\t// Coerce string value into JS value\n\tlet val = process.env[key];\n\tif (/^(yes|on|true|enabled)$/i.test(val)) {\n\t\tval = true;\n\t} else if (/^(no|off|false|disabled)$/i.test(val)) {\n\t\tval = false;\n\t} else if (val === 'null') {\n\t\tval = null;\n\t} else {\n\t\tval = Number(val);\n\t}\n\n\tobj[prop] = val;\n\treturn obj;\n}, {});\n\n/**\n * Is stdout a TTY? Colored output is enabled when `true`.\n */\n\nfunction useColors() {\n\treturn 'colors' in exports.inspectOpts ?\n\t\tBoolean(exports.inspectOpts.colors) :\n\t\ttty.isatty(process.stderr.fd);\n}\n\n/**\n * Adds ANSI color escape codes if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\tconst {namespace: name, useColors} = this;\n\n\tif (useColors) {\n\t\tconst c = this.color;\n\t\tconst colorCode = '\\u001B[3' + (c < 8 ? 
c : '8;5;' + c);\n\t\tconst prefix = ` ${colorCode};1m${name} \\u001B[0m`;\n\n\t\targs[0] = prefix + args[0].split('\\n').join('\\n' + prefix);\n\t\targs.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\\u001B[0m');\n\t} else {\n\t\targs[0] = getDate() + name + ' ' + args[0];\n\t}\n}\n\nfunction getDate() {\n\tif (exports.inspectOpts.hideDate) {\n\t\treturn '';\n\t}\n\treturn new Date().toISOString() + ' ';\n}\n\n/**\n * Invokes `util.format()` with the specified arguments and writes to stderr.\n */\n\nfunction log(...args) {\n\treturn process.stderr.write(util.format(...args) + '\\n');\n}\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\tif (namespaces) {\n\t\tprocess.env.DEBUG = namespaces;\n\t} else {\n\t\t// If you set a process.env field to null or undefined, it gets cast to the\n\t\t// string 'null' or 'undefined'. Just delete instead.\n\t\tdelete process.env.DEBUG;\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\n\nfunction load() {\n\treturn process.env.DEBUG;\n}\n\n/**\n * Init logic for `debug` instances.\n *\n * Create a new `inspectOpts` object in case `useColors` is set\n * differently for a particular `debug` instance.\n */\n\nfunction init(debug) {\n\tdebug.inspectOpts = {};\n\n\tconst keys = Object.keys(exports.inspectOpts);\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tdebug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %o to `util.inspect()`, all on a single line.\n */\n\nformatters.o = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts)\n\t\t.split('\\n')\n\t\t.map(str => str.trim())\n\t\t.join(' ');\n};\n\n/**\n * Map %O to `util.inspect()`, allowing multiple lines if needed.\n */\n\nformatters.O = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts);\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","var stream = require('readable-stream')\nvar eos = require('end-of-stream')\nvar inherits = require('inherits')\nvar shift = require('stream-shift')\n\nvar SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)\n ? Buffer.from([0])\n : new Buffer([0])\n\nvar onuncork = function(self, fn) {\n if (self._corked) self.once('uncork', fn)\n else fn()\n}\n\nvar autoDestroy = function (self, err) {\n if (self._autoDestroy) self.destroy(err)\n}\n\nvar destroyer = function(self, end) {\n return function(err) {\n if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err)\n else if (end && !self._ended) self.end()\n }\n}\n\nvar end = function(ws, fn) {\n if (!ws) return fn()\n if (ws._writableState && ws._writableState.finished) return fn()\n if (ws._writableState) return ws.end(fn)\n ws.end()\n fn()\n}\n\nvar noop = function() {}\n\nvar toStreams2 = function(rs) {\n return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)\n}\n\nvar Duplexify = function(writable, readable, opts) {\n if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)\n stream.Duplex.call(this, opts)\n\n this._writable = null\n this._readable = null\n this._readable2 = null\n\n this._autoDestroy = !opts || opts.autoDestroy !== false\n this._forwardDestroy = !opts || opts.destroy !== false\n this._forwardEnd = !opts || opts.end !== false\n this._corked = 1 // start corked\n this._ondrain = null\n this._drained = false\n this._forwarding = false\n this._unwrite = null\n this._unread = null\n this._ended = false\n\n this.destroyed = false\n\n if (writable) this.setWritable(writable)\n if (readable) this.setReadable(readable)\n}\n\ninherits(Duplexify, stream.Duplex)\n\nDuplexify.obj = function(writable, readable, opts) {\n if (!opts) opts = {}\n opts.objectMode = true\n opts.highWaterMark = 16\n return new Duplexify(writable, readable, opts)\n}\n\nDuplexify.prototype.cork = function() {\n if (++this._corked === 1) this.emit('cork')\n}\n\nDuplexify.prototype.uncork = function() {\n if (this._corked && --this._corked === 0) this.emit('uncork')\n}\n\nDuplexify.prototype.setWritable = function(writable) {\n if (this._unwrite) this._unwrite()\n\n if (this.destroyed) {\n if (writable && writable.destroy) writable.destroy()\n return\n }\n\n if (writable === null || writable === false) {\n this.end()\n return\n }\n\n var self = this\n var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))\n\n var ondrain = function() {\n var ondrain = self._ondrain\n self._ondrain = null\n if (ondrain) ondrain()\n }\n\n var clear = function() {\n self._writable.removeListener('drain', ondrain)\n unend()\n }\n\n if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks\n\n this._writable = writable\n this._writable.on('drain', ondrain)\n this._unwrite = clear\n\n this.uncork() // always uncork setWritable\n}\n\nDuplexify.prototype.setReadable = function(readable) {\n if (this._unread) this._unread()\n\n if (this.destroyed) {\n if (readable && readable.destroy) readable.destroy()\n return\n }\n\n if (readable === null || readable === false) {\n this.push(null)\n this.resume()\n return\n }\n\n var self = this\n var unend = eos(readable, {writable:false, readable:true}, destroyer(this))\n\n var onreadable = function() {\n self._forward()\n }\n\n var onend = function() {\n self.push(null)\n }\n\n var clear = function() {\n self._readable2.removeListener('readable', onreadable)\n self._readable2.removeListener('end', onend)\n unend()\n }\n\n this._drained = true\n this._readable = readable\n this._readable2 = readable._readableState ? 
readable : toStreams2(readable)\n this._readable2.on('readable', onreadable)\n this._readable2.on('end', onend)\n this._unread = clear\n\n this._forward()\n}\n\nDuplexify.prototype._read = function() {\n this._drained = true\n this._forward()\n}\n\nDuplexify.prototype._forward = function() {\n if (this._forwarding || !this._readable2 || !this._drained) return\n this._forwarding = true\n\n var data\n\n while (this._drained && (data = shift(this._readable2)) !== null) {\n if (this.destroyed) continue\n this._drained = this.push(data)\n }\n\n this._forwarding = false\n}\n\nDuplexify.prototype.destroy = function(err, cb) {\n if (!cb) cb = noop\n if (this.destroyed) return cb(null)\n this.destroyed = true\n\n var self = this\n process.nextTick(function() {\n self._destroy(err)\n cb(null)\n })\n}\n\nDuplexify.prototype._destroy = function(err) {\n if (err) {\n var ondrain = this._ondrain\n this._ondrain = null\n if (ondrain) ondrain(err)\n else this.emit('error', err)\n }\n\n if (this._forwardDestroy) {\n if (this._readable && this._readable.destroy) this._readable.destroy()\n if (this._writable && this._writable.destroy) this._writable.destroy()\n }\n\n this.emit('close')\n}\n\nDuplexify.prototype._write = function(data, enc, cb) {\n if (this.destroyed) return\n if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))\n if (data === SIGNAL_FLUSH) return this._finish(cb)\n if (!this._writable) return cb()\n\n if (this._writable.write(data) === false) this._ondrain = cb\n else if (!this.destroyed) cb()\n}\n\nDuplexify.prototype._finish = function(cb) {\n var self = this\n this.emit('preend')\n onuncork(this, function() {\n end(self._forwardEnd && self._writable, function() {\n // haxx to not emit prefinish twice\n if (self._writableState.prefinished === false) self._writableState.prefinished = true\n self.emit('prefinish')\n onuncork(self, cb)\n })\n })\n}\n\nDuplexify.prototype.end = function(data, enc, cb) {\n if (typeof data === 'function') return this.end(null, null, data)\n if (typeof enc === 'function') return this.end(data, null, enc)\n this._ended = true\n if (data) this.write(data)\n if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH)\n return stream.Writable.prototype.end.call(this, cb)\n}\n\nmodule.exports = Duplexify\n","'use strict';\n\nvar Buffer = require('safe-buffer').Buffer;\n\nvar getParamBytesForAlg = require('./param-bytes-for-alg');\n\nvar MAX_OCTET = 0x80,\n\tCLASS_UNIVERSAL = 0,\n\tPRIMITIVE_BIT = 0x20,\n\tTAG_SEQ = 0x10,\n\tTAG_INT = 0x02,\n\tENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6),\n\tENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6);\n\nfunction base64Url(base64) {\n\treturn base64\n\t\t.replace(/=/g, '')\n\t\t.replace(/\\+/g, '-')\n\t\t.replace(/\\//g, '_');\n}\n\nfunction signatureAsBuffer(signature) {\n\tif (Buffer.isBuffer(signature)) {\n\t\treturn signature;\n\t} else if ('string' === typeof signature) {\n\t\treturn Buffer.from(signature, 'base64');\n\t}\n\n\tthrow new TypeError('ECDSA signature must be a Base64 string or a Buffer');\n}\n\nfunction derToJose(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\t// the DER encoded param should at most be the param size, plus a padding\n\t// zero, since due to being a signed integer\n\tvar maxEncodedParamLength = paramBytes + 1;\n\n\tvar inputLength = signature.length;\n\n\tvar offset = 0;\n\tif (signature[offset++] !== ENCODED_TAG_SEQ) {\n\t\tthrow new Error('Could 
not find expected \"seq\"');\n\t}\n\n\tvar seqLength = signature[offset++];\n\tif (seqLength === (MAX_OCTET | 1)) {\n\t\tseqLength = signature[offset++];\n\t}\n\n\tif (inputLength - offset < seqLength) {\n\t\tthrow new Error('\"seq\" specified length of \"' + seqLength + '\", only \"' + (inputLength - offset) + '\" remaining');\n\t}\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"r\"');\n\t}\n\n\tvar rLength = signature[offset++];\n\n\tif (inputLength - offset - 2 < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", only \"' + (inputLength - offset - 2) + '\" available');\n\t}\n\n\tif (maxEncodedParamLength < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar rOffset = offset;\n\toffset += rLength;\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"s\"');\n\t}\n\n\tvar sLength = signature[offset++];\n\n\tif (inputLength - offset !== sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", expected \"' + (inputLength - offset) + '\"');\n\t}\n\n\tif (maxEncodedParamLength < sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar sOffset = offset;\n\toffset += sLength;\n\n\tif (offset !== inputLength) {\n\t\tthrow new Error('Expected to consume entire buffer, but \"' + (inputLength - offset) + '\" bytes remain');\n\t}\n\n\tvar rPadding = paramBytes - rLength,\n\t\tsPadding = paramBytes - sLength;\n\n\tvar dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength);\n\n\tfor (offset = 0; offset < rPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength);\n\n\toffset = paramBytes;\n\n\tfor (var o = offset; offset < o + sPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength);\n\n\tdst = dst.toString('base64');\n\tdst = base64Url(dst);\n\n\treturn dst;\n}\n\nfunction countPadding(buf, start, stop) {\n\tvar padding = 0;\n\twhile (start + padding < stop && buf[start + padding] === 0) {\n\t\t++padding;\n\t}\n\n\tvar needsSign = buf[start + padding] >= MAX_OCTET;\n\tif (needsSign) {\n\t\t--padding;\n\t}\n\n\treturn padding;\n}\n\nfunction joseToDer(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\tvar signatureBytes = signature.length;\n\tif (signatureBytes !== paramBytes * 2) {\n\t\tthrow new TypeError('\"' + alg + '\" signatures must be \"' + paramBytes * 2 + '\" bytes, saw \"' + signatureBytes + '\"');\n\t}\n\n\tvar rPadding = countPadding(signature, 0, paramBytes);\n\tvar sPadding = countPadding(signature, paramBytes, signature.length);\n\tvar rLength = paramBytes - rPadding;\n\tvar sLength = paramBytes - sPadding;\n\n\tvar rsBytes = 1 + 1 + rLength + 1 + 1 + sLength;\n\n\tvar shortLength = rsBytes < MAX_OCTET;\n\n\tvar dst = Buffer.allocUnsafe((shortLength ? 
2 : 3) + rsBytes);\n\n\tvar offset = 0;\n\tdst[offset++] = ENCODED_TAG_SEQ;\n\tif (shortLength) {\n\t\t// Bit 8 has value \"0\"\n\t\t// bits 7-1 give the length.\n\t\tdst[offset++] = rsBytes;\n\t} else {\n\t\t// Bit 8 of first octet has value \"1\"\n\t\t// bits 7-1 give the number of additional length octets.\n\t\tdst[offset++] = MAX_OCTET\t| 1;\n\t\t// length, base 256\n\t\tdst[offset++] = rsBytes & 0xff;\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = rLength;\n\tif (rPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\toffset += signature.copy(dst, offset, 0, paramBytes);\n\t} else {\n\t\toffset += signature.copy(dst, offset, rPadding, paramBytes);\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = sLength;\n\tif (sPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\tsignature.copy(dst, offset, paramBytes);\n\t} else {\n\t\tsignature.copy(dst, offset, paramBytes + sPadding);\n\t}\n\n\treturn dst;\n}\n\nmodule.exports = {\n\tderToJose: derToJose,\n\tjoseToDer: joseToDer\n};\n","'use strict';\n\nfunction getParamSize(keySize) {\n\tvar result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1);\n\treturn result;\n}\n\nvar paramBytesForAlg = {\n\tES256: getParamSize(256),\n\tES384: getParamSize(384),\n\tES512: getParamSize(521)\n};\n\nfunction getParamBytesForAlg(alg) {\n\tvar paramBytes = paramBytesForAlg[alg];\n\tif (paramBytes) {\n\t\treturn paramBytes;\n\t}\n\n\tthrow new Error('Unknown algorithm \"' + alg + '\"');\n}\n\nmodule.exports = getParamBytesForAlg;\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","var punycode = require('punycode');\nvar entities = require('./entities.json');\n\nmodule.exports = decode;\n\nfunction decode (str) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n\n return str.replace(/&(#?[^;\\W]+;?)/g, function (_, match) {\n var m;\n if (m = /^#(\\d+);?$/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 10) ]);\n } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 16) ]);\n } else {\n // named entity\n var hasSemi = /;$/.test(match);\n var withoutSemi = hasSemi ? match.replace(/;$/, '') : match;\n var target = entities[withoutSemi] || (hasSemi && entities[match]);\n\n if (typeof target === 'number') {\n return punycode.ucs2.encode([ target ]);\n } else if (typeof target === 'string') {\n return target;\n } else {\n return '&' + match;\n }\n }\n });\n}\n","var punycode = require('punycode');\nvar revEntities = require('./reversed.json');\n\nmodule.exports = encode;\n\nfunction encode (str, opts) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n if (!opts) opts = {};\n\n var numeric = true;\n if (opts.named) numeric = false;\n if (opts.numeric !== undefined) numeric = opts.numeric;\n\n var special = opts.special || {\n '\"': true, \"'\": true,\n '<': true, '>': true,\n '&': true\n };\n\n var codePoints = punycode.ucs2.decode(str);\n var chars = [];\n for (var i = 0; i < codePoints.length; i++) {\n var cc = codePoints[i];\n var c = punycode.ucs2.encode([ cc ]);\n var e = revEntities[cc];\n if (e && (cc >= 127 || special[c]) && !numeric) {\n chars.push('&' + (/;$/.test(e) ? 
e : e + ';'));\n }\n else if (cc < 32 || cc >= 127 || special[c]) {\n chars.push('&#' + cc + ';');\n }\n else {\n chars.push(c);\n }\n }\n return chars.join('');\n}\n","exports.encode = require('./encode');\nexports.decode = require('./decode');\n","/**\n * @author Toru Nagashima \n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The 
target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","'use strict';\n\nvar hasOwn = Object.prototype.hasOwnProperty;\nvar toStr = Object.prototype.toString;\nvar defineProperty = Object.defineProperty;\nvar gOPD = Object.getOwnPropertyDescriptor;\n\nvar isArray = function isArray(arr) {\n\tif (typeof Array.isArray === 'function') {\n\t\treturn Array.isArray(arr);\n\t}\n\n\treturn toStr.call(arr) === '[object Array]';\n};\n\nvar isPlainObject = function isPlainObject(obj) {\n\tif (!obj || toStr.call(obj) !== '[object Object]') {\n\t\treturn false;\n\t}\n\n\tvar hasOwnConstructor = hasOwn.call(obj, 'constructor');\n\tvar hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf');\n\t// Not own constructor property must be Object\n\tif (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {\n\t\treturn false;\n\t}\n\n\t// Own properties are enumerated firstly, so to speed up,\n\t// if last one is own, then all properties are own.\n\tvar key;\n\tfor (key in obj) { /**/ }\n\n\treturn typeof key === 'undefined' || hasOwn.call(obj, key);\n};\n\n// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target\nvar setProperty = function setProperty(target, options) {\n\tif (defineProperty && options.name === '__proto__') {\n\t\tdefineProperty(target, options.name, {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: true,\n\t\t\tvalue: options.newValue,\n\t\t\twritable: true\n\t\t});\n\t} else {\n\t\ttarget[options.name] = options.newValue;\n\t}\n};\n\n// Return undefined instead of __proto__ if '__proto__' is not an own property\nvar getProperty = function getProperty(obj, name) {\n\tif (name === '__proto__') {\n\t\tif (!hasOwn.call(obj, name)) {\n\t\t\treturn void 0;\n\t\t} else if (gOPD) {\n\t\t\t// In early versions of node, obj['__proto__'] is buggy when obj has\n\t\t\t// __proto__ as an own property. 
Object.getOwnPropertyDescriptor() works.\n\t\t\treturn gOPD(obj, name).value;\n\t\t}\n\t}\n\n\treturn obj[name];\n};\n\nmodule.exports = function extend() {\n\tvar options, name, src, copy, copyIsArray, clone;\n\tvar target = arguments[0];\n\tvar i = 1;\n\tvar length = arguments.length;\n\tvar deep = false;\n\n\t// Handle a deep copy situation\n\tif (typeof target === 'boolean') {\n\t\tdeep = target;\n\t\ttarget = arguments[1] || {};\n\t\t// skip the boolean and the target\n\t\ti = 2;\n\t}\n\tif (target == null || (typeof target !== 'object' && typeof target !== 'function')) {\n\t\ttarget = {};\n\t}\n\n\tfor (; i < length; ++i) {\n\t\toptions = arguments[i];\n\t\t// Only deal with non-null/undefined values\n\t\tif (options != null) {\n\t\t\t// Extend the base object\n\t\t\tfor (name in options) {\n\t\t\t\tsrc = getProperty(target, name);\n\t\t\t\tcopy = getProperty(options, name);\n\n\t\t\t\t// Prevent never-ending loop\n\t\t\t\tif (target !== copy) {\n\t\t\t\t\t// Recurse if we're merging plain objects or arrays\n\t\t\t\t\tif (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) {\n\t\t\t\t\t\tif (copyIsArray) {\n\t\t\t\t\t\t\tcopyIsArray = false;\n\t\t\t\t\t\t\tclone = src && isArray(src) ? src : [];\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tclone = src && isPlainObject(src) ? src : {};\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// Never move original objects, clone them\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: extend(deep, clone, copy) });\n\n\t\t\t\t\t// Don't bring in undefined values\n\t\t\t\t\t} else if (typeof copy !== 'undefined') {\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: copy });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Return the modified object\n\treturn target;\n};\n","(function(scope) {'use strict';\nfunction B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.byteOffset,r.byteLength),f.toString(e)}var w=function(r){return Buffer.from(r)};function h(r){for(var e=0,f=Math.min(256*256,r.length+1),n=new Uint16Array(f),i=[],o=0;;){var t=e=f-1){var s=n.subarray(0,o),m=s;if(i.push(String.fromCharCode.apply(null,m)),!t)return i.join(\"\");r=r.subarray(e),e=0,o=0}var a=r[e++];if((a&128)===0)n[o++]=a;else if((a&224)===192){var d=r[e++]&63;n[o++]=(a&31)<<6|d}else if((a&240)===224){var d=r[e++]&63,l=r[e++]&63;n[o++]=(a&31)<<12|d<<6|l}else if((a&248)===240){var d=r[e++]&63,l=r[e++]&63,R=r[e++]&63,c=(a&7)<<18|d<<12|l<<6|R;c>65535&&(c-=65536,n[o++]=c>>>10&1023|55296,c=56320|c&1023),n[o++]=c}}}function F(r){for(var e=0,f=r.length,n=0,i=Math.max(32,f+(f>>>1)+7),o=new Uint8Array(i>>>3<<3);e=55296&&t<=56319){if(e=55296&&t<=56319)continue}if(n+4>o.length){i+=8,i*=1+e/r.length*2,i=i>>>3<<3;var m=new Uint8Array(i);m.set(o),o=m}if((t&4294967168)===0){o[n++]=t;continue}else if((t&4294965248)===0)o[n++]=t>>>6&31|192;else if((t&4294901760)===0)o[n++]=t>>>12&15|224,o[n++]=t>>>6&63|128;else if((t&4292870144)===0)o[n++]=t>>>18&7|240,o[n++]=t>>>12&63|128,o[n++]=t>>>6&63|128;else continue;o[n++]=t&63|128}return o.slice?o.slice(0,n):o.subarray(0,n)}var u=\"Failed to \",p=function(r,e,f){if(r)throw new Error(\"\".concat(u).concat(e,\": the '\").concat(f,\"' option is unsupported.\"))};var x=typeof Buffer==\"function\"&&Buffer.from;var A=x?w:F;function v(){this.encoding=\"utf-8\"}v.prototype.encode=function(r,e){return p(e&&e.stream,\"encode\",\"stream\"),A(r)};function U(r){var e;try{var f=new Blob([r],{type:\"text/plain;charset=UTF-8\"});e=URL.createObjectURL(f);var n=new XMLHttpRequest;return 
n.open(\"GET\",e,!1),n.send(),n.responseText}finally{e&&URL.revokeObjectURL(e)}}var O=!x&&typeof Blob==\"function\"&&typeof URL==\"function\"&&typeof URL.createObjectURL==\"function\",S=[\"utf-8\",\"utf8\",\"unicode-1-1-utf-8\"],T=h;x?T=B:O&&(T=function(r){try{return U(r)}catch(e){return h(r)}});var y=\"construct 'TextDecoder'\",E=\"\".concat(u,\" \").concat(y,\": the \");function g(r,e){p(e&&e.fatal,y,\"fatal\"),r=r||\"utf-8\";var f;if(x?f=Buffer.isEncoding(r):f=S.indexOf(r.toLowerCase())!==-1,!f)throw new RangeError(\"\".concat(E,\" encoding label provided ('\").concat(r,\"') is invalid.\"));this.encoding=r,this.fatal=!1,this.ignoreBOM=!1}g.prototype.decode=function(r,e){p(e&&e.stream,\"decode\",\"stream\");var f;return r instanceof Uint8Array?f=r:r.buffer instanceof ArrayBuffer?f=new Uint8Array(r.buffer):f=new Uint8Array(r),T(f,this.encoding)};scope.TextEncoder=scope.TextEncoder||v;scope.TextDecoder=scope.TextDecoder||g;\n}(typeof window !== 'undefined' ? window : (typeof global !== 'undefined' ? global : this)));\n","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GaxiosError = void 0;\n/* eslint-disable @typescript-eslint/no-explicit-any */\nclass GaxiosError extends Error {\n constructor(message, options, response) {\n super(message);\n this.response = response;\n this.config = options;\n this.code = response.status.toString();\n }\n}\nexports.GaxiosError = GaxiosError;\n//# sourceMappingURL=common.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Gaxios = void 0;\nconst extend_1 = __importDefault(require(\"extend\"));\nconst https_1 = require(\"https\");\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst querystring_1 = __importDefault(require(\"querystring\"));\nconst is_stream_1 = __importDefault(require(\"is-stream\"));\nconst url_1 = require(\"url\");\nconst common_1 = require(\"./common\");\nconst retry_1 = require(\"./retry\");\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fetch = hasFetch() ? 
window.fetch : node_fetch_1.default;\nfunction hasWindow() {\n return typeof window !== 'undefined' && !!window;\n}\nfunction hasFetch() {\n return hasWindow() && !!window.fetch;\n}\nfunction hasBuffer() {\n return typeof Buffer !== 'undefined';\n}\nfunction hasHeader(options, header) {\n return !!getHeader(options, header);\n}\nfunction getHeader(options, header) {\n header = header.toLowerCase();\n for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) {\n if (header === key.toLowerCase()) {\n return options.headers[key];\n }\n }\n return undefined;\n}\nlet HttpsProxyAgent;\nfunction loadProxy() {\n var _a, _b, _c, _d;\n const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) ||\n ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) ||\n ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) ||\n ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy);\n if (proxy) {\n HttpsProxyAgent = require('https-proxy-agent');\n }\n return proxy;\n}\nloadProxy();\nfunction skipProxy(url) {\n var _a;\n const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy;\n if (!noProxyEnv) {\n return false;\n }\n const noProxyUrls = noProxyEnv.split(',');\n const parsedURL = new url_1.URL(url);\n return !!noProxyUrls.find(url => {\n if (url.startsWith('*.') || url.startsWith('.')) {\n url = url.replace(/^\\*\\./, '.');\n return parsedURL.hostname.endsWith(url);\n }\n else {\n return url === parsedURL.origin || url === parsedURL.hostname;\n }\n });\n}\n// Figure out if we should be using a proxy. 
Only if it's required, load\n// the https-proxy-agent module as it adds startup cost.\nfunction getProxy(url) {\n // If there is a match between the no_proxy env variables and the url, then do not proxy\n if (skipProxy(url)) {\n return undefined;\n // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy\n }\n else {\n return loadProxy();\n }\n}\nclass Gaxios {\n /**\n * The Gaxios class is responsible for making HTTP requests.\n * @param defaults The default set of options to be used for this instance.\n */\n constructor(defaults) {\n this.agentCache = new Map();\n this.defaults = defaults || {};\n }\n /**\n * Perform an HTTP request with the given options.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async request(opts = {}) {\n opts = this.validateOpts(opts);\n return this._request(opts);\n }\n async _defaultAdapter(opts) {\n const fetchImpl = opts.fetchImplementation || fetch;\n const res = (await fetchImpl(opts.url, opts));\n const data = await this.getResponseData(opts, res);\n return this.translateResponse(opts, res, data);\n }\n /**\n * Internal, retryable version of the `request` method.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async _request(opts = {}) {\n try {\n let translatedResponse;\n if (opts.adapter) {\n translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this));\n }\n else {\n translatedResponse = await this._defaultAdapter(opts);\n }\n if (!opts.validateStatus(translatedResponse.status)) {\n throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse);\n }\n return translatedResponse;\n }\n catch (e) {\n const err = e;\n err.config = opts;\n const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err);\n if (shouldRetry && config) {\n err.config.retryConfig.currentRetryAttempt =\n config.retryConfig.currentRetryAttempt;\n return this._request(err.config);\n }\n throw err;\n }\n }\n async getResponseData(opts, res) {\n switch (opts.responseType) {\n case 'stream':\n return res.body;\n case 'json': {\n let data = await res.text();\n try {\n data = JSON.parse(data);\n }\n catch (_a) {\n // continue\n }\n return data;\n }\n case 'arraybuffer':\n return res.arrayBuffer();\n case 'blob':\n return res.blob();\n default:\n return res.text();\n }\n }\n /**\n * Validates the options, and merges them with defaults.\n * @param opts The original options passed from the client.\n */\n validateOpts(options) {\n const opts = (0, extend_1.default)(true, {}, this.defaults, options);\n if (!opts.url) {\n throw new Error('URL is required.');\n }\n // baseUrl has been deprecated, remove in 2.0\n const baseUrl = opts.baseUrl || opts.baseURL;\n if (baseUrl) {\n opts.url = baseUrl + opts.url;\n }\n opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer;\n if (opts.params && Object.keys(opts.params).length > 0) {\n let additionalQueryParams = opts.paramsSerializer(opts.params);\n if (additionalQueryParams.startsWith('?')) {\n additionalQueryParams = additionalQueryParams.slice(1);\n }\n const prefix = opts.url.includes('?') ? 
'&' : '?';\n opts.url = opts.url + prefix + additionalQueryParams;\n }\n if (typeof options.maxContentLength === 'number') {\n opts.size = options.maxContentLength;\n }\n if (typeof options.maxRedirects === 'number') {\n opts.follow = options.maxRedirects;\n }\n opts.headers = opts.headers || {};\n if (opts.data) {\n const isFormData = typeof FormData === 'undefined'\n ? false\n : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData;\n if (is_stream_1.default.readable(opts.data)) {\n opts.body = opts.data;\n }\n else if (hasBuffer() && Buffer.isBuffer(opts.data)) {\n // Do not attempt to JSON.stringify() a Buffer:\n opts.body = opts.data;\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n }\n else if (typeof opts.data === 'object') {\n // If www-form-urlencoded content type has been set, but data is\n // provided as an object, serialize the content using querystring:\n if (!isFormData) {\n if (getHeader(opts, 'content-type') ===\n 'application/x-www-form-urlencoded') {\n opts.body = opts.paramsSerializer(opts.data);\n }\n else {\n // } else if (!(opts.data instanceof FormData)) {\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n opts.body = JSON.stringify(opts.data);\n }\n }\n }\n else {\n opts.body = opts.data;\n }\n }\n opts.validateStatus = opts.validateStatus || this.validateStatus;\n opts.responseType = opts.responseType || 'json';\n if (!opts.headers['Accept'] && opts.responseType === 'json') {\n opts.headers['Accept'] = 'application/json';\n }\n opts.method = opts.method || 'GET';\n const proxy = getProxy(opts.url);\n if (proxy) {\n if (this.agentCache.has(proxy)) {\n opts.agent = this.agentCache.get(proxy);\n }\n else {\n // Proxy is being used in conjunction with mTLS.\n if (opts.cert && opts.key) {\n const parsedURL = new url_1.URL(proxy);\n opts.agent = new HttpsProxyAgent({\n port: parsedURL.port,\n host: parsedURL.host,\n protocol: parsedURL.protocol,\n cert: opts.cert,\n key: opts.key,\n });\n }\n else {\n opts.agent = new HttpsProxyAgent(proxy);\n }\n this.agentCache.set(proxy, opts.agent);\n }\n }\n else if (opts.cert && opts.key) {\n // Configure client for mTLS:\n if (this.agentCache.has(opts.key)) {\n opts.agent = this.agentCache.get(opts.key);\n }\n else {\n opts.agent = new https_1.Agent({\n cert: opts.cert,\n key: opts.key,\n });\n this.agentCache.set(opts.key, opts.agent);\n }\n }\n return opts;\n }\n /**\n * By default, throw for any non-2xx status code\n * @param status status code from the HTTP response\n */\n validateStatus(status) {\n return status >= 200 && status < 300;\n }\n /**\n * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo)\n * @param params key value pars to encode\n */\n paramsSerializer(params) {\n return querystring_1.default.stringify(params);\n }\n translateResponse(opts, res, data) {\n // headers need to be converted from a map to an obj\n const headers = {};\n res.headers.forEach((value, key) => {\n headers[key] = value;\n });\n return {\n config: opts,\n data: data,\n headers,\n status: res.status,\n statusText: res.statusText,\n // XMLHttpRequestLike\n request: {\n responseURL: res.url,\n },\n };\n }\n}\nexports.Gaxios = Gaxios;\n//# sourceMappingURL=gaxios.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License 
at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0;\nconst gaxios_1 = require(\"./gaxios\");\nObject.defineProperty(exports, \"Gaxios\", { enumerable: true, get: function () { return gaxios_1.Gaxios; } });\nvar common_1 = require(\"./common\");\nObject.defineProperty(exports, \"GaxiosError\", { enumerable: true, get: function () { return common_1.GaxiosError; } });\n/**\n * The default instance used when the `request` method is directly\n * invoked.\n */\nexports.instance = new gaxios_1.Gaxios();\n/**\n * Make an HTTP request using the given options.\n * @param opts Options for the request\n */\nasync function request(opts) {\n return exports.instance.request(opts);\n}\nexports.request = request;\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRetryConfig = void 0;\nasync function getRetryConfig(err) {\n var _a;\n let config = getConfig(err);\n if (!err || !err.config || (!config && !err.config.retry)) {\n return { shouldRetry: false };\n }\n config = config || {};\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n config.retry =\n config.retry === undefined || config.retry === null ? 3 : config.retry;\n config.httpMethodsToRetry = config.httpMethodsToRetry || [\n 'GET',\n 'HEAD',\n 'PUT',\n 'OPTIONS',\n 'DELETE',\n ];\n config.noResponseRetries =\n config.noResponseRetries === undefined || config.noResponseRetries === null\n ? 
2\n : config.noResponseRetries;\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n config.statusCodesToRetry = config.statusCodesToRetry || retryRanges;\n // Put the config back into the err\n err.config.retryConfig = config;\n // Determine if we should retry the request\n const shouldRetryFn = config.shouldRetry || shouldRetryRequest;\n if (!(await shouldRetryFn(err))) {\n return { shouldRetry: false, config: err.config };\n }\n // Calculate time to wait with exponential backoff.\n // If this is the first retry, look for a configured retryDelay.\n const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100;\n // Formula: retryDelay + ((2^c - 1 / 2) * 1000)\n const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000;\n // We're going to retry! Incremenent the counter.\n err.config.retryConfig.currentRetryAttempt += 1;\n // Create a promise that invokes the retry after the backOffDelay\n const backoff = config.retryBackoff\n ? config.retryBackoff(err, delay)\n : new Promise(resolve => {\n setTimeout(resolve, delay);\n });\n // Notify the user if they added an `onRetryAttempt` handler\n if (config.onRetryAttempt) {\n config.onRetryAttempt(err);\n }\n // Return the promise in which recalls Gaxios to retry the request\n await backoff;\n return { shouldRetry: true, config: err.config };\n}\nexports.getRetryConfig = getRetryConfig;\n/**\n * Determine based on config if we should retry the request.\n * @param err The GaxiosError passed to the interceptor.\n */\nfunction shouldRetryRequest(err) {\n const config = getConfig(err);\n // node-fetch raises an AbortError if signaled:\n // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal\n if (err.name === 'AbortError') {\n return false;\n }\n // If there's no config, or retries are disabled, return.\n if (!config || config.retry === 0) {\n return false;\n }\n // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc)\n if (!err.response &&\n (config.currentRetryAttempt || 0) >= config.noResponseRetries) {\n return false;\n }\n // Only retry with configured HttpMethods.\n if (!err.config.method ||\n config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) {\n return false;\n }\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n if (err.response && err.response.status) {\n let isInRange = false;\n for (const [min, max] of config.statusCodesToRetry) {\n const status = err.response.status;\n if (status >= min && status <= max) {\n isInRange = true;\n break;\n }\n }\n if (!isInRange) {\n return false;\n }\n }\n // If we are out of retry attempts, return\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n if (config.currentRetryAttempt >= config.retry) {\n return false;\n }\n return true;\n}\n/**\n * Acquire the raxConfig object from an GaxiosError if available.\n * @param err The Gaxios error with a config object.\n */\nfunction getConfig(err) {\n if (err && err.config && err.config.retryConfig) {\n return err.config.retryConfig;\n }\n 
return;\n}\n//# sourceMappingURL=retry.js.map","\"use strict\";\n/**\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\n/**\n * Known paths unique to Google Compute Engine Linux instances\n */\nexports.GCE_LINUX_BIOS_PATHS = {\n BIOS_DATE: '/sys/class/dmi/id/bios_date',\n BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor',\n};\nconst GCE_MAC_ADDRESS_REGEX = /^42:01/;\n/**\n * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance).\n *\n * Uses the:\n * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}.\n *\n * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise.\n */\nfunction isGoogleCloudServerless() {\n /**\n * `CLOUD_RUN_JOB` is used for Cloud Run Jobs\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n *\n * `FUNCTION_NAME` is used in older Cloud Functions environments:\n * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}.\n *\n * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments:\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}.\n */\n const isGFEnvironment = process.env.CLOUD_RUN_JOB ||\n process.env.FUNCTION_NAME ||\n process.env.K_SERVICE;\n return !!isGFEnvironment;\n}\nexports.isGoogleCloudServerless = isGoogleCloudServerless;\n/**\n * Determines if the process is running on a Linux Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngineLinux() {\n if ((0, os_1.platform)() !== 'linux')\n return false;\n try {\n // ensure this file exist\n (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE);\n // ensure this file exist and matches\n const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8');\n return /Google/.test(biosVendor);\n }\n catch (_a) {\n return false;\n }\n}\nexports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux;\n/**\n * Determines if the process is running on a Google Compute Engine instance with a known\n * MAC address.\n *\n * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise.\n */\nfunction isGoogleComputeEngineMACAddress() {\n const interfaces = (0, 
os_1.networkInterfaces)();\n for (const item of Object.values(interfaces)) {\n if (!item)\n continue;\n for (const { mac } of item) {\n if (GCE_MAC_ADDRESS_REGEX.test(mac)) {\n return true;\n }\n }\n }\n return false;\n}\nexports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress;\n/**\n * Determines if the process is running on a Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngine() {\n return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress();\n}\nexports.isGoogleComputeEngine = isGoogleComputeEngine;\n/**\n * Determines if the process is running on Google Cloud Platform.\n *\n * @returns {boolean} `true` if the process is running on GCP, `false` otherwise.\n */\nfunction detectGCPResidency() {\n return isGoogleCloudServerless() || isGoogleComputeEngine();\n}\nexports.detectGCPResidency = detectGCPResidency;\n//# sourceMappingURL=gcp-residency.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.requestTimeout = exports.setGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.project = exports.instance = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst jsonBigint = require(\"json-bigint\");\nconst gcp_residency_1 = require(\"./gcp-residency\");\nexports.BASE_PATH = '/computeMetadata/v1';\nexports.HOST_ADDRESS = 'http://169.254.169.254';\nexports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.';\nexports.HEADER_NAME = 'Metadata-Flavor';\nexports.HEADER_VALUE = 'Google';\nexports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE });\n/**\n * Returns the base URL while taking into account the GCE_METADATA_HOST\n * environment variable if it exists.\n *\n * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1.\n */\nfunction getBaseUrl(baseUrl) {\n if (!baseUrl) {\n baseUrl =\n process.env.GCE_METADATA_IP ||\n process.env.GCE_METADATA_HOST ||\n exports.HOST_ADDRESS;\n }\n // If no scheme is provided default to HTTP:\n if (!/^https?:\\/\\//.test(baseUrl)) {\n baseUrl = `http://${baseUrl}`;\n }\n return new URL(exports.BASE_PATH, baseUrl).href;\n}\n// Accepts an options object passed from the user to the API. In previous\n// versions of the API, it referred to a `Request` or an `Axios` request\n// options object. Now it refers to an object with very limited property\n// names. 
This is here to help ensure users don't pass invalid options when\n// they upgrade from 0.4 to 0.5 to 0.8.\nfunction validate(options) {\n Object.keys(options).forEach(key => {\n switch (key) {\n case 'params':\n case 'property':\n case 'headers':\n break;\n case 'qs':\n throw new Error(\"'qs' is not a valid configuration option. Please use 'params' instead.\");\n default:\n throw new Error(`'${key}' is not a valid configuration option.`);\n }\n });\n}\nasync function metadataAccessor(type, options, noResponseRetries = 3, fastFail = false) {\n options = options || {};\n if (typeof options === 'string') {\n options = { property: options };\n }\n let property = '';\n if (typeof options === 'object' && options.property) {\n property = '/' + options.property;\n }\n validate(options);\n try {\n const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request;\n const res = await requestMethod({\n url: `${getBaseUrl()}/${type}${property}`,\n headers: Object.assign({}, exports.HEADERS, options.headers),\n retryConfig: { noResponseRetries },\n params: options.params,\n responseType: 'text',\n timeout: requestTimeout(),\n });\n // NOTE: node.js converts all incoming headers to lower case.\n if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) {\n throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`);\n }\n else if (!res.data) {\n throw new Error('Invalid response from the metadata service');\n }\n if (typeof res.data === 'string') {\n try {\n return jsonBigint.parse(res.data);\n }\n catch (_a) {\n /* ignore */\n }\n }\n return res.data;\n }\n catch (e) {\n const err = e;\n if (err.response && err.response.status !== 200) {\n err.message = `Unsuccessful response status code. ${err.message}`;\n }\n throw e;\n }\n}\nasync function fastFailMetadataRequest(options) {\n const secondaryOptions = {\n ...options,\n url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)),\n };\n // We race a connection between DNS/IP to metadata server. There are a couple\n // reasons for this:\n //\n // 1. the DNS is slow in some GCP environments; by checking both, we might\n // detect the runtime environment signficantly faster.\n // 2. 
we can't just check the IP, which is tarpitted and slow to respond\n // on a user's local machine.\n //\n // Additional logic has been added to make sure that we don't create an\n // unhandled rejection in scenarios where a failure happens sometime\n // after a success.\n //\n // Note, however, if a failure happens prior to a success, a rejection should\n // occur, this is for folks running locally.\n //\n let responded = false;\n const r1 = (0, gaxios_1.request)(options)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r2;\n }\n else {\n responded = true;\n throw err;\n }\n });\n const r2 = (0, gaxios_1.request)(secondaryOptions)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r1;\n }\n else {\n responded = true;\n throw err;\n }\n });\n return Promise.race([r1, r2]);\n}\n/**\n * Obtain metadata for the current GCE instance\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction instance(options) {\n return metadataAccessor('instance', options);\n}\nexports.instance = instance;\n/**\n * Obtain metadata for the current GCP Project.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction project(options) {\n return metadataAccessor('project', options);\n}\nexports.project = project;\n/*\n * How many times should we retry detecting GCP environment.\n */\nfunction detectGCPAvailableRetries() {\n return process.env.DETECT_GCP_RETRIES\n ? Number(process.env.DETECT_GCP_RETRIES)\n : 0;\n}\nlet cachedIsAvailableResponse;\n/**\n * Determine if the metadata server is currently available.\n */\nasync function isAvailable() {\n try {\n // If a user is instantiating several GCP libraries at the same time,\n // this may result in multiple calls to isAvailable(), to detect the\n // runtime environment. 
We use the same promise for each of these calls\n // to reduce the network load.\n if (cachedIsAvailableResponse === undefined) {\n cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), \n // If the default HOST_ADDRESS has been overridden, we should not\n // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in\n // a non-GCP environment):\n !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST));\n }\n await cachedIsAvailableResponse;\n return true;\n }\n catch (e) {\n const err = e;\n if (process.env.DEBUG_AUTH) {\n console.info(err);\n }\n if (err.type === 'request-timeout') {\n // If running in a GCP environment, metadata endpoint should return\n // within ms.\n return false;\n }\n if (err.response && err.response.status === 404) {\n return false;\n }\n else {\n if (!(err.response && err.response.status === 404) &&\n // A warning is emitted if we see an unexpected err.code, or err.code\n // is not populated:\n (!err.code ||\n ![\n 'EHOSTDOWN',\n 'EHOSTUNREACH',\n 'ENETUNREACH',\n 'ENOENT',\n 'ENOTFOUND',\n 'ECONNREFUSED',\n ].includes(err.code))) {\n let code = 'UNKNOWN';\n if (err.code)\n code = err.code;\n process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning');\n }\n // Failure to resolve the metadata service means that it is not available.\n return false;\n }\n }\n}\nexports.isAvailable = isAvailable;\n/**\n * reset the memoized isAvailable() lookup.\n */\nfunction resetIsAvailableCache() {\n cachedIsAvailableResponse = undefined;\n}\nexports.resetIsAvailableCache = resetIsAvailableCache;\n/**\n * A cache for the detected GCP Residency.\n */\nexports.gcpResidencyCache = null;\n/**\n * Sets the detected GCP Residency.\n * Useful for forcing metadata server detection behavior.\n *\n * Set `null` to autodetect the environment (default behavior).\n */\nfunction setGCPResidency(value = null) {\n exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)();\n}\nexports.setGCPResidency = setGCPResidency;\n/**\n * Obtain the timeout for requests to the metadata server.\n *\n * In certain environments and conditions requests can take longer than\n * the default timeout to complete. This function will determine the\n * appropriate timeout based on the environment.\n *\n * @returns {number} a request timeout duration in milliseconds.\n */\nfunction requestTimeout() {\n // Detecting the residency can be resource-intensive. Let's cache the result.\n if (exports.gcpResidencyCache === null) {\n exports.gcpResidencyCache = (0, gcp_residency_1.detectGCPResidency)();\n }\n return exports.gcpResidencyCache ? 
0 : 3000;\n}\nexports.requestTimeout = requestTimeout;\n__exportStar(require(\"./gcp-residency\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2012 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AuthClient = void 0;\nconst events_1 = require(\"events\");\nconst transporters_1 = require(\"../transporters\");\nclass AuthClient extends events_1.EventEmitter {\n constructor() {\n super(...arguments);\n this.transporter = new transporters_1.DefaultTransporter();\n this.credentials = {};\n this.eagerRefreshThresholdMillis = 5 * 60 * 1000;\n this.forceRefreshOnFailure = false;\n }\n /**\n * Sets the auth credentials.\n */\n setCredentials(credentials) {\n this.credentials = credentials;\n }\n /**\n * Append additional headers, e.g., x-goog-user-project, shared across the\n * classes inheriting AuthClient. This method should be used by any method\n * that overrides getRequestMetadataAsync(), which is a shared helper for\n * setting request information in both gRPC and HTTP API calls.\n *\n * @param headers object to append additional headers to.\n */\n addSharedMetadataHeaders(headers) {\n // quota_project_id, stored in application_default_credentials.json, is set in\n // the x-goog-user-project header, to indicate an alternate account for\n // billing and quota:\n if (!headers['x-goog-user-project'] && // don't override a value the user sets.\n this.quotaProjectId) {\n headers['x-goog-user-project'] = this.quotaProjectId;\n }\n return headers;\n }\n}\nexports.AuthClient = AuthClient;\n//# sourceMappingURL=authclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsClient = void 0;\nconst awsrequestsigner_1 = require(\"./awsrequestsigner\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n/**\n * AWS external account client. 
This is used for AWS workloads, where\n * AWS STS GetCallerIdentity serialized signed requests are exchanged for\n * GCP access token.\n */\nclass AwsClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates an AwsClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid AWS credential.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n this.environmentId = options.credential_source.environment_id;\n // This is only required if the AWS region is not available in the\n // AWS_REGION or AWS_DEFAULT_REGION environment variables.\n this.regionUrl = options.credential_source.region_url;\n // This is only required if AWS security credentials are not available in\n // environment variables.\n this.securityCredentialsUrl = options.credential_source.url;\n this.regionalCredVerificationUrl =\n options.credential_source.regional_cred_verification_url;\n this.imdsV2SessionTokenUrl =\n options.credential_source.imdsv2_session_token_url;\n this.awsRequestSigner = null;\n this.region = '';\n // data validators\n this.validateEnvironmentId();\n this.validateMetadataServerURLs();\n }\n validateEnvironmentId() {\n var _a;\n const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\\d+)$/);\n if (!match || !this.regionalCredVerificationUrl) {\n throw new Error('No valid AWS \"credential_source\" provided');\n }\n else if (parseInt(match[2], 10) !== 1) {\n throw new Error(`aws version \"${match[2]}\" is not supported in the current build.`);\n }\n }\n validateMetadataServerURLs() {\n this.validateMetadataURL(this.regionUrl, 'region_url');\n this.validateMetadataURL(this.securityCredentialsUrl, 'url');\n this.validateMetadataURL(this.imdsV2SessionTokenUrl, 'imdsv2_session_token_url');\n }\n validateMetadataURL(value, prop) {\n if (!value)\n return;\n const url = new URL(value);\n if (url.hostname !== AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS &&\n url.hostname !== `[${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}]`) {\n throw new RangeError(`Invalid host \"${url.hostname}\" for \"${prop}\". Expecting ${AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS} or ${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}.`);\n }\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this uses a serialized AWS signed request to the STS GetCallerIdentity\n * endpoint.\n * The logic is summarized as:\n * 1. If imdsv2_session_token_url is provided in the credential source, then\n * fetch the aws session token and include it in the headers of the\n * metadata requests. This is a requirement for IDMSv2 but optional\n * for IDMSv1.\n * 2. Retrieve AWS region from availability-zone.\n * 3a. Check AWS credentials in environment variables. If not found, get\n * from security-credentials endpoint.\n * 3b. Get AWS credentials from security-credentials endpoint. 
In order\n * to retrieve this, the AWS role needs to be determined by calling\n * security-credentials endpoint without any argument. Then the\n * credentials can be retrieved via: security-credentials/role_name\n * 4. Generate the signed request to AWS STS GetCallerIdentity action.\n * 5. Inject x-goog-cloud-target-resource into header and serialize the\n * signed request. This will be the subject-token to pass to GCP STS.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Initialize AWS request signer if not already initialized.\n if (!this.awsRequestSigner) {\n const metadataHeaders = {};\n if (this.imdsV2SessionTokenUrl) {\n metadataHeaders['x-aws-ec2-metadata-token'] =\n await this.getImdsV2SessionToken();\n }\n this.region = await this.getAwsRegion(metadataHeaders);\n this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => {\n // Check environment variables for permanent credentials first.\n // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html\n if (process.env['AWS_ACCESS_KEY_ID'] &&\n process.env['AWS_SECRET_ACCESS_KEY']) {\n return {\n accessKeyId: process.env['AWS_ACCESS_KEY_ID'],\n secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'],\n // This is normally not available for permanent credentials.\n token: process.env['AWS_SESSION_TOKEN'],\n };\n }\n // Since the role on a VM can change, we don't need to cache it.\n const roleName = await this.getAwsRoleName(metadataHeaders);\n // Temporary credentials typically last for several hours.\n // Expiration is returned in response.\n // Consider future optimization of this logic to cache AWS tokens\n // until their natural expiration.\n const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders);\n return {\n accessKeyId: awsCreds.AccessKeyId,\n secretAccessKey: awsCreds.SecretAccessKey,\n token: awsCreds.Token,\n };\n }, this.region);\n }\n // Generate signed request to AWS STS GetCallerIdentity API.\n // Use the required regional endpoint. 
Otherwise, the request will fail.\n const options = await this.awsRequestSigner.getRequestOptions({\n url: this.regionalCredVerificationUrl.replace('{region}', this.region),\n method: 'POST',\n });\n // The GCP STS endpoint expects the headers to be formatted as:\n // [\n // {key: 'x-amz-date', value: '...'},\n // {key: 'Authorization', value: '...'},\n // ...\n // ]\n // And then serialized as:\n // encodeURIComponent(JSON.stringify({\n // url: '...',\n // method: 'POST',\n // headers: [{key: 'x-amz-date', value: '...'}, ...]\n // }))\n const reformattedHeader = [];\n const extendedHeaders = Object.assign({\n // The full, canonical resource name of the workload identity pool\n // provider, with or without the HTTPS prefix.\n // Including this header as part of the signature is recommended to\n // ensure data integrity.\n 'x-goog-cloud-target-resource': this.audience,\n }, options.headers);\n // Reformat header to GCP STS expected format.\n for (const key in extendedHeaders) {\n reformattedHeader.push({\n key,\n value: extendedHeaders[key],\n });\n }\n // Serialize the reformatted signed request.\n return encodeURIComponent(JSON.stringify({\n url: options.url,\n method: options.method,\n headers: reformattedHeader,\n }));\n }\n /**\n * @return A promise that resolves with the IMDSv2 Session Token.\n */\n async getImdsV2SessionToken() {\n const opts = {\n url: this.imdsV2SessionTokenUrl,\n method: 'PUT',\n responseType: 'text',\n headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' },\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the current AWS region.\n */\n async getAwsRegion(headers) {\n // Priority order for region determination:\n // AWS_REGION > AWS_DEFAULT_REGION > metadata server.\n if (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']) {\n return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']);\n }\n if (!this.regionUrl) {\n throw new Error('Unable to determine AWS region due to missing ' +\n '\"options.credential_source.region_url\"');\n }\n const opts = {\n url: this.regionUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n // Remove last character. For example, if us-east-2b is returned,\n // the region would be us-east-2.\n return response.data.substr(0, response.data.length - 1);\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the assigned role to the current\n * AWS VM. 
This is needed for calling the security-credentials endpoint.\n */\n async getAwsRoleName(headers) {\n if (!this.securityCredentialsUrl) {\n throw new Error('Unable to determine AWS role name due to missing ' +\n '\"options.credential_source.url\"');\n }\n const opts = {\n url: this.securityCredentialsUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * Retrieves the temporary AWS credentials by calling the security-credentials\n * endpoint as specified in the `credential_source` object.\n * @param roleName The role attached to the current VM.\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the temporary AWS credentials\n * needed for creating the GetCallerIdentity signed request.\n */\n async getAwsSecurityCredentials(roleName, headers) {\n const response = await this.transporter.request({\n url: `${this.securityCredentialsUrl}/${roleName}`,\n responseType: 'json',\n headers: headers,\n });\n return response.data;\n }\n}\nexports.AwsClient = AwsClient;\nAwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254';\nAwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254';\n//# sourceMappingURL=awsclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsRequestSigner = void 0;\nconst crypto_1 = require(\"../crypto/crypto\");\n/** AWS Signature Version 4 signing algorithm identifier. */\nconst AWS_ALGORITHM = 'AWS4-HMAC-SHA256';\n/**\n * The termination string for the AWS credential scope value as defined in\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n */\nconst AWS_REQUEST_TYPE = 'aws4_request';\n/**\n * Implements an AWS API request signer based on the AWS Signature Version 4\n * signing process.\n * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html\n */\nclass AwsRequestSigner {\n /**\n * Instantiates an AWS API request signer used to send authenticated signed\n * requests to AWS APIs based on the AWS Signature Version 4 signing process.\n * This also provides a mechanism to generate the signed request without\n * sending it.\n * @param getCredentials A mechanism to retrieve AWS security credentials\n * when needed.\n * @param region The AWS region to use.\n */\n constructor(getCredentials, region) {\n this.getCredentials = getCredentials;\n this.region = region;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Generates the signed request for the provided HTTP request for calling\n * an AWS API. 
This follows the steps described at:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html\n * @param amzOptions The AWS request options that need to be signed.\n * @return A promise that resolves with the GaxiosOptions containing the\n * signed HTTP request parameters.\n */\n async getRequestOptions(amzOptions) {\n if (!amzOptions.url) {\n throw new Error('\"url\" is required in \"amzOptions\"');\n }\n // Stringify JSON requests. This will be set in the request body of the\n // generated signed request.\n const requestPayloadData = typeof amzOptions.data === 'object'\n ? JSON.stringify(amzOptions.data)\n : amzOptions.data;\n const url = amzOptions.url;\n const method = amzOptions.method || 'GET';\n const requestPayload = amzOptions.body || requestPayloadData;\n const additionalAmzHeaders = amzOptions.headers;\n const awsSecurityCredentials = await this.getCredentials();\n const uri = new URL(url);\n const headerMap = await generateAuthenticationHeaderMap({\n crypto: this.crypto,\n host: uri.host,\n canonicalUri: uri.pathname,\n canonicalQuerystring: uri.search.substr(1),\n method,\n region: this.region,\n securityCredentials: awsSecurityCredentials,\n requestPayload,\n additionalAmzHeaders,\n });\n // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc.\n const headers = Object.assign(\n // Add x-amz-date if available.\n headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, {\n Authorization: headerMap.authorizationHeader,\n host: uri.host,\n }, additionalAmzHeaders || {});\n if (awsSecurityCredentials.token) {\n Object.assign(headers, {\n 'x-amz-security-token': awsSecurityCredentials.token,\n });\n }\n const awsSignedReq = {\n url,\n method: method,\n headers,\n };\n if (typeof requestPayload !== 'undefined') {\n awsSignedReq.body = requestPayload;\n }\n return awsSignedReq;\n }\n}\nexports.AwsRequestSigner = AwsRequestSigner;\n/**\n * Creates the HMAC-SHA256 hash of the provided message using the\n * provided key.\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The HMAC-SHA256 key to use.\n * @param msg The message to hash.\n * @return The computed hash bytes.\n */\nasync function sign(crypto, key, msg) {\n return await crypto.signWithHmacSha256(key, msg);\n}\n/**\n * Calculates the signing key used to calculate the signature for\n * AWS Signature Version 4 based on:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The AWS secret access key.\n * @param dateStamp The '%Y%m%d' date format.\n * @param region The AWS region.\n * @param serviceName The AWS service name, eg. 
sts.\n * @return The signing key bytes.\n */\nasync function getSigningKey(crypto, key, dateStamp, region, serviceName) {\n const kDate = await sign(crypto, `AWS4${key}`, dateStamp);\n const kRegion = await sign(crypto, kDate, region);\n const kService = await sign(crypto, kRegion, serviceName);\n const kSigning = await sign(crypto, kService, 'aws4_request');\n return kSigning;\n}\n/**\n * Generates the authentication header map needed for generating the AWS\n * Signature Version 4 signed request.\n *\n * @param option The options needed to compute the authentication header map.\n * @return The AWS authentication header map which constitutes of the following\n * components: amz-date, authorization header and canonical query string.\n */\nasync function generateAuthenticationHeaderMap(options) {\n const additionalAmzHeaders = options.additionalAmzHeaders || {};\n const requestPayload = options.requestPayload || '';\n // iam.amazonaws.com host => iam service.\n // sts.us-east-2.amazonaws.com => sts service.\n const serviceName = options.host.split('.')[0];\n const now = new Date();\n // Format: '%Y%m%dT%H%M%SZ'.\n const amzDate = now\n .toISOString()\n .replace(/[-:]/g, '')\n .replace(/\\.[0-9]+/, '');\n // Format: '%Y%m%d'.\n const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, '');\n // Change all additional headers to be lower case.\n const reformattedAdditionalAmzHeaders = {};\n Object.keys(additionalAmzHeaders).forEach(key => {\n reformattedAdditionalAmzHeaders[key.toLowerCase()] =\n additionalAmzHeaders[key];\n });\n // Add AWS token if available.\n if (options.securityCredentials.token) {\n reformattedAdditionalAmzHeaders['x-amz-security-token'] =\n options.securityCredentials.token;\n }\n // Header keys need to be sorted alphabetically.\n const amzHeaders = Object.assign({\n host: options.host,\n }, \n // Previously the date was not fixed with x-amz- and could be provided manually.\n // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req\n reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders);\n let canonicalHeaders = '';\n const signedHeadersList = Object.keys(amzHeaders).sort();\n signedHeadersList.forEach(key => {\n canonicalHeaders += `${key}:${amzHeaders[key]}\\n`;\n });\n const signedHeaders = signedHeadersList.join(';');\n const payloadHash = await options.crypto.sha256DigestHex(requestPayload);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html\n const canonicalRequest = `${options.method}\\n` +\n `${options.canonicalUri}\\n` +\n `${options.canonicalQuerystring}\\n` +\n `${canonicalHeaders}\\n` +\n `${signedHeaders}\\n` +\n `${payloadHash}`;\n const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`;\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n const stringToSign = `${AWS_ALGORITHM}\\n` +\n `${amzDate}\\n` +\n `${credentialScope}\\n` +\n (await options.crypto.sha256DigestHex(canonicalRequest));\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName);\n const signature = await sign(options.crypto, signingKey, stringToSign);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html\n const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` +\n `${credentialScope}, SignedHeaders=${signedHeaders}, ` +\n `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`;\n return {\n // Do not return x-amz-date if date is available.\n amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate,\n authorizationHeader,\n canonicalQuerystring: options.canonicalQuerystring,\n };\n}\n//# sourceMappingURL=awsrequestsigner.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BaseExternalAccountClient = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/** The default OAuth scope to request when none is provided. */\nconst DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform';\n/** The google apis domain pattern. */\nconst GOOGLE_APIS_DOMAIN_PATTERN = '\\\\.googleapis\\\\.com$';\n/** The variable portion pattern in a Google APIs domain. 
*/\nconst VARIABLE_PORTION_PATTERN = '[^\\\\.\\\\s\\\\/\\\\\\\\]+';\n/** Default impersonated token lifespan in seconds.*/\nconst DEFAULT_TOKEN_LIFESPAN = 3600;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * The credentials JSON file type for external account clients.\n * There are 3 types of JSON configs:\n * 1. authorized_user => Google end user credential\n * 2. service_account => Google service account credential\n * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s)\n */\nexports.EXTERNAL_ACCOUNT_TYPE = 'external_account';\n/** Cloud resource manager URL used to retrieve project information. */\nexports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/';\n/** The workforce audience pattern. */\nconst WORKFORCE_AUDIENCE_PATTERN = '//iam.googleapis.com/locations/[^/]+/workforcePools/[^/]+/providers/.+';\n/**\n * Base external account client. This is used to instantiate AuthClients for\n * exchanging external account credentials for GCP access token and authorizing\n * requests to GCP APIs.\n * The base class implements common logic for exchanging various type of\n * external credentials for GCP access token. The logic of determining and\n * retrieving the external credential based on the environment and\n * credential_source will be left for the subclasses.\n */\nclass BaseExternalAccountClient extends authclient_1.AuthClient {\n /**\n * Instantiate a BaseExternalAccountClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n var _a, _b;\n super();\n if (options.type !== exports.EXTERNAL_ACCOUNT_TYPE) {\n throw new Error(`Expected \"${exports.EXTERNAL_ACCOUNT_TYPE}\" type but ` +\n `received \"${options.type}\"`);\n }\n this.clientAuth = options.client_id\n ? {\n confidentialClientType: 'basic',\n clientId: options.client_id,\n clientSecret: options.client_secret,\n }\n : undefined;\n if (!this.validateGoogleAPIsUrl('sts', options.token_url)) {\n throw new Error(`\"${options.token_url}\" is not a valid token url.`);\n }\n this.stsCredential = new sts.StsCredentials(options.token_url, this.clientAuth);\n // Default OAuth scope. 
This could be overridden via public property.\n        this.scopes = [DEFAULT_OAUTH_SCOPE];\n        this.cachedAccessToken = null;\n        this.audience = options.audience;\n        this.subjectTokenType = options.subject_token_type;\n        this.quotaProjectId = options.quota_project_id;\n        this.workforcePoolUserProject = options.workforce_pool_user_project;\n        const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN);\n        if (this.workforcePoolUserProject &&\n            !this.audience.match(workforceAudiencePattern)) {\n            throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' +\n                'credentials.');\n        }\n        if (typeof options.service_account_impersonation_url !== 'undefined' &&\n            !this.validateGoogleAPIsUrl('iamcredentials', options.service_account_impersonation_url)) {\n            throw new Error(`\"${options.service_account_impersonation_url}\" is ` +\n                'not a valid service account impersonation url.');\n        }\n        this.serviceAccountImpersonationUrl =\n            options.service_account_impersonation_url;\n        this.serviceAccountImpersonationLifetime =\n            (_b = (_a = options.service_account_impersonation) === null || _a === void 0 ? void 0 : _a.token_lifetime_seconds) !== null && _b !== void 0 ? _b : DEFAULT_TOKEN_LIFESPAN;\n        // As threshold could be zero,\n        // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the\n        // zero value.\n        if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {\n            this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET;\n        }\n        else {\n            this.eagerRefreshThresholdMillis = additionalOptions\n                .eagerRefreshThresholdMillis;\n        }\n        this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure);\n        this.projectId = null;\n        this.projectNumber = this.getProjectNumber(this.audience);\n    }\n    /** The service account email to be impersonated, if available. */\n    getServiceAccountEmail() {\n        var _a;\n        if (this.serviceAccountImpersonationUrl) {\n            // Parse email from URL. The format looks as follows:\n            // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken\n            const re = /serviceAccounts\\/(?<email>[^:]+):generateAccessToken$/;\n            const result = re.exec(this.serviceAccountImpersonationUrl);\n            return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null;\n        }\n        return null;\n    }\n    /**\n     * Provides a mechanism to inject GCP access tokens directly.\n     * When the provided credential expires, a new credential, using the\n     * external account options, is retrieved.\n     * @param credentials The Credentials object to set on the current client.\n     */\n    setCredentials(credentials) {\n        super.setCredentials(credentials);\n        this.cachedAccessToken = credentials;\n    }\n    /**\n     * @return A promise that resolves with the current GCP access token\n     * response. If the current credential is expired, a new one is retrieved.\n     */\n    async getAccessToken() {\n        // If cached access token is unavailable or expired, force refresh.\n        if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {\n            await this.refreshAccessTokenAsync();\n        }\n        // Return GCP access token in GetAccessTokenResponse format.\n        return {\n            token: this.cachedAccessToken.access_token,\n            res: this.cachedAccessToken.res,\n        };\n    }\n    /**\n     * The main authentication interface. 
It takes an optional url which when\n     * present is the endpoint being accessed, and returns a Promise which\n     * resolves with authorization header fields.\n     *\n     * The result has the form:\n     * { Authorization: 'Bearer <access_token>' }\n     */\n    async getRequestHeaders() {\n        const accessTokenResponse = await this.getAccessToken();\n        const headers = {\n            Authorization: `Bearer ${accessTokenResponse.token}`,\n        };\n        return this.addSharedMetadataHeaders(headers);\n    }\n    request(opts, callback) {\n        if (callback) {\n            this.requestAsync(opts).then(r => callback(null, r), e => {\n                return callback(e, e.response);\n            });\n        }\n        else {\n            return this.requestAsync(opts);\n        }\n    }\n    /**\n     * @return A promise that resolves with the project ID corresponding to the\n     * current workload identity pool or current workforce pool if\n     * determinable. For workforce pool credential, it returns the project ID\n     * corresponding to the workforcePoolUserProject.\n     * This is introduced to match the current pattern of using the Auth\n     * library:\n     * const projectId = await auth.getProjectId();\n     * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`;\n     * const res = await client.request({ url });\n     * The resource may not have permission\n     * (resourcemanager.projects.get) to call this API or the required\n     * scopes may not be selected:\n     * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes\n     */\n    async getProjectId() {\n        const projectNumber = this.projectNumber || this.workforcePoolUserProject;\n        if (this.projectId) {\n            // Return previously determined project ID.\n            return this.projectId;\n        }\n        else if (projectNumber) {\n            // Preferable not to use request() to avoid retrial policies.\n            const headers = await this.getRequestHeaders();\n            const response = await this.transporter.request({\n                headers,\n                url: `${exports.CLOUD_RESOURCE_MANAGER}${projectNumber}`,\n                responseType: 'json',\n            });\n            this.projectId = response.data.projectId;\n            return this.projectId;\n        }\n        return null;\n    }\n    /**\n     * Authenticates the provided HTTP request, processes it and resolves with the\n     * returned response.\n     * @param opts The HTTP request options.\n     * @param retry Whether the current attempt is a retry after a failed attempt.\n     * @return A promise that resolves with the successful response.\n     */\n    async requestAsync(opts, retry = false) {\n        let response;\n        try {\n            const requestHeaders = await this.getRequestHeaders();\n            opts.headers = opts.headers || {};\n            if (requestHeaders && requestHeaders['x-goog-user-project']) {\n                opts.headers['x-goog-user-project'] =\n                    requestHeaders['x-goog-user-project'];\n            }\n            if (requestHeaders && requestHeaders.Authorization) {\n                opts.headers.Authorization = requestHeaders.Authorization;\n            }\n            response = await this.transporter.request(opts);\n        }\n        catch (e) {\n            const res = e.response;\n            if (res) {\n                const statusCode = res.status;\n                // Retry the request for metadata if the following criteria are true:\n                // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * External credentials are exchanged for GCP access tokens via the token\n * exchange endpoint and other settings provided in the client options\n * object.\n * If the service_account_impersonation_url is provided, an additional\n * step to exchange the external account GCP access token for a service\n * account impersonated token is performed.\n * @return A promise that resolves with the fresh GCP access tokens.\n */\n async refreshAccessTokenAsync() {\n // Retrieve the external credential.\n const subjectToken = await this.retrieveSubjectToken();\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n audience: this.audience,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken,\n subjectTokenType: this.subjectTokenType,\n // generateAccessToken requires the provided access token to have\n // scopes:\n // https://www.googleapis.com/auth/iam or\n // https://www.googleapis.com/auth/cloud-platform\n // The new service account access token scopes will match the user\n // provided ones.\n scope: this.serviceAccountImpersonationUrl\n ? [DEFAULT_OAUTH_SCOPE]\n : this.getScopesArray(),\n };\n // Exchange the external credentials for a GCP access token.\n // Client auth is prioritized over passing the workforcePoolUserProject\n // parameter for STS token exchange.\n const additionalOptions = !this.clientAuth && this.workforcePoolUserProject\n ? { userProject: this.workforcePoolUserProject }\n : undefined;\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, additionalOptions);\n if (this.serviceAccountImpersonationUrl) {\n this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token);\n }\n else if (stsResponse.expires_in) {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: new Date().getTime() + stsResponse.expires_in * 1000,\n res: stsResponse.res,\n };\n }\n else {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n res: stsResponse.res,\n };\n }\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedAccessToken.expiry_date,\n access_token: this.cachedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedAccessToken;\n }\n /**\n * Returns the workload identity pool project number if it is determinable\n * from the audience resource name.\n * @param audience The STS audience used to determine the project number.\n * @return The project number associated with the workload identity pool, if\n * this can be determined from the STS audience field. 
Otherwise, null is\n * returned.\n */\n getProjectNumber(audience) {\n // STS audience pattern:\n // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...\n const match = audience.match(/\\/projects\\/([^/]+)/);\n if (!match) {\n return null;\n }\n return match[1];\n }\n /**\n * Exchanges an external account GCP access token for a service\n * account impersonated access token using iamcredentials\n * GenerateAccessToken API.\n * @param token The access token to exchange for a service account access\n * token.\n * @return A promise that resolves with the service account impersonated\n * credentials response.\n */\n async getImpersonatedAccessToken(token) {\n const opts = {\n url: this.serviceAccountImpersonationUrl,\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n },\n data: {\n scope: this.getScopesArray(),\n lifetime: this.serviceAccountImpersonationLifetime + 's',\n },\n responseType: 'json',\n };\n const response = await this.transporter.request(opts);\n const successResponse = response.data;\n return {\n access_token: successResponse.accessToken,\n // Convert from ISO format to timestamp.\n expiry_date: new Date(successResponse.expireTime).getTime(),\n res: response,\n };\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param accessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(accessToken) {\n const now = new Date().getTime();\n return accessToken.expiry_date\n ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n /**\n * @return The list of scopes for the requested GCP access token.\n */\n getScopesArray() {\n // Since scopes can be provided as string or array, the type should\n // be normalized.\n if (typeof this.scopes === 'string') {\n return [this.scopes];\n }\n else if (typeof this.scopes === 'undefined') {\n return [DEFAULT_OAUTH_SCOPE];\n }\n else {\n return this.scopes;\n }\n }\n /**\n * Checks whether Google APIs URL is valid.\n * @param apiName The apiName of url.\n * @param url The Google API URL to validate.\n * @return Whether the URL is valid or not.\n */\n validateGoogleAPIsUrl(apiName, url) {\n let parsedUrl;\n // Return false if error is thrown during parsing URL.\n try {\n parsedUrl = new URL(url);\n }\n catch (e) {\n return false;\n }\n const urlDomain = parsedUrl.hostname;\n // Check the protocol is https.\n if (parsedUrl.protocol !== 'https:') {\n return false;\n }\n const googleAPIsDomainPatterns = [\n new RegExp('^' +\n VARIABLE_PORTION_PATTERN +\n '\\\\.' +\n apiName +\n GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' + apiName + GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' +\n apiName +\n '\\\\.' 
+\n VARIABLE_PORTION_PATTERN +\n GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' +\n VARIABLE_PORTION_PATTERN +\n '\\\\-' +\n apiName +\n GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' +\n apiName +\n '\\\\-' +\n VARIABLE_PORTION_PATTERN +\n '\\\\.p' +\n GOOGLE_APIS_DOMAIN_PATTERN),\n ];\n for (const googleAPIsDomainPattern of googleAPIsDomainPatterns) {\n if (urlDomain.match(googleAPIsDomainPattern)) {\n return true;\n }\n }\n return false;\n }\n}\nexports.BaseExternalAccountClient = BaseExternalAccountClient;\n//# sourceMappingURL=baseexternalclient.js.map","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Compute = void 0;\nconst arrify = require(\"arrify\");\nconst gaxios_1 = require(\"gaxios\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst oauth2client_1 = require(\"./oauth2client\");\nclass Compute extends oauth2client_1.OAuth2Client {\n /**\n * Google Compute Engine service account credentials.\n *\n * Retrieve access token from the metadata server.\n * See: https://developers.google.com/compute/docs/authentication\n */\n constructor(options = {}) {\n super(options);\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' };\n this.serviceAccountEmail = options.serviceAccountEmail || 'default';\n this.scopes = arrify(options.scopes);\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`;\n let data;\n try {\n const instanceOptions = {\n property: tokenPath,\n };\n if (this.scopes.length > 0) {\n instanceOptions.params = {\n scopes: this.scopes.join(','),\n };\n }\n data = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError) {\n e.message = `Could not refresh access token: ${e.message}`;\n this.wrapError(e);\n }\n throw e;\n }\n const tokens = data;\n if (data && data.expires_in) {\n tokens.expiry_date = new Date().getTime() + data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res: null };\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` +\n `?format=full&audience=${targetAudience}`;\n let idToken;\n try {\n const instanceOptions = {\n property: idTokenPath,\n };\n idToken = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Could not fetch ID token: ${e.message}`;\n }\n throw e;\n }\n return idToken;\n }\n wrapError(e) {\n const res = e.response;\n if (res && 
res.status) {\n e.code = res.status.toString();\n if (res.status === 403) {\n e.message =\n 'A Forbidden error was returned while attempting to retrieve an access ' +\n 'token for the Compute Engine built-in service account. This may be because the Compute ' +\n 'Engine instance does not have the correct permission scopes specified: ' +\n e.message;\n }\n else if (res.status === 404) {\n e.message =\n 'A Not Found error was returned while attempting to retrieve an access' +\n 'token for the Compute Engine built-in service account. This may be because the Compute ' +\n 'Engine instance does not have any permission scopes specified: ' +\n e.message;\n }\n }\n }\n}\nexports.Compute = Compute;\n//# sourceMappingURL=computeclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/**\n * The requested token exchange subject_token_type: rfc8693#section-2.1\n */\nconst STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/** The STS access token exchange end point. */\nconst STS_ACCESS_TOKEN_URL = 'https://sts.googleapis.com/v1/token';\n/**\n * The maximum number of access boundary rules a Credential Access Boundary\n * can contain.\n */\nexports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * Defines a set of Google credentials that are downscoped from an existing set\n * of Google OAuth2 credentials. 
This is useful to restrict the Identity and\n * Access Management (IAM) permissions that a short-lived credential can use.\n * The common pattern of usage is to have a token broker with elevated access\n * generate these downscoped credentials from higher access source credentials\n * and pass the downscoped short-lived access tokens to a token consumer via\n * some secure authenticated channel for limited access to Google Cloud Storage\n * resources.\n */\nclass DownscopedClient extends authclient_1.AuthClient {\n /**\n * Instantiates a downscoped client object using the provided source\n * AuthClient and credential access boundary rules.\n * To downscope permissions of a source AuthClient, a Credential Access\n * Boundary that specifies which resources the new credential can access, as\n * well as an upper bound on the permissions that are available on each\n * resource, has to be defined. A downscoped client can then be instantiated\n * using the source AuthClient and the Credential Access Boundary.\n * @param authClient The source AuthClient to be downscoped based on the\n * provided Credential Access Boundary rules.\n * @param credentialAccessBoundary The Credential Access Boundary which\n * contains a list of access boundary rules. Each rule contains information\n * on the resource that the rule applies to, the upper bound of the\n * permissions that are available on that resource and an optional\n * condition to further restrict permissions.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n * @param quotaProjectId Optional quota project id for setting up in the\n * x-goog-user-project header.\n */\n constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) {\n super();\n this.authClient = authClient;\n this.credentialAccessBoundary = credentialAccessBoundary;\n // Check 1-10 Access Boundary Rules are defined within Credential Access\n // Boundary.\n if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) {\n throw new Error('At least one access boundary rule needs to be defined.');\n }\n else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length >\n exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) {\n throw new Error('The provided access boundary has more than ' +\n `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`);\n }\n // Check at least one permission should be defined in each Access Boundary\n // Rule.\n for (const rule of credentialAccessBoundary.accessBoundary\n .accessBoundaryRules) {\n if (rule.availablePermissions.length === 0) {\n throw new Error('At least one permission should be defined in access boundary rules.');\n }\n }\n this.stsCredential = new sts.StsCredentials(STS_ACCESS_TOKEN_URL);\n this.cachedDownscopedAccessToken = null;\n // As threshold could be zero,\n // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the\n // zero value.\n if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {\n this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET;\n }\n else {\n this.eagerRefreshThresholdMillis = additionalOptions\n .eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.forceRefreshOnFailure);\n        this.quotaProjectId = quotaProjectId;\n    }\n    /**\n     * Provides a mechanism to inject Downscoped access tokens directly.\n     * The expiry_date field is required to facilitate determination of the token\n     * expiration which would make it easier for the token consumer to handle.\n     * @param credentials The Credentials object to set on the current client.\n     */\n    setCredentials(credentials) {\n        if (!credentials.expiry_date) {\n            throw new Error('The access token expiry_date field is missing in the provided ' +\n                'credentials.');\n        }\n        super.setCredentials(credentials);\n        this.cachedDownscopedAccessToken = credentials;\n    }\n    async getAccessToken() {\n        // If the cached access token is unavailable or expired, force refresh.\n        // The Downscoped access token will be returned in\n        // DownscopedAccessTokenResponse format.\n        if (!this.cachedDownscopedAccessToken ||\n            this.isExpired(this.cachedDownscopedAccessToken)) {\n            await this.refreshAccessTokenAsync();\n        }\n        // Return Downscoped access token in DownscopedAccessTokenResponse format.\n        return {\n            token: this.cachedDownscopedAccessToken.access_token,\n            expirationTime: this.cachedDownscopedAccessToken.expiry_date,\n            res: this.cachedDownscopedAccessToken.res,\n        };\n    }\n    /**\n     * The main authentication interface. It takes an optional url which when\n     * present is the endpoint being accessed, and returns a Promise which\n     * resolves with authorization header fields.\n     *\n     * The result has the form:\n     * { Authorization: 'Bearer <access_token>' }\n     */\n    async getRequestHeaders() {\n        const accessTokenResponse = await this.getAccessToken();\n        const headers = {\n            Authorization: `Bearer ${accessTokenResponse.token}`,\n        };\n        return this.addSharedMetadataHeaders(headers);\n    }\n    request(opts, callback) {\n        if (callback) {\n            this.requestAsync(opts).then(r => callback(null, r), e => {\n                return callback(e, e.response);\n            });\n        }\n        else {\n            return this.requestAsync(opts);\n        }\n    }\n    /**\n     * Authenticates the provided HTTP request, processes it and resolves with the\n     * returned response.\n     * @param opts The HTTP request options.\n     * @param retry Whether the current attempt is a retry after a failed attempt.\n     * @return A promise that resolves with the successful response.\n     */\n    async requestAsync(opts, retry = false) {\n        let response;\n        try {\n            const requestHeaders = await this.getRequestHeaders();\n            opts.headers = opts.headers || {};\n            if (requestHeaders && requestHeaders['x-goog-user-project']) {\n                opts.headers['x-goog-user-project'] =\n                    requestHeaders['x-goog-user-project'];\n            }\n            if (requestHeaders && requestHeaders.Authorization) {\n                opts.headers.Authorization = requestHeaders.Authorization;\n            }\n            response = await this.transporter.request(opts);\n        }\n        catch (e) {\n            const res = e.response;\n            if (res) {\n                const statusCode = res.status;\n                // Retry the request for metadata if the following criteria are true:\n                // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * GCP access tokens are retrieved from authclient object/source credential.\n * Then GCP access tokens are exchanged for downscoped access tokens via the\n * token exchange endpoint.\n * @return A promise that resolves with the fresh downscoped access token.\n */\n async refreshAccessTokenAsync() {\n var _a;\n // Retrieve GCP access token from source credential.\n const subjectToken = (await this.authClient.getAccessToken()).token;\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken: subjectToken,\n subjectTokenType: STS_SUBJECT_TOKEN_TYPE,\n };\n // Exchange the source AuthClient access token for a Downscoped access\n // token.\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary);\n /**\n * The STS endpoint will only return the expiration time for the downscoped\n * access token if the original access token represents a service account.\n * The downscoped token's expiration time will always match the source\n * credential expiration. When no expires_in is returned, we can copy the\n * source credential's expiration time.\n */\n const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null;\n const expiryDate = stsResponse.expires_in\n ? new Date().getTime() + stsResponse.expires_in * 1000\n : sourceCredExpireDate;\n // Save response in cached access token.\n this.cachedDownscopedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: expiryDate,\n res: stsResponse.res,\n };\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedDownscopedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedDownscopedAccessToken.expiry_date,\n access_token: this.cachedDownscopedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedDownscopedAccessToken;\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param downscopedAccessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(downscopedAccessToken) {\n const now = new Date().getTime();\n return downscopedAccessToken.expiry_date\n ? 
now >=\n downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.DownscopedClient = DownscopedClient;\n//# sourceMappingURL=downscopedclient.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getEnv = exports.clear = exports.GCPEnv = void 0;\nconst gcpMetadata = require(\"gcp-metadata\");\nvar GCPEnv;\n(function (GCPEnv) {\n GCPEnv[\"APP_ENGINE\"] = \"APP_ENGINE\";\n GCPEnv[\"KUBERNETES_ENGINE\"] = \"KUBERNETES_ENGINE\";\n GCPEnv[\"CLOUD_FUNCTIONS\"] = \"CLOUD_FUNCTIONS\";\n GCPEnv[\"COMPUTE_ENGINE\"] = \"COMPUTE_ENGINE\";\n GCPEnv[\"CLOUD_RUN\"] = \"CLOUD_RUN\";\n GCPEnv[\"NONE\"] = \"NONE\";\n})(GCPEnv = exports.GCPEnv || (exports.GCPEnv = {}));\nlet envPromise;\nfunction clear() {\n envPromise = undefined;\n}\nexports.clear = clear;\nasync function getEnv() {\n if (envPromise) {\n return envPromise;\n }\n envPromise = getEnvMemoized();\n return envPromise;\n}\nexports.getEnv = getEnv;\nasync function getEnvMemoized() {\n let env = GCPEnv.NONE;\n if (isAppEngine()) {\n env = GCPEnv.APP_ENGINE;\n }\n else if (isCloudFunction()) {\n env = GCPEnv.CLOUD_FUNCTIONS;\n }\n else if (await isComputeEngine()) {\n if (await isKubernetesEngine()) {\n env = GCPEnv.KUBERNETES_ENGINE;\n }\n else if (isCloudRun()) {\n env = GCPEnv.CLOUD_RUN;\n }\n else {\n env = GCPEnv.COMPUTE_ENGINE;\n }\n }\n else {\n env = GCPEnv.NONE;\n }\n return env;\n}\nfunction isAppEngine() {\n return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME);\n}\nfunction isCloudFunction() {\n return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET);\n}\n/**\n * This check only verifies that the environment is running knative.\n * This must be run *after* checking for Kubernetes, otherwise it will\n * return a false positive.\n */\nfunction isCloudRun() {\n return !!process.env.K_CONFIGURATION;\n}\nasync function isKubernetesEngine() {\n try {\n await gcpMetadata.instance('attributes/cluster-name');\n return true;\n }\n catch (e) {\n return false;\n }\n}\nasync function isComputeEngine() {\n return gcpMetadata.isAvailable();\n}\n//# sourceMappingURL=envDetect.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = 
exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0;\nconst SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2';\nconst OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token';\nconst OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt';\n/**\n * Defines the response of a 3rd party executable run by the pluggable auth client.\n */\nclass ExecutableResponse {\n /**\n * Instantiates an ExecutableResponse instance using the provided JSON object\n * from the output of the executable.\n * @param responseJson Response from a 3rd party executable, loaded from a\n * run of the executable or a cached output file.\n */\n constructor(responseJson) {\n // Check that the required fields exist in the json response.\n if (!responseJson.version) {\n throw new InvalidVersionFieldError(\"Executable response must contain a 'version' field.\");\n }\n if (responseJson.success === undefined) {\n throw new InvalidSuccessFieldError(\"Executable response must contain a 'success' field.\");\n }\n this.version = responseJson.version;\n this.success = responseJson.success;\n // Validate required fields for a successful response.\n if (this.success) {\n this.expirationTime = responseJson.expiration_time;\n this.tokenType = responseJson.token_type;\n // Validate token type field.\n if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) {\n throw new InvalidTokenTypeFieldError(\"Executable response must contain a 'token_type' field when successful \" +\n `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n // Validate subject token.\n if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) {\n if (!responseJson.saml_response) {\n throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n this.subjectToken = responseJson.saml_response;\n }\n else {\n if (!responseJson.id_token) {\n throw new InvalidSubjectTokenError(\"Executable response must contain a 'id_token' field when \" +\n `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`);\n }\n this.subjectToken = responseJson.id_token;\n }\n }\n else {\n // Both code and message must be provided for unsuccessful responses.\n if (!responseJson.code) {\n throw new InvalidCodeFieldError(\"Executable response must contain a 'code' field when unsuccessful.\");\n }\n if (!responseJson.message) {\n throw new InvalidMessageFieldError(\"Executable response must contain a 'message' field when unsuccessful.\");\n }\n this.errorCode = responseJson.code;\n this.errorMessage = responseJson.message;\n }\n }\n /**\n * @return A boolean representing if the response has a valid token. Returns\n * true when the response was successful and the token is not expired.\n */\n isValid() {\n return !this.isExpired() && this.success;\n }\n /**\n * @return A boolean representing if the response is expired. 
Returns true if the\n * provided timeout has passed.\n */\n isExpired() {\n return (this.expirationTime !== undefined &&\n this.expirationTime < Math.round(Date.now() / 1000));\n }\n}\nexports.ExecutableResponse = ExecutableResponse;\n/**\n * An error thrown by the ExecutableResponse class.\n */\nclass ExecutableResponseError extends Error {\n constructor(message) {\n super(message);\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableResponseError = ExecutableResponseError;\n/**\n * An error thrown when the 'version' field in an executable response is missing or invalid.\n */\nclass InvalidVersionFieldError extends ExecutableResponseError {\n}\nexports.InvalidVersionFieldError = InvalidVersionFieldError;\n/**\n * An error thrown when the 'success' field in an executable response is missing or invalid.\n */\nclass InvalidSuccessFieldError extends ExecutableResponseError {\n}\nexports.InvalidSuccessFieldError = InvalidSuccessFieldError;\n/**\n * An error thrown when the 'expiration_time' field in an executable response is missing or invalid.\n */\nclass InvalidExpirationTimeFieldError extends ExecutableResponseError {\n}\nexports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError;\n/**\n * An error thrown when the 'token_type' field in an executable response is missing or invalid.\n */\nclass InvalidTokenTypeFieldError extends ExecutableResponseError {\n}\nexports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError;\n/**\n * An error thrown when the 'code' field in an executable response is missing or invalid.\n */\nclass InvalidCodeFieldError extends ExecutableResponseError {\n}\nexports.InvalidCodeFieldError = InvalidCodeFieldError;\n/**\n * An error thrown when the 'message' field in an executable response is missing or invalid.\n */\nclass InvalidMessageFieldError extends ExecutableResponseError {\n}\nexports.InvalidMessageFieldError = InvalidMessageFieldError;\n/**\n * An error thrown when the subject token in an executable response is missing or invalid.\n */\nclass InvalidSubjectTokenError extends ExecutableResponseError {\n}\nexports.InvalidSubjectTokenError = InvalidSubjectTokenError;\n//# sourceMappingURL=executable-response.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ExternalAccountClient = void 0;\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst identitypoolclient_1 = require(\"./identitypoolclient\");\nconst awsclient_1 = require(\"./awsclient\");\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\n/**\n * Dummy class with no constructor. Developers are expected to use fromJSON.\n */\nclass ExternalAccountClient {\n constructor() {\n throw new Error('ExternalAccountClients should be initialized via: ' +\n 'ExternalAccountClient.fromJSON(), ' +\n 'directly via explicit constructors, eg. 
' +\n 'new AwsClient(options), new IdentityPoolClient(options), new' +\n 'PluggableAuthClientOptions, or via ' +\n 'new GoogleAuth(options).getClient()');\n }\n /**\n * This static method will instantiate the\n * corresponding type of external account credential depending on the\n * underlying credential source.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n * @return A BaseExternalAccountClient instance or null if the options\n * provided do not correspond to an external account credential.\n */\n static fromJSON(options, additionalOptions) {\n var _a, _b;\n if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) {\n return new awsclient_1.AwsClient(options, additionalOptions);\n }\n else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) {\n return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions);\n }\n else {\n return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions);\n }\n }\n else {\n return null;\n }\n }\n}\nexports.ExternalAccountClient = ExternalAccountClient;\n//# sourceMappingURL=externalclient.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0;\nconst child_process_1 = require(\"child_process\");\nconst fs = require(\"fs\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst os = require(\"os\");\nconst path = require(\"path\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst transporters_1 = require(\"../transporters\");\nconst computeclient_1 = require(\"./computeclient\");\nconst idtokenclient_1 = require(\"./idtokenclient\");\nconst envDetect_1 = require(\"./envDetect\");\nconst jwtclient_1 = require(\"./jwtclient\");\nconst refreshclient_1 = require(\"./refreshclient\");\nconst impersonated_1 = require(\"./impersonated\");\nconst externalclient_1 = require(\"./externalclient\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nexports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';\nconst GoogleAuthExceptionMessages = {\n NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. 
\\n' +\n 'To learn more about authentication and Google APIs, visit: \\n' +\n 'https://cloud.google.com/docs/authentication/getting-started',\n};\nclass GoogleAuth {\n constructor(opts) {\n /**\n * Caches a value indicating whether the auth layer is running on Google\n * Compute Engine.\n * @private\n */\n this.checkIsGCE = undefined;\n // To save the contents of the JSON credential file\n this.jsonContent = null;\n this.cachedCredential = null;\n opts = opts || {};\n this._cachedProjectId = opts.projectId || null;\n this.cachedCredential = opts.authClient || null;\n this.keyFilename = opts.keyFilename || opts.keyFile;\n this.scopes = opts.scopes;\n this.jsonContent = opts.credentials || null;\n this.clientOptions = opts.clientOptions;\n }\n // Note: this properly is only public to satisify unit tests.\n // https://github.com/Microsoft/TypeScript/issues/5228\n get isGCE() {\n return this.checkIsGCE;\n }\n // GAPIC client libraries should always use self-signed JWTs. The following\n // variables are set on the JWT client in order to indicate the type of library,\n // and sign the JWT with the correct audience and scopes (if not supplied).\n setGapicJWTValues(client) {\n client.defaultServicePath = this.defaultServicePath;\n client.useJWTAccessWithScope = this.useJWTAccessWithScope;\n client.defaultScopes = this.defaultScopes;\n }\n getProjectId(callback) {\n if (callback) {\n this.getProjectIdAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getProjectIdAsync();\n }\n }\n /**\n * A temporary method for internal `getProjectId` usages where `null` is\n * acceptable. In a future major release, `getProjectId` should return `null`\n * (as the `Promise` base signature describes) and this private\n * method should be removed.\n *\n * @returns Promise that resolves with project id (or `null`)\n */\n async getProjectIdOptional() {\n try {\n return await this.getProjectId();\n }\n catch (e) {\n if (e instanceof Error &&\n e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) {\n return null;\n }\n else {\n throw e;\n }\n }\n }\n /*\n * A private method for finding and caching a projectId.\n *\n * Supports environments in order of precedence:\n * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable\n * - GOOGLE_APPLICATION_CREDENTIALS JSON file\n * - Cloud SDK: `gcloud config config-helper --format json`\n * - GCE project ID from metadata server\n *\n * @returns projectId\n */\n async findAndCacheProjectId() {\n let projectId = null;\n projectId || (projectId = await this.getProductionProjectId());\n projectId || (projectId = await this.getFileProjectId());\n projectId || (projectId = await this.getDefaultServiceProjectId());\n projectId || (projectId = await this.getGCEProjectId());\n projectId || (projectId = await this.getExternalAccountClientProjectId());\n if (projectId) {\n this._cachedProjectId = projectId;\n return projectId;\n }\n else {\n throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND);\n }\n }\n async getProjectIdAsync() {\n if (this._cachedProjectId) {\n return this._cachedProjectId;\n }\n if (!this._findProjectIdPromise) {\n this._findProjectIdPromise = this.findAndCacheProjectId();\n }\n return this._findProjectIdPromise;\n }\n /**\n * @returns Any scopes (user-specified or default scopes specified by the\n * client library) that need to be set on the current Auth client.\n */\n getAnyScopes() {\n return this.scopes || this.defaultScopes;\n }\n getApplicationDefault(optionsOrCallback = {}, callback) {\n let options;\n if 
(typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback);\n }\n else {\n return this.getApplicationDefaultAsync(options);\n }\n }\n async getApplicationDefaultAsync(options = {}) {\n // If we've already got a cached credential, return it.\n // This will also preserve one's configured quota project, in case they\n // set one directly on the credential previously.\n if (this.cachedCredential) {\n return await this.prepareAndCacheADC(this.cachedCredential);\n }\n // Since this is a 'new' ADC to cache we will use the environment variable\n // if it's available. We prefer this value over the value from ADC.\n const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'];\n let credential;\n // Check for the existence of a local environment variable pointing to the\n // location of the credential file. This is typically used in local\n // developer scenarios.\n credential =\n await this._tryGetApplicationCredentialsFromEnvironmentVariable(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Look in the well-known credential file location.\n credential = await this._tryGetApplicationCredentialsFromWellKnownFile(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Determine if we're running on GCE.\n let isGCE;\n try {\n isGCE = await this._checkIsGCE();\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Unexpected error determining execution environment: ${e.message}`;\n }\n throw e;\n }\n if (!isGCE) {\n // We failed to find the default credentials. Bail out with an error.\n throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.');\n }\n // For GCE, just return a default ComputeClient. 
It will take care of\n // the rest.\n options.scopes = this.getAnyScopes();\n return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride);\n }\n async prepareAndCacheADC(credential, quotaProjectIdOverride) {\n const projectId = await this.getProjectIdOptional();\n if (quotaProjectIdOverride) {\n credential.quotaProjectId = quotaProjectIdOverride;\n }\n this.cachedCredential = credential;\n return { credential, projectId };\n }\n /**\n * Determines whether the auth layer is running on Google Compute Engine.\n * @returns A promise that resolves with the boolean.\n * @api private\n */\n async _checkIsGCE() {\n if (this.checkIsGCE === undefined) {\n this.checkIsGCE = await gcpMetadata.isAvailable();\n }\n return this.checkIsGCE;\n }\n /**\n * Attempts to load default credentials from the environment variable path..\n * @returns Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromEnvironmentVariable(options) {\n const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] ||\n process.env['google_application_credentials'];\n if (!credentialsPath || credentialsPath.length === 0) {\n return null;\n }\n try {\n return this._getApplicationCredentialsFromFilePath(credentialsPath, options);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`;\n }\n throw e;\n }\n }\n /**\n * Attempts to load default credentials from a well-known file location\n * @return Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromWellKnownFile(options) {\n // First, figure out the location of the file, depending upon the OS type.\n let location = null;\n if (this._isWindows()) {\n // Windows\n location = process.env['APPDATA'];\n }\n else {\n // Linux or Mac\n const home = process.env['HOME'];\n if (home) {\n location = path.join(home, '.config');\n }\n }\n // If we found the root path, expand it.\n if (location) {\n location = path.join(location, 'gcloud', 'application_default_credentials.json');\n if (!fs.existsSync(location)) {\n location = null;\n }\n }\n // The file does not exist.\n if (!location) {\n return null;\n }\n // The file seems to exist. Try to use it.\n const client = await this._getApplicationCredentialsFromFilePath(location, options);\n return client;\n }\n /**\n * Attempts to load default credentials from a file at the given path..\n * @param filePath The path to the file to read.\n * @returns Promise that resolves with the OAuth2Client\n * @api private\n */\n async _getApplicationCredentialsFromFilePath(filePath, options = {}) {\n // Make sure the path looks like a string.\n if (!filePath || filePath.length === 0) {\n throw new Error('The file path is invalid.');\n }\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. Expect a thrown error\n // if not resolvable.\n filePath = fs.realpathSync(filePath);\n if (!fs.lstatSync(filePath).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. 
${err.message}`;\n            }\n            throw err;\n        }\n        // Now open a read stream on the file, and parse it.\n        const readStream = fs.createReadStream(filePath);\n        return this.fromStream(readStream, options);\n    }\n    /**\n     * Create a credentials instance using a given impersonated input options.\n     * @param json The impersonated input object.\n     * @returns JWT or UserRefresh Client with data\n     */\n    fromImpersonatedJSON(json) {\n        var _a, _b, _c, _d;\n        if (!json) {\n            throw new Error('Must pass in a JSON object containing an impersonated refresh token');\n        }\n        if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n            throw new Error(`The incoming JSON object does not have the \"${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}\" type`);\n        }\n        if (!json.source_credentials) {\n            throw new Error('The incoming JSON object does not contain a source_credentials field');\n        }\n        if (!json.service_account_impersonation_url) {\n            throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field');\n        }\n        // Create source client for impersonation\n        const sourceClient = new refreshclient_1.UserRefreshClient(json.source_credentials.client_id, json.source_credentials.client_secret, json.source_credentials.refresh_token);\n        // Extract service account from service_account_impersonation_url\n        const targetPrincipal = (_b = (_a = /(?<target>[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _a === void 0 ? void 0 : _a.groups) === null || _b === void 0 ? void 0 : _b.target;\n        if (!targetPrincipal) {\n            throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`);\n        }\n        const targetScopes = (_c = this.getAnyScopes()) !== null && _c !== void 0 ? _c : [];\n        const client = new impersonated_1.Impersonated({\n            delegates: (_d = json.delegates) !== null && _d !== void 0 ? _d : [],\n            sourceClient: sourceClient,\n            targetPrincipal: targetPrincipal,\n            targetScopes: Array.isArray(targetScopes) ? 
targetScopes : [targetScopes],\n });\n return client;\n }\n /**\n * Create a credentials instance using the given input options.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n fromJSON(json, options) {\n let client;\n if (!json) {\n throw new Error('Must pass in a JSON object containing the Google auth settings.');\n }\n options = options || {};\n if (json.type === 'authorized_user') {\n client = new refreshclient_1.UserRefreshClient(options);\n client.fromJSON(json);\n }\n else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n client = this.fromImpersonatedJSON(json);\n }\n else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n client = externalclient_1.ExternalAccountClient.fromJSON(json, options);\n client.scopes = this.getAnyScopes();\n }\n else {\n options.scopes = this.scopes;\n client = new jwtclient_1.JWT(options);\n this.setGapicJWTValues(client);\n client.fromJSON(json);\n }\n return client;\n }\n /**\n * Return a JWT or UserRefreshClient from JavaScript object, caching both the\n * object used to instantiate and the client.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n _cacheClientFromJSON(json, options) {\n let client;\n // create either a UserRefreshClient or JWT client.\n options = options || {};\n if (json.type === 'authorized_user') {\n client = new refreshclient_1.UserRefreshClient(options);\n client.fromJSON(json);\n }\n else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n client = this.fromImpersonatedJSON(json);\n }\n else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n client = externalclient_1.ExternalAccountClient.fromJSON(json, options);\n client.scopes = this.getAnyScopes();\n }\n else {\n options.scopes = this.scopes;\n client = new jwtclient_1.JWT(options);\n this.setGapicJWTValues(client);\n client.fromJSON(json);\n }\n // cache both raw data used to instantiate client and client itself.\n this.jsonContent = json;\n this.cachedCredential = client;\n return client;\n }\n fromStream(inputStream, optionsOrCallback = {}, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback);\n }\n else {\n return this.fromStreamAsync(inputStream, options);\n }\n }\n fromStreamAsync(inputStream, options) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the Google auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n try {\n const data = JSON.parse(s);\n const r = this._cacheClientFromJSON(data, options);\n return resolve(r);\n }\n catch (err) {\n // If we failed parsing this.keyFileName, assume that it\n // is a PEM or p12 certificate:\n if (!this.keyFilename)\n throw err;\n const client = new jwtclient_1.JWT({\n ...this.clientOptions,\n keyFile: this.keyFilename,\n });\n this.cachedCredential = client;\n this.setGapicJWTValues(client);\n return resolve(client);\n }\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n /**\n * Create a credentials instance using the given API key string.\n * @param apiKey The API 
key string\n * @param options An optional options object.\n * @returns A JWT loaded from the key\n */\n fromAPIKey(apiKey, options) {\n options = options || {};\n const client = new jwtclient_1.JWT(options);\n client.fromAPIKey(apiKey);\n return client;\n }\n /**\n * Determines whether the current operating system is Windows.\n * @api private\n */\n _isWindows() {\n const sys = os.platform();\n if (sys && sys.length >= 3) {\n if (sys.substring(0, 3).toLowerCase() === 'win') {\n return true;\n }\n }\n return false;\n }\n /**\n * Run the Google Cloud SDK command that prints the default project ID\n */\n async getDefaultServiceProjectId() {\n return new Promise(resolve => {\n (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => {\n if (!err && stdout) {\n try {\n const projectId = JSON.parse(stdout).configuration.properties.core.project;\n resolve(projectId);\n return;\n }\n catch (e) {\n // ignore errors\n }\n }\n resolve(null);\n });\n });\n }\n /**\n * Loads the project id from environment variables.\n * @api private\n */\n getProductionProjectId() {\n return (process.env['GCLOUD_PROJECT'] ||\n process.env['GOOGLE_CLOUD_PROJECT'] ||\n process.env['gcloud_project'] ||\n process.env['google_cloud_project']);\n }\n /**\n * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file.\n * @api private\n */\n async getFileProjectId() {\n if (this.cachedCredential) {\n // Try to read the project ID from the cached credentials file\n return this.cachedCredential.projectId;\n }\n // Ensure the projectId is loaded from the keyFile if available.\n if (this.keyFilename) {\n const creds = await this.getClient();\n if (creds && creds.projectId) {\n return creds.projectId;\n }\n }\n // Try to load a credentials file and read its project ID\n const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable();\n if (r) {\n return r.projectId;\n }\n else {\n return null;\n }\n }\n /**\n * Gets the project ID from external account client if available.\n */\n async getExternalAccountClientProjectId() {\n if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n return null;\n }\n const creds = await this.getClient();\n // Do not suppress the underlying error, as the error could contain helpful\n // information for debugging and fixing. This is especially true for\n // external account creds as in order to get the project ID, the following\n // operations have to succeed:\n // 1. Valid credentials file should be supplied.\n // 2. Ability to retrieve access tokens from STS token exchange API.\n // 3. Ability to exchange for service account impersonated credentials (if\n // enabled).\n // 4. 
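Editor's note: getDefaultServiceProjectId above shells out to `gcloud config config-helper --format json` and reads `configuration.properties.core.project` from its output. A small sketch of that parsing step; the `stdout` value is a hand-written, trimmed-down stand-in whose shape is inferred from the code above, not captured gcloud output:

    // Sketch: extract the project ID the same way getDefaultServiceProjectId does.
    const stdout = JSON.stringify({
      configuration: { properties: { core: { project: 'my-example-project' } } },
    });

    let projectId = null;
    try {
      projectId = JSON.parse(stdout).configuration.properties.core.project;
    } catch (e) {
      // Mirror the code above: parse errors simply leave projectId unset.
    }
    console.log(projectId); // 'my-example-project'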
Ability to get project info using the access token from step 2 or 3.\n // Without surfacing the error, it is harder for developers to determine\n // which step went wrong.\n return await creds.getProjectId();\n }\n /**\n * Gets the Compute Engine project ID if it can be inferred.\n */\n async getGCEProjectId() {\n try {\n const r = await gcpMetadata.project('project-id');\n return r;\n }\n catch (e) {\n // Ignore any errors\n return null;\n }\n }\n getCredentials(callback) {\n if (callback) {\n this.getCredentialsAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getCredentialsAsync();\n }\n }\n async getCredentialsAsync() {\n const client = await this.getClient();\n if (client instanceof baseexternalclient_1.BaseExternalAccountClient) {\n const serviceAccountEmail = client.getServiceAccountEmail();\n if (serviceAccountEmail) {\n return { client_email: serviceAccountEmail };\n }\n }\n if (this.jsonContent) {\n const credential = {\n client_email: this.jsonContent.client_email,\n private_key: this.jsonContent.private_key,\n };\n return credential;\n }\n const isGCE = await this._checkIsGCE();\n if (!isGCE) {\n throw new Error('Unknown error.');\n }\n // For GCE, return the service account details from the metadata server\n // NOTE: The trailing '/' at the end of service-accounts/ is very important!\n // The GCF metadata server doesn't respect querystring params if this / is\n // not included.\n const data = await gcpMetadata.instance({\n property: 'service-accounts/',\n params: { recursive: 'true' },\n });\n if (!data || !data.default || !data.default.email) {\n throw new Error('Failure from metadata server.');\n }\n return { client_email: data.default.email };\n }\n /**\n * Automatically obtain a client based on the provided configuration. 
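Editor's note: when no key material is available, getCredentialsAsync above falls back to the GCE metadata server via the gcp-metadata package. A sketch of that lookup under the assumption that gcp-metadata is installed and the code runs inside GCP; outside GCP the request simply fails:

    // Sketch of the metadata-server lookup getCredentialsAsync performs on GCE.
    const gcpMetadata = require('gcp-metadata');

    async function defaultServiceAccountEmail() {
      // The trailing '/' and recursive=true mirror the call above.
      const data = await gcpMetadata.instance({
        property: 'service-accounts/',
        params: { recursive: 'true' },
      });
      if (!data || !data.default || !data.default.email) {
        throw new Error('Failure from metadata server.');
      }
      return data.default.email;
    }

    defaultServiceAccountEmail().then(console.log, console.error);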
If no\n * options were passed, use Application Default Credentials.\n */\n async getClient() {\n if (!this.cachedCredential) {\n if (this.jsonContent) {\n this._cacheClientFromJSON(this.jsonContent, this.clientOptions);\n }\n else if (this.keyFilename) {\n const filePath = path.resolve(this.keyFilename);\n const stream = fs.createReadStream(filePath);\n await this.fromStreamAsync(stream, this.clientOptions);\n }\n else {\n await this.getApplicationDefaultAsync(this.clientOptions);\n }\n }\n return this.cachedCredential;\n }\n /**\n * Creates a client which will fetch an ID token for authorization.\n * @param targetAudience the audience for the fetched ID token.\n * @returns IdTokenClient for making HTTP calls authenticated with ID tokens.\n */\n async getIdTokenClient(targetAudience) {\n const client = await this.getClient();\n if (!('fetchIdToken' in client)) {\n throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.');\n }\n return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client });\n }\n /**\n * Automatically obtain application default credentials, and return\n * an access token for making requests.\n */\n async getAccessToken() {\n const client = await this.getClient();\n return (await client.getAccessToken()).token;\n }\n /**\n * Obtain the HTTP headers that will provide authorization for a given\n * request.\n */\n async getRequestHeaders(url) {\n const client = await this.getClient();\n return client.getRequestHeaders(url);\n }\n /**\n * Obtain credentials for a request, then attach the appropriate headers to\n * the request options.\n * @param opts Axios or Request options on which to attach the headers\n */\n async authorizeRequest(opts) {\n opts = opts || {};\n const url = opts.url || opts.uri;\n const client = await this.getClient();\n const headers = await client.getRequestHeaders(url);\n opts.headers = Object.assign(opts.headers || {}, headers);\n return opts;\n }\n /**\n * Automatically obtain application default credentials, and make an\n * HTTP request using the given options.\n * @param opts Axios request options for the HTTP request.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n async request(opts) {\n const client = await this.getClient();\n return client.request(opts);\n }\n /**\n * Determine the compute environment in which the code is running.\n */\n getEnv() {\n return (0, envDetect_1.getEnv)();\n }\n /**\n * Sign the given data with the current private key, or go out\n * to the IAM API to sign it.\n * @param data The data to be signed.\n */\n async sign(data) {\n const client = await this.getClient();\n const crypto = (0, crypto_1.createCrypto)();\n if (client instanceof jwtclient_1.JWT && client.key) {\n const sign = await crypto.sign(client.key, data);\n return sign;\n }\n const creds = await this.getCredentials();\n if (!creds.client_email) {\n throw new Error('Cannot sign data without `client_email`.');\n }\n return this.signBlob(crypto, creds.client_email, data);\n }\n async signBlob(crypto, emailOrUniqueId, data) {\n const url = 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' +\n `${emailOrUniqueId}:signBlob`;\n const res = await this.request({\n method: 'POST',\n url,\n data: {\n payload: crypto.encodeBase64StringUtf8(data),\n },\n });\n return res.data.signedBlob;\n }\n}\nexports.GoogleAuth = GoogleAuth;\n/**\n * Export DefaultTransporter as a static property of 
the class.\n */\nGoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter;\n//# sourceMappingURL=googleauth.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IAMAuth = void 0;\nclass IAMAuth {\n /**\n * IAM credentials.\n *\n * @param selector the iam authority selector\n * @param token the token\n * @constructor\n */\n constructor(selector, token) {\n this.selector = selector;\n this.token = token;\n this.selector = selector;\n this.token = token;\n }\n /**\n * Acquire the HTTP headers required to make an authenticated request.\n */\n getRequestHeaders() {\n return {\n 'x-goog-iam-authority-selector': this.selector,\n 'x-goog-iam-authorization-token': this.token,\n };\n }\n}\nexports.IAMAuth = IAMAuth;\n//# sourceMappingURL=iam.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar _a, _b, _c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdentityPoolClient = void 0;\nconst fs = require(\"fs\");\nconst util_1 = require(\"util\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n// fs.readfile is undefined in browser karma tests causing\n// `npm run browser-test` to fail as test.oauth2.ts imports this file via\n// src/index.ts.\n// Fallback to void function to avoid promisify throwing a TypeError.\nconst readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { }));\nconst realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { }));\nconst lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { }));\n/**\n * Defines the Url-sourced and file-sourced external account clients mainly\n * used for K8s and Azure workloads.\n */\nclass IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiate an IdentityPoolClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid file-sourced or\n * url-sourced credential or a workforce pool user project is provided\n * with a non workforce audience.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. 
These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n var _a, _b;\n super(options, additionalOptions);\n this.file = options.credential_source.file;\n this.url = options.credential_source.url;\n this.headers = options.credential_source.headers;\n if (!this.file && !this.url) {\n throw new Error('No valid Identity Pool \"credential_source\" provided');\n }\n // Text is the default format type.\n this.formatType = ((_a = options.credential_source.format) === null || _a === void 0 ? void 0 : _a.type) || 'text';\n this.formatSubjectTokenFieldName =\n (_b = options.credential_source.format) === null || _b === void 0 ? void 0 : _b.subject_token_field_name;\n if (this.formatType !== 'json' && this.formatType !== 'text') {\n throw new Error(`Invalid credential_source format \"${this.formatType}\"`);\n }\n if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) {\n throw new Error('Missing subject_token_field_name for JSON credential_source format');\n }\n }\n /**\n * Triggered when a external subject token is needed to be exchanged for a GCP\n * access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this either retrieves the local credential from a file location (k8s\n * workload) or by sending a GET request to a local metadata server (Azure\n * workloads).\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n if (this.file) {\n return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName);\n }\n return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers);\n }\n /**\n * Looks up the external subject token in the file path provided and\n * resolves with that token.\n * @param file The file path where the external credential is located.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) {\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. Expect a thrown error\n // if not resolvable.\n filePath = await realpath(filePath);\n if (!(await lstat(filePath)).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`;\n }\n throw err;\n }\n let subjectToken;\n const rawText = await readFile(filePath, { encoding: 'utf8' });\n if (formatType === 'text') {\n subjectToken = rawText;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const json = JSON.parse(rawText);\n subjectToken = json[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source file');\n }\n return subjectToken;\n }\n /**\n * Sends a GET request to the URL provided and resolves with the returned\n * external subject token.\n * @param url The URL to call to retrieve the subject token. 
This is typically\n * a local metadata server.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @param headers The optional additional headers to send with the request to\n * the metadata server url.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) {\n const opts = {\n url,\n method: 'GET',\n headers,\n responseType: formatType,\n };\n let subjectToken;\n if (formatType === 'text') {\n const response = await this.transporter.request(opts);\n subjectToken = response.data;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const response = await this.transporter.request(opts);\n subjectToken = response.data[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source URL');\n }\n return subjectToken;\n }\n}\nexports.IdentityPoolClient = IdentityPoolClient;\n//# sourceMappingURL=identitypoolclient.js.map","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdTokenClient = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nclass IdTokenClient extends oauth2client_1.OAuth2Client {\n /**\n * Google ID Token client\n *\n * Retrieve access token from the metadata server.\n * See: https://developers.google.com/compute/docs/authentication\n */\n constructor(options) {\n super();\n this.targetAudience = options.targetAudience;\n this.idTokenProvider = options.idTokenProvider;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n if (!this.credentials.id_token ||\n (this.credentials.expiry_date || 0) < Date.now()) {\n const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience);\n this.credentials = {\n id_token: idToken,\n expiry_date: this.getIdTokenExpiryDate(idToken),\n };\n }\n const headers = {\n Authorization: 'Bearer ' + this.credentials.id_token,\n };\n return { headers };\n }\n getIdTokenExpiryDate(idToken) {\n const payloadB64 = idToken.split('.')[1];\n if (payloadB64) {\n const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii'));\n return payload.exp * 1000;\n }\n }\n}\nexports.IdTokenClient = IdTokenClient;\n//# sourceMappingURL=idtokenclient.js.map","\"use strict\";\n/**\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is 
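Editor's note: the IdentityPoolClient constructor checks further above imply the shape of a file-sourced external account credential: credential_source.file (or .url), an optional format.type of 'text' or 'json', and subject_token_field_name when the format is JSON. A hedged example configuration; the audience, pool, provider and file path are invented for illustration, and it assumes IdentityPoolClient is exported by the library build in use (GoogleAuth.fromJSON or ExternalAccountClient.fromJSON is the more common entry point):

    // Sketch: a file-sourced external_account credential of the kind IdentityPoolClient accepts.
    const externalAccountJson = {
      type: 'external_account',
      audience: '//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/pool/providers/provider',
      subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
      token_url: 'https://sts.googleapis.com/v1/token',
      credential_source: {
        file: '/var/run/secrets/tokens/sa-token',      // hypothetical token file
        format: {
          type: 'json',                                // 'text' is the default
          subject_token_field_name: 'access_token',    // required when type is 'json'
        },
      },
    };

    const { IdentityPoolClient } = require('google-auth-library');
    const client = new IdentityPoolClient(externalAccountJson);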
distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nconst gaxios_1 = require(\"gaxios\");\nexports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account';\nclass Impersonated extends oauth2client_1.OAuth2Client {\n /**\n * Impersonated service account credentials.\n *\n * Create a new access token by impersonating another service account.\n *\n * Impersonated Credentials allowing credentials issued to a user or\n * service account to impersonate another. The source project using\n * Impersonated Credentials must enable the \"IAMCredentials\" API.\n * Also, the target service account must grant the orginating principal\n * the \"Service Account Token Creator\" IAM role.\n *\n * @param {object} options - The configuration object.\n * @param {object} [options.sourceClient] the source credential used as to\n * acquire the impersonated credentials.\n * @param {string} [options.targetPrincipal] the service account to\n * impersonate.\n * @param {string[]} [options.delegates] the chained list of delegates\n * required to grant the final access_token. If set, the sequence of\n * identities must have \"Service Account Token Creator\" capability granted to\n * the preceding identity. For example, if set to [serviceAccountB,\n * serviceAccountC], the sourceCredential must have the Token Creator role on\n * serviceAccountB. serviceAccountB must have the Token Creator on\n * serviceAccountC. Finally, C must have Token Creator on target_principal.\n * If left unset, sourceCredential must have that role on targetPrincipal.\n * @param {string[]} [options.targetScopes] scopes to request during the\n * authorization grant.\n * @param {number} [options.lifetime] number of seconds the delegated\n * credential should be valid for up to 3600 seconds by default, or 43,200\n * seconds by extending the token's lifetime, see:\n * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth\n * @param {string} [options.endpoint] api endpoint override.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f;\n super(options);\n this.credentials = {\n expiry_date: 1,\n refresh_token: 'impersonated-placeholder',\n };\n this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client();\n this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : '';\n this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : [];\n this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : [];\n this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600;\n this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? 
_f : 'https://iamcredentials.googleapis.com';\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshToken(refreshToken) {\n var _a, _b, _c, _d, _e, _f;\n try {\n await this.sourceClient.getAccessToken();\n const name = 'projects/-/serviceAccounts/' + this.targetPrincipal;\n const u = `${this.endpoint}/v1/${name}:generateAccessToken`;\n const body = {\n delegates: this.delegates,\n scope: this.targetScopes,\n lifetime: this.lifetime + 's',\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n const tokenResponse = res.data;\n this.credentials.access_token = tokenResponse.accessToken;\n this.credentials.expiry_date = Date.parse(tokenResponse.expireTime);\n return {\n tokens: this.credentials,\n res,\n };\n }\n catch (error) {\n if (!(error instanceof Error))\n throw error;\n let status = 0;\n let message = '';\n if (error instanceof gaxios_1.GaxiosError) {\n status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status;\n message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message;\n }\n if (status && message) {\n error.message = `${status}: unable to impersonate: ${message}`;\n throw error;\n }\n else {\n error.message = `unable to impersonate: ${error}`;\n throw error;\n }\n }\n }\n /**\n * Generates an OpenID Connect ID token for a service account.\n *\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation}\n *\n * @param targetAudience the audience for the fetched ID token.\n * @param options the for the request\n * @return an OpenID Connect ID token\n */\n async fetchIdToken(targetAudience, options) {\n var _a;\n await this.sourceClient.getAccessToken();\n const name = `projects/-/serviceAccounts/${this.targetPrincipal}`;\n const u = `${this.endpoint}/v1/${name}:generateIdToken`;\n const body = {\n delegates: this.delegates,\n audience: targetAudience,\n includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? 
_a : true,\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n return res.data.token;\n }\n}\nexports.Impersonated = Impersonated;\n//# sourceMappingURL=impersonated.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWTAccess = void 0;\nconst jws = require(\"jws\");\nconst LRU = require(\"lru-cache\");\nconst DEFAULT_HEADER = {\n alg: 'RS256',\n typ: 'JWT',\n};\nclass JWTAccess {\n /**\n * JWTAccess service account credentials.\n *\n * Create a new access token by using the credential to create a new JWT token\n * that's recognized as the access token.\n *\n * @param email the service account email address.\n * @param key the private key that will be used to sign the token.\n * @param keyId the ID of the private key used to sign the token.\n */\n constructor(email, key, keyId, eagerRefreshThresholdMillis) {\n this.cache = new LRU({\n max: 500,\n maxAge: 60 * 60 * 1000,\n });\n this.email = email;\n this.key = key;\n this.keyId = keyId;\n this.eagerRefreshThresholdMillis =\n eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000;\n }\n /**\n * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url\n *\n * @param url The URI being authorized.\n * @param scopes The scope or scopes being authorized\n * @returns A string that returns the cached key.\n */\n getCachedKey(url, scopes) {\n let cacheKey = url;\n if (scopes && Array.isArray(scopes) && scopes.length) {\n cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`;\n }\n else if (typeof scopes === 'string') {\n cacheKey = url ? 
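Editor's note: as the refreshToken implementation above shows, Impersonated exchanges the source client's access token at the IAM Credentials generateAccessToken endpoint. A hedged usage sketch; the target service account and scopes are placeholders, and it assumes working source credentials with the Service Account Token Creator role on the target:

    // Sketch: impersonating a service account with the Impersonated client above.
    const { GoogleAuth, Impersonated } = require('google-auth-library');

    async function main() {
      const auth = new GoogleAuth({
        scopes: 'https://www.googleapis.com/auth/cloud-platform',
      });
      const sourceClient = await auth.getClient();

      const impersonated = new Impersonated({
        sourceClient,
        targetPrincipal: 'target-sa@example-project.iam.gserviceaccount.com', // placeholder
        targetScopes: ['https://www.googleapis.com/auth/cloud-platform'],
        lifetime: 3600, // seconds; the default shown above
        // endpoint defaults to https://iamcredentials.googleapis.com
      });

      const { token } = await impersonated.getAccessToken();
      console.log(token ? 'impersonated token acquired' : 'no token');
    }

    main().catch(console.error);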
`${url}_${scopes}` : scopes;\n }\n if (!cacheKey) {\n throw Error('Scopes or url must be provided');\n }\n return cacheKey;\n }\n /**\n * Get a non-expired access token, after refreshing if necessary.\n *\n * @param url The URI being authorized.\n * @param additionalClaims An object with a set of additional claims to\n * include in the payload.\n * @returns An object that includes the authorization header.\n */\n getRequestHeaders(url, additionalClaims, scopes) {\n // Return cached authorization headers, unless we are within\n // eagerRefreshThresholdMillis ms of them expiring:\n const key = this.getCachedKey(url, scopes);\n const cachedToken = this.cache.get(key);\n const now = Date.now();\n if (cachedToken &&\n cachedToken.expiration - now > this.eagerRefreshThresholdMillis) {\n return cachedToken.headers;\n }\n const iat = Math.floor(Date.now() / 1000);\n const exp = JWTAccess.getExpirationTime(iat);\n let defaultClaims;\n // Turn scopes into space-separated string\n if (Array.isArray(scopes)) {\n scopes = scopes.join(' ');\n }\n // If scopes are specified, sign with scopes\n if (scopes) {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n scope: scopes,\n exp,\n iat,\n };\n }\n else {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n aud: url,\n exp,\n iat,\n };\n }\n // if additionalClaims are provided, ensure they do not collide with\n // other required claims.\n if (additionalClaims) {\n for (const claim in defaultClaims) {\n if (additionalClaims[claim]) {\n throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`);\n }\n }\n }\n const header = this.keyId\n ? { ...DEFAULT_HEADER, kid: this.keyId }\n : DEFAULT_HEADER;\n const payload = Object.assign(defaultClaims, additionalClaims);\n // Sign the jwt and add it to the cache\n const signedJWT = jws.sign({ header, payload, secret: this.key });\n const headers = { Authorization: `Bearer ${signedJWT}` };\n this.cache.set(key, {\n expiration: exp * 1000,\n headers,\n });\n return headers;\n }\n /**\n * Returns an expiration time for the JWT token.\n *\n * @param iat The issued at time for the JWT.\n * @returns An expiration time for the JWT.\n */\n static getExpirationTime(iat) {\n const exp = iat + 3600; // 3600 seconds = 1 hour\n return exp;\n }\n /**\n * Create a JWTAccess credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n reject(new Error('Must pass in a stream containing the service account auth settings.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('data', chunk => (s += chunk))\n .on('error', reject)\n .on('end', () => {\n try {\n 
const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (err) {\n reject(err);\n }\n });\n });\n }\n}\nexports.JWTAccess = JWTAccess;\n//# sourceMappingURL=jwtaccess.js.map","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWT = void 0;\nconst gtoken_1 = require(\"gtoken\");\nconst jwtaccess_1 = require(\"./jwtaccess\");\nconst oauth2client_1 = require(\"./oauth2client\");\nclass JWT extends oauth2client_1.OAuth2Client {\n constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) {\n const opts = optionsOrEmail && typeof optionsOrEmail === 'object'\n ? optionsOrEmail\n : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject };\n super({\n eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis,\n forceRefreshOnFailure: opts.forceRefreshOnFailure,\n });\n this.email = opts.email;\n this.keyFile = opts.keyFile;\n this.key = opts.key;\n this.keyId = opts.keyId;\n this.scopes = opts.scopes;\n this.subject = opts.subject;\n this.additionalClaims = opts.additionalClaims;\n this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 };\n }\n /**\n * Creates a copy of the credential with the specified scopes.\n * @param scopes List of requested scopes or a single scope.\n * @return The cloned instance.\n */\n createScoped(scopes) {\n return new JWT({\n email: this.email,\n keyFile: this.keyFile,\n key: this.key,\n keyId: this.keyId,\n scopes,\n subject: this.subject,\n additionalClaims: this.additionalClaims,\n });\n }\n /**\n * Obtains the metadata to be sent with the request.\n *\n * @param url the URI being authorized.\n */\n async getRequestMetadataAsync(url) {\n url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url;\n const useSelfSignedJWT = (!this.hasUserScopes() && url) ||\n (this.useJWTAccessWithScope && this.hasAnyScopes());\n if (!this.apiKey && useSelfSignedJWT) {\n if (this.additionalClaims &&\n this.additionalClaims.target_audience) {\n const { tokens } = await this.refreshToken();\n return {\n headers: this.addSharedMetadataHeaders({\n Authorization: `Bearer ${tokens.id_token}`,\n }),\n };\n }\n else {\n // no scopes have been set, but a uri has been provided. Use JWTAccess\n // credentials.\n if (!this.access) {\n this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis);\n }\n let scopes;\n if (this.hasUserScopes()) {\n scopes = this.scopes;\n }\n else if (!url) {\n scopes = this.defaultScopes;\n }\n const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, \n // Scopes take precedent over audience for signing,\n // so we only provide them if useJWTAccessWithScope is on\n this.useJWTAccessWithScope ? 
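Editor's note: the headers produced by JWTAccess.getRequestHeaders above are a self-signed JWT: iss and sub are the service-account email, either scope (space-separated) or aud (the URL) is set, and exp is iat + 3600. A minimal sketch of the same claim set, assuming the jws package used above; the email, key and audience URL are hypothetical, and real code should go through JWTAccess/JWT rather than signing by hand:

    // Sketch: the claim set JWTAccess signs, built by hand for illustration.
    const jws = require('jws');

    const email = 'sa@example-project.iam.gserviceaccount.com';          // placeholder
    const privateKey = process.env.EXAMPLE_PRIVATE_KEY;                  // placeholder PEM key
    const iat = Math.floor(Date.now() / 1000);
    const payload = {
      iss: email,
      sub: email,
      aud: 'https://storage.googleapis.com/', // or `scope: '...'` when scopes are used
      iat,
      exp: iat + 3600,                        // one hour, as in getExpirationTime above
    };

    const signedJWT = jws.sign({
      header: { alg: 'RS256', typ: 'JWT' },
      payload,
      secret: privateKey,
    });
    const headers = { Authorization: `Bearer ${signedJWT}` };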
scopes : undefined);\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n else if (this.hasAnyScopes() || this.apiKey) {\n return super.getRequestMetadataAsync(url);\n }\n else {\n // If no audience, apiKey, or scopes are provided, we should not attempt\n // to populate any headers:\n return { headers: {} };\n }\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n // Create a new gToken for fetching an ID token\n const gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: { target_audience: targetAudience },\n transporter: this.transporter,\n });\n await gtoken.getToken({\n forceRefresh: true,\n });\n if (!gtoken.idToken) {\n throw new Error('Unknown error: Failed to fetch ID token');\n }\n return gtoken.idToken;\n }\n /**\n * Determine if there are currently scopes available.\n */\n hasUserScopes() {\n if (!this.scopes) {\n return false;\n }\n return this.scopes.length > 0;\n }\n /**\n * Are there any default or user scopes defined.\n */\n hasAnyScopes() {\n if (this.scopes && this.scopes.length > 0)\n return true;\n if (this.defaultScopes && this.defaultScopes.length > 0)\n return true;\n return false;\n }\n authorize(callback) {\n if (callback) {\n this.authorizeAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.authorizeAsync();\n }\n }\n async authorizeAsync() {\n const result = await this.refreshToken();\n if (!result) {\n throw new Error('No result returned');\n }\n this.credentials = result.tokens;\n this.credentials.refresh_token = 'jwt-placeholder';\n this.key = this.gtoken.key;\n this.email = this.gtoken.iss;\n return result.tokens;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken ignored\n * @private\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const gtoken = this.createGToken();\n const token = await gtoken.getToken({\n forceRefresh: this.isTokenExpiring(),\n });\n const tokens = {\n access_token: token.access_token,\n token_type: 'Bearer',\n expiry_date: gtoken.expiresAt,\n id_token: gtoken.idToken,\n };\n this.emit('tokens', tokens);\n return { res: null, tokens };\n }\n /**\n * Create a gToken if it doesn't already exist.\n */\n createGToken() {\n if (!this.gtoken) {\n this.gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: this.additionalClaims,\n transporter: this.transporter,\n });\n }\n return this.gtoken;\n }\n /**\n * Create a JWT credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n this.quotaProjectId = json.quota_project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n 
this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the service account auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (e) {\n reject(e);\n }\n });\n });\n }\n /**\n * Creates a JWT credentials instance using an API Key for authentication.\n * @param apiKey The API Key in string form.\n */\n fromAPIKey(apiKey) {\n if (typeof apiKey !== 'string') {\n throw new Error('Must provide an API Key string.');\n }\n this.apiKey = apiKey;\n }\n /**\n * Using the key or keyFile on the JWT client, obtain an object that contains\n * the key and the client email.\n */\n async getCredentials() {\n if (this.key) {\n return { private_key: this.key, client_email: this.email };\n }\n else if (this.keyFile) {\n const gtoken = this.createGToken();\n const creds = await gtoken.getCredentials(this.keyFile);\n return { private_key: creds.privateKey, client_email: creds.clientEmail };\n }\n throw new Error('A key or a keyFile must be provided to getCredentials.');\n }\n}\nexports.JWT = JWT;\n//# sourceMappingURL=jwtclient.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LoginTicket = void 0;\nclass LoginTicket {\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @param {string} env Envelope of the jwt\n * @param {TokenPayload} pay Payload of the jwt\n * @constructor\n */\n constructor(env, pay) {\n this.envelope = env;\n this.payload = pay;\n }\n getEnvelope() {\n return this.envelope;\n }\n getPayload() {\n return this.payload;\n }\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @return The user ID\n */\n getUserId() {\n const payload = this.getPayload();\n if (payload && payload.sub) {\n return payload.sub;\n }\n return null;\n }\n /**\n * Returns attributes from the login ticket. 
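Editor's note: the JWT client above is typically fed a service-account key file. A hedged usage sketch showing fromJSON-style construction, authorize and fetchIdToken; the key path and audience are placeholders:

    // Sketch: using the JWT client defined above with a service-account key.
    const fs = require('fs');
    const { JWT } = require('google-auth-library');

    async function main() {
      const keys = JSON.parse(fs.readFileSync('key.json', 'utf8')); // hypothetical path
      const client = new JWT({
        email: keys.client_email,
        key: keys.private_key,
        scopes: ['https://www.googleapis.com/auth/cloud-platform'],
      });

      await client.authorize();                         // populates client.credentials
      const headers = await client.getRequestHeaders(); // { Authorization: 'Bearer ...' }
      const idToken = await client.fetchIdToken('https://example.com'); // placeholder audience
      console.log(Object.keys(headers), idToken ? 'got id token' : 'no id token');
    }

    main().catch(console.error);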
This can contain\n * various information about the user session.\n *\n * @return The envelope and payload\n */\n getAttributes() {\n return { envelope: this.getEnvelope(), payload: this.getPayload() };\n }\n}\nexports.LoginTicket = LoginTicket;\n//# sourceMappingURL=loginticket.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst stream = require(\"stream\");\nconst formatEcdsa = require(\"ecdsa-sig-formatter\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst authclient_1 = require(\"./authclient\");\nconst loginticket_1 = require(\"./loginticket\");\nvar CodeChallengeMethod;\n(function (CodeChallengeMethod) {\n CodeChallengeMethod[\"Plain\"] = \"plain\";\n CodeChallengeMethod[\"S256\"] = \"S256\";\n})(CodeChallengeMethod = exports.CodeChallengeMethod || (exports.CodeChallengeMethod = {}));\nvar CertificateFormat;\n(function (CertificateFormat) {\n CertificateFormat[\"PEM\"] = \"PEM\";\n CertificateFormat[\"JWK\"] = \"JWK\";\n})(CertificateFormat = exports.CertificateFormat || (exports.CertificateFormat = {}));\nclass OAuth2Client extends authclient_1.AuthClient {\n constructor(optionsOrClientId, clientSecret, redirectUri) {\n super();\n this.certificateCache = {};\n this.certificateExpiry = null;\n this.certificateCacheFormat = CertificateFormat.PEM;\n this.refreshTokenPromises = new Map();\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? optionsOrClientId\n : { clientId: optionsOrClientId, clientSecret, redirectUri };\n this._clientId = opts.clientId;\n this._clientSecret = opts.clientSecret;\n this.redirectUri = opts.redirectUri;\n this.eagerRefreshThresholdMillis =\n opts.eagerRefreshThresholdMillis || 5 * 60 * 1000;\n this.forceRefreshOnFailure = !!opts.forceRefreshOnFailure;\n }\n /**\n * Generates URL for consent page landing.\n * @param opts Options.\n * @return URL to consent page.\n */\n generateAuthUrl(opts = {}) {\n if (opts.code_challenge_method && !opts.code_challenge) {\n throw new Error('If a code_challenge_method is provided, code_challenge must be included.');\n }\n opts.response_type = opts.response_type || 'code';\n opts.client_id = opts.client_id || this._clientId;\n opts.redirect_uri = opts.redirect_uri || this.redirectUri;\n // Allow scopes to be passed either as array or a string\n if (Array.isArray(opts.scope)) {\n opts.scope = opts.scope.join(' ');\n }\n const rootUrl = OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_;\n return (rootUrl +\n '?' 
+\n querystring.stringify(opts));\n }\n generateCodeVerifier() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.');\n }\n /**\n * Convenience method to automatically generate a code_verifier, and its\n * resulting SHA256. If used, this must be paired with a S256\n * code_challenge_method.\n *\n * For a full example see:\n * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js\n */\n async generateCodeVerifierAsync() {\n // base64 encoding uses 6 bits per character, and we want to generate128\n // characters. 6*128/8 = 96.\n const crypto = (0, crypto_1.createCrypto)();\n const randomString = crypto.randomBytesBase64(96);\n // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/\n // \"-\"/\".\"/\"_\"/\"~\". Base64 encoded strings are pretty close, so we're just\n // swapping out a few chars.\n const codeVerifier = randomString\n .replace(/\\+/g, '~')\n .replace(/=/g, '_')\n .replace(/\\//g, '-');\n // Generate the base64 encoded SHA256\n const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier);\n // We need to use base64UrlEncoding instead of standard base64\n const codeChallenge = unencodedCodeChallenge\n .split('=')[0]\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n return { codeVerifier, codeChallenge };\n }\n getToken(codeOrOptions, callback) {\n const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions;\n if (callback) {\n this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response));\n }\n else {\n return this.getTokenAsync(options);\n }\n }\n async getTokenAsync(options) {\n const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_;\n const values = {\n code: options.code,\n client_id: options.client_id || this._clientId,\n client_secret: this._clientSecret,\n redirect_uri: options.redirect_uri || this.redirectUri,\n grant_type: 'authorization_code',\n code_verifier: options.codeVerifier,\n };\n const res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(values),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n const tokens = res.data;\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n /**\n * Refreshes the access token.\n * @param refresh_token Existing refresh token.\n * @private\n */\n async refreshToken(refreshToken) {\n if (!refreshToken) {\n return this.refreshTokenNoCache(refreshToken);\n }\n // If a request to refresh using the same token has started,\n // return the same promise.\n if (this.refreshTokenPromises.has(refreshToken)) {\n return this.refreshTokenPromises.get(refreshToken);\n }\n const p = this.refreshTokenNoCache(refreshToken).then(r => {\n this.refreshTokenPromises.delete(refreshToken);\n return r;\n }, e => {\n this.refreshTokenPromises.delete(refreshToken);\n throw e;\n });\n this.refreshTokenPromises.set(refreshToken, p);\n return p;\n }\n async refreshTokenNoCache(refreshToken) {\n var _a;\n if (!refreshToken) {\n throw new Error('No refresh token is set.');\n }\n const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_;\n const data = {\n refresh_token: refreshToken,\n client_id: this._clientId,\n client_secret: this._clientSecret,\n 
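Editor's note: generateCodeVerifierAsync and generateAuthUrl above implement the PKCE piece of the OAuth 2.0 authorization-code flow, and getToken then redeems the code together with the verifier. A hedged sketch of that sequence; the client ID/secret, redirect URI and the received code are placeholders:

    // Sketch: PKCE authorization-code flow with the OAuth2Client defined above.
    const { OAuth2Client } = require('google-auth-library');

    async function main() {
      const client = new OAuth2Client(
        'CLIENT_ID', 'CLIENT_SECRET', 'http://localhost:3000/callback' // placeholders
      );

      const { codeVerifier, codeChallenge } = await client.generateCodeVerifierAsync();
      const authUrl = client.generateAuthUrl({
        access_type: 'offline',
        scope: ['https://www.googleapis.com/auth/userinfo.profile'],
        code_challenge: codeChallenge,
        code_challenge_method: 'S256',
      });
      console.log('Visit:', authUrl);

      // After the user is redirected back with ?code=..., exchange it:
      const code = 'PASTE_CODE_HERE'; // placeholder
      const { tokens } = await client.getToken({ code, codeVerifier });
      client.setCredentials(tokens);
    }

    main().catch(console.error);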
grant_type: 'refresh_token',\n };\n let res;\n try {\n // request for new token\n res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(data),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError &&\n e.message === 'invalid_grant' &&\n ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) &&\n /ReAuth/i.test(e.response.data.error_description)) {\n e.message = JSON.stringify(e.response.data);\n }\n throw e;\n }\n const tokens = res.data;\n // TODO: de-duplicate this code from a few spots\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n refreshAccessToken(callback) {\n if (callback) {\n this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback);\n }\n else {\n return this.refreshAccessTokenAsync();\n }\n }\n async refreshAccessTokenAsync() {\n const r = await this.refreshToken(this.credentials.refresh_token);\n const tokens = r.tokens;\n tokens.refresh_token = this.credentials.refresh_token;\n this.credentials = tokens;\n return { credentials: this.credentials, res: r.res };\n }\n getAccessToken(callback) {\n if (callback) {\n this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback);\n }\n else {\n return this.getAccessTokenAsync();\n }\n }\n async getAccessTokenAsync() {\n const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring();\n if (shouldRefresh) {\n if (!this.credentials.refresh_token) {\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n return { token: this.credentials.access_token };\n }\n }\n else {\n throw new Error('No refresh token or refresh handler callback is set.');\n }\n }\n const r = await this.refreshAccessTokenAsync();\n if (!r.credentials || (r.credentials && !r.credentials.access_token)) {\n throw new Error('Could not refresh access token.');\n }\n return { token: r.credentials.access_token, res: r.res };\n }\n else {\n return { token: this.credentials.access_token };\n }\n }\n /**\n * The main authentication interface. 
It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * In OAuth2Client, the result has the form:\n * { Authorization: 'Bearer ' }\n * @param url The optional url being authorized\n */\n async getRequestHeaders(url) {\n const headers = (await this.getRequestMetadataAsync(url)).headers;\n return headers;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n const thisCreds = this.credentials;\n if (!thisCreds.access_token &&\n !thisCreds.refresh_token &&\n !this.apiKey &&\n !this.refreshHandler) {\n throw new Error('No access, refresh token, API key or refresh handler callback is set.');\n }\n if (thisCreds.access_token && !this.isTokenExpiring()) {\n thisCreds.token_type = thisCreds.token_type || 'Bearer';\n const headers = {\n Authorization: thisCreds.token_type + ' ' + thisCreds.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n // If refreshHandler exists, call processAndValidateRefreshHandler().\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n const headers = {\n Authorization: 'Bearer ' + this.credentials.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n if (this.apiKey) {\n return { headers: { 'X-Goog-Api-Key': this.apiKey } };\n }\n let r = null;\n let tokens = null;\n try {\n r = await this.refreshToken(thisCreds.refresh_token);\n tokens = r.tokens;\n }\n catch (err) {\n const e = err;\n if (e.response &&\n (e.response.status === 403 || e.response.status === 404)) {\n e.message = `Could not refresh access token: ${e.message}`;\n }\n throw e;\n }\n const credentials = this.credentials;\n credentials.token_type = credentials.token_type || 'Bearer';\n tokens.refresh_token = credentials.refresh_token;\n this.credentials = tokens;\n const headers = {\n Authorization: credentials.token_type + ' ' + tokens.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers), res: r.res };\n }\n /**\n * Generates an URL to revoke the given token.\n * @param token The existing token to be revoked.\n */\n static getRevokeTokenUrl(token) {\n const parameters = querystring.stringify({ token });\n return `${OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_}?${parameters}`;\n }\n revokeToken(token, callback) {\n const opts = {\n url: OAuth2Client.getRevokeTokenUrl(token),\n method: 'POST',\n };\n if (callback) {\n this.transporter\n .request(opts)\n .then(r => callback(null, r), callback);\n }\n else {\n return this.transporter.request(opts);\n }\n }\n revokeCredentials(callback) {\n if (callback) {\n this.revokeCredentialsAsync().then(res => callback(null, res), callback);\n }\n else {\n return this.revokeCredentialsAsync();\n }\n }\n async revokeCredentialsAsync() {\n const token = this.credentials.access_token;\n this.credentials = {};\n if (token) {\n return this.revokeToken(token);\n }\n else {\n throw new Error('No access token to revoke.');\n }\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n async requestAsync(opts, retry = false) {\n let r2;\n try {\n const r = await 
this.getRequestMetadataAsync(opts.url);\n opts.headers = opts.headers || {};\n if (r.headers && r.headers['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project'];\n }\n if (r.headers && r.headers.Authorization) {\n opts.headers.Authorization = r.headers.Authorization;\n }\n if (this.apiKey) {\n opts.headers['X-Goog-Api-Key'] = this.apiKey;\n }\n r2 = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - An access_token and refresh_token were available, but either no\n // expiry_date was available or the forceRefreshOnFailure flag is set.\n // The absent expiry_date case can happen when developers stash the\n // access_token and refresh_token for later use, but the access_token\n // fails on the first try because it's expired. Some developers may\n // choose to enable forceRefreshOnFailure to mitigate time-related\n // errors.\n // Or the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - No refresh_token was available\n // - An access_token and a refreshHandler callback were available, but\n // either no expiry_date was available or the forceRefreshOnFailure\n // flag is set. The access_token fails on the first try because it's\n // expired. Some developers may choose to enable forceRefreshOnFailure\n // to mitigate time-related errors.\n const mayRequireRefresh = this.credentials &&\n this.credentials.access_token &&\n this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure);\n const mayRequireRefreshWithNoRefreshToken = this.credentials &&\n this.credentials.access_token &&\n !this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure) &&\n this.refreshHandler;\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) {\n await this.refreshAccessTokenAsync();\n return this.requestAsync(opts, true);\n }\n else if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n mayRequireRefreshWithNoRefreshToken) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n }\n return this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return r2;\n }\n verifyIdToken(options, callback) {\n // This function used to accept two arguments instead of an options object.\n // Check the types to help users upgrade with less pain.\n // This check can be removed after a 2.0 release.\n if (callback && typeof callback !== 'function') {\n throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.');\n }\n if (callback) {\n this.verifyIdTokenAsync(options).then(r => callback(null, r), callback);\n }\n else {\n return this.verifyIdTokenAsync(options);\n }\n }\n async verifyIdTokenAsync(options) {\n if (!options.idToken) {\n throw new Error('The verifyIdToken method requires an ID Token');\n }\n const response = await this.getFederatedSignonCertsAsync();\n const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, OAuth2Client.ISSUERS_, options.maxExpiry);\n return login;\n }\n /**\n * Obtains information about the provisioned access token. Especially useful\n * if you want to check the scopes that were provisioned to a given token.\n *\n * @param accessToken Required. The Access Token for which you want to get\n * user info.\n */\n async getTokenInfo(accessToken) {\n const { data } = await this.transporter.request({\n method: 'POST',\n headers: {\n 'Content-Type': 'application/x-www-form-urlencoded',\n Authorization: `Bearer ${accessToken}`,\n },\n url: OAuth2Client.GOOGLE_TOKEN_INFO_URL,\n });\n const info = Object.assign({\n expiry_date: new Date().getTime() + data.expires_in * 1000,\n scopes: data.scope.split(' '),\n }, data);\n delete info.expires_in;\n delete info.scope;\n return info;\n }\n getFederatedSignonCerts(callback) {\n if (callback) {\n this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback);\n }\n else {\n return this.getFederatedSignonCertsAsync();\n }\n }\n async getFederatedSignonCertsAsync() {\n const nowTime = new Date().getTime();\n const format = (0, crypto_1.hasBrowserCrypto)()\n ? CertificateFormat.JWK\n : CertificateFormat.PEM;\n if (this.certificateExpiry &&\n nowTime < this.certificateExpiry.getTime() &&\n this.certificateCacheFormat === format) {\n return { certs: this.certificateCache, format };\n }\n let res;\n let url;\n switch (format) {\n case CertificateFormat.PEM:\n url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_;\n break;\n case CertificateFormat.JWK:\n url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_;\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n const cacheControl = res ? 
res.headers['cache-control'] : undefined;\n let cacheAge = -1;\n if (cacheControl) {\n const pattern = new RegExp('max-age=([0-9]*)');\n const regexResult = pattern.exec(cacheControl);\n if (regexResult && regexResult.length === 2) {\n // Cache results with max-age (in seconds)\n cacheAge = Number(regexResult[1]) * 1000; // milliseconds\n }\n }\n let certificates = {};\n switch (format) {\n case CertificateFormat.PEM:\n certificates = res.data;\n break;\n case CertificateFormat.JWK:\n for (const key of res.data.keys) {\n certificates[key.kid] = key;\n }\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n const now = new Date();\n this.certificateExpiry =\n cacheAge === -1 ? null : new Date(now.getTime() + cacheAge);\n this.certificateCache = certificates;\n this.certificateCacheFormat = format;\n return { certs: certificates, format, res };\n }\n getIapPublicKeys(callback) {\n if (callback) {\n this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback);\n }\n else {\n return this.getIapPublicKeysAsync();\n }\n }\n async getIapPublicKeysAsync() {\n let res;\n const url = OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_;\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n return { pubkeys: res.data, res };\n }\n verifySignedJwtWithCerts() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.');\n }\n /**\n * Verify the id token is signed with the correct certificate\n * and is from the correct audience.\n * @param jwt The jwt to verify (The ID Token in this case).\n * @param certs The array of certs to test the jwt against.\n * @param requiredAudience The audience to test the jwt against.\n * @param issuers The allowed issuers of the jwt (Optional).\n * @param maxExpiry The max expiry the certificate can be (Optional).\n * @return Returns a promise resolving to LoginTicket on verification.\n */\n async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) {\n const crypto = (0, crypto_1.createCrypto)();\n if (!maxExpiry) {\n maxExpiry = OAuth2Client.MAX_TOKEN_LIFETIME_SECS_;\n }\n const segments = jwt.split('.');\n if (segments.length !== 3) {\n throw new Error('Wrong number of segments in token: ' + jwt);\n }\n const signed = segments[0] + '.' 
+ segments[1];\n let signature = segments[2];\n let envelope;\n let payload;\n try {\n envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`;\n }\n throw err;\n }\n if (!envelope) {\n throw new Error(\"Can't parse token envelope: \" + segments[0]);\n }\n try {\n payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token payload '${segments[0]}`;\n }\n throw err;\n }\n if (!payload) {\n throw new Error(\"Can't parse token payload: \" + segments[1]);\n }\n if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) {\n // If this is not present, then there's no reason to attempt verification\n throw new Error('No pem found for envelope: ' + JSON.stringify(envelope));\n }\n const cert = certs[envelope.kid];\n if (envelope.alg === 'ES256') {\n signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64');\n }\n const verified = await crypto.verify(cert, signed, signature);\n if (!verified) {\n throw new Error('Invalid token signature: ' + jwt);\n }\n if (!payload.iat) {\n throw new Error('No issue time in token: ' + JSON.stringify(payload));\n }\n if (!payload.exp) {\n throw new Error('No expiration time in token: ' + JSON.stringify(payload));\n }\n const iat = Number(payload.iat);\n if (isNaN(iat))\n throw new Error('iat field using invalid format');\n const exp = Number(payload.exp);\n if (isNaN(exp))\n throw new Error('exp field using invalid format');\n const now = new Date().getTime() / 1000;\n if (exp >= now + maxExpiry) {\n throw new Error('Expiration time too far in future: ' + JSON.stringify(payload));\n }\n const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_;\n const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_;\n if (now < earliest) {\n throw new Error('Token used too early, ' +\n now +\n ' < ' +\n earliest +\n ': ' +\n JSON.stringify(payload));\n }\n if (now > latest) {\n throw new Error('Token used too late, ' +\n now +\n ' > ' +\n latest +\n ': ' +\n JSON.stringify(payload));\n }\n if (issuers && issuers.indexOf(payload.iss) < 0) {\n throw new Error('Invalid issuer, expected one of [' +\n issuers +\n '], but got ' +\n payload.iss);\n }\n // Check the audience matches if we have one\n if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) {\n const aud = payload.aud;\n let audVerified = false;\n // If the requiredAudience is an array, check if it contains token\n // audience\n if (requiredAudience.constructor === Array) {\n audVerified = requiredAudience.indexOf(aud) > -1;\n }\n else {\n audVerified = aud === requiredAudience;\n }\n if (!audVerified) {\n throw new Error('Wrong recipient, payload audience != requiredAudience');\n }\n }\n return new loginticket_1.LoginTicket(envelope, payload);\n }\n /**\n * Returns a promise that resolves with AccessTokenResponse type if\n * refreshHandler is defined.\n * If not, nothing is returned.\n */\n async processAndValidateRefreshHandler() {\n if (this.refreshHandler) {\n const accessTokenResponse = await this.refreshHandler();\n if (!accessTokenResponse.access_token) {\n throw new Error('No access token is returned by the refreshHandler callback.');\n }\n return accessTokenResponse;\n }\n return;\n }\n /**\n * Returns true if a token is expired or will expire within\n * eagerRefreshThresholdMillismilliseconds.\n * If there is no expiry time, assumes the token is not expired or 
expiring.\n */\n isTokenExpiring() {\n const expiryDate = this.credentials.expiry_date;\n return expiryDate\n ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.OAuth2Client = OAuth2Client;\nOAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo';\n/**\n * The base URL for auth endpoints.\n */\nOAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_ = 'https://accounts.google.com/o/oauth2/v2/auth';\n/**\n * The base endpoint for token retrieval.\n */\nOAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_ = 'https://oauth2.googleapis.com/token';\n/**\n * The base endpoint to revoke tokens.\n */\nOAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_ = 'https://oauth2.googleapis.com/revoke';\n/**\n * Google Sign on certificates in PEM format.\n */\nOAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v1/certs';\n/**\n * Google Sign on certificates in JWK format.\n */\nOAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v3/certs';\n/**\n * Google Sign on certificates in JWK format.\n */\nOAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_ = 'https://www.gstatic.com/iap/verify/public_key';\n/**\n * Clock skew - five minutes in seconds\n */\nOAuth2Client.CLOCK_SKEW_SECS_ = 300;\n/**\n * Max Token Lifetime is one day in seconds\n */\nOAuth2Client.MAX_TOKEN_LIFETIME_SECS_ = 86400;\n/**\n * The allowed oauth token issuers.\n */\nOAuth2Client.ISSUERS_ = [\n 'accounts.google.com',\n 'https://accounts.google.com',\n];\n//# sourceMappingURL=oauth2client.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0;\nconst querystring = require(\"querystring\");\nconst crypto_1 = require(\"../crypto/crypto\");\n/** List of HTTP methods that accept request bodies. 
*/\nconst METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH'];\n/**\n * Abstract class for handling client authentication in OAuth-based\n * operations.\n * When request-body client authentication is used, only application/json and\n * application/x-www-form-urlencoded content types for HTTP methods that support\n * request bodies are supported.\n */\nclass OAuthClientAuthHandler {\n /**\n * Instantiates an OAuth client authentication handler.\n * @param clientAuthentication The client auth credentials.\n */\n constructor(clientAuthentication) {\n this.clientAuthentication = clientAuthentication;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Applies client authentication on the OAuth request's headers or POST\n * body but does not process the request.\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n applyClientAuthenticationOptions(opts, bearerToken) {\n // Inject authenticated header.\n this.injectAuthenticatedHeaders(opts, bearerToken);\n // Inject authenticated request body.\n if (!bearerToken) {\n this.injectAuthenticatedRequestBody(opts);\n }\n }\n /**\n * Applies client authentication on the request's header if either\n * basic authentication or bearer token authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n injectAuthenticatedHeaders(opts, bearerToken) {\n var _a;\n // Bearer token prioritized higher than basic Auth.\n if (bearerToken) {\n opts.headers = opts.headers || {};\n Object.assign(opts.headers, {\n Authorization: `Bearer ${bearerToken}}`,\n });\n }\n else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') {\n opts.headers = opts.headers || {};\n const clientId = this.clientAuthentication.clientId;\n const clientSecret = this.clientAuthentication.clientSecret || '';\n const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`);\n Object.assign(opts.headers, {\n Authorization: `Basic ${base64EncodedCreds}`,\n });\n }\n }\n /**\n * Applies client authentication on the request's body if request-body\n * client authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n */\n injectAuthenticatedRequestBody(opts) {\n var _a;\n if (((_a = this.clientAuthentication) === null || _a === void 0 ? 
void 0 : _a.confidentialClientType) === 'request-body') {\n const method = (opts.method || 'GET').toUpperCase();\n // Inject authenticated request body.\n if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) {\n // Get content-type.\n let contentType;\n const headers = opts.headers || {};\n for (const key in headers) {\n if (key.toLowerCase() === 'content-type' && headers[key]) {\n contentType = headers[key].toLowerCase();\n break;\n }\n }\n if (contentType === 'application/x-www-form-urlencoded') {\n opts.data = opts.data || '';\n const data = querystring.parse(opts.data);\n Object.assign(data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n opts.data = querystring.stringify(data);\n }\n else if (contentType === 'application/json') {\n opts.data = opts.data || {};\n Object.assign(opts.data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n }\n else {\n throw new Error(`${contentType} content-types are not supported with ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n else {\n throw new Error(`${method} HTTP method does not support ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n }\n}\nexports.OAuthClientAuthHandler = OAuthClientAuthHandler;\n/**\n * Converts an OAuth error response to a native JavaScript Error.\n * @param resp The OAuth error response to convert to a native Error object.\n * @param err The optional original error. If provided, the error properties\n * will be copied to the new error.\n * @return The converted native Error object.\n */\nfunction getErrorFromOAuthErrorResponse(resp, err) {\n // Error response.\n const errorCode = resp.error;\n const errorDescription = resp.error_description;\n const errorUri = resp.error_uri;\n let message = `Error code ${errorCode}`;\n if (typeof errorDescription !== 'undefined') {\n message += `: ${errorDescription}`;\n }\n if (typeof errorUri !== 'undefined') {\n message += ` - ${errorUri}`;\n }\n const newError = new Error(message);\n // Copy properties from original error to newly generated error.\n if (err) {\n const keys = Object.keys(err);\n if (err.stack) {\n // Copy error.stack if available.\n keys.push('stack');\n }\n keys.forEach(key => {\n // Do not overwrite the message field.\n if (key !== 'message') {\n Object.defineProperty(newError, key, {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n value: err[key],\n writable: false,\n enumerable: true,\n });\n }\n });\n }\n return newError;\n}\nexports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse;\n//# sourceMappingURL=oauth2common.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthClient = exports.ExecutableError = void 0;\nconst 
baseexternalclient_1 = require(\"./baseexternalclient\");\nconst executable_response_1 = require(\"./executable-response\");\nconst pluggable_auth_handler_1 = require(\"./pluggable-auth-handler\");\n/**\n * Error thrown from the executable run by PluggableAuthClient.\n */\nclass ExecutableError extends Error {\n constructor(message, code) {\n super(`The executable failed with exit code: ${code} and error message: ${message}.`);\n this.code = code;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableError = ExecutableError;\n/**\n * The default executable timeout when none is provided, in milliseconds.\n */\nconst DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000;\n/**\n * The minimum allowed executable timeout in milliseconds.\n */\nconst MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000;\n/**\n * The maximum allowed executable timeout in milliseconds.\n */\nconst MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000;\n/**\n * The environment variable to check to see if executable can be run.\n * Value must be set to '1' for the executable to run.\n */\nconst GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES';\n/**\n * The maximum currently supported executable version.\n */\nconst MAXIMUM_EXECUTABLE_VERSION = 1;\n/**\n * PluggableAuthClient enables the exchange of workload identity pool external credentials for\n * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These\n * scripts/executables are completely independent of the Google Cloud Auth libraries. These\n * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token\n * to be exchanged for a Google access token.\n *\n *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable\n * must be set to '1'. This is for security reasons.\n *\n *
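Illustrative note (not part of the bundled library code): in a local test or CI harness, the opt-in described above amounts to a single environment assignment before the client runs; the variable name comes from the documentation above, the surrounding setup is hypothetical.

// Hypothetical test setup: explicitly allow Pluggable Auth executables to run.
// Without this flag, retrieveSubjectToken() throws an explanatory error.
process.env.GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = '1';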

Both OIDC and SAML are supported. The executable must adhere to a specific response format\n * defined below.\n *\n *

The executable must print out the 3rd party token to STDOUT in JSON format. When an\n * output_file is specified in the credential configuration, the executable must also handle writing the\n * JSON response to this file.\n *\n *

\n * OIDC response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:id_token\",\n *   \"id_token\": \"HEADER.PAYLOAD.SIGNATURE\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * SAML2 response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:saml2\",\n *   \"saml_response\": \"...\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * Error response sample:\n * {\n *   \"version\": 1,\n *   \"success\": false,\n *   \"code\": \"401\",\n *   \"message\": \"Error message.\"\n * }\n * 
\n *\n *
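A minimal sketch, not part of the bundled output and assuming the executable is itself implemented in Node.js: it emits the OIDC response format documented above on STDOUT. The id_token value is a placeholder, and the one-hour expiry is an arbitrary choice for illustration.

#!/usr/bin/env node
// Sketch of a Pluggable Auth executable emitting the documented OIDC format.
// The id_token below is a placeholder, not a real token.
const response = {
  version: 1,
  success: true,
  token_type: 'urn:ietf:params:oauth:token-type:id_token',
  id_token: 'HEADER.PAYLOAD.SIGNATURE',
  // Epoch seconds; required when an output_file is configured.
  expiration_time: Math.floor(Date.now() / 1000) + 3600,
};
process.stdout.write(JSON.stringify(response));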

The \"expiration_time\" field in the JSON response is only required for successful\n * responses when an output file was specified in the credential configuration\n *\n *

The auth libraries will populate certain environment variables that will be accessible by the\n * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,\n * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and\n * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.\n *\n *
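Another illustrative sketch (hypothetical executable code, not part of this bundle, again assuming Node.js): it reads the environment variables listed above and honours the optional output file. buildResponse() is a placeholder for whatever identity-provider call a real executable would make, and the returned token is a dummy OIDC-shaped value.

#!/usr/bin/env node
// Sketch: read the variables the auth library populates (names from the
// documentation above), produce a response, and cache it if requested.
const fs = require('fs');

// Placeholder implementation: a real executable would obtain a token for
// `audience` from its identity provider and report the matching token type.
function buildResponse(audience, tokenType) {
  return {
    version: 1,
    success: true,
    token_type: tokenType,
    id_token: 'HEADER.PAYLOAD.SIGNATURE',
    expiration_time: Math.floor(Date.now() / 1000) + 3600,
  };
}

const audience = process.env.GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE;
const tokenType = process.env.GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE;
const outputFile = process.env.GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE;

const json = JSON.stringify(buildResponse(audience, tokenType));
if (outputFile) {
  // Cache the response so later runs can reuse it until expiration_time.
  fs.writeFileSync(outputFile, json);
}
process.stdout.write(json);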

Please see this repositories README for a complete executable request/response specification.\n */\nclass PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates a PluggableAuthClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid pluggable auth credential.\n * @param options The external account options object typically loaded from\n * the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n if (!options.credential_source.executable) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n this.command = options.credential_source.executable.command;\n if (!this.command) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n // Check if the provided timeout exists and if it is valid.\n if (options.credential_source.executable.timeout_millis === undefined) {\n this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS;\n }\n else {\n this.timeoutMillis = options.credential_source.executable.timeout_millis;\n if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS ||\n this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) {\n throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` +\n `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`);\n }\n }\n this.outputFile = options.credential_source.executable.output_file;\n this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({\n command: this.command,\n timeoutMillis: this.timeoutMillis,\n outputFile: this.outputFile,\n });\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this calls a user provided executable which returns the subject token.\n * The logic is summarized as:\n * 1. Validated that the executable is allowed to run. The\n * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to\n * 1 for security reasons.\n * 2. If an output file is specified by the user, check the file location\n * for a response. If the file exists and contains a valid response,\n * return the subject token from the file.\n * 3. 
Call the provided executable and return response.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Check if the executable is allowed to run.\n if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') {\n throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' +\n 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' +\n 'Variable to 1.');\n }\n let executableResponse = undefined;\n // Try to get cached executable response from output file.\n if (this.outputFile) {\n executableResponse = await this.handler.retrieveCachedResponse();\n }\n // If no response from output file, call the executable.\n if (!executableResponse) {\n // Set up environment map with required values for the executable.\n const envMap = new Map();\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience);\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType);\n // Always set to 0 because interactive mode is not supported.\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0');\n if (this.outputFile) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile);\n }\n const serviceAccountEmail = this.getServiceAccountEmail();\n if (serviceAccountEmail) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail);\n }\n executableResponse = await this.handler.retrieveResponseFromExecutable(envMap);\n }\n if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) {\n throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`);\n }\n // Check that response was successful.\n if (!executableResponse.success) {\n throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode);\n }\n // Check that response contains expiration time if output file was specified.\n if (this.outputFile) {\n if (!executableResponse.expirationTime) {\n throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.');\n }\n }\n // Check that response is not expired.\n if (executableResponse.isExpired()) {\n throw new Error('Executable response is expired.');\n }\n // Return subject token from response.\n return executableResponse.subjectToken;\n }\n}\nexports.PluggableAuthClient = PluggableAuthClient;\n//# sourceMappingURL=pluggable-auth-client.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthHandler = void 0;\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\nconst executable_response_1 = require(\"./executable-response\");\nconst childProcess = require(\"child_process\");\nconst fs = require(\"fs\");\n/**\n * A handler used to retrieve 3rd party token 
responses from user defined\n * executables and cached file output for the PluggableAuthClient class.\n */\nclass PluggableAuthHandler {\n /**\n * Instantiates a PluggableAuthHandler instance using the provided\n * PluggableAuthHandlerOptions object.\n */\n constructor(options) {\n if (!options.command) {\n throw new Error('No command provided.');\n }\n this.commandComponents = PluggableAuthHandler.parseCommand(options.command);\n this.timeoutMillis = options.timeoutMillis;\n if (!this.timeoutMillis) {\n throw new Error('No timeoutMillis provided.');\n }\n this.outputFile = options.outputFile;\n }\n /**\n * Calls user provided executable to get a 3rd party subject token and\n * returns the response.\n * @param envMap a Map of additional Environment Variables required for\n * the executable.\n * @return A promise that resolves with the executable response.\n */\n retrieveResponseFromExecutable(envMap) {\n return new Promise((resolve, reject) => {\n // Spawn process to run executable using added environment variables.\n const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), {\n env: { ...process.env, ...Object.fromEntries(envMap) },\n });\n let output = '';\n // Append stdout to output as executable runs.\n child.stdout.on('data', (data) => {\n output += data;\n });\n // Append stderr as executable runs.\n child.stderr.on('data', (err) => {\n output += err;\n });\n // Set up a timeout to end the child process and throw an error.\n const timeout = setTimeout(() => {\n // Kill child process and remove listeners so 'close' event doesn't get\n // read after child process is killed.\n child.removeAllListeners();\n child.kill();\n return reject(new Error('The executable failed to finish within the timeout specified.'));\n }, this.timeoutMillis);\n child.on('close', (code) => {\n // Cancel timeout if executable closes before timeout is reached.\n clearTimeout(timeout);\n if (code === 0) {\n // If the executable completed successfully, try to return the parsed response.\n try {\n const responseJson = JSON.parse(output);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n return resolve(response);\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n return reject(error);\n }\n return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`));\n }\n }\n else {\n return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString()));\n }\n });\n });\n }\n /**\n * Checks user provided output file for response from previous run of\n * executable and return the response if it exists, is formatted correctly, and is not expired.\n */\n async retrieveCachedResponse() {\n if (!this.outputFile || this.outputFile.length === 0) {\n return undefined;\n }\n let filePath;\n try {\n filePath = await fs.promises.realpath(this.outputFile);\n }\n catch (_a) {\n // If file path cannot be resolved, return undefined.\n return undefined;\n }\n if (!(await fs.promises.lstat(filePath)).isFile()) {\n // If path does not lead to file, return undefined.\n return undefined;\n }\n const responseString = await fs.promises.readFile(filePath, {\n encoding: 'utf8',\n });\n if (responseString === '') {\n return undefined;\n }\n try {\n const responseJson = JSON.parse(responseString);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n // Check if response is successful and unexpired.\n if (response.isValid()) {\n return new 
executable_response_1.ExecutableResponse(responseJson);\n }\n return undefined;\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n throw error;\n }\n throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`);\n }\n }\n /**\n * Parses given command string into component array, splitting on spaces unless\n * spaces are between quotation marks.\n */\n static parseCommand(command) {\n // Split the command into components by splitting on spaces,\n // unless spaces are contained in quotation marks.\n const components = command.match(/(?:[^\\s\"]+|\"[^\"]*\")+/g);\n if (!components) {\n throw new Error(`Provided command: \"${command}\" could not be parsed.`);\n }\n // Remove quotation marks from the beginning and end of each component if they are present.\n for (let i = 0; i < components.length; i++) {\n if (components[i][0] === '\"' && components[i].slice(-1) === '\"') {\n components[i] = components[i].slice(1, -1);\n }\n }\n return components;\n }\n}\nexports.PluggableAuthHandler = PluggableAuthHandler;\n//# sourceMappingURL=pluggable-auth-handler.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.UserRefreshClient = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nclass UserRefreshClient extends oauth2client_1.OAuth2Client {\n constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) {\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? 
optionsOrClientId\n : {\n clientId: optionsOrClientId,\n clientSecret,\n refreshToken,\n eagerRefreshThresholdMillis,\n forceRefreshOnFailure,\n };\n super({\n clientId: opts.clientId,\n clientSecret: opts.clientSecret,\n eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis,\n forceRefreshOnFailure: opts.forceRefreshOnFailure,\n });\n this._refreshToken = opts.refreshToken;\n this.credentials.refresh_token = opts.refreshToken;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken An ignored refreshToken..\n * @param callback Optional callback.\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n return super.refreshTokenNoCache(this._refreshToken);\n }\n /**\n * Create a UserRefreshClient credentials instance using the given input\n * options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the user refresh token');\n }\n if (json.type !== 'authorized_user') {\n throw new Error('The incoming JSON object does not have the \"authorized_user\" type');\n }\n if (!json.client_id) {\n throw new Error('The incoming JSON object does not contain a client_id field');\n }\n if (!json.client_secret) {\n throw new Error('The incoming JSON object does not contain a client_secret field');\n }\n if (!json.refresh_token) {\n throw new Error('The incoming JSON object does not contain a refresh_token field');\n }\n this._clientId = json.client_id;\n this._clientSecret = json.client_secret;\n this._refreshToken = json.refresh_token;\n this.credentials.refresh_token = json.refresh_token;\n this.quotaProjectId = json.quota_project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n async fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n return reject(new Error('Must pass in a stream containing the user refresh token.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n return resolve();\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n}\nexports.UserRefreshClient = UserRefreshClient;\n//# sourceMappingURL=refreshclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.StsCredentials = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst transporters_1 = require(\"../transporters\");\nconst oauth2common_1 = require(\"./oauth2common\");\n/**\n * Implements the OAuth 2.0 token exchange based on\n * https://tools.ietf.org/html/rfc8693\n */\nclass StsCredentials extends oauth2common_1.OAuthClientAuthHandler {\n 
/**\n * Initializes an STS credentials instance.\n * @param tokenExchangeEndpoint The token exchange endpoint.\n * @param clientAuthentication The client authentication credentials if\n * available.\n */\n constructor(tokenExchangeEndpoint, clientAuthentication) {\n super(clientAuthentication);\n this.tokenExchangeEndpoint = tokenExchangeEndpoint;\n this.transporter = new transporters_1.DefaultTransporter();\n }\n /**\n * Exchanges the provided token for another type of token based on the\n * rfc8693 spec.\n * @param stsCredentialsOptions The token exchange options used to populate\n * the token exchange request.\n * @param additionalHeaders Optional additional headers to pass along the\n * request.\n * @param options Optional additional GCP-specific non-spec defined options\n * to send with the request.\n * Example: `&options=${encodeUriComponent(JSON.stringified(options))}`\n * @return A promise that resolves with the token exchange response containing\n * the requested token and its expiration time.\n */\n async exchangeToken(stsCredentialsOptions, additionalHeaders, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options) {\n var _a, _b, _c;\n const values = {\n grant_type: stsCredentialsOptions.grantType,\n resource: stsCredentialsOptions.resource,\n audience: stsCredentialsOptions.audience,\n scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '),\n requested_token_type: stsCredentialsOptions.requestedTokenType,\n subject_token: stsCredentialsOptions.subjectToken,\n subject_token_type: stsCredentialsOptions.subjectTokenType,\n actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken,\n actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? 
void 0 : _c.actorTokenType,\n // Non-standard GCP-specific options.\n options: options && JSON.stringify(options),\n };\n // Remove undefined fields.\n Object.keys(values).forEach(key => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof values[key] === 'undefined') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delete values[key];\n }\n });\n const headers = {\n 'Content-Type': 'application/x-www-form-urlencoded',\n };\n // Inject additional STS headers if available.\n Object.assign(headers, additionalHeaders || {});\n const opts = {\n url: this.tokenExchangeEndpoint,\n method: 'POST',\n headers,\n data: querystring.stringify(values),\n responseType: 'json',\n };\n // Apply OAuth client authentication.\n this.applyClientAuthenticationOptions(opts);\n try {\n const response = await this.transporter.request(opts);\n // Successful response.\n const stsSuccessfulResponse = response.data;\n stsSuccessfulResponse.res = response;\n return stsSuccessfulResponse;\n }\n catch (error) {\n // Translate error to OAuthError.\n if (error instanceof gaxios_1.GaxiosError && error.response) {\n throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, \n // Preserve other fields from the original error.\n error);\n }\n // Request could fail before the server responds.\n throw error;\n }\n }\n}\nexports.StsCredentials = StsCredentials;\n//# sourceMappingURL=stscredentials.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BrowserCrypto = void 0;\n// This file implements crypto functions we need using in-browser\n// SubtleCrypto interface `window.crypto.subtle`.\nconst base64js = require(\"base64-js\");\n// Not all browsers support `TextEncoder`. The following `require` will\n// provide a fast UTF8-only replacement for those browsers that don't support\n// text encoding natively.\n// eslint-disable-next-line node/no-unsupported-features/node-builtins\nif (typeof process === 'undefined' && typeof TextEncoder === 'undefined') {\n require('fast-text-encoding');\n}\nconst crypto_1 = require(\"../crypto\");\nclass BrowserCrypto {\n constructor() {\n if (typeof window === 'undefined' ||\n window.crypto === undefined ||\n window.crypto.subtle === undefined) {\n throw new Error(\"SubtleCrypto not found. 
Make sure it's an https:// website.\");\n }\n }\n async sha256DigestBase64(str) {\n // SubtleCrypto digest() method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return base64js.fromByteArray(new Uint8Array(outputBuffer));\n }\n randomBytesBase64(count) {\n const array = new Uint8Array(count);\n window.crypto.getRandomValues(array);\n return base64js.fromByteArray(array);\n }\n static padBase64(base64) {\n // base64js requires padding, so let's add some '='\n while (base64.length % 4 !== 0) {\n base64 += '=';\n }\n return base64;\n }\n async verify(pubkey, data, signature) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature));\n const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']);\n // SubtleCrypto's verify method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray);\n return result;\n }\n async sign(privateKey, data) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']);\n // SubtleCrypto's sign method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray);\n return base64js.fromByteArray(new Uint8Array(result));\n }\n decodeBase64StringUtf8(base64) {\n const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const result = new TextDecoder().decode(uint8array);\n return result;\n }\n encodeBase64StringUtf8(text) {\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const uint8array = new TextEncoder().encode(text);\n const result = base64js.fromByteArray(uint8array);\n return result;\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n // SubtleCrypto digest() method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return (0, crypto_1.fromArrayBufferToHex)(outputBuffer);\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n 
* @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n // Convert key, if provided in ArrayBuffer format, to string.\n const rawKey = typeof key === 'string'\n ? key\n : String.fromCharCode(...new Uint16Array(key));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const enc = new TextEncoder();\n const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), {\n name: 'HMAC',\n hash: {\n name: 'SHA-256',\n },\n }, false, ['sign']);\n return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg));\n }\n}\nexports.BrowserCrypto = BrowserCrypto;\n//# sourceMappingURL=crypto.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0;\nconst crypto_1 = require(\"./browser/crypto\");\nconst crypto_2 = require(\"./node/crypto\");\nfunction createCrypto() {\n if (hasBrowserCrypto()) {\n return new crypto_1.BrowserCrypto();\n }\n return new crypto_2.NodeCrypto();\n}\nexports.createCrypto = createCrypto;\nfunction hasBrowserCrypto() {\n return (typeof window !== 'undefined' &&\n typeof window.crypto !== 'undefined' &&\n typeof window.crypto.subtle !== 'undefined');\n}\nexports.hasBrowserCrypto = hasBrowserCrypto;\n/**\n * Converts an ArrayBuffer to a hexadecimal string.\n * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string.\n * @return The hexadecimal encoding of the ArrayBuffer.\n */\nfunction fromArrayBufferToHex(arrayBuffer) {\n // Convert buffer to byte array.\n const byteArray = Array.from(new Uint8Array(arrayBuffer));\n // Convert bytes to hex string.\n return byteArray\n .map(byte => {\n return byte.toString(16).padStart(2, '0');\n })\n .join('');\n}\nexports.fromArrayBufferToHex = fromArrayBufferToHex;\n//# sourceMappingURL=crypto.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NodeCrypto = void 0;\nconst crypto = require(\"crypto\");\nclass NodeCrypto {\n async sha256DigestBase64(str) {\n return crypto.createHash('sha256').update(str).digest('base64');\n }\n randomBytesBase64(count) {\n return crypto.randomBytes(count).toString('base64');\n 
}\n async verify(pubkey, data, signature) {\n const verifier = crypto.createVerify('sha256');\n verifier.update(data);\n verifier.end();\n return verifier.verify(pubkey, signature, 'base64');\n }\n async sign(privateKey, data) {\n const signer = crypto.createSign('RSA-SHA256');\n signer.update(data);\n signer.end();\n return signer.sign(privateKey, 'base64');\n }\n decodeBase64StringUtf8(base64) {\n return Buffer.from(base64, 'base64').toString('utf-8');\n }\n encodeBase64StringUtf8(text) {\n return Buffer.from(text, 'utf-8').toString('base64');\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n return crypto.createHash('sha256').update(str).digest('hex');\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n * @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n const cryptoKey = typeof key === 'string' ? key : toBuffer(key);\n return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest());\n }\n}\nexports.NodeCrypto = NodeCrypto;\n/**\n * Converts a Node.js Buffer to an ArrayBuffer.\n * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer\n * @param buffer The Buffer input to covert.\n * @return The ArrayBuffer representation of the input.\n */\nfunction toArrayBuffer(buffer) {\n return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);\n}\n/**\n * Converts an ArrayBuffer to a Node.js Buffer.\n * @param arrayBuffer The ArrayBuffer input to covert.\n * @return The Buffer representation of the input.\n */\nfunction toBuffer(arrayBuffer) {\n return Buffer.from(arrayBuffer);\n}\n//# sourceMappingURL=crypto.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.AuthClient = void 0;\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nconst googleauth_1 = require(\"./auth/googleauth\");\nObject.defineProperty(exports, \"GoogleAuth\", { enumerable: true, get: function () { return googleauth_1.GoogleAuth; } });\nvar authclient_1 = require(\"./auth/authclient\");\nObject.defineProperty(exports, \"AuthClient\", { 
enumerable: true, get: function () { return authclient_1.AuthClient; } });\nvar computeclient_1 = require(\"./auth/computeclient\");\nObject.defineProperty(exports, \"Compute\", { enumerable: true, get: function () { return computeclient_1.Compute; } });\nvar envDetect_1 = require(\"./auth/envDetect\");\nObject.defineProperty(exports, \"GCPEnv\", { enumerable: true, get: function () { return envDetect_1.GCPEnv; } });\nvar iam_1 = require(\"./auth/iam\");\nObject.defineProperty(exports, \"IAMAuth\", { enumerable: true, get: function () { return iam_1.IAMAuth; } });\nvar idtokenclient_1 = require(\"./auth/idtokenclient\");\nObject.defineProperty(exports, \"IdTokenClient\", { enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } });\nvar jwtaccess_1 = require(\"./auth/jwtaccess\");\nObject.defineProperty(exports, \"JWTAccess\", { enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } });\nvar jwtclient_1 = require(\"./auth/jwtclient\");\nObject.defineProperty(exports, \"JWT\", { enumerable: true, get: function () { return jwtclient_1.JWT; } });\nvar impersonated_1 = require(\"./auth/impersonated\");\nObject.defineProperty(exports, \"Impersonated\", { enumerable: true, get: function () { return impersonated_1.Impersonated; } });\nvar oauth2client_1 = require(\"./auth/oauth2client\");\nObject.defineProperty(exports, \"CodeChallengeMethod\", { enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } });\nObject.defineProperty(exports, \"OAuth2Client\", { enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } });\nvar loginticket_1 = require(\"./auth/loginticket\");\nObject.defineProperty(exports, \"LoginTicket\", { enumerable: true, get: function () { return loginticket_1.LoginTicket; } });\nvar refreshclient_1 = require(\"./auth/refreshclient\");\nObject.defineProperty(exports, \"UserRefreshClient\", { enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } });\nvar awsclient_1 = require(\"./auth/awsclient\");\nObject.defineProperty(exports, \"AwsClient\", { enumerable: true, get: function () { return awsclient_1.AwsClient; } });\nvar identitypoolclient_1 = require(\"./auth/identitypoolclient\");\nObject.defineProperty(exports, \"IdentityPoolClient\", { enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } });\nvar externalclient_1 = require(\"./auth/externalclient\");\nObject.defineProperty(exports, \"ExternalAccountClient\", { enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } });\nvar baseexternalclient_1 = require(\"./auth/baseexternalclient\");\nObject.defineProperty(exports, \"BaseExternalAccountClient\", { enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } });\nvar downscopedclient_1 = require(\"./auth/downscopedclient\");\nObject.defineProperty(exports, \"DownscopedClient\", { enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } });\nvar pluggable_auth_client_1 = require(\"./auth/pluggable-auth-client\");\nObject.defineProperty(exports, \"PluggableAuthClient\", { enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } });\nvar transporters_1 = require(\"./transporters\");\nObject.defineProperty(exports, \"DefaultTransporter\", { enumerable: true, get: function () { return transporters_1.DefaultTransporter; } });\nconst auth = new googleauth_1.GoogleAuth();\nexports.auth = auth;\n//# 
sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validate = void 0;\n// Accepts an options object passed from the user to the API. In the\n// previous version of the API, it referred to a `Request` options object.\n// Now it refers to an Axiox Request Config object. This is here to help\n// ensure users don't pass invalid options when they upgrade from 0.x to 1.x.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction validate(options) {\n const vpairs = [\n { invalid: 'uri', expected: 'url' },\n { invalid: 'json', expected: 'data' },\n { invalid: 'qs', expected: 'params' },\n ];\n for (const pair of vpairs) {\n if (options[pair.invalid]) {\n const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`;\n throw new Error(e);\n }\n }\n}\nexports.validate = validate;\n//# sourceMappingURL=options.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DefaultTransporter = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst options_1 = require(\"./options\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst pkg = require('../../package.json');\nconst PRODUCT_NAME = 'google-api-nodejs-client';\nclass DefaultTransporter {\n /**\n * Configures request options before making a request.\n * @param opts GaxiosOptions options.\n * @return Configured options.\n */\n configure(opts = {}) {\n opts.headers = opts.headers || {};\n if (typeof window === 'undefined') {\n // set transporter user agent if not in browser\n const uaValue = opts.headers['User-Agent'];\n if (!uaValue) {\n opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT;\n }\n else if (!uaValue.includes(`${PRODUCT_NAME}/`)) {\n opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`;\n }\n // track google-auth-library-nodejs version:\n const authVersion = `auth/${pkg.version}`;\n if (opts.headers['x-goog-api-client'] &&\n !opts.headers['x-goog-api-client'].includes(authVersion)) {\n opts.headers['x-goog-api-client'] = `${opts.headers['x-goog-api-client']} ${authVersion}`;\n }\n else if 
(!opts.headers['x-goog-api-client']) {\n const nodeVersion = process.version.replace(/^v/, '');\n opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion} ${authVersion}`;\n }\n }\n return opts;\n }\n request(opts, callback) {\n // ensure the user isn't passing in request-style options\n opts = this.configure(opts);\n try {\n (0, options_1.validate)(opts);\n }\n catch (e) {\n if (callback) {\n return callback(e);\n }\n else {\n throw e;\n }\n }\n if (callback) {\n (0, gaxios_1.request)(opts).then(r => {\n callback(null, r);\n }, e => {\n callback(this.processError(e));\n });\n }\n else {\n return (0, gaxios_1.request)(opts).catch(e => {\n throw this.processError(e);\n });\n }\n }\n /**\n * Changes the error to include details from the body.\n */\n processError(e) {\n const res = e.response;\n const err = e;\n const body = res ? res.data : null;\n if (res && body && body.error && res.status !== 200) {\n if (typeof body.error === 'string') {\n err.message = body.error;\n err.code = res.status.toString();\n }\n else if (Array.isArray(body.error.errors)) {\n err.message = body.error.errors\n .map((err2) => err2.message)\n .join('\\n');\n err.code = body.error.code;\n err.errors = body.error.errors;\n }\n else {\n err.message = body.error.message;\n err.code = body.error.code || res.status;\n }\n }\n else if (res && res.status >= 400) {\n // Consider all 4xx and 5xx responses errors.\n err.message = body;\n err.code = res.status.toString();\n }\n return err;\n }\n}\nexports.DefaultTransporter = DefaultTransporter;\n/**\n * Default user agent.\n */\nDefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`;\n//# sourceMappingURL=transporters.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getPem = void 0;\nconst fs = require(\"fs\");\nconst forge = require(\"node-forge\");\nconst util_1 = require(\"util\");\nconst readFile = (0, util_1.promisify)(fs.readFile);\nfunction getPem(filename, callback) {\n if (callback) {\n getPemAsync(filename)\n .then(pem => callback(null, pem))\n .catch(err => callback(err, null));\n }\n else {\n return getPemAsync(filename);\n }\n}\nexports.getPem = getPem;\nfunction getPemAsync(filename) {\n return readFile(filename, { encoding: 'base64' }).then(keyp12 => {\n return convertToPem(keyp12);\n });\n}\n/**\n * Converts a P12 in base64 encoding to a pem.\n * @param p12base64 String containing base64 encoded p12.\n * @returns a string containing the pem.\n */\nfunction convertToPem(p12base64) {\n const p12Der = forge.util.decode64(p12base64);\n const p12Asn1 = forge.asn1.fromDer(p12Der);\n const p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, 'notasecret');\n const bags = p12.getBags({ friendlyName: 'privatekey' });\n if (bags.friendlyName) {\n const privateKey = bags.friendlyName[0].key;\n const pem = forge.pki.privateKeyToPem(privateKey);\n return pem.replace(/\\r\\n/g, '\\n');\n }\n else {\n throw new Error('Unable to get friendly name.');\n }\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleToken = void 0;\nconst fs = require(\"fs\");\nconst gaxios_1 = require(\"gaxios\");\nconst jws = require(\"jws\");\nconst 
path = require(\"path\");\nconst util_1 = require(\"util\");\nconst readFile = fs.readFile\n ? (0, util_1.promisify)(fs.readFile)\n : async () => {\n // if running in the web-browser, fs.readFile may not have been shimmed.\n throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS');\n };\nconst GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token';\nconst GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token=';\nclass ErrorWithCode extends Error {\n constructor(message, code) {\n super(message);\n this.code = code;\n }\n}\nlet getPem;\nclass GoogleToken {\n /**\n * Create a GoogleToken.\n *\n * @param options Configuration object.\n */\n constructor(options) {\n this.transporter = {\n request: opts => (0, gaxios_1.request)(opts),\n };\n this.configure(options);\n }\n get accessToken() {\n return this.rawToken ? this.rawToken.access_token : undefined;\n }\n get idToken() {\n return this.rawToken ? this.rawToken.id_token : undefined;\n }\n get tokenType() {\n return this.rawToken ? this.rawToken.token_type : undefined;\n }\n get refreshToken() {\n return this.rawToken ? this.rawToken.refresh_token : undefined;\n }\n /**\n * Returns whether the token has expired.\n *\n * @return true if the token has expired, false otherwise.\n */\n hasExpired() {\n const now = new Date().getTime();\n if (this.rawToken && this.expiresAt) {\n return now >= this.expiresAt;\n }\n else {\n return true;\n }\n }\n /**\n * Returns whether the token will expire within eagerRefreshThresholdMillis\n *\n * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise.\n */\n isTokenExpiring() {\n var _a;\n const now = new Date().getTime();\n const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0;\n if (this.rawToken && this.expiresAt) {\n return this.expiresAt <= now + eagerRefreshThresholdMillis;\n }\n else {\n return true;\n }\n }\n getToken(callback, opts = {}) {\n if (typeof callback === 'object') {\n opts = callback;\n callback = undefined;\n }\n opts = Object.assign({\n forceRefresh: false,\n }, opts);\n if (callback) {\n const cb = callback;\n this.getTokenAsync(opts).then(t => cb(null, t), callback);\n return;\n }\n return this.getTokenAsync(opts);\n }\n /**\n * Given a keyFile, extract the key and client email if available\n * @param keyFile Path to a json, pem, or p12 file that contains the key.\n * @returns an object with privateKey and clientEmail properties\n */\n async getCredentials(keyFile) {\n const ext = path.extname(keyFile);\n switch (ext) {\n case '.json': {\n const key = await readFile(keyFile, 'utf8');\n const body = JSON.parse(key);\n const privateKey = body.private_key;\n const clientEmail = body.client_email;\n if (!privateKey || !clientEmail) {\n throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS');\n }\n return { privateKey, clientEmail };\n }\n case '.der':\n case '.crt':\n case '.pem': {\n const privateKey = await readFile(keyFile, 'utf8');\n return { privateKey };\n }\n case '.p12':\n case '.pfx': {\n // NOTE: The loading of `google-p12-pem` is deferred for performance\n // reasons. The `node-forge` npm module in `google-p12-pem` adds a fair\n // bit time to overall module loading, and is likely not frequently\n // used. 
In a future release, p12 support will be entirely removed.\n if (!getPem) {\n getPem = (await Promise.resolve().then(() => require('google-p12-pem'))).getPem;\n }\n const privateKey = await getPem(keyFile);\n return { privateKey };\n }\n default:\n throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' +\n 'Current supported extensions are *.json, *.pem, and *.p12.', 'UNKNOWN_CERTIFICATE_TYPE');\n }\n }\n async getTokenAsync(opts) {\n if (this.inFlightRequest && !opts.forceRefresh) {\n return this.inFlightRequest;\n }\n try {\n return await (this.inFlightRequest = this.getTokenAsyncInner(opts));\n }\n finally {\n this.inFlightRequest = undefined;\n }\n }\n async getTokenAsyncInner(opts) {\n if (this.isTokenExpiring() === false && opts.forceRefresh === false) {\n return Promise.resolve(this.rawToken);\n }\n if (!this.key && !this.keyFile) {\n throw new Error('No key or keyFile set.');\n }\n if (!this.key && this.keyFile) {\n const creds = await this.getCredentials(this.keyFile);\n this.key = creds.privateKey;\n this.iss = creds.clientEmail || this.iss;\n if (!creds.clientEmail) {\n this.ensureEmail();\n }\n }\n return this.requestToken();\n }\n ensureEmail() {\n if (!this.iss) {\n throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS');\n }\n }\n revokeToken(callback) {\n if (callback) {\n this.revokeTokenAsync().then(() => callback(), callback);\n return;\n }\n return this.revokeTokenAsync();\n }\n async revokeTokenAsync() {\n if (!this.accessToken) {\n throw new Error('No token to revoke.');\n }\n const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken;\n await this.transporter.request({ url });\n this.configure({\n email: this.iss,\n sub: this.sub,\n key: this.key,\n keyFile: this.keyFile,\n scope: this.scope,\n additionalClaims: this.additionalClaims,\n });\n }\n /**\n * Configure the GoogleToken for re-use.\n * @param {object} options Configuration object.\n */\n configure(options = {}) {\n this.keyFile = options.keyFile;\n this.key = options.key;\n this.rawToken = undefined;\n this.iss = options.email || options.iss;\n this.sub = options.sub;\n this.additionalClaims = options.additionalClaims;\n if (typeof options.scope === 'object') {\n this.scope = options.scope.join(' ');\n }\n else {\n this.scope = options.scope;\n }\n this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis;\n if (options.transporter) {\n this.transporter = options.transporter;\n }\n }\n /**\n * Request the token from Google.\n */\n async requestToken() {\n var _a, _b;\n const iat = Math.floor(new Date().getTime() / 1000);\n const additionalClaims = this.additionalClaims || {};\n const payload = Object.assign({\n iss: this.iss,\n scope: this.scope,\n aud: GOOGLE_TOKEN_URL,\n exp: iat + 3600,\n iat,\n sub: this.sub,\n }, additionalClaims);\n const signedJWT = jws.sign({\n header: { alg: 'RS256' },\n payload,\n secret: this.key,\n });\n try {\n const r = await this.transporter.request({\n method: 'POST',\n url: GOOGLE_TOKEN_URL,\n data: {\n grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',\n assertion: signedJWT,\n },\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n responseType: 'json',\n });\n this.rawToken = r.data;\n this.expiresAt =\n r.data.expires_in === null || r.data.expires_in === undefined\n ? 
undefined\n : (iat + r.data.expires_in) * 1000;\n return this.rawToken;\n }\n catch (e) {\n this.rawToken = undefined;\n this.tokenExpires = undefined;\n const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data)\n ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data\n : {};\n if (body.error) {\n const desc = body.error_description\n ? `: ${body.error_description}`\n : '';\n e.message = `${body.error}${desc}`;\n }\n throw e;\n }\n }\n}\nexports.GoogleToken = GoogleToken;\n//# sourceMappingURL=index.js.map","'use strict';\n\nmodule.exports = (flag, argv = process.argv) => {\n\tconst prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');\n\tconst position = argv.indexOf(prefix + flag);\n\tconst terminatorPosition = argv.indexOf('--');\n\treturn position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);\n};\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst once_1 = __importDefault(require(\"@tootallnate/once\"));\nconst agent_base_1 = require(\"agent-base\");\nconst debug = (0, debug_1.default)('http-proxy-agent');\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;\n}\n/**\n * The `HttpProxyAgent` implements an HTTP Agent subclass that connects\n * to the specified \"HTTP proxy server\" in order to proxy HTTP requests.\n *\n * @api public\n */\nclass HttpProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('Creating new HttpProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 443 : 80;\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... 
we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n const parsed = url_1.default.parse(req.path);\n if (!parsed.protocol) {\n parsed.protocol = 'http:';\n }\n if (!parsed.hostname) {\n parsed.hostname = opts.hostname || opts.host || null;\n }\n if (parsed.port == null && typeof opts.port) {\n parsed.port = String(opts.port);\n }\n if (parsed.port === '80') {\n // if port is 80, then we can remove the port so that the\n // \":80\" portion is not on the produced URL\n parsed.port = '';\n }\n // Change the `http.ClientRequest` instance's \"path\" field\n // to the absolute path of the URL that will be requested.\n req.path = url_1.default.format(parsed);\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`);\n }\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n // At this point, the http ClientRequest's internal `_header` field\n // might have already been set. If this is the case then we'll need\n // to re-generate the string since we just changed the `req.path`.\n if (req._header) {\n let first;\n let endOfHeaders;\n debug('Regenerating stored HTTP header string for request');\n req._header = null;\n req._implicitHeader();\n if (req.output && req.output.length > 0) {\n // Node < 12\n debug('Patching connection write() output buffer with updated header');\n first = req.output[0];\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.output[0] = req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.output);\n }\n else if (req.outputData && req.outputData.length > 0) {\n // Node >= 12\n debug('Patching connection write() output buffer with updated header');\n first = req.outputData[0].data;\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.outputData[0].data =\n req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.outputData[0].data);\n }\n }\n // Wait for the socket's `connect` event, so that this `callback()`\n // function throws instead of the `http` request machinery. This is\n // important for i.e. `PacProxyAgent` which determines a failed proxy\n // connection via the `callback()` function throwing.\n yield (0, once_1.default)(socket, 'connect');\n return socket;\n });\n }\n}\nexports.default = HttpProxyAgent;\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpProxyAgent) {\n createHttpProxyAgent.HttpProxyAgent = agent_1.default;\n createHttpProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpProxyAgent || (createHttpProxyAgent = {}));\nmodule.exports = createHttpProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst agent_base_1 = require(\"agent-base\");\nconst parse_proxy_response_1 = __importDefault(require(\"./parse-proxy-response\"));\nconst debug = debug_1.default('https-proxy-agent:agent');\n/**\n * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to\n * the specified \"HTTP(s) proxy server\" in order to proxy HTTPS requests.\n *\n * Outgoing HTTP requests are first tunneled through the proxy server using the\n * `CONNECT` HTTP request method to establish a connection to the proxy server,\n * and then the proxy server connects to the destination target and issues the\n * HTTP request from the proxy server.\n *\n * `https:` requests have their socket connection upgraded to TLS once\n * the connection to the proxy server has been established.\n *\n * @api public\n */\nclass HttpsProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('creating new HttpsProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 
443 : 80;\n }\n // ALPN is supported by Node.js >= v5.\n // attempt to negotiate http/1.1 for proxy servers that support http/2\n if (this.secureProxy && !('ALPNProtocols' in proxy)) {\n proxy.ALPNProtocols = ['http 1.1'];\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n const headers = Object.assign({}, proxy.headers);\n const hostname = `${opts.host}:${opts.port}`;\n let payload = `CONNECT ${hostname} HTTP/1.1\\r\\n`;\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`;\n }\n // The `Host` header should only include the port\n // number when it is not the default port.\n let { host, port, secureEndpoint } = opts;\n if (!isDefaultPort(port, secureEndpoint)) {\n host += `:${port}`;\n }\n headers.Host = host;\n headers.Connection = 'close';\n for (const name of Object.keys(headers)) {\n payload += `${name}: ${headers[name]}\\r\\n`;\n }\n const proxyResponsePromise = parse_proxy_response_1.default(socket);\n socket.write(`${payload}\\r\\n`);\n const { statusCode, buffered } = yield proxyResponsePromise;\n if (statusCode === 200) {\n req.once('socket', resume);\n if (opts.secureEndpoint) {\n // The proxy is connecting to a TLS server, so upgrade\n // this socket connection to a TLS connection.\n debug('Upgrading socket connection to TLS');\n const servername = opts.servername || opts.host;\n return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket,\n servername }));\n }\n return socket;\n }\n // Some other status code that's not 200... 
need to re-play the HTTP\n // header \"data\" events onto the socket once the HTTP machinery is\n // attached so that the node core `http` can parse and handle the\n // error status code.\n // Close the original socket, and a new \"fake\" socket is returned\n // instead, so that the proxy doesn't get the HTTP request\n // written to it (which may contain `Authorization` headers or other\n // sensitive data).\n //\n // See: https://hackerone.com/reports/541502\n socket.destroy();\n const fakeSocket = new net_1.default.Socket({ writable: false });\n fakeSocket.readable = true;\n // Need to wait for the \"socket\" event to re-play the \"data\" events.\n req.once('socket', (s) => {\n debug('replaying proxy buffer for failed request');\n assert_1.default(s.listenerCount('data') > 0);\n // Replay the \"buffered\" Buffer onto the fake `socket`, since at\n // this point the HTTP module machinery has been hooked up for\n // the user.\n s.push(buffered);\n s.push(null);\n });\n return fakeSocket;\n });\n }\n}\nexports.default = HttpsProxyAgent;\nfunction resume(socket) {\n socket.resume();\n}\nfunction isDefaultPort(port, secure) {\n return Boolean((!secure && port === 80) || (secure && port === 443));\n}\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;\n}\nfunction omit(obj, ...keys) {\n const ret = {};\n let key;\n for (key in obj) {\n if (!keys.includes(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n}\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpsProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpsProxyAgent) {\n createHttpsProxyAgent.HttpsProxyAgent = agent_1.default;\n createHttpsProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpsProxyAgent || (createHttpsProxyAgent = {}));\nmodule.exports = createHttpsProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst debug_1 = __importDefault(require(\"debug\"));\nconst debug = debug_1.default('https-proxy-agent:parse-proxy-response');\nfunction parseProxyResponse(socket) {\n return new Promise((resolve, reject) => {\n // we need to buffer any HTTP traffic that happens with the proxy before we get\n // the CONNECT response, so that if the response is anything other than an \"200\"\n // response code, then we can re-play the \"data\" events on the socket once the\n // HTTP parser is hooked up...\n let buffersLength = 0;\n const buffers = [];\n function read() {\n const b = socket.read();\n if (b)\n ondata(b);\n else\n socket.once('readable', read);\n }\n function cleanup() {\n socket.removeListener('end', onend);\n socket.removeListener('error', onerror);\n socket.removeListener('close', onclose);\n socket.removeListener('readable', read);\n }\n function onclose(err) {\n debug('onclose had error %o', err);\n }\n function onend() {\n debug('onend');\n }\n function onerror(err) {\n cleanup();\n debug('onerror %o', err);\n reject(err);\n }\n function ondata(b) {\n buffers.push(b);\n buffersLength += b.length;\n const buffered = Buffer.concat(buffers, buffersLength);\n const endOfHeaders = buffered.indexOf('\\r\\n\\r\\n');\n if (endOfHeaders === -1) {\n // keep buffering\n debug('have not received end of HTTP headers yet...');\n read();\n return;\n }\n const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\\r\\n'));\n const statusCode = +firstLine.split(' ')[1];\n debug('got proxy server response: %o', firstLine);\n resolve({\n statusCode,\n buffered\n });\n }\n socket.on('error', onerror);\n socket.on('close', onclose);\n socket.on('end', onend);\n read();\n });\n}\nexports.default = parseProxyResponse;\n//# sourceMappingURL=parse-proxy-response.js.map","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most 
likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nconst isStream = stream =>\n\tstream !== null &&\n\ttypeof stream === 'object' &&\n\ttypeof stream.pipe === 'function';\n\nisStream.writable = stream =>\n\tisStream(stream) &&\n\tstream.writable !== false &&\n\ttypeof stream._write === 'function' &&\n\ttypeof stream._writableState === 'object';\n\nisStream.readable = stream =>\n\tisStream(stream) &&\n\tstream.readable !== false &&\n\ttypeof stream._read === 'function' &&\n\ttypeof stream._readableState === 'object';\n\nisStream.duplex = stream =>\n\tisStream.writable(stream) &&\n\tisStream.readable(stream);\n\nisStream.transform = stream =>\n\tisStream.duplex(stream) &&\n\ttypeof stream._transform === 'function';\n\nmodule.exports = isStream;\n","var json_stringify = require('./lib/stringify.js').stringify;\nvar json_parse = require('./lib/parse.js');\n\nmodule.exports = function(options) {\n return {\n parse: json_parse(options),\n stringify: json_stringify\n }\n};\n//create the default method members with no options applied for backwards compatibility\nmodule.exports.parse = json_parse();\nmodule.exports.stringify = json_stringify;\n","var BigNumber = null;\n\n// regexpxs extracted from\n// (c) BSD-3-Clause\n// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors\n\nconst suspectProtoRx = /(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])(?:p|\\\\u0070)(?:r|\\\\u0072)(?:o|\\\\u006[Ff])(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])/;\nconst suspectConstructorRx = /(?:c|\\\\u0063)(?:o|\\\\u006[Ff])(?:n|\\\\u006[Ee])(?:s|\\\\u0073)(?:t|\\\\u0074)(?:r|\\\\u0072)(?:u|\\\\u0075)(?:c|\\\\u0063)(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:r|\\\\u0072)/;\n\n/*\n json_parse.js\n 2012-06-20\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n This file creates a json_parse function.\n During create you can (optionally) specify some behavioural switches\n\n require('json-bigint')(options)\n\n The optional options parameter holds switches that drive certain\n aspects of the parsing process:\n * options.strict = true will warn about duplicate-key usage in the json.\n The default (strict = false) will silently ignore those and overwrite\n values for keys that are in duplicate use.\n\n The resulting function follows this signature:\n json_parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = json_parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n This is a reference implementation. You are free to copy, modify, or\n redistribute.\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. 
IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n*/\n\n/*members \"\", \"\\\"\", \"\\/\", \"\\\\\", at, b, call, charAt, f, fromCharCode,\n hasOwnProperty, message, n, name, prototype, push, r, t, text\n*/\n\nvar json_parse = function (options) {\n 'use strict';\n\n // This is a function that can parse a JSON text, producing a JavaScript\n // data structure. It is a simple, recursive descent parser. It does not use\n // eval or regular expressions, so it can be used as a model for implementing\n // a JSON parser in other languages.\n\n // We are defining the function inside of another function to avoid creating\n // global variables.\n\n // Default options one can override by passing options to the parse()\n var _options = {\n strict: false, // not being strict means do not generate syntax errors for \"duplicate key\"\n storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string\n alwaysParseAsBig: false, // toggles whether all numbers should be Big\n useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js\n protoAction: 'error',\n constructorAction: 'error',\n };\n\n // If there are options, then use them to override the default _options\n if (options !== undefined && options !== null) {\n if (options.strict === true) {\n _options.strict = true;\n }\n if (options.storeAsString === true) {\n _options.storeAsString = true;\n }\n _options.alwaysParseAsBig =\n options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false;\n _options.useNativeBigInt =\n options.useNativeBigInt === true ? options.useNativeBigInt : false;\n\n if (typeof options.constructorAction !== 'undefined') {\n if (\n options.constructorAction === 'error' ||\n options.constructorAction === 'ignore' ||\n options.constructorAction === 'preserve'\n ) {\n _options.constructorAction = options.constructorAction;\n } else {\n throw new Error(\n `Incorrect value for constructorAction option, must be \"error\", \"ignore\" or undefined but passed ${options.constructorAction}`\n );\n }\n }\n\n if (typeof options.protoAction !== 'undefined') {\n if (\n options.protoAction === 'error' ||\n options.protoAction === 'ignore' ||\n options.protoAction === 'preserve'\n ) {\n _options.protoAction = options.protoAction;\n } else {\n throw new Error(\n `Incorrect value for protoAction option, must be \"error\", \"ignore\" or undefined but passed ${options.protoAction}`\n );\n }\n }\n }\n\n var at, // The index of the current character\n ch, // The current character\n escapee = {\n '\"': '\"',\n '\\\\': '\\\\',\n '/': '/',\n b: '\\b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n },\n text,\n error = function (m) {\n // Call error when something is wrong.\n\n throw {\n name: 'SyntaxError',\n message: m,\n at: at,\n text: text,\n };\n },\n next = function (c) {\n // If a c parameter is provided, verify that it matches the current character.\n\n if (c && c !== ch) {\n error(\"Expected '\" + c + \"' instead of '\" + ch + \"'\");\n }\n\n // Get the next character. 
When there are no more characters,\n // return the empty string.\n\n ch = text.charAt(at);\n at += 1;\n return ch;\n },\n number = function () {\n // Parse a number value.\n\n var number,\n string = '';\n\n if (ch === '-') {\n string = '-';\n next('-');\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n if (ch === '.') {\n string += '.';\n while (next() && ch >= '0' && ch <= '9') {\n string += ch;\n }\n }\n if (ch === 'e' || ch === 'E') {\n string += ch;\n next();\n if (ch === '-' || ch === '+') {\n string += ch;\n next();\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n }\n number = +string;\n if (!isFinite(number)) {\n error('Bad number');\n } else {\n if (BigNumber == null) BigNumber = require('bignumber.js');\n //if (number > 9007199254740992 || number < -9007199254740992)\n // Bignumber has stricter check: everything with length > 15 digits disallowed\n if (string.length > 15)\n return _options.storeAsString\n ? string\n : _options.useNativeBigInt\n ? BigInt(string)\n : new BigNumber(string);\n else\n return !_options.alwaysParseAsBig\n ? number\n : _options.useNativeBigInt\n ? BigInt(number)\n : new BigNumber(number);\n }\n },\n string = function () {\n // Parse a string value.\n\n var hex,\n i,\n string = '',\n uffff;\n\n // When parsing for string values, we must look for \" and \\ characters.\n\n if (ch === '\"') {\n var startAt = at;\n while (next()) {\n if (ch === '\"') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n return string;\n }\n if (ch === '\\\\') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n if (ch === 'u') {\n uffff = 0;\n for (i = 0; i < 4; i += 1) {\n hex = parseInt(next(), 16);\n if (!isFinite(hex)) {\n break;\n }\n uffff = uffff * 16 + hex;\n }\n string += String.fromCharCode(uffff);\n } else if (typeof escapee[ch] === 'string') {\n string += escapee[ch];\n } else {\n break;\n }\n startAt = at;\n }\n }\n }\n error('Bad string');\n },\n white = function () {\n // Skip whitespace.\n\n while (ch && ch <= ' ') {\n next();\n }\n },\n word = function () {\n // true, false, or null.\n\n switch (ch) {\n case 't':\n next('t');\n next('r');\n next('u');\n next('e');\n return true;\n case 'f':\n next('f');\n next('a');\n next('l');\n next('s');\n next('e');\n return false;\n case 'n':\n next('n');\n next('u');\n next('l');\n next('l');\n return null;\n }\n error(\"Unexpected '\" + ch + \"'\");\n },\n value, // Place holder for the value function.\n array = function () {\n // Parse an array value.\n\n var array = [];\n\n if (ch === '[') {\n next('[');\n white();\n if (ch === ']') {\n next(']');\n return array; // empty array\n }\n while (ch) {\n array.push(value());\n white();\n if (ch === ']') {\n next(']');\n return array;\n }\n next(',');\n white();\n }\n }\n error('Bad array');\n },\n object = function () {\n // Parse an object value.\n\n var key,\n object = Object.create(null);\n\n if (ch === '{') {\n next('{');\n white();\n if (ch === '}') {\n next('}');\n return object; // empty object\n }\n while (ch) {\n key = string();\n white();\n next(':');\n if (\n _options.strict === true &&\n Object.hasOwnProperty.call(object, key)\n ) {\n error('Duplicate key \"' + key + '\"');\n }\n\n if (suspectProtoRx.test(key) === true) {\n if (_options.protoAction === 'error') {\n error('Object contains forbidden prototype property');\n } else if (_options.protoAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else if (suspectConstructorRx.test(key) === 
true) {\n if (_options.constructorAction === 'error') {\n error('Object contains forbidden constructor property');\n } else if (_options.constructorAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else {\n object[key] = value();\n }\n\n white();\n if (ch === '}') {\n next('}');\n return object;\n }\n next(',');\n white();\n }\n }\n error('Bad object');\n };\n\n value = function () {\n // Parse a JSON value. It could be an object, an array, a string, a number,\n // or a word.\n\n white();\n switch (ch) {\n case '{':\n return object();\n case '[':\n return array();\n case '\"':\n return string();\n case '-':\n return number();\n default:\n return ch >= '0' && ch <= '9' ? number() : word();\n }\n };\n\n // Return the json_parse function. It will have access to all of the above\n // functions and variables.\n\n return function (source, reviver) {\n var result;\n\n text = source + '';\n at = 0;\n ch = ' ';\n result = value();\n white();\n if (ch) {\n error('Syntax error');\n }\n\n // If there is a reviver function, we recursively walk the new structure,\n // passing each name/value pair to the reviver function for possible\n // transformation, starting with a temporary root object that holds the result\n // in an empty key. If there is not a reviver function, we simply return the\n // result.\n\n return typeof reviver === 'function'\n ? (function walk(holder, key) {\n var k,\n v,\n value = holder[key];\n if (value && typeof value === 'object') {\n Object.keys(value).forEach(function (k) {\n v = walk(value, k);\n if (v !== undefined) {\n value[k] = v;\n } else {\n delete value[k];\n }\n });\n }\n return reviver.call(holder, key, value);\n })({ '': result }, '')\n : result;\n };\n};\n\nmodule.exports = json_parse;\n","var BigNumber = require('bignumber.js');\n\n/*\n json2.js\n 2013-05-26\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n See http://www.JSON.org/js.html\n\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n\n\n This file creates a global JSON object containing two methods: stringify\n and parse.\n\n JSON.stringify(value, replacer, space)\n value any JavaScript value, usually an object or array.\n\n replacer an optional parameter that determines how object\n values are stringified for objects. It can be a\n function or an array of strings.\n\n space an optional parameter that specifies the indentation\n of nested structures. If it is omitted, the text will\n be packed without extra whitespace. If it is a number,\n it will specify the number of spaces to indent at each\n level. If it is a string (such as '\\t' or ' '),\n it contains the characters used to indent at each level.\n\n This method produces a JSON text from a JavaScript value.\n\n When an object value is found, if the object contains a toJSON\n method, its toJSON method will be called and the result will be\n stringified. A toJSON method does not serialize: it returns the\n value represented by the name/value pair that should be serialized,\n or undefined if nothing should be serialized. The toJSON method\n will be passed the key associated with the value, and this will be\n bound to the value\n\n For example, this would serialize Dates as ISO strings.\n\n Date.prototype.toJSON = function (key) {\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? 
'0' + n : n;\n }\n\n return this.getUTCFullYear() + '-' +\n f(this.getUTCMonth() + 1) + '-' +\n f(this.getUTCDate()) + 'T' +\n f(this.getUTCHours()) + ':' +\n f(this.getUTCMinutes()) + ':' +\n f(this.getUTCSeconds()) + 'Z';\n };\n\n You can provide an optional replacer method. It will be passed the\n key and value of each member, with this bound to the containing\n object. The value that is returned from your method will be\n serialized. If your method returns undefined, then the member will\n be excluded from the serialization.\n\n If the replacer parameter is an array of strings, then it will be\n used to select the members to be serialized. It filters the results\n such that only members with keys listed in the replacer array are\n stringified.\n\n Values that do not have JSON representations, such as undefined or\n functions, will not be serialized. Such values in objects will be\n dropped; in arrays they will be replaced with null. You can use\n a replacer function to replace those with JSON values.\n JSON.stringify(undefined) returns undefined.\n\n The optional space parameter produces a stringification of the\n value that is filled with line breaks and indentation to make it\n easier to read.\n\n If the space parameter is a non-empty string, then that string will\n be used for indentation. If the space parameter is a number, then\n the indentation will be that many spaces.\n\n Example:\n\n text = JSON.stringify(['e', {pluribus: 'unum'}]);\n // text is '[\"e\",{\"pluribus\":\"unum\"}]'\n\n\n text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\\t');\n // text is '[\\n\\t\"e\",\\n\\t{\\n\\t\\t\"pluribus\": \"unum\"\\n\\t}\\n]'\n\n text = JSON.stringify([new Date()], function (key, value) {\n return this[key] instanceof Date ?\n 'Date(' + this[key] + ')' : value;\n });\n // text is '[\"Date(---current time---)\"]'\n\n\n JSON.parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = JSON.parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n myData = JSON.parse('[\"Date(09/09/2001)\"]', function (key, value) {\n var d;\n if (typeof value === 'string' &&\n value.slice(0, 5) === 'Date(' &&\n value.slice(-1) === ')') {\n d = new Date(value.slice(5, -1));\n if (d) {\n return d;\n }\n }\n return value;\n });\n\n\n This is a reference implementation. You are free to copy, modify, or\n redistribute.\n*/\n\n/*jslint evil: true, regexp: true */\n\n/*members \"\", \"\\b\", \"\\t\", \"\\n\", \"\\f\", \"\\r\", \"\\\"\", JSON, \"\\\\\", apply,\n call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,\n getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,\n lastIndex, length, parse, prototype, push, replace, slice, stringify,\n test, toJSON, toString, valueOf\n*/\n\n\n// Create a JSON object only if one does not already exist. 
We create the\n// methods in a closure to avoid creating global variables.\n\nvar JSON = module.exports;\n\n(function () {\n 'use strict';\n\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? '0' + n : n;\n }\n\n var cx = /[\\u0000\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n escapable = /[\\\\\\\"\\x00-\\x1f\\x7f-\\x9f\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n gap,\n indent,\n meta = { // table of character substitutions\n '\\b': '\\\\b',\n '\\t': '\\\\t',\n '\\n': '\\\\n',\n '\\f': '\\\\f',\n '\\r': '\\\\r',\n '\"' : '\\\\\"',\n '\\\\': '\\\\\\\\'\n },\n rep;\n\n\n function quote(string) {\n\n// If the string contains no control characters, no quote characters, and no\n// backslash characters, then we can safely slap some quotes around it.\n// Otherwise we must also replace the offending characters with safe escape\n// sequences.\n\n escapable.lastIndex = 0;\n return escapable.test(string) ? '\"' + string.replace(escapable, function (a) {\n var c = meta[a];\n return typeof c === 'string'\n ? c\n : '\\\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);\n }) + '\"' : '\"' + string + '\"';\n }\n\n\n function str(key, holder) {\n\n// Produce a string from holder[key].\n\n var i, // The loop counter.\n k, // The member key.\n v, // The member value.\n length,\n mind = gap,\n partial,\n value = holder[key],\n isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value));\n\n// If the value has a toJSON method, call it to obtain a replacement value.\n\n if (value && typeof value === 'object' &&\n typeof value.toJSON === 'function') {\n value = value.toJSON(key);\n }\n\n// If we were called with a replacer function, then call the replacer to\n// obtain a replacement value.\n\n if (typeof rep === 'function') {\n value = rep.call(holder, key, value);\n }\n\n// What happens next depends on the value's type.\n\n switch (typeof value) {\n case 'string':\n if (isBigNumber) {\n return value;\n } else {\n return quote(value);\n }\n\n case 'number':\n\n// JSON numbers must be finite. Encode non-finite numbers as null.\n\n return isFinite(value) ? String(value) : 'null';\n\n case 'boolean':\n case 'null':\n case 'bigint':\n\n// If the value is a boolean or null, convert it to a string. Note:\n// typeof null does not produce 'null'. The case is included here in\n// the remote chance that this gets fixed someday.\n\n return String(value);\n\n// If the type is 'object', we might be dealing with an object or an array or\n// null.\n\n case 'object':\n\n// Due to a specification blunder in ECMAScript, typeof null is 'object',\n// so watch out for that case.\n\n if (!value) {\n return 'null';\n }\n\n// Make an array to hold the partial results of stringifying this object value.\n\n gap += indent;\n partial = [];\n\n// Is the value an array?\n\n if (Object.prototype.toString.apply(value) === '[object Array]') {\n\n// The value is an array. Stringify every element. Use null as a placeholder\n// for non-JSON values.\n\n length = value.length;\n for (i = 0; i < length; i += 1) {\n partial[i] = str(i, value) || 'null';\n }\n\n// Join all of the elements together, separated with commas, and wrap them in\n// brackets.\n\n v = partial.length === 0\n ? '[]'\n : gap\n ? 
'[\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + ']'\n : '[' + partial.join(',') + ']';\n gap = mind;\n return v;\n }\n\n// If the replacer is an array, use it to select the members to be stringified.\n\n if (rep && typeof rep === 'object') {\n length = rep.length;\n for (i = 0; i < length; i += 1) {\n if (typeof rep[i] === 'string') {\n k = rep[i];\n v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n }\n }\n } else {\n\n// Otherwise, iterate through all of the keys in the object.\n\n Object.keys(value).forEach(function(k) {\n var v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n });\n }\n\n// Join all of the member texts together, separated with commas,\n// and wrap them in braces.\n\n v = partial.length === 0\n ? '{}'\n : gap\n ? '{\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + '}'\n : '{' + partial.join(',') + '}';\n gap = mind;\n return v;\n }\n }\n\n// If the JSON object does not yet have a stringify method, give it one.\n\n if (typeof JSON.stringify !== 'function') {\n JSON.stringify = function (value, replacer, space) {\n\n// The stringify method takes a value and an optional replacer, and an optional\n// space parameter, and returns a JSON text. The replacer can be a function\n// that can replace values, or an array of strings that will select the keys.\n// A default replacer method can be provided. Use of the space parameter can\n// produce text that is more easily readable.\n\n var i;\n gap = '';\n indent = '';\n\n// If the space parameter is a number, make an indent string containing that\n// many spaces.\n\n if (typeof space === 'number') {\n for (i = 0; i < space; i += 1) {\n indent += ' ';\n }\n\n// If the space parameter is a string, it will be used as the indent string.\n\n } else if (typeof space === 'string') {\n indent = space;\n }\n\n// If there is a replacer, it must be a function or an array.\n// Otherwise, throw an error.\n\n rep = replacer;\n if (replacer && typeof replacer !== 'function' &&\n (typeof replacer !== 'object' ||\n typeof replacer.length !== 'number')) {\n throw new Error('JSON.stringify');\n }\n\n// Make a fake root object containing our value under the key of ''.\n// Return the result of stringifying the value.\n\n return str('', {'': value});\n };\n }\n}());\n","var bufferEqual = require('buffer-equal-constant-time');\nvar Buffer = require('safe-buffer').Buffer;\nvar crypto = require('crypto');\nvar formatEcdsa = require('ecdsa-sig-formatter');\nvar util = require('util');\n\nvar MSG_INVALID_ALGORITHM = '\"%s\" is not a valid algorithm.\\n Supported algorithms are:\\n \"HS256\", \"HS384\", \"HS512\", \"RS256\", \"RS384\", \"RS512\", \"PS256\", \"PS384\", \"PS512\", \"ES256\", \"ES384\", \"ES512\" and \"none\".'\nvar MSG_INVALID_SECRET = 'secret must be a string or buffer';\nvar MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer';\nvar MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object';\n\nvar supportsKeyObjects = typeof crypto.createPublicKey === 'function';\nif (supportsKeyObjects) {\n MSG_INVALID_VERIFIER_KEY += ' or a KeyObject';\n MSG_INVALID_SECRET += 'or a KeyObject';\n}\n\nfunction checkIsPublicKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.type !== 'string') {\n throw 
typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.asymmetricKeyType !== 'string') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n};\n\nfunction checkIsPrivateKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (typeof key === 'object') {\n return;\n }\n\n throw typeError(MSG_INVALID_SIGNER_KEY);\n};\n\nfunction checkIsSecretKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return key;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (key.type !== 'secret') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_SECRET);\n }\n}\n\nfunction fromBase64(base64) {\n return base64\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction toBase64(base64url) {\n base64url = base64url.toString();\n\n var padding = 4 - base64url.length % 4;\n if (padding !== 4) {\n for (var i = 0; i < padding; ++i) {\n base64url += '=';\n }\n }\n\n return base64url\n .replace(/\\-/g, '+')\n .replace(/_/g, '/');\n}\n\nfunction typeError(template) {\n var args = [].slice.call(arguments, 1);\n var errMsg = util.format.bind(util, template).apply(null, args);\n return new TypeError(errMsg);\n}\n\nfunction bufferOrString(obj) {\n return Buffer.isBuffer(obj) || typeof obj === 'string';\n}\n\nfunction normalizeInput(thing) {\n if (!bufferOrString(thing))\n thing = JSON.stringify(thing);\n return thing;\n}\n\nfunction createHmacSigner(bits) {\n return function sign(thing, secret) {\n checkIsSecretKey(secret);\n thing = normalizeInput(thing);\n var hmac = crypto.createHmac('sha' + bits, secret);\n var sig = (hmac.update(thing), hmac.digest('base64'))\n return fromBase64(sig);\n }\n}\n\nfunction createHmacVerifier(bits) {\n return function verify(thing, signature, secret) {\n var computedSig = createHmacSigner(bits)(thing, secret);\n return bufferEqual(Buffer.from(signature), Buffer.from(computedSig));\n }\n}\n\nfunction createKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n // Even though we are specifying \"RSA\" here, this works with ECDSA\n // keys as well.\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign(privateKey, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify(publicKey, signature, 'base64');\n }\n}\n\nfunction createPSSKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign({\n key: privateKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createPSSKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = 
toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify({\n key: publicKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, signature, 'base64');\n }\n}\n\nfunction createECDSASigner(bits) {\n var inner = createKeySigner(bits);\n return function sign() {\n var signature = inner.apply(null, arguments);\n signature = formatEcdsa.derToJose(signature, 'ES' + bits);\n return signature;\n };\n}\n\nfunction createECDSAVerifer(bits) {\n var inner = createKeyVerifier(bits);\n return function verify(thing, signature, publicKey) {\n signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64');\n var result = inner(thing, signature, publicKey);\n return result;\n };\n}\n\nfunction createNoneSigner() {\n return function sign() {\n return '';\n }\n}\n\nfunction createNoneVerifier() {\n return function verify(thing, signature) {\n return signature === '';\n }\n}\n\nmodule.exports = function jwa(algorithm) {\n var signerFactories = {\n hs: createHmacSigner,\n rs: createKeySigner,\n ps: createPSSKeySigner,\n es: createECDSASigner,\n none: createNoneSigner,\n }\n var verifierFactories = {\n hs: createHmacVerifier,\n rs: createKeyVerifier,\n ps: createPSSKeyVerifier,\n es: createECDSAVerifer,\n none: createNoneVerifier,\n }\n var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/);\n if (!match)\n throw typeError(MSG_INVALID_ALGORITHM, algorithm);\n var algo = (match[1] || match[3]).toLowerCase();\n var bits = match[2];\n\n return {\n sign: signerFactories[algo](bits),\n verify: verifierFactories[algo](bits),\n }\n};\n","/*global exports*/\nvar SignStream = require('./lib/sign-stream');\nvar VerifyStream = require('./lib/verify-stream');\n\nvar ALGORITHMS = [\n 'HS256', 'HS384', 'HS512',\n 'RS256', 'RS384', 'RS512',\n 'PS256', 'PS384', 'PS512',\n 'ES256', 'ES384', 'ES512'\n];\n\nexports.ALGORITHMS = ALGORITHMS;\nexports.sign = SignStream.sign;\nexports.verify = VerifyStream.verify;\nexports.decode = VerifyStream.decode;\nexports.isValid = VerifyStream.isValid;\nexports.createSign = function createSign(opts) {\n return new SignStream(opts);\n};\nexports.createVerify = function createVerify(opts) {\n return new VerifyStream(opts);\n};\n","/*global module, process*/\nvar Buffer = require('safe-buffer').Buffer;\nvar Stream = require('stream');\nvar util = require('util');\n\nfunction DataStream(data) {\n this.buffer = null;\n this.writable = true;\n this.readable = true;\n\n // No input\n if (!data) {\n this.buffer = Buffer.alloc(0);\n return this;\n }\n\n // Stream\n if (typeof data.pipe === 'function') {\n this.buffer = Buffer.alloc(0);\n data.pipe(this);\n return this;\n }\n\n // Buffer or String\n // or Object (assumedly a passworded key)\n if (data.length || typeof data === 'object') {\n this.buffer = data;\n this.writable = false;\n process.nextTick(function () {\n this.emit('end', data);\n this.readable = false;\n this.emit('close');\n }.bind(this));\n return this;\n }\n\n throw new TypeError('Unexpected data type ('+ typeof data + ')');\n}\nutil.inherits(DataStream, Stream);\n\nDataStream.prototype.write = function write(data) {\n this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]);\n this.emit('data', data);\n};\n\nDataStream.prototype.end = function end(data) {\n if (data)\n this.write(data);\n this.emit('end', data);\n this.emit('close');\n this.writable = false;\n this.readable = false;\n};\n\nmodule.exports = 
DataStream;\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\n\nfunction base64url(string, encoding) {\n return Buffer\n .from(string, encoding)\n .toString('base64')\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction jwsSecuredInput(header, payload, encoding) {\n encoding = encoding || 'utf8';\n var encodedHeader = base64url(toString(header), 'binary');\n var encodedPayload = base64url(toString(payload), encoding);\n return util.format('%s.%s', encodedHeader, encodedPayload);\n}\n\nfunction jwsSign(opts) {\n var header = opts.header;\n var payload = opts.payload;\n var secretOrKey = opts.secret || opts.privateKey;\n var encoding = opts.encoding;\n var algo = jwa(header.alg);\n var securedInput = jwsSecuredInput(header, payload, encoding);\n var signature = algo.sign(securedInput, secretOrKey);\n return util.format('%s.%s', securedInput, signature);\n}\n\nfunction SignStream(opts) {\n var secret = opts.secret||opts.privateKey||opts.key;\n var secretStream = new DataStream(secret);\n this.readable = true;\n this.header = opts.header;\n this.encoding = opts.encoding;\n this.secret = this.privateKey = this.key = secretStream;\n this.payload = new DataStream(opts.payload);\n this.secret.once('close', function () {\n if (!this.payload.writable && this.readable)\n this.sign();\n }.bind(this));\n\n this.payload.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.sign();\n }.bind(this));\n}\nutil.inherits(SignStream, Stream);\n\nSignStream.prototype.sign = function sign() {\n try {\n var signature = jwsSign({\n header: this.header,\n payload: this.payload.buffer,\n secret: this.secret.buffer,\n encoding: this.encoding\n });\n this.emit('done', signature);\n this.emit('data', signature);\n this.emit('end');\n this.readable = false;\n return signature;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nSignStream.sign = jwsSign;\n\nmodule.exports = SignStream;\n","/*global module*/\nvar Buffer = require('buffer').Buffer;\n\nmodule.exports = function toString(obj) {\n if (typeof obj === 'string')\n return obj;\n if (typeof obj === 'number' || Buffer.isBuffer(obj))\n return obj.toString();\n return JSON.stringify(obj);\n};\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\nvar JWS_REGEX = /^[a-zA-Z0-9\\-_]+?\\.[a-zA-Z0-9\\-_]+?\\.([a-zA-Z0-9\\-_]+)?$/;\n\nfunction isObject(thing) {\n return Object.prototype.toString.call(thing) === '[object Object]';\n}\n\nfunction safeJsonParse(thing) {\n if (isObject(thing))\n return thing;\n try { return JSON.parse(thing); }\n catch (e) { return undefined; }\n}\n\nfunction headerFromJWS(jwsSig) {\n var encodedHeader = jwsSig.split('.', 1)[0];\n return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary'));\n}\n\nfunction securedInputFromJWS(jwsSig) {\n return jwsSig.split('.', 2).join('.');\n}\n\nfunction signatureFromJWS(jwsSig) {\n return jwsSig.split('.')[2];\n}\n\nfunction payloadFromJWS(jwsSig, encoding) {\n encoding = encoding || 'utf8';\n var payload = jwsSig.split('.')[1];\n return Buffer.from(payload, 'base64').toString(encoding);\n}\n\nfunction 
isValidJws(string) {\n return JWS_REGEX.test(string) && !!headerFromJWS(string);\n}\n\nfunction jwsVerify(jwsSig, algorithm, secretOrKey) {\n if (!algorithm) {\n var err = new Error(\"Missing algorithm parameter for jws.verify\");\n err.code = \"MISSING_ALGORITHM\";\n throw err;\n }\n jwsSig = toString(jwsSig);\n var signature = signatureFromJWS(jwsSig);\n var securedInput = securedInputFromJWS(jwsSig);\n var algo = jwa(algorithm);\n return algo.verify(securedInput, signature, secretOrKey);\n}\n\nfunction jwsDecode(jwsSig, opts) {\n opts = opts || {};\n jwsSig = toString(jwsSig);\n\n if (!isValidJws(jwsSig))\n return null;\n\n var header = headerFromJWS(jwsSig);\n\n if (!header)\n return null;\n\n var payload = payloadFromJWS(jwsSig);\n if (header.typ === 'JWT' || opts.json)\n payload = JSON.parse(payload, opts.encoding);\n\n return {\n header: header,\n payload: payload,\n signature: signatureFromJWS(jwsSig)\n };\n}\n\nfunction VerifyStream(opts) {\n opts = opts || {};\n var secretOrKey = opts.secret||opts.publicKey||opts.key;\n var secretStream = new DataStream(secretOrKey);\n this.readable = true;\n this.algorithm = opts.algorithm;\n this.encoding = opts.encoding;\n this.secret = this.publicKey = this.key = secretStream;\n this.signature = new DataStream(opts.signature);\n this.secret.once('close', function () {\n if (!this.signature.writable && this.readable)\n this.verify();\n }.bind(this));\n\n this.signature.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.verify();\n }.bind(this));\n}\nutil.inherits(VerifyStream, Stream);\nVerifyStream.prototype.verify = function verify() {\n try {\n var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer);\n var obj = jwsDecode(this.signature.buffer, this.encoding);\n this.emit('done', valid, obj);\n this.emit('data', valid);\n this.emit('end');\n this.readable = false;\n return valid;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nVerifyStream.decode = jwsDecode;\nVerifyStream.isValid = isValidJws;\nVerifyStream.verify = jwsVerify;\n\nmodule.exports = VerifyStream;\n","'use strict'\n\n// A linked list to keep track of recently-used-ness\nconst Yallist = require('yallist')\n\nconst MAX = Symbol('max')\nconst LENGTH = Symbol('length')\nconst LENGTH_CALCULATOR = Symbol('lengthCalculator')\nconst ALLOW_STALE = Symbol('allowStale')\nconst MAX_AGE = Symbol('maxAge')\nconst DISPOSE = Symbol('dispose')\nconst NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')\nconst LRU_LIST = Symbol('lruList')\nconst CACHE = Symbol('cache')\nconst UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')\n\nconst naiveLength = () => 1\n\n// lruList is a yallist where the head is the youngest\n// item, and the tail is the oldest. the list contains the Hit\n// objects as the entries.\n// Each Hit object has a reference to its Yallist.Node. This\n// never changes.\n//\n// cache is a Map (or PseudoMap) that matches the keys to\n// the Yallist.Node object.\nclass LRUCache {\n constructor (options) {\n if (typeof options === 'number')\n options = { max: options }\n\n if (!options)\n options = {}\n\n if (options.max && (typeof options.max !== 'number' || options.max < 0))\n throw new TypeError('max must be a non-negative number')\n // Kind of weird to have a default max of Infinity, but oh well.\n const max = this[MAX] = options.max || Infinity\n\n const lc = options.length || naiveLength\n this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? 
naiveLength : lc\n this[ALLOW_STALE] = options.stale || false\n if (options.maxAge && typeof options.maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n this[MAX_AGE] = options.maxAge || 0\n this[DISPOSE] = options.dispose\n this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false\n this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false\n this.reset()\n }\n\n // resize the cache when the max changes.\n set max (mL) {\n if (typeof mL !== 'number' || mL < 0)\n throw new TypeError('max must be a non-negative number')\n\n this[MAX] = mL || Infinity\n trim(this)\n }\n get max () {\n return this[MAX]\n }\n\n set allowStale (allowStale) {\n this[ALLOW_STALE] = !!allowStale\n }\n get allowStale () {\n return this[ALLOW_STALE]\n }\n\n set maxAge (mA) {\n if (typeof mA !== 'number')\n throw new TypeError('maxAge must be a non-negative number')\n\n this[MAX_AGE] = mA\n trim(this)\n }\n get maxAge () {\n return this[MAX_AGE]\n }\n\n // resize the cache when the lengthCalculator changes.\n set lengthCalculator (lC) {\n if (typeof lC !== 'function')\n lC = naiveLength\n\n if (lC !== this[LENGTH_CALCULATOR]) {\n this[LENGTH_CALCULATOR] = lC\n this[LENGTH] = 0\n this[LRU_LIST].forEach(hit => {\n hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)\n this[LENGTH] += hit.length\n })\n }\n trim(this)\n }\n get lengthCalculator () { return this[LENGTH_CALCULATOR] }\n\n get length () { return this[LENGTH] }\n get itemCount () { return this[LRU_LIST].length }\n\n rforEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].tail; walker !== null;) {\n const prev = walker.prev\n forEachStep(this, fn, walker, thisp)\n walker = prev\n }\n }\n\n forEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].head; walker !== null;) {\n const next = walker.next\n forEachStep(this, fn, walker, thisp)\n walker = next\n }\n }\n\n keys () {\n return this[LRU_LIST].toArray().map(k => k.key)\n }\n\n values () {\n return this[LRU_LIST].toArray().map(k => k.value)\n }\n\n reset () {\n if (this[DISPOSE] &&\n this[LRU_LIST] &&\n this[LRU_LIST].length) {\n this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))\n }\n\n this[CACHE] = new Map() // hash of items by key\n this[LRU_LIST] = new Yallist() // list of items in order of use recency\n this[LENGTH] = 0 // length of items in the list\n }\n\n dump () {\n return this[LRU_LIST].map(hit =>\n isStale(this, hit) ? false : {\n k: hit.key,\n v: hit.value,\n e: hit.now + (hit.maxAge || 0)\n }).toArray().filter(h => h)\n }\n\n dumpLru () {\n return this[LRU_LIST]\n }\n\n set (key, value, maxAge) {\n maxAge = maxAge || this[MAX_AGE]\n\n if (maxAge && typeof maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n\n const now = maxAge ? 
Date.now() : 0\n const len = this[LENGTH_CALCULATOR](value, key)\n\n if (this[CACHE].has(key)) {\n if (len > this[MAX]) {\n del(this, this[CACHE].get(key))\n return false\n }\n\n const node = this[CACHE].get(key)\n const item = node.value\n\n // dispose of the old one before overwriting\n // split out into 2 ifs for better coverage tracking\n if (this[DISPOSE]) {\n if (!this[NO_DISPOSE_ON_SET])\n this[DISPOSE](key, item.value)\n }\n\n item.now = now\n item.maxAge = maxAge\n item.value = value\n this[LENGTH] += len - item.length\n item.length = len\n this.get(key)\n trim(this)\n return true\n }\n\n const hit = new Entry(key, value, len, now, maxAge)\n\n // oversized objects fall out of cache automatically.\n if (hit.length > this[MAX]) {\n if (this[DISPOSE])\n this[DISPOSE](key, value)\n\n return false\n }\n\n this[LENGTH] += hit.length\n this[LRU_LIST].unshift(hit)\n this[CACHE].set(key, this[LRU_LIST].head)\n trim(this)\n return true\n }\n\n has (key) {\n if (!this[CACHE].has(key)) return false\n const hit = this[CACHE].get(key).value\n return !isStale(this, hit)\n }\n\n get (key) {\n return get(this, key, true)\n }\n\n peek (key) {\n return get(this, key, false)\n }\n\n pop () {\n const node = this[LRU_LIST].tail\n if (!node)\n return null\n\n del(this, node)\n return node.value\n }\n\n del (key) {\n del(this, this[CACHE].get(key))\n }\n\n load (arr) {\n // reset the cache\n this.reset()\n\n const now = Date.now()\n // A previous serialized cache has the most recent items first\n for (let l = arr.length - 1; l >= 0; l--) {\n const hit = arr[l]\n const expiresAt = hit.e || 0\n if (expiresAt === 0)\n // the item was created without expiration in a non aged cache\n this.set(hit.k, hit.v)\n else {\n const maxAge = expiresAt - now\n // dont add already expired items\n if (maxAge > 0) {\n this.set(hit.k, hit.v, maxAge)\n }\n }\n }\n }\n\n prune () {\n this[CACHE].forEach((value, key) => get(this, key, false))\n }\n}\n\nconst get = (self, key, doUse) => {\n const node = self[CACHE].get(key)\n if (node) {\n const hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n return undefined\n } else {\n if (doUse) {\n if (self[UPDATE_AGE_ON_GET])\n node.value.now = Date.now()\n self[LRU_LIST].unshiftNode(node)\n }\n }\n return hit.value\n }\n}\n\nconst isStale = (self, hit) => {\n if (!hit || (!hit.maxAge && !self[MAX_AGE]))\n return false\n\n const diff = Date.now() - hit.now\n return hit.maxAge ? 
diff > hit.maxAge\n : self[MAX_AGE] && (diff > self[MAX_AGE])\n}\n\nconst trim = self => {\n if (self[LENGTH] > self[MAX]) {\n for (let walker = self[LRU_LIST].tail;\n self[LENGTH] > self[MAX] && walker !== null;) {\n // We know that we're about to delete this one, and also\n // what the next least recently used key will be, so just\n // go ahead and set it now.\n const prev = walker.prev\n del(self, walker)\n walker = prev\n }\n }\n}\n\nconst del = (self, node) => {\n if (node) {\n const hit = node.value\n if (self[DISPOSE])\n self[DISPOSE](hit.key, hit.value)\n\n self[LENGTH] -= hit.length\n self[CACHE].delete(hit.key)\n self[LRU_LIST].removeNode(node)\n }\n}\n\nclass Entry {\n constructor (key, value, length, now, maxAge) {\n this.key = key\n this.value = value\n this.length = length\n this.now = now\n this.maxAge = maxAge || 0\n }\n}\n\nconst forEachStep = (self, fn, node, thisp) => {\n let hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n hit = undefined\n }\n if (hit)\n fn.call(thisp, hit.value, hit.key, self)\n}\n\nmodule.exports = LRUCache\n","/*!\n * mime-db\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015-2022 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if (match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? 
exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' + path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","'use strict';\n\n/**\n * @param typeMap [Object] Map of MIME type -> Array[extensions]\n * @param ...\n */\nfunction Mime() {\n this._types = Object.create(null);\n this._extensions = Object.create(null);\n\n for (let i = 0; i < arguments.length; i++) {\n this.define(arguments[i]);\n }\n\n this.define = this.define.bind(this);\n this.getType = this.getType.bind(this);\n this.getExtension = this.getExtension.bind(this);\n}\n\n/**\n * Define mimetype -> extension mappings. Each key is a mime-type that maps\n * to an array of extensions associated with the type. The first extension is\n * used as the default extension for the type.\n *\n * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']});\n *\n * If a type declares an extension that has already been defined, an error will\n * be thrown. To suppress this error and force the extension to be associated\n * with the new type, pass `force`=true. Alternatively, you may prefix the\n * extension with \"*\" to map the type to extension, without mapping the\n * extension to the type.\n *\n * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']});\n *\n *\n * @param map (Object) type definitions\n * @param force (Boolean) if true, force overriding of existing definitions\n */\nMime.prototype.define = function(typeMap, force) {\n for (let type in typeMap) {\n let extensions = typeMap[type].map(function(t) {\n return t.toLowerCase();\n });\n type = type.toLowerCase();\n\n for (let i = 0; i < extensions.length; i++) {\n const ext = extensions[i];\n\n // '*' prefix = not the preferred type for this extension. So fixup the\n // extension, and skip it.\n if (ext[0] === '*') {\n continue;\n }\n\n if (!force && (ext in this._types)) {\n throw new Error(\n 'Attempt to change mapping for \"' + ext +\n '\" extension from \"' + this._types[ext] + '\" to \"' + type +\n '\". Pass `force=true` to allow this, otherwise remove \"' + ext +\n '\" from the list of extensions for \"' + type + '\".'\n );\n }\n\n this._types[ext] = type;\n }\n\n // Use first extension as default\n if (force || !this._extensions[type]) {\n const ext = extensions[0];\n this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1);\n }\n }\n};\n\n/**\n * Lookup a mime type based on extension\n */\nMime.prototype.getType = function(path) {\n path = String(path);\n let last = path.replace(/^.*[/\\\\]/, '').toLowerCase();\n let ext = last.replace(/^.*\\./, '').toLowerCase();\n\n let hasPath = last.length < path.length;\n let hasDot = ext.length < last.length - 1;\n\n return (hasDot || !hasPath) && this._types[ext] || null;\n};\n\n/**\n * Return file extension associated with a mime type\n */\nMime.prototype.getExtension = function(type) {\n type = /^\\s*([^;\\s]*)/.test(type) && RegExp.$1;\n return type && this._extensions[type.toLowerCase()] || null;\n};\n\nmodule.exports = Mime;\n","'use strict';\n\nlet Mime = require('./Mime');\nmodule.exports = new Mime(require('./types/standard'), require('./types/other'));\n","module.exports = 
{\"application/prs.cww\":[\"cww\"],\"application/vnd.1000minds.decision-model+xml\":[\"1km\"],\"application/vnd.3gpp.pic-bw-large\":[\"plb\"],\"application/vnd.3gpp.pic-bw-small\":[\"psb\"],\"application/vnd.3gpp.pic-bw-var\":[\"pvb\"],\"application/vnd.3gpp2.tcap\":[\"tcap\"],\"application/vnd.3m.post-it-notes\":[\"pwn\"],\"application/vnd.accpac.simply.aso\":[\"aso\"],\"application/vnd.accpac.simply.imp\":[\"imp\"],\"application/vnd.acucobol\":[\"acu\"],\"application/vnd.acucorp\":[\"atc\",\"acutc\"],\"application/vnd.adobe.air-application-installer-package+zip\":[\"air\"],\"application/vnd.adobe.formscentral.fcdt\":[\"fcdt\"],\"application/vnd.adobe.fxp\":[\"fxp\",\"fxpl\"],\"application/vnd.adobe.xdp+xml\":[\"xdp\"],\"application/vnd.adobe.xfdf\":[\"xfdf\"],\"application/vnd.ahead.space\":[\"ahead\"],\"application/vnd.airzip.filesecure.azf\":[\"azf\"],\"application/vnd.airzip.filesecure.azs\":[\"azs\"],\"application/vnd.amazon.ebook\":[\"azw\"],\"application/vnd.americandynamics.acc\":[\"acc\"],\"application/vnd.amiga.ami\":[\"ami\"],\"application/vnd.android.package-archive\":[\"apk\"],\"application/vnd.anser-web-certificate-issue-initiation\":[\"cii\"],\"application/vnd.anser-web-funds-transfer-initiation\":[\"fti\"],\"application/vnd.antix.game-component\":[\"atx\"],\"application/vnd.apple.installer+xml\":[\"mpkg\"],\"application/vnd.apple.keynote\":[\"key\"],\"application/vnd.apple.mpegurl\":[\"m3u8\"],\"application/vnd.apple.numbers\":[\"numbers\"],\"application/vnd.apple.pages\":[\"pages\"],\"application/vnd.apple.pkpass\":[\"pkpass\"],\"application/vnd.aristanetworks.swi\":[\"swi\"],\"application/vnd.astraea-software.iota\":[\"iota\"],\"application/vnd.audiograph\":[\"aep\"],\"application/vnd.balsamiq.bmml+xml\":[\"bmml\"],\"application/vnd.blueice.multipass\":[\"mpm\"],\"application/vnd.bmi\":[\"bmi\"],\"application/vnd.businessobjects\":[\"rep\"],\"application/vnd.chemdraw+xml\":[\"cdxml\"],\"application/vnd.chipnuts.karaoke-mmd\":[\"mmd\"],\"application/vnd.cinderella\":[\"cdy\"],\"application/vnd.citationstyles.style+xml\":[\"csl\"],\"application/vnd.claymore\":[\"cla\"],\"application/vnd.cloanto.rp9\":[\"rp9\"],\"application/vnd.clonk.c4group\":[\"c4g\",\"c4d\",\"c4f\",\"c4p\",\"c4u\"],\"application/vnd.cluetrust.cartomobile-config\":[\"c11amc\"],\"application/vnd.cluetrust.cartomobile-config-pkg\":[\"c11amz\"],\"application/vnd.commonspace\":[\"csp\"],\"application/vnd.contact.cmsg\":[\"cdbcmsg\"],\"application/vnd.cosmocaller\":[\"cmc\"],\"application/vnd.crick.clicker\":[\"clkx\"],\"application/vnd.crick.clicker.keyboard\":[\"clkk\"],\"application/vnd.crick.clicker.palette\":[\"clkp\"],\"application/vnd.crick.clicker.template\":[\"clkt\"],\"application/vnd.crick.clicker.wordbank\":[\"clkw\"],\"application/vnd.criticaltools.wbs+xml\":[\"wbs\"],\"application/vnd.ctc-posml\":[\"pml\"],\"application/vnd.cups-ppd\":[\"ppd\"],\"application/vnd.curl.car\":[\"car\"],\"application/vnd.curl.pcurl\":[\"pcurl\"],\"application/vnd.dart\":[\"dart\"],\"application/vnd.data-vision.rdz\":[\"rdz\"],\"application/vnd.dbf\":[\"dbf\"],\"application/vnd.dece.data\":[\"uvf\",\"uvvf\",\"uvd\",\"uvvd\"],\"application/vnd.dece.ttml+xml\":[\"uvt\",\"uvvt\"],\"application/vnd.dece.unspecified\":[\"uvx\",\"uvvx\"],\"application/vnd.dece.zip\":[\"uvz\",\"uvvz\"],\"application/vnd.denovo.fcselayout-link\":[\"fe_launch\"],\"application/vnd.dna\":[\"dna\"],\"application/vnd.dolby.mlp\":[\"mlp\"],\"application/vnd.dpgraph\":[\"dpg\"],\"application/vnd.dreamfactory\":[\"dfac\"],\"application/vnd.ds-keypoin
t\":[\"kpxx\"],\"application/vnd.dvb.ait\":[\"ait\"],\"application/vnd.dvb.service\":[\"svc\"],\"application/vnd.dynageo\":[\"geo\"],\"application/vnd.ecowin.chart\":[\"mag\"],\"application/vnd.enliven\":[\"nml\"],\"application/vnd.epson.esf\":[\"esf\"],\"application/vnd.epson.msf\":[\"msf\"],\"application/vnd.epson.quickanime\":[\"qam\"],\"application/vnd.epson.salt\":[\"slt\"],\"application/vnd.epson.ssf\":[\"ssf\"],\"application/vnd.eszigno3+xml\":[\"es3\",\"et3\"],\"application/vnd.ezpix-album\":[\"ez2\"],\"application/vnd.ezpix-package\":[\"ez3\"],\"application/vnd.fdf\":[\"fdf\"],\"application/vnd.fdsn.mseed\":[\"mseed\"],\"application/vnd.fdsn.seed\":[\"seed\",\"dataless\"],\"application/vnd.flographit\":[\"gph\"],\"application/vnd.fluxtime.clip\":[\"ftc\"],\"application/vnd.framemaker\":[\"fm\",\"frame\",\"maker\",\"book\"],\"application/vnd.frogans.fnc\":[\"fnc\"],\"application/vnd.frogans.ltf\":[\"ltf\"],\"application/vnd.fsc.weblaunch\":[\"fsc\"],\"application/vnd.fujitsu.oasys\":[\"oas\"],\"application/vnd.fujitsu.oasys2\":[\"oa2\"],\"application/vnd.fujitsu.oasys3\":[\"oa3\"],\"application/vnd.fujitsu.oasysgp\":[\"fg5\"],\"application/vnd.fujitsu.oasysprs\":[\"bh2\"],\"application/vnd.fujixerox.ddd\":[\"ddd\"],\"application/vnd.fujixerox.docuworks\":[\"xdw\"],\"application/vnd.fujixerox.docuworks.binder\":[\"xbd\"],\"application/vnd.fuzzysheet\":[\"fzs\"],\"application/vnd.genomatix.tuxedo\":[\"txd\"],\"application/vnd.geogebra.file\":[\"ggb\"],\"application/vnd.geogebra.tool\":[\"ggt\"],\"application/vnd.geometry-explorer\":[\"gex\",\"gre\"],\"application/vnd.geonext\":[\"gxt\"],\"application/vnd.geoplan\":[\"g2w\"],\"application/vnd.geospace\":[\"g3w\"],\"application/vnd.gmx\":[\"gmx\"],\"application/vnd.google-apps.document\":[\"gdoc\"],\"application/vnd.google-apps.presentation\":[\"gslides\"],\"application/vnd.google-apps.spreadsheet\":[\"gsheet\"],\"application/vnd.google-earth.kml+xml\":[\"kml\"],\"application/vnd.google-earth.kmz\":[\"kmz\"],\"application/vnd.grafeq\":[\"gqf\",\"gqs\"],\"application/vnd.groove-account\":[\"gac\"],\"application/vnd.groove-help\":[\"ghf\"],\"application/vnd.groove-identity-message\":[\"gim\"],\"application/vnd.groove-injector\":[\"grv\"],\"application/vnd.groove-tool-message\":[\"gtm\"],\"application/vnd.groove-tool-template\":[\"tpl\"],\"application/vnd.groove-vcard\":[\"vcg\"],\"application/vnd.hal+xml\":[\"hal\"],\"application/vnd.handheld-entertainment+xml\":[\"zmm\"],\"application/vnd.hbci\":[\"hbci\"],\"application/vnd.hhe.lesson-player\":[\"les\"],\"application/vnd.hp-hpgl\":[\"hpgl\"],\"application/vnd.hp-hpid\":[\"hpid\"],\"application/vnd.hp-hps\":[\"hps\"],\"application/vnd.hp-jlyt\":[\"jlt\"],\"application/vnd.hp-pcl\":[\"pcl\"],\"application/vnd.hp-pclxl\":[\"pclxl\"],\"application/vnd.hydrostatix.sof-data\":[\"sfd-hdstx\"],\"application/vnd.ibm.minipay\":[\"mpy\"],\"application/vnd.ibm.modcap\":[\"afp\",\"listafp\",\"list3820\"],\"application/vnd.ibm.rights-management\":[\"irm\"],\"application/vnd.ibm.secure-container\":[\"sc\"],\"application/vnd.iccprofile\":[\"icc\",\"icm\"],\"application/vnd.igloader\":[\"igl\"],\"application/vnd.immervision-ivp\":[\"ivp\"],\"application/vnd.immervision-ivu\":[\"ivu\"],\"application/vnd.insors.igm\":[\"igm\"],\"application/vnd.intercon.formnet\":[\"xpw\",\"xpx\"],\"application/vnd.intergeo\":[\"i2g\"],\"application/vnd.intu.qbo\":[\"qbo\"],\"application/vnd.intu.qfx\":[\"qfx\"],\"application/vnd.ipunplugged.rcprofile\":[\"rcprofile\"],\"application/vnd.irepository.package+xml\":[\"irp\"],
\"application/vnd.is-xpr\":[\"xpr\"],\"application/vnd.isac.fcs\":[\"fcs\"],\"application/vnd.jam\":[\"jam\"],\"application/vnd.jcp.javame.midlet-rms\":[\"rms\"],\"application/vnd.jisp\":[\"jisp\"],\"application/vnd.joost.joda-archive\":[\"joda\"],\"application/vnd.kahootz\":[\"ktz\",\"ktr\"],\"application/vnd.kde.karbon\":[\"karbon\"],\"application/vnd.kde.kchart\":[\"chrt\"],\"application/vnd.kde.kformula\":[\"kfo\"],\"application/vnd.kde.kivio\":[\"flw\"],\"application/vnd.kde.kontour\":[\"kon\"],\"application/vnd.kde.kpresenter\":[\"kpr\",\"kpt\"],\"application/vnd.kde.kspread\":[\"ksp\"],\"application/vnd.kde.kword\":[\"kwd\",\"kwt\"],\"application/vnd.kenameaapp\":[\"htke\"],\"application/vnd.kidspiration\":[\"kia\"],\"application/vnd.kinar\":[\"kne\",\"knp\"],\"application/vnd.koan\":[\"skp\",\"skd\",\"skt\",\"skm\"],\"application/vnd.kodak-descriptor\":[\"sse\"],\"application/vnd.las.las+xml\":[\"lasxml\"],\"application/vnd.llamagraphics.life-balance.desktop\":[\"lbd\"],\"application/vnd.llamagraphics.life-balance.exchange+xml\":[\"lbe\"],\"application/vnd.lotus-1-2-3\":[\"123\"],\"application/vnd.lotus-approach\":[\"apr\"],\"application/vnd.lotus-freelance\":[\"pre\"],\"application/vnd.lotus-notes\":[\"nsf\"],\"application/vnd.lotus-organizer\":[\"org\"],\"application/vnd.lotus-screencam\":[\"scm\"],\"application/vnd.lotus-wordpro\":[\"lwp\"],\"application/vnd.macports.portpkg\":[\"portpkg\"],\"application/vnd.mapbox-vector-tile\":[\"mvt\"],\"application/vnd.mcd\":[\"mcd\"],\"application/vnd.medcalcdata\":[\"mc1\"],\"application/vnd.mediastation.cdkey\":[\"cdkey\"],\"application/vnd.mfer\":[\"mwf\"],\"application/vnd.mfmp\":[\"mfm\"],\"application/vnd.micrografx.flo\":[\"flo\"],\"application/vnd.micrografx.igx\":[\"igx\"],\"application/vnd.mif\":[\"mif\"],\"application/vnd.mobius.daf\":[\"daf\"],\"application/vnd.mobius.dis\":[\"dis\"],\"application/vnd.mobius.mbk\":[\"mbk\"],\"application/vnd.mobius.mqy\":[\"mqy\"],\"application/vnd.mobius.msl\":[\"msl\"],\"application/vnd.mobius.plc\":[\"plc\"],\"application/vnd.mobius.txf\":[\"txf\"],\"application/vnd.mophun.application\":[\"mpn\"],\"application/vnd.mophun.certificate\":[\"mpc\"],\"application/vnd.mozilla.xul+xml\":[\"xul\"],\"application/vnd.ms-artgalry\":[\"cil\"],\"application/vnd.ms-cab-compressed\":[\"cab\"],\"application/vnd.ms-excel\":[\"xls\",\"xlm\",\"xla\",\"xlc\",\"xlt\",\"xlw\"],\"application/vnd.ms-excel.addin.macroenabled.12\":[\"xlam\"],\"application/vnd.ms-excel.sheet.binary.macroenabled.12\":[\"xlsb\"],\"application/vnd.ms-excel.sheet.macroenabled.12\":[\"xlsm\"],\"application/vnd.ms-excel.template.macroenabled.12\":[\"xltm\"],\"application/vnd.ms-fontobject\":[\"eot\"],\"application/vnd.ms-htmlhelp\":[\"chm\"],\"application/vnd.ms-ims\":[\"ims\"],\"application/vnd.ms-lrm\":[\"lrm\"],\"application/vnd.ms-officetheme\":[\"thmx\"],\"application/vnd.ms-outlook\":[\"msg\"],\"application/vnd.ms-pki.seccat\":[\"cat\"],\"application/vnd.ms-pki.stl\":[\"*stl\"],\"application/vnd.ms-powerpoint\":[\"ppt\",\"pps\",\"pot\"],\"application/vnd.ms-powerpoint.addin.macroenabled.12\":[\"ppam\"],\"application/vnd.ms-powerpoint.presentation.macroenabled.12\":[\"pptm\"],\"application/vnd.ms-powerpoint.slide.macroenabled.12\":[\"sldm\"],\"application/vnd.ms-powerpoint.slideshow.macroenabled.12\":[\"ppsm\"],\"application/vnd.ms-powerpoint.template.macroenabled.12\":[\"potm\"],\"application/vnd.ms-project\":[\"mpp\",\"mpt\"],\"application/vnd.ms-word.document.macroenabled.12\":[\"docm\"],\"application/vnd.ms-word.template.macroenabled
.12\":[\"dotm\"],\"application/vnd.ms-works\":[\"wps\",\"wks\",\"wcm\",\"wdb\"],\"application/vnd.ms-wpl\":[\"wpl\"],\"application/vnd.ms-xpsdocument\":[\"xps\"],\"application/vnd.mseq\":[\"mseq\"],\"application/vnd.musician\":[\"mus\"],\"application/vnd.muvee.style\":[\"msty\"],\"application/vnd.mynfc\":[\"taglet\"],\"application/vnd.neurolanguage.nlu\":[\"nlu\"],\"application/vnd.nitf\":[\"ntf\",\"nitf\"],\"application/vnd.noblenet-directory\":[\"nnd\"],\"application/vnd.noblenet-sealer\":[\"nns\"],\"application/vnd.noblenet-web\":[\"nnw\"],\"application/vnd.nokia.n-gage.ac+xml\":[\"*ac\"],\"application/vnd.nokia.n-gage.data\":[\"ngdat\"],\"application/vnd.nokia.n-gage.symbian.install\":[\"n-gage\"],\"application/vnd.nokia.radio-preset\":[\"rpst\"],\"application/vnd.nokia.radio-presets\":[\"rpss\"],\"application/vnd.novadigm.edm\":[\"edm\"],\"application/vnd.novadigm.edx\":[\"edx\"],\"application/vnd.novadigm.ext\":[\"ext\"],\"application/vnd.oasis.opendocument.chart\":[\"odc\"],\"application/vnd.oasis.opendocument.chart-template\":[\"otc\"],\"application/vnd.oasis.opendocument.database\":[\"odb\"],\"application/vnd.oasis.opendocument.formula\":[\"odf\"],\"application/vnd.oasis.opendocument.formula-template\":[\"odft\"],\"application/vnd.oasis.opendocument.graphics\":[\"odg\"],\"application/vnd.oasis.opendocument.graphics-template\":[\"otg\"],\"application/vnd.oasis.opendocument.image\":[\"odi\"],\"application/vnd.oasis.opendocument.image-template\":[\"oti\"],\"application/vnd.oasis.opendocument.presentation\":[\"odp\"],\"application/vnd.oasis.opendocument.presentation-template\":[\"otp\"],\"application/vnd.oasis.opendocument.spreadsheet\":[\"ods\"],\"application/vnd.oasis.opendocument.spreadsheet-template\":[\"ots\"],\"application/vnd.oasis.opendocument.text\":[\"odt\"],\"application/vnd.oasis.opendocument.text-master\":[\"odm\"],\"application/vnd.oasis.opendocument.text-template\":[\"ott\"],\"application/vnd.oasis.opendocument.text-web\":[\"oth\"],\"application/vnd.olpc-sugar\":[\"xo\"],\"application/vnd.oma.dd2+xml\":[\"dd2\"],\"application/vnd.openblox.game+xml\":[\"obgx\"],\"application/vnd.openofficeorg.extension\":[\"oxt\"],\"application/vnd.openstreetmap.data+xml\":[\"osm\"],\"application/vnd.openxmlformats-officedocument.presentationml.presentation\":[\"pptx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slide\":[\"sldx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slideshow\":[\"ppsx\"],\"application/vnd.openxmlformats-officedocument.presentationml.template\":[\"potx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\":[\"xlsx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.template\":[\"xltx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.document\":[\"docx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.template\":[\"dotx\"],\"application/vnd.osgeo.mapguide.package\":[\"mgp\"],\"application/vnd.osgi.dp\":[\"dp\"],\"application/vnd.osgi.subsystem\":[\"esa\"],\"application/vnd.palm\":[\"pdb\",\"pqa\",\"oprc\"],\"application/vnd.pawaafile\":[\"paw\"],\"application/vnd.pg.format\":[\"str\"],\"application/vnd.pg.osasli\":[\"ei6\"],\"application/vnd.picsel\":[\"efif\"],\"application/vnd.pmi.widget\":[\"wg\"],\"application/vnd.pocketlearn\":[\"plf\"],\"application/vnd.powerbuilder6\":[\"pbd\"],\"application/vnd.previewsystems.box\":[\"box\"],\"application/vnd.proteus.magazine\":[\"mgz\"],\"application/vnd.publishare-delta-tree\":[\"qps\"],\"application/vnd.pvi.ptid1\":[
\"ptid\"],\"application/vnd.quark.quarkxpress\":[\"qxd\",\"qxt\",\"qwd\",\"qwt\",\"qxl\",\"qxb\"],\"application/vnd.rar\":[\"rar\"],\"application/vnd.realvnc.bed\":[\"bed\"],\"application/vnd.recordare.musicxml\":[\"mxl\"],\"application/vnd.recordare.musicxml+xml\":[\"musicxml\"],\"application/vnd.rig.cryptonote\":[\"cryptonote\"],\"application/vnd.rim.cod\":[\"cod\"],\"application/vnd.rn-realmedia\":[\"rm\"],\"application/vnd.rn-realmedia-vbr\":[\"rmvb\"],\"application/vnd.route66.link66+xml\":[\"link66\"],\"application/vnd.sailingtracker.track\":[\"st\"],\"application/vnd.seemail\":[\"see\"],\"application/vnd.sema\":[\"sema\"],\"application/vnd.semd\":[\"semd\"],\"application/vnd.semf\":[\"semf\"],\"application/vnd.shana.informed.formdata\":[\"ifm\"],\"application/vnd.shana.informed.formtemplate\":[\"itp\"],\"application/vnd.shana.informed.interchange\":[\"iif\"],\"application/vnd.shana.informed.package\":[\"ipk\"],\"application/vnd.simtech-mindmapper\":[\"twd\",\"twds\"],\"application/vnd.smaf\":[\"mmf\"],\"application/vnd.smart.teacher\":[\"teacher\"],\"application/vnd.software602.filler.form+xml\":[\"fo\"],\"application/vnd.solent.sdkm+xml\":[\"sdkm\",\"sdkd\"],\"application/vnd.spotfire.dxp\":[\"dxp\"],\"application/vnd.spotfire.sfs\":[\"sfs\"],\"application/vnd.stardivision.calc\":[\"sdc\"],\"application/vnd.stardivision.draw\":[\"sda\"],\"application/vnd.stardivision.impress\":[\"sdd\"],\"application/vnd.stardivision.math\":[\"smf\"],\"application/vnd.stardivision.writer\":[\"sdw\",\"vor\"],\"application/vnd.stardivision.writer-global\":[\"sgl\"],\"application/vnd.stepmania.package\":[\"smzip\"],\"application/vnd.stepmania.stepchart\":[\"sm\"],\"application/vnd.sun.wadl+xml\":[\"wadl\"],\"application/vnd.sun.xml.calc\":[\"sxc\"],\"application/vnd.sun.xml.calc.template\":[\"stc\"],\"application/vnd.sun.xml.draw\":[\"sxd\"],\"application/vnd.sun.xml.draw.template\":[\"std\"],\"application/vnd.sun.xml.impress\":[\"sxi\"],\"application/vnd.sun.xml.impress.template\":[\"sti\"],\"application/vnd.sun.xml.math\":[\"sxm\"],\"application/vnd.sun.xml.writer\":[\"sxw\"],\"application/vnd.sun.xml.writer.global\":[\"sxg\"],\"application/vnd.sun.xml.writer.template\":[\"stw\"],\"application/vnd.sus-calendar\":[\"sus\",\"susp\"],\"application/vnd.svd\":[\"svd\"],\"application/vnd.symbian.install\":[\"sis\",\"sisx\"],\"application/vnd.syncml+xml\":[\"xsm\"],\"application/vnd.syncml.dm+wbxml\":[\"bdm\"],\"application/vnd.syncml.dm+xml\":[\"xdm\"],\"application/vnd.syncml.dmddf+xml\":[\"ddf\"],\"application/vnd.tao.intent-module-archive\":[\"tao\"],\"application/vnd.tcpdump.pcap\":[\"pcap\",\"cap\",\"dmp\"],\"application/vnd.tmobile-livetv\":[\"tmo\"],\"application/vnd.trid.tpt\":[\"tpt\"],\"application/vnd.triscape.mxs\":[\"mxs\"],\"application/vnd.trueapp\":[\"tra\"],\"application/vnd.ufdl\":[\"ufd\",\"ufdl\"],\"application/vnd.uiq.theme\":[\"utz\"],\"application/vnd.umajin\":[\"umj\"],\"application/vnd.unity\":[\"unityweb\"],\"application/vnd.uoml+xml\":[\"uoml\"],\"application/vnd.vcx\":[\"vcx\"],\"application/vnd.visio\":[\"vsd\",\"vst\",\"vss\",\"vsw\"],\"application/vnd.visionary\":[\"vis\"],\"application/vnd.vsf\":[\"vsf\"],\"application/vnd.wap.wbxml\":[\"wbxml\"],\"application/vnd.wap.wmlc\":[\"wmlc\"],\"application/vnd.wap.wmlscriptc\":[\"wmlsc\"],\"application/vnd.webturbo\":[\"wtb\"],\"application/vnd.wolfram.player\":[\"nbp\"],\"application/vnd.wordperfect\":[\"wpd\"],\"application/vnd.wqd\":[\"wqd\"],\"application/vnd.wt.stf\":[\"stf\"],\"application/vnd.xara\":[\"xar\"],\"application/v
nd.xfdl\":[\"xfdl\"],\"application/vnd.yamaha.hv-dic\":[\"hvd\"],\"application/vnd.yamaha.hv-script\":[\"hvs\"],\"application/vnd.yamaha.hv-voice\":[\"hvp\"],\"application/vnd.yamaha.openscoreformat\":[\"osf\"],\"application/vnd.yamaha.openscoreformat.osfpvg+xml\":[\"osfpvg\"],\"application/vnd.yamaha.smaf-audio\":[\"saf\"],\"application/vnd.yamaha.smaf-phrase\":[\"spf\"],\"application/vnd.yellowriver-custom-menu\":[\"cmp\"],\"application/vnd.zul\":[\"zir\",\"zirz\"],\"application/vnd.zzazz.deck+xml\":[\"zaz\"],\"application/x-7z-compressed\":[\"7z\"],\"application/x-abiword\":[\"abw\"],\"application/x-ace-compressed\":[\"ace\"],\"application/x-apple-diskimage\":[\"*dmg\"],\"application/x-arj\":[\"arj\"],\"application/x-authorware-bin\":[\"aab\",\"x32\",\"u32\",\"vox\"],\"application/x-authorware-map\":[\"aam\"],\"application/x-authorware-seg\":[\"aas\"],\"application/x-bcpio\":[\"bcpio\"],\"application/x-bdoc\":[\"*bdoc\"],\"application/x-bittorrent\":[\"torrent\"],\"application/x-blorb\":[\"blb\",\"blorb\"],\"application/x-bzip\":[\"bz\"],\"application/x-bzip2\":[\"bz2\",\"boz\"],\"application/x-cbr\":[\"cbr\",\"cba\",\"cbt\",\"cbz\",\"cb7\"],\"application/x-cdlink\":[\"vcd\"],\"application/x-cfs-compressed\":[\"cfs\"],\"application/x-chat\":[\"chat\"],\"application/x-chess-pgn\":[\"pgn\"],\"application/x-chrome-extension\":[\"crx\"],\"application/x-cocoa\":[\"cco\"],\"application/x-conference\":[\"nsc\"],\"application/x-cpio\":[\"cpio\"],\"application/x-csh\":[\"csh\"],\"application/x-debian-package\":[\"*deb\",\"udeb\"],\"application/x-dgc-compressed\":[\"dgc\"],\"application/x-director\":[\"dir\",\"dcr\",\"dxr\",\"cst\",\"cct\",\"cxt\",\"w3d\",\"fgd\",\"swa\"],\"application/x-doom\":[\"wad\"],\"application/x-dtbncx+xml\":[\"ncx\"],\"application/x-dtbook+xml\":[\"dtb\"],\"application/x-dtbresource+xml\":[\"res\"],\"application/x-dvi\":[\"dvi\"],\"application/x-envoy\":[\"evy\"],\"application/x-eva\":[\"eva\"],\"application/x-font-bdf\":[\"bdf\"],\"application/x-font-ghostscript\":[\"gsf\"],\"application/x-font-linux-psf\":[\"psf\"],\"application/x-font-pcf\":[\"pcf\"],\"application/x-font-snf\":[\"snf\"],\"application/x-font-type1\":[\"pfa\",\"pfb\",\"pfm\",\"afm\"],\"application/x-freearc\":[\"arc\"],\"application/x-futuresplash\":[\"spl\"],\"application/x-gca-compressed\":[\"gca\"],\"application/x-glulx\":[\"ulx\"],\"application/x-gnumeric\":[\"gnumeric\"],\"application/x-gramps-xml\":[\"gramps\"],\"application/x-gtar\":[\"gtar\"],\"application/x-hdf\":[\"hdf\"],\"application/x-httpd-php\":[\"php\"],\"application/x-install-instructions\":[\"install\"],\"application/x-iso9660-image\":[\"*iso\"],\"application/x-iwork-keynote-sffkey\":[\"*key\"],\"application/x-iwork-numbers-sffnumbers\":[\"*numbers\"],\"application/x-iwork-pages-sffpages\":[\"*pages\"],\"application/x-java-archive-diff\":[\"jardiff\"],\"application/x-java-jnlp-file\":[\"jnlp\"],\"application/x-keepass2\":[\"kdbx\"],\"application/x-latex\":[\"latex\"],\"application/x-lua-bytecode\":[\"luac\"],\"application/x-lzh-compressed\":[\"lzh\",\"lha\"],\"application/x-makeself\":[\"run\"],\"application/x-mie\":[\"mie\"],\"application/x-mobipocket-ebook\":[\"prc\",\"mobi\"],\"application/x-ms-application\":[\"application\"],\"application/x-ms-shortcut\":[\"lnk\"],\"application/x-ms-wmd\":[\"wmd\"],\"application/x-ms-wmz\":[\"wmz\"],\"application/x-ms-xbap\":[\"xbap\"],\"application/x-msaccess\":[\"mdb\"],\"application/x-msbinder\":[\"obd\"],\"application/x-mscardfile\":[\"crd\"],\"application/x-msclip\":[\"clp\"],\"application/x-ms
dos-program\":[\"*exe\"],\"application/x-msdownload\":[\"*exe\",\"*dll\",\"com\",\"bat\",\"*msi\"],\"application/x-msmediaview\":[\"mvb\",\"m13\",\"m14\"],\"application/x-msmetafile\":[\"*wmf\",\"*wmz\",\"*emf\",\"emz\"],\"application/x-msmoney\":[\"mny\"],\"application/x-mspublisher\":[\"pub\"],\"application/x-msschedule\":[\"scd\"],\"application/x-msterminal\":[\"trm\"],\"application/x-mswrite\":[\"wri\"],\"application/x-netcdf\":[\"nc\",\"cdf\"],\"application/x-ns-proxy-autoconfig\":[\"pac\"],\"application/x-nzb\":[\"nzb\"],\"application/x-perl\":[\"pl\",\"pm\"],\"application/x-pilot\":[\"*prc\",\"*pdb\"],\"application/x-pkcs12\":[\"p12\",\"pfx\"],\"application/x-pkcs7-certificates\":[\"p7b\",\"spc\"],\"application/x-pkcs7-certreqresp\":[\"p7r\"],\"application/x-rar-compressed\":[\"*rar\"],\"application/x-redhat-package-manager\":[\"rpm\"],\"application/x-research-info-systems\":[\"ris\"],\"application/x-sea\":[\"sea\"],\"application/x-sh\":[\"sh\"],\"application/x-shar\":[\"shar\"],\"application/x-shockwave-flash\":[\"swf\"],\"application/x-silverlight-app\":[\"xap\"],\"application/x-sql\":[\"sql\"],\"application/x-stuffit\":[\"sit\"],\"application/x-stuffitx\":[\"sitx\"],\"application/x-subrip\":[\"srt\"],\"application/x-sv4cpio\":[\"sv4cpio\"],\"application/x-sv4crc\":[\"sv4crc\"],\"application/x-t3vm-image\":[\"t3\"],\"application/x-tads\":[\"gam\"],\"application/x-tar\":[\"tar\"],\"application/x-tcl\":[\"tcl\",\"tk\"],\"application/x-tex\":[\"tex\"],\"application/x-tex-tfm\":[\"tfm\"],\"application/x-texinfo\":[\"texinfo\",\"texi\"],\"application/x-tgif\":[\"*obj\"],\"application/x-ustar\":[\"ustar\"],\"application/x-virtualbox-hdd\":[\"hdd\"],\"application/x-virtualbox-ova\":[\"ova\"],\"application/x-virtualbox-ovf\":[\"ovf\"],\"application/x-virtualbox-vbox\":[\"vbox\"],\"application/x-virtualbox-vbox-extpack\":[\"vbox-extpack\"],\"application/x-virtualbox-vdi\":[\"vdi\"],\"application/x-virtualbox-vhd\":[\"vhd\"],\"application/x-virtualbox-vmdk\":[\"vmdk\"],\"application/x-wais-source\":[\"src\"],\"application/x-web-app-manifest+json\":[\"webapp\"],\"application/x-x509-ca-cert\":[\"der\",\"crt\",\"pem\"],\"application/x-xfig\":[\"fig\"],\"application/x-xliff+xml\":[\"*xlf\"],\"application/x-xpinstall\":[\"xpi\"],\"application/x-xz\":[\"xz\"],\"application/x-zmachine\":[\"z1\",\"z2\",\"z3\",\"z4\",\"z5\",\"z6\",\"z7\",\"z8\"],\"audio/vnd.dece.audio\":[\"uva\",\"uvva\"],\"audio/vnd.digital-winds\":[\"eol\"],\"audio/vnd.dra\":[\"dra\"],\"audio/vnd.dts\":[\"dts\"],\"audio/vnd.dts.hd\":[\"dtshd\"],\"audio/vnd.lucent.voice\":[\"lvp\"],\"audio/vnd.ms-playready.media.pya\":[\"pya\"],\"audio/vnd.nuera.ecelp4800\":[\"ecelp4800\"],\"audio/vnd.nuera.ecelp7470\":[\"ecelp7470\"],\"audio/vnd.nuera.ecelp9600\":[\"ecelp9600\"],\"audio/vnd.rip\":[\"rip\"],\"audio/x-aac\":[\"aac\"],\"audio/x-aiff\":[\"aif\",\"aiff\",\"aifc\"],\"audio/x-caf\":[\"caf\"],\"audio/x-flac\":[\"flac\"],\"audio/x-m4a\":[\"*m4a\"],\"audio/x-matroska\":[\"mka\"],\"audio/x-mpegurl\":[\"m3u\"],\"audio/x-ms-wax\":[\"wax\"],\"audio/x-ms-wma\":[\"wma\"],\"audio/x-pn-realaudio\":[\"ram\",\"ra\"],\"audio/x-pn-realaudio-plugin\":[\"rmp\"],\"audio/x-realaudio\":[\"*ra\"],\"audio/x-wav\":[\"*wav\"],\"chemical/x-cdx\":[\"cdx\"],\"chemical/x-cif\":[\"cif\"],\"chemical/x-cmdf\":[\"cmdf\"],\"chemical/x-cml\":[\"cml\"],\"chemical/x-csml\":[\"csml\"],\"chemical/x-xyz\":[\"xyz\"],\"image/prs.btif\":[\"btif\"],\"image/prs.pti\":[\"pti\"],\"image/vnd.adobe.photoshop\":[\"psd\"],\"image/vnd.airzip.accelerator.azv\":[\"azv\"],\"image/vnd.dece.gr
aphic\":[\"uvi\",\"uvvi\",\"uvg\",\"uvvg\"],\"image/vnd.djvu\":[\"djvu\",\"djv\"],\"image/vnd.dvb.subtitle\":[\"*sub\"],\"image/vnd.dwg\":[\"dwg\"],\"image/vnd.dxf\":[\"dxf\"],\"image/vnd.fastbidsheet\":[\"fbs\"],\"image/vnd.fpx\":[\"fpx\"],\"image/vnd.fst\":[\"fst\"],\"image/vnd.fujixerox.edmics-mmr\":[\"mmr\"],\"image/vnd.fujixerox.edmics-rlc\":[\"rlc\"],\"image/vnd.microsoft.icon\":[\"ico\"],\"image/vnd.ms-dds\":[\"dds\"],\"image/vnd.ms-modi\":[\"mdi\"],\"image/vnd.ms-photo\":[\"wdp\"],\"image/vnd.net-fpx\":[\"npx\"],\"image/vnd.pco.b16\":[\"b16\"],\"image/vnd.tencent.tap\":[\"tap\"],\"image/vnd.valve.source.texture\":[\"vtf\"],\"image/vnd.wap.wbmp\":[\"wbmp\"],\"image/vnd.xiff\":[\"xif\"],\"image/vnd.zbrush.pcx\":[\"pcx\"],\"image/x-3ds\":[\"3ds\"],\"image/x-cmu-raster\":[\"ras\"],\"image/x-cmx\":[\"cmx\"],\"image/x-freehand\":[\"fh\",\"fhc\",\"fh4\",\"fh5\",\"fh7\"],\"image/x-icon\":[\"*ico\"],\"image/x-jng\":[\"jng\"],\"image/x-mrsid-image\":[\"sid\"],\"image/x-ms-bmp\":[\"*bmp\"],\"image/x-pcx\":[\"*pcx\"],\"image/x-pict\":[\"pic\",\"pct\"],\"image/x-portable-anymap\":[\"pnm\"],\"image/x-portable-bitmap\":[\"pbm\"],\"image/x-portable-graymap\":[\"pgm\"],\"image/x-portable-pixmap\":[\"ppm\"],\"image/x-rgb\":[\"rgb\"],\"image/x-tga\":[\"tga\"],\"image/x-xbitmap\":[\"xbm\"],\"image/x-xpixmap\":[\"xpm\"],\"image/x-xwindowdump\":[\"xwd\"],\"message/vnd.wfa.wsc\":[\"wsc\"],\"model/vnd.collada+xml\":[\"dae\"],\"model/vnd.dwf\":[\"dwf\"],\"model/vnd.gdl\":[\"gdl\"],\"model/vnd.gtw\":[\"gtw\"],\"model/vnd.mts\":[\"mts\"],\"model/vnd.opengex\":[\"ogex\"],\"model/vnd.parasolid.transmit.binary\":[\"x_b\"],\"model/vnd.parasolid.transmit.text\":[\"x_t\"],\"model/vnd.sap.vds\":[\"vds\"],\"model/vnd.usdz+zip\":[\"usdz\"],\"model/vnd.valve.source.compiled-map\":[\"bsp\"],\"model/vnd.vtu\":[\"vtu\"],\"text/prs.lines.tag\":[\"dsc\"],\"text/vnd.curl\":[\"curl\"],\"text/vnd.curl.dcurl\":[\"dcurl\"],\"text/vnd.curl.mcurl\":[\"mcurl\"],\"text/vnd.curl.scurl\":[\"scurl\"],\"text/vnd.dvb.subtitle\":[\"sub\"],\"text/vnd.fly\":[\"fly\"],\"text/vnd.fmi.flexstor\":[\"flx\"],\"text/vnd.graphviz\":[\"gv\"],\"text/vnd.in3d.3dml\":[\"3dml\"],\"text/vnd.in3d.spot\":[\"spot\"],\"text/vnd.sun.j2me.app-descriptor\":[\"jad\"],\"text/vnd.wap.wml\":[\"wml\"],\"text/vnd.wap.wmlscript\":[\"wmls\"],\"text/x-asm\":[\"s\",\"asm\"],\"text/x-c\":[\"c\",\"cc\",\"cxx\",\"cpp\",\"h\",\"hh\",\"dic\"],\"text/x-component\":[\"htc\"],\"text/x-fortran\":[\"f\",\"for\",\"f77\",\"f90\"],\"text/x-handlebars-template\":[\"hbs\"],\"text/x-java-source\":[\"java\"],\"text/x-lua\":[\"lua\"],\"text/x-markdown\":[\"mkd\"],\"text/x-nfo\":[\"nfo\"],\"text/x-opml\":[\"opml\"],\"text/x-org\":[\"*org\"],\"text/x-pascal\":[\"p\",\"pas\"],\"text/x-processing\":[\"pde\"],\"text/x-sass\":[\"sass\"],\"text/x-scss\":[\"scss\"],\"text/x-setext\":[\"etx\"],\"text/x-sfv\":[\"sfv\"],\"text/x-suse-ymp\":[\"ymp\"],\"text/x-uuencode\":[\"uu\"],\"text/x-vcalendar\":[\"vcs\"],\"text/x-vcard\":[\"vcf\"],\"video/vnd.dece.hd\":[\"uvh\",\"uvvh\"],\"video/vnd.dece.mobile\":[\"uvm\",\"uvvm\"],\"video/vnd.dece.pd\":[\"uvp\",\"uvvp\"],\"video/vnd.dece.sd\":[\"uvs\",\"uvvs\"],\"video/vnd.dece.video\":[\"uvv\",\"uvvv\"],\"video/vnd.dvb.file\":[\"dvb\"],\"video/vnd.fvt\":[\"fvt\"],\"video/vnd.mpegurl\":[\"mxu\",\"m4u\"],\"video/vnd.ms-playready.media.pyv\":[\"pyv\"],\"video/vnd.uvvu.mp4\":[\"uvu\",\"uvvu\"],\"video/vnd.vivo\":[\"viv\"],\"video/x-f4v\":[\"f4v\"],\"video/x-fli\":[\"fli\"],\"video/x-flv\":[\"flv\"],\"video/x-m4v\":[\"m4v\"],\"video/x-matroska\":[\"mkv\",\"mk3d\",\
"mks\"],\"video/x-mng\":[\"mng\"],\"video/x-ms-asf\":[\"asf\",\"asx\"],\"video/x-ms-vob\":[\"vob\"],\"video/x-ms-wm\":[\"wm\"],\"video/x-ms-wmv\":[\"wmv\"],\"video/x-ms-wmx\":[\"wmx\"],\"video/x-ms-wvx\":[\"wvx\"],\"video/x-msvideo\":[\"avi\"],\"video/x-sgi-movie\":[\"movie\"],\"video/x-smv\":[\"smv\"],\"x-conference/x-cooltalk\":[\"ice\"]};","module.exports = {\"application/andrew-inset\":[\"ez\"],\"application/applixware\":[\"aw\"],\"application/atom+xml\":[\"atom\"],\"application/atomcat+xml\":[\"atomcat\"],\"application/atomdeleted+xml\":[\"atomdeleted\"],\"application/atomsvc+xml\":[\"atomsvc\"],\"application/atsc-dwd+xml\":[\"dwd\"],\"application/atsc-held+xml\":[\"held\"],\"application/atsc-rsat+xml\":[\"rsat\"],\"application/bdoc\":[\"bdoc\"],\"application/calendar+xml\":[\"xcs\"],\"application/ccxml+xml\":[\"ccxml\"],\"application/cdfx+xml\":[\"cdfx\"],\"application/cdmi-capability\":[\"cdmia\"],\"application/cdmi-container\":[\"cdmic\"],\"application/cdmi-domain\":[\"cdmid\"],\"application/cdmi-object\":[\"cdmio\"],\"application/cdmi-queue\":[\"cdmiq\"],\"application/cu-seeme\":[\"cu\"],\"application/dash+xml\":[\"mpd\"],\"application/davmount+xml\":[\"davmount\"],\"application/docbook+xml\":[\"dbk\"],\"application/dssc+der\":[\"dssc\"],\"application/dssc+xml\":[\"xdssc\"],\"application/ecmascript\":[\"es\",\"ecma\"],\"application/emma+xml\":[\"emma\"],\"application/emotionml+xml\":[\"emotionml\"],\"application/epub+zip\":[\"epub\"],\"application/exi\":[\"exi\"],\"application/express\":[\"exp\"],\"application/fdt+xml\":[\"fdt\"],\"application/font-tdpfr\":[\"pfr\"],\"application/geo+json\":[\"geojson\"],\"application/gml+xml\":[\"gml\"],\"application/gpx+xml\":[\"gpx\"],\"application/gxf\":[\"gxf\"],\"application/gzip\":[\"gz\"],\"application/hjson\":[\"hjson\"],\"application/hyperstudio\":[\"stk\"],\"application/inkml+xml\":[\"ink\",\"inkml\"],\"application/ipfix\":[\"ipfix\"],\"application/its+xml\":[\"its\"],\"application/java-archive\":[\"jar\",\"war\",\"ear\"],\"application/java-serialized-object\":[\"ser\"],\"application/java-vm\":[\"class\"],\"application/javascript\":[\"js\",\"mjs\"],\"application/json\":[\"json\",\"map\"],\"application/json5\":[\"json5\"],\"application/jsonml+json\":[\"jsonml\"],\"application/ld+json\":[\"jsonld\"],\"application/lgr+xml\":[\"lgr\"],\"application/lost+xml\":[\"lostxml\"],\"application/mac-binhex40\":[\"hqx\"],\"application/mac-compactpro\":[\"cpt\"],\"application/mads+xml\":[\"mads\"],\"application/manifest+json\":[\"webmanifest\"],\"application/marc\":[\"mrc\"],\"application/marcxml+xml\":[\"mrcx\"],\"application/mathematica\":[\"ma\",\"nb\",\"mb\"],\"application/mathml+xml\":[\"mathml\"],\"application/mbox\":[\"mbox\"],\"application/mediaservercontrol+xml\":[\"mscml\"],\"application/metalink+xml\":[\"metalink\"],\"application/metalink4+xml\":[\"meta4\"],\"application/mets+xml\":[\"mets\"],\"application/mmt-aei+xml\":[\"maei\"],\"application/mmt-usd+xml\":[\"musd\"],\"application/mods+xml\":[\"mods\"],\"application/mp21\":[\"m21\",\"mp21\"],\"application/mp4\":[\"mp4s\",\"m4p\"],\"application/msword\":[\"doc\",\"dot\"],\"application/mxf\":[\"mxf\"],\"application/n-quads\":[\"nq\"],\"application/n-triples\":[\"nt\"],\"application/node\":[\"cjs\"],\"application/octet-stream\":[\"bin\",\"dms\",\"lrf\",\"mar\",\"so\",\"dist\",\"distz\",\"pkg\",\"bpk\",\"dump\",\"elc\",\"deploy\",\"exe\",\"dll\",\"deb\",\"dmg\",\"iso\",\"img\",\"msi\",\"msp\",\"msm\",\"buffer\"],\"application/oda\":[\"oda\"],\"application/oebps-package+xml\":[\"opf\"],\"applic
ation/ogg\":[\"ogx\"],\"application/omdoc+xml\":[\"omdoc\"],\"application/onenote\":[\"onetoc\",\"onetoc2\",\"onetmp\",\"onepkg\"],\"application/oxps\":[\"oxps\"],\"application/p2p-overlay+xml\":[\"relo\"],\"application/patch-ops-error+xml\":[\"xer\"],\"application/pdf\":[\"pdf\"],\"application/pgp-encrypted\":[\"pgp\"],\"application/pgp-signature\":[\"asc\",\"sig\"],\"application/pics-rules\":[\"prf\"],\"application/pkcs10\":[\"p10\"],\"application/pkcs7-mime\":[\"p7m\",\"p7c\"],\"application/pkcs7-signature\":[\"p7s\"],\"application/pkcs8\":[\"p8\"],\"application/pkix-attr-cert\":[\"ac\"],\"application/pkix-cert\":[\"cer\"],\"application/pkix-crl\":[\"crl\"],\"application/pkix-pkipath\":[\"pkipath\"],\"application/pkixcmp\":[\"pki\"],\"application/pls+xml\":[\"pls\"],\"application/postscript\":[\"ai\",\"eps\",\"ps\"],\"application/provenance+xml\":[\"provx\"],\"application/pskc+xml\":[\"pskcxml\"],\"application/raml+yaml\":[\"raml\"],\"application/rdf+xml\":[\"rdf\",\"owl\"],\"application/reginfo+xml\":[\"rif\"],\"application/relax-ng-compact-syntax\":[\"rnc\"],\"application/resource-lists+xml\":[\"rl\"],\"application/resource-lists-diff+xml\":[\"rld\"],\"application/rls-services+xml\":[\"rs\"],\"application/route-apd+xml\":[\"rapd\"],\"application/route-s-tsid+xml\":[\"sls\"],\"application/route-usd+xml\":[\"rusd\"],\"application/rpki-ghostbusters\":[\"gbr\"],\"application/rpki-manifest\":[\"mft\"],\"application/rpki-roa\":[\"roa\"],\"application/rsd+xml\":[\"rsd\"],\"application/rss+xml\":[\"rss\"],\"application/rtf\":[\"rtf\"],\"application/sbml+xml\":[\"sbml\"],\"application/scvp-cv-request\":[\"scq\"],\"application/scvp-cv-response\":[\"scs\"],\"application/scvp-vp-request\":[\"spq\"],\"application/scvp-vp-response\":[\"spp\"],\"application/sdp\":[\"sdp\"],\"application/senml+xml\":[\"senmlx\"],\"application/sensml+xml\":[\"sensmlx\"],\"application/set-payment-initiation\":[\"setpay\"],\"application/set-registration-initiation\":[\"setreg\"],\"application/shf+xml\":[\"shf\"],\"application/sieve\":[\"siv\",\"sieve\"],\"application/smil+xml\":[\"smi\",\"smil\"],\"application/sparql-query\":[\"rq\"],\"application/sparql-results+xml\":[\"srx\"],\"application/srgs\":[\"gram\"],\"application/srgs+xml\":[\"grxml\"],\"application/sru+xml\":[\"sru\"],\"application/ssdl+xml\":[\"ssdl\"],\"application/ssml+xml\":[\"ssml\"],\"application/swid+xml\":[\"swidtag\"],\"application/tei+xml\":[\"tei\",\"teicorpus\"],\"application/thraud+xml\":[\"tfi\"],\"application/timestamped-data\":[\"tsd\"],\"application/toml\":[\"toml\"],\"application/trig\":[\"trig\"],\"application/ttml+xml\":[\"ttml\"],\"application/ubjson\":[\"ubj\"],\"application/urc-ressheet+xml\":[\"rsheet\"],\"application/urc-targetdesc+xml\":[\"td\"],\"application/voicexml+xml\":[\"vxml\"],\"application/wasm\":[\"wasm\"],\"application/widget\":[\"wgt\"],\"application/winhlp\":[\"hlp\"],\"application/wsdl+xml\":[\"wsdl\"],\"application/wspolicy+xml\":[\"wspolicy\"],\"application/xaml+xml\":[\"xaml\"],\"application/xcap-att+xml\":[\"xav\"],\"application/xcap-caps+xml\":[\"xca\"],\"application/xcap-diff+xml\":[\"xdf\"],\"application/xcap-el+xml\":[\"xel\"],\"application/xcap-ns+xml\":[\"xns\"],\"application/xenc+xml\":[\"xenc\"],\"application/xhtml+xml\":[\"xhtml\",\"xht\"],\"application/xliff+xml\":[\"xlf\"],\"application/xml\":[\"xml\",\"xsl\",\"xsd\",\"rng\"],\"application/xml-dtd\":[\"dtd\"],\"application/xop+xml\":[\"xop\"],\"application/xproc+xml\":[\"xpl\"],\"application/xslt+xml\":[\"*xsl\",\"xslt\"],\"application/xspf+xml\":[\"xspf\"
],\"application/xv+xml\":[\"mxml\",\"xhvml\",\"xvml\",\"xvm\"],\"application/yang\":[\"yang\"],\"application/yin+xml\":[\"yin\"],\"application/zip\":[\"zip\"],\"audio/3gpp\":[\"*3gpp\"],\"audio/adpcm\":[\"adp\"],\"audio/amr\":[\"amr\"],\"audio/basic\":[\"au\",\"snd\"],\"audio/midi\":[\"mid\",\"midi\",\"kar\",\"rmi\"],\"audio/mobile-xmf\":[\"mxmf\"],\"audio/mp3\":[\"*mp3\"],\"audio/mp4\":[\"m4a\",\"mp4a\"],\"audio/mpeg\":[\"mpga\",\"mp2\",\"mp2a\",\"mp3\",\"m2a\",\"m3a\"],\"audio/ogg\":[\"oga\",\"ogg\",\"spx\",\"opus\"],\"audio/s3m\":[\"s3m\"],\"audio/silk\":[\"sil\"],\"audio/wav\":[\"wav\"],\"audio/wave\":[\"*wav\"],\"audio/webm\":[\"weba\"],\"audio/xm\":[\"xm\"],\"font/collection\":[\"ttc\"],\"font/otf\":[\"otf\"],\"font/ttf\":[\"ttf\"],\"font/woff\":[\"woff\"],\"font/woff2\":[\"woff2\"],\"image/aces\":[\"exr\"],\"image/apng\":[\"apng\"],\"image/avif\":[\"avif\"],\"image/bmp\":[\"bmp\"],\"image/cgm\":[\"cgm\"],\"image/dicom-rle\":[\"drle\"],\"image/emf\":[\"emf\"],\"image/fits\":[\"fits\"],\"image/g3fax\":[\"g3\"],\"image/gif\":[\"gif\"],\"image/heic\":[\"heic\"],\"image/heic-sequence\":[\"heics\"],\"image/heif\":[\"heif\"],\"image/heif-sequence\":[\"heifs\"],\"image/hej2k\":[\"hej2\"],\"image/hsj2\":[\"hsj2\"],\"image/ief\":[\"ief\"],\"image/jls\":[\"jls\"],\"image/jp2\":[\"jp2\",\"jpg2\"],\"image/jpeg\":[\"jpeg\",\"jpg\",\"jpe\"],\"image/jph\":[\"jph\"],\"image/jphc\":[\"jhc\"],\"image/jpm\":[\"jpm\"],\"image/jpx\":[\"jpx\",\"jpf\"],\"image/jxr\":[\"jxr\"],\"image/jxra\":[\"jxra\"],\"image/jxrs\":[\"jxrs\"],\"image/jxs\":[\"jxs\"],\"image/jxsc\":[\"jxsc\"],\"image/jxsi\":[\"jxsi\"],\"image/jxss\":[\"jxss\"],\"image/ktx\":[\"ktx\"],\"image/ktx2\":[\"ktx2\"],\"image/png\":[\"png\"],\"image/sgi\":[\"sgi\"],\"image/svg+xml\":[\"svg\",\"svgz\"],\"image/t38\":[\"t38\"],\"image/tiff\":[\"tif\",\"tiff\"],\"image/tiff-fx\":[\"tfx\"],\"image/webp\":[\"webp\"],\"image/wmf\":[\"wmf\"],\"message/disposition-notification\":[\"disposition-notification\"],\"message/global\":[\"u8msg\"],\"message/global-delivery-status\":[\"u8dsn\"],\"message/global-disposition-notification\":[\"u8mdn\"],\"message/global-headers\":[\"u8hdr\"],\"message/rfc822\":[\"eml\",\"mime\"],\"model/3mf\":[\"3mf\"],\"model/gltf+json\":[\"gltf\"],\"model/gltf-binary\":[\"glb\"],\"model/iges\":[\"igs\",\"iges\"],\"model/mesh\":[\"msh\",\"mesh\",\"silo\"],\"model/mtl\":[\"mtl\"],\"model/obj\":[\"obj\"],\"model/step+xml\":[\"stpx\"],\"model/step+zip\":[\"stpz\"],\"model/step-xml+zip\":[\"stpxz\"],\"model/stl\":[\"stl\"],\"model/vrml\":[\"wrl\",\"vrml\"],\"model/x3d+binary\":[\"*x3db\",\"x3dbz\"],\"model/x3d+fastinfoset\":[\"x3db\"],\"model/x3d+vrml\":[\"*x3dv\",\"x3dvz\"],\"model/x3d+xml\":[\"x3d\",\"x3dz\"],\"model/x3d-vrml\":[\"x3dv\"],\"text/cache-manifest\":[\"appcache\",\"manifest\"],\"text/calendar\":[\"ics\",\"ifb\"],\"text/coffeescript\":[\"coffee\",\"litcoffee\"],\"text/css\":[\"css\"],\"text/csv\":[\"csv\"],\"text/html\":[\"html\",\"htm\",\"shtml\"],\"text/jade\":[\"jade\"],\"text/jsx\":[\"jsx\"],\"text/less\":[\"less\"],\"text/markdown\":[\"markdown\",\"md\"],\"text/mathml\":[\"mml\"],\"text/mdx\":[\"mdx\"],\"text/n3\":[\"n3\"],\"text/plain\":[\"txt\",\"text\",\"conf\",\"def\",\"list\",\"log\",\"in\",\"ini\"],\"text/richtext\":[\"rtx\"],\"text/rtf\":[\"*rtf\"],\"text/sgml\":[\"sgml\",\"sgm\"],\"text/shex\":[\"shex\"],\"text/slim\":[\"slim\",\"slm\"],\"text/spdx\":[\"spdx\"],\"text/stylus\":[\"stylus\",\"styl\"],\"text/tab-separated-values\":[\"tsv\"],\"text/troff\":[\"t\",\"tr\",\"roff\",\"man\",\"me\",\"ms\"],\"text/turtle\"
:[\"ttl\"],\"text/uri-list\":[\"uri\",\"uris\",\"urls\"],\"text/vcard\":[\"vcard\"],\"text/vtt\":[\"vtt\"],\"text/xml\":[\"*xml\"],\"text/yaml\":[\"yaml\",\"yml\"],\"video/3gpp\":[\"3gp\",\"3gpp\"],\"video/3gpp2\":[\"3g2\"],\"video/h261\":[\"h261\"],\"video/h263\":[\"h263\"],\"video/h264\":[\"h264\"],\"video/iso.segment\":[\"m4s\"],\"video/jpeg\":[\"jpgv\"],\"video/jpm\":[\"*jpm\",\"jpgm\"],\"video/mj2\":[\"mj2\",\"mjp2\"],\"video/mp2t\":[\"ts\"],\"video/mp4\":[\"mp4\",\"mp4v\",\"mpg4\"],\"video/mpeg\":[\"mpeg\",\"mpg\",\"mpe\",\"m1v\",\"m2v\"],\"video/ogg\":[\"ogv\"],\"video/quicktime\":[\"qt\",\"mov\"],\"video/webm\":[\"webm\"]};","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function(val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return 
Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true 
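The ms() helper bundled above converts duration strings to milliseconds and formats millisecond counts back to strings. A minimal usage sketch of that API as defined above (the ms binding is simply whatever require resolves for this module):

    ms('2h');                  // 7200000
    ms('1.5m');                // 90000
    ms('10 days');             // 864000000
    ms(60000);                 // "1m"  (short format)
    ms(60000, { long: true }); // "1 minute"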
}\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
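The Headers class above stores header values case-insensitively in a null-prototype map and joins duplicate values with ", " on get(); its default iterator yields lowercased name/value pairs. A small sketch against that surface, using only methods defined above:

    const headers = new Headers({ Accept: 'application/json' });
    headers.append('X-Trace', 'a');
    headers.append('x-trace', 'b');   // same header, matched case-insensitively
    headers.get('X-TRACE');           // "a, b"
    headers.has('accept');            // true
    for (const [name, value] of headers) {
      console.log(name, value);       // lowercased name, combined value
    }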
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
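The Request class above normalizes either a URL or another Request into parsed-URL form, carries the node-fetch-only options (follow, compress, counter, agent), and getNodeRequestOptions() then turns it into the options object handed to http/https.request. A hedged construction sketch:

    // Sketch only; a GET or HEAD request with a body would throw, per the check above.
    const req = new Request('https://example.com/data', {
      method: 'POST',
      body: JSON.stringify({ ok: true }),
      headers: { 'Content-Type': 'application/json' },
      redirect: 'follow',
    });
    console.log(req.method, req.url);  // "POST" "https://example.com/data"
    const copy = req.clone();          // independent Request backed by a cloned body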
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nconst isSameProtocol = function isSameProtocol(destination, original) {\n\tconst orig = new URL$1(original).protocol;\n\tconst dest = new URL$1(destination).protocol;\n\n\treturn orig === dest;\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\tdestroyStream(request.body, error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(req, function (err) {\n\t\t\tif (signal && signal.aborted) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (parseInt(process.version.substring(1)) < 14) {\n\t\t\t// Before 
Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\treq.on('socket', function (s) {\n\t\t\t\ts.addListener('close', function (hadError) {\n\t\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\t\tconst hasDataListener = s.listenerCount('data') > 0;\n\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && hasDataListener && !hadError && !(signal && signal.aborted)) {\n\t\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', err);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: 
request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.on('end', function () {\n\t\t\t\t\t// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tlet socket;\n\n\trequest.on('socket', function (s) {\n\t\tsocket = s;\n\t});\n\n\trequest.on('response', function (response) {\n\t\tconst headers = response.headers;\n\n\t\tif (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {\n\t\t\tresponse.once('close', function (hadError) {\n\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\tconst hasDataListener = socket.listenerCount('data') > 0;\n\n\t\t\t\tif (hasDataListener && !hadError) {\n\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\terrorCallback(err);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n}\n\nfunction destroyStream(stream, err) {\n\tif (stream.destroy) {\n\t\tstream.destroy(err);\n\t} else {\n\t\t// node < 8\n\t\tstream.emit('error', err);\n\t\tstream.end();\n\t}\n}\n\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = 
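Taken together, the fetch() implementation above wraps http/https.request, enforces the redirect policy (follow/manual/error, capped by the follow count), and transparently decompresses gzip, deflate and br bodies unless compression support is disabled or the response is HEAD/204/304. A usage sketch of that entry point with the node-fetch v2 options it reads:

    fetch('https://example.com/api', {
      redirect: 'follow',    // or 'manual' / 'error'
      follow: 5,             // redirect cap before a 'max-redirect' FetchError
      compress: true,        // advertise Accept-Encoding: gzip,deflate
      timeout: 5000,         // ms before a 'request-timeout' FetchError
    })
      .then((res) => {
        if (!res.ok) throw new Error('HTTP ' + res.status + ' ' + res.statusText);
        return res.text();
      })
      .catch((err) => console.error(err.type || err.name, err.message));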
fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","/**\n * Advanced Encryption Standard (AES) implementation.\n *\n * This implementation is based on the public domain library 'jscrypto' which\n * was written by:\n *\n * Emily Stark (estark@stanford.edu)\n * Mike Hamburg (mhamburg@stanford.edu)\n * Dan Boneh (dabo@cs.stanford.edu)\n *\n * Parts of this code are based on the OpenSSL implementation of AES:\n * http://www.openssl.org\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./cipher');\nrequire('./cipherModes');\nrequire('./util');\n\n/* AES API */\nmodule.exports = forge.aes = forge.aes || {};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('AES-', key);\n * cipher.start({iv: iv});\n *\n * Creates an AES cipher object to encrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as a string of bytes, an array of bytes,\n * a byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.startEncrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: false,\n mode: mode\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('AES-', key);\n *\n * Creates an AES cipher object to encrypt data using the given symmetric key.\n *\n * The key may be given as a string of bytes, an array of bytes, a\n * byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.createEncryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: false,\n mode: mode\n });\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('AES-', key);\n * decipher.start({iv: iv});\n *\n * Creates an AES cipher object to decrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as a string of bytes, an array of bytes,\n * a byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.startDecrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: true,\n mode: mode\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. 
Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('AES-', key);\n *\n * Creates an AES cipher object to decrypt data using the given symmetric key.\n *\n * The key may be given as a string of bytes, an array of bytes, a\n * byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.createDecryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: true,\n mode: mode\n });\n};\n\n/**\n * Creates a new AES cipher algorithm object.\n *\n * @param name the name of the algorithm.\n * @param mode the mode factory function.\n *\n * @return the AES algorithm object.\n */\nforge.aes.Algorithm = function(name, mode) {\n if(!init) {\n initialize();\n }\n var self = this;\n self.name = name;\n self.mode = new mode({\n blockSize: 16,\n cipher: {\n encrypt: function(inBlock, outBlock) {\n return _updateBlock(self._w, inBlock, outBlock, false);\n },\n decrypt: function(inBlock, outBlock) {\n return _updateBlock(self._w, inBlock, outBlock, true);\n }\n }\n });\n self._init = false;\n};\n\n/**\n * Initializes this AES algorithm by expanding its key.\n *\n * @param options the options to use.\n * key the key to use with this algorithm.\n * decrypt true if the algorithm should be initialized for decryption,\n * false for encryption.\n */\nforge.aes.Algorithm.prototype.initialize = function(options) {\n if(this._init) {\n return;\n }\n\n var key = options.key;\n var tmp;\n\n /* Note: The key may be a string of bytes, an array of bytes, a byte\n buffer, or an array of 32-bit integers. If the key is in bytes, then\n it must be 16, 24, or 32 bytes in length. If it is in 32-bit\n integers, it must be 4, 6, or 8 integers long. */\n\n if(typeof key === 'string' &&\n (key.length === 16 || key.length === 24 || key.length === 32)) {\n // convert key string into byte buffer\n key = forge.util.createBuffer(key);\n } else if(forge.util.isArray(key) &&\n (key.length === 16 || key.length === 24 || key.length === 32)) {\n // convert key integer array into byte buffer\n tmp = key;\n key = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n key.putByte(tmp[i]);\n }\n }\n\n // convert key byte buffer into 32-bit integer array\n if(!forge.util.isArray(key)) {\n tmp = key;\n key = [];\n\n // key lengths of 16, 24, 32 bytes allowed\n var len = tmp.length();\n if(len === 16 || len === 24 || len === 32) {\n len = len >>> 2;\n for(var i = 0; i < len; ++i) {\n key.push(tmp.getInt32());\n }\n }\n }\n\n // key must be an array of 32-bit integers by now\n if(!forge.util.isArray(key) ||\n !(key.length === 4 || key.length === 6 || key.length === 8)) {\n throw new Error('Invalid key parameter.');\n }\n\n // encryption operation is always used for these modes\n var mode = this.mode.name;\n var encryptOp = (['CFB', 'OFB', 'CTR', 'GCM'].indexOf(mode) !== -1);\n\n // do key expansion\n this._w = _expandKey(key, options.decrypt && !encryptOp);\n this._init = true;\n};\n\n/**\n * Expands a key. Typically only used for testing.\n *\n * @param key the symmetric key to expand, as an array of 32-bit words.\n * @param decrypt true to expand for decryption, false for encryption.\n *\n * @return the expanded key.\n */\nforge.aes._expandKey = function(key, decrypt) {\n if(!init) {\n initialize();\n }\n return _expandKey(key, decrypt);\n};\n\n/**\n * Updates a single block. 
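The deprecation notes above point at forge's generic cipher API rather than the AES-specific helpers. A sketch of that recommended path with an explicit mode string such as 'AES-CBC', assuming the standard node-forge surface (createCipher/createDecipher, util.createBuffer, random.getBytesSync):

    const forge = require('node-forge');          // assumed binding for the bundled forge
    const key = forge.random.getBytesSync(16);    // 128-bit key
    const iv = forge.random.getBytesSync(16);

    const cipher = forge.cipher.createCipher('AES-CBC', key);
    cipher.start({ iv: iv });
    cipher.update(forge.util.createBuffer('attack at dawn', 'utf8'));
    cipher.finish();
    const encrypted = cipher.output.getBytes();

    const decipher = forge.cipher.createDecipher('AES-CBC', key);
    decipher.start({ iv: iv });
    decipher.update(forge.util.createBuffer(encrypted));
    decipher.finish();
    console.log(decipher.output.getBytes());      // "attack at dawn"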
Typically only used for testing.\n *\n * @param w the expanded key to use.\n * @param input an array of block-size 32-bit words.\n * @param output an array of block-size 32-bit words.\n * @param decrypt true to decrypt, false to encrypt.\n */\nforge.aes._updateBlock = _updateBlock;\n\n/** Register AES algorithms **/\n\nregisterAlgorithm('AES-ECB', forge.cipher.modes.ecb);\nregisterAlgorithm('AES-CBC', forge.cipher.modes.cbc);\nregisterAlgorithm('AES-CFB', forge.cipher.modes.cfb);\nregisterAlgorithm('AES-OFB', forge.cipher.modes.ofb);\nregisterAlgorithm('AES-CTR', forge.cipher.modes.ctr);\nregisterAlgorithm('AES-GCM', forge.cipher.modes.gcm);\n\nfunction registerAlgorithm(name, mode) {\n var factory = function() {\n return new forge.aes.Algorithm(name, mode);\n };\n forge.cipher.registerAlgorithm(name, factory);\n}\n\n/** AES implementation **/\n\nvar init = false; // not yet initialized\nvar Nb = 4; // number of words comprising the state (AES = 4)\nvar sbox; // non-linear substitution table used in key expansion\nvar isbox; // inversion of sbox\nvar rcon; // round constant word array\nvar mix; // mix-columns table\nvar imix; // inverse mix-columns table\n\n/**\n * Performs initialization, ie: precomputes tables to optimize for speed.\n *\n * One way to understand how AES works is to imagine that 'addition' and\n * 'multiplication' are interfaces that require certain mathematical\n * properties to hold true (ie: they are associative) but they might have\n * different implementations and produce different kinds of results ...\n * provided that their mathematical properties remain true. AES defines\n * its own methods of addition and multiplication but keeps some important\n * properties the same, ie: associativity and distributivity. The\n * explanation below tries to shed some light on how AES defines addition\n * and multiplication of bytes and 32-bit words in order to perform its\n * encryption and decryption algorithms.\n *\n * The basics:\n *\n * The AES algorithm views bytes as binary representations of polynomials\n * that have either 1 or 0 as the coefficients. It defines the addition\n * or subtraction of two bytes as the XOR operation. It also defines the\n * multiplication of two bytes as a finite field referred to as GF(2^8)\n * (Note: 'GF' means \"Galois Field\" which is a field that contains a finite\n * number of elements so GF(2^8) has 256 elements).\n *\n * This means that any two bytes can be represented as binary polynomials;\n * when they multiplied together and modularly reduced by an irreducible\n * polynomial of the 8th degree, the results are the field GF(2^8). The\n * specific irreducible polynomial that AES uses in hexadecimal is 0x11b.\n * This multiplication is associative with 0x01 as the identity:\n *\n * (b * 0x01 = GF(b, 0x01) = b).\n *\n * The operation GF(b, 0x02) can be performed at the byte level by left\n * shifting b once and then XOR'ing it (to perform the modular reduction)\n * with 0x11b if b is >= 128. Repeated application of the multiplication\n * of 0x02 can be used to implement the multiplication of any two bytes.\n *\n * For instance, multiplying 0x57 and 0x13, denoted as GF(0x57, 0x13), can\n * be performed by factoring 0x13 into 0x01, 0x02, and 0x10. Then these\n * factors can each be multiplied by 0x57 and then added together. To do\n * the multiplication, values for 0x57 multiplied by each of these 3 factors\n * can be precomputed and stored in a table. 
To add them, the values from\n * the table are XOR'd together.\n *\n * AES also defines addition and multiplication of words, that is 4-byte\n * numbers represented as polynomials of 3 degrees where the coefficients\n * are the values of the bytes.\n *\n * The word [a0, a1, a2, a3] is a polynomial a3x^3 + a2x^2 + a1x + a0.\n *\n * Addition is performed by XOR'ing like powers of x. Multiplication\n * is performed in two steps, the first is an algebriac expansion as\n * you would do normally (where addition is XOR). But the result is\n * a polynomial larger than 3 degrees and thus it cannot fit in a word. So\n * next the result is modularly reduced by an AES-specific polynomial of\n * degree 4 which will always produce a polynomial of less than 4 degrees\n * such that it will fit in a word. In AES, this polynomial is x^4 + 1.\n *\n * The modular product of two polynomials 'a' and 'b' is thus:\n *\n * d(x) = d3x^3 + d2x^2 + d1x + d0\n * with\n * d0 = GF(a0, b0) ^ GF(a3, b1) ^ GF(a2, b2) ^ GF(a1, b3)\n * d1 = GF(a1, b0) ^ GF(a0, b1) ^ GF(a3, b2) ^ GF(a2, b3)\n * d2 = GF(a2, b0) ^ GF(a1, b1) ^ GF(a0, b2) ^ GF(a3, b3)\n * d3 = GF(a3, b0) ^ GF(a2, b1) ^ GF(a1, b2) ^ GF(a0, b3)\n *\n * As a matrix:\n *\n * [d0] = [a0 a3 a2 a1][b0]\n * [d1] [a1 a0 a3 a2][b1]\n * [d2] [a2 a1 a0 a3][b2]\n * [d3] [a3 a2 a1 a0][b3]\n *\n * Special polynomials defined by AES (0x02 == {02}):\n * a(x) = {03}x^3 + {01}x^2 + {01}x + {02}\n * a^-1(x) = {0b}x^3 + {0d}x^2 + {09}x + {0e}.\n *\n * These polynomials are used in the MixColumns() and InverseMixColumns()\n * operations, respectively, to cause each element in the state to affect\n * the output (referred to as diffusing).\n *\n * RotWord() uses: a0 = a1 = a2 = {00} and a3 = {01}, which is the\n * polynomial x3.\n *\n * The ShiftRows() method modifies the last 3 rows in the state (where\n * the state is 4 words with 4 bytes per word) by shifting bytes cyclically.\n * The 1st byte in the second row is moved to the end of the row. The 1st\n * and 2nd bytes in the third row are moved to the end of the row. The 1st,\n * 2nd, and 3rd bytes are moved in the fourth row.\n *\n * More details on how AES arithmetic works:\n *\n * In the polynomial representation of binary numbers, XOR performs addition\n * and subtraction and multiplication in GF(2^8) denoted as GF(a, b)\n * corresponds with the multiplication of polynomials modulo an irreducible\n * polynomial of degree 8. In other words, for AES, GF(a, b) will multiply\n * polynomial 'a' with polynomial 'b' and then do a modular reduction by\n * an AES-specific irreducible polynomial of degree 8.\n *\n * A polynomial is irreducible if its only divisors are one and itself. 
For\n * the AES algorithm, this irreducible polynomial is:\n *\n * m(x) = x^8 + x^4 + x^3 + x + 1,\n *\n * or {01}{1b} in hexadecimal notation, where each coefficient is a bit:\n * 100011011 = 283 = 0x11b.\n *\n * For example, GF(0x57, 0x83) = 0xc1 because\n *\n * 0x57 = 87 = 01010111 = x^6 + x^4 + x^2 + x + 1\n * 0x85 = 131 = 10000101 = x^7 + x + 1\n *\n * (x^6 + x^4 + x^2 + x + 1) * (x^7 + x + 1)\n * = x^13 + x^11 + x^9 + x^8 + x^7 +\n * x^7 + x^5 + x^3 + x^2 + x +\n * x^6 + x^4 + x^2 + x + 1\n * = x^13 + x^11 + x^9 + x^8 + x^6 + x^5 + x^4 + x^3 + 1 = y\n * y modulo (x^8 + x^4 + x^3 + x + 1)\n * = x^7 + x^6 + 1.\n *\n * The modular reduction by m(x) guarantees the result will be a binary\n * polynomial of less than degree 8, so that it can fit in a byte.\n *\n * The operation to multiply a binary polynomial b with x (the polynomial\n * x in binary representation is 00000010) is:\n *\n * b_7x^8 + b_6x^7 + b_5x^6 + b_4x^5 + b_3x^4 + b_2x^3 + b_1x^2 + b_0x^1\n *\n * To get GF(b, x) we must reduce that by m(x). If b_7 is 0 (that is the\n * most significant bit is 0 in b) then the result is already reduced. If\n * it is 1, then we can reduce it by subtracting m(x) via an XOR.\n *\n * It follows that multiplication by x (00000010 or 0x02) can be implemented\n * by performing a left shift followed by a conditional bitwise XOR with\n * 0x1b. This operation on bytes is denoted by xtime(). Multiplication by\n * higher powers of x can be implemented by repeated application of xtime().\n *\n * By adding intermediate results, multiplication by any constant can be\n * implemented. For instance:\n *\n * GF(0x57, 0x13) = 0xfe because:\n *\n * xtime(b) = (b & 128) ? (b << 1 ^ 0x11b) : (b << 1)\n *\n * Note: We XOR with 0x11b instead of 0x1b because in javascript our\n * datatype for b can be larger than 1 byte, so a left shift will not\n * automatically eliminate bits that overflow a byte ... by XOR'ing the\n * overflow bit with 1 (the extra one from 0x11b) we zero it out.\n *\n * GF(0x57, 0x02) = xtime(0x57) = 0xae\n * GF(0x57, 0x04) = xtime(0xae) = 0x47\n * GF(0x57, 0x08) = xtime(0x47) = 0x8e\n * GF(0x57, 0x10) = xtime(0x8e) = 0x07\n *\n * GF(0x57, 0x13) = GF(0x57, (0x01 ^ 0x02 ^ 0x10))\n *\n * And by the distributive property (since XOR is addition and GF() is\n * multiplication):\n *\n * = GF(0x57, 0x01) ^ GF(0x57, 0x02) ^ GF(0x57, 0x10)\n * = 0x57 ^ 0xae ^ 0x07\n * = 0xfe.\n */\nfunction initialize() {\n init = true;\n\n /* Populate the Rcon table. 
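The xtime() relation and the GF(0x57, 0x13) walk-through in the comment above can be checked directly; this tiny sketch reproduces that arithmetic:

    // Multiply b by x (0x02) in GF(2^8), reducing by 0x11b as described above.
    function xtime(b) {
      return (b & 0x80) ? ((b << 1) ^ 0x11b) : (b << 1);
    }
    const b2  = xtime(0x57);   // 0xae = GF(0x57, 0x02)
    const b4  = xtime(b2);     // 0x47 = GF(0x57, 0x04)
    const b8  = xtime(b4);     // 0x8e = GF(0x57, 0x08)
    const b16 = xtime(b8);     // 0x07 = GF(0x57, 0x10)
    // 0x13 = 0x01 ^ 0x02 ^ 0x10, so by distributivity:
    console.log((0x57 ^ b2 ^ b16).toString(16)); // "fe"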
These are the values given by\n [x^(i-1),{00},{00},{00}] where x^(i-1) are powers of x (and x = 0x02)\n in the field of GF(2^8), where i starts at 1.\n\n rcon[0] = [0x00, 0x00, 0x00, 0x00]\n rcon[1] = [0x01, 0x00, 0x00, 0x00] 2^(1-1) = 2^0 = 1\n rcon[2] = [0x02, 0x00, 0x00, 0x00] 2^(2-1) = 2^1 = 2\n ...\n rcon[9] = [0x1B, 0x00, 0x00, 0x00] 2^(9-1) = 2^8 = 0x1B\n rcon[10] = [0x36, 0x00, 0x00, 0x00] 2^(10-1) = 2^9 = 0x36\n\n We only store the first byte because it is the only one used.\n */\n rcon = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36];\n\n // compute xtime table which maps i onto GF(i, 0x02)\n var xtime = new Array(256);\n for(var i = 0; i < 128; ++i) {\n xtime[i] = i << 1;\n xtime[i + 128] = (i + 128) << 1 ^ 0x11B;\n }\n\n // compute all other tables\n sbox = new Array(256);\n isbox = new Array(256);\n mix = new Array(4);\n imix = new Array(4);\n for(var i = 0; i < 4; ++i) {\n mix[i] = new Array(256);\n imix[i] = new Array(256);\n }\n var e = 0, ei = 0, e2, e4, e8, sx, sx2, me, ime;\n for(var i = 0; i < 256; ++i) {\n /* We need to generate the SubBytes() sbox and isbox tables so that\n we can perform byte substitutions. This requires us to traverse\n all of the elements in GF, find their multiplicative inverses,\n and apply to each the following affine transformation:\n\n bi' = bi ^ b(i + 4) mod 8 ^ b(i + 5) mod 8 ^ b(i + 6) mod 8 ^\n b(i + 7) mod 8 ^ ci\n for 0 <= i < 8, where bi is the ith bit of the byte, and ci is the\n ith bit of a byte c with the value {63} or {01100011}.\n\n It is possible to traverse every possible value in a Galois field\n using what is referred to as a 'generator'. There are many\n generators (128 out of 256): 3,5,6,9,11,82 to name a few. To fully\n traverse GF we iterate 255 times, multiplying by our generator\n each time.\n\n On each iteration we can determine the multiplicative inverse for\n the current element.\n\n Suppose there is an element in GF 'e'. For a given generator 'g',\n e = g^x. The multiplicative inverse of e is g^(255 - x). It turns\n out that if use the inverse of a generator as another generator\n it will produce all of the corresponding multiplicative inverses\n at the same time. For this reason, we choose 5 as our inverse\n generator because it only requires 2 multiplies and 1 add and its\n inverse, 82, requires relatively few operations as well.\n\n In order to apply the affine transformation, the multiplicative\n inverse 'ei' of 'e' can be repeatedly XOR'd (4 times) with a\n bit-cycling of 'ei'. To do this 'ei' is first stored in 's' and\n 'x'. Then 's' is left shifted and the high bit of 's' is made the\n low bit. The resulting value is stored in 's'. Then 'x' is XOR'd\n with 's' and stored in 'x'. On each subsequent iteration the same\n operation is performed. When 4 iterations are complete, 'x' is\n XOR'd with 'c' (0x63) and the transformed value is stored in 'x'.\n For example:\n\n s = 01000001\n x = 01000001\n\n iteration 1: s = 10000010, x ^= s\n iteration 2: s = 00000101, x ^= s\n iteration 3: s = 00001010, x ^= s\n iteration 4: s = 00010100, x ^= s\n x ^= 0x63\n\n This can be done with a loop where s = (s << 1) | (s >> 7). However,\n it can also be done by using a single 16-bit (in this case 32-bit)\n number 'sx'. Since XOR is an associative operation, we can set 'sx'\n to 'ei' and then XOR it with 'sx' left-shifted 1,2,3, and 4 times.\n The most significant bits will flow into the high 8 bit positions\n and be correctly XOR'd with one another. 
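// Sketch of the loop form mentioned above (assumed standalone helper, not the
// bundle's table builder): apply the SubBytes() affine transformation to a
// multiplicative inverse 'ei' by rotating it left four times, XOR'ing the
// rotations together, then XOR'ing with 0x63. For ei = 0x01 (its own inverse)
// this yields 0x7c, the S-box entry for 0x01.
function affineSketch(ei) {
  var s = ei, x = ei;
  for(var n = 0; n < 4; ++n) {
    s = ((s << 1) | (s >>> 7)) & 0xFF;  // rotate left by one bit within a byte
    x ^= s;
  }
  return x ^ 0x63;
}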
All that remains will be\n to cycle the high 8 bits by XOR'ing them all with the lower 8 bits\n afterwards.\n\n At the same time we're populating sbox and isbox we can precompute\n the multiplication we'll need to do to do MixColumns() later.\n */\n\n // apply affine transformation\n sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4);\n sx = (sx >> 8) ^ (sx & 255) ^ 0x63;\n\n // update tables\n sbox[e] = sx;\n isbox[sx] = e;\n\n /* Mixing columns is done using matrix multiplication. The columns\n that are to be mixed are each a single word in the current state.\n The state has Nb columns (4 columns). Therefore each column is a\n 4 byte word. So to mix the columns in a single column 'c' where\n its rows are r0, r1, r2, and r3, we use the following matrix\n multiplication:\n\n [2 3 1 1]*[r0,c]=[r'0,c]\n [1 2 3 1] [r1,c] [r'1,c]\n [1 1 2 3] [r2,c] [r'2,c]\n [3 1 1 2] [r3,c] [r'3,c]\n\n r0, r1, r2, and r3 are each 1 byte of one of the words in the\n state (a column). To do matrix multiplication for each mixed\n column c' we multiply the corresponding row from the left matrix\n with the corresponding column from the right matrix. In total, we\n get 4 equations:\n\n r0,c' = 2*r0,c + 3*r1,c + 1*r2,c + 1*r3,c\n r1,c' = 1*r0,c + 2*r1,c + 3*r2,c + 1*r3,c\n r2,c' = 1*r0,c + 1*r1,c + 2*r2,c + 3*r3,c\n r3,c' = 3*r0,c + 1*r1,c + 1*r2,c + 2*r3,c\n\n As usual, the multiplication is as previously defined and the\n addition is XOR. In order to optimize mixing columns we can store\n the multiplication results in tables. If you think of the whole\n column as a word (it might help to visualize by mentally rotating\n the equations above by counterclockwise 90 degrees) then you can\n see that it would be useful to map the multiplications performed on\n each byte (r0, r1, r2, r3) onto a word as well. For instance, we\n could map 2*r0,1*r0,1*r0,3*r0 onto a word by storing 2*r0 in the\n highest 8 bits and 3*r0 in the lowest 8 bits (with the other two\n respectively in the middle). This means that a table can be\n constructed that uses r0 as an index to the word. We can do the\n same with r1, r2, and r3, creating a total of 4 tables.\n\n To construct a full c', we can just look up each byte of c in\n their respective tables and XOR the results together.\n\n Also, to build each table we only have to calculate the word\n for 2,1,1,3 for every byte ... which we can do on each iteration\n of this loop since we will iterate over every byte. After we have\n calculated 2,1,1,3 we can get the results for the other tables\n by cycling the byte at the end to the beginning. For instance\n we can take the result of table 2,1,1,3 and produce table 3,2,1,1\n by moving the right most byte to the left most position just like\n how you can imagine the 3 moved out of 2,1,1,3 and to the front\n to produce 3,2,1,1.\n\n There is another optimization in that the same multiples of\n the current element we need in order to advance our generator\n to the next iteration can be reused in performing the 2,1,1,3\n calculation. We also calculate the inverse mix column tables,\n with e,9,d,b being the inverse of 2,1,1,3.\n\n When we're done, and we need to actually mix columns, the first\n byte of each state word should be put through mix[0] (2,1,1,3),\n the second through mix[1] (3,2,1,1) and so forth. 
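// Sketch of the XOR-of-lookups pattern just described, for a single column
// 'c' held as a 32-bit word with r0 in the top byte (assumed helper; note
// that the tables built in this loop also fold the byte substitution into the
// lookup, and the real round code additionally interleaves ShiftRows()).
function mixColumnSketch(tables, c) {
  return tables[0][c >>> 24] ^
         tables[1][c >>> 16 & 255] ^
         tables[2][c >>> 8 & 255] ^
         tables[3][c & 255];
}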
Then they should\n be XOR'd together to produce the fully mixed column.\n */\n\n // calculate mix and imix table values\n sx2 = xtime[sx];\n e2 = xtime[e];\n e4 = xtime[e2];\n e8 = xtime[e4];\n me =\n (sx2 << 24) ^ // 2\n (sx << 16) ^ // 1\n (sx << 8) ^ // 1\n (sx ^ sx2); // 3\n ime =\n (e2 ^ e4 ^ e8) << 24 ^ // E (14)\n (e ^ e8) << 16 ^ // 9\n (e ^ e4 ^ e8) << 8 ^ // D (13)\n (e ^ e2 ^ e8); // B (11)\n // produce each of the mix tables by rotating the 2,1,1,3 value\n for(var n = 0; n < 4; ++n) {\n mix[n][e] = me;\n imix[n][sx] = ime;\n // cycle the right most byte to the left most position\n // ie: 2,1,1,3 becomes 3,2,1,1\n me = me << 24 | me >>> 8;\n ime = ime << 24 | ime >>> 8;\n }\n\n // get next element and inverse\n if(e === 0) {\n // 1 is the inverse of 1\n e = ei = 1;\n } else {\n // e = 2e + 2*2*2*(10e)) = multiply e by 82 (chosen generator)\n // ei = ei + 2*2*ei = multiply ei by 5 (inverse generator)\n e = e2 ^ xtime[xtime[xtime[e2 ^ e8]]];\n ei ^= xtime[xtime[ei]];\n }\n }\n}\n\n/**\n * Generates a key schedule using the AES key expansion algorithm.\n *\n * The AES algorithm takes the Cipher Key, K, and performs a Key Expansion\n * routine to generate a key schedule. The Key Expansion generates a total\n * of Nb*(Nr + 1) words: the algorithm requires an initial set of Nb words,\n * and each of the Nr rounds requires Nb words of key data. The resulting\n * key schedule consists of a linear array of 4-byte words, denoted [wi ],\n * with i in the range 0 <= i < Nb(Nr + 1).\n *\n * KeyExpansion(byte key[4*Nk], word w[Nb*(Nr+1)], Nk)\n * AES-128 (Nb=4, Nk=4, Nr=10)\n * AES-192 (Nb=4, Nk=6, Nr=12)\n * AES-256 (Nb=4, Nk=8, Nr=14)\n * Note: Nr=Nk+6.\n *\n * Nb is the number of columns (32-bit words) comprising the State (or\n * number of bytes in a block). For AES, Nb=4.\n *\n * @param key the key to schedule (as an array of 32-bit words).\n * @param decrypt true to modify the key schedule to decrypt, false not to.\n *\n * @return the generated key schedule.\n */\nfunction _expandKey(key, decrypt) {\n // copy the key's words to initialize the key schedule\n var w = key.slice(0);\n\n /* RotWord() will rotate a word, moving the first byte to the last\n byte's position (shifting the other bytes left).\n\n We will be getting the value of Rcon at i / Nk. 'i' will iterate\n from Nk to (Nb * Nr+1). Nk = 4 (4 byte key), Nb = 4 (4 words in\n a block), Nr = Nk + 6 (10). Therefore 'i' will iterate from\n 4 to 44 (exclusive). Each time we iterate 4 times, i / Nk will\n increase by 1. We use a counter iNk to keep track of this.\n */\n\n // go through the rounds expanding the key\n var temp, iNk = 1;\n var Nk = w.length;\n var Nr1 = Nk + 6 + 1;\n var end = Nb * Nr1;\n for(var i = Nk; i < end; ++i) {\n temp = w[i - 1];\n if(i % Nk === 0) {\n // temp = SubWord(RotWord(temp)) ^ Rcon[i / Nk]\n temp =\n sbox[temp >>> 16 & 255] << 24 ^\n sbox[temp >>> 8 & 255] << 16 ^\n sbox[temp & 255] << 8 ^\n sbox[temp >>> 24] ^ (rcon[iNk] << 24);\n iNk++;\n } else if(Nk > 6 && (i % Nk === 4)) {\n // temp = SubWord(temp)\n temp =\n sbox[temp >>> 24] << 24 ^\n sbox[temp >>> 16 & 255] << 16 ^\n sbox[temp >>> 8 & 255] << 8 ^\n sbox[temp & 255];\n }\n w[i] = w[i - Nk] ^ temp;\n }\n\n /* When we are updating a cipher block we always use the code path for\n encryption whether we are decrypting or not (to shorten code and\n simplify the generation of look up tables). 
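// Sketch of the size bookkeeping used by the expansion loop above (assumed
// standalone helper): a key of Nk 32-bit words uses Nr = Nk + 6 rounds and
// needs Nb * (Nr + 1) schedule words, where Nb = 4.
function scheduleWordsSketch(Nk) {
  var Nb = 4, Nr = Nk + 6;
  return Nb * (Nr + 1);
}
// scheduleWordsSketch(4) === 44 (AES-128)
// scheduleWordsSketch(6) === 52 (AES-192)
// scheduleWordsSketch(8) === 60 (AES-256)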
However, because there\n are differences in the decryption algorithm, other than just swapping\n in different look up tables, we must transform our key schedule to\n account for these changes:\n\n 1. The decryption algorithm gets its key rounds in reverse order.\n 2. The decryption algorithm adds the round key before mixing columns\n instead of afterwards.\n\n We don't need to modify our key schedule to handle the first case,\n we can just traverse the key schedule in reverse order when decrypting.\n\n The second case requires a little work.\n\n The tables we built for performing rounds will take an input and then\n perform SubBytes() and MixColumns() or, for the decrypt version,\n InvSubBytes() and InvMixColumns(). But the decrypt algorithm requires\n us to AddRoundKey() before InvMixColumns(). This means we'll need to\n apply some transformations to the round key to inverse-mix its columns\n so they'll be correct for moving AddRoundKey() to after the state has\n had its columns inverse-mixed.\n\n To inverse-mix the columns of the state when we're decrypting we use a\n lookup table that will apply InvSubBytes() and InvMixColumns() at the\n same time. However, the round key's bytes are not inverse-substituted\n in the decryption algorithm. To get around this problem, we can first\n substitute the bytes in the round key so that when we apply the\n transformation via the InvSubBytes()+InvMixColumns() table, it will\n undo our substitution leaving us with the original value that we\n want -- and then inverse-mix that value.\n\n This change will correctly alter our key schedule so that we can XOR\n each round key with our already transformed decryption state. This\n allows us to use the same code path as the encryption algorithm.\n\n We make one more change to the decryption key. Since the decryption\n algorithm runs in reverse from the encryption algorithm, we reverse\n the order of the round keys to avoid having to iterate over the key\n schedule backwards when running the encryption algorithm later in\n decryption mode. In addition to reversing the order of the round keys,\n we also swap each round key's 2nd and 4th rows. See the comments\n section where rounds are performed for more details about why this is\n done. These changes are done inline with the other substitution\n described above.\n */\n if(decrypt) {\n var tmp;\n var m0 = imix[0];\n var m1 = imix[1];\n var m2 = imix[2];\n var m3 = imix[3];\n var wnew = w.slice(0);\n end = w.length;\n for(var i = 0, wi = end - Nb; i < end; i += Nb, wi -= Nb) {\n // do not sub the first or last round key (round keys are Nb\n // words) as no column mixing is performed before they are added,\n // but do change the key order\n if(i === 0 || i === (end - Nb)) {\n wnew[i] = w[wi];\n wnew[i + 1] = w[wi + 3];\n wnew[i + 2] = w[wi + 2];\n wnew[i + 3] = w[wi + 1];\n } else {\n // substitute each round key byte because the inverse-mix\n // table will inverse-substitute it (effectively cancel the\n // substitution because round key bytes aren't sub'd in\n // decryption mode) and swap indexes 3 and 1\n for(var n = 0; n < Nb; ++n) {\n tmp = w[wi + n];\n wnew[i + (3&-n)] =\n m0[sbox[tmp >>> 24]] ^\n m1[sbox[tmp >>> 16 & 255]] ^\n m2[sbox[tmp >>> 8 & 255]] ^\n m3[sbox[tmp & 255]];\n }\n }\n }\n w = wnew;\n }\n\n return w;\n}\n\n/**\n * Updates a single block (16 bytes) using AES. 
The update will either\n * encrypt or decrypt the block.\n *\n * @param w the key schedule.\n * @param input the input block (an array of 32-bit words).\n * @param output the updated output block.\n * @param decrypt true to decrypt the block, false to encrypt it.\n */\nfunction _updateBlock(w, input, output, decrypt) {\n /*\n Cipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)])\n begin\n byte state[4,Nb]\n state = in\n AddRoundKey(state, w[0, Nb-1])\n for round = 1 step 1 to Nr-1\n SubBytes(state)\n ShiftRows(state)\n MixColumns(state)\n AddRoundKey(state, w[round*Nb, (round+1)*Nb-1])\n end for\n SubBytes(state)\n ShiftRows(state)\n AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n out = state\n end\n\n InvCipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)])\n begin\n byte state[4,Nb]\n state = in\n AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n for round = Nr-1 step -1 downto 1\n InvShiftRows(state)\n InvSubBytes(state)\n AddRoundKey(state, w[round*Nb, (round+1)*Nb-1])\n InvMixColumns(state)\n end for\n InvShiftRows(state)\n InvSubBytes(state)\n AddRoundKey(state, w[0, Nb-1])\n out = state\n end\n */\n\n // Encrypt: AddRoundKey(state, w[0, Nb-1])\n // Decrypt: AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n var Nr = w.length / 4 - 1;\n var m0, m1, m2, m3, sub;\n if(decrypt) {\n m0 = imix[0];\n m1 = imix[1];\n m2 = imix[2];\n m3 = imix[3];\n sub = isbox;\n } else {\n m0 = mix[0];\n m1 = mix[1];\n m2 = mix[2];\n m3 = mix[3];\n sub = sbox;\n }\n var a, b, c, d, a2, b2, c2;\n a = input[0] ^ w[0];\n b = input[decrypt ? 3 : 1] ^ w[1];\n c = input[2] ^ w[2];\n d = input[decrypt ? 1 : 3] ^ w[3];\n var i = 3;\n\n /* In order to share code we follow the encryption algorithm when both\n encrypting and decrypting. To account for the changes required in the\n decryption algorithm, we use different lookup tables when decrypting\n and use a modified key schedule to account for the difference in the\n order of transformations applied when performing rounds. We also get\n key rounds in reverse order (relative to encryption). */\n for(var round = 1; round < Nr; ++round) {\n /* As described above, we'll be using table lookups to perform the\n column mixing. Each column is stored as a word in the state (the\n array 'input' has one column as a word at each index). In order to\n mix a column, we perform these transformations on each row in c,\n which is 1 byte in each word. The new column for c0 is c'0:\n\n m0 m1 m2 m3\n r0,c'0 = 2*r0,c0 + 3*r1,c0 + 1*r2,c0 + 1*r3,c0\n r1,c'0 = 1*r0,c0 + 2*r1,c0 + 3*r2,c0 + 1*r3,c0\n r2,c'0 = 1*r0,c0 + 1*r1,c0 + 2*r2,c0 + 3*r3,c0\n r3,c'0 = 3*r0,c0 + 1*r1,c0 + 1*r2,c0 + 2*r3,c0\n\n So using mix tables where c0 is a word with r0 being its upper\n 8 bits and r3 being its lower 8 bits:\n\n m0[c0 >> 24] will yield this word: [2*r0,1*r0,1*r0,3*r0]\n ...\n m3[c0 & 255] will yield this word: [1*r3,1*r3,3*r3,2*r3]\n\n Therefore to mix the columns in each word in the state we\n do the following (& 255 omitted for brevity):\n c'0,r0 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n c'0,r1 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n c'0,r2 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n c'0,r3 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n\n However, before mixing, the algorithm requires us to perform\n ShiftRows(). The ShiftRows() transformation cyclically shifts the\n last 3 rows of the state over different offsets. 
The first row\n (r = 0) is not shifted.\n\n s'_r,c = s_r,(c + shift(r, Nb) mod Nb\n for 0 < r < 4 and 0 <= c < Nb and\n shift(1, 4) = 1\n shift(2, 4) = 2\n shift(3, 4) = 3.\n\n This causes the first byte in r = 1 to be moved to the end of\n the row, the first 2 bytes in r = 2 to be moved to the end of\n the row, the first 3 bytes in r = 3 to be moved to the end of\n the row:\n\n r1: [c0 c1 c2 c3] => [c1 c2 c3 c0]\n r2: [c0 c1 c2 c3] [c2 c3 c0 c1]\n r3: [c0 c1 c2 c3] [c3 c0 c1 c2]\n\n We can make these substitutions inline with our column mixing to\n generate an updated set of equations to produce each word in the\n state (note the columns have changed positions):\n\n c0 c1 c2 c3 => c0 c1 c2 c3\n c0 c1 c2 c3 c1 c2 c3 c0 (cycled 1 byte)\n c0 c1 c2 c3 c2 c3 c0 c1 (cycled 2 bytes)\n c0 c1 c2 c3 c3 c0 c1 c2 (cycled 3 bytes)\n\n Therefore:\n\n c'0 = 2*r0,c0 + 3*r1,c1 + 1*r2,c2 + 1*r3,c3\n c'0 = 1*r0,c0 + 2*r1,c1 + 3*r2,c2 + 1*r3,c3\n c'0 = 1*r0,c0 + 1*r1,c1 + 2*r2,c2 + 3*r3,c3\n c'0 = 3*r0,c0 + 1*r1,c1 + 1*r2,c2 + 2*r3,c3\n\n c'1 = 2*r0,c1 + 3*r1,c2 + 1*r2,c3 + 1*r3,c0\n c'1 = 1*r0,c1 + 2*r1,c2 + 3*r2,c3 + 1*r3,c0\n c'1 = 1*r0,c1 + 1*r1,c2 + 2*r2,c3 + 3*r3,c0\n c'1 = 3*r0,c1 + 1*r1,c2 + 1*r2,c3 + 2*r3,c0\n\n ... and so forth for c'2 and c'3. The important distinction is\n that the columns are cycling, with c0 being used with the m0\n map when calculating c0, but c1 being used with the m0 map when\n calculating c1 ... and so forth.\n\n When performing the inverse we transform the mirror image and\n skip the bottom row, instead of the top one, and move upwards:\n\n c3 c2 c1 c0 => c0 c3 c2 c1 (cycled 3 bytes) *same as encryption\n c3 c2 c1 c0 c1 c0 c3 c2 (cycled 2 bytes)\n c3 c2 c1 c0 c2 c1 c0 c3 (cycled 1 byte) *same as encryption\n c3 c2 c1 c0 c3 c2 c1 c0\n\n If you compare the resulting matrices for ShiftRows()+MixColumns()\n and for InvShiftRows()+InvMixColumns() the 2nd and 4th columns are\n different (in encrypt mode vs. decrypt mode). So in order to use\n the same code to handle both encryption and decryption, we will\n need to do some mapping.\n\n If in encryption mode we let a=c0, b=c1, c=c2, d=c3, and r be\n a row number in the state, then the resulting matrix in encryption\n mode for applying the above transformations would be:\n\n r1: a b c d\n r2: b c d a\n r3: c d a b\n r4: d a b c\n\n If we did the same in decryption mode we would get:\n\n r1: a d c b\n r2: b a d c\n r3: c b a d\n r4: d c b a\n\n If instead we swap d and b (set b=c3 and d=c1), then we get:\n\n r1: a b c d\n r2: d a b c\n r3: c d a b\n r4: b c d a\n\n Now the 1st and 3rd rows are the same as the encryption matrix. All\n we need to do then to make the mapping exactly the same is to swap\n the 2nd and 4th rows when in decryption mode. To do this without\n having to do it on each iteration, we swapped the 2nd and 4th rows\n in the decryption key schedule. We also have to do the swap above\n when we first pull in the input and when we set the final output. 
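// Sketch of plain ShiftRows() on a row-major 4x4 byte matrix (standalone
// illustration; the bundle instead keeps the state as column words and folds
// the shift into the table lookups below): row r is rotated left by r bytes.
function shiftRowsSketch(state) {
  for(var r = 1; r < 4; ++r) {
    state[r] = state[r].slice(r).concat(state[r].slice(0, r));
  }
  return state;
}
// shiftRowsSketch([[0,1,2,3],[4,5,6,7],[8,9,10,11],[12,13,14,15]])[1]
//   -> [5, 6, 7, 4]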
*/\n a2 =\n m0[a >>> 24] ^\n m1[b >>> 16 & 255] ^\n m2[c >>> 8 & 255] ^\n m3[d & 255] ^ w[++i];\n b2 =\n m0[b >>> 24] ^\n m1[c >>> 16 & 255] ^\n m2[d >>> 8 & 255] ^\n m3[a & 255] ^ w[++i];\n c2 =\n m0[c >>> 24] ^\n m1[d >>> 16 & 255] ^\n m2[a >>> 8 & 255] ^\n m3[b & 255] ^ w[++i];\n d =\n m0[d >>> 24] ^\n m1[a >>> 16 & 255] ^\n m2[b >>> 8 & 255] ^\n m3[c & 255] ^ w[++i];\n a = a2;\n b = b2;\n c = c2;\n }\n\n /*\n Encrypt:\n SubBytes(state)\n ShiftRows(state)\n AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n\n Decrypt:\n InvShiftRows(state)\n InvSubBytes(state)\n AddRoundKey(state, w[0, Nb-1])\n */\n // Note: rows are shifted inline\n output[0] =\n (sub[a >>> 24] << 24) ^\n (sub[b >>> 16 & 255] << 16) ^\n (sub[c >>> 8 & 255] << 8) ^\n (sub[d & 255]) ^ w[++i];\n output[decrypt ? 3 : 1] =\n (sub[b >>> 24] << 24) ^\n (sub[c >>> 16 & 255] << 16) ^\n (sub[d >>> 8 & 255] << 8) ^\n (sub[a & 255]) ^ w[++i];\n output[2] =\n (sub[c >>> 24] << 24) ^\n (sub[d >>> 16 & 255] << 16) ^\n (sub[a >>> 8 & 255] << 8) ^\n (sub[b & 255]) ^ w[++i];\n output[decrypt ? 1 : 3] =\n (sub[d >>> 24] << 24) ^\n (sub[a >>> 16 & 255] << 16) ^\n (sub[b >>> 8 & 255] << 8) ^\n (sub[c & 255]) ^ w[++i];\n}\n\n/**\n * Deprecated. Instead, use:\n *\n * forge.cipher.createCipher('AES-', key);\n * forge.cipher.createDecipher('AES-', key);\n *\n * Creates a deprecated AES cipher object. This object's mode will default to\n * CBC (cipher-block-chaining).\n *\n * The key and iv may be given as a string of bytes, an array of bytes, a\n * byte buffer, or an array of 32-bit words.\n *\n * @param options the options to use.\n * key the symmetric key to use.\n * output the buffer to write to.\n * decrypt true for decryption, false for encryption.\n * mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nfunction _createCipher(options) {\n options = options || {};\n var mode = (options.mode || 'CBC').toUpperCase();\n var algorithm = 'AES-' + mode;\n\n var cipher;\n if(options.decrypt) {\n cipher = forge.cipher.createDecipher(algorithm, options.key);\n } else {\n cipher = forge.cipher.createCipher(algorithm, options.key);\n }\n\n // backwards compatible start API\n var start = cipher.start;\n cipher.start = function(iv, options) {\n // backwards compatibility: support second arg as output buffer\n var output = null;\n if(options instanceof forge.util.ByteBuffer) {\n output = options;\n options = {};\n }\n options = options || {};\n options.output = output;\n options.iv = iv;\n start.call(cipher, options);\n };\n\n return cipher;\n}\n","/**\n * A Javascript implementation of AES Cipher Suites for TLS.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2015 Digital Bazaar, Inc.\n *\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./tls');\n\nvar tls = module.exports = forge.tls;\n\n/**\n * Supported cipher suites.\n */\ntls.CipherSuites['TLS_RSA_WITH_AES_128_CBC_SHA'] = {\n id: [0x00, 0x2f],\n name: 'TLS_RSA_WITH_AES_128_CBC_SHA',\n initSecurityParameters: function(sp) {\n sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes;\n sp.cipher_type = tls.CipherType.block;\n sp.enc_key_length = 16;\n sp.block_length = 16;\n sp.fixed_iv_length = 16;\n sp.record_iv_length = 16;\n sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1;\n sp.mac_length = 20;\n sp.mac_key_length = 20;\n },\n initConnectionState: initConnectionState\n};\ntls.CipherSuites['TLS_RSA_WITH_AES_256_CBC_SHA'] = {\n id: [0x00, 0x35],\n name: 'TLS_RSA_WITH_AES_256_CBC_SHA',\n initSecurityParameters: function(sp) {\n sp.bulk_cipher_algorithm = 
tls.BulkCipherAlgorithm.aes;\n sp.cipher_type = tls.CipherType.block;\n sp.enc_key_length = 32;\n sp.block_length = 16;\n sp.fixed_iv_length = 16;\n sp.record_iv_length = 16;\n sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1;\n sp.mac_length = 20;\n sp.mac_key_length = 20;\n },\n initConnectionState: initConnectionState\n};\n\nfunction initConnectionState(state, c, sp) {\n var client = (c.entity === forge.tls.ConnectionEnd.client);\n\n // cipher setup\n state.read.cipherState = {\n init: false,\n cipher: forge.cipher.createDecipher('AES-CBC', client ?\n sp.keys.server_write_key : sp.keys.client_write_key),\n iv: client ? sp.keys.server_write_IV : sp.keys.client_write_IV\n };\n state.write.cipherState = {\n init: false,\n cipher: forge.cipher.createCipher('AES-CBC', client ?\n sp.keys.client_write_key : sp.keys.server_write_key),\n iv: client ? sp.keys.client_write_IV : sp.keys.server_write_IV\n };\n state.read.cipherFunction = decrypt_aes_cbc_sha1;\n state.write.cipherFunction = encrypt_aes_cbc_sha1;\n\n // MAC setup\n state.read.macLength = state.write.macLength = sp.mac_length;\n state.read.macFunction = state.write.macFunction = tls.hmac_sha1;\n}\n\n/**\n * Encrypts the TLSCompressed record into a TLSCipherText record using AES\n * in CBC mode.\n *\n * @param record the TLSCompressed record to encrypt.\n * @param s the ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nfunction encrypt_aes_cbc_sha1(record, s) {\n var rval = false;\n\n // append MAC to fragment, update sequence number\n var mac = s.macFunction(s.macKey, s.sequenceNumber, record);\n record.fragment.putBytes(mac);\n s.updateSequenceNumber();\n\n // TLS 1.1+ use an explicit IV every time to protect against CBC attacks\n var iv;\n if(record.version.minor === tls.Versions.TLS_1_0.minor) {\n // use the pre-generated IV when initializing for TLS 1.0, otherwise use\n // the residue from the previous encryption\n iv = s.cipherState.init ? null : s.cipherState.iv;\n } else {\n iv = forge.random.getBytesSync(16);\n }\n\n s.cipherState.init = true;\n\n // start cipher\n var cipher = s.cipherState.cipher;\n cipher.start({iv: iv});\n\n // TLS 1.1+ write IV into output\n if(record.version.minor >= tls.Versions.TLS_1_1.minor) {\n cipher.output.putBytes(iv);\n }\n\n // do encryption (default padding is appropriate)\n cipher.update(record.fragment);\n if(cipher.finish(encrypt_aes_cbc_sha1_padding)) {\n // set record fragment to encrypted output\n record.fragment = cipher.output;\n record.length = record.fragment.length();\n rval = true;\n }\n\n return rval;\n}\n\n/**\n * Handles padding for aes_cbc_sha1 in encrypt mode.\n *\n * @param blockSize the block size.\n * @param input the input buffer.\n * @param decrypt true in decrypt mode, false in encrypt mode.\n *\n * @return true on success, false on failure.\n */\nfunction encrypt_aes_cbc_sha1_padding(blockSize, input, decrypt) {\n /* The encrypted data length (TLSCiphertext.length) is one more than the sum\n of SecurityParameters.block_length, TLSCompressed.length,\n SecurityParameters.mac_length, and padding_length.\n\n The padding may be any length up to 255 bytes long, as long as it results in\n the TLSCiphertext.length being an integral multiple of the block length.\n Lengths longer than necessary might be desirable to frustrate attacks on a\n protocol based on analysis of the lengths of exchanged messages. 
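// Sketch of the padding rule described above (assumed standalone helper): pad
// out to a multiple of blockSize and write paddingLength, which is one less
// than the number of bytes appended, into every appended byte (including the
// final padding_length byte itself).
function tlsPadSketch(fragmentLength, blockSize) {
  var padBytes = blockSize - (fragmentLength % blockSize);  // bytes appended
  return {padBytes: padBytes, paddingLength: padBytes - 1};
}
// tlsPadSketch(37, 16) -> { padBytes: 11, paddingLength: 10 }
// tlsPadSketch(32, 16) -> { padBytes: 16, paddingLength: 15 }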
Each uint8\n in the padding data vector must be filled with the padding length value.\n\n The padding length should be such that the total size of the\n GenericBlockCipher structure is a multiple of the cipher's block length.\n Legal values range from zero to 255, inclusive. This length specifies the\n length of the padding field exclusive of the padding_length field itself.\n\n This is slightly different from PKCS#7 because the padding value is 1\n less than the actual number of padding bytes if you include the\n padding_length uint8 itself as a padding byte. */\n if(!decrypt) {\n // get the number of padding bytes required to reach the blockSize and\n // subtract 1 for the padding value (to make room for the padding_length\n // uint8)\n var padding = blockSize - (input.length() % blockSize);\n input.fillWithByte(padding - 1, padding);\n }\n return true;\n}\n\n/**\n * Handles padding for aes_cbc_sha1 in decrypt mode.\n *\n * @param blockSize the block size.\n * @param output the output buffer.\n * @param decrypt true in decrypt mode, false in encrypt mode.\n *\n * @return true on success, false on failure.\n */\nfunction decrypt_aes_cbc_sha1_padding(blockSize, output, decrypt) {\n var rval = true;\n if(decrypt) {\n /* The last byte in the output specifies the number of padding bytes not\n including itself. Each of the padding bytes has the same value as that\n last byte (known as the padding_length). Here we check all padding\n bytes to ensure they have the value of padding_length even if one of\n them is bad in order to ward-off timing attacks. */\n var len = output.length();\n var paddingLength = output.last();\n for(var i = len - 1 - paddingLength; i < len - 1; ++i) {\n rval = rval && (output.at(i) == paddingLength);\n }\n if(rval) {\n // trim off padding bytes and last padding length byte\n output.truncate(paddingLength + 1);\n }\n }\n return rval;\n}\n\n/**\n * Decrypts a TLSCipherText record into a TLSCompressed record using\n * AES in CBC mode.\n *\n * @param record the TLSCipherText record to decrypt.\n * @param s the ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nfunction decrypt_aes_cbc_sha1(record, s) {\n var rval = false;\n\n var iv;\n if(record.version.minor === tls.Versions.TLS_1_0.minor) {\n // use pre-generated IV when initializing for TLS 1.0, otherwise use the\n // residue from the previous decryption\n iv = s.cipherState.init ? 
null : s.cipherState.iv;\n } else {\n // TLS 1.1+ use an explicit IV every time to protect against CBC attacks\n // that is appended to the record fragment\n iv = record.fragment.getBytes(16);\n }\n\n s.cipherState.init = true;\n\n // start cipher\n var cipher = s.cipherState.cipher;\n cipher.start({iv: iv});\n\n // do decryption\n cipher.update(record.fragment);\n rval = cipher.finish(decrypt_aes_cbc_sha1_padding);\n\n // even if decryption fails, keep going to minimize timing attacks\n\n // decrypted data:\n // first (len - 20) bytes = application data\n // last 20 bytes = MAC\n var macLen = s.macLength;\n\n // create a random MAC to check against should the mac length check fail\n // Note: do this regardless of the failure to keep timing consistent\n var mac = forge.random.getBytesSync(macLen);\n\n // get fragment and mac\n var len = cipher.output.length();\n if(len >= macLen) {\n record.fragment = cipher.output.getBytes(len - macLen);\n mac = cipher.output.getBytes(macLen);\n } else {\n // bad data, but get bytes anyway to try to keep timing consistent\n record.fragment = cipher.output.getBytes();\n }\n record.fragment = forge.util.createBuffer(record.fragment);\n record.length = record.fragment.length();\n\n // see if data integrity checks out, update sequence number\n var mac2 = s.macFunction(s.macKey, s.sequenceNumber, record);\n s.updateSequenceNumber();\n rval = compareMacs(s.macKey, mac, mac2) && rval;\n return rval;\n}\n\n/**\n * Safely compare two MACs. This function will compare two MACs in a way\n * that protects against timing attacks.\n *\n * TODO: Expose elsewhere as a utility API.\n *\n * See: https://www.nccgroup.trust/us/about-us/newsroom-and-events/blog/2011/february/double-hmac-verification/\n *\n * @param key the MAC key to use.\n * @param mac1 as a binary-encoded string of bytes.\n * @param mac2 as a binary-encoded string of bytes.\n *\n * @return true if the MACs are the same, false if not.\n */\nfunction compareMacs(key, mac1, mac2) {\n var hmac = forge.hmac.create();\n\n hmac.start('SHA1', key);\n hmac.update(mac1);\n mac1 = hmac.digest().getBytes();\n\n hmac.start(null, null);\n hmac.update(mac2);\n mac2 = hmac.digest().getBytes();\n\n return mac1 === mac2;\n}\n","/**\n * Copyright (c) 2019 Digital Bazaar, Inc.\n */\n\nvar forge = require('./forge');\nrequire('./asn1');\nvar asn1 = forge.asn1;\n\nexports.privateKeyValidator = {\n // PrivateKeyInfo\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'PrivateKeyInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // privateKeyAlgorithm\n name: 'PrivateKeyInfo.privateKeyAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'privateKeyOid'\n }]\n }, {\n // PrivateKey\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'privateKey'\n }]\n};\n\nexports.publicKeyValidator = {\n name: 'SubjectPublicKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'subjectPublicKeyInfo',\n value: [{\n name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n 
name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'publicKeyOid'\n }]\n },\n // capture group for ed25519PublicKey\n {\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n composed: true,\n captureBitStringValue: 'ed25519PublicKey'\n }\n // FIXME: this is capture group for rsaPublicKey, use it in this API or\n // discard?\n /* {\n // subjectPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n value: [{\n // RSAPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'rsaPublicKey'\n }]\n } */\n ]\n};\n","/**\n * Javascript implementation of Abstract Syntax Notation Number One.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n *\n * An API for storing data using the Abstract Syntax Notation Number One\n * format using DER (Distinguished Encoding Rules) encoding. This encoding is\n * commonly used to store data for PKI, i.e. X.509 Certificates, and this\n * implementation exists for that purpose.\n *\n * Abstract Syntax Notation Number One (ASN.1) is used to define the abstract\n * syntax of information without restricting the way the information is encoded\n * for transmission. It provides a standard that allows for open systems\n * communication. ASN.1 defines the syntax of information data and a number of\n * simple data types as well as a notation for describing them and specifying\n * values for them.\n *\n * The RSA algorithm creates public and private keys that are often stored in\n * X.509 or PKCS#X formats -- which use ASN.1 (encoded in DER format). This\n * class provides the most basic functionality required to store and load DSA\n * keys that are encoded according to ASN.1.\n *\n * The most common binary encodings for ASN.1 are BER (Basic Encoding Rules)\n * and DER (Distinguished Encoding Rules). DER is just a subset of BER that\n * has stricter requirements for how data must be encoded.\n *\n * Each ASN.1 structure has a tag (a byte identifying the ASN.1 structure type)\n * and a byte array for the value of this ASN1 structure which may be data or a\n * list of ASN.1 structures.\n *\n * Each ASN.1 structure using BER is (Tag-Length-Value):\n *\n * | byte 0 | bytes X | bytes Y |\n * |--------|---------|----------\n * | tag | length | value |\n *\n * ASN.1 allows for tags to be of \"High-tag-number form\" which allows a tag to\n * be two or more octets, but that is not supported by this class. A tag is\n * only 1 byte. Bits 1-5 give the tag number (ie the data type within a\n * particular 'class'), 6 indicates whether or not the ASN.1 value is\n * constructed from other ASN.1 values, and bits 7 and 8 give the 'class'. If\n * bits 7 and 8 are both zero, the class is UNIVERSAL. If only bit 7 is set,\n * then the class is APPLICATION. If only bit 8 is set, then the class is\n * CONTEXT_SPECIFIC. 
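// Sketch of pulling the class, constructed flag, and tag number out of a
// single identifier octet, per the bit layout described here (assumed
// standalone helper; the parser later in this file does the same masking).
function parseTagByteSketch(b) {
  return {
    tagClass: b & 0xC0,                // bits 7-8: UNIVERSAL, APPLICATION, ...
    constructed: (b & 0x20) === 0x20,  // bit 6: constructed form
    type: b & 0x1F                     // bits 1-5: tag number
  };
}
// parseTagByteSketch(0x30)
//   -> { tagClass: 0x00 (UNIVERSAL), constructed: true, type: 16 (SEQUENCE) }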
If both bits 7 and 8 are set, then the class is PRIVATE.\n * The tag numbers for the data types for the class UNIVERSAL are listed below:\n *\n * UNIVERSAL 0 Reserved for use by the encoding rules\n * UNIVERSAL 1 Boolean type\n * UNIVERSAL 2 Integer type\n * UNIVERSAL 3 Bitstring type\n * UNIVERSAL 4 Octetstring type\n * UNIVERSAL 5 Null type\n * UNIVERSAL 6 Object identifier type\n * UNIVERSAL 7 Object descriptor type\n * UNIVERSAL 8 External type and Instance-of type\n * UNIVERSAL 9 Real type\n * UNIVERSAL 10 Enumerated type\n * UNIVERSAL 11 Embedded-pdv type\n * UNIVERSAL 12 UTF8String type\n * UNIVERSAL 13 Relative object identifier type\n * UNIVERSAL 14-15 Reserved for future editions\n * UNIVERSAL 16 Sequence and Sequence-of types\n * UNIVERSAL 17 Set and Set-of types\n * UNIVERSAL 18-22, 25-30 Character string types\n * UNIVERSAL 23-24 Time types\n *\n * The length of an ASN.1 structure is specified after the tag identifier.\n * There is a definite form and an indefinite form. The indefinite form may\n * be used if the encoding is constructed and not all immediately available.\n * The indefinite form is encoded using a length byte with only the 8th bit\n * set. The end of the constructed object is marked using end-of-contents\n * octets (two zero bytes).\n *\n * The definite form looks like this:\n *\n * The length may take up 1 or more bytes, it depends on the length of the\n * value of the ASN.1 structure. DER encoding requires that if the ASN.1\n * structure has a value that has a length greater than 127, more than 1 byte\n * will be used to store its length, otherwise just one byte will be used.\n * This is strict.\n *\n * In the case that the length of the ASN.1 value is less than 127, 1 octet\n * (byte) is used to store the \"short form\" length. The 8th bit has a value of\n * 0 indicating the length is \"short form\" and not \"long form\" and bits 7-1\n * give the length of the data. (The 8th bit is the left-most, most significant\n * bit: also known as big endian or network format).\n *\n * In the case that the length of the ASN.1 value is greater than 127, 2 to\n * 127 octets (bytes) are used to store the \"long form\" length. The first\n * byte's 8th bit is set to 1 to indicate the length is \"long form.\" Bits 7-1\n * give the number of additional octets. All following octets are in base 256\n * with the most significant digit first (typical big-endian binary unsigned\n * integer storage). So, for instance, if the length of a value was 257, the\n * first byte would be set to:\n *\n * 10000010 = 130 = 0x82.\n *\n * This indicates there are 2 octets (base 256) for the length. The second and\n * third bytes (the octets just mentioned) would store the length in base 256:\n *\n * octet 2: 00000001 = 1 * 256^1 = 256\n * octet 3: 00000001 = 1 * 256^0 = 1\n * total = 257\n *\n * The algorithm for converting a js integer value of 257 to base-256 is:\n *\n * var value = 257;\n * var bytes = [];\n * bytes[0] = (value >>> 8) & 0xFF; // most significant byte first\n * bytes[1] = value & 0xFF; // least significant byte last\n *\n * On the ASN.1 UNIVERSAL Object Identifier (OID) type:\n *\n * An OID can be written like: \"value1.value2.value3...valueN\"\n *\n * The DER encoding rules:\n *\n * The first byte has the value 40 * value1 + value2.\n * The following bytes, if any, encode the remaining values. 
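// Sketch of the definite-form length encoding described above (assumed
// standalone helper): short form for lengths of 127 or less, otherwise a
// long-form prefix byte (0x80 | number of length octets) followed by the
// length in big-endian base 256.
function encodeLengthSketch(len) {
  if(len <= 127) {
    return [len];
  }
  var bytes = [];
  while(len > 0) {
    bytes.unshift(len & 0xFF);
    len >>>= 8;
  }
  bytes.unshift(0x80 | bytes.length);
  return bytes;
}
// encodeLengthSketch(257) -> [0x82, 0x01, 0x01], matching the example above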
Each value is\n * encoded in base 128, most significant digit first (big endian), with as\n * few digits as possible, and the most significant bit of each byte set\n * to 1 except the last in each value's encoding. For example: Given the\n * OID \"1.2.840.113549\", its DER encoding is (remember each byte except the\n * last one in each encoding is OR'd with 0x80):\n *\n * byte 1: 40 * 1 + 2 = 42 = 0x2A.\n * bytes 2-3: 128 * 6 + 72 = 840 = 6 72 = 6 72 = 0x0648 = 0x8648\n * bytes 4-6: 16384 * 6 + 128 * 119 + 13 = 6 119 13 = 0x06770D = 0x86F70D\n *\n * The final value is: 0x2A864886F70D.\n * The full OID (including ASN.1 tag and length of 6 bytes) is:\n * 0x06062A864886F70D\n */\nvar forge = require('./forge');\nrequire('./util');\nrequire('./oids');\n\n/* ASN.1 API */\nvar asn1 = module.exports = forge.asn1 = forge.asn1 || {};\n\n/**\n * ASN.1 classes.\n */\nasn1.Class = {\n UNIVERSAL: 0x00,\n APPLICATION: 0x40,\n CONTEXT_SPECIFIC: 0x80,\n PRIVATE: 0xC0\n};\n\n/**\n * ASN.1 types. Not all types are supported by this implementation, only\n * those necessary to implement a simple PKI are implemented.\n */\nasn1.Type = {\n NONE: 0,\n BOOLEAN: 1,\n INTEGER: 2,\n BITSTRING: 3,\n OCTETSTRING: 4,\n NULL: 5,\n OID: 6,\n ODESC: 7,\n EXTERNAL: 8,\n REAL: 9,\n ENUMERATED: 10,\n EMBEDDED: 11,\n UTF8: 12,\n ROID: 13,\n SEQUENCE: 16,\n SET: 17,\n PRINTABLESTRING: 19,\n IA5STRING: 22,\n UTCTIME: 23,\n GENERALIZEDTIME: 24,\n BMPSTRING: 30\n};\n\n/**\n * Creates a new asn1 object.\n *\n * @param tagClass the tag class for the object.\n * @param type the data type (tag number) for the object.\n * @param constructed true if the asn1 object is in constructed form.\n * @param value the value for the object, if it is not constructed.\n * @param [options] the options to use:\n * [bitStringContents] the plain BIT STRING content including padding\n * byte.\n *\n * @return the asn1 object.\n */\nasn1.create = function(tagClass, type, constructed, value, options) {\n /* An asn1 object has a tagClass, a type, a constructed flag, and a\n value. The value's type depends on the constructed flag. If\n constructed, it will contain a list of other asn1 objects. If not,\n it will contain the ASN.1 value as an array of bytes formatted\n according to the ASN.1 data type. 
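// Usage sketch for the asn1 API defined in this file: wrap a single OID in a
// SEQUENCE with asn1.create() and serialize it with asn1.toDer(); the bytes
// follow from the '1.2.840.113549' example worked through above.
var seqSketch = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
  asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
    asn1.oidToDer('1.2.840.113549').getBytes())
]);
// asn1.toDer(seqSketch).toHex() -> '300806062a864886f70d'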
*/\n\n // remove undefined values\n if(forge.util.isArray(value)) {\n var tmp = [];\n for(var i = 0; i < value.length; ++i) {\n if(value[i] !== undefined) {\n tmp.push(value[i]);\n }\n }\n value = tmp;\n }\n\n var obj = {\n tagClass: tagClass,\n type: type,\n constructed: constructed,\n composed: constructed || forge.util.isArray(value),\n value: value\n };\n if(options && 'bitStringContents' in options) {\n // TODO: copy byte buffer if it's a buffer not a string\n obj.bitStringContents = options.bitStringContents;\n // TODO: add readonly flag to avoid this overhead\n // save copy to detect changes\n obj.original = asn1.copy(obj);\n }\n return obj;\n};\n\n/**\n * Copies an asn1 object.\n *\n * @param obj the asn1 object.\n * @param [options] copy options:\n * [excludeBitStringContents] true to not copy bitStringContents\n *\n * @return the a copy of the asn1 object.\n */\nasn1.copy = function(obj, options) {\n var copy;\n\n if(forge.util.isArray(obj)) {\n copy = [];\n for(var i = 0; i < obj.length; ++i) {\n copy.push(asn1.copy(obj[i], options));\n }\n return copy;\n }\n\n if(typeof obj === 'string') {\n // TODO: copy byte buffer if it's a buffer not a string\n return obj;\n }\n\n copy = {\n tagClass: obj.tagClass,\n type: obj.type,\n constructed: obj.constructed,\n composed: obj.composed,\n value: asn1.copy(obj.value, options)\n };\n if(options && !options.excludeBitStringContents) {\n // TODO: copy byte buffer if it's a buffer not a string\n copy.bitStringContents = obj.bitStringContents;\n }\n return copy;\n};\n\n/**\n * Compares asn1 objects for equality.\n *\n * Note this function does not run in constant time.\n *\n * @param obj1 the first asn1 object.\n * @param obj2 the second asn1 object.\n * @param [options] compare options:\n * [includeBitStringContents] true to compare bitStringContents\n *\n * @return true if the asn1 objects are equal.\n */\nasn1.equals = function(obj1, obj2, options) {\n if(forge.util.isArray(obj1)) {\n if(!forge.util.isArray(obj2)) {\n return false;\n }\n if(obj1.length !== obj2.length) {\n return false;\n }\n for(var i = 0; i < obj1.length; ++i) {\n if(!asn1.equals(obj1[i], obj2[i])) {\n return false;\n }\n }\n return true;\n }\n\n if(typeof obj1 !== typeof obj2) {\n return false;\n }\n\n if(typeof obj1 === 'string') {\n return obj1 === obj2;\n }\n\n var equal = obj1.tagClass === obj2.tagClass &&\n obj1.type === obj2.type &&\n obj1.constructed === obj2.constructed &&\n obj1.composed === obj2.composed &&\n asn1.equals(obj1.value, obj2.value);\n if(options && options.includeBitStringContents) {\n equal = equal && (obj1.bitStringContents === obj2.bitStringContents);\n }\n\n return equal;\n};\n\n/**\n * Gets the length of a BER-encoded ASN.1 value.\n *\n * In case the length is not specified, undefined is returned.\n *\n * @param b the BER-encoded ASN.1 byte buffer, starting with the first\n * length byte.\n *\n * @return the length of the BER-encoded ASN.1 value or undefined.\n */\nasn1.getBerValueLength = function(b) {\n // TODO: move this function and related DER/BER functions to a der.js\n // file; better abstract ASN.1 away from der/ber.\n var b2 = b.getByte();\n if(b2 === 0x80) {\n return undefined;\n }\n\n // see if the length is \"short form\" or \"long form\" (bit 8 set)\n var length;\n var longForm = b2 & 0x80;\n if(!longForm) {\n // length is just the first byte\n length = b2;\n } else {\n // the number of bytes the length is specified in bits 7 through 1\n // and each length byte is in big-endian base-256\n length = b.getInt((b2 & 0x7F) << 3);\n }\n 
return length;\n};\n\n/**\n * Check if the byte buffer has enough bytes. Throws an Error if not.\n *\n * @param bytes the byte buffer to parse from.\n * @param remaining the bytes remaining in the current parsing state.\n * @param n the number of bytes the buffer must have.\n */\nfunction _checkBufferLength(bytes, remaining, n) {\n if(n > remaining) {\n var error = new Error('Too few bytes to parse DER.');\n error.available = bytes.length();\n error.remaining = remaining;\n error.requested = n;\n throw error;\n }\n}\n\n/**\n * Gets the length of a BER-encoded ASN.1 value.\n *\n * In case the length is not specified, undefined is returned.\n *\n * @param bytes the byte buffer to parse from.\n * @param remaining the bytes remaining in the current parsing state.\n *\n * @return the length of the BER-encoded ASN.1 value or undefined.\n */\nvar _getValueLength = function(bytes, remaining) {\n // TODO: move this function and related DER/BER functions to a der.js\n // file; better abstract ASN.1 away from der/ber.\n // fromDer already checked that this byte exists\n var b2 = bytes.getByte();\n remaining--;\n if(b2 === 0x80) {\n return undefined;\n }\n\n // see if the length is \"short form\" or \"long form\" (bit 8 set)\n var length;\n var longForm = b2 & 0x80;\n if(!longForm) {\n // length is just the first byte\n length = b2;\n } else {\n // the number of bytes the length is specified in bits 7 through 1\n // and each length byte is in big-endian base-256\n var longFormBytes = b2 & 0x7F;\n _checkBufferLength(bytes, remaining, longFormBytes);\n length = bytes.getInt(longFormBytes << 3);\n }\n // FIXME: this will only happen for 32 bit getInt with high bit set\n if(length < 0) {\n throw new Error('Negative length: ' + length);\n }\n return length;\n};\n\n/**\n * Parses an asn1 object from a byte buffer in DER format.\n *\n * @param bytes the byte buffer to parse from.\n * @param [strict] true to be strict when checking value lengths, false to\n * allow truncated values (default: true).\n * @param [options] object with options or boolean strict flag\n * [strict] true to be strict when checking value lengths, false to\n * allow truncated values (default: true).\n * [parseAllBytes] true to ensure all bytes are parsed\n * (default: true)\n * [decodeBitStrings] true to attempt to decode the content of\n * BIT STRINGs (not OCTET STRINGs) using strict mode. Note that\n * without schema support to understand the data context this can\n * erroneously decode values that happen to be valid ASN.1. This\n * flag will be deprecated or removed as soon as schema support is\n * available. 
(default: true)\n *\n * @throws Will throw an error for various malformed input conditions.\n *\n * @return the parsed asn1 object.\n */\nasn1.fromDer = function(bytes, options) {\n if(options === undefined) {\n options = {\n strict: true,\n parseAllBytes: true,\n decodeBitStrings: true\n };\n }\n if(typeof options === 'boolean') {\n options = {\n strict: options,\n parseAllBytes: true,\n decodeBitStrings: true\n };\n }\n if(!('strict' in options)) {\n options.strict = true;\n }\n if(!('parseAllBytes' in options)) {\n options.parseAllBytes = true;\n }\n if(!('decodeBitStrings' in options)) {\n options.decodeBitStrings = true;\n }\n\n // wrap in buffer if needed\n if(typeof bytes === 'string') {\n bytes = forge.util.createBuffer(bytes);\n }\n\n var byteCount = bytes.length();\n var value = _fromDer(bytes, bytes.length(), 0, options);\n if(options.parseAllBytes && bytes.length() !== 0) {\n var error = new Error('Unparsed DER bytes remain after ASN.1 parsing.');\n error.byteCount = byteCount;\n error.remaining = bytes.length();\n throw error;\n }\n return value;\n};\n\n/**\n * Internal function to parse an asn1 object from a byte buffer in DER format.\n *\n * @param bytes the byte buffer to parse from.\n * @param remaining the number of bytes remaining for this chunk.\n * @param depth the current parsing depth.\n * @param options object with same options as fromDer().\n *\n * @return the parsed asn1 object.\n */\nfunction _fromDer(bytes, remaining, depth, options) {\n // temporary storage for consumption calculations\n var start;\n\n // minimum length for ASN.1 DER structure is 2\n _checkBufferLength(bytes, remaining, 2);\n\n // get the first byte\n var b1 = bytes.getByte();\n // consumed one byte\n remaining--;\n\n // get the tag class\n var tagClass = (b1 & 0xC0);\n\n // get the type (bits 1-5)\n var type = b1 & 0x1F;\n\n // get the variable value length and adjust remaining bytes\n start = bytes.length();\n var length = _getValueLength(bytes, remaining);\n remaining -= start - bytes.length();\n\n // ensure there are enough bytes to get the value\n if(length !== undefined && length > remaining) {\n if(options.strict) {\n var error = new Error('Too few bytes to read ASN.1 value.');\n error.available = bytes.length();\n error.remaining = remaining;\n error.requested = length;\n throw error;\n }\n // Note: be lenient with truncated values and use remaining state bytes\n length = remaining;\n }\n\n // value storage\n var value;\n // possible BIT STRING contents storage\n var bitStringContents;\n\n // constructed flag is bit 6 (32 = 0x20) of the first byte\n var constructed = ((b1 & 0x20) === 0x20);\n if(constructed) {\n // parse child asn1 objects from the value\n value = [];\n if(length === undefined) {\n // asn1 object of indefinite length, read until end tag\n for(;;) {\n _checkBufferLength(bytes, remaining, 2);\n if(bytes.bytes(2) === String.fromCharCode(0, 0)) {\n bytes.getBytes(2);\n remaining -= 2;\n break;\n }\n start = bytes.length();\n value.push(_fromDer(bytes, remaining, depth + 1, options));\n remaining -= start - bytes.length();\n }\n } else {\n // parsing asn1 object of definite length\n while(length > 0) {\n start = bytes.length();\n value.push(_fromDer(bytes, length, depth + 1, options));\n remaining -= start - bytes.length();\n length -= start - bytes.length();\n }\n }\n }\n\n // if a BIT STRING, save the contents including padding\n if(value === undefined && tagClass === asn1.Class.UNIVERSAL &&\n type === asn1.Type.BITSTRING) {\n bitStringContents = bytes.bytes(length);\n 
}\n\n // determine if a non-constructed value should be decoded as a composed\n // value that contains other ASN.1 objects. BIT STRINGs (and OCTET STRINGs)\n // can be used this way.\n if(value === undefined && options.decodeBitStrings &&\n tagClass === asn1.Class.UNIVERSAL &&\n // FIXME: OCTET STRINGs not yet supported here\n // .. other parts of forge expect to decode OCTET STRINGs manually\n (type === asn1.Type.BITSTRING /*|| type === asn1.Type.OCTETSTRING*/) &&\n length > 1) {\n // save read position\n var savedRead = bytes.read;\n var savedRemaining = remaining;\n var unused = 0;\n if(type === asn1.Type.BITSTRING) {\n /* The first octet gives the number of bits by which the length of the\n bit string is less than the next multiple of eight (this is called\n the \"number of unused bits\").\n\n The second and following octets give the value of the bit string\n converted to an octet string. */\n _checkBufferLength(bytes, remaining, 1);\n unused = bytes.getByte();\n remaining--;\n }\n // if all bits are used, maybe the BIT/OCTET STRING holds ASN.1 objs\n if(unused === 0) {\n try {\n // attempt to parse child asn1 object from the value\n // (stored in array to signal composed value)\n start = bytes.length();\n var subOptions = {\n // enforce strict mode to avoid parsing ASN.1 from plain data\n strict: true,\n decodeBitStrings: true\n };\n var composed = _fromDer(bytes, remaining, depth + 1, subOptions);\n var used = start - bytes.length();\n remaining -= used;\n if(type == asn1.Type.BITSTRING) {\n used++;\n }\n\n // if the data all decoded and the class indicates UNIVERSAL or\n // CONTEXT_SPECIFIC then assume we've got an encapsulated ASN.1 object\n var tc = composed.tagClass;\n if(used === length &&\n (tc === asn1.Class.UNIVERSAL || tc === asn1.Class.CONTEXT_SPECIFIC)) {\n value = [composed];\n }\n } catch(ex) {\n }\n }\n if(value === undefined) {\n // restore read position\n bytes.read = savedRead;\n remaining = savedRemaining;\n }\n }\n\n if(value === undefined) {\n // asn1 not constructed or composed, get raw value\n // TODO: do DER to OID conversion and vice-versa in .toDer?\n\n if(length === undefined) {\n if(options.strict) {\n throw new Error('Non-constructed ASN.1 object of indefinite length.');\n }\n // be lenient and use remaining state bytes\n length = remaining;\n }\n\n if(type === asn1.Type.BMPSTRING) {\n value = '';\n for(; length > 0; length -= 2) {\n _checkBufferLength(bytes, remaining, 2);\n value += String.fromCharCode(bytes.getInt16());\n remaining -= 2;\n }\n } else {\n value = bytes.getBytes(length);\n remaining -= length;\n }\n }\n\n // add BIT STRING contents if available\n var asn1Options = bitStringContents === undefined ? 
null : {\n bitStringContents: bitStringContents\n };\n\n // create and return asn1 object\n return asn1.create(tagClass, type, constructed, value, asn1Options);\n}\n\n/**\n * Converts the given asn1 object to a buffer of bytes in DER format.\n *\n * @param asn1 the asn1 object to convert to bytes.\n *\n * @return the buffer of bytes.\n */\nasn1.toDer = function(obj) {\n var bytes = forge.util.createBuffer();\n\n // build the first byte\n var b1 = obj.tagClass | obj.type;\n\n // for storing the ASN.1 value\n var value = forge.util.createBuffer();\n\n // use BIT STRING contents if available and data not changed\n var useBitStringContents = false;\n if('bitStringContents' in obj) {\n useBitStringContents = true;\n if(obj.original) {\n useBitStringContents = asn1.equals(obj, obj.original);\n }\n }\n\n if(useBitStringContents) {\n value.putBytes(obj.bitStringContents);\n } else if(obj.composed) {\n // if composed, use each child asn1 object's DER bytes as value\n // turn on 6th bit (0x20 = 32) to indicate asn1 is constructed\n // from other asn1 objects\n if(obj.constructed) {\n b1 |= 0x20;\n } else {\n // type is a bit string, add unused bits of 0x00\n value.putByte(0x00);\n }\n\n // add all of the child DER bytes together\n for(var i = 0; i < obj.value.length; ++i) {\n if(obj.value[i] !== undefined) {\n value.putBuffer(asn1.toDer(obj.value[i]));\n }\n }\n } else {\n // use asn1.value directly\n if(obj.type === asn1.Type.BMPSTRING) {\n for(var i = 0; i < obj.value.length; ++i) {\n value.putInt16(obj.value.charCodeAt(i));\n }\n } else {\n // ensure integer is minimally-encoded\n // TODO: should all leading bytes be stripped vs just one?\n // .. ex '00 00 01' => '01'?\n if(obj.type === asn1.Type.INTEGER &&\n obj.value.length > 1 &&\n // leading 0x00 for positive integer\n ((obj.value.charCodeAt(0) === 0 &&\n (obj.value.charCodeAt(1) & 0x80) === 0) ||\n // leading 0xFF for negative integer\n (obj.value.charCodeAt(0) === 0xFF &&\n (obj.value.charCodeAt(1) & 0x80) === 0x80))) {\n value.putBytes(obj.value.substr(1));\n } else {\n value.putBytes(obj.value);\n }\n }\n }\n\n // add tag byte\n bytes.putByte(b1);\n\n // use \"short form\" encoding\n if(value.length() <= 127) {\n // one byte describes the length\n // bit 8 = 0 and bits 7-1 = length\n bytes.putByte(value.length() & 0x7F);\n } else {\n // use \"long form\" encoding\n // 2 to 127 bytes describe the length\n // first byte: bit 8 = 1 and bits 7-1 = # of additional bytes\n // other bytes: length in base 256, big-endian\n var len = value.length();\n var lenBytes = '';\n do {\n lenBytes += String.fromCharCode(len & 0xFF);\n len = len >>> 8;\n } while(len > 0);\n\n // set first byte to # bytes used to store the length and turn on\n // bit 8 to indicate long-form length is used\n bytes.putByte(lenBytes.length | 0x80);\n\n // concatenate length bytes in reverse since they were generated\n // little endian and we need big endian\n for(var i = lenBytes.length - 1; i >= 0; --i) {\n bytes.putByte(lenBytes.charCodeAt(i));\n }\n }\n\n // concatenate value bytes\n bytes.putBuffer(value);\n return bytes;\n};\n\n/**\n * Converts an OID dot-separated string to a byte buffer. 
The byte buffer\n * contains only the DER-encoded value, not any tag or length bytes.\n *\n * @param oid the OID dot-separated string.\n *\n * @return the byte buffer.\n */\nasn1.oidToDer = function(oid) {\n // split OID into individual values\n var values = oid.split('.');\n var bytes = forge.util.createBuffer();\n\n // first byte is 40 * value1 + value2\n bytes.putByte(40 * parseInt(values[0], 10) + parseInt(values[1], 10));\n // other bytes are each value in base 128 with 8th bit set except for\n // the last byte for each value\n var last, valueBytes, value, b;\n for(var i = 2; i < values.length; ++i) {\n // produce value bytes in reverse because we don't know how many\n // bytes it will take to store the value\n last = true;\n valueBytes = [];\n value = parseInt(values[i], 10);\n do {\n b = value & 0x7F;\n value = value >>> 7;\n // if value is not last, then turn on 8th bit\n if(!last) {\n b |= 0x80;\n }\n valueBytes.push(b);\n last = false;\n } while(value > 0);\n\n // add value bytes in reverse (needs to be in big endian)\n for(var n = valueBytes.length - 1; n >= 0; --n) {\n bytes.putByte(valueBytes[n]);\n }\n }\n\n return bytes;\n};\n\n/**\n * Converts a DER-encoded byte buffer to an OID dot-separated string. The\n * byte buffer should contain only the DER-encoded value, not any tag or\n * length bytes.\n *\n * @param bytes the byte buffer.\n *\n * @return the OID dot-separated string.\n */\nasn1.derToOid = function(bytes) {\n var oid;\n\n // wrap in buffer if needed\n if(typeof bytes === 'string') {\n bytes = forge.util.createBuffer(bytes);\n }\n\n // first byte is 40 * value1 + value2\n var b = bytes.getByte();\n oid = Math.floor(b / 40) + '.' + (b % 40);\n\n // other bytes are each value in base 128 with 8th bit set except for\n // the last byte for each value\n var value = 0;\n while(bytes.length() > 0) {\n b = bytes.getByte();\n value = value << 7;\n // not the last byte for the value\n if(b & 0x80) {\n value += b & 0x7F;\n } else {\n // last byte\n oid += '.' + (value + b);\n value = 0;\n }\n }\n\n return oid;\n};\n\n/**\n * Converts a UTCTime value to a date.\n *\n * Note: GeneralizedTime has 4 digits for the year and is used for X.509\n * dates past 2049. Parsing that structure hasn't been implemented yet.\n *\n * @param utc the UTCTime value to convert.\n *\n * @return the date.\n */\nasn1.utcTimeToDate = function(utc) {\n /* The following formats can be used:\n\n YYMMDDhhmmZ\n YYMMDDhhmm+hh'mm'\n YYMMDDhhmm-hh'mm'\n YYMMDDhhmmssZ\n YYMMDDhhmmss+hh'mm'\n YYMMDDhhmmss-hh'mm'\n\n Where:\n\n YY is the least significant two digits of the year\n MM is the month (01 to 12)\n DD is the day (01 to 31)\n hh is the hour (00 to 23)\n mm are the minutes (00 to 59)\n ss are the seconds (00 to 59)\n Z indicates that local time is GMT, + indicates that local time is\n later than GMT, and - indicates that local time is earlier than GMT\n hh' is the absolute value of the offset from GMT in hours\n mm' is the absolute value of the offset from GMT in minutes */\n var date = new Date();\n\n // if YY >= 50 use 19xx, if YY < 50 use 20xx\n var year = parseInt(utc.substr(0, 2), 10);\n year = (year >= 50) ? 
1900 + year : 2000 + year;\n var MM = parseInt(utc.substr(2, 2), 10) - 1; // use 0-11 for month\n var DD = parseInt(utc.substr(4, 2), 10);\n var hh = parseInt(utc.substr(6, 2), 10);\n var mm = parseInt(utc.substr(8, 2), 10);\n var ss = 0;\n\n // not just YYMMDDhhmmZ\n if(utc.length > 11) {\n // get character after minutes\n var c = utc.charAt(10);\n var end = 10;\n\n // see if seconds are present\n if(c !== '+' && c !== '-') {\n // get seconds\n ss = parseInt(utc.substr(10, 2), 10);\n end += 2;\n }\n }\n\n // update date\n date.setUTCFullYear(year, MM, DD);\n date.setUTCHours(hh, mm, ss, 0);\n\n if(end) {\n // get +/- after end of time\n c = utc.charAt(end);\n if(c === '+' || c === '-') {\n // get hours+minutes offset\n var hhoffset = parseInt(utc.substr(end + 1, 2), 10);\n var mmoffset = parseInt(utc.substr(end + 4, 2), 10);\n\n // calculate offset in milliseconds\n var offset = hhoffset * 60 + mmoffset;\n offset *= 60000;\n\n // apply offset\n if(c === '+') {\n date.setTime(+date - offset);\n } else {\n date.setTime(+date + offset);\n }\n }\n }\n\n return date;\n};\n\n/**\n * Converts a GeneralizedTime value to a date.\n *\n * @param gentime the GeneralizedTime value to convert.\n *\n * @return the date.\n */\nasn1.generalizedTimeToDate = function(gentime) {\n /* The following formats can be used:\n\n YYYYMMDDHHMMSS\n YYYYMMDDHHMMSS.fff\n YYYYMMDDHHMMSSZ\n YYYYMMDDHHMMSS.fffZ\n YYYYMMDDHHMMSS+hh'mm'\n YYYYMMDDHHMMSS.fff+hh'mm'\n YYYYMMDDHHMMSS-hh'mm'\n YYYYMMDDHHMMSS.fff-hh'mm'\n\n Where:\n\n YYYY is the year\n MM is the month (01 to 12)\n DD is the day (01 to 31)\n hh is the hour (00 to 23)\n mm are the minutes (00 to 59)\n ss are the seconds (00 to 59)\n .fff is the second fraction, accurate to three decimal places\n Z indicates that local time is GMT, + indicates that local time is\n later than GMT, and - indicates that local time is earlier than GMT\n hh' is the absolute value of the offset from GMT in hours\n mm' is the absolute value of the offset from GMT in minutes */\n var date = new Date();\n\n var YYYY = parseInt(gentime.substr(0, 4), 10);\n var MM = parseInt(gentime.substr(4, 2), 10) - 1; // use 0-11 for month\n var DD = parseInt(gentime.substr(6, 2), 10);\n var hh = parseInt(gentime.substr(8, 2), 10);\n var mm = parseInt(gentime.substr(10, 2), 10);\n var ss = parseInt(gentime.substr(12, 2), 10);\n var fff = 0;\n var offset = 0;\n var isUTC = false;\n\n if(gentime.charAt(gentime.length - 1) === 'Z') {\n isUTC = true;\n }\n\n var end = gentime.length - 5, c = gentime.charAt(end);\n if(c === '+' || c === '-') {\n // get hours+minutes offset\n var hhoffset = parseInt(gentime.substr(end + 1, 2), 10);\n var mmoffset = parseInt(gentime.substr(end + 4, 2), 10);\n\n // calculate offset in milliseconds\n offset = hhoffset * 60 + mmoffset;\n offset *= 60000;\n\n // apply offset\n if(c === '+') {\n offset *= -1;\n }\n\n isUTC = true;\n }\n\n // check for second fraction\n if(gentime.charAt(14) === '.') {\n fff = parseFloat(gentime.substr(14), 10) * 1000;\n }\n\n if(isUTC) {\n date.setUTCFullYear(YYYY, MM, DD);\n date.setUTCHours(hh, mm, ss, fff);\n\n // apply offset\n date.setTime(+date + offset);\n } else {\n date.setFullYear(YYYY, MM, DD);\n date.setHours(hh, mm, ss, fff);\n }\n\n return date;\n};\n\n/**\n * Converts a date to a UTCTime value.\n *\n * Note: GeneralizedTime has 4 digits for the year and is used for X.509\n * dates past 2049. 
Converting to a GeneralizedTime hasn't been\n * implemented yet.\n *\n * @param date the date to convert.\n *\n * @return the UTCTime value.\n */\nasn1.dateToUtcTime = function(date) {\n // TODO: validate; currently assumes proper format\n if(typeof date === 'string') {\n return date;\n }\n\n var rval = '';\n\n // create format YYMMDDhhmmssZ\n var format = [];\n format.push(('' + date.getUTCFullYear()).substr(2));\n format.push('' + (date.getUTCMonth() + 1));\n format.push('' + date.getUTCDate());\n format.push('' + date.getUTCHours());\n format.push('' + date.getUTCMinutes());\n format.push('' + date.getUTCSeconds());\n\n // ensure 2 digits are used for each format entry\n for(var i = 0; i < format.length; ++i) {\n if(format[i].length < 2) {\n rval += '0';\n }\n rval += format[i];\n }\n rval += 'Z';\n\n return rval;\n};\n\n/**\n * Converts a date to a GeneralizedTime value.\n *\n * @param date the date to convert.\n *\n * @return the GeneralizedTime value as a string.\n */\nasn1.dateToGeneralizedTime = function(date) {\n // TODO: validate; currently assumes proper format\n if(typeof date === 'string') {\n return date;\n }\n\n var rval = '';\n\n // create format YYYYMMDDHHMMSSZ\n var format = [];\n format.push('' + date.getUTCFullYear());\n format.push('' + (date.getUTCMonth() + 1));\n format.push('' + date.getUTCDate());\n format.push('' + date.getUTCHours());\n format.push('' + date.getUTCMinutes());\n format.push('' + date.getUTCSeconds());\n\n // ensure 2 digits are used for each format entry\n for(var i = 0; i < format.length; ++i) {\n if(format[i].length < 2) {\n rval += '0';\n }\n rval += format[i];\n }\n rval += 'Z';\n\n return rval;\n};\n\n/**\n * Converts a javascript integer to a DER-encoded byte buffer to be used\n * as the value for an INTEGER type.\n *\n * @param x the integer.\n *\n * @return the byte buffer.\n */\nasn1.integerToDer = function(x) {\n var rval = forge.util.createBuffer();\n if(x >= -0x80 && x < 0x80) {\n return rval.putSignedInt(x, 8);\n }\n if(x >= -0x8000 && x < 0x8000) {\n return rval.putSignedInt(x, 16);\n }\n if(x >= -0x800000 && x < 0x800000) {\n return rval.putSignedInt(x, 24);\n }\n if(x >= -0x80000000 && x < 0x80000000) {\n return rval.putSignedInt(x, 32);\n }\n var error = new Error('Integer too large; max is 32-bits.');\n error.integer = x;\n throw error;\n};\n\n/**\n * Converts a DER-encoded byte buffer to a javascript integer. This is\n * typically used to decode the value of an INTEGER type.\n *\n * @param bytes the byte buffer.\n *\n * @return the integer.\n */\nasn1.derToInteger = function(bytes) {\n // wrap in buffer if needed\n if(typeof bytes === 'string') {\n bytes = forge.util.createBuffer(bytes);\n }\n\n var n = bytes.length() * 8;\n if(n > 32) {\n throw new Error('Integer too large; max is 32-bits.');\n }\n return bytes.getSignedInt(n);\n};\n\n/**\n * Validates that the given ASN.1 object is at least a super set of the\n * given ASN.1 structure. Only tag classes and types are checked. An\n * optional map may also be provided to capture ASN.1 values while the\n * structure is checked.\n *\n * To capture an ASN.1 value, set an object in the validator's 'capture'\n * parameter to the key to use in the capture map. To capture the full\n * ASN.1 object, specify 'captureAsn1'. 
To capture BIT STRING bytes, including\n * the leading unused bits counter byte, specify 'captureBitStringContents'.\n * To capture BIT STRING bytes, without the leading unused bits counter byte,\n * specify 'captureBitStringValue'.\n *\n * Objects in the validator may set a field 'optional' to true to indicate\n * that it isn't necessary to pass validation.\n *\n * @param obj the ASN.1 object to validate.\n * @param v the ASN.1 structure validator.\n * @param capture an optional map to capture values in.\n * @param errors an optional array for storing validation errors.\n *\n * @return true on success, false on failure.\n */\nasn1.validate = function(obj, v, capture, errors) {\n var rval = false;\n\n // ensure tag class and type are the same if specified\n if((obj.tagClass === v.tagClass || typeof(v.tagClass) === 'undefined') &&\n (obj.type === v.type || typeof(v.type) === 'undefined')) {\n // ensure constructed flag is the same if specified\n if(obj.constructed === v.constructed ||\n typeof(v.constructed) === 'undefined') {\n rval = true;\n\n // handle sub values\n if(v.value && forge.util.isArray(v.value)) {\n var j = 0;\n for(var i = 0; rval && i < v.value.length; ++i) {\n rval = v.value[i].optional || false;\n if(obj.value[j]) {\n rval = asn1.validate(obj.value[j], v.value[i], capture, errors);\n if(rval) {\n ++j;\n } else if(v.value[i].optional) {\n rval = true;\n }\n }\n if(!rval && errors) {\n errors.push(\n '[' + v.name + '] ' +\n 'Tag class \"' + v.tagClass + '\", type \"' +\n v.type + '\" expected value length \"' +\n v.value.length + '\", got \"' +\n obj.value.length + '\"');\n }\n }\n }\n\n if(rval && capture) {\n if(v.capture) {\n capture[v.capture] = obj.value;\n }\n if(v.captureAsn1) {\n capture[v.captureAsn1] = obj;\n }\n if(v.captureBitStringContents && 'bitStringContents' in obj) {\n capture[v.captureBitStringContents] = obj.bitStringContents;\n }\n if(v.captureBitStringValue && 'bitStringContents' in obj) {\n var value;\n if(obj.bitStringContents.length < 2) {\n capture[v.captureBitStringValue] = '';\n } else {\n // FIXME: support unused bits with data shifting\n var unused = obj.bitStringContents.charCodeAt(0);\n if(unused !== 0) {\n throw new Error(\n 'captureBitStringValue only supported for zero unused bits');\n }\n capture[v.captureBitStringValue] = obj.bitStringContents.slice(1);\n }\n }\n }\n } else if(errors) {\n errors.push(\n '[' + v.name + '] ' +\n 'Expected constructed \"' + v.constructed + '\", got \"' +\n obj.constructed + '\"');\n }\n } else if(errors) {\n if(obj.tagClass !== v.tagClass) {\n errors.push(\n '[' + v.name + '] ' +\n 'Expected tag class \"' + v.tagClass + '\", got \"' +\n obj.tagClass + '\"');\n }\n if(obj.type !== v.type) {\n errors.push(\n '[' + v.name + '] ' +\n 'Expected type \"' + v.type + '\", got \"' + obj.type + '\"');\n }\n }\n return rval;\n};\n\n// regex for testing for non-latin characters\nvar _nonLatinRegex = /[^\\\\u0000-\\\\u00ff]/;\n\n/**\n * Pretty prints an ASN.1 object to a string.\n *\n * @param obj the object to write out.\n * @param level the level in the tree.\n * @param indentation the indentation to use.\n *\n * @return the string.\n */\nasn1.prettyPrint = function(obj, level, indentation) {\n var rval = '';\n\n // set default level and indentation\n level = level || 0;\n indentation = indentation || 2;\n\n // start new line for deep levels\n if(level > 0) {\n rval += '\\n';\n }\n\n // create indent\n var indent = '';\n for(var i = 0; i < level * indentation; ++i) {\n indent += ' ';\n }\n\n // print class:type\n rval 
+= indent + 'Tag: ';\n switch(obj.tagClass) {\n case asn1.Class.UNIVERSAL:\n rval += 'Universal:';\n break;\n case asn1.Class.APPLICATION:\n rval += 'Application:';\n break;\n case asn1.Class.CONTEXT_SPECIFIC:\n rval += 'Context-Specific:';\n break;\n case asn1.Class.PRIVATE:\n rval += 'Private:';\n break;\n }\n\n if(obj.tagClass === asn1.Class.UNIVERSAL) {\n rval += obj.type;\n\n // known types\n switch(obj.type) {\n case asn1.Type.NONE:\n rval += ' (None)';\n break;\n case asn1.Type.BOOLEAN:\n rval += ' (Boolean)';\n break;\n case asn1.Type.INTEGER:\n rval += ' (Integer)';\n break;\n case asn1.Type.BITSTRING:\n rval += ' (Bit string)';\n break;\n case asn1.Type.OCTETSTRING:\n rval += ' (Octet string)';\n break;\n case asn1.Type.NULL:\n rval += ' (Null)';\n break;\n case asn1.Type.OID:\n rval += ' (Object Identifier)';\n break;\n case asn1.Type.ODESC:\n rval += ' (Object Descriptor)';\n break;\n case asn1.Type.EXTERNAL:\n rval += ' (External or Instance of)';\n break;\n case asn1.Type.REAL:\n rval += ' (Real)';\n break;\n case asn1.Type.ENUMERATED:\n rval += ' (Enumerated)';\n break;\n case asn1.Type.EMBEDDED:\n rval += ' (Embedded PDV)';\n break;\n case asn1.Type.UTF8:\n rval += ' (UTF8)';\n break;\n case asn1.Type.ROID:\n rval += ' (Relative Object Identifier)';\n break;\n case asn1.Type.SEQUENCE:\n rval += ' (Sequence)';\n break;\n case asn1.Type.SET:\n rval += ' (Set)';\n break;\n case asn1.Type.PRINTABLESTRING:\n rval += ' (Printable String)';\n break;\n case asn1.Type.IA5String:\n rval += ' (IA5String (ASCII))';\n break;\n case asn1.Type.UTCTIME:\n rval += ' (UTC time)';\n break;\n case asn1.Type.GENERALIZEDTIME:\n rval += ' (Generalized time)';\n break;\n case asn1.Type.BMPSTRING:\n rval += ' (BMP String)';\n break;\n }\n } else {\n rval += obj.type;\n }\n\n rval += '\\n';\n rval += indent + 'Constructed: ' + obj.constructed + '\\n';\n\n if(obj.composed) {\n var subvalues = 0;\n var sub = '';\n for(var i = 0; i < obj.value.length; ++i) {\n if(obj.value[i] !== undefined) {\n subvalues += 1;\n sub += asn1.prettyPrint(obj.value[i], level + 1, indentation);\n if((i + 1) < obj.value.length) {\n sub += ',';\n }\n }\n }\n rval += indent + 'Sub values: ' + subvalues + sub;\n } else {\n rval += indent + 'Value: ';\n if(obj.type === asn1.Type.OID) {\n var oid = asn1.derToOid(obj.value);\n rval += oid;\n if(forge.pki && forge.pki.oids) {\n if(oid in forge.pki.oids) {\n rval += ' (' + forge.pki.oids[oid] + ') ';\n }\n }\n }\n if(obj.type === asn1.Type.INTEGER) {\n try {\n rval += asn1.derToInteger(obj.value);\n } catch(ex) {\n rval += '0x' + forge.util.bytesToHex(obj.value);\n }\n } else if(obj.type === asn1.Type.BITSTRING) {\n // TODO: shift bits as needed to display without padding\n if(obj.value.length > 1) {\n // remove unused bits field\n rval += '0x' + forge.util.bytesToHex(obj.value.slice(1));\n } else {\n rval += '(none)';\n }\n // show unused bit count\n if(obj.value.length > 0) {\n var unused = obj.value.charCodeAt(0);\n if(unused == 1) {\n rval += ' (1 unused bit shown)';\n } else if(unused > 1) {\n rval += ' (' + unused + ' unused bits shown)';\n }\n }\n } else if(obj.type === asn1.Type.OCTETSTRING) {\n if(!_nonLatinRegex.test(obj.value)) {\n rval += '(' + obj.value + ') ';\n }\n rval += '0x' + forge.util.bytesToHex(obj.value);\n } else if(obj.type === asn1.Type.UTF8) {\n try {\n rval += forge.util.decodeUtf8(obj.value);\n } catch(e) {\n if(e.message === 'URI malformed') {\n rval +=\n '0x' + forge.util.bytesToHex(obj.value) + ' (malformed UTF8)';\n } else {\n throw e;\n }\n }\n } 
else if(obj.type === asn1.Type.PRINTABLESTRING ||\n obj.type === asn1.Type.IA5String) {\n rval += obj.value;\n } else if(_nonLatinRegex.test(obj.value)) {\n rval += '0x' + forge.util.bytesToHex(obj.value);\n } else if(obj.value.length === 0) {\n rval += '[null]';\n } else {\n rval += obj.value;\n }\n }\n\n return rval;\n};\n","/**\n * Base-N/Base-X encoding/decoding functions.\n *\n * Original implementation from base-x:\n * https://github.com/cryptocoinjs/base-x\n *\n * Which is MIT licensed:\n *\n * The MIT License (MIT)\n *\n * Copyright base-x contributors (c) 2016\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n * DEALINGS IN THE SOFTWARE.\n */\nvar api = {};\nmodule.exports = api;\n\n// baseN alphabet indexes\nvar _reverseAlphabets = {};\n\n/**\n * BaseN-encodes a Uint8Array using the given alphabet.\n *\n * @param input the Uint8Array to encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the baseN-encoded output string.\n */\napi.encode = function(input, alphabet, maxline) {\n if(typeof alphabet !== 'string') {\n throw new TypeError('\"alphabet\" must be a string.');\n }\n if(maxline !== undefined && typeof maxline !== 'number') {\n throw new TypeError('\"maxline\" must be a number.');\n }\n\n var output = '';\n\n if(!(input instanceof Uint8Array)) {\n // assume forge byte buffer\n output = _encodeWithByteBuffer(input, alphabet);\n } else {\n var i = 0;\n var base = alphabet.length;\n var first = alphabet.charAt(0);\n var digits = [0];\n for(i = 0; i < input.length; ++i) {\n for(var j = 0, carry = input[i]; j < digits.length; ++j) {\n carry += digits[j] << 8;\n digits[j] = carry % base;\n carry = (carry / base) | 0;\n }\n\n while(carry > 0) {\n digits.push(carry % base);\n carry = (carry / base) | 0;\n }\n }\n\n // deal with leading zeros\n for(i = 0; input[i] === 0 && i < input.length - 1; ++i) {\n output += first;\n }\n // convert digits to a string\n for(i = digits.length - 1; i >= 0; --i) {\n output += alphabet[digits[i]];\n }\n }\n\n if(maxline) {\n var regex = new RegExp('.{1,' + maxline + '}', 'g');\n output = output.match(regex).join('\\r\\n');\n }\n\n return output;\n};\n\n/**\n * Decodes a baseN-encoded (using the given alphabet) string to a\n * Uint8Array.\n *\n * @param input the baseN-encoded input string.\n *\n * @return the Uint8Array.\n */\napi.decode = function(input, alphabet) {\n if(typeof input !== 'string') {\n throw new TypeError('\"input\" must be a string.');\n }\n if(typeof 
alphabet !== 'string') {\n throw new TypeError('\"alphabet\" must be a string.');\n }\n\n var table = _reverseAlphabets[alphabet];\n if(!table) {\n // compute reverse alphabet\n table = _reverseAlphabets[alphabet] = [];\n for(var i = 0; i < alphabet.length; ++i) {\n table[alphabet.charCodeAt(i)] = i;\n }\n }\n\n // remove whitespace characters\n input = input.replace(/\\s/g, '');\n\n var base = alphabet.length;\n var first = alphabet.charAt(0);\n var bytes = [0];\n for(var i = 0; i < input.length; i++) {\n var value = table[input.charCodeAt(i)];\n if(value === undefined) {\n return;\n }\n\n for(var j = 0, carry = value; j < bytes.length; ++j) {\n carry += bytes[j] * base;\n bytes[j] = carry & 0xff;\n carry >>= 8;\n }\n\n while(carry > 0) {\n bytes.push(carry & 0xff);\n carry >>= 8;\n }\n }\n\n // deal with leading zeros\n for(var k = 0; input[k] === first && k < input.length - 1; ++k) {\n bytes.push(0);\n }\n\n if(typeof Buffer !== 'undefined') {\n return Buffer.from(bytes.reverse());\n }\n\n return new Uint8Array(bytes.reverse());\n};\n\nfunction _encodeWithByteBuffer(input, alphabet) {\n var i = 0;\n var base = alphabet.length;\n var first = alphabet.charAt(0);\n var digits = [0];\n for(i = 0; i < input.length(); ++i) {\n for(var j = 0, carry = input.at(i); j < digits.length; ++j) {\n carry += digits[j] << 8;\n digits[j] = carry % base;\n carry = (carry / base) | 0;\n }\n\n while(carry > 0) {\n digits.push(carry % base);\n carry = (carry / base) | 0;\n }\n }\n\n var output = '';\n\n // deal with leading zeros\n for(i = 0; input.at(i) === 0 && i < input.length() - 1; ++i) {\n output += first;\n }\n // convert digits to a string\n for(i = digits.length - 1; i >= 0; --i) {\n output += alphabet[digits[i]];\n }\n\n return output;\n}\n","/**\n * Cipher base API.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nmodule.exports = forge.cipher = forge.cipher || {};\n\n// registered algorithms\nforge.cipher.algorithms = forge.cipher.algorithms || {};\n\n/**\n * Creates a cipher object that can be used to encrypt data using the given\n * algorithm and key. The algorithm may be provided as a string value for a\n * previously registered algorithm or it may be given as a cipher algorithm\n * API object.\n *\n * @param algorithm the algorithm to use, either a string or an algorithm API\n * object.\n * @param key the key to use, as a binary-encoded string of bytes or a\n * byte buffer.\n *\n * @return the cipher.\n */\nforge.cipher.createCipher = function(algorithm, key) {\n var api = algorithm;\n if(typeof api === 'string') {\n api = forge.cipher.getAlgorithm(api);\n if(api) {\n api = api();\n }\n }\n if(!api) {\n throw new Error('Unsupported algorithm: ' + algorithm);\n }\n\n // assume block cipher\n return new forge.cipher.BlockCipher({\n algorithm: api,\n key: key,\n decrypt: false\n });\n};\n\n/**\n * Creates a decipher object that can be used to decrypt data using the given\n * algorithm and key. 
The algorithm may be provided as a string value for a\n * previously registered algorithm or it may be given as a cipher algorithm\n * API object.\n *\n * @param algorithm the algorithm to use, either a string or an algorithm API\n * object.\n * @param key the key to use, as a binary-encoded string of bytes or a\n * byte buffer.\n *\n * @return the cipher.\n */\nforge.cipher.createDecipher = function(algorithm, key) {\n var api = algorithm;\n if(typeof api === 'string') {\n api = forge.cipher.getAlgorithm(api);\n if(api) {\n api = api();\n }\n }\n if(!api) {\n throw new Error('Unsupported algorithm: ' + algorithm);\n }\n\n // assume block cipher\n return new forge.cipher.BlockCipher({\n algorithm: api,\n key: key,\n decrypt: true\n });\n};\n\n/**\n * Registers an algorithm by name. If the name was already registered, the\n * algorithm API object will be overwritten.\n *\n * @param name the name of the algorithm.\n * @param algorithm the algorithm API object.\n */\nforge.cipher.registerAlgorithm = function(name, algorithm) {\n name = name.toUpperCase();\n forge.cipher.algorithms[name] = algorithm;\n};\n\n/**\n * Gets a registered algorithm by name.\n *\n * @param name the name of the algorithm.\n *\n * @return the algorithm, if found, null if not.\n */\nforge.cipher.getAlgorithm = function(name) {\n name = name.toUpperCase();\n if(name in forge.cipher.algorithms) {\n return forge.cipher.algorithms[name];\n }\n return null;\n};\n\nvar BlockCipher = forge.cipher.BlockCipher = function(options) {\n this.algorithm = options.algorithm;\n this.mode = this.algorithm.mode;\n this.blockSize = this.mode.blockSize;\n this._finish = false;\n this._input = null;\n this.output = null;\n this._op = options.decrypt ? this.mode.decrypt : this.mode.encrypt;\n this._decrypt = options.decrypt;\n this.algorithm.initialize(options);\n};\n\n/**\n * Starts or restarts the encryption or decryption process, whichever\n * was previously configured.\n *\n * For non-GCM mode, the IV may be a binary-encoded string of bytes, an array\n * of bytes, a byte buffer, or an array of 32-bit integers. If the IV is in\n * bytes, then it must be Nb (16) bytes in length. If the IV is given in as\n * 32-bit integers, then it must be 4 integers long.\n *\n * Note: an IV is not required or used in ECB mode.\n *\n * For GCM-mode, the IV must be given as a binary-encoded string of bytes or\n * a byte buffer. 
The number of bytes should be 12 (96 bits) as recommended\n * by NIST SP-800-38D but another length may be given.\n *\n * @param options the options to use:\n * iv the initialization vector to use as a binary-encoded string of\n * bytes, null to reuse the last ciphered block from a previous\n * update() (this \"residue\" method is for legacy support only).\n * additionalData additional authentication data as a binary-encoded\n * string of bytes, for 'GCM' mode, (default: none).\n * tagLength desired length of authentication tag, in bits, for\n * 'GCM' mode (0-128, default: 128).\n * tag the authentication tag to check if decrypting, as a\n * binary-encoded string of bytes.\n * output the output the buffer to write to, null to create one.\n */\nBlockCipher.prototype.start = function(options) {\n options = options || {};\n var opts = {};\n for(var key in options) {\n opts[key] = options[key];\n }\n opts.decrypt = this._decrypt;\n this._finish = false;\n this._input = forge.util.createBuffer();\n this.output = options.output || forge.util.createBuffer();\n this.mode.start(opts);\n};\n\n/**\n * Updates the next block according to the cipher mode.\n *\n * @param input the buffer to read from.\n */\nBlockCipher.prototype.update = function(input) {\n if(input) {\n // input given, so empty it into the input buffer\n this._input.putBuffer(input);\n }\n\n // do cipher operation until it needs more input and not finished\n while(!this._op.call(this.mode, this._input, this.output, this._finish) &&\n !this._finish) {}\n\n // free consumed memory from input buffer\n this._input.compact();\n};\n\n/**\n * Finishes encrypting or decrypting.\n *\n * @param pad a padding function to use in CBC mode, null for default,\n * signature(blockSize, buffer, decrypt).\n *\n * @return true if successful, false on error.\n */\nBlockCipher.prototype.finish = function(pad) {\n // backwards-compatibility w/deprecated padding API\n // Note: will overwrite padding functions even after another start() call\n if(pad && (this.mode.name === 'ECB' || this.mode.name === 'CBC')) {\n this.mode.pad = function(input) {\n return pad(this.blockSize, input, false);\n };\n this.mode.unpad = function(output) {\n return pad(this.blockSize, output, true);\n };\n }\n\n // build options for padding and afterFinish functions\n var options = {};\n options.decrypt = this._decrypt;\n\n // get # of bytes that won't fill a block\n options.overflow = this._input.length() % this.blockSize;\n\n if(!this._decrypt && this.mode.pad) {\n if(!this.mode.pad(this._input, options)) {\n return false;\n }\n }\n\n // do final update\n this._finish = true;\n this.update();\n\n if(this._decrypt && this.mode.unpad) {\n if(!this.mode.unpad(this.output, options)) {\n return false;\n }\n }\n\n if(this.mode.afterFinish) {\n if(!this.mode.afterFinish(this.output, options)) {\n return false;\n }\n }\n\n return true;\n};\n","/**\n * Supported cipher modes.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nforge.cipher = forge.cipher || {};\n\n// supported cipher modes\nvar modes = module.exports = forge.cipher.modes = forge.cipher.modes || {};\n\n/** Electronic codebook (ECB) (Don't use this; it's not secure) **/\n\nmodes.ecb = function(options) {\n options = options || {};\n this.name = 'ECB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new 
Array(this._ints);\n};\n\nmodes.ecb.prototype.start = function(options) {};\n\nmodes.ecb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.ecb.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher-block Chaining (CBC) **/\n\nmodes.cbc = function(options) {\n options = options || {};\n this.name = 'CBC';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n};\n\nmodes.cbc.prototype.start = function(options) {\n // Note: legacy support for using IV residue (has security flaws)\n // if IV is null, reuse block from previous processing\n if(options.iv === null) {\n // must have a previous block\n if(!this._prev) {\n throw new Error('Invalid IV parameter.');\n }\n this._iv = this._prev.slice(0);\n } else if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n } else {\n // save IV as \"previous\" block\n this._iv = transformIV(options.iv, this.blockSize);\n this._prev = this._iv.slice(0);\n }\n};\n\nmodes.cbc.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n // CBC XOR's IV (or previous block) with plaintext\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._prev[i] ^ input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output, save previous block\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n this._prev = this._outBlock;\n};\n\nmodes.cbc.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt 
block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output, save previous ciphered block\n // CBC XOR's IV (or previous block) with ciphertext\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._prev[i] ^ this._outBlock[i]);\n }\n this._prev = this._inBlock.slice(0);\n};\n\nmodes.cbc.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.cbc.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher feedback (CFB) **/\n\nmodes.cfb = function(options) {\n options = options || {};\n this.name = 'CFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32() ^ this._outBlock[i];\n output.putInt32(this._inBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32() ^ this._outBlock[i];\n this._partialOutput.putInt32(this._partialBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return 
true;\n }\n\n // encrypt block (CFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n output.putInt32(this._inBlock[i] ^ this._outBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32();\n this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\n/** Output feedback (OFB) **/\n\nmodes.ofb = function(options) {\n options = options || {};\n this.name = 'OFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(input.length() === 0) {\n return true;\n }\n\n // encrypt block (OFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output and update next input\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n this._inBlock[i] = this._outBlock[i];\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._outBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n 
output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt;\n\n/** Counter (CTR) **/\n\nmodes.ctr = function(options) {\n options = options || {};\n this.name = 'CTR';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block (CTR always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // block complete, increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt;\n\n/** Galois/Counter Mode (GCM) **/\n\nmodes.gcm = function(options) {\n options = options || {};\n this.name = 'GCM';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n\n // R is actually this value concatenated with 120 more zero bits, but\n // we only XOR against R so the other zeros have no effect -- we just\n // apply this value to the first integer in a block\n this._R = 0xE1000000;\n};\n\nmodes.gcm.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // ensure IV is a byte buffer\n var iv = forge.util.createBuffer(options.iv);\n\n // no ciphered data processed yet\n this._cipherLength = 0;\n\n // default additional data is none\n var additionalData;\n if('additionalData' in options) {\n additionalData = 
forge.util.createBuffer(options.additionalData);\n } else {\n additionalData = forge.util.createBuffer();\n }\n\n // default tag length is 128 bits\n if('tagLength' in options) {\n this._tagLength = options.tagLength;\n } else {\n this._tagLength = 128;\n }\n\n // if tag is given, ensure tag matches tag length\n this._tag = null;\n if(options.decrypt) {\n // save tag to check later\n this._tag = forge.util.createBuffer(options.tag).getBytes();\n if(this._tag.length !== (this._tagLength / 8)) {\n throw new Error('Authentication tag does not match tag length.');\n }\n }\n\n // create tmp storage for hash calculation\n this._hashBlock = new Array(this._ints);\n\n // no tag generated yet\n this.tag = null;\n\n // generate hash subkey\n // (apply block cipher to \"zero\" block)\n this._hashSubkey = new Array(this._ints);\n this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey);\n\n // generate table M\n // use 4-bit tables (32 component decomposition of a 16 byte value)\n // 8-bit tables take more space and are known to have security\n // vulnerabilities (in native implementations)\n this.componentBits = 4;\n this._m = this.generateHashTable(this._hashSubkey, this.componentBits);\n\n // Note: support IV length different from 96 bits? (only supporting\n // 96 bits is recommended by NIST SP-800-38D)\n // generate J_0\n var ivLength = iv.length();\n if(ivLength === 12) {\n // 96-bit IV\n this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1];\n } else {\n // IV is NOT 96-bits\n this._j0 = [0, 0, 0, 0];\n while(iv.length() > 0) {\n this._j0 = this.ghash(\n this._hashSubkey, this._j0,\n [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]);\n }\n this._j0 = this.ghash(\n this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8)));\n }\n\n // generate ICB (initial counter block)\n this._inBlock = this._j0.slice(0);\n inc32(this._inBlock);\n this._partialBytes = 0;\n\n // consume authentication data\n additionalData = forge.util.createBuffer(additionalData);\n // save additional data length as a BE 64-bit number\n this._aDataLength = from64To32(additionalData.length() * 8);\n // pad additional data to 128 bit (16 byte) block size\n var overflow = additionalData.length() % this.blockSize;\n if(overflow) {\n additionalData.fillWithByte(0, this.blockSize - overflow);\n }\n this._s = [0, 0, 0, 0];\n while(additionalData.length() > 0) {\n this._s = this.ghash(this._hashSubkey, this._s, [\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32()\n ]);\n }\n};\n\nmodes.gcm.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^= input.getInt32());\n }\n this._cipherLength += this.blockSize;\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes <= 0 || finish) {\n // handle overflow prior to hashing\n if(finish) {\n // get block overflow\n 
var overflow = inputLength % this.blockSize;\n this._cipherLength += overflow;\n // truncate for hash function\n this._partialOutput.truncate(this.blockSize - overflow);\n } else {\n this._cipherLength += this.blockSize;\n }\n\n // get output block for hashing\n for(var i = 0; i < this._ints; ++i) {\n this._outBlock[i] = this._partialOutput.getInt32();\n }\n this._partialOutput.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n // block still incomplete, restore input buffer, get partial output,\n // and return early\n input.read -= this.blockSize;\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // update hash block S\n this._s = this.ghash(this._hashSubkey, this._s, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.gcm.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength < this.blockSize && !(finish && inputLength > 0)) {\n return true;\n }\n\n // encrypt block (GCM always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n\n // update hash block S\n this._hashBlock[0] = input.getInt32();\n this._hashBlock[1] = input.getInt32();\n this._hashBlock[2] = input.getInt32();\n this._hashBlock[3] = input.getInt32();\n this._s = this.ghash(this._hashSubkey, this._s, this._hashBlock);\n\n // XOR hash input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^ this._hashBlock[i]);\n }\n\n // increment cipher data length\n if(inputLength < this.blockSize) {\n this._cipherLength += inputLength % this.blockSize;\n } else {\n this._cipherLength += this.blockSize;\n }\n};\n\nmodes.gcm.prototype.afterFinish = function(output, options) {\n var rval = true;\n\n // handle overflow\n if(options.decrypt && options.overflow) {\n output.truncate(this.blockSize - options.overflow);\n }\n\n // handle authentication tag\n this.tag = forge.util.createBuffer();\n\n // concatenate additional data length with cipher length\n var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8));\n\n // include lengths in hash\n this._s = this.ghash(this._hashSubkey, this._s, lengths);\n\n // do GCTR(J_0, S)\n var tag = [];\n this.cipher.encrypt(this._j0, tag);\n for(var i = 0; i < this._ints; ++i) {\n this.tag.putInt32(this._s[i] ^ tag[i]);\n }\n\n // trim tag to length\n this.tag.truncate(this.tag.length() % (this._tagLength / 8));\n\n // check authentication tag\n if(options.decrypt && this.tag.bytes() !== this._tag) {\n rval = false;\n }\n\n return rval;\n};\n\n/**\n * See NIST SP-800-38D 6.3 (Algorithm 1). This function performs Galois\n * field multiplication. The field, GF(2^128), is defined by the polynomial:\n *\n * x^128 + x^7 + x^2 + x + 1\n *\n * Which is represented in little-endian binary form as: 11100001 (0xe1). When\n * the value of a coefficient is 1, a bit is set. 
The value R, is the\n * concatenation of this value and 120 zero bits, yielding a 128-bit value\n * which matches the block size.\n *\n * This function will multiply two elements (vectors of bytes), X and Y, in\n * the field GF(2^128). The result is initialized to zero. For each bit of\n * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd)\n * by the current value of Y. For each bit, the value of Y will be raised by\n * a power of x (multiplied by the polynomial x). This can be achieved by\n * shifting Y once to the right. If the current value of Y, prior to being\n * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial.\n * Otherwise, we must divide by R after shifting to find the remainder.\n *\n * @param x the first block to multiply by the second.\n * @param y the second block to multiply by the first.\n *\n * @return the block result of the multiplication.\n */\nmodes.gcm.prototype.multiply = function(x, y) {\n var z_i = [0, 0, 0, 0];\n var v_i = y.slice(0);\n\n // calculate Z_128 (block has 128 bits)\n for(var i = 0; i < 128; ++i) {\n // if x_i is 0, Z_{i+1} = Z_i (unchanged)\n // else Z_{i+1} = Z_i ^ V_i\n // get x_i by finding 32-bit int position, then left shift 1 by remainder\n var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32));\n if(x_i) {\n z_i[0] ^= v_i[0];\n z_i[1] ^= v_i[1];\n z_i[2] ^= v_i[2];\n z_i[3] ^= v_i[3];\n }\n\n // if LSB(V_i) is 1, V_i = V_i >> 1\n // else V_i = (V_i >> 1) ^ R\n this.pow(v_i, v_i);\n }\n\n return z_i;\n};\n\nmodes.gcm.prototype.pow = function(x, out) {\n // if LSB(x) is 1, x = x >>> 1\n // else x = (x >>> 1) ^ R\n var lsb = x[3] & 1;\n\n // always do x >>> 1:\n // starting with the rightmost integer, shift each integer to the right\n // one bit, pulling in the bit from the integer to the left as its top\n // most bit (do this for the last 3 integers)\n for(var i = 3; i > 0; --i) {\n out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31);\n }\n // shift the first integer normally\n out[0] = x[0] >>> 1;\n\n // if lsb was not set, then polynomial had a degree of 127 and doesn't\n // need to divided; otherwise, XOR with R to find the remainder; we only\n // need to XOR the first integer since R technically ends w/120 zero bits\n if(lsb) {\n out[0] ^= this._R;\n }\n};\n\nmodes.gcm.prototype.tableMultiply = function(x) {\n // assumes 4-bit tables are used\n var z = [0, 0, 0, 0];\n for(var i = 0; i < 32; ++i) {\n var idx = (i / 8) | 0;\n var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF;\n var ah = this._m[i][x_i];\n z[0] ^= ah[0];\n z[1] ^= ah[1];\n z[2] ^= ah[2];\n z[3] ^= ah[3];\n }\n return z;\n};\n\n/**\n * A continuing version of the GHASH algorithm that operates on a single\n * block. The hash block, last hash value (Ym) and the new block to hash\n * are given.\n *\n * @param h the hash block.\n * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash.\n * @param x the block to hash.\n *\n * @return the hashed value (Ym).\n */\nmodes.gcm.prototype.ghash = function(h, y, x) {\n y[0] ^= x[0];\n y[1] ^= x[1];\n y[2] ^= x[2];\n y[3] ^= x[3];\n return this.tableMultiply(y);\n //return this.multiply(y, h);\n};\n\n/**\n * Precomputes a table for multiplying against the hash subkey. This\n * mechanism provides a substantial speed increase over multiplication\n * performed without a table. 
The table-based multiplication this table is\n * for solves X * H by multiplying each component of X by H and then\n * composing the results together using XOR.\n *\n * This function can be used to generate tables with different bit sizes\n * for the components, however, this implementation assumes there are\n * 32 components of X (which is a 16 byte vector), therefore each component\n * takes 4-bits (so the table is constructed with bits=4).\n *\n * @param h the hash subkey.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateHashTable = function(h, bits) {\n // TODO: There are further optimizations that would use only the\n // first table M_0 (or some variant) along with a remainder table;\n // this can be explored in the future\n var multiplier = 8 / bits;\n var perInt = 4 * multiplier;\n var size = 16 * multiplier;\n var m = new Array(size);\n for(var i = 0; i < size; ++i) {\n var tmp = [0, 0, 0, 0];\n var idx = (i / perInt) | 0;\n var shft = ((perInt - 1 - (i % perInt)) * bits);\n tmp[idx] = (1 << (bits - 1)) << shft;\n m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits);\n }\n return m;\n};\n\n/**\n * Generates a table for multiplying against the hash subkey for one\n * particular component (out of all possible component values).\n *\n * @param mid the pre-multiplied value for the middle key of the table.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateSubHashTable = function(mid, bits) {\n // compute the table quickly by minimizing the number of\n // POW operations -- they only need to be performed for powers of 2,\n // all other entries can be composed from those powers using XOR\n var size = 1 << bits;\n var half = size >>> 1;\n var m = new Array(size);\n m[half] = mid.slice(0);\n var i = half >>> 1;\n while(i > 0) {\n // raise m0[2 * i] and store in m0[i]\n this.pow(m[2 * i], m[i] = []);\n i >>= 1;\n }\n i = 2;\n while(i < half) {\n for(var j = 1; j < i; ++j) {\n var m_i = m[i];\n var m_j = m[j];\n m[i + j] = [\n m_i[0] ^ m_j[0],\n m_i[1] ^ m_j[1],\n m_i[2] ^ m_j[2],\n m_i[3] ^ m_j[3]\n ];\n }\n i *= 2;\n }\n m[0] = [0, 0, 0, 0];\n /* Note: We could avoid storing these by doing composition during multiply\n calculate top half using composition by speed is preferred. 
*/\n for(i = half + 1; i < size; ++i) {\n var c = m[i ^ half];\n m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]];\n }\n return m;\n};\n\n/** Utility functions */\n\nfunction transformIV(iv, blockSize) {\n if(typeof iv === 'string') {\n // convert iv string into byte buffer\n iv = forge.util.createBuffer(iv);\n }\n\n if(forge.util.isArray(iv) && iv.length > 4) {\n // convert iv byte array into byte buffer\n var tmp = iv;\n iv = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n iv.putByte(tmp[i]);\n }\n }\n\n if(iv.length() < blockSize) {\n throw new Error(\n 'Invalid IV length; got ' + iv.length() +\n ' bytes and expected ' + blockSize + ' bytes.');\n }\n\n if(!forge.util.isArray(iv)) {\n // convert iv byte buffer into 32-bit integer array\n var ints = [];\n var blocks = blockSize / 4;\n for(var i = 0; i < blocks; ++i) {\n ints.push(iv.getInt32());\n }\n iv = ints;\n }\n\n return iv;\n}\n\nfunction inc32(block) {\n // increment last 32 bits of block only\n block[block.length - 1] = (block[block.length - 1] + 1) & 0xFFFFFFFF;\n}\n\nfunction from64To32(num) {\n // convert 64-bit number to two BE Int32s\n return [(num / 0x100000000) | 0, num & 0xFFFFFFFF];\n}\n","/**\n * DES (Data Encryption Standard) implementation.\n *\n * This implementation supports DES as well as 3DES-EDE in ECB and CBC mode.\n * It is based on the BSD-licensed implementation by Paul Tero:\n *\n * Paul Tero, July 2001\n * http://www.tero.co.uk/des/\n *\n * Optimised for performance with large blocks by\n * Michael Hayworth, November 2001\n * http://www.netdealing.com\n *\n * THIS SOFTWARE IS PROVIDED \"AS IS\" AND\n * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS\n * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)\n * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY\n * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF\n * SUCH DAMAGE.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2012-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./cipher');\nrequire('./cipherModes');\nrequire('./util');\n\n/* DES API */\nmodule.exports = forge.des = forge.des || {};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('DES-', key);\n * cipher.start({iv: iv});\n *\n * Creates an DES cipher object to encrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as binary-encoded strings of bytes or\n * byte buffers.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC' if IV is\n * given, 'ECB' if null).\n *\n * @return the cipher.\n */\nforge.des.startEncrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: false,\n mode: mode || (iv === null ? 
'ECB' : 'CBC')\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('DES-', key);\n *\n * Creates an DES cipher object to encrypt data using the given symmetric key.\n *\n * The key may be given as a binary-encoded string of bytes or a byte buffer.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.des.createEncryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: false,\n mode: mode\n });\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('DES-', key);\n * decipher.start({iv: iv});\n *\n * Creates an DES cipher object to decrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as binary-encoded strings of bytes or\n * byte buffers.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC' if IV is\n * given, 'ECB' if null).\n *\n * @return the cipher.\n */\nforge.des.startDecrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: true,\n mode: mode || (iv === null ? 'ECB' : 'CBC')\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('DES-', key);\n *\n * Creates an DES cipher object to decrypt data using the given symmetric key.\n *\n * The key may be given as a binary-encoded string of bytes or a byte buffer.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.des.createDecryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: true,\n mode: mode\n });\n};\n\n/**\n * Creates a new DES cipher algorithm object.\n *\n * @param name the name of the algorithm.\n * @param mode the mode factory function.\n *\n * @return the DES algorithm object.\n */\nforge.des.Algorithm = function(name, mode) {\n var self = this;\n self.name = name;\n self.mode = new mode({\n blockSize: 8,\n cipher: {\n encrypt: function(inBlock, outBlock) {\n return _updateBlock(self._keys, inBlock, outBlock, false);\n },\n decrypt: function(inBlock, outBlock) {\n return _updateBlock(self._keys, inBlock, outBlock, true);\n }\n }\n });\n self._init = false;\n};\n\n/**\n * Initializes this DES algorithm by expanding its key.\n *\n * @param options the options to use.\n * key the key to use with this algorithm.\n * decrypt true if the algorithm should be initialized for decryption,\n * false for encryption.\n */\nforge.des.Algorithm.prototype.initialize = function(options) {\n if(this._init) {\n return;\n }\n\n var key = forge.util.createBuffer(options.key);\n if(this.name.indexOf('3DES') === 0) {\n if(key.length() !== 24) {\n throw new Error('Invalid Triple-DES key size: ' + key.length() * 8);\n }\n }\n\n // do key expansion to 16 or 48 subkeys (single or triple DES)\n this._keys = _createKeys(key);\n this._init = true;\n};\n\n/** Register DES algorithms **/\n\nregisterAlgorithm('DES-ECB', forge.cipher.modes.ecb);\nregisterAlgorithm('DES-CBC', forge.cipher.modes.cbc);\nregisterAlgorithm('DES-CFB', 
forge.cipher.modes.cfb);\nregisterAlgorithm('DES-OFB', forge.cipher.modes.ofb);\nregisterAlgorithm('DES-CTR', forge.cipher.modes.ctr);\n\nregisterAlgorithm('3DES-ECB', forge.cipher.modes.ecb);\nregisterAlgorithm('3DES-CBC', forge.cipher.modes.cbc);\nregisterAlgorithm('3DES-CFB', forge.cipher.modes.cfb);\nregisterAlgorithm('3DES-OFB', forge.cipher.modes.ofb);\nregisterAlgorithm('3DES-CTR', forge.cipher.modes.ctr);\n\nfunction registerAlgorithm(name, mode) {\n var factory = function() {\n return new forge.des.Algorithm(name, mode);\n };\n forge.cipher.registerAlgorithm(name, factory);\n}\n\n/** DES implementation **/\n\nvar spfunction1 = [0x1010400,0,0x10000,0x1010404,0x1010004,0x10404,0x4,0x10000,0x400,0x1010400,0x1010404,0x400,0x1000404,0x1010004,0x1000000,0x4,0x404,0x1000400,0x1000400,0x10400,0x10400,0x1010000,0x1010000,0x1000404,0x10004,0x1000004,0x1000004,0x10004,0,0x404,0x10404,0x1000000,0x10000,0x1010404,0x4,0x1010000,0x1010400,0x1000000,0x1000000,0x400,0x1010004,0x10000,0x10400,0x1000004,0x400,0x4,0x1000404,0x10404,0x1010404,0x10004,0x1010000,0x1000404,0x1000004,0x404,0x10404,0x1010400,0x404,0x1000400,0x1000400,0,0x10004,0x10400,0,0x1010004];\nvar spfunction2 = [-0x7fef7fe0,-0x7fff8000,0x8000,0x108020,0x100000,0x20,-0x7fefffe0,-0x7fff7fe0,-0x7fffffe0,-0x7fef7fe0,-0x7fef8000,-0x80000000,-0x7fff8000,0x100000,0x20,-0x7fefffe0,0x108000,0x100020,-0x7fff7fe0,0,-0x80000000,0x8000,0x108020,-0x7ff00000,0x100020,-0x7fffffe0,0,0x108000,0x8020,-0x7fef8000,-0x7ff00000,0x8020,0,0x108020,-0x7fefffe0,0x100000,-0x7fff7fe0,-0x7ff00000,-0x7fef8000,0x8000,-0x7ff00000,-0x7fff8000,0x20,-0x7fef7fe0,0x108020,0x20,0x8000,-0x80000000,0x8020,-0x7fef8000,0x100000,-0x7fffffe0,0x100020,-0x7fff7fe0,-0x7fffffe0,0x100020,0x108000,0,-0x7fff8000,0x8020,-0x80000000,-0x7fefffe0,-0x7fef7fe0,0x108000];\nvar spfunction3 = [0x208,0x8020200,0,0x8020008,0x8000200,0,0x20208,0x8000200,0x20008,0x8000008,0x8000008,0x20000,0x8020208,0x20008,0x8020000,0x208,0x8000000,0x8,0x8020200,0x200,0x20200,0x8020000,0x8020008,0x20208,0x8000208,0x20200,0x20000,0x8000208,0x8,0x8020208,0x200,0x8000000,0x8020200,0x8000000,0x20008,0x208,0x20000,0x8020200,0x8000200,0,0x200,0x20008,0x8020208,0x8000200,0x8000008,0x200,0,0x8020008,0x8000208,0x20000,0x8000000,0x8020208,0x8,0x20208,0x20200,0x8000008,0x8020000,0x8000208,0x208,0x8020000,0x20208,0x8,0x8020008,0x20200];\nvar spfunction4 = [0x802001,0x2081,0x2081,0x80,0x802080,0x800081,0x800001,0x2001,0,0x802000,0x802000,0x802081,0x81,0,0x800080,0x800001,0x1,0x2000,0x800000,0x802001,0x80,0x800000,0x2001,0x2080,0x800081,0x1,0x2080,0x800080,0x2000,0x802080,0x802081,0x81,0x800080,0x800001,0x802000,0x802081,0x81,0,0,0x802000,0x2080,0x800080,0x800081,0x1,0x802001,0x2081,0x2081,0x80,0x802081,0x81,0x1,0x2000,0x800001,0x2001,0x802080,0x800081,0x2001,0x2080,0x800000,0x802001,0x80,0x800000,0x2000,0x802080];\nvar spfunction5 = [0x100,0x2080100,0x2080000,0x42000100,0x80000,0x100,0x40000000,0x2080000,0x40080100,0x80000,0x2000100,0x40080100,0x42000100,0x42080000,0x80100,0x40000000,0x2000000,0x40080000,0x40080000,0,0x40000100,0x42080100,0x42080100,0x2000100,0x42080000,0x40000100,0,0x42000000,0x2080100,0x2000000,0x42000000,0x80100,0x80000,0x42000100,0x100,0x2000000,0x40000000,0x2080000,0x42000100,0x40080100,0x2000100,0x40000000,0x42080000,0x2080100,0x40080100,0x100,0x2000000,0x42080000,0x42080100,0x80100,0x42000000,0x42080100,0x2080000,0,0x40080000,0x42000000,0x80100,0x2000100,0x40000100,0x80000,0,0x40080000,0x2080100,0x40000100];\nvar spfunction6 = 
[0x20000010,0x20400000,0x4000,0x20404010,0x20400000,0x10,0x20404010,0x400000,0x20004000,0x404010,0x400000,0x20000010,0x400010,0x20004000,0x20000000,0x4010,0,0x400010,0x20004010,0x4000,0x404000,0x20004010,0x10,0x20400010,0x20400010,0,0x404010,0x20404000,0x4010,0x404000,0x20404000,0x20000000,0x20004000,0x10,0x20400010,0x404000,0x20404010,0x400000,0x4010,0x20000010,0x400000,0x20004000,0x20000000,0x4010,0x20000010,0x20404010,0x404000,0x20400000,0x404010,0x20404000,0,0x20400010,0x10,0x4000,0x20400000,0x404010,0x4000,0x400010,0x20004010,0,0x20404000,0x20000000,0x400010,0x20004010];\nvar spfunction7 = [0x200000,0x4200002,0x4000802,0,0x800,0x4000802,0x200802,0x4200800,0x4200802,0x200000,0,0x4000002,0x2,0x4000000,0x4200002,0x802,0x4000800,0x200802,0x200002,0x4000800,0x4000002,0x4200000,0x4200800,0x200002,0x4200000,0x800,0x802,0x4200802,0x200800,0x2,0x4000000,0x200800,0x4000000,0x200800,0x200000,0x4000802,0x4000802,0x4200002,0x4200002,0x2,0x200002,0x4000000,0x4000800,0x200000,0x4200800,0x802,0x200802,0x4200800,0x802,0x4000002,0x4200802,0x4200000,0x200800,0,0x2,0x4200802,0,0x200802,0x4200000,0x800,0x4000002,0x4000800,0x800,0x200002];\nvar spfunction8 = [0x10001040,0x1000,0x40000,0x10041040,0x10000000,0x10001040,0x40,0x10000000,0x40040,0x10040000,0x10041040,0x41000,0x10041000,0x41040,0x1000,0x40,0x10040000,0x10000040,0x10001000,0x1040,0x41000,0x40040,0x10040040,0x10041000,0x1040,0,0,0x10040040,0x10000040,0x10001000,0x41040,0x40000,0x41040,0x40000,0x10041000,0x1000,0x40,0x10040040,0x1000,0x41040,0x10001000,0x40,0x10000040,0x10040000,0x10040040,0x10000000,0x40000,0x10001040,0,0x10041040,0x40040,0x10000040,0x10040000,0x10001000,0x10001040,0,0x10041040,0x41000,0x41000,0x1040,0x1040,0x40040,0x10000000,0x10041000];\n\n/**\n * Create necessary sub keys.\n *\n * @param key the 64-bit or 192-bit key.\n *\n * @return the expanded keys.\n */\nfunction _createKeys(key) {\n var pc2bytes0 = [0,0x4,0x20000000,0x20000004,0x10000,0x10004,0x20010000,0x20010004,0x200,0x204,0x20000200,0x20000204,0x10200,0x10204,0x20010200,0x20010204],\n pc2bytes1 = [0,0x1,0x100000,0x100001,0x4000000,0x4000001,0x4100000,0x4100001,0x100,0x101,0x100100,0x100101,0x4000100,0x4000101,0x4100100,0x4100101],\n pc2bytes2 = [0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808,0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808],\n pc2bytes3 = [0,0x200000,0x8000000,0x8200000,0x2000,0x202000,0x8002000,0x8202000,0x20000,0x220000,0x8020000,0x8220000,0x22000,0x222000,0x8022000,0x8222000],\n pc2bytes4 = [0,0x40000,0x10,0x40010,0,0x40000,0x10,0x40010,0x1000,0x41000,0x1010,0x41010,0x1000,0x41000,0x1010,0x41010],\n pc2bytes5 = [0,0x400,0x20,0x420,0,0x400,0x20,0x420,0x2000000,0x2000400,0x2000020,0x2000420,0x2000000,0x2000400,0x2000020,0x2000420],\n pc2bytes6 = [0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002,0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002],\n pc2bytes7 = [0,0x10000,0x800,0x10800,0x20000000,0x20010000,0x20000800,0x20010800,0x20000,0x30000,0x20800,0x30800,0x20020000,0x20030000,0x20020800,0x20030800],\n pc2bytes8 = [0,0x40000,0,0x40000,0x2,0x40002,0x2,0x40002,0x2000000,0x2040000,0x2000000,0x2040000,0x2000002,0x2040002,0x2000002,0x2040002],\n pc2bytes9 = [0,0x10000000,0x8,0x10000008,0,0x10000000,0x8,0x10000008,0x400,0x10000400,0x408,0x10000408,0x400,0x10000400,0x408,0x10000408],\n pc2bytes10 = [0,0x20,0,0x20,0x100000,0x100020,0x100000,0x100020,0x2000,0x2020,0x2000,0x2020,0x102000,0x102020,0x102000,0x102020],\n pc2bytes11 = 
[0,0x1000000,0x200,0x1000200,0x200000,0x1200000,0x200200,0x1200200,0x4000000,0x5000000,0x4000200,0x5000200,0x4200000,0x5200000,0x4200200,0x5200200],\n pc2bytes12 = [0,0x1000,0x8000000,0x8001000,0x80000,0x81000,0x8080000,0x8081000,0x10,0x1010,0x8000010,0x8001010,0x80010,0x81010,0x8080010,0x8081010],\n pc2bytes13 = [0,0x4,0x100,0x104,0,0x4,0x100,0x104,0x1,0x5,0x101,0x105,0x1,0x5,0x101,0x105];\n\n // how many iterations (1 for des, 3 for triple des)\n // changed by Paul 16/6/2007 to use Triple DES for 9+ byte keys\n var iterations = key.length() > 8 ? 3 : 1;\n\n // stores the return keys\n var keys = [];\n\n // now define the left shifts which need to be done\n var shifts = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0];\n\n var n = 0, tmp;\n for(var j = 0; j < iterations; j++) {\n var left = key.getInt32();\n var right = key.getInt32();\n\n tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;\n right ^= tmp;\n left ^= (tmp << 4);\n\n tmp = ((right >>> -16) ^ left) & 0x0000ffff;\n left ^= tmp;\n right ^= (tmp << -16);\n\n tmp = ((left >>> 2) ^ right) & 0x33333333;\n right ^= tmp;\n left ^= (tmp << 2);\n\n tmp = ((right >>> -16) ^ left) & 0x0000ffff;\n left ^= tmp;\n right ^= (tmp << -16);\n\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n tmp = ((right >>> 8) ^ left) & 0x00ff00ff;\n left ^= tmp;\n right ^= (tmp << 8);\n\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n // right needs to be shifted and OR'd with last four bits of left\n tmp = (left << 8) | ((right >>> 20) & 0x000000f0);\n\n // left needs to be put upside down\n left = ((right << 24) | ((right << 8) & 0xff0000) |\n ((right >>> 8) & 0xff00) | ((right >>> 24) & 0xf0));\n right = tmp;\n\n // now go through and perform these shifts on the left and right keys\n for(var i = 0; i < shifts.length; ++i) {\n //shift the keys either one or two bits to the left\n if(shifts[i]) {\n left = (left << 2) | (left >>> 26);\n right = (right << 2) | (right >>> 26);\n } else {\n left = (left << 1) | (left >>> 27);\n right = (right << 1) | (right >>> 27);\n }\n left &= -0xf;\n right &= -0xf;\n\n // now apply PC-2, in such a way that E is easier when encrypting or\n // decrypting this conversion will look like PC-2 except only the last 6\n // bits of each byte are used rather than 48 consecutive bits and the\n // order of lines will be according to how the S selection functions will\n // be applied: S2, S4, S6, S8, S1, S3, S5, S7\n var lefttmp = (\n pc2bytes0[left >>> 28] | pc2bytes1[(left >>> 24) & 0xf] |\n pc2bytes2[(left >>> 20) & 0xf] | pc2bytes3[(left >>> 16) & 0xf] |\n pc2bytes4[(left >>> 12) & 0xf] | pc2bytes5[(left >>> 8) & 0xf] |\n pc2bytes6[(left >>> 4) & 0xf]);\n var righttmp = (\n pc2bytes7[right >>> 28] | pc2bytes8[(right >>> 24) & 0xf] |\n pc2bytes9[(right >>> 20) & 0xf] | pc2bytes10[(right >>> 16) & 0xf] |\n pc2bytes11[(right >>> 12) & 0xf] | pc2bytes12[(right >>> 8) & 0xf] |\n pc2bytes13[(right >>> 4) & 0xf]);\n tmp = ((righttmp >>> 16) ^ lefttmp) & 0x0000ffff;\n keys[n++] = lefttmp ^ tmp;\n keys[n++] = righttmp ^ (tmp << 16);\n }\n }\n\n return keys;\n}\n\n/**\n * Updates a single block (1 byte) using DES. 
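// --- Editorial sketch (illustrative only, not part of the bundled output) ---
// The `shifts` flag array used by _createKeys() above is a compact encoding of
// the textbook DES key-schedule rotations: a 0 flag means "rotate the 28-bit
// halves by 1 bit", a 1 flag means "rotate by 2 bits". Decoded, it is the
// standard schedule (1 bit in rounds 1, 2, 9 and 16, 2 bits elsewhere), and the
// amounts sum to 28, so each half register is back where it started after
// round 16.
var desShiftFlags = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0];
var desRotations = desShiftFlags.map(function(f) { return f ? 2 : 1; });
console.log(desRotations.join(','));                                    // 1,1,2,2,2,2,2,2,1,2,2,2,2,2,2,1
console.log(desRotations.reduce(function(a, b) { return a + b; }, 0));  // 28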
The update will either\n * encrypt or decrypt the block.\n *\n * @param keys the expanded keys.\n * @param input the input block (an array of 32-bit words).\n * @param output the updated output block.\n * @param decrypt true to decrypt the block, false to encrypt it.\n */\nfunction _updateBlock(keys, input, output, decrypt) {\n // set up loops for single or triple DES\n var iterations = keys.length === 32 ? 3 : 9;\n var looping;\n if(iterations === 3) {\n looping = decrypt ? [30, -2, -2] : [0, 32, 2];\n } else {\n looping = (decrypt ?\n [94, 62, -2, 32, 64, 2, 30, -2, -2] :\n [0, 32, 2, 62, 30, -2, 64, 96, 2]);\n }\n\n var tmp;\n\n var left = input[0];\n var right = input[1];\n\n // first each 64 bit chunk of the message must be permuted according to IP\n tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;\n right ^= tmp;\n left ^= (tmp << 4);\n\n tmp = ((left >>> 16) ^ right) & 0x0000ffff;\n right ^= tmp;\n left ^= (tmp << 16);\n\n tmp = ((right >>> 2) ^ left) & 0x33333333;\n left ^= tmp;\n right ^= (tmp << 2);\n\n tmp = ((right >>> 8) ^ left) & 0x00ff00ff;\n left ^= tmp;\n right ^= (tmp << 8);\n\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n // rotate left 1 bit\n left = ((left << 1) | (left >>> 31));\n right = ((right << 1) | (right >>> 31));\n\n for(var j = 0; j < iterations; j += 3) {\n var endloop = looping[j + 1];\n var loopinc = looping[j + 2];\n\n // now go through and perform the encryption or decryption\n for(var i = looping[j]; i != endloop; i += loopinc) {\n var right1 = right ^ keys[i];\n var right2 = ((right >>> 4) | (right << 28)) ^ keys[i + 1];\n\n // passing these bytes through the S selection functions\n tmp = left;\n left = right;\n right = tmp ^ (\n spfunction2[(right1 >>> 24) & 0x3f] |\n spfunction4[(right1 >>> 16) & 0x3f] |\n spfunction6[(right1 >>> 8) & 0x3f] |\n spfunction8[right1 & 0x3f] |\n spfunction1[(right2 >>> 24) & 0x3f] |\n spfunction3[(right2 >>> 16) & 0x3f] |\n spfunction5[(right2 >>> 8) & 0x3f] |\n spfunction7[right2 & 0x3f]);\n }\n // unreverse left and right\n tmp = left;\n left = right;\n right = tmp;\n }\n\n // rotate right 1 bit\n left = ((left >>> 1) | (left << 31));\n right = ((right >>> 1) | (right << 31));\n\n // now perform IP-1, which is IP in the opposite direction\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n tmp = ((right >>> 8) ^ left) & 0x00ff00ff;\n left ^= tmp;\n right ^= (tmp << 8);\n\n tmp = ((right >>> 2) ^ left) & 0x33333333;\n left ^= tmp;\n right ^= (tmp << 2);\n\n tmp = ((left >>> 16) ^ right) & 0x0000ffff;\n right ^= tmp;\n left ^= (tmp << 16);\n\n tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;\n right ^= tmp;\n left ^= (tmp << 4);\n\n output[0] = left;\n output[1] = right;\n}\n\n/**\n * Deprecated. Instead, use:\n *\n * forge.cipher.createCipher('DES-', key);\n * forge.cipher.createDecipher('DES-', key);\n *\n * Creates a deprecated DES cipher object. 
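// --- Editorial sketch (illustrative only, not part of the bundled output) ---
// The non-deprecated way to drive the DES/3DES block function above is forge's
// generic cipher API registered via registerAlgorithm(). This assumes the
// node-forge package (which this bundle vendors) is importable on its own.
var forge = require('node-forge');

var key = forge.random.getBytesSync(24);   // a 24-byte key selects Triple-DES
var iv = forge.random.getBytesSync(8);     // DES block size is 8 bytes

var cipher = forge.cipher.createCipher('3DES-CBC', key);
cipher.start({iv: iv});
cipher.update(forge.util.createBuffer('attack at dawn', 'utf8'));
cipher.finish();
var encrypted = cipher.output.getBytes();

var decipher = forge.cipher.createDecipher('3DES-CBC', key);
decipher.start({iv: iv});
decipher.update(forge.util.createBuffer(encrypted));
console.log(decipher.finish());            // true
console.log(decipher.output.getBytes());   // 'attack at dawn'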
This object's mode will default to\n * CBC (cipher-block-chaining).\n *\n * The key may be given as a binary-encoded string of bytes or a byte buffer.\n *\n * @param options the options to use.\n * key the symmetric key to use (64 or 192 bits).\n * output the buffer to write to.\n * decrypt true for decryption, false for encryption.\n * mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nfunction _createCipher(options) {\n options = options || {};\n var mode = (options.mode || 'CBC').toUpperCase();\n var algorithm = 'DES-' + mode;\n\n var cipher;\n if(options.decrypt) {\n cipher = forge.cipher.createDecipher(algorithm, options.key);\n } else {\n cipher = forge.cipher.createCipher(algorithm, options.key);\n }\n\n // backwards compatible start API\n var start = cipher.start;\n cipher.start = function(iv, options) {\n // backwards compatibility: support second arg as output buffer\n var output = null;\n if(options instanceof forge.util.ByteBuffer) {\n output = options;\n options = {};\n }\n options = options || {};\n options.output = output;\n options.iv = iv;\n start.call(cipher, options);\n };\n\n return cipher;\n}\n","/**\n * JavaScript implementation of Ed25519.\n *\n * Copyright (c) 2017-2019 Digital Bazaar, Inc.\n *\n * This implementation is based on the most excellent TweetNaCl which is\n * in the public domain. Many thanks to its contributors:\n *\n * https://github.com/dchest/tweetnacl-js\n */\nvar forge = require('./forge');\nrequire('./jsbn');\nrequire('./random');\nrequire('./sha512');\nrequire('./util');\nvar asn1Validator = require('./asn1-validator');\nvar publicKeyValidator = asn1Validator.publicKeyValidator;\nvar privateKeyValidator = asn1Validator.privateKeyValidator;\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\nvar ByteBuffer = forge.util.ByteBuffer;\nvar NativeBuffer = typeof Buffer === 'undefined' ? 
Uint8Array : Buffer;\n\n/*\n * Ed25519 algorithms, see RFC 8032:\n * https://tools.ietf.org/html/rfc8032\n */\nforge.pki = forge.pki || {};\nmodule.exports = forge.pki.ed25519 = forge.ed25519 = forge.ed25519 || {};\nvar ed25519 = forge.ed25519;\n\ned25519.constants = {};\ned25519.constants.PUBLIC_KEY_BYTE_LENGTH = 32;\ned25519.constants.PRIVATE_KEY_BYTE_LENGTH = 64;\ned25519.constants.SEED_BYTE_LENGTH = 32;\ned25519.constants.SIGN_BYTE_LENGTH = 64;\ned25519.constants.HASH_BYTE_LENGTH = 64;\n\ned25519.generateKeyPair = function(options) {\n options = options || {};\n var seed = options.seed;\n if(seed === undefined) {\n // generate seed\n seed = forge.random.getBytesSync(ed25519.constants.SEED_BYTE_LENGTH);\n } else if(typeof seed === 'string') {\n if(seed.length !== ed25519.constants.SEED_BYTE_LENGTH) {\n throw new TypeError(\n '\"seed\" must be ' + ed25519.constants.SEED_BYTE_LENGTH +\n ' bytes in length.');\n }\n } else if(!(seed instanceof Uint8Array)) {\n throw new TypeError(\n '\"seed\" must be a node.js Buffer, Uint8Array, or a binary string.');\n }\n\n seed = messageToNativeBuffer({message: seed, encoding: 'binary'});\n\n var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH);\n var sk = new NativeBuffer(ed25519.constants.PRIVATE_KEY_BYTE_LENGTH);\n for(var i = 0; i < 32; ++i) {\n sk[i] = seed[i];\n }\n crypto_sign_keypair(pk, sk);\n return {publicKey: pk, privateKey: sk};\n};\n\n/**\n * Converts a private key from a RFC8410 ASN.1 encoding.\n *\n * @param obj - The asn1 representation of a private key.\n *\n * @returns {Object} keyInfo - The key information.\n * @returns {Buffer|Uint8Array} keyInfo.privateKeyBytes - 32 private key bytes.\n */\ned25519.privateKeyFromAsn1 = function(obj) {\n var capture = {};\n var errors = [];\n var valid = forge.asn1.validate(obj, privateKeyValidator, capture, errors);\n if(!valid) {\n var error = new Error('Invalid Key.');\n error.errors = errors;\n throw error;\n }\n var oid = forge.asn1.derToOid(capture.privateKeyOid);\n var ed25519Oid = forge.oids.EdDSA25519;\n if(oid !== ed25519Oid) {\n throw new Error('Invalid OID \"' + oid + '\"; OID must be \"' +\n ed25519Oid + '\".');\n }\n var privateKey = capture.privateKey;\n // manually extract the private key bytes from nested octet string, see FIXME:\n // https://github.com/digitalbazaar/forge/blob/master/lib/asn1.js#L542\n var privateKeyBytes = messageToNativeBuffer({\n message: forge.asn1.fromDer(privateKey).value,\n encoding: 'binary'\n });\n // TODO: RFC8410 specifies a format for encoding the public key bytes along\n // with the private key bytes. `publicKeyBytes` can be returned in the\n // future. 
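// --- Editorial sketch (illustrative only, not part of the bundled output) ---
// Exercising the Ed25519 API above (generateKeyPair here, sign/verify defined
// just below) through the public entry point, assuming the vendored node-forge
// package is importable on its own.
var forge = require('node-forge');
var ed25519 = forge.pki.ed25519;

var keypair = ed25519.generateKeyPair({
  seed: forge.random.getBytesSync(32)      // 32-byte seed, per SEED_BYTE_LENGTH
});

var signature = ed25519.sign({
  message: 'hello',
  encoding: 'utf8',
  privateKey: keypair.privateKey           // 64 bytes: seed followed by public key
});

console.log(ed25519.verify({
  message: 'hello',
  encoding: 'utf8',
  signature: signature,
  publicKey: keypair.publicKey             // 32 bytes
}));                                       // true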
https://tools.ietf.org/html/rfc8410#section-10.3\n return {privateKeyBytes: privateKeyBytes};\n};\n\n/**\n * Converts a public key from a RFC8410 ASN.1 encoding.\n *\n * @param obj - The asn1 representation of a public key.\n *\n * @return {Buffer|Uint8Array} - 32 public key bytes.\n */\ned25519.publicKeyFromAsn1 = function(obj) {\n // get SubjectPublicKeyInfo\n var capture = {};\n var errors = [];\n var valid = forge.asn1.validate(obj, publicKeyValidator, capture, errors);\n if(!valid) {\n var error = new Error('Invalid Key.');\n error.errors = errors;\n throw error;\n }\n var oid = forge.asn1.derToOid(capture.publicKeyOid);\n var ed25519Oid = forge.oids.EdDSA25519;\n if(oid !== ed25519Oid) {\n throw new Error('Invalid OID \"' + oid + '\"; OID must be \"' +\n ed25519Oid + '\".');\n }\n var publicKeyBytes = capture.ed25519PublicKey;\n if(publicKeyBytes.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) {\n throw new Error('Key length is invalid.');\n }\n return messageToNativeBuffer({\n message: publicKeyBytes,\n encoding: 'binary'\n });\n};\n\ned25519.publicKeyFromPrivateKey = function(options) {\n options = options || {};\n var privateKey = messageToNativeBuffer({\n message: options.privateKey, encoding: 'binary'\n });\n if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.privateKey\" must have a byte length of ' +\n ed25519.constants.PRIVATE_KEY_BYTE_LENGTH);\n }\n\n var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH);\n for(var i = 0; i < pk.length; ++i) {\n pk[i] = privateKey[32 + i];\n }\n return pk;\n};\n\ned25519.sign = function(options) {\n options = options || {};\n var msg = messageToNativeBuffer(options);\n var privateKey = messageToNativeBuffer({\n message: options.privateKey,\n encoding: 'binary'\n });\n if(privateKey.length === ed25519.constants.SEED_BYTE_LENGTH) {\n var keyPair = ed25519.generateKeyPair({seed: privateKey});\n privateKey = keyPair.privateKey;\n } else if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.privateKey\" must have a byte length of ' +\n ed25519.constants.SEED_BYTE_LENGTH + ' or ' +\n ed25519.constants.PRIVATE_KEY_BYTE_LENGTH);\n }\n\n var signedMsg = new NativeBuffer(\n ed25519.constants.SIGN_BYTE_LENGTH + msg.length);\n crypto_sign(signedMsg, msg, msg.length, privateKey);\n\n var sig = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH);\n for(var i = 0; i < sig.length; ++i) {\n sig[i] = signedMsg[i];\n }\n return sig;\n};\n\ned25519.verify = function(options) {\n options = options || {};\n var msg = messageToNativeBuffer(options);\n if(options.signature === undefined) {\n throw new TypeError(\n '\"options.signature\" must be a node.js Buffer, a Uint8Array, a forge ' +\n 'ByteBuffer, or a binary string.');\n }\n var sig = messageToNativeBuffer({\n message: options.signature,\n encoding: 'binary'\n });\n if(sig.length !== ed25519.constants.SIGN_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.signature\" must have a byte length of ' +\n ed25519.constants.SIGN_BYTE_LENGTH);\n }\n var publicKey = messageToNativeBuffer({\n message: options.publicKey,\n encoding: 'binary'\n });\n if(publicKey.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.publicKey\" must have a byte length of ' +\n ed25519.constants.PUBLIC_KEY_BYTE_LENGTH);\n }\n\n var sm = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length);\n var m = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + 
msg.length);\n var i;\n for(i = 0; i < ed25519.constants.SIGN_BYTE_LENGTH; ++i) {\n sm[i] = sig[i];\n }\n for(i = 0; i < msg.length; ++i) {\n sm[i + ed25519.constants.SIGN_BYTE_LENGTH] = msg[i];\n }\n return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0);\n};\n\nfunction messageToNativeBuffer(options) {\n var message = options.message;\n if(message instanceof Uint8Array || message instanceof NativeBuffer) {\n return message;\n }\n\n var encoding = options.encoding;\n if(message === undefined) {\n if(options.md) {\n // TODO: more rigorous validation that `md` is a MessageDigest\n message = options.md.digest().getBytes();\n encoding = 'binary';\n } else {\n throw new TypeError('\"options.message\" or \"options.md\" not specified.');\n }\n }\n\n if(typeof message === 'string' && !encoding) {\n throw new TypeError('\"options.encoding\" must be \"binary\" or \"utf8\".');\n }\n\n if(typeof message === 'string') {\n if(typeof Buffer !== 'undefined') {\n return Buffer.from(message, encoding);\n }\n message = new ByteBuffer(message, encoding);\n } else if(!(message instanceof ByteBuffer)) {\n throw new TypeError(\n '\"options.message\" must be a node.js Buffer, a Uint8Array, a forge ' +\n 'ByteBuffer, or a string with \"options.encoding\" specifying its ' +\n 'encoding.');\n }\n\n // convert to native buffer\n var buffer = new NativeBuffer(message.length());\n for(var i = 0; i < buffer.length; ++i) {\n buffer[i] = message.at(i);\n }\n return buffer;\n}\n\nvar gf0 = gf();\nvar gf1 = gf([1]);\nvar D = gf([\n 0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070,\n 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]);\nvar D2 = gf([\n 0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0,\n 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]);\nvar X = gf([\n 0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c,\n 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]);\nvar Y = gf([\n 0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666,\n 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]);\nvar L = new Float64Array([\n 0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58,\n 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]);\nvar I = gf([\n 0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43,\n 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]);\n\n// TODO: update forge buffer implementation to use `Buffer` or `Uint8Array`,\n// whichever is available, to improve performance\nfunction sha512(msg, msgLen) {\n // Note: `out` and `msg` are NativeBuffer\n var md = forge.md.sha512.create();\n var buffer = new ByteBuffer(msg);\n md.update(buffer.getBytes(msgLen), 'binary');\n var hash = md.digest().getBytes();\n if(typeof Buffer !== 'undefined') {\n return Buffer.from(hash, 'binary');\n }\n var out = new NativeBuffer(ed25519.constants.HASH_BYTE_LENGTH);\n for(var i = 0; i < 64; ++i) {\n out[i] = hash.charCodeAt(i);\n }\n return out;\n}\n\nfunction crypto_sign_keypair(pk, sk) {\n var p = [gf(), gf(), gf(), gf()];\n var i;\n\n var d = sha512(sk, 32);\n d[0] &= 248;\n d[31] &= 127;\n d[31] |= 64;\n\n scalarbase(p, d);\n pack(pk, p);\n\n for(i = 0; i < 32; ++i) {\n sk[i + 32] = pk[i];\n }\n return 0;\n}\n\n// Note: difference from C - smlen returned, not passed as argument.\nfunction crypto_sign(sm, m, n, sk) {\n var i, j, x = new Float64Array(64);\n var p = [gf(), gf(), gf(), gf()];\n\n var d = sha512(sk, 32);\n d[0] &= 248;\n d[31] &= 127;\n d[31] |= 
64;\n\n var smlen = n + 64;\n for(i = 0; i < n; ++i) {\n sm[64 + i] = m[i];\n }\n for(i = 0; i < 32; ++i) {\n sm[32 + i] = d[32 + i];\n }\n\n var r = sha512(sm.subarray(32), n + 32);\n reduce(r);\n scalarbase(p, r);\n pack(sm, p);\n\n for(i = 32; i < 64; ++i) {\n sm[i] = sk[i];\n }\n var h = sha512(sm, n + 64);\n reduce(h);\n\n for(i = 32; i < 64; ++i) {\n x[i] = 0;\n }\n for(i = 0; i < 32; ++i) {\n x[i] = r[i];\n }\n for(i = 0; i < 32; ++i) {\n for(j = 0; j < 32; j++) {\n x[i + j] += h[i] * d[j];\n }\n }\n\n modL(sm.subarray(32), x);\n return smlen;\n}\n\nfunction crypto_sign_open(m, sm, n, pk) {\n var i, mlen;\n var t = new NativeBuffer(32);\n var p = [gf(), gf(), gf(), gf()],\n q = [gf(), gf(), gf(), gf()];\n\n mlen = -1;\n if(n < 64) {\n return -1;\n }\n\n if(unpackneg(q, pk)) {\n return -1;\n }\n\n for(i = 0; i < n; ++i) {\n m[i] = sm[i];\n }\n for(i = 0; i < 32; ++i) {\n m[i + 32] = pk[i];\n }\n var h = sha512(m, n);\n reduce(h);\n scalarmult(p, q, h);\n\n scalarbase(q, sm.subarray(32));\n add(p, q);\n pack(t, p);\n\n n -= 64;\n if(crypto_verify_32(sm, 0, t, 0)) {\n for(i = 0; i < n; ++i) {\n m[i] = 0;\n }\n return -1;\n }\n\n for(i = 0; i < n; ++i) {\n m[i] = sm[i + 64];\n }\n mlen = n;\n return mlen;\n}\n\nfunction modL(r, x) {\n var carry, i, j, k;\n for(i = 63; i >= 32; --i) {\n carry = 0;\n for(j = i - 32, k = i - 12; j < k; ++j) {\n x[j] += carry - 16 * x[i] * L[j - (i - 32)];\n carry = (x[j] + 128) >> 8;\n x[j] -= carry * 256;\n }\n x[j] += carry;\n x[i] = 0;\n }\n carry = 0;\n for(j = 0; j < 32; ++j) {\n x[j] += carry - (x[31] >> 4) * L[j];\n carry = x[j] >> 8;\n x[j] &= 255;\n }\n for(j = 0; j < 32; ++j) {\n x[j] -= carry * L[j];\n }\n for(i = 0; i < 32; ++i) {\n x[i + 1] += x[i] >> 8;\n r[i] = x[i] & 255;\n }\n}\n\nfunction reduce(r) {\n var x = new Float64Array(64);\n for(var i = 0; i < 64; ++i) {\n x[i] = r[i];\n r[i] = 0;\n }\n modL(r, x);\n}\n\nfunction add(p, q) {\n var a = gf(), b = gf(), c = gf(),\n d = gf(), e = gf(), f = gf(),\n g = gf(), h = gf(), t = gf();\n\n Z(a, p[1], p[0]);\n Z(t, q[1], q[0]);\n M(a, a, t);\n A(b, p[0], p[1]);\n A(t, q[0], q[1]);\n M(b, b, t);\n M(c, p[3], q[3]);\n M(c, c, D2);\n M(d, p[2], q[2]);\n A(d, d, d);\n Z(e, b, a);\n Z(f, d, c);\n A(g, d, c);\n A(h, b, a);\n\n M(p[0], e, f);\n M(p[1], h, g);\n M(p[2], g, f);\n M(p[3], e, h);\n}\n\nfunction cswap(p, q, b) {\n for(var i = 0; i < 4; ++i) {\n sel25519(p[i], q[i], b);\n }\n}\n\nfunction pack(r, p) {\n var tx = gf(), ty = gf(), zi = gf();\n inv25519(zi, p[2]);\n M(tx, p[0], zi);\n M(ty, p[1], zi);\n pack25519(r, ty);\n r[31] ^= par25519(tx) << 7;\n}\n\nfunction pack25519(o, n) {\n var i, j, b;\n var m = gf(), t = gf();\n for(i = 0; i < 16; ++i) {\n t[i] = n[i];\n }\n car25519(t);\n car25519(t);\n car25519(t);\n for(j = 0; j < 2; ++j) {\n m[0] = t[0] - 0xffed;\n for(i = 1; i < 15; ++i) {\n m[i] = t[i] - 0xffff - ((m[i - 1] >> 16) & 1);\n m[i-1] &= 0xffff;\n }\n m[15] = t[15] - 0x7fff - ((m[14] >> 16) & 1);\n b = (m[15] >> 16) & 1;\n m[14] &= 0xffff;\n sel25519(t, m, 1 - b);\n }\n for (i = 0; i < 16; i++) {\n o[2 * i] = t[i] & 0xff;\n o[2 * i + 1] = t[i] >> 8;\n }\n}\n\nfunction unpackneg(r, p) {\n var t = gf(), chk = gf(), num = gf(),\n den = gf(), den2 = gf(), den4 = gf(),\n den6 = gf();\n\n set25519(r[2], gf1);\n unpack25519(r[1], p);\n S(num, r[1]);\n M(den, num, D);\n Z(num, num, r[2]);\n A(den, r[2], den);\n\n S(den2, den);\n S(den4, den2);\n M(den6, den4, den2);\n M(t, den6, num);\n M(t, t, den);\n\n pow2523(t, t);\n M(t, t, num);\n M(t, t, den);\n M(t, t, den);\n M(r[0], t, 
den);\n\n S(chk, r[0]);\n M(chk, chk, den);\n if(neq25519(chk, num)) {\n M(r[0], r[0], I);\n }\n\n S(chk, r[0]);\n M(chk, chk, den);\n if(neq25519(chk, num)) {\n return -1;\n }\n\n if(par25519(r[0]) === (p[31] >> 7)) {\n Z(r[0], gf0, r[0]);\n }\n\n M(r[3], r[0], r[1]);\n return 0;\n}\n\nfunction unpack25519(o, n) {\n var i;\n for(i = 0; i < 16; ++i) {\n o[i] = n[2 * i] + (n[2 * i + 1] << 8);\n }\n o[15] &= 0x7fff;\n}\n\nfunction pow2523(o, i) {\n var c = gf();\n var a;\n for(a = 0; a < 16; ++a) {\n c[a] = i[a];\n }\n for(a = 250; a >= 0; --a) {\n S(c, c);\n if(a !== 1) {\n M(c, c, i);\n }\n }\n for(a = 0; a < 16; ++a) {\n o[a] = c[a];\n }\n}\n\nfunction neq25519(a, b) {\n var c = new NativeBuffer(32);\n var d = new NativeBuffer(32);\n pack25519(c, a);\n pack25519(d, b);\n return crypto_verify_32(c, 0, d, 0);\n}\n\nfunction crypto_verify_32(x, xi, y, yi) {\n return vn(x, xi, y, yi, 32);\n}\n\nfunction vn(x, xi, y, yi, n) {\n var i, d = 0;\n for(i = 0; i < n; ++i) {\n d |= x[xi + i] ^ y[yi + i];\n }\n return (1 & ((d - 1) >>> 8)) - 1;\n}\n\nfunction par25519(a) {\n var d = new NativeBuffer(32);\n pack25519(d, a);\n return d[0] & 1;\n}\n\nfunction scalarmult(p, q, s) {\n var b, i;\n set25519(p[0], gf0);\n set25519(p[1], gf1);\n set25519(p[2], gf1);\n set25519(p[3], gf0);\n for(i = 255; i >= 0; --i) {\n b = (s[(i / 8)|0] >> (i & 7)) & 1;\n cswap(p, q, b);\n add(q, p);\n add(p, p);\n cswap(p, q, b);\n }\n}\n\nfunction scalarbase(p, s) {\n var q = [gf(), gf(), gf(), gf()];\n set25519(q[0], X);\n set25519(q[1], Y);\n set25519(q[2], gf1);\n M(q[3], X, Y);\n scalarmult(p, q, s);\n}\n\nfunction set25519(r, a) {\n var i;\n for(i = 0; i < 16; i++) {\n r[i] = a[i] | 0;\n }\n}\n\nfunction inv25519(o, i) {\n var c = gf();\n var a;\n for(a = 0; a < 16; ++a) {\n c[a] = i[a];\n }\n for(a = 253; a >= 0; --a) {\n S(c, c);\n if(a !== 2 && a !== 4) {\n M(c, c, i);\n }\n }\n for(a = 0; a < 16; ++a) {\n o[a] = c[a];\n }\n}\n\nfunction car25519(o) {\n var i, v, c = 1;\n for(i = 0; i < 16; ++i) {\n v = o[i] + c + 65535;\n c = Math.floor(v / 65536);\n o[i] = v - c * 65536;\n }\n o[0] += c - 1 + 37 * (c - 1);\n}\n\nfunction sel25519(p, q, b) {\n var t, c = ~(b - 1);\n for(var i = 0; i < 16; ++i) {\n t = c & (p[i] ^ q[i]);\n p[i] ^= t;\n q[i] ^= t;\n }\n}\n\nfunction gf(init) {\n var i, r = new Float64Array(16);\n if(init) {\n for(i = 0; i < init.length; ++i) {\n r[i] = init[i];\n }\n }\n return r;\n}\n\nfunction A(o, a, b) {\n for(var i = 0; i < 16; ++i) {\n o[i] = a[i] + b[i];\n }\n}\n\nfunction Z(o, a, b) {\n for(var i = 0; i < 16; ++i) {\n o[i] = a[i] - b[i];\n }\n}\n\nfunction S(o, a) {\n M(o, a, a);\n}\n\nfunction M(o, a, b) {\n var v, c,\n t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0,\n t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0,\n t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0,\n t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0,\n b0 = b[0],\n b1 = b[1],\n b2 = b[2],\n b3 = b[3],\n b4 = b[4],\n b5 = b[5],\n b6 = b[6],\n b7 = b[7],\n b8 = b[8],\n b9 = b[9],\n b10 = b[10],\n b11 = b[11],\n b12 = b[12],\n b13 = b[13],\n b14 = b[14],\n b15 = b[15];\n\n v = a[0];\n t0 += v * b0;\n t1 += v * b1;\n t2 += v * b2;\n t3 += v * b3;\n t4 += v * b4;\n t5 += v * b5;\n t6 += v * b6;\n t7 += v * b7;\n t8 += v * b8;\n t9 += v * b9;\n t10 += v * b10;\n t11 += v * b11;\n t12 += v * b12;\n t13 += v * b13;\n t14 += v * b14;\n t15 += v * b15;\n v = a[1];\n t1 += v * b0;\n t2 += v * b1;\n t3 += v * b2;\n t4 += v * b3;\n t5 += v * b4;\n t6 += v * b5;\n 
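// --- Editorial sketch (illustrative only, not part of the bundled output) ---
// The branch-free byte comparison used by vn()/crypto_verify_32() above:
// OR-ing together the XOR of every byte pair leaves d === 0 only when all
// bytes match, and (1 & ((d - 1) >>> 8)) - 1 then maps d == 0 to 0 and any
// d in 1..255 to -1 without a data-dependent branch. constantTimeEqual is an
// invented wrapper name for this sketch.
function constantTimeEqual(a, b) {
  var d = 0;
  for(var i = 0; i < a.length; ++i) {
    d |= a[i] ^ b[i];                      // accumulate every differing bit
  }
  return ((1 & ((d - 1) >>> 8)) - 1) === 0;
}

console.log(constantTimeEqual(new Uint8Array([1, 2, 3]), new Uint8Array([1, 2, 3]))); // true
console.log(constantTimeEqual(new Uint8Array([1, 2, 3]), new Uint8Array([1, 2, 4]))); // false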
t7 += v * b6;\n t8 += v * b7;\n t9 += v * b8;\n t10 += v * b9;\n t11 += v * b10;\n t12 += v * b11;\n t13 += v * b12;\n t14 += v * b13;\n t15 += v * b14;\n t16 += v * b15;\n v = a[2];\n t2 += v * b0;\n t3 += v * b1;\n t4 += v * b2;\n t5 += v * b3;\n t6 += v * b4;\n t7 += v * b5;\n t8 += v * b6;\n t9 += v * b7;\n t10 += v * b8;\n t11 += v * b9;\n t12 += v * b10;\n t13 += v * b11;\n t14 += v * b12;\n t15 += v * b13;\n t16 += v * b14;\n t17 += v * b15;\n v = a[3];\n t3 += v * b0;\n t4 += v * b1;\n t5 += v * b2;\n t6 += v * b3;\n t7 += v * b4;\n t8 += v * b5;\n t9 += v * b6;\n t10 += v * b7;\n t11 += v * b8;\n t12 += v * b9;\n t13 += v * b10;\n t14 += v * b11;\n t15 += v * b12;\n t16 += v * b13;\n t17 += v * b14;\n t18 += v * b15;\n v = a[4];\n t4 += v * b0;\n t5 += v * b1;\n t6 += v * b2;\n t7 += v * b3;\n t8 += v * b4;\n t9 += v * b5;\n t10 += v * b6;\n t11 += v * b7;\n t12 += v * b8;\n t13 += v * b9;\n t14 += v * b10;\n t15 += v * b11;\n t16 += v * b12;\n t17 += v * b13;\n t18 += v * b14;\n t19 += v * b15;\n v = a[5];\n t5 += v * b0;\n t6 += v * b1;\n t7 += v * b2;\n t8 += v * b3;\n t9 += v * b4;\n t10 += v * b5;\n t11 += v * b6;\n t12 += v * b7;\n t13 += v * b8;\n t14 += v * b9;\n t15 += v * b10;\n t16 += v * b11;\n t17 += v * b12;\n t18 += v * b13;\n t19 += v * b14;\n t20 += v * b15;\n v = a[6];\n t6 += v * b0;\n t7 += v * b1;\n t8 += v * b2;\n t9 += v * b3;\n t10 += v * b4;\n t11 += v * b5;\n t12 += v * b6;\n t13 += v * b7;\n t14 += v * b8;\n t15 += v * b9;\n t16 += v * b10;\n t17 += v * b11;\n t18 += v * b12;\n t19 += v * b13;\n t20 += v * b14;\n t21 += v * b15;\n v = a[7];\n t7 += v * b0;\n t8 += v * b1;\n t9 += v * b2;\n t10 += v * b3;\n t11 += v * b4;\n t12 += v * b5;\n t13 += v * b6;\n t14 += v * b7;\n t15 += v * b8;\n t16 += v * b9;\n t17 += v * b10;\n t18 += v * b11;\n t19 += v * b12;\n t20 += v * b13;\n t21 += v * b14;\n t22 += v * b15;\n v = a[8];\n t8 += v * b0;\n t9 += v * b1;\n t10 += v * b2;\n t11 += v * b3;\n t12 += v * b4;\n t13 += v * b5;\n t14 += v * b6;\n t15 += v * b7;\n t16 += v * b8;\n t17 += v * b9;\n t18 += v * b10;\n t19 += v * b11;\n t20 += v * b12;\n t21 += v * b13;\n t22 += v * b14;\n t23 += v * b15;\n v = a[9];\n t9 += v * b0;\n t10 += v * b1;\n t11 += v * b2;\n t12 += v * b3;\n t13 += v * b4;\n t14 += v * b5;\n t15 += v * b6;\n t16 += v * b7;\n t17 += v * b8;\n t18 += v * b9;\n t19 += v * b10;\n t20 += v * b11;\n t21 += v * b12;\n t22 += v * b13;\n t23 += v * b14;\n t24 += v * b15;\n v = a[10];\n t10 += v * b0;\n t11 += v * b1;\n t12 += v * b2;\n t13 += v * b3;\n t14 += v * b4;\n t15 += v * b5;\n t16 += v * b6;\n t17 += v * b7;\n t18 += v * b8;\n t19 += v * b9;\n t20 += v * b10;\n t21 += v * b11;\n t22 += v * b12;\n t23 += v * b13;\n t24 += v * b14;\n t25 += v * b15;\n v = a[11];\n t11 += v * b0;\n t12 += v * b1;\n t13 += v * b2;\n t14 += v * b3;\n t15 += v * b4;\n t16 += v * b5;\n t17 += v * b6;\n t18 += v * b7;\n t19 += v * b8;\n t20 += v * b9;\n t21 += v * b10;\n t22 += v * b11;\n t23 += v * b12;\n t24 += v * b13;\n t25 += v * b14;\n t26 += v * b15;\n v = a[12];\n t12 += v * b0;\n t13 += v * b1;\n t14 += v * b2;\n t15 += v * b3;\n t16 += v * b4;\n t17 += v * b5;\n t18 += v * b6;\n t19 += v * b7;\n t20 += v * b8;\n t21 += v * b9;\n t22 += v * b10;\n t23 += v * b11;\n t24 += v * b12;\n t25 += v * b13;\n t26 += v * b14;\n t27 += v * b15;\n v = a[13];\n t13 += v * b0;\n t14 += v * b1;\n t15 += v * b2;\n t16 += v * b3;\n t17 += v * b4;\n t18 += v * b5;\n t19 += v * b6;\n t20 += v * b7;\n t21 += v * b8;\n t22 += v * b9;\n t23 += v * b10;\n t24 += v * b11;\n t25 
+= v * b12;\n t26 += v * b13;\n t27 += v * b14;\n t28 += v * b15;\n v = a[14];\n t14 += v * b0;\n t15 += v * b1;\n t16 += v * b2;\n t17 += v * b3;\n t18 += v * b4;\n t19 += v * b5;\n t20 += v * b6;\n t21 += v * b7;\n t22 += v * b8;\n t23 += v * b9;\n t24 += v * b10;\n t25 += v * b11;\n t26 += v * b12;\n t27 += v * b13;\n t28 += v * b14;\n t29 += v * b15;\n v = a[15];\n t15 += v * b0;\n t16 += v * b1;\n t17 += v * b2;\n t18 += v * b3;\n t19 += v * b4;\n t20 += v * b5;\n t21 += v * b6;\n t22 += v * b7;\n t23 += v * b8;\n t24 += v * b9;\n t25 += v * b10;\n t26 += v * b11;\n t27 += v * b12;\n t28 += v * b13;\n t29 += v * b14;\n t30 += v * b15;\n\n t0 += 38 * t16;\n t1 += 38 * t17;\n t2 += 38 * t18;\n t3 += 38 * t19;\n t4 += 38 * t20;\n t5 += 38 * t21;\n t6 += 38 * t22;\n t7 += 38 * t23;\n t8 += 38 * t24;\n t9 += 38 * t25;\n t10 += 38 * t26;\n t11 += 38 * t27;\n t12 += 38 * t28;\n t13 += 38 * t29;\n t14 += 38 * t30;\n // t15 left as is\n\n // first car\n c = 1;\n v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536;\n v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536;\n v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536;\n v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536;\n v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536;\n v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536;\n v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536;\n v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536;\n v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536;\n v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536;\n v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536;\n v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536;\n v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536;\n v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536;\n v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536;\n v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536;\n t0 += c-1 + 37 * (c-1);\n\n // second car\n c = 1;\n v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536;\n v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536;\n v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536;\n v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536;\n v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536;\n v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536;\n v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536;\n v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536;\n v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536;\n v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536;\n v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536;\n v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536;\n v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536;\n v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536;\n v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536;\n v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536;\n t0 += c-1 + 37 * (c-1);\n\n o[ 0] = t0;\n o[ 1] = t1;\n o[ 2] = t2;\n o[ 3] = t3;\n o[ 4] = t4;\n o[ 5] = t5;\n o[ 6] = t6;\n o[ 7] = t7;\n o[ 8] = t8;\n o[ 9] = t9;\n o[10] = t10;\n o[11] = t11;\n o[12] = t12;\n o[13] = t13;\n o[14] = t14;\n o[15] = t15;\n}\n","/**\n * Node.js module for 
Forge.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2016 Digital Bazaar, Inc.\n */\nmodule.exports = {\n // default options\n options: {\n usePureJavaScript: false\n }\n};\n","/**\n * Hash-based Message Authentication Code implementation. Requires a message\n * digest object that can be obtained, for example, from forge.md.sha1 or\n * forge.md.md5.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2012 Digital Bazaar, Inc. All rights reserved.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\n/* HMAC API */\nvar hmac = module.exports = forge.hmac = forge.hmac || {};\n\n/**\n * Creates an HMAC object that uses the given message digest object.\n *\n * @return an HMAC object.\n */\nhmac.create = function() {\n // the hmac key to use\n var _key = null;\n\n // the message digest to use\n var _md = null;\n\n // the inner padding\n var _ipadding = null;\n\n // the outer padding\n var _opadding = null;\n\n // hmac context\n var ctx = {};\n\n /**\n * Starts or restarts the HMAC with the given key and message digest.\n *\n * @param md the message digest to use, null to reuse the previous one,\n * a string to use builtin 'sha1', 'md5', 'sha256'.\n * @param key the key to use as a string, array of bytes, byte buffer,\n * or null to reuse the previous key.\n */\n ctx.start = function(md, key) {\n if(md !== null) {\n if(typeof md === 'string') {\n // create builtin message digest\n md = md.toLowerCase();\n if(md in forge.md.algorithms) {\n _md = forge.md.algorithms[md].create();\n } else {\n throw new Error('Unknown hash algorithm \"' + md + '\"');\n }\n } else {\n // store message digest\n _md = md;\n }\n }\n\n if(key === null) {\n // reuse previous key\n key = _key;\n } else {\n if(typeof key === 'string') {\n // convert string into byte buffer\n key = forge.util.createBuffer(key);\n } else if(forge.util.isArray(key)) {\n // convert byte array into byte buffer\n var tmp = key;\n key = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n key.putByte(tmp[i]);\n }\n }\n\n // if key is longer than blocksize, hash it\n var keylen = key.length();\n if(keylen > _md.blockLength) {\n _md.start();\n _md.update(key.bytes());\n key = _md.digest();\n }\n\n // mix key into inner and outer padding\n // ipadding = [0x36 * blocksize] ^ key\n // opadding = [0x5C * blocksize] ^ key\n _ipadding = forge.util.createBuffer();\n _opadding = forge.util.createBuffer();\n keylen = key.length();\n for(var i = 0; i < keylen; ++i) {\n var tmp = key.at(i);\n _ipadding.putByte(0x36 ^ tmp);\n _opadding.putByte(0x5C ^ tmp);\n }\n\n // if key is shorter than blocksize, add additional padding\n if(keylen < _md.blockLength) {\n var tmp = _md.blockLength - keylen;\n for(var i = 0; i < tmp; ++i) {\n _ipadding.putByte(0x36);\n _opadding.putByte(0x5C);\n }\n }\n _key = key;\n _ipadding = _ipadding.bytes();\n _opadding = _opadding.bytes();\n }\n\n // digest is done like so: hash(opadding | hash(ipadding | message))\n\n // prepare to do inner hash\n // hash(ipadding | message)\n _md.start();\n _md.update(_ipadding);\n };\n\n /**\n * Updates the HMAC with the given message bytes.\n *\n * @param bytes the bytes to update with.\n */\n ctx.update = function(bytes) {\n _md.update(bytes);\n };\n\n /**\n * Produces the Message Authentication Code (MAC).\n *\n * @return a byte buffer containing the digest value.\n */\n ctx.getMac = function() {\n // digest is done like so: hash(opadding | hash(ipadding | message))\n // here we do the outer hashing\n var inner = _md.digest().bytes();\n 
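// --- Editorial sketch (illustrative only, not part of the bundled output) ---
// Using the HMAC context defined above through the public package, assuming
// the vendored node-forge package is importable on its own. Internally the MAC
// is H((K' ^ opad) || H((K' ^ ipad) || message)), exactly as the
// ipadding/opadding handling in start()/getMac() implements.
var forge = require('node-forge');

var hmac = forge.hmac.create();
hmac.start('sha256', 'my-secret-key');
hmac.update('the quick brown fox');
console.log(hmac.digest().toHex());        // 64 hex characters (a 32-byte MAC)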
_md.start();\n _md.update(_opadding);\n _md.update(inner);\n return _md.digest();\n };\n // alias for getMac\n ctx.digest = ctx.getMac;\n\n return ctx;\n};\n","/**\n * Node.js module for Forge.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2016 Digital Bazaar, Inc.\n */\nmodule.exports = require('./forge');\nrequire('./aes');\nrequire('./aesCipherSuites');\nrequire('./asn1');\nrequire('./cipher');\nrequire('./des');\nrequire('./ed25519');\nrequire('./hmac');\nrequire('./kem');\nrequire('./log');\nrequire('./md.all');\nrequire('./mgf1');\nrequire('./pbkdf2');\nrequire('./pem');\nrequire('./pkcs1');\nrequire('./pkcs12');\nrequire('./pkcs7');\nrequire('./pki');\nrequire('./prime');\nrequire('./prng');\nrequire('./pss');\nrequire('./random');\nrequire('./rc2');\nrequire('./ssh');\nrequire('./tls');\nrequire('./util');\n","// Copyright (c) 2005 Tom Wu\n// All Rights Reserved.\n// See \"LICENSE\" for details.\n\n// Basic JavaScript BN library - subset useful for RSA encryption.\n\n/*\nLicensing (LICENSE)\n-------------------\n\nThis software is covered under the following copyright:\n*/\n/*\n * Copyright (c) 2003-2005 Tom Wu\n * All Rights Reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining\n * a copy of this software and associated documentation files (the\n * \"Software\"), to deal in the Software without restriction, including\n * without limitation the rights to use, copy, modify, merge, publish,\n * distribute, sublicense, and/or sell copies of the Software, and to\n * permit persons to whom the Software is furnished to do so, subject to\n * the following conditions:\n *\n * The above copyright notice and this permission notice shall be\n * included in all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS-IS\" AND WITHOUT WARRANTY OF ANY KIND,\n * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY\n * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.\n *\n * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,\n * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER\n * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF\n * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT\n * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n *\n * In addition, the following condition applies:\n *\n * All redistributions must retain an intact copy of this copyright notice\n * and disclaimer.\n */\n/*\nAddress all questions regarding this license to:\n\n Tom Wu\n tjw@cs.Stanford.EDU\n*/\nvar forge = require('./forge');\n\nmodule.exports = forge.jsbn = forge.jsbn || {};\n\n// Bits per digit\nvar dbits;\n\n// JavaScript engine analysis\nvar canary = 0xdeadbeefcafe;\nvar j_lm = ((canary&0xffffff)==0xefcafe);\n\n// (public) Constructor\nfunction BigInteger(a,b,c) {\n this.data = [];\n if(a != null)\n if(\"number\" == typeof a) this.fromNumber(a,b,c);\n else if(b == null && \"string\" != typeof a) this.fromString(a,256);\n else this.fromString(a,b);\n}\nforge.jsbn.BigInteger = BigInteger;\n\n// return new, unset BigInteger\nfunction nbi() { return new BigInteger(null); }\n\n// am: Compute w_j += (x*this_i), propagate carries,\n// c is initial carry, returns final carry.\n// c < 3*dvalue, x < 2*dvalue, this_i < dvalue\n// We need to select the fastest one that works in this environment.\n\n// am1: use a single mult and divide to get the high bits,\n// max digit bits should be 26 because\n// max internal value = 
2*dvalue^2-2*dvalue (< 2^53)\nfunction am1(i,x,w,j,c,n) {\n while(--n >= 0) {\n var v = x*this.data[i++]+w.data[j]+c;\n c = Math.floor(v/0x4000000);\n w.data[j++] = v&0x3ffffff;\n }\n return c;\n}\n// am2 avoids a big mult-and-extract completely.\n// Max digit bits should be <= 30 because we do bitwise ops\n// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)\nfunction am2(i,x,w,j,c,n) {\n var xl = x&0x7fff, xh = x>>15;\n while(--n >= 0) {\n var l = this.data[i]&0x7fff;\n var h = this.data[i++]>>15;\n var m = xh*l+h*xl;\n l = xl*l+((m&0x7fff)<<15)+w.data[j]+(c&0x3fffffff);\n c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);\n w.data[j++] = l&0x3fffffff;\n }\n return c;\n}\n// Alternately, set max digit bits to 28 since some\n// browsers slow down when dealing with 32-bit numbers.\nfunction am3(i,x,w,j,c,n) {\n var xl = x&0x3fff, xh = x>>14;\n while(--n >= 0) {\n var l = this.data[i]&0x3fff;\n var h = this.data[i++]>>14;\n var m = xh*l+h*xl;\n l = xl*l+((m&0x3fff)<<14)+w.data[j]+c;\n c = (l>>28)+(m>>14)+xh*h;\n w.data[j++] = l&0xfffffff;\n }\n return c;\n}\n\n// node.js (no browser)\nif(typeof(navigator) === 'undefined')\n{\n BigInteger.prototype.am = am3;\n dbits = 28;\n} else if(j_lm && (navigator.appName == \"Microsoft Internet Explorer\")) {\n BigInteger.prototype.am = am2;\n dbits = 30;\n} else if(j_lm && (navigator.appName != \"Netscape\")) {\n BigInteger.prototype.am = am1;\n dbits = 26;\n} else { // Mozilla/Netscape seems to prefer am3\n BigInteger.prototype.am = am3;\n dbits = 28;\n}\n\nBigInteger.prototype.DB = dbits;\nBigInteger.prototype.DM = ((1<= 0; --i) r.data[i] = this.data[i];\n r.t = this.t;\n r.s = this.s;\n}\n\n// (protected) set from integer value x, -DV <= x < DV\nfunction bnpFromInt(x) {\n this.t = 1;\n this.s = (x<0)?-1:0;\n if(x > 0) this.data[0] = x;\n else if(x < -1) this.data[0] = x+this.DV;\n else this.t = 0;\n}\n\n// return bigint initialized to value\nfunction nbv(i) { var r = nbi(); r.fromInt(i); return r; }\n\n// (protected) set from string and radix\nfunction bnpFromString(s,b) {\n var k;\n if(b == 16) k = 4;\n else if(b == 8) k = 3;\n else if(b == 256) k = 8; // byte array\n else if(b == 2) k = 1;\n else if(b == 32) k = 5;\n else if(b == 4) k = 2;\n else { this.fromRadix(s,b); return; }\n this.t = 0;\n this.s = 0;\n var i = s.length, mi = false, sh = 0;\n while(--i >= 0) {\n var x = (k==8)?s[i]&0xff:intAt(s,i);\n if(x < 0) {\n if(s.charAt(i) == \"-\") mi = true;\n continue;\n }\n mi = false;\n if(sh == 0)\n this.data[this.t++] = x;\n else if(sh+k > this.DB) {\n this.data[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh));\n } else\n this.data[this.t-1] |= x<= this.DB) sh -= this.DB;\n }\n if(k == 8 && (s[0]&0x80) != 0) {\n this.s = -1;\n if(sh > 0) this.data[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this.data[this.t-1] == c) --this.t;\n}\n\n// (public) return string representation in given radix\nfunction bnToString(b) {\n if(this.s < 0) return \"-\"+this.negate().toString(b);\n var k;\n if(b == 16) k = 4;\n else if(b == 8) k = 3;\n else if(b == 2) k = 1;\n else if(b == 32) k = 5;\n else if(b == 4) k = 2;\n else return this.toRadix(b);\n var km = (1< 0) {\n if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); }\n while(i >= 0) {\n if(p < k) {\n d = (this.data[i]&((1<>(p+=this.DB-k);\n } else {\n d = (this.data[i]>>(p-=k))&km;\n if(p <= 0) { p += this.DB; --i; }\n }\n if(d > 0) m = true;\n if(m) r += int2char(d);\n }\n }\n return m?r:\"0\";\n}\n\n// (public) -this\nfunction bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }\n\n// 
(public) |this|\nfunction bnAbs() { return (this.s<0)?this.negate():this; }\n\n// (public) return + if this > a, - if this < a, 0 if equal\nfunction bnCompareTo(a) {\n var r = this.s-a.s;\n if(r != 0) return r;\n var i = this.t;\n r = i-a.t;\n if(r != 0) return (this.s<0)?-r:r;\n while(--i >= 0) if((r=this.data[i]-a.data[i]) != 0) return r;\n return 0;\n}\n\n// returns bit length of the integer x\nfunction nbits(x) {\n var r = 1, t;\n if((t=x>>>16) != 0) { x = t; r += 16; }\n if((t=x>>8) != 0) { x = t; r += 8; }\n if((t=x>>4) != 0) { x = t; r += 4; }\n if((t=x>>2) != 0) { x = t; r += 2; }\n if((t=x>>1) != 0) { x = t; r += 1; }\n return r;\n}\n\n// (public) return the number of bits in \"this\"\nfunction bnBitLength() {\n if(this.t <= 0) return 0;\n return this.DB*(this.t-1)+nbits(this.data[this.t-1]^(this.s&this.DM));\n}\n\n// (protected) r = this << n*DB\nfunction bnpDLShiftTo(n,r) {\n var i;\n for(i = this.t-1; i >= 0; --i) r.data[i+n] = this.data[i];\n for(i = n-1; i >= 0; --i) r.data[i] = 0;\n r.t = this.t+n;\n r.s = this.s;\n}\n\n// (protected) r = this >> n*DB\nfunction bnpDRShiftTo(n,r) {\n for(var i = n; i < this.t; ++i) r.data[i-n] = this.data[i];\n r.t = Math.max(this.t-n,0);\n r.s = this.s;\n}\n\n// (protected) r = this << n\nfunction bnpLShiftTo(n,r) {\n var bs = n%this.DB;\n var cbs = this.DB-bs;\n var bm = (1<= 0; --i) {\n r.data[i+ds+1] = (this.data[i]>>cbs)|c;\n c = (this.data[i]&bm)<= 0; --i) r.data[i] = 0;\n r.data[ds] = c;\n r.t = this.t+ds+1;\n r.s = this.s;\n r.clamp();\n}\n\n// (protected) r = this >> n\nfunction bnpRShiftTo(n,r) {\n r.s = this.s;\n var ds = Math.floor(n/this.DB);\n if(ds >= this.t) { r.t = 0; return; }\n var bs = n%this.DB;\n var cbs = this.DB-bs;\n var bm = (1<>bs;\n for(var i = ds+1; i < this.t; ++i) {\n r.data[i-ds-1] |= (this.data[i]&bm)<>bs;\n }\n if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<>= this.DB;\n }\n if(a.t < this.t) {\n c -= a.s;\n while(i < this.t) {\n c += this.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += this.s;\n } else {\n c += this.s;\n while(i < a.t) {\n c -= a.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c -= a.s;\n }\n r.s = (c<0)?-1:0;\n if(c < -1) r.data[i++] = this.DV+c;\n else if(c > 0) r.data[i++] = c;\n r.t = i;\n r.clamp();\n}\n\n// (protected) r = this * a, r != this,a (HAC 14.12)\n// \"this\" should be the larger one if appropriate.\nfunction bnpMultiplyTo(a,r) {\n var x = this.abs(), y = a.abs();\n var i = x.t;\n r.t = i+y.t;\n while(--i >= 0) r.data[i] = 0;\n for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t);\n r.s = 0;\n r.clamp();\n if(this.s != a.s) BigInteger.ZERO.subTo(r,r);\n}\n\n// (protected) r = this^2, r != this (HAC 14.16)\nfunction bnpSquareTo(r) {\n var x = this.abs();\n var i = r.t = 2*x.t;\n while(--i >= 0) r.data[i] = 0;\n for(i = 0; i < x.t-1; ++i) {\n var c = x.am(i,x.data[i],r,2*i,0,1);\n if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {\n r.data[i+x.t] -= x.DV;\n r.data[i+x.t+1] = 1;\n }\n }\n if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1);\n r.s = 0;\n r.clamp();\n}\n\n// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)\n// r != q, this != m. 
q or r may be null.\nfunction bnpDivRemTo(m,q,r) {\n var pm = m.abs();\n if(pm.t <= 0) return;\n var pt = this.abs();\n if(pt.t < pm.t) {\n if(q != null) q.fromInt(0);\n if(r != null) this.copyTo(r);\n return;\n }\n if(r == null) r = nbi();\n var y = nbi(), ts = this.s, ms = m.s;\n var nsh = this.DB-nbits(pm.data[pm.t-1]);\t// normalize modulus\n if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); }\n var ys = y.t;\n var y0 = y.data[ys-1];\n if(y0 == 0) return;\n var yt = y0*(1<1)?y.data[ys-2]>>this.F2:0);\n var d1 = this.FV/yt, d2 = (1<= 0) {\n r.data[r.t++] = 1;\n r.subTo(t,r);\n }\n BigInteger.ONE.dlShiftTo(ys,t);\n t.subTo(y,y);\t// \"negative\" y so we can replace sub with am later\n while(y.t < ys) y.data[y.t++] = 0;\n while(--j >= 0) {\n // Estimate quotient digit\n var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2);\n if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) {\t// Try it out\n y.dlShiftTo(j,t);\n r.subTo(t,r);\n while(r.data[i] < --qd) r.subTo(t,r);\n }\n }\n if(q != null) {\n r.drShiftTo(ys,q);\n if(ts != ms) BigInteger.ZERO.subTo(q,q);\n }\n r.t = ys;\n r.clamp();\n if(nsh > 0) r.rShiftTo(nsh,r);\t// Denormalize remainder\n if(ts < 0) BigInteger.ZERO.subTo(r,r);\n}\n\n// (public) this mod a\nfunction bnMod(a) {\n var r = nbi();\n this.abs().divRemTo(a,null,r);\n if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);\n return r;\n}\n\n// Modular reduction using \"classic\" algorithm\nfunction Classic(m) { this.m = m; }\nfunction cConvert(x) {\n if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);\n else return x;\n}\nfunction cRevert(x) { return x; }\nfunction cReduce(x) { x.divRemTo(this.m,null,x); }\nfunction cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\nfunction cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\nClassic.prototype.convert = cConvert;\nClassic.prototype.revert = cRevert;\nClassic.prototype.reduce = cReduce;\nClassic.prototype.mulTo = cMulTo;\nClassic.prototype.sqrTo = cSqrTo;\n\n// (protected) return \"-1/this % 2^DB\"; useful for Mont. 
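// --- Editorial sketch (illustrative only, not part of the bundled output) ---
// The Hensel-lifting idea behind the invDigit() routine that follows: starting
// from an inverse of an odd x modulo 2^2, each step y = y * (2 - x*y) doubles
// the number of correct low bits, so a few steps give -1/x mod 2^bits.
// negInverseMod is an invented name, and `bits` is kept small (<= 15) so plain
// 32-bit bitwise arithmetic suffices; invDigit() itself works up to 2^DB.
function negInverseMod(x, bits) {
  var mask = (1 << bits) - 1;
  var y = x & 3;                            // y == 1/x mod 2^2
  for(;;) {
    var t = (y * (2 - ((x * y) & mask))) & mask;
    if(t === y) {
      break;                                // fixed point: y is 1/x mod 2^bits
    }
    y = t;
  }
  return (-y) & mask;                       // the *negative* inverse, like invDigit()
}

var xd = 0x3b1d, bits = 14, mask = (1 << bits) - 1;
console.log(((xd * negInverseMod(xd, bits)) & mask) === mask); // true: x * (-1/x) == -1 mod 2^14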
reduction\n// justification:\n// xy == 1 (mod m)\n// xy = 1+km\n// xy(2-xy) = (1+km)(1-km)\n// x[y(2-xy)] = 1-k^2m^2\n// x[y(2-xy)] == 1 (mod m^2)\n// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2\n// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.\n// JS multiply \"overflows\" differently from C/C++, so care is needed here.\nfunction bnpInvDigit() {\n if(this.t < 1) return 0;\n var x = this.data[0];\n if((x&1) == 0) return 0;\n var y = x&3;\t\t// y == 1/x mod 2^2\n y = (y*(2-(x&0xf)*y))&0xf;\t// y == 1/x mod 2^4\n y = (y*(2-(x&0xff)*y))&0xff;\t// y == 1/x mod 2^8\n y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;\t// y == 1/x mod 2^16\n // last step - calculate inverse mod DV directly;\n // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints\n y = (y*(2-x*y%this.DV))%this.DV;\t\t// y == 1/x mod 2^dbits\n // we really want the negative inverse, and -DV < y < DV\n return (y>0)?this.DV-y:-y;\n}\n\n// Montgomery reduction\nfunction Montgomery(m) {\n this.m = m;\n this.mp = m.invDigit();\n this.mpl = this.mp&0x7fff;\n this.mph = this.mp>>15;\n this.um = (1<<(m.DB-15))-1;\n this.mt2 = 2*m.t;\n}\n\n// xR mod m\nfunction montConvert(x) {\n var r = nbi();\n x.abs().dlShiftTo(this.m.t,r);\n r.divRemTo(this.m,null,r);\n if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);\n return r;\n}\n\n// x/R mod m\nfunction montRevert(x) {\n var r = nbi();\n x.copyTo(r);\n this.reduce(r);\n return r;\n}\n\n// x = x/R mod m (HAC 14.32)\nfunction montReduce(x) {\n while(x.t <= this.mt2)\t// pad x so am has enough room later\n x.data[x.t++] = 0;\n for(var i = 0; i < this.m.t; ++i) {\n // faster way of calculating u0 = x.data[i]*mp mod DV\n var j = x.data[i]&0x7fff;\n var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM;\n // use am to combine the multiply-shift-add into one call\n j = i+this.m.t;\n x.data[j] += this.m.am(0,u0,x,i,0,this.m.t);\n // propagate carry\n while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; }\n }\n x.clamp();\n x.drShiftTo(this.m.t,x);\n if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);\n}\n\n// r = \"x^2/R mod m\"; x != r\nfunction montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\n// r = \"xy/R mod m\"; x,y != r\nfunction montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\n\nMontgomery.prototype.convert = montConvert;\nMontgomery.prototype.revert = montRevert;\nMontgomery.prototype.reduce = montReduce;\nMontgomery.prototype.mulTo = montMulTo;\nMontgomery.prototype.sqrTo = montSqrTo;\n\n// (protected) true iff this is even\nfunction bnpIsEven() { return ((this.t>0)?(this.data[0]&1):this.s) == 0; }\n\n// (protected) this^e, e < 2^32, doing sqr and mul with \"r\" (HAC 14.79)\nfunction bnpExp(e,z) {\n if(e > 0xffffffff || e < 1) return BigInteger.ONE;\n var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;\n g.copyTo(r);\n while(--i >= 0) {\n z.sqrTo(r,r2);\n if((e&(1< 0) z.mulTo(r2,g,r);\n else { var t = r; r = r2; r2 = t; }\n }\n return z.revert(r);\n}\n\n// (public) this^e % m, 0 <= e < 2^32\nfunction bnModPowInt(e,m) {\n var z;\n if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);\n return this.exp(e,z);\n}\n\n// protected\nBigInteger.prototype.copyTo = bnpCopyTo;\nBigInteger.prototype.fromInt = bnpFromInt;\nBigInteger.prototype.fromString = bnpFromString;\nBigInteger.prototype.clamp = bnpClamp;\nBigInteger.prototype.dlShiftTo = bnpDLShiftTo;\nBigInteger.prototype.drShiftTo = bnpDRShiftTo;\nBigInteger.prototype.lShiftTo = bnpLShiftTo;\nBigInteger.prototype.rShiftTo = 
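A standalone illustration of the Hensel-lifting step that the bnpInvDigit comment justifies: each y = y*(2 - x*y) pass doubles the number of correct low-order bits of the inverse of an odd x modulo a power of two. Plain JS numbers are used here rather than BigInteger words, and the value of x is arbitrary.

// Illustration only: the refinement used by bnpInvDigit above.
var x = 0x9d2f;                                           // arbitrary odd 16-bit value
var y = x & 3;                                            // y == 1/x mod 2^2
y = (y * (2 - (x & 0xf) * y)) & 0xf;                      // y == 1/x mod 2^4
y = (y * (2 - (x & 0xff) * y)) & 0xff;                    // y == 1/x mod 2^8
y = (y * (2 - (((x & 0xffff) * y) & 0xffff))) & 0xffff;   // y == 1/x mod 2^16
console.log(((x * y) & 0xffff) === 1);                    // true: x*y == 1 mod 2^16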
bnpRShiftTo;\nBigInteger.prototype.subTo = bnpSubTo;\nBigInteger.prototype.multiplyTo = bnpMultiplyTo;\nBigInteger.prototype.squareTo = bnpSquareTo;\nBigInteger.prototype.divRemTo = bnpDivRemTo;\nBigInteger.prototype.invDigit = bnpInvDigit;\nBigInteger.prototype.isEven = bnpIsEven;\nBigInteger.prototype.exp = bnpExp;\n\n// public\nBigInteger.prototype.toString = bnToString;\nBigInteger.prototype.negate = bnNegate;\nBigInteger.prototype.abs = bnAbs;\nBigInteger.prototype.compareTo = bnCompareTo;\nBigInteger.prototype.bitLength = bnBitLength;\nBigInteger.prototype.mod = bnMod;\nBigInteger.prototype.modPowInt = bnModPowInt;\n\n// \"constants\"\nBigInteger.ZERO = nbv(0);\nBigInteger.ONE = nbv(1);\n\n// jsbn2 lib\n\n//Copyright (c) 2005-2009 Tom Wu\n//All Rights Reserved.\n//See \"LICENSE\" for details (See jsbn.js for LICENSE).\n\n//Extended JavaScript BN functions, required for RSA private ops.\n\n//Version 1.1: new BigInteger(\"0\", 10) returns \"proper\" zero\n\n//(public)\nfunction bnClone() { var r = nbi(); this.copyTo(r); return r; }\n\n//(public) return value as integer\nfunction bnIntValue() {\nif(this.s < 0) {\n if(this.t == 1) return this.data[0]-this.DV;\n else if(this.t == 0) return -1;\n} else if(this.t == 1) return this.data[0];\nelse if(this.t == 0) return 0;\n// assumes 16 < DB < 32\nreturn ((this.data[1]&((1<<(32-this.DB))-1))<>24; }\n\n//(public) return value as short (assumes DB>=16)\nfunction bnShortValue() { return (this.t==0)?this.s:(this.data[0]<<16)>>16; }\n\n//(protected) return x s.t. r^x < DV\nfunction bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }\n\n//(public) 0 if this == 0, 1 if this > 0\nfunction bnSigNum() {\nif(this.s < 0) return -1;\nelse if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0;\nelse return 1;\n}\n\n//(protected) convert to radix string\nfunction bnpToRadix(b) {\nif(b == null) b = 10;\nif(this.signum() == 0 || b < 2 || b > 36) return \"0\";\nvar cs = this.chunkSize(b);\nvar a = Math.pow(b,cs);\nvar d = nbv(a), y = nbi(), z = nbi(), r = \"\";\nthis.divRemTo(d,y,z);\nwhile(y.signum() > 0) {\n r = (a+z.intValue()).toString(b).substr(1) + r;\n y.divRemTo(d,y,z);\n}\nreturn z.intValue().toString(b) + r;\n}\n\n//(protected) convert from radix string\nfunction bnpFromRadix(s,b) {\nthis.fromInt(0);\nif(b == null) b = 10;\nvar cs = this.chunkSize(b);\nvar d = Math.pow(b,cs), mi = false, j = 0, w = 0;\nfor(var i = 0; i < s.length; ++i) {\n var x = intAt(s,i);\n if(x < 0) {\n if(s.charAt(i) == \"-\" && this.signum() == 0) mi = true;\n continue;\n }\n w = b*w+x;\n if(++j >= cs) {\n this.dMultiply(d);\n this.dAddOffset(w,0);\n j = 0;\n w = 0;\n }\n}\nif(j > 0) {\n this.dMultiply(Math.pow(b,j));\n this.dAddOffset(w,0);\n}\nif(mi) BigInteger.ZERO.subTo(this,this);\n}\n\n//(protected) alternate constructor\nfunction bnpFromNumber(a,b,c) {\nif(\"number\" == typeof b) {\n // new BigInteger(int,int,RNG)\n if(a < 2) this.fromInt(1);\n else {\n this.fromNumber(a,c);\n if(!this.testBit(a-1)) // force MSB set\n this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);\n if(this.isEven()) this.dAddOffset(1,0); // force odd\n while(!this.isProbablePrime(b)) {\n this.dAddOffset(2,0);\n if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);\n }\n }\n} else {\n // new BigInteger(int,RNG)\n var x = new Array(), t = a&7;\n x.length = (a>>3)+1;\n b.nextBytes(x);\n if(t > 0) x[0] &= ((1< 0) {\n if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p)\n r[k++] = d|(this.s<<(this.DB-p));\n while(i >= 0) {\n if(p < 8) {\n d 
= (this.data[i]&((1<>(p+=this.DB-8);\n } else {\n d = (this.data[i]>>(p-=8))&0xff;\n if(p <= 0) { p += this.DB; --i; }\n }\n if((d&0x80) != 0) d |= -256;\n if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;\n if(k > 0 || d != this.s) r[k++] = d;\n }\n}\nreturn r;\n}\n\nfunction bnEquals(a) { return(this.compareTo(a)==0); }\nfunction bnMin(a) { return(this.compareTo(a)<0)?this:a; }\nfunction bnMax(a) { return(this.compareTo(a)>0)?this:a; }\n\n//(protected) r = this op a (bitwise)\nfunction bnpBitwiseTo(a,op,r) {\nvar i, f, m = Math.min(a.t,this.t);\nfor(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]);\nif(a.t < this.t) {\n f = a.s&this.DM;\n for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f);\n r.t = this.t;\n} else {\n f = this.s&this.DM;\n for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]);\n r.t = a.t;\n}\nr.s = op(this.s,a.s);\nr.clamp();\n}\n\n//(public) this & a\nfunction op_and(x,y) { return x&y; }\nfunction bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }\n\n//(public) this | a\nfunction op_or(x,y) { return x|y; }\nfunction bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }\n\n//(public) this ^ a\nfunction op_xor(x,y) { return x^y; }\nfunction bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }\n\n//(public) this & ~a\nfunction op_andnot(x,y) { return x&~y; }\nfunction bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }\n\n//(public) ~this\nfunction bnNot() {\nvar r = nbi();\nfor(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i];\nr.t = this.t;\nr.s = ~this.s;\nreturn r;\n}\n\n//(public) this << n\nfunction bnShiftLeft(n) {\nvar r = nbi();\nif(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);\nreturn r;\n}\n\n//(public) this >> n\nfunction bnShiftRight(n) {\nvar r = nbi();\nif(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);\nreturn r;\n}\n\n//return index of lowest 1-bit in x, x < 2^31\nfunction lbit(x) {\nif(x == 0) return -1;\nvar r = 0;\nif((x&0xffff) == 0) { x >>= 16; r += 16; }\nif((x&0xff) == 0) { x >>= 8; r += 8; }\nif((x&0xf) == 0) { x >>= 4; r += 4; }\nif((x&3) == 0) { x >>= 2; r += 2; }\nif((x&1) == 0) ++r;\nreturn r;\n}\n\n//(public) returns index of lowest 1-bit (or -1 if none)\nfunction bnGetLowestSetBit() {\nfor(var i = 0; i < this.t; ++i)\n if(this.data[i] != 0) return i*this.DB+lbit(this.data[i]);\nif(this.s < 0) return this.t*this.DB;\nreturn -1;\n}\n\n//return number of 1 bits in x\nfunction cbit(x) {\nvar r = 0;\nwhile(x != 0) { x &= x-1; ++r; }\nreturn r;\n}\n\n//(public) return number of set bits\nfunction bnBitCount() {\nvar r = 0, x = this.s&this.DM;\nfor(var i = 0; i < this.t; ++i) r += cbit(this.data[i]^x);\nreturn r;\n}\n\n//(public) true iff nth bit is set\nfunction bnTestBit(n) {\nvar j = Math.floor(n/this.DB);\nif(j >= this.t) return(this.s!=0);\nreturn((this.data[j]&(1<<(n%this.DB)))!=0);\n}\n\n//(protected) this op (1<>= this.DB;\n}\nif(a.t < this.t) {\n c += a.s;\n while(i < this.t) {\n c += this.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += this.s;\n} else {\n c += this.s;\n while(i < a.t) {\n c += a.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += a.s;\n}\nr.s = (c<0)?-1:0;\nif(c > 0) r.data[i++] = c;\nelse if(c < -1) r.data[i++] = this.DV+c;\nr.t = i;\nr.clamp();\n}\n\n//(public) this + a\nfunction bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }\n\n//(public) this - a\nfunction bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }\n\n//(public) this * a\nfunction bnMultiply(a) { var r = nbi(); 
this.multiplyTo(a,r); return r; }\n\n//(public) this / a\nfunction bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }\n\n//(public) this % a\nfunction bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }\n\n//(public) [this/a,this%a]\nfunction bnDivideAndRemainder(a) {\nvar q = nbi(), r = nbi();\nthis.divRemTo(a,q,r);\nreturn new Array(q,r);\n}\n\n//(protected) this *= n, this >= 0, 1 < n < DV\nfunction bnpDMultiply(n) {\nthis.data[this.t] = this.am(0,n-1,this,0,0,this.t);\n++this.t;\nthis.clamp();\n}\n\n//(protected) this += n << w words, this >= 0\nfunction bnpDAddOffset(n,w) {\nif(n == 0) return;\nwhile(this.t <= w) this.data[this.t++] = 0;\nthis.data[w] += n;\nwhile(this.data[w] >= this.DV) {\n this.data[w] -= this.DV;\n if(++w >= this.t) this.data[this.t++] = 0;\n ++this.data[w];\n}\n}\n\n//A \"null\" reducer\nfunction NullExp() {}\nfunction nNop(x) { return x; }\nfunction nMulTo(x,y,r) { x.multiplyTo(y,r); }\nfunction nSqrTo(x,r) { x.squareTo(r); }\n\nNullExp.prototype.convert = nNop;\nNullExp.prototype.revert = nNop;\nNullExp.prototype.mulTo = nMulTo;\nNullExp.prototype.sqrTo = nSqrTo;\n\n//(public) this^e\nfunction bnPow(e) { return this.exp(e,new NullExp()); }\n\n//(protected) r = lower n words of \"this * a\", a.t <= n\n//\"this\" should be the larger one if appropriate.\nfunction bnpMultiplyLowerTo(a,n,r) {\nvar i = Math.min(this.t+a.t,n);\nr.s = 0; // assumes a,this >= 0\nr.t = i;\nwhile(i > 0) r.data[--i] = 0;\nvar j;\nfor(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t);\nfor(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i);\nr.clamp();\n}\n\n//(protected) r = \"this * a\" without lower n words, n > 0\n//\"this\" should be the larger one if appropriate.\nfunction bnpMultiplyUpperTo(a,n,r) {\n--n;\nvar i = r.t = this.t+a.t-n;\nr.s = 0; // assumes a,this >= 0\nwhile(--i >= 0) r.data[i] = 0;\nfor(i = Math.max(n-this.t,0); i < a.t; ++i)\n r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n);\nr.clamp();\nr.drShiftTo(1,r);\n}\n\n//Barrett modular reduction\nfunction Barrett(m) {\n// setup Barrett\nthis.r2 = nbi();\nthis.q3 = nbi();\nBigInteger.ONE.dlShiftTo(2*m.t,this.r2);\nthis.mu = this.r2.divide(m);\nthis.m = m;\n}\n\nfunction barrettConvert(x) {\nif(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);\nelse if(x.compareTo(this.m) < 0) return x;\nelse { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }\n}\n\nfunction barrettRevert(x) { return x; }\n\n//x = x mod m (HAC 14.42)\nfunction barrettReduce(x) {\nx.drShiftTo(this.m.t-1,this.r2);\nif(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }\nthis.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);\nthis.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);\nwhile(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);\nx.subTo(this.r2,x);\nwhile(x.compareTo(this.m) >= 0) x.subTo(this.m,x);\n}\n\n//r = x^2 mod m; x != r\nfunction barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\n//r = x*y mod m; x,y != r\nfunction barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\n\nBarrett.prototype.convert = barrettConvert;\nBarrett.prototype.revert = barrettRevert;\nBarrett.prototype.reduce = barrettReduce;\nBarrett.prototype.mulTo = barrettMulTo;\nBarrett.prototype.sqrTo = barrettSqrTo;\n\n//(public) this^e % m (HAC 14.85)\nfunction bnModPow(e,m) {\nvar i = e.bitLength(), k, r = nbv(1), z;\nif(i <= 0) return r;\nelse if(i < 18) k = 1;\nelse if(i < 48) k = 3;\nelse if(i < 144) k = 4;\nelse if(i < 768) k = 5;\nelse k = 6;\nif(i < 8)\n z = new 
Classic(m);\nelse if(m.isEven())\n z = new Barrett(m);\nelse\n z = new Montgomery(m);\n\n// precomputation\nvar g = new Array(), n = 3, k1 = k-1, km = (1< 1) {\n var g2 = nbi();\n z.sqrTo(g[1],g2);\n while(n <= km) {\n g[n] = nbi();\n z.mulTo(g2,g[n-2],g[n]);\n n += 2;\n }\n}\n\nvar j = e.t-1, w, is1 = true, r2 = nbi(), t;\ni = nbits(e.data[j])-1;\nwhile(j >= 0) {\n if(i >= k1) w = (e.data[j]>>(i-k1))&km;\n else {\n w = (e.data[j]&((1<<(i+1))-1))<<(k1-i);\n if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1);\n }\n\n n = k;\n while((w&1) == 0) { w >>= 1; --n; }\n if((i -= n) < 0) { i += this.DB; --j; }\n if(is1) { // ret == 1, don't bother squaring or multiplying it\n g[w].copyTo(r);\n is1 = false;\n } else {\n while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }\n if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }\n z.mulTo(r2,g[w],r);\n }\n\n while(j >= 0 && (e.data[j]&(1< 0) {\n x.rShiftTo(g,x);\n y.rShiftTo(g,y);\n}\nwhile(x.signum() > 0) {\n if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);\n if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);\n if(x.compareTo(y) >= 0) {\n x.subTo(y,x);\n x.rShiftTo(1,x);\n } else {\n y.subTo(x,y);\n y.rShiftTo(1,y);\n }\n}\nif(g > 0) y.lShiftTo(g,y);\nreturn y;\n}\n\n//(protected) this % n, n < 2^26\nfunction bnpModInt(n) {\nif(n <= 0) return 0;\nvar d = this.DV%n, r = (this.s<0)?n-1:0;\nif(this.t > 0)\n if(d == 0) r = this.data[0]%n;\n else for(var i = this.t-1; i >= 0; --i) r = (d*r+this.data[i])%n;\nreturn r;\n}\n\n//(public) 1/this % m (HAC 14.61)\nfunction bnModInverse(m) {\nvar ac = m.isEven();\nif((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;\nvar u = m.clone(), v = this.clone();\nvar a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);\nwhile(u.signum() != 0) {\n while(u.isEven()) {\n u.rShiftTo(1,u);\n if(ac) {\n if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }\n a.rShiftTo(1,a);\n } else if(!b.isEven()) b.subTo(m,b);\n b.rShiftTo(1,b);\n }\n while(v.isEven()) {\n v.rShiftTo(1,v);\n if(ac) {\n if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }\n c.rShiftTo(1,c);\n } else if(!d.isEven()) d.subTo(m,d);\n d.rShiftTo(1,d);\n }\n if(u.compareTo(v) >= 0) {\n u.subTo(v,u);\n if(ac) a.subTo(c,a);\n b.subTo(d,b);\n } else {\n v.subTo(u,v);\n if(ac) c.subTo(a,c);\n d.subTo(b,d);\n }\n}\nif(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;\nif(d.compareTo(m) >= 0) return d.subtract(m);\nif(d.signum() < 0) d.addTo(m,d); else return d;\nif(d.signum() < 0) return d.add(m); else return d;\n}\n\nvar lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509];\nvar lplim = (1<<26)/lowprimes[lowprimes.length-1];\n\n//(public) test primality with certainty >= 1-.5^t\nfunction bnIsProbablePrime(t) {\nvar i, x = this.abs();\nif(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) {\n for(i = 0; i < lowprimes.length; ++i)\n if(x.data[0] == lowprimes[i]) return true;\n return false;\n}\nif(x.isEven()) return false;\ni = 1;\nwhile(i < lowprimes.length) {\n var m = lowprimes[i], j = i+1;\n while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];\n m = x.modInt(m);\n while(i < j) if(m%lowprimes[i++] == 0) return false;\n}\nreturn x.millerRabin(t);\n}\n\n//(protected) true if probably prime (HAC 
4.24, Miller-Rabin)\nfunction bnpMillerRabin(t) {\nvar n1 = this.subtract(BigInteger.ONE);\nvar k = n1.getLowestSetBit();\nif(k <= 0) return false;\nvar r = n1.shiftRight(k);\nvar prng = bnGetPrng();\nvar a;\nfor(var i = 0; i < t; ++i) {\n // select witness 'a' at random from between 1 and n1\n do {\n a = new BigInteger(this.bitLength(), prng);\n }\n while(a.compareTo(BigInteger.ONE) <= 0 || a.compareTo(n1) >= 0);\n var y = a.modPow(r,this);\n if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {\n var j = 1;\n while(j++ < k && y.compareTo(n1) != 0) {\n y = y.modPowInt(2,this);\n if(y.compareTo(BigInteger.ONE) == 0) return false;\n }\n if(y.compareTo(n1) != 0) return false;\n }\n}\nreturn true;\n}\n\n// get pseudo random number generator\nfunction bnGetPrng() {\n // create prng with api that matches BigInteger secure random\n return {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n for(var i = 0; i < x.length; ++i) {\n x[i] = Math.floor(Math.random() * 0x0100);\n }\n }\n };\n}\n\n//protected\nBigInteger.prototype.chunkSize = bnpChunkSize;\nBigInteger.prototype.toRadix = bnpToRadix;\nBigInteger.prototype.fromRadix = bnpFromRadix;\nBigInteger.prototype.fromNumber = bnpFromNumber;\nBigInteger.prototype.bitwiseTo = bnpBitwiseTo;\nBigInteger.prototype.changeBit = bnpChangeBit;\nBigInteger.prototype.addTo = bnpAddTo;\nBigInteger.prototype.dMultiply = bnpDMultiply;\nBigInteger.prototype.dAddOffset = bnpDAddOffset;\nBigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;\nBigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;\nBigInteger.prototype.modInt = bnpModInt;\nBigInteger.prototype.millerRabin = bnpMillerRabin;\n\n//public\nBigInteger.prototype.clone = bnClone;\nBigInteger.prototype.intValue = bnIntValue;\nBigInteger.prototype.byteValue = bnByteValue;\nBigInteger.prototype.shortValue = bnShortValue;\nBigInteger.prototype.signum = bnSigNum;\nBigInteger.prototype.toByteArray = bnToByteArray;\nBigInteger.prototype.equals = bnEquals;\nBigInteger.prototype.min = bnMin;\nBigInteger.prototype.max = bnMax;\nBigInteger.prototype.and = bnAnd;\nBigInteger.prototype.or = bnOr;\nBigInteger.prototype.xor = bnXor;\nBigInteger.prototype.andNot = bnAndNot;\nBigInteger.prototype.not = bnNot;\nBigInteger.prototype.shiftLeft = bnShiftLeft;\nBigInteger.prototype.shiftRight = bnShiftRight;\nBigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;\nBigInteger.prototype.bitCount = bnBitCount;\nBigInteger.prototype.testBit = bnTestBit;\nBigInteger.prototype.setBit = bnSetBit;\nBigInteger.prototype.clearBit = bnClearBit;\nBigInteger.prototype.flipBit = bnFlipBit;\nBigInteger.prototype.add = bnAdd;\nBigInteger.prototype.subtract = bnSubtract;\nBigInteger.prototype.multiply = bnMultiply;\nBigInteger.prototype.divide = bnDivide;\nBigInteger.prototype.remainder = bnRemainder;\nBigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;\nBigInteger.prototype.modPow = bnModPow;\nBigInteger.prototype.modInverse = bnModInverse;\nBigInteger.prototype.pow = bnPow;\nBigInteger.prototype.gcd = bnGCD;\nBigInteger.prototype.isProbablePrime = bnIsProbablePrime;\n\n//BigInteger interfaces not implemented in jsbn:\n\n//BigInteger(int signum, byte[] magnitude)\n//double doubleValue()\n//float floatValue()\n//int hashCode()\n//long longValue()\n//static BigInteger valueOf(long val)\n","/**\n * Javascript implementation of RSA-KEM.\n *\n * @author Lautaro Cozzani Rodriguez\n * @author Dave Longley\n *\n * Copyright (c) 2014 Lautaro Cozzani \n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar 
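With the prototype wiring above in place, the public BigInteger API can be exercised directly. A small sketch, assuming the class is exposed as forge.jsbn.BigInteger (as the RSA-KEM module below also assumes):

var forge = require('node-forge');
var BigInteger = forge.jsbn.BigInteger;

// modPow picks Classic, Barrett or Montgomery reduction by size and parity
var base = new BigInteger('4', 10);
var mod = new BigInteger('497', 10);
console.log(base.modPow(new BigInteger('13', 10), mod).toString(10)); // '445' (4^13 mod 497)
console.log(base.modPowInt(13, mod).toString(10));                    // '445', plain-int exponent

console.log(new BigInteger('270', 10).gcd(new BigInteger('192', 10)).toString(10));     // '6'
console.log(new BigInteger('7', 10).modInverse(new BigInteger('40', 10)).toString(10)); // '23' (7*23 = 161)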
forge = require('./forge');\nrequire('./util');\nrequire('./random');\nrequire('./jsbn');\n\nmodule.exports = forge.kem = forge.kem || {};\n\nvar BigInteger = forge.jsbn.BigInteger;\n\n/**\n * The API for the RSA Key Encapsulation Mechanism (RSA-KEM) from ISO 18033-2.\n */\nforge.kem.rsa = {};\n\n/**\n * Creates an RSA KEM API object for generating a secret asymmetric key.\n *\n * The symmetric key may be generated via a call to 'encrypt', which will\n * produce a ciphertext to be transmitted to the recipient and a key to be\n * kept secret. The ciphertext is a parameter to be passed to 'decrypt' which\n * will produce the same secret key for the recipient to use to decrypt a\n * message that was encrypted with the secret key.\n *\n * @param kdf the KDF API to use (eg: new forge.kem.kdf1()).\n * @param options the options to use.\n * [prng] a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n */\nforge.kem.rsa.create = function(kdf, options) {\n options = options || {};\n var prng = options.prng || forge.random;\n\n var kem = {};\n\n /**\n * Generates a secret key and its encapsulation.\n *\n * @param publicKey the RSA public key to encrypt with.\n * @param keyLength the length, in bytes, of the secret key to generate.\n *\n * @return an object with:\n * encapsulation: the ciphertext for generating the secret key, as a\n * binary-encoded string of bytes.\n * key: the secret key to use for encrypting a message.\n */\n kem.encrypt = function(publicKey, keyLength) {\n // generate a random r where 1 < r < n\n var byteLength = Math.ceil(publicKey.n.bitLength() / 8);\n var r;\n do {\n r = new BigInteger(\n forge.util.bytesToHex(prng.getBytesSync(byteLength)),\n 16).mod(publicKey.n);\n } while(r.compareTo(BigInteger.ONE) <= 0);\n\n // prepend r with zeros\n r = forge.util.hexToBytes(r.toString(16));\n var zeros = byteLength - r.length;\n if(zeros > 0) {\n r = forge.util.fillString(String.fromCharCode(0), zeros) + r;\n }\n\n // encrypt the random\n var encapsulation = publicKey.encrypt(r, 'NONE');\n\n // generate the secret key\n var key = kdf.generate(r, keyLength);\n\n return {encapsulation: encapsulation, key: key};\n };\n\n /**\n * Decrypts an encapsulated secret key.\n *\n * @param privateKey the RSA private key to decrypt with.\n * @param encapsulation the ciphertext for generating the secret key, as\n * a binary-encoded string of bytes.\n * @param keyLength the length, in bytes, of the secret key to generate.\n *\n * @return the secret key as a binary-encoded string of bytes.\n */\n kem.decrypt = function(privateKey, encapsulation, keyLength) {\n // decrypt the encapsulation and generate the secret key\n var r = privateKey.decrypt(encapsulation, 'NONE');\n return kdf.generate(r, keyLength);\n };\n\n return kem;\n};\n\n// TODO: add forge.kem.kdf.create('KDF1', {md: ..., ...}) API?\n\n/**\n * Creates a key derivation API object that implements KDF1 per ISO 18033-2.\n *\n * @param md the hash API to use.\n * @param [digestLength] an optional digest length that must be positive and\n * less than or equal to md.digestLength.\n *\n * @return a KDF1 API object.\n */\nforge.kem.kdf1 = function(md, digestLength) {\n _createKDF(this, md, 0, digestLength || md.digestLength);\n};\n\n/**\n * Creates a key derivation API object that implements KDF2 per ISO 18033-2.\n *\n * @param md the hash API to use.\n * @param [digestLength] an optional digest length that must be positive and\n * less than or equal to md.digestLength.\n *\n * @return a KDF2 API 
object.\n */\nforge.kem.kdf2 = function(md, digestLength) {\n _createKDF(this, md, 1, digestLength || md.digestLength);\n};\n\n/**\n * Creates a KDF1 or KDF2 API object.\n *\n * @param md the hash API to use.\n * @param counterStart the starting index for the counter.\n * @param digestLength the digest length to use.\n *\n * @return the KDF API object.\n */\nfunction _createKDF(kdf, md, counterStart, digestLength) {\n /**\n * Generate a key of the specified length.\n *\n * @param x the binary-encoded byte string to generate a key from.\n * @param length the number of bytes to generate (the size of the key).\n *\n * @return the key as a binary-encoded string.\n */\n kdf.generate = function(x, length) {\n var key = new forge.util.ByteBuffer();\n\n // run counter from counterStart to ceil(length / Hash.len)\n var k = Math.ceil(length / digestLength) + counterStart;\n\n var c = new forge.util.ByteBuffer();\n for(var i = counterStart; i < k; ++i) {\n // I2OSP(i, 4): convert counter to an octet string of 4 octets\n c.putInt32(i);\n\n // digest 'x' and the counter and add the result to the key\n md.start();\n md.update(x + c.getBytes());\n var hash = md.digest();\n key.putBytes(hash.getBytes(digestLength));\n }\n\n // truncate to the correct key length\n key.truncate(key.length() - length);\n return key.getBytes();\n };\n}\n","/**\n * Cross-browser support for logging in a web application.\n *\n * @author David I. Lehn \n *\n * Copyright (c) 2008-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\n/* LOG API */\nmodule.exports = forge.log = forge.log || {};\n\n/**\n * Application logging system.\n *\n * Each logger level available as it's own function of the form:\n * forge.log.level(category, args...)\n * The category is an arbitrary string, and the args are the same as\n * Firebug's console.log API. By default the call will be output as:\n * 'LEVEL [category] , args[1], ...'\n * This enables proper % formatting via the first argument.\n * Each category is enabled by default but can be enabled or disabled with\n * the setCategoryEnabled() function.\n */\n// list of known levels\nforge.log.levels = [\n 'none', 'error', 'warning', 'info', 'debug', 'verbose', 'max'];\n// info on the levels indexed by name:\n// index: level index\n// name: uppercased display name\nvar sLevelInfo = {};\n// list of loggers\nvar sLoggers = [];\n/**\n * Standard console logger. If no console support is enabled this will\n * remain null. Check before using.\n */\nvar sConsoleLogger = null;\n\n// logger flags\n/**\n * Lock the level at the current value. Used in cases where user config may\n * set the level such that only critical messages are seen but more verbose\n * messages are needed for debugging or other purposes.\n */\nforge.log.LEVEL_LOCKED = (1 << 1);\n/**\n * Always call log function. By default, the logging system will check the\n * message level against logger.level before calling the log function. This\n * flag allows the function to do its own check.\n */\nforge.log.NO_LEVEL_CHECK = (1 << 2);\n/**\n * Perform message interpolation with the passed arguments. \"%\" style\n * fields in log messages will be replaced by arguments as needed. Some\n * loggers, such as Firebug, may do this automatically. 
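A hedged round-trip sketch of the RSA-KEM API defined above. forge.pki.rsa.generateKeyPair comes from forge's rsa module, which is outside this excerpt, so treat that call as an assumption.

var forge = require('node-forge');

var keypair = forge.pki.rsa.generateKeyPair({bits: 2048, e: 0x10001}); // assumed from wider forge API
var kdf = new forge.kem.kdf1(forge.md.sha1.create());
var kem = forge.kem.rsa.create(kdf);

// sender: derive a 16-byte secret key plus its encapsulation
var result = kem.encrypt(keypair.publicKey, 16);
// receiver: recover the same key from the encapsulation
var key = kem.decrypt(keypair.privateKey, result.encapsulation, 16);
console.log(key === result.key); // true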
The original log\n * message will be available as 'message' and the interpolated version will\n * be available as 'fullMessage'.\n */\nforge.log.INTERPOLATE = (1 << 3);\n\n// setup each log level\nfor(var i = 0; i < forge.log.levels.length; ++i) {\n var level = forge.log.levels[i];\n sLevelInfo[level] = {\n index: i,\n name: level.toUpperCase()\n };\n}\n\n/**\n * Message logger. Will dispatch a message to registered loggers as needed.\n *\n * @param message message object\n */\nforge.log.logMessage = function(message) {\n var messageLevelIndex = sLevelInfo[message.level].index;\n for(var i = 0; i < sLoggers.length; ++i) {\n var logger = sLoggers[i];\n if(logger.flags & forge.log.NO_LEVEL_CHECK) {\n logger.f(message);\n } else {\n // get logger level\n var loggerLevelIndex = sLevelInfo[logger.level].index;\n // check level\n if(messageLevelIndex <= loggerLevelIndex) {\n // message critical enough, call logger\n logger.f(logger, message);\n }\n }\n }\n};\n\n/**\n * Sets the 'standard' key on a message object to:\n * \"LEVEL [category] \" + message\n *\n * @param message a message log object\n */\nforge.log.prepareStandard = function(message) {\n if(!('standard' in message)) {\n message.standard =\n sLevelInfo[message.level].name +\n //' ' + +message.timestamp +\n ' [' + message.category + '] ' +\n message.message;\n }\n};\n\n/**\n * Sets the 'full' key on a message object to the original message\n * interpolated via % formatting with the message arguments.\n *\n * @param message a message log object.\n */\nforge.log.prepareFull = function(message) {\n if(!('full' in message)) {\n // copy args and insert message at the front\n var args = [message.message];\n args = args.concat([] || message['arguments']);\n // format the message\n message.full = forge.util.format.apply(this, args);\n }\n};\n\n/**\n * Applies both preparseStandard() and prepareFull() to a message object and\n * store result in 'standardFull'.\n *\n * @param message a message log object.\n */\nforge.log.prepareStandardFull = function(message) {\n if(!('standardFull' in message)) {\n // FIXME implement 'standardFull' logging\n forge.log.prepareStandard(message);\n message.standardFull = message.standard;\n }\n};\n\n// create log level functions\nif(true) {\n // levels for which we want functions\n var levels = ['error', 'warning', 'info', 'debug', 'verbose'];\n for(var i = 0; i < levels.length; ++i) {\n // wrap in a function to ensure proper level var is passed\n (function(level) {\n // create function for this level\n forge.log[level] = function(category, message/*, args...*/) {\n // convert arguments to real array, remove category and message\n var args = Array.prototype.slice.call(arguments).slice(2);\n // create message object\n // Note: interpolation and standard formatting is done lazily\n var msg = {\n timestamp: new Date(),\n level: level,\n category: category,\n message: message,\n 'arguments': args\n /*standard*/\n /*full*/\n /*fullMessage*/\n };\n // process this message\n forge.log.logMessage(msg);\n };\n })(levels[i]);\n }\n}\n\n/**\n * Creates a new logger with specified custom logging function.\n *\n * The logging function has a signature of:\n * function(logger, message)\n * logger: current logger\n * message: object:\n * level: level id\n * category: category\n * message: string message\n * arguments: Array of extra arguments\n * fullMessage: interpolated message and arguments if INTERPOLATE flag set\n *\n * @param logFunction a logging function which takes a log message object\n * as a parameter.\n *\n * 
@return a logger object.\n */\nforge.log.makeLogger = function(logFunction) {\n var logger = {\n flags: 0,\n f: logFunction\n };\n forge.log.setLevel(logger, 'none');\n return logger;\n};\n\n/**\n * Sets the current log level on a logger.\n *\n * @param logger the target logger.\n * @param level the new maximum log level as a string.\n *\n * @return true if set, false if not.\n */\nforge.log.setLevel = function(logger, level) {\n var rval = false;\n if(logger && !(logger.flags & forge.log.LEVEL_LOCKED)) {\n for(var i = 0; i < forge.log.levels.length; ++i) {\n var aValidLevel = forge.log.levels[i];\n if(level == aValidLevel) {\n // set level\n logger.level = level;\n rval = true;\n break;\n }\n }\n }\n\n return rval;\n};\n\n/**\n * Locks the log level at its current value.\n *\n * @param logger the target logger.\n * @param lock boolean lock value, default to true.\n */\nforge.log.lock = function(logger, lock) {\n if(typeof lock === 'undefined' || lock) {\n logger.flags |= forge.log.LEVEL_LOCKED;\n } else {\n logger.flags &= ~forge.log.LEVEL_LOCKED;\n }\n};\n\n/**\n * Adds a logger.\n *\n * @param logger the logger object.\n */\nforge.log.addLogger = function(logger) {\n sLoggers.push(logger);\n};\n\n// setup the console logger if possible, else create fake console.log\nif(typeof(console) !== 'undefined' && 'log' in console) {\n var logger;\n if(console.error && console.warn && console.info && console.debug) {\n // looks like Firebug-style logging is available\n // level handlers map\n var levelHandlers = {\n error: console.error,\n warning: console.warn,\n info: console.info,\n debug: console.debug,\n verbose: console.debug\n };\n var f = function(logger, message) {\n forge.log.prepareStandard(message);\n var handler = levelHandlers[message.level];\n // prepend standard message and concat args\n var args = [message.standard];\n args = args.concat(message['arguments'].slice());\n // apply to low-level console function\n handler.apply(console, args);\n };\n logger = forge.log.makeLogger(f);\n } else {\n // only appear to have basic console.log\n var f = function(logger, message) {\n forge.log.prepareStandardFull(message);\n console.log(message.standardFull);\n };\n logger = forge.log.makeLogger(f);\n }\n forge.log.setLevel(logger, 'debug');\n forge.log.addLogger(logger);\n sConsoleLogger = logger;\n} else {\n // define fake console.log to avoid potential script errors on\n // browsers that do not have console logging\n console = {\n log: function() {}\n };\n}\n\n/*\n * Check for logging control query vars in current URL.\n *\n * console.level=\n * Set's the console log level by name. Useful to override defaults and\n * allow more verbose logging before a user config is loaded.\n *\n * console.lock=\n * Lock the console log level at whatever level it is set at. This is run\n * after console.level is processed. 
Useful to force a level of verbosity\n * that could otherwise be limited by a user config.\n */\nif(sConsoleLogger !== null &&\n typeof window !== 'undefined' && window.location\n) {\n var query = new URL(window.location.href).searchParams;\n if(query.has('console.level')) {\n // set with last value\n forge.log.setLevel(\n sConsoleLogger, query.get('console.level').slice(-1)[0]);\n }\n if(query.has('console.lock')) {\n // set with last value\n var lock = query.get('console.lock').slice(-1)[0];\n if(lock == 'true') {\n forge.log.lock(sConsoleLogger);\n }\n }\n}\n\n// provide public access to console logger\nforge.log.consoleLogger = sConsoleLogger;\n","/**\n * Node.js module for all known Forge message digests.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2017 Digital Bazaar, Inc.\n */\nmodule.exports = require('./md');\n\nrequire('./md5');\nrequire('./sha1');\nrequire('./sha256');\nrequire('./sha512');\n","/**\n * Node.js module for Forge message digests.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2017 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\n\nmodule.exports = forge.md = forge.md || {};\nforge.md.algorithms = forge.md.algorithms || {};\n","/**\n * Message Digest Algorithm 5 with 128-bit digest (MD5) implementation.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar md5 = module.exports = forge.md5 = forge.md5 || {};\nforge.md.md5 = forge.md.algorithms.md5 = md5;\n\n/**\n * Creates an MD5 message digest object.\n *\n * @return a message digest object.\n */\nmd5.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // MD5 state contains four 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(16);\n\n // message digest object\n var md = {\n algorithm: 'md5',\n blockLength: 64,\n digestLength: 16,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = {\n h0: 0x67452301,\n h1: 0xEFCDAB89,\n h2: 0x98BADCFE,\n h3: 0x10325476\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. 
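A small usage sketch of the logging API above: register a custom logger, cap it at 'warning', and emit categorized messages. The callback signature matches the logger.f(logger, message) dispatch in logMessage.

var forge = require('node-forge');

var logger = forge.log.makeLogger(function(logger, message) {
  forge.log.prepareStandard(message);   // fills message.standard
  console.log(message.standard);        // e.g. "WARNING [example] something happened"
});
forge.log.setLevel(logger, 'warning');
forge.log.addLogger(logger);

forge.log.warning('example', 'something happened'); // dispatched to our logger
forge.log.debug('example', 'too verbose');          // filtered out: debug is below 'warning'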
The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = (len[1] / 0x100000000) >>> 0;\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate MD5 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in little-endian order; since length\n // is stored in bytes we multiply by 8 and add carry\n var bits, carry = 0;\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n bits = md.fullMessageLength[i] * 8 + carry;\n carry = (bits / 0x100000000) >>> 0;\n finalBlock.putInt32Le(bits >>> 0);\n }\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32Le(s2.h0);\n rval.putInt32Le(s2.h1);\n rval.putInt32Le(s2.h2);\n rval.putInt32Le(s2.h3);\n return rval;\n };\n\n return md;\n};\n\n// padding, constant tables for calculating md5\nvar _padding = null;\nvar _g = null;\nvar _r = null;\nvar _k = null;\nvar _initialized = false;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // g values\n _g = [\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,\n 1, 6, 11, 0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12,\n 5, 8, 11, 14, 1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 2,\n 0, 7, 14, 5, 12, 3, 10, 1, 8, 15, 6, 13, 4, 11, 2, 9];\n\n // rounds table\n _r = [\n 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22,\n 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20,\n 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23,\n 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21];\n\n // get the result of abs(sin(i + 1)) as a 32-bit integer\n _k = new Array(64);\n for(var i = 0; i < 64; ++i) {\n _k[i] = Math.floor(Math.abs(Math.sin(i + 1)) * 0x100000000);\n }\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates an MD5 state with the given byte buffer.\n *\n * @param s the MD5 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t, a, b, c, d, f, r, i;\n var len = bytes.length();\n while(len >= 64) {\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n\n // round 1\n for(i = 0; i < 16; ++i) {\n w[i] = bytes.getInt32Le();\n f = d ^ (b & (c ^ d));\n t = (a + f + _k[i] + w[i]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n // round 2\n for(; i < 32; ++i) {\n f = c ^ (d & (b ^ c));\n t = (a + f + _k[i] + w[_g[i]]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n // round 3\n for(; i < 48; ++i) {\n f = b ^ c ^ d;\n t = (a + f + _k[i] + w[_g[i]]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n // round 4\n for(; i < 64; ++i) {\n f = c ^ (b | ~d);\n t = (a + f + _k[i] + w[_g[i]]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 
0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n\n len -= 64;\n }\n}\n","/**\n * Node.js module for Forge mask generation functions.\n *\n * @author Stefan Siegl\n *\n * Copyright 2012 Stefan Siegl \n */\nvar forge = require('./forge');\nrequire('./mgf1');\n\nmodule.exports = forge.mgf = forge.mgf || {};\nforge.mgf.mgf1 = forge.mgf1;\n","/**\n * Javascript implementation of mask generation function MGF1.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nforge.mgf = forge.mgf || {};\nvar mgf1 = module.exports = forge.mgf.mgf1 = forge.mgf1 = forge.mgf1 || {};\n\n/**\n * Creates a MGF1 mask generation function object.\n *\n * @param md the message digest API to use (eg: forge.md.sha1.create()).\n *\n * @return a mask generation function object.\n */\nmgf1.create = function(md) {\n var mgf = {\n /**\n * Generate mask of specified length.\n *\n * @param {String} seed The seed for mask generation.\n * @param maskLen Number of bytes to generate.\n * @return {String} The generated mask.\n */\n generate: function(seed, maskLen) {\n /* 2. Let T be the empty octet string. */\n var t = new forge.util.ByteBuffer();\n\n /* 3. For counter from 0 to ceil(maskLen / hLen), do the following: */\n var len = Math.ceil(maskLen / md.digestLength);\n for(var i = 0; i < len; i++) {\n /* a. Convert counter to an octet string C of length 4 octets */\n var c = new forge.util.ByteBuffer();\n c.putInt32(i);\n\n /* b. Concatenate the hash of the seed mgfSeed and C to the octet\n * string T: */\n md.start();\n md.update(seed + c.getBytes());\n t.putBuffer(md.digest());\n }\n\n /* Output the leading maskLen octets of T as the octet string mask. 
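A quick check of the MD5 digest object defined above, using the classic 'abc' test vector; digest() returns a forge byte buffer, so toHex() is available.

var forge = require('node-forge');

var md = forge.md5.create();        // same object as forge.md.md5.create()
md.update('abc');                   // raw input; pass 'utf8' as the encoding to encode first
console.log(md.digest().toHex());   // '900150983cd24fb0d6963f7d28e17f72'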
*/\n t.truncate(t.length() - maskLen);\n return t.getBytes();\n }\n };\n\n return mgf;\n};\n","/**\n * Object IDs for ASN.1.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\n\nforge.pki = forge.pki || {};\nvar oids = module.exports = forge.pki.oids = forge.oids = forge.oids || {};\n\n// set id to name mapping and name to id mapping\nfunction _IN(id, name) {\n oids[id] = name;\n oids[name] = id;\n}\n// set id to name mapping only\nfunction _I_(id, name) {\n oids[id] = name;\n}\n\n// algorithm OIDs\n_IN('1.2.840.113549.1.1.1', 'rsaEncryption');\n// Note: md2 & md4 not implemented\n//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption');\n//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption');\n_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption');\n_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption');\n_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP');\n_IN('1.2.840.113549.1.1.8', 'mgf1');\n_IN('1.2.840.113549.1.1.9', 'pSpecified');\n_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS');\n_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption');\n_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption');\n_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption');\n// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519\n_IN('1.3.101.112', 'EdDSA25519');\n\n_IN('1.2.840.10040.4.3', 'dsa-with-sha1');\n\n_IN('1.3.14.3.2.7', 'desCBC');\n\n_IN('1.3.14.3.2.26', 'sha1');\n// Deprecated equivalent of sha1WithRSAEncryption\n_IN('1.3.14.3.2.29', 'sha1WithRSASignature');\n_IN('2.16.840.1.101.3.4.2.1', 'sha256');\n_IN('2.16.840.1.101.3.4.2.2', 'sha384');\n_IN('2.16.840.1.101.3.4.2.3', 'sha512');\n_IN('2.16.840.1.101.3.4.2.4', 'sha224');\n_IN('2.16.840.1.101.3.4.2.5', 'sha512-224');\n_IN('2.16.840.1.101.3.4.2.6', 'sha512-256');\n_IN('1.2.840.113549.2.2', 'md2');\n_IN('1.2.840.113549.2.5', 'md5');\n\n// pkcs#7 content types\n_IN('1.2.840.113549.1.7.1', 'data');\n_IN('1.2.840.113549.1.7.2', 'signedData');\n_IN('1.2.840.113549.1.7.3', 'envelopedData');\n_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData');\n_IN('1.2.840.113549.1.7.5', 'digestedData');\n_IN('1.2.840.113549.1.7.6', 'encryptedData');\n\n// pkcs#9 oids\n_IN('1.2.840.113549.1.9.1', 'emailAddress');\n_IN('1.2.840.113549.1.9.2', 'unstructuredName');\n_IN('1.2.840.113549.1.9.3', 'contentType');\n_IN('1.2.840.113549.1.9.4', 'messageDigest');\n_IN('1.2.840.113549.1.9.5', 'signingTime');\n_IN('1.2.840.113549.1.9.6', 'counterSignature');\n_IN('1.2.840.113549.1.9.7', 'challengePassword');\n_IN('1.2.840.113549.1.9.8', 'unstructuredAddress');\n_IN('1.2.840.113549.1.9.14', 'extensionRequest');\n\n_IN('1.2.840.113549.1.9.20', 'friendlyName');\n_IN('1.2.840.113549.1.9.21', 'localKeyId');\n_IN('1.2.840.113549.1.9.22.1', 'x509Certificate');\n\n// pkcs#12 safe bags\n_IN('1.2.840.113549.1.12.10.1.1', 'keyBag');\n_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag');\n_IN('1.2.840.113549.1.12.10.1.3', 'certBag');\n_IN('1.2.840.113549.1.12.10.1.4', 'crlBag');\n_IN('1.2.840.113549.1.12.10.1.5', 'secretBag');\n_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag');\n\n// password-based-encryption for pkcs#12\n_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2');\n_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2');\n\n_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4');\n_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4');\n_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC');\n_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC');\n_IN('1.2.840.113549.1.12.1.5', 
'pbeWithSHAAnd128BitRC2-CBC');\n_IN('1.2.840.113549.1.12.1.6', 'pbewithSHAAnd40BitRC2-CBC');\n\n// hmac OIDs\n_IN('1.2.840.113549.2.7', 'hmacWithSHA1');\n_IN('1.2.840.113549.2.8', 'hmacWithSHA224');\n_IN('1.2.840.113549.2.9', 'hmacWithSHA256');\n_IN('1.2.840.113549.2.10', 'hmacWithSHA384');\n_IN('1.2.840.113549.2.11', 'hmacWithSHA512');\n\n// symmetric key algorithm oids\n_IN('1.2.840.113549.3.7', 'des-EDE3-CBC');\n_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC');\n_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC');\n_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC');\n\n// certificate issuer/subject OIDs\n_IN('2.5.4.3', 'commonName');\n_IN('2.5.4.4', 'surname');\n_IN('2.5.4.5', 'serialNumber');\n_IN('2.5.4.6', 'countryName');\n_IN('2.5.4.7', 'localityName');\n_IN('2.5.4.8', 'stateOrProvinceName');\n_IN('2.5.4.9', 'streetAddress');\n_IN('2.5.4.10', 'organizationName');\n_IN('2.5.4.11', 'organizationalUnitName');\n_IN('2.5.4.12', 'title');\n_IN('2.5.4.13', 'description');\n_IN('2.5.4.15', 'businessCategory');\n_IN('2.5.4.17', 'postalCode');\n_IN('2.5.4.42', 'givenName');\n_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName');\n_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName');\n\n// X.509 extension OIDs\n_IN('2.16.840.1.113730.1.1', 'nsCertType');\n_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used\n_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35\n_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15\n_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32\n_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15\n_I_('2.5.29.5', 'policyMapping'); // deprecated use .33\n_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30\n_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17\n_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18\n_I_('2.5.29.9', 'subjectDirectoryAttributes');\n_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19\n_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30\n_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36\n_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19\n_IN('2.5.29.14', 'subjectKeyIdentifier');\n_IN('2.5.29.15', 'keyUsage');\n_I_('2.5.29.16', 'privateKeyUsagePeriod');\n_IN('2.5.29.17', 'subjectAltName');\n_IN('2.5.29.18', 'issuerAltName');\n_IN('2.5.29.19', 'basicConstraints');\n_I_('2.5.29.20', 'cRLNumber');\n_I_('2.5.29.21', 'cRLReason');\n_I_('2.5.29.22', 'expirationDate');\n_I_('2.5.29.23', 'instructionCode');\n_I_('2.5.29.24', 'invalidityDate');\n_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31\n_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28\n_I_('2.5.29.27', 'deltaCRLIndicator');\n_I_('2.5.29.28', 'issuingDistributionPoint');\n_I_('2.5.29.29', 'certificateIssuer');\n_I_('2.5.29.30', 'nameConstraints');\n_IN('2.5.29.31', 'cRLDistributionPoints');\n_IN('2.5.29.32', 'certificatePolicies');\n_I_('2.5.29.33', 'policyMappings');\n_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36\n_IN('2.5.29.35', 'authorityKeyIdentifier');\n_I_('2.5.29.36', 'policyConstraints');\n_IN('2.5.29.37', 'extKeyUsage');\n_I_('2.5.29.46', 'freshestCRL');\n_I_('2.5.29.54', 'inhibitAnyPolicy');\n\n// extKeyUsage purposes\n_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList');\n_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess');\n_IN('1.3.6.1.5.5.7.3.1', 'serverAuth');\n_IN('1.3.6.1.5.5.7.3.2', 'clientAuth');\n_IN('1.3.6.1.5.5.7.3.3', 'codeSigning');\n_IN('1.3.6.1.5.5.7.3.4', 
'emailProtection');\n_IN('1.3.6.1.5.5.7.3.8', 'timeStamping');\n","/**\n * Password-based encryption functions.\n *\n * @author Dave Longley\n * @author Stefan Siegl \n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * An EncryptedPrivateKeyInfo:\n *\n * EncryptedPrivateKeyInfo ::= SEQUENCE {\n * encryptionAlgorithm EncryptionAlgorithmIdentifier,\n * encryptedData EncryptedData }\n *\n * EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * EncryptedData ::= OCTET STRING\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./asn1');\nrequire('./des');\nrequire('./md');\nrequire('./oids');\nrequire('./pbkdf2');\nrequire('./pem');\nrequire('./random');\nrequire('./rc2');\nrequire('./rsa');\nrequire('./util');\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Password-based encryption implementation. */\nvar pki = forge.pki = forge.pki || {};\nmodule.exports = pki.pbe = forge.pbe = forge.pbe || {};\nvar oids = pki.oids;\n\n// validator for an EncryptedPrivateKeyInfo structure\n// Note: Currently only works w/algorithm params\nvar encryptedPrivateKeyValidator = {\n name: 'EncryptedPrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encryptionOid'\n }, {\n name: 'AlgorithmIdentifier.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'encryptionParams'\n }]\n }, {\n // encryptedData\n name: 'EncryptedPrivateKeyInfo.encryptedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encryptedData'\n }]\n};\n\n// validator for a PBES2Algorithms structure\n// Note: Currently only works w/PBKDF2 + AES encryption schemes\nvar PBES2AlgorithmsValidator = {\n name: 'PBES2Algorithms',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.keyDerivationFunc',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.keyDerivationFunc.oid',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'kdfOid'\n }, {\n name: 'PBES2Algorithms.params',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.params.salt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'kdfSalt'\n }, {\n name: 'PBES2Algorithms.params.iterationCount',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'kdfIterationCount'\n }, {\n name: 'PBES2Algorithms.params.keyLength',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n optional: true,\n capture: 'keyLength'\n }, {\n // prf\n name: 'PBES2Algorithms.params.prf',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'PBES2Algorithms.params.prf.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'prfOid'\n }]\n }]\n }]\n 
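The _IN() registrations above make forge.pki.oids a two-way map (while _I_ registers the id-to-name direction only), which the PBE code below relies on. For example:

var forge = require('node-forge');
var oids = forge.pki.oids;

console.log(oids['1.2.840.113549.1.1.1']);  // 'rsaEncryption'
console.log(oids['sha256']);                // '2.16.840.1.101.3.4.2.1'
console.log(oids['2.5.29.15']);             // 'keyUsage'
console.log(oids['2.5.29.1']);              // 'authorityKeyIdentifier' (deprecated alias, id -> name only)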
}, {\n name: 'PBES2Algorithms.encryptionScheme',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.encryptionScheme.oid',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encOid'\n }, {\n name: 'PBES2Algorithms.encryptionScheme.iv',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encIv'\n }]\n }]\n};\n\nvar pkcs12PbeParamsValidator = {\n name: 'pkcs-12PbeParams',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'pkcs-12PbeParams.salt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'salt'\n }, {\n name: 'pkcs-12PbeParams.iterations',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'iterations'\n }]\n};\n\n/**\n * Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo.\n *\n * PBES2Algorithms ALGORITHM-IDENTIFIER ::=\n * { {PBES2-params IDENTIFIED BY id-PBES2}, ...}\n *\n * id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}\n *\n * PBES2-params ::= SEQUENCE {\n * keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},\n * encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}\n * }\n *\n * PBES2-KDFs ALGORITHM-IDENTIFIER ::=\n * { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... }\n *\n * PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... }\n *\n * PBKDF2-params ::= SEQUENCE {\n * salt CHOICE {\n * specified OCTET STRING,\n * otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}\n * },\n * iterationCount INTEGER (1..MAX),\n * keyLength INTEGER (1..MAX) OPTIONAL,\n * prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1\n * }\n *\n * @param obj the ASN.1 PrivateKeyInfo object.\n * @param password the password to encrypt with.\n * @param options:\n * algorithm the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'.\n * count the iteration count to use.\n * saltSize the salt size to use.\n * prfAlgorithm the PRF message digest algorithm to use\n * ('sha1', 'sha224', 'sha256', 'sha384', 'sha512')\n *\n * @return the ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptPrivateKeyInfo = function(obj, password, options) {\n // set default options\n options = options || {};\n options.saltSize = options.saltSize || 8;\n options.count = options.count || 2048;\n options.algorithm = options.algorithm || 'aes128';\n options.prfAlgorithm = options.prfAlgorithm || 'sha1';\n\n // generate PBE params\n var salt = forge.random.getBytesSync(options.saltSize);\n var count = options.count;\n var countBytes = asn1.integerToDer(count);\n var dkLen;\n var encryptionAlgorithm;\n var encryptedData;\n if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') {\n // do PBES2\n var ivLen, encOid, cipherFn;\n switch(options.algorithm) {\n case 'aes128':\n dkLen = 16;\n ivLen = 16;\n encOid = oids['aes128-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes192':\n dkLen = 24;\n ivLen = 16;\n encOid = oids['aes192-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes256':\n dkLen = 32;\n ivLen = 16;\n encOid = oids['aes256-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'des':\n dkLen = 8;\n ivLen = 8;\n encOid = oids['desCBC'];\n cipherFn = forge.des.createEncryptionCipher;\n break;\n default:\n var error = new Error('Cannot encrypt private key. 
Unknown encryption algorithm.');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // get PRF message digest\n var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase();\n var md = prfAlgorithmToMessageDigest(prfAlgorithm);\n\n // encrypt private key using pbe SHA-1 and AES/DES\n var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md);\n var iv = forge.random.getBytesSync(ivLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(obj));\n cipher.finish();\n encryptedData = cipher.output.getBytes();\n\n // get PBKDF2-params\n var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm);\n\n encryptionAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pkcs5PBES2']).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // keyDerivationFunc\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()),\n // PBKDF2-params\n params\n ]),\n // encryptionScheme\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(encOid).getBytes()),\n // iv\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv)\n ])\n ])\n ]);\n } else if(options.algorithm === '3des') {\n // Do PKCS12 PBE\n dkLen = 24;\n\n var saltBytes = new forge.util.ByteBuffer(salt);\n var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen);\n var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen);\n var cipher = forge.des.createEncryptionCipher(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(obj));\n cipher.finish();\n encryptedData = cipher.output.getBytes();\n\n encryptionAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()),\n // pkcs-12PbeParams\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // salt\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),\n // iteration count\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n countBytes.getBytes())\n ])\n ]);\n } else {\n var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // EncryptedPrivateKeyInfo\n var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // encryptionAlgorithm\n encryptionAlgorithm,\n // encryptedData\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData)\n ]);\n return rval;\n};\n\n/**\n * Decrypts a ASN.1 PrivateKeyInfo object.\n *\n * @param obj the ASN.1 EncryptedPrivateKeyInfo object.\n * @param password the password to decrypt with.\n *\n * @return the ASN.1 PrivateKeyInfo on success, null on failure.\n */\npki.decryptPrivateKeyInfo = function(obj, password) {\n var rval = null;\n\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors)) {\n var error = new Error('Cannot read encrypted private key. 
' +\n 'ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n // get cipher\n var oid = asn1.derToOid(capture.encryptionOid);\n var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password);\n\n // get encrypted data\n var encrypted = forge.util.createBuffer(capture.encryptedData);\n\n cipher.update(encrypted);\n if(cipher.finish()) {\n rval = asn1.fromDer(cipher.output);\n }\n\n return rval;\n};\n\n/**\n * Converts a EncryptedPrivateKeyInfo to PEM format.\n *\n * @param epki the EncryptedPrivateKeyInfo.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted encrypted private key.\n */\npki.encryptedPrivateKeyToPem = function(epki, maxline) {\n // convert to DER, then PEM-encode\n var msg = {\n type: 'ENCRYPTED PRIVATE KEY',\n body: asn1.toDer(epki).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption\n * is not performed.\n *\n * @param pem the EncryptedPrivateKeyInfo in PEM-format.\n *\n * @return the ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptedPrivateKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'ENCRYPTED PRIVATE KEY') {\n var error = new Error('Could not convert encrypted private key from PEM; ' +\n 'PEM header type is \"ENCRYPTED PRIVATE KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert encrypted private key from PEM; ' +\n 'PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n return asn1.fromDer(msg.body);\n};\n\n/**\n * Encrypts an RSA private key. By default, the key will be wrapped in\n * a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo.\n * This is the standard, preferred way to encrypt a private key.\n *\n * To produce a non-standard PEM-encrypted private key that uses encapsulated\n * headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL\n * private key encryption), set the 'legacy' option to true. Note: Using this\n * option will cause the iteration count to be forced to 1.\n *\n * Note: The 'des' algorithm is supported, but it is not considered to be\n * secure because it only uses a single 56-bit key. 
If possible, it is highly\n * recommended that a different algorithm be used.\n *\n * @param rsaKey the RSA key to encrypt.\n * @param password the password to use.\n * @param options:\n * algorithm: the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des', 'des').\n * count: the iteration count to use.\n * saltSize: the salt size to use.\n * legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated\n * headers (DEK-Info) private key.\n *\n * @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptRsaPrivateKey = function(rsaKey, password, options) {\n // standard PKCS#8\n options = options || {};\n if(!options.legacy) {\n // encrypt PrivateKeyInfo\n var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey));\n rval = pki.encryptPrivateKeyInfo(rval, password, options);\n return pki.encryptedPrivateKeyToPem(rval);\n }\n\n // legacy non-PKCS#8\n var algorithm;\n var iv;\n var dkLen;\n var cipherFn;\n switch(options.algorithm) {\n case 'aes128':\n algorithm = 'AES-128-CBC';\n dkLen = 16;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes192':\n algorithm = 'AES-192-CBC';\n dkLen = 24;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes256':\n algorithm = 'AES-256-CBC';\n dkLen = 32;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case '3des':\n algorithm = 'DES-EDE3-CBC';\n dkLen = 24;\n iv = forge.random.getBytesSync(8);\n cipherFn = forge.des.createEncryptionCipher;\n break;\n case 'des':\n algorithm = 'DES-CBC';\n dkLen = 8;\n iv = forge.random.getBytesSync(8);\n cipherFn = forge.des.createEncryptionCipher;\n break;\n default:\n var error = new Error('Could not encrypt RSA private key; unsupported ' +\n 'encryption algorithm \"' + options.algorithm + '\".');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // encrypt private key using OpenSSL legacy key derivation\n var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey)));\n cipher.finish();\n\n var msg = {\n type: 'RSA PRIVATE KEY',\n procType: {\n version: '4',\n type: 'ENCRYPTED'\n },\n dekInfo: {\n algorithm: algorithm,\n parameters: forge.util.bytesToHex(iv).toUpperCase()\n },\n body: cipher.output.getBytes()\n };\n return forge.pem.encode(msg);\n};\n\n/**\n * Decrypts an RSA private key.\n *\n * @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt.\n * @param password the password to use.\n *\n * @return the RSA key on success, null on failure.\n */\npki.decryptRsaPrivateKey = function(pem, password) {\n var rval = null;\n\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'ENCRYPTED PRIVATE KEY' &&\n msg.type !== 'PRIVATE KEY' &&\n msg.type !== 'RSA PRIVATE KEY') {\n var error = new Error('Could not convert private key from PEM; PEM header type ' +\n 'is not \"ENCRYPTED PRIVATE KEY\", \"PRIVATE KEY\", or \"RSA PRIVATE KEY\".');\n error.headerType = error;\n throw error;\n }\n\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n var dkLen;\n var cipherFn;\n switch(msg.dekInfo.algorithm) {\n case 'DES-CBC':\n dkLen = 8;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'DES-EDE3-CBC':\n dkLen = 24;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'AES-128-CBC':\n dkLen = 16;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'AES-192-CBC':\n 
dkLen = 24;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'AES-256-CBC':\n dkLen = 32;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'RC2-40-CBC':\n dkLen = 5;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 40);\n };\n break;\n case 'RC2-64-CBC':\n dkLen = 8;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 64);\n };\n break;\n case 'RC2-128-CBC':\n dkLen = 16;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 128);\n };\n break;\n default:\n var error = new Error('Could not decrypt private key; unsupported ' +\n 'encryption algorithm \"' + msg.dekInfo.algorithm + '\".');\n error.algorithm = msg.dekInfo.algorithm;\n throw error;\n }\n\n // use OpenSSL legacy key derivation\n var iv = forge.util.hexToBytes(msg.dekInfo.parameters);\n var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(forge.util.createBuffer(msg.body));\n if(cipher.finish()) {\n rval = cipher.output.getBytes();\n } else {\n return rval;\n }\n } else {\n rval = msg.body;\n }\n\n if(msg.type === 'ENCRYPTED PRIVATE KEY') {\n rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password);\n } else {\n // decryption already performed above\n rval = asn1.fromDer(rval);\n }\n\n if(rval !== null) {\n rval = pki.privateKeyFromAsn1(rval);\n }\n\n return rval;\n};\n\n/**\n * Derives a PKCS#12 key.\n *\n * @param password the password to derive the key material from, null or\n * undefined for none.\n * @param salt the salt, as a ByteBuffer, to use.\n * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC).\n * @param iter the iteration count.\n * @param n the number of bytes to derive from the password.\n * @param md the message digest to use, defaults to SHA-1.\n *\n * @return a ByteBuffer with the bytes derived from the password.\n */\npki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) {\n var j, l;\n\n if(typeof md === 'undefined' || md === null) {\n if(!('sha1' in forge.md)) {\n throw new Error('\"sha1\" hash algorithm unavailable.');\n }\n md = forge.md.sha1.create();\n }\n\n var u = md.digestLength;\n var v = md.blockLength;\n var result = new forge.util.ByteBuffer();\n\n /* Convert password to Unicode byte buffer + trailing 0-byte. */\n var passBuf = new forge.util.ByteBuffer();\n if(password !== null && password !== undefined) {\n for(l = 0; l < password.length; l++) {\n passBuf.putInt16(password.charCodeAt(l));\n }\n passBuf.putInt16(0);\n }\n\n /* Length of salt and password in BYTES. */\n var p = passBuf.length();\n var s = salt.length();\n\n /* 1. Construct a string, D (the \"diversifier\"), by concatenating\n v copies of ID. */\n var D = new forge.util.ByteBuffer();\n D.fillWithByte(id, v);\n\n /* 2. Concatenate copies of the salt together to create a string S of length\n v * ceil(s / v) bytes (the final copy of the salt may be trunacted\n to create S).\n Note that if the salt is the empty string, then so is S. */\n var Slen = v * Math.ceil(s / v);\n var S = new forge.util.ByteBuffer();\n for(l = 0; l < Slen; l++) {\n S.putByte(salt.at(l % s));\n }\n\n /* 3. Concatenate copies of the password together to create a string P of\n length v * ceil(p / v) bytes (the final copy of the password may be\n truncated to create P).\n Note that if the password is the empty string, then so is P. 
*/\n var Plen = v * Math.ceil(p / v);\n var P = new forge.util.ByteBuffer();\n for(l = 0; l < Plen; l++) {\n P.putByte(passBuf.at(l % p));\n }\n\n /* 4. Set I=S||P to be the concatenation of S and P. */\n var I = S;\n I.putBuffer(P);\n\n /* 5. Set c=ceil(n / u). */\n var c = Math.ceil(n / u);\n\n /* 6. For i=1, 2, ..., c, do the following: */\n for(var i = 1; i <= c; i++) {\n /* a) Set Ai=H^r(D||I). (l.e. the rth hash of D||I, H(H(H(...H(D||I)))) */\n var buf = new forge.util.ByteBuffer();\n buf.putBytes(D.bytes());\n buf.putBytes(I.bytes());\n for(var round = 0; round < iter; round++) {\n md.start();\n md.update(buf.getBytes());\n buf = md.digest();\n }\n\n /* b) Concatenate copies of Ai to create a string B of length v bytes (the\n final copy of Ai may be truncated to create B). */\n var B = new forge.util.ByteBuffer();\n for(l = 0; l < v; l++) {\n B.putByte(buf.at(l % u));\n }\n\n /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks,\n where k=ceil(s / v) + ceil(p / v), modify I by setting\n Ij=(Ij+B+1) mod 2v for each j. */\n var k = Math.ceil(s / v) + Math.ceil(p / v);\n var Inew = new forge.util.ByteBuffer();\n for(j = 0; j < k; j++) {\n var chunk = new forge.util.ByteBuffer(I.getBytes(v));\n var x = 0x1ff;\n for(l = B.length() - 1; l >= 0; l--) {\n x = x >> 8;\n x += B.at(l) + chunk.at(l);\n chunk.setAt(l, x & 0xff);\n }\n Inew.putBuffer(chunk);\n }\n I = Inew;\n\n /* Add Ai to A. */\n result.putBuffer(buf);\n }\n\n result.truncate(result.length() - n);\n return result;\n};\n\n/**\n * Get new Forge cipher object instance.\n *\n * @param oid the OID (in string notation).\n * @param params the ASN.1 params object.\n * @param password the password to decrypt with.\n *\n * @return new cipher object instance.\n */\npki.pbe.getCipher = function(oid, params, password) {\n switch(oid) {\n case pki.oids['pkcs5PBES2']:\n return pki.pbe.getCipherForPBES2(oid, params, password);\n\n case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:\n case pki.oids['pbewithSHAAnd40BitRC2-CBC']:\n return pki.pbe.getCipherForPKCS12PBE(oid, params, password);\n\n default:\n var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.');\n error.oid = oid;\n error.supportedOids = [\n 'pkcs5PBES2',\n 'pbeWithSHAAnd3-KeyTripleDES-CBC',\n 'pbewithSHAAnd40BitRC2-CBC'\n ];\n throw error;\n }\n};\n\n/**\n * Get new Forge cipher object instance according to PBES2 params block.\n *\n * The returned cipher instance is already started using the IV\n * from PBES2 parameter block.\n *\n * @param oid the PKCS#5 PBKDF2 OID (in string notation).\n * @param params the ASN.1 PBES2-params object.\n * @param password the password to decrypt with.\n *\n * @return new cipher object instance.\n */\npki.pbe.getCipherForPBES2 = function(oid, params, password) {\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) {\n var error = new Error('Cannot read password-based-encryption algorithm ' +\n 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n // check oids\n oid = asn1.derToOid(capture.kdfOid);\n if(oid !== pki.oids['pkcs5PBKDF2']) {\n var error = new Error('Cannot read encrypted private key. 
' +\n 'Unsupported key derivation function OID.');\n error.oid = oid;\n error.supportedOids = ['pkcs5PBKDF2'];\n throw error;\n }\n oid = asn1.derToOid(capture.encOid);\n if(oid !== pki.oids['aes128-CBC'] &&\n oid !== pki.oids['aes192-CBC'] &&\n oid !== pki.oids['aes256-CBC'] &&\n oid !== pki.oids['des-EDE3-CBC'] &&\n oid !== pki.oids['desCBC']) {\n var error = new Error('Cannot read encrypted private key. ' +\n 'Unsupported encryption scheme OID.');\n error.oid = oid;\n error.supportedOids = [\n 'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC'];\n throw error;\n }\n\n // set PBE params\n var salt = capture.kdfSalt;\n var count = forge.util.createBuffer(capture.kdfIterationCount);\n count = count.getInt(count.length() << 3);\n var dkLen;\n var cipherFn;\n switch(pki.oids[oid]) {\n case 'aes128-CBC':\n dkLen = 16;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'aes192-CBC':\n dkLen = 24;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'aes256-CBC':\n dkLen = 32;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'des-EDE3-CBC':\n dkLen = 24;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'desCBC':\n dkLen = 8;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n }\n\n // get PRF message digest\n var md = prfOidToMessageDigest(capture.prfOid);\n\n // decrypt private key using pbe with chosen PRF and AES/DES\n var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md);\n var iv = capture.encIv;\n var cipher = cipherFn(dk);\n cipher.start(iv);\n\n return cipher;\n};\n\n/**\n * Get new Forge cipher object instance for PKCS#12 PBE.\n *\n * The returned cipher instance is already started using the key & IV\n * derived from the provided password and PKCS#12 PBE salt.\n *\n * @param oid The PKCS#12 PBE OID (in string notation).\n * @param params The ASN.1 PKCS#12 PBE-params object.\n * @param password The password to decrypt with.\n *\n * @return the new cipher object instance.\n */\npki.pbe.getCipherForPKCS12PBE = function(oid, params, password) {\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) {\n var error = new Error('Cannot read password-based-encryption algorithm ' +\n 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n var salt = forge.util.createBuffer(capture.salt);\n var count = forge.util.createBuffer(capture.iterations);\n count = count.getInt(count.length() << 3);\n\n var dkLen, dIvLen, cipherFn;\n switch(oid) {\n case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:\n dkLen = 24;\n dIvLen = 8;\n cipherFn = forge.des.startDecrypting;\n break;\n\n case pki.oids['pbewithSHAAnd40BitRC2-CBC']:\n dkLen = 5;\n dIvLen = 8;\n cipherFn = function(key, iv) {\n var cipher = forge.rc2.createDecryptionCipher(key, 40);\n cipher.start(iv, null);\n return cipher;\n };\n break;\n\n default:\n var error = new Error('Cannot read PKCS #12 PBE data block. 
Unsupported OID.');\n error.oid = oid;\n throw error;\n }\n\n // get PRF message digest\n var md = prfOidToMessageDigest(capture.prfOid);\n var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md);\n md.start();\n var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md);\n\n return cipherFn(key, iv);\n};\n\n/**\n * OpenSSL's legacy key derivation function.\n *\n * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html\n *\n * @param password the password to derive the key from.\n * @param salt the salt to use, null for none.\n * @param dkLen the number of bytes needed for the derived key.\n * @param [options] the options to use:\n * [md] an optional message digest object to use.\n */\npki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) {\n if(typeof md === 'undefined' || md === null) {\n if(!('md5' in forge.md)) {\n throw new Error('\"md5\" hash algorithm unavailable.');\n }\n md = forge.md.md5.create();\n }\n if(salt === null) {\n salt = '';\n }\n var digests = [hash(md, password + salt)];\n for(var length = 16, i = 1; length < dkLen; ++i, length += 16) {\n digests.push(hash(md, digests[i - 1] + password + salt));\n }\n return digests.join('').substr(0, dkLen);\n};\n\nfunction hash(md, bytes) {\n return md.start().update(bytes).digest().getBytes();\n}\n\nfunction prfOidToMessageDigest(prfOid) {\n // get PRF algorithm, default to SHA-1\n var prfAlgorithm;\n if(!prfOid) {\n prfAlgorithm = 'hmacWithSHA1';\n } else {\n prfAlgorithm = pki.oids[asn1.derToOid(prfOid)];\n if(!prfAlgorithm) {\n var error = new Error('Unsupported PRF OID.');\n error.oid = prfOid;\n error.supported = [\n 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',\n 'hmacWithSHA512'];\n throw error;\n }\n }\n return prfAlgorithmToMessageDigest(prfAlgorithm);\n}\n\nfunction prfAlgorithmToMessageDigest(prfAlgorithm) {\n var factory = forge.md;\n switch(prfAlgorithm) {\n case 'hmacWithSHA224':\n factory = forge.md.sha512;\n case 'hmacWithSHA1':\n case 'hmacWithSHA256':\n case 'hmacWithSHA384':\n case 'hmacWithSHA512':\n prfAlgorithm = prfAlgorithm.substr(8).toLowerCase();\n break;\n default:\n var error = new Error('Unsupported PRF algorithm.');\n error.algorithm = prfAlgorithm;\n error.supported = [\n 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',\n 'hmacWithSHA512'];\n throw error;\n }\n if(!factory || !(prfAlgorithm in factory)) {\n throw new Error('Unknown hash algorithm: ' + prfAlgorithm);\n }\n return factory[prfAlgorithm].create();\n}\n\nfunction createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) {\n var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // salt\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),\n // iteration count\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n countBytes.getBytes())\n ]);\n // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm\n if(prfAlgorithm !== 'hmacWithSHA1') {\n params.value.push(\n // key length\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(dkLen.toString(16))),\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n }\n return params;\n}\n","/**\n * Password-Based Key-Derivation Function #2 implementation.\n 
*\n * See RFC 2898 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./hmac');\nrequire('./md');\nrequire('./util');\n\nvar pkcs5 = forge.pkcs5 = forge.pkcs5 || {};\n\nvar crypto;\nif(forge.util.isNodejs && !forge.options.usePureJavaScript) {\n crypto = require('crypto');\n}\n\n/**\n * Derives a key from a password.\n *\n * @param p the password as a binary-encoded string of bytes.\n * @param s the salt as a binary-encoded string of bytes.\n * @param c the iteration count, a positive integer.\n * @param dkLen the intended length, in bytes, of the derived key,\n * (max: 2^32 - 1) * hash length of the PRF.\n * @param [md] the message digest (or algorithm identifier as a string) to use\n * in the PRF, defaults to SHA-1.\n * @param [callback(err, key)] presence triggers asynchronous version, called\n * once the operation completes.\n *\n * @return the derived key, as a binary-encoded string of bytes, for the\n * synchronous version (if no callback is specified).\n */\nmodule.exports = forge.pbkdf2 = pkcs5.pbkdf2 = function(\n p, s, c, dkLen, md, callback) {\n if(typeof md === 'function') {\n callback = md;\n md = null;\n }\n\n // use native implementation if possible and not disabled, note that\n // some node versions only support SHA-1, others allow digest to be changed\n if(forge.util.isNodejs && !forge.options.usePureJavaScript &&\n crypto.pbkdf2 && (md === null || typeof md !== 'object') &&\n (crypto.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) {\n if(typeof md !== 'string') {\n // default prf to SHA-1\n md = 'sha1';\n }\n p = Buffer.from(p, 'binary');\n s = Buffer.from(s, 'binary');\n if(!callback) {\n if(crypto.pbkdf2Sync.length === 4) {\n return crypto.pbkdf2Sync(p, s, c, dkLen).toString('binary');\n }\n return crypto.pbkdf2Sync(p, s, c, dkLen, md).toString('binary');\n }\n if(crypto.pbkdf2Sync.length === 4) {\n return crypto.pbkdf2(p, s, c, dkLen, function(err, key) {\n if(err) {\n return callback(err);\n }\n callback(null, key.toString('binary'));\n });\n }\n return crypto.pbkdf2(p, s, c, dkLen, md, function(err, key) {\n if(err) {\n return callback(err);\n }\n callback(null, key.toString('binary'));\n });\n }\n\n if(typeof md === 'undefined' || md === null) {\n // default prf to SHA-1\n md = 'sha1';\n }\n if(typeof md === 'string') {\n if(!(md in forge.md.algorithms)) {\n throw new Error('Unknown hash algorithm: ' + md);\n }\n md = forge.md[md].create();\n }\n\n var hLen = md.digestLength;\n\n /* 1. If dkLen > (2^32 - 1) * hLen, output \"derived key too long\" and\n stop. */\n if(dkLen > (0xFFFFFFFF * hLen)) {\n var err = new Error('Derived key is too long.');\n if(callback) {\n return callback(err);\n }\n throw err;\n }\n\n /* 2. Let len be the number of hLen-octet blocks in the derived key,\n rounding up, and let r be the number of octets in the last\n block:\n\n len = CEIL(dkLen / hLen),\n r = dkLen - (len - 1) * hLen. */\n var len = Math.ceil(dkLen / hLen);\n var r = dkLen - (len - 1) * hLen;\n\n /* 3. 
For each block of the derived key apply the function F defined\n below to the password P, the salt S, the iteration count c, and\n the block index to compute the block:\n\n T_1 = F(P, S, c, 1),\n T_2 = F(P, S, c, 2),\n ...\n T_len = F(P, S, c, len),\n\n where the function F is defined as the exclusive-or sum of the\n first c iterates of the underlying pseudorandom function PRF\n applied to the password P and the concatenation of the salt S\n and the block index i:\n\n F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c\n\n where\n\n u_1 = PRF(P, S || INT(i)),\n u_2 = PRF(P, u_1),\n ...\n u_c = PRF(P, u_{c-1}).\n\n Here, INT(i) is a four-octet encoding of the integer i, most\n significant octet first. */\n var prf = forge.hmac.create();\n prf.start(md, p);\n var dk = '';\n var xor, u_c, u_c1;\n\n // sync version\n if(!callback) {\n for(var i = 1; i <= len; ++i) {\n // PRF(P, S || INT(i)) (first iteration)\n prf.start(null, null);\n prf.update(s);\n prf.update(forge.util.int32ToBytes(i));\n xor = u_c1 = prf.digest().getBytes();\n\n // PRF(P, u_{c-1}) (other iterations)\n for(var j = 2; j <= c; ++j) {\n prf.start(null, null);\n prf.update(u_c1);\n u_c = prf.digest().getBytes();\n // F(p, s, c, i)\n xor = forge.util.xorBytes(xor, u_c, hLen);\n u_c1 = u_c;\n }\n\n /* 4. Concatenate the blocks and extract the first dkLen octets to\n produce a derived key DK:\n\n DK = T_1 || T_2 || ... || T_len<0..r-1> */\n dk += (i < len) ? xor : xor.substr(0, r);\n }\n /* 5. Output the derived key DK. */\n return dk;\n }\n\n // async version\n var i = 1, j;\n function outer() {\n if(i > len) {\n // done\n return callback(null, dk);\n }\n\n // PRF(P, S || INT(i)) (first iteration)\n prf.start(null, null);\n prf.update(s);\n prf.update(forge.util.int32ToBytes(i));\n xor = u_c1 = prf.digest().getBytes();\n\n // PRF(P, u_{c-1}) (other iterations)\n j = 2;\n inner();\n }\n\n function inner() {\n if(j <= c) {\n prf.start(null, null);\n prf.update(u_c1);\n u_c = prf.digest().getBytes();\n // F(p, s, c, i)\n xor = forge.util.xorBytes(xor, u_c, hLen);\n u_c1 = u_c;\n ++j;\n return forge.util.setImmediate(inner);\n }\n\n /* 4. Concatenate the blocks and extract the first dkLen octets to\n produce a derived key DK:\n\n DK = T_1 || T_2 || ... || T_len<0..r-1> */\n dk += (i < len) ? 
xor : xor.substr(0, r);\n\n ++i;\n outer();\n }\n\n outer();\n};\n","/**\n * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms.\n *\n * See: RFC 1421.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2013-2014 Digital Bazaar, Inc.\n *\n * A Forge PEM object has the following fields:\n *\n * type: identifies the type of message (eg: \"RSA PRIVATE KEY\").\n *\n * procType: identifies the type of processing performed on the message,\n * it has two subfields: version and type, eg: 4,ENCRYPTED.\n *\n * contentDomain: identifies the type of content in the message, typically\n * only uses the value: \"RFC822\".\n *\n * dekInfo: identifies the message encryption algorithm and mode and includes\n * any parameters for the algorithm, it has two subfields: algorithm and\n * parameters, eg: DES-CBC,F8143EDE5960C597.\n *\n * headers: contains all other PEM encapsulated headers -- where order is\n * significant (for pairing data like recipient ID + key info).\n *\n * body: the binary-encoded body.\n */\nvar forge = require('./forge');\nrequire('./util');\n\n// shortcut for pem API\nvar pem = module.exports = forge.pem = forge.pem || {};\n\n/**\n * Encodes (serializes) the given PEM object.\n *\n * @param msg the PEM message object to encode.\n * @param options the options to use:\n * maxline the maximum characters per line for the body, (default: 64).\n *\n * @return the PEM-formatted string.\n */\npem.encode = function(msg, options) {\n options = options || {};\n var rval = '-----BEGIN ' + msg.type + '-----\\r\\n';\n\n // encode special headers\n var header;\n if(msg.procType) {\n header = {\n name: 'Proc-Type',\n values: [String(msg.procType.version), msg.procType.type]\n };\n rval += foldHeader(header);\n }\n if(msg.contentDomain) {\n header = {name: 'Content-Domain', values: [msg.contentDomain]};\n rval += foldHeader(header);\n }\n if(msg.dekInfo) {\n header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]};\n if(msg.dekInfo.parameters) {\n header.values.push(msg.dekInfo.parameters);\n }\n rval += foldHeader(header);\n }\n\n if(msg.headers) {\n // encode all other headers\n for(var i = 0; i < msg.headers.length; ++i) {\n rval += foldHeader(msg.headers[i]);\n }\n }\n\n // terminate header\n if(msg.procType) {\n rval += '\\r\\n';\n }\n\n // add body\n rval += forge.util.encode64(msg.body, options.maxline || 64) + '\\r\\n';\n\n rval += '-----END ' + msg.type + '-----\\r\\n';\n return rval;\n};\n\n/**\n * Decodes (deserializes) all PEM messages found in the given string.\n *\n * @param str the PEM-formatted string to decode.\n *\n * @return the PEM message objects in an array.\n */\npem.decode = function(str) {\n var rval = [];\n\n // split string into PEM messages (be lenient w/EOF on BEGIN line)\n var rMessage = /\\s*-----BEGIN ([A-Z0-9- ]+)-----\\r?\\n?([\\x21-\\x7e\\s]+?(?:\\r?\\n\\r?\\n))?([:A-Za-z0-9+\\/=\\s]+?)-----END \\1-----/g;\n var rHeader = /([\\x21-\\x7e]+):\\s*([\\x21-\\x7e\\s^:]+)/;\n var rCRLF = /\\r?\\n/;\n var match;\n while(true) {\n match = rMessage.exec(str);\n if(!match) {\n break;\n }\n\n // accept \"NEW CERTIFICATE REQUEST\" as \"CERTIFICATE REQUEST\"\n // https://datatracker.ietf.org/doc/html/rfc7468#section-7\n var type = match[1];\n if(type === 'NEW CERTIFICATE REQUEST') {\n type = 'CERTIFICATE REQUEST';\n }\n\n var msg = {\n type: type,\n procType: null,\n contentDomain: null,\n dekInfo: null,\n headers: [],\n body: forge.util.decode64(match[3])\n };\n rval.push(msg);\n\n // no headers\n if(!match[2]) {\n continue;\n }\n\n // parse headers\n var 
lines = match[2].split(rCRLF);\n var li = 0;\n while(match && li < lines.length) {\n // get line, trim any rhs whitespace\n var line = lines[li].replace(/\\s+$/, '');\n\n // RFC2822 unfold any following folded lines\n for(var nl = li + 1; nl < lines.length; ++nl) {\n var next = lines[nl];\n if(!/\\s/.test(next[0])) {\n break;\n }\n line += next;\n li = nl;\n }\n\n // parse header\n match = line.match(rHeader);\n if(match) {\n var header = {name: match[1], values: []};\n var values = match[2].split(',');\n for(var vi = 0; vi < values.length; ++vi) {\n header.values.push(ltrim(values[vi]));\n }\n\n // Proc-Type must be the first header\n if(!msg.procType) {\n if(header.name !== 'Proc-Type') {\n throw new Error('Invalid PEM formatted message. The first ' +\n 'encapsulated header must be \"Proc-Type\".');\n } else if(header.values.length !== 2) {\n throw new Error('Invalid PEM formatted message. The \"Proc-Type\" ' +\n 'header must have two subfields.');\n }\n msg.procType = {version: values[0], type: values[1]};\n } else if(!msg.contentDomain && header.name === 'Content-Domain') {\n // special-case Content-Domain\n msg.contentDomain = values[0] || '';\n } else if(!msg.dekInfo && header.name === 'DEK-Info') {\n // special-case DEK-Info\n if(header.values.length === 0) {\n throw new Error('Invalid PEM formatted message. The \"DEK-Info\" ' +\n 'header must have at least one subfield.');\n }\n msg.dekInfo = {algorithm: values[0], parameters: values[1] || null};\n } else {\n msg.headers.push(header);\n }\n }\n\n ++li;\n }\n\n if(msg.procType === 'ENCRYPTED' && !msg.dekInfo) {\n throw new Error('Invalid PEM formatted message. The \"DEK-Info\" ' +\n 'header must be present if \"Proc-Type\" is \"ENCRYPTED\".');\n }\n }\n\n if(rval.length === 0) {\n throw new Error('Invalid PEM formatted message.');\n }\n\n return rval;\n};\n\nfunction foldHeader(header) {\n var rval = header.name + ': ';\n\n // ensure values with CRLF are folded\n var values = [];\n var insertSpace = function(match, $1) {\n return ' ' + $1;\n };\n for(var i = 0; i < header.values.length; ++i) {\n values.push(header.values[i].replace(/^(\\S+\\r\\n)/, insertSpace));\n }\n rval += values.join(',') + '\\r\\n';\n\n // do folding\n var length = 0;\n var candidate = -1;\n for(var i = 0; i < rval.length; ++i, ++length) {\n if(length > 65 && candidate !== -1) {\n var insert = rval[candidate];\n if(insert === ',') {\n ++candidate;\n rval = rval.substr(0, candidate) + '\\r\\n ' + rval.substr(candidate);\n } else {\n rval = rval.substr(0, candidate) +\n '\\r\\n' + insert + rval.substr(candidate + 1);\n }\n length = (i - candidate - 1);\n candidate = -1;\n ++i;\n } else if(rval[i] === ' ' || rval[i] === '\\t' || rval[i] === ',') {\n candidate = i;\n }\n }\n\n return rval;\n}\n\nfunction ltrim(str) {\n return str.replace(/^\\s+/, '');\n}\n","/**\n * Partial implementation of PKCS#1 v2.2: RSA-OEAP\n *\n * Modified but based on the following MIT and BSD licensed code:\n *\n * https://github.com/kjur/jsjws/blob/master/rsa.js:\n *\n * The 'jsjws'(JSON Web Signature JavaScript Library) License\n *\n * Copyright (c) 2012 Kenji Urushima\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to 
do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n *\n * http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain:\n *\n * RSAES-OAEP.js\n * $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $\n * JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002)\n * Copyright (C) Ellis Pritchard, Guardian Unlimited 2003.\n * Contact: ellis@nukinetics.com\n * Distributed under the BSD License.\n *\n * Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125\n *\n * @author Evan Jones (http://evanjones.ca/)\n * @author Dave Longley\n *\n * Copyright (c) 2013-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\nrequire('./random');\nrequire('./sha1');\n\n// shortcut for PKCS#1 API\nvar pkcs1 = module.exports = forge.pkcs1 = forge.pkcs1 || {};\n\n/**\n * Encode the given RSAES-OAEP message (M) using key, with optional label (L)\n * and seed.\n *\n * This method does not perform RSA encryption, it only encodes the message\n * using RSAES-OAEP.\n *\n * @param key the RSA key to use.\n * @param message the message to encode.\n * @param options the options to use:\n * label an optional label to use.\n * seed the seed to use.\n * md the message digest object to use, undefined for SHA-1.\n * mgf1 optional mgf1 parameters:\n * md the message digest object to use for MGF1.\n *\n * @return the encoded message bytes.\n */\npkcs1.encode_rsa_oaep = function(key, message, options) {\n // parse arguments\n var label;\n var seed;\n var md;\n var mgf1Md;\n // legacy args (label, seed, md)\n if(typeof options === 'string') {\n label = options;\n seed = arguments[3] || undefined;\n md = arguments[4] || undefined;\n } else if(options) {\n label = options.label || undefined;\n seed = options.seed || undefined;\n md = options.md || undefined;\n if(options.mgf1 && options.mgf1.md) {\n mgf1Md = options.mgf1.md;\n }\n }\n\n // default OAEP to SHA-1 message digest\n if(!md) {\n md = forge.md.sha1.create();\n } else {\n md.start();\n }\n\n // default MGF-1 to same as OAEP\n if(!mgf1Md) {\n mgf1Md = md;\n }\n\n // compute length in bytes and check output\n var keyLength = Math.ceil(key.n.bitLength() / 8);\n var maxLength = keyLength - 2 * md.digestLength - 2;\n if(message.length > maxLength) {\n var error = new Error('RSAES-OAEP input message length is too long.');\n error.length = message.length;\n error.maxLength = maxLength;\n throw error;\n }\n\n if(!label) {\n label = '';\n }\n md.update(label, 'raw');\n var lHash = md.digest();\n\n var PS = '';\n var PS_length = maxLength - message.length;\n for(var i = 0; i < PS_length; i++) {\n PS += '\\x00';\n }\n\n var DB = lHash.getBytes() + PS + '\\x01' + message;\n\n if(!seed) {\n seed = forge.random.getBytes(md.digestLength);\n } else if(seed.length !== md.digestLength) {\n var error = new Error('Invalid RSAES-OAEP seed. 
The seed length must ' +\n 'match the digest length.');\n error.seedLength = seed.length;\n error.digestLength = md.digestLength;\n throw error;\n }\n\n var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);\n var maskedDB = forge.util.xorBytes(DB, dbMask, DB.length);\n\n var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);\n var maskedSeed = forge.util.xorBytes(seed, seedMask, seed.length);\n\n // return encoded message\n return '\\x00' + maskedSeed + maskedDB;\n};\n\n/**\n * Decode the given RSAES-OAEP encoded message (EM) using key, with optional\n * label (L).\n *\n * This method does not perform RSA decryption, it only decodes the message\n * using RSAES-OAEP.\n *\n * @param key the RSA key to use.\n * @param em the encoded message to decode.\n * @param options the options to use:\n * label an optional label to use.\n * md the message digest object to use for OAEP, undefined for SHA-1.\n * mgf1 optional mgf1 parameters:\n * md the message digest object to use for MGF1.\n *\n * @return the decoded message bytes.\n */\npkcs1.decode_rsa_oaep = function(key, em, options) {\n // parse args\n var label;\n var md;\n var mgf1Md;\n // legacy args\n if(typeof options === 'string') {\n label = options;\n md = arguments[3] || undefined;\n } else if(options) {\n label = options.label || undefined;\n md = options.md || undefined;\n if(options.mgf1 && options.mgf1.md) {\n mgf1Md = options.mgf1.md;\n }\n }\n\n // compute length in bytes\n var keyLength = Math.ceil(key.n.bitLength() / 8);\n\n if(em.length !== keyLength) {\n var error = new Error('RSAES-OAEP encoded message length is invalid.');\n error.length = em.length;\n error.expectedLength = keyLength;\n throw error;\n }\n\n // default OAEP to SHA-1 message digest\n if(md === undefined) {\n md = forge.md.sha1.create();\n } else {\n md.start();\n }\n\n // default MGF-1 to same as OAEP\n if(!mgf1Md) {\n mgf1Md = md;\n }\n\n if(keyLength < 2 * md.digestLength + 2) {\n throw new Error('RSAES-OAEP key is too short for the hash function.');\n }\n\n if(!label) {\n label = '';\n }\n md.update(label, 'raw');\n var lHash = md.digest().getBytes();\n\n // split the message into its parts\n var y = em.charAt(0);\n var maskedSeed = em.substring(1, md.digestLength + 1);\n var maskedDB = em.substring(1 + md.digestLength);\n\n var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);\n var seed = forge.util.xorBytes(maskedSeed, seedMask, maskedSeed.length);\n\n var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);\n var db = forge.util.xorBytes(maskedDB, dbMask, maskedDB.length);\n\n var lHashPrime = db.substring(0, md.digestLength);\n\n // constant time check that all values match what is expected\n var error = (y !== '\\x00');\n\n // constant time check lHash vs lHashPrime\n for(var i = 0; i < md.digestLength; ++i) {\n error |= (lHash.charAt(i) !== lHashPrime.charAt(i));\n }\n\n // \"constant time\" find the 0x1 byte separating the padding (zeros) from the\n // message\n // TODO: It must be possible to do this in a better/smarter way?\n var in_ps = 1;\n var index = md.digestLength;\n for(var j = md.digestLength; j < db.length; j++) {\n var code = db.charCodeAt(j);\n\n var is_0 = (code & 0x1) ^ 0x1;\n\n // non-zero if not 0 or 1 in the ps section\n var error_mask = in_ps ? 
0xfffe : 0x0000;\n error |= (code & error_mask);\n\n // latch in_ps to zero after we find 0x1\n in_ps = in_ps & is_0;\n index += in_ps;\n }\n\n if(error || db.charCodeAt(index) !== 0x1) {\n throw new Error('Invalid RSAES-OAEP padding.');\n }\n\n return db.substring(index + 1);\n};\n\nfunction rsa_mgf1(seed, maskLength, hash) {\n // default to SHA-1 message digest\n if(!hash) {\n hash = forge.md.sha1.create();\n }\n var t = '';\n var count = Math.ceil(maskLength / hash.digestLength);\n for(var i = 0; i < count; ++i) {\n var c = String.fromCharCode(\n (i >> 24) & 0xFF, (i >> 16) & 0xFF, (i >> 8) & 0xFF, i & 0xFF);\n hash.start();\n hash.update(seed + c);\n t += hash.digest().getBytes();\n }\n return t.substring(0, maskLength);\n}\n","/**\n * Javascript implementation of PKCS#12.\n *\n * @author Dave Longley\n * @author Stefan Siegl \n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * The ASN.1 representation of PKCS#12 is as follows\n * (see ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12-tc1.pdf for details)\n *\n * PFX ::= SEQUENCE {\n * version INTEGER {v3(3)}(v3,...),\n * authSafe ContentInfo,\n * macData MacData OPTIONAL\n * }\n *\n * MacData ::= SEQUENCE {\n * mac DigestInfo,\n * macSalt OCTET STRING,\n * iterations INTEGER DEFAULT 1\n * }\n * Note: The iterations default is for historical reasons and its use is\n * deprecated. A higher value, like 1024, is recommended.\n *\n * DigestInfo is defined in PKCS#7 as follows:\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n *\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. In the case of SHA1 there is none.\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * Digest ::= OCTET STRING\n *\n *\n * ContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL\n * }\n *\n * ContentType ::= OBJECT IDENTIFIER\n *\n * AuthenticatedSafe ::= SEQUENCE OF ContentInfo\n * -- Data if unencrypted\n * -- EncryptedData if password-encrypted\n * -- EnvelopedData if public key-encrypted\n *\n *\n * SafeContents ::= SEQUENCE OF SafeBag\n *\n * SafeBag ::= SEQUENCE {\n * bagId BAG-TYPE.&id ({PKCS12BagSet})\n * bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}),\n * bagAttributes SET OF PKCS12Attribute OPTIONAL\n * }\n *\n * PKCS12Attribute ::= SEQUENCE {\n * attrId ATTRIBUTE.&id ({PKCS12AttrSet}),\n * attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId})\n * } -- This type is compatible with the X.500 type 'Attribute'\n *\n * PKCS12AttrSet ATTRIBUTE ::= {\n * friendlyName | -- from PKCS #9\n * localKeyId, -- from PKCS #9\n * ... -- Other attributes are allowed\n * }\n *\n * CertBag ::= SEQUENCE {\n * certId BAG-TYPE.&id ({CertTypes}),\n * certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId})\n * }\n *\n * x509Certificate BAG-TYPE ::= {OCTET STRING IDENTIFIED BY {certTypes 1}}\n * -- DER-encoded X.509 certificate stored in OCTET STRING\n *\n * sdsiCertificate BAG-TYPE ::= {IA5String IDENTIFIED BY {certTypes 2}}\n * -- Base64-encoded SDSI certificate stored in IA5String\n *\n * CertTypes BAG-TYPE ::= {\n * x509Certificate |\n * sdsiCertificate,\n * ... 
-- For future extensions\n * }\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./hmac');\nrequire('./oids');\nrequire('./pkcs7asn1');\nrequire('./pbe');\nrequire('./random');\nrequire('./rsa');\nrequire('./sha1');\nrequire('./util');\nrequire('./x509');\n\n// shortcut for asn.1 & PKI API\nvar asn1 = forge.asn1;\nvar pki = forge.pki;\n\n// shortcut for PKCS#12 API\nvar p12 = module.exports = forge.pkcs12 = forge.pkcs12 || {};\n\nvar contentInfoValidator = {\n name: 'ContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE, // a ContentInfo\n constructed: true,\n value: [{\n name: 'ContentInfo.contentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'ContentInfo.content',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n constructed: true,\n captureAsn1: 'content'\n }]\n};\n\nvar pfxValidator = {\n name: 'PFX',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PFX.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n },\n contentInfoValidator, {\n name: 'PFX.macData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'mac',\n value: [{\n name: 'PFX.macData.mac',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE, // DigestInfo\n constructed: true,\n value: [{\n name: 'PFX.macData.mac.digestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE, // DigestAlgorithmIdentifier\n constructed: true,\n value: [{\n name: 'PFX.macData.mac.digestAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'macAlgorithm'\n }, {\n name: 'PFX.macData.mac.digestAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n captureAsn1: 'macAlgorithmParameters'\n }]\n }, {\n name: 'PFX.macData.mac.digest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'macDigest'\n }]\n }, {\n name: 'PFX.macData.macSalt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'macSalt'\n }, {\n name: 'PFX.macData.iterations',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n optional: true,\n capture: 'macIterations'\n }]\n }]\n};\n\nvar safeBagValidator = {\n name: 'SafeBag',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SafeBag.bagId',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'bagId'\n }, {\n name: 'SafeBag.bagValue',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n constructed: true,\n captureAsn1: 'bagValue'\n }, {\n name: 'SafeBag.bagAttributes',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n optional: true,\n capture: 'bagAttributes'\n }]\n};\n\nvar attributeValidator = {\n name: 'Attribute',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Attribute.attrId',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'oid'\n }, {\n name: 'Attribute.attrValues',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n capture: 'values'\n }]\n};\n\nvar certBagValidator = {\n name: 'CertBag',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 
'CertBag.certId',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certId'\n }, {\n name: 'CertBag.certValue',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n constructed: true,\n /* So far we only support X.509 certificates (which are wrapped in\n an OCTET STRING, hence hard code that here). */\n value: [{\n name: 'CertBag.certValue[0]',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.OCTETSTRING,\n constructed: false,\n capture: 'cert'\n }]\n }]\n};\n\n/**\n * Search SafeContents structure for bags with matching attributes.\n *\n * The search can optionally be narrowed by a certain bag type.\n *\n * @param safeContents the SafeContents structure to search in.\n * @param attrName the name of the attribute to compare against.\n * @param attrValue the attribute value to search for.\n * @param [bagType] bag type to narrow search by.\n *\n * @return an array of matching bags.\n */\nfunction _getBagsByAttribute(safeContents, attrName, attrValue, bagType) {\n var result = [];\n\n for(var i = 0; i < safeContents.length; i++) {\n for(var j = 0; j < safeContents[i].safeBags.length; j++) {\n var bag = safeContents[i].safeBags[j];\n if(bagType !== undefined && bag.type !== bagType) {\n continue;\n }\n // only filter by bag type, no attribute specified\n if(attrName === null) {\n result.push(bag);\n continue;\n }\n if(bag.attributes[attrName] !== undefined &&\n bag.attributes[attrName].indexOf(attrValue) >= 0) {\n result.push(bag);\n }\n }\n }\n\n return result;\n}\n\n/**\n * Converts a PKCS#12 PFX in ASN.1 notation into a PFX object.\n *\n * @param obj The PKCS#12 PFX in ASN.1 notation.\n * @param strict true to use strict DER decoding, false not to (default: true).\n * @param {String} password Password to decrypt with (optional).\n *\n * @return PKCS#12 PFX object.\n */\np12.pkcs12FromAsn1 = function(obj, strict, password) {\n // handle args\n if(typeof strict === 'string') {\n password = strict;\n strict = true;\n } else if(strict === undefined) {\n strict = true;\n }\n\n // validate PFX and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, pfxValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#12 PFX. 
' +\n 'ASN.1 object is not an PKCS#12 PFX.');\n error.errors = error;\n throw error;\n }\n\n var pfx = {\n version: capture.version.charCodeAt(0),\n safeContents: [],\n\n /**\n * Gets bags with matching attributes.\n *\n * @param filter the attributes to filter by:\n * [localKeyId] the localKeyId to search for.\n * [localKeyIdHex] the localKeyId in hex to search for.\n * [friendlyName] the friendly name to search for.\n * [bagType] bag type to narrow each attribute search by.\n *\n * @return a map of attribute type to an array of matching bags or, if no\n * attribute was given but a bag type, the map key will be the\n * bag type.\n */\n getBags: function(filter) {\n var rval = {};\n\n var localKeyId;\n if('localKeyId' in filter) {\n localKeyId = filter.localKeyId;\n } else if('localKeyIdHex' in filter) {\n localKeyId = forge.util.hexToBytes(filter.localKeyIdHex);\n }\n\n // filter on bagType only\n if(localKeyId === undefined && !('friendlyName' in filter) &&\n 'bagType' in filter) {\n rval[filter.bagType] = _getBagsByAttribute(\n pfx.safeContents, null, null, filter.bagType);\n }\n\n if(localKeyId !== undefined) {\n rval.localKeyId = _getBagsByAttribute(\n pfx.safeContents, 'localKeyId',\n localKeyId, filter.bagType);\n }\n if('friendlyName' in filter) {\n rval.friendlyName = _getBagsByAttribute(\n pfx.safeContents, 'friendlyName',\n filter.friendlyName, filter.bagType);\n }\n\n return rval;\n },\n\n /**\n * DEPRECATED: use getBags() instead.\n *\n * Get bags with matching friendlyName attribute.\n *\n * @param friendlyName the friendly name to search for.\n * @param [bagType] bag type to narrow search by.\n *\n * @return an array of bags with matching friendlyName attribute.\n */\n getBagsByFriendlyName: function(friendlyName, bagType) {\n return _getBagsByAttribute(\n pfx.safeContents, 'friendlyName', friendlyName, bagType);\n },\n\n /**\n * DEPRECATED: use getBags() instead.\n *\n * Get bags with matching localKeyId attribute.\n *\n * @param localKeyId the localKeyId to search for.\n * @param [bagType] bag type to narrow search by.\n *\n * @return an array of bags with matching localKeyId attribute.\n */\n getBagsByLocalKeyId: function(localKeyId, bagType) {\n return _getBagsByAttribute(\n pfx.safeContents, 'localKeyId', localKeyId, bagType);\n }\n };\n\n if(capture.version.charCodeAt(0) !== 3) {\n var error = new Error('PKCS#12 PFX of version other than 3 not supported.');\n error.version = capture.version.charCodeAt(0);\n throw error;\n }\n\n if(asn1.derToOid(capture.contentType) !== pki.oids.data) {\n var error = new Error('Only PKCS#12 PFX in password integrity mode supported.');\n error.oid = asn1.derToOid(capture.contentType);\n throw error;\n }\n\n var data = capture.content.value[0];\n if(data.tagClass !== asn1.Class.UNIVERSAL ||\n data.type !== asn1.Type.OCTETSTRING) {\n throw new Error('PKCS#12 authSafe content data is not an OCTET STRING.');\n }\n data = _decodePkcs7Data(data);\n\n // check for MAC\n if(capture.mac) {\n var md = null;\n var macKeyBytes = 0;\n var macAlgorithm = asn1.derToOid(capture.macAlgorithm);\n switch(macAlgorithm) {\n case pki.oids.sha1:\n md = forge.md.sha1.create();\n macKeyBytes = 20;\n break;\n case pki.oids.sha256:\n md = forge.md.sha256.create();\n macKeyBytes = 32;\n break;\n case pki.oids.sha384:\n md = forge.md.sha384.create();\n macKeyBytes = 48;\n break;\n case pki.oids.sha512:\n md = forge.md.sha512.create();\n macKeyBytes = 64;\n break;\n case pki.oids.md5:\n md = forge.md.md5.create();\n macKeyBytes = 16;\n break;\n }\n if(md === null) 
{\n throw new Error('PKCS#12 uses unsupported MAC algorithm: ' + macAlgorithm);\n }\n\n // verify MAC (iterations default to 1)\n var macSalt = new forge.util.ByteBuffer(capture.macSalt);\n var macIterations = (('macIterations' in capture) ?\n parseInt(forge.util.bytesToHex(capture.macIterations), 16) : 1);\n var macKey = p12.generateKey(\n password, macSalt, 3, macIterations, macKeyBytes, md);\n var mac = forge.hmac.create();\n mac.start(md, macKey);\n mac.update(data.value);\n var macValue = mac.getMac();\n if(macValue.getBytes() !== capture.macDigest) {\n throw new Error('PKCS#12 MAC could not be verified. Invalid password?');\n }\n }\n\n _decodeAuthenticatedSafe(pfx, data.value, strict, password);\n return pfx;\n};\n\n/**\n * Decodes PKCS#7 Data. PKCS#7 (RFC 2315) defines \"Data\" as an OCTET STRING,\n * but it is sometimes an OCTET STRING that is composed/constructed of chunks,\n * each its own OCTET STRING. This is BER-encoding vs. DER-encoding. This\n * function transforms this corner-case into the usual simple,\n * non-composed/constructed OCTET STRING.\n *\n * This function may be moved to ASN.1 at some point to better deal with\n * more BER-encoding issues, should they arise.\n *\n * @param data the ASN.1 Data object to transform.\n */\nfunction _decodePkcs7Data(data) {\n // handle special case of \"chunked\" data content: an octet string composed\n // of other octet strings\n if(data.composed || data.constructed) {\n var value = forge.util.createBuffer();\n for(var i = 0; i < data.value.length; ++i) {\n value.putBytes(data.value[i].value);\n }\n data.composed = data.constructed = false;\n data.value = value.getBytes();\n }\n return data;\n}\n\n/**\n * Decode PKCS#12 AuthenticatedSafe (BER encoded) into PFX object.\n *\n * The AuthenticatedSafe is a BER-encoded SEQUENCE OF ContentInfo.\n *\n * @param pfx The PKCS#12 PFX object to fill.\n * @param {String} authSafe BER-encoded AuthenticatedSafe.\n * @param strict true to use strict DER decoding, false not to.\n * @param {String} password Password to decrypt with (optional).\n */\nfunction _decodeAuthenticatedSafe(pfx, authSafe, strict, password) {\n authSafe = asn1.fromDer(authSafe, strict); /* actually it's BER encoded */\n\n if(authSafe.tagClass !== asn1.Class.UNIVERSAL ||\n authSafe.type !== asn1.Type.SEQUENCE ||\n authSafe.constructed !== true) {\n throw new Error('PKCS#12 AuthenticatedSafe expected to be a ' +\n 'SEQUENCE OF ContentInfo');\n }\n\n for(var i = 0; i < authSafe.value.length; i++) {\n var contentInfo = authSafe.value[i];\n\n // validate contentInfo and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(contentInfo, contentInfoValidator, capture, errors)) {\n var error = new Error('Cannot read ContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var obj = {\n encrypted: false\n };\n var safeContents = null;\n var data = capture.content.value[0];\n switch(asn1.derToOid(capture.contentType)) {\n case pki.oids.data:\n if(data.tagClass !== asn1.Class.UNIVERSAL ||\n data.type !== asn1.Type.OCTETSTRING) {\n throw new Error('PKCS#12 SafeContents Data is not an OCTET STRING.');\n }\n safeContents = _decodePkcs7Data(data).value;\n break;\n case pki.oids.encryptedData:\n safeContents = _decryptSafeContents(data, password);\n obj.encrypted = true;\n break;\n default:\n var error = new Error('Unsupported PKCS#12 contentType.');\n error.contentType = asn1.derToOid(capture.contentType);\n throw error;\n }\n\n obj.safeBags = _decodeSafeContents(safeContents, strict, password);\n 
pfx.safeContents.push(obj);\n }\n}\n\n/**\n * Decrypt PKCS#7 EncryptedData structure.\n *\n * @param data ASN.1 encoded EncryptedContentInfo object.\n * @param password The user-provided password.\n *\n * @return The decrypted SafeContents (ASN.1 object).\n */\nfunction _decryptSafeContents(data, password) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(\n data, forge.pkcs7.asn1.encryptedDataValidator, capture, errors)) {\n var error = new Error('Cannot read EncryptedContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var oid = asn1.derToOid(capture.contentType);\n if(oid !== pki.oids.data) {\n var error = new Error(\n 'PKCS#12 EncryptedContentInfo ContentType is not Data.');\n error.oid = oid;\n throw error;\n }\n\n // get cipher\n oid = asn1.derToOid(capture.encAlgorithm);\n var cipher = pki.pbe.getCipher(oid, capture.encParameter, password);\n\n // get encrypted data\n var encryptedContentAsn1 = _decodePkcs7Data(capture.encryptedContentAsn1);\n var encrypted = forge.util.createBuffer(encryptedContentAsn1.value);\n\n cipher.update(encrypted);\n if(!cipher.finish()) {\n throw new Error('Failed to decrypt PKCS#12 SafeContents.');\n }\n\n return cipher.output.getBytes();\n}\n\n/**\n * Decode PKCS#12 SafeContents (BER-encoded) into array of Bag objects.\n *\n * The safeContents is a BER-encoded SEQUENCE OF SafeBag.\n *\n * @param {String} safeContents BER-encoded safeContents.\n * @param strict true to use strict DER decoding, false not to.\n * @param {String} password Password to decrypt with (optional).\n *\n * @return {Array} Array of Bag objects.\n */\nfunction _decodeSafeContents(safeContents, strict, password) {\n // if strict and no safe contents, return empty safes\n if(!strict && safeContents.length === 0) {\n return [];\n }\n\n // actually it's BER-encoded\n safeContents = asn1.fromDer(safeContents, strict);\n\n if(safeContents.tagClass !== asn1.Class.UNIVERSAL ||\n safeContents.type !== asn1.Type.SEQUENCE ||\n safeContents.constructed !== true) {\n throw new Error(\n 'PKCS#12 SafeContents expected to be a SEQUENCE OF SafeBag.');\n }\n\n var res = [];\n for(var i = 0; i < safeContents.value.length; i++) {\n var safeBag = safeContents.value[i];\n\n // validate SafeBag and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(safeBag, safeBagValidator, capture, errors)) {\n var error = new Error('Cannot read SafeBag.');\n error.errors = errors;\n throw error;\n }\n\n /* Create bag object and push to result array. */\n var bag = {\n type: asn1.derToOid(capture.bagId),\n attributes: _decodeBagAttributes(capture.bagAttributes)\n };\n res.push(bag);\n\n var validator, decoder;\n var bagAsn1 = capture.bagValue.value[0];\n switch(bag.type) {\n case pki.oids.pkcs8ShroudedKeyBag:\n /* bagAsn1 has a EncryptedPrivateKeyInfo, which we need to decrypt.\n Afterwards we can handle it like a keyBag,\n which is a PrivateKeyInfo. */\n bagAsn1 = pki.decryptPrivateKeyInfo(bagAsn1, password);\n if(bagAsn1 === null) {\n throw new Error(\n 'Unable to decrypt PKCS#8 ShroudedKeyBag, wrong password?');\n }\n\n /* fall through */\n case pki.oids.keyBag:\n /* A PKCS#12 keyBag is a simple PrivateKeyInfo as understood by our\n PKI module, hence we don't have to do validation/capturing here,\n just pass what we already got. */\n try {\n bag.key = pki.privateKeyFromAsn1(bagAsn1);\n } catch(e) {\n // ignore unknown key type, pass asn1 value\n bag.key = null;\n bag.asn1 = bagAsn1;\n }\n continue; /* Nothing more to do. 
*/\n\n case pki.oids.certBag:\n /* A PKCS#12 certBag can wrap both X.509 and sdsi certificates.\n Therefore put the SafeBag content through another validator to\n capture the fields. Afterwards check & store the results. */\n validator = certBagValidator;\n decoder = function() {\n if(asn1.derToOid(capture.certId) !== pki.oids.x509Certificate) {\n var error = new Error(\n 'Unsupported certificate type, only X.509 supported.');\n error.oid = asn1.derToOid(capture.certId);\n throw error;\n }\n\n // true=produce cert hash\n var certAsn1 = asn1.fromDer(capture.cert, strict);\n try {\n bag.cert = pki.certificateFromAsn1(certAsn1, true);\n } catch(e) {\n // ignore unknown cert type, pass asn1 value\n bag.cert = null;\n bag.asn1 = certAsn1;\n }\n };\n break;\n\n default:\n var error = new Error('Unsupported PKCS#12 SafeBag type.');\n error.oid = bag.type;\n throw error;\n }\n\n /* Validate SafeBag value (i.e. CertBag, etc.) and capture data if needed. */\n if(validator !== undefined &&\n !asn1.validate(bagAsn1, validator, capture, errors)) {\n var error = new Error('Cannot read PKCS#12 ' + validator.name);\n error.errors = errors;\n throw error;\n }\n\n /* Call decoder function from above to store the results. */\n decoder();\n }\n\n return res;\n}\n\n/**\n * Decode PKCS#12 SET OF PKCS12Attribute into JavaScript object.\n *\n * @param attributes SET OF PKCS12Attribute (ASN.1 object).\n *\n * @return the decoded attributes.\n */\nfunction _decodeBagAttributes(attributes) {\n var decodedAttrs = {};\n\n if(attributes !== undefined) {\n for(var i = 0; i < attributes.length; ++i) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(attributes[i], attributeValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#12 BagAttribute.');\n error.errors = errors;\n throw error;\n }\n\n var oid = asn1.derToOid(capture.oid);\n if(pki.oids[oid] === undefined) {\n // unsupported attribute type, ignore.\n continue;\n }\n\n decodedAttrs[pki.oids[oid]] = [];\n for(var j = 0; j < capture.values.length; ++j) {\n decodedAttrs[pki.oids[oid]].push(capture.values[j].value);\n }\n }\n }\n\n return decodedAttrs;\n}\n\n/**\n * Wraps a private key and certificate in a PKCS#12 PFX wrapper. If a\n * password is provided then the private key will be encrypted.\n *\n * An entire certificate chain may also be included. To do this, pass\n * an array for the \"cert\" parameter where the first certificate is\n * the one that is paired with the private key and each subsequent one\n * verifies the previous one. 
The certificates may be in PEM format or\n * have been already parsed by Forge.\n *\n * @todo implement password-based-encryption for the whole package\n *\n * @param key the private key.\n * @param cert the certificate (may be an array of certificates in order\n * to specify a certificate chain).\n * @param password the password to use, null for none.\n * @param options:\n * algorithm the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'.\n * count the iteration count to use.\n * saltSize the salt size to use.\n * useMac true to include a MAC, false not to, defaults to true.\n * localKeyId the local key ID to use, in hex.\n * friendlyName the friendly name to use.\n * generateLocalKeyId true to generate a random local key ID,\n * false not to, defaults to true.\n *\n * @return the PKCS#12 PFX ASN.1 object.\n */\np12.toPkcs12Asn1 = function(key, cert, password, options) {\n // set default options\n options = options || {};\n options.saltSize = options.saltSize || 8;\n options.count = options.count || 2048;\n options.algorithm = options.algorithm || options.encAlgorithm || 'aes128';\n if(!('useMac' in options)) {\n options.useMac = true;\n }\n if(!('localKeyId' in options)) {\n options.localKeyId = null;\n }\n if(!('generateLocalKeyId' in options)) {\n options.generateLocalKeyId = true;\n }\n\n var localKeyId = options.localKeyId;\n var bagAttrs;\n if(localKeyId !== null) {\n localKeyId = forge.util.hexToBytes(localKeyId);\n } else if(options.generateLocalKeyId) {\n // use SHA-1 of paired cert, if available\n if(cert) {\n var pairedCert = forge.util.isArray(cert) ? cert[0] : cert;\n if(typeof pairedCert === 'string') {\n pairedCert = pki.certificateFromPem(pairedCert);\n }\n var sha1 = forge.md.sha1.create();\n sha1.update(asn1.toDer(pki.certificateToAsn1(pairedCert)).getBytes());\n localKeyId = sha1.digest().getBytes();\n } else {\n // FIXME: consider using SHA-1 of public key (which can be generated\n // from private key components), see: cert.generateSubjectKeyIdentifier\n // generate random bytes\n localKeyId = forge.random.getBytes(20);\n }\n }\n\n var attrs = [];\n if(localKeyId !== null) {\n attrs.push(\n // localKeyID\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // attrId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.localKeyId).getBytes()),\n // attrValues\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n localKeyId)\n ])\n ]));\n }\n if('friendlyName' in options) {\n attrs.push(\n // friendlyName\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // attrId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.friendlyName).getBytes()),\n // attrValues\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BMPSTRING, false,\n options.friendlyName)\n ])\n ]));\n }\n\n if(attrs.length > 0) {\n bagAttrs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);\n }\n\n // collect contents for AuthenticatedSafe\n var contents = [];\n\n // create safe bag(s) for certificate chain\n var chain = [];\n if(cert !== null) {\n if(forge.util.isArray(cert)) {\n chain = cert;\n } else {\n chain = [cert];\n }\n }\n\n var certSafeBags = [];\n for(var i = 0; i < chain.length; ++i) {\n // convert cert from PEM as necessary\n cert = chain[i];\n if(typeof cert === 'string') {\n cert = pki.certificateFromPem(cert);\n }\n\n // 
SafeBag\n var certBagAttrs = (i === 0) ? bagAttrs : undefined;\n var certAsn1 = pki.certificateToAsn1(cert);\n var certSafeBag =\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // bagId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.certBag).getBytes()),\n // bagValue\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // CertBag\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // certId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.x509Certificate).getBytes()),\n // certValue (x509Certificate)\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(certAsn1).getBytes())\n ])])]),\n // bagAttributes (OPTIONAL)\n certBagAttrs\n ]);\n certSafeBags.push(certSafeBag);\n }\n\n if(certSafeBags.length > 0) {\n // SafeContents\n var certSafeContents = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, certSafeBags);\n\n // ContentInfo\n var certCI =\n // PKCS#7 ContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // contentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n // OID for the content type is 'data'\n asn1.oidToDer(pki.oids.data).getBytes()),\n // content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(certSafeContents).getBytes())\n ])\n ]);\n contents.push(certCI);\n }\n\n // create safe contents for private key\n var keyBag = null;\n if(key !== null) {\n // SafeBag\n var pkAsn1 = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(key));\n if(password === null) {\n // no encryption\n keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // bagId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.keyBag).getBytes()),\n // bagValue\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // PrivateKeyInfo\n pkAsn1\n ]),\n // bagAttributes (OPTIONAL)\n bagAttrs\n ]);\n } else {\n // encrypted PrivateKeyInfo\n keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // bagId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.pkcs8ShroudedKeyBag).getBytes()),\n // bagValue\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // EncryptedPrivateKeyInfo\n pki.encryptPrivateKeyInfo(pkAsn1, password, options)\n ]),\n // bagAttributes (OPTIONAL)\n bagAttrs\n ]);\n }\n\n // SafeContents\n var keySafeContents =\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [keyBag]);\n\n // ContentInfo\n var keyCI =\n // PKCS#7 ContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // contentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n // OID for the content type is 'data'\n asn1.oidToDer(pki.oids.data).getBytes()),\n // content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(keySafeContents).getBytes())\n ])\n ]);\n contents.push(keyCI);\n }\n\n // create AuthenticatedSafe by stringing together the contents\n var safe = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, contents);\n\n var macData;\n if(options.useMac) {\n // MacData\n var sha1 = forge.md.sha1.create();\n var macSalt = new forge.util.ByteBuffer(\n forge.random.getBytes(options.saltSize));\n var count = options.count;\n // 160-bit key\n var key = p12.generateKey(password, 
macSalt, 3, count, 20);\n var mac = forge.hmac.create();\n mac.start(sha1, key);\n mac.update(asn1.toDer(safe).getBytes());\n var macValue = mac.getMac();\n macData = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // mac DigestInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // digestAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm = SHA-1\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.sha1).getBytes()),\n // parameters = Null\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // digest\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING,\n false, macValue.getBytes())\n ]),\n // macSalt OCTET STRING\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, macSalt.getBytes()),\n // iterations INTEGER (XXX: Only support count < 65536)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(count).getBytes()\n )\n ]);\n }\n\n // PFX\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (3)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(3).getBytes()),\n // PKCS#7 ContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // contentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n // OID for the content type is 'data'\n asn1.oidToDer(pki.oids.data).getBytes()),\n // content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(safe).getBytes())\n ])\n ]),\n macData\n ]);\n};\n\n/**\n * Derives a PKCS#12 key.\n *\n * @param password the password to derive the key material from, null or\n * undefined for none.\n * @param salt the salt, as a ByteBuffer, to use.\n * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC).\n * @param iter the iteration count.\n * @param n the number of bytes to derive from the password.\n * @param md the message digest to use, defaults to SHA-1.\n *\n * @return a ByteBuffer with the bytes derived from the password.\n */\np12.generateKey = forge.pbe.generatePkcs12Key;\n","/**\n * Javascript implementation of PKCS#7 v1.5.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2012-2015 Digital Bazaar, Inc.\n *\n * Currently this implementation only supports ContentType of EnvelopedData,\n * EncryptedData, or SignedData at the root level. The top level elements may\n * contain only a ContentInfo of ContentType Data, i.e. plain data. 
Further\n * nesting is not (yet) supported.\n *\n * The Forge validators for PKCS #7's ASN.1 structures are available from\n * a separate file pkcs7asn1.js, since those are referenced from other\n * PKCS standards like PKCS #12.\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./asn1');\nrequire('./des');\nrequire('./oids');\nrequire('./pem');\nrequire('./pkcs7asn1');\nrequire('./random');\nrequire('./util');\nrequire('./x509');\n\n// shortcut for ASN.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for PKCS#7 API\nvar p7 = module.exports = forge.pkcs7 = forge.pkcs7 || {};\n\n/**\n * Converts a PKCS#7 message from PEM format.\n *\n * @param pem the PEM-formatted PKCS#7 message.\n *\n * @return the PKCS#7 message.\n */\np7.messageFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'PKCS7') {\n var error = new Error('Could not convert PKCS#7 message from PEM; PEM ' +\n 'header type is not \"PKCS#7\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert PKCS#7 message from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return p7.messageFromAsn1(obj);\n};\n\n/**\n * Converts a PKCS#7 message to PEM format.\n *\n * @param msg The PKCS#7 message object\n * @param maxline The maximum characters per line, defaults to 64.\n *\n * @return The PEM-formatted PKCS#7 message.\n */\np7.messageToPem = function(msg, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var pemObj = {\n type: 'PKCS7',\n body: asn1.toDer(msg.toAsn1()).getBytes()\n };\n return forge.pem.encode(pemObj, {maxline: maxline});\n};\n\n/**\n * Converts a PKCS#7 message from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a ContentInfo.\n *\n * @return the PKCS#7 message.\n */\np7.messageFromAsn1 = function(obj) {\n // validate root level ContentInfo and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.contentInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 message. ' +\n 'ASN.1 object is not an PKCS#7 ContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var contentType = asn1.derToOid(capture.contentType);\n var msg;\n\n switch(contentType) {\n case forge.pki.oids.envelopedData:\n msg = p7.createEnvelopedData();\n break;\n\n case forge.pki.oids.encryptedData:\n msg = p7.createEncryptedData();\n break;\n\n case forge.pki.oids.signedData:\n msg = p7.createSignedData();\n break;\n\n default:\n throw new Error('Cannot read PKCS#7 message. 
ContentType with OID ' +\n contentType + ' is not (yet) supported.');\n }\n\n msg.fromAsn1(capture.content.value[0]);\n return msg;\n};\n\np7.createSignedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.signedData,\n version: 1,\n certificates: [],\n crls: [],\n // TODO: add json-formatted signer stuff here?\n signers: [],\n // populated during sign()\n digestAlgorithmIdentifiers: [],\n contentInfo: null,\n signerInfos: [],\n\n fromAsn1: function(obj) {\n // validate SignedData content block and capture data.\n _fromAsn1(msg, obj, p7.asn1.signedDataValidator);\n msg.certificates = [];\n msg.crls = [];\n msg.digestAlgorithmIdentifiers = [];\n msg.contentInfo = null;\n msg.signerInfos = [];\n\n if(msg.rawCapture.certificates) {\n var certs = msg.rawCapture.certificates.value;\n for(var i = 0; i < certs.length; ++i) {\n msg.certificates.push(forge.pki.certificateFromAsn1(certs[i]));\n }\n }\n\n // TODO: parse crls\n },\n\n toAsn1: function() {\n // degenerate case with no content\n if(!msg.contentInfo) {\n msg.sign();\n }\n\n var certs = [];\n for(var i = 0; i < msg.certificates.length; ++i) {\n certs.push(forge.pki.certificateToAsn1(msg.certificates[i]));\n }\n\n var crls = [];\n // TODO: implement CRLs\n\n // [0] SignedData\n var signedData = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(msg.version).getBytes()),\n // DigestAlgorithmIdentifiers\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n msg.digestAlgorithmIdentifiers),\n // ContentInfo\n msg.contentInfo\n ])\n ]);\n if(certs.length > 0) {\n // [0] IMPLICIT ExtendedCertificatesAndCertificates OPTIONAL\n signedData.value[0].value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, certs));\n }\n if(crls.length > 0) {\n // [1] IMPLICIT CertificateRevocationLists OPTIONAL\n signedData.value[0].value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, crls));\n }\n // SignerInfos\n signedData.value[0].value.push(\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n msg.signerInfos));\n\n // ContentInfo\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(msg.type).getBytes()),\n // [0] SignedData\n signedData\n ]);\n },\n\n /**\n * Add (another) entity to list of signers.\n *\n * Note: If authenticatedAttributes are provided, then, per RFC 2315,\n * they must include at least two attributes: content type and\n * message digest. 
The message digest attribute value will be\n * auto-calculated during signing and will be ignored if provided.\n *\n * Here's an example of providing these two attributes:\n *\n * forge.pkcs7.createSignedData();\n * p7.addSigner({\n * issuer: cert.issuer.attributes,\n * serialNumber: cert.serialNumber,\n * key: privateKey,\n * digestAlgorithm: forge.pki.oids.sha1,\n * authenticatedAttributes: [{\n * type: forge.pki.oids.contentType,\n * value: forge.pki.oids.data\n * }, {\n * type: forge.pki.oids.messageDigest\n * }]\n * });\n *\n * TODO: Support [subjectKeyIdentifier] as signer's ID.\n *\n * @param signer the signer information:\n * key the signer's private key.\n * [certificate] a certificate containing the public key\n * associated with the signer's private key; use this option as\n * an alternative to specifying signer.issuer and\n * signer.serialNumber.\n * [issuer] the issuer attributes (eg: cert.issuer.attributes).\n * [serialNumber] the signer's certificate's serial number in\n * hexadecimal (eg: cert.serialNumber).\n * [digestAlgorithm] the message digest OID, as a string, to use\n * (eg: forge.pki.oids.sha1).\n * [authenticatedAttributes] an optional array of attributes\n * to also sign along with the content.\n */\n addSigner: function(signer) {\n var issuer = signer.issuer;\n var serialNumber = signer.serialNumber;\n if(signer.certificate) {\n var cert = signer.certificate;\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n issuer = cert.issuer.attributes;\n serialNumber = cert.serialNumber;\n }\n var key = signer.key;\n if(!key) {\n throw new Error(\n 'Could not add PKCS#7 signer; no private key specified.');\n }\n if(typeof key === 'string') {\n key = forge.pki.privateKeyFromPem(key);\n }\n\n // ensure OID known for digest algorithm\n var digestAlgorithm = signer.digestAlgorithm || forge.pki.oids.sha1;\n switch(digestAlgorithm) {\n case forge.pki.oids.sha1:\n case forge.pki.oids.sha256:\n case forge.pki.oids.sha384:\n case forge.pki.oids.sha512:\n case forge.pki.oids.md5:\n break;\n default:\n throw new Error(\n 'Could not add PKCS#7 signer; unknown message digest algorithm: ' +\n digestAlgorithm);\n }\n\n // if authenticatedAttributes is present, then the attributes\n // must contain at least PKCS #9 content-type and message-digest\n var authenticatedAttributes = signer.authenticatedAttributes || [];\n if(authenticatedAttributes.length > 0) {\n var contentType = false;\n var messageDigest = false;\n for(var i = 0; i < authenticatedAttributes.length; ++i) {\n var attr = authenticatedAttributes[i];\n if(!contentType && attr.type === forge.pki.oids.contentType) {\n contentType = true;\n if(messageDigest) {\n break;\n }\n continue;\n }\n if(!messageDigest && attr.type === forge.pki.oids.messageDigest) {\n messageDigest = true;\n if(contentType) {\n break;\n }\n continue;\n }\n }\n\n if(!contentType || !messageDigest) {\n throw new Error('Invalid signer.authenticatedAttributes. If ' +\n 'signer.authenticatedAttributes is specified, then it must ' +\n 'contain at least two attributes, PKCS #9 content-type and ' +\n 'PKCS #9 message-digest.');\n }\n }\n\n msg.signers.push({\n key: key,\n version: 1,\n issuer: issuer,\n serialNumber: serialNumber,\n digestAlgorithm: digestAlgorithm,\n signatureAlgorithm: forge.pki.oids.rsaEncryption,\n signature: null,\n authenticatedAttributes: authenticatedAttributes,\n unauthenticatedAttributes: []\n });\n },\n\n /**\n * Signs the content.\n * @param options Options to apply when signing:\n * [detached] boolean. 
If signing should be done in detached mode. Defaults to false.\n */\n sign: function(options) {\n options = options || {};\n // auto-generate content info\n if(typeof msg.content !== 'object' || msg.contentInfo === null) {\n // use Data ContentInfo\n msg.contentInfo = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(forge.pki.oids.data).getBytes())\n ]);\n\n // add actual content, if present\n if('content' in msg) {\n var content;\n if(msg.content instanceof forge.util.ByteBuffer) {\n content = msg.content.bytes();\n } else if(typeof msg.content === 'string') {\n content = forge.util.encodeUtf8(msg.content);\n }\n\n if (options.detached) {\n msg.detachedContent = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, content);\n } else {\n msg.contentInfo.value.push(\n // [0] EXPLICIT content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n content)\n ]));\n }\n }\n }\n\n // no signers, return early (degenerate case for certificate container)\n if(msg.signers.length === 0) {\n return;\n }\n\n // generate digest algorithm identifiers\n var mds = addDigestAlgorithmIds();\n\n // generate signerInfos\n addSignerInfos(mds);\n },\n\n verify: function() {\n throw new Error('PKCS#7 signature verification not yet implemented.');\n },\n\n /**\n * Add a certificate.\n *\n * @param cert the certificate to add.\n */\n addCertificate: function(cert) {\n // convert from PEM\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n msg.certificates.push(cert);\n },\n\n /**\n * Add a certificate revokation list.\n *\n * @param crl the certificate revokation list to add.\n */\n addCertificateRevokationList: function(crl) {\n throw new Error('PKCS#7 CRL support not yet implemented.');\n }\n };\n return msg;\n\n function addDigestAlgorithmIds() {\n var mds = {};\n\n for(var i = 0; i < msg.signers.length; ++i) {\n var signer = msg.signers[i];\n var oid = signer.digestAlgorithm;\n if(!(oid in mds)) {\n // content digest\n mds[oid] = forge.md[forge.pki.oids[oid]].create();\n }\n if(signer.authenticatedAttributes.length === 0) {\n // no custom attributes to digest; use content message digest\n signer.md = mds[oid];\n } else {\n // custom attributes to be digested; use own message digest\n // TODO: optimize to just copy message digest state if that\n // feature is ever supported with message digests\n signer.md = forge.md[forge.pki.oids[oid]].create();\n }\n }\n\n // add unique digest algorithm identifiers\n msg.digestAlgorithmIdentifiers = [];\n for(var oid in mds) {\n msg.digestAlgorithmIdentifiers.push(\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oid).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n }\n\n return mds;\n }\n\n function addSignerInfos(mds) {\n var content;\n\n if (msg.detachedContent) {\n // Signature has been made in detached mode.\n content = msg.detachedContent;\n } else {\n // Note: ContentInfo is a SEQUENCE with 2 values, second value is\n // the content field and is optional for a ContentInfo but required here\n // since signers are present\n // get ContentInfo content\n content = msg.contentInfo.value[1];\n // skip [0] EXPLICIT content wrapper\n content = content.value[0];\n }\n\n 
if(!content) {\n throw new Error(\n 'Could not sign PKCS#7 message; there is no content to sign.');\n }\n\n // get ContentInfo content type\n var contentType = asn1.derToOid(msg.contentInfo.value[0].value);\n\n // serialize content\n var bytes = asn1.toDer(content);\n\n // skip identifier and length per RFC 2315 9.3\n // skip identifier (1 byte)\n bytes.getByte();\n // read and discard length bytes\n asn1.getBerValueLength(bytes);\n bytes = bytes.getBytes();\n\n // digest content DER value bytes\n for(var oid in mds) {\n mds[oid].start().update(bytes);\n }\n\n // sign content\n var signingTime = new Date();\n for(var i = 0; i < msg.signers.length; ++i) {\n var signer = msg.signers[i];\n\n if(signer.authenticatedAttributes.length === 0) {\n // if ContentInfo content type is not \"Data\", then\n // authenticatedAttributes must be present per RFC 2315\n if(contentType !== forge.pki.oids.data) {\n throw new Error(\n 'Invalid signer; authenticatedAttributes must be present ' +\n 'when the ContentInfo content type is not PKCS#7 Data.');\n }\n } else {\n // process authenticated attributes\n // [0] IMPLICIT\n signer.authenticatedAttributesAsn1 = asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n\n // per RFC 2315, attributes are to be digested using a SET container\n // not the above [0] IMPLICIT container\n var attrsAsn1 = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SET, true, []);\n\n for(var ai = 0; ai < signer.authenticatedAttributes.length; ++ai) {\n var attr = signer.authenticatedAttributes[ai];\n if(attr.type === forge.pki.oids.messageDigest) {\n // use content message digest as value\n attr.value = mds[signer.digestAlgorithm].digest();\n } else if(attr.type === forge.pki.oids.signingTime) {\n // auto-populate signing time if not already set\n if(!attr.value) {\n attr.value = signingTime;\n }\n }\n\n // convert to ASN.1 and push onto Attributes SET (for signing) and\n // onto authenticatedAttributesAsn1 to complete SignedData ASN.1\n // TODO: optimize away duplication\n attrsAsn1.value.push(_attributeToAsn1(attr));\n signer.authenticatedAttributesAsn1.value.push(_attributeToAsn1(attr));\n }\n\n // DER-serialize and digest SET OF attributes only\n bytes = asn1.toDer(attrsAsn1).getBytes();\n signer.md.start().update(bytes);\n }\n\n // sign digest\n signer.signature = signer.key.sign(signer.md, 'RSASSA-PKCS1-V1_5');\n }\n\n // add signer info\n msg.signerInfos = _signersToAsn1(msg.signers);\n }\n};\n\n/**\n * Creates an empty PKCS#7 message of type EncryptedData.\n *\n * @return the message.\n */\np7.createEncryptedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.encryptedData,\n version: 0,\n encryptedContent: {\n algorithm: forge.pki.oids['aes256-CBC']\n },\n\n /**\n * Reads an EncryptedData content block (in ASN.1 format)\n *\n * @param obj The ASN.1 representation of the EncryptedData content block\n */\n fromAsn1: function(obj) {\n // Validate EncryptedData content block and capture data.\n _fromAsn1(msg, obj, p7.asn1.encryptedDataValidator);\n },\n\n /**\n * Decrypt encrypted content\n *\n * @param key The (symmetric) key as a byte buffer\n */\n decrypt: function(key) {\n if(key !== undefined) {\n msg.encryptedContent.key = key;\n }\n _decryptContent(msg);\n }\n };\n return msg;\n};\n\n/**\n * Creates an empty PKCS#7 message of type EnvelopedData.\n *\n * @return the message.\n */\np7.createEnvelopedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.envelopedData,\n version: 0,\n recipients: [],\n encryptedContent: {\n algorithm: 
forge.pki.oids['aes256-CBC']\n },\n\n /**\n * Reads an EnvelopedData content block (in ASN.1 format)\n *\n * @param obj the ASN.1 representation of the EnvelopedData content block.\n */\n fromAsn1: function(obj) {\n // validate EnvelopedData content block and capture data\n var capture = _fromAsn1(msg, obj, p7.asn1.envelopedDataValidator);\n msg.recipients = _recipientsFromAsn1(capture.recipientInfos.value);\n },\n\n toAsn1: function() {\n // ContentInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(msg.type).getBytes()),\n // [0] EnvelopedData\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(msg.version).getBytes()),\n // RecipientInfos\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n _recipientsToAsn1(msg.recipients)),\n // EncryptedContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true,\n _encryptedContentToAsn1(msg.encryptedContent))\n ])\n ])\n ]);\n },\n\n /**\n * Find recipient by X.509 certificate's issuer.\n *\n * @param cert the certificate with the issuer to look for.\n *\n * @return the recipient object.\n */\n findRecipient: function(cert) {\n var sAttr = cert.issuer.attributes;\n\n for(var i = 0; i < msg.recipients.length; ++i) {\n var r = msg.recipients[i];\n var rAttr = r.issuer;\n\n if(r.serialNumber !== cert.serialNumber) {\n continue;\n }\n\n if(rAttr.length !== sAttr.length) {\n continue;\n }\n\n var match = true;\n for(var j = 0; j < sAttr.length; ++j) {\n if(rAttr[j].type !== sAttr[j].type ||\n rAttr[j].value !== sAttr[j].value) {\n match = false;\n break;\n }\n }\n\n if(match) {\n return r;\n }\n }\n\n return null;\n },\n\n /**\n * Decrypt enveloped content\n *\n * @param recipient The recipient object related to the private key\n * @param privKey The (RSA) private key object\n */\n decrypt: function(recipient, privKey) {\n if(msg.encryptedContent.key === undefined && recipient !== undefined &&\n privKey !== undefined) {\n switch(recipient.encryptedContent.algorithm) {\n case forge.pki.oids.rsaEncryption:\n case forge.pki.oids.desCBC:\n var key = privKey.decrypt(recipient.encryptedContent.content);\n msg.encryptedContent.key = forge.util.createBuffer(key);\n break;\n\n default:\n throw new Error('Unsupported asymmetric cipher, ' +\n 'OID ' + recipient.encryptedContent.algorithm);\n }\n }\n\n _decryptContent(msg);\n },\n\n /**\n * Add (another) entity to list of recipients.\n *\n * @param cert The certificate of the entity to add.\n */\n addRecipient: function(cert) {\n msg.recipients.push({\n version: 0,\n issuer: cert.issuer.attributes,\n serialNumber: cert.serialNumber,\n encryptedContent: {\n // We simply assume rsaEncryption here, since forge.pki only\n // supports RSA so far. If the PKI module supports other\n // ciphers one day, we need to modify this one as well.\n algorithm: forge.pki.oids.rsaEncryption,\n key: cert.publicKey\n }\n });\n },\n\n /**\n * Encrypt enveloped content.\n *\n * This function supports two optional arguments, cipher and key, which\n * can be used to influence symmetric encryption. Unless cipher is\n * provided, the cipher specified in encryptedContent.algorithm is used\n * (defaults to AES-256-CBC). If no key is provided, encryptedContent.key\n * is (re-)used. 
If that one's not set, a random key will be generated\n * automatically.\n *\n * @param [key] The key to be used for symmetric encryption.\n * @param [cipher] The OID of the symmetric cipher to use.\n */\n encrypt: function(key, cipher) {\n // Part 1: Symmetric encryption\n if(msg.encryptedContent.content === undefined) {\n cipher = cipher || msg.encryptedContent.algorithm;\n key = key || msg.encryptedContent.key;\n\n var keyLen, ivLen, ciphFn;\n switch(cipher) {\n case forge.pki.oids['aes128-CBC']:\n keyLen = 16;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['aes192-CBC']:\n keyLen = 24;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['aes256-CBC']:\n keyLen = 32;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['des-EDE3-CBC']:\n keyLen = 24;\n ivLen = 8;\n ciphFn = forge.des.createEncryptionCipher;\n break;\n\n default:\n throw new Error('Unsupported symmetric cipher, OID ' + cipher);\n }\n\n if(key === undefined) {\n key = forge.util.createBuffer(forge.random.getBytes(keyLen));\n } else if(key.length() != keyLen) {\n throw new Error('Symmetric key has wrong length; ' +\n 'got ' + key.length() + ' bytes, expected ' + keyLen + '.');\n }\n\n // Keep a copy of the key & IV in the object, so the caller can\n // use it for whatever reason.\n msg.encryptedContent.algorithm = cipher;\n msg.encryptedContent.key = key;\n msg.encryptedContent.parameter = forge.util.createBuffer(\n forge.random.getBytes(ivLen));\n\n var ciph = ciphFn(key);\n ciph.start(msg.encryptedContent.parameter.copy());\n ciph.update(msg.content);\n\n // The finish function does PKCS#7 padding by default, therefore\n // no action required by us.\n if(!ciph.finish()) {\n throw new Error('Symmetric encryption failed.');\n }\n\n msg.encryptedContent.content = ciph.output;\n }\n\n // Part 2: asymmetric encryption for each recipient\n for(var i = 0; i < msg.recipients.length; ++i) {\n var recipient = msg.recipients[i];\n\n // Nothing to do, encryption already done.\n if(recipient.encryptedContent.content !== undefined) {\n continue;\n }\n\n switch(recipient.encryptedContent.algorithm) {\n case forge.pki.oids.rsaEncryption:\n recipient.encryptedContent.content =\n recipient.encryptedContent.key.encrypt(\n msg.encryptedContent.key.data);\n break;\n\n default:\n throw new Error('Unsupported asymmetric cipher, OID ' +\n recipient.encryptedContent.algorithm);\n }\n }\n }\n };\n return msg;\n};\n\n/**\n * Converts a single recipient from an ASN.1 object.\n *\n * @param obj the ASN.1 RecipientInfo.\n *\n * @return the recipient object.\n */\nfunction _recipientFromAsn1(obj) {\n // validate EnvelopedData content block and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.recipientInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 RecipientInfo. ' +\n 'ASN.1 object is not an PKCS#7 RecipientInfo.');\n error.errors = errors;\n throw error;\n }\n\n return {\n version: capture.version.charCodeAt(0),\n issuer: forge.pki.RDNAttributesAsArray(capture.issuer),\n serialNumber: forge.util.createBuffer(capture.serial).toHex(),\n encryptedContent: {\n algorithm: asn1.derToOid(capture.encAlgorithm),\n parameter: capture.encParameter ? 
capture.encParameter.value : undefined,\n content: capture.encKey\n }\n };\n}\n\n/**\n * Converts a single recipient object to an ASN.1 object.\n *\n * @param obj the recipient object.\n *\n * @return the ASN.1 RecipientInfo.\n */\nfunction _recipientToAsn1(obj) {\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(obj.version).getBytes()),\n // IssuerAndSerialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Name\n forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}),\n // Serial\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(obj.serialNumber))\n ]),\n // KeyEncryptionAlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.encryptedContent.algorithm).getBytes()),\n // Parameter, force NULL, only RSA supported for now.\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // EncryptedKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n obj.encryptedContent.content)\n ]);\n}\n\n/**\n * Map a set of RecipientInfo ASN.1 objects to recipient objects.\n *\n * @param infos an array of ASN.1 representations RecipientInfo (i.e. SET OF).\n *\n * @return an array of recipient objects.\n */\nfunction _recipientsFromAsn1(infos) {\n var ret = [];\n for(var i = 0; i < infos.length; ++i) {\n ret.push(_recipientFromAsn1(infos[i]));\n }\n return ret;\n}\n\n/**\n * Map an array of recipient objects to ASN.1 RecipientInfo objects.\n *\n * @param recipients an array of recipientInfo objects.\n *\n * @return an array of ASN.1 RecipientInfos.\n */\nfunction _recipientsToAsn1(recipients) {\n var ret = [];\n for(var i = 0; i < recipients.length; ++i) {\n ret.push(_recipientToAsn1(recipients[i]));\n }\n return ret;\n}\n\n/**\n * Converts a single signer from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a SignerInfo.\n *\n * @return the signer object.\n */\nfunction _signerFromAsn1(obj) {\n // validate EnvelopedData content block and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.signerInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 SignerInfo. 
' +\n 'ASN.1 object is not an PKCS#7 SignerInfo.');\n error.errors = errors;\n throw error;\n }\n\n var rval = {\n version: capture.version.charCodeAt(0),\n issuer: forge.pki.RDNAttributesAsArray(capture.issuer),\n serialNumber: forge.util.createBuffer(capture.serial).toHex(),\n digestAlgorithm: asn1.derToOid(capture.digestAlgorithm),\n signatureAlgorithm: asn1.derToOid(capture.signatureAlgorithm),\n signature: capture.signature,\n authenticatedAttributes: [],\n unauthenticatedAttributes: []\n };\n\n // TODO: convert attributes\n var authenticatedAttributes = capture.authenticatedAttributes || [];\n var unauthenticatedAttributes = capture.unauthenticatedAttributes || [];\n\n return rval;\n}\n\n/**\n * Converts a single signerInfo object to an ASN.1 object.\n *\n * @param obj the signerInfo object.\n *\n * @return the ASN.1 representation of a SignerInfo.\n */\nfunction _signerToAsn1(obj) {\n // SignerInfo\n var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(obj.version).getBytes()),\n // issuerAndSerialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // name\n forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}),\n // serial\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(obj.serialNumber))\n ]),\n // digestAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.digestAlgorithm).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ]);\n\n // authenticatedAttributes (OPTIONAL)\n if(obj.authenticatedAttributesAsn1) {\n // add ASN.1 previously generated during signing\n rval.value.push(obj.authenticatedAttributesAsn1);\n }\n\n // digestEncryptionAlgorithm\n rval.value.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.signatureAlgorithm).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n\n // encryptedDigest\n rval.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, obj.signature));\n\n // unauthenticatedAttributes (OPTIONAL)\n if(obj.unauthenticatedAttributes.length > 0) {\n // [1] IMPLICIT\n var attrsAsn1 = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, []);\n for(var i = 0; i < obj.unauthenticatedAttributes.length; ++i) {\n var attr = obj.unauthenticatedAttributes[i];\n attrsAsn1.values.push(_attributeToAsn1(attr));\n }\n rval.value.push(attrsAsn1);\n }\n\n return rval;\n}\n\n/**\n * Map a set of SignerInfo ASN.1 objects to an array of signer objects.\n *\n * @param signerInfoAsn1s an array of ASN.1 SignerInfos (i.e. 
SET OF).\n *\n * @return an array of signers objects.\n */\nfunction _signersFromAsn1(signerInfoAsn1s) {\n var ret = [];\n for(var i = 0; i < signerInfoAsn1s.length; ++i) {\n ret.push(_signerFromAsn1(signerInfoAsn1s[i]));\n }\n return ret;\n}\n\n/**\n * Map an array of signer objects to ASN.1 objects.\n *\n * @param signers an array of signer objects.\n *\n * @return an array of ASN.1 SignerInfos.\n */\nfunction _signersToAsn1(signers) {\n var ret = [];\n for(var i = 0; i < signers.length; ++i) {\n ret.push(_signerToAsn1(signers[i]));\n }\n return ret;\n}\n\n/**\n * Convert an attribute object to an ASN.1 Attribute.\n *\n * @param attr the attribute object.\n *\n * @return the ASN.1 Attribute.\n */\nfunction _attributeToAsn1(attr) {\n var value;\n\n // TODO: generalize to support more attributes\n if(attr.type === forge.pki.oids.contentType) {\n value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.value).getBytes());\n } else if(attr.type === forge.pki.oids.messageDigest) {\n value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n attr.value.bytes());\n } else if(attr.type === forge.pki.oids.signingTime) {\n /* Note per RFC 2985: Dates between 1 January 1950 and 31 December 2049\n (inclusive) MUST be encoded as UTCTime. Any dates with year values\n before 1950 or after 2049 MUST be encoded as GeneralizedTime. [Further,]\n UTCTime values MUST be expressed in Greenwich Mean Time (Zulu) and MUST\n include seconds (i.e., times are YYMMDDHHMMSSZ), even where the\n number of seconds is zero. Midnight (GMT) must be represented as\n \"YYMMDD000000Z\". */\n // TODO: make these module-level constants\n var jan_1_1950 = new Date('1950-01-01T00:00:00Z');\n var jan_1_2050 = new Date('2050-01-01T00:00:00Z');\n var date = attr.value;\n if(typeof date === 'string') {\n // try to parse date\n var timestamp = Date.parse(date);\n if(!isNaN(timestamp)) {\n date = new Date(timestamp);\n } else if(date.length === 13) {\n // YYMMDDHHMMSSZ (13 chars for UTCTime)\n date = asn1.utcTimeToDate(date);\n } else {\n // assume generalized time\n date = asn1.generalizedTimeToDate(date);\n }\n }\n\n if(date >= jan_1_1950 && date < jan_1_2050) {\n value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false,\n asn1.dateToUtcTime(date));\n } else {\n value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false,\n asn1.dateToGeneralizedTime(date));\n }\n }\n\n // TODO: expose as common API call\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.type).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n // AttributeValue\n value\n ])\n ]);\n}\n\n/**\n * Map messages encrypted content to ASN.1 objects.\n *\n * @param ec The encryptedContent object of the message.\n *\n * @return ASN.1 representation of the encryptedContent object (SEQUENCE).\n */\nfunction _encryptedContentToAsn1(ec) {\n return [\n // ContentType, always Data for the moment\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(forge.pki.oids.data).getBytes()),\n // ContentEncryptionAlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(ec.algorithm).getBytes()),\n // Parameters (IV)\n !ec.parameter ?\n undefined :\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n ec.parameter.getBytes())\n ]),\n // [0] EncryptedContent\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n ec.content.getBytes())\n ])\n ];\n}\n\n/**\n * Reads the \"common part\" of an PKCS#7 content block (in ASN.1 format)\n *\n * This function reads the \"common part\" of the PKCS#7 content blocks\n * EncryptedData and EnvelopedData, i.e. version number and symmetrically\n * encrypted content block.\n *\n * The result of the ASN.1 validate and capture process is returned\n * to allow the caller to extract further data, e.g. the list of recipients\n * in case of a EnvelopedData object.\n *\n * @param msg the PKCS#7 object to read the data to.\n * @param obj the ASN.1 representation of the content block.\n * @param validator the ASN.1 structure validator object to use.\n *\n * @return the value map captured by validator object.\n */\nfunction _fromAsn1(msg, obj, validator) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, validator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 message. ' +\n 'ASN.1 object is not a supported PKCS#7 message.');\n error.errors = error;\n throw error;\n }\n\n // Check contentType, so far we only support (raw) Data.\n var contentType = asn1.derToOid(capture.contentType);\n if(contentType !== forge.pki.oids.data) {\n throw new Error('Unsupported PKCS#7 message. ' +\n 'Only wrapped ContentType Data supported.');\n }\n\n if(capture.encryptedContent) {\n var content = '';\n if(forge.util.isArray(capture.encryptedContent)) {\n for(var i = 0; i < capture.encryptedContent.length; ++i) {\n if(capture.encryptedContent[i].type !== asn1.Type.OCTETSTRING) {\n throw new Error('Malformed PKCS#7 message, expecting encrypted ' +\n 'content constructed of only OCTET STRING objects.');\n }\n content += capture.encryptedContent[i].value;\n }\n } else {\n content = capture.encryptedContent;\n }\n msg.encryptedContent = {\n algorithm: asn1.derToOid(capture.encAlgorithm),\n parameter: forge.util.createBuffer(capture.encParameter.value),\n content: forge.util.createBuffer(content)\n };\n }\n\n if(capture.content) {\n var content = '';\n if(forge.util.isArray(capture.content)) {\n for(var i = 0; i < capture.content.length; ++i) {\n if(capture.content[i].type !== asn1.Type.OCTETSTRING) {\n throw new Error('Malformed PKCS#7 message, expecting ' +\n 'content constructed of only OCTET STRING objects.');\n }\n content += capture.content[i].value;\n }\n } else {\n content = capture.content;\n }\n msg.content = forge.util.createBuffer(content);\n }\n\n msg.version = capture.version.charCodeAt(0);\n msg.rawCapture = capture;\n\n return capture;\n}\n\n/**\n * Decrypt the symmetrically encrypted content block of the PKCS#7 message.\n *\n * Decryption is skipped in case the PKCS#7 message object already has a\n * (decrypted) content attribute. 
The algorithm, key and cipher parameters\n * (probably the iv) are taken from the encryptedContent attribute of the\n * message object.\n *\n * @param The PKCS#7 message object.\n */\nfunction _decryptContent(msg) {\n if(msg.encryptedContent.key === undefined) {\n throw new Error('Symmetric key not available.');\n }\n\n if(msg.content === undefined) {\n var ciph;\n\n switch(msg.encryptedContent.algorithm) {\n case forge.pki.oids['aes128-CBC']:\n case forge.pki.oids['aes192-CBC']:\n case forge.pki.oids['aes256-CBC']:\n ciph = forge.aes.createDecryptionCipher(msg.encryptedContent.key);\n break;\n\n case forge.pki.oids['desCBC']:\n case forge.pki.oids['des-EDE3-CBC']:\n ciph = forge.des.createDecryptionCipher(msg.encryptedContent.key);\n break;\n\n default:\n throw new Error('Unsupported symmetric cipher, OID ' +\n msg.encryptedContent.algorithm);\n }\n ciph.start(msg.encryptedContent.parameter);\n ciph.update(msg.encryptedContent.content);\n\n if(!ciph.finish()) {\n throw new Error('Symmetric decryption failed.');\n }\n\n msg.content = ciph.output;\n }\n}\n","/**\n * Javascript implementation of ASN.1 validators for PKCS#7 v1.5.\n *\n * @author Dave Longley\n * @author Stefan Siegl\n *\n * Copyright (c) 2012-2015 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * The ASN.1 representation of PKCS#7 is as follows\n * (see RFC #2315 for details, http://www.ietf.org/rfc/rfc2315.txt):\n *\n * A PKCS#7 message consists of a ContentInfo on root level, which may\n * contain any number of further ContentInfo nested into it.\n *\n * ContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL\n * }\n *\n * ContentType ::= OBJECT IDENTIFIER\n *\n * EnvelopedData ::= SEQUENCE {\n * version Version,\n * recipientInfos RecipientInfos,\n * encryptedContentInfo EncryptedContentInfo\n * }\n *\n * EncryptedData ::= SEQUENCE {\n * version Version,\n * encryptedContentInfo EncryptedContentInfo\n * }\n *\n * id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)\n * us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 }\n *\n * SignedData ::= SEQUENCE {\n * version INTEGER,\n * digestAlgorithms DigestAlgorithmIdentifiers,\n * contentInfo ContentInfo,\n * certificates [0] IMPLICIT Certificates OPTIONAL,\n * crls [1] IMPLICIT CertificateRevocationLists OPTIONAL,\n * signerInfos SignerInfos\n * }\n *\n * SignerInfos ::= SET OF SignerInfo\n *\n * SignerInfo ::= SEQUENCE {\n * version Version,\n * issuerAndSerialNumber IssuerAndSerialNumber,\n * digestAlgorithm DigestAlgorithmIdentifier,\n * authenticatedAttributes [0] IMPLICIT Attributes OPTIONAL,\n * digestEncryptionAlgorithm DigestEncryptionAlgorithmIdentifier,\n * encryptedDigest EncryptedDigest,\n * unauthenticatedAttributes [1] IMPLICIT Attributes OPTIONAL\n * }\n *\n * EncryptedDigest ::= OCTET STRING\n *\n * Attributes ::= SET OF Attribute\n *\n * Attribute ::= SEQUENCE {\n * attrType OBJECT IDENTIFIER,\n * attrValues SET OF AttributeValue\n * }\n *\n * AttributeValue ::= ANY\n *\n * Version ::= INTEGER\n *\n * RecipientInfos ::= SET OF RecipientInfo\n *\n * EncryptedContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,\n * encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL\n * }\n *\n * ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. 
In the case of AES and DES3, there is only one,\n * the IV.\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * EncryptedContent ::= OCTET STRING\n *\n * RecipientInfo ::= SEQUENCE {\n * version Version,\n * issuerAndSerialNumber IssuerAndSerialNumber,\n * keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,\n * encryptedKey EncryptedKey\n * }\n *\n * IssuerAndSerialNumber ::= SEQUENCE {\n * issuer Name,\n * serialNumber CertificateSerialNumber\n * }\n *\n * CertificateSerialNumber ::= INTEGER\n *\n * KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * EncryptedKey ::= OCTET STRING\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./util');\n\n// shortcut for ASN.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for PKCS#7 API\nvar p7v = module.exports = forge.pkcs7asn1 = forge.pkcs7asn1 || {};\nforge.pkcs7 = forge.pkcs7 || {};\nforge.pkcs7.asn1 = p7v;\n\nvar contentInfoValidator = {\n name: 'ContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'ContentInfo.ContentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'ContentInfo.content',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n captureAsn1: 'content'\n }]\n};\np7v.contentInfoValidator = contentInfoValidator;\n\nvar encryptedContentInfoValidator = {\n name: 'EncryptedContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedContentInfo.contentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encAlgorithm'\n }, {\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n captureAsn1: 'encParameter'\n }]\n }, {\n name: 'EncryptedContentInfo.encryptedContent',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n /* The PKCS#7 structure output by OpenSSL somewhat differs from what\n * other implementations do generate.\n *\n * OpenSSL generates a structure like this:\n * SEQUENCE {\n * ...\n * [0]\n * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38\n * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45\n * ...\n * }\n *\n * Whereas other implementations (and this PKCS#7 module) generate:\n * SEQUENCE {\n * ...\n * [0] {\n * OCTET STRING\n * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38\n * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45\n * ...\n * }\n * }\n *\n * In order to support both, we just capture the context specific\n * field here. 
The OCTET STRING bit is removed below.\n */\n capture: 'encryptedContent',\n captureAsn1: 'encryptedContentAsn1'\n }]\n};\n\np7v.envelopedDataValidator = {\n name: 'EnvelopedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EnvelopedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'EnvelopedData.RecipientInfos',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n captureAsn1: 'recipientInfos'\n }].concat(encryptedContentInfoValidator)\n};\n\np7v.encryptedDataValidator = {\n name: 'EncryptedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }].concat(encryptedContentInfoValidator)\n};\n\nvar signerValidator = {\n name: 'SignerInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false\n }, {\n name: 'SignerInfo.issuerAndSerialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.issuerAndSerialNumber.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'issuer'\n }, {\n name: 'SignerInfo.issuerAndSerialNumber.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'serial'\n }]\n }, {\n name: 'SignerInfo.digestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.digestAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'digestAlgorithm'\n }, {\n name: 'SignerInfo.digestAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n constructed: false,\n captureAsn1: 'digestParameter',\n optional: true\n }]\n }, {\n name: 'SignerInfo.authenticatedAttributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n capture: 'authenticatedAttributes'\n }, {\n name: 'SignerInfo.digestEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n capture: 'signatureAlgorithm'\n }, {\n name: 'SignerInfo.encryptedDigest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'signature'\n }, {\n name: 'SignerInfo.unauthenticatedAttributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n optional: true,\n capture: 'unauthenticatedAttributes'\n }]\n};\n\np7v.signedDataValidator = {\n name: 'SignedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'SignedData.DigestAlgorithms',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n captureAsn1: 'digestAlgorithms'\n },\n contentInfoValidator,\n {\n name: 'SignedData.Certificates',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n optional: true,\n captureAsn1: 'certificates'\n }, {\n name: 'SignedData.CertificateRevocationLists',\n tagClass: 
asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n optional: true,\n captureAsn1: 'crls'\n }, {\n name: 'SignedData.SignerInfos',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n capture: 'signerInfos',\n optional: true,\n value: [signerValidator]\n }]\n};\n\np7v.recipientInfoValidator = {\n name: 'RecipientInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'RecipientInfo.issuerAndSerial',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.issuerAndSerial.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'issuer'\n }, {\n name: 'RecipientInfo.issuerAndSerial.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'serial'\n }]\n }, {\n name: 'RecipientInfo.keyEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.keyEncryptionAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encAlgorithm'\n }, {\n name: 'RecipientInfo.keyEncryptionAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n constructed: false,\n captureAsn1: 'encParameter',\n optional: true\n }]\n }, {\n name: 'RecipientInfo.encryptedKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encKey'\n }]\n};\n","/**\n * Javascript implementation of a basic Public Key Infrastructure, including\n * support for RSA public and private keys.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./oids');\nrequire('./pbe');\nrequire('./pem');\nrequire('./pbkdf2');\nrequire('./pkcs12');\nrequire('./pss');\nrequire('./rsa');\nrequire('./util');\nrequire('./x509');\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Public Key Infrastructure (PKI) implementation. */\nvar pki = module.exports = forge.pki = forge.pki || {};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED. 
Use pem.decode() instead.\n *\n * Converts PEM-formatted data to DER.\n *\n * @param pem the PEM-formatted data.\n *\n * @return the DER-formatted data.\n */\npki.pemToDer = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert PEM to DER; PEM is encrypted.');\n }\n return forge.util.createBuffer(msg.body);\n};\n\n/**\n * Converts an RSA private key from PEM format.\n *\n * @param pem the PEM-formatted private key.\n *\n * @return the private key.\n */\npki.privateKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'PRIVATE KEY' && msg.type !== 'RSA PRIVATE KEY') {\n var error = new Error('Could not convert private key from PEM; PEM ' +\n 'header type is not \"PRIVATE KEY\" or \"RSA PRIVATE KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert private key from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return pki.privateKeyFromAsn1(obj);\n};\n\n/**\n * Converts an RSA private key to PEM format.\n *\n * @param key the private key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted private key.\n */\npki.privateKeyToPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'RSA PRIVATE KEY',\n body: asn1.toDer(pki.privateKeyToAsn1(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts a PrivateKeyInfo to PEM format.\n *\n * @param pki the PrivateKeyInfo.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted private key.\n */\npki.privateKeyInfoToPem = function(pki, maxline) {\n // convert to DER, then PEM-encode\n var msg = {\n type: 'PRIVATE KEY',\n body: asn1.toDer(pki).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n","/**\n * Prime number generation API.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\nrequire('./jsbn');\nrequire('./random');\n\n(function() {\n\n// forge.prime already defined\nif(forge.prime) {\n module.exports = forge.prime;\n return;\n}\n\n/* PRIME API */\nvar prime = module.exports = forge.prime = forge.prime || {};\n\nvar BigInteger = forge.jsbn.BigInteger;\n\n// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29\nvar GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];\nvar THIRTY = new BigInteger(null);\nTHIRTY.fromInt(30);\nvar op_or = function(x, y) {return x|y;};\n\n/**\n * Generates a random probable prime with the given number of bits.\n *\n * Alternative algorithms can be specified by name as a string or as an\n * object with custom options like so:\n *\n * {\n * name: 'PRIMEINC',\n * options: {\n * maxBlockTime: ,\n * millerRabinTests: ,\n * workerScript: ,\n * workers: .\n * workLoad: the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * }\n * }\n *\n * @param bits the number of bits for the prime number.\n * @param options the options to use.\n * [algorithm] the algorithm to use (default: 'PRIMEINC').\n * [prng] a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n *\n * @return callback(err, num) called once the operation completes.\n 
*/\nprime.generateProbablePrime = function(bits, options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n\n // default to PRIMEINC algorithm\n var algorithm = options.algorithm || 'PRIMEINC';\n if(typeof algorithm === 'string') {\n algorithm = {name: algorithm};\n }\n algorithm.options = algorithm.options || {};\n\n // create prng with api that matches BigInteger secure random\n var prng = options.prng || forge.random;\n var rng = {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n var b = prng.getBytesSync(x.length);\n for(var i = 0; i < x.length; ++i) {\n x[i] = b.charCodeAt(i);\n }\n }\n };\n\n if(algorithm.name === 'PRIMEINC') {\n return primeincFindPrime(bits, rng, algorithm.options, callback);\n }\n\n throw new Error('Invalid prime generation algorithm: ' + algorithm.name);\n};\n\nfunction primeincFindPrime(bits, rng, options, callback) {\n if('workers' in options) {\n return primeincFindPrimeWithWorkers(bits, rng, options, callback);\n }\n return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);\n}\n\nfunction primeincFindPrimeWithoutWorkers(bits, rng, options, callback) {\n // initialize random number\n var num = generateRandom(bits, rng);\n\n /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The\n number we are given is always aligned at 30k + 1. Each time the number is\n determined not to be prime we add to get to the next 'i', eg: if the number\n was at 30k + 1 we add 6. */\n var deltaIdx = 0;\n\n // get required number of MR tests\n var mrTests = getMillerRabinTests(num.bitLength());\n if('millerRabinTests' in options) {\n mrTests = options.millerRabinTests;\n }\n\n // find prime nearest to 'num' for maxBlockTime ms\n // 10 ms gives 5ms of leeway for other calculations before dropping\n // below 60fps (1000/60 == 16.67), but in reality, the number will\n // likely be higher due to an 'atomic' big int modPow\n var maxBlockTime = 10;\n if('maxBlockTime' in options) {\n maxBlockTime = options.maxBlockTime;\n }\n\n _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);\n}\n\nfunction _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) {\n var start = +new Date();\n do {\n // overflow, regenerate random number\n if(num.bitLength() > bits) {\n num = generateRandom(bits, rng);\n }\n // do primality test\n if(num.isProbablePrime(mrTests)) {\n return callback(null, num);\n }\n // get next potential prime\n num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);\n } while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime));\n\n // keep trying later\n forge.util.setImmediate(function() {\n _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);\n });\n}\n\n// NOTE: This algorithm is indeterminate in nature because workers\n// run in parallel looking at different segments of numbers. 
Even if this\n// algorithm is run twice with the same input from a predictable RNG, it\n// may produce different outputs.\nfunction primeincFindPrimeWithWorkers(bits, rng, options, callback) {\n // web workers unavailable\n if(typeof Worker === 'undefined') {\n return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);\n }\n\n // initialize random number\n var num = generateRandom(bits, rng);\n\n // use web workers to generate keys\n var numWorkers = options.workers;\n var workLoad = options.workLoad || 100;\n var range = workLoad * 30 / 8;\n var workerScript = options.workerScript || 'forge/prime.worker.js';\n if(numWorkers === -1) {\n return forge.util.estimateCores(function(err, cores) {\n if(err) {\n // default to 2\n cores = 2;\n }\n numWorkers = cores - 1;\n generate();\n });\n }\n generate();\n\n function generate() {\n // require at least 1 worker\n numWorkers = Math.max(1, numWorkers);\n\n // TODO: consider optimizing by starting workers outside getPrime() ...\n // note that in order to clean up they will have to be made internally\n // asynchronous which may actually be slower\n\n // start workers immediately\n var workers = [];\n for(var i = 0; i < numWorkers; ++i) {\n // FIXME: fix path or use blob URLs\n workers[i] = new Worker(workerScript);\n }\n var running = numWorkers;\n\n // listen for requests from workers and assign ranges to find prime\n for(var i = 0; i < numWorkers; ++i) {\n workers[i].addEventListener('message', workerMessage);\n }\n\n /* Note: The distribution of random numbers is unknown. Therefore, each\n web worker is continuously allocated a range of numbers to check for a\n random number until one is found.\n\n Every 30 numbers will be checked just 8 times, because prime numbers\n have the form:\n\n 30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this)\n\n Therefore, if we want a web worker to run N checks before asking for\n a new range of numbers, each range must contain N*30/8 numbers.\n\n For 100 checks (workLoad), this is a range of 375. 
*/\n\n var found = false;\n function workerMessage(e) {\n // ignore message, prime already found\n if(found) {\n return;\n }\n\n --running;\n var data = e.data;\n if(data.found) {\n // terminate all workers\n for(var i = 0; i < workers.length; ++i) {\n workers[i].terminate();\n }\n found = true;\n return callback(null, new BigInteger(data.prime, 16));\n }\n\n // overflow, regenerate random number\n if(num.bitLength() > bits) {\n num = generateRandom(bits, rng);\n }\n\n // assign new range to check\n var hex = num.toString(16);\n\n // start prime search\n e.target.postMessage({\n hex: hex,\n workLoad: workLoad\n });\n\n num.dAddOffset(range, 0);\n }\n }\n}\n\n/**\n * Generates a random number using the given number of bits and RNG.\n *\n * @param bits the number of bits for the number.\n * @param rng the random number generator to use.\n *\n * @return the random number.\n */\nfunction generateRandom(bits, rng) {\n var num = new BigInteger(bits, rng);\n // force MSB set\n var bits1 = bits - 1;\n if(!num.testBit(bits1)) {\n num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num);\n }\n // align number on 30k+1 boundary\n num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0);\n return num;\n}\n\n/**\n * Returns the required number of Miller-Rabin tests to generate a\n * prime with an error probability of (1/2)^80.\n *\n * See Handbook of Applied Cryptography Chapter 4, Table 4.4.\n *\n * @param bits the bit size.\n *\n * @return the required number of iterations.\n */\nfunction getMillerRabinTests(bits) {\n if(bits <= 100) return 27;\n if(bits <= 150) return 18;\n if(bits <= 200) return 15;\n if(bits <= 250) return 12;\n if(bits <= 300) return 9;\n if(bits <= 350) return 8;\n if(bits <= 400) return 7;\n if(bits <= 500) return 6;\n if(bits <= 600) return 5;\n if(bits <= 800) return 4;\n if(bits <= 1250) return 3;\n return 2;\n}\n\n})();\n","/**\n * A javascript implementation of a cryptographically-secure\n * Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed\n * here though the use of SHA-256 is not enforced; when generating an\n * a PRNG context, the hashing algorithm and block cipher used for\n * the generator are specified via a plugin.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nvar _crypto = null;\nif(forge.util.isNodejs && !forge.options.usePureJavaScript &&\n !process.versions['node-webkit']) {\n _crypto = require('crypto');\n}\n\n/* PRNG API */\nvar prng = module.exports = forge.prng = forge.prng || {};\n\n/**\n * Creates a new PRNG context.\n *\n * A PRNG plugin must be passed in that will provide:\n *\n * 1. A function that initializes the key and seed of a PRNG context. It\n * will be given a 16 byte key and a 16 byte seed. Any key expansion\n * or transformation of the seed from a byte string into an array of\n * integers (or similar) should be performed.\n * 2. The cryptographic function used by the generator. It takes a key and\n * a seed.\n * 3. A seed increment function. It takes the seed and returns seed + 1.\n * 4. 
An api to create a message digest.\n *\n * For an example, see random.js.\n *\n * @param plugin the PRNG plugin to use.\n */\nprng.create = function(plugin) {\n var ctx = {\n plugin: plugin,\n key: null,\n seed: null,\n time: null,\n // number of reseeds so far\n reseeds: 0,\n // amount of data generated so far\n generated: 0,\n // no initial key bytes\n keyBytes: ''\n };\n\n // create 32 entropy pools (each is a message digest)\n var md = plugin.md;\n var pools = new Array(32);\n for(var i = 0; i < 32; ++i) {\n pools[i] = md.create();\n }\n ctx.pools = pools;\n\n // entropy pools are written to cyclically, starting at index 0\n ctx.pool = 0;\n\n /**\n * Generates random bytes. The bytes may be generated synchronously or\n * asynchronously. Web workers must use the asynchronous interface or\n * else the behavior is undefined.\n *\n * @param count the number of random bytes to generate.\n * @param [callback(err, bytes)] called once the operation completes.\n *\n * @return count random bytes as a string.\n */\n ctx.generate = function(count, callback) {\n // do synchronously\n if(!callback) {\n return ctx.generateSync(count);\n }\n\n // simple generator using counter-based CBC\n var cipher = ctx.plugin.cipher;\n var increment = ctx.plugin.increment;\n var formatKey = ctx.plugin.formatKey;\n var formatSeed = ctx.plugin.formatSeed;\n var b = forge.util.createBuffer();\n\n // paranoid deviation from Fortuna:\n // reset key for every request to protect previously\n // generated random bytes should the key be discovered;\n // there is no 100ms based reseeding because of this\n // forced reseed for every `generate` call\n ctx.key = null;\n\n generate();\n\n function generate(err) {\n if(err) {\n return callback(err);\n }\n\n // sufficient bytes generated\n if(b.length() >= count) {\n return callback(null, b.getBytes(count));\n }\n\n // if amount of data generated is greater than 1 MiB, trigger reseed\n if(ctx.generated > 0xfffff) {\n ctx.key = null;\n }\n\n if(ctx.key === null) {\n // prevent stack overflow\n return forge.util.nextTick(function() {\n _reseed(generate);\n });\n }\n\n // generate the random bytes\n var bytes = cipher(ctx.key, ctx.seed);\n ctx.generated += bytes.length;\n b.putBytes(bytes);\n\n // generate bytes for a new key and seed\n ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed)));\n ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));\n\n forge.util.setImmediate(generate);\n }\n };\n\n /**\n * Generates random bytes synchronously.\n *\n * @param count the number of random bytes to generate.\n *\n * @return count random bytes as a string.\n */\n ctx.generateSync = function(count) {\n // simple generator using counter-based CBC\n var cipher = ctx.plugin.cipher;\n var increment = ctx.plugin.increment;\n var formatKey = ctx.plugin.formatKey;\n var formatSeed = ctx.plugin.formatSeed;\n\n // paranoid deviation from Fortuna:\n // reset key for every request to protect previously\n // generated random bytes should the key be discovered;\n // there is no 100ms based reseeding because of this\n // forced reseed for every `generateSync` call\n ctx.key = null;\n\n var b = forge.util.createBuffer();\n while(b.length() < count) {\n // if amount of data generated is greater than 1 MiB, trigger reseed\n if(ctx.generated > 0xfffff) {\n ctx.key = null;\n }\n\n if(ctx.key === null) {\n _reseedSync();\n }\n\n // generate the random bytes\n var bytes = cipher(ctx.key, ctx.seed);\n ctx.generated += bytes.length;\n b.putBytes(bytes);\n\n // generate bytes for a new key and seed\n ctx.key = 
formatKey(cipher(ctx.key, increment(ctx.seed)));\n ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));\n }\n\n return b.getBytes(count);\n };\n\n /**\n * Private function that asynchronously reseeds a generator.\n *\n * @param callback(err) called once the operation completes.\n */\n function _reseed(callback) {\n if(ctx.pools[0].messageLength >= 32) {\n _seed();\n return callback();\n }\n // not enough seed data...\n var needed = (32 - ctx.pools[0].messageLength) << 5;\n ctx.seedFile(needed, function(err, bytes) {\n if(err) {\n return callback(err);\n }\n ctx.collect(bytes);\n _seed();\n callback();\n });\n }\n\n /**\n * Private function that synchronously reseeds a generator.\n */\n function _reseedSync() {\n if(ctx.pools[0].messageLength >= 32) {\n return _seed();\n }\n // not enough seed data...\n var needed = (32 - ctx.pools[0].messageLength) << 5;\n ctx.collect(ctx.seedFileSync(needed));\n _seed();\n }\n\n /**\n * Private function that seeds a generator once enough bytes are available.\n */\n function _seed() {\n // update reseed count\n ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 0 : ctx.reseeds + 1;\n\n // goal is to update `key` via:\n // key = hash(key + s)\n // where 's' is all collected entropy from selected pools, then...\n\n // create a plugin-based message digest\n var md = ctx.plugin.md.create();\n\n // consume current key bytes\n md.update(ctx.keyBytes);\n\n // digest the entropy of pools whose index k meet the\n // condition 'n mod 2^k == 0' where n is the number of reseeds\n var _2powK = 1;\n for(var k = 0; k < 32; ++k) {\n if(ctx.reseeds % _2powK === 0) {\n md.update(ctx.pools[k].digest().getBytes());\n ctx.pools[k].start();\n }\n _2powK = _2powK << 1;\n }\n\n // get digest for key bytes\n ctx.keyBytes = md.digest().getBytes();\n\n // paranoid deviation from Fortuna:\n // update `seed` via `seed = hash(key)`\n // instead of initializing to zero once and only\n // ever incrementing it\n md.start();\n md.update(ctx.keyBytes);\n var seedBytes = md.digest().getBytes();\n\n // update state\n ctx.key = ctx.plugin.formatKey(ctx.keyBytes);\n ctx.seed = ctx.plugin.formatSeed(seedBytes);\n ctx.generated = 0;\n }\n\n /**\n * The built-in default seedFile. This seedFile is used when entropy\n * is needed immediately.\n *\n * @param needed the number of bytes that are needed.\n *\n * @return the random bytes.\n */\n function defaultSeedFile(needed) {\n // use window.crypto.getRandomValues strong source of entropy if available\n var getRandomValues = null;\n var globalScope = forge.util.globalScope;\n var _crypto = globalScope.crypto || globalScope.msCrypto;\n if(_crypto && _crypto.getRandomValues) {\n getRandomValues = function(arr) {\n return _crypto.getRandomValues(arr);\n };\n }\n\n var b = forge.util.createBuffer();\n if(getRandomValues) {\n while(b.length() < needed) {\n // max byte length is 65536 before QuotaExceededError is thrown\n // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues\n var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4);\n var entropy = new Uint32Array(Math.floor(count));\n try {\n getRandomValues(entropy);\n for(var i = 0; i < entropy.length; ++i) {\n b.putInt32(entropy[i]);\n }\n } catch(e) {\n /* only ignore QuotaExceededError */\n if(!(typeof QuotaExceededError !== 'undefined' &&\n e instanceof QuotaExceededError)) {\n throw e;\n }\n }\n }\n }\n\n // be sad and add some weak random data\n if(b.length() < needed) {\n /* Draws from Park-Miller \"minimal standard\" 31 bit PRNG,\n implemented with David G. 
Carta's optimization: with 32 bit math\n and without division (Public Domain). */\n var hi, lo, next;\n var seed = Math.floor(Math.random() * 0x010000);\n while(b.length() < needed) {\n lo = 16807 * (seed & 0xFFFF);\n hi = 16807 * (seed >> 16);\n lo += (hi & 0x7FFF) << 16;\n lo += hi >> 15;\n lo = (lo & 0x7FFFFFFF) + (lo >> 31);\n seed = lo & 0xFFFFFFFF;\n\n // consume lower 3 bytes of seed\n for(var i = 0; i < 3; ++i) {\n // throw in more pseudo random\n next = seed >>> (i << 3);\n next ^= Math.floor(Math.random() * 0x0100);\n b.putByte(next & 0xFF);\n }\n }\n }\n\n return b.getBytes(needed);\n }\n // initialize seed file APIs\n if(_crypto) {\n // use nodejs async API\n ctx.seedFile = function(needed, callback) {\n _crypto.randomBytes(needed, function(err, bytes) {\n if(err) {\n return callback(err);\n }\n callback(null, bytes.toString());\n });\n };\n // use nodejs sync API\n ctx.seedFileSync = function(needed) {\n return _crypto.randomBytes(needed).toString();\n };\n } else {\n ctx.seedFile = function(needed, callback) {\n try {\n callback(null, defaultSeedFile(needed));\n } catch(e) {\n callback(e);\n }\n };\n ctx.seedFileSync = defaultSeedFile;\n }\n\n /**\n * Adds entropy to a prng ctx's accumulator.\n *\n * @param bytes the bytes of entropy as a string.\n */\n ctx.collect = function(bytes) {\n // iterate over pools distributing entropy cyclically\n var count = bytes.length;\n for(var i = 0; i < count; ++i) {\n ctx.pools[ctx.pool].update(bytes.substr(i, 1));\n ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1;\n }\n };\n\n /**\n * Collects an integer of n bits.\n *\n * @param i the integer entropy.\n * @param n the number of bits in the integer.\n */\n ctx.collectInt = function(i, n) {\n var bytes = '';\n for(var x = 0; x < n; x += 8) {\n bytes += String.fromCharCode((i >> x) & 0xFF);\n }\n ctx.collect(bytes);\n };\n\n /**\n * Registers a Web Worker to receive immediate entropy from the main thread.\n * This method is required until Web Workers can access the native crypto\n * API. This method should be called twice for each created worker, once in\n * the main thread, and once in the worker itself.\n *\n * @param worker the worker to register.\n */\n ctx.registerWorker = function(worker) {\n // worker receives random bytes\n if(worker === self) {\n ctx.seedFile = function(needed, callback) {\n function listener(e) {\n var data = e.data;\n if(data.forge && data.forge.prng) {\n self.removeEventListener('message', listener);\n callback(data.forge.prng.err, data.forge.prng.bytes);\n }\n }\n self.addEventListener('message', listener);\n self.postMessage({forge: {prng: {needed: needed}}});\n };\n } else {\n // main thread sends random bytes upon request\n var listener = function(e) {\n var data = e.data;\n if(data.forge && data.forge.prng) {\n ctx.seedFile(data.forge.prng.needed, function(err, bytes) {\n worker.postMessage({forge: {prng: {err: err, bytes: bytes}}});\n });\n }\n };\n // TODO: do we need to remove the event listener when the worker dies?\n worker.addEventListener('message', listener);\n }\n };\n\n return ctx;\n};\n","/**\n * Javascript implementation of PKCS#1 PSS signature padding.\n *\n * @author Stefan Siegl\n *\n * Copyright (c) 2012 Stefan Siegl \n */\nvar forge = require('./forge');\nrequire('./random');\nrequire('./util');\n\n// shortcut for PSS API\nvar pss = module.exports = forge.pss = forge.pss || {};\n\n/**\n * Creates a PSS signature scheme object.\n *\n * There are several ways to provide a salt for encoding:\n *\n * 1. 
Specify the saltLength only and the built-in PRNG will generate it.\n * 2. Specify the saltLength and a custom PRNG with 'getBytesSync' defined that\n * will be used.\n * 3. Specify the salt itself as a forge.util.ByteBuffer.\n *\n * @param options the options to use:\n * md the message digest object to use, a forge md instance.\n * mgf the mask generation function to use, a forge mgf instance.\n * [saltLength] the length of the salt in octets.\n * [prng] the pseudo-random number generator to use to produce a salt.\n * [salt] the salt to use when encoding.\n *\n * @return a signature scheme object.\n */\npss.create = function(options) {\n // backwards compatibility w/legacy args: hash, mgf, sLen\n if(arguments.length === 3) {\n options = {\n md: arguments[0],\n mgf: arguments[1],\n saltLength: arguments[2]\n };\n }\n\n var hash = options.md;\n var mgf = options.mgf;\n var hLen = hash.digestLength;\n\n var salt_ = options.salt || null;\n if(typeof salt_ === 'string') {\n // assume binary-encoded string\n salt_ = forge.util.createBuffer(salt_);\n }\n\n var sLen;\n if('saltLength' in options) {\n sLen = options.saltLength;\n } else if(salt_ !== null) {\n sLen = salt_.length();\n } else {\n throw new Error('Salt length not specified or specific salt not given.');\n }\n\n if(salt_ !== null && salt_.length() !== sLen) {\n throw new Error('Given salt length does not match length of given salt.');\n }\n\n var prng = options.prng || forge.random;\n\n var pssobj = {};\n\n /**\n * Encodes a PSS signature.\n *\n * This function implements EMSA-PSS-ENCODE as per RFC 3447, section 9.1.1.\n *\n * @param md the message digest object with the hash to sign.\n * @param modsBits the length of the RSA modulus in bits.\n *\n * @return the encoded message as a binary-encoded string of length\n * ceil((modBits - 1) / 8).\n */\n pssobj.encode = function(md, modBits) {\n var i;\n var emBits = modBits - 1;\n var emLen = Math.ceil(emBits / 8);\n\n /* 2. Let mHash = Hash(M), an octet string of length hLen. */\n var mHash = md.digest().getBytes();\n\n /* 3. If emLen < hLen + sLen + 2, output \"encoding error\" and stop. */\n if(emLen < hLen + sLen + 2) {\n throw new Error('Message is too long to encrypt.');\n }\n\n /* 4. Generate a random octet string salt of length sLen; if sLen = 0,\n * then salt is the empty string. */\n var salt;\n if(salt_ === null) {\n salt = prng.getBytesSync(sLen);\n } else {\n salt = salt_.bytes();\n }\n\n /* 5. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt; */\n var m_ = new forge.util.ByteBuffer();\n m_.fillWithByte(0, 8);\n m_.putBytes(mHash);\n m_.putBytes(salt);\n\n /* 6. Let H = Hash(M'), an octet string of length hLen. */\n hash.start();\n hash.update(m_.getBytes());\n var h = hash.digest().getBytes();\n\n /* 7. Generate an octet string PS consisting of emLen - sLen - hLen - 2\n * zero octets. The length of PS may be 0. */\n var ps = new forge.util.ByteBuffer();\n ps.fillWithByte(0, emLen - sLen - hLen - 2);\n\n /* 8. Let DB = PS || 0x01 || salt; DB is an octet string of length\n * emLen - hLen - 1. */\n ps.putByte(0x01);\n ps.putBytes(salt);\n var db = ps.getBytes();\n\n /* 9. Let dbMask = MGF(H, emLen - hLen - 1). */\n var maskLen = emLen - hLen - 1;\n var dbMask = mgf.generate(h, maskLen);\n\n /* 10. Let maskedDB = DB \\xor dbMask. */\n var maskedDB = '';\n for(i = 0; i < maskLen; i++) {\n maskedDB += String.fromCharCode(db.charCodeAt(i) ^ dbMask.charCodeAt(i));\n }\n\n /* 11. Set the leftmost 8emLen - emBits bits of the leftmost octet in\n * maskedDB to zero. 
*/\n var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF;\n maskedDB = String.fromCharCode(maskedDB.charCodeAt(0) & ~mask) +\n maskedDB.substr(1);\n\n /* 12. Let EM = maskedDB || H || 0xbc.\n * 13. Output EM. */\n return maskedDB + h + String.fromCharCode(0xbc);\n };\n\n /**\n * Verifies a PSS signature.\n *\n * This function implements EMSA-PSS-VERIFY as per RFC 3447, section 9.1.2.\n *\n * @param mHash the message digest hash, as a binary-encoded string, to\n * compare against the signature.\n * @param em the encoded message, as a binary-encoded string\n * (RSA decryption result).\n * @param modsBits the length of the RSA modulus in bits.\n *\n * @return true if the signature was verified, false if not.\n */\n pssobj.verify = function(mHash, em, modBits) {\n var i;\n var emBits = modBits - 1;\n var emLen = Math.ceil(emBits / 8);\n\n /* c. Convert the message representative m to an encoded message EM\n * of length emLen = ceil((modBits - 1) / 8) octets, where modBits\n * is the length in bits of the RSA modulus n */\n em = em.substr(-emLen);\n\n /* 3. If emLen < hLen + sLen + 2, output \"inconsistent\" and stop. */\n if(emLen < hLen + sLen + 2) {\n throw new Error('Inconsistent parameters to PSS signature verification.');\n }\n\n /* 4. If the rightmost octet of EM does not have hexadecimal value\n * 0xbc, output \"inconsistent\" and stop. */\n if(em.charCodeAt(emLen - 1) !== 0xbc) {\n throw new Error('Encoded message does not end in 0xBC.');\n }\n\n /* 5. Let maskedDB be the leftmost emLen - hLen - 1 octets of EM, and\n * let H be the next hLen octets. */\n var maskLen = emLen - hLen - 1;\n var maskedDB = em.substr(0, maskLen);\n var h = em.substr(maskLen, hLen);\n\n /* 6. If the leftmost 8emLen - emBits bits of the leftmost octet in\n * maskedDB are not all equal to zero, output \"inconsistent\" and stop. */\n var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF;\n if((maskedDB.charCodeAt(0) & mask) !== 0) {\n throw new Error('Bits beyond keysize not zero as expected.');\n }\n\n /* 7. Let dbMask = MGF(H, emLen - hLen - 1). */\n var dbMask = mgf.generate(h, maskLen);\n\n /* 8. Let DB = maskedDB \\xor dbMask. */\n var db = '';\n for(i = 0; i < maskLen; i++) {\n db += String.fromCharCode(maskedDB.charCodeAt(i) ^ dbMask.charCodeAt(i));\n }\n\n /* 9. Set the leftmost 8emLen - emBits bits of the leftmost octet\n * in DB to zero. */\n db = String.fromCharCode(db.charCodeAt(0) & ~mask) + db.substr(1);\n\n /* 10. If the emLen - hLen - sLen - 2 leftmost octets of DB are not zero\n * or if the octet at position emLen - hLen - sLen - 1 (the leftmost\n * position is \"position 1\") does not have hexadecimal value 0x01,\n * output \"inconsistent\" and stop. */\n var checkLen = emLen - hLen - sLen - 2;\n for(i = 0; i < checkLen; i++) {\n if(db.charCodeAt(i) !== 0x00) {\n throw new Error('Leftmost octets not zero as expected');\n }\n }\n\n if(db.charCodeAt(checkLen) !== 0x01) {\n throw new Error('Inconsistent PSS signature, 0x01 marker not found');\n }\n\n /* 11. Let salt be the last sLen octets of DB. */\n var salt = db.substr(-sLen);\n\n /* 12. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt */\n var m_ = new forge.util.ByteBuffer();\n m_.fillWithByte(0, 8);\n m_.putBytes(mHash);\n m_.putBytes(salt);\n\n /* 13. Let H' = Hash(M'), an octet string of length hLen. */\n hash.start();\n hash.update(m_.getBytes());\n var h_ = hash.digest().getBytes();\n\n /* 14. 
If H = H', output \"consistent.\" Otherwise, output \"inconsistent.\" */\n return h === h_;\n };\n\n return pssobj;\n};\n","/**\n * An API for getting cryptographically-secure random bytes. The bytes are\n * generated using the Fortuna algorithm devised by Bruce Schneier and\n * Niels Ferguson.\n *\n * Getting strong random bytes is not yet easy to do in javascript. The only\n * truish random entropy that can be collected is from the mouse, keyboard, or\n * from timing with respect to page loads, etc. This generator makes a poor\n * attempt at providing random bytes when those sources haven't yet provided\n * enough entropy to initially seed or to reseed the PRNG.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./sha256');\nrequire('./prng');\nrequire('./util');\n\n(function() {\n\n// forge.random already defined\nif(forge.random && forge.random.getBytes) {\n module.exports = forge.random;\n return;\n}\n\n(function(jQuery) {\n\n// the default prng plugin, uses AES-128\nvar prng_aes = {};\nvar _prng_aes_output = new Array(4);\nvar _prng_aes_buffer = forge.util.createBuffer();\nprng_aes.formatKey = function(key) {\n // convert the key into 32-bit integers\n var tmp = forge.util.createBuffer(key);\n key = new Array(4);\n key[0] = tmp.getInt32();\n key[1] = tmp.getInt32();\n key[2] = tmp.getInt32();\n key[3] = tmp.getInt32();\n\n // return the expanded key\n return forge.aes._expandKey(key, false);\n};\nprng_aes.formatSeed = function(seed) {\n // convert seed into 32-bit integers\n var tmp = forge.util.createBuffer(seed);\n seed = new Array(4);\n seed[0] = tmp.getInt32();\n seed[1] = tmp.getInt32();\n seed[2] = tmp.getInt32();\n seed[3] = tmp.getInt32();\n return seed;\n};\nprng_aes.cipher = function(key, seed) {\n forge.aes._updateBlock(key, seed, _prng_aes_output, false);\n _prng_aes_buffer.putInt32(_prng_aes_output[0]);\n _prng_aes_buffer.putInt32(_prng_aes_output[1]);\n _prng_aes_buffer.putInt32(_prng_aes_output[2]);\n _prng_aes_buffer.putInt32(_prng_aes_output[3]);\n return _prng_aes_buffer.getBytes();\n};\nprng_aes.increment = function(seed) {\n // FIXME: do we care about carry or signed issues?\n ++seed[3];\n return seed;\n};\nprng_aes.md = forge.md.sha256;\n\n/**\n * Creates a new PRNG.\n */\nfunction spawnPrng() {\n var ctx = forge.prng.create(prng_aes);\n\n /**\n * Gets random bytes. If a native secure crypto API is unavailable, this\n * method tries to make the bytes more unpredictable by drawing from data that\n * can be collected from the user of the browser, eg: mouse movement.\n *\n * If a callback is given, this method will be called asynchronously.\n *\n * @param count the number of random bytes to get.\n * @param [callback(err, bytes)] called once the operation completes.\n *\n * @return the random bytes in a string.\n */\n ctx.getBytes = function(count, callback) {\n return ctx.generate(count, callback);\n };\n\n /**\n * Gets random bytes asynchronously. 
If a native secure crypto API is\n * unavailable, this method tries to make the bytes more unpredictable by\n * drawing from data that can be collected from the user of the browser,\n * eg: mouse movement.\n *\n * @param count the number of random bytes to get.\n *\n * @return the random bytes in a string.\n */\n ctx.getBytesSync = function(count) {\n return ctx.generate(count);\n };\n\n return ctx;\n}\n\n// create default prng context\nvar _ctx = spawnPrng();\n\n// add other sources of entropy only if window.crypto.getRandomValues is not\n// available -- otherwise this source will be automatically used by the prng\nvar getRandomValues = null;\nvar globalScope = forge.util.globalScope;\nvar _crypto = globalScope.crypto || globalScope.msCrypto;\nif(_crypto && _crypto.getRandomValues) {\n getRandomValues = function(arr) {\n return _crypto.getRandomValues(arr);\n };\n}\n\nif(forge.options.usePureJavaScript ||\n (!forge.util.isNodejs && !getRandomValues)) {\n // if this is a web worker, do not use weak entropy, instead register to\n // receive strong entropy asynchronously from the main thread\n if(typeof window === 'undefined' || window.document === undefined) {\n // FIXME:\n }\n\n // get load time entropy\n _ctx.collectInt(+new Date(), 32);\n\n // add some entropy from navigator object\n if(typeof(navigator) !== 'undefined') {\n var _navBytes = '';\n for(var key in navigator) {\n try {\n if(typeof(navigator[key]) == 'string') {\n _navBytes += navigator[key];\n }\n } catch(e) {\n /* Some navigator keys might not be accessible, e.g. the geolocation\n attribute throws an exception if touched in Mozilla chrome://\n context.\n\n Silently ignore this and just don't use this as a source of\n entropy. */\n }\n }\n _ctx.collect(_navBytes);\n _navBytes = null;\n }\n\n // add mouse and keyboard collectors if jquery is available\n if(jQuery) {\n // set up mouse entropy capture\n jQuery().mousemove(function(e) {\n // add mouse coords\n _ctx.collectInt(e.clientX, 16);\n _ctx.collectInt(e.clientY, 16);\n });\n\n // set up keyboard entropy capture\n jQuery().keypress(function(e) {\n _ctx.collectInt(e.charCode, 8);\n });\n }\n}\n\n/* Random API */\nif(!forge.random) {\n forge.random = _ctx;\n} else {\n // extend forge.random with _ctx\n for(var key in _ctx) {\n forge.random[key] = _ctx[key];\n }\n}\n\n// expose spawn PRNG\nforge.random.createInstance = spawnPrng;\n\nmodule.exports = forge.random;\n\n})(typeof(jQuery) !== 'undefined' ? 
jQuery : null);\n\n})();\n","/**\n * RC2 implementation.\n *\n * @author Stefan Siegl\n *\n * Copyright (c) 2012 Stefan Siegl \n *\n * Information on the RC2 cipher is available from RFC #2268,\n * http://www.ietf.org/rfc/rfc2268.txt\n */\nvar forge = require('./forge');\nrequire('./util');\n\nvar piTable = [\n 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d,\n 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2,\n 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32,\n 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82,\n 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc,\n 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26,\n 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03,\n 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7,\n 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a,\n 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec,\n 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39,\n 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31,\n 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9,\n 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9,\n 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e,\n 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad\n];\n\nvar s = [1, 2, 3, 5];\n\n/**\n * Rotate a word left by given number of bits.\n *\n * Bits that are shifted out on the left are put back in on the right\n * hand side.\n *\n * @param word The word to shift left.\n * @param bits The number of bits to shift by.\n * @return The rotated word.\n */\nvar rol = function(word, bits) {\n return ((word << bits) & 0xffff) | ((word & 0xffff) >> (16 - bits));\n};\n\n/**\n * Rotate a word right by given number of bits.\n *\n * Bits that are shifted out on the right are put back in on the left\n * hand side.\n *\n * @param word The word to shift right.\n * @param bits The number of bits to shift by.\n * @return The rotated word.\n */\nvar ror = function(word, bits) {\n return ((word & 0xffff) >> bits) | ((word << (16 - bits)) & 0xffff);\n};\n\n/* RC2 API */\nmodule.exports = forge.rc2 = forge.rc2 || {};\n\n/**\n * Perform RC2 key expansion as per RFC #2268, section 2.\n *\n * @param key variable-length user key (between 1 and 128 bytes)\n * @param effKeyBits number of effective key bits (default: 128)\n * @return the expanded RC2 key (ByteBuffer of 128 bytes)\n */\nforge.rc2.expandKey = function(key, effKeyBits) {\n if(typeof key === 'string') {\n key = forge.util.createBuffer(key);\n }\n effKeyBits = effKeyBits || 128;\n\n /* introduce variables that match the names used in RFC #2268 */\n var L = key;\n var T = key.length();\n var T1 = effKeyBits;\n var T8 = Math.ceil(T1 / 8);\n var TM = 0xff >> (T1 & 0x07);\n var i;\n\n for(i = T; i < 128; i++) {\n L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]);\n }\n\n L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]);\n\n for(i = 127 - T8; i >= 0; i--) {\n 
L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]);\n }\n\n return L;\n};\n\n/**\n * Creates a RC2 cipher object.\n *\n * @param key the symmetric key to use (as base for key generation).\n * @param bits the number of effective key bits.\n * @param encrypt false for decryption, true for encryption.\n *\n * @return the cipher.\n */\nvar createCipher = function(key, bits, encrypt) {\n var _finish = false, _input = null, _output = null, _iv = null;\n var mixRound, mashRound;\n var i, j, K = [];\n\n /* Expand key and fill into K[] Array */\n key = forge.rc2.expandKey(key, bits);\n for(i = 0; i < 64; i++) {\n K.push(key.getInt16Le());\n }\n\n if(encrypt) {\n /**\n * Perform one mixing round \"in place\".\n *\n * @param R Array of four words to perform mixing on.\n */\n mixRound = function(R) {\n for(i = 0; i < 4; i++) {\n R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +\n ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);\n R[i] = rol(R[i], s[i]);\n j++;\n }\n };\n\n /**\n * Perform one mashing round \"in place\".\n *\n * @param R Array of four words to perform mashing on.\n */\n mashRound = function(R) {\n for(i = 0; i < 4; i++) {\n R[i] += K[R[(i + 3) % 4] & 63];\n }\n };\n } else {\n /**\n * Perform one r-mixing round \"in place\".\n *\n * @param R Array of four words to perform mixing on.\n */\n mixRound = function(R) {\n for(i = 3; i >= 0; i--) {\n R[i] = ror(R[i], s[i]);\n R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +\n ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);\n j--;\n }\n };\n\n /**\n * Perform one r-mashing round \"in place\".\n *\n * @param R Array of four words to perform mashing on.\n */\n mashRound = function(R) {\n for(i = 3; i >= 0; i--) {\n R[i] -= K[R[(i + 3) % 4] & 63];\n }\n };\n }\n\n /**\n * Run the specified cipher execution plan.\n *\n * This function takes four words from the input buffer, applies the IV on\n * it (if requested) and runs the provided execution plan.\n *\n * The plan must be put together in form of a array of arrays. Where the\n * outer one is simply a list of steps to perform and the inner one needs\n * to have two elements: the first one telling how many rounds to perform,\n * the second one telling what to do (i.e. the function to call).\n *\n * @param {Array} plan The plan to execute.\n */\n var runPlan = function(plan) {\n var R = [];\n\n /* Get data from input buffer and fill the four words into R */\n for(i = 0; i < 4; i++) {\n var val = _input.getInt16Le();\n\n if(_iv !== null) {\n if(encrypt) {\n /* We're encrypting, apply the IV first. */\n val ^= _iv.getInt16Le();\n } else {\n /* We're decryption, keep cipher text for next block. */\n _iv.putInt16Le(val);\n }\n }\n\n R.push(val & 0xffff);\n }\n\n /* Reset global \"j\" variable as per spec. */\n j = encrypt ? 0 : 63;\n\n /* Run execution plan. */\n for(var ptr = 0; ptr < plan.length; ptr++) {\n for(var ctr = 0; ctr < plan[ptr][0]; ctr++) {\n plan[ptr][1](R);\n }\n }\n\n /* Write back result to output buffer. */\n for(i = 0; i < 4; i++) {\n if(_iv !== null) {\n if(encrypt) {\n /* We're encrypting in CBC-mode, feed back encrypted bytes into\n IV buffer to carry it forward to next block. */\n _iv.putInt16Le(R[i]);\n } else {\n R[i] ^= _iv.getInt16Le();\n }\n }\n\n _output.putInt16Le(R[i]);\n }\n };\n\n /* Create cipher object */\n var cipher = null;\n cipher = {\n /**\n * Starts or restarts the encryption or decryption process, whichever\n * was previously configured.\n *\n * To use the cipher in CBC mode, iv may be given either as a string\n * of bytes, or as a byte buffer. 
For ECB mode, give null as iv.\n *\n * @param iv the initialization vector to use, null for ECB mode.\n * @param output the output the buffer to write to, null to create one.\n */\n start: function(iv, output) {\n if(iv) {\n /* CBC mode */\n if(typeof iv === 'string') {\n iv = forge.util.createBuffer(iv);\n }\n }\n\n _finish = false;\n _input = forge.util.createBuffer();\n _output = output || new forge.util.createBuffer();\n _iv = iv;\n\n cipher.output = _output;\n },\n\n /**\n * Updates the next block.\n *\n * @param input the buffer to read from.\n */\n update: function(input) {\n if(!_finish) {\n // not finishing, so fill the input buffer with more input\n _input.putBuffer(input);\n }\n\n while(_input.length() >= 8) {\n runPlan([\n [ 5, mixRound ],\n [ 1, mashRound ],\n [ 6, mixRound ],\n [ 1, mashRound ],\n [ 5, mixRound ]\n ]);\n }\n },\n\n /**\n * Finishes encrypting or decrypting.\n *\n * @param pad a padding function to use, null for PKCS#7 padding,\n * signature(blockSize, buffer, decrypt).\n *\n * @return true if successful, false on error.\n */\n finish: function(pad) {\n var rval = true;\n\n if(encrypt) {\n if(pad) {\n rval = pad(8, _input, !encrypt);\n } else {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (_input.length() === 8) ? 8 : (8 - _input.length());\n _input.fillWithByte(padding, padding);\n }\n }\n\n if(rval) {\n // do final update\n _finish = true;\n cipher.update();\n }\n\n if(!encrypt) {\n // check for error: input data not a multiple of block size\n rval = (_input.length() === 0);\n if(rval) {\n if(pad) {\n rval = pad(8, _output, !encrypt);\n } else {\n // ensure padding byte count is valid\n var len = _output.length();\n var count = _output.at(len - 1);\n\n if(count > len) {\n rval = false;\n } else {\n // trim off padding bytes\n _output.truncate(count);\n }\n }\n }\n }\n\n return rval;\n }\n };\n\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the\n * given symmetric key. The output will be stored in the 'output' member\n * of the returned cipher.\n *\n * The key and iv may be given as a string of bytes or a byte buffer.\n * The cipher is initialized to use 128 effective key bits.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n *\n * @return the cipher.\n */\nforge.rc2.startEncrypting = function(key, iv, output) {\n var cipher = forge.rc2.createEncryptionCipher(key, 128);\n cipher.start(iv, output);\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the\n * given symmetric key.\n *\n * The key may be given as a string of bytes or a byte buffer.\n *\n * To start encrypting call start() on the cipher with an iv and optional\n * output buffer.\n *\n * @param key the symmetric key to use.\n *\n * @return the cipher.\n */\nforge.rc2.createEncryptionCipher = function(key, bits) {\n return createCipher(key, bits, true);\n};\n\n/**\n * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the\n * given symmetric key. 
The output will be stored in the 'output' member\n * of the returned cipher.\n *\n * The key and iv may be given as a string of bytes or a byte buffer.\n * The cipher is initialized to use 128 effective key bits.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n *\n * @return the cipher.\n */\nforge.rc2.startDecrypting = function(key, iv, output) {\n var cipher = forge.rc2.createDecryptionCipher(key, 128);\n cipher.start(iv, output);\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the\n * given symmetric key.\n *\n * The key may be given as a string of bytes or a byte buffer.\n *\n * To start decrypting call start() on the cipher with an iv and optional\n * output buffer.\n *\n * @param key the symmetric key to use.\n *\n * @return the cipher.\n */\nforge.rc2.createDecryptionCipher = function(key, bits) {\n return createCipher(key, bits, false);\n};\n","/**\n * Javascript implementation of basic RSA algorithms.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n *\n * The only algorithm currently supported for PKI is RSA.\n *\n * An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo\n * ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier\n * and a subjectPublicKey of type bit string.\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. In the case of RSA, there aren't any.\n *\n * SubjectPublicKeyInfo ::= SEQUENCE {\n * algorithm AlgorithmIdentifier,\n * subjectPublicKey BIT STRING\n * }\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * For an RSA public key, the subjectPublicKey is:\n *\n * RSAPublicKey ::= SEQUENCE {\n * modulus INTEGER, -- n\n * publicExponent INTEGER -- e\n * }\n *\n * PrivateKeyInfo ::= SEQUENCE {\n * version Version,\n * privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,\n * privateKey PrivateKey,\n * attributes [0] IMPLICIT Attributes OPTIONAL\n * }\n *\n * Version ::= INTEGER\n * PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier\n * PrivateKey ::= OCTET STRING\n * Attributes ::= SET OF Attribute\n *\n * An RSA private key as the following structure:\n *\n * RSAPrivateKey ::= SEQUENCE {\n * version Version,\n * modulus INTEGER, -- n\n * publicExponent INTEGER, -- e\n * privateExponent INTEGER, -- d\n * prime1 INTEGER, -- p\n * prime2 INTEGER, -- q\n * exponent1 INTEGER, -- d mod (p-1)\n * exponent2 INTEGER, -- d mod (q-1)\n * coefficient INTEGER -- (inverse of q) mod p\n * }\n *\n * Version ::= INTEGER\n *\n * The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./jsbn');\nrequire('./oids');\nrequire('./pkcs1');\nrequire('./prime');\nrequire('./random');\nrequire('./util');\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\nvar _crypto = forge.util.isNodejs ? 
require('crypto') : null;\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for util API\nvar util = forge.util;\n\n/*\n * RSA encryption and decryption, see RFC 2313.\n */\nforge.pki = forge.pki || {};\nmodule.exports = forge.pki.rsa = forge.rsa = forge.rsa || {};\nvar pki = forge.pki;\n\n// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29\nvar GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];\n\n// validator for a PrivateKeyInfo structure\nvar privateKeyValidator = {\n // PrivateKeyInfo\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'PrivateKeyInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // privateKeyAlgorithm\n name: 'PrivateKeyInfo.privateKeyAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'privateKeyOid'\n }]\n }, {\n // PrivateKey\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'privateKey'\n }]\n};\n\n// validator for an RSA private key\nvar rsaPrivateKeyValidator = {\n // RSAPrivateKey\n name: 'RSAPrivateKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'RSAPrivateKey.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // modulus (n)\n name: 'RSAPrivateKey.modulus',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyModulus'\n }, {\n // publicExponent (e)\n name: 'RSAPrivateKey.publicExponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPublicExponent'\n }, {\n // privateExponent (d)\n name: 'RSAPrivateKey.privateExponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrivateExponent'\n }, {\n // prime1 (p)\n name: 'RSAPrivateKey.prime1',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrime1'\n }, {\n // prime2 (q)\n name: 'RSAPrivateKey.prime2',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrime2'\n }, {\n // exponent1 (d mod (p-1))\n name: 'RSAPrivateKey.exponent1',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyExponent1'\n }, {\n // exponent2 (d mod (q-1))\n name: 'RSAPrivateKey.exponent2',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyExponent2'\n }, {\n // coefficient ((inverse of q) mod p)\n name: 'RSAPrivateKey.coefficient',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyCoefficient'\n }]\n};\n\n// validator for an RSA public key\nvar rsaPublicKeyValidator = {\n // RSAPublicKey\n name: 'RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // modulus (n)\n name: 'RSAPublicKey.modulus',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'publicKeyModulus'\n }, {\n // 
publicExponent (e)\n name: 'RSAPublicKey.exponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'publicKeyExponent'\n }]\n};\n\n// validator for an SubjectPublicKeyInfo structure\n// Note: Currently only works with an RSA public key\nvar publicKeyValidator = forge.pki.rsa.publicKeyValidator = {\n name: 'SubjectPublicKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'subjectPublicKeyInfo',\n value: [{\n name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'publicKeyOid'\n }]\n }, {\n // subjectPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n value: [{\n // RSAPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'rsaPublicKey'\n }]\n }]\n};\n\n// validator for a DigestInfo structure\nvar digestInfoValidator = {\n name: 'DigestInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'DigestInfo.DigestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'algorithmIdentifier'\n }, {\n // NULL paramters\n name: 'DigestInfo.DigestAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.NULL,\n // captured only to check existence for md2 and md5\n capture: 'parameters',\n optional: true,\n constructed: false\n }]\n }, {\n // digest\n name: 'DigestInfo.digest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'digest'\n }]\n};\n\n/**\n * Wrap digest in DigestInfo object.\n *\n * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447.\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n *\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n * Digest ::= OCTET STRING\n *\n * @param md the message digest object with the hash to sign.\n *\n * @return the encoded message (ready for RSA encrytion)\n */\nvar emsaPkcs1v15encode = function(md) {\n // get the oid for the algorithm\n var oid;\n if(md.algorithm in pki.oids) {\n oid = pki.oids[md.algorithm];\n } else {\n var error = new Error('Unknown message digest algorithm.');\n error.algorithm = md.algorithm;\n throw error;\n }\n var oidBytes = asn1.oidToDer(oid).getBytes();\n\n // create the digest info\n var digestInfo = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var digestAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n digestAlgorithm.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidBytes));\n digestAlgorithm.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.NULL, false, ''));\n var digest = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING,\n false, md.digest().getBytes());\n digestInfo.value.push(digestAlgorithm);\n digestInfo.value.push(digest);\n\n // encode digest info\n return 
asn1.toDer(digestInfo).getBytes();\n};\n\n/**\n * Performs x^c mod n (RSA encryption or decryption operation).\n *\n * @param x the number to raise and mod.\n * @param key the key to use.\n * @param pub true if the key is public, false if private.\n *\n * @return the result of x^c mod n.\n */\nvar _modPow = function(x, key, pub) {\n if(pub) {\n return x.modPow(key.e, key.n);\n }\n\n if(!key.p || !key.q) {\n // allow calculation without CRT params (slow)\n return x.modPow(key.d, key.n);\n }\n\n // pre-compute dP, dQ, and qInv if necessary\n if(!key.dP) {\n key.dP = key.d.mod(key.p.subtract(BigInteger.ONE));\n }\n if(!key.dQ) {\n key.dQ = key.d.mod(key.q.subtract(BigInteger.ONE));\n }\n if(!key.qInv) {\n key.qInv = key.q.modInverse(key.p);\n }\n\n /* Chinese remainder theorem (CRT) states:\n\n Suppose n1, n2, ..., nk are positive integers which are pairwise\n coprime (n1 and n2 have no common factors other than 1). For any\n integers x1, x2, ..., xk there exists an integer x solving the\n system of simultaneous congruences (where ~= means modularly\n congruent so a ~= b mod n means a mod n = b mod n):\n\n x ~= x1 mod n1\n x ~= x2 mod n2\n ...\n x ~= xk mod nk\n\n This system of congruences has a single simultaneous solution x\n between 0 and n - 1. Furthermore, each xk solution and x itself\n is congruent modulo the product n = n1*n2*...*nk.\n So x1 mod n = x2 mod n = xk mod n = x mod n.\n\n The single simultaneous solution x can be solved with the following\n equation:\n\n x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni.\n\n Where x is less than n, xi = x mod ni.\n\n For RSA we are only concerned with k = 2. The modulus n = pq, where\n p and q are coprime. The RSA decryption algorithm is:\n\n y = x^d mod n\n\n Given the above:\n\n x1 = x^d mod p\n r1 = n/p = q\n s1 = q^-1 mod p\n x2 = x^d mod q\n r2 = n/q = p\n s2 = p^-1 mod q\n\n So y = (x1r1s1 + x2r2s2) mod n\n = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n\n\n According to Fermat's Little Theorem, if the modulus P is prime,\n for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P.\n Since A is not divisible by P it follows that if:\n N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore:\n\n A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort\n to calculate). In order to calculate x^d mod p more quickly the\n exponent d mod (p - 1) is stored in the RSA private key (the same\n is done for x^d mod q). These values are referred to as dP and dQ\n respectively. Therefore we now have:\n\n y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n\n\n Since we'll be reducing x^dP by modulo p (same for q) we can also\n reduce x by p (and q respectively) before hand. Therefore, let\n\n xp = ((x mod p)^dP mod p), and\n xq = ((x mod q)^dQ mod q), yielding:\n\n y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n\n\n This can be further reduced to a simple algorithm that only\n requires 1 inverse (the q inverse is used) to be used and stored.\n The algorithm is called Garner's algorithm. If qInv is the\n inverse of q, we simply calculate:\n\n y = (qInv*(xp - xq) mod p) * q + xq\n\n However, there are two further complications. First, we need to\n ensure that xp > xq to prevent signed BigIntegers from being used\n so we add p until this is true (since we will be mod'ing with\n p anyway). Then, there is a known timing attack on algorithms\n using the CRT. To mitigate this risk, \"cryptographic blinding\"\n should be used. 
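/* Editor's note: a minimal illustrative sketch (not part of forge) of the
   Garner/CRT recombination described in the comment above, written with
   native BigInt on toy textbook values so the algebra can be checked
   directly. All numbers here are hypothetical; blinding is omitted for
   brevity. */
function modPowBig(base, exp, mod) {
  var result = 1n;
  base %= mod;
  while(exp > 0n) {
    if(exp & 1n) result = (result * base) % mod;
    base = (base * base) % mod;
    exp >>= 1n;
  }
  return result;
}
var tp = 61n, tq = 53n, tn = tp * tq;          // toy primes, n = pq = 3233
var td = 2753n;                                // private exponent for e = 17
var tdP = td % (tp - 1n), tdQ = td % (tq - 1n);
var tqInv = modPowBig(tq, tp - 2n, tp);        // q^-1 mod p (p prime, so Fermat works)
var tx = 65n;                                  // toy "ciphertext"
var txp = modPowBig(tx % tp, tdP, tp);
var txq = modPowBig(tx % tq, tdQ, tq);
var th = ((txp - txq) % tp + tp) % tp;         // keep the difference non-negative
var ty = (tqInv * th % tp) * tq + txq;         // Garner: y = (qInv*(xp-xq) mod p)*q + xq
// same result as the direct (slow) computation x^d mod n
console.log(ty === modPowBig(tx, td, tn));     // true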
This requires simply generating a random number r\n between 0 and n-1 and its inverse and multiplying x by r^e before\n calculating y and then multiplying y by r^-1 afterwards. Note that\n r must be coprime with n (gcd(r, n) === 1) in order to have an\n inverse.\n */\n\n // cryptographic blinding\n var r;\n do {\n r = new BigInteger(\n forge.util.bytesToHex(forge.random.getBytes(key.n.bitLength() / 8)),\n 16);\n } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger.ONE));\n x = x.multiply(r.modPow(key.e, key.n)).mod(key.n);\n\n // calculate xp and xq\n var xp = x.mod(key.p).modPow(key.dP, key.p);\n var xq = x.mod(key.q).modPow(key.dQ, key.q);\n\n // xp must be larger than xq to avoid signed bit usage\n while(xp.compareTo(xq) < 0) {\n xp = xp.add(key.p);\n }\n\n // do last step\n var y = xp.subtract(xq)\n .multiply(key.qInv).mod(key.p)\n .multiply(key.q).add(xq);\n\n // remove effect of random for cryptographic blinding\n y = y.multiply(r.modInverse(key.n)).mod(key.n);\n\n return y;\n};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or\n * 'encrypt' on a public key object instead.\n *\n * Performs RSA encryption.\n *\n * The parameter bt controls whether to put padding bytes before the\n * message passed in. Set bt to either true or false to disable padding\n * completely (in order to handle e.g. EMSA-PSS encoding seperately before),\n * signaling whether the encryption operation is a public key operation\n * (i.e. encrypting data) or not, i.e. private key operation (data signing).\n *\n * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01\n * (for signing) or 0x02 (for encryption). The key operation mode (private\n * or public) is derived from this flag in that case).\n *\n * @param m the message to encrypt as a byte string.\n * @param key the RSA key to use.\n * @param bt for PKCS#1 v1.5 padding, the block type to use\n * (0x01 for private key, 0x02 for public),\n * to disable padding: true = public key, false = private key.\n *\n * @return the encrypted bytes as a string.\n */\npki.rsa.encrypt = function(m, key, bt) {\n var pub = bt;\n var eb;\n\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n if(bt !== false && bt !== true) {\n // legacy, default to PKCS#1 v1.5 padding\n pub = (bt === 0x02);\n eb = _encodePkcs1_v1_5(m, key, bt);\n } else {\n eb = forge.util.createBuffer();\n eb.putBytes(m);\n }\n\n // load encryption block as big integer 'x'\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var x = new BigInteger(eb.toHex(), 16);\n\n // do RSA encryption\n var y = _modPow(x, key, pub);\n\n // convert y into the encrypted data byte string, if y is shorter in\n // bytes than k, then prepend zero bytes to fill up ed\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var yhex = y.toString(16);\n var ed = forge.util.createBuffer();\n var zeros = k - Math.ceil(yhex.length / 2);\n while(zeros > 0) {\n ed.putByte(0x00);\n --zeros;\n }\n ed.putBytes(forge.util.hexToBytes(yhex));\n return ed.getBytes();\n};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or\n * 'verify' on a public key object instead.\n *\n * Performs RSA decryption.\n *\n * The parameter ml controls whether to apply PKCS#1 v1.5 padding\n * or not. Set ml = false to disable padding removal completely\n * (in order to handle e.g. 
EMSA-PSS later on) and simply pass back\n * the RSA encryption block.\n *\n * @param ed the encrypted data to decrypt in as a byte string.\n * @param key the RSA key to use.\n * @param pub true for a public key operation, false for private.\n * @param ml the message length, if known, false to disable padding.\n *\n * @return the decrypted message as a byte string.\n */\npki.rsa.decrypt = function(ed, key, pub, ml) {\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n // error if the length of the encrypted data ED is not k\n if(ed.length !== k) {\n var error = new Error('Encrypted message length is invalid.');\n error.length = ed.length;\n error.expected = k;\n throw error;\n }\n\n // convert encrypted data into a big integer\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var y = new BigInteger(forge.util.createBuffer(ed).toHex(), 16);\n\n // y must be less than the modulus or it wasn't the result of\n // a previous mod operation (encryption) using that modulus\n if(y.compareTo(key.n) >= 0) {\n throw new Error('Encrypted message is invalid.');\n }\n\n // do RSA decryption\n var x = _modPow(y, key, pub);\n\n // create the encryption block, if x is shorter in bytes than k, then\n // prepend zero bytes to fill up eb\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var xhex = x.toString(16);\n var eb = forge.util.createBuffer();\n var zeros = k - Math.ceil(xhex.length / 2);\n while(zeros > 0) {\n eb.putByte(0x00);\n --zeros;\n }\n eb.putBytes(forge.util.hexToBytes(xhex));\n\n if(ml !== false) {\n // legacy, default to PKCS#1 v1.5 padding\n return _decodePkcs1_v1_5(eb.getBytes(), key, pub);\n }\n\n // return message\n return eb.getBytes();\n};\n\n/**\n * Creates an RSA key-pair generation state object. It is used to allow\n * key-generation to be performed in steps. 
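/* Editor's note: a short usage sketch of the higher-level key objects set up
   later in this module, rather than the deprecated pki.rsa.encrypt/decrypt
   shown above. Assumes node-forge is available; in a standalone script it
   would be required as shown here. */
var forgeSketch = require('node-forge');
var kpSketch = forgeSketch.pki.rsa.generateKeyPair({bits: 2048, e: 0x10001});
var ctSketch = kpSketch.publicKey.encrypt('secret message', 'RSA-OAEP');
var ptSketch = kpSketch.privateKey.decrypt(ctSketch, 'RSA-OAEP');
console.log(ptSketch === 'secret message'); // true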
It also allows for a UI to\n * display progress updates.\n *\n * @param bits the size for the private key in bits, defaults to 2048.\n * @param e the public exponent to use, defaults to 65537 (0x10001).\n * @param [options] the options to use.\n * prng a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n * algorithm the algorithm to use (default: 'PRIMEINC').\n *\n * @return the state object to use to generate the key-pair.\n */\npki.rsa.createKeyPairGenerationState = function(bits, e, options) {\n // TODO: migrate step-based prime generation code to forge.prime\n\n // set default bits\n if(typeof(bits) === 'string') {\n bits = parseInt(bits, 10);\n }\n bits = bits || 2048;\n\n // create prng with api that matches BigInteger secure random\n options = options || {};\n var prng = options.prng || forge.random;\n var rng = {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n var b = prng.getBytesSync(x.length);\n for(var i = 0; i < x.length; ++i) {\n x[i] = b.charCodeAt(i);\n }\n }\n };\n\n var algorithm = options.algorithm || 'PRIMEINC';\n\n // create PRIMEINC algorithm state\n var rval;\n if(algorithm === 'PRIMEINC') {\n rval = {\n algorithm: algorithm,\n state: 0,\n bits: bits,\n rng: rng,\n eInt: e || 65537,\n e: new BigInteger(null),\n p: null,\n q: null,\n qBits: bits >> 1,\n pBits: bits - (bits >> 1),\n pqState: 0,\n num: null,\n keys: null\n };\n rval.e.fromInt(rval.eInt);\n } else {\n throw new Error('Invalid key generation algorithm: ' + algorithm);\n }\n\n return rval;\n};\n\n/**\n * Attempts to runs the key-generation algorithm for at most n seconds\n * (approximately) using the given state. When key-generation has completed,\n * the keys will be stored in state.keys.\n *\n * To use this function to update a UI while generating a key or to prevent\n * causing browser lockups/warnings, set \"n\" to a value other than 0. 
A\n * simple pattern for generating a key and showing a progress indicator is:\n *\n * var state = pki.rsa.createKeyPairGenerationState(2048);\n * var step = function() {\n * // step key-generation, run algorithm for 100 ms, repeat\n * if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) {\n * setTimeout(step, 1);\n * } else {\n * // key-generation complete\n * // TODO: turn off progress indicator here\n * // TODO: use the generated key-pair in \"state.keys\"\n * }\n * };\n * // TODO: turn on progress indicator here\n * setTimeout(step, 0);\n *\n * @param state the state to use.\n * @param n the maximum number of milliseconds to run the algorithm for, 0\n * to run the algorithm to completion.\n *\n * @return true if the key-generation completed, false if not.\n */\npki.rsa.stepKeyPairGenerationState = function(state, n) {\n // set default algorithm if not set\n if(!('algorithm' in state)) {\n state.algorithm = 'PRIMEINC';\n }\n\n // TODO: migrate step-based prime generation code to forge.prime\n // TODO: abstract as PRIMEINC algorithm\n\n // do key generation (based on Tom Wu's rsa.js, see jsbn.js license)\n // with some minor optimizations and designed to run in steps\n\n // local state vars\n var THIRTY = new BigInteger(null);\n THIRTY.fromInt(30);\n var deltaIdx = 0;\n var op_or = function(x, y) {return x | y;};\n\n // keep stepping until time limit is reached or done\n var t1 = +new Date();\n var t2;\n var total = 0;\n while(state.keys === null && (n <= 0 || total < n)) {\n // generate p or q\n if(state.state === 0) {\n /* Note: All primes are of the form:\n\n 30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i\n\n When we generate a random number, we always align it at 30k + 1. Each\n time the number is determined not to be prime we add to get to the\n next 'i', eg: if the number was at 30k + 1 we add 6. */\n var bits = (state.p === null) ? state.pBits : state.qBits;\n var bits1 = bits - 1;\n\n // get a random number\n if(state.pqState === 0) {\n state.num = new BigInteger(bits, state.rng);\n // force MSB set\n if(!state.num.testBit(bits1)) {\n state.num.bitwiseTo(\n BigInteger.ONE.shiftLeft(bits1), op_or, state.num);\n }\n // align number on 30k+1 boundary\n state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0);\n deltaIdx = 0;\n\n ++state.pqState;\n } else if(state.pqState === 1) {\n // try to make the number a prime\n if(state.num.bitLength() > bits) {\n // overflow, try again\n state.pqState = 0;\n // do primality test\n } else if(state.num.isProbablePrime(\n _getMillerRabinTests(state.num.bitLength()))) {\n ++state.pqState;\n } else {\n // get next potential prime\n state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);\n }\n } else if(state.pqState === 2) {\n // ensure number is coprime with e\n state.pqState =\n (state.num.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) === 0) ? 
3 : 0;\n } else if(state.pqState === 3) {\n // store p or q\n state.pqState = 0;\n if(state.p === null) {\n state.p = state.num;\n } else {\n state.q = state.num;\n }\n\n // advance state if both p and q are ready\n if(state.p !== null && state.q !== null) {\n ++state.state;\n }\n state.num = null;\n }\n } else if(state.state === 1) {\n // ensure p is larger than q (swap them if not)\n if(state.p.compareTo(state.q) < 0) {\n state.num = state.p;\n state.p = state.q;\n state.q = state.num;\n }\n ++state.state;\n } else if(state.state === 2) {\n // compute phi: (p - 1)(q - 1) (Euler's totient function)\n state.p1 = state.p.subtract(BigInteger.ONE);\n state.q1 = state.q.subtract(BigInteger.ONE);\n state.phi = state.p1.multiply(state.q1);\n ++state.state;\n } else if(state.state === 3) {\n // ensure e and phi are coprime\n if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) === 0) {\n // phi and e are coprime, advance\n ++state.state;\n } else {\n // phi and e aren't coprime, so generate a new p and q\n state.p = null;\n state.q = null;\n state.state = 0;\n }\n } else if(state.state === 4) {\n // create n, ensure n is has the right number of bits\n state.n = state.p.multiply(state.q);\n\n // ensure n is right number of bits\n if(state.n.bitLength() === state.bits) {\n // success, advance\n ++state.state;\n } else {\n // failed, get new q\n state.q = null;\n state.state = 0;\n }\n } else if(state.state === 5) {\n // set keys\n var d = state.e.modInverse(state.phi);\n state.keys = {\n privateKey: pki.rsa.setPrivateKey(\n state.n, state.e, d, state.p, state.q,\n d.mod(state.p1), d.mod(state.q1),\n state.q.modInverse(state.p)),\n publicKey: pki.rsa.setPublicKey(state.n, state.e)\n };\n }\n\n // update timing\n t2 = +new Date();\n total += t2 - t1;\n t1 = t2;\n }\n\n return state.keys !== null;\n};\n\n/**\n * Generates an RSA public-private key pair in a single call.\n *\n * To generate a key-pair in steps (to allow for progress updates and to\n * prevent blocking or warnings in slow browsers) then use the key-pair\n * generation state functions.\n *\n * To generate a key-pair asynchronously (either through web-workers, if\n * available, or by breaking up the work on the main thread), pass a\n * callback function.\n *\n * @param [bits] the size for the private key in bits, defaults to 2048.\n * @param [e] the public exponent to use, defaults to 65537.\n * @param [options] options for key-pair generation, if given then 'bits'\n * and 'e' must *not* be given:\n * bits the size for the private key in bits, (default: 2048).\n * e the public exponent to use, (default: 65537 (0x10001)).\n * workerScript the worker script URL.\n * workers the number of web workers (if supported) to use,\n * (default: 2).\n * workLoad the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * prng a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\". 
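/* Editor's note: a hedged usage sketch of the asynchronous form documented
   above. The worker options only matter where web workers exist; in Node the
   native or pure-JS fallback paths below are used instead. */
forge.pki.rsa.generateKeyPair({bits: 2048, workers: 2}, function(err, keypair) {
  if(err) {
    return console.error(err);
  }
  // keypair.privateKey / keypair.publicKey are the key objects built by
  // pki.rsa.setPrivateKey / pki.rsa.setPublicKey further down in this file
  console.log(forge.pki.privateKeyToPem(keypair.privateKey));
});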
Disables use of native APIs.\n * algorithm the algorithm to use (default: 'PRIMEINC').\n * @param [callback(err, keypair)] called once the operation completes.\n *\n * @return an object with privateKey and publicKey properties.\n */\npki.rsa.generateKeyPair = function(bits, e, options, callback) {\n // (bits), (options), (callback)\n if(arguments.length === 1) {\n if(typeof bits === 'object') {\n options = bits;\n bits = undefined;\n } else if(typeof bits === 'function') {\n callback = bits;\n bits = undefined;\n }\n } else if(arguments.length === 2) {\n // (bits, e), (bits, options), (bits, callback), (options, callback)\n if(typeof bits === 'number') {\n if(typeof e === 'function') {\n callback = e;\n e = undefined;\n } else if(typeof e !== 'number') {\n options = e;\n e = undefined;\n }\n } else {\n options = bits;\n callback = e;\n bits = undefined;\n e = undefined;\n }\n } else if(arguments.length === 3) {\n // (bits, e, options), (bits, e, callback), (bits, options, callback)\n if(typeof e === 'number') {\n if(typeof options === 'function') {\n callback = options;\n options = undefined;\n }\n } else {\n callback = options;\n options = e;\n e = undefined;\n }\n }\n options = options || {};\n if(bits === undefined) {\n bits = options.bits || 2048;\n }\n if(e === undefined) {\n e = options.e || 0x10001;\n }\n\n // use native code if permitted, available, and parameters are acceptable\n if(!forge.options.usePureJavaScript && !options.prng &&\n bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) {\n if(callback) {\n // try native async\n if(_detectNodeCrypto('generateKeyPair')) {\n return _crypto.generateKeyPair('rsa', {\n modulusLength: bits,\n publicExponent: e,\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem'\n },\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem'\n }\n }, function(err, pub, priv) {\n if(err) {\n return callback(err);\n }\n callback(null, {\n privateKey: pki.privateKeyFromPem(priv),\n publicKey: pki.publicKeyFromPem(pub)\n });\n });\n }\n if(_detectSubtleCrypto('generateKey') &&\n _detectSubtleCrypto('exportKey')) {\n // use standard native generateKey\n return util.globalScope.crypto.subtle.generateKey({\n name: 'RSASSA-PKCS1-v1_5',\n modulusLength: bits,\n publicExponent: _intToUint8Array(e),\n hash: {name: 'SHA-256'}\n }, true /* key can be exported*/, ['sign', 'verify'])\n .then(function(pair) {\n return util.globalScope.crypto.subtle.exportKey(\n 'pkcs8', pair.privateKey);\n // avoiding catch(function(err) {...}) to support IE <= 8\n }).then(undefined, function(err) {\n callback(err);\n }).then(function(pkcs8) {\n if(pkcs8) {\n var privateKey = pki.privateKeyFromAsn1(\n asn1.fromDer(forge.util.createBuffer(pkcs8)));\n callback(null, {\n privateKey: privateKey,\n publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e)\n });\n }\n });\n }\n if(_detectSubtleMsCrypto('generateKey') &&\n _detectSubtleMsCrypto('exportKey')) {\n var genOp = util.globalScope.msCrypto.subtle.generateKey({\n name: 'RSASSA-PKCS1-v1_5',\n modulusLength: bits,\n publicExponent: _intToUint8Array(e),\n hash: {name: 'SHA-256'}\n }, true /* key can be exported*/, ['sign', 'verify']);\n genOp.oncomplete = function(e) {\n var pair = e.target.result;\n var exportOp = util.globalScope.msCrypto.subtle.exportKey(\n 'pkcs8', pair.privateKey);\n exportOp.oncomplete = function(e) {\n var pkcs8 = e.target.result;\n var privateKey = pki.privateKeyFromAsn1(\n asn1.fromDer(forge.util.createBuffer(pkcs8)));\n callback(null, {\n privateKey: privateKey,\n publicKey: 
pki.setRsaPublicKey(privateKey.n, privateKey.e)\n });\n };\n exportOp.onerror = function(err) {\n callback(err);\n };\n };\n genOp.onerror = function(err) {\n callback(err);\n };\n return;\n }\n } else {\n // try native sync\n if(_detectNodeCrypto('generateKeyPairSync')) {\n var keypair = _crypto.generateKeyPairSync('rsa', {\n modulusLength: bits,\n publicExponent: e,\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem'\n },\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem'\n }\n });\n return {\n privateKey: pki.privateKeyFromPem(keypair.privateKey),\n publicKey: pki.publicKeyFromPem(keypair.publicKey)\n };\n }\n }\n }\n\n // use JavaScript implementation\n var state = pki.rsa.createKeyPairGenerationState(bits, e, options);\n if(!callback) {\n pki.rsa.stepKeyPairGenerationState(state, 0);\n return state.keys;\n }\n _generateKeyPair(state, options, callback);\n};\n\n/**\n * Sets an RSA public key from BigIntegers modulus and exponent.\n *\n * @param n the modulus.\n * @param e the exponent.\n *\n * @return the public key.\n */\npki.setRsaPublicKey = pki.rsa.setPublicKey = function(n, e) {\n var key = {\n n: n,\n e: e\n };\n\n /**\n * Encrypts the given data with this public key. Newer applications\n * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for\n * legacy applications.\n *\n * @param data the byte string to encrypt.\n * @param scheme the encryption scheme to use:\n * 'RSAES-PKCS1-V1_5' (default),\n * 'RSA-OAEP',\n * 'RAW', 'NONE', or null to perform raw RSA encryption,\n * an object with an 'encode' property set to a function\n * with the signature 'function(data, key)' that returns\n * a binary-encoded string representing the encoded data.\n * @param schemeOptions any scheme-specific options.\n *\n * @return the encrypted byte string.\n */\n key.encrypt = function(data, scheme, schemeOptions) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSAES-PKCS1-V1_5';\n }\n\n if(scheme === 'RSAES-PKCS1-V1_5') {\n scheme = {\n encode: function(m, key, pub) {\n return _encodePkcs1_v1_5(m, key, 0x02).getBytes();\n }\n };\n } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {\n scheme = {\n encode: function(m, key) {\n return forge.pkcs1.encode_rsa_oaep(key, m, schemeOptions);\n }\n };\n } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {\n scheme = {encode: function(e) {return e;}};\n } else if(typeof scheme === 'string') {\n throw new Error('Unsupported encryption scheme: \"' + scheme + '\".');\n }\n\n // do scheme-based encoding then rsa encryption\n var e = scheme.encode(data, key, true);\n return pki.rsa.encrypt(e, key, true);\n };\n\n /**\n * Verifies the given signature against the given digest.\n *\n * PKCS#1 supports multiple (currently two) signature schemes:\n * RSASSA-PKCS1-V1_5 and RSASSA-PSS.\n *\n * By default this implementation uses the \"old scheme\", i.e.\n * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the\n * signature is an OCTET STRING that holds a DigestInfo.\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n * Digest ::= OCTET STRING\n *\n * To perform PSS signature verification, provide an instance\n * of Forge PSS object as the scheme parameter.\n *\n * @param digest the message digest hash to compare against the signature,\n * as a binary-encoded string.\n * @param signature the signature to verify, as a binary-encoded string.\n * 
@param scheme signature verification scheme to use:\n * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,\n * a Forge PSS object for RSASSA-PSS,\n * 'NONE' or null for none, DigestInfo will not be expected, but\n * PKCS#1 v1.5 padding will still be used.\n * @param options optional verify options\n * _parseAllDigestBytes testing flag to control parsing of all\n * digest bytes. Unsupported and not for general usage.\n * (default: true)\n *\n * @return true if the signature was verified, false if not.\n */\n key.verify = function(digest, signature, scheme, options) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSASSA-PKCS1-V1_5';\n }\n if(options === undefined) {\n options = {\n _parseAllDigestBytes: true\n };\n }\n if(!('_parseAllDigestBytes' in options)) {\n options._parseAllDigestBytes = true;\n }\n\n if(scheme === 'RSASSA-PKCS1-V1_5') {\n scheme = {\n verify: function(digest, d) {\n // remove padding\n d = _decodePkcs1_v1_5(d, key, true);\n // d is ASN.1 BER-encoded DigestInfo\n var obj = asn1.fromDer(d, {\n parseAllBytes: options._parseAllDigestBytes\n });\n\n // validate DigestInfo\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, digestInfoValidator, capture, errors)) {\n var error = new Error(\n 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +\n 'DigestInfo value.');\n error.errors = errors;\n throw error;\n }\n // check hash algorithm identifier\n // see PKCS1-v1-5DigestAlgorithms in RFC 8017\n // FIXME: add support to vaidator for strict value choices\n var oid = asn1.derToOid(capture.algorithmIdentifier);\n if(!(oid === forge.oids.md2 ||\n oid === forge.oids.md5 ||\n oid === forge.oids.sha1 ||\n oid === forge.oids.sha224 ||\n oid === forge.oids.sha256 ||\n oid === forge.oids.sha384 ||\n oid === forge.oids.sha512 ||\n oid === forge.oids['sha512-224'] ||\n oid === forge.oids['sha512-256'])) {\n var error = new Error(\n 'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.');\n error.oid = oid;\n throw error;\n }\n\n // special check for md2 and md5 that NULL parameters exist\n if(oid === forge.oids.md2 || oid === forge.oids.md5) {\n if(!('parameters' in capture)) {\n throw new Error(\n 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +\n 'DigestInfo value. 
' +\n 'Missing algorithm identifer NULL parameters.');\n }\n }\n\n // compare the given digest to the decrypted one\n return digest === capture.digest;\n }\n };\n } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {\n scheme = {\n verify: function(digest, d) {\n // remove padding\n d = _decodePkcs1_v1_5(d, key, true);\n return digest === d;\n }\n };\n }\n\n // do rsa decryption w/o any decoding, then verify -- which does decoding\n var d = pki.rsa.decrypt(signature, key, true, false);\n return scheme.verify(digest, d, key.n.bitLength());\n };\n\n return key;\n};\n\n/**\n * Sets an RSA private key from BigIntegers modulus, exponent, primes,\n * prime exponents, and modular multiplicative inverse.\n *\n * @param n the modulus.\n * @param e the public exponent.\n * @param d the private exponent ((inverse of e) mod n).\n * @param p the first prime.\n * @param q the second prime.\n * @param dP exponent1 (d mod (p-1)).\n * @param dQ exponent2 (d mod (q-1)).\n * @param qInv ((inverse of q) mod p)\n *\n * @return the private key.\n */\npki.setRsaPrivateKey = pki.rsa.setPrivateKey = function(\n n, e, d, p, q, dP, dQ, qInv) {\n var key = {\n n: n,\n e: e,\n d: d,\n p: p,\n q: q,\n dP: dP,\n dQ: dQ,\n qInv: qInv\n };\n\n /**\n * Decrypts the given data with this private key. The decryption scheme\n * must match the one used to encrypt the data.\n *\n * @param data the byte string to decrypt.\n * @param scheme the decryption scheme to use:\n * 'RSAES-PKCS1-V1_5' (default),\n * 'RSA-OAEP',\n * 'RAW', 'NONE', or null to perform raw RSA decryption.\n * @param schemeOptions any scheme-specific options.\n *\n * @return the decrypted byte string.\n */\n key.decrypt = function(data, scheme, schemeOptions) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSAES-PKCS1-V1_5';\n }\n\n // do rsa decryption w/o any decoding\n var d = pki.rsa.decrypt(data, key, false, false);\n\n if(scheme === 'RSAES-PKCS1-V1_5') {\n scheme = {decode: _decodePkcs1_v1_5};\n } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {\n scheme = {\n decode: function(d, key) {\n return forge.pkcs1.decode_rsa_oaep(key, d, schemeOptions);\n }\n };\n } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {\n scheme = {decode: function(d) {return d;}};\n } else {\n throw new Error('Unsupported encryption scheme: \"' + scheme + '\".');\n }\n\n // decode according to scheme\n return scheme.decode(d, key, false);\n };\n\n /**\n * Signs the given digest, producing a signature.\n *\n * PKCS#1 supports multiple (currently two) signature schemes:\n * RSASSA-PKCS1-V1_5 and RSASSA-PSS.\n *\n * By default this implementation uses the \"old scheme\", i.e.\n * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide\n * an instance of Forge PSS object as the scheme parameter.\n *\n * @param md the message digest object with the hash to sign.\n * @param scheme the signature scheme to use:\n * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,\n * a Forge PSS object for RSASSA-PSS,\n * 'NONE' or null for none, DigestInfo will not be used but\n * PKCS#1 v1.5 padding will still be used.\n *\n * @return the signature as a byte string.\n */\n key.sign = function(md, scheme) {\n /* Note: The internal implementation of RSA operations is being\n transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy\n code like the use of an encoding block identifier 'bt' will eventually\n be removed. 
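/* Editor's note: an illustrative RSASSA-PKCS1-v1_5 sign/verify round trip
   using the key.sign/key.verify methods defined in this file. Assumes a
   `keypair` object previously produced by forge.pki.rsa.generateKeyPair. */
var mdSketch = forge.md.sha256.create();
mdSketch.update('message to sign', 'utf8');
var sigSketch = keypair.privateKey.sign(mdSketch);   // PKCS#1 v1.5 + SHA-256 DigestInfo
var okSketch = keypair.publicKey.verify(
  mdSketch.digest().getBytes(), sigSketch);
console.log(okSketch); // true unless the digest or signature was tampered with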
*/\n\n // private key operation\n var bt = false;\n\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n }\n\n if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') {\n scheme = {encode: emsaPkcs1v15encode};\n bt = 0x01;\n } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {\n scheme = {encode: function() {return md;}};\n bt = 0x01;\n }\n\n // encode and then encrypt\n var d = scheme.encode(md, key.n.bitLength());\n return pki.rsa.encrypt(d, key, bt);\n };\n\n return key;\n};\n\n/**\n * Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object.\n *\n * @param rsaKey the ASN.1 RSAPrivateKey.\n *\n * @return the ASN.1 PrivateKeyInfo.\n */\npki.wrapRsaPrivateKey = function(rsaKey) {\n // PrivateKeyInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (0)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(0).getBytes()),\n // privateKeyAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.rsaEncryption).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // PrivateKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(rsaKey).getBytes())\n ]);\n};\n\n/**\n * Converts a private key from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a PrivateKeyInfo containing an\n * RSAPrivateKey or an RSAPrivateKey.\n *\n * @return the private key.\n */\npki.privateKeyFromAsn1 = function(obj) {\n // get PrivateKeyInfo\n var capture = {};\n var errors = [];\n if(asn1.validate(obj, privateKeyValidator, capture, errors)) {\n obj = asn1.fromDer(forge.util.createBuffer(capture.privateKey));\n }\n\n // get RSAPrivateKey\n capture = {};\n errors = [];\n if(!asn1.validate(obj, rsaPrivateKeyValidator, capture, errors)) {\n var error = new Error('Cannot read private key. 
' +\n 'ASN.1 object does not contain an RSAPrivateKey.');\n error.errors = errors;\n throw error;\n }\n\n // Note: Version is currently ignored.\n // capture.privateKeyVersion\n // FIXME: inefficient, get a BigInteger that uses byte strings\n var n, e, d, p, q, dP, dQ, qInv;\n n = forge.util.createBuffer(capture.privateKeyModulus).toHex();\n e = forge.util.createBuffer(capture.privateKeyPublicExponent).toHex();\n d = forge.util.createBuffer(capture.privateKeyPrivateExponent).toHex();\n p = forge.util.createBuffer(capture.privateKeyPrime1).toHex();\n q = forge.util.createBuffer(capture.privateKeyPrime2).toHex();\n dP = forge.util.createBuffer(capture.privateKeyExponent1).toHex();\n dQ = forge.util.createBuffer(capture.privateKeyExponent2).toHex();\n qInv = forge.util.createBuffer(capture.privateKeyCoefficient).toHex();\n\n // set private key\n return pki.setRsaPrivateKey(\n new BigInteger(n, 16),\n new BigInteger(e, 16),\n new BigInteger(d, 16),\n new BigInteger(p, 16),\n new BigInteger(q, 16),\n new BigInteger(dP, 16),\n new BigInteger(dQ, 16),\n new BigInteger(qInv, 16));\n};\n\n/**\n * Converts a private key to an ASN.1 RSAPrivateKey.\n *\n * @param key the private key.\n *\n * @return the ASN.1 representation of an RSAPrivateKey.\n */\npki.privateKeyToAsn1 = pki.privateKeyToRSAPrivateKey = function(key) {\n // RSAPrivateKey\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (0 = only 2 primes, 1 multiple primes)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(0).getBytes()),\n // modulus (n)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.n)),\n // publicExponent (e)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.e)),\n // privateExponent (d)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.d)),\n // privateKeyPrime1 (p)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.p)),\n // privateKeyPrime2 (q)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.q)),\n // privateKeyExponent1 (dP)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.dP)),\n // privateKeyExponent2 (dQ)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.dQ)),\n // coefficient (qInv)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.qInv))\n ]);\n};\n\n/**\n * Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey.\n *\n * @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey.\n *\n * @return the public key.\n */\npki.publicKeyFromAsn1 = function(obj) {\n // get SubjectPublicKeyInfo\n var capture = {};\n var errors = [];\n if(asn1.validate(obj, publicKeyValidator, capture, errors)) {\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n var error = new Error('Cannot read public key. Unknown OID.');\n error.oid = oid;\n throw error;\n }\n obj = capture.rsaPublicKey;\n }\n\n // get RSA params\n errors = [];\n if(!asn1.validate(obj, rsaPublicKeyValidator, capture, errors)) {\n var error = new Error('Cannot read public key. 
' +\n 'ASN.1 object does not contain an RSAPublicKey.');\n error.errors = errors;\n throw error;\n }\n\n // FIXME: inefficient, get a BigInteger that uses byte strings\n var n = forge.util.createBuffer(capture.publicKeyModulus).toHex();\n var e = forge.util.createBuffer(capture.publicKeyExponent).toHex();\n\n // set public key\n return pki.setRsaPublicKey(\n new BigInteger(n, 16),\n new BigInteger(e, 16));\n};\n\n/**\n * Converts a public key to an ASN.1 SubjectPublicKeyInfo.\n *\n * @param key the public key.\n *\n * @return the asn1 representation of a SubjectPublicKeyInfo.\n */\npki.publicKeyToAsn1 = pki.publicKeyToSubjectPublicKeyInfo = function(key) {\n // SubjectPublicKeyInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.rsaEncryption).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // subjectPublicKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [\n pki.publicKeyToRSAPublicKey(key)\n ])\n ]);\n};\n\n/**\n * Converts a public key to an ASN.1 RSAPublicKey.\n *\n * @param key the public key.\n *\n * @return the asn1 representation of a RSAPublicKey.\n */\npki.publicKeyToRSAPublicKey = function(key) {\n // RSAPublicKey\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // modulus (n)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.n)),\n // publicExponent (e)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.e))\n ]);\n};\n\n/**\n * Encodes a message using PKCS#1 v1.5 padding.\n *\n * @param m the message to encode.\n * @param key the RSA key to use.\n * @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02\n * (for encryption).\n *\n * @return the padded byte buffer.\n */\nfunction _encodePkcs1_v1_5(m, key, bt) {\n var eb = forge.util.createBuffer();\n\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n /* use PKCS#1 v1.5 padding */\n if(m.length > (k - 11)) {\n var error = new Error('Message is too long for PKCS#1 v1.5 padding.');\n error.length = m.length;\n error.max = k - 11;\n throw error;\n }\n\n /* A block type BT, a padding string PS, and the data D shall be\n formatted into an octet string EB, the encryption block:\n\n EB = 00 || BT || PS || 00 || D\n\n The block type BT shall be a single octet indicating the structure of\n the encryption block. For this version of the document it shall have\n value 00, 01, or 02. For a private-key operation, the block type\n shall be 00 or 01. For a public-key operation, it shall be 02.\n\n The padding string PS shall consist of k-3-||D|| octets. For block\n type 00, the octets shall have value 00; for block type 01, they\n shall have value FF; and for block type 02, they shall be\n pseudorandomly generated and nonzero. This makes the length of the\n encryption block EB equal to k. */\n\n // build the encryption block\n eb.putByte(0x00);\n eb.putByte(bt);\n\n // create the padding\n var padNum = k - 3 - m.length;\n var padByte;\n // private key op\n if(bt === 0x00 || bt === 0x01) {\n padByte = (bt === 0x00) ? 
0x00 : 0xFF;\n for(var i = 0; i < padNum; ++i) {\n eb.putByte(padByte);\n }\n } else {\n // public key op\n // pad with random non-zero values\n while(padNum > 0) {\n var numZeros = 0;\n var padBytes = forge.random.getBytes(padNum);\n for(var i = 0; i < padNum; ++i) {\n padByte = padBytes.charCodeAt(i);\n if(padByte === 0) {\n ++numZeros;\n } else {\n eb.putByte(padByte);\n }\n }\n padNum = numZeros;\n }\n }\n\n // zero followed by message\n eb.putByte(0x00);\n eb.putBytes(m);\n\n return eb;\n}\n\n/**\n * Decodes a message using PKCS#1 v1.5 padding.\n *\n * @param em the message to decode.\n * @param key the RSA key to use.\n * @param pub true if the key is a public key, false if it is private.\n * @param ml the message length, if specified.\n *\n * @return the decoded bytes.\n */\nfunction _decodePkcs1_v1_5(em, key, pub, ml) {\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n /* It is an error if any of the following conditions occurs:\n\n 1. The encryption block EB cannot be parsed unambiguously.\n 2. The padding string PS consists of fewer than eight octets\n or is inconsisent with the block type BT.\n 3. The decryption process is a public-key operation and the block\n type BT is not 00 or 01, or the decryption process is a\n private-key operation and the block type is not 02.\n */\n\n // parse the encryption block\n var eb = forge.util.createBuffer(em);\n var first = eb.getByte();\n var bt = eb.getByte();\n if(first !== 0x00 ||\n (pub && bt !== 0x00 && bt !== 0x01) ||\n (!pub && bt != 0x02) ||\n (pub && bt === 0x00 && typeof(ml) === 'undefined')) {\n throw new Error('Encryption block is invalid.');\n }\n\n var padNum = 0;\n if(bt === 0x00) {\n // check all padding bytes for 0x00\n padNum = k - 3 - ml;\n for(var i = 0; i < padNum; ++i) {\n if(eb.getByte() !== 0x00) {\n throw new Error('Encryption block is invalid.');\n }\n }\n } else if(bt === 0x01) {\n // find the first byte that isn't 0xFF, should be after all padding\n padNum = 0;\n while(eb.length() > 1) {\n if(eb.getByte() !== 0xFF) {\n --eb.read;\n break;\n }\n ++padNum;\n }\n } else if(bt === 0x02) {\n // look for 0x00 byte\n padNum = 0;\n while(eb.length() > 1) {\n if(eb.getByte() === 0x00) {\n --eb.read;\n break;\n }\n ++padNum;\n }\n }\n\n // zero must be 0x00 and padNum must be (k - 3 - message length)\n var zero = eb.getByte();\n if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) {\n throw new Error('Encryption block is invalid.');\n }\n\n return eb.getBytes();\n}\n\n/**\n * Runs the key-generation algorithm asynchronously, either in the background\n * via Web Workers, or using the main thread and setImmediate.\n *\n * @param state the key-pair generation state.\n * @param [options] options for key-pair generation:\n * workerScript the worker script URL.\n * workers the number of web workers (if supported) to use,\n * (default: 2, -1 to use estimated cores minus one).\n * workLoad the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * @param callback(err, keypair) called once the operation completes.\n */\nfunction _generateKeyPair(state, options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n\n var opts = {\n algorithm: {\n name: options.algorithm || 'PRIMEINC',\n options: {\n workers: options.workers || 2,\n workLoad: options.workLoad || 100,\n workerScript: options.workerScript\n }\n }\n };\n if('prng' in options) {\n 
opts.prng = options.prng;\n }\n\n generate();\n\n function generate() {\n // find p and then q (done in series to simplify)\n getPrime(state.pBits, function(err, num) {\n if(err) {\n return callback(err);\n }\n state.p = num;\n if(state.q !== null) {\n return finish(err, state.q);\n }\n getPrime(state.qBits, finish);\n });\n }\n\n function getPrime(bits, callback) {\n forge.prime.generateProbablePrime(bits, opts, callback);\n }\n\n function finish(err, num) {\n if(err) {\n return callback(err);\n }\n\n // set q\n state.q = num;\n\n // ensure p is larger than q (swap them if not)\n if(state.p.compareTo(state.q) < 0) {\n var tmp = state.p;\n state.p = state.q;\n state.q = tmp;\n }\n\n // ensure p is coprime with e\n if(state.p.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) !== 0) {\n state.p = null;\n generate();\n return;\n }\n\n // ensure q is coprime with e\n if(state.q.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) !== 0) {\n state.q = null;\n getPrime(state.qBits, finish);\n return;\n }\n\n // compute phi: (p - 1)(q - 1) (Euler's totient function)\n state.p1 = state.p.subtract(BigInteger.ONE);\n state.q1 = state.q.subtract(BigInteger.ONE);\n state.phi = state.p1.multiply(state.q1);\n\n // ensure e and phi are coprime\n if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) !== 0) {\n // phi and e aren't coprime, so generate a new p and q\n state.p = state.q = null;\n generate();\n return;\n }\n\n // create n, ensure n is has the right number of bits\n state.n = state.p.multiply(state.q);\n if(state.n.bitLength() !== state.bits) {\n // failed, get new q\n state.q = null;\n getPrime(state.qBits, finish);\n return;\n }\n\n // set keys\n var d = state.e.modInverse(state.phi);\n state.keys = {\n privateKey: pki.rsa.setPrivateKey(\n state.n, state.e, d, state.p, state.q,\n d.mod(state.p1), d.mod(state.q1),\n state.q.modInverse(state.p)),\n publicKey: pki.rsa.setPublicKey(state.n, state.e)\n };\n\n callback(null, state.keys);\n }\n}\n\n/**\n * Converts a positive BigInteger into 2's-complement big-endian bytes.\n *\n * @param b the big integer to convert.\n *\n * @return the bytes.\n */\nfunction _bnToBytes(b) {\n // prepend 0x00 if first byte >= 0x80\n var hex = b.toString(16);\n if(hex[0] >= '8') {\n hex = '00' + hex;\n }\n var bytes = forge.util.hexToBytes(hex);\n\n // ensure integer is minimally-encoded\n if(bytes.length > 1 &&\n // leading 0x00 for positive integer\n ((bytes.charCodeAt(0) === 0 &&\n (bytes.charCodeAt(1) & 0x80) === 0) ||\n // leading 0xFF for negative integer\n (bytes.charCodeAt(0) === 0xFF &&\n (bytes.charCodeAt(1) & 0x80) === 0x80))) {\n return bytes.substr(1);\n }\n return bytes;\n}\n\n/**\n * Returns the required number of Miller-Rabin tests to generate a\n * prime with an error probability of (1/2)^80.\n *\n * See Handbook of Applied Cryptography Chapter 4, Table 4.4.\n *\n * @param bits the bit size.\n *\n * @return the required number of iterations.\n */\nfunction _getMillerRabinTests(bits) {\n if(bits <= 100) return 27;\n if(bits <= 150) return 18;\n if(bits <= 200) return 15;\n if(bits <= 250) return 12;\n if(bits <= 300) return 9;\n if(bits <= 350) return 8;\n if(bits <= 400) return 7;\n if(bits <= 500) return 6;\n if(bits <= 600) return 5;\n if(bits <= 800) return 4;\n if(bits <= 1250) return 3;\n return 2;\n}\n\n/**\n * Performs feature detection on the Node crypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectNodeCrypto(fn) {\n return 
forge.util.isNodejs && typeof _crypto[fn] === 'function';\n}\n\n/**\n * Performs feature detection on the SubtleCrypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectSubtleCrypto(fn) {\n return (typeof util.globalScope !== 'undefined' &&\n typeof util.globalScope.crypto === 'object' &&\n typeof util.globalScope.crypto.subtle === 'object' &&\n typeof util.globalScope.crypto.subtle[fn] === 'function');\n}\n\n/**\n * Performs feature detection on the deprecated Microsoft Internet Explorer\n * outdated SubtleCrypto interface. This function should only be used after\n * checking for the modern, standard SubtleCrypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectSubtleMsCrypto(fn) {\n return (typeof util.globalScope !== 'undefined' &&\n typeof util.globalScope.msCrypto === 'object' &&\n typeof util.globalScope.msCrypto.subtle === 'object' &&\n typeof util.globalScope.msCrypto.subtle[fn] === 'function');\n}\n\nfunction _intToUint8Array(x) {\n var bytes = forge.util.hexToBytes(x.toString(16));\n var buffer = new Uint8Array(bytes.length);\n for(var i = 0; i < bytes.length; ++i) {\n buffer[i] = bytes.charCodeAt(i);\n }\n return buffer;\n}\n\nfunction _privateKeyFromJwk(jwk) {\n if(jwk.kty !== 'RSA') {\n throw new Error(\n 'Unsupported key algorithm \"' + jwk.kty + '\"; algorithm must be \"RSA\".');\n }\n return pki.setRsaPrivateKey(\n _base64ToBigInt(jwk.n),\n _base64ToBigInt(jwk.e),\n _base64ToBigInt(jwk.d),\n _base64ToBigInt(jwk.p),\n _base64ToBigInt(jwk.q),\n _base64ToBigInt(jwk.dp),\n _base64ToBigInt(jwk.dq),\n _base64ToBigInt(jwk.qi));\n}\n\nfunction _publicKeyFromJwk(jwk) {\n if(jwk.kty !== 'RSA') {\n throw new Error('Key algorithm must be \"RSA\".');\n }\n return pki.setRsaPublicKey(\n _base64ToBigInt(jwk.n),\n _base64ToBigInt(jwk.e));\n}\n\nfunction _base64ToBigInt(b64) {\n return new BigInteger(forge.util.bytesToHex(forge.util.decode64(b64)), 16);\n}\n","/**\n * Secure Hash Algorithm with 160-bit digest (SHA-1) implementation.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar sha1 = module.exports = forge.sha1 = forge.sha1 || {};\nforge.md.sha1 = forge.md.algorithms.sha1 = sha1;\n\n/**\n * Creates a SHA-1 message digest object.\n *\n * @return a message digest object.\n */\nsha1.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // SHA-1 state contains five 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(80);\n\n // message digest object\n var md = {\n algorithm: 'sha1',\n blockLength: 64,\n digestLength: 20,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = 
{\n h0: 0x67452301,\n h1: 0xEFCDAB89,\n h2: 0x98BADCFE,\n h3: 0x10325476,\n h4: 0xC3D2E1F0\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-1 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. 
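/* Editor's note: a quick sanity sketch against the well-known test vector
   SHA1("abc") = a9993e364706816aba3e25717850c26c9cd0d89d, using the md
   object API defined in this module. */
var sha1Sketch = forge.md.sha1.create();
sha1Sketch.update('abc');
console.log(sha1Sketch.digest().toHex());
// a9993e364706816aba3e25717850c26c9cd0d89d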
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3,\n h4: _state.h4\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32(s2.h0);\n rval.putInt32(s2.h1);\n rval.putInt32(s2.h2);\n rval.putInt32(s2.h3);\n rval.putInt32(s2.h4);\n return rval;\n };\n\n return md;\n};\n\n// sha-1 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-1 state with the given byte buffer.\n *\n * @param s the SHA-1 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t, a, b, c, d, e, f, i;\n var len = bytes.length();\n while(len >= 64) {\n // the w array will be populated with sixteen 32-bit big-endian words\n // and then extended into 80 32-bit words according to SHA-1 algorithm\n // and for 32-79 using Max Locktyukhin's optimization\n\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n e = s.h4;\n\n // round 1\n for(i = 0; i < 16; ++i) {\n t = bytes.getInt32();\n w[i] = t;\n f = d ^ (b & (c ^ d));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n for(; i < 20; ++i) {\n t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]);\n t = (t << 1) | (t >>> 31);\n w[i] = t;\n f = d ^ (b & (c ^ d));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 2\n for(; i < 32; ++i) {\n t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]);\n t = (t << 1) | (t >>> 31);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n for(; i < 40; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t;\n e = d;\n d = c;\n 
// `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 3\n for(; i < 60; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = (b & c) | (d & (b ^ c));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 4\n for(; i < 80; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n s.h4 = (s.h4 + e) | 0;\n\n len -= 64;\n }\n}\n","/**\n * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation.\n *\n * See FIPS 180-2 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar sha256 = module.exports = forge.sha256 = forge.sha256 || {};\nforge.md.sha256 = forge.md.algorithms.sha256 = sha256;\n\n/**\n * Creates a SHA-256 message digest object.\n *\n * @return a message digest object.\n */\nsha256.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // SHA-256 state contains eight 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(64);\n\n // message digest object\n var md = {\n algorithm: 'sha256',\n blockLength: 64,\n digestLength: 32,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = {\n h0: 0x6A09E667,\n h1: 0xBB67AE85,\n h2: 0x3C6EF372,\n h3: 0xA54FF53A,\n h4: 0x510E527F,\n h5: 0x9B05688C,\n h6: 0x1F83D9AB,\n h7: 0x5BE0CD19\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. 
The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-256 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3,\n h4: _state.h4,\n h5: _state.h5,\n h6: _state.h6,\n h7: _state.h7\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32(s2.h0);\n rval.putInt32(s2.h1);\n rval.putInt32(s2.h2);\n rval.putInt32(s2.h3);\n rval.putInt32(s2.h4);\n rval.putInt32(s2.h5);\n rval.putInt32(s2.h6);\n rval.putInt32(s2.h7);\n return rval;\n };\n\n return md;\n};\n\n// sha-256 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n// table of constants\nvar _k = null;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // create K table for SHA-256\n _k = [\n 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,\n 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,\n 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,\n 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,\n 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,\n 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,\n 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,\n 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,\n 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,\n 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,\n 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,\n 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,\n 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,\n 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,\n 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,\n 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2];\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-256 state with the given byte buffer.\n *\n * @param s the SHA-256 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h;\n var len = bytes.length();\n while(len >= 64) {\n // the w array will be populated with sixteen 32-bit big-endian words\n // and then extended into 64 32-bit words according to SHA-256\n for(i = 0; i < 16; ++i) {\n w[i] = bytes.getInt32();\n }\n for(; i < 64; ++i) {\n // XOR word 2 words ago rot right 17, rot right 19, shft right 10\n t1 = w[i - 2];\n t1 =\n ((t1 >>> 17) | (t1 << 15)) ^\n ((t1 >>> 19) | (t1 << 13)) ^\n (t1 >>> 10);\n // XOR word 15 words ago rot right 7, rot right 18, shft right 3\n 
t2 = w[i - 15];\n t2 =\n ((t2 >>> 7) | (t2 << 25)) ^\n ((t2 >>> 18) | (t2 << 14)) ^\n (t2 >>> 3);\n // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32\n w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0;\n }\n\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n e = s.h4;\n f = s.h5;\n g = s.h6;\n h = s.h7;\n\n // round function\n for(i = 0; i < 64; ++i) {\n // Sum1(e)\n s1 =\n ((e >>> 6) | (e << 26)) ^\n ((e >>> 11) | (e << 21)) ^\n ((e >>> 25) | (e << 7));\n // Ch(e, f, g) (optimized the same way as SHA-1)\n ch = g ^ (e & (f ^ g));\n // Sum0(a)\n s0 =\n ((a >>> 2) | (a << 30)) ^\n ((a >>> 13) | (a << 19)) ^\n ((a >>> 22) | (a << 10));\n // Maj(a, b, c) (optimized the same way as SHA-1)\n maj = (a & b) | (c & (a ^ b));\n\n // main algorithm\n t1 = h + s1 + ch + _k[i] + w[i];\n t2 = s0 + maj;\n h = g;\n g = f;\n f = e;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n // can't truncate with `| 0`\n e = (d + t1) >>> 0;\n d = c;\n c = b;\n b = a;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n // can't truncate with `| 0`\n a = (t1 + t2) >>> 0;\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n s.h4 = (s.h4 + e) | 0;\n s.h5 = (s.h5 + f) | 0;\n s.h6 = (s.h6 + g) | 0;\n s.h7 = (s.h7 + h) | 0;\n len -= 64;\n }\n}\n","/**\n * Secure Hash Algorithm with a 1024-bit block size implementation.\n *\n * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For\n * SHA-256 (block size 512 bits), see sha256.js.\n *\n * See FIPS 180-4 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2014-2015 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar sha512 = module.exports = forge.sha512 = forge.sha512 || {};\n\n// SHA-512\nforge.md.sha512 = forge.md.algorithms.sha512 = sha512;\n\n// SHA-384\nvar sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {};\nsha384.create = function() {\n return sha512.create('SHA-384');\n};\nforge.md.sha384 = forge.md.algorithms.sha384 = sha384;\n\n// SHA-512/256\nforge.sha512.sha256 = forge.sha512.sha256 || {\n create: function() {\n return sha512.create('SHA-512/256');\n }\n};\nforge.md['sha512/256'] = forge.md.algorithms['sha512/256'] =\n forge.sha512.sha256;\n\n// SHA-512/224\nforge.sha512.sha224 = forge.sha512.sha224 || {\n create: function() {\n return sha512.create('SHA-512/224');\n }\n};\nforge.md['sha512/224'] = forge.md.algorithms['sha512/224'] =\n forge.sha512.sha224;\n\n/**\n * Creates a SHA-2 message digest object.\n *\n * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224,\n * SHA-512/256).\n *\n * @return a message digest object.\n */\nsha512.create = function(algorithm) {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n if(typeof algorithm === 'undefined') {\n algorithm = 'SHA-512';\n }\n\n if(!(algorithm in _states)) {\n throw new Error('Invalid SHA-512 algorithm: ' + algorithm);\n }\n\n // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints)\n var _state = _states[algorithm];\n var _h = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for 64-bit word storage\n var _w = new Array(80);\n for(var wi = 0; wi < 80; ++wi) {\n _w[wi] = new Array(2);\n }\n\n // determine digest length by algorithm name (default)\n var digestLength = 64;\n switch(algorithm) {\n case 'SHA-384':\n digestLength = 48;\n break;\n case 'SHA-512/256':\n digestLength = 
32;\n break;\n case 'SHA-512/224':\n digestLength = 28;\n break;\n }\n\n // message digest object\n var md = {\n // SHA-512 => sha512\n algorithm: algorithm.replace('-', '').toLowerCase(),\n blockLength: 128,\n digestLength: digestLength,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 16\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength128 for backwards-compatibility)\n md.fullMessageLength = md.messageLength128 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _h = new Array(_state.length);\n for(var i = 0; i < _state.length; ++i) {\n _h[i] = _state[i].slice(0);\n }\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_h, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-512 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 896 mod 1024. In other words,\n the data to be digested must be a multiple of 1024 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 16 bytes (128\n bits), that means that the last segment of the data must have 112 bytes\n (896 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 896 mod 1024 because\n 1024 - 128 = 896.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 896 mod 1024, then 1024 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var h = new Array(_h.length);\n for(var i = 0; i < _h.length; ++i) {\n h[i] = _h[i].slice(0);\n }\n _update(h, _w, finalBlock);\n var rval = forge.util.createBuffer();\n var hlen;\n if(algorithm === 'SHA-512') {\n hlen = h.length;\n } else if(algorithm === 'SHA-384') {\n hlen = h.length - 2;\n } else {\n hlen = h.length - 4;\n }\n for(var i = 0; i < hlen; ++i) {\n rval.putInt32(h[i][0]);\n if(i !== hlen - 1 || algorithm !== 'SHA-512/224') {\n rval.putInt32(h[i][1]);\n }\n }\n return rval;\n };\n\n return md;\n};\n\n// sha-512 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n// table of constants\nvar _k = null;\n\n// initial hash states\nvar _states = null;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 128);\n\n // create K table for SHA-512\n _k = [\n [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd],\n [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc],\n [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019],\n [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118],\n [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe],\n [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2],\n [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1],\n [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694],\n [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3],\n [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65],\n [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483],\n [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5],\n [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210],\n [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4],\n [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725],\n [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70],\n [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926],\n [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df],\n [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8],\n [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b],\n [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001],\n [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30],\n [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910],\n [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8],\n [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53],\n [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8],\n [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb],\n [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3],\n [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60],\n [0x84c87814, 
0xa1f0ab72], [0x8cc70208, 0x1a6439ec],\n [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9],\n [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b],\n [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207],\n [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178],\n [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6],\n [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b],\n [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493],\n [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c],\n [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a],\n [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817]\n ];\n\n // initial hash states\n _states = {};\n _states['SHA-512'] = [\n [0x6a09e667, 0xf3bcc908],\n [0xbb67ae85, 0x84caa73b],\n [0x3c6ef372, 0xfe94f82b],\n [0xa54ff53a, 0x5f1d36f1],\n [0x510e527f, 0xade682d1],\n [0x9b05688c, 0x2b3e6c1f],\n [0x1f83d9ab, 0xfb41bd6b],\n [0x5be0cd19, 0x137e2179]\n ];\n _states['SHA-384'] = [\n [0xcbbb9d5d, 0xc1059ed8],\n [0x629a292a, 0x367cd507],\n [0x9159015a, 0x3070dd17],\n [0x152fecd8, 0xf70e5939],\n [0x67332667, 0xffc00b31],\n [0x8eb44a87, 0x68581511],\n [0xdb0c2e0d, 0x64f98fa7],\n [0x47b5481d, 0xbefa4fa4]\n ];\n _states['SHA-512/256'] = [\n [0x22312194, 0xFC2BF72C],\n [0x9F555FA3, 0xC84C64C2],\n [0x2393B86B, 0x6F53B151],\n [0x96387719, 0x5940EABD],\n [0x96283EE2, 0xA88EFFE3],\n [0xBE5E1E25, 0x53863992],\n [0x2B0199FC, 0x2C85B8AA],\n [0x0EB72DDC, 0x81C52CA2]\n ];\n _states['SHA-512/224'] = [\n [0x8C3D37C8, 0x19544DA2],\n [0x73E19966, 0x89DCD4D6],\n [0x1DFAB7AE, 0x32FF9C82],\n [0x679DD514, 0x582F9FCF],\n [0x0F6D2B69, 0x7BD44DA8],\n [0x77E36F73, 0x04C48942],\n [0x3F9D85A8, 0x6A1D36C8],\n [0x1112E6AD, 0x91D692A1]\n ];\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-512 state with the given byte buffer.\n *\n * @param s the SHA-512 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (128 byte) chunks\n var t1_hi, t1_lo;\n var t2_hi, t2_lo;\n var s0_hi, s0_lo;\n var s1_hi, s1_lo;\n var ch_hi, ch_lo;\n var maj_hi, maj_lo;\n var a_hi, a_lo;\n var b_hi, b_lo;\n var c_hi, c_lo;\n var d_hi, d_lo;\n var e_hi, e_lo;\n var f_hi, f_lo;\n var g_hi, g_lo;\n var h_hi, h_lo;\n var i, hi, lo, w2, w7, w15, w16;\n var len = bytes.length();\n while(len >= 128) {\n // the w array will be populated with sixteen 64-bit big-endian words\n // and then extended into 64 64-bit words according to SHA-512\n for(i = 0; i < 16; ++i) {\n w[i][0] = bytes.getInt32() >>> 0;\n w[i][1] = bytes.getInt32() >>> 0;\n }\n for(; i < 80; ++i) {\n // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x)\n w2 = w[i - 2];\n hi = w2[0];\n lo = w2[1];\n\n // high bits\n t1_hi = (\n ((hi >>> 19) | (lo << 13)) ^ // ROTR 19\n ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29)\n (hi >>> 6)) >>> 0; // SHR 6\n // low bits\n t1_lo = (\n ((hi << 13) | (lo >>> 19)) ^ // ROTR 19\n ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29)\n ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6\n\n // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x)\n w15 = w[i - 15];\n hi = w15[0];\n lo = w15[1];\n\n // high bits\n t2_hi = (\n ((hi >>> 1) | (lo << 31)) ^ // ROTR 1\n ((hi >>> 8) | (lo << 24)) ^ // ROTR 8\n (hi >>> 7)) >>> 0; // SHR 7\n // low bits\n t2_lo = (\n ((hi << 31) | (lo >>> 1)) ^ // ROTR 1\n ((hi << 24) | (lo >>> 8)) ^ // ROTR 8\n ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7\n\n // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow)\n w7 = w[i - 7];\n w16 = w[i - 16];\n lo = 
(t1_lo + w7[1] + t2_lo + w16[1]);\n w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] +\n ((lo / 0x100000000) >>> 0)) >>> 0;\n w[i][1] = lo >>> 0;\n }\n\n // initialize hash value for this chunk\n a_hi = s[0][0];\n a_lo = s[0][1];\n b_hi = s[1][0];\n b_lo = s[1][1];\n c_hi = s[2][0];\n c_lo = s[2][1];\n d_hi = s[3][0];\n d_lo = s[3][1];\n e_hi = s[4][0];\n e_lo = s[4][1];\n f_hi = s[5][0];\n f_lo = s[5][1];\n g_hi = s[6][0];\n g_lo = s[6][1];\n h_hi = s[7][0];\n h_lo = s[7][1];\n\n // round function\n for(i = 0; i < 80; ++i) {\n // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e)\n s1_hi = (\n ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14\n ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18\n ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9)\n s1_lo = (\n ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14\n ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18\n ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9)\n\n // Ch(e, f, g) (optimized the same way as SHA-1)\n ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0;\n ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0;\n\n // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a)\n s0_hi = (\n ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28\n ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2)\n ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7)\n s0_lo = (\n ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28\n ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2)\n ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7)\n\n // Maj(a, b, c) (optimized the same way as SHA-1)\n maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0;\n maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0;\n\n // main algorithm\n // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow)\n lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]);\n t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] +\n ((lo / 0x100000000) >>> 0)) >>> 0;\n t1_lo = lo >>> 0;\n\n // t2 = s0 + maj modulo 2^64 (carry lo overflow)\n lo = s0_lo + maj_lo;\n t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n t2_lo = lo >>> 0;\n\n h_hi = g_hi;\n h_lo = g_lo;\n\n g_hi = f_hi;\n g_lo = f_lo;\n\n f_hi = e_hi;\n f_lo = e_lo;\n\n // e = (d + t1) modulo 2^64 (carry lo overflow)\n lo = d_lo + t1_lo;\n e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n e_lo = lo >>> 0;\n\n d_hi = c_hi;\n d_lo = c_lo;\n\n c_hi = b_hi;\n c_lo = b_lo;\n\n b_hi = a_hi;\n b_lo = a_lo;\n\n // a = (t1 + t2) modulo 2^64 (carry lo overflow)\n lo = t1_lo + t2_lo;\n a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n a_lo = lo >>> 0;\n }\n\n // update hash state (additional modulo 2^64)\n lo = s[0][1] + a_lo;\n s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[0][1] = lo >>> 0;\n\n lo = s[1][1] + b_lo;\n s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[1][1] = lo >>> 0;\n\n lo = s[2][1] + c_lo;\n s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[2][1] = lo >>> 0;\n\n lo = s[3][1] + d_lo;\n s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[3][1] = lo >>> 0;\n\n lo = s[4][1] + e_lo;\n s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[4][1] = lo >>> 0;\n\n lo = s[5][1] + f_lo;\n s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[5][1] = lo >>> 0;\n\n lo = s[6][1] + g_lo;\n s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[6][1] = lo >>> 0;\n\n lo = s[7][1] + h_lo;\n s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[7][1] = lo >>> 0;\n\n len -= 128;\n 
}\n}\n","/**\n * Functions to output keys in SSH-friendly formats.\n *\n * This is part of the Forge project which may be used under the terms of\n * either the BSD License or the GNU General Public License (GPL) Version 2.\n *\n * See: https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE\n *\n * @author https://github.com/shellac\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./hmac');\nrequire('./md5');\nrequire('./sha1');\nrequire('./util');\n\nvar ssh = module.exports = forge.ssh = forge.ssh || {};\n\n/**\n * Encodes (and optionally encrypts) a private RSA key as a Putty PPK file.\n *\n * @param privateKey the key.\n * @param passphrase a passphrase to protect the key (falsy for no encryption).\n * @param comment a comment to include in the key file.\n *\n * @return the PPK file as a string.\n */\nssh.privateKeyToPutty = function(privateKey, passphrase, comment) {\n comment = comment || '';\n passphrase = passphrase || '';\n var algorithm = 'ssh-rsa';\n var encryptionAlgorithm = (passphrase === '') ? 'none' : 'aes256-cbc';\n\n var ppk = 'PuTTY-User-Key-File-2: ' + algorithm + '\\r\\n';\n ppk += 'Encryption: ' + encryptionAlgorithm + '\\r\\n';\n ppk += 'Comment: ' + comment + '\\r\\n';\n\n // public key into buffer for ppk\n var pubbuffer = forge.util.createBuffer();\n _addStringToBuffer(pubbuffer, algorithm);\n _addBigIntegerToBuffer(pubbuffer, privateKey.e);\n _addBigIntegerToBuffer(pubbuffer, privateKey.n);\n\n // write public key\n var pub = forge.util.encode64(pubbuffer.bytes(), 64);\n var length = Math.floor(pub.length / 66) + 1; // 66 = 64 + \\r\\n\n ppk += 'Public-Lines: ' + length + '\\r\\n';\n ppk += pub;\n\n // private key into a buffer\n var privbuffer = forge.util.createBuffer();\n _addBigIntegerToBuffer(privbuffer, privateKey.d);\n _addBigIntegerToBuffer(privbuffer, privateKey.p);\n _addBigIntegerToBuffer(privbuffer, privateKey.q);\n _addBigIntegerToBuffer(privbuffer, privateKey.qInv);\n\n // optionally encrypt the private key\n var priv;\n if(!passphrase) {\n // use the unencrypted buffer\n priv = forge.util.encode64(privbuffer.bytes(), 64);\n } else {\n // encrypt RSA key using passphrase\n var encLen = privbuffer.length() + 16 - 1;\n encLen -= encLen % 16;\n\n // pad private key with sha1-d data -- needs to be a multiple of 16\n var padding = _sha1(privbuffer.bytes());\n\n padding.truncate(padding.length() - encLen + privbuffer.length());\n privbuffer.putBuffer(padding);\n\n var aeskey = forge.util.createBuffer();\n aeskey.putBuffer(_sha1('\\x00\\x00\\x00\\x00', passphrase));\n aeskey.putBuffer(_sha1('\\x00\\x00\\x00\\x01', passphrase));\n\n // encrypt some bytes using CBC mode\n // key is 40 bytes, so truncate *by* 8 bytes\n var cipher = forge.aes.createEncryptionCipher(aeskey.truncate(8), 'CBC');\n cipher.start(forge.util.createBuffer().fillWithByte(0, 16));\n cipher.update(privbuffer.copy());\n cipher.finish();\n var encrypted = cipher.output;\n\n // Note: this appears to differ from Putty -- is forge wrong, or putty?\n // due to padding we finish as an exact multiple of 16\n encrypted.truncate(16); // all padding\n\n priv = forge.util.encode64(encrypted.bytes(), 64);\n }\n\n // output private key\n length = Math.floor(priv.length / 66) + 1; // 64 + \\r\\n\n ppk += '\\r\\nPrivate-Lines: ' + length + '\\r\\n';\n ppk += priv;\n\n // MAC\n var mackey = _sha1('putty-private-key-file-mac-key', passphrase);\n\n var macbuffer = forge.util.createBuffer();\n _addStringToBuffer(macbuffer, algorithm);\n 
_addStringToBuffer(macbuffer, encryptionAlgorithm);\n _addStringToBuffer(macbuffer, comment);\n macbuffer.putInt32(pubbuffer.length());\n macbuffer.putBuffer(pubbuffer);\n macbuffer.putInt32(privbuffer.length());\n macbuffer.putBuffer(privbuffer);\n\n var hmac = forge.hmac.create();\n hmac.start('sha1', mackey);\n hmac.update(macbuffer.bytes());\n\n ppk += '\\r\\nPrivate-MAC: ' + hmac.digest().toHex() + '\\r\\n';\n\n return ppk;\n};\n\n/**\n * Encodes a public RSA key as an OpenSSH file.\n *\n * @param key the key.\n * @param comment a comment.\n *\n * @return the public key in OpenSSH format.\n */\nssh.publicKeyToOpenSSH = function(key, comment) {\n var type = 'ssh-rsa';\n comment = comment || '';\n\n var buffer = forge.util.createBuffer();\n _addStringToBuffer(buffer, type);\n _addBigIntegerToBuffer(buffer, key.e);\n _addBigIntegerToBuffer(buffer, key.n);\n\n return type + ' ' + forge.util.encode64(buffer.bytes()) + ' ' + comment;\n};\n\n/**\n * Encodes a private RSA key as an OpenSSH file.\n *\n * @param key the key.\n * @param passphrase a passphrase to protect the key (falsy for no encryption).\n *\n * @return the public key in OpenSSH format.\n */\nssh.privateKeyToOpenSSH = function(privateKey, passphrase) {\n if(!passphrase) {\n return forge.pki.privateKeyToPem(privateKey);\n }\n // OpenSSH private key is just a legacy format, it seems\n return forge.pki.encryptRsaPrivateKey(privateKey, passphrase,\n {legacy: true, algorithm: 'aes128'});\n};\n\n/**\n * Gets the SSH fingerprint for the given public key.\n *\n * @param options the options to use.\n * [md] the message digest object to use (defaults to forge.md.md5).\n * [encoding] an alternative output encoding, such as 'hex'\n * (defaults to none, outputs a byte buffer).\n * [delimiter] the delimiter to use between bytes for 'hex' encoded\n * output, eg: ':' (defaults to none).\n *\n * @return the fingerprint as a byte buffer or other encoding based on options.\n */\nssh.getPublicKeyFingerprint = function(key, options) {\n options = options || {};\n var md = options.md || forge.md.md5.create();\n\n var type = 'ssh-rsa';\n var buffer = forge.util.createBuffer();\n _addStringToBuffer(buffer, type);\n _addBigIntegerToBuffer(buffer, key.e);\n _addBigIntegerToBuffer(buffer, key.n);\n\n // hash public key bytes\n md.start();\n md.update(buffer.getBytes());\n var digest = md.digest();\n if(options.encoding === 'hex') {\n var hex = digest.toHex();\n if(options.delimiter) {\n return hex.match(/.{2}/g).join(options.delimiter);\n }\n return hex;\n } else if(options.encoding === 'binary') {\n return digest.getBytes();\n } else if(options.encoding) {\n throw new Error('Unknown encoding \"' + options.encoding + '\".');\n }\n return digest;\n};\n\n/**\n * Adds len(val) then val to a buffer.\n *\n * @param buffer the buffer to add to.\n * @param val a big integer.\n */\nfunction _addBigIntegerToBuffer(buffer, val) {\n var hexVal = val.toString(16);\n // ensure 2s complement +ve\n if(hexVal[0] >= '8') {\n hexVal = '00' + hexVal;\n }\n var bytes = forge.util.hexToBytes(hexVal);\n buffer.putInt32(bytes.length);\n buffer.putBytes(bytes);\n}\n\n/**\n * Adds len(val) then val to a buffer.\n *\n * @param buffer the buffer to add to.\n * @param val a string.\n */\nfunction _addStringToBuffer(buffer, val) {\n buffer.putInt32(val.length);\n buffer.putString(val);\n}\n\n/**\n * Hashes the arguments into one value using SHA-1.\n *\n * @return the sha1 hash of the provided arguments.\n */\nfunction _sha1() {\n var sha = forge.md.sha1.create();\n var num = 
arguments.length;\n for (var i = 0; i < num; ++i) {\n sha.update(arguments[i]);\n }\n return sha.digest();\n}\n","/**\n * A Javascript implementation of Transport Layer Security (TLS).\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2014 Digital Bazaar, Inc.\n *\n * The TLS Handshake Protocol involves the following steps:\n *\n * - Exchange hello messages to agree on algorithms, exchange random values,\n * and check for session resumption.\n *\n * - Exchange the necessary cryptographic parameters to allow the client and\n * server to agree on a premaster secret.\n *\n * - Exchange certificates and cryptographic information to allow the client\n * and server to authenticate themselves.\n *\n * - Generate a master secret from the premaster secret and exchanged random\n * values.\n *\n * - Provide security parameters to the record layer.\n *\n * - Allow the client and server to verify that their peer has calculated the\n * same security parameters and that the handshake occurred without tampering\n * by an attacker.\n *\n * Up to 4 different messages may be sent during a key exchange. The server\n * certificate, the server key exchange, the client certificate, and the\n * client key exchange.\n *\n * A typical handshake (from the client's perspective).\n *\n * 1. Client sends ClientHello.\n * 2. Client receives ServerHello.\n * 3. Client receives optional Certificate.\n * 4. Client receives optional ServerKeyExchange.\n * 5. Client receives ServerHelloDone.\n * 6. Client sends optional Certificate.\n * 7. Client sends ClientKeyExchange.\n * 8. Client sends optional CertificateVerify.\n * 9. Client sends ChangeCipherSpec.\n * 10. Client sends Finished.\n * 11. Client receives ChangeCipherSpec.\n * 12. Client receives Finished.\n * 13. Client sends/receives application data.\n *\n * To reuse an existing session:\n *\n * 1. Client sends ClientHello with session ID for reuse.\n * 2. Client receives ServerHello with same session ID if reusing.\n * 3. Client receives ChangeCipherSpec message if reusing.\n * 4. Client receives Finished.\n * 5. Client sends ChangeCipherSpec.\n * 6. Client sends Finished.\n *\n * Note: Client ignores HelloRequest if in the middle of a handshake.\n *\n * Record Layer:\n *\n * The record layer fragments information blocks into TLSPlaintext records\n * carrying data in chunks of 2^14 bytes or less. Client message boundaries are\n * not preserved in the record layer (i.e., multiple client messages of the\n * same ContentType MAY be coalesced into a single TLSPlaintext record, or a\n * single message MAY be fragmented across several records).\n *\n * struct {\n * uint8 major;\n * uint8 minor;\n * } ProtocolVersion;\n *\n * struct {\n * ContentType type;\n * ProtocolVersion version;\n * uint16 length;\n * opaque fragment[TLSPlaintext.length];\n * } TLSPlaintext;\n *\n * type:\n * The higher-level protocol used to process the enclosed fragment.\n *\n * version:\n * The version of the protocol being employed. TLS Version 1.2 uses version\n * {3, 3}. TLS Version 1.0 uses version {3, 1}. Note that a client that\n * supports multiple versions of TLS may not know what version will be\n * employed before it receives the ServerHello.\n *\n * length:\n * The length (in bytes) of the following TLSPlaintext.fragment. The length\n * MUST NOT exceed 2^14 = 16384 bytes.\n *\n * fragment:\n * The application data. 
This data is transparent and treated as an\n * independent block to be dealt with by the higher-level protocol specified\n * by the type field.\n *\n * Implementations MUST NOT send zero-length fragments of Handshake, Alert, or\n * ChangeCipherSpec content types. Zero-length fragments of Application data\n * MAY be sent as they are potentially useful as a traffic analysis\n * countermeasure.\n *\n * Note: Data of different TLS record layer content types MAY be interleaved.\n * Application data is generally of lower precedence for transmission than\n * other content types. However, records MUST be delivered to the network in\n * the same order as they are protected by the record layer. Recipients MUST\n * receive and process interleaved application layer traffic during handshakes\n * subsequent to the first one on a connection.\n *\n * struct {\n * ContentType type; // same as TLSPlaintext.type\n * ProtocolVersion version;// same as TLSPlaintext.version\n * uint16 length;\n * opaque fragment[TLSCompressed.length];\n * } TLSCompressed;\n *\n * length:\n * The length (in bytes) of the following TLSCompressed.fragment.\n * The length MUST NOT exceed 2^14 + 1024.\n *\n * fragment:\n * The compressed form of TLSPlaintext.fragment.\n *\n * Note: A CompressionMethod.null operation is an identity operation; no fields\n * are altered. In this implementation, since no compression is supported,\n * uncompressed records are always the same as compressed records.\n *\n * Encryption Information:\n *\n * The encryption and MAC functions translate a TLSCompressed structure into a\n * TLSCiphertext. The decryption functions reverse the process. The MAC of the\n * record also includes a sequence number so that missing, extra, or repeated\n * messages are detectable.\n *\n * struct {\n * ContentType type;\n * ProtocolVersion version;\n * uint16 length;\n * select (SecurityParameters.cipher_type) {\n * case stream: GenericStreamCipher;\n * case block: GenericBlockCipher;\n * case aead: GenericAEADCipher;\n * } fragment;\n * } TLSCiphertext;\n *\n * type:\n * The type field is identical to TLSCompressed.type.\n *\n * version:\n * The version field is identical to TLSCompressed.version.\n *\n * length:\n * The length (in bytes) of the following TLSCiphertext.fragment.\n * The length MUST NOT exceed 2^14 + 2048.\n *\n * fragment:\n * The encrypted form of TLSCompressed.fragment, with the MAC.\n *\n * Note: Only CBC Block Ciphers are supported by this implementation.\n *\n * The TLSCompressed.fragment structures are converted to/from block\n * TLSCiphertext.fragment structures.\n *\n * struct {\n * opaque IV[SecurityParameters.record_iv_length];\n * block-ciphered struct {\n * opaque content[TLSCompressed.length];\n * opaque MAC[SecurityParameters.mac_length];\n * uint8 padding[GenericBlockCipher.padding_length];\n * uint8 padding_length;\n * };\n * } GenericBlockCipher;\n *\n * The MAC is generated as described in Section 6.2.3.1.\n *\n * IV:\n * The Initialization Vector (IV) SHOULD be chosen at random, and MUST be\n * unpredictable. Note that in versions of TLS prior to 1.1, there was no\n * IV field, and the last ciphertext block of the previous record (the \"CBC\n * residue\") was used as the IV. This was changed to prevent the attacks\n * described in [CBCATT]. 
For block ciphers, the IV length is of length\n * SecurityParameters.record_iv_length, which is equal to the\n * SecurityParameters.block_size.\n *\n * padding:\n * Padding that is added to force the length of the plaintext to be an\n * integral multiple of the block cipher's block length. The padding MAY be\n * any length up to 255 bytes, as long as it results in the\n * TLSCiphertext.length being an integral multiple of the block length.\n * Lengths longer than necessary might be desirable to frustrate attacks on\n * a protocol that are based on analysis of the lengths of exchanged\n * messages. Each uint8 in the padding data vector MUST be filled with the\n * padding length value. The receiver MUST check this padding and MUST use\n * the bad_record_mac alert to indicate padding errors.\n *\n * padding_length:\n * The padding length MUST be such that the total size of the\n * GenericBlockCipher structure is a multiple of the cipher's block length.\n * Legal values range from zero to 255, inclusive. This length specifies the\n * length of the padding field exclusive of the padding_length field itself.\n *\n * The encrypted data length (TLSCiphertext.length) is one more than the sum of\n * SecurityParameters.block_length, TLSCompressed.length,\n * SecurityParameters.mac_length, and padding_length.\n *\n * Example: If the block length is 8 bytes, the content length\n * (TLSCompressed.length) is 61 bytes, and the MAC length is 20 bytes, then the\n * length before padding is 82 bytes (this does not include the IV. Thus, the\n * padding length modulo 8 must be equal to 6 in order to make the total length\n * an even multiple of 8 bytes (the block length). The padding length can be\n * 6, 14, 22, and so on, through 254. If the padding length were the minimum\n * necessary, 6, the padding would be 6 bytes, each containing the value 6.\n * Thus, the last 8 octets of the GenericBlockCipher before block encryption\n * would be xx 06 06 06 06 06 06 06, where xx is the last octet of the MAC.\n *\n * Note: With block ciphers in CBC mode (Cipher Block Chaining), it is critical\n * that the entire plaintext of the record be known before any ciphertext is\n * transmitted. Otherwise, it is possible for the attacker to mount the attack\n * described in [CBCATT].\n *\n * Implementation note: Canvel et al. [CBCTIME] have demonstrated a timing\n * attack on CBC padding based on the time required to compute the MAC. In\n * order to defend against this attack, implementations MUST ensure that\n * record processing time is essentially the same whether or not the padding\n * is correct. In general, the best way to do this is to compute the MAC even\n * if the padding is incorrect, and only then reject the packet. For instance,\n * if the pad appears to be incorrect, the implementation might assume a\n * zero-length pad and then compute the MAC. 
This leaves a small timing\n * channel, since MAC performance depends, to some extent, on the size of the\n * data fragment, but it is not believed to be large enough to be exploitable,\n * due to the large block size of existing MACs and the small size of the\n * timing signal.\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./hmac');\nrequire('./md5');\nrequire('./pem');\nrequire('./pki');\nrequire('./random');\nrequire('./sha1');\nrequire('./util');\n\n/**\n * Generates pseudo random bytes by mixing the result of two hash functions,\n * MD5 and SHA-1.\n *\n * prf_TLS1(secret, label, seed) =\n * P_MD5(S1, label + seed) XOR P_SHA-1(S2, label + seed);\n *\n * Each P_hash function functions as follows:\n *\n * P_hash(secret, seed) = HMAC_hash(secret, A(1) + seed) +\n * HMAC_hash(secret, A(2) + seed) +\n * HMAC_hash(secret, A(3) + seed) + ...\n * A() is defined as:\n * A(0) = seed\n * A(i) = HMAC_hash(secret, A(i-1))\n *\n * The '+' operator denotes concatenation.\n *\n * As many iterations A(N) as are needed are performed to generate enough\n * pseudo random byte output. If an iteration creates more data than is\n * necessary, then it is truncated.\n *\n * Therefore:\n * A(1) = HMAC_hash(secret, A(0))\n * = HMAC_hash(secret, seed)\n * A(2) = HMAC_hash(secret, A(1))\n * = HMAC_hash(secret, HMAC_hash(secret, seed))\n *\n * Therefore:\n * P_hash(secret, seed) =\n * HMAC_hash(secret, HMAC_hash(secret, A(0)) + seed) +\n * HMAC_hash(secret, HMAC_hash(secret, A(1)) + seed) +\n * ...\n *\n * Therefore:\n * P_hash(secret, seed) =\n * HMAC_hash(secret, HMAC_hash(secret, seed) + seed) +\n * HMAC_hash(secret, HMAC_hash(secret, HMAC_hash(secret, seed)) + seed) +\n * ...\n *\n * @param secret the secret to use.\n * @param label the label to use.\n * @param seed the seed value to use.\n * @param length the number of bytes to generate.\n *\n * @return the pseudo random bytes in a byte buffer.\n */\nvar prf_TLS1 = function(secret, label, seed, length) {\n var rval = forge.util.createBuffer();\n\n /* For TLS 1.0, the secret is split in half, into two secrets of equal\n length. If the secret has an odd length then the last byte of the first\n half will be the same as the first byte of the second. The length of the\n two secrets is half of the secret rounded up. 
*/\n var idx = (secret.length >> 1);\n var slen = idx + (secret.length & 1);\n var s1 = secret.substr(0, slen);\n var s2 = secret.substr(idx, slen);\n var ai = forge.util.createBuffer();\n var hmac = forge.hmac.create();\n seed = label + seed;\n\n // determine the number of iterations that must be performed to generate\n // enough output bytes, md5 creates 16 byte hashes, sha1 creates 20\n var md5itr = Math.ceil(length / 16);\n var sha1itr = Math.ceil(length / 20);\n\n // do md5 iterations\n hmac.start('MD5', s1);\n var md5bytes = forge.util.createBuffer();\n ai.putBytes(seed);\n for(var i = 0; i < md5itr; ++i) {\n // HMAC_hash(secret, A(i-1))\n hmac.start(null, null);\n hmac.update(ai.getBytes());\n ai.putBuffer(hmac.digest());\n\n // HMAC_hash(secret, A(i) + seed)\n hmac.start(null, null);\n hmac.update(ai.bytes() + seed);\n md5bytes.putBuffer(hmac.digest());\n }\n\n // do sha1 iterations\n hmac.start('SHA1', s2);\n var sha1bytes = forge.util.createBuffer();\n ai.clear();\n ai.putBytes(seed);\n for(var i = 0; i < sha1itr; ++i) {\n // HMAC_hash(secret, A(i-1))\n hmac.start(null, null);\n hmac.update(ai.getBytes());\n ai.putBuffer(hmac.digest());\n\n // HMAC_hash(secret, A(i) + seed)\n hmac.start(null, null);\n hmac.update(ai.bytes() + seed);\n sha1bytes.putBuffer(hmac.digest());\n }\n\n // XOR the md5 bytes with the sha1 bytes\n rval.putBytes(forge.util.xorBytes(\n md5bytes.getBytes(), sha1bytes.getBytes(), length));\n\n return rval;\n};\n\n/**\n * Generates pseudo random bytes using a SHA256 algorithm. For TLS 1.2.\n *\n * @param secret the secret to use.\n * @param label the label to use.\n * @param seed the seed value to use.\n * @param length the number of bytes to generate.\n *\n * @return the pseudo random bytes in a byte buffer.\n */\nvar prf_sha256 = function(secret, label, seed, length) {\n // FIXME: implement me for TLS 1.2\n};\n\n/**\n * Gets a MAC for a record using the SHA-1 hash algorithm.\n *\n * @param key the mac key.\n * @param state the sequence number (array of two 32-bit integers).\n * @param record the record.\n *\n * @return the sha-1 hash (20 bytes) for the given record.\n */\nvar hmac_sha1 = function(key, seqNum, record) {\n /* MAC is computed like so:\n HMAC_hash(\n key, seqNum +\n TLSCompressed.type +\n TLSCompressed.version +\n TLSCompressed.length +\n TLSCompressed.fragment)\n */\n var hmac = forge.hmac.create();\n hmac.start('SHA1', key);\n var b = forge.util.createBuffer();\n b.putInt32(seqNum[0]);\n b.putInt32(seqNum[1]);\n b.putByte(record.type);\n b.putByte(record.version.major);\n b.putByte(record.version.minor);\n b.putInt16(record.length);\n b.putBytes(record.fragment.bytes());\n hmac.update(b.getBytes());\n return hmac.digest().getBytes();\n};\n\n/**\n * Compresses the TLSPlaintext record into a TLSCompressed record using the\n * deflate algorithm.\n *\n * @param c the TLS connection.\n * @param record the TLSPlaintext record to compress.\n * @param s the ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nvar deflate = function(c, record, s) {\n var rval = false;\n\n try {\n var bytes = c.deflate(record.fragment.getBytes());\n record.fragment = forge.util.createBuffer(bytes);\n record.length = bytes.length;\n rval = true;\n } catch(ex) {\n // deflate error, fail out\n }\n\n return rval;\n};\n\n/**\n * Decompresses the TLSCompressed record into a TLSPlaintext record using the\n * deflate algorithm.\n *\n * @param c the TLS connection.\n * @param record the TLSCompressed record to decompress.\n * @param s the 
ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nvar inflate = function(c, record, s) {\n var rval = false;\n\n try {\n var bytes = c.inflate(record.fragment.getBytes());\n record.fragment = forge.util.createBuffer(bytes);\n record.length = bytes.length;\n rval = true;\n } catch(ex) {\n // inflate error, fail out\n }\n\n return rval;\n};\n\n/**\n * Reads a TLS variable-length vector from a byte buffer.\n *\n * Variable-length vectors are defined by specifying a subrange of legal\n * lengths, inclusively, using the notation . When these are\n * encoded, the actual length precedes the vector's contents in the byte\n * stream. The length will be in the form of a number consuming as many bytes\n * as required to hold the vector's specified maximum (ceiling) length. A\n * variable-length vector with an actual length field of zero is referred to\n * as an empty vector.\n *\n * @param b the byte buffer.\n * @param lenBytes the number of bytes required to store the length.\n *\n * @return the resulting byte buffer.\n */\nvar readVector = function(b, lenBytes) {\n var len = 0;\n switch(lenBytes) {\n case 1:\n len = b.getByte();\n break;\n case 2:\n len = b.getInt16();\n break;\n case 3:\n len = b.getInt24();\n break;\n case 4:\n len = b.getInt32();\n break;\n }\n\n // read vector bytes into a new buffer\n return forge.util.createBuffer(b.getBytes(len));\n};\n\n/**\n * Writes a TLS variable-length vector to a byte buffer.\n *\n * @param b the byte buffer.\n * @param lenBytes the number of bytes required to store the length.\n * @param v the byte buffer vector.\n */\nvar writeVector = function(b, lenBytes, v) {\n // encode length at the start of the vector, where the number of bytes for\n // the length is the maximum number of bytes it would take to encode the\n // vector's ceiling\n b.putInt(v.length(), lenBytes << 3);\n b.putBuffer(v);\n};\n\n/**\n * The tls implementation.\n */\nvar tls = {};\n\n/**\n * Version: TLS 1.2 = 3.3, TLS 1.1 = 3.2, TLS 1.0 = 3.1. Both TLS 1.1 and\n * TLS 1.2 were still too new (ie: openSSL didn't implement them) at the time\n * of this implementation so TLS 1.0 was implemented instead.\n */\ntls.Versions = {\n TLS_1_0: {major: 3, minor: 1},\n TLS_1_1: {major: 3, minor: 2},\n TLS_1_2: {major: 3, minor: 3}\n};\ntls.SupportedVersions = [\n tls.Versions.TLS_1_1,\n tls.Versions.TLS_1_0\n];\ntls.Version = tls.SupportedVersions[0];\n\n/**\n * Maximum fragment size. 
True maximum is 16384, but we fragment before that\n * to allow for unusual small increases during compression.\n */\ntls.MaxFragment = 16384 - 1024;\n\n/**\n * Whether this entity is considered the \"client\" or \"server\".\n * enum { server, client } ConnectionEnd;\n */\ntls.ConnectionEnd = {\n server: 0,\n client: 1\n};\n\n/**\n * Pseudo-random function algorithm used to generate keys from the master\n * secret.\n * enum { tls_prf_sha256 } PRFAlgorithm;\n */\ntls.PRFAlgorithm = {\n tls_prf_sha256: 0\n};\n\n/**\n * Bulk encryption algorithms.\n * enum { null, rc4, des3, aes } BulkCipherAlgorithm;\n */\ntls.BulkCipherAlgorithm = {\n none: null,\n rc4: 0,\n des3: 1,\n aes: 2\n};\n\n/**\n * Cipher types.\n * enum { stream, block, aead } CipherType;\n */\ntls.CipherType = {\n stream: 0,\n block: 1,\n aead: 2\n};\n\n/**\n * MAC (Message Authentication Code) algorithms.\n * enum { null, hmac_md5, hmac_sha1, hmac_sha256,\n * hmac_sha384, hmac_sha512} MACAlgorithm;\n */\ntls.MACAlgorithm = {\n none: null,\n hmac_md5: 0,\n hmac_sha1: 1,\n hmac_sha256: 2,\n hmac_sha384: 3,\n hmac_sha512: 4\n};\n\n/**\n * Compression algorithms.\n * enum { null(0), deflate(1), (255) } CompressionMethod;\n */\ntls.CompressionMethod = {\n none: 0,\n deflate: 1\n};\n\n/**\n * TLS record content types.\n * enum {\n * change_cipher_spec(20), alert(21), handshake(22),\n * application_data(23), (255)\n * } ContentType;\n */\ntls.ContentType = {\n change_cipher_spec: 20,\n alert: 21,\n handshake: 22,\n application_data: 23,\n heartbeat: 24\n};\n\n/**\n * TLS handshake types.\n * enum {\n * hello_request(0), client_hello(1), server_hello(2),\n * certificate(11), server_key_exchange (12),\n * certificate_request(13), server_hello_done(14),\n * certificate_verify(15), client_key_exchange(16),\n * finished(20), (255)\n * } HandshakeType;\n */\ntls.HandshakeType = {\n hello_request: 0,\n client_hello: 1,\n server_hello: 2,\n certificate: 11,\n server_key_exchange: 12,\n certificate_request: 13,\n server_hello_done: 14,\n certificate_verify: 15,\n client_key_exchange: 16,\n finished: 20\n};\n\n/**\n * TLS Alert Protocol.\n *\n * enum { warning(1), fatal(2), (255) } AlertLevel;\n *\n * enum {\n * close_notify(0),\n * unexpected_message(10),\n * bad_record_mac(20),\n * decryption_failed(21),\n * record_overflow(22),\n * decompression_failure(30),\n * handshake_failure(40),\n * bad_certificate(42),\n * unsupported_certificate(43),\n * certificate_revoked(44),\n * certificate_expired(45),\n * certificate_unknown(46),\n * illegal_parameter(47),\n * unknown_ca(48),\n * access_denied(49),\n * decode_error(50),\n * decrypt_error(51),\n * export_restriction(60),\n * protocol_version(70),\n * insufficient_security(71),\n * internal_error(80),\n * user_canceled(90),\n * no_renegotiation(100),\n * (255)\n * } AlertDescription;\n *\n * struct {\n * AlertLevel level;\n * AlertDescription description;\n * } Alert;\n */\ntls.Alert = {};\ntls.Alert.Level = {\n warning: 1,\n fatal: 2\n};\ntls.Alert.Description = {\n close_notify: 0,\n unexpected_message: 10,\n bad_record_mac: 20,\n decryption_failed: 21,\n record_overflow: 22,\n decompression_failure: 30,\n handshake_failure: 40,\n bad_certificate: 42,\n unsupported_certificate: 43,\n certificate_revoked: 44,\n certificate_expired: 45,\n certificate_unknown: 46,\n illegal_parameter: 47,\n unknown_ca: 48,\n access_denied: 49,\n decode_error: 50,\n decrypt_error: 51,\n export_restriction: 60,\n protocol_version: 70,\n insufficient_security: 71,\n internal_error: 80,\n user_canceled: 90,\n 
no_renegotiation: 100\n};\n\n/**\n * TLS Heartbeat Message types.\n * enum {\n * heartbeat_request(1),\n * heartbeat_response(2),\n * (255)\n * } HeartbeatMessageType;\n */\ntls.HeartbeatMessageType = {\n heartbeat_request: 1,\n heartbeat_response: 2\n};\n\n/**\n * Supported cipher suites.\n */\ntls.CipherSuites = {};\n\n/**\n * Gets a supported cipher suite from its 2 byte ID.\n *\n * @param twoBytes two bytes in a string.\n *\n * @return the matching supported cipher suite or null.\n */\ntls.getCipherSuite = function(twoBytes) {\n var rval = null;\n for(var key in tls.CipherSuites) {\n var cs = tls.CipherSuites[key];\n if(cs.id[0] === twoBytes.charCodeAt(0) &&\n cs.id[1] === twoBytes.charCodeAt(1)) {\n rval = cs;\n break;\n }\n }\n return rval;\n};\n\n/**\n * Called when an unexpected record is encountered.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleUnexpected = function(c, record) {\n // if connection is client and closed, ignore unexpected messages\n var ignore = (!c.open && c.entity === tls.ConnectionEnd.client);\n if(!ignore) {\n c.error(c, {\n message: 'Unexpected message. Received TLS record out of order.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.unexpected_message\n }\n });\n }\n};\n\n/**\n * Called when a client receives a HelloRequest record.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleHelloRequest = function(c, record, length) {\n // ignore renegotiation requests from the server during a handshake, but\n // if handshaking, send a warning alert that renegotation is denied\n if(!c.handshaking && c.handshakes > 0) {\n // send alert warning\n tls.queue(c, tls.createAlert(c, {\n level: tls.Alert.Level.warning,\n description: tls.Alert.Description.no_renegotiation\n }));\n tls.flush(c);\n }\n\n // continue\n c.process();\n};\n\n/**\n * Parses a hello message from a ClientHello or ServerHello record.\n *\n * @param record the record to parse.\n *\n * @return the parsed message.\n */\ntls.parseHelloMessage = function(c, record, length) {\n var msg = null;\n\n var client = (c.entity === tls.ConnectionEnd.client);\n\n // minimum of 38 bytes in message\n if(length < 38) {\n c.error(c, {\n message: client ?\n 'Invalid ServerHello message. Message too short.' :\n 'Invalid ClientHello message. 
Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n } else {\n // use 'remaining' to calculate # of remaining bytes in the message\n var b = record.fragment;\n var remaining = b.length();\n msg = {\n version: {\n major: b.getByte(),\n minor: b.getByte()\n },\n random: forge.util.createBuffer(b.getBytes(32)),\n session_id: readVector(b, 1),\n extensions: []\n };\n if(client) {\n msg.cipher_suite = b.getBytes(2);\n msg.compression_method = b.getByte();\n } else {\n msg.cipher_suites = readVector(b, 2);\n msg.compression_methods = readVector(b, 1);\n }\n\n // read extensions if there are any bytes left in the message\n remaining = length - (remaining - b.length());\n if(remaining > 0) {\n // parse extensions\n var exts = readVector(b, 2);\n while(exts.length() > 0) {\n msg.extensions.push({\n type: [exts.getByte(), exts.getByte()],\n data: readVector(exts, 2)\n });\n }\n\n // TODO: make extension support modular\n if(!client) {\n for(var i = 0; i < msg.extensions.length; ++i) {\n var ext = msg.extensions[i];\n\n // support SNI extension\n if(ext.type[0] === 0x00 && ext.type[1] === 0x00) {\n // get server name list\n var snl = readVector(ext.data, 2);\n while(snl.length() > 0) {\n // read server name type\n var snType = snl.getByte();\n\n // only HostName type (0x00) is known, break out if\n // another type is detected\n if(snType !== 0x00) {\n break;\n }\n\n // add host name to server name list\n c.session.extensions.server_name.serverNameList.push(\n readVector(snl, 2).getBytes());\n }\n }\n }\n }\n }\n\n // version already set, do not allow version change\n if(c.session.version) {\n if(msg.version.major !== c.session.version.major ||\n msg.version.minor !== c.session.version.minor) {\n return c.error(c, {\n message: 'TLS version change is disallowed during renegotiation.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.protocol_version\n }\n });\n }\n }\n\n // get the chosen (ServerHello) cipher suite\n if(client) {\n // FIXME: should be checking configured acceptable cipher suites\n c.session.cipherSuite = tls.getCipherSuite(msg.cipher_suite);\n } else {\n // get a supported preferred (ClientHello) cipher suite\n // choose the first supported cipher suite\n var tmp = forge.util.createBuffer(msg.cipher_suites.bytes());\n while(tmp.length() > 0) {\n // FIXME: should be checking configured acceptable suites\n // cipher suites take up 2 bytes\n c.session.cipherSuite = tls.getCipherSuite(tmp.getBytes(2));\n if(c.session.cipherSuite !== null) {\n break;\n }\n }\n }\n\n // cipher suite not supported\n if(c.session.cipherSuite === null) {\n return c.error(c, {\n message: 'No cipher suites in common.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.handshake_failure\n },\n cipherSuite: forge.util.bytesToHex(msg.cipher_suite)\n });\n }\n\n // TODO: handle compression methods\n if(client) {\n c.session.compressionMethod = msg.compression_method;\n } else {\n // no compression\n c.session.compressionMethod = tls.CompressionMethod.none;\n }\n }\n\n return msg;\n};\n\n/**\n * Creates security parameters for the given connection based on the given\n * hello message.\n *\n * @param c the TLS connection.\n * @param msg the hello message.\n */\ntls.createSecurityParameters = function(c, msg) {\n /* Note: security params are from TLS 1.2, some values like prf_algorithm\n are ignored for TLS 1.0/1.1 and the builtin as 
specified in the spec is\n used. */\n\n // TODO: handle other options from server when more supported\n\n // get client and server randoms\n var client = (c.entity === tls.ConnectionEnd.client);\n var msgRandom = msg.random.bytes();\n var cRandom = client ? c.session.sp.client_random : msgRandom;\n var sRandom = client ? msgRandom : tls.createRandom().getBytes();\n\n // create new security parameters\n c.session.sp = {\n entity: c.entity,\n prf_algorithm: tls.PRFAlgorithm.tls_prf_sha256,\n bulk_cipher_algorithm: null,\n cipher_type: null,\n enc_key_length: null,\n block_length: null,\n fixed_iv_length: null,\n record_iv_length: null,\n mac_algorithm: null,\n mac_length: null,\n mac_key_length: null,\n compression_algorithm: c.session.compressionMethod,\n pre_master_secret: null,\n master_secret: null,\n client_random: cRandom,\n server_random: sRandom\n };\n};\n\n/**\n * Called when a client receives a ServerHello record.\n *\n * When a ServerHello message will be sent:\n * The server will send this message in response to a client hello message\n * when it was able to find an acceptable set of algorithms. If it cannot\n * find such a match, it will respond with a handshake failure alert.\n *\n * uint24 length;\n * struct {\n * ProtocolVersion server_version;\n * Random random;\n * SessionID session_id;\n * CipherSuite cipher_suite;\n * CompressionMethod compression_method;\n * select(extensions_present) {\n * case false:\n * struct {};\n * case true:\n * Extension extensions<0..2^16-1>;\n * };\n * } ServerHello;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleServerHello = function(c, record, length) {\n var msg = tls.parseHelloMessage(c, record, length);\n if(c.fail) {\n return;\n }\n\n // ensure server version is compatible\n if(msg.version.minor <= c.version.minor) {\n c.version.minor = msg.version.minor;\n } else {\n return c.error(c, {\n message: 'Incompatible TLS version.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.protocol_version\n }\n });\n }\n\n // indicate session version has been set\n c.session.version = c.version;\n\n // get the session ID from the message\n var sessionId = msg.session_id.bytes();\n\n // if the session ID is not blank and matches the cached one, resume\n // the session\n if(sessionId.length > 0 && sessionId === c.session.id) {\n // resuming session, expect a ChangeCipherSpec next\n c.expect = SCC;\n c.session.resuming = true;\n\n // get new server random\n c.session.sp.server_random = msg.random.bytes();\n } else {\n // not resuming, expect a server Certificate message next\n c.expect = SCE;\n c.session.resuming = false;\n\n // create new security parameters\n tls.createSecurityParameters(c, msg);\n }\n\n // set new session ID\n c.session.id = sessionId;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a server receives a ClientHello record.\n *\n * When a ClientHello message will be sent:\n * When a client first connects to a server it is required to send the\n * client hello as its first message. 
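// [Illustrative note, not part of the upstream forge source] The 38-byte minimum
// enforced in parseHelloMessage above is the size of the smallest legal hello body,
// assuming an empty session ID and empty vectors elsewhere:
//
//     2   ProtocolVersion (major + minor)
//  + 32   Random (4-byte gmt_unix_time + 28 random bytes)
//  +  1   SessionID length prefix (empty vector)
//  +  2   ServerHello: chosen CipherSuite / ClientHello: cipher_suites length prefix
//  +  1   ServerHello: CompressionMethod / ClientHello: compression_methods length prefix
//  = 38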
The client can also send a client\n * hello in response to a hello request or on its own initiative in order\n * to renegotiate the security parameters in an existing connection.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleClientHello = function(c, record, length) {\n var msg = tls.parseHelloMessage(c, record, length);\n if(c.fail) {\n return;\n }\n\n // get the session ID from the message\n var sessionId = msg.session_id.bytes();\n\n // see if the given session ID is in the cache\n var session = null;\n if(c.sessionCache) {\n session = c.sessionCache.getSession(sessionId);\n if(session === null) {\n // session ID not found\n sessionId = '';\n } else if(session.version.major !== msg.version.major ||\n session.version.minor > msg.version.minor) {\n // if session version is incompatible with client version, do not resume\n session = null;\n sessionId = '';\n }\n }\n\n // no session found to resume, generate a new session ID\n if(sessionId.length === 0) {\n sessionId = forge.random.getBytes(32);\n }\n\n // update session\n c.session.id = sessionId;\n c.session.clientHelloVersion = msg.version;\n c.session.sp = {};\n if(session) {\n // use version and security parameters from resumed session\n c.version = c.session.version = session.version;\n c.session.sp = session.sp;\n } else {\n // use highest compatible minor version\n var version;\n for(var i = 1; i < tls.SupportedVersions.length; ++i) {\n version = tls.SupportedVersions[i];\n if(version.minor <= msg.version.minor) {\n break;\n }\n }\n c.version = {major: version.major, minor: version.minor};\n c.session.version = c.version;\n }\n\n // if a session is set, resume it\n if(session !== null) {\n // resuming session, expect a ChangeCipherSpec next\n c.expect = CCC;\n c.session.resuming = true;\n\n // get new client random\n c.session.sp.client_random = msg.random.bytes();\n } else {\n // not resuming, expect a Certificate or ClientKeyExchange\n c.expect = (c.verifyClient !== false) ? 
CCE : CKE;\n c.session.resuming = false;\n\n // create new security parameters\n tls.createSecurityParameters(c, msg);\n }\n\n // connection now open\n c.open = true;\n\n // queue server hello\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createServerHello(c)\n }));\n\n if(c.session.resuming) {\n // queue change cipher spec message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.change_cipher_spec,\n data: tls.createChangeCipherSpec()\n }));\n\n // create pending state\n c.state.pending = tls.createConnectionState(c);\n\n // change current write state to pending write state\n c.state.current.write = c.state.pending.write;\n\n // queue finished\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createFinished(c)\n }));\n } else {\n // queue server certificate\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificate(c)\n }));\n\n if(!c.fail) {\n // queue server key exchange\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createServerKeyExchange(c)\n }));\n\n // request client certificate if set\n if(c.verifyClient !== false) {\n // queue certificate request\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificateRequest(c)\n }));\n }\n\n // queue server hello done\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createServerHelloDone(c)\n }));\n }\n }\n\n // send records\n tls.flush(c);\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a Certificate record.\n *\n * When this message will be sent:\n * The server must send a certificate whenever the agreed-upon key exchange\n * method is not an anonymous one. This message will always immediately\n * follow the server hello message.\n *\n * Meaning of this message:\n * The certificate type must be appropriate for the selected cipher suite's\n * key exchange algorithm, and is generally an X.509v3 certificate. It must\n * contain a key which matches the key exchange method, as follows. Unless\n * otherwise specified, the signing algorithm for the certificate must be\n * the same as the algorithm for the certificate key. Unless otherwise\n * specified, the public key may be of any length.\n *\n * opaque ASN.1Cert<1..2^24-1>;\n * struct {\n * ASN.1Cert certificate_list<1..2^24-1>;\n * } Certificate;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleCertificate = function(c, record, length) {\n // minimum of 3 bytes in message\n if(length < 3) {\n return c.error(c, {\n message: 'Invalid Certificate message. Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n var b = record.fragment;\n var msg = {\n certificate_list: readVector(b, 3)\n };\n\n /* The sender's certificate will be first in the list (chain), each\n subsequent one that follows will certify the previous one, but root\n certificates (self-signed) that specify the certificate authority may\n be omitted under the assumption that clients must already possess it. 
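// [Illustrative note, not part of the upstream forge source] The Certificate body
// parsed below is two levels of 24-bit length-prefixed vectors; for a single
// DER-encoded certificate of N bytes the wire layout is:
//
//   [3 bytes] certificate_list length = N + 3
//   [3 bytes] first entry length      = N
//   [N bytes] DER-encoded X.509 certificate
//
// readVector(b, 3) consumes the outer prefix and readVector(msg.certificate_list, 3)
// consumes each entry's prefix, matching the loop that follows.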
*/\n var cert, asn1;\n var certs = [];\n try {\n while(msg.certificate_list.length() > 0) {\n // each entry in msg.certificate_list is a vector with 3 len bytes\n cert = readVector(msg.certificate_list, 3);\n asn1 = forge.asn1.fromDer(cert);\n cert = forge.pki.certificateFromAsn1(asn1, true);\n certs.push(cert);\n }\n } catch(ex) {\n return c.error(c, {\n message: 'Could not parse certificate list.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.bad_certificate\n }\n });\n }\n\n // ensure at least 1 certificate was provided if in client-mode\n // or if verifyClient was set to true to require a certificate\n // (as opposed to 'optional')\n var client = (c.entity === tls.ConnectionEnd.client);\n if((client || c.verifyClient === true) && certs.length === 0) {\n // error, no certificate\n c.error(c, {\n message: client ?\n 'No server certificate provided.' :\n 'No client certificate provided.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n } else if(certs.length === 0) {\n // no certs to verify\n // expect a ServerKeyExchange or ClientKeyExchange message next\n c.expect = client ? SKE : CKE;\n } else {\n // save certificate in session\n if(client) {\n c.session.serverCertificate = certs[0];\n } else {\n c.session.clientCertificate = certs[0];\n }\n\n if(tls.verifyCertificateChain(c, certs)) {\n // expect a ServerKeyExchange or ClientKeyExchange message next\n c.expect = client ? SKE : CKE;\n }\n }\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a ServerKeyExchange record.\n *\n * When this message will be sent:\n * This message will be sent immediately after the server certificate\n * message (or the server hello message, if this is an anonymous\n * negotiation).\n *\n * The server key exchange message is sent by the server only when the\n * server certificate message (if sent) does not contain enough data to\n * allow the client to exchange a premaster secret.\n *\n * Meaning of this message:\n * This message conveys cryptographic information to allow the client to\n * communicate the premaster secret: either an RSA public key to encrypt\n * the premaster secret with, or a Diffie-Hellman public key with which the\n * client can complete a key exchange (with the result being the premaster\n * secret.)\n *\n * enum {\n * dhe_dss, dhe_rsa, dh_anon, rsa, dh_dss, dh_rsa\n * } KeyExchangeAlgorithm;\n *\n * struct {\n * opaque dh_p<1..2^16-1>;\n * opaque dh_g<1..2^16-1>;\n * opaque dh_Ys<1..2^16-1>;\n * } ServerDHParams;\n *\n * struct {\n * select(KeyExchangeAlgorithm) {\n * case dh_anon:\n * ServerDHParams params;\n * case dhe_dss:\n * case dhe_rsa:\n * ServerDHParams params;\n * digitally-signed struct {\n * opaque client_random[32];\n * opaque server_random[32];\n * ServerDHParams params;\n * } signed_params;\n * case rsa:\n * case dh_dss:\n * case dh_rsa:\n * struct {};\n * };\n * } ServerKeyExchange;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleServerKeyExchange = function(c, record, length) {\n // this implementation only supports RSA, no Diffie-Hellman support\n // so any length > 0 is invalid\n if(length > 0) {\n return c.error(c, {\n message: 'Invalid key parameters. 
Only RSA is supported.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.unsupported_certificate\n }\n });\n }\n\n // expect an optional CertificateRequest message next\n c.expect = SCR;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a ClientKeyExchange record.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleClientKeyExchange = function(c, record, length) {\n // this implementation only supports RSA, no Diffie-Hellman support\n // so any length < 48 is invalid\n if(length < 48) {\n return c.error(c, {\n message: 'Invalid key parameters. Only RSA is supported.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.unsupported_certificate\n }\n });\n }\n\n var b = record.fragment;\n var msg = {\n enc_pre_master_secret: readVector(b, 2).getBytes()\n };\n\n // do rsa decryption\n var privateKey = null;\n if(c.getPrivateKey) {\n try {\n privateKey = c.getPrivateKey(c, c.session.serverCertificate);\n privateKey = forge.pki.privateKeyFromPem(privateKey);\n } catch(ex) {\n c.error(c, {\n message: 'Could not get private key.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n }\n\n if(privateKey === null) {\n return c.error(c, {\n message: 'No private key set.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n\n try {\n // decrypt 48-byte pre-master secret\n var sp = c.session.sp;\n sp.pre_master_secret = privateKey.decrypt(msg.enc_pre_master_secret);\n\n // ensure client hello version matches first 2 bytes\n var version = c.session.clientHelloVersion;\n if(version.major !== sp.pre_master_secret.charCodeAt(0) ||\n version.minor !== sp.pre_master_secret.charCodeAt(1)) {\n // error, do not send alert (see BLEI attack below)\n throw new Error('TLS version rollback attack detected.');\n }\n } catch(ex) {\n /* Note: Daniel Bleichenbacher [BLEI] can be used to attack a\n TLS server which is using PKCS#1 encoded RSA, so instead of\n failing here, we generate 48 random bytes and use that as\n the pre-master secret. */\n sp.pre_master_secret = forge.random.getBytes(48);\n }\n\n // expect a CertificateVerify message if a Certificate was received that\n // does not have fixed Diffie-Hellman params, otherwise expect\n // ChangeCipherSpec\n c.expect = CCC;\n if(c.session.clientCertificate !== null) {\n // only RSA support, so expect CertificateVerify\n // TODO: support Diffie-Hellman\n c.expect = CCV;\n }\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a CertificateRequest record.\n *\n * When this message will be sent:\n * A non-anonymous server can optionally request a certificate from the\n * client, if appropriate for the selected cipher suite. 
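// [Illustrative note, not part of the upstream forge source] The 48-byte
// pre-master secret decrypted in handleClientKeyExchange above is laid out as:
//
//   [ 2 bytes] highest protocol version offered in the ClientHello (rollback check)
//   [46 bytes] random bytes chosen by the client
//
// Per the Bleichenbacher countermeasure in the catch block, bad PKCS#1 padding or a
// version mismatch is made indistinguishable from success: the handshake continues
// with 48 fresh random bytes and only fails later at the Finished message, so no
// RSA padding oracle is exposed.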
This message, if\n * sent, will immediately follow the Server Key Exchange message (if it is\n * sent; otherwise, the Server Certificate message).\n *\n * enum {\n * rsa_sign(1), dss_sign(2), rsa_fixed_dh(3), dss_fixed_dh(4),\n * rsa_ephemeral_dh_RESERVED(5), dss_ephemeral_dh_RESERVED(6),\n * fortezza_dms_RESERVED(20), (255)\n * } ClientCertificateType;\n *\n * opaque DistinguishedName<1..2^16-1>;\n *\n * struct {\n * ClientCertificateType certificate_types<1..2^8-1>;\n * SignatureAndHashAlgorithm supported_signature_algorithms<2^16-1>;\n * DistinguishedName certificate_authorities<0..2^16-1>;\n * } CertificateRequest;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleCertificateRequest = function(c, record, length) {\n // minimum of 3 bytes in message\n if(length < 3) {\n return c.error(c, {\n message: 'Invalid CertificateRequest. Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n // TODO: TLS 1.2+ has different format including\n // SignatureAndHashAlgorithm after cert types\n var b = record.fragment;\n var msg = {\n certificate_types: readVector(b, 1),\n certificate_authorities: readVector(b, 2)\n };\n\n // save certificate request in session\n c.session.certificateRequest = msg;\n\n // expect a ServerHelloDone message next\n c.expect = SHD;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a server receives a CertificateVerify record.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleCertificateVerify = function(c, record, length) {\n if(length < 2) {\n return c.error(c, {\n message: 'Invalid CertificateVerify. Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n // rewind to get full bytes for message so it can be manually\n // digested below (special case for CertificateVerify messages because\n // they must be digested *after* handling as opposed to all others)\n var b = record.fragment;\n b.read -= 4;\n var msgBytes = b.bytes();\n b.read += 4;\n\n var msg = {\n signature: readVector(b, 2).getBytes()\n };\n\n // TODO: add support for DSA\n\n // generate data to verify\n var verify = forge.util.createBuffer();\n verify.putBuffer(c.session.md5.digest());\n verify.putBuffer(c.session.sha1.digest());\n verify = verify.getBytes();\n\n try {\n var cert = c.session.clientCertificate;\n /*b = forge.pki.rsa.decrypt(\n msg.signature, cert.publicKey, true, verify.length);\n if(b !== verify) {*/\n if(!cert.publicKey.verify(verify, msg.signature, 'NONE')) {\n throw new Error('CertificateVerify signature does not match.');\n }\n\n // digest message now that it has been handled\n c.session.md5.update(msgBytes);\n c.session.sha1.update(msgBytes);\n } catch(ex) {\n return c.error(c, {\n message: 'Bad signature in CertificateVerify.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.handshake_failure\n }\n });\n }\n\n // expect ChangeCipherSpec\n c.expect = CCC;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a ServerHelloDone record.\n *\n * When this message will be sent:\n * The server hello done message is sent by the server to indicate the end\n * of the server hello and associated messages. 
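// [Illustrative note, not part of the upstream forge source] For TLS 1.0/1.1 the
// value checked in handleCertificateVerify above is the 36-byte concatenation of the
// two running handshake digests:
//
//   MD5(handshake_messages)   -> 16 bytes
//   SHA-1(handshake_messages) -> 20 bytes
//
// and the 'NONE' scheme passed to cert.publicKey.verify() compares those 36 raw
// bytes against the RSA-decrypted signature without a DigestInfo wrapper.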
After sending this message\n * the server will wait for a client response.\n *\n * Meaning of this message:\n * This message means that the server is done sending messages to support\n * the key exchange, and the client can proceed with its phase of the key\n * exchange.\n *\n * Upon receipt of the server hello done message the client should verify\n * that the server provided a valid certificate if required and check that\n * the server hello parameters are acceptable.\n *\n * struct {} ServerHelloDone;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleServerHelloDone = function(c, record, length) {\n // len must be 0 bytes\n if(length > 0) {\n return c.error(c, {\n message: 'Invalid ServerHelloDone message. Invalid length.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.record_overflow\n }\n });\n }\n\n if(c.serverCertificate === null) {\n // no server certificate was provided\n var error = {\n message: 'No server certificate provided. Not enough security.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.insufficient_security\n }\n };\n\n // call application callback\n var depth = 0;\n var ret = c.verify(c, error.alert.description, depth, []);\n if(ret !== true) {\n // check for custom alert info\n if(ret || ret === 0) {\n // set custom message and alert description\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.alert) {\n error.alert.description = ret.alert;\n }\n } else if(typeof ret === 'number') {\n // set custom alert description\n error.alert.description = ret;\n }\n }\n\n // send error\n return c.error(c, error);\n }\n }\n\n // create client certificate message if requested\n if(c.session.certificateRequest !== null) {\n record = tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificate(c)\n });\n tls.queue(c, record);\n }\n\n // create client key exchange message\n record = tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createClientKeyExchange(c)\n });\n tls.queue(c, record);\n\n // expect no messages until the following callback has been called\n c.expect = SER;\n\n // create callback to handle client signature (for client-certs)\n var callback = function(c, signature) {\n if(c.session.certificateRequest !== null &&\n c.session.clientCertificate !== null) {\n // create certificate verify message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificateVerify(c, signature)\n }));\n }\n\n // create change cipher spec message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.change_cipher_spec,\n data: tls.createChangeCipherSpec()\n }));\n\n // create pending state\n c.state.pending = tls.createConnectionState(c);\n\n // change current write state to pending write state\n c.state.current.write = c.state.pending.write;\n\n // create finished message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createFinished(c)\n }));\n\n // expect a server ChangeCipherSpec message next\n c.expect = SCC;\n\n // send records\n tls.flush(c);\n\n // continue\n c.process();\n };\n\n // if there is no certificate request or no client certificate, do\n // callback immediately\n if(c.session.certificateRequest === null ||\n c.session.clientCertificate === null) {\n return callback(c, null);\n }\n\n // otherwise 
get the client signature\n tls.getClientSignature(c, callback);\n};\n\n/**\n * Called when a ChangeCipherSpec record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleChangeCipherSpec = function(c, record) {\n if(record.fragment.getByte() !== 0x01) {\n return c.error(c, {\n message: 'Invalid ChangeCipherSpec message received.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n // create pending state if:\n // 1. Resuming session in client mode OR\n // 2. NOT resuming session in server mode\n var client = (c.entity === tls.ConnectionEnd.client);\n if((c.session.resuming && client) || (!c.session.resuming && !client)) {\n c.state.pending = tls.createConnectionState(c);\n }\n\n // change current read state to pending read state\n c.state.current.read = c.state.pending.read;\n\n // clear pending state if:\n // 1. NOT resuming session in client mode OR\n // 2. resuming a session in server mode\n if((!c.session.resuming && client) || (c.session.resuming && !client)) {\n c.state.pending = null;\n }\n\n // expect a Finished record next\n c.expect = client ? SFI : CFI;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a Finished record is received.\n *\n * When this message will be sent:\n * A finished message is always sent immediately after a change\n * cipher spec message to verify that the key exchange and\n * authentication processes were successful. It is essential that a\n * change cipher spec message be received between the other\n * handshake messages and the Finished message.\n *\n * Meaning of this message:\n * The finished message is the first protected with the just-\n * negotiated algorithms, keys, and secrets. Recipients of finished\n * messages must verify that the contents are correct. Once a side\n * has sent its Finished message and received and validated the\n * Finished message from its peer, it may begin to send and receive\n * application data over the connection.\n *\n * struct {\n * opaque verify_data[verify_data_length];\n * } Finished;\n *\n * verify_data\n * PRF(master_secret, finished_label, Hash(handshake_messages))\n * [0..verify_data_length-1];\n *\n * finished_label\n * For Finished messages sent by the client, the string\n * \"client finished\". For Finished messages sent by the server, the\n * string \"server finished\".\n *\n * verify_data_length depends on the cipher suite. If it is not specified\n * by the cipher suite, then it is 12. Versions of TLS < 1.2 always used\n * 12 bytes.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleFinished = function(c, record, length) {\n // rewind to get full bytes for message so it can be manually\n // digested below (special case for Finished messages because they\n // must be digested *after* handling as opposed to all others)\n var b = record.fragment;\n b.read -= 4;\n var msgBytes = b.bytes();\n b.read += 4;\n\n // message contains only verify_data\n var vd = record.fragment.getBytes();\n\n // ensure verify data is correct\n b = forge.util.createBuffer();\n b.putBuffer(c.session.md5.digest());\n b.putBuffer(c.session.sha1.digest());\n\n // set label based on entity type\n var client = (c.entity === tls.ConnectionEnd.client);\n var label = client ? 
'server finished' : 'client finished';\n\n // TODO: determine prf function and verify length for TLS 1.2\n var sp = c.session.sp;\n var vdl = 12;\n var prf = prf_TLS1;\n b = prf(sp.master_secret, label, b.getBytes(), vdl);\n if(b.getBytes() !== vd) {\n return c.error(c, {\n message: 'Invalid verify_data in Finished message.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.decrypt_error\n }\n });\n }\n\n // digest finished message now that it has been handled\n c.session.md5.update(msgBytes);\n c.session.sha1.update(msgBytes);\n\n // resuming session as client or NOT resuming session as server\n if((c.session.resuming && client) || (!c.session.resuming && !client)) {\n // create change cipher spec message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.change_cipher_spec,\n data: tls.createChangeCipherSpec()\n }));\n\n // change current write state to pending write state, clear pending\n c.state.current.write = c.state.pending.write;\n c.state.pending = null;\n\n // create finished message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createFinished(c)\n }));\n }\n\n // expect application data next\n c.expect = client ? SAD : CAD;\n\n // handshake complete\n c.handshaking = false;\n ++c.handshakes;\n\n // save access to peer certificate\n c.peerCertificate = client ?\n c.session.serverCertificate : c.session.clientCertificate;\n\n // send records\n tls.flush(c);\n\n // now connected\n c.isConnected = true;\n c.connected(c);\n\n // continue\n c.process();\n};\n\n/**\n * Called when an Alert record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleAlert = function(c, record) {\n // read alert\n var b = record.fragment;\n var alert = {\n level: b.getByte(),\n description: b.getByte()\n };\n\n // TODO: consider using a table?\n // get appropriate message\n var msg;\n switch(alert.description) {\n case tls.Alert.Description.close_notify:\n msg = 'Connection closed.';\n break;\n case tls.Alert.Description.unexpected_message:\n msg = 'Unexpected message.';\n break;\n case tls.Alert.Description.bad_record_mac:\n msg = 'Bad record MAC.';\n break;\n case tls.Alert.Description.decryption_failed:\n msg = 'Decryption failed.';\n break;\n case tls.Alert.Description.record_overflow:\n msg = 'Record overflow.';\n break;\n case tls.Alert.Description.decompression_failure:\n msg = 'Decompression failed.';\n break;\n case tls.Alert.Description.handshake_failure:\n msg = 'Handshake failure.';\n break;\n case tls.Alert.Description.bad_certificate:\n msg = 'Bad certificate.';\n break;\n case tls.Alert.Description.unsupported_certificate:\n msg = 'Unsupported certificate.';\n break;\n case tls.Alert.Description.certificate_revoked:\n msg = 'Certificate revoked.';\n break;\n case tls.Alert.Description.certificate_expired:\n msg = 'Certificate expired.';\n break;\n case tls.Alert.Description.certificate_unknown:\n msg = 'Certificate unknown.';\n break;\n case tls.Alert.Description.illegal_parameter:\n msg = 'Illegal parameter.';\n break;\n case tls.Alert.Description.unknown_ca:\n msg = 'Unknown certificate authority.';\n break;\n case tls.Alert.Description.access_denied:\n msg = 'Access denied.';\n break;\n case tls.Alert.Description.decode_error:\n msg = 'Decode error.';\n break;\n case tls.Alert.Description.decrypt_error:\n msg = 'Decrypt error.';\n break;\n case tls.Alert.Description.export_restriction:\n msg = 'Export restriction.';\n break;\n case 
tls.Alert.Description.protocol_version:\n msg = 'Unsupported protocol version.';\n break;\n case tls.Alert.Description.insufficient_security:\n msg = 'Insufficient security.';\n break;\n case tls.Alert.Description.internal_error:\n msg = 'Internal error.';\n break;\n case tls.Alert.Description.user_canceled:\n msg = 'User canceled.';\n break;\n case tls.Alert.Description.no_renegotiation:\n msg = 'Renegotiation not supported.';\n break;\n default:\n msg = 'Unknown error.';\n break;\n }\n\n // close connection on close_notify, not an error\n if(alert.description === tls.Alert.Description.close_notify) {\n return c.close();\n }\n\n // call error handler\n c.error(c, {\n message: msg,\n send: false,\n // origin is the opposite end\n origin: (c.entity === tls.ConnectionEnd.client) ? 'server' : 'client',\n alert: alert\n });\n\n // continue\n c.process();\n};\n\n/**\n * Called when a Handshake record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleHandshake = function(c, record) {\n // get the handshake type and message length\n var b = record.fragment;\n var type = b.getByte();\n var length = b.getInt24();\n\n // see if the record fragment doesn't yet contain the full message\n if(length > b.length()) {\n // cache the record, clear its fragment, and reset the buffer read\n // pointer before the type and length were read\n c.fragmented = record;\n record.fragment = forge.util.createBuffer();\n b.read -= 4;\n\n // continue\n return c.process();\n }\n\n // full message now available, clear cache, reset read pointer to\n // before type and length\n c.fragmented = null;\n b.read -= 4;\n\n // save the handshake bytes for digestion after handler is found\n // (include type and length of handshake msg)\n var bytes = b.bytes(length + 4);\n\n // restore read pointer\n b.read += 4;\n\n // handle expected message\n if(type in hsTable[c.entity][c.expect]) {\n // initialize server session\n if(c.entity === tls.ConnectionEnd.server && !c.open && !c.fail) {\n c.handshaking = true;\n c.session = {\n version: null,\n extensions: {\n server_name: {\n serverNameList: []\n }\n },\n cipherSuite: null,\n compressionMethod: null,\n serverCertificate: null,\n clientCertificate: null,\n md5: forge.md.md5.create(),\n sha1: forge.md.sha1.create()\n };\n }\n\n /* Update handshake messages digest. Finished and CertificateVerify\n messages are not digested here. They can't be digested as part of\n the verify_data that they contain. These messages are manually\n digested in their handlers. HelloRequest messages are simply never\n included in the handshake message digest according to spec. 
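// [Illustrative note, not part of the upstream forge source] Every handshake message
// starts with a 4-byte header, which is why handleHandshake, handleCertificateVerify
// and handleFinished rewind b.read by 4 when they need the full message bytes:
//
//   [1 byte]  HandshakeType   (b.getByte())
//   [3 bytes] uint24 length   (b.getInt24())
//   [length]  message body
//
// If fewer than `length` body bytes have arrived, the record is cached in
// c.fragmented and the read pointer is reset so the header is re-parsed once the
// next record supplies the remainder.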
*/\n if(type !== tls.HandshakeType.hello_request &&\n type !== tls.HandshakeType.certificate_verify &&\n type !== tls.HandshakeType.finished) {\n c.session.md5.update(bytes);\n c.session.sha1.update(bytes);\n }\n\n // handle specific handshake type record\n hsTable[c.entity][c.expect][type](c, record, length);\n } else {\n // unexpected record\n tls.handleUnexpected(c, record);\n }\n};\n\n/**\n * Called when an ApplicationData record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleApplicationData = function(c, record) {\n // buffer data, notify that its ready\n c.data.putBuffer(record.fragment);\n c.dataReady(c);\n\n // continue\n c.process();\n};\n\n/**\n * Called when a Heartbeat record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleHeartbeat = function(c, record) {\n // get the heartbeat type and payload\n var b = record.fragment;\n var type = b.getByte();\n var length = b.getInt16();\n var payload = b.getBytes(length);\n\n if(type === tls.HeartbeatMessageType.heartbeat_request) {\n // discard request during handshake or if length is too large\n if(c.handshaking || length > payload.length) {\n // continue\n return c.process();\n }\n // retransmit payload\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.heartbeat,\n data: tls.createHeartbeat(\n tls.HeartbeatMessageType.heartbeat_response, payload)\n }));\n tls.flush(c);\n } else if(type === tls.HeartbeatMessageType.heartbeat_response) {\n // check payload against expected payload, discard heartbeat if no match\n if(payload !== c.expectedHeartbeatPayload) {\n // continue\n return c.process();\n }\n\n // notify that a valid heartbeat was received\n if(c.heartbeatReceived) {\n c.heartbeatReceived(c, forge.util.createBuffer(payload));\n }\n }\n\n // continue\n c.process();\n};\n\n/**\n * The transistional state tables for receiving TLS records. It maps the\n * current TLS engine state and a received record to a function to handle the\n * record and update the state.\n *\n * For instance, if the current state is SHE, then the TLS engine is expecting\n * a ServerHello record. Once a record is received, the handler function is\n * looked up using the state SHE and the record's content type.\n *\n * The resulting function will either be an error handler or a record handler.\n * The function will take whatever action is appropriate and update the state\n * for the next record.\n *\n * The states are all based on possible server record types. Note that the\n * client will never specifically expect to receive a HelloRequest or an alert\n * from the server so there is no state that reflects this. These messages may\n * occur at any time.\n *\n * There are two tables for mapping states because there is a second tier of\n * types for handshake messages. 
Once a record with a content type of handshake\n * is received, the handshake record handler will look up the handshake type in\n * the secondary map to get its appropriate handler.\n *\n * Valid message orders are as follows:\n *\n * =======================FULL HANDSHAKE======================\n * Client Server\n *\n * ClientHello -------->\n * ServerHello\n * Certificate*\n * ServerKeyExchange*\n * CertificateRequest*\n * <-------- ServerHelloDone\n * Certificate*\n * ClientKeyExchange\n * CertificateVerify*\n * [ChangeCipherSpec]\n * Finished -------->\n * [ChangeCipherSpec]\n * <-------- Finished\n * Application Data <-------> Application Data\n *\n * =====================SESSION RESUMPTION=====================\n * Client Server\n *\n * ClientHello -------->\n * ServerHello\n * [ChangeCipherSpec]\n * <-------- Finished\n * [ChangeCipherSpec]\n * Finished -------->\n * Application Data <-------> Application Data\n */\n// client expect states (indicate which records are expected to be received)\nvar SHE = 0; // rcv server hello\nvar SCE = 1; // rcv server certificate\nvar SKE = 2; // rcv server key exchange\nvar SCR = 3; // rcv certificate request\nvar SHD = 4; // rcv server hello done\nvar SCC = 5; // rcv change cipher spec\nvar SFI = 6; // rcv finished\nvar SAD = 7; // rcv application data\nvar SER = 8; // not expecting any messages at this point\n\n// server expect states\nvar CHE = 0; // rcv client hello\nvar CCE = 1; // rcv client certificate\nvar CKE = 2; // rcv client key exchange\nvar CCV = 3; // rcv certificate verify\nvar CCC = 4; // rcv change cipher spec\nvar CFI = 5; // rcv finished\nvar CAD = 6; // rcv application data\nvar CER = 7; // not expecting any messages at this point\n\n// map client current expect state and content type to function\nvar __ = tls.handleUnexpected;\nvar R0 = tls.handleChangeCipherSpec;\nvar R1 = tls.handleAlert;\nvar R2 = tls.handleHandshake;\nvar R3 = tls.handleApplicationData;\nvar R4 = tls.handleHeartbeat;\nvar ctTable = [];\nctTable[tls.ConnectionEnd.client] = [\n// CC,AL,HS,AD,HB\n/*SHE*/[__,R1,R2,__,R4],\n/*SCE*/[__,R1,R2,__,R4],\n/*SKE*/[__,R1,R2,__,R4],\n/*SCR*/[__,R1,R2,__,R4],\n/*SHD*/[__,R1,R2,__,R4],\n/*SCC*/[R0,R1,__,__,R4],\n/*SFI*/[__,R1,R2,__,R4],\n/*SAD*/[__,R1,R2,R3,R4],\n/*SER*/[__,R1,R2,__,R4]\n];\n\n// map server current expect state and content type to function\nctTable[tls.ConnectionEnd.server] = [\n// CC,AL,HS,AD\n/*CHE*/[__,R1,R2,__,R4],\n/*CCE*/[__,R1,R2,__,R4],\n/*CKE*/[__,R1,R2,__,R4],\n/*CCV*/[__,R1,R2,__,R4],\n/*CCC*/[R0,R1,__,__,R4],\n/*CFI*/[__,R1,R2,__,R4],\n/*CAD*/[__,R1,R2,R3,R4],\n/*CER*/[__,R1,R2,__,R4]\n];\n\n// map client current expect state and handshake type to function\nvar H0 = tls.handleHelloRequest;\nvar H1 = tls.handleServerHello;\nvar H2 = tls.handleCertificate;\nvar H3 = tls.handleServerKeyExchange;\nvar H4 = tls.handleCertificateRequest;\nvar H5 = tls.handleServerHelloDone;\nvar H6 = tls.handleFinished;\nvar hsTable = [];\nhsTable[tls.ConnectionEnd.client] = [\n// 
HR,01,SH,03,04,05,06,07,08,09,10,SC,SK,CR,HD,15,CK,17,18,19,FI\n/*SHE*/[__,__,H1,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*SCE*/[H0,__,__,__,__,__,__,__,__,__,__,H2,H3,H4,H5,__,__,__,__,__,__],\n/*SKE*/[H0,__,__,__,__,__,__,__,__,__,__,__,H3,H4,H5,__,__,__,__,__,__],\n/*SCR*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,H4,H5,__,__,__,__,__,__],\n/*SHD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,H5,__,__,__,__,__,__],\n/*SCC*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*SFI*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6],\n/*SAD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*SER*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__]\n];\n\n// map server current expect state and handshake type to function\n// Note: CAD[CH] does not map to FB because renegotation is prohibited\nvar H7 = tls.handleClientHello;\nvar H8 = tls.handleClientKeyExchange;\nvar H9 = tls.handleCertificateVerify;\nhsTable[tls.ConnectionEnd.server] = [\n// 01,CH,02,03,04,05,06,07,08,09,10,CC,12,13,14,CV,CK,17,18,19,FI\n/*CHE*/[__,H7,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*CCE*/[__,__,__,__,__,__,__,__,__,__,__,H2,__,__,__,__,__,__,__,__,__],\n/*CKE*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H8,__,__,__,__],\n/*CCV*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H9,__,__,__,__,__],\n/*CCC*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*CFI*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6],\n/*CAD*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*CER*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__]\n];\n\n/**\n * Generates the master_secret and keys using the given security parameters.\n *\n * The security parameters for a TLS connection state are defined as such:\n *\n * struct {\n * ConnectionEnd entity;\n * PRFAlgorithm prf_algorithm;\n * BulkCipherAlgorithm bulk_cipher_algorithm;\n * CipherType cipher_type;\n * uint8 enc_key_length;\n * uint8 block_length;\n * uint8 fixed_iv_length;\n * uint8 record_iv_length;\n * MACAlgorithm mac_algorithm;\n * uint8 mac_length;\n * uint8 mac_key_length;\n * CompressionMethod compression_algorithm;\n * opaque master_secret[48];\n * opaque client_random[32];\n * opaque server_random[32];\n * } SecurityParameters;\n *\n * Note that this definition is from TLS 1.2. In TLS 1.0 some of these\n * parameters are ignored because, for instance, the PRFAlgorithm is a\n * builtin-fixed algorithm combining iterations of MD5 and SHA-1 in TLS 1.0.\n *\n * The Record Protocol requires an algorithm to generate keys required by the\n * current connection state.\n *\n * The master secret is expanded into a sequence of secure bytes, which is then\n * split to a client write MAC key, a server write MAC key, a client write\n * encryption key, and a server write encryption key. In TLS 1.0 a client write\n * IV and server write IV are also generated. Each of these is generated from\n * the byte sequence in that order. Unused values are empty. 
In TLS 1.2, some\n * AEAD ciphers may additionally require a client write IV and a server write\n * IV (see Section 6.2.3.3).\n *\n * When keys, MAC keys, and IVs are generated, the master secret is used as an\n * entropy source.\n *\n * To generate the key material, compute:\n *\n * master_secret = PRF(pre_master_secret, \"master secret\",\n * ClientHello.random + ServerHello.random)\n *\n * key_block = PRF(SecurityParameters.master_secret,\n * \"key expansion\",\n * SecurityParameters.server_random +\n * SecurityParameters.client_random);\n *\n * until enough output has been generated. Then, the key_block is\n * partitioned as follows:\n *\n * client_write_MAC_key[SecurityParameters.mac_key_length]\n * server_write_MAC_key[SecurityParameters.mac_key_length]\n * client_write_key[SecurityParameters.enc_key_length]\n * server_write_key[SecurityParameters.enc_key_length]\n * client_write_IV[SecurityParameters.fixed_iv_length]\n * server_write_IV[SecurityParameters.fixed_iv_length]\n *\n * In TLS 1.2, the client_write_IV and server_write_IV are only generated for\n * implicit nonce techniques as described in Section 3.2.1 of [AEAD]. This\n * implementation uses TLS 1.0 so IVs are generated.\n *\n * Implementation note: The currently defined cipher suite which requires the\n * most material is AES_256_CBC_SHA256. It requires 2 x 32 byte keys and 2 x 32\n * byte MAC keys, for a total 128 bytes of key material. In TLS 1.0 it also\n * requires 2 x 16 byte IVs, so it actually takes 160 bytes of key material.\n *\n * @param c the connection.\n * @param sp the security parameters to use.\n *\n * @return the security keys.\n */\ntls.generateKeys = function(c, sp) {\n // TLS_RSA_WITH_AES_128_CBC_SHA (required to be compliant with TLS 1.2) &\n // TLS_RSA_WITH_AES_256_CBC_SHA are the only cipher suites implemented\n // at present\n\n // TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA is required to be compliant with\n // TLS 1.0 but we don't care right now because AES is better and we have\n // an implementation for it\n\n // TODO: TLS 1.2 implementation\n /*\n // determine the PRF\n var prf;\n switch(sp.prf_algorithm) {\n case tls.PRFAlgorithm.tls_prf_sha256:\n prf = prf_sha256;\n break;\n default:\n // should never happen\n throw new Error('Invalid PRF');\n }\n */\n\n // TLS 1.0/1.1 implementation\n var prf = prf_TLS1;\n\n // concatenate server and client random\n var random = sp.client_random + sp.server_random;\n\n // only create master secret if session is new\n if(!c.session.resuming) {\n // create master secret, clean up pre-master secret\n sp.master_secret = prf(\n sp.pre_master_secret, 'master secret', random, 48).bytes();\n sp.pre_master_secret = null;\n }\n\n // generate the amount of key material needed\n random = sp.server_random + sp.client_random;\n var length = 2 * sp.mac_key_length + 2 * sp.enc_key_length;\n\n // include IV for TLS/1.0\n var tls10 = (c.version.major === tls.Versions.TLS_1_0.major &&\n c.version.minor === tls.Versions.TLS_1_0.minor);\n if(tls10) {\n length += 2 * sp.fixed_iv_length;\n }\n var km = prf(sp.master_secret, 'key expansion', random, length);\n\n // split the key material into the MAC and encryption keys\n var rval = {\n client_write_MAC_key: km.getBytes(sp.mac_key_length),\n server_write_MAC_key: km.getBytes(sp.mac_key_length),\n client_write_key: km.getBytes(sp.enc_key_length),\n server_write_key: km.getBytes(sp.enc_key_length)\n };\n\n // include TLS 1.0 IVs\n if(tls10) {\n rval.client_write_IV = km.getBytes(sp.fixed_iv_length);\n rval.server_write_IV = 
km.getBytes(sp.fixed_iv_length);\n }\n\n return rval;\n};\n\n/**\n * Creates a new initialized TLS connection state. A connection state has\n * a read mode and a write mode.\n *\n * compression state:\n * The current state of the compression algorithm.\n *\n * cipher state:\n * The current state of the encryption algorithm. This will consist of the\n * scheduled key for that connection. For stream ciphers, this will also\n * contain whatever state information is necessary to allow the stream to\n * continue to encrypt or decrypt data.\n *\n * MAC key:\n * The MAC key for the connection.\n *\n * sequence number:\n * Each connection state contains a sequence number, which is maintained\n * separately for read and write states. The sequence number MUST be set to\n * zero whenever a connection state is made the active state. Sequence\n * numbers are of type uint64 and may not exceed 2^64-1. Sequence numbers do\n * not wrap. If a TLS implementation would need to wrap a sequence number,\n * it must renegotiate instead. A sequence number is incremented after each\n * record: specifically, the first record transmitted under a particular\n * connection state MUST use sequence number 0.\n *\n * @param c the connection.\n *\n * @return the new initialized TLS connection state.\n */\ntls.createConnectionState = function(c) {\n var client = (c.entity === tls.ConnectionEnd.client);\n\n var createMode = function() {\n var mode = {\n // two 32-bit numbers, first is most significant\n sequenceNumber: [0, 0],\n macKey: null,\n macLength: 0,\n macFunction: null,\n cipherState: null,\n cipherFunction: function(record) {return true;},\n compressionState: null,\n compressFunction: function(record) {return true;},\n updateSequenceNumber: function() {\n if(mode.sequenceNumber[1] === 0xFFFFFFFF) {\n mode.sequenceNumber[1] = 0;\n ++mode.sequenceNumber[0];\n } else {\n ++mode.sequenceNumber[1];\n }\n }\n };\n return mode;\n };\n var state = {\n read: createMode(),\n write: createMode()\n };\n\n // update function in read mode will decrypt then decompress a record\n state.read.update = function(c, record) {\n if(!state.read.cipherFunction(record, state.read)) {\n c.error(c, {\n message: 'Could not decrypt record or bad MAC.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n // doesn't matter if decryption failed or MAC was\n // invalid, return the same error so as not to reveal\n // which one occurred\n description: tls.Alert.Description.bad_record_mac\n }\n });\n } else if(!state.read.compressFunction(c, record, state.read)) {\n c.error(c, {\n message: 'Could not decompress record.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.decompression_failure\n }\n });\n }\n return !c.fail;\n };\n\n // update function in write mode will compress then encrypt a record\n state.write.update = function(c, record) {\n if(!state.write.compressFunction(c, record, state.write)) {\n // error, but do not send alert since it would require\n // compression as well\n c.error(c, {\n message: 'Could not compress record.',\n send: false,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n } else if(!state.write.cipherFunction(record, state.write)) {\n // error, but do not send alert since it would require\n // encryption as well\n c.error(c, {\n message: 'Could not encrypt record.',\n send: false,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n return !c.fail;\n 
};\n\n // handle security parameters\n if(c.session) {\n var sp = c.session.sp;\n c.session.cipherSuite.initSecurityParameters(sp);\n\n // generate keys\n sp.keys = tls.generateKeys(c, sp);\n state.read.macKey = client ?\n sp.keys.server_write_MAC_key : sp.keys.client_write_MAC_key;\n state.write.macKey = client ?\n sp.keys.client_write_MAC_key : sp.keys.server_write_MAC_key;\n\n // cipher suite setup\n c.session.cipherSuite.initConnectionState(state, c, sp);\n\n // compression setup\n switch(sp.compression_algorithm) {\n case tls.CompressionMethod.none:\n break;\n case tls.CompressionMethod.deflate:\n state.read.compressFunction = inflate;\n state.write.compressFunction = deflate;\n break;\n default:\n throw new Error('Unsupported compression algorithm.');\n }\n }\n\n return state;\n};\n\n/**\n * Creates a Random structure.\n *\n * struct {\n * uint32 gmt_unix_time;\n * opaque random_bytes[28];\n * } Random;\n *\n * gmt_unix_time:\n * The current time and date in standard UNIX 32-bit format (seconds since\n * the midnight starting Jan 1, 1970, UTC, ignoring leap seconds) according\n * to the sender's internal clock. Clocks are not required to be set\n * correctly by the basic TLS protocol; higher-level or application\n * protocols may define additional requirements. Note that, for historical\n * reasons, the data element is named using GMT, the predecessor of the\n * current worldwide time base, UTC.\n * random_bytes:\n * 28 bytes generated by a secure random number generator.\n *\n * @return the Random structure as a byte array.\n */\ntls.createRandom = function() {\n // get UTC milliseconds\n var d = new Date();\n var utc = +d + d.getTimezoneOffset() * 60000;\n var rval = forge.util.createBuffer();\n rval.putInt32(utc);\n rval.putBytes(forge.random.getBytes(28));\n return rval;\n};\n\n/**\n * Creates a TLS record with the given type and data.\n *\n * @param c the connection.\n * @param options:\n * type: the record type.\n * data: the plain text data in a byte buffer.\n *\n * @return the created record.\n */\ntls.createRecord = function(c, options) {\n if(!options.data) {\n return null;\n }\n var record = {\n type: options.type,\n version: {\n major: c.version.major,\n minor: c.version.minor\n },\n length: options.data.length(),\n fragment: options.data\n };\n return record;\n};\n\n/**\n * Creates a TLS alert record.\n *\n * @param c the connection.\n * @param alert:\n * level: the TLS alert level.\n * description: the TLS alert description.\n *\n * @return the created alert record.\n */\ntls.createAlert = function(c, alert) {\n var b = forge.util.createBuffer();\n b.putByte(alert.level);\n b.putByte(alert.description);\n return tls.createRecord(c, {\n type: tls.ContentType.alert,\n data: b\n });\n};\n\n/* The structure of a TLS handshake message.\n *\n * struct {\n * HandshakeType msg_type; // handshake type\n * uint24 length; // bytes in message\n * select(HandshakeType) {\n * case hello_request: HelloRequest;\n * case client_hello: ClientHello;\n * case server_hello: ServerHello;\n * case certificate: Certificate;\n * case server_key_exchange: ServerKeyExchange;\n * case certificate_request: CertificateRequest;\n * case server_hello_done: ServerHelloDone;\n * case certificate_verify: CertificateVerify;\n * case client_key_exchange: ClientKeyExchange;\n * case finished: Finished;\n * } body;\n * } Handshake;\n */\n\n/**\n * Creates a ClientHello message.\n *\n * opaque SessionID<0..32>;\n * enum { null(0), deflate(1), (255) } CompressionMethod;\n * uint8 CipherSuite[2];\n *\n * struct 
{\n * ProtocolVersion client_version;\n * Random random;\n * SessionID session_id;\n * CipherSuite cipher_suites<2..2^16-2>;\n * CompressionMethod compression_methods<1..2^8-1>;\n * select(extensions_present) {\n * case false:\n * struct {};\n * case true:\n * Extension extensions<0..2^16-1>;\n * };\n * } ClientHello;\n *\n * The extension format for extended client hellos and server hellos is:\n *\n * struct {\n * ExtensionType extension_type;\n * opaque extension_data<0..2^16-1>;\n * } Extension;\n *\n * Here:\n *\n * - \"extension_type\" identifies the particular extension type.\n * - \"extension_data\" contains information specific to the particular\n * extension type.\n *\n * The extension types defined in this document are:\n *\n * enum {\n * server_name(0), max_fragment_length(1),\n * client_certificate_url(2), trusted_ca_keys(3),\n * truncated_hmac(4), status_request(5), (65535)\n * } ExtensionType;\n *\n * @param c the connection.\n *\n * @return the ClientHello byte buffer.\n */\ntls.createClientHello = function(c) {\n // save hello version\n c.session.clientHelloVersion = {\n major: c.version.major,\n minor: c.version.minor\n };\n\n // create supported cipher suites\n var cipherSuites = forge.util.createBuffer();\n for(var i = 0; i < c.cipherSuites.length; ++i) {\n var cs = c.cipherSuites[i];\n cipherSuites.putByte(cs.id[0]);\n cipherSuites.putByte(cs.id[1]);\n }\n var cSuites = cipherSuites.length();\n\n // create supported compression methods, null always supported, but\n // also support deflate if connection has inflate and deflate methods\n var compressionMethods = forge.util.createBuffer();\n compressionMethods.putByte(tls.CompressionMethod.none);\n // FIXME: deflate support disabled until issues with raw deflate data\n // without zlib headers are resolved\n /*\n if(c.inflate !== null && c.deflate !== null) {\n compressionMethods.putByte(tls.CompressionMethod.deflate);\n }\n */\n var cMethods = compressionMethods.length();\n\n // create TLS SNI (server name indication) extension if virtual host\n // has been specified, see RFC 3546\n var extensions = forge.util.createBuffer();\n if(c.virtualHost) {\n // create extension struct\n var ext = forge.util.createBuffer();\n ext.putByte(0x00); // type server_name (ExtensionType is 2 bytes)\n ext.putByte(0x00);\n\n /* In order to provide the server name, clients MAY include an\n * extension of type \"server_name\" in the (extended) client hello.\n * The \"extension_data\" field of this extension SHALL contain\n * \"ServerNameList\" where:\n *\n * struct {\n * NameType name_type;\n * select(name_type) {\n * case host_name: HostName;\n * } name;\n * } ServerName;\n *\n * enum {\n * host_name(0), (255)\n * } NameType;\n *\n * opaque HostName<1..2^16-1>;\n *\n * struct {\n * ServerName server_name_list<1..2^16-1>\n * } ServerNameList;\n */\n var serverName = forge.util.createBuffer();\n serverName.putByte(0x00); // type host_name\n writeVector(serverName, 2, forge.util.createBuffer(c.virtualHost));\n\n // ServerNameList is in extension_data\n var snList = forge.util.createBuffer();\n writeVector(snList, 2, serverName);\n writeVector(ext, 2, snList);\n extensions.putBuffer(ext);\n }\n var extLength = extensions.length();\n if(extLength > 0) {\n // add extension vector length\n extLength += 2;\n }\n\n // determine length of the handshake message\n // cipher suites and compression methods size will need to be\n // updated if more get added to the list\n var sessionId = c.session.id;\n var length =\n sessionId.length + 1 + // session ID 
vector\n 2 + // version (major + minor)\n 4 + 28 + // random time and random bytes\n 2 + cSuites + // cipher suites vector\n 1 + cMethods + // compression methods vector\n extLength; // extensions vector\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.client_hello);\n rval.putInt24(length); // handshake length\n rval.putByte(c.version.major); // major version\n rval.putByte(c.version.minor); // minor version\n rval.putBytes(c.session.sp.client_random); // random time + bytes\n writeVector(rval, 1, forge.util.createBuffer(sessionId));\n writeVector(rval, 2, cipherSuites);\n writeVector(rval, 1, compressionMethods);\n if(extLength > 0) {\n writeVector(rval, 2, extensions);\n }\n return rval;\n};\n\n/**\n * Creates a ServerHello message.\n *\n * @param c the connection.\n *\n * @return the ServerHello byte buffer.\n */\ntls.createServerHello = function(c) {\n // determine length of the handshake message\n var sessionId = c.session.id;\n var length =\n sessionId.length + 1 + // session ID vector\n 2 + // version (major + minor)\n 4 + 28 + // random time and random bytes\n 2 + // chosen cipher suite\n 1; // chosen compression method\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.server_hello);\n rval.putInt24(length); // handshake length\n rval.putByte(c.version.major); // major version\n rval.putByte(c.version.minor); // minor version\n rval.putBytes(c.session.sp.server_random); // random time + bytes\n writeVector(rval, 1, forge.util.createBuffer(sessionId));\n rval.putByte(c.session.cipherSuite.id[0]);\n rval.putByte(c.session.cipherSuite.id[1]);\n rval.putByte(c.session.compressionMethod);\n return rval;\n};\n\n/**\n * Creates a Certificate message.\n *\n * When this message will be sent:\n * This is the first message the client can send after receiving a server\n * hello done message and the first message the server can send after\n * sending a ServerHello. This client message is only sent if the server\n * requests a certificate. If no suitable certificate is available, the\n * client should send a certificate message containing no certificates. 
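// Illustrative sketch (not part of the forge source): a worked instance of the
// handshake-length arithmetic used by tls.createClientHello above, assuming an
// empty session ID, ten two-byte cipher suite IDs, a single compression method
// and no extensions.
var sessionIdLength = 0;   // new session, empty session ID
var cSuites = 10 * 2;      // each cipher suite ID is 2 bytes
var cMethods = 1;          // CompressionMethod.none only
var extLength = 0;         // no SNI extension requested
var clientHelloLength =
  sessionIdLength + 1 +    // session ID vector (1 length byte)
  2 +                      // protocol version (major + minor)
  4 + 28 +                 // Random: gmt_unix_time + 28 random bytes
  2 + cSuites +            // cipher suites vector (2 length bytes)
  1 + cMethods +           // compression methods vector (1 length byte)
  extLength;               // extensions vector, absent here
// clientHelloLength === 59, the value written by putInt24() right after the
// client_hello handshake type byte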
If\n * client authentication is required by the server for the handshake to\n * continue, it may respond with a fatal handshake failure alert.\n *\n * opaque ASN.1Cert<1..2^24-1>;\n *\n * struct {\n * ASN.1Cert certificate_list<0..2^24-1>;\n * } Certificate;\n *\n * @param c the connection.\n *\n * @return the Certificate byte buffer.\n */\ntls.createCertificate = function(c) {\n // TODO: check certificate request to ensure types are supported\n\n // get a certificate (a certificate as a PEM string)\n var client = (c.entity === tls.ConnectionEnd.client);\n var cert = null;\n if(c.getCertificate) {\n var hint;\n if(client) {\n hint = c.session.certificateRequest;\n } else {\n hint = c.session.extensions.server_name.serverNameList;\n }\n cert = c.getCertificate(c, hint);\n }\n\n // buffer to hold certificate list\n var certList = forge.util.createBuffer();\n if(cert !== null) {\n try {\n // normalize cert to a chain of certificates\n if(!forge.util.isArray(cert)) {\n cert = [cert];\n }\n var asn1 = null;\n for(var i = 0; i < cert.length; ++i) {\n var msg = forge.pem.decode(cert[i])[0];\n if(msg.type !== 'CERTIFICATE' &&\n msg.type !== 'X509 CERTIFICATE' &&\n msg.type !== 'TRUSTED CERTIFICATE') {\n var error = new Error('Could not convert certificate from PEM; PEM ' +\n 'header type is not \"CERTIFICATE\", \"X509 CERTIFICATE\", or ' +\n '\"TRUSTED CERTIFICATE\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert certificate from PEM; PEM is encrypted.');\n }\n\n var der = forge.util.createBuffer(msg.body);\n if(asn1 === null) {\n asn1 = forge.asn1.fromDer(der.bytes(), false);\n }\n\n // certificate entry is itself a vector with 3 length bytes\n var certBuffer = forge.util.createBuffer();\n writeVector(certBuffer, 3, der);\n\n // add cert vector to cert list vector\n certList.putBuffer(certBuffer);\n }\n\n // save certificate\n cert = forge.pki.certificateFromAsn1(asn1);\n if(client) {\n c.session.clientCertificate = cert;\n } else {\n c.session.serverCertificate = cert;\n }\n } catch(ex) {\n return c.error(c, {\n message: 'Could not send certificate list.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.bad_certificate\n }\n });\n }\n }\n\n // determine length of the handshake message\n var length = 3 + certList.length(); // cert list vector\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.certificate);\n rval.putInt24(length);\n writeVector(rval, 3, certList);\n return rval;\n};\n\n/**\n * Creates a ClientKeyExchange message.\n *\n * When this message will be sent:\n * This message is always sent by the client. It will immediately follow the\n * client certificate message, if it is sent. Otherwise it will be the first\n * message sent by the client after it receives the server hello done\n * message.\n *\n * Meaning of this message:\n * With this message, the premaster secret is set, either though direct\n * transmission of the RSA-encrypted secret, or by the transmission of\n * Diffie-Hellman parameters which will allow each side to agree upon the\n * same premaster secret. 
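// Illustrative sketch (not part of the forge source): building a single
// certificate_list entry the way tls.createCertificate does above -- decode the
// PEM, take the DER body and prefix it with the 3-byte ASN.1Cert length.
// `pem` is a placeholder for an application-supplied PEM certificate string.
var forge = require('node-forge');
function certificateListEntry(pem) {
  var msg = forge.pem.decode(pem)[0];           // first PEM message in the string
  var der = forge.util.createBuffer(msg.body);  // raw DER bytes of the certificate
  var entry = forge.util.createBuffer();
  entry.putInt24(der.length());                 // ASN.1Cert<1..2^24-1> length prefix
  entry.putBuffer(der);
  return entry;                                 // appended to the certificate_list vector
}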
When the key exchange method is DH_RSA or DH_DSS,\n * client certification has been requested, and the client was able to\n * respond with a certificate which contained a Diffie-Hellman public key\n * whose parameters (group and generator) matched those specified by the\n * server in its certificate, this message will not contain any data.\n *\n * Meaning of this message:\n * If RSA is being used for key agreement and authentication, the client\n * generates a 48-byte premaster secret, encrypts it using the public key\n * from the server's certificate or the temporary RSA key provided in a\n * server key exchange message, and sends the result in an encrypted\n * premaster secret message. This structure is a variant of the client\n * key exchange message, not a message in itself.\n *\n * struct {\n * select(KeyExchangeAlgorithm) {\n * case rsa: EncryptedPreMasterSecret;\n * case diffie_hellman: ClientDiffieHellmanPublic;\n * } exchange_keys;\n * } ClientKeyExchange;\n *\n * struct {\n * ProtocolVersion client_version;\n * opaque random[46];\n * } PreMasterSecret;\n *\n * struct {\n * public-key-encrypted PreMasterSecret pre_master_secret;\n * } EncryptedPreMasterSecret;\n *\n * A public-key-encrypted element is encoded as a vector <0..2^16-1>.\n *\n * @param c the connection.\n *\n * @return the ClientKeyExchange byte buffer.\n */\ntls.createClientKeyExchange = function(c) {\n // create buffer to encrypt\n var b = forge.util.createBuffer();\n\n // add highest client-supported protocol to help server avoid version\n // rollback attacks\n b.putByte(c.session.clientHelloVersion.major);\n b.putByte(c.session.clientHelloVersion.minor);\n\n // generate and add 46 random bytes\n b.putBytes(forge.random.getBytes(46));\n\n // save pre-master secret\n var sp = c.session.sp;\n sp.pre_master_secret = b.getBytes();\n\n // RSA-encrypt the pre-master secret\n var key = c.session.serverCertificate.publicKey;\n b = key.encrypt(sp.pre_master_secret);\n\n /* Note: The encrypted pre-master secret will be stored in a\n public-key-encrypted opaque vector that has the length prefixed using\n 2 bytes, so include those 2 bytes in the handshake message length. This\n is done as a minor optimization instead of calling writeVector(). */\n\n // determine length of the handshake message\n var length = b.length + 2;\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.client_key_exchange);\n rval.putInt24(length);\n // add vector length bytes\n rval.putInt16(b.length);\n rval.putBytes(b);\n return rval;\n};\n\n/**\n * Creates a ServerKeyExchange message.\n *\n * @param c the connection.\n *\n * @return the ServerKeyExchange byte buffer.\n */\ntls.createServerKeyExchange = function(c) {\n // this implementation only supports RSA, no Diffie-Hellman support,\n // so this record is empty\n\n // determine length of the handshake message\n var length = 0;\n\n // build record fragment\n var rval = forge.util.createBuffer();\n if(length > 0) {\n rval.putByte(tls.HandshakeType.server_key_exchange);\n rval.putInt24(length);\n }\n return rval;\n};\n\n/**\n * Gets the signed data used to verify a client-side certificate. 
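// Illustrative sketch (not part of the forge source): the EncryptedPreMasterSecret
// construction mirrored from tls.createClientKeyExchange above. `serverPublicKey`
// and `clientHelloVersion` are placeholders for values the connection already
// holds (the server certificate's public key and the saved hello version).
var forge = require('node-forge');
function encryptPreMasterSecret(serverPublicKey, clientHelloVersion) {
  var b = forge.util.createBuffer();
  b.putByte(clientHelloVersion.major);    // highest client-supported version,
  b.putByte(clientHelloVersion.minor);    // guards against version rollback
  b.putBytes(forge.random.getBytes(46));  // 2 version bytes + 46 random = 48 bytes
  var preMasterSecret = b.getBytes();
  // RSA (PKCS#1 v1.5) encryption with the server certificate's public key
  return {
    preMasterSecret: preMasterSecret,
    encrypted: serverPublicKey.encrypt(preMasterSecret)
  };
}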
See\n * tls.createCertificateVerify() for details.\n *\n * @param c the connection.\n * @param callback the callback to call once the signed data is ready.\n */\ntls.getClientSignature = function(c, callback) {\n // generate data to RSA encrypt\n var b = forge.util.createBuffer();\n b.putBuffer(c.session.md5.digest());\n b.putBuffer(c.session.sha1.digest());\n b = b.getBytes();\n\n // create default signing function as necessary\n c.getSignature = c.getSignature || function(c, b, callback) {\n // do rsa encryption, call callback\n var privateKey = null;\n if(c.getPrivateKey) {\n try {\n privateKey = c.getPrivateKey(c, c.session.clientCertificate);\n privateKey = forge.pki.privateKeyFromPem(privateKey);\n } catch(ex) {\n c.error(c, {\n message: 'Could not get private key.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n }\n if(privateKey === null) {\n c.error(c, {\n message: 'No private key set.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n } else {\n b = privateKey.sign(b, null);\n }\n callback(c, b);\n };\n\n // get client signature\n c.getSignature(c, b, callback);\n};\n\n/**\n * Creates a CertificateVerify message.\n *\n * Meaning of this message:\n * This structure conveys the client's Diffie-Hellman public value\n * (Yc) if it was not already included in the client's certificate.\n * The encoding used for Yc is determined by the enumerated\n * PublicValueEncoding. This structure is a variant of the client\n * key exchange message, not a message in itself.\n *\n * When this message will be sent:\n * This message is used to provide explicit verification of a client\n * certificate. This message is only sent following a client\n * certificate that has signing capability (i.e. all certificates\n * except those containing fixed Diffie-Hellman parameters). When\n * sent, it will immediately follow the client key exchange message.\n *\n * struct {\n * Signature signature;\n * } CertificateVerify;\n *\n * CertificateVerify.signature.md5_hash\n * MD5(handshake_messages);\n *\n * Certificate.signature.sha_hash\n * SHA(handshake_messages);\n *\n * Here handshake_messages refers to all handshake messages sent or\n * received starting at client hello up to but not including this\n * message, including the type and length fields of the handshake\n * messages.\n *\n * select(SignatureAlgorithm) {\n * case anonymous: struct { };\n * case rsa:\n * digitally-signed struct {\n * opaque md5_hash[16];\n * opaque sha_hash[20];\n * };\n * case dsa:\n * digitally-signed struct {\n * opaque sha_hash[20];\n * };\n * } Signature;\n *\n * In digital signing, one-way hash functions are used as input for a\n * signing algorithm. A digitally-signed element is encoded as an opaque\n * vector <0..2^16-1>, where the length is specified by the signing\n * algorithm and key.\n *\n * In RSA signing, a 36-byte structure of two hashes (one SHA and one\n * MD5) is signed (encrypted with the private key). 
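// Illustrative sketch (not part of the forge source): the 36-byte value that
// tls.getClientSignature hands to the signing callback is simply
// MD5(handshake_messages) || SHA-1(handshake_messages). The `md5` and `sha1`
// parameters stand in for c.session.md5 / c.session.sha1, which have been
// updated with every handshake message so far.
var forge = require('node-forge');
function certificateVerifyInput(md5, sha1) {
  var b = forge.util.createBuffer();
  b.putBuffer(md5.digest());   // 16 bytes
  b.putBuffer(sha1.digest());  // 20 bytes
  return b.getBytes();         // 36 bytes, signed without a DigestInfo wrapper
}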
It is encoded with\n * PKCS #1 block type 0 or type 1 as described in [PKCS1].\n *\n * In DSS, the 20 bytes of the SHA hash are run directly through the\n * Digital Signing Algorithm with no additional hashing.\n *\n * @param c the connection.\n * @param signature the signature to include in the message.\n *\n * @return the CertificateVerify byte buffer.\n */\ntls.createCertificateVerify = function(c, signature) {\n /* Note: The signature will be stored in a \"digitally-signed\" opaque\n vector that has the length prefixed using 2 bytes, so include those\n 2 bytes in the handshake message length. This is done as a minor\n optimization instead of calling writeVector(). */\n\n // determine length of the handshake message\n var length = signature.length + 2;\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.certificate_verify);\n rval.putInt24(length);\n // add vector length bytes\n rval.putInt16(signature.length);\n rval.putBytes(signature);\n return rval;\n};\n\n/**\n * Creates a CertificateRequest message.\n *\n * @param c the connection.\n *\n * @return the CertificateRequest byte buffer.\n */\ntls.createCertificateRequest = function(c) {\n // TODO: support other certificate types\n var certTypes = forge.util.createBuffer();\n\n // common RSA certificate type\n certTypes.putByte(0x01);\n\n // add distinguished names from CA store\n var cAs = forge.util.createBuffer();\n for(var key in c.caStore.certs) {\n var cert = c.caStore.certs[key];\n var dn = forge.pki.distinguishedNameToAsn1(cert.subject);\n var byteBuffer = forge.asn1.toDer(dn);\n cAs.putInt16(byteBuffer.length());\n cAs.putBuffer(byteBuffer);\n }\n\n // TODO: TLS 1.2+ has a different format\n\n // determine length of the handshake message\n var length =\n 1 + certTypes.length() +\n 2 + cAs.length();\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.certificate_request);\n rval.putInt24(length);\n writeVector(rval, 1, certTypes);\n writeVector(rval, 2, cAs);\n return rval;\n};\n\n/**\n * Creates a ServerHelloDone message.\n *\n * @param c the connection.\n *\n * @return the ServerHelloDone byte buffer.\n */\ntls.createServerHelloDone = function(c) {\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.server_hello_done);\n rval.putInt24(0);\n return rval;\n};\n\n/**\n * Creates a ChangeCipherSpec message.\n *\n * The change cipher spec protocol exists to signal transitions in\n * ciphering strategies. The protocol consists of a single message,\n * which is encrypted and compressed under the current (not the pending)\n * connection state. The message consists of a single byte of value 1.\n *\n * struct {\n * enum { change_cipher_spec(1), (255) } type;\n * } ChangeCipherSpec;\n *\n * @return the ChangeCipherSpec byte buffer.\n */\ntls.createChangeCipherSpec = function() {\n var rval = forge.util.createBuffer();\n rval.putByte(0x01);\n return rval;\n};\n\n/**\n * Creates a Finished message.\n *\n * struct {\n * opaque verify_data[12];\n * } Finished;\n *\n * verify_data\n * PRF(master_secret, finished_label, MD5(handshake_messages) +\n * SHA-1(handshake_messages)) [0..11];\n *\n * finished_label\n * For Finished messages sent by the client, the string \"client\n * finished\". For Finished messages sent by the server, the\n * string \"server finished\".\n *\n * handshake_messages\n * All of the data from all handshake messages up to but not\n * including this message. 
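// Illustrative sketch (not part of the forge source): how the CA
// distinguished-name list inside a CertificateRequest is serialized, mirroring
// tls.createCertificateRequest above. `caStore` is a placeholder for a store
// created with forge.pki.createCaStore().
var forge = require('node-forge');
function certificateAuthoritiesVector(caStore) {
  var cAs = forge.util.createBuffer();
  for(var key in caStore.certs) {
    var dn = forge.pki.distinguishedNameToAsn1(caStore.certs[key].subject);
    var der = forge.asn1.toDer(dn);
    cAs.putInt16(der.length());  // each DN is its own <1..2^16-1> opaque vector
    cAs.putBuffer(der);
  }
  return cAs;  // wrapped in a further 2-byte length vector inside the message
}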
This is only data visible at the\n * handshake layer and does not include record layer headers.\n * This is the concatenation of all the Handshake structures as\n * defined in 7.4 exchanged thus far.\n *\n * @param c the connection.\n *\n * @return the Finished byte buffer.\n */\ntls.createFinished = function(c) {\n // generate verify_data\n var b = forge.util.createBuffer();\n b.putBuffer(c.session.md5.digest());\n b.putBuffer(c.session.sha1.digest());\n\n // TODO: determine prf function and verify length for TLS 1.2\n var client = (c.entity === tls.ConnectionEnd.client);\n var sp = c.session.sp;\n var vdl = 12;\n var prf = prf_TLS1;\n var label = client ? 'client finished' : 'server finished';\n b = prf(sp.master_secret, label, b.getBytes(), vdl);\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.finished);\n rval.putInt24(b.length());\n rval.putBuffer(b);\n return rval;\n};\n\n/**\n * Creates a HeartbeatMessage (See RFC 6520).\n *\n * struct {\n * HeartbeatMessageType type;\n * uint16 payload_length;\n * opaque payload[HeartbeatMessage.payload_length];\n * opaque padding[padding_length];\n * } HeartbeatMessage;\n *\n * The total length of a HeartbeatMessage MUST NOT exceed 2^14 or\n * max_fragment_length when negotiated as defined in [RFC6066].\n *\n * type: The message type, either heartbeat_request or heartbeat_response.\n *\n * payload_length: The length of the payload.\n *\n * payload: The payload consists of arbitrary content.\n *\n * padding: The padding is random content that MUST be ignored by the\n * receiver. The length of a HeartbeatMessage is TLSPlaintext.length\n * for TLS and DTLSPlaintext.length for DTLS. Furthermore, the\n * length of the type field is 1 byte, and the length of the\n * payload_length is 2. Therefore, the padding_length is\n * TLSPlaintext.length - payload_length - 3 for TLS and\n * DTLSPlaintext.length - payload_length - 3 for DTLS. The\n * padding_length MUST be at least 16.\n *\n * The sender of a HeartbeatMessage MUST use a random padding of at\n * least 16 bytes. 
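// Illustrative sketch (not part of the forge source): computing the 12-byte
// Finished verify_data with the PRF this module exposes as forge.tls.prf_tls1.
// `masterSecret`, `md5` and `sha1` are placeholders for
// c.session.sp.master_secret and the running handshake digests.
var forge = require('node-forge');
function finishedVerifyData(masterSecret, md5, sha1, isClient) {
  var seed = forge.util.createBuffer();
  seed.putBuffer(md5.digest());
  seed.putBuffer(sha1.digest());
  var label = isClient ? 'client finished' : 'server finished';
  // PRF(master_secret, finished_label, MD5(handshake) + SHA-1(handshake))[0..11]
  return forge.tls.prf_tls1(masterSecret, label, seed.getBytes(), 12);  // 12-byte buffer
}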
The padding of a received HeartbeatMessage message\n * MUST be ignored.\n *\n * If the payload_length of a received HeartbeatMessage is too large,\n * the received HeartbeatMessage MUST be discarded silently.\n *\n * @param c the connection.\n * @param type the tls.HeartbeatMessageType.\n * @param payload the heartbeat data to send as the payload.\n * @param [payloadLength] the payload length to use, defaults to the\n * actual payload length.\n *\n * @return the HeartbeatRequest byte buffer.\n */\ntls.createHeartbeat = function(type, payload, payloadLength) {\n if(typeof payloadLength === 'undefined') {\n payloadLength = payload.length;\n }\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(type); // heartbeat message type\n rval.putInt16(payloadLength); // payload length\n rval.putBytes(payload); // payload\n // padding\n var plaintextLength = rval.length();\n var paddingLength = Math.max(16, plaintextLength - payloadLength - 3);\n rval.putBytes(forge.random.getBytes(paddingLength));\n return rval;\n};\n\n/**\n * Fragments, compresses, encrypts, and queues a record for delivery.\n *\n * @param c the connection.\n * @param record the record to queue.\n */\ntls.queue = function(c, record) {\n // error during record creation\n if(!record) {\n return;\n }\n\n if(record.fragment.length() === 0) {\n if(record.type === tls.ContentType.handshake ||\n record.type === tls.ContentType.alert ||\n record.type === tls.ContentType.change_cipher_spec) {\n // Empty handshake, alert of change cipher spec messages are not allowed per the TLS specification and should not be sent.\n return;\n }\n }\n\n // if the record is a handshake record, update handshake hashes\n if(record.type === tls.ContentType.handshake) {\n var bytes = record.fragment.bytes();\n c.session.md5.update(bytes);\n c.session.sha1.update(bytes);\n bytes = null;\n }\n\n // handle record fragmentation\n var records;\n if(record.fragment.length() <= tls.MaxFragment) {\n records = [record];\n } else {\n // fragment data as long as it is too long\n records = [];\n var data = record.fragment.bytes();\n while(data.length > tls.MaxFragment) {\n records.push(tls.createRecord(c, {\n type: record.type,\n data: forge.util.createBuffer(data.slice(0, tls.MaxFragment))\n }));\n data = data.slice(tls.MaxFragment);\n }\n // add last record\n if(data.length > 0) {\n records.push(tls.createRecord(c, {\n type: record.type,\n data: forge.util.createBuffer(data)\n }));\n }\n }\n\n // compress and encrypt all fragmented records\n for(var i = 0; i < records.length && !c.fail; ++i) {\n // update the record using current write state\n var rec = records[i];\n var s = c.state.current.write;\n if(s.update(c, rec)) {\n // store record\n c.records.push(rec);\n }\n }\n};\n\n/**\n * Flushes all queued records to the output buffer and calls the\n * tlsDataReady() handler on the given connection.\n *\n * @param c the connection.\n *\n * @return true on success, false on failure.\n */\ntls.flush = function(c) {\n for(var i = 0; i < c.records.length; ++i) {\n var record = c.records[i];\n\n // add record header and fragment\n c.tlsData.putByte(record.type);\n c.tlsData.putByte(record.version.major);\n c.tlsData.putByte(record.version.minor);\n c.tlsData.putInt16(record.fragment.length());\n c.tlsData.putBuffer(c.records[i].fragment);\n }\n c.records = [];\n return c.tlsDataReady(c);\n};\n\n/**\n * Maps a pki.certificateError to a tls.Alert.Description.\n *\n * @param error the error to map.\n *\n * @return the alert description.\n */\nvar 
_certErrorToAlertDesc = function(error) {\n switch(error) {\n case true:\n return true;\n case forge.pki.certificateError.bad_certificate:\n return tls.Alert.Description.bad_certificate;\n case forge.pki.certificateError.unsupported_certificate:\n return tls.Alert.Description.unsupported_certificate;\n case forge.pki.certificateError.certificate_revoked:\n return tls.Alert.Description.certificate_revoked;\n case forge.pki.certificateError.certificate_expired:\n return tls.Alert.Description.certificate_expired;\n case forge.pki.certificateError.certificate_unknown:\n return tls.Alert.Description.certificate_unknown;\n case forge.pki.certificateError.unknown_ca:\n return tls.Alert.Description.unknown_ca;\n default:\n return tls.Alert.Description.bad_certificate;\n }\n};\n\n/**\n * Maps a tls.Alert.Description to a pki.certificateError.\n *\n * @param desc the alert description.\n *\n * @return the certificate error.\n */\nvar _alertDescToCertError = function(desc) {\n switch(desc) {\n case true:\n return true;\n case tls.Alert.Description.bad_certificate:\n return forge.pki.certificateError.bad_certificate;\n case tls.Alert.Description.unsupported_certificate:\n return forge.pki.certificateError.unsupported_certificate;\n case tls.Alert.Description.certificate_revoked:\n return forge.pki.certificateError.certificate_revoked;\n case tls.Alert.Description.certificate_expired:\n return forge.pki.certificateError.certificate_expired;\n case tls.Alert.Description.certificate_unknown:\n return forge.pki.certificateError.certificate_unknown;\n case tls.Alert.Description.unknown_ca:\n return forge.pki.certificateError.unknown_ca;\n default:\n return forge.pki.certificateError.bad_certificate;\n }\n};\n\n/**\n * Verifies a certificate chain against the given connection's\n * Certificate Authority store.\n *\n * @param c the TLS connection.\n * @param chain the certificate chain to verify, with the root or highest\n * authority at the end.\n *\n * @return true if successful, false if not.\n */\ntls.verifyCertificateChain = function(c, chain) {\n try {\n // Make a copy of c.verifyOptions so that we can modify options.verify\n // without modifying c.verifyOptions.\n var options = {};\n for (var key in c.verifyOptions) {\n options[key] = c.verifyOptions[key];\n }\n\n options.verify = function(vfd, depth, chain) {\n // convert pki.certificateError to tls alert description\n var desc = _certErrorToAlertDesc(vfd);\n\n // call application callback\n var ret = c.verify(c, vfd, depth, chain);\n if(ret !== true) {\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n // throw custom error\n var error = new Error('The application rejected the certificate.');\n error.send = true;\n error.alert = {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.bad_certificate\n };\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.alert) {\n error.alert.description = ret.alert;\n }\n throw error;\n }\n\n // convert tls alert description to pki.certificateError\n if(ret !== vfd) {\n ret = _alertDescToCertError(ret);\n }\n }\n\n return ret;\n };\n\n // verify chain\n forge.pki.verifyCertificateChain(c.caStore, chain, options);\n } catch(ex) {\n // build tls error if not already customized\n var err = ex;\n if(typeof err !== 'object' || forge.util.isArray(err)) {\n err = {\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: _certErrorToAlertDesc(ex)\n }\n };\n }\n if(!('send' in err)) {\n err.send = true;\n }\n if(!('alert' in err)) {\n err.alert = {\n level: 
tls.Alert.Level.fatal,\n description: _certErrorToAlertDesc(err.error)\n };\n }\n\n // send error\n c.error(c, err);\n }\n\n return !c.fail;\n};\n\n/**\n * Creates a new TLS session cache.\n *\n * @param cache optional map of session ID to cached session.\n * @param capacity the maximum size for the cache (default: 100).\n *\n * @return the new TLS session cache.\n */\ntls.createSessionCache = function(cache, capacity) {\n var rval = null;\n\n // assume input is already a session cache object\n if(cache && cache.getSession && cache.setSession && cache.order) {\n rval = cache;\n } else {\n // create cache\n rval = {};\n rval.cache = cache || {};\n rval.capacity = Math.max(capacity || 100, 1);\n rval.order = [];\n\n // store order for sessions, delete session overflow\n for(var key in cache) {\n if(rval.order.length <= capacity) {\n rval.order.push(key);\n } else {\n delete cache[key];\n }\n }\n\n // get a session from a session ID (or get any session)\n rval.getSession = function(sessionId) {\n var session = null;\n var key = null;\n\n // if session ID provided, use it\n if(sessionId) {\n key = forge.util.bytesToHex(sessionId);\n } else if(rval.order.length > 0) {\n // get first session from cache\n key = rval.order[0];\n }\n\n if(key !== null && key in rval.cache) {\n // get cached session and remove from cache\n session = rval.cache[key];\n delete rval.cache[key];\n for(var i in rval.order) {\n if(rval.order[i] === key) {\n rval.order.splice(i, 1);\n break;\n }\n }\n }\n\n return session;\n };\n\n // set a session in the cache\n rval.setSession = function(sessionId, session) {\n // remove session from cache if at capacity\n if(rval.order.length === rval.capacity) {\n var key = rval.order.shift();\n delete rval.cache[key];\n }\n // add session to cache\n var key = forge.util.bytesToHex(sessionId);\n rval.order.push(key);\n rval.cache[key] = session;\n };\n }\n\n return rval;\n};\n\n/**\n * Creates a new TLS connection.\n *\n * See public createConnection() docs for more details.\n *\n * @param options the options for this connection.\n *\n * @return the new TLS connection.\n */\ntls.createConnection = function(options) {\n var caStore = null;\n if(options.caStore) {\n // if CA store is an array, convert it to a CA store object\n if(forge.util.isArray(options.caStore)) {\n caStore = forge.pki.createCaStore(options.caStore);\n } else {\n caStore = options.caStore;\n }\n } else {\n // create empty CA store\n caStore = forge.pki.createCaStore();\n }\n\n // setup default cipher suites\n var cipherSuites = options.cipherSuites || null;\n if(cipherSuites === null) {\n cipherSuites = [];\n for(var key in tls.CipherSuites) {\n cipherSuites.push(tls.CipherSuites[key]);\n }\n }\n\n // set default entity\n var entity = (options.server || false) ?\n tls.ConnectionEnd.server : tls.ConnectionEnd.client;\n\n // create session cache if requested\n var sessionCache = options.sessionCache ?\n tls.createSessionCache(options.sessionCache) : null;\n\n // create TLS connection\n var c = {\n version: {major: tls.Version.major, minor: tls.Version.minor},\n entity: entity,\n sessionId: options.sessionId,\n caStore: caStore,\n sessionCache: sessionCache,\n cipherSuites: cipherSuites,\n connected: options.connected,\n virtualHost: options.virtualHost || null,\n verifyClient: options.verifyClient || false,\n verify: options.verify || function(cn, vfd, dpth, cts) {return vfd;},\n verifyOptions: options.verifyOptions || {},\n getCertificate: options.getCertificate || null,\n getPrivateKey: options.getPrivateKey || 
null,\n getSignature: options.getSignature || null,\n input: forge.util.createBuffer(),\n tlsData: forge.util.createBuffer(),\n data: forge.util.createBuffer(),\n tlsDataReady: options.tlsDataReady,\n dataReady: options.dataReady,\n heartbeatReceived: options.heartbeatReceived,\n closed: options.closed,\n error: function(c, ex) {\n // set origin if not set\n ex.origin = ex.origin ||\n ((c.entity === tls.ConnectionEnd.client) ? 'client' : 'server');\n\n // send TLS alert\n if(ex.send) {\n tls.queue(c, tls.createAlert(c, ex.alert));\n tls.flush(c);\n }\n\n // error is fatal by default\n var fatal = (ex.fatal !== false);\n if(fatal) {\n // set fail flag\n c.fail = true;\n }\n\n // call error handler first\n options.error(c, ex);\n\n if(fatal) {\n // fatal error, close connection, do not clear fail\n c.close(false);\n }\n },\n deflate: options.deflate || null,\n inflate: options.inflate || null\n };\n\n /**\n * Resets a closed TLS connection for reuse. Called in c.close().\n *\n * @param clearFail true to clear the fail flag (default: true).\n */\n c.reset = function(clearFail) {\n c.version = {major: tls.Version.major, minor: tls.Version.minor};\n c.record = null;\n c.session = null;\n c.peerCertificate = null;\n c.state = {\n pending: null,\n current: null\n };\n c.expect = (c.entity === tls.ConnectionEnd.client) ? SHE : CHE;\n c.fragmented = null;\n c.records = [];\n c.open = false;\n c.handshakes = 0;\n c.handshaking = false;\n c.isConnected = false;\n c.fail = !(clearFail || typeof(clearFail) === 'undefined');\n c.input.clear();\n c.tlsData.clear();\n c.data.clear();\n c.state.current = tls.createConnectionState(c);\n };\n\n // do initial reset of connection\n c.reset();\n\n /**\n * Updates the current TLS engine state based on the given record.\n *\n * @param c the TLS connection.\n * @param record the TLS record to act on.\n */\n var _update = function(c, record) {\n // get record handler (align type in table by subtracting lowest)\n var aligned = record.type - tls.ContentType.change_cipher_spec;\n var handlers = ctTable[c.entity][c.expect];\n if(aligned in handlers) {\n handlers[aligned](c, record);\n } else {\n // unexpected record\n tls.handleUnexpected(c, record);\n }\n };\n\n /**\n * Reads the record header and initializes the next record on the given\n * connection.\n *\n * @param c the TLS connection with the next record.\n *\n * @return 0 if the input data could be processed, otherwise the\n * number of bytes required for data to be processed.\n */\n var _readRecordHeader = function(c) {\n var rval = 0;\n\n // get input buffer and its length\n var b = c.input;\n var len = b.length();\n\n // need at least 5 bytes to initialize a record\n if(len < 5) {\n rval = 5 - len;\n } else {\n // enough bytes for header\n // initialize record\n c.record = {\n type: b.getByte(),\n version: {\n major: b.getByte(),\n minor: b.getByte()\n },\n length: b.getInt16(),\n fragment: forge.util.createBuffer(),\n ready: false\n };\n\n // check record version\n var compatibleVersion = (c.record.version.major === c.version.major);\n if(compatibleVersion && c.session && c.session.version) {\n // session version already set, require same minor version\n compatibleVersion = (c.record.version.minor === c.version.minor);\n }\n if(!compatibleVersion) {\n c.error(c, {\n message: 'Incompatible TLS version.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.protocol_version\n }\n });\n }\n }\n\n return rval;\n };\n\n /**\n * Reads the next record's contents and appends 
its message to any\n * previously fragmented message.\n *\n * @param c the TLS connection with the next record.\n *\n * @return 0 if the input data could be processed, otherwise the\n * number of bytes required for data to be processed.\n */\n var _readRecord = function(c) {\n var rval = 0;\n\n // ensure there is enough input data to get the entire record\n var b = c.input;\n var len = b.length();\n if(len < c.record.length) {\n // not enough data yet, return how much is required\n rval = c.record.length - len;\n } else {\n // there is enough data to parse the pending record\n // fill record fragment and compact input buffer\n c.record.fragment.putBytes(b.getBytes(c.record.length));\n b.compact();\n\n // update record using current read state\n var s = c.state.current.read;\n if(s.update(c, c.record)) {\n // see if there is a previously fragmented message that the\n // new record's message fragment should be appended to\n if(c.fragmented !== null) {\n // if the record type matches a previously fragmented\n // record, append the record fragment to it\n if(c.fragmented.type === c.record.type) {\n // concatenate record fragments\n c.fragmented.fragment.putBuffer(c.record.fragment);\n c.record = c.fragmented;\n } else {\n // error, invalid fragmented record\n c.error(c, {\n message: 'Invalid fragmented record.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description:\n tls.Alert.Description.unexpected_message\n }\n });\n }\n }\n\n // record is now ready\n c.record.ready = true;\n }\n }\n\n return rval;\n };\n\n /**\n * Performs a handshake using the TLS Handshake Protocol, as a client.\n *\n * This method should only be called if the connection is in client mode.\n *\n * @param sessionId the session ID to use, null to start a new one.\n */\n c.handshake = function(sessionId) {\n // error to call this in non-client mode\n if(c.entity !== tls.ConnectionEnd.client) {\n // not fatal error\n c.error(c, {\n message: 'Cannot initiate handshake as a server.',\n fatal: false\n });\n } else if(c.handshaking) {\n // handshake is already in progress, fail but not fatal error\n c.error(c, {\n message: 'Handshake already in progress.',\n fatal: false\n });\n } else {\n // clear fail flag on reuse\n if(c.fail && !c.open && c.handshakes === 0) {\n c.fail = false;\n }\n\n // now handshaking\n c.handshaking = true;\n\n // default to blank (new session)\n sessionId = sessionId || '';\n\n // if a session ID was specified, try to find it in the cache\n var session = null;\n if(sessionId.length > 0) {\n if(c.sessionCache) {\n session = c.sessionCache.getSession(sessionId);\n }\n\n // matching session not found in cache, clear session ID\n if(session === null) {\n sessionId = '';\n }\n }\n\n // no session given, grab a session from the cache, if available\n if(sessionId.length === 0 && c.sessionCache) {\n session = c.sessionCache.getSession();\n if(session !== null) {\n sessionId = session.id;\n }\n }\n\n // set up session\n c.session = {\n id: sessionId,\n version: null,\n cipherSuite: null,\n compressionMethod: null,\n serverCertificate: null,\n certificateRequest: null,\n clientCertificate: null,\n sp: {},\n md5: forge.md.md5.create(),\n sha1: forge.md.sha1.create()\n };\n\n // use existing session information\n if(session) {\n // only update version on connection, session version not yet set\n c.version = session.version;\n c.session.sp = session.sp;\n }\n\n // generate new client random\n c.session.sp.client_random = tls.createRandom().getBytes();\n\n // connection now open\n c.open = true;\n\n 
// send hello\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createClientHello(c)\n }));\n tls.flush(c);\n }\n };\n\n /**\n * Called when TLS protocol data has been received from somewhere and should\n * be processed by the TLS engine.\n *\n * @param data the TLS protocol data, as a string, to process.\n *\n * @return 0 if the data could be processed, otherwise the number of bytes\n * required for data to be processed.\n */\n c.process = function(data) {\n var rval = 0;\n\n // buffer input data\n if(data) {\n c.input.putBytes(data);\n }\n\n // process next record if no failure, process will be called after\n // each record is handled (since handling can be asynchronous)\n if(!c.fail) {\n // reset record if ready and now empty\n if(c.record !== null &&\n c.record.ready && c.record.fragment.isEmpty()) {\n c.record = null;\n }\n\n // if there is no pending record, try to read record header\n if(c.record === null) {\n rval = _readRecordHeader(c);\n }\n\n // read the next record (if record not yet ready)\n if(!c.fail && c.record !== null && !c.record.ready) {\n rval = _readRecord(c);\n }\n\n // record ready to be handled, update engine state\n if(!c.fail && c.record !== null && c.record.ready) {\n _update(c, c.record);\n }\n }\n\n return rval;\n };\n\n /**\n * Requests that application data be packaged into a TLS record. The\n * tlsDataReady handler will be called when the TLS record(s) have been\n * prepared.\n *\n * @param data the application data, as a raw 'binary' encoded string, to\n * be sent; to send utf-16/utf-8 string data, use the return value\n * of util.encodeUtf8(str).\n *\n * @return true on success, false on failure.\n */\n c.prepare = function(data) {\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.application_data,\n data: forge.util.createBuffer(data)\n }));\n return tls.flush(c);\n };\n\n /**\n * Requests that a heartbeat request be packaged into a TLS record for\n * transmission. The tlsDataReady handler will be called when TLS record(s)\n * have been prepared.\n *\n * When a heartbeat response has been received, the heartbeatReceived\n * handler will be called with the matching payload. 
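// Illustrative sketch (not part of the forge source): the data flow around
// c.process() and c.prepare() described above. `conn` stands for a connection
// from forge.tls.createConnection() and `transport` for any byte stream that
// delivers raw TLS data as binary-encoded strings and accepts an on('data')
// listener -- both are assumptions, not part of forge.
var forge = require('node-forge');
function attach(conn, transport) {
  transport.on('data', function(bytes) {
    // returns 0 when the data could be processed, otherwise the number of
    // additional bytes the engine needs before it can continue
    conn.process(bytes);
  });
}
function sendApplicationData(conn, str) {
  // queue + flush; tlsDataReady() then sees the encrypted records in conn.tlsData
  return conn.prepare(forge.util.encodeUtf8(str));
}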
This handler can\n * be used to clear a retransmission timer, etc.\n *\n * @param payload the heartbeat data to send as the payload in the message.\n * @param [payloadLength] the payload length to use, defaults to the\n * actual payload length.\n *\n * @return true on success, false on failure.\n */\n c.prepareHeartbeatRequest = function(payload, payloadLength) {\n if(payload instanceof forge.util.ByteBuffer) {\n payload = payload.bytes();\n }\n if(typeof payloadLength === 'undefined') {\n payloadLength = payload.length;\n }\n c.expectedHeartbeatPayload = payload;\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.heartbeat,\n data: tls.createHeartbeat(\n tls.HeartbeatMessageType.heartbeat_request, payload, payloadLength)\n }));\n return tls.flush(c);\n };\n\n /**\n * Closes the connection (sends a close_notify alert).\n *\n * @param clearFail true to clear the fail flag (default: true).\n */\n c.close = function(clearFail) {\n // save session if connection didn't fail\n if(!c.fail && c.sessionCache && c.session) {\n // only need to preserve session ID, version, and security params\n var session = {\n id: c.session.id,\n version: c.session.version,\n sp: c.session.sp\n };\n session.sp.keys = null;\n c.sessionCache.setSession(session.id, session);\n }\n\n if(c.open) {\n // connection no longer open, clear input\n c.open = false;\n c.input.clear();\n\n // if connected or handshaking, send an alert\n if(c.isConnected || c.handshaking) {\n c.isConnected = c.handshaking = false;\n\n // send close_notify alert\n tls.queue(c, tls.createAlert(c, {\n level: tls.Alert.Level.warning,\n description: tls.Alert.Description.close_notify\n }));\n tls.flush(c);\n }\n\n // call handler\n c.closed(c);\n }\n\n // reset TLS connection, do not clear fail flag\n c.reset(clearFail);\n };\n\n return c;\n};\n\n/* TLS API */\nmodule.exports = forge.tls = forge.tls || {};\n\n// expose non-functions\nfor(var key in tls) {\n if(typeof tls[key] !== 'function') {\n forge.tls[key] = tls[key];\n }\n}\n\n// expose prf_tls1 for testing\nforge.tls.prf_tls1 = prf_TLS1;\n\n// expose sha1 hmac method\nforge.tls.hmac_sha1 = hmac_sha1;\n\n// expose session cache creation\nforge.tls.createSessionCache = tls.createSessionCache;\n\n/**\n * Creates a new TLS connection. This does not make any assumptions about the\n * transport layer that TLS is working on top of, ie: it does not assume there\n * is a TCP/IP connection or establish one. A TLS connection is totally\n * abstracted away from the layer is runs on top of, it merely establishes a\n * secure channel between a client\" and a \"server\".\n *\n * A TLS connection contains 4 connection states: pending read and write, and\n * current read and write.\n *\n * At initialization, the current read and write states will be null. Only once\n * the security parameters have been set and the keys have been generated can\n * the pending states be converted into current states. Current states will be\n * updated for each record processed.\n *\n * A custom certificate verify callback may be provided to check information\n * like the common name on the server's certificate. It will be called for\n * every certificate in the chain. 
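// Illustrative sketch (not part of the forge source): issuing an RFC 6520
// heartbeat request on an open connection and handling the echoed payload via
// the heartbeatReceived option named above. `conn` is a placeholder for an
// established connection.
function sendHeartbeat(conn) {
  var payload = 'keepalive-' + Date.now();
  return conn.prepareHeartbeatRequest(payload);
  // supplied at connection-creation time:
  //   heartbeatReceived: function(c, echoed) {
  //     // `echoed` carries the payload returned by the peer and can be
  //     // compared against the payload sent above, e.g. to clear a timer
  //   }
}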
It has the following signature:\n *\n * variable func(c, certs, index, preVerify)\n * Where:\n * c The TLS connection\n * verified Set to true if certificate was verified, otherwise the alert\n * tls.Alert.Description for why the certificate failed.\n * depth The current index in the chain, where 0 is the server's cert.\n * certs The certificate chain, *NOTE* if the server was anonymous then\n * the chain will be empty.\n *\n * The function returns true on success and on failure either the appropriate\n * tls.Alert.Description or an object with 'alert' set to the appropriate\n * tls.Alert.Description and 'message' set to a custom error message. If true\n * is not returned then the connection will abort using, in order of\n * availability, first the returned alert description, second the preVerify\n * alert description, and lastly the default 'bad_certificate'.\n *\n * There are three callbacks that can be used to make use of client-side\n * certificates where each takes the TLS connection as the first parameter:\n *\n * getCertificate(conn, hint)\n * The second parameter is a hint as to which certificate should be\n * returned. If the connection entity is a client, then the hint will be\n * the CertificateRequest message from the server that is part of the\n * TLS protocol. If the connection entity is a server, then it will be\n * the servername list provided via an SNI extension the ClientHello, if\n * one was provided (empty array if not). The hint can be examined to\n * determine which certificate to use (advanced). Most implementations\n * will just return a certificate. The return value must be a\n * PEM-formatted certificate or an array of PEM-formatted certificates\n * that constitute a certificate chain, with the first in the array/chain\n * being the client's certificate.\n * getPrivateKey(conn, certificate)\n * The second parameter is an forge.pki X.509 certificate object that\n * is associated with the requested private key. The return value must\n * be a PEM-formatted private key.\n * getSignature(conn, bytes, callback)\n * This callback can be used instead of getPrivateKey if the private key\n * is not directly accessible in javascript or should not be. For\n * instance, a secure external web service could provide the signature\n * in exchange for appropriate credentials. The second parameter is a\n * string of bytes to be signed that are part of the TLS protocol. These\n * bytes are used to verify that the private key for the previously\n * provided client-side certificate is accessible to the client. The\n * callback is a function that takes 2 parameters, the TLS connection\n * and the RSA encrypted (signed) bytes as a string. 
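// Illustrative sketch (not part of the forge source): a verify callback with
// the signature documented above -- accept whatever the built-in chain check
// decided, but additionally pin the server certificate's common name.
// EXPECTED_CN is a placeholder value.
var forge = require('node-forge');
var EXPECTED_CN = 'example.com';  // placeholder host name
function verify(connection, verified, depth, certs) {
  if(depth === 0) {
    var cn = certs[0].subject.getField('CN').value;
    if(cn !== EXPECTED_CN) {
      return {
        alert: forge.tls.Alert.Description.bad_certificate,
        message: 'Certificate common name does not match the expected host.'
      };
    }
  }
  return verified;  // true, or the alert description from the built-in check
}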
This callback must\n * be called once the signature is ready.\n *\n * @param options the options for this connection:\n * server: true if the connection is server-side, false for client.\n * sessionId: a session ID to reuse, null for a new connection.\n * caStore: an array of certificates to trust.\n * sessionCache: a session cache to use.\n * cipherSuites: an optional array of cipher suites to use,\n * see tls.CipherSuites.\n * connected: function(conn) called when the first handshake completes.\n * virtualHost: the virtual server name to use in a TLS SNI extension.\n * verifyClient: true to require a client certificate in server mode,\n * 'optional' to request one, false not to (default: false).\n * verify: a handler used to custom verify certificates in the chain.\n * verifyOptions: an object with options for the certificate chain validation.\n * See documentation of pki.verifyCertificateChain for possible options.\n * verifyOptions.verify is ignored. If you wish to specify a verify handler\n * use the verify key.\n * getCertificate: an optional callback used to get a certificate or\n * a chain of certificates (as an array).\n * getPrivateKey: an optional callback used to get a private key.\n * getSignature: an optional callback used to get a signature.\n * tlsDataReady: function(conn) called when TLS protocol data has been\n * prepared and is ready to be used (typically sent over a socket\n * connection to its destination), read from conn.tlsData buffer.\n * dataReady: function(conn) called when application data has\n * been parsed from a TLS record and should be consumed by the\n * application, read from conn.data buffer.\n * closed: function(conn) called when the connection has been closed.\n * error: function(conn, error) called when there was an error.\n * deflate: function(inBytes) if provided, will deflate TLS records using\n * the deflate algorithm if the server supports it.\n * inflate: function(inBytes) if provided, will inflate TLS records using\n * the deflate algorithm if the server supports it.\n *\n * @return the new TLS connection.\n */\nforge.tls.createConnection = tls.createConnection;\n","/**\n * Utility functions for web applications.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2018 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nvar baseN = require('./baseN');\n\n/* Utilities API */\nvar util = module.exports = forge.util = forge.util || {};\n\n// define setImmediate and nextTick\n(function() {\n // use native nextTick (unless we're in webpack)\n // webpack (or better node-libs-browser polyfill) sets process.browser.\n // this way we can detect webpack properly\n if(typeof process !== 'undefined' && process.nextTick && !process.browser) {\n util.nextTick = process.nextTick;\n if(typeof setImmediate === 'function') {\n util.setImmediate = setImmediate;\n } else {\n // polyfill setImmediate with nextTick, older versions of node\n // (those w/o setImmediate) won't totally starve IO\n util.setImmediate = util.nextTick;\n }\n return;\n }\n\n // polyfill nextTick with native setImmediate\n if(typeof setImmediate === 'function') {\n util.setImmediate = function() { return setImmediate.apply(undefined, arguments); };\n util.nextTick = function(callback) {\n return setImmediate(callback);\n };\n return;\n }\n\n /* Note: A polyfill upgrade pattern is used here to allow combining\n polyfills. For example, MutationObserver is fast, but blocks UI updates,\n so it needs to allow UI updates periodically, so it falls back on\n postMessage or setTimeout. 
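// Illustrative sketch (not part of the forge source): a minimal client-side
// connection wired up with only options documented above. `caCertPem` (a PEM
// string to trust) and `transport` (any duplex byte stream) are placeholders
// supplied by the application.
var forge = require('node-forge');
function createTlsClient(caCertPem, transport) {
  var client = forge.tls.createConnection({
    server: false,
    virtualHost: 'example.com',                 // placeholder; sent as the SNI extension
    caStore: [caCertPem],
    sessionCache: forge.tls.createSessionCache({}, 100),
    verify: function(c, verified, depth, certs) {
      return verified;                          // rely on the built-in chain validation
    },
    connected: function(c) {
      c.prepare(forge.util.encodeUtf8('GET / HTTP/1.0\r\n\r\n'));
    },
    tlsDataReady: function(c) {
      transport.write(c.tlsData.getBytes());    // encrypted records for the wire
      return true;
    },
    dataReady: function(c) {
      console.log('received: ' + c.data.getBytes());
    },
    closed: function(c) {
      console.log('connection closed');
    },
    error: function(c, err) {
      console.error('TLS error: ' + err.message);
    }
  });
  client.handshake();                           // client mode only: start the handshake
  // bytes arriving from the transport are fed in with client.process(bytes)
  return client;
}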
*/\n\n // polyfill with setTimeout\n util.setImmediate = function(callback) {\n setTimeout(callback, 0);\n };\n\n // upgrade polyfill to use postMessage\n if(typeof window !== 'undefined' &&\n typeof window.postMessage === 'function') {\n var msg = 'forge.setImmediate';\n var callbacks = [];\n util.setImmediate = function(callback) {\n callbacks.push(callback);\n // only send message when one hasn't been sent in\n // the current turn of the event loop\n if(callbacks.length === 1) {\n window.postMessage(msg, '*');\n }\n };\n function handler(event) {\n if(event.source === window && event.data === msg) {\n event.stopPropagation();\n var copy = callbacks.slice();\n callbacks.length = 0;\n copy.forEach(function(callback) {\n callback();\n });\n }\n }\n window.addEventListener('message', handler, true);\n }\n\n // upgrade polyfill to use MutationObserver\n if(typeof MutationObserver !== 'undefined') {\n // polyfill with MutationObserver\n var now = Date.now();\n var attr = true;\n var div = document.createElement('div');\n var callbacks = [];\n new MutationObserver(function() {\n var copy = callbacks.slice();\n callbacks.length = 0;\n copy.forEach(function(callback) {\n callback();\n });\n }).observe(div, {attributes: true});\n var oldSetImmediate = util.setImmediate;\n util.setImmediate = function(callback) {\n if(Date.now() - now > 15) {\n now = Date.now();\n oldSetImmediate(callback);\n } else {\n callbacks.push(callback);\n // only trigger observer when it hasn't been triggered in\n // the current turn of the event loop\n if(callbacks.length === 1) {\n div.setAttribute('a', attr = !attr);\n }\n }\n };\n }\n\n util.nextTick = util.setImmediate;\n})();\n\n// check if running under Node.js\nutil.isNodejs =\n typeof process !== 'undefined' && process.versions && process.versions.node;\n\n\n// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while\n// it will point to `window` in the main thread.\n// To remain compatible with older browsers, we fall back to 'window' if 'self'\n// is not available.\nutil.globalScope = (function() {\n if(util.isNodejs) {\n return global;\n }\n\n return typeof self === 'undefined' ? window : self;\n})();\n\n// define isArray\nutil.isArray = Array.isArray || function(x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n};\n\n// define isArrayBuffer\nutil.isArrayBuffer = function(x) {\n return typeof ArrayBuffer !== 'undefined' && x instanceof ArrayBuffer;\n};\n\n// define isArrayBufferView\nutil.isArrayBufferView = function(x) {\n return x && util.isArrayBuffer(x.buffer) && x.byteLength !== undefined;\n};\n\n/**\n * Ensure a bits param is 8, 16, 24, or 32. 
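// Illustrative sketch (not part of the forge source): the environment helpers
// defined above in action under Node.js.
var forge = require('node-forge');
console.log(forge.util.isNodejs);                           // truthy under Node.js
console.log(forge.util.isArray([1, 2, 3]));                 // true
console.log(forge.util.isArrayBuffer(new ArrayBuffer(4)));  // true
forge.util.nextTick(function() {
  console.log('deferred via the nextTick/setImmediate selection above');
});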
Used to validate input for\n * algorithms where bit manipulation, JavaScript limitations, and/or algorithm\n * design only allow for byte operations of a limited size.\n *\n * @param n number of bits.\n *\n * Throw Error if n invalid.\n */\nfunction _checkBitsParam(n) {\n if(!(n === 8 || n === 16 || n === 24 || n === 32)) {\n throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n);\n }\n}\n\n// TODO: set ByteBuffer to best available backing\nutil.ByteBuffer = ByteStringBuffer;\n\n/** Buffer w/BinaryString backing */\n\n/**\n * Constructor for a binary string backed byte buffer.\n *\n * @param [b] the bytes to wrap (either encoded as string, one byte per\n * character, or as an ArrayBuffer or Typed Array).\n */\nfunction ByteStringBuffer(b) {\n // TODO: update to match DataBuffer API\n\n // the data in this buffer\n this.data = '';\n // the pointer for reading from this buffer\n this.read = 0;\n\n if(typeof b === 'string') {\n this.data = b;\n } else if(util.isArrayBuffer(b) || util.isArrayBufferView(b)) {\n if(typeof Buffer !== 'undefined' && b instanceof Buffer) {\n this.data = b.toString('binary');\n } else {\n // convert native buffer to forge buffer\n // FIXME: support native buffers internally instead\n var arr = new Uint8Array(b);\n try {\n this.data = String.fromCharCode.apply(null, arr);\n } catch(e) {\n for(var i = 0; i < arr.length; ++i) {\n this.putByte(arr[i]);\n }\n }\n }\n } else if(b instanceof ByteStringBuffer ||\n (typeof b === 'object' && typeof b.data === 'string' &&\n typeof b.read === 'number')) {\n // copy existing buffer\n this.data = b.data;\n this.read = b.read;\n }\n\n // used for v8 optimization\n this._constructedStringLength = 0;\n}\nutil.ByteStringBuffer = ByteStringBuffer;\n\n/* Note: This is an optimization for V8-based browsers. When V8 concatenates\n a string, the strings are only joined logically using a \"cons string\" or\n \"constructed/concatenated string\". These containers keep references to one\n another and can result in very large memory usage. For example, if a 2MB\n string is constructed by concatenating 4 bytes together at a time, the\n memory usage will be ~44MB; so ~22x increase. The strings are only joined\n together when an operation requiring their joining takes place, such as\n substr(). This function is called when adding data to this buffer to ensure\n these types of strings are periodically joined to reduce the memory\n footprint. 
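// Illustrative sketch (not part of the forge source): the constructor inputs
// accepted by ByteStringBuffer, reached through the forge.util.createBuffer
// wrapper used throughout this module.
var forge = require('node-forge');
var fromString = forge.util.createBuffer('\x01\x02\x03');            // binary string, one byte per char
var fromTyped = forge.util.createBuffer(new Uint8Array([1, 2, 3]));  // Typed Array / ArrayBuffer input
console.log(fromString.toHex(), fromTyped.toHex());                  // '010203' '010203'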
*/\nvar _MAX_CONSTRUCTED_STRING_LENGTH = 4096;\nutil.ByteStringBuffer.prototype._optimizeConstructedString = function(x) {\n this._constructedStringLength += x;\n if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) {\n // this substr() should cause the constructed string to join\n this.data.substr(0, 1);\n this._constructedStringLength = 0;\n }\n};\n\n/**\n * Gets the number of bytes in this buffer.\n *\n * @return the number of bytes in this buffer.\n */\nutil.ByteStringBuffer.prototype.length = function() {\n return this.data.length - this.read;\n};\n\n/**\n * Gets whether or not this buffer is empty.\n *\n * @return true if this buffer is empty, false if not.\n */\nutil.ByteStringBuffer.prototype.isEmpty = function() {\n return this.length() <= 0;\n};\n\n/**\n * Puts a byte in this buffer.\n *\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putByte = function(b) {\n return this.putBytes(String.fromCharCode(b));\n};\n\n/**\n * Puts a byte in this buffer N times.\n *\n * @param b the byte to put.\n * @param n the number of bytes of value b to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.fillWithByte = function(b, n) {\n b = String.fromCharCode(b);\n var d = this.data;\n while(n > 0) {\n if(n & 1) {\n d += b;\n }\n n >>>= 1;\n if(n > 0) {\n b += b;\n }\n }\n this.data = d;\n this._optimizeConstructedString(n);\n return this;\n};\n\n/**\n * Puts bytes in this buffer.\n *\n * @param bytes the bytes (as a binary encoded string) to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putBytes = function(bytes) {\n this.data += bytes;\n this._optimizeConstructedString(bytes.length);\n return this;\n};\n\n/**\n * Puts a UTF-16 encoded string into this buffer.\n *\n * @param str the string to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putString = function(str) {\n return this.putBytes(util.encodeUtf8(str));\n};\n\n/**\n * Puts a 16-bit integer in this buffer in big-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt16 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 24-bit integer in this buffer in big-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt24 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 32-bit integer in this buffer in big-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt32 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 24 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 16-bit integer in this buffer in little-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt16Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF));\n};\n\n/**\n * Puts a 24-bit integer in this buffer in little-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt24Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) 
+\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF));\n};\n\n/**\n * Puts a 32-bit integer in this buffer in little-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt32Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 24 & 0xFF));\n};\n\n/**\n * Puts an n-bit integer in this buffer in big-endian order.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt = function(i, n) {\n _checkBitsParam(n);\n var bytes = '';\n do {\n n -= 8;\n bytes += String.fromCharCode((i >> n) & 0xFF);\n } while(n > 0);\n return this.putBytes(bytes);\n};\n\n/**\n * Puts a signed n-bit integer in this buffer in big-endian order. Two's\n * complement representation is used.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putSignedInt = function(i, n) {\n // putInt checks n\n if(i < 0) {\n i += 2 << (n - 1);\n }\n return this.putInt(i, n);\n};\n\n/**\n * Puts the given buffer into this buffer.\n *\n * @param buffer the buffer to put into this one.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putBuffer = function(buffer) {\n return this.putBytes(buffer.getBytes());\n};\n\n/**\n * Gets a byte from this buffer and advances the read pointer by 1.\n *\n * @return the byte.\n */\nutil.ByteStringBuffer.prototype.getByte = function() {\n return this.data.charCodeAt(this.read++);\n};\n\n/**\n * Gets a uint16 from this buffer in big-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.ByteStringBuffer.prototype.getInt16 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 8 ^\n this.data.charCodeAt(this.read + 1));\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in big-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.ByteStringBuffer.prototype.getInt24 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 16 ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2));\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in big-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.ByteStringBuffer.prototype.getInt32 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 24 ^\n this.data.charCodeAt(this.read + 1) << 16 ^\n this.data.charCodeAt(this.read + 2) << 8 ^\n this.data.charCodeAt(this.read + 3));\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets a uint16 from this buffer in little-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.ByteStringBuffer.prototype.getInt16Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in little-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.ByteStringBuffer.prototype.getInt24Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2) << 16);\n 
this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in little-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.ByteStringBuffer.prototype.getInt32Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2) << 16 ^\n this.data.charCodeAt(this.read + 3) << 24);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets an n-bit integer from this buffer in big-endian order and advances the\n * read pointer by ceil(n/8).\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.ByteStringBuffer.prototype.getInt = function(n) {\n _checkBitsParam(n);\n var rval = 0;\n do {\n // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.\n rval = (rval << 8) + this.data.charCodeAt(this.read++);\n n -= 8;\n } while(n > 0);\n return rval;\n};\n\n/**\n * Gets a signed n-bit integer from this buffer in big-endian order, using\n * two's complement, and advances the read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.ByteStringBuffer.prototype.getSignedInt = function(n) {\n // getInt checks n\n var x = this.getInt(n);\n var max = 2 << (n - 2);\n if(x >= max) {\n x -= max << 1;\n }\n return x;\n};\n\n/**\n * Reads bytes out as a binary encoded string and clears them from the\n * buffer. Note that the resulting string is binary encoded (in node.js this\n * encoding is referred to as `binary`, it is *not* `utf8`).\n *\n * @param count the number of bytes to read, undefined or null for all.\n *\n * @return a binary encoded string of bytes.\n */\nutil.ByteStringBuffer.prototype.getBytes = function(count) {\n var rval;\n if(count) {\n // read count bytes\n count = Math.min(this.length(), count);\n rval = this.data.slice(this.read, this.read + count);\n this.read += count;\n } else if(count === 0) {\n rval = '';\n } else {\n // read all bytes, optimize to only copy when needed\n rval = (this.read === 0) ? 
this.data : this.data.slice(this.read);\n this.clear();\n }\n return rval;\n};\n\n/**\n * Gets a binary encoded string of the bytes from this buffer without\n * modifying the read pointer.\n *\n * @param count the number of bytes to get, omit to get all.\n *\n * @return a string full of binary encoded characters.\n */\nutil.ByteStringBuffer.prototype.bytes = function(count) {\n return (typeof(count) === 'undefined' ?\n this.data.slice(this.read) :\n this.data.slice(this.read, this.read + count));\n};\n\n/**\n * Gets a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n *\n * @return the byte.\n */\nutil.ByteStringBuffer.prototype.at = function(i) {\n return this.data.charCodeAt(this.read + i);\n};\n\n/**\n * Puts a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.setAt = function(i, b) {\n this.data = this.data.substr(0, this.read + i) +\n String.fromCharCode(b) +\n this.data.substr(this.read + i + 1);\n return this;\n};\n\n/**\n * Gets the last byte without modifying the read pointer.\n *\n * @return the last byte.\n */\nutil.ByteStringBuffer.prototype.last = function() {\n return this.data.charCodeAt(this.data.length - 1);\n};\n\n/**\n * Creates a copy of this buffer.\n *\n * @return the copy.\n */\nutil.ByteStringBuffer.prototype.copy = function() {\n var c = util.createBuffer(this.data);\n c.read = this.read;\n return c;\n};\n\n/**\n * Compacts this buffer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.compact = function() {\n if(this.read > 0) {\n this.data = this.data.slice(this.read);\n this.read = 0;\n }\n return this;\n};\n\n/**\n * Clears this buffer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.clear = function() {\n this.data = '';\n this.read = 0;\n return this;\n};\n\n/**\n * Shortens this buffer by triming bytes off of the end of this buffer.\n *\n * @param count the number of bytes to trim off.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.truncate = function(count) {\n var len = Math.max(0, this.length() - count);\n this.data = this.data.substr(this.read, len);\n this.read = 0;\n return this;\n};\n\n/**\n * Converts this buffer to a hexadecimal string.\n *\n * @return a hexadecimal string.\n */\nutil.ByteStringBuffer.prototype.toHex = function() {\n var rval = '';\n for(var i = this.read; i < this.data.length; ++i) {\n var b = this.data.charCodeAt(i);\n if(b < 16) {\n rval += '0';\n }\n rval += b.toString(16);\n }\n return rval;\n};\n\n/**\n * Converts this buffer to a UTF-16 string (standard JavaScript string).\n *\n * @return a UTF-16 string.\n */\nutil.ByteStringBuffer.prototype.toString = function() {\n return util.decodeUtf8(this.bytes());\n};\n\n/** End Buffer w/BinaryString backing */\n\n/** Buffer w/UInt8Array backing */\n\n/**\n * FIXME: Experimental. Do not use yet.\n *\n * Constructor for an ArrayBuffer-backed byte buffer.\n *\n * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a\n * TypedArray.\n *\n * If a string is given, its encoding should be provided as an option,\n * otherwise it will default to 'binary'. A 'binary' string is encoded such\n * that each character is one byte in length and size.\n *\n * If an ArrayBuffer, DataView, or TypedArray is given, it will be used\n * *directly* without any copying. 
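// Usage sketch for the ByteStringBuffer API documented above; an illustration
// only, under one assumption: this bundled util module matches what the npm
// "node-forge" package exposes as forge.util.
var forge = require('node-forge');     // assumption: package name
var buf = forge.util.createBuffer();   // ByteStringBuffer-backed buffer
buf.putByte(0x01);                     // one byte
buf.putInt16(0xBEEF);                  // 16-bit integer, big-endian
buf.putBytes('\x00\xff');              // binary-encoded string of bytes
console.log(buf.toHex());              // '01beef00ff'
console.log(buf.getByte());            // 1 (read pointer advances by one byte)
console.log(buf.length());             // 4 bytes left to read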
Note that, if a write to the buffer requires\n * more space, the buffer will allocate a new backing ArrayBuffer to\n * accommodate. The starting read and write offsets for the buffer may be\n * given as options.\n *\n * @param [b] the initial bytes for this buffer.\n * @param options the options to use:\n * [readOffset] the starting read offset to use (default: 0).\n * [writeOffset] the starting write offset to use (default: the\n * length of the first parameter).\n * [growSize] the minimum amount, in bytes, to grow the buffer by to\n * accommodate writes (default: 1024).\n * [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the\n * first parameter, if it is a string (default: 'binary').\n */\nfunction DataBuffer(b, options) {\n // default options\n options = options || {};\n\n // pointers for read from/write to buffer\n this.read = options.readOffset || 0;\n this.growSize = options.growSize || 1024;\n\n var isArrayBuffer = util.isArrayBuffer(b);\n var isArrayBufferView = util.isArrayBufferView(b);\n if(isArrayBuffer || isArrayBufferView) {\n // use ArrayBuffer directly\n if(isArrayBuffer) {\n this.data = new DataView(b);\n } else {\n // TODO: adjust read/write offset based on the type of view\n // or specify that this must be done in the options ... that the\n // offsets are byte-based\n this.data = new DataView(b.buffer, b.byteOffset, b.byteLength);\n }\n this.write = ('writeOffset' in options ?\n options.writeOffset : this.data.byteLength);\n return;\n }\n\n // initialize to empty array buffer and add any given bytes using putBytes\n this.data = new DataView(new ArrayBuffer(0));\n this.write = 0;\n\n if(b !== null && b !== undefined) {\n this.putBytes(b);\n }\n\n if('writeOffset' in options) {\n this.write = options.writeOffset;\n }\n}\nutil.DataBuffer = DataBuffer;\n\n/**\n * Gets the number of bytes in this buffer.\n *\n * @return the number of bytes in this buffer.\n */\nutil.DataBuffer.prototype.length = function() {\n return this.write - this.read;\n};\n\n/**\n * Gets whether or not this buffer is empty.\n *\n * @return true if this buffer is empty, false if not.\n */\nutil.DataBuffer.prototype.isEmpty = function() {\n return this.length() <= 0;\n};\n\n/**\n * Ensures this buffer has enough empty space to accommodate the given number\n * of bytes. An optional parameter may be given that indicates a minimum\n * amount to grow the buffer if necessary. 
If the parameter is not given,\n * the buffer will be grown by some previously-specified default amount\n * or heuristic.\n *\n * @param amount the number of bytes to accommodate.\n * @param [growSize] the minimum amount, in bytes, to grow the buffer by if\n * necessary.\n */\nutil.DataBuffer.prototype.accommodate = function(amount, growSize) {\n if(this.length() >= amount) {\n return this;\n }\n growSize = Math.max(growSize || this.growSize, amount);\n\n // grow buffer\n var src = new Uint8Array(\n this.data.buffer, this.data.byteOffset, this.data.byteLength);\n var dst = new Uint8Array(this.length() + growSize);\n dst.set(src);\n this.data = new DataView(dst.buffer);\n\n return this;\n};\n\n/**\n * Puts a byte in this buffer.\n *\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putByte = function(b) {\n this.accommodate(1);\n this.data.setUint8(this.write++, b);\n return this;\n};\n\n/**\n * Puts a byte in this buffer N times.\n *\n * @param b the byte to put.\n * @param n the number of bytes of value b to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.fillWithByte = function(b, n) {\n this.accommodate(n);\n for(var i = 0; i < n; ++i) {\n this.data.setUint8(b);\n }\n return this;\n};\n\n/**\n * Puts bytes in this buffer. The bytes may be given as a string, an\n * ArrayBuffer, a DataView, or a TypedArray.\n *\n * @param bytes the bytes to put.\n * @param [encoding] the encoding for the first parameter ('binary', 'utf8',\n * 'utf16', 'hex'), if it is a string (default: 'binary').\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putBytes = function(bytes, encoding) {\n if(util.isArrayBufferView(bytes)) {\n var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);\n var len = src.byteLength - src.byteOffset;\n this.accommodate(len);\n var dst = new Uint8Array(this.data.buffer, this.write);\n dst.set(src);\n this.write += len;\n return this;\n }\n\n if(util.isArrayBuffer(bytes)) {\n var src = new Uint8Array(bytes);\n this.accommodate(src.byteLength);\n var dst = new Uint8Array(this.data.buffer);\n dst.set(src, this.write);\n this.write += src.byteLength;\n return this;\n }\n\n // bytes is a util.DataBuffer or equivalent\n if(bytes instanceof util.DataBuffer ||\n (typeof bytes === 'object' &&\n typeof bytes.read === 'number' && typeof bytes.write === 'number' &&\n util.isArrayBufferView(bytes.data))) {\n var src = new Uint8Array(bytes.data.byteLength, bytes.read, bytes.length());\n this.accommodate(src.byteLength);\n var dst = new Uint8Array(bytes.data.byteLength, this.write);\n dst.set(src);\n this.write += src.byteLength;\n return this;\n }\n\n if(bytes instanceof util.ByteStringBuffer) {\n // copy binary string and process as the same as a string parameter below\n bytes = bytes.data;\n encoding = 'binary';\n }\n\n // string conversion\n encoding = encoding || 'binary';\n if(typeof bytes === 'string') {\n var view;\n\n // decode from string\n if(encoding === 'hex') {\n this.accommodate(Math.ceil(bytes.length / 2));\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.hex.decode(bytes, view, this.write);\n return this;\n }\n if(encoding === 'base64') {\n this.accommodate(Math.ceil(bytes.length / 4) * 3);\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.base64.decode(bytes, view, this.write);\n return this;\n }\n\n // encode text as UTF-8 bytes\n if(encoding === 'utf8') {\n // encode as UTF-8 then decode string as raw binary\n bytes = 
util.encodeUtf8(bytes);\n encoding = 'binary';\n }\n\n // decode string as raw binary\n if(encoding === 'binary' || encoding === 'raw') {\n // one byte per character\n this.accommodate(bytes.length);\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.raw.decode(view);\n return this;\n }\n\n // encode text as UTF-16 bytes\n if(encoding === 'utf16') {\n // two bytes per character\n this.accommodate(bytes.length * 2);\n view = new Uint16Array(this.data.buffer, this.write);\n this.write += util.text.utf16.encode(view);\n return this;\n }\n\n throw new Error('Invalid encoding: ' + encoding);\n }\n\n throw Error('Invalid parameter: ' + bytes);\n};\n\n/**\n * Puts the given buffer into this buffer.\n *\n * @param buffer the buffer to put into this one.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putBuffer = function(buffer) {\n this.putBytes(buffer);\n buffer.clear();\n return this;\n};\n\n/**\n * Puts a string into this buffer.\n *\n * @param str the string to put.\n * @param [encoding] the encoding for the string (default: 'utf16').\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putString = function(str) {\n return this.putBytes(str, 'utf16');\n};\n\n/**\n * Puts a 16-bit integer in this buffer in big-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt16 = function(i) {\n this.accommodate(2);\n this.data.setInt16(this.write, i);\n this.write += 2;\n return this;\n};\n\n/**\n * Puts a 24-bit integer in this buffer in big-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt24 = function(i) {\n this.accommodate(3);\n this.data.setInt16(this.write, i >> 8 & 0xFFFF);\n this.data.setInt8(this.write, i >> 16 & 0xFF);\n this.write += 3;\n return this;\n};\n\n/**\n * Puts a 32-bit integer in this buffer in big-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt32 = function(i) {\n this.accommodate(4);\n this.data.setInt32(this.write, i);\n this.write += 4;\n return this;\n};\n\n/**\n * Puts a 16-bit integer in this buffer in little-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt16Le = function(i) {\n this.accommodate(2);\n this.data.setInt16(this.write, i, true);\n this.write += 2;\n return this;\n};\n\n/**\n * Puts a 24-bit integer in this buffer in little-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt24Le = function(i) {\n this.accommodate(3);\n this.data.setInt8(this.write, i >> 16 & 0xFF);\n this.data.setInt16(this.write, i >> 8 & 0xFFFF, true);\n this.write += 3;\n return this;\n};\n\n/**\n * Puts a 32-bit integer in this buffer in little-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt32Le = function(i) {\n this.accommodate(4);\n this.data.setInt32(this.write, i, true);\n this.write += 4;\n return this;\n};\n\n/**\n * Puts an n-bit integer in this buffer in big-endian order.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt = function(i, n) {\n _checkBitsParam(n);\n this.accommodate(n / 8);\n do {\n n -= 8;\n this.data.setInt8(this.write++, (i >> n) & 0xFF);\n } while(n > 0);\n return this;\n};\n\n/**\n * Puts a 
signed n-bit integer in this buffer in big-endian order. Two's\n * complement representation is used.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putSignedInt = function(i, n) {\n _checkBitsParam(n);\n this.accommodate(n / 8);\n if(i < 0) {\n i += 2 << (n - 1);\n }\n return this.putInt(i, n);\n};\n\n/**\n * Gets a byte from this buffer and advances the read pointer by 1.\n *\n * @return the byte.\n */\nutil.DataBuffer.prototype.getByte = function() {\n return this.data.getInt8(this.read++);\n};\n\n/**\n * Gets a uint16 from this buffer in big-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.DataBuffer.prototype.getInt16 = function() {\n var rval = this.data.getInt16(this.read);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in big-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.DataBuffer.prototype.getInt24 = function() {\n var rval = (\n this.data.getInt16(this.read) << 8 ^\n this.data.getInt8(this.read + 2));\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in big-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.DataBuffer.prototype.getInt32 = function() {\n var rval = this.data.getInt32(this.read);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets a uint16 from this buffer in little-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.DataBuffer.prototype.getInt16Le = function() {\n var rval = this.data.getInt16(this.read, true);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in little-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.DataBuffer.prototype.getInt24Le = function() {\n var rval = (\n this.data.getInt8(this.read) ^\n this.data.getInt16(this.read + 1, true) << 8);\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in little-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.DataBuffer.prototype.getInt32Le = function() {\n var rval = this.data.getInt32(this.read, true);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets an n-bit integer from this buffer in big-endian order and advances the\n * read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.DataBuffer.prototype.getInt = function(n) {\n _checkBitsParam(n);\n var rval = 0;\n do {\n // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.\n rval = (rval << 8) + this.data.getInt8(this.read++);\n n -= 8;\n } while(n > 0);\n return rval;\n};\n\n/**\n * Gets a signed n-bit integer from this buffer in big-endian order, using\n * two's complement, and advances the read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.DataBuffer.prototype.getSignedInt = function(n) {\n // getInt checks n\n var x = this.getInt(n);\n var max = 2 << (n - 2);\n if(x >= max) {\n x -= max << 1;\n }\n return x;\n};\n\n/**\n * Reads bytes out as a binary encoded string and clears them from the\n * buffer.\n *\n * @param count the number of bytes to read, undefined or null for all.\n *\n * @return a binary encoded string of bytes.\n */\nutil.DataBuffer.prototype.getBytes = function(count) {\n // TODO: deprecate this method, it 
is poorly named and\n // this.toString('binary') replaces it\n // add a toTypedArray()/toArrayBuffer() function\n var rval;\n if(count) {\n // read count bytes\n count = Math.min(this.length(), count);\n rval = this.data.slice(this.read, this.read + count);\n this.read += count;\n } else if(count === 0) {\n rval = '';\n } else {\n // read all bytes, optimize to only copy when needed\n rval = (this.read === 0) ? this.data : this.data.slice(this.read);\n this.clear();\n }\n return rval;\n};\n\n/**\n * Gets a binary encoded string of the bytes from this buffer without\n * modifying the read pointer.\n *\n * @param count the number of bytes to get, omit to get all.\n *\n * @return a string full of binary encoded characters.\n */\nutil.DataBuffer.prototype.bytes = function(count) {\n // TODO: deprecate this method, it is poorly named, add \"getString()\"\n return (typeof(count) === 'undefined' ?\n this.data.slice(this.read) :\n this.data.slice(this.read, this.read + count));\n};\n\n/**\n * Gets a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n *\n * @return the byte.\n */\nutil.DataBuffer.prototype.at = function(i) {\n return this.data.getUint8(this.read + i);\n};\n\n/**\n * Puts a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.setAt = function(i, b) {\n this.data.setUint8(i, b);\n return this;\n};\n\n/**\n * Gets the last byte without modifying the read pointer.\n *\n * @return the last byte.\n */\nutil.DataBuffer.prototype.last = function() {\n return this.data.getUint8(this.write - 1);\n};\n\n/**\n * Creates a copy of this buffer.\n *\n * @return the copy.\n */\nutil.DataBuffer.prototype.copy = function() {\n return new util.DataBuffer(this);\n};\n\n/**\n * Compacts this buffer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.compact = function() {\n if(this.read > 0) {\n var src = new Uint8Array(this.data.buffer, this.read);\n var dst = new Uint8Array(src.byteLength);\n dst.set(src);\n this.data = new DataView(dst);\n this.write -= this.read;\n this.read = 0;\n }\n return this;\n};\n\n/**\n * Clears this buffer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.clear = function() {\n this.data = new DataView(new ArrayBuffer(0));\n this.read = this.write = 0;\n return this;\n};\n\n/**\n * Shortens this buffer by triming bytes off of the end of this buffer.\n *\n * @param count the number of bytes to trim off.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.truncate = function(count) {\n this.write = Math.max(0, this.length() - count);\n this.read = Math.min(this.read, this.write);\n return this;\n};\n\n/**\n * Converts this buffer to a hexadecimal string.\n *\n * @return a hexadecimal string.\n */\nutil.DataBuffer.prototype.toHex = function() {\n var rval = '';\n for(var i = this.read; i < this.data.byteLength; ++i) {\n var b = this.data.getUint8(i);\n if(b < 16) {\n rval += '0';\n }\n rval += b.toString(16);\n }\n return rval;\n};\n\n/**\n * Converts this buffer to a string, using the given encoding. 
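// Sketch of the two's-complement round trip implemented by putSignedInt /
// getSignedInt above. It uses the string-backed buffer rather than DataBuffer,
// which the source itself flags as "FIXME: Experimental. Do not use yet."
// Assumption: forge.util here matches the npm "node-forge" package.
var forge = require('node-forge');
var b = forge.util.createBuffer();
b.putSignedInt(-5, 16);            // stored as 0xFFFB
console.log(b.toHex());            // 'fffb'
console.log(b.getSignedInt(16));   // -5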
If no\n * encoding is given, 'utf8' (UTF-8) is used.\n *\n * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex',\n * 'base64' (default: 'utf8').\n *\n * @return a string representation of the bytes in this buffer.\n */\nutil.DataBuffer.prototype.toString = function(encoding) {\n var view = new Uint8Array(this.data, this.read, this.length());\n encoding = encoding || 'utf8';\n\n // encode to string\n if(encoding === 'binary' || encoding === 'raw') {\n return util.binary.raw.encode(view);\n }\n if(encoding === 'hex') {\n return util.binary.hex.encode(view);\n }\n if(encoding === 'base64') {\n return util.binary.base64.encode(view);\n }\n\n // decode to text\n if(encoding === 'utf8') {\n return util.text.utf8.decode(view);\n }\n if(encoding === 'utf16') {\n return util.text.utf16.decode(view);\n }\n\n throw new Error('Invalid encoding: ' + encoding);\n};\n\n/** End Buffer w/UInt8Array backing */\n\n/**\n * Creates a buffer that stores bytes. A value may be given to populate the\n * buffer with data. This value can either be string of encoded bytes or a\n * regular string of characters. When passing a string of binary encoded\n * bytes, the encoding `raw` should be given. This is also the default. When\n * passing a string of characters, the encoding `utf8` should be given.\n *\n * @param [input] a string with encoded bytes to store in the buffer.\n * @param [encoding] (default: 'raw', other: 'utf8').\n */\nutil.createBuffer = function(input, encoding) {\n // TODO: deprecate, use new ByteBuffer() instead\n encoding = encoding || 'raw';\n if(input !== undefined && encoding === 'utf8') {\n input = util.encodeUtf8(input);\n }\n return new util.ByteBuffer(input);\n};\n\n/**\n * Fills a string with a particular value. If you want the string to be a byte\n * string, pass in String.fromCharCode(theByte).\n *\n * @param c the character to fill the string with, use String.fromCharCode\n * to fill the string with a byte value.\n * @param n the number of characters of value c to fill with.\n *\n * @return the filled string.\n */\nutil.fillString = function(c, n) {\n var s = '';\n while(n > 0) {\n if(n & 1) {\n s += c;\n }\n n >>>= 1;\n if(n > 0) {\n c += c;\n }\n }\n return s;\n};\n\n/**\n * Performs a per byte XOR between two byte strings and returns the result as a\n * string of bytes.\n *\n * @param s1 first string of bytes.\n * @param s2 second string of bytes.\n * @param n the number of bytes to XOR.\n *\n * @return the XOR'd result.\n */\nutil.xorBytes = function(s1, s2, n) {\n var s3 = '';\n var b = '';\n var t = '';\n var i = 0;\n var c = 0;\n for(; n > 0; --n, ++i) {\n b = s1.charCodeAt(i) ^ s2.charCodeAt(i);\n if(c >= 10) {\n s3 += t;\n t = '';\n c = 0;\n }\n t += String.fromCharCode(b);\n ++c;\n }\n s3 += t;\n return s3;\n};\n\n/**\n * Converts a hex string into a 'binary' encoded string of bytes.\n *\n * @param hex the hexadecimal string to convert.\n *\n * @return the binary-encoded string of bytes.\n */\nutil.hexToBytes = function(hex) {\n // TODO: deprecate: \"Deprecated. 
Use util.binary.hex.decode instead.\"\n var rval = '';\n var i = 0;\n if(hex.length & 1 == 1) {\n // odd number of characters, convert first character alone\n i = 1;\n rval += String.fromCharCode(parseInt(hex[0], 16));\n }\n // convert 2 characters (1 byte) at a time\n for(; i < hex.length; i += 2) {\n rval += String.fromCharCode(parseInt(hex.substr(i, 2), 16));\n }\n return rval;\n};\n\n/**\n * Converts a 'binary' encoded string of bytes to hex.\n *\n * @param bytes the byte string to convert.\n *\n * @return the string of hexadecimal characters.\n */\nutil.bytesToHex = function(bytes) {\n // TODO: deprecate: \"Deprecated. Use util.binary.hex.encode instead.\"\n return util.createBuffer(bytes).toHex();\n};\n\n/**\n * Converts an 32-bit integer to 4-big-endian byte string.\n *\n * @param i the integer.\n *\n * @return the byte string.\n */\nutil.int32ToBytes = function(i) {\n return (\n String.fromCharCode(i >> 24 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n// base64 characters, reverse mapping\nvar _base64 =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';\nvar _base64Idx = [\n/*43 -43 = 0*/\n/*'+', 1, 2, 3,'/' */\n 62, -1, -1, -1, 63,\n\n/*'0','1','2','3','4','5','6','7','8','9' */\n 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,\n\n/*15, 16, 17,'=', 19, 20, 21 */\n -1, -1, -1, 64, -1, -1, -1,\n\n/*65 - 43 = 22*/\n/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,\n\n/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */\n 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,\n\n/*91 - 43 = 48 */\n/*48, 49, 50, 51, 52, 53 */\n -1, -1, -1, -1, -1, -1,\n\n/*97 - 43 = 54*/\n/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */\n 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,\n\n/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */\n 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51\n];\n\n// base58 characters (Bitcoin alphabet)\nvar _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';\n\n/**\n * Base64 encodes a 'binary' encoded string of bytes.\n *\n * @param input the binary encoded string of bytes to base64-encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the base64-encoded output.\n */\nutil.encode64 = function(input, maxline) {\n // TODO: deprecate: \"Deprecated. Use util.binary.base64.encode instead.\"\n var line = '';\n var output = '';\n var chr1, chr2, chr3;\n var i = 0;\n while(i < input.length) {\n chr1 = input.charCodeAt(i++);\n chr2 = input.charCodeAt(i++);\n chr3 = input.charCodeAt(i++);\n\n // encode 4 character group\n line += _base64.charAt(chr1 >> 2);\n line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));\n if(isNaN(chr2)) {\n line += '==';\n } else {\n line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));\n line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);\n }\n\n if(maxline && line.length > maxline) {\n output += line.substr(0, maxline) + '\\r\\n';\n line = line.substr(maxline);\n }\n }\n output += line;\n return output;\n};\n\n/**\n * Base64 decodes a string into a 'binary' encoded string of bytes.\n *\n * @param input the base64-encoded input.\n *\n * @return the binary encoded string.\n */\nutil.decode64 = function(input) {\n // TODO: deprecate: \"Deprecated. 
Use util.binary.base64.decode instead.\"\n\n // remove all non-base64 characters\n input = input.replace(/[^A-Za-z0-9\\+\\/\\=]/g, '');\n\n var output = '';\n var enc1, enc2, enc3, enc4;\n var i = 0;\n\n while(i < input.length) {\n enc1 = _base64Idx[input.charCodeAt(i++) - 43];\n enc2 = _base64Idx[input.charCodeAt(i++) - 43];\n enc3 = _base64Idx[input.charCodeAt(i++) - 43];\n enc4 = _base64Idx[input.charCodeAt(i++) - 43];\n\n output += String.fromCharCode((enc1 << 2) | (enc2 >> 4));\n if(enc3 !== 64) {\n // decoded at least 2 bytes\n output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2));\n if(enc4 !== 64) {\n // decoded 3 bytes\n output += String.fromCharCode(((enc3 & 3) << 6) | enc4);\n }\n }\n }\n\n return output;\n};\n\n/**\n * Encodes the given string of characters (a standard JavaScript\n * string) as a binary encoded string where the bytes represent\n * a UTF-8 encoded string of characters. Non-ASCII characters will be\n * encoded as multiple bytes according to UTF-8.\n *\n * @param str a standard string of characters to encode.\n *\n * @return the binary encoded string.\n */\nutil.encodeUtf8 = function(str) {\n return unescape(encodeURIComponent(str));\n};\n\n/**\n * Decodes a binary encoded string that contains bytes that\n * represent a UTF-8 encoded string of characters -- into a\n * string of characters (a standard JavaScript string).\n *\n * @param str the binary encoded string to decode.\n *\n * @return the resulting standard string of characters.\n */\nutil.decodeUtf8 = function(str) {\n return decodeURIComponent(escape(str));\n};\n\n// binary encoding/decoding tools\n// FIXME: Experimental. Do not use yet.\nutil.binary = {\n raw: {},\n hex: {},\n base64: {},\n base58: {},\n baseN : {\n encode: baseN.encode,\n decode: baseN.decode\n }\n};\n\n/**\n * Encodes a Uint8Array as a binary-encoded string. This encoding uses\n * a value between 0 and 255 for each character.\n *\n * @param bytes the Uint8Array to encode.\n *\n * @return the binary-encoded string.\n */\nutil.binary.raw.encode = function(bytes) {\n return String.fromCharCode.apply(null, bytes);\n};\n\n/**\n * Decodes a binary-encoded string to a Uint8Array. This encoding uses\n * a value between 0 and 255 for each character.\n *\n * @param str the binary-encoded string to decode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.raw.decode = function(str, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length);\n }\n offset = offset || 0;\n var j = offset;\n for(var i = 0; i < str.length; ++i) {\n out[j++] = str.charCodeAt(i);\n }\n return output ? 
(j - offset) : out;\n};\n\n/**\n * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or\n * ByteBuffer as a string of hexadecimal characters.\n *\n * @param bytes the bytes to convert.\n *\n * @return the string of hexadecimal characters.\n */\nutil.binary.hex.encode = util.bytesToHex;\n\n/**\n * Decodes a hex-encoded string to a Uint8Array.\n *\n * @param hex the hexadecimal string to convert.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.hex.decode = function(hex, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(Math.ceil(hex.length / 2));\n }\n offset = offset || 0;\n var i = 0, j = offset;\n if(hex.length & 1) {\n // odd number of characters, convert first character alone\n i = 1;\n out[j++] = parseInt(hex[0], 16);\n }\n // convert 2 characters (1 byte) at a time\n for(; i < hex.length; i += 2) {\n out[j++] = parseInt(hex.substr(i, 2), 16);\n }\n return output ? (j - offset) : out;\n};\n\n/**\n * Base64-encodes a Uint8Array.\n *\n * @param input the Uint8Array to encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the base64-encoded output string.\n */\nutil.binary.base64.encode = function(input, maxline) {\n var line = '';\n var output = '';\n var chr1, chr2, chr3;\n var i = 0;\n while(i < input.byteLength) {\n chr1 = input[i++];\n chr2 = input[i++];\n chr3 = input[i++];\n\n // encode 4 character group\n line += _base64.charAt(chr1 >> 2);\n line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));\n if(isNaN(chr2)) {\n line += '==';\n } else {\n line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));\n line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);\n }\n\n if(maxline && line.length > maxline) {\n output += line.substr(0, maxline) + '\\r\\n';\n line = line.substr(maxline);\n }\n }\n output += line;\n return output;\n};\n\n/**\n * Decodes a base64-encoded string to a Uint8Array.\n *\n * @param input the base64-encoded input string.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.base64.decode = function(input, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(Math.ceil(input.length / 4) * 3);\n }\n\n // remove all non-base64 characters\n input = input.replace(/[^A-Za-z0-9\\+\\/\\=]/g, '');\n\n offset = offset || 0;\n var enc1, enc2, enc3, enc4;\n var i = 0, j = offset;\n\n while(i < input.length) {\n enc1 = _base64Idx[input.charCodeAt(i++) - 43];\n enc2 = _base64Idx[input.charCodeAt(i++) - 43];\n enc3 = _base64Idx[input.charCodeAt(i++) - 43];\n enc4 = _base64Idx[input.charCodeAt(i++) - 43];\n\n out[j++] = (enc1 << 2) | (enc2 >> 4);\n if(enc3 !== 64) {\n // decoded at least 2 bytes\n out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2);\n if(enc4 !== 64) {\n // decoded 3 bytes\n out[j++] = ((enc3 & 3) << 6) | enc4;\n }\n }\n }\n\n // make sure result is the exact decoded length\n return output ? 
(j - offset) : out.subarray(0, j);\n};\n\n// add support for base58 encoding/decoding with Bitcoin alphabet\nutil.binary.base58.encode = function(input, maxline) {\n return util.binary.baseN.encode(input, _base58, maxline);\n};\nutil.binary.base58.decode = function(input, maxline) {\n return util.binary.baseN.decode(input, _base58, maxline);\n};\n\n// text encoding/decoding tools\n// FIXME: Experimental. Do not use yet.\nutil.text = {\n utf8: {},\n utf16: {}\n};\n\n/**\n * Encodes the given string as UTF-8 in a Uint8Array.\n *\n * @param str the string to encode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.text.utf8.encode = function(str, output, offset) {\n str = util.encodeUtf8(str);\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length);\n }\n offset = offset || 0;\n var j = offset;\n for(var i = 0; i < str.length; ++i) {\n out[j++] = str.charCodeAt(i);\n }\n return output ? (j - offset) : out;\n};\n\n/**\n * Decodes the UTF-8 contents from a Uint8Array.\n *\n * @param bytes the Uint8Array to decode.\n *\n * @return the resulting string.\n */\nutil.text.utf8.decode = function(bytes) {\n return util.decodeUtf8(String.fromCharCode.apply(null, bytes));\n};\n\n/**\n * Encodes the given string as UTF-16 in a Uint8Array.\n *\n * @param str the string to encode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.text.utf16.encode = function(str, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length * 2);\n }\n var view = new Uint16Array(out.buffer);\n offset = offset || 0;\n var j = offset;\n var k = offset;\n for(var i = 0; i < str.length; ++i) {\n view[k++] = str.charCodeAt(i);\n j += 2;\n }\n return output ? 
(j - offset) : out;\n};\n\n/**\n * Decodes the UTF-16 contents from a Uint8Array.\n *\n * @param bytes the Uint8Array to decode.\n *\n * @return the resulting string.\n */\nutil.text.utf16.decode = function(bytes) {\n return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer));\n};\n\n/**\n * Deflates the given data using a flash interface.\n *\n * @param api the flash interface.\n * @param bytes the data.\n * @param raw true to return only raw deflate data, false to include zlib\n * header and trailer.\n *\n * @return the deflated data as a string.\n */\nutil.deflate = function(api, bytes, raw) {\n bytes = util.decode64(api.deflate(util.encode64(bytes)).rval);\n\n // strip zlib header and trailer if necessary\n if(raw) {\n // zlib header is 2 bytes (CMF,FLG) where FLG indicates that\n // there is a 4-byte DICT (alder-32) block before the data if\n // its 5th bit is set\n var start = 2;\n var flg = bytes.charCodeAt(1);\n if(flg & 0x20) {\n start = 6;\n }\n // zlib trailer is 4 bytes of adler-32\n bytes = bytes.substring(start, bytes.length - 4);\n }\n\n return bytes;\n};\n\n/**\n * Inflates the given data using a flash interface.\n *\n * @param api the flash interface.\n * @param bytes the data.\n * @param raw true if the incoming data has no zlib header or trailer and is\n * raw DEFLATE data.\n *\n * @return the inflated data as a string, null on error.\n */\nutil.inflate = function(api, bytes, raw) {\n // TODO: add zlib header and trailer if necessary/possible\n var rval = api.inflate(util.encode64(bytes)).rval;\n return (rval === null) ? null : util.decode64(rval);\n};\n\n/**\n * Sets a storage object.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param obj the storage object, null to remove.\n */\nvar _setStorageObject = function(api, id, obj) {\n if(!api) {\n throw new Error('WebStorage not available.');\n }\n\n var rval;\n if(obj === null) {\n rval = api.removeItem(id);\n } else {\n // json-encode and base64-encode object\n obj = util.encode64(JSON.stringify(obj));\n rval = api.setItem(id, obj);\n }\n\n // handle potential flash error\n if(typeof(rval) !== 'undefined' && rval.rval !== true) {\n var error = new Error(rval.error.message);\n error.id = rval.error.id;\n error.name = rval.error.name;\n throw error;\n }\n};\n\n/**\n * Gets a storage object.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n *\n * @return the storage object entry or null if none exists.\n */\nvar _getStorageObject = function(api, id) {\n if(!api) {\n throw new Error('WebStorage not available.');\n }\n\n // get the existing entry\n var rval = api.getItem(id);\n\n /* Note: We check api.init because we can't do (api == localStorage)\n on IE because of \"Class doesn't support Automation\" exception. Only\n the flash api has an init method so this works too, but we need a\n better solution in the future. 
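// Sketch of the string-level codecs defined earlier in this module
// (encodeUtf8/decodeUtf8 and encode64/decode64). Assumption: the npm
// "node-forge" package exposes these same functions as forge.util.
var forge = require('node-forge');
var bytes = forge.util.encodeUtf8('héllo');     // UTF-8 bytes as a binary string
var b64 = forge.util.encode64(bytes);
console.log(b64);                                             // 'aMOpbGxv'
console.log(forge.util.decodeUtf8(forge.util.decode64(b64))); // 'héllo'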
*/\n\n // flash returns item wrapped in an object, handle special case\n if(api.init) {\n if(rval.rval === null) {\n if(rval.error) {\n var error = new Error(rval.error.message);\n error.id = rval.error.id;\n error.name = rval.error.name;\n throw error;\n }\n // no error, but also no item\n rval = null;\n } else {\n rval = rval.rval;\n }\n }\n\n // handle decoding\n if(rval !== null) {\n // base64-decode and json-decode data\n rval = JSON.parse(util.decode64(rval));\n }\n\n return rval;\n};\n\n/**\n * Stores an item in local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param data the data for the item (any javascript object/primitive).\n */\nvar _setItem = function(api, id, key, data) {\n // get storage object\n var obj = _getStorageObject(api, id);\n if(obj === null) {\n // create a new storage object\n obj = {};\n }\n // update key\n obj[key] = data;\n\n // set storage object\n _setStorageObject(api, id, obj);\n};\n\n/**\n * Gets an item from local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n *\n * @return the item.\n */\nvar _getItem = function(api, id, key) {\n // get storage object\n var rval = _getStorageObject(api, id);\n if(rval !== null) {\n // return data at key\n rval = (key in rval) ? rval[key] : null;\n }\n\n return rval;\n};\n\n/**\n * Removes an item from local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n */\nvar _removeItem = function(api, id, key) {\n // get storage object\n var obj = _getStorageObject(api, id);\n if(obj !== null && key in obj) {\n // remove key\n delete obj[key];\n\n // see if entry has no keys remaining\n var empty = true;\n for(var prop in obj) {\n empty = false;\n break;\n }\n if(empty) {\n // remove entry entirely if no keys are left\n obj = null;\n }\n\n // set storage object\n _setStorageObject(api, id, obj);\n }\n};\n\n/**\n * Clears the local disk storage identified by the given ID.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n */\nvar _clearItems = function(api, id) {\n _setStorageObject(api, id, null);\n};\n\n/**\n * Calls a storage function.\n *\n * @param func the function to call.\n * @param args the arguments for the function.\n * @param location the location argument.\n *\n * @return the return value from the function.\n */\nvar _callStorageFunction = function(func, args, location) {\n var rval = null;\n\n // default storage types\n if(typeof(location) === 'undefined') {\n location = ['web', 'flash'];\n }\n\n // apply storage types in order of preference\n var type;\n var done = false;\n var exception = null;\n for(var idx in location) {\n type = location[idx];\n try {\n if(type === 'flash' || type === 'both') {\n if(args[0] === null) {\n throw new Error('Flash local storage not available.');\n }\n rval = func.apply(this, args);\n done = (type === 'flash');\n }\n if(type === 'web' || type === 'both') {\n args[0] = localStorage;\n rval = func.apply(this, args);\n done = true;\n }\n } catch(ex) {\n exception = ex;\n }\n if(done) {\n break;\n }\n }\n\n if(!done) {\n throw exception;\n }\n\n return rval;\n};\n\n/**\n * Stores an item on local disk.\n *\n * The available types of local storage include 'flash', 'web', and 'both'.\n *\n * The type 'flash' refers to flash local storage (SharedObject). 
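// Companion sketch for the Uint8Array-level codecs defined above
// (util.binary.hex and util.binary.base64), which the source still marks
// "FIXME: Experimental. Do not use yet."
// Assumption: forge.util here matches the npm "node-forge" package.
var forge = require('node-forge');
var bytes = new Uint8Array([0xde, 0xad, 0xbe, 0xef]);
console.log(forge.util.binary.hex.encode(bytes));      // 'deadbeef'
console.log(forge.util.binary.base64.encode(bytes));   // '3q2+7w=='
console.log(forge.util.binary.hex.decode('deadbeef')); // Uint8Array [222, 173, 190, 239]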
In order\n * to use flash local storage, the 'api' parameter must be valid. The type\n * 'web' refers to WebStorage, if supported by the browser. The type 'both'\n * refers to storing using both 'flash' and 'web', not just one or the\n * other.\n *\n * The location array should list the storage types to use in order of\n * preference:\n *\n * ['flash']: flash only storage\n * ['web']: web only storage\n * ['both']: try to store in both\n * ['flash','web']: store in flash first, but if not available, 'web'\n * ['web','flash']: store in web first, but if not available, 'flash'\n *\n * The location array defaults to: ['web', 'flash']\n *\n * @param api the flash interface, null to use only WebStorage.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param data the data for the item (any javascript object/primitive).\n * @param location an array with the preferred types of storage to use.\n */\nutil.setItem = function(api, id, key, data, location) {\n _callStorageFunction(_setItem, arguments, location);\n};\n\n/**\n * Gets an item on local disk.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface, null to use only WebStorage.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param location an array with the preferred types of storage to use.\n *\n * @return the item.\n */\nutil.getItem = function(api, id, key, location) {\n return _callStorageFunction(_getItem, arguments, location);\n};\n\n/**\n * Removes an item on local disk.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param location an array with the preferred types of storage to use.\n */\nutil.removeItem = function(api, id, key, location) {\n _callStorageFunction(_removeItem, arguments, location);\n};\n\n/**\n * Clears the local disk storage identified by the given ID.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface if flash is available.\n * @param id the storage ID to use.\n * @param location an array with the preferred types of storage to use.\n */\nutil.clearItems = function(api, id, location) {\n _callStorageFunction(_clearItems, arguments, location);\n};\n\n/**\n * Check if an object is empty.\n *\n * Taken from:\n * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937\n *\n * @param object the object to check.\n */\nutil.isEmpty = function(obj) {\n for(var prop in obj) {\n if(obj.hasOwnProperty(prop)) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Format with simple printf-style interpolation.\n *\n * %%: literal '%'\n * %s,%o: convert next argument into a string.\n *\n * @param format the string to format.\n * @param ... 
arguments to interpolate into the format string.\n */\nutil.format = function(format) {\n var re = /%./g;\n // current match\n var match;\n // current part\n var part;\n // current arg index\n var argi = 0;\n // collected parts to recombine later\n var parts = [];\n // last index found\n var last = 0;\n // loop while matches remain\n while((match = re.exec(format))) {\n part = format.substring(last, re.lastIndex - 2);\n // don't add empty strings (ie, parts between %s%s)\n if(part.length > 0) {\n parts.push(part);\n }\n last = re.lastIndex;\n // switch on % code\n var code = match[0][1];\n switch(code) {\n case 's':\n case 'o':\n // check if enough arguments were given\n if(argi < arguments.length) {\n parts.push(arguments[argi++ + 1]);\n } else {\n parts.push('');\n }\n break;\n // FIXME: do proper formating for numbers, etc\n //case 'f':\n //case 'd':\n case '%':\n parts.push('%');\n break;\n default:\n parts.push('<%' + code + '?>');\n }\n }\n // add trailing part of format string\n parts.push(format.substring(last));\n return parts.join('');\n};\n\n/**\n * Formats a number.\n *\n * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/\n */\nutil.formatNumber = function(number, decimals, dec_point, thousands_sep) {\n // http://kevin.vanzonneveld.net\n // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)\n // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)\n // + bugfix by: Michael White (http://crestidg.com)\n // + bugfix by: Benjamin Lupton\n // + bugfix by: Allan Jensen (http://www.winternet.no)\n // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)\n // * example 1: number_format(1234.5678, 2, '.', '');\n // * returns 1: 1234.57\n\n var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals;\n var d = dec_point === undefined ? ',' : dec_point;\n var t = thousands_sep === undefined ?\n '.' : thousands_sep, s = n < 0 ? '-' : '';\n var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + '';\n var j = (i.length > 3) ? i.length % 3 : 0;\n return s + (j ? i.substr(0, j) + t : '') +\n i.substr(j).replace(/(\\d{3})(?=\\d)/g, '$1' + t) +\n (c ? 
d + Math.abs(n - i).toFixed(c).slice(2) : '');\n};\n\n/**\n * Formats a byte size.\n *\n * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/\n */\nutil.formatSize = function(size) {\n if(size >= 1073741824) {\n size = util.formatNumber(size / 1073741824, 2, '.', '') + ' GiB';\n } else if(size >= 1048576) {\n size = util.formatNumber(size / 1048576, 2, '.', '') + ' MiB';\n } else if(size >= 1024) {\n size = util.formatNumber(size / 1024, 0) + ' KiB';\n } else {\n size = util.formatNumber(size, 0) + ' bytes';\n }\n return size;\n};\n\n/**\n * Converts an IPv4 or IPv6 string representation into bytes (in network order).\n *\n * @param ip the IPv4 or IPv6 address to convert.\n *\n * @return the 4-byte IPv6 or 16-byte IPv6 address or null if the address can't\n * be parsed.\n */\nutil.bytesFromIP = function(ip) {\n if(ip.indexOf('.') !== -1) {\n return util.bytesFromIPv4(ip);\n }\n if(ip.indexOf(':') !== -1) {\n return util.bytesFromIPv6(ip);\n }\n return null;\n};\n\n/**\n * Converts an IPv4 string representation into bytes (in network order).\n *\n * @param ip the IPv4 address to convert.\n *\n * @return the 4-byte address or null if the address can't be parsed.\n */\nutil.bytesFromIPv4 = function(ip) {\n ip = ip.split('.');\n if(ip.length !== 4) {\n return null;\n }\n var b = util.createBuffer();\n for(var i = 0; i < ip.length; ++i) {\n var num = parseInt(ip[i], 10);\n if(isNaN(num)) {\n return null;\n }\n b.putByte(num);\n }\n return b.getBytes();\n};\n\n/**\n * Converts an IPv6 string representation into bytes (in network order).\n *\n * @param ip the IPv6 address to convert.\n *\n * @return the 16-byte address or null if the address can't be parsed.\n */\nutil.bytesFromIPv6 = function(ip) {\n var blanks = 0;\n ip = ip.split(':').filter(function(e) {\n if(e.length === 0) ++blanks;\n return true;\n });\n var zeros = (8 - ip.length + blanks) * 2;\n var b = util.createBuffer();\n for(var i = 0; i < 8; ++i) {\n if(!ip[i] || ip[i].length === 0) {\n b.fillWithByte(0, zeros);\n zeros = 0;\n continue;\n }\n var bytes = util.hexToBytes(ip[i]);\n if(bytes.length < 2) {\n b.putByte(0);\n }\n b.putBytes(bytes);\n }\n return b.getBytes();\n};\n\n/**\n * Converts 4-bytes into an IPv4 string representation or 16-bytes into\n * an IPv6 string representation. The bytes must be in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv4 or IPv6 string representation if 4 or 16 bytes,\n * respectively, are given, otherwise null.\n */\nutil.bytesToIP = function(bytes) {\n if(bytes.length === 4) {\n return util.bytesToIPv4(bytes);\n }\n if(bytes.length === 16) {\n return util.bytesToIPv6(bytes);\n }\n return null;\n};\n\n/**\n * Converts 4-bytes into an IPv4 string representation. The bytes must be\n * in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv4 string representation or null for an invalid # of bytes.\n */\nutil.bytesToIPv4 = function(bytes) {\n if(bytes.length !== 4) {\n return null;\n }\n var ip = [];\n for(var i = 0; i < bytes.length; ++i) {\n ip.push(bytes.charCodeAt(i));\n }\n return ip.join('.');\n};\n\n/**\n * Converts 16-bytes into an IPv16 string representation. 
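// Sketch of the formatting and IP helpers above. Assumption: forge.util here
// matches what the npm "node-forge" package exposes.
var forge = require('node-forge');
console.log(forge.util.format('%s of %s done', 3, 7));  // '3 of 7 done'
console.log(forge.util.formatSize(5242880));            // '5.00 MiB'
var ip = forge.util.bytesFromIP('127.0.0.1');           // 4 bytes, network order
console.log(forge.util.bytesToHex(ip));                 // '7f000001'
console.log(forge.util.bytesToIP(ip));                  // '127.0.0.1'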
The bytes must be\n * in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv16 string representation or null for an invalid # of bytes.\n */\nutil.bytesToIPv6 = function(bytes) {\n if(bytes.length !== 16) {\n return null;\n }\n var ip = [];\n var zeroGroups = [];\n var zeroMaxGroup = 0;\n for(var i = 0; i < bytes.length; i += 2) {\n var hex = util.bytesToHex(bytes[i] + bytes[i + 1]);\n // canonicalize zero representation\n while(hex[0] === '0' && hex !== '0') {\n hex = hex.substr(1);\n }\n if(hex === '0') {\n var last = zeroGroups[zeroGroups.length - 1];\n var idx = ip.length;\n if(!last || idx !== last.end + 1) {\n zeroGroups.push({start: idx, end: idx});\n } else {\n last.end = idx;\n if((last.end - last.start) >\n (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) {\n zeroMaxGroup = zeroGroups.length - 1;\n }\n }\n }\n ip.push(hex);\n }\n if(zeroGroups.length > 0) {\n var group = zeroGroups[zeroMaxGroup];\n // only shorten group of length > 0\n if(group.end - group.start > 0) {\n ip.splice(group.start, group.end - group.start + 1, '');\n if(group.start === 0) {\n ip.unshift('');\n }\n if(group.end === 7) {\n ip.push('');\n }\n }\n }\n return ip.join(':');\n};\n\n/**\n * Estimates the number of processes that can be run concurrently. If\n * creating Web Workers, keep in mind that the main JavaScript process needs\n * its own core.\n *\n * @param options the options to use:\n * update true to force an update (not use the cached value).\n * @param callback(err, max) called once the operation completes.\n */\nutil.estimateCores = function(options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n if('cores' in util && !options.update) {\n return callback(null, util.cores);\n }\n if(typeof navigator !== 'undefined' &&\n 'hardwareConcurrency' in navigator &&\n navigator.hardwareConcurrency > 0) {\n util.cores = navigator.hardwareConcurrency;\n return callback(null, util.cores);\n }\n if(typeof Worker === 'undefined') {\n // workers not available\n util.cores = 1;\n return callback(null, util.cores);\n }\n if(typeof Blob === 'undefined') {\n // can't estimate, default to 2\n util.cores = 2;\n return callback(null, util.cores);\n }\n\n // create worker concurrency estimation code as blob\n var blobUrl = URL.createObjectURL(new Blob(['(',\n function() {\n self.addEventListener('message', function(e) {\n // run worker for 4 ms\n var st = Date.now();\n var et = st + 4;\n while(Date.now() < et);\n self.postMessage({st: st, et: et});\n });\n }.toString(),\n ')()'], {type: 'application/javascript'}));\n\n // take 5 samples using 16 workers\n sample([], 5, 16);\n\n function sample(max, samples, numWorkers) {\n if(samples === 0) {\n // get overlap average\n var avg = Math.floor(max.reduce(function(avg, x) {\n return avg + x;\n }, 0) / max.length);\n util.cores = Math.max(1, avg);\n URL.revokeObjectURL(blobUrl);\n return callback(null, util.cores);\n }\n map(numWorkers, function(err, results) {\n max.push(reduce(numWorkers, results));\n sample(max, samples - 1, numWorkers);\n });\n }\n\n function map(numWorkers, callback) {\n var workers = [];\n var results = [];\n for(var i = 0; i < numWorkers; ++i) {\n var worker = new Worker(blobUrl);\n worker.addEventListener('message', function(e) {\n results.push(e.data);\n if(results.length === numWorkers) {\n for(var i = 0; i < numWorkers; ++i) {\n workers[i].terminate();\n }\n callback(null, results);\n }\n });\n workers.push(worker);\n }\n for(var 
i = 0; i < numWorkers; ++i) {\n workers[i].postMessage(i);\n }\n }\n\n function reduce(numWorkers, results) {\n // find overlapping time windows\n var overlaps = [];\n for(var n = 0; n < numWorkers; ++n) {\n var r1 = results[n];\n var overlap = overlaps[n] = [];\n for(var i = 0; i < numWorkers; ++i) {\n if(n === i) {\n continue;\n }\n var r2 = results[i];\n if((r1.st > r2.st && r1.st < r2.et) ||\n (r2.st > r1.st && r2.st < r1.et)) {\n overlap.push(i);\n }\n }\n }\n // get maximum overlaps ... don't include overlapping worker itself\n // as the main JS process was also being scheduled during the work and\n // would have to be subtracted from the estimate anyway\n return overlaps.reduce(function(max, overlap) {\n return Math.max(max, overlap.length);\n }, 0);\n }\n};\n","/**\n * Javascript implementation of X.509 and related components (such as\n * Certification Signing Requests) of a Public Key Infrastructure.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n *\n * The ASN.1 representation of an X.509v3 certificate is as follows\n * (see RFC 2459):\n *\n * Certificate ::= SEQUENCE {\n * tbsCertificate TBSCertificate,\n * signatureAlgorithm AlgorithmIdentifier,\n * signatureValue BIT STRING\n * }\n *\n * TBSCertificate ::= SEQUENCE {\n * version [0] EXPLICIT Version DEFAULT v1,\n * serialNumber CertificateSerialNumber,\n * signature AlgorithmIdentifier,\n * issuer Name,\n * validity Validity,\n * subject Name,\n * subjectPublicKeyInfo SubjectPublicKeyInfo,\n * issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,\n * -- If present, version shall be v2 or v3\n * subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL,\n * -- If present, version shall be v2 or v3\n * extensions [3] EXPLICIT Extensions OPTIONAL\n * -- If present, version shall be v3\n * }\n *\n * Version ::= INTEGER { v1(0), v2(1), v3(2) }\n *\n * CertificateSerialNumber ::= INTEGER\n *\n * Name ::= CHOICE {\n * // only one possible choice for now\n * RDNSequence\n * }\n *\n * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName\n *\n * RelativeDistinguishedName ::= SET OF AttributeTypeAndValue\n *\n * AttributeTypeAndValue ::= SEQUENCE {\n * type AttributeType,\n * value AttributeValue\n * }\n * AttributeType ::= OBJECT IDENTIFIER\n * AttributeValue ::= ANY DEFINED BY AttributeType\n *\n * Validity ::= SEQUENCE {\n * notBefore Time,\n * notAfter Time\n * }\n *\n * Time ::= CHOICE {\n * utcTime UTCTime,\n * generalTime GeneralizedTime\n * }\n *\n * UniqueIdentifier ::= BIT STRING\n *\n * SubjectPublicKeyInfo ::= SEQUENCE {\n * algorithm AlgorithmIdentifier,\n * subjectPublicKey BIT STRING\n * }\n *\n * Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension\n *\n * Extension ::= SEQUENCE {\n * extnID OBJECT IDENTIFIER,\n * critical BOOLEAN DEFAULT FALSE,\n * extnValue OCTET STRING\n * }\n *\n * The only key algorithm currently supported for PKI is RSA.\n *\n * RSASSA-PSS signatures are described in RFC 3447 and RFC 4055.\n *\n * PKCS#10 v1.7 describes certificate signing requests:\n *\n * CertificationRequestInfo:\n *\n * CertificationRequestInfo ::= SEQUENCE {\n * version INTEGER { v1(0) } (v1,...),\n * subject Name,\n * subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }},\n * attributes [0] Attributes{{ CRIAttributes }}\n * }\n *\n * Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }}\n *\n * CRIAttributes ATTRIBUTE ::= {\n * ... 
-- add any locally defined attributes here -- }\n *\n * Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {\n * type ATTRIBUTE.&id({IOSet}),\n * values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})\n * }\n *\n * CertificationRequest ::= SEQUENCE {\n * certificationRequestInfo CertificationRequestInfo,\n * signatureAlgorithm AlgorithmIdentifier{{ SignatureAlgorithms }},\n * signature BIT STRING\n * }\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./asn1');\nrequire('./des');\nrequire('./md');\nrequire('./mgf');\nrequire('./oids');\nrequire('./pem');\nrequire('./pss');\nrequire('./rsa');\nrequire('./util');\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Public Key Infrastructure (PKI) implementation. */\nvar pki = module.exports = forge.pki = forge.pki || {};\nvar oids = pki.oids;\n\n// short name OID mappings\nvar _shortNames = {};\n_shortNames['CN'] = oids['commonName'];\n_shortNames['commonName'] = 'CN';\n_shortNames['C'] = oids['countryName'];\n_shortNames['countryName'] = 'C';\n_shortNames['L'] = oids['localityName'];\n_shortNames['localityName'] = 'L';\n_shortNames['ST'] = oids['stateOrProvinceName'];\n_shortNames['stateOrProvinceName'] = 'ST';\n_shortNames['O'] = oids['organizationName'];\n_shortNames['organizationName'] = 'O';\n_shortNames['OU'] = oids['organizationalUnitName'];\n_shortNames['organizationalUnitName'] = 'OU';\n_shortNames['E'] = oids['emailAddress'];\n_shortNames['emailAddress'] = 'E';\n\n// validator for an SubjectPublicKeyInfo structure\n// Note: Currently only works with an RSA public key\nvar publicKeyValidator = forge.pki.rsa.publicKeyValidator;\n\n// validator for an X.509v3 certificate\nvar x509CertificateValidator = {\n name: 'Certificate',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Certificate.TBSCertificate',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'tbsCertificate',\n value: [{\n name: 'Certificate.TBSCertificate.version',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.version.integer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certVersion'\n }]\n }, {\n name: 'Certificate.TBSCertificate.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certSerialNumber'\n }, {\n name: 'Certificate.TBSCertificate.signature',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Certificate.TBSCertificate.signature.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certinfoSignatureOid'\n }, {\n name: 'Certificate.TBSCertificate.signature.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'certinfoSignatureParams'\n }]\n }, {\n name: 'Certificate.TBSCertificate.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certIssuer'\n }, {\n name: 'Certificate.TBSCertificate.validity',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n // Note: UTC and generalized times may both appear so the capture\n // names are based on their detected order, the names used below\n // are only for the common case, which validity time really means\n // \"notBefore\" and which means \"notAfter\" will be determined by order\n value: 
[{\n // notBefore (Time) (UTC time case)\n name: 'Certificate.TBSCertificate.validity.notBefore (utc)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.UTCTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity1UTCTime'\n }, {\n // notBefore (Time) (generalized time case)\n name: 'Certificate.TBSCertificate.validity.notBefore (generalized)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.GENERALIZEDTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity2GeneralizedTime'\n }, {\n // notAfter (Time) (only UTC time is supported)\n name: 'Certificate.TBSCertificate.validity.notAfter (utc)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.UTCTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity3UTCTime'\n }, {\n // notAfter (Time) (only UTC time is supported)\n name: 'Certificate.TBSCertificate.validity.notAfter (generalized)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.GENERALIZEDTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity4GeneralizedTime'\n }]\n }, {\n // Name (subject) (RDNSequence)\n name: 'Certificate.TBSCertificate.subject',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certSubject'\n },\n // SubjectPublicKeyInfo\n publicKeyValidator,\n {\n // issuerUniqueID (optional)\n name: 'Certificate.TBSCertificate.issuerUniqueID',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.issuerUniqueID.id',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n // TODO: support arbitrary bit length ids\n captureBitStringValue: 'certIssuerUniqueId'\n }]\n }, {\n // subjectUniqueID (optional)\n name: 'Certificate.TBSCertificate.subjectUniqueID',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 2,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.subjectUniqueID.id',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n // TODO: support arbitrary bit length ids\n captureBitStringValue: 'certSubjectUniqueId'\n }]\n }, {\n // Extensions (optional)\n name: 'Certificate.TBSCertificate.extensions',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 3,\n constructed: true,\n captureAsn1: 'certExtensions',\n optional: true\n }]\n }, {\n // AlgorithmIdentifier (signature algorithm)\n name: 'Certificate.signatureAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // algorithm\n name: 'Certificate.signatureAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certSignatureOid'\n }, {\n name: 'Certificate.TBSCertificate.signature.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'certSignatureParams'\n }]\n }, {\n // SignatureValue\n name: 'Certificate.signatureValue',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n captureBitStringValue: 'certSignature'\n }]\n};\n\nvar rsassaPssParameterValidator = {\n name: 'rsapss',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'rsapss.hashAlgorithm',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n value: [{\n name: 'rsapss.hashAlgorithm.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 
'rsapss.hashAlgorithm.AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'hashOid'\n /* parameter block omitted, for SHA1 NULL anyhow. */\n }]\n }]\n }, {\n name: 'rsapss.maskGenAlgorithm',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'maskGenOid'\n }, {\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'maskGenHashOid'\n /* parameter block omitted, for SHA1 NULL anyhow. */\n }]\n }]\n }]\n }, {\n name: 'rsapss.saltLength',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 2,\n optional: true,\n value: [{\n name: 'rsapss.saltLength.saltLength',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.INTEGER,\n constructed: false,\n capture: 'saltLength'\n }]\n }, {\n name: 'rsapss.trailerField',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 3,\n optional: true,\n value: [{\n name: 'rsapss.trailer.trailer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.INTEGER,\n constructed: false,\n capture: 'trailer'\n }]\n }]\n};\n\n// validator for a CertificationRequestInfo structure\nvar certificationRequestInfoValidator = {\n name: 'CertificationRequestInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certificationRequestInfo',\n value: [{\n name: 'CertificationRequestInfo.integer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certificationRequestInfoVersion'\n }, {\n // Name (subject) (RDNSequence)\n name: 'CertificationRequestInfo.subject',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certificationRequestInfoSubject'\n },\n // SubjectPublicKeyInfo\n publicKeyValidator,\n {\n name: 'CertificationRequestInfo.attributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n capture: 'certificationRequestInfoAttributes',\n value: [{\n name: 'CertificationRequestInfo.attributes',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'CertificationRequestInfo.attributes.type',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false\n }, {\n name: 'CertificationRequestInfo.attributes.value',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true\n }]\n }]\n }]\n};\n\n// validator for a CertificationRequest structure\nvar certificationRequestValidator = {\n name: 'CertificationRequest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'csr',\n value: [\n certificationRequestInfoValidator, {\n // AlgorithmIdentifier (signature algorithm)\n name: 'CertificationRequest.signatureAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // algorithm\n name: 'CertificationRequest.signatureAlgorithm.algorithm',\n tagClass: 
asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'csrSignatureOid'\n }, {\n name: 'CertificationRequest.signatureAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'csrSignatureParams'\n }]\n }, {\n // signature\n name: 'CertificationRequest.signature',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n captureBitStringValue: 'csrSignature'\n }\n ]\n};\n\n/**\n * Converts an RDNSequence of ASN.1 DER-encoded RelativeDistinguishedName\n * sets into an array with objects that have type and value properties.\n *\n * @param rdn the RDNSequence to convert.\n * @param md a message digest to append type and value to if provided.\n */\npki.RDNAttributesAsArray = function(rdn, md) {\n var rval = [];\n\n // each value in 'rdn' in is a SET of RelativeDistinguishedName\n var set, attr, obj;\n for(var si = 0; si < rdn.value.length; ++si) {\n // get the RelativeDistinguishedName set\n set = rdn.value[si];\n\n // each value in the SET is an AttributeTypeAndValue sequence\n // containing first a type (an OID) and second a value (defined by\n // the OID)\n for(var i = 0; i < set.value.length; ++i) {\n obj = {};\n attr = set.value[i];\n obj.type = asn1.derToOid(attr.value[0].value);\n obj.value = attr.value[1].value;\n obj.valueTagClass = attr.value[1].type;\n // if the OID is known, get its name and short name\n if(obj.type in oids) {\n obj.name = oids[obj.type];\n if(obj.name in _shortNames) {\n obj.shortName = _shortNames[obj.name];\n }\n }\n if(md) {\n md.update(obj.type);\n md.update(obj.value);\n }\n rval.push(obj);\n }\n }\n\n return rval;\n};\n\n/**\n * Converts ASN.1 CRIAttributes into an array with objects that have type and\n * value properties.\n *\n * @param attributes the CRIAttributes to convert.\n */\npki.CRIAttributesAsArray = function(attributes) {\n var rval = [];\n\n // each value in 'attributes' in is a SEQUENCE with an OID and a SET\n for(var si = 0; si < attributes.length; ++si) {\n // get the attribute sequence\n var seq = attributes[si];\n\n // each value in the SEQUENCE containing first a type (an OID) and\n // second a set of values (defined by the OID)\n var type = asn1.derToOid(seq.value[0].value);\n var values = seq.value[1].value;\n for(var vi = 0; vi < values.length; ++vi) {\n var obj = {};\n obj.type = type;\n obj.value = values[vi].value;\n obj.valueTagClass = values[vi].type;\n // if the OID is known, get its name and short name\n if(obj.type in oids) {\n obj.name = oids[obj.type];\n if(obj.name in _shortNames) {\n obj.shortName = _shortNames[obj.name];\n }\n }\n // parse extensions\n if(obj.type === oids.extensionRequest) {\n obj.extensions = [];\n for(var ei = 0; ei < obj.value.length; ++ei) {\n obj.extensions.push(pki.certificateExtensionFromAsn1(obj.value[ei]));\n }\n }\n rval.push(obj);\n }\n }\n\n return rval;\n};\n\n/**\n * Gets an issuer or subject attribute from its name, type, or short name.\n *\n * @param obj the issuer or subject object.\n * @param options a short name string or an object with:\n * shortName the short name for the attribute.\n * name the name for the attribute.\n * type the type for the attribute.\n *\n * @return the attribute.\n */\nfunction _getAttribute(obj, options) {\n if(typeof options === 'string') {\n options = {shortName: options};\n }\n\n var rval = null;\n var attr;\n for(var i = 0; rval === null && i < obj.attributes.length; ++i) {\n attr = obj.attributes[i];\n if(options.type && options.type === attr.type) {\n rval = attr;\n } 
else if(options.name && options.name === attr.name) {\n rval = attr;\n } else if(options.shortName && options.shortName === attr.shortName) {\n rval = attr;\n }\n }\n return rval;\n}\n\n/**\n * Converts signature parameters from ASN.1 structure.\n *\n * Currently only RSASSA-PSS supported. The PKCS#1 v1.5 signature scheme had\n * no parameters.\n *\n * RSASSA-PSS-params ::= SEQUENCE {\n * hashAlgorithm [0] HashAlgorithm DEFAULT\n * sha1Identifier,\n * maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT\n * mgf1SHA1Identifier,\n * saltLength [2] INTEGER DEFAULT 20,\n * trailerField [3] INTEGER DEFAULT 1\n * }\n *\n * HashAlgorithm ::= AlgorithmIdentifier\n *\n * MaskGenAlgorithm ::= AlgorithmIdentifier\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * @param oid The OID specifying the signature algorithm\n * @param obj The ASN.1 structure holding the parameters\n * @param fillDefaults Whether to use return default values where omitted\n * @return signature parameter object\n */\nvar _readSignatureParameters = function(oid, obj, fillDefaults) {\n var params = {};\n\n if(oid !== oids['RSASSA-PSS']) {\n return params;\n }\n\n if(fillDefaults) {\n params = {\n hash: {\n algorithmOid: oids['sha1']\n },\n mgf: {\n algorithmOid: oids['mgf1'],\n hash: {\n algorithmOid: oids['sha1']\n }\n },\n saltLength: 20\n };\n }\n\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, rsassaPssParameterValidator, capture, errors)) {\n var error = new Error('Cannot read RSASSA-PSS parameter block.');\n error.errors = errors;\n throw error;\n }\n\n if(capture.hashOid !== undefined) {\n params.hash = params.hash || {};\n params.hash.algorithmOid = asn1.derToOid(capture.hashOid);\n }\n\n if(capture.maskGenOid !== undefined) {\n params.mgf = params.mgf || {};\n params.mgf.algorithmOid = asn1.derToOid(capture.maskGenOid);\n params.mgf.hash = params.mgf.hash || {};\n params.mgf.hash.algorithmOid = asn1.derToOid(capture.maskGenHashOid);\n }\n\n if(capture.saltLength !== undefined) {\n params.saltLength = capture.saltLength.charCodeAt(0);\n }\n\n return params;\n};\n\n/**\n * Create signature digest for OID.\n *\n * @param options\n * signatureOid: the OID specifying the signature algorithm.\n * type: a human readable type for error messages\n * @return a created md instance. throws if unknown oid.\n */\nvar _createSignatureDigest = function(options) {\n switch(oids[options.signatureOid]) {\n case 'sha1WithRSAEncryption':\n // deprecated alias\n case 'sha1WithRSASignature':\n return forge.md.sha1.create();\n case 'md5WithRSAEncryption':\n return forge.md.md5.create();\n case 'sha256WithRSAEncryption':\n return forge.md.sha256.create();\n case 'sha384WithRSAEncryption':\n return forge.md.sha384.create();\n case 'sha512WithRSAEncryption':\n return forge.md.sha512.create();\n case 'RSASSA-PSS':\n return forge.md.sha256.create();\n default:\n var error = new Error(\n 'Could not compute ' + options.type + ' digest. ' +\n 'Unknown signature OID.');\n error.signatureOid = options.signatureOid;\n throw error;\n }\n};\n\n/**\n * Verify signature on certificate or CSR.\n *\n * @param options:\n * certificate the certificate or CSR to verify.\n * md the signature digest.\n * signature the signature\n * @return a created md instance. 
throws if unknown oid.\n */\nvar _verifySignature = function(options) {\n var cert = options.certificate;\n var scheme;\n\n switch(cert.signatureOid) {\n case oids.sha1WithRSAEncryption:\n // deprecated alias\n case oids.sha1WithRSASignature:\n /* use PKCS#1 v1.5 padding scheme */\n break;\n case oids['RSASSA-PSS']:\n var hash, mgf;\n\n /* initialize mgf */\n hash = oids[cert.signatureParameters.mgf.hash.algorithmOid];\n if(hash === undefined || forge.md[hash] === undefined) {\n var error = new Error('Unsupported MGF hash function.');\n error.oid = cert.signatureParameters.mgf.hash.algorithmOid;\n error.name = hash;\n throw error;\n }\n\n mgf = oids[cert.signatureParameters.mgf.algorithmOid];\n if(mgf === undefined || forge.mgf[mgf] === undefined) {\n var error = new Error('Unsupported MGF function.');\n error.oid = cert.signatureParameters.mgf.algorithmOid;\n error.name = mgf;\n throw error;\n }\n\n mgf = forge.mgf[mgf].create(forge.md[hash].create());\n\n /* initialize hash function */\n hash = oids[cert.signatureParameters.hash.algorithmOid];\n if(hash === undefined || forge.md[hash] === undefined) {\n var error = new Error('Unsupported RSASSA-PSS hash function.');\n error.oid = cert.signatureParameters.hash.algorithmOid;\n error.name = hash;\n throw error;\n }\n\n scheme = forge.pss.create(\n forge.md[hash].create(), mgf, cert.signatureParameters.saltLength\n );\n break;\n }\n\n // verify signature on cert using public key\n return cert.publicKey.verify(\n options.md.digest().getBytes(), options.signature, scheme\n );\n};\n\n/**\n * Converts an X.509 certificate from PEM format.\n *\n * Note: If the certificate is to be verified then compute hash should\n * be set to true. This will scan the TBSCertificate part of the ASN.1\n * object while it is converted so it doesn't need to be converted back\n * to ASN.1-DER-encoding later.\n *\n * @param pem the PEM-formatted certificate.\n * @param computeHash true to compute the hash for verification.\n * @param strict true to be strict when checking ASN.1 value lengths, false to\n * allow truncated values (default: true).\n *\n * @return the certificate.\n */\npki.certificateFromPem = function(pem, computeHash, strict) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'CERTIFICATE' &&\n msg.type !== 'X509 CERTIFICATE' &&\n msg.type !== 'TRUSTED CERTIFICATE') {\n var error = new Error(\n 'Could not convert certificate from PEM; PEM header type ' +\n 'is not \"CERTIFICATE\", \"X509 CERTIFICATE\", or \"TRUSTED CERTIFICATE\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error(\n 'Could not convert certificate from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body, strict);\n\n return pki.certificateFromAsn1(obj, computeHash);\n};\n\n/**\n * Converts an X.509 certificate to PEM format.\n *\n * @param cert the certificate.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted certificate.\n */\npki.certificateToPem = function(cert, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'CERTIFICATE',\n body: asn1.toDer(pki.certificateToAsn1(cert)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts an RSA public key from PEM format.\n *\n * @param pem the PEM-formatted public key.\n *\n * @return the public key.\n */\npki.publicKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n 
if(msg.type !== 'PUBLIC KEY' && msg.type !== 'RSA PUBLIC KEY') {\n var error = new Error('Could not convert public key from PEM; PEM header ' +\n 'type is not \"PUBLIC KEY\" or \"RSA PUBLIC KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert public key from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return pki.publicKeyFromAsn1(obj);\n};\n\n/**\n * Converts an RSA public key to PEM format (using a SubjectPublicKeyInfo).\n *\n * @param key the public key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted public key.\n */\npki.publicKeyToPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'PUBLIC KEY',\n body: asn1.toDer(pki.publicKeyToAsn1(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts an RSA public key to PEM format (using an RSAPublicKey).\n *\n * @param key the public key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted public key.\n */\npki.publicKeyToRSAPublicKeyPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'RSA PUBLIC KEY',\n body: asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Gets a fingerprint for the given public key.\n *\n * @param options the options to use.\n * [md] the message digest object to use (defaults to forge.md.sha1).\n * [type] the type of fingerprint, such as 'RSAPublicKey',\n * 'SubjectPublicKeyInfo' (defaults to 'RSAPublicKey').\n * [encoding] an alternative output encoding, such as 'hex'\n * (defaults to none, outputs a byte buffer).\n * [delimiter] the delimiter to use between bytes for 'hex' encoded\n * output, eg: ':' (defaults to none).\n *\n * @return the fingerprint as a byte buffer or other encoding based on options.\n */\npki.getPublicKeyFingerprint = function(key, options) {\n options = options || {};\n var md = options.md || forge.md.sha1.create();\n var type = options.type || 'RSAPublicKey';\n\n var bytes;\n switch(type) {\n case 'RSAPublicKey':\n bytes = asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes();\n break;\n case 'SubjectPublicKeyInfo':\n bytes = asn1.toDer(pki.publicKeyToAsn1(key)).getBytes();\n break;\n default:\n throw new Error('Unknown fingerprint type \"' + options.type + '\".');\n }\n\n // hash public key bytes\n md.start();\n md.update(bytes);\n var digest = md.digest();\n if(options.encoding === 'hex') {\n var hex = digest.toHex();\n if(options.delimiter) {\n return hex.match(/.{2}/g).join(options.delimiter);\n }\n return hex;\n } else if(options.encoding === 'binary') {\n return digest.getBytes();\n } else if(options.encoding) {\n throw new Error('Unknown encoding \"' + options.encoding + '\".');\n }\n return digest;\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) from PEM format.\n *\n * Note: If the certification request is to be verified then compute hash\n * should be set to true. 
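// Usage sketch for the PEM conversion and fingerprint helpers documented
// above. certPem is a placeholder the caller must supply; the require() name
// is an assumption for illustration.
var forge = require('node-forge');
var certPem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n';
var cert = forge.pki.certificateFromPem(certPem, /* computeHash */ true);
var roundTripped = forge.pki.certificateToPem(cert);         // re-encode as PEM
var publicKeyPem = forge.pki.publicKeyToPem(cert.publicKey); // SubjectPublicKeyInfo PEM
var fingerprint = forge.pki.getPublicKeyFingerprint(cert.publicKey, {
  type: 'SubjectPublicKeyInfo',
  encoding: 'hex',
  delimiter: ':'
});
console.log(fingerprint); // e.g. "ab:cd:ef:..."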
This will scan the CertificationRequestInfo part of\n * the ASN.1 object while it is converted so it doesn't need to be converted\n * back to ASN.1-DER-encoding later.\n *\n * @param pem the PEM-formatted certificate.\n * @param computeHash true to compute the hash for verification.\n * @param strict true to be strict when checking ASN.1 value lengths, false to\n * allow truncated values (default: true).\n *\n * @return the certification request (CSR).\n */\npki.certificationRequestFromPem = function(pem, computeHash, strict) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'CERTIFICATE REQUEST') {\n var error = new Error('Could not convert certification request from PEM; ' +\n 'PEM header type is not \"CERTIFICATE REQUEST\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert certification request from PEM; ' +\n 'PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body, strict);\n\n return pki.certificationRequestFromAsn1(obj, computeHash);\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) to PEM format.\n *\n * @param csr the certification request.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted certification request.\n */\npki.certificationRequestToPem = function(csr, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'CERTIFICATE REQUEST',\n body: asn1.toDer(pki.certificationRequestToAsn1(csr)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Creates an empty X.509v3 RSA certificate.\n *\n * @return the certificate.\n */\npki.createCertificate = function() {\n var cert = {};\n cert.version = 0x02;\n cert.serialNumber = '00';\n cert.signatureOid = null;\n cert.signature = null;\n cert.siginfo = {};\n cert.siginfo.algorithmOid = null;\n cert.validity = {};\n cert.validity.notBefore = new Date();\n cert.validity.notAfter = new Date();\n\n cert.issuer = {};\n cert.issuer.getField = function(sn) {\n return _getAttribute(cert.issuer, sn);\n };\n cert.issuer.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.issuer.attributes.push(attr);\n };\n cert.issuer.attributes = [];\n cert.issuer.hash = null;\n\n cert.subject = {};\n cert.subject.getField = function(sn) {\n return _getAttribute(cert.subject, sn);\n };\n cert.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.subject.attributes.push(attr);\n };\n cert.subject.attributes = [];\n cert.subject.hash = null;\n\n cert.extensions = [];\n cert.publicKey = null;\n cert.md = null;\n\n /**\n * Sets the subject of this certificate.\n *\n * @param attrs the array of subject attributes to use.\n * @param uniqueId an optional a unique ID to use.\n */\n cert.setSubject = function(attrs, uniqueId) {\n // set new attributes, clear hash\n _fillMissingFields(attrs);\n cert.subject.attributes = attrs;\n delete cert.subject.uniqueId;\n if(uniqueId) {\n // TODO: support arbitrary bit length ids\n cert.subject.uniqueId = uniqueId;\n }\n cert.subject.hash = null;\n };\n\n /**\n * Sets the issuer of this certificate.\n *\n * @param attrs the array of issuer attributes to use.\n * @param uniqueId an optional a unique ID to use.\n */\n cert.setIssuer = function(attrs, uniqueId) {\n // set new attributes, clear hash\n _fillMissingFields(attrs);\n cert.issuer.attributes = attrs;\n delete cert.issuer.uniqueId;\n if(uniqueId) {\n // TODO: support arbitrary bit 
length ids\n cert.issuer.uniqueId = uniqueId;\n }\n cert.issuer.hash = null;\n };\n\n /**\n * Sets the extensions of this certificate.\n *\n * @param exts the array of extensions to use.\n */\n cert.setExtensions = function(exts) {\n for(var i = 0; i < exts.length; ++i) {\n _fillMissingExtensionFields(exts[i], {cert: cert});\n }\n // set new extensions\n cert.extensions = exts;\n };\n\n /**\n * Gets an extension by its name or id.\n *\n * @param options the name to use or an object with:\n * name the name to use.\n * id the id to use.\n *\n * @return the extension or null if not found.\n */\n cert.getExtension = function(options) {\n if(typeof options === 'string') {\n options = {name: options};\n }\n\n var rval = null;\n var ext;\n for(var i = 0; rval === null && i < cert.extensions.length; ++i) {\n ext = cert.extensions[i];\n if(options.id && ext.id === options.id) {\n rval = ext;\n } else if(options.name && ext.name === options.name) {\n rval = ext;\n }\n }\n return rval;\n };\n\n /**\n * Signs this certificate using the given private key.\n *\n * @param key the private key to sign with.\n * @param md the message digest object to use (defaults to forge.md.sha1).\n */\n cert.sign = function(key, md) {\n // TODO: get signature OID from private key\n cert.md = md || forge.md.sha1.create();\n var algorithmOid = oids[cert.md.algorithm + 'WithRSAEncryption'];\n if(!algorithmOid) {\n var error = new Error('Could not compute certificate digest. ' +\n 'Unknown message digest algorithm OID.');\n error.algorithm = cert.md.algorithm;\n throw error;\n }\n cert.signatureOid = cert.siginfo.algorithmOid = algorithmOid;\n\n // get TBSCertificate, convert to DER\n cert.tbsCertificate = pki.getTBSCertificate(cert);\n var bytes = asn1.toDer(cert.tbsCertificate);\n\n // digest and sign\n cert.md.update(bytes.getBytes());\n cert.signature = key.sign(cert.md);\n };\n\n /**\n * Attempts verify the signature on the passed certificate using this\n * certificate's public key.\n *\n * @param child the certificate to verify.\n *\n * @return true if verified, false if not.\n */\n cert.verify = function(child) {\n var rval = false;\n\n if(!cert.issued(child)) {\n var issuer = child.issuer;\n var subject = cert.subject;\n var error = new Error(\n 'The parent certificate did not issue the given child ' +\n 'certificate; the child certificate\\'s issuer does not match the ' +\n 'parent\\'s subject.');\n error.expectedIssuer = subject.attributes;\n error.actualIssuer = issuer.attributes;\n throw error;\n }\n\n var md = child.md;\n if(md === null) {\n // create digest for OID signature types\n md = _createSignatureDigest({\n signatureOid: child.signatureOid,\n type: 'certificate'\n });\n\n // produce DER formatted TBSCertificate and digest it\n var tbsCertificate = child.tbsCertificate || pki.getTBSCertificate(child);\n var bytes = asn1.toDer(tbsCertificate);\n md.update(bytes.getBytes());\n }\n\n if(md !== null) {\n rval = _verifySignature({\n certificate: cert, md: md, signature: child.signature\n });\n }\n\n return rval;\n };\n\n /**\n * Returns true if this certificate's issuer matches the passed\n * certificate's subject. 
Note that no signature check is performed.\n *\n * @param parent the certificate to check.\n *\n * @return true if this certificate's issuer matches the passed certificate's\n * subject.\n */\n cert.isIssuer = function(parent) {\n var rval = false;\n\n var i = cert.issuer;\n var s = parent.subject;\n\n // compare hashes if present\n if(i.hash && s.hash) {\n rval = (i.hash === s.hash);\n } else if(i.attributes.length === s.attributes.length) {\n // all attributes are the same so issuer matches subject\n rval = true;\n var iattr, sattr;\n for(var n = 0; rval && n < i.attributes.length; ++n) {\n iattr = i.attributes[n];\n sattr = s.attributes[n];\n if(iattr.type !== sattr.type || iattr.value !== sattr.value) {\n // attribute mismatch\n rval = false;\n }\n }\n }\n\n return rval;\n };\n\n /**\n * Returns true if this certificate's subject matches the issuer of the\n * given certificate). Note that not signature check is performed.\n *\n * @param child the certificate to check.\n *\n * @return true if this certificate's subject matches the passed\n * certificate's issuer.\n */\n cert.issued = function(child) {\n return child.isIssuer(cert);\n };\n\n /**\n * Generates the subjectKeyIdentifier for this certificate as byte buffer.\n *\n * @return the subjectKeyIdentifier for this certificate as byte buffer.\n */\n cert.generateSubjectKeyIdentifier = function() {\n /* See: 4.2.1.2 section of the the RFC3280, keyIdentifier is either:\n\n (1) The keyIdentifier is composed of the 160-bit SHA-1 hash of the\n value of the BIT STRING subjectPublicKey (excluding the tag,\n length, and number of unused bits).\n\n (2) The keyIdentifier is composed of a four bit type field with\n the value 0100 followed by the least significant 60 bits of the\n SHA-1 hash of the value of the BIT STRING subjectPublicKey\n (excluding the tag, length, and number of unused bit string bits).\n */\n\n // skipping the tag, length, and number of unused bits is the same\n // as just using the RSAPublicKey (for RSA keys, which are the\n // only ones supported)\n return pki.getPublicKeyFingerprint(cert.publicKey, {type: 'RSAPublicKey'});\n };\n\n /**\n * Verifies the subjectKeyIdentifier extension value for this certificate\n * against its public key. If no extension is found, false will be\n * returned.\n *\n * @return true if verified, false if not.\n */\n cert.verifySubjectKeyIdentifier = function() {\n var oid = oids['subjectKeyIdentifier'];\n for(var i = 0; i < cert.extensions.length; ++i) {\n var ext = cert.extensions[i];\n if(ext.id === oid) {\n var ski = cert.generateSubjectKeyIdentifier().getBytes();\n return (forge.util.hexToBytes(ext.subjectKeyIdentifier) === ski);\n }\n }\n return false;\n };\n\n return cert;\n};\n\n/**\n * Converts an X.509v3 RSA certificate from an ASN.1 object.\n *\n * Note: If the certificate is to be verified then compute hash should\n * be set to true. There is currently no implementation for converting\n * a certificate back to ASN.1 so the TBSCertificate part of the ASN.1\n * object needs to be scanned before the cert object is created.\n *\n * @param obj the asn1 representation of an X.509v3 RSA certificate.\n * @param computeHash true to compute the hash for verification.\n *\n * @return the certificate.\n */\npki.certificateFromAsn1 = function(obj, computeHash) {\n // validate certificate and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, x509CertificateValidator, capture, errors)) {\n var error = new Error('Cannot read X.509 certificate. 
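// Usage sketch: build and self-sign a certificate with the createCertificate
// API above. Key size, validity and subject values are example choices only.
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var cert = forge.pki.createCertificate();
cert.publicKey = keys.publicKey;
cert.serialNumber = '01';
cert.validity.notBefore = new Date();
cert.validity.notAfter = new Date();
cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 1);
var attrs = [{name: 'commonName', value: 'example.org'}];
cert.setSubject(attrs);
cert.setIssuer(attrs);                 // self-signed: issuer equals subject
cert.sign(keys.privateKey, forge.md.sha256.create());
console.log(cert.verify(cert));        // -> true, the cert verifies itself
console.log(cert.verifySubjectKeyIdentifier()); // false: no SKI extension was set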
' +\n 'ASN.1 object is not an X509v3 Certificate.');\n error.errors = errors;\n throw error;\n }\n\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n throw new Error('Cannot read public key. OID is not RSA.');\n }\n\n // create certificate\n var cert = pki.createCertificate();\n cert.version = capture.certVersion ?\n capture.certVersion.charCodeAt(0) : 0;\n var serial = forge.util.createBuffer(capture.certSerialNumber);\n cert.serialNumber = serial.toHex();\n cert.signatureOid = forge.asn1.derToOid(capture.certSignatureOid);\n cert.signatureParameters = _readSignatureParameters(\n cert.signatureOid, capture.certSignatureParams, true);\n cert.siginfo.algorithmOid = forge.asn1.derToOid(capture.certinfoSignatureOid);\n cert.siginfo.parameters = _readSignatureParameters(cert.siginfo.algorithmOid,\n capture.certinfoSignatureParams, false);\n cert.signature = capture.certSignature;\n\n var validity = [];\n if(capture.certValidity1UTCTime !== undefined) {\n validity.push(asn1.utcTimeToDate(capture.certValidity1UTCTime));\n }\n if(capture.certValidity2GeneralizedTime !== undefined) {\n validity.push(asn1.generalizedTimeToDate(\n capture.certValidity2GeneralizedTime));\n }\n if(capture.certValidity3UTCTime !== undefined) {\n validity.push(asn1.utcTimeToDate(capture.certValidity3UTCTime));\n }\n if(capture.certValidity4GeneralizedTime !== undefined) {\n validity.push(asn1.generalizedTimeToDate(\n capture.certValidity4GeneralizedTime));\n }\n if(validity.length > 2) {\n throw new Error('Cannot read notBefore/notAfter validity times; more ' +\n 'than two times were provided in the certificate.');\n }\n if(validity.length < 2) {\n throw new Error('Cannot read notBefore/notAfter validity times; they ' +\n 'were not provided as either UTCTime or GeneralizedTime.');\n }\n cert.validity.notBefore = validity[0];\n cert.validity.notAfter = validity[1];\n\n // keep TBSCertificate to preserve signature when exporting\n cert.tbsCertificate = capture.tbsCertificate;\n\n if(computeHash) {\n // create digest for OID signature type\n cert.md = _createSignatureDigest({\n signatureOid: cert.signatureOid,\n type: 'certificate'\n });\n\n // produce DER formatted TBSCertificate and digest it\n var bytes = asn1.toDer(cert.tbsCertificate);\n cert.md.update(bytes.getBytes());\n }\n\n // handle issuer, build issuer message digest\n var imd = forge.md.sha1.create();\n var ibytes = asn1.toDer(capture.certIssuer);\n imd.update(ibytes.getBytes());\n cert.issuer.getField = function(sn) {\n return _getAttribute(cert.issuer, sn);\n };\n cert.issuer.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.issuer.attributes.push(attr);\n };\n cert.issuer.attributes = pki.RDNAttributesAsArray(capture.certIssuer);\n if(capture.certIssuerUniqueId) {\n cert.issuer.uniqueId = capture.certIssuerUniqueId;\n }\n cert.issuer.hash = imd.digest().toHex();\n\n // handle subject, build subject message digest\n var smd = forge.md.sha1.create();\n var sbytes = asn1.toDer(capture.certSubject);\n smd.update(sbytes.getBytes());\n cert.subject.getField = function(sn) {\n return _getAttribute(cert.subject, sn);\n };\n cert.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.subject.attributes.push(attr);\n };\n cert.subject.attributes = pki.RDNAttributesAsArray(capture.certSubject);\n if(capture.certSubjectUniqueId) {\n cert.subject.uniqueId = capture.certSubjectUniqueId;\n }\n cert.subject.hash = smd.digest().toHex();\n\n // handle extensions\n if(capture.certExtensions) 
{\n cert.extensions = pki.certificateExtensionsFromAsn1(capture.certExtensions);\n } else {\n cert.extensions = [];\n }\n\n // convert RSA public key from ASN.1\n cert.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo);\n\n return cert;\n};\n\n/**\n * Converts an ASN.1 extensions object (with extension sequences as its\n * values) into an array of extension objects with types and values.\n *\n * Supported extensions:\n *\n * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 }\n * KeyUsage ::= BIT STRING {\n * digitalSignature (0),\n * nonRepudiation (1),\n * keyEncipherment (2),\n * dataEncipherment (3),\n * keyAgreement (4),\n * keyCertSign (5),\n * cRLSign (6),\n * encipherOnly (7),\n * decipherOnly (8)\n * }\n *\n * id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 }\n * BasicConstraints ::= SEQUENCE {\n * cA BOOLEAN DEFAULT FALSE,\n * pathLenConstraint INTEGER (0..MAX) OPTIONAL\n * }\n *\n * subjectAltName EXTENSION ::= {\n * SYNTAX GeneralNames\n * IDENTIFIED BY id-ce-subjectAltName\n * }\n *\n * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName\n *\n * GeneralName ::= CHOICE {\n * otherName [0] INSTANCE OF OTHER-NAME,\n * rfc822Name [1] IA5String,\n * dNSName [2] IA5String,\n * x400Address [3] ORAddress,\n * directoryName [4] Name,\n * ediPartyName [5] EDIPartyName,\n * uniformResourceIdentifier [6] IA5String,\n * IPAddress [7] OCTET STRING,\n * registeredID [8] OBJECT IDENTIFIER\n * }\n *\n * OTHER-NAME ::= TYPE-IDENTIFIER\n *\n * EDIPartyName ::= SEQUENCE {\n * nameAssigner [0] DirectoryString {ub-name} OPTIONAL,\n * partyName [1] DirectoryString {ub-name}\n * }\n *\n * @param exts the extensions ASN.1 with extension sequences to parse.\n *\n * @return the array.\n */\npki.certificateExtensionsFromAsn1 = function(exts) {\n var rval = [];\n for(var i = 0; i < exts.value.length; ++i) {\n // get extension sequence\n var extseq = exts.value[i];\n for(var ei = 0; ei < extseq.value.length; ++ei) {\n rval.push(pki.certificateExtensionFromAsn1(extseq.value[ei]));\n }\n }\n\n return rval;\n};\n\n/**\n * Parses a single certificate extension from ASN.1.\n *\n * @param ext the extension in ASN.1 format.\n *\n * @return the parsed extension as an object.\n */\npki.certificateExtensionFromAsn1 = function(ext) {\n // an extension has:\n // [0] extnID OBJECT IDENTIFIER\n // [1] critical BOOLEAN DEFAULT FALSE\n // [2] extnValue OCTET STRING\n var e = {};\n e.id = asn1.derToOid(ext.value[0].value);\n e.critical = false;\n if(ext.value[1].type === asn1.Type.BOOLEAN) {\n e.critical = (ext.value[1].value.charCodeAt(0) !== 0x00);\n e.value = ext.value[2].value;\n } else {\n e.value = ext.value[1].value;\n }\n // if the oid is known, get its name\n if(e.id in oids) {\n e.name = oids[e.id];\n\n // handle key usage\n if(e.name === 'keyUsage') {\n // get value as BIT STRING\n var ev = asn1.fromDer(e.value);\n var b2 = 0x00;\n var b3 = 0x00;\n if(ev.value.length > 1) {\n // skip first byte, just indicates unused bits which\n // will be padded with 0s anyway\n // get bytes with flag bits\n b2 = ev.value.charCodeAt(1);\n b3 = ev.value.length > 2 ? 
ev.value.charCodeAt(2) : 0;\n }\n // set flags\n e.digitalSignature = (b2 & 0x80) === 0x80;\n e.nonRepudiation = (b2 & 0x40) === 0x40;\n e.keyEncipherment = (b2 & 0x20) === 0x20;\n e.dataEncipherment = (b2 & 0x10) === 0x10;\n e.keyAgreement = (b2 & 0x08) === 0x08;\n e.keyCertSign = (b2 & 0x04) === 0x04;\n e.cRLSign = (b2 & 0x02) === 0x02;\n e.encipherOnly = (b2 & 0x01) === 0x01;\n e.decipherOnly = (b3 & 0x80) === 0x80;\n } else if(e.name === 'basicConstraints') {\n // handle basic constraints\n // get value as SEQUENCE\n var ev = asn1.fromDer(e.value);\n // get cA BOOLEAN flag (defaults to false)\n if(ev.value.length > 0 && ev.value[0].type === asn1.Type.BOOLEAN) {\n e.cA = (ev.value[0].value.charCodeAt(0) !== 0x00);\n } else {\n e.cA = false;\n }\n // get path length constraint\n var value = null;\n if(ev.value.length > 0 && ev.value[0].type === asn1.Type.INTEGER) {\n value = ev.value[0].value;\n } else if(ev.value.length > 1) {\n value = ev.value[1].value;\n }\n if(value !== null) {\n e.pathLenConstraint = asn1.derToInteger(value);\n }\n } else if(e.name === 'extKeyUsage') {\n // handle extKeyUsage\n // value is a SEQUENCE of OIDs\n var ev = asn1.fromDer(e.value);\n for(var vi = 0; vi < ev.value.length; ++vi) {\n var oid = asn1.derToOid(ev.value[vi].value);\n if(oid in oids) {\n e[oids[oid]] = true;\n } else {\n e[oid] = true;\n }\n }\n } else if(e.name === 'nsCertType') {\n // handle nsCertType\n // get value as BIT STRING\n var ev = asn1.fromDer(e.value);\n var b2 = 0x00;\n if(ev.value.length > 1) {\n // skip first byte, just indicates unused bits which\n // will be padded with 0s anyway\n // get bytes with flag bits\n b2 = ev.value.charCodeAt(1);\n }\n // set flags\n e.client = (b2 & 0x80) === 0x80;\n e.server = (b2 & 0x40) === 0x40;\n e.email = (b2 & 0x20) === 0x20;\n e.objsign = (b2 & 0x10) === 0x10;\n e.reserved = (b2 & 0x08) === 0x08;\n e.sslCA = (b2 & 0x04) === 0x04;\n e.emailCA = (b2 & 0x02) === 0x02;\n e.objCA = (b2 & 0x01) === 0x01;\n } else if(\n e.name === 'subjectAltName' ||\n e.name === 'issuerAltName') {\n // handle subjectAltName/issuerAltName\n e.altNames = [];\n\n // ev is a SYNTAX SEQUENCE\n var gn;\n var ev = asn1.fromDer(e.value);\n for(var n = 0; n < ev.value.length; ++n) {\n // get GeneralName\n gn = ev.value[n];\n\n var altName = {\n type: gn.type,\n value: gn.value\n };\n e.altNames.push(altName);\n\n // Note: Support for types 1,2,6,7,8\n switch(gn.type) {\n // rfc822Name\n case 1:\n // dNSName\n case 2:\n // uniformResourceIdentifier (URI)\n case 6:\n break;\n // IPAddress\n case 7:\n // convert to IPv4/IPv6 string representation\n altName.ip = forge.util.bytesToIP(gn.value);\n break;\n // registeredID\n case 8:\n altName.oid = asn1.derToOid(gn.value);\n break;\n default:\n // unsupported\n }\n }\n } else if(e.name === 'subjectKeyIdentifier') {\n // value is an OCTETSTRING w/the hash of the key-type specific\n // public key structure (eg: RSAPublicKey)\n var ev = asn1.fromDer(e.value);\n e.subjectKeyIdentifier = forge.util.bytesToHex(ev.value);\n }\n }\n return e;\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) from an ASN.1 object.\n *\n * Note: If the certification request is to be verified then compute hash\n * should be set to true. 
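// Usage sketch: inspect extensions parsed by certificateExtensionFromAsn1
// above. certPem is a placeholder PEM string; require() name is an assumption.
var forge = require('node-forge');
var cert = forge.pki.certificateFromPem(certPem);
var basicConstraints = cert.getExtension('basicConstraints');
if(basicConstraints) {
  console.log('is CA:', basicConstraints.cA === true);
}
var keyUsage = cert.getExtension({name: 'keyUsage'});
if(keyUsage) {
  console.log('can certSign:', keyUsage.keyCertSign === true);
}
var san = cert.getExtension('subjectAltName');
if(san) {
  san.altNames.forEach(function(n) {
    // type 2 = dNSName (value), type 7 = IPAddress (ip), per the parser above
    console.log('altName', n.type, n.ip || n.value);
  });
}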
There is currently no implementation for converting\n * a certificate back to ASN.1 so the CertificationRequestInfo part of the\n * ASN.1 object needs to be scanned before the csr object is created.\n *\n * @param obj the asn1 representation of a PKCS#10 certification request (CSR).\n * @param computeHash true to compute the hash for verification.\n *\n * @return the certification request (CSR).\n */\npki.certificationRequestFromAsn1 = function(obj, computeHash) {\n // validate certification request and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, certificationRequestValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#10 certificate request. ' +\n 'ASN.1 object is not a PKCS#10 CertificationRequest.');\n error.errors = errors;\n throw error;\n }\n\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n throw new Error('Cannot read public key. OID is not RSA.');\n }\n\n // create certification request\n var csr = pki.createCertificationRequest();\n csr.version = capture.csrVersion ? capture.csrVersion.charCodeAt(0) : 0;\n csr.signatureOid = forge.asn1.derToOid(capture.csrSignatureOid);\n csr.signatureParameters = _readSignatureParameters(\n csr.signatureOid, capture.csrSignatureParams, true);\n csr.siginfo.algorithmOid = forge.asn1.derToOid(capture.csrSignatureOid);\n csr.siginfo.parameters = _readSignatureParameters(\n csr.siginfo.algorithmOid, capture.csrSignatureParams, false);\n csr.signature = capture.csrSignature;\n\n // keep CertificationRequestInfo to preserve signature when exporting\n csr.certificationRequestInfo = capture.certificationRequestInfo;\n\n if(computeHash) {\n // create digest for OID signature type\n csr.md = _createSignatureDigest({\n signatureOid: csr.signatureOid,\n type: 'certification request'\n });\n\n // produce DER formatted CertificationRequestInfo and digest it\n var bytes = asn1.toDer(csr.certificationRequestInfo);\n csr.md.update(bytes.getBytes());\n }\n\n // handle subject, build subject message digest\n var smd = forge.md.sha1.create();\n csr.subject.getField = function(sn) {\n return _getAttribute(csr.subject, sn);\n };\n csr.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n csr.subject.attributes.push(attr);\n };\n csr.subject.attributes = pki.RDNAttributesAsArray(\n capture.certificationRequestInfoSubject, smd);\n csr.subject.hash = smd.digest().toHex();\n\n // convert RSA public key from ASN.1\n csr.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo);\n\n // convert attributes from ASN.1\n csr.getAttribute = function(sn) {\n return _getAttribute(csr, sn);\n };\n csr.addAttribute = function(attr) {\n _fillMissingFields([attr]);\n csr.attributes.push(attr);\n };\n csr.attributes = pki.CRIAttributesAsArray(\n capture.certificationRequestInfoAttributes || []);\n\n return csr;\n};\n\n/**\n * Creates an empty certification request (a CSR or certificate signing\n * request). 
Once created, its public key and attributes can be set and then\n * it can be signed.\n *\n * @return the empty certification request.\n */\npki.createCertificationRequest = function() {\n var csr = {};\n csr.version = 0x00;\n csr.signatureOid = null;\n csr.signature = null;\n csr.siginfo = {};\n csr.siginfo.algorithmOid = null;\n\n csr.subject = {};\n csr.subject.getField = function(sn) {\n return _getAttribute(csr.subject, sn);\n };\n csr.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n csr.subject.attributes.push(attr);\n };\n csr.subject.attributes = [];\n csr.subject.hash = null;\n\n csr.publicKey = null;\n csr.attributes = [];\n csr.getAttribute = function(sn) {\n return _getAttribute(csr, sn);\n };\n csr.addAttribute = function(attr) {\n _fillMissingFields([attr]);\n csr.attributes.push(attr);\n };\n csr.md = null;\n\n /**\n * Sets the subject of this certification request.\n *\n * @param attrs the array of subject attributes to use.\n */\n csr.setSubject = function(attrs) {\n // set new attributes\n _fillMissingFields(attrs);\n csr.subject.attributes = attrs;\n csr.subject.hash = null;\n };\n\n /**\n * Sets the attributes of this certification request.\n *\n * @param attrs the array of attributes to use.\n */\n csr.setAttributes = function(attrs) {\n // set new attributes\n _fillMissingFields(attrs);\n csr.attributes = attrs;\n };\n\n /**\n * Signs this certification request using the given private key.\n *\n * @param key the private key to sign with.\n * @param md the message digest object to use (defaults to forge.md.sha1).\n */\n csr.sign = function(key, md) {\n // TODO: get signature OID from private key\n csr.md = md || forge.md.sha1.create();\n var algorithmOid = oids[csr.md.algorithm + 'WithRSAEncryption'];\n if(!algorithmOid) {\n var error = new Error('Could not compute certification request digest. 
' +\n 'Unknown message digest algorithm OID.');\n error.algorithm = csr.md.algorithm;\n throw error;\n }\n csr.signatureOid = csr.siginfo.algorithmOid = algorithmOid;\n\n // get CertificationRequestInfo, convert to DER\n csr.certificationRequestInfo = pki.getCertificationRequestInfo(csr);\n var bytes = asn1.toDer(csr.certificationRequestInfo);\n\n // digest and sign\n csr.md.update(bytes.getBytes());\n csr.signature = key.sign(csr.md);\n };\n\n /**\n * Attempts verify the signature on the passed certification request using\n * its public key.\n *\n * A CSR that has been exported to a file in PEM format can be verified using\n * OpenSSL using this command:\n *\n * openssl req -in -verify -noout -text\n *\n * @return true if verified, false if not.\n */\n csr.verify = function() {\n var rval = false;\n\n var md = csr.md;\n if(md === null) {\n md = _createSignatureDigest({\n signatureOid: csr.signatureOid,\n type: 'certification request'\n });\n\n // produce DER formatted CertificationRequestInfo and digest it\n var cri = csr.certificationRequestInfo ||\n pki.getCertificationRequestInfo(csr);\n var bytes = asn1.toDer(cri);\n md.update(bytes.getBytes());\n }\n\n if(md !== null) {\n rval = _verifySignature({\n certificate: csr, md: md, signature: csr.signature\n });\n }\n\n return rval;\n };\n\n return csr;\n};\n\n/**\n * Converts an X.509 subject or issuer to an ASN.1 RDNSequence.\n *\n * @param obj the subject or issuer (distinguished name).\n *\n * @return the ASN.1 RDNSequence.\n */\nfunction _dnToAsn1(obj) {\n // create an empty RDNSequence\n var rval = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // iterate over attributes\n var attr, set;\n var attrs = obj.attributes;\n for(var i = 0; i < attrs.length; ++i) {\n attr = attrs[i];\n var value = attr.value;\n\n // reuse tag class for attribute value if available\n var valueTagClass = asn1.Type.PRINTABLESTRING;\n if('valueTagClass' in attr) {\n valueTagClass = attr.valueTagClass;\n\n if(valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(value);\n }\n // FIXME: handle more encodings\n }\n\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.type).getBytes()),\n // AttributeValue\n asn1.create(asn1.Class.UNIVERSAL, valueTagClass, false, value)\n ])\n ]);\n rval.value.push(set);\n }\n\n return rval;\n}\n\n/**\n * Gets all printable attributes (typically of an issuer or subject) in a\n * simplified JSON format for display.\n *\n * @param attrs the attributes.\n *\n * @return the JSON for display.\n */\nfunction _getAttributesAsJson(attrs) {\n var rval = {};\n for(var i = 0; i < attrs.length; ++i) {\n var attr = attrs[i];\n if(attr.shortName && (\n attr.valueTagClass === asn1.Type.UTF8 ||\n attr.valueTagClass === asn1.Type.PRINTABLESTRING ||\n attr.valueTagClass === asn1.Type.IA5STRING)) {\n var value = attr.value;\n if(attr.valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(attr.value);\n }\n if(!(attr.shortName in rval)) {\n rval[attr.shortName] = value;\n } else if(forge.util.isArray(rval[attr.shortName])) {\n rval[attr.shortName].push(value);\n } else {\n rval[attr.shortName] = [rval[attr.shortName], value];\n }\n }\n }\n return rval;\n}\n\n/**\n * Fills 
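// Usage sketch: create, sign and verify a certification request with the
// createCertificationRequest API above, then round-trip it through PEM.
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var csr = forge.pki.createCertificationRequest();
csr.publicKey = keys.publicKey;
csr.setSubject([{name: 'commonName', value: 'example.org'}]);
csr.sign(keys.privateKey, forge.md.sha256.create());
console.log(csr.verify());                       // -> true
var csrPem = forge.pki.certificationRequestToPem(csr);
var reparsed = forge.pki.certificationRequestFromPem(csrPem, true);
console.log(reparsed.verify());                  // -> true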
in missing fields in attributes.\n *\n * @param attrs the attributes to fill missing fields in.\n */\nfunction _fillMissingFields(attrs) {\n var attr;\n for(var i = 0; i < attrs.length; ++i) {\n attr = attrs[i];\n\n // populate missing name\n if(typeof attr.name === 'undefined') {\n if(attr.type && attr.type in pki.oids) {\n attr.name = pki.oids[attr.type];\n } else if(attr.shortName && attr.shortName in _shortNames) {\n attr.name = pki.oids[_shortNames[attr.shortName]];\n }\n }\n\n // populate missing type (OID)\n if(typeof attr.type === 'undefined') {\n if(attr.name && attr.name in pki.oids) {\n attr.type = pki.oids[attr.name];\n } else {\n var error = new Error('Attribute type not specified.');\n error.attribute = attr;\n throw error;\n }\n }\n\n // populate missing shortname\n if(typeof attr.shortName === 'undefined') {\n if(attr.name && attr.name in _shortNames) {\n attr.shortName = _shortNames[attr.name];\n }\n }\n\n // convert extensions to value\n if(attr.type === oids.extensionRequest) {\n attr.valueConstructed = true;\n attr.valueTagClass = asn1.Type.SEQUENCE;\n if(!attr.value && attr.extensions) {\n attr.value = [];\n for(var ei = 0; ei < attr.extensions.length; ++ei) {\n attr.value.push(pki.certificateExtensionToAsn1(\n _fillMissingExtensionFields(attr.extensions[ei])));\n }\n }\n }\n\n if(typeof attr.value === 'undefined') {\n var error = new Error('Attribute value not specified.');\n error.attribute = attr;\n throw error;\n }\n }\n}\n\n/**\n * Fills in missing fields in certificate extensions.\n *\n * @param e the extension.\n * @param [options] the options to use.\n * [cert] the certificate the extensions are for.\n *\n * @return the extension.\n */\nfunction _fillMissingExtensionFields(e, options) {\n options = options || {};\n\n // populate missing name\n if(typeof e.name === 'undefined') {\n if(e.id && e.id in pki.oids) {\n e.name = pki.oids[e.id];\n }\n }\n\n // populate missing id\n if(typeof e.id === 'undefined') {\n if(e.name && e.name in pki.oids) {\n e.id = pki.oids[e.name];\n } else {\n var error = new Error('Extension ID not specified.');\n error.extension = e;\n throw error;\n }\n }\n\n if(typeof e.value !== 'undefined') {\n return e;\n }\n\n // handle missing value:\n\n // value is a BIT STRING\n if(e.name === 'keyUsage') {\n // build flags\n var unused = 0;\n var b2 = 0x00;\n var b3 = 0x00;\n if(e.digitalSignature) {\n b2 |= 0x80;\n unused = 7;\n }\n if(e.nonRepudiation) {\n b2 |= 0x40;\n unused = 6;\n }\n if(e.keyEncipherment) {\n b2 |= 0x20;\n unused = 5;\n }\n if(e.dataEncipherment) {\n b2 |= 0x10;\n unused = 4;\n }\n if(e.keyAgreement) {\n b2 |= 0x08;\n unused = 3;\n }\n if(e.keyCertSign) {\n b2 |= 0x04;\n unused = 2;\n }\n if(e.cRLSign) {\n b2 |= 0x02;\n unused = 1;\n }\n if(e.encipherOnly) {\n b2 |= 0x01;\n unused = 0;\n }\n if(e.decipherOnly) {\n b3 |= 0x80;\n unused = 7;\n }\n\n // create bit string\n var value = String.fromCharCode(unused);\n if(b3 !== 0) {\n value += String.fromCharCode(b2) + String.fromCharCode(b3);\n } else if(b2 !== 0) {\n value += String.fromCharCode(b2);\n }\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value);\n } else if(e.name === 'basicConstraints') {\n // basicConstraints is a SEQUENCE\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n // cA BOOLEAN flag defaults to false\n if(e.cA) {\n e.value.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false,\n String.fromCharCode(0xFF)));\n }\n if('pathLenConstraint' in e) {\n 
e.value.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(e.pathLenConstraint).getBytes()));\n }\n } else if(e.name === 'extKeyUsage') {\n // extKeyUsage is a SEQUENCE of OIDs\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n for(var key in e) {\n if(e[key] !== true) {\n continue;\n }\n // key is name in OID map\n if(key in oids) {\n seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID,\n false, asn1.oidToDer(oids[key]).getBytes()));\n } else if(key.indexOf('.') !== -1) {\n // assume key is an OID\n seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID,\n false, asn1.oidToDer(key).getBytes()));\n }\n }\n } else if(e.name === 'nsCertType') {\n // nsCertType is a BIT STRING\n // build flags\n var unused = 0;\n var b2 = 0x00;\n\n if(e.client) {\n b2 |= 0x80;\n unused = 7;\n }\n if(e.server) {\n b2 |= 0x40;\n unused = 6;\n }\n if(e.email) {\n b2 |= 0x20;\n unused = 5;\n }\n if(e.objsign) {\n b2 |= 0x10;\n unused = 4;\n }\n if(e.reserved) {\n b2 |= 0x08;\n unused = 3;\n }\n if(e.sslCA) {\n b2 |= 0x04;\n unused = 2;\n }\n if(e.emailCA) {\n b2 |= 0x02;\n unused = 1;\n }\n if(e.objCA) {\n b2 |= 0x01;\n unused = 0;\n }\n\n // create bit string\n var value = String.fromCharCode(unused);\n if(b2 !== 0) {\n value += String.fromCharCode(b2);\n }\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value);\n } else if(e.name === 'subjectAltName' || e.name === 'issuerAltName') {\n // SYNTAX SEQUENCE\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n var altName;\n for(var n = 0; n < e.altNames.length; ++n) {\n altName = e.altNames[n];\n var value = altName.value;\n // handle IP\n if(altName.type === 7 && altName.ip) {\n value = forge.util.bytesFromIP(altName.ip);\n if(value === null) {\n var error = new Error(\n 'Extension \"ip\" value is not a valid IPv4 or IPv6 address.');\n error.extension = e;\n throw error;\n }\n } else if(altName.type === 8) {\n // handle OID\n if(altName.oid) {\n value = asn1.oidToDer(asn1.oidToDer(altName.oid));\n } else {\n // deprecated ... 
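// Usage sketch: the extension objects below rely on _fillMissingExtensionFields
// above to build BIT STRING / SEQUENCE values from the boolean flags. cert is
// assumed to be a certificate created with forge.pki.createCertificate().
cert.setExtensions([
  {name: 'basicConstraints', cA: true},
  {name: 'keyUsage', digitalSignature: true, keyCertSign: true, cRLSign: true},
  {name: 'extKeyUsage', serverAuth: true, clientAuth: true},
  {name: 'subjectAltName', altNames: [
    {type: 2, value: 'www.example.org'},         // dNSName
    {type: 7, ip: '127.0.0.1'}                   // IPAddress
  ]}
]);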
convert value to OID\n value = asn1.oidToDer(value);\n }\n }\n e.value.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, altName.type, false,\n value));\n }\n } else if(e.name === 'nsComment' && options.cert) {\n // sanity check value is ASCII (req'd) and not too big\n if(!(/^[\\x00-\\x7F]*$/.test(e.comment)) ||\n (e.comment.length < 1) || (e.comment.length > 128)) {\n throw new Error('Invalid \"nsComment\" content.');\n }\n // IA5STRING opaque comment\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.IA5STRING, false, e.comment);\n } else if(e.name === 'subjectKeyIdentifier' && options.cert) {\n var ski = options.cert.generateSubjectKeyIdentifier();\n e.subjectKeyIdentifier = ski.toHex();\n // OCTETSTRING w/digest\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, ski.getBytes());\n } else if(e.name === 'authorityKeyIdentifier' && options.cert) {\n // SYNTAX SEQUENCE\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n\n if(e.keyIdentifier) {\n var keyIdentifier = (e.keyIdentifier === true ?\n options.cert.generateSubjectKeyIdentifier().getBytes() :\n e.keyIdentifier);\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, false, keyIdentifier));\n }\n\n if(e.authorityCertIssuer) {\n var authorityCertIssuer = [\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 4, true, [\n _dnToAsn1(e.authorityCertIssuer === true ?\n options.cert.issuer : e.authorityCertIssuer)\n ])\n ];\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, authorityCertIssuer));\n }\n\n if(e.serialNumber) {\n var serialNumber = forge.util.hexToBytes(e.serialNumber === true ?\n options.cert.serialNumber : e.serialNumber);\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, false, serialNumber));\n }\n } else if(e.name === 'cRLDistributionPoints') {\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n\n // Create sub SEQUENCE of DistributionPointName\n var subSeq = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // Create fullName CHOICE\n var fullNameGeneralNames = asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n var altName;\n for(var n = 0; n < e.altNames.length; ++n) {\n altName = e.altNames[n];\n var value = altName.value;\n // handle IP\n if(altName.type === 7 && altName.ip) {\n value = forge.util.bytesFromIP(altName.ip);\n if(value === null) {\n var error = new Error(\n 'Extension \"ip\" value is not a valid IPv4 or IPv6 address.');\n error.extension = e;\n throw error;\n }\n } else if(altName.type === 8) {\n // handle OID\n if(altName.oid) {\n value = asn1.oidToDer(asn1.oidToDer(altName.oid));\n } else {\n // deprecated ... 
convert value to OID\n value = asn1.oidToDer(value);\n }\n }\n fullNameGeneralNames.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, altName.type, false,\n value));\n }\n\n // Add to the parent SEQUENCE\n subSeq.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, [fullNameGeneralNames]));\n seq.push(subSeq);\n }\n\n // ensure value has been defined by now\n if(typeof e.value === 'undefined') {\n var error = new Error('Extension value not specified.');\n error.extension = e;\n throw error;\n }\n\n return e;\n}\n\n/**\n * Convert signature parameters object to ASN.1\n *\n * @param {String} oid Signature algorithm OID\n * @param params The signature parametrs object\n * @return ASN.1 object representing signature parameters\n */\nfunction _signatureParametersToAsn1(oid, params) {\n switch(oid) {\n case oids['RSASSA-PSS']:\n var parts = [];\n\n if(params.hash.algorithmOid !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.hash.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ]));\n }\n\n if(params.mgf.algorithmOid !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.mgf.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.mgf.hash.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ])\n ]));\n }\n\n if(params.saltLength !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(params.saltLength).getBytes())\n ]));\n }\n\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, parts);\n\n default:\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '');\n }\n}\n\n/**\n * Converts a certification request's attributes to an ASN.1 set of\n * CRIAttributes.\n *\n * @param csr certification request.\n *\n * @return the ASN.1 set of CRIAttributes.\n */\nfunction _CRIAttributesToAsn1(csr) {\n // create an empty context-specific container\n var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n\n // no attributes, return empty container\n if(csr.attributes.length === 0) {\n return rval;\n }\n\n // each attribute has a sequence with a type and a set of values\n var attrs = csr.attributes;\n for(var i = 0; i < attrs.length; ++i) {\n var attr = attrs[i];\n var value = attr.value;\n\n // reuse tag class for attribute value if available\n var valueTagClass = asn1.Type.UTF8;\n if('valueTagClass' in attr) {\n valueTagClass = attr.valueTagClass;\n }\n if(valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(value);\n }\n var valueConstructed = false;\n if('valueConstructed' in attr) {\n valueConstructed = attr.valueConstructed;\n }\n // FIXME: handle more encodings\n\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(attr.type).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n // AttributeValue\n asn1.create(\n asn1.Class.UNIVERSAL, valueTagClass, valueConstructed, value)\n ])\n ]);\n rval.value.push(seq);\n }\n\n return rval;\n}\n\nvar jan_1_1950 = new Date('1950-01-01T00:00:00Z');\nvar jan_1_2050 = new Date('2050-01-01T00:00:00Z');\n\n/**\n * Converts a Date object to ASN.1\n * Handles the different format before and after 1st January 2050\n *\n * @param date date object.\n *\n * @return the ASN.1 object representing the date.\n */\nfunction _dateToAsn1(date) {\n if(date >= jan_1_1950 && date < jan_1_2050) {\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false,\n asn1.dateToUtcTime(date));\n } else {\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false,\n asn1.dateToGeneralizedTime(date));\n }\n}\n\n/**\n * Gets the ASN.1 TBSCertificate part of an X.509v3 certificate.\n *\n * @param cert the certificate.\n *\n * @return the asn1 TBSCertificate.\n */\npki.getTBSCertificate = function(cert) {\n // TBSCertificate\n var notBefore = _dateToAsn1(cert.validity.notBefore);\n var notAfter = _dateToAsn1(cert.validity.notAfter);\n var tbs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // integer\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(cert.version).getBytes())\n ]),\n // serialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(cert.serialNumber)),\n // signature\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(cert.siginfo.algorithmOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(\n cert.siginfo.algorithmOid, cert.siginfo.parameters)\n ]),\n // issuer\n _dnToAsn1(cert.issuer),\n // validity\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n notBefore,\n notAfter\n ]),\n // subject\n _dnToAsn1(cert.subject),\n // SubjectPublicKeyInfo\n pki.publicKeyToAsn1(cert.publicKey)\n ]);\n\n if(cert.issuer.uniqueId) {\n // issuerUniqueID (optional)\n tbs.value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n // TODO: support arbitrary bit length ids\n String.fromCharCode(0x00) +\n cert.issuer.uniqueId\n )\n ])\n );\n }\n if(cert.subject.uniqueId) {\n // subjectUniqueID (optional)\n tbs.value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n // TODO: support arbitrary bit length ids\n String.fromCharCode(0x00) +\n cert.subject.uniqueId\n )\n ])\n );\n }\n\n if(cert.extensions.length > 0) {\n // extensions (optional)\n tbs.value.push(pki.certificateExtensionsToAsn1(cert.extensions));\n }\n\n return tbs;\n};\n\n/**\n * Gets the ASN.1 CertificationRequestInfo part of a\n * PKCS#10 CertificationRequest.\n *\n * @param csr the certification request.\n *\n * @return the asn1 CertificationRequestInfo.\n */\npki.getCertificationRequestInfo = function(csr) {\n // CertificationRequestInfo\n var cri = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(csr.version).getBytes()),\n // subject\n _dnToAsn1(csr.subject),\n // SubjectPublicKeyInfo\n pki.publicKeyToAsn1(csr.publicKey),\n // attributes\n 
_CRIAttributesToAsn1(csr)\n ]);\n\n return cri;\n};\n\n/**\n * Converts a DistinguishedName (subject or issuer) to an ASN.1 object.\n *\n * @param dn the DistinguishedName.\n *\n * @return the asn1 representation of a DistinguishedName.\n */\npki.distinguishedNameToAsn1 = function(dn) {\n return _dnToAsn1(dn);\n};\n\n/**\n * Converts an X.509v3 RSA certificate to an ASN.1 object.\n *\n * @param cert the certificate.\n *\n * @return the asn1 representation of an X.509v3 RSA certificate.\n */\npki.certificateToAsn1 = function(cert) {\n // prefer cached TBSCertificate over generating one\n var tbsCertificate = cert.tbsCertificate || pki.getTBSCertificate(cert);\n\n // Certificate\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // TBSCertificate\n tbsCertificate,\n // AlgorithmIdentifier (signature algorithm)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(cert.signatureOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(cert.signatureOid, cert.signatureParameters)\n ]),\n // SignatureValue\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n String.fromCharCode(0x00) + cert.signature)\n ]);\n};\n\n/**\n * Converts X.509v3 certificate extensions to ASN.1.\n *\n * @param exts the extensions to convert.\n *\n * @return the extensions in ASN.1 format.\n */\npki.certificateExtensionsToAsn1 = function(exts) {\n // create top-level extension container\n var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 3, true, []);\n\n // create extension sequence (stores a sequence for each extension)\n var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n rval.value.push(seq);\n\n for(var i = 0; i < exts.length; ++i) {\n seq.value.push(pki.certificateExtensionToAsn1(exts[i]));\n }\n\n return rval;\n};\n\n/**\n * Converts a single certificate extension to ASN.1.\n *\n * @param ext the extension to convert.\n *\n * @return the extension in ASN.1 format.\n */\npki.certificateExtensionToAsn1 = function(ext) {\n // create a sequence for each extension\n var extseq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // extnID (OID)\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(ext.id).getBytes()));\n\n // critical defaults to false\n if(ext.critical) {\n // critical BOOLEAN DEFAULT FALSE\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false,\n String.fromCharCode(0xFF)));\n }\n\n var value = ext.value;\n if(typeof ext.value !== 'string') {\n // value is asn.1\n value = asn1.toDer(value).getBytes();\n }\n\n // extnValue (OCTET STRING)\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, value));\n\n return extseq;\n};\n\n/**\n * Converts a PKCS#10 certification request to an ASN.1 object.\n *\n * @param csr the certification request.\n *\n * @return the asn1 representation of a certification request.\n */\npki.certificationRequestToAsn1 = function(csr) {\n // prefer cached CertificationRequestInfo over generating one\n var cri = csr.certificationRequestInfo ||\n pki.getCertificationRequestInfo(csr);\n\n // Certificate\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // CertificationRequestInfo\n cri,\n // AlgorithmIdentifier (signature algorithm)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(csr.signatureOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(csr.signatureOid, csr.signatureParameters)\n ]),\n // signature\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n String.fromCharCode(0x00) + csr.signature)\n ]);\n};\n\n/**\n * Creates a CA store.\n *\n * @param certs an optional array of certificate objects or PEM-formatted\n * certificate strings to add to the CA store.\n *\n * @return the CA store.\n */\npki.createCaStore = function(certs) {\n // create CA store\n var caStore = {\n // stored certificates\n certs: {}\n };\n\n /**\n * Gets the certificate that issued the passed certificate or its\n * 'parent'.\n *\n * @param cert the certificate to get the parent for.\n *\n * @return the parent certificate or null if none was found.\n */\n caStore.getIssuer = function(cert) {\n var rval = getBySubject(cert.issuer);\n\n // see if there are multiple matches\n /*if(forge.util.isArray(rval)) {\n // TODO: resolve multiple matches by checking\n // authorityKey/subjectKey/issuerUniqueID/other identifiers, etc.\n // FIXME: or alternatively do authority key mapping\n // if possible (X.509v1 certs can't work?)\n throw new Error('Resolving multiple issuer matches not implemented yet.');\n }*/\n\n return rval;\n };\n\n /**\n * Adds a trusted certificate to the store.\n *\n * @param cert the certificate to add as a trusted certificate (either a\n * pki.certificate object or a PEM-formatted certificate).\n */\n caStore.addCertificate = function(cert) {\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n\n ensureSubjectHasHash(cert.subject);\n\n if(!caStore.hasCertificate(cert)) { // avoid duplicate certificates in store\n if(cert.subject.hash in caStore.certs) {\n // subject hash already exists, append to array\n var tmp = caStore.certs[cert.subject.hash];\n if(!forge.util.isArray(tmp)) {\n tmp = [tmp];\n }\n tmp.push(cert);\n caStore.certs[cert.subject.hash] = tmp;\n } else {\n caStore.certs[cert.subject.hash] = cert;\n }\n }\n };\n\n /**\n * Checks to see if the given certificate is in the store.\n *\n * @param cert the certificate to check (either a pki.certificate or a\n * PEM-formatted certificate).\n *\n * @return true if the certificate is in the store, false if not.\n */\n caStore.hasCertificate = function(cert) {\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n\n var match = getBySubject(cert.subject);\n if(!match) {\n return false;\n }\n if(!forge.util.isArray(match)) {\n match = [match];\n }\n // compare DER-encoding of certificates\n var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes();\n for(var i = 0; i < match.length; ++i) {\n var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes();\n if(der1 === der2) {\n return true;\n }\n }\n return false;\n };\n\n /**\n * Lists all of the certificates kept in the store.\n *\n * @return an array of all of the pki.certificate objects in the store.\n */\n caStore.listAllCertificates = function() {\n var certList = [];\n\n for(var hash in caStore.certs) {\n if(caStore.certs.hasOwnProperty(hash)) {\n var value = caStore.certs[hash];\n if(!forge.util.isArray(value)) {\n certList.push(value);\n } else {\n for(var i = 0; i < value.length; ++i) {\n certList.push(value[i]);\n }\n }\n }\n }\n\n return certList;\n };\n\n /**\n * Removes a certificate from the store.\n *\n * @param cert the certificate to remove (either a pki.certificate or a\n * 
PEM-formatted certificate).\n *\n * @return the certificate that was removed or null if the certificate\n * wasn't in store.\n */\n caStore.removeCertificate = function(cert) {\n var result;\n\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n ensureSubjectHasHash(cert.subject);\n if(!caStore.hasCertificate(cert)) {\n return null;\n }\n\n var match = getBySubject(cert.subject);\n\n if(!forge.util.isArray(match)) {\n result = caStore.certs[cert.subject.hash];\n delete caStore.certs[cert.subject.hash];\n return result;\n }\n\n // compare DER-encoding of certificates\n var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes();\n for(var i = 0; i < match.length; ++i) {\n var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes();\n if(der1 === der2) {\n result = match[i];\n match.splice(i, 1);\n }\n }\n if(match.length === 0) {\n delete caStore.certs[cert.subject.hash];\n }\n\n return result;\n };\n\n function getBySubject(subject) {\n ensureSubjectHasHash(subject);\n return caStore.certs[subject.hash] || null;\n }\n\n function ensureSubjectHasHash(subject) {\n // produce subject hash if it doesn't exist\n if(!subject.hash) {\n var md = forge.md.sha1.create();\n subject.attributes = pki.RDNAttributesAsArray(_dnToAsn1(subject), md);\n subject.hash = md.digest().toHex();\n }\n }\n\n // auto-add passed in certs\n if(certs) {\n // parse PEM-formatted certificates as necessary\n for(var i = 0; i < certs.length; ++i) {\n var cert = certs[i];\n caStore.addCertificate(cert);\n }\n }\n\n return caStore;\n};\n\n/**\n * Certificate verification errors, based on TLS.\n */\npki.certificateError = {\n bad_certificate: 'forge.pki.BadCertificate',\n unsupported_certificate: 'forge.pki.UnsupportedCertificate',\n certificate_revoked: 'forge.pki.CertificateRevoked',\n certificate_expired: 'forge.pki.CertificateExpired',\n certificate_unknown: 'forge.pki.CertificateUnknown',\n unknown_ca: 'forge.pki.UnknownCertificateAuthority'\n};\n\n/**\n * Verifies a certificate chain against the given Certificate Authority store\n * with an optional custom verify callback.\n *\n * @param caStore a certificate store to verify against.\n * @param chain the certificate chain to verify, with the root or highest\n * authority at the end (an array of certificates).\n * @param options a callback to be called for every certificate in the chain or\n * an object with:\n * verify a callback to be called for every certificate in the\n * chain\n * validityCheckDate the date against which the certificate\n * validity period should be checked. Pass null to not check\n * the validity period. 
By default, the current date is used.\n *\n * The verify callback has the following signature:\n *\n * verified - Set to true if certificate was verified, otherwise the\n * pki.certificateError for why the certificate failed.\n * depth - The current index in the chain, where 0 is the end point's cert.\n * certs - The certificate chain, *NOTE* an empty chain indicates an anonymous\n * end point.\n *\n * The function returns true on success and on failure either the appropriate\n * pki.certificateError or an object with 'error' set to the appropriate\n * pki.certificateError and 'message' set to a custom error message.\n *\n * @return true if successful, error thrown if not.\n */\npki.verifyCertificateChain = function(caStore, chain, options) {\n /* From: RFC3280 - Internet X.509 Public Key Infrastructure Certificate\n Section 6: Certification Path Validation\n See inline parentheticals related to this particular implementation.\n\n The primary goal of path validation is to verify the binding between\n a subject distinguished name or a subject alternative name and subject\n public key, as represented in the end entity certificate, based on the\n public key of the trust anchor. This requires obtaining a sequence of\n certificates that support that binding. That sequence should be provided\n in the passed 'chain'. The trust anchor should be in the given CA\n store. The 'end entity' certificate is the certificate provided by the\n end point (typically a server) and is the first in the chain.\n\n To meet this goal, the path validation process verifies, among other\n things, that a prospective certification path (a sequence of n\n certificates or a 'chain') satisfies the following conditions:\n\n (a) for all x in {1, ..., n-1}, the subject of certificate x is\n the issuer of certificate x+1;\n\n (b) certificate 1 is issued by the trust anchor;\n\n (c) certificate n is the certificate to be validated; and\n\n (d) for all x in {1, ..., n}, the certificate was valid at the\n time in question.\n\n Note that here 'n' is index 0 in the chain and 1 is the last certificate\n in the chain and it must be signed by a certificate in the connection's\n CA store.\n\n The path validation process also determines the set of certificate\n policies that are valid for this path, based on the certificate policies\n extension, policy mapping extension, policy constraints extension, and\n inhibit any-policy extension.\n\n Note: Policy mapping extension not supported (Not Required).\n\n Note: If the certificate has an unsupported critical extension, then it\n must be rejected.\n\n Note: A certificate is self-issued if the DNs that appear in the subject\n and issuer fields are identical and are not empty.\n\n The path validation algorithm assumes the following seven inputs are\n provided to the path processing logic. What this specific implementation\n will use is provided parenthetically:\n\n (a) a prospective certification path of length n (the 'chain')\n (b) the current date/time: ('now').\n (c) user-initial-policy-set: A set of certificate policy identifiers\n naming the policies that are acceptable to the certificate user.\n The user-initial-policy-set contains the special value any-policy\n if the user is not concerned about certificate policy\n (Not implemented. Any policy is accepted).\n (d) trust anchor information, describing a CA that serves as a trust\n anchor for the certification path. 
The trust anchor information\n includes:\n\n (1) the trusted issuer name,\n (2) the trusted public key algorithm,\n (3) the trusted public key, and\n (4) optionally, the trusted public key parameters associated\n with the public key.\n\n (Trust anchors are provided via certificates in the CA store).\n\n The trust anchor information may be provided to the path processing\n procedure in the form of a self-signed certificate. The trusted anchor\n information is trusted because it was delivered to the path processing\n procedure by some trustworthy out-of-band procedure. If the trusted\n public key algorithm requires parameters, then the parameters are\n provided along with the trusted public key (No parameters used in this\n implementation).\n\n (e) initial-policy-mapping-inhibit, which indicates if policy mapping is\n allowed in the certification path.\n (Not implemented, no policy checking)\n\n (f) initial-explicit-policy, which indicates if the path must be valid\n for at least one of the certificate policies in the user-initial-\n policy-set.\n (Not implemented, no policy checking)\n\n (g) initial-any-policy-inhibit, which indicates whether the\n anyPolicy OID should be processed if it is included in a\n certificate.\n (Not implemented, so any policy is valid provided that it is\n not marked as critical) */\n\n /* Basic Path Processing:\n\n For each certificate in the 'chain', the following is checked:\n\n 1. The certificate validity period includes the current time.\n 2. The certificate was signed by its parent (where the parent is either\n the next in the chain or from the CA store). Allow processing to\n continue to the next step if no parent is found but the certificate is\n in the CA store.\n 3. TODO: The certificate has not been revoked.\n 4. The certificate issuer name matches the parent's subject name.\n 5. TODO: If the certificate is self-issued and not the final certificate\n in the chain, skip this step, otherwise verify that the subject name\n is within one of the permitted subtrees of X.500 distinguished names\n and that each of the alternative names in the subjectAltName extension\n (critical or non-critical) is within one of the permitted subtrees for\n that name type.\n 6. TODO: If the certificate is self-issued and not the final certificate\n in the chain, skip this step, otherwise verify that the subject name\n is not within one of the excluded subtrees for X.500 distinguished\n names and none of the subjectAltName extension names are excluded for\n that name type.\n 7. The other steps in the algorithm for basic path processing involve\n handling the policy extension which is not presently supported in this\n implementation. Instead, if a critical policy extension is found, the\n certificate is rejected as not supported.\n 8. If the certificate is not the first or if its the only certificate in\n the chain (having no parent from the CA store or is self-signed) and it\n has a critical key usage extension, verify that the keyCertSign bit is\n set. If the key usage extension exists, verify that the basic\n constraints extension exists. If the basic constraints extension exists,\n verify that the cA flag is set. If pathLenConstraint is set, ensure that\n the number of certificates that precede in the chain (come earlier\n in the chain as implemented below), excluding the very first in the\n chain (typically the end-entity one), isn't greater than the\n pathLenConstraint. 
This constraint limits the number of intermediate\n CAs that may appear below a CA before only end-entity certificates\n may be issued. */\n\n // if a verify callback is passed as the third parameter, package it within\n // the options object. This is to support a legacy function signature that\n // expected the verify callback as the third parameter.\n if(typeof options === 'function') {\n options = {verify: options};\n }\n options = options || {};\n\n // copy cert chain references to another array to protect against changes\n // in verify callback\n chain = chain.slice(0);\n var certs = chain.slice(0);\n\n var validityCheckDate = options.validityCheckDate;\n // if no validityCheckDate is specified, default to the current date. Make\n // sure to maintain the value null because it indicates that the validity\n // period should not be checked.\n if(typeof validityCheckDate === 'undefined') {\n validityCheckDate = new Date();\n }\n\n // verify each cert in the chain using its parent, where the parent\n // is either the next in the chain or from the CA store\n var first = true;\n var error = null;\n var depth = 0;\n do {\n var cert = chain.shift();\n var parent = null;\n var selfSigned = false;\n\n if(validityCheckDate) {\n // 1. check valid time\n if(validityCheckDate < cert.validity.notBefore ||\n validityCheckDate > cert.validity.notAfter) {\n error = {\n message: 'Certificate is not valid yet or has expired.',\n error: pki.certificateError.certificate_expired,\n notBefore: cert.validity.notBefore,\n notAfter: cert.validity.notAfter,\n // TODO: we might want to reconsider renaming 'now' to\n // 'validityCheckDate' should this API be changed in the future.\n now: validityCheckDate\n };\n }\n }\n\n // 2. verify with parent from chain or CA store\n if(error === null) {\n parent = chain[0] || caStore.getIssuer(cert);\n if(parent === null) {\n // check for self-signed cert\n if(cert.isIssuer(cert)) {\n selfSigned = true;\n parent = cert;\n }\n }\n\n if(parent) {\n // FIXME: current CA store implementation might have multiple\n // certificates where the issuer can't be determined from the\n // certificate (happens rarely with, eg: old certificates) so normalize\n // by always putting parents into an array\n // TODO: there's may be an extreme degenerate case currently uncovered\n // where an old intermediate certificate seems to have a matching parent\n // but none of the parents actually verify ... but the intermediate\n // is in the CA and it should pass this check; needs investigation\n var parents = parent;\n if(!forge.util.isArray(parents)) {\n parents = [parents];\n }\n\n // try to verify with each possible parent (typically only one)\n var verified = false;\n while(!verified && parents.length > 0) {\n parent = parents.shift();\n try {\n verified = parent.verify(cert);\n } catch(ex) {\n // failure to verify, don't care why, try next one\n }\n }\n\n if(!verified) {\n error = {\n message: 'Certificate signature is invalid.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n\n if(error === null && (!parent || selfSigned) &&\n !caStore.hasCertificate(cert)) {\n // no parent issuer and certificate itself is not trusted\n error = {\n message: 'Certificate is not trusted.',\n error: pki.certificateError.unknown_ca\n };\n }\n }\n\n // TODO: 3. check revoked\n\n // 4. 
check for matching issuer/subject\n if(error === null && parent && !cert.isIssuer(parent)) {\n // parent is not issuer\n error = {\n message: 'Certificate issuer is invalid.',\n error: pki.certificateError.bad_certificate\n };\n }\n\n // 5. TODO: check names with permitted names tree\n\n // 6. TODO: check names against excluded names tree\n\n // 7. check for unsupported critical extensions\n if(error === null) {\n // supported extensions\n var se = {\n keyUsage: true,\n basicConstraints: true\n };\n for(var i = 0; error === null && i < cert.extensions.length; ++i) {\n var ext = cert.extensions[i];\n if(ext.critical && !(ext.name in se)) {\n error = {\n message:\n 'Certificate has an unsupported critical extension.',\n error: pki.certificateError.unsupported_certificate\n };\n }\n }\n }\n\n // 8. check for CA if cert is not first or is the only certificate\n // remaining in chain with no parent or is self-signed\n if(error === null &&\n (!first || (chain.length === 0 && (!parent || selfSigned)))) {\n // first check keyUsage extension and then basic constraints\n var bcExt = cert.getExtension('basicConstraints');\n var keyUsageExt = cert.getExtension('keyUsage');\n if(keyUsageExt !== null) {\n // keyCertSign must be true and there must be a basic\n // constraints extension\n if(!keyUsageExt.keyCertSign || bcExt === null) {\n // bad certificate\n error = {\n message:\n 'Certificate keyUsage or basicConstraints conflict ' +\n 'or indicate that the certificate is not a CA. ' +\n 'If the certificate is the only one in the chain or ' +\n 'isn\\'t the first then the certificate must be a ' +\n 'valid CA.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n // basic constraints cA flag must be set\n if(error === null && bcExt !== null && !bcExt.cA) {\n // bad certificate\n error = {\n message:\n 'Certificate basicConstraints indicates the certificate ' +\n 'is not a CA.',\n error: pki.certificateError.bad_certificate\n };\n }\n // if error is not null and keyUsage is available, then we know it\n // has keyCertSign and there is a basic constraints extension too,\n // which means we can check pathLenConstraint (if it exists)\n if(error === null && keyUsageExt !== null &&\n 'pathLenConstraint' in bcExt) {\n // pathLen is the maximum # of intermediate CA certs that can be\n // found between the current certificate and the end-entity (depth 0)\n // certificate; this number does not include the end-entity (depth 0,\n // last in the chain) even if it happens to be a CA certificate itself\n var pathLen = depth - 1;\n if(pathLen > bcExt.pathLenConstraint) {\n // pathLenConstraint violated, bad certificate\n error = {\n message:\n 'Certificate basicConstraints pathLenConstraint violated.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n }\n\n // call application callback\n var vfd = (error === null) ? true : error.error;\n var ret = options.verify ? 
options.verify(vfd, depth, certs) : vfd;\n if(ret === true) {\n // clear any set error\n error = null;\n } else {\n // if passed basic tests, set default message and alert\n if(vfd === true) {\n error = {\n message: 'The application rejected the certificate.',\n error: pki.certificateError.bad_certificate\n };\n }\n\n // check for custom error info\n if(ret || ret === 0) {\n // set custom message and error\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.error) {\n error.error = ret.error;\n }\n } else if(typeof ret === 'string') {\n // set custom error\n error.error = ret;\n }\n }\n\n // throw error\n throw error;\n }\n\n // no longer first cert in chain\n first = false;\n ++depth;\n } while(chain.length > 0);\n\n return true;\n};\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\nconst Queue = require('yocto-queue');\n\nconst pLimit = concurrency => {\n\tif (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) {\n\t\tthrow new TypeError('Expected `concurrency` to be a number from 1 and up');\n\t}\n\n\tconst queue = new Queue();\n\tlet activeCount = 0;\n\n\tconst next = () => {\n\t\tactiveCount--;\n\n\t\tif (queue.size > 0) {\n\t\t\tqueue.dequeue()();\n\t\t}\n\t};\n\n\tconst run = async (fn, resolve, ...args) => {\n\t\tactiveCount++;\n\n\t\tconst result = (async () => fn(...args))();\n\n\t\tresolve(result);\n\n\t\ttry {\n\t\t\tawait result;\n\t\t} catch {}\n\n\t\tnext();\n\t};\n\n\tconst enqueue = (fn, resolve, ...args) => {\n\t\tqueue.enqueue(run.bind(null, fn, resolve, ...args));\n\n\t\t(async () => {\n\t\t\t// This function needs to wait until the next microtask before comparing\n\t\t\t// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously\n\t\t\t// when the run function is dequeued and called. 
The comparison in the if-statement\n\t\t\t// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.\n\t\t\tawait Promise.resolve();\n\n\t\t\tif (activeCount < concurrency && queue.size > 0) {\n\t\t\t\tqueue.dequeue()();\n\t\t\t}\n\t\t})();\n\t};\n\n\tconst generator = (fn, ...args) => new Promise(resolve => {\n\t\tenqueue(fn, resolve, ...args);\n\t});\n\n\tObject.defineProperties(generator, {\n\t\tactiveCount: {\n\t\t\tget: () => activeCount\n\t\t},\n\t\tpendingCount: {\n\t\t\tget: () => queue.size\n\t\t},\n\t\tclearQueue: {\n\t\t\tvalue: () => {\n\t\t\t\tqueue.clear();\n\t\t\t}\n\t\t}\n\t});\n\n\treturn generator;\n};\n\nmodule.exports = pLimit;\n","'use strict';\n\nconst codes = {};\n\nfunction createErrorType(code, message, Base) {\n if (!Base) {\n Base = Error\n }\n\n function getMessage (arg1, arg2, arg3) {\n if (typeof message === 'string') {\n return message\n } else {\n return message(arg1, arg2, arg3)\n }\n }\n\n class NodeError extends Base {\n constructor (arg1, arg2, arg3) {\n super(getMessage(arg1, arg2, arg3));\n }\n }\n\n NodeError.prototype.name = Base.name;\n NodeError.prototype.code = code;\n\n codes[code] = NodeError;\n}\n\n// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js\nfunction oneOf(expected, thing) {\n if (Array.isArray(expected)) {\n const len = expected.length;\n expected = expected.map((i) => String(i));\n if (len > 2) {\n return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +\n expected[len - 1];\n } else if (len === 2) {\n return `one of ${thing} ${expected[0]} or ${expected[1]}`;\n } else {\n return `of ${thing} ${expected[0]}`;\n }\n } else {\n return `of ${thing} ${String(expected)}`;\n }\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith\nfunction startsWith(str, search, pos) {\n\treturn str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith\nfunction endsWith(str, search, this_len) {\n\tif (this_len === undefined || this_len > str.length) {\n\t\tthis_len = str.length;\n\t}\n\treturn str.substring(this_len - search.length, this_len) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes\nfunction includes(str, search, start) {\n if (typeof start !== 'number') {\n start = 0;\n }\n\n if (start + search.length > str.length) {\n return false;\n } else {\n return str.indexOf(search, start) !== -1;\n }\n}\n\ncreateErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {\n return 'The value \"' + value + '\" is invalid for option \"' + name + '\"'\n}, TypeError);\ncreateErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {\n // determiner: 'must be' or 'must not be'\n let determiner;\n if (typeof expected === 'string' && startsWith(expected, 'not ')) {\n determiner = 'must not be';\n expected = expected.replace(/^not /, '');\n } else {\n determiner = 'must be';\n }\n\n let msg;\n if (endsWith(name, ' argument')) {\n // For cases like 'first argument'\n msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;\n } else {\n const type = includes(name, '.') ? 'property' : 'argument';\n msg = `The \"${name}\" ${type} ${determiner} ${oneOf(expected, 'type')}`;\n }\n\n msg += `. 
Received type ${typeof actual}`;\n return msg;\n}, TypeError);\ncreateErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');\ncreateErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {\n return 'The ' + name + ' method is not implemented'\n});\ncreateErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');\ncreateErrorType('ERR_STREAM_DESTROYED', function (name) {\n return 'Cannot call ' + name + ' after a stream was destroyed';\n});\ncreateErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');\ncreateErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');\ncreateErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');\ncreateErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);\ncreateErrorType('ERR_UNKNOWN_ENCODING', function (arg) {\n return 'Unknown encoding: ' + arg\n}, TypeError);\ncreateErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');\n\nmodule.exports.codes = codes;\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n\n'use strict';\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n for (var key in obj) keys.push(key);\n return keys;\n};\n/**/\n\nmodule.exports = Duplex;\nconst Readable = require('./_stream_readable');\nconst Writable = require('./_stream_writable');\nrequire('inherits')(Duplex, Readable);\n{\n // Allow the keys array to be GC'ed.\n const keys = objectKeys(Writable.prototype);\n for (var v = 0; v < keys.length; v++) {\n const method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n Readable.call(this, options);\n Writable.call(this, options);\n this.allowHalfOpen = true;\n if (options) {\n if (options.readable === false) this.readable = false;\n if (options.writable === false) this.writable = false;\n if (options.allowHalfOpen === false) {\n this.allowHalfOpen = false;\n this.once('end', onend);\n }\n }\n}\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.highWaterMark;\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\n\n// the no-half-open enforcer\nfunction onend() {\n // If the writable side ended, then we're ok.\n if (this._writableState.ended) return;\n\n // no more data can be written.\n // But allow more writes to happen in this tick.\n process.nextTick(onEndNT, this);\n}\nfunction onEndNT(self) {\n self.end();\n}\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n'use strict';\n\nmodule.exports = PassThrough;\nconst Transform = require('./_stream_transform');\nrequire('inherits')(PassThrough, Transform);\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n Transform.call(this, options);\n}\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\nmodule.exports = Readable;\n\n/**/\nvar Duplex;\n/**/\n\nReadable.ReadableState = ReadableState;\n\n/**/\nconst EE = require('events').EventEmitter;\nvar EElistenerCount = function EElistenerCount(emitter, type) {\n return emitter.listeners(type).length;\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/**/\nconst debugUtil = require('util');\nlet debug;\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function debug() {};\n}\n/**/\n\nconst BufferList = require('./internal/streams/buffer_list');\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;\n\n// Lazy loaded to improve the startup performance.\nlet StringDecoder;\nlet createReadableStreamAsyncIterator;\nlet from;\nrequire('inherits')(Readable, Stream);\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nconst kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\nfunction ReadableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\n\n // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex);\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false;\n\n // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. 
We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n this.sync = true;\n\n // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n this.paused = true;\n\n // Should close be emitted on destroy. Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'end' (and potentially 'finish')\n this.autoDestroy = !!options.autoDestroy;\n\n // has it been destroyed\n this.destroyed = false;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // the number of writers that are awaiting a drain event in .pipe()s\n this.awaitDrain = 0;\n\n // if true, a maybeReadMore has been scheduled\n this.readingMore = false;\n this.decoder = null;\n this.encoding = null;\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\nfunction Readable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n if (!(this instanceof Readable)) return new Readable(options);\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the ReadableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n this._readableState = new ReadableState(options, this, isDuplex);\n\n // legacy\n this.readable = true;\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n Stream.call(this);\n}\nObject.defineProperty(Readable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined) {\n return false;\n }\n return this._readableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n }\n});\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\nReadable.prototype._destroy = function (err, cb) {\n cb(err);\n};\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n};\n\n// Unshift should *always* be something directly out of 
read()\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n debug('readableAddChunk', chunk);\n var state = stream._readableState;\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n if (er) {\n errorOrDestroy(stream, er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (addToFront) {\n if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());\n } else if (state.destroyed) {\n return false;\n } else {\n state.reading = false;\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n maybeReadMore(stream, state);\n }\n }\n\n // We can push more data if we are below the highWaterMark.\n // Also, if we have no data yet, we can stand some more bytes.\n // This is to work around cases where hwm=0, such as the repl.\n return !state.ended && (state.length < state.highWaterMark || state.length === 0);\n}\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n state.awaitDrain = 0;\n stream.emit('data', chunk);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 
1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n if (state.needReadable) emitReadable(stream);\n }\n maybeReadMore(stream, state);\n}\nfunction chunkInvalid(state, chunk) {\n var er;\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);\n }\n return er;\n}\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n};\n\n// backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n const decoder = new StringDecoder(enc);\n this._readableState.decoder = decoder;\n // If setEncoding(null), decoder.encoding equals utf8\n this._readableState.encoding = this._readableState.decoder.encoding;\n\n // Iterate over current buffer to convert already stored Buffers:\n let p = this._readableState.buffer.head;\n let content = '';\n while (p !== null) {\n content += decoder.write(p.data);\n p = p.next;\n }\n this._readableState.buffer.clear();\n if (content !== '') this._readableState.buffer.push(content);\n this._readableState.length = content.length;\n return this;\n};\n\n// Don't raise the hwm > 1GB\nconst MAX_HWM = 0x40000000;\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n return n;\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n }\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n;\n // Don't have enough\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n }\n return state.length;\n}\n\n// you can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n if (n !== 0) state.emittedReadable = false;\n\n // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n n = howMuchToRead(n, state);\n\n // if we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. 
The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n var doRead = state.needReadable;\n debug('need readable', doRead);\n\n // if we currently have less than the highWaterMark, then also read some\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n }\n\n // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true;\n // if the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.needReadable = true;\n // call internal read method\n this._read(state.highWaterMark);\n state.sync = false;\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n if (ret === null) {\n state.needReadable = state.length <= state.highWaterMark;\n n = 0;\n } else {\n state.length -= n;\n state.awaitDrain = 0;\n }\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true;\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this);\n }\n if (ret !== null) this.emit('data', ret);\n return ret;\n};\nfunction onEofChunk(stream, state) {\n debug('onEofChunk');\n if (state.ended) return;\n if (state.decoder) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n state.ended = true;\n if (state.sync) {\n // if we are sync, wait until next tick to emit the data.\n // Otherwise we risk emitting data in the flow()\n // the readable code triggers during a read() call\n emitReadable(stream);\n } else {\n // emit 'readable' now to make sure it gets picked up.\n state.needReadable = false;\n if (!state.emittedReadable) {\n state.emittedReadable = true;\n emitReadable_(stream);\n }\n }\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. 
This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n var state = stream._readableState;\n debug('emitReadable', state.needReadable, state.emittedReadable);\n state.needReadable = false;\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n process.nextTick(emitReadable_, stream);\n }\n}\nfunction emitReadable_(stream) {\n var state = stream._readableState;\n debug('emitReadable_', state.destroyed, state.length, state.ended);\n if (!state.destroyed && (state.length || state.ended)) {\n stream.emit('readable');\n state.emittedReadable = false;\n }\n\n // The stream needs another readable event if\n // 1. It is not flowing, as the flow mechanism will take\n // care of it.\n // 2. It is not ended.\n // 3. It is below the highWaterMark, so we can schedule\n // another readable later.\n state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;\n flow(stream);\n}\n\n// at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n process.nextTick(maybeReadMore_, stream, state);\n }\n}\nfunction maybeReadMore_(stream, state) {\n // Attempt to read more data if we should.\n //\n // The conditions for reading more data are (one of):\n // - Not enough data buffered (state.length < state.highWaterMark). The loop\n // is responsible for filling the buffer with enough data if such data\n // is available. If highWaterMark is 0 and we are not in the flowing mode\n // we should _not_ attempt to buffer any extra data. We'll get more data\n // when the stream consumer calls read() instead.\n // - No data in the buffer, and the stream is in flowing mode. In this mode\n // the loop below is responsible for ensuring read() is called. Failing to\n // call read here would abort the flow and there's no other mechanism for\n // continuing the flow if the stream consumer has just subscribed to the\n // 'data' event.\n //\n // In addition to the above conditions to keep reading data, the following\n // conditions prevent the data from being read:\n // - The stream has ended (state.ended).\n // - There is already a pending 'read' operation (state.reading). This is a\n // case where the the stream has called the implementation defined _read()\n // method, but they are processing the call asynchronously and have _not_\n // called push() with new data. In this case we skip performing more\n // read()s. The execution ends in this method again after the _read() ends\n // up calling push() with more data.\n while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {\n const len = state.length;\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length)\n // didn't get any data, stop spinning.\n break;\n }\n state.readingMore = false;\n}\n\n// abstract method. 
to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));\n};\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n default:\n state.pipes.push(dest);\n break;\n }\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n var endFn = doEnd ? onend : unpipe;\n if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);\n dest.on('unpipe', onunpipe);\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n function onend() {\n debug('onend');\n dest.end();\n }\n\n // when the dest drains, it reduces the awaitDrain counter\n // on the source. This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n var cleanedUp = false;\n function cleanup() {\n debug('cleanup');\n // cleanup event handlers once the pipe is broken\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n cleanedUp = true;\n\n // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n src.on('data', ondata);\n function ondata(chunk) {\n debug('ondata');\n var ret = dest.write(chunk);\n debug('dest.write', ret);\n if (ret === false) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', state.awaitDrain);\n state.awaitDrain++;\n }\n src.pause();\n }\n }\n\n // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror);\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n dest.once('close', onclose);\n function onfinish() {\n 
debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n dest.once('finish', onfinish);\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n }\n\n // tell the dest that it's being piped to\n dest.emit('pipe', src);\n\n // start the flow if it hasn't been started already.\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n return dest;\n};\nfunction pipeOnDrain(src) {\n return function pipeOnDrainFunctionResult() {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = {\n hasUnpiped: false\n };\n\n // if we're not piping anywhere, then do nothing.\n if (state.pipesCount === 0) return this;\n\n // just one destination. most common case.\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n if (!dest) dest = state.pipes;\n\n // got a match.\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n }\n\n // slow case. multiple pipe destinations.\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, {\n hasUnpiped: false\n });\n return this;\n }\n\n // try to find the right one.\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n dest.emit('unpipe', this, unpipeInfo);\n return this;\n};\n\n// set up data events if they are asked for\n// Ensure readable listeners eventually get something\nReadable.prototype.on = function (ev, fn) {\n const res = Stream.prototype.on.call(this, ev, fn);\n const state = this._readableState;\n if (ev === 'data') {\n // update readableListening so that resume() may be a no-op\n // a few lines down. This is needed to support once('readable').\n state.readableListening = this.listenerCount('readable') > 0;\n\n // Try start flowing on next tick if stream isn't explicitly paused\n if (state.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.flowing = false;\n state.emittedReadable = false;\n debug('on readable', state.length, state.reading);\n if (state.length) {\n emitReadable(this);\n } else if (!state.reading) {\n process.nextTick(nReadingNextTick, this);\n }\n }\n }\n return res;\n};\nReadable.prototype.addListener = Readable.prototype.on;\nReadable.prototype.removeListener = function (ev, fn) {\n const res = Stream.prototype.removeListener.call(this, ev, fn);\n if (ev === 'readable') {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. 
This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nReadable.prototype.removeAllListeners = function (ev) {\n const res = Stream.prototype.removeAllListeners.apply(this, arguments);\n if (ev === 'readable' || ev === undefined) {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nfunction updateReadableListening(self) {\n const state = self._readableState;\n state.readableListening = self.listenerCount('readable') > 0;\n if (state.resumeScheduled && !state.paused) {\n // flowing needs to be set to true now, otherwise\n // the upcoming resume will not flow.\n state.flowing = true;\n\n // crude way to check if we should resume\n } else if (self.listenerCount('data') > 0) {\n self.resume();\n }\n}\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n var state = this._readableState;\n if (!state.flowing) {\n debug('resume');\n // we flow only if there is no one listening\n // for readable, but we still have to call\n // resume()\n state.flowing = !state.readableListening;\n resume(this, state);\n }\n state.paused = false;\n return this;\n};\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n process.nextTick(resume_, stream, state);\n }\n}\nfunction resume_(stream, state) {\n debug('resume', state.reading);\n if (!state.reading) {\n stream.read(0);\n }\n state.resumeScheduled = false;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n if (this._readableState.flowing !== false) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n this._readableState.paused = true;\n return this;\n};\nfunction flow(stream) {\n const state = stream._readableState;\n debug('flow', state.flowing);\n while (state.flowing && stream.read() !== null);\n}\n\n// wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n var state = this._readableState;\n var paused = false;\n stream.on('end', () => {\n debug('wrapped end');\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) this.push(chunk);\n }\n this.push(null);\n });\n stream.on('data', chunk => {\n debug('wrapped data');\n if (state.decoder) chunk = state.decoder.write(chunk);\n\n // don't skip over falsy values in objectMode\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n var ret = this.push(chunk);\n if (!ret) {\n paused = true;\n stream.pause();\n }\n });\n\n // proxy all the other methods.\n // important when wrapping filters and duplexes.\n for (var i in stream) {\n if (this[i] === undefined && typeof 
stream[i] === 'function') {\n this[i] = function methodWrap(method) {\n return function methodWrapReturnFunction() {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n }\n\n // proxy certain important events.\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n }\n\n // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n this._read = n => {\n debug('wrapped _read', n);\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n return this;\n};\nif (typeof Symbol === 'function') {\n Readable.prototype[Symbol.asyncIterator] = function () {\n if (createReadableStreamAsyncIterator === undefined) {\n createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');\n }\n return createReadableStreamAsyncIterator(this);\n };\n}\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.highWaterMark;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState && this._readableState.buffer;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableFlowing', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.flowing;\n },\n set: function set(state) {\n if (this._readableState) {\n this._readableState.flowing = state;\n }\n }\n});\n\n// exposed for testing purposes only.\nReadable._fromList = fromList;\nObject.defineProperty(Readable.prototype, 'readableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._readableState.length;\n }\n});\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = state.buffer.consume(n, state.decoder);\n }\n return ret;\n}\nfunction endReadable(stream) {\n var state = stream._readableState;\n debug('endReadable', state.endEmitted);\n if (!state.endEmitted) {\n state.ended = true;\n process.nextTick(endReadableNT, state, stream);\n }\n}\nfunction endReadableNT(state, stream) {\n debug('endReadableNT', state.endEmitted, state.length);\n\n // Check that we didn't get one last unshift.\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the 
writable side is ready for autoDestroy as well\n const wState = stream._writableState;\n if (!wState || wState.autoDestroy && wState.finished) {\n stream.destroy();\n }\n }\n }\n}\nif (typeof Symbol === 'function') {\n Readable.from = function (iterable, opts) {\n if (from === undefined) {\n from = require('./internal/streams/from');\n }\n return from(Readable, iterable, opts);\n };\n}\nfunction indexOf(xs, x) {\n for (var i = 0, l = xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n return -1;\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. 
In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n'use strict';\n\nmodule.exports = Transform;\nconst _require$codes = require('../errors').codes,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,\n ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;\nconst Duplex = require('./_stream_duplex');\nrequire('inherits')(Transform, Duplex);\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n var cb = ts.writecb;\n if (cb === null) {\n return this.emit('error', new ERR_MULTIPLE_CALLBACK());\n }\n ts.writechunk = null;\n ts.writecb = null;\n if (data != null)\n // single equals check for both `null` and `undefined`\n this.push(data);\n cb(er);\n var rs = this._readableState;\n rs.reading = false;\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n Duplex.call(this, options);\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n };\n\n // start out asking for a readable event once data is transformed.\n this._readableState.needReadable = true;\n\n // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false;\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n if (typeof options.flush === 'function') this._flush = options.flush;\n }\n\n // When the writable side finishes, then flush out anything remaining.\n this.on('prefinish', prefinish);\n}\nfunction prefinish() {\n if (typeof this._flush === 'function' && !this._readableState.destroyed) {\n this._flush((er, data) => {\n done(this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n};\n\n// This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. 
If you\n// never call cb(), then you'll never get another chunk.\nTransform.prototype._transform = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));\n};\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n};\n\n// Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n if (ts.writechunk !== null && !ts.transforming) {\n ts.transforming = true;\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\nTransform.prototype._destroy = function (err, cb) {\n Duplex.prototype._destroy.call(this, err, err2 => {\n cb(err2);\n });\n};\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n if (data != null)\n // single equals check for both `null` and `undefined`\n stream.push(data);\n\n // TODO(BridgeAR): Write a test for these two error cases\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();\n if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();\n return stream.push(null);\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n'use strict';\n\nmodule.exports = Writable;\n\n/* */\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n}\n\n// It seems a linked list but it is not\n// there will be only 2 of these for each stream\nfunction CorkedRequest(state) {\n this.next = null;\n this.entry = null;\n this.finish = () => {\n onCorkedFinish(this, state);\n };\n}\n/* */\n\n/**/\nvar Duplex;\n/**/\n\nWritable.WritableState = WritableState;\n\n/**/\nconst internalUtil = {\n deprecate: require('util-deprecate')\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,\n ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,\n ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,\n ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nrequire('inherits')(Writable, Stream);\nfunction nop() {}\nfunction WritableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream,\n // e.g. options.readableObjectMode vs. 
options.writableObjectMode, etc.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\n\n // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);\n\n // if _final has been called\n this.finalCalled = false;\n\n // drain event flag.\n this.needDrain = false;\n // at the start of calling end()\n this.ending = false;\n // when end() has been called, and returned\n this.ended = false;\n // when 'finish' is emitted\n this.finished = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0;\n\n // a flag to see when we're in the middle of a write.\n this.writing = false;\n\n // when true all writes will be buffered until .uncork() call\n this.corked = 0;\n\n // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true;\n\n // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false;\n\n // the callback that's passed to _write(chunk,cb)\n this.onwrite = function (er) {\n onwrite(stream, er);\n };\n\n // the callback that the user supplies to write(chunk,encoding,cb)\n this.writecb = null;\n\n // the amount that is being written when _write is called.\n this.writelen = 0;\n this.bufferedRequest = null;\n this.lastBufferedRequest = null;\n\n // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n this.pendingcb = 0;\n\n // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n this.prefinished = false;\n\n // True if the error was already emitted and should not be thrown again\n this.errorEmitted = false;\n\n // Should close be emitted on destroy. 
Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'finish' (and potentially 'end')\n this.autoDestroy = !!options.autoDestroy;\n\n // count buffered requests\n this.bufferedRequestCount = 0;\n\n // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n this.corkedRequestsFree = new CorkedRequest(this);\n}\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n while (current) {\n out.push(current);\n current = current.next;\n }\n return out;\n};\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function writableStateBufferGetter() {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})();\n\n// Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\nvar realHasInstance;\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function value(object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function realHasInstance(object) {\n return object instanceof this;\n };\n}\nfunction Writable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the WritableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);\n this._writableState = new WritableState(options, this, isDuplex);\n\n // legacy.\n this.writable = true;\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n if (typeof options.writev === 'function') this._writev = options.writev;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n if (typeof options.final === 'function') this._final = options.final;\n }\n Stream.call(this);\n}\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());\n};\nfunction writeAfterEnd(stream, cb) {\n var er = new ERR_STREAM_WRITE_AFTER_END();\n // TODO: defer error events consistently everywhere, not just the cb\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n}\n\n// Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. 
Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\nfunction validChunk(stream, state, chunk, cb) {\n var er;\n if (chunk === null) {\n er = new ERR_STREAM_NULL_VALUES();\n } else if (typeof chunk !== 'string' && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);\n }\n if (er) {\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n return false;\n }\n return true;\n}\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n if (typeof cb !== 'function') cb = nop;\n if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n return ret;\n};\nWritable.prototype.cork = function () {\n this._writableState.corked++;\n};\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n if (state.corked) {\n state.corked--;\n if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n return chunk;\n}\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.highWaterMark;\n }\n});\n\n// if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n var len = state.objectMode ? 
1 : chunk.length;\n state.length += len;\n var ret = state.length < state.highWaterMark;\n // we must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true;\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk,\n encoding,\n isBuf,\n callback: cb,\n next: null\n };\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n return ret;\n}\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n process.nextTick(cb, er);\n // this can emit finish, and it will always happen\n // after error\n process.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n // this can emit finish, but finish must\n // always follow error\n finishMaybe(stream, state);\n }\n}\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();\n onwriteStateUpdate(state);\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state) || stream.destroyed;\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n if (sync) {\n process.nextTick(afterWrite, stream, state, finished, cb);\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n finishMaybe(stream, state);\n}\n\n// Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n}\n\n// if there's something in the buffer waiting, then process it\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n var count = 0;\n var allBuffers = true;\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n entry = entry.next;\n count += 1;\n }\n 
buffer.allBuffers = allBuffers;\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\n\n // doWrite is almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n state.pendingcb++;\n state.lastBufferedRequest = null;\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--;\n // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n if (state.writing) {\n break;\n }\n }\n if (entry === null) state.lastBufferedRequest = null;\n }\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));\n};\nWritable.prototype._writev = null;\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\n\n // .end() fully uncorks\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n }\n\n // ignore unnecessary end() calls.\n if (!state.ending) endWritable(this, state, cb);\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\nfunction callFinal(stream, state) {\n stream._final(err => {\n state.pendingcb--;\n if (err) {\n errorOrDestroy(stream, err);\n }\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function' && !state.destroyed) {\n state.pendingcb++;\n state.finalCalled = true;\n process.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\nfunction finishMaybe(stream, state) {\n var need = needFinish(state);\n if (need) {\n prefinish(stream, state);\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the readable side is ready for autoDestroy as well\n const rState = stream._readableState;\n if (!rState || rState.autoDestroy && rState.endEmitted) {\n stream.destroy();\n }\n }\n }\n }\n return need;\n}\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n if (cb) {\n if (state.finished) process.nextTick(cb);else stream.once('finish', cb);\n }\n state.ended = true;\n stream.writable 
= false;\n}\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n }\n\n // reuse the free corkReq.\n state.corkedRequestsFree.next = corkReq;\n}\nObject.defineProperty(Writable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._writableState === undefined) {\n return false;\n }\n return this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._writableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._writableState.destroyed = value;\n }\n});\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\nWritable.prototype._destroy = function (err, cb) {\n cb(err);\n};","'use strict';\n\nconst finished = require('./end-of-stream');\nconst kLastResolve = Symbol('lastResolve');\nconst kLastReject = Symbol('lastReject');\nconst kError = Symbol('error');\nconst kEnded = Symbol('ended');\nconst kLastPromise = Symbol('lastPromise');\nconst kHandlePromise = Symbol('handlePromise');\nconst kStream = Symbol('stream');\nfunction createIterResult(value, done) {\n return {\n value,\n done\n };\n}\nfunction readAndResolve(iter) {\n const resolve = iter[kLastResolve];\n if (resolve !== null) {\n const data = iter[kStream].read();\n // we defer if data is null\n // we can be expecting either 'end' or\n // 'error'\n if (data !== null) {\n iter[kLastPromise] = null;\n iter[kLastResolve] = null;\n iter[kLastReject] = null;\n resolve(createIterResult(data, false));\n }\n }\n}\nfunction onReadable(iter) {\n // we wait for the next tick, because it might\n // emit an error with process.nextTick\n process.nextTick(readAndResolve, iter);\n}\nfunction wrapForNext(lastPromise, iter) {\n return (resolve, reject) => {\n lastPromise.then(() => {\n if (iter[kEnded]) {\n resolve(createIterResult(undefined, true));\n return;\n }\n iter[kHandlePromise](resolve, reject);\n }, reject);\n };\n}\nconst AsyncIteratorPrototype = Object.getPrototypeOf(function () {});\nconst ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({\n get stream() {\n return this[kStream];\n },\n next() {\n // if we have detected an error in the meanwhile\n // reject straight away\n const error = this[kError];\n if (error !== null) {\n return Promise.reject(error);\n }\n if (this[kEnded]) {\n return Promise.resolve(createIterResult(undefined, true));\n }\n if (this[kStream].destroyed) {\n // We need to defer via nextTick because if .destroy(err) is\n // called, the error will be emitted via nextTick, and\n // we cannot guarantee that there is no error lingering around\n // waiting to be emitted.\n return new Promise((resolve, reject) => {\n process.nextTick(() => {\n if (this[kError]) {\n reject(this[kError]);\n } else {\n resolve(createIterResult(undefined, true));\n }\n });\n });\n }\n\n // if we have multiple next() calls\n // we will wait for the previous Promise to finish\n // this logic is optimized to support for await loops,\n // where next() is only called once at a time\n const lastPromise = this[kLastPromise];\n let promise;\n if (lastPromise) {\n promise = new Promise(wrapForNext(lastPromise, this));\n } else {\n // fast path needed to support multiple 
this.push()\n // without triggering the next() queue\n const data = this[kStream].read();\n if (data !== null) {\n return Promise.resolve(createIterResult(data, false));\n }\n promise = new Promise(this[kHandlePromise]);\n }\n this[kLastPromise] = promise;\n return promise;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n return() {\n // destroy(err, cb) is a private API\n // we can guarantee we have that here, because we control the\n // Readable class this is attached to\n return new Promise((resolve, reject) => {\n this[kStream].destroy(null, err => {\n if (err) {\n reject(err);\n return;\n }\n resolve(createIterResult(undefined, true));\n });\n });\n }\n}, AsyncIteratorPrototype);\nconst createReadableStreamAsyncIterator = stream => {\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, {\n [kStream]: {\n value: stream,\n writable: true\n },\n [kLastResolve]: {\n value: null,\n writable: true\n },\n [kLastReject]: {\n value: null,\n writable: true\n },\n [kError]: {\n value: null,\n writable: true\n },\n [kEnded]: {\n value: stream._readableState.endEmitted,\n writable: true\n },\n // the function passed to new Promise\n // is cached so we avoid allocating a new\n // closure at every run\n [kHandlePromise]: {\n value: (resolve, reject) => {\n const data = iterator[kStream].read();\n if (data) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(data, false));\n } else {\n iterator[kLastResolve] = resolve;\n iterator[kLastReject] = reject;\n }\n },\n writable: true\n }\n });\n iterator[kLastPromise] = null;\n finished(stream, err => {\n if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {\n const reject = iterator[kLastReject];\n // reject if we are waiting for data in the Promise\n // returned by next() and store the error\n if (reject !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n reject(err);\n }\n iterator[kError] = err;\n return;\n }\n const resolve = iterator[kLastResolve];\n if (resolve !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(undefined, true));\n }\n iterator[kEnded] = true;\n });\n stream.on('readable', onReadable.bind(null, iterator));\n return iterator;\n};\nmodule.exports = createReadableStreamAsyncIterator;","'use strict';\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? String : Number)(input); }\nconst _require = require('buffer'),\n Buffer = _require.Buffer;\nconst _require2 = require('util'),\n inspect = _require2.inspect;\nconst custom = inspect && inspect.custom || 'inspect';\nfunction copyBuffer(src, target, offset) {\n Buffer.prototype.copy.call(src, target, offset);\n}\nmodule.exports = class BufferList {\n constructor() {\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n push(v) {\n const entry = {\n data: v,\n next: null\n };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n }\n unshift(v) {\n const entry = {\n data: v,\n next: this.head\n };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n }\n shift() {\n if (this.length === 0) return;\n const ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n }\n clear() {\n this.head = this.tail = null;\n this.length = 0;\n }\n join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n while (p = p.next) ret += s + p.data;\n return ret;\n }\n concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n const ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n return ret;\n }\n\n // Consumes a specified amount of bytes or characters from the buffered data.\n consume(n, hasStrings) {\n var ret;\n if (n < this.head.data.length) {\n // `slice` is the same for buffers and strings.\n ret = this.head.data.slice(0, n);\n this.head.data = this.head.data.slice(n);\n } else if (n === this.head.data.length) {\n // First chunk is a perfect match.\n ret = this.shift();\n } else {\n // Result spans more than one buffer.\n ret = hasStrings ? this._getString(n) : this._getBuffer(n);\n }\n return ret;\n }\n first() {\n return this.head.data;\n }\n\n // Consumes a specified amount of characters from the buffered data.\n _getString(n) {\n var p = this.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n const str = p.data;\n const nb = n > str.length ? 
str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Consumes a specified amount of bytes from the buffered data.\n _getBuffer(n) {\n const ret = Buffer.allocUnsafe(n);\n var p = this.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n while (p = p.next) {\n const buf = p.data;\n const nb = n > buf.length ? buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = buf.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Make sure the linked list only shows the minimal necessary information.\n [custom](_, options) {\n return inspect(this, _objectSpread(_objectSpread({}, options), {}, {\n // Only inspect one level.\n depth: 0,\n // It should not recurse.\n customInspect: false\n }));\n }\n};","'use strict';\n\n// undocumented cb() API, needed for core, not for public API\nfunction destroy(err, cb) {\n const readableDestroyed = this._readableState && this._readableState.destroyed;\n const writableDestroyed = this._writableState && this._writableState.destroyed;\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err) {\n if (!this._writableState) {\n process.nextTick(emitErrorNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorNT, this, err);\n }\n }\n return this;\n }\n\n // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n }\n\n // if this is a duplex stream mark the writable part as destroyed as well\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n this._destroy(err || null, err => {\n if (!cb && err) {\n if (!this._writableState) {\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n } else if (cb) {\n process.nextTick(emitCloseNT, this);\n cb(err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n });\n return this;\n}\nfunction emitErrorAndCloseNT(self, err) {\n emitErrorNT(self, err);\n emitCloseNT(self);\n}\nfunction emitCloseNT(self) {\n if (self._writableState && !self._writableState.emitClose) return;\n if (self._readableState && !self._readableState.emitClose) return;\n self.emit('close');\n}\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finalCalled = false;\n this._writableState.prefinished = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\nfunction emitErrorNT(self, err) {\n self.emit('error', 
err);\n}\nfunction errorOrDestroy(stream, err) {\n // We have tests that rely on errors being emitted\n // in the same tick, so changing this is semver major.\n // For now when you opt-in to autoDestroy we allow\n // the error to be emitted nextTick. In a future\n // semver major update we should change the default to this.\n\n const rState = stream._readableState;\n const wState = stream._writableState;\n if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);\n}\nmodule.exports = {\n destroy,\n undestroy,\n errorOrDestroy\n};","// Ported from https://github.com/mafintosh/end-of-stream with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nconst ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n callback.apply(this, args);\n };\n}\nfunction noop() {}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction eos(stream, opts, callback) {\n if (typeof opts === 'function') return eos(stream, null, opts);\n if (!opts) opts = {};\n callback = once(callback || noop);\n let readable = opts.readable || opts.readable !== false && stream.readable;\n let writable = opts.writable || opts.writable !== false && stream.writable;\n const onlegacyfinish = () => {\n if (!stream.writable) onfinish();\n };\n var writableEnded = stream._writableState && stream._writableState.finished;\n const onfinish = () => {\n writable = false;\n writableEnded = true;\n if (!readable) callback.call(stream);\n };\n var readableEnded = stream._readableState && stream._readableState.endEmitted;\n const onend = () => {\n readable = false;\n readableEnded = true;\n if (!writable) callback.call(stream);\n };\n const onerror = err => {\n callback.call(stream, err);\n };\n const onclose = () => {\n let err;\n if (readable && !readableEnded) {\n if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n if (writable && !writableEnded) {\n if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n };\n const onrequest = () => {\n stream.req.on('finish', onfinish);\n };\n if (isRequest(stream)) {\n stream.on('complete', onfinish);\n stream.on('abort', onclose);\n if (stream.req) onrequest();else stream.on('request', onrequest);\n } else if (writable && !stream._writableState) {\n // legacy streams\n stream.on('end', onlegacyfinish);\n stream.on('close', onlegacyfinish);\n }\n stream.on('end', onend);\n stream.on('finish', onfinish);\n if (opts.error !== false) stream.on('error', onerror);\n stream.on('close', onclose);\n return function () {\n stream.removeListener('complete', onfinish);\n stream.removeListener('abort', onclose);\n stream.removeListener('request', onrequest);\n if (stream.req) stream.req.removeListener('finish', onfinish);\n stream.removeListener('end', onlegacyfinish);\n stream.removeListener('close', onlegacyfinish);\n stream.removeListener('finish', onfinish);\n stream.removeListener('end', onend);\n stream.removeListener('error', onerror);\n stream.removeListener('close', onclose);\n };\n}\nmodule.exports = eos;","'use 
strict';\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? 
String : Number)(input); }\nconst ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;\nfunction from(Readable, iterable, opts) {\n let iterator;\n if (iterable && typeof iterable.next === 'function') {\n iterator = iterable;\n } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);\n const readable = new Readable(_objectSpread({\n objectMode: true\n }, opts));\n // Reading boolean to protect against _read\n // being called before last iteration completion.\n let reading = false;\n readable._read = function () {\n if (!reading) {\n reading = true;\n next();\n }\n };\n function next() {\n return _next2.apply(this, arguments);\n }\n function _next2() {\n _next2 = _asyncToGenerator(function* () {\n try {\n const _yield$iterator$next = yield iterator.next(),\n value = _yield$iterator$next.value,\n done = _yield$iterator$next.done;\n if (done) {\n readable.push(null);\n } else if (readable.push(yield value)) {\n next();\n } else {\n reading = false;\n }\n } catch (err) {\n readable.destroy(err);\n }\n });\n return _next2.apply(this, arguments);\n }\n return readable;\n}\nmodule.exports = from;","// Ported from https://github.com/mafintosh/pump with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nlet eos;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n callback(...arguments);\n };\n}\nconst _require$codes = require('../../../errors').codes,\n ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;\nfunction noop(err) {\n // Rethrow the error if it exists to avoid swallowing it\n if (err) throw err;\n}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction destroyer(stream, reading, writing, callback) {\n callback = once(callback);\n let closed = false;\n stream.on('close', () => {\n closed = true;\n });\n if (eos === undefined) eos = require('./end-of-stream');\n eos(stream, {\n readable: reading,\n writable: writing\n }, err => {\n if (err) return callback(err);\n closed = true;\n callback();\n });\n let destroyed = false;\n return err => {\n if (closed) return;\n if (destroyed) return;\n destroyed = true;\n\n // request.destroy just do .end - .abort is what we want\n if (isRequest(stream)) return stream.abort();\n if (typeof stream.destroy === 'function') return stream.destroy();\n callback(err || new ERR_STREAM_DESTROYED('pipe'));\n };\n}\nfunction call(fn) {\n fn();\n}\nfunction pipe(from, to) {\n return from.pipe(to);\n}\nfunction popCallback(streams) {\n if (!streams.length) return noop;\n if (typeof streams[streams.length - 1] !== 'function') return noop;\n return streams.pop();\n}\nfunction pipeline() {\n for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {\n streams[_key] = arguments[_key];\n }\n const callback = popCallback(streams);\n if (Array.isArray(streams[0])) streams = streams[0];\n if (streams.length < 2) {\n throw new ERR_MISSING_ARGS('streams');\n }\n let error;\n const destroys = streams.map(function (stream, i) {\n const reading = i < streams.length - 1;\n const writing = i > 0;\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err;\n if (err) 
destroys.forEach(call);\n if (reading) return;\n destroys.forEach(call);\n callback(error);\n });\n });\n return streams.reduce(pipe);\n}\nmodule.exports = pipeline;","'use strict';\n\nconst ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;\nfunction highWaterMarkFrom(options, isDuplex, duplexKey) {\n return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;\n}\nfunction getHighWaterMark(state, options, duplexKey, isDuplex) {\n const hwm = highWaterMarkFrom(options, isDuplex, duplexKey);\n if (hwm != null) {\n if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {\n const name = isDuplex ? duplexKey : 'highWaterMark';\n throw new ERR_INVALID_OPT_VALUE(name, hwm);\n }\n return Math.floor(hwm);\n }\n\n // Default value\n return state.objectMode ? 16 : 16 * 1024;\n}\nmodule.exports = {\n getHighWaterMark\n};","module.exports = require('stream');\n","var Stream = require('stream');\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream.Readable;\n Object.assign(module.exports, Stream);\n module.exports.Stream = Stream;\n} else {\n exports = module.exports = require('./lib/_stream_readable.js');\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = require('./lib/_stream_writable.js');\n exports.Duplex = require('./lib/_stream_duplex.js');\n exports.Transform = require('./lib/_stream_transform.js');\n exports.PassThrough = require('./lib/_stream_passthrough.js');\n exports.finished = require('./lib/internal/streams/end-of-stream.js');\n exports.pipeline = require('./lib/internal/streams/pipeline.js');\n}\n","'use strict';\n\nconst {PassThrough} = require('stream');\nconst debug = require('debug')('retry-request');\nconst extend = require('extend');\n\nconst DEFAULTS = {\n objectMode: false,\n retries: 2,\n\n /*\n The maximum time to delay in seconds. If retryDelayMultiplier results in a\n delay greater than maxRetryDelay, retries should delay by maxRetryDelay\n seconds instead.\n */\n maxRetryDelay: 64,\n\n /*\n The multiplier by which to increase the delay time between the completion of\n failed requests, and the initiation of the subsequent retrying request.\n */\n retryDelayMultiplier: 2,\n\n /*\n The length of time to keep retrying in seconds. The last sleep period will\n be shortened as necessary, so that the last retry runs at deadline (and not\n considerably beyond it). 
The total time starting from when the initial\n request is sent, after which an error will be returned, regardless of the\n retrying attempts made meanwhile.\n */\n totalTimeout: 600,\n\n noResponseRetries: 2,\n currentRetryAttempt: 0,\n shouldRetryFn: function (response) {\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n\n const statusCode = response.statusCode;\n debug(`Response status: ${statusCode}`);\n\n let range;\n while ((range = retryRanges.shift())) {\n if (statusCode >= range[0] && statusCode <= range[1]) {\n // Not a successful status or redirect.\n return true;\n }\n }\n },\n};\n\nfunction retryRequest(requestOpts, opts, callback) {\n const streamMode = typeof arguments[arguments.length - 1] !== 'function';\n\n if (typeof opts === 'function') {\n callback = opts;\n }\n\n const manualCurrentRetryAttemptWasSet =\n opts && typeof opts.currentRetryAttempt === 'number';\n opts = extend({}, DEFAULTS, opts);\n\n if (typeof opts.request === 'undefined') {\n try {\n // eslint-disable-next-line node/no-unpublished-require\n opts.request = require('request');\n } catch (e) {\n throw new Error('A request library must be provided to retry-request.');\n }\n }\n\n let currentRetryAttempt = opts.currentRetryAttempt;\n\n let numNoResponseAttempts = 0;\n let streamResponseHandled = false;\n\n let retryStream;\n let requestStream;\n let delayStream;\n\n let activeRequest;\n const retryRequest = {\n abort: function () {\n if (activeRequest && activeRequest.abort) {\n activeRequest.abort();\n }\n },\n };\n\n if (streamMode) {\n retryStream = new PassThrough({objectMode: opts.objectMode});\n retryStream.abort = resetStreams;\n }\n\n const timeOfFirstRequest = Date.now();\n if (currentRetryAttempt > 0) {\n retryAfterDelay(currentRetryAttempt);\n } else {\n makeRequest();\n }\n\n if (streamMode) {\n return retryStream;\n } else {\n return retryRequest;\n }\n\n function resetStreams() {\n delayStream = null;\n\n if (requestStream) {\n requestStream.abort && requestStream.abort();\n requestStream.cancel && requestStream.cancel();\n\n if (requestStream.destroy) {\n requestStream.destroy();\n } else if (requestStream.end) {\n requestStream.end();\n }\n }\n }\n\n function makeRequest() {\n currentRetryAttempt++;\n debug(`Current retry attempt: ${currentRetryAttempt}`);\n\n if (streamMode) {\n streamResponseHandled = false;\n\n delayStream = new PassThrough({objectMode: opts.objectMode});\n requestStream = opts.request(requestOpts);\n\n setImmediate(() => {\n retryStream.emit('request');\n });\n\n requestStream\n // gRPC via google-cloud-node can emit an `error` as well as a `response`\n // Whichever it emits, we run with-- we can't run with both. 
That's what\n // is up with the `streamResponseHandled` tracking.\n .on('error', err => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(err);\n })\n .on('response', (resp, body) => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(null, resp, body);\n })\n .on('complete', retryStream.emit.bind(retryStream, 'complete'));\n\n requestStream.pipe(delayStream);\n } else {\n activeRequest = opts.request(requestOpts, onResponse);\n }\n }\n\n function retryAfterDelay(currentRetryAttempt) {\n if (streamMode) {\n resetStreams();\n }\n\n const nextRetryDelay = getNextRetryDelay({\n maxRetryDelay: opts.maxRetryDelay,\n retryDelayMultiplier: opts.retryDelayMultiplier,\n retryNumber: currentRetryAttempt,\n timeOfFirstRequest,\n totalTimeout: opts.totalTimeout,\n });\n debug(`Next retry delay: ${nextRetryDelay}`);\n\n if (nextRetryDelay <= 0) {\n numNoResponseAttempts = opts.noResponseRetries + 1;\n return;\n }\n\n setTimeout(makeRequest, nextRetryDelay);\n }\n\n function onResponse(err, response, body) {\n // An error such as DNS resolution.\n if (err) {\n numNoResponseAttempts++;\n\n if (numNoResponseAttempts <= opts.noResponseRetries) {\n retryAfterDelay(numNoResponseAttempts);\n } else {\n if (streamMode) {\n retryStream.emit('error', err);\n retryStream.end();\n } else {\n callback(err, response, body);\n }\n }\n\n return;\n }\n\n // Send the response to see if we should try again.\n // NOTE: \"currentRetryAttempt\" isn't accurate by default, as it counts\n // the very first request sent as the first \"retry\". It is only accurate\n // when a user provides their own \"currentRetryAttempt\" option at\n // instantiation.\n const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet\n ? 
currentRetryAttempt\n : currentRetryAttempt - 1;\n if (\n adjustedCurrentRetryAttempt < opts.retries &&\n opts.shouldRetryFn(response)\n ) {\n retryAfterDelay(currentRetryAttempt);\n return;\n }\n\n // No more attempts need to be made, just continue on.\n if (streamMode) {\n retryStream.emit('response', response);\n delayStream.pipe(retryStream);\n requestStream.on('error', err => {\n retryStream.destroy(err);\n });\n } else {\n callback(err, response, body);\n }\n }\n}\n\nmodule.exports = retryRequest;\n\nfunction getNextRetryDelay(config) {\n const {\n maxRetryDelay,\n retryDelayMultiplier,\n retryNumber,\n timeOfFirstRequest,\n totalTimeout,\n } = config;\n\n const maxRetryDelayMs = maxRetryDelay * 1000;\n const totalTimeoutMs = totalTimeout * 1000;\n\n const jitter = Math.floor(Math.random() * 1000);\n const calculatedNextRetryDelay =\n Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter;\n\n const maxAllowableDelayMs =\n totalTimeoutMs - (Date.now() - timeOfFirstRequest);\n\n return Math.min(\n calculatedNextRetryDelay,\n maxAllowableDelayMs,\n maxRetryDelayMs\n );\n}\n\nmodule.exports.getNextRetryDelay = getNextRetryDelay;\n","module.exports = require('./lib/retry');","var RetryOperation = require('./retry_operation');\n\nexports.operation = function(options) {\n var timeouts = exports.timeouts(options);\n return new RetryOperation(timeouts, {\n forever: options && (options.forever || options.retries === Infinity),\n unref: options && options.unref,\n maxRetryTime: options && options.maxRetryTime\n });\n};\n\nexports.timeouts = function(options) {\n if (options instanceof Array) {\n return [].concat(options);\n }\n\n var opts = {\n retries: 10,\n factor: 2,\n minTimeout: 1 * 1000,\n maxTimeout: Infinity,\n randomize: false\n };\n for (var key in options) {\n opts[key] = options[key];\n }\n\n if (opts.minTimeout > opts.maxTimeout) {\n throw new Error('minTimeout is greater than maxTimeout');\n }\n\n var timeouts = [];\n for (var i = 0; i < opts.retries; i++) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n if (options && options.forever && !timeouts.length) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n // sort the array numerically ascending\n timeouts.sort(function(a,b) {\n return a - b;\n });\n\n return timeouts;\n};\n\nexports.createTimeout = function(attempt, opts) {\n var random = (opts.randomize)\n ? 
(Math.random() + 1)\n : 1;\n\n var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt));\n timeout = Math.min(timeout, opts.maxTimeout);\n\n return timeout;\n};\n\nexports.wrap = function(obj, options, methods) {\n if (options instanceof Array) {\n methods = options;\n options = null;\n }\n\n if (!methods) {\n methods = [];\n for (var key in obj) {\n if (typeof obj[key] === 'function') {\n methods.push(key);\n }\n }\n }\n\n for (var i = 0; i < methods.length; i++) {\n var method = methods[i];\n var original = obj[method];\n\n obj[method] = function retryWrapper(original) {\n var op = exports.operation(options);\n var args = Array.prototype.slice.call(arguments, 1);\n var callback = args.pop();\n\n args.push(function(err) {\n if (op.retry(err)) {\n return;\n }\n if (err) {\n arguments[0] = op.mainError();\n }\n callback.apply(this, arguments);\n });\n\n op.attempt(function() {\n original.apply(obj, args);\n });\n }.bind(obj, original);\n obj[method].options = options;\n }\n};\n","function RetryOperation(timeouts, options) {\n // Compatibility for the old (timeouts, retryForever) signature\n if (typeof options === 'boolean') {\n options = { forever: options };\n }\n\n this._originalTimeouts = JSON.parse(JSON.stringify(timeouts));\n this._timeouts = timeouts;\n this._options = options || {};\n this._maxRetryTime = options && options.maxRetryTime || Infinity;\n this._fn = null;\n this._errors = [];\n this._attempts = 1;\n this._operationTimeout = null;\n this._operationTimeoutCb = null;\n this._timeout = null;\n this._operationStart = null;\n this._timer = null;\n\n if (this._options.forever) {\n this._cachedTimeouts = this._timeouts.slice(0);\n }\n}\nmodule.exports = RetryOperation;\n\nRetryOperation.prototype.reset = function() {\n this._attempts = 1;\n this._timeouts = this._originalTimeouts.slice(0);\n}\n\nRetryOperation.prototype.stop = function() {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n if (this._timer) {\n clearTimeout(this._timer);\n }\n\n this._timeouts = [];\n this._cachedTimeouts = null;\n};\n\nRetryOperation.prototype.retry = function(err) {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n\n if (!err) {\n return false;\n }\n var currentTime = new Date().getTime();\n if (err && currentTime - this._operationStart >= this._maxRetryTime) {\n this._errors.push(err);\n this._errors.unshift(new Error('RetryOperation timeout occurred'));\n return false;\n }\n\n this._errors.push(err);\n\n var timeout = this._timeouts.shift();\n if (timeout === undefined) {\n if (this._cachedTimeouts) {\n // retry forever, only keep last error\n this._errors.splice(0, this._errors.length - 1);\n timeout = this._cachedTimeouts.slice(-1);\n } else {\n return false;\n }\n }\n\n var self = this;\n this._timer = setTimeout(function() {\n self._attempts++;\n\n if (self._operationTimeoutCb) {\n self._timeout = setTimeout(function() {\n self._operationTimeoutCb(self._attempts);\n }, self._operationTimeout);\n\n if (self._options.unref) {\n self._timeout.unref();\n }\n }\n\n self._fn(self._attempts);\n }, timeout);\n\n if (this._options.unref) {\n this._timer.unref();\n }\n\n return true;\n};\n\nRetryOperation.prototype.attempt = function(fn, timeoutOps) {\n this._fn = fn;\n\n if (timeoutOps) {\n if (timeoutOps.timeout) {\n this._operationTimeout = timeoutOps.timeout;\n }\n if (timeoutOps.cb) {\n this._operationTimeoutCb = timeoutOps.cb;\n }\n }\n\n var self = this;\n if (this._operationTimeoutCb) {\n this._timeout = setTimeout(function() {\n 
self._operationTimeoutCb();\n }, self._operationTimeout);\n }\n\n this._operationStart = new Date().getTime();\n\n this._fn(this._attempts);\n};\n\nRetryOperation.prototype.try = function(fn) {\n console.log('Using RetryOperation.try() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = function(fn) {\n console.log('Using RetryOperation.start() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = RetryOperation.prototype.try;\n\nRetryOperation.prototype.errors = function() {\n return this._errors;\n};\n\nRetryOperation.prototype.attempts = function() {\n return this._attempts;\n};\n\nRetryOperation.prototype.mainError = function() {\n if (this._errors.length === 0) {\n return null;\n }\n\n var counts = {};\n var mainError = null;\n var mainErrorCount = 0;\n\n for (var i = 0; i < this._errors.length; i++) {\n var error = this._errors[i];\n var message = error.message;\n var count = (counts[message] || 0) + 1;\n\n counts[message] = count;\n\n if (count >= mainErrorCount) {\n mainError = error;\n mainErrorCount = count;\n }\n }\n\n return mainError;\n};\n","/*! safe-buffer. MIT License. Feross Aboukhadijeh */\n/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.prototype = Object.create(Buffer.prototype)\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n","'use strict';\n\nvar stubs = require('stubs')\n\n/*\n * StreamEvents can be used 2 ways:\n *\n * 1:\n * function MyStream() {\n * require('stream-events').call(this)\n * }\n *\n * 2:\n * require('stream-events')(myStream)\n */\nfunction StreamEvents(stream) {\n stream = stream || this\n\n var cfg = {\n callthrough: true,\n calls: 1\n }\n\n stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading'))\n stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing'))\n\n return stream\n}\n\nmodule.exports = StreamEvents\n","module.exports = shift\n\nfunction shift (stream) {\n var rs = stream._readableState\n if (!rs) return null\n return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs))\n}\n\nfunction getStateLength (state) {\n if (state.buffer.length) {\n // Since node 6.3.0 state.buffer is a BufferList not an array\n if (state.buffer.head) {\n return state.buffer.head.data.length\n }\n\n return state.buffer[0].length\n }\n\n return state.length\n}\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/**/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n 
var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? 
this.write(buf) : '';\n}","'use strict'\n\nmodule.exports = function stubs(obj, method, cfg, stub) {\n if (!obj || !method || !obj[method])\n throw new Error('You must provide an object and a key for an existing method')\n\n if (!stub) {\n stub = cfg\n cfg = {}\n }\n\n stub = stub || function() {}\n\n cfg.callthrough = cfg.callthrough || false\n cfg.calls = cfg.calls || 0\n\n var norevert = cfg.calls === 0\n\n var cached = obj[method].bind(obj)\n\n obj[method] = function() {\n var args = [].slice.call(arguments)\n var returnVal\n\n if (cfg.callthrough)\n returnVal = cached.apply(obj, args)\n\n returnVal = stub.apply(obj, args) || returnVal\n\n if (!norevert && --cfg.calls === 0)\n obj[method] = cached\n\n return returnVal\n }\n}\n","'use strict';\nconst os = require('os');\nconst tty = require('tty');\nconst hasFlag = require('has-flag');\n\nconst {env} = process;\n\nlet forceColor;\nif (hasFlag('no-color') ||\n\thasFlag('no-colors') ||\n\thasFlag('color=false') ||\n\thasFlag('color=never')) {\n\tforceColor = 0;\n} else if (hasFlag('color') ||\n\thasFlag('colors') ||\n\thasFlag('color=true') ||\n\thasFlag('color=always')) {\n\tforceColor = 1;\n}\n\nif ('FORCE_COLOR' in env) {\n\tif (env.FORCE_COLOR === 'true') {\n\t\tforceColor = 1;\n\t} else if (env.FORCE_COLOR === 'false') {\n\t\tforceColor = 0;\n\t} else {\n\t\tforceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);\n\t}\n}\n\nfunction translateLevel(level) {\n\tif (level === 0) {\n\t\treturn false;\n\t}\n\n\treturn {\n\t\tlevel,\n\t\thasBasic: true,\n\t\thas256: level >= 2,\n\t\thas16m: level >= 3\n\t};\n}\n\nfunction supportsColor(haveStream, streamIsTTY) {\n\tif (forceColor === 0) {\n\t\treturn 0;\n\t}\n\n\tif (hasFlag('color=16m') ||\n\t\thasFlag('color=full') ||\n\t\thasFlag('color=truecolor')) {\n\t\treturn 3;\n\t}\n\n\tif (hasFlag('color=256')) {\n\t\treturn 2;\n\t}\n\n\tif (haveStream && !streamIsTTY && forceColor === undefined) {\n\t\treturn 0;\n\t}\n\n\tconst min = forceColor || 0;\n\n\tif (env.TERM === 'dumb') {\n\t\treturn min;\n\t}\n\n\tif (process.platform === 'win32') {\n\t\t// Windows 10 build 10586 is the first Windows release that supports 256 colors.\n\t\t// Windows 10 build 14931 is the first release that supports 16m/TrueColor.\n\t\tconst osRelease = os.release().split('.');\n\t\tif (\n\t\t\tNumber(osRelease[0]) >= 10 &&\n\t\t\tNumber(osRelease[2]) >= 10586\n\t\t) {\n\t\t\treturn Number(osRelease[2]) >= 14931 ? 3 : 2;\n\t\t}\n\n\t\treturn 1;\n\t}\n\n\tif ('CI' in env) {\n\t\tif (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {\n\t\t\treturn 1;\n\t\t}\n\n\t\treturn min;\n\t}\n\n\tif ('TEAMCITY_VERSION' in env) {\n\t\treturn /^(9\\.(0*[1-9]\\d*)\\.|\\d{2,}\\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;\n\t}\n\n\tif (env.COLORTERM === 'truecolor') {\n\t\treturn 3;\n\t}\n\n\tif ('TERM_PROGRAM' in env) {\n\t\tconst version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);\n\n\t\tswitch (env.TERM_PROGRAM) {\n\t\t\tcase 'iTerm.app':\n\t\t\t\treturn version >= 3 ? 
3 : 2;\n\t\t\tcase 'Apple_Terminal':\n\t\t\t\treturn 2;\n\t\t\t// No default\n\t\t}\n\t}\n\n\tif (/-256(color)?$/i.test(env.TERM)) {\n\t\treturn 2;\n\t}\n\n\tif (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {\n\t\treturn 1;\n\t}\n\n\tif ('COLORTERM' in env) {\n\t\treturn 1;\n\t}\n\n\treturn min;\n}\n\nfunction getSupportLevel(stream) {\n\tconst level = supportsColor(stream, stream && stream.isTTY);\n\treturn translateLevel(level);\n}\n\nmodule.exports = {\n\tsupportsColor: getSupportLevel,\n\tstdout: translateLevel(supportsColor(true, tty.isatty(1))),\n\tstderr: translateLevel(supportsColor(true, tty.isatty(2)))\n};\n","\"use strict\";\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0;\n/**\n * @class TeenyStatisticsWarning\n * @extends Error\n * @description While an error, is used for emitting warnings when\n * meeting certain configured thresholds.\n * @see process.emitWarning\n */\nclass TeenyStatisticsWarning extends Error {\n /**\n * @param {string} message\n */\n constructor(message) {\n super(message);\n this.threshold = 0;\n this.type = '';\n this.value = 0;\n this.name = this.constructor.name;\n Error.captureStackTrace(this, this.constructor);\n }\n}\nexports.TeenyStatisticsWarning = TeenyStatisticsWarning;\nTeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning';\n/**\n * @class TeenyStatistics\n * @description Maintain various statistics internal to teeny-request. Tracking\n * is not automatic and must be instrumented within teeny-request.\n */\nclass TeenyStatistics {\n /**\n * @param {TeenyStatisticsOptions} [opts]\n */\n constructor(opts) {\n /**\n * @type {number}\n * @private\n * @default 0\n */\n this._concurrentRequests = 0;\n /**\n * @type {boolean}\n * @private\n * @default false\n */\n this._didConcurrentRequestWarn = false;\n this._options = TeenyStatistics._prepareOptions(opts);\n }\n /**\n * Returns a copy of the current options.\n * @return {TeenyStatisticsOptions}\n */\n getOptions() {\n return Object.assign({}, this._options);\n }\n /**\n * Change configured statistics options. This will not preserve unspecified\n * options that were previously specified, i.e. 
this is a reset of options.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsConfig} The previous options.\n * @see _prepareOptions\n */\n setOptions(opts) {\n const oldOpts = this._options;\n this._options = TeenyStatistics._prepareOptions(opts);\n return oldOpts;\n }\n /**\n * @readonly\n * @return {TeenyStatisticsCounters}\n */\n get counters() {\n return {\n concurrentRequests: this._concurrentRequests,\n };\n }\n /**\n * @description Should call this right before making a request.\n */\n requestStarting() {\n this._concurrentRequests++;\n if (this._options.concurrentRequests > 0 &&\n this._concurrentRequests >= this._options.concurrentRequests &&\n !this._didConcurrentRequestWarn) {\n this._didConcurrentRequestWarn = true;\n const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' +\n this._concurrentRequests +\n ' requests in-flight, which exceeds the configured threshold of ' +\n this._options.concurrentRequests +\n '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' +\n 'variable or the concurrentRequests option of teeny-request to ' +\n 'increase or disable (0) this warning.');\n warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS;\n warning.value = this._concurrentRequests;\n warning.threshold = this._options.concurrentRequests;\n process.emitWarning(warning);\n }\n }\n /**\n * @description When using `requestStarting`, call this after the request\n * has finished.\n */\n requestFinished() {\n // TODO negative?\n this._concurrentRequests--;\n }\n /**\n * Configuration Precedence:\n * 1. Dependency inversion via defined option.\n * 2. Global numeric environment variable.\n * 3. Built-in default.\n * This will not preserve unspecified options previously specified.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsOptions}\n * @private\n */\n static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) {\n let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS;\n const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS);\n if (diConcurrentRequests !== undefined) {\n concurrentRequests = diConcurrentRequests;\n }\n else if (!Number.isNaN(envConcurrentRequests)) {\n concurrentRequests = envConcurrentRequests;\n }\n return { concurrentRequests };\n }\n}\nexports.TeenyStatistics = TeenyStatistics;\n/**\n * @description A default threshold representing when to warn about excessive\n * in-flight/concurrent requests.\n * @type {number}\n * @static\n * @readonly\n * @default 5000\n */\nTeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000;\n//# sourceMappingURL=TeenyStatistics.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getAgent = exports.pool = void 0;\nconst http_1 = require(\"http\");\nconst https_1 = require(\"https\");\n// eslint-disable-next-line 
node/no-deprecated-api\nconst url_1 = require(\"url\");\nexports.pool = new Map();\n/**\n * Determines if a proxy should be considered based on the environment.\n *\n * @param uri The request uri\n * @returns {boolean}\n */\nfunction shouldUseProxyForURI(uri) {\n const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy;\n if (!noProxyEnv) {\n return true;\n }\n const givenURI = new URL(uri);\n for (const noProxyRaw of noProxyEnv.split(',')) {\n const noProxy = noProxyRaw.trim();\n if (noProxy === givenURI.origin || noProxy === givenURI.hostname) {\n return false;\n }\n else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) {\n const noProxyWildcard = noProxy.replace(/^\\*\\./, '.');\n if (givenURI.hostname.endsWith(noProxyWildcard)) {\n return false;\n }\n }\n }\n return true;\n}\n/**\n * Returns a custom request Agent if one is found, otherwise returns undefined\n * which will result in the global http(s) Agent being used.\n * @private\n * @param {string} uri The request uri\n * @param {Options} reqOpts The request options\n * @returns {HttpAnyAgent|undefined}\n */\nfunction getAgent(uri, reqOpts) {\n const isHttp = uri.startsWith('http://');\n const proxy = reqOpts.proxy ||\n process.env.HTTP_PROXY ||\n process.env.http_proxy ||\n process.env.HTTPS_PROXY ||\n process.env.https_proxy;\n const poolOptions = Object.assign({}, reqOpts.pool);\n const manuallyProvidedProxy = !!reqOpts.proxy;\n const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri);\n if (proxy && shouldUseProxy) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp\n ? require('http-proxy-agent')\n : require('https-proxy-agent');\n const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions };\n return new Agent(proxyOpts);\n }\n let key = isHttp ? 'http' : 'https';\n if (reqOpts.forever) {\n key += ':forever';\n if (!exports.pool.has(key)) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp ? 
http_1.Agent : https_1.Agent;\n exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true }));\n }\n }\n return exports.pool.get(key);\n}\nexports.getAgent = getAgent;\n//# sourceMappingURL=agents.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.teenyRequest = exports.RequestError = void 0;\nconst node_fetch_1 = require(\"node-fetch\");\nconst stream_1 = require(\"stream\");\nconst uuid = require(\"uuid\");\nconst agents_1 = require(\"./agents\");\nconst TeenyStatistics_1 = require(\"./TeenyStatistics\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst streamEvents = require('stream-events');\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\n/**\n * Convert options from Request to Fetch format\n * @private\n * @param reqOpts Request options\n */\nfunction requestToFetchOptions(reqOpts) {\n const options = {\n method: reqOpts.method || 'GET',\n ...(reqOpts.timeout && { timeout: reqOpts.timeout }),\n ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }),\n };\n if (typeof reqOpts.json === 'object') {\n // Add Content-type: application/json header\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['Content-Type'] = 'application/json';\n // Set body to JSON representation of value\n options.body = JSON.stringify(reqOpts.json);\n }\n else {\n if (Buffer.isBuffer(reqOpts.body)) {\n options.body = reqOpts.body;\n }\n else if (typeof reqOpts.body !== 'string') {\n options.body = JSON.stringify(reqOpts.body);\n }\n else {\n options.body = reqOpts.body;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options.headers = reqOpts.headers;\n let uri = (reqOpts.uri ||\n reqOpts.url);\n if (!uri) {\n throw new Error('Missing uri or url in reqOpts.');\n }\n if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const qs = require('querystring');\n const params = qs.stringify(reqOpts.qs);\n uri = uri + '?' 
+ params;\n }\n options.agent = (0, agents_1.getAgent)(uri, reqOpts);\n return { uri, options };\n}\n/**\n * Convert a response from `fetch` to `request` format.\n * @private\n * @param opts The `request` options used to create the request.\n * @param res The Fetch response\n * @returns A `request` response object\n */\nfunction fetchToRequestResponse(opts, res) {\n const request = {};\n request.agent = opts.agent || false;\n request.headers = (opts.headers || {});\n request.href = res.url;\n // headers need to be converted from a map to an obj\n const resHeaders = {};\n res.headers.forEach((value, key) => (resHeaders[key] = value));\n const response = Object.assign(res.body, {\n statusCode: res.status,\n statusMessage: res.statusText,\n request,\n body: res.body,\n headers: resHeaders,\n toJSON: () => ({ headers: resHeaders }),\n });\n return response;\n}\n/**\n * Create POST body from two parts as multipart/related content-type\n * @private\n * @param boundary\n * @param multipart\n */\nfunction createMultipartStream(boundary, multipart) {\n const finale = `--${boundary}--`;\n const stream = new stream_1.PassThrough();\n for (const part of multipart) {\n const preamble = `--${boundary}\\r\\nContent-Type: ${part['Content-Type']}\\r\\n\\r\\n`;\n stream.write(preamble);\n if (typeof part.body === 'string') {\n stream.write(part.body);\n stream.write('\\r\\n');\n }\n else {\n part.body.pipe(stream, { end: false });\n part.body.on('end', () => {\n stream.write('\\r\\n');\n stream.write(finale);\n stream.end();\n });\n }\n }\n return stream;\n}\nfunction teenyRequest(reqOpts, callback) {\n const { uri, options } = requestToFetchOptions(reqOpts);\n const multipart = reqOpts.multipart;\n if (reqOpts.multipart && multipart.length === 2) {\n if (!callback) {\n // TODO: add support for multipart uploads through streaming\n throw new Error('Multipart without callback is not implemented.');\n }\n const boundary = uuid.v4();\n options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`;\n options.body = createMultipartStream(boundary, multipart);\n // Multipart upload\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, (err) => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n }\n if (callback === undefined) {\n // Stream mode\n const requestStream = streamEvents(new stream_1.PassThrough());\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let responseStream;\n requestStream.once('reading', () => {\n if (responseStream) {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n }\n else {\n requestStream.once('response', () => {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n });\n }\n });\n options.compress = false;\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n responseStream = res.body;\n 
responseStream.on('error', (err) => {\n requestStream.emit('error', err);\n });\n const response = fetchToRequestResponse(options, res);\n requestStream.emit('response', response);\n }, err => {\n teenyRequest.stats.requestFinished();\n requestStream.emit('error', err);\n });\n // fetch doesn't supply the raw HTTP stream, instead it\n // returns a PassThrough piped from the HTTP response\n // stream.\n return requestStream;\n }\n // GET or POST with callback\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n if (response.statusCode === 204) {\n // Probably a DELETE\n callback(null, response, body);\n return;\n }\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, err => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n const response = fetchToRequestResponse(options, res);\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n}\nexports.teenyRequest = teenyRequest;\nteenyRequest.defaults = (defaults) => {\n return (reqOpts, callback) => {\n const opts = { ...defaults, ...reqOpts };\n if (callback === undefined) {\n return teenyRequest(opts);\n }\n teenyRequest(opts, callback);\n };\n};\n/**\n * Single instance of an interface for keeping track of things.\n */\nteenyRequest.stats = new TeenyStatistics_1.TeenyStatistics();\nteenyRequest.resetStats = () => {\n teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions());\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar 
_stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nvar _default = {\n randomUUID: _crypto.default.randomUUID\n};\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nexports.unsafeStringify = unsafeStringify;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).slice(1));\n}\n\nfunction unsafeStringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase();\n}\n\nfunction stringify(arr, offset = 0) {\n const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.unsafeStringify)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.URL = exports.DNS = void 0;\nexports.default = v35;\n\nvar _stringify = require(\"./stringify.js\");\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction v35(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n var _namespace;\n\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... 
value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _native = _interopRequireDefault(require(\"./native.js\"));\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n if (_native.default.randomUUID && !buf && !options) {\n return _native.default.randomUUID();\n }\n\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.slice(14, 15), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","\n/**\n * For Node.js, simply re-export the core `util.deprecate` function.\n */\n\nmodule.exports = require('util').deprecate;\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = 
_interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return 
x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return 
usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = 
conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) 
? undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","'use strict'\nmodule.exports = function (Yallist) {\n Yallist.prototype[Symbol.iterator] = function* () {\n for (let walker = this.head; walker; walker = walker.next) {\n yield walker.value\n }\n }\n}\n","'use strict'\nmodule.exports = Yallist\n\nYallist.Node = Node\nYallist.create = Yallist\n\nfunction Yallist (list) {\n var self = this\n if (!(self instanceof Yallist)) {\n self = new Yallist()\n }\n\n self.tail = null\n self.head = null\n self.length = 0\n\n if (list && typeof list.forEach === 'function') {\n list.forEach(function (item) {\n self.push(item)\n })\n } else if (arguments.length > 0) {\n for (var i = 0, l = arguments.length; i < l; i++) {\n self.push(arguments[i])\n }\n }\n\n return self\n}\n\nYallist.prototype.removeNode = function (node) {\n if (node.list !== this) {\n throw new Error('removing node which does not belong to this list')\n }\n\n var next = node.next\n var prev = node.prev\n\n if (next) {\n next.prev = prev\n 
}\n\n if (prev) {\n prev.next = next\n }\n\n if (node === this.head) {\n this.head = next\n }\n if (node === this.tail) {\n this.tail = prev\n }\n\n node.list.length--\n node.next = null\n node.prev = null\n node.list = null\n\n return next\n}\n\nYallist.prototype.unshiftNode = function (node) {\n if (node === this.head) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var head = this.head\n node.list = this\n node.next = head\n if (head) {\n head.prev = node\n }\n\n this.head = node\n if (!this.tail) {\n this.tail = node\n }\n this.length++\n}\n\nYallist.prototype.pushNode = function (node) {\n if (node === this.tail) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var tail = this.tail\n node.list = this\n node.prev = tail\n if (tail) {\n tail.next = node\n }\n\n this.tail = node\n if (!this.head) {\n this.head = node\n }\n this.length++\n}\n\nYallist.prototype.push = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n push(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.unshift = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n unshift(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.pop = function () {\n if (!this.tail) {\n return undefined\n }\n\n var res = this.tail.value\n this.tail = this.tail.prev\n if (this.tail) {\n this.tail.next = null\n } else {\n this.head = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.shift = function () {\n if (!this.head) {\n return undefined\n }\n\n var res = this.head.value\n this.head = this.head.next\n if (this.head) {\n this.head.prev = null\n } else {\n this.tail = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.forEach = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.head, i = 0; walker !== null; i++) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.next\n }\n}\n\nYallist.prototype.forEachReverse = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.prev\n }\n}\n\nYallist.prototype.get = function (n) {\n for (var i = 0, walker = this.head; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.next\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.getReverse = function (n) {\n for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.prev\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.map = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.head; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.next\n }\n return res\n}\n\nYallist.prototype.mapReverse = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.tail; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.prev\n }\n return res\n}\n\nYallist.prototype.reduce = function (fn, initial) {\n var acc\n var walker = this.head\n if (arguments.length > 1) {\n acc = initial\n } else if (this.head) {\n walker = this.head.next\n acc = this.head.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = 0; walker !== null; i++) {\n acc = fn(acc, walker.value, i)\n walker = 
walker.next\n }\n\n return acc\n}\n\nYallist.prototype.reduceReverse = function (fn, initial) {\n var acc\n var walker = this.tail\n if (arguments.length > 1) {\n acc = initial\n } else if (this.tail) {\n walker = this.tail.prev\n acc = this.tail.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = this.length - 1; walker !== null; i--) {\n acc = fn(acc, walker.value, i)\n walker = walker.prev\n }\n\n return acc\n}\n\nYallist.prototype.toArray = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.head; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.next\n }\n return arr\n}\n\nYallist.prototype.toArrayReverse = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.tail; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.prev\n }\n return arr\n}\n\nYallist.prototype.slice = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = 0, walker = this.head; walker !== null && i < from; i++) {\n walker = walker.next\n }\n for (; walker !== null && i < to; i++, walker = walker.next) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.sliceReverse = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {\n walker = walker.prev\n }\n for (; walker !== null && i > from; i--, walker = walker.prev) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.splice = function (start, deleteCount, ...nodes) {\n if (start > this.length) {\n start = this.length - 1\n }\n if (start < 0) {\n start = this.length + start;\n }\n\n for (var i = 0, walker = this.head; walker !== null && i < start; i++) {\n walker = walker.next\n }\n\n var ret = []\n for (var i = 0; walker && i < deleteCount; i++) {\n ret.push(walker.value)\n walker = this.removeNode(walker)\n }\n if (walker === null) {\n walker = this.tail\n }\n\n if (walker !== this.head && walker !== this.tail) {\n walker = walker.prev\n }\n\n for (var i = 0; i < nodes.length; i++) {\n walker = insert(this, walker, nodes[i])\n }\n return ret;\n}\n\nYallist.prototype.reverse = function () {\n var head = this.head\n var tail = this.tail\n for (var walker = head; walker !== null; walker = walker.prev) {\n var p = walker.prev\n walker.prev = walker.next\n walker.next = p\n }\n this.head = tail\n this.tail = head\n return this\n}\n\nfunction insert (self, node, value) {\n var inserted = node === self.head ?\n new Node(value, null, node, self) :\n new Node(value, node, node.next, self)\n\n if (inserted.next === null) {\n self.tail = inserted\n }\n if (inserted.prev === null) {\n self.head = inserted\n }\n\n self.length++\n\n return inserted\n}\n\nfunction push (self, item) {\n self.tail = new Node(item, self.tail, null, self)\n if (!self.head) {\n self.head = self.tail\n }\n self.length++\n}\n\nfunction unshift (self, item) {\n self.head = new Node(item, null, self.head, self)\n if (!self.tail) {\n self.tail = self.head\n 
}\n self.length++\n}\n\nfunction Node (value, prev, next, list) {\n if (!(this instanceof Node)) {\n return new Node(value, prev, next, list)\n }\n\n this.list = list\n this.value = value\n\n if (prev) {\n prev.next = this\n this.prev = prev\n } else {\n this.prev = null\n }\n\n if (next) {\n next.prev = this\n this.next = next\n } else {\n this.next = null\n }\n}\n\ntry {\n // add if support for Symbol.iterator is present\n require('./iterator.js')(Yallist)\n} catch (er) {}\n","class Node {\n\t/// value;\n\t/// next;\n\n\tconstructor(value) {\n\t\tthis.value = value;\n\n\t\t// TODO: Remove this when targeting Node.js 12.\n\t\tthis.next = undefined;\n\t}\n}\n\nclass Queue {\n\t// TODO: Use private class fields when targeting Node.js 12.\n\t// #_head;\n\t// #_tail;\n\t// #_size;\n\n\tconstructor() {\n\t\tthis.clear();\n\t}\n\n\tenqueue(value) {\n\t\tconst node = new Node(value);\n\n\t\tif (this._head) {\n\t\t\tthis._tail.next = node;\n\t\t\tthis._tail = node;\n\t\t} else {\n\t\t\tthis._head = node;\n\t\t\tthis._tail = node;\n\t\t}\n\n\t\tthis._size++;\n\t}\n\n\tdequeue() {\n\t\tconst current = this._head;\n\t\tif (!current) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis._head = this._head.next;\n\t\tthis._size--;\n\t\treturn current.value;\n\t}\n\n\tclear() {\n\t\tthis._head = undefined;\n\t\tthis._tail = undefined;\n\t\tthis._size = 0;\n\t}\n\n\tget size() {\n\t\treturn this._size;\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tlet current = this._head;\n\n\t\twhile (current) {\n\t\t\tyield current.value;\n\t\t\tcurrent = current.next;\n\t\t}\n\t}\n}\n\nmodule.exports = Queue;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"buffer\");","module.exports = require(\"child_process\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"querystring\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"tty\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;;;;;;;;AC/oBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9oBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;;;;;;;;;AChKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpkEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACz2FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACt2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9kBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1WA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7aA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpyBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3GA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1bA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/XA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;;;;;;;;ACHA;;;;;;;ACAA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1vDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjgCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/nBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACrBA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpSA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACvSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC
xGA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACr0BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/IA;AACA;AACA;AACA;AACA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1uEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClfA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9fA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5kBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnmEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACj7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1jBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9iCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACroBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvYA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACrRA;;;;;;;;ACAA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9VA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9SA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChoBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;
;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnEA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7sBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACl7GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChuGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;;;;;;;;;ACv2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1nCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC
vpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":[".././lib/locks.js",".././lib/main.js",".././node_modules/@actions/core/lib/command.js",".././node_modules/@actions/core/lib/core.js",".././node_modules/@actions/core/lib/file-command.js",".././node_modules/@actions/core/lib/oidc-utils.js",".././node_modules/@actions/core/lib/path-utils.js",".././node_modules/@actions/core/lib/summary.js",".././node_modules/@actions/core/lib/utils.js",".././node_modules/@actions/github/lib/context.js",".././node_modules/@actions/github/lib/github.js",".././node_modules/@actions/github/lib/internal/utils.js",".././node_modules/@actions/github/lib/utils.js",".././node_modules/@actions/http-client/lib/auth.js",".././node_modules/@actions/http-client/lib/index.js",".././node_modules/@actions/http-client/lib/proxy.js",".././node_modules/@google-cloud/common/build/src/index.js",".././node_modules/@google-cloud/common/build/src/operation.js",".././node_modules/@google-cloud/common/build/src/service-object.js",".././node_modules/@google-cloud/common/build/src/service.js",".././node_modules/@google-cloud/common/build/src/util.js",".././node_modules/@google-cloud/paginator/build/src/index.js",".././node_modules/@google-cloud/paginator/build/src/resource-stream.js",".././node_modules/@google-cloud/projectify/build/src/index.js",".././node_modules/@google-cloud/promisify/build/src/index.js",".././node_modules/@octokit/auth-token/dist-node/index.js",".././node_modules/@octokit/core/dist-node/index.js",".././node_modules/@octokit/endpoint/dist-node/index.js",".././node_modules/@octokit/graphql/dist-node/index.js",".././node_modules/@octokit/plugin-paginate-rest/dist-node/index.js",".././node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js",".././node_modules/@octokit/request-error/dist-node/index.js",".././node_modules/@octokit/request/dist-node/index.js",".././no
de_modules/@tootallnate/once/dist/index.js",".././node_modules/abort-controller/dist/abort-controller.js",".././node_modules/agent-base/dist/src/index.js",".././node_modules/agent-base/dist/src/promisify.js",".././node_modules/arrify/index.js",".././node_modules/async-retry/lib/index.js",".././node_modules/base64-js/index.js",".././node_modules/before-after-hook/index.js",".././node_modules/before-after-hook/lib/add.js",".././node_modules/before-after-hook/lib/register.js",".././node_modules/before-after-hook/lib/remove.js",".././node_modules/bignumber.js/bignumber.js",".././node_modules/buffer-equal-constant-time/index.js",".././node_modules/compressible/index.js",".././node_modules/debug/src/browser.js",".././node_modules/debug/src/common.js",".././node_modules/debug/src/index.js",".././node_modules/debug/src/node.js",".././node_modules/deprecation/dist-node/index.js",".././node_modules/duplexify/index.js",".././node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js",".././node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js",".././node_modules/end-of-stream/index.js",".././node_modules/ent/decode.js",".././node_modules/ent/encode.js",".././node_modules/ent/index.js",".././node_modules/event-target-shim/dist/event-target-shim.js",".././node_modules/extend/index.js",".././node_modules/fast-xml-parser/src/fxp.js",".././node_modules/fast-xml-parser/src/util.js",".././node_modules/fast-xml-parser/src/validator.js",".././node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js",".././node_modules/fast-xml-parser/src/xmlbuilder/orderedJs2Xml.js",".././node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js",".././node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js",".././node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js",".././node_modules/fast-xml-parser/src/xmlparser/XMLParser.js",".././node_modules/fast-xml-parser/src/xmlparser/node2json.js",".././node_modules/fast-xml-parser/src/xmlparser/xmlNode.js",".././node_modules/gaxios/build/src/common.js",".././node_modules/gaxios/build/src/gaxios.js",".././node_modules/gaxios/build/src/index.js",".././node_modules/gaxios/build/src/retry.js",".././node_modules/gaxios/build/src/util.js",".././node_modules/gaxios/node_modules/agent-base/dist/helpers.js",".././node_modules/gaxios/node_modules/agent-base/dist/index.js",".././node_modules/gaxios/node_modules/https-proxy-agent/dist/index.js",".././node_modules/gaxios/node_modules/https-proxy-agent/dist/parse-proxy-response.js",".././node_modules/gcp-metadata/build/src/gcp-residency.js",".././node_modules/gcp-metadata/build/src/index.js",".././node_modules/google-auth-library/build/src/auth/authclient.js",".././node_modules/google-auth-library/build/src/auth/awsclient.js",".././node_modules/google-auth-library/build/src/auth/awsrequestsigner.js",".././node_modules/google-auth-library/build/src/auth/baseexternalclient.js",".././node_modules/google-auth-library/build/src/auth/computeclient.js",".././node_modules/google-auth-library/build/src/auth/downscopedclient.js",".././node_modules/google-auth-library/build/src/auth/envDetect.js",".././node_modules/google-auth-library/build/src/auth/executable-response.js",".././node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js",".././node_modules/google-auth-library/build/src/auth/externalclient.js",".././node_modules/google-auth-library/build/src/auth/googleauth.js",".././node_modules/google-auth-library/build/src/auth/iam.js",".././node_modules/google-auth-library/build/src/auth/identitypoolclie
nt.js",".././node_modules/google-auth-library/build/src/auth/idtokenclient.js",".././node_modules/google-auth-library/build/src/auth/impersonated.js",".././node_modules/google-auth-library/build/src/auth/jwtaccess.js",".././node_modules/google-auth-library/build/src/auth/jwtclient.js",".././node_modules/google-auth-library/build/src/auth/loginticket.js",".././node_modules/google-auth-library/build/src/auth/oauth2client.js",".././node_modules/google-auth-library/build/src/auth/oauth2common.js",".././node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js",".././node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js",".././node_modules/google-auth-library/build/src/auth/refreshclient.js",".././node_modules/google-auth-library/build/src/auth/stscredentials.js",".././node_modules/google-auth-library/build/src/crypto/browser/crypto.js",".././node_modules/google-auth-library/build/src/crypto/crypto.js",".././node_modules/google-auth-library/build/src/crypto/node/crypto.js",".././node_modules/google-auth-library/build/src/index.js",".././node_modules/google-auth-library/build/src/options.js",".././node_modules/google-auth-library/build/src/transporters.js",".././node_modules/google-auth-library/build/src/util.js",".././node_modules/gtoken/build/src/index.js",".././node_modules/has-flag/index.js",".././node_modules/http-proxy-agent/dist/agent.js",".././node_modules/http-proxy-agent/dist/index.js",".././node_modules/https-proxy-agent/dist/agent.js",".././node_modules/https-proxy-agent/dist/index.js",".././node_modules/https-proxy-agent/dist/parse-proxy-response.js",".././node_modules/inherits/inherits.js",".././node_modules/inherits/inherits_browser.js",".././node_modules/is-stream/index.js",".././node_modules/json-bigint/index.js",".././node_modules/json-bigint/lib/parse.js",".././node_modules/json-bigint/lib/stringify.js",".././node_modules/jwa/index.js",".././node_modules/jws/index.js",".././node_modules/jws/lib/data-stream.js",".././node_modules/jws/lib/sign-stream.js",".././node_modules/jws/lib/tostring.js",".././node_modules/jws/lib/verify-stream.js",".././node_modules/mime-db/index.js",".././node_modules/mime-types/index.js",".././node_modules/mime/Mime.js",".././node_modules/mime/index.js",".././node_modules/mime/types/other.js",".././node_modules/mime/types/standard.js",".././node_modules/ms/index.js",".././node_modules/node-fetch/lib/index.js",".././node_modules/once/once.js",".././node_modules/p-limit/index.js",".././node_modules/readable-stream/errors.js",".././node_modules/readable-stream/lib/_stream_duplex.js",".././node_modules/readable-stream/lib/_stream_passthrough.js",".././node_modules/readable-stream/lib/_stream_readable.js",".././node_modules/readable-stream/lib/_stream_transform.js",".././node_modules/readable-stream/lib/_stream_writable.js",".././node_modules/readable-stream/lib/internal/streams/async_iterator.js",".././node_modules/readable-stream/lib/internal/streams/buffer_list.js",".././node_modules/readable-stream/lib/internal/streams/destroy.js",".././node_modules/readable-stream/lib/internal/streams/end-of-stream.js",".././node_modules/readable-stream/lib/internal/streams/from.js",".././node_modules/readable-stream/lib/internal/streams/pipeline.js",".././node_modules/readable-stream/lib/internal/streams/state.js",".././node_modules/readable-stream/lib/internal/streams/stream.js",".././node_modules/readable-stream/readable.js",".././node_modules/retry-request/index.js",".././node_modules/retry/index.js",".././node_modules/retry/lib/ret
ry.js",".././node_modules/retry/lib/retry_operation.js",".././node_modules/safe-buffer/index.js",".././node_modules/stream-events/index.js",".././node_modules/stream-shift/index.js",".././node_modules/string_decoder/lib/string_decoder.js",".././node_modules/strnum/strnum.js",".././node_modules/stubs/index.js",".././node_modules/supports-color/index.js",".././node_modules/teeny-request/build/src/TeenyStatistics.js",".././node_modules/teeny-request/build/src/agents.js",".././node_modules/teeny-request/build/src/index.js",".././node_modules/teeny-request/node_modules/uuid/dist/index.js",".././node_modules/teeny-request/node_modules/uuid/dist/md5.js",".././node_modules/teeny-request/node_modules/uuid/dist/native.js",".././node_modules/teeny-request/node_modules/uuid/dist/nil.js",".././node_modules/teeny-request/node_modules/uuid/dist/parse.js",".././node_modules/teeny-request/node_modules/uuid/dist/regex.js",".././node_modules/teeny-request/node_modules/uuid/dist/rng.js",".././node_modules/teeny-request/node_modules/uuid/dist/sha1.js",".././node_modules/teeny-request/node_modules/uuid/dist/stringify.js",".././node_modules/teeny-request/node_modules/uuid/dist/v1.js",".././node_modules/teeny-request/node_modules/uuid/dist/v3.js",".././node_modules/teeny-request/node_modules/uuid/dist/v35.js",".././node_modules/teeny-request/node_modules/uuid/dist/v4.js",".././node_modules/teeny-request/node_modules/uuid/dist/v5.js",".././node_modules/teeny-request/node_modules/uuid/dist/validate.js",".././node_modules/teeny-request/node_modules/uuid/dist/version.js",".././node_modules/tr46/index.js",".././node_modules/tunnel/index.js",".././node_modules/tunnel/lib/tunnel.js",".././node_modules/undici/index.js",".././node_modules/undici/lib/agent.js",".././node_modules/undici/lib/api/abort-signal.js",".././node_modules/undici/lib/api/api-connect.js",".././node_modules/undici/lib/api/api-pipeline.js",".././node_modules/undici/lib/api/api-request.js",".././node_modules/undici/lib/api/api-stream.js",".././node_modules/undici/lib/api/api-upgrade.js",".././node_modules/undici/lib/api/index.js",".././node_modules/undici/lib/api/readable.js",".././node_modules/undici/lib/api/util.js",".././node_modules/undici/lib/balanced-pool.js",".././node_modules/undici/lib/cache/cache.js",".././node_modules/undici/lib/cache/cachestorage.js",".././node_modules/undici/lib/cache/symbols.js",".././node_modules/undici/lib/cache/util.js",".././node_modules/undici/lib/client.js",".././node_modules/undici/lib/compat/dispatcher-weakref.js",".././node_modules/undici/lib/cookies/constants.js",".././node_modules/undici/lib/cookies/index.js",".././node_modules/undici/lib/cookies/parse.js",".././node_modules/undici/lib/cookies/util.js",".././node_modules/undici/lib/core/connect.js",".././node_modules/undici/lib/core/errors.js",".././node_modules/undici/lib/core/request.js",".././node_modules/undici/lib/core/symbols.js",".././node_modules/undici/lib/core/util.js",".././node_modules/undici/lib/dispatcher-base.js",".././node_modules/undici/lib/dispatcher.js",".././node_modules/undici/lib/fetch/body.js",".././node_modules/undici/lib/fetch/constants.js",".././node_modules/undici/lib/fetch/dataURL.js",".././node_modules/undici/lib/fetch/file.js",".././node_modules/undici/lib/fetch/formdata.js",".././node_modules/undici/lib/fetch/global.js",".././node_modules/undici/lib/fetch/headers.js",".././node_modules/undici/lib/fetch/index.js",".././node_modules/undici/lib/fetch/request.js",".././node_modules/undici/lib/fetch/response.js",".././node_modules/undici/
lib/fetch/symbols.js",".././node_modules/undici/lib/fetch/util.js",".././node_modules/undici/lib/fetch/webidl.js",".././node_modules/undici/lib/fileapi/encoding.js",".././node_modules/undici/lib/fileapi/filereader.js",".././node_modules/undici/lib/fileapi/progressevent.js",".././node_modules/undici/lib/fileapi/symbols.js",".././node_modules/undici/lib/fileapi/util.js",".././node_modules/undici/lib/global.js",".././node_modules/undici/lib/handler/DecoratorHandler.js",".././node_modules/undici/lib/handler/RedirectHandler.js",".././node_modules/undici/lib/handler/RetryHandler.js",".././node_modules/undici/lib/interceptor/redirectInterceptor.js",".././node_modules/undici/lib/llhttp/constants.js",".././node_modules/undici/lib/llhttp/llhttp-wasm.js",".././node_modules/undici/lib/llhttp/llhttp_simd-wasm.js",".././node_modules/undici/lib/llhttp/utils.js",".././node_modules/undici/lib/mock/mock-agent.js",".././node_modules/undici/lib/mock/mock-client.js",".././node_modules/undici/lib/mock/mock-errors.js",".././node_modules/undici/lib/mock/mock-interceptor.js",".././node_modules/undici/lib/mock/mock-pool.js",".././node_modules/undici/lib/mock/mock-symbols.js",".././node_modules/undici/lib/mock/mock-utils.js",".././node_modules/undici/lib/mock/pending-interceptors-formatter.js",".././node_modules/undici/lib/mock/pluralizer.js",".././node_modules/undici/lib/node/fixed-queue.js",".././node_modules/undici/lib/pool-base.js",".././node_modules/undici/lib/pool-stats.js",".././node_modules/undici/lib/pool.js",".././node_modules/undici/lib/proxy-agent.js",".././node_modules/undici/lib/timers.js",".././node_modules/undici/lib/websocket/connection.js",".././node_modules/undici/lib/websocket/constants.js",".././node_modules/undici/lib/websocket/events.js",".././node_modules/undici/lib/websocket/frame.js",".././node_modules/undici/lib/websocket/receiver.js",".././node_modules/undici/lib/websocket/symbols.js",".././node_modules/undici/lib/websocket/util.js",".././node_modules/undici/lib/websocket/websocket.js",".././node_modules/universal-user-agent/dist-node/index.js",".././node_modules/util-deprecate/node.js",".././node_modules/uuid/dist/index.js",".././node_modules/uuid/dist/md5.js",".././node_modules/uuid/dist/nil.js",".././node_modules/uuid/dist/parse.js",".././node_modules/uuid/dist/regex.js",".././node_modules/uuid/dist/rng.js",".././node_modules/uuid/dist/sha1.js",".././node_modules/uuid/dist/stringify.js",".././node_modules/uuid/dist/v1.js",".././node_modules/uuid/dist/v3.js",".././node_modules/uuid/dist/v35.js",".././node_modules/uuid/dist/v4.js",".././node_modules/uuid/dist/v5.js",".././node_modules/uuid/dist/validate.js",".././node_modules/uuid/dist/version.js",".././node_modules/webidl-conversions/lib/index.js",".././node_modules/whatwg-url/lib/URL-impl.js",".././node_modules/whatwg-url/lib/URL.js",".././node_modules/whatwg-url/lib/public-api.js",".././node_modules/whatwg-url/lib/url-state-machine.js",".././node_modules/whatwg-url/lib/utils.js",".././node_modules/wrappy/wrappy.js",".././node_modules/yocto-queue/index.js",".././node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../external node-commonjs \"assert\"","../external node-commonjs \"async_hooks\"","../external node-commonjs \"buffer\"","../external node-commonjs \"child_process\"","../external node-commonjs \"console\"","../external node-commonjs \"crypto\"","../external node-commonjs \"diagnostics_channel\"","../external node-commonjs \"events\"","../external node-commonjs \"fs\"","../external node-commonjs \"http\"","../external 
node-commonjs \"http2\"","../external node-commonjs \"https\"","../external node-commonjs \"net\"","../external node-commonjs \"node:events\"","../external node-commonjs \"node:stream\"","../external node-commonjs \"node:util\"","../external node-commonjs \"os\"","../external node-commonjs \"path\"","../external node-commonjs \"perf_hooks\"","../external node-commonjs \"punycode\"","../external node-commonjs \"querystring\"","../external node-commonjs \"stream\"","../external node-commonjs \"stream/web\"","../external node-commonjs \"string_decoder\"","../external node-commonjs \"tls\"","../external node-commonjs \"tty\"","../external node-commonjs \"url\"","../external node-commonjs \"util\"","../external node-commonjs \"util/types\"","../external node-commonjs \"worker_threads\"","../external node-commonjs \"zlib\"",".././node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js",".././node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js",".././node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js",".././node_modules/@fastify/busboy/deps/streamsearch/sbmh.js",".././node_modules/@fastify/busboy/lib/main.js",".././node_modules/@fastify/busboy/lib/types/multipart.js",".././node_modules/@fastify/busboy/lib/types/urlencoded.js",".././node_modules/@fastify/busboy/lib/utils/Decoder.js",".././node_modules/@fastify/busboy/lib/utils/basename.js",".././node_modules/@fastify/busboy/lib/utils/decodeText.js",".././node_modules/@fastify/busboy/lib/utils/getLimit.js",".././node_modules/@fastify/busboy/lib/utils/parseParams.js",".././node_modules/@google-cloud/storage/build/cjs/src/acl.js",".././node_modules/@google-cloud/storage/build/cjs/src/bucket.js",".././node_modules/@google-cloud/storage/build/cjs/src/channel.js",".././node_modules/@google-cloud/storage/build/cjs/src/crc32c.js",".././node_modules/@google-cloud/storage/build/cjs/src/file.js",".././node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.js",".././node_modules/@google-cloud/storage/build/cjs/src/hmacKey.js",".././node_modules/@google-cloud/storage/build/cjs/src/iam.js",".././node_modules/@google-cloud/storage/build/cjs/src/index.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.js",".././node_modules/@google-cloud/storage/build/cjs/src/notification.js",".././node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.js",".././node_modules/@google-cloud/storage/build/cjs/src/signer.js",".././node_modules/@google-cloud/storage/build/cjs/src/storage.js",".././node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.js",".././node_modules/@google-cloud/storage/build/cjs/src/util.js",".././node_modules/@google-cloud/storage/build/cjs/src/package-json-helper.cjs","../webpack/bootstrap","../webpack/runtime/compat","../webpack/before-startup","../webpack/startup","../webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleCloudStorageMutex = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst storage_1 = require(\"@google-cloud/storage\");\nconst common_1 = require(\"@google-cloud/common\");\nconst TIME_PERIOD_PATTERN = /^(?:(\\d+)m)?(?:(\\d+)s)?$/;\nclass TimePeriodError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'TimePeriodError';\n }\n}\n/**\n * Parse a number of microseconds from a time string.\n *\n */\nfunction parseTimePeriod(period) {\n const match = period.match(TIME_PERIOD_PATTERN);\n if (match === null) {\n throw new TimePeriodError(`Invalid time period string: ${period}`);\n }\n else {\n const minutes = match[1] || 0;\n const seconds = match[2] || 0;\n return Number(minutes) * 60 + Number(seconds);\n }\n}\n/**\n * Generate a default identifier this mutex\n *\n * The identifier is used to verify that this invocation is the owner of the\n * mutex before releasing it.\n */\nfunction defaultMutexIdentifier() {\n const context = github.context;\n return `${context.repo.owner}/${context.repo.repo}/${context.workflow}/${context.runNumber}/${context.job}`;\n}\nconst INITIAL_SLEEP = 500;\nclass GoogleCloudStorageMutex {\n constructor(bucket, name, options = {}) {\n this.file = new storage_1.File(new storage_1.Bucket(new storage_1.Storage(), bucket), name, {\n generation: 0\n });\n if (options.timeout === null || options.timeout === undefined) {\n this.timeout = 5 * 60 * 1000; // 5 minutes\n }\n else if (typeof options.timeout === 'string') {\n this.timeout = parseTimePeriod(options.timeout) * 1000;\n }\n else {\n this.timeout = options.timeout;\n }\n if (options.identifier === null || options.identifier === undefined) {\n this.identifier = defaultMutexIdentifier();\n }\n else {\n this.identifier = options.identifier;\n }\n }\n acquire() {\n return __awaiter(this, void 0, void 0, function* () {\n const start = Date.now();\n let sleep = INITIAL_SLEEP;\n for (;;) {\n try {\n yield 
this.file.save(this.identifier, { resumable: false });\n core.info('Acquired lock');\n return;\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 412) {\n const delta = Date.now() - start;\n if (delta < this.timeout) {\n core.info(`Cannot acquire lock, sleeping for ${sleep / 1000.0}s`);\n yield new Promise(resolve => setTimeout(resolve, sleep));\n sleep = sleep * 2;\n continue;\n }\n }\n core.info('Cannot acquire lock, raising');\n throw e;\n }\n }\n });\n }\n release() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const data = yield this.file.download();\n if (data[0].toString() === this.identifier) {\n yield this.file.delete();\n }\n else {\n core.info(`Locked by someone else (${data[0]} !== ${this.identifier})`);\n }\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 404) {\n // no lock to release\n }\n else {\n throw e;\n }\n }\n });\n }\n}\nexports.GoogleCloudStorageMutex = GoogleCloudStorageMutex;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst locks_1 = require(\"./locks\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const mutex = new locks_1.GoogleCloudStorageMutex(core.getInput('bucket'), core.getInput('filename'), { timeout: core.getInput('timeout') });\n yield mutex.acquire();\n }\n catch (error) {\n if (error instanceof Error) {\n core.error(error.message);\n }\n else {\n core.error(`Unknown error ${error}`);\n }\n core.setFailed('Failed to acquire lock');\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 
'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise
<Summary>
} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl =\n (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options, ...additionalPlugins) {\n const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);\n return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nconst undici_1 = require(\"undici\");\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getProxyAgentDispatcher(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgentDispatcher(destinationUrl);\n}\nexports.getProxyAgentDispatcher = getProxyAgentDispatcher;\nfunction getProxyFetch(destinationUrl) {\n const httpDispatcher = getProxyAgentDispatcher(destinationUrl);\n const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () {\n return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher }));\n });\n return proxyFetch;\n}\nexports.getProxyFetch = getProxyFetch;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nexports.defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl),\n fetch: Utils.getProxyFetch(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nconst undici_1 = require(\"undici\");\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes || (exports.HttpCodes = HttpCodes = 
{}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers || (exports.Headers = Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n readBodyBuffer() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n const chunks = [];\n this.message.on('data', (chunk) => {\n chunks.push(chunk);\n });\n this.message.on('end', () => {\n resolve(Buffer.concat(chunks));\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] 
= this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. 
If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n getAgentDispatcher(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (!useProxy) {\n return;\n }\n return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? 
https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _getProxyAgentDispatcher(parsedUrl, proxyUrl) {\n let proxyAgent;\n if (this._keepAlive) {\n proxyAgent = this._proxyAgentDispatcher;\n }\n // if agent is already assigned use that agent.\n if (proxyAgent) {\n return proxyAgent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {\n token: `${proxyUrl.username}:${proxyUrl.password}`\n })));\n this._proxyAgentDispatcher = proxyAgent;\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {\n rejectUnauthorized: false\n });\n }\n return proxyAgent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use 
strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n try {\n return new URL(proxyVar);\n }\n catch (_a) {\n if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))\n return new URL(`http://${proxyVar}`);\n }\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = exports.Operation = void 0;\n/**\n * @type {module:common/operation}\n * @private\n */\nvar operation_1 = require(\"./operation\");\nObject.defineProperty(exports, \"Operation\", { enumerable: true, get: function () { return operation_1.Operation; } });\n/**\n * @type {module:common/service}\n * @private\n */\nvar service_1 = require(\"./service\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_1.Service; } });\n/**\n * @type {module:common/serviceObject}\n * @private\n */\nvar service_object_1 = require(\"./service-object\");\nObject.defineProperty(exports, 
\"ServiceObject\", { enumerable: true, get: function () { return service_object_1.ServiceObject; } });\n/**\n * @type {module:common/util}\n * @private\n */\nvar util_1 = require(\"./util\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_1.ApiError; } });\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_1.util; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Operation = void 0;\n/*!\n * @module common/operation\n */\nconst service_object_1 = require(\"./service-object\");\nconst util_1 = require(\"util\");\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass Operation extends service_object_1.ServiceObject {\n /**\n * An Operation object allows you to interact with APIs that take longer to\n * process things.\n *\n * @constructor\n * @alias module:common/operation\n *\n * @param {object} config - Configuration object.\n * @param {module:common/service|module:common/serviceObject|module:common/grpcService|module:common/grpcServiceObject} config.parent - The parent object.\n */\n constructor(config) {\n const methods = {\n /**\n * Checks to see if an operation exists.\n */\n exists: true,\n /**\n * Retrieves the operation.\n */\n get: true,\n /**\n * Retrieves metadata for the operation.\n */\n getMetadata: {\n reqOpts: {\n name: config.id,\n },\n },\n };\n config = Object.assign({\n baseUrl: '',\n }, config);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n config.methods = (config.methods || methods);\n super(config);\n this.completeListeners = 0;\n this.hasActiveListeners = false;\n this.listenForEvents_();\n }\n /**\n * Wraps the `complete` and `error` events in a Promise.\n *\n * @return {Promise}\n */\n promise() {\n return new Promise((resolve, reject) => {\n this.on('error', reject).on('complete', (metadata) => {\n resolve([metadata]);\n });\n });\n }\n /**\n * Begin listening for events on the operation. This method keeps track of how\n * many \"complete\" listeners are registered and removed, making sure polling\n * is handled automatically.\n *\n * As long as there is one active \"complete\" listener, the connection is open.\n * When there are no more listeners, the polling stops.\n *\n * @private\n */\n listenForEvents_() {\n this.on('newListener', (event) => {\n if (event === 'complete') {\n this.completeListeners++;\n if (!this.hasActiveListeners) {\n this.hasActiveListeners = true;\n this.startPolling_();\n }\n }\n });\n this.on('removeListener', (event) => {\n if (event === 'complete' && --this.completeListeners === 0) {\n this.hasActiveListeners = false;\n }\n });\n }\n /**\n * Poll for a status update. 
Returns null for an incomplete\n * status, and metadata for a complete status.\n *\n * @private\n */\n poll_(callback) {\n this.getMetadata((err, body) => {\n if (err || body.error) {\n callback(err || body.error);\n return;\n }\n if (!body.done) {\n callback(null);\n return;\n }\n callback(null, body);\n });\n }\n /**\n * Poll `getMetadata` to check the operation's status. This runs a loop to\n * ping the API on an interval.\n *\n * Note: This method is automatically called once a \"complete\" event handler\n * is registered on the operation.\n *\n * @private\n */\n async startPolling_() {\n if (!this.hasActiveListeners) {\n return;\n }\n try {\n const metadata = await (0, util_1.promisify)(this.poll_.bind(this))();\n if (!metadata) {\n setTimeout(this.startPolling_.bind(this), this.pollIntervalMs || 500);\n return;\n }\n this.emit('complete', metadata);\n }\n catch (err) {\n this.emit('error', err);\n }\n }\n}\nexports.Operation = Operation;\n//# sourceMappingURL=operation.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * @module common/service-object\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst arrify = require(\"arrify\");\nconst events_1 = require(\"events\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. 
For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.pollIntervalMs = config.pollIntervalMs;\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. a \"File\") to only have\n // the configured methods. We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = extend(true, {\n method: 'DELETE',\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n callback(err, ...args);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = extend(true, {\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = extend(true, {}, {\n method: 'PATCH',\n uri: '',\n }, methodConfig.reqOpts, {\n json: metadata,\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts);\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = arrify(reqOpts.interceptors_);\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n//# sourceMappingURL=service-object.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\n/*!\n * @module common/service\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = arrify(options.interceptors_);\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired 
!== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = extend({}, config, {\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n });\n this.makeAuthenticatedRequest =\n util_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout });\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n arrify(reqOpts.interceptors_).forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = util_1.util.getUserAgentFromPackageJson(pkg);\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = extend({}, reqOpts.headers, {\n 'User-Agent': userAgent,\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version}`,\n });\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, 
reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n//# sourceMappingURL=service.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = require(\"ent\");\nconst extend = require(\"extend\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retryRequest = require(\"retry-request\");\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst service_1 = require(\"./service\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = 
Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body));\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. 
If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = extend(true, defaultReqOpts, options.request, {\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n });\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted. 
This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = extend({}, config);\n if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = extend({}, config);\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = duplexify();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e, _f, _g;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined &&\n ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n throw new ApiError('autoRetry is deprecated. Use retryOptions.autoRetry instead.');\n }\n else if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries && ((_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)) {\n throw new ApiError('maxRetries is deprecated. Use retryOptions.maxRetries instead.');\n }\n else if (config.maxRetries) {\n maxRetryValue = config.maxRetries;\n }\n else if ((_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.maxRetryDelay,\n retryDelayMultiplier: (_f = config.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier,\n totalTimeout: (_g = config.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return retryRequest(reqOpts, options, (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = retryRequest(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Create a properly-formatted User-Agent string from a package.json file.\n *\n * @param {object} packageJson - A module's package.json file.\n * @return {string} userAgent - The formatted User-Agent string.\n */\n getUserAgentFromPackageJson(packageJson) {\n const hyphenatedPackageName = packageJson.name\n .replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + packageJson.version;\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object 
or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? [{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n//# sourceMappingURL=util.js.map","\"use strict\";\n/*!\n * Copyright 2015 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = exports.paginator = exports.Paginator = void 0;\n/*!\n * @module common/paginator\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst resource_stream_1 = require(\"./resource-stream\");\nObject.defineProperty(exports, \"ResourceStream\", { enumerable: true, get: function () { return resource_stream_1.ResourceStream; } });\n/*! Developer Documentation\n *\n * paginator is used to auto-paginate `nextQuery` methods as well as\n * streamifying them.\n *\n * Before:\n *\n * search.query('done=true', function(err, results, nextQuery) {\n * search.query(nextQuery, function(err, results, nextQuery) {});\n * });\n *\n * After:\n *\n * search.query('done=true', function(err, results) {});\n *\n * Methods to extend should be written to accept callbacks and return a\n * `nextQuery`.\n */\nclass Paginator {\n /**\n * Cache the original method, then overwrite it on the Class's prototype.\n *\n * @param {function} Class - The parent class of the methods to extend.\n * @param {string|string[]} methodNames - Name(s) of the methods to extend.\n */\n // tslint:disable-next-line:variable-name\n extend(Class, methodNames) {\n methodNames = arrify(methodNames);\n methodNames.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n // map the original method to a private member\n Class.prototype[methodName + '_'] = originalMethod;\n // overwrite the original to auto-paginate\n /* eslint-disable @typescript-eslint/no-explicit-any */\n Class.prototype[methodName] = function (...args) {\n const parsedArguments = paginator.parseArguments_(args);\n return paginator.run_(parsedArguments, originalMethod.bind(this));\n };\n });\n }\n /**\n * Wraps paginated API calls in a readable object stream.\n *\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. 
The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {string} methodName - Name of the method to streamify.\n * @return {function} - Wrapped function.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n streamify(methodName) {\n return function (\n /* eslint-disable @typescript-eslint/no-explicit-any */\n ...args) {\n const parsedArguments = paginator.parseArguments_(args);\n const originalMethod = this[methodName + '_'] || this[methodName];\n return paginator.runAsStream_(parsedArguments, originalMethod.bind(this));\n };\n }\n /**\n * Parse a pseudo-array `arguments` for a query and callback.\n *\n * @param {array} args - The original `arguments` pseduo-array that the original\n * method received.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n parseArguments_(args) {\n let query;\n let autoPaginate = true;\n let maxApiCalls = -1;\n let maxResults = -1;\n let callback;\n const firstArgument = args[0];\n const lastArgument = args[args.length - 1];\n if (typeof firstArgument === 'function') {\n callback = firstArgument;\n }\n else {\n query = firstArgument;\n }\n if (typeof lastArgument === 'function') {\n callback = lastArgument;\n }\n if (typeof query === 'object') {\n query = extend(true, {}, query);\n // Check if the user only asked for a certain amount of results.\n if (query.maxResults && typeof query.maxResults === 'number') {\n // `maxResults` is used API-wide.\n maxResults = query.maxResults;\n }\n else if (typeof query.pageSize === 'number') {\n // `pageSize` is Pub/Sub's `maxResults`.\n maxResults = query.pageSize;\n }\n if (query.maxApiCalls && typeof query.maxApiCalls === 'number') {\n maxApiCalls = query.maxApiCalls;\n delete query.maxApiCalls;\n }\n // maxResults is the user specified limit.\n if (maxResults !== -1 || query.autoPaginate === false) {\n autoPaginate = false;\n }\n }\n const parsedArguments = {\n query: query || {},\n autoPaginate,\n maxApiCalls,\n maxResults,\n callback,\n };\n parsedArguments.streamOptions = extend(true, {}, parsedArguments.query);\n delete parsedArguments.streamOptions.autoPaginate;\n delete parsedArguments.streamOptions.maxResults;\n delete parsedArguments.streamOptions.pageSize;\n return parsedArguments;\n }\n /**\n * This simply checks to see if `autoPaginate` is set or not, if it's true\n * then we buffer all results, otherwise simply call the original method.\n *\n * @param {array} parsedArguments - Parsed arguments from the original method\n * call.\n * @param {object=|string=} parsedArguments.query - Query object. 
This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n */\n run_(parsedArguments, originalMethod) {\n const query = parsedArguments.query;\n const callback = parsedArguments.callback;\n if (!parsedArguments.autoPaginate) {\n return originalMethod(query, callback);\n }\n const results = new Array();\n const promise = new Promise((resolve, reject) => {\n paginator\n .runAsStream_(parsedArguments, originalMethod)\n .on('error', reject)\n .on('data', (data) => results.push(data))\n .on('end', () => resolve(results));\n });\n if (!callback) {\n return promise.then(results => [results]);\n }\n promise.then(results => callback(null, results), (err) => callback(err));\n }\n /**\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {object=|string=} parsedArguments.query - Query object. This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n * @return {stream} - Readable object stream.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n runAsStream_(parsedArguments, originalMethod) {\n return new resource_stream_1.ResourceStream(parsedArguments, originalMethod);\n }\n}\nexports.Paginator = Paginator;\nconst paginator = new Paginator();\nexports.paginator = paginator;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*!\n * Copyright 2019 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = void 0;\nconst stream_1 = require(\"stream\");\nclass ResourceStream extends stream_1.Transform {\n constructor(args, requestFn) {\n const options = Object.assign({ objectMode: true }, args.streamOptions);\n super(options);\n this._ended = false;\n this._maxApiCalls = args.maxApiCalls === -1 ? 
Infinity : args.maxApiCalls;\n this._nextQuery = args.query;\n this._reading = false;\n this._requestFn = requestFn;\n this._requestsMade = 0;\n this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults;\n }\n /* eslint-disable @typescript-eslint/no-explicit-any */\n end(...args) {\n this._ended = true;\n return super.end(...args);\n }\n _read() {\n if (this._reading) {\n return;\n }\n this._reading = true;\n // Wrap in a try/catch to catch input linting errors, e.g.\n // an invalid BigQuery query. These errors are thrown in an\n // async fashion, which makes them un-catchable by the user.\n try {\n this._requestFn(this._nextQuery, (err, results, nextQuery) => {\n if (err) {\n this.destroy(err);\n return;\n }\n this._nextQuery = nextQuery;\n if (this._resultsToSend !== Infinity) {\n results = results.splice(0, this._resultsToSend);\n this._resultsToSend -= results.length;\n }\n let more = true;\n for (const result of results) {\n if (this._ended) {\n break;\n }\n more = this.push(result);\n }\n const isFinished = !this._nextQuery || this._resultsToSend < 1;\n const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls;\n if (isFinished || madeMaxCalls) {\n this.end();\n }\n if (more && !this._ended) {\n setImmediate(() => this._read());\n }\n this._reading = false;\n });\n }\n catch (e) {\n this.destroy(e);\n }\n }\n}\nexports.ResourceStream = ResourceStream;\n//# sourceMappingURL=resource-stream.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MissingProjectIdError = exports.replaceProjectIdToken = void 0;\nconst stream_1 = require(\"stream\");\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/**\n * Populate the `{{projectId}}` placeholder.\n *\n * @throws {Error} If a projectId is required, but one is not provided.\n *\n * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped.\n * @param {string} projectId - A projectId. 
If not provided\n * @return {*} - The original argument with all placeholders populated.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction replaceProjectIdToken(value, projectId) {\n if (Array.isArray(value)) {\n value = value.map(v => replaceProjectIdToken(v, projectId));\n }\n if (value !== null &&\n typeof value === 'object' &&\n !(value instanceof Buffer) &&\n !(value instanceof stream_1.Stream) &&\n typeof value.hasOwnProperty === 'function') {\n for (const opt in value) {\n // eslint-disable-next-line no-prototype-builtins\n if (value.hasOwnProperty(opt)) {\n value[opt] = replaceProjectIdToken(value[opt], projectId);\n }\n }\n }\n if (typeof value === 'string' &&\n value.indexOf('{{projectId}}') > -1) {\n if (!projectId || projectId === '{{projectId}}') {\n throw new MissingProjectIdError();\n }\n value = value.replace(/{{projectId}}/g, projectId);\n }\n return value;\n}\nexports.replaceProjectIdToken = replaceProjectIdToken;\n/**\n * Custom error type for missing project ID errors.\n */\nclass MissingProjectIdError extends Error {\n constructor() {\n super(...arguments);\n this.message = `Sorry, we cannot connect to Cloud Services without a project\n ID. You may specify one with an environment variable named\n \"GOOGLE_CLOUD_PROJECT\".`.replace(/ +/g, ' ');\n }\n}\nexports.MissingProjectIdError = MissingProjectIdError;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/* eslint-disable prefer-rest-params */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0;\n/**\n * Wraps a callback style function to conditionally return a promise.\n *\n * @param {function} originalMethod - The method to promisify.\n * @param {object=} options - Promise options.\n * @param {boolean} options.singular - Resolve the promise with single arg instead of an array.\n * @return {function} wrapped\n */\nfunction promisify(originalMethod, options) {\n if (originalMethod.promisified_) {\n return originalMethod;\n }\n options = options || {};\n const slice = Array.prototype.slice;\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n let last;\n for (last = arguments.length - 1; last >= 0; last--) {\n const arg = arguments[last];\n if (typeof arg === 'undefined') {\n continue; // skip trailing undefined.\n }\n if (typeof arg !== 'function') {\n break; // non-callback last argument found.\n }\n return originalMethod.apply(this, arguments);\n }\n // peel trailing undefined.\n const args = slice.call(arguments, 0, last + 1);\n // tslint:disable-next-line:variable-name\n let PromiseCtor = Promise;\n // Because dedupe will likely create a single install of\n // @google-cloud/common to be shared amongst all modules, we need to\n // localize it at the Service level.\n if (this && this.Promise) {\n PromiseCtor = this.Promise;\n }\n return new PromiseCtor((resolve, reject) => {\n // tslint:disable-next-line:no-any\n args.push((...args) => {\n const callbackArgs = slice.call(args);\n const err = callbackArgs.shift();\n if (err) {\n return reject(err);\n }\n if (options.singular && callbackArgs.length === 1) {\n resolve(callbackArgs[0]);\n }\n else {\n resolve(callbackArgs);\n }\n });\n originalMethod.apply(this, args);\n });\n };\n wrapper.promisified_ = true;\n return wrapper;\n}\nexports.promisify = promisify;\n/**\n * Promisifies certain Class methods. 
This will not promisify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\n// tslint:disable-next-line:variable-name\nfunction promisifyAll(Class, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.promisified_) {\n Class.prototype[methodName] = exports.promisify(originalMethod, options);\n }\n });\n}\nexports.promisifyAll = promisifyAll;\n/**\n * Wraps a promisy type function to conditionally call a callback function.\n *\n * @param {function} originalMethod - The method to callbackify.\n * @param {object=} options - Callback options.\n * @param {boolean} options.singular - Pass to the callback a single arg instead of an array.\n * @return {function} wrapped\n */\nfunction callbackify(originalMethod) {\n if (originalMethod.callbackified_) {\n return originalMethod;\n }\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n if (typeof arguments[arguments.length - 1] !== 'function') {\n return originalMethod.apply(this, arguments);\n }\n const cb = Array.prototype.pop.call(arguments);\n originalMethod.apply(this, arguments).then(\n // tslint:disable-next-line:no-any\n (res) => {\n res = Array.isArray(res) ? res : [res];\n cb(null, ...res);\n }, (err) => cb(err));\n };\n wrapper.callbackified_ = true;\n return wrapper;\n}\nexports.callbackify = callbackify;\n/**\n * Callbackifies certain Class methods. 
This will not callbackify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\nfunction callbackifyAll(\n// tslint:disable-next-line:variable-name\nClass, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.callbackified_) {\n Class.prototype[methodName] = exports.callbackify(originalMethod);\n }\n });\n}\nexports.callbackifyAll = callbackifyAll;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n createTokenAuth: () => createTokenAuth\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/auth.js\nvar REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nvar REGEX_IS_INSTALLATION = /^ghs_/;\nvar REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token,\n tokenType\n };\n}\n\n// pkg/dist-src/with-authorization-prefix.js\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n\n// pkg/dist-src/hook.js\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\n// pkg/dist-src/index.js\nvar createTokenAuth = function createTokenAuth2(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\n \"[@octokit/auth-token] Token passed to createTokenAuth is not a string\"\n );\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n createTokenAuth\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n Octokit: () => Octokit\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_universal_user_agent = require(\"universal-user-agent\");\nvar import_before_after_hook = require(\"before-after-hook\");\nvar import_request = require(\"@octokit/request\");\nvar import_graphql = require(\"@octokit/graphql\");\nvar import_auth_token = require(\"@octokit/auth-token\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"5.1.0\";\n\n// pkg/dist-src/index.js\nvar noop = () => {\n};\nvar consoleWarn = console.warn.bind(console);\nvar consoleError = console.error.bind(console);\nvar userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;\nvar Octokit = class {\n static {\n this.VERSION = VERSION;\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(\n Object.assign(\n {},\n defaults,\n options,\n options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null\n )\n );\n }\n };\n return OctokitWithDefaults;\n }\n static {\n this.plugins = [];\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n const currentPlugins = this.plugins;\n const NewOctokit = class extends this {\n static {\n this.plugins = currentPlugins.concat(\n newPlugins.filter((plugin) => !currentPlugins.includes(plugin))\n );\n }\n };\n return NewOctokit;\n }\n constructor(options = {}) {\n const hook = new import_before_after_hook.Collection();\n const requestDefaults = {\n baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n };\n requestDefaults.headers[\"user-agent\"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail;\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = import_request.request.defaults(requestDefaults);\n this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);\n this.log = Object.assign(\n {\n debug: noop,\n info: noop,\n warn: consoleWarn,\n error: consoleError\n },\n options.log\n );\n this.hook = hook;\n if (!options.authStrategy) {\n if (!options.auth) {\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n const auth = (0, import_auth_token.createTokenAuth)(options.auth);\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(\n Object.assign(\n {\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n },\n options.auth\n )\n );\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n const classConstructor = this.constructor;\n for (let i = 0; i < classConstructor.plugins.length; ++i) {\n Object.assign(this, classConstructor.plugins[i](this, options));\n }\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Octokit\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n endpoint: () => endpoint\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/defaults.js\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.0.4\";\n\n// pkg/dist-src/defaults.js\nvar userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;\nvar DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\"\n }\n};\n\n// pkg/dist-src/util/lowercase-keys.js\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\n// pkg/dist-src/util/is-plain-object.js\nfunction isPlainObject(value) {\n if (typeof value !== \"object\" || value === null)\n return false;\n if (Object.prototype.toString.call(value) !== \"[object Object]\")\n return false;\n const proto = Object.getPrototypeOf(value);\n if (proto === null)\n return true;\n const Ctor = Object.prototype.hasOwnProperty.call(proto, \"constructor\") && proto.constructor;\n return typeof Ctor === \"function\" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);\n}\n\n// pkg/dist-src/util/merge-deep.js\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n\n// pkg/dist-src/util/remove-undefined-properties.js\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === void 0) {\n delete obj[key];\n }\n }\n return obj;\n}\n\n// pkg/dist-src/merge.js\nfunction merge(defaults, route, options) {\n if (typeof route 
=== \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n } else {\n options = Object.assign({}, route);\n }\n options.headers = lowercaseKeys(options.headers);\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n if (options.url === \"/graphql\") {\n if (defaults && defaults.mediaType.previews?.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(\n (preview) => !mergedOptions.mediaType.previews.includes(preview)\n ).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, \"\"));\n }\n return mergedOptions;\n}\n\n// pkg/dist-src/util/add-query-parameters.js\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map((name) => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\n// pkg/dist-src/util/extract-url-variable-names.js\nvar urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\n// pkg/dist-src/util/omit.js\nfunction omit(object, keysToOmit) {\n const result = { __proto__: null };\n for (const key of Object.keys(object)) {\n if (keysToOmit.indexOf(key) === -1) {\n result[key] = object[key];\n }\n }\n return result;\n}\n\n// pkg/dist-src/util/url-template.js\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== void 0 && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(\n encodeValue(operator, value, isKeyOperator(operator) ? key : \"\")\n );\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n result.push(\n encodeValue(operator, value2, isKeyOperator(operator) ? 
key : \"\")\n );\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n tmp.push(encodeValue(operator, value2));\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n template = template.replace(\n /\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g,\n function(_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function(variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n }\n );\n if (template === \"/\") {\n return template;\n } else {\n return template.replace(/\\/$/, \"\");\n }\n}\n\n// pkg/dist-src/parse.js\nfunction parse(options) {\n let method = options.method.toUpperCase();\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\"\n ]);\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n headers.accept = headers.accept.split(/,/).map(\n (format) => format.replace(\n /application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/,\n `application/vnd$1$2.${options.mediaType.format}`\n )\n ).join(\",\");\n }\n if (url.endsWith(\"/graphql\")) {\n if (options.mediaType.previews?.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {\n const format = options.mediaType.format ? 
`.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n }\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n return Object.assign(\n { method, url, headers },\n typeof body !== \"undefined\" ? { body } : null,\n options.request ? { request: options.request } : null\n );\n}\n\n// pkg/dist-src/endpoint-with-defaults.js\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS2 = merge(oldDefaults, newDefaults);\n const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);\n return Object.assign(endpoint2, {\n DEFAULTS: DEFAULTS2,\n defaults: withDefaults.bind(null, DEFAULTS2),\n merge: merge.bind(null, DEFAULTS2),\n parse\n });\n}\n\n// pkg/dist-src/index.js\nvar endpoint = withDefaults(null, DEFAULTS);\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n endpoint\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n GraphqlResponseError: () => GraphqlResponseError,\n graphql: () => graphql2,\n withCustomRequest: () => withCustomRequest\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_request3 = require(\"@octokit/request\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"7.0.2\";\n\n// pkg/dist-src/with-defaults.js\nvar import_request2 = require(\"@octokit/request\");\n\n// pkg/dist-src/graphql.js\nvar import_request = require(\"@octokit/request\");\n\n// pkg/dist-src/error.js\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\n` + data.errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n}\nvar GraphqlResponseError = class extends Error {\n constructor(request2, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request2;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n this.errors = response.errors;\n this.data = response.data;\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, 
this.constructor);\n }\n }\n};\n\n// pkg/dist-src/graphql.js\nvar NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\"\n];\nvar FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nvar GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request2, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(\n new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`)\n );\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(\n new Error(\n `[@octokit/graphql] \"${key}\" cannot be used as variable name`\n )\n );\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(\n parsedOptions\n ).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request2(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(\n requestOptions,\n headers,\n response.data\n );\n }\n return response.data.data;\n });\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(request2, newDefaults) {\n const newRequest = request2.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\n\n// pkg/dist-src/index.js\nvar graphql2 = withDefaults(import_request3.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n GraphqlResponseError,\n graphql,\n withCustomRequest\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n composePaginateRest: () => composePaginateRest,\n isPaginatingEndpoint: () 
=> isPaginatingEndpoint,\n paginateRest: () => paginateRest,\n paginatingEndpoints: () => paginatingEndpoints\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.1.5\";\n\n// pkg/dist-src/normalize-paginated-list-response.js\nfunction normalizePaginatedListResponse(response) {\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n\n// pkg/dist-src/iterator.js\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n url = ((normalizedResponse.headers.link || \"\").match(\n /<([^>]+)>;\\s*rel=\"next\"/\n ) || [])[1];\n return { value: normalizedResponse };\n } catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\n\n// pkg/dist-src/paginate.js\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = void 0;\n }\n return gather(\n octokit,\n [],\n iterator(octokit, route, parameters)[Symbol.asyncIterator](),\n mapFn\n );\n}\nfunction gather(octokit, results, iterator2, mapFn) {\n return iterator2.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(\n mapFn ? 
mapFn(result.value, done) : result.value.data\n );\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator2, mapFn);\n });\n}\n\n// pkg/dist-src/compose-paginate.js\nvar composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n// pkg/dist-src/generated/paginating-endpoints.js\nvar paginatingEndpoints = [\n \"GET /advisories\",\n \"GET /app/hook/deliveries\",\n \"GET /app/installation-requests\",\n \"GET /app/installations\",\n \"GET /assignments/{assignment_id}/accepted_assignments\",\n \"GET /classrooms\",\n \"GET /classrooms/{classroom_id}/assignments\",\n \"GET /enterprises/{enterprise}/dependabot/alerts\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/actions/variables\",\n \"GET /orgs/{org}/actions/variables/{name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET /orgs/{org}/codespaces\",\n \"GET /orgs/{org}/codespaces/secrets\",\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/copilot/billing/seats\",\n \"GET /orgs/{org}/dependabot/alerts\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/members/{username}/codespaces\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/personal-access-token-requests\",\n \"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories\",\n \"GET /orgs/{org}/personal-access-tokens\",\n \"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/properties/values\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/rulesets\",\n \"GET /orgs/{org}/rulesets/rule-suites\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/security-advisories\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\",\n \"GET /repos/{owner}/{repo}/actions/organization-variables\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/variables\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/activity\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/dependabot/alerts\",\n \"GET /repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/rules/branches/{branch}\",\n \"GET /repos/{owner}/{repo}/rulesets\",\n \"GET /repos/{owner}/{repo}/rulesets/rule-suites\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/security-advisories\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/social_accounts\",\n \"GET /user/ssh_signing_keys\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n 
\"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/social_accounts\",\n \"GET /users/{username}/ssh_signing_keys\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\"\n];\n\n// pkg/dist-src/paginating-endpoints.js\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n// pkg/dist-src/index.js\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n composePaginateRest,\n isPaginatingEndpoint,\n paginateRest,\n paginatingEndpoints\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n legacyRestEndpointMethods: () => legacyRestEndpointMethods,\n restEndpointMethods: () => restEndpointMethods\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"10.3.0\";\n\n// pkg/dist-src/generated/endpoints.js\nvar Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"\n ],\n createEnvironmentVariable: [\n \"POST /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\n \"POST 
/orgs/{org}/actions/runners/registration-token\"\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\"\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\"\n ],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n deleteEnvironmentVariable: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n deleteRepoVariable: [\n \"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\"\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"\n ],\n forceCancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel\"\n ],\n generateRunnerJitconfigForOrg: [\n \"POST /orgs/{org}/actions/runners/generate-jitconfig\"\n ],\n generateRunnerJitconfigForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig\"\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\"\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\"\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n getArtifact: [\"GET 
/repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getCustomOidcSubClaimForRepo: [\n \"GET /repos/{owner}/{repo}/actions/oidc/customization/sub\"\n ],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n getEnvironmentVariable: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\"\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\"\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\"\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] }\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"\n ],\n listEnvironmentVariables: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoOrganizationSecrets: [\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\"\n ],\n 
listRepoOrganizationVariables: [\n \"GET /repos/{owner}/{repo}/actions/organization-variables\"\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\"\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n listSelectedReposForOrgVariable: [\n \"GET /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\"\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgVariable: [\n \"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n reviewCustomGatesForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule\"\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\"\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n setCustomOidcSubClaimForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/oidc/customization/sub\"\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\"\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\"\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT 
/orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\"\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n updateEnvironmentVariable: [\n \"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\n \"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"\n ]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\"\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\"\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\"\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\"\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\"\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\"\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] }\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\"\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n 
deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\"\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\"\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\"\n ],\n listInstallationRequestsForAuthenticatedApp: [\n \"GET /app/installation-requests\"\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\"\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\"\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] }\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\"\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\"\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\"\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\"\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\"\n ]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"\n ],\n listForRef: [\"GET 
/repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } }\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"\n ],\n getCodeqlDatabase: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"\n ],\n getDefaultSetup: [\"GET /repos/{owner}/{repo}/code-scanning/default-setup\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] }\n ],\n listCodeqlDatabases: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases\"\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"\n ],\n updateDefaultSetup: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/default-setup\"\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n checkPermissionsForDevcontainer: [\n \"GET /repos/{owner}/{repo}/codespaces/permissions_check\"\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\"\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\"\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\"\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE 
/repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\"\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\"\n ],\n getCodespacesForUserInOrg: [\n \"GET /orgs/{org}/members/{username}/codespaces\"\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\"\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\"\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\"\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\"\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } }\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n preFlightWithRepoForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/new\"\n ],\n publishForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/publish\"\n ],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\"\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n copilot: {\n addCopilotForBusinessSeatsForTeams: [\n \"POST /orgs/{org}/copilot/billing/selected_teams\"\n ],\n addCopilotForBusinessSeatsForUsers: [\n \"POST /orgs/{org}/copilot/billing/selected_users\"\n ],\n cancelCopilotSeatAssignmentForTeams: [\n \"DELETE /orgs/{org}/copilot/billing/selected_teams\"\n ],\n cancelCopilotSeatAssignmentForUsers: [\n \"DELETE /orgs/{org}/copilot/billing/selected_users\"\n ],\n getCopilotOrganizationDetails: [\"GET /orgs/{org}/copilot/billing\"],\n getCopilotSeatDetailsForUser: [\n \"GET /orgs/{org}/members/{username}/copilot\"\n ],\n listCopilotSeats: [\"GET 
/orgs/{org}/copilot/billing/seats\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/dependabot/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"\n ]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"\n ],\n exportSbom: [\"GET /repos/{owner}/{repo}/dependency-graph/sbom\"]\n },\n emojis: { get: [\"GET /emojis\"] },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET 
/repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] }\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\"\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] }\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] }\n ]\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"\n ],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET 
/repos/{owner}/{repo}/milestones/{milestone_number}/labels\"\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } }\n ]\n },\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\n \"DELETE /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import\"\n }\n ],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\"\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\"\n ],\n getCommitAuthors: [\n \"GET /repos/{owner}/{repo}/import/authors\",\n {},\n {\n deprecated: \"octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors\"\n }\n ],\n getImportStatus: [\n \"GET /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status\"\n }\n ],\n getLargeFiles: [\n \"GET /repos/{owner}/{repo}/import/large_files\",\n {},\n {\n deprecated: \"octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files\"\n }\n ],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\"\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] }\n ],\n mapCommitAuthor: [\n \"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\",\n {},\n {\n 
deprecated: \"octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author\"\n }\n ],\n setLfsPreference: [\n \"PATCH /repos/{owner}/{repo}/import/lfs\",\n {},\n {\n deprecated: \"octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference\"\n }\n ],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\n \"PUT /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import\"\n }\n ],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n updateImport: [\n \"PATCH /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import\"\n }\n ]\n },\n oidc: {\n getOidcCustomSubTemplateForOrg: [\n \"GET /orgs/{org}/actions/oidc/customization/sub\"\n ],\n updateOidcCustomSubTemplateForOrg: [\n \"PUT /orgs/{org}/actions/oidc/customization/sub\"\n ]\n },\n orgs: {\n addSecurityManagerTeam: [\n \"PUT /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\"\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createOrUpdateCustomProperties: [\"PATCH /orgs/{org}/properties/schema\"],\n createOrUpdateCustomPropertiesValuesForRepos: [\n \"PATCH /orgs/{org}/properties/values\"\n ],\n createOrUpdateCustomProperty: [\n \"PUT /orgs/{org}/properties/schema/{custom_property_name}\"\n ],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n delete: [\"DELETE /orgs/{org}\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\n \"POST /orgs/{org}/{security_product}/{enablement}\"\n ],\n get: [\"GET /orgs/{org}\"],\n getAllCustomProperties: [\"GET /orgs/{org}/properties/schema\"],\n getCustomProperty: [\n \"GET /orgs/{org}/properties/schema/{custom_property_name}\"\n ],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomPropertiesValuesForRepos: [\"GET /orgs/{org}/properties/values\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET 
/orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPatGrantRepositories: [\n \"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories\"\n ],\n listPatGrantRequestRepositories: [\n \"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories\"\n ],\n listPatGrantRequests: [\"GET /orgs/{org}/personal-access-token-requests\"],\n listPatGrants: [\"GET /orgs/{org}/personal-access-tokens\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeCustomProperty: [\n \"DELETE /orgs/{org}/properties/schema/{custom_property_name}\"\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\"\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\"\n ],\n removeSecurityManagerTeam: [\n \"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n reviewPatGrantRequest: [\n \"POST /orgs/{org}/personal-access-token-requests/{pat_request_id}\"\n ],\n reviewPatGrantRequestsInBulk: [\n \"POST /orgs/{org}/personal-access-token-requests\"\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\"\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\"\n ],\n updatePatAccess: [\"POST /orgs/{org}/personal-access-tokens/{pat_id}\"],\n updatePatAccesses: [\"POST /orgs/{org}/personal-access-tokens\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\"\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\"\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] }\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"\n ]\n }\n ],\n 
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\"\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\"\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\"\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n listDockerMigrationConflictingPackagesForAuthenticatedUser: [\n \"GET /user/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForOrganization: [\n \"GET /orgs/{org}/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForUser: [\n \"GET /users/{username}/docker/conflicts\"\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\"\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: 
[\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\"\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ]\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ],\n deleteForCommitComment: [\n \"DELETE 
/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ]\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] }\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\"\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n checkAutomatedSecurityFixes: [\n \"GET /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\"\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentBranchPolicy: 
[\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n createDeploymentProtectionRule: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createOrgRuleset: [\"POST /orgs/{org}/rulesets\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createRepoRuleset: [\"POST /repos/{owner}/{repo}/rulesets\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\"\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] }\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\"\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"\n ],\n deleteDeploymentBranchPolicy: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n deleteOrgRuleset: [\"DELETE /orgs/{org}/rulesets/{ruleset_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n deleteRepoRuleset: [\"DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n 
disableDeploymentProtectionRule: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n disablePrivateVulnerabilityReporting: [\n \"DELETE /repos/{owner}/{repo}/private-vulnerability-reporting\"\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] }\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n enablePrivateVulnerabilityReporting: [\n \"PUT /repos/{owner}/{repo}/private-vulnerability-reporting\"\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\"\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n getAllDeploymentProtectionRules: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n getBranchRules: [\"GET /repos/{owner}/{repo}/rules/branches/{branch}\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getCustomDeploymentProtectionRule: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n getCustomPropertiesValues: [\"GET /repos/{owner}/{repo}/properties/values\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n getDeploymentStatus: [\n \"GET 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getOrgRuleSuite: [\"GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}\"],\n getOrgRuleSuites: [\"GET /orgs/{org}/rulesets/rule-suites\"],\n getOrgRuleset: [\"GET /orgs/{org}/rulesets/{ruleset_id}\"],\n getOrgRulesets: [\"GET /orgs/{org}/rulesets\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getRepoRuleSuite: [\n \"GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}\"\n ],\n getRepoRuleSuites: [\"GET /repos/{owner}/{repo}/rulesets/rule-suites\"],\n getRepoRuleset: [\"GET /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n getRepoRulesets: [\"GET /repos/{owner}/{repo}/rulesets\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n listActivities: [\"GET /repos/{owner}/{repo}/activity\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listCustomDeploymentRuleIntegrations: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\"\n ],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\n \"GET 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\"\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH 
/repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n updateOrgRuleset: [\"PUT /orgs/{org}/rulesets/{ruleset_id}\"],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n updateRepoRuleset: [\"PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] }\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" }\n ]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ]\n },\n securityAdvisories: {\n createPrivateVulnerabilityReport: [\n \"POST /repos/{owner}/{repo}/security-advisories/reports\"\n ],\n createRepositoryAdvisory: [\n \"POST /repos/{owner}/{repo}/security-advisories\"\n ],\n createRepositoryAdvisoryCveRequest: [\n \"POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve\"\n ],\n getGlobalAdvisory: [\"GET /advisories/{ghsa_id}\"],\n getRepositoryAdvisory: [\n \"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ],\n listGlobalAdvisories: [\"GET /advisories\"],\n listOrgRepositoryAdvisories: [\"GET /orgs/{org}/security-advisories\"],\n listRepositoryAdvisories: [\"GET /repos/{owner}/{repo}/security-advisories\"],\n updateRepositoryAdvisory: [\n \"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n 
checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\"\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] }\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n addSocialAccountForAuthenticatedUser: [\"POST /user/social_accounts\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] }\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] }\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] }\n ],\n deleteEmailForAuthenticatedUser: 
[\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] }\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] }\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSocialAccountForAuthenticatedUser: [\"DELETE /user/social_accounts\"],\n deleteSshSigningKeyForAuthenticatedUser: [\n \"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] }\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] }\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\n \"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] }\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] }\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] }\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] }\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] }\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] }\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSocialAccountsForAuthenticatedUser: [\"GET /user/social_accounts\"],\n listSocialAccountsForUser: [\"GET /users/{username}/social_accounts\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] }\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\"\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: 
[\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\nvar endpoints_default = Endpoints;\n\n// pkg/dist-src/endpoints-to-methods.js\nvar endpointMethodsMap = /* @__PURE__ */ new Map();\nfor (const [scope, endpoints] of Object.entries(endpoints_default)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign(\n {\n method,\n url\n },\n defaults\n );\n if (!endpointMethodsMap.has(scope)) {\n endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());\n }\n endpointMethodsMap.get(scope).set(methodName, {\n scope,\n methodName,\n endpointDefaults,\n decorations\n });\n }\n}\nvar handler = {\n has({ scope }, methodName) {\n return endpointMethodsMap.get(scope).has(methodName);\n },\n getOwnPropertyDescriptor(target, methodName) {\n return {\n value: this.get(target, methodName),\n // ensures method is in the cache\n configurable: true,\n writable: true,\n enumerable: true\n };\n },\n defineProperty(target, methodName, descriptor) {\n Object.defineProperty(target.cache, methodName, descriptor);\n return true;\n },\n deleteProperty(target, methodName) {\n delete target.cache[methodName];\n return true;\n },\n ownKeys({ scope }) {\n return [...endpointMethodsMap.get(scope).keys()];\n },\n set(target, methodName, value) {\n return target.cache[methodName] = value;\n },\n get({ octokit, scope, cache }, methodName) {\n if (cache[methodName]) {\n return cache[methodName];\n }\n const method = endpointMethodsMap.get(scope).get(methodName);\n if (!method) {\n return void 0;\n }\n const { endpointDefaults, decorations } = method;\n if (decorations) {\n cache[methodName] = decorate(\n octokit,\n scope,\n methodName,\n endpointDefaults,\n decorations\n );\n } else {\n cache[methodName] = octokit.request.defaults(endpointDefaults);\n }\n return cache[methodName];\n }\n};\nfunction endpointsToMethods(octokit) {\n const newMethods = {};\n for (const scope of endpointMethodsMap.keys()) {\n newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n function withDecorations(...args) {\n let options = requestWithDefaults.endpoint.merge(...args);\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: void 0\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(\n `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`\n );\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n const options2 = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(\n decorations.renamedParameters\n )) {\n if (name in options2) {\n octokit.log.warn(\n `\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`\n );\n if (!(alias in options2)) {\n options2[alias] = options2[name];\n }\n delete options2[name];\n }\n }\n return requestWithDefaults(options2);\n }\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n// pkg/dist-src/index.js\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n legacyRestEndpointMethods,\n restEndpointMethods\n});\n","\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. \"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n RequestError: () => RequestError\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_deprecation = require(\"deprecation\");\nvar import_once = __toESM(require(\"once\"));\nvar logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar RequestError = class extends Error {\n constructor(message, statusCode, options) {\n super(message);\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(\n / .*$/,\n \" [REDACTED]\"\n )\n });\n }\n requestCopy.url = requestCopy.url.replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\").replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"\n )\n );\n return statusCode;\n }\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"\n )\n );\n return headers || {};\n }\n });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n RequestError\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n request: () => request\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_endpoint = require(\"@octokit/endpoint\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"8.2.0\";\n\n// pkg/dist-src/is-plain-object.js\nfunction isPlainObject(value) {\n if (typeof value !== \"object\" || value === null)\n return false;\n if (Object.prototype.toString.call(value) !== \"[object Object]\")\n return 
false;\n const proto = Object.getPrototypeOf(value);\n if (proto === null)\n return true;\n const Ctor = Object.prototype.hasOwnProperty.call(proto, \"constructor\") && proto.constructor;\n return typeof Ctor === \"function\" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);\n}\n\n// pkg/dist-src/fetch-wrapper.js\nvar import_request_error = require(\"@octokit/request-error\");\n\n// pkg/dist-src/get-buffer-response.js\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\n// pkg/dist-src/fetch-wrapper.js\nfunction fetchWrapper(requestOptions) {\n var _a, _b, _c;\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;\n if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n let { fetch } = globalThis;\n if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {\n fetch = requestOptions.request.fetch;\n }\n if (!fetch) {\n throw new Error(\n \"fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing\"\n );\n }\n return fetch(requestOptions.url, {\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,\n // duplex must be set if request.body is ReadableStream or Async Iterables.\n // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.\n ...requestOptions.body && { duplex: \"half\" }\n }).then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(\n `[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`\n );\n }\n if (status === 204 || status === 205) {\n return;\n }\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new import_request_error.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: void 0\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new import_request_error.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new import_request_error.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return parseSuccessResponseBody ? 
await getResponseData(response) : response.body;\n }).then((data) => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch((error) => {\n if (error instanceof import_request_error.RequestError)\n throw error;\n else if (error.name === \"AbortError\")\n throw error;\n let message = error.message;\n if (error.name === \"TypeError\" && \"cause\" in error) {\n if (error.cause instanceof Error) {\n message = error.cause.message;\n } else if (typeof error.cause === \"string\") {\n message = error.cause;\n }\n }\n throw new import_request_error.RequestError(message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json().catch(() => response.text()).catch(() => \"\");\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBufferResponse(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n let suffix;\n if (\"documentation_url\" in data) {\n suffix = ` - ${data.documentation_url}`;\n } else {\n suffix = \"\";\n }\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}${suffix}`;\n }\n return `${data.message}${suffix}`;\n }\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint2 = oldEndpoint.defaults(newDefaults);\n const newApi = function(route, parameters) {\n const endpointOptions = endpoint2.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint2.parse(endpointOptions));\n }\n const request2 = (route2, parameters2) => {\n return fetchWrapper(\n endpoint2.parse(endpoint2.merge(route2, parameters2))\n );\n };\n Object.assign(request2, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n return endpointOptions.request.hook(request2, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n}\n\n// pkg/dist-src/index.js\nvar request = withDefaults(import_endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n }\n});\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n request\n});\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction once(emitter, name, { signal } = {}) {\n return new Promise((resolve, reject) => {\n function cleanup() {\n signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);\n emitter.removeListener(name, onEvent);\n emitter.removeListener('error', onError);\n }\n function onEvent(...args) {\n cleanup();\n resolve(args);\n }\n function onError(err) {\n cleanup();\n reject(err);\n }\n signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup);\n emitter.on(name, onEvent);\n emitter.on('error', onError);\n });\n}\nexports.default = once;\n//# sourceMappingURL=index.js.map","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? \"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? 
\"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst events_1 = require(\"events\");\nconst debug_1 = __importDefault(require(\"debug\"));\nconst promisify_1 = __importDefault(require(\"./promisify\"));\nconst debug = debug_1.default('agent-base');\nfunction isAgent(v) {\n return Boolean(v) && typeof v.addRequest === 'function';\n}\nfunction isSecureEndpoint() {\n const { stack } = new Error();\n if (typeof stack !== 'string')\n return false;\n return stack.split('\\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1);\n}\nfunction createAgent(callback, opts) {\n return new createAgent.Agent(callback, opts);\n}\n(function (createAgent) {\n /**\n * Base `http.Agent` implementation.\n * No pooling/keep-alive is implemented by default.\n *\n * @param {Function} callback\n * @api public\n */\n class Agent extends events_1.EventEmitter {\n constructor(callback, _opts) {\n super();\n let opts = _opts;\n if (typeof callback === 'function') {\n this.callback = callback;\n }\n else if (callback) {\n opts = callback;\n }\n // Timeout for the socket to be returned from the callback\n this.timeout = null;\n if (opts && typeof opts.timeout === 'number') {\n this.timeout = opts.timeout;\n }\n // These aren't actually used by `agent-base`, but are required\n // for the TypeScript definition files in `@types/node` :/\n this.maxFreeSockets = 1;\n this.maxSockets = 1;\n this.maxTotalSockets = Infinity;\n this.sockets = {};\n this.freeSockets = {};\n this.requests = {};\n this.options = {};\n }\n get defaultPort() {\n if (typeof this.explicitDefaultPort === 'number') {\n return this.explicitDefaultPort;\n }\n return isSecureEndpoint() ? 443 : 80;\n }\n set defaultPort(v) {\n this.explicitDefaultPort = v;\n }\n get protocol() {\n if (typeof this.explicitProtocol === 'string') {\n return this.explicitProtocol;\n }\n return isSecureEndpoint() ? 'https:' : 'http:';\n }\n set protocol(v) {\n this.explicitProtocol = v;\n }\n callback(req, opts, fn) {\n throw new Error('\"agent-base\" has no default implementation, you must subclass and override `callback()`');\n }\n /**\n * Called by node-core's \"_http_client.js\" module when creating\n * a new HTTP request with this Agent instance.\n *\n * @api public\n */\n addRequest(req, _opts) {\n const opts = Object.assign({}, _opts);\n if (typeof opts.secureEndpoint !== 'boolean') {\n opts.secureEndpoint = isSecureEndpoint();\n }\n if (opts.host == null) {\n opts.host = 'localhost';\n }\n if (opts.port == null) {\n opts.port = opts.secureEndpoint ? 443 : 80;\n }\n if (opts.protocol == null) {\n opts.protocol = opts.secureEndpoint ? 
'https:' : 'http:';\n }\n if (opts.host && opts.path) {\n // If both a `host` and `path` are specified then it's most\n // likely the result of a `url.parse()` call... we need to\n // remove the `path` portion so that `net.connect()` doesn't\n // attempt to open that as a unix socket file.\n delete opts.path;\n }\n delete opts.agent;\n delete opts.hostname;\n delete opts._defaultAgent;\n delete opts.defaultPort;\n delete opts.createConnection;\n // Hint to use \"Connection: close\"\n // XXX: non-documented `http` module API :(\n req._last = true;\n req.shouldKeepAlive = false;\n let timedOut = false;\n let timeoutId = null;\n const timeoutMs = opts.timeout || this.timeout;\n const onerror = (err) => {\n if (req._hadError)\n return;\n req.emit('error', err);\n // For Safety. Some additional errors might fire later on\n // and we need to make sure we don't double-fire the error event.\n req._hadError = true;\n };\n const ontimeout = () => {\n timeoutId = null;\n timedOut = true;\n const err = new Error(`A \"socket\" was not created for HTTP request before ${timeoutMs}ms`);\n err.code = 'ETIMEOUT';\n onerror(err);\n };\n const callbackError = (err) => {\n if (timedOut)\n return;\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n onerror(err);\n };\n const onsocket = (socket) => {\n if (timedOut)\n return;\n if (timeoutId != null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n if (isAgent(socket)) {\n // `socket` is actually an `http.Agent` instance, so\n // relinquish responsibility for this `req` to the Agent\n // from here on\n debug('Callback returned another Agent instance %o', socket.constructor.name);\n socket.addRequest(req, opts);\n return;\n }\n if (socket) {\n socket.once('free', () => {\n this.freeSocket(socket, opts);\n });\n req.onSocket(socket);\n return;\n }\n const err = new Error(`no Duplex stream was returned to agent-base for \\`${req.method} ${req.path}\\``);\n onerror(err);\n };\n if (typeof this.callback !== 'function') {\n onerror(new Error('`callback` is not defined'));\n return;\n }\n if (!this.promisifiedCallback) {\n if (this.callback.length >= 3) {\n debug('Converting legacy callback function to promise');\n this.promisifiedCallback = promisify_1.default(this.callback);\n }\n else {\n this.promisifiedCallback = this.callback;\n }\n }\n if (typeof timeoutMs === 'number' && timeoutMs > 0) {\n timeoutId = setTimeout(ontimeout, timeoutMs);\n }\n if ('port' in opts && typeof opts.port !== 'number') {\n opts.port = Number(opts.port);\n }\n try {\n debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`);\n Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError);\n }\n catch (err) {\n Promise.reject(err).catch(callbackError);\n }\n }\n freeSocket(socket, opts) {\n debug('Freeing socket %o %o', socket.constructor.name, opts);\n socket.destroy();\n }\n destroy() {\n debug('Destroying agent %o', this.constructor.name);\n }\n }\n createAgent.Agent = Agent;\n // So that `instanceof` works correctly\n createAgent.prototype = createAgent.Agent.prototype;\n})(createAgent || (createAgent = {}));\nmodule.exports = createAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction promisify(fn) {\n return function (req, opts) {\n return new Promise((resolve, reject) => {\n fn.call(this, req, opts, (err, rtn) => {\n if (err) {\n reject(err);\n }\n else {\n resolve(rtn);\n }\n });\n });\n };\n}\nexports.default = 
promisify;\n//# sourceMappingURL=promisify.js.map","'use strict';\n\nconst arrify = value => {\n\tif (value === null || value === undefined) {\n\t\treturn [];\n\t}\n\n\tif (Array.isArray(value)) {\n\t\treturn value;\n\t}\n\n\tif (typeof value === 'string') {\n\t\treturn [value];\n\t}\n\n\tif (typeof value[Symbol.iterator] === 'function') {\n\t\treturn [...value];\n\t}\n\n\treturn [value];\n};\n\nmodule.exports = arrify;\n","// Packages\nvar retrier = require('retry');\n\nfunction retry(fn, opts) {\n function run(resolve, reject) {\n var options = opts || {};\n var op;\n\n // Default `randomize` to true\n if (!('randomize' in options)) {\n options.randomize = true;\n }\n\n op = retrier.operation(options);\n\n // We allow the user to abort retrying\n // this makes sense in the cases where\n // knowledge is obtained that retrying\n // would be futile (e.g.: auth errors)\n\n function bail(err) {\n reject(err || new Error('Aborted'));\n }\n\n function onError(err, num) {\n if (err.bail) {\n bail(err);\n return;\n }\n\n if (!op.retry(err)) {\n reject(op.mainError());\n } else if (options.onRetry) {\n options.onRetry(err, num);\n }\n }\n\n function runAttempt(num) {\n var val;\n\n try {\n val = fn(bail, num);\n } catch (err) {\n onError(err, num);\n return;\n }\n\n Promise.resolve(val)\n .then(resolve)\n .catch(function catchIt(err) {\n onError(err, num);\n });\n }\n\n op.attempt(runAttempt);\n }\n\n return new Promise(run);\n}\n\nmodule.exports = retry;\n","'use strict'\n\nexports.byteLength = byteLength\nexports.toByteArray = toByteArray\nexports.fromByteArray = fromByteArray\n\nvar lookup = []\nvar revLookup = []\nvar Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array\n\nvar code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\nfor (var i = 0, len = code.length; i < len; ++i) {\n lookup[i] = code[i]\n revLookup[code.charCodeAt(i)] = i\n}\n\n// Support decoding URL-safe base64 strings, as Node.js does.\n// See: https://en.wikipedia.org/wiki/Base64#URL_applications\nrevLookup['-'.charCodeAt(0)] = 62\nrevLookup['_'.charCodeAt(0)] = 63\n\nfunction getLens (b64) {\n var len = b64.length\n\n if (len % 4 > 0) {\n throw new Error('Invalid string. Length must be a multiple of 4')\n }\n\n // Trim off extra bytes after placeholder bytes are found\n // See: https://github.com/beatgammit/base64-js/issues/42\n var validLen = b64.indexOf('=')\n if (validLen === -1) validLen = len\n\n var placeHoldersLen = validLen === len\n ? 0\n : 4 - (validLen % 4)\n\n return [validLen, placeHoldersLen]\n}\n\n// base64 is 4/3 + up to two characters of the original data\nfunction byteLength (b64) {\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction _byteLength (b64, validLen, placeHoldersLen) {\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction toByteArray (b64) {\n var tmp\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n\n var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))\n\n var curByte = 0\n\n // if there are placeholders, only get up to the last complete 4 chars\n var len = placeHoldersLen > 0\n ? 
validLen - 4\n : validLen\n\n var i\n for (i = 0; i < len; i += 4) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 18) |\n (revLookup[b64.charCodeAt(i + 1)] << 12) |\n (revLookup[b64.charCodeAt(i + 2)] << 6) |\n revLookup[b64.charCodeAt(i + 3)]\n arr[curByte++] = (tmp >> 16) & 0xFF\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 2) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 2) |\n (revLookup[b64.charCodeAt(i + 1)] >> 4)\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 1) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 10) |\n (revLookup[b64.charCodeAt(i + 1)] << 4) |\n (revLookup[b64.charCodeAt(i + 2)] >> 2)\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n return arr\n}\n\nfunction tripletToBase64 (num) {\n return lookup[num >> 18 & 0x3F] +\n lookup[num >> 12 & 0x3F] +\n lookup[num >> 6 & 0x3F] +\n lookup[num & 0x3F]\n}\n\nfunction encodeChunk (uint8, start, end) {\n var tmp\n var output = []\n for (var i = start; i < end; i += 3) {\n tmp =\n ((uint8[i] << 16) & 0xFF0000) +\n ((uint8[i + 1] << 8) & 0xFF00) +\n (uint8[i + 2] & 0xFF)\n output.push(tripletToBase64(tmp))\n }\n return output.join('')\n}\n\nfunction fromByteArray (uint8) {\n var tmp\n var len = uint8.length\n var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes\n var parts = []\n var maxChunkLength = 16383 // must be multiple of 3\n\n // go through the array every three bytes, we'll deal with trailing stuff later\n for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {\n parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))\n }\n\n // pad the end with zeros, but make sure to not forget the extra bytes\n if (extraBytes === 1) {\n tmp = uint8[len - 1]\n parts.push(\n lookup[tmp >> 2] +\n lookup[(tmp << 4) & 0x3F] +\n '=='\n )\n } else if (extraBytes === 2) {\n tmp = (uint8[len - 2] << 8) + uint8[len - 1]\n parts.push(\n lookup[tmp >> 10] +\n lookup[(tmp >> 4) & 0x3F] +\n lookup[(tmp << 2) & 0x3F] +\n '='\n )\n }\n\n return parts.join('')\n}\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n",";(function (globalObject) {\r\n 'use strict';\r\n\r\n/*\r\n * bignumber.js v9.1.2\r\n * A JavaScript library for arbitrary-precision arithmetic.\r\n * https://github.com/MikeMcl/bignumber.js\r\n * Copyright (c) 2022 Michael Mclaughlin \r\n * MIT Licensed.\r\n *\r\n * BigNumber.prototype methods | BigNumber methods\r\n * |\r\n * absoluteValue abs | clone\r\n * comparedTo | config set\r\n * decimalPlaces dp | DECIMAL_PLACES\r\n * dividedBy div | ROUNDING_MODE\r\n * dividedToIntegerBy idiv | EXPONENTIAL_AT\r\n * exponentiatedBy pow | RANGE\r\n * integerValue | CRYPTO\r\n * isEqualTo eq | MODULO_MODE\r\n * isFinite | POW_PRECISION\r\n * isGreaterThan gt | FORMAT\r\n * isGreaterThanOrEqualTo gte | ALPHABET\r\n * isInteger | isBigNumber\r\n * isLessThan lt | maximum max\r\n * isLessThanOrEqualTo lte | minimum min\r\n * isNaN | random\r\n * isNegative | sum\r\n * isPositive |\r\n * isZero |\r\n * minus |\r\n * modulo mod |\r\n * multipliedBy times |\r\n * negated |\r\n * plus |\r\n * precision sd |\r\n * shiftedBy |\r\n * squareRoot sqrt |\r\n * toExponential |\r\n * toFixed |\r\n * toFormat |\r\n * toFraction |\r\n * toJSON |\r\n * toNumber |\r\n * toPrecision |\r\n * toString |\r\n * valueOf |\r\n *\r\n 
*/\r\n\r\n\r\n var BigNumber,\r\n isNumeric = /^-?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:e[+-]?\\d+)?$/i,\r\n mathceil = Math.ceil,\r\n mathfloor = Math.floor,\r\n\r\n bignumberError = '[BigNumber Error] ',\r\n tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ',\r\n\r\n BASE = 1e14,\r\n LOG_BASE = 14,\r\n MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1\r\n // MAX_INT32 = 0x7fffffff, // 2^31 - 1\r\n POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13],\r\n SQRT_BASE = 1e7,\r\n\r\n // EDITABLE\r\n // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and\r\n // the arguments to toExponential, toFixed, toFormat, and toPrecision.\r\n MAX = 1E9; // 0 to MAX_INT32\r\n\r\n\r\n /*\r\n * Create and return a BigNumber constructor.\r\n */\r\n function clone(configObject) {\r\n var div, convertBase, parseNumeric,\r\n P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null },\r\n ONE = new BigNumber(1),\r\n\r\n\r\n //----------------------------- EDITABLE CONFIG DEFAULTS -------------------------------\r\n\r\n\r\n // The default values below must be integers within the inclusive ranges stated.\r\n // The values can also be changed at run-time using BigNumber.set.\r\n\r\n // The maximum number of decimal places for operations involving division.\r\n DECIMAL_PLACES = 20, // 0 to MAX\r\n\r\n // The rounding mode used when rounding to the above decimal places, and when using\r\n // toExponential, toFixed, toFormat and toPrecision, and round (default value).\r\n // UP 0 Away from zero.\r\n // DOWN 1 Towards zero.\r\n // CEIL 2 Towards +Infinity.\r\n // FLOOR 3 Towards -Infinity.\r\n // HALF_UP 4 Towards nearest neighbour. If equidistant, up.\r\n // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down.\r\n // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour.\r\n // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity.\r\n // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity.\r\n ROUNDING_MODE = 4, // 0 to 8\r\n\r\n // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS]\r\n\r\n // The exponent value at and beneath which toString returns exponential notation.\r\n // Number type: -7\r\n TO_EXP_NEG = -7, // 0 to -MAX\r\n\r\n // The exponent value at and above which toString returns exponential notation.\r\n // Number type: 21\r\n TO_EXP_POS = 21, // 0 to MAX\r\n\r\n // RANGE : [MIN_EXP, MAX_EXP]\r\n\r\n // The minimum exponent value, beneath which underflow to zero occurs.\r\n // Number type: -324 (5e-324)\r\n MIN_EXP = -1e7, // -1 to -MAX\r\n\r\n // The maximum exponent value, above which overflow to Infinity occurs.\r\n // Number type: 308 (1.7976931348623157e+308)\r\n // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow.\r\n MAX_EXP = 1e7, // 1 to MAX\r\n\r\n // Whether to use cryptographically-secure random number generation, if available.\r\n CRYPTO = false, // true or false\r\n\r\n // The modulo mode used when calculating the modulus: a mod n.\r\n // The quotient (q = a / n) is calculated according to the corresponding rounding mode.\r\n // The remainder (r) is calculated as: r = a - n * q.\r\n //\r\n // UP 0 The remainder is positive if the dividend is negative, else is negative.\r\n // DOWN 1 The remainder has the same sign as the dividend.\r\n // This modulo mode is commonly known as 'truncated division' and is\r\n // equivalent to (a % n) in JavaScript.\r\n // FLOOR 3 The remainder has the same sign as the divisor (Python %).\r\n // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function.\r\n // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)).\r\n // The remainder is always positive.\r\n //\r\n // The truncated division, floored division, Euclidian division and IEEE 754 remainder\r\n // modes are commonly used for the modulus operation.\r\n // Although the other rounding modes can also be used, they may not give useful results.\r\n MODULO_MODE = 1, // 0 to 9\r\n\r\n // The maximum number of significant digits of the result of the exponentiatedBy operation.\r\n // If POW_PRECISION is 0, there will be unlimited significant digits.\r\n POW_PRECISION = 0, // 0 to MAX\r\n\r\n // The format specification used by the BigNumber.prototype.toFormat method.\r\n FORMAT = {\r\n prefix: '',\r\n groupSize: 3,\r\n secondaryGroupSize: 0,\r\n groupSeparator: ',',\r\n decimalSeparator: '.',\r\n fractionGroupSize: 0,\r\n fractionGroupSeparator: '\\xA0', // non-breaking space\r\n suffix: ''\r\n },\r\n\r\n // The alphabet used for base conversion. It must be at least 2 characters long, with no '+',\r\n // '-', '.', whitespace, or repeated character.\r\n // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_'\r\n ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz',\r\n alphabetHasNormalDecimalDigits = true;\r\n\r\n\r\n //------------------------------------------------------------------------------------------\r\n\r\n\r\n // CONSTRUCTOR\r\n\r\n\r\n /*\r\n * The BigNumber constructor and exported function.\r\n * Create and return a new instance of a BigNumber object.\r\n *\r\n * v {number|string|BigNumber} A numeric value.\r\n * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive.\r\n */\r\n function BigNumber(v, b) {\r\n var alphabet, c, caseChanged, e, i, isNum, len, str,\r\n x = this;\r\n\r\n // Enable constructor call without `new`.\r\n if (!(x instanceof BigNumber)) return new BigNumber(v, b);\r\n\r\n if (b == null) {\r\n\r\n if (v && v._isBigNumber === true) {\r\n x.s = v.s;\r\n\r\n if (!v.c || v.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else if (v.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = v.e;\r\n x.c = v.c.slice();\r\n }\r\n\r\n return;\r\n }\r\n\r\n if ((isNum = typeof v == 'number') && v * 0 == 0) {\r\n\r\n // Use `1 / n` to handle minus zero also.\r\n x.s = 1 / v < 0 ? (v = -v, -1) : 1;\r\n\r\n // Fast path for integers, where n < 2147483648 (2**31).\r\n if (v === ~~v) {\r\n for (e = 0, i = v; i >= 10; i /= 10, e++);\r\n\r\n if (e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else {\r\n x.e = e;\r\n x.c = [v];\r\n }\r\n\r\n return;\r\n }\r\n\r\n str = String(v);\r\n } else {\r\n\r\n if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum);\r\n\r\n x.s = str.charCodeAt(0) == 45 ? 
(str = str.slice(1), -1) : 1;\r\n }\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n\r\n // Exponential form?\r\n if ((i = str.search(/e/i)) > 0) {\r\n\r\n // Determine exponent.\r\n if (e < 0) e = i;\r\n e += +str.slice(i + 1);\r\n str = str.substring(0, i);\r\n } else if (e < 0) {\r\n\r\n // Integer.\r\n e = str.length;\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n\r\n // Allow exponential notation to be used with base 10 argument, while\r\n // also rounding to DECIMAL_PLACES as with other bases.\r\n if (b == 10 && alphabetHasNormalDecimalDigits) {\r\n x = new BigNumber(v);\r\n return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE);\r\n }\r\n\r\n str = String(v);\r\n\r\n if (isNum = typeof v == 'number') {\r\n\r\n // Avoid potential interpretation of Infinity and NaN as base 44+ values.\r\n if (v * 0 != 0) return parseNumeric(x, str, isNum, b);\r\n\r\n x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (BigNumber.DEBUG && str.replace(/^0\\.0*|\\./, '').length > 15) {\r\n throw Error\r\n (tooManyDigits + v);\r\n }\r\n } else {\r\n x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1;\r\n }\r\n\r\n alphabet = ALPHABET.slice(0, b);\r\n e = i = 0;\r\n\r\n // Check that str is a valid base b number.\r\n // Don't use RegExp, so alphabet can contain special characters.\r\n for (len = str.length; i < len; i++) {\r\n if (alphabet.indexOf(c = str.charAt(i)) < 0) {\r\n if (c == '.') {\r\n\r\n // If '.' is not the first character and it has not be found before.\r\n if (i > e) {\r\n e = len;\r\n continue;\r\n }\r\n } else if (!caseChanged) {\r\n\r\n // Allow e.g. 
hexadecimal 'FF' as well as 'ff'.\r\n if (str == str.toUpperCase() && (str = str.toLowerCase()) ||\r\n str == str.toLowerCase() && (str = str.toUpperCase())) {\r\n caseChanged = true;\r\n i = -1;\r\n e = 0;\r\n continue;\r\n }\r\n }\r\n\r\n return parseNumeric(x, String(v), isNum, b);\r\n }\r\n }\r\n\r\n // Prevent later check for length on converted number.\r\n isNum = false;\r\n str = convertBase(str, b, 10, x.s);\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n else e = str.length;\r\n }\r\n\r\n // Determine leading zeros.\r\n for (i = 0; str.charCodeAt(i) === 48; i++);\r\n\r\n // Determine trailing zeros.\r\n for (len = str.length; str.charCodeAt(--len) === 48;);\r\n\r\n if (str = str.slice(i, ++len)) {\r\n len -= i;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (isNum && BigNumber.DEBUG &&\r\n len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) {\r\n throw Error\r\n (tooManyDigits + (x.s * v));\r\n }\r\n\r\n // Overflow?\r\n if ((e = e - i - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n x.c = x.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = e;\r\n x.c = [];\r\n\r\n // Transform base\r\n\r\n // e is the base 10 exponent.\r\n // i is where to slice str to get the first element of the coefficient array.\r\n i = (e + 1) % LOG_BASE;\r\n if (e < 0) i += LOG_BASE; // i < 1\r\n\r\n if (i < len) {\r\n if (i) x.c.push(+str.slice(0, i));\r\n\r\n for (len -= LOG_BASE; i < len;) {\r\n x.c.push(+str.slice(i, i += LOG_BASE));\r\n }\r\n\r\n i = LOG_BASE - (str = str.slice(i)).length;\r\n } else {\r\n i -= len;\r\n }\r\n\r\n for (; i--; str += '0');\r\n x.c.push(+str);\r\n }\r\n } else {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n\r\n // CONSTRUCTOR PROPERTIES\r\n\r\n\r\n BigNumber.clone = clone;\r\n\r\n BigNumber.ROUND_UP = 0;\r\n BigNumber.ROUND_DOWN = 1;\r\n BigNumber.ROUND_CEIL = 2;\r\n BigNumber.ROUND_FLOOR = 3;\r\n BigNumber.ROUND_HALF_UP = 4;\r\n BigNumber.ROUND_HALF_DOWN = 5;\r\n BigNumber.ROUND_HALF_EVEN = 6;\r\n BigNumber.ROUND_HALF_CEIL = 7;\r\n BigNumber.ROUND_HALF_FLOOR = 8;\r\n BigNumber.EUCLID = 9;\r\n\r\n\r\n /*\r\n * Configure infrequently-changing library-wide settings.\r\n *\r\n * Accept an object with the following optional properties (if the value of a property is\r\n * a number, it must be an integer within the inclusive range stated):\r\n *\r\n * DECIMAL_PLACES {number} 0 to MAX\r\n * ROUNDING_MODE {number} 0 to 8\r\n * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX]\r\n * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX]\r\n * CRYPTO {boolean} true or false\r\n * MODULO_MODE {number} 0 to 9\r\n * POW_PRECISION {number} 0 to MAX\r\n * ALPHABET {string} A string of two or more unique characters which does\r\n * not contain '.'.\r\n * FORMAT {object} An object with some of the following properties:\r\n * prefix {string}\r\n * groupSize {number}\r\n * secondaryGroupSize {number}\r\n * groupSeparator {string}\r\n * decimalSeparator {string}\r\n * fractionGroupSize {number}\r\n * fractionGroupSeparator {string}\r\n * suffix {string}\r\n *\r\n * (The values assigned to the above FORMAT object properties are not checked for validity.)\r\n *\r\n * E.g.\r\n * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 })\r\n *\r\n * Ignore properties/parameters set to null or undefined, except for ALPHABET.\r\n *\r\n * Return an object with the properties 
current values.\r\n */\r\n BigNumber.config = BigNumber.set = function (obj) {\r\n var p, v;\r\n\r\n if (obj != null) {\r\n\r\n if (typeof obj == 'object') {\r\n\r\n // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n DECIMAL_PLACES = v;\r\n }\r\n\r\n // ROUNDING_MODE {number} Integer, 0 to 8 inclusive.\r\n // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 8, p);\r\n ROUNDING_MODE = v;\r\n }\r\n\r\n // EXPONENTIAL_AT {number|number[]}\r\n // Integer, -MAX to MAX inclusive or\r\n // [integer -MAX to 0 inclusive, 0 to MAX inclusive].\r\n // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, 0, p);\r\n intCheck(v[1], 0, MAX, p);\r\n TO_EXP_NEG = v[0];\r\n TO_EXP_POS = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v);\r\n }\r\n }\r\n\r\n // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or\r\n // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive].\r\n // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}'\r\n if (obj.hasOwnProperty(p = 'RANGE')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, -1, p);\r\n intCheck(v[1], 1, MAX, p);\r\n MIN_EXP = v[0];\r\n MAX_EXP = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n if (v) {\r\n MIN_EXP = -(MAX_EXP = v < 0 ? 
-v : v);\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' cannot be zero: ' + v);\r\n }\r\n }\r\n }\r\n\r\n // CRYPTO {boolean} true or false.\r\n // '[BigNumber Error] CRYPTO not true or false: {v}'\r\n // '[BigNumber Error] crypto unavailable'\r\n if (obj.hasOwnProperty(p = 'CRYPTO')) {\r\n v = obj[p];\r\n if (v === !!v) {\r\n if (v) {\r\n if (typeof crypto != 'undefined' && crypto &&\r\n (crypto.getRandomValues || crypto.randomBytes)) {\r\n CRYPTO = v;\r\n } else {\r\n CRYPTO = !v;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n } else {\r\n CRYPTO = v;\r\n }\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' not true or false: ' + v);\r\n }\r\n }\r\n\r\n // MODULO_MODE {number} Integer, 0 to 9 inclusive.\r\n // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'MODULO_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 9, p);\r\n MODULO_MODE = v;\r\n }\r\n\r\n // POW_PRECISION {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'POW_PRECISION')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n POW_PRECISION = v;\r\n }\r\n\r\n // FORMAT {object}\r\n // '[BigNumber Error] FORMAT not an object: {v}'\r\n if (obj.hasOwnProperty(p = 'FORMAT')) {\r\n v = obj[p];\r\n if (typeof v == 'object') FORMAT = v;\r\n else throw Error\r\n (bignumberError + p + ' not an object: ' + v);\r\n }\r\n\r\n // ALPHABET {string}\r\n // '[BigNumber Error] ALPHABET invalid: {v}'\r\n if (obj.hasOwnProperty(p = 'ALPHABET')) {\r\n v = obj[p];\r\n\r\n // Disallow if less than two characters,\r\n // or if it contains '+', '-', '.', whitespace, or a repeated character.\r\n if (typeof v == 'string' && !/^.?$|[+\\-.\\s]|(.).*\\1/.test(v)) {\r\n alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789';\r\n ALPHABET = v;\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' invalid: ' + v);\r\n }\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Object expected: {v}'\r\n throw Error\r\n (bignumberError + 'Object expected: ' + obj);\r\n }\r\n }\r\n\r\n return {\r\n DECIMAL_PLACES: DECIMAL_PLACES,\r\n ROUNDING_MODE: ROUNDING_MODE,\r\n EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS],\r\n RANGE: [MIN_EXP, MAX_EXP],\r\n CRYPTO: CRYPTO,\r\n MODULO_MODE: MODULO_MODE,\r\n POW_PRECISION: POW_PRECISION,\r\n FORMAT: FORMAT,\r\n ALPHABET: ALPHABET\r\n };\r\n };\r\n\r\n\r\n /*\r\n * Return true if v is a BigNumber instance, otherwise return false.\r\n *\r\n * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed.\r\n *\r\n * v {any}\r\n *\r\n * '[BigNumber Error] Invalid BigNumber: {v}'\r\n */\r\n BigNumber.isBigNumber = function (v) {\r\n if (!v || v._isBigNumber !== true) return false;\r\n if (!BigNumber.DEBUG) return true;\r\n\r\n var i, n,\r\n c = v.c,\r\n e = v.e,\r\n s = v.s;\r\n\r\n out: if ({}.toString.call(c) == '[object Array]') {\r\n\r\n if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) {\r\n\r\n // If the first element is zero, the BigNumber value must be zero.\r\n if (c[0] === 0) {\r\n if (e === 0 && c.length === 1) return true;\r\n break out;\r\n }\r\n\r\n // Calculate number of digits that c[0] should have, based on the exponent.\r\n i = (e + 1) % LOG_BASE;\r\n if (i < 1) i += LOG_BASE;\r\n\r\n // Calculate number of digits of c[0].\r\n //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) {\r\n if (String(c[0]).length == i) {\r\n\r\n for (i = 0; i < 
c.length; i++) {\r\n n = c[i];\r\n if (n < 0 || n >= BASE || n !== mathfloor(n)) break out;\r\n }\r\n\r\n // Last element cannot be zero, unless it is the only element.\r\n if (n !== 0) return true;\r\n }\r\n }\r\n\r\n // Infinity/NaN\r\n } else if (c === null && e === null && (s === null || s === 1 || s === -1)) {\r\n return true;\r\n }\r\n\r\n throw Error\r\n (bignumberError + 'Invalid BigNumber: ' + v);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the maximum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.maximum = BigNumber.max = function () {\r\n return maxOrMin(arguments, -1);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the minimum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.minimum = BigNumber.min = function () {\r\n return maxOrMin(arguments, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber with a random value equal to or greater than 0 and less than 1,\r\n * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing\r\n * zeros are produced).\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}'\r\n * '[BigNumber Error] crypto unavailable'\r\n */\r\n BigNumber.random = (function () {\r\n var pow2_53 = 0x20000000000000;\r\n\r\n // Return a 53 bit integer n, where 0 <= n < 9007199254740992.\r\n // Check if Math.random() produces more than 32 bits of randomness.\r\n // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits.\r\n // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1.\r\n var random53bitInt = (Math.random() * pow2_53) & 0x1fffff\r\n ? function () { return mathfloor(Math.random() * pow2_53); }\r\n : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) +\r\n (Math.random() * 0x800000 | 0); };\r\n\r\n return function (dp) {\r\n var a, b, e, k, v,\r\n i = 0,\r\n c = [],\r\n rand = new BigNumber(ONE);\r\n\r\n if (dp == null) dp = DECIMAL_PLACES;\r\n else intCheck(dp, 0, MAX);\r\n\r\n k = mathceil(dp / LOG_BASE);\r\n\r\n if (CRYPTO) {\r\n\r\n // Browsers supporting crypto.getRandomValues.\r\n if (crypto.getRandomValues) {\r\n\r\n a = crypto.getRandomValues(new Uint32Array(k *= 2));\r\n\r\n for (; i < k;) {\r\n\r\n // 53 bits:\r\n // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2)\r\n // 11111 11111111 11111111 11111111 11100000 00000000 00000000\r\n // ((Math.pow(2, 32) - 1) >>> 11).toString(2)\r\n // 11111 11111111 11111111\r\n // 0x20000 is 2^21.\r\n v = a[i] * 0x20000 + (a[i + 1] >>> 11);\r\n\r\n // Rejection sampling:\r\n // 0 <= v < 9007199254740992\r\n // Probability that v >= 9e15, is\r\n // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 
1 in 1251\r\n if (v >= 9e15) {\r\n b = crypto.getRandomValues(new Uint32Array(2));\r\n a[i] = b[0];\r\n a[i + 1] = b[1];\r\n } else {\r\n\r\n // 0 <= v <= 8999999999999999\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 2;\r\n }\r\n }\r\n i = k / 2;\r\n\r\n // Node.js supporting crypto.randomBytes.\r\n } else if (crypto.randomBytes) {\r\n\r\n // buffer\r\n a = crypto.randomBytes(k *= 7);\r\n\r\n for (; i < k;) {\r\n\r\n // 0x1000000000000 is 2^48, 0x10000000000 is 2^40\r\n // 0x100000000 is 2^32, 0x1000000 is 2^24\r\n // 11111 11111111 11111111 11111111 11111111 11111111 11111111\r\n // 0 <= v < 9007199254740992\r\n v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) +\r\n (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) +\r\n (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6];\r\n\r\n if (v >= 9e15) {\r\n crypto.randomBytes(7).copy(a, i);\r\n } else {\r\n\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 7;\r\n }\r\n }\r\n i = k / 7;\r\n } else {\r\n CRYPTO = false;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n }\r\n\r\n // Use Math.random.\r\n if (!CRYPTO) {\r\n\r\n for (; i < k;) {\r\n v = random53bitInt();\r\n if (v < 9e15) c[i++] = v % 1e14;\r\n }\r\n }\r\n\r\n k = c[--i];\r\n dp %= LOG_BASE;\r\n\r\n // Convert trailing digits to zeros according to dp.\r\n if (k && dp) {\r\n v = POWS_TEN[LOG_BASE - dp];\r\n c[i] = mathfloor(k / v) * v;\r\n }\r\n\r\n // Remove trailing elements which are zero.\r\n for (; c[i] === 0; c.pop(), i--);\r\n\r\n // Zero?\r\n if (i < 0) {\r\n c = [e = 0];\r\n } else {\r\n\r\n // Remove leading elements which are zero and adjust exponent accordingly.\r\n for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE);\r\n\r\n // Count the digits of the first element of c to determine leading zeros, and...\r\n for (i = 1, v = c[0]; v >= 10; v /= 10, i++);\r\n\r\n // adjust the exponent accordingly.\r\n if (i < LOG_BASE) e -= LOG_BASE - i;\r\n }\r\n\r\n rand.e = e;\r\n rand.c = c;\r\n return rand;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the sum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.sum = function () {\r\n var i = 1,\r\n args = arguments,\r\n sum = new BigNumber(args[0]);\r\n for (; i < args.length;) sum = sum.plus(args[i++]);\r\n return sum;\r\n };\r\n\r\n\r\n // PRIVATE FUNCTIONS\r\n\r\n\r\n // Called by BigNumber and BigNumber.prototype.toString.\r\n convertBase = (function () {\r\n var decimal = '0123456789';\r\n\r\n /*\r\n * Convert string of baseIn to an array of numbers of baseOut.\r\n * Eg. toBaseOut('255', 10, 16) returns [15, 15].\r\n * Eg. 
toBaseOut('ff', 16, 10) returns [2, 5, 5].\r\n */\r\n function toBaseOut(str, baseIn, baseOut, alphabet) {\r\n var j,\r\n arr = [0],\r\n arrL,\r\n i = 0,\r\n len = str.length;\r\n\r\n for (; i < len;) {\r\n for (arrL = arr.length; arrL--; arr[arrL] *= baseIn);\r\n\r\n arr[0] += alphabet.indexOf(str.charAt(i++));\r\n\r\n for (j = 0; j < arr.length; j++) {\r\n\r\n if (arr[j] > baseOut - 1) {\r\n if (arr[j + 1] == null) arr[j + 1] = 0;\r\n arr[j + 1] += arr[j] / baseOut | 0;\r\n arr[j] %= baseOut;\r\n }\r\n }\r\n }\r\n\r\n return arr.reverse();\r\n }\r\n\r\n // Convert a numeric string of baseIn to a numeric string of baseOut.\r\n // If the caller is toString, we are converting from base 10 to baseOut.\r\n // If the caller is BigNumber, we are converting from baseIn to base 10.\r\n return function (str, baseIn, baseOut, sign, callerIsToString) {\r\n var alphabet, d, e, k, r, x, xc, y,\r\n i = str.indexOf('.'),\r\n dp = DECIMAL_PLACES,\r\n rm = ROUNDING_MODE;\r\n\r\n // Non-integer.\r\n if (i >= 0) {\r\n k = POW_PRECISION;\r\n\r\n // Unlimited precision.\r\n POW_PRECISION = 0;\r\n str = str.replace('.', '');\r\n y = new BigNumber(baseIn);\r\n x = y.pow(str.length - i);\r\n POW_PRECISION = k;\r\n\r\n // Convert str as if an integer, then restore the fraction part by dividing the\r\n // result by its base raised to a power.\r\n\r\n y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'),\r\n 10, baseOut, decimal);\r\n y.e = y.c.length;\r\n }\r\n\r\n // Convert the number as integer.\r\n\r\n xc = toBaseOut(str, baseIn, baseOut, callerIsToString\r\n ? (alphabet = ALPHABET, decimal)\r\n : (alphabet = decimal, ALPHABET));\r\n\r\n // xc now represents str as an integer and converted to baseOut. e is the exponent.\r\n e = k = xc.length;\r\n\r\n // Remove trailing zeros.\r\n for (; xc[--k] == 0; xc.pop());\r\n\r\n // Zero?\r\n if (!xc[0]) return alphabet.charAt(0);\r\n\r\n // Does str represent an integer? If so, no need for the division.\r\n if (i < 0) {\r\n --e;\r\n } else {\r\n x.c = xc;\r\n x.e = e;\r\n\r\n // The sign is needed for correct rounding.\r\n x.s = sign;\r\n x = div(x, y, dp, rm, baseOut);\r\n xc = x.c;\r\n r = x.r;\r\n e = x.e;\r\n }\r\n\r\n // xc now represents str converted to baseOut.\r\n\r\n // THe index of the rounding digit.\r\n d = e + dp + 1;\r\n\r\n // The rounding digit: the digit to the right of the digit that may be rounded up.\r\n i = xc[d];\r\n\r\n // Look at the rounding digits and mode to determine whether to round up.\r\n\r\n k = baseOut / 2;\r\n r = r || d < 0 || xc[d + 1] != null;\r\n\r\n r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n // If the index of the rounding digit is not greater than zero, or xc represents\r\n // zero, then the result of the base conversion is zero or, if rounding up, a value\r\n // such as 0.00001.\r\n if (d < 1 || !xc[0]) {\r\n\r\n // 1^-dp or 0\r\n str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0);\r\n } else {\r\n\r\n // Truncate xc to the required number of decimal places.\r\n xc.length = d;\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n // Rounding up may mean the previous digit has to be rounded up and so on.\r\n for (--baseOut; ++xc[--d] > baseOut;) {\r\n xc[d] = 0;\r\n\r\n if (!d) {\r\n ++e;\r\n xc = [1].concat(xc);\r\n }\r\n }\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (k = xc.length; !xc[--k];);\r\n\r\n // E.g. 
[4, 11, 15] becomes 4bf.\r\n for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++]));\r\n\r\n // Add leading zeros, decimal point and trailing zeros as required.\r\n str = toFixedPoint(str, e, alphabet.charAt(0));\r\n }\r\n\r\n // The caller will add the sign.\r\n return str;\r\n };\r\n })();\r\n\r\n\r\n // Perform division in the specified base. Called by div and convertBase.\r\n div = (function () {\r\n\r\n // Assume non-zero x and k.\r\n function multiply(x, k, base) {\r\n var m, temp, xlo, xhi,\r\n carry = 0,\r\n i = x.length,\r\n klo = k % SQRT_BASE,\r\n khi = k / SQRT_BASE | 0;\r\n\r\n for (x = x.slice(); i--;) {\r\n xlo = x[i] % SQRT_BASE;\r\n xhi = x[i] / SQRT_BASE | 0;\r\n m = khi * xlo + xhi * klo;\r\n temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry;\r\n carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi;\r\n x[i] = temp % base;\r\n }\r\n\r\n if (carry) x = [carry].concat(x);\r\n\r\n return x;\r\n }\r\n\r\n function compare(a, b, aL, bL) {\r\n var i, cmp;\r\n\r\n if (aL != bL) {\r\n cmp = aL > bL ? 1 : -1;\r\n } else {\r\n\r\n for (i = cmp = 0; i < aL; i++) {\r\n\r\n if (a[i] != b[i]) {\r\n cmp = a[i] > b[i] ? 1 : -1;\r\n break;\r\n }\r\n }\r\n }\r\n\r\n return cmp;\r\n }\r\n\r\n function subtract(a, b, aL, base) {\r\n var i = 0;\r\n\r\n // Subtract b from a.\r\n for (; aL--;) {\r\n a[aL] -= i;\r\n i = a[aL] < b[aL] ? 1 : 0;\r\n a[aL] = i * base + a[aL] - b[aL];\r\n }\r\n\r\n // Remove leading zeros.\r\n for (; !a[0] && a.length > 1; a.splice(0, 1));\r\n }\r\n\r\n // x: dividend, y: divisor.\r\n return function (x, y, dp, rm, base) {\r\n var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0,\r\n yL, yz,\r\n s = x.s == y.s ? 1 : -1,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n // Either NaN, Infinity or 0?\r\n if (!xc || !xc[0] || !yc || !yc[0]) {\r\n\r\n return new BigNumber(\r\n\r\n // Return NaN if either NaN, or both Infinity or 0.\r\n !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN :\r\n\r\n // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0.\r\n xc && xc[0] == 0 || !yc ? 
s * 0 : s / 0\r\n );\r\n }\r\n\r\n q = new BigNumber(s);\r\n qc = q.c = [];\r\n e = x.e - y.e;\r\n s = dp + e + 1;\r\n\r\n if (!base) {\r\n base = BASE;\r\n e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE);\r\n s = s / LOG_BASE | 0;\r\n }\r\n\r\n // Result exponent may be one less then the current value of e.\r\n // The coefficients of the BigNumbers from convertBase may have trailing zeros.\r\n for (i = 0; yc[i] == (xc[i] || 0); i++);\r\n\r\n if (yc[i] > (xc[i] || 0)) e--;\r\n\r\n if (s < 0) {\r\n qc.push(1);\r\n more = true;\r\n } else {\r\n xL = xc.length;\r\n yL = yc.length;\r\n i = 0;\r\n s += 2;\r\n\r\n // Normalise xc and yc so highest order digit of yc is >= base / 2.\r\n\r\n n = mathfloor(base / (yc[0] + 1));\r\n\r\n // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1.\r\n // if (n > 1 || n++ == 1 && yc[0] < base / 2) {\r\n if (n > 1) {\r\n yc = multiply(yc, n, base);\r\n xc = multiply(xc, n, base);\r\n yL = yc.length;\r\n xL = xc.length;\r\n }\r\n\r\n xi = yL;\r\n rem = xc.slice(0, yL);\r\n remL = rem.length;\r\n\r\n // Add zeros to make remainder as long as divisor.\r\n for (; remL < yL; rem[remL++] = 0);\r\n yz = yc.slice();\r\n yz = [0].concat(yz);\r\n yc0 = yc[0];\r\n if (yc[1] >= base / 2) yc0++;\r\n // Not necessary, but to prevent trial digit n > base, when using base 3.\r\n // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15;\r\n\r\n do {\r\n n = 0;\r\n\r\n // Compare divisor and remainder.\r\n cmp = compare(yc, rem, yL, remL);\r\n\r\n // If divisor < remainder.\r\n if (cmp < 0) {\r\n\r\n // Calculate trial digit, n.\r\n\r\n rem0 = rem[0];\r\n if (yL != remL) rem0 = rem0 * base + (rem[1] || 0);\r\n\r\n // n is how many times the divisor goes into the current remainder.\r\n n = mathfloor(rem0 / yc0);\r\n\r\n // Algorithm:\r\n // product = divisor multiplied by trial digit (n).\r\n // Compare product and remainder.\r\n // If product is greater than remainder:\r\n // Subtract divisor from product, decrement trial digit.\r\n // Subtract product from remainder.\r\n // If product was less than remainder at the last compare:\r\n // Compare new remainder and divisor.\r\n // If remainder is greater than divisor:\r\n // Subtract divisor from remainder, increment trial digit.\r\n\r\n if (n > 1) {\r\n\r\n // n may be > base only when base is 3.\r\n if (n >= base) n = base - 1;\r\n\r\n // product = divisor * trial digit.\r\n prod = multiply(yc, n, base);\r\n prodL = prod.length;\r\n remL = rem.length;\r\n\r\n // Compare product and remainder.\r\n // If product > remainder then trial digit n too high.\r\n // n is 1 too high about 5% of the time, and is not known to have\r\n // ever been more than 1 too high.\r\n while (compare(prod, rem, prodL, remL) == 1) {\r\n n--;\r\n\r\n // Subtract divisor from product.\r\n subtract(prod, yL < prodL ? 
yz : yc, prodL, base);\r\n prodL = prod.length;\r\n cmp = 1;\r\n }\r\n } else {\r\n\r\n // n is 0 or 1, cmp is -1.\r\n // If n is 0, there is no need to compare yc and rem again below,\r\n // so change cmp to 1 to avoid it.\r\n // If n is 1, leave cmp as -1, so yc and rem are compared again.\r\n if (n == 0) {\r\n\r\n // divisor < remainder, so n must be at least 1.\r\n cmp = n = 1;\r\n }\r\n\r\n // product = divisor\r\n prod = yc.slice();\r\n prodL = prod.length;\r\n }\r\n\r\n if (prodL < remL) prod = [0].concat(prod);\r\n\r\n // Subtract product from remainder.\r\n subtract(rem, prod, remL, base);\r\n remL = rem.length;\r\n\r\n // If product was < remainder.\r\n if (cmp == -1) {\r\n\r\n // Compare divisor and new remainder.\r\n // If divisor < new remainder, subtract divisor from remainder.\r\n // Trial digit n too low.\r\n // n is 1 too low about 5% of the time, and very rarely 2 too low.\r\n while (compare(yc, rem, yL, remL) < 1) {\r\n n++;\r\n\r\n // Subtract divisor from remainder.\r\n subtract(rem, yL < remL ? yz : yc, remL, base);\r\n remL = rem.length;\r\n }\r\n }\r\n } else if (cmp === 0) {\r\n n++;\r\n rem = [0];\r\n } // else cmp === 1 and n will be 0\r\n\r\n // Add the next digit, n, to the result array.\r\n qc[i++] = n;\r\n\r\n // Update the remainder.\r\n if (rem[0]) {\r\n rem[remL++] = xc[xi] || 0;\r\n } else {\r\n rem = [xc[xi]];\r\n remL = 1;\r\n }\r\n } while ((xi++ < xL || rem[0] != null) && s--);\r\n\r\n more = rem[0] != null;\r\n\r\n // Leading zero?\r\n if (!qc[0]) qc.splice(0, 1);\r\n }\r\n\r\n if (base == BASE) {\r\n\r\n // To calculate q.e, first get the number of digits of qc[0].\r\n for (i = 1, s = qc[0]; s >= 10; s /= 10, i++);\r\n\r\n round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more);\r\n\r\n // Caller is convertBase.\r\n } else {\r\n q.e = e;\r\n q.r = +more;\r\n }\r\n\r\n return q;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a string representing the value of BigNumber n in fixed-point or exponential\r\n * notation rounded to the specified decimal places or significant digits.\r\n *\r\n * n: a BigNumber.\r\n * i: the index of the last digit required (i.e. the digit that may be rounded up).\r\n * rm: the rounding mode.\r\n * id: 1 (toExponential) or 2 (toPrecision).\r\n */\r\n function format(n, i, rm, id) {\r\n var c0, e, ne, len, str;\r\n\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n if (!n.c) return n.toString();\r\n\r\n c0 = n.c[0];\r\n ne = n.e;\r\n\r\n if (i == null) {\r\n str = coeffToString(n.c);\r\n str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS)\r\n ? 
toExponential(str, ne)\r\n : toFixedPoint(str, ne, '0');\r\n } else {\r\n n = round(new BigNumber(n), i, rm);\r\n\r\n // n.e may have changed if the value was rounded up.\r\n e = n.e;\r\n\r\n str = coeffToString(n.c);\r\n len = str.length;\r\n\r\n // toPrecision returns exponential notation if the number of significant digits\r\n // specified is less than the number of digits necessary to represent the integer\r\n // part of the value in fixed-point notation.\r\n\r\n // Exponential notation.\r\n if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) {\r\n\r\n // Append zeros?\r\n for (; len < i; str += '0', len++);\r\n str = toExponential(str, e);\r\n\r\n // Fixed-point notation.\r\n } else {\r\n i -= ne;\r\n str = toFixedPoint(str, e, '0');\r\n\r\n // Append zeros?\r\n if (e + 1 > len) {\r\n if (--i > 0) for (str += '.'; i--; str += '0');\r\n } else {\r\n i += e - len;\r\n if (i > 0) {\r\n if (e + 1 == len) str += '.';\r\n for (; i--; str += '0');\r\n }\r\n }\r\n }\r\n }\r\n\r\n return n.s < 0 && c0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // Handle BigNumber.max and BigNumber.min.\r\n // If any number is NaN, return NaN.\r\n function maxOrMin(args, n) {\r\n var k, y,\r\n i = 1,\r\n x = new BigNumber(args[0]);\r\n\r\n for (; i < args.length; i++) {\r\n y = new BigNumber(args[i]);\r\n if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) {\r\n x = y;\r\n }\r\n }\r\n\r\n return x;\r\n }\r\n\r\n\r\n /*\r\n * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP.\r\n * Called by minus, plus and times.\r\n */\r\n function normalise(n, c, e) {\r\n var i = 1,\r\n j = c.length;\r\n\r\n // Remove trailing zeros.\r\n for (; !c[--j]; c.pop());\r\n\r\n // Calculate the base 10 exponent. First get the number of digits of c[0].\r\n for (j = c[0]; j >= 10; j /= 10, i++);\r\n\r\n // Overflow?\r\n if ((e = i + e * LOG_BASE - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n n.c = n.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n n.c = [n.e = 0];\r\n } else {\r\n n.e = e;\r\n n.c = c;\r\n }\r\n\r\n return n;\r\n }\r\n\r\n\r\n // Handle values that fail the validity test in BigNumber.\r\n parseNumeric = (function () {\r\n var basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i,\r\n dotAfter = /^([^.]+)\\.$/,\r\n dotBefore = /^\\.([^.]+)$/,\r\n isInfinityOrNaN = /^-?(Infinity|NaN)$/,\r\n whitespaceOrPlus = /^\\s*\\+(?=[\\w.])|^\\s+|\\s+$/g;\r\n\r\n return function (x, str, isNum, b) {\r\n var base,\r\n s = isNum ? str : str.replace(whitespaceOrPlus, '');\r\n\r\n // No exception on ±Infinity or NaN.\r\n if (isInfinityOrNaN.test(s)) {\r\n x.s = isNaN(s) ? null : s < 0 ? -1 : 1;\r\n } else {\r\n if (!isNum) {\r\n\r\n // basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i\r\n s = s.replace(basePrefix, function (m, p1, p2) {\r\n base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8;\r\n return !b || b == base ? p1 : m;\r\n });\r\n\r\n if (b) {\r\n base = b;\r\n\r\n // E.g. '1.' to '1', '.1' to '0.1'\r\n s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1');\r\n }\r\n\r\n if (str != s) return new BigNumber(s, base);\r\n }\r\n\r\n // '[BigNumber Error] Not a number: {n}'\r\n // '[BigNumber Error] Not a base {b} number: {n}'\r\n if (BigNumber.DEBUG) {\r\n throw Error\r\n (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str);\r\n }\r\n\r\n // NaN\r\n x.s = null;\r\n }\r\n\r\n x.c = x.e = null;\r\n }\r\n })();\r\n\r\n\r\n /*\r\n * Round x to sd significant digits using rounding mode rm. 
Check for over/under-flow.\r\n * If r is truthy, it is known that there are more digits after the rounding digit.\r\n */\r\n function round(x, sd, rm, r) {\r\n var d, i, j, k, n, ni, rd,\r\n xc = x.c,\r\n pows10 = POWS_TEN;\r\n\r\n // if x is not Infinity or NaN...\r\n if (xc) {\r\n\r\n // rd is the rounding digit, i.e. the digit after the digit that may be rounded up.\r\n // n is a base 1e14 number, the value of the element of array x.c containing rd.\r\n // ni is the index of n within x.c.\r\n // d is the number of digits of n.\r\n // i is the index of rd within n including leading zeros.\r\n // j is the actual index of rd within n (if < 0, rd is a leading zero).\r\n out: {\r\n\r\n // Get the number of digits of the first element of xc.\r\n for (d = 1, k = xc[0]; k >= 10; k /= 10, d++);\r\n i = sd - d;\r\n\r\n // If the rounding digit is in the first element of xc...\r\n if (i < 0) {\r\n i += LOG_BASE;\r\n j = sd;\r\n n = xc[ni = 0];\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = mathfloor(n / pows10[d - j - 1] % 10);\r\n } else {\r\n ni = mathceil((i + 1) / LOG_BASE);\r\n\r\n if (ni >= xc.length) {\r\n\r\n if (r) {\r\n\r\n // Needed by sqrt.\r\n for (; xc.length <= ni; xc.push(0));\r\n n = rd = 0;\r\n d = 1;\r\n i %= LOG_BASE;\r\n j = i - LOG_BASE + 1;\r\n } else {\r\n break out;\r\n }\r\n } else {\r\n n = k = xc[ni];\r\n\r\n // Get the number of digits of n.\r\n for (d = 1; k >= 10; k /= 10, d++);\r\n\r\n // Get the index of rd within n.\r\n i %= LOG_BASE;\r\n\r\n // Get the index of rd within n, adjusted for leading zeros.\r\n // The number of leading zeros of n is given by LOG_BASE - d.\r\n j = i - LOG_BASE + d;\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10);\r\n }\r\n }\r\n\r\n r = r || sd < 0 ||\r\n\r\n // Are there any non-zero digits after the rounding digit?\r\n // The expression n % pows10[d - j - 1] returns all digits of n to the right\r\n // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714.\r\n xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]);\r\n\r\n r = rm < 4\r\n ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 &&\r\n\r\n // Check whether the digit to the left of the rounding digit is odd.\r\n ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n if (sd < 1 || !xc[0]) {\r\n xc.length = 0;\r\n\r\n if (r) {\r\n\r\n // Convert sd to decimal places.\r\n sd -= x.e + 1;\r\n\r\n // 1, 0.1, 0.01, 0.001, 0.0001 etc.\r\n xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE];\r\n x.e = -sd || 0;\r\n } else {\r\n\r\n // Zero.\r\n xc[0] = x.e = 0;\r\n }\r\n\r\n return x;\r\n }\r\n\r\n // Remove excess digits.\r\n if (i == 0) {\r\n xc.length = ni;\r\n k = 1;\r\n ni--;\r\n } else {\r\n xc.length = ni + 1;\r\n k = pows10[LOG_BASE - i];\r\n\r\n // E.g. 56700 becomes 56000 if 7 is the rounding digit.\r\n // j > 0 means i > number of leading zeros of n.\r\n xc[ni] = j > 0 ? 
mathfloor(n / pows10[d - j] % pows10[j]) * k : 0;\r\n }\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n for (; ;) {\r\n\r\n // If the digit to be rounded up is in the first element of xc...\r\n if (ni == 0) {\r\n\r\n // i will be the length of xc[0] before k is added.\r\n for (i = 1, j = xc[0]; j >= 10; j /= 10, i++);\r\n j = xc[0] += k;\r\n for (k = 1; j >= 10; j /= 10, k++);\r\n\r\n // if i != k the length has increased.\r\n if (i != k) {\r\n x.e++;\r\n if (xc[0] == BASE) xc[0] = 1;\r\n }\r\n\r\n break;\r\n } else {\r\n xc[ni] += k;\r\n if (xc[ni] != BASE) break;\r\n xc[ni--] = 0;\r\n k = 1;\r\n }\r\n }\r\n }\r\n\r\n // Remove trailing zeros.\r\n for (i = xc.length; xc[--i] === 0; xc.pop());\r\n }\r\n\r\n // Overflow? Infinity.\r\n if (x.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n\r\n // Underflow? Zero.\r\n } else if (x.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n return x;\r\n }\r\n\r\n\r\n function valueOf(n) {\r\n var str,\r\n e = n.e;\r\n\r\n if (e === null) return n.toString();\r\n\r\n str = coeffToString(n.c);\r\n\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(str, e)\r\n : toFixedPoint(str, e, '0');\r\n\r\n return n.s < 0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // PROTOTYPE/INSTANCE METHODS\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the absolute value of this BigNumber.\r\n */\r\n P.absoluteValue = P.abs = function () {\r\n var x = new BigNumber(this);\r\n if (x.s < 0) x.s = 1;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * Return\r\n * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * -1 if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * 0 if they have the same value,\r\n * or null if the value of either is NaN.\r\n */\r\n P.comparedTo = function (y, b) {\r\n return compare(this, new BigNumber(y, b));\r\n };\r\n\r\n\r\n /*\r\n * If dp is undefined or null or true or false, return the number of decimal places of the\r\n * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n *\r\n * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * [dp] {number} Decimal places: integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.decimalPlaces = P.dp = function (dp, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), dp + x.e + 1, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE;\r\n\r\n // Subtract the number of trailing zeros of the last number.\r\n if (v = c[v]) for (; v % 10 == 0; v /= 10, n--);\r\n if (n < 0) n = 0;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * n / 0 = I\r\n * n / N = N\r\n * n / I = 0\r\n * 0 / n = 0\r\n * 0 / 0 = N\r\n * 0 / N = N\r\n * 0 / I = 0\r\n * N / n = N\r\n * N / 0 = N\r\n * N / N = N\r\n * N / I = N\r\n * I / n = I\r\n * I / 0 = I\r\n * I / N = N\r\n * I / I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber divided by the value of\r\n * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.dividedBy = P.div = function (y, b) {\r\n return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the integer part of dividing the value of this\r\n * BigNumber by the value of BigNumber(y, b).\r\n */\r\n P.dividedToIntegerBy = P.idiv = function (y, b) {\r\n return div(this, new BigNumber(y, b), 0, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the value of this BigNumber exponentiated by n.\r\n *\r\n * If m is present, return the result modulo m.\r\n * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE.\r\n * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE.\r\n *\r\n * The modular power operation works efficiently when x, n, and m are integers, otherwise it\r\n * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0.\r\n *\r\n * n {number|string|BigNumber} The exponent. An integer.\r\n * [m] {number|string|BigNumber} The modulus.\r\n *\r\n * '[BigNumber Error] Exponent not an integer: {n}'\r\n */\r\n P.exponentiatedBy = P.pow = function (n, m) {\r\n var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y,\r\n x = this;\r\n\r\n n = new BigNumber(n);\r\n\r\n // Allow NaN and ±Infinity, but not other non-integers.\r\n if (n.c && !n.isInteger()) {\r\n throw Error\r\n (bignumberError + 'Exponent not an integer: ' + valueOf(n));\r\n }\r\n\r\n if (m != null) m = new BigNumber(m);\r\n\r\n // Exponent of MAX_SAFE_INTEGER is 15.\r\n nIsBig = n.e > 14;\r\n\r\n // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0.\r\n if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) {\r\n\r\n // The sign of the result of pow when x is negative depends on the evenness of n.\r\n // If +n overflows to ±Infinity, the evenness of n would be not be known.\r\n y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n)));\r\n return m ? y.mod(m) : y;\r\n }\r\n\r\n nIsNeg = n.s < 0;\r\n\r\n if (m) {\r\n\r\n // x % m returns NaN if abs(m) is zero, or m is NaN.\r\n if (m.c ? 
!m.c[0] : !m.s) return new BigNumber(NaN);\r\n\r\n isModExp = !nIsNeg && x.isInteger() && m.isInteger();\r\n\r\n if (isModExp) x = x.mod(m);\r\n\r\n // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15.\r\n // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15.\r\n } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0\r\n // [1, 240000000]\r\n ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7\r\n // [80000000000000] [99999750000000]\r\n : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) {\r\n\r\n // If x is negative and n is odd, k = -0, else k = 0.\r\n k = x.s < 0 && isOdd(n) ? -0 : 0;\r\n\r\n // If x >= 1, k = ±Infinity.\r\n if (x.e > -1) k = 1 / k;\r\n\r\n // If n is negative return ±0, else return ±Infinity.\r\n return new BigNumber(nIsNeg ? 1 / k : k);\r\n\r\n } else if (POW_PRECISION) {\r\n\r\n // Truncating each coefficient array to a length of k after each multiplication\r\n // equates to truncating significant digits to POW_PRECISION + [28, 41],\r\n // i.e. there will be a minimum of 28 guard digits retained.\r\n k = mathceil(POW_PRECISION / LOG_BASE + 2);\r\n }\r\n\r\n if (nIsBig) {\r\n half = new BigNumber(0.5);\r\n if (nIsNeg) n.s = 1;\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = Math.abs(+valueOf(n));\r\n nIsOdd = i % 2;\r\n }\r\n\r\n y = new BigNumber(ONE);\r\n\r\n // Performs 54 loop iterations for n of 9007199254740991.\r\n for (; ;) {\r\n\r\n if (nIsOdd) {\r\n y = y.times(x);\r\n if (!y.c) break;\r\n\r\n if (k) {\r\n if (y.c.length > k) y.c.length = k;\r\n } else if (isModExp) {\r\n y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (i) {\r\n i = mathfloor(i / 2);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n } else {\r\n n = n.times(half);\r\n round(n, n.e + 1, 1);\r\n\r\n if (n.e > 14) {\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = +valueOf(n);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n }\r\n }\r\n\r\n x = x.times(x);\r\n\r\n if (k) {\r\n if (x.c && x.c.length > k) x.c.length = k;\r\n } else if (isModExp) {\r\n x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (isModExp) return y;\r\n if (nIsNeg) y = ONE.div(y);\r\n\r\n return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer\r\n * using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}'\r\n */\r\n P.integerValue = function (rm) {\r\n var n = new BigNumber(this);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n return round(n, n.e + 1, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isEqualTo = P.eq = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is a finite number, otherwise return false.\r\n */\r\n P.isFinite = function () {\r\n return !!this.c;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isGreaterThan = P.gt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isGreaterThanOrEqualTo = P.gte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0;\r\n\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is an integer, otherwise return false.\r\n */\r\n P.isInteger = function () {\r\n return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isLessThan = P.lt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isLessThanOrEqualTo = P.lte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is NaN, otherwise return false.\r\n */\r\n P.isNaN = function () {\r\n return !this.s;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is negative, otherwise return false.\r\n */\r\n P.isNegative = function () {\r\n return this.s < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is positive, otherwise return false.\r\n */\r\n P.isPositive = function () {\r\n return this.s > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is 0 or -0, otherwise return false.\r\n */\r\n P.isZero = function () {\r\n return !!this.c && this.c[0] == 0;\r\n };\r\n\r\n\r\n /*\r\n * n - 0 = n\r\n * n - N = N\r\n * n - I = -I\r\n * 0 - n = -n\r\n * 0 - 0 = 0\r\n * 0 - N = N\r\n * 0 - I = -I\r\n * N - n = N\r\n * N - 0 = N\r\n * N - N = N\r\n * N - I = N\r\n * I - n = I\r\n * I - 0 = I\r\n * I - N = N\r\n * I - I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber minus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.minus = function (y, b) {\r\n var i, j, t, xLTy,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.plus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return 
xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN);\r\n\r\n // Either zero?\r\n if (!xc[0] || !yc[0]) {\r\n\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x :\r\n\r\n // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity\r\n ROUNDING_MODE == 3 ? -0 : 0);\r\n }\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Determine which is the bigger number.\r\n if (a = xe - ye) {\r\n\r\n if (xLTy = a < 0) {\r\n a = -a;\r\n t = xc;\r\n } else {\r\n ye = xe;\r\n t = yc;\r\n }\r\n\r\n t.reverse();\r\n\r\n // Prepend zeros to equalise exponents.\r\n for (b = a; b--; t.push(0));\r\n t.reverse();\r\n } else {\r\n\r\n // Exponents equal. Check digit by digit.\r\n j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b;\r\n\r\n for (a = b = 0; b < j; b++) {\r\n\r\n if (xc[b] != yc[b]) {\r\n xLTy = xc[b] < yc[b];\r\n break;\r\n }\r\n }\r\n }\r\n\r\n // x < y? Point xc to the array of the bigger number.\r\n if (xLTy) {\r\n t = xc;\r\n xc = yc;\r\n yc = t;\r\n y.s = -y.s;\r\n }\r\n\r\n b = (j = yc.length) - (i = xc.length);\r\n\r\n // Append zeros to xc if shorter.\r\n // No need to add zeros to yc if shorter as subtract only needs to start at yc.length.\r\n if (b > 0) for (; b--; xc[i++] = 0);\r\n b = BASE - 1;\r\n\r\n // Subtract yc from xc.\r\n for (; j > a;) {\r\n\r\n if (xc[--j] < yc[j]) {\r\n for (i = j; i && !xc[--i]; xc[i] = b);\r\n --xc[i];\r\n xc[j] += BASE;\r\n }\r\n\r\n xc[j] -= yc[j];\r\n }\r\n\r\n // Remove leading zeros and adjust exponent accordingly.\r\n for (; xc[0] == 0; xc.splice(0, 1), --ye);\r\n\r\n // Zero?\r\n if (!xc[0]) {\r\n\r\n // Following IEEE 754 (2008) 6.3,\r\n // n - n = +0 but n - n = -0 when rounding towards -Infinity.\r\n y.s = ROUNDING_MODE == 3 ? -1 : 1;\r\n y.c = [y.e = 0];\r\n return y;\r\n }\r\n\r\n // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity\r\n // for finite x and y.\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * n % 0 = N\r\n * n % N = N\r\n * n % I = n\r\n * 0 % n = 0\r\n * -0 % n = -0\r\n * 0 % 0 = N\r\n * 0 % N = N\r\n * 0 % I = 0\r\n * N % n = N\r\n * N % 0 = N\r\n * N % N = N\r\n * N % I = N\r\n * I % n = N\r\n * I % 0 = N\r\n * I % N = N\r\n * I % I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber modulo the value of\r\n * BigNumber(y, b). 
The result depends on the value of MODULO_MODE.\r\n */\r\n P.modulo = P.mod = function (y, b) {\r\n var q, s,\r\n x = this;\r\n\r\n y = new BigNumber(y, b);\r\n\r\n // Return NaN if x is Infinity or NaN, or y is NaN or zero.\r\n if (!x.c || !y.s || y.c && !y.c[0]) {\r\n return new BigNumber(NaN);\r\n\r\n // Return x if y is Infinity or x is zero.\r\n } else if (!y.c || x.c && !x.c[0]) {\r\n return new BigNumber(x);\r\n }\r\n\r\n if (MODULO_MODE == 9) {\r\n\r\n // Euclidian division: q = sign(y) * floor(x / abs(y))\r\n // r = x - qy where 0 <= r < abs(y)\r\n s = y.s;\r\n y.s = 1;\r\n q = div(x, y, 0, 3);\r\n y.s = s;\r\n q.s *= s;\r\n } else {\r\n q = div(x, y, 0, MODULO_MODE);\r\n }\r\n\r\n y = x.minus(q.times(y));\r\n\r\n // To match JavaScript %, ensure sign of zero is sign of dividend.\r\n if (!y.c[0] && MODULO_MODE == 1) y.s = x.s;\r\n\r\n return y;\r\n };\r\n\r\n\r\n /*\r\n * n * 0 = 0\r\n * n * N = N\r\n * n * I = I\r\n * 0 * n = 0\r\n * 0 * 0 = 0\r\n * 0 * N = N\r\n * 0 * I = N\r\n * N * n = N\r\n * N * 0 = N\r\n * N * N = N\r\n * N * I = N\r\n * I * n = I\r\n * I * 0 = N\r\n * I * N = N\r\n * I * I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value\r\n * of BigNumber(y, b).\r\n */\r\n P.multipliedBy = P.times = function (y, b) {\r\n var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc,\r\n base, sqrtBase,\r\n x = this,\r\n xc = x.c,\r\n yc = (y = new BigNumber(y, b)).c;\r\n\r\n // Either NaN, ±Infinity or ±0?\r\n if (!xc || !yc || !xc[0] || !yc[0]) {\r\n\r\n // Return NaN if either is NaN, or one is 0 and the other is Infinity.\r\n if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) {\r\n y.c = y.e = y.s = null;\r\n } else {\r\n y.s *= x.s;\r\n\r\n // Return ±Infinity if either is ±Infinity.\r\n if (!xc || !yc) {\r\n y.c = y.e = null;\r\n\r\n // Return ±0 if either is ±0.\r\n } else {\r\n y.c = [0];\r\n y.e = 0;\r\n }\r\n }\r\n\r\n return y;\r\n }\r\n\r\n e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE);\r\n y.s *= x.s;\r\n xcL = xc.length;\r\n ycL = yc.length;\r\n\r\n // Ensure xc points to longer array and xcL to its length.\r\n if (xcL < ycL) {\r\n zc = xc;\r\n xc = yc;\r\n yc = zc;\r\n i = xcL;\r\n xcL = ycL;\r\n ycL = i;\r\n }\r\n\r\n // Initialise the result array with zeros.\r\n for (i = xcL + ycL, zc = []; i--; zc.push(0));\r\n\r\n base = BASE;\r\n sqrtBase = SQRT_BASE;\r\n\r\n for (i = ycL; --i >= 0;) {\r\n c = 0;\r\n ylo = yc[i] % sqrtBase;\r\n yhi = yc[i] / sqrtBase | 0;\r\n\r\n for (k = xcL, j = i + k; j > i;) {\r\n xlo = xc[--k] % sqrtBase;\r\n xhi = xc[k] / sqrtBase | 0;\r\n m = yhi * xlo + xhi * ylo;\r\n xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c;\r\n c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi;\r\n zc[j--] = xlo % base;\r\n }\r\n\r\n zc[j] = c;\r\n }\r\n\r\n if (c) {\r\n ++e;\r\n } else {\r\n zc.splice(0, 1);\r\n }\r\n\r\n return normalise(y, zc, e);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber negated,\r\n * i.e. 
multiplied by -1.\r\n */\r\n P.negated = function () {\r\n var x = new BigNumber(this);\r\n x.s = -x.s || null;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * n + 0 = n\r\n * n + N = N\r\n * n + I = I\r\n * 0 + n = n\r\n * 0 + 0 = 0\r\n * 0 + N = N\r\n * 0 + I = I\r\n * N + n = N\r\n * N + 0 = N\r\n * N + N = N\r\n * N + I = N\r\n * I + n = I\r\n * I + 0 = I\r\n * I + N = N\r\n * I + I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber plus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.plus = function (y, b) {\r\n var t,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.minus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Return ±Infinity if either ±Infinity.\r\n if (!xc || !yc) return new BigNumber(a / 0);\r\n\r\n // Either zero?\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0);\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts.\r\n if (a = xe - ye) {\r\n if (a > 0) {\r\n ye = xe;\r\n t = yc;\r\n } else {\r\n a = -a;\r\n t = xc;\r\n }\r\n\r\n t.reverse();\r\n for (; a--; t.push(0));\r\n t.reverse();\r\n }\r\n\r\n a = xc.length;\r\n b = yc.length;\r\n\r\n // Point xc to the longer array, and b to the shorter length.\r\n if (a - b < 0) {\r\n t = yc;\r\n yc = xc;\r\n xc = t;\r\n b = a;\r\n }\r\n\r\n // Only start adding at yc.length - 1 as the further digits of xc can be ignored.\r\n for (a = 0; b;) {\r\n a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0;\r\n xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE;\r\n }\r\n\r\n if (a) {\r\n xc = [a].concat(xc);\r\n ++ye;\r\n }\r\n\r\n // No need to check for zero, as +x + +y != 0 && -x + -y != 0\r\n // ye = MAX_EXP + 1 possible\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * If sd is undefined or null or true or false, return the number of significant digits of\r\n * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n * If sd is true include integer-part trailing zeros in the count.\r\n *\r\n * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive.\r\n * boolean: whether to count integer-part trailing zeros: true or false.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.precision = P.sd = function (sd, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (sd != null && sd !== !!sd) {\r\n intCheck(sd, 1, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), sd, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n v = c.length - 1;\r\n n = v * LOG_BASE + 1;\r\n\r\n if (v = c[v]) {\r\n\r\n // Subtract the number of trailing zeros of the last element.\r\n for (; v % 10 == 0; v /= 10, n--);\r\n\r\n // Add the number of digits of the first element.\r\n for (v = c[0]; v >= 10; v /= 10, n++);\r\n }\r\n\r\n if (sd && x.e + 1 > n) n = x.e + 1;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber shifted by k places\r\n * (powers of 10). Shift to the right if n > 0, and to the left if n < 0.\r\n *\r\n * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}'\r\n */\r\n P.shiftedBy = function (k) {\r\n intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER);\r\n return this.times('1e' + k);\r\n };\r\n\r\n\r\n /*\r\n * sqrt(-n) = N\r\n * sqrt(N) = N\r\n * sqrt(-I) = N\r\n * sqrt(I) = I\r\n * sqrt(0) = 0\r\n * sqrt(-0) = -0\r\n *\r\n * Return a new BigNumber whose value is the square root of the value of this BigNumber,\r\n * rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.squareRoot = P.sqrt = function () {\r\n var m, n, r, rep, t,\r\n x = this,\r\n c = x.c,\r\n s = x.s,\r\n e = x.e,\r\n dp = DECIMAL_PLACES + 4,\r\n half = new BigNumber('0.5');\r\n\r\n // Negative/NaN/Infinity/zero?\r\n if (s !== 1 || !c || !c[0]) {\r\n return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0);\r\n }\r\n\r\n // Initial estimate.\r\n s = Math.sqrt(+valueOf(x));\r\n\r\n // Math.sqrt underflow/overflow?\r\n // Pass x to Math.sqrt as integer, then adjust the exponent of the result.\r\n if (s == 0 || s == 1 / 0) {\r\n n = coeffToString(c);\r\n if ((n.length + e) % 2 == 0) n += '0';\r\n s = Math.sqrt(+n);\r\n e = bitFloor((e + 1) / 2) - (e < 0 || e % 2);\r\n\r\n if (s == 1 / 0) {\r\n n = '5e' + e;\r\n } else {\r\n n = s.toExponential();\r\n n = n.slice(0, n.indexOf('e') + 1) + e;\r\n }\r\n\r\n r = new BigNumber(n);\r\n } else {\r\n r = new BigNumber(s + '');\r\n }\r\n\r\n // Check for zero.\r\n // r could be zero if MIN_EXP is changed after the this value was created.\r\n // This would cause a division by zero (x/t) and hence Infinity below, which would cause\r\n // coeffToString to throw.\r\n if (r.c[0]) {\r\n e = r.e;\r\n s = e + dp;\r\n if (s < 3) s = 0;\r\n\r\n // Newton-Raphson iteration.\r\n for (; ;) {\r\n t = r;\r\n r = half.times(t.plus(div(x, t, dp, 1)));\r\n\r\n if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) {\r\n\r\n // The exponent of r may here be one less than the final result exponent,\r\n // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits\r\n // are indexed correctly.\r\n if (r.e < e) --s;\r\n n = n.slice(s - 3, s + 1);\r\n\r\n // The 4th rounding digit may be in error by -1 so if the 4 rounding digits\r\n // are 9999 or 4999 (i.e. 
approaching a rounding boundary) continue the\r\n // iteration.\r\n if (n == '9999' || !rep && n == '4999') {\r\n\r\n // On the first iteration only, check to see if rounding up gives the\r\n // exact result as the nines may infinitely repeat.\r\n if (!rep) {\r\n round(t, t.e + DECIMAL_PLACES + 2, 0);\r\n\r\n if (t.times(t).eq(x)) {\r\n r = t;\r\n break;\r\n }\r\n }\r\n\r\n dp += 4;\r\n s += 4;\r\n rep = 1;\r\n } else {\r\n\r\n // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact\r\n // result. If not, then there are further digits and m will be truthy.\r\n if (!+n || !+n.slice(1) && n.charAt(0) == '5') {\r\n\r\n // Truncate to the first rounding digit.\r\n round(r, r.e + DECIMAL_PLACES + 2, 1);\r\n m = !r.times(r).eq(x);\r\n }\r\n\r\n break;\r\n }\r\n }\r\n }\r\n }\r\n\r\n return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in exponential notation and\r\n * rounded using ROUNDING_MODE to dp fixed decimal places.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toExponential = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp++;\r\n }\r\n return format(this, dp, rm, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounding\r\n * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * Note: as with JavaScript's number type, (-0).toFixed(0) is '0',\r\n * but e.g. (-0.00001).toFixed(0) is '-0'.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toFixed = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp = dp + this.e + 1;\r\n }\r\n return format(this, dp, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounded\r\n * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties\r\n * of the format or FORMAT object (see BigNumber.set).\r\n *\r\n * The formatting object may contain some or all of the properties shown below.\r\n *\r\n * FORMAT = {\r\n * prefix: '',\r\n * groupSize: 3,\r\n * secondaryGroupSize: 0,\r\n * groupSeparator: ',',\r\n * decimalSeparator: '.',\r\n * fractionGroupSize: 0,\r\n * fractionGroupSeparator: '\\xA0', // non-breaking space\r\n * suffix: ''\r\n * };\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n * [format] {object} Formatting options. 
See FORMAT pbject above.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n * '[BigNumber Error] Argument not an object: {format}'\r\n */\r\n P.toFormat = function (dp, rm, format) {\r\n var str,\r\n x = this;\r\n\r\n if (format == null) {\r\n if (dp != null && rm && typeof rm == 'object') {\r\n format = rm;\r\n rm = null;\r\n } else if (dp && typeof dp == 'object') {\r\n format = dp;\r\n dp = rm = null;\r\n } else {\r\n format = FORMAT;\r\n }\r\n } else if (typeof format != 'object') {\r\n throw Error\r\n (bignumberError + 'Argument not an object: ' + format);\r\n }\r\n\r\n str = x.toFixed(dp, rm);\r\n\r\n if (x.c) {\r\n var i,\r\n arr = str.split('.'),\r\n g1 = +format.groupSize,\r\n g2 = +format.secondaryGroupSize,\r\n groupSeparator = format.groupSeparator || '',\r\n intPart = arr[0],\r\n fractionPart = arr[1],\r\n isNeg = x.s < 0,\r\n intDigits = isNeg ? intPart.slice(1) : intPart,\r\n len = intDigits.length;\r\n\r\n if (g2) {\r\n i = g1;\r\n g1 = g2;\r\n g2 = i;\r\n len -= i;\r\n }\r\n\r\n if (g1 > 0 && len > 0) {\r\n i = len % g1 || g1;\r\n intPart = intDigits.substr(0, i);\r\n for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1);\r\n if (g2 > 0) intPart += groupSeparator + intDigits.slice(i);\r\n if (isNeg) intPart = '-' + intPart;\r\n }\r\n\r\n str = fractionPart\r\n ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize)\r\n ? fractionPart.replace(new RegExp('\\\\d{' + g2 + '}\\\\B', 'g'),\r\n '$&' + (format.fractionGroupSeparator || ''))\r\n : fractionPart)\r\n : intPart;\r\n }\r\n\r\n return (format.prefix || '') + str + (format.suffix || '');\r\n };\r\n\r\n\r\n /*\r\n * Return an array of two BigNumbers representing the value of this BigNumber as a simple\r\n * fraction with an integer numerator and an integer denominator.\r\n * The denominator will be a positive non-zero value less than or equal to the specified\r\n * maximum denominator. If a maximum denominator is not specified, the denominator will be\r\n * the lowest value necessary to represent the number exactly.\r\n *\r\n * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator.\r\n *\r\n * '[BigNumber Error] Argument {not an integer|out of range} : {md}'\r\n */\r\n P.toFraction = function (md) {\r\n var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s,\r\n x = this,\r\n xc = x.c;\r\n\r\n if (md != null) {\r\n n = new BigNumber(md);\r\n\r\n // Throw if md is less than one or is not an integer, unless it is Infinity.\r\n if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) {\r\n throw Error\r\n (bignumberError + 'Argument ' +\r\n (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n));\r\n }\r\n }\r\n\r\n if (!xc) return new BigNumber(x);\r\n\r\n d = new BigNumber(ONE);\r\n n1 = d0 = new BigNumber(ONE);\r\n d1 = n0 = new BigNumber(ONE);\r\n s = coeffToString(xc);\r\n\r\n // Determine initial denominator.\r\n // d is a power of 10 and the minimum max denominator that specifies the value exactly.\r\n e = d.e = s.length - x.e - 1;\r\n d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp];\r\n md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n;\r\n\r\n exp = MAX_EXP;\r\n MAX_EXP = 1 / 0;\r\n n = new BigNumber(s);\r\n\r\n // n0 = d1 = 0\r\n n0.c[0] = 0;\r\n\r\n for (; ;) {\r\n q = div(n, d, 0, 1);\r\n d2 = d0.plus(q.times(d1));\r\n if (d2.comparedTo(md) == 1) break;\r\n d0 = d1;\r\n d1 = d2;\r\n n1 = n0.plus(q.times(d2 = n1));\r\n n0 = d2;\r\n d = n.minus(q.times(d2 = d));\r\n n = d2;\r\n }\r\n\r\n d2 = div(md.minus(d0), d1, 0, 1);\r\n n0 = n0.plus(d2.times(n1));\r\n d0 = d0.plus(d2.times(d1));\r\n n0.s = n1.s = x.s;\r\n e = e * 2;\r\n\r\n // Determine which fraction is closer to x, n0/d0 or n1/d1\r\n r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo(\r\n div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0];\r\n\r\n MAX_EXP = exp;\r\n\r\n return r;\r\n };\r\n\r\n\r\n /*\r\n * Return the value of this BigNumber converted to a number primitive.\r\n */\r\n P.toNumber = function () {\r\n return +valueOf(this);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber rounded to sd significant digits\r\n * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits\r\n * necessary to represent the integer part of the value in fixed-point notation, then use\r\n * exponential notation.\r\n *\r\n * [sd] {number} Significant digits. Integer, 1 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.toPrecision = function (sd, rm) {\r\n if (sd != null) intCheck(sd, 1, MAX);\r\n return format(this, sd, rm, 2);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in base b, or base 10 if b is\r\n * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and\r\n * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent\r\n * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than\r\n * TO_EXP_NEG, return exponential notation.\r\n *\r\n * [b] {number} Integer, 2 to ALPHABET.length inclusive.\r\n *\r\n * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n */\r\n P.toString = function (b) {\r\n var str,\r\n n = this,\r\n s = n.s,\r\n e = n.e;\r\n\r\n // Infinity or NaN?\r\n if (e === null) {\r\n if (s) {\r\n str = 'Infinity';\r\n if (s < 0) str = '-' + str;\r\n } else {\r\n str = 'NaN';\r\n }\r\n } else {\r\n if (b == null) {\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(coeffToString(n.c), e)\r\n : toFixedPoint(coeffToString(n.c), e, '0');\r\n } else if (b === 10 && alphabetHasNormalDecimalDigits) {\r\n n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE);\r\n str = toFixedPoint(coeffToString(n.c), n.e, '0');\r\n } else {\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true);\r\n }\r\n\r\n if (s < 0 && n.c[0]) str = '-' + str;\r\n }\r\n\r\n return str;\r\n };\r\n\r\n\r\n /*\r\n * Return as toString, but do not accept a base argument, and include the minus sign for\r\n * negative zero.\r\n */\r\n P.valueOf = P.toJSON = function () {\r\n return valueOf(this);\r\n };\r\n\r\n\r\n P._isBigNumber = true;\r\n\r\n if (configObject != null) BigNumber.set(configObject);\r\n\r\n return BigNumber;\r\n }\r\n\r\n\r\n // PRIVATE HELPER FUNCTIONS\r\n\r\n // These functions don't need access to variables,\r\n // e.g. 
DECIMAL_PLACES, in the scope of the `clone` function above.\r\n\r\n\r\n function bitFloor(n) {\r\n var i = n | 0;\r\n return n > 0 || n === i ? i : i - 1;\r\n }\r\n\r\n\r\n // Return a coefficient array as a string of base 10 digits.\r\n function coeffToString(a) {\r\n var s, z,\r\n i = 1,\r\n j = a.length,\r\n r = a[0] + '';\r\n\r\n for (; i < j;) {\r\n s = a[i++] + '';\r\n z = LOG_BASE - s.length;\r\n for (; z--; s = '0' + s);\r\n r += s;\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (j = r.length; r.charCodeAt(--j) === 48;);\r\n\r\n return r.slice(0, j + 1 || 1);\r\n }\r\n\r\n\r\n // Compare the value of BigNumbers x and y.\r\n function compare(x, y) {\r\n var a, b,\r\n xc = x.c,\r\n yc = y.c,\r\n i = x.s,\r\n j = y.s,\r\n k = x.e,\r\n l = y.e;\r\n\r\n // Either NaN?\r\n if (!i || !j) return null;\r\n\r\n a = xc && !xc[0];\r\n b = yc && !yc[0];\r\n\r\n // Either zero?\r\n if (a || b) return a ? b ? 0 : -j : i;\r\n\r\n // Signs differ?\r\n if (i != j) return i;\r\n\r\n a = i < 0;\r\n b = k == l;\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1;\r\n\r\n // Compare exponents.\r\n if (!b) return k > l ^ a ? 1 : -1;\r\n\r\n j = (k = xc.length) < (l = yc.length) ? k : l;\r\n\r\n // Compare digit by digit.\r\n for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1;\r\n\r\n // Compare lengths.\r\n return k == l ? 0 : k > l ^ a ? 1 : -1;\r\n }\r\n\r\n\r\n /*\r\n * Check that n is a primitive number, an integer, and in range, otherwise throw.\r\n */\r\n function intCheck(n, min, max, name) {\r\n if (n < min || n > max || n !== mathfloor(n)) {\r\n throw Error\r\n (bignumberError + (name || 'Argument') + (typeof n == 'number'\r\n ? n < min || n > max ? ' out of range: ' : ' not an integer: '\r\n : ' not a primitive number: ') + String(n));\r\n }\r\n }\r\n\r\n\r\n // Assumes finite n.\r\n function isOdd(n) {\r\n var k = n.c.length - 1;\r\n return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0;\r\n }\r\n\r\n\r\n function toExponential(str, e) {\r\n return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) +\r\n (e < 0 ? 'e' : 'e+') + e;\r\n }\r\n\r\n\r\n function toFixedPoint(str, e, z) {\r\n var len, zs;\r\n\r\n // Negative exponent?\r\n if (e < 0) {\r\n\r\n // Prepend zeros.\r\n for (zs = z + '.'; ++e; zs += z);\r\n str = zs + str;\r\n\r\n // Positive exponent\r\n } else {\r\n len = str.length;\r\n\r\n // Append zeros.\r\n if (++e > len) {\r\n for (zs = z, e -= len; --e; zs += z);\r\n str += zs;\r\n } else if (e < len) {\r\n str = str.slice(0, e) + '.' + str.slice(e);\r\n }\r\n }\r\n\r\n return str;\r\n }\r\n\r\n\r\n // EXPORT\r\n\r\n\r\n BigNumber = clone();\r\n BigNumber['default'] = BigNumber.BigNumber = BigNumber;\r\n\r\n // AMD.\r\n if (typeof define == 'function' && define.amd) {\r\n define(function () { return BigNumber; });\r\n\r\n // Node.js and other environments that support module.exports.\r\n } else if (typeof module != 'undefined' && module.exports) {\r\n module.exports = BigNumber;\r\n\r\n // Browser.\r\n } else {\r\n if (!globalObject) {\r\n globalObject = typeof self != 'undefined' && self ? 
self : window;\r\n }\r\n\r\n globalObject.BigNumber = BigNumber;\r\n }\r\n})(this);\r\n","/*jshint node:true */\n'use strict';\nvar Buffer = require('buffer').Buffer; // browserify\nvar SlowBuffer = require('buffer').SlowBuffer;\n\nmodule.exports = bufferEq;\n\nfunction bufferEq(a, b) {\n\n // shortcutting on type is necessary for correctness\n if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {\n return false;\n }\n\n // buffer sizes should be well-known information, so despite this\n // shortcutting, it doesn't leak any information about the *contents* of the\n // buffers.\n if (a.length !== b.length) {\n return false;\n }\n\n var c = 0;\n for (var i = 0; i < a.length; i++) {\n /*jshint bitwise:false */\n c |= a[i] ^ b[i]; // XOR\n }\n return c === 0;\n}\n\nbufferEq.install = function() {\n Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) {\n return bufferEq(this, that);\n };\n};\n\nvar origBufEqual = Buffer.prototype.equal;\nvar origSlowBufEqual = SlowBuffer.prototype.equal;\nbufferEq.restore = function() {\n Buffer.prototype.equal = origBufEqual;\n SlowBuffer.prototype.equal = origSlowBufEqual;\n};\n","/*!\n * compressible\n * Copyright(c) 2013 Jonathan Ong\n * Copyright(c) 2014 Jeremiah Senkpiel\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\n\n/**\n * Module variables.\n * @private\n */\n\nvar COMPRESSIBLE_TYPE_REGEXP = /^text\\/|\\+(?:json|text|xml)$/i\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = compressible\n\n/**\n * Checks if a type is compressible.\n *\n * @param {string} type\n * @return {Boolean} compressible\n * @public\n */\n\nfunction compressible (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // strip parameters\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && match[1].toLowerCase()\n var data = db[mime]\n\n // return database information\n if (data && data.compressible !== undefined) {\n return data.compressible\n }\n\n // fallback to regexp or unknown\n return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined\n}\n","/* eslint-env browser */\n\n/**\n * This is the web browser implementation of `debug()`.\n */\n\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.storage = localstorage();\nexports.destroy = (() => {\n\tlet warned = false;\n\n\treturn () => {\n\t\tif (!warned) {\n\t\t\twarned = true;\n\t\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.');\n\t\t}\n\t};\n})();\n\n/**\n * Colors.\n */\n\nexports.colors = [\n\t'#0000CC',\n\t'#0000FF',\n\t'#0033CC',\n\t'#0033FF',\n\t'#0066CC',\n\t'#0066FF',\n\t'#0099CC',\n\t'#0099FF',\n\t'#00CC00',\n\t'#00CC33',\n\t'#00CC66',\n\t'#00CC99',\n\t'#00CCCC',\n\t'#00CCFF',\n\t'#3300CC',\n\t'#3300FF',\n\t'#3333CC',\n\t'#3333FF',\n\t'#3366CC',\n\t'#3366FF',\n\t'#3399CC',\n\t'#3399FF',\n\t'#33CC00',\n\t'#33CC33',\n\t'#33CC66',\n\t'#33CC99',\n\t'#33CCCC',\n\t'#33CCFF',\n\t'#6600CC',\n\t'#6600FF',\n\t'#6633CC',\n\t'#6633FF',\n\t'#66CC00',\n\t'#66CC33',\n\t'#9900CC',\n\t'#9900FF',\n\t'#9933CC',\n\t'#9933FF',\n\t'#99CC00',\n\t'#99CC33',\n\t'#CC0000',\n\t'#CC0033',\n\t'#CC0066',\n\t'#CC0099',\n\t'#CC00CC',\n\t'#CC00FF',\n\t'#CC3300',\n\t'#CC3333',\n\t'#CC3366',\n\t'#CC3399',\n\t'#CC33CC',\n\t'#CC33FF',\n\t'#CC6600',\n\t'#CC6633',\n\t'#CC9900',\n\t'#CC9933',\n\t'#CCCC00',\n\t'#CCCC33',\n\t'#FF0000',\n\t'#FF0033',\n\t'#FF0066',\n\t'#FF0099',\n\t'#FF00CC',\n\t'#FF00FF',\n\t'#FF3300',\n\t'#FF3333',\n\t'#FF3366',\n\t'#FF3399',\n\t'#FF33CC',\n\t'#FF33FF',\n\t'#FF6600',\n\t'#FF6633',\n\t'#FF9900',\n\t'#FF9933',\n\t'#FFCC00',\n\t'#FFCC33'\n];\n\n/**\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\n * and the Firebug extension (any Firefox version) are known\n * to support \"%c\" CSS customizations.\n *\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\n */\n\n// eslint-disable-next-line complexity\nfunction useColors() {\n\t// NB: In an Electron preload script, document will be defined but not fully\n\t// initialized. Since we know we're in Chrome, we'll just detect this case\n\t// explicitly\n\tif (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {\n\t\treturn true;\n\t}\n\n\t// Internet Explorer and Edge do not support colors.\n\tif (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\\/(\\d+)/)) {\n\t\treturn false;\n\t}\n\n\t// Is webkit? http://stackoverflow.com/a/16459606/376773\n\t// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\n\treturn (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\n\t\t// Is firebug? http://stackoverflow.com/a/398120/376773\n\t\t(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\n\t\t// Is firefox >= v31?\n\t\t// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\/(\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\n\t\t// Double check webkit in userAgent just in case we are in a worker\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\/(\\d+)/));\n}\n\n/**\n * Colorize log arguments if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\targs[0] = (this.useColors ? '%c' : '') +\n\t\tthis.namespace +\n\t\t(this.useColors ? ' %c' : ' ') +\n\t\targs[0] +\n\t\t(this.useColors ? 
'%c ' : ' ') +\n\t\t'+' + module.exports.humanize(this.diff);\n\n\tif (!this.useColors) {\n\t\treturn;\n\t}\n\n\tconst c = 'color: ' + this.color;\n\targs.splice(1, 0, c, 'color: inherit');\n\n\t// The final \"%c\" is somewhat tricky, because there could be other\n\t// arguments passed either before or after the %c, so we need to\n\t// figure out the correct index to insert the CSS into\n\tlet index = 0;\n\tlet lastC = 0;\n\targs[0].replace(/%[a-zA-Z%]/g, match => {\n\t\tif (match === '%%') {\n\t\t\treturn;\n\t\t}\n\t\tindex++;\n\t\tif (match === '%c') {\n\t\t\t// We only are interested in the *last* %c\n\t\t\t// (the user may have provided their own)\n\t\t\tlastC = index;\n\t\t}\n\t});\n\n\targs.splice(lastC, 0, c);\n}\n\n/**\n * Invokes `console.debug()` when available.\n * No-op when `console.debug` is not a \"function\".\n * If `console.debug` is not available, falls back\n * to `console.log`.\n *\n * @api public\n */\nexports.log = console.debug || console.log || (() => {});\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\ttry {\n\t\tif (namespaces) {\n\t\t\texports.storage.setItem('debug', namespaces);\n\t\t} else {\n\t\t\texports.storage.removeItem('debug');\n\t\t}\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\nfunction load() {\n\tlet r;\n\ttry {\n\t\tr = exports.storage.getItem('debug');\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n\n\t// If debug isn't set in LS, and we're in Electron, try to load $DEBUG\n\tif (!r && typeof process !== 'undefined' && 'env' in process) {\n\t\tr = process.env.DEBUG;\n\t}\n\n\treturn r;\n}\n\n/**\n * Localstorage attempts to return the localstorage.\n *\n * This is necessary because safari throws\n * when a user disables cookies/localstorage\n * and you attempt to access it.\n *\n * @return {LocalStorage}\n * @api private\n */\n\nfunction localstorage() {\n\ttry {\n\t\t// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context\n\t\t// The Browser also has localStorage in the global context.\n\t\treturn localStorage;\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.\n */\n\nformatters.j = function (v) {\n\ttry {\n\t\treturn JSON.stringify(v);\n\t} catch (error) {\n\t\treturn '[UnexpectedJSONParseError]: ' + error.message;\n\t}\n};\n","\n/**\n * This is the common logic for both the Node.js and web browser\n * implementations of `debug()`.\n */\n\nfunction setup(env) {\n\tcreateDebug.debug = createDebug;\n\tcreateDebug.default = createDebug;\n\tcreateDebug.coerce = coerce;\n\tcreateDebug.disable = disable;\n\tcreateDebug.enable = enable;\n\tcreateDebug.enabled = enabled;\n\tcreateDebug.humanize = require('ms');\n\tcreateDebug.destroy = destroy;\n\n\tObject.keys(env).forEach(key => {\n\t\tcreateDebug[key] = env[key];\n\t});\n\n\t/**\n\t* The currently active debug mode names, and names to skip.\n\t*/\n\n\tcreateDebug.names = [];\n\tcreateDebug.skips = [];\n\n\t/**\n\t* Map of special \"%n\" handling functions, for the debug \"format\" argument.\n\t*\n\t* Valid key names are a single, lower or upper-case letter, 
i.e. \"n\" and \"N\".\n\t*/\n\tcreateDebug.formatters = {};\n\n\t/**\n\t* Selects a color for a debug namespace\n\t* @param {String} namespace The namespace string for the debug instance to be colored\n\t* @return {Number|String} An ANSI color code for the given namespace\n\t* @api private\n\t*/\n\tfunction selectColor(namespace) {\n\t\tlet hash = 0;\n\n\t\tfor (let i = 0; i < namespace.length; i++) {\n\t\t\thash = ((hash << 5) - hash) + namespace.charCodeAt(i);\n\t\t\thash |= 0; // Convert to 32bit integer\n\t\t}\n\n\t\treturn createDebug.colors[Math.abs(hash) % createDebug.colors.length];\n\t}\n\tcreateDebug.selectColor = selectColor;\n\n\t/**\n\t* Create a debugger with the given `namespace`.\n\t*\n\t* @param {String} namespace\n\t* @return {Function}\n\t* @api public\n\t*/\n\tfunction createDebug(namespace) {\n\t\tlet prevTime;\n\t\tlet enableOverride = null;\n\t\tlet namespacesCache;\n\t\tlet enabledCache;\n\n\t\tfunction debug(...args) {\n\t\t\t// Disabled?\n\t\t\tif (!debug.enabled) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst self = debug;\n\n\t\t\t// Set `diff` timestamp\n\t\t\tconst curr = Number(new Date());\n\t\t\tconst ms = curr - (prevTime || curr);\n\t\t\tself.diff = ms;\n\t\t\tself.prev = prevTime;\n\t\t\tself.curr = curr;\n\t\t\tprevTime = curr;\n\n\t\t\targs[0] = createDebug.coerce(args[0]);\n\n\t\t\tif (typeof args[0] !== 'string') {\n\t\t\t\t// Anything else let's inspect with %O\n\t\t\t\targs.unshift('%O');\n\t\t\t}\n\n\t\t\t// Apply any `formatters` transformations\n\t\t\tlet index = 0;\n\t\t\targs[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {\n\t\t\t\t// If we encounter an escaped % then don't increase the array index\n\t\t\t\tif (match === '%%') {\n\t\t\t\t\treturn '%';\n\t\t\t\t}\n\t\t\t\tindex++;\n\t\t\t\tconst formatter = createDebug.formatters[format];\n\t\t\t\tif (typeof formatter === 'function') {\n\t\t\t\t\tconst val = args[index];\n\t\t\t\t\tmatch = formatter.call(self, val);\n\n\t\t\t\t\t// Now we need to remove `args[index]` since it's inlined in the `format`\n\t\t\t\t\targs.splice(index, 1);\n\t\t\t\t\tindex--;\n\t\t\t\t}\n\t\t\t\treturn match;\n\t\t\t});\n\n\t\t\t// Apply env-specific formatting (colors, etc.)\n\t\t\tcreateDebug.formatArgs.call(self, args);\n\n\t\t\tconst logFn = self.log || createDebug.log;\n\t\t\tlogFn.apply(self, args);\n\t\t}\n\n\t\tdebug.namespace = namespace;\n\t\tdebug.useColors = createDebug.useColors();\n\t\tdebug.color = createDebug.selectColor(namespace);\n\t\tdebug.extend = extend;\n\t\tdebug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.\n\n\t\tObject.defineProperty(debug, 'enabled', {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false,\n\t\t\tget: () => {\n\t\t\t\tif (enableOverride !== null) {\n\t\t\t\t\treturn enableOverride;\n\t\t\t\t}\n\t\t\t\tif (namespacesCache !== createDebug.namespaces) {\n\t\t\t\t\tnamespacesCache = createDebug.namespaces;\n\t\t\t\t\tenabledCache = createDebug.enabled(namespace);\n\t\t\t\t}\n\n\t\t\t\treturn enabledCache;\n\t\t\t},\n\t\t\tset: v => {\n\t\t\t\tenableOverride = v;\n\t\t\t}\n\t\t});\n\n\t\t// Env-specific initialization logic for debug instances\n\t\tif (typeof createDebug.init === 'function') {\n\t\t\tcreateDebug.init(debug);\n\t\t}\n\n\t\treturn debug;\n\t}\n\n\tfunction extend(namespace, delimiter) {\n\t\tconst newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);\n\t\tnewDebug.log = this.log;\n\t\treturn newDebug;\n\t}\n\n\t/**\n\t* Enables a debug mode by namespaces. 
This can include modes\n\t* separated by a colon and wildcards.\n\t*\n\t* @param {String} namespaces\n\t* @api public\n\t*/\n\tfunction enable(namespaces) {\n\t\tcreateDebug.save(namespaces);\n\t\tcreateDebug.namespaces = namespaces;\n\n\t\tcreateDebug.names = [];\n\t\tcreateDebug.skips = [];\n\n\t\tlet i;\n\t\tconst split = (typeof namespaces === 'string' ? namespaces : '').split(/[\\s,]+/);\n\t\tconst len = split.length;\n\n\t\tfor (i = 0; i < len; i++) {\n\t\t\tif (!split[i]) {\n\t\t\t\t// ignore empty strings\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tnamespaces = split[i].replace(/\\*/g, '.*?');\n\n\t\t\tif (namespaces[0] === '-') {\n\t\t\t\tcreateDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));\n\t\t\t} else {\n\t\t\t\tcreateDebug.names.push(new RegExp('^' + namespaces + '$'));\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t* Disable debug output.\n\t*\n\t* @return {String} namespaces\n\t* @api public\n\t*/\n\tfunction disable() {\n\t\tconst namespaces = [\n\t\t\t...createDebug.names.map(toNamespace),\n\t\t\t...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)\n\t\t].join(',');\n\t\tcreateDebug.enable('');\n\t\treturn namespaces;\n\t}\n\n\t/**\n\t* Returns true if the given mode name is enabled, false otherwise.\n\t*\n\t* @param {String} name\n\t* @return {Boolean}\n\t* @api public\n\t*/\n\tfunction enabled(name) {\n\t\tif (name[name.length - 1] === '*') {\n\t\t\treturn true;\n\t\t}\n\n\t\tlet i;\n\t\tlet len;\n\n\t\tfor (i = 0, len = createDebug.skips.length; i < len; i++) {\n\t\t\tif (createDebug.skips[i].test(name)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\tfor (i = 0, len = createDebug.names.length; i < len; i++) {\n\t\t\tif (createDebug.names[i].test(name)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\treturn false;\n\t}\n\n\t/**\n\t* Convert regexp to namespace\n\t*\n\t* @param {RegExp} regxep\n\t* @return {String} namespace\n\t* @api private\n\t*/\n\tfunction toNamespace(regexp) {\n\t\treturn regexp.toString()\n\t\t\t.substring(2, regexp.toString().length - 2)\n\t\t\t.replace(/\\.\\*\\?$/, '*');\n\t}\n\n\t/**\n\t* Coerce `val`.\n\t*\n\t* @param {Mixed} val\n\t* @return {Mixed}\n\t* @api private\n\t*/\n\tfunction coerce(val) {\n\t\tif (val instanceof Error) {\n\t\t\treturn val.stack || val.message;\n\t\t}\n\t\treturn val;\n\t}\n\n\t/**\n\t* XXX DO NOT USE. This is a temporary stub function.\n\t* XXX It WILL be removed in the next major release.\n\t*/\n\tfunction destroy() {\n\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');\n\t}\n\n\tcreateDebug.enable(createDebug.load());\n\n\treturn createDebug;\n}\n\nmodule.exports = setup;\n","/**\n * Detect Electron renderer / nwjs process, which is node, but we should\n * treat as a browser.\n */\n\nif (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {\n\tmodule.exports = require('./browser.js');\n} else {\n\tmodule.exports = require('./node.js');\n}\n","/**\n * Module dependencies.\n */\n\nconst tty = require('tty');\nconst util = require('util');\n\n/**\n * This is the Node.js implementation of `debug()`.\n */\n\nexports.init = init;\nexports.log = log;\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.destroy = util.deprecate(\n\t() => {},\n\t'Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'\n);\n\n/**\n * Colors.\n */\n\nexports.colors = [6, 2, 3, 4, 5, 1];\n\ntry {\n\t// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)\n\t// eslint-disable-next-line import/no-extraneous-dependencies\n\tconst supportsColor = require('supports-color');\n\n\tif (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {\n\t\texports.colors = [\n\t\t\t20,\n\t\t\t21,\n\t\t\t26,\n\t\t\t27,\n\t\t\t32,\n\t\t\t33,\n\t\t\t38,\n\t\t\t39,\n\t\t\t40,\n\t\t\t41,\n\t\t\t42,\n\t\t\t43,\n\t\t\t44,\n\t\t\t45,\n\t\t\t56,\n\t\t\t57,\n\t\t\t62,\n\t\t\t63,\n\t\t\t68,\n\t\t\t69,\n\t\t\t74,\n\t\t\t75,\n\t\t\t76,\n\t\t\t77,\n\t\t\t78,\n\t\t\t79,\n\t\t\t80,\n\t\t\t81,\n\t\t\t92,\n\t\t\t93,\n\t\t\t98,\n\t\t\t99,\n\t\t\t112,\n\t\t\t113,\n\t\t\t128,\n\t\t\t129,\n\t\t\t134,\n\t\t\t135,\n\t\t\t148,\n\t\t\t149,\n\t\t\t160,\n\t\t\t161,\n\t\t\t162,\n\t\t\t163,\n\t\t\t164,\n\t\t\t165,\n\t\t\t166,\n\t\t\t167,\n\t\t\t168,\n\t\t\t169,\n\t\t\t170,\n\t\t\t171,\n\t\t\t172,\n\t\t\t173,\n\t\t\t178,\n\t\t\t179,\n\t\t\t184,\n\t\t\t185,\n\t\t\t196,\n\t\t\t197,\n\t\t\t198,\n\t\t\t199,\n\t\t\t200,\n\t\t\t201,\n\t\t\t202,\n\t\t\t203,\n\t\t\t204,\n\t\t\t205,\n\t\t\t206,\n\t\t\t207,\n\t\t\t208,\n\t\t\t209,\n\t\t\t214,\n\t\t\t215,\n\t\t\t220,\n\t\t\t221\n\t\t];\n\t}\n} catch (error) {\n\t// Swallow - we only care if `supports-color` is available; it doesn't have to be.\n}\n\n/**\n * Build up the default `inspectOpts` object from the environment variables.\n *\n * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js\n */\n\nexports.inspectOpts = Object.keys(process.env).filter(key => {\n\treturn /^debug_/i.test(key);\n}).reduce((obj, key) => {\n\t// Camel-case\n\tconst prop = key\n\t\t.substring(6)\n\t\t.toLowerCase()\n\t\t.replace(/_([a-z])/g, (_, k) => {\n\t\t\treturn k.toUpperCase();\n\t\t});\n\n\t// Coerce string value into JS value\n\tlet val = process.env[key];\n\tif (/^(yes|on|true|enabled)$/i.test(val)) {\n\t\tval = true;\n\t} else if (/^(no|off|false|disabled)$/i.test(val)) {\n\t\tval = false;\n\t} else if (val === 'null') {\n\t\tval = null;\n\t} else {\n\t\tval = Number(val);\n\t}\n\n\tobj[prop] = val;\n\treturn obj;\n}, {});\n\n/**\n * Is stdout a TTY? Colored output is enabled when `true`.\n */\n\nfunction useColors() {\n\treturn 'colors' in exports.inspectOpts ?\n\t\tBoolean(exports.inspectOpts.colors) :\n\t\ttty.isatty(process.stderr.fd);\n}\n\n/**\n * Adds ANSI color escape codes if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\tconst {namespace: name, useColors} = this;\n\n\tif (useColors) {\n\t\tconst c = this.color;\n\t\tconst colorCode = '\\u001B[3' + (c < 8 ? 
c : '8;5;' + c);\n\t\tconst prefix = ` ${colorCode};1m${name} \\u001B[0m`;\n\n\t\targs[0] = prefix + args[0].split('\\n').join('\\n' + prefix);\n\t\targs.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\\u001B[0m');\n\t} else {\n\t\targs[0] = getDate() + name + ' ' + args[0];\n\t}\n}\n\nfunction getDate() {\n\tif (exports.inspectOpts.hideDate) {\n\t\treturn '';\n\t}\n\treturn new Date().toISOString() + ' ';\n}\n\n/**\n * Invokes `util.format()` with the specified arguments and writes to stderr.\n */\n\nfunction log(...args) {\n\treturn process.stderr.write(util.format(...args) + '\\n');\n}\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\tif (namespaces) {\n\t\tprocess.env.DEBUG = namespaces;\n\t} else {\n\t\t// If you set a process.env field to null or undefined, it gets cast to the\n\t\t// string 'null' or 'undefined'. Just delete instead.\n\t\tdelete process.env.DEBUG;\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\n\nfunction load() {\n\treturn process.env.DEBUG;\n}\n\n/**\n * Init logic for `debug` instances.\n *\n * Create a new `inspectOpts` object in case `useColors` is set\n * differently for a particular `debug` instance.\n */\n\nfunction init(debug) {\n\tdebug.inspectOpts = {};\n\n\tconst keys = Object.keys(exports.inspectOpts);\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tdebug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %o to `util.inspect()`, all on a single line.\n */\n\nformatters.o = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts)\n\t\t.split('\\n')\n\t\t.map(str => str.trim())\n\t\t.join(' ');\n};\n\n/**\n * Map %O to `util.inspect()`, allowing multiple lines if needed.\n */\n\nformatters.O = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts);\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","var stream = require('readable-stream')\nvar eos = require('end-of-stream')\nvar inherits = require('inherits')\nvar shift = require('stream-shift')\n\nvar SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)\n ? Buffer.from([0])\n : new Buffer([0])\n\nvar onuncork = function(self, fn) {\n if (self._corked) self.once('uncork', fn)\n else fn()\n}\n\nvar autoDestroy = function (self, err) {\n if (self._autoDestroy) self.destroy(err)\n}\n\nvar destroyer = function(self, end) {\n return function(err) {\n if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err)\n else if (end && !self._ended) self.end()\n }\n}\n\nvar end = function(ws, fn) {\n if (!ws) return fn()\n if (ws._writableState && ws._writableState.finished) return fn()\n if (ws._writableState) return ws.end(fn)\n ws.end()\n fn()\n}\n\nvar noop = function() {}\n\nvar toStreams2 = function(rs) {\n return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)\n}\n\nvar Duplexify = function(writable, readable, opts) {\n if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)\n stream.Duplex.call(this, opts)\n\n this._writable = null\n this._readable = null\n this._readable2 = null\n\n this._autoDestroy = !opts || opts.autoDestroy !== false\n this._forwardDestroy = !opts || opts.destroy !== false\n this._forwardEnd = !opts || opts.end !== false\n this._corked = 1 // start corked\n this._ondrain = null\n this._drained = false\n this._forwarding = false\n this._unwrite = null\n this._unread = null\n this._ended = false\n\n this.destroyed = false\n\n if (writable) this.setWritable(writable)\n if (readable) this.setReadable(readable)\n}\n\ninherits(Duplexify, stream.Duplex)\n\nDuplexify.obj = function(writable, readable, opts) {\n if (!opts) opts = {}\n opts.objectMode = true\n opts.highWaterMark = 16\n return new Duplexify(writable, readable, opts)\n}\n\nDuplexify.prototype.cork = function() {\n if (++this._corked === 1) this.emit('cork')\n}\n\nDuplexify.prototype.uncork = function() {\n if (this._corked && --this._corked === 0) this.emit('uncork')\n}\n\nDuplexify.prototype.setWritable = function(writable) {\n if (this._unwrite) this._unwrite()\n\n if (this.destroyed) {\n if (writable && writable.destroy) writable.destroy()\n return\n }\n\n if (writable === null || writable === false) {\n this.end()\n return\n }\n\n var self = this\n var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))\n\n var ondrain = function() {\n var ondrain = self._ondrain\n self._ondrain = null\n if (ondrain) ondrain()\n }\n\n var clear = function() {\n self._writable.removeListener('drain', ondrain)\n unend()\n }\n\n if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks\n\n this._writable = writable\n this._writable.on('drain', ondrain)\n this._unwrite = clear\n\n this.uncork() // always uncork setWritable\n}\n\nDuplexify.prototype.setReadable = function(readable) {\n if (this._unread) this._unread()\n\n if (this.destroyed) {\n if (readable && readable.destroy) readable.destroy()\n return\n }\n\n if (readable === null || readable === false) {\n this.push(null)\n this.resume()\n return\n }\n\n var self = this\n var unend = eos(readable, {writable:false, readable:true}, destroyer(this))\n\n var onreadable = function() {\n self._forward()\n }\n\n var onend = function() {\n self.push(null)\n }\n\n var clear = function() {\n self._readable2.removeListener('readable', onreadable)\n self._readable2.removeListener('end', onend)\n unend()\n }\n\n this._drained = true\n this._readable = readable\n this._readable2 = readable._readableState ? 
readable : toStreams2(readable)\n this._readable2.on('readable', onreadable)\n this._readable2.on('end', onend)\n this._unread = clear\n\n this._forward()\n}\n\nDuplexify.prototype._read = function() {\n this._drained = true\n this._forward()\n}\n\nDuplexify.prototype._forward = function() {\n if (this._forwarding || !this._readable2 || !this._drained) return\n this._forwarding = true\n\n var data\n\n while (this._drained && (data = shift(this._readable2)) !== null) {\n if (this.destroyed) continue\n this._drained = this.push(data)\n }\n\n this._forwarding = false\n}\n\nDuplexify.prototype.destroy = function(err, cb) {\n if (!cb) cb = noop\n if (this.destroyed) return cb(null)\n this.destroyed = true\n\n var self = this\n process.nextTick(function() {\n self._destroy(err)\n cb(null)\n })\n}\n\nDuplexify.prototype._destroy = function(err) {\n if (err) {\n var ondrain = this._ondrain\n this._ondrain = null\n if (ondrain) ondrain(err)\n else this.emit('error', err)\n }\n\n if (this._forwardDestroy) {\n if (this._readable && this._readable.destroy) this._readable.destroy()\n if (this._writable && this._writable.destroy) this._writable.destroy()\n }\n\n this.emit('close')\n}\n\nDuplexify.prototype._write = function(data, enc, cb) {\n if (this.destroyed) return\n if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))\n if (data === SIGNAL_FLUSH) return this._finish(cb)\n if (!this._writable) return cb()\n\n if (this._writable.write(data) === false) this._ondrain = cb\n else if (!this.destroyed) cb()\n}\n\nDuplexify.prototype._finish = function(cb) {\n var self = this\n this.emit('preend')\n onuncork(this, function() {\n end(self._forwardEnd && self._writable, function() {\n // haxx to not emit prefinish twice\n if (self._writableState.prefinished === false) self._writableState.prefinished = true\n self.emit('prefinish')\n onuncork(self, cb)\n })\n })\n}\n\nDuplexify.prototype.end = function(data, enc, cb) {\n if (typeof data === 'function') return this.end(null, null, data)\n if (typeof enc === 'function') return this.end(data, null, enc)\n this._ended = true\n if (data) this.write(data)\n if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH)\n return stream.Writable.prototype.end.call(this, cb)\n}\n\nmodule.exports = Duplexify\n","'use strict';\n\nvar Buffer = require('safe-buffer').Buffer;\n\nvar getParamBytesForAlg = require('./param-bytes-for-alg');\n\nvar MAX_OCTET = 0x80,\n\tCLASS_UNIVERSAL = 0,\n\tPRIMITIVE_BIT = 0x20,\n\tTAG_SEQ = 0x10,\n\tTAG_INT = 0x02,\n\tENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6),\n\tENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6);\n\nfunction base64Url(base64) {\n\treturn base64\n\t\t.replace(/=/g, '')\n\t\t.replace(/\\+/g, '-')\n\t\t.replace(/\\//g, '_');\n}\n\nfunction signatureAsBuffer(signature) {\n\tif (Buffer.isBuffer(signature)) {\n\t\treturn signature;\n\t} else if ('string' === typeof signature) {\n\t\treturn Buffer.from(signature, 'base64');\n\t}\n\n\tthrow new TypeError('ECDSA signature must be a Base64 string or a Buffer');\n}\n\nfunction derToJose(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\t// the DER encoded param should at most be the param size, plus a padding\n\t// zero, since due to being a signed integer\n\tvar maxEncodedParamLength = paramBytes + 1;\n\n\tvar inputLength = signature.length;\n\n\tvar offset = 0;\n\tif (signature[offset++] !== ENCODED_TAG_SEQ) {\n\t\tthrow new Error('Could 
not find expected \"seq\"');\n\t}\n\n\tvar seqLength = signature[offset++];\n\tif (seqLength === (MAX_OCTET | 1)) {\n\t\tseqLength = signature[offset++];\n\t}\n\n\tif (inputLength - offset < seqLength) {\n\t\tthrow new Error('\"seq\" specified length of \"' + seqLength + '\", only \"' + (inputLength - offset) + '\" remaining');\n\t}\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"r\"');\n\t}\n\n\tvar rLength = signature[offset++];\n\n\tif (inputLength - offset - 2 < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", only \"' + (inputLength - offset - 2) + '\" available');\n\t}\n\n\tif (maxEncodedParamLength < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar rOffset = offset;\n\toffset += rLength;\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"s\"');\n\t}\n\n\tvar sLength = signature[offset++];\n\n\tif (inputLength - offset !== sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", expected \"' + (inputLength - offset) + '\"');\n\t}\n\n\tif (maxEncodedParamLength < sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar sOffset = offset;\n\toffset += sLength;\n\n\tif (offset !== inputLength) {\n\t\tthrow new Error('Expected to consume entire buffer, but \"' + (inputLength - offset) + '\" bytes remain');\n\t}\n\n\tvar rPadding = paramBytes - rLength,\n\t\tsPadding = paramBytes - sLength;\n\n\tvar dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength);\n\n\tfor (offset = 0; offset < rPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength);\n\n\toffset = paramBytes;\n\n\tfor (var o = offset; offset < o + sPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength);\n\n\tdst = dst.toString('base64');\n\tdst = base64Url(dst);\n\n\treturn dst;\n}\n\nfunction countPadding(buf, start, stop) {\n\tvar padding = 0;\n\twhile (start + padding < stop && buf[start + padding] === 0) {\n\t\t++padding;\n\t}\n\n\tvar needsSign = buf[start + padding] >= MAX_OCTET;\n\tif (needsSign) {\n\t\t--padding;\n\t}\n\n\treturn padding;\n}\n\nfunction joseToDer(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\tvar signatureBytes = signature.length;\n\tif (signatureBytes !== paramBytes * 2) {\n\t\tthrow new TypeError('\"' + alg + '\" signatures must be \"' + paramBytes * 2 + '\" bytes, saw \"' + signatureBytes + '\"');\n\t}\n\n\tvar rPadding = countPadding(signature, 0, paramBytes);\n\tvar sPadding = countPadding(signature, paramBytes, signature.length);\n\tvar rLength = paramBytes - rPadding;\n\tvar sLength = paramBytes - sPadding;\n\n\tvar rsBytes = 1 + 1 + rLength + 1 + 1 + sLength;\n\n\tvar shortLength = rsBytes < MAX_OCTET;\n\n\tvar dst = Buffer.allocUnsafe((shortLength ? 
2 : 3) + rsBytes);\n\n\tvar offset = 0;\n\tdst[offset++] = ENCODED_TAG_SEQ;\n\tif (shortLength) {\n\t\t// Bit 8 has value \"0\"\n\t\t// bits 7-1 give the length.\n\t\tdst[offset++] = rsBytes;\n\t} else {\n\t\t// Bit 8 of first octet has value \"1\"\n\t\t// bits 7-1 give the number of additional length octets.\n\t\tdst[offset++] = MAX_OCTET\t| 1;\n\t\t// length, base 256\n\t\tdst[offset++] = rsBytes & 0xff;\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = rLength;\n\tif (rPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\toffset += signature.copy(dst, offset, 0, paramBytes);\n\t} else {\n\t\toffset += signature.copy(dst, offset, rPadding, paramBytes);\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = sLength;\n\tif (sPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\tsignature.copy(dst, offset, paramBytes);\n\t} else {\n\t\tsignature.copy(dst, offset, paramBytes + sPadding);\n\t}\n\n\treturn dst;\n}\n\nmodule.exports = {\n\tderToJose: derToJose,\n\tjoseToDer: joseToDer\n};\n","'use strict';\n\nfunction getParamSize(keySize) {\n\tvar result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1);\n\treturn result;\n}\n\nvar paramBytesForAlg = {\n\tES256: getParamSize(256),\n\tES384: getParamSize(384),\n\tES512: getParamSize(521)\n};\n\nfunction getParamBytesForAlg(alg) {\n\tvar paramBytes = paramBytesForAlg[alg];\n\tif (paramBytes) {\n\t\treturn paramBytes;\n\t}\n\n\tthrow new Error('Unknown algorithm \"' + alg + '\"');\n}\n\nmodule.exports = getParamBytesForAlg;\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","var punycode = require('punycode');\nvar entities = require('./entities.json');\n\nmodule.exports = decode;\n\nfunction decode (str) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n\n return str.replace(/&(#?[^;\\W]+;?)/g, function (_, match) {\n var m;\n if (m = /^#(\\d+);?$/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 10) ]);\n } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 16) ]);\n } else {\n // named entity\n var hasSemi = /;$/.test(match);\n var withoutSemi = hasSemi ? match.replace(/;$/, '') : match;\n var target = entities[withoutSemi] || (hasSemi && entities[match]);\n\n if (typeof target === 'number') {\n return punycode.ucs2.encode([ target ]);\n } else if (typeof target === 'string') {\n return target;\n } else {\n return '&' + match;\n }\n }\n });\n}\n","var punycode = require('punycode');\nvar revEntities = require('./reversed.json');\n\nmodule.exports = encode;\n\nfunction encode (str, opts) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n if (!opts) opts = {};\n\n var numeric = true;\n if (opts.named) numeric = false;\n if (opts.numeric !== undefined) numeric = opts.numeric;\n\n var special = opts.special || {\n '\"': true, \"'\": true,\n '<': true, '>': true,\n '&': true\n };\n\n var codePoints = punycode.ucs2.decode(str);\n var chars = [];\n for (var i = 0; i < codePoints.length; i++) {\n var cc = codePoints[i];\n var c = punycode.ucs2.encode([ cc ]);\n var e = revEntities[cc];\n if (e && (cc >= 127 || special[c]) && !numeric) {\n chars.push('&' + (/;$/.test(e) ? 
e : e + ';'));\n }\n else if (cc < 32 || cc >= 127 || special[c]) {\n chars.push('&#' + cc + ';');\n }\n else {\n chars.push(c);\n }\n }\n return chars.join('');\n}\n","exports.encode = require('./encode');\nexports.decode = require('./decode');\n","/**\n * @author Toru Nagashima \n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The 
target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","'use strict';\n\nvar hasOwn = Object.prototype.hasOwnProperty;\nvar toStr = Object.prototype.toString;\nvar defineProperty = Object.defineProperty;\nvar gOPD = Object.getOwnPropertyDescriptor;\n\nvar isArray = function isArray(arr) {\n\tif (typeof Array.isArray === 'function') {\n\t\treturn Array.isArray(arr);\n\t}\n\n\treturn toStr.call(arr) === '[object Array]';\n};\n\nvar isPlainObject = function isPlainObject(obj) {\n\tif (!obj || toStr.call(obj) !== '[object Object]') {\n\t\treturn false;\n\t}\n\n\tvar hasOwnConstructor = hasOwn.call(obj, 'constructor');\n\tvar hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf');\n\t// Not own constructor property must be Object\n\tif (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {\n\t\treturn false;\n\t}\n\n\t// Own properties are enumerated firstly, so to speed up,\n\t// if last one is own, then all properties are own.\n\tvar key;\n\tfor (key in obj) { /**/ }\n\n\treturn typeof key === 'undefined' || hasOwn.call(obj, key);\n};\n\n// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target\nvar setProperty = function setProperty(target, options) {\n\tif (defineProperty && options.name === '__proto__') {\n\t\tdefineProperty(target, options.name, {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: true,\n\t\t\tvalue: options.newValue,\n\t\t\twritable: true\n\t\t});\n\t} else {\n\t\ttarget[options.name] = options.newValue;\n\t}\n};\n\n// Return undefined instead of __proto__ if '__proto__' is not an own property\nvar getProperty = function getProperty(obj, name) {\n\tif (name === '__proto__') {\n\t\tif (!hasOwn.call(obj, name)) {\n\t\t\treturn void 0;\n\t\t} else if (gOPD) {\n\t\t\t// In early versions of node, obj['__proto__'] is buggy when obj has\n\t\t\t// __proto__ as an own property. 
Object.getOwnPropertyDescriptor() works.\n\t\t\treturn gOPD(obj, name).value;\n\t\t}\n\t}\n\n\treturn obj[name];\n};\n\nmodule.exports = function extend() {\n\tvar options, name, src, copy, copyIsArray, clone;\n\tvar target = arguments[0];\n\tvar i = 1;\n\tvar length = arguments.length;\n\tvar deep = false;\n\n\t// Handle a deep copy situation\n\tif (typeof target === 'boolean') {\n\t\tdeep = target;\n\t\ttarget = arguments[1] || {};\n\t\t// skip the boolean and the target\n\t\ti = 2;\n\t}\n\tif (target == null || (typeof target !== 'object' && typeof target !== 'function')) {\n\t\ttarget = {};\n\t}\n\n\tfor (; i < length; ++i) {\n\t\toptions = arguments[i];\n\t\t// Only deal with non-null/undefined values\n\t\tif (options != null) {\n\t\t\t// Extend the base object\n\t\t\tfor (name in options) {\n\t\t\t\tsrc = getProperty(target, name);\n\t\t\t\tcopy = getProperty(options, name);\n\n\t\t\t\t// Prevent never-ending loop\n\t\t\t\tif (target !== copy) {\n\t\t\t\t\t// Recurse if we're merging plain objects or arrays\n\t\t\t\t\tif (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) {\n\t\t\t\t\t\tif (copyIsArray) {\n\t\t\t\t\t\t\tcopyIsArray = false;\n\t\t\t\t\t\t\tclone = src && isArray(src) ? src : [];\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tclone = src && isPlainObject(src) ? src : {};\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// Never move original objects, clone them\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: extend(deep, clone, copy) });\n\n\t\t\t\t\t// Don't bring in undefined values\n\t\t\t\t\t} else if (typeof copy !== 'undefined') {\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: copy });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Return the modified object\n\treturn target;\n};\n","'use strict';\n\nconst validator = require('./validator');\nconst XMLParser = require('./xmlparser/XMLParser');\nconst XMLBuilder = require('./xmlbuilder/json2xml');\n\nmodule.exports = {\n XMLParser: XMLParser,\n XMLValidator: validator,\n XMLBuilder: XMLBuilder\n}","'use strict';\n\nconst nameStartChar = ':A-Za-z_\\\\u00C0-\\\\u00D6\\\\u00D8-\\\\u00F6\\\\u00F8-\\\\u02FF\\\\u0370-\\\\u037D\\\\u037F-\\\\u1FFF\\\\u200C-\\\\u200D\\\\u2070-\\\\u218F\\\\u2C00-\\\\u2FEF\\\\u3001-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFFD';\nconst nameChar = nameStartChar + '\\\\-.\\\\d\\\\u00B7\\\\u0300-\\\\u036F\\\\u203F-\\\\u2040';\nconst nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*'\nconst regexName = new RegExp('^' + nameRegexp + '$');\n\nconst getAllMatches = function(string, regex) {\n const matches = [];\n let match = regex.exec(string);\n while (match) {\n const allmatches = [];\n allmatches.startIndex = regex.lastIndex - match[0].length;\n const len = match.length;\n for (let index = 0; index < len; index++) {\n allmatches.push(match[index]);\n }\n matches.push(allmatches);\n match = regex.exec(string);\n }\n return matches;\n};\n\nconst isName = function(string) {\n const match = regexName.exec(string);\n return !(match === null || typeof match === 'undefined');\n};\n\nexports.isExist = function(v) {\n return typeof v !== 'undefined';\n};\n\nexports.isEmptyObject = function(obj) {\n return Object.keys(obj).length === 0;\n};\n\n/**\n * Copy all the properties of a into b.\n * @param {*} target\n * @param {*} a\n */\nexports.merge = function(target, a, arrayMode) {\n if (a) {\n const keys = Object.keys(a); // will return an array of own properties\n const len = keys.length; //don't make it inline\n for (let i = 0; i < len; i++) {\n if (arrayMode === 'strict') {\n 
target[keys[i]] = [ a[keys[i]] ];\n } else {\n target[keys[i]] = a[keys[i]];\n }\n }\n }\n};\n/* exports.merge =function (b,a){\n return Object.assign(b,a);\n} */\n\nexports.getValue = function(v) {\n if (exports.isExist(v)) {\n return v;\n } else {\n return '';\n }\n};\n\n// const fakeCall = function(a) {return a;};\n// const fakeCallNoReturn = function() {};\n\nexports.isName = isName;\nexports.getAllMatches = getAllMatches;\nexports.nameRegexp = nameRegexp;\n","'use strict';\n\nconst util = require('./util');\n\nconst defaultOptions = {\n allowBooleanAttributes: false, //A tag can have attributes without any value\n unpairedTags: []\n};\n\n//const tagsPattern = new RegExp(\"<\\\\/?([\\\\w:\\\\-_\\.]+)\\\\s*\\/?>\",\"g\");\nexports.validate = function (xmlData, options) {\n options = Object.assign({}, defaultOptions, options);\n\n //xmlData = xmlData.replace(/(\\r\\n|\\n|\\r)/gm,\"\");//make it single line\n //xmlData = xmlData.replace(/(^\\s*<\\?xml.*?\\?>)/g,\"\");//Remove XML starting tag\n //xmlData = xmlData.replace(/()/g,\"\");//Remove DOCTYPE\n const tags = [];\n let tagFound = false;\n\n //indicates that the root tag has been closed (aka. depth 0 has been reached)\n let reachedRoot = false;\n\n if (xmlData[0] === '\\ufeff') {\n // check for byte order mark (BOM)\n xmlData = xmlData.substr(1);\n }\n \n for (let i = 0; i < xmlData.length; i++) {\n\n if (xmlData[i] === '<' && xmlData[i+1] === '?') {\n i+=2;\n i = readPI(xmlData,i);\n if (i.err) return i;\n }else if (xmlData[i] === '<') {\n //starting of tag\n //read until you reach to '>' avoiding any '>' in attribute value\n let tagStartPos = i;\n i++;\n \n if (xmlData[i] === '!') {\n i = readCommentAndCDATA(xmlData, i);\n continue;\n } else {\n let closingTag = false;\n if (xmlData[i] === '/') {\n //closing tag\n closingTag = true;\n i++;\n }\n //read tagname\n let tagName = '';\n for (; i < xmlData.length &&\n xmlData[i] !== '>' &&\n xmlData[i] !== ' ' &&\n xmlData[i] !== '\\t' &&\n xmlData[i] !== '\\n' &&\n xmlData[i] !== '\\r'; i++\n ) {\n tagName += xmlData[i];\n }\n tagName = tagName.trim();\n //console.log(tagName);\n\n if (tagName[tagName.length - 1] === '/') {\n //self closing tag without attributes\n tagName = tagName.substring(0, tagName.length - 1);\n //continue;\n i--;\n }\n if (!validateTagName(tagName)) {\n let msg;\n if (tagName.trim().length === 0) {\n msg = \"Invalid space after '<'.\";\n } else {\n msg = \"Tag '\"+tagName+\"' is an invalid name.\";\n }\n return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i));\n }\n\n const result = readAttributeStr(xmlData, i);\n if (result === false) {\n return getErrorObject('InvalidAttr', \"Attributes for '\"+tagName+\"' have open quote.\", getLineNumberForPosition(xmlData, i));\n }\n let attrStr = result.value;\n i = result.index;\n\n if (attrStr[attrStr.length - 1] === '/') {\n //self closing tag\n const attrStrStart = i - attrStr.length;\n attrStr = attrStr.substring(0, attrStr.length - 1);\n const isValid = validateAttributeString(attrStr, options);\n if (isValid === true) {\n tagFound = true;\n //continue; //text may presents after self closing tag\n } else {\n //the result from the nested function returns the position of the error within the attribute\n //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute\n //this gives us the absolute index in the entire xml, which we can use to find the line at last\n return 
getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line));\n }\n } else if (closingTag) {\n if (!result.tagClosed) {\n return getErrorObject('InvalidTag', \"Closing tag '\"+tagName+\"' doesn't have proper closing.\", getLineNumberForPosition(xmlData, i));\n } else if (attrStr.trim().length > 0) {\n return getErrorObject('InvalidTag', \"Closing tag '\"+tagName+\"' can't have attributes or invalid starting.\", getLineNumberForPosition(xmlData, tagStartPos));\n } else {\n const otg = tags.pop();\n if (tagName !== otg.tagName) {\n let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos);\n return getErrorObject('InvalidTag',\n \"Expected closing tag '\"+otg.tagName+\"' (opened in line \"+openPos.line+\", col \"+openPos.col+\") instead of closing tag '\"+tagName+\"'.\",\n getLineNumberForPosition(xmlData, tagStartPos));\n }\n\n //when there are no more tags, we reached the root level.\n if (tags.length == 0) {\n reachedRoot = true;\n }\n }\n } else {\n const isValid = validateAttributeString(attrStr, options);\n if (isValid !== true) {\n //the result from the nested function returns the position of the error within the attribute\n //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute\n //this gives us the absolute index in the entire xml, which we can use to find the line at last\n return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line));\n }\n\n //if the root level has been reached before ...\n if (reachedRoot === true) {\n return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i));\n } else if(options.unpairedTags.indexOf(tagName) !== -1){\n //don't push into stack\n } else {\n tags.push({tagName, tagStartPos});\n }\n tagFound = true;\n }\n\n //skip tag text value\n //It may include comments and CDATA value\n for (i++; i < xmlData.length; i++) {\n if (xmlData[i] === '<') {\n if (xmlData[i + 1] === '!') {\n //comment or CADATA\n i++;\n i = readCommentAndCDATA(xmlData, i);\n continue;\n } else if (xmlData[i+1] === '?') {\n i = readPI(xmlData, ++i);\n if (i.err) return i;\n } else{\n break;\n }\n } else if (xmlData[i] === '&') {\n const afterAmp = validateAmpersand(xmlData, i);\n if (afterAmp == -1)\n return getErrorObject('InvalidChar', \"char '&' is not expected.\", getLineNumberForPosition(xmlData, i));\n i = afterAmp;\n }else{\n if (reachedRoot === true && !isWhiteSpace(xmlData[i])) {\n return getErrorObject('InvalidXml', \"Extra text at the end\", getLineNumberForPosition(xmlData, i));\n }\n }\n } //end of reading tag text value\n if (xmlData[i] === '<') {\n i--;\n }\n }\n } else {\n if ( isWhiteSpace(xmlData[i])) {\n continue;\n }\n return getErrorObject('InvalidChar', \"char '\"+xmlData[i]+\"' is not expected.\", getLineNumberForPosition(xmlData, i));\n }\n }\n\n if (!tagFound) {\n return getErrorObject('InvalidXml', 'Start tag expected.', 1);\n }else if (tags.length == 1) {\n return getErrorObject('InvalidTag', \"Unclosed tag '\"+tags[0].tagName+\"'.\", getLineNumberForPosition(xmlData, tags[0].tagStartPos));\n }else if (tags.length > 0) {\n return getErrorObject('InvalidXml', \"Invalid '\"+\n JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\\r?\\n/g, '')+\n \"' found.\", {line: 1, col: 1});\n }\n\n return true;\n};\n\nfunction isWhiteSpace(char){\n return char === ' ' 
|| char === '\\t' || char === '\\n' || char === '\\r';\n}\n/**\n * Read Processing insstructions and skip\n * @param {*} xmlData\n * @param {*} i\n */\nfunction readPI(xmlData, i) {\n const start = i;\n for (; i < xmlData.length; i++) {\n if (xmlData[i] == '?' || xmlData[i] == ' ') {\n //tagname\n const tagname = xmlData.substr(start, i - start);\n if (i > 5 && tagname === 'xml') {\n return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i));\n } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') {\n //check if valid attribut string\n i++;\n break;\n } else {\n continue;\n }\n }\n }\n return i;\n}\n\nfunction readCommentAndCDATA(xmlData, i) {\n if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') {\n //comment\n for (i += 3; i < xmlData.length; i++) {\n if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') {\n i += 2;\n break;\n }\n }\n } else if (\n xmlData.length > i + 8 &&\n xmlData[i + 1] === 'D' &&\n xmlData[i + 2] === 'O' &&\n xmlData[i + 3] === 'C' &&\n xmlData[i + 4] === 'T' &&\n xmlData[i + 5] === 'Y' &&\n xmlData[i + 6] === 'P' &&\n xmlData[i + 7] === 'E'\n ) {\n let angleBracketsCount = 1;\n for (i += 8; i < xmlData.length; i++) {\n if (xmlData[i] === '<') {\n angleBracketsCount++;\n } else if (xmlData[i] === '>') {\n angleBracketsCount--;\n if (angleBracketsCount === 0) {\n break;\n }\n }\n }\n } else if (\n xmlData.length > i + 9 &&\n xmlData[i + 1] === '[' &&\n xmlData[i + 2] === 'C' &&\n xmlData[i + 3] === 'D' &&\n xmlData[i + 4] === 'A' &&\n xmlData[i + 5] === 'T' &&\n xmlData[i + 6] === 'A' &&\n xmlData[i + 7] === '['\n ) {\n for (i += 8; i < xmlData.length; i++) {\n if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') {\n i += 2;\n break;\n }\n }\n }\n\n return i;\n}\n\nconst doubleQuote = '\"';\nconst singleQuote = \"'\";\n\n/**\n * Keep reading xmlData until '<' is found outside the attribute value.\n * @param {string} xmlData\n * @param {number} i\n */\nfunction readAttributeStr(xmlData, i) {\n let attrStr = '';\n let startChar = '';\n let tagClosed = false;\n for (; i < xmlData.length; i++) {\n if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) {\n if (startChar === '') {\n startChar = xmlData[i];\n } else if (startChar !== xmlData[i]) {\n //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa\n } else {\n startChar = '';\n }\n } else if (xmlData[i] === '>') {\n if (startChar === '') {\n tagClosed = true;\n break;\n }\n }\n attrStr += xmlData[i];\n }\n if (startChar !== '') {\n return false;\n }\n\n return {\n value: attrStr,\n index: i,\n tagClosed: tagClosed\n };\n}\n\n/**\n * Select all the attributes whether valid or invalid.\n */\nconst validAttrStrRegxp = new RegExp('(\\\\s*)([^\\\\s=]+)(\\\\s*=)?(\\\\s*([\\'\"])(([\\\\s\\\\S])*?)\\\\5)?', 'g');\n\n//attr, =\"sd\", a=\"amit's\", a=\"sd\"b=\"saf\", ab cd=\"\"\n\nfunction validateAttributeString(attrStr, options) {\n //console.log(\"start:\"+attrStr+\":end\");\n\n //if(attrStr.trim().length === 0) return true; //empty string\n\n const matches = util.getAllMatches(attrStr, validAttrStrRegxp);\n const attrNames = {};\n\n for (let i = 0; i < matches.length; i++) {\n if (matches[i][1].length === 0) {\n //nospace before attribute name: a=\"sd\"b=\"saf\"\n return getErrorObject('InvalidAttr', \"Attribute '\"+matches[i][2]+\"' has no space in starting.\", getPositionFromMatch(matches[i]))\n } else if (matches[i][3] 
!== undefined && matches[i][4] === undefined) {\n return getErrorObject('InvalidAttr', \"Attribute '\"+matches[i][2]+\"' is without value.\", getPositionFromMatch(matches[i]));\n } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) {\n //independent attribute: ab\n return getErrorObject('InvalidAttr', \"boolean attribute '\"+matches[i][2]+\"' is not allowed.\", getPositionFromMatch(matches[i]));\n }\n /* else if(matches[i][6] === undefined){//attribute without value: ab=\n return { err: { code:\"InvalidAttr\",msg:\"attribute \" + matches[i][2] + \" has no value assigned.\"}};\n } */\n const attrName = matches[i][2];\n if (!validateAttrName(attrName)) {\n return getErrorObject('InvalidAttr', \"Attribute '\"+attrName+\"' is an invalid name.\", getPositionFromMatch(matches[i]));\n }\n if (!attrNames.hasOwnProperty(attrName)) {\n //check for duplicate attribute.\n attrNames[attrName] = 1;\n } else {\n return getErrorObject('InvalidAttr', \"Attribute '\"+attrName+\"' is repeated.\", getPositionFromMatch(matches[i]));\n }\n }\n\n return true;\n}\n\nfunction validateNumberAmpersand(xmlData, i) {\n let re = /\\d/;\n if (xmlData[i] === 'x') {\n i++;\n re = /[\\da-fA-F]/;\n }\n for (; i < xmlData.length; i++) {\n if (xmlData[i] === ';')\n return i;\n if (!xmlData[i].match(re))\n break;\n }\n return -1;\n}\n\nfunction validateAmpersand(xmlData, i) {\n // https://www.w3.org/TR/xml/#dt-charref\n i++;\n if (xmlData[i] === ';')\n return -1;\n if (xmlData[i] === '#') {\n i++;\n return validateNumberAmpersand(xmlData, i);\n }\n let count = 0;\n for (; i < xmlData.length; i++, count++) {\n if (xmlData[i].match(/\\w/) && count < 20)\n continue;\n if (xmlData[i] === ';')\n break;\n return -1;\n }\n return i;\n}\n\nfunction getErrorObject(code, message, lineNumber) {\n return {\n err: {\n code: code,\n msg: message,\n line: lineNumber.line || lineNumber,\n col: lineNumber.col,\n },\n };\n}\n\nfunction validateAttrName(attrName) {\n return util.isName(attrName);\n}\n\n// const startsWithXML = /^xml/i;\n\nfunction validateTagName(tagname) {\n return util.isName(tagname) /* && !tagname.match(startsWithXML) */;\n}\n\n//this function returns the line number for the character at the given index\nfunction getLineNumberForPosition(xmlData, index) {\n const lines = xmlData.substring(0, index).split(/\\r?\\n/);\n return {\n line: lines.length,\n\n // column number is last line's length + 1, because column numbering starts at 1:\n col: lines[lines.length - 1].length + 1\n };\n}\n\n//this function returns the position of the first character of match within attrStr\nfunction getPositionFromMatch(match) {\n return match.startIndex + match[1].length;\n}\n","'use strict';\n//parse Empty Node as self closing node\nconst buildFromOrderedJs = require('./orderedJs2Xml');\n\nconst defaultOptions = {\n attributeNamePrefix: '@_',\n attributesGroupName: false,\n textNodeName: '#text',\n ignoreAttributes: true,\n cdataPropName: false,\n format: false,\n indentBy: ' ',\n suppressEmptyNode: false,\n suppressUnpairedNode: true,\n suppressBooleanAttributes: true,\n tagValueProcessor: function(key, a) {\n return a;\n },\n attributeValueProcessor: function(attrName, a) {\n return a;\n },\n preserveOrder: false,\n commentPropName: false,\n unpairedTags: [],\n entities: [\n { regex: new RegExp(\"&\", \"g\"), val: \"&\" },//it must be on top\n { regex: new RegExp(\">\", \"g\"), val: \">\" },\n { regex: new RegExp(\"<\", \"g\"), val: \"<\" },\n { regex: new RegExp(\"\\'\", \"g\"), val: \"'\" },\n { regex: new 
RegExp(\"\\\"\", \"g\"), val: \""\" }\n ],\n processEntities: true,\n stopNodes: [],\n // transformTagName: false,\n // transformAttributeName: false,\n oneListGroup: false\n};\n\nfunction Builder(options) {\n this.options = Object.assign({}, defaultOptions, options);\n if (this.options.ignoreAttributes || this.options.attributesGroupName) {\n this.isAttribute = function(/*a*/) {\n return false;\n };\n } else {\n this.attrPrefixLen = this.options.attributeNamePrefix.length;\n this.isAttribute = isAttribute;\n }\n\n this.processTextOrObjNode = processTextOrObjNode\n\n if (this.options.format) {\n this.indentate = indentate;\n this.tagEndChar = '>\\n';\n this.newLine = '\\n';\n } else {\n this.indentate = function() {\n return '';\n };\n this.tagEndChar = '>';\n this.newLine = '';\n }\n}\n\nBuilder.prototype.build = function(jObj) {\n if(this.options.preserveOrder){\n return buildFromOrderedJs(jObj, this.options);\n }else {\n if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){\n jObj = {\n [this.options.arrayNodeName] : jObj\n }\n }\n return this.j2x(jObj, 0).val;\n }\n};\n\nBuilder.prototype.j2x = function(jObj, level) {\n let attrStr = '';\n let val = '';\n for (let key in jObj) {\n if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue;\n if (typeof jObj[key] === 'undefined') {\n // supress undefined node only if it is not an attribute\n if (this.isAttribute(key)) {\n val += '';\n }\n } else if (jObj[key] === null) {\n // null attribute should be ignored by the attribute list, but should not cause the tag closing\n if (this.isAttribute(key)) {\n val += '';\n } else if (key[0] === '?') {\n val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;\n } else {\n val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n }\n // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n } else if (jObj[key] instanceof Date) {\n val += this.buildTextValNode(jObj[key], key, '', level);\n } else if (typeof jObj[key] !== 'object') {\n //premitive type\n const attr = this.isAttribute(key);\n if (attr) {\n attrStr += this.buildAttrPairStr(attr, '' + jObj[key]);\n }else {\n //tag value\n if (key === this.options.textNodeName) {\n let newval = this.options.tagValueProcessor(key, '' + jObj[key]);\n val += this.replaceEntitiesValue(newval);\n } else {\n val += this.buildTextValNode(jObj[key], key, '', level);\n }\n }\n } else if (Array.isArray(jObj[key])) {\n //repeated nodes\n const arrLen = jObj[key].length;\n let listTagVal = \"\";\n for (let j = 0; j < arrLen; j++) {\n const item = jObj[key][j];\n if (typeof item === 'undefined') {\n // supress undefined node\n } else if (item === null) {\n if(key[0] === \"?\") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar;\n else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n } else if (typeof item === 'object') {\n if(this.options.oneListGroup ){\n listTagVal += this.j2x(item, level + 1).val;\n }else{\n listTagVal += this.processTextOrObjNode(item, key, level)\n }\n } else {\n listTagVal += this.buildTextValNode(item, key, '', level);\n }\n }\n if(this.options.oneListGroup){\n listTagVal = this.buildObjectNode(listTagVal, key, '', level);\n }\n val += listTagVal;\n } else {\n //nested node\n if (this.options.attributesGroupName && key === this.options.attributesGroupName) {\n const Ks = Object.keys(jObj[key]);\n const L = Ks.length;\n for (let j = 0; j < L; j++) {\n attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);\n }\n } else {\n val += this.processTextOrObjNode(jObj[key], key, level)\n }\n }\n }\n return {attrStr: attrStr, val: val};\n};\n\nBuilder.prototype.buildAttrPairStr = function(attrName, val){\n val = this.options.attributeValueProcessor(attrName, '' + val);\n val = this.replaceEntitiesValue(val);\n if (this.options.suppressBooleanAttributes && val === \"true\") {\n return ' ' + attrName;\n } else return ' ' + attrName + '=\"' + val + '\"';\n}\n\nfunction processTextOrObjNode (object, key, level) {\n const result = this.j2x(object, level + 1);\n if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {\n return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);\n } else {\n return this.buildObjectNode(result.val, key, result.attrStr, level);\n }\n}\n\nBuilder.prototype.buildObjectNode = function(val, key, attrStr, level) {\n if(val === \"\"){\n if(key[0] === \"?\") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;\n else {\n return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;\n }\n }else{\n\n let tagEndExp = '' + val + tagEndExp );\n } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {\n return this.indentate(level) + `` + this.newLine;\n }else {\n return (\n this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +\n val +\n this.indentate(level) + tagEndExp );\n }\n }\n}\n\nBuilder.prototype.closeTag = function(key){\n let closeTag = \"\";\n if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired\n if(!this.options.suppressUnpairedNode) closeTag = \"/\"\n }else if(this.options.suppressEmptyNode){ //empty\n closeTag = \"/\";\n }else{\n closeTag = `>` + this.newLine;\n }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {\n return this.indentate(level) + `` + this.newLine;\n }else if(key[0] === \"?\") {//PI tag\n return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; \n }else{\n let textValue = this.options.tagValueProcessor(key, val);\n textValue = this.replaceEntitiesValue(textValue);\n \n if( textValue === ''){\n return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;\n }else{\n return this.indentate(level) + '<' + key + attrStr + '>' +\n textValue +\n ' 0 && this.options.processEntities){\n for (let i=0; i 0) {\n indentation = EOL;\n }\n return arrToStr(jArray, options, \"\", indentation);\n}\n\nfunction arrToStr(arr, options, jPath, indentation) {\n let xmlStr = \"\";\n let isPreviousElementTag = false;\n\n for (let i = 0; i < arr.length; i++) {\n const tagObj = arr[i];\n const tagName = propName(tagObj);\n if(tagName === undefined) continue;\n\n let newJPath = \"\";\n if (jPath.length === 0) newJPath = tagName\n else newJPath = `${jPath}.${tagName}`;\n\n if (tagName === options.textNodeName) {\n let tagText = tagObj[tagName];\n if (!isStopNode(newJPath, options)) {\n tagText = options.tagValueProcessor(tagName, tagText);\n tagText = replaceEntitiesValue(tagText, options);\n }\n if (isPreviousElementTag) {\n xmlStr += indentation;\n }\n xmlStr += tagText;\n isPreviousElementTag = false;\n continue;\n } else if (tagName === options.cdataPropName) {\n if (isPreviousElementTag) {\n xmlStr += indentation;\n }\n xmlStr += ``;\n isPreviousElementTag = false;\n continue;\n } else if (tagName === options.commentPropName) {\n xmlStr += indentation + ``;\n isPreviousElementTag = true;\n continue;\n } else if (tagName[0] === \"?\") {\n const attStr = attr_to_str(tagObj[\":@\"], options);\n const tempInd = tagName === \"?xml\" ? \"\" : indentation;\n let piTextNodeName = tagObj[tagName][0][options.textNodeName];\n piTextNodeName = piTextNodeName.length !== 0 ? \" \" + piTextNodeName : \"\"; //remove extra spacing\n xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;\n isPreviousElementTag = true;\n continue;\n }\n let newIdentation = indentation;\n if (newIdentation !== \"\") {\n newIdentation += options.indentBy;\n }\n const attStr = attr_to_str(tagObj[\":@\"], options);\n const tagStart = indentation + `<${tagName}${attStr}`;\n const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);\n if (options.unpairedTags.indexOf(tagName) !== -1) {\n if (options.suppressUnpairedNode) xmlStr += tagStart + \">\";\n else xmlStr += tagStart + \"/>\";\n } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {\n xmlStr += tagStart + \"/>\";\n } else if (tagValue && tagValue.endsWith(\">\")) {\n xmlStr += tagStart + `>${tagValue}${indentation}`;\n } else {\n xmlStr += tagStart + \">\";\n if (tagValue && indentation !== \"\" && (tagValue.includes(\"/>\") || tagValue.includes(\"`;\n }\n isPreviousElementTag = true;\n }\n\n return xmlStr;\n}\n\nfunction propName(obj) {\n const keys = Object.keys(obj);\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i];\n if(!obj.hasOwnProperty(key)) continue;\n if (key !== \":@\") return key;\n }\n}\n\nfunction attr_to_str(attrMap, options) {\n let attrStr = \"\";\n if (attrMap && !options.ignoreAttributes) {\n for (let attr in attrMap) {\n if(!attrMap.hasOwnProperty(attr)) continue;\n let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);\n attrVal = replaceEntitiesValue(attrVal, options);\n if (attrVal === true && options.suppressBooleanAttributes) {\n attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;\n } else {\n attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}=\"${attrVal}\"`;\n 
}\n }\n }\n return attrStr;\n}\n\nfunction isStopNode(jPath, options) {\n jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);\n let tagName = jPath.substr(jPath.lastIndexOf(\".\") + 1);\n for (let index in options.stopNodes) {\n if (options.stopNodes[index] === jPath || options.stopNodes[index] === \"*.\" + tagName) return true;\n }\n return false;\n}\n\nfunction replaceEntitiesValue(textValue, options) {\n if (textValue && textValue.length > 0 && options.processEntities) {\n for (let i = 0; i < options.entities.length; i++) {\n const entity = options.entities[i];\n textValue = textValue.replace(entity.regex, entity.val);\n }\n }\n return textValue;\n}\nmodule.exports = toXml;\n","const util = require('../util');\n\n//TODO: handle comments\nfunction readDocType(xmlData, i){\n \n const entities = {};\n if( xmlData[i + 3] === 'O' &&\n xmlData[i + 4] === 'C' &&\n xmlData[i + 5] === 'T' &&\n xmlData[i + 6] === 'Y' &&\n xmlData[i + 7] === 'P' &&\n xmlData[i + 8] === 'E')\n { \n i = i+9;\n let angleBracketsCount = 1;\n let hasBody = false, comment = false;\n let exp = \"\";\n for(;i') { //Read tag content\n if(comment){\n if( xmlData[i - 1] === \"-\" && xmlData[i - 2] === \"-\"){\n comment = false;\n angleBracketsCount--;\n }\n }else{\n angleBracketsCount--;\n }\n if (angleBracketsCount === 0) {\n break;\n }\n }else if( xmlData[i] === '['){\n hasBody = true;\n }else{\n exp += xmlData[i];\n }\n }\n if(angleBracketsCount !== 0){\n throw new Error(`Unclosed DOCTYPE`);\n }\n }else{\n throw new Error(`Invalid Tag instead of DOCTYPE`);\n }\n return {entities, i};\n}\n\nfunction readEntityExp(xmlData,i){\n //External entities are not supported\n // \n\n //Parameter entities are not supported\n // \n\n //Internal entities are supported\n // \n \n //read EntityName\n let entityName = \"\";\n for (; i < xmlData.length && (xmlData[i] !== \"'\" && xmlData[i] !== '\"' ); i++) {\n // if(xmlData[i] === \" \") continue;\n // else \n entityName += xmlData[i];\n }\n entityName = entityName.trim();\n if(entityName.indexOf(\" \") !== -1) throw new Error(\"External entites are not supported\");\n\n //read Entity Value\n const startChar = xmlData[i++];\n let val = \"\"\n for (; i < xmlData.length && xmlData[i] !== startChar ; i++) {\n val += xmlData[i];\n }\n return [entityName, val, i];\n}\n\nfunction isComment(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === '-' &&\n xmlData[i+3] === '-') return true\n return false\n}\nfunction isEntity(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === 'E' &&\n xmlData[i+3] === 'N' &&\n xmlData[i+4] === 'T' &&\n xmlData[i+5] === 'I' &&\n xmlData[i+6] === 'T' &&\n xmlData[i+7] === 'Y') return true\n return false\n}\nfunction isElement(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === 'E' &&\n xmlData[i+3] === 'L' &&\n xmlData[i+4] === 'E' &&\n xmlData[i+5] === 'M' &&\n xmlData[i+6] === 'E' &&\n xmlData[i+7] === 'N' &&\n xmlData[i+8] === 'T') return true\n return false\n}\n\nfunction isAttlist(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === 'A' &&\n xmlData[i+3] === 'T' &&\n xmlData[i+4] === 'T' &&\n xmlData[i+5] === 'L' &&\n xmlData[i+6] === 'I' &&\n xmlData[i+7] === 'S' &&\n xmlData[i+8] === 'T') return true\n return false\n}\nfunction isNotation(xmlData, i){\n if(xmlData[i+1] === '!' 
&&\n xmlData[i+2] === 'N' &&\n xmlData[i+3] === 'O' &&\n xmlData[i+4] === 'T' &&\n xmlData[i+5] === 'A' &&\n xmlData[i+6] === 'T' &&\n xmlData[i+7] === 'I' &&\n xmlData[i+8] === 'O' &&\n xmlData[i+9] === 'N') return true\n return false\n}\n\nfunction validateEntityName(name){\n if (util.isName(name))\n\treturn name;\n else\n throw new Error(`Invalid entity name ${name}`);\n}\n\nmodule.exports = readDocType;\n","\nconst defaultOptions = {\n preserveOrder: false,\n attributeNamePrefix: '@_',\n attributesGroupName: false,\n textNodeName: '#text',\n ignoreAttributes: true,\n removeNSPrefix: false, // remove NS from tag name or attribute name if true\n allowBooleanAttributes: false, //a tag can have attributes without any value\n //ignoreRootElement : false,\n parseTagValue: true,\n parseAttributeValue: false,\n trimValues: true, //Trim string values of tag and attributes\n cdataPropName: false,\n numberParseOptions: {\n hex: true,\n leadingZeros: true,\n eNotation: true\n },\n tagValueProcessor: function(tagName, val) {\n return val;\n },\n attributeValueProcessor: function(attrName, val) {\n return val;\n },\n stopNodes: [], //nested tags will not be parsed even for errors\n alwaysCreateTextNode: false,\n isArray: () => false,\n commentPropName: false,\n unpairedTags: [],\n processEntities: true,\n htmlEntities: false,\n ignoreDeclaration: false,\n ignorePiTags: false,\n transformTagName: false,\n transformAttributeName: false,\n updateTag: function(tagName, jPath, attrs){\n return tagName\n },\n // skipEmptyListItem: false\n};\n \nconst buildOptions = function(options) {\n return Object.assign({}, defaultOptions, options);\n};\n\nexports.buildOptions = buildOptions;\nexports.defaultOptions = defaultOptions;","'use strict';\n///@ts-check\n\nconst util = require('../util');\nconst xmlNode = require('./xmlNode');\nconst readDocType = require(\"./DocTypeReader\");\nconst toNumber = require(\"strnum\");\n\n// const regx =\n// '<((!\\\\[CDATA\\\\[([\\\\s\\\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\\\/)(NAME)\\\\s*>))([^<]*)'\n// .replace(/NAME/g, util.nameRegexp);\n\n//const tagsRegx = new RegExp(\"<(\\\\/?[\\\\w:\\\\-\\._]+)([^>]*)>(\\\\s*\"+cdataRegx+\")*([^<]+)?\",\"g\");\n//const tagsRegx = new RegExp(\"<(\\\\/?)((\\\\w*:)?([\\\\w:\\\\-\\._]+))([^>]*)>([^<]*)(\"+cdataRegx+\"([^<]*))*([^<]+)?\",\"g\");\n\nclass OrderedObjParser{\n constructor(options){\n this.options = options;\n this.currentNode = null;\n this.tagsNodeStack = [];\n this.docTypeEntities = {};\n this.lastEntities = {\n \"apos\" : { regex: /&(apos|#39|#x27);/g, val : \"'\"},\n \"gt\" : { regex: /&(gt|#62|#x3E);/g, val : \">\"},\n \"lt\" : { regex: /&(lt|#60|#x3C);/g, val : \"<\"},\n \"quot\" : { regex: /&(quot|#34|#x22);/g, val : \"\\\"\"},\n };\n this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : \"&\"};\n this.htmlEntities = {\n \"space\": { regex: /&(nbsp|#160);/g, val: \" \" },\n // \"lt\" : { regex: /&(lt|#60);/g, val: \"<\" },\n // \"gt\" : { regex: /&(gt|#62);/g, val: \">\" },\n // \"amp\" : { regex: /&(amp|#38);/g, val: \"&\" },\n // \"quot\" : { regex: /&(quot|#34);/g, val: \"\\\"\" },\n // \"apos\" : { regex: /&(apos|#39);/g, val: \"'\" },\n \"cent\" : { regex: /&(cent|#162);/g, val: \"¢\" },\n \"pound\" : { regex: /&(pound|#163);/g, val: \"£\" },\n \"yen\" : { regex: /&(yen|#165);/g, val: \"¥\" },\n \"euro\" : { regex: /&(euro|#8364);/g, val: \"€\" },\n \"copyright\" : { regex: /&(copy|#169);/g, val: \"©\" },\n \"reg\" : { regex: /&(reg|#174);/g, val: \"®\" },\n \"inr\" : { regex: /&(inr|#8377);/g, val: \"₹\" },\n 
};\n this.addExternalEntities = addExternalEntities;\n this.parseXml = parseXml;\n this.parseTextData = parseTextData;\n this.resolveNameSpace = resolveNameSpace;\n this.buildAttributesMap = buildAttributesMap;\n this.isItStopNode = isItStopNode;\n this.replaceEntitiesValue = replaceEntitiesValue;\n this.readStopNodeData = readStopNodeData;\n this.saveTextToParentTag = saveTextToParentTag;\n this.addChild = addChild;\n }\n\n}\n\nfunction addExternalEntities(externalEntities){\n const entKeys = Object.keys(externalEntities);\n for (let i = 0; i < entKeys.length; i++) {\n const ent = entKeys[i];\n this.lastEntities[ent] = {\n regex: new RegExp(\"&\"+ent+\";\",\"g\"),\n val : externalEntities[ent]\n }\n }\n}\n\n/**\n * @param {string} val\n * @param {string} tagName\n * @param {string} jPath\n * @param {boolean} dontTrim\n * @param {boolean} hasAttributes\n * @param {boolean} isLeafNode\n * @param {boolean} escapeEntities\n */\nfunction parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) {\n if (val !== undefined) {\n if (this.options.trimValues && !dontTrim) {\n val = val.trim();\n }\n if(val.length > 0){\n if(!escapeEntities) val = this.replaceEntitiesValue(val);\n \n const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode);\n if(newval === null || newval === undefined){\n //don't parse\n return val;\n }else if(typeof newval !== typeof val || newval !== val){\n //overwrite\n return newval;\n }else if(this.options.trimValues){\n return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions);\n }else{\n const trimmedVal = val.trim();\n if(trimmedVal === val){\n return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions);\n }else{\n return val;\n }\n }\n }\n }\n}\n\nfunction resolveNameSpace(tagname) {\n if (this.options.removeNSPrefix) {\n const tags = tagname.split(':');\n const prefix = tagname.charAt(0) === '/' ? 
'/' : '';\n if (tags[0] === 'xmlns') {\n return '';\n }\n if (tags.length === 2) {\n tagname = prefix + tags[1];\n }\n }\n return tagname;\n}\n\n//TODO: change regex to capture NS\n//const attrsRegx = new RegExp(\"([\\\\w\\\\-\\\\.\\\\:]+)\\\\s*=\\\\s*(['\\\"])((.|\\n)*?)\\\\2\",\"gm\");\nconst attrsRegx = new RegExp('([^\\\\s=]+)\\\\s*(=\\\\s*([\\'\"])([\\\\s\\\\S]*?)\\\\3)?', 'gm');\n\nfunction buildAttributesMap(attrStr, jPath, tagName) {\n if (!this.options.ignoreAttributes && typeof attrStr === 'string') {\n // attrStr = attrStr.replace(/\\r?\\n/g, ' ');\n //attrStr = attrStr || attrStr.trim();\n\n const matches = util.getAllMatches(attrStr, attrsRegx);\n const len = matches.length; //don't make it inline\n const attrs = {};\n for (let i = 0; i < len; i++) {\n const attrName = this.resolveNameSpace(matches[i][1]);\n let oldVal = matches[i][4];\n let aName = this.options.attributeNamePrefix + attrName;\n if (attrName.length) {\n if (this.options.transformAttributeName) {\n aName = this.options.transformAttributeName(aName);\n }\n if(aName === \"__proto__\") aName = \"#__proto__\";\n if (oldVal !== undefined) {\n if (this.options.trimValues) {\n oldVal = oldVal.trim();\n }\n oldVal = this.replaceEntitiesValue(oldVal);\n const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath);\n if(newVal === null || newVal === undefined){\n //don't parse\n attrs[aName] = oldVal;\n }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){\n //overwrite\n attrs[aName] = newVal;\n }else{\n //parse\n attrs[aName] = parseValue(\n oldVal,\n this.options.parseAttributeValue,\n this.options.numberParseOptions\n );\n }\n } else if (this.options.allowBooleanAttributes) {\n attrs[aName] = true;\n }\n }\n }\n if (!Object.keys(attrs).length) {\n return;\n }\n if (this.options.attributesGroupName) {\n const attrCollection = {};\n attrCollection[this.options.attributesGroupName] = attrs;\n return attrCollection;\n }\n return attrs\n }\n}\n\nconst parseXml = function(xmlData) {\n xmlData = xmlData.replace(/\\r\\n?/g, \"\\n\"); //TODO: remove this line\n const xmlObj = new xmlNode('!xml');\n let currentNode = xmlObj;\n let textData = \"\";\n let jPath = \"\";\n for(let i=0; i< xmlData.length; i++){//for each char in XML data\n const ch = xmlData[i];\n if(ch === '<'){\n // const nextIndex = i+1;\n // const _2ndChar = xmlData[nextIndex];\n if( xmlData[i+1] === '/') {//Closing Tag\n const closeIndex = findClosingIndex(xmlData, \">\", i, \"Closing Tag is not closed.\")\n let tagName = xmlData.substring(i+2,closeIndex).trim();\n\n if(this.options.removeNSPrefix){\n const colonIndex = tagName.indexOf(\":\");\n if(colonIndex !== -1){\n tagName = tagName.substr(colonIndex+1);\n }\n }\n\n if(this.options.transformTagName) {\n tagName = this.options.transformTagName(tagName);\n }\n\n if(currentNode){\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n }\n\n //check if last tag of nested tag was unpaired tag\n const lastTagName = jPath.substring(jPath.lastIndexOf(\".\")+1);\n if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){\n throw new Error(`Unpaired tag can not be used as closing tag: `);\n }\n let propIndex = 0\n if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){\n propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1)\n this.tagsNodeStack.pop();\n }else{\n propIndex = jPath.lastIndexOf(\".\");\n }\n jPath = jPath.substring(0, propIndex);\n\n currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope\n textData 
= \"\";\n i = closeIndex;\n } else if( xmlData[i+1] === '?') {\n\n let tagData = readTagExp(xmlData,i, false, \"?>\");\n if(!tagData) throw new Error(\"Pi Tag is not closed.\");\n\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n if( (this.options.ignoreDeclaration && tagData.tagName === \"?xml\") || this.options.ignorePiTags){\n\n }else{\n \n const childNode = new xmlNode(tagData.tagName);\n childNode.add(this.options.textNodeName, \"\");\n \n if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName);\n }\n this.addChild(currentNode, childNode, jPath)\n\n }\n\n\n i = tagData.closeIndex + 1;\n } else if(xmlData.substr(i + 1, 3) === '!--') {\n const endIndex = findClosingIndex(xmlData, \"-->\", i+4, \"Comment is not closed.\")\n if(this.options.commentPropName){\n const comment = xmlData.substring(i + 4, endIndex - 2);\n\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n\n currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]);\n }\n i = endIndex;\n } else if( xmlData.substr(i + 1, 2) === '!D') {\n const result = readDocType(xmlData, i);\n this.docTypeEntities = result.entities;\n i = result.i;\n }else if(xmlData.substr(i + 1, 2) === '![') {\n const closeIndex = findClosingIndex(xmlData, \"]]>\", i, \"CDATA is not closed.\") - 2;\n const tagExp = xmlData.substring(i + 9,closeIndex);\n\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n\n let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true);\n if(val == undefined) val = \"\";\n\n //cdata should be set even if it is 0 length string\n if(this.options.cdataPropName){\n currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]);\n }else{\n currentNode.add(this.options.textNodeName, val);\n }\n \n i = closeIndex + 2;\n }else {//Opening tag\n let result = readTagExp(xmlData,i, this.options.removeNSPrefix);\n let tagName= result.tagName;\n const rawTagName = result.rawTagName;\n let tagExp = result.tagExp;\n let attrExpPresent = result.attrExpPresent;\n let closeIndex = result.closeIndex;\n\n if (this.options.transformTagName) {\n tagName = this.options.transformTagName(tagName);\n }\n \n //save text as child node\n if (currentNode && textData) {\n if(currentNode.tagname !== '!xml'){\n //when nested tag is found\n textData = this.saveTextToParentTag(textData, currentNode, jPath, false);\n }\n }\n\n //check if last tag was unpaired tag\n const lastTag = currentNode;\n if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){\n currentNode = this.tagsNodeStack.pop();\n jPath = jPath.substring(0, jPath.lastIndexOf(\".\"));\n }\n if(tagName !== xmlObj.tagname){\n jPath += jPath ? 
\".\" + tagName : tagName;\n }\n if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) {\n let tagContent = \"\";\n //self-closing tag\n if(tagExp.length > 0 && tagExp.lastIndexOf(\"/\") === tagExp.length - 1){\n i = result.closeIndex;\n }\n //unpaired tag\n else if(this.options.unpairedTags.indexOf(tagName) !== -1){\n i = result.closeIndex;\n }\n //normal tag\n else{\n //read until closing tag is found\n const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1);\n if(!result) throw new Error(`Unexpected end of ${rawTagName}`);\n i = result.i;\n tagContent = result.tagContent;\n }\n\n const childNode = new xmlNode(tagName);\n if(tagName !== tagExp && attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagExp, jPath, tagName);\n }\n if(tagContent) {\n tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true);\n }\n \n jPath = jPath.substr(0, jPath.lastIndexOf(\".\"));\n childNode.add(this.options.textNodeName, tagContent);\n \n this.addChild(currentNode, childNode, jPath)\n }else{\n //selfClosing tag\n if(tagExp.length > 0 && tagExp.lastIndexOf(\"/\") === tagExp.length - 1){\n if(tagName[tagName.length - 1] === \"/\"){ //remove trailing '/'\n tagName = tagName.substr(0, tagName.length - 1);\n jPath = jPath.substr(0, jPath.length - 1);\n tagExp = tagName;\n }else{\n tagExp = tagExp.substr(0, tagExp.length - 1);\n }\n \n if(this.options.transformTagName) {\n tagName = this.options.transformTagName(tagName);\n }\n\n const childNode = new xmlNode(tagName);\n if(tagName !== tagExp && attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagExp, jPath, tagName);\n }\n this.addChild(currentNode, childNode, jPath)\n jPath = jPath.substr(0, jPath.lastIndexOf(\".\"));\n }\n //opening tag\n else{\n const childNode = new xmlNode( tagName);\n this.tagsNodeStack.push(currentNode);\n \n if(tagName !== tagExp && attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagExp, jPath, tagName);\n }\n this.addChild(currentNode, childNode, jPath)\n currentNode = childNode;\n }\n textData = \"\";\n i = closeIndex;\n }\n }\n }else{\n textData += xmlData[i];\n }\n }\n return xmlObj.child;\n}\n\nfunction addChild(currentNode, childNode, jPath){\n const result = this.options.updateTag(childNode.tagname, jPath, childNode[\":@\"])\n if(result === false){\n }else if(typeof result === \"string\"){\n childNode.tagname = result\n currentNode.addChild(childNode);\n }else{\n currentNode.addChild(childNode);\n }\n}\n\nconst replaceEntitiesValue = function(val){\n\n if(this.options.processEntities){\n for(let entityName in this.docTypeEntities){\n const entity = this.docTypeEntities[entityName];\n val = val.replace( entity.regx, entity.val);\n }\n for(let entityName in this.lastEntities){\n const entity = this.lastEntities[entityName];\n val = val.replace( entity.regex, entity.val);\n }\n if(this.options.htmlEntities){\n for(let entityName in this.htmlEntities){\n const entity = this.htmlEntities[entityName];\n val = val.replace( entity.regex, entity.val);\n }\n }\n val = val.replace( this.ampEntity.regex, this.ampEntity.val);\n }\n return val;\n}\nfunction saveTextToParentTag(textData, currentNode, jPath, isLeafNode) {\n if (textData) { //store previously collected data as textNode\n if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0\n \n textData = this.parseTextData(textData,\n currentNode.tagname,\n jPath,\n false,\n currentNode[\":@\"] ? 
Object.keys(currentNode[\":@\"]).length !== 0 : false,\n isLeafNode);\n\n if (textData !== undefined && textData !== \"\")\n currentNode.add(this.options.textNodeName, textData);\n textData = \"\";\n }\n return textData;\n}\n\n//TODO: use jPath to simplify the logic\n/**\n * \n * @param {string[]} stopNodes \n * @param {string} jPath\n * @param {string} currentTagName \n */\nfunction isItStopNode(stopNodes, jPath, currentTagName){\n const allNodesExp = \"*.\" + currentTagName;\n for (const stopNodePath in stopNodes) {\n const stopNodeExp = stopNodes[stopNodePath];\n if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true;\n }\n return false;\n}\n\n/**\n * Returns the tag Expression and where it is ending handling single-double quotes situation\n * @param {string} xmlData \n * @param {number} i starting index\n * @returns \n */\nfunction tagExpWithClosingIndex(xmlData, i, closingChar = \">\"){\n let attrBoundary;\n let tagExp = \"\";\n for (let index = i; index < xmlData.length; index++) {\n let ch = xmlData[index];\n if (attrBoundary) {\n if (ch === attrBoundary) attrBoundary = \"\";//reset\n } else if (ch === '\"' || ch === \"'\") {\n attrBoundary = ch;\n } else if (ch === closingChar[0]) {\n if(closingChar[1]){\n if(xmlData[index + 1] === closingChar[1]){\n return {\n data: tagExp,\n index: index\n }\n }\n }else{\n return {\n data: tagExp,\n index: index\n }\n }\n } else if (ch === '\\t') {\n ch = \" \"\n }\n tagExp += ch;\n }\n}\n\nfunction findClosingIndex(xmlData, str, i, errMsg){\n const closingIndex = xmlData.indexOf(str, i);\n if(closingIndex === -1){\n throw new Error(errMsg)\n }else{\n return closingIndex + str.length - 1;\n }\n}\n\nfunction readTagExp(xmlData,i, removeNSPrefix, closingChar = \">\"){\n const result = tagExpWithClosingIndex(xmlData, i+1, closingChar);\n if(!result) return;\n let tagExp = result.data;\n const closeIndex = result.index;\n const separatorIndex = tagExp.search(/\\s/);\n let tagName = tagExp;\n let attrExpPresent = true;\n if(separatorIndex !== -1){//separate tag name and attributes expression\n tagName = tagExp.substring(0, separatorIndex);\n tagExp = tagExp.substring(separatorIndex + 1).trimStart();\n }\n\n const rawTagName = tagName;\n if(removeNSPrefix){\n const colonIndex = tagName.indexOf(\":\");\n if(colonIndex !== -1){\n tagName = tagName.substr(colonIndex+1);\n attrExpPresent = tagName !== result.data.substr(colonIndex + 1);\n }\n }\n\n return {\n tagName: tagName,\n tagExp: tagExp,\n closeIndex: closeIndex,\n attrExpPresent: attrExpPresent,\n rawTagName: rawTagName,\n }\n}\n/**\n * find paired tag for a stop node\n * @param {string} xmlData \n * @param {string} tagName \n * @param {number} i \n */\nfunction readStopNodeData(xmlData, tagName, i){\n const startIndex = i;\n // Starting at 1 since we already have an open tag\n let openTagCount = 1;\n\n for (; i < xmlData.length; i++) {\n if( xmlData[i] === \"<\"){ \n if (xmlData[i+1] === \"/\") {//close tag\n const closeIndex = findClosingIndex(xmlData, \">\", i, `${tagName} is not closed`);\n let closeTagName = xmlData.substring(i+2,closeIndex).trim();\n if(closeTagName === tagName){\n openTagCount--;\n if (openTagCount === 0) {\n return {\n tagContent: xmlData.substring(startIndex, i),\n i : closeIndex\n }\n }\n }\n i=closeIndex;\n } else if(xmlData[i+1] === '?') { \n const closeIndex = findClosingIndex(xmlData, \"?>\", i+1, \"StopNode is not closed.\")\n i=closeIndex;\n } else if(xmlData.substr(i + 1, 3) === '!--') { \n const closeIndex = findClosingIndex(xmlData, \"-->\", i+3, 
\"StopNode is not closed.\")\n i=closeIndex;\n } else if(xmlData.substr(i + 1, 2) === '![') { \n const closeIndex = findClosingIndex(xmlData, \"]]>\", i, \"StopNode is not closed.\") - 2;\n i=closeIndex;\n } else {\n const tagData = readTagExp(xmlData, i, '>')\n\n if (tagData) {\n const openTagName = tagData && tagData.tagName;\n if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== \"/\") {\n openTagCount++;\n }\n i=tagData.closeIndex;\n }\n }\n }\n }//end for loop\n}\n\nfunction parseValue(val, shouldParse, options) {\n if (shouldParse && typeof val === 'string') {\n //console.log(options)\n const newval = val.trim();\n if(newval === 'true' ) return true;\n else if(newval === 'false' ) return false;\n else return toNumber(val, options);\n } else {\n if (util.isExist(val)) {\n return val;\n } else {\n return '';\n }\n }\n}\n\n\nmodule.exports = OrderedObjParser;\n","const { buildOptions} = require(\"./OptionsBuilder\");\nconst OrderedObjParser = require(\"./OrderedObjParser\");\nconst { prettify} = require(\"./node2json\");\nconst validator = require('../validator');\n\nclass XMLParser{\n \n constructor(options){\n this.externalEntities = {};\n this.options = buildOptions(options);\n \n }\n /**\n * Parse XML dats to JS object \n * @param {string|Buffer} xmlData \n * @param {boolean|Object} validationOption \n */\n parse(xmlData,validationOption){\n if(typeof xmlData === \"string\"){\n }else if( xmlData.toString){\n xmlData = xmlData.toString();\n }else{\n throw new Error(\"XML data is accepted in String or Bytes[] form.\")\n }\n if( validationOption){\n if(validationOption === true) validationOption = {}; //validate with default options\n \n const result = validator.validate(xmlData, validationOption);\n if (result !== true) {\n throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` )\n }\n }\n const orderedObjParser = new OrderedObjParser(this.options);\n orderedObjParser.addExternalEntities(this.externalEntities);\n const orderedResult = orderedObjParser.parseXml(xmlData);\n if(this.options.preserveOrder || orderedResult === undefined) return orderedResult;\n else return prettify(orderedResult, this.options);\n }\n\n /**\n * Add Entity which is not by default supported by this library\n * @param {string} key \n * @param {string} value \n */\n addEntity(key, value){\n if(value.indexOf(\"&\") !== -1){\n throw new Error(\"Entity value can't have '&'\")\n }else if(key.indexOf(\"&\") !== -1 || key.indexOf(\";\") !== -1){\n throw new Error(\"An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '\")\n }else if(value === \"&\"){\n throw new Error(\"An entity with value '&' is not permitted\");\n }else{\n this.externalEntities[key] = value;\n }\n }\n}\n\nmodule.exports = XMLParser;","'use strict';\n\n/**\n * \n * @param {array} node \n * @param {any} options \n * @returns \n */\nfunction prettify(node, options){\n return compress( node, options);\n}\n\n/**\n * \n * @param {array} arr \n * @param {object} options \n * @param {string} jPath \n * @returns object\n */\nfunction compress(arr, options, jPath){\n let text;\n const compressedObj = {};\n for (let i = 0; i < arr.length; i++) {\n const tagObj = arr[i];\n const property = propName(tagObj);\n let newJpath = \"\";\n if(jPath === undefined) newJpath = property;\n else newJpath = jPath + \".\" + property;\n\n if(property === options.textNodeName){\n if(text === undefined) text = tagObj[property];\n else text += \"\" + tagObj[property];\n }else if(property === undefined){\n continue;\n }else if(tagObj[property]){\n \n let val = compress(tagObj[property], options, newJpath);\n const isLeaf = isLeafTag(val, options);\n\n if(tagObj[\":@\"]){\n assignAttributes( val, tagObj[\":@\"], newJpath, options);\n }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){\n val = val[options.textNodeName];\n }else if(Object.keys(val).length === 0){\n if(options.alwaysCreateTextNode) val[options.textNodeName] = \"\";\n else val = \"\";\n }\n\n if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) {\n if(!Array.isArray(compressedObj[property])) {\n compressedObj[property] = [ compressedObj[property] ];\n }\n compressedObj[property].push(val);\n }else{\n //TODO: if a node is not an array, then check if it should be an array\n //also determine if it is a leaf node\n if (options.isArray(property, newJpath, isLeaf )) {\n compressedObj[property] = [val];\n }else{\n compressedObj[property] = val;\n }\n }\n }\n \n }\n // if(text && text.length > 0) compressedObj[options.textNodeName] = text;\n if(typeof text === \"string\"){\n if(text.length > 0) compressedObj[options.textNodeName] = text;\n }else if(text !== undefined) compressedObj[options.textNodeName] = text;\n return compressedObj;\n}\n\nfunction propName(obj){\n const keys = Object.keys(obj);\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i];\n if(key !== \":@\") return key;\n }\n}\n\nfunction assignAttributes(obj, attrMap, jpath, options){\n if (attrMap) {\n const keys = Object.keys(attrMap);\n const len = keys.length; //don't make it inline\n for (let i = 0; i < len; i++) {\n const atrrName = keys[i];\n if (options.isArray(atrrName, jpath + \".\" + atrrName, true, true)) {\n obj[atrrName] = [ attrMap[atrrName] ];\n } else {\n obj[atrrName] = attrMap[atrrName];\n }\n }\n }\n}\n\nfunction isLeafTag(obj, options){\n const { textNodeName } = options;\n const propCount = Object.keys(obj).length;\n \n if (propCount === 0) {\n return true;\n }\n\n if (\n propCount === 1 &&\n (obj[textNodeName] || typeof obj[textNodeName] === \"boolean\" || obj[textNodeName] === 0)\n ) {\n return true;\n }\n\n return false;\n}\nexports.prettify = prettify;\n","'use strict';\n\nclass XmlNode{\n constructor(tagname) {\n this.tagname = tagname;\n this.child = []; //nested tags, text, cdata, comments in order\n this[\":@\"] = {}; //attributes map\n }\n add(key,val){\n // this.child.push( {name : key, val: val, isCdata: isCdata });\n if(key === \"__proto__\") key = \"#__proto__\";\n this.child.push( {[key]: 
val });\n }\n addChild(node) {\n if(node.tagname === \"__proto__\") node.tagname = \"#__proto__\";\n if(node[\":@\"] && Object.keys(node[\":@\"]).length > 0){\n this.child.push( { [node.tagname]: node.child, [\":@\"]: node[\":@\"] });\n }else{\n this.child.push( { [node.tagname]: node.child });\n }\n };\n};\n\n\nmodule.exports = XmlNode;","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0;\nconst url_1 = require(\"url\");\nconst util_1 = require(\"./util\");\nconst extend_1 = __importDefault(require(\"extend\"));\n/**\n * Support `instanceof` operator for `GaxiosError`s in different versions of this library.\n *\n * @see {@link GaxiosError[Symbol.hasInstance]}\n */\nexports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`);\n/* eslint-disable-next-line @typescript-eslint/no-explicit-any */\nclass GaxiosError extends Error {\n /**\n * Support `instanceof` operator for `GaxiosError` across builds/duplicated files.\n *\n * @see {@link GAXIOS_ERROR_SYMBOL}\n * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]}\n */\n static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) {\n if (instance &&\n typeof instance === 'object' &&\n exports.GAXIOS_ERROR_SYMBOL in instance &&\n instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) {\n return true;\n }\n // fallback to native\n return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance);\n }\n constructor(message, config, response, error) {\n var _b;\n super(message);\n this.config = config;\n this.response = response;\n this.error = error;\n /**\n * Support `instanceof` operator for `GaxiosError` across builds/duplicated files.\n *\n * @see {@link GAXIOS_ERROR_SYMBOL}\n * @see {@link GaxiosError[Symbol.hasInstance]}\n * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200}\n * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof}\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior}\n */\n this[_a] = util_1.pkg.version;\n // deep-copy config as we do not want to mutate\n // the existing config for future retries/use\n this.config = (0, extend_1.default)(true, {}, config);\n if (this.response) {\n this.response.config = (0, extend_1.default)(true, {}, this.response.config);\n }\n if (this.response) {\n try {\n this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? 
void 0 : _b.data);\n }\n catch (_c) {\n // best effort - don't throw an error within an error\n // we could set `this.response.config.responseType = 'unknown'`, but\n // that would mutate future calls with this config object.\n }\n this.status = this.response.status;\n }\n if (error && 'code' in error && error.code) {\n this.code = error.code;\n }\n if (config.errorRedactor) {\n config.errorRedactor({\n config: this.config,\n response: this.response,\n });\n }\n }\n}\nexports.GaxiosError = GaxiosError;\nfunction translateData(responseType, data) {\n switch (responseType) {\n case 'stream':\n return data;\n case 'json':\n return JSON.parse(JSON.stringify(data));\n case 'arraybuffer':\n return JSON.parse(Buffer.from(data).toString('utf8'));\n case 'blob':\n return JSON.parse(data.text());\n default:\n return data;\n }\n}\n/**\n * An experimental error redactor.\n *\n * @param config Config to potentially redact properties of\n * @param response Config to potentially redact properties of\n *\n * @experimental\n */\nfunction defaultErrorRedactor(data) {\n const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.';\n function redactHeaders(headers) {\n if (!headers)\n return;\n for (const key of Object.keys(headers)) {\n // any casing of `Authentication`\n if (/^authentication$/.test(key)) {\n headers[key] = REDACT;\n }\n }\n }\n function redactString(obj, key) {\n if (typeof obj === 'object' &&\n obj !== null &&\n typeof obj[key] === 'string') {\n const text = obj[key];\n if (/grant_type=/.test(text) || /assertion=/.test(text)) {\n obj[key] = REDACT;\n }\n }\n }\n function redactObject(obj) {\n if (typeof obj === 'object' && obj !== null) {\n if ('grant_type' in obj) {\n obj['grant_type'] = REDACT;\n }\n if ('assertion' in obj) {\n obj['assertion'] = REDACT;\n }\n }\n }\n if (data.config) {\n redactHeaders(data.config.headers);\n redactString(data.config, 'data');\n redactObject(data.config.data);\n redactString(data.config, 'body');\n redactObject(data.config.body);\n try {\n const url = new url_1.URL(data.config.url || '');\n if (url.searchParams.has('token')) {\n url.searchParams.set('token', REDACT);\n }\n data.config.url = url.toString();\n }\n catch (_b) {\n // ignore error - no need to parse an invalid URL\n }\n }\n if (data.response) {\n defaultErrorRedactor({ config: data.response.config });\n redactHeaders(data.response.headers);\n redactString(data.response, 'data');\n redactObject(data.response.data);\n }\n return data;\n}\nexports.defaultErrorRedactor = defaultErrorRedactor;\n//# sourceMappingURL=common.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Gaxios = void 0;\nconst extend_1 = __importDefault(require(\"extend\"));\nconst https_1 = require(\"https\");\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst querystring_1 = __importDefault(require(\"querystring\"));\nconst is_stream_1 = __importDefault(require(\"is-stream\"));\nconst url_1 = require(\"url\");\nconst common_1 = require(\"./common\");\nconst retry_1 = require(\"./retry\");\nconst https_proxy_agent_1 = require(\"https-proxy-agent\");\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fetch = hasFetch() ? window.fetch : node_fetch_1.default;\nfunction hasWindow() {\n return typeof window !== 'undefined' && !!window;\n}\nfunction hasFetch() {\n return hasWindow() && !!window.fetch;\n}\nfunction hasBuffer() {\n return typeof Buffer !== 'undefined';\n}\nfunction hasHeader(options, header) {\n return !!getHeader(options, header);\n}\nfunction getHeader(options, header) {\n header = header.toLowerCase();\n for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) {\n if (header === key.toLowerCase()) {\n return options.headers[key];\n }\n }\n return undefined;\n}\nlet HttpsProxyAgent;\nfunction loadProxy() {\n var _a, _b, _c, _d;\n const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) ||\n ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) ||\n ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) ||\n ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy);\n if (proxy) {\n HttpsProxyAgent = https_proxy_agent_1.HttpsProxyAgent;\n }\n return proxy;\n}\nloadProxy();\nfunction skipProxy(url) {\n var _a;\n const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy;\n if (!noProxyEnv) {\n return false;\n }\n const noProxyUrls = noProxyEnv.split(',');\n const parsedURL = new url_1.URL(url);\n return !!noProxyUrls.find(url => {\n if (url.startsWith('*.') || url.startsWith('.')) {\n url = url.replace(/^\\*\\./, '.');\n return parsedURL.hostname.endsWith(url);\n }\n else {\n return url === parsedURL.origin || url === parsedURL.hostname;\n }\n });\n}\n// Figure out if we should be using a proxy. 
Only if it's required, load\n// the https-proxy-agent module as it adds startup cost.\nfunction getProxy(url) {\n // If there is a match between the no_proxy env variables and the url, then do not proxy\n if (skipProxy(url)) {\n return undefined;\n // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy\n }\n else {\n return loadProxy();\n }\n}\nclass Gaxios {\n /**\n * The Gaxios class is responsible for making HTTP requests.\n * @param defaults The default set of options to be used for this instance.\n */\n constructor(defaults) {\n this.agentCache = new Map();\n this.defaults = defaults || {};\n }\n /**\n * Perform an HTTP request with the given options.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async request(opts = {}) {\n opts = this.validateOpts(opts);\n return this._request(opts);\n }\n async _defaultAdapter(opts) {\n const fetchImpl = opts.fetchImplementation || fetch;\n const res = (await fetchImpl(opts.url, opts));\n const data = await this.getResponseData(opts, res);\n return this.translateResponse(opts, res, data);\n }\n /**\n * Internal, retryable version of the `request` method.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async _request(opts = {}) {\n var _a;\n try {\n let translatedResponse;\n if (opts.adapter) {\n translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this));\n }\n else {\n translatedResponse = await this._defaultAdapter(opts);\n }\n if (!opts.validateStatus(translatedResponse.status)) {\n if (opts.responseType === 'stream') {\n let response = '';\n await new Promise(resolve => {\n (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => {\n response += chunk;\n });\n (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve);\n });\n translatedResponse.data = response;\n }\n throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse);\n }\n return translatedResponse;\n }\n catch (e) {\n const err = e instanceof common_1.GaxiosError\n ? e\n : new common_1.GaxiosError(e.message, opts, undefined, e);\n const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err);\n if (shouldRetry && config) {\n err.config.retryConfig.currentRetryAttempt =\n config.retryConfig.currentRetryAttempt;\n // The error's config could be redacted - therefore we only want to\n // copy the retry state over to the existing config\n opts.retryConfig = (_a = err.config) === null || _a === void 0 ? 
void 0 : _a.retryConfig;\n return this._request(opts);\n }\n throw err;\n }\n }\n async getResponseData(opts, res) {\n switch (opts.responseType) {\n case 'stream':\n return res.body;\n case 'json': {\n let data = await res.text();\n try {\n data = JSON.parse(data);\n }\n catch (_a) {\n // continue\n }\n return data;\n }\n case 'arraybuffer':\n return res.arrayBuffer();\n case 'blob':\n return res.blob();\n case 'text':\n return res.text();\n default:\n return this.getResponseDataFromContentType(res);\n }\n }\n /**\n * Validates the options, and merges them with defaults.\n * @param opts The original options passed from the client.\n */\n validateOpts(options) {\n const opts = (0, extend_1.default)(true, {}, this.defaults, options);\n if (!opts.url) {\n throw new Error('URL is required.');\n }\n // baseUrl has been deprecated, remove in 2.0\n const baseUrl = opts.baseUrl || opts.baseURL;\n if (baseUrl) {\n opts.url = baseUrl + opts.url;\n }\n opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer;\n if (opts.params && Object.keys(opts.params).length > 0) {\n let additionalQueryParams = opts.paramsSerializer(opts.params);\n if (additionalQueryParams.startsWith('?')) {\n additionalQueryParams = additionalQueryParams.slice(1);\n }\n const prefix = opts.url.includes('?') ? '&' : '?';\n opts.url = opts.url + prefix + additionalQueryParams;\n }\n if (typeof options.maxContentLength === 'number') {\n opts.size = options.maxContentLength;\n }\n if (typeof options.maxRedirects === 'number') {\n opts.follow = options.maxRedirects;\n }\n opts.headers = opts.headers || {};\n if (opts.data) {\n const isFormData = typeof FormData === 'undefined'\n ? false\n : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData;\n if (is_stream_1.default.readable(opts.data)) {\n opts.body = opts.data;\n }\n else if (hasBuffer() && Buffer.isBuffer(opts.data)) {\n // Do not attempt to JSON.stringify() a Buffer:\n opts.body = opts.data;\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n }\n else if (typeof opts.data === 'object') {\n // If www-form-urlencoded content type has been set, but data is\n // provided as an object, serialize the content using querystring:\n if (!isFormData) {\n if (getHeader(opts, 'content-type') ===\n 'application/x-www-form-urlencoded') {\n opts.body = opts.paramsSerializer(opts.data);\n }\n else {\n // } else if (!(opts.data instanceof FormData)) {\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n opts.body = JSON.stringify(opts.data);\n }\n }\n }\n else {\n opts.body = opts.data;\n }\n }\n opts.validateStatus = opts.validateStatus || this.validateStatus;\n opts.responseType = opts.responseType || 'unknown';\n if (!opts.headers['Accept'] && opts.responseType === 'json') {\n opts.headers['Accept'] = 'application/json';\n }\n opts.method = opts.method || 'GET';\n const proxy = getProxy(opts.url);\n if (proxy) {\n if (this.agentCache.has(proxy)) {\n opts.agent = this.agentCache.get(proxy);\n }\n else {\n // Proxy is being used in conjunction with mTLS.\n if (opts.cert && opts.key) {\n const parsedURL = new url_1.URL(proxy);\n opts.agent = new HttpsProxyAgent({\n port: parsedURL.port,\n host: parsedURL.host,\n protocol: parsedURL.protocol,\n cert: opts.cert,\n key: opts.key,\n });\n }\n else {\n opts.agent = new HttpsProxyAgent(proxy);\n }\n this.agentCache.set(proxy, opts.agent);\n }\n }\n else if (opts.cert && opts.key) {\n // Configure client for mTLS:\n if 
(this.agentCache.has(opts.key)) {\n opts.agent = this.agentCache.get(opts.key);\n }\n else {\n opts.agent = new https_1.Agent({\n cert: opts.cert,\n key: opts.key,\n });\n this.agentCache.set(opts.key, opts.agent);\n }\n }\n if (typeof opts.errorRedactor !== 'function' &&\n opts.errorRedactor !== false) {\n opts.errorRedactor = common_1.defaultErrorRedactor;\n }\n return opts;\n }\n /**\n * By default, throw for any non-2xx status code\n * @param status status code from the HTTP response\n */\n validateStatus(status) {\n return status >= 200 && status < 300;\n }\n /**\n * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo)\n * @param params key value pars to encode\n */\n paramsSerializer(params) {\n return querystring_1.default.stringify(params);\n }\n translateResponse(opts, res, data) {\n // headers need to be converted from a map to an obj\n const headers = {};\n res.headers.forEach((value, key) => {\n headers[key] = value;\n });\n return {\n config: opts,\n data: data,\n headers,\n status: res.status,\n statusText: res.statusText,\n // XMLHttpRequestLike\n request: {\n responseURL: res.url,\n },\n };\n }\n /**\n * Attempts to parse a response by looking at the Content-Type header.\n * @param {FetchResponse} response the HTTP response.\n * @returns {Promise} a promise that resolves to the response data.\n */\n async getResponseDataFromContentType(response) {\n let contentType = response.headers.get('Content-Type');\n if (contentType === null) {\n // Maintain existing functionality by calling text()\n return response.text();\n }\n contentType = contentType.toLowerCase();\n if (contentType.includes('application/json')) {\n let data = await response.text();\n try {\n data = JSON.parse(data);\n }\n catch (_a) {\n // continue\n }\n return data;\n }\n else if (contentType.match(/^text\\//)) {\n return response.text();\n }\n else {\n // If the content type is something not easily handled, just return the raw data (blob)\n return response.blob();\n }\n }\n}\nexports.Gaxios = Gaxios;\n//# sourceMappingURL=gaxios.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0;\nconst gaxios_1 = require(\"./gaxios\");\nObject.defineProperty(exports, \"Gaxios\", { enumerable: true, get: function () { return gaxios_1.Gaxios; } });\nvar common_1 = require(\"./common\");\nObject.defineProperty(exports, \"GaxiosError\", { enumerable: true, get: function () { return common_1.GaxiosError; } });\n/**\n * The default instance used when the `request` method is directly\n * invoked.\n */\nexports.instance = new gaxios_1.Gaxios();\n/**\n * Make an HTTP request using the given options.\n * @param opts Options for the request\n */\nasync function request(opts) {\n return exports.instance.request(opts);\n}\nexports.request = request;\n//# sourceMappingURL=index.js.map","\"use strict\";\n// 
Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRetryConfig = void 0;\nasync function getRetryConfig(err) {\n var _a;\n let config = getConfig(err);\n if (!err || !err.config || (!config && !err.config.retry)) {\n return { shouldRetry: false };\n }\n config = config || {};\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n config.retry =\n config.retry === undefined || config.retry === null ? 3 : config.retry;\n config.httpMethodsToRetry = config.httpMethodsToRetry || [\n 'GET',\n 'HEAD',\n 'PUT',\n 'OPTIONS',\n 'DELETE',\n ];\n config.noResponseRetries =\n config.noResponseRetries === undefined || config.noResponseRetries === null\n ? 2\n : config.noResponseRetries;\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n config.statusCodesToRetry = config.statusCodesToRetry || retryRanges;\n // Put the config back into the err\n err.config.retryConfig = config;\n // Determine if we should retry the request\n const shouldRetryFn = config.shouldRetry || shouldRetryRequest;\n if (!(await shouldRetryFn(err))) {\n return { shouldRetry: false, config: err.config };\n }\n // Calculate time to wait with exponential backoff.\n // If this is the first retry, look for a configured retryDelay.\n const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100;\n // Formula: retryDelay + ((2^c - 1 / 2) * 1000)\n const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000;\n // We're going to retry! Incremenent the counter.\n err.config.retryConfig.currentRetryAttempt += 1;\n // Create a promise that invokes the retry after the backOffDelay\n const backoff = config.retryBackoff\n ? 
config.retryBackoff(err, delay)\n : new Promise(resolve => {\n setTimeout(resolve, delay);\n });\n // Notify the user if they added an `onRetryAttempt` handler\n if (config.onRetryAttempt) {\n config.onRetryAttempt(err);\n }\n // Return the promise in which recalls Gaxios to retry the request\n await backoff;\n return { shouldRetry: true, config: err.config };\n}\nexports.getRetryConfig = getRetryConfig;\n/**\n * Determine based on config if we should retry the request.\n * @param err The GaxiosError passed to the interceptor.\n */\nfunction shouldRetryRequest(err) {\n var _a;\n const config = getConfig(err);\n // node-fetch raises an AbortError if signaled:\n // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal\n if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') {\n return false;\n }\n // If there's no config, or retries are disabled, return.\n if (!config || config.retry === 0) {\n return false;\n }\n // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc)\n if (!err.response &&\n (config.currentRetryAttempt || 0) >= config.noResponseRetries) {\n return false;\n }\n // Only retry with configured HttpMethods.\n if (!err.config.method ||\n config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) {\n return false;\n }\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n if (err.response && err.response.status) {\n let isInRange = false;\n for (const [min, max] of config.statusCodesToRetry) {\n const status = err.response.status;\n if (status >= min && status <= max) {\n isInRange = true;\n break;\n }\n }\n if (!isInRange) {\n return false;\n }\n }\n // If we are out of retry attempts, return\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n if (config.currentRetryAttempt >= config.retry) {\n return false;\n }\n return true;\n}\n/**\n * Acquire the raxConfig object from an GaxiosError if available.\n * @param err The Gaxios error with a config object.\n */\nfunction getConfig(err) {\n if (err && err.config && err.config.retryConfig) {\n return err.config.retryConfig;\n }\n return;\n}\n//# sourceMappingURL=retry.js.map","\"use strict\";\n// Copyright 2023 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.pkg = void 0;\nexports.pkg = require('../../package.json');\n//# sourceMappingURL=util.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.req = exports.json = exports.toBuffer = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nasync function toBuffer(stream) {\n let length = 0;\n const chunks = [];\n for await (const chunk of stream) {\n length += chunk.length;\n chunks.push(chunk);\n }\n return Buffer.concat(chunks, length);\n}\nexports.toBuffer = toBuffer;\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nasync function json(stream) {\n const buf = await toBuffer(stream);\n const str = buf.toString('utf8');\n try {\n return JSON.parse(str);\n }\n catch (_err) {\n const err = _err;\n err.message += ` (input: ${str})`;\n throw err;\n }\n}\nexports.json = json;\nfunction req(url, opts = {}) {\n const href = typeof url === 'string' ? url : url.href;\n const req = (href.startsWith('https:') ? https : http).request(url, opts);\n const promise = new Promise((resolve, reject) => {\n req\n .once('response', resolve)\n .once('error', reject)\n .end();\n });\n req.then = promise.then.bind(promise);\n return req;\n}\nexports.req = req;\n//# sourceMappingURL=helpers.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Agent = void 0;\nconst http = __importStar(require(\"http\"));\n__exportStar(require(\"./helpers\"), exports);\nconst INTERNAL = Symbol('AgentBaseInternalState');\nclass Agent extends http.Agent {\n constructor(opts) {\n super(opts);\n this[INTERNAL] = {};\n }\n /**\n * Determine whether this is an `http` or `https` request.\n */\n isSecureEndpoint(options) {\n if (options) {\n // First check the `secureEndpoint` property explicitly, since this\n // means that a parent `Agent` is \"passing through\" to this instance.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof options.secureEndpoint === 'boolean') {\n return options.secureEndpoint;\n }\n // If no explicit `secure` endpoint, check if `protocol` property is\n // set. This will usually be the case since using a full string URL\n // or `URL` instance should be the most common usage.\n if (typeof options.protocol === 'string') {\n return options.protocol === 'https:';\n }\n }\n // Finally, if no `protocol` property was set, then fall back to\n // checking the stack trace of the current call stack, and try to\n // detect the \"https\" module.\n const { stack } = new Error();\n if (typeof stack !== 'string')\n return false;\n return stack\n .split('\\n')\n .some((l) => l.indexOf('(https.js:') !== -1 ||\n l.indexOf('node:https:') !== -1);\n }\n createSocket(req, options, cb) {\n const connectOpts = {\n ...options,\n secureEndpoint: this.isSecureEndpoint(options),\n };\n Promise.resolve()\n .then(() => this.connect(req, connectOpts))\n .then((socket) => {\n if (socket instanceof http.Agent) {\n // @ts-expect-error `addRequest()` isn't defined in `@types/node`\n return socket.addRequest(req, connectOpts);\n }\n this[INTERNAL].currentSocket = socket;\n // @ts-expect-error `createSocket()` isn't defined in `@types/node`\n super.createSocket(req, options, cb);\n }, cb);\n }\n createConnection() {\n const socket = this[INTERNAL].currentSocket;\n this[INTERNAL].currentSocket = undefined;\n if (!socket) {\n throw new Error('No socket was returned in the `connect()` function');\n }\n return socket;\n }\n get defaultPort() {\n return (this[INTERNAL].defaultPort ??\n (this.protocol === 'https:' ? 443 : 80));\n }\n set defaultPort(v) {\n if (this[INTERNAL]) {\n this[INTERNAL].defaultPort = v;\n }\n }\n get protocol() {\n return (this[INTERNAL].protocol ??\n (this.isSecureEndpoint() ? 'https:' : 'http:'));\n }\n set protocol(v) {\n if (this[INTERNAL]) {\n this[INTERNAL].protocol = v;\n }\n }\n}\nexports.Agent = Agent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpsProxyAgent = void 0;\nconst net = __importStar(require(\"net\"));\nconst tls = __importStar(require(\"tls\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst agent_base_1 = require(\"agent-base\");\nconst url_1 = require(\"url\");\nconst parse_proxy_response_1 = require(\"./parse-proxy-response\");\nconst debug = (0, debug_1.default)('https-proxy-agent');\n/**\n * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to\n * the specified \"HTTP(s) proxy server\" in order to proxy HTTPS requests.\n *\n * Outgoing HTTP requests are first tunneled through the proxy server using the\n * `CONNECT` HTTP request method to establish a connection to the proxy server,\n * and then the proxy server connects to the destination target and issues the\n * HTTP request from the proxy server.\n *\n * `https:` requests have their socket connection upgraded to TLS once\n * the connection to the proxy server has been established.\n */\nclass HttpsProxyAgent extends agent_base_1.Agent {\n constructor(proxy, opts) {\n super(opts);\n this.options = { path: undefined };\n this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy;\n this.proxyHeaders = opts?.headers ?? {};\n debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href);\n // Trim off the brackets from IPv6 addresses\n const host = (this.proxy.hostname || this.proxy.host).replace(/^\\[|\\]$/g, '');\n const port = this.proxy.port\n ? parseInt(this.proxy.port, 10)\n : this.proxy.protocol === 'https:'\n ? 443\n : 80;\n this.connectOpts = {\n // Attempt to negotiate http/1.1 for proxy servers that support http/2\n ALPNProtocols: ['http/1.1'],\n ...(opts ? omit(opts, 'headers') : null),\n host,\n port,\n };\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n */\n async connect(req, opts) {\n const { proxy } = this;\n if (!opts.host) {\n throw new TypeError('No \"host\" provided');\n }\n // Create a socket connection to the proxy server.\n let socket;\n if (proxy.protocol === 'https:') {\n debug('Creating `tls.Socket`: %o', this.connectOpts);\n const servername = this.connectOpts.servername || this.connectOpts.host;\n socket = tls.connect({\n ...this.connectOpts,\n servername: servername && net.isIP(servername) ? 
undefined : servername,\n });\n }\n else {\n debug('Creating `net.Socket`: %o', this.connectOpts);\n socket = net.connect(this.connectOpts);\n }\n const headers = typeof this.proxyHeaders === 'function'\n ? this.proxyHeaders()\n : { ...this.proxyHeaders };\n const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host;\n let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\\r\\n`;\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.username || proxy.password) {\n const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;\n headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;\n }\n headers.Host = `${host}:${opts.port}`;\n if (!headers['Proxy-Connection']) {\n headers['Proxy-Connection'] = this.keepAlive\n ? 'Keep-Alive'\n : 'close';\n }\n for (const name of Object.keys(headers)) {\n payload += `${name}: ${headers[name]}\\r\\n`;\n }\n const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket);\n socket.write(`${payload}\\r\\n`);\n const { connect, buffered } = await proxyResponsePromise;\n req.emit('proxyConnect', connect);\n this.emit('proxyConnect', connect, req);\n if (connect.statusCode === 200) {\n req.once('socket', resume);\n if (opts.secureEndpoint) {\n // The proxy is connecting to a TLS server, so upgrade\n // this socket connection to a TLS connection.\n debug('Upgrading socket connection to TLS');\n const servername = opts.servername || opts.host;\n return tls.connect({\n ...omit(opts, 'host', 'path', 'port'),\n socket,\n servername: net.isIP(servername) ? undefined : servername,\n });\n }\n return socket;\n }\n // Some other status code that's not 200... need to re-play the HTTP\n // header \"data\" events onto the socket once the HTTP machinery is\n // attached so that the node core `http` can parse and handle the\n // error status code.\n // Close the original socket, and a new \"fake\" socket is returned\n // instead, so that the proxy doesn't get the HTTP request\n // written to it (which may contain `Authorization` headers or other\n // sensitive data).\n //\n // See: https://hackerone.com/reports/541502\n socket.destroy();\n const fakeSocket = new net.Socket({ writable: false });\n fakeSocket.readable = true;\n // Need to wait for the \"socket\" event to re-play the \"data\" events.\n req.once('socket', (s) => {\n debug('Replaying proxy buffer for failed request');\n (0, assert_1.default)(s.listenerCount('data') > 0);\n // Replay the \"buffered\" Buffer onto the fake `socket`, since at\n // this point the HTTP module machinery has been hooked up for\n // the user.\n s.push(buffered);\n s.push(null);\n });\n return fakeSocket;\n }\n}\nHttpsProxyAgent.protocols = ['http', 'https'];\nexports.HttpsProxyAgent = HttpsProxyAgent;\nfunction resume(socket) {\n socket.resume();\n}\nfunction omit(obj, ...keys) {\n const ret = {};\n let key;\n for (key in obj) {\n if (!keys.includes(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.parseProxyResponse = void 0;\nconst debug_1 = __importDefault(require(\"debug\"));\nconst debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response');\nfunction parseProxyResponse(socket) {\n return new Promise((resolve, reject) => {\n // we need to buffer any HTTP traffic that happens with the proxy before we get\n // the CONNECT response, so that if the response is anything other than an \"200\"\n // response code, then we can re-play the \"data\" events on the socket once the\n // HTTP parser is hooked up...\n let buffersLength = 0;\n const buffers = [];\n function read() {\n const b = socket.read();\n if (b)\n ondata(b);\n else\n socket.once('readable', read);\n }\n function cleanup() {\n socket.removeListener('end', onend);\n socket.removeListener('error', onerror);\n socket.removeListener('readable', read);\n }\n function onend() {\n cleanup();\n debug('onend');\n reject(new Error('Proxy connection ended before receiving CONNECT response'));\n }\n function onerror(err) {\n cleanup();\n debug('onerror %o', err);\n reject(err);\n }\n function ondata(b) {\n buffers.push(b);\n buffersLength += b.length;\n const buffered = Buffer.concat(buffers, buffersLength);\n const endOfHeaders = buffered.indexOf('\\r\\n\\r\\n');\n if (endOfHeaders === -1) {\n // keep buffering\n debug('have not received end of HTTP headers yet...');\n read();\n return;\n }\n const headerParts = buffered\n .slice(0, endOfHeaders)\n .toString('ascii')\n .split('\\r\\n');\n const firstLine = headerParts.shift();\n if (!firstLine) {\n socket.destroy();\n return reject(new Error('No header received from proxy CONNECT response'));\n }\n const firstLineParts = firstLine.split(' ');\n const statusCode = +firstLineParts[1];\n const statusText = firstLineParts.slice(2).join(' ');\n const headers = {};\n for (const header of headerParts) {\n if (!header)\n continue;\n const firstColon = header.indexOf(':');\n if (firstColon === -1) {\n socket.destroy();\n return reject(new Error(`Invalid header from proxy CONNECT response: \"${header}\"`));\n }\n const key = header.slice(0, firstColon).toLowerCase();\n const value = header.slice(firstColon + 1).trimStart();\n const current = headers[key];\n if (typeof current === 'string') {\n headers[key] = [current, value];\n }\n else if (Array.isArray(current)) {\n current.push(value);\n }\n else {\n headers[key] = value;\n }\n }\n debug('got proxy server response: %o %o', firstLine, headers);\n cleanup();\n resolve({\n connect: {\n statusCode,\n statusText,\n headers,\n },\n buffered,\n });\n }\n socket.on('error', onerror);\n socket.on('end', onend);\n read();\n });\n}\nexports.parseProxyResponse = parseProxyResponse;\n//# sourceMappingURL=parse-proxy-response.js.map","\"use strict\";\n/**\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true 
});\nexports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\n/**\n * Known paths unique to Google Compute Engine Linux instances\n */\nexports.GCE_LINUX_BIOS_PATHS = {\n BIOS_DATE: '/sys/class/dmi/id/bios_date',\n BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor',\n};\nconst GCE_MAC_ADDRESS_REGEX = /^42:01/;\n/**\n * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance).\n *\n * Uses the:\n * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}.\n *\n * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise.\n */\nfunction isGoogleCloudServerless() {\n /**\n * `CLOUD_RUN_JOB` is used for Cloud Run Jobs\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n *\n * `FUNCTION_NAME` is used in older Cloud Functions environments:\n * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}.\n *\n * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments:\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}.\n */\n const isGFEnvironment = process.env.CLOUD_RUN_JOB ||\n process.env.FUNCTION_NAME ||\n process.env.K_SERVICE;\n return !!isGFEnvironment;\n}\nexports.isGoogleCloudServerless = isGoogleCloudServerless;\n/**\n * Determines if the process is running on a Linux Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngineLinux() {\n if ((0, os_1.platform)() !== 'linux')\n return false;\n try {\n // ensure this file exist\n (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE);\n // ensure this file exist and matches\n const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8');\n return /Google/.test(biosVendor);\n }\n catch (_a) {\n return false;\n }\n}\nexports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux;\n/**\n * Determines if the process is running on a Google Compute Engine instance with a known\n * MAC address.\n *\n * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise.\n */\nfunction isGoogleComputeEngineMACAddress() {\n const interfaces = (0, os_1.networkInterfaces)();\n for (const item of Object.values(interfaces)) {\n if (!item)\n continue;\n for (const { mac } of item) {\n if (GCE_MAC_ADDRESS_REGEX.test(mac)) {\n return true;\n }\n }\n }\n return false;\n}\nexports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress;\n/**\n * Determines if the process is running on a Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngine() {\n return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress();\n}\nexports.isGoogleComputeEngine = isGoogleComputeEngine;\n/**\n * Determines if the process is running on Google Cloud Platform.\n *\n * @returns 
{boolean} `true` if the process is running on GCP, `false` otherwise.\n */\nfunction detectGCPResidency() {\n return isGoogleCloudServerless() || isGoogleComputeEngine();\n}\nexports.detectGCPResidency = detectGCPResidency;\n//# sourceMappingURL=gcp-residency.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst jsonBigint = require(\"json-bigint\");\nconst gcp_residency_1 = require(\"./gcp-residency\");\nexports.BASE_PATH = '/computeMetadata/v1';\nexports.HOST_ADDRESS = 'http://169.254.169.254';\nexports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.';\nexports.HEADER_NAME = 'Metadata-Flavor';\nexports.HEADER_VALUE = 'Google';\nexports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE });\n/**\n * Metadata server detection override options.\n *\n * Available via `process.env.METADATA_SERVER_DETECTION`.\n */\nexports.METADATA_SERVER_DETECTION = Object.freeze({\n 'assume-present': \"don't try to ping the metadata server, but assume it's present\",\n none: \"don't try to ping the metadata server, but don't try to use it either\",\n 'bios-only': \"treat the result of a BIOS probe as canonical (don't fall back to pinging)\",\n 'ping-only': 'skip the BIOS probe, and go straight to pinging',\n});\n/**\n * Returns the base URL while taking into account the GCE_METADATA_HOST\n * environment variable if it exists.\n *\n * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1.\n */\nfunction getBaseUrl(baseUrl) {\n if (!baseUrl) {\n baseUrl =\n process.env.GCE_METADATA_IP ||\n process.env.GCE_METADATA_HOST ||\n exports.HOST_ADDRESS;\n }\n // If no scheme is provided default to HTTP:\n if (!/^https?:\\/\\//.test(baseUrl)) {\n baseUrl = `http://${baseUrl}`;\n }\n return new URL(exports.BASE_PATH, baseUrl).href;\n}\n// Accepts an options object passed from the user to the API. In previous\n// versions of the API, it referred to a `Request` or an `Axios` request\n// options object. Now it refers to an object with very limited property\n// names. 
This is here to help ensure users don't pass invalid options when\n// they upgrade from 0.4 to 0.5 to 0.8.\nfunction validate(options) {\n Object.keys(options).forEach(key => {\n switch (key) {\n case 'params':\n case 'property':\n case 'headers':\n break;\n case 'qs':\n throw new Error(\"'qs' is not a valid configuration option. Please use 'params' instead.\");\n default:\n throw new Error(`'${key}' is not a valid configuration option.`);\n }\n });\n}\nasync function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) {\n let metadataKey = '';\n let params = {};\n let headers = {};\n if (typeof type === 'object') {\n const metadataAccessor = type;\n metadataKey = metadataAccessor.metadataKey;\n params = metadataAccessor.params || params;\n headers = metadataAccessor.headers || headers;\n noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries;\n fastFail = metadataAccessor.fastFail || fastFail;\n }\n else {\n metadataKey = type;\n }\n if (typeof options === 'string') {\n metadataKey += `/${options}`;\n }\n else {\n validate(options);\n if (options.property) {\n metadataKey += `/${options.property}`;\n }\n headers = options.headers || headers;\n params = options.params || params;\n }\n try {\n const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request;\n const res = await requestMethod({\n url: `${getBaseUrl()}/${metadataKey}`,\n headers: { ...exports.HEADERS, ...headers },\n retryConfig: { noResponseRetries },\n params,\n responseType: 'text',\n timeout: requestTimeout(),\n });\n // NOTE: node.js converts all incoming headers to lower case.\n if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) {\n throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`);\n }\n else if (!res.data) {\n throw new Error('Invalid response from the metadata service');\n }\n if (typeof res.data === 'string') {\n try {\n return jsonBigint.parse(res.data);\n }\n catch (_a) {\n /* ignore */\n }\n }\n return res.data;\n }\n catch (e) {\n const err = e;\n if (err.response && err.response.status !== 200) {\n err.message = `Unsuccessful response status code. ${err.message}`;\n }\n throw e;\n }\n}\nasync function fastFailMetadataRequest(options) {\n const secondaryOptions = {\n ...options,\n url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)),\n };\n // We race a connection between DNS/IP to metadata server. There are a couple\n // reasons for this:\n //\n // 1. the DNS is slow in some GCP environments; by checking both, we might\n // detect the runtime environment signficantly faster.\n // 2. 
we can't just check the IP, which is tarpitted and slow to respond\n // on a user's local machine.\n //\n // Additional logic has been added to make sure that we don't create an\n // unhandled rejection in scenarios where a failure happens sometime\n // after a success.\n //\n // Note, however, if a failure happens prior to a success, a rejection should\n // occur, this is for folks running locally.\n //\n let responded = false;\n const r1 = (0, gaxios_1.request)(options)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r2;\n }\n else {\n responded = true;\n throw err;\n }\n });\n const r2 = (0, gaxios_1.request)(secondaryOptions)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r1;\n }\n else {\n responded = true;\n throw err;\n }\n });\n return Promise.race([r1, r2]);\n}\n/**\n * Obtain metadata for the current GCE instance.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const serviceAccount: {} = await instance('service-accounts/');\n * const serviceAccountEmail: string = await instance('service-accounts/default/email');\n * ```\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction instance(options) {\n return metadataAccessor('instance', options);\n}\nexports.instance = instance;\n/**\n * Obtain metadata for the current GCP project.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const projectId: string = await project('project-id');\n * const numericProjectId: number = await project('numeric-project-id');\n * ```\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction project(options) {\n return metadataAccessor('project', options);\n}\nexports.project = project;\n/**\n * Obtain metadata for the current universe.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const universeDomain: string = await universe('universe_domain');\n * ```\n */\nfunction universe(options) {\n return metadataAccessor('universe', options);\n}\nexports.universe = universe;\n/**\n * Retrieve metadata items in parallel.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const data = await bulk([\n * {\n * metadataKey: 'instance',\n * },\n * {\n * metadataKey: 'project/project-id',\n * },\n * ] as const);\n *\n * // data.instance;\n * // data['project/project-id'];\n * ```\n *\n * @param properties The metadata properties to retrieve\n * @returns The metadata in `metadatakey:value` format\n */\nasync function bulk(properties) {\n const r = {};\n await Promise.all(properties.map(item => {\n return (async () => {\n const res = await metadataAccessor(item);\n const key = item.metadataKey;\n r[key] = res;\n })();\n }));\n return r;\n}\nexports.bulk = bulk;\n/*\n * How many times should we retry detecting GCP environment.\n */\nfunction detectGCPAvailableRetries() {\n return process.env.DETECT_GCP_RETRIES\n ? 
Number(process.env.DETECT_GCP_RETRIES)\n : 0;\n}\nlet cachedIsAvailableResponse;\n/**\n * Determine if the metadata server is currently available.\n */\nasync function isAvailable() {\n if (process.env.METADATA_SERVER_DETECTION) {\n const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase();\n if (!(value in exports.METADATA_SERVER_DETECTION)) {\n throw new RangeError(`Unknown \\`METADATA_SERVER_DETECTION\\` env variable. Got \\`${value}\\`, but it should be \\`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\\`, or unset`);\n }\n switch (value) {\n case 'assume-present':\n return true;\n case 'none':\n return false;\n case 'bios-only':\n return getGCPResidency();\n case 'ping-only':\n // continue, we want to ping the server\n }\n }\n try {\n // If a user is instantiating several GCP libraries at the same time,\n // this may result in multiple calls to isAvailable(), to detect the\n // runtime environment. We use the same promise for each of these calls\n // to reduce the network load.\n if (cachedIsAvailableResponse === undefined) {\n cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), \n // If the default HOST_ADDRESS has been overridden, we should not\n // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in\n // a non-GCP environment):\n !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST));\n }\n await cachedIsAvailableResponse;\n return true;\n }\n catch (e) {\n const err = e;\n if (process.env.DEBUG_AUTH) {\n console.info(err);\n }\n if (err.type === 'request-timeout') {\n // If running in a GCP environment, metadata endpoint should return\n // within ms.\n return false;\n }\n if (err.response && err.response.status === 404) {\n return false;\n }\n else {\n if (!(err.response && err.response.status === 404) &&\n // A warning is emitted if we see an unexpected err.code, or err.code\n // is not populated:\n (!err.code ||\n ![\n 'EHOSTDOWN',\n 'EHOSTUNREACH',\n 'ENETUNREACH',\n 'ENOENT',\n 'ENOTFOUND',\n 'ECONNREFUSED',\n ].includes(err.code))) {\n let code = 'UNKNOWN';\n if (err.code)\n code = err.code;\n process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning');\n }\n // Failure to resolve the metadata service means that it is not available.\n return false;\n }\n }\n}\nexports.isAvailable = isAvailable;\n/**\n * reset the memoized isAvailable() lookup.\n */\nfunction resetIsAvailableCache() {\n cachedIsAvailableResponse = undefined;\n}\nexports.resetIsAvailableCache = resetIsAvailableCache;\n/**\n * A cache for the detected GCP Residency.\n */\nexports.gcpResidencyCache = null;\n/**\n * Detects GCP Residency.\n * Caches results to reduce costs for subsequent calls.\n *\n * @see setGCPResidency for setting\n */\nfunction getGCPResidency() {\n if (exports.gcpResidencyCache === null) {\n setGCPResidency();\n }\n return exports.gcpResidencyCache;\n}\nexports.getGCPResidency = getGCPResidency;\n/**\n * Sets the detected GCP Residency.\n * Useful for forcing metadata server detection behavior.\n *\n * Set `null` to autodetect the environment (default behavior).\n * @see getGCPResidency for getting\n */\nfunction setGCPResidency(value = null) {\n exports.gcpResidencyCache = value !== null ? 
value : (0, gcp_residency_1.detectGCPResidency)();\n}\nexports.setGCPResidency = setGCPResidency;\n/**\n * Obtain the timeout for requests to the metadata server.\n *\n * In certain environments and conditions requests can take longer than\n * the default timeout to complete. This function will determine the\n * appropriate timeout based on the environment.\n *\n * @returns {number} a request timeout duration in milliseconds.\n */\nfunction requestTimeout() {\n return getGCPResidency() ? 0 : 3000;\n}\nexports.requestTimeout = requestTimeout;\n__exportStar(require(\"./gcp-residency\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2012 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0;\nconst events_1 = require(\"events\");\nconst transporters_1 = require(\"../transporters\");\nconst util_1 = require(\"../util\");\n/**\n * The default cloud universe\n *\n * @see {@link AuthJSONOptions.universe_domain}\n */\nexports.DEFAULT_UNIVERSE = 'googleapis.com';\n/**\n * The default {@link AuthClientOptions.eagerRefreshThresholdMillis}\n */\nexports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000;\nclass AuthClient extends events_1.EventEmitter {\n constructor(opts = {}) {\n var _a, _b, _c, _d, _e;\n super();\n this.credentials = {};\n this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS;\n this.forceRefreshOnFailure = false;\n this.universeDomain = exports.DEFAULT_UNIVERSE;\n const options = (0, util_1.originalOrCamelOptions)(opts);\n // Shared auth options\n this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null;\n this.quotaProjectId = options.get('quota_project_id');\n this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {};\n this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE;\n // Shared client options\n this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter();\n if (opts.transporterOptions) {\n this.transporter.defaults = opts.transporterOptions;\n }\n if (opts.eagerRefreshThresholdMillis) {\n this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false;\n }\n /**\n * Sets the auth credentials.\n */\n setCredentials(credentials) {\n this.credentials = credentials;\n }\n /**\n * Append additional headers, e.g., x-goog-user-project, shared across the\n * classes inheriting AuthClient. 
This method should be used by any method\n * that overrides getRequestMetadataAsync(), which is a shared helper for\n * setting request information in both gRPC and HTTP API calls.\n *\n * @param headers object to append additional headers to.\n */\n addSharedMetadataHeaders(headers) {\n // quota_project_id, stored in application_default_credentials.json, is set in\n // the x-goog-user-project header, to indicate an alternate account for\n // billing and quota:\n if (!headers['x-goog-user-project'] && // don't override a value the user sets.\n this.quotaProjectId) {\n headers['x-goog-user-project'] = this.quotaProjectId;\n }\n return headers;\n }\n}\nexports.AuthClient = AuthClient;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsClient = void 0;\nconst awsrequestsigner_1 = require(\"./awsrequestsigner\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n/**\n * AWS external account client. This is used for AWS workloads, where\n * AWS STS GetCallerIdentity serialized signed requests are exchanged for\n * GCP access token.\n */\nclass AwsClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates an AwsClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid AWS credential.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n this.environmentId = options.credential_source.environment_id;\n // This is only required if the AWS region is not available in the\n // AWS_REGION or AWS_DEFAULT_REGION environment variables.\n this.regionUrl = options.credential_source.region_url;\n // This is only required if AWS security credentials are not available in\n // environment variables.\n this.securityCredentialsUrl = options.credential_source.url;\n this.regionalCredVerificationUrl =\n options.credential_source.regional_cred_verification_url;\n this.imdsV2SessionTokenUrl =\n options.credential_source.imdsv2_session_token_url;\n this.awsRequestSigner = null;\n this.region = '';\n this.credentialSourceType = 'aws';\n // Data validators.\n this.validateEnvironmentId();\n }\n validateEnvironmentId() {\n var _a;\n const match = (_a = this.environmentId) === null || _a === void 0 ? 
void 0 : _a.match(/^(aws)(\\d+)$/);\n if (!match || !this.regionalCredVerificationUrl) {\n throw new Error('No valid AWS \"credential_source\" provided');\n }\n else if (parseInt(match[2], 10) !== 1) {\n throw new Error(`aws version \"${match[2]}\" is not supported in the current build.`);\n }\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this uses a serialized AWS signed request to the STS GetCallerIdentity\n * endpoint.\n * The logic is summarized as:\n * 1. If imdsv2_session_token_url is provided in the credential source, then\n * fetch the aws session token and include it in the headers of the\n * metadata requests. This is a requirement for IDMSv2 but optional\n * for IDMSv1.\n * 2. Retrieve AWS region from availability-zone.\n * 3a. Check AWS credentials in environment variables. If not found, get\n * from security-credentials endpoint.\n * 3b. Get AWS credentials from security-credentials endpoint. In order\n * to retrieve this, the AWS role needs to be determined by calling\n * security-credentials endpoint without any argument. Then the\n * credentials can be retrieved via: security-credentials/role_name\n * 4. Generate the signed request to AWS STS GetCallerIdentity action.\n * 5. Inject x-goog-cloud-target-resource into header and serialize the\n * signed request. This will be the subject-token to pass to GCP STS.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Initialize AWS request signer if not already initialized.\n if (!this.awsRequestSigner) {\n const metadataHeaders = {};\n // Only retrieve the IMDSv2 session token if both the security credentials and region are\n // not retrievable through the environment.\n // The credential config contains all the URLs by default but clients may be running this\n // where the metadata server is not available and returning the credentials through the environment.\n // Removing this check may break them.\n if (this.shouldUseMetadataServer() && this.imdsV2SessionTokenUrl) {\n metadataHeaders['x-aws-ec2-metadata-token'] =\n await this.getImdsV2SessionToken();\n }\n this.region = await this.getAwsRegion(metadataHeaders);\n this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => {\n // Check environment variables for permanent credentials first.\n // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html\n if (this.securityCredentialsFromEnv) {\n return this.securityCredentialsFromEnv;\n }\n // Since the role on a VM can change, we don't need to cache it.\n const roleName = await this.getAwsRoleName(metadataHeaders);\n // Temporary credentials typically last for several hours.\n // Expiration is returned in response.\n // Consider future optimization of this logic to cache AWS tokens\n // until their natural expiration.\n const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders);\n return {\n accessKeyId: awsCreds.AccessKeyId,\n secretAccessKey: awsCreds.SecretAccessKey,\n token: awsCreds.Token,\n };\n }, this.region);\n }\n // Generate signed request to AWS STS GetCallerIdentity API.\n // Use the required regional endpoint. 
Otherwise, the request will fail.\n const options = await this.awsRequestSigner.getRequestOptions({\n url: this.regionalCredVerificationUrl.replace('{region}', this.region),\n method: 'POST',\n });\n // The GCP STS endpoint expects the headers to be formatted as:\n // [\n // {key: 'x-amz-date', value: '...'},\n // {key: 'Authorization', value: '...'},\n // ...\n // ]\n // And then serialized as:\n // encodeURIComponent(JSON.stringify({\n // url: '...',\n // method: 'POST',\n // headers: [{key: 'x-amz-date', value: '...'}, ...]\n // }))\n const reformattedHeader = [];\n const extendedHeaders = Object.assign({\n // The full, canonical resource name of the workload identity pool\n // provider, with or without the HTTPS prefix.\n // Including this header as part of the signature is recommended to\n // ensure data integrity.\n 'x-goog-cloud-target-resource': this.audience,\n }, options.headers);\n // Reformat header to GCP STS expected format.\n for (const key in extendedHeaders) {\n reformattedHeader.push({\n key,\n value: extendedHeaders[key],\n });\n }\n // Serialize the reformatted signed request.\n return encodeURIComponent(JSON.stringify({\n url: options.url,\n method: options.method,\n headers: reformattedHeader,\n }));\n }\n /**\n * @return A promise that resolves with the IMDSv2 Session Token.\n */\n async getImdsV2SessionToken() {\n const opts = {\n url: this.imdsV2SessionTokenUrl,\n method: 'PUT',\n responseType: 'text',\n headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' },\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the current AWS region.\n */\n async getAwsRegion(headers) {\n // Priority order for region determination:\n // AWS_REGION > AWS_DEFAULT_REGION > metadata server.\n if (this.regionFromEnv) {\n return this.regionFromEnv;\n }\n if (!this.regionUrl) {\n throw new Error('Unable to determine AWS region due to missing ' +\n '\"options.credential_source.region_url\"');\n }\n const opts = {\n url: this.regionUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n // Remove last character. For example, if us-east-2b is returned,\n // the region would be us-east-2.\n return response.data.substr(0, response.data.length - 1);\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the assigned role to the current\n * AWS VM. 
This is needed for calling the security-credentials endpoint.\n */\n async getAwsRoleName(headers) {\n if (!this.securityCredentialsUrl) {\n throw new Error('Unable to determine AWS role name due to missing ' +\n '\"options.credential_source.url\"');\n }\n const opts = {\n url: this.securityCredentialsUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * Retrieves the temporary AWS credentials by calling the security-credentials\n * endpoint as specified in the `credential_source` object.\n * @param roleName The role attached to the current VM.\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the temporary AWS credentials\n * needed for creating the GetCallerIdentity signed request.\n */\n async getAwsSecurityCredentials(roleName, headers) {\n const response = await this.transporter.request({\n url: `${this.securityCredentialsUrl}/${roleName}`,\n responseType: 'json',\n headers: headers,\n });\n return response.data;\n }\n shouldUseMetadataServer() {\n // The metadata server must be used when either the AWS region or AWS security\n // credentials cannot be retrieved through their defined environment variables.\n return !this.regionFromEnv || !this.securityCredentialsFromEnv;\n }\n get regionFromEnv() {\n // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION.\n // Only one is required.\n return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null);\n }\n get securityCredentialsFromEnv() {\n // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required.\n if (process.env['AWS_ACCESS_KEY_ID'] &&\n process.env['AWS_SECRET_ACCESS_KEY']) {\n return {\n accessKeyId: process.env['AWS_ACCESS_KEY_ID'],\n secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'],\n token: process.env['AWS_SESSION_TOKEN'],\n };\n }\n return null;\n }\n}\nexports.AwsClient = AwsClient;\nAwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254';\nAwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254';\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsRequestSigner = void 0;\nconst crypto_1 = require(\"../crypto/crypto\");\n/** AWS Signature Version 4 signing algorithm identifier. 
*/\nconst AWS_ALGORITHM = 'AWS4-HMAC-SHA256';\n/**\n * The termination string for the AWS credential scope value as defined in\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n */\nconst AWS_REQUEST_TYPE = 'aws4_request';\n/**\n * Implements an AWS API request signer based on the AWS Signature Version 4\n * signing process.\n * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html\n */\nclass AwsRequestSigner {\n /**\n * Instantiates an AWS API request signer used to send authenticated signed\n * requests to AWS APIs based on the AWS Signature Version 4 signing process.\n * This also provides a mechanism to generate the signed request without\n * sending it.\n * @param getCredentials A mechanism to retrieve AWS security credentials\n * when needed.\n * @param region The AWS region to use.\n */\n constructor(getCredentials, region) {\n this.getCredentials = getCredentials;\n this.region = region;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Generates the signed request for the provided HTTP request for calling\n * an AWS API. This follows the steps described at:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html\n * @param amzOptions The AWS request options that need to be signed.\n * @return A promise that resolves with the GaxiosOptions containing the\n * signed HTTP request parameters.\n */\n async getRequestOptions(amzOptions) {\n if (!amzOptions.url) {\n throw new Error('\"url\" is required in \"amzOptions\"');\n }\n // Stringify JSON requests. This will be set in the request body of the\n // generated signed request.\n const requestPayloadData = typeof amzOptions.data === 'object'\n ? JSON.stringify(amzOptions.data)\n : amzOptions.data;\n const url = amzOptions.url;\n const method = amzOptions.method || 'GET';\n const requestPayload = amzOptions.body || requestPayloadData;\n const additionalAmzHeaders = amzOptions.headers;\n const awsSecurityCredentials = await this.getCredentials();\n const uri = new URL(url);\n const headerMap = await generateAuthenticationHeaderMap({\n crypto: this.crypto,\n host: uri.host,\n canonicalUri: uri.pathname,\n canonicalQuerystring: uri.search.substr(1),\n method,\n region: this.region,\n securityCredentials: awsSecurityCredentials,\n requestPayload,\n additionalAmzHeaders,\n });\n // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc.\n const headers = Object.assign(\n // Add x-amz-date if available.\n headerMap.amzDate ? 
{ 'x-amz-date': headerMap.amzDate } : {}, {\n Authorization: headerMap.authorizationHeader,\n host: uri.host,\n }, additionalAmzHeaders || {});\n if (awsSecurityCredentials.token) {\n Object.assign(headers, {\n 'x-amz-security-token': awsSecurityCredentials.token,\n });\n }\n const awsSignedReq = {\n url,\n method: method,\n headers,\n };\n if (typeof requestPayload !== 'undefined') {\n awsSignedReq.body = requestPayload;\n }\n return awsSignedReq;\n }\n}\nexports.AwsRequestSigner = AwsRequestSigner;\n/**\n * Creates the HMAC-SHA256 hash of the provided message using the\n * provided key.\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The HMAC-SHA256 key to use.\n * @param msg The message to hash.\n * @return The computed hash bytes.\n */\nasync function sign(crypto, key, msg) {\n return await crypto.signWithHmacSha256(key, msg);\n}\n/**\n * Calculates the signing key used to calculate the signature for\n * AWS Signature Version 4 based on:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The AWS secret access key.\n * @param dateStamp The '%Y%m%d' date format.\n * @param region The AWS region.\n * @param serviceName The AWS service name, eg. sts.\n * @return The signing key bytes.\n */\nasync function getSigningKey(crypto, key, dateStamp, region, serviceName) {\n const kDate = await sign(crypto, `AWS4${key}`, dateStamp);\n const kRegion = await sign(crypto, kDate, region);\n const kService = await sign(crypto, kRegion, serviceName);\n const kSigning = await sign(crypto, kService, 'aws4_request');\n return kSigning;\n}\n/**\n * Generates the authentication header map needed for generating the AWS\n * Signature Version 4 signed request.\n *\n * @param option The options needed to compute the authentication header map.\n * @return The AWS authentication header map which constitutes of the following\n * components: amz-date, authorization header and canonical query string.\n */\nasync function generateAuthenticationHeaderMap(options) {\n const additionalAmzHeaders = options.additionalAmzHeaders || {};\n const requestPayload = options.requestPayload || '';\n // iam.amazonaws.com host => iam service.\n // sts.us-east-2.amazonaws.com => sts service.\n const serviceName = options.host.split('.')[0];\n const now = new Date();\n // Format: '%Y%m%dT%H%M%SZ'.\n const amzDate = now\n .toISOString()\n .replace(/[-:]/g, '')\n .replace(/\\.[0-9]+/, '');\n // Format: '%Y%m%d'.\n const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, '');\n // Change all additional headers to be lower case.\n const reformattedAdditionalAmzHeaders = {};\n Object.keys(additionalAmzHeaders).forEach(key => {\n reformattedAdditionalAmzHeaders[key.toLowerCase()] =\n additionalAmzHeaders[key];\n });\n // Add AWS token if available.\n if (options.securityCredentials.token) {\n reformattedAdditionalAmzHeaders['x-amz-security-token'] =\n options.securityCredentials.token;\n }\n // Header keys need to be sorted alphabetically.\n const amzHeaders = Object.assign({\n host: options.host,\n }, \n // Previously the date was not fixed with x-amz- and could be provided manually.\n // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req\n reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders);\n let canonicalHeaders = '';\n const signedHeadersList = Object.keys(amzHeaders).sort();\n signedHeadersList.forEach(key => {\n canonicalHeaders += `${key}:${amzHeaders[key]}\\n`;\n });\n const signedHeaders = signedHeadersList.join(';');\n const payloadHash = await options.crypto.sha256DigestHex(requestPayload);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html\n const canonicalRequest = `${options.method}\\n` +\n `${options.canonicalUri}\\n` +\n `${options.canonicalQuerystring}\\n` +\n `${canonicalHeaders}\\n` +\n `${signedHeaders}\\n` +\n `${payloadHash}`;\n const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`;\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n const stringToSign = `${AWS_ALGORITHM}\\n` +\n `${amzDate}\\n` +\n `${credentialScope}\\n` +\n (await options.crypto.sha256DigestHex(canonicalRequest));\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName);\n const signature = await sign(options.crypto, signingKey, stringToSign);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html\n const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` +\n `${credentialScope}, SignedHeaders=${signedHeaders}, ` +\n `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`;\n return {\n // Do not return x-amz-date if date is available.\n amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate,\n authorizationHeader,\n canonicalQuerystring: options.canonicalQuerystring,\n };\n}\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\nconst util_1 = require(\"../util\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/** The default OAuth scope to request when none is provided. 
*/\nconst DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform';\n/** Default impersonated token lifespan in seconds.*/\nconst DEFAULT_TOKEN_LIFESPAN = 3600;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * The credentials JSON file type for external account clients.\n * There are 3 types of JSON configs:\n * 1. authorized_user => Google end user credential\n * 2. service_account => Google service account credential\n * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s)\n */\nexports.EXTERNAL_ACCOUNT_TYPE = 'external_account';\n/**\n * Cloud resource manager URL used to retrieve project information.\n *\n * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead\n **/\nexports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/';\n/** The workforce audience pattern. */\nconst WORKFORCE_AUDIENCE_PATTERN = '//iam\\\\.googleapis\\\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+';\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst pkg = require('../../../package.json');\n/**\n * For backwards compatibility.\n */\nvar authclient_2 = require(\"./authclient\");\nObject.defineProperty(exports, \"DEFAULT_UNIVERSE\", { enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } });\n/**\n * Base external account client. This is used to instantiate AuthClients for\n * exchanging external account credentials for GCP access token and authorizing\n * requests to GCP APIs.\n * The base class implements common logic for exchanging various type of\n * external credentials for GCP access token. The logic of determining and\n * retrieving the external credential based on the environment and\n * credential_source will be left for the subclasses.\n */\nclass BaseExternalAccountClient extends authclient_1.AuthClient {\n /**\n * Instantiate a BaseExternalAccountClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file. 
The camelCased options\n * are aliases for the snake_cased options.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super({ ...options, ...additionalOptions });\n const opts = (0, util_1.originalOrCamelOptions)(options);\n if (opts.get('type') !== exports.EXTERNAL_ACCOUNT_TYPE) {\n throw new Error(`Expected \"${exports.EXTERNAL_ACCOUNT_TYPE}\" type but ` +\n `received \"${options.type}\"`);\n }\n const clientId = opts.get('client_id');\n const clientSecret = opts.get('client_secret');\n const tokenUrl = opts.get('token_url');\n const subjectTokenType = opts.get('subject_token_type');\n const workforcePoolUserProject = opts.get('workforce_pool_user_project');\n const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url');\n const serviceAccountImpersonation = opts.get('service_account_impersonation');\n const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds');\n this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') ||\n `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`);\n if (clientId) {\n this.clientAuth = {\n confidentialClientType: 'basic',\n clientId,\n clientSecret,\n };\n }\n this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth);\n this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE];\n this.cachedAccessToken = null;\n this.audience = opts.get('audience');\n this.subjectTokenType = subjectTokenType;\n this.workforcePoolUserProject = workforcePoolUserProject;\n const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN);\n if (this.workforcePoolUserProject &&\n !this.audience.match(workforceAudiencePattern)) {\n throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' +\n 'credentials.');\n }\n this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl;\n this.serviceAccountImpersonationLifetime =\n serviceAccountImpersonationLifetime;\n if (this.serviceAccountImpersonationLifetime) {\n this.configLifetimeRequested = true;\n }\n else {\n this.configLifetimeRequested = false;\n this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN;\n }\n this.projectNumber = this.getProjectNumber(this.audience);\n }\n /** The service account email to be impersonated, if available. */\n getServiceAccountEmail() {\n var _a;\n if (this.serviceAccountImpersonationUrl) {\n if (this.serviceAccountImpersonationUrl.length > 256) {\n /**\n * Prevents DOS attacks.\n * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84}\n **/\n throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`);\n }\n // Parse email from URL. The formal looks as follows:\n // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken\n const re = /serviceAccounts\\/(?[^:]+):generateAccessToken$/;\n const result = re.exec(this.serviceAccountImpersonationUrl);\n return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? 
void 0 : _a.email) || null;\n }\n return null;\n }\n /**\n * Provides a mechanism to inject GCP access tokens directly.\n * When the provided credential expires, a new credential, using the\n * external account options, is retrieved.\n * @param credentials The Credentials object to set on the current client.\n */\n setCredentials(credentials) {\n super.setCredentials(credentials);\n this.cachedAccessToken = credentials;\n }\n /**\n * @return A promise that resolves with the current GCP access token\n * response. If the current credential is expired, a new one is retrieved.\n */\n async getAccessToken() {\n // If cached access token is unavailable or expired, force refresh.\n if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return GCP access token in GetAccessTokenResponse format.\n return {\n token: this.cachedAccessToken.access_token,\n res: this.cachedAccessToken.res,\n };\n }\n /**\n * The main authentication interface. It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * The result has the form:\n * { Authorization: 'Bearer ' }\n */\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * @return A promise that resolves with the project ID corresponding to the\n * current workload identity pool or current workforce pool if\n * determinable. 
For workforce pool credential, it returns the project ID\n * corresponding to the workforcePoolUserProject.\n * This is introduced to match the current pattern of using the Auth\n * library:\n * const projectId = await auth.getProjectId();\n * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`;\n * const res = await client.request({ url });\n * The resource may not have permission\n * (resourcemanager.projects.get) to call this API or the required\n * scopes may not be selected:\n * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes\n */\n async getProjectId() {\n const projectNumber = this.projectNumber || this.workforcePoolUserProject;\n if (this.projectId) {\n // Return previously determined project ID.\n return this.projectId;\n }\n else if (projectNumber) {\n // Preferable not to use request() to avoid retrial policies.\n const headers = await this.getRequestHeaders();\n const response = await this.transporter.request({\n headers,\n url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`,\n responseType: 'json',\n });\n this.projectId = response.data.projectId;\n return this.projectId;\n }\n return null;\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * External credentials are exchanged for GCP access tokens via the token\n * exchange endpoint and other settings provided in the client options\n * object.\n * If the service_account_impersonation_url is provided, an additional\n * step to exchange the external account GCP access token for a service\n * account impersonated token is performed.\n * @return A promise that resolves with the fresh GCP access tokens.\n */\n async refreshAccessTokenAsync() {\n // Retrieve the external credential.\n const subjectToken = await this.retrieveSubjectToken();\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n audience: this.audience,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken,\n subjectTokenType: this.subjectTokenType,\n // generateAccessToken requires the provided access token to have\n // scopes:\n // https://www.googleapis.com/auth/iam or\n // https://www.googleapis.com/auth/cloud-platform\n // The new service account access token scopes will match the user\n // provided ones.\n scope: this.serviceAccountImpersonationUrl\n ? [DEFAULT_OAUTH_SCOPE]\n : this.getScopesArray(),\n };\n // Exchange the external credentials for a GCP access token.\n // Client auth is prioritized over passing the workforcePoolUserProject\n // parameter for STS token exchange.\n const additionalOptions = !this.clientAuth && this.workforcePoolUserProject\n ? 
{ userProject: this.workforcePoolUserProject }\n : undefined;\n const additionalHeaders = {\n 'x-goog-api-client': this.getMetricsHeaderValue(),\n };\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions);\n if (this.serviceAccountImpersonationUrl) {\n this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token);\n }\n else if (stsResponse.expires_in) {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: new Date().getTime() + stsResponse.expires_in * 1000,\n res: stsResponse.res,\n };\n }\n else {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n res: stsResponse.res,\n };\n }\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedAccessToken.expiry_date,\n access_token: this.cachedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedAccessToken;\n }\n /**\n * Returns the workload identity pool project number if it is determinable\n * from the audience resource name.\n * @param audience The STS audience used to determine the project number.\n * @return The project number associated with the workload identity pool, if\n * this can be determined from the STS audience field. Otherwise, null is\n * returned.\n */\n getProjectNumber(audience) {\n // STS audience pattern:\n // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...\n const match = audience.match(/\\/projects\\/([^/]+)/);\n if (!match) {\n return null;\n }\n return match[1];\n }\n /**\n * Exchanges an external account GCP access token for a service\n * account impersonated access token using iamcredentials\n * GenerateAccessToken API.\n * @param token The access token to exchange for a service account access\n * token.\n * @return A promise that resolves with the service account impersonated\n * credentials response.\n */\n async getImpersonatedAccessToken(token) {\n const opts = {\n url: this.serviceAccountImpersonationUrl,\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n },\n data: {\n scope: this.getScopesArray(),\n lifetime: this.serviceAccountImpersonationLifetime + 's',\n },\n responseType: 'json',\n };\n const response = await this.transporter.request(opts);\n const successResponse = response.data;\n return {\n access_token: successResponse.accessToken,\n // Convert from ISO format to timestamp.\n expiry_date: new Date(successResponse.expireTime).getTime(),\n res: response,\n };\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param accessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(accessToken) {\n const now = new Date().getTime();\n return accessToken.expiry_date\n ? 
now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n /**\n * @return The list of scopes for the requested GCP access token.\n */\n getScopesArray() {\n // Since scopes can be provided as string or array, the type should\n // be normalized.\n if (typeof this.scopes === 'string') {\n return [this.scopes];\n }\n return this.scopes || [DEFAULT_OAUTH_SCOPE];\n }\n getMetricsHeaderValue() {\n const nodeVersion = process.version.replace(/^v/, '');\n const saImpersonation = this.serviceAccountImpersonationUrl !== undefined;\n const credentialSourceType = this.credentialSourceType\n ? this.credentialSourceType\n : 'unknown';\n return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`;\n }\n}\nexports.BaseExternalAccountClient = BaseExternalAccountClient;\n","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Compute = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst oauth2client_1 = require(\"./oauth2client\");\nclass Compute extends oauth2client_1.OAuth2Client {\n /**\n * Google Compute Engine service account credentials.\n *\n * Retrieve access token from the metadata server.\n * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications\n */\n constructor(options = {}) {\n super(options);\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' };\n this.serviceAccountEmail = options.serviceAccountEmail || 'default';\n this.scopes = Array.isArray(options.scopes)\n ? options.scopes\n : options.scopes\n ? 
[options.scopes]\n : [];\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`;\n let data;\n try {\n const instanceOptions = {\n property: tokenPath,\n };\n if (this.scopes.length > 0) {\n instanceOptions.params = {\n scopes: this.scopes.join(','),\n };\n }\n data = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError) {\n e.message = `Could not refresh access token: ${e.message}`;\n this.wrapError(e);\n }\n throw e;\n }\n const tokens = data;\n if (data && data.expires_in) {\n tokens.expiry_date = new Date().getTime() + data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res: null };\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` +\n `?format=full&audience=${targetAudience}`;\n let idToken;\n try {\n const instanceOptions = {\n property: idTokenPath,\n };\n idToken = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Could not fetch ID token: ${e.message}`;\n }\n throw e;\n }\n return idToken;\n }\n wrapError(e) {\n const res = e.response;\n if (res && res.status) {\n e.status = res.status;\n if (res.status === 403) {\n e.message =\n 'A Forbidden error was returned while attempting to retrieve an access ' +\n 'token for the Compute Engine built-in service account. This may be because the Compute ' +\n 'Engine instance does not have the correct permission scopes specified: ' +\n e.message;\n }\n else if (res.status === 404) {\n e.message =\n 'A Not Found error was returned while attempting to retrieve an access' +\n 'token for the Compute Engine built-in service account. 
This may be because the Compute ' +\n 'Engine instance does not have any permission scopes specified: ' +\n e.message;\n }\n }\n }\n}\nexports.Compute = Compute;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/**\n * The requested token exchange subject_token_type: rfc8693#section-2.1\n */\nconst STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/**\n * The maximum number of access boundary rules a Credential Access Boundary\n * can contain.\n */\nexports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * Defines a set of Google credentials that are downscoped from an existing set\n * of Google OAuth2 credentials. This is useful to restrict the Identity and\n * Access Management (IAM) permissions that a short-lived credential can use.\n * The common pattern of usage is to have a token broker with elevated access\n * generate these downscoped credentials from higher access source credentials\n * and pass the downscoped short-lived access tokens to a token consumer via\n * some secure authenticated channel for limited access to Google Cloud Storage\n * resources.\n */\nclass DownscopedClient extends authclient_1.AuthClient {\n /**\n * Instantiates a downscoped client object using the provided source\n * AuthClient and credential access boundary rules.\n * To downscope permissions of a source AuthClient, a Credential Access\n * Boundary that specifies which resources the new credential can access, as\n * well as an upper bound on the permissions that are available on each\n * resource, has to be defined. A downscoped client can then be instantiated\n * using the source AuthClient and the Credential Access Boundary.\n * @param authClient The source AuthClient to be downscoped based on the\n * provided Credential Access Boundary rules.\n * @param credentialAccessBoundary The Credential Access Boundary which\n * contains a list of access boundary rules. 
Each rule contains information\n * on the resource that the rule applies to, the upper bound of the\n * permissions that are available on that resource and an optional\n * condition to further restrict permissions.\n * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.**\n * Optional additional behavior customization options.\n * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.**\n * Optional quota project id for setting up in the x-goog-user-project header.\n */\n constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) {\n super({ ...additionalOptions, quotaProjectId });\n this.authClient = authClient;\n this.credentialAccessBoundary = credentialAccessBoundary;\n // Check 1-10 Access Boundary Rules are defined within Credential Access\n // Boundary.\n if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) {\n throw new Error('At least one access boundary rule needs to be defined.');\n }\n else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length >\n exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) {\n throw new Error('The provided access boundary has more than ' +\n `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`);\n }\n // Check at least one permission should be defined in each Access Boundary\n // Rule.\n for (const rule of credentialAccessBoundary.accessBoundary\n .accessBoundaryRules) {\n if (rule.availablePermissions.length === 0) {\n throw new Error('At least one permission should be defined in access boundary rules.');\n }\n }\n this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`);\n this.cachedDownscopedAccessToken = null;\n }\n /**\n * Provides a mechanism to inject Downscoped access tokens directly.\n * The expiry_date field is required to facilitate determination of the token\n * expiration which would make it easier for the token consumer to handle.\n * @param credentials The Credentials object to set on the current client.\n */\n setCredentials(credentials) {\n if (!credentials.expiry_date) {\n throw new Error('The access token expiry_date field is missing in the provided ' +\n 'credentials.');\n }\n super.setCredentials(credentials);\n this.cachedDownscopedAccessToken = credentials;\n }\n async getAccessToken() {\n // If the cached access token is unavailable or expired, force refresh.\n // The Downscoped access token will be returned in\n // DownscopedAccessTokenResponse format.\n if (!this.cachedDownscopedAccessToken ||\n this.isExpired(this.cachedDownscopedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return Downscoped access token in DownscopedAccessTokenResponse format.\n return {\n token: this.cachedDownscopedAccessToken.access_token,\n expirationTime: this.cachedDownscopedAccessToken.expiry_date,\n res: this.cachedDownscopedAccessToken.res,\n };\n }\n /**\n * The main authentication interface. 
It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * The result has the form:\n * { Authorization: 'Bearer ' }\n */\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * GCP access tokens are retrieved from authclient object/source credential.\n * Then GCP access tokens are exchanged for downscoped access tokens via the\n * token exchange endpoint.\n * @return A promise that resolves with the fresh downscoped access token.\n */\n async refreshAccessTokenAsync() {\n var _a;\n // Retrieve GCP access token from source credential.\n const subjectToken = (await this.authClient.getAccessToken()).token;\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken: subjectToken,\n subjectTokenType: STS_SUBJECT_TOKEN_TYPE,\n };\n // Exchange the source AuthClient access token for a Downscoped access\n // token.\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary);\n /**\n * The STS endpoint will only return the expiration time for the downscoped\n * access token if the original access token represents a service account.\n * The downscoped token's expiration time will always match the source\n * credential expiration. When no expires_in is returned, we can copy the\n * source credential's expiration time.\n */\n const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? 
void 0 : _a.expiry_date) || null;\n const expiryDate = stsResponse.expires_in\n ? new Date().getTime() + stsResponse.expires_in * 1000\n : sourceCredExpireDate;\n // Save response in cached access token.\n this.cachedDownscopedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: expiryDate,\n res: stsResponse.res,\n };\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedDownscopedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedDownscopedAccessToken.expiry_date,\n access_token: this.cachedDownscopedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedDownscopedAccessToken;\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param downscopedAccessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(downscopedAccessToken) {\n const now = new Date().getTime();\n return downscopedAccessToken.expiry_date\n ? now >=\n downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.DownscopedClient = DownscopedClient;\n","\"use strict\";\n// Copyright 2018 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getEnv = exports.clear = exports.GCPEnv = void 0;\nconst gcpMetadata = require(\"gcp-metadata\");\nvar GCPEnv;\n(function (GCPEnv) {\n GCPEnv[\"APP_ENGINE\"] = \"APP_ENGINE\";\n GCPEnv[\"KUBERNETES_ENGINE\"] = \"KUBERNETES_ENGINE\";\n GCPEnv[\"CLOUD_FUNCTIONS\"] = \"CLOUD_FUNCTIONS\";\n GCPEnv[\"COMPUTE_ENGINE\"] = \"COMPUTE_ENGINE\";\n GCPEnv[\"CLOUD_RUN\"] = \"CLOUD_RUN\";\n GCPEnv[\"NONE\"] = \"NONE\";\n})(GCPEnv || (exports.GCPEnv = GCPEnv = {}));\nlet envPromise;\nfunction clear() {\n envPromise = undefined;\n}\nexports.clear = clear;\nasync function getEnv() {\n if (envPromise) {\n return envPromise;\n }\n envPromise = getEnvMemoized();\n return envPromise;\n}\nexports.getEnv = getEnv;\nasync function getEnvMemoized() {\n let env = GCPEnv.NONE;\n if (isAppEngine()) {\n env = GCPEnv.APP_ENGINE;\n }\n else if (isCloudFunction()) {\n env = GCPEnv.CLOUD_FUNCTIONS;\n }\n else if (await isComputeEngine()) {\n if (await isKubernetesEngine()) {\n env = GCPEnv.KUBERNETES_ENGINE;\n }\n else if (isCloudRun()) {\n env = GCPEnv.CLOUD_RUN;\n }\n else {\n env = GCPEnv.COMPUTE_ENGINE;\n }\n }\n else {\n env = GCPEnv.NONE;\n }\n return env;\n}\nfunction isAppEngine() {\n return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME);\n}\nfunction isCloudFunction() {\n return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET);\n}\n/**\n * This check only verifies that the environment is running knative.\n * This must be run *after* checking 
for Kubernetes, otherwise it will\n * return a false positive.\n */\nfunction isCloudRun() {\n return !!process.env.K_CONFIGURATION;\n}\nasync function isKubernetesEngine() {\n try {\n await gcpMetadata.instance('attributes/cluster-name');\n return true;\n }\n catch (e) {\n return false;\n }\n}\nasync function isComputeEngine() {\n return gcpMetadata.isAvailable();\n}\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0;\nconst SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2';\nconst OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token';\nconst OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt';\n/**\n * Defines the response of a 3rd party executable run by the pluggable auth client.\n */\nclass ExecutableResponse {\n /**\n * Instantiates an ExecutableResponse instance using the provided JSON object\n * from the output of the executable.\n * @param responseJson Response from a 3rd party executable, loaded from a\n * run of the executable or a cached output file.\n */\n constructor(responseJson) {\n // Check that the required fields exist in the json response.\n if (!responseJson.version) {\n throw new InvalidVersionFieldError(\"Executable response must contain a 'version' field.\");\n }\n if (responseJson.success === undefined) {\n throw new InvalidSuccessFieldError(\"Executable response must contain a 'success' field.\");\n }\n this.version = responseJson.version;\n this.success = responseJson.success;\n // Validate required fields for a successful response.\n if (this.success) {\n this.expirationTime = responseJson.expiration_time;\n this.tokenType = responseJson.token_type;\n // Validate token type field.\n if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) {\n throw new InvalidTokenTypeFieldError(\"Executable response must contain a 'token_type' field when successful \" +\n `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n // Validate subject token.\n if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) {\n if (!responseJson.saml_response) {\n throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n this.subjectToken = responseJson.saml_response;\n }\n else {\n if (!responseJson.id_token) {\n throw new InvalidSubjectTokenError(\"Executable response must contain a 'id_token' field when \" +\n `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or 
${OIDC_SUBJECT_TOKEN_TYPE2}.`);\n }\n this.subjectToken = responseJson.id_token;\n }\n }\n else {\n // Both code and message must be provided for unsuccessful responses.\n if (!responseJson.code) {\n throw new InvalidCodeFieldError(\"Executable response must contain a 'code' field when unsuccessful.\");\n }\n if (!responseJson.message) {\n throw new InvalidMessageFieldError(\"Executable response must contain a 'message' field when unsuccessful.\");\n }\n this.errorCode = responseJson.code;\n this.errorMessage = responseJson.message;\n }\n }\n /**\n * @return A boolean representing if the response has a valid token. Returns\n * true when the response was successful and the token is not expired.\n */\n isValid() {\n return !this.isExpired() && this.success;\n }\n /**\n * @return A boolean representing if the response is expired. Returns true if the\n * provided timeout has passed.\n */\n isExpired() {\n return (this.expirationTime !== undefined &&\n this.expirationTime < Math.round(Date.now() / 1000));\n }\n}\nexports.ExecutableResponse = ExecutableResponse;\n/**\n * An error thrown by the ExecutableResponse class.\n */\nclass ExecutableResponseError extends Error {\n constructor(message) {\n super(message);\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableResponseError = ExecutableResponseError;\n/**\n * An error thrown when the 'version' field in an executable response is missing or invalid.\n */\nclass InvalidVersionFieldError extends ExecutableResponseError {\n}\nexports.InvalidVersionFieldError = InvalidVersionFieldError;\n/**\n * An error thrown when the 'success' field in an executable response is missing or invalid.\n */\nclass InvalidSuccessFieldError extends ExecutableResponseError {\n}\nexports.InvalidSuccessFieldError = InvalidSuccessFieldError;\n/**\n * An error thrown when the 'expiration_time' field in an executable response is missing or invalid.\n */\nclass InvalidExpirationTimeFieldError extends ExecutableResponseError {\n}\nexports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError;\n/**\n * An error thrown when the 'token_type' field in an executable response is missing or invalid.\n */\nclass InvalidTokenTypeFieldError extends ExecutableResponseError {\n}\nexports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError;\n/**\n * An error thrown when the 'code' field in an executable response is missing or invalid.\n */\nclass InvalidCodeFieldError extends ExecutableResponseError {\n}\nexports.InvalidCodeFieldError = InvalidCodeFieldError;\n/**\n * An error thrown when the 'message' field in an executable response is missing or invalid.\n */\nclass InvalidMessageFieldError extends ExecutableResponseError {\n}\nexports.InvalidMessageFieldError = InvalidMessageFieldError;\n/**\n * An error thrown when the subject token in an executable response is missing or invalid.\n */\nclass InvalidSubjectTokenError extends ExecutableResponseError {\n}\nexports.InvalidSubjectTokenError = InvalidSubjectTokenError;\n","\"use strict\";\n// Copyright 2023 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for 
the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0;\nconst authclient_1 = require(\"./authclient\");\nconst oauth2common_1 = require(\"./oauth2common\");\nconst gaxios_1 = require(\"gaxios\");\nconst stream = require(\"stream\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n/**\n * The credentials JSON file type for external account authorized user clients.\n */\nexports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user';\n/**\n * Handler for token refresh requests sent to the token_url endpoint for external\n * authorized user credentials.\n */\nclass ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler {\n /**\n * Initializes an ExternalAccountAuthorizedUserHandler instance.\n * @param url The URL of the token refresh endpoint.\n * @param transporter The transporter to use for the refresh request.\n * @param clientAuthentication The client authentication credentials to use\n * for the refresh request.\n */\n constructor(url, transporter, clientAuthentication) {\n super(clientAuthentication);\n this.url = url;\n this.transporter = transporter;\n }\n /**\n * Requests a new access token from the token_url endpoint using the provided\n * refresh token.\n * @param refreshToken The refresh token to use to generate a new access token.\n * @param additionalHeaders Optional additional headers to pass along the\n * request.\n * @return A promise that resolves with the token refresh response containing\n * the requested access token and its expiration time.\n */\n async refreshToken(refreshToken, additionalHeaders) {\n const values = new URLSearchParams({\n grant_type: 'refresh_token',\n refresh_token: refreshToken,\n });\n const headers = {\n 'Content-Type': 'application/x-www-form-urlencoded',\n ...additionalHeaders,\n };\n const opts = {\n url: this.url,\n method: 'POST',\n headers,\n data: values.toString(),\n responseType: 'json',\n };\n // Apply OAuth client authentication.\n this.applyClientAuthenticationOptions(opts);\n try {\n const response = await this.transporter.request(opts);\n // Successful response.\n const tokenRefreshResponse = response.data;\n tokenRefreshResponse.res = response;\n return tokenRefreshResponse;\n }\n catch (error) {\n // Translate error to OAuthError.\n if (error instanceof gaxios_1.GaxiosError && error.response) {\n throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, \n // Preserve other fields from the original error.\n error);\n }\n // Request could fail before the server responds.\n throw error;\n }\n }\n}\n/**\n * External Account Authorized User Client. 
This is used for OAuth2 credentials\n * sourced using external identities through Workforce Identity Federation.\n * Obtaining the initial access and refresh token can be done through the\n * Google Cloud CLI.\n */\nclass ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient {\n /**\n * Instantiates an ExternalAccountAuthorizedUserClient instances using the\n * provided JSON object loaded from a credentials files.\n * An error is throws if the credential is not valid.\n * @param options The external account authorized user option object typically\n * from the external accoutn authorized user JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super({ ...options, ...additionalOptions });\n this.refreshToken = options.refresh_token;\n const clientAuth = {\n confidentialClientType: 'basic',\n clientId: options.client_id,\n clientSecret: options.client_secret,\n };\n this.externalAccountAuthorizedUserHandler =\n new ExternalAccountAuthorizedUserHandler(options.token_url, this.transporter, clientAuth);\n this.cachedAccessToken = null;\n this.quotaProjectId = options.quota_project_id;\n // As threshold could be zero,\n // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the\n // zero value.\n if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {\n this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET;\n }\n else {\n this.eagerRefreshThresholdMillis = additionalOptions\n .eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.forceRefreshOnFailure);\n if (options.universe_domain) {\n this.universeDomain = options.universe_domain;\n }\n }\n async getAccessToken() {\n // If cached access token is unavailable or expired, force refresh.\n if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return GCP access token in GetAccessTokenResponse format.\n return {\n token: this.cachedAccessToken.access_token,\n res: this.cachedAccessToken.res,\n };\n }\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * @return A promise that resolves with the refreshed credential.\n */\n async refreshAccessTokenAsync() {\n // Refresh the access token using the refresh token.\n const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken);\n this.cachedAccessToken = {\n access_token: refreshResponse.access_token,\n expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000,\n res: refreshResponse.res,\n };\n if (refreshResponse.refresh_token !== undefined) {\n this.refreshToken = refreshResponse.refresh_token;\n }\n return this.cachedAccessToken;\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param credentials The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(credentials) {\n const now = new Date().getTime();\n return credentials.expiry_date\n ? 
now >= credentials.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ExternalAccountClient = void 0;\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst identitypoolclient_1 = require(\"./identitypoolclient\");\nconst awsclient_1 = require(\"./awsclient\");\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\n/**\n * Dummy class with no constructor. Developers are expected to use fromJSON.\n */\nclass ExternalAccountClient {\n constructor() {\n throw new Error('ExternalAccountClients should be initialized via: ' +\n 'ExternalAccountClient.fromJSON(), ' +\n 'directly via explicit constructors, eg. ' +\n 'new AwsClient(options), new IdentityPoolClient(options), new' +\n 'PluggableAuthClientOptions, or via ' +\n 'new GoogleAuth(options).getClient()');\n }\n /**\n * This static method will instantiate the\n * corresponding type of external account credential depending on the\n * underlying credential source.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n * @return A BaseExternalAccountClient instance or null if the options\n * provided do not correspond to an external account credential.\n */\n static fromJSON(options, additionalOptions) {\n var _a, _b;\n if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) {\n return new awsclient_1.AwsClient(options, additionalOptions);\n }\n else if ((_b = options.credential_source) === null || _b === void 0 ? 
void 0 : _b.executable) {\n return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions);\n }\n else {\n return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions);\n }\n }\n else {\n return null;\n }\n }\n}\nexports.ExternalAccountClient = ExternalAccountClient;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0;\nconst child_process_1 = require(\"child_process\");\nconst fs = require(\"fs\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst os = require(\"os\");\nconst path = require(\"path\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst transporters_1 = require(\"../transporters\");\nconst computeclient_1 = require(\"./computeclient\");\nconst idtokenclient_1 = require(\"./idtokenclient\");\nconst envDetect_1 = require(\"./envDetect\");\nconst jwtclient_1 = require(\"./jwtclient\");\nconst refreshclient_1 = require(\"./refreshclient\");\nconst impersonated_1 = require(\"./impersonated\");\nconst externalclient_1 = require(\"./externalclient\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst authclient_1 = require(\"./authclient\");\nconst externalAccountAuthorizedUserClient_1 = require(\"./externalAccountAuthorizedUserClient\");\nconst util_1 = require(\"../util\");\nexports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';\nconst GoogleAuthExceptionMessages = {\n NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \\n' +\n 'To learn more about authentication and Google APIs, visit: \\n' +\n 'https://cloud.google.com/docs/authentication/getting-started',\n NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. 
\\n' +\n 'To learn more about authentication and Google APIs, visit: \\n' +\n 'https://cloud.google.com/docs/authentication/getting-started',\n NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\\n' +\n 'To learn more about Universe Domain retrieval, visit: \\n' +\n 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys',\n};\nclass GoogleAuth {\n // Note: this properly is only public to satisify unit tests.\n // https://github.com/Microsoft/TypeScript/issues/5228\n get isGCE() {\n return this.checkIsGCE;\n }\n /**\n * Configuration is resolved in the following order of precedence:\n * - {@link GoogleAuthOptions.credentials `credentials`}\n * - {@link GoogleAuthOptions.keyFilename `keyFilename`}\n * - {@link GoogleAuthOptions.keyFile `keyFile`}\n *\n * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the\n * {@link AuthClient `AuthClient`s}.\n *\n * @param opts\n */\n constructor(opts) {\n /**\n * Caches a value indicating whether the auth layer is running on Google\n * Compute Engine.\n * @private\n */\n this.checkIsGCE = undefined;\n // To save the contents of the JSON credential file\n this.jsonContent = null;\n this.cachedCredential = null;\n this.clientOptions = {};\n opts = opts || {};\n this._cachedProjectId = opts.projectId || null;\n this.cachedCredential = opts.authClient || null;\n this.keyFilename = opts.keyFilename || opts.keyFile;\n this.scopes = opts.scopes;\n this.jsonContent = opts.credentials || null;\n this.clientOptions = opts.clientOptions || {};\n if (opts.universeDomain) {\n this.clientOptions.universeDomain = opts.universeDomain;\n }\n }\n // GAPIC client libraries should always use self-signed JWTs. The following\n // variables are set on the JWT client in order to indicate the type of library,\n // and sign the JWT with the correct audience and scopes (if not supplied).\n setGapicJWTValues(client) {\n client.defaultServicePath = this.defaultServicePath;\n client.useJWTAccessWithScope = this.useJWTAccessWithScope;\n client.defaultScopes = this.defaultScopes;\n }\n getProjectId(callback) {\n if (callback) {\n this.getProjectIdAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getProjectIdAsync();\n }\n }\n /**\n * A temporary method for internal `getProjectId` usages where `null` is\n * acceptable. 
In a future major release, `getProjectId` should return `null`\n * (as the `Promise` base signature describes) and this private\n * method should be removed.\n *\n * @returns Promise that resolves with project id (or `null`)\n */\n async getProjectIdOptional() {\n try {\n return await this.getProjectId();\n }\n catch (e) {\n if (e instanceof Error &&\n e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) {\n return null;\n }\n else {\n throw e;\n }\n }\n }\n /*\n * A private method for finding and caching a projectId.\n *\n * Supports environments in order of precedence:\n * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable\n * - GOOGLE_APPLICATION_CREDENTIALS JSON file\n * - Cloud SDK: `gcloud config config-helper --format json`\n * - GCE project ID from metadata server\n *\n * @returns projectId\n */\n async findAndCacheProjectId() {\n let projectId = null;\n projectId || (projectId = await this.getProductionProjectId());\n projectId || (projectId = await this.getFileProjectId());\n projectId || (projectId = await this.getDefaultServiceProjectId());\n projectId || (projectId = await this.getGCEProjectId());\n projectId || (projectId = await this.getExternalAccountClientProjectId());\n if (projectId) {\n this._cachedProjectId = projectId;\n return projectId;\n }\n else {\n throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND);\n }\n }\n async getProjectIdAsync() {\n if (this._cachedProjectId) {\n return this._cachedProjectId;\n }\n if (!this._findProjectIdPromise) {\n this._findProjectIdPromise = this.findAndCacheProjectId();\n }\n return this._findProjectIdPromise;\n }\n /**\n * Retrieves a universe domain from the metadata server via\n * {@link gcpMetadata.universe}.\n *\n * @returns a universe domain\n */\n async getUniverseDomainFromMetadataServer() {\n var _a;\n let universeDomain;\n try {\n universeDomain = await gcpMetadata.universe('universe_domain');\n universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE);\n }\n catch (e) {\n if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {\n universeDomain = authclient_1.DEFAULT_UNIVERSE;\n }\n else {\n throw e;\n }\n }\n return universeDomain;\n }\n /**\n * Retrieves, caches, and returns the universe domain in the following order\n * of precedence:\n * - The universe domain in {@link GoogleAuth.clientOptions}\n * - An existing or ADC {@link AuthClient}'s universe domain\n * - {@link gcpMetadata.universe}, if {@link Compute} client\n *\n * @returns The universe domain\n */\n async getUniverseDomain() {\n let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain');\n try {\n universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain);\n }\n catch (_a) {\n // client or ADC is not available\n universeDomain !== null && universeDomain !== void 0 ? 
universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE);\n }\n return universeDomain;\n }\n /**\n * @returns Any scopes (user-specified or default scopes specified by the\n * client library) that need to be set on the current Auth client.\n */\n getAnyScopes() {\n return this.scopes || this.defaultScopes;\n }\n getApplicationDefault(optionsOrCallback = {}, callback) {\n let options;\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback);\n }\n else {\n return this.getApplicationDefaultAsync(options);\n }\n }\n async getApplicationDefaultAsync(options = {}) {\n // If we've already got a cached credential, return it.\n // This will also preserve one's configured quota project, in case they\n // set one directly on the credential previously.\n if (this.cachedCredential) {\n return await this.prepareAndCacheADC(this.cachedCredential);\n }\n // Since this is a 'new' ADC to cache we will use the environment variable\n // if it's available. We prefer this value over the value from ADC.\n const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'];\n let credential;\n // Check for the existence of a local environment variable pointing to the\n // location of the credential file. This is typically used in local\n // developer scenarios.\n credential =\n await this._tryGetApplicationCredentialsFromEnvironmentVariable(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Look in the well-known credential file location.\n credential =\n await this._tryGetApplicationCredentialsFromWellKnownFile(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Determine if we're running on GCE.\n if (await this._checkIsGCE()) {\n // set universe domain for Compute client\n if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) {\n options.universeDomain =\n await this.getUniverseDomainFromMetadataServer();\n }\n options.scopes = this.getAnyScopes();\n return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride);\n }\n throw new Error('Could not load the default credentials. 
Browse to https://cloud.google.com/docs/authentication/getting-started for more information.');\n }\n async prepareAndCacheADC(credential, quotaProjectIdOverride) {\n const projectId = await this.getProjectIdOptional();\n if (quotaProjectIdOverride) {\n credential.quotaProjectId = quotaProjectIdOverride;\n }\n this.cachedCredential = credential;\n return { credential, projectId };\n }\n /**\n * Determines whether the auth layer is running on Google Compute Engine.\n * Checks for GCP Residency, then fallback to checking if metadata server\n * is available.\n *\n * @returns A promise that resolves with the boolean.\n * @api private\n */\n async _checkIsGCE() {\n if (this.checkIsGCE === undefined) {\n this.checkIsGCE =\n gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable());\n }\n return this.checkIsGCE;\n }\n /**\n * Attempts to load default credentials from the environment variable path..\n * @returns Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromEnvironmentVariable(options) {\n const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] ||\n process.env['google_application_credentials'];\n if (!credentialsPath || credentialsPath.length === 0) {\n return null;\n }\n try {\n return this._getApplicationCredentialsFromFilePath(credentialsPath, options);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`;\n }\n throw e;\n }\n }\n /**\n * Attempts to load default credentials from a well-known file location\n * @return Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromWellKnownFile(options) {\n // First, figure out the location of the file, depending upon the OS type.\n let location = null;\n if (this._isWindows()) {\n // Windows\n location = process.env['APPDATA'];\n }\n else {\n // Linux or Mac\n const home = process.env['HOME'];\n if (home) {\n location = path.join(home, '.config');\n }\n }\n // If we found the root path, expand it.\n if (location) {\n location = path.join(location, 'gcloud', 'application_default_credentials.json');\n if (!fs.existsSync(location)) {\n location = null;\n }\n }\n // The file does not exist.\n if (!location) {\n return null;\n }\n // The file seems to exist. Try to use it.\n const client = await this._getApplicationCredentialsFromFilePath(location, options);\n return client;\n }\n /**\n * Attempts to load default credentials from a file at the given path..\n * @param filePath The path to the file to read.\n * @returns Promise that resolves with the OAuth2Client\n * @api private\n */\n async _getApplicationCredentialsFromFilePath(filePath, options = {}) {\n // Make sure the path looks like a string.\n if (!filePath || filePath.length === 0) {\n throw new Error('The file path is invalid.');\n }\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. Expect a thrown error\n // if not resolvable.\n filePath = fs.realpathSync(filePath);\n if (!fs.lstatSync(filePath).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. 
${err.message}`;\n }\n throw err;\n }\n // Now open a read stream on the file, and parse it.\n const readStream = fs.createReadStream(filePath);\n return this.fromStream(readStream, options);\n }\n /**\n * Create a credentials instance using a given impersonated input options.\n * @param json The impersonated input object.\n * @returns JWT or UserRefresh Client with data\n */\n fromImpersonatedJSON(json) {\n var _a, _b, _c, _d, _e;\n if (!json) {\n throw new Error('Must pass in a JSON object containing an impersonated refresh token');\n }\n if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n throw new Error(`The incoming JSON object does not have the \"${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}\" type`);\n }\n if (!json.source_credentials) {\n throw new Error('The incoming JSON object does not contain a source_credentials field');\n }\n if (!json.service_account_impersonation_url) {\n throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field');\n }\n // Create source client for impersonation\n const sourceClient = new refreshclient_1.UserRefreshClient();\n sourceClient.fromJSON(json.source_credentials);\n if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) {\n /**\n * Prevents DOS attacks.\n * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85}\n **/\n throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`);\n }\n // Extreact service account from service_account_impersonation_url\n const targetPrincipal = (_c = (_b = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target;\n if (!targetPrincipal) {\n throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`);\n }\n const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : [];\n const client = new impersonated_1.Impersonated({\n ...json,\n delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [],\n sourceClient: sourceClient,\n targetPrincipal: targetPrincipal,\n targetScopes: Array.isArray(targetScopes) ? 
targetScopes : [targetScopes],\n });\n return client;\n }\n /**\n * Create a credentials instance using the given input options.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n fromJSON(json, options = {}) {\n let client;\n // user's preferred universe domain\n const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain');\n if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) {\n client = new refreshclient_1.UserRefreshClient(options);\n client.fromJSON(json);\n }\n else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n client = this.fromImpersonatedJSON(json);\n }\n else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n client = externalclient_1.ExternalAccountClient.fromJSON(json, options);\n client.scopes = this.getAnyScopes();\n }\n else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) {\n client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options);\n }\n else {\n options.scopes = this.scopes;\n client = new jwtclient_1.JWT(options);\n this.setGapicJWTValues(client);\n client.fromJSON(json);\n }\n if (preferredUniverseDomain) {\n client.universeDomain = preferredUniverseDomain;\n }\n return client;\n }\n /**\n * Return a JWT or UserRefreshClient from JavaScript object, caching both the\n * object used to instantiate and the client.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n _cacheClientFromJSON(json, options) {\n const client = this.fromJSON(json, options);\n // cache both raw data used to instantiate client and client itself.\n this.jsonContent = json;\n this.cachedCredential = client;\n return client;\n }\n fromStream(inputStream, optionsOrCallback = {}, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback);\n }\n else {\n return this.fromStreamAsync(inputStream, options);\n }\n }\n fromStreamAsync(inputStream, options) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the Google auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n try {\n const data = JSON.parse(s);\n const r = this._cacheClientFromJSON(data, options);\n return resolve(r);\n }\n catch (err) {\n // If we failed parsing this.keyFileName, assume that it\n // is a PEM or p12 certificate:\n if (!this.keyFilename)\n throw err;\n const client = new jwtclient_1.JWT({\n ...this.clientOptions,\n keyFile: this.keyFilename,\n });\n this.cachedCredential = client;\n this.setGapicJWTValues(client);\n return resolve(client);\n }\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n /**\n * Create a credentials instance using the given API key string.\n * @param apiKey The API key string\n * @param options An optional options object.\n * @returns A JWT loaded from the key\n */\n fromAPIKey(apiKey, options) {\n options = options || {};\n const client = new jwtclient_1.JWT(options);\n client.fromAPIKey(apiKey);\n return client;\n }\n /**\n * Determines whether the 
current operating system is Windows.\n * @api private\n */\n _isWindows() {\n const sys = os.platform();\n if (sys && sys.length >= 3) {\n if (sys.substring(0, 3).toLowerCase() === 'win') {\n return true;\n }\n }\n return false;\n }\n /**\n * Run the Google Cloud SDK command that prints the default project ID\n */\n async getDefaultServiceProjectId() {\n return new Promise(resolve => {\n (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => {\n if (!err && stdout) {\n try {\n const projectId = JSON.parse(stdout).configuration.properties.core.project;\n resolve(projectId);\n return;\n }\n catch (e) {\n // ignore errors\n }\n }\n resolve(null);\n });\n });\n }\n /**\n * Loads the project id from environment variables.\n * @api private\n */\n getProductionProjectId() {\n return (process.env['GCLOUD_PROJECT'] ||\n process.env['GOOGLE_CLOUD_PROJECT'] ||\n process.env['gcloud_project'] ||\n process.env['google_cloud_project']);\n }\n /**\n * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file.\n * @api private\n */\n async getFileProjectId() {\n if (this.cachedCredential) {\n // Try to read the project ID from the cached credentials file\n return this.cachedCredential.projectId;\n }\n // Ensure the projectId is loaded from the keyFile if available.\n if (this.keyFilename) {\n const creds = await this.getClient();\n if (creds && creds.projectId) {\n return creds.projectId;\n }\n }\n // Try to load a credentials file and read its project ID\n const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable();\n if (r) {\n return r.projectId;\n }\n else {\n return null;\n }\n }\n /**\n * Gets the project ID from external account client if available.\n */\n async getExternalAccountClientProjectId() {\n if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n return null;\n }\n const creds = await this.getClient();\n // Do not suppress the underlying error, as the error could contain helpful\n // information for debugging and fixing. This is especially true for\n // external account creds as in order to get the project ID, the following\n // operations have to succeed:\n // 1. Valid credentials file should be supplied.\n // 2. Ability to retrieve access tokens from STS token exchange API.\n // 3. Ability to exchange for service account impersonated credentials (if\n // enabled).\n // 4. 
Ability to get project info using the access token from step 2 or 3.\n // Without surfacing the error, it is harder for developers to determine\n // which step went wrong.\n return await creds.getProjectId();\n }\n /**\n * Gets the Compute Engine project ID if it can be inferred.\n */\n async getGCEProjectId() {\n try {\n const r = await gcpMetadata.project('project-id');\n return r;\n }\n catch (e) {\n // Ignore any errors\n return null;\n }\n }\n getCredentials(callback) {\n if (callback) {\n this.getCredentialsAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getCredentialsAsync();\n }\n }\n async getCredentialsAsync() {\n const client = await this.getClient();\n if (client instanceof impersonated_1.Impersonated) {\n return { client_email: client.getTargetPrincipal() };\n }\n if (client instanceof baseexternalclient_1.BaseExternalAccountClient) {\n const serviceAccountEmail = client.getServiceAccountEmail();\n if (serviceAccountEmail) {\n return {\n client_email: serviceAccountEmail,\n universe_domain: client.universeDomain,\n };\n }\n }\n if (this.jsonContent) {\n return {\n client_email: this.jsonContent.client_email,\n private_key: this.jsonContent.private_key,\n universe_domain: this.jsonContent.universe_domain,\n };\n }\n if (await this._checkIsGCE()) {\n const [client_email, universe_domain] = await Promise.all([\n gcpMetadata.instance('service-accounts/default/email'),\n this.getUniverseDomain(),\n ]);\n return { client_email, universe_domain };\n }\n throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND);\n }\n /**\n * Automatically obtain an {@link AuthClient `AuthClient`} based on the\n * provided configuration. If no options were passed, use Application\n * Default Credentials.\n */\n async getClient() {\n if (!this.cachedCredential) {\n if (this.jsonContent) {\n this._cacheClientFromJSON(this.jsonContent, this.clientOptions);\n }\n else if (this.keyFilename) {\n const filePath = path.resolve(this.keyFilename);\n const stream = fs.createReadStream(filePath);\n await this.fromStreamAsync(stream, this.clientOptions);\n }\n else {\n await this.getApplicationDefaultAsync(this.clientOptions);\n }\n }\n return this.cachedCredential;\n }\n /**\n * Creates a client which will fetch an ID token for authorization.\n * @param targetAudience the audience for the fetched ID token.\n * @returns IdTokenClient for making HTTP calls authenticated with ID tokens.\n */\n async getIdTokenClient(targetAudience) {\n const client = await this.getClient();\n if (!('fetchIdToken' in client)) {\n throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.');\n }\n return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client });\n }\n /**\n * Automatically obtain application default credentials, and return\n * an access token for making requests.\n */\n async getAccessToken() {\n const client = await this.getClient();\n return (await client.getAccessToken()).token;\n }\n /**\n * Obtain the HTTP headers that will provide authorization for a given\n * request.\n */\n async getRequestHeaders(url) {\n const client = await this.getClient();\n return client.getRequestHeaders(url);\n }\n /**\n * Obtain credentials for a request, then attach the appropriate headers to\n * the request options.\n * @param opts Axios or Request options on which to attach the headers\n */\n async authorizeRequest(opts) {\n opts = opts || {};\n const url = 
opts.url || opts.uri;\n const client = await this.getClient();\n const headers = await client.getRequestHeaders(url);\n opts.headers = Object.assign(opts.headers || {}, headers);\n return opts;\n }\n /**\n * Automatically obtain application default credentials, and make an\n * HTTP request using the given options.\n * @param opts Axios request options for the HTTP request.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n async request(opts) {\n const client = await this.getClient();\n return client.request(opts);\n }\n /**\n * Determine the compute environment in which the code is running.\n */\n getEnv() {\n return (0, envDetect_1.getEnv)();\n }\n /**\n * Sign the given data with the current private key, or go out\n * to the IAM API to sign it.\n * @param data The data to be signed.\n * @param endpoint A custom endpoint to use.\n *\n * @example\n * ```\n * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/');\n * ```\n */\n async sign(data, endpoint) {\n const client = await this.getClient();\n const universe = await this.getUniverseDomain();\n endpoint =\n endpoint ||\n `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`;\n if (client instanceof impersonated_1.Impersonated) {\n const signed = await client.sign(data);\n return signed.signedBlob;\n }\n const crypto = (0, crypto_1.createCrypto)();\n if (client instanceof jwtclient_1.JWT && client.key) {\n const sign = await crypto.sign(client.key, data);\n return sign;\n }\n const creds = await this.getCredentials();\n if (!creds.client_email) {\n throw new Error('Cannot sign data without `client_email`.');\n }\n return this.signBlob(crypto, creds.client_email, data, endpoint);\n }\n async signBlob(crypto, emailOrUniqueId, data, endpoint) {\n const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`);\n const res = await this.request({\n method: 'POST',\n url: url.href,\n data: {\n payload: crypto.encodeBase64StringUtf8(data),\n },\n });\n return res.data.signedBlob;\n }\n}\nexports.GoogleAuth = GoogleAuth;\n/**\n * Export DefaultTransporter as a static property of the class.\n */\nGoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter;\n","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IAMAuth = void 0;\nclass IAMAuth {\n /**\n * IAM credentials.\n *\n * @param selector the iam authority selector\n * @param token the token\n * @constructor\n */\n constructor(selector, token) {\n this.selector = selector;\n this.token = token;\n this.selector = selector;\n this.token = token;\n }\n /**\n * Acquire the HTTP headers required to make an authenticated request.\n */\n getRequestHeaders() {\n return {\n 'x-goog-iam-authority-selector': this.selector,\n 'x-goog-iam-authorization-token': this.token,\n };\n }\n}\nexports.IAMAuth = IAMAuth;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, 
Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar _a, _b, _c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdentityPoolClient = void 0;\nconst fs = require(\"fs\");\nconst util_1 = require(\"util\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst util_2 = require(\"../util\");\n// fs.readfile is undefined in browser karma tests causing\n// `npm run browser-test` to fail as test.oauth2.ts imports this file via\n// src/index.ts.\n// Fallback to void function to avoid promisify throwing a TypeError.\nconst readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { }));\nconst realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { }));\nconst lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { }));\n/**\n * Defines the Url-sourced and file-sourced external account clients mainly\n * used for K8s and Azure workloads.\n */\nclass IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiate an IdentityPoolClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid file-sourced or\n * url-sourced credential or a workforce pool user project is provided\n * with a non workforce audience.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file. 
The camelCased options\n * are aliases for the snake_cased options.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n const opts = (0, util_2.originalOrCamelOptions)(options);\n const credentialSource = opts.get('credential_source');\n const credentialSourceOpts = (0, util_2.originalOrCamelOptions)(credentialSource);\n this.file = credentialSourceOpts.get('file');\n this.url = credentialSourceOpts.get('url');\n this.headers = credentialSourceOpts.get('headers');\n if (this.file && this.url) {\n throw new Error('No valid Identity Pool \"credential_source\" provided, must be either file or url.');\n }\n else if (this.file && !this.url) {\n this.credentialSourceType = 'file';\n }\n else if (!this.file && this.url) {\n this.credentialSourceType = 'url';\n }\n else {\n throw new Error('No valid Identity Pool \"credential_source\" provided, must be either file or url.');\n }\n const formatOpts = (0, util_2.originalOrCamelOptions)(credentialSourceOpts.get('format'));\n // Text is the default format type.\n this.formatType = formatOpts.get('type') || 'text';\n this.formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name');\n if (this.formatType !== 'json' && this.formatType !== 'text') {\n throw new Error(`Invalid credential_source format \"${this.formatType}\"`);\n }\n if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) {\n throw new Error('Missing subject_token_field_name for JSON credential_source format');\n }\n }\n /**\n * Triggered when a external subject token is needed to be exchanged for a GCP\n * access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this either retrieves the local credential from a file location (k8s\n * workload) or by sending a GET request to a local metadata server (Azure\n * workloads).\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n if (this.file) {\n return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName);\n }\n return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers);\n }\n /**\n * Looks up the external subject token in the file path provided and\n * resolves with that token.\n * @param file The file path where the external credential is located.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) {\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. 
Expect a thrown error\n // if not resolvable.\n filePath = await realpath(filePath);\n if (!(await lstat(filePath)).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`;\n }\n throw err;\n }\n let subjectToken;\n const rawText = await readFile(filePath, { encoding: 'utf8' });\n if (formatType === 'text') {\n subjectToken = rawText;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const json = JSON.parse(rawText);\n subjectToken = json[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source file');\n }\n return subjectToken;\n }\n /**\n * Sends a GET request to the URL provided and resolves with the returned\n * external subject token.\n * @param url The URL to call to retrieve the subject token. This is typically\n * a local metadata server.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @param headers The optional additional headers to send with the request to\n * the metadata server url.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) {\n const opts = {\n url,\n method: 'GET',\n headers,\n responseType: formatType,\n };\n let subjectToken;\n if (formatType === 'text') {\n const response = await this.transporter.request(opts);\n subjectToken = response.data;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const response = await this.transporter.request(opts);\n subjectToken = response.data[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source URL');\n }\n return subjectToken;\n }\n}\nexports.IdentityPoolClient = IdentityPoolClient;\n","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdTokenClient = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nclass IdTokenClient extends oauth2client_1.OAuth2Client {\n /**\n * Google ID Token client\n *\n * Retrieve ID token from the metadata server.\n * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server\n */\n constructor(options) {\n super(options);\n this.targetAudience = options.targetAudience;\n this.idTokenProvider = options.idTokenProvider;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n if (!this.credentials.id_token ||\n !this.credentials.expiry_date ||\n this.isTokenExpiring()) {\n const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience);\n 
this.credentials = {\n id_token: idToken,\n expiry_date: this.getIdTokenExpiryDate(idToken),\n };\n }\n const headers = {\n Authorization: 'Bearer ' + this.credentials.id_token,\n };\n return { headers };\n }\n getIdTokenExpiryDate(idToken) {\n const payloadB64 = idToken.split('.')[1];\n if (payloadB64) {\n const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii'));\n return payload.exp * 1000;\n }\n }\n}\nexports.IdTokenClient = IdTokenClient;\n","\"use strict\";\n/**\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nconst gaxios_1 = require(\"gaxios\");\nexports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account';\nclass Impersonated extends oauth2client_1.OAuth2Client {\n /**\n * Impersonated service account credentials.\n *\n * Create a new access token by impersonating another service account.\n *\n * Impersonated Credentials allowing credentials issued to a user or\n * service account to impersonate another. The source project using\n * Impersonated Credentials must enable the \"IAMCredentials\" API.\n * Also, the target service account must grant the orginating principal\n * the \"Service Account Token Creator\" IAM role.\n *\n * @param {object} options - The configuration object.\n * @param {object} [options.sourceClient] the source credential used as to\n * acquire the impersonated credentials.\n * @param {string} [options.targetPrincipal] the service account to\n * impersonate.\n * @param {string[]} [options.delegates] the chained list of delegates\n * required to grant the final access_token. If set, the sequence of\n * identities must have \"Service Account Token Creator\" capability granted to\n * the preceding identity. For example, if set to [serviceAccountB,\n * serviceAccountC], the sourceCredential must have the Token Creator role on\n * serviceAccountB. serviceAccountB must have the Token Creator on\n * serviceAccountC. 
Finally, C must have Token Creator on target_principal.\n * If left unset, sourceCredential must have that role on targetPrincipal.\n * @param {string[]} [options.targetScopes] scopes to request during the\n * authorization grant.\n * @param {number} [options.lifetime] number of seconds the delegated\n * credential should be valid for up to 3600 seconds by default, or 43,200\n * seconds by extending the token's lifetime, see:\n * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth\n * @param {string} [options.endpoint] api endpoint override.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f;\n super(options);\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = {\n expiry_date: 1,\n refresh_token: 'impersonated-placeholder',\n };\n this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client();\n this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : '';\n this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : [];\n this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : [];\n this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600;\n this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com';\n }\n /**\n * Signs some bytes.\n *\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation}\n * @param blobToSign String to sign.\n * @return denoting the keyyID and signedBlob in base64 string\n */\n async sign(blobToSign) {\n await this.sourceClient.getAccessToken();\n const name = `projects/-/serviceAccounts/${this.targetPrincipal}`;\n const u = `${this.endpoint}/v1/${name}:signBlob`;\n const body = {\n delegates: this.delegates,\n payload: Buffer.from(blobToSign).toString('base64'),\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n return res.data;\n }\n /** The service account email to be impersonated. */\n getTargetPrincipal() {\n return this.targetPrincipal;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshToken(refreshToken) {\n var _a, _b, _c, _d, _e, _f;\n try {\n await this.sourceClient.getAccessToken();\n const name = 'projects/-/serviceAccounts/' + this.targetPrincipal;\n const u = `${this.endpoint}/v1/${name}:generateAccessToken`;\n const body = {\n delegates: this.delegates,\n scope: this.targetScopes,\n lifetime: this.lifetime + 's',\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n const tokenResponse = res.data;\n this.credentials.access_token = tokenResponse.accessToken;\n this.credentials.expiry_date = Date.parse(tokenResponse.expireTime);\n return {\n tokens: this.credentials,\n res,\n };\n }\n catch (error) {\n if (!(error instanceof Error))\n throw error;\n let status = 0;\n let message = '';\n if (error instanceof gaxios_1.GaxiosError) {\n status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status;\n message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? 
void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message;\n }\n if (status && message) {\n error.message = `${status}: unable to impersonate: ${message}`;\n throw error;\n }\n else {\n error.message = `unable to impersonate: ${error}`;\n throw error;\n }\n }\n }\n /**\n * Generates an OpenID Connect ID token for a service account.\n *\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation}\n *\n * @param targetAudience the audience for the fetched ID token.\n * @param options the for the request\n * @return an OpenID Connect ID token\n */\n async fetchIdToken(targetAudience, options) {\n var _a;\n await this.sourceClient.getAccessToken();\n const name = `projects/-/serviceAccounts/${this.targetPrincipal}`;\n const u = `${this.endpoint}/v1/${name}:generateIdToken`;\n const body = {\n delegates: this.delegates,\n audience: targetAudience,\n includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true,\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n return res.data.token;\n }\n}\nexports.Impersonated = Impersonated;\n","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWTAccess = void 0;\nconst jws = require(\"jws\");\nconst util_1 = require(\"../util\");\nconst DEFAULT_HEADER = {\n alg: 'RS256',\n typ: 'JWT',\n};\nclass JWTAccess {\n /**\n * JWTAccess service account credentials.\n *\n * Create a new access token by using the credential to create a new JWT token\n * that's recognized as the access token.\n *\n * @param email the service account email address.\n * @param key the private key that will be used to sign the token.\n * @param keyId the ID of the private key used to sign the token.\n */\n constructor(email, key, keyId, eagerRefreshThresholdMillis) {\n this.cache = new util_1.LRUCache({\n capacity: 500,\n maxAge: 60 * 60 * 1000,\n });\n this.email = email;\n this.key = key;\n this.keyId = keyId;\n this.eagerRefreshThresholdMillis =\n eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000;\n }\n /**\n * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url\n *\n * @param url The URI being authorized.\n * @param scopes The scope or scopes being authorized\n * @returns A string that returns the cached key.\n */\n getCachedKey(url, scopes) {\n let cacheKey = url;\n if (scopes && Array.isArray(scopes) && scopes.length) {\n cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`;\n }\n else if (typeof scopes === 'string') {\n cacheKey = url ? 
`${url}_${scopes}` : scopes;\n }\n if (!cacheKey) {\n throw Error('Scopes or url must be provided');\n }\n return cacheKey;\n }\n /**\n * Get a non-expired access token, after refreshing if necessary.\n *\n * @param url The URI being authorized.\n * @param additionalClaims An object with a set of additional claims to\n * include in the payload.\n * @returns An object that includes the authorization header.\n */\n getRequestHeaders(url, additionalClaims, scopes) {\n // Return cached authorization headers, unless we are within\n // eagerRefreshThresholdMillis ms of them expiring:\n const key = this.getCachedKey(url, scopes);\n const cachedToken = this.cache.get(key);\n const now = Date.now();\n if (cachedToken &&\n cachedToken.expiration - now > this.eagerRefreshThresholdMillis) {\n return cachedToken.headers;\n }\n const iat = Math.floor(Date.now() / 1000);\n const exp = JWTAccess.getExpirationTime(iat);\n let defaultClaims;\n // Turn scopes into space-separated string\n if (Array.isArray(scopes)) {\n scopes = scopes.join(' ');\n }\n // If scopes are specified, sign with scopes\n if (scopes) {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n scope: scopes,\n exp,\n iat,\n };\n }\n else {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n aud: url,\n exp,\n iat,\n };\n }\n // if additionalClaims are provided, ensure they do not collide with\n // other required claims.\n if (additionalClaims) {\n for (const claim in defaultClaims) {\n if (additionalClaims[claim]) {\n throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`);\n }\n }\n }\n const header = this.keyId\n ? { ...DEFAULT_HEADER, kid: this.keyId }\n : DEFAULT_HEADER;\n const payload = Object.assign(defaultClaims, additionalClaims);\n // Sign the jwt and add it to the cache\n const signedJWT = jws.sign({ header, payload, secret: this.key });\n const headers = { Authorization: `Bearer ${signedJWT}` };\n this.cache.set(key, {\n expiration: exp * 1000,\n headers,\n });\n return headers;\n }\n /**\n * Returns an expiration time for the JWT token.\n *\n * @param iat The issued at time for the JWT.\n * @returns An expiration time for the JWT.\n */\n static getExpirationTime(iat) {\n const exp = iat + 3600; // 3600 seconds = 1 hour\n return exp;\n }\n /**\n * Create a JWTAccess credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n reject(new Error('Must pass in a stream containing the service account auth settings.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('data', chunk => (s += chunk))\n .on('error', reject)\n .on('end', () => {\n try {\n 
const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (err) {\n reject(err);\n }\n });\n });\n }\n}\nexports.JWTAccess = JWTAccess;\n","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWT = void 0;\nconst gtoken_1 = require(\"gtoken\");\nconst jwtaccess_1 = require(\"./jwtaccess\");\nconst oauth2client_1 = require(\"./oauth2client\");\nconst authclient_1 = require(\"./authclient\");\nclass JWT extends oauth2client_1.OAuth2Client {\n constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) {\n const opts = optionsOrEmail && typeof optionsOrEmail === 'object'\n ? optionsOrEmail\n : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject };\n super(opts);\n this.email = opts.email;\n this.keyFile = opts.keyFile;\n this.key = opts.key;\n this.keyId = opts.keyId;\n this.scopes = opts.scopes;\n this.subject = opts.subject;\n this.additionalClaims = opts.additionalClaims;\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 };\n }\n /**\n * Creates a copy of the credential with the specified scopes.\n * @param scopes List of requested scopes or a single scope.\n * @return The cloned instance.\n */\n createScoped(scopes) {\n const jwt = new JWT(this);\n jwt.scopes = scopes;\n return jwt;\n }\n /**\n * Obtains the metadata to be sent with the request.\n *\n * @param url the URI being authorized.\n */\n async getRequestMetadataAsync(url) {\n url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url;\n const useSelfSignedJWT = (!this.hasUserScopes() && url) ||\n (this.useJWTAccessWithScope && this.hasAnyScopes()) ||\n this.universeDomain !== authclient_1.DEFAULT_UNIVERSE;\n if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) {\n throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`);\n }\n if (!this.apiKey && useSelfSignedJWT) {\n if (this.additionalClaims &&\n this.additionalClaims.target_audience) {\n const { tokens } = await this.refreshToken();\n return {\n headers: this.addSharedMetadataHeaders({\n Authorization: `Bearer ${tokens.id_token}`,\n }),\n };\n }\n else {\n // no scopes have been set, but a uri has been provided. Use JWTAccess\n // credentials.\n if (!this.access) {\n this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis);\n }\n let scopes;\n if (this.hasUserScopes()) {\n scopes = this.scopes;\n }\n else if (!url) {\n scopes = this.defaultScopes;\n }\n const useScopes = this.useJWTAccessWithScope ||\n this.universeDomain !== authclient_1.DEFAULT_UNIVERSE;\n const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? 
url : undefined, this.additionalClaims, \n // Scopes take precedent over audience for signing,\n // so we only provide them if `useJWTAccessWithScope` is on or\n // if we are in a non-default universe\n useScopes ? scopes : undefined);\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n else if (this.hasAnyScopes() || this.apiKey) {\n return super.getRequestMetadataAsync(url);\n }\n else {\n // If no audience, apiKey, or scopes are provided, we should not attempt\n // to populate any headers:\n return { headers: {} };\n }\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n // Create a new gToken for fetching an ID token\n const gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: { target_audience: targetAudience },\n transporter: this.transporter,\n });\n await gtoken.getToken({\n forceRefresh: true,\n });\n if (!gtoken.idToken) {\n throw new Error('Unknown error: Failed to fetch ID token');\n }\n return gtoken.idToken;\n }\n /**\n * Determine if there are currently scopes available.\n */\n hasUserScopes() {\n if (!this.scopes) {\n return false;\n }\n return this.scopes.length > 0;\n }\n /**\n * Are there any default or user scopes defined.\n */\n hasAnyScopes() {\n if (this.scopes && this.scopes.length > 0)\n return true;\n if (this.defaultScopes && this.defaultScopes.length > 0)\n return true;\n return false;\n }\n authorize(callback) {\n if (callback) {\n this.authorizeAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.authorizeAsync();\n }\n }\n async authorizeAsync() {\n const result = await this.refreshToken();\n if (!result) {\n throw new Error('No result returned');\n }\n this.credentials = result.tokens;\n this.credentials.refresh_token = 'jwt-placeholder';\n this.key = this.gtoken.key;\n this.email = this.gtoken.iss;\n return result.tokens;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken ignored\n * @private\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const gtoken = this.createGToken();\n const token = await gtoken.getToken({\n forceRefresh: this.isTokenExpiring(),\n });\n const tokens = {\n access_token: token.access_token,\n token_type: 'Bearer',\n expiry_date: gtoken.expiresAt,\n id_token: gtoken.idToken,\n };\n this.emit('tokens', tokens);\n return { res: null, tokens };\n }\n /**\n * Create a gToken if it doesn't already exist.\n */\n createGToken() {\n if (!this.gtoken) {\n this.gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: this.additionalClaims,\n transporter: this.transporter,\n });\n }\n return this.gtoken;\n }\n /**\n * Create a JWT credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n 
this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n this.quotaProjectId = json.quota_project_id;\n this.universeDomain = json.universe_domain || this.universeDomain;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the service account auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (e) {\n reject(e);\n }\n });\n });\n }\n /**\n * Creates a JWT credentials instance using an API Key for authentication.\n * @param apiKey The API Key in string form.\n */\n fromAPIKey(apiKey) {\n if (typeof apiKey !== 'string') {\n throw new Error('Must provide an API Key string.');\n }\n this.apiKey = apiKey;\n }\n /**\n * Using the key or keyFile on the JWT client, obtain an object that contains\n * the key and the client email.\n */\n async getCredentials() {\n if (this.key) {\n return { private_key: this.key, client_email: this.email };\n }\n else if (this.keyFile) {\n const gtoken = this.createGToken();\n const creds = await gtoken.getCredentials(this.keyFile);\n return { private_key: creds.privateKey, client_email: creds.clientEmail };\n }\n throw new Error('A key or a keyFile must be provided to getCredentials.');\n }\n}\nexports.JWT = JWT;\n","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LoginTicket = void 0;\nclass LoginTicket {\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @param {string} env Envelope of the jwt\n * @param {TokenPayload} pay Payload of the jwt\n * @constructor\n */\n constructor(env, pay) {\n this.envelope = env;\n this.payload = pay;\n }\n getEnvelope() {\n return this.envelope;\n }\n getPayload() {\n return this.payload;\n }\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @return The user ID\n */\n getUserId() {\n const payload = this.getPayload();\n if (payload && payload.sub) {\n return payload.sub;\n }\n return null;\n }\n /**\n * Returns attributes from the login ticket. 
This can contain\n * various information about the user session.\n *\n * @return The envelope and payload\n */\n getAttributes() {\n return { envelope: this.getEnvelope(), payload: this.getPayload() };\n }\n}\nexports.LoginTicket = LoginTicket;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst stream = require(\"stream\");\nconst formatEcdsa = require(\"ecdsa-sig-formatter\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst authclient_1 = require(\"./authclient\");\nconst loginticket_1 = require(\"./loginticket\");\nvar CodeChallengeMethod;\n(function (CodeChallengeMethod) {\n CodeChallengeMethod[\"Plain\"] = \"plain\";\n CodeChallengeMethod[\"S256\"] = \"S256\";\n})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {}));\nvar CertificateFormat;\n(function (CertificateFormat) {\n CertificateFormat[\"PEM\"] = \"PEM\";\n CertificateFormat[\"JWK\"] = \"JWK\";\n})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {}));\nclass OAuth2Client extends authclient_1.AuthClient {\n constructor(optionsOrClientId, clientSecret, redirectUri) {\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? 
optionsOrClientId\n : { clientId: optionsOrClientId, clientSecret, redirectUri };\n super(opts);\n this.certificateCache = {};\n this.certificateExpiry = null;\n this.certificateCacheFormat = CertificateFormat.PEM;\n this.refreshTokenPromises = new Map();\n this._clientId = opts.clientId;\n this._clientSecret = opts.clientSecret;\n this.redirectUri = opts.redirectUri;\n this.endpoints = {\n tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo',\n oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth',\n oauth2TokenUrl: 'https://oauth2.googleapis.com/token',\n oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke',\n oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs',\n oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs',\n oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key',\n ...opts.endpoints,\n };\n this.issuers = opts.issuers || [\n 'accounts.google.com',\n 'https://accounts.google.com',\n this.universeDomain,\n ];\n }\n /**\n * Generates URL for consent page landing.\n * @param opts Options.\n * @return URL to consent page.\n */\n generateAuthUrl(opts = {}) {\n if (opts.code_challenge_method && !opts.code_challenge) {\n throw new Error('If a code_challenge_method is provided, code_challenge must be included.');\n }\n opts.response_type = opts.response_type || 'code';\n opts.client_id = opts.client_id || this._clientId;\n opts.redirect_uri = opts.redirect_uri || this.redirectUri;\n // Allow scopes to be passed either as array or a string\n if (Array.isArray(opts.scope)) {\n opts.scope = opts.scope.join(' ');\n }\n const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString();\n return (rootUrl +\n '?' +\n querystring.stringify(opts));\n }\n generateCodeVerifier() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.');\n }\n /**\n * Convenience method to automatically generate a code_verifier, and its\n * resulting SHA256. If used, this must be paired with a S256\n * code_challenge_method.\n *\n * For a full example see:\n * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js\n */\n async generateCodeVerifierAsync() {\n // base64 encoding uses 6 bits per character, and we want to generate128\n // characters. 6*128/8 = 96.\n const crypto = (0, crypto_1.createCrypto)();\n const randomString = crypto.randomBytesBase64(96);\n // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/\n // \"-\"/\".\"/\"_\"/\"~\". Base64 encoded strings are pretty close, so we're just\n // swapping out a few chars.\n const codeVerifier = randomString\n .replace(/\\+/g, '~')\n .replace(/=/g, '_')\n .replace(/\\//g, '-');\n // Generate the base64 encoded SHA256\n const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier);\n // We need to use base64UrlEncoding instead of standard base64\n const codeChallenge = unencodedCodeChallenge\n .split('=')[0]\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n return { codeVerifier, codeChallenge };\n }\n getToken(codeOrOptions, callback) {\n const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions;\n if (callback) {\n this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response));\n }\n else {\n return this.getTokenAsync(options);\n }\n }\n async getTokenAsync(options) {\n const url = this.endpoints.oauth2TokenUrl.toString();\n const values = {\n code: options.code,\n client_id: options.client_id || this._clientId,\n client_secret: this._clientSecret,\n redirect_uri: options.redirect_uri || this.redirectUri,\n grant_type: 'authorization_code',\n code_verifier: options.codeVerifier,\n };\n const res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(values),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n const tokens = res.data;\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n /**\n * Refreshes the access token.\n * @param refresh_token Existing refresh token.\n * @private\n */\n async refreshToken(refreshToken) {\n if (!refreshToken) {\n return this.refreshTokenNoCache(refreshToken);\n }\n // If a request to refresh using the same token has started,\n // return the same promise.\n if (this.refreshTokenPromises.has(refreshToken)) {\n return this.refreshTokenPromises.get(refreshToken);\n }\n const p = this.refreshTokenNoCache(refreshToken).then(r => {\n this.refreshTokenPromises.delete(refreshToken);\n return r;\n }, e => {\n this.refreshTokenPromises.delete(refreshToken);\n throw e;\n });\n this.refreshTokenPromises.set(refreshToken, p);\n return p;\n }\n async refreshTokenNoCache(refreshToken) {\n var _a;\n if (!refreshToken) {\n throw new Error('No refresh token is set.');\n }\n const url = this.endpoints.oauth2TokenUrl.toString();\n const data = {\n refresh_token: refreshToken,\n client_id: this._clientId,\n client_secret: this._clientSecret,\n grant_type: 'refresh_token',\n };\n let res;\n try {\n // request for new token\n res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(data),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError &&\n e.message === 'invalid_grant' &&\n ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) &&\n /ReAuth/i.test(e.response.data.error_description)) {\n e.message = JSON.stringify(e.response.data);\n }\n throw e;\n }\n const tokens = res.data;\n // TODO: de-duplicate this code from a few spots\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n refreshAccessToken(callback) {\n if (callback) {\n this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback);\n }\n else {\n return this.refreshAccessTokenAsync();\n }\n }\n async refreshAccessTokenAsync() {\n const r = await this.refreshToken(this.credentials.refresh_token);\n const tokens = r.tokens;\n tokens.refresh_token = this.credentials.refresh_token;\n this.credentials = tokens;\n return { credentials: this.credentials, res: r.res };\n }\n getAccessToken(callback) {\n if (callback) {\n this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback);\n }\n else {\n return this.getAccessTokenAsync();\n }\n }\n async getAccessTokenAsync() {\n const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring();\n if (shouldRefresh) {\n if (!this.credentials.refresh_token) {\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n return { token: this.credentials.access_token };\n }\n }\n else {\n throw new Error('No refresh token or refresh handler callback is set.');\n }\n }\n const r = await this.refreshAccessTokenAsync();\n if (!r.credentials || (r.credentials && !r.credentials.access_token)) {\n throw new Error('Could not refresh access token.');\n }\n return { token: r.credentials.access_token, res: r.res };\n }\n else {\n return { token: this.credentials.access_token };\n }\n }\n /**\n * The main authentication interface. It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * In OAuth2Client, the result has the form:\n * { Authorization: 'Bearer ' }\n * @param url The optional url being authorized\n */\n async getRequestHeaders(url) {\n const headers = (await this.getRequestMetadataAsync(url)).headers;\n return headers;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n const thisCreds = this.credentials;\n if (!thisCreds.access_token &&\n !thisCreds.refresh_token &&\n !this.apiKey &&\n !this.refreshHandler) {\n throw new Error('No access, refresh token, API key or refresh handler callback is set.');\n }\n if (thisCreds.access_token && !this.isTokenExpiring()) {\n thisCreds.token_type = thisCreds.token_type || 'Bearer';\n const headers = {\n Authorization: thisCreds.token_type + ' ' + thisCreds.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n // If refreshHandler exists, call processAndValidateRefreshHandler().\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n const headers = {\n Authorization: 'Bearer ' + this.credentials.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n if (this.apiKey) {\n return { headers: { 'X-Goog-Api-Key': this.apiKey } };\n }\n let r = null;\n let tokens = null;\n try {\n r = await this.refreshToken(thisCreds.refresh_token);\n tokens = r.tokens;\n }\n catch (err) {\n const e = err;\n if (e.response &&\n (e.response.status === 403 || e.response.status === 404)) {\n e.message = `Could not refresh access token: ${e.message}`;\n }\n throw e;\n }\n const credentials = this.credentials;\n credentials.token_type = credentials.token_type || 'Bearer';\n tokens.refresh_token = credentials.refresh_token;\n this.credentials = tokens;\n const headers = {\n Authorization: credentials.token_type + ' ' + tokens.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers), res: r.res };\n }\n /**\n * Generates an URL to revoke the given token.\n * @param token The existing token to be revoked.\n *\n * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL}\n */\n static getRevokeTokenUrl(token) {\n return new OAuth2Client().getRevokeTokenURL(token).toString();\n }\n /**\n * Generates a URL to revoke the given token.\n *\n * @param token The existing token to be revoked.\n */\n getRevokeTokenURL(token) {\n const url = new URL(this.endpoints.oauth2RevokeUrl);\n url.searchParams.append('token', token);\n return url;\n }\n revokeToken(token, callback) {\n const opts = {\n url: this.getRevokeTokenURL(token).toString(),\n method: 'POST',\n };\n if (callback) {\n this.transporter\n .request(opts)\n .then(r => callback(null, r), callback);\n }\n else {\n return this.transporter.request(opts);\n }\n }\n revokeCredentials(callback) {\n if (callback) {\n this.revokeCredentialsAsync().then(res => callback(null, res), callback);\n }\n else {\n return this.revokeCredentialsAsync();\n }\n }\n async revokeCredentialsAsync() {\n const token = this.credentials.access_token;\n this.credentials = {};\n if (token) {\n return this.revokeToken(token);\n }\n else {\n throw new Error('No access token to revoke.');\n }\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n async requestAsync(opts, retry = false) {\n let r2;\n try {\n const r = await this.getRequestMetadataAsync(opts.url);\n opts.headers = opts.headers || {};\n if (r.headers && r.headers['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project'];\n }\n if (r.headers && r.headers.Authorization) {\n opts.headers.Authorization = r.headers.Authorization;\n }\n if (this.apiKey) {\n opts.headers['X-Goog-Api-Key'] = this.apiKey;\n }\n r2 = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - An access_token and refresh_token were available, but either no\n // expiry_date was available or the forceRefreshOnFailure flag is set.\n // The absent expiry_date case can happen when developers stash the\n // access_token and refresh_token for later use, but the access_token\n // fails on the first try because it's expired. Some developers may\n // choose to enable forceRefreshOnFailure to mitigate time-related\n // errors.\n // Or the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - No refresh_token was available\n // - An access_token and a refreshHandler callback were available, but\n // either no expiry_date was available or the forceRefreshOnFailure\n // flag is set. The access_token fails on the first try because it's\n // expired. Some developers may choose to enable forceRefreshOnFailure\n // to mitigate time-related errors.\n const mayRequireRefresh = this.credentials &&\n this.credentials.access_token &&\n this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure);\n const mayRequireRefreshWithNoRefreshToken = this.credentials &&\n this.credentials.access_token &&\n !this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure) &&\n this.refreshHandler;\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) {\n await this.refreshAccessTokenAsync();\n return this.requestAsync(opts, true);\n }\n else if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n mayRequireRefreshWithNoRefreshToken) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n }\n return this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return r2;\n }\n verifyIdToken(options, callback) {\n // This function used to accept two arguments instead of an options object.\n // Check the types to help users upgrade with less pain.\n // This check can be removed after a 2.0 release.\n if (callback && typeof callback !== 'function') {\n throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.');\n }\n if (callback) {\n this.verifyIdTokenAsync(options).then(r => callback(null, r), callback);\n }\n else {\n return this.verifyIdTokenAsync(options);\n }\n }\n async verifyIdTokenAsync(options) {\n if (!options.idToken) {\n throw new Error('The verifyIdToken method requires an ID Token');\n }\n const response = await this.getFederatedSignonCertsAsync();\n const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry);\n return login;\n }\n /**\n * Obtains information about the provisioned access token. Especially useful\n * if you want to check the scopes that were provisioned to a given token.\n *\n * @param accessToken Required. 
The Access Token for which you want to get\n * user info.\n */\n async getTokenInfo(accessToken) {\n const { data } = await this.transporter.request({\n method: 'POST',\n headers: {\n 'Content-Type': 'application/x-www-form-urlencoded',\n Authorization: `Bearer ${accessToken}`,\n },\n url: this.endpoints.tokenInfoUrl.toString(),\n });\n const info = Object.assign({\n expiry_date: new Date().getTime() + data.expires_in * 1000,\n scopes: data.scope.split(' '),\n }, data);\n delete info.expires_in;\n delete info.scope;\n return info;\n }\n getFederatedSignonCerts(callback) {\n if (callback) {\n this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback);\n }\n else {\n return this.getFederatedSignonCertsAsync();\n }\n }\n async getFederatedSignonCertsAsync() {\n const nowTime = new Date().getTime();\n const format = (0, crypto_1.hasBrowserCrypto)()\n ? CertificateFormat.JWK\n : CertificateFormat.PEM;\n if (this.certificateExpiry &&\n nowTime < this.certificateExpiry.getTime() &&\n this.certificateCacheFormat === format) {\n return { certs: this.certificateCache, format };\n }\n let res;\n let url;\n switch (format) {\n case CertificateFormat.PEM:\n url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString();\n break;\n case CertificateFormat.JWK:\n url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString();\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n const cacheControl = res ? res.headers['cache-control'] : undefined;\n let cacheAge = -1;\n if (cacheControl) {\n const pattern = new RegExp('max-age=([0-9]*)');\n const regexResult = pattern.exec(cacheControl);\n if (regexResult && regexResult.length === 2) {\n // Cache results with max-age (in seconds)\n cacheAge = Number(regexResult[1]) * 1000; // milliseconds\n }\n }\n let certificates = {};\n switch (format) {\n case CertificateFormat.PEM:\n certificates = res.data;\n break;\n case CertificateFormat.JWK:\n for (const key of res.data.keys) {\n certificates[key.kid] = key;\n }\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n const now = new Date();\n this.certificateExpiry =\n cacheAge === -1 ? 
null : new Date(now.getTime() + cacheAge);\n this.certificateCache = certificates;\n this.certificateCacheFormat = format;\n return { certs: certificates, format, res };\n }\n getIapPublicKeys(callback) {\n if (callback) {\n this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback);\n }\n else {\n return this.getIapPublicKeysAsync();\n }\n }\n async getIapPublicKeysAsync() {\n let res;\n const url = this.endpoints.oauth2IapPublicKeyUrl.toString();\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n return { pubkeys: res.data, res };\n }\n verifySignedJwtWithCerts() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.');\n }\n /**\n * Verify the id token is signed with the correct certificate\n * and is from the correct audience.\n * @param jwt The jwt to verify (The ID Token in this case).\n * @param certs The array of certs to test the jwt against.\n * @param requiredAudience The audience to test the jwt against.\n * @param issuers The allowed issuers of the jwt (Optional).\n * @param maxExpiry The max expiry the certificate can be (Optional).\n * @return Returns a promise resolving to LoginTicket on verification.\n */\n async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) {\n const crypto = (0, crypto_1.createCrypto)();\n if (!maxExpiry) {\n maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_;\n }\n const segments = jwt.split('.');\n if (segments.length !== 3) {\n throw new Error('Wrong number of segments in token: ' + jwt);\n }\n const signed = segments[0] + '.' 
+ segments[1];\n let signature = segments[2];\n let envelope;\n let payload;\n try {\n envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`;\n }\n throw err;\n }\n if (!envelope) {\n throw new Error(\"Can't parse token envelope: \" + segments[0]);\n }\n try {\n payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token payload '${segments[0]}`;\n }\n throw err;\n }\n if (!payload) {\n throw new Error(\"Can't parse token payload: \" + segments[1]);\n }\n if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) {\n // If this is not present, then there's no reason to attempt verification\n throw new Error('No pem found for envelope: ' + JSON.stringify(envelope));\n }\n const cert = certs[envelope.kid];\n if (envelope.alg === 'ES256') {\n signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64');\n }\n const verified = await crypto.verify(cert, signed, signature);\n if (!verified) {\n throw new Error('Invalid token signature: ' + jwt);\n }\n if (!payload.iat) {\n throw new Error('No issue time in token: ' + JSON.stringify(payload));\n }\n if (!payload.exp) {\n throw new Error('No expiration time in token: ' + JSON.stringify(payload));\n }\n const iat = Number(payload.iat);\n if (isNaN(iat))\n throw new Error('iat field using invalid format');\n const exp = Number(payload.exp);\n if (isNaN(exp))\n throw new Error('exp field using invalid format');\n const now = new Date().getTime() / 1000;\n if (exp >= now + maxExpiry) {\n throw new Error('Expiration time too far in future: ' + JSON.stringify(payload));\n }\n const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_;\n const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_;\n if (now < earliest) {\n throw new Error('Token used too early, ' +\n now +\n ' < ' +\n earliest +\n ': ' +\n JSON.stringify(payload));\n }\n if (now > latest) {\n throw new Error('Token used too late, ' +\n now +\n ' > ' +\n latest +\n ': ' +\n JSON.stringify(payload));\n }\n if (issuers && issuers.indexOf(payload.iss) < 0) {\n throw new Error('Invalid issuer, expected one of [' +\n issuers +\n '], but got ' +\n payload.iss);\n }\n // Check the audience matches if we have one\n if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) {\n const aud = payload.aud;\n let audVerified = false;\n // If the requiredAudience is an array, check if it contains token\n // audience\n if (requiredAudience.constructor === Array) {\n audVerified = requiredAudience.indexOf(aud) > -1;\n }\n else {\n audVerified = aud === requiredAudience;\n }\n if (!audVerified) {\n throw new Error('Wrong recipient, payload audience != requiredAudience');\n }\n }\n return new loginticket_1.LoginTicket(envelope, payload);\n }\n /**\n * Returns a promise that resolves with AccessTokenResponse type if\n * refreshHandler is defined.\n * If not, nothing is returned.\n */\n async processAndValidateRefreshHandler() {\n if (this.refreshHandler) {\n const accessTokenResponse = await this.refreshHandler();\n if (!accessTokenResponse.access_token) {\n throw new Error('No access token is returned by the refreshHandler callback.');\n }\n return accessTokenResponse;\n }\n return;\n }\n /**\n * Returns true if a token is expired or will expire within\n * eagerRefreshThresholdMillismilliseconds.\n * If there is no expiry time, assumes the token is not expired or 
expiring.\n */\n isTokenExpiring() {\n const expiryDate = this.credentials.expiry_date;\n return expiryDate\n ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.OAuth2Client = OAuth2Client;\n/**\n * @deprecated use instance's {@link OAuth2Client.endpoints}\n */\nOAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo';\n/**\n * Clock skew - five minutes in seconds\n */\nOAuth2Client.CLOCK_SKEW_SECS_ = 300;\n/**\n * The default max Token Lifetime is one day in seconds\n */\nOAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0;\nconst querystring = require(\"querystring\");\nconst crypto_1 = require(\"../crypto/crypto\");\n/** List of HTTP methods that accept request bodies. */\nconst METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH'];\n/**\n * Abstract class for handling client authentication in OAuth-based\n * operations.\n * When request-body client authentication is used, only application/json and\n * application/x-www-form-urlencoded content types for HTTP methods that support\n * request bodies are supported.\n */\nclass OAuthClientAuthHandler {\n /**\n * Instantiates an OAuth client authentication handler.\n * @param clientAuthentication The client auth credentials.\n */\n constructor(clientAuthentication) {\n this.clientAuthentication = clientAuthentication;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Applies client authentication on the OAuth request's headers or POST\n * body but does not process the request.\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n applyClientAuthenticationOptions(opts, bearerToken) {\n // Inject authenticated header.\n this.injectAuthenticatedHeaders(opts, bearerToken);\n // Inject authenticated request body.\n if (!bearerToken) {\n this.injectAuthenticatedRequestBody(opts);\n }\n }\n /**\n * Applies client authentication on the request's header if either\n * basic authentication or bearer token authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n injectAuthenticatedHeaders(opts, bearerToken) {\n var _a;\n // Bearer token prioritized higher than basic Auth.\n if (bearerToken) {\n opts.headers = opts.headers || {};\n Object.assign(opts.headers, {\n Authorization: `Bearer ${bearerToken}}`,\n });\n 
}\n else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') {\n opts.headers = opts.headers || {};\n const clientId = this.clientAuthentication.clientId;\n const clientSecret = this.clientAuthentication.clientSecret || '';\n const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`);\n Object.assign(opts.headers, {\n Authorization: `Basic ${base64EncodedCreds}`,\n });\n }\n }\n /**\n * Applies client authentication on the request's body if request-body\n * client authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n */\n injectAuthenticatedRequestBody(opts) {\n var _a;\n if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') {\n const method = (opts.method || 'GET').toUpperCase();\n // Inject authenticated request body.\n if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) {\n // Get content-type.\n let contentType;\n const headers = opts.headers || {};\n for (const key in headers) {\n if (key.toLowerCase() === 'content-type' && headers[key]) {\n contentType = headers[key].toLowerCase();\n break;\n }\n }\n if (contentType === 'application/x-www-form-urlencoded') {\n opts.data = opts.data || '';\n const data = querystring.parse(opts.data);\n Object.assign(data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n opts.data = querystring.stringify(data);\n }\n else if (contentType === 'application/json') {\n opts.data = opts.data || {};\n Object.assign(opts.data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n }\n else {\n throw new Error(`${contentType} content-types are not supported with ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n else {\n throw new Error(`${method} HTTP method does not support ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n }\n}\nexports.OAuthClientAuthHandler = OAuthClientAuthHandler;\n/**\n * Converts an OAuth error response to a native JavaScript Error.\n * @param resp The OAuth error response to convert to a native Error object.\n * @param err The optional original error. 
If provided, the error properties\n * will be copied to the new error.\n * @return The converted native Error object.\n */\nfunction getErrorFromOAuthErrorResponse(resp, err) {\n // Error response.\n const errorCode = resp.error;\n const errorDescription = resp.error_description;\n const errorUri = resp.error_uri;\n let message = `Error code ${errorCode}`;\n if (typeof errorDescription !== 'undefined') {\n message += `: ${errorDescription}`;\n }\n if (typeof errorUri !== 'undefined') {\n message += ` - ${errorUri}`;\n }\n const newError = new Error(message);\n // Copy properties from original error to newly generated error.\n if (err) {\n const keys = Object.keys(err);\n if (err.stack) {\n // Copy error.stack if available.\n keys.push('stack');\n }\n keys.forEach(key => {\n // Do not overwrite the message field.\n if (key !== 'message') {\n Object.defineProperty(newError, key, {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n value: err[key],\n writable: false,\n enumerable: true,\n });\n }\n });\n }\n return newError;\n}\nexports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse;\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthClient = exports.ExecutableError = void 0;\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst executable_response_1 = require(\"./executable-response\");\nconst pluggable_auth_handler_1 = require(\"./pluggable-auth-handler\");\n/**\n * Error thrown from the executable run by PluggableAuthClient.\n */\nclass ExecutableError extends Error {\n constructor(message, code) {\n super(`The executable failed with exit code: ${code} and error message: ${message}.`);\n this.code = code;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableError = ExecutableError;\n/**\n * The default executable timeout when none is provided, in milliseconds.\n */\nconst DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000;\n/**\n * The minimum allowed executable timeout in milliseconds.\n */\nconst MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000;\n/**\n * The maximum allowed executable timeout in milliseconds.\n */\nconst MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000;\n/**\n * The environment variable to check to see if executable can be run.\n * Value must be set to '1' for the executable to run.\n */\nconst GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES';\n/**\n * The maximum currently supported executable version.\n */\nconst MAXIMUM_EXECUTABLE_VERSION = 1;\n/**\n * PluggableAuthClient enables the exchange of workload identity pool external credentials for\n * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These\n * scripts/executables are completely independent of the Google Cloud Auth libraries. 
These\n * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token\n * to be exchanged for a Google access token.\n *\n *
To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable\n * must be set to '1'. This is for security reasons.\n *\n *
Both OIDC and SAML are supported. The executable must adhere to a specific response format\n * defined below.\n *\n *
The executable must print out the 3rd party token to STDOUT in JSON format. When an\n * output_file is specified in the credential configuration, the executable must also handle writing the\n * JSON response to this file.\n *\n *
\n * OIDC response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:id_token\",\n *   \"id_token\": \"HEADER.PAYLOAD.SIGNATURE\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * SAML2 response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:saml2\",\n *   \"saml_response\": \"...\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * Error response sample:\n * {\n *   \"version\": 1,\n *   \"success\": false,\n *   \"code\": \"401\",\n *   \"message\": \"Error message.\"\n * }\n * 
\n *\n *
The \"expiration_time\" field in the JSON response is only required for successful\n * responses when an output file was specified in the credential configuration\n *\n *
The auth libraries will populate certain environment variables that will be accessible by the\n * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,\n * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and\n * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.\n *\n *
Please see this repositories README for a complete executable request/response specification.\n */\nclass PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates a PluggableAuthClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid pluggable auth credential.\n * @param options The external account options object typically loaded from\n * the external account JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n if (!options.credential_source.executable) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n this.command = options.credential_source.executable.command;\n if (!this.command) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n // Check if the provided timeout exists and if it is valid.\n if (options.credential_source.executable.timeout_millis === undefined) {\n this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS;\n }\n else {\n this.timeoutMillis = options.credential_source.executable.timeout_millis;\n if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS ||\n this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) {\n throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` +\n `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`);\n }\n }\n this.outputFile = options.credential_source.executable.output_file;\n this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({\n command: this.command,\n timeoutMillis: this.timeoutMillis,\n outputFile: this.outputFile,\n });\n this.credentialSourceType = 'executable';\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this calls a user provided executable which returns the subject token.\n * The logic is summarized as:\n * 1. Validated that the executable is allowed to run. The\n * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to\n * 1 for security reasons.\n * 2. If an output file is specified by the user, check the file location\n * for a response. If the file exists and contains a valid response,\n * return the subject token from the file.\n * 3. 
Call the provided executable and return response.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Check if the executable is allowed to run.\n if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') {\n throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' +\n 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' +\n 'Variable to 1.');\n }\n let executableResponse = undefined;\n // Try to get cached executable response from output file.\n if (this.outputFile) {\n executableResponse = await this.handler.retrieveCachedResponse();\n }\n // If no response from output file, call the executable.\n if (!executableResponse) {\n // Set up environment map with required values for the executable.\n const envMap = new Map();\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience);\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType);\n // Always set to 0 because interactive mode is not supported.\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0');\n if (this.outputFile) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile);\n }\n const serviceAccountEmail = this.getServiceAccountEmail();\n if (serviceAccountEmail) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail);\n }\n executableResponse =\n await this.handler.retrieveResponseFromExecutable(envMap);\n }\n if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) {\n throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`);\n }\n // Check that response was successful.\n if (!executableResponse.success) {\n throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode);\n }\n // Check that response contains expiration time if output file was specified.\n if (this.outputFile) {\n if (!executableResponse.expirationTime) {\n throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.');\n }\n }\n // Check that response is not expired.\n if (executableResponse.isExpired()) {\n throw new Error('Executable response is expired.');\n }\n // Return subject token from response.\n return executableResponse.subjectToken;\n }\n}\nexports.PluggableAuthClient = PluggableAuthClient;\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthHandler = void 0;\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\nconst executable_response_1 = require(\"./executable-response\");\nconst childProcess = require(\"child_process\");\nconst fs = require(\"fs\");\n/**\n * A handler used to retrieve 3rd party token responses from user defined\n * executables and cached 
file output for the PluggableAuthClient class.\n */\nclass PluggableAuthHandler {\n /**\n * Instantiates a PluggableAuthHandler instance using the provided\n * PluggableAuthHandlerOptions object.\n */\n constructor(options) {\n if (!options.command) {\n throw new Error('No command provided.');\n }\n this.commandComponents = PluggableAuthHandler.parseCommand(options.command);\n this.timeoutMillis = options.timeoutMillis;\n if (!this.timeoutMillis) {\n throw new Error('No timeoutMillis provided.');\n }\n this.outputFile = options.outputFile;\n }\n /**\n * Calls user provided executable to get a 3rd party subject token and\n * returns the response.\n * @param envMap a Map of additional Environment Variables required for\n * the executable.\n * @return A promise that resolves with the executable response.\n */\n retrieveResponseFromExecutable(envMap) {\n return new Promise((resolve, reject) => {\n // Spawn process to run executable using added environment variables.\n const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), {\n env: { ...process.env, ...Object.fromEntries(envMap) },\n });\n let output = '';\n // Append stdout to output as executable runs.\n child.stdout.on('data', (data) => {\n output += data;\n });\n // Append stderr as executable runs.\n child.stderr.on('data', (err) => {\n output += err;\n });\n // Set up a timeout to end the child process and throw an error.\n const timeout = setTimeout(() => {\n // Kill child process and remove listeners so 'close' event doesn't get\n // read after child process is killed.\n child.removeAllListeners();\n child.kill();\n return reject(new Error('The executable failed to finish within the timeout specified.'));\n }, this.timeoutMillis);\n child.on('close', (code) => {\n // Cancel timeout if executable closes before timeout is reached.\n clearTimeout(timeout);\n if (code === 0) {\n // If the executable completed successfully, try to return the parsed response.\n try {\n const responseJson = JSON.parse(output);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n return resolve(response);\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n return reject(error);\n }\n return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`));\n }\n }\n else {\n return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString()));\n }\n });\n });\n }\n /**\n * Checks user provided output file for response from previous run of\n * executable and return the response if it exists, is formatted correctly, and is not expired.\n */\n async retrieveCachedResponse() {\n if (!this.outputFile || this.outputFile.length === 0) {\n return undefined;\n }\n let filePath;\n try {\n filePath = await fs.promises.realpath(this.outputFile);\n }\n catch (_a) {\n // If file path cannot be resolved, return undefined.\n return undefined;\n }\n if (!(await fs.promises.lstat(filePath)).isFile()) {\n // If path does not lead to file, return undefined.\n return undefined;\n }\n const responseString = await fs.promises.readFile(filePath, {\n encoding: 'utf8',\n });\n if (responseString === '') {\n return undefined;\n }\n try {\n const responseJson = JSON.parse(responseString);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n // Check if response is successful and unexpired.\n if (response.isValid()) {\n return new executable_response_1.ExecutableResponse(responseJson);\n 
}\n return undefined;\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n throw error;\n }\n throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`);\n }\n }\n /**\n * Parses given command string into component array, splitting on spaces unless\n * spaces are between quotation marks.\n */\n static parseCommand(command) {\n // Split the command into components by splitting on spaces,\n // unless spaces are contained in quotation marks.\n const components = command.match(/(?:[^\\s\"]+|\"[^\"]*\")+/g);\n if (!components) {\n throw new Error(`Provided command: \"${command}\" could not be parsed.`);\n }\n // Remove quotation marks from the beginning and end of each component if they are present.\n for (let i = 0; i < components.length; i++) {\n if (components[i][0] === '\"' && components[i].slice(-1) === '\"') {\n components[i] = components[i].slice(1, -1);\n }\n }\n return components;\n }\n}\nexports.PluggableAuthHandler = PluggableAuthHandler;\n","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nexports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user';\nclass UserRefreshClient extends oauth2client_1.OAuth2Client {\n constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) {\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? 
optionsOrClientId\n : {\n clientId: optionsOrClientId,\n clientSecret,\n refreshToken,\n eagerRefreshThresholdMillis,\n forceRefreshOnFailure,\n };\n super(opts);\n this._refreshToken = opts.refreshToken;\n this.credentials.refresh_token = opts.refreshToken;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken An ignored refreshToken..\n * @param callback Optional callback.\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n return super.refreshTokenNoCache(this._refreshToken);\n }\n /**\n * Create a UserRefreshClient credentials instance using the given input\n * options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the user refresh token');\n }\n if (json.type !== 'authorized_user') {\n throw new Error('The incoming JSON object does not have the \"authorized_user\" type');\n }\n if (!json.client_id) {\n throw new Error('The incoming JSON object does not contain a client_id field');\n }\n if (!json.client_secret) {\n throw new Error('The incoming JSON object does not contain a client_secret field');\n }\n if (!json.refresh_token) {\n throw new Error('The incoming JSON object does not contain a refresh_token field');\n }\n this._clientId = json.client_id;\n this._clientSecret = json.client_secret;\n this._refreshToken = json.refresh_token;\n this.credentials.refresh_token = json.refresh_token;\n this.quotaProjectId = json.quota_project_id;\n this.universeDomain = json.universe_domain || this.universeDomain;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n async fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n return reject(new Error('Must pass in a stream containing the user refresh token.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n return resolve();\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n}\nexports.UserRefreshClient = UserRefreshClient;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.StsCredentials = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst transporters_1 = require(\"../transporters\");\nconst oauth2common_1 = require(\"./oauth2common\");\n/**\n * Implements the OAuth 2.0 token exchange based on\n * https://tools.ietf.org/html/rfc8693\n */\nclass StsCredentials extends oauth2common_1.OAuthClientAuthHandler {\n /**\n * Initializes an STS credentials instance.\n * @param tokenExchangeEndpoint The token exchange endpoint.\n * @param clientAuthentication The client 
authentication credentials if\n * available.\n */\n constructor(tokenExchangeEndpoint, clientAuthentication) {\n super(clientAuthentication);\n this.tokenExchangeEndpoint = tokenExchangeEndpoint;\n this.transporter = new transporters_1.DefaultTransporter();\n }\n /**\n * Exchanges the provided token for another type of token based on the\n * rfc8693 spec.\n * @param stsCredentialsOptions The token exchange options used to populate\n * the token exchange request.\n * @param additionalHeaders Optional additional headers to pass along the\n * request.\n * @param options Optional additional GCP-specific non-spec defined options\n * to send with the request.\n * Example: `&options=${encodeUriComponent(JSON.stringified(options))}`\n * @return A promise that resolves with the token exchange response containing\n * the requested token and its expiration time.\n */\n async exchangeToken(stsCredentialsOptions, additionalHeaders, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options) {\n var _a, _b, _c;\n const values = {\n grant_type: stsCredentialsOptions.grantType,\n resource: stsCredentialsOptions.resource,\n audience: stsCredentialsOptions.audience,\n scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '),\n requested_token_type: stsCredentialsOptions.requestedTokenType,\n subject_token: stsCredentialsOptions.subjectToken,\n subject_token_type: stsCredentialsOptions.subjectTokenType,\n actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken,\n actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType,\n // Non-standard GCP-specific options.\n options: options && JSON.stringify(options),\n };\n // Remove undefined fields.\n Object.keys(values).forEach(key => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof values[key] === 'undefined') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delete values[key];\n }\n });\n const headers = {\n 'Content-Type': 'application/x-www-form-urlencoded',\n };\n // Inject additional STS headers if available.\n Object.assign(headers, additionalHeaders || {});\n const opts = {\n url: this.tokenExchangeEndpoint.toString(),\n method: 'POST',\n headers,\n data: querystring.stringify(values),\n responseType: 'json',\n };\n // Apply OAuth client authentication.\n this.applyClientAuthenticationOptions(opts);\n try {\n const response = await this.transporter.request(opts);\n // Successful response.\n const stsSuccessfulResponse = response.data;\n stsSuccessfulResponse.res = response;\n return stsSuccessfulResponse;\n }\n catch (error) {\n // Translate error to OAuthError.\n if (error instanceof gaxios_1.GaxiosError && error.response) {\n throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, \n // Preserve other fields from the original error.\n error);\n }\n // Request could fail before the server responds.\n throw error;\n }\n }\n}\nexports.StsCredentials = StsCredentials;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BrowserCrypto = void 0;\n// This file implements crypto functions we need using in-browser\n// SubtleCrypto interface `window.crypto.subtle`.\nconst base64js = require(\"base64-js\");\nconst crypto_1 = require(\"../crypto\");\nclass BrowserCrypto {\n constructor() {\n if (typeof window === 'undefined' ||\n window.crypto === undefined ||\n window.crypto.subtle === undefined) {\n throw new Error(\"SubtleCrypto not found. Make sure it's an https:// website.\");\n }\n }\n async sha256DigestBase64(str) {\n // SubtleCrypto digest() method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return base64js.fromByteArray(new Uint8Array(outputBuffer));\n }\n randomBytesBase64(count) {\n const array = new Uint8Array(count);\n window.crypto.getRandomValues(array);\n return base64js.fromByteArray(array);\n }\n static padBase64(base64) {\n // base64js requires padding, so let's add some '='\n while (base64.length % 4 !== 0) {\n base64 += '=';\n }\n return base64;\n }\n async verify(pubkey, data, signature) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature));\n const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']);\n // SubtleCrypto's verify method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray);\n return result;\n }\n async sign(privateKey, data) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']);\n // SubtleCrypto's sign method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray);\n return base64js.fromByteArray(new Uint8Array(result));\n }\n decodeBase64StringUtf8(base64) {\n const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const result = new TextDecoder().decode(uint8array);\n return result;\n }\n encodeBase64StringUtf8(text) {\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const uint8array = new TextEncoder().encode(text);\n const result = base64js.fromByteArray(uint8array);\n return result;\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n // SubtleCrypto digest() 
method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return (0, crypto_1.fromArrayBufferToHex)(outputBuffer);\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n * @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n // Convert key, if provided in ArrayBuffer format, to string.\n const rawKey = typeof key === 'string'\n ? key\n : String.fromCharCode(...new Uint16Array(key));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const enc = new TextEncoder();\n const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), {\n name: 'HMAC',\n hash: {\n name: 'SHA-256',\n },\n }, false, ['sign']);\n return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg));\n }\n}\nexports.BrowserCrypto = BrowserCrypto;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0;\nconst crypto_1 = require(\"./browser/crypto\");\nconst crypto_2 = require(\"./node/crypto\");\nfunction createCrypto() {\n if (hasBrowserCrypto()) {\n return new crypto_1.BrowserCrypto();\n }\n return new crypto_2.NodeCrypto();\n}\nexports.createCrypto = createCrypto;\nfunction hasBrowserCrypto() {\n return (typeof window !== 'undefined' &&\n typeof window.crypto !== 'undefined' &&\n typeof window.crypto.subtle !== 'undefined');\n}\nexports.hasBrowserCrypto = hasBrowserCrypto;\n/**\n * Converts an ArrayBuffer to a hexadecimal string.\n * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string.\n * @return The hexadecimal encoding of the ArrayBuffer.\n */\nfunction fromArrayBufferToHex(arrayBuffer) {\n // Convert buffer to byte array.\n const byteArray = Array.from(new Uint8Array(arrayBuffer));\n // Convert bytes to hex string.\n return byteArray\n .map(byte => {\n return byte.toString(16).padStart(2, '0');\n })\n .join('');\n}\nexports.fromArrayBufferToHex = fromArrayBufferToHex;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, 
software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NodeCrypto = void 0;\nconst crypto = require(\"crypto\");\nclass NodeCrypto {\n async sha256DigestBase64(str) {\n return crypto.createHash('sha256').update(str).digest('base64');\n }\n randomBytesBase64(count) {\n return crypto.randomBytes(count).toString('base64');\n }\n async verify(pubkey, data, signature) {\n const verifier = crypto.createVerify('RSA-SHA256');\n verifier.update(data);\n verifier.end();\n return verifier.verify(pubkey, signature, 'base64');\n }\n async sign(privateKey, data) {\n const signer = crypto.createSign('RSA-SHA256');\n signer.update(data);\n signer.end();\n return signer.sign(privateKey, 'base64');\n }\n decodeBase64StringUtf8(base64) {\n return Buffer.from(base64, 'base64').toString('utf-8');\n }\n encodeBase64StringUtf8(text) {\n return Buffer.from(text, 'utf-8').toString('base64');\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n return crypto.createHash('sha256').update(str).digest('hex');\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n * @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n const cryptoKey = typeof key === 'string' ? 
key : toBuffer(key);\n return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest());\n }\n}\nexports.NodeCrypto = NodeCrypto;\n/**\n * Converts a Node.js Buffer to an ArrayBuffer.\n * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer\n * @param buffer The Buffer input to covert.\n * @return The ArrayBuffer representation of the input.\n */\nfunction toArrayBuffer(buffer) {\n return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);\n}\n/**\n * Converts an ArrayBuffer to a Node.js Buffer.\n * @param arrayBuffer The ArrayBuffer input to covert.\n * @return The Buffer representation of the input.\n */\nfunction toBuffer(arrayBuffer) {\n return Buffer.from(arrayBuffer);\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gcpMetadata = void 0;\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nconst googleauth_1 = require(\"./auth/googleauth\");\nObject.defineProperty(exports, \"GoogleAuth\", { enumerable: true, get: function () { return googleauth_1.GoogleAuth; } });\nexports.gcpMetadata = require(\"gcp-metadata\");\nvar authclient_1 = require(\"./auth/authclient\");\nObject.defineProperty(exports, \"AuthClient\", { enumerable: true, get: function () { return authclient_1.AuthClient; } });\nObject.defineProperty(exports, \"DEFAULT_UNIVERSE\", { enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } });\nvar computeclient_1 = require(\"./auth/computeclient\");\nObject.defineProperty(exports, \"Compute\", { enumerable: true, get: function () { return computeclient_1.Compute; } });\nvar envDetect_1 = require(\"./auth/envDetect\");\nObject.defineProperty(exports, \"GCPEnv\", { enumerable: true, get: function () { return envDetect_1.GCPEnv; } });\nvar iam_1 = require(\"./auth/iam\");\nObject.defineProperty(exports, \"IAMAuth\", { enumerable: true, get: function () { return iam_1.IAMAuth; } });\nvar idtokenclient_1 = require(\"./auth/idtokenclient\");\nObject.defineProperty(exports, \"IdTokenClient\", { enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } });\nvar jwtaccess_1 = require(\"./auth/jwtaccess\");\nObject.defineProperty(exports, \"JWTAccess\", { enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } });\nvar jwtclient_1 = require(\"./auth/jwtclient\");\nObject.defineProperty(exports, \"JWT\", { enumerable: true, get: function () { 
return jwtclient_1.JWT; } });\nvar impersonated_1 = require(\"./auth/impersonated\");\nObject.defineProperty(exports, \"Impersonated\", { enumerable: true, get: function () { return impersonated_1.Impersonated; } });\nvar oauth2client_1 = require(\"./auth/oauth2client\");\nObject.defineProperty(exports, \"CodeChallengeMethod\", { enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } });\nObject.defineProperty(exports, \"OAuth2Client\", { enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } });\nvar loginticket_1 = require(\"./auth/loginticket\");\nObject.defineProperty(exports, \"LoginTicket\", { enumerable: true, get: function () { return loginticket_1.LoginTicket; } });\nvar refreshclient_1 = require(\"./auth/refreshclient\");\nObject.defineProperty(exports, \"UserRefreshClient\", { enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } });\nvar awsclient_1 = require(\"./auth/awsclient\");\nObject.defineProperty(exports, \"AwsClient\", { enumerable: true, get: function () { return awsclient_1.AwsClient; } });\nvar identitypoolclient_1 = require(\"./auth/identitypoolclient\");\nObject.defineProperty(exports, \"IdentityPoolClient\", { enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } });\nvar externalclient_1 = require(\"./auth/externalclient\");\nObject.defineProperty(exports, \"ExternalAccountClient\", { enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } });\nvar baseexternalclient_1 = require(\"./auth/baseexternalclient\");\nObject.defineProperty(exports, \"BaseExternalAccountClient\", { enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } });\nvar downscopedclient_1 = require(\"./auth/downscopedclient\");\nObject.defineProperty(exports, \"DownscopedClient\", { enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } });\nvar pluggable_auth_client_1 = require(\"./auth/pluggable-auth-client\");\nObject.defineProperty(exports, \"PluggableAuthClient\", { enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } });\nvar transporters_1 = require(\"./transporters\");\nObject.defineProperty(exports, \"DefaultTransporter\", { enumerable: true, get: function () { return transporters_1.DefaultTransporter; } });\nconst auth = new googleauth_1.GoogleAuth();\nexports.auth = auth;\n","\"use strict\";\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validate = void 0;\n// Accepts an options object passed from the user to the API. In the\n// previous version of the API, it referred to a `Request` options object.\n// Now it refers to an Axiox Request Config object. 
This is here to help\n// ensure users don't pass invalid options when they upgrade from 0.x to 1.x.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction validate(options) {\n const vpairs = [\n { invalid: 'uri', expected: 'url' },\n { invalid: 'json', expected: 'data' },\n { invalid: 'qs', expected: 'params' },\n ];\n for (const pair of vpairs) {\n if (options[pair.invalid]) {\n const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`;\n throw new Error(e);\n }\n }\n}\nexports.validate = validate;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DefaultTransporter = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst options_1 = require(\"./options\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst pkg = require('../../package.json');\nconst PRODUCT_NAME = 'google-api-nodejs-client';\nclass DefaultTransporter {\n constructor() {\n /**\n * A configurable, replacable `Gaxios` instance.\n */\n this.instance = new gaxios_1.Gaxios();\n }\n /**\n * Configures request options before making a request.\n * @param opts GaxiosOptions options.\n * @return Configured options.\n */\n configure(opts = {}) {\n opts.headers = opts.headers || {};\n if (typeof window === 'undefined') {\n // set transporter user agent if not in browser\n const uaValue = opts.headers['User-Agent'];\n if (!uaValue) {\n opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT;\n }\n else if (!uaValue.includes(`${PRODUCT_NAME}/`)) {\n opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`;\n }\n // track google-auth-library-nodejs version:\n if (!opts.headers['x-goog-api-client']) {\n const nodeVersion = process.version.replace(/^v/, '');\n opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`;\n }\n }\n return opts;\n }\n /**\n * Makes a request using Gaxios with given options.\n * @param opts GaxiosOptions options.\n * @param callback optional callback that contains GaxiosResponse object.\n * @return GaxiosPromise, assuming no callback is passed.\n */\n request(opts) {\n // ensure the user isn't passing in request-style options\n opts = this.configure(opts);\n (0, options_1.validate)(opts);\n return this.instance.request(opts).catch(e => {\n throw this.processError(e);\n });\n }\n get defaults() {\n return this.instance.defaults;\n }\n set defaults(opts) {\n this.instance.defaults = opts;\n }\n /**\n * Changes the error to include details from the body.\n */\n processError(e) {\n const res = e.response;\n const err = e;\n const body = res ? 
res.data : null;\n if (res && body && body.error && res.status !== 200) {\n if (typeof body.error === 'string') {\n err.message = body.error;\n err.status = res.status;\n }\n else if (Array.isArray(body.error.errors)) {\n err.message = body.error.errors\n .map((err2) => err2.message)\n .join('\\n');\n err.code = body.error.code;\n err.errors = body.error.errors;\n }\n else {\n err.message = body.error.message;\n err.code = body.error.code;\n }\n }\n else if (res && res.status >= 400) {\n // Consider all 4xx and 5xx responses errors.\n err.message = body;\n err.status = res.status;\n }\n return err;\n }\n}\nexports.DefaultTransporter = DefaultTransporter;\n/**\n * Default user agent.\n */\nDefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`;\n","\"use strict\";\n// Copyright 2023 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0;\n/**\n * Returns the camel case of a provided string.\n *\n * @remarks\n *\n * Match any `_` and not `_` pair, then return the uppercase of the not `_`\n * character.\n *\n * @internal\n *\n * @param str the string to convert\n * @returns the camelCase'd string\n */\nfunction snakeToCamel(str) {\n return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase());\n}\nexports.snakeToCamel = snakeToCamel;\n/**\n * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference\n * for original, non-camelCase key.\n *\n * @param obj object to lookup a value in\n * @returns a `get` function for getting `obj[key || snakeKey]`, if available\n */\nfunction originalOrCamelOptions(obj) {\n /**\n *\n * @param key an index of object, preferably snake_case\n * @returns the value `obj[key || snakeKey]`, if available\n */\n function get(key) {\n var _a;\n const o = (obj || {});\n return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)];\n }\n return { get };\n}\nexports.originalOrCamelOptions = originalOrCamelOptions;\n/**\n * A simple LRU cache utility.\n * Not meant for external usage.\n *\n * @experimental\n * @internal\n */\nclass LRUCache {\n constructor(options) {\n _LRUCache_instances.add(this);\n /**\n * Maps are in order. 
Thus, the older item is the first item.\n *\n * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map}\n */\n _LRUCache_cache.set(this, new Map());\n this.capacity = options.capacity;\n this.maxAge = options.maxAge;\n }\n /**\n * Add an item to the cache.\n *\n * @param key the key to upsert\n * @param value the value of the key\n */\n set(key, value) {\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_moveToEnd).call(this, key, value);\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_evict).call(this);\n }\n /**\n * Get an item from the cache.\n *\n * @param key the key to retrieve\n */\n get(key) {\n const item = __classPrivateFieldGet(this, _LRUCache_cache, \"f\").get(key);\n if (!item)\n return;\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_moveToEnd).call(this, key, item.value);\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_evict).call(this);\n return item.value;\n }\n}\nexports.LRUCache = LRUCache;\n_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) {\n __classPrivateFieldGet(this, _LRUCache_cache, \"f\").delete(key);\n __classPrivateFieldGet(this, _LRUCache_cache, \"f\").set(key, {\n value,\n lastAccessed: Date.now(),\n });\n}, _LRUCache_evict = function _LRUCache_evict() {\n const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0;\n /**\n * Because we know Maps are in order, this item is both the\n * last item in the list (capacity) and oldest (maxAge).\n */\n let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, \"f\").entries().next();\n while (!oldestItem.done &&\n (__classPrivateFieldGet(this, _LRUCache_cache, \"f\").size > this.capacity || // too many\n oldestItem.value[1].lastAccessed < cutoffDate) // too old\n ) {\n __classPrivateFieldGet(this, _LRUCache_cache, \"f\").delete(oldestItem.value[0]);\n oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, \"f\").entries().next();\n }\n};\n","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleToken = void 0;\nconst fs = require(\"fs\");\nconst gaxios_1 = require(\"gaxios\");\nconst jws = require(\"jws\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst readFile = fs.readFile\n ? (0, util_1.promisify)(fs.readFile)\n : async () => {\n // if running in the web-browser, fs.readFile may not have been shimmed.\n throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS');\n };\nconst GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token';\nconst GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token=';\nclass ErrorWithCode extends Error {\n constructor(message, code) {\n super(message);\n this.code = code;\n }\n}\nclass GoogleToken {\n get accessToken() {\n return this.rawToken ? this.rawToken.access_token : undefined;\n }\n get idToken() {\n return this.rawToken ? this.rawToken.id_token : undefined;\n }\n get tokenType() {\n return this.rawToken ? this.rawToken.token_type : undefined;\n }\n get refreshToken() {\n return this.rawToken ? this.rawToken.refresh_token : undefined;\n }\n /**\n * Create a GoogleToken.\n *\n * @param options Configuration object.\n */\n constructor(options) {\n _GoogleToken_instances.add(this);\n this.transporter = {\n request: opts => (0, gaxios_1.request)(opts),\n };\n _GoogleToken_inFlightRequest.set(this, void 0);\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_configure).call(this, options);\n }\n /**\n * Returns whether the token has expired.\n *\n * @return true if the token has expired, false otherwise.\n */\n hasExpired() {\n const now = new Date().getTime();\n if (this.rawToken && this.expiresAt) {\n return now >= this.expiresAt;\n }\n else {\n return true;\n }\n }\n /**\n * Returns whether the token will expire within eagerRefreshThresholdMillis\n *\n * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise.\n */\n isTokenExpiring() {\n var _a;\n const now = new Date().getTime();\n const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? 
_a : 0;\n if (this.rawToken && this.expiresAt) {\n return this.expiresAt <= now + eagerRefreshThresholdMillis;\n }\n else {\n return true;\n }\n }\n getToken(callback, opts = {}) {\n if (typeof callback === 'object') {\n opts = callback;\n callback = undefined;\n }\n opts = Object.assign({\n forceRefresh: false,\n }, opts);\n if (callback) {\n const cb = callback;\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback);\n return;\n }\n return __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_getTokenAsync).call(this, opts);\n }\n /**\n * Given a keyFile, extract the key and client email if available\n * @param keyFile Path to a json, pem, or p12 file that contains the key.\n * @returns an object with privateKey and clientEmail properties\n */\n async getCredentials(keyFile) {\n const ext = path.extname(keyFile);\n switch (ext) {\n case '.json': {\n const key = await readFile(keyFile, 'utf8');\n const body = JSON.parse(key);\n const privateKey = body.private_key;\n const clientEmail = body.client_email;\n if (!privateKey || !clientEmail) {\n throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS');\n }\n return { privateKey, clientEmail };\n }\n case '.der':\n case '.crt':\n case '.pem': {\n const privateKey = await readFile(keyFile, 'utf8');\n return { privateKey };\n }\n case '.p12':\n case '.pfx': {\n throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' +\n 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE');\n }\n default:\n throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' +\n 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE');\n }\n }\n revokeToken(callback) {\n if (callback) {\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback);\n return;\n }\n return __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_revokeTokenAsync).call(this);\n }\n}\nexports.GoogleToken = GoogleToken;\n_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) {\n if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, \"f\") && !opts.forceRefresh) {\n return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, \"f\");\n }\n try {\n return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_getTokenAsyncInner).call(this, opts), \"f\"));\n }\n finally {\n __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, \"f\");\n }\n}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) {\n if (this.isTokenExpiring() === false && opts.forceRefresh === false) {\n return Promise.resolve(this.rawToken);\n }\n if (!this.key && !this.keyFile) {\n throw new Error('No key or keyFile set.');\n }\n if (!this.key && this.keyFile) {\n const creds = await this.getCredentials(this.keyFile);\n this.key = creds.privateKey;\n this.iss = creds.clientEmail || this.iss;\n if (!creds.clientEmail) {\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_ensureEmail).call(this);\n }\n }\n return __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_requestToken).call(this);\n}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() {\n if (!this.iss) {\n throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS');\n }\n}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() {\n if (!this.accessToken) {\n throw new Error('No token to revoke.');\n }\n const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken;\n await this.transporter.request({\n url,\n retry: true,\n });\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_configure).call(this, {\n email: this.iss,\n sub: this.sub,\n key: this.key,\n keyFile: this.keyFile,\n scope: this.scope,\n additionalClaims: this.additionalClaims,\n });\n}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) {\n this.keyFile = options.keyFile;\n this.key = options.key;\n this.rawToken = undefined;\n this.iss = options.email || options.iss;\n this.sub = options.sub;\n this.additionalClaims = options.additionalClaims;\n if (typeof options.scope === 'object') {\n this.scope = options.scope.join(' ');\n }\n else {\n this.scope = options.scope;\n }\n this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis;\n if (options.transporter) {\n this.transporter = options.transporter;\n }\n}, _GoogleToken_requestToken = \n/**\n * Request the token from Google.\n */\nasync function _GoogleToken_requestToken() {\n var _a, _b;\n const iat = Math.floor(new Date().getTime() / 1000);\n const additionalClaims = this.additionalClaims || {};\n const payload = Object.assign({\n iss: this.iss,\n scope: this.scope,\n aud: GOOGLE_TOKEN_URL,\n exp: iat + 3600,\n iat,\n sub: this.sub,\n }, additionalClaims);\n const signedJWT = jws.sign({\n header: { alg: 'RS256' },\n payload,\n secret: 
this.key,\n });\n try {\n const r = await this.transporter.request({\n method: 'POST',\n url: GOOGLE_TOKEN_URL,\n data: {\n grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',\n assertion: signedJWT,\n },\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n responseType: 'json',\n retryConfig: {\n httpMethodsToRetry: ['POST'],\n },\n });\n this.rawToken = r.data;\n this.expiresAt =\n r.data.expires_in === null || r.data.expires_in === undefined\n ? undefined\n : (iat + r.data.expires_in) * 1000;\n return this.rawToken;\n }\n catch (e) {\n this.rawToken = undefined;\n this.tokenExpires = undefined;\n const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data)\n ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data\n : {};\n if (body.error) {\n const desc = body.error_description\n ? `: ${body.error_description}`\n : '';\n e.message = `${body.error}${desc}`;\n }\n throw e;\n }\n};\n//# sourceMappingURL=index.js.map","'use strict';\n\nmodule.exports = (flag, argv = process.argv) => {\n\tconst prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');\n\tconst position = argv.indexOf(prefix + flag);\n\tconst terminatorPosition = argv.indexOf('--');\n\treturn position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);\n};\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst once_1 = __importDefault(require(\"@tootallnate/once\"));\nconst agent_base_1 = require(\"agent-base\");\nconst debug = (0, debug_1.default)('http-proxy-agent');\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false;\n}\n/**\n * The `HttpProxyAgent` implements an HTTP Agent subclass that connects\n * to the specified \"HTTP proxy server\" in order to proxy HTTP requests.\n *\n * @api public\n */\nclass HttpProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('Creating new HttpProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 443 : 80;\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n const parsed = url_1.default.parse(req.path);\n if (!parsed.protocol) {\n parsed.protocol = 'http:';\n }\n if (!parsed.hostname) {\n parsed.hostname = opts.hostname || opts.host || null;\n }\n if (parsed.port == null && typeof opts.port) {\n parsed.port = String(opts.port);\n }\n if (parsed.port === '80') {\n // if port is 80, then we can remove the port so that the\n // \":80\" portion is not on the produced URL\n parsed.port = '';\n }\n // Change the `http.ClientRequest` instance's \"path\" field\n // to the absolute path of the URL that will be requested.\n req.path = url_1.default.format(parsed);\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`);\n }\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n // At this point, the http ClientRequest's internal `_header` field\n // might have already been set. 
If this is the case then we'll need\n // to re-generate the string since we just changed the `req.path`.\n if (req._header) {\n let first;\n let endOfHeaders;\n debug('Regenerating stored HTTP header string for request');\n req._header = null;\n req._implicitHeader();\n if (req.output && req.output.length > 0) {\n // Node < 12\n debug('Patching connection write() output buffer with updated header');\n first = req.output[0];\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.output[0] = req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.output);\n }\n else if (req.outputData && req.outputData.length > 0) {\n // Node >= 12\n debug('Patching connection write() output buffer with updated header');\n first = req.outputData[0].data;\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.outputData[0].data =\n req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.outputData[0].data);\n }\n }\n // Wait for the socket's `connect` event, so that this `callback()`\n // function throws instead of the `http` request machinery. This is\n // important for i.e. `PacProxyAgent` which determines a failed proxy\n // connection via the `callback()` function throwing.\n yield (0, once_1.default)(socket, 'connect');\n return socket;\n });\n }\n}\nexports.default = HttpProxyAgent;\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpProxyAgent) {\n createHttpProxyAgent.HttpProxyAgent = agent_1.default;\n createHttpProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpProxyAgent || (createHttpProxyAgent = {}));\nmodule.exports = createHttpProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst agent_base_1 = require(\"agent-base\");\nconst parse_proxy_response_1 = __importDefault(require(\"./parse-proxy-response\"));\nconst debug = debug_1.default('https-proxy-agent:agent');\n/**\n * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to\n * the specified \"HTTP(s) proxy server\" in order to proxy HTTPS requests.\n *\n * Outgoing HTTP requests are first tunneled through the proxy server using the\n * `CONNECT` HTTP request method to establish a connection to the proxy server,\n * and then the proxy server connects to the destination target and issues the\n * HTTP request from the proxy server.\n *\n * `https:` requests have their socket connection upgraded to TLS once\n * the connection to the proxy server has been established.\n *\n * @api public\n */\nclass HttpsProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('creating new HttpsProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 443 : 80;\n }\n // ALPN is supported by Node.js >= v5.\n // attempt to negotiate http/1.1 for proxy servers that support http/2\n if (this.secureProxy && !('ALPNProtocols' in proxy)) {\n proxy.ALPNProtocols = ['http 1.1'];\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... 
we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n const headers = Object.assign({}, proxy.headers);\n const hostname = `${opts.host}:${opts.port}`;\n let payload = `CONNECT ${hostname} HTTP/1.1\\r\\n`;\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`;\n }\n // The `Host` header should only include the port\n // number when it is not the default port.\n let { host, port, secureEndpoint } = opts;\n if (!isDefaultPort(port, secureEndpoint)) {\n host += `:${port}`;\n }\n headers.Host = host;\n headers.Connection = 'close';\n for (const name of Object.keys(headers)) {\n payload += `${name}: ${headers[name]}\\r\\n`;\n }\n const proxyResponsePromise = parse_proxy_response_1.default(socket);\n socket.write(`${payload}\\r\\n`);\n const { statusCode, buffered } = yield proxyResponsePromise;\n if (statusCode === 200) {\n req.once('socket', resume);\n if (opts.secureEndpoint) {\n // The proxy is connecting to a TLS server, so upgrade\n // this socket connection to a TLS connection.\n debug('Upgrading socket connection to TLS');\n const servername = opts.servername || opts.host;\n return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket,\n servername }));\n }\n return socket;\n }\n // Some other status code that's not 200... need to re-play the HTTP\n // header \"data\" events onto the socket once the HTTP machinery is\n // attached so that the node core `http` can parse and handle the\n // error status code.\n // Close the original socket, and a new \"fake\" socket is returned\n // instead, so that the proxy doesn't get the HTTP request\n // written to it (which may contain `Authorization` headers or other\n // sensitive data).\n //\n // See: https://hackerone.com/reports/541502\n socket.destroy();\n const fakeSocket = new net_1.default.Socket({ writable: false });\n fakeSocket.readable = true;\n // Need to wait for the \"socket\" event to re-play the \"data\" events.\n req.once('socket', (s) => {\n debug('replaying proxy buffer for failed request');\n assert_1.default(s.listenerCount('data') > 0);\n // Replay the \"buffered\" Buffer onto the fake `socket`, since at\n // this point the HTTP module machinery has been hooked up for\n // the user.\n s.push(buffered);\n s.push(null);\n });\n return fakeSocket;\n });\n }\n}\nexports.default = HttpsProxyAgent;\nfunction resume(socket) {\n socket.resume();\n}\nfunction isDefaultPort(port, secure) {\n return Boolean((!secure && port === 80) || (secure && port === 443));\n}\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false;\n}\nfunction omit(obj, ...keys) {\n const ret = {};\n let key;\n for (key in obj) {\n if (!keys.includes(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n}\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpsProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpsProxyAgent) {\n createHttpsProxyAgent.HttpsProxyAgent = agent_1.default;\n createHttpsProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpsProxyAgent || (createHttpsProxyAgent = {}));\nmodule.exports = createHttpsProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst debug_1 = __importDefault(require(\"debug\"));\nconst debug = debug_1.default('https-proxy-agent:parse-proxy-response');\nfunction parseProxyResponse(socket) {\n return new Promise((resolve, reject) => {\n // we need to buffer any HTTP traffic that happens with the proxy before we get\n // the CONNECT response, so that if the response is anything other than an \"200\"\n // response code, then we can re-play the \"data\" events on the socket once the\n // HTTP parser is hooked up...\n let buffersLength = 0;\n const buffers = [];\n function read() {\n const b = socket.read();\n if (b)\n ondata(b);\n else\n socket.once('readable', read);\n }\n function cleanup() {\n socket.removeListener('end', onend);\n socket.removeListener('error', onerror);\n socket.removeListener('close', onclose);\n socket.removeListener('readable', read);\n }\n function onclose(err) {\n debug('onclose had error %o', err);\n }\n function onend() {\n debug('onend');\n }\n function onerror(err) {\n cleanup();\n debug('onerror %o', err);\n reject(err);\n }\n function ondata(b) {\n buffers.push(b);\n buffersLength += b.length;\n const buffered = Buffer.concat(buffers, buffersLength);\n const endOfHeaders = buffered.indexOf('\\r\\n\\r\\n');\n if (endOfHeaders === -1) {\n // keep buffering\n debug('have not received end of HTTP headers yet...');\n read();\n return;\n }\n const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\\r\\n'));\n const statusCode = +firstLine.split(' ')[1];\n debug('got proxy server response: %o', firstLine);\n resolve({\n statusCode,\n buffered\n });\n }\n socket.on('error', onerror);\n socket.on('close', onclose);\n socket.on('end', onend);\n read();\n });\n}\nexports.default = parseProxyResponse;\n//# sourceMappingURL=parse-proxy-response.js.map","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, 
superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","'use strict';\n\nconst isStream = stream =>\n\tstream !== null &&\n\ttypeof stream === 'object' &&\n\ttypeof stream.pipe === 'function';\n\nisStream.writable = stream =>\n\tisStream(stream) &&\n\tstream.writable !== false &&\n\ttypeof stream._write === 'function' &&\n\ttypeof stream._writableState === 'object';\n\nisStream.readable = stream =>\n\tisStream(stream) &&\n\tstream.readable !== false &&\n\ttypeof stream._read === 'function' &&\n\ttypeof stream._readableState === 'object';\n\nisStream.duplex = stream =>\n\tisStream.writable(stream) &&\n\tisStream.readable(stream);\n\nisStream.transform = stream =>\n\tisStream.duplex(stream) &&\n\ttypeof stream._transform === 'function';\n\nmodule.exports = isStream;\n","var json_stringify = require('./lib/stringify.js').stringify;\nvar json_parse = require('./lib/parse.js');\n\nmodule.exports = function(options) {\n return {\n parse: json_parse(options),\n stringify: json_stringify\n }\n};\n//create the default method members with no options applied for backwards compatibility\nmodule.exports.parse = json_parse();\nmodule.exports.stringify = json_stringify;\n","var BigNumber = null;\n\n// regexpxs extracted from\n// (c) BSD-3-Clause\n// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors\n\nconst suspectProtoRx = /(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])(?:p|\\\\u0070)(?:r|\\\\u0072)(?:o|\\\\u006[Ff])(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])/;\nconst suspectConstructorRx = /(?:c|\\\\u0063)(?:o|\\\\u006[Ff])(?:n|\\\\u006[Ee])(?:s|\\\\u0073)(?:t|\\\\u0074)(?:r|\\\\u0072)(?:u|\\\\u0075)(?:c|\\\\u0063)(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:r|\\\\u0072)/;\n\n/*\n json_parse.js\n 2012-06-20\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n This file creates a json_parse function.\n During create you can (optionally) specify some behavioural switches\n\n require('json-bigint')(options)\n\n The optional options parameter holds switches that drive certain\n aspects of the parsing process:\n * options.strict = true will warn about duplicate-key usage in the json.\n The default (strict = false) will silently ignore those and overwrite\n values for keys that are in duplicate use.\n\n The resulting function follows this signature:\n json_parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = json_parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n This is a reference implementation. 
You are free to copy, modify, or\n redistribute.\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n*/\n\n/*members \"\", \"\\\"\", \"\\/\", \"\\\\\", at, b, call, charAt, f, fromCharCode,\n hasOwnProperty, message, n, name, prototype, push, r, t, text\n*/\n\nvar json_parse = function (options) {\n 'use strict';\n\n // This is a function that can parse a JSON text, producing a JavaScript\n // data structure. It is a simple, recursive descent parser. It does not use\n // eval or regular expressions, so it can be used as a model for implementing\n // a JSON parser in other languages.\n\n // We are defining the function inside of another function to avoid creating\n // global variables.\n\n // Default options one can override by passing options to the parse()\n var _options = {\n strict: false, // not being strict means do not generate syntax errors for \"duplicate key\"\n storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string\n alwaysParseAsBig: false, // toggles whether all numbers should be Big\n useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js\n protoAction: 'error',\n constructorAction: 'error',\n };\n\n // If there are options, then use them to override the default _options\n if (options !== undefined && options !== null) {\n if (options.strict === true) {\n _options.strict = true;\n }\n if (options.storeAsString === true) {\n _options.storeAsString = true;\n }\n _options.alwaysParseAsBig =\n options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false;\n _options.useNativeBigInt =\n options.useNativeBigInt === true ? options.useNativeBigInt : false;\n\n if (typeof options.constructorAction !== 'undefined') {\n if (\n options.constructorAction === 'error' ||\n options.constructorAction === 'ignore' ||\n options.constructorAction === 'preserve'\n ) {\n _options.constructorAction = options.constructorAction;\n } else {\n throw new Error(\n `Incorrect value for constructorAction option, must be \"error\", \"ignore\" or undefined but passed ${options.constructorAction}`\n );\n }\n }\n\n if (typeof options.protoAction !== 'undefined') {\n if (\n options.protoAction === 'error' ||\n options.protoAction === 'ignore' ||\n options.protoAction === 'preserve'\n ) {\n _options.protoAction = options.protoAction;\n } else {\n throw new Error(\n `Incorrect value for protoAction option, must be \"error\", \"ignore\" or undefined but passed ${options.protoAction}`\n );\n }\n }\n }\n\n var at, // The index of the current character\n ch, // The current character\n escapee = {\n '\"': '\"',\n '\\\\': '\\\\',\n '/': '/',\n b: '\\b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n },\n text,\n error = function (m) {\n // Call error when something is wrong.\n\n throw {\n name: 'SyntaxError',\n message: m,\n at: at,\n text: text,\n };\n },\n next = function (c) {\n // If a c parameter is provided, verify that it matches the current character.\n\n if (c && c !== ch) {\n error(\"Expected '\" + c + \"' instead of '\" + ch + \"'\");\n }\n\n // Get the next character. 
When there are no more characters,\n // return the empty string.\n\n ch = text.charAt(at);\n at += 1;\n return ch;\n },\n number = function () {\n // Parse a number value.\n\n var number,\n string = '';\n\n if (ch === '-') {\n string = '-';\n next('-');\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n if (ch === '.') {\n string += '.';\n while (next() && ch >= '0' && ch <= '9') {\n string += ch;\n }\n }\n if (ch === 'e' || ch === 'E') {\n string += ch;\n next();\n if (ch === '-' || ch === '+') {\n string += ch;\n next();\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n }\n number = +string;\n if (!isFinite(number)) {\n error('Bad number');\n } else {\n if (BigNumber == null) BigNumber = require('bignumber.js');\n //if (number > 9007199254740992 || number < -9007199254740992)\n // Bignumber has stricter check: everything with length > 15 digits disallowed\n if (string.length > 15)\n return _options.storeAsString\n ? string\n : _options.useNativeBigInt\n ? BigInt(string)\n : new BigNumber(string);\n else\n return !_options.alwaysParseAsBig\n ? number\n : _options.useNativeBigInt\n ? BigInt(number)\n : new BigNumber(number);\n }\n },\n string = function () {\n // Parse a string value.\n\n var hex,\n i,\n string = '',\n uffff;\n\n // When parsing for string values, we must look for \" and \\ characters.\n\n if (ch === '\"') {\n var startAt = at;\n while (next()) {\n if (ch === '\"') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n return string;\n }\n if (ch === '\\\\') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n if (ch === 'u') {\n uffff = 0;\n for (i = 0; i < 4; i += 1) {\n hex = parseInt(next(), 16);\n if (!isFinite(hex)) {\n break;\n }\n uffff = uffff * 16 + hex;\n }\n string += String.fromCharCode(uffff);\n } else if (typeof escapee[ch] === 'string') {\n string += escapee[ch];\n } else {\n break;\n }\n startAt = at;\n }\n }\n }\n error('Bad string');\n },\n white = function () {\n // Skip whitespace.\n\n while (ch && ch <= ' ') {\n next();\n }\n },\n word = function () {\n // true, false, or null.\n\n switch (ch) {\n case 't':\n next('t');\n next('r');\n next('u');\n next('e');\n return true;\n case 'f':\n next('f');\n next('a');\n next('l');\n next('s');\n next('e');\n return false;\n case 'n':\n next('n');\n next('u');\n next('l');\n next('l');\n return null;\n }\n error(\"Unexpected '\" + ch + \"'\");\n },\n value, // Place holder for the value function.\n array = function () {\n // Parse an array value.\n\n var array = [];\n\n if (ch === '[') {\n next('[');\n white();\n if (ch === ']') {\n next(']');\n return array; // empty array\n }\n while (ch) {\n array.push(value());\n white();\n if (ch === ']') {\n next(']');\n return array;\n }\n next(',');\n white();\n }\n }\n error('Bad array');\n },\n object = function () {\n // Parse an object value.\n\n var key,\n object = Object.create(null);\n\n if (ch === '{') {\n next('{');\n white();\n if (ch === '}') {\n next('}');\n return object; // empty object\n }\n while (ch) {\n key = string();\n white();\n next(':');\n if (\n _options.strict === true &&\n Object.hasOwnProperty.call(object, key)\n ) {\n error('Duplicate key \"' + key + '\"');\n }\n\n if (suspectProtoRx.test(key) === true) {\n if (_options.protoAction === 'error') {\n error('Object contains forbidden prototype property');\n } else if (_options.protoAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else if (suspectConstructorRx.test(key) === 
true) {\n if (_options.constructorAction === 'error') {\n error('Object contains forbidden constructor property');\n } else if (_options.constructorAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else {\n object[key] = value();\n }\n\n white();\n if (ch === '}') {\n next('}');\n return object;\n }\n next(',');\n white();\n }\n }\n error('Bad object');\n };\n\n value = function () {\n // Parse a JSON value. It could be an object, an array, a string, a number,\n // or a word.\n\n white();\n switch (ch) {\n case '{':\n return object();\n case '[':\n return array();\n case '\"':\n return string();\n case '-':\n return number();\n default:\n return ch >= '0' && ch <= '9' ? number() : word();\n }\n };\n\n // Return the json_parse function. It will have access to all of the above\n // functions and variables.\n\n return function (source, reviver) {\n var result;\n\n text = source + '';\n at = 0;\n ch = ' ';\n result = value();\n white();\n if (ch) {\n error('Syntax error');\n }\n\n // If there is a reviver function, we recursively walk the new structure,\n // passing each name/value pair to the reviver function for possible\n // transformation, starting with a temporary root object that holds the result\n // in an empty key. If there is not a reviver function, we simply return the\n // result.\n\n return typeof reviver === 'function'\n ? (function walk(holder, key) {\n var k,\n v,\n value = holder[key];\n if (value && typeof value === 'object') {\n Object.keys(value).forEach(function (k) {\n v = walk(value, k);\n if (v !== undefined) {\n value[k] = v;\n } else {\n delete value[k];\n }\n });\n }\n return reviver.call(holder, key, value);\n })({ '': result }, '')\n : result;\n };\n};\n\nmodule.exports = json_parse;\n","var BigNumber = require('bignumber.js');\n\n/*\n json2.js\n 2013-05-26\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n See http://www.JSON.org/js.html\n\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n\n\n This file creates a global JSON object containing two methods: stringify\n and parse.\n\n JSON.stringify(value, replacer, space)\n value any JavaScript value, usually an object or array.\n\n replacer an optional parameter that determines how object\n values are stringified for objects. It can be a\n function or an array of strings.\n\n space an optional parameter that specifies the indentation\n of nested structures. If it is omitted, the text will\n be packed without extra whitespace. If it is a number,\n it will specify the number of spaces to indent at each\n level. If it is a string (such as '\\t' or ' '),\n it contains the characters used to indent at each level.\n\n This method produces a JSON text from a JavaScript value.\n\n When an object value is found, if the object contains a toJSON\n method, its toJSON method will be called and the result will be\n stringified. A toJSON method does not serialize: it returns the\n value represented by the name/value pair that should be serialized,\n or undefined if nothing should be serialized. The toJSON method\n will be passed the key associated with the value, and this will be\n bound to the value\n\n For example, this would serialize Dates as ISO strings.\n\n Date.prototype.toJSON = function (key) {\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? 
'0' + n : n;\n }\n\n return this.getUTCFullYear() + '-' +\n f(this.getUTCMonth() + 1) + '-' +\n f(this.getUTCDate()) + 'T' +\n f(this.getUTCHours()) + ':' +\n f(this.getUTCMinutes()) + ':' +\n f(this.getUTCSeconds()) + 'Z';\n };\n\n You can provide an optional replacer method. It will be passed the\n key and value of each member, with this bound to the containing\n object. The value that is returned from your method will be\n serialized. If your method returns undefined, then the member will\n be excluded from the serialization.\n\n If the replacer parameter is an array of strings, then it will be\n used to select the members to be serialized. It filters the results\n such that only members with keys listed in the replacer array are\n stringified.\n\n Values that do not have JSON representations, such as undefined or\n functions, will not be serialized. Such values in objects will be\n dropped; in arrays they will be replaced with null. You can use\n a replacer function to replace those with JSON values.\n JSON.stringify(undefined) returns undefined.\n\n The optional space parameter produces a stringification of the\n value that is filled with line breaks and indentation to make it\n easier to read.\n\n If the space parameter is a non-empty string, then that string will\n be used for indentation. If the space parameter is a number, then\n the indentation will be that many spaces.\n\n Example:\n\n text = JSON.stringify(['e', {pluribus: 'unum'}]);\n // text is '[\"e\",{\"pluribus\":\"unum\"}]'\n\n\n text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\\t');\n // text is '[\\n\\t\"e\",\\n\\t{\\n\\t\\t\"pluribus\": \"unum\"\\n\\t}\\n]'\n\n text = JSON.stringify([new Date()], function (key, value) {\n return this[key] instanceof Date ?\n 'Date(' + this[key] + ')' : value;\n });\n // text is '[\"Date(---current time---)\"]'\n\n\n JSON.parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = JSON.parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n myData = JSON.parse('[\"Date(09/09/2001)\"]', function (key, value) {\n var d;\n if (typeof value === 'string' &&\n value.slice(0, 5) === 'Date(' &&\n value.slice(-1) === ')') {\n d = new Date(value.slice(5, -1));\n if (d) {\n return d;\n }\n }\n return value;\n });\n\n\n This is a reference implementation. You are free to copy, modify, or\n redistribute.\n*/\n\n/*jslint evil: true, regexp: true */\n\n/*members \"\", \"\\b\", \"\\t\", \"\\n\", \"\\f\", \"\\r\", \"\\\"\", JSON, \"\\\\\", apply,\n call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,\n getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,\n lastIndex, length, parse, prototype, push, replace, slice, stringify,\n test, toJSON, toString, valueOf\n*/\n\n\n// Create a JSON object only if one does not already exist. 
We create the\n// methods in a closure to avoid creating global variables.\n\nvar JSON = module.exports;\n\n(function () {\n 'use strict';\n\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? '0' + n : n;\n }\n\n var cx = /[\\u0000\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n escapable = /[\\\\\\\"\\x00-\\x1f\\x7f-\\x9f\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n gap,\n indent,\n meta = { // table of character substitutions\n '\\b': '\\\\b',\n '\\t': '\\\\t',\n '\\n': '\\\\n',\n '\\f': '\\\\f',\n '\\r': '\\\\r',\n '\"' : '\\\\\"',\n '\\\\': '\\\\\\\\'\n },\n rep;\n\n\n function quote(string) {\n\n// If the string contains no control characters, no quote characters, and no\n// backslash characters, then we can safely slap some quotes around it.\n// Otherwise we must also replace the offending characters with safe escape\n// sequences.\n\n escapable.lastIndex = 0;\n return escapable.test(string) ? '\"' + string.replace(escapable, function (a) {\n var c = meta[a];\n return typeof c === 'string'\n ? c\n : '\\\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);\n }) + '\"' : '\"' + string + '\"';\n }\n\n\n function str(key, holder) {\n\n// Produce a string from holder[key].\n\n var i, // The loop counter.\n k, // The member key.\n v, // The member value.\n length,\n mind = gap,\n partial,\n value = holder[key],\n isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value));\n\n// If the value has a toJSON method, call it to obtain a replacement value.\n\n if (value && typeof value === 'object' &&\n typeof value.toJSON === 'function') {\n value = value.toJSON(key);\n }\n\n// If we were called with a replacer function, then call the replacer to\n// obtain a replacement value.\n\n if (typeof rep === 'function') {\n value = rep.call(holder, key, value);\n }\n\n// What happens next depends on the value's type.\n\n switch (typeof value) {\n case 'string':\n if (isBigNumber) {\n return value;\n } else {\n return quote(value);\n }\n\n case 'number':\n\n// JSON numbers must be finite. Encode non-finite numbers as null.\n\n return isFinite(value) ? String(value) : 'null';\n\n case 'boolean':\n case 'null':\n case 'bigint':\n\n// If the value is a boolean or null, convert it to a string. Note:\n// typeof null does not produce 'null'. The case is included here in\n// the remote chance that this gets fixed someday.\n\n return String(value);\n\n// If the type is 'object', we might be dealing with an object or an array or\n// null.\n\n case 'object':\n\n// Due to a specification blunder in ECMAScript, typeof null is 'object',\n// so watch out for that case.\n\n if (!value) {\n return 'null';\n }\n\n// Make an array to hold the partial results of stringifying this object value.\n\n gap += indent;\n partial = [];\n\n// Is the value an array?\n\n if (Object.prototype.toString.apply(value) === '[object Array]') {\n\n// The value is an array. Stringify every element. Use null as a placeholder\n// for non-JSON values.\n\n length = value.length;\n for (i = 0; i < length; i += 1) {\n partial[i] = str(i, value) || 'null';\n }\n\n// Join all of the elements together, separated with commas, and wrap them in\n// brackets.\n\n v = partial.length === 0\n ? '[]'\n : gap\n ? 
'[\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + ']'\n : '[' + partial.join(',') + ']';\n gap = mind;\n return v;\n }\n\n// If the replacer is an array, use it to select the members to be stringified.\n\n if (rep && typeof rep === 'object') {\n length = rep.length;\n for (i = 0; i < length; i += 1) {\n if (typeof rep[i] === 'string') {\n k = rep[i];\n v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n }\n }\n } else {\n\n// Otherwise, iterate through all of the keys in the object.\n\n Object.keys(value).forEach(function(k) {\n var v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n });\n }\n\n// Join all of the member texts together, separated with commas,\n// and wrap them in braces.\n\n v = partial.length === 0\n ? '{}'\n : gap\n ? '{\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + '}'\n : '{' + partial.join(',') + '}';\n gap = mind;\n return v;\n }\n }\n\n// If the JSON object does not yet have a stringify method, give it one.\n\n if (typeof JSON.stringify !== 'function') {\n JSON.stringify = function (value, replacer, space) {\n\n// The stringify method takes a value and an optional replacer, and an optional\n// space parameter, and returns a JSON text. The replacer can be a function\n// that can replace values, or an array of strings that will select the keys.\n// A default replacer method can be provided. Use of the space parameter can\n// produce text that is more easily readable.\n\n var i;\n gap = '';\n indent = '';\n\n// If the space parameter is a number, make an indent string containing that\n// many spaces.\n\n if (typeof space === 'number') {\n for (i = 0; i < space; i += 1) {\n indent += ' ';\n }\n\n// If the space parameter is a string, it will be used as the indent string.\n\n } else if (typeof space === 'string') {\n indent = space;\n }\n\n// If there is a replacer, it must be a function or an array.\n// Otherwise, throw an error.\n\n rep = replacer;\n if (replacer && typeof replacer !== 'function' &&\n (typeof replacer !== 'object' ||\n typeof replacer.length !== 'number')) {\n throw new Error('JSON.stringify');\n }\n\n// Make a fake root object containing our value under the key of ''.\n// Return the result of stringifying the value.\n\n return str('', {'': value});\n };\n }\n}());\n","var bufferEqual = require('buffer-equal-constant-time');\nvar Buffer = require('safe-buffer').Buffer;\nvar crypto = require('crypto');\nvar formatEcdsa = require('ecdsa-sig-formatter');\nvar util = require('util');\n\nvar MSG_INVALID_ALGORITHM = '\"%s\" is not a valid algorithm.\\n Supported algorithms are:\\n \"HS256\", \"HS384\", \"HS512\", \"RS256\", \"RS384\", \"RS512\", \"PS256\", \"PS384\", \"PS512\", \"ES256\", \"ES384\", \"ES512\" and \"none\".'\nvar MSG_INVALID_SECRET = 'secret must be a string or buffer';\nvar MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer';\nvar MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object';\n\nvar supportsKeyObjects = typeof crypto.createPublicKey === 'function';\nif (supportsKeyObjects) {\n MSG_INVALID_VERIFIER_KEY += ' or a KeyObject';\n MSG_INVALID_SECRET += 'or a KeyObject';\n}\n\nfunction checkIsPublicKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.type !== 'string') {\n throw 
typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.asymmetricKeyType !== 'string') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n};\n\nfunction checkIsPrivateKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (typeof key === 'object') {\n return;\n }\n\n throw typeError(MSG_INVALID_SIGNER_KEY);\n};\n\nfunction checkIsSecretKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return key;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (key.type !== 'secret') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_SECRET);\n }\n}\n\nfunction fromBase64(base64) {\n return base64\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction toBase64(base64url) {\n base64url = base64url.toString();\n\n var padding = 4 - base64url.length % 4;\n if (padding !== 4) {\n for (var i = 0; i < padding; ++i) {\n base64url += '=';\n }\n }\n\n return base64url\n .replace(/\\-/g, '+')\n .replace(/_/g, '/');\n}\n\nfunction typeError(template) {\n var args = [].slice.call(arguments, 1);\n var errMsg = util.format.bind(util, template).apply(null, args);\n return new TypeError(errMsg);\n}\n\nfunction bufferOrString(obj) {\n return Buffer.isBuffer(obj) || typeof obj === 'string';\n}\n\nfunction normalizeInput(thing) {\n if (!bufferOrString(thing))\n thing = JSON.stringify(thing);\n return thing;\n}\n\nfunction createHmacSigner(bits) {\n return function sign(thing, secret) {\n checkIsSecretKey(secret);\n thing = normalizeInput(thing);\n var hmac = crypto.createHmac('sha' + bits, secret);\n var sig = (hmac.update(thing), hmac.digest('base64'))\n return fromBase64(sig);\n }\n}\n\nfunction createHmacVerifier(bits) {\n return function verify(thing, signature, secret) {\n var computedSig = createHmacSigner(bits)(thing, secret);\n return bufferEqual(Buffer.from(signature), Buffer.from(computedSig));\n }\n}\n\nfunction createKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n // Even though we are specifying \"RSA\" here, this works with ECDSA\n // keys as well.\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign(privateKey, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify(publicKey, signature, 'base64');\n }\n}\n\nfunction createPSSKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign({\n key: privateKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createPSSKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = 
toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify({\n key: publicKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, signature, 'base64');\n }\n}\n\nfunction createECDSASigner(bits) {\n var inner = createKeySigner(bits);\n return function sign() {\n var signature = inner.apply(null, arguments);\n signature = formatEcdsa.derToJose(signature, 'ES' + bits);\n return signature;\n };\n}\n\nfunction createECDSAVerifer(bits) {\n var inner = createKeyVerifier(bits);\n return function verify(thing, signature, publicKey) {\n signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64');\n var result = inner(thing, signature, publicKey);\n return result;\n };\n}\n\nfunction createNoneSigner() {\n return function sign() {\n return '';\n }\n}\n\nfunction createNoneVerifier() {\n return function verify(thing, signature) {\n return signature === '';\n }\n}\n\nmodule.exports = function jwa(algorithm) {\n var signerFactories = {\n hs: createHmacSigner,\n rs: createKeySigner,\n ps: createPSSKeySigner,\n es: createECDSASigner,\n none: createNoneSigner,\n }\n var verifierFactories = {\n hs: createHmacVerifier,\n rs: createKeyVerifier,\n ps: createPSSKeyVerifier,\n es: createECDSAVerifer,\n none: createNoneVerifier,\n }\n var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/);\n if (!match)\n throw typeError(MSG_INVALID_ALGORITHM, algorithm);\n var algo = (match[1] || match[3]).toLowerCase();\n var bits = match[2];\n\n return {\n sign: signerFactories[algo](bits),\n verify: verifierFactories[algo](bits),\n }\n};\n","/*global exports*/\nvar SignStream = require('./lib/sign-stream');\nvar VerifyStream = require('./lib/verify-stream');\n\nvar ALGORITHMS = [\n 'HS256', 'HS384', 'HS512',\n 'RS256', 'RS384', 'RS512',\n 'PS256', 'PS384', 'PS512',\n 'ES256', 'ES384', 'ES512'\n];\n\nexports.ALGORITHMS = ALGORITHMS;\nexports.sign = SignStream.sign;\nexports.verify = VerifyStream.verify;\nexports.decode = VerifyStream.decode;\nexports.isValid = VerifyStream.isValid;\nexports.createSign = function createSign(opts) {\n return new SignStream(opts);\n};\nexports.createVerify = function createVerify(opts) {\n return new VerifyStream(opts);\n};\n","/*global module, process*/\nvar Buffer = require('safe-buffer').Buffer;\nvar Stream = require('stream');\nvar util = require('util');\n\nfunction DataStream(data) {\n this.buffer = null;\n this.writable = true;\n this.readable = true;\n\n // No input\n if (!data) {\n this.buffer = Buffer.alloc(0);\n return this;\n }\n\n // Stream\n if (typeof data.pipe === 'function') {\n this.buffer = Buffer.alloc(0);\n data.pipe(this);\n return this;\n }\n\n // Buffer or String\n // or Object (assumedly a passworded key)\n if (data.length || typeof data === 'object') {\n this.buffer = data;\n this.writable = false;\n process.nextTick(function () {\n this.emit('end', data);\n this.readable = false;\n this.emit('close');\n }.bind(this));\n return this;\n }\n\n throw new TypeError('Unexpected data type ('+ typeof data + ')');\n}\nutil.inherits(DataStream, Stream);\n\nDataStream.prototype.write = function write(data) {\n this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]);\n this.emit('data', data);\n};\n\nDataStream.prototype.end = function end(data) {\n if (data)\n this.write(data);\n this.emit('end', data);\n this.emit('close');\n this.writable = false;\n this.readable = false;\n};\n\nmodule.exports = 
DataStream;\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\n\nfunction base64url(string, encoding) {\n return Buffer\n .from(string, encoding)\n .toString('base64')\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction jwsSecuredInput(header, payload, encoding) {\n encoding = encoding || 'utf8';\n var encodedHeader = base64url(toString(header), 'binary');\n var encodedPayload = base64url(toString(payload), encoding);\n return util.format('%s.%s', encodedHeader, encodedPayload);\n}\n\nfunction jwsSign(opts) {\n var header = opts.header;\n var payload = opts.payload;\n var secretOrKey = opts.secret || opts.privateKey;\n var encoding = opts.encoding;\n var algo = jwa(header.alg);\n var securedInput = jwsSecuredInput(header, payload, encoding);\n var signature = algo.sign(securedInput, secretOrKey);\n return util.format('%s.%s', securedInput, signature);\n}\n\nfunction SignStream(opts) {\n var secret = opts.secret||opts.privateKey||opts.key;\n var secretStream = new DataStream(secret);\n this.readable = true;\n this.header = opts.header;\n this.encoding = opts.encoding;\n this.secret = this.privateKey = this.key = secretStream;\n this.payload = new DataStream(opts.payload);\n this.secret.once('close', function () {\n if (!this.payload.writable && this.readable)\n this.sign();\n }.bind(this));\n\n this.payload.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.sign();\n }.bind(this));\n}\nutil.inherits(SignStream, Stream);\n\nSignStream.prototype.sign = function sign() {\n try {\n var signature = jwsSign({\n header: this.header,\n payload: this.payload.buffer,\n secret: this.secret.buffer,\n encoding: this.encoding\n });\n this.emit('done', signature);\n this.emit('data', signature);\n this.emit('end');\n this.readable = false;\n return signature;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nSignStream.sign = jwsSign;\n\nmodule.exports = SignStream;\n","/*global module*/\nvar Buffer = require('buffer').Buffer;\n\nmodule.exports = function toString(obj) {\n if (typeof obj === 'string')\n return obj;\n if (typeof obj === 'number' || Buffer.isBuffer(obj))\n return obj.toString();\n return JSON.stringify(obj);\n};\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\nvar JWS_REGEX = /^[a-zA-Z0-9\\-_]+?\\.[a-zA-Z0-9\\-_]+?\\.([a-zA-Z0-9\\-_]+)?$/;\n\nfunction isObject(thing) {\n return Object.prototype.toString.call(thing) === '[object Object]';\n}\n\nfunction safeJsonParse(thing) {\n if (isObject(thing))\n return thing;\n try { return JSON.parse(thing); }\n catch (e) { return undefined; }\n}\n\nfunction headerFromJWS(jwsSig) {\n var encodedHeader = jwsSig.split('.', 1)[0];\n return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary'));\n}\n\nfunction securedInputFromJWS(jwsSig) {\n return jwsSig.split('.', 2).join('.');\n}\n\nfunction signatureFromJWS(jwsSig) {\n return jwsSig.split('.')[2];\n}\n\nfunction payloadFromJWS(jwsSig, encoding) {\n encoding = encoding || 'utf8';\n var payload = jwsSig.split('.')[1];\n return Buffer.from(payload, 'base64').toString(encoding);\n}\n\nfunction 
isValidJws(string) {\n return JWS_REGEX.test(string) && !!headerFromJWS(string);\n}\n\nfunction jwsVerify(jwsSig, algorithm, secretOrKey) {\n if (!algorithm) {\n var err = new Error(\"Missing algorithm parameter for jws.verify\");\n err.code = \"MISSING_ALGORITHM\";\n throw err;\n }\n jwsSig = toString(jwsSig);\n var signature = signatureFromJWS(jwsSig);\n var securedInput = securedInputFromJWS(jwsSig);\n var algo = jwa(algorithm);\n return algo.verify(securedInput, signature, secretOrKey);\n}\n\nfunction jwsDecode(jwsSig, opts) {\n opts = opts || {};\n jwsSig = toString(jwsSig);\n\n if (!isValidJws(jwsSig))\n return null;\n\n var header = headerFromJWS(jwsSig);\n\n if (!header)\n return null;\n\n var payload = payloadFromJWS(jwsSig);\n if (header.typ === 'JWT' || opts.json)\n payload = JSON.parse(payload, opts.encoding);\n\n return {\n header: header,\n payload: payload,\n signature: signatureFromJWS(jwsSig)\n };\n}\n\nfunction VerifyStream(opts) {\n opts = opts || {};\n var secretOrKey = opts.secret||opts.publicKey||opts.key;\n var secretStream = new DataStream(secretOrKey);\n this.readable = true;\n this.algorithm = opts.algorithm;\n this.encoding = opts.encoding;\n this.secret = this.publicKey = this.key = secretStream;\n this.signature = new DataStream(opts.signature);\n this.secret.once('close', function () {\n if (!this.signature.writable && this.readable)\n this.verify();\n }.bind(this));\n\n this.signature.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.verify();\n }.bind(this));\n}\nutil.inherits(VerifyStream, Stream);\nVerifyStream.prototype.verify = function verify() {\n try {\n var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer);\n var obj = jwsDecode(this.signature.buffer, this.encoding);\n this.emit('done', valid, obj);\n this.emit('data', valid);\n this.emit('end');\n this.readable = false;\n return valid;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nVerifyStream.decode = jwsDecode;\nVerifyStream.isValid = isValidJws;\nVerifyStream.verify = jwsVerify;\n\nmodule.exports = VerifyStream;\n","/*!\n * mime-db\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015-2022 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if 
(match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' + path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","'use strict';\n\n/**\n * @param typeMap [Object] Map of MIME type -> Array[extensions]\n * @param ...\n */\nfunction Mime() {\n this._types = Object.create(null);\n this._extensions = Object.create(null);\n\n for (let i = 0; i < arguments.length; i++) {\n this.define(arguments[i]);\n }\n\n this.define = this.define.bind(this);\n this.getType = this.getType.bind(this);\n this.getExtension = this.getExtension.bind(this);\n}\n\n/**\n * Define mimetype -> extension mappings. Each key is a mime-type that maps\n * to an array of extensions associated with the type. The first extension is\n * used as the default extension for the type.\n *\n * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']});\n *\n * If a type declares an extension that has already been defined, an error will\n * be thrown. To suppress this error and force the extension to be associated\n * with the new type, pass `force`=true. Alternatively, you may prefix the\n * extension with \"*\" to map the type to extension, without mapping the\n * extension to the type.\n *\n * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']});\n *\n *\n * @param map (Object) type definitions\n * @param force (Boolean) if true, force overriding of existing definitions\n */\nMime.prototype.define = function(typeMap, force) {\n for (let type in typeMap) {\n let extensions = typeMap[type].map(function(t) {\n return t.toLowerCase();\n });\n type = type.toLowerCase();\n\n for (let i = 0; i < extensions.length; i++) {\n const ext = extensions[i];\n\n // '*' prefix = not the preferred type for this extension. So fixup the\n // extension, and skip it.\n if (ext[0] === '*') {\n continue;\n }\n\n if (!force && (ext in this._types)) {\n throw new Error(\n 'Attempt to change mapping for \"' + ext +\n '\" extension from \"' + this._types[ext] + '\" to \"' + type +\n '\". Pass `force=true` to allow this, otherwise remove \"' + ext +\n '\" from the list of extensions for \"' + type + '\".'\n );\n }\n\n this._types[ext] = type;\n }\n\n // Use first extension as default\n if (force || !this._extensions[type]) {\n const ext = extensions[0];\n this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1);\n }\n }\n};\n\n/**\n * Lookup a mime type based on extension\n */\nMime.prototype.getType = function(path) {\n path = String(path);\n let last = path.replace(/^.*[/\\\\]/, '').toLowerCase();\n let ext = last.replace(/^.*\\./, '').toLowerCase();\n\n let hasPath = last.length < path.length;\n let hasDot = ext.length < last.length - 1;\n\n return (hasDot || !hasPath) && this._types[ext] || null;\n};\n\n/**\n * Return file extension associated with a mime type\n */\nMime.prototype.getExtension = function(type) {\n type = /^\\s*([^;\\s]*)/.test(type) && RegExp.$1;\n return type && this._extensions[type.toLowerCase()] || null;\n};\n\nmodule.exports = Mime;\n","'use strict';\n\nlet Mime = require('./Mime');\nmodule.exports = new Mime(require('./types/standard'), require('./types/other'));\n","module.exports = 
{\"application/prs.cww\":[\"cww\"],\"application/vnd.1000minds.decision-model+xml\":[\"1km\"],\"application/vnd.3gpp.pic-bw-large\":[\"plb\"],\"application/vnd.3gpp.pic-bw-small\":[\"psb\"],\"application/vnd.3gpp.pic-bw-var\":[\"pvb\"],\"application/vnd.3gpp2.tcap\":[\"tcap\"],\"application/vnd.3m.post-it-notes\":[\"pwn\"],\"application/vnd.accpac.simply.aso\":[\"aso\"],\"application/vnd.accpac.simply.imp\":[\"imp\"],\"application/vnd.acucobol\":[\"acu\"],\"application/vnd.acucorp\":[\"atc\",\"acutc\"],\"application/vnd.adobe.air-application-installer-package+zip\":[\"air\"],\"application/vnd.adobe.formscentral.fcdt\":[\"fcdt\"],\"application/vnd.adobe.fxp\":[\"fxp\",\"fxpl\"],\"application/vnd.adobe.xdp+xml\":[\"xdp\"],\"application/vnd.adobe.xfdf\":[\"xfdf\"],\"application/vnd.ahead.space\":[\"ahead\"],\"application/vnd.airzip.filesecure.azf\":[\"azf\"],\"application/vnd.airzip.filesecure.azs\":[\"azs\"],\"application/vnd.amazon.ebook\":[\"azw\"],\"application/vnd.americandynamics.acc\":[\"acc\"],\"application/vnd.amiga.ami\":[\"ami\"],\"application/vnd.android.package-archive\":[\"apk\"],\"application/vnd.anser-web-certificate-issue-initiation\":[\"cii\"],\"application/vnd.anser-web-funds-transfer-initiation\":[\"fti\"],\"application/vnd.antix.game-component\":[\"atx\"],\"application/vnd.apple.installer+xml\":[\"mpkg\"],\"application/vnd.apple.keynote\":[\"key\"],\"application/vnd.apple.mpegurl\":[\"m3u8\"],\"application/vnd.apple.numbers\":[\"numbers\"],\"application/vnd.apple.pages\":[\"pages\"],\"application/vnd.apple.pkpass\":[\"pkpass\"],\"application/vnd.aristanetworks.swi\":[\"swi\"],\"application/vnd.astraea-software.iota\":[\"iota\"],\"application/vnd.audiograph\":[\"aep\"],\"application/vnd.balsamiq.bmml+xml\":[\"bmml\"],\"application/vnd.blueice.multipass\":[\"mpm\"],\"application/vnd.bmi\":[\"bmi\"],\"application/vnd.businessobjects\":[\"rep\"],\"application/vnd.chemdraw+xml\":[\"cdxml\"],\"application/vnd.chipnuts.karaoke-mmd\":[\"mmd\"],\"application/vnd.cinderella\":[\"cdy\"],\"application/vnd.citationstyles.style+xml\":[\"csl\"],\"application/vnd.claymore\":[\"cla\"],\"application/vnd.cloanto.rp9\":[\"rp9\"],\"application/vnd.clonk.c4group\":[\"c4g\",\"c4d\",\"c4f\",\"c4p\",\"c4u\"],\"application/vnd.cluetrust.cartomobile-config\":[\"c11amc\"],\"application/vnd.cluetrust.cartomobile-config-pkg\":[\"c11amz\"],\"application/vnd.commonspace\":[\"csp\"],\"application/vnd.contact.cmsg\":[\"cdbcmsg\"],\"application/vnd.cosmocaller\":[\"cmc\"],\"application/vnd.crick.clicker\":[\"clkx\"],\"application/vnd.crick.clicker.keyboard\":[\"clkk\"],\"application/vnd.crick.clicker.palette\":[\"clkp\"],\"application/vnd.crick.clicker.template\":[\"clkt\"],\"application/vnd.crick.clicker.wordbank\":[\"clkw\"],\"application/vnd.criticaltools.wbs+xml\":[\"wbs\"],\"application/vnd.ctc-posml\":[\"pml\"],\"application/vnd.cups-ppd\":[\"ppd\"],\"application/vnd.curl.car\":[\"car\"],\"application/vnd.curl.pcurl\":[\"pcurl\"],\"application/vnd.dart\":[\"dart\"],\"application/vnd.data-vision.rdz\":[\"rdz\"],\"application/vnd.dbf\":[\"dbf\"],\"application/vnd.dece.data\":[\"uvf\",\"uvvf\",\"uvd\",\"uvvd\"],\"application/vnd.dece.ttml+xml\":[\"uvt\",\"uvvt\"],\"application/vnd.dece.unspecified\":[\"uvx\",\"uvvx\"],\"application/vnd.dece.zip\":[\"uvz\",\"uvvz\"],\"application/vnd.denovo.fcselayout-link\":[\"fe_launch\"],\"application/vnd.dna\":[\"dna\"],\"application/vnd.dolby.mlp\":[\"mlp\"],\"application/vnd.dpgraph\":[\"dpg\"],\"application/vnd.dreamfactory\":[\"dfac\"],\"application/vnd.ds-keypoin
t\":[\"kpxx\"],\"application/vnd.dvb.ait\":[\"ait\"],\"application/vnd.dvb.service\":[\"svc\"],\"application/vnd.dynageo\":[\"geo\"],\"application/vnd.ecowin.chart\":[\"mag\"],\"application/vnd.enliven\":[\"nml\"],\"application/vnd.epson.esf\":[\"esf\"],\"application/vnd.epson.msf\":[\"msf\"],\"application/vnd.epson.quickanime\":[\"qam\"],\"application/vnd.epson.salt\":[\"slt\"],\"application/vnd.epson.ssf\":[\"ssf\"],\"application/vnd.eszigno3+xml\":[\"es3\",\"et3\"],\"application/vnd.ezpix-album\":[\"ez2\"],\"application/vnd.ezpix-package\":[\"ez3\"],\"application/vnd.fdf\":[\"fdf\"],\"application/vnd.fdsn.mseed\":[\"mseed\"],\"application/vnd.fdsn.seed\":[\"seed\",\"dataless\"],\"application/vnd.flographit\":[\"gph\"],\"application/vnd.fluxtime.clip\":[\"ftc\"],\"application/vnd.framemaker\":[\"fm\",\"frame\",\"maker\",\"book\"],\"application/vnd.frogans.fnc\":[\"fnc\"],\"application/vnd.frogans.ltf\":[\"ltf\"],\"application/vnd.fsc.weblaunch\":[\"fsc\"],\"application/vnd.fujitsu.oasys\":[\"oas\"],\"application/vnd.fujitsu.oasys2\":[\"oa2\"],\"application/vnd.fujitsu.oasys3\":[\"oa3\"],\"application/vnd.fujitsu.oasysgp\":[\"fg5\"],\"application/vnd.fujitsu.oasysprs\":[\"bh2\"],\"application/vnd.fujixerox.ddd\":[\"ddd\"],\"application/vnd.fujixerox.docuworks\":[\"xdw\"],\"application/vnd.fujixerox.docuworks.binder\":[\"xbd\"],\"application/vnd.fuzzysheet\":[\"fzs\"],\"application/vnd.genomatix.tuxedo\":[\"txd\"],\"application/vnd.geogebra.file\":[\"ggb\"],\"application/vnd.geogebra.tool\":[\"ggt\"],\"application/vnd.geometry-explorer\":[\"gex\",\"gre\"],\"application/vnd.geonext\":[\"gxt\"],\"application/vnd.geoplan\":[\"g2w\"],\"application/vnd.geospace\":[\"g3w\"],\"application/vnd.gmx\":[\"gmx\"],\"application/vnd.google-apps.document\":[\"gdoc\"],\"application/vnd.google-apps.presentation\":[\"gslides\"],\"application/vnd.google-apps.spreadsheet\":[\"gsheet\"],\"application/vnd.google-earth.kml+xml\":[\"kml\"],\"application/vnd.google-earth.kmz\":[\"kmz\"],\"application/vnd.grafeq\":[\"gqf\",\"gqs\"],\"application/vnd.groove-account\":[\"gac\"],\"application/vnd.groove-help\":[\"ghf\"],\"application/vnd.groove-identity-message\":[\"gim\"],\"application/vnd.groove-injector\":[\"grv\"],\"application/vnd.groove-tool-message\":[\"gtm\"],\"application/vnd.groove-tool-template\":[\"tpl\"],\"application/vnd.groove-vcard\":[\"vcg\"],\"application/vnd.hal+xml\":[\"hal\"],\"application/vnd.handheld-entertainment+xml\":[\"zmm\"],\"application/vnd.hbci\":[\"hbci\"],\"application/vnd.hhe.lesson-player\":[\"les\"],\"application/vnd.hp-hpgl\":[\"hpgl\"],\"application/vnd.hp-hpid\":[\"hpid\"],\"application/vnd.hp-hps\":[\"hps\"],\"application/vnd.hp-jlyt\":[\"jlt\"],\"application/vnd.hp-pcl\":[\"pcl\"],\"application/vnd.hp-pclxl\":[\"pclxl\"],\"application/vnd.hydrostatix.sof-data\":[\"sfd-hdstx\"],\"application/vnd.ibm.minipay\":[\"mpy\"],\"application/vnd.ibm.modcap\":[\"afp\",\"listafp\",\"list3820\"],\"application/vnd.ibm.rights-management\":[\"irm\"],\"application/vnd.ibm.secure-container\":[\"sc\"],\"application/vnd.iccprofile\":[\"icc\",\"icm\"],\"application/vnd.igloader\":[\"igl\"],\"application/vnd.immervision-ivp\":[\"ivp\"],\"application/vnd.immervision-ivu\":[\"ivu\"],\"application/vnd.insors.igm\":[\"igm\"],\"application/vnd.intercon.formnet\":[\"xpw\",\"xpx\"],\"application/vnd.intergeo\":[\"i2g\"],\"application/vnd.intu.qbo\":[\"qbo\"],\"application/vnd.intu.qfx\":[\"qfx\"],\"application/vnd.ipunplugged.rcprofile\":[\"rcprofile\"],\"application/vnd.irepository.package+xml\":[\"irp\"],
\"application/vnd.is-xpr\":[\"xpr\"],\"application/vnd.isac.fcs\":[\"fcs\"],\"application/vnd.jam\":[\"jam\"],\"application/vnd.jcp.javame.midlet-rms\":[\"rms\"],\"application/vnd.jisp\":[\"jisp\"],\"application/vnd.joost.joda-archive\":[\"joda\"],\"application/vnd.kahootz\":[\"ktz\",\"ktr\"],\"application/vnd.kde.karbon\":[\"karbon\"],\"application/vnd.kde.kchart\":[\"chrt\"],\"application/vnd.kde.kformula\":[\"kfo\"],\"application/vnd.kde.kivio\":[\"flw\"],\"application/vnd.kde.kontour\":[\"kon\"],\"application/vnd.kde.kpresenter\":[\"kpr\",\"kpt\"],\"application/vnd.kde.kspread\":[\"ksp\"],\"application/vnd.kde.kword\":[\"kwd\",\"kwt\"],\"application/vnd.kenameaapp\":[\"htke\"],\"application/vnd.kidspiration\":[\"kia\"],\"application/vnd.kinar\":[\"kne\",\"knp\"],\"application/vnd.koan\":[\"skp\",\"skd\",\"skt\",\"skm\"],\"application/vnd.kodak-descriptor\":[\"sse\"],\"application/vnd.las.las+xml\":[\"lasxml\"],\"application/vnd.llamagraphics.life-balance.desktop\":[\"lbd\"],\"application/vnd.llamagraphics.life-balance.exchange+xml\":[\"lbe\"],\"application/vnd.lotus-1-2-3\":[\"123\"],\"application/vnd.lotus-approach\":[\"apr\"],\"application/vnd.lotus-freelance\":[\"pre\"],\"application/vnd.lotus-notes\":[\"nsf\"],\"application/vnd.lotus-organizer\":[\"org\"],\"application/vnd.lotus-screencam\":[\"scm\"],\"application/vnd.lotus-wordpro\":[\"lwp\"],\"application/vnd.macports.portpkg\":[\"portpkg\"],\"application/vnd.mapbox-vector-tile\":[\"mvt\"],\"application/vnd.mcd\":[\"mcd\"],\"application/vnd.medcalcdata\":[\"mc1\"],\"application/vnd.mediastation.cdkey\":[\"cdkey\"],\"application/vnd.mfer\":[\"mwf\"],\"application/vnd.mfmp\":[\"mfm\"],\"application/vnd.micrografx.flo\":[\"flo\"],\"application/vnd.micrografx.igx\":[\"igx\"],\"application/vnd.mif\":[\"mif\"],\"application/vnd.mobius.daf\":[\"daf\"],\"application/vnd.mobius.dis\":[\"dis\"],\"application/vnd.mobius.mbk\":[\"mbk\"],\"application/vnd.mobius.mqy\":[\"mqy\"],\"application/vnd.mobius.msl\":[\"msl\"],\"application/vnd.mobius.plc\":[\"plc\"],\"application/vnd.mobius.txf\":[\"txf\"],\"application/vnd.mophun.application\":[\"mpn\"],\"application/vnd.mophun.certificate\":[\"mpc\"],\"application/vnd.mozilla.xul+xml\":[\"xul\"],\"application/vnd.ms-artgalry\":[\"cil\"],\"application/vnd.ms-cab-compressed\":[\"cab\"],\"application/vnd.ms-excel\":[\"xls\",\"xlm\",\"xla\",\"xlc\",\"xlt\",\"xlw\"],\"application/vnd.ms-excel.addin.macroenabled.12\":[\"xlam\"],\"application/vnd.ms-excel.sheet.binary.macroenabled.12\":[\"xlsb\"],\"application/vnd.ms-excel.sheet.macroenabled.12\":[\"xlsm\"],\"application/vnd.ms-excel.template.macroenabled.12\":[\"xltm\"],\"application/vnd.ms-fontobject\":[\"eot\"],\"application/vnd.ms-htmlhelp\":[\"chm\"],\"application/vnd.ms-ims\":[\"ims\"],\"application/vnd.ms-lrm\":[\"lrm\"],\"application/vnd.ms-officetheme\":[\"thmx\"],\"application/vnd.ms-outlook\":[\"msg\"],\"application/vnd.ms-pki.seccat\":[\"cat\"],\"application/vnd.ms-pki.stl\":[\"*stl\"],\"application/vnd.ms-powerpoint\":[\"ppt\",\"pps\",\"pot\"],\"application/vnd.ms-powerpoint.addin.macroenabled.12\":[\"ppam\"],\"application/vnd.ms-powerpoint.presentation.macroenabled.12\":[\"pptm\"],\"application/vnd.ms-powerpoint.slide.macroenabled.12\":[\"sldm\"],\"application/vnd.ms-powerpoint.slideshow.macroenabled.12\":[\"ppsm\"],\"application/vnd.ms-powerpoint.template.macroenabled.12\":[\"potm\"],\"application/vnd.ms-project\":[\"mpp\",\"mpt\"],\"application/vnd.ms-word.document.macroenabled.12\":[\"docm\"],\"application/vnd.ms-word.template.macroenabled
.12\":[\"dotm\"],\"application/vnd.ms-works\":[\"wps\",\"wks\",\"wcm\",\"wdb\"],\"application/vnd.ms-wpl\":[\"wpl\"],\"application/vnd.ms-xpsdocument\":[\"xps\"],\"application/vnd.mseq\":[\"mseq\"],\"application/vnd.musician\":[\"mus\"],\"application/vnd.muvee.style\":[\"msty\"],\"application/vnd.mynfc\":[\"taglet\"],\"application/vnd.neurolanguage.nlu\":[\"nlu\"],\"application/vnd.nitf\":[\"ntf\",\"nitf\"],\"application/vnd.noblenet-directory\":[\"nnd\"],\"application/vnd.noblenet-sealer\":[\"nns\"],\"application/vnd.noblenet-web\":[\"nnw\"],\"application/vnd.nokia.n-gage.ac+xml\":[\"*ac\"],\"application/vnd.nokia.n-gage.data\":[\"ngdat\"],\"application/vnd.nokia.n-gage.symbian.install\":[\"n-gage\"],\"application/vnd.nokia.radio-preset\":[\"rpst\"],\"application/vnd.nokia.radio-presets\":[\"rpss\"],\"application/vnd.novadigm.edm\":[\"edm\"],\"application/vnd.novadigm.edx\":[\"edx\"],\"application/vnd.novadigm.ext\":[\"ext\"],\"application/vnd.oasis.opendocument.chart\":[\"odc\"],\"application/vnd.oasis.opendocument.chart-template\":[\"otc\"],\"application/vnd.oasis.opendocument.database\":[\"odb\"],\"application/vnd.oasis.opendocument.formula\":[\"odf\"],\"application/vnd.oasis.opendocument.formula-template\":[\"odft\"],\"application/vnd.oasis.opendocument.graphics\":[\"odg\"],\"application/vnd.oasis.opendocument.graphics-template\":[\"otg\"],\"application/vnd.oasis.opendocument.image\":[\"odi\"],\"application/vnd.oasis.opendocument.image-template\":[\"oti\"],\"application/vnd.oasis.opendocument.presentation\":[\"odp\"],\"application/vnd.oasis.opendocument.presentation-template\":[\"otp\"],\"application/vnd.oasis.opendocument.spreadsheet\":[\"ods\"],\"application/vnd.oasis.opendocument.spreadsheet-template\":[\"ots\"],\"application/vnd.oasis.opendocument.text\":[\"odt\"],\"application/vnd.oasis.opendocument.text-master\":[\"odm\"],\"application/vnd.oasis.opendocument.text-template\":[\"ott\"],\"application/vnd.oasis.opendocument.text-web\":[\"oth\"],\"application/vnd.olpc-sugar\":[\"xo\"],\"application/vnd.oma.dd2+xml\":[\"dd2\"],\"application/vnd.openblox.game+xml\":[\"obgx\"],\"application/vnd.openofficeorg.extension\":[\"oxt\"],\"application/vnd.openstreetmap.data+xml\":[\"osm\"],\"application/vnd.openxmlformats-officedocument.presentationml.presentation\":[\"pptx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slide\":[\"sldx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slideshow\":[\"ppsx\"],\"application/vnd.openxmlformats-officedocument.presentationml.template\":[\"potx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\":[\"xlsx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.template\":[\"xltx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.document\":[\"docx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.template\":[\"dotx\"],\"application/vnd.osgeo.mapguide.package\":[\"mgp\"],\"application/vnd.osgi.dp\":[\"dp\"],\"application/vnd.osgi.subsystem\":[\"esa\"],\"application/vnd.palm\":[\"pdb\",\"pqa\",\"oprc\"],\"application/vnd.pawaafile\":[\"paw\"],\"application/vnd.pg.format\":[\"str\"],\"application/vnd.pg.osasli\":[\"ei6\"],\"application/vnd.picsel\":[\"efif\"],\"application/vnd.pmi.widget\":[\"wg\"],\"application/vnd.pocketlearn\":[\"plf\"],\"application/vnd.powerbuilder6\":[\"pbd\"],\"application/vnd.previewsystems.box\":[\"box\"],\"application/vnd.proteus.magazine\":[\"mgz\"],\"application/vnd.publishare-delta-tree\":[\"qps\"],\"application/vnd.pvi.ptid1\":[
\"ptid\"],\"application/vnd.quark.quarkxpress\":[\"qxd\",\"qxt\",\"qwd\",\"qwt\",\"qxl\",\"qxb\"],\"application/vnd.rar\":[\"rar\"],\"application/vnd.realvnc.bed\":[\"bed\"],\"application/vnd.recordare.musicxml\":[\"mxl\"],\"application/vnd.recordare.musicxml+xml\":[\"musicxml\"],\"application/vnd.rig.cryptonote\":[\"cryptonote\"],\"application/vnd.rim.cod\":[\"cod\"],\"application/vnd.rn-realmedia\":[\"rm\"],\"application/vnd.rn-realmedia-vbr\":[\"rmvb\"],\"application/vnd.route66.link66+xml\":[\"link66\"],\"application/vnd.sailingtracker.track\":[\"st\"],\"application/vnd.seemail\":[\"see\"],\"application/vnd.sema\":[\"sema\"],\"application/vnd.semd\":[\"semd\"],\"application/vnd.semf\":[\"semf\"],\"application/vnd.shana.informed.formdata\":[\"ifm\"],\"application/vnd.shana.informed.formtemplate\":[\"itp\"],\"application/vnd.shana.informed.interchange\":[\"iif\"],\"application/vnd.shana.informed.package\":[\"ipk\"],\"application/vnd.simtech-mindmapper\":[\"twd\",\"twds\"],\"application/vnd.smaf\":[\"mmf\"],\"application/vnd.smart.teacher\":[\"teacher\"],\"application/vnd.software602.filler.form+xml\":[\"fo\"],\"application/vnd.solent.sdkm+xml\":[\"sdkm\",\"sdkd\"],\"application/vnd.spotfire.dxp\":[\"dxp\"],\"application/vnd.spotfire.sfs\":[\"sfs\"],\"application/vnd.stardivision.calc\":[\"sdc\"],\"application/vnd.stardivision.draw\":[\"sda\"],\"application/vnd.stardivision.impress\":[\"sdd\"],\"application/vnd.stardivision.math\":[\"smf\"],\"application/vnd.stardivision.writer\":[\"sdw\",\"vor\"],\"application/vnd.stardivision.writer-global\":[\"sgl\"],\"application/vnd.stepmania.package\":[\"smzip\"],\"application/vnd.stepmania.stepchart\":[\"sm\"],\"application/vnd.sun.wadl+xml\":[\"wadl\"],\"application/vnd.sun.xml.calc\":[\"sxc\"],\"application/vnd.sun.xml.calc.template\":[\"stc\"],\"application/vnd.sun.xml.draw\":[\"sxd\"],\"application/vnd.sun.xml.draw.template\":[\"std\"],\"application/vnd.sun.xml.impress\":[\"sxi\"],\"application/vnd.sun.xml.impress.template\":[\"sti\"],\"application/vnd.sun.xml.math\":[\"sxm\"],\"application/vnd.sun.xml.writer\":[\"sxw\"],\"application/vnd.sun.xml.writer.global\":[\"sxg\"],\"application/vnd.sun.xml.writer.template\":[\"stw\"],\"application/vnd.sus-calendar\":[\"sus\",\"susp\"],\"application/vnd.svd\":[\"svd\"],\"application/vnd.symbian.install\":[\"sis\",\"sisx\"],\"application/vnd.syncml+xml\":[\"xsm\"],\"application/vnd.syncml.dm+wbxml\":[\"bdm\"],\"application/vnd.syncml.dm+xml\":[\"xdm\"],\"application/vnd.syncml.dmddf+xml\":[\"ddf\"],\"application/vnd.tao.intent-module-archive\":[\"tao\"],\"application/vnd.tcpdump.pcap\":[\"pcap\",\"cap\",\"dmp\"],\"application/vnd.tmobile-livetv\":[\"tmo\"],\"application/vnd.trid.tpt\":[\"tpt\"],\"application/vnd.triscape.mxs\":[\"mxs\"],\"application/vnd.trueapp\":[\"tra\"],\"application/vnd.ufdl\":[\"ufd\",\"ufdl\"],\"application/vnd.uiq.theme\":[\"utz\"],\"application/vnd.umajin\":[\"umj\"],\"application/vnd.unity\":[\"unityweb\"],\"application/vnd.uoml+xml\":[\"uoml\"],\"application/vnd.vcx\":[\"vcx\"],\"application/vnd.visio\":[\"vsd\",\"vst\",\"vss\",\"vsw\"],\"application/vnd.visionary\":[\"vis\"],\"application/vnd.vsf\":[\"vsf\"],\"application/vnd.wap.wbxml\":[\"wbxml\"],\"application/vnd.wap.wmlc\":[\"wmlc\"],\"application/vnd.wap.wmlscriptc\":[\"wmlsc\"],\"application/vnd.webturbo\":[\"wtb\"],\"application/vnd.wolfram.player\":[\"nbp\"],\"application/vnd.wordperfect\":[\"wpd\"],\"application/vnd.wqd\":[\"wqd\"],\"application/vnd.wt.stf\":[\"stf\"],\"application/vnd.xara\":[\"xar\"],\"application/v
nd.xfdl\":[\"xfdl\"],\"application/vnd.yamaha.hv-dic\":[\"hvd\"],\"application/vnd.yamaha.hv-script\":[\"hvs\"],\"application/vnd.yamaha.hv-voice\":[\"hvp\"],\"application/vnd.yamaha.openscoreformat\":[\"osf\"],\"application/vnd.yamaha.openscoreformat.osfpvg+xml\":[\"osfpvg\"],\"application/vnd.yamaha.smaf-audio\":[\"saf\"],\"application/vnd.yamaha.smaf-phrase\":[\"spf\"],\"application/vnd.yellowriver-custom-menu\":[\"cmp\"],\"application/vnd.zul\":[\"zir\",\"zirz\"],\"application/vnd.zzazz.deck+xml\":[\"zaz\"],\"application/x-7z-compressed\":[\"7z\"],\"application/x-abiword\":[\"abw\"],\"application/x-ace-compressed\":[\"ace\"],\"application/x-apple-diskimage\":[\"*dmg\"],\"application/x-arj\":[\"arj\"],\"application/x-authorware-bin\":[\"aab\",\"x32\",\"u32\",\"vox\"],\"application/x-authorware-map\":[\"aam\"],\"application/x-authorware-seg\":[\"aas\"],\"application/x-bcpio\":[\"bcpio\"],\"application/x-bdoc\":[\"*bdoc\"],\"application/x-bittorrent\":[\"torrent\"],\"application/x-blorb\":[\"blb\",\"blorb\"],\"application/x-bzip\":[\"bz\"],\"application/x-bzip2\":[\"bz2\",\"boz\"],\"application/x-cbr\":[\"cbr\",\"cba\",\"cbt\",\"cbz\",\"cb7\"],\"application/x-cdlink\":[\"vcd\"],\"application/x-cfs-compressed\":[\"cfs\"],\"application/x-chat\":[\"chat\"],\"application/x-chess-pgn\":[\"pgn\"],\"application/x-chrome-extension\":[\"crx\"],\"application/x-cocoa\":[\"cco\"],\"application/x-conference\":[\"nsc\"],\"application/x-cpio\":[\"cpio\"],\"application/x-csh\":[\"csh\"],\"application/x-debian-package\":[\"*deb\",\"udeb\"],\"application/x-dgc-compressed\":[\"dgc\"],\"application/x-director\":[\"dir\",\"dcr\",\"dxr\",\"cst\",\"cct\",\"cxt\",\"w3d\",\"fgd\",\"swa\"],\"application/x-doom\":[\"wad\"],\"application/x-dtbncx+xml\":[\"ncx\"],\"application/x-dtbook+xml\":[\"dtb\"],\"application/x-dtbresource+xml\":[\"res\"],\"application/x-dvi\":[\"dvi\"],\"application/x-envoy\":[\"evy\"],\"application/x-eva\":[\"eva\"],\"application/x-font-bdf\":[\"bdf\"],\"application/x-font-ghostscript\":[\"gsf\"],\"application/x-font-linux-psf\":[\"psf\"],\"application/x-font-pcf\":[\"pcf\"],\"application/x-font-snf\":[\"snf\"],\"application/x-font-type1\":[\"pfa\",\"pfb\",\"pfm\",\"afm\"],\"application/x-freearc\":[\"arc\"],\"application/x-futuresplash\":[\"spl\"],\"application/x-gca-compressed\":[\"gca\"],\"application/x-glulx\":[\"ulx\"],\"application/x-gnumeric\":[\"gnumeric\"],\"application/x-gramps-xml\":[\"gramps\"],\"application/x-gtar\":[\"gtar\"],\"application/x-hdf\":[\"hdf\"],\"application/x-httpd-php\":[\"php\"],\"application/x-install-instructions\":[\"install\"],\"application/x-iso9660-image\":[\"*iso\"],\"application/x-iwork-keynote-sffkey\":[\"*key\"],\"application/x-iwork-numbers-sffnumbers\":[\"*numbers\"],\"application/x-iwork-pages-sffpages\":[\"*pages\"],\"application/x-java-archive-diff\":[\"jardiff\"],\"application/x-java-jnlp-file\":[\"jnlp\"],\"application/x-keepass2\":[\"kdbx\"],\"application/x-latex\":[\"latex\"],\"application/x-lua-bytecode\":[\"luac\"],\"application/x-lzh-compressed\":[\"lzh\",\"lha\"],\"application/x-makeself\":[\"run\"],\"application/x-mie\":[\"mie\"],\"application/x-mobipocket-ebook\":[\"prc\",\"mobi\"],\"application/x-ms-application\":[\"application\"],\"application/x-ms-shortcut\":[\"lnk\"],\"application/x-ms-wmd\":[\"wmd\"],\"application/x-ms-wmz\":[\"wmz\"],\"application/x-ms-xbap\":[\"xbap\"],\"application/x-msaccess\":[\"mdb\"],\"application/x-msbinder\":[\"obd\"],\"application/x-mscardfile\":[\"crd\"],\"application/x-msclip\":[\"clp\"],\"application/x-ms
dos-program\":[\"*exe\"],\"application/x-msdownload\":[\"*exe\",\"*dll\",\"com\",\"bat\",\"*msi\"],\"application/x-msmediaview\":[\"mvb\",\"m13\",\"m14\"],\"application/x-msmetafile\":[\"*wmf\",\"*wmz\",\"*emf\",\"emz\"],\"application/x-msmoney\":[\"mny\"],\"application/x-mspublisher\":[\"pub\"],\"application/x-msschedule\":[\"scd\"],\"application/x-msterminal\":[\"trm\"],\"application/x-mswrite\":[\"wri\"],\"application/x-netcdf\":[\"nc\",\"cdf\"],\"application/x-ns-proxy-autoconfig\":[\"pac\"],\"application/x-nzb\":[\"nzb\"],\"application/x-perl\":[\"pl\",\"pm\"],\"application/x-pilot\":[\"*prc\",\"*pdb\"],\"application/x-pkcs12\":[\"p12\",\"pfx\"],\"application/x-pkcs7-certificates\":[\"p7b\",\"spc\"],\"application/x-pkcs7-certreqresp\":[\"p7r\"],\"application/x-rar-compressed\":[\"*rar\"],\"application/x-redhat-package-manager\":[\"rpm\"],\"application/x-research-info-systems\":[\"ris\"],\"application/x-sea\":[\"sea\"],\"application/x-sh\":[\"sh\"],\"application/x-shar\":[\"shar\"],\"application/x-shockwave-flash\":[\"swf\"],\"application/x-silverlight-app\":[\"xap\"],\"application/x-sql\":[\"sql\"],\"application/x-stuffit\":[\"sit\"],\"application/x-stuffitx\":[\"sitx\"],\"application/x-subrip\":[\"srt\"],\"application/x-sv4cpio\":[\"sv4cpio\"],\"application/x-sv4crc\":[\"sv4crc\"],\"application/x-t3vm-image\":[\"t3\"],\"application/x-tads\":[\"gam\"],\"application/x-tar\":[\"tar\"],\"application/x-tcl\":[\"tcl\",\"tk\"],\"application/x-tex\":[\"tex\"],\"application/x-tex-tfm\":[\"tfm\"],\"application/x-texinfo\":[\"texinfo\",\"texi\"],\"application/x-tgif\":[\"*obj\"],\"application/x-ustar\":[\"ustar\"],\"application/x-virtualbox-hdd\":[\"hdd\"],\"application/x-virtualbox-ova\":[\"ova\"],\"application/x-virtualbox-ovf\":[\"ovf\"],\"application/x-virtualbox-vbox\":[\"vbox\"],\"application/x-virtualbox-vbox-extpack\":[\"vbox-extpack\"],\"application/x-virtualbox-vdi\":[\"vdi\"],\"application/x-virtualbox-vhd\":[\"vhd\"],\"application/x-virtualbox-vmdk\":[\"vmdk\"],\"application/x-wais-source\":[\"src\"],\"application/x-web-app-manifest+json\":[\"webapp\"],\"application/x-x509-ca-cert\":[\"der\",\"crt\",\"pem\"],\"application/x-xfig\":[\"fig\"],\"application/x-xliff+xml\":[\"*xlf\"],\"application/x-xpinstall\":[\"xpi\"],\"application/x-xz\":[\"xz\"],\"application/x-zmachine\":[\"z1\",\"z2\",\"z3\",\"z4\",\"z5\",\"z6\",\"z7\",\"z8\"],\"audio/vnd.dece.audio\":[\"uva\",\"uvva\"],\"audio/vnd.digital-winds\":[\"eol\"],\"audio/vnd.dra\":[\"dra\"],\"audio/vnd.dts\":[\"dts\"],\"audio/vnd.dts.hd\":[\"dtshd\"],\"audio/vnd.lucent.voice\":[\"lvp\"],\"audio/vnd.ms-playready.media.pya\":[\"pya\"],\"audio/vnd.nuera.ecelp4800\":[\"ecelp4800\"],\"audio/vnd.nuera.ecelp7470\":[\"ecelp7470\"],\"audio/vnd.nuera.ecelp9600\":[\"ecelp9600\"],\"audio/vnd.rip\":[\"rip\"],\"audio/x-aac\":[\"aac\"],\"audio/x-aiff\":[\"aif\",\"aiff\",\"aifc\"],\"audio/x-caf\":[\"caf\"],\"audio/x-flac\":[\"flac\"],\"audio/x-m4a\":[\"*m4a\"],\"audio/x-matroska\":[\"mka\"],\"audio/x-mpegurl\":[\"m3u\"],\"audio/x-ms-wax\":[\"wax\"],\"audio/x-ms-wma\":[\"wma\"],\"audio/x-pn-realaudio\":[\"ram\",\"ra\"],\"audio/x-pn-realaudio-plugin\":[\"rmp\"],\"audio/x-realaudio\":[\"*ra\"],\"audio/x-wav\":[\"*wav\"],\"chemical/x-cdx\":[\"cdx\"],\"chemical/x-cif\":[\"cif\"],\"chemical/x-cmdf\":[\"cmdf\"],\"chemical/x-cml\":[\"cml\"],\"chemical/x-csml\":[\"csml\"],\"chemical/x-xyz\":[\"xyz\"],\"image/prs.btif\":[\"btif\"],\"image/prs.pti\":[\"pti\"],\"image/vnd.adobe.photoshop\":[\"psd\"],\"image/vnd.airzip.accelerator.azv\":[\"azv\"],\"image/vnd.dece.gr
aphic\":[\"uvi\",\"uvvi\",\"uvg\",\"uvvg\"],\"image/vnd.djvu\":[\"djvu\",\"djv\"],\"image/vnd.dvb.subtitle\":[\"*sub\"],\"image/vnd.dwg\":[\"dwg\"],\"image/vnd.dxf\":[\"dxf\"],\"image/vnd.fastbidsheet\":[\"fbs\"],\"image/vnd.fpx\":[\"fpx\"],\"image/vnd.fst\":[\"fst\"],\"image/vnd.fujixerox.edmics-mmr\":[\"mmr\"],\"image/vnd.fujixerox.edmics-rlc\":[\"rlc\"],\"image/vnd.microsoft.icon\":[\"ico\"],\"image/vnd.ms-dds\":[\"dds\"],\"image/vnd.ms-modi\":[\"mdi\"],\"image/vnd.ms-photo\":[\"wdp\"],\"image/vnd.net-fpx\":[\"npx\"],\"image/vnd.pco.b16\":[\"b16\"],\"image/vnd.tencent.tap\":[\"tap\"],\"image/vnd.valve.source.texture\":[\"vtf\"],\"image/vnd.wap.wbmp\":[\"wbmp\"],\"image/vnd.xiff\":[\"xif\"],\"image/vnd.zbrush.pcx\":[\"pcx\"],\"image/x-3ds\":[\"3ds\"],\"image/x-cmu-raster\":[\"ras\"],\"image/x-cmx\":[\"cmx\"],\"image/x-freehand\":[\"fh\",\"fhc\",\"fh4\",\"fh5\",\"fh7\"],\"image/x-icon\":[\"*ico\"],\"image/x-jng\":[\"jng\"],\"image/x-mrsid-image\":[\"sid\"],\"image/x-ms-bmp\":[\"*bmp\"],\"image/x-pcx\":[\"*pcx\"],\"image/x-pict\":[\"pic\",\"pct\"],\"image/x-portable-anymap\":[\"pnm\"],\"image/x-portable-bitmap\":[\"pbm\"],\"image/x-portable-graymap\":[\"pgm\"],\"image/x-portable-pixmap\":[\"ppm\"],\"image/x-rgb\":[\"rgb\"],\"image/x-tga\":[\"tga\"],\"image/x-xbitmap\":[\"xbm\"],\"image/x-xpixmap\":[\"xpm\"],\"image/x-xwindowdump\":[\"xwd\"],\"message/vnd.wfa.wsc\":[\"wsc\"],\"model/vnd.collada+xml\":[\"dae\"],\"model/vnd.dwf\":[\"dwf\"],\"model/vnd.gdl\":[\"gdl\"],\"model/vnd.gtw\":[\"gtw\"],\"model/vnd.mts\":[\"mts\"],\"model/vnd.opengex\":[\"ogex\"],\"model/vnd.parasolid.transmit.binary\":[\"x_b\"],\"model/vnd.parasolid.transmit.text\":[\"x_t\"],\"model/vnd.sap.vds\":[\"vds\"],\"model/vnd.usdz+zip\":[\"usdz\"],\"model/vnd.valve.source.compiled-map\":[\"bsp\"],\"model/vnd.vtu\":[\"vtu\"],\"text/prs.lines.tag\":[\"dsc\"],\"text/vnd.curl\":[\"curl\"],\"text/vnd.curl.dcurl\":[\"dcurl\"],\"text/vnd.curl.mcurl\":[\"mcurl\"],\"text/vnd.curl.scurl\":[\"scurl\"],\"text/vnd.dvb.subtitle\":[\"sub\"],\"text/vnd.fly\":[\"fly\"],\"text/vnd.fmi.flexstor\":[\"flx\"],\"text/vnd.graphviz\":[\"gv\"],\"text/vnd.in3d.3dml\":[\"3dml\"],\"text/vnd.in3d.spot\":[\"spot\"],\"text/vnd.sun.j2me.app-descriptor\":[\"jad\"],\"text/vnd.wap.wml\":[\"wml\"],\"text/vnd.wap.wmlscript\":[\"wmls\"],\"text/x-asm\":[\"s\",\"asm\"],\"text/x-c\":[\"c\",\"cc\",\"cxx\",\"cpp\",\"h\",\"hh\",\"dic\"],\"text/x-component\":[\"htc\"],\"text/x-fortran\":[\"f\",\"for\",\"f77\",\"f90\"],\"text/x-handlebars-template\":[\"hbs\"],\"text/x-java-source\":[\"java\"],\"text/x-lua\":[\"lua\"],\"text/x-markdown\":[\"mkd\"],\"text/x-nfo\":[\"nfo\"],\"text/x-opml\":[\"opml\"],\"text/x-org\":[\"*org\"],\"text/x-pascal\":[\"p\",\"pas\"],\"text/x-processing\":[\"pde\"],\"text/x-sass\":[\"sass\"],\"text/x-scss\":[\"scss\"],\"text/x-setext\":[\"etx\"],\"text/x-sfv\":[\"sfv\"],\"text/x-suse-ymp\":[\"ymp\"],\"text/x-uuencode\":[\"uu\"],\"text/x-vcalendar\":[\"vcs\"],\"text/x-vcard\":[\"vcf\"],\"video/vnd.dece.hd\":[\"uvh\",\"uvvh\"],\"video/vnd.dece.mobile\":[\"uvm\",\"uvvm\"],\"video/vnd.dece.pd\":[\"uvp\",\"uvvp\"],\"video/vnd.dece.sd\":[\"uvs\",\"uvvs\"],\"video/vnd.dece.video\":[\"uvv\",\"uvvv\"],\"video/vnd.dvb.file\":[\"dvb\"],\"video/vnd.fvt\":[\"fvt\"],\"video/vnd.mpegurl\":[\"mxu\",\"m4u\"],\"video/vnd.ms-playready.media.pyv\":[\"pyv\"],\"video/vnd.uvvu.mp4\":[\"uvu\",\"uvvu\"],\"video/vnd.vivo\":[\"viv\"],\"video/x-f4v\":[\"f4v\"],\"video/x-fli\":[\"fli\"],\"video/x-flv\":[\"flv\"],\"video/x-m4v\":[\"m4v\"],\"video/x-matroska\":[\"mkv\",\"mk3d\",\
"mks\"],\"video/x-mng\":[\"mng\"],\"video/x-ms-asf\":[\"asf\",\"asx\"],\"video/x-ms-vob\":[\"vob\"],\"video/x-ms-wm\":[\"wm\"],\"video/x-ms-wmv\":[\"wmv\"],\"video/x-ms-wmx\":[\"wmx\"],\"video/x-ms-wvx\":[\"wvx\"],\"video/x-msvideo\":[\"avi\"],\"video/x-sgi-movie\":[\"movie\"],\"video/x-smv\":[\"smv\"],\"x-conference/x-cooltalk\":[\"ice\"]};","module.exports = {\"application/andrew-inset\":[\"ez\"],\"application/applixware\":[\"aw\"],\"application/atom+xml\":[\"atom\"],\"application/atomcat+xml\":[\"atomcat\"],\"application/atomdeleted+xml\":[\"atomdeleted\"],\"application/atomsvc+xml\":[\"atomsvc\"],\"application/atsc-dwd+xml\":[\"dwd\"],\"application/atsc-held+xml\":[\"held\"],\"application/atsc-rsat+xml\":[\"rsat\"],\"application/bdoc\":[\"bdoc\"],\"application/calendar+xml\":[\"xcs\"],\"application/ccxml+xml\":[\"ccxml\"],\"application/cdfx+xml\":[\"cdfx\"],\"application/cdmi-capability\":[\"cdmia\"],\"application/cdmi-container\":[\"cdmic\"],\"application/cdmi-domain\":[\"cdmid\"],\"application/cdmi-object\":[\"cdmio\"],\"application/cdmi-queue\":[\"cdmiq\"],\"application/cu-seeme\":[\"cu\"],\"application/dash+xml\":[\"mpd\"],\"application/davmount+xml\":[\"davmount\"],\"application/docbook+xml\":[\"dbk\"],\"application/dssc+der\":[\"dssc\"],\"application/dssc+xml\":[\"xdssc\"],\"application/ecmascript\":[\"es\",\"ecma\"],\"application/emma+xml\":[\"emma\"],\"application/emotionml+xml\":[\"emotionml\"],\"application/epub+zip\":[\"epub\"],\"application/exi\":[\"exi\"],\"application/express\":[\"exp\"],\"application/fdt+xml\":[\"fdt\"],\"application/font-tdpfr\":[\"pfr\"],\"application/geo+json\":[\"geojson\"],\"application/gml+xml\":[\"gml\"],\"application/gpx+xml\":[\"gpx\"],\"application/gxf\":[\"gxf\"],\"application/gzip\":[\"gz\"],\"application/hjson\":[\"hjson\"],\"application/hyperstudio\":[\"stk\"],\"application/inkml+xml\":[\"ink\",\"inkml\"],\"application/ipfix\":[\"ipfix\"],\"application/its+xml\":[\"its\"],\"application/java-archive\":[\"jar\",\"war\",\"ear\"],\"application/java-serialized-object\":[\"ser\"],\"application/java-vm\":[\"class\"],\"application/javascript\":[\"js\",\"mjs\"],\"application/json\":[\"json\",\"map\"],\"application/json5\":[\"json5\"],\"application/jsonml+json\":[\"jsonml\"],\"application/ld+json\":[\"jsonld\"],\"application/lgr+xml\":[\"lgr\"],\"application/lost+xml\":[\"lostxml\"],\"application/mac-binhex40\":[\"hqx\"],\"application/mac-compactpro\":[\"cpt\"],\"application/mads+xml\":[\"mads\"],\"application/manifest+json\":[\"webmanifest\"],\"application/marc\":[\"mrc\"],\"application/marcxml+xml\":[\"mrcx\"],\"application/mathematica\":[\"ma\",\"nb\",\"mb\"],\"application/mathml+xml\":[\"mathml\"],\"application/mbox\":[\"mbox\"],\"application/mediaservercontrol+xml\":[\"mscml\"],\"application/metalink+xml\":[\"metalink\"],\"application/metalink4+xml\":[\"meta4\"],\"application/mets+xml\":[\"mets\"],\"application/mmt-aei+xml\":[\"maei\"],\"application/mmt-usd+xml\":[\"musd\"],\"application/mods+xml\":[\"mods\"],\"application/mp21\":[\"m21\",\"mp21\"],\"application/mp4\":[\"mp4s\",\"m4p\"],\"application/msword\":[\"doc\",\"dot\"],\"application/mxf\":[\"mxf\"],\"application/n-quads\":[\"nq\"],\"application/n-triples\":[\"nt\"],\"application/node\":[\"cjs\"],\"application/octet-stream\":[\"bin\",\"dms\",\"lrf\",\"mar\",\"so\",\"dist\",\"distz\",\"pkg\",\"bpk\",\"dump\",\"elc\",\"deploy\",\"exe\",\"dll\",\"deb\",\"dmg\",\"iso\",\"img\",\"msi\",\"msp\",\"msm\",\"buffer\"],\"application/oda\":[\"oda\"],\"application/oebps-package+xml\":[\"opf\"],\"applic
ation/ogg\":[\"ogx\"],\"application/omdoc+xml\":[\"omdoc\"],\"application/onenote\":[\"onetoc\",\"onetoc2\",\"onetmp\",\"onepkg\"],\"application/oxps\":[\"oxps\"],\"application/p2p-overlay+xml\":[\"relo\"],\"application/patch-ops-error+xml\":[\"xer\"],\"application/pdf\":[\"pdf\"],\"application/pgp-encrypted\":[\"pgp\"],\"application/pgp-signature\":[\"asc\",\"sig\"],\"application/pics-rules\":[\"prf\"],\"application/pkcs10\":[\"p10\"],\"application/pkcs7-mime\":[\"p7m\",\"p7c\"],\"application/pkcs7-signature\":[\"p7s\"],\"application/pkcs8\":[\"p8\"],\"application/pkix-attr-cert\":[\"ac\"],\"application/pkix-cert\":[\"cer\"],\"application/pkix-crl\":[\"crl\"],\"application/pkix-pkipath\":[\"pkipath\"],\"application/pkixcmp\":[\"pki\"],\"application/pls+xml\":[\"pls\"],\"application/postscript\":[\"ai\",\"eps\",\"ps\"],\"application/provenance+xml\":[\"provx\"],\"application/pskc+xml\":[\"pskcxml\"],\"application/raml+yaml\":[\"raml\"],\"application/rdf+xml\":[\"rdf\",\"owl\"],\"application/reginfo+xml\":[\"rif\"],\"application/relax-ng-compact-syntax\":[\"rnc\"],\"application/resource-lists+xml\":[\"rl\"],\"application/resource-lists-diff+xml\":[\"rld\"],\"application/rls-services+xml\":[\"rs\"],\"application/route-apd+xml\":[\"rapd\"],\"application/route-s-tsid+xml\":[\"sls\"],\"application/route-usd+xml\":[\"rusd\"],\"application/rpki-ghostbusters\":[\"gbr\"],\"application/rpki-manifest\":[\"mft\"],\"application/rpki-roa\":[\"roa\"],\"application/rsd+xml\":[\"rsd\"],\"application/rss+xml\":[\"rss\"],\"application/rtf\":[\"rtf\"],\"application/sbml+xml\":[\"sbml\"],\"application/scvp-cv-request\":[\"scq\"],\"application/scvp-cv-response\":[\"scs\"],\"application/scvp-vp-request\":[\"spq\"],\"application/scvp-vp-response\":[\"spp\"],\"application/sdp\":[\"sdp\"],\"application/senml+xml\":[\"senmlx\"],\"application/sensml+xml\":[\"sensmlx\"],\"application/set-payment-initiation\":[\"setpay\"],\"application/set-registration-initiation\":[\"setreg\"],\"application/shf+xml\":[\"shf\"],\"application/sieve\":[\"siv\",\"sieve\"],\"application/smil+xml\":[\"smi\",\"smil\"],\"application/sparql-query\":[\"rq\"],\"application/sparql-results+xml\":[\"srx\"],\"application/srgs\":[\"gram\"],\"application/srgs+xml\":[\"grxml\"],\"application/sru+xml\":[\"sru\"],\"application/ssdl+xml\":[\"ssdl\"],\"application/ssml+xml\":[\"ssml\"],\"application/swid+xml\":[\"swidtag\"],\"application/tei+xml\":[\"tei\",\"teicorpus\"],\"application/thraud+xml\":[\"tfi\"],\"application/timestamped-data\":[\"tsd\"],\"application/toml\":[\"toml\"],\"application/trig\":[\"trig\"],\"application/ttml+xml\":[\"ttml\"],\"application/ubjson\":[\"ubj\"],\"application/urc-ressheet+xml\":[\"rsheet\"],\"application/urc-targetdesc+xml\":[\"td\"],\"application/voicexml+xml\":[\"vxml\"],\"application/wasm\":[\"wasm\"],\"application/widget\":[\"wgt\"],\"application/winhlp\":[\"hlp\"],\"application/wsdl+xml\":[\"wsdl\"],\"application/wspolicy+xml\":[\"wspolicy\"],\"application/xaml+xml\":[\"xaml\"],\"application/xcap-att+xml\":[\"xav\"],\"application/xcap-caps+xml\":[\"xca\"],\"application/xcap-diff+xml\":[\"xdf\"],\"application/xcap-el+xml\":[\"xel\"],\"application/xcap-ns+xml\":[\"xns\"],\"application/xenc+xml\":[\"xenc\"],\"application/xhtml+xml\":[\"xhtml\",\"xht\"],\"application/xliff+xml\":[\"xlf\"],\"application/xml\":[\"xml\",\"xsl\",\"xsd\",\"rng\"],\"application/xml-dtd\":[\"dtd\"],\"application/xop+xml\":[\"xop\"],\"application/xproc+xml\":[\"xpl\"],\"application/xslt+xml\":[\"*xsl\",\"xslt\"],\"application/xspf+xml\":[\"xspf\"
],\"application/xv+xml\":[\"mxml\",\"xhvml\",\"xvml\",\"xvm\"],\"application/yang\":[\"yang\"],\"application/yin+xml\":[\"yin\"],\"application/zip\":[\"zip\"],\"audio/3gpp\":[\"*3gpp\"],\"audio/adpcm\":[\"adp\"],\"audio/amr\":[\"amr\"],\"audio/basic\":[\"au\",\"snd\"],\"audio/midi\":[\"mid\",\"midi\",\"kar\",\"rmi\"],\"audio/mobile-xmf\":[\"mxmf\"],\"audio/mp3\":[\"*mp3\"],\"audio/mp4\":[\"m4a\",\"mp4a\"],\"audio/mpeg\":[\"mpga\",\"mp2\",\"mp2a\",\"mp3\",\"m2a\",\"m3a\"],\"audio/ogg\":[\"oga\",\"ogg\",\"spx\",\"opus\"],\"audio/s3m\":[\"s3m\"],\"audio/silk\":[\"sil\"],\"audio/wav\":[\"wav\"],\"audio/wave\":[\"*wav\"],\"audio/webm\":[\"weba\"],\"audio/xm\":[\"xm\"],\"font/collection\":[\"ttc\"],\"font/otf\":[\"otf\"],\"font/ttf\":[\"ttf\"],\"font/woff\":[\"woff\"],\"font/woff2\":[\"woff2\"],\"image/aces\":[\"exr\"],\"image/apng\":[\"apng\"],\"image/avif\":[\"avif\"],\"image/bmp\":[\"bmp\"],\"image/cgm\":[\"cgm\"],\"image/dicom-rle\":[\"drle\"],\"image/emf\":[\"emf\"],\"image/fits\":[\"fits\"],\"image/g3fax\":[\"g3\"],\"image/gif\":[\"gif\"],\"image/heic\":[\"heic\"],\"image/heic-sequence\":[\"heics\"],\"image/heif\":[\"heif\"],\"image/heif-sequence\":[\"heifs\"],\"image/hej2k\":[\"hej2\"],\"image/hsj2\":[\"hsj2\"],\"image/ief\":[\"ief\"],\"image/jls\":[\"jls\"],\"image/jp2\":[\"jp2\",\"jpg2\"],\"image/jpeg\":[\"jpeg\",\"jpg\",\"jpe\"],\"image/jph\":[\"jph\"],\"image/jphc\":[\"jhc\"],\"image/jpm\":[\"jpm\"],\"image/jpx\":[\"jpx\",\"jpf\"],\"image/jxr\":[\"jxr\"],\"image/jxra\":[\"jxra\"],\"image/jxrs\":[\"jxrs\"],\"image/jxs\":[\"jxs\"],\"image/jxsc\":[\"jxsc\"],\"image/jxsi\":[\"jxsi\"],\"image/jxss\":[\"jxss\"],\"image/ktx\":[\"ktx\"],\"image/ktx2\":[\"ktx2\"],\"image/png\":[\"png\"],\"image/sgi\":[\"sgi\"],\"image/svg+xml\":[\"svg\",\"svgz\"],\"image/t38\":[\"t38\"],\"image/tiff\":[\"tif\",\"tiff\"],\"image/tiff-fx\":[\"tfx\"],\"image/webp\":[\"webp\"],\"image/wmf\":[\"wmf\"],\"message/disposition-notification\":[\"disposition-notification\"],\"message/global\":[\"u8msg\"],\"message/global-delivery-status\":[\"u8dsn\"],\"message/global-disposition-notification\":[\"u8mdn\"],\"message/global-headers\":[\"u8hdr\"],\"message/rfc822\":[\"eml\",\"mime\"],\"model/3mf\":[\"3mf\"],\"model/gltf+json\":[\"gltf\"],\"model/gltf-binary\":[\"glb\"],\"model/iges\":[\"igs\",\"iges\"],\"model/mesh\":[\"msh\",\"mesh\",\"silo\"],\"model/mtl\":[\"mtl\"],\"model/obj\":[\"obj\"],\"model/step+xml\":[\"stpx\"],\"model/step+zip\":[\"stpz\"],\"model/step-xml+zip\":[\"stpxz\"],\"model/stl\":[\"stl\"],\"model/vrml\":[\"wrl\",\"vrml\"],\"model/x3d+binary\":[\"*x3db\",\"x3dbz\"],\"model/x3d+fastinfoset\":[\"x3db\"],\"model/x3d+vrml\":[\"*x3dv\",\"x3dvz\"],\"model/x3d+xml\":[\"x3d\",\"x3dz\"],\"model/x3d-vrml\":[\"x3dv\"],\"text/cache-manifest\":[\"appcache\",\"manifest\"],\"text/calendar\":[\"ics\",\"ifb\"],\"text/coffeescript\":[\"coffee\",\"litcoffee\"],\"text/css\":[\"css\"],\"text/csv\":[\"csv\"],\"text/html\":[\"html\",\"htm\",\"shtml\"],\"text/jade\":[\"jade\"],\"text/jsx\":[\"jsx\"],\"text/less\":[\"less\"],\"text/markdown\":[\"markdown\",\"md\"],\"text/mathml\":[\"mml\"],\"text/mdx\":[\"mdx\"],\"text/n3\":[\"n3\"],\"text/plain\":[\"txt\",\"text\",\"conf\",\"def\",\"list\",\"log\",\"in\",\"ini\"],\"text/richtext\":[\"rtx\"],\"text/rtf\":[\"*rtf\"],\"text/sgml\":[\"sgml\",\"sgm\"],\"text/shex\":[\"shex\"],\"text/slim\":[\"slim\",\"slm\"],\"text/spdx\":[\"spdx\"],\"text/stylus\":[\"stylus\",\"styl\"],\"text/tab-separated-values\":[\"tsv\"],\"text/troff\":[\"t\",\"tr\",\"roff\",\"man\",\"me\",\"ms\"],\"text/turtle\"
:[\"ttl\"],\"text/uri-list\":[\"uri\",\"uris\",\"urls\"],\"text/vcard\":[\"vcard\"],\"text/vtt\":[\"vtt\"],\"text/xml\":[\"*xml\"],\"text/yaml\":[\"yaml\",\"yml\"],\"video/3gpp\":[\"3gp\",\"3gpp\"],\"video/3gpp2\":[\"3g2\"],\"video/h261\":[\"h261\"],\"video/h263\":[\"h263\"],\"video/h264\":[\"h264\"],\"video/iso.segment\":[\"m4s\"],\"video/jpeg\":[\"jpgv\"],\"video/jpm\":[\"*jpm\",\"jpgm\"],\"video/mj2\":[\"mj2\",\"mjp2\"],\"video/mp2t\":[\"ts\"],\"video/mp4\":[\"mp4\",\"mp4v\",\"mpg4\"],\"video/mpeg\":[\"mpeg\",\"mpg\",\"mpe\",\"m1v\",\"m2v\"],\"video/ogg\":[\"ogv\"],\"video/quicktime\":[\"qt\",\"mov\"],\"video/webm\":[\"webm\"]};","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function(val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return 
Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true 
}\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nconst isSameProtocol = function isSameProtocol(destination, original) {\n\tconst orig = new URL$1(original).protocol;\n\tconst dest = new URL$1(destination).protocol;\n\n\treturn orig === dest;\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\tdestroyStream(request.body, error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(req, function (err) {\n\t\t\tif (signal && signal.aborted) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (parseInt(process.version.substring(1)) < 14) {\n\t\t\t// Before 
Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\treq.on('socket', function (s) {\n\t\t\t\ts.addListener('close', function (hadError) {\n\t\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\t\tconst hasDataListener = s.listenerCount('data') > 0;\n\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && hasDataListener && !hadError && !(signal && signal.aborted)) {\n\t\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', err);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: 
request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.on('end', function () {\n\t\t\t\t\t// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tlet socket;\n\n\trequest.on('socket', function (s) {\n\t\tsocket = s;\n\t});\n\n\trequest.on('response', function (response) {\n\t\tconst headers = response.headers;\n\n\t\tif (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {\n\t\t\tresponse.once('close', function (hadError) {\n\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\tconst hasDataListener = socket.listenerCount('data') > 0;\n\n\t\t\t\tif (hasDataListener && !hadError) {\n\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\terrorCallback(err);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n}\n\nfunction destroyStream(stream, err) {\n\tif (stream.destroy) {\n\t\tstream.destroy(err);\n\t} else {\n\t\t// node < 8\n\t\tstream.emit('error', err);\n\t\tstream.end();\n\t}\n}\n\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = 
fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\nconst Queue = require('yocto-queue');\n\nconst pLimit = concurrency => {\n\tif (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) {\n\t\tthrow new TypeError('Expected `concurrency` to be a number from 1 and up');\n\t}\n\n\tconst queue = new Queue();\n\tlet activeCount = 0;\n\n\tconst next = () => {\n\t\tactiveCount--;\n\n\t\tif (queue.size > 0) {\n\t\t\tqueue.dequeue()();\n\t\t}\n\t};\n\n\tconst run = async (fn, resolve, ...args) => {\n\t\tactiveCount++;\n\n\t\tconst result = (async () => fn(...args))();\n\n\t\tresolve(result);\n\n\t\ttry {\n\t\t\tawait result;\n\t\t} catch {}\n\n\t\tnext();\n\t};\n\n\tconst enqueue = (fn, resolve, ...args) => {\n\t\tqueue.enqueue(run.bind(null, fn, resolve, ...args));\n\n\t\t(async () => {\n\t\t\t// This function needs to wait until the next microtask before comparing\n\t\t\t// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously\n\t\t\t// when the run function is dequeued and called. 
The comparison in the if-statement\n\t\t\t// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.\n\t\t\tawait Promise.resolve();\n\n\t\t\tif (activeCount < concurrency && queue.size > 0) {\n\t\t\t\tqueue.dequeue()();\n\t\t\t}\n\t\t})();\n\t};\n\n\tconst generator = (fn, ...args) => new Promise(resolve => {\n\t\tenqueue(fn, resolve, ...args);\n\t});\n\n\tObject.defineProperties(generator, {\n\t\tactiveCount: {\n\t\t\tget: () => activeCount\n\t\t},\n\t\tpendingCount: {\n\t\t\tget: () => queue.size\n\t\t},\n\t\tclearQueue: {\n\t\t\tvalue: () => {\n\t\t\t\tqueue.clear();\n\t\t\t}\n\t\t}\n\t});\n\n\treturn generator;\n};\n\nmodule.exports = pLimit;\n","'use strict';\n\nconst codes = {};\n\nfunction createErrorType(code, message, Base) {\n if (!Base) {\n Base = Error\n }\n\n function getMessage (arg1, arg2, arg3) {\n if (typeof message === 'string') {\n return message\n } else {\n return message(arg1, arg2, arg3)\n }\n }\n\n class NodeError extends Base {\n constructor (arg1, arg2, arg3) {\n super(getMessage(arg1, arg2, arg3));\n }\n }\n\n NodeError.prototype.name = Base.name;\n NodeError.prototype.code = code;\n\n codes[code] = NodeError;\n}\n\n// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js\nfunction oneOf(expected, thing) {\n if (Array.isArray(expected)) {\n const len = expected.length;\n expected = expected.map((i) => String(i));\n if (len > 2) {\n return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +\n expected[len - 1];\n } else if (len === 2) {\n return `one of ${thing} ${expected[0]} or ${expected[1]}`;\n } else {\n return `of ${thing} ${expected[0]}`;\n }\n } else {\n return `of ${thing} ${String(expected)}`;\n }\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith\nfunction startsWith(str, search, pos) {\n\treturn str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith\nfunction endsWith(str, search, this_len) {\n\tif (this_len === undefined || this_len > str.length) {\n\t\tthis_len = str.length;\n\t}\n\treturn str.substring(this_len - search.length, this_len) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes\nfunction includes(str, search, start) {\n if (typeof start !== 'number') {\n start = 0;\n }\n\n if (start + search.length > str.length) {\n return false;\n } else {\n return str.indexOf(search, start) !== -1;\n }\n}\n\ncreateErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {\n return 'The value \"' + value + '\" is invalid for option \"' + name + '\"'\n}, TypeError);\ncreateErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {\n // determiner: 'must be' or 'must not be'\n let determiner;\n if (typeof expected === 'string' && startsWith(expected, 'not ')) {\n determiner = 'must not be';\n expected = expected.replace(/^not /, '');\n } else {\n determiner = 'must be';\n }\n\n let msg;\n if (endsWith(name, ' argument')) {\n // For cases like 'first argument'\n msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;\n } else {\n const type = includes(name, '.') ? 'property' : 'argument';\n msg = `The \"${name}\" ${type} ${determiner} ${oneOf(expected, 'type')}`;\n }\n\n msg += `. 
Received type ${typeof actual}`;\n return msg;\n}, TypeError);\ncreateErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');\ncreateErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {\n return 'The ' + name + ' method is not implemented'\n});\ncreateErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');\ncreateErrorType('ERR_STREAM_DESTROYED', function (name) {\n return 'Cannot call ' + name + ' after a stream was destroyed';\n});\ncreateErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');\ncreateErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');\ncreateErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');\ncreateErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);\ncreateErrorType('ERR_UNKNOWN_ENCODING', function (arg) {\n return 'Unknown encoding: ' + arg\n}, TypeError);\ncreateErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');\n\nmodule.exports.codes = codes;\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n\n'use strict';\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n for (var key in obj) keys.push(key);\n return keys;\n};\n/**/\n\nmodule.exports = Duplex;\nconst Readable = require('./_stream_readable');\nconst Writable = require('./_stream_writable');\nrequire('inherits')(Duplex, Readable);\n{\n // Allow the keys array to be GC'ed.\n const keys = objectKeys(Writable.prototype);\n for (var v = 0; v < keys.length; v++) {\n const method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n Readable.call(this, options);\n Writable.call(this, options);\n this.allowHalfOpen = true;\n if (options) {\n if (options.readable === false) this.readable = false;\n if (options.writable === false) this.writable = false;\n if (options.allowHalfOpen === false) {\n this.allowHalfOpen = false;\n this.once('end', onend);\n }\n }\n}\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.highWaterMark;\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\n\n// the no-half-open enforcer\nfunction onend() {\n // If the writable side ended, then we're ok.\n if (this._writableState.ended) return;\n\n // no more data can be written.\n // But allow more writes to happen in this tick.\n process.nextTick(onEndNT, this);\n}\nfunction onEndNT(self) {\n self.end();\n}\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n'use strict';\n\nmodule.exports = PassThrough;\nconst Transform = require('./_stream_transform');\nrequire('inherits')(PassThrough, Transform);\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n Transform.call(this, options);\n}\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\nmodule.exports = Readable;\n\n/**/\nvar Duplex;\n/**/\n\nReadable.ReadableState = ReadableState;\n\n/**/\nconst EE = require('events').EventEmitter;\nvar EElistenerCount = function EElistenerCount(emitter, type) {\n return emitter.listeners(type).length;\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/**/\nconst debugUtil = require('util');\nlet debug;\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function debug() {};\n}\n/**/\n\nconst BufferList = require('./internal/streams/buffer_list');\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;\n\n// Lazy loaded to improve the startup performance.\nlet StringDecoder;\nlet createReadableStreamAsyncIterator;\nlet from;\nrequire('inherits')(Readable, Stream);\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nconst kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\nfunction ReadableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\n\n // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex);\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false;\n\n // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. 
We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n this.sync = true;\n\n // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n this.paused = true;\n\n // Should close be emitted on destroy. Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'end' (and potentially 'finish')\n this.autoDestroy = !!options.autoDestroy;\n\n // has it been destroyed\n this.destroyed = false;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // the number of writers that are awaiting a drain event in .pipe()s\n this.awaitDrain = 0;\n\n // if true, a maybeReadMore has been scheduled\n this.readingMore = false;\n this.decoder = null;\n this.encoding = null;\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\nfunction Readable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n if (!(this instanceof Readable)) return new Readable(options);\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the ReadableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n this._readableState = new ReadableState(options, this, isDuplex);\n\n // legacy\n this.readable = true;\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n Stream.call(this);\n}\nObject.defineProperty(Readable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined) {\n return false;\n }\n return this._readableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n }\n});\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\nReadable.prototype._destroy = function (err, cb) {\n cb(err);\n};\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n};\n\n// Unshift should *always* be something directly out of 
read()\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n debug('readableAddChunk', chunk);\n var state = stream._readableState;\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n if (er) {\n errorOrDestroy(stream, er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (addToFront) {\n if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());\n } else if (state.destroyed) {\n return false;\n } else {\n state.reading = false;\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n maybeReadMore(stream, state);\n }\n }\n\n // We can push more data if we are below the highWaterMark.\n // Also, if we have no data yet, we can stand some more bytes.\n // This is to work around cases where hwm=0, such as the repl.\n return !state.ended && (state.length < state.highWaterMark || state.length === 0);\n}\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n state.awaitDrain = 0;\n stream.emit('data', chunk);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 
1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n if (state.needReadable) emitReadable(stream);\n }\n maybeReadMore(stream, state);\n}\nfunction chunkInvalid(state, chunk) {\n var er;\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);\n }\n return er;\n}\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n};\n\n// backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n const decoder = new StringDecoder(enc);\n this._readableState.decoder = decoder;\n // If setEncoding(null), decoder.encoding equals utf8\n this._readableState.encoding = this._readableState.decoder.encoding;\n\n // Iterate over current buffer to convert already stored Buffers:\n let p = this._readableState.buffer.head;\n let content = '';\n while (p !== null) {\n content += decoder.write(p.data);\n p = p.next;\n }\n this._readableState.buffer.clear();\n if (content !== '') this._readableState.buffer.push(content);\n this._readableState.length = content.length;\n return this;\n};\n\n// Don't raise the hwm > 1GB\nconst MAX_HWM = 0x40000000;\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n return n;\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n }\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n;\n // Don't have enough\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n }\n return state.length;\n}\n\n// you can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n if (n !== 0) state.emittedReadable = false;\n\n // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n n = howMuchToRead(n, state);\n\n // if we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. 
The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n var doRead = state.needReadable;\n debug('need readable', doRead);\n\n // if we currently have less than the highWaterMark, then also read some\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n }\n\n // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true;\n // if the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.needReadable = true;\n // call internal read method\n this._read(state.highWaterMark);\n state.sync = false;\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n if (ret === null) {\n state.needReadable = state.length <= state.highWaterMark;\n n = 0;\n } else {\n state.length -= n;\n state.awaitDrain = 0;\n }\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true;\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this);\n }\n if (ret !== null) this.emit('data', ret);\n return ret;\n};\nfunction onEofChunk(stream, state) {\n debug('onEofChunk');\n if (state.ended) return;\n if (state.decoder) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n state.ended = true;\n if (state.sync) {\n // if we are sync, wait until next tick to emit the data.\n // Otherwise we risk emitting data in the flow()\n // the readable code triggers during a read() call\n emitReadable(stream);\n } else {\n // emit 'readable' now to make sure it gets picked up.\n state.needReadable = false;\n if (!state.emittedReadable) {\n state.emittedReadable = true;\n emitReadable_(stream);\n }\n }\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. 
This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n var state = stream._readableState;\n debug('emitReadable', state.needReadable, state.emittedReadable);\n state.needReadable = false;\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n process.nextTick(emitReadable_, stream);\n }\n}\nfunction emitReadable_(stream) {\n var state = stream._readableState;\n debug('emitReadable_', state.destroyed, state.length, state.ended);\n if (!state.destroyed && (state.length || state.ended)) {\n stream.emit('readable');\n state.emittedReadable = false;\n }\n\n // The stream needs another readable event if\n // 1. It is not flowing, as the flow mechanism will take\n // care of it.\n // 2. It is not ended.\n // 3. It is below the highWaterMark, so we can schedule\n // another readable later.\n state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;\n flow(stream);\n}\n\n// at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n process.nextTick(maybeReadMore_, stream, state);\n }\n}\nfunction maybeReadMore_(stream, state) {\n // Attempt to read more data if we should.\n //\n // The conditions for reading more data are (one of):\n // - Not enough data buffered (state.length < state.highWaterMark). The loop\n // is responsible for filling the buffer with enough data if such data\n // is available. If highWaterMark is 0 and we are not in the flowing mode\n // we should _not_ attempt to buffer any extra data. We'll get more data\n // when the stream consumer calls read() instead.\n // - No data in the buffer, and the stream is in flowing mode. In this mode\n // the loop below is responsible for ensuring read() is called. Failing to\n // call read here would abort the flow and there's no other mechanism for\n // continuing the flow if the stream consumer has just subscribed to the\n // 'data' event.\n //\n // In addition to the above conditions to keep reading data, the following\n // conditions prevent the data from being read:\n // - The stream has ended (state.ended).\n // - There is already a pending 'read' operation (state.reading). This is a\n // case where the the stream has called the implementation defined _read()\n // method, but they are processing the call asynchronously and have _not_\n // called push() with new data. In this case we skip performing more\n // read()s. The execution ends in this method again after the _read() ends\n // up calling push() with more data.\n while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {\n const len = state.length;\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length)\n // didn't get any data, stop spinning.\n break;\n }\n state.readingMore = false;\n}\n\n// abstract method. 
to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));\n};\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n default:\n state.pipes.push(dest);\n break;\n }\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n var endFn = doEnd ? onend : unpipe;\n if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);\n dest.on('unpipe', onunpipe);\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n function onend() {\n debug('onend');\n dest.end();\n }\n\n // when the dest drains, it reduces the awaitDrain counter\n // on the source. This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n var cleanedUp = false;\n function cleanup() {\n debug('cleanup');\n // cleanup event handlers once the pipe is broken\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n cleanedUp = true;\n\n // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n src.on('data', ondata);\n function ondata(chunk) {\n debug('ondata');\n var ret = dest.write(chunk);\n debug('dest.write', ret);\n if (ret === false) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', state.awaitDrain);\n state.awaitDrain++;\n }\n src.pause();\n }\n }\n\n // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror);\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n dest.once('close', onclose);\n function onfinish() {\n 
debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n dest.once('finish', onfinish);\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n }\n\n // tell the dest that it's being piped to\n dest.emit('pipe', src);\n\n // start the flow if it hasn't been started already.\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n return dest;\n};\nfunction pipeOnDrain(src) {\n return function pipeOnDrainFunctionResult() {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = {\n hasUnpiped: false\n };\n\n // if we're not piping anywhere, then do nothing.\n if (state.pipesCount === 0) return this;\n\n // just one destination. most common case.\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n if (!dest) dest = state.pipes;\n\n // got a match.\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n }\n\n // slow case. multiple pipe destinations.\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, {\n hasUnpiped: false\n });\n return this;\n }\n\n // try to find the right one.\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n dest.emit('unpipe', this, unpipeInfo);\n return this;\n};\n\n// set up data events if they are asked for\n// Ensure readable listeners eventually get something\nReadable.prototype.on = function (ev, fn) {\n const res = Stream.prototype.on.call(this, ev, fn);\n const state = this._readableState;\n if (ev === 'data') {\n // update readableListening so that resume() may be a no-op\n // a few lines down. This is needed to support once('readable').\n state.readableListening = this.listenerCount('readable') > 0;\n\n // Try start flowing on next tick if stream isn't explicitly paused\n if (state.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.flowing = false;\n state.emittedReadable = false;\n debug('on readable', state.length, state.reading);\n if (state.length) {\n emitReadable(this);\n } else if (!state.reading) {\n process.nextTick(nReadingNextTick, this);\n }\n }\n }\n return res;\n};\nReadable.prototype.addListener = Readable.prototype.on;\nReadable.prototype.removeListener = function (ev, fn) {\n const res = Stream.prototype.removeListener.call(this, ev, fn);\n if (ev === 'readable') {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. 
This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nReadable.prototype.removeAllListeners = function (ev) {\n const res = Stream.prototype.removeAllListeners.apply(this, arguments);\n if (ev === 'readable' || ev === undefined) {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nfunction updateReadableListening(self) {\n const state = self._readableState;\n state.readableListening = self.listenerCount('readable') > 0;\n if (state.resumeScheduled && !state.paused) {\n // flowing needs to be set to true now, otherwise\n // the upcoming resume will not flow.\n state.flowing = true;\n\n // crude way to check if we should resume\n } else if (self.listenerCount('data') > 0) {\n self.resume();\n }\n}\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n var state = this._readableState;\n if (!state.flowing) {\n debug('resume');\n // we flow only if there is no one listening\n // for readable, but we still have to call\n // resume()\n state.flowing = !state.readableListening;\n resume(this, state);\n }\n state.paused = false;\n return this;\n};\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n process.nextTick(resume_, stream, state);\n }\n}\nfunction resume_(stream, state) {\n debug('resume', state.reading);\n if (!state.reading) {\n stream.read(0);\n }\n state.resumeScheduled = false;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n if (this._readableState.flowing !== false) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n this._readableState.paused = true;\n return this;\n};\nfunction flow(stream) {\n const state = stream._readableState;\n debug('flow', state.flowing);\n while (state.flowing && stream.read() !== null);\n}\n\n// wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n var state = this._readableState;\n var paused = false;\n stream.on('end', () => {\n debug('wrapped end');\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) this.push(chunk);\n }\n this.push(null);\n });\n stream.on('data', chunk => {\n debug('wrapped data');\n if (state.decoder) chunk = state.decoder.write(chunk);\n\n // don't skip over falsy values in objectMode\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n var ret = this.push(chunk);\n if (!ret) {\n paused = true;\n stream.pause();\n }\n });\n\n // proxy all the other methods.\n // important when wrapping filters and duplexes.\n for (var i in stream) {\n if (this[i] === undefined && typeof 
stream[i] === 'function') {\n this[i] = function methodWrap(method) {\n return function methodWrapReturnFunction() {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n }\n\n // proxy certain important events.\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n }\n\n // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n this._read = n => {\n debug('wrapped _read', n);\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n return this;\n};\nif (typeof Symbol === 'function') {\n Readable.prototype[Symbol.asyncIterator] = function () {\n if (createReadableStreamAsyncIterator === undefined) {\n createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');\n }\n return createReadableStreamAsyncIterator(this);\n };\n}\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.highWaterMark;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState && this._readableState.buffer;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableFlowing', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.flowing;\n },\n set: function set(state) {\n if (this._readableState) {\n this._readableState.flowing = state;\n }\n }\n});\n\n// exposed for testing purposes only.\nReadable._fromList = fromList;\nObject.defineProperty(Readable.prototype, 'readableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._readableState.length;\n }\n});\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = state.buffer.consume(n, state.decoder);\n }\n return ret;\n}\nfunction endReadable(stream) {\n var state = stream._readableState;\n debug('endReadable', state.endEmitted);\n if (!state.endEmitted) {\n state.ended = true;\n process.nextTick(endReadableNT, state, stream);\n }\n}\nfunction endReadableNT(state, stream) {\n debug('endReadableNT', state.endEmitted, state.length);\n\n // Check that we didn't get one last unshift.\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the 
writable side is ready for autoDestroy as well\n const wState = stream._writableState;\n if (!wState || wState.autoDestroy && wState.finished) {\n stream.destroy();\n }\n }\n }\n}\nif (typeof Symbol === 'function') {\n Readable.from = function (iterable, opts) {\n if (from === undefined) {\n from = require('./internal/streams/from');\n }\n return from(Readable, iterable, opts);\n };\n}\nfunction indexOf(xs, x) {\n for (var i = 0, l = xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n return -1;\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. 
In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n'use strict';\n\nmodule.exports = Transform;\nconst _require$codes = require('../errors').codes,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,\n ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;\nconst Duplex = require('./_stream_duplex');\nrequire('inherits')(Transform, Duplex);\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n var cb = ts.writecb;\n if (cb === null) {\n return this.emit('error', new ERR_MULTIPLE_CALLBACK());\n }\n ts.writechunk = null;\n ts.writecb = null;\n if (data != null)\n // single equals check for both `null` and `undefined`\n this.push(data);\n cb(er);\n var rs = this._readableState;\n rs.reading = false;\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n Duplex.call(this, options);\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n };\n\n // start out asking for a readable event once data is transformed.\n this._readableState.needReadable = true;\n\n // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false;\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n if (typeof options.flush === 'function') this._flush = options.flush;\n }\n\n // When the writable side finishes, then flush out anything remaining.\n this.on('prefinish', prefinish);\n}\nfunction prefinish() {\n if (typeof this._flush === 'function' && !this._readableState.destroyed) {\n this._flush((er, data) => {\n done(this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n};\n\n// This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. 
If you\n// never call cb(), then you'll never get another chunk.\nTransform.prototype._transform = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));\n};\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n};\n\n// Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n if (ts.writechunk !== null && !ts.transforming) {\n ts.transforming = true;\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\nTransform.prototype._destroy = function (err, cb) {\n Duplex.prototype._destroy.call(this, err, err2 => {\n cb(err2);\n });\n};\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n if (data != null)\n // single equals check for both `null` and `undefined`\n stream.push(data);\n\n // TODO(BridgeAR): Write a test for these two error cases\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();\n if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();\n return stream.push(null);\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n'use strict';\n\nmodule.exports = Writable;\n\n/* */\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n}\n\n// It seems a linked list but it is not\n// there will be only 2 of these for each stream\nfunction CorkedRequest(state) {\n this.next = null;\n this.entry = null;\n this.finish = () => {\n onCorkedFinish(this, state);\n };\n}\n/* */\n\n/**/\nvar Duplex;\n/**/\n\nWritable.WritableState = WritableState;\n\n/**/\nconst internalUtil = {\n deprecate: require('util-deprecate')\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,\n ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,\n ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,\n ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nrequire('inherits')(Writable, Stream);\nfunction nop() {}\nfunction WritableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream,\n // e.g. options.readableObjectMode vs. 
options.writableObjectMode, etc.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\n\n // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);\n\n // if _final has been called\n this.finalCalled = false;\n\n // drain event flag.\n this.needDrain = false;\n // at the start of calling end()\n this.ending = false;\n // when end() has been called, and returned\n this.ended = false;\n // when 'finish' is emitted\n this.finished = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0;\n\n // a flag to see when we're in the middle of a write.\n this.writing = false;\n\n // when true all writes will be buffered until .uncork() call\n this.corked = 0;\n\n // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true;\n\n // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false;\n\n // the callback that's passed to _write(chunk,cb)\n this.onwrite = function (er) {\n onwrite(stream, er);\n };\n\n // the callback that the user supplies to write(chunk,encoding,cb)\n this.writecb = null;\n\n // the amount that is being written when _write is called.\n this.writelen = 0;\n this.bufferedRequest = null;\n this.lastBufferedRequest = null;\n\n // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n this.pendingcb = 0;\n\n // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n this.prefinished = false;\n\n // True if the error was already emitted and should not be thrown again\n this.errorEmitted = false;\n\n // Should close be emitted on destroy. 
Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'finish' (and potentially 'end')\n this.autoDestroy = !!options.autoDestroy;\n\n // count buffered requests\n this.bufferedRequestCount = 0;\n\n // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n this.corkedRequestsFree = new CorkedRequest(this);\n}\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n while (current) {\n out.push(current);\n current = current.next;\n }\n return out;\n};\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function writableStateBufferGetter() {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})();\n\n// Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\nvar realHasInstance;\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function value(object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function realHasInstance(object) {\n return object instanceof this;\n };\n}\nfunction Writable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the WritableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);\n this._writableState = new WritableState(options, this, isDuplex);\n\n // legacy.\n this.writable = true;\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n if (typeof options.writev === 'function') this._writev = options.writev;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n if (typeof options.final === 'function') this._final = options.final;\n }\n Stream.call(this);\n}\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());\n};\nfunction writeAfterEnd(stream, cb) {\n var er = new ERR_STREAM_WRITE_AFTER_END();\n // TODO: defer error events consistently everywhere, not just the cb\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n}\n\n// Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. 
Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\nfunction validChunk(stream, state, chunk, cb) {\n var er;\n if (chunk === null) {\n er = new ERR_STREAM_NULL_VALUES();\n } else if (typeof chunk !== 'string' && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);\n }\n if (er) {\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n return false;\n }\n return true;\n}\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n if (typeof cb !== 'function') cb = nop;\n if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n return ret;\n};\nWritable.prototype.cork = function () {\n this._writableState.corked++;\n};\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n if (state.corked) {\n state.corked--;\n if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n return chunk;\n}\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.highWaterMark;\n }\n});\n\n// if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n var len = state.objectMode ? 
1 : chunk.length;\n state.length += len;\n var ret = state.length < state.highWaterMark;\n // we must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true;\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk,\n encoding,\n isBuf,\n callback: cb,\n next: null\n };\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n return ret;\n}\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n process.nextTick(cb, er);\n // this can emit finish, and it will always happen\n // after error\n process.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n // this can emit finish, but finish must\n // always follow error\n finishMaybe(stream, state);\n }\n}\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();\n onwriteStateUpdate(state);\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state) || stream.destroyed;\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n if (sync) {\n process.nextTick(afterWrite, stream, state, finished, cb);\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n finishMaybe(stream, state);\n}\n\n// Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n}\n\n// if there's something in the buffer waiting, then process it\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n var count = 0;\n var allBuffers = true;\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n entry = entry.next;\n count += 1;\n }\n 
buffer.allBuffers = allBuffers;\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\n\n // doWrite is almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n state.pendingcb++;\n state.lastBufferedRequest = null;\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--;\n // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n if (state.writing) {\n break;\n }\n }\n if (entry === null) state.lastBufferedRequest = null;\n }\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));\n};\nWritable.prototype._writev = null;\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\n\n // .end() fully uncorks\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n }\n\n // ignore unnecessary end() calls.\n if (!state.ending) endWritable(this, state, cb);\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\nfunction callFinal(stream, state) {\n stream._final(err => {\n state.pendingcb--;\n if (err) {\n errorOrDestroy(stream, err);\n }\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function' && !state.destroyed) {\n state.pendingcb++;\n state.finalCalled = true;\n process.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\nfunction finishMaybe(stream, state) {\n var need = needFinish(state);\n if (need) {\n prefinish(stream, state);\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the readable side is ready for autoDestroy as well\n const rState = stream._readableState;\n if (!rState || rState.autoDestroy && rState.endEmitted) {\n stream.destroy();\n }\n }\n }\n }\n return need;\n}\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n if (cb) {\n if (state.finished) process.nextTick(cb);else stream.once('finish', cb);\n }\n state.ended = true;\n stream.writable 
= false;\n}\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n }\n\n // reuse the free corkReq.\n state.corkedRequestsFree.next = corkReq;\n}\nObject.defineProperty(Writable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._writableState === undefined) {\n return false;\n }\n return this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._writableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._writableState.destroyed = value;\n }\n});\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\nWritable.prototype._destroy = function (err, cb) {\n cb(err);\n};","'use strict';\n\nconst finished = require('./end-of-stream');\nconst kLastResolve = Symbol('lastResolve');\nconst kLastReject = Symbol('lastReject');\nconst kError = Symbol('error');\nconst kEnded = Symbol('ended');\nconst kLastPromise = Symbol('lastPromise');\nconst kHandlePromise = Symbol('handlePromise');\nconst kStream = Symbol('stream');\nfunction createIterResult(value, done) {\n return {\n value,\n done\n };\n}\nfunction readAndResolve(iter) {\n const resolve = iter[kLastResolve];\n if (resolve !== null) {\n const data = iter[kStream].read();\n // we defer if data is null\n // we can be expecting either 'end' or\n // 'error'\n if (data !== null) {\n iter[kLastPromise] = null;\n iter[kLastResolve] = null;\n iter[kLastReject] = null;\n resolve(createIterResult(data, false));\n }\n }\n}\nfunction onReadable(iter) {\n // we wait for the next tick, because it might\n // emit an error with process.nextTick\n process.nextTick(readAndResolve, iter);\n}\nfunction wrapForNext(lastPromise, iter) {\n return (resolve, reject) => {\n lastPromise.then(() => {\n if (iter[kEnded]) {\n resolve(createIterResult(undefined, true));\n return;\n }\n iter[kHandlePromise](resolve, reject);\n }, reject);\n };\n}\nconst AsyncIteratorPrototype = Object.getPrototypeOf(function () {});\nconst ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({\n get stream() {\n return this[kStream];\n },\n next() {\n // if we have detected an error in the meanwhile\n // reject straight away\n const error = this[kError];\n if (error !== null) {\n return Promise.reject(error);\n }\n if (this[kEnded]) {\n return Promise.resolve(createIterResult(undefined, true));\n }\n if (this[kStream].destroyed) {\n // We need to defer via nextTick because if .destroy(err) is\n // called, the error will be emitted via nextTick, and\n // we cannot guarantee that there is no error lingering around\n // waiting to be emitted.\n return new Promise((resolve, reject) => {\n process.nextTick(() => {\n if (this[kError]) {\n reject(this[kError]);\n } else {\n resolve(createIterResult(undefined, true));\n }\n });\n });\n }\n\n // if we have multiple next() calls\n // we will wait for the previous Promise to finish\n // this logic is optimized to support for await loops,\n // where next() is only called once at a time\n const lastPromise = this[kLastPromise];\n let promise;\n if (lastPromise) {\n promise = new Promise(wrapForNext(lastPromise, this));\n } else {\n // fast path needed to support multiple 
this.push()\n // without triggering the next() queue\n const data = this[kStream].read();\n if (data !== null) {\n return Promise.resolve(createIterResult(data, false));\n }\n promise = new Promise(this[kHandlePromise]);\n }\n this[kLastPromise] = promise;\n return promise;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n return() {\n // destroy(err, cb) is a private API\n // we can guarantee we have that here, because we control the\n // Readable class this is attached to\n return new Promise((resolve, reject) => {\n this[kStream].destroy(null, err => {\n if (err) {\n reject(err);\n return;\n }\n resolve(createIterResult(undefined, true));\n });\n });\n }\n}, AsyncIteratorPrototype);\nconst createReadableStreamAsyncIterator = stream => {\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, {\n [kStream]: {\n value: stream,\n writable: true\n },\n [kLastResolve]: {\n value: null,\n writable: true\n },\n [kLastReject]: {\n value: null,\n writable: true\n },\n [kError]: {\n value: null,\n writable: true\n },\n [kEnded]: {\n value: stream._readableState.endEmitted,\n writable: true\n },\n // the function passed to new Promise\n // is cached so we avoid allocating a new\n // closure at every run\n [kHandlePromise]: {\n value: (resolve, reject) => {\n const data = iterator[kStream].read();\n if (data) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(data, false));\n } else {\n iterator[kLastResolve] = resolve;\n iterator[kLastReject] = reject;\n }\n },\n writable: true\n }\n });\n iterator[kLastPromise] = null;\n finished(stream, err => {\n if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {\n const reject = iterator[kLastReject];\n // reject if we are waiting for data in the Promise\n // returned by next() and store the error\n if (reject !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n reject(err);\n }\n iterator[kError] = err;\n return;\n }\n const resolve = iterator[kLastResolve];\n if (resolve !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(undefined, true));\n }\n iterator[kEnded] = true;\n });\n stream.on('readable', onReadable.bind(null, iterator));\n return iterator;\n};\nmodule.exports = createReadableStreamAsyncIterator;","'use strict';\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? String : Number)(input); }\nconst _require = require('buffer'),\n Buffer = _require.Buffer;\nconst _require2 = require('util'),\n inspect = _require2.inspect;\nconst custom = inspect && inspect.custom || 'inspect';\nfunction copyBuffer(src, target, offset) {\n Buffer.prototype.copy.call(src, target, offset);\n}\nmodule.exports = class BufferList {\n constructor() {\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n push(v) {\n const entry = {\n data: v,\n next: null\n };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n }\n unshift(v) {\n const entry = {\n data: v,\n next: this.head\n };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n }\n shift() {\n if (this.length === 0) return;\n const ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n }\n clear() {\n this.head = this.tail = null;\n this.length = 0;\n }\n join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n while (p = p.next) ret += s + p.data;\n return ret;\n }\n concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n const ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n return ret;\n }\n\n // Consumes a specified amount of bytes or characters from the buffered data.\n consume(n, hasStrings) {\n var ret;\n if (n < this.head.data.length) {\n // `slice` is the same for buffers and strings.\n ret = this.head.data.slice(0, n);\n this.head.data = this.head.data.slice(n);\n } else if (n === this.head.data.length) {\n // First chunk is a perfect match.\n ret = this.shift();\n } else {\n // Result spans more than one buffer.\n ret = hasStrings ? this._getString(n) : this._getBuffer(n);\n }\n return ret;\n }\n first() {\n return this.head.data;\n }\n\n // Consumes a specified amount of characters from the buffered data.\n _getString(n) {\n var p = this.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n const str = p.data;\n const nb = n > str.length ? 
str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Consumes a specified amount of bytes from the buffered data.\n _getBuffer(n) {\n const ret = Buffer.allocUnsafe(n);\n var p = this.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n while (p = p.next) {\n const buf = p.data;\n const nb = n > buf.length ? buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = buf.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Make sure the linked list only shows the minimal necessary information.\n [custom](_, options) {\n return inspect(this, _objectSpread(_objectSpread({}, options), {}, {\n // Only inspect one level.\n depth: 0,\n // It should not recurse.\n customInspect: false\n }));\n }\n};","'use strict';\n\n// undocumented cb() API, needed for core, not for public API\nfunction destroy(err, cb) {\n const readableDestroyed = this._readableState && this._readableState.destroyed;\n const writableDestroyed = this._writableState && this._writableState.destroyed;\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err) {\n if (!this._writableState) {\n process.nextTick(emitErrorNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorNT, this, err);\n }\n }\n return this;\n }\n\n // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n }\n\n // if this is a duplex stream mark the writable part as destroyed as well\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n this._destroy(err || null, err => {\n if (!cb && err) {\n if (!this._writableState) {\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n } else if (cb) {\n process.nextTick(emitCloseNT, this);\n cb(err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n });\n return this;\n}\nfunction emitErrorAndCloseNT(self, err) {\n emitErrorNT(self, err);\n emitCloseNT(self);\n}\nfunction emitCloseNT(self) {\n if (self._writableState && !self._writableState.emitClose) return;\n if (self._readableState && !self._readableState.emitClose) return;\n self.emit('close');\n}\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finalCalled = false;\n this._writableState.prefinished = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\nfunction emitErrorNT(self, err) {\n self.emit('error', 
err);\n}\nfunction errorOrDestroy(stream, err) {\n // We have tests that rely on errors being emitted\n // in the same tick, so changing this is semver major.\n // For now when you opt-in to autoDestroy we allow\n // the error to be emitted nextTick. In a future\n // semver major update we should change the default to this.\n\n const rState = stream._readableState;\n const wState = stream._writableState;\n if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);\n}\nmodule.exports = {\n destroy,\n undestroy,\n errorOrDestroy\n};","// Ported from https://github.com/mafintosh/end-of-stream with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nconst ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n callback.apply(this, args);\n };\n}\nfunction noop() {}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction eos(stream, opts, callback) {\n if (typeof opts === 'function') return eos(stream, null, opts);\n if (!opts) opts = {};\n callback = once(callback || noop);\n let readable = opts.readable || opts.readable !== false && stream.readable;\n let writable = opts.writable || opts.writable !== false && stream.writable;\n const onlegacyfinish = () => {\n if (!stream.writable) onfinish();\n };\n var writableEnded = stream._writableState && stream._writableState.finished;\n const onfinish = () => {\n writable = false;\n writableEnded = true;\n if (!readable) callback.call(stream);\n };\n var readableEnded = stream._readableState && stream._readableState.endEmitted;\n const onend = () => {\n readable = false;\n readableEnded = true;\n if (!writable) callback.call(stream);\n };\n const onerror = err => {\n callback.call(stream, err);\n };\n const onclose = () => {\n let err;\n if (readable && !readableEnded) {\n if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n if (writable && !writableEnded) {\n if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n };\n const onrequest = () => {\n stream.req.on('finish', onfinish);\n };\n if (isRequest(stream)) {\n stream.on('complete', onfinish);\n stream.on('abort', onclose);\n if (stream.req) onrequest();else stream.on('request', onrequest);\n } else if (writable && !stream._writableState) {\n // legacy streams\n stream.on('end', onlegacyfinish);\n stream.on('close', onlegacyfinish);\n }\n stream.on('end', onend);\n stream.on('finish', onfinish);\n if (opts.error !== false) stream.on('error', onerror);\n stream.on('close', onclose);\n return function () {\n stream.removeListener('complete', onfinish);\n stream.removeListener('abort', onclose);\n stream.removeListener('request', onrequest);\n if (stream.req) stream.req.removeListener('finish', onfinish);\n stream.removeListener('end', onlegacyfinish);\n stream.removeListener('close', onlegacyfinish);\n stream.removeListener('finish', onfinish);\n stream.removeListener('end', onend);\n stream.removeListener('error', onerror);\n stream.removeListener('close', onclose);\n };\n}\nmodule.exports = eos;","'use 
strict';\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? 
String : Number)(input); }\nconst ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;\nfunction from(Readable, iterable, opts) {\n let iterator;\n if (iterable && typeof iterable.next === 'function') {\n iterator = iterable;\n } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);\n const readable = new Readable(_objectSpread({\n objectMode: true\n }, opts));\n // Reading boolean to protect against _read\n // being called before last iteration completion.\n let reading = false;\n readable._read = function () {\n if (!reading) {\n reading = true;\n next();\n }\n };\n function next() {\n return _next2.apply(this, arguments);\n }\n function _next2() {\n _next2 = _asyncToGenerator(function* () {\n try {\n const _yield$iterator$next = yield iterator.next(),\n value = _yield$iterator$next.value,\n done = _yield$iterator$next.done;\n if (done) {\n readable.push(null);\n } else if (readable.push(yield value)) {\n next();\n } else {\n reading = false;\n }\n } catch (err) {\n readable.destroy(err);\n }\n });\n return _next2.apply(this, arguments);\n }\n return readable;\n}\nmodule.exports = from;","// Ported from https://github.com/mafintosh/pump with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nlet eos;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n callback(...arguments);\n };\n}\nconst _require$codes = require('../../../errors').codes,\n ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;\nfunction noop(err) {\n // Rethrow the error if it exists to avoid swallowing it\n if (err) throw err;\n}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction destroyer(stream, reading, writing, callback) {\n callback = once(callback);\n let closed = false;\n stream.on('close', () => {\n closed = true;\n });\n if (eos === undefined) eos = require('./end-of-stream');\n eos(stream, {\n readable: reading,\n writable: writing\n }, err => {\n if (err) return callback(err);\n closed = true;\n callback();\n });\n let destroyed = false;\n return err => {\n if (closed) return;\n if (destroyed) return;\n destroyed = true;\n\n // request.destroy just do .end - .abort is what we want\n if (isRequest(stream)) return stream.abort();\n if (typeof stream.destroy === 'function') return stream.destroy();\n callback(err || new ERR_STREAM_DESTROYED('pipe'));\n };\n}\nfunction call(fn) {\n fn();\n}\nfunction pipe(from, to) {\n return from.pipe(to);\n}\nfunction popCallback(streams) {\n if (!streams.length) return noop;\n if (typeof streams[streams.length - 1] !== 'function') return noop;\n return streams.pop();\n}\nfunction pipeline() {\n for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {\n streams[_key] = arguments[_key];\n }\n const callback = popCallback(streams);\n if (Array.isArray(streams[0])) streams = streams[0];\n if (streams.length < 2) {\n throw new ERR_MISSING_ARGS('streams');\n }\n let error;\n const destroys = streams.map(function (stream, i) {\n const reading = i < streams.length - 1;\n const writing = i > 0;\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err;\n if (err) 
destroys.forEach(call);\n if (reading) return;\n destroys.forEach(call);\n callback(error);\n });\n });\n return streams.reduce(pipe);\n}\nmodule.exports = pipeline;","'use strict';\n\nconst ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;\nfunction highWaterMarkFrom(options, isDuplex, duplexKey) {\n return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;\n}\nfunction getHighWaterMark(state, options, duplexKey, isDuplex) {\n const hwm = highWaterMarkFrom(options, isDuplex, duplexKey);\n if (hwm != null) {\n if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {\n const name = isDuplex ? duplexKey : 'highWaterMark';\n throw new ERR_INVALID_OPT_VALUE(name, hwm);\n }\n return Math.floor(hwm);\n }\n\n // Default value\n return state.objectMode ? 16 : 16 * 1024;\n}\nmodule.exports = {\n getHighWaterMark\n};","module.exports = require('stream');\n","var Stream = require('stream');\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream.Readable;\n Object.assign(module.exports, Stream);\n module.exports.Stream = Stream;\n} else {\n exports = module.exports = require('./lib/_stream_readable.js');\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = require('./lib/_stream_writable.js');\n exports.Duplex = require('./lib/_stream_duplex.js');\n exports.Transform = require('./lib/_stream_transform.js');\n exports.PassThrough = require('./lib/_stream_passthrough.js');\n exports.finished = require('./lib/internal/streams/end-of-stream.js');\n exports.pipeline = require('./lib/internal/streams/pipeline.js');\n}\n","'use strict';\n\nconst {PassThrough} = require('stream');\nconst extend = require('extend');\n\nlet debug = () => {};\nif (\n typeof process !== 'undefined' &&\n 'env' in process &&\n typeof process.env === 'object' &&\n process.env.DEBUG === 'retry-request'\n) {\n debug = message => {\n console.log('retry-request:', message);\n };\n}\n\nconst DEFAULTS = {\n objectMode: false,\n retries: 2,\n\n /*\n The maximum time to delay in seconds. If retryDelayMultiplier results in a\n delay greater than maxRetryDelay, retries should delay by maxRetryDelay\n seconds instead.\n */\n maxRetryDelay: 64,\n\n /*\n The multiplier by which to increase the delay time between the completion of\n failed requests, and the initiation of the subsequent retrying request.\n */\n retryDelayMultiplier: 2,\n\n /*\n The length of time to keep retrying in seconds. The last sleep period will\n be shortened as necessary, so that the last retry runs at deadline (and not\n considerably beyond it). 
The total time starting from when the initial\n request is sent, after which an error will be returned, regardless of the\n retrying attempts made meanwhile.\n */\n totalTimeout: 600,\n\n noResponseRetries: 2,\n currentRetryAttempt: 0,\n shouldRetryFn: function (response) {\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n\n const statusCode = response.statusCode;\n debug(`Response status: ${statusCode}`);\n\n let range;\n while ((range = retryRanges.shift())) {\n if (statusCode >= range[0] && statusCode <= range[1]) {\n // Not a successful status or redirect.\n return true;\n }\n }\n },\n};\n\nfunction retryRequest(requestOpts, opts, callback) {\n if (typeof requestOpts === 'string') {\n requestOpts = {url: requestOpts};\n }\n\n const streamMode = typeof arguments[arguments.length - 1] !== 'function';\n\n if (typeof opts === 'function') {\n callback = opts;\n }\n\n const manualCurrentRetryAttemptWasSet =\n opts && typeof opts.currentRetryAttempt === 'number';\n opts = extend({}, DEFAULTS, opts);\n\n if (typeof opts.request === 'undefined') {\n throw new Error('A request library must be provided to retry-request.');\n }\n\n let currentRetryAttempt = opts.currentRetryAttempt;\n\n let numNoResponseAttempts = 0;\n let streamResponseHandled = false;\n\n let retryStream;\n let requestStream;\n let delayStream;\n\n let activeRequest;\n const retryRequest = {\n abort: function () {\n if (activeRequest && activeRequest.abort) {\n activeRequest.abort();\n }\n },\n };\n\n if (streamMode) {\n retryStream = new PassThrough({objectMode: opts.objectMode});\n retryStream.abort = resetStreams;\n }\n\n const timeOfFirstRequest = Date.now();\n if (currentRetryAttempt > 0) {\n retryAfterDelay(currentRetryAttempt);\n } else {\n makeRequest();\n }\n\n if (streamMode) {\n return retryStream;\n } else {\n return retryRequest;\n }\n\n function resetStreams() {\n delayStream = null;\n\n if (requestStream) {\n requestStream.abort && requestStream.abort();\n requestStream.cancel && requestStream.cancel();\n\n if (requestStream.destroy) {\n requestStream.destroy();\n } else if (requestStream.end) {\n requestStream.end();\n }\n }\n }\n\n function makeRequest() {\n let finishHandled = false;\n currentRetryAttempt++;\n debug(`Current retry attempt: ${currentRetryAttempt}`);\n\n function handleFinish(args = []) {\n if (!finishHandled) {\n finishHandled = true;\n retryStream.emit('complete', ...args);\n }\n }\n\n if (streamMode) {\n streamResponseHandled = false;\n\n delayStream = new PassThrough({objectMode: opts.objectMode});\n requestStream = opts.request(requestOpts);\n\n setImmediate(() => {\n retryStream.emit('request');\n });\n\n requestStream\n // gRPC via google-cloud-node can emit an `error` as well as a `response`\n // Whichever it emits, we run with-- we can't run with both. 
That's what\n // is up with the `streamResponseHandled` tracking.\n .on('error', err => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(err);\n })\n .on('response', (resp, body) => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(null, resp, body);\n })\n .on('complete', (...params) => handleFinish(params))\n .on('finish', (...params) => handleFinish(params));\n\n requestStream.pipe(delayStream);\n } else {\n activeRequest = opts.request(requestOpts, onResponse);\n }\n }\n\n function retryAfterDelay(currentRetryAttempt) {\n if (streamMode) {\n resetStreams();\n }\n\n const nextRetryDelay = getNextRetryDelay({\n maxRetryDelay: opts.maxRetryDelay,\n retryDelayMultiplier: opts.retryDelayMultiplier,\n retryNumber: currentRetryAttempt,\n timeOfFirstRequest,\n totalTimeout: opts.totalTimeout,\n });\n debug(`Next retry delay: ${nextRetryDelay}`);\n\n if (nextRetryDelay <= 0) {\n numNoResponseAttempts = opts.noResponseRetries + 1;\n return;\n }\n\n setTimeout(makeRequest, nextRetryDelay);\n }\n\n function onResponse(err, response, body) {\n // An error such as DNS resolution.\n if (err) {\n numNoResponseAttempts++;\n\n if (numNoResponseAttempts <= opts.noResponseRetries) {\n retryAfterDelay(numNoResponseAttempts);\n } else {\n if (streamMode) {\n retryStream.emit('error', err);\n retryStream.end();\n } else {\n callback(err, response, body);\n }\n }\n\n return;\n }\n\n // Send the response to see if we should try again.\n // NOTE: \"currentRetryAttempt\" isn't accurate by default, as it counts\n // the very first request sent as the first \"retry\". It is only accurate\n // when a user provides their own \"currentRetryAttempt\" option at\n // instantiation.\n const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet\n ? 
currentRetryAttempt\n : currentRetryAttempt - 1;\n if (\n adjustedCurrentRetryAttempt < opts.retries &&\n opts.shouldRetryFn(response)\n ) {\n retryAfterDelay(currentRetryAttempt);\n return;\n }\n\n // No more attempts need to be made, just continue on.\n if (streamMode) {\n retryStream.emit('response', response);\n delayStream.pipe(retryStream);\n requestStream.on('error', err => {\n retryStream.destroy(err);\n });\n } else {\n callback(err, response, body);\n }\n }\n}\n\nmodule.exports = retryRequest;\n\nfunction getNextRetryDelay(config) {\n const {\n maxRetryDelay,\n retryDelayMultiplier,\n retryNumber,\n timeOfFirstRequest,\n totalTimeout,\n } = config;\n\n const maxRetryDelayMs = maxRetryDelay * 1000;\n const totalTimeoutMs = totalTimeout * 1000;\n\n const jitter = Math.floor(Math.random() * 1000);\n const calculatedNextRetryDelay =\n Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter;\n\n const maxAllowableDelayMs =\n totalTimeoutMs - (Date.now() - timeOfFirstRequest);\n\n return Math.min(\n calculatedNextRetryDelay,\n maxAllowableDelayMs,\n maxRetryDelayMs\n );\n}\n\nmodule.exports.defaults = DEFAULTS;\nmodule.exports.getNextRetryDelay = getNextRetryDelay;\n","module.exports = require('./lib/retry');","var RetryOperation = require('./retry_operation');\n\nexports.operation = function(options) {\n var timeouts = exports.timeouts(options);\n return new RetryOperation(timeouts, {\n forever: options && (options.forever || options.retries === Infinity),\n unref: options && options.unref,\n maxRetryTime: options && options.maxRetryTime\n });\n};\n\nexports.timeouts = function(options) {\n if (options instanceof Array) {\n return [].concat(options);\n }\n\n var opts = {\n retries: 10,\n factor: 2,\n minTimeout: 1 * 1000,\n maxTimeout: Infinity,\n randomize: false\n };\n for (var key in options) {\n opts[key] = options[key];\n }\n\n if (opts.minTimeout > opts.maxTimeout) {\n throw new Error('minTimeout is greater than maxTimeout');\n }\n\n var timeouts = [];\n for (var i = 0; i < opts.retries; i++) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n if (options && options.forever && !timeouts.length) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n // sort the array numerically ascending\n timeouts.sort(function(a,b) {\n return a - b;\n });\n\n return timeouts;\n};\n\nexports.createTimeout = function(attempt, opts) {\n var random = (opts.randomize)\n ? 
(Math.random() + 1)\n : 1;\n\n var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt));\n timeout = Math.min(timeout, opts.maxTimeout);\n\n return timeout;\n};\n\nexports.wrap = function(obj, options, methods) {\n if (options instanceof Array) {\n methods = options;\n options = null;\n }\n\n if (!methods) {\n methods = [];\n for (var key in obj) {\n if (typeof obj[key] === 'function') {\n methods.push(key);\n }\n }\n }\n\n for (var i = 0; i < methods.length; i++) {\n var method = methods[i];\n var original = obj[method];\n\n obj[method] = function retryWrapper(original) {\n var op = exports.operation(options);\n var args = Array.prototype.slice.call(arguments, 1);\n var callback = args.pop();\n\n args.push(function(err) {\n if (op.retry(err)) {\n return;\n }\n if (err) {\n arguments[0] = op.mainError();\n }\n callback.apply(this, arguments);\n });\n\n op.attempt(function() {\n original.apply(obj, args);\n });\n }.bind(obj, original);\n obj[method].options = options;\n }\n};\n","function RetryOperation(timeouts, options) {\n // Compatibility for the old (timeouts, retryForever) signature\n if (typeof options === 'boolean') {\n options = { forever: options };\n }\n\n this._originalTimeouts = JSON.parse(JSON.stringify(timeouts));\n this._timeouts = timeouts;\n this._options = options || {};\n this._maxRetryTime = options && options.maxRetryTime || Infinity;\n this._fn = null;\n this._errors = [];\n this._attempts = 1;\n this._operationTimeout = null;\n this._operationTimeoutCb = null;\n this._timeout = null;\n this._operationStart = null;\n this._timer = null;\n\n if (this._options.forever) {\n this._cachedTimeouts = this._timeouts.slice(0);\n }\n}\nmodule.exports = RetryOperation;\n\nRetryOperation.prototype.reset = function() {\n this._attempts = 1;\n this._timeouts = this._originalTimeouts.slice(0);\n}\n\nRetryOperation.prototype.stop = function() {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n if (this._timer) {\n clearTimeout(this._timer);\n }\n\n this._timeouts = [];\n this._cachedTimeouts = null;\n};\n\nRetryOperation.prototype.retry = function(err) {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n\n if (!err) {\n return false;\n }\n var currentTime = new Date().getTime();\n if (err && currentTime - this._operationStart >= this._maxRetryTime) {\n this._errors.push(err);\n this._errors.unshift(new Error('RetryOperation timeout occurred'));\n return false;\n }\n\n this._errors.push(err);\n\n var timeout = this._timeouts.shift();\n if (timeout === undefined) {\n if (this._cachedTimeouts) {\n // retry forever, only keep last error\n this._errors.splice(0, this._errors.length - 1);\n timeout = this._cachedTimeouts.slice(-1);\n } else {\n return false;\n }\n }\n\n var self = this;\n this._timer = setTimeout(function() {\n self._attempts++;\n\n if (self._operationTimeoutCb) {\n self._timeout = setTimeout(function() {\n self._operationTimeoutCb(self._attempts);\n }, self._operationTimeout);\n\n if (self._options.unref) {\n self._timeout.unref();\n }\n }\n\n self._fn(self._attempts);\n }, timeout);\n\n if (this._options.unref) {\n this._timer.unref();\n }\n\n return true;\n};\n\nRetryOperation.prototype.attempt = function(fn, timeoutOps) {\n this._fn = fn;\n\n if (timeoutOps) {\n if (timeoutOps.timeout) {\n this._operationTimeout = timeoutOps.timeout;\n }\n if (timeoutOps.cb) {\n this._operationTimeoutCb = timeoutOps.cb;\n }\n }\n\n var self = this;\n if (this._operationTimeoutCb) {\n this._timeout = setTimeout(function() {\n 
self._operationTimeoutCb();\n }, self._operationTimeout);\n }\n\n this._operationStart = new Date().getTime();\n\n this._fn(this._attempts);\n};\n\nRetryOperation.prototype.try = function(fn) {\n console.log('Using RetryOperation.try() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = function(fn) {\n console.log('Using RetryOperation.start() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = RetryOperation.prototype.try;\n\nRetryOperation.prototype.errors = function() {\n return this._errors;\n};\n\nRetryOperation.prototype.attempts = function() {\n return this._attempts;\n};\n\nRetryOperation.prototype.mainError = function() {\n if (this._errors.length === 0) {\n return null;\n }\n\n var counts = {};\n var mainError = null;\n var mainErrorCount = 0;\n\n for (var i = 0; i < this._errors.length; i++) {\n var error = this._errors[i];\n var message = error.message;\n var count = (counts[message] || 0) + 1;\n\n counts[message] = count;\n\n if (count >= mainErrorCount) {\n mainError = error;\n mainErrorCount = count;\n }\n }\n\n return mainError;\n};\n","/*! safe-buffer. MIT License. Feross Aboukhadijeh */\n/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.prototype = Object.create(Buffer.prototype)\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n","'use strict';\n\nvar stubs = require('stubs')\n\n/*\n * StreamEvents can be used 2 ways:\n *\n * 1:\n * function MyStream() {\n * require('stream-events').call(this)\n * }\n *\n * 2:\n * require('stream-events')(myStream)\n */\nfunction StreamEvents(stream) {\n stream = stream || this\n\n var cfg = {\n callthrough: true,\n calls: 1\n }\n\n stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading'))\n stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing'))\n\n return stream\n}\n\nmodule.exports = StreamEvents\n","module.exports = shift\n\nfunction shift (stream) {\n var rs = stream._readableState\n if (!rs) return null\n return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs))\n}\n\nfunction getStateLength (state) {\n if (state.buffer.length) {\n // Since node 6.3.0 state.buffer is a BufferList not an array\n if (state.buffer.head) {\n return state.buffer.head.data.length\n }\n\n return state.buffer[0].length\n }\n\n return state.length\n}\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/**/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n 
var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? 
this.write(buf) : '';\n}","const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/;\nconst numRegex = /^([\\-\\+])?(0*)(\\.[0-9]+([eE]\\-?[0-9]+)?|[0-9]+(\\.[0-9]+([eE]\\-?[0-9]+)?)?)$/;\n// const octRegex = /0x[a-z0-9]+/;\n// const binRegex = /0x[a-z0-9]+/;\n\n\n//polyfill\nif (!Number.parseInt && window.parseInt) {\n Number.parseInt = window.parseInt;\n}\nif (!Number.parseFloat && window.parseFloat) {\n Number.parseFloat = window.parseFloat;\n}\n\n \nconst consider = {\n hex : true,\n leadingZeros: true,\n decimalPoint: \"\\.\",\n eNotation: true\n //skipLike: /regex/\n};\n\nfunction toNumber(str, options = {}){\n // const options = Object.assign({}, consider);\n // if(opt.leadingZeros === false){\n // options.leadingZeros = false;\n // }else if(opt.hex === false){\n // options.hex = false;\n // }\n\n options = Object.assign({}, consider, options );\n if(!str || typeof str !== \"string\" ) return str;\n \n let trimmedStr = str.trim();\n // if(trimmedStr === \"0.0\") return 0;\n // else if(trimmedStr === \"+0.0\") return 0;\n // else if(trimmedStr === \"-0.0\") return -0;\n\n if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str;\n else if (options.hex && hexRegex.test(trimmedStr)) {\n return Number.parseInt(trimmedStr, 16);\n // } else if (options.parseOct && octRegex.test(str)) {\n // return Number.parseInt(val, 8);\n // }else if (options.parseBin && binRegex.test(str)) {\n // return Number.parseInt(val, 2);\n }else{\n //separate negative sign, leading zeros, and rest number\n const match = numRegex.exec(trimmedStr);\n if(match){\n const sign = match[1];\n const leadingZeros = match[2];\n let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros\n //trim ending zeros for floating number\n \n const eNotation = match[4] || match[6];\n if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== \".\") return str; //-0123\n else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== \".\") return str; //0123\n else{//no leading zeros or leading zeros are allowed\n const num = Number(trimmedStr);\n const numStr = \"\" + num;\n if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation\n if(options.eNotation) return num;\n else return str;\n }else if(eNotation){ //given number has enotation\n if(options.eNotation) return num;\n else return str;\n }else if(trimmedStr.indexOf(\".\") !== -1){ //floating number\n // const decimalPart = match[5].substr(1);\n // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(\".\"));\n\n \n // const p = numStr.indexOf(\".\");\n // const givenIntPart = numStr.substr(0,p);\n // const givenDecPart = numStr.substr(p+1);\n if(numStr === \"0\" && (numTrimmedByZeros === \"\") ) return num; //0.0\n else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000\n else if( sign && numStr === \"-\"+numTrimmedByZeros) return num;\n else return str;\n }\n \n if(leadingZeros){\n // if(numTrimmedByZeros === numStr){\n // if(options.leadingZeros) return num;\n // else return str;\n // }else return str;\n if(numTrimmedByZeros === numStr) return num;\n else if(sign+numTrimmedByZeros === numStr) return num;\n else return str;\n }\n\n if(trimmedStr === numStr) return num;\n else if(trimmedStr === sign+numStr) return num;\n // else{\n // //number with +/- sign\n // trimmedStr.test(/[-+][0-9]);\n\n // }\n return str;\n }\n // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str;\n \n }else{ //non-numeric string\n return str;\n }\n }\n}\n\n/**\n * \n * @param {string} numStr without leading zeros\n * @returns \n */\nfunction trimZeros(numStr){\n if(numStr && numStr.indexOf(\".\") !== -1){//float\n numStr = numStr.replace(/0+$/, \"\"); //remove ending zeros\n if(numStr === \".\") numStr = \"0\";\n else if(numStr[0] === \".\") numStr = \"0\"+numStr;\n else if(numStr[numStr.length-1] === \".\") numStr = numStr.substr(0,numStr.length-1);\n return numStr;\n }\n return numStr;\n}\nmodule.exports = toNumber\n","'use strict'\n\nmodule.exports = function stubs(obj, method, cfg, stub) {\n if (!obj || !method || !obj[method])\n throw new Error('You must provide an object and a key for an existing method')\n\n if (!stub) {\n stub = cfg\n cfg = {}\n }\n\n stub = stub || function() {}\n\n cfg.callthrough = cfg.callthrough || false\n cfg.calls = cfg.calls || 0\n\n var norevert = cfg.calls === 0\n\n var cached = obj[method].bind(obj)\n\n obj[method] = function() {\n var args = [].slice.call(arguments)\n var returnVal\n\n if (cfg.callthrough)\n returnVal = cached.apply(obj, args)\n\n returnVal = stub.apply(obj, args) || returnVal\n\n if (!norevert && --cfg.calls === 0)\n obj[method] = cached\n\n return returnVal\n }\n}\n","'use strict';\nconst os = require('os');\nconst tty = require('tty');\nconst hasFlag = require('has-flag');\n\nconst {env} = process;\n\nlet forceColor;\nif (hasFlag('no-color') ||\n\thasFlag('no-colors') ||\n\thasFlag('color=false') ||\n\thasFlag('color=never')) {\n\tforceColor = 0;\n} else if (hasFlag('color') ||\n\thasFlag('colors') ||\n\thasFlag('color=true') ||\n\thasFlag('color=always')) {\n\tforceColor = 1;\n}\n\nif ('FORCE_COLOR' in env) {\n\tif (env.FORCE_COLOR === 'true') {\n\t\tforceColor = 1;\n\t} else if (env.FORCE_COLOR === 'false') {\n\t\tforceColor = 0;\n\t} else {\n\t\tforceColor = env.FORCE_COLOR.length === 0 ? 
1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);\n\t}\n}\n\nfunction translateLevel(level) {\n\tif (level === 0) {\n\t\treturn false;\n\t}\n\n\treturn {\n\t\tlevel,\n\t\thasBasic: true,\n\t\thas256: level >= 2,\n\t\thas16m: level >= 3\n\t};\n}\n\nfunction supportsColor(haveStream, streamIsTTY) {\n\tif (forceColor === 0) {\n\t\treturn 0;\n\t}\n\n\tif (hasFlag('color=16m') ||\n\t\thasFlag('color=full') ||\n\t\thasFlag('color=truecolor')) {\n\t\treturn 3;\n\t}\n\n\tif (hasFlag('color=256')) {\n\t\treturn 2;\n\t}\n\n\tif (haveStream && !streamIsTTY && forceColor === undefined) {\n\t\treturn 0;\n\t}\n\n\tconst min = forceColor || 0;\n\n\tif (env.TERM === 'dumb') {\n\t\treturn min;\n\t}\n\n\tif (process.platform === 'win32') {\n\t\t// Windows 10 build 10586 is the first Windows release that supports 256 colors.\n\t\t// Windows 10 build 14931 is the first release that supports 16m/TrueColor.\n\t\tconst osRelease = os.release().split('.');\n\t\tif (\n\t\t\tNumber(osRelease[0]) >= 10 &&\n\t\t\tNumber(osRelease[2]) >= 10586\n\t\t) {\n\t\t\treturn Number(osRelease[2]) >= 14931 ? 3 : 2;\n\t\t}\n\n\t\treturn 1;\n\t}\n\n\tif ('CI' in env) {\n\t\tif (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {\n\t\t\treturn 1;\n\t\t}\n\n\t\treturn min;\n\t}\n\n\tif ('TEAMCITY_VERSION' in env) {\n\t\treturn /^(9\\.(0*[1-9]\\d*)\\.|\\d{2,}\\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;\n\t}\n\n\tif (env.COLORTERM === 'truecolor') {\n\t\treturn 3;\n\t}\n\n\tif ('TERM_PROGRAM' in env) {\n\t\tconst version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);\n\n\t\tswitch (env.TERM_PROGRAM) {\n\t\t\tcase 'iTerm.app':\n\t\t\t\treturn version >= 3 ? 3 : 2;\n\t\t\tcase 'Apple_Terminal':\n\t\t\t\treturn 2;\n\t\t\t// No default\n\t\t}\n\t}\n\n\tif (/-256(color)?$/i.test(env.TERM)) {\n\t\treturn 2;\n\t}\n\n\tif (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {\n\t\treturn 1;\n\t}\n\n\tif ('COLORTERM' in env) {\n\t\treturn 1;\n\t}\n\n\treturn min;\n}\n\nfunction getSupportLevel(stream) {\n\tconst level = supportsColor(stream, stream && stream.isTTY);\n\treturn translateLevel(level);\n}\n\nmodule.exports = {\n\tsupportsColor: getSupportLevel,\n\tstdout: translateLevel(supportsColor(true, tty.isatty(1))),\n\tstderr: translateLevel(supportsColor(true, tty.isatty(2)))\n};\n","\"use strict\";\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0;\n/**\n * @class TeenyStatisticsWarning\n * @extends Error\n * @description While an error, is used for emitting warnings when\n * meeting certain configured thresholds.\n * @see process.emitWarning\n */\nclass TeenyStatisticsWarning extends Error {\n /**\n * @param {string} message\n */\n constructor(message) {\n super(message);\n this.threshold = 0;\n this.type = '';\n this.value 
= 0;\n this.name = this.constructor.name;\n Error.captureStackTrace(this, this.constructor);\n }\n}\nexports.TeenyStatisticsWarning = TeenyStatisticsWarning;\nTeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning';\n/**\n * @class TeenyStatistics\n * @description Maintain various statistics internal to teeny-request. Tracking\n * is not automatic and must be instrumented within teeny-request.\n */\nclass TeenyStatistics {\n /**\n * @param {TeenyStatisticsOptions} [opts]\n */\n constructor(opts) {\n /**\n * @type {number}\n * @private\n * @default 0\n */\n this._concurrentRequests = 0;\n /**\n * @type {boolean}\n * @private\n * @default false\n */\n this._didConcurrentRequestWarn = false;\n this._options = TeenyStatistics._prepareOptions(opts);\n }\n /**\n * Returns a copy of the current options.\n * @return {TeenyStatisticsOptions}\n */\n getOptions() {\n return Object.assign({}, this._options);\n }\n /**\n * Change configured statistics options. This will not preserve unspecified\n * options that were previously specified, i.e. this is a reset of options.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsConfig} The previous options.\n * @see _prepareOptions\n */\n setOptions(opts) {\n const oldOpts = this._options;\n this._options = TeenyStatistics._prepareOptions(opts);\n return oldOpts;\n }\n /**\n * @readonly\n * @return {TeenyStatisticsCounters}\n */\n get counters() {\n return {\n concurrentRequests: this._concurrentRequests,\n };\n }\n /**\n * @description Should call this right before making a request.\n */\n requestStarting() {\n this._concurrentRequests++;\n if (this._options.concurrentRequests > 0 &&\n this._concurrentRequests >= this._options.concurrentRequests &&\n !this._didConcurrentRequestWarn) {\n this._didConcurrentRequestWarn = true;\n const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' +\n this._concurrentRequests +\n ' requests in-flight, which exceeds the configured threshold of ' +\n this._options.concurrentRequests +\n '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' +\n 'variable or the concurrentRequests option of teeny-request to ' +\n 'increase or disable (0) this warning.');\n warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS;\n warning.value = this._concurrentRequests;\n warning.threshold = this._options.concurrentRequests;\n process.emitWarning(warning);\n }\n }\n /**\n * @description When using `requestStarting`, call this after the request\n * has finished.\n */\n requestFinished() {\n // TODO negative?\n this._concurrentRequests--;\n }\n /**\n * Configuration Precedence:\n * 1. Dependency inversion via defined option.\n * 2. Global numeric environment variable.\n * 3. 
Built-in default.\n * This will not preserve unspecified options previously specified.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsOptions}\n * @private\n */\n static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) {\n let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS;\n const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS);\n if (diConcurrentRequests !== undefined) {\n concurrentRequests = diConcurrentRequests;\n }\n else if (!Number.isNaN(envConcurrentRequests)) {\n concurrentRequests = envConcurrentRequests;\n }\n return { concurrentRequests };\n }\n}\nexports.TeenyStatistics = TeenyStatistics;\n/**\n * @description A default threshold representing when to warn about excessive\n * in-flight/concurrent requests.\n * @type {number}\n * @static\n * @readonly\n * @default 5000\n */\nTeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000;\n//# sourceMappingURL=TeenyStatistics.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getAgent = exports.pool = void 0;\nconst http_1 = require(\"http\");\nconst https_1 = require(\"https\");\n// eslint-disable-next-line node/no-deprecated-api\nconst url_1 = require(\"url\");\nexports.pool = new Map();\n/**\n * Determines if a proxy should be considered based on the environment.\n *\n * @param uri The request uri\n * @returns {boolean}\n */\nfunction shouldUseProxyForURI(uri) {\n const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy;\n if (!noProxyEnv) {\n return true;\n }\n const givenURI = new URL(uri);\n for (const noProxyRaw of noProxyEnv.split(',')) {\n const noProxy = noProxyRaw.trim();\n if (noProxy === givenURI.origin || noProxy === givenURI.hostname) {\n return false;\n }\n else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) {\n const noProxyWildcard = noProxy.replace(/^\\*\\./, '.');\n if (givenURI.hostname.endsWith(noProxyWildcard)) {\n return false;\n }\n }\n }\n return true;\n}\n/**\n * Returns a custom request Agent if one is found, otherwise returns undefined\n * which will result in the global http(s) Agent being used.\n * @private\n * @param {string} uri The request uri\n * @param {Options} reqOpts The request options\n * @returns {HttpAnyAgent|undefined}\n */\nfunction getAgent(uri, reqOpts) {\n const isHttp = uri.startsWith('http://');\n const proxy = reqOpts.proxy ||\n process.env.HTTP_PROXY ||\n process.env.http_proxy ||\n process.env.HTTPS_PROXY ||\n process.env.https_proxy;\n const poolOptions = Object.assign({}, reqOpts.pool);\n const manuallyProvidedProxy = !!reqOpts.proxy;\n const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri);\n if (proxy && shouldUseProxy) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp\n ? 
require('http-proxy-agent')\n : require('https-proxy-agent');\n const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions };\n return new Agent(proxyOpts);\n }\n let key = isHttp ? 'http' : 'https';\n if (reqOpts.forever) {\n key += ':forever';\n if (!exports.pool.has(key)) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp ? http_1.Agent : https_1.Agent;\n exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true }));\n }\n }\n return exports.pool.get(key);\n}\nexports.getAgent = getAgent;\n//# sourceMappingURL=agents.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.teenyRequest = exports.RequestError = void 0;\nconst node_fetch_1 = require(\"node-fetch\");\nconst stream_1 = require(\"stream\");\nconst uuid = require(\"uuid\");\nconst agents_1 = require(\"./agents\");\nconst TeenyStatistics_1 = require(\"./TeenyStatistics\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst streamEvents = require('stream-events');\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\n/**\n * Convert options from Request to Fetch format\n * @private\n * @param reqOpts Request options\n */\nfunction requestToFetchOptions(reqOpts) {\n const options = {\n method: reqOpts.method || 'GET',\n ...(reqOpts.timeout && { timeout: reqOpts.timeout }),\n ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }),\n };\n if (typeof reqOpts.json === 'object') {\n // Add Content-type: application/json header\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['Content-Type'] = 'application/json';\n // Set body to JSON representation of value\n options.body = JSON.stringify(reqOpts.json);\n }\n else {\n if (Buffer.isBuffer(reqOpts.body)) {\n options.body = reqOpts.body;\n }\n else if (typeof reqOpts.body !== 'string') {\n options.body = JSON.stringify(reqOpts.body);\n }\n else {\n options.body = reqOpts.body;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options.headers = reqOpts.headers;\n let uri = (reqOpts.uri ||\n reqOpts.url);\n if (!uri) {\n throw new Error('Missing uri or url in reqOpts.');\n }\n if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const qs = require('querystring');\n const params = qs.stringify(reqOpts.qs);\n uri = uri + '?' 
+ params;\n }\n options.agent = (0, agents_1.getAgent)(uri, reqOpts);\n return { uri, options };\n}\n/**\n * Convert a response from `fetch` to `request` format.\n * @private\n * @param opts The `request` options used to create the request.\n * @param res The Fetch response\n * @returns A `request` response object\n */\nfunction fetchToRequestResponse(opts, res) {\n const request = {};\n request.agent = opts.agent || false;\n request.headers = (opts.headers || {});\n request.href = res.url;\n // headers need to be converted from a map to an obj\n const resHeaders = {};\n res.headers.forEach((value, key) => (resHeaders[key] = value));\n const response = Object.assign(res.body, {\n statusCode: res.status,\n statusMessage: res.statusText,\n request,\n body: res.body,\n headers: resHeaders,\n toJSON: () => ({ headers: resHeaders }),\n });\n return response;\n}\n/**\n * Create POST body from two parts as multipart/related content-type\n * @private\n * @param boundary\n * @param multipart\n */\nfunction createMultipartStream(boundary, multipart) {\n const finale = `--${boundary}--`;\n const stream = new stream_1.PassThrough();\n for (const part of multipart) {\n const preamble = `--${boundary}\\r\\nContent-Type: ${part['Content-Type']}\\r\\n\\r\\n`;\n stream.write(preamble);\n if (typeof part.body === 'string') {\n stream.write(part.body);\n stream.write('\\r\\n');\n }\n else {\n part.body.pipe(stream, { end: false });\n part.body.on('end', () => {\n stream.write('\\r\\n');\n stream.write(finale);\n stream.end();\n });\n }\n }\n return stream;\n}\nfunction teenyRequest(reqOpts, callback) {\n const { uri, options } = requestToFetchOptions(reqOpts);\n const multipart = reqOpts.multipart;\n if (reqOpts.multipart && multipart.length === 2) {\n if (!callback) {\n // TODO: add support for multipart uploads through streaming\n throw new Error('Multipart without callback is not implemented.');\n }\n const boundary = uuid.v4();\n options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`;\n options.body = createMultipartStream(boundary, multipart);\n // Multipart upload\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, (err) => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n }\n if (callback === undefined) {\n // Stream mode\n const requestStream = streamEvents(new stream_1.PassThrough());\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let responseStream;\n requestStream.once('reading', () => {\n if (responseStream) {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n }\n else {\n requestStream.once('response', () => {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n });\n }\n });\n options.compress = false;\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n responseStream = res.body;\n 
responseStream.on('error', (err) => {\n requestStream.emit('error', err);\n });\n const response = fetchToRequestResponse(options, res);\n requestStream.emit('response', response);\n }, err => {\n teenyRequest.stats.requestFinished();\n requestStream.emit('error', err);\n });\n // fetch doesn't supply the raw HTTP stream, instead it\n // returns a PassThrough piped from the HTTP response\n // stream.\n return requestStream;\n }\n // GET or POST with callback\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n if (response.statusCode === 204) {\n // Probably a DELETE\n callback(null, response, body);\n return;\n }\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, err => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n const response = fetchToRequestResponse(options, res);\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n}\nexports.teenyRequest = teenyRequest;\nteenyRequest.defaults = (defaults) => {\n return (reqOpts, callback) => {\n const opts = { ...defaults, ...reqOpts };\n if (callback === undefined) {\n return teenyRequest(opts);\n }\n teenyRequest(opts, callback);\n };\n};\n/**\n * Single instance of an interface for keeping track of things.\n */\nteenyRequest.stats = new TeenyStatistics_1.TeenyStatistics();\nteenyRequest.resetStats = () => {\n teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions());\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar 
_stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nvar _default = {\n randomUUID: _crypto.default.randomUUID\n};\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nexports.unsafeStringify = unsafeStringify;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).slice(1));\n}\n\nfunction unsafeStringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]];\n}\n\nfunction stringify(arr, offset = 0) {\n const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.unsafeStringify)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.URL = exports.DNS = void 0;\nexports.default = v35;\n\nvar _stringify = require(\"./stringify.js\");\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction v35(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n var _namespace;\n\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... 
value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _native = _interopRequireDefault(require(\"./native.js\"));\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n if (_native.default.randomUUID && !buf && !options) {\n return _native.default.randomUUID();\n }\n\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.slice(14, 15), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict'\n\nconst Client = require('./lib/client')\nconst Dispatcher = require('./lib/dispatcher')\nconst errors = require('./lib/core/errors')\nconst Pool = require('./lib/pool')\nconst BalancedPool = require('./lib/balanced-pool')\nconst Agent = require('./lib/agent')\nconst util = require('./lib/core/util')\nconst { InvalidArgumentError } = errors\nconst api = require('./lib/api')\nconst buildConnector = require('./lib/core/connect')\nconst MockClient = require('./lib/mock/mock-client')\nconst MockAgent = require('./lib/mock/mock-agent')\nconst MockPool = require('./lib/mock/mock-pool')\nconst mockErrors = require('./lib/mock/mock-errors')\nconst ProxyAgent = require('./lib/proxy-agent')\nconst RetryHandler = require('./lib/handler/RetryHandler')\nconst { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')\nconst DecoratorHandler = require('./lib/handler/DecoratorHandler')\nconst RedirectHandler = require('./lib/handler/RedirectHandler')\nconst createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor')\n\nlet hasCrypto\ntry {\n require('crypto')\n hasCrypto = true\n} catch {\n hasCrypto = false\n}\n\nObject.assign(Dispatcher.prototype, api)\n\nmodule.exports.Dispatcher = Dispatcher\nmodule.exports.Client = Client\nmodule.exports.Pool = Pool\nmodule.exports.BalancedPool = BalancedPool\nmodule.exports.Agent = Agent\nmodule.exports.ProxyAgent = ProxyAgent\nmodule.exports.RetryHandler = RetryHandler\n\nmodule.exports.DecoratorHandler = DecoratorHandler\nmodule.exports.RedirectHandler = RedirectHandler\nmodule.exports.createRedirectInterceptor = createRedirectInterceptor\n\nmodule.exports.buildConnector = buildConnector\nmodule.exports.errors = errors\n\nfunction makeDispatcher (fn) {\n return (url, opts, handler) => {\n if (typeof opts === 'function') {\n handler = opts\n opts = null\n }\n\n if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {\n throw new InvalidArgumentError('invalid url')\n }\n\n if (opts != null && typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (opts && opts.path != null) {\n if (typeof opts.path !== 'string') {\n throw new InvalidArgumentError('invalid opts.path')\n }\n\n let path = opts.path\n if (!opts.path.startsWith('/')) {\n path = `/${path}`\n 
}\n\n url = new URL(util.parseOrigin(url).origin + path)\n } else {\n if (!opts) {\n opts = typeof url === 'object' ? url : {}\n }\n\n url = util.parseURL(url)\n }\n\n const { agent, dispatcher = getGlobalDispatcher() } = opts\n\n if (agent) {\n throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')\n }\n\n return fn.call(dispatcher, {\n ...opts,\n origin: url.origin,\n path: url.search ? `${url.pathname}${url.search}` : url.pathname,\n method: opts.method || (opts.body ? 'PUT' : 'GET')\n }, handler)\n }\n}\n\nmodule.exports.setGlobalDispatcher = setGlobalDispatcher\nmodule.exports.getGlobalDispatcher = getGlobalDispatcher\n\nif (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {\n let fetchImpl = null\n module.exports.fetch = async function fetch (resource) {\n if (!fetchImpl) {\n fetchImpl = require('./lib/fetch').fetch\n }\n\n try {\n return await fetchImpl(...arguments)\n } catch (err) {\n if (typeof err === 'object') {\n Error.captureStackTrace(err, this)\n }\n\n throw err\n }\n }\n module.exports.Headers = require('./lib/fetch/headers').Headers\n module.exports.Response = require('./lib/fetch/response').Response\n module.exports.Request = require('./lib/fetch/request').Request\n module.exports.FormData = require('./lib/fetch/formdata').FormData\n module.exports.File = require('./lib/fetch/file').File\n module.exports.FileReader = require('./lib/fileapi/filereader').FileReader\n\n const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global')\n\n module.exports.setGlobalOrigin = setGlobalOrigin\n module.exports.getGlobalOrigin = getGlobalOrigin\n\n const { CacheStorage } = require('./lib/cache/cachestorage')\n const { kConstruct } = require('./lib/cache/symbols')\n\n // Cache & CacheStorage are tightly coupled with fetch. 
Even if it may run\n // in an older version of Node, it doesn't have any use without fetch.\n module.exports.caches = new CacheStorage(kConstruct)\n}\n\nif (util.nodeMajor >= 16) {\n const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies')\n\n module.exports.deleteCookie = deleteCookie\n module.exports.getCookies = getCookies\n module.exports.getSetCookies = getSetCookies\n module.exports.setCookie = setCookie\n\n const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL')\n\n module.exports.parseMIMEType = parseMIMEType\n module.exports.serializeAMimeType = serializeAMimeType\n}\n\nif (util.nodeMajor >= 18 && hasCrypto) {\n const { WebSocket } = require('./lib/websocket/websocket')\n\n module.exports.WebSocket = WebSocket\n}\n\nmodule.exports.request = makeDispatcher(api.request)\nmodule.exports.stream = makeDispatcher(api.stream)\nmodule.exports.pipeline = makeDispatcher(api.pipeline)\nmodule.exports.connect = makeDispatcher(api.connect)\nmodule.exports.upgrade = makeDispatcher(api.upgrade)\n\nmodule.exports.MockClient = MockClient\nmodule.exports.MockPool = MockPool\nmodule.exports.MockAgent = MockAgent\nmodule.exports.mockErrors = mockErrors\n","'use strict'\n\nconst { InvalidArgumentError } = require('./core/errors')\nconst { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')\nconst DispatcherBase = require('./dispatcher-base')\nconst Pool = require('./pool')\nconst Client = require('./client')\nconst util = require('./core/util')\nconst createRedirectInterceptor = require('./interceptor/redirectInterceptor')\nconst { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()\n\nconst kOnConnect = Symbol('onConnect')\nconst kOnDisconnect = Symbol('onDisconnect')\nconst kOnConnectionError = Symbol('onConnectionError')\nconst kMaxRedirections = Symbol('maxRedirections')\nconst kOnDrain = Symbol('onDrain')\nconst kFactory = Symbol('factory')\nconst kFinalizer = Symbol('finalizer')\nconst kOptions = Symbol('options')\n\nfunction defaultFactory (origin, opts) {\n return opts && opts.connections === 1\n ? new Client(origin, opts)\n : new Pool(origin, opts)\n}\n\nclass Agent extends DispatcherBase {\n constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {\n super()\n\n if (typeof factory !== 'function') {\n throw new InvalidArgumentError('factory must be a function.')\n }\n\n if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {\n throw new InvalidArgumentError('connect must be a function or an object')\n }\n\n if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {\n throw new InvalidArgumentError('maxRedirections must be a positive number')\n }\n\n if (connect && typeof connect !== 'function') {\n connect = { ...connect }\n }\n\n this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)\n ? options.interceptors.Agent\n : [createRedirectInterceptor({ maxRedirections })]\n\n this[kOptions] = { ...util.deepClone(options), connect }\n this[kOptions].interceptors = options.interceptors\n ? 
{ ...options.interceptors }\n : undefined\n this[kMaxRedirections] = maxRedirections\n this[kFactory] = factory\n this[kClients] = new Map()\n this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {\n const ref = this[kClients].get(key)\n if (ref !== undefined && ref.deref() === undefined) {\n this[kClients].delete(key)\n }\n })\n\n const agent = this\n\n this[kOnDrain] = (origin, targets) => {\n agent.emit('drain', origin, [agent, ...targets])\n }\n\n this[kOnConnect] = (origin, targets) => {\n agent.emit('connect', origin, [agent, ...targets])\n }\n\n this[kOnDisconnect] = (origin, targets, err) => {\n agent.emit('disconnect', origin, [agent, ...targets], err)\n }\n\n this[kOnConnectionError] = (origin, targets, err) => {\n agent.emit('connectionError', origin, [agent, ...targets], err)\n }\n }\n\n get [kRunning] () {\n let ret = 0\n for (const ref of this[kClients].values()) {\n const client = ref.deref()\n /* istanbul ignore next: gc is undeterministic */\n if (client) {\n ret += client[kRunning]\n }\n }\n return ret\n }\n\n [kDispatch] (opts, handler) {\n let key\n if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {\n key = String(opts.origin)\n } else {\n throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')\n }\n\n const ref = this[kClients].get(key)\n\n let dispatcher = ref ? ref.deref() : null\n if (!dispatcher) {\n dispatcher = this[kFactory](opts.origin, this[kOptions])\n .on('drain', this[kOnDrain])\n .on('connect', this[kOnConnect])\n .on('disconnect', this[kOnDisconnect])\n .on('connectionError', this[kOnConnectionError])\n\n this[kClients].set(key, new WeakRef(dispatcher))\n this[kFinalizer].register(dispatcher, key)\n }\n\n return dispatcher.dispatch(opts, handler)\n }\n\n async [kClose] () {\n const closePromises = []\n for (const ref of this[kClients].values()) {\n const client = ref.deref()\n /* istanbul ignore else: gc is undeterministic */\n if (client) {\n closePromises.push(client.close())\n }\n }\n\n await Promise.all(closePromises)\n }\n\n async [kDestroy] (err) {\n const destroyPromises = []\n for (const ref of this[kClients].values()) {\n const client = ref.deref()\n /* istanbul ignore else: gc is undeterministic */\n if (client) {\n destroyPromises.push(client.destroy(err))\n }\n }\n\n await Promise.all(destroyPromises)\n }\n}\n\nmodule.exports = Agent\n","const { addAbortListener } = require('../core/util')\nconst { RequestAbortedError } = require('../core/errors')\n\nconst kListener = Symbol('kListener')\nconst kSignal = Symbol('kSignal')\n\nfunction abort (self) {\n if (self.abort) {\n self.abort()\n } else {\n self.onError(new RequestAbortedError())\n }\n}\n\nfunction addSignal (self, signal) {\n self[kSignal] = null\n self[kListener] = null\n\n if (!signal) {\n return\n }\n\n if (signal.aborted) {\n abort(self)\n return\n }\n\n self[kSignal] = signal\n self[kListener] = () => {\n abort(self)\n }\n\n addAbortListener(self[kSignal], self[kListener])\n}\n\nfunction removeSignal (self) {\n if (!self[kSignal]) {\n return\n }\n\n if ('removeEventListener' in self[kSignal]) {\n self[kSignal].removeEventListener('abort', self[kListener])\n } else {\n self[kSignal].removeListener('abort', self[kListener])\n }\n\n self[kSignal] = null\n self[kListener] = null\n}\n\nmodule.exports = {\n addSignal,\n removeSignal\n}\n","'use strict'\n\nconst { AsyncResource } = require('async_hooks')\nconst { InvalidArgumentError, RequestAbortedError, SocketError } = 
require('../core/errors')\nconst util = require('../core/util')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass ConnectHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n const { signal, opaque, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n super('UNDICI_CONNECT')\n\n this.opaque = opaque || null\n this.responseHeaders = responseHeaders || null\n this.callback = callback\n this.abort = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders () {\n throw new SocketError('bad connect', null)\n }\n\n onUpgrade (statusCode, rawHeaders, socket) {\n const { callback, opaque, context } = this\n\n removeSignal(this)\n\n this.callback = null\n\n let headers = rawHeaders\n // Indicates is an HTTP2Session\n if (headers != null) {\n headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n }\n\n this.runInAsyncScope(callback, null, null, {\n statusCode,\n headers,\n socket,\n opaque,\n context\n })\n }\n\n onError (err) {\n const { callback, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n }\n}\n\nfunction connect (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n connect.call(this, opts, (err, data) => {\n return err ? 
reject(err) : resolve(data)\n })\n })\n }\n\n try {\n const connectHandler = new ConnectHandler(opts, callback)\n this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = connect\n","'use strict'\n\nconst {\n Readable,\n Duplex,\n PassThrough\n} = require('stream')\nconst {\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { AsyncResource } = require('async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\nconst assert = require('assert')\n\nconst kResume = Symbol('resume')\n\nclass PipelineRequest extends Readable {\n constructor () {\n super({ autoDestroy: true })\n\n this[kResume] = null\n }\n\n _read () {\n const { [kResume]: resume } = this\n\n if (resume) {\n this[kResume] = null\n resume()\n }\n }\n\n _destroy (err, callback) {\n this._read()\n\n callback(err)\n }\n}\n\nclass PipelineResponse extends Readable {\n constructor (resume) {\n super({ autoDestroy: true })\n this[kResume] = resume\n }\n\n _read () {\n this[kResume]()\n }\n\n _destroy (err, callback) {\n if (!err && !this._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n callback(err)\n }\n}\n\nclass PipelineHandler extends AsyncResource {\n constructor (opts, handler) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof handler !== 'function') {\n throw new InvalidArgumentError('invalid handler')\n }\n\n const { signal, method, opaque, onInfo, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_PIPELINE')\n\n this.opaque = opaque || null\n this.responseHeaders = responseHeaders || null\n this.handler = handler\n this.abort = null\n this.context = null\n this.onInfo = onInfo || null\n\n this.req = new PipelineRequest().on('error', util.nop)\n\n this.ret = new Duplex({\n readableObjectMode: opts.objectMode,\n autoDestroy: true,\n read: () => {\n const { body } = this\n\n if (body && body.resume) {\n body.resume()\n }\n },\n write: (chunk, encoding, callback) => {\n const { req } = this\n\n if (req.push(chunk, encoding) || req._readableState.destroyed) {\n callback()\n } else {\n req[kResume] = callback\n }\n },\n destroy: (err, callback) => {\n const { body, req, res, ret, abort } = this\n\n if (!err && !ret._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n if (abort && err) {\n abort()\n }\n\n util.destroy(body, err)\n util.destroy(req, err)\n util.destroy(res, err)\n\n removeSignal(this)\n\n callback(err)\n }\n }).on('prefinish', () => {\n const { req } = this\n\n // Node < 15 does not call _final in same tick.\n req.push(null)\n })\n\n this.res = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n const { ret, res } = this\n\n assert(!res, 'pipeline cannot be retried')\n\n if (ret.destroyed) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders (statusCode, rawHeaders, 
resume) {\n const { opaque, handler, context } = this\n\n if (statusCode < 200) {\n if (this.onInfo) {\n const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n this.res = new PipelineResponse(resume)\n\n let body\n try {\n this.handler = null\n const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n body = this.runInAsyncScope(handler, null, {\n statusCode,\n headers,\n opaque,\n body: this.res,\n context\n })\n } catch (err) {\n this.res.on('error', util.nop)\n throw err\n }\n\n if (!body || typeof body.on !== 'function') {\n throw new InvalidReturnValueError('expected Readable')\n }\n\n body\n .on('data', (chunk) => {\n const { ret, body } = this\n\n if (!ret.push(chunk) && body.pause) {\n body.pause()\n }\n })\n .on('error', (err) => {\n const { ret } = this\n\n util.destroy(ret, err)\n })\n .on('end', () => {\n const { ret } = this\n\n ret.push(null)\n })\n .on('close', () => {\n const { ret } = this\n\n if (!ret._readableState.ended) {\n util.destroy(ret, new RequestAbortedError())\n }\n })\n\n this.body = body\n }\n\n onData (chunk) {\n const { res } = this\n return res.push(chunk)\n }\n\n onComplete (trailers) {\n const { res } = this\n res.push(null)\n }\n\n onError (err) {\n const { ret } = this\n this.handler = null\n util.destroy(ret, err)\n }\n}\n\nfunction pipeline (opts, handler) {\n try {\n const pipelineHandler = new PipelineHandler(opts, handler)\n this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)\n return pipelineHandler.ret\n } catch (err) {\n return new PassThrough().destroy(err)\n }\n}\n\nmodule.exports = pipeline\n","'use strict'\n\nconst Readable = require('./readable')\nconst {\n InvalidArgumentError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { getResolveErrorBodyCallback } = require('./util')\nconst { AsyncResource } = require('async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass RequestHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts\n\n try {\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {\n throw new InvalidArgumentError('invalid highWaterMark')\n }\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_REQUEST')\n } catch (err) {\n if (util.isStream(body)) {\n util.destroy(body.on('error', util.nop), err)\n }\n throw err\n }\n\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.callback = callback\n this.res = null\n this.abort = null\n this.body = body\n this.trailers = {}\n this.context = null\n this.onInfo = onInfo || null\n this.throwOnError = throwOnError\n this.highWaterMark = highWaterMark\n\n if (util.isStream(body)) {\n body.on('error', (err) => 
{\n this.onError(err)\n })\n }\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders (statusCode, rawHeaders, resume, statusMessage) {\n const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this\n\n const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n\n if (statusCode < 200) {\n if (this.onInfo) {\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers\n const contentType = parsedHeaders['content-type']\n const body = new Readable({ resume, abort, contentType, highWaterMark })\n\n this.callback = null\n this.res = body\n if (callback !== null) {\n if (this.throwOnError && statusCode >= 400) {\n this.runInAsyncScope(getResolveErrorBodyCallback, null,\n { callback, body, contentType, statusCode, statusMessage, headers }\n )\n } else {\n this.runInAsyncScope(callback, null, null, {\n statusCode,\n headers,\n trailers: this.trailers,\n opaque,\n body,\n context\n })\n }\n }\n }\n\n onData (chunk) {\n const { res } = this\n return res.push(chunk)\n }\n\n onComplete (trailers) {\n const { res } = this\n\n removeSignal(this)\n\n util.parseHeaders(trailers, this.trailers)\n\n res.push(null)\n }\n\n onError (err) {\n const { res, callback, body, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n // TODO: Does this need queueMicrotask?\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n\n if (res) {\n this.res = null\n // Ensure all queued handlers are invoked before destroying res.\n queueMicrotask(() => {\n util.destroy(res, err)\n })\n }\n\n if (body) {\n this.body = null\n util.destroy(body, err)\n }\n }\n}\n\nfunction request (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n request.call(this, opts, (err, data) => {\n return err ? 
reject(err) : resolve(data)\n })\n })\n }\n\n try {\n this.dispatch(opts, new RequestHandler(opts, callback))\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = request\nmodule.exports.RequestHandler = RequestHandler\n","'use strict'\n\nconst { finished, PassThrough } = require('stream')\nconst {\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { getResolveErrorBodyCallback } = require('./util')\nconst { AsyncResource } = require('async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass StreamHandler extends AsyncResource {\n constructor (opts, factory, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts\n\n try {\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (typeof factory !== 'function') {\n throw new InvalidArgumentError('invalid factory')\n }\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_STREAM')\n } catch (err) {\n if (util.isStream(body)) {\n util.destroy(body.on('error', util.nop), err)\n }\n throw err\n }\n\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.factory = factory\n this.callback = callback\n this.res = null\n this.abort = null\n this.context = null\n this.trailers = null\n this.body = body\n this.onInfo = onInfo || null\n this.throwOnError = throwOnError || false\n\n if (util.isStream(body)) {\n body.on('error', (err) => {\n this.onError(err)\n })\n }\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders (statusCode, rawHeaders, resume, statusMessage) {\n const { factory, opaque, context, callback, responseHeaders } = this\n\n const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n\n if (statusCode < 200) {\n if (this.onInfo) {\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n this.factory = null\n\n let res\n\n if (this.throwOnError && statusCode >= 400) {\n const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers\n const contentType = parsedHeaders['content-type']\n res = new PassThrough()\n\n this.callback = null\n this.runInAsyncScope(getResolveErrorBodyCallback, null,\n { callback, body: res, contentType, statusCode, statusMessage, headers }\n )\n } else {\n if (factory === null) {\n return\n }\n\n res = this.runInAsyncScope(factory, null, {\n statusCode,\n headers,\n opaque,\n context\n })\n\n if (\n !res ||\n typeof res.write !== 'function' ||\n typeof res.end !== 'function' ||\n typeof res.on !== 'function'\n ) {\n throw new InvalidReturnValueError('expected Writable')\n }\n\n // TODO: Avoid finished. 
It registers an unnecessary amount of listeners.\n finished(res, { readable: false }, (err) => {\n const { callback, res, opaque, trailers, abort } = this\n\n this.res = null\n if (err || !res.readable) {\n util.destroy(res, err)\n }\n\n this.callback = null\n this.runInAsyncScope(callback, null, err || null, { opaque, trailers })\n\n if (err) {\n abort()\n }\n })\n }\n\n res.on('drain', resume)\n\n this.res = res\n\n const needDrain = res.writableNeedDrain !== undefined\n ? res.writableNeedDrain\n : res._writableState && res._writableState.needDrain\n\n return needDrain !== true\n }\n\n onData (chunk) {\n const { res } = this\n\n return res ? res.write(chunk) : true\n }\n\n onComplete (trailers) {\n const { res } = this\n\n removeSignal(this)\n\n if (!res) {\n return\n }\n\n this.trailers = util.parseHeaders(trailers)\n\n res.end()\n }\n\n onError (err) {\n const { res, callback, opaque, body } = this\n\n removeSignal(this)\n\n this.factory = null\n\n if (res) {\n this.res = null\n util.destroy(res, err)\n } else if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n\n if (body) {\n this.body = null\n util.destroy(body, err)\n }\n }\n}\n\nfunction stream (opts, factory, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n stream.call(this, opts, factory, (err, data) => {\n return err ? reject(err) : resolve(data)\n })\n })\n }\n\n try {\n this.dispatch(opts, new StreamHandler(opts, factory, callback))\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = stream\n","'use strict'\n\nconst { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')\nconst { AsyncResource } = require('async_hooks')\nconst util = require('../core/util')\nconst { addSignal, removeSignal } = require('./abort-signal')\nconst assert = require('assert')\n\nclass UpgradeHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n const { signal, opaque, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n super('UNDICI_UPGRADE')\n\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.callback = callback\n this.abort = null\n this.context = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = null\n }\n\n onHeaders () {\n throw new SocketError('bad upgrade', null)\n }\n\n onUpgrade (statusCode, rawHeaders, socket) {\n const { callback, opaque, context } = this\n\n assert.strictEqual(statusCode, 101)\n\n removeSignal(this)\n\n this.callback = null\n const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n this.runInAsyncScope(callback, null, null, {\n headers,\n socket,\n opaque,\n context\n })\n }\n\n onError (err) {\n const { callback, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n }\n}\n\nfunction upgrade (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n upgrade.call(this, opts, (err, data) => {\n return err ? reject(err) : resolve(data)\n })\n })\n }\n\n try {\n const upgradeHandler = new UpgradeHandler(opts, callback)\n this.dispatch({\n ...opts,\n method: opts.method || 'GET',\n upgrade: opts.protocol || 'Websocket'\n }, upgradeHandler)\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = upgrade\n","'use strict'\n\nmodule.exports.request = require('./api-request')\nmodule.exports.stream = require('./api-stream')\nmodule.exports.pipeline = require('./api-pipeline')\nmodule.exports.upgrade = require('./api-upgrade')\nmodule.exports.connect = require('./api-connect')\n","// Ported from https://github.com/nodejs/undici/pull/907\n\n'use strict'\n\nconst assert = require('assert')\nconst { Readable } = require('stream')\nconst { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')\nconst util = require('../core/util')\nconst { ReadableStreamFrom, toUSVString } = require('../core/util')\n\nlet Blob\n\nconst kConsume = Symbol('kConsume')\nconst kReading = Symbol('kReading')\nconst kBody = Symbol('kBody')\nconst kAbort = Symbol('abort')\nconst kContentType = Symbol('kContentType')\n\nconst noop = () => {}\n\nmodule.exports = class BodyReadable extends Readable {\n constructor ({\n resume,\n abort,\n contentType = '',\n highWaterMark = 64 * 1024 // Same as nodejs fs streams.\n }) {\n super({\n autoDestroy: true,\n read: resume,\n highWaterMark\n })\n\n this._readableState.dataEmitted = false\n\n this[kAbort] = abort\n this[kConsume] = null\n this[kBody] = null\n this[kContentType] = contentType\n\n // Is stream being consumed through Readable API?\n // This is an optimization so that we avoid checking\n // for 'data' and 'readable' listeners in the hot path\n // inside push().\n this[kReading] = false\n }\n\n destroy (err) {\n if (this.destroyed) {\n // Node < 16\n return this\n }\n\n if (!err && !this._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n if (err) {\n this[kAbort]()\n }\n\n return super.destroy(err)\n }\n\n emit (ev, ...args) {\n if (ev === 'data') {\n // Node < 16.7\n this._readableState.dataEmitted = true\n } else if (ev === 'error') {\n // Node < 16\n this._readableState.errorEmitted = true\n }\n return super.emit(ev, ...args)\n }\n\n on (ev, ...args) {\n if (ev === 'data' || ev === 'readable') {\n this[kReading] = true\n }\n return super.on(ev, ...args)\n }\n\n addListener (ev, ...args) {\n return this.on(ev, ...args)\n }\n\n off (ev, ...args) {\n const ret = super.off(ev, ...args)\n if (ev === 'data' || ev === 'readable') {\n this[kReading] = (\n this.listenerCount('data') > 0 ||\n this.listenerCount('readable') > 0\n )\n }\n return ret\n }\n\n removeListener (ev, ...args) {\n return this.off(ev, ...args)\n }\n\n push (chunk) {\n if (this[kConsume] && chunk !== null && this.readableLength === 0) {\n consumePush(this[kConsume], chunk)\n return 
this[kReading] ? super.push(chunk) : true\n }\n return super.push(chunk)\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-text\n async text () {\n return consume(this, 'text')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-json\n async json () {\n return consume(this, 'json')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-blob\n async blob () {\n return consume(this, 'blob')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-arraybuffer\n async arrayBuffer () {\n return consume(this, 'arrayBuffer')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-formdata\n async formData () {\n // TODO: Implement.\n throw new NotSupportedError()\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-bodyused\n get bodyUsed () {\n return util.isDisturbed(this)\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-body\n get body () {\n if (!this[kBody]) {\n this[kBody] = ReadableStreamFrom(this)\n if (this[kConsume]) {\n // TODO: Is this the best way to force a lock?\n this[kBody].getReader() // Ensure stream is locked.\n assert(this[kBody].locked)\n }\n }\n return this[kBody]\n }\n\n dump (opts) {\n let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144\n const signal = opts && opts.signal\n\n if (signal) {\n try {\n if (typeof signal !== 'object' || !('aborted' in signal)) {\n throw new InvalidArgumentError('signal must be an AbortSignal')\n }\n util.throwIfAborted(signal)\n } catch (err) {\n return Promise.reject(err)\n }\n }\n\n if (this.closed) {\n return Promise.resolve(null)\n }\n\n return new Promise((resolve, reject) => {\n const signalListenerCleanup = signal\n ? util.addAbortListener(signal, () => {\n this.destroy()\n })\n : noop\n\n this\n .on('close', function () {\n signalListenerCleanup()\n if (signal && signal.aborted) {\n reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))\n } else {\n resolve(null)\n }\n })\n .on('error', noop)\n .on('data', function (chunk) {\n limit -= chunk.length\n if (limit <= 0) {\n this.destroy()\n }\n })\n .resume()\n })\n }\n}\n\n// https://streams.spec.whatwg.org/#readablestream-locked\nfunction isLocked (self) {\n // Consume is an implicit lock.\n return (self[kBody] && self[kBody].locked === true) || self[kConsume]\n}\n\n// https://fetch.spec.whatwg.org/#body-unusable\nfunction isUnusable (self) {\n return util.isDisturbed(self) || isLocked(self)\n}\n\nasync function consume (stream, type) {\n if (isUnusable(stream)) {\n throw new TypeError('unusable')\n }\n\n assert(!stream[kConsume])\n\n return new Promise((resolve, reject) => {\n stream[kConsume] = {\n type,\n stream,\n resolve,\n reject,\n length: 0,\n body: []\n }\n\n stream\n .on('error', function (err) {\n consumeFinish(this[kConsume], err)\n })\n .on('close', function () {\n if (this[kConsume].body !== null) {\n consumeFinish(this[kConsume], new RequestAbortedError())\n }\n })\n\n process.nextTick(consumeStart, stream[kConsume])\n })\n}\n\nfunction consumeStart (consume) {\n if (consume.body === null) {\n return\n }\n\n const { _readableState: state } = consume.stream\n\n for (const chunk of state.buffer) {\n consumePush(consume, chunk)\n }\n\n if (state.endEmitted) {\n consumeEnd(this[kConsume])\n } else {\n consume.stream.on('end', function () {\n consumeEnd(this[kConsume])\n })\n }\n\n consume.stream.resume()\n\n while (consume.stream.read() != null) {\n // Loop\n }\n}\n\nfunction consumeEnd (consume) {\n const { type, body, resolve, stream, length } = consume\n\n try {\n if (type === 'text') {\n 
resolve(toUSVString(Buffer.concat(body)))\n } else if (type === 'json') {\n resolve(JSON.parse(Buffer.concat(body)))\n } else if (type === 'arrayBuffer') {\n const dst = new Uint8Array(length)\n\n let pos = 0\n for (const buf of body) {\n dst.set(buf, pos)\n pos += buf.byteLength\n }\n\n resolve(dst.buffer)\n } else if (type === 'blob') {\n if (!Blob) {\n Blob = require('buffer').Blob\n }\n resolve(new Blob(body, { type: stream[kContentType] }))\n }\n\n consumeFinish(consume)\n } catch (err) {\n stream.destroy(err)\n }\n}\n\nfunction consumePush (consume, chunk) {\n consume.length += chunk.length\n consume.body.push(chunk)\n}\n\nfunction consumeFinish (consume, err) {\n if (consume.body === null) {\n return\n }\n\n if (err) {\n consume.reject(err)\n } else {\n consume.resolve()\n }\n\n consume.type = null\n consume.stream = null\n consume.resolve = null\n consume.reject = null\n consume.length = 0\n consume.body = null\n}\n","const assert = require('assert')\nconst {\n ResponseStatusCodeError\n} = require('../core/errors')\nconst { toUSVString } = require('../core/util')\n\nasync function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {\n assert(body)\n\n let chunks = []\n let limit = 0\n\n for await (const chunk of body) {\n chunks.push(chunk)\n limit += chunk.length\n if (limit > 128 * 1024) {\n chunks = null\n break\n }\n }\n\n if (statusCode === 204 || !contentType || !chunks) {\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))\n return\n }\n\n try {\n if (contentType.startsWith('application/json')) {\n const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))\n return\n }\n\n if (contentType.startsWith('text/')) {\n const payload = toUSVString(Buffer.concat(chunks))\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))\n return\n }\n } catch (err) {\n // Process in a fallback if error\n }\n\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers))\n}\n\nmodule.exports = { getResolveErrorBodyCallback }\n","'use strict'\n\nconst {\n BalancedPoolMissingUpstreamError,\n InvalidArgumentError\n} = require('./core/errors')\nconst {\n PoolBase,\n kClients,\n kNeedDrain,\n kAddClient,\n kRemoveClient,\n kGetDispatcher\n} = require('./pool-base')\nconst Pool = require('./pool')\nconst { kUrl, kInterceptors } = require('./core/symbols')\nconst { parseOrigin } = require('./core/util')\nconst kFactory = Symbol('factory')\n\nconst kOptions = Symbol('options')\nconst kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')\nconst kCurrentWeight = Symbol('kCurrentWeight')\nconst kIndex = Symbol('kIndex')\nconst kWeight = Symbol('kWeight')\nconst kMaxWeightPerServer = Symbol('kMaxWeightPerServer')\nconst kErrorPenalty = Symbol('kErrorPenalty')\n\nfunction getGreatestCommonDivisor (a, b) {\n if (b === 0) return a\n return getGreatestCommonDivisor(b, a % b)\n}\n\nfunction defaultFactory (origin, opts) {\n return new Pool(origin, opts)\n}\n\nclass BalancedPool extends PoolBase {\n constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {\n super()\n\n this[kOptions] = opts\n this[kIndex] = -1\n this[kCurrentWeight] = 0\n\n this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100\n this[kErrorPenalty] = this[kOptions].errorPenalty || 15\n\n if (!Array.isArray(upstreams)) {\n upstreams = [upstreams]\n }\n\n if (typeof factory !== 'function') {\n throw new InvalidArgumentError('factory must be a function.')\n }\n\n this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)\n ? opts.interceptors.BalancedPool\n : []\n this[kFactory] = factory\n\n for (const upstream of upstreams) {\n this.addUpstream(upstream)\n }\n this._updateBalancedPoolStats()\n }\n\n addUpstream (upstream) {\n const upstreamOrigin = parseOrigin(upstream).origin\n\n if (this[kClients].find((pool) => (\n pool[kUrl].origin === upstreamOrigin &&\n pool.closed !== true &&\n pool.destroyed !== true\n ))) {\n return this\n }\n const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))\n\n this[kAddClient](pool)\n pool.on('connect', () => {\n pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])\n })\n\n pool.on('connectionError', () => {\n pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])\n this._updateBalancedPoolStats()\n })\n\n pool.on('disconnect', (...args) => {\n const err = args[2]\n if (err && err.code === 'UND_ERR_SOCKET') {\n // decrease the weight of the pool.\n pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])\n this._updateBalancedPoolStats()\n }\n })\n\n for (const client of this[kClients]) {\n client[kWeight] = this[kMaxWeightPerServer]\n }\n\n this._updateBalancedPoolStats()\n\n return this\n }\n\n _updateBalancedPoolStats () {\n this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)\n }\n\n removeUpstream (upstream) {\n const upstreamOrigin = parseOrigin(upstream).origin\n\n const pool = this[kClients].find((pool) => (\n pool[kUrl].origin === upstreamOrigin &&\n pool.closed !== true &&\n pool.destroyed !== true\n ))\n\n if (pool) {\n this[kRemoveClient](pool)\n }\n\n return this\n }\n\n get upstreams () {\n return this[kClients]\n .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)\n .map((p) => p[kUrl].origin)\n }\n\n [kGetDispatcher] () {\n // We validate that pools is 
greater than 0,\n // otherwise we would have to wait until an upstream\n // is added, which might never happen.\n if (this[kClients].length === 0) {\n throw new BalancedPoolMissingUpstreamError()\n }\n\n const dispatcher = this[kClients].find(dispatcher => (\n !dispatcher[kNeedDrain] &&\n dispatcher.closed !== true &&\n dispatcher.destroyed !== true\n ))\n\n if (!dispatcher) {\n return\n }\n\n const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)\n\n if (allClientsBusy) {\n return\n }\n\n let counter = 0\n\n let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])\n\n while (counter++ < this[kClients].length) {\n this[kIndex] = (this[kIndex] + 1) % this[kClients].length\n const pool = this[kClients][this[kIndex]]\n\n // find pool index with the largest weight\n if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {\n maxWeightIndex = this[kIndex]\n }\n\n // decrease the current weight every `this[kClients].length`.\n if (this[kIndex] === 0) {\n // Set the current weight to the next lower weight.\n this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]\n\n if (this[kCurrentWeight] <= 0) {\n this[kCurrentWeight] = this[kMaxWeightPerServer]\n }\n }\n if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {\n return pool\n }\n }\n\n this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]\n this[kIndex] = maxWeightIndex\n return this[kClients][maxWeightIndex]\n }\n}\n\nmodule.exports = BalancedPool\n","'use strict'\n\nconst { kConstruct } = require('./symbols')\nconst { urlEquals, fieldValues: getFieldValues } = require('./util')\nconst { kEnumerableProperty, isDisturbed } = require('../core/util')\nconst { kHeadersList } = require('../core/symbols')\nconst { webidl } = require('../fetch/webidl')\nconst { Response, cloneResponse } = require('../fetch/response')\nconst { Request } = require('../fetch/request')\nconst { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')\nconst { fetching } = require('../fetch/index')\nconst { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')\nconst assert = require('assert')\nconst { getGlobalDispatcher } = require('../global')\n\n/**\n * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation\n * @typedef {Object} CacheBatchOperation\n * @property {'delete' | 'put'} type\n * @property {any} request\n * @property {any} response\n * @property {import('../../types/cache').CacheQueryOptions} options\n */\n\n/**\n * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list\n * @typedef {[any, any][]} requestResponseList\n */\n\nclass Cache {\n /**\n * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list\n * @type {requestResponseList}\n */\n #relevantRequestResponseList\n\n constructor () {\n if (arguments[0] !== kConstruct) {\n webidl.illegalConstructor()\n }\n\n this.#relevantRequestResponseList = arguments[1]\n }\n\n async match (request, options = {}) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })\n\n request = webidl.converters.RequestInfo(request)\n options = webidl.converters.CacheQueryOptions(options)\n\n const p = await this.matchAll(request, options)\n\n if (p.length === 0) {\n return\n }\n\n return p[0]\n }\n\n async matchAll (request = undefined, options = {}) {\n webidl.brandCheck(this, Cache)\n\n if (request !== undefined) request = webidl.converters.RequestInfo(request)\n 
options = webidl.converters.CacheQueryOptions(options)\n\n // 1.\n let r = null\n\n // 2.\n if (request !== undefined) {\n if (request instanceof Request) {\n // 2.1.1\n r = request[kState]\n\n // 2.1.2\n if (r.method !== 'GET' && !options.ignoreMethod) {\n return []\n }\n } else if (typeof request === 'string') {\n // 2.2.1\n r = new Request(request)[kState]\n }\n }\n\n // 5.\n // 5.1\n const responses = []\n\n // 5.2\n if (request === undefined) {\n // 5.2.1\n for (const requestResponse of this.#relevantRequestResponseList) {\n responses.push(requestResponse[1])\n }\n } else { // 5.3\n // 5.3.1\n const requestResponses = this.#queryCache(r, options)\n\n // 5.3.2\n for (const requestResponse of requestResponses) {\n responses.push(requestResponse[1])\n }\n }\n\n // 5.4\n // We don't implement CORs so we don't need to loop over the responses, yay!\n\n // 5.5.1\n const responseList = []\n\n // 5.5.2\n for (const response of responses) {\n // 5.5.2.1\n const responseObject = new Response(response.body?.source ?? null)\n const body = responseObject[kState].body\n responseObject[kState] = response\n responseObject[kState].body = body\n responseObject[kHeaders][kHeadersList] = response.headersList\n responseObject[kHeaders][kGuard] = 'immutable'\n\n responseList.push(responseObject)\n }\n\n // 6.\n return Object.freeze(responseList)\n }\n\n async add (request) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })\n\n request = webidl.converters.RequestInfo(request)\n\n // 1.\n const requests = [request]\n\n // 2.\n const responseArrayPromise = this.addAll(requests)\n\n // 3.\n return await responseArrayPromise\n }\n\n async addAll (requests) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })\n\n requests = webidl.converters['sequence'](requests)\n\n // 1.\n const responsePromises = []\n\n // 2.\n const requestList = []\n\n // 3.\n for (const request of requests) {\n if (typeof request === 'string') {\n continue\n }\n\n // 3.1\n const r = request[kState]\n\n // 3.2\n if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {\n throw webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'Expected http/s scheme when method is not GET.'\n })\n }\n }\n\n // 4.\n /** @type {ReturnType[]} */\n const fetchControllers = []\n\n // 5.\n for (const request of requests) {\n // 5.1\n const r = new Request(request)[kState]\n\n // 5.2\n if (!urlIsHttpHttpsScheme(r.url)) {\n throw webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'Expected http/s scheme.'\n })\n }\n\n // 5.4\n r.initiator = 'fetch'\n r.destination = 'subresource'\n\n // 5.5\n requestList.push(r)\n\n // 5.6\n const responsePromise = createDeferredPromise()\n\n // 5.7\n fetchControllers.push(fetching({\n request: r,\n dispatcher: getGlobalDispatcher(),\n processResponse (response) {\n // 1.\n if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {\n responsePromise.reject(webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'Received an invalid status code or the request failed.'\n }))\n } else if (response.headersList.contains('vary')) { // 2.\n // 2.1\n const fieldValues = getFieldValues(response.headersList.get('vary'))\n\n // 2.2\n for (const fieldValue of fieldValues) {\n // 2.2.1\n if (fieldValue === '*') {\n responsePromise.reject(webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'invalid vary field value'\n }))\n\n for (const controller of 
fetchControllers) {\n controller.abort()\n }\n\n return\n }\n }\n }\n },\n processResponseEndOfBody (response) {\n // 1.\n if (response.aborted) {\n responsePromise.reject(new DOMException('aborted', 'AbortError'))\n return\n }\n\n // 2.\n responsePromise.resolve(response)\n }\n }))\n\n // 5.8\n responsePromises.push(responsePromise.promise)\n }\n\n // 6.\n const p = Promise.all(responsePromises)\n\n // 7.\n const responses = await p\n\n // 7.1\n const operations = []\n\n // 7.2\n let index = 0\n\n // 7.3\n for (const response of responses) {\n // 7.3.1\n /** @type {CacheBatchOperation} */\n const operation = {\n type: 'put', // 7.3.2\n request: requestList[index], // 7.3.3\n response // 7.3.4\n }\n\n operations.push(operation) // 7.3.5\n\n index++ // 7.3.6\n }\n\n // 7.5\n const cacheJobPromise = createDeferredPromise()\n\n // 7.6.1\n let errorData = null\n\n // 7.6.2\n try {\n this.#batchCacheOperations(operations)\n } catch (e) {\n errorData = e\n }\n\n // 7.6.3\n queueMicrotask(() => {\n // 7.6.3.1\n if (errorData === null) {\n cacheJobPromise.resolve(undefined)\n } else {\n // 7.6.3.2\n cacheJobPromise.reject(errorData)\n }\n })\n\n // 7.7\n return cacheJobPromise.promise\n }\n\n async put (request, response) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })\n\n request = webidl.converters.RequestInfo(request)\n response = webidl.converters.Response(response)\n\n // 1.\n let innerRequest = null\n\n // 2.\n if (request instanceof Request) {\n innerRequest = request[kState]\n } else { // 3.\n innerRequest = new Request(request)[kState]\n }\n\n // 4.\n if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Expected an http/s scheme when method is not GET'\n })\n }\n\n // 5.\n const innerResponse = response[kState]\n\n // 6.\n if (innerResponse.status === 206) {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Got 206 status'\n })\n }\n\n // 7.\n if (innerResponse.headersList.contains('vary')) {\n // 7.1.\n const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))\n\n // 7.2.\n for (const fieldValue of fieldValues) {\n // 7.2.1\n if (fieldValue === '*') {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Got * vary field value'\n })\n }\n }\n }\n\n // 8.\n if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Response body is locked or disturbed'\n })\n }\n\n // 9.\n const clonedResponse = cloneResponse(innerResponse)\n\n // 10.\n const bodyReadPromise = createDeferredPromise()\n\n // 11.\n if (innerResponse.body != null) {\n // 11.1\n const stream = innerResponse.body.stream\n\n // 11.2\n const reader = stream.getReader()\n\n // 11.3\n readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)\n } else {\n bodyReadPromise.resolve(undefined)\n }\n\n // 12.\n /** @type {CacheBatchOperation[]} */\n const operations = []\n\n // 13.\n /** @type {CacheBatchOperation} */\n const operation = {\n type: 'put', // 14.\n request: innerRequest, // 15.\n response: clonedResponse // 16.\n }\n\n // 17.\n operations.push(operation)\n\n // 19.\n const bytes = await bodyReadPromise.promise\n\n if (clonedResponse.body != null) {\n clonedResponse.body.source = bytes\n }\n\n // 19.1\n const cacheJobPromise = createDeferredPromise()\n\n // 19.2.1\n let errorData = null\n\n // 
19.2.2\n try {\n this.#batchCacheOperations(operations)\n } catch (e) {\n errorData = e\n }\n\n // 19.2.3\n queueMicrotask(() => {\n // 19.2.3.1\n if (errorData === null) {\n cacheJobPromise.resolve()\n } else { // 19.2.3.2\n cacheJobPromise.reject(errorData)\n }\n })\n\n return cacheJobPromise.promise\n }\n\n async delete (request, options = {}) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })\n\n request = webidl.converters.RequestInfo(request)\n options = webidl.converters.CacheQueryOptions(options)\n\n /**\n * @type {Request}\n */\n let r = null\n\n if (request instanceof Request) {\n r = request[kState]\n\n if (r.method !== 'GET' && !options.ignoreMethod) {\n return false\n }\n } else {\n assert(typeof request === 'string')\n\n r = new Request(request)[kState]\n }\n\n /** @type {CacheBatchOperation[]} */\n const operations = []\n\n /** @type {CacheBatchOperation} */\n const operation = {\n type: 'delete',\n request: r,\n options\n }\n\n operations.push(operation)\n\n const cacheJobPromise = createDeferredPromise()\n\n let errorData = null\n let requestResponses\n\n try {\n requestResponses = this.#batchCacheOperations(operations)\n } catch (e) {\n errorData = e\n }\n\n queueMicrotask(() => {\n if (errorData === null) {\n cacheJobPromise.resolve(!!requestResponses?.length)\n } else {\n cacheJobPromise.reject(errorData)\n }\n })\n\n return cacheJobPromise.promise\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys\n * @param {any} request\n * @param {import('../../types/cache').CacheQueryOptions} options\n * @returns {readonly Request[]}\n */\n async keys (request = undefined, options = {}) {\n webidl.brandCheck(this, Cache)\n\n if (request !== undefined) request = webidl.converters.RequestInfo(request)\n options = webidl.converters.CacheQueryOptions(options)\n\n // 1.\n let r = null\n\n // 2.\n if (request !== undefined) {\n // 2.1\n if (request instanceof Request) {\n // 2.1.1\n r = request[kState]\n\n // 2.1.2\n if (r.method !== 'GET' && !options.ignoreMethod) {\n return []\n }\n } else if (typeof request === 'string') { // 2.2\n r = new Request(request)[kState]\n }\n }\n\n // 4.\n const promise = createDeferredPromise()\n\n // 5.\n // 5.1\n const requests = []\n\n // 5.2\n if (request === undefined) {\n // 5.2.1\n for (const requestResponse of this.#relevantRequestResponseList) {\n // 5.2.1.1\n requests.push(requestResponse[0])\n }\n } else { // 5.3\n // 5.3.1\n const requestResponses = this.#queryCache(r, options)\n\n // 5.3.2\n for (const requestResponse of requestResponses) {\n // 5.3.2.1\n requests.push(requestResponse[0])\n }\n }\n\n // 5.4\n queueMicrotask(() => {\n // 5.4.1\n const requestList = []\n\n // 5.4.2\n for (const request of requests) {\n const requestObject = new Request('https://a')\n requestObject[kState] = request\n requestObject[kHeaders][kHeadersList] = request.headersList\n requestObject[kHeaders][kGuard] = 'immutable'\n requestObject[kRealm] = request.client\n\n // 5.4.2.1\n requestList.push(requestObject)\n }\n\n // 5.4.3\n promise.resolve(Object.freeze(requestList))\n })\n\n return promise.promise\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm\n * @param {CacheBatchOperation[]} operations\n * @returns {requestResponseList}\n */\n #batchCacheOperations (operations) {\n // 1.\n const cache = this.#relevantRequestResponseList\n\n // 2.\n const backupCache = [...cache]\n\n // 3.\n const addedItems = []\n\n // 4.1\n const resultList = []\n\n 
try {\n // 4.2\n for (const operation of operations) {\n // 4.2.1\n if (operation.type !== 'delete' && operation.type !== 'put') {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'operation type does not match \"delete\" or \"put\"'\n })\n }\n\n // 4.2.2\n if (operation.type === 'delete' && operation.response != null) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'delete operation should not have an associated response'\n })\n }\n\n // 4.2.3\n if (this.#queryCache(operation.request, operation.options, addedItems).length) {\n throw new DOMException('???', 'InvalidStateError')\n }\n\n // 4.2.4\n let requestResponses\n\n // 4.2.5\n if (operation.type === 'delete') {\n // 4.2.5.1\n requestResponses = this.#queryCache(operation.request, operation.options)\n\n // TODO: the spec is wrong, this is needed to pass WPTs\n if (requestResponses.length === 0) {\n return []\n }\n\n // 4.2.5.2\n for (const requestResponse of requestResponses) {\n const idx = cache.indexOf(requestResponse)\n assert(idx !== -1)\n\n // 4.2.5.2.1\n cache.splice(idx, 1)\n }\n } else if (operation.type === 'put') { // 4.2.6\n // 4.2.6.1\n if (operation.response == null) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'put operation should have an associated response'\n })\n }\n\n // 4.2.6.2\n const r = operation.request\n\n // 4.2.6.3\n if (!urlIsHttpHttpsScheme(r.url)) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'expected http or https scheme'\n })\n }\n\n // 4.2.6.4\n if (r.method !== 'GET') {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'not get method'\n })\n }\n\n // 4.2.6.5\n if (operation.options != null) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'options must not be defined'\n })\n }\n\n // 4.2.6.6\n requestResponses = this.#queryCache(operation.request)\n\n // 4.2.6.7\n for (const requestResponse of requestResponses) {\n const idx = cache.indexOf(requestResponse)\n assert(idx !== -1)\n\n // 4.2.6.7.1\n cache.splice(idx, 1)\n }\n\n // 4.2.6.8\n cache.push([operation.request, operation.response])\n\n // 4.2.6.10\n addedItems.push([operation.request, operation.response])\n }\n\n // 4.2.7\n resultList.push([operation.request, operation.response])\n }\n\n // 4.3\n return resultList\n } catch (e) { // 5.\n // 5.1\n this.#relevantRequestResponseList.length = 0\n\n // 5.2\n this.#relevantRequestResponseList = backupCache\n\n // 5.3\n throw e\n }\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#query-cache\n * @param {any} requestQuery\n * @param {import('../../types/cache').CacheQueryOptions} options\n * @param {requestResponseList} targetStorage\n * @returns {requestResponseList}\n */\n #queryCache (requestQuery, options, targetStorage) {\n /** @type {requestResponseList} */\n const resultList = []\n\n const storage = targetStorage ?? 
this.#relevantRequestResponseList\n\n for (const requestResponse of storage) {\n const [cachedRequest, cachedResponse] = requestResponse\n if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {\n resultList.push(requestResponse)\n }\n }\n\n return resultList\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm\n * @param {any} requestQuery\n * @param {any} request\n * @param {any | null} response\n * @param {import('../../types/cache').CacheQueryOptions | undefined} options\n * @returns {boolean}\n */\n #requestMatchesCachedItem (requestQuery, request, response = null, options) {\n // if (options?.ignoreMethod === false && request.method === 'GET') {\n // return false\n // }\n\n const queryURL = new URL(requestQuery.url)\n\n const cachedURL = new URL(request.url)\n\n if (options?.ignoreSearch) {\n cachedURL.search = ''\n\n queryURL.search = ''\n }\n\n if (!urlEquals(queryURL, cachedURL, true)) {\n return false\n }\n\n if (\n response == null ||\n options?.ignoreVary ||\n !response.headersList.contains('vary')\n ) {\n return true\n }\n\n const fieldValues = getFieldValues(response.headersList.get('vary'))\n\n for (const fieldValue of fieldValues) {\n if (fieldValue === '*') {\n return false\n }\n\n const requestValue = request.headersList.get(fieldValue)\n const queryValue = requestQuery.headersList.get(fieldValue)\n\n // If one has the header and the other doesn't, or one has\n // a different value than the other, return false\n if (requestValue !== queryValue) {\n return false\n }\n }\n\n return true\n }\n}\n\nObject.defineProperties(Cache.prototype, {\n [Symbol.toStringTag]: {\n value: 'Cache',\n configurable: true\n },\n match: kEnumerableProperty,\n matchAll: kEnumerableProperty,\n add: kEnumerableProperty,\n addAll: kEnumerableProperty,\n put: kEnumerableProperty,\n delete: kEnumerableProperty,\n keys: kEnumerableProperty\n})\n\nconst cacheQueryOptionConverters = [\n {\n key: 'ignoreSearch',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'ignoreMethod',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'ignoreVary',\n converter: webidl.converters.boolean,\n defaultValue: false\n }\n]\n\nwebidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)\n\nwebidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([\n ...cacheQueryOptionConverters,\n {\n key: 'cacheName',\n converter: webidl.converters.DOMString\n }\n])\n\nwebidl.converters.Response = webidl.interfaceConverter(Response)\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.RequestInfo\n)\n\nmodule.exports = {\n Cache\n}\n","'use strict'\n\nconst { kConstruct } = require('./symbols')\nconst { Cache } = require('./cache')\nconst { webidl } = require('../fetch/webidl')\nconst { kEnumerableProperty } = require('../core/util')\n\nclass CacheStorage {\n /**\n * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map\n * @type {Map}\n */\n async has (cacheName) {\n webidl.brandCheck(this, CacheStorage)\n webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })\n\n cacheName = webidl.converters.DOMString(cacheName)\n\n // 2.1.1\n // 2.2\n return this.#caches.has(cacheName)\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open\n * @param {string} cacheName\n * @returns {Promise}\n */\n async open (cacheName) {\n webidl.brandCheck(this, CacheStorage)\n 
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })\n\n cacheName = webidl.converters.DOMString(cacheName)\n\n // 2.1\n if (this.#caches.has(cacheName)) {\n // await caches.open('v1') !== await caches.open('v1')\n\n // 2.1.1\n const cache = this.#caches.get(cacheName)\n\n // 2.1.1.1\n return new Cache(kConstruct, cache)\n }\n\n // 2.2\n const cache = []\n\n // 2.3\n this.#caches.set(cacheName, cache)\n\n // 2.4\n return new Cache(kConstruct, cache)\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete\n * @param {string} cacheName\n * @returns {Promise}\n */\n async delete (cacheName) {\n webidl.brandCheck(this, CacheStorage)\n webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })\n\n cacheName = webidl.converters.DOMString(cacheName)\n\n return this.#caches.delete(cacheName)\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys\n * @returns {string[]}\n */\n async keys () {\n webidl.brandCheck(this, CacheStorage)\n\n // 2.1\n const keys = this.#caches.keys()\n\n // 2.2\n return [...keys]\n }\n}\n\nObject.defineProperties(CacheStorage.prototype, {\n [Symbol.toStringTag]: {\n value: 'CacheStorage',\n configurable: true\n },\n match: kEnumerableProperty,\n has: kEnumerableProperty,\n open: kEnumerableProperty,\n delete: kEnumerableProperty,\n keys: kEnumerableProperty\n})\n\nmodule.exports = {\n CacheStorage\n}\n","'use strict'\n\nmodule.exports = {\n kConstruct: require('../core/symbols').kConstruct\n}\n","'use strict'\n\nconst assert = require('assert')\nconst { URLSerializer } = require('../fetch/dataURL')\nconst { isValidHeaderName } = require('../fetch/util')\n\n/**\n * @see https://url.spec.whatwg.org/#concept-url-equals\n * @param {URL} A\n * @param {URL} B\n * @param {boolean | undefined} excludeFragment\n * @returns {boolean}\n */\nfunction urlEquals (A, B, excludeFragment = false) {\n const serializedA = URLSerializer(A, excludeFragment)\n\n const serializedB = URLSerializer(B, excludeFragment)\n\n return serializedA === serializedB\n}\n\n/**\n * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262\n * @param {string} header\n */\nfunction fieldValues (header) {\n assert(header !== null)\n\n const values = []\n\n for (let value of header.split(',')) {\n value = value.trim()\n\n if (!value.length) {\n continue\n } else if (!isValidHeaderName(value)) {\n continue\n }\n\n values.push(value)\n }\n\n return values\n}\n\nmodule.exports = {\n urlEquals,\n fieldValues\n}\n","// @ts-check\n\n'use strict'\n\n/* global WebAssembly */\n\nconst assert = require('assert')\nconst net = require('net')\nconst http = require('http')\nconst { pipeline } = require('stream')\nconst util = require('./core/util')\nconst timers = require('./timers')\nconst Request = require('./core/request')\nconst DispatcherBase = require('./dispatcher-base')\nconst {\n RequestContentLengthMismatchError,\n ResponseContentLengthMismatchError,\n InvalidArgumentError,\n RequestAbortedError,\n HeadersTimeoutError,\n HeadersOverflowError,\n SocketError,\n InformationalError,\n BodyTimeoutError,\n HTTPParserError,\n ResponseExceededMaxSizeError,\n ClientDestroyedError\n} = require('./core/errors')\nconst buildConnector = require('./core/connect')\nconst {\n kUrl,\n kReset,\n kServerName,\n kClient,\n kBusy,\n kParser,\n kConnect,\n kBlocking,\n kResuming,\n kRunning,\n kPending,\n kSize,\n kWriting,\n kQueue,\n kConnected,\n 
kConnecting,\n kNeedDrain,\n kNoRef,\n kKeepAliveDefaultTimeout,\n kHostHeader,\n kPendingIdx,\n kRunningIdx,\n kError,\n kPipelining,\n kSocket,\n kKeepAliveTimeoutValue,\n kMaxHeadersSize,\n kKeepAliveMaxTimeout,\n kKeepAliveTimeoutThreshold,\n kHeadersTimeout,\n kBodyTimeout,\n kStrictContentLength,\n kConnector,\n kMaxRedirections,\n kMaxRequests,\n kCounter,\n kClose,\n kDestroy,\n kDispatch,\n kInterceptors,\n kLocalAddress,\n kMaxResponseSize,\n kHTTPConnVersion,\n // HTTP2\n kHost,\n kHTTP2Session,\n kHTTP2SessionState,\n kHTTP2BuildRequest,\n kHTTP2CopyHeaders,\n kHTTP1BuildRequest\n} = require('./core/symbols')\n\n/** @type {import('http2')} */\nlet http2\ntry {\n http2 = require('http2')\n} catch {\n // @ts-ignore\n http2 = { constants: {} }\n}\n\nconst {\n constants: {\n HTTP2_HEADER_AUTHORITY,\n HTTP2_HEADER_METHOD,\n HTTP2_HEADER_PATH,\n HTTP2_HEADER_SCHEME,\n HTTP2_HEADER_CONTENT_LENGTH,\n HTTP2_HEADER_EXPECT,\n HTTP2_HEADER_STATUS\n }\n} = http2\n\n// Experimental\nlet h2ExperimentalWarned = false\n\nconst FastBuffer = Buffer[Symbol.species]\n\nconst kClosedResolve = Symbol('kClosedResolve')\n\nconst channels = {}\n\ntry {\n const diagnosticsChannel = require('diagnostics_channel')\n channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')\n channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')\n channels.connectError = diagnosticsChannel.channel('undici:client:connectError')\n channels.connected = diagnosticsChannel.channel('undici:client:connected')\n} catch {\n channels.sendHeaders = { hasSubscribers: false }\n channels.beforeConnect = { hasSubscribers: false }\n channels.connectError = { hasSubscribers: false }\n channels.connected = { hasSubscribers: false }\n}\n\n/**\n * @type {import('../types/client').default}\n */\nclass Client extends DispatcherBase {\n /**\n *\n * @param {string|URL} url\n * @param {import('../types/client').Client.Options} options\n */\n constructor (url, {\n interceptors,\n maxHeaderSize,\n headersTimeout,\n socketTimeout,\n requestTimeout,\n connectTimeout,\n bodyTimeout,\n idleTimeout,\n keepAlive,\n keepAliveTimeout,\n maxKeepAliveTimeout,\n keepAliveMaxTimeout,\n keepAliveTimeoutThreshold,\n socketPath,\n pipelining,\n tls,\n strictContentLength,\n maxCachedSessions,\n maxRedirections,\n connect,\n maxRequestsPerClient,\n localAddress,\n maxResponseSize,\n autoSelectFamily,\n autoSelectFamilyAttemptTimeout,\n // h2\n allowH2,\n maxConcurrentStreams\n } = {}) {\n super()\n\n if (keepAlive !== undefined) {\n throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')\n }\n\n if (socketTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')\n }\n\n if (requestTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')\n }\n\n if (idleTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')\n }\n\n if (maxKeepAliveTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')\n }\n\n if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {\n throw new InvalidArgumentError('invalid maxHeaderSize')\n }\n\n if (socketPath != null && typeof socketPath !== 'string') {\n throw new InvalidArgumentError('invalid socketPath')\n }\n\n if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 
0)) {\n throw new InvalidArgumentError('invalid connectTimeout')\n }\n\n if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {\n throw new InvalidArgumentError('invalid keepAliveTimeout')\n }\n\n if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {\n throw new InvalidArgumentError('invalid keepAliveMaxTimeout')\n }\n\n if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {\n throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')\n }\n\n if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {\n throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')\n }\n\n if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {\n throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')\n }\n\n if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {\n throw new InvalidArgumentError('connect must be a function or an object')\n }\n\n if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {\n throw new InvalidArgumentError('maxRedirections must be a positive number')\n }\n\n if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {\n throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')\n }\n\n if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {\n throw new InvalidArgumentError('localAddress must be valid string IP address')\n }\n\n if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {\n throw new InvalidArgumentError('maxResponseSize must be a positive number')\n }\n\n if (\n autoSelectFamilyAttemptTimeout != null &&\n (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)\n ) {\n throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')\n }\n\n // h2\n if (allowH2 != null && typeof allowH2 !== 'boolean') {\n throw new InvalidArgumentError('allowH2 must be a valid boolean value')\n }\n\n if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {\n throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0')\n }\n\n if (typeof connect !== 'function') {\n connect = buildConnector({\n ...tls,\n maxCachedSessions,\n allowH2,\n socketPath,\n timeout: connectTimeout,\n ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),\n ...connect\n })\n }\n\n this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)\n ? interceptors.Client\n : [createRedirectInterceptor({ maxRedirections })]\n this[kUrl] = util.parseOrigin(url)\n this[kConnector] = connect\n this[kSocket] = null\n this[kPipelining] = pipelining != null ? pipelining : 1\n this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize\n this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout\n this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout\n this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 
1e3 : keepAliveTimeoutThreshold\n this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]\n this[kServerName] = null\n this[kLocalAddress] = localAddress != null ? localAddress : null\n this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming\n this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming\n this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\\r\\n`\n this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3\n this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3\n this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength\n this[kMaxRedirections] = maxRedirections\n this[kMaxRequests] = maxRequestsPerClient\n this[kClosedResolve] = null\n this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1\n this[kHTTPConnVersion] = 'h1'\n\n // HTTP/2\n this[kHTTP2Session] = null\n this[kHTTP2SessionState] = !allowH2\n ? null\n : {\n // streams: null, // Fixed queue of streams - For future support of `push`\n openStreams: 0, // Keep track of them to decide wether or not unref the session\n maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server\n }\n this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`\n\n // kQueue is built up of 3 sections separated by\n // the kRunningIdx and kPendingIdx indices.\n // | complete | running | pending |\n // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length\n // kRunningIdx points to the first running element.\n // kPendingIdx points to the first pending element.\n // This implements a fast queue with an amortized\n // time of O(1).\n\n this[kQueue] = []\n this[kRunningIdx] = 0\n this[kPendingIdx] = 0\n }\n\n get pipelining () {\n return this[kPipelining]\n }\n\n set pipelining (value) {\n this[kPipelining] = value\n resume(this, true)\n }\n\n get [kPending] () {\n return this[kQueue].length - this[kPendingIdx]\n }\n\n get [kRunning] () {\n return this[kPendingIdx] - this[kRunningIdx]\n }\n\n get [kSize] () {\n return this[kQueue].length - this[kRunningIdx]\n }\n\n get [kConnected] () {\n return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed\n }\n\n get [kBusy] () {\n const socket = this[kSocket]\n return (\n (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||\n (this[kSize] >= (this[kPipelining] || 1)) ||\n this[kPending] > 0\n )\n }\n\n /* istanbul ignore: only used for test */\n [kConnect] (cb) {\n connect(this)\n this.once('connect', cb)\n }\n\n [kDispatch] (opts, handler) {\n const origin = opts.origin || this[kUrl].origin\n\n const request = this[kHTTPConnVersion] === 'h2'\n ? 
Request[kHTTP2BuildRequest](origin, opts, handler)\n : Request[kHTTP1BuildRequest](origin, opts, handler)\n\n this[kQueue].push(request)\n if (this[kResuming]) {\n // Do nothing.\n } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {\n // Wait a tick in case stream/iterator is ended in the same tick.\n this[kResuming] = 1\n process.nextTick(resume, this)\n } else {\n resume(this, true)\n }\n\n if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {\n this[kNeedDrain] = 2\n }\n\n return this[kNeedDrain] < 2\n }\n\n async [kClose] () {\n // TODO: for H2 we need to gracefully flush the remaining enqueued\n // request and close each stream.\n return new Promise((resolve) => {\n if (!this[kSize]) {\n resolve(null)\n } else {\n this[kClosedResolve] = resolve\n }\n })\n }\n\n async [kDestroy] (err) {\n return new Promise((resolve) => {\n const requests = this[kQueue].splice(this[kPendingIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(this, request, err)\n }\n\n const callback = () => {\n if (this[kClosedResolve]) {\n // TODO (fix): Should we error here with ClientDestroyedError?\n this[kClosedResolve]()\n this[kClosedResolve] = null\n }\n resolve()\n }\n\n if (this[kHTTP2Session] != null) {\n util.destroy(this[kHTTP2Session], err)\n this[kHTTP2Session] = null\n this[kHTTP2SessionState] = null\n }\n\n if (!this[kSocket]) {\n queueMicrotask(callback)\n } else {\n util.destroy(this[kSocket].on('close', callback), err)\n }\n\n resume(this)\n })\n }\n}\n\nfunction onHttp2SessionError (err) {\n assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')\n\n this[kSocket][kError] = err\n\n onError(this[kClient], err)\n}\n\nfunction onHttp2FrameError (type, code, id) {\n const err = new InformationalError(`HTTP/2: \"frameError\" received - type ${type}, code ${code}`)\n\n if (id === 0) {\n this[kSocket][kError] = err\n onError(this[kClient], err)\n }\n}\n\nfunction onHttp2SessionEnd () {\n util.destroy(this, new SocketError('other side closed'))\n util.destroy(this[kSocket], new SocketError('other side closed'))\n}\n\nfunction onHTTP2GoAway (code) {\n const client = this[kClient]\n const err = new InformationalError(`HTTP/2: \"GOAWAY\" frame received with code ${code}`)\n client[kSocket] = null\n client[kHTTP2Session] = null\n\n if (client.destroyed) {\n assert(this[kPending] === 0)\n\n // Fail entire queue.\n const requests = client[kQueue].splice(client[kRunningIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(this, request, err)\n }\n } else if (client[kRunning] > 0) {\n // Fail head of pipeline.\n const request = client[kQueue][client[kRunningIdx]]\n client[kQueue][client[kRunningIdx]++] = null\n\n errorRequest(client, request, err)\n }\n\n client[kPendingIdx] = client[kRunningIdx]\n\n assert(client[kRunning] === 0)\n\n client.emit('disconnect',\n client[kUrl],\n [client],\n err\n )\n\n resume(client)\n}\n\nconst constants = require('./llhttp/constants')\nconst createRedirectInterceptor = require('./interceptor/redirectInterceptor')\nconst EMPTY_BUF = Buffer.alloc(0)\n\nasync function lazyllhttp () {\n const llhttpWasmData = process.env.JEST_WORKER_ID ? 
require('./llhttp/llhttp-wasm.js') : undefined\n\n let mod\n try {\n mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd-wasm.js'), 'base64'))\n } catch (e) {\n /* istanbul ignore next */\n\n // We could check if the error was caused by the simd option not\n // being enabled, but the occurring of this other error\n // * https://github.com/emscripten-core/emscripten/issues/11495\n // got me to remove that check to avoid breaking Node 12.\n mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp-wasm.js'), 'base64'))\n }\n\n return await WebAssembly.instantiate(mod, {\n env: {\n /* eslint-disable camelcase */\n\n wasm_on_url: (p, at, len) => {\n /* istanbul ignore next */\n return 0\n },\n wasm_on_status: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_message_begin: (p) => {\n assert.strictEqual(currentParser.ptr, p)\n return currentParser.onMessageBegin() || 0\n },\n wasm_on_header_field: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_header_value: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {\n assert.strictEqual(currentParser.ptr, p)\n return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0\n },\n wasm_on_body: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_message_complete: (p) => {\n assert.strictEqual(currentParser.ptr, p)\n return currentParser.onMessageComplete() || 0\n }\n\n /* eslint-enable camelcase */\n }\n })\n}\n\nlet llhttpInstance = null\nlet llhttpPromise = lazyllhttp()\nllhttpPromise.catch()\n\nlet currentParser = null\nlet currentBufferRef = null\nlet currentBufferSize = 0\nlet currentBufferPtr = null\n\nconst TIMEOUT_HEADERS = 1\nconst TIMEOUT_BODY = 2\nconst TIMEOUT_IDLE = 3\n\nclass Parser {\n constructor (client, socket, { exports }) {\n assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)\n\n this.llhttp = exports\n this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)\n this.client = client\n this.socket = socket\n this.timeout = null\n this.timeoutValue = null\n this.timeoutType = null\n this.statusCode = null\n this.statusText = ''\n this.upgrade = false\n this.headers = []\n this.headersSize = 0\n this.headersMaxSize = client[kMaxHeadersSize]\n this.shouldKeepAlive = false\n this.paused = false\n this.resume = this.resume.bind(this)\n\n this.bytesRead = 0\n\n this.keepAlive = ''\n this.contentLength = ''\n this.connection = ''\n this.maxResponseSize = client[kMaxResponseSize]\n }\n\n setTimeout (value, type) {\n this.timeoutType = type\n if (value !== this.timeoutValue) {\n timers.clearTimeout(this.timeout)\n if (value) {\n this.timeout = timers.setTimeout(onParserTimeout, value, this)\n // istanbul ignore else: only for 
jest\n if (this.timeout.unref) {\n this.timeout.unref()\n }\n } else {\n this.timeout = null\n }\n this.timeoutValue = value\n } else if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n }\n\n resume () {\n if (this.socket.destroyed || !this.paused) {\n return\n }\n\n assert(this.ptr != null)\n assert(currentParser == null)\n\n this.llhttp.llhttp_resume(this.ptr)\n\n assert(this.timeoutType === TIMEOUT_BODY)\n if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n\n this.paused = false\n this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.\n this.readMore()\n }\n\n readMore () {\n while (!this.paused && this.ptr) {\n const chunk = this.socket.read()\n if (chunk === null) {\n break\n }\n this.execute(chunk)\n }\n }\n\n execute (data) {\n assert(this.ptr != null)\n assert(currentParser == null)\n assert(!this.paused)\n\n const { socket, llhttp } = this\n\n if (data.length > currentBufferSize) {\n if (currentBufferPtr) {\n llhttp.free(currentBufferPtr)\n }\n currentBufferSize = Math.ceil(data.length / 4096) * 4096\n currentBufferPtr = llhttp.malloc(currentBufferSize)\n }\n\n new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)\n\n // Call `execute` on the wasm parser.\n // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,\n // and finally the length of bytes to parse.\n // The return value is an error code or `constants.ERROR.OK`.\n try {\n let ret\n\n try {\n currentBufferRef = data\n currentParser = this\n ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)\n /* eslint-disable-next-line no-useless-catch */\n } catch (err) {\n /* istanbul ignore next: difficult to make a test case for */\n throw err\n } finally {\n currentParser = null\n currentBufferRef = null\n }\n\n const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr\n\n if (ret === constants.ERROR.PAUSED_UPGRADE) {\n this.onUpgrade(data.slice(offset))\n } else if (ret === constants.ERROR.PAUSED) {\n this.paused = true\n socket.unshift(data.slice(offset))\n } else if (ret !== constants.ERROR.OK) {\n const ptr = llhttp.llhttp_get_error_reason(this.ptr)\n let message = ''\n /* istanbul ignore else: difficult to make a test case for */\n if (ptr) {\n const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)\n message =\n 'Response does not match the HTTP/1.1 protocol (' +\n Buffer.from(llhttp.memory.buffer, ptr, len).toString() +\n ')'\n }\n throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))\n }\n } catch (err) {\n util.destroy(socket, err)\n }\n }\n\n destroy () {\n assert(this.ptr != null)\n assert(currentParser == null)\n\n this.llhttp.llhttp_free(this.ptr)\n this.ptr = null\n\n timers.clearTimeout(this.timeout)\n this.timeout = null\n this.timeoutValue = null\n this.timeoutType = null\n\n this.paused = false\n }\n\n onStatus (buf) {\n this.statusText = buf.toString()\n }\n\n onMessageBegin () {\n const { socket, client } = this\n\n /* istanbul ignore next: difficult to make a test case for */\n if (socket.destroyed) {\n return -1\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n if (!request) {\n return -1\n }\n }\n\n onHeaderField (buf) {\n const len = this.headers.length\n\n if ((len & 1) === 0) {\n this.headers.push(buf)\n } else {\n this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])\n }\n\n 
this.trackHeader(buf.length)\n }\n\n onHeaderValue (buf) {\n let len = this.headers.length\n\n if ((len & 1) === 1) {\n this.headers.push(buf)\n len += 1\n } else {\n this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])\n }\n\n const key = this.headers[len - 2]\n if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {\n this.keepAlive += buf.toString()\n } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {\n this.connection += buf.toString()\n } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {\n this.contentLength += buf.toString()\n }\n\n this.trackHeader(buf.length)\n }\n\n trackHeader (len) {\n this.headersSize += len\n if (this.headersSize >= this.headersMaxSize) {\n util.destroy(this.socket, new HeadersOverflowError())\n }\n }\n\n onUpgrade (head) {\n const { upgrade, client, socket, headers, statusCode } = this\n\n assert(upgrade)\n\n const request = client[kQueue][client[kRunningIdx]]\n assert(request)\n\n assert(!socket.destroyed)\n assert(socket === client[kSocket])\n assert(!this.paused)\n assert(request.upgrade || request.method === 'CONNECT')\n\n this.statusCode = null\n this.statusText = ''\n this.shouldKeepAlive = null\n\n assert(this.headers.length % 2 === 0)\n this.headers = []\n this.headersSize = 0\n\n socket.unshift(head)\n\n socket[kParser].destroy()\n socket[kParser] = null\n\n socket[kClient] = null\n socket[kError] = null\n socket\n .removeListener('error', onSocketError)\n .removeListener('readable', onSocketReadable)\n .removeListener('end', onSocketEnd)\n .removeListener('close', onSocketClose)\n\n client[kSocket] = null\n client[kQueue][client[kRunningIdx]++] = null\n client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))\n\n try {\n request.onUpgrade(statusCode, headers, socket)\n } catch (err) {\n util.destroy(socket, err)\n }\n\n resume(client)\n }\n\n onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {\n const { client, socket, headers, statusText } = this\n\n /* istanbul ignore next: difficult to make a test case for */\n if (socket.destroyed) {\n return -1\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n\n /* istanbul ignore next: difficult to make a test case for */\n if (!request) {\n return -1\n }\n\n assert(!this.upgrade)\n assert(this.statusCode < 200)\n\n if (statusCode === 100) {\n util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))\n return -1\n }\n\n /* this can only happen if server is misbehaving */\n if (upgrade && !request.upgrade) {\n util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))\n return -1\n }\n\n assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)\n\n this.statusCode = statusCode\n this.shouldKeepAlive = (\n shouldKeepAlive ||\n // Override llhttp value which does not allow keepAlive for HEAD.\n (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')\n )\n\n if (this.statusCode >= 200) {\n const bodyTimeout = request.bodyTimeout != null\n ? 
request.bodyTimeout\n : client[kBodyTimeout]\n this.setTimeout(bodyTimeout, TIMEOUT_BODY)\n } else if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n\n if (request.method === 'CONNECT') {\n assert(client[kRunning] === 1)\n this.upgrade = true\n return 2\n }\n\n if (upgrade) {\n assert(client[kRunning] === 1)\n this.upgrade = true\n return 2\n }\n\n assert(this.headers.length % 2 === 0)\n this.headers = []\n this.headersSize = 0\n\n if (this.shouldKeepAlive && client[kPipelining]) {\n const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null\n\n if (keepAliveTimeout != null) {\n const timeout = Math.min(\n keepAliveTimeout - client[kKeepAliveTimeoutThreshold],\n client[kKeepAliveMaxTimeout]\n )\n if (timeout <= 0) {\n socket[kReset] = true\n } else {\n client[kKeepAliveTimeoutValue] = timeout\n }\n } else {\n client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]\n }\n } else {\n // Stop more requests from being dispatched.\n socket[kReset] = true\n }\n\n const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false\n\n if (request.aborted) {\n return -1\n }\n\n if (request.method === 'HEAD') {\n return 1\n }\n\n if (statusCode < 200) {\n return 1\n }\n\n if (socket[kBlocking]) {\n socket[kBlocking] = false\n resume(client)\n }\n\n return pause ? constants.ERROR.PAUSED : 0\n }\n\n onBody (buf) {\n const { client, socket, statusCode, maxResponseSize } = this\n\n if (socket.destroyed) {\n return -1\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n assert(request)\n\n assert.strictEqual(this.timeoutType, TIMEOUT_BODY)\n if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n\n assert(statusCode >= 200)\n\n if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {\n util.destroy(socket, new ResponseExceededMaxSizeError())\n return -1\n }\n\n this.bytesRead += buf.length\n\n if (request.onData(buf) === false) {\n return constants.ERROR.PAUSED\n }\n }\n\n onMessageComplete () {\n const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this\n\n if (socket.destroyed && (!statusCode || shouldKeepAlive)) {\n return -1\n }\n\n if (upgrade) {\n return\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n assert(request)\n\n assert(statusCode >= 100)\n\n this.statusCode = null\n this.statusText = ''\n this.bytesRead = 0\n this.contentLength = ''\n this.keepAlive = ''\n this.connection = ''\n\n assert(this.headers.length % 2 === 0)\n this.headers = []\n this.headersSize = 0\n\n if (statusCode < 200) {\n return\n }\n\n /* istanbul ignore next: should be handled by llhttp? 
*/\n if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {\n util.destroy(socket, new ResponseContentLengthMismatchError())\n return -1\n }\n\n request.onComplete(headers)\n\n client[kQueue][client[kRunningIdx]++] = null\n\n if (socket[kWriting]) {\n assert.strictEqual(client[kRunning], 0)\n // Response completed before request.\n util.destroy(socket, new InformationalError('reset'))\n return constants.ERROR.PAUSED\n } else if (!shouldKeepAlive) {\n util.destroy(socket, new InformationalError('reset'))\n return constants.ERROR.PAUSED\n } else if (socket[kReset] && client[kRunning] === 0) {\n // Destroy socket once all requests have completed.\n // The request at the tail of the pipeline is the one\n // that requested reset and no further requests should\n // have been queued since then.\n util.destroy(socket, new InformationalError('reset'))\n return constants.ERROR.PAUSED\n } else if (client[kPipelining] === 1) {\n // We must wait a full event loop cycle to reuse this socket to make sure\n // that non-spec compliant servers are not closing the connection even if they\n // said they won't.\n setImmediate(resume, client)\n } else {\n resume(client)\n }\n }\n}\n\nfunction onParserTimeout (parser) {\n const { socket, timeoutType, client } = parser\n\n /* istanbul ignore else */\n if (timeoutType === TIMEOUT_HEADERS) {\n if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {\n assert(!parser.paused, 'cannot be paused while waiting for headers')\n util.destroy(socket, new HeadersTimeoutError())\n }\n } else if (timeoutType === TIMEOUT_BODY) {\n if (!parser.paused) {\n util.destroy(socket, new BodyTimeoutError())\n }\n } else if (timeoutType === TIMEOUT_IDLE) {\n assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])\n util.destroy(socket, new InformationalError('socket idle timeout'))\n }\n}\n\nfunction onSocketReadable () {\n const { [kParser]: parser } = this\n if (parser) {\n parser.readMore()\n }\n}\n\nfunction onSocketError (err) {\n const { [kClient]: client, [kParser]: parser } = this\n\n assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')\n\n if (client[kHTTPConnVersion] !== 'h2') {\n // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded\n // to the user.\n if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {\n // We treat all incoming data so for as a valid response.\n parser.onMessageComplete()\n return\n }\n }\n\n this[kError] = err\n\n onError(this[kClient], err)\n}\n\nfunction onError (client, err) {\n if (\n client[kRunning] === 0 &&\n err.code !== 'UND_ERR_INFO' &&\n err.code !== 'UND_ERR_SOCKET'\n ) {\n // Error is not caused by running request and not a recoverable\n // socket error.\n\n assert(client[kPendingIdx] === client[kRunningIdx])\n\n const requests = client[kQueue].splice(client[kRunningIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(client, request, err)\n }\n assert(client[kSize] === 0)\n }\n}\n\nfunction onSocketEnd () {\n const { [kParser]: parser, [kClient]: client } = this\n\n if (client[kHTTPConnVersion] !== 'h2') {\n if (parser.statusCode && !parser.shouldKeepAlive) {\n // We treat all incoming data so far as a valid response.\n parser.onMessageComplete()\n return\n }\n }\n\n util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))\n}\n\nfunction onSocketClose () {\n const { [kClient]: client, [kParser]: parser } = this\n\n if (client[kHTTPConnVersion] === 'h1' 
&& parser) {\n if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {\n // We treat all incoming data so far as a valid response.\n parser.onMessageComplete()\n }\n\n this[kParser].destroy()\n this[kParser] = null\n }\n\n const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))\n\n client[kSocket] = null\n\n if (client.destroyed) {\n assert(client[kPending] === 0)\n\n // Fail entire queue.\n const requests = client[kQueue].splice(client[kRunningIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(client, request, err)\n }\n } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') {\n // Fail head of pipeline.\n const request = client[kQueue][client[kRunningIdx]]\n client[kQueue][client[kRunningIdx]++] = null\n\n errorRequest(client, request, err)\n }\n\n client[kPendingIdx] = client[kRunningIdx]\n\n assert(client[kRunning] === 0)\n\n client.emit('disconnect', client[kUrl], [client], err)\n\n resume(client)\n}\n\nasync function connect (client) {\n assert(!client[kConnecting])\n assert(!client[kSocket])\n\n let { host, hostname, protocol, port } = client[kUrl]\n\n // Resolve ipv6\n if (hostname[0] === '[') {\n const idx = hostname.indexOf(']')\n\n assert(idx !== -1)\n const ip = hostname.substring(1, idx)\n\n assert(net.isIP(ip))\n hostname = ip\n }\n\n client[kConnecting] = true\n\n if (channels.beforeConnect.hasSubscribers) {\n channels.beforeConnect.publish({\n connectParams: {\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n },\n connector: client[kConnector]\n })\n }\n\n try {\n const socket = await new Promise((resolve, reject) => {\n client[kConnector]({\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n }, (err, socket) => {\n if (err) {\n reject(err)\n } else {\n resolve(socket)\n }\n })\n })\n\n if (client.destroyed) {\n util.destroy(socket.on('error', () => {}), new ClientDestroyedError())\n return\n }\n\n client[kConnecting] = false\n\n assert(socket)\n\n const isH2 = socket.alpnProtocol === 'h2'\n if (isH2) {\n if (!h2ExperimentalWarned) {\n h2ExperimentalWarned = true\n process.emitWarning('H2 support is experimental, expect them to change at any time.', {\n code: 'UNDICI-H2'\n })\n }\n\n const session = http2.connect(client[kUrl], {\n createConnection: () => socket,\n peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams\n })\n\n client[kHTTPConnVersion] = 'h2'\n session[kClient] = client\n session[kSocket] = socket\n session.on('error', onHttp2SessionError)\n session.on('frameError', onHttp2FrameError)\n session.on('end', onHttp2SessionEnd)\n session.on('goaway', onHTTP2GoAway)\n session.on('close', onSocketClose)\n session.unref()\n\n client[kHTTP2Session] = session\n socket[kHTTP2Session] = session\n } else {\n if (!llhttpInstance) {\n llhttpInstance = await llhttpPromise\n llhttpPromise = null\n }\n\n socket[kNoRef] = false\n socket[kWriting] = false\n socket[kReset] = false\n socket[kBlocking] = false\n socket[kParser] = new Parser(client, socket, llhttpInstance)\n }\n\n socket[kCounter] = 0\n socket[kMaxRequests] = client[kMaxRequests]\n socket[kClient] = client\n socket[kError] = null\n\n socket\n .on('error', onSocketError)\n .on('readable', onSocketReadable)\n .on('end', onSocketEnd)\n .on('close', onSocketClose)\n\n client[kSocket] = socket\n\n if (channels.connected.hasSubscribers) {\n channels.connected.publish({\n connectParams: 
{\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n },\n connector: client[kConnector],\n socket\n })\n }\n client.emit('connect', client[kUrl], [client])\n } catch (err) {\n if (client.destroyed) {\n return\n }\n\n client[kConnecting] = false\n\n if (channels.connectError.hasSubscribers) {\n channels.connectError.publish({\n connectParams: {\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n },\n connector: client[kConnector],\n error: err\n })\n }\n\n if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {\n assert(client[kRunning] === 0)\n while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {\n const request = client[kQueue][client[kPendingIdx]++]\n errorRequest(client, request, err)\n }\n } else {\n onError(client, err)\n }\n\n client.emit('connectionError', client[kUrl], [client], err)\n }\n\n resume(client)\n}\n\nfunction emitDrain (client) {\n client[kNeedDrain] = 0\n client.emit('drain', client[kUrl], [client])\n}\n\nfunction resume (client, sync) {\n if (client[kResuming] === 2) {\n return\n }\n\n client[kResuming] = 2\n\n _resume(client, sync)\n client[kResuming] = 0\n\n if (client[kRunningIdx] > 256) {\n client[kQueue].splice(0, client[kRunningIdx])\n client[kPendingIdx] -= client[kRunningIdx]\n client[kRunningIdx] = 0\n }\n}\n\nfunction _resume (client, sync) {\n while (true) {\n if (client.destroyed) {\n assert(client[kPending] === 0)\n return\n }\n\n if (client[kClosedResolve] && !client[kSize]) {\n client[kClosedResolve]()\n client[kClosedResolve] = null\n return\n }\n\n const socket = client[kSocket]\n\n if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {\n if (client[kSize] === 0) {\n if (!socket[kNoRef] && socket.unref) {\n socket.unref()\n socket[kNoRef] = true\n }\n } else if (socket[kNoRef] && socket.ref) {\n socket.ref()\n socket[kNoRef] = false\n }\n\n if (client[kSize] === 0) {\n if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {\n socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)\n }\n } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {\n if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {\n const request = client[kQueue][client[kRunningIdx]]\n const headersTimeout = request.headersTimeout != null\n ? 
request.headersTimeout\n : client[kHeadersTimeout]\n socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)\n }\n }\n }\n\n if (client[kBusy]) {\n client[kNeedDrain] = 2\n } else if (client[kNeedDrain] === 2) {\n if (sync) {\n client[kNeedDrain] = 1\n process.nextTick(emitDrain, client)\n } else {\n emitDrain(client)\n }\n continue\n }\n\n if (client[kPending] === 0) {\n return\n }\n\n if (client[kRunning] >= (client[kPipelining] || 1)) {\n return\n }\n\n const request = client[kQueue][client[kPendingIdx]]\n\n if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {\n if (client[kRunning] > 0) {\n return\n }\n\n client[kServerName] = request.servername\n\n if (socket && socket.servername !== request.servername) {\n util.destroy(socket, new InformationalError('servername changed'))\n return\n }\n }\n\n if (client[kConnecting]) {\n return\n }\n\n if (!socket && !client[kHTTP2Session]) {\n connect(client)\n return\n }\n\n if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {\n return\n }\n\n if (client[kRunning] > 0 && !request.idempotent) {\n // Non-idempotent request cannot be retried.\n // Ensure that no other requests are inflight and\n // could cause failure.\n return\n }\n\n if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {\n // Don't dispatch an upgrade until all preceding requests have completed.\n // A misbehaving server might upgrade the connection before all pipelined\n // request has completed.\n return\n }\n\n if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 &&\n (util.isStream(request.body) || util.isAsyncIterable(request.body))) {\n // Request with stream or iterator body can error while other requests\n // are inflight and indirectly error those as well.\n // Ensure this doesn't happen by waiting for inflight\n // to complete before dispatching.\n\n // Request with stream or iterator body cannot be retried.\n // Ensure that no other requests are inflight and\n // could cause failure.\n return\n }\n\n if (!request.aborted && write(client, request)) {\n client[kPendingIdx]++\n } else {\n client[kQueue].splice(client[kPendingIdx], 1)\n }\n }\n}\n\n// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2\nfunction shouldSendContentLength (method) {\n return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'\n}\n\nfunction write (client, request) {\n if (client[kHTTPConnVersion] === 'h2') {\n writeH2(client, client[kHTTP2Session], request)\n return\n }\n\n const { body, method, path, host, upgrade, headers, blocking, reset } = request\n\n // https://tools.ietf.org/html/rfc7231#section-4.3.1\n // https://tools.ietf.org/html/rfc7231#section-4.3.2\n // https://tools.ietf.org/html/rfc7231#section-4.3.5\n\n // Sending a payload body on a request that does not\n // expect it can cause undefined behavior on some\n // servers and corrupt connection state. 
Do not\n // re-use the connection for further requests.\n\n const expectsPayload = (\n method === 'PUT' ||\n method === 'POST' ||\n method === 'PATCH'\n )\n\n if (body && typeof body.read === 'function') {\n // Try to read EOF in order to get length.\n body.read(0)\n }\n\n const bodyLength = util.bodyLength(body)\n\n let contentLength = bodyLength\n\n if (contentLength === null) {\n contentLength = request.contentLength\n }\n\n if (contentLength === 0 && !expectsPayload) {\n // https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD NOT send a Content-Length header field when\n // the request message does not contain a payload body and the method\n // semantics do not anticipate such a body.\n\n contentLength = null\n }\n\n // https://github.com/nodejs/undici/issues/2046\n // A user agent may send a Content-Length header with 0 value, this should be allowed.\n if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) {\n if (client[kStrictContentLength]) {\n errorRequest(client, request, new RequestContentLengthMismatchError())\n return false\n }\n\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n\n const socket = client[kSocket]\n\n try {\n request.onConnect((err) => {\n if (request.aborted || request.completed) {\n return\n }\n\n errorRequest(client, request, err || new RequestAbortedError())\n\n util.destroy(socket, new InformationalError('aborted'))\n })\n } catch (err) {\n errorRequest(client, request, err)\n }\n\n if (request.aborted) {\n return false\n }\n\n if (method === 'HEAD') {\n // https://github.com/mcollina/undici/issues/258\n // Close after a HEAD request to interop with misbehaving servers\n // that may send a body in the response.\n\n socket[kReset] = true\n }\n\n if (upgrade || method === 'CONNECT') {\n // On CONNECT or upgrade, block pipeline from dispatching further\n // requests on this connection.\n\n socket[kReset] = true\n }\n\n if (reset != null) {\n socket[kReset] = reset\n }\n\n if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) {\n socket[kReset] = true\n }\n\n if (blocking) {\n socket[kBlocking] = true\n }\n\n let header = `${method} ${path} HTTP/1.1\\r\\n`\n\n if (typeof host === 'string') {\n header += `host: ${host}\\r\\n`\n } else {\n header += client[kHostHeader]\n }\n\n if (upgrade) {\n header += `connection: upgrade\\r\\nupgrade: ${upgrade}\\r\\n`\n } else if (client[kPipelining] && !socket[kReset]) {\n header += 'connection: keep-alive\\r\\n'\n } else {\n header += 'connection: close\\r\\n'\n }\n\n if (headers) {\n header += headers\n }\n\n if (channels.sendHeaders.hasSubscribers) {\n channels.sendHeaders.publish({ request, headers: header, socket })\n }\n\n /* istanbul ignore else: assertion */\n if (!body || bodyLength === 0) {\n if (contentLength === 0) {\n socket.write(`${header}content-length: 0\\r\\n\\r\\n`, 'latin1')\n } else {\n assert(contentLength === null, 'no body must not have content length')\n socket.write(`${header}\\r\\n`, 'latin1')\n }\n request.onRequestSent()\n } else if (util.isBuffer(body)) {\n assert(contentLength === body.byteLength, 'buffer body must have content length')\n\n socket.cork()\n socket.write(`${header}content-length: ${contentLength}\\r\\n\\r\\n`, 'latin1')\n socket.write(body)\n socket.uncork()\n request.onBodySent(body)\n request.onRequestSent()\n if (!expectsPayload) {\n socket[kReset] = true\n }\n } else if (util.isBlobLike(body)) {\n if (typeof body.stream === 
'function') {\n writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload })\n } else {\n writeBlob({ body, client, request, socket, contentLength, header, expectsPayload })\n }\n } else if (util.isStream(body)) {\n writeStream({ body, client, request, socket, contentLength, header, expectsPayload })\n } else if (util.isIterable(body)) {\n writeIterable({ body, client, request, socket, contentLength, header, expectsPayload })\n } else {\n assert(false)\n }\n\n return true\n}\n\nfunction writeH2 (client, session, request) {\n const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request\n\n let headers\n if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())\n else headers = reqHeaders\n\n if (upgrade) {\n errorRequest(client, request, new Error('Upgrade not supported for H2'))\n return false\n }\n\n try {\n // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?\n request.onConnect((err) => {\n if (request.aborted || request.completed) {\n return\n }\n\n errorRequest(client, request, err || new RequestAbortedError())\n })\n } catch (err) {\n errorRequest(client, request, err)\n }\n\n if (request.aborted) {\n return false\n }\n\n /** @type {import('node:http2').ClientHttp2Stream} */\n let stream\n const h2State = client[kHTTP2SessionState]\n\n headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]\n headers[HTTP2_HEADER_METHOD] = method\n\n if (method === 'CONNECT') {\n session.ref()\n // we are already connected, streams are pending, first request\n // will create a new stream. We trigger a request to create the stream and wait until\n // `ready` event is triggered\n // We disabled endStream to allow the user to write to the stream\n stream = session.request(headers, { endStream: false, signal })\n\n if (stream.id && !stream.pending) {\n request.onUpgrade(null, null, stream)\n ++h2State.openStreams\n } else {\n stream.once('ready', () => {\n request.onUpgrade(null, null, stream)\n ++h2State.openStreams\n })\n }\n\n stream.once('close', () => {\n h2State.openStreams -= 1\n // TODO(HTTP/2): unref only if current streams count is 0\n if (h2State.openStreams === 0) session.unref()\n })\n\n return true\n }\n\n // https://tools.ietf.org/html/rfc7540#section-8.3\n // :path and :scheme headers must be omited when sending CONNECT\n\n headers[HTTP2_HEADER_PATH] = path\n headers[HTTP2_HEADER_SCHEME] = 'https'\n\n // https://tools.ietf.org/html/rfc7231#section-4.3.1\n // https://tools.ietf.org/html/rfc7231#section-4.3.2\n // https://tools.ietf.org/html/rfc7231#section-4.3.5\n\n // Sending a payload body on a request that does not\n // expect it can cause undefined behavior on some\n // servers and corrupt connection state. 
Do not\n // re-use the connection for further requests.\n\n const expectsPayload = (\n method === 'PUT' ||\n method === 'POST' ||\n method === 'PATCH'\n )\n\n if (body && typeof body.read === 'function') {\n // Try to read EOF in order to get length.\n body.read(0)\n }\n\n let contentLength = util.bodyLength(body)\n\n if (contentLength == null) {\n contentLength = request.contentLength\n }\n\n if (contentLength === 0 || !expectsPayload) {\n // https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD NOT send a Content-Length header field when\n // the request message does not contain a payload body and the method\n // semantics do not anticipate such a body.\n\n contentLength = null\n }\n\n // https://github.com/nodejs/undici/issues/2046\n // A user agent may send a Content-Length header with 0 value, this should be allowed.\n if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {\n if (client[kStrictContentLength]) {\n errorRequest(client, request, new RequestContentLengthMismatchError())\n return false\n }\n\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n\n if (contentLength != null) {\n assert(body, 'no body must not have content length')\n headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`\n }\n\n session.ref()\n\n const shouldEndStream = method === 'GET' || method === 'HEAD'\n if (expectContinue) {\n headers[HTTP2_HEADER_EXPECT] = '100-continue'\n stream = session.request(headers, { endStream: shouldEndStream, signal })\n\n stream.once('continue', writeBodyH2)\n } else {\n stream = session.request(headers, {\n endStream: shouldEndStream,\n signal\n })\n writeBodyH2()\n }\n\n // Increment counter as we have new several streams open\n ++h2State.openStreams\n\n stream.once('response', headers => {\n const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers\n\n if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {\n stream.pause()\n }\n })\n\n stream.once('end', () => {\n request.onComplete([])\n })\n\n stream.on('data', (chunk) => {\n if (request.onData(chunk) === false) {\n stream.pause()\n }\n })\n\n stream.once('close', () => {\n h2State.openStreams -= 1\n // TODO(HTTP/2): unref only if current streams count is 0\n if (h2State.openStreams === 0) {\n session.unref()\n }\n })\n\n stream.once('error', function (err) {\n if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {\n h2State.streams -= 1\n util.destroy(stream, err)\n }\n })\n\n stream.once('frameError', (type, code) => {\n const err = new InformationalError(`HTTP/2: \"frameError\" received - type ${type}, code ${code}`)\n errorRequest(client, request, err)\n\n if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {\n h2State.streams -= 1\n util.destroy(stream, err)\n }\n })\n\n // stream.on('aborted', () => {\n // // TODO(HTTP/2): Support aborted\n // })\n\n // stream.on('timeout', () => {\n // // TODO(HTTP/2): Support timeout\n // })\n\n // stream.on('push', headers => {\n // // TODO(HTTP/2): Suppor push\n // })\n\n // stream.on('trailers', headers => {\n // // TODO(HTTP/2): Support trailers\n // })\n\n return true\n\n function writeBodyH2 () {\n /* istanbul ignore else: assertion */\n if (!body) {\n request.onRequestSent()\n } else if (util.isBuffer(body)) {\n assert(contentLength === body.byteLength, 'buffer body must have content length')\n 
stream.cork()\n stream.write(body)\n stream.uncork()\n stream.end()\n request.onBodySent(body)\n request.onRequestSent()\n } else if (util.isBlobLike(body)) {\n if (typeof body.stream === 'function') {\n writeIterable({\n client,\n request,\n contentLength,\n h2stream: stream,\n expectsPayload,\n body: body.stream(),\n socket: client[kSocket],\n header: ''\n })\n } else {\n writeBlob({\n body,\n client,\n request,\n contentLength,\n expectsPayload,\n h2stream: stream,\n header: '',\n socket: client[kSocket]\n })\n }\n } else if (util.isStream(body)) {\n writeStream({\n body,\n client,\n request,\n contentLength,\n expectsPayload,\n socket: client[kSocket],\n h2stream: stream,\n header: ''\n })\n } else if (util.isIterable(body)) {\n writeIterable({\n body,\n client,\n request,\n contentLength,\n expectsPayload,\n header: '',\n h2stream: stream,\n socket: client[kSocket]\n })\n } else {\n assert(false)\n }\n }\n}\n\nfunction writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {\n assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')\n\n if (client[kHTTPConnVersion] === 'h2') {\n // For HTTP/2, is enough to pipe the stream\n const pipe = pipeline(\n body,\n h2stream,\n (err) => {\n if (err) {\n util.destroy(body, err)\n util.destroy(h2stream, err)\n } else {\n request.onRequestSent()\n }\n }\n )\n\n pipe.on('data', onPipeData)\n pipe.once('end', () => {\n pipe.removeListener('data', onPipeData)\n util.destroy(pipe)\n })\n\n function onPipeData (chunk) {\n request.onBodySent(chunk)\n }\n\n return\n }\n\n let finished = false\n\n const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })\n\n const onData = function (chunk) {\n if (finished) {\n return\n }\n\n try {\n if (!writer.write(chunk) && this.pause) {\n this.pause()\n }\n } catch (err) {\n util.destroy(this, err)\n }\n }\n const onDrain = function () {\n if (finished) {\n return\n }\n\n if (body.resume) {\n body.resume()\n }\n }\n const onAbort = function () {\n if (finished) {\n return\n }\n const err = new RequestAbortedError()\n queueMicrotask(() => onFinished(err))\n }\n const onFinished = function (err) {\n if (finished) {\n return\n }\n\n finished = true\n\n assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1))\n\n socket\n .off('drain', onDrain)\n .off('error', onFinished)\n\n body\n .removeListener('data', onData)\n .removeListener('end', onFinished)\n .removeListener('error', onFinished)\n .removeListener('close', onAbort)\n\n if (!err) {\n try {\n writer.end()\n } catch (er) {\n err = er\n }\n }\n\n writer.destroy(err)\n\n if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) {\n util.destroy(body, err)\n } else {\n util.destroy(body)\n }\n }\n\n body\n .on('data', onData)\n .on('end', onFinished)\n .on('error', onFinished)\n .on('close', onAbort)\n\n if (body.resume) {\n body.resume()\n }\n\n socket\n .on('drain', onDrain)\n .on('error', onFinished)\n}\n\nasync function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {\n assert(contentLength === body.size, 'blob body must have content length')\n\n const isH2 = client[kHTTPConnVersion] === 'h2'\n try {\n if (contentLength != null && contentLength !== body.size) {\n throw new RequestContentLengthMismatchError()\n }\n\n const buffer = Buffer.from(await body.arrayBuffer())\n\n if (isH2) {\n h2stream.cork()\n h2stream.write(buffer)\n h2stream.uncork()\n } else {\n socket.cork()\n 
socket.write(`${header}content-length: ${contentLength}\\r\\n\\r\\n`, 'latin1')\n socket.write(buffer)\n socket.uncork()\n }\n\n request.onBodySent(buffer)\n request.onRequestSent()\n\n if (!expectsPayload) {\n socket[kReset] = true\n }\n\n resume(client)\n } catch (err) {\n util.destroy(isH2 ? h2stream : socket, err)\n }\n}\n\nasync function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {\n assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')\n\n let callback = null\n function onDrain () {\n if (callback) {\n const cb = callback\n callback = null\n cb()\n }\n }\n\n const waitForDrain = () => new Promise((resolve, reject) => {\n assert(callback === null)\n\n if (socket[kError]) {\n reject(socket[kError])\n } else {\n callback = resolve\n }\n })\n\n if (client[kHTTPConnVersion] === 'h2') {\n h2stream\n .on('close', onDrain)\n .on('drain', onDrain)\n\n try {\n // It's up to the user to somehow abort the async iterable.\n for await (const chunk of body) {\n if (socket[kError]) {\n throw socket[kError]\n }\n\n const res = h2stream.write(chunk)\n request.onBodySent(chunk)\n if (!res) {\n await waitForDrain()\n }\n }\n } catch (err) {\n h2stream.destroy(err)\n } finally {\n request.onRequestSent()\n h2stream.end()\n h2stream\n .off('close', onDrain)\n .off('drain', onDrain)\n }\n\n return\n }\n\n socket\n .on('close', onDrain)\n .on('drain', onDrain)\n\n const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })\n try {\n // It's up to the user to somehow abort the async iterable.\n for await (const chunk of body) {\n if (socket[kError]) {\n throw socket[kError]\n }\n\n if (!writer.write(chunk)) {\n await waitForDrain()\n }\n }\n\n writer.end()\n } catch (err) {\n writer.destroy(err)\n } finally {\n socket\n .off('close', onDrain)\n .off('drain', onDrain)\n }\n}\n\nclass AsyncWriter {\n constructor ({ socket, request, contentLength, client, expectsPayload, header }) {\n this.socket = socket\n this.request = request\n this.contentLength = contentLength\n this.client = client\n this.bytesWritten = 0\n this.expectsPayload = expectsPayload\n this.header = header\n\n socket[kWriting] = true\n }\n\n write (chunk) {\n const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this\n\n if (socket[kError]) {\n throw socket[kError]\n }\n\n if (socket.destroyed) {\n return false\n }\n\n const len = Buffer.byteLength(chunk)\n if (!len) {\n return true\n }\n\n // We should defer writing chunks.\n if (contentLength !== null && bytesWritten + len > contentLength) {\n if (client[kStrictContentLength]) {\n throw new RequestContentLengthMismatchError()\n }\n\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n\n socket.cork()\n\n if (bytesWritten === 0) {\n if (!expectsPayload) {\n socket[kReset] = true\n }\n\n if (contentLength === null) {\n socket.write(`${header}transfer-encoding: chunked\\r\\n`, 'latin1')\n } else {\n socket.write(`${header}content-length: ${contentLength}\\r\\n\\r\\n`, 'latin1')\n }\n }\n\n if (contentLength === null) {\n socket.write(`\\r\\n${len.toString(16)}\\r\\n`, 'latin1')\n }\n\n this.bytesWritten += len\n\n const ret = socket.write(chunk)\n\n socket.uncork()\n\n request.onBodySent(chunk)\n\n if (!ret) {\n if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {\n // istanbul ignore else: only for jest\n if (socket[kParser].timeout.refresh) {\n socket[kParser].timeout.refresh()\n }\n 
}\n }\n\n return ret\n }\n\n end () {\n const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this\n request.onRequestSent()\n\n socket[kWriting] = false\n\n if (socket[kError]) {\n throw socket[kError]\n }\n\n if (socket.destroyed) {\n return\n }\n\n if (bytesWritten === 0) {\n if (expectsPayload) {\n // https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD send a Content-Length in a request message when\n // no Transfer-Encoding is sent and the request method defines a meaning\n // for an enclosed payload body.\n\n socket.write(`${header}content-length: 0\\r\\n\\r\\n`, 'latin1')\n } else {\n socket.write(`${header}\\r\\n`, 'latin1')\n }\n } else if (contentLength === null) {\n socket.write('\\r\\n0\\r\\n\\r\\n', 'latin1')\n }\n\n if (contentLength !== null && bytesWritten !== contentLength) {\n if (client[kStrictContentLength]) {\n throw new RequestContentLengthMismatchError()\n } else {\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n }\n\n if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {\n // istanbul ignore else: only for jest\n if (socket[kParser].timeout.refresh) {\n socket[kParser].timeout.refresh()\n }\n }\n\n resume(client)\n }\n\n destroy (err) {\n const { socket, client } = this\n\n socket[kWriting] = false\n\n if (err) {\n assert(client[kRunning] <= 1, 'pipeline should only contain this request')\n util.destroy(socket, err)\n }\n }\n}\n\nfunction errorRequest (client, request, err) {\n try {\n request.onError(err)\n assert(request.aborted)\n } catch (err) {\n client.emit('error', err)\n }\n}\n\nmodule.exports = Client\n","'use strict'\n\n/* istanbul ignore file: only for Node 12 */\n\nconst { kConnected, kSize } = require('../core/symbols')\n\nclass CompatWeakRef {\n constructor (value) {\n this.value = value\n }\n\n deref () {\n return this.value[kConnected] === 0 && this.value[kSize] === 0\n ? 
undefined\n : this.value\n }\n}\n\nclass CompatFinalizer {\n constructor (finalizer) {\n this.finalizer = finalizer\n }\n\n register (dispatcher, key) {\n if (dispatcher.on) {\n dispatcher.on('disconnect', () => {\n if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {\n this.finalizer(key)\n }\n })\n }\n }\n}\n\nmodule.exports = function () {\n // FIXME: remove workaround when the Node bug is fixed\n // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308\n if (process.env.NODE_V8_COVERAGE) {\n return {\n WeakRef: CompatWeakRef,\n FinalizationRegistry: CompatFinalizer\n }\n }\n return {\n WeakRef: global.WeakRef || CompatWeakRef,\n FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer\n }\n}\n","'use strict'\n\n// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size\nconst maxAttributeValueSize = 1024\n\n// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size\nconst maxNameValuePairSize = 4096\n\nmodule.exports = {\n maxAttributeValueSize,\n maxNameValuePairSize\n}\n","'use strict'\n\nconst { parseSetCookie } = require('./parse')\nconst { stringify, getHeadersList } = require('./util')\nconst { webidl } = require('../fetch/webidl')\nconst { Headers } = require('../fetch/headers')\n\n/**\n * @typedef {Object} Cookie\n * @property {string} name\n * @property {string} value\n * @property {Date|number|undefined} expires\n * @property {number|undefined} maxAge\n * @property {string|undefined} domain\n * @property {string|undefined} path\n * @property {boolean|undefined} secure\n * @property {boolean|undefined} httpOnly\n * @property {'Strict'|'Lax'|'None'} sameSite\n * @property {string[]} unparsed\n */\n\n/**\n * @param {Headers} headers\n * @returns {Record}\n */\nfunction getCookies (headers) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n const cookie = headers.get('cookie')\n const out = {}\n\n if (!cookie) {\n return out\n }\n\n for (const piece of cookie.split(';')) {\n const [name, ...value] = piece.split('=')\n\n out[name.trim()] = value.join('=')\n }\n\n return out\n}\n\n/**\n * @param {Headers} headers\n * @param {string} name\n * @param {{ path?: string, domain?: string }|undefined} attributes\n * @returns {void}\n */\nfunction deleteCookie (headers, name, attributes) {\n webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n name = webidl.converters.DOMString(name)\n attributes = webidl.converters.DeleteCookieAttributes(attributes)\n\n // Matches behavior of\n // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278\n setCookie(headers, {\n name,\n value: '',\n expires: new Date(0),\n ...attributes\n })\n}\n\n/**\n * @param {Headers} headers\n * @returns {Cookie[]}\n */\nfunction getSetCookies (headers) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n const cookies = getHeadersList(headers).cookies\n\n if (!cookies) {\n return []\n }\n\n // In older versions of undici, cookies is a list of name:value.\n return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? 
pair[1] : pair))\n}\n\n/**\n * @param {Headers} headers\n * @param {Cookie} cookie\n * @returns {void}\n */\nfunction setCookie (headers, cookie) {\n webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n cookie = webidl.converters.Cookie(cookie)\n\n const str = stringify(cookie)\n\n if (str) {\n headers.append('Set-Cookie', stringify(cookie))\n }\n}\n\nwebidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'path',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'domain',\n defaultValue: null\n }\n])\n\nwebidl.converters.Cookie = webidl.dictionaryConverter([\n {\n converter: webidl.converters.DOMString,\n key: 'name'\n },\n {\n converter: webidl.converters.DOMString,\n key: 'value'\n },\n {\n converter: webidl.nullableConverter((value) => {\n if (typeof value === 'number') {\n return webidl.converters['unsigned long long'](value)\n }\n\n return new Date(value)\n }),\n key: 'expires',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters['long long']),\n key: 'maxAge',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'domain',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'path',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.boolean),\n key: 'secure',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.boolean),\n key: 'httpOnly',\n defaultValue: null\n },\n {\n converter: webidl.converters.USVString,\n key: 'sameSite',\n allowedValues: ['Strict', 'Lax', 'None']\n },\n {\n converter: webidl.sequenceConverter(webidl.converters.DOMString),\n key: 'unparsed',\n defaultValue: []\n }\n])\n\nmodule.exports = {\n getCookies,\n deleteCookie,\n getSetCookies,\n setCookie\n}\n","'use strict'\n\nconst { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')\nconst { isCTLExcludingHtab } = require('./util')\nconst { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')\nconst assert = require('assert')\n\n/**\n * @description Parses the field-value attributes of a set-cookie header string.\n * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4\n * @param {string} header\n * @returns if the header is invalid, null will be returned\n */\nfunction parseSetCookie (header) {\n // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F\n // character (CTL characters excluding HTAB): Abort these steps and\n // ignore the set-cookie-string entirely.\n if (isCTLExcludingHtab(header)) {\n return null\n }\n\n let nameValuePair = ''\n let unparsedAttributes = ''\n let name = ''\n let value = ''\n\n // 2. If the set-cookie-string contains a %x3B (\";\") character:\n if (header.includes(';')) {\n // 1. The name-value-pair string consists of the characters up to,\n // but not including, the first %x3B (\";\"), and the unparsed-\n // attributes consist of the remainder of the set-cookie-string\n // (including the %x3B (\";\") in question).\n const position = { position: 0 }\n\n nameValuePair = collectASequenceOfCodePointsFast(';', header, position)\n unparsedAttributes = header.slice(position.position)\n } else {\n // Otherwise:\n\n // 1. 
The name-value-pair string consists of all the characters\n // contained in the set-cookie-string, and the unparsed-\n // attributes is the empty string.\n nameValuePair = header\n }\n\n // 3. If the name-value-pair string lacks a %x3D (\"=\") character, then\n // the name string is empty, and the value string is the value of\n // name-value-pair.\n if (!nameValuePair.includes('=')) {\n value = nameValuePair\n } else {\n // Otherwise, the name string consists of the characters up to, but\n // not including, the first %x3D (\"=\") character, and the (possibly\n // empty) value string consists of the characters after the first\n // %x3D (\"=\") character.\n const position = { position: 0 }\n name = collectASequenceOfCodePointsFast(\n '=',\n nameValuePair,\n position\n )\n value = nameValuePair.slice(position.position + 1)\n }\n\n // 4. Remove any leading or trailing WSP characters from the name\n // string and the value string.\n name = name.trim()\n value = value.trim()\n\n // 5. If the sum of the lengths of the name string and the value string\n // is more than 4096 octets, abort these steps and ignore the set-\n // cookie-string entirely.\n if (name.length + value.length > maxNameValuePairSize) {\n return null\n }\n\n // 6. The cookie-name is the name string, and the cookie-value is the\n // value string.\n return {\n name, value, ...parseUnparsedAttributes(unparsedAttributes)\n }\n}\n\n/**\n * Parses the remaining attributes of a set-cookie header\n * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4\n * @param {string} unparsedAttributes\n * @param {[Object.]={}} cookieAttributeList\n */\nfunction parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {\n // 1. If the unparsed-attributes string is empty, skip the rest of\n // these steps.\n if (unparsedAttributes.length === 0) {\n return cookieAttributeList\n }\n\n // 2. Discard the first character of the unparsed-attributes (which\n // will be a %x3B (\";\") character).\n assert(unparsedAttributes[0] === ';')\n unparsedAttributes = unparsedAttributes.slice(1)\n\n let cookieAv = ''\n\n // 3. If the remaining unparsed-attributes contains a %x3B (\";\")\n // character:\n if (unparsedAttributes.includes(';')) {\n // 1. Consume the characters of the unparsed-attributes up to, but\n // not including, the first %x3B (\";\") character.\n cookieAv = collectASequenceOfCodePointsFast(\n ';',\n unparsedAttributes,\n { position: 0 }\n )\n unparsedAttributes = unparsedAttributes.slice(cookieAv.length)\n } else {\n // Otherwise:\n\n // 1. Consume the remainder of the unparsed-attributes.\n cookieAv = unparsedAttributes\n unparsedAttributes = ''\n }\n\n // Let the cookie-av string be the characters consumed in this step.\n\n let attributeName = ''\n let attributeValue = ''\n\n // 4. If the cookie-av string contains a %x3D (\"=\") character:\n if (cookieAv.includes('=')) {\n // 1. The (possibly empty) attribute-name string consists of the\n // characters up to, but not including, the first %x3D (\"=\")\n // character, and the (possibly empty) attribute-value string\n // consists of the characters after the first %x3D (\"=\")\n // character.\n const position = { position: 0 }\n\n attributeName = collectASequenceOfCodePointsFast(\n '=',\n cookieAv,\n position\n )\n attributeValue = cookieAv.slice(position.position + 1)\n } else {\n // Otherwise:\n\n // 1. 
The attribute-name string consists of the entire cookie-av\n // string, and the attribute-value string is empty.\n attributeName = cookieAv\n }\n\n // 5. Remove any leading or trailing WSP characters from the attribute-\n // name string and the attribute-value string.\n attributeName = attributeName.trim()\n attributeValue = attributeValue.trim()\n\n // 6. If the attribute-value is longer than 1024 octets, ignore the\n // cookie-av string and return to Step 1 of this algorithm.\n if (attributeValue.length > maxAttributeValueSize) {\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n }\n\n // 7. Process the attribute-name and attribute-value according to the\n // requirements in the following subsections. (Notice that\n // attributes with unrecognized attribute-names are ignored.)\n const attributeNameLowercase = attributeName.toLowerCase()\n\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1\n // If the attribute-name case-insensitively matches the string\n // \"Expires\", the user agent MUST process the cookie-av as follows.\n if (attributeNameLowercase === 'expires') {\n // 1. Let the expiry-time be the result of parsing the attribute-value\n // as cookie-date (see Section 5.1.1).\n const expiryTime = new Date(attributeValue)\n\n // 2. If the attribute-value failed to parse as a cookie date, ignore\n // the cookie-av.\n\n cookieAttributeList.expires = expiryTime\n } else if (attributeNameLowercase === 'max-age') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2\n // If the attribute-name case-insensitively matches the string \"Max-\n // Age\", the user agent MUST process the cookie-av as follows.\n\n // 1. If the first character of the attribute-value is not a DIGIT or a\n // \"-\" character, ignore the cookie-av.\n const charCode = attributeValue.charCodeAt(0)\n\n if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n }\n\n // 2. If the remainder of attribute-value contains a non-DIGIT\n // character, ignore the cookie-av.\n if (!/^\\d+$/.test(attributeValue)) {\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n }\n\n // 3. Let delta-seconds be the attribute-value converted to an integer.\n const deltaSeconds = Number(attributeValue)\n\n // 4. Let cookie-age-limit be the maximum age of the cookie (which\n // SHOULD be 400 days or less, see Section 4.1.2.2).\n\n // 5. Set delta-seconds to the smaller of its present value and cookie-\n // age-limit.\n // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)\n\n // 6. If delta-seconds is less than or equal to zero (0), let expiry-\n // time be the earliest representable date and time. Otherwise, let\n // the expiry-time be the current date and time plus delta-seconds\n // seconds.\n // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds\n\n // 7. Append an attribute to the cookie-attribute-list with an\n // attribute-name of Max-Age and an attribute-value of expiry-time.\n cookieAttributeList.maxAge = deltaSeconds\n } else if (attributeNameLowercase === 'domain') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3\n // If the attribute-name case-insensitively matches the string \"Domain\",\n // the user agent MUST process the cookie-av as follows.\n\n // 1. Let cookie-domain be the attribute-value.\n let cookieDomain = attributeValue\n\n // 2. 
If cookie-domain starts with %x2E (\".\"), let cookie-domain be\n // cookie-domain without its leading %x2E (\".\").\n if (cookieDomain[0] === '.') {\n cookieDomain = cookieDomain.slice(1)\n }\n\n // 3. Convert the cookie-domain to lower case.\n cookieDomain = cookieDomain.toLowerCase()\n\n // 4. Append an attribute to the cookie-attribute-list with an\n // attribute-name of Domain and an attribute-value of cookie-domain.\n cookieAttributeList.domain = cookieDomain\n } else if (attributeNameLowercase === 'path') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4\n // If the attribute-name case-insensitively matches the string \"Path\",\n // the user agent MUST process the cookie-av as follows.\n\n // 1. If the attribute-value is empty or if the first character of the\n // attribute-value is not %x2F (\"/\"):\n let cookiePath = ''\n if (attributeValue.length === 0 || attributeValue[0] !== '/') {\n // 1. Let cookie-path be the default-path.\n cookiePath = '/'\n } else {\n // Otherwise:\n\n // 1. Let cookie-path be the attribute-value.\n cookiePath = attributeValue\n }\n\n // 2. Append an attribute to the cookie-attribute-list with an\n // attribute-name of Path and an attribute-value of cookie-path.\n cookieAttributeList.path = cookiePath\n } else if (attributeNameLowercase === 'secure') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5\n // If the attribute-name case-insensitively matches the string \"Secure\",\n // the user agent MUST append an attribute to the cookie-attribute-list\n // with an attribute-name of Secure and an empty attribute-value.\n\n cookieAttributeList.secure = true\n } else if (attributeNameLowercase === 'httponly') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6\n // If the attribute-name case-insensitively matches the string\n // \"HttpOnly\", the user agent MUST append an attribute to the cookie-\n // attribute-list with an attribute-name of HttpOnly and an empty\n // attribute-value.\n\n cookieAttributeList.httpOnly = true\n } else if (attributeNameLowercase === 'samesite') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7\n // If the attribute-name case-insensitively matches the string\n // \"SameSite\", the user agent MUST process the cookie-av as follows:\n\n // 1. Let enforcement be \"Default\".\n let enforcement = 'Default'\n\n const attributeValueLowercase = attributeValue.toLowerCase()\n // 2. If cookie-av's attribute-value is a case-insensitive match for\n // \"None\", set enforcement to \"None\".\n if (attributeValueLowercase.includes('none')) {\n enforcement = 'None'\n }\n\n // 3. If cookie-av's attribute-value is a case-insensitive match for\n // \"Strict\", set enforcement to \"Strict\".\n if (attributeValueLowercase.includes('strict')) {\n enforcement = 'Strict'\n }\n\n // 4. If cookie-av's attribute-value is a case-insensitive match for\n // \"Lax\", set enforcement to \"Lax\".\n if (attributeValueLowercase.includes('lax')) {\n enforcement = 'Lax'\n }\n\n // 5. Append an attribute to the cookie-attribute-list with an\n // attribute-name of \"SameSite\" and an attribute-value of\n // enforcement.\n cookieAttributeList.sameSite = enforcement\n } else {\n cookieAttributeList.unparsed ??= []\n\n cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)\n }\n\n // 8. 
Return to Step 1 of this algorithm.\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n}\n\nmodule.exports = {\n parseSetCookie,\n parseUnparsedAttributes\n}\n","'use strict'\n\nconst assert = require('assert')\nconst { kHeadersList } = require('../core/symbols')\n\nfunction isCTLExcludingHtab (value) {\n if (value.length === 0) {\n return false\n }\n\n for (const char of value) {\n const code = char.charCodeAt(0)\n\n if (\n (code >= 0x00 || code <= 0x08) ||\n (code >= 0x0A || code <= 0x1F) ||\n code === 0x7F\n ) {\n return false\n }\n }\n}\n\n/**\n CHAR = \n token = 1*\n separators = \"(\" | \")\" | \"<\" | \">\" | \"@\"\n | \",\" | \";\" | \":\" | \"\\\" | <\">\n | \"/\" | \"[\" | \"]\" | \"?\" | \"=\"\n | \"{\" | \"}\" | SP | HT\n * @param {string} name\n */\nfunction validateCookieName (name) {\n for (const char of name) {\n const code = char.charCodeAt(0)\n\n if (\n (code <= 0x20 || code > 0x7F) ||\n char === '(' ||\n char === ')' ||\n char === '>' ||\n char === '<' ||\n char === '@' ||\n char === ',' ||\n char === ';' ||\n char === ':' ||\n char === '\\\\' ||\n char === '\"' ||\n char === '/' ||\n char === '[' ||\n char === ']' ||\n char === '?' ||\n char === '=' ||\n char === '{' ||\n char === '}'\n ) {\n throw new Error('Invalid cookie name')\n }\n }\n}\n\n/**\n cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )\n cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E\n ; US-ASCII characters excluding CTLs,\n ; whitespace DQUOTE, comma, semicolon,\n ; and backslash\n * @param {string} value\n */\nfunction validateCookieValue (value) {\n for (const char of value) {\n const code = char.charCodeAt(0)\n\n if (\n code < 0x21 || // exclude CTLs (0-31)\n code === 0x22 ||\n code === 0x2C ||\n code === 0x3B ||\n code === 0x5C ||\n code > 0x7E // non-ascii\n ) {\n throw new Error('Invalid header value')\n }\n }\n}\n\n/**\n * path-value = \n * @param {string} path\n */\nfunction validateCookiePath (path) {\n for (const char of path) {\n const code = char.charCodeAt(0)\n\n if (code < 0x21 || char === ';') {\n throw new Error('Invalid cookie path')\n }\n }\n}\n\n/**\n * I have no idea why these values aren't allowed to be honest,\n * but Deno tests these. 
- Khafra\n * @param {string} domain\n */\nfunction validateCookieDomain (domain) {\n if (\n domain.startsWith('-') ||\n domain.endsWith('.') ||\n domain.endsWith('-')\n ) {\n throw new Error('Invalid cookie domain')\n }\n}\n\n/**\n * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1\n * @param {number|Date} date\n IMF-fixdate = day-name \",\" SP date1 SP time-of-day SP GMT\n ; fixed length/zone/capitalization subset of the format\n ; see Section 3.3 of [RFC5322]\n\n day-name = %x4D.6F.6E ; \"Mon\", case-sensitive\n / %x54.75.65 ; \"Tue\", case-sensitive\n / %x57.65.64 ; \"Wed\", case-sensitive\n / %x54.68.75 ; \"Thu\", case-sensitive\n / %x46.72.69 ; \"Fri\", case-sensitive\n / %x53.61.74 ; \"Sat\", case-sensitive\n / %x53.75.6E ; \"Sun\", case-sensitive\n date1 = day SP month SP year\n ; e.g., 02 Jun 1982\n\n day = 2DIGIT\n month = %x4A.61.6E ; \"Jan\", case-sensitive\n / %x46.65.62 ; \"Feb\", case-sensitive\n / %x4D.61.72 ; \"Mar\", case-sensitive\n / %x41.70.72 ; \"Apr\", case-sensitive\n / %x4D.61.79 ; \"May\", case-sensitive\n / %x4A.75.6E ; \"Jun\", case-sensitive\n / %x4A.75.6C ; \"Jul\", case-sensitive\n / %x41.75.67 ; \"Aug\", case-sensitive\n / %x53.65.70 ; \"Sep\", case-sensitive\n / %x4F.63.74 ; \"Oct\", case-sensitive\n / %x4E.6F.76 ; \"Nov\", case-sensitive\n / %x44.65.63 ; \"Dec\", case-sensitive\n year = 4DIGIT\n\n GMT = %x47.4D.54 ; \"GMT\", case-sensitive\n\n time-of-day = hour \":\" minute \":\" second\n ; 00:00:00 - 23:59:60 (leap second)\n\n hour = 2DIGIT\n minute = 2DIGIT\n second = 2DIGIT\n */\nfunction toIMFDate (date) {\n if (typeof date === 'number') {\n date = new Date(date)\n }\n\n const days = [\n 'Sun', 'Mon', 'Tue', 'Wed',\n 'Thu', 'Fri', 'Sat'\n ]\n\n const months = [\n 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',\n 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'\n ]\n\n const dayName = days[date.getUTCDay()]\n const day = date.getUTCDate().toString().padStart(2, '0')\n const month = months[date.getUTCMonth()]\n const year = date.getUTCFullYear()\n const hour = date.getUTCHours().toString().padStart(2, '0')\n const minute = date.getUTCMinutes().toString().padStart(2, '0')\n const second = date.getUTCSeconds().toString().padStart(2, '0')\n\n return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`\n}\n\n/**\n max-age-av = \"Max-Age=\" non-zero-digit *DIGIT\n ; In practice, both expires-av and max-age-av\n ; are limited to dates representable by the\n ; user agent.\n * @param {number} maxAge\n */\nfunction validateCookieMaxAge (maxAge) {\n if (maxAge < 0) {\n throw new Error('Invalid cookie max-age')\n }\n}\n\n/**\n * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1\n * @param {import('./index').Cookie} cookie\n */\nfunction stringify (cookie) {\n if (cookie.name.length === 0) {\n return null\n }\n\n validateCookieName(cookie.name)\n validateCookieValue(cookie.value)\n\n const out = [`${cookie.name}=${cookie.value}`]\n\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2\n if (cookie.name.startsWith('__Secure-')) {\n cookie.secure = true\n }\n\n if (cookie.name.startsWith('__Host-')) {\n cookie.secure = true\n cookie.domain = null\n cookie.path = '/'\n }\n\n if (cookie.secure) {\n out.push('Secure')\n }\n\n if (cookie.httpOnly) {\n out.push('HttpOnly')\n }\n\n if (typeof cookie.maxAge === 'number') {\n validateCookieMaxAge(cookie.maxAge)\n out.push(`Max-Age=${cookie.maxAge}`)\n }\n\n if 
(cookie.domain) {\n validateCookieDomain(cookie.domain)\n out.push(`Domain=${cookie.domain}`)\n }\n\n if (cookie.path) {\n validateCookiePath(cookie.path)\n out.push(`Path=${cookie.path}`)\n }\n\n if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {\n out.push(`Expires=${toIMFDate(cookie.expires)}`)\n }\n\n if (cookie.sameSite) {\n out.push(`SameSite=${cookie.sameSite}`)\n }\n\n for (const part of cookie.unparsed) {\n if (!part.includes('=')) {\n throw new Error('Invalid unparsed')\n }\n\n const [key, ...value] = part.split('=')\n\n out.push(`${key.trim()}=${value.join('=')}`)\n }\n\n return out.join('; ')\n}\n\nlet kHeadersListNode\n\nfunction getHeadersList (headers) {\n if (headers[kHeadersList]) {\n return headers[kHeadersList]\n }\n\n if (!kHeadersListNode) {\n kHeadersListNode = Object.getOwnPropertySymbols(headers).find(\n (symbol) => symbol.description === 'headers list'\n )\n\n assert(kHeadersListNode, 'Headers cannot be parsed')\n }\n\n const headersList = headers[kHeadersListNode]\n assert(headersList)\n\n return headersList\n}\n\nmodule.exports = {\n isCTLExcludingHtab,\n stringify,\n getHeadersList\n}\n","'use strict'\n\nconst net = require('net')\nconst assert = require('assert')\nconst util = require('./util')\nconst { InvalidArgumentError, ConnectTimeoutError } = require('./errors')\n\nlet tls // include tls conditionally since it is not always available\n\n// TODO: session re-use does not wait for the first\n// connection to resolve the session and might therefore\n// resolve the same servername multiple times even when\n// re-use is enabled.\n\nlet SessionCache\n// FIXME: remove workaround when the Node bug is fixed\n// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308\nif (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {\n SessionCache = class WeakSessionCache {\n constructor (maxCachedSessions) {\n this._maxCachedSessions = maxCachedSessions\n this._sessionCache = new Map()\n this._sessionRegistry = new global.FinalizationRegistry((key) => {\n if (this._sessionCache.size < this._maxCachedSessions) {\n return\n }\n\n const ref = this._sessionCache.get(key)\n if (ref !== undefined && ref.deref() === undefined) {\n this._sessionCache.delete(key)\n }\n })\n }\n\n get (sessionKey) {\n const ref = this._sessionCache.get(sessionKey)\n return ref ? 
ref.deref() : null\n }\n\n set (sessionKey, session) {\n if (this._maxCachedSessions === 0) {\n return\n }\n\n this._sessionCache.set(sessionKey, new WeakRef(session))\n this._sessionRegistry.register(session, sessionKey)\n }\n }\n} else {\n SessionCache = class SimpleSessionCache {\n constructor (maxCachedSessions) {\n this._maxCachedSessions = maxCachedSessions\n this._sessionCache = new Map()\n }\n\n get (sessionKey) {\n return this._sessionCache.get(sessionKey)\n }\n\n set (sessionKey, session) {\n if (this._maxCachedSessions === 0) {\n return\n }\n\n if (this._sessionCache.size >= this._maxCachedSessions) {\n // remove the oldest session\n const { value: oldestKey } = this._sessionCache.keys().next()\n this._sessionCache.delete(oldestKey)\n }\n\n this._sessionCache.set(sessionKey, session)\n }\n }\n}\n\nfunction buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {\n if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {\n throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')\n }\n\n const options = { path: socketPath, ...opts }\n const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)\n timeout = timeout == null ? 10e3 : timeout\n allowH2 = allowH2 != null ? allowH2 : false\n return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {\n let socket\n if (protocol === 'https:') {\n if (!tls) {\n tls = require('tls')\n }\n servername = servername || options.servername || util.getServerName(host) || null\n\n const sessionKey = servername || hostname\n const session = sessionCache.get(sessionKey) || null\n\n assert(sessionKey)\n\n socket = tls.connect({\n highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...\n ...options,\n servername,\n session,\n localAddress,\n // TODO(HTTP/2): Add support for h2c\n ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],\n socket: httpSocket, // upgrade socket connection\n port: port || 443,\n host: hostname\n })\n\n socket\n .on('session', function (session) {\n // TODO (fix): Can a session become invalid once established? Don't think so?\n sessionCache.set(sessionKey, session)\n })\n } else {\n assert(!httpSocket, 'httpSocket can only be sent on TLS update')\n socket = net.connect({\n highWaterMark: 64 * 1024, // Same as nodejs fs streams.\n ...options,\n localAddress,\n port: port || 80,\n host: hostname\n })\n }\n\n // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket\n if (options.keepAlive == null || options.keepAlive) {\n const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay\n socket.setKeepAlive(true, keepAliveInitialDelay)\n }\n\n const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)\n\n socket\n .setNoDelay(true)\n .once(protocol === 'https:' ? 
'secureConnect' : 'connect', function () {\n cancelTimeout()\n\n if (callback) {\n const cb = callback\n callback = null\n cb(null, this)\n }\n })\n .on('error', function (err) {\n cancelTimeout()\n\n if (callback) {\n const cb = callback\n callback = null\n cb(err)\n }\n })\n\n return socket\n }\n}\n\nfunction setupTimeout (onConnectTimeout, timeout) {\n if (!timeout) {\n return () => {}\n }\n\n let s1 = null\n let s2 = null\n const timeoutId = setTimeout(() => {\n // setImmediate is added to make sure that we priotorise socket error events over timeouts\n s1 = setImmediate(() => {\n if (process.platform === 'win32') {\n // Windows needs an extra setImmediate probably due to implementation differences in the socket logic\n s2 = setImmediate(() => onConnectTimeout())\n } else {\n onConnectTimeout()\n }\n })\n }, timeout)\n return () => {\n clearTimeout(timeoutId)\n clearImmediate(s1)\n clearImmediate(s2)\n }\n}\n\nfunction onConnectTimeout (socket) {\n util.destroy(socket, new ConnectTimeoutError())\n}\n\nmodule.exports = buildConnector\n","'use strict'\n\nclass UndiciError extends Error {\n constructor (message) {\n super(message)\n this.name = 'UndiciError'\n this.code = 'UND_ERR'\n }\n}\n\nclass ConnectTimeoutError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ConnectTimeoutError)\n this.name = 'ConnectTimeoutError'\n this.message = message || 'Connect Timeout Error'\n this.code = 'UND_ERR_CONNECT_TIMEOUT'\n }\n}\n\nclass HeadersTimeoutError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, HeadersTimeoutError)\n this.name = 'HeadersTimeoutError'\n this.message = message || 'Headers Timeout Error'\n this.code = 'UND_ERR_HEADERS_TIMEOUT'\n }\n}\n\nclass HeadersOverflowError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, HeadersOverflowError)\n this.name = 'HeadersOverflowError'\n this.message = message || 'Headers Overflow Error'\n this.code = 'UND_ERR_HEADERS_OVERFLOW'\n }\n}\n\nclass BodyTimeoutError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, BodyTimeoutError)\n this.name = 'BodyTimeoutError'\n this.message = message || 'Body Timeout Error'\n this.code = 'UND_ERR_BODY_TIMEOUT'\n }\n}\n\nclass ResponseStatusCodeError extends UndiciError {\n constructor (message, statusCode, headers, body) {\n super(message)\n Error.captureStackTrace(this, ResponseStatusCodeError)\n this.name = 'ResponseStatusCodeError'\n this.message = message || 'Response Status Code Error'\n this.code = 'UND_ERR_RESPONSE_STATUS_CODE'\n this.body = body\n this.status = statusCode\n this.statusCode = statusCode\n this.headers = headers\n }\n}\n\nclass InvalidArgumentError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, InvalidArgumentError)\n this.name = 'InvalidArgumentError'\n this.message = message || 'Invalid Argument Error'\n this.code = 'UND_ERR_INVALID_ARG'\n }\n}\n\nclass InvalidReturnValueError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, InvalidReturnValueError)\n this.name = 'InvalidReturnValueError'\n this.message = message || 'Invalid Return Value Error'\n this.code = 'UND_ERR_INVALID_RETURN_VALUE'\n }\n}\n\nclass RequestAbortedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, RequestAbortedError)\n this.name = 'AbortError'\n this.message = message || 
'Request aborted'\n this.code = 'UND_ERR_ABORTED'\n }\n}\n\nclass InformationalError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, InformationalError)\n this.name = 'InformationalError'\n this.message = message || 'Request information'\n this.code = 'UND_ERR_INFO'\n }\n}\n\nclass RequestContentLengthMismatchError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, RequestContentLengthMismatchError)\n this.name = 'RequestContentLengthMismatchError'\n this.message = message || 'Request body length does not match content-length header'\n this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'\n }\n}\n\nclass ResponseContentLengthMismatchError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ResponseContentLengthMismatchError)\n this.name = 'ResponseContentLengthMismatchError'\n this.message = message || 'Response body length does not match content-length header'\n this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'\n }\n}\n\nclass ClientDestroyedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ClientDestroyedError)\n this.name = 'ClientDestroyedError'\n this.message = message || 'The client is destroyed'\n this.code = 'UND_ERR_DESTROYED'\n }\n}\n\nclass ClientClosedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ClientClosedError)\n this.name = 'ClientClosedError'\n this.message = message || 'The client is closed'\n this.code = 'UND_ERR_CLOSED'\n }\n}\n\nclass SocketError extends UndiciError {\n constructor (message, socket) {\n super(message)\n Error.captureStackTrace(this, SocketError)\n this.name = 'SocketError'\n this.message = message || 'Socket error'\n this.code = 'UND_ERR_SOCKET'\n this.socket = socket\n }\n}\n\nclass NotSupportedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, NotSupportedError)\n this.name = 'NotSupportedError'\n this.message = message || 'Not supported error'\n this.code = 'UND_ERR_NOT_SUPPORTED'\n }\n}\n\nclass BalancedPoolMissingUpstreamError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, NotSupportedError)\n this.name = 'MissingUpstreamError'\n this.message = message || 'No upstream has been added to the BalancedPool'\n this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'\n }\n}\n\nclass HTTPParserError extends Error {\n constructor (message, code, data) {\n super(message)\n Error.captureStackTrace(this, HTTPParserError)\n this.name = 'HTTPParserError'\n this.code = code ? `HPE_${code}` : undefined\n this.data = data ? 
data.toString() : undefined\n }\n}\n\nclass ResponseExceededMaxSizeError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ResponseExceededMaxSizeError)\n this.name = 'ResponseExceededMaxSizeError'\n this.message = message || 'Response content exceeded max size'\n this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'\n }\n}\n\nclass RequestRetryError extends UndiciError {\n constructor (message, code, { headers, data }) {\n super(message)\n Error.captureStackTrace(this, RequestRetryError)\n this.name = 'RequestRetryError'\n this.message = message || 'Request retry error'\n this.code = 'UND_ERR_REQ_RETRY'\n this.statusCode = code\n this.data = data\n this.headers = headers\n }\n}\n\nmodule.exports = {\n HTTPParserError,\n UndiciError,\n HeadersTimeoutError,\n HeadersOverflowError,\n BodyTimeoutError,\n RequestContentLengthMismatchError,\n ConnectTimeoutError,\n ResponseStatusCodeError,\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError,\n ClientDestroyedError,\n ClientClosedError,\n InformationalError,\n SocketError,\n NotSupportedError,\n ResponseContentLengthMismatchError,\n BalancedPoolMissingUpstreamError,\n ResponseExceededMaxSizeError,\n RequestRetryError\n}\n","'use strict'\n\nconst {\n InvalidArgumentError,\n NotSupportedError\n} = require('./errors')\nconst assert = require('assert')\nconst { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')\nconst util = require('./util')\n\n// tokenRegExp and headerCharRegex have been lifted from\n// https://github.com/nodejs/node/blob/main/lib/_http_common.js\n\n/**\n * Verifies that the given val is a valid HTTP token\n * per the rules defined in RFC 7230\n * See https://tools.ietf.org/html/rfc7230#section-3.2.6\n */\nconst tokenRegExp = /^[\\^_`a-zA-Z\\-0-9!#$%&'*+.|~]+$/\n\n/**\n * Matches if val contains an invalid field-vchar\n * field-value = *( field-content / obs-fold )\n * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]\n * field-vchar = VCHAR / obs-text\n */\nconst headerCharRegex = /[^\\t\\x20-\\x7e\\x80-\\xff]/\n\n// Verifies that a given path is valid does not contain control chars \\x00 to \\x20\nconst invalidPathRegex = /[^\\u0021-\\u00ff]/\n\nconst kHandler = Symbol('handler')\n\nconst channels = {}\n\nlet extractBody\n\ntry {\n const diagnosticsChannel = require('diagnostics_channel')\n channels.create = diagnosticsChannel.channel('undici:request:create')\n channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')\n channels.headers = diagnosticsChannel.channel('undici:request:headers')\n channels.trailers = diagnosticsChannel.channel('undici:request:trailers')\n channels.error = diagnosticsChannel.channel('undici:request:error')\n} catch {\n channels.create = { hasSubscribers: false }\n channels.bodySent = { hasSubscribers: false }\n channels.headers = { hasSubscribers: false }\n channels.trailers = { hasSubscribers: false }\n channels.error = { hasSubscribers: false }\n}\n\nclass Request {\n constructor (origin, {\n path,\n method,\n body,\n headers,\n query,\n idempotent,\n blocking,\n upgrade,\n headersTimeout,\n bodyTimeout,\n reset,\n throwOnError,\n expectContinue\n }, handler) {\n if (typeof path !== 'string') {\n throw new InvalidArgumentError('path must be a string')\n } else if (\n path[0] !== '/' &&\n !(path.startsWith('http://') || path.startsWith('https://')) &&\n method !== 'CONNECT'\n ) {\n throw new InvalidArgumentError('path must be an absolute URL or start with a slash')\n } else if 
(invalidPathRegex.exec(path) !== null) {\n throw new InvalidArgumentError('invalid request path')\n }\n\n if (typeof method !== 'string') {\n throw new InvalidArgumentError('method must be a string')\n } else if (tokenRegExp.exec(method) === null) {\n throw new InvalidArgumentError('invalid request method')\n }\n\n if (upgrade && typeof upgrade !== 'string') {\n throw new InvalidArgumentError('upgrade must be a string')\n }\n\n if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {\n throw new InvalidArgumentError('invalid headersTimeout')\n }\n\n if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {\n throw new InvalidArgumentError('invalid bodyTimeout')\n }\n\n if (reset != null && typeof reset !== 'boolean') {\n throw new InvalidArgumentError('invalid reset')\n }\n\n if (expectContinue != null && typeof expectContinue !== 'boolean') {\n throw new InvalidArgumentError('invalid expectContinue')\n }\n\n this.headersTimeout = headersTimeout\n\n this.bodyTimeout = bodyTimeout\n\n this.throwOnError = throwOnError === true\n\n this.method = method\n\n this.abort = null\n\n if (body == null) {\n this.body = null\n } else if (util.isStream(body)) {\n this.body = body\n\n const rState = this.body._readableState\n if (!rState || !rState.autoDestroy) {\n this.endHandler = function autoDestroy () {\n util.destroy(this)\n }\n this.body.on('end', this.endHandler)\n }\n\n this.errorHandler = err => {\n if (this.abort) {\n this.abort(err)\n } else {\n this.error = err\n }\n }\n this.body.on('error', this.errorHandler)\n } else if (util.isBuffer(body)) {\n this.body = body.byteLength ? body : null\n } else if (ArrayBuffer.isView(body)) {\n this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null\n } else if (body instanceof ArrayBuffer) {\n this.body = body.byteLength ? Buffer.from(body) : null\n } else if (typeof body === 'string') {\n this.body = body.length ? Buffer.from(body) : null\n } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {\n this.body = body\n } else {\n throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')\n }\n\n this.completed = false\n\n this.aborted = false\n\n this.upgrade = upgrade || null\n\n this.path = query ? util.buildURL(path, query) : path\n\n this.origin = origin\n\n this.idempotent = idempotent == null\n ? method === 'HEAD' || method === 'GET'\n : idempotent\n\n this.blocking = blocking == null ? false : blocking\n\n this.reset = reset == null ? null : reset\n\n this.host = null\n\n this.contentLength = null\n\n this.contentType = null\n\n this.headers = ''\n\n // Only for H2\n this.expectContinue = expectContinue != null ? 
expectContinue : false\n\n if (Array.isArray(headers)) {\n if (headers.length % 2 !== 0) {\n throw new InvalidArgumentError('headers array must be even')\n }\n for (let i = 0; i < headers.length; i += 2) {\n processHeader(this, headers[i], headers[i + 1])\n }\n } else if (headers && typeof headers === 'object') {\n const keys = Object.keys(headers)\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i]\n processHeader(this, key, headers[key])\n }\n } else if (headers != null) {\n throw new InvalidArgumentError('headers must be an object or an array')\n }\n\n if (util.isFormDataLike(this.body)) {\n if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {\n throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')\n }\n\n if (!extractBody) {\n extractBody = require('../fetch/body.js').extractBody\n }\n\n const [bodyStream, contentType] = extractBody(body)\n if (this.contentType == null) {\n this.contentType = contentType\n this.headers += `content-type: ${contentType}\\r\\n`\n }\n this.body = bodyStream.stream\n this.contentLength = bodyStream.length\n } else if (util.isBlobLike(body) && this.contentType == null && body.type) {\n this.contentType = body.type\n this.headers += `content-type: ${body.type}\\r\\n`\n }\n\n util.validateHandler(handler, method, upgrade)\n\n this.servername = util.getServerName(this.host)\n\n this[kHandler] = handler\n\n if (channels.create.hasSubscribers) {\n channels.create.publish({ request: this })\n }\n }\n\n onBodySent (chunk) {\n if (this[kHandler].onBodySent) {\n try {\n return this[kHandler].onBodySent(chunk)\n } catch (err) {\n this.abort(err)\n }\n }\n }\n\n onRequestSent () {\n if (channels.bodySent.hasSubscribers) {\n channels.bodySent.publish({ request: this })\n }\n\n if (this[kHandler].onRequestSent) {\n try {\n return this[kHandler].onRequestSent()\n } catch (err) {\n this.abort(err)\n }\n }\n }\n\n onConnect (abort) {\n assert(!this.aborted)\n assert(!this.completed)\n\n if (this.error) {\n abort(this.error)\n } else {\n this.abort = abort\n return this[kHandler].onConnect(abort)\n }\n }\n\n onHeaders (statusCode, headers, resume, statusText) {\n assert(!this.aborted)\n assert(!this.completed)\n\n if (channels.headers.hasSubscribers) {\n channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })\n }\n\n try {\n return this[kHandler].onHeaders(statusCode, headers, resume, statusText)\n } catch (err) {\n this.abort(err)\n }\n }\n\n onData (chunk) {\n assert(!this.aborted)\n assert(!this.completed)\n\n try {\n return this[kHandler].onData(chunk)\n } catch (err) {\n this.abort(err)\n return false\n }\n }\n\n onUpgrade (statusCode, headers, socket) {\n assert(!this.aborted)\n assert(!this.completed)\n\n return this[kHandler].onUpgrade(statusCode, headers, socket)\n }\n\n onComplete (trailers) {\n this.onFinally()\n\n assert(!this.aborted)\n\n this.completed = true\n if (channels.trailers.hasSubscribers) {\n channels.trailers.publish({ request: this, trailers })\n }\n\n try {\n return this[kHandler].onComplete(trailers)\n } catch (err) {\n // TODO (fix): This might be a bad idea?\n this.onError(err)\n }\n }\n\n onError (error) {\n this.onFinally()\n\n if (channels.error.hasSubscribers) {\n channels.error.publish({ request: this, error })\n }\n\n if (this.aborted) {\n return\n }\n this.aborted = true\n\n return this[kHandler].onError(error)\n }\n\n onFinally () {\n if (this.errorHandler) {\n this.body.off('error', this.errorHandler)\n this.errorHandler = 
null\n }\n\n if (this.endHandler) {\n this.body.off('end', this.endHandler)\n this.endHandler = null\n }\n }\n\n // TODO: adjust to support H2\n addHeader (key, value) {\n processHeader(this, key, value)\n return this\n }\n\n static [kHTTP1BuildRequest] (origin, opts, handler) {\n // TODO: Migrate header parsing here, to make Requests\n // HTTP agnostic\n return new Request(origin, opts, handler)\n }\n\n static [kHTTP2BuildRequest] (origin, opts, handler) {\n const headers = opts.headers\n opts = { ...opts, headers: null }\n\n const request = new Request(origin, opts, handler)\n\n request.headers = {}\n\n if (Array.isArray(headers)) {\n if (headers.length % 2 !== 0) {\n throw new InvalidArgumentError('headers array must be even')\n }\n for (let i = 0; i < headers.length; i += 2) {\n processHeader(request, headers[i], headers[i + 1], true)\n }\n } else if (headers && typeof headers === 'object') {\n const keys = Object.keys(headers)\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i]\n processHeader(request, key, headers[key], true)\n }\n } else if (headers != null) {\n throw new InvalidArgumentError('headers must be an object or an array')\n }\n\n return request\n }\n\n static [kHTTP2CopyHeaders] (raw) {\n const rawHeaders = raw.split('\\r\\n')\n const headers = {}\n\n for (const header of rawHeaders) {\n const [key, value] = header.split(': ')\n\n if (value == null || value.length === 0) continue\n\n if (headers[key]) headers[key] += `,${value}`\n else headers[key] = value\n }\n\n return headers\n }\n}\n\nfunction processHeaderValue (key, val, skipAppend) {\n if (val && typeof val === 'object') {\n throw new InvalidArgumentError(`invalid ${key} header`)\n }\n\n val = val != null ? `${val}` : ''\n\n if (headerCharRegex.exec(val) !== null) {\n throw new InvalidArgumentError(`invalid ${key} header`)\n }\n\n return skipAppend ? val : `${key}: ${val}\\r\\n`\n}\n\nfunction processHeader (request, key, val, skipAppend = false) {\n if (val && (typeof val === 'object' && !Array.isArray(val))) {\n throw new InvalidArgumentError(`invalid ${key} header`)\n } else if (val === undefined) {\n return\n }\n\n if (\n request.host === null &&\n key.length === 4 &&\n key.toLowerCase() === 'host'\n ) {\n if (headerCharRegex.exec(val) !== null) {\n throw new InvalidArgumentError(`invalid ${key} header`)\n }\n // Consumed by Client\n request.host = val\n } else if (\n request.contentLength === null &&\n key.length === 14 &&\n key.toLowerCase() === 'content-length'\n ) {\n request.contentLength = parseInt(val, 10)\n if (!Number.isFinite(request.contentLength)) {\n throw new InvalidArgumentError('invalid content-length header')\n }\n } else if (\n request.contentType === null &&\n key.length === 12 &&\n key.toLowerCase() === 'content-type'\n ) {\n request.contentType = val\n if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)\n else request.headers += processHeaderValue(key, val)\n } else if (\n key.length === 17 &&\n key.toLowerCase() === 'transfer-encoding'\n ) {\n throw new InvalidArgumentError('invalid transfer-encoding header')\n } else if (\n key.length === 10 &&\n key.toLowerCase() === 'connection'\n ) {\n const value = typeof val === 'string' ? 
val.toLowerCase() : null\n if (value !== 'close' && value !== 'keep-alive') {\n throw new InvalidArgumentError('invalid connection header')\n } else if (value === 'close') {\n request.reset = true\n }\n } else if (\n key.length === 10 &&\n key.toLowerCase() === 'keep-alive'\n ) {\n throw new InvalidArgumentError('invalid keep-alive header')\n } else if (\n key.length === 7 &&\n key.toLowerCase() === 'upgrade'\n ) {\n throw new InvalidArgumentError('invalid upgrade header')\n } else if (\n key.length === 6 &&\n key.toLowerCase() === 'expect'\n ) {\n throw new NotSupportedError('expect header not supported')\n } else if (tokenRegExp.exec(key) === null) {\n throw new InvalidArgumentError('invalid header key')\n } else {\n if (Array.isArray(val)) {\n for (let i = 0; i < val.length; i++) {\n if (skipAppend) {\n if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`\n else request.headers[key] = processHeaderValue(key, val[i], skipAppend)\n } else {\n request.headers += processHeaderValue(key, val[i])\n }\n }\n } else {\n if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)\n else request.headers += processHeaderValue(key, val)\n }\n }\n}\n\nmodule.exports = Request\n","module.exports = {\n kClose: Symbol('close'),\n kDestroy: Symbol('destroy'),\n kDispatch: Symbol('dispatch'),\n kUrl: Symbol('url'),\n kWriting: Symbol('writing'),\n kResuming: Symbol('resuming'),\n kQueue: Symbol('queue'),\n kConnect: Symbol('connect'),\n kConnecting: Symbol('connecting'),\n kHeadersList: Symbol('headers list'),\n kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),\n kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),\n kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),\n kKeepAliveTimeoutValue: Symbol('keep alive timeout'),\n kKeepAlive: Symbol('keep alive'),\n kHeadersTimeout: Symbol('headers timeout'),\n kBodyTimeout: Symbol('body timeout'),\n kServerName: Symbol('server name'),\n kLocalAddress: Symbol('local address'),\n kHost: Symbol('host'),\n kNoRef: Symbol('no ref'),\n kBodyUsed: Symbol('used'),\n kRunning: Symbol('running'),\n kBlocking: Symbol('blocking'),\n kPending: Symbol('pending'),\n kSize: Symbol('size'),\n kBusy: Symbol('busy'),\n kQueued: Symbol('queued'),\n kFree: Symbol('free'),\n kConnected: Symbol('connected'),\n kClosed: Symbol('closed'),\n kNeedDrain: Symbol('need drain'),\n kReset: Symbol('reset'),\n kDestroyed: Symbol.for('nodejs.stream.destroyed'),\n kMaxHeadersSize: Symbol('max headers size'),\n kRunningIdx: Symbol('running index'),\n kPendingIdx: Symbol('pending index'),\n kError: Symbol('error'),\n kClients: Symbol('clients'),\n kClient: Symbol('client'),\n kParser: Symbol('parser'),\n kOnDestroyed: Symbol('destroy callbacks'),\n kPipelining: Symbol('pipelining'),\n kSocket: Symbol('socket'),\n kHostHeader: Symbol('host header'),\n kConnector: Symbol('connector'),\n kStrictContentLength: Symbol('strict content length'),\n kMaxRedirections: Symbol('maxRedirections'),\n kMaxRequests: Symbol('maxRequestsPerClient'),\n kProxy: Symbol('proxy agent options'),\n kCounter: Symbol('socket request counter'),\n kInterceptors: Symbol('dispatch interceptors'),\n kMaxResponseSize: Symbol('max response size'),\n kHTTP2Session: Symbol('http2Session'),\n kHTTP2SessionState: Symbol('http2Session state'),\n kHTTP2BuildRequest: Symbol('http2 build request'),\n kHTTP1BuildRequest: Symbol('http1 build request'),\n kHTTP2CopyHeaders: Symbol('http2 copy headers'),\n kHTTPConnVersion: Symbol('http 
connection version'),\n kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),\n kConstruct: Symbol('constructable')\n}\n","'use strict'\n\nconst assert = require('assert')\nconst { kDestroyed, kBodyUsed } = require('./symbols')\nconst { IncomingMessage } = require('http')\nconst stream = require('stream')\nconst net = require('net')\nconst { InvalidArgumentError } = require('./errors')\nconst { Blob } = require('buffer')\nconst nodeUtil = require('util')\nconst { stringify } = require('querystring')\n\nconst [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))\n\nfunction nop () {}\n\nfunction isStream (obj) {\n return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'\n}\n\n// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)\nfunction isBlobLike (object) {\n return (Blob && object instanceof Blob) || (\n object &&\n typeof object === 'object' &&\n (typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function') &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n}\n\nfunction buildURL (url, queryParams) {\n if (url.includes('?') || url.includes('#')) {\n throw new Error('Query params cannot be passed when url already contains \"?\" or \"#\".')\n }\n\n const stringified = stringify(queryParams)\n\n if (stringified) {\n url += '?' + stringified\n }\n\n return url\n}\n\nfunction parseURL (url) {\n if (typeof url === 'string') {\n url = new URL(url)\n\n if (!/^https?:/.test(url.origin || url.protocol)) {\n throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')\n }\n\n return url\n }\n\n if (!url || typeof url !== 'object') {\n throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')\n }\n\n if (!/^https?:/.test(url.origin || url.protocol)) {\n throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')\n }\n\n if (!(url instanceof URL)) {\n if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {\n throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')\n }\n\n if (url.path != null && typeof url.path !== 'string') {\n throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')\n }\n\n if (url.pathname != null && typeof url.pathname !== 'string') {\n throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')\n }\n\n if (url.hostname != null && typeof url.hostname !== 'string') {\n throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')\n }\n\n if (url.origin != null && typeof url.origin !== 'string') {\n throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')\n }\n\n const port = url.port != null\n ? url.port\n : (url.protocol === 'https:' ? 443 : 80)\n let origin = url.origin != null\n ? url.origin\n : `${url.protocol}//${url.hostname}:${port}`\n let path = url.path != null\n ? 
url.path\n : `${url.pathname || ''}${url.search || ''}`\n\n if (origin.endsWith('/')) {\n origin = origin.substring(0, origin.length - 1)\n }\n\n if (path && !path.startsWith('/')) {\n path = `/${path}`\n }\n // new URL(path, origin) is unsafe when `path` contains an absolute URL\n // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:\n // If first parameter is a relative URL, second param is required, and will be used as the base URL.\n // If first parameter is an absolute URL, a given second param will be ignored.\n url = new URL(origin + path)\n }\n\n return url\n}\n\nfunction parseOrigin (url) {\n url = parseURL(url)\n\n if (url.pathname !== '/' || url.search || url.hash) {\n throw new InvalidArgumentError('invalid url')\n }\n\n return url\n}\n\nfunction getHostname (host) {\n if (host[0] === '[') {\n const idx = host.indexOf(']')\n\n assert(idx !== -1)\n return host.substring(1, idx)\n }\n\n const idx = host.indexOf(':')\n if (idx === -1) return host\n\n return host.substring(0, idx)\n}\n\n// IP addresses are not valid server names per RFC6066\n// > Currently, the only server names supported are DNS hostnames\nfunction getServerName (host) {\n if (!host) {\n return null\n }\n\n assert.strictEqual(typeof host, 'string')\n\n const servername = getHostname(host)\n if (net.isIP(servername)) {\n return ''\n }\n\n return servername\n}\n\nfunction deepClone (obj) {\n return JSON.parse(JSON.stringify(obj))\n}\n\nfunction isAsyncIterable (obj) {\n return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')\n}\n\nfunction isIterable (obj) {\n return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))\n}\n\nfunction bodyLength (body) {\n if (body == null) {\n return 0\n } else if (isStream(body)) {\n const state = body._readableState\n return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)\n ? state.length\n : null\n } else if (isBlobLike(body)) {\n return body.size != null ? body.size : null\n } else if (isBuffer(body)) {\n return body.byteLength\n }\n\n return null\n}\n\nfunction isDestroyed (stream) {\n return !stream || !!(stream.destroyed || stream[kDestroyed])\n}\n\nfunction isReadableAborted (stream) {\n const state = stream && stream._readableState\n return isDestroyed(stream) && state && !state.endEmitted\n}\n\nfunction destroy (stream, err) {\n if (stream == null || !isStream(stream) || isDestroyed(stream)) {\n return\n }\n\n if (typeof stream.destroy === 'function') {\n if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {\n // See: https://github.com/nodejs/node/pull/38505/files\n stream.socket = null\n }\n\n stream.destroy(err)\n } else if (err) {\n process.nextTick((stream, err) => {\n stream.emit('error', err)\n }, stream, err)\n }\n\n if (stream.destroyed !== true) {\n stream[kDestroyed] = true\n }\n}\n\nconst KEEPALIVE_TIMEOUT_EXPR = /timeout=(\\d+)/\nfunction parseKeepAliveTimeout (val) {\n const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)\n return m ? 
parseInt(m[1], 10) * 1000 : null\n}\n\nfunction parseHeaders (headers, obj = {}) {\n // For H2 support\n if (!Array.isArray(headers)) return headers\n\n for (let i = 0; i < headers.length; i += 2) {\n const key = headers[i].toString().toLowerCase()\n let val = obj[key]\n\n if (!val) {\n if (Array.isArray(headers[i + 1])) {\n obj[key] = headers[i + 1].map(x => x.toString('utf8'))\n } else {\n obj[key] = headers[i + 1].toString('utf8')\n }\n } else {\n if (!Array.isArray(val)) {\n val = [val]\n obj[key] = val\n }\n val.push(headers[i + 1].toString('utf8'))\n }\n }\n\n // See https://github.com/nodejs/node/pull/46528\n if ('content-length' in obj && 'content-disposition' in obj) {\n obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')\n }\n\n return obj\n}\n\nfunction parseRawHeaders (headers) {\n const ret = []\n let hasContentLength = false\n let contentDispositionIdx = -1\n\n for (let n = 0; n < headers.length; n += 2) {\n const key = headers[n + 0].toString()\n const val = headers[n + 1].toString('utf8')\n\n if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {\n ret.push(key, val)\n hasContentLength = true\n } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {\n contentDispositionIdx = ret.push(key, val) - 1\n } else {\n ret.push(key, val)\n }\n }\n\n // See https://github.com/nodejs/node/pull/46528\n if (hasContentLength && contentDispositionIdx !== -1) {\n ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')\n }\n\n return ret\n}\n\nfunction isBuffer (buffer) {\n // See, https://github.com/mcollina/undici/pull/319\n return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)\n}\n\nfunction validateHandler (handler, method, upgrade) {\n if (!handler || typeof handler !== 'object') {\n throw new InvalidArgumentError('handler must be an object')\n }\n\n if (typeof handler.onConnect !== 'function') {\n throw new InvalidArgumentError('invalid onConnect method')\n }\n\n if (typeof handler.onError !== 'function') {\n throw new InvalidArgumentError('invalid onError method')\n }\n\n if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {\n throw new InvalidArgumentError('invalid onBodySent method')\n }\n\n if (upgrade || method === 'CONNECT') {\n if (typeof handler.onUpgrade !== 'function') {\n throw new InvalidArgumentError('invalid onUpgrade method')\n }\n } else {\n if (typeof handler.onHeaders !== 'function') {\n throw new InvalidArgumentError('invalid onHeaders method')\n }\n\n if (typeof handler.onData !== 'function') {\n throw new InvalidArgumentError('invalid onData method')\n }\n\n if (typeof handler.onComplete !== 'function') {\n throw new InvalidArgumentError('invalid onComplete method')\n }\n }\n}\n\n// A body is disturbed if it has been read from and it cannot\n// be re-used without losing state or data.\nfunction isDisturbed (body) {\n return !!(body && (\n stream.isDisturbed\n ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?\n : body[kBodyUsed] ||\n body.readableDidRead ||\n (body._readableState && body._readableState.dataEmitted) ||\n isReadableAborted(body)\n ))\n}\n\nfunction isErrored (body) {\n return !!(body && (\n stream.isErrored\n ? stream.isErrored(body)\n : /state: 'errored'/.test(nodeUtil.inspect(body)\n )))\n}\n\nfunction isReadable (body) {\n return !!(body && (\n stream.isReadable\n ? 
stream.isReadable(body)\n : /state: 'readable'/.test(nodeUtil.inspect(body)\n )))\n}\n\nfunction getSocketInfo (socket) {\n return {\n localAddress: socket.localAddress,\n localPort: socket.localPort,\n remoteAddress: socket.remoteAddress,\n remotePort: socket.remotePort,\n remoteFamily: socket.remoteFamily,\n timeout: socket.timeout,\n bytesWritten: socket.bytesWritten,\n bytesRead: socket.bytesRead\n }\n}\n\nasync function * convertIterableToBuffer (iterable) {\n for await (const chunk of iterable) {\n yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)\n }\n}\n\nlet ReadableStream\nfunction ReadableStreamFrom (iterable) {\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n if (ReadableStream.from) {\n return ReadableStream.from(convertIterableToBuffer(iterable))\n }\n\n let iterator\n return new ReadableStream(\n {\n async start () {\n iterator = iterable[Symbol.asyncIterator]()\n },\n async pull (controller) {\n const { done, value } = await iterator.next()\n if (done) {\n queueMicrotask(() => {\n controller.close()\n })\n } else {\n const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)\n controller.enqueue(new Uint8Array(buf))\n }\n return controller.desiredSize > 0\n },\n async cancel (reason) {\n await iterator.return()\n }\n },\n 0\n )\n}\n\n// The chunk should be a FormData instance and contains\n// all the required methods.\nfunction isFormDataLike (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.append === 'function' &&\n typeof object.delete === 'function' &&\n typeof object.get === 'function' &&\n typeof object.getAll === 'function' &&\n typeof object.has === 'function' &&\n typeof object.set === 'function' &&\n object[Symbol.toStringTag] === 'FormData'\n )\n}\n\nfunction throwIfAborted (signal) {\n if (!signal) { return }\n if (typeof signal.throwIfAborted === 'function') {\n signal.throwIfAborted()\n } else {\n if (signal.aborted) {\n // DOMException not available < v17.0.0\n const err = new Error('The operation was aborted')\n err.name = 'AbortError'\n throw err\n }\n }\n}\n\nfunction addAbortListener (signal, listener) {\n if ('addEventListener' in signal) {\n signal.addEventListener('abort', listener, { once: true })\n return () => signal.removeEventListener('abort', listener)\n }\n signal.addListener('abort', listener)\n return () => signal.removeListener('abort', listener)\n}\n\nconst hasToWellFormed = !!String.prototype.toWellFormed\n\n/**\n * @param {string} val\n */\nfunction toUSVString (val) {\n if (hasToWellFormed) {\n return `${val}`.toWellFormed()\n } else if (nodeUtil.toUSVString) {\n return nodeUtil.toUSVString(val)\n }\n\n return `${val}`\n}\n\n// Parsed accordingly to RFC 9110\n// https://www.rfc-editor.org/rfc/rfc9110#field.content-range\nfunction parseRangeHeader (range) {\n if (range == null || range === '') return { start: 0, end: null, size: null }\n\n const m = range ? range.match(/^bytes (\\d+)-(\\d+)\\/(\\d+)?$/) : null\n return m\n ? {\n start: parseInt(m[1]),\n end: m[2] ? parseInt(m[2]) : null,\n size: m[3] ? 
parseInt(m[3]) : null\n }\n : null\n}\n\nconst kEnumerableProperty = Object.create(null)\nkEnumerableProperty.enumerable = true\n\nmodule.exports = {\n kEnumerableProperty,\n nop,\n isDisturbed,\n isErrored,\n isReadable,\n toUSVString,\n isReadableAborted,\n isBlobLike,\n parseOrigin,\n parseURL,\n getServerName,\n isStream,\n isIterable,\n isAsyncIterable,\n isDestroyed,\n parseRawHeaders,\n parseHeaders,\n parseKeepAliveTimeout,\n destroy,\n bodyLength,\n deepClone,\n ReadableStreamFrom,\n isBuffer,\n validateHandler,\n getSocketInfo,\n isFormDataLike,\n buildURL,\n throwIfAborted,\n addAbortListener,\n parseRangeHeader,\n nodeMajor,\n nodeMinor,\n nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),\n safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']\n}\n","'use strict'\n\nconst Dispatcher = require('./dispatcher')\nconst {\n ClientDestroyedError,\n ClientClosedError,\n InvalidArgumentError\n} = require('./core/errors')\nconst { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')\n\nconst kDestroyed = Symbol('destroyed')\nconst kClosed = Symbol('closed')\nconst kOnDestroyed = Symbol('onDestroyed')\nconst kOnClosed = Symbol('onClosed')\nconst kInterceptedDispatch = Symbol('Intercepted Dispatch')\n\nclass DispatcherBase extends Dispatcher {\n constructor () {\n super()\n\n this[kDestroyed] = false\n this[kOnDestroyed] = null\n this[kClosed] = false\n this[kOnClosed] = []\n }\n\n get destroyed () {\n return this[kDestroyed]\n }\n\n get closed () {\n return this[kClosed]\n }\n\n get interceptors () {\n return this[kInterceptors]\n }\n\n set interceptors (newInterceptors) {\n if (newInterceptors) {\n for (let i = newInterceptors.length - 1; i >= 0; i--) {\n const interceptor = this[kInterceptors][i]\n if (typeof interceptor !== 'function') {\n throw new InvalidArgumentError('interceptor must be an function')\n }\n }\n }\n\n this[kInterceptors] = newInterceptors\n }\n\n close (callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n this.close((err, data) => {\n return err ? reject(err) : resolve(data)\n })\n })\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (this[kDestroyed]) {\n queueMicrotask(() => callback(new ClientDestroyedError(), null))\n return\n }\n\n if (this[kClosed]) {\n if (this[kOnClosed]) {\n this[kOnClosed].push(callback)\n } else {\n queueMicrotask(() => callback(null, null))\n }\n return\n }\n\n this[kClosed] = true\n this[kOnClosed].push(callback)\n\n const onClosed = () => {\n const callbacks = this[kOnClosed]\n this[kOnClosed] = null\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i](null, null)\n }\n }\n\n // Should not error.\n this[kClose]()\n .then(() => this.destroy())\n .then(() => {\n queueMicrotask(onClosed)\n })\n }\n\n destroy (err, callback) {\n if (typeof err === 'function') {\n callback = err\n err = null\n }\n\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n this.destroy(err, (err, data) => {\n return err ? 
/* istanbul ignore next: should never error */ reject(err) : resolve(data)\n })\n })\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (this[kDestroyed]) {\n if (this[kOnDestroyed]) {\n this[kOnDestroyed].push(callback)\n } else {\n queueMicrotask(() => callback(null, null))\n }\n return\n }\n\n if (!err) {\n err = new ClientDestroyedError()\n }\n\n this[kDestroyed] = true\n this[kOnDestroyed] = this[kOnDestroyed] || []\n this[kOnDestroyed].push(callback)\n\n const onDestroyed = () => {\n const callbacks = this[kOnDestroyed]\n this[kOnDestroyed] = null\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i](null, null)\n }\n }\n\n // Should not error.\n this[kDestroy](err).then(() => {\n queueMicrotask(onDestroyed)\n })\n }\n\n [kInterceptedDispatch] (opts, handler) {\n if (!this[kInterceptors] || this[kInterceptors].length === 0) {\n this[kInterceptedDispatch] = this[kDispatch]\n return this[kDispatch](opts, handler)\n }\n\n let dispatch = this[kDispatch].bind(this)\n for (let i = this[kInterceptors].length - 1; i >= 0; i--) {\n dispatch = this[kInterceptors][i](dispatch)\n }\n this[kInterceptedDispatch] = dispatch\n return dispatch(opts, handler)\n }\n\n dispatch (opts, handler) {\n if (!handler || typeof handler !== 'object') {\n throw new InvalidArgumentError('handler must be an object')\n }\n\n try {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('opts must be an object.')\n }\n\n if (this[kDestroyed] || this[kOnDestroyed]) {\n throw new ClientDestroyedError()\n }\n\n if (this[kClosed]) {\n throw new ClientClosedError()\n }\n\n return this[kInterceptedDispatch](opts, handler)\n } catch (err) {\n if (typeof handler.onError !== 'function') {\n throw new InvalidArgumentError('invalid onError method')\n }\n\n handler.onError(err)\n\n return false\n }\n }\n}\n\nmodule.exports = DispatcherBase\n","'use strict'\n\nconst EventEmitter = require('events')\n\nclass Dispatcher extends EventEmitter {\n dispatch () {\n throw new Error('not implemented')\n }\n\n close () {\n throw new Error('not implemented')\n }\n\n destroy () {\n throw new Error('not implemented')\n }\n}\n\nmodule.exports = Dispatcher\n","'use strict'\n\nconst Busboy = require('@fastify/busboy')\nconst util = require('../core/util')\nconst {\n ReadableStreamFrom,\n isBlobLike,\n isReadableStreamLike,\n readableStreamClose,\n createDeferredPromise,\n fullyReadBody\n} = require('./util')\nconst { FormData } = require('./formdata')\nconst { kState } = require('./symbols')\nconst { webidl } = require('./webidl')\nconst { DOMException, structuredClone } = require('./constants')\nconst { Blob, File: NativeFile } = require('buffer')\nconst { kBodyUsed } = require('../core/symbols')\nconst assert = require('assert')\nconst { isErrored } = require('../core/util')\nconst { isUint8Array, isArrayBuffer } = require('util/types')\nconst { File: UndiciFile } = require('./file')\nconst { parseMIMEType, serializeAMimeType } = require('./dataURL')\n\nlet ReadableStream = globalThis.ReadableStream\n\n/** @type {globalThis['File']} */\nconst File = NativeFile ?? UndiciFile\nconst textEncoder = new TextEncoder()\nconst textDecoder = new TextDecoder()\n\n// https://fetch.spec.whatwg.org/#concept-bodyinit-extract\nfunction extractBody (object, keepalive = false) {\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n // 1. Let stream be null.\n let stream = null\n\n // 2. 
If object is a ReadableStream object, then set stream to object.\n if (object instanceof ReadableStream) {\n stream = object\n } else if (isBlobLike(object)) {\n // 3. Otherwise, if object is a Blob object, set stream to the\n // result of running object’s get stream.\n stream = object.stream()\n } else {\n // 4. Otherwise, set stream to a new ReadableStream object, and set\n // up stream.\n stream = new ReadableStream({\n async pull (controller) {\n controller.enqueue(\n typeof source === 'string' ? textEncoder.encode(source) : source\n )\n queueMicrotask(() => readableStreamClose(controller))\n },\n start () {},\n type: undefined\n })\n }\n\n // 5. Assert: stream is a ReadableStream object.\n assert(isReadableStreamLike(stream))\n\n // 6. Let action be null.\n let action = null\n\n // 7. Let source be null.\n let source = null\n\n // 8. Let length be null.\n let length = null\n\n // 9. Let type be null.\n let type = null\n\n // 10. Switch on object:\n if (typeof object === 'string') {\n // Set source to the UTF-8 encoding of object.\n // Note: setting source to a Uint8Array here breaks some mocking assumptions.\n source = object\n\n // Set type to `text/plain;charset=UTF-8`.\n type = 'text/plain;charset=UTF-8'\n } else if (object instanceof URLSearchParams) {\n // URLSearchParams\n\n // spec says to run application/x-www-form-urlencoded on body.list\n // this is implemented in Node.js as apart of an URLSearchParams instance toString method\n // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490\n // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100\n\n // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.\n source = object.toString()\n\n // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.\n type = 'application/x-www-form-urlencoded;charset=UTF-8'\n } else if (isArrayBuffer(object)) {\n // BufferSource/ArrayBuffer\n\n // Set source to a copy of the bytes held by object.\n source = new Uint8Array(object.slice())\n } else if (ArrayBuffer.isView(object)) {\n // BufferSource/ArrayBufferView\n\n // Set source to a copy of the bytes held by object.\n source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))\n } else if (util.isFormDataLike(object)) {\n const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`\n const prefix = `--${boundary}\\r\\nContent-Disposition: form-data`\n\n /*! formdata-polyfill. MIT License. 
Jimmy Wärting */\n const escape = (str) =>\n str.replace(/\\n/g, '%0A').replace(/\\r/g, '%0D').replace(/\"/g, '%22')\n const normalizeLinefeeds = (value) => value.replace(/\\r?\\n|\\r/g, '\\r\\n')\n\n // Set action to this step: run the multipart/form-data\n // encoding algorithm, with object’s entry list and UTF-8.\n // - This ensures that the body is immutable and can't be changed afterwords\n // - That the content-length is calculated in advance.\n // - And that all parts are pre-encoded and ready to be sent.\n\n const blobParts = []\n const rn = new Uint8Array([13, 10]) // '\\r\\n'\n length = 0\n let hasUnknownSizeValue = false\n\n for (const [name, value] of object) {\n if (typeof value === 'string') {\n const chunk = textEncoder.encode(prefix +\n `; name=\"${escape(normalizeLinefeeds(name))}\"` +\n `\\r\\n\\r\\n${normalizeLinefeeds(value)}\\r\\n`)\n blobParts.push(chunk)\n length += chunk.byteLength\n } else {\n const chunk = textEncoder.encode(`${prefix}; name=\"${escape(normalizeLinefeeds(name))}\"` +\n (value.name ? `; filename=\"${escape(value.name)}\"` : '') + '\\r\\n' +\n `Content-Type: ${\n value.type || 'application/octet-stream'\n }\\r\\n\\r\\n`)\n blobParts.push(chunk, value, rn)\n if (typeof value.size === 'number') {\n length += chunk.byteLength + value.size + rn.byteLength\n } else {\n hasUnknownSizeValue = true\n }\n }\n }\n\n const chunk = textEncoder.encode(`--${boundary}--`)\n blobParts.push(chunk)\n length += chunk.byteLength\n if (hasUnknownSizeValue) {\n length = null\n }\n\n // Set source to object.\n source = object\n\n action = async function * () {\n for (const part of blobParts) {\n if (part.stream) {\n yield * part.stream()\n } else {\n yield part\n }\n }\n }\n\n // Set type to `multipart/form-data; boundary=`,\n // followed by the multipart/form-data boundary string generated\n // by the multipart/form-data encoding algorithm.\n type = 'multipart/form-data; boundary=' + boundary\n } else if (isBlobLike(object)) {\n // Blob\n\n // Set source to object.\n source = object\n\n // Set length to object’s size.\n length = object.size\n\n // If object’s type attribute is not the empty byte sequence, set\n // type to its value.\n if (object.type) {\n type = object.type\n }\n } else if (typeof object[Symbol.asyncIterator] === 'function') {\n // If keepalive is true, then throw a TypeError.\n if (keepalive) {\n throw new TypeError('keepalive')\n }\n\n // If object is disturbed or locked, then throw a TypeError.\n if (util.isDisturbed(object) || object.locked) {\n throw new TypeError(\n 'Response body object should not be disturbed or locked'\n )\n }\n\n stream =\n object instanceof ReadableStream ? object : ReadableStreamFrom(object)\n }\n\n // 11. If source is a byte sequence, then set action to a\n // step that returns source and length to source’s length.\n if (typeof source === 'string' || util.isBuffer(source)) {\n length = Buffer.byteLength(source)\n }\n\n // 12. 
If action is non-null, then run these steps in in parallel:\n if (action != null) {\n // Run action.\n let iterator\n stream = new ReadableStream({\n async start () {\n iterator = action(object)[Symbol.asyncIterator]()\n },\n async pull (controller) {\n const { value, done } = await iterator.next()\n if (done) {\n // When running action is done, close stream.\n queueMicrotask(() => {\n controller.close()\n })\n } else {\n // Whenever one or more bytes are available and stream is not errored,\n // enqueue a Uint8Array wrapping an ArrayBuffer containing the available\n // bytes into stream.\n if (!isErrored(stream)) {\n controller.enqueue(new Uint8Array(value))\n }\n }\n return controller.desiredSize > 0\n },\n async cancel (reason) {\n await iterator.return()\n },\n type: undefined\n })\n }\n\n // 13. Let body be a body whose stream is stream, source is source,\n // and length is length.\n const body = { stream, source, length }\n\n // 14. Return (body, type).\n return [body, type]\n}\n\n// https://fetch.spec.whatwg.org/#bodyinit-safely-extract\nfunction safelyExtractBody (object, keepalive = false) {\n if (!ReadableStream) {\n // istanbul ignore next\n ReadableStream = require('stream/web').ReadableStream\n }\n\n // To safely extract a body and a `Content-Type` value from\n // a byte sequence or BodyInit object object, run these steps:\n\n // 1. If object is a ReadableStream object, then:\n if (object instanceof ReadableStream) {\n // Assert: object is neither disturbed nor locked.\n // istanbul ignore next\n assert(!util.isDisturbed(object), 'The body has already been consumed.')\n // istanbul ignore next\n assert(!object.locked, 'The stream is locked.')\n }\n\n // 2. Return the results of extracting object.\n return extractBody(object, keepalive)\n}\n\nfunction cloneBody (body) {\n // To clone a body body, run these steps:\n\n // https://fetch.spec.whatwg.org/#concept-body-clone\n\n // 1. Let « out1, out2 » be the result of teeing body’s stream.\n const [out1, out2] = body.stream.tee()\n const out2Clone = structuredClone(out2, { transfer: [out2] })\n // This, for whatever reasons, unrefs out2Clone which allows\n // the process to exit by itself.\n const [, finalClone] = out2Clone.tee()\n\n // 2. Set body’s stream to out1.\n body.stream = out1\n\n // 3. 
Return a body whose stream is out2 and other members are copied from body.\n return {\n stream: finalClone,\n length: body.length,\n source: body.source\n }\n}\n\nasync function * consumeBody (body) {\n if (body) {\n if (isUint8Array(body)) {\n yield body\n } else {\n const stream = body.stream\n\n if (util.isDisturbed(stream)) {\n throw new TypeError('The body has already been consumed.')\n }\n\n if (stream.locked) {\n throw new TypeError('The stream is locked.')\n }\n\n // Compat.\n stream[kBodyUsed] = true\n\n yield * stream\n }\n }\n}\n\nfunction throwIfAborted (state) {\n if (state.aborted) {\n throw new DOMException('The operation was aborted.', 'AbortError')\n }\n}\n\nfunction bodyMixinMethods (instance) {\n const methods = {\n blob () {\n // The blob() method steps are to return the result of\n // running consume body with this and the following step\n // given a byte sequence bytes: return a Blob whose\n // contents are bytes and whose type attribute is this’s\n // MIME type.\n return specConsumeBody(this, (bytes) => {\n let mimeType = bodyMimeType(this)\n\n if (mimeType === 'failure') {\n mimeType = ''\n } else if (mimeType) {\n mimeType = serializeAMimeType(mimeType)\n }\n\n // Return a Blob whose contents are bytes and type attribute\n // is mimeType.\n return new Blob([bytes], { type: mimeType })\n }, instance)\n },\n\n arrayBuffer () {\n // The arrayBuffer() method steps are to return the result\n // of running consume body with this and the following step\n // given a byte sequence bytes: return a new ArrayBuffer\n // whose contents are bytes.\n return specConsumeBody(this, (bytes) => {\n return new Uint8Array(bytes).buffer\n }, instance)\n },\n\n text () {\n // The text() method steps are to return the result of running\n // consume body with this and UTF-8 decode.\n return specConsumeBody(this, utf8DecodeBytes, instance)\n },\n\n json () {\n // The json() method steps are to return the result of running\n // consume body with this and parse JSON from bytes.\n return specConsumeBody(this, parseJSONFromBytes, instance)\n },\n\n async formData () {\n webidl.brandCheck(this, instance)\n\n throwIfAborted(this[kState])\n\n const contentType = this.headers.get('Content-Type')\n\n // If mimeType’s essence is \"multipart/form-data\", then:\n if (/multipart\\/form-data/.test(contentType)) {\n const headers = {}\n for (const [key, value] of this.headers) headers[key.toLowerCase()] = value\n\n const responseFormData = new FormData()\n\n let busboy\n\n try {\n busboy = new Busboy({\n headers,\n preservePath: true\n })\n } catch (err) {\n throw new DOMException(`${err}`, 'AbortError')\n }\n\n busboy.on('field', (name, value) => {\n responseFormData.append(name, value)\n })\n busboy.on('file', (name, value, filename, encoding, mimeType) => {\n const chunks = []\n\n if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {\n let base64chunk = ''\n\n value.on('data', (chunk) => {\n base64chunk += chunk.toString().replace(/[\\r\\n]/gm, '')\n\n const end = base64chunk.length - base64chunk.length % 4\n chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))\n\n base64chunk = base64chunk.slice(end)\n })\n value.on('end', () => {\n chunks.push(Buffer.from(base64chunk, 'base64'))\n responseFormData.append(name, new File(chunks, filename, { type: mimeType }))\n })\n } else {\n value.on('data', (chunk) => {\n chunks.push(chunk)\n })\n value.on('end', () => {\n responseFormData.append(name, new File(chunks, filename, { type: mimeType }))\n })\n }\n })\n\n const busboyResolve = new 
Promise((resolve, reject) => {\n busboy.on('finish', resolve)\n busboy.on('error', (err) => reject(new TypeError(err)))\n })\n\n if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)\n busboy.end()\n await busboyResolve\n\n return responseFormData\n } else if (/application\\/x-www-form-urlencoded/.test(contentType)) {\n // Otherwise, if mimeType’s essence is \"application/x-www-form-urlencoded\", then:\n\n // 1. Let entries be the result of parsing bytes.\n let entries\n try {\n let text = ''\n // application/x-www-form-urlencoded parser will keep the BOM.\n // https://url.spec.whatwg.org/#concept-urlencoded-parser\n // Note that streaming decoder is stateful and cannot be reused\n const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })\n\n for await (const chunk of consumeBody(this[kState].body)) {\n if (!isUint8Array(chunk)) {\n throw new TypeError('Expected Uint8Array chunk')\n }\n text += streamingDecoder.decode(chunk, { stream: true })\n }\n text += streamingDecoder.decode()\n entries = new URLSearchParams(text)\n } catch (err) {\n // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.\n // 2. If entries is failure, then throw a TypeError.\n throw Object.assign(new TypeError(), { cause: err })\n }\n\n // 3. Return a new FormData object whose entries are entries.\n const formData = new FormData()\n for (const [name, value] of entries) {\n formData.append(name, value)\n }\n return formData\n } else {\n // Wait a tick before checking if the request has been aborted.\n // Otherwise, a TypeError can be thrown when an AbortError should.\n await Promise.resolve()\n\n throwIfAborted(this[kState])\n\n // Otherwise, throw a TypeError.\n throw webidl.errors.exception({\n header: `${instance.name}.formData`,\n message: 'Could not parse content as FormData.'\n })\n }\n }\n }\n\n return methods\n}\n\nfunction mixinBody (prototype) {\n Object.assign(prototype.prototype, bodyMixinMethods(prototype))\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-body-consume-body\n * @param {Response|Request} object\n * @param {(value: unknown) => unknown} convertBytesToJSValue\n * @param {Response|Request} instance\n */\nasync function specConsumeBody (object, convertBytesToJSValue, instance) {\n webidl.brandCheck(object, instance)\n\n throwIfAborted(object[kState])\n\n // 1. If object is unusable, then return a promise rejected\n // with a TypeError.\n if (bodyUnusable(object[kState].body)) {\n throw new TypeError('Body is unusable')\n }\n\n // 2. Let promise be a new promise.\n const promise = createDeferredPromise()\n\n // 3. Let errorSteps given error be to reject promise with error.\n const errorSteps = (error) => promise.reject(error)\n\n // 4. Let successSteps given a byte sequence data be to resolve\n // promise with the result of running convertBytesToJSValue\n // with data. If that threw an exception, then run errorSteps\n // with that exception.\n const successSteps = (data) => {\n try {\n promise.resolve(convertBytesToJSValue(data))\n } catch (e) {\n errorSteps(e)\n }\n }\n\n // 5. If object’s body is null, then run successSteps with an\n // empty byte sequence.\n if (object[kState].body == null) {\n successSteps(new Uint8Array())\n return promise.promise\n }\n\n // 6. Otherwise, fully read object’s body given successSteps,\n // errorSteps, and object’s relevant global object.\n await fullyReadBody(object[kState].body, successSteps, errorSteps)\n\n // 7. 
Return promise.\n return promise.promise\n}\n\n// https://fetch.spec.whatwg.org/#body-unusable\nfunction bodyUnusable (body) {\n // An object including the Body interface mixin is\n // said to be unusable if its body is non-null and\n // its body’s stream is disturbed or locked.\n return body != null && (body.stream.locked || util.isDisturbed(body.stream))\n}\n\n/**\n * @see https://encoding.spec.whatwg.org/#utf-8-decode\n * @param {Buffer} buffer\n */\nfunction utf8DecodeBytes (buffer) {\n if (buffer.length === 0) {\n return ''\n }\n\n // 1. Let buffer be the result of peeking three bytes from\n // ioQueue, converted to a byte sequence.\n\n // 2. If buffer is 0xEF 0xBB 0xBF, then read three\n // bytes from ioQueue. (Do nothing with those bytes.)\n if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {\n buffer = buffer.subarray(3)\n }\n\n // 3. Process a queue with an instance of UTF-8’s\n // decoder, ioQueue, output, and \"replacement\".\n const output = textDecoder.decode(buffer)\n\n // 4. Return output.\n return output\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value\n * @param {Uint8Array} bytes\n */\nfunction parseJSONFromBytes (bytes) {\n return JSON.parse(utf8DecodeBytes(bytes))\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-body-mime-type\n * @param {import('./response').Response|import('./request').Request} object\n */\nfunction bodyMimeType (object) {\n const { headersList } = object[kState]\n const contentType = headersList.get('content-type')\n\n if (contentType === null) {\n return 'failure'\n }\n\n return parseMIMEType(contentType)\n}\n\nmodule.exports = {\n extractBody,\n safelyExtractBody,\n cloneBody,\n mixinBody\n}\n","'use strict'\n\nconst { MessageChannel, receiveMessageOnPort } = require('worker_threads')\n\nconst corsSafeListedMethods = ['GET', 'HEAD', 'POST']\nconst corsSafeListedMethodsSet = new Set(corsSafeListedMethods)\n\nconst nullBodyStatus = [101, 204, 205, 304]\n\nconst redirectStatus = [301, 302, 303, 307, 308]\nconst redirectStatusSet = new Set(redirectStatus)\n\n// https://fetch.spec.whatwg.org/#block-bad-port\nconst badPorts = [\n '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',\n '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',\n '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',\n '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',\n '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',\n '10080'\n]\n\nconst badPortsSet = new Set(badPorts)\n\n// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies\nconst referrerPolicy = [\n '',\n 'no-referrer',\n 'no-referrer-when-downgrade',\n 'same-origin',\n 'origin',\n 'strict-origin',\n 'origin-when-cross-origin',\n 'strict-origin-when-cross-origin',\n 'unsafe-url'\n]\nconst referrerPolicySet = new Set(referrerPolicy)\n\nconst requestRedirect = ['follow', 'manual', 'error']\n\nconst safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']\nconst safeMethodsSet = new Set(safeMethods)\n\nconst requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']\n\nconst requestCredentials = ['omit', 'same-origin', 'include']\n\nconst requestCache = [\n 'default',\n 'no-store',\n 'reload',\n 'no-cache',\n 'force-cache',\n 'only-if-cached'\n]\n\n// 
https://fetch.spec.whatwg.org/#request-body-header-name\nconst requestBodyHeader = [\n 'content-encoding',\n 'content-language',\n 'content-location',\n 'content-type',\n // See https://github.com/nodejs/undici/issues/2021\n // 'Content-Length' is a forbidden header name, which is typically\n // removed in the Headers implementation. However, undici doesn't\n // filter out headers, so we add it here.\n 'content-length'\n]\n\n// https://fetch.spec.whatwg.org/#enumdef-requestduplex\nconst requestDuplex = [\n 'half'\n]\n\n// http://fetch.spec.whatwg.org/#forbidden-method\nconst forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']\nconst forbiddenMethodsSet = new Set(forbiddenMethods)\n\nconst subresource = [\n 'audio',\n 'audioworklet',\n 'font',\n 'image',\n 'manifest',\n 'paintworklet',\n 'script',\n 'style',\n 'track',\n 'video',\n 'xslt',\n ''\n]\nconst subresourceSet = new Set(subresource)\n\n/** @type {globalThis['DOMException']} */\nconst DOMException = globalThis.DOMException ?? (() => {\n // DOMException was only made a global in Node v17.0.0,\n // but fetch supports >= v16.8.\n try {\n atob('~')\n } catch (err) {\n return Object.getPrototypeOf(err).constructor\n }\n})()\n\nlet channel\n\n/** @type {globalThis['structuredClone']} */\nconst structuredClone =\n globalThis.structuredClone ??\n // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js\n // structuredClone was added in v17.0.0, but fetch supports v16.8\n function structuredClone (value, options = undefined) {\n if (arguments.length === 0) {\n throw new TypeError('missing argument')\n }\n\n if (!channel) {\n channel = new MessageChannel()\n }\n channel.port1.unref()\n channel.port2.unref()\n channel.port1.postMessage(value, options?.transfer)\n return receiveMessageOnPort(channel.port2).message\n }\n\nmodule.exports = {\n DOMException,\n structuredClone,\n subresource,\n forbiddenMethods,\n requestBodyHeader,\n referrerPolicy,\n requestRedirect,\n requestMode,\n requestCredentials,\n requestCache,\n redirectStatus,\n corsSafeListedMethods,\n nullBodyStatus,\n safeMethods,\n badPorts,\n requestDuplex,\n subresourceSet,\n badPortsSet,\n redirectStatusSet,\n corsSafeListedMethodsSet,\n safeMethodsSet,\n forbiddenMethodsSet,\n referrerPolicySet\n}\n","const assert = require('assert')\nconst { atob } = require('buffer')\nconst { isomorphicDecode } = require('./util')\n\nconst encoder = new TextEncoder()\n\n/**\n * @see https://mimesniff.spec.whatwg.org/#http-token-code-point\n */\nconst HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/\nconst HTTP_WHITESPACE_REGEX = /(\\u000A|\\u000D|\\u0009|\\u0020)/ // eslint-disable-line\n/**\n * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point\n */\nconst HTTP_QUOTED_STRING_TOKENS = /[\\u0009|\\u0020-\\u007E|\\u0080-\\u00FF]/ // eslint-disable-line\n\n// https://fetch.spec.whatwg.org/#data-url-processor\n/** @param {URL} dataURL */\nfunction dataURLProcessor (dataURL) {\n // 1. Assert: dataURL’s scheme is \"data\".\n assert(dataURL.protocol === 'data:')\n\n // 2. Let input be the result of running the URL\n // serializer on dataURL with exclude fragment\n // set to true.\n let input = URLSerializer(dataURL, true)\n\n // 3. Remove the leading \"data:\" string from input.\n input = input.slice(5)\n\n // 4. Let position point at the start of input.\n const position = { position: 0 }\n\n // 5. 
Let mimeType be the result of collecting a\n // sequence of code points that are not equal\n // to U+002C (,), given position.\n let mimeType = collectASequenceOfCodePointsFast(\n ',',\n input,\n position\n )\n\n // 6. Strip leading and trailing ASCII whitespace\n // from mimeType.\n // Undici implementation note: we need to store the\n // length because if the mimetype has spaces removed,\n // the wrong amount will be sliced from the input in\n // step #9\n const mimeTypeLength = mimeType.length\n mimeType = removeASCIIWhitespace(mimeType, true, true)\n\n // 7. If position is past the end of input, then\n // return failure\n if (position.position >= input.length) {\n return 'failure'\n }\n\n // 8. Advance position by 1.\n position.position++\n\n // 9. Let encodedBody be the remainder of input.\n const encodedBody = input.slice(mimeTypeLength + 1)\n\n // 10. Let body be the percent-decoding of encodedBody.\n let body = stringPercentDecode(encodedBody)\n\n // 11. If mimeType ends with U+003B (;), followed by\n // zero or more U+0020 SPACE, followed by an ASCII\n // case-insensitive match for \"base64\", then:\n if (/;(\\u0020){0,}base64$/i.test(mimeType)) {\n // 1. Let stringBody be the isomorphic decode of body.\n const stringBody = isomorphicDecode(body)\n\n // 2. Set body to the forgiving-base64 decode of\n // stringBody.\n body = forgivingBase64(stringBody)\n\n // 3. If body is failure, then return failure.\n if (body === 'failure') {\n return 'failure'\n }\n\n // 4. Remove the last 6 code points from mimeType.\n mimeType = mimeType.slice(0, -6)\n\n // 5. Remove trailing U+0020 SPACE code points from mimeType,\n // if any.\n mimeType = mimeType.replace(/(\\u0020)+$/, '')\n\n // 6. Remove the last U+003B (;) code point from mimeType.\n mimeType = mimeType.slice(0, -1)\n }\n\n // 12. If mimeType starts with U+003B (;), then prepend\n // \"text/plain\" to mimeType.\n if (mimeType.startsWith(';')) {\n mimeType = 'text/plain' + mimeType\n }\n\n // 13. Let mimeTypeRecord be the result of parsing\n // mimeType.\n let mimeTypeRecord = parseMIMEType(mimeType)\n\n // 14. If mimeTypeRecord is failure, then set\n // mimeTypeRecord to text/plain;charset=US-ASCII.\n if (mimeTypeRecord === 'failure') {\n mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII')\n }\n\n // 15. Return a new data: URL struct whose MIME\n // type is mimeTypeRecord and body is body.\n // https://fetch.spec.whatwg.org/#data-url-struct\n return { mimeType: mimeTypeRecord, body }\n}\n\n// https://url.spec.whatwg.org/#concept-url-serializer\n/**\n * @param {URL} url\n * @param {boolean} excludeFragment\n */\nfunction URLSerializer (url, excludeFragment = false) {\n if (!excludeFragment) {\n return url.href\n }\n\n const href = url.href\n const hashLength = url.hash.length\n\n return hashLength === 0 ? href : href.substring(0, href.length - hashLength)\n}\n\n// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points\n/**\n * @param {(char: string) => boolean} condition\n * @param {string} input\n * @param {{ position: number }} position\n */\nfunction collectASequenceOfCodePoints (condition, input, position) {\n // 1. Let result be the empty string.\n let result = ''\n\n // 2. While position doesn’t point past the end of input and the\n // code point at position within input meets the condition condition:\n while (position.position < input.length && condition(input[position.position])) {\n // 1. Append that code point to the end of result.\n result += input[position.position]\n\n // 2. 
Advance position by 1.\n position.position++\n }\n\n // 3. Return result.\n return result\n}\n\n/**\n * A faster collectASequenceOfCodePoints that only works when comparing a single character.\n * @param {string} char\n * @param {string} input\n * @param {{ position: number }} position\n */\nfunction collectASequenceOfCodePointsFast (char, input, position) {\n const idx = input.indexOf(char, position.position)\n const start = position.position\n\n if (idx === -1) {\n position.position = input.length\n return input.slice(start)\n }\n\n position.position = idx\n return input.slice(start, position.position)\n}\n\n// https://url.spec.whatwg.org/#string-percent-decode\n/** @param {string} input */\nfunction stringPercentDecode (input) {\n // 1. Let bytes be the UTF-8 encoding of input.\n const bytes = encoder.encode(input)\n\n // 2. Return the percent-decoding of bytes.\n return percentDecode(bytes)\n}\n\n// https://url.spec.whatwg.org/#percent-decode\n/** @param {Uint8Array} input */\nfunction percentDecode (input) {\n // 1. Let output be an empty byte sequence.\n /** @type {number[]} */\n const output = []\n\n // 2. For each byte byte in input:\n for (let i = 0; i < input.length; i++) {\n const byte = input[i]\n\n // 1. If byte is not 0x25 (%), then append byte to output.\n if (byte !== 0x25) {\n output.push(byte)\n\n // 2. Otherwise, if byte is 0x25 (%) and the next two bytes\n // after byte in input are not in the ranges\n // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F),\n // and 0x61 (a) to 0x66 (f), all inclusive, append byte\n // to output.\n } else if (\n byte === 0x25 &&\n !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2]))\n ) {\n output.push(0x25)\n\n // 3. Otherwise:\n } else {\n // 1. Let bytePoint be the two bytes after byte in input,\n // decoded, and then interpreted as hexadecimal number.\n const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2])\n const bytePoint = Number.parseInt(nextTwoBytes, 16)\n\n // 2. Append a byte whose value is bytePoint to output.\n output.push(bytePoint)\n\n // 3. Skip the next two bytes in input.\n i += 2\n }\n }\n\n // 3. Return output.\n return Uint8Array.from(output)\n}\n\n// https://mimesniff.spec.whatwg.org/#parse-a-mime-type\n/** @param {string} input */\nfunction parseMIMEType (input) {\n // 1. Remove any leading and trailing HTTP whitespace\n // from input.\n input = removeHTTPWhitespace(input, true, true)\n\n // 2. Let position be a position variable for input,\n // initially pointing at the start of input.\n const position = { position: 0 }\n\n // 3. Let type be the result of collecting a sequence\n // of code points that are not U+002F (/) from\n // input, given position.\n const type = collectASequenceOfCodePointsFast(\n '/',\n input,\n position\n )\n\n // 4. If type is the empty string or does not solely\n // contain HTTP token code points, then return failure.\n // https://mimesniff.spec.whatwg.org/#http-token-code-point\n if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {\n return 'failure'\n }\n\n // 5. If position is past the end of input, then return\n // failure\n if (position.position > input.length) {\n return 'failure'\n }\n\n // 6. Advance position by 1. (This skips past U+002F (/).)\n position.position++\n\n // 7. Let subtype be the result of collecting a sequence of\n // code points that are not U+003B (;) from input, given\n // position.\n let subtype = collectASequenceOfCodePointsFast(\n ';',\n input,\n position\n )\n\n // 8. 
Remove any trailing HTTP whitespace from subtype.\n subtype = removeHTTPWhitespace(subtype, false, true)\n\n // 9. If subtype is the empty string or does not solely\n // contain HTTP token code points, then return failure.\n if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {\n return 'failure'\n }\n\n const typeLowercase = type.toLowerCase()\n const subtypeLowercase = subtype.toLowerCase()\n\n // 10. Let mimeType be a new MIME type record whose type\n // is type, in ASCII lowercase, and subtype is subtype,\n // in ASCII lowercase.\n // https://mimesniff.spec.whatwg.org/#mime-type\n const mimeType = {\n type: typeLowercase,\n subtype: subtypeLowercase,\n /** @type {Map} */\n parameters: new Map(),\n // https://mimesniff.spec.whatwg.org/#mime-type-essence\n essence: `${typeLowercase}/${subtypeLowercase}`\n }\n\n // 11. While position is not past the end of input:\n while (position.position < input.length) {\n // 1. Advance position by 1. (This skips past U+003B (;).)\n position.position++\n\n // 2. Collect a sequence of code points that are HTTP\n // whitespace from input given position.\n collectASequenceOfCodePoints(\n // https://fetch.spec.whatwg.org/#http-whitespace\n char => HTTP_WHITESPACE_REGEX.test(char),\n input,\n position\n )\n\n // 3. Let parameterName be the result of collecting a\n // sequence of code points that are not U+003B (;)\n // or U+003D (=) from input, given position.\n let parameterName = collectASequenceOfCodePoints(\n (char) => char !== ';' && char !== '=',\n input,\n position\n )\n\n // 4. Set parameterName to parameterName, in ASCII\n // lowercase.\n parameterName = parameterName.toLowerCase()\n\n // 5. If position is not past the end of input, then:\n if (position.position < input.length) {\n // 1. If the code point at position within input is\n // U+003B (;), then continue.\n if (input[position.position] === ';') {\n continue\n }\n\n // 2. Advance position by 1. (This skips past U+003D (=).)\n position.position++\n }\n\n // 6. If position is past the end of input, then break.\n if (position.position > input.length) {\n break\n }\n\n // 7. Let parameterValue be null.\n let parameterValue = null\n\n // 8. If the code point at position within input is\n // U+0022 (\"), then:\n if (input[position.position] === '\"') {\n // 1. Set parameterValue to the result of collecting\n // an HTTP quoted string from input, given position\n // and the extract-value flag.\n parameterValue = collectAnHTTPQuotedString(input, position, true)\n\n // 2. Collect a sequence of code points that are not\n // U+003B (;) from input, given position.\n collectASequenceOfCodePointsFast(\n ';',\n input,\n position\n )\n\n // 9. Otherwise:\n } else {\n // 1. Set parameterValue to the result of collecting\n // a sequence of code points that are not U+003B (;)\n // from input, given position.\n parameterValue = collectASequenceOfCodePointsFast(\n ';',\n input,\n position\n )\n\n // 2. Remove any trailing HTTP whitespace from parameterValue.\n parameterValue = removeHTTPWhitespace(parameterValue, false, true)\n\n // 3. If parameterValue is the empty string, then continue.\n if (parameterValue.length === 0) {\n continue\n }\n }\n\n // 10. 
If all of the following are true\n // - parameterName is not the empty string\n // - parameterName solely contains HTTP token code points\n // - parameterValue solely contains HTTP quoted-string token code points\n // - mimeType’s parameters[parameterName] does not exist\n // then set mimeType’s parameters[parameterName] to parameterValue.\n if (\n parameterName.length !== 0 &&\n HTTP_TOKEN_CODEPOINTS.test(parameterName) &&\n (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&\n !mimeType.parameters.has(parameterName)\n ) {\n mimeType.parameters.set(parameterName, parameterValue)\n }\n }\n\n // 12. Return mimeType.\n return mimeType\n}\n\n// https://infra.spec.whatwg.org/#forgiving-base64-decode\n/** @param {string} data */\nfunction forgivingBase64 (data) {\n // 1. Remove all ASCII whitespace from data.\n data = data.replace(/[\\u0009\\u000A\\u000C\\u000D\\u0020]/g, '') // eslint-disable-line\n\n // 2. If data’s code point length divides by 4 leaving\n // no remainder, then:\n if (data.length % 4 === 0) {\n // 1. If data ends with one or two U+003D (=) code points,\n // then remove them from data.\n data = data.replace(/=?=$/, '')\n }\n\n // 3. If data’s code point length divides by 4 leaving\n // a remainder of 1, then return failure.\n if (data.length % 4 === 1) {\n return 'failure'\n }\n\n // 4. If data contains a code point that is not one of\n // U+002B (+)\n // U+002F (/)\n // ASCII alphanumeric\n // then return failure.\n if (/[^+/0-9A-Za-z]/.test(data)) {\n return 'failure'\n }\n\n const binary = atob(data)\n const bytes = new Uint8Array(binary.length)\n\n for (let byte = 0; byte < binary.length; byte++) {\n bytes[byte] = binary.charCodeAt(byte)\n }\n\n return bytes\n}\n\n// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string\n// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string\n/**\n * @param {string} input\n * @param {{ position: number }} position\n * @param {boolean?} extractValue\n */\nfunction collectAnHTTPQuotedString (input, position, extractValue) {\n // 1. Let positionStart be position.\n const positionStart = position.position\n\n // 2. Let value be the empty string.\n let value = ''\n\n // 3. Assert: the code point at position within input\n // is U+0022 (\").\n assert(input[position.position] === '\"')\n\n // 4. Advance position by 1.\n position.position++\n\n // 5. While true:\n while (true) {\n // 1. Append the result of collecting a sequence of code points\n // that are not U+0022 (\") or U+005C (\\) from input, given\n // position, to value.\n value += collectASequenceOfCodePoints(\n (char) => char !== '\"' && char !== '\\\\',\n input,\n position\n )\n\n // 2. If position is past the end of input, then break.\n if (position.position >= input.length) {\n break\n }\n\n // 3. Let quoteOrBackslash be the code point at position within\n // input.\n const quoteOrBackslash = input[position.position]\n\n // 4. Advance position by 1.\n position.position++\n\n // 5. If quoteOrBackslash is U+005C (\\), then:\n if (quoteOrBackslash === '\\\\') {\n // 1. If position is past the end of input, then append\n // U+005C (\\) to value and break.\n if (position.position >= input.length) {\n value += '\\\\'\n break\n }\n\n // 2. Append the code point at position within input to value.\n value += input[position.position]\n\n // 3. Advance position by 1.\n position.position++\n\n // 6. Otherwise:\n } else {\n // 1. Assert: quoteOrBackslash is U+0022 (\").\n assert(quoteOrBackslash === '\"')\n\n // 2. 
Break.\n break\n }\n }\n\n // 6. If the extract-value flag is set, then return value.\n if (extractValue) {\n return value\n }\n\n // 7. Return the code points from positionStart to position,\n // inclusive, within input.\n return input.slice(positionStart, position.position)\n}\n\n/**\n * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type\n */\nfunction serializeAMimeType (mimeType) {\n assert(mimeType !== 'failure')\n const { parameters, essence } = mimeType\n\n // 1. Let serialization be the concatenation of mimeType’s\n // type, U+002F (/), and mimeType’s subtype.\n let serialization = essence\n\n // 2. For each name → value of mimeType’s parameters:\n for (let [name, value] of parameters.entries()) {\n // 1. Append U+003B (;) to serialization.\n serialization += ';'\n\n // 2. Append name to serialization.\n serialization += name\n\n // 3. Append U+003D (=) to serialization.\n serialization += '='\n\n // 4. If value does not solely contain HTTP token code\n // points or value is the empty string, then:\n if (!HTTP_TOKEN_CODEPOINTS.test(value)) {\n // 1. Precede each occurence of U+0022 (\") or\n // U+005C (\\) in value with U+005C (\\).\n value = value.replace(/(\\\\|\")/g, '\\\\$1')\n\n // 2. Prepend U+0022 (\") to value.\n value = '\"' + value\n\n // 3. Append U+0022 (\") to value.\n value += '\"'\n }\n\n // 5. Append value to serialization.\n serialization += value\n }\n\n // 3. Return serialization.\n return serialization\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#http-whitespace\n * @param {string} char\n */\nfunction isHTTPWhiteSpace (char) {\n return char === '\\r' || char === '\\n' || char === '\\t' || char === ' '\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#http-whitespace\n * @param {string} str\n */\nfunction removeHTTPWhitespace (str, leading = true, trailing = true) {\n let lead = 0\n let trail = str.length - 1\n\n if (leading) {\n for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);\n }\n\n if (trailing) {\n for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);\n }\n\n return str.slice(lead, trail + 1)\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#ascii-whitespace\n * @param {string} char\n */\nfunction isASCIIWhitespace (char) {\n return char === '\\r' || char === '\\n' || char === '\\t' || char === '\\f' || char === ' '\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace\n */\nfunction removeASCIIWhitespace (str, leading = true, trailing = true) {\n let lead = 0\n let trail = str.length - 1\n\n if (leading) {\n for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);\n }\n\n if (trailing) {\n for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);\n }\n\n return str.slice(lead, trail + 1)\n}\n\nmodule.exports = {\n dataURLProcessor,\n URLSerializer,\n collectASequenceOfCodePoints,\n collectASequenceOfCodePointsFast,\n stringPercentDecode,\n parseMIMEType,\n collectAnHTTPQuotedString,\n serializeAMimeType\n}\n","'use strict'\n\nconst { Blob, File: NativeFile } = require('buffer')\nconst { types } = require('util')\nconst { kState } = require('./symbols')\nconst { isBlobLike } = require('./util')\nconst { webidl } = require('./webidl')\nconst { parseMIMEType, serializeAMimeType } = require('./dataURL')\nconst { kEnumerableProperty } = require('../core/util')\nconst encoder = new TextEncoder()\n\nclass File extends Blob {\n constructor (fileBits, fileName, options = {}) {\n // The File constructor is invoked with two or three parameters, depending\n // on whether the 
optional dictionary parameter is used. When the File()\n // constructor is invoked, user agents must run the following steps:\n webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })\n\n fileBits = webidl.converters['sequence'](fileBits)\n fileName = webidl.converters.USVString(fileName)\n options = webidl.converters.FilePropertyBag(options)\n\n // 1. Let bytes be the result of processing blob parts given fileBits and\n // options.\n // Note: Blob handles this for us\n\n // 2. Let n be the fileName argument to the constructor.\n const n = fileName\n\n // 3. Process FilePropertyBag dictionary argument by running the following\n // substeps:\n\n // 1. If the type member is provided and is not the empty string, let t\n // be set to the type dictionary member. If t contains any characters\n // outside the range U+0020 to U+007E, then set t to the empty string\n // and return from these substeps.\n // 2. Convert every character in t to ASCII lowercase.\n let t = options.type\n let d\n\n // eslint-disable-next-line no-labels\n substep: {\n if (t) {\n t = parseMIMEType(t)\n\n if (t === 'failure') {\n t = ''\n // eslint-disable-next-line no-labels\n break substep\n }\n\n t = serializeAMimeType(t).toLowerCase()\n }\n\n // 3. If the lastModified member is provided, let d be set to the\n // lastModified dictionary member. If it is not provided, set d to the\n // current date and time represented as the number of milliseconds since\n // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).\n d = options.lastModified\n }\n\n // 4. Return a new File object F such that:\n // F refers to the bytes byte sequence.\n // F.size is set to the number of total bytes in bytes.\n // F.name is set to n.\n // F.type is set to t.\n // F.lastModified is set to d.\n\n super(processBlobParts(fileBits, options), { type: t })\n this[kState] = {\n name: n,\n lastModified: d,\n type: t\n }\n }\n\n get name () {\n webidl.brandCheck(this, File)\n\n return this[kState].name\n }\n\n get lastModified () {\n webidl.brandCheck(this, File)\n\n return this[kState].lastModified\n }\n\n get type () {\n webidl.brandCheck(this, File)\n\n return this[kState].type\n }\n}\n\nclass FileLike {\n constructor (blobLike, fileName, options = {}) {\n // TODO: argument idl type check\n\n // The File constructor is invoked with two or three parameters, depending\n // on whether the optional dictionary parameter is used. When the File()\n // constructor is invoked, user agents must run the following steps:\n\n // 1. Let bytes be the result of processing blob parts given fileBits and\n // options.\n\n // 2. Let n be the fileName argument to the constructor.\n const n = fileName\n\n // 3. Process FilePropertyBag dictionary argument by running the following\n // substeps:\n\n // 1. If the type member is provided and is not the empty string, let t\n // be set to the type dictionary member. If t contains any characters\n // outside the range U+0020 to U+007E, then set t to the empty string\n // and return from these substeps.\n // TODO\n const t = options.type\n\n // 2. Convert every character in t to ASCII lowercase.\n // TODO\n\n // 3. If the lastModified member is provided, let d be set to the\n // lastModified dictionary member. If it is not provided, set d to the\n // current date and time represented as the number of milliseconds since\n // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).\n const d = options.lastModified ?? Date.now()\n\n // 4. 
Return a new File object F such that:\n // F refers to the bytes byte sequence.\n // F.size is set to the number of total bytes in bytes.\n // F.name is set to n.\n // F.type is set to t.\n // F.lastModified is set to d.\n\n this[kState] = {\n blobLike,\n name: n,\n type: t,\n lastModified: d\n }\n }\n\n stream (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.stream(...args)\n }\n\n arrayBuffer (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.arrayBuffer(...args)\n }\n\n slice (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.slice(...args)\n }\n\n text (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.text(...args)\n }\n\n get size () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.size\n }\n\n get type () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.type\n }\n\n get name () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].name\n }\n\n get lastModified () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n}\n\nObject.defineProperties(File.prototype, {\n [Symbol.toStringTag]: {\n value: 'File',\n configurable: true\n },\n name: kEnumerableProperty,\n lastModified: kEnumerableProperty\n})\n\nwebidl.converters.Blob = webidl.interfaceConverter(Blob)\n\nwebidl.converters.BlobPart = function (V, opts) {\n if (webidl.util.Type(V) === 'Object') {\n if (isBlobLike(V)) {\n return webidl.converters.Blob(V, { strict: false })\n }\n\n if (\n ArrayBuffer.isView(V) ||\n types.isAnyArrayBuffer(V)\n ) {\n return webidl.converters.BufferSource(V, opts)\n }\n }\n\n return webidl.converters.USVString(V, opts)\n}\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.BlobPart\n)\n\n// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag\nwebidl.converters.FilePropertyBag = webidl.dictionaryConverter([\n {\n key: 'lastModified',\n converter: webidl.converters['long long'],\n get defaultValue () {\n return Date.now()\n }\n },\n {\n key: 'type',\n converter: webidl.converters.DOMString,\n defaultValue: ''\n },\n {\n key: 'endings',\n converter: (value) => {\n value = webidl.converters.DOMString(value)\n value = value.toLowerCase()\n\n if (value !== 'native') {\n value = 'transparent'\n }\n\n return value\n },\n defaultValue: 'transparent'\n }\n])\n\n/**\n * @see https://www.w3.org/TR/FileAPI/#process-blob-parts\n * @param {(NodeJS.TypedArray|Blob|string)[]} parts\n * @param {{ type: string, endings: string }} options\n */\nfunction processBlobParts (parts, options) {\n // 1. Let bytes be an empty sequence of bytes.\n /** @type {NodeJS.TypedArray[]} */\n const bytes = []\n\n // 2. For each element in parts:\n for (const element of parts) {\n // 1. If element is a USVString, run the following substeps:\n if (typeof element === 'string') {\n // 1. Let s be element.\n let s = element\n\n // 2. If the endings member of options is \"native\", set s\n // to the result of converting line endings to native\n // of element.\n if (options.endings === 'native') {\n s = convertLineEndingsNative(s)\n }\n\n // 3. Append the result of UTF-8 encoding s to bytes.\n bytes.push(encoder.encode(s))\n } else if (\n types.isAnyArrayBuffer(element) ||\n types.isTypedArray(element)\n ) {\n // 2. 
If element is a BufferSource, get a copy of the\n // bytes held by the buffer source, and append those\n // bytes to bytes.\n if (!element.buffer) { // ArrayBuffer\n bytes.push(new Uint8Array(element))\n } else {\n bytes.push(\n new Uint8Array(element.buffer, element.byteOffset, element.byteLength)\n )\n }\n } else if (isBlobLike(element)) {\n // 3. If element is a Blob, append the bytes it represents\n // to bytes.\n bytes.push(element)\n }\n }\n\n // 3. Return bytes.\n return bytes\n}\n\n/**\n * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native\n * @param {string} s\n */\nfunction convertLineEndingsNative (s) {\n // 1. Let native line ending be be the code point U+000A LF.\n let nativeLineEnding = '\\n'\n\n // 2. If the underlying platform’s conventions are to\n // represent newlines as a carriage return and line feed\n // sequence, set native line ending to the code point\n // U+000D CR followed by the code point U+000A LF.\n if (process.platform === 'win32') {\n nativeLineEnding = '\\r\\n'\n }\n\n return s.replace(/\\r?\\n/g, nativeLineEnding)\n}\n\n// If this function is moved to ./util.js, some tools (such as\n// rollup) will warn about circular dependencies. See:\n// https://github.com/nodejs/undici/issues/1629\nfunction isFileLike (object) {\n return (\n (NativeFile && object instanceof NativeFile) ||\n object instanceof File || (\n object &&\n (typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function') &&\n object[Symbol.toStringTag] === 'File'\n )\n )\n}\n\nmodule.exports = { File, FileLike, isFileLike }\n","'use strict'\n\nconst { isBlobLike, toUSVString, makeIterator } = require('./util')\nconst { kState } = require('./symbols')\nconst { File: UndiciFile, FileLike, isFileLike } = require('./file')\nconst { webidl } = require('./webidl')\nconst { Blob, File: NativeFile } = require('buffer')\n\n/** @type {globalThis['File']} */\nconst File = NativeFile ?? UndiciFile\n\n// https://xhr.spec.whatwg.org/#formdata\nclass FormData {\n constructor (form) {\n if (form !== undefined) {\n throw webidl.errors.conversionFailed({\n prefix: 'FormData constructor',\n argument: 'Argument 1',\n types: ['undefined']\n })\n }\n\n this[kState] = []\n }\n\n append (name, value, filename = undefined) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })\n\n if (arguments.length === 3 && !isBlobLike(value)) {\n throw new TypeError(\n \"Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'\"\n )\n }\n\n // 1. Let value be value if given; otherwise blobValue.\n\n name = webidl.converters.USVString(name)\n value = isBlobLike(value)\n ? webidl.converters.Blob(value, { strict: false })\n : webidl.converters.USVString(value)\n filename = arguments.length === 3\n ? webidl.converters.USVString(filename)\n : undefined\n\n // 2. Let entry be the result of creating an entry with\n // name, value, and filename if given.\n const entry = makeEntry(name, value, filename)\n\n // 3. 
Append entry to this’s entry list.\n this[kState].push(entry)\n }\n\n delete (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })\n\n name = webidl.converters.USVString(name)\n\n // The delete(name) method steps are to remove all entries whose name\n // is name from this’s entry list.\n this[kState] = this[kState].filter(entry => entry.name !== name)\n }\n\n get (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })\n\n name = webidl.converters.USVString(name)\n\n // 1. If there is no entry whose name is name in this’s entry list,\n // then return null.\n const idx = this[kState].findIndex((entry) => entry.name === name)\n if (idx === -1) {\n return null\n }\n\n // 2. Return the value of the first entry whose name is name from\n // this’s entry list.\n return this[kState][idx].value\n }\n\n getAll (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })\n\n name = webidl.converters.USVString(name)\n\n // 1. If there is no entry whose name is name in this’s entry list,\n // then return the empty list.\n // 2. Return the values of all entries whose name is name, in order,\n // from this’s entry list.\n return this[kState]\n .filter((entry) => entry.name === name)\n .map((entry) => entry.value)\n }\n\n has (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })\n\n name = webidl.converters.USVString(name)\n\n // The has(name) method steps are to return true if there is an entry\n // whose name is name in this’s entry list; otherwise false.\n return this[kState].findIndex((entry) => entry.name === name) !== -1\n }\n\n set (name, value, filename = undefined) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })\n\n if (arguments.length === 3 && !isBlobLike(value)) {\n throw new TypeError(\n \"Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'\"\n )\n }\n\n // The set(name, value) and set(name, blobValue, filename) method steps\n // are:\n\n // 1. Let value be value if given; otherwise blobValue.\n\n name = webidl.converters.USVString(name)\n value = isBlobLike(value)\n ? webidl.converters.Blob(value, { strict: false })\n : webidl.converters.USVString(value)\n filename = arguments.length === 3\n ? toUSVString(filename)\n : undefined\n\n // 2. Let entry be the result of creating an entry with name, value, and\n // filename if given.\n const entry = makeEntry(name, value, filename)\n\n // 3. If there are entries in this’s entry list whose name is name, then\n // replace the first such entry with entry and remove the others.\n const idx = this[kState].findIndex((entry) => entry.name === name)\n if (idx !== -1) {\n this[kState] = [\n ...this[kState].slice(0, idx),\n entry,\n ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)\n ]\n } else {\n // 4. 
Otherwise, append entry to this’s entry list.\n this[kState].push(entry)\n }\n }\n\n entries () {\n webidl.brandCheck(this, FormData)\n\n return makeIterator(\n () => this[kState].map(pair => [pair.name, pair.value]),\n 'FormData',\n 'key+value'\n )\n }\n\n keys () {\n webidl.brandCheck(this, FormData)\n\n return makeIterator(\n () => this[kState].map(pair => [pair.name, pair.value]),\n 'FormData',\n 'key'\n )\n }\n\n values () {\n webidl.brandCheck(this, FormData)\n\n return makeIterator(\n () => this[kState].map(pair => [pair.name, pair.value]),\n 'FormData',\n 'value'\n )\n }\n\n /**\n * @param {(value: string, key: string, self: FormData) => void} callbackFn\n * @param {unknown} thisArg\n */\n forEach (callbackFn, thisArg = globalThis) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })\n\n if (typeof callbackFn !== 'function') {\n throw new TypeError(\n \"Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'.\"\n )\n }\n\n for (const [key, value] of this) {\n callbackFn.apply(thisArg, [value, key, this])\n }\n }\n}\n\nFormData.prototype[Symbol.iterator] = FormData.prototype.entries\n\nObject.defineProperties(FormData.prototype, {\n [Symbol.toStringTag]: {\n value: 'FormData',\n configurable: true\n }\n})\n\n/**\n * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry\n * @param {string} name\n * @param {string|Blob} value\n * @param {?string} filename\n * @returns\n */\nfunction makeEntry (name, value, filename) {\n // 1. Set name to the result of converting name into a scalar value string.\n // \"To convert a string into a scalar value string, replace any surrogates\n // with U+FFFD.\"\n // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end\n name = Buffer.from(name).toString('utf8')\n\n // 2. If value is a string, then set value to the result of converting\n // value into a scalar value string.\n if (typeof value === 'string') {\n value = Buffer.from(value).toString('utf8')\n } else {\n // 3. Otherwise:\n\n // 1. If value is not a File object, then set value to a new File object,\n // representing the same bytes, whose name attribute value is \"blob\"\n if (!isFileLike(value)) {\n value = value instanceof Blob\n ? new File([value], 'blob', { type: value.type })\n : new FileLike(value, 'blob', { type: value.type })\n }\n\n // 2. If filename is given, then set value to a new File object,\n // representing the same bytes, whose name attribute is filename.\n if (filename !== undefined) {\n /** @type {FilePropertyBag} */\n const options = {\n type: value.type,\n lastModified: value.lastModified\n }\n\n value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile\n ? new File([value], filename, options)\n : new FileLike(value, filename, options)\n }\n }\n\n // 4. 
Return an entry whose name is name and whose value is value.\n return { name, value }\n}\n\nmodule.exports = { FormData }\n","'use strict'\n\n// In case of breaking changes, increase the version\n// number to avoid conflicts.\nconst globalOrigin = Symbol.for('undici.globalOrigin.1')\n\nfunction getGlobalOrigin () {\n return globalThis[globalOrigin]\n}\n\nfunction setGlobalOrigin (newOrigin) {\n if (newOrigin === undefined) {\n Object.defineProperty(globalThis, globalOrigin, {\n value: undefined,\n writable: true,\n enumerable: false,\n configurable: false\n })\n\n return\n }\n\n const parsedURL = new URL(newOrigin)\n\n if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {\n throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)\n }\n\n Object.defineProperty(globalThis, globalOrigin, {\n value: parsedURL,\n writable: true,\n enumerable: false,\n configurable: false\n })\n}\n\nmodule.exports = {\n getGlobalOrigin,\n setGlobalOrigin\n}\n","// https://github.com/Ethan-Arrowood/undici-fetch\n\n'use strict'\n\nconst { kHeadersList, kConstruct } = require('../core/symbols')\nconst { kGuard } = require('./symbols')\nconst { kEnumerableProperty } = require('../core/util')\nconst {\n makeIterator,\n isValidHeaderName,\n isValidHeaderValue\n} = require('./util')\nconst { webidl } = require('./webidl')\nconst assert = require('assert')\n\nconst kHeadersMap = Symbol('headers map')\nconst kHeadersSortedMap = Symbol('headers map sorted')\n\n/**\n * @param {number} code\n */\nfunction isHTTPWhiteSpaceCharCode (code) {\n return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize\n * @param {string} potentialValue\n */\nfunction headerValueNormalize (potentialValue) {\n // To normalize a byte sequence potentialValue, remove\n // any leading and trailing HTTP whitespace bytes from\n // potentialValue.\n let i = 0; let j = potentialValue.length\n\n while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j\n while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i\n\n return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)\n}\n\nfunction fill (headers, object) {\n // To fill a Headers object headers with a given object object, run these steps:\n\n // 1. If object is a sequence, then for each header in object:\n // Note: webidl conversion to array has already been done.\n if (Array.isArray(object)) {\n for (let i = 0; i < object.length; ++i) {\n const header = object[i]\n // 1. If header does not contain exactly two items, then throw a TypeError.\n if (header.length !== 2) {\n throw webidl.errors.exception({\n header: 'Headers constructor',\n message: `expected name/value pair to be length 2, found ${header.length}.`\n })\n }\n\n // 2. Append (header’s first item, header’s second item) to headers.\n appendHeader(headers, header[0], header[1])\n }\n } else if (typeof object === 'object' && object !== null) {\n // Note: null should throw\n\n // 2. 
Otherwise, object is a record, then for each key → value in object,\n // append (key, value) to headers\n const keys = Object.keys(object)\n for (let i = 0; i < keys.length; ++i) {\n appendHeader(headers, keys[i], object[keys[i]])\n }\n } else {\n throw webidl.errors.conversionFailed({\n prefix: 'Headers constructor',\n argument: 'Argument 1',\n types: ['sequence>', 'record']\n })\n }\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-headers-append\n */\nfunction appendHeader (headers, name, value) {\n // 1. Normalize value.\n value = headerValueNormalize(value)\n\n // 2. If name is not a header name or value is not a\n // header value, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.append',\n value: name,\n type: 'header name'\n })\n } else if (!isValidHeaderValue(value)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.append',\n value,\n type: 'header value'\n })\n }\n\n // 3. If headers’s guard is \"immutable\", then throw a TypeError.\n // 4. Otherwise, if headers’s guard is \"request\" and name is a\n // forbidden header name, return.\n // Note: undici does not implement forbidden header names\n if (headers[kGuard] === 'immutable') {\n throw new TypeError('immutable')\n } else if (headers[kGuard] === 'request-no-cors') {\n // 5. Otherwise, if headers’s guard is \"request-no-cors\":\n // TODO\n }\n\n // 6. Otherwise, if headers’s guard is \"response\" and name is a\n // forbidden response-header name, return.\n\n // 7. Append (name, value) to headers’s header list.\n return headers[kHeadersList].append(name, value)\n\n // 8. If headers’s guard is \"request-no-cors\", then remove\n // privileged no-CORS request headers from headers\n}\n\nclass HeadersList {\n /** @type {[string, string][]|null} */\n cookies = null\n\n constructor (init) {\n if (init instanceof HeadersList) {\n this[kHeadersMap] = new Map(init[kHeadersMap])\n this[kHeadersSortedMap] = init[kHeadersSortedMap]\n this.cookies = init.cookies === null ? null : [...init.cookies]\n } else {\n this[kHeadersMap] = new Map(init)\n this[kHeadersSortedMap] = null\n }\n }\n\n // https://fetch.spec.whatwg.org/#header-list-contains\n contains (name) {\n // A header list list contains a header name name if list\n // contains a header whose name is a byte-case-insensitive\n // match for name.\n name = name.toLowerCase()\n\n return this[kHeadersMap].has(name)\n }\n\n clear () {\n this[kHeadersMap].clear()\n this[kHeadersSortedMap] = null\n this.cookies = null\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-append\n append (name, value) {\n this[kHeadersSortedMap] = null\n\n // 1. If list contains name, then set name to the first such\n // header’s name.\n const lowercaseName = name.toLowerCase()\n const exists = this[kHeadersMap].get(lowercaseName)\n\n // 2. Append (name, value) to list.\n if (exists) {\n const delimiter = lowercaseName === 'cookie' ? '; ' : ', '\n this[kHeadersMap].set(lowercaseName, {\n name: exists.name,\n value: `${exists.value}${delimiter}${value}`\n })\n } else {\n this[kHeadersMap].set(lowercaseName, { name, value })\n }\n\n if (lowercaseName === 'set-cookie') {\n this.cookies ??= []\n this.cookies.push(value)\n }\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-set\n set (name, value) {\n this[kHeadersSortedMap] = null\n const lowercaseName = name.toLowerCase()\n\n if (lowercaseName === 'set-cookie') {\n this.cookies = [value]\n }\n\n // 1. 
If list contains name, then set the value of\n // the first such header to value and remove the\n // others.\n // 2. Otherwise, append header (name, value) to list.\n this[kHeadersMap].set(lowercaseName, { name, value })\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-delete\n delete (name) {\n this[kHeadersSortedMap] = null\n\n name = name.toLowerCase()\n\n if (name === 'set-cookie') {\n this.cookies = null\n }\n\n this[kHeadersMap].delete(name)\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-get\n get (name) {\n const value = this[kHeadersMap].get(name.toLowerCase())\n\n // 1. If list does not contain name, then return null.\n // 2. Return the values of all headers in list whose name\n // is a byte-case-insensitive match for name,\n // separated from each other by 0x2C 0x20, in order.\n return value === undefined ? null : value.value\n }\n\n * [Symbol.iterator] () {\n // use the lowercased name\n for (const [name, { value }] of this[kHeadersMap]) {\n yield [name, value]\n }\n }\n\n get entries () {\n const headers = {}\n\n if (this[kHeadersMap].size) {\n for (const { name, value } of this[kHeadersMap].values()) {\n headers[name] = value\n }\n }\n\n return headers\n }\n}\n\n// https://fetch.spec.whatwg.org/#headers-class\nclass Headers {\n constructor (init = undefined) {\n if (init === kConstruct) {\n return\n }\n this[kHeadersList] = new HeadersList()\n\n // The new Headers(init) constructor steps are:\n\n // 1. Set this’s guard to \"none\".\n this[kGuard] = 'none'\n\n // 2. If init is given, then fill this with init.\n if (init !== undefined) {\n init = webidl.converters.HeadersInit(init)\n fill(this, init)\n }\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-append\n append (name, value) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })\n\n name = webidl.converters.ByteString(name)\n value = webidl.converters.ByteString(value)\n\n return appendHeader(this, name, value)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-delete\n delete (name) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })\n\n name = webidl.converters.ByteString(name)\n\n // 1. If name is not a header name, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.delete',\n value: name,\n type: 'header name'\n })\n }\n\n // 2. If this’s guard is \"immutable\", then throw a TypeError.\n // 3. Otherwise, if this’s guard is \"request\" and name is a\n // forbidden header name, return.\n // 4. Otherwise, if this’s guard is \"request-no-cors\", name\n // is not a no-CORS-safelisted request-header name, and\n // name is not a privileged no-CORS request-header name,\n // return.\n // 5. Otherwise, if this’s guard is \"response\" and name is\n // a forbidden response-header name, return.\n // Note: undici does not implement forbidden header names\n if (this[kGuard] === 'immutable') {\n throw new TypeError('immutable')\n } else if (this[kGuard] === 'request-no-cors') {\n // TODO\n }\n\n // 6. If this’s header list does not contain name, then\n // return.\n if (!this[kHeadersList].contains(name)) {\n return\n }\n\n // 7. Delete name from this’s header list.\n // 8. 
If this’s guard is \"request-no-cors\", then remove\n // privileged no-CORS request headers from this.\n this[kHeadersList].delete(name)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-get\n get (name) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })\n\n name = webidl.converters.ByteString(name)\n\n // 1. If name is not a header name, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.get',\n value: name,\n type: 'header name'\n })\n }\n\n // 2. Return the result of getting name from this’s header\n // list.\n return this[kHeadersList].get(name)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-has\n has (name) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })\n\n name = webidl.converters.ByteString(name)\n\n // 1. If name is not a header name, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.has',\n value: name,\n type: 'header name'\n })\n }\n\n // 2. Return true if this’s header list contains name;\n // otherwise false.\n return this[kHeadersList].contains(name)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-set\n set (name, value) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })\n\n name = webidl.converters.ByteString(name)\n value = webidl.converters.ByteString(value)\n\n // 1. Normalize value.\n value = headerValueNormalize(value)\n\n // 2. If name is not a header name or value is not a\n // header value, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.set',\n value: name,\n type: 'header name'\n })\n } else if (!isValidHeaderValue(value)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.set',\n value,\n type: 'header value'\n })\n }\n\n // 3. If this’s guard is \"immutable\", then throw a TypeError.\n // 4. Otherwise, if this’s guard is \"request\" and name is a\n // forbidden header name, return.\n // 5. Otherwise, if this’s guard is \"request-no-cors\" and\n // name/value is not a no-CORS-safelisted request-header,\n // return.\n // 6. Otherwise, if this’s guard is \"response\" and name is a\n // forbidden response-header name, return.\n // Note: undici does not implement forbidden header names\n if (this[kGuard] === 'immutable') {\n throw new TypeError('immutable')\n } else if (this[kGuard] === 'request-no-cors') {\n // TODO\n }\n\n // 7. Set (name, value) in this’s header list.\n // 8. If this’s guard is \"request-no-cors\", then remove\n // privileged no-CORS request headers from this\n this[kHeadersList].set(name, value)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie\n getSetCookie () {\n webidl.brandCheck(this, Headers)\n\n // 1. If this’s header list does not contain `Set-Cookie`, then return « ».\n // 2. Return the values of all headers in this’s header list whose name is\n // a byte-case-insensitive match for `Set-Cookie`, in order.\n\n const list = this[kHeadersList].cookies\n\n if (list) {\n return [...list]\n }\n\n return []\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine\n get [kHeadersSortedMap] () {\n if (this[kHeadersList][kHeadersSortedMap]) {\n return this[kHeadersList][kHeadersSortedMap]\n }\n\n // 1. 
Let headers be an empty list of headers with the key being the name\n // and value the value.\n const headers = []\n\n // 2. Let names be the result of convert header names to a sorted-lowercase\n // set with all the names of the headers in list.\n const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)\n const cookies = this[kHeadersList].cookies\n\n // 3. For each name of names:\n for (let i = 0; i < names.length; ++i) {\n const [name, value] = names[i]\n // 1. If name is `set-cookie`, then:\n if (name === 'set-cookie') {\n // 1. Let values be a list of all values of headers in list whose name\n // is a byte-case-insensitive match for name, in order.\n\n // 2. For each value of values:\n // 1. Append (name, value) to headers.\n for (let j = 0; j < cookies.length; ++j) {\n headers.push([name, cookies[j]])\n }\n } else {\n // 2. Otherwise:\n\n // 1. Let value be the result of getting name from list.\n\n // 2. Assert: value is non-null.\n assert(value !== null)\n\n // 3. Append (name, value) to headers.\n headers.push([name, value])\n }\n }\n\n this[kHeadersList][kHeadersSortedMap] = headers\n\n // 4. Return headers.\n return headers\n }\n\n keys () {\n webidl.brandCheck(this, Headers)\n\n if (this[kGuard] === 'immutable') {\n const value = this[kHeadersSortedMap]\n return makeIterator(() => value, 'Headers',\n 'key')\n }\n\n return makeIterator(\n () => [...this[kHeadersSortedMap].values()],\n 'Headers',\n 'key'\n )\n }\n\n values () {\n webidl.brandCheck(this, Headers)\n\n if (this[kGuard] === 'immutable') {\n const value = this[kHeadersSortedMap]\n return makeIterator(() => value, 'Headers',\n 'value')\n }\n\n return makeIterator(\n () => [...this[kHeadersSortedMap].values()],\n 'Headers',\n 'value'\n )\n }\n\n entries () {\n webidl.brandCheck(this, Headers)\n\n if (this[kGuard] === 'immutable') {\n const value = this[kHeadersSortedMap]\n return makeIterator(() => value, 'Headers',\n 'key+value')\n }\n\n return makeIterator(\n () => [...this[kHeadersSortedMap].values()],\n 'Headers',\n 'key+value'\n )\n }\n\n /**\n * @param {(value: string, key: string, self: Headers) => void} callbackFn\n * @param {unknown} thisArg\n */\n forEach (callbackFn, thisArg = globalThis) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })\n\n if (typeof callbackFn !== 'function') {\n throw new TypeError(\n \"Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'.\"\n )\n }\n\n for (const [key, value] of this) {\n callbackFn.apply(thisArg, [value, key, this])\n }\n }\n\n [Symbol.for('nodejs.util.inspect.custom')] () {\n webidl.brandCheck(this, Headers)\n\n return this[kHeadersList]\n }\n}\n\nHeaders.prototype[Symbol.iterator] = Headers.prototype.entries\n\nObject.defineProperties(Headers.prototype, {\n append: kEnumerableProperty,\n delete: kEnumerableProperty,\n get: kEnumerableProperty,\n has: kEnumerableProperty,\n set: kEnumerableProperty,\n getSetCookie: kEnumerableProperty,\n keys: kEnumerableProperty,\n values: kEnumerableProperty,\n entries: kEnumerableProperty,\n forEach: kEnumerableProperty,\n [Symbol.iterator]: { enumerable: false },\n [Symbol.toStringTag]: {\n value: 'Headers',\n configurable: true\n }\n})\n\nwebidl.converters.HeadersInit = function (V) {\n if (webidl.util.Type(V) === 'Object') {\n if (V[Symbol.iterator]) {\n return webidl.converters['sequence>'](V)\n }\n\n return webidl.converters['record'](V)\n }\n\n throw webidl.errors.conversionFailed({\n prefix: 'Headers constructor',\n 
argument: 'Argument 1',\n types: ['sequence>', 'record']\n })\n}\n\nmodule.exports = {\n fill,\n Headers,\n HeadersList\n}\n","// https://github.com/Ethan-Arrowood/undici-fetch\n\n'use strict'\n\nconst {\n Response,\n makeNetworkError,\n makeAppropriateNetworkError,\n filterResponse,\n makeResponse\n} = require('./response')\nconst { Headers } = require('./headers')\nconst { Request, makeRequest } = require('./request')\nconst zlib = require('zlib')\nconst {\n bytesMatch,\n makePolicyContainer,\n clonePolicyContainer,\n requestBadPort,\n TAOCheck,\n appendRequestOriginHeader,\n responseLocationURL,\n requestCurrentURL,\n setRequestReferrerPolicyOnRedirect,\n tryUpgradeRequestToAPotentiallyTrustworthyURL,\n createOpaqueTimingInfo,\n appendFetchMetadata,\n corsCheck,\n crossOriginResourcePolicyCheck,\n determineRequestsReferrer,\n coarsenedSharedCurrentTime,\n createDeferredPromise,\n isBlobLike,\n sameOrigin,\n isCancelled,\n isAborted,\n isErrorLike,\n fullyReadBody,\n readableStreamClose,\n isomorphicEncode,\n urlIsLocal,\n urlIsHttpHttpsScheme,\n urlHasHttpsScheme\n} = require('./util')\nconst { kState, kHeaders, kGuard, kRealm } = require('./symbols')\nconst assert = require('assert')\nconst { safelyExtractBody } = require('./body')\nconst {\n redirectStatusSet,\n nullBodyStatus,\n safeMethodsSet,\n requestBodyHeader,\n subresourceSet,\n DOMException\n} = require('./constants')\nconst { kHeadersList } = require('../core/symbols')\nconst EE = require('events')\nconst { Readable, pipeline } = require('stream')\nconst { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util')\nconst { dataURLProcessor, serializeAMimeType } = require('./dataURL')\nconst { TransformStream } = require('stream/web')\nconst { getGlobalDispatcher } = require('../global')\nconst { webidl } = require('./webidl')\nconst { STATUS_CODES } = require('http')\nconst GET_OR_HEAD = ['GET', 'HEAD']\n\n/** @type {import('buffer').resolveObjectURL} */\nlet resolveObjectURL\nlet ReadableStream = globalThis.ReadableStream\n\nclass Fetch extends EE {\n constructor (dispatcher) {\n super()\n\n this.dispatcher = dispatcher\n this.connection = null\n this.dump = false\n this.state = 'ongoing'\n // 2 terminated listeners get added per request,\n // but only 1 gets removed. If there are 20 redirects,\n // 21 listeners will be added.\n // See https://github.com/nodejs/undici/issues/1711\n // TODO (fix): Find and fix root cause for leaked listener.\n this.setMaxListeners(21)\n }\n\n terminate (reason) {\n if (this.state !== 'ongoing') {\n return\n }\n\n this.state = 'terminated'\n this.connection?.destroy(reason)\n this.emit('terminated', reason)\n }\n\n // https://fetch.spec.whatwg.org/#fetch-controller-abort\n abort (error) {\n if (this.state !== 'ongoing') {\n return\n }\n\n // 1. Set controller’s state to \"aborted\".\n this.state = 'aborted'\n\n // 2. Let fallbackError be an \"AbortError\" DOMException.\n // 3. Set error to fallbackError if it is not given.\n if (!error) {\n error = new DOMException('The operation was aborted.', 'AbortError')\n }\n\n // 4. Let serializedError be StructuredSerialize(error).\n // If that threw an exception, catch it, and let\n // serializedError be StructuredSerialize(fallbackError).\n\n // 5. 
Set controller’s serialized abort reason to serializedError.\n this.serializedAbortReason = error\n\n this.connection?.destroy(error)\n this.emit('terminated', error)\n }\n}\n\n// https://fetch.spec.whatwg.org/#fetch-method\nfunction fetch (input, init = {}) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })\n\n // 1. Let p be a new promise.\n const p = createDeferredPromise()\n\n // 2. Let requestObject be the result of invoking the initial value of\n // Request as constructor with input and init as arguments. If this throws\n // an exception, reject p with it and return p.\n let requestObject\n\n try {\n requestObject = new Request(input, init)\n } catch (e) {\n p.reject(e)\n return p.promise\n }\n\n // 3. Let request be requestObject’s request.\n const request = requestObject[kState]\n\n // 4. If requestObject’s signal’s aborted flag is set, then:\n if (requestObject.signal.aborted) {\n // 1. Abort the fetch() call with p, request, null, and\n // requestObject’s signal’s abort reason.\n abortFetch(p, request, null, requestObject.signal.reason)\n\n // 2. Return p.\n return p.promise\n }\n\n // 5. Let globalObject be request’s client’s global object.\n const globalObject = request.client.globalObject\n\n // 6. If globalObject is a ServiceWorkerGlobalScope object, then set\n // request’s service-workers mode to \"none\".\n if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {\n request.serviceWorkers = 'none'\n }\n\n // 7. Let responseObject be null.\n let responseObject = null\n\n // 8. Let relevantRealm be this’s relevant Realm.\n const relevantRealm = null\n\n // 9. Let locallyAborted be false.\n let locallyAborted = false\n\n // 10. Let controller be null.\n let controller = null\n\n // 11. Add the following abort steps to requestObject’s signal:\n addAbortListener(\n requestObject.signal,\n () => {\n // 1. Set locallyAborted to true.\n locallyAborted = true\n\n // 2. Assert: controller is non-null.\n assert(controller != null)\n\n // 3. Abort controller with requestObject’s signal’s abort reason.\n controller.abort(requestObject.signal.reason)\n\n // 4. Abort the fetch() call with p, request, responseObject,\n // and requestObject’s signal’s abort reason.\n abortFetch(p, request, responseObject, requestObject.signal.reason)\n }\n )\n\n // 12. Let handleFetchDone given response response be to finalize and\n // report timing with response, globalObject, and \"fetch\".\n const handleFetchDone = (response) =>\n finalizeAndReportTiming(response, 'fetch')\n\n // 13. Set controller to the result of calling fetch given request,\n // with processResponseEndOfBody set to handleFetchDone, and processResponse\n // given response being these substeps:\n\n const processResponse = (response) => {\n // 1. If locallyAborted is true, terminate these substeps.\n if (locallyAborted) {\n return Promise.resolve()\n }\n\n // 2. If response’s aborted flag is set, then:\n if (response.aborted) {\n // 1. Let deserializedError be the result of deserialize a serialized\n // abort reason given controller’s serialized abort reason and\n // relevantRealm.\n\n // 2. Abort the fetch() call with p, request, responseObject, and\n // deserializedError.\n\n abortFetch(p, request, responseObject, controller.serializedAbortReason)\n return Promise.resolve()\n }\n\n // 3. 
If response is a network error, then reject p with a TypeError\n // and terminate these substeps.\n if (response.type === 'error') {\n p.reject(\n Object.assign(new TypeError('fetch failed'), { cause: response.error })\n )\n return Promise.resolve()\n }\n\n // 4. Set responseObject to the result of creating a Response object,\n // given response, \"immutable\", and relevantRealm.\n responseObject = new Response()\n responseObject[kState] = response\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kHeadersList] = response.headersList\n responseObject[kHeaders][kGuard] = 'immutable'\n responseObject[kHeaders][kRealm] = relevantRealm\n\n // 5. Resolve p with responseObject.\n p.resolve(responseObject)\n }\n\n controller = fetching({\n request,\n processResponseEndOfBody: handleFetchDone,\n processResponse,\n dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici\n })\n\n // 14. Return p.\n return p.promise\n}\n\n// https://fetch.spec.whatwg.org/#finalize-and-report-timing\nfunction finalizeAndReportTiming (response, initiatorType = 'other') {\n // 1. If response is an aborted network error, then return.\n if (response.type === 'error' && response.aborted) {\n return\n }\n\n // 2. If response’s URL list is null or empty, then return.\n if (!response.urlList?.length) {\n return\n }\n\n // 3. Let originalURL be response’s URL list[0].\n const originalURL = response.urlList[0]\n\n // 4. Let timingInfo be response’s timing info.\n let timingInfo = response.timingInfo\n\n // 5. Let cacheState be response’s cache state.\n let cacheState = response.cacheState\n\n // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return.\n if (!urlIsHttpHttpsScheme(originalURL)) {\n return\n }\n\n // 7. If timingInfo is null, then return.\n if (timingInfo === null) {\n return\n }\n\n // 8. If response’s timing allow passed flag is not set, then:\n if (!response.timingAllowPassed) {\n // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo.\n timingInfo = createOpaqueTimingInfo({\n startTime: timingInfo.startTime\n })\n\n // 2. Set cacheState to the empty string.\n cacheState = ''\n }\n\n // 9. Set timingInfo’s end time to the coarsened shared current time\n // given global’s relevant settings object’s cross-origin isolated\n // capability.\n // TODO: given global’s relevant settings object’s cross-origin isolated\n // capability?\n timingInfo.endTime = coarsenedSharedCurrentTime()\n\n // 10. Set response’s timing info to timingInfo.\n response.timingInfo = timingInfo\n\n // 11. Mark resource timing for timingInfo, originalURL, initiatorType,\n // global, and cacheState.\n markResourceTiming(\n timingInfo,\n originalURL,\n initiatorType,\n globalThis,\n cacheState\n )\n}\n\n// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing\nfunction markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {\n if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {\n performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)\n }\n}\n\n// https://fetch.spec.whatwg.org/#abort-fetch\nfunction abortFetch (p, request, responseObject, error) {\n // Note: AbortSignal.reason was added in node v17.2.0\n // which would give us an undefined error to reject with.\n // Remove this once node v16 is no longer supported.\n if (!error) {\n error = new DOMException('The operation was aborted.', 'AbortError')\n }\n\n // 1. Reject promise with error.\n p.reject(error)\n\n // 2. 
If request’s body is not null and is readable, then cancel request’s\n // body with error.\n if (request.body != null && isReadable(request.body?.stream)) {\n request.body.stream.cancel(error).catch((err) => {\n if (err.code === 'ERR_INVALID_STATE') {\n // Node bug?\n return\n }\n throw err\n })\n }\n\n // 3. If responseObject is null, then return.\n if (responseObject == null) {\n return\n }\n\n // 4. Let response be responseObject’s response.\n const response = responseObject[kState]\n\n // 5. If response’s body is not null and is readable, then error response’s\n // body with error.\n if (response.body != null && isReadable(response.body?.stream)) {\n response.body.stream.cancel(error).catch((err) => {\n if (err.code === 'ERR_INVALID_STATE') {\n // Node bug?\n return\n }\n throw err\n })\n }\n}\n\n// https://fetch.spec.whatwg.org/#fetching\nfunction fetching ({\n request,\n processRequestBodyChunkLength,\n processRequestEndOfBody,\n processResponse,\n processResponseEndOfBody,\n processResponseConsumeBody,\n useParallelQueue = false,\n dispatcher // undici\n}) {\n // 1. Let taskDestination be null.\n let taskDestination = null\n\n // 2. Let crossOriginIsolatedCapability be false.\n let crossOriginIsolatedCapability = false\n\n // 3. If request’s client is non-null, then:\n if (request.client != null) {\n // 1. Set taskDestination to request’s client’s global object.\n taskDestination = request.client.globalObject\n\n // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin\n // isolated capability.\n crossOriginIsolatedCapability =\n request.client.crossOriginIsolatedCapability\n }\n\n // 4. If useParallelQueue is true, then set taskDestination to the result of\n // starting a new parallel queue.\n // TODO\n\n // 5. Let timingInfo be a new fetch timing info whose start time and\n // post-redirect start time are the coarsened shared current time given\n // crossOriginIsolatedCapability.\n const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)\n const timingInfo = createOpaqueTimingInfo({\n startTime: currenTime\n })\n\n // 6. Let fetchParams be a new fetch params whose\n // request is request,\n // timing info is timingInfo,\n // process request body chunk length is processRequestBodyChunkLength,\n // process request end-of-body is processRequestEndOfBody,\n // process response is processResponse,\n // process response consume body is processResponseConsumeBody,\n // process response end-of-body is processResponseEndOfBody,\n // task destination is taskDestination,\n // and cross-origin isolated capability is crossOriginIsolatedCapability.\n const fetchParams = {\n controller: new Fetch(dispatcher),\n request,\n timingInfo,\n processRequestBodyChunkLength,\n processRequestEndOfBody,\n processResponse,\n processResponseConsumeBody,\n processResponseEndOfBody,\n taskDestination,\n crossOriginIsolatedCapability\n }\n\n // 7. If request’s body is a byte sequence, then set request’s body to\n // request’s body as a body.\n // NOTE: Since fetching is only called from fetch, body should already be\n // extracted.\n assert(!request.body || request.body.stream)\n\n // 8. If request’s window is \"client\", then set request’s window to request’s\n // client, if request’s client’s global object is a Window object; otherwise\n // \"no-window\".\n if (request.window === 'client') {\n // TODO: What if request.client is null?\n request.window =\n request.client?.globalObject?.constructor?.name === 'Window'\n ? request.client\n : 'no-window'\n }\n\n // 9. 
If request’s origin is \"client\", then set request’s origin to request’s\n // client’s origin.\n if (request.origin === 'client') {\n // TODO: What if request.client is null?\n request.origin = request.client?.origin\n }\n\n // 10. If all of the following conditions are true:\n // TODO\n\n // 11. If request’s policy container is \"client\", then:\n if (request.policyContainer === 'client') {\n // 1. If request’s client is non-null, then set request’s policy\n // container to a clone of request’s client’s policy container. [HTML]\n if (request.client != null) {\n request.policyContainer = clonePolicyContainer(\n request.client.policyContainer\n )\n } else {\n // 2. Otherwise, set request’s policy container to a new policy\n // container.\n request.policyContainer = makePolicyContainer()\n }\n }\n\n // 12. If request’s header list does not contain `Accept`, then:\n if (!request.headersList.contains('accept')) {\n // 1. Let value be `*/*`.\n const value = '*/*'\n\n // 2. A user agent should set value to the first matching statement, if\n // any, switching on request’s destination:\n // \"document\"\n // \"frame\"\n // \"iframe\"\n // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8`\n // \"image\"\n // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5`\n // \"style\"\n // `text/css,*/*;q=0.1`\n // TODO\n\n // 3. Append `Accept`/value to request’s header list.\n request.headersList.append('accept', value)\n }\n\n // 13. If request’s header list does not contain `Accept-Language`, then\n // user agents should append `Accept-Language`/an appropriate value to\n // request’s header list.\n if (!request.headersList.contains('accept-language')) {\n request.headersList.append('accept-language', '*')\n }\n\n // 14. If request’s priority is null, then use request’s initiator and\n // destination appropriately in setting request’s priority to a\n // user-agent-defined object.\n if (request.priority === null) {\n // TODO\n }\n\n // 15. If request is a subresource request, then:\n if (subresourceSet.has(request.destination)) {\n // TODO\n }\n\n // 16. Run main fetch given fetchParams.\n mainFetch(fetchParams)\n .catch(err => {\n fetchParams.controller.terminate(err)\n })\n\n // 17. Return fetchParam's controller\n return fetchParams.controller\n}\n\n// https://fetch.spec.whatwg.org/#concept-main-fetch\nasync function mainFetch (fetchParams, recursive = false) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. If request’s local-URLs-only flag is set and request’s current URL is\n // not local, then set response to a network error.\n if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {\n response = makeNetworkError('local URLs only')\n }\n\n // 4. Run report Content Security Policy violations for request.\n // TODO\n\n // 5. Upgrade request to a potentially trustworthy URL, if appropriate.\n tryUpgradeRequestToAPotentiallyTrustworthyURL(request)\n\n // 6. If should request be blocked due to a bad port, should fetching request\n // be blocked as mixed content, or should request be blocked by Content\n // Security Policy returns blocked, then set response to a network error.\n if (requestBadPort(request) === 'blocked') {\n response = makeNetworkError('bad port')\n }\n // TODO: should fetching request be blocked as mixed content?\n // TODO: should request be blocked by Content Security Policy?\n\n // 7. 
If request’s referrer policy is the empty string, then set request’s\n // referrer policy to request’s policy container’s referrer policy.\n if (request.referrerPolicy === '') {\n request.referrerPolicy = request.policyContainer.referrerPolicy\n }\n\n // 8. If request’s referrer is not \"no-referrer\", then set request’s\n // referrer to the result of invoking determine request’s referrer.\n if (request.referrer !== 'no-referrer') {\n request.referrer = determineRequestsReferrer(request)\n }\n\n // 9. Set request’s current URL’s scheme to \"https\" if all of the following\n // conditions are true:\n // - request’s current URL’s scheme is \"http\"\n // - request’s current URL’s host is a domain\n // - Matching request’s current URL’s host per Known HSTS Host Domain Name\n // Matching results in either a superdomain match with an asserted\n // includeSubDomains directive or a congruent match (with or without an\n // asserted includeSubDomains directive). [HSTS]\n // TODO\n\n // 10. If recursive is false, then run the remaining steps in parallel.\n // TODO\n\n // 11. If response is null, then set response to the result of running\n // the steps corresponding to the first matching statement:\n if (response === null) {\n response = await (async () => {\n const currentURL = requestCurrentURL(request)\n\n if (\n // - request’s current URL’s origin is same origin with request’s origin,\n // and request’s response tainting is \"basic\"\n (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||\n // request’s current URL’s scheme is \"data\"\n (currentURL.protocol === 'data:') ||\n // - request’s mode is \"navigate\" or \"websocket\"\n (request.mode === 'navigate' || request.mode === 'websocket')\n ) {\n // 1. Set request’s response tainting to \"basic\".\n request.responseTainting = 'basic'\n\n // 2. Return the result of running scheme fetch given fetchParams.\n return await schemeFetch(fetchParams)\n }\n\n // request’s mode is \"same-origin\"\n if (request.mode === 'same-origin') {\n // 1. Return a network error.\n return makeNetworkError('request mode cannot be \"same-origin\"')\n }\n\n // request’s mode is \"no-cors\"\n if (request.mode === 'no-cors') {\n // 1. If request’s redirect mode is not \"follow\", then return a network\n // error.\n if (request.redirect !== 'follow') {\n return makeNetworkError(\n 'redirect mode cannot be \"follow\" for \"no-cors\" request'\n )\n }\n\n // 2. Set request’s response tainting to \"opaque\".\n request.responseTainting = 'opaque'\n\n // 3. Return the result of running scheme fetch given fetchParams.\n return await schemeFetch(fetchParams)\n }\n\n // request’s current URL’s scheme is not an HTTP(S) scheme\n if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {\n // Return a network error.\n return makeNetworkError('URL scheme must be a HTTP(S) scheme')\n }\n\n // - request’s use-CORS-preflight flag is set\n // - request’s unsafe-request flag is set and either request’s method is\n // not a CORS-safelisted method or CORS-unsafe request-header names with\n // request’s header list is not empty\n // 1. Set request’s response tainting to \"cors\".\n // 2. Let corsWithPreflightResponse be the result of running HTTP fetch\n // given fetchParams and true.\n // 3. If corsWithPreflightResponse is a network error, then clear cache\n // entries using request.\n // 4. Return corsWithPreflightResponse.\n // TODO\n\n // Otherwise\n // 1. Set request’s response tainting to \"cors\".\n request.responseTainting = 'cors'\n\n // 2. 
Return the result of running HTTP fetch given fetchParams.\n return await httpFetch(fetchParams)\n })()\n }\n\n // 12. If recursive is true, then return response.\n if (recursive) {\n return response\n }\n\n // 13. If response is not a network error and response is not a filtered\n // response, then:\n if (response.status !== 0 && !response.internalResponse) {\n // If request’s response tainting is \"cors\", then:\n if (request.responseTainting === 'cors') {\n // 1. Let headerNames be the result of extracting header list values\n // given `Access-Control-Expose-Headers` and response’s header list.\n // TODO\n // 2. If request’s credentials mode is not \"include\" and headerNames\n // contains `*`, then set response’s CORS-exposed header-name list to\n // all unique header names in response’s header list.\n // TODO\n // 3. Otherwise, if headerNames is not null or failure, then set\n // response’s CORS-exposed header-name list to headerNames.\n // TODO\n }\n\n // Set response to the following filtered response with response as its\n // internal response, depending on request’s response tainting:\n if (request.responseTainting === 'basic') {\n response = filterResponse(response, 'basic')\n } else if (request.responseTainting === 'cors') {\n response = filterResponse(response, 'cors')\n } else if (request.responseTainting === 'opaque') {\n response = filterResponse(response, 'opaque')\n } else {\n assert(false)\n }\n }\n\n // 14. Let internalResponse be response, if response is a network error,\n // and response’s internal response otherwise.\n let internalResponse =\n response.status === 0 ? response : response.internalResponse\n\n // 15. If internalResponse’s URL list is empty, then set it to a clone of\n // request’s URL list.\n if (internalResponse.urlList.length === 0) {\n internalResponse.urlList.push(...request.urlList)\n }\n\n // 16. If request’s timing allow failed flag is unset, then set\n // internalResponse’s timing allow passed flag.\n if (!request.timingAllowFailed) {\n response.timingAllowPassed = true\n }\n\n // 17. If response is not a network error and any of the following returns\n // blocked\n // - should internalResponse to request be blocked as mixed content\n // - should internalResponse to request be blocked by Content Security Policy\n // - should internalResponse to request be blocked due to its MIME type\n // - should internalResponse to request be blocked due to nosniff\n // TODO\n\n // 18. If response’s type is \"opaque\", internalResponse’s status is 206,\n // internalResponse’s range-requested flag is set, and request’s header\n // list does not contain `Range`, then set response and internalResponse\n // to a network error.\n if (\n response.type === 'opaque' &&\n internalResponse.status === 206 &&\n internalResponse.rangeRequested &&\n !request.headers.contains('range')\n ) {\n response = internalResponse = makeNetworkError()\n }\n\n // 19. If response is not a network error and either request’s method is\n // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status,\n // set internalResponse’s body to null and disregard any enqueuing toward\n // it (if any).\n if (\n response.status !== 0 &&\n (request.method === 'HEAD' ||\n request.method === 'CONNECT' ||\n nullBodyStatus.includes(internalResponse.status))\n ) {\n internalResponse.body = null\n fetchParams.controller.dump = true\n }\n\n // 20. If request’s integrity metadata is not the empty string, then:\n if (request.integrity) {\n // 1. 
Let processBodyError be this step: run fetch finale given fetchParams\n // and a network error.\n const processBodyError = (reason) =>\n fetchFinale(fetchParams, makeNetworkError(reason))\n\n // 2. If request’s response tainting is \"opaque\", or response’s body is null,\n // then run processBodyError and abort these steps.\n if (request.responseTainting === 'opaque' || response.body == null) {\n processBodyError(response.error)\n return\n }\n\n // 3. Let processBody given bytes be these steps:\n const processBody = (bytes) => {\n // 1. If bytes do not match request’s integrity metadata,\n // then run processBodyError and abort these steps. [SRI]\n if (!bytesMatch(bytes, request.integrity)) {\n processBodyError('integrity mismatch')\n return\n }\n\n // 2. Set response’s body to bytes as a body.\n response.body = safelyExtractBody(bytes)[0]\n\n // 3. Run fetch finale given fetchParams and response.\n fetchFinale(fetchParams, response)\n }\n\n // 4. Fully read response’s body given processBody and processBodyError.\n await fullyReadBody(response.body, processBody, processBodyError)\n } else {\n // 21. Otherwise, run fetch finale given fetchParams and response.\n fetchFinale(fetchParams, response)\n }\n}\n\n// https://fetch.spec.whatwg.org/#concept-scheme-fetch\n// given a fetch params fetchParams\nfunction schemeFetch (fetchParams) {\n // Note: since the connection is destroyed on redirect, which sets fetchParams to a\n // cancelled state, we do not want this condition to trigger *unless* there have been\n // no redirects. See https://github.com/nodejs/undici/issues/1776\n // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.\n if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {\n return Promise.resolve(makeAppropriateNetworkError(fetchParams))\n }\n\n // 2. Let request be fetchParams’s request.\n const { request } = fetchParams\n\n const { protocol: scheme } = requestCurrentURL(request)\n\n // 3. Switch on request’s current URL’s scheme and run the associated steps:\n switch (scheme) {\n case 'about:': {\n // If request’s current URL’s path is the string \"blank\", then return a new response\n // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) »,\n // and body is the empty byte sequence as a body.\n\n // Otherwise, return a network error.\n return Promise.resolve(makeNetworkError('about scheme is not supported'))\n }\n case 'blob:': {\n if (!resolveObjectURL) {\n resolveObjectURL = require('buffer').resolveObjectURL\n }\n\n // 1. Let blobURLEntry be request’s current URL’s blob URL entry.\n const blobURLEntry = requestCurrentURL(request)\n\n // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56\n // Buffer.resolveObjectURL does not ignore URL queries.\n if (blobURLEntry.search.length !== 0) {\n return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.'))\n }\n\n const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())\n\n // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s\n // object is not a Blob object, then return a network error.\n if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {\n return Promise.resolve(makeNetworkError('invalid method'))\n }\n\n // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object.\n const bodyWithType = safelyExtractBody(blobURLEntryObject)\n\n // 4. 
Let body be bodyWithType’s body.\n const body = bodyWithType[0]\n\n // 5. Let length be body’s length, serialized and isomorphic encoded.\n const length = isomorphicEncode(`${body.length}`)\n\n // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence.\n const type = bodyWithType[1] ?? ''\n\n // 7. Return a new response whose status message is `OK`, header list is\n // « (`Content-Length`, length), (`Content-Type`, type) », and body is body.\n const response = makeResponse({\n statusText: 'OK',\n headersList: [\n ['content-length', { name: 'Content-Length', value: length }],\n ['content-type', { name: 'Content-Type', value: type }]\n ]\n })\n\n response.body = body\n\n return Promise.resolve(response)\n }\n case 'data:': {\n // 1. Let dataURLStruct be the result of running the\n // data: URL processor on request’s current URL.\n const currentURL = requestCurrentURL(request)\n const dataURLStruct = dataURLProcessor(currentURL)\n\n // 2. If dataURLStruct is failure, then return a\n // network error.\n if (dataURLStruct === 'failure') {\n return Promise.resolve(makeNetworkError('failed to fetch the data URL'))\n }\n\n // 3. Let mimeType be dataURLStruct’s MIME type, serialized.\n const mimeType = serializeAMimeType(dataURLStruct.mimeType)\n\n // 4. Return a response whose status message is `OK`,\n // header list is « (`Content-Type`, mimeType) »,\n // and body is dataURLStruct’s body as a body.\n return Promise.resolve(makeResponse({\n statusText: 'OK',\n headersList: [\n ['content-type', { name: 'Content-Type', value: mimeType }]\n ],\n body: safelyExtractBody(dataURLStruct.body)[0]\n }))\n }\n case 'file:': {\n // For now, unfortunate as it is, file URLs are left as an exercise for the reader.\n // When in doubt, return a network error.\n return Promise.resolve(makeNetworkError('not implemented... yet...'))\n }\n case 'http:':\n case 'https:': {\n // Return the result of running HTTP fetch given fetchParams.\n\n return httpFetch(fetchParams)\n .catch((err) => makeNetworkError(err))\n }\n default: {\n return Promise.resolve(makeNetworkError('unknown scheme'))\n }\n }\n}\n\n// https://fetch.spec.whatwg.org/#finalize-response\nfunction finalizeResponse (fetchParams, response) {\n // 1. Set fetchParams’s request’s done flag.\n fetchParams.request.done = true\n\n // 2, If fetchParams’s process response done is not null, then queue a fetch\n // task to run fetchParams’s process response done given response, with\n // fetchParams’s task destination.\n if (fetchParams.processResponseDone != null) {\n queueMicrotask(() => fetchParams.processResponseDone(response))\n }\n}\n\n// https://fetch.spec.whatwg.org/#fetch-finale\nfunction fetchFinale (fetchParams, response) {\n // 1. If response is a network error, then:\n if (response.type === 'error') {\n // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ».\n response.urlList = [fetchParams.request.urlList[0]]\n\n // 2. Set response’s timing info to the result of creating an opaque timing\n // info for fetchParams’s timing info.\n response.timingInfo = createOpaqueTimingInfo({\n startTime: fetchParams.timingInfo.startTime\n })\n }\n\n // 2. Let processResponseEndOfBody be the following steps:\n const processResponseEndOfBody = () => {\n // 1. 
Set fetchParams’s request’s done flag.\n fetchParams.request.done = true\n\n // If fetchParams’s process response end-of-body is not null,\n // then queue a fetch task to run fetchParams’s process response\n // end-of-body given response with fetchParams’s task destination.\n if (fetchParams.processResponseEndOfBody != null) {\n queueMicrotask(() => fetchParams.processResponseEndOfBody(response))\n }\n }\n\n // 3. If fetchParams’s process response is non-null, then queue a fetch task\n // to run fetchParams’s process response given response, with fetchParams’s\n // task destination.\n if (fetchParams.processResponse != null) {\n queueMicrotask(() => fetchParams.processResponse(response))\n }\n\n // 4. If response’s body is null, then run processResponseEndOfBody.\n if (response.body == null) {\n processResponseEndOfBody()\n } else {\n // 5. Otherwise:\n\n // 1. Let transformStream be a new a TransformStream.\n\n // 2. Let identityTransformAlgorithm be an algorithm which, given chunk,\n // enqueues chunk in transformStream.\n const identityTransformAlgorithm = (chunk, controller) => {\n controller.enqueue(chunk)\n }\n\n // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm\n // and flushAlgorithm set to processResponseEndOfBody.\n const transformStream = new TransformStream({\n start () {},\n transform: identityTransformAlgorithm,\n flush: processResponseEndOfBody\n }, {\n size () {\n return 1\n }\n }, {\n size () {\n return 1\n }\n })\n\n // 4. Set response’s body to the result of piping response’s body through transformStream.\n response.body = { stream: response.body.stream.pipeThrough(transformStream) }\n }\n\n // 6. If fetchParams’s process response consume body is non-null, then:\n if (fetchParams.processResponseConsumeBody != null) {\n // 1. Let processBody given nullOrBytes be this step: run fetchParams’s\n // process response consume body given response and nullOrBytes.\n const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes)\n\n // 2. Let processBodyError be this step: run fetchParams’s process\n // response consume body given response and failure.\n const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure)\n\n // 3. If response’s body is null, then queue a fetch task to run processBody\n // given null, with fetchParams’s task destination.\n if (response.body == null) {\n queueMicrotask(() => processBody(null))\n } else {\n // 4. Otherwise, fully read response’s body given processBody, processBodyError,\n // and fetchParams’s task destination.\n return fullyReadBody(response.body, processBody, processBodyError)\n }\n return Promise.resolve()\n }\n}\n\n// https://fetch.spec.whatwg.org/#http-fetch\nasync function httpFetch (fetchParams) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. Let actualResponse be null.\n let actualResponse = null\n\n // 4. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 5. If request’s service-workers mode is \"all\", then:\n if (request.serviceWorkers === 'all') {\n // TODO\n }\n\n // 6. If response is null, then:\n if (response === null) {\n // 1. If makeCORSPreflight is true and one of these conditions is true:\n // TODO\n\n // 2. 
If request’s redirect mode is \"follow\", then set request’s\n // service-workers mode to \"none\".\n if (request.redirect === 'follow') {\n request.serviceWorkers = 'none'\n }\n\n // 3. Set response and actualResponse to the result of running\n // HTTP-network-or-cache fetch given fetchParams.\n actualResponse = response = await httpNetworkOrCacheFetch(fetchParams)\n\n // 4. If request’s response tainting is \"cors\" and a CORS check\n // for request and response returns failure, then return a network error.\n if (\n request.responseTainting === 'cors' &&\n corsCheck(request, response) === 'failure'\n ) {\n return makeNetworkError('cors failure')\n }\n\n // 5. If the TAO check for request and response returns failure, then set\n // request’s timing allow failed flag.\n if (TAOCheck(request, response) === 'failure') {\n request.timingAllowFailed = true\n }\n }\n\n // 7. If either request’s response tainting or response’s type\n // is \"opaque\", and the cross-origin resource policy check with\n // request’s origin, request’s client, request’s destination,\n // and actualResponse returns blocked, then return a network error.\n if (\n (request.responseTainting === 'opaque' || response.type === 'opaque') &&\n crossOriginResourcePolicyCheck(\n request.origin,\n request.client,\n request.destination,\n actualResponse\n ) === 'blocked'\n ) {\n return makeNetworkError('blocked')\n }\n\n // 8. If actualResponse’s status is a redirect status, then:\n if (redirectStatusSet.has(actualResponse.status)) {\n // 1. If actualResponse’s status is not 303, request’s body is not null,\n // and the connection uses HTTP/2, then user agents may, and are even\n // encouraged to, transmit an RST_STREAM frame.\n // See, https://github.com/whatwg/fetch/issues/1288\n if (request.redirect !== 'manual') {\n fetchParams.controller.connection.destroy()\n }\n\n // 2. Switch on request’s redirect mode:\n if (request.redirect === 'error') {\n // Set response to a network error.\n response = makeNetworkError('unexpected redirect')\n } else if (request.redirect === 'manual') {\n // Set response to an opaque-redirect filtered response whose internal\n // response is actualResponse.\n // NOTE(spec): On the web this would return an `opaqueredirect` response,\n // but that doesn't make sense server side.\n // See https://github.com/nodejs/undici/issues/1193.\n response = actualResponse\n } else if (request.redirect === 'follow') {\n // Set response to the result of running HTTP-redirect fetch given\n // fetchParams and response.\n response = await httpRedirectFetch(fetchParams, response)\n } else {\n assert(false)\n }\n }\n\n // 9. Set response’s timing info to timingInfo.\n response.timingInfo = timingInfo\n\n // 10. Return response.\n return response\n}\n\n// https://fetch.spec.whatwg.org/#http-redirect-fetch\nfunction httpRedirectFetch (fetchParams, response) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let actualResponse be response, if response is not a filtered response,\n // and response’s internal response otherwise.\n const actualResponse = response.internalResponse\n ? response.internalResponse\n : response\n\n // 3. Let locationURL be actualResponse’s location URL given request’s current\n // URL’s fragment.\n let locationURL\n\n try {\n locationURL = responseLocationURL(\n actualResponse,\n requestCurrentURL(request).hash\n )\n\n // 4. 
If locationURL is null, then return response.\n if (locationURL == null) {\n return response\n }\n } catch (err) {\n // 5. If locationURL is failure, then return a network error.\n return Promise.resolve(makeNetworkError(err))\n }\n\n // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network\n // error.\n if (!urlIsHttpHttpsScheme(locationURL)) {\n return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme'))\n }\n\n // 7. If request’s redirect count is 20, then return a network error.\n if (request.redirectCount === 20) {\n return Promise.resolve(makeNetworkError('redirect count exceeded'))\n }\n\n // 8. Increase request’s redirect count by 1.\n request.redirectCount += 1\n\n // 9. If request’s mode is \"cors\", locationURL includes credentials, and\n // request’s origin is not same origin with locationURL’s origin, then return\n // a network error.\n if (\n request.mode === 'cors' &&\n (locationURL.username || locationURL.password) &&\n !sameOrigin(request, locationURL)\n ) {\n return Promise.resolve(makeNetworkError('cross origin not allowed for request mode \"cors\"'))\n }\n\n // 10. If request’s response tainting is \"cors\" and locationURL includes\n // credentials, then return a network error.\n if (\n request.responseTainting === 'cors' &&\n (locationURL.username || locationURL.password)\n ) {\n return Promise.resolve(makeNetworkError(\n 'URL cannot contain credentials for request mode \"cors\"'\n ))\n }\n\n // 11. If actualResponse’s status is not 303, request’s body is non-null,\n // and request’s body’s source is null, then return a network error.\n if (\n actualResponse.status !== 303 &&\n request.body != null &&\n request.body.source == null\n ) {\n return Promise.resolve(makeNetworkError())\n }\n\n // 12. If one of the following is true\n // - actualResponse’s status is 301 or 302 and request’s method is `POST`\n // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD`\n if (\n ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||\n (actualResponse.status === 303 &&\n !GET_OR_HEAD.includes(request.method))\n ) {\n // then:\n // 1. Set request’s method to `GET` and request’s body to null.\n request.method = 'GET'\n request.body = null\n\n // 2. For each headerName of request-body-header name, delete headerName from\n // request’s header list.\n for (const headerName of requestBodyHeader) {\n request.headersList.delete(headerName)\n }\n }\n\n // 13. If request’s current URL’s origin is not same origin with locationURL’s\n // origin, then for each headerName of CORS non-wildcard request-header name,\n // delete headerName from request’s header list.\n if (!sameOrigin(requestCurrentURL(request), locationURL)) {\n // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name\n request.headersList.delete('authorization')\n\n // https://fetch.spec.whatwg.org/#authentication-entries\n request.headersList.delete('proxy-authorization', true)\n\n // \"Cookie\" and \"Host\" are forbidden request-headers, which undici doesn't implement.\n request.headersList.delete('cookie')\n request.headersList.delete('host')\n }\n\n // 14. If request’s body is non-null, then set request’s body to the first return\n // value of safely extracting request’s body’s source.\n if (request.body != null) {\n assert(request.body.source != null)\n request.body = safelyExtractBody(request.body.source)[0]\n }\n\n // 15. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 16. 
Set timingInfo’s redirect end time and post-redirect start time to the\n // coarsened shared current time given fetchParams’s cross-origin isolated\n // capability.\n timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =\n coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)\n\n // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s\n // redirect start time to timingInfo’s start time.\n if (timingInfo.redirectStartTime === 0) {\n timingInfo.redirectStartTime = timingInfo.startTime\n }\n\n // 18. Append locationURL to request’s URL list.\n request.urlList.push(locationURL)\n\n // 19. Invoke set request’s referrer policy on redirect on request and\n // actualResponse.\n setRequestReferrerPolicyOnRedirect(request, actualResponse)\n\n // 20. Return the result of running main fetch given fetchParams and true.\n return mainFetch(fetchParams, true)\n}\n\n// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch\nasync function httpNetworkOrCacheFetch (\n fetchParams,\n isAuthenticationFetch = false,\n isNewConnectionFetch = false\n) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let httpFetchParams be null.\n let httpFetchParams = null\n\n // 3. Let httpRequest be null.\n let httpRequest = null\n\n // 4. Let response be null.\n let response = null\n\n // 5. Let storedResponse be null.\n // TODO: cache\n\n // 6. Let httpCache be null.\n const httpCache = null\n\n // 7. Let the revalidatingFlag be unset.\n const revalidatingFlag = false\n\n // 8. Run these steps, but abort when the ongoing fetch is terminated:\n\n // 1. If request’s window is \"no-window\" and request’s redirect mode is\n // \"error\", then set httpFetchParams to fetchParams and httpRequest to\n // request.\n if (request.window === 'no-window' && request.redirect === 'error') {\n httpFetchParams = fetchParams\n httpRequest = request\n } else {\n // Otherwise:\n\n // 1. Set httpRequest to a clone of request.\n httpRequest = makeRequest(request)\n\n // 2. Set httpFetchParams to a copy of fetchParams.\n httpFetchParams = { ...fetchParams }\n\n // 3. Set httpFetchParams’s request to httpRequest.\n httpFetchParams.request = httpRequest\n }\n\n // 3. Let includeCredentials be true if one of\n const includeCredentials =\n request.credentials === 'include' ||\n (request.credentials === 'same-origin' &&\n request.responseTainting === 'basic')\n\n // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s\n // body is non-null; otherwise null.\n const contentLength = httpRequest.body ? httpRequest.body.length : null\n\n // 5. Let contentLengthHeaderValue be null.\n let contentLengthHeaderValue = null\n\n // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or\n // `PUT`, then set contentLengthHeaderValue to `0`.\n if (\n httpRequest.body == null &&\n ['POST', 'PUT'].includes(httpRequest.method)\n ) {\n contentLengthHeaderValue = '0'\n }\n\n // 7. If contentLength is non-null, then set contentLengthHeaderValue to\n // contentLength, serialized and isomorphic encoded.\n if (contentLength != null) {\n contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)\n }\n\n // 8. If contentLengthHeaderValue is non-null, then append\n // `Content-Length`/contentLengthHeaderValue to httpRequest’s header\n // list.\n if (contentLengthHeaderValue != null) {\n httpRequest.headersList.append('content-length', contentLengthHeaderValue)\n }\n\n // 9. 
If contentLengthHeaderValue is non-null, then append (`Content-Length`,\n // contentLengthHeaderValue) to httpRequest’s header list.\n\n // 10. If contentLength is non-null and httpRequest’s keepalive is true,\n // then:\n if (contentLength != null && httpRequest.keepalive) {\n // NOTE: keepalive is a noop outside of browser context.\n }\n\n // 11. If httpRequest’s referrer is a URL, then append\n // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,\n // to httpRequest’s header list.\n if (httpRequest.referrer instanceof URL) {\n httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))\n }\n\n // 12. Append a request `Origin` header for httpRequest.\n appendRequestOriginHeader(httpRequest)\n\n // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]\n appendFetchMetadata(httpRequest)\n\n // 14. If httpRequest’s header list does not contain `User-Agent`, then\n // user agents should append `User-Agent`/default `User-Agent` value to\n // httpRequest’s header list.\n if (!httpRequest.headersList.contains('user-agent')) {\n httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node')\n }\n\n // 15. If httpRequest’s cache mode is \"default\" and httpRequest’s header\n // list contains `If-Modified-Since`, `If-None-Match`,\n // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set\n // httpRequest’s cache mode to \"no-store\".\n if (\n httpRequest.cache === 'default' &&\n (httpRequest.headersList.contains('if-modified-since') ||\n httpRequest.headersList.contains('if-none-match') ||\n httpRequest.headersList.contains('if-unmodified-since') ||\n httpRequest.headersList.contains('if-match') ||\n httpRequest.headersList.contains('if-range'))\n ) {\n httpRequest.cache = 'no-store'\n }\n\n // 16. If httpRequest’s cache mode is \"no-cache\", httpRequest’s prevent\n // no-cache cache-control header modification flag is unset, and\n // httpRequest’s header list does not contain `Cache-Control`, then append\n // `Cache-Control`/`max-age=0` to httpRequest’s header list.\n if (\n httpRequest.cache === 'no-cache' &&\n !httpRequest.preventNoCacheCacheControlHeaderModification &&\n !httpRequest.headersList.contains('cache-control')\n ) {\n httpRequest.headersList.append('cache-control', 'max-age=0')\n }\n\n // 17. If httpRequest’s cache mode is \"no-store\" or \"reload\", then:\n if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {\n // 1. If httpRequest’s header list does not contain `Pragma`, then append\n // `Pragma`/`no-cache` to httpRequest’s header list.\n if (!httpRequest.headersList.contains('pragma')) {\n httpRequest.headersList.append('pragma', 'no-cache')\n }\n\n // 2. If httpRequest’s header list does not contain `Cache-Control`,\n // then append `Cache-Control`/`no-cache` to httpRequest’s header list.\n if (!httpRequest.headersList.contains('cache-control')) {\n httpRequest.headersList.append('cache-control', 'no-cache')\n }\n }\n\n // 18. If httpRequest’s header list contains `Range`, then append\n // `Accept-Encoding`/`identity` to httpRequest’s header list.\n if (httpRequest.headersList.contains('range')) {\n httpRequest.headersList.append('accept-encoding', 'identity')\n }\n\n // 19. Modify httpRequest’s header list per HTTP. 
Do not append a given\n // header if httpRequest’s header list contains that header’s name.\n // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129\n if (!httpRequest.headersList.contains('accept-encoding')) {\n if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {\n httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')\n } else {\n httpRequest.headersList.append('accept-encoding', 'gzip, deflate')\n }\n }\n\n httpRequest.headersList.delete('host')\n\n // 20. If includeCredentials is true, then:\n if (includeCredentials) {\n // 1. If the user agent is not configured to block cookies for httpRequest\n // (see section 7 of [COOKIES]), then:\n // TODO: credentials\n // 2. If httpRequest’s header list does not contain `Authorization`, then:\n // TODO: credentials\n }\n\n // 21. If there’s a proxy-authentication entry, use it as appropriate.\n // TODO: proxy-authentication\n\n // 22. Set httpCache to the result of determining the HTTP cache\n // partition, given httpRequest.\n // TODO: cache\n\n // 23. If httpCache is null, then set httpRequest’s cache mode to\n // \"no-store\".\n if (httpCache == null) {\n httpRequest.cache = 'no-store'\n }\n\n // 24. If httpRequest’s cache mode is neither \"no-store\" nor \"reload\",\n // then:\n if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') {\n // TODO: cache\n }\n\n // 9. If aborted, then return the appropriate network error for fetchParams.\n // TODO\n\n // 10. If response is null, then:\n if (response == null) {\n // 1. If httpRequest’s cache mode is \"only-if-cached\", then return a\n // network error.\n if (httpRequest.mode === 'only-if-cached') {\n return makeNetworkError('only if cached')\n }\n\n // 2. Let forwardResponse be the result of running HTTP-network fetch\n // given httpFetchParams, includeCredentials, and isNewConnectionFetch.\n const forwardResponse = await httpNetworkFetch(\n httpFetchParams,\n includeCredentials,\n isNewConnectionFetch\n )\n\n // 3. If httpRequest’s method is unsafe and forwardResponse’s status is\n // in the range 200 to 399, inclusive, invalidate appropriate stored\n // responses in httpCache, as per the \"Invalidation\" chapter of HTTP\n // Caching, and set storedResponse to null. [HTTP-CACHING]\n if (\n !safeMethodsSet.has(httpRequest.method) &&\n forwardResponse.status >= 200 &&\n forwardResponse.status <= 399\n ) {\n // TODO: cache\n }\n\n // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,\n // then:\n if (revalidatingFlag && forwardResponse.status === 304) {\n // TODO: cache\n }\n\n // 5. If response is null, then:\n if (response == null) {\n // 1. Set response to forwardResponse.\n response = forwardResponse\n\n // 2. Store httpRequest and forwardResponse in httpCache, as per the\n // \"Storing Responses in Caches\" chapter of HTTP Caching. [HTTP-CACHING]\n // TODO: cache\n }\n }\n\n // 11. Set response’s URL list to a clone of httpRequest’s URL list.\n response.urlList = [...httpRequest.urlList]\n\n // 12. If httpRequest’s header list contains `Range`, then set response’s\n // range-requested flag.\n if (httpRequest.headersList.contains('range')) {\n response.rangeRequested = true\n }\n\n // 13. Set response’s request-includes-credentials to includeCredentials.\n response.requestIncludesCredentials = includeCredentials\n\n // 14. 
If response’s status is 401, httpRequest’s response tainting is not\n // \"cors\", includeCredentials is true, and request’s window is an environment\n // settings object, then:\n // TODO\n\n // 15. If response’s status is 407, then:\n if (response.status === 407) {\n // 1. If request’s window is \"no-window\", then return a network error.\n if (request.window === 'no-window') {\n return makeNetworkError()\n }\n\n // 2. ???\n\n // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.\n if (isCancelled(fetchParams)) {\n return makeAppropriateNetworkError(fetchParams)\n }\n\n // 4. Prompt the end user as appropriate in request’s window and store\n // the result as a proxy-authentication entry. [HTTP-AUTH]\n // TODO: Invoke some kind of callback?\n\n // 5. Set response to the result of running HTTP-network-or-cache fetch given\n // fetchParams.\n // TODO\n return makeNetworkError('proxy authentication required')\n }\n\n // 16. If all of the following are true\n if (\n // response’s status is 421\n response.status === 421 &&\n // isNewConnectionFetch is false\n !isNewConnectionFetch &&\n // request’s body is null, or request’s body is non-null and request’s body’s source is non-null\n (request.body == null || request.body.source != null)\n ) {\n // then:\n\n // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.\n if (isCancelled(fetchParams)) {\n return makeAppropriateNetworkError(fetchParams)\n }\n\n // 2. Set response to the result of running HTTP-network-or-cache\n // fetch given fetchParams, isAuthenticationFetch, and true.\n\n // TODO (spec): The spec doesn't specify this but we need to cancel\n // the active response before we can start a new one.\n // https://github.com/whatwg/fetch/issues/1293\n fetchParams.controller.connection.destroy()\n\n response = await httpNetworkOrCacheFetch(\n fetchParams,\n isAuthenticationFetch,\n true\n )\n }\n\n // 17. If isAuthenticationFetch is true, then create an authentication entry\n if (isAuthenticationFetch) {\n // TODO\n }\n\n // 18. Return response.\n return response\n}\n\n// https://fetch.spec.whatwg.org/#http-network-fetch\nasync function httpNetworkFetch (\n fetchParams,\n includeCredentials = false,\n forceNewConnection = false\n) {\n assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed)\n\n fetchParams.controller.connection = {\n abort: null,\n destroyed: false,\n destroy (err) {\n if (!this.destroyed) {\n this.destroyed = true\n this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))\n }\n }\n }\n\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 4. Let httpCache be the result of determining the HTTP cache partition,\n // given request.\n // TODO: cache\n const httpCache = null\n\n // 5. If httpCache is null, then set request’s cache mode to \"no-store\".\n if (httpCache == null) {\n request.cache = 'no-store'\n }\n\n // 6. Let networkPartitionKey be the result of determining the network\n // partition key given request.\n // TODO\n\n // 7. Let newConnection be \"yes\" if forceNewConnection is true; otherwise\n // \"no\".\n const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars\n\n // 8. 
Switch on request’s mode:\n if (request.mode === 'websocket') {\n // Let connection be the result of obtaining a WebSocket connection,\n // given request’s current URL.\n // TODO\n } else {\n // Let connection be the result of obtaining a connection, given\n // networkPartitionKey, request’s current URL’s origin,\n // includeCredentials, and forceNewConnection.\n // TODO\n }\n\n // 9. Run these steps, but abort when the ongoing fetch is terminated:\n\n // 1. If connection is failure, then return a network error.\n\n // 2. Set timingInfo’s final connection timing info to the result of\n // calling clamp and coarsen connection timing info with connection’s\n // timing info, timingInfo’s post-redirect start time, and fetchParams’s\n // cross-origin isolated capability.\n\n // 3. If connection is not an HTTP/2 connection, request’s body is non-null,\n // and request’s body’s source is null, then append (`Transfer-Encoding`,\n // `chunked`) to request’s header list.\n\n // 4. Set timingInfo’s final network-request start time to the coarsened\n // shared current time given fetchParams’s cross-origin isolated\n // capability.\n\n // 5. Set response to the result of making an HTTP request over connection\n // using request with the following caveats:\n\n // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]\n // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]\n\n // - If request’s body is non-null, and request’s body’s source is null,\n // then the user agent may have a buffer of up to 64 kibibytes and store\n // a part of request’s body in that buffer. If the user agent reads from\n // request’s body beyond that buffer’s size and the user agent needs to\n // resend request, then instead return a network error.\n\n // - Set timingInfo’s final network-response start time to the coarsened\n // shared current time given fetchParams’s cross-origin isolated capability,\n // immediately after the user agent’s HTTP parser receives the first byte\n // of the response (e.g., frame header bytes for HTTP/2 or response status\n // line for HTTP/1.x).\n\n // - Wait until all the headers are transmitted.\n\n // - Any responses whose status is in the range 100 to 199, inclusive,\n // and is not 101, are to be ignored, except for the purposes of setting\n // timingInfo’s final network-response start time above.\n\n // - If request’s header list contains `Transfer-Encoding`/`chunked` and\n // response is transferred via HTTP/1.0 or older, then return a network\n // error.\n\n // - If the HTTP request results in a TLS client certificate dialog, then:\n\n // 1. If request’s window is an environment settings object, make the\n // dialog available in request’s window.\n\n // 2. Otherwise, return a network error.\n\n // To transmit request’s body body, run these steps:\n let requestBody = null\n // 1. If body is null and fetchParams’s process request end-of-body is\n // non-null, then queue a fetch task given fetchParams’s process request\n // end-of-body and fetchParams’s task destination.\n if (request.body == null && fetchParams.processRequestEndOfBody) {\n queueMicrotask(() => fetchParams.processRequestEndOfBody())\n } else if (request.body != null) {\n // 2. Otherwise, if body is non-null:\n\n // 1. Let processBodyChunk given bytes be these steps:\n const processBodyChunk = async function * (bytes) {\n // 1. If the ongoing fetch is terminated, then abort these steps.\n if (isCancelled(fetchParams)) {\n return\n }\n\n // 2. Run this step in parallel: transmit bytes.\n yield bytes\n\n // 3. 
If fetchParams’s process request body is non-null, then run\n // fetchParams’s process request body given bytes’s length.\n fetchParams.processRequestBodyChunkLength?.(bytes.byteLength)\n }\n\n // 2. Let processEndOfBody be these steps:\n const processEndOfBody = () => {\n // 1. If fetchParams is canceled, then abort these steps.\n if (isCancelled(fetchParams)) {\n return\n }\n\n // 2. If fetchParams’s process request end-of-body is non-null,\n // then run fetchParams’s process request end-of-body.\n if (fetchParams.processRequestEndOfBody) {\n fetchParams.processRequestEndOfBody()\n }\n }\n\n // 3. Let processBodyError given e be these steps:\n const processBodyError = (e) => {\n // 1. If fetchParams is canceled, then abort these steps.\n if (isCancelled(fetchParams)) {\n return\n }\n\n // 2. If e is an \"AbortError\" DOMException, then abort fetchParams’s controller.\n if (e.name === 'AbortError') {\n fetchParams.controller.abort()\n } else {\n fetchParams.controller.terminate(e)\n }\n }\n\n // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody,\n // processBodyError, and fetchParams’s task destination.\n requestBody = (async function * () {\n try {\n for await (const bytes of request.body.stream) {\n yield * processBodyChunk(bytes)\n }\n processEndOfBody()\n } catch (err) {\n processBodyError(err)\n }\n })()\n }\n\n try {\n // socket is only provided for websockets\n const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody })\n\n if (socket) {\n response = makeResponse({ status, statusText, headersList, socket })\n } else {\n const iterator = body[Symbol.asyncIterator]()\n fetchParams.controller.next = () => iterator.next()\n\n response = makeResponse({ status, statusText, headersList })\n }\n } catch (err) {\n // 10. If aborted, then:\n if (err.name === 'AbortError') {\n // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.\n fetchParams.controller.connection.destroy()\n\n // 2. Return the appropriate network error for fetchParams.\n return makeAppropriateNetworkError(fetchParams, err)\n }\n\n return makeNetworkError(err)\n }\n\n // 11. Let pullAlgorithm be an action that resumes the ongoing fetch\n // if it is suspended.\n const pullAlgorithm = () => {\n fetchParams.controller.resume()\n }\n\n // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s\n // controller with reason, given reason.\n const cancelAlgorithm = (reason) => {\n fetchParams.controller.abort(reason)\n }\n\n // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by\n // the user agent.\n // TODO\n\n // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object\n // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.\n // TODO\n\n // 15. Let stream be a new ReadableStream.\n // 16. Set up stream with pullAlgorithm set to pullAlgorithm,\n // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to\n // highWaterMark, and sizeAlgorithm set to sizeAlgorithm.\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n const stream = new ReadableStream(\n {\n async start (controller) {\n fetchParams.controller.controller = controller\n },\n async pull (controller) {\n await pullAlgorithm(controller)\n },\n async cancel (reason) {\n await cancelAlgorithm(reason)\n }\n },\n {\n highWaterMark: 0,\n size () {\n return 1\n }\n }\n )\n\n // 17. Run these steps, but abort when the ongoing fetch is terminated:\n\n // 1. 
Set response’s body to a new body whose stream is stream.\n response.body = { stream }\n\n // 2. If response is not a network error and request’s cache mode is\n // not \"no-store\", then update response in httpCache for request.\n // TODO\n\n // 3. If includeCredentials is true and the user agent is not configured\n // to block cookies for request (see section 7 of [COOKIES]), then run the\n // \"set-cookie-string\" parsing algorithm (see section 5.2 of [COOKIES]) on\n // the value of each header whose name is a byte-case-insensitive match for\n // `Set-Cookie` in response’s header list, if any, and request’s current URL.\n // TODO\n\n // 18. If aborted, then:\n // TODO\n\n // 19. Run these steps in parallel:\n\n // 1. Run these steps, but abort when fetchParams is canceled:\n fetchParams.controller.on('terminated', onAborted)\n fetchParams.controller.resume = async () => {\n // 1. While true\n while (true) {\n // 1-3. See onData...\n\n // 4. Set bytes to the result of handling content codings given\n // codings and bytes.\n let bytes\n let isFailure\n try {\n const { done, value } = await fetchParams.controller.next()\n\n if (isAborted(fetchParams)) {\n break\n }\n\n bytes = done ? undefined : value\n } catch (err) {\n if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {\n // zlib doesn't like empty streams.\n bytes = undefined\n } else {\n bytes = err\n\n // err may be propagated from the result of calling readablestream.cancel,\n // which might not be an error. https://github.com/nodejs/undici/issues/2009\n isFailure = true\n }\n }\n\n if (bytes === undefined) {\n // 2. Otherwise, if the bytes transmission for response’s message\n // body is done normally and stream is readable, then close\n // stream, finalize response for fetchParams and response, and\n // abort these in-parallel steps.\n readableStreamClose(fetchParams.controller.controller)\n\n finalizeResponse(fetchParams, response)\n\n return\n }\n\n // 5. Increase timingInfo’s decoded body size by bytes’s length.\n timingInfo.decodedBodySize += bytes?.byteLength ?? 0\n\n // 6. If bytes is failure, then terminate fetchParams’s controller.\n if (isFailure) {\n fetchParams.controller.terminate(bytes)\n return\n }\n\n // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes\n // into stream.\n fetchParams.controller.controller.enqueue(new Uint8Array(bytes))\n\n // 8. If stream is errored, then terminate the ongoing fetch.\n if (isErrored(stream)) {\n fetchParams.controller.terminate()\n return\n }\n\n // 9. If stream doesn’t need more data ask the user agent to suspend\n // the ongoing fetch.\n if (!fetchParams.controller.controller.desiredSize) {\n return\n }\n }\n }\n\n // 2. If aborted, then:\n function onAborted (reason) {\n // 2. If fetchParams is aborted, then:\n if (isAborted(fetchParams)) {\n // 1. Set response’s aborted flag.\n response.aborted = true\n\n // 2. If stream is readable, then error stream with the result of\n // deserialize a serialized abort reason given fetchParams’s\n // controller’s serialized abort reason and an\n // implementation-defined realm.\n if (isReadable(stream)) {\n fetchParams.controller.controller.error(\n fetchParams.controller.serializedAbortReason\n )\n }\n } else {\n // 3. Otherwise, if stream is readable, error stream with a TypeError.\n if (isReadable(stream)) {\n fetchParams.controller.controller.error(new TypeError('terminated', {\n cause: isErrorLike(reason) ? reason : undefined\n }))\n }\n }\n\n // 4. 
If connection uses HTTP/2, then transmit an RST_STREAM frame.\n // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.\n fetchParams.controller.connection.destroy()\n }\n\n // 20. Return response.\n return response\n\n async function dispatch ({ body }) {\n const url = requestCurrentURL(request)\n /** @type {import('../..').Agent} */\n const agent = fetchParams.controller.dispatcher\n\n return new Promise((resolve, reject) => agent.dispatch(\n {\n path: url.pathname + url.search,\n origin: url.origin,\n method: request.method,\n body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,\n headers: request.headersList.entries,\n maxRedirections: 0,\n upgrade: request.mode === 'websocket' ? 'websocket' : undefined\n },\n {\n body: null,\n abort: null,\n\n onConnect (abort) {\n // TODO (fix): Do we need connection here?\n const { connection } = fetchParams.controller\n\n if (connection.destroyed) {\n abort(new DOMException('The operation was aborted.', 'AbortError'))\n } else {\n fetchParams.controller.on('terminated', abort)\n this.abort = connection.abort = abort\n }\n },\n\n onHeaders (status, headersList, resume, statusText) {\n if (status < 200) {\n return\n }\n\n let codings = []\n let location = ''\n\n const headers = new Headers()\n\n // For H2, the headers are a plain JS object\n // We distinguish between them and iterate accordingly\n if (Array.isArray(headersList)) {\n for (let n = 0; n < headersList.length; n += 2) {\n const key = headersList[n + 0].toString('latin1')\n const val = headersList[n + 1].toString('latin1')\n if (key.toLowerCase() === 'content-encoding') {\n // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1\n // \"All content-coding values are case-insensitive...\"\n codings = val.toLowerCase().split(',').map((x) => x.trim())\n } else if (key.toLowerCase() === 'location') {\n location = val\n }\n\n headers[kHeadersList].append(key, val)\n }\n } else {\n const keys = Object.keys(headersList)\n for (const key of keys) {\n const val = headersList[key]\n if (key.toLowerCase() === 'content-encoding') {\n // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1\n // \"All content-coding values are case-insensitive...\"\n codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()\n } else if (key.toLowerCase() === 'location') {\n location = val\n }\n\n headers[kHeadersList].append(key, val)\n }\n }\n\n this.body = new Readable({ read: resume })\n\n const decoders = []\n\n const willFollow = request.redirect === 'follow' &&\n location &&\n redirectStatusSet.has(status)\n\n // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding\n if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {\n for (const coding of codings) {\n // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2\n if (coding === 'x-gzip' || coding === 'gzip') {\n decoders.push(zlib.createGunzip({\n // Be less strict when decoding compressed responses, since sometimes\n // servers send slightly invalid responses that are still accepted\n // by common browsers.\n // Always using Z_SYNC_FLUSH is what cURL does.\n flush: zlib.constants.Z_SYNC_FLUSH,\n finishFlush: zlib.constants.Z_SYNC_FLUSH\n }))\n } else if (coding === 'deflate') {\n decoders.push(zlib.createInflate())\n } else if (coding === 'br') {\n decoders.push(zlib.createBrotliDecompress())\n } else {\n decoders.length = 0\n break\n }\n }\n 
}\n\n resolve({\n status,\n statusText,\n headersList: headers[kHeadersList],\n body: decoders.length\n ? pipeline(this.body, ...decoders, () => { })\n : this.body.on('error', () => {})\n })\n\n return true\n },\n\n onData (chunk) {\n if (fetchParams.controller.dump) {\n return\n }\n\n // 1. If one or more bytes have been transmitted from response’s\n // message body, then:\n\n // 1. Let bytes be the transmitted bytes.\n const bytes = chunk\n\n // 2. Let codings be the result of extracting header list values\n // given `Content-Encoding` and response’s header list.\n // See pullAlgorithm.\n\n // 3. Increase timingInfo’s encoded body size by bytes’s length.\n timingInfo.encodedBodySize += bytes.byteLength\n\n // 4. See pullAlgorithm...\n\n return this.body.push(bytes)\n },\n\n onComplete () {\n if (this.abort) {\n fetchParams.controller.off('terminated', this.abort)\n }\n\n fetchParams.controller.ended = true\n\n this.body.push(null)\n },\n\n onError (error) {\n if (this.abort) {\n fetchParams.controller.off('terminated', this.abort)\n }\n\n this.body?.destroy(error)\n\n fetchParams.controller.terminate(error)\n\n reject(error)\n },\n\n onUpgrade (status, headersList, socket) {\n if (status !== 101) {\n return\n }\n\n const headers = new Headers()\n\n for (let n = 0; n < headersList.length; n += 2) {\n const key = headersList[n + 0].toString('latin1')\n const val = headersList[n + 1].toString('latin1')\n\n headers[kHeadersList].append(key, val)\n }\n\n resolve({\n status,\n statusText: STATUS_CODES[status],\n headersList: headers[kHeadersList],\n socket\n })\n\n return true\n }\n }\n ))\n }\n}\n\nmodule.exports = {\n fetch,\n Fetch,\n fetching,\n finalizeAndReportTiming\n}\n","/* globals AbortController */\n\n'use strict'\n\nconst { extractBody, mixinBody, cloneBody } = require('./body')\nconst { Headers, fill: fillHeaders, HeadersList } = require('./headers')\nconst { FinalizationRegistry } = require('../compat/dispatcher-weakref')()\nconst util = require('../core/util')\nconst {\n isValidHTTPToken,\n sameOrigin,\n normalizeMethod,\n makePolicyContainer,\n normalizeMethodRecord\n} = require('./util')\nconst {\n forbiddenMethodsSet,\n corsSafeListedMethodsSet,\n referrerPolicy,\n requestRedirect,\n requestMode,\n requestCredentials,\n requestCache,\n requestDuplex\n} = require('./constants')\nconst { kEnumerableProperty } = util\nconst { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols')\nconst { webidl } = require('./webidl')\nconst { getGlobalOrigin } = require('./global')\nconst { URLSerializer } = require('./dataURL')\nconst { kHeadersList, kConstruct } = require('../core/symbols')\nconst assert = require('assert')\nconst { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events')\n\nlet TransformStream = globalThis.TransformStream\n\nconst kAbortController = Symbol('abortController')\n\nconst requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {\n signal.removeEventListener('abort', abort)\n})\n\n// https://fetch.spec.whatwg.org/#request-class\nclass Request {\n // https://fetch.spec.whatwg.org/#dom-request\n constructor (input, init = {}) {\n if (input === kConstruct) {\n return\n }\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })\n\n input = webidl.converters.RequestInfo(input)\n init = webidl.converters.RequestInit(init)\n\n // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object\n this[kRealm] = {\n settingsObject: {\n baseUrl: getGlobalOrigin(),\n get 
origin () {\n return this.baseUrl?.origin\n },\n policyContainer: makePolicyContainer()\n }\n }\n\n // 1. Let request be null.\n let request = null\n\n // 2. Let fallbackMode be null.\n let fallbackMode = null\n\n // 3. Let baseURL be this’s relevant settings object’s API base URL.\n const baseUrl = this[kRealm].settingsObject.baseUrl\n\n // 4. Let signal be null.\n let signal = null\n\n // 5. If input is a string, then:\n if (typeof input === 'string') {\n // 1. Let parsedURL be the result of parsing input with baseURL.\n // 2. If parsedURL is failure, then throw a TypeError.\n let parsedURL\n try {\n parsedURL = new URL(input, baseUrl)\n } catch (err) {\n throw new TypeError('Failed to parse URL from ' + input, { cause: err })\n }\n\n // 3. If parsedURL includes credentials, then throw a TypeError.\n if (parsedURL.username || parsedURL.password) {\n throw new TypeError(\n 'Request cannot be constructed from a URL that includes credentials: ' +\n input\n )\n }\n\n // 4. Set request to a new request whose URL is parsedURL.\n request = makeRequest({ urlList: [parsedURL] })\n\n // 5. Set fallbackMode to \"cors\".\n fallbackMode = 'cors'\n } else {\n // 6. Otherwise:\n\n // 7. Assert: input is a Request object.\n assert(input instanceof Request)\n\n // 8. Set request to input’s request.\n request = input[kState]\n\n // 9. Set signal to input’s signal.\n signal = input[kSignal]\n }\n\n // 7. Let origin be this’s relevant settings object’s origin.\n const origin = this[kRealm].settingsObject.origin\n\n // 8. Let window be \"client\".\n let window = 'client'\n\n // 9. If request’s window is an environment settings object and its origin\n // is same origin with origin, then set window to request’s window.\n if (\n request.window?.constructor?.name === 'EnvironmentSettingsObject' &&\n sameOrigin(request.window, origin)\n ) {\n window = request.window\n }\n\n // 10. If init[\"window\"] exists and is non-null, then throw a TypeError.\n if (init.window != null) {\n throw new TypeError(`'window' option '${window}' must be null`)\n }\n\n // 11. If init[\"window\"] exists, then set window to \"no-window\".\n if ('window' in init) {\n window = 'no-window'\n }\n\n // 12. Set request to a new request with the following properties:\n request = makeRequest({\n // URL request’s URL.\n // undici implementation note: this is set as the first item in request's urlList in makeRequest\n // method request’s method.\n method: request.method,\n // header list A copy of request’s header list.\n // undici implementation note: headersList is cloned in makeRequest\n headersList: request.headersList,\n // unsafe-request flag Set.\n unsafeRequest: request.unsafeRequest,\n // client This’s relevant settings object.\n client: this[kRealm].settingsObject,\n // window window.\n window,\n // priority request’s priority.\n priority: request.priority,\n // origin request’s origin. The propagation of the origin is only significant for navigation requests\n // being handled by a service worker. 
In this scenario a request can have an origin that is different\n // from the current client.\n origin: request.origin,\n // referrer request’s referrer.\n referrer: request.referrer,\n // referrer policy request’s referrer policy.\n referrerPolicy: request.referrerPolicy,\n // mode request’s mode.\n mode: request.mode,\n // credentials mode request’s credentials mode.\n credentials: request.credentials,\n // cache mode request’s cache mode.\n cache: request.cache,\n // redirect mode request’s redirect mode.\n redirect: request.redirect,\n // integrity metadata request’s integrity metadata.\n integrity: request.integrity,\n // keepalive request’s keepalive.\n keepalive: request.keepalive,\n // reload-navigation flag request’s reload-navigation flag.\n reloadNavigation: request.reloadNavigation,\n // history-navigation flag request’s history-navigation flag.\n historyNavigation: request.historyNavigation,\n // URL list A clone of request’s URL list.\n urlList: [...request.urlList]\n })\n\n const initHasKey = Object.keys(init).length !== 0\n\n // 13. If init is not empty, then:\n if (initHasKey) {\n // 1. If request’s mode is \"navigate\", then set it to \"same-origin\".\n if (request.mode === 'navigate') {\n request.mode = 'same-origin'\n }\n\n // 2. Unset request’s reload-navigation flag.\n request.reloadNavigation = false\n\n // 3. Unset request’s history-navigation flag.\n request.historyNavigation = false\n\n // 4. Set request’s origin to \"client\".\n request.origin = 'client'\n\n // 5. Set request’s referrer to \"client\"\n request.referrer = 'client'\n\n // 6. Set request’s referrer policy to the empty string.\n request.referrerPolicy = ''\n\n // 7. Set request’s URL to request’s current URL.\n request.url = request.urlList[request.urlList.length - 1]\n\n // 8. Set request’s URL list to « request’s URL ».\n request.urlList = [request.url]\n }\n\n // 14. If init[\"referrer\"] exists, then:\n if (init.referrer !== undefined) {\n // 1. Let referrer be init[\"referrer\"].\n const referrer = init.referrer\n\n // 2. If referrer is the empty string, then set request’s referrer to \"no-referrer\".\n if (referrer === '') {\n request.referrer = 'no-referrer'\n } else {\n // 1. Let parsedReferrer be the result of parsing referrer with\n // baseURL.\n // 2. If parsedReferrer is failure, then throw a TypeError.\n let parsedReferrer\n try {\n parsedReferrer = new URL(referrer, baseUrl)\n } catch (err) {\n throw new TypeError(`Referrer \"${referrer}\" is not a valid URL.`, { cause: err })\n }\n\n // 3. If one of the following is true\n // - parsedReferrer’s scheme is \"about\" and path is the string \"client\"\n // - parsedReferrer’s origin is not same origin with origin\n // then set request’s referrer to \"client\".\n if (\n (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') ||\n (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl))\n ) {\n request.referrer = 'client'\n } else {\n // 4. Otherwise, set request’s referrer to parsedReferrer.\n request.referrer = parsedReferrer\n }\n }\n }\n\n // 15. If init[\"referrerPolicy\"] exists, then set request’s referrer policy\n // to it.\n if (init.referrerPolicy !== undefined) {\n request.referrerPolicy = init.referrerPolicy\n }\n\n // 16. Let mode be init[\"mode\"] if it exists, and fallbackMode otherwise.\n let mode\n if (init.mode !== undefined) {\n mode = init.mode\n } else {\n mode = fallbackMode\n }\n\n // 17. 
If mode is \"navigate\", then throw a TypeError.\n if (mode === 'navigate') {\n throw webidl.errors.exception({\n header: 'Request constructor',\n message: 'invalid request mode navigate.'\n })\n }\n\n // 18. If mode is non-null, set request’s mode to mode.\n if (mode != null) {\n request.mode = mode\n }\n\n // 19. If init[\"credentials\"] exists, then set request’s credentials mode\n // to it.\n if (init.credentials !== undefined) {\n request.credentials = init.credentials\n }\n\n // 18. If init[\"cache\"] exists, then set request’s cache mode to it.\n if (init.cache !== undefined) {\n request.cache = init.cache\n }\n\n // 21. If request’s cache mode is \"only-if-cached\" and request’s mode is\n // not \"same-origin\", then throw a TypeError.\n if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {\n throw new TypeError(\n \"'only-if-cached' can be set only with 'same-origin' mode\"\n )\n }\n\n // 22. If init[\"redirect\"] exists, then set request’s redirect mode to it.\n if (init.redirect !== undefined) {\n request.redirect = init.redirect\n }\n\n // 23. If init[\"integrity\"] exists, then set request’s integrity metadata to it.\n if (init.integrity != null) {\n request.integrity = String(init.integrity)\n }\n\n // 24. If init[\"keepalive\"] exists, then set request’s keepalive to it.\n if (init.keepalive !== undefined) {\n request.keepalive = Boolean(init.keepalive)\n }\n\n // 25. If init[\"method\"] exists, then:\n if (init.method !== undefined) {\n // 1. Let method be init[\"method\"].\n let method = init.method\n\n // 2. If method is not a method or method is a forbidden method, then\n // throw a TypeError.\n if (!isValidHTTPToken(method)) {\n throw new TypeError(`'${method}' is not a valid HTTP method.`)\n }\n\n if (forbiddenMethodsSet.has(method.toUpperCase())) {\n throw new TypeError(`'${method}' HTTP method is unsupported.`)\n }\n\n // 3. Normalize method.\n method = normalizeMethodRecord[method] ?? normalizeMethod(method)\n\n // 4. Set request’s method to method.\n request.method = method\n }\n\n // 26. If init[\"signal\"] exists, then set signal to it.\n if (init.signal !== undefined) {\n signal = init.signal\n }\n\n // 27. Set this’s request to request.\n this[kState] = request\n\n // 28. Set this’s signal to a new AbortSignal object with this’s relevant\n // Realm.\n // TODO: could this be simplified with AbortSignal.any\n // (https://dom.spec.whatwg.org/#dom-abortsignal-any)\n const ac = new AbortController()\n this[kSignal] = ac.signal\n this[kSignal][kRealm] = this[kRealm]\n\n // 29. If signal is not null, then make this’s signal follow signal.\n if (signal != null) {\n if (\n !signal ||\n typeof signal.aborted !== 'boolean' ||\n typeof signal.addEventListener !== 'function'\n ) {\n throw new TypeError(\n \"Failed to construct 'Request': member signal is not of type AbortSignal.\"\n )\n }\n\n if (signal.aborted) {\n ac.abort(signal.reason)\n } else {\n // Keep a strong ref to ac while request object\n // is alive. 
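// Step 25 above accepts init["method"] only when it is an HTTP token, rejects the
// forbidden methods, and uppercases the handful of methods the fetch spec
// normalizes. A hedged standalone sketch of those checks (the regex and sets below
// stand in for undici's internal isValidHTTPToken / forbiddenMethodsSet /
// normalizeMethodRecord helpers):
const FORBIDDEN_METHODS = new Set(['CONNECT', 'TRACE', 'TRACK'])
const NORMALIZED_METHODS = new Set(['DELETE', 'GET', 'HEAD', 'OPTIONS', 'POST', 'PUT'])
const HTTP_TOKEN = /^[!#$%&'*+\-.^_`|~A-Za-z0-9]+$/

function normalizeRequestMethod (method) {
  if (!HTTP_TOKEN.test(method)) {
    throw new TypeError(`'${method}' is not a valid HTTP method.`)
  }
  if (FORBIDDEN_METHODS.has(method.toUpperCase())) {
    throw new TypeError(`'${method}' HTTP method is unsupported.`)
  }
  return NORMALIZED_METHODS.has(method.toUpperCase()) ? method.toUpperCase() : method
}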
This is needed to prevent AbortController\n // from being prematurely garbage collected.\n // See, https://github.com/nodejs/undici/issues/1926.\n this[kAbortController] = ac\n\n const acRef = new WeakRef(ac)\n const abort = function () {\n const ac = acRef.deref()\n if (ac !== undefined) {\n ac.abort(this.reason)\n }\n }\n\n // Third-party AbortControllers may not work with these.\n // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.\n try {\n // If the max amount of listeners is equal to the default, increase it\n // This is only available in node >= v19.9.0\n if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {\n setMaxListeners(100, signal)\n } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {\n setMaxListeners(100, signal)\n }\n } catch {}\n\n util.addAbortListener(signal, abort)\n requestFinalizer.register(ac, { signal, abort })\n }\n }\n\n // 30. Set this’s headers to a new Headers object with this’s relevant\n // Realm, whose header list is request’s header list and guard is\n // \"request\".\n this[kHeaders] = new Headers(kConstruct)\n this[kHeaders][kHeadersList] = request.headersList\n this[kHeaders][kGuard] = 'request'\n this[kHeaders][kRealm] = this[kRealm]\n\n // 31. If this’s request’s mode is \"no-cors\", then:\n if (mode === 'no-cors') {\n // 1. If this’s request’s method is not a CORS-safelisted method,\n // then throw a TypeError.\n if (!corsSafeListedMethodsSet.has(request.method)) {\n throw new TypeError(\n `'${request.method} is unsupported in no-cors mode.`\n )\n }\n\n // 2. Set this’s headers’s guard to \"request-no-cors\".\n this[kHeaders][kGuard] = 'request-no-cors'\n }\n\n // 32. If init is not empty, then:\n if (initHasKey) {\n /** @type {HeadersList} */\n const headersList = this[kHeaders][kHeadersList]\n // 1. Let headers be a copy of this’s headers and its associated header\n // list.\n // 2. If init[\"headers\"] exists, then set headers to init[\"headers\"].\n const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)\n\n // 3. Empty this’s headers’s header list.\n headersList.clear()\n\n // 4. If headers is a Headers object, then for each header in its header\n // list, append header’s name/header’s value to this’s headers.\n if (headers instanceof HeadersList) {\n for (const [key, val] of headers) {\n headersList.append(key, val)\n }\n // Note: Copy the `set-cookie` meta-data.\n headersList.cookies = headers.cookies\n } else {\n // 5. Otherwise, fill this’s headers with headers.\n fillHeaders(this[kHeaders], headers)\n }\n }\n\n // 33. Let inputBody be input’s request’s body if input is a Request\n // object; otherwise null.\n const inputBody = input instanceof Request ? input[kState].body : null\n\n // 34. If either init[\"body\"] exists and is non-null or inputBody is\n // non-null, and request’s method is `GET` or `HEAD`, then throw a\n // TypeError.\n if (\n (init.body != null || inputBody != null) &&\n (request.method === 'GET' || request.method === 'HEAD')\n ) {\n throw new TypeError('Request with GET/HEAD method cannot have body.')\n }\n\n // 35. Let initBody be null.\n let initBody = null\n\n // 36. If init[\"body\"] exists and is non-null, then:\n if (init.body != null) {\n // 1. Let Content-Type be null.\n // 2. 
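// The abort wiring above lets the request's internal AbortController follow an
// external signal while only a WeakRef points back at it, so an otherwise
// unreferenced Request can still be garbage collected. A simplified sketch of the
// pattern (it omits undici's max-listener bump and FinalizationRegistry cleanup):
function followSignal (externalSignal) {
  const ac = new AbortController()

  if (externalSignal.aborted) {
    ac.abort(externalSignal.reason)
  } else {
    // The listener only holds a WeakRef, so it alone does not keep `ac` alive.
    const acRef = new WeakRef(ac)
    externalSignal.addEventListener('abort', function () {
      const controller = acRef.deref()
      if (controller !== undefined) {
        controller.abort(this.reason)
      }
    }, { once: true })
  }

  return ac
}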
Set initBody and Content-Type to the result of extracting\n // init[\"body\"], with keepalive set to request’s keepalive.\n const [extractedBody, contentType] = extractBody(\n init.body,\n request.keepalive\n )\n initBody = extractedBody\n\n // 3, If Content-Type is non-null and this’s headers’s header list does\n // not contain `Content-Type`, then append `Content-Type`/Content-Type to\n // this’s headers.\n if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {\n this[kHeaders].append('content-type', contentType)\n }\n }\n\n // 37. Let inputOrInitBody be initBody if it is non-null; otherwise\n // inputBody.\n const inputOrInitBody = initBody ?? inputBody\n\n // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is\n // null, then:\n if (inputOrInitBody != null && inputOrInitBody.source == null) {\n // 1. If initBody is non-null and init[\"duplex\"] does not exist,\n // then throw a TypeError.\n if (initBody != null && init.duplex == null) {\n throw new TypeError('RequestInit: duplex option is required when sending a body.')\n }\n\n // 2. If this’s request’s mode is neither \"same-origin\" nor \"cors\",\n // then throw a TypeError.\n if (request.mode !== 'same-origin' && request.mode !== 'cors') {\n throw new TypeError(\n 'If request is made from ReadableStream, mode should be \"same-origin\" or \"cors\"'\n )\n }\n\n // 3. Set this’s request’s use-CORS-preflight flag.\n request.useCORSPreflightFlag = true\n }\n\n // 39. Let finalBody be inputOrInitBody.\n let finalBody = inputOrInitBody\n\n // 40. If initBody is null and inputBody is non-null, then:\n if (initBody == null && inputBody != null) {\n // 1. If input is unusable, then throw a TypeError.\n if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {\n throw new TypeError(\n 'Cannot construct a Request with a Request object that has already been used.'\n )\n }\n\n // 2. Set finalBody to the result of creating a proxy for inputBody.\n if (!TransformStream) {\n TransformStream = require('stream/web').TransformStream\n }\n\n // https://streams.spec.whatwg.org/#readablestream-create-a-proxy\n const identityTransform = new TransformStream()\n inputBody.stream.pipeThrough(identityTransform)\n finalBody = {\n source: inputBody.source,\n length: inputBody.length,\n stream: identityTransform.readable\n }\n }\n\n // 41. Set this’s request’s body to finalBody.\n this[kState].body = finalBody\n }\n\n // Returns request’s HTTP method, which is \"GET\" by default.\n get method () {\n webidl.brandCheck(this, Request)\n\n // The method getter steps are to return this’s request’s method.\n return this[kState].method\n }\n\n // Returns the URL of request as a string.\n get url () {\n webidl.brandCheck(this, Request)\n\n // The url getter steps are to return this’s request’s URL, serialized.\n return URLSerializer(this[kState].url)\n }\n\n // Returns a Headers object consisting of the headers associated with request.\n // Note that headers added in the network layer by the user agent will not\n // be accounted for in this object, e.g., the \"Host\" header.\n get headers () {\n webidl.brandCheck(this, Request)\n\n // The headers getter steps are to return this’s headers.\n return this[kHeaders]\n }\n\n // Returns the kind of resource requested by request, e.g., \"document\"\n // or \"script\".\n get destination () {\n webidl.brandCheck(this, Request)\n\n // The destination getter are to return this’s request’s destination.\n return this[kState].destination\n }\n\n // Returns the referrer of request. 
Its value can be a same-origin URL if\n // explicitly set in init, the empty string to indicate no referrer, and\n // \"about:client\" when defaulting to the global’s default. This is used\n // during fetching to determine the value of the `Referer` header of the\n // request being made.\n get referrer () {\n webidl.brandCheck(this, Request)\n\n // 1. If this’s request’s referrer is \"no-referrer\", then return the\n // empty string.\n if (this[kState].referrer === 'no-referrer') {\n return ''\n }\n\n // 2. If this’s request’s referrer is \"client\", then return\n // \"about:client\".\n if (this[kState].referrer === 'client') {\n return 'about:client'\n }\n\n // Return this’s request’s referrer, serialized.\n return this[kState].referrer.toString()\n }\n\n // Returns the referrer policy associated with request.\n // This is used during fetching to compute the value of the request’s\n // referrer.\n get referrerPolicy () {\n webidl.brandCheck(this, Request)\n\n // The referrerPolicy getter steps are to return this’s request’s referrer policy.\n return this[kState].referrerPolicy\n }\n\n // Returns the mode associated with request, which is a string indicating\n // whether the request will use CORS, or will be restricted to same-origin\n // URLs.\n get mode () {\n webidl.brandCheck(this, Request)\n\n // The mode getter steps are to return this’s request’s mode.\n return this[kState].mode\n }\n\n // Returns the credentials mode associated with request,\n // which is a string indicating whether credentials will be sent with the\n // request always, never, or only when sent to a same-origin URL.\n get credentials () {\n // The credentials getter steps are to return this’s request’s credentials mode.\n return this[kState].credentials\n }\n\n // Returns the cache mode associated with request,\n // which is a string indicating how the request will\n // interact with the browser’s cache when fetching.\n get cache () {\n webidl.brandCheck(this, Request)\n\n // The cache getter steps are to return this’s request’s cache mode.\n return this[kState].cache\n }\n\n // Returns the redirect mode associated with request,\n // which is a string indicating how redirects for the\n // request will be handled during fetching. A request\n // will follow redirects by default.\n get redirect () {\n webidl.brandCheck(this, Request)\n\n // The redirect getter steps are to return this’s request’s redirect mode.\n return this[kState].redirect\n }\n\n // Returns request’s subresource integrity metadata, which is a\n // cryptographic hash of the resource being fetched. Its value\n // consists of multiple hashes separated by whitespace. [SRI]\n get integrity () {\n webidl.brandCheck(this, Request)\n\n // The integrity getter steps are to return this’s request’s integrity\n // metadata.\n return this[kState].integrity\n }\n\n // Returns a boolean indicating whether or not request can outlive the\n // global in which it was created.\n get keepalive () {\n webidl.brandCheck(this, Request)\n\n // The keepalive getter steps are to return this’s request’s keepalive.\n return this[kState].keepalive\n }\n\n // Returns a boolean indicating whether or not request is for a reload\n // navigation.\n get isReloadNavigation () {\n webidl.brandCheck(this, Request)\n\n // The isReloadNavigation getter steps are to return true if this’s\n // request’s reload-navigation flag is set; otherwise false.\n return this[kState].reloadNavigation\n }\n\n // Returns a boolean indicating whether or not request is for a history\n // navigation (a.k.a. 
back-foward navigation).\n get isHistoryNavigation () {\n webidl.brandCheck(this, Request)\n\n // The isHistoryNavigation getter steps are to return true if this’s request’s\n // history-navigation flag is set; otherwise false.\n return this[kState].historyNavigation\n }\n\n // Returns the signal associated with request, which is an AbortSignal\n // object indicating whether or not request has been aborted, and its\n // abort event handler.\n get signal () {\n webidl.brandCheck(this, Request)\n\n // The signal getter steps are to return this’s signal.\n return this[kSignal]\n }\n\n get body () {\n webidl.brandCheck(this, Request)\n\n return this[kState].body ? this[kState].body.stream : null\n }\n\n get bodyUsed () {\n webidl.brandCheck(this, Request)\n\n return !!this[kState].body && util.isDisturbed(this[kState].body.stream)\n }\n\n get duplex () {\n webidl.brandCheck(this, Request)\n\n return 'half'\n }\n\n // Returns a clone of request.\n clone () {\n webidl.brandCheck(this, Request)\n\n // 1. If this is unusable, then throw a TypeError.\n if (this.bodyUsed || this.body?.locked) {\n throw new TypeError('unusable')\n }\n\n // 2. Let clonedRequest be the result of cloning this’s request.\n const clonedRequest = cloneRequest(this[kState])\n\n // 3. Let clonedRequestObject be the result of creating a Request object,\n // given clonedRequest, this’s headers’s guard, and this’s relevant Realm.\n const clonedRequestObject = new Request(kConstruct)\n clonedRequestObject[kState] = clonedRequest\n clonedRequestObject[kRealm] = this[kRealm]\n clonedRequestObject[kHeaders] = new Headers(kConstruct)\n clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList\n clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]\n clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]\n\n // 4. Make clonedRequestObject’s signal follow this’s signal.\n const ac = new AbortController()\n if (this.signal.aborted) {\n ac.abort(this.signal.reason)\n } else {\n util.addAbortListener(\n this.signal,\n () => {\n ac.abort(this.signal.reason)\n }\n )\n }\n clonedRequestObject[kSignal] = ac.signal\n\n // 4. Return clonedRequestObject.\n return clonedRequestObject\n }\n}\n\nmixinBody(Request)\n\nfunction makeRequest (init) {\n // https://fetch.spec.whatwg.org/#requests\n const request = {\n method: 'GET',\n localURLsOnly: false,\n unsafeRequest: false,\n body: null,\n client: null,\n reservedClient: null,\n replacesClientId: '',\n window: 'client',\n keepalive: false,\n serviceWorkers: 'all',\n initiator: '',\n destination: '',\n priority: null,\n origin: 'client',\n policyContainer: 'client',\n referrer: 'client',\n referrerPolicy: '',\n mode: 'no-cors',\n useCORSPreflightFlag: false,\n credentials: 'same-origin',\n useCredentials: false,\n cache: 'default',\n redirect: 'follow',\n integrity: '',\n cryptoGraphicsNonceMetadata: '',\n parserMetadata: '',\n reloadNavigation: false,\n historyNavigation: false,\n userActivation: false,\n taintedOrigin: false,\n redirectCount: 0,\n responseTainting: 'basic',\n preventNoCacheCacheControlHeaderModification: false,\n done: false,\n timingAllowFailed: false,\n ...init,\n headersList: init.headersList\n ? new HeadersList(init.headersList)\n : new HeadersList()\n }\n request.url = request.urlList[0]\n return request\n}\n\n// https://fetch.spec.whatwg.org/#concept-request-clone\nfunction cloneRequest (request) {\n // To clone a request request, run these steps:\n\n // 1. 
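// clone() above refuses to duplicate a Request whose body has already been used or
// whose stream is locked. A small usage sketch, assuming the `undici` package
// export rather than this bundled copy (URL and payload are illustrative):
const { Request } = require('undici')

async function cloneDemo () {
  const original = new Request('https://example.com/upload', {
    method: 'POST',
    body: 'payload'
  })

  const copy = original.clone()   // allowed: the body has not been consumed yet
  await original.text()           // consumes the original body (bodyUsed becomes true)
  // calling original.clone() here would throw, since the request is now unusable
  return copy
}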
Let newRequest be a copy of request, except for its body.\n const newRequest = makeRequest({ ...request, body: null })\n\n // 2. If request’s body is non-null, set newRequest’s body to the\n // result of cloning request’s body.\n if (request.body != null) {\n newRequest.body = cloneBody(request.body)\n }\n\n // 3. Return newRequest.\n return newRequest\n}\n\nObject.defineProperties(Request.prototype, {\n method: kEnumerableProperty,\n url: kEnumerableProperty,\n headers: kEnumerableProperty,\n redirect: kEnumerableProperty,\n clone: kEnumerableProperty,\n signal: kEnumerableProperty,\n duplex: kEnumerableProperty,\n destination: kEnumerableProperty,\n body: kEnumerableProperty,\n bodyUsed: kEnumerableProperty,\n isHistoryNavigation: kEnumerableProperty,\n isReloadNavigation: kEnumerableProperty,\n keepalive: kEnumerableProperty,\n integrity: kEnumerableProperty,\n cache: kEnumerableProperty,\n credentials: kEnumerableProperty,\n attribute: kEnumerableProperty,\n referrerPolicy: kEnumerableProperty,\n referrer: kEnumerableProperty,\n mode: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'Request',\n configurable: true\n }\n})\n\nwebidl.converters.Request = webidl.interfaceConverter(\n Request\n)\n\n// https://fetch.spec.whatwg.org/#requestinfo\nwebidl.converters.RequestInfo = function (V) {\n if (typeof V === 'string') {\n return webidl.converters.USVString(V)\n }\n\n if (V instanceof Request) {\n return webidl.converters.Request(V)\n }\n\n return webidl.converters.USVString(V)\n}\n\nwebidl.converters.AbortSignal = webidl.interfaceConverter(\n AbortSignal\n)\n\n// https://fetch.spec.whatwg.org/#requestinit\nwebidl.converters.RequestInit = webidl.dictionaryConverter([\n {\n key: 'method',\n converter: webidl.converters.ByteString\n },\n {\n key: 'headers',\n converter: webidl.converters.HeadersInit\n },\n {\n key: 'body',\n converter: webidl.nullableConverter(\n webidl.converters.BodyInit\n )\n },\n {\n key: 'referrer',\n converter: webidl.converters.USVString\n },\n {\n key: 'referrerPolicy',\n converter: webidl.converters.DOMString,\n // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy\n allowedValues: referrerPolicy\n },\n {\n key: 'mode',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#concept-request-mode\n allowedValues: requestMode\n },\n {\n key: 'credentials',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#requestcredentials\n allowedValues: requestCredentials\n },\n {\n key: 'cache',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#requestcache\n allowedValues: requestCache\n },\n {\n key: 'redirect',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#requestredirect\n allowedValues: requestRedirect\n },\n {\n key: 'integrity',\n converter: webidl.converters.DOMString\n },\n {\n key: 'keepalive',\n converter: webidl.converters.boolean\n },\n {\n key: 'signal',\n converter: webidl.nullableConverter(\n (signal) => webidl.converters.AbortSignal(\n signal,\n { strict: false }\n )\n )\n },\n {\n key: 'window',\n converter: webidl.converters.any\n },\n {\n key: 'duplex',\n converter: webidl.converters.DOMString,\n allowedValues: requestDuplex\n }\n])\n\nmodule.exports = { Request, makeRequest }\n","'use strict'\n\nconst { Headers, HeadersList, fill } = require('./headers')\nconst { extractBody, cloneBody, mixinBody } = require('./body')\nconst util = require('../core/util')\nconst { kEnumerableProperty } = util\nconst {\n isValidReasonPhrase,\n 
isCancelled,\n isAborted,\n isBlobLike,\n serializeJavascriptValueToJSONString,\n isErrorLike,\n isomorphicEncode\n} = require('./util')\nconst {\n redirectStatusSet,\n nullBodyStatus,\n DOMException\n} = require('./constants')\nconst { kState, kHeaders, kGuard, kRealm } = require('./symbols')\nconst { webidl } = require('./webidl')\nconst { FormData } = require('./formdata')\nconst { getGlobalOrigin } = require('./global')\nconst { URLSerializer } = require('./dataURL')\nconst { kHeadersList, kConstruct } = require('../core/symbols')\nconst assert = require('assert')\nconst { types } = require('util')\n\nconst ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream\nconst textEncoder = new TextEncoder('utf-8')\n\n// https://fetch.spec.whatwg.org/#response-class\nclass Response {\n // Creates network error Response.\n static error () {\n // TODO\n const relevantRealm = { settingsObject: {} }\n\n // The static error() method steps are to return the result of creating a\n // Response object, given a new network error, \"immutable\", and this’s\n // relevant Realm.\n const responseObject = new Response()\n responseObject[kState] = makeNetworkError()\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList\n responseObject[kHeaders][kGuard] = 'immutable'\n responseObject[kHeaders][kRealm] = relevantRealm\n return responseObject\n }\n\n // https://fetch.spec.whatwg.org/#dom-response-json\n static json (data, init = {}) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })\n\n if (init !== null) {\n init = webidl.converters.ResponseInit(init)\n }\n\n // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data.\n const bytes = textEncoder.encode(\n serializeJavascriptValueToJSONString(data)\n )\n\n // 2. Let body be the result of extracting bytes.\n const body = extractBody(bytes)\n\n // 3. Let responseObject be the result of creating a Response object, given a new response,\n // \"response\", and this’s relevant Realm.\n const relevantRealm = { settingsObject: {} }\n const responseObject = new Response()\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kGuard] = 'response'\n responseObject[kHeaders][kRealm] = relevantRealm\n\n // 4. Perform initialize a response given responseObject, init, and (body, \"application/json\").\n initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })\n\n // 5. Return responseObject.\n return responseObject\n }\n\n // Creates a redirect Response that redirects to url with status status.\n static redirect (url, status = 302) {\n const relevantRealm = { settingsObject: {} }\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' })\n\n url = webidl.converters.USVString(url)\n status = webidl.converters['unsigned short'](status)\n\n // 1. Let parsedURL be the result of parsing url with current settings\n // object’s API base URL.\n // 2. If parsedURL is failure, then throw a TypeError.\n // TODO: base-URL?\n let parsedURL\n try {\n parsedURL = new URL(url, getGlobalOrigin())\n } catch (err) {\n throw Object.assign(new TypeError('Failed to parse URL from ' + url), {\n cause: err\n })\n }\n\n // 3. If status is not a redirect status, then throw a RangeError.\n if (!redirectStatusSet.has(status)) {\n throw new RangeError('Invalid status code ' + status)\n }\n\n // 4. 
Let responseObject be the result of creating a Response object,\n // given a new response, \"immutable\", and this’s relevant Realm.\n const responseObject = new Response()\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kGuard] = 'immutable'\n responseObject[kHeaders][kRealm] = relevantRealm\n\n // 5. Set responseObject’s response’s status to status.\n responseObject[kState].status = status\n\n // 6. Let value be parsedURL, serialized and isomorphic encoded.\n const value = isomorphicEncode(URLSerializer(parsedURL))\n\n // 7. Append `Location`/value to responseObject’s response’s header list.\n responseObject[kState].headersList.append('location', value)\n\n // 8. Return responseObject.\n return responseObject\n }\n\n // https://fetch.spec.whatwg.org/#dom-response\n constructor (body = null, init = {}) {\n if (body !== null) {\n body = webidl.converters.BodyInit(body)\n }\n\n init = webidl.converters.ResponseInit(init)\n\n // TODO\n this[kRealm] = { settingsObject: {} }\n\n // 1. Set this’s response to a new response.\n this[kState] = makeResponse({})\n\n // 2. Set this’s headers to a new Headers object with this’s relevant\n // Realm, whose header list is this’s response’s header list and guard\n // is \"response\".\n this[kHeaders] = new Headers(kConstruct)\n this[kHeaders][kGuard] = 'response'\n this[kHeaders][kHeadersList] = this[kState].headersList\n this[kHeaders][kRealm] = this[kRealm]\n\n // 3. Let bodyWithType be null.\n let bodyWithType = null\n\n // 4. If body is non-null, then set bodyWithType to the result of extracting body.\n if (body != null) {\n const [extractedBody, type] = extractBody(body)\n bodyWithType = { body: extractedBody, type }\n }\n\n // 5. Perform initialize a response given this, init, and bodyWithType.\n initializeResponse(this, init, bodyWithType)\n }\n\n // Returns response’s type, e.g., \"cors\".\n get type () {\n webidl.brandCheck(this, Response)\n\n // The type getter steps are to return this’s response’s type.\n return this[kState].type\n }\n\n // Returns response’s URL, if it has one; otherwise the empty string.\n get url () {\n webidl.brandCheck(this, Response)\n\n const urlList = this[kState].urlList\n\n // The url getter steps are to return the empty string if this’s\n // response’s URL is null; otherwise this’s response’s URL,\n // serialized with exclude fragment set to true.\n const url = urlList[urlList.length - 1] ?? 
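// Response.json() and Response.redirect() above are convenience constructors over
// the spec's "create a Response object" steps. A brief usage sketch, again assuming
// the `undici` package export (URLs and values are illustrative):
const { Response } = require('undici')

const jsonResponse = Response.json({ hello: 'world' }, { status: 201 })
// jsonResponse.status === 201 and its Content-Type header is 'application/json'

const redirectResponse = Response.redirect('https://example.com/next', 307)
// redirectResponse.status === 307 and its Location header is the serialized URL;
// passing a non-redirect status such as 200 would throw a RangeError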
null\n\n if (url === null) {\n return ''\n }\n\n return URLSerializer(url, true)\n }\n\n // Returns whether response was obtained through a redirect.\n get redirected () {\n webidl.brandCheck(this, Response)\n\n // The redirected getter steps are to return true if this’s response’s URL\n // list has more than one item; otherwise false.\n return this[kState].urlList.length > 1\n }\n\n // Returns response’s status.\n get status () {\n webidl.brandCheck(this, Response)\n\n // The status getter steps are to return this’s response’s status.\n return this[kState].status\n }\n\n // Returns whether response’s status is an ok status.\n get ok () {\n webidl.brandCheck(this, Response)\n\n // The ok getter steps are to return true if this’s response’s status is an\n // ok status; otherwise false.\n return this[kState].status >= 200 && this[kState].status <= 299\n }\n\n // Returns response’s status message.\n get statusText () {\n webidl.brandCheck(this, Response)\n\n // The statusText getter steps are to return this’s response’s status\n // message.\n return this[kState].statusText\n }\n\n // Returns response’s headers as Headers.\n get headers () {\n webidl.brandCheck(this, Response)\n\n // The headers getter steps are to return this’s headers.\n return this[kHeaders]\n }\n\n get body () {\n webidl.brandCheck(this, Response)\n\n return this[kState].body ? this[kState].body.stream : null\n }\n\n get bodyUsed () {\n webidl.brandCheck(this, Response)\n\n return !!this[kState].body && util.isDisturbed(this[kState].body.stream)\n }\n\n // Returns a clone of response.\n clone () {\n webidl.brandCheck(this, Response)\n\n // 1. If this is unusable, then throw a TypeError.\n if (this.bodyUsed || (this.body && this.body.locked)) {\n throw webidl.errors.exception({\n header: 'Response.clone',\n message: 'Body has already been consumed.'\n })\n }\n\n // 2. Let clonedResponse be the result of cloning this’s response.\n const clonedResponse = cloneResponse(this[kState])\n\n // 3. Return the result of creating a Response object, given\n // clonedResponse, this’s headers’s guard, and this’s relevant Realm.\n const clonedResponseObject = new Response()\n clonedResponseObject[kState] = clonedResponse\n clonedResponseObject[kRealm] = this[kRealm]\n clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList\n clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard]\n clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm]\n\n return clonedResponseObject\n }\n}\n\nmixinBody(Response)\n\nObject.defineProperties(Response.prototype, {\n type: kEnumerableProperty,\n url: kEnumerableProperty,\n status: kEnumerableProperty,\n ok: kEnumerableProperty,\n redirected: kEnumerableProperty,\n statusText: kEnumerableProperty,\n headers: kEnumerableProperty,\n clone: kEnumerableProperty,\n body: kEnumerableProperty,\n bodyUsed: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'Response',\n configurable: true\n }\n})\n\nObject.defineProperties(Response, {\n json: kEnumerableProperty,\n redirect: kEnumerableProperty,\n error: kEnumerableProperty\n})\n\n// https://fetch.spec.whatwg.org/#concept-response-clone\nfunction cloneResponse (response) {\n // To clone a response response, run these steps:\n\n // 1. If response is a filtered response, then return a new identical\n // filtered response whose internal response is a clone of response’s\n // internal response.\n if (response.internalResponse) {\n return filterResponse(\n cloneResponse(response.internalResponse),\n response.type\n )\n }\n\n // 2. 
Let newResponse be a copy of response, except for its body.\n const newResponse = makeResponse({ ...response, body: null })\n\n // 3. If response’s body is non-null, then set newResponse’s body to the\n // result of cloning response’s body.\n if (response.body != null) {\n newResponse.body = cloneBody(response.body)\n }\n\n // 4. Return newResponse.\n return newResponse\n}\n\nfunction makeResponse (init) {\n return {\n aborted: false,\n rangeRequested: false,\n timingAllowPassed: false,\n requestIncludesCredentials: false,\n type: 'default',\n status: 200,\n timingInfo: null,\n cacheState: '',\n statusText: '',\n ...init,\n headersList: init.headersList\n ? new HeadersList(init.headersList)\n : new HeadersList(),\n urlList: init.urlList ? [...init.urlList] : []\n }\n}\n\nfunction makeNetworkError (reason) {\n const isError = isErrorLike(reason)\n return makeResponse({\n type: 'error',\n status: 0,\n error: isError\n ? reason\n : new Error(reason ? String(reason) : reason),\n aborted: reason && reason.name === 'AbortError'\n })\n}\n\nfunction makeFilteredResponse (response, state) {\n state = {\n internalResponse: response,\n ...state\n }\n\n return new Proxy(response, {\n get (target, p) {\n return p in state ? state[p] : target[p]\n },\n set (target, p, value) {\n assert(!(p in state))\n target[p] = value\n return true\n }\n })\n}\n\n// https://fetch.spec.whatwg.org/#concept-filtered-response\nfunction filterResponse (response, type) {\n // Set response to the following filtered response with response as its\n // internal response, depending on request’s response tainting:\n if (type === 'basic') {\n // A basic filtered response is a filtered response whose type is \"basic\"\n // and header list excludes any headers in internal response’s header list\n // whose name is a forbidden response-header name.\n\n // Note: undici does not implement forbidden response-header names\n return makeFilteredResponse(response, {\n type: 'basic',\n headersList: response.headersList\n })\n } else if (type === 'cors') {\n // A CORS filtered response is a filtered response whose type is \"cors\"\n // and header list excludes any headers in internal response’s header\n // list whose name is not a CORS-safelisted response-header name, given\n // internal response’s CORS-exposed header-name list.\n\n // Note: undici does not implement CORS-safelisted response-header names\n return makeFilteredResponse(response, {\n type: 'cors',\n headersList: response.headersList\n })\n } else if (type === 'opaque') {\n // An opaque filtered response is a filtered response whose type is\n // \"opaque\", URL list is the empty list, status is 0, status message\n // is the empty byte sequence, header list is empty, and body is null.\n\n return makeFilteredResponse(response, {\n type: 'opaque',\n urlList: Object.freeze([]),\n status: 0,\n statusText: '',\n body: null\n })\n } else if (type === 'opaqueredirect') {\n // An opaque-redirect filtered response is a filtered response whose type\n // is \"opaqueredirect\", status is 0, status message is the empty byte\n // sequence, header list is empty, and body is null.\n\n return makeFilteredResponse(response, {\n type: 'opaqueredirect',\n status: 0,\n statusText: '',\n headersList: [],\n body: null\n })\n } else {\n assert(false)\n }\n}\n\n// https://fetch.spec.whatwg.org/#appropriate-network-error\nfunction makeAppropriateNetworkError (fetchParams, err = null) {\n // 1. Assert: fetchParams is canceled.\n assert(isCancelled(fetchParams))\n\n // 2. 
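// makeFilteredResponse above wraps the internal response in a Proxy: reads prefer
// the filtered overrides, writes always land on the internal response. A reduced
// sketch of that overlay (without the write assertion):
function overlayResponse (internalResponse, overrides) {
  const state = { internalResponse, ...overrides }
  return new Proxy(internalResponse, {
    get (target, p) {
      return p in state ? state[p] : target[p]
    },
    set (target, p, value) {
      target[p] = value
      return true
    }
  })
}

const internal = { type: 'default', status: 200, urlList: ['https://example.com/'] }
const opaque = overlayResponse(internal, { type: 'opaque', status: 0, urlList: Object.freeze([]) })
// opaque.type === 'opaque' and opaque.status === 0, while writes still reach `internal`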
Return an aborted network error if fetchParams is aborted;\n // otherwise return a network error.\n return isAborted(fetchParams)\n ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))\n : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))\n}\n\n// https://whatpr.org/fetch/1392.html#initialize-a-response\nfunction initializeResponse (response, init, body) {\n // 1. If init[\"status\"] is not in the range 200 to 599, inclusive, then\n // throw a RangeError.\n if (init.status !== null && (init.status < 200 || init.status > 599)) {\n throw new RangeError('init[\"status\"] must be in the range of 200 to 599, inclusive.')\n }\n\n // 2. If init[\"statusText\"] does not match the reason-phrase token production,\n // then throw a TypeError.\n if ('statusText' in init && init.statusText != null) {\n // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:\n // reason-phrase = *( HTAB / SP / VCHAR / obs-text )\n if (!isValidReasonPhrase(String(init.statusText))) {\n throw new TypeError('Invalid statusText')\n }\n }\n\n // 3. Set response’s response’s status to init[\"status\"].\n if ('status' in init && init.status != null) {\n response[kState].status = init.status\n }\n\n // 4. Set response’s response’s status message to init[\"statusText\"].\n if ('statusText' in init && init.statusText != null) {\n response[kState].statusText = init.statusText\n }\n\n // 5. If init[\"headers\"] exists, then fill response’s headers with init[\"headers\"].\n if ('headers' in init && init.headers != null) {\n fill(response[kHeaders], init.headers)\n }\n\n // 6. If body was given, then:\n if (body) {\n // 1. If response's status is a null body status, then throw a TypeError.\n if (nullBodyStatus.includes(response.status)) {\n throw webidl.errors.exception({\n header: 'Response constructor',\n message: 'Invalid response status code ' + response.status\n })\n }\n\n // 2. Set response's body to body's body.\n response[kState].body = body.body\n\n // 3. 
If body's type is non-null and response's header list does not contain\n // `Content-Type`, then append (`Content-Type`, body's type) to response's header list.\n if (body.type != null && !response[kState].headersList.contains('Content-Type')) {\n response[kState].headersList.append('content-type', body.type)\n }\n }\n}\n\nwebidl.converters.ReadableStream = webidl.interfaceConverter(\n ReadableStream\n)\n\nwebidl.converters.FormData = webidl.interfaceConverter(\n FormData\n)\n\nwebidl.converters.URLSearchParams = webidl.interfaceConverter(\n URLSearchParams\n)\n\n// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit\nwebidl.converters.XMLHttpRequestBodyInit = function (V) {\n if (typeof V === 'string') {\n return webidl.converters.USVString(V)\n }\n\n if (isBlobLike(V)) {\n return webidl.converters.Blob(V, { strict: false })\n }\n\n if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {\n return webidl.converters.BufferSource(V)\n }\n\n if (util.isFormDataLike(V)) {\n return webidl.converters.FormData(V, { strict: false })\n }\n\n if (V instanceof URLSearchParams) {\n return webidl.converters.URLSearchParams(V)\n }\n\n return webidl.converters.DOMString(V)\n}\n\n// https://fetch.spec.whatwg.org/#bodyinit\nwebidl.converters.BodyInit = function (V) {\n if (V instanceof ReadableStream) {\n return webidl.converters.ReadableStream(V)\n }\n\n // Note: the spec doesn't include async iterables,\n // this is an undici extension.\n if (V?.[Symbol.asyncIterator]) {\n return V\n }\n\n return webidl.converters.XMLHttpRequestBodyInit(V)\n}\n\nwebidl.converters.ResponseInit = webidl.dictionaryConverter([\n {\n key: 'status',\n converter: webidl.converters['unsigned short'],\n defaultValue: 200\n },\n {\n key: 'statusText',\n converter: webidl.converters.ByteString,\n defaultValue: ''\n },\n {\n key: 'headers',\n converter: webidl.converters.HeadersInit\n }\n])\n\nmodule.exports = {\n makeNetworkError,\n makeResponse,\n makeAppropriateNetworkError,\n filterResponse,\n Response,\n cloneResponse\n}\n","'use strict'\n\nmodule.exports = {\n kUrl: Symbol('url'),\n kHeaders: Symbol('headers'),\n kSignal: Symbol('signal'),\n kState: Symbol('state'),\n kGuard: Symbol('guard'),\n kRealm: Symbol('realm')\n}\n","'use strict'\n\nconst { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require('./constants')\nconst { getGlobalOrigin } = require('./global')\nconst { performance } = require('perf_hooks')\nconst { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util')\nconst assert = require('assert')\nconst { isUint8Array } = require('util/types')\n\n// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable\n/** @type {import('crypto')|undefined} */\nlet crypto\n\ntry {\n crypto = require('crypto')\n} catch {\n\n}\n\nfunction responseURL (response) {\n // https://fetch.spec.whatwg.org/#responses\n // A response has an associated URL. It is a pointer to the last URL\n // in response’s URL list and null if response’s URL list is empty.\n const urlList = response.urlList\n const length = urlList.length\n return length === 0 ? null : urlList[length - 1].toString()\n}\n\n// https://fetch.spec.whatwg.org/#concept-response-location-url\nfunction responseLocationURL (response, requestFragment) {\n // 1. If response’s status is not a redirect status, then return null.\n if (!redirectStatusSet.has(response.status)) {\n return null\n }\n\n // 2. 
Let location be the result of extracting header list values given\n // `Location` and response’s header list.\n let location = response.headersList.get('location')\n\n // 3. If location is a header value, then set location to the result of\n // parsing location with response’s URL.\n if (location !== null && isValidHeaderValue(location)) {\n location = new URL(location, responseURL(response))\n }\n\n // 4. If location is a URL whose fragment is null, then set location’s\n // fragment to requestFragment.\n if (location && !location.hash) {\n location.hash = requestFragment\n }\n\n // 5. Return location.\n return location\n}\n\n/** @returns {URL} */\nfunction requestCurrentURL (request) {\n return request.urlList[request.urlList.length - 1]\n}\n\nfunction requestBadPort (request) {\n // 1. Let url be request’s current URL.\n const url = requestCurrentURL(request)\n\n // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port,\n // then return blocked.\n if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) {\n return 'blocked'\n }\n\n // 3. Return allowed.\n return 'allowed'\n}\n\nfunction isErrorLike (object) {\n return object instanceof Error || (\n object?.constructor?.name === 'Error' ||\n object?.constructor?.name === 'DOMException'\n )\n}\n\n// Check whether |statusText| is a ByteString and\n// matches the Reason-Phrase token production.\n// RFC 2616: https://tools.ietf.org/html/rfc2616\n// RFC 7230: https://tools.ietf.org/html/rfc7230\n// \"reason-phrase = *( HTAB / SP / VCHAR / obs-text )\"\n// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116\nfunction isValidReasonPhrase (statusText) {\n for (let i = 0; i < statusText.length; ++i) {\n const c = statusText.charCodeAt(i)\n if (\n !(\n (\n c === 0x09 || // HTAB\n (c >= 0x20 && c <= 0x7e) || // SP / VCHAR\n (c >= 0x80 && c <= 0xff)\n ) // obs-text\n )\n ) {\n return false\n }\n }\n return true\n}\n\n/**\n * @see https://tools.ietf.org/html/rfc7230#section-3.2.6\n * @param {number} c\n */\nfunction isTokenCharCode (c) {\n switch (c) {\n case 0x22:\n case 0x28:\n case 0x29:\n case 0x2c:\n case 0x2f:\n case 0x3a:\n case 0x3b:\n case 0x3c:\n case 0x3d:\n case 0x3e:\n case 0x3f:\n case 0x40:\n case 0x5b:\n case 0x5c:\n case 0x5d:\n case 0x7b:\n case 0x7d:\n // DQUOTE and \"(),/:;<=>?@[\\]{}\"\n return false\n default:\n // VCHAR %x21-7E\n return c >= 0x21 && c <= 0x7e\n }\n}\n\n/**\n * @param {string} characters\n */\nfunction isValidHTTPToken (characters) {\n if (characters.length === 0) {\n return false\n }\n for (let i = 0; i < characters.length; ++i) {\n if (!isTokenCharCode(characters.charCodeAt(i))) {\n return false\n }\n }\n return true\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#header-name\n * @param {string} potentialValue\n */\nfunction isValidHeaderName (potentialValue) {\n return isValidHTTPToken(potentialValue)\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#header-value\n * @param {string} potentialValue\n */\nfunction isValidHeaderValue (potentialValue) {\n // - Has no leading or trailing HTTP tab or space bytes.\n // - Contains no 0x00 (NUL) or HTTP newline bytes.\n if (\n potentialValue.startsWith('\\t') ||\n potentialValue.startsWith(' ') ||\n potentialValue.endsWith('\\t') ||\n potentialValue.endsWith(' ')\n ) {\n return false\n }\n\n if (\n potentialValue.includes('\\0') ||\n potentialValue.includes('\\r') ||\n potentialValue.includes('\\n')\n ) {\n return false\n }\n\n return true\n}\n\n// 
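// responseLocationURL above resolves the `Location` header value against the last
// URL in the response's URL list and, when the resolved URL has no fragment, copies
// the request's fragment onto it. A reduced sketch of that resolution:
function resolveRedirectTarget (locationValue, responseUrl, requestFragment) {
  const location = new URL(locationValue, responseUrl)
  if (!location.hash && requestFragment) {
    location.hash = requestFragment
  }
  return location
}

// resolveRedirectTarget('/next', 'https://example.com/a#frag', '#frag').href
//   → 'https://example.com/next#frag'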
https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect\nfunction setRequestReferrerPolicyOnRedirect (request, actualResponse) {\n // Given a request request and a response actualResponse, this algorithm\n // updates request’s referrer policy according to the Referrer-Policy\n // header (if any) in actualResponse.\n\n // 1. Let policy be the result of executing § 8.1 Parse a referrer policy\n // from a Referrer-Policy header on actualResponse.\n\n // 8.1 Parse a referrer policy from a Referrer-Policy header\n // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list.\n const { headersList } = actualResponse\n // 2. Let policy be the empty string.\n // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token.\n // 4. Return policy.\n const policyHeader = (headersList.get('referrer-policy') ?? '').split(',')\n\n // Note: As the referrer-policy can contain multiple policies\n // separated by comma, we need to loop through all of them\n // and pick the first valid one.\n // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy\n let policy = ''\n if (policyHeader.length > 0) {\n // The right-most policy takes precedence.\n // The left-most policy is the fallback.\n for (let i = policyHeader.length; i !== 0; i--) {\n const token = policyHeader[i - 1].trim()\n if (referrerPolicyTokens.has(token)) {\n policy = token\n break\n }\n }\n }\n\n // 2. If policy is not the empty string, then set request’s referrer policy to policy.\n if (policy !== '') {\n request.referrerPolicy = policy\n }\n}\n\n// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check\nfunction crossOriginResourcePolicyCheck () {\n // TODO\n return 'allowed'\n}\n\n// https://fetch.spec.whatwg.org/#concept-cors-check\nfunction corsCheck () {\n // TODO\n return 'success'\n}\n\n// https://fetch.spec.whatwg.org/#concept-tao-check\nfunction TAOCheck () {\n // TODO\n return 'success'\n}\n\nfunction appendFetchMetadata (httpRequest) {\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header\n // TODO\n\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header\n\n // 1. Assert: r’s url is a potentially trustworthy URL.\n // TODO\n\n // 2. Let header be a Structured Header whose value is a token.\n let header = null\n\n // 3. Set header’s value to r’s mode.\n header = httpRequest.mode\n\n // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list.\n httpRequest.headersList.set('sec-fetch-mode', header)\n\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header\n // TODO\n\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header\n // TODO\n}\n\n// https://fetch.spec.whatwg.org/#append-a-request-origin-header\nfunction appendRequestOriginHeader (request) {\n // 1. Let serializedOrigin be the result of byte-serializing a request origin with request.\n let serializedOrigin = request.origin\n\n // 2. If request’s response tainting is \"cors\" or request’s mode is \"websocket\", then append (`Origin`, serializedOrigin) to request’s header list.\n if (request.responseTainting === 'cors' || request.mode === 'websocket') {\n if (serializedOrigin) {\n request.headersList.append('origin', serializedOrigin)\n }\n\n // 3. 
Otherwise, if request’s method is neither `GET` nor `HEAD`, then:\n } else if (request.method !== 'GET' && request.method !== 'HEAD') {\n // 1. Switch on request’s referrer policy:\n switch (request.referrerPolicy) {\n case 'no-referrer':\n // Set serializedOrigin to `null`.\n serializedOrigin = null\n break\n case 'no-referrer-when-downgrade':\n case 'strict-origin':\n case 'strict-origin-when-cross-origin':\n // If request’s origin is a tuple origin, its scheme is \"https\", and request’s current URL’s scheme is not \"https\", then set serializedOrigin to `null`.\n if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {\n serializedOrigin = null\n }\n break\n case 'same-origin':\n // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`.\n if (!sameOrigin(request, requestCurrentURL(request))) {\n serializedOrigin = null\n }\n break\n default:\n // Do nothing.\n }\n\n if (serializedOrigin) {\n // 2. Append (`Origin`, serializedOrigin) to request’s header list.\n request.headersList.append('origin', serializedOrigin)\n }\n }\n}\n\nfunction coarsenedSharedCurrentTime (crossOriginIsolatedCapability) {\n // TODO\n return performance.now()\n}\n\n// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info\nfunction createOpaqueTimingInfo (timingInfo) {\n return {\n startTime: timingInfo.startTime ?? 0,\n redirectStartTime: 0,\n redirectEndTime: 0,\n postRedirectStartTime: timingInfo.startTime ?? 0,\n finalServiceWorkerStartTime: 0,\n finalNetworkResponseStartTime: 0,\n finalNetworkRequestStartTime: 0,\n endTime: 0,\n encodedBodySize: 0,\n decodedBodySize: 0,\n finalConnectionTimingInfo: null\n }\n}\n\n// https://html.spec.whatwg.org/multipage/origin.html#policy-container\nfunction makePolicyContainer () {\n // Note: the fetch spec doesn't make use of embedder policy or CSP list\n return {\n referrerPolicy: 'strict-origin-when-cross-origin'\n }\n}\n\n// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container\nfunction clonePolicyContainer (policyContainer) {\n return {\n referrerPolicy: policyContainer.referrerPolicy\n }\n}\n\n// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer\nfunction determineRequestsReferrer (request) {\n // 1. Let policy be request's referrer policy.\n const policy = request.referrerPolicy\n\n // Note: policy cannot (shouldn't) be null or an empty string.\n assert(policy)\n\n // 2. Let environment be request’s client.\n\n let referrerSource = null\n\n // 3. Switch on request’s referrer:\n if (request.referrer === 'client') {\n // Note: node isn't a browser and doesn't implement document/iframes,\n // so we bypass this step and replace it with our own.\n\n const globalOrigin = getGlobalOrigin()\n\n if (!globalOrigin || globalOrigin.origin === 'null') {\n return 'no-referrer'\n }\n\n // note: we need to clone it as it's mutated\n referrerSource = new URL(globalOrigin)\n } else if (request.referrer instanceof URL) {\n // Let referrerSource be request’s referrer.\n referrerSource = request.referrer\n }\n\n // 4. Let request’s referrerURL be the result of stripping referrerSource for\n // use as a referrer.\n let referrerURL = stripURLForReferrer(referrerSource)\n\n // 5. Let referrerOrigin be the result of stripping referrerSource for use as\n // a referrer, with the origin-only flag set to true.\n const referrerOrigin = stripURLForReferrer(referrerSource, true)\n\n // 6. 
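// setRequestReferrerPolicyOnRedirect above walks the comma-separated
// `Referrer-Policy` header from right to left and keeps the first recognised token,
// so the right-most valid policy wins and earlier ones act as fallbacks. A
// standalone sketch of that selection:
const REFERRER_POLICIES = new Set([
  'no-referrer', 'no-referrer-when-downgrade', 'origin',
  'origin-when-cross-origin', 'same-origin', 'strict-origin',
  'strict-origin-when-cross-origin', 'unsafe-url'
])

function pickReferrerPolicy (headerValue) {
  const tokens = (headerValue ?? '').split(',')
  for (let i = tokens.length; i !== 0; i--) {
    const token = tokens[i - 1].trim()
    if (REFERRER_POLICIES.has(token)) {
      return token
    }
  }
  return '' // no valid token: leave the request's policy unchanged
}

// pickReferrerPolicy('unknown-token, strict-origin-when-cross-origin')
//   → 'strict-origin-when-cross-origin'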
If the result of serializing referrerURL is a string whose length is\n // greater than 4096, set referrerURL to referrerOrigin.\n if (referrerURL.toString().length > 4096) {\n referrerURL = referrerOrigin\n }\n\n const areSameOrigin = sameOrigin(request, referrerURL)\n const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&\n !isURLPotentiallyTrustworthy(request.url)\n\n // 8. Execute the switch statements corresponding to the value of policy:\n switch (policy) {\n case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)\n case 'unsafe-url': return referrerURL\n case 'same-origin':\n return areSameOrigin ? referrerOrigin : 'no-referrer'\n case 'origin-when-cross-origin':\n return areSameOrigin ? referrerURL : referrerOrigin\n case 'strict-origin-when-cross-origin': {\n const currentURL = requestCurrentURL(request)\n\n // 1. If the origin of referrerURL and the origin of request’s current\n // URL are the same, then return referrerURL.\n if (sameOrigin(referrerURL, currentURL)) {\n return referrerURL\n }\n\n // 2. If referrerURL is a potentially trustworthy URL and request’s\n // current URL is not a potentially trustworthy URL, then return no\n // referrer.\n if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {\n return 'no-referrer'\n }\n\n // 3. Return referrerOrigin.\n return referrerOrigin\n }\n case 'strict-origin': // eslint-disable-line\n /**\n * 1. If referrerURL is a potentially trustworthy URL and\n * request’s current URL is not a potentially trustworthy URL,\n * then return no referrer.\n * 2. Return referrerOrigin\n */\n case 'no-referrer-when-downgrade': // eslint-disable-line\n /**\n * 1. If referrerURL is a potentially trustworthy URL and\n * request’s current URL is not a potentially trustworthy URL,\n * then return no referrer.\n * 2. Return referrerOrigin\n */\n\n default: // eslint-disable-line\n return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin\n }\n}\n\n/**\n * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url\n * @param {URL} url\n * @param {boolean|undefined} originOnly\n */\nfunction stripURLForReferrer (url, originOnly) {\n // 1. Assert: url is a URL.\n assert(url instanceof URL)\n\n // 2. If url’s scheme is a local scheme, then return no referrer.\n if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') {\n return 'no-referrer'\n }\n\n // 3. Set url’s username to the empty string.\n url.username = ''\n\n // 4. Set url’s password to the empty string.\n url.password = ''\n\n // 5. Set url’s fragment to null.\n url.hash = ''\n\n // 6. If the origin-only flag is true, then:\n if (originOnly) {\n // 1. Set url’s path to « the empty string ».\n url.pathname = ''\n\n // 2. Set url’s query to null.\n url.search = ''\n }\n\n // 7. 
Return url.\n return url\n}\n\nfunction isURLPotentiallyTrustworthy (url) {\n if (!(url instanceof URL)) {\n return false\n }\n\n // If child of about, return true\n if (url.href === 'about:blank' || url.href === 'about:srcdoc') {\n return true\n }\n\n // If scheme is data, return true\n if (url.protocol === 'data:') return true\n\n // If file, return true\n if (url.protocol === 'file:') return true\n\n return isOriginPotentiallyTrustworthy(url.origin)\n\n function isOriginPotentiallyTrustworthy (origin) {\n // If origin is explicitly null, return false\n if (origin == null || origin === 'null') return false\n\n const originAsURL = new URL(origin)\n\n // If secure, return true\n if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') {\n return true\n }\n\n // If localhost or variants, return true\n if (/^127(?:\\.[0-9]+){0,2}\\.[0-9]+$|^\\[(?:0*:)*?:?0*1\\]$/.test(originAsURL.hostname) ||\n (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) ||\n (originAsURL.hostname.endsWith('.localhost'))) {\n return true\n }\n\n // If any other, return false\n return false\n }\n}\n\n/**\n * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist\n * @param {Uint8Array} bytes\n * @param {string} metadataList\n */\nfunction bytesMatch (bytes, metadataList) {\n // If node is not built with OpenSSL support, we cannot check\n // a request's integrity, so allow it by default (the spec will\n // allow requests if an invalid hash is given, as precedence).\n /* istanbul ignore if: only if node is built with --without-ssl */\n if (crypto === undefined) {\n return true\n }\n\n // 1. Let parsedMetadata be the result of parsing metadataList.\n const parsedMetadata = parseMetadata(metadataList)\n\n // 2. If parsedMetadata is no metadata, return true.\n if (parsedMetadata === 'no metadata') {\n return true\n }\n\n // 3. If parsedMetadata is the empty set, return true.\n if (parsedMetadata.length === 0) {\n return true\n }\n\n // 4. Let metadata be the result of getting the strongest\n // metadata from parsedMetadata.\n const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))\n // get the strongest algorithm\n const strongest = list[0].algo\n // get all entries that use the strongest algorithm; ignore weaker\n const metadata = list.filter((item) => item.algo === strongest)\n\n // 5. For each item in metadata:\n for (const item of metadata) {\n // 1. Let algorithm be the alg component of item.\n const algorithm = item.algo\n\n // 2. Let expectedValue be the val component of item.\n let expectedValue = item.hash\n\n // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e\n // \"be liberal with padding\". This is annoying, and it's not even in the spec.\n\n if (expectedValue.endsWith('==')) {\n expectedValue = expectedValue.slice(0, -2)\n }\n\n // 3. Let actualValue be the result of applying algorithm to bytes.\n let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')\n\n if (actualValue.endsWith('==')) {\n actualValue = actualValue.slice(0, -2)\n }\n\n // 4. If actualValue is a case-sensitive match for expectedValue,\n // return true.\n if (actualValue === expectedValue) {\n return true\n }\n\n let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')\n\n if (actualBase64URL.endsWith('==')) {\n actualBase64URL = actualBase64URL.slice(0, -2)\n }\n\n if (actualBase64URL === expectedValue) {\n return true\n }\n }\n\n // 6. 
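// --- Illustrative, standalone sketch (not part of the bundle) ---
// It shows the observable effect of the stripURLForReferrer logic above:
// credentials and fragment are removed, and with the origin-only flag the
// path and query are dropped as well. The helper name is hypothetical.
function stripForReferrerExample (input, originOnly = false) {
  const url = new URL(input) // parse a fresh copy so the caller's URL is untouched
  url.username = ''
  url.password = ''
  url.hash = ''
  if (originOnly) {
    url.pathname = ''
    url.search = ''
  }
  return url.toString()
}

// stripForReferrerExample('https://user:pw@example.com/a/b?q=1#frag')
//   -> 'https://example.com/a/b?q=1'
// stripForReferrerExample('https://user:pw@example.com/a/b?q=1#frag', true)
//   -> 'https://example.com/'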
Return false.\n return false\n}\n\n// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options\n// https://www.w3.org/TR/CSP2/#source-list-syntax\n// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1\nconst parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\\x21-\\x7e]?)?/i\n\n/**\n * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata\n * @param {string} metadata\n */\nfunction parseMetadata (metadata) {\n // 1. Let result be the empty set.\n /** @type {{ algo: string, hash: string }[]} */\n const result = []\n\n // 2. Let empty be equal to true.\n let empty = true\n\n const supportedHashes = crypto.getHashes()\n\n // 3. For each token returned by splitting metadata on spaces:\n for (const token of metadata.split(' ')) {\n // 1. Set empty to false.\n empty = false\n\n // 2. Parse token as a hash-with-options.\n const parsedToken = parseHashWithOptions.exec(token)\n\n // 3. If token does not parse, continue to the next token.\n if (parsedToken === null || parsedToken.groups === undefined) {\n // Note: Chromium blocks the request at this point, but Firefox\n // gives a warning that an invalid integrity was given. The\n // correct behavior is to ignore these, and subsequently not\n // check the integrity of the resource.\n continue\n }\n\n // 4. Let algorithm be the hash-algo component of token.\n const algorithm = parsedToken.groups.algo\n\n // 5. If algorithm is a hash function recognized by the user\n // agent, add the parsed token to result.\n if (supportedHashes.includes(algorithm.toLowerCase())) {\n result.push(parsedToken.groups)\n }\n }\n\n // 4. Return no metadata if empty is true, otherwise return result.\n if (empty === true) {\n return 'no metadata'\n }\n\n return result\n}\n\n// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request\nfunction tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {\n // TODO\n}\n\n/**\n * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}\n * @param {URL} A\n * @param {URL} B\n */\nfunction sameOrigin (A, B) {\n // 1. If A and B are the same opaque origin, then return true.\n if (A.origin === B.origin && A.origin === 'null') {\n return true\n }\n\n // 2. If A and B are both tuple origins and their schemes,\n // hosts, and port are identical, then return true.\n if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) {\n return true\n }\n\n // 3. Return false.\n return false\n}\n\nfunction createDeferredPromise () {\n let res\n let rej\n const promise = new Promise((resolve, reject) => {\n res = resolve\n rej = reject\n })\n\n return { promise, resolve: res, reject: rej }\n}\n\nfunction isAborted (fetchParams) {\n return fetchParams.controller.state === 'aborted'\n}\n\nfunction isCancelled (fetchParams) {\n return fetchParams.controller.state === 'aborted' ||\n fetchParams.controller.state === 'terminated'\n}\n\nconst normalizeMethodRecord = {\n delete: 'DELETE',\n DELETE: 'DELETE',\n get: 'GET',\n GET: 'GET',\n head: 'HEAD',\n HEAD: 'HEAD',\n options: 'OPTIONS',\n OPTIONS: 'OPTIONS',\n post: 'POST',\n POST: 'POST',\n put: 'PUT',\n PUT: 'PUT'\n}\n\n// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.\nObject.setPrototypeOf(normalizeMethodRecord, null)\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-method-normalize\n * @param {string} method\n */\nfunction normalizeMethod (method) {\n return normalizeMethodRecord[method.toLowerCase()] ?? 
method\n}\n\n// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string\nfunction serializeJavascriptValueToJSONString (value) {\n // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »).\n const result = JSON.stringify(value)\n\n // 2. If result is undefined, then throw a TypeError.\n if (result === undefined) {\n throw new TypeError('Value is not JSON serializable')\n }\n\n // 3. Assert: result is a string.\n assert(typeof result === 'string')\n\n // 4. Return result.\n return result\n}\n\n// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object\nconst esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))\n\n/**\n * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object\n * @param {() => unknown[]} iterator\n * @param {string} name name of the instance\n * @param {'key'|'value'|'key+value'} kind\n */\nfunction makeIterator (iterator, name, kind) {\n const object = {\n index: 0,\n kind,\n target: iterator\n }\n\n const i = {\n next () {\n // 1. Let interface be the interface for which the iterator prototype object exists.\n\n // 2. Let thisValue be the this value.\n\n // 3. Let object be ? ToObject(thisValue).\n\n // 4. If object is a platform object, then perform a security\n // check, passing:\n\n // 5. If object is not a default iterator object for interface,\n // then throw a TypeError.\n if (Object.getPrototypeOf(this) !== i) {\n throw new TypeError(\n `'next' called on an object that does not implement interface ${name} Iterator.`\n )\n }\n\n // 6. Let index be object’s index.\n // 7. Let kind be object’s kind.\n // 8. Let values be object’s target's value pairs to iterate over.\n const { index, kind, target } = object\n const values = target()\n\n // 9. Let len be the length of values.\n const len = values.length\n\n // 10. If index is greater than or equal to len, then return\n // CreateIterResultObject(undefined, true).\n if (index >= len) {\n return { value: undefined, done: true }\n }\n\n // 11. Let pair be the entry in values at index index.\n const pair = values[index]\n\n // 12. Set object’s index to index + 1.\n object.index = index + 1\n\n // 13. Return the iterator result for pair and kind.\n return iteratorResult(pair, kind)\n },\n // The class string of an iterator prototype object for a given interface is the\n // result of concatenating the identifier of the interface and the string \" Iterator\".\n [Symbol.toStringTag]: `${name} Iterator`\n }\n\n // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.\n Object.setPrototypeOf(i, esIteratorPrototype)\n // esIteratorPrototype needs to be the prototype of i\n // which is the prototype of an empty object. Yes, it's confusing.\n return Object.setPrototypeOf({}, i)\n}\n\n// https://webidl.spec.whatwg.org/#iterator-result\nfunction iteratorResult (pair, kind) {\n let result\n\n // 1. Let result be a value determined by the value of kind:\n switch (kind) {\n case 'key': {\n // 1. Let idlKey be pair’s key.\n // 2. Let key be the result of converting idlKey to an\n // ECMAScript value.\n // 3. result is key.\n result = pair[0]\n break\n }\n case 'value': {\n // 1. Let idlValue be pair’s value.\n // 2. Let value be the result of converting idlValue to\n // an ECMAScript value.\n // 3. result is value.\n result = pair[1]\n break\n }\n case 'key+value': {\n // 1. Let idlKey be pair’s key.\n // 2. Let idlValue be pair’s value.\n // 3. 
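// --- Illustrative, standalone sketch (not part of the bundle) ---
// It exercises the subresource-integrity idea used by the bytesMatch and
// parseMetadata helpers earlier in this module: hash the body with the
// algorithm named in the metadata and compare base64 digests. The helper
// name is hypothetical and the metadata string is computed on the spot
// rather than taken from a real manifest.
const { createHash } = require('crypto')

function simpleIntegrityCheck (bytes, metadata) {
  const dash = metadata.indexOf('-')
  const algo = metadata.slice(0, dash)
  const expected = metadata.slice(dash + 1)
  return createHash(algo).update(bytes).digest('base64') === expected
}

const sampleBody = Buffer.from('hello world')
const goodMetadata = 'sha256-' + createHash('sha256').update(sampleBody).digest('base64')
// simpleIntegrityCheck(sampleBody, goodMetadata) === true
// simpleIntegrityCheck(sampleBody, 'sha256-AAAA') === false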
Let key be the result of converting idlKey to an\n // ECMAScript value.\n // 4. Let value be the result of converting idlValue to\n // an ECMAScript value.\n // 5. Let array be ! ArrayCreate(2).\n // 6. Call ! CreateDataProperty(array, \"0\", key).\n // 7. Call ! CreateDataProperty(array, \"1\", value).\n // 8. result is array.\n result = pair\n break\n }\n }\n\n // 2. Return CreateIterResultObject(result, false).\n return { value: result, done: false }\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#body-fully-read\n */\nasync function fullyReadBody (body, processBody, processBodyError) {\n // 1. If taskDestination is null, then set taskDestination to\n // the result of starting a new parallel queue.\n\n // 2. Let successSteps given a byte sequence bytes be to queue a\n // fetch task to run processBody given bytes, with taskDestination.\n const successSteps = processBody\n\n // 3. Let errorSteps be to queue a fetch task to run processBodyError,\n // with taskDestination.\n const errorSteps = processBodyError\n\n // 4. Let reader be the result of getting a reader for body’s stream.\n // If that threw an exception, then run errorSteps with that\n // exception and return.\n let reader\n\n try {\n reader = body.stream.getReader()\n } catch (e) {\n errorSteps(e)\n return\n }\n\n // 5. Read all bytes from reader, given successSteps and errorSteps.\n try {\n const result = await readAllBytes(reader)\n successSteps(result)\n } catch (e) {\n errorSteps(e)\n }\n}\n\n/** @type {ReadableStream} */\nlet ReadableStream = globalThis.ReadableStream\n\nfunction isReadableStreamLike (stream) {\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n return stream instanceof ReadableStream || (\n stream[Symbol.toStringTag] === 'ReadableStream' &&\n typeof stream.tee === 'function'\n )\n}\n\nconst MAXIMUM_ARGUMENT_LENGTH = 65535\n\n/**\n * @see https://infra.spec.whatwg.org/#isomorphic-decode\n * @param {number[]|Uint8Array} input\n */\nfunction isomorphicDecode (input) {\n // 1. To isomorphic decode a byte sequence input, return a string whose code point\n // length is equal to input’s length and whose code points have the same values\n // as the values of input’s bytes, in the same order.\n\n if (input.length < MAXIMUM_ARGUMENT_LENGTH) {\n return String.fromCharCode(...input)\n }\n\n return input.reduce((previous, current) => previous + String.fromCharCode(current), '')\n}\n\n/**\n * @param {ReadableStreamController} controller\n */\nfunction readableStreamClose (controller) {\n try {\n controller.close()\n } catch (err) {\n // TODO: add comment explaining why this error occurs.\n if (!err.message.includes('Controller is already closed')) {\n throw err\n }\n }\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#isomorphic-encode\n * @param {string} input\n */\nfunction isomorphicEncode (input) {\n // 1. Assert: input contains no code points greater than U+00FF.\n for (let i = 0; i < input.length; i++) {\n assert(input.charCodeAt(i) <= 0xFF)\n }\n\n // 2. 
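// --- Illustrative, standalone sketch (not part of the bundle) ---
// It shows the read loop that readAllBytes/fullyReadBody above implement:
// drain a ReadableStream reader and concatenate the Uint8Array chunks into
// a single Buffer. The helper name is hypothetical.
async function drainToBuffer (stream) {
  const reader = stream.getReader()
  const chunks = []
  let total = 0
  while (true) {
    const { done, value } = await reader.read()
    if (done) return Buffer.concat(chunks, total)
    chunks.push(value)
    total += value.length
  }
}

// Example (recent Node versions expose ReadableStream via 'stream/web'):
// const { ReadableStream } = require('stream/web')
// const rs = new ReadableStream({ start (c) { c.enqueue(new Uint8Array([104, 105])); c.close() } })
// drainToBuffer(rs).then(buf => console.log(buf.toString())) // 'hi'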
Return a byte sequence whose length is equal to input’s code\n // point length and whose bytes have the same values as the\n // values of input’s code points, in the same order\n return input\n}\n\n/**\n * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes\n * @see https://streams.spec.whatwg.org/#read-loop\n * @param {ReadableStreamDefaultReader} reader\n */\nasync function readAllBytes (reader) {\n const bytes = []\n let byteLength = 0\n\n while (true) {\n const { done, value: chunk } = await reader.read()\n\n if (done) {\n // 1. Call successSteps with bytes.\n return Buffer.concat(bytes, byteLength)\n }\n\n // 1. If chunk is not a Uint8Array object, call failureSteps\n // with a TypeError and abort these steps.\n if (!isUint8Array(chunk)) {\n throw new TypeError('Received non-Uint8Array chunk')\n }\n\n // 2. Append the bytes represented by chunk to bytes.\n bytes.push(chunk)\n byteLength += chunk.length\n\n // 3. Read-loop given reader, bytes, successSteps, and failureSteps.\n }\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#is-local\n * @param {URL} url\n */\nfunction urlIsLocal (url) {\n assert('protocol' in url) // ensure it's a url object\n\n const protocol = url.protocol\n\n return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:'\n}\n\n/**\n * @param {string|URL} url\n */\nfunction urlHasHttpsScheme (url) {\n if (typeof url === 'string') {\n return url.startsWith('https:')\n }\n\n return url.protocol === 'https:'\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#http-scheme\n * @param {URL} url\n */\nfunction urlIsHttpHttpsScheme (url) {\n assert('protocol' in url) // ensure it's a url object\n\n const protocol = url.protocol\n\n return protocol === 'http:' || protocol === 'https:'\n}\n\n/**\n * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.\n */\nconst hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key))\n\nmodule.exports = {\n isAborted,\n isCancelled,\n createDeferredPromise,\n ReadableStreamFrom,\n toUSVString,\n tryUpgradeRequestToAPotentiallyTrustworthyURL,\n coarsenedSharedCurrentTime,\n determineRequestsReferrer,\n makePolicyContainer,\n clonePolicyContainer,\n appendFetchMetadata,\n appendRequestOriginHeader,\n TAOCheck,\n corsCheck,\n crossOriginResourcePolicyCheck,\n createOpaqueTimingInfo,\n setRequestReferrerPolicyOnRedirect,\n isValidHTTPToken,\n requestBadPort,\n requestCurrentURL,\n responseURL,\n responseLocationURL,\n isBlobLike,\n isURLPotentiallyTrustworthy,\n isValidReasonPhrase,\n sameOrigin,\n normalizeMethod,\n serializeJavascriptValueToJSONString,\n makeIterator,\n isValidHeaderName,\n isValidHeaderValue,\n hasOwn,\n isErrorLike,\n fullyReadBody,\n bytesMatch,\n isReadableStreamLike,\n readableStreamClose,\n isomorphicEncode,\n isomorphicDecode,\n urlIsLocal,\n urlHasHttpsScheme,\n urlIsHttpHttpsScheme,\n readAllBytes,\n normalizeMethodRecord\n}\n","'use strict'\n\nconst { types } = require('util')\nconst { hasOwn, toUSVString } = require('./util')\n\n/** @type {import('../../types/webidl').Webidl} */\nconst webidl = {}\nwebidl.converters = {}\nwebidl.util = {}\nwebidl.errors = {}\n\nwebidl.errors.exception = function (message) {\n return new TypeError(`${message.header}: ${message.message}`)\n}\n\nwebidl.errors.conversionFailed = function (context) {\n const plural = context.types.length === 1 ? 
'' : ' one of'\n const message =\n `${context.argument} could not be converted to` +\n `${plural}: ${context.types.join(', ')}.`\n\n return webidl.errors.exception({\n header: context.prefix,\n message\n })\n}\n\nwebidl.errors.invalidArgument = function (context) {\n return webidl.errors.exception({\n header: context.prefix,\n message: `\"${context.value}\" is an invalid ${context.type}.`\n })\n}\n\n// https://webidl.spec.whatwg.org/#implements\nwebidl.brandCheck = function (V, I, opts = undefined) {\n if (opts?.strict !== false && !(V instanceof I)) {\n throw new TypeError('Illegal invocation')\n } else {\n return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]\n }\n}\n\nwebidl.argumentLengthCheck = function ({ length }, min, ctx) {\n if (length < min) {\n throw webidl.errors.exception({\n message: `${min} argument${min !== 1 ? 's' : ''} required, ` +\n `but${length ? ' only' : ''} ${length} found.`,\n ...ctx\n })\n }\n}\n\nwebidl.illegalConstructor = function () {\n throw webidl.errors.exception({\n header: 'TypeError',\n message: 'Illegal constructor'\n })\n}\n\n// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values\nwebidl.util.Type = function (V) {\n switch (typeof V) {\n case 'undefined': return 'Undefined'\n case 'boolean': return 'Boolean'\n case 'string': return 'String'\n case 'symbol': return 'Symbol'\n case 'number': return 'Number'\n case 'bigint': return 'BigInt'\n case 'function':\n case 'object': {\n if (V === null) {\n return 'Null'\n }\n\n return 'Object'\n }\n }\n}\n\n// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint\nwebidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {\n let upperBound\n let lowerBound\n\n // 1. If bitLength is 64, then:\n if (bitLength === 64) {\n // 1. Let upperBound be 2^53 − 1.\n upperBound = Math.pow(2, 53) - 1\n\n // 2. If signedness is \"unsigned\", then let lowerBound be 0.\n if (signedness === 'unsigned') {\n lowerBound = 0\n } else {\n // 3. Otherwise let lowerBound be −2^53 + 1.\n lowerBound = Math.pow(-2, 53) + 1\n }\n } else if (signedness === 'unsigned') {\n // 2. Otherwise, if signedness is \"unsigned\", then:\n\n // 1. Let lowerBound be 0.\n lowerBound = 0\n\n // 2. Let upperBound be 2^bitLength − 1.\n upperBound = Math.pow(2, bitLength) - 1\n } else {\n // 3. Otherwise:\n\n // 1. Let lowerBound be -2^bitLength − 1.\n lowerBound = Math.pow(-2, bitLength) - 1\n\n // 2. Let upperBound be 2^bitLength − 1 − 1.\n upperBound = Math.pow(2, bitLength - 1) - 1\n }\n\n // 4. Let x be ? ToNumber(V).\n let x = Number(V)\n\n // 5. If x is −0, then set x to +0.\n if (x === 0) {\n x = 0\n }\n\n // 6. If the conversion is to an IDL type associated\n // with the [EnforceRange] extended attribute, then:\n if (opts.enforceRange === true) {\n // 1. If x is NaN, +∞, or −∞, then throw a TypeError.\n if (\n Number.isNaN(x) ||\n x === Number.POSITIVE_INFINITY ||\n x === Number.NEGATIVE_INFINITY\n ) {\n throw webidl.errors.exception({\n header: 'Integer conversion',\n message: `Could not convert ${V} to an integer.`\n })\n }\n\n // 2. Set x to IntegerPart(x).\n x = webidl.util.IntegerPart(x)\n\n // 3. If x < lowerBound or x > upperBound, then\n // throw a TypeError.\n if (x < lowerBound || x > upperBound) {\n throw webidl.errors.exception({\n header: 'Integer conversion',\n message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`\n })\n }\n\n // 4. Return x.\n return x\n }\n\n // 7. 
If x is not NaN and the conversion is to an IDL\n // type associated with the [Clamp] extended\n // attribute, then:\n if (!Number.isNaN(x) && opts.clamp === true) {\n // 1. Set x to min(max(x, lowerBound), upperBound).\n x = Math.min(Math.max(x, lowerBound), upperBound)\n\n // 2. Round x to the nearest integer, choosing the\n // even integer if it lies halfway between two,\n // and choosing +0 rather than −0.\n if (Math.floor(x) % 2 === 0) {\n x = Math.floor(x)\n } else {\n x = Math.ceil(x)\n }\n\n // 3. Return x.\n return x\n }\n\n // 8. If x is NaN, +0, +∞, or −∞, then return +0.\n if (\n Number.isNaN(x) ||\n (x === 0 && Object.is(0, x)) ||\n x === Number.POSITIVE_INFINITY ||\n x === Number.NEGATIVE_INFINITY\n ) {\n return 0\n }\n\n // 9. Set x to IntegerPart(x).\n x = webidl.util.IntegerPart(x)\n\n // 10. Set x to x modulo 2^bitLength.\n x = x % Math.pow(2, bitLength)\n\n // 11. If signedness is \"signed\" and x ≥ 2^bitLength − 1,\n // then return x − 2^bitLength.\n if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) {\n return x - Math.pow(2, bitLength)\n }\n\n // 12. Otherwise, return x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart\nwebidl.util.IntegerPart = function (n) {\n // 1. Let r be floor(abs(n)).\n const r = Math.floor(Math.abs(n))\n\n // 2. If n < 0, then return -1 × r.\n if (n < 0) {\n return -1 * r\n }\n\n // 3. Otherwise, return r.\n return r\n}\n\n// https://webidl.spec.whatwg.org/#es-sequence\nwebidl.sequenceConverter = function (converter) {\n return (V) => {\n // 1. If Type(V) is not Object, throw a TypeError.\n if (webidl.util.Type(V) !== 'Object') {\n throw webidl.errors.exception({\n header: 'Sequence',\n message: `Value of type ${webidl.util.Type(V)} is not an Object.`\n })\n }\n\n // 2. Let method be ? GetMethod(V, @@iterator).\n /** @type {Generator} */\n const method = V?.[Symbol.iterator]?.()\n const seq = []\n\n // 3. If method is undefined, throw a TypeError.\n if (\n method === undefined ||\n typeof method.next !== 'function'\n ) {\n throw webidl.errors.exception({\n header: 'Sequence',\n message: 'Object is not an iterator.'\n })\n }\n\n // https://webidl.spec.whatwg.org/#create-sequence-from-iterable\n while (true) {\n const { done, value } = method.next()\n\n if (done) {\n break\n }\n\n seq.push(converter(value))\n }\n\n return seq\n }\n}\n\n// https://webidl.spec.whatwg.org/#es-to-record\nwebidl.recordConverter = function (keyConverter, valueConverter) {\n return (O) => {\n // 1. If Type(O) is not Object, throw a TypeError.\n if (webidl.util.Type(O) !== 'Object') {\n throw webidl.errors.exception({\n header: 'Record',\n message: `Value of type ${webidl.util.Type(O)} is not an Object.`\n })\n }\n\n // 2. Let result be a new empty instance of record.\n const result = {}\n\n if (!types.isProxy(O)) {\n // Object.keys only returns enumerable properties\n const keys = Object.keys(O)\n\n for (const key of keys) {\n // 1. Let typedKey be key converted to an IDL value of type K.\n const typedKey = keyConverter(key)\n\n // 2. Let value be ? Get(O, key).\n // 3. Let typedValue be value converted to an IDL value of type V.\n const typedValue = valueConverter(O[key])\n\n // 4. Set result[typedKey] to typedValue.\n result[typedKey] = typedValue\n }\n\n // 5. Return result.\n return result\n }\n\n // 3. Let keys be ? O.[[OwnPropertyKeys]]().\n const keys = Reflect.ownKeys(O)\n\n // 4. For each key of keys.\n for (const key of keys) {\n // 1. Let desc be ? 
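// --- Illustrative, standalone sketch (not part of the bundle) ---
// It demonstrates the three behaviours webidl.util.ConvertToInt above
// provides for an unsigned 16-bit value: [EnforceRange] throws, [Clamp] pins
// to the range, and the default wraps modulo 2^16. NaN/Infinity handling is
// simplified relative to the spec, and the helper name is hypothetical.
function toUnsignedShort (value, mode = 'wrap') {
  const x = Math.trunc(Number(value))
  if (mode === 'enforce') {
    if (!Number.isFinite(x) || x < 0 || x > 0xFFFF) throw new TypeError('out of range')
    return x
  }
  if (mode === 'clamp') return Math.min(Math.max(x, 0), 0xFFFF)
  return ((x % 0x10000) + 0x10000) % 0x10000 // wrap; also handles negatives
}

// toUnsignedShort(70000, 'clamp') === 65535
// toUnsignedShort(70000) === 4464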
O.[[GetOwnProperty]](key).\n const desc = Reflect.getOwnPropertyDescriptor(O, key)\n\n // 2. If desc is not undefined and desc.[[Enumerable]] is true:\n if (desc?.enumerable) {\n // 1. Let typedKey be key converted to an IDL value of type K.\n const typedKey = keyConverter(key)\n\n // 2. Let value be ? Get(O, key).\n // 3. Let typedValue be value converted to an IDL value of type V.\n const typedValue = valueConverter(O[key])\n\n // 4. Set result[typedKey] to typedValue.\n result[typedKey] = typedValue\n }\n }\n\n // 5. Return result.\n return result\n }\n}\n\nwebidl.interfaceConverter = function (i) {\n return (V, opts = {}) => {\n if (opts.strict !== false && !(V instanceof i)) {\n throw webidl.errors.exception({\n header: i.name,\n message: `Expected ${V} to be an instance of ${i.name}.`\n })\n }\n\n return V\n }\n}\n\nwebidl.dictionaryConverter = function (converters) {\n return (dictionary) => {\n const type = webidl.util.Type(dictionary)\n const dict = {}\n\n if (type === 'Null' || type === 'Undefined') {\n return dict\n } else if (type !== 'Object') {\n throw webidl.errors.exception({\n header: 'Dictionary',\n message: `Expected ${dictionary} to be one of: Null, Undefined, Object.`\n })\n }\n\n for (const options of converters) {\n const { key, defaultValue, required, converter } = options\n\n if (required === true) {\n if (!hasOwn(dictionary, key)) {\n throw webidl.errors.exception({\n header: 'Dictionary',\n message: `Missing required key \"${key}\".`\n })\n }\n }\n\n let value = dictionary[key]\n const hasDefault = hasOwn(options, 'defaultValue')\n\n // Only use defaultValue if value is undefined and\n // a defaultValue options was provided.\n if (hasDefault && value !== null) {\n value = value ?? defaultValue\n }\n\n // A key can be optional and have no default value.\n // When this happens, do not perform a conversion,\n // and do not assign the key a value.\n if (required || hasDefault || value !== undefined) {\n value = converter(value)\n\n if (\n options.allowedValues &&\n !options.allowedValues.includes(value)\n ) {\n throw webidl.errors.exception({\n header: 'Dictionary',\n message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.`\n })\n }\n\n dict[key] = value\n }\n }\n\n return dict\n }\n}\n\nwebidl.nullableConverter = function (converter) {\n return (V) => {\n if (V === null) {\n return V\n }\n\n return converter(V)\n }\n}\n\n// https://webidl.spec.whatwg.org/#es-DOMString\nwebidl.converters.DOMString = function (V, opts = {}) {\n // 1. If V is null and the conversion is to an IDL type\n // associated with the [LegacyNullToEmptyString]\n // extended attribute, then return the DOMString value\n // that represents the empty string.\n if (V === null && opts.legacyNullToEmptyString) {\n return ''\n }\n\n // 2. Let x be ? ToString(V).\n if (typeof V === 'symbol') {\n throw new TypeError('Could not convert argument of type symbol to string.')\n }\n\n // 3. Return the IDL DOMString value that represents the\n // same sequence of code units as the one the\n // ECMAScript String value x represents.\n return String(V)\n}\n\n// https://webidl.spec.whatwg.org/#es-ByteString\nwebidl.converters.ByteString = function (V) {\n // 1. Let x be ? ToString(V).\n // Note: DOMString converter perform ? ToString(V)\n const x = webidl.converters.DOMString(V)\n\n // 2. 
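// --- Illustrative, standalone sketch (not part of the bundle) ---
// It captures the pattern behind webidl.dictionaryConverter above: walk a
// list of { key, converter, defaultValue, required } members and build a
// plain object, throwing when a required key is missing. The helper name
// and the sample members are assumptions for this example only.
function convertDictionary (input, members) {
  const out = {}
  for (const { key, converter, defaultValue, required } of members) {
    const present = input != null && Object.prototype.hasOwnProperty.call(input, key)
    if (!present && required) throw new TypeError(`Missing required key "${key}".`)
    const value = present ? input[key] : defaultValue
    if (value !== undefined) out[key] = converter(value)
  }
  return out
}

// convertDictionary({ loaded: '5' }, [
//   { key: 'loaded', converter: Number, defaultValue: 0 },
//   { key: 'total', converter: Number, defaultValue: 0 }
// ]) -> { loaded: 5, total: 0 }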
If the value of any element of x is greater than\n // 255, then throw a TypeError.\n for (let index = 0; index < x.length; index++) {\n if (x.charCodeAt(index) > 255) {\n throw new TypeError(\n 'Cannot convert argument to a ByteString because the character at ' +\n `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.`\n )\n }\n }\n\n // 3. Return an IDL ByteString value whose length is the\n // length of x, and where the value of each element is\n // the value of the corresponding element of x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-USVString\nwebidl.converters.USVString = toUSVString\n\n// https://webidl.spec.whatwg.org/#es-boolean\nwebidl.converters.boolean = function (V) {\n // 1. Let x be the result of computing ToBoolean(V).\n const x = Boolean(V)\n\n // 2. Return the IDL boolean value that is the one that represents\n // the same truth value as the ECMAScript Boolean value x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-any\nwebidl.converters.any = function (V) {\n return V\n}\n\n// https://webidl.spec.whatwg.org/#es-long-long\nwebidl.converters['long long'] = function (V) {\n // 1. Let x be ? ConvertToInt(V, 64, \"signed\").\n const x = webidl.util.ConvertToInt(V, 64, 'signed')\n\n // 2. Return the IDL long long value that represents\n // the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-unsigned-long-long\nwebidl.converters['unsigned long long'] = function (V) {\n // 1. Let x be ? ConvertToInt(V, 64, \"unsigned\").\n const x = webidl.util.ConvertToInt(V, 64, 'unsigned')\n\n // 2. Return the IDL unsigned long long value that\n // represents the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-unsigned-long\nwebidl.converters['unsigned long'] = function (V) {\n // 1. Let x be ? ConvertToInt(V, 32, \"unsigned\").\n const x = webidl.util.ConvertToInt(V, 32, 'unsigned')\n\n // 2. Return the IDL unsigned long value that\n // represents the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-unsigned-short\nwebidl.converters['unsigned short'] = function (V, opts) {\n // 1. Let x be ? ConvertToInt(V, 16, \"unsigned\").\n const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts)\n\n // 2. Return the IDL unsigned short value that represents\n // the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#idl-ArrayBuffer\nwebidl.converters.ArrayBuffer = function (V, opts = {}) {\n // 1. If Type(V) is not Object, or V does not have an\n // [[ArrayBufferData]] internal slot, then throw a\n // TypeError.\n // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances\n // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances\n if (\n webidl.util.Type(V) !== 'Object' ||\n !types.isAnyArrayBuffer(V)\n ) {\n throw webidl.errors.conversionFailed({\n prefix: `${V}`,\n argument: `${V}`,\n types: ['ArrayBuffer']\n })\n }\n\n // 2. If the conversion is not to an IDL type associated\n // with the [AllowShared] extended attribute, and\n // IsSharedArrayBuffer(V) is true, then throw a\n // TypeError.\n if (opts.allowShared === false && types.isSharedArrayBuffer(V)) {\n throw webidl.errors.exception({\n header: 'ArrayBuffer',\n message: 'SharedArrayBuffer is not allowed.'\n })\n }\n\n // 3. 
If the conversion is not to an IDL type associated\n // with the [AllowResizable] extended attribute, and\n // IsResizableArrayBuffer(V) is true, then throw a\n // TypeError.\n // Note: resizable ArrayBuffers are currently a proposal.\n\n // 4. Return the IDL ArrayBuffer value that is a\n // reference to the same object as V.\n return V\n}\n\nwebidl.converters.TypedArray = function (V, T, opts = {}) {\n // 1. Let T be the IDL type V is being converted to.\n\n // 2. If Type(V) is not Object, or V does not have a\n // [[TypedArrayName]] internal slot with a value\n // equal to T’s name, then throw a TypeError.\n if (\n webidl.util.Type(V) !== 'Object' ||\n !types.isTypedArray(V) ||\n V.constructor.name !== T.name\n ) {\n throw webidl.errors.conversionFailed({\n prefix: `${T.name}`,\n argument: `${V}`,\n types: [T.name]\n })\n }\n\n // 3. If the conversion is not to an IDL type associated\n // with the [AllowShared] extended attribute, and\n // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is\n // true, then throw a TypeError.\n if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {\n throw webidl.errors.exception({\n header: 'ArrayBuffer',\n message: 'SharedArrayBuffer is not allowed.'\n })\n }\n\n // 4. If the conversion is not to an IDL type associated\n // with the [AllowResizable] extended attribute, and\n // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is\n // true, then throw a TypeError.\n // Note: resizable array buffers are currently a proposal\n\n // 5. Return the IDL value of type T that is a reference\n // to the same object as V.\n return V\n}\n\nwebidl.converters.DataView = function (V, opts = {}) {\n // 1. If Type(V) is not Object, or V does not have a\n // [[DataView]] internal slot, then throw a TypeError.\n if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {\n throw webidl.errors.exception({\n header: 'DataView',\n message: 'Object is not a DataView.'\n })\n }\n\n // 2. If the conversion is not to an IDL type associated\n // with the [AllowShared] extended attribute, and\n // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,\n // then throw a TypeError.\n if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {\n throw webidl.errors.exception({\n header: 'ArrayBuffer',\n message: 'SharedArrayBuffer is not allowed.'\n })\n }\n\n // 3. If the conversion is not to an IDL type associated\n // with the [AllowResizable] extended attribute, and\n // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is\n // true, then throw a TypeError.\n // Note: resizable ArrayBuffers are currently a proposal\n\n // 4. 
Return the IDL DataView value that is a reference\n // to the same object as V.\n return V\n}\n\n// https://webidl.spec.whatwg.org/#BufferSource\nwebidl.converters.BufferSource = function (V, opts = {}) {\n if (types.isAnyArrayBuffer(V)) {\n return webidl.converters.ArrayBuffer(V, opts)\n }\n\n if (types.isTypedArray(V)) {\n return webidl.converters.TypedArray(V, V.constructor)\n }\n\n if (types.isDataView(V)) {\n return webidl.converters.DataView(V, opts)\n }\n\n throw new TypeError(`Could not convert ${V} to a BufferSource.`)\n}\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.ByteString\n)\n\nwebidl.converters['sequence>'] = webidl.sequenceConverter(\n webidl.converters['sequence']\n)\n\nwebidl.converters['record'] = webidl.recordConverter(\n webidl.converters.ByteString,\n webidl.converters.ByteString\n)\n\nmodule.exports = {\n webidl\n}\n","'use strict'\n\n/**\n * @see https://encoding.spec.whatwg.org/#concept-encoding-get\n * @param {string|undefined} label\n */\nfunction getEncoding (label) {\n if (!label) {\n return 'failure'\n }\n\n // 1. Remove any leading and trailing ASCII whitespace from label.\n // 2. If label is an ASCII case-insensitive match for any of the\n // labels listed in the table below, then return the\n // corresponding encoding; otherwise return failure.\n switch (label.trim().toLowerCase()) {\n case 'unicode-1-1-utf-8':\n case 'unicode11utf8':\n case 'unicode20utf8':\n case 'utf-8':\n case 'utf8':\n case 'x-unicode20utf8':\n return 'UTF-8'\n case '866':\n case 'cp866':\n case 'csibm866':\n case 'ibm866':\n return 'IBM866'\n case 'csisolatin2':\n case 'iso-8859-2':\n case 'iso-ir-101':\n case 'iso8859-2':\n case 'iso88592':\n case 'iso_8859-2':\n case 'iso_8859-2:1987':\n case 'l2':\n case 'latin2':\n return 'ISO-8859-2'\n case 'csisolatin3':\n case 'iso-8859-3':\n case 'iso-ir-109':\n case 'iso8859-3':\n case 'iso88593':\n case 'iso_8859-3':\n case 'iso_8859-3:1988':\n case 'l3':\n case 'latin3':\n return 'ISO-8859-3'\n case 'csisolatin4':\n case 'iso-8859-4':\n case 'iso-ir-110':\n case 'iso8859-4':\n case 'iso88594':\n case 'iso_8859-4':\n case 'iso_8859-4:1988':\n case 'l4':\n case 'latin4':\n return 'ISO-8859-4'\n case 'csisolatincyrillic':\n case 'cyrillic':\n case 'iso-8859-5':\n case 'iso-ir-144':\n case 'iso8859-5':\n case 'iso88595':\n case 'iso_8859-5':\n case 'iso_8859-5:1988':\n return 'ISO-8859-5'\n case 'arabic':\n case 'asmo-708':\n case 'csiso88596e':\n case 'csiso88596i':\n case 'csisolatinarabic':\n case 'ecma-114':\n case 'iso-8859-6':\n case 'iso-8859-6-e':\n case 'iso-8859-6-i':\n case 'iso-ir-127':\n case 'iso8859-6':\n case 'iso88596':\n case 'iso_8859-6':\n case 'iso_8859-6:1987':\n return 'ISO-8859-6'\n case 'csisolatingreek':\n case 'ecma-118':\n case 'elot_928':\n case 'greek':\n case 'greek8':\n case 'iso-8859-7':\n case 'iso-ir-126':\n case 'iso8859-7':\n case 'iso88597':\n case 'iso_8859-7':\n case 'iso_8859-7:1987':\n case 'sun_eu_greek':\n return 'ISO-8859-7'\n case 'csiso88598e':\n case 'csisolatinhebrew':\n case 'hebrew':\n case 'iso-8859-8':\n case 'iso-8859-8-e':\n case 'iso-ir-138':\n case 'iso8859-8':\n case 'iso88598':\n case 'iso_8859-8':\n case 'iso_8859-8:1988':\n case 'visual':\n return 'ISO-8859-8'\n case 'csiso88598i':\n case 'iso-8859-8-i':\n case 'logical':\n return 'ISO-8859-8-I'\n case 'csisolatin6':\n case 'iso-8859-10':\n case 'iso-ir-157':\n case 'iso8859-10':\n case 'iso885910':\n case 'l6':\n case 'latin6':\n return 'ISO-8859-10'\n case 'iso-8859-13':\n case 'iso8859-13':\n 
case 'iso885913':\n return 'ISO-8859-13'\n case 'iso-8859-14':\n case 'iso8859-14':\n case 'iso885914':\n return 'ISO-8859-14'\n case 'csisolatin9':\n case 'iso-8859-15':\n case 'iso8859-15':\n case 'iso885915':\n case 'iso_8859-15':\n case 'l9':\n return 'ISO-8859-15'\n case 'iso-8859-16':\n return 'ISO-8859-16'\n case 'cskoi8r':\n case 'koi':\n case 'koi8':\n case 'koi8-r':\n case 'koi8_r':\n return 'KOI8-R'\n case 'koi8-ru':\n case 'koi8-u':\n return 'KOI8-U'\n case 'csmacintosh':\n case 'mac':\n case 'macintosh':\n case 'x-mac-roman':\n return 'macintosh'\n case 'iso-8859-11':\n case 'iso8859-11':\n case 'iso885911':\n case 'tis-620':\n case 'windows-874':\n return 'windows-874'\n case 'cp1250':\n case 'windows-1250':\n case 'x-cp1250':\n return 'windows-1250'\n case 'cp1251':\n case 'windows-1251':\n case 'x-cp1251':\n return 'windows-1251'\n case 'ansi_x3.4-1968':\n case 'ascii':\n case 'cp1252':\n case 'cp819':\n case 'csisolatin1':\n case 'ibm819':\n case 'iso-8859-1':\n case 'iso-ir-100':\n case 'iso8859-1':\n case 'iso88591':\n case 'iso_8859-1':\n case 'iso_8859-1:1987':\n case 'l1':\n case 'latin1':\n case 'us-ascii':\n case 'windows-1252':\n case 'x-cp1252':\n return 'windows-1252'\n case 'cp1253':\n case 'windows-1253':\n case 'x-cp1253':\n return 'windows-1253'\n case 'cp1254':\n case 'csisolatin5':\n case 'iso-8859-9':\n case 'iso-ir-148':\n case 'iso8859-9':\n case 'iso88599':\n case 'iso_8859-9':\n case 'iso_8859-9:1989':\n case 'l5':\n case 'latin5':\n case 'windows-1254':\n case 'x-cp1254':\n return 'windows-1254'\n case 'cp1255':\n case 'windows-1255':\n case 'x-cp1255':\n return 'windows-1255'\n case 'cp1256':\n case 'windows-1256':\n case 'x-cp1256':\n return 'windows-1256'\n case 'cp1257':\n case 'windows-1257':\n case 'x-cp1257':\n return 'windows-1257'\n case 'cp1258':\n case 'windows-1258':\n case 'x-cp1258':\n return 'windows-1258'\n case 'x-mac-cyrillic':\n case 'x-mac-ukrainian':\n return 'x-mac-cyrillic'\n case 'chinese':\n case 'csgb2312':\n case 'csiso58gb231280':\n case 'gb2312':\n case 'gb_2312':\n case 'gb_2312-80':\n case 'gbk':\n case 'iso-ir-58':\n case 'x-gbk':\n return 'GBK'\n case 'gb18030':\n return 'gb18030'\n case 'big5':\n case 'big5-hkscs':\n case 'cn-big5':\n case 'csbig5':\n case 'x-x-big5':\n return 'Big5'\n case 'cseucpkdfmtjapanese':\n case 'euc-jp':\n case 'x-euc-jp':\n return 'EUC-JP'\n case 'csiso2022jp':\n case 'iso-2022-jp':\n return 'ISO-2022-JP'\n case 'csshiftjis':\n case 'ms932':\n case 'ms_kanji':\n case 'shift-jis':\n case 'shift_jis':\n case 'sjis':\n case 'windows-31j':\n case 'x-sjis':\n return 'Shift_JIS'\n case 'cseuckr':\n case 'csksc56011987':\n case 'euc-kr':\n case 'iso-ir-149':\n case 'korean':\n case 'ks_c_5601-1987':\n case 'ks_c_5601-1989':\n case 'ksc5601':\n case 'ksc_5601':\n case 'windows-949':\n return 'EUC-KR'\n case 'csiso2022kr':\n case 'hz-gb-2312':\n case 'iso-2022-cn':\n case 'iso-2022-cn-ext':\n case 'iso-2022-kr':\n case 'replacement':\n return 'replacement'\n case 'unicodefffe':\n case 'utf-16be':\n return 'UTF-16BE'\n case 'csunicode':\n case 'iso-10646-ucs-2':\n case 'ucs-2':\n case 'unicode':\n case 'unicodefeff':\n case 'utf-16':\n case 'utf-16le':\n return 'UTF-16LE'\n case 'x-user-defined':\n return 'x-user-defined'\n default: return 'failure'\n }\n}\n\nmodule.exports = {\n getEncoding\n}\n","'use strict'\n\nconst {\n staticPropertyDescriptors,\n readOperation,\n fireAProgressEvent\n} = require('./util')\nconst {\n kState,\n kError,\n kResult,\n kEvents,\n kAborted\n} = 
require('./symbols')\nconst { webidl } = require('../fetch/webidl')\nconst { kEnumerableProperty } = require('../core/util')\n\nclass FileReader extends EventTarget {\n constructor () {\n super()\n\n this[kState] = 'empty'\n this[kResult] = null\n this[kError] = null\n this[kEvents] = {\n loadend: null,\n error: null,\n abort: null,\n load: null,\n progress: null,\n loadstart: null\n }\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer\n * @param {import('buffer').Blob} blob\n */\n readAsArrayBuffer (blob) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n // The readAsArrayBuffer(blob) method, when invoked,\n // must initiate a read operation for blob with ArrayBuffer.\n readOperation(this, blob, 'ArrayBuffer')\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#readAsBinaryString\n * @param {import('buffer').Blob} blob\n */\n readAsBinaryString (blob) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n // The readAsBinaryString(blob) method, when invoked,\n // must initiate a read operation for blob with BinaryString.\n readOperation(this, blob, 'BinaryString')\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#readAsDataText\n * @param {import('buffer').Blob} blob\n * @param {string?} encoding\n */\n readAsText (blob, encoding = undefined) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n if (encoding !== undefined) {\n encoding = webidl.converters.DOMString(encoding)\n }\n\n // The readAsText(blob, encoding) method, when invoked,\n // must initiate a read operation for blob with Text and encoding.\n readOperation(this, blob, 'Text', encoding)\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL\n * @param {import('buffer').Blob} blob\n */\n readAsDataURL (blob) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n // The readAsDataURL(blob) method, when invoked, must\n // initiate a read operation for blob with DataURL.\n readOperation(this, blob, 'DataURL')\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dfn-abort\n */\n abort () {\n // 1. If this's state is \"empty\" or if this's state is\n // \"done\" set this's result to null and terminate\n // this algorithm.\n if (this[kState] === 'empty' || this[kState] === 'done') {\n this[kResult] = null\n return\n }\n\n // 2. If this's state is \"loading\" set this's state to\n // \"done\" and set this's result to null.\n if (this[kState] === 'loading') {\n this[kState] = 'done'\n this[kResult] = null\n }\n\n // 3. If there are any tasks from this on the file reading\n // task source in an affiliated task queue, then remove\n // those tasks from that task queue.\n this[kAborted] = true\n\n // 4. Terminate the algorithm for the read method being processed.\n // TODO\n\n // 5. Fire a progress event called abort at this.\n fireAProgressEvent('abort', this)\n\n // 6. 
If this's state is not \"loading\", fire a progress\n // event called loadend at this.\n if (this[kState] !== 'loading') {\n fireAProgressEvent('loadend', this)\n }\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate\n */\n get readyState () {\n webidl.brandCheck(this, FileReader)\n\n switch (this[kState]) {\n case 'empty': return this.EMPTY\n case 'loading': return this.LOADING\n case 'done': return this.DONE\n }\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dom-filereader-result\n */\n get result () {\n webidl.brandCheck(this, FileReader)\n\n // The result attribute’s getter, when invoked, must return\n // this's result.\n return this[kResult]\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dom-filereader-error\n */\n get error () {\n webidl.brandCheck(this, FileReader)\n\n // The error attribute’s getter, when invoked, must return\n // this's error.\n return this[kError]\n }\n\n get onloadend () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].loadend\n }\n\n set onloadend (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].loadend) {\n this.removeEventListener('loadend', this[kEvents].loadend)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].loadend = fn\n this.addEventListener('loadend', fn)\n } else {\n this[kEvents].loadend = null\n }\n }\n\n get onerror () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].error\n }\n\n set onerror (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].error) {\n this.removeEventListener('error', this[kEvents].error)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].error = fn\n this.addEventListener('error', fn)\n } else {\n this[kEvents].error = null\n }\n }\n\n get onloadstart () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].loadstart\n }\n\n set onloadstart (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].loadstart) {\n this.removeEventListener('loadstart', this[kEvents].loadstart)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].loadstart = fn\n this.addEventListener('loadstart', fn)\n } else {\n this[kEvents].loadstart = null\n }\n }\n\n get onprogress () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].progress\n }\n\n set onprogress (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].progress) {\n this.removeEventListener('progress', this[kEvents].progress)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].progress = fn\n this.addEventListener('progress', fn)\n } else {\n this[kEvents].progress = null\n }\n }\n\n get onload () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].load\n }\n\n set onload (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].load) {\n this.removeEventListener('load', this[kEvents].load)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].load = fn\n this.addEventListener('load', fn)\n } else {\n this[kEvents].load = null\n }\n }\n\n get onabort () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].abort\n }\n\n set onabort (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].abort) {\n this.removeEventListener('abort', this[kEvents].abort)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].abort = fn\n this.addEventListener('abort', fn)\n } else {\n this[kEvents].abort = null\n }\n }\n}\n\n// https://w3c.github.io/FileAPI/#dom-filereader-empty\nFileReader.EMPTY = FileReader.prototype.EMPTY = 0\n// 
https://w3c.github.io/FileAPI/#dom-filereader-loading\nFileReader.LOADING = FileReader.prototype.LOADING = 1\n// https://w3c.github.io/FileAPI/#dom-filereader-done\nFileReader.DONE = FileReader.prototype.DONE = 2\n\nObject.defineProperties(FileReader.prototype, {\n EMPTY: staticPropertyDescriptors,\n LOADING: staticPropertyDescriptors,\n DONE: staticPropertyDescriptors,\n readAsArrayBuffer: kEnumerableProperty,\n readAsBinaryString: kEnumerableProperty,\n readAsText: kEnumerableProperty,\n readAsDataURL: kEnumerableProperty,\n abort: kEnumerableProperty,\n readyState: kEnumerableProperty,\n result: kEnumerableProperty,\n error: kEnumerableProperty,\n onloadstart: kEnumerableProperty,\n onprogress: kEnumerableProperty,\n onload: kEnumerableProperty,\n onabort: kEnumerableProperty,\n onerror: kEnumerableProperty,\n onloadend: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'FileReader',\n writable: false,\n enumerable: false,\n configurable: true\n }\n})\n\nObject.defineProperties(FileReader, {\n EMPTY: staticPropertyDescriptors,\n LOADING: staticPropertyDescriptors,\n DONE: staticPropertyDescriptors\n})\n\nmodule.exports = {\n FileReader\n}\n","'use strict'\n\nconst { webidl } = require('../fetch/webidl')\n\nconst kState = Symbol('ProgressEvent state')\n\n/**\n * @see https://xhr.spec.whatwg.org/#progressevent\n */\nclass ProgressEvent extends Event {\n constructor (type, eventInitDict = {}) {\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {})\n\n super(type, eventInitDict)\n\n this[kState] = {\n lengthComputable: eventInitDict.lengthComputable,\n loaded: eventInitDict.loaded,\n total: eventInitDict.total\n }\n }\n\n get lengthComputable () {\n webidl.brandCheck(this, ProgressEvent)\n\n return this[kState].lengthComputable\n }\n\n get loaded () {\n webidl.brandCheck(this, ProgressEvent)\n\n return this[kState].loaded\n }\n\n get total () {\n webidl.brandCheck(this, ProgressEvent)\n\n return this[kState].total\n }\n}\n\nwebidl.converters.ProgressEventInit = webidl.dictionaryConverter([\n {\n key: 'lengthComputable',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'loaded',\n converter: webidl.converters['unsigned long long'],\n defaultValue: 0\n },\n {\n key: 'total',\n converter: webidl.converters['unsigned long long'],\n defaultValue: 0\n },\n {\n key: 'bubbles',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'cancelable',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'composed',\n converter: webidl.converters.boolean,\n defaultValue: false\n }\n])\n\nmodule.exports = {\n ProgressEvent\n}\n","'use strict'\n\nmodule.exports = {\n kState: Symbol('FileReader state'),\n kResult: Symbol('FileReader result'),\n kError: Symbol('FileReader error'),\n kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),\n kEvents: Symbol('FileReader events'),\n kAborted: Symbol('FileReader aborted')\n}\n","'use strict'\n\nconst {\n kState,\n kError,\n kResult,\n kAborted,\n kLastProgressEventFired\n} = require('./symbols')\nconst { ProgressEvent } = require('./progressevent')\nconst { getEncoding } = require('./encoding')\nconst { DOMException } = require('../fetch/constants')\nconst { serializeAMimeType, parseMIMEType } = require('../fetch/dataURL')\nconst { types } = require('util')\nconst { StringDecoder } = require('string_decoder')\nconst { btoa } = require('buffer')\n\n/** @type {PropertyDescriptor} */\nconst 
staticPropertyDescriptors = {\n enumerable: true,\n writable: false,\n configurable: false\n}\n\n/**\n * @see https://w3c.github.io/FileAPI/#readOperation\n * @param {import('./filereader').FileReader} fr\n * @param {import('buffer').Blob} blob\n * @param {string} type\n * @param {string?} encodingName\n */\nfunction readOperation (fr, blob, type, encodingName) {\n // 1. If fr’s state is \"loading\", throw an InvalidStateError\n // DOMException.\n if (fr[kState] === 'loading') {\n throw new DOMException('Invalid state', 'InvalidStateError')\n }\n\n // 2. Set fr’s state to \"loading\".\n fr[kState] = 'loading'\n\n // 3. Set fr’s result to null.\n fr[kResult] = null\n\n // 4. Set fr’s error to null.\n fr[kError] = null\n\n // 5. Let stream be the result of calling get stream on blob.\n /** @type {import('stream/web').ReadableStream} */\n const stream = blob.stream()\n\n // 6. Let reader be the result of getting a reader from stream.\n const reader = stream.getReader()\n\n // 7. Let bytes be an empty byte sequence.\n /** @type {Uint8Array[]} */\n const bytes = []\n\n // 8. Let chunkPromise be the result of reading a chunk from\n // stream with reader.\n let chunkPromise = reader.read()\n\n // 9. Let isFirstChunk be true.\n let isFirstChunk = true\n\n // 10. In parallel, while true:\n // Note: \"In parallel\" just means non-blocking\n // Note 2: readOperation itself cannot be async as double\n // reading the body would then reject the promise, instead\n // of throwing an error.\n ;(async () => {\n while (!fr[kAborted]) {\n // 1. Wait for chunkPromise to be fulfilled or rejected.\n try {\n const { done, value } = await chunkPromise\n\n // 2. If chunkPromise is fulfilled, and isFirstChunk is\n // true, queue a task to fire a progress event called\n // loadstart at fr.\n if (isFirstChunk && !fr[kAborted]) {\n queueMicrotask(() => {\n fireAProgressEvent('loadstart', fr)\n })\n }\n\n // 3. Set isFirstChunk to false.\n isFirstChunk = false\n\n // 4. If chunkPromise is fulfilled with an object whose\n // done property is false and whose value property is\n // a Uint8Array object, run these steps:\n if (!done && types.isUint8Array(value)) {\n // 1. Let bs be the byte sequence represented by the\n // Uint8Array object.\n\n // 2. Append bs to bytes.\n bytes.push(value)\n\n // 3. If roughly 50ms have passed since these steps\n // were last invoked, queue a task to fire a\n // progress event called progress at fr.\n if (\n (\n fr[kLastProgressEventFired] === undefined ||\n Date.now() - fr[kLastProgressEventFired] >= 50\n ) &&\n !fr[kAborted]\n ) {\n fr[kLastProgressEventFired] = Date.now()\n queueMicrotask(() => {\n fireAProgressEvent('progress', fr)\n })\n }\n\n // 4. Set chunkPromise to the result of reading a\n // chunk from stream with reader.\n chunkPromise = reader.read()\n } else if (done) {\n // 5. Otherwise, if chunkPromise is fulfilled with an\n // object whose done property is true, queue a task\n // to run the following steps and abort this algorithm:\n queueMicrotask(() => {\n // 1. Set fr’s state to \"done\".\n fr[kState] = 'done'\n\n // 2. Let result be the result of package data given\n // bytes, type, blob’s type, and encodingName.\n try {\n const result = packageData(bytes, type, blob.type, encodingName)\n\n // 4. Else:\n\n if (fr[kAborted]) {\n return\n }\n\n // 1. Set fr’s result to result.\n fr[kResult] = result\n\n // 2. Fire a progress event called load at the fr.\n fireAProgressEvent('load', fr)\n } catch (error) {\n // 3. If package data threw an exception error:\n\n // 1. 
Set fr’s error to error.\n fr[kError] = error\n\n // 2. Fire a progress event called error at fr.\n fireAProgressEvent('error', fr)\n }\n\n // 5. If fr’s state is not \"loading\", fire a progress\n // event called loadend at the fr.\n if (fr[kState] !== 'loading') {\n fireAProgressEvent('loadend', fr)\n }\n })\n\n break\n }\n } catch (error) {\n if (fr[kAborted]) {\n return\n }\n\n // 6. Otherwise, if chunkPromise is rejected with an\n // error error, queue a task to run the following\n // steps and abort this algorithm:\n queueMicrotask(() => {\n // 1. Set fr’s state to \"done\".\n fr[kState] = 'done'\n\n // 2. Set fr’s error to error.\n fr[kError] = error\n\n // 3. Fire a progress event called error at fr.\n fireAProgressEvent('error', fr)\n\n // 4. If fr’s state is not \"loading\", fire a progress\n // event called loadend at fr.\n if (fr[kState] !== 'loading') {\n fireAProgressEvent('loadend', fr)\n }\n })\n\n break\n }\n }\n })()\n}\n\n/**\n * @see https://w3c.github.io/FileAPI/#fire-a-progress-event\n * @see https://dom.spec.whatwg.org/#concept-event-fire\n * @param {string} e The name of the event\n * @param {import('./filereader').FileReader} reader\n */\nfunction fireAProgressEvent (e, reader) {\n // The progress event e does not bubble. e.bubbles must be false\n // The progress event e is NOT cancelable. e.cancelable must be false\n const event = new ProgressEvent(e, {\n bubbles: false,\n cancelable: false\n })\n\n reader.dispatchEvent(event)\n}\n\n/**\n * @see https://w3c.github.io/FileAPI/#blob-package-data\n * @param {Uint8Array[]} bytes\n * @param {string} type\n * @param {string?} mimeType\n * @param {string?} encodingName\n */\nfunction packageData (bytes, type, mimeType, encodingName) {\n // 1. A Blob has an associated package data algorithm, given\n // bytes, a type, a optional mimeType, and a optional\n // encodingName, which switches on type and runs the\n // associated steps:\n\n switch (type) {\n case 'DataURL': {\n // 1. Return bytes as a DataURL [RFC2397] subject to\n // the considerations below:\n // * Use mimeType as part of the Data URL if it is\n // available in keeping with the Data URL\n // specification [RFC2397].\n // * If mimeType is not available return a Data URL\n // without a media-type. [RFC2397].\n\n // https://datatracker.ietf.org/doc/html/rfc2397#section-3\n // dataurl := \"data:\" [ mediatype ] [ \";base64\" ] \",\" data\n // mediatype := [ type \"/\" subtype ] *( \";\" parameter )\n // data := *urlchar\n // parameter := attribute \"=\" value\n let dataURL = 'data:'\n\n const parsed = parseMIMEType(mimeType || 'application/octet-stream')\n\n if (parsed !== 'failure') {\n dataURL += serializeAMimeType(parsed)\n }\n\n dataURL += ';base64,'\n\n const decoder = new StringDecoder('latin1')\n\n for (const chunk of bytes) {\n dataURL += btoa(decoder.write(chunk))\n }\n\n dataURL += btoa(decoder.end())\n\n return dataURL\n }\n case 'Text': {\n // 1. Let encoding be failure\n let encoding = 'failure'\n\n // 2. If the encodingName is present, set encoding to the\n // result of getting an encoding from encodingName.\n if (encodingName) {\n encoding = getEncoding(encodingName)\n }\n\n // 3. If encoding is failure, and mimeType is present:\n if (encoding === 'failure' && mimeType) {\n // 1. Let type be the result of parse a MIME type\n // given mimeType.\n const type = parseMIMEType(mimeType)\n\n // 2. 
If type is not failure, set encoding to the result\n // of getting an encoding from type’s parameters[\"charset\"].\n if (type !== 'failure') {\n encoding = getEncoding(type.parameters.get('charset'))\n }\n }\n\n // 4. If encoding is failure, then set encoding to UTF-8.\n if (encoding === 'failure') {\n encoding = 'UTF-8'\n }\n\n // 5. Decode bytes using fallback encoding encoding, and\n // return the result.\n return decode(bytes, encoding)\n }\n case 'ArrayBuffer': {\n // Return a new ArrayBuffer whose contents are bytes.\n const sequence = combineByteSequences(bytes)\n\n return sequence.buffer\n }\n case 'BinaryString': {\n // Return bytes as a binary string, in which every byte\n // is represented by a code unit of equal value [0..255].\n let binaryString = ''\n\n const decoder = new StringDecoder('latin1')\n\n for (const chunk of bytes) {\n binaryString += decoder.write(chunk)\n }\n\n binaryString += decoder.end()\n\n return binaryString\n }\n }\n}\n\n/**\n * @see https://encoding.spec.whatwg.org/#decode\n * @param {Uint8Array[]} ioQueue\n * @param {string} encoding\n */\nfunction decode (ioQueue, encoding) {\n const bytes = combineByteSequences(ioQueue)\n\n // 1. Let BOMEncoding be the result of BOM sniffing ioQueue.\n const BOMEncoding = BOMSniffing(bytes)\n\n let slice = 0\n\n // 2. If BOMEncoding is non-null:\n if (BOMEncoding !== null) {\n // 1. Set encoding to BOMEncoding.\n encoding = BOMEncoding\n\n // 2. Read three bytes from ioQueue, if BOMEncoding is\n // UTF-8; otherwise read two bytes.\n // (Do nothing with those bytes.)\n slice = BOMEncoding === 'UTF-8' ? 3 : 2\n }\n\n // 3. Process a queue with an instance of encoding’s\n // decoder, ioQueue, output, and \"replacement\".\n\n // 4. Return output.\n\n const sliced = bytes.slice(slice)\n return new TextDecoder(encoding).decode(sliced)\n}\n\n/**\n * @see https://encoding.spec.whatwg.org/#bom-sniff\n * @param {Uint8Array} ioQueue\n */\nfunction BOMSniffing (ioQueue) {\n // 1. Let BOM be the result of peeking 3 bytes from ioQueue,\n // converted to a byte sequence.\n const [a, b, c] = ioQueue\n\n // 2. For each of the rows in the table below, starting with\n // the first one and going down, if BOM starts with the\n // bytes given in the first column, then return the\n // encoding given in the cell in the second column of that\n // row. Otherwise, return null.\n if (a === 0xEF && b === 0xBB && c === 0xBF) {\n return 'UTF-8'\n } else if (a === 0xFE && b === 0xFF) {\n return 'UTF-16BE'\n } else if (a === 0xFF && b === 0xFE) {\n return 'UTF-16LE'\n }\n\n return null\n}\n\n/**\n * @param {Uint8Array[]} sequences\n */\nfunction combineByteSequences (sequences) {\n const size = sequences.reduce((a, b) => {\n return a + b.byteLength\n }, 0)\n\n let offset = 0\n\n return sequences.reduce((a, b) => {\n a.set(b, offset)\n offset += b.byteLength\n return a\n }, new Uint8Array(size))\n}\n\nmodule.exports = {\n staticPropertyDescriptors,\n readOperation,\n fireAProgressEvent\n}\n","'use strict'\n\n// We include a version number for the Dispatcher API. 
In case of breaking changes,\n// this version number must be increased to avoid conflicts.\nconst globalDispatcher = Symbol.for('undici.globalDispatcher.1')\nconst { InvalidArgumentError } = require('./core/errors')\nconst Agent = require('./agent')\n\nif (getGlobalDispatcher() === undefined) {\n setGlobalDispatcher(new Agent())\n}\n\nfunction setGlobalDispatcher (agent) {\n if (!agent || typeof agent.dispatch !== 'function') {\n throw new InvalidArgumentError('Argument agent must implement Agent')\n }\n Object.defineProperty(globalThis, globalDispatcher, {\n value: agent,\n writable: true,\n enumerable: false,\n configurable: false\n })\n}\n\nfunction getGlobalDispatcher () {\n return globalThis[globalDispatcher]\n}\n\nmodule.exports = {\n setGlobalDispatcher,\n getGlobalDispatcher\n}\n","'use strict'\n\nmodule.exports = class DecoratorHandler {\n constructor (handler) {\n this.handler = handler\n }\n\n onConnect (...args) {\n return this.handler.onConnect(...args)\n }\n\n onError (...args) {\n return this.handler.onError(...args)\n }\n\n onUpgrade (...args) {\n return this.handler.onUpgrade(...args)\n }\n\n onHeaders (...args) {\n return this.handler.onHeaders(...args)\n }\n\n onData (...args) {\n return this.handler.onData(...args)\n }\n\n onComplete (...args) {\n return this.handler.onComplete(...args)\n }\n\n onBodySent (...args) {\n return this.handler.onBodySent(...args)\n }\n}\n","'use strict'\n\nconst util = require('../core/util')\nconst { kBodyUsed } = require('../core/symbols')\nconst assert = require('assert')\nconst { InvalidArgumentError } = require('../core/errors')\nconst EE = require('events')\n\nconst redirectableStatusCodes = [300, 301, 302, 303, 307, 308]\n\nconst kBody = Symbol('body')\n\nclass BodyAsyncIterable {\n constructor (body) {\n this[kBody] = body\n this[kBodyUsed] = false\n }\n\n async * [Symbol.asyncIterator] () {\n assert(!this[kBodyUsed], 'disturbed')\n this[kBodyUsed] = true\n yield * this[kBody]\n }\n}\n\nclass RedirectHandler {\n constructor (dispatch, maxRedirections, opts, handler) {\n if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {\n throw new InvalidArgumentError('maxRedirections must be a positive number')\n }\n\n util.validateHandler(handler, opts.method, opts.upgrade)\n\n this.dispatch = dispatch\n this.location = null\n this.abort = null\n this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy\n this.maxRedirections = maxRedirections\n this.handler = handler\n this.history = []\n\n if (util.isStream(this.opts.body)) {\n // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp\n // so that it can be dispatched again?\n // TODO (fix): Do we need 100-expect support to provide a way to do this properly?\n if (util.bodyLength(this.opts.body) === 0) {\n this.opts.body\n .on('data', function () {\n assert(false)\n })\n }\n\n if (typeof this.opts.body.readableDidRead !== 'boolean') {\n this.opts.body[kBodyUsed] = false\n EE.prototype.on.call(this.opts.body, 'data', function () {\n this[kBodyUsed] = true\n })\n }\n } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {\n // TODO (fix): We can't access ReadableStream internal state\n // to determine whether or not it has been disturbed. 
This is just\n // a workaround.\n this.opts.body = new BodyAsyncIterable(this.opts.body)\n } else if (\n this.opts.body &&\n typeof this.opts.body !== 'string' &&\n !ArrayBuffer.isView(this.opts.body) &&\n util.isIterable(this.opts.body)\n ) {\n // TODO: Should we allow re-using iterable if !this.opts.idempotent\n // or through some other flag?\n this.opts.body = new BodyAsyncIterable(this.opts.body)\n }\n }\n\n onConnect (abort) {\n this.abort = abort\n this.handler.onConnect(abort, { history: this.history })\n }\n\n onUpgrade (statusCode, headers, socket) {\n this.handler.onUpgrade(statusCode, headers, socket)\n }\n\n onError (error) {\n this.handler.onError(error)\n }\n\n onHeaders (statusCode, headers, resume, statusText) {\n this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)\n ? null\n : parseLocation(statusCode, headers)\n\n if (this.opts.origin) {\n this.history.push(new URL(this.opts.path, this.opts.origin))\n }\n\n if (!this.location) {\n return this.handler.onHeaders(statusCode, headers, resume, statusText)\n }\n\n const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))\n const path = search ? `${pathname}${search}` : pathname\n\n // Remove headers referring to the original URL.\n // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers.\n // https://tools.ietf.org/html/rfc7231#section-6.4\n this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)\n this.opts.path = path\n this.opts.origin = origin\n this.opts.maxRedirections = 0\n this.opts.query = null\n\n // https://tools.ietf.org/html/rfc7231#section-6.4.4\n // In case of HTTP 303, always replace method to be either HEAD or GET\n if (statusCode === 303 && this.opts.method !== 'HEAD') {\n this.opts.method = 'GET'\n this.opts.body = null\n }\n }\n\n onData (chunk) {\n if (this.location) {\n /*\n https://tools.ietf.org/html/rfc7231#section-6.4\n\n TLDR: undici always ignores 3xx response bodies.\n\n Redirection is used to serve the requested resource from another URL, so it is assumes that\n no body is generated (and thus can be ignored). 
Even though generating a body is not prohibited.\n\n For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually\n (which means it's optional and not mandated) contain just an hyperlink to the value of\n the Location response header, so the body can be ignored safely.\n\n For status 300, which is \"Multiple Choices\", the spec mentions both generating a Location\n response header AND a response body with the other possible location to follow.\n Since the spec explicitily chooses not to specify a format for such body and leave it to\n servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.\n */\n } else {\n return this.handler.onData(chunk)\n }\n }\n\n onComplete (trailers) {\n if (this.location) {\n /*\n https://tools.ietf.org/html/rfc7231#section-6.4\n\n TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections\n and neither are useful if present.\n\n See comment on onData method above for more detailed informations.\n */\n\n this.location = null\n this.abort = null\n\n this.dispatch(this.opts, this)\n } else {\n this.handler.onComplete(trailers)\n }\n }\n\n onBodySent (chunk) {\n if (this.handler.onBodySent) {\n this.handler.onBodySent(chunk)\n }\n }\n}\n\nfunction parseLocation (statusCode, headers) {\n if (redirectableStatusCodes.indexOf(statusCode) === -1) {\n return null\n }\n\n for (let i = 0; i < headers.length; i += 2) {\n if (headers[i].toString().toLowerCase() === 'location') {\n return headers[i + 1]\n }\n }\n}\n\n// https://tools.ietf.org/html/rfc7231#section-6.4.4\nfunction shouldRemoveHeader (header, removeContent, unknownOrigin) {\n return (\n (header.length === 4 && header.toString().toLowerCase() === 'host') ||\n (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||\n (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||\n (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')\n )\n}\n\n// https://tools.ietf.org/html/rfc7231#section-6.4\nfunction cleanRequestHeaders (headers, removeContent, unknownOrigin) {\n const ret = []\n if (Array.isArray(headers)) {\n for (let i = 0; i < headers.length; i += 2) {\n if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {\n ret.push(headers[i], headers[i + 1])\n }\n }\n } else if (headers && typeof headers === 'object') {\n for (const key of Object.keys(headers)) {\n if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {\n ret.push(key, headers[key])\n }\n }\n } else {\n assert(headers == null, 'headers must be an object or an array')\n }\n return ret\n}\n\nmodule.exports = RedirectHandler\n","const assert = require('assert')\n\nconst { kRetryHandlerDefaultRetry } = require('../core/symbols')\nconst { RequestRetryError } = require('../core/errors')\nconst { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util')\n\nfunction calculateRetryAfterHeader (retryAfter) {\n const current = Date.now()\n const diff = new Date(retryAfter).getTime() - current\n\n return diff\n}\n\nclass RetryHandler {\n constructor (opts, handlers) {\n const { retryOptions, ...dispatchOpts } = opts\n const {\n // Retry scoped\n retry: retryFn,\n maxRetries,\n maxTimeout,\n minTimeout,\n timeoutFactor,\n // Response scoped\n methods,\n errorCodes,\n retryAfter,\n statusCodes\n } = retryOptions ?? 
{}\n\n this.dispatch = handlers.dispatch\n this.handler = handlers.handler\n this.opts = dispatchOpts\n this.abort = null\n this.aborted = false\n this.retryOpts = {\n retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],\n retryAfter: retryAfter ?? true,\n maxTimeout: maxTimeout ?? 30 * 1000, // 30s,\n timeout: minTimeout ?? 500, // .5s\n timeoutFactor: timeoutFactor ?? 2,\n maxRetries: maxRetries ?? 5,\n // What errors we should retry\n methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],\n // Indicates which errors to retry\n statusCodes: statusCodes ?? [500, 502, 503, 504, 429],\n // List of errors to retry\n errorCodes: errorCodes ?? [\n 'ECONNRESET',\n 'ECONNREFUSED',\n 'ENOTFOUND',\n 'ENETDOWN',\n 'ENETUNREACH',\n 'EHOSTDOWN',\n 'EHOSTUNREACH',\n 'EPIPE'\n ]\n }\n\n this.retryCount = 0\n this.start = 0\n this.end = null\n this.etag = null\n this.resume = null\n\n // Handle possible onConnect duplication\n this.handler.onConnect(reason => {\n this.aborted = true\n if (this.abort) {\n this.abort(reason)\n } else {\n this.reason = reason\n }\n })\n }\n\n onRequestSent () {\n if (this.handler.onRequestSent) {\n this.handler.onRequestSent()\n }\n }\n\n onUpgrade (statusCode, headers, socket) {\n if (this.handler.onUpgrade) {\n this.handler.onUpgrade(statusCode, headers, socket)\n }\n }\n\n onConnect (abort) {\n if (this.aborted) {\n abort(this.reason)\n } else {\n this.abort = abort\n }\n }\n\n onBodySent (chunk) {\n if (this.handler.onBodySent) return this.handler.onBodySent(chunk)\n }\n\n static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {\n const { statusCode, code, headers } = err\n const { method, retryOptions } = opts\n const {\n maxRetries,\n timeout,\n maxTimeout,\n timeoutFactor,\n statusCodes,\n errorCodes,\n methods\n } = retryOptions\n let { counter, currentTimeout } = state\n\n currentTimeout =\n currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout\n\n // Any code that is not a Undici's originated and allowed to retry\n if (\n code &&\n code !== 'UND_ERR_REQ_RETRY' &&\n code !== 'UND_ERR_SOCKET' &&\n !errorCodes.includes(code)\n ) {\n cb(err)\n return\n }\n\n // If a set of method are provided and the current method is not in the list\n if (Array.isArray(methods) && !methods.includes(method)) {\n cb(err)\n return\n }\n\n // If a set of status code are provided and the current status code is not in the list\n if (\n statusCode != null &&\n Array.isArray(statusCodes) &&\n !statusCodes.includes(statusCode)\n ) {\n cb(err)\n return\n }\n\n // If we reached the max number of retries\n if (counter > maxRetries) {\n cb(err)\n return\n }\n\n let retryAfterHeader = headers != null && headers['retry-after']\n if (retryAfterHeader) {\n retryAfterHeader = Number(retryAfterHeader)\n retryAfterHeader = isNaN(retryAfterHeader)\n ? calculateRetryAfterHeader(retryAfterHeader)\n : retryAfterHeader * 1e3 // Retry-After is in seconds\n }\n\n const retryTimeout =\n retryAfterHeader > 0\n ? 
Math.min(retryAfterHeader, maxTimeout)\n : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)\n\n state.currentTimeout = retryTimeout\n\n setTimeout(() => cb(null), retryTimeout)\n }\n\n onHeaders (statusCode, rawHeaders, resume, statusMessage) {\n const headers = parseHeaders(rawHeaders)\n\n this.retryCount += 1\n\n if (statusCode >= 300) {\n this.abort(\n new RequestRetryError('Request failed', statusCode, {\n headers,\n count: this.retryCount\n })\n )\n return false\n }\n\n // Checkpoint for resume from where we left it\n if (this.resume != null) {\n this.resume = null\n\n if (statusCode !== 206) {\n return true\n }\n\n const contentRange = parseRangeHeader(headers['content-range'])\n // If no content range\n if (!contentRange) {\n this.abort(\n new RequestRetryError('Content-Range mismatch', statusCode, {\n headers,\n count: this.retryCount\n })\n )\n return false\n }\n\n // Let's start with a weak etag check\n if (this.etag != null && this.etag !== headers.etag) {\n this.abort(\n new RequestRetryError('ETag mismatch', statusCode, {\n headers,\n count: this.retryCount\n })\n )\n return false\n }\n\n const { start, size, end = size } = contentRange\n\n assert(this.start === start, 'content-range mismatch')\n assert(this.end == null || this.end === end, 'content-range mismatch')\n\n this.resume = resume\n return true\n }\n\n if (this.end == null) {\n if (statusCode === 206) {\n // First time we receive 206\n const range = parseRangeHeader(headers['content-range'])\n\n if (range == null) {\n return this.handler.onHeaders(\n statusCode,\n rawHeaders,\n resume,\n statusMessage\n )\n }\n\n const { start, size, end = size } = range\n\n assert(\n start != null && Number.isFinite(start) && this.start !== start,\n 'content-range mismatch'\n )\n assert(Number.isFinite(start))\n assert(\n end != null && Number.isFinite(end) && this.end !== end,\n 'invalid content-length'\n )\n\n this.start = start\n this.end = end\n }\n\n // We make our best to checkpoint the body for further range headers\n if (this.end == null) {\n const contentLength = headers['content-length']\n this.end = contentLength != null ? Number(contentLength) : null\n }\n\n assert(Number.isFinite(this.start))\n assert(\n this.end == null || Number.isFinite(this.end),\n 'invalid content-length'\n )\n\n this.resume = resume\n this.etag = headers.etag != null ? headers.etag : null\n\n return this.handler.onHeaders(\n statusCode,\n rawHeaders,\n resume,\n statusMessage\n )\n }\n\n const err = new RequestRetryError('Request failed', statusCode, {\n headers,\n count: this.retryCount\n })\n\n this.abort(err)\n\n return false\n }\n\n onData (chunk) {\n this.start += chunk.length\n\n return this.handler.onData(chunk)\n }\n\n onComplete (rawTrailers) {\n this.retryCount = 0\n return this.handler.onComplete(rawTrailers)\n }\n\n onError (err) {\n if (this.aborted || isDisturbed(this.opts.body)) {\n return this.handler.onError(err)\n }\n\n this.retryOpts.retry(\n err,\n {\n state: { counter: this.retryCount++, currentTimeout: this.retryAfter },\n opts: { retryOptions: this.retryOpts, ...this.opts }\n },\n onRetry.bind(this)\n )\n\n function onRetry (err) {\n if (err != null || this.aborted || isDisturbed(this.opts.body)) {\n return this.handler.onError(err)\n }\n\n if (this.start !== 0) {\n this.opts = {\n ...this.opts,\n headers: {\n ...this.opts.headers,\n range: `bytes=${this.start}-${this.end ?? 
''}`\n }\n }\n }\n\n try {\n this.dispatch(this.opts, this)\n } catch (err) {\n this.handler.onError(err)\n }\n }\n }\n}\n\nmodule.exports = RetryHandler\n","'use strict'\n\nconst RedirectHandler = require('../handler/RedirectHandler')\n\nfunction createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {\n return (dispatch) => {\n return function Intercept (opts, handler) {\n const { maxRedirections = defaultMaxRedirections } = opts\n\n if (!maxRedirections) {\n return dispatch(opts, handler)\n }\n\n const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)\n opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.\n return dispatch(opts, redirectHandler)\n }\n }\n}\n\nmodule.exports = createRedirectInterceptor\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;\nconst utils_1 = require(\"./utils\");\n// C headers\nvar ERROR;\n(function (ERROR) {\n ERROR[ERROR[\"OK\"] = 0] = \"OK\";\n ERROR[ERROR[\"INTERNAL\"] = 1] = \"INTERNAL\";\n ERROR[ERROR[\"STRICT\"] = 2] = \"STRICT\";\n ERROR[ERROR[\"LF_EXPECTED\"] = 3] = \"LF_EXPECTED\";\n ERROR[ERROR[\"UNEXPECTED_CONTENT_LENGTH\"] = 4] = \"UNEXPECTED_CONTENT_LENGTH\";\n ERROR[ERROR[\"CLOSED_CONNECTION\"] = 5] = \"CLOSED_CONNECTION\";\n ERROR[ERROR[\"INVALID_METHOD\"] = 6] = \"INVALID_METHOD\";\n ERROR[ERROR[\"INVALID_URL\"] = 7] = \"INVALID_URL\";\n ERROR[ERROR[\"INVALID_CONSTANT\"] = 8] = \"INVALID_CONSTANT\";\n ERROR[ERROR[\"INVALID_VERSION\"] = 9] = \"INVALID_VERSION\";\n ERROR[ERROR[\"INVALID_HEADER_TOKEN\"] = 10] = \"INVALID_HEADER_TOKEN\";\n ERROR[ERROR[\"INVALID_CONTENT_LENGTH\"] = 11] = \"INVALID_CONTENT_LENGTH\";\n ERROR[ERROR[\"INVALID_CHUNK_SIZE\"] = 12] = \"INVALID_CHUNK_SIZE\";\n ERROR[ERROR[\"INVALID_STATUS\"] = 13] = \"INVALID_STATUS\";\n ERROR[ERROR[\"INVALID_EOF_STATE\"] = 14] = \"INVALID_EOF_STATE\";\n ERROR[ERROR[\"INVALID_TRANSFER_ENCODING\"] = 15] = \"INVALID_TRANSFER_ENCODING\";\n ERROR[ERROR[\"CB_MESSAGE_BEGIN\"] = 16] = \"CB_MESSAGE_BEGIN\";\n ERROR[ERROR[\"CB_HEADERS_COMPLETE\"] = 17] = \"CB_HEADERS_COMPLETE\";\n ERROR[ERROR[\"CB_MESSAGE_COMPLETE\"] = 18] = \"CB_MESSAGE_COMPLETE\";\n ERROR[ERROR[\"CB_CHUNK_HEADER\"] = 19] = \"CB_CHUNK_HEADER\";\n ERROR[ERROR[\"CB_CHUNK_COMPLETE\"] = 20] = \"CB_CHUNK_COMPLETE\";\n ERROR[ERROR[\"PAUSED\"] = 21] = \"PAUSED\";\n ERROR[ERROR[\"PAUSED_UPGRADE\"] = 22] = \"PAUSED_UPGRADE\";\n ERROR[ERROR[\"PAUSED_H2_UPGRADE\"] = 23] = \"PAUSED_H2_UPGRADE\";\n ERROR[ERROR[\"USER\"] = 24] = \"USER\";\n})(ERROR = exports.ERROR || (exports.ERROR = {}));\nvar TYPE;\n(function (TYPE) {\n TYPE[TYPE[\"BOTH\"] = 0] = \"BOTH\";\n TYPE[TYPE[\"REQUEST\"] = 1] = \"REQUEST\";\n TYPE[TYPE[\"RESPONSE\"] = 2] = \"RESPONSE\";\n})(TYPE = exports.TYPE || (exports.TYPE = {}));\nvar FLAGS;\n(function (FLAGS) {\n FLAGS[FLAGS[\"CONNECTION_KEEP_ALIVE\"] = 1] = \"CONNECTION_KEEP_ALIVE\";\n FLAGS[FLAGS[\"CONNECTION_CLOSE\"] = 2] = 
\"CONNECTION_CLOSE\";\n FLAGS[FLAGS[\"CONNECTION_UPGRADE\"] = 4] = \"CONNECTION_UPGRADE\";\n FLAGS[FLAGS[\"CHUNKED\"] = 8] = \"CHUNKED\";\n FLAGS[FLAGS[\"UPGRADE\"] = 16] = \"UPGRADE\";\n FLAGS[FLAGS[\"CONTENT_LENGTH\"] = 32] = \"CONTENT_LENGTH\";\n FLAGS[FLAGS[\"SKIPBODY\"] = 64] = \"SKIPBODY\";\n FLAGS[FLAGS[\"TRAILING\"] = 128] = \"TRAILING\";\n // 1 << 8 is unused\n FLAGS[FLAGS[\"TRANSFER_ENCODING\"] = 512] = \"TRANSFER_ENCODING\";\n})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));\nvar LENIENT_FLAGS;\n(function (LENIENT_FLAGS) {\n LENIENT_FLAGS[LENIENT_FLAGS[\"HEADERS\"] = 1] = \"HEADERS\";\n LENIENT_FLAGS[LENIENT_FLAGS[\"CHUNKED_LENGTH\"] = 2] = \"CHUNKED_LENGTH\";\n LENIENT_FLAGS[LENIENT_FLAGS[\"KEEP_ALIVE\"] = 4] = \"KEEP_ALIVE\";\n})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));\nvar METHODS;\n(function (METHODS) {\n METHODS[METHODS[\"DELETE\"] = 0] = \"DELETE\";\n METHODS[METHODS[\"GET\"] = 1] = \"GET\";\n METHODS[METHODS[\"HEAD\"] = 2] = \"HEAD\";\n METHODS[METHODS[\"POST\"] = 3] = \"POST\";\n METHODS[METHODS[\"PUT\"] = 4] = \"PUT\";\n /* pathological */\n METHODS[METHODS[\"CONNECT\"] = 5] = \"CONNECT\";\n METHODS[METHODS[\"OPTIONS\"] = 6] = \"OPTIONS\";\n METHODS[METHODS[\"TRACE\"] = 7] = \"TRACE\";\n /* WebDAV */\n METHODS[METHODS[\"COPY\"] = 8] = \"COPY\";\n METHODS[METHODS[\"LOCK\"] = 9] = \"LOCK\";\n METHODS[METHODS[\"MKCOL\"] = 10] = \"MKCOL\";\n METHODS[METHODS[\"MOVE\"] = 11] = \"MOVE\";\n METHODS[METHODS[\"PROPFIND\"] = 12] = \"PROPFIND\";\n METHODS[METHODS[\"PROPPATCH\"] = 13] = \"PROPPATCH\";\n METHODS[METHODS[\"SEARCH\"] = 14] = \"SEARCH\";\n METHODS[METHODS[\"UNLOCK\"] = 15] = \"UNLOCK\";\n METHODS[METHODS[\"BIND\"] = 16] = \"BIND\";\n METHODS[METHODS[\"REBIND\"] = 17] = \"REBIND\";\n METHODS[METHODS[\"UNBIND\"] = 18] = \"UNBIND\";\n METHODS[METHODS[\"ACL\"] = 19] = \"ACL\";\n /* subversion */\n METHODS[METHODS[\"REPORT\"] = 20] = \"REPORT\";\n METHODS[METHODS[\"MKACTIVITY\"] = 21] = \"MKACTIVITY\";\n METHODS[METHODS[\"CHECKOUT\"] = 22] = \"CHECKOUT\";\n METHODS[METHODS[\"MERGE\"] = 23] = \"MERGE\";\n /* upnp */\n METHODS[METHODS[\"M-SEARCH\"] = 24] = \"M-SEARCH\";\n METHODS[METHODS[\"NOTIFY\"] = 25] = \"NOTIFY\";\n METHODS[METHODS[\"SUBSCRIBE\"] = 26] = \"SUBSCRIBE\";\n METHODS[METHODS[\"UNSUBSCRIBE\"] = 27] = \"UNSUBSCRIBE\";\n /* RFC-5789 */\n METHODS[METHODS[\"PATCH\"] = 28] = \"PATCH\";\n METHODS[METHODS[\"PURGE\"] = 29] = \"PURGE\";\n /* CalDAV */\n METHODS[METHODS[\"MKCALENDAR\"] = 30] = \"MKCALENDAR\";\n /* RFC-2068, section 19.6.1.2 */\n METHODS[METHODS[\"LINK\"] = 31] = \"LINK\";\n METHODS[METHODS[\"UNLINK\"] = 32] = \"UNLINK\";\n /* icecast */\n METHODS[METHODS[\"SOURCE\"] = 33] = \"SOURCE\";\n /* RFC-7540, section 11.6 */\n METHODS[METHODS[\"PRI\"] = 34] = \"PRI\";\n /* RFC-2326 RTSP */\n METHODS[METHODS[\"DESCRIBE\"] = 35] = \"DESCRIBE\";\n METHODS[METHODS[\"ANNOUNCE\"] = 36] = \"ANNOUNCE\";\n METHODS[METHODS[\"SETUP\"] = 37] = \"SETUP\";\n METHODS[METHODS[\"PLAY\"] = 38] = \"PLAY\";\n METHODS[METHODS[\"PAUSE\"] = 39] = \"PAUSE\";\n METHODS[METHODS[\"TEARDOWN\"] = 40] = \"TEARDOWN\";\n METHODS[METHODS[\"GET_PARAMETER\"] = 41] = \"GET_PARAMETER\";\n METHODS[METHODS[\"SET_PARAMETER\"] = 42] = \"SET_PARAMETER\";\n METHODS[METHODS[\"REDIRECT\"] = 43] = \"REDIRECT\";\n METHODS[METHODS[\"RECORD\"] = 44] = \"RECORD\";\n /* RAOP */\n METHODS[METHODS[\"FLUSH\"] = 45] = \"FLUSH\";\n})(METHODS = exports.METHODS || (exports.METHODS = {}));\nexports.METHODS_HTTP = [\n METHODS.DELETE,\n METHODS.GET,\n METHODS.HEAD,\n METHODS.POST,\n 
METHODS.PUT,\n METHODS.CONNECT,\n METHODS.OPTIONS,\n METHODS.TRACE,\n METHODS.COPY,\n METHODS.LOCK,\n METHODS.MKCOL,\n METHODS.MOVE,\n METHODS.PROPFIND,\n METHODS.PROPPATCH,\n METHODS.SEARCH,\n METHODS.UNLOCK,\n METHODS.BIND,\n METHODS.REBIND,\n METHODS.UNBIND,\n METHODS.ACL,\n METHODS.REPORT,\n METHODS.MKACTIVITY,\n METHODS.CHECKOUT,\n METHODS.MERGE,\n METHODS['M-SEARCH'],\n METHODS.NOTIFY,\n METHODS.SUBSCRIBE,\n METHODS.UNSUBSCRIBE,\n METHODS.PATCH,\n METHODS.PURGE,\n METHODS.MKCALENDAR,\n METHODS.LINK,\n METHODS.UNLINK,\n METHODS.PRI,\n // TODO(indutny): should we allow it with HTTP?\n METHODS.SOURCE,\n];\nexports.METHODS_ICE = [\n METHODS.SOURCE,\n];\nexports.METHODS_RTSP = [\n METHODS.OPTIONS,\n METHODS.DESCRIBE,\n METHODS.ANNOUNCE,\n METHODS.SETUP,\n METHODS.PLAY,\n METHODS.PAUSE,\n METHODS.TEARDOWN,\n METHODS.GET_PARAMETER,\n METHODS.SET_PARAMETER,\n METHODS.REDIRECT,\n METHODS.RECORD,\n METHODS.FLUSH,\n // For AirPlay\n METHODS.GET,\n METHODS.POST,\n];\nexports.METHOD_MAP = utils_1.enumToMap(METHODS);\nexports.H_METHOD_MAP = {};\nObject.keys(exports.METHOD_MAP).forEach((key) => {\n if (/^H/.test(key)) {\n exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];\n }\n});\nvar FINISH;\n(function (FINISH) {\n FINISH[FINISH[\"SAFE\"] = 0] = \"SAFE\";\n FINISH[FINISH[\"SAFE_WITH_CB\"] = 1] = \"SAFE_WITH_CB\";\n FINISH[FINISH[\"UNSAFE\"] = 2] = \"UNSAFE\";\n})(FINISH = exports.FINISH || (exports.FINISH = {}));\nexports.ALPHA = [];\nfor (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {\n // Upper case\n exports.ALPHA.push(String.fromCharCode(i));\n // Lower case\n exports.ALPHA.push(String.fromCharCode(i + 0x20));\n}\nexports.NUM_MAP = {\n 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,\n 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,\n};\nexports.HEX_MAP = {\n 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,\n 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,\n A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,\n a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,\n};\nexports.NUM = [\n '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',\n];\nexports.ALPHANUM = exports.ALPHA.concat(exports.NUM);\nexports.MARK = ['-', '_', '.', '!', '~', '*', '\\'', '(', ')'];\nexports.USERINFO_CHARS = exports.ALPHANUM\n .concat(exports.MARK)\n .concat(['%', ';', ':', '&', '=', '+', '$', ',']);\n// TODO(indutny): use RFC\nexports.STRICT_URL_CHAR = [\n '!', '\"', '$', '%', '&', '\\'',\n '(', ')', '*', '+', ',', '-', '.', '/',\n ':', ';', '<', '=', '>',\n '@', '[', '\\\\', ']', '^', '_',\n '`',\n '{', '|', '}', '~',\n].concat(exports.ALPHANUM);\nexports.URL_CHAR = exports.STRICT_URL_CHAR\n .concat(['\\t', '\\f']);\n// All characters with 0x80 bit set to 1\nfor (let i = 0x80; i <= 0xff; i++) {\n exports.URL_CHAR.push(i);\n}\nexports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);\n/* Tokens as defined by rfc 2616. 
Also lowercases them.\n * token = 1*\n * separators = \"(\" | \")\" | \"<\" | \">\" | \"@\"\n * | \",\" | \";\" | \":\" | \"\\\" | <\">\n * | \"/\" | \"[\" | \"]\" | \"?\" | \"=\"\n * | \"{\" | \"}\" | SP | HT\n */\nexports.STRICT_TOKEN = [\n '!', '#', '$', '%', '&', '\\'',\n '*', '+', '-', '.',\n '^', '_', '`',\n '|', '~',\n].concat(exports.ALPHANUM);\nexports.TOKEN = exports.STRICT_TOKEN.concat([' ']);\n/*\n * Verify that a char is a valid visible (printable) US-ASCII\n * character or %x80-FF\n */\nexports.HEADER_CHARS = ['\\t'];\nfor (let i = 32; i <= 255; i++) {\n if (i !== 127) {\n exports.HEADER_CHARS.push(i);\n }\n}\n// ',' = \\x44\nexports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);\nexports.MAJOR = exports.NUM_MAP;\nexports.MINOR = exports.MAJOR;\nvar HEADER_STATE;\n(function (HEADER_STATE) {\n HEADER_STATE[HEADER_STATE[\"GENERAL\"] = 0] = \"GENERAL\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION\"] = 1] = \"CONNECTION\";\n HEADER_STATE[HEADER_STATE[\"CONTENT_LENGTH\"] = 2] = \"CONTENT_LENGTH\";\n HEADER_STATE[HEADER_STATE[\"TRANSFER_ENCODING\"] = 3] = \"TRANSFER_ENCODING\";\n HEADER_STATE[HEADER_STATE[\"UPGRADE\"] = 4] = \"UPGRADE\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION_KEEP_ALIVE\"] = 5] = \"CONNECTION_KEEP_ALIVE\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION_CLOSE\"] = 6] = \"CONNECTION_CLOSE\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION_UPGRADE\"] = 7] = \"CONNECTION_UPGRADE\";\n HEADER_STATE[HEADER_STATE[\"TRANSFER_ENCODING_CHUNKED\"] = 8] = \"TRANSFER_ENCODING_CHUNKED\";\n})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));\nexports.SPECIAL_HEADERS = {\n 'connection': HEADER_STATE.CONNECTION,\n 'content-length': HEADER_STATE.CONTENT_LENGTH,\n 'proxy-connection': HEADER_STATE.CONNECTION,\n 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,\n 'upgrade': HEADER_STATE.UPGRADE,\n};\n//# sourceMappingURL=constants.js.map","module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQ
DAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQF
qIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIg
FqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBA
WohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0A
AEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASE
QDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZ
IBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcI
AAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzb
AQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECA
AQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5A
A2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqI
gNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtB
ACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAs
gBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQ
BBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpb
iBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9O
X05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8='\n","module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQ
DAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAE
iECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQC
ANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgA
SIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEB
aiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRA
MpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohAS
AEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAI
RAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIE
IAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCE
QIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4
CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBe
EGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABB
f0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQ
gBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIA
IhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvb
nMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBI
UEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw=='\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.enumToMap = void 0;\nfunction enumToMap(obj) {\n const res = {};\n Object.keys(obj).forEach((key) => {\n const value = obj[key];\n if (typeof value === 'number') {\n res[key] = value;\n }\n });\n return res;\n}\nexports.enumToMap = enumToMap;\n//# sourceMappingURL=utils.js.map","'use strict'\n\nconst { kClients } = require('../core/symbols')\nconst Agent = require('../agent')\nconst {\n kAgent,\n kMockAgentSet,\n kMockAgentGet,\n kDispatches,\n kIsMockActive,\n kNetConnect,\n kGetNetConnect,\n kOptions,\n kFactory\n} = require('./mock-symbols')\nconst MockClient = require('./mock-client')\nconst MockPool = 
require('./mock-pool')\nconst { matchValue, buildMockOptions } = require('./mock-utils')\nconst { InvalidArgumentError, UndiciError } = require('../core/errors')\nconst Dispatcher = require('../dispatcher')\nconst Pluralizer = require('./pluralizer')\nconst PendingInterceptorsFormatter = require('./pending-interceptors-formatter')\n\nclass FakeWeakRef {\n constructor (value) {\n this.value = value\n }\n\n deref () {\n return this.value\n }\n}\n\nclass MockAgent extends Dispatcher {\n constructor (opts) {\n super(opts)\n\n this[kNetConnect] = true\n this[kIsMockActive] = true\n\n // Instantiate Agent and encapsulate\n if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {\n throw new InvalidArgumentError('Argument opts.agent must implement Agent')\n }\n const agent = opts && opts.agent ? opts.agent : new Agent(opts)\n this[kAgent] = agent\n\n this[kClients] = agent[kClients]\n this[kOptions] = buildMockOptions(opts)\n }\n\n get (origin) {\n let dispatcher = this[kMockAgentGet](origin)\n\n if (!dispatcher) {\n dispatcher = this[kFactory](origin)\n this[kMockAgentSet](origin, dispatcher)\n }\n return dispatcher\n }\n\n dispatch (opts, handler) {\n // Call MockAgent.get to perform additional setup before dispatching as normal\n this.get(opts.origin)\n return this[kAgent].dispatch(opts, handler)\n }\n\n async close () {\n await this[kAgent].close()\n this[kClients].clear()\n }\n\n deactivate () {\n this[kIsMockActive] = false\n }\n\n activate () {\n this[kIsMockActive] = true\n }\n\n enableNetConnect (matcher) {\n if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {\n if (Array.isArray(this[kNetConnect])) {\n this[kNetConnect].push(matcher)\n } else {\n this[kNetConnect] = [matcher]\n }\n } else if (typeof matcher === 'undefined') {\n this[kNetConnect] = true\n } else {\n throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')\n }\n }\n\n disableNetConnect () {\n this[kNetConnect] = false\n }\n\n // This is required to bypass issues caused by using global symbols - see:\n // https://github.com/nodejs/undici/issues/1447\n get isMockActive () {\n return this[kIsMockActive]\n }\n\n [kMockAgentSet] (origin, dispatcher) {\n this[kClients].set(origin, new FakeWeakRef(dispatcher))\n }\n\n [kFactory] (origin) {\n const mockOptions = Object.assign({ agent: this }, this[kOptions])\n return this[kOptions] && this[kOptions].connections === 1\n ? 
new MockClient(origin, mockOptions)\n : new MockPool(origin, mockOptions)\n }\n\n [kMockAgentGet] (origin) {\n // First check if we can immediately find it\n const ref = this[kClients].get(origin)\n if (ref) {\n return ref.deref()\n }\n\n // If the origin is not a string create a dummy parent pool and return to user\n if (typeof origin !== 'string') {\n const dispatcher = this[kFactory]('http://localhost:9999')\n this[kMockAgentSet](origin, dispatcher)\n return dispatcher\n }\n\n // If we match, create a pool and assign the same dispatches\n for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {\n const nonExplicitDispatcher = nonExplicitRef.deref()\n if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {\n const dispatcher = this[kFactory](origin)\n this[kMockAgentSet](origin, dispatcher)\n dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]\n return dispatcher\n }\n }\n }\n\n [kGetNetConnect] () {\n return this[kNetConnect]\n }\n\n pendingInterceptors () {\n const mockAgentClients = this[kClients]\n\n return Array.from(mockAgentClients.entries())\n .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))\n .filter(({ pending }) => pending)\n }\n\n assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {\n const pending = this.pendingInterceptors()\n\n if (pending.length === 0) {\n return\n }\n\n const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)\n\n throw new UndiciError(`\n${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:\n\n${pendingInterceptorsFormatter.format(pending)}\n`.trim())\n }\n}\n\nmodule.exports = MockAgent\n","'use strict'\n\nconst { promisify } = require('util')\nconst Client = require('../client')\nconst { buildMockDispatch } = require('./mock-utils')\nconst {\n kDispatches,\n kMockAgent,\n kClose,\n kOriginalClose,\n kOrigin,\n kOriginalDispatch,\n kConnected\n} = require('./mock-symbols')\nconst { MockInterceptor } = require('./mock-interceptor')\nconst Symbols = require('../core/symbols')\nconst { InvalidArgumentError } = require('../core/errors')\n\n/**\n * MockClient provides an API that extends the Client to influence the mockDispatches.\n */\nclass MockClient extends Client {\n constructor (origin, opts) {\n super(origin, opts)\n\n if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {\n throw new InvalidArgumentError('Argument opts.agent must implement Agent')\n }\n\n this[kMockAgent] = opts.agent\n this[kOrigin] = origin\n this[kDispatches] = []\n this[kConnected] = 1\n this[kOriginalDispatch] = this.dispatch\n this[kOriginalClose] = this.close.bind(this)\n\n this.dispatch = buildMockDispatch.call(this)\n this.close = this[kClose]\n }\n\n get [Symbols.kConnected] () {\n return this[kConnected]\n }\n\n /**\n * Sets up the base interceptor for mocking replies from undici.\n */\n intercept (opts) {\n return new MockInterceptor(opts, this[kDispatches])\n }\n\n async [kClose] () {\n await promisify(this[kOriginalClose])()\n this[kConnected] = 0\n this[kMockAgent][Symbols.kClients].delete(this[kOrigin])\n }\n}\n\nmodule.exports = MockClient\n","'use strict'\n\nconst { UndiciError } = require('../core/errors')\n\nclass MockNotMatchedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, MockNotMatchedError)\n this.name = 'MockNotMatchedError'\n this.message = message || 'The request does not 
match any registered mock dispatches'\n this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'\n }\n}\n\nmodule.exports = {\n MockNotMatchedError\n}\n","'use strict'\n\nconst { getResponseData, buildKey, addMockDispatch } = require('./mock-utils')\nconst {\n kDispatches,\n kDispatchKey,\n kDefaultHeaders,\n kDefaultTrailers,\n kContentLength,\n kMockDispatch\n} = require('./mock-symbols')\nconst { InvalidArgumentError } = require('../core/errors')\nconst { buildURL } = require('../core/util')\n\n/**\n * Defines the scope API for an interceptor reply\n */\nclass MockScope {\n constructor (mockDispatch) {\n this[kMockDispatch] = mockDispatch\n }\n\n /**\n * Delay a reply by a set amount in ms.\n */\n delay (waitInMs) {\n if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {\n throw new InvalidArgumentError('waitInMs must be a valid integer > 0')\n }\n\n this[kMockDispatch].delay = waitInMs\n return this\n }\n\n /**\n * For a defined reply, never mark as consumed.\n */\n persist () {\n this[kMockDispatch].persist = true\n return this\n }\n\n /**\n * Allow one to define a reply for a set amount of matching requests.\n */\n times (repeatTimes) {\n if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {\n throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')\n }\n\n this[kMockDispatch].times = repeatTimes\n return this\n }\n}\n\n/**\n * Defines an interceptor for a Mock\n */\nclass MockInterceptor {\n constructor (opts, mockDispatches) {\n if (typeof opts !== 'object') {\n throw new InvalidArgumentError('opts must be an object')\n }\n if (typeof opts.path === 'undefined') {\n throw new InvalidArgumentError('opts.path must be defined')\n }\n if (typeof opts.method === 'undefined') {\n opts.method = 'GET'\n }\n // See https://github.com/nodejs/undici/issues/1245\n // As per RFC 3986, clients are not supposed to send URI\n // fragments to servers when they retrieve a document,\n if (typeof opts.path === 'string') {\n if (opts.query) {\n opts.path = buildURL(opts.path, opts.query)\n } else {\n // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811\n const parsedURL = new URL(opts.path, 'data://')\n opts.path = parsedURL.pathname + parsedURL.search\n }\n }\n if (typeof opts.method === 'string') {\n opts.method = opts.method.toUpperCase()\n }\n\n this[kDispatchKey] = buildKey(opts)\n this[kDispatches] = mockDispatches\n this[kDefaultHeaders] = {}\n this[kDefaultTrailers] = {}\n this[kContentLength] = false\n }\n\n createMockScopeDispatchData (statusCode, data, responseOptions = {}) {\n const responseData = getResponseData(data)\n const contentLength = this[kContentLength] ? 
{ 'content-length': responseData.length } : {}\n const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }\n const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }\n\n return { statusCode, data, headers, trailers }\n }\n\n validateReplyParameters (statusCode, data, responseOptions) {\n if (typeof statusCode === 'undefined') {\n throw new InvalidArgumentError('statusCode must be defined')\n }\n if (typeof data === 'undefined') {\n throw new InvalidArgumentError('data must be defined')\n }\n if (typeof responseOptions !== 'object') {\n throw new InvalidArgumentError('responseOptions must be an object')\n }\n }\n\n /**\n * Mock an undici request with a defined reply.\n */\n reply (replyData) {\n // Values of reply aren't available right now as they\n // can only be available when the reply callback is invoked.\n if (typeof replyData === 'function') {\n // We'll first wrap the provided callback in another function,\n // this function will properly resolve the data from the callback\n // when invoked.\n const wrappedDefaultsCallback = (opts) => {\n // Our reply options callback contains the parameter for statusCode, data and options.\n const resolvedData = replyData(opts)\n\n // Check if it is in the right format\n if (typeof resolvedData !== 'object') {\n throw new InvalidArgumentError('reply options callback must return an object')\n }\n\n const { statusCode, data = '', responseOptions = {} } = resolvedData\n this.validateReplyParameters(statusCode, data, responseOptions)\n // Since the values can be obtained immediately we return them\n // from this higher order function that will be resolved later.\n return {\n ...this.createMockScopeDispatchData(statusCode, data, responseOptions)\n }\n }\n\n // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data.\n const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)\n return new MockScope(newMockDispatch)\n }\n\n // We can have either one or three parameters, if we get here,\n // we should have 1-3 parameters. 
So we spread the arguments of\n // this function to obtain the parameters, since replyData will always\n // just be the statusCode.\n const [statusCode, data = '', responseOptions = {}] = [...arguments]\n this.validateReplyParameters(statusCode, data, responseOptions)\n\n // Send in-already provided data like usual\n const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)\n const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)\n return new MockScope(newMockDispatch)\n }\n\n /**\n * Mock an undici request with a defined error.\n */\n replyWithError (error) {\n if (typeof error === 'undefined') {\n throw new InvalidArgumentError('error must be defined')\n }\n\n const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })\n return new MockScope(newMockDispatch)\n }\n\n /**\n * Set default reply headers on the interceptor for subsequent replies\n */\n defaultReplyHeaders (headers) {\n if (typeof headers === 'undefined') {\n throw new InvalidArgumentError('headers must be defined')\n }\n\n this[kDefaultHeaders] = headers\n return this\n }\n\n /**\n * Set default reply trailers on the interceptor for subsequent replies\n */\n defaultReplyTrailers (trailers) {\n if (typeof trailers === 'undefined') {\n throw new InvalidArgumentError('trailers must be defined')\n }\n\n this[kDefaultTrailers] = trailers\n return this\n }\n\n /**\n * Set reply content length header for replies on the interceptor\n */\n replyContentLength () {\n this[kContentLength] = true\n return this\n }\n}\n\nmodule.exports.MockInterceptor = MockInterceptor\nmodule.exports.MockScope = MockScope\n","'use strict'\n\nconst { promisify } = require('util')\nconst Pool = require('../pool')\nconst { buildMockDispatch } = require('./mock-utils')\nconst {\n kDispatches,\n kMockAgent,\n kClose,\n kOriginalClose,\n kOrigin,\n kOriginalDispatch,\n kConnected\n} = require('./mock-symbols')\nconst { MockInterceptor } = require('./mock-interceptor')\nconst Symbols = require('../core/symbols')\nconst { InvalidArgumentError } = require('../core/errors')\n\n/**\n * MockPool provides an API that extends the Pool to influence the mockDispatches.\n */\nclass MockPool extends Pool {\n constructor (origin, opts) {\n super(origin, opts)\n\n if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {\n throw new InvalidArgumentError('Argument opts.agent must implement Agent')\n }\n\n this[kMockAgent] = opts.agent\n this[kOrigin] = origin\n this[kDispatches] = []\n this[kConnected] = 1\n this[kOriginalDispatch] = this.dispatch\n this[kOriginalClose] = this.close.bind(this)\n\n this.dispatch = buildMockDispatch.call(this)\n this.close = this[kClose]\n }\n\n get [Symbols.kConnected] () {\n return this[kConnected]\n }\n\n /**\n * Sets up the base interceptor for mocking replies from undici.\n */\n intercept (opts) {\n return new MockInterceptor(opts, this[kDispatches])\n }\n\n async [kClose] () {\n await promisify(this[kOriginalClose])()\n this[kConnected] = 0\n this[kMockAgent][Symbols.kClients].delete(this[kOrigin])\n }\n}\n\nmodule.exports = MockPool\n","'use strict'\n\nmodule.exports = {\n kAgent: Symbol('agent'),\n kOptions: Symbol('options'),\n kFactory: Symbol('factory'),\n kDispatches: Symbol('dispatches'),\n kDispatchKey: Symbol('dispatch key'),\n kDefaultHeaders: Symbol('default headers'),\n kDefaultTrailers: Symbol('default trailers'),\n kContentLength: Symbol('content length'),\n kMockAgent: Symbol('mock agent'),\n kMockAgentSet: 
Symbol('mock agent set'),\n kMockAgentGet: Symbol('mock agent get'),\n kMockDispatch: Symbol('mock dispatch'),\n kClose: Symbol('close'),\n kOriginalClose: Symbol('original agent close'),\n kOrigin: Symbol('origin'),\n kIsMockActive: Symbol('is mock active'),\n kNetConnect: Symbol('net connect'),\n kGetNetConnect: Symbol('get net connect'),\n kConnected: Symbol('connected')\n}\n","'use strict'\n\nconst { MockNotMatchedError } = require('./mock-errors')\nconst {\n kDispatches,\n kMockAgent,\n kOriginalDispatch,\n kOrigin,\n kGetNetConnect\n} = require('./mock-symbols')\nconst { buildURL, nop } = require('../core/util')\nconst { STATUS_CODES } = require('http')\nconst {\n types: {\n isPromise\n }\n} = require('util')\n\nfunction matchValue (match, value) {\n if (typeof match === 'string') {\n return match === value\n }\n if (match instanceof RegExp) {\n return match.test(value)\n }\n if (typeof match === 'function') {\n return match(value) === true\n }\n return false\n}\n\nfunction lowerCaseEntries (headers) {\n return Object.fromEntries(\n Object.entries(headers).map(([headerName, headerValue]) => {\n return [headerName.toLocaleLowerCase(), headerValue]\n })\n )\n}\n\n/**\n * @param {import('../../index').Headers|string[]|Record} headers\n * @param {string} key\n */\nfunction getHeaderByName (headers, key) {\n if (Array.isArray(headers)) {\n for (let i = 0; i < headers.length; i += 2) {\n if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) {\n return headers[i + 1]\n }\n }\n\n return undefined\n } else if (typeof headers.get === 'function') {\n return headers.get(key)\n } else {\n return lowerCaseEntries(headers)[key.toLocaleLowerCase()]\n }\n}\n\n/** @param {string[]} headers */\nfunction buildHeadersFromArray (headers) { // fetch HeadersList\n const clone = headers.slice()\n const entries = []\n for (let index = 0; index < clone.length; index += 2) {\n entries.push([clone[index], clone[index + 1]])\n }\n return Object.fromEntries(entries)\n}\n\nfunction matchHeaders (mockDispatch, headers) {\n if (typeof mockDispatch.headers === 'function') {\n if (Array.isArray(headers)) { // fetch HeadersList\n headers = buildHeadersFromArray(headers)\n }\n return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})\n }\n if (typeof mockDispatch.headers === 'undefined') {\n return true\n }\n if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {\n return false\n }\n\n for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {\n const headerValue = getHeaderByName(headers, matchHeaderName)\n\n if (!matchValue(matchHeaderValue, headerValue)) {\n return false\n }\n }\n return true\n}\n\nfunction safeUrl (path) {\n if (typeof path !== 'string') {\n return path\n }\n\n const pathSegments = path.split('?')\n\n if (pathSegments.length !== 2) {\n return path\n }\n\n const qp = new URLSearchParams(pathSegments.pop())\n qp.sort()\n return [...pathSegments, qp.toString()].join('?')\n}\n\nfunction matchKey (mockDispatch, { path, method, body, headers }) {\n const pathMatch = matchValue(mockDispatch.path, path)\n const methodMatch = matchValue(mockDispatch.method, method)\n const bodyMatch = typeof mockDispatch.body !== 'undefined' ? 
matchValue(mockDispatch.body, body) : true\n const headersMatch = matchHeaders(mockDispatch, headers)\n return pathMatch && methodMatch && bodyMatch && headersMatch\n}\n\nfunction getResponseData (data) {\n if (Buffer.isBuffer(data)) {\n return data\n } else if (typeof data === 'object') {\n return JSON.stringify(data)\n } else {\n return data.toString()\n }\n}\n\nfunction getMockDispatch (mockDispatches, key) {\n const basePath = key.query ? buildURL(key.path, key.query) : key.path\n const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath\n\n // Match path\n let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)\n }\n\n // Match method\n matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)\n }\n\n // Match body\n matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)\n }\n\n // Match headers\n matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)\n }\n\n return matchedMockDispatches[0]\n}\n\nfunction addMockDispatch (mockDispatches, key, data) {\n const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }\n const replyData = typeof data === 'function' ? { callback: data } : { ...data }\n const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }\n mockDispatches.push(newMockDispatch)\n return newMockDispatch\n}\n\nfunction deleteMockDispatch (mockDispatches, key) {\n const index = mockDispatches.findIndex(dispatch => {\n if (!dispatch.consumed) {\n return false\n }\n return matchKey(dispatch, key)\n })\n if (index !== -1) {\n mockDispatches.splice(index, 1)\n }\n}\n\nfunction buildKey (opts) {\n const { path, method, body, headers, query } = opts\n return {\n path,\n method,\n body,\n headers,\n query\n }\n}\n\nfunction generateKeyValues (data) {\n return Object.entries(data).reduce((keyValuePairs, [key, value]) => [\n ...keyValuePairs,\n Buffer.from(`${key}`),\n Array.isArray(value) ? 
value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)\n ], [])\n}\n\n/**\n * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status\n * @param {number} statusCode\n */\nfunction getStatusText (statusCode) {\n return STATUS_CODES[statusCode] || 'unknown'\n}\n\nasync function getResponse (body) {\n const buffers = []\n for await (const data of body) {\n buffers.push(data)\n }\n return Buffer.concat(buffers).toString('utf8')\n}\n\n/**\n * Mock dispatch function used to simulate undici dispatches\n */\nfunction mockDispatch (opts, handler) {\n // Get mock dispatch from built key\n const key = buildKey(opts)\n const mockDispatch = getMockDispatch(this[kDispatches], key)\n\n mockDispatch.timesInvoked++\n\n // Here's where we resolve a callback if a callback is present for the dispatch data.\n if (mockDispatch.data.callback) {\n mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }\n }\n\n // Parse mockDispatch data\n const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch\n const { timesInvoked, times } = mockDispatch\n\n // If it's used up and not persistent, mark as consumed\n mockDispatch.consumed = !persist && timesInvoked >= times\n mockDispatch.pending = timesInvoked < times\n\n // If specified, trigger dispatch error\n if (error !== null) {\n deleteMockDispatch(this[kDispatches], key)\n handler.onError(error)\n return true\n }\n\n // Handle the request with a delay if necessary\n if (typeof delay === 'number' && delay > 0) {\n setTimeout(() => {\n handleReply(this[kDispatches])\n }, delay)\n } else {\n handleReply(this[kDispatches])\n }\n\n function handleReply (mockDispatches, _data = data) {\n // fetch's HeadersList is a 1D string array\n const optsHeaders = Array.isArray(opts.headers)\n ? buildHeadersFromArray(opts.headers)\n : opts.headers\n const body = typeof _data === 'function'\n ? 
_data({ ...opts, headers: optsHeaders })\n : _data\n\n // util.types.isPromise is likely needed for jest.\n if (isPromise(body)) {\n // If handleReply is asynchronous, throwing an error\n // in the callback will reject the promise, rather than\n // synchronously throw the error, which breaks some tests.\n // Rather, we wait for the callback to resolve if it is a\n // promise, and then re-run handleReply with the new body.\n body.then((newData) => handleReply(mockDispatches, newData))\n return\n }\n\n const responseData = getResponseData(body)\n const responseHeaders = generateKeyValues(headers)\n const responseTrailers = generateKeyValues(trailers)\n\n handler.abort = nop\n handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))\n handler.onData(Buffer.from(responseData))\n handler.onComplete(responseTrailers)\n deleteMockDispatch(mockDispatches, key)\n }\n\n function resume () {}\n\n return true\n}\n\nfunction buildMockDispatch () {\n const agent = this[kMockAgent]\n const origin = this[kOrigin]\n const originalDispatch = this[kOriginalDispatch]\n\n return function dispatch (opts, handler) {\n if (agent.isMockActive) {\n try {\n mockDispatch.call(this, opts, handler)\n } catch (error) {\n if (error instanceof MockNotMatchedError) {\n const netConnect = agent[kGetNetConnect]()\n if (netConnect === false) {\n throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)\n }\n if (checkNetConnect(netConnect, origin)) {\n originalDispatch.call(this, opts, handler)\n } else {\n throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)\n }\n } else {\n throw error\n }\n }\n } else {\n originalDispatch.call(this, opts, handler)\n }\n }\n}\n\nfunction checkNetConnect (netConnect, origin) {\n const url = new URL(origin)\n if (netConnect === true) {\n return true\n } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) {\n return true\n }\n return false\n}\n\nfunction buildMockOptions (opts) {\n if (opts) {\n const { agent, ...mockOptions } = opts\n return mockOptions\n }\n}\n\nmodule.exports = {\n getResponseData,\n getMockDispatch,\n addMockDispatch,\n deleteMockDispatch,\n buildKey,\n generateKeyValues,\n matchValue,\n getResponse,\n getStatusText,\n mockDispatch,\n buildMockDispatch,\n checkNetConnect,\n buildMockOptions,\n getHeaderByName\n}\n","'use strict'\n\nconst { Transform } = require('stream')\nconst { Console } = require('console')\n\n/**\n * Gets the output of `console.table(…)` as a string.\n */\nmodule.exports = class PendingInterceptorsFormatter {\n constructor ({ disableColors } = {}) {\n this.transform = new Transform({\n transform (chunk, _enc, cb) {\n cb(null, chunk)\n }\n })\n\n this.logger = new Console({\n stdout: this.transform,\n inspectOptions: {\n colors: !disableColors && !process.env.CI\n }\n })\n }\n\n format (pendingInterceptors) {\n const withPrettyHeaders = pendingInterceptors.map(\n ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({\n Method: method,\n Origin: origin,\n Path: path,\n 'Status code': statusCode,\n Persistent: persist ? '✅' : '❌',\n Invocations: timesInvoked,\n Remaining: persist ? 
Infinity : times - timesInvoked\n }))\n\n this.logger.table(withPrettyHeaders)\n return this.transform.read().toString()\n }\n}\n","'use strict'\n\nconst singulars = {\n pronoun: 'it',\n is: 'is',\n was: 'was',\n this: 'this'\n}\n\nconst plurals = {\n pronoun: 'they',\n is: 'are',\n was: 'were',\n this: 'these'\n}\n\nmodule.exports = class Pluralizer {\n constructor (singular, plural) {\n this.singular = singular\n this.plural = plural\n }\n\n pluralize (count) {\n const one = count === 1\n const keys = one ? singulars : plurals\n const noun = one ? this.singular : this.plural\n return { ...keys, count, noun }\n }\n}\n","/* eslint-disable */\n\n'use strict'\n\n// Extracted from node/lib/internal/fixed_queue.js\n\n// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.\nconst kSize = 2048;\nconst kMask = kSize - 1;\n\n// The FixedQueue is implemented as a singly-linked list of fixed-size\n// circular buffers. It looks something like this:\n//\n// head tail\n// | |\n// v v\n// +-----------+ <-----\\ +-----------+ <------\\ +-----------+\n// | [null] | \\----- | next | \\------- | next |\n// +-----------+ +-----------+ +-----------+\n// | item | <-- bottom | item | <-- bottom | [empty] |\n// | item | | item | | [empty] |\n// | item | | item | | [empty] |\n// | item | | item | | [empty] |\n// | item | | item | bottom --> | item |\n// | item | | item | | item |\n// | ... | | ... | | ... |\n// | item | | item | | item |\n// | item | | item | | item |\n// | [empty] | <-- top | item | | item |\n// | [empty] | | item | | item |\n// | [empty] | | [empty] | <-- top top --> | [empty] |\n// +-----------+ +-----------+ +-----------+\n//\n// Or, if there is only one circular buffer, it looks something\n// like either of these:\n//\n// head tail head tail\n// | | | |\n// v v v v\n// +-----------+ +-----------+\n// | [null] | | [null] |\n// +-----------+ +-----------+\n// | [empty] | | item |\n// | [empty] | | item |\n// | item | <-- bottom top --> | [empty] |\n// | item | | [empty] |\n// | [empty] | <-- top bottom --> | item |\n// | [empty] | | item |\n// +-----------+ +-----------+\n//\n// Adding a value means moving `top` forward by one, removing means\n// moving `bottom` forward by one. After reaching the end, the queue\n// wraps around.\n//\n// When `top === bottom` the current queue is empty and when\n// `top + 1 === bottom` it's full. 
This wastes a single space of storage\n// but allows much quicker checks.\n\nclass FixedCircularBuffer {\n constructor() {\n this.bottom = 0;\n this.top = 0;\n this.list = new Array(kSize);\n this.next = null;\n }\n\n isEmpty() {\n return this.top === this.bottom;\n }\n\n isFull() {\n return ((this.top + 1) & kMask) === this.bottom;\n }\n\n push(data) {\n this.list[this.top] = data;\n this.top = (this.top + 1) & kMask;\n }\n\n shift() {\n const nextItem = this.list[this.bottom];\n if (nextItem === undefined)\n return null;\n this.list[this.bottom] = undefined;\n this.bottom = (this.bottom + 1) & kMask;\n return nextItem;\n }\n}\n\nmodule.exports = class FixedQueue {\n constructor() {\n this.head = this.tail = new FixedCircularBuffer();\n }\n\n isEmpty() {\n return this.head.isEmpty();\n }\n\n push(data) {\n if (this.head.isFull()) {\n // Head is full: Creates a new queue, sets the old queue's `.next` to it,\n // and sets it as the new main queue.\n this.head = this.head.next = new FixedCircularBuffer();\n }\n this.head.push(data);\n }\n\n shift() {\n const tail = this.tail;\n const next = tail.shift();\n if (tail.isEmpty() && tail.next !== null) {\n // If there is another queue, it forms the new tail.\n this.tail = tail.next;\n }\n return next;\n }\n};\n","'use strict'\n\nconst DispatcherBase = require('./dispatcher-base')\nconst FixedQueue = require('./node/fixed-queue')\nconst { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols')\nconst PoolStats = require('./pool-stats')\n\nconst kClients = Symbol('clients')\nconst kNeedDrain = Symbol('needDrain')\nconst kQueue = Symbol('queue')\nconst kClosedResolve = Symbol('closed resolve')\nconst kOnDrain = Symbol('onDrain')\nconst kOnConnect = Symbol('onConnect')\nconst kOnDisconnect = Symbol('onDisconnect')\nconst kOnConnectionError = Symbol('onConnectionError')\nconst kGetDispatcher = Symbol('get dispatcher')\nconst kAddClient = Symbol('add client')\nconst kRemoveClient = Symbol('remove client')\nconst kStats = Symbol('stats')\n\nclass PoolBase extends DispatcherBase {\n constructor () {\n super()\n\n this[kQueue] = new FixedQueue()\n this[kClients] = []\n this[kQueued] = 0\n\n const pool = this\n\n this[kOnDrain] = function onDrain (origin, targets) {\n const queue = pool[kQueue]\n\n let needDrain = false\n\n while (!needDrain) {\n const item = queue.shift()\n if (!item) {\n break\n }\n pool[kQueued]--\n needDrain = !this.dispatch(item.opts, item.handler)\n }\n\n this[kNeedDrain] = needDrain\n\n if (!this[kNeedDrain] && pool[kNeedDrain]) {\n pool[kNeedDrain] = false\n pool.emit('drain', origin, [pool, ...targets])\n }\n\n if (pool[kClosedResolve] && queue.isEmpty()) {\n Promise\n .all(pool[kClients].map(c => c.close()))\n .then(pool[kClosedResolve])\n }\n }\n\n this[kOnConnect] = (origin, targets) => {\n pool.emit('connect', origin, [pool, ...targets])\n }\n\n this[kOnDisconnect] = (origin, targets, err) => {\n pool.emit('disconnect', origin, [pool, ...targets], err)\n }\n\n this[kOnConnectionError] = (origin, targets, err) => {\n pool.emit('connectionError', origin, [pool, ...targets], err)\n }\n\n this[kStats] = new PoolStats(this)\n }\n\n get [kBusy] () {\n return this[kNeedDrain]\n }\n\n get [kConnected] () {\n return this[kClients].filter(client => client[kConnected]).length\n }\n\n get [kFree] () {\n return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length\n }\n\n get [kPending] () {\n let ret = this[kQueued]\n for (const { [kPending]: 
pending } of this[kClients]) {\n ret += pending\n }\n return ret\n }\n\n get [kRunning] () {\n let ret = 0\n for (const { [kRunning]: running } of this[kClients]) {\n ret += running\n }\n return ret\n }\n\n get [kSize] () {\n let ret = this[kQueued]\n for (const { [kSize]: size } of this[kClients]) {\n ret += size\n }\n return ret\n }\n\n get stats () {\n return this[kStats]\n }\n\n async [kClose] () {\n if (this[kQueue].isEmpty()) {\n return Promise.all(this[kClients].map(c => c.close()))\n } else {\n return new Promise((resolve) => {\n this[kClosedResolve] = resolve\n })\n }\n }\n\n async [kDestroy] (err) {\n while (true) {\n const item = this[kQueue].shift()\n if (!item) {\n break\n }\n item.handler.onError(err)\n }\n\n return Promise.all(this[kClients].map(c => c.destroy(err)))\n }\n\n [kDispatch] (opts, handler) {\n const dispatcher = this[kGetDispatcher]()\n\n if (!dispatcher) {\n this[kNeedDrain] = true\n this[kQueue].push({ opts, handler })\n this[kQueued]++\n } else if (!dispatcher.dispatch(opts, handler)) {\n dispatcher[kNeedDrain] = true\n this[kNeedDrain] = !this[kGetDispatcher]()\n }\n\n return !this[kNeedDrain]\n }\n\n [kAddClient] (client) {\n client\n .on('drain', this[kOnDrain])\n .on('connect', this[kOnConnect])\n .on('disconnect', this[kOnDisconnect])\n .on('connectionError', this[kOnConnectionError])\n\n this[kClients].push(client)\n\n if (this[kNeedDrain]) {\n process.nextTick(() => {\n if (this[kNeedDrain]) {\n this[kOnDrain](client[kUrl], [this, client])\n }\n })\n }\n\n return this\n }\n\n [kRemoveClient] (client) {\n client.close(() => {\n const idx = this[kClients].indexOf(client)\n if (idx !== -1) {\n this[kClients].splice(idx, 1)\n }\n })\n\n this[kNeedDrain] = this[kClients].some(dispatcher => (\n !dispatcher[kNeedDrain] &&\n dispatcher.closed !== true &&\n dispatcher.destroyed !== true\n ))\n }\n}\n\nmodule.exports = {\n PoolBase,\n kClients,\n kNeedDrain,\n kAddClient,\n kRemoveClient,\n kGetDispatcher\n}\n","const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols')\nconst kPool = Symbol('pool')\n\nclass PoolStats {\n constructor (pool) {\n this[kPool] = pool\n }\n\n get connected () {\n return this[kPool][kConnected]\n }\n\n get free () {\n return this[kPool][kFree]\n }\n\n get pending () {\n return this[kPool][kPending]\n }\n\n get queued () {\n return this[kPool][kQueued]\n }\n\n get running () {\n return this[kPool][kRunning]\n }\n\n get size () {\n return this[kPool][kSize]\n }\n}\n\nmodule.exports = PoolStats\n","'use strict'\n\nconst {\n PoolBase,\n kClients,\n kNeedDrain,\n kAddClient,\n kGetDispatcher\n} = require('./pool-base')\nconst Client = require('./client')\nconst {\n InvalidArgumentError\n} = require('./core/errors')\nconst util = require('./core/util')\nconst { kUrl, kInterceptors } = require('./core/symbols')\nconst buildConnector = require('./core/connect')\n\nconst kOptions = Symbol('options')\nconst kConnections = Symbol('connections')\nconst kFactory = Symbol('factory')\n\nfunction defaultFactory (origin, opts) {\n return new Client(origin, opts)\n}\n\nclass Pool extends PoolBase {\n constructor (origin, {\n connections,\n factory = defaultFactory,\n connect,\n connectTimeout,\n tls,\n maxCachedSessions,\n socketPath,\n autoSelectFamily,\n autoSelectFamilyAttemptTimeout,\n allowH2,\n ...options\n } = {}) {\n super()\n\n if (connections != null && (!Number.isFinite(connections) || connections < 0)) {\n throw new InvalidArgumentError('invalid connections')\n }\n\n if (typeof factory !== 'function') {\n 
throw new InvalidArgumentError('factory must be a function.')\n }\n\n if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {\n throw new InvalidArgumentError('connect must be a function or an object')\n }\n\n if (typeof connect !== 'function') {\n connect = buildConnector({\n ...tls,\n maxCachedSessions,\n allowH2,\n socketPath,\n timeout: connectTimeout,\n ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),\n ...connect\n })\n }\n\n this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)\n ? options.interceptors.Pool\n : []\n this[kConnections] = connections || null\n this[kUrl] = util.parseOrigin(origin)\n this[kOptions] = { ...util.deepClone(options), connect, allowH2 }\n this[kOptions].interceptors = options.interceptors\n ? { ...options.interceptors }\n : undefined\n this[kFactory] = factory\n }\n\n [kGetDispatcher] () {\n let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])\n\n if (dispatcher) {\n return dispatcher\n }\n\n if (!this[kConnections] || this[kClients].length < this[kConnections]) {\n dispatcher = this[kFactory](this[kUrl], this[kOptions])\n this[kAddClient](dispatcher)\n }\n\n return dispatcher\n }\n}\n\nmodule.exports = Pool\n","'use strict'\n\nconst { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')\nconst { URL } = require('url')\nconst Agent = require('./agent')\nconst Pool = require('./pool')\nconst DispatcherBase = require('./dispatcher-base')\nconst { InvalidArgumentError, RequestAbortedError } = require('./core/errors')\nconst buildConnector = require('./core/connect')\n\nconst kAgent = Symbol('proxy agent')\nconst kClient = Symbol('proxy client')\nconst kProxyHeaders = Symbol('proxy headers')\nconst kRequestTls = Symbol('request tls settings')\nconst kProxyTls = Symbol('proxy tls settings')\nconst kConnectEndpoint = Symbol('connect endpoint function')\n\nfunction defaultProtocolPort (protocol) {\n return protocol === 'https:' ? 443 : 80\n}\n\nfunction buildProxyOptions (opts) {\n if (typeof opts === 'string') {\n opts = { uri: opts }\n }\n\n if (!opts || !opts.uri) {\n throw new InvalidArgumentError('Proxy opts.uri is mandatory')\n }\n\n return {\n uri: opts.uri,\n protocol: opts.protocol || 'https'\n }\n}\n\nfunction defaultFactory (origin, opts) {\n return new Pool(origin, opts)\n}\n\nclass ProxyAgent extends DispatcherBase {\n constructor (opts) {\n super(opts)\n this[kProxy] = buildProxyOptions(opts)\n this[kAgent] = new Agent(opts)\n this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)\n ? 
opts.interceptors.ProxyAgent\n : []\n\n if (typeof opts === 'string') {\n opts = { uri: opts }\n }\n\n if (!opts || !opts.uri) {\n throw new InvalidArgumentError('Proxy opts.uri is mandatory')\n }\n\n const { clientFactory = defaultFactory } = opts\n\n if (typeof clientFactory !== 'function') {\n throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')\n }\n\n this[kRequestTls] = opts.requestTls\n this[kProxyTls] = opts.proxyTls\n this[kProxyHeaders] = opts.headers || {}\n\n const resolvedUrl = new URL(opts.uri)\n const { origin, port, host, username, password } = resolvedUrl\n\n if (opts.auth && opts.token) {\n throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')\n } else if (opts.auth) {\n /* @deprecated in favour of opts.token */\n this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`\n } else if (opts.token) {\n this[kProxyHeaders]['proxy-authorization'] = opts.token\n } else if (username && password) {\n this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`\n }\n\n const connect = buildConnector({ ...opts.proxyTls })\n this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })\n this[kClient] = clientFactory(resolvedUrl, { connect })\n this[kAgent] = new Agent({\n ...opts,\n connect: async (opts, callback) => {\n let requestedHost = opts.host\n if (!opts.port) {\n requestedHost += `:${defaultProtocolPort(opts.protocol)}`\n }\n try {\n const { socket, statusCode } = await this[kClient].connect({\n origin,\n port,\n path: requestedHost,\n signal: opts.signal,\n headers: {\n ...this[kProxyHeaders],\n host\n }\n })\n if (statusCode !== 200) {\n socket.on('error', () => {}).destroy()\n callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))\n }\n if (opts.protocol !== 'https:') {\n callback(null, socket)\n return\n }\n let servername\n if (this[kRequestTls]) {\n servername = this[kRequestTls].servername\n } else {\n servername = opts.servername\n }\n this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)\n } catch (err) {\n callback(err)\n }\n }\n })\n }\n\n dispatch (opts, handler) {\n const { host } = new URL(opts.origin)\n const headers = buildHeaders(opts.headers)\n throwIfProxyAuthIsSent(headers)\n return this[kAgent].dispatch(\n {\n ...opts,\n headers: {\n ...headers,\n host\n }\n },\n handler\n )\n }\n\n async [kClose] () {\n await this[kAgent].close()\n await this[kClient].close()\n }\n\n async [kDestroy] () {\n await this[kAgent].destroy()\n await this[kClient].destroy()\n }\n}\n\n/**\n * @param {string[] | Record} headers\n * @returns {Record}\n */\nfunction buildHeaders (headers) {\n // When using undici.fetch, the headers list is stored\n // as an array.\n if (Array.isArray(headers)) {\n /** @type {Record} */\n const headersPair = {}\n\n for (let i = 0; i < headers.length; i += 2) {\n headersPair[headers[i]] = headers[i + 1]\n }\n\n return headersPair\n }\n\n return headers\n}\n\n/**\n * @param {Record} headers\n *\n * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers\n * Nevertheless, it was changed and to avoid a security vulnerability by end users\n * this check was created.\n * It should be removed in the next major version for performance reasons\n */\nfunction throwIfProxyAuthIsSent (headers) {\n const existProxyAuth = headers && Object.keys(headers)\n .find((key) => key.toLowerCase() === 
'proxy-authorization')\n if (existProxyAuth) {\n throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')\n }\n}\n\nmodule.exports = ProxyAgent\n","'use strict'\n\nlet fastNow = Date.now()\nlet fastNowTimeout\n\nconst fastTimers = []\n\nfunction onTimeout () {\n fastNow = Date.now()\n\n let len = fastTimers.length\n let idx = 0\n while (idx < len) {\n const timer = fastTimers[idx]\n\n if (timer.state === 0) {\n timer.state = fastNow + timer.delay\n } else if (timer.state > 0 && fastNow >= timer.state) {\n timer.state = -1\n timer.callback(timer.opaque)\n }\n\n if (timer.state === -1) {\n timer.state = -2\n if (idx !== len - 1) {\n fastTimers[idx] = fastTimers.pop()\n } else {\n fastTimers.pop()\n }\n len -= 1\n } else {\n idx += 1\n }\n }\n\n if (fastTimers.length > 0) {\n refreshTimeout()\n }\n}\n\nfunction refreshTimeout () {\n if (fastNowTimeout && fastNowTimeout.refresh) {\n fastNowTimeout.refresh()\n } else {\n clearTimeout(fastNowTimeout)\n fastNowTimeout = setTimeout(onTimeout, 1e3)\n if (fastNowTimeout.unref) {\n fastNowTimeout.unref()\n }\n }\n}\n\nclass Timeout {\n constructor (callback, delay, opaque) {\n this.callback = callback\n this.delay = delay\n this.opaque = opaque\n\n // -2 not in timer list\n // -1 in timer list but inactive\n // 0 in timer list waiting for time\n // > 0 in timer list waiting for time to expire\n this.state = -2\n\n this.refresh()\n }\n\n refresh () {\n if (this.state === -2) {\n fastTimers.push(this)\n if (!fastNowTimeout || fastTimers.length === 1) {\n refreshTimeout()\n }\n }\n\n this.state = 0\n }\n\n clear () {\n this.state = -1\n }\n}\n\nmodule.exports = {\n setTimeout (callback, delay, opaque) {\n return delay < 1e3\n ? setTimeout(callback, delay, opaque)\n : new Timeout(callback, delay, opaque)\n },\n clearTimeout (timeout) {\n if (timeout instanceof Timeout) {\n timeout.clear()\n } else {\n clearTimeout(timeout)\n }\n }\n}\n","'use strict'\n\nconst diagnosticsChannel = require('diagnostics_channel')\nconst { uid, states } = require('./constants')\nconst {\n kReadyState,\n kSentClose,\n kByteParser,\n kReceivedClose\n} = require('./symbols')\nconst { fireEvent, failWebsocketConnection } = require('./util')\nconst { CloseEvent } = require('./events')\nconst { makeRequest } = require('../fetch/request')\nconst { fetching } = require('../fetch/index')\nconst { Headers } = require('../fetch/headers')\nconst { getGlobalDispatcher } = require('../global')\nconst { kHeadersList } = require('../core/symbols')\n\nconst channels = {}\nchannels.open = diagnosticsChannel.channel('undici:websocket:open')\nchannels.close = diagnosticsChannel.channel('undici:websocket:close')\nchannels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')\n\n/** @type {import('crypto')} */\nlet crypto\ntry {\n crypto = require('crypto')\n} catch {\n\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#concept-websocket-establish\n * @param {URL} url\n * @param {string|string[]} protocols\n * @param {import('./websocket').WebSocket} ws\n * @param {(response: any) => void} onEstablish\n * @param {Partial} options\n */\nfunction establishWebSocketConnection (url, protocols, ws, onEstablish, options) {\n // 1. Let requestURL be a copy of url, with its scheme set to \"http\", if url’s\n // scheme is \"ws\", and to \"https\" otherwise.\n const requestURL = url\n\n requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:'\n\n // 2. 
Let request be a new request, whose URL is requestURL, client is client,\n // service-workers mode is \"none\", referrer is \"no-referrer\", mode is\n // \"websocket\", credentials mode is \"include\", cache mode is \"no-store\" ,\n // and redirect mode is \"error\".\n const request = makeRequest({\n urlList: [requestURL],\n serviceWorkers: 'none',\n referrer: 'no-referrer',\n mode: 'websocket',\n credentials: 'include',\n cache: 'no-store',\n redirect: 'error'\n })\n\n // Note: undici extension, allow setting custom headers.\n if (options.headers) {\n const headersList = new Headers(options.headers)[kHeadersList]\n\n request.headersList = headersList\n }\n\n // 3. Append (`Upgrade`, `websocket`) to request’s header list.\n // 4. Append (`Connection`, `Upgrade`) to request’s header list.\n // Note: both of these are handled by undici currently.\n // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397\n\n // 5. Let keyValue be a nonce consisting of a randomly selected\n // 16-byte value that has been forgiving-base64-encoded and\n // isomorphic encoded.\n const keyValue = crypto.randomBytes(16).toString('base64')\n\n // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s\n // header list.\n request.headersList.append('sec-websocket-key', keyValue)\n\n // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s\n // header list.\n request.headersList.append('sec-websocket-version', '13')\n\n // 8. For each protocol in protocols, combine\n // (`Sec-WebSocket-Protocol`, protocol) in request’s header\n // list.\n for (const protocol of protocols) {\n request.headersList.append('sec-websocket-protocol', protocol)\n }\n\n // 9. Let permessageDeflate be a user-agent defined\n // \"permessage-deflate\" extension header value.\n // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673\n // TODO: enable once permessage-deflate is supported\n const permessageDeflate = '' // 'permessage-deflate; 15'\n\n // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to\n // request’s header list.\n // request.headersList.append('sec-websocket-extensions', permessageDeflate)\n\n // 11. Fetch request with useParallelQueue set to true, and\n // processResponse given response being these steps:\n const controller = fetching({\n request,\n useParallelQueue: true,\n dispatcher: options.dispatcher ?? getGlobalDispatcher(),\n processResponse (response) {\n // 1. If response is a network error or its status is not 101,\n // fail the WebSocket connection.\n if (response.type === 'error' || response.status !== 101) {\n failWebsocketConnection(ws, 'Received network error or non-101 status code.')\n return\n }\n\n // 2. If protocols is not the empty list and extracting header\n // list values given `Sec-WebSocket-Protocol` and response’s\n // header list results in null, failure, or the empty byte\n // sequence, then fail the WebSocket connection.\n if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) {\n failWebsocketConnection(ws, 'Server did not respond with sent protocols.')\n return\n }\n\n // 3. Follow the requirements stated step 2 to step 6, inclusive,\n // of the last set of steps in section 4.1 of The WebSocket\n // Protocol to validate response. This either results in fail\n // the WebSocket connection or the WebSocket connection is\n // established.\n\n // 2. 
If the response lacks an |Upgrade| header field or the |Upgrade|\n // header field contains a value that is not an ASCII case-\n // insensitive match for the value \"websocket\", the client MUST\n // _Fail the WebSocket Connection_.\n if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') {\n failWebsocketConnection(ws, 'Server did not set Upgrade header to \"websocket\".')\n return\n }\n\n // 3. If the response lacks a |Connection| header field or the\n // |Connection| header field doesn't contain a token that is an\n // ASCII case-insensitive match for the value \"Upgrade\", the client\n // MUST _Fail the WebSocket Connection_.\n if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') {\n failWebsocketConnection(ws, 'Server did not set Connection header to \"upgrade\".')\n return\n }\n\n // 4. If the response lacks a |Sec-WebSocket-Accept| header field or\n // the |Sec-WebSocket-Accept| contains a value other than the\n // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-\n // Key| (as a string, not base64-decoded) with the string \"258EAFA5-\n // E914-47DA-95CA-C5AB0DC85B11\" but ignoring any leading and\n // trailing whitespace, the client MUST _Fail the WebSocket\n // Connection_.\n const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')\n const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')\n if (secWSAccept !== digest) {\n failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')\n return\n }\n\n // 5. If the response includes a |Sec-WebSocket-Extensions| header\n // field and this header field indicates the use of an extension\n // that was not present in the client's handshake (the server has\n // indicated an extension not requested by the client), the client\n // MUST _Fail the WebSocket Connection_. (The parsing of this\n // header field to determine which extensions are requested is\n // discussed in Section 9.1.)\n const secExtension = response.headersList.get('Sec-WebSocket-Extensions')\n\n if (secExtension !== null && secExtension !== permessageDeflate) {\n failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.')\n return\n }\n\n // 6. 
If the response includes a |Sec-WebSocket-Protocol| header field\n // and this header field indicates the use of a subprotocol that was\n // not present in the client's handshake (the server has indicated a\n // subprotocol not requested by the client), the client MUST _Fail\n // the WebSocket Connection_.\n const secProtocol = response.headersList.get('Sec-WebSocket-Protocol')\n\n if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) {\n failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.')\n return\n }\n\n response.socket.on('data', onSocketData)\n response.socket.on('close', onSocketClose)\n response.socket.on('error', onSocketError)\n\n if (channels.open.hasSubscribers) {\n channels.open.publish({\n address: response.socket.address(),\n protocol: secProtocol,\n extensions: secExtension\n })\n }\n\n onEstablish(response)\n }\n })\n\n return controller\n}\n\n/**\n * @param {Buffer} chunk\n */\nfunction onSocketData (chunk) {\n if (!this.ws[kByteParser].write(chunk)) {\n this.pause()\n }\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol\n * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4\n */\nfunction onSocketClose () {\n const { ws } = this\n\n // If the TCP connection was closed after the\n // WebSocket closing handshake was completed, the WebSocket connection\n // is said to have been closed _cleanly_.\n const wasClean = ws[kSentClose] && ws[kReceivedClose]\n\n let code = 1005\n let reason = ''\n\n const result = ws[kByteParser].closingInfo\n\n if (result) {\n code = result.code ?? 1005\n reason = result.reason\n } else if (!ws[kSentClose]) {\n // If _The WebSocket\n // Connection is Closed_ and no Close control frame was received by the\n // endpoint (such as could occur if the underlying transport connection\n // is lost), _The WebSocket Connection Close Code_ is considered to be\n // 1006.\n code = 1006\n }\n\n // 1. Change the ready state to CLOSED (3).\n ws[kReadyState] = states.CLOSED\n\n // 2. If the user agent was required to fail the WebSocket\n // connection, or if the WebSocket connection was closed\n // after being flagged as full, fire an event named error\n // at the WebSocket object.\n // TODO\n\n // 3. 
Fire an event named close at the WebSocket object,\n // using CloseEvent, with the wasClean attribute\n // initialized to true if the connection closed cleanly\n // and false otherwise, the code attribute initialized to\n // the WebSocket connection close code, and the reason\n // attribute initialized to the result of applying UTF-8\n // decode without BOM to the WebSocket connection close\n // reason.\n fireEvent('close', ws, CloseEvent, {\n wasClean, code, reason\n })\n\n if (channels.close.hasSubscribers) {\n channels.close.publish({\n websocket: ws,\n code,\n reason\n })\n }\n}\n\nfunction onSocketError (error) {\n const { ws } = this\n\n ws[kReadyState] = states.CLOSING\n\n if (channels.socketError.hasSubscribers) {\n channels.socketError.publish(error)\n }\n\n this.destroy()\n}\n\nmodule.exports = {\n establishWebSocketConnection\n}\n","'use strict'\n\n// This is a Globally Unique Identifier unique used\n// to validate that the endpoint accepts websocket\n// connections.\n// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3\nconst uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'\n\n/** @type {PropertyDescriptor} */\nconst staticPropertyDescriptors = {\n enumerable: true,\n writable: false,\n configurable: false\n}\n\nconst states = {\n CONNECTING: 0,\n OPEN: 1,\n CLOSING: 2,\n CLOSED: 3\n}\n\nconst opcodes = {\n CONTINUATION: 0x0,\n TEXT: 0x1,\n BINARY: 0x2,\n CLOSE: 0x8,\n PING: 0x9,\n PONG: 0xA\n}\n\nconst maxUnsigned16Bit = 2 ** 16 - 1 // 65535\n\nconst parserStates = {\n INFO: 0,\n PAYLOADLENGTH_16: 2,\n PAYLOADLENGTH_64: 3,\n READ_DATA: 4\n}\n\nconst emptyBuffer = Buffer.allocUnsafe(0)\n\nmodule.exports = {\n uid,\n staticPropertyDescriptors,\n states,\n opcodes,\n maxUnsigned16Bit,\n parserStates,\n emptyBuffer\n}\n","'use strict'\n\nconst { webidl } = require('../fetch/webidl')\nconst { kEnumerableProperty } = require('../core/util')\nconst { MessagePort } = require('worker_threads')\n\n/**\n * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent\n */\nclass MessageEvent extends Event {\n #eventInit\n\n constructor (type, eventInitDict = {}) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' })\n\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.MessageEventInit(eventInitDict)\n\n super(type, eventInitDict)\n\n this.#eventInit = eventInitDict\n }\n\n get data () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.data\n }\n\n get origin () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.origin\n }\n\n get lastEventId () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.lastEventId\n }\n\n get source () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.source\n }\n\n get ports () {\n webidl.brandCheck(this, MessageEvent)\n\n if (!Object.isFrozen(this.#eventInit.ports)) {\n Object.freeze(this.#eventInit.ports)\n }\n\n return this.#eventInit.ports\n }\n\n initMessageEvent (\n type,\n bubbles = false,\n cancelable = false,\n data = null,\n origin = '',\n lastEventId = '',\n source = null,\n ports = []\n ) {\n webidl.brandCheck(this, MessageEvent)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' })\n\n return new MessageEvent(type, {\n bubbles, cancelable, data, origin, lastEventId, source, ports\n })\n }\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#the-closeevent-interface\n */\nclass CloseEvent extends Event {\n #eventInit\n\n constructor (type, eventInitDict = {}) {\n 
webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' })\n\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.CloseEventInit(eventInitDict)\n\n super(type, eventInitDict)\n\n this.#eventInit = eventInitDict\n }\n\n get wasClean () {\n webidl.brandCheck(this, CloseEvent)\n\n return this.#eventInit.wasClean\n }\n\n get code () {\n webidl.brandCheck(this, CloseEvent)\n\n return this.#eventInit.code\n }\n\n get reason () {\n webidl.brandCheck(this, CloseEvent)\n\n return this.#eventInit.reason\n }\n}\n\n// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface\nclass ErrorEvent extends Event {\n #eventInit\n\n constructor (type, eventInitDict) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' })\n\n super(type, eventInitDict)\n\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {})\n\n this.#eventInit = eventInitDict\n }\n\n get message () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.message\n }\n\n get filename () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.filename\n }\n\n get lineno () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.lineno\n }\n\n get colno () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.colno\n }\n\n get error () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.error\n }\n}\n\nObject.defineProperties(MessageEvent.prototype, {\n [Symbol.toStringTag]: {\n value: 'MessageEvent',\n configurable: true\n },\n data: kEnumerableProperty,\n origin: kEnumerableProperty,\n lastEventId: kEnumerableProperty,\n source: kEnumerableProperty,\n ports: kEnumerableProperty,\n initMessageEvent: kEnumerableProperty\n})\n\nObject.defineProperties(CloseEvent.prototype, {\n [Symbol.toStringTag]: {\n value: 'CloseEvent',\n configurable: true\n },\n reason: kEnumerableProperty,\n code: kEnumerableProperty,\n wasClean: kEnumerableProperty\n})\n\nObject.defineProperties(ErrorEvent.prototype, {\n [Symbol.toStringTag]: {\n value: 'ErrorEvent',\n configurable: true\n },\n message: kEnumerableProperty,\n filename: kEnumerableProperty,\n lineno: kEnumerableProperty,\n colno: kEnumerableProperty,\n error: kEnumerableProperty\n})\n\nwebidl.converters.MessagePort = webidl.interfaceConverter(MessagePort)\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.MessagePort\n)\n\nconst eventInit = [\n {\n key: 'bubbles',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'cancelable',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'composed',\n converter: webidl.converters.boolean,\n defaultValue: false\n }\n]\n\nwebidl.converters.MessageEventInit = webidl.dictionaryConverter([\n ...eventInit,\n {\n key: 'data',\n converter: webidl.converters.any,\n defaultValue: null\n },\n {\n key: 'origin',\n converter: webidl.converters.USVString,\n defaultValue: ''\n },\n {\n key: 'lastEventId',\n converter: webidl.converters.DOMString,\n defaultValue: ''\n },\n {\n key: 'source',\n // Node doesn't implement WindowProxy or ServiceWorker, so the only\n // valid value for source is a MessagePort.\n converter: webidl.nullableConverter(webidl.converters.MessagePort),\n defaultValue: null\n },\n {\n key: 'ports',\n converter: webidl.converters['sequence'],\n get defaultValue () {\n return []\n }\n }\n])\n\nwebidl.converters.CloseEventInit = webidl.dictionaryConverter([\n 
...eventInit,\n {\n key: 'wasClean',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'code',\n converter: webidl.converters['unsigned short'],\n defaultValue: 0\n },\n {\n key: 'reason',\n converter: webidl.converters.USVString,\n defaultValue: ''\n }\n])\n\nwebidl.converters.ErrorEventInit = webidl.dictionaryConverter([\n ...eventInit,\n {\n key: 'message',\n converter: webidl.converters.DOMString,\n defaultValue: ''\n },\n {\n key: 'filename',\n converter: webidl.converters.USVString,\n defaultValue: ''\n },\n {\n key: 'lineno',\n converter: webidl.converters['unsigned long'],\n defaultValue: 0\n },\n {\n key: 'colno',\n converter: webidl.converters['unsigned long'],\n defaultValue: 0\n },\n {\n key: 'error',\n converter: webidl.converters.any\n }\n])\n\nmodule.exports = {\n MessageEvent,\n CloseEvent,\n ErrorEvent\n}\n","'use strict'\n\nconst { maxUnsigned16Bit } = require('./constants')\n\n/** @type {import('crypto')} */\nlet crypto\ntry {\n crypto = require('crypto')\n} catch {\n\n}\n\nclass WebsocketFrameSend {\n /**\n * @param {Buffer|undefined} data\n */\n constructor (data) {\n this.frameData = data\n this.maskKey = crypto.randomBytes(4)\n }\n\n createFrame (opcode) {\n const bodyLength = this.frameData?.byteLength ?? 0\n\n /** @type {number} */\n let payloadLength = bodyLength // 0-125\n let offset = 6\n\n if (bodyLength > maxUnsigned16Bit) {\n offset += 8 // payload length is next 8 bytes\n payloadLength = 127\n } else if (bodyLength > 125) {\n offset += 2 // payload length is next 2 bytes\n payloadLength = 126\n }\n\n const buffer = Buffer.allocUnsafe(bodyLength + offset)\n\n // Clear first 2 bytes, everything else is overwritten\n buffer[0] = buffer[1] = 0\n buffer[0] |= 0x80 // FIN\n buffer[0] = (buffer[0] & 0xF0) + opcode // opcode\n\n /*! ws. MIT License. 
Einar Otto Stangvik */\n buffer[offset - 4] = this.maskKey[0]\n buffer[offset - 3] = this.maskKey[1]\n buffer[offset - 2] = this.maskKey[2]\n buffer[offset - 1] = this.maskKey[3]\n\n buffer[1] = payloadLength\n\n if (payloadLength === 126) {\n buffer.writeUInt16BE(bodyLength, 2)\n } else if (payloadLength === 127) {\n // Clear extended payload length\n buffer[2] = buffer[3] = 0\n buffer.writeUIntBE(bodyLength, 4, 6)\n }\n\n buffer[1] |= 0x80 // MASK\n\n // mask body\n for (let i = 0; i < bodyLength; i++) {\n buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4]\n }\n\n return buffer\n }\n}\n\nmodule.exports = {\n WebsocketFrameSend\n}\n","'use strict'\n\nconst { Writable } = require('stream')\nconst diagnosticsChannel = require('diagnostics_channel')\nconst { parserStates, opcodes, states, emptyBuffer } = require('./constants')\nconst { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols')\nconst { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require('./util')\nconst { WebsocketFrameSend } = require('./frame')\n\n// This code was influenced by ws released under the MIT license.\n// Copyright (c) 2011 Einar Otto Stangvik \n// Copyright (c) 2013 Arnout Kazemier and contributors\n// Copyright (c) 2016 Luigi Pinca and contributors\n\nconst channels = {}\nchannels.ping = diagnosticsChannel.channel('undici:websocket:ping')\nchannels.pong = diagnosticsChannel.channel('undici:websocket:pong')\n\nclass ByteParser extends Writable {\n #buffers = []\n #byteOffset = 0\n\n #state = parserStates.INFO\n\n #info = {}\n #fragments = []\n\n constructor (ws) {\n super()\n\n this.ws = ws\n }\n\n /**\n * @param {Buffer} chunk\n * @param {() => void} callback\n */\n _write (chunk, _, callback) {\n this.#buffers.push(chunk)\n this.#byteOffset += chunk.length\n\n this.run(callback)\n }\n\n /**\n * Runs whenever a new chunk is received.\n * Callback is called whenever there are no more chunks buffering,\n * or not enough bytes are buffered to parse.\n */\n run (callback) {\n while (true) {\n if (this.#state === parserStates.INFO) {\n // If there aren't enough bytes to parse the payload length, etc.\n if (this.#byteOffset < 2) {\n return callback()\n }\n\n const buffer = this.consume(2)\n\n this.#info.fin = (buffer[0] & 0x80) !== 0\n this.#info.opcode = buffer[0] & 0x0F\n\n // If we receive a fragmented message, we use the type of the first\n // frame to parse the full message as binary/text, when it's terminated\n this.#info.originalOpcode ??= this.#info.opcode\n\n this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION\n\n if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {\n // Only text and binary frames can be fragmented\n failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.')\n return\n }\n\n const payloadLength = buffer[1] & 0x7F\n\n if (payloadLength <= 125) {\n this.#info.payloadLength = payloadLength\n this.#state = parserStates.READ_DATA\n } else if (payloadLength === 126) {\n this.#state = parserStates.PAYLOADLENGTH_16\n } else if (payloadLength === 127) {\n this.#state = parserStates.PAYLOADLENGTH_64\n }\n\n if (this.#info.fragmented && payloadLength > 125) {\n // A fragmented frame can't be fragmented itself\n failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.')\n return\n } else if (\n (this.#info.opcode === opcodes.PING ||\n this.#info.opcode === opcodes.PONG ||\n this.#info.opcode === opcodes.CLOSE) &&\n payloadLength > 
125\n ) {\n // Control frames can have a payload length of 125 bytes MAX\n failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.')\n return\n } else if (this.#info.opcode === opcodes.CLOSE) {\n if (payloadLength === 1) {\n failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.')\n return\n }\n\n const body = this.consume(payloadLength)\n\n this.#info.closeInfo = this.parseCloseBody(false, body)\n\n if (!this.ws[kSentClose]) {\n // If an endpoint receives a Close frame and did not previously send a\n // Close frame, the endpoint MUST send a Close frame in response. (When\n // sending a Close frame in response, the endpoint typically echos the\n // status code it received.)\n const body = Buffer.allocUnsafe(2)\n body.writeUInt16BE(this.#info.closeInfo.code, 0)\n const closeFrame = new WebsocketFrameSend(body)\n\n this.ws[kResponse].socket.write(\n closeFrame.createFrame(opcodes.CLOSE),\n (err) => {\n if (!err) {\n this.ws[kSentClose] = true\n }\n }\n )\n }\n\n // Upon either sending or receiving a Close control frame, it is said\n // that _The WebSocket Closing Handshake is Started_ and that the\n // WebSocket connection is in the CLOSING state.\n this.ws[kReadyState] = states.CLOSING\n this.ws[kReceivedClose] = true\n\n this.end()\n\n return\n } else if (this.#info.opcode === opcodes.PING) {\n // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in\n // response, unless it already received a Close frame.\n // A Pong frame sent in response to a Ping frame must have identical\n // \"Application data\"\n\n const body = this.consume(payloadLength)\n\n if (!this.ws[kReceivedClose]) {\n const frame = new WebsocketFrameSend(body)\n\n this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG))\n\n if (channels.ping.hasSubscribers) {\n channels.ping.publish({\n payload: body\n })\n }\n }\n\n this.#state = parserStates.INFO\n\n if (this.#byteOffset > 0) {\n continue\n } else {\n callback()\n return\n }\n } else if (this.#info.opcode === opcodes.PONG) {\n // A Pong frame MAY be sent unsolicited. This serves as a\n // unidirectional heartbeat. A response to an unsolicited Pong frame is\n // not expected.\n\n const body = this.consume(payloadLength)\n\n if (channels.pong.hasSubscribers) {\n channels.pong.publish({\n payload: body\n })\n }\n\n if (this.#byteOffset > 0) {\n continue\n } else {\n callback()\n return\n }\n }\n } else if (this.#state === parserStates.PAYLOADLENGTH_16) {\n if (this.#byteOffset < 2) {\n return callback()\n }\n\n const buffer = this.consume(2)\n\n this.#info.payloadLength = buffer.readUInt16BE(0)\n this.#state = parserStates.READ_DATA\n } else if (this.#state === parserStates.PAYLOADLENGTH_64) {\n if (this.#byteOffset < 8) {\n return callback()\n }\n\n const buffer = this.consume(8)\n const upper = buffer.readUInt32BE(0)\n\n // 2^31 is the maxinimum bytes an arraybuffer can contain\n // on 32-bit systems. 
Although, on 64-bit systems, this is\n // 2^53-1 bytes.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length\n // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275\n // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e\n if (upper > 2 ** 31 - 1) {\n failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.')\n return\n }\n\n const lower = buffer.readUInt32BE(4)\n\n this.#info.payloadLength = (upper << 8) + lower\n this.#state = parserStates.READ_DATA\n } else if (this.#state === parserStates.READ_DATA) {\n if (this.#byteOffset < this.#info.payloadLength) {\n // If there is still more data in this chunk that needs to be read\n return callback()\n } else if (this.#byteOffset >= this.#info.payloadLength) {\n // If the server sent multiple frames in a single chunk\n\n const body = this.consume(this.#info.payloadLength)\n\n this.#fragments.push(body)\n\n // If the frame is unfragmented, or a fragmented frame was terminated,\n // a message was received\n if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) {\n const fullMessage = Buffer.concat(this.#fragments)\n\n websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage)\n\n this.#info = {}\n this.#fragments.length = 0\n }\n\n this.#state = parserStates.INFO\n }\n }\n\n if (this.#byteOffset > 0) {\n continue\n } else {\n callback()\n break\n }\n }\n }\n\n /**\n * Take n bytes from the buffered Buffers\n * @param {number} n\n * @returns {Buffer|null}\n */\n consume (n) {\n if (n > this.#byteOffset) {\n return null\n } else if (n === 0) {\n return emptyBuffer\n }\n\n if (this.#buffers[0].length === n) {\n this.#byteOffset -= this.#buffers[0].length\n return this.#buffers.shift()\n }\n\n const buffer = Buffer.allocUnsafe(n)\n let offset = 0\n\n while (offset !== n) {\n const next = this.#buffers[0]\n const { length } = next\n\n if (length + offset === n) {\n buffer.set(this.#buffers.shift(), offset)\n break\n } else if (length + offset > n) {\n buffer.set(next.subarray(0, n - offset), offset)\n this.#buffers[0] = next.subarray(n - offset)\n break\n } else {\n buffer.set(this.#buffers.shift(), offset)\n offset += next.length\n }\n }\n\n this.#byteOffset -= n\n\n return buffer\n }\n\n parseCloseBody (onlyCode, data) {\n // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5\n /** @type {number|undefined} */\n let code\n\n if (data.length >= 2) {\n // _The WebSocket Connection Close Code_ is\n // defined as the status code (Section 7.4) contained in the first Close\n // control frame received by the application\n code = data.readUInt16BE(0)\n }\n\n if (onlyCode) {\n if (!isValidStatusCode(code)) {\n return null\n }\n\n return { code }\n }\n\n // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6\n /** @type {Buffer} */\n let reason = data.subarray(2)\n\n // Remove BOM\n if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) {\n reason = reason.subarray(3)\n }\n\n if (code !== undefined && !isValidStatusCode(code)) {\n return null\n }\n\n try {\n // TODO: optimize this\n reason = new TextDecoder('utf-8', { fatal: true }).decode(reason)\n } catch {\n return null\n }\n\n return { code, reason }\n }\n\n get closingInfo () {\n return this.#info.closeInfo\n }\n}\n\nmodule.exports = {\n ByteParser\n}\n","'use 
strict'\n\nmodule.exports = {\n kWebSocketURL: Symbol('url'),\n kReadyState: Symbol('ready state'),\n kController: Symbol('controller'),\n kResponse: Symbol('response'),\n kBinaryType: Symbol('binary type'),\n kSentClose: Symbol('sent close'),\n kReceivedClose: Symbol('received close'),\n kByteParser: Symbol('byte parser')\n}\n","'use strict'\n\nconst { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = require('./symbols')\nconst { states, opcodes } = require('./constants')\nconst { MessageEvent, ErrorEvent } = require('./events')\n\n/* globals Blob */\n\n/**\n * @param {import('./websocket').WebSocket} ws\n */\nfunction isEstablished (ws) {\n // If the server's response is validated as provided for above, it is\n // said that _The WebSocket Connection is Established_ and that the\n // WebSocket Connection is in the OPEN state.\n return ws[kReadyState] === states.OPEN\n}\n\n/**\n * @param {import('./websocket').WebSocket} ws\n */\nfunction isClosing (ws) {\n // Upon either sending or receiving a Close control frame, it is said\n // that _The WebSocket Closing Handshake is Started_ and that the\n // WebSocket connection is in the CLOSING state.\n return ws[kReadyState] === states.CLOSING\n}\n\n/**\n * @param {import('./websocket').WebSocket} ws\n */\nfunction isClosed (ws) {\n return ws[kReadyState] === states.CLOSED\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#concept-event-fire\n * @param {string} e\n * @param {EventTarget} target\n * @param {EventInit | undefined} eventInitDict\n */\nfunction fireEvent (e, target, eventConstructor = Event, eventInitDict) {\n // 1. If eventConstructor is not given, then let eventConstructor be Event.\n\n // 2. Let event be the result of creating an event given eventConstructor,\n // in the relevant realm of target.\n // 3. Initialize event’s type attribute to e.\n const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap\n\n // 4. Initialize any other IDL attributes of event as described in the\n // invocation of this algorithm.\n\n // 5. Return the result of dispatching event at target, with legacy target\n // override flag set if set.\n target.dispatchEvent(event)\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol\n * @param {import('./websocket').WebSocket} ws\n * @param {number} type Opcode\n * @param {Buffer} data application data\n */\nfunction websocketMessageReceived (ws, type, data) {\n // 1. If ready state is not OPEN (1), then return.\n if (ws[kReadyState] !== states.OPEN) {\n return\n }\n\n // 2. Let dataForEvent be determined by switching on type and binary type:\n let dataForEvent\n\n if (type === opcodes.TEXT) {\n // -> type indicates that the data is Text\n // a new DOMString containing data\n try {\n dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data)\n } catch {\n failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.')\n return\n }\n } else if (type === opcodes.BINARY) {\n if (ws[kBinaryType] === 'blob') {\n // -> type indicates that the data is Binary and binary type is \"blob\"\n // a new Blob object, created in the relevant Realm of the WebSocket\n // object, that represents data as its raw data\n dataForEvent = new Blob([data])\n } else {\n // -> type indicates that the data is Binary and binary type is \"arraybuffer\"\n // a new ArrayBuffer object, created in the relevant Realm of the\n // WebSocket object, whose contents are data\n dataForEvent = new Uint8Array(data).buffer\n }\n }\n\n // 3. 
Fire an event named message at the WebSocket object, using MessageEvent,\n // with the origin attribute initialized to the serialization of the WebSocket\n // object’s url's origin, and the data attribute initialized to dataForEvent.\n fireEvent('message', ws, MessageEvent, {\n origin: ws[kWebSocketURL].origin,\n data: dataForEvent\n })\n}\n\n/**\n * @see https://datatracker.ietf.org/doc/html/rfc6455\n * @see https://datatracker.ietf.org/doc/html/rfc2616\n * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407\n * @param {string} protocol\n */\nfunction isValidSubprotocol (protocol) {\n // If present, this value indicates one\n // or more comma-separated subprotocol the client wishes to speak,\n // ordered by preference. The elements that comprise this value\n // MUST be non-empty strings with characters in the range U+0021 to\n // U+007E not including separator characters as defined in\n // [RFC2616] and MUST all be unique strings.\n if (protocol.length === 0) {\n return false\n }\n\n for (const char of protocol) {\n const code = char.charCodeAt(0)\n\n if (\n code < 0x21 ||\n code > 0x7E ||\n char === '(' ||\n char === ')' ||\n char === '<' ||\n char === '>' ||\n char === '@' ||\n char === ',' ||\n char === ';' ||\n char === ':' ||\n char === '\\\\' ||\n char === '\"' ||\n char === '/' ||\n char === '[' ||\n char === ']' ||\n char === '?' ||\n char === '=' ||\n char === '{' ||\n char === '}' ||\n code === 32 || // SP\n code === 9 // HT\n ) {\n return false\n }\n }\n\n return true\n}\n\n/**\n * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4\n * @param {number} code\n */\nfunction isValidStatusCode (code) {\n if (code >= 1000 && code < 1015) {\n return (\n code !== 1004 && // reserved\n code !== 1005 && // \"MUST NOT be set as a status code\"\n code !== 1006 // \"MUST NOT be set as a status code\"\n )\n }\n\n return code >= 3000 && code <= 4999\n}\n\n/**\n * @param {import('./websocket').WebSocket} ws\n * @param {string|undefined} reason\n */\nfunction failWebsocketConnection (ws, reason) {\n const { [kController]: controller, [kResponse]: response } = ws\n\n controller.abort()\n\n if (response?.socket && !response.socket.destroyed) {\n response.socket.destroy()\n }\n\n if (reason) {\n fireEvent('error', ws, ErrorEvent, {\n error: new Error(reason)\n })\n }\n}\n\nmodule.exports = {\n isEstablished,\n isClosing,\n isClosed,\n fireEvent,\n isValidSubprotocol,\n isValidStatusCode,\n failWebsocketConnection,\n websocketMessageReceived\n}\n","'use strict'\n\nconst { webidl } = require('../fetch/webidl')\nconst { DOMException } = require('../fetch/constants')\nconst { URLSerializer } = require('../fetch/dataURL')\nconst { getGlobalOrigin } = require('../fetch/global')\nconst { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require('./constants')\nconst {\n kWebSocketURL,\n kReadyState,\n kController,\n kBinaryType,\n kResponse,\n kSentClose,\n kByteParser\n} = require('./symbols')\nconst { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require('./util')\nconst { establishWebSocketConnection } = require('./connection')\nconst { WebsocketFrameSend } = require('./frame')\nconst { ByteParser } = require('./receiver')\nconst { kEnumerableProperty, isBlobLike } = require('../core/util')\nconst { getGlobalDispatcher } = require('../global')\nconst { types } = require('util')\n\nlet experimentalWarned = false\n\n// https://websockets.spec.whatwg.org/#interface-definition\nclass WebSocket extends EventTarget {\n #events = {\n 
open: null,\n error: null,\n close: null,\n message: null\n }\n\n #bufferedAmount = 0\n #protocol = ''\n #extensions = ''\n\n /**\n * @param {string} url\n * @param {string|string[]} protocols\n */\n constructor (url, protocols = []) {\n super()\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' })\n\n if (!experimentalWarned) {\n experimentalWarned = true\n process.emitWarning('WebSockets are experimental, expect them to change at any time.', {\n code: 'UNDICI-WS'\n })\n }\n\n const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols)\n\n url = webidl.converters.USVString(url)\n protocols = options.protocols\n\n // 1. Let baseURL be this's relevant settings object's API base URL.\n const baseURL = getGlobalOrigin()\n\n // 1. Let urlRecord be the result of applying the URL parser to url with baseURL.\n let urlRecord\n\n try {\n urlRecord = new URL(url, baseURL)\n } catch (e) {\n // 3. If urlRecord is failure, then throw a \"SyntaxError\" DOMException.\n throw new DOMException(e, 'SyntaxError')\n }\n\n // 4. If urlRecord’s scheme is \"http\", then set urlRecord’s scheme to \"ws\".\n if (urlRecord.protocol === 'http:') {\n urlRecord.protocol = 'ws:'\n } else if (urlRecord.protocol === 'https:') {\n // 5. Otherwise, if urlRecord’s scheme is \"https\", set urlRecord’s scheme to \"wss\".\n urlRecord.protocol = 'wss:'\n }\n\n // 6. If urlRecord’s scheme is not \"ws\" or \"wss\", then throw a \"SyntaxError\" DOMException.\n if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') {\n throw new DOMException(\n `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`,\n 'SyntaxError'\n )\n }\n\n // 7. If urlRecord’s fragment is non-null, then throw a \"SyntaxError\"\n // DOMException.\n if (urlRecord.hash || urlRecord.href.endsWith('#')) {\n throw new DOMException('Got fragment', 'SyntaxError')\n }\n\n // 8. If protocols is a string, set protocols to a sequence consisting\n // of just that string.\n if (typeof protocols === 'string') {\n protocols = [protocols]\n }\n\n // 9. If any of the values in protocols occur more than once or otherwise\n // fail to match the requirements for elements that comprise the value\n // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket\n // protocol, then throw a \"SyntaxError\" DOMException.\n if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) {\n throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')\n }\n\n if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) {\n throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')\n }\n\n // 10. Set this's url to urlRecord.\n this[kWebSocketURL] = new URL(urlRecord.href)\n\n // 11. Let client be this's relevant settings object.\n\n // 12. Run this step in parallel:\n\n // 1. Establish a WebSocket connection given urlRecord, protocols,\n // and client.\n this[kController] = establishWebSocketConnection(\n urlRecord,\n protocols,\n this,\n (response) => this.#onConnectionEstablished(response),\n options\n )\n\n // Each WebSocket object has an associated ready state, which is a\n // number representing the state of the connection. Initially it must\n // be CONNECTING (0).\n this[kReadyState] = WebSocket.CONNECTING\n\n // The extensions attribute must initially return the empty string.\n\n // The protocol attribute must initially return the empty string.\n\n // Each WebSocket object has an associated binary type, which is a\n // BinaryType. 
Initially it must be \"blob\".\n this[kBinaryType] = 'blob'\n }\n\n /**\n * @see https://websockets.spec.whatwg.org/#dom-websocket-close\n * @param {number|undefined} code\n * @param {string|undefined} reason\n */\n close (code = undefined, reason = undefined) {\n webidl.brandCheck(this, WebSocket)\n\n if (code !== undefined) {\n code = webidl.converters['unsigned short'](code, { clamp: true })\n }\n\n if (reason !== undefined) {\n reason = webidl.converters.USVString(reason)\n }\n\n // 1. If code is present, but is neither an integer equal to 1000 nor an\n // integer in the range 3000 to 4999, inclusive, throw an\n // \"InvalidAccessError\" DOMException.\n if (code !== undefined) {\n if (code !== 1000 && (code < 3000 || code > 4999)) {\n throw new DOMException('invalid code', 'InvalidAccessError')\n }\n }\n\n let reasonByteLength = 0\n\n // 2. If reason is present, then run these substeps:\n if (reason !== undefined) {\n // 1. Let reasonBytes be the result of encoding reason.\n // 2. If reasonBytes is longer than 123 bytes, then throw a\n // \"SyntaxError\" DOMException.\n reasonByteLength = Buffer.byteLength(reason)\n\n if (reasonByteLength > 123) {\n throw new DOMException(\n `Reason must be less than 123 bytes; received ${reasonByteLength}`,\n 'SyntaxError'\n )\n }\n }\n\n // 3. Run the first matching steps from the following list:\n if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) {\n // If this's ready state is CLOSING (2) or CLOSED (3)\n // Do nothing.\n } else if (!isEstablished(this)) {\n // If the WebSocket connection is not yet established\n // Fail the WebSocket connection and set this's ready state\n // to CLOSING (2).\n failWebsocketConnection(this, 'Connection was closed before it was established.')\n this[kReadyState] = WebSocket.CLOSING\n } else if (!isClosing(this)) {\n // If the WebSocket closing handshake has not yet been started\n // Start the WebSocket closing handshake and set this's ready\n // state to CLOSING (2).\n // - If neither code nor reason is present, the WebSocket Close\n // message must not have a body.\n // - If code is present, then the status code to use in the\n // WebSocket Close message must be the integer given by code.\n // - If reason is also present, then reasonBytes must be\n // provided in the Close message after the status code.\n\n const frame = new WebsocketFrameSend()\n\n // If neither code nor reason is present, the WebSocket Close\n // message must not have a body.\n\n // If code is present, then the status code to use in the\n // WebSocket Close message must be the integer given by code.\n if (code !== undefined && reason === undefined) {\n frame.frameData = Buffer.allocUnsafe(2)\n frame.frameData.writeUInt16BE(code, 0)\n } else if (code !== undefined && reason !== undefined) {\n // If reason is also present, then reasonBytes must be\n // provided in the Close message after the status code.\n frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength)\n frame.frameData.writeUInt16BE(code, 0)\n // the body MAY contain UTF-8-encoded data with value /reason/\n frame.frameData.write(reason, 2, 'utf-8')\n } else {\n frame.frameData = emptyBuffer\n }\n\n /** @type {import('stream').Duplex} */\n const socket = this[kResponse].socket\n\n socket.write(frame.createFrame(opcodes.CLOSE), (err) => {\n if (!err) {\n this[kSentClose] = true\n }\n })\n\n // Upon either sending or receiving a Close control frame, it is said\n // that _The WebSocket Closing Handshake is Started_ and that the\n // WebSocket connection 
is in the CLOSING state.\n this[kReadyState] = states.CLOSING\n } else {\n // Otherwise\n // Set this's ready state to CLOSING (2).\n this[kReadyState] = WebSocket.CLOSING\n }\n }\n\n /**\n * @see https://websockets.spec.whatwg.org/#dom-websocket-send\n * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data\n */\n send (data) {\n webidl.brandCheck(this, WebSocket)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' })\n\n data = webidl.converters.WebSocketSendData(data)\n\n // 1. If this's ready state is CONNECTING, then throw an\n // \"InvalidStateError\" DOMException.\n if (this[kReadyState] === WebSocket.CONNECTING) {\n throw new DOMException('Sent before connected.', 'InvalidStateError')\n }\n\n // 2. Run the appropriate set of steps from the following list:\n // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1\n // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2\n\n if (!isEstablished(this) || isClosing(this)) {\n return\n }\n\n /** @type {import('stream').Duplex} */\n const socket = this[kResponse].socket\n\n // If data is a string\n if (typeof data === 'string') {\n // If the WebSocket connection is established and the WebSocket\n // closing handshake has not yet started, then the user agent\n // must send a WebSocket Message comprised of the data argument\n // using a text frame opcode; if the data cannot be sent, e.g.\n // because it would need to be buffered but the buffer is full,\n // the user agent must flag the WebSocket as full and then close\n // the WebSocket connection. Any invocation of this method with a\n // string argument that does not throw an exception must increase\n // the bufferedAmount attribute by the number of bytes needed to\n // express the argument as UTF-8.\n\n const value = Buffer.from(data)\n const frame = new WebsocketFrameSend(value)\n const buffer = frame.createFrame(opcodes.TEXT)\n\n this.#bufferedAmount += value.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= value.byteLength\n })\n } else if (types.isArrayBuffer(data)) {\n // If the WebSocket connection is established, and the WebSocket\n // closing handshake has not yet started, then the user agent must\n // send a WebSocket Message comprised of data using a binary frame\n // opcode; if the data cannot be sent, e.g. because it would need\n // to be buffered but the buffer is full, the user agent must flag\n // the WebSocket as full and then close the WebSocket connection.\n // The data to be sent is the data stored in the buffer described\n // by the ArrayBuffer object. Any invocation of this method with an\n // ArrayBuffer argument that does not throw an exception must\n // increase the bufferedAmount attribute by the length of the\n // ArrayBuffer in bytes.\n\n const value = Buffer.from(data)\n const frame = new WebsocketFrameSend(value)\n const buffer = frame.createFrame(opcodes.BINARY)\n\n this.#bufferedAmount += value.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= value.byteLength\n })\n } else if (ArrayBuffer.isView(data)) {\n // If the WebSocket connection is established, and the WebSocket\n // closing handshake has not yet started, then the user agent must\n // send a WebSocket Message comprised of data using a binary frame\n // opcode; if the data cannot be sent, e.g. because it would need to\n // be buffered but the buffer is full, the user agent must flag the\n // WebSocket as full and then close the WebSocket connection. 
The\n // data to be sent is the data stored in the section of the buffer\n // described by the ArrayBuffer object that data references. Any\n // invocation of this method with this kind of argument that does\n // not throw an exception must increase the bufferedAmount attribute\n // by the length of data’s buffer in bytes.\n\n const ab = Buffer.from(data, data.byteOffset, data.byteLength)\n\n const frame = new WebsocketFrameSend(ab)\n const buffer = frame.createFrame(opcodes.BINARY)\n\n this.#bufferedAmount += ab.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= ab.byteLength\n })\n } else if (isBlobLike(data)) {\n // If the WebSocket connection is established, and the WebSocket\n // closing handshake has not yet started, then the user agent must\n // send a WebSocket Message comprised of data using a binary frame\n // opcode; if the data cannot be sent, e.g. because it would need to\n // be buffered but the buffer is full, the user agent must flag the\n // WebSocket as full and then close the WebSocket connection. The data\n // to be sent is the raw data represented by the Blob object. Any\n // invocation of this method with a Blob argument that does not throw\n // an exception must increase the bufferedAmount attribute by the size\n // of the Blob object’s raw data, in bytes.\n\n const frame = new WebsocketFrameSend()\n\n data.arrayBuffer().then((ab) => {\n const value = Buffer.from(ab)\n frame.frameData = value\n const buffer = frame.createFrame(opcodes.BINARY)\n\n this.#bufferedAmount += value.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= value.byteLength\n })\n })\n }\n }\n\n get readyState () {\n webidl.brandCheck(this, WebSocket)\n\n // The readyState getter steps are to return this's ready state.\n return this[kReadyState]\n }\n\n get bufferedAmount () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#bufferedAmount\n }\n\n get url () {\n webidl.brandCheck(this, WebSocket)\n\n // The url getter steps are to return this's url, serialized.\n return URLSerializer(this[kWebSocketURL])\n }\n\n get extensions () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#extensions\n }\n\n get protocol () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#protocol\n }\n\n get onopen () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.open\n }\n\n set onopen (fn) {\n webidl.brandCheck(this, WebSocket)\n\n if (this.#events.open) {\n this.removeEventListener('open', this.#events.open)\n }\n\n if (typeof fn === 'function') {\n this.#events.open = fn\n this.addEventListener('open', fn)\n } else {\n this.#events.open = null\n }\n }\n\n get onerror () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.error\n }\n\n set onerror (fn) {\n webidl.brandCheck(this, WebSocket)\n\n if (this.#events.error) {\n this.removeEventListener('error', this.#events.error)\n }\n\n if (typeof fn === 'function') {\n this.#events.error = fn\n this.addEventListener('error', fn)\n } else {\n this.#events.error = null\n }\n }\n\n get onclose () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.close\n }\n\n set onclose (fn) {\n webidl.brandCheck(this, WebSocket)\n\n if (this.#events.close) {\n this.removeEventListener('close', this.#events.close)\n }\n\n if (typeof fn === 'function') {\n this.#events.close = fn\n this.addEventListener('close', fn)\n } else {\n this.#events.close = null\n }\n }\n\n get onmessage () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.message\n }\n\n set onmessage (fn) {\n 
webidl.brandCheck(this, WebSocket)\n\n if (this.#events.message) {\n this.removeEventListener('message', this.#events.message)\n }\n\n if (typeof fn === 'function') {\n this.#events.message = fn\n this.addEventListener('message', fn)\n } else {\n this.#events.message = null\n }\n }\n\n get binaryType () {\n webidl.brandCheck(this, WebSocket)\n\n return this[kBinaryType]\n }\n\n set binaryType (type) {\n webidl.brandCheck(this, WebSocket)\n\n if (type !== 'blob' && type !== 'arraybuffer') {\n this[kBinaryType] = 'blob'\n } else {\n this[kBinaryType] = type\n }\n }\n\n /**\n * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol\n */\n #onConnectionEstablished (response) {\n // processResponse is called when the \"response’s header list has been received and initialized.\"\n // once this happens, the connection is open\n this[kResponse] = response\n\n const parser = new ByteParser(this)\n parser.on('drain', function onParserDrain () {\n this.ws[kResponse].socket.resume()\n })\n\n response.socket.ws = this\n this[kByteParser] = parser\n\n // 1. Change the ready state to OPEN (1).\n this[kReadyState] = states.OPEN\n\n // 2. Change the extensions attribute’s value to the extensions in use, if\n // it is not the null value.\n // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1\n const extensions = response.headersList.get('sec-websocket-extensions')\n\n if (extensions !== null) {\n this.#extensions = extensions\n }\n\n // 3. Change the protocol attribute’s value to the subprotocol in use, if\n // it is not the null value.\n // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9\n const protocol = response.headersList.get('sec-websocket-protocol')\n\n if (protocol !== null) {\n this.#protocol = protocol\n }\n\n // 4. Fire an event named open at the WebSocket object.\n fireEvent('open', this)\n }\n}\n\n// https://websockets.spec.whatwg.org/#dom-websocket-connecting\nWebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING\n// https://websockets.spec.whatwg.org/#dom-websocket-open\nWebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN\n// https://websockets.spec.whatwg.org/#dom-websocket-closing\nWebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING\n// https://websockets.spec.whatwg.org/#dom-websocket-closed\nWebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED\n\nObject.defineProperties(WebSocket.prototype, {\n CONNECTING: staticPropertyDescriptors,\n OPEN: staticPropertyDescriptors,\n CLOSING: staticPropertyDescriptors,\n CLOSED: staticPropertyDescriptors,\n url: kEnumerableProperty,\n readyState: kEnumerableProperty,\n bufferedAmount: kEnumerableProperty,\n onopen: kEnumerableProperty,\n onerror: kEnumerableProperty,\n onclose: kEnumerableProperty,\n close: kEnumerableProperty,\n onmessage: kEnumerableProperty,\n binaryType: kEnumerableProperty,\n send: kEnumerableProperty,\n extensions: kEnumerableProperty,\n protocol: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'WebSocket',\n writable: false,\n enumerable: false,\n configurable: true\n }\n})\n\nObject.defineProperties(WebSocket, {\n CONNECTING: staticPropertyDescriptors,\n OPEN: staticPropertyDescriptors,\n CLOSING: staticPropertyDescriptors,\n CLOSED: staticPropertyDescriptors\n})\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.DOMString\n)\n\nwebidl.converters['DOMString or sequence'] = function (V) {\n if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) {\n return webidl.converters['sequence'](V)\n }\n\n return 
webidl.converters.DOMString(V)\n}\n\n// This implements the propsal made in https://github.com/whatwg/websockets/issues/42\nwebidl.converters.WebSocketInit = webidl.dictionaryConverter([\n {\n key: 'protocols',\n converter: webidl.converters['DOMString or sequence'],\n get defaultValue () {\n return []\n }\n },\n {\n key: 'dispatcher',\n converter: (V) => V,\n get defaultValue () {\n return getGlobalDispatcher()\n }\n },\n {\n key: 'headers',\n converter: webidl.nullableConverter(webidl.converters.HeadersInit)\n }\n])\n\nwebidl.converters['DOMString or sequence or WebSocketInit'] = function (V) {\n if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {\n return webidl.converters.WebSocketInit(V)\n }\n\n return { protocols: webidl.converters['DOMString or sequence'](V) }\n}\n\nwebidl.converters.WebSocketSendData = function (V) {\n if (webidl.util.Type(V) === 'Object') {\n if (isBlobLike(V)) {\n return webidl.converters.Blob(V, { strict: false })\n }\n\n if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) {\n return webidl.converters.BufferSource(V)\n }\n }\n\n return webidl.converters.USVString(V)\n}\n\nmodule.exports = {\n WebSocket\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && process.version !== undefined) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","\n/**\n * For Node.js, simply re-export the core `util.deprecate` function.\n */\n\nmodule.exports = require('util').deprecate;\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return 
x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return 
usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = 
conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) 
? undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","class Node {\n\t/// value;\n\t/// next;\n\n\tconstructor(value) {\n\t\tthis.value = value;\n\n\t\t// TODO: Remove this when targeting Node.js 12.\n\t\tthis.next = undefined;\n\t}\n}\n\nclass Queue {\n\t// TODO: Use private class fields when targeting Node.js 12.\n\t// #_head;\n\t// #_tail;\n\t// #_size;\n\n\tconstructor() {\n\t\tthis.clear();\n\t}\n\n\tenqueue(value) {\n\t\tconst node = new Node(value);\n\n\t\tif (this._head) {\n\t\t\tthis._tail.next = node;\n\t\t\tthis._tail = node;\n\t\t} else {\n\t\t\tthis._head = node;\n\t\t\tthis._tail = node;\n\t\t}\n\n\t\tthis._size++;\n\t}\n\n\tdequeue() {\n\t\tconst current = this._head;\n\t\tif (!current) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis._head = this._head.next;\n\t\tthis._size--;\n\t\treturn current.value;\n\t}\n\n\tclear() {\n\t\tthis._head = undefined;\n\t\tthis._tail = undefined;\n\t\tthis._size = 0;\n\t}\n\n\tget size() {\n\t\treturn this._size;\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tlet 
current = this._head;\n\n\t\twhile (current) {\n\t\t\tyield current.value;\n\t\t\tcurrent = current.next;\n\t\t}\n\t}\n}\n\nmodule.exports = Queue;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"async_hooks\");","module.exports = require(\"buffer\");","module.exports = require(\"child_process\");","module.exports = require(\"console\");","module.exports = require(\"crypto\");","module.exports = require(\"diagnostics_channel\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"http2\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:events\");","module.exports = require(\"node:stream\");","module.exports = require(\"node:util\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"perf_hooks\");","module.exports = require(\"punycode\");","module.exports = require(\"querystring\");","module.exports = require(\"stream\");","module.exports = require(\"stream/web\");","module.exports = require(\"string_decoder\");","module.exports = require(\"tls\");","module.exports = require(\"tty\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"util/types\");","module.exports = require(\"worker_threads\");","module.exports = require(\"zlib\");","'use strict'\n\nconst WritableStream = require('node:stream').Writable\nconst inherits = require('node:util').inherits\n\nconst StreamSearch = require('../../streamsearch/sbmh')\n\nconst PartStream = require('./PartStream')\nconst HeaderParser = require('./HeaderParser')\n\nconst DASH = 45\nconst B_ONEDASH = Buffer.from('-')\nconst B_CRLF = Buffer.from('\\r\\n')\nconst EMPTY_FN = function () {}\n\nfunction Dicer (cfg) {\n if (!(this instanceof Dicer)) { return new Dicer(cfg) }\n WritableStream.call(this, cfg)\n\n if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }\n\n if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }\n\n this._headerFirst = cfg.headerFirst\n\n this._dashes = 0\n this._parts = 0\n this._finished = false\n this._realFinish = false\n this._isPreamble = true\n this._justMatched = false\n this._firstWrite = true\n this._inHeader = true\n this._part = undefined\n this._cb = undefined\n this._ignoreData = false\n this._partOpts = { highWaterMark: cfg.partHwm }\n this._pause = false\n\n const self = this\n this._hparser = new HeaderParser(cfg)\n this._hparser.on('header', function (header) {\n self._inHeader = false\n self._part.emit('header', header)\n })\n}\ninherits(Dicer, WritableStream)\n\nDicer.prototype.emit = function (ev) {\n if (ev === 'finish' && !this._realFinish) {\n if (!this._finished) {\n const self = this\n process.nextTick(function () {\n self.emit('error', new Error('Unexpected end of multipart data'))\n if (self._part && !self._ignoreData) {\n const type = (self._isPreamble ? 
'Preamble' : 'Part')\n self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))\n self._part.push(null)\n process.nextTick(function () {\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n })\n return\n }\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n })\n }\n } else { WritableStream.prototype.emit.apply(this, arguments) }\n}\n\nDicer.prototype._write = function (data, encoding, cb) {\n // ignore unexpected data (e.g. extra trailer data after finished)\n if (!this._hparser && !this._bparser) { return cb() }\n\n if (this._headerFirst && this._isPreamble) {\n if (!this._part) {\n this._part = new PartStream(this._partOpts)\n if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }\n }\n const r = this._hparser.push(data)\n if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }\n }\n\n // allows for \"easier\" testing\n if (this._firstWrite) {\n this._bparser.push(B_CRLF)\n this._firstWrite = false\n }\n\n this._bparser.push(data)\n\n if (this._pause) { this._cb = cb } else { cb() }\n}\n\nDicer.prototype.reset = function () {\n this._part = undefined\n this._bparser = undefined\n this._hparser = undefined\n}\n\nDicer.prototype.setBoundary = function (boundary) {\n const self = this\n this._bparser = new StreamSearch('\\r\\n--' + boundary)\n this._bparser.on('info', function (isMatch, data, start, end) {\n self._oninfo(isMatch, data, start, end)\n })\n}\n\nDicer.prototype._ignore = function () {\n if (this._part && !this._ignoreData) {\n this._ignoreData = true\n this._part.on('error', EMPTY_FN)\n // we must perform some kind of read on the stream even though we are\n // ignoring the data, otherwise node's Readable stream will not emit 'end'\n // after pushing null to the stream\n this._part.resume()\n }\n}\n\nDicer.prototype._oninfo = function (isMatch, data, start, end) {\n let buf; const self = this; let i = 0; let r; let shouldWriteMore = true\n\n if (!this._part && this._justMatched && data) {\n while (this._dashes < 2 && (start + i) < end) {\n if (data[start + i] === DASH) {\n ++i\n ++this._dashes\n } else {\n if (this._dashes) { buf = B_ONEDASH }\n this._dashes = 0\n break\n }\n }\n if (this._dashes === 2) {\n if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }\n this.reset()\n this._finished = true\n // no more parts will be added\n if (self._parts === 0) {\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n }\n }\n if (this._dashes) { return }\n }\n if (this._justMatched) { this._justMatched = false }\n if (!this._part) {\n this._part = new PartStream(this._partOpts)\n this._part._read = function (n) {\n self._unpause()\n }\n if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }\n if (!this._isPreamble) { this._inHeader = true }\n }\n if (data && start < end && !this._ignoreData) {\n if (this._isPreamble || !this._inHeader) {\n if (buf) { shouldWriteMore = this._part.push(buf) }\n shouldWriteMore = this._part.push(data.slice(start, end))\n if (!shouldWriteMore) { this._pause = true }\n } else if (!this._isPreamble && this._inHeader) {\n if (buf) { this._hparser.push(buf) }\n r = this._hparser.push(data.slice(start, end))\n if (!this._inHeader && r !== undefined && r < end) { 
this._oninfo(false, data, start + r, end) }\n }\n }\n if (isMatch) {\n this._hparser.reset()\n if (this._isPreamble) { this._isPreamble = false } else {\n if (start !== end) {\n ++this._parts\n this._part.on('end', function () {\n if (--self._parts === 0) {\n if (self._finished) {\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n } else {\n self._unpause()\n }\n }\n })\n }\n }\n this._part.push(null)\n this._part = undefined\n this._ignoreData = false\n this._justMatched = true\n this._dashes = 0\n }\n}\n\nDicer.prototype._unpause = function () {\n if (!this._pause) { return }\n\n this._pause = false\n if (this._cb) {\n const cb = this._cb\n this._cb = undefined\n cb()\n }\n}\n\nmodule.exports = Dicer\n","'use strict'\n\nconst EventEmitter = require('node:events').EventEmitter\nconst inherits = require('node:util').inherits\nconst getLimit = require('../../../lib/utils/getLimit')\n\nconst StreamSearch = require('../../streamsearch/sbmh')\n\nconst B_DCRLF = Buffer.from('\\r\\n\\r\\n')\nconst RE_CRLF = /\\r\\n/g\nconst RE_HDR = /^([^:]+):[ \\t]?([\\x00-\\xFF]+)?$/ // eslint-disable-line no-control-regex\n\nfunction HeaderParser (cfg) {\n EventEmitter.call(this)\n\n cfg = cfg || {}\n const self = this\n this.nread = 0\n this.maxed = false\n this.npairs = 0\n this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)\n this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)\n this.buffer = ''\n this.header = {}\n this.finished = false\n this.ss = new StreamSearch(B_DCRLF)\n this.ss.on('info', function (isMatch, data, start, end) {\n if (data && !self.maxed) {\n if (self.nread + end - start >= self.maxHeaderSize) {\n end = self.maxHeaderSize - self.nread + start\n self.nread = self.maxHeaderSize\n self.maxed = true\n } else { self.nread += (end - start) }\n\n self.buffer += data.toString('binary', start, end)\n }\n if (isMatch) { self._finish() }\n })\n}\ninherits(HeaderParser, EventEmitter)\n\nHeaderParser.prototype.push = function (data) {\n const r = this.ss.push(data)\n if (this.finished) { return r }\n}\n\nHeaderParser.prototype.reset = function () {\n this.finished = false\n this.buffer = ''\n this.header = {}\n this.ss.reset()\n}\n\nHeaderParser.prototype._finish = function () {\n if (this.buffer) { this._parseHeader() }\n this.ss.matches = this.ss.maxMatches\n const header = this.header\n this.header = {}\n this.buffer = ''\n this.finished = true\n this.nread = this.npairs = 0\n this.maxed = false\n this.emit('header', header)\n}\n\nHeaderParser.prototype._parseHeader = function () {\n if (this.npairs === this.maxHeaderPairs) { return }\n\n const lines = this.buffer.split(RE_CRLF)\n const len = lines.length\n let m, h\n\n for (var i = 0; i < len; ++i) { // eslint-disable-line no-var\n if (lines[i].length === 0) { continue }\n if (lines[i][0] === '\\t' || lines[i][0] === ' ') {\n // folded header content\n // RFC2822 says to just remove the CRLF and not the whitespace following\n // it, so we follow the RFC and include the leading whitespace ...\n if (h) {\n this.header[h][this.header[h].length - 1] += lines[i]\n continue\n }\n }\n\n const posColon = lines[i].indexOf(':')\n if (\n posColon === -1 ||\n posColon === 0\n ) {\n return\n }\n m = RE_HDR.exec(lines[i])\n h = m[1].toLowerCase()\n this.header[h] = this.header[h] || []\n this.header[h].push((m[2] || ''))\n if (++this.npairs === this.maxHeaderPairs) { break }\n }\n}\n\nmodule.exports = HeaderParser\n","'use strict'\n\nconst inherits = require('node:util').inherits\nconst ReadableStream = 
require('node:stream').Readable\n\nfunction PartStream (opts) {\n ReadableStream.call(this, opts)\n}\ninherits(PartStream, ReadableStream)\n\nPartStream.prototype._read = function (n) {}\n\nmodule.exports = PartStream\n","'use strict'\n\n/**\n * Copyright Brian White. All rights reserved.\n *\n * @see https://github.com/mscdex/streamsearch\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n *\n * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation\n * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool\n */\nconst EventEmitter = require('node:events').EventEmitter\nconst inherits = require('node:util').inherits\n\nfunction SBMH (needle) {\n if (typeof needle === 'string') {\n needle = Buffer.from(needle)\n }\n\n if (!Buffer.isBuffer(needle)) {\n throw new TypeError('The needle has to be a String or a Buffer.')\n }\n\n const needleLength = needle.length\n\n if (needleLength === 0) {\n throw new Error('The needle cannot be an empty String/Buffer.')\n }\n\n if (needleLength > 256) {\n throw new Error('The needle cannot have a length bigger than 256.')\n }\n\n this.maxMatches = Infinity\n this.matches = 0\n\n this._occ = new Array(256)\n .fill(needleLength) // Initialize occurrence table.\n this._lookbehind_size = 0\n this._needle = needle\n this._bufpos = 0\n\n this._lookbehind = Buffer.alloc(needleLength)\n\n // Populate occurrence table with analysis of the needle,\n // ignoring last letter.\n for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var\n this._occ[needle[i]] = needleLength - 1 - i\n }\n}\ninherits(SBMH, EventEmitter)\n\nSBMH.prototype.reset = function () {\n this._lookbehind_size = 0\n this.matches = 0\n this._bufpos = 0\n}\n\nSBMH.prototype.push = function (chunk, pos) {\n if (!Buffer.isBuffer(chunk)) {\n chunk = Buffer.from(chunk, 'binary')\n }\n const chlen = chunk.length\n this._bufpos = pos || 0\n let r\n while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }\n return r\n}\n\nSBMH.prototype._sbmh_feed = function (data) {\n const len = data.length\n const needle = this._needle\n const needleLength = needle.length\n const lastNeedleChar = needle[needleLength - 1]\n\n // Positive: points to a position in `data`\n // pos == 3 points to data[3]\n // Negative: points to a position in the lookbehind buffer\n // pos == -2 points to lookbehind[lookbehind_size - 2]\n let pos = -this._lookbehind_size\n let ch\n\n if (pos < 0) {\n // Lookbehind buffer is not 
empty. Perform Boyer-Moore-Horspool\n // search with character lookup code that considers both the\n // lookbehind buffer and the current round's haystack data.\n //\n // Loop until\n // there is a match.\n // or until\n // we've moved past the position that requires the\n // lookbehind buffer. In this case we switch to the\n // optimized loop.\n // or until\n // the character to look at lies outside the haystack.\n while (pos < 0 && pos <= len - needleLength) {\n ch = this._sbmh_lookup_char(data, pos + needleLength - 1)\n\n if (\n ch === lastNeedleChar &&\n this._sbmh_memcmp(data, pos, needleLength - 1)\n ) {\n this._lookbehind_size = 0\n ++this.matches\n this.emit('info', true)\n\n return (this._bufpos = pos + needleLength)\n }\n pos += this._occ[ch]\n }\n\n // No match.\n\n if (pos < 0) {\n // There's too few data for Boyer-Moore-Horspool to run,\n // so let's use a different algorithm to skip as much as\n // we can.\n // Forward pos until\n // the trailing part of lookbehind + data\n // looks like the beginning of the needle\n // or until\n // pos == 0\n while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }\n }\n\n if (pos >= 0) {\n // Discard lookbehind buffer.\n this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)\n this._lookbehind_size = 0\n } else {\n // Cut off part of the lookbehind buffer that has\n // been processed and append the entire haystack\n // into it.\n const bytesToCutOff = this._lookbehind_size + pos\n if (bytesToCutOff > 0) {\n // The cut off data is guaranteed not to contain the needle.\n this.emit('info', false, this._lookbehind, 0, bytesToCutOff)\n }\n\n this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,\n this._lookbehind_size - bytesToCutOff)\n this._lookbehind_size -= bytesToCutOff\n\n data.copy(this._lookbehind, this._lookbehind_size)\n this._lookbehind_size += len\n\n this._bufpos = len\n return len\n }\n }\n\n pos += (pos >= 0) * this._bufpos\n\n // Lookbehind buffer is now empty. We only need to check if the\n // needle is in the haystack.\n if (data.indexOf(needle, pos) !== -1) {\n pos = data.indexOf(needle, pos)\n ++this.matches\n if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }\n\n return (this._bufpos = pos + needleLength)\n } else {\n pos = len - needleLength\n }\n\n // There was no match. If there's trailing haystack data that we cannot\n // match yet using the Boyer-Moore-Horspool algorithm (because the trailing\n // data is less than the needle size) then match using a modified\n // algorithm that starts matching from the beginning instead of the end.\n // Whatever trailing data is left after running this algorithm is added to\n // the lookbehind buffer.\n while (\n pos < len &&\n (\n data[pos] !== needle[0] ||\n (\n (Buffer.compare(\n data.subarray(pos, pos + len - pos),\n needle.subarray(0, len - pos)\n ) !== 0)\n )\n )\n ) {\n ++pos\n }\n if (pos < len) {\n data.copy(this._lookbehind, 0, pos, pos + (len - pos))\n this._lookbehind_size = len - pos\n }\n\n // Everything until pos is guaranteed not to contain needle data.\n if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }\n\n this._bufpos = len\n return len\n}\n\nSBMH.prototype._sbmh_lookup_char = function (data, pos) {\n return (pos < 0)\n ? 
this._lookbehind[this._lookbehind_size + pos]\n : data[pos]\n}\n\nSBMH.prototype._sbmh_memcmp = function (data, pos, len) {\n for (var i = 0; i < len; ++i) { // eslint-disable-line no-var\n if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }\n }\n return true\n}\n\nmodule.exports = SBMH\n","'use strict'\n\nconst WritableStream = require('node:stream').Writable\nconst { inherits } = require('node:util')\nconst Dicer = require('../deps/dicer/lib/Dicer')\n\nconst MultipartParser = require('./types/multipart')\nconst UrlencodedParser = require('./types/urlencoded')\nconst parseParams = require('./utils/parseParams')\n\nfunction Busboy (opts) {\n if (!(this instanceof Busboy)) { return new Busboy(opts) }\n\n if (typeof opts !== 'object') {\n throw new TypeError('Busboy expected an options-Object.')\n }\n if (typeof opts.headers !== 'object') {\n throw new TypeError('Busboy expected an options-Object with headers-attribute.')\n }\n if (typeof opts.headers['content-type'] !== 'string') {\n throw new TypeError('Missing Content-Type-header.')\n }\n\n const {\n headers,\n ...streamOptions\n } = opts\n\n this.opts = {\n autoDestroy: false,\n ...streamOptions\n }\n WritableStream.call(this, this.opts)\n\n this._done = false\n this._parser = this.getParserByHeaders(headers)\n this._finished = false\n}\ninherits(Busboy, WritableStream)\n\nBusboy.prototype.emit = function (ev) {\n if (ev === 'finish') {\n if (!this._done) {\n this._parser?.end()\n return\n } else if (this._finished) {\n return\n }\n this._finished = true\n }\n WritableStream.prototype.emit.apply(this, arguments)\n}\n\nBusboy.prototype.getParserByHeaders = function (headers) {\n const parsed = parseParams(headers['content-type'])\n\n const cfg = {\n defCharset: this.opts.defCharset,\n fileHwm: this.opts.fileHwm,\n headers,\n highWaterMark: this.opts.highWaterMark,\n isPartAFile: this.opts.isPartAFile,\n limits: this.opts.limits,\n parsedConType: parsed,\n preservePath: this.opts.preservePath\n }\n\n if (MultipartParser.detect.test(parsed[0])) {\n return new MultipartParser(this, cfg)\n }\n if (UrlencodedParser.detect.test(parsed[0])) {\n return new UrlencodedParser(this, cfg)\n }\n throw new Error('Unsupported Content-Type.')\n}\n\nBusboy.prototype._write = function (chunk, encoding, cb) {\n this._parser.write(chunk, cb)\n}\n\nmodule.exports = Busboy\nmodule.exports.default = Busboy\nmodule.exports.Busboy = Busboy\n\nmodule.exports.Dicer = Dicer\n","'use strict'\n\n// TODO:\n// * support 1 nested multipart level\n// (see second multipart example here:\n// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)\n// * support limits.fieldNameSize\n// -- this will require modifications to utils.parseParams\n\nconst { Readable } = require('node:stream')\nconst { inherits } = require('node:util')\n\nconst Dicer = require('../../deps/dicer/lib/Dicer')\n\nconst parseParams = require('../utils/parseParams')\nconst decodeText = require('../utils/decodeText')\nconst basename = require('../utils/basename')\nconst getLimit = require('../utils/getLimit')\n\nconst RE_BOUNDARY = /^boundary$/i\nconst RE_FIELD = /^form-data$/i\nconst RE_CHARSET = /^charset$/i\nconst RE_FILENAME = /^filename$/i\nconst RE_NAME = /^name$/i\n\nMultipart.detect = /^multipart\\/form-data/i\nfunction Multipart (boy, cfg) {\n let i\n let len\n const self = this\n let boundary\n const limits = cfg.limits\n const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || 
fileName !== undefined))\n const parsedConType = cfg.parsedConType || []\n const defCharset = cfg.defCharset || 'utf8'\n const preservePath = cfg.preservePath\n const fileOpts = { highWaterMark: cfg.fileHwm }\n\n for (i = 0, len = parsedConType.length; i < len; ++i) {\n if (Array.isArray(parsedConType[i]) &&\n RE_BOUNDARY.test(parsedConType[i][0])) {\n boundary = parsedConType[i][1]\n break\n }\n }\n\n function checkFinished () {\n if (nends === 0 && finished && !boy._done) {\n finished = false\n self.end()\n }\n }\n\n if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }\n\n const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)\n const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)\n const filesLimit = getLimit(limits, 'files', Infinity)\n const fieldsLimit = getLimit(limits, 'fields', Infinity)\n const partsLimit = getLimit(limits, 'parts', Infinity)\n const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)\n const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)\n\n let nfiles = 0\n let nfields = 0\n let nends = 0\n let curFile\n let curField\n let finished = false\n\n this._needDrain = false\n this._pause = false\n this._cb = undefined\n this._nparts = 0\n this._boy = boy\n\n const parserCfg = {\n boundary,\n maxHeaderPairs: headerPairsLimit,\n maxHeaderSize: headerSizeLimit,\n partHwm: fileOpts.highWaterMark,\n highWaterMark: cfg.highWaterMark\n }\n\n this.parser = new Dicer(parserCfg)\n this.parser.on('drain', function () {\n self._needDrain = false\n if (self._cb && !self._pause) {\n const cb = self._cb\n self._cb = undefined\n cb()\n }\n }).on('part', function onPart (part) {\n if (++self._nparts > partsLimit) {\n self.parser.removeListener('part', onPart)\n self.parser.on('part', skipPart)\n boy.hitPartsLimit = true\n boy.emit('partsLimit')\n return skipPart(part)\n }\n\n // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let\n // us emit 'end' early since we know the part has ended if we are already\n // seeing the next part\n if (curField) {\n const field = curField\n field.emit('end')\n field.removeAllListeners('end')\n }\n\n part.on('header', function (header) {\n let contype\n let fieldname\n let parsed\n let charset\n let encoding\n let filename\n let nsize = 0\n\n if (header['content-type']) {\n parsed = parseParams(header['content-type'][0])\n if (parsed[0]) {\n contype = parsed[0].toLowerCase()\n for (i = 0, len = parsed.length; i < len; ++i) {\n if (RE_CHARSET.test(parsed[i][0])) {\n charset = parsed[i][1].toLowerCase()\n break\n }\n }\n }\n }\n\n if (contype === undefined) { contype = 'text/plain' }\n if (charset === undefined) { charset = defCharset }\n\n if (header['content-disposition']) {\n parsed = parseParams(header['content-disposition'][0])\n if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }\n for (i = 0, len = parsed.length; i < len; ++i) {\n if (RE_NAME.test(parsed[i][0])) {\n fieldname = parsed[i][1]\n } else if (RE_FILENAME.test(parsed[i][0])) {\n filename = parsed[i][1]\n if (!preservePath) { filename = basename(filename) }\n }\n }\n } else { return skipPart(part) }\n\n if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }\n\n let onData,\n onEnd\n\n if (isPartAFile(fieldname, contype, filename)) {\n // file/binary field\n if (nfiles === filesLimit) {\n if (!boy.hitFilesLimit) {\n boy.hitFilesLimit = true\n boy.emit('filesLimit')\n }\n return skipPart(part)\n }\n\n 
++nfiles\n\n if (!boy._events.file) {\n self.parser._ignore()\n return\n }\n\n ++nends\n const file = new FileStream(fileOpts)\n curFile = file\n file.on('end', function () {\n --nends\n self._pause = false\n checkFinished()\n if (self._cb && !self._needDrain) {\n const cb = self._cb\n self._cb = undefined\n cb()\n }\n })\n file._read = function (n) {\n if (!self._pause) { return }\n self._pause = false\n if (self._cb && !self._needDrain) {\n const cb = self._cb\n self._cb = undefined\n cb()\n }\n }\n boy.emit('file', fieldname, file, filename, encoding, contype)\n\n onData = function (data) {\n if ((nsize += data.length) > fileSizeLimit) {\n const extralen = fileSizeLimit - nsize + data.length\n if (extralen > 0) { file.push(data.slice(0, extralen)) }\n file.truncated = true\n file.bytesRead = fileSizeLimit\n part.removeAllListeners('data')\n file.emit('limit')\n return\n } else if (!file.push(data)) { self._pause = true }\n\n file.bytesRead = nsize\n }\n\n onEnd = function () {\n curFile = undefined\n file.push(null)\n }\n } else {\n // non-file field\n if (nfields === fieldsLimit) {\n if (!boy.hitFieldsLimit) {\n boy.hitFieldsLimit = true\n boy.emit('fieldsLimit')\n }\n return skipPart(part)\n }\n\n ++nfields\n ++nends\n let buffer = ''\n let truncated = false\n curField = part\n\n onData = function (data) {\n if ((nsize += data.length) > fieldSizeLimit) {\n const extralen = (fieldSizeLimit - (nsize - data.length))\n buffer += data.toString('binary', 0, extralen)\n truncated = true\n part.removeAllListeners('data')\n } else { buffer += data.toString('binary') }\n }\n\n onEnd = function () {\n curField = undefined\n if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }\n boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)\n --nends\n checkFinished()\n }\n }\n\n /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become\n broken. 
Streams2/streams3 is a huge black box of confusion, but\n somehow overriding the sync state seems to fix things again (and still\n seems to work for previous node versions).\n */\n part._readableState.sync = false\n\n part.on('data', onData)\n part.on('end', onEnd)\n }).on('error', function (err) {\n if (curFile) { curFile.emit('error', err) }\n })\n }).on('error', function (err) {\n boy.emit('error', err)\n }).on('finish', function () {\n finished = true\n checkFinished()\n })\n}\n\nMultipart.prototype.write = function (chunk, cb) {\n const r = this.parser.write(chunk)\n if (r && !this._pause) {\n cb()\n } else {\n this._needDrain = !r\n this._cb = cb\n }\n}\n\nMultipart.prototype.end = function () {\n const self = this\n\n if (self.parser.writable) {\n self.parser.end()\n } else if (!self._boy._done) {\n process.nextTick(function () {\n self._boy._done = true\n self._boy.emit('finish')\n })\n }\n}\n\nfunction skipPart (part) {\n part.resume()\n}\n\nfunction FileStream (opts) {\n Readable.call(this, opts)\n\n this.bytesRead = 0\n\n this.truncated = false\n}\n\ninherits(FileStream, Readable)\n\nFileStream.prototype._read = function (n) {}\n\nmodule.exports = Multipart\n","'use strict'\n\nconst Decoder = require('../utils/Decoder')\nconst decodeText = require('../utils/decodeText')\nconst getLimit = require('../utils/getLimit')\n\nconst RE_CHARSET = /^charset$/i\n\nUrlEncoded.detect = /^application\\/x-www-form-urlencoded/i\nfunction UrlEncoded (boy, cfg) {\n const limits = cfg.limits\n const parsedConType = cfg.parsedConType\n this.boy = boy\n\n this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)\n this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)\n this.fieldsLimit = getLimit(limits, 'fields', Infinity)\n\n let charset\n for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var\n if (Array.isArray(parsedConType[i]) &&\n RE_CHARSET.test(parsedConType[i][0])) {\n charset = parsedConType[i][1].toLowerCase()\n break\n }\n }\n\n if (charset === undefined) { charset = cfg.defCharset || 'utf8' }\n\n this.decoder = new Decoder()\n this.charset = charset\n this._fields = 0\n this._state = 'key'\n this._checkingBytes = true\n this._bytesKey = 0\n this._bytesVal = 0\n this._key = ''\n this._val = ''\n this._keyTrunc = false\n this._valTrunc = false\n this._hitLimit = false\n}\n\nUrlEncoded.prototype.write = function (data, cb) {\n if (this._fields === this.fieldsLimit) {\n if (!this.boy.hitFieldsLimit) {\n this.boy.hitFieldsLimit = true\n this.boy.emit('fieldsLimit')\n }\n return cb()\n }\n\n let idxeq; let idxamp; let i; let p = 0; const len = data.length\n\n while (p < len) {\n if (this._state === 'key') {\n idxeq = idxamp = undefined\n for (i = p; i < len; ++i) {\n if (!this._checkingBytes) { ++p }\n if (data[i] === 0x3D/* = */) {\n idxeq = i\n break\n } else if (data[i] === 0x26/* & */) {\n idxamp = i\n break\n }\n if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {\n this._hitLimit = true\n break\n } else if (this._checkingBytes) { ++this._bytesKey }\n }\n\n if (idxeq !== undefined) {\n // key with assignment\n if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }\n this._state = 'val'\n\n this._hitLimit = false\n this._checkingBytes = true\n this._val = ''\n this._bytesVal = 0\n this._valTrunc = false\n this.decoder.reset()\n\n p = idxeq + 1\n } else if (idxamp !== undefined) {\n // key with no assignment\n ++this._fields\n let key; const keyTrunc = this._keyTrunc\n if (idxamp > 
p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }\n\n this._hitLimit = false\n this._checkingBytes = true\n this._key = ''\n this._bytesKey = 0\n this._keyTrunc = false\n this.decoder.reset()\n\n if (key.length) {\n this.boy.emit('field', decodeText(key, 'binary', this.charset),\n '',\n keyTrunc,\n false)\n }\n\n p = idxamp + 1\n if (this._fields === this.fieldsLimit) { return cb() }\n } else if (this._hitLimit) {\n // we may not have hit the actual limit if there are encoded bytes...\n if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }\n p = i\n if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {\n // yep, we actually did hit the limit\n this._checkingBytes = false\n this._keyTrunc = true\n }\n } else {\n if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }\n p = len\n }\n } else {\n idxamp = undefined\n for (i = p; i < len; ++i) {\n if (!this._checkingBytes) { ++p }\n if (data[i] === 0x26/* & */) {\n idxamp = i\n break\n }\n if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {\n this._hitLimit = true\n break\n } else if (this._checkingBytes) { ++this._bytesVal }\n }\n\n if (idxamp !== undefined) {\n ++this._fields\n if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }\n this.boy.emit('field', decodeText(this._key, 'binary', this.charset),\n decodeText(this._val, 'binary', this.charset),\n this._keyTrunc,\n this._valTrunc)\n this._state = 'key'\n\n this._hitLimit = false\n this._checkingBytes = true\n this._key = ''\n this._bytesKey = 0\n this._keyTrunc = false\n this.decoder.reset()\n\n p = idxamp + 1\n if (this._fields === this.fieldsLimit) { return cb() }\n } else if (this._hitLimit) {\n // we may not have hit the actual limit if there are encoded bytes...\n if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }\n p = i\n if ((this._val === '' && this.fieldSizeLimit === 0) ||\n (this._bytesVal = this._val.length) === this.fieldSizeLimit) {\n // yep, we actually did hit the limit\n this._checkingBytes = false\n this._valTrunc = true\n }\n } else {\n if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }\n p = len\n }\n }\n }\n cb()\n}\n\nUrlEncoded.prototype.end = function () {\n if (this.boy._done) { return }\n\n if (this._state === 'key' && this._key.length > 0) {\n this.boy.emit('field', decodeText(this._key, 'binary', this.charset),\n '',\n this._keyTrunc,\n false)\n } else if (this._state === 'val') {\n this.boy.emit('field', decodeText(this._key, 'binary', this.charset),\n decodeText(this._val, 'binary', this.charset),\n this._keyTrunc,\n this._valTrunc)\n }\n this.boy._done = true\n this.boy.emit('finish')\n}\n\nmodule.exports = UrlEncoded\n","'use strict'\n\nconst RE_PLUS = /\\+/g\n\nconst HEX = [\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,\n 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n]\n\nfunction Decoder () {\n this.buffer = undefined\n}\nDecoder.prototype.write = function (str) {\n // Replace '+' with ' ' before decoding\n str = str.replace(RE_PLUS, ' ')\n let res = ''\n let i = 0; let p = 0; const len = str.length\n for (; i < len; ++i) {\n if 
(this.buffer !== undefined) {\n if (!HEX[str.charCodeAt(i)]) {\n res += '%' + this.buffer\n this.buffer = undefined\n --i // retry character\n } else {\n this.buffer += str[i]\n ++p\n if (this.buffer.length === 2) {\n res += String.fromCharCode(parseInt(this.buffer, 16))\n this.buffer = undefined\n }\n }\n } else if (str[i] === '%') {\n if (i > p) {\n res += str.substring(p, i)\n p = i\n }\n this.buffer = ''\n ++p\n }\n }\n if (p < len && this.buffer === undefined) { res += str.substring(p) }\n return res\n}\nDecoder.prototype.reset = function () {\n this.buffer = undefined\n}\n\nmodule.exports = Decoder\n","'use strict'\n\nmodule.exports = function basename (path) {\n if (typeof path !== 'string') { return '' }\n for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var\n switch (path.charCodeAt(i)) {\n case 0x2F: // '/'\n case 0x5C: // '\\'\n path = path.slice(i + 1)\n return (path === '..' || path === '.' ? '' : path)\n }\n }\n return (path === '..' || path === '.' ? '' : path)\n}\n","'use strict'\n\n// Node has always utf-8\nconst utf8Decoder = new TextDecoder('utf-8')\nconst textDecoders = new Map([\n ['utf-8', utf8Decoder],\n ['utf8', utf8Decoder]\n])\n\nfunction getDecoder (charset) {\n let lc\n while (true) {\n switch (charset) {\n case 'utf-8':\n case 'utf8':\n return decoders.utf8\n case 'latin1':\n case 'ascii': // TODO: Make these a separate, strict decoder?\n case 'us-ascii':\n case 'iso-8859-1':\n case 'iso8859-1':\n case 'iso88591':\n case 'iso_8859-1':\n case 'windows-1252':\n case 'iso_8859-1:1987':\n case 'cp1252':\n case 'x-cp1252':\n return decoders.latin1\n case 'utf16le':\n case 'utf-16le':\n case 'ucs2':\n case 'ucs-2':\n return decoders.utf16le\n case 'base64':\n return decoders.base64\n default:\n if (lc === undefined) {\n lc = true\n charset = charset.toLowerCase()\n continue\n }\n return decoders.other.bind(charset)\n }\n }\n}\n\nconst decoders = {\n utf8: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n return data.utf8Slice(0, data.length)\n },\n\n latin1: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n return data\n }\n return data.latin1Slice(0, data.length)\n },\n\n utf16le: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n return data.ucs2Slice(0, data.length)\n },\n\n base64: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n return data.base64Slice(0, data.length)\n },\n\n other: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n\n if (textDecoders.has(this.toString())) {\n try {\n return textDecoders.get(this).decode(data)\n } catch (e) { }\n }\n return typeof data === 'string'\n ? 
data\n : data.toString()\n }\n}\n\nfunction decodeText (text, sourceEncoding, destEncoding) {\n if (text) {\n return getDecoder(destEncoding)(text, sourceEncoding)\n }\n return text\n}\n\nmodule.exports = decodeText\n","'use strict'\n\nmodule.exports = function getLimit (limits, name, defaultLimit) {\n if (\n !limits ||\n limits[name] === undefined ||\n limits[name] === null\n ) { return defaultLimit }\n\n if (\n typeof limits[name] !== 'number' ||\n isNaN(limits[name])\n ) { throw new TypeError('Limit ' + name + ' is not a valid number') }\n\n return limits[name]\n}\n","/* eslint-disable object-property-newline */\n'use strict'\n\nconst decodeText = require('./decodeText')\n\nconst RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g\n\nconst EncodedLookup = {\n '%00': '\\x00', '%01': '\\x01', '%02': '\\x02', '%03': '\\x03', '%04': '\\x04',\n '%05': '\\x05', '%06': '\\x06', '%07': '\\x07', '%08': '\\x08', '%09': '\\x09',\n '%0a': '\\x0a', '%0A': '\\x0a', '%0b': '\\x0b', '%0B': '\\x0b', '%0c': '\\x0c',\n '%0C': '\\x0c', '%0d': '\\x0d', '%0D': '\\x0d', '%0e': '\\x0e', '%0E': '\\x0e',\n '%0f': '\\x0f', '%0F': '\\x0f', '%10': '\\x10', '%11': '\\x11', '%12': '\\x12',\n '%13': '\\x13', '%14': '\\x14', '%15': '\\x15', '%16': '\\x16', '%17': '\\x17',\n '%18': '\\x18', '%19': '\\x19', '%1a': '\\x1a', '%1A': '\\x1a', '%1b': '\\x1b',\n '%1B': '\\x1b', '%1c': '\\x1c', '%1C': '\\x1c', '%1d': '\\x1d', '%1D': '\\x1d',\n '%1e': '\\x1e', '%1E': '\\x1e', '%1f': '\\x1f', '%1F': '\\x1f', '%20': '\\x20',\n '%21': '\\x21', '%22': '\\x22', '%23': '\\x23', '%24': '\\x24', '%25': '\\x25',\n '%26': '\\x26', '%27': '\\x27', '%28': '\\x28', '%29': '\\x29', '%2a': '\\x2a',\n '%2A': '\\x2a', '%2b': '\\x2b', '%2B': '\\x2b', '%2c': '\\x2c', '%2C': '\\x2c',\n '%2d': '\\x2d', '%2D': '\\x2d', '%2e': '\\x2e', '%2E': '\\x2e', '%2f': '\\x2f',\n '%2F': '\\x2f', '%30': '\\x30', '%31': '\\x31', '%32': '\\x32', '%33': '\\x33',\n '%34': '\\x34', '%35': '\\x35', '%36': '\\x36', '%37': '\\x37', '%38': '\\x38',\n '%39': '\\x39', '%3a': '\\x3a', '%3A': '\\x3a', '%3b': '\\x3b', '%3B': '\\x3b',\n '%3c': '\\x3c', '%3C': '\\x3c', '%3d': '\\x3d', '%3D': '\\x3d', '%3e': '\\x3e',\n '%3E': '\\x3e', '%3f': '\\x3f', '%3F': '\\x3f', '%40': '\\x40', '%41': '\\x41',\n '%42': '\\x42', '%43': '\\x43', '%44': '\\x44', '%45': '\\x45', '%46': '\\x46',\n '%47': '\\x47', '%48': '\\x48', '%49': '\\x49', '%4a': '\\x4a', '%4A': '\\x4a',\n '%4b': '\\x4b', '%4B': '\\x4b', '%4c': '\\x4c', '%4C': '\\x4c', '%4d': '\\x4d',\n '%4D': '\\x4d', '%4e': '\\x4e', '%4E': '\\x4e', '%4f': '\\x4f', '%4F': '\\x4f',\n '%50': '\\x50', '%51': '\\x51', '%52': '\\x52', '%53': '\\x53', '%54': '\\x54',\n '%55': '\\x55', '%56': '\\x56', '%57': '\\x57', '%58': '\\x58', '%59': '\\x59',\n '%5a': '\\x5a', '%5A': '\\x5a', '%5b': '\\x5b', '%5B': '\\x5b', '%5c': '\\x5c',\n '%5C': '\\x5c', '%5d': '\\x5d', '%5D': '\\x5d', '%5e': '\\x5e', '%5E': '\\x5e',\n '%5f': '\\x5f', '%5F': '\\x5f', '%60': '\\x60', '%61': '\\x61', '%62': '\\x62',\n '%63': '\\x63', '%64': '\\x64', '%65': '\\x65', '%66': '\\x66', '%67': '\\x67',\n '%68': '\\x68', '%69': '\\x69', '%6a': '\\x6a', '%6A': '\\x6a', '%6b': '\\x6b',\n '%6B': '\\x6b', '%6c': '\\x6c', '%6C': '\\x6c', '%6d': '\\x6d', '%6D': '\\x6d',\n '%6e': '\\x6e', '%6E': '\\x6e', '%6f': '\\x6f', '%6F': '\\x6f', '%70': '\\x70',\n '%71': '\\x71', '%72': '\\x72', '%73': '\\x73', '%74': '\\x74', '%75': '\\x75',\n '%76': '\\x76', '%77': '\\x77', '%78': '\\x78', '%79': '\\x79', '%7a': '\\x7a',\n '%7A': '\\x7a', '%7b': '\\x7b', '%7B': '\\x7b', '%7c': '\\x7c', '%7C': '\\x7c',\n 
'%7d': '\\x7d', '%7D': '\\x7d', '%7e': '\\x7e', '%7E': '\\x7e', '%7f': '\\x7f',\n '%7F': '\\x7f', '%80': '\\x80', '%81': '\\x81', '%82': '\\x82', '%83': '\\x83',\n '%84': '\\x84', '%85': '\\x85', '%86': '\\x86', '%87': '\\x87', '%88': '\\x88',\n '%89': '\\x89', '%8a': '\\x8a', '%8A': '\\x8a', '%8b': '\\x8b', '%8B': '\\x8b',\n '%8c': '\\x8c', '%8C': '\\x8c', '%8d': '\\x8d', '%8D': '\\x8d', '%8e': '\\x8e',\n '%8E': '\\x8e', '%8f': '\\x8f', '%8F': '\\x8f', '%90': '\\x90', '%91': '\\x91',\n '%92': '\\x92', '%93': '\\x93', '%94': '\\x94', '%95': '\\x95', '%96': '\\x96',\n '%97': '\\x97', '%98': '\\x98', '%99': '\\x99', '%9a': '\\x9a', '%9A': '\\x9a',\n '%9b': '\\x9b', '%9B': '\\x9b', '%9c': '\\x9c', '%9C': '\\x9c', '%9d': '\\x9d',\n '%9D': '\\x9d', '%9e': '\\x9e', '%9E': '\\x9e', '%9f': '\\x9f', '%9F': '\\x9f',\n '%a0': '\\xa0', '%A0': '\\xa0', '%a1': '\\xa1', '%A1': '\\xa1', '%a2': '\\xa2',\n '%A2': '\\xa2', '%a3': '\\xa3', '%A3': '\\xa3', '%a4': '\\xa4', '%A4': '\\xa4',\n '%a5': '\\xa5', '%A5': '\\xa5', '%a6': '\\xa6', '%A6': '\\xa6', '%a7': '\\xa7',\n '%A7': '\\xa7', '%a8': '\\xa8', '%A8': '\\xa8', '%a9': '\\xa9', '%A9': '\\xa9',\n '%aa': '\\xaa', '%Aa': '\\xaa', '%aA': '\\xaa', '%AA': '\\xaa', '%ab': '\\xab',\n '%Ab': '\\xab', '%aB': '\\xab', '%AB': '\\xab', '%ac': '\\xac', '%Ac': '\\xac',\n '%aC': '\\xac', '%AC': '\\xac', '%ad': '\\xad', '%Ad': '\\xad', '%aD': '\\xad',\n '%AD': '\\xad', '%ae': '\\xae', '%Ae': '\\xae', '%aE': '\\xae', '%AE': '\\xae',\n '%af': '\\xaf', '%Af': '\\xaf', '%aF': '\\xaf', '%AF': '\\xaf', '%b0': '\\xb0',\n '%B0': '\\xb0', '%b1': '\\xb1', '%B1': '\\xb1', '%b2': '\\xb2', '%B2': '\\xb2',\n '%b3': '\\xb3', '%B3': '\\xb3', '%b4': '\\xb4', '%B4': '\\xb4', '%b5': '\\xb5',\n '%B5': '\\xb5', '%b6': '\\xb6', '%B6': '\\xb6', '%b7': '\\xb7', '%B7': '\\xb7',\n '%b8': '\\xb8', '%B8': '\\xb8', '%b9': '\\xb9', '%B9': '\\xb9', '%ba': '\\xba',\n '%Ba': '\\xba', '%bA': '\\xba', '%BA': '\\xba', '%bb': '\\xbb', '%Bb': '\\xbb',\n '%bB': '\\xbb', '%BB': '\\xbb', '%bc': '\\xbc', '%Bc': '\\xbc', '%bC': '\\xbc',\n '%BC': '\\xbc', '%bd': '\\xbd', '%Bd': '\\xbd', '%bD': '\\xbd', '%BD': '\\xbd',\n '%be': '\\xbe', '%Be': '\\xbe', '%bE': '\\xbe', '%BE': '\\xbe', '%bf': '\\xbf',\n '%Bf': '\\xbf', '%bF': '\\xbf', '%BF': '\\xbf', '%c0': '\\xc0', '%C0': '\\xc0',\n '%c1': '\\xc1', '%C1': '\\xc1', '%c2': '\\xc2', '%C2': '\\xc2', '%c3': '\\xc3',\n '%C3': '\\xc3', '%c4': '\\xc4', '%C4': '\\xc4', '%c5': '\\xc5', '%C5': '\\xc5',\n '%c6': '\\xc6', '%C6': '\\xc6', '%c7': '\\xc7', '%C7': '\\xc7', '%c8': '\\xc8',\n '%C8': '\\xc8', '%c9': '\\xc9', '%C9': '\\xc9', '%ca': '\\xca', '%Ca': '\\xca',\n '%cA': '\\xca', '%CA': '\\xca', '%cb': '\\xcb', '%Cb': '\\xcb', '%cB': '\\xcb',\n '%CB': '\\xcb', '%cc': '\\xcc', '%Cc': '\\xcc', '%cC': '\\xcc', '%CC': '\\xcc',\n '%cd': '\\xcd', '%Cd': '\\xcd', '%cD': '\\xcd', '%CD': '\\xcd', '%ce': '\\xce',\n '%Ce': '\\xce', '%cE': '\\xce', '%CE': '\\xce', '%cf': '\\xcf', '%Cf': '\\xcf',\n '%cF': '\\xcf', '%CF': '\\xcf', '%d0': '\\xd0', '%D0': '\\xd0', '%d1': '\\xd1',\n '%D1': '\\xd1', '%d2': '\\xd2', '%D2': '\\xd2', '%d3': '\\xd3', '%D3': '\\xd3',\n '%d4': '\\xd4', '%D4': '\\xd4', '%d5': '\\xd5', '%D5': '\\xd5', '%d6': '\\xd6',\n '%D6': '\\xd6', '%d7': '\\xd7', '%D7': '\\xd7', '%d8': '\\xd8', '%D8': '\\xd8',\n '%d9': '\\xd9', '%D9': '\\xd9', '%da': '\\xda', '%Da': '\\xda', '%dA': '\\xda',\n '%DA': '\\xda', '%db': '\\xdb', '%Db': '\\xdb', '%dB': '\\xdb', '%DB': '\\xdb',\n '%dc': '\\xdc', '%Dc': '\\xdc', '%dC': '\\xdc', '%DC': '\\xdc', '%dd': '\\xdd',\n '%Dd': '\\xdd', '%dD': 
'\\xdd', '%DD': '\\xdd', '%de': '\\xde', '%De': '\\xde',\n '%dE': '\\xde', '%DE': '\\xde', '%df': '\\xdf', '%Df': '\\xdf', '%dF': '\\xdf',\n '%DF': '\\xdf', '%e0': '\\xe0', '%E0': '\\xe0', '%e1': '\\xe1', '%E1': '\\xe1',\n '%e2': '\\xe2', '%E2': '\\xe2', '%e3': '\\xe3', '%E3': '\\xe3', '%e4': '\\xe4',\n '%E4': '\\xe4', '%e5': '\\xe5', '%E5': '\\xe5', '%e6': '\\xe6', '%E6': '\\xe6',\n '%e7': '\\xe7', '%E7': '\\xe7', '%e8': '\\xe8', '%E8': '\\xe8', '%e9': '\\xe9',\n '%E9': '\\xe9', '%ea': '\\xea', '%Ea': '\\xea', '%eA': '\\xea', '%EA': '\\xea',\n '%eb': '\\xeb', '%Eb': '\\xeb', '%eB': '\\xeb', '%EB': '\\xeb', '%ec': '\\xec',\n '%Ec': '\\xec', '%eC': '\\xec', '%EC': '\\xec', '%ed': '\\xed', '%Ed': '\\xed',\n '%eD': '\\xed', '%ED': '\\xed', '%ee': '\\xee', '%Ee': '\\xee', '%eE': '\\xee',\n '%EE': '\\xee', '%ef': '\\xef', '%Ef': '\\xef', '%eF': '\\xef', '%EF': '\\xef',\n '%f0': '\\xf0', '%F0': '\\xf0', '%f1': '\\xf1', '%F1': '\\xf1', '%f2': '\\xf2',\n '%F2': '\\xf2', '%f3': '\\xf3', '%F3': '\\xf3', '%f4': '\\xf4', '%F4': '\\xf4',\n '%f5': '\\xf5', '%F5': '\\xf5', '%f6': '\\xf6', '%F6': '\\xf6', '%f7': '\\xf7',\n '%F7': '\\xf7', '%f8': '\\xf8', '%F8': '\\xf8', '%f9': '\\xf9', '%F9': '\\xf9',\n '%fa': '\\xfa', '%Fa': '\\xfa', '%fA': '\\xfa', '%FA': '\\xfa', '%fb': '\\xfb',\n '%Fb': '\\xfb', '%fB': '\\xfb', '%FB': '\\xfb', '%fc': '\\xfc', '%Fc': '\\xfc',\n '%fC': '\\xfc', '%FC': '\\xfc', '%fd': '\\xfd', '%Fd': '\\xfd', '%fD': '\\xfd',\n '%FD': '\\xfd', '%fe': '\\xfe', '%Fe': '\\xfe', '%fE': '\\xfe', '%FE': '\\xfe',\n '%ff': '\\xff', '%Ff': '\\xff', '%fF': '\\xff', '%FF': '\\xff'\n}\n\nfunction encodedReplacer (match) {\n return EncodedLookup[match]\n}\n\nconst STATE_KEY = 0\nconst STATE_VALUE = 1\nconst STATE_CHARSET = 2\nconst STATE_LANG = 3\n\nfunction parseParams (str) {\n const res = []\n let state = STATE_KEY\n let charset = ''\n let inquote = false\n let escaping = false\n let p = 0\n let tmp = ''\n const len = str.length\n\n for (var i = 0; i < len; ++i) { // eslint-disable-line no-var\n const char = str[i]\n if (char === '\\\\' && inquote) {\n if (escaping) { escaping = false } else {\n escaping = true\n continue\n }\n } else if (char === '\"') {\n if (!escaping) {\n if (inquote) {\n inquote = false\n state = STATE_KEY\n } else { inquote = true }\n continue\n } else { escaping = false }\n } else {\n if (escaping && inquote) { tmp += '\\\\' }\n escaping = false\n if ((state === STATE_CHARSET || state === STATE_LANG) && char === \"'\") {\n if (state === STATE_CHARSET) {\n state = STATE_LANG\n charset = tmp.substring(1)\n } else { state = STATE_VALUE }\n tmp = ''\n continue\n } else if (state === STATE_KEY &&\n (char === '*' || char === '=') &&\n res.length) {\n state = char === '*'\n ? 
STATE_CHARSET\n : STATE_VALUE\n res[p] = [tmp, undefined]\n tmp = ''\n continue\n } else if (!inquote && char === ';') {\n state = STATE_KEY\n if (charset) {\n if (tmp.length) {\n tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),\n 'binary',\n charset)\n }\n charset = ''\n } else if (tmp.length) {\n tmp = decodeText(tmp, 'binary', 'utf8')\n }\n if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }\n tmp = ''\n ++p\n continue\n } else if (!inquote && (char === ' ' || char === '\\t')) { continue }\n }\n tmp += char\n }\n if (charset && tmp.length) {\n tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),\n 'binary',\n charset)\n } else if (tmp) {\n tmp = decodeText(tmp, 'binary', 'utf8')\n }\n\n if (res[p] === undefined) {\n if (tmp) { res[p] = tmp }\n } else { res[p][1] = tmp }\n\n return res\n}\n\nmodule.exports = parseParams\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AclRoleAccessorMethods = exports.Acl = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Attach functionality to a {@link Storage.acl} instance. This will add an\n * object for each role group (owners, readers, and writers), with each object\n * containing methods to add or delete a type of entity.\n *\n * As an example, here are a few methods that are created.\n *\n * myBucket.acl.readers.deleteGroup('groupId', function(err) {});\n *\n * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {});\n *\n * myBucket.acl.writers.addDomain('example.com', function(err, acl) {});\n *\n * @private\n */\nclass AclRoleAccessorMethods {\n constructor() {\n this.owners = {};\n this.readers = {};\n this.writers = {};\n /**\n * An object of convenience methods to add or delete owner ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.owners.addAllAuthenticatedUsers`\n * - `myFile.acl.owners.deleteAllAuthenticatedUsers`\n * - `myFile.acl.owners.addAllUsers`\n * - `myFile.acl.owners.deleteAllUsers`\n * - `myFile.acl.owners.addDomain`\n * - `myFile.acl.owners.deleteDomain`\n * - `myFile.acl.owners.addGroup`\n * - `myFile.acl.owners.deleteGroup`\n * - `myFile.acl.owners.addProject`\n * - `myFile.acl.owners.deleteProject`\n * - `myFile.acl.owners.addUser`\n * - `myFile.acl.owners.deleteUser`\n *\n * @name Acl#owners\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as an owner of a file.\n * //-\n * const myBucket = gcs.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n * myFile.acl.owners.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: 
gcs.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.owners.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.owners = {};\n /**\n * An object of convenience methods to add or delete reader ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.readers.addAllAuthenticatedUsers`\n * - `myFile.acl.readers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.readers.addAllUsers`\n * - `myFile.acl.readers.deleteAllUsers`\n * - `myFile.acl.readers.addDomain`\n * - `myFile.acl.readers.deleteDomain`\n * - `myFile.acl.readers.addGroup`\n * - `myFile.acl.readers.deleteGroup`\n * - `myFile.acl.readers.addProject`\n * - `myFile.acl.readers.deleteProject`\n * - `myFile.acl.readers.addUser`\n * - `myFile.acl.readers.deleteUser`\n *\n * @name Acl#readers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a reader of a file.\n * //-\n * myFile.acl.readers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.READER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.readers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.readers = {};\n /**\n * An object of convenience methods to add or delete writer ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.writers.addAllAuthenticatedUsers`\n * - `myFile.acl.writers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.writers.addAllUsers`\n * - `myFile.acl.writers.deleteAllUsers`\n * - `myFile.acl.writers.addDomain`\n * - `myFile.acl.writers.deleteDomain`\n * - `myFile.acl.writers.addGroup`\n * - `myFile.acl.writers.deleteGroup`\n * - `myFile.acl.writers.addProject`\n * - `myFile.acl.writers.deleteProject`\n * - `myFile.acl.writers.addUser`\n * - `myFile.acl.writers.deleteUser`\n *\n * @name Acl#writers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a writer of a file.\n * //-\n * myFile.acl.writers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.WRITER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.writers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.writers = {};\n AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this));\n }\n _assignAccessMethods(role) {\n const accessMethods = AclRoleAccessorMethods.accessMethods;\n const entities = AclRoleAccessorMethods.entities;\n const roleGroup = role.toLowerCase() + 's';\n // 
eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[roleGroup] = entities.reduce((acc, entity) => {\n const isPrefix = entity.charAt(entity.length - 1) === '-';\n accessMethods.forEach(accessMethod => {\n let method = accessMethod + entity[0].toUpperCase() + entity.substring(1);\n if (isPrefix) {\n method = method.replace('-', '');\n }\n // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the\n // more complex API of specifying an `entity` and `role`.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n acc[method] = (entityId, options, callback) => {\n let apiEntity;\n if (typeof options === 'function') {\n callback = options;\n options = {};\n }\n if (isPrefix) {\n apiEntity = entity + entityId;\n }\n else {\n // If the entity is not a prefix, it is a special entity group\n // that does not require further details. The accessor methods\n // only accept a callback.\n apiEntity = entity;\n callback = entityId;\n }\n options = Object.assign({\n entity: apiEntity,\n role,\n }, options);\n const args = [options];\n if (typeof callback === 'function') {\n args.push(callback);\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return this[accessMethod].apply(this, args);\n };\n });\n return acc;\n }, {});\n }\n}\nexports.AclRoleAccessorMethods = AclRoleAccessorMethods;\nAclRoleAccessorMethods.accessMethods = ['add', 'delete'];\nAclRoleAccessorMethods.entities = [\n // Special entity groups that do not require further specification.\n 'allAuthenticatedUsers',\n 'allUsers',\n // Entity groups that require specification, e.g. `user-email@example.com`.\n 'domain-',\n 'group-',\n 'project-',\n 'user-',\n];\nAclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER'];\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. 
Permissions define the actions that can be performed against an\n * object or bucket (for example, `READ` or `WRITE`); the entity defines who the\n * permission applies to (for example, a specific user or group of users).\n *\n * Where an `entity` value is accepted, we follow the format the Cloud Storage\n * API expects.\n *\n * Refer to\n * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls\n * for the most up-to-date values.\n *\n * - `user-userId`\n * - `user-email`\n * - `group-groupId`\n * - `group-email`\n * - `domain-domain`\n * - `project-team-projectId`\n * - `allUsers`\n * - `allAuthenticatedUsers`\n *\n * Examples:\n *\n * - The user \"liz@example.com\" would be `user-liz@example.com`.\n * - The group \"example@googlegroups.com\" would be\n * `group-example@googlegroups.com`.\n * - To refer to all members of the Google Apps for Business domain\n * \"example.com\", the entity would be `domain-example.com`.\n *\n * For more detailed information, see\n * {@link http://goo.gl/6qBBPO| About Access Control Lists}.\n *\n * @constructor Acl\n * @mixin\n * @param {object} options Configuration options.\n */\nclass Acl extends AclRoleAccessorMethods {\n constructor(options) {\n super();\n this.pathPrefix = options.pathPrefix;\n this.request_ = options.request;\n }\n /**\n * @typedef {array} AddAclResponse\n * @property {object} 0 The Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback AddAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Add access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be added.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link https://cloud.google.com/storage/docs/access-control Access\n * Control}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {AddAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * // Here is how you would grant ownership permissions to a user on a\n * specific\n * // revision of a file.\n * //-\n * myFile.acl.add({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n 
*\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_add_file_owner\n * Example of adding an owner to a file:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n */\n add(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'POST',\n uri: '',\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n json: {\n entity: options.entity,\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * @typedef {array} RemoveAclResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback RemoveAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation}\n *\n * @param {object} options Configuration object.\n * @param {string} options.entity Whose permissions will be revoked.\n * @param {int} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {RemoveAclCallback} callback The callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.delete({\n * entity: 'user-useremail@example.com'\n * }, function(err, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.delete({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_file_owner\n * Example of removing an owner from a bucket:\n */\n delete(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'DELETE',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n }, (err, resp) => {\n callback(err, resp);\n });\n }\n /**\n * @typedef {array} GetAclResponse\n * @property {object|object[]} 0 Single or 
array of Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object|object[]} acl Single or array of Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get access controls on a {@link Bucket} or {@link File}. If\n * an entity is omitted, you will receive an array of all applicable access\n * controls.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation}\n *\n * @param {object|function} [options] Configuration options. If you want to\n * receive a list of all access controls, pass the callback function as\n * the only argument.\n * @param {string} options.entity Whose permissions will be fetched.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.get({\n * entity: 'user-useremail@example.com'\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // Get all access controls.\n * //-\n * myBucket.acl.get(function(err, aclObjects, apiResponse) {\n * // aclObjects = [\n * // {\n * // entity: 'user-useremail@example.com',\n * // role: 'owner'\n * // }\n * // ]\n * });\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.get({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.get().then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl\n * Example of printing a file's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl_for_user\n * Example of printing a file's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n */\n get(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb;\n let path = '';\n const query = {};\n if (options) {\n path = '/' + encodeURIComponent(options.entity);\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n }\n this.request({\n uri: path,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n let results;\n if (resp.items) {\n results = resp.items.map(this.makeAclObject_);\n }\n else {\n results = this.makeAclObject_(resp);\n }\n callback(null, results, resp);\n });\n }\n /**\n * @typedef {array} UpdateAclResponse\n * @property {object} 0 The updated Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UpdateAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The updated Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Update access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be updated.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link Storage.acl}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {UpdateAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE\n * };\n *\n * myBucket.acl.update(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.update({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.update(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n update(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'PUT',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n json: {\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * Transform API responses to a consistent object format.\n *\n * @private\n */\n makeAclObject_(accessControlObject) {\n const obj = {\n entity: accessControlObject.entity,\n role: accessControlObject.role,\n };\n if (accessControlObject.projectTeam) {\n obj.projectTeam = accessControlObject.projectTeam;\n }\n return obj;\n }\n /**\n * Patch requests up 
to the bucket's request object.\n *\n * @private\n *\n * @param {string} method Action.\n * @param {string} path Request path.\n * @param {*} query Request query object.\n * @param {*} body Request body contents.\n * @param {function} callback Callback function.\n */\n request(reqOpts, callback) {\n reqOpts.uri = this.pathPrefix + reqOpts.uri;\n this.request_(reqOpts, callback);\n }\n}\nexports.Acl = Acl;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Acl, {\n exclude: ['request'],\n});\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst fs = __importStar(require(\"fs\"));\nconst mime = __importStar(require(\"mime-types\"));\nconst path = __importStar(require(\"path\"));\nconst p_limit_1 = __importDefault(require(\"p-limit\"));\nconst util_1 = require(\"util\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nconst util_js_1 = require(\"./util.js\");\nconst acl_js_1 = require(\"./acl.js\");\nconst file_js_1 = require(\"./file.js\");\nconst iam_js_1 = require(\"./iam.js\");\nconst notification_js_1 = require(\"./notification.js\");\nconst storage_js_1 = require(\"./storage.js\");\nconst signer_js_1 = require(\"./signer.js\");\nconst stream_1 = require(\"stream\");\nconst url_1 = require(\"url\");\nvar BucketActionToHTTPMethod;\n(function (BucketActionToHTTPMethod) {\n BucketActionToHTTPMethod[\"list\"] = \"GET\";\n})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {}));\nvar AvailableServiceObjectMethods;\n(function (AvailableServiceObjectMethods) {\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"setMetadata\"] = 0] = \"setMetadata\";\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"delete\"] = 1] = \"delete\";\n})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {}));\nvar BucketExceptionMessages;\n(function (BucketExceptionMessages) {\n BucketExceptionMessages[\"PROVIDE_SOURCE_FILE\"] = \"You must provide at least one source file.\";\n BucketExceptionMessages[\"DESTINATION_FILE_NOT_SPECIFIED\"] = \"A destination file must be specified.\";\n BucketExceptionMessages[\"CHANNEL_ID_REQUIRED\"] = \"An ID is required to create a channel.\";\n BucketExceptionMessages[\"TOPIC_NAME_REQUIRED\"] = \"A valid topic name is required.\";\n BucketExceptionMessages[\"CONFIGURATION_OBJECT_PREFIX_REQUIRED\"] = \"A configuration object with a prefix is required.\";\n BucketExceptionMessages[\"SPECIFY_FILE_NAME\"] = \"A file name must be specified.\";\n BucketExceptionMessages[\"METAGENERATION_NOT_PROVIDED\"] = \"A metageneration must be provided.\";\n BucketExceptionMessages[\"SUPPLY_NOTIFICATION_ID\"] = \"You must supply a notification ID.\";\n})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {}));\n/**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n/**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n/**\n * @callback Crc32cGeneratorUpdateCallback\n * A 
method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n/**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n/**\n * A function that generates a CRC32C Validator. Defaults to {@link CRC32C}\n *\n * @name Bucket#crc32cGenerator\n * @type {CRC32CValidator}\n */\n/**\n * Get and set IAM policies for your bucket.\n *\n * @name Bucket#iam\n * @mixes Iam\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Get the IAM policy for your bucket.\n * //-\n * bucket.iam.getPolicy(function(err, policy) {\n * console.log(policy);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy().then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a Bucket instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * Buckets also have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. Default ACLs specify permissions that all new\n * objects added to the bucket will inherit by default. 
You can add, delete,\n * get, and update entities and permissions for these as well with\n * {@link Bucket#acl.default}.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control Lists}\n * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs}\n *\n * @name Bucket#acl\n * @mixes Acl\n * @property {Acl} default Cloud Storage Buckets have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. You can add, delete, get, and update entities and\n * permissions for these as well. The method signatures and examples are all\n * the same, after only prefixing the method call with `default`.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Make a bucket's contents publicly readable.\n * //-\n * const myBucket = storage.bucket('my-bucket');\n *\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n */\n/**\n * The API-formatted resource description of the bucket.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. 
To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Bucket#metadata\n * @type {object}\n */\n/**\n * The bucket's name.\n * @name Bucket#name\n * @type {string}\n */\n/**\n * Get {@link File} objects for the files currently in the bucket as a\n * readable object stream.\n *\n * @method Bucket#getFilesStream\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @returns {ReadableStream} A readable stream that emits {@link File} instances.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFilesStream()\n * .on('error', console.error)\n * .on('data', function(file) {\n * // file is a File object.\n * })\n * .on('end', function() {\n * // All files retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * bucket.getFilesStream()\n * .on('data', function(file) {\n * this.end();\n * });\n *\n * //-\n * // If you're filtering files with a delimiter, you should use\n * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to\n * // preserve the `apiResponse` argument.\n * //-\n * const prefixes = [];\n *\n * function callback(err, files, nextQuery, apiResponse) {\n * prefixes = prefixes.concat(apiResponse.prefixes);\n *\n * if (nextQuery) {\n * bucket.getFiles(nextQuery, callback);\n * } else {\n * // prefixes = The finished array of prefixes.\n * }\n * }\n *\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, callback);\n * ```\n */\n/**\n * Create a Bucket object to interact with a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Storage} storage A {@link Storage} instance.\n * @param {string} name The name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * ```\n */\nclass Bucket extends index_js_1.ServiceObject {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n getFilesStream(query) {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n constructor(storage, name, options) {\n var _a, _b, _c, _d;\n options = options || {};\n // Allow for \"gs://\"-style input, and strip any trailing slashes.\n name = name.replace(/^gs:\\/\\//, '').replace(/\\/+$/, '');\n const requestQueryObject = {};\n if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n requestQueryObject.ifGenerationMatch =\n options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n requestQueryObject.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n requestQueryObject.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n requestQueryObject.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n const userProject = options.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * Create a bucket.\n *\n * @method Bucket#create\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.create(function(err, bucket, apiResponse) {\n * if (!err) {\n * // The bucket was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.create().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * IamDeleteBucketOptions Configuration options.\n * @property {boolean} [ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} DeleteBucketResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation}\n *\n * @method Bucket#delete\n * @param {DeleteBucketOptions} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_delete_bucket\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} BucketExistsResponse\n * @property {boolean} 0 Whether the {@link Bucket} exists.\n */\n /**\n * @callback BucketExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link Bucket} exists.\n */\n /**\n * Check if the bucket exists.\n *\n * @method Bucket#exists\n * @param {BucketExistsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {BucketExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * 
const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get()\n * @property {boolean} [autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetBucketResponse\n * @property {Bucket} 0 The {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The {@link Bucket}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a bucket if it exists.\n *\n * You may optionally use this to \"get or create\" an object by providing\n * an object with `autoCreate` set to `true`. Any extra configuration that\n * is normally required for the `create` method must be contained within\n * this object as well.\n *\n * @method Bucket#get\n * @param {GetBucketOptions} [options] Configuration options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.get(function(err, bucket, apiResponse) {\n * // `bucket.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.get().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetBucketMetadataResponse\n * @property {object} 0 The bucket metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Get the bucket's metadata.\n *\n * To set metadata, see {@link Bucket#setMetadata}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation}\n *\n * @method Bucket#getMetadata\n * @param {GetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is 
omitted, we'll return a Promise.\n * //-\n * bucket.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_get_requester_pays_status\n * Example of retrieving the requester pays status of a bucket:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} SetBucketMetadataResponse\n * @property {object} apiResponse The full API response.\n */\n /**\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * Set the bucket's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @method Bucket#setMetadata\n * @param {object} metadata The metadata you wish to set.\n * @param {SetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Set website metadata field on the bucket.\n * //-\n * const metadata = {\n * website: {\n * mainPageSuffix: 'http://example.com',\n * notFoundPage: 'http://example.com/404.html'\n * }\n * };\n *\n * bucket.setMetadata(metadata, function(err, apiResponse) {});\n *\n * //-\n * // Enable versioning for your bucket.\n * //-\n * bucket.setMetadata({\n * versioning: {\n * enabled: true\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Enable KMS encryption for objects within this bucket.\n * //-\n * bucket.setMetadata({\n * encryption: {\n * defaultKmsKeyName: 'projects/grape-spaceship-123/...'\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Set the default event-based hold value for new objects in this\n * // bucket.\n * //-\n * bucket.setMetadata({\n * defaultEventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Remove object lifecycle rules.\n * //-\n * bucket.setMetadata({\n * lifecycle: null\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: storage,\n baseUrl: '/b',\n id: name,\n createMethod: storage.createBucket.bind(storage),\n methods,\n });\n this.name = name;\n this.storage = storage;\n this.userProject = options.userProject;\n this.acl = new acl_js_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.acl.default = new acl_js_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/defaultObjectAcl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.storage.crc32cGenerator;\n this.iam = new iam_js_1.Iam(this);\n this.getFilesStream = paginator_1.paginator.streamify('getFiles');\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n this.instancePreconditionOpts = options === 
null || options === void 0 ? void 0 : options.preconditionOpts;\n }\n /**\n * The bucket's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * // `gs://my-bucket`\n * const href = bucket.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = new url_1.URL('gs://');\n uri.host = this.name;\n return uri;\n }\n /**\n * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule().\n * @property {boolean} [append=true] The new rules will be appended to any\n * pre-existing rules.\n */\n /**\n *\n * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects\n * in this bucket.\n * @property {string|object} action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @property {object} condition Condition a bucket must meet before the\n * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @property {string} [storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to. Please see\n * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions.\n */\n /**\n * Add an object lifecycle management rule to the bucket.\n *\n * By default, an Object Lifecycle Management rule provided to this method\n * will be included to the existing policy. To replace all existing rules,\n * supply the `options` argument, setting `append` to `false`.\n *\n * To add multiple rules, pass a list to the `rule` parameter. Calling this\n * function multiple times asynchronously does not guarantee that all rules\n * are added correctly.\n *\n * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects\n * in this bucket.\n * @param {string|object} rule.action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @param {object} rule.condition Condition a bucket must meet before the\n * action occurson the bucket. 
Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @param {string} [rule.storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to.\n * @param {AddLifecycleRuleOptions} [options] Configuration object.\n * @param {boolean} [options.append=true] Append the new rule to the existing\n * policy.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Automatically have an object deleted from this bucket once it is 3 years\n * // of age.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * const lifecycleRules = bucket.metadata.lifecycle.rule;\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // By default, the rule you provide will be added to the existing policy.\n * // Optionally, you can disable this behavior to replace all of the\n * // pre-existing rules.\n * //-\n * const options = {\n * append: false\n * };\n *\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, options, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // All rules have been replaced with the new \"delete\" rule.\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // For objects created before 2018, \"downgrade\" the storage class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'setStorageClass',\n * storageClass: 'COLDLINE',\n * condition: {\n * createdBefore: new Date('2018')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete objects created before 2016 which have the Coldline storage\n * // class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * matchesStorageClass: [\n * 'COLDLINE'\n * ],\n * createdBefore: new Date('2016')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceNoncurrentTime: 100\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * noncurrentTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a customTime that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceCustomTime: 100\n * }\n * }, function(err, apiResponse) ());\n *\n * //-\n * // Delete object that has a customTime before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * customTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n * ```\n */\n addLifecycleRule(rule, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback 
=== 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n options = options || {};\n const rules = Array.isArray(rule) ? rule : [rule];\n for (const curRule of rules) {\n if (curRule.condition.createdBefore instanceof Date) {\n curRule.condition.createdBefore = curRule.condition.createdBefore\n .toISOString()\n .replace(/T.+$/, '');\n }\n if (curRule.condition.customTimeBefore instanceof Date) {\n curRule.condition.customTimeBefore = curRule.condition.customTimeBefore\n .toISOString()\n .replace(/T.+$/, '');\n }\n if (curRule.condition.noncurrentTimeBefore instanceof Date) {\n curRule.condition.noncurrentTimeBefore =\n curRule.condition.noncurrentTimeBefore\n .toISOString()\n .replace(/T.+$/, '');\n }\n }\n if (options.append === false) {\n this.setMetadata({ lifecycle: { rule: rules } }, options, callback);\n return;\n }\n // The default behavior appends the previously-defined lifecycle rules with\n // the new ones just passed in by the user.\n this.getMetadata((err, metadata) => {\n var _a, _b;\n if (err) {\n callback(err);\n return;\n }\n const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule)\n ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule\n : [];\n this.setMetadata({\n lifecycle: { rule: currentLifecycleRules.concat(rules) },\n }, options, callback);\n });\n }\n /**\n * @typedef {object} CombineOptions\n * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CombineCallback\n * @param {?Error} err Request error, if any.\n * @param {File} newFile The new {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CombineResponse\n * @property {File} 0 The new {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * Combine multiple files into one new file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation}\n *\n * @throws {Error} if a non-array is provided as sources argument.\n * @throws {Error} if no sources are provided.\n * @throws {Error} if no destination is provided.\n *\n * @param {string[]|File[]} sources The source files that will be\n * combined.\n * @param {string|File} destination The file you would like the\n * source files combined into.\n * @param {CombineOptions} [options] Configuration options.\n * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. 
Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {CombineCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const logBucket = storage.bucket('log-bucket');\n *\n * const sources = [\n * logBucket.file('2013-logs.txt'),\n * logBucket.file('2014-logs.txt')\n * ];\n *\n * const allLogs = logBucket.file('all-logs.txt');\n *\n * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) {\n * // newFile === allLogs\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * logBucket.combine(sources, allLogs).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n combine(sources, destination, optionsOrCallback, callback) {\n var _a;\n if (!Array.isArray(sources) || sources.length === 0) {\n throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE);\n }\n if (!destination) {\n throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required\n AvailableServiceObjectMethods.setMetadata, // Same as above\n options);\n const convertToFile = (file) => {\n if (file instanceof file_js_1.File) {\n return file;\n }\n return this.file(file);\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n sources = sources.map(convertToFile);\n const destinationFile = convertToFile(destination);\n callback = callback || index_js_1.util.noop;\n if (!destinationFile.metadata.contentType) {\n const destinationContentType = mime.contentType(destinationFile.name);\n if (destinationContentType) {\n destinationFile.metadata.contentType = destinationContentType;\n }\n }\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) ===\n undefined &&\n options.ifGenerationMatch === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n if (options.ifGenerationMatch === undefined) {\n Object.assign(options, destinationFile.instancePreconditionOpts, options);\n }\n // Make the request from the destination File object.\n destinationFile.request({\n method: 'POST',\n uri: '/compose',\n maxRetries,\n json: {\n destination: {\n contentType: destinationFile.metadata.contentType,\n },\n sourceObjects: sources.map(source => {\n const sourceObject = {\n name: source.name,\n };\n if (source.metadata && source.metadata.generation) {\n sourceObject.generation = parseInt(source.metadata.generation.toString());\n }\n return sourceObject;\n }),\n },\n qs: options,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, destinationFile, resp);\n });\n }\n /**\n * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}.\n *\n * @typedef {object} CreateChannelConfig\n * @property {string} address The address where notifications are\n * delivered for this channel.\n * @property {string} [delimiter] Returns results in a directory-like mode.\n * @property {number} [maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {string} [projection=noAcl] Set of properties to return.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions=false] If `true`, lists all versions of an object\n * as distinct results.\n */\n /**\n * @typedef {object} CreateChannelOptions\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} CreateChannelResponse\n * @property {Channel} 0 The new {@link Channel}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateChannelCallback\n * @param {?Error} err Request error, if any.\n * @param {Channel} channel The new {@link Channel}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Create a channel that will be notified when objects in this bucket changes.\n *\n * @throws {Error} If an ID is not provided.\n * @throws {Error} If an address is not provided.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation}\n *\n * @param {string} id The ID of the channel to create.\n * @param {CreateChannelConfig} config Configuration for creating channel.\n * @param {string} config.address The address where notifications are\n * delivered for this channel.\n * @param {string} [config.delimiter] Returns results in a directory-like mode.\n * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @param {string} [config.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} 
[config.prefix] Filter results to objects whose names begin\n * with this prefix.\n * @param {string} [config.projection=noAcl] Set of properties to return.\n * @param {string} [config.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [config.versions=false] If `true`, lists all versions of an object\n * as distinct results.\n * @param {CreateChannelOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateChannelCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const id = 'new-channel-id';\n *\n * const config = {\n * address: 'https://...'\n * };\n *\n * bucket.createChannel(id, config, function(err, channel, apiResponse) {\n * if (!err) {\n * // Channel created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.createChannel(id, config).then(function(data) {\n * const channel = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n createChannel(id, config, optionsOrCallback, callback) {\n if (typeof id !== 'string') {\n throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n method: 'POST',\n uri: '/o/watch',\n json: Object.assign({\n id,\n type: 'web_hook',\n }, config),\n qs: options,\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const resourceId = apiResponse.resourceId;\n const channel = this.storage.channel(id, resourceId);\n channel.metadata = apiResponse;\n callback(null, channel, apiResponse);\n });\n }\n /**\n * Metadata to set for the Notification.\n *\n * @typedef {object} CreateNotificationOptions\n * @property {object} [customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @property {string[]} [eventTypes] If present, only send notifications about\n * listed event types. If empty, sent notifications for all event types.\n * @property {string} [objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @property {string} [payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CreateNotificationCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification} notification The new {@link Notification}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CreateNotificationResponse\n * @property {Notification} 0 The new {@link Notification}.\n * @property {object} 1 The full API response.\n */\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. 
If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationOptions} [options] Metadata to set for the\n * notification.\n * @param {object} [options.customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @param {string[]} [options.eventTypes] If present, only send notifications about\n * listed event types. If empty, sent notifications for all event types.\n * @param {string} [options.objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @param {string} [options.payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n * @see Notification#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const callback = function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * };\n *\n * myBucket.createNotification('my-topic', callback);\n *\n * //-\n * // Configure the nofiication by providing Notification metadata.\n * //-\n * const metadata = {\n * objectNamePrefix: 'prefix-'\n * };\n *\n * myBucket.createNotification('my-topic', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.createNotification('my-topic').then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/createNotification.js\n * region_tag:storage_create_bucket_notifications\n * Another example:\n */\n createNotification(topic, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n const topicIsObject = topic !== null && typeof topic === 'object';\n if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n topic = topic.name;\n }\n if (typeof topic !== 'string') {\n throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED);\n }\n const body = Object.assign({ topic }, options);\n if (body.topic.indexOf('projects') !== 0) {\n body.topic = 'projects/{{projectId}}/topics/' + body.topic;\n }\n body.topic = '//pubsub.googleapis.com/' + body.topic;\n if (!body.payloadFormat) {\n body.payloadFormat = 'JSON_API_V1';\n }\n const query = {};\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n this.request({\n method: 'POST',\n uri: '/notificationConfigs',\n json: (0, util_js_1.convertObjKeysToSnakeCase)(body),\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const notification = this.notification(apiResponse.id);\n notification.metadata = 
apiResponse;\n callback(null, notification, apiResponse);\n });\n }\n /**\n * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles}\n * for all of the supported properties.\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n */\n /**\n * @callback DeleteFilesCallback\n * @param {?Error|?Error[]} err Request error, if any, or array of errors from\n * files that were not able to be deleted.\n * @param {object} [apiResponse] The full API response.\n */\n /**\n * Iterate over the bucket's files, calling `file.delete()` on each.\n *\n * This is not an atomic request. A delete attempt will be\n * made for each file individually. Any one can fail, in which case only a\n * portion of the files you intended to be deleted would have.\n *\n * Operations are performed in parallel, up to 10 at once. The first error\n * breaks the loop and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors until all files have had a chance\n * to be processed.\n *\n * File preconditions cannot be passed to this function. It will not retry unless\n * the idempotency strategy is set to retry always.\n *\n * The `query` object passed as the first argument will also be passed to\n * {@link Bucket#getFiles}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles}\n * @param {boolean} [query.force] Suppress errors until all files have been\n * processed.\n * @param {DeleteFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the files in the bucket.\n * //-\n * bucket.deleteFiles(function(err) {});\n *\n * //-\n * // By default, if a file cannot be deleted, this method will stop deleting\n * // files from your bucket. You can override this setting with `force:\n * // true`.\n * //-\n * bucket.deleteFiles({\n * force: true\n * }, function(errors) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * });\n *\n * //-\n * // The first argument to this method acts as a query to\n * // {@link Bucket#getFiles}. 
As an example, you can delete files\n * // which match a prefix.\n * //-\n * bucket.deleteFiles({\n * prefix: 'images/'\n * }, function(err) {\n * if (!err) {\n * // All files in the `images` directory have been deleted.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteFiles().then(function() {});\n * ```\n */\n deleteFiles(queryOrCallback, callback) {\n let query = {};\n if (typeof queryOrCallback === 'function') {\n callback = queryOrCallback;\n }\n else if (queryOrCallback) {\n query = queryOrCallback;\n }\n const MAX_PARALLEL_LIMIT = 10;\n const MAX_QUEUE_SIZE = 1000;\n const errors = [];\n const deleteFile = (file) => {\n return file.delete(query).catch(err => {\n if (!query.force) {\n throw err;\n }\n errors.push(err);\n });\n };\n (async () => {\n try {\n let promises = [];\n const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT);\n const filesStream = this.getFilesStream(query);\n for await (const curFile of filesStream) {\n if (promises.length >= MAX_QUEUE_SIZE) {\n await Promise.all(promises);\n promises = [];\n }\n promises.push(limit(() => deleteFile(curFile)).catch(e => {\n filesStream.destroy();\n throw e;\n }));\n }\n await Promise.all(promises);\n callback(errors.length > 0 ? errors : null);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @deprecated\n * @typedef {array} DeleteLabelsResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @deprecated\n * @callback DeleteLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata Bucket's metadata.\n */\n /**\n * @deprecated Use setMetadata directly\n * Delete one or more labels from this bucket.\n *\n * @param {string|string[]} [labels] The labels to delete. 
If no labels are\n * provided, all of the labels are removed.\n * @param {DeleteLabelsCallback} [callback] Callback function.\n * @param {DeleteLabelsOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the labels from this bucket.\n * //-\n * bucket.deleteLabels(function(err, apiResponse) {});\n *\n * //-\n * // Delete a single label.\n * //-\n * bucket.deleteLabels('labelone', function(err, apiResponse) {});\n *\n * //-\n * // Delete a specific set of labels.\n * //-\n * bucket.deleteLabels([\n * 'labelone',\n * 'labeltwo'\n * ], function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteLabels().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) {\n let labels = new Array();\n let options = {};\n if (typeof labelsOrCallbackOrOptions === 'function') {\n callback = labelsOrCallbackOrOptions;\n }\n else if (typeof labelsOrCallbackOrOptions === 'string') {\n labels = [labelsOrCallbackOrOptions];\n }\n else if (Array.isArray(labelsOrCallbackOrOptions)) {\n labels = labelsOrCallbackOrOptions;\n }\n else if (labelsOrCallbackOrOptions) {\n options = labelsOrCallbackOrOptions;\n }\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n const deleteLabels = (labels) => {\n const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => {\n nullLabelMap[labelKey] = null;\n return nullLabelMap;\n }, {});\n if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) {\n this.setLabels(nullLabelMap, options, callback);\n }\n else {\n this.setLabels(nullLabelMap, callback);\n }\n };\n if (labels.length === 0) {\n this.getLabels((err, labels) => {\n if (err) {\n callback(err);\n return;\n }\n deleteLabels(Object.keys(labels));\n });\n }\n else {\n deleteLabels(labels);\n }\n }\n /**\n * @typedef {array} DisableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DisableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only\n *\n * This feature is not yet widely-available.\n *
\n *\n * Disable `requesterPays` functionality from this bucket.\n *\n * @param {DisableRequesterPaysCallback} [callback] Callback function.\n * @param {DisableRequesterPaysOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.disableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality disabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.disableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_disable_requester_pays\n * Example of disabling requester pays:\n */\n disableRequesterPays(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: false,\n },\n }, options, callback);\n }\n /**\n * Configuration object for enabling logging.\n *\n * @typedef {object} EnableLoggingOptions\n * @property {string|Bucket} [bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @property {string} prefix A unique prefix for log object names.\n */\n /**\n * Enable logging functionality for this bucket. This will make two API\n * requests, first to grant Cloud Storage WRITE permission to the bucket, then\n * to set the appropriate configuration on the Bucket's metadata.\n *\n * @param {EnableLoggingOptions} config Configuration options.\n * @param {string|Bucket} [config.bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @param {string} config.prefix A unique prefix for log object names.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const config = {\n * prefix: 'log'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {\n * if (!err) {\n * // Logging functionality enabled successfully.\n * }\n * });\n *\n * ```\n * @example\n * Optionally, provide a destination bucket.\n * ```\n * const config = {\n * prefix: 'log',\n * bucket: 'destination-bucket'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {});\n * ```\n *\n * @example\n * If the callback is omitted, we'll return a Promise.\n * ```\n * bucket.enableLogging(config).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n enableLogging(config, callback) {\n if (!config ||\n typeof config === 'function' ||\n typeof config.prefix === 'undefined') {\n throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED);\n }\n let logBucket = this.id;\n if (config.bucket && config.bucket instanceof Bucket) {\n logBucket = config.bucket.id;\n }\n else if (config.bucket && typeof config.bucket === 'string') {\n logBucket = config.bucket;\n }\n const options = {};\n if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) {\n options.ifMetagenerationMatch = config.ifMetagenerationMatch;\n }\n if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) {\n options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch;\n }\n (async () => {\n try {\n const [policy] = await this.iam.getPolicy();\n policy.bindings.push({\n members: ['group:cloud-storage-analytics@google.com'],\n role: 'roles/storage.objectCreator',\n });\n await this.iam.setPolicy(policy);\n this.setMetadata({\n logging: {\n logBucket,\n logObjectPrefix: config.prefix,\n },\n }, options, callback);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @typedef {array} EnableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback EnableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only\n *\n * This feature is not yet widely-available.\n *
\n *\n * Enable `requesterPays` functionality for this bucket. This enables you, the\n * bucket owner, to have the requesting user assume the charges for the access\n * to your bucket and its contents.\n *\n * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback]\n * Callback function or precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.enableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality enabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.enableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_enable_requester_pays\n * Example of enabling requester pays:\n */\n enableRequesterPays(optionsOrCallback, cb) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: true,\n },\n }, options, cb);\n }\n /**\n * Create a {@link File} object. See {@link File} to see how to handle\n * the different use cases you may have.\n *\n * @param {string} name The name of the file in this bucket.\n * @param {FileOptions} [options] Configuration options.\n * @param {string|number} [options.generation] Only use a specific revision of\n * this file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * KMS key ring must use the same location as the bucket.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @returns {File}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-existing-file.png');\n * ```\n */\n file(name, options) {\n if (!name) {\n throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME);\n }\n return new file_js_1.File(this, name, options);\n }\n /**\n * @typedef {array} GetFilesResponse\n * @property {File[]} 0 Array of {@link File} instances.\n * @param {object} nextQuery 1 A query object to receive more results.\n * @param {object} apiResponse 2 The full API response.\n */\n /**\n * @callback GetFilesCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Array of {@link File} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Query object for listing files.\n *\n * @typedef {object} GetFilesOptions\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {string} [delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. 
Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @property {string} [endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {boolean} [includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {string} [matchGlob] A glob pattern used to filter results,\n * for example foo*bar\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions] If true, returns File objects scoped to\n * their versions.\n */\n /**\n * Get {@link File} objects for the files currently in the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation}\n *\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @param {boolean} [query.autoPaginate=true] Have pagination handled\n * automatically.\n * @param {string} [query.delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @param {string} [query.endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. 
If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @param {string} [query.prefix] Filter results to objects whose names begin\n * with this prefix.\n * @param {number} [query.maxApiCalls] Maximum number of API calls to make.\n * @param {number} [query.maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @param {string} [query.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} [query.startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {string} [query.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [query.versions] If true, returns File objects scoped to\n * their versions.\n * @param {GetFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFiles(function(err, files) {\n * if (!err) {\n * // files is an array of File objects.\n * }\n * });\n *\n * //-\n * // If your bucket has versioning enabled, you can get all of your files\n * // scoped to their generation.\n * //-\n * bucket.getFiles({\n * versions: true\n * }, function(err, files) {\n * // Each file is scoped to its generation.\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, files, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * bucket.getFiles(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * files[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * files[0].getMetadata(function(err, metadata) {});\n * };\n *\n * bucket.getFiles({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getFiles().then(function(data) {\n * const files = data[0];\n * });\n *\n * ```\n * @example\n *
 * Simulating a File System
 *
 * With `autoPaginate: false`, it's possible to iterate over files which
 * incorporate a common structure using a delimiter.
 *
 * Consider the following remote objects:
 *
 *   1. \"a\"
 *   2. \"a/b/c/d\"
 *   3. \"b/d/e\"
 *
 * Using a delimiter of `/` will return a single file, \"a\".
 *
 * `apiResponse.prefixes` will return the \"sub-directories\" that were found:
 *
 *   1. \"a/\"
 *   2. \"b/\"
\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // files = [\n * // {File} // File object for file \"a\"\n * // ]\n *\n * // apiResponse.prefixes = [\n * // 'a/',\n * // 'b/'\n * // ]\n * });\n * ```\n *\n * @example\n * Using prefixes, it's now possible to simulate a file system with follow-up requests.\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/',\n * prefix: 'a/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // No files found within \"directory\" a.\n * // files = []\n *\n * // However, a \"sub-directory\" was found.\n * // This prefix can be used to continue traversing the \"file system\".\n * // apiResponse.prefixes = [\n * // 'a/b/'\n * // ]\n * });\n * ```\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files\n * Another example:\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files_with_prefix\n * Example of listing files, filtered by a prefix:\n */\n getFiles(queryOrCallback, callback) {\n let query = typeof queryOrCallback === 'object' ? queryOrCallback : {};\n if (!callback) {\n callback = queryOrCallback;\n }\n query = Object.assign({}, query);\n this.request({\n uri: '/o',\n qs: query,\n }, (err, resp) => {\n if (err) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const files = itemsArray.map((file) => {\n const options = {};\n if (query.versions) {\n options.generation = file.generation;\n }\n if (file.kmsKeyName) {\n options.kmsKeyName = file.kmsKeyName;\n }\n const fileInstance = this.file(file.name, options);\n fileInstance.metadata = file;\n return fileInstance;\n });\n let nextQuery = null;\n if (resp.nextPageToken) {\n nextQuery = Object.assign({}, query, {\n pageToken: resp.nextPageToken,\n });\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(null, files, nextQuery, resp);\n });\n }\n /**\n * @deprecated\n * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels().\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @deprecated\n * @typedef {array} GetLabelsResponse\n * @property {object} 0 Object of labels currently set on this bucket.\n */\n /**\n * @deprecated\n * @callback GetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} labels Object of labels currently set on this bucket.\n */\n /**\n * @deprecated Use getMetadata directly.\n * Get the labels currently set on this bucket.\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getLabels(function(err, labels) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // labels = {\n * // label: 'labelValue',\n * // ...\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getLabels().then(function(data) {\n * const labels = data[0];\n * });\n * ```\n */\n getLabels(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n 
callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n callback(err, null);\n return;\n }\n callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {});\n });\n }\n /**\n * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback GetNotificationsCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification[]} notifications Array of {@link Notification}\n * instances.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetNotificationsResponse\n * @property {Notification[]} 0 Array of {@link Notification} instances.\n * @property {object} 1 The full API response.\n */\n /**\n * Retrieves a list of notification subscriptions for a given bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list}\n *\n * @param {GetNotificationsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.getNotifications(function(err, notifications, apiResponse) {\n * if (!err) {\n * // notifications is an array of Notification objects.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getNotifications().then(function(data) {\n * const notifications = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/listNotifications.js\n * region_tag:storage_list_bucket_notifications\n * Another example:\n */\n getNotifications(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n uri: '/notificationConfigs',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const notifications = itemsArray.map((notification) => {\n const notificationInstance = this.notification(notification.id);\n notificationInstance.metadata = notification;\n return notificationInstance;\n });\n callback(null, notifications, resp);\n });\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * @typedef {object} GetBucketSignedUrlConfig\n * @property {string} action Currently only supports \"list\" (HTTP: GET).\n * @property {*} expires A timestamp when this link will expire. 
Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @property {string} [version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @property {string} [cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @property {object} [extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [queryParams] Additional query parameters to include\n * in the signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to a bucket.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {GetBucketSignedUrlConfig} config Configuration object.\n * @param {string} config.action Currently only supports \"list\" (HTTP: GET).\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). 
Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * //-\n * // Generate a URL that allows temporary access to list files in a bucket.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'list',\n * expires: '03-17-2025'\n * };\n *\n * bucket.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The bucket is now available to be listed from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n * ```\n */\n getSignedUrl(cfg, callback) {\n const method = BucketActionToHTTPMethod[cfg.action];\n const signConfig = {\n method,\n expires: cfg.expires,\n version: cfg.version,\n cname: cfg.cname,\n extensionHeaders: cfg.extensionHeaders || {},\n queryParams: cfg.queryParams || {},\n };\n if (!this.signer) {\n this.signer = new signer_js_1.URLSigner(this.storage.authClient, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback BucketLockCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Lock a previously-defined retention policy. This will prevent changes to\n * the policy.\n *\n * @throws {Error} if a metageneration is not provided.\n *\n * @param {number|string} metageneration The bucket's metageneration. 
This is\n * accesssible from calling {@link File#getMetadata}.\n * @param {BucketLockCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const metageneration = 2;\n *\n * bucket.lock(metageneration, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.lock(metageneration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n lock(metageneration, callback) {\n const metatype = typeof metageneration;\n if (metatype !== 'number' && metatype !== 'string') {\n throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED);\n }\n this.request({\n method: 'POST',\n uri: '/lockRetentionPolicy',\n qs: {\n ifMetagenerationMatch: metageneration,\n },\n }, callback);\n }\n /**\n * @typedef {array} MakeBucketPrivateResponse\n * @property {File[]} 0 List of files made private.\n */\n /**\n * @callback MakeBucketPrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made private.\n */\n /**\n * @typedef {object} MakeBucketPrivateOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Make the bucket listing private.\n *\n * You may also choose to make the contents of the bucket private by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePrivate} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
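 * As an aside before the examples below, a sketch of guarding this call with a
 * metageneration precondition (the option and field names follow the
 * implementation beneath this comment; the bucket name is an assumption):
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const bucket = new Storage().bucket('albums');
 *
 * // Read the current metageneration, then only make the bucket private if the
 * // bucket metadata has not changed in the meantime.
 * bucket.getMetadata().then(([metadata]) =>
 *   bucket.makePrivate({
 *     preconditionOpts: {ifMetagenerationMatch: metadata.metageneration},
 *   })
 * );
 * ```
 *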
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {Metadata} [options.metadata] Define custom metadata properties to define\n * along with the operation.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {MakeBucketPrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket private.\n * //-\n * bucket.makePrivate(function(err) {});\n *\n * //-\n * // Make the bucket and its contents private.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePrivate(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents private, using force to suppress errors\n * // until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePrivate(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePrivate(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b, _c, _d;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options.private = true;\n const query = {\n predefinedAcl: 'projectPrivate',\n };\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n query.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n query.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n query.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a bucket\n // so acl must explicitly be nullified.\n const metadata = { ...options.metadata, acl: null };\n this.setMetadata(metadata, query, (err) => {\n if (err) {\n callback(err);\n }\n const internalCall = () => {\n if (options.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options);\n }\n return Promise.resolve([]);\n };\n internalCall()\n .then(files => callback(null, files))\n .catch(callback);\n });\n }\n /**\n * @typedef {object} MakeBucketPublicOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n */\n /**\n * @callback MakeBucketPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made public.\n */\n /**\n * @typedef {array} MakeBucketPublicResponse\n * @property {File[]} 0 List of files made public.\n */\n /**\n * Make the bucket publicly readable.\n *\n * You may also choose to make the contents of the bucket publicly readable by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePublic} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPublicOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {MakeBucketPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket publicly readable.\n * //-\n * bucket.makePublic(function(err) {});\n *\n * //-\n * // Make the bucket and its contents publicly readable.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePublic(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents publicly readable, using force to\n * // suppress errors until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePublic(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePublic(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePublic(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n const req = { public: true, ...options };\n this.acl\n .add({\n entity: 'allUsers',\n role: 'READER',\n })\n .then(() => {\n return this.acl.default.add({\n entity: 'allUsers',\n role: 'READER',\n });\n })\n .then(() => {\n if (req.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req);\n }\n return [];\n })\n .then(files => callback(null, files), callback);\n }\n /**\n * Get a reference to a Cloud Pub/Sub Notification.\n *\n * @param {string} id ID of notification.\n * @returns {Notification}\n * @see Notification\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const notification = bucket.notification('1');\n * ```\n */\n notification(id) {\n if (!id) {\n throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID);\n }\n return new notification_js_1.Notification(this, id);\n }\n /**\n * Remove an already-existing retention policy from this bucket, if it is not\n * locked.\n *\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * bucket.removeRetentionPeriod(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.removeRetentionPeriod().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n removeRetentionPeriod(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: null,\n }, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) {\n reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject };\n }\n return super.request(reqOpts, callback);\n }\n /**\n * @deprecated\n * @typedef {array} SetLabelsResponse\n * @property {object} 0 The bucket metadata.\n */\n /**\n * @deprecated\n * @callback SetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * @deprecated\n * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @deprecated Use setMetadata directly.\n * Set labels on the bucket.\n *\n * This makes an underlying call to {@link Bucket#setMetadata}, which\n * is a PATCH request. 
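 * A short sketch of that PATCH behaviour (the label names are assumptions for
 * illustration):
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const bucket = new Storage().bucket('albums');
 *
 * // Suppose the bucket currently carries labels {env: 'test', team: 'storage'}.
 * // Setting only `env` rewrites that label; `team` is left untouched.
 * bucket.setLabels({env: 'prod'}, (err, metadata) => {
 *   if (!err) {
 *     // metadata.labels now holds env: 'prod' alongside the untouched team label.
 *   }
 * });
 * ```
 *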
This means an individual label can be overwritten, but\n * unmentioned labels will not be touched.\n *\n * @param {object} labels Labels to set on the bucket.\n * @param {SetLabelsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const labels = {\n * labelone: 'labelonevalue',\n * labeltwo: 'labeltwovalue'\n * };\n *\n * bucket.setLabels(labels, function(err, metadata) {\n * if (!err) {\n * // Labels set successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setLabels(labels).then(function(data) {\n * const metadata = data[0];\n * });\n * ```\n */\n setLabels(labels, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || index_js_1.util.noop;\n this.setMetadata({ labels }, options, callback);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options);\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * Lock all objects contained in the bucket, based on their creation time. Any\n * attempt to overwrite or delete objects younger than the retention period\n * will result in a `PERMISSION_DENIED` error.\n *\n * An unlocked retention policy can be modified or removed from the bucket via\n * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A\n * locked retention policy cannot be removed or shortened in duration for the\n * lifetime of the bucket. Attempting to remove or decrease period of a locked\n * retention policy will result in a `PERMISSION_DENIED` error. You can still\n * increase the policy.\n *\n * @param {*} duration In seconds, the minimum retention time for all objects\n * contained in this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataCallback} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const DURATION_SECONDS = 15780000; // 6 months.\n *\n * //-\n * // Lock the objects in this bucket for 6 months.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setRetentionPeriod(duration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: {\n retentionPeriod: duration.toString(),\n },\n }, options, callback);\n }\n /**\n *\n * @typedef {object} Cors\n * @property {number} [maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @property {string[]} [method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n */\n /**\n * This can be used to set the CORS configuration on the bucket.\n *\n * The configuration will be overwritten with the value passed into this.\n *\n * @param {Cors[]} corsConfiguration The new CORS configuration to set\n * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour\n * bucket.setCorsConfiguration(corsConfiguration);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setCorsConfiguration(corsConfiguration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n cors: corsConfiguration,\n }, options, callback);\n }\n /**\n * @typedef {object} SetBucketStorageClassOptions\n * @property {string} [userProject] - The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetBucketStorageClassCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Set the default storage class for new files in this bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. 
(`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] - The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n // In case we get input like `storageClass`, convert to `storage_class`.\n storageClass = storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase();\n this.setMetadata({ storageClass }, options, callback);\n }\n /**\n * Set a user project to be billed for all requests made from this Bucket\n * object and any files referenced from this Bucket object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.userProject = userProject;\n const methods = [\n 'create',\n 'delete',\n 'exists',\n 'get',\n 'getMetadata',\n 'setMetadata',\n ];\n methods.forEach(method => {\n const methodConfig = this.methods[method];\n if (typeof methodConfig === 'object') {\n if (typeof methodConfig.reqOpts === 'object') {\n Object.assign(methodConfig.reqOpts.qs, { userProject });\n }\n else {\n methodConfig.reqOpts = {\n qs: { userProject },\n };\n }\n }\n });\n }\n /**\n * @typedef {object} UploadOptions Configuration options for Bucket#upload().\n * @property {string|File} [destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @property {string} [encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @property {boolean} [gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @property {string} [kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. 
Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @property {object} [metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @property {string} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n */\n /**\n * @typedef {array} UploadResponse\n * @property {object} 0 The uploaded {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UploadCallback\n * @param {?Error} err Request error, if any.\n * @param {object} file The uploaded {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Upload a file to the bucket. This is a convenience method that wraps\n * {@link File#createWriteStream}.\n *\n * Resumable uploads are enabled by default\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}\n *\n * @param {string} pathString The fully qualified path to the file you\n * wish to upload to your bucket.\n * @param {UploadOptions} [options] Configuration options.\n * @param {string|File} [options.destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. 
Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {boolean} [options.gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @param {object} [options.metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @param {string} [options.offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @param {string} [options.predefinedAcl] Apply a predefined set of access\n * controls to this object.\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @param {boolean} [options.private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @param {boolean} [options.public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @param {boolean} [options.resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @param {number} [options.timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @param {string} [options.uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {string|boolean} [options.validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n * @param {UploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Upload a file from a local path.\n * //-\n * bucket.upload('/local/path/image.png', function(err, file, apiResponse) {\n * // Your bucket now contains:\n * // - \"image.png\" (with the contents of `/local/path/image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n *\n * //-\n * // It's not always that easy. 
You will likely want to specify the filename\n * // used when your new file lands in your bucket.\n * //\n * // You may also want to set metadata or customize other options.\n * //-\n * const options = {\n * destination: 'new-image.png',\n * validation: 'crc32c',\n * metadata: {\n * metadata: {\n * event: 'Fall trip to the zoo'\n * }\n * }\n * };\n *\n * bucket.upload('local-image.png', options, function(err, file) {\n * // Your bucket now contains:\n * // - \"new-image.png\" (with the contents of `local-image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n * //-\n * // You can also have a file gzip'd on the fly.\n * //-\n * bucket.upload('index.html', { gzip: true }, function(err, file) {\n * // Your bucket now contains:\n * // - \"index.html\" (automatically compressed with gzip)\n *\n * // Downloading the file with `file.download` will automatically decode\n * the\n * // file.\n * });\n *\n * //-\n * // You may also re-use a File object, {File}, that references\n * // the file you wish to create or overwrite.\n * //-\n * const options = {\n * destination: bucket.file('existing-file.png'),\n * resumable: false\n * };\n *\n * bucket.upload('local-img.png', options, function(err, newFile) {\n * // Your bucket now contains:\n * // - \"existing-file.png\" (with the contents of `local-img.png')\n *\n * // Note:\n * // The `newFile` parameter is equal to `file`.\n * });\n *\n * //-\n * // To use\n * // \n * // Customer-supplied Encryption Keys, provide the `encryptionKey`\n * option.\n * //-\n * const crypto = require('crypto');\n * const encryptionKey = crypto.randomBytes(32);\n *\n * bucket.upload('img.png', {\n * encryptionKey: encryptionKey\n * }, function(err, newFile) {\n * // `img.png` was uploaded with your custom encryption key.\n *\n * // `newFile` is already configured to use the encryption key when making\n * // operations on the remote object.\n *\n * // However, to use your encryption key later, you must create a `File`\n * // instance with the `key` supplied:\n * const file = bucket.file('img.png', {\n * encryptionKey: encryptionKey\n * });\n *\n * // Or with `file#setEncryptionKey`:\n * const file = bucket.file('img.png');\n * file.setEncryptionKey(encryptionKey);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.upload('local-image.png').then(function(data) {\n * const file = data[0];\n * });\n *\n * To upload a file from a URL, use {@link File#createWriteStream}.\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_upload_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n */\n upload(pathString, optionsOrCallback, callback) {\n var _a, _b;\n const upload = (numberOfRetries) => {\n const returnValue = (0, async_retry_1.default)(async (bail) => {\n await new Promise((resolve, reject) => {\n var _a, _b;\n if (numberOfRetries === 0 &&\n ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? 
void 0 : _b.autoRetry)) {\n newFile.storage.retryOptions.autoRetry = false;\n }\n const writable = newFile.createWriteStream(options);\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n fs.createReadStream(pathString)\n .on('error', bail)\n .pipe(writable)\n .on('error', err => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n else {\n return bail(err);\n }\n })\n .on('finish', () => {\n return resolve();\n });\n });\n }, {\n retries: numberOfRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback(null, newFile, newFile.metadata);\n }\n })\n .catch(callback);\n }\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (global['GCLOUD_SANDBOX_ENV']) {\n return;\n }\n let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options = Object.assign({\n metadata: {},\n }, options);\n // Do not retry if precondition option ifGenerationMatch is not set\n // because this is a file operation\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n let newFile;\n if (options.destination instanceof file_js_1.File) {\n newFile = options.destination;\n }\n else if (options.destination !== null &&\n typeof options.destination === 'string') {\n // Use the string as the name of the file.\n newFile = this.file(options.destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n else {\n // Resort to using the name of the incoming file.\n const destination = path.basename(pathString);\n newFile = this.file(destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n upload(maxRetries);\n }\n /**\n * @private\n *\n * @typedef {object} MakeAllFilesPublicPrivateOptions\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n * @property {boolean} [private] Make files private.\n * @property {boolean} [public] Make files public.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @private\n *\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Files that were updated.\n */\n /**\n * @typedef {array} MakeAllFilesPublicPrivateResponse\n * @property {File[]} 0 List of files affected.\n */\n /**\n * Iterate over all of a bucket's files, calling `file.makePublic()` (public)\n * or `file.makePrivate()` (private) on 
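The retry gate in `upload()` above keeps `maxRetries` non-zero only when an `ifGenerationMatch` precondition is supplied under a `RetryConditional` idempotency strategy, and never under `RetryNever`. A minimal caller-side sketch of that behaviour, assuming a hypothetical bucket name and local path and using the `preconditionOpts` option the gate reads:

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-bucket'); // hypothetical name

// `ifGenerationMatch: 0` asserts the object does not exist yet, which makes the
// upload idempotent, so the conditional-retry gate above leaves retries enabled.
bucket.upload('/local/path/report.csv', {
  destination: 'reports/report.csv',
  preconditionOpts: {ifGenerationMatch: 0},
}, (err, file) => {
  if (err) throw err;
  console.log(`uploaded ${file.name}`);
});

// Without the precondition (and with RetryConditional), maxRetries collapses
// to 0 in the branch above and a transient failure surfaces immediately.
```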
each.\n *\n * Operations are performed in parallel, up to 10 at once. The first error\n * breaks the loop, and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors.\n *\n * @private\n *\n * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.force] Suppress errors until all files have been\n * processed.\n * @param {boolean} [options.private] Make files private.\n * @param {boolean} [options.public] Make files public.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {MakeAllFilesPublicPrivateCallback} callback Callback function.\n *\n * @return {Promise}\n */\n makeAllFilesPublicPrivate_(optionsOrCallback, callback) {\n const MAX_PARALLEL_LIMIT = 10;\n const errors = [];\n const updatedFiles = [];\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const processFile = async (file) => {\n try {\n await (options.public ? file.makePublic() : file.makePrivate(options));\n updatedFiles.push(file);\n }\n catch (e) {\n if (!options.force) {\n throw e;\n }\n errors.push(e);\n }\n };\n this.getFiles(options)\n .then(([files]) => {\n const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT);\n const promises = files.map(file => {\n return limit(() => processFile(file));\n });\n return Promise.all(promises);\n })\n .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles));\n }\n getId() {\n return this.id;\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n methodType, localPreconditionOptions) {\n var _a, _b;\n if (typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n (methodType === AvailableServiceObjectMethods.setMetadata ||\n methodType === AvailableServiceObjectMethods.delete) &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) {\n this.storage.retryOptions.autoRetry = false;\n }\n else if (this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n}\nexports.Bucket = Bucket;\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Bucket, 'getFiles');\n/*! 
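`makeAllFilesPublicPrivate_` above fans the per-file work out through `p-limit` with a hard cap of 10 concurrent operations and, with `force: true`, collects errors instead of failing fast. A standalone sketch of the same pattern, assuming a CommonJS-compatible `p-limit` (v3.x exports the factory directly):

```js
const pLimit = require('p-limit'); // assumes p-limit v3.x (CommonJS export)

const MAX_PARALLEL_LIMIT = 10;

async function processAll(items, worker, force = false) {
  const limit = pLimit(MAX_PARALLEL_LIMIT);
  const errors = [];
  const processed = [];

  // Each call is wrapped by `limit`, so at most MAX_PARALLEL_LIMIT workers run at once.
  await Promise.all(items.map(item => limit(async () => {
    try {
      await worker(item);
      processed.push(item);
    } catch (e) {
      if (!force) throw e; // fail fast, like the default above
      errors.push(e);      // or keep going and collect, like `force: true`
    }
  })));

  return {processed, errors: errors.length ? errors : null};
}
```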
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Bucket, {\n exclude: ['cloudStorageURI', 'request', 'file', 'notification'],\n});\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Channel = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Create a channel object to interact with a Cloud Storage channel.\n *\n * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification}\n *\n * @class\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\nclass Channel extends index_js_1.ServiceObject {\n constructor(storage, id, resourceId) {\n const config = {\n parent: storage,\n baseUrl: '/channels',\n // An ID shouldn't be included in the API requests.\n // RE:\n // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145\n id: '',\n methods: {\n // Only need `request`.\n },\n };\n super(config);\n this.metadata.id = id;\n this.metadata.resourceId = resourceId;\n }\n /**\n * @typedef {array} StopResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback StopCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Stop this channel.\n *\n * @param {StopCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * channel.stop(function(err, apiResponse) {\n * if (!err) {\n * // Channel stopped successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * channel.stop().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n stop(callback) {\n callback = callback || index_js_1.util.noop;\n this.request({\n method: 'POST',\n uri: '/stop',\n json: this.metadata,\n }, (err, apiResponse) => {\n callback(err, apiResponse);\n });\n }\n}\nexports.Channel = Channel;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Channel);\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _CRC32C_crc32c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0;\nconst fs_1 = require(\"fs\");\n/**\n * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c}\n */\nconst CRC32C_EXTENSIONS = [\n 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c,\n 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,\n 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,\n 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,\n 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc,\n 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,\n 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,\n 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,\n 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,\n 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,\n 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf,\n 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,\n 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f,\n 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,\n 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,\n 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,\n 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e,\n 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,\n 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e,\n 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,\n 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,\n 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,\n 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4,\n 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,\n 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b,\n 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,\n 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,\n 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,\n 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975,\n 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,\n 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905,\n 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,\n 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,\n 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,\n 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,\n 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,\n 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78,\n 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,\n 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,\n 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,\n 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69,\n 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,\n 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,\n];\nexports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nconst CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS);\nexports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\nconst CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C();\nexports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR;\nconst CRC32C_EXCEPTION_MESSAGES = {\n INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`,\n};\nexports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES;\nclass CRC32C {\n /**\n * Constructs a new `CRC32C` object.\n *\n * Reconstruction is recommended via the `CRC32C.from` static method.\n *\n * @param initialValue An initial CRC32C value - a signed 32-bit integer.\n */\n constructor(initialValue = 0) {\n /** Current CRC32C value */\n _CRC32C_crc32c.set(this, 0);\n __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, \"f\");\n }\n /**\n * Calculates a CRC32C from a provided buffer.\n *\n * Implementation inspired from:\n * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c}\n * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c}\n * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage}\n *\n * @param data The `Buffer` to generate the CRC32C from\n */\n update(data) {\n let current = __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\") ^ 0xffffffff;\n for (const d of data) {\n const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff];\n current = tablePoly ^ (current >>> 8);\n }\n __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, \"f\");\n }\n /**\n * Validates a provided input to the current CRC32C value.\n *\n * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer\n */\n validate(input) {\n if (typeof input === 'number') {\n return input === __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n else if (typeof input === 'string') {\n return input === this.toString();\n }\n else if (Buffer.isBuffer(input)) {\n return Buffer.compare(input, this.toBuffer()) === 0;\n }\n else {\n // `CRC32C`-like object\n return input.toString() === this.toString();\n }\n }\n /**\n * Returns a `Buffer` representation of the CRC32C value\n */\n toBuffer() {\n const buffer = Buffer.alloc(4);\n buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, \"f\"));\n return buffer;\n }\n /**\n * Returns a JSON-compatible, base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`}\n */\n toJSON() {\n return this.toString();\n }\n /**\n * Returns a base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`}\n */\n toString() {\n return this.toBuffer().toString('base64');\n }\n /**\n * Returns the `number` representation of the CRC32C value 
as a signed 32-bit integer\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`}\n */\n valueOf() {\n return __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n /**\n * Generates a `CRC32C` from a compatible buffer format.\n *\n * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`\n */\n static fromBuffer(value) {\n let buffer;\n if (Buffer.isBuffer(value)) {\n buffer = value;\n }\n else if ('buffer' in value) {\n // `ArrayBufferView`\n buffer = Buffer.from(value.buffer);\n }\n else {\n // `ArrayBuffer`\n buffer = Buffer.from(value);\n }\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength));\n }\n return new CRC32C(buffer.readInt32BE());\n }\n static async fromFile(file) {\n const crc32c = new CRC32C();\n await new Promise((resolve, reject) => {\n (0, fs_1.createReadStream)(file)\n .on('data', (d) => crc32c.update(d))\n .on('end', resolve)\n .on('error', reject);\n });\n return crc32c;\n }\n /**\n * Generates a `CRC32C` from 4-byte base64-encoded data (string).\n *\n * @param value 4-byte base64-encoded data (string)\n */\n static fromString(value) {\n const buffer = Buffer.from(value, 'base64');\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength));\n }\n return this.fromBuffer(buffer);\n }\n /**\n * Generates a `CRC32C` from a safe, unsigned 32-bit integer.\n *\n * @param value an unsigned 32-bit integer\n */\n static fromNumber(value) {\n if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value));\n }\n return new CRC32C(value);\n }\n /**\n * Generates a `CRC32C` from a variety of compatable types.\n * Note: strings are treated as input, not as file paths to read from.\n *\n * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string)\n */\n static from(value) {\n if (typeof value === 'number') {\n return this.fromNumber(value);\n }\n else if (typeof value === 'string') {\n return this.fromString(value);\n }\n else if ('byteLength' in value) {\n // `ArrayBuffer` | `Buffer` | `ArrayBufferView`\n return this.fromBuffer(value);\n }\n else {\n // `CRC32CValidator`/`CRC32C`-like\n return this.fromString(value.toString());\n }\n }\n}\nexports.CRC32C = CRC32C;\n_CRC32C_crc32c = new WeakMap();\nCRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nCRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
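A short usage sketch of the `CRC32C` helper defined above, reusing the `'some '` / `'text\n'` checksum from its own JSDoc; it assumes the class is re-exported from the package root:

```js
const {CRC32C} = require('@google-cloud/storage'); // assumes the package-root re-export

const crc32c = new CRC32C();
crc32c.update(Buffer.from('some '));
crc32c.update(Buffer.from('text\n'));

console.log(crc32c.toString());           // 'DkjKuA==' (base64-encoded checksum)
console.log(crc32c.validate('DkjKuA==')); // true

// The static factory accepts numbers, 4-byte buffers or base64 strings.
const copy = CRC32C.from(crc32c.valueOf());
console.log(copy.validate(crc32c.toString())); // true
```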
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _File_instances, _File_validateIntegrity;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst compressible_1 = __importDefault(require(\"compressible\"));\nconst crypto = __importStar(require(\"crypto\"));\nconst fs = __importStar(require(\"fs\"));\nconst mime_1 = __importDefault(require(\"mime\"));\nconst resumableUpload = __importStar(require(\"./resumable-upload.js\"));\nconst stream_1 = require(\"stream\");\nconst zlib = __importStar(require(\"zlib\"));\nconst storage_js_1 = require(\"./storage.js\");\nconst bucket_js_1 = require(\"./bucket.js\");\nconst acl_js_1 = require(\"./acl.js\");\nconst signer_js_1 = require(\"./signer.js\");\nconst util_js_1 = require(\"./nodejs-common/util.js\");\nconst duplexify_1 = __importDefault(require(\"duplexify\"));\nconst util_js_2 = require(\"./util.js\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nconst hash_stream_validator_js_1 = require(\"./hash-stream-validator.js\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nvar ActionToHTTPMethod;\n(function (ActionToHTTPMethod) {\n ActionToHTTPMethod[\"read\"] = \"GET\";\n ActionToHTTPMethod[\"write\"] = \"PUT\";\n ActionToHTTPMethod[\"delete\"] = \"DELETE\";\n ActionToHTTPMethod[\"resumable\"] = \"POST\";\n})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {}));\n/**\n * @private\n */\nexports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com';\n/**\n * @private\n */\nconst GS_URL_REGEXP = /^gs:\\/\\/([a-z0-9_.-]+)\\/(.+)$/;\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\nvar FileExceptionMessages;\n(function (FileExceptionMessages) {\n FileExceptionMessages[\"EXPIRATION_TIME_NA\"] = \"An expiration time is not available.\";\n FileExceptionMessages[\"DESTINATION_NO_NAME\"] = 
\"Destination file should have a name.\";\n FileExceptionMessages[\"INVALID_VALIDATION_FILE_RANGE\"] = \"Cannot use validation with file ranges (start/end).\";\n FileExceptionMessages[\"MD5_NOT_AVAILABLE\"] = \"MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.\";\n FileExceptionMessages[\"EQUALS_CONDITION_TWO_ELEMENTS\"] = \"Equals condition must be an array of 2 elements.\";\n FileExceptionMessages[\"STARTS_WITH_TWO_ELEMENTS\"] = \"StartsWith condition must be an array of 2 elements.\";\n FileExceptionMessages[\"CONTENT_LENGTH_RANGE_MIN_MAX\"] = \"ContentLengthRange must have numeric min & max fields.\";\n FileExceptionMessages[\"DOWNLOAD_MISMATCH\"] = \"The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.\";\n FileExceptionMessages[\"UPLOAD_MISMATCH_DELETE_FAIL\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, we attempted to delete the file, but it was not successful.\\n To be sure the content is the same, you should try removing the file manually,\\n then uploading the file again.\\n \\n\\nThe delete attempt failed with this message:\\n\\n \";\n FileExceptionMessages[\"UPLOAD_MISMATCH\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, the file has been deleted.\\n To be sure the content is the same, you should try uploading the file again.\";\n FileExceptionMessages[\"MD5_RESUMED_UPLOAD\"] = \"MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value\";\n FileExceptionMessages[\"MISSING_RESUME_CRC32C_FINAL_UPLOAD\"] = \"The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`.\";\n})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {}));\n/**\n * A File object is created from your {@link Bucket} object using\n * {@link Bucket#file}.\n *\n * @class\n */\nclass File extends index_js_1.ServiceObject {\n /**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. 
Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a File instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control lists}\n *\n * @name File#acl\n * @mixes Acl\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * //-\n * // Make a file publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * file.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n /**\n * The API-formatted resource description of the file.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name File#metadata\n * @type {object}\n */\n /**\n * The file's name.\n * @name File#name\n * @type {string}\n */\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} FileOptions Options passed to the File constructor.\n * @property {string} [encryptionKey] A custom encryption key.\n * @property {number} [generation] Generation to scope the file to.\n * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this\n * object, if the object is encrypted by such a key. 
Limited availability;\n * usable only by enabled projects.\n * @property {string} [userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C}\n */\n /**\n * Constructs a file object.\n *\n * @param {Bucket} bucket The Bucket instance this file is\n * attached to.\n * @param {string} name The name of the remote file.\n * @param {FileOptions} [options] Configuration options.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * ```\n */\n constructor(bucket, name, options = {}) {\n var _a, _b;\n const requestQueryObject = {};\n let generation;\n if (options.generation !== null) {\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n requestQueryObject.generation = generation;\n }\n }\n Object.assign(requestQueryObject, options.preconditionOpts);\n const userProject = options.userProject || bucket.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * @typedef {array} DeleteFileResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteFileCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @method File#delete\n * @param {object} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the file does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_delete_file\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} FileExistsResponse\n * @property {boolean} 0 Whether the {@link File} exists.\n */\n /**\n * @callback FileExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link File} exists.\n */\n /**\n * Check if the file exists.\n *\n * @method File#exists\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {FileExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.exists(function(err, 
exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetFileResponse\n * @property {File} 0 The {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileCallback\n * @param {?Error} err Request error, if any.\n * @param {File} file The {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a file object and its metadata if it exists.\n *\n * @method File#get\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.get(function(err, file, apiResponse) {\n * // file.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.get().then(function(data) {\n * const file = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetFileMetadataResponse\n * @property {object} 0 The {@link File} metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The {@link File} metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get the file's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation}\n *\n * @method File#getMetadata\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_get_metadata\n * Another example:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata().\n * @param {string} [userProject] The ID of the project which will be billed for the request.\n */\n /**\n * @callback SetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} SetFileMetadataResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Merge the given metadata with the current remote file's metadata. This\n * will set metadata if it was previously unset or update previously set\n * metadata. 
To unset previously set metadata, set its value to null.\n *\n * You can set custom key/value pairs in the metadata key of the given\n * object, however the other properties outside of this object must adhere\n * to the {@link https://goo.gl/BOnnCK| official API documentation}.\n *\n *\n * See the examples below for more information.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @method File#setMetadata\n * @param {object} [metadata] The metadata you wish to update.\n * @param {SetFileMetadataOptions} [options] Configuration options.\n * @param {SetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * const metadata = {\n * contentType: 'application/x-font-ttf',\n * metadata: {\n * my: 'custom',\n * properties: 'go here'\n * }\n * };\n *\n * file.setMetadata(metadata, function(err, apiResponse) {});\n *\n * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' }\n * file.setMetadata({\n * metadata: {\n * abc: '123', // will be set.\n * unsetMe: null, // will be unset (deleted).\n * hello: 'goodbye' // will be updated from 'world' to 'goodbye'.\n * }\n * }, function(err, apiResponse) {\n * // metadata should now be { abc: '123', hello: 'goodbye' }\n * });\n *\n * //-\n * // Set a temporary hold on this file from its bucket's retention period\n * // configuration.\n * //\n * file.setMetadata({\n * temporaryHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Alternatively, you may set a temporary hold. This will follow the\n * // same behavior as an event-based hold, with the exception that the\n * // bucket's retention policy will not renew for this file from the time\n * // the hold is released.\n * //-\n * file.setMetadata({\n * eventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: bucket,\n baseUrl: '/o',\n id: encodeURIComponent(name),\n methods,\n });\n _File_instances.add(this);\n this.bucket = bucket;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.storage = bucket.parent;\n // @TODO Can this duplicate code from above be avoided?\n if (options.generation !== null) {\n let generation;\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n this.generation = generation;\n }\n }\n this.kmsKeyName = options.kmsKeyName;\n this.userProject = userProject;\n this.name = name;\n if (options.encryptionKey) {\n this.setEncryptionKey(options.encryptionKey);\n }\n this.acl = new acl_js_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.bucket.crc32cGenerator;\n this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry;\n this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts;\n }\n /**\n * The object's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('image.png');\n *\n * // `gs://my-bucket/image.png`\n * const href = file.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = this.bucket.cloudStorageURI;\n uri.pathname = this.name;\n return uri;\n }\n /**\n * A helper method for determining if a request should be retried based on preconditions.\n * This should only be used for methods where the idempotency is determined by\n * `ifGenerationMatch`\n * @private\n *\n * A request should not be retried under the following conditions:\n * - if precondition option `ifGenerationMatch` is not set OR\n * - if `idempotencyStrategy` is set to `RetryNever`\n */\n shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) {\n var _a;\n return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined &&\n ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever);\n }\n /**\n * @typedef {array} CopyResponse\n * @property {File} 0 The copied {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CopyCallback\n * @param {?Error} err Request error, if any.\n * @param {File} copiedFile The copied {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} CopyOptions Configuration options for File#copy(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @property {string} [cacheControl] The cacheControl setting for the new file.\n * @property {string} [contentEncoding] The contentEncoding setting for the new file.\n * @property {string} [contentType] The contentType setting for the new file.\n * @property {string} [destinationKmsKeyName] Resource name of the Cloud\n * KMS key, of the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {Metadata} [metadata] Metadata to specify on the copied file.\n * @property {string} [predefinedAcl] Set the ACL for the new file.\n * @property {string} [token] A previously-returned `rewriteToken` from an\n * unfinished rewrite request.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Copy this file to another file. By default, this will copy the file to the\n * same bucket, but you can choose to copy it to another Bucket by providing\n * a Bucket or File object or a URL starting with \"gs://\".\n * The generation of the file will not be preserved.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {CopyOptions} [options] Configuration options. 
See an\n * @param {CopyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is copied to its\n * // current bucket, under the new name provided.\n * //-\n * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {\n * // `my-bucket` now contains:\n * // - \"my-image.png\"\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-copy.png';\n * file.copy(newLocation, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be copied to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = storage.bucket('another-bucket');\n * file.copy(anotherBucket, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n * file.copy(anotherFile, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `copiedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.copy(newLocation).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_copy_file\n * Another example:\n */\n copy(destination, optionsOrCallback, callback) {\n var _a, _b;\n const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME);\n if (!destination) {\n throw noDestinationError;\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = { ...optionsOrCallback };\n }\n callback = callback || index_js_1.util.noop;\n let destBucket;\n let destName;\n let newFile;\n if (typeof destination === 'string') {\n const parsedDestination = GS_URL_REGEXP.exec(destination);\n if (parsedDestination !== null && parsedDestination.length === 3) {\n destBucket = this.storage.bucket(parsedDestination[1]);\n destName = parsedDestination[2];\n }\n else {\n destBucket = this.bucket;\n destName = destination;\n 
}\n }\n else if (destination instanceof bucket_js_1.Bucket) {\n destBucket = destination;\n destName = this.name;\n }\n else if (destination instanceof File) {\n destBucket = destination.bucket;\n destName = destination.name;\n newFile = destination;\n }\n else {\n throw noDestinationError;\n }\n const query = {};\n if (this.generation !== undefined) {\n query.sourceGeneration = this.generation;\n }\n if (options.token !== undefined) {\n query.rewriteToken = options.token;\n }\n if (options.userProject !== undefined) {\n query.userProject = options.userProject;\n delete options.userProject;\n }\n if (options.predefinedAcl !== undefined) {\n query.destinationPredefinedAcl = options.predefinedAcl;\n delete options.predefinedAcl;\n }\n newFile = newFile || destBucket.file(destName);\n const headers = {};\n if (this.encryptionKey !== undefined) {\n headers['x-goog-copy-source-encryption-algorithm'] = 'AES256';\n headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64;\n headers['x-goog-copy-source-encryption-key-sha256'] =\n this.encryptionKeyHash;\n }\n if (newFile.encryptionKey !== undefined) {\n this.setEncryptionKey(newFile.encryptionKey);\n }\n else if (options.destinationKmsKeyName !== undefined) {\n query.destinationKmsKeyName = options.destinationKmsKeyName;\n delete options.destinationKmsKeyName;\n }\n else if (newFile.kmsKeyName !== undefined) {\n query.destinationKmsKeyName = newFile.kmsKeyName;\n }\n if (query.destinationKmsKeyName) {\n this.kmsKeyName = query.destinationKmsKeyName;\n const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor);\n if (keyIndex > -1) {\n this.interceptors.splice(keyIndex, 1);\n }\n }\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n this.storage.retryOptions.autoRetry = false;\n }\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) {\n query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch;\n delete options.preconditionOpts;\n }\n this.request({\n method: 'POST',\n uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`,\n qs: query,\n json: options,\n headers,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n if (resp.rewriteToken) {\n const options = {\n token: resp.rewriteToken,\n };\n if (query.userProject) {\n options.userProject = query.userProject;\n }\n if (query.destinationKmsKeyName) {\n options.destinationKmsKeyName = query.destinationKmsKeyName;\n }\n this.copy(newFile, options, callback);\n return;\n }\n callback(null, newFile, resp);\n });\n }\n /**\n * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**.\n * @property {number} [start] A byte offset to begin the file's download\n * from. Default is 0. 
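`copy()` above forwards `destinationKmsKeyName` to the rewrite request's query string when the destination has no encryption key of its own. A hedged sketch with hypothetical bucket, object, and key names:

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const source = storage.bucket('my-bucket').file('my-image.png');
const destination = storage.bucket('another-bucket').file('my-image.png');

source.copy(destination, {
  // Key name format as documented above; all names here are placeholders.
  destinationKmsKeyName:
    'projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key',
}, (err, copiedFile, apiResponse) => {
  if (err) throw err;
  // `copiedFile` is the same File instance as `destination`; the rewrite call
  // carried the KMS key through the `destinationKmsKeyName` query parameter.
});
```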
NOTE: Byte ranges are inclusive; that is,\n * `options.start = 0` and `options.end = 999` represent the first 1000\n * bytes in a file or object. NOTE: when specifying a byte range, data\n * integrity is not available.\n * @property {number} [end] A byte offset to stop reading the file at.\n * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and\n * `options.end = 999` represent the first 1000 bytes in a file or object.\n * NOTE: when specifying a byte range, data integrity is not available.\n * @property {boolean} [decompress=true] Disable auto decompression of the\n * received data. By default this option is set to `true`.\n * Applicable in cases where the data was uploaded with\n * `gzip: true` option. See {@link File#createWriteStream}.\n */\n /**\n * Create a readable stream to read the contents of the remote file. It can be\n * piped to a writable stream or listened to for 'data' events to read a\n * file's contents.\n *\n * In the unlikely event there is a mismatch between what you downloaded and\n * the version in your Bucket, your error handler will receive an error with\n * code \"CONTENT_DOWNLOAD_MISMATCH\". If you receive this error, the best\n * recourse is to try downloading the file again.\n *\n * NOTE: Readable streams will emit the `end` event when the file is fully\n * downloaded.\n *\n * @param {CreateReadStreamOptions} [options] Configuration options.\n * @returns {ReadableStream}\n *\n * @example\n * ```\n * //-\n * //
Downloading a File
\n * //\n * // The example below demonstrates how we can reference a remote file, then\n * // pipe its contents to a local file. This is effectively creating a local\n * // backup of your remote data.\n * //-\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const fs = require('fs');\n * const remoteFile = bucket.file('image.png');\n * const localFilename = '/Users/stephen/Photos/image.png';\n *\n * remoteFile.createReadStream()\n * .on('error', function(err) {})\n * .on('response', function(response) {\n * // Server connected and responded with the specified status and headers.\n * })\n * .on('end', function() {\n * // The file is fully downloaded.\n * })\n * .pipe(fs.createWriteStream(localFilename));\n *\n * //-\n * // To limit the downloaded data to only a byte range, pass an options\n * // object.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * start: 10000,\n * end: 20000\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n *\n * //-\n * // To read a tail byte range, specify only `options.end` as a negative\n * // number.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * end: -100\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n * ```\n */\n createReadStream(options = {}) {\n options = Object.assign({ decompress: true }, options);\n const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number';\n const tailRequest = options.end < 0;\n let validateStream = undefined;\n let request = undefined;\n const throughStream = new util_js_2.PassThroughShim();\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n const value = options.validation.toLowerCase().trim();\n crc32c = value === 'crc32c';\n md5 = value === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n }\n const shouldRunValidation = !rangeRequest && (crc32c || md5);\n if (rangeRequest) {\n if (typeof options.validation === 'string' ||\n options.validation === true) {\n throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE);\n }\n // Range requests can't receive data integrity checks.\n crc32c = false;\n md5 = false;\n }\n const onComplete = (err) => {\n if (err) {\n // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed.\n // This causes a memory leak, so cleanup the sockets manually here by destroying the agent.\n if (request === null || request === void 0 ? void 0 : request.agent) {\n request.agent.destroy();\n }\n throughStream.destroy(err);\n }\n };\n // We listen to the response event from the request stream so that we\n // can...\n //\n // 1) Intercept any data from going to the user if an error occurred.\n // 2) Calculate the hashes from the http.IncomingMessage response\n // stream,\n // which will return the bytes from the source without decompressing\n // gzip'd content. 
We then send it through decompressed, if\n // applicable, to the user.\n const onResponse = (err, _body, rawResponseStream) => {\n if (err) {\n // Get error message from the body.\n this.getBufferFromReadable(rawResponseStream).then(body => {\n err.message = body.toString('utf8');\n throughStream.destroy(err);\n });\n return;\n }\n request = rawResponseStream.request;\n const headers = rawResponseStream.toJSON().headers;\n const isCompressed = headers['content-encoding'] === 'gzip';\n const hashes = {};\n // The object is safe to validate if:\n // 1. It was stored gzip and returned to us gzip OR\n // 2. It was never stored as gzip\n const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' &&\n isCompressed) ||\n headers['x-goog-stored-content-encoding'] === 'identity';\n const transformStreams = [];\n if (shouldRunValidation) {\n // The x-goog-hash header should be set with a crc32c and md5 hash.\n // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx'\n if (typeof headers['x-goog-hash'] === 'string') {\n headers['x-goog-hash']\n .split(',')\n .forEach((hashKeyValPair) => {\n const delimiterIndex = hashKeyValPair.indexOf('=');\n const hashType = hashKeyValPair.substring(0, delimiterIndex);\n const hashValue = hashKeyValPair.substring(delimiterIndex + 1);\n hashes[hashType] = hashValue;\n });\n }\n validateStream = new hash_stream_validator_js_1.HashStreamValidator({\n crc32c,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n crc32cExpected: hashes.crc32c,\n md5Expected: hashes.md5,\n });\n }\n if (md5 && !hashes.md5) {\n const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE);\n hashError.code = 'MD5_NOT_AVAILABLE';\n throughStream.destroy(hashError);\n return;\n }\n if (safeToValidate && shouldRunValidation && validateStream) {\n transformStreams.push(validateStream);\n }\n if (isCompressed && options.decompress) {\n transformStreams.push(zlib.createGunzip());\n }\n (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete);\n };\n // Authenticate the request, then pipe the remote API request to the stream\n // returned to the user.\n const makeRequest = () => {\n const query = { alt: 'media' };\n if (this.generation) {\n query.generation = this.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n const headers = {\n 'Accept-Encoding': 'gzip',\n 'Cache-Control': 'no-store',\n };\n if (rangeRequest) {\n const start = typeof options.start === 'number' ? options.start : '0';\n const end = typeof options.end === 'number' ? options.end : '';\n headers.Range = `bytes=${tailRequest ? 
end : `${start}-${end}`}`;\n }\n const reqOpts = {\n uri: '',\n headers,\n qs: query,\n };\n if (options[util_js_1.GCCL_GCS_CMD_KEY]) {\n reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY];\n }\n this.requestStream(reqOpts)\n .on('error', err => {\n throughStream.destroy(err);\n })\n .on('response', res => {\n throughStream.emit('response', res);\n index_js_1.util.handleResp(null, res, null, onResponse);\n })\n .resume();\n };\n throughStream.on('reading', makeRequest);\n return throughStream;\n }\n /**\n * @callback CreateResumableUploadCallback\n * @param {?Error} err Request error, if any.\n * @param {string} uri The resumable upload's unique session URI.\n */\n /**\n * @typedef {array} CreateResumableUploadResponse\n * @property {string} 0 The resumable upload's unique session URI.\n */\n /**\n * @typedef {object} CreateResumableUploadOptions\n * @property {object} [metadata] Metadata to set on the file.\n * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload.\n * @property {string} [origin] Origin header to set for the upload.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string} [chunkSize] Create a separate request per chunk. This\n * value is in bytes and should be a multiple of 256 KiB (2^18).\n * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.}\n */\n /**\n * Create a unique resumable upload session URI. This is the first step when\n * performing a resumable upload.\n *\n * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n * for more on how the entire process works.\n *\n *
Note
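// A minimal sketch of createResumableUpload in its promise form, as described
// below; 'my-bucket' and 'my-file' are assumed names.
const {Storage} = require('@google-cloud/storage');

async function getSessionUri() {
  const storage = new Storage();
  const file = storage.bucket('my-bucket').file('my-file');
  // The returned URI identifies the upload session; data can then be sent to
  // it with PUT requests.
  const [uri] = await file.createResumableUpload();
  return uri;
}

getSessionUri().then(uri => console.log(uri)).catch(console.error);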
\n *\n * If you are just looking to perform a resumable upload without worrying\n * about any of the details, see {@link File#createWriteStream}. Resumable\n * uploads are performed by default.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n *\n * @param {CreateResumableUploadOptions} [options] Configuration options.\n * @param {CreateResumableUploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.createResumableUpload(function(err, uri) {\n * if (!err) {\n * // `uri` can be used to PUT data to.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.createResumableUpload().then(function(data) {\n * const uri = data[0];\n * });\n * ```\n */\n createResumableUpload(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const retryOptions = this.storage.retryOptions;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n retryOptions.autoRetry = false;\n }\n resumableUpload.createURI({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n origin: options.origin,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n userProject: options.userProject || this.userProject,\n retryOptions: retryOptions,\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY],\n }, callback);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().\n * @property {string} [contentType] Alias for\n * `options.metadata.contentType`. If set to `auto`, the file name is used\n * to determine the contentType.\n * @property {string|boolean} [gzip] If true, automatically gzip the file.\n * If set to `auto`, the contentType is used to determine if the file\n * should be gzipped. 
This will set `options.metadata.contentEncoding` to\n * `gzip` if necessary.\n * @property {object} [metadata] See the examples below or\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}\n * for more details.\n * @property {number} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable] Force a resumable upload. NOTE: When\n * working with streams, the file format and size is unknown until it's\n * completely consumed. Because of this, it's best for you to be explicit\n * for what makes sense given your input.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**. In addition to specifying\n * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will\n * cause the server to perform validation in addition to client validation.\n * NOTE: Validation is automatically skipped for objects that were\n * uploaded using the `gzip` option and have already compressed content.\n */\n /**\n * Create a writable stream to overwrite the contents of the file in your\n * bucket.\n *\n * A File object can also be used to create files for the first time.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n *\n *
\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files smaller than 10 MB, it is recommended that the\n * resumable feature be disabled.\n *
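// A sketch of disabling resumable uploads for a small file, as the note above
// suggests; './config.json' and 'my-bucket' are placeholder values.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('config.json');

fs.createReadStream('./config.json')
  .pipe(file.createWriteStream({resumable: false}))
  .on('error', console.error)
  .on('finish', () => console.log('upload complete'));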
\n *\n * NOTE: Writable streams will emit the `finish` event when the file is fully\n * uploaded.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation}\n *\n * @param {CreateWriteStreamOptions} [options] Configuration options.\n * @returns {WritableStream}\n *\n * @example\n * ```\n * const fs = require('fs');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * //
Uploading a File
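// A sketch of the Bucket#upload convenience method referenced in the example
// below; the local path and bucket name are placeholders.
const {Storage} = require('@google-cloud/storage');

async function uploadWithConvenienceMethod() {
  const storage = new Storage();
  const [uploadedFile] = await storage.bucket('my-bucket').upload('./panda.jpg');
  console.log(`uploaded as ${uploadedFile.name}`);
}

uploadWithConvenienceMethod().catch(console.error);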
\n * //\n * // Now, consider a case where we want to upload a file to your bucket. You\n * // have the option of using {@link Bucket#upload}, but that is just\n * // a convenience method which will do the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream())\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * //
Uploading a File with gzip compression
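// A sketch of the `gzip: 'auto'` variant, where compression is applied only
// when the detected contentType is compressible; names and paths are
// placeholders.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('index.html');

fs.createReadStream('./site/index.html')
  .pipe(file.createWriteStream({gzip: 'auto'}))
  .on('error', console.error)
  .on('finish', () => console.log('upload complete'));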
\n * //-\n * fs.createReadStream('/Users/stephen/site/index.html')\n * .pipe(file.createWriteStream({ gzip: true }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * // Downloading the file with `createReadStream` will automatically decode\n * // the file.\n * //-\n *\n * //-\n * //
Uploading a File with Metadata
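// A sketch of the same metadata shape applied through File#save, assuming
// save() forwards these createWriteStream options; the bucket, object name
// and payload are placeholders.
const {Storage} = require('@google-cloud/storage');

async function saveWithMetadata() {
  const storage = new Storage();
  const file = storage.bucket('my-bucket').file('panda.jpg');
  await file.save(Buffer.from('placeholder image bytes'), {
    metadata: {
      contentType: 'image/jpeg',
      metadata: {custom: 'metadata'},
    },
  });
}

saveWithMetadata().catch(console.error);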
\n * //\n * // One last case you may run into is when you want to upload a file to your\n * // bucket and set its metadata at the same time. Like above, you can use\n * // {@link Bucket#upload} to do this, which is just a wrapper around\n * // the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream({\n * metadata: {\n * contentType: 'image/jpeg',\n * metadata: {\n * custom: 'metadata'\n * }\n * }\n * }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n * ```\n *\n * //-\n * //
Continuing a Resumable Upload
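// A sketch of capturing the session `uri` and `crc32c` events emitted by
// File#createWriteStream so a later stream can resume with
// {uri, resumeCRC32C}, as described below; names are placeholders.
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('large-file.bin');

let sessionUri;
let resumeCRC32C;

const stream = file.createWriteStream();
stream.on('uri', link => (sessionUri = link));
stream.on('crc32c', value => (resumeCRC32C = value));
// ...write data, then suppose the upload is interrupted...

// Later: resume the same session, supplying the saved CRC32C for validation.
// const resumed = file.createWriteStream({uri: sessionUri, resumeCRC32C});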
\n * //\n * // One can capture a `uri` from a resumable upload to reuse later.\n * // Additionally, for validation, one can also capture and pass `crc32c`.\n * //-\n * let uri: string | undefined = undefined;\n * let resumeCRC32C: string | undefined = undefined;\n *\n * fs.createWriteStream()\n * .on('uri', link => {uri = link})\n * .on('crc32', crc32c => {resumeCRC32C = crc32c});\n *\n * // later...\n * fs.createWriteStream({uri, resumeCRC32C});\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n createWriteStream(options = {}) {\n var _a;\n (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {});\n if (options.contentType) {\n options.metadata.contentType = options.contentType;\n }\n if (!options.metadata.contentType ||\n options.metadata.contentType === 'auto') {\n const detectedContentType = mime_1.default.getType(this.name);\n if (detectedContentType) {\n options.metadata.contentType = detectedContentType;\n }\n }\n let gzip = options.gzip;\n if (gzip === 'auto') {\n gzip = (0, compressible_1.default)(options.metadata.contentType || '');\n }\n if (gzip) {\n options.metadata.contentEncoding = 'gzip';\n }\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n options.validation = options.validation.toLowerCase();\n crc32c = options.validation === 'crc32c';\n md5 = options.validation === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n md5 = false;\n }\n if (options.offset) {\n if (md5) {\n throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD);\n }\n if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) {\n throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD);\n }\n }\n /**\n * A callback for determining when the underlying pipeline is complete.\n * It's possible the pipeline callback could error before the write stream\n * calls `final` so by default this will destroy the write stream unless the\n * write stream sets this callback via its `final` handler.\n * @param error An optional error\n */\n let pipelineCallback = error => {\n writeStream.destroy(error || undefined);\n };\n // A stream for consumer to write to\n const writeStream = new stream_1.Writable({\n final(cb) {\n // Set the pipeline callback to this callback so the pipeline's results\n // can be populated to the consumer\n pipelineCallback = cb;\n emitStream.end();\n },\n write(chunk, encoding, cb) {\n emitStream.write(chunk, encoding, cb);\n },\n });\n const transformStreams = [];\n if (gzip) {\n transformStreams.push(zlib.createGzip());\n }\n const emitStream = new util_js_2.PassThroughShim();\n let hashCalculatingStream = null;\n if (crc32c || md5) {\n const crc32cInstance = options.resumeCRC32C\n ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C)\n : undefined;\n hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({\n crc32c,\n crc32cInstance,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n updateHashesOnly: true,\n });\n transformStreams.push(hashCalculatingStream);\n }\n const fileWriteStream = (0, duplexify_1.default)();\n let fileWriteStreamMetadataReceived = false;\n // Handing off emitted events to users\n emitStream.on('reading', () => writeStream.emit('reading'));\n emitStream.on('writing', () => writeStream.emit('writing'));\n fileWriteStream.on('uri', evt => writeStream.emit('uri', evt));\n fileWriteStream.on('progress', evt => writeStream.emit('progress', evt));\n fileWriteStream.on('response', resp => writeStream.emit('response', resp));\n fileWriteStream.once('metadata', () => {\n fileWriteStreamMetadataReceived = true;\n });\n writeStream.once('writing', () => {\n if (options.resumable === false) {\n this.startSimpleUpload_(fileWriteStream, options);\n }\n else {\n this.startResumableUpload_(fileWriteStream, options);\n }\n (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => {\n if (e) {\n return pipelineCallback(e);\n }\n // We want to make sure we've received the metadata from the server in order\n // to properly validate the object's integrity. Depending on the type of upload,\n // the stream could close before the response is returned.\n if (!fileWriteStreamMetadataReceived) {\n try {\n await new Promise((resolve, reject) => {\n fileWriteStream.once('metadata', resolve);\n fileWriteStream.once('error', reject);\n });\n }\n catch (e) {\n return pipelineCallback(e);\n }\n }\n // Emit the local CRC32C value for future validation, if validation is enabled.\n if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) {\n writeStream.emit('crc32c', hashCalculatingStream.crc32c);\n }\n try {\n // Metadata may not be ready if the upload is a partial upload,\n // nothing to validate yet.\n const metadataNotReady = options.isPartialUpload && !this.metadata;\n if (hashCalculatingStream && !metadataNotReady) {\n await __classPrivateFieldGet(this, _File_instances, \"m\", _File_validateIntegrity).call(this, hashCalculatingStream, {\n crc32c,\n md5,\n });\n }\n pipelineCallback();\n }\n catch (e) {\n pipelineCallback(e);\n }\n });\n });\n return writeStream;\n }\n delete(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options);\n super\n .delete(options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} DownloadResponse\n * @property [0] The contents of a File.\n */\n /**\n * @callback DownloadCallback\n * @param err Request error, if any.\n * @param contents The contents of a File.\n */\n /**\n * Convenience method to download a file into memory or to a local\n * destination.\n *\n * @param {object} [options] Configuration options. 
The arguments match those\n * passed to {@link File#createReadStream}.\n * @param {string} [options.destination] Local file path to write the file's\n * contents to.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DownloadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Download a file into memory. The contents will be available as the\n * second\n * // argument in the demonstration below, `contents`.\n * //-\n * file.download(function(err, contents) {});\n *\n * //-\n * // Download a file to a local destination.\n * //-\n * file.download({\n * destination: '/Users/me/Desktop/file-backup.txt'\n * }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.download().then(function(data) {\n * const contents = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_download_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n *\n * @example include:samples/requesterPays.js\n * region_tag:storage_download_file_requester_pays\n * Example of downloading a file where the requester pays:\n */\n download(optionsOrCallback, cb) {\n let options;\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n options = {};\n }\n else {\n options = optionsOrCallback;\n }\n let called = false;\n const callback = ((...args) => {\n if (!called)\n cb(...args);\n called = true;\n });\n const destination = options.destination;\n delete options.destination;\n const fileStream = this.createReadStream(options);\n let receivedData = false;\n if (destination) {\n fileStream\n .on('error', callback)\n .once('data', data => {\n // We know that the file exists the server - now we can truncate/write to a file\n receivedData = true;\n const writable = fs.createWriteStream(destination);\n writable.write(data);\n fileStream\n .pipe(writable)\n .on('error', callback)\n .on('finish', callback);\n })\n .on('end', () => {\n // In the case of an empty file no data will be received before the end event fires\n if (!receivedData) {\n const data = Buffer.alloc(0);\n try {\n fs.writeFileSync(destination, data);\n callback(null, data);\n }\n catch (e) {\n callback(e, data);\n }\n }\n });\n }\n else {\n this.getBufferFromReadable(fileStream)\n .then(contents => callback === null || callback === void 0 ? 
void 0 : callback(null, contents))\n .catch(callback);\n }\n }\n /**\n * The Storage API allows you to use a custom key for server-side encryption.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {string|buffer} encryptionKey An AES-256 encryption key.\n * @returns {File}\n *\n * @example\n * ```\n * const crypto = require('crypto');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const encryptionKey = crypto.randomBytes(32);\n *\n * const fileWithCustomEncryption = myBucket.file('my-file');\n * fileWithCustomEncryption.setEncryptionKey(encryptionKey);\n *\n * const fileWithoutCustomEncryption = myBucket.file('my-file');\n *\n * fileWithCustomEncryption.save('data', function(err) {\n * // Try to download with the File object that hasn't had\n * // `setEncryptionKey()` called:\n * fileWithoutCustomEncryption.download(function(err) {\n * // We will receive an error:\n * // err.message === 'Bad Request'\n *\n * // Try again with the File object we called `setEncryptionKey()` on:\n * fileWithCustomEncryption.download(function(err, contents) {\n * // contents.toString() === 'data'\n * });\n * });\n * });\n *\n * ```\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n */\n setEncryptionKey(encryptionKey) {\n this.encryptionKey = encryptionKey;\n this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64');\n this.encryptionKeyHash = crypto\n .createHash('sha256')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .update(this.encryptionKeyBase64, 'base64')\n .digest('base64');\n this.encryptionKeyInterceptor = {\n request: reqOpts => {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64;\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryptionKeyHash;\n return reqOpts;\n },\n };\n this.interceptors.push(this.encryptionKeyInterceptor);\n return this;\n }\n /**\n * @typedef {array} GetExpirationDateResponse\n * @property {date} 0 A Date object representing the earliest time this file's\n * retention policy will expire.\n */\n /**\n * @callback GetExpirationDateCallback\n * @param {?Error} err Request error, if any.\n * @param {date} expirationDate A Date object representing the earliest time\n * this file's retention policy will expire.\n */\n /**\n * If this bucket has a retention policy defined, use this method to get a\n * Date object representing the earliest time this file will expire.\n *\n * @param {GetExpirationDateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getExpirationDate(function(err, expirationDate) {\n * // expirationDate is a Date object.\n * });\n * ```\n */\n getExpirationDate(callback) {\n this.getMetadata((err, metadata, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n if (!metadata.retentionExpirationTime) {\n const error = new 
Error(FileExceptionMessages.EXPIRATION_TIME_NA);\n callback(error, null, apiResponse);\n return;\n }\n callback(null, new Date(metadata.retentionExpirationTime), apiResponse);\n });\n }\n /**\n * @typedef {array} GenerateSignedPostPolicyV2Response\n * @property {object} 0 The document policy.\n */\n /**\n * @callback GenerateSignedPostPolicyV2Callback\n * @param {?Error} err Request error, if any.\n * @param {object} policy The document policy.\n */\n /**\n * Get a signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process}\n *\n * @throws {Error} If an expiration timestamp from the past is given.\n * @throws {Error} If options.equals has an array with less or more than two\n * members.\n * @throws {Error} If options.startsWith has an array with less or more than two\n * members.\n *\n * @param {object} options Configuration options.\n * @param {array|array[]} [options.equals] Array of request parameters and\n * their expected value (e.g. [['$', '']]). Values are\n * translated into equality constraints in the conditions field of the\n * policy document (e.g. ['eq', '$', '']). If only one\n * equality condition is to be specified, options.equals can be a one-\n * dimensional array (e.g. ['$', '']).\n * @param {*} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {array|array[]} [options.startsWith] Array of request parameters and\n * their expected prefixes (e.g. [['$', '']). Values are\n * translated into starts-with constraints in the conditions field of the\n * policy document (e.g. ['starts-with', '$', '']). If only\n * one prefix condition is to be specified, options.startsWith can be a\n * one- dimensional array (e.g. 
['$', '']).\n * @param {string} [options.acl] ACL for the object from possibly predefined\n * ACLs.\n * @param {string} [options.successRedirect] The URL to which the user client\n * is redirected if the upload is successful.\n * @param {string} [options.successStatus] - The status of the Google Storage\n * response if the upload is successful (must be string).\n * @param {object} [options.contentLengthRange]\n * @param {number} [options.contentLengthRange.min] Minimum value for the\n * request's content length.\n * @param {number} [options.contentLengthRange.max] Maximum value for the\n * request's content length.\n * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * equals: ['$Content-Type', 'image/jpeg'],\n * expires: '10-25-2022',\n * contentLengthRange: {\n * min: 0,\n * max: 1024\n * }\n * };\n *\n * file.generateSignedPostPolicyV2(options, function(err, policy) {\n * // policy.string: the policy document in plain text.\n * // policy.base64: the policy document in base64.\n * // policy.signature: the policy signature in base64.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV2(options).then(function(data) {\n * const policy = data[0];\n * });\n * ```\n */\n generateSignedPostPolicyV2(optionsOrCallback, cb) {\n const args = (0, util_js_2.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n options = Object.assign({}, options);\n const conditions = [\n ['eq', '$key', this.name],\n {\n bucket: this.bucket.name,\n },\n ];\n if (Array.isArray(options.equals)) {\n if (!Array.isArray(options.equals[0])) {\n options.equals = [options.equals];\n }\n options.equals.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS);\n }\n conditions.push(['eq', condition[0], condition[1]]);\n });\n }\n if (Array.isArray(options.startsWith)) {\n if (!Array.isArray(options.startsWith[0])) {\n options.startsWith = [options.startsWith];\n }\n options.startsWith.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS);\n }\n conditions.push(['starts-with', condition[0], condition[1]]);\n });\n }\n if (options.acl) {\n conditions.push({\n acl: options.acl,\n });\n }\n if (options.successRedirect) {\n conditions.push({\n success_action_redirect: options.successRedirect,\n });\n }\n if (options.successStatus) {\n conditions.push({\n success_action_status: options.successStatus,\n });\n }\n if (options.contentLengthRange) {\n const min = options.contentLengthRange.min;\n const max = options.contentLengthRange.max;\n if (typeof min !== 'number' || typeof max !== 'number') {\n throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX);\n }\n conditions.push(['content-length-range', min, max]);\n }\n const policy = {\n expiration: 
expires.toISOString(),\n conditions,\n };\n const policyString = JSON.stringify(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n this.storage.authClient.sign(policyBase64).then(signature => {\n callback(null, {\n string: policyString,\n base64: policyBase64,\n signature,\n });\n }, err => {\n callback(new signer_js_1.SigningError(err.message));\n });\n }\n /**\n * @typedef {object} SignedPostPolicyV4Output\n * @property {string} url The request URL.\n * @property {object} fields The form fields to include in the POST request.\n */\n /**\n * @typedef {array} GenerateSignedPostPolicyV4Response\n * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields.\n */\n /**\n * @callback GenerateSignedPostPolicyV4Callback\n * @param {?Error} err Request error, if any.\n * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields.\n */\n /**\n * Get a v4 signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference}\n *\n * @param {object} options Configuration options.\n * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instead of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in\n * the result, e.g. \"https://cdn.example.com\".\n * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument}\n * to include in the signed policy. Any fields with key beginning with 'x-ignore-'\n * will not be included in the policy to be signed.\n * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document}\n * to include in the signed policy. 
All fields given in `config.fields` are\n * automatically included in the conditions array, adding the same entry\n * in both `fields` and `conditions` will result in duplicate entries.\n *\n * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * expires: '10-25-2022',\n * conditions: [\n * ['eq', '$Content-Type', 'image/jpeg'],\n * ['content-length-range', 0, 1024],\n * ],\n * fields: {\n * acl: 'public-read',\n * 'x-goog-meta-foo': 'bar',\n * 'x-ignore-mykey': 'data'\n * }\n * };\n *\n * file.generateSignedPostPolicyV4(options, function(err, response) {\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV4(options).then(function(data) {\n * const response = data[0];\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n * ```\n */\n generateSignedPostPolicyV4(optionsOrCallback, cb) {\n const args = (0, util_js_2.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n options = Object.assign({}, options);\n let fields = Object.assign({}, options.fields);\n const now = new Date();\n const nowISO = (0, util_js_2.formatAsUTCISO)(now, true);\n const todayISO = (0, util_js_2.formatAsUTCISO)(now);\n const sign = async () => {\n const { client_email } = await this.storage.authClient.getCredentials();\n const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`;\n fields = {\n ...fields,\n bucket: this.bucket.name,\n key: this.name,\n 'x-goog-date': nowISO,\n 'x-goog-credential': credential,\n 'x-goog-algorithm': 'GOOG4-RSA-SHA256',\n };\n const conditions = options.conditions || [];\n Object.entries(fields).forEach(([key, value]) => {\n if (!key.startsWith('x-ignore-')) {\n conditions.push({ [key]: value });\n }\n });\n delete fields.bucket;\n const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':');\n const policy = {\n conditions,\n expiration,\n };\n const policyString = (0, util_js_2.unicodeJSONStringify)(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n try {\n const signature = await this.storage.authClient.sign(policyBase64);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n fields['policy'] = policyBase64;\n fields['x-goog-signature'] = signatureHex;\n let url;\n if (options.virtualHostedStyle) {\n url = `https://${this.bucket.name}.storage.googleapis.com/`;\n }\n else if (options.bucketBoundHostname) {\n url = `${options.bucketBoundHostname}/`;\n }\n else {\n url = 
`${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`;\n }\n return {\n url,\n fields,\n };\n }\n catch (err) {\n throw new signer_js_1.SigningError(err.message);\n }\n };\n sign().then(res => callback(null, res), callback);\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to the file.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {object} config Configuration object.\n * @param {string} config.action \"read\" (HTTP: GET), \"write\" (HTTP: PUT), or\n * \"delete\" (HTTP: DELETE), \"resumable\" (HTTP: POST).\n * When using \"resumable\", the header `X-Goog-Resumable: start` has\n * to be sent when making a request with the signed URL.\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like\n * if you provide this, the client must provide this HTTP header with this same\n * value in its request, so to if this parameter is not provided here,\n * the client must not provide any value for this HTTP header in its request.\n * @param {string} [config.contentType] Just like if you provide this, the client\n * must provide this HTTP header with this same value in its request, so to if\n * this parameter is not provided here, the client must not provide any value\n * for this HTTP header in its request.\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @param {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {string} [config.promptSaveAs] The filename to prompt the user to\n * save the file as when the signed url is accessed. This is ignored if\n * `config.responseDisposition` is set.\n * @param {string} [config.responseDisposition] The\n * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the\n * signed url.\n * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value\n * given is passed to `new Date()`.\n * Note: Use for 'v4' only.\n * @param {string} [config.responseType] The response-content-type parameter\n * of the signed url.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to read from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * // Access will begin at accessibleAt and end at expires.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * accessibleAt: '03-13-2025'\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL to allow write permissions. 
This means anyone with this\n * URL\n * // can send a POST request with new data that will overwrite the file.\n * //-\n * file.getSignedUrl({\n * action: 'write',\n * expires: '03-17-2025'\n * }, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to be written to.\n * const writeStream = request.put(url);\n * writeStream.end('New data');\n *\n * writeStream.on('complete', function(resp) {\n * // Confirm the new content was saved.\n * file.download(function(err, fileContents) {\n * console.log('Contents:', fileContents.toString());\n * // Contents: New data\n * });\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_generate_signed_url\n * Another example:\n */\n getSignedUrl(cfg, callback) {\n const method = ActionToHTTPMethod[cfg.action];\n const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {});\n if (cfg.action === 'resumable') {\n extensionHeaders['x-goog-resumable'] = 'start';\n }\n const queryParams = Object.assign({}, cfg.queryParams);\n if (typeof cfg.responseType === 'string') {\n queryParams['response-content-type'] = cfg.responseType;\n }\n if (typeof cfg.promptSaveAs === 'string') {\n queryParams['response-content-disposition'] =\n 'attachment; filename=\"' + cfg.promptSaveAs + '\"';\n }\n if (typeof cfg.responseDisposition === 'string') {\n queryParams['response-content-disposition'] = cfg.responseDisposition;\n }\n if (this.generation) {\n queryParams['generation'] = this.generation.toString();\n }\n const signConfig = {\n method,\n expires: cfg.expires,\n accessibleAt: cfg.accessibleAt,\n extensionHeaders,\n queryParams,\n contentMd5: cfg.contentMd5,\n contentType: cfg.contentType,\n };\n if (cfg.cname) {\n signConfig.cname = cfg.cname;\n }\n if (cfg.version) {\n signConfig.version = cfg.version;\n }\n if (cfg.virtualHostedStyle) {\n signConfig.virtualHostedStyle = cfg.virtualHostedStyle;\n }\n if (!this.signer) {\n this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback IsPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} resp Whether file is public or not.\n */\n /**\n * @typedef {array} IsPublicResponse\n * @property {boolean} 0 Whether file is public or not.\n */\n /**\n * Check whether this file is public or not by sending\n * a HEAD request without credentials.\n * No errors from the server indicates that the current\n * file is public.\n * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden}\n * indicates that file is private.\n * Any other non 403 error is propagated to user.\n *\n * @param {IsPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Check whether the file is publicly accessible.\n * //-\n * file.isPublic(function(err, resp) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * console.log(`the file ${file.id} is public: ${resp}`) ;\n * })\n * //-\n * // If the callback 
is omitted, we'll return a Promise.\n * //-\n * file.isPublic().then(function(data) {\n * const resp = data[0];\n * });\n * ```\n */\n isPublic(callback) {\n var _a;\n // Build any custom headers based on the defined interceptors on the parent\n // storage object and this object\n const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || [];\n const fileInterceptors = this.interceptors || [];\n const allInterceptors = storageInterceptors.concat(fileInterceptors);\n const headers = allInterceptors.reduce((acc, curInterceptor) => {\n const currentHeaders = curInterceptor.request({\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n });\n Object.assign(acc, currentHeaders.headers);\n return acc;\n }, {});\n index_js_1.util.makeRequest({\n method: 'GET',\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n headers,\n }, {\n retryOptions: this.storage.retryOptions,\n }, (err) => {\n if (err) {\n const apiError = err;\n if (apiError.code === 403) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n }\n else {\n callback(null, true);\n }\n });\n }\n /**\n * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate().\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [strict] If true, set the file to be private to\n * only the owner user. Otherwise, it will be private to the project.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback MakeFilePrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} MakeFilePrivateResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Make a file private to the project and remove all other permissions.\n * Set `options.strict` to true to make the file private to only the owner.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @param {MakeFilePrivateOptions} [options] Configuration options.\n * @param {MakeFilePrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Set the file private so only project maintainers can see and modify it.\n * //-\n * file.makePrivate(function(err) {});\n *\n * //-\n * // Set the file private so only the owner can see and modify it.\n * //-\n * file.makePrivate({ strict: true }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePrivate().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const query = {\n predefinedAcl: options.strict ? 'private' : 'projectPrivate',\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n };\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifMetagenerationMatch) !== undefined) {\n query.ifMetagenerationMatch =\n (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch;\n delete options.preconditionOpts;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a file,\n // so acl must explicitly be nullified, destroying all previous acls on the\n // file.\n const metadata = { ...options.metadata, acl: null };\n this.setMetadata(metadata, query, callback);\n }\n /**\n * @typedef {array} MakeFilePublicResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback MakeFilePublicCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set a file to be publicly readable and maintain all previous permissions.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {MakeFilePublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.makePublic(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePublic().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_make_public\n * Another example:\n */\n makePublic(callback) {\n callback = callback || index_js_1.util.noop;\n this.acl.add({\n entity: 'allUsers',\n role: 'READER',\n }, (err, acl, resp) => {\n callback(err, resp);\n });\n }\n /**\n * The public URL of this File\n * Use {@link File#makePublic} to enable anonymous access via the returned URL.\n *\n * @returns {string}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * // publicUrl will be \"https://storage.googleapis.com/albums/my-file\"\n * const publicUrl = file.publicUrl();\n * ```\n */\n publicUrl() {\n return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`;\n }\n /**\n * @typedef {array} MoveResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback MoveCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} MoveOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Move this file to another location. 
By default, this will rename the file\n * and keep it in the same bucket, but you can choose to move it to another\n * Bucket by providing a Bucket or File object or a URL beginning with\n * \"gs://\".\n *\n * **Warning**:\n * There is currently no atomic `move` method in the Cloud Storage API,\n * so this method is a composition of {@link File#copy} (to the new\n * location) and {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. The error message will indicate what operation\n * has failed.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {MoveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is moved to its\n * // current bucket, under the new name provided.\n * //-\n * file.move('my-image-new.png', function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-new.png';\n * file.move(newLocation, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be moved to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = gcs.bucket('another-bucket');\n *\n * file.move(anotherBucket, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.move(anotherFile, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `destinationFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * 
file.move('my-image-new.png').then(function(data) {\n * const destinationFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_move_file\n * Another example:\n */\n move(destination, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || index_js_1.util.noop;\n this.copy(destination, options, (err, destinationFile, copyApiResponse) => {\n if (err) {\n err.message = 'file#copy failed with an error - ' + err.message;\n callback(err, null, copyApiResponse);\n return;\n }\n if (this.name !== destinationFile.name ||\n this.bucket.name !== destinationFile.bucket.name) {\n this.delete(options, (err, apiResponse) => {\n if (err) {\n err.message = 'file#delete failed with an error - ' + err.message;\n callback(err, destinationFile, apiResponse);\n return;\n }\n callback(null, destinationFile, copyApiResponse);\n });\n }\n else {\n callback(null, destinationFile, copyApiResponse);\n }\n });\n }\n /**\n * @typedef {array} RenameResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback RenameCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} RenameOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Rename this file.\n *\n * **Warning**:\n * There is currently no atomic `rename` method in the Cloud Storage API,\n * so this method is an alias of {@link File#move}, which in turn is a\n * composition of {@link File#copy} (to the new location) and\n * {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. 
The error message will indicate what operation\n * has failed.\n *\n * @param {string|File} destinationFile Destination file.\n * @param {RenameCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a string or a File object.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n *\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // You can pass in a string for the destinationFile.\n * //-\n * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"renamed-image.png\"\n *\n * // `renamedFile` is an instance of a File object that refers to your\n * // renamed file.\n * });\n *\n * //-\n * // You can pass in a File object.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.rename(anotherFile, function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n *\n * // Note:\n * // The `renamedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.rename('my-renamed-image.png').then(function(data) {\n * const renamedFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n rename(destinationFile, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || index_js_1.util.noop;\n this.move(destinationFile, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n return this.parent.request.call(this, reqOpts, callback);\n }\n /**\n * @callback RotateEncryptionKeyCallback\n * @extends CopyCallback\n */\n /**\n * @typedef RotateEncryptionKeyResponse\n * @extends CopyResponse\n */\n /**\n * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options\n * for File#rotateEncryptionKey().\n * If a string or Buffer is provided, it is interpreted as an AES-256,\n * customer-supplied encryption key. If you'd like to use a Cloud KMS key\n * name, you must specify an options object with the property name:\n * `kmsKeyName`.\n * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key.\n * @param {string} [options.kmsKeyName] A Cloud KMS key name.\n */\n /**\n * This method allows you to update the encryption key associated with this\n * file.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {RotateEncryptionKeyOptions} [options] - Configuration options.\n * @param {RotateEncryptionKeyCallback} [callback]\n * @returns {Promise}\n *\n * @example include:samples/encryption.js\n * region_tag:storage_rotate_encryption_key\n * Example of rotating the encryption key for this file:\n */\n rotateEncryptionKey(optionsOrCallback, callback) {\n var _a;\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n let options = {};\n if (typeof optionsOrCallback === 'string' ||\n optionsOrCallback instanceof Buffer) {\n options = {\n encryptionKey: optionsOrCallback,\n };\n }\n else if (typeof optionsOrCallback === 'object') {\n options = optionsOrCallback;\n }\n const newFile = this.bucket.file(this.id, options);\n const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined\n ? { preconditionOpts: options.preconditionOpts }\n : {};\n this.copy(newFile, copyOptions, callback);\n }\n /**\n * @typedef {object} SaveOptions\n * @extends CreateWriteStreamOptions\n */\n /**\n * @callback SaveCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Write strings or buffers to a file.\n *\n * *This is a convenience method which wraps {@link File#createWriteStream}.*\n * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff.\n *\n *
\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n *
\n *\n * @param {SaveData} data The data to write to a file.\n * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options`\n * parameter.\n * @param {SaveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const contents = 'This is the contents of the file.';\n *\n * file.save(contents, function(err) {\n * if (!err) {\n * // File written successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.save(contents).then(function() {});\n * ```\n */\n save(data, optionsOrCallback, callback) {\n // tslint:enable:no-any\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n let maxRetries = this.storage.retryOptions.maxRetries;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n maxRetries = 0;\n }\n const returnValue = (0, async_retry_1.default)(async (bail) => {\n return new Promise((resolve, reject) => {\n if (maxRetries === 0) {\n this.storage.retryOptions.autoRetry = false;\n }\n const writable = this.createWriteStream(options);\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n const handleError = (err) => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n return bail(err);\n };\n if (typeof data === 'string' || Buffer.isBuffer(data)) {\n writable\n .on('error', handleError)\n .on('finish', () => resolve())\n .end(data);\n }\n else {\n (0, stream_1.pipeline)(data, writable, err => {\n if (err) {\n if (typeof data !== 'function') {\n // Only PipelineSourceFunction can be retried. Async-iterables\n // and Readable streams can only be consumed once.\n return bail(err);\n }\n handleError(err);\n }\n else {\n resolve();\n }\n });\n }\n });\n }, {\n retries: maxRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback();\n }\n })\n .catch(callback);\n }\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? 
optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options);\n if (metadata.retention !== undefined) {\n options.overrideUnlockedRetention = true;\n }\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} SetStorageClassResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetStorageClassCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set the storage class for this file.\n *\n * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. (`standard`,\n * `nearline`, `coldline`, or `archive`)\n * **Note:** The storage classes `multi_regional` and `regional`\n * are now legacy and will be deprecated in the future.\n * @param {SetStorageClassOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * file.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n const req = {\n ...options,\n // In case we get input like `storageClass`, convert to `storage_class`.\n storageClass: storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase(),\n };\n this.copy(this, req, (err, file, apiResponse) => {\n if (err) {\n callback(err, apiResponse);\n return;\n }\n this.metadata = file.metadata;\n callback(null, apiResponse);\n });\n }\n /**\n * Set a user project to be billed for all requests made from this File\n * object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * file.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.bucket.setUserProject.call(this, userProject);\n }\n /**\n * This creates a resumable-upload upload stream.\n *\n * @param {Duplexify} stream - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startResumableUpload_(dup, options = {}) {\n var _a;\n (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {});\n const retryOptions = this.storage.retryOptions;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) {\n retryOptions.autoRetry = false;\n }\n const uploadStream = resumableUpload.upload({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n isPartialUpload: options.isPartialUpload,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n uri: options.uri,\n userProject: options.userProject || this.userProject,\n retryOptions: { ...retryOptions },\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize,\n highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark,\n [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY],\n });\n uploadStream\n .on('response', resp => {\n dup.emit('response', resp);\n })\n .on('uri', uri => {\n dup.emit('uri', uri);\n })\n .on('metadata', metadata => {\n this.metadata = metadata;\n dup.emit('metadata');\n })\n .on('finish', () => {\n dup.emit('complete');\n })\n .on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(uploadStream);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * Takes a readable stream and pipes it to a remote file. Unlike\n * `startResumableUpload_`, which uses the resumable upload technique, this\n * method uses a simple upload (all or nothing).\n *\n * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startSimpleUpload_(dup, options = {}) {\n var _a;\n (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {});\n const apiEndpoint = this.storage.apiEndpoint;\n const bucketName = this.bucket.name;\n const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`;\n const reqOpts = {\n qs: {\n name: this.name,\n },\n uri: uri,\n [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY],\n };\n if (this.generation !== undefined) {\n reqOpts.qs.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName !== undefined) {\n reqOpts.qs.kmsKeyName = this.kmsKeyName;\n }\n if (typeof options.timeout === 'number') {\n reqOpts.timeout = options.timeout;\n }\n if (options.userProject || this.userProject) {\n reqOpts.qs.userProject = options.userProject || this.userProject;\n }\n if (options.predefinedAcl) {\n reqOpts.qs.predefinedAcl = options.predefinedAcl;\n }\n else if (options.private) {\n reqOpts.qs.predefinedAcl = 'private';\n }\n else if (options.public) {\n reqOpts.qs.predefinedAcl = 'publicRead';\n }\n Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts);\n index_js_1.util.makeWritableStream(dup, {\n makeAuthenticatedRequest: (reqOpts) => {\n this.request(reqOpts, (err, body, resp) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n this.metadata = body;\n dup.emit('metadata', body);\n dup.emit('response', resp);\n dup.emit('complete');\n });\n },\n metadata: options.metadata,\n request: reqOpts,\n });\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, methodType, localPreconditionOptions) {\n var _a, _b, _c, _d;\n if ((typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined &&\n methodType === bucket_js_1.AvailableServiceObjectMethods.delete &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n if ((typeof coreOpts === 'object' &&\n ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n async getBufferFromReadable(readable) {\n const buf = [];\n for await (const chunk of readable) {\n buf.push(chunk);\n }\n return Buffer.concat(buf);\n }\n}\nexports.File = File;\n_File_instances = new WeakSet(), _File_validateIntegrity = \n/**\n *\n * @param hashCalculatingStream\n * @param verify\n * @returns {boolean} Returns `true` if valid, throws with error otherwise\n */\nasync function _File_validateIntegrity(hashCalculatingStream, verify = {}) {\n const metadata = this.metadata;\n // If we're doing validation, assume the worst\n let dataMismatch = !!(verify.crc32c || verify.md5);\n if (verify.crc32c && metadata.crc32c) {\n dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c);\n }\n if (verify.md5 && metadata.md5Hash) {\n dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash);\n }\n if (dataMismatch) {\n const errors = [];\n let code = '';\n let message = '';\n try {\n await this.delete();\n if (verify.md5 && !metadata.md5Hash) {\n code = 'MD5_NOT_AVAILABLE';\n message = FileExceptionMessages.MD5_NOT_AVAILABLE;\n }\n else {\n code = 'FILE_NO_UPLOAD';\n message = FileExceptionMessages.UPLOAD_MISMATCH;\n }\n }\n catch (e) {\n const error = e;\n code = 'FILE_NO_UPLOAD_DELETE';\n message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`;\n errors.push(error);\n }\n const error = new RequestError(message);\n error.code = code;\n error.errors = errors;\n throw error;\n }\n return true;\n};\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(File, {\n exclude: [\n 'cloudStorageURI',\n 'publicUrl',\n 'request',\n 'save',\n 'setEncryptionKey',\n 'shouldRetryBasedOnPreconditionAndIdempotencyStrat',\n 'getBufferFromReadable',\n ],\n});\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HashStreamValidator = void 0;\nconst crypto_1 = require(\"crypto\");\nconst stream_1 = require(\"stream\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nconst file_js_1 = require(\"./file.js\");\nclass HashStreamValidator extends stream_1.Transform {\n constructor(options = {}) {\n super();\n this.updateHashesOnly = false;\n _HashStreamValidator_crc32cHash.set(this, undefined);\n _HashStreamValidator_md5Hash.set(this, undefined);\n _HashStreamValidator_md5Digest.set(this, '');\n this.crc32cEnabled = !!options.crc32c;\n this.md5Enabled = !!options.md5;\n this.updateHashesOnly = !!options.updateHashesOnly;\n this.crc32cExpected = options.crc32cExpected;\n this.md5Expected = options.md5Expected;\n if (this.crc32cEnabled) {\n if (options.crc32cInstance) {\n __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, \"f\");\n }\n else {\n const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), \"f\");\n }\n }\n if (this.md5Enabled) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), \"f\");\n }\n }\n /**\n * Return the current CRC32C value, if available.\n */\n get crc32c() {\n var _a;\n return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\")) === null || _a === void 0 ? void 0 : _a.toString();\n }\n _flush(callback) {\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").digest('base64'), \"f\");\n }\n if (this.updateHashesOnly) {\n callback();\n return;\n }\n // If we're doing validation, assume the worst-- a data integrity\n // mismatch. 
If not, these tests won't be performed, and we can assume\n // the best.\n // We must check if the server decompressed the data on serve because hash\n // validation is not possible in this case.\n let failed = this.crc32cEnabled || this.md5Enabled;\n if (this.crc32cEnabled && this.crc32cExpected) {\n failed = !this.test('crc32c', this.crc32cExpected);\n }\n if (this.md5Enabled && this.md5Expected) {\n failed = !this.test('md5', this.md5Expected);\n }\n if (failed) {\n const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH);\n mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';\n callback(mismatchError);\n }\n else {\n callback();\n }\n }\n _transform(chunk, encoding, callback) {\n this.push(chunk, encoding);\n try {\n if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").update(chunk);\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").update(chunk);\n callback();\n }\n catch (e) {\n callback(e);\n }\n }\n test(hash, sum) {\n const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum;\n if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").validate(check);\n }\n if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, \"f\") === check;\n }\n return false;\n }\n}\nexports.HashStreamValidator = HashStreamValidator;\n_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap();\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HmacKey = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst storage_js_1 = require(\"./storage.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the HMAC key.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name HmacKey#metadata\n * @type {object}\n */\n/**\n * An HmacKey object contains metadata of an HMAC key created from a\n * service account through the {@link Storage} client using\n * {@link Storage#createHmacKey}.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @class\n */\nclass HmacKey extends index_js_1.ServiceObject {\n /**\n * @typedef {object} HmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. 
If not provided,\n * the project ID used to instantiate the Storage client will be used.\n */\n /**\n * Constructs an HmacKey object.\n *\n * Note: this only create a local reference to an HMAC key, to create\n * an HMAC key, use {@link Storage#createHmacKey}.\n *\n * @param {Storage} storage The Storage instance this HMAC key is\n * attached to.\n * @param {string} accessId The unique accessId for this HMAC key.\n * @param {HmacKeyOptions} options Constructor configurations.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('access-id');\n * ```\n */\n constructor(storage, accessId, options) {\n const methods = {\n /**\n * @typedef {object} DeleteHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {array} DeleteHmacKeyResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Deletes an HMAC key.\n * Key state must be set to `INACTIVE` prior to deletion.\n * Caution: HMAC keys cannot be recovered once you delete them.\n *\n * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists.\n *\n * @method HmacKey#delete\n * @param {DeleteHmacKeyOptions} [options] Configuration options.\n * @param {DeleteHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Delete HMAC key after making the key inactive.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * hmacKey.delete((err) => {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * // The HMAC key is deleted.\n * });\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey\n * .setMetadata({state: 'INACTIVE'})\n * .then(() => {\n * return hmacKey.delete();\n * });\n * ```\n */\n delete: true,\n /**\n * @callback GetHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey this {@link HmacKey} instance.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetHmacKeyResponse\n * @property {HmacKey} 0 This {@link HmacKey} instance.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} GetHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * this {@link HmacKey} instance.\n *\n * HmacKey.get() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#get\n * @param {GetHmacKeyOptions} [options] Configuration options.\n * @param {GetHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's Metadata.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * 
.get((err, hmacKey) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * // do something with the returned HmacKey object.\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .get()\n * .then((data) => {\n * const hmacKey = data[0];\n * });\n * ```\n */\n get: true,\n /**\n * @typedef {object} GetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * the HMAC key's metadata as an object.\n *\n * HmacKey.getMetadata() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#getMetadata\n * @param {GetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's metadata and populate to the metadata property.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata((err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata()\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n getMetadata: true,\n /**\n * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update.\n * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'.\n * @property {string} [etag] Include an etag from a previous get HMAC key request\n * to perform safe read-modify-write.\n */\n /**\n * @typedef {object} SetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @callback HmacKeyMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} HmacKeyMetadataResponse\n * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object.\n * @property {object} 1 The full API response.\n */\n /**\n * Updates the state of an HMAC key. 
See {@link SetHmacKeyMetadata} for\n * valid states.\n *\n * @method HmacKey#setMetadata\n * @param {SetHmacKeyMetadata} metadata The new metadata.\n * @param {SetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * const metadata = {\n * state: 'INACTIVE',\n * };\n *\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata)\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n method: 'PUT',\n },\n },\n };\n const projectId = (options && options.projectId) || storage.projectId;\n super({\n parent: storage,\n id: accessId,\n baseUrl: `/projects/${projectId}/hmacKeys`,\n methods,\n });\n this.storage = storage;\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways\n if (this.storage.retryOptions.idempotencyStrategy !==\n storage_js_1.IdempotencyStrategy.RetryAlways) {\n this.storage.retryOptions.autoRetry = false;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n}\nexports.HmacKey = HmacKey;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(HmacKey);\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Iam = exports.IAMExceptionMessages = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst util_js_1 = require(\"./util.js\");\nvar IAMExceptionMessages;\n(function (IAMExceptionMessages) {\n IAMExceptionMessages[\"POLICY_OBJECT_REQUIRED\"] = \"A policy object is required.\";\n IAMExceptionMessages[\"PERMISSIONS_REQUIRED\"] = \"Permissions are required.\";\n})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {}));\n/**\n * Get and set IAM policies for your Cloud Storage bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @constructor Iam\n *\n * @param {Bucket} bucket The parent instance.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * // bucket.iam\n * ```\n */\nclass Iam {\n constructor(bucket) {\n this.request_ = bucket.request.bind(bucket);\n this.resourceId_ = 'buckets/' + bucket.getId();\n }\n /**\n * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy().\n * @property {number} [requestedPolicyVersion] The version of IAM policies to\n * request. If a policy with a condition is requested without setting\n * this, the server will return an error. This must be set to a value\n * of 3 to retrieve IAM policies containing conditions. This is to\n * prevent client code that isn't aware of IAM conditions from\n * interpreting and modifying policies incorrectly. 
The service might\n * return a policy with version lower than the one that was requested,\n * based on the feature syntax in the policy fetched.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetPolicyResponse\n * @property {Policy} 0 The policy.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} Policy\n * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles.\n * @property {string} [policy.etag] Etags are used to perform a read-modify-write.\n * @property {number} [policy.version] The syntax schema version of the Policy.\n * To set an IAM policy with conditional binding, this field must be set to\n * 3 or greater.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n */\n /**\n * @typedef {object} PolicyBinding\n * @property {string} role Role that is assigned to members.\n * @property {string[]} members Specifies the identities requesting access for the bucket.\n * @property {Expr} [condition] The condition that is associated with this binding.\n */\n /**\n * @typedef {object} Expr\n * @property {string} [title] An optional title for the expression, i.e. a\n * short string describing its purpose. This can be used e.g. in UIs\n * which allow to enter the expression.\n * @property {string} [description] An optional description of the\n * expression. This is a longer text which describes the expression,\n * e.g. when hovered over it in a UI.\n * @property {string} expression Textual representation of an expression in\n * Common Expression Language syntax. The application context of the\n * containing message determines which well-known feature set of CEL\n * is supported.The condition that is associated with this binding.\n *\n * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions\n */\n /**\n * Get the IAM policy.\n *\n * @param {GetPolicyOptions} [options] Request options.\n * @param {GetPolicyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.iam.getPolicy(\n * {requestedPolicyVersion: 3},\n * function(err, policy, apiResponse) {\n *\n * },\n * );\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy({requestedPolicyVersion: 3})\n * .then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n */\n getPolicy(optionsOrCallback, callback) {\n const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback);\n const qs = {};\n if (options.userProject) {\n qs.userProject = options.userProject;\n }\n if (options.requestedPolicyVersion !== null &&\n options.requestedPolicyVersion !== undefined) {\n qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion;\n }\n this.request_({\n uri: '/iam',\n qs,\n }, cb);\n }\n /**\n * Set the IAM policy.\n *\n * @throws {Error} If no policy is provided.\n *\n * @param {Policy} policy The 
policy.\n * @param {SetPolicyOptions} [options] Configuration options.\n * @param {SetPolicyCallback} callback Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const myPolicy = {\n * bindings: [\n * {\n * role: 'roles/storage.admin',\n * members:\n * ['serviceAccount:myotherproject@appspot.gserviceaccount.com']\n * }\n * ]\n * };\n *\n * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.setPolicy(myPolicy).then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n setPolicy(policy, optionsOrCallback, callback) {\n if (policy === null || typeof policy !== 'object') {\n throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED);\n }\n const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback);\n let maxRetries;\n if (policy.etag === undefined) {\n maxRetries = 0;\n }\n this.request_({\n method: 'PUT',\n uri: '/iam',\n maxRetries,\n json: Object.assign({\n resourceId: this.resourceId_,\n }, policy),\n qs: options,\n }, cb);\n }\n /**\n * Test a set of permissions for a resource.\n *\n * @throws {Error} If permissions are not provided.\n *\n * @param {string|string[]} permissions The permission(s) to test for.\n * @param {TestIamPermissionsOptions} [options] Configuration object.\n * @param {TestIamPermissionsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * //-\n * // Test a single permission.\n * //-\n * const test = 'storage.buckets.delete';\n *\n * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": true\n * // }\n * });\n *\n * //-\n * // Test several permissions at once.\n * //-\n * const tests = [\n * 'storage.buckets.delete',\n * 'storage.buckets.get'\n * ];\n *\n * bucket.iam.testPermissions(tests, function(err, permissions) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": false,\n * // \"storage.buckets.get\": true\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.testPermissions(test).then(function(data) {\n * const permissions = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n testPermissions(permissions, optionsOrCallback, callback) {\n if (!Array.isArray(permissions) && typeof permissions !== 'string') {\n throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED);\n }\n const { options, callback: cb } = (0, 
util_js_1.normalize)(optionsOrCallback, callback);\n const permissionsArray = Array.isArray(permissions)\n ? permissions\n : [permissions];\n const req = Object.assign({\n permissions: permissionsArray,\n }, options);\n this.request_({\n uri: '/iam/testPermissions',\n qs: req,\n useQuerystring: true,\n }, (err, resp) => {\n if (err) {\n cb(err, null, resp);\n return;\n }\n const availablePermissions = Array.isArray(resp.permissions)\n ? resp.permissions\n : [];\n const permissionsHash = permissionsArray.reduce((acc, permission) => {\n acc[permission] = availablePermissions.indexOf(permission) > -1;\n return acc;\n }, {});\n cb(null, permissionsHash, resp);\n });\n }\n}\nexports.Iam = Iam;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Iam);\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.IdempotencyStrategy = exports.ApiError = void 0;\n/**\n * The `@google-cloud/storage` package has a single named export which is the\n * {@link Storage} (ES6) class, which should be instantiated with `new`.\n *\n * See {@link Storage} and {@link ClientConfig} for client methods and\n * configuration options.\n *\n * @module {Storage} @google-cloud/storage\n * @alias nodejs-storage\n *\n * @example\n * Install the client library with npm:\n * ```\n * npm install --save @google-cloud/storage\n * ```\n *\n * @example\n * Import the client library\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * ```\n *\n * @example\n * Create a client that uses Application\n * Default Credentials (ADC):\n * ```\n * const storage = new Storage();\n * ```\n *\n * @example\n * Create a client with explicit\n * credentials:\n * ```\n * const storage = new Storage({ projectId:\n * 'your-project-id', keyFilename: '/path/to/keyfile.json'\n * });\n * ```\n *\n * @example include:samples/quickstart.js\n * region_tag:storage_quickstart\n * Full quickstart example:\n */\nvar index_js_1 = require(\"./nodejs-common/index.js\");\nObject.defineProperty(exports, \"ApiError\", { 
enumerable: true, get: function () { return index_js_1.ApiError; } });\nvar storage_js_1 = require(\"./storage.js\");\nObject.defineProperty(exports, \"IdempotencyStrategy\", { enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } });\nObject.defineProperty(exports, \"Storage\", { enumerable: true, get: function () { return storage_js_1.Storage; } });\nvar bucket_js_1 = require(\"./bucket.js\");\nObject.defineProperty(exports, \"Bucket\", { enumerable: true, get: function () { return bucket_js_1.Bucket; } });\n__exportStar(require(\"./crc32c.js\"), exports);\nvar channel_js_1 = require(\"./channel.js\");\nObject.defineProperty(exports, \"Channel\", { enumerable: true, get: function () { return channel_js_1.Channel; } });\nvar file_js_1 = require(\"./file.js\");\nObject.defineProperty(exports, \"File\", { enumerable: true, get: function () { return file_js_1.File; } });\n__exportStar(require(\"./hash-stream-validator.js\"), exports);\nvar hmacKey_js_1 = require(\"./hmacKey.js\");\nObject.defineProperty(exports, \"HmacKey\", { enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } });\nvar iam_js_1 = require(\"./iam.js\");\nObject.defineProperty(exports, \"Iam\", { enumerable: true, get: function () { return iam_js_1.Iam; } });\nvar notification_js_1 = require(\"./notification.js\");\nObject.defineProperty(exports, \"Notification\", { enumerable: true, get: function () { return notification_js_1.Notification; } });\n__exportStar(require(\"./transfer-manager.js\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0;\nvar service_js_1 = require(\"./service.js\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_js_1.Service; } });\nvar service_object_js_1 = require(\"./service-object.js\");\nObject.defineProperty(exports, \"ServiceObject\", { enumerable: true, get: function () { return service_object_js_1.ServiceObject; } });\nvar util_js_1 = require(\"./util.js\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_js_1.ApiError; } });\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_js_1.util; } });\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * Copyright 2022 Google LLC. 
All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst events_1 = require(\"events\");\nconst util_js_1 = require(\"./util.js\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. a \"File\") to only have\n // the configured methods. 
We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n if (self.id && instance.metadata) {\n self.id = instance.metadata.id;\n }\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n var _a;\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = {\n method: 'DELETE',\n uri: '',\n ...methodConfig.reqOpts,\n qs: {\n ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs,\n ...options,\n },\n };\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n callback(err, res);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n var _a;\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = {\n uri: '',\n ...methodConfig.reqOpts,\n qs: {\n ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs,\n ...options,\n },\n };\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n var _a, _b;\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = {\n method: 'PATCH',\n uri: '',\n ...methodConfig.reqOpts,\n json: {\n ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json,\n ...metadata,\n },\n qs: {\n ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs,\n ...options,\n },\n };\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = { ...reqOpts };\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = Array.isArray(reqOpts.interceptors_)\n ? reqOpts.interceptors_\n : [];\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = { ...reqOpts, shouldReturnStream: true };\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\nconst uuid = __importStar(require(\"uuid\"));\nconst util_js_1 = require(\"./util.js\");\nconst util_js_2 = require(\"../util.js\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = Array.isArray(options.interceptors_)\n ? options.interceptors_\n : [];\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired !== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = {\n ...config,\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient || config.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n };\n this.makeAuthenticatedRequest =\n util_js_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = { ...reqOpts, timeout: this.timeout };\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n 
uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n const interceptorArray = Array.isArray(reqOpts.interceptors_)\n ? reqOpts.interceptors_\n : [];\n interceptorArray.forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = (0, util_js_2.getUserAgentString)();\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = {\n ...reqOpts.headers,\n 'User-Agent': userAgent,\n 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`,\n };\n if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) {\n reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`;\n }\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = { ...reqOpts, shouldReturnStream: true };\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = __importStar(require(\"ent\"));\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retry_request_1 = __importDefault(require(\"retry-request\"));\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst uuid = __importStar(require(\"uuid\"));\nconst service_js_1 = require(\"./service.js\");\nconst util_js_1 = require(\"../util.js\");\nconst duplexify_1 = __importDefault(require(\"duplexify\"));\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"../package-json-helper.cjs\");\nconst packageJson = (0, package_json_helper_cjs_1.getPackageJSON)();\n/**\n * A unique symbol for providing a `gccl-gcs-cmd` value\n * for the `X-Goog-API-Client` header.\n *\n * E.g. 
the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V`\n **/\nexports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. 
Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = {\n err: err || null,\n ...(resp && util.parseHttpRespMessage(resp)),\n ...(body && util.parseHttpRespBody(body)),\n };\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. 
If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n var _a;\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = {\n ...defaultReqOpts,\n ...options.request,\n qs: {\n ...defaultReqOpts.qs,\n ...(_a = options.request) === null || _a === void 0 ? void 0 : _a.qs,\n },\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n };\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]);\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted. 
This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = { ...config };\n if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = { ...config };\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = (0, duplexify_1.default)();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n typeof err.message === 'string' &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_js_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries !== undefined) {\n maxRetryValue = config.maxRetries;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]);\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay,\n retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier,\n totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n options.noResponseRetries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return (0, retry_request_1.default)(reqOpts, options, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = (0, retry_request_1.default)(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? 
[{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n _getDefaultHeaders(gcclGcsCmd) {\n const headers = {\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`,\n };\n if (gcclGcsCmd) {\n headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`;\n }\n return headers;\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Notification = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the notification.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Notification#metadata\n * @type {object}\n */\n/**\n * A Notification object is created from your {@link Bucket} object using\n * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub\n * notifications.\n *\n * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage}\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket The bucket instance this notification is attached to.\n * @param {string} id The ID of the notification.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const notification = myBucket.notification('1');\n * ```\n */\nclass Notification extends index_js_1.ServiceObject {\n constructor(bucket, id) {\n const requestQueryObject = {};\n const methods = {\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n * @method Notification#create\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. 
If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationRequest} [options] Metadata to set for\n * the notification.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.create(function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.create().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: true,\n /**\n * @typedef {array} DeleteNotificationResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Permanently deletes a notification subscription.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/deleteNotification.js\n * region_tag:storage_delete_bucket_notification\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * Get a notification and its metadata if it exists.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * See {@link Bucket#createNotification} for create options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. 
Default: `false`.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.get(function(err, notification, apiResponse) {\n * // `notification.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.get().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * Get the notification's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/getMetadataNotifications.js\n * region_tag:storage_print_pubsub_bucket_notification\n * Another example:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} NotificationExistsResponse\n * @property {boolean} 0 Whether the notification exists or not.\n */\n /**\n * @callback NotificationExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the notification exists or not.\n */\n /**\n * Check if the notification exists.\n *\n * @method Notification#exists\n * @param {NotificationExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: true,\n };\n super({\n parent: bucket,\n baseUrl: '/notificationConfigs',\n id: id.toString(),\n createMethod: bucket.createNotification.bind(bucket),\n methods,\n });\n }\n}\nexports.Notification = Notification;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Notification);\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nvar _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;\nconst abort_controller_1 = __importDefault(require(\"abort-controller\"));\nconst crypto_1 = require(\"crypto\");\nconst gaxios = __importStar(require(\"gaxios\"));\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst stream_1 = require(\"stream\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nconst uuid = __importStar(require(\"uuid\"));\nconst util_js_1 = require(\"./util.js\");\nconst util_js_2 = require(\"./nodejs-common/util.js\");\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\nconst NOT_FOUND_STATUS_CODE = 404;\nconst RESUMABLE_INCOMPLETE_STATUS_CODE = 308;\nconst DEFAULT_API_ENDPOINT_REGEX = /.*\\.googleapis\\.com/;\nconst packageJson = (0, package_json_helper_cjs_1.getPackageJSON)();\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\nclass Upload extends stream_1.Writable {\n constructor(cfg) {\n var _a;\n super(cfg);\n _Upload_instances.add(this);\n this.numBytesWritten = 0;\n this.numRetries = 0;\n this.currentInvocationId = {\n checkUploadStatus: uuid.v4(),\n chunk: uuid.v4(),\n uri: uuid.v4(),\n };\n /**\n * A cache of buffers written to this instance, ready for consuming\n */\n this.writeBuffers = [];\n this.numChunksReadInRequest = 0;\n /**\n * An array of buffers used for caching the most recent upload chunk.\n * We should not assume that the server received all bytes sent in the request.\n * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n */\n this.localWriteCache = [];\n this.localWriteCacheByteLength = 0;\n this.upstreamEnded = false;\n _Upload_gcclGcsCmd.set(this, void 0);\n cfg = cfg || {};\n if (!cfg.bucket || !cfg.file) {\n throw new Error('A bucket and file name are required');\n }\n if (cfg.offset && !cfg.uri) {\n throw new RangeError('Cannot provide an `offset` without providing a `uri`');\n }\n if (cfg.isPartialUpload && !cfg.chunkSize) {\n throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`');\n }\n cfg.authConfig = cfg.authConfig || {};\n cfg.authConfig.scopes = [\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ];\n this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig);\n this.apiEndpoint = 'https://storage.googleapis.com';\n if (cfg.apiEndpoint) {\n this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint);\n if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) {\n this.authClient = gaxios;\n }\n }\n this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`;\n this.bucket = cfg.bucket;\n const cacheKeyElements = [cfg.bucket, cfg.file];\n if (typeof cfg.generation === 'number') {\n cacheKeyElements.push(`${cfg.generation}`);\n }\n this.cacheKey = cacheKeyElements.join('/');\n this.customRequestOptions = cfg.customRequestOptions || {};\n this.file = cfg.file;\n this.generation = cfg.generation;\n this.kmsKeyName = cfg.kmsKeyName;\n this.metadata = cfg.metadata || {};\n this.offset = cfg.offset;\n this.origin = cfg.origin;\n this.params = cfg.params || {};\n this.userProject = cfg.userProject;\n this.chunkSize = cfg.chunkSize;\n this.retryOptions = cfg.retryOptions;\n this.isPartialUpload = 
(_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false;\n if (cfg.key) {\n const base64Key = Buffer.from(cfg.key).toString('base64');\n this.encryption = {\n key: base64Key,\n hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'),\n };\n }\n this.predefinedAcl = cfg.predefinedAcl;\n if (cfg.private)\n this.predefinedAcl = 'private';\n if (cfg.public)\n this.predefinedAcl = 'publicRead';\n const autoRetry = cfg.retryOptions.autoRetry;\n this.uriProvidedManually = !!cfg.uri;\n this.uri = cfg.uri;\n if (this.offset) {\n // we're resuming an incomplete upload\n this.numBytesWritten = this.offset;\n }\n this.numRetries = 0; // counter for number of retries currently executed\n if (!autoRetry) {\n cfg.retryOptions.maxRetries = 0;\n }\n this.timeOfFirstRequest = Date.now();\n const contentLength = cfg.metadata\n ? Number(cfg.metadata.contentLength)\n : NaN;\n this.contentLength = isNaN(contentLength) ? '*' : contentLength;\n __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], \"f\");\n this.once('writing', () => {\n if (this.uri) {\n this.continueUploading();\n }\n else {\n this.createURI(err => {\n if (err) {\n return this.destroy(err);\n }\n this.startUploading();\n return;\n });\n }\n });\n }\n /**\n * Prevent 'finish' event until the upload has succeeded.\n *\n * @param fireFinishEvent The finish callback\n */\n _final(fireFinishEvent = () => { }) {\n this.upstreamEnded = true;\n this.once('uploadFinished', fireFinishEvent);\n process.nextTick(() => {\n this.emit('upstreamFinished');\n // it's possible `_write` may not be called - namely for empty object uploads\n this.emit('writing');\n });\n }\n /**\n * Handles incoming data from upstream\n *\n * @param chunk The chunk to append to the buffer\n * @param encoding The encoding of the chunk\n * @param readCallback A callback for when the buffer has been read downstream\n */\n _write(chunk, encoding, readCallback = () => { }) {\n // Backwards-compatible event\n this.emit('writing');\n this.writeBuffers.push(typeof chunk === 'string' ? 
Buffer.from(chunk, encoding) : chunk);\n this.once('readFromChunkBuffer', readCallback);\n process.nextTick(() => this.emit('wroteToChunkBuffer'));\n }\n /**\n * Prepends the local buffer to write buffer and resets it.\n *\n * @param keepLastBytes number of bytes to keep from the end of the local buffer.\n */\n prependLocalBufferToUpstream(keepLastBytes) {\n // Typically, the upstream write buffers should be smaller than the local\n // cache, so we can save time by setting the local cache as the new\n // upstream write buffer array and appending the old array to it\n let initialBuffers = [];\n if (keepLastBytes) {\n // we only want the last X bytes\n let bytesKept = 0;\n while (keepLastBytes > bytesKept) {\n // load backwards because we want the last X bytes\n // note: `localWriteCacheByteLength` is reset below\n let buf = this.localWriteCache.pop();\n if (!buf)\n break;\n bytesKept += buf.byteLength;\n if (bytesKept > keepLastBytes) {\n // we have gone over the amount desired, let's keep the last X bytes\n // of this buffer\n const diff = bytesKept - keepLastBytes;\n buf = buf.subarray(diff);\n bytesKept -= diff;\n }\n initialBuffers.unshift(buf);\n }\n }\n else {\n // we're keeping all of the local cache, simply use it as the initial buffer\n initialBuffers = this.localWriteCache;\n }\n // Append the old upstream to the new\n const append = this.writeBuffers;\n this.writeBuffers = initialBuffers;\n for (const buf of append) {\n this.writeBuffers.push(buf);\n }\n // reset last buffers sent\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n }\n /**\n * Retrieves data from upstream's buffer.\n *\n * @param limit The maximum amount to return from the buffer.\n */\n *pullFromChunkBuffer(limit) {\n while (limit) {\n const buf = this.writeBuffers.shift();\n if (!buf)\n break;\n let bufToYield = buf;\n if (buf.byteLength > limit) {\n bufToYield = buf.subarray(0, limit);\n this.writeBuffers.unshift(buf.subarray(limit));\n limit = 0;\n }\n else {\n limit -= buf.byteLength;\n }\n yield bufToYield;\n // Notify upstream we've read from the buffer and we're able to consume\n // more. It can also potentially send more data down as we're currently\n // iterating.\n this.emit('readFromChunkBuffer');\n }\n }\n /**\n * A handler for determining if data is ready to be read from upstream.\n *\n * @returns If there will be more chunks to read in the future\n */\n async waitForNextChunk() {\n const willBeMoreChunks = await new Promise(resolve => {\n // There's data available - it should be digested\n if (this.writeBuffers.length) {\n return resolve(true);\n }\n // The upstream writable ended, we shouldn't expect any more data.\n if (this.upstreamEnded) {\n return resolve(false);\n }\n // Nothing immediate seems to be determined. 
We need to prepare some\n // listeners to determine next steps...\n const wroteToChunkBufferCallback = () => {\n removeListeners();\n return resolve(true);\n };\n const upstreamFinishedCallback = () => {\n removeListeners();\n // this should be the last chunk, if there's anything there\n if (this.writeBuffers.length)\n return resolve(true);\n return resolve(false);\n };\n // Remove listeners when we're ready to callback.\n const removeListeners = () => {\n this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback);\n this.removeListener('upstreamFinished', upstreamFinishedCallback);\n };\n // If there's data recently written it should be digested\n this.once('wroteToChunkBuffer', wroteToChunkBufferCallback);\n // If the upstream finishes let's see if there's anything to grab\n this.once('upstreamFinished', upstreamFinishedCallback);\n });\n return willBeMoreChunks;\n }\n /**\n * Reads data from upstream up to the provided `limit`.\n * Ends when the limit has reached or no data is expected to be pushed from upstream.\n *\n * @param limit The most amount of data this iterator should return. `Infinity` by default.\n */\n async *upstreamIterator(limit = Infinity) {\n // read from upstream chunk buffer\n while (limit && (await this.waitForNextChunk())) {\n // read until end or limit has been reached\n for (const chunk of this.pullFromChunkBuffer(limit)) {\n limit -= chunk.byteLength;\n yield chunk;\n }\n }\n }\n createURI(callback) {\n if (!callback) {\n return this.createURIAsync();\n }\n this.createURIAsync().then(r => callback(null, r), callback);\n }\n async createURIAsync() {\n const metadata = { ...this.metadata };\n const headers = {};\n // Delete content length and content type from metadata if they exist.\n // These are headers and should not be sent as part of the metadata.\n if (metadata.contentLength) {\n headers['X-Upload-Content-Length'] = metadata.contentLength.toString();\n delete metadata.contentLength;\n }\n if (metadata.contentType) {\n headers['X-Upload-Content-Type'] = metadata.contentType;\n delete metadata.contentType;\n }\n let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`;\n if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")) {\n googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")}`;\n }\n // Check if headers already exist before creating new ones\n const reqOpts = {\n method: 'POST',\n url: [this.baseURI, this.bucket, 'o'].join('/'),\n params: Object.assign({\n name: this.file,\n uploadType: 'resumable',\n }, this.params),\n data: metadata,\n headers: {\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': googAPIClient,\n ...headers,\n },\n };\n if (metadata.contentLength) {\n reqOpts.headers['X-Upload-Content-Length'] =\n metadata.contentLength.toString();\n }\n if (metadata.contentType) {\n reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType;\n }\n if (typeof this.generation !== 'undefined') {\n reqOpts.params.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName) {\n reqOpts.params.kmsKeyName = this.kmsKeyName;\n }\n if (this.predefinedAcl) {\n reqOpts.params.predefinedAcl = this.predefinedAcl;\n }\n if (this.origin) {\n reqOpts.headers.Origin = this.origin;\n }\n const uri = await (0, async_retry_1.default)(async (bail) => {\n var _a, _b, _c;\n try {\n const res = await this.makeRequest(reqOpts);\n // We have successfully got a URI we 
can now create a new invocation id\n this.currentInvocationId.uri = uuid.v4();\n return res.headers.location;\n }\n catch (err) {\n const e = err;\n const apiError = {\n code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status,\n name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText,\n message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText,\n errors: [\n {\n reason: e.code,\n },\n ],\n };\n if (this.retryOptions.maxRetries > 0 &&\n this.retryOptions.retryableErrorFn(apiError)) {\n throw e;\n }\n else {\n return bail(e);\n }\n }\n }, {\n retries: this.retryOptions.maxRetries,\n factor: this.retryOptions.retryDelayMultiplier,\n maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds\n maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n this.uri = uri;\n this.offset = 0;\n // emit the newly generated URI for future reuse, if necessary.\n this.emit('uri', uri);\n return uri;\n }\n async continueUploading() {\n var _a;\n (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset());\n return this.startUploading();\n }\n async startUploading() {\n const multiChunkMode = !!this.chunkSize;\n let responseReceived = false;\n this.numChunksReadInRequest = 0;\n if (!this.offset) {\n this.offset = 0;\n }\n // Check if the offset (server) is too far behind the current stream\n if (this.offset < this.numBytesWritten) {\n const delta = this.numBytesWritten - this.offset;\n const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`;\n this.emit('error', new RangeError(message));\n return;\n }\n // Check if we should 'fast-forward' to the relevant data to upload\n if (this.numBytesWritten < this.offset) {\n // 'fast-forward' to the byte where we need to upload.\n // only push data from the byte after the one we left off on\n const fastForwardBytes = this.offset - this.numBytesWritten;\n for await (const _chunk of this.upstreamIterator(fastForwardBytes)) {\n _chunk; // discard the data up until the point we want\n }\n this.numBytesWritten = this.offset;\n }\n let expectedUploadSize = undefined;\n // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available\n if (typeof this.contentLength === 'number') {\n expectedUploadSize = this.contentLength - this.numBytesWritten;\n }\n // `expectedUploadSize` should be no more than the `chunkSize`.\n // It's possible this is the last chunk request for a multiple\n // chunk upload, thus smaller than the chunk size.\n if (this.chunkSize) {\n expectedUploadSize = expectedUploadSize\n ? Math.min(this.chunkSize, expectedUploadSize)\n : this.chunkSize;\n }\n // A queue for the upstream data\n const upstreamQueue = this.upstreamIterator(expectedUploadSize);\n // The primary read stream for this request. 
This stream retrieves no more\n // than the exact requested amount from upstream.\n const requestStream = new stream_1.Readable({\n read: async () => {\n // Don't attempt to retrieve data upstream if we already have a response\n if (responseReceived)\n requestStream.push(null);\n const result = await upstreamQueue.next();\n if (result.value) {\n this.numChunksReadInRequest++;\n if (multiChunkMode) {\n // save ever buffer used in the request in multi-chunk mode\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_addLocalBufferCache).call(this, result.value);\n }\n else {\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_addLocalBufferCache).call(this, result.value);\n }\n this.numBytesWritten += result.value.byteLength;\n this.emit('progress', {\n bytesWritten: this.numBytesWritten,\n contentLength: this.contentLength,\n });\n requestStream.push(result.value);\n }\n if (result.done) {\n requestStream.push(null);\n }\n },\n });\n let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`;\n if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")) {\n googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")}`;\n }\n const headers = {\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': googAPIClient,\n };\n // If using multiple chunk upload, set appropriate header\n if (multiChunkMode) {\n // We need to know how much data is available upstream to set the `Content-Range` header.\n // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n for await (const chunk of this.upstreamIterator(expectedUploadSize)) {\n // This will conveniently track and keep the size of the buffers.\n // We will reach either the expected upload size or the remainder of the stream.\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_addLocalBufferCache).call(this, chunk);\n }\n // This is the sum from the `#addLocalBufferCache` calls\n const bytesToUpload = this.localWriteCacheByteLength;\n // Important: we want to know if the upstream has ended and the queue is empty before\n // unshifting data back into the queue. 
This way we will know if this is the last request or not.\n const isLastChunkOfUpload = !(await this.waitForNextChunk());\n // Important: put the data back in the queue for the actual upload\n this.prependLocalBufferToUpstream();\n let totalObjectSize = this.contentLength;\n if (typeof this.contentLength !== 'number' &&\n isLastChunkOfUpload &&\n !this.isPartialUpload) {\n // Let's let the server know this is the last chunk of the object since we didn't set it before.\n totalObjectSize = bytesToUpload + this.numBytesWritten;\n }\n // `- 1` as the ending byte is inclusive in the request.\n const endingByte = bytesToUpload + this.numBytesWritten - 1;\n // `Content-Length` for multiple chunk uploads is the size of the chunk,\n // not the overall object\n headers['Content-Length'] = bytesToUpload;\n headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`;\n }\n else {\n headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`;\n }\n const reqOpts = {\n method: 'PUT',\n url: this.uri,\n headers,\n body: requestStream,\n };\n try {\n const resp = await this.makeRequestStream(reqOpts);\n if (resp) {\n responseReceived = true;\n await this.responseHandler(resp);\n }\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n // Process the API response to look for errors that came in\n // the response body.\n async responseHandler(resp) {\n if (resp.data.error) {\n this.destroy(resp.data.error);\n return;\n }\n // At this point we can safely create a new id for the chunk\n this.currentInvocationId.chunk = uuid.v4();\n const moreDataToUpload = await this.waitForNextChunk();\n const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&\n resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&\n resp.headers.range &&\n moreDataToUpload;\n /**\n * This is true when we're expecting to upload more data in a future request,\n * yet the upstream for the upload session has been exhausted.\n */\n const shouldContinueUploadInAnotherRequest = this.isPartialUpload &&\n resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&\n !moreDataToUpload;\n if (shouldContinueWithNextMultiChunkRequest) {\n // Use the upper value in this header to determine where to start the next chunk.\n // We should not assume that the server received all bytes sent in the request.\n // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const range = resp.headers.range;\n this.offset = Number(range.split('-')[1]) + 1;\n // We should not assume that the server received all bytes sent in the request.\n // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const missingBytes = this.numBytesWritten - this.offset;\n if (missingBytes) {\n // As multi-chunk uploads send one chunk per request and pulls one\n // chunk into the pipeline, prepending the missing bytes back should\n // be fine for the next request.\n this.prependLocalBufferToUpstream(missingBytes);\n this.numBytesWritten -= missingBytes;\n }\n else {\n // No bytes missing - no need to keep the local cache\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n }\n // continue uploading next chunk\n this.continueUploading();\n }\n else if (!this.isSuccessfulResponse(resp.status) &&\n !shouldContinueUploadInAnotherRequest) {\n const err = new Error('Upload failed');\n err.code = resp.status;\n 
err.name = 'Upload failed';\n if (resp === null || resp === void 0 ? void 0 : resp.data) {\n err.errors = [resp === null || resp === void 0 ? void 0 : resp.data];\n }\n this.destroy(err);\n }\n else {\n // no need to keep the cache\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n if (resp && resp.data) {\n resp.data.size = Number(resp.data.size);\n }\n this.emit('metadata', resp.data);\n // Allow the object (Upload) to continue naturally so the user's\n // \"finish\" event fires.\n this.emit('uploadFinished');\n }\n }\n /**\n * Check the status of an existing resumable upload.\n *\n * @param cfg A configuration to use. `uri` is required.\n * @returns the current upload status\n */\n async checkUploadStatus(config = {}) {\n let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`;\n if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")) {\n googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")}`;\n }\n const opts = {\n method: 'PUT',\n url: this.uri,\n headers: {\n 'Content-Length': 0,\n 'Content-Range': 'bytes */*',\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': googAPIClient,\n },\n };\n try {\n const resp = await this.makeRequest(opts);\n // Successfully got the offset we can now create a new offset invocation id\n this.currentInvocationId.checkUploadStatus = uuid.v4();\n return resp;\n }\n catch (e) {\n if (config.retry === false ||\n !(e instanceof Error) ||\n !this.retryOptions.retryableErrorFn(e)) {\n throw e;\n }\n const retryDelay = this.getRetryDelay();\n if (retryDelay <= 0) {\n throw e;\n }\n await new Promise(res => setTimeout(res, retryDelay));\n return this.checkUploadStatus(config);\n }\n }\n async getAndSetOffset() {\n try {\n // we want to handle retries in this method.\n const resp = await this.checkUploadStatus({ retry: false });\n if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {\n if (typeof resp.headers.range === 'string') {\n this.offset = Number(resp.headers.range.split('-')[1]) + 1;\n return;\n }\n }\n this.offset = 0;\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n async makeRequest(reqOpts) {\n if (this.encryption) {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString();\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryption.hash.toString();\n }\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n // Let gaxios know we will handle a 308 error code ourselves.\n reqOpts.validateStatus = (status) => {\n return (this.isSuccessfulResponse(status) ||\n status === RESUMABLE_INCOMPLETE_STATUS_CODE);\n };\n const combinedReqOpts = {\n ...this.customRequestOptions,\n ...reqOpts,\n headers: {\n ...this.customRequestOptions.headers,\n ...reqOpts.headers,\n },\n };\n const res = await this.authClient.request(combinedReqOpts);\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n return res;\n }\n async makeRequestStream(reqOpts) {\n const controller = new abort_controller_1.default();\n const errorCallback = () => controller.abort();\n 
this.once('error', errorCallback);\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n reqOpts.signal = controller.signal;\n reqOpts.validateStatus = () => true;\n const combinedReqOpts = {\n ...this.customRequestOptions,\n ...reqOpts,\n headers: {\n ...this.customRequestOptions.headers,\n ...reqOpts.headers,\n },\n };\n const res = await this.authClient.request(combinedReqOpts);\n const successfulRequest = this.onResponse(res);\n this.removeListener('error', errorCallback);\n return successfulRequest ? res : null;\n }\n /**\n * @return {bool} is the request good?\n */\n onResponse(resp) {\n if (resp.status !== 200 &&\n this.retryOptions.retryableErrorFn({\n code: resp.status,\n message: resp.statusText,\n name: resp.statusText,\n })) {\n this.attemptDelayedRetry(resp);\n return false;\n }\n this.emit('response', resp);\n return true;\n }\n /**\n * @param resp GaxiosResponse object from previous attempt\n */\n attemptDelayedRetry(resp) {\n if (this.numRetries < this.retryOptions.maxRetries) {\n if (resp.status === NOT_FOUND_STATUS_CODE &&\n this.numChunksReadInRequest === 0) {\n this.startUploading();\n }\n else {\n const retryDelay = this.getRetryDelay();\n if (retryDelay <= 0) {\n this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`));\n return;\n }\n // Unshift the local cache back in case it's needed for the next request.\n this.numBytesWritten -= this.localWriteCacheByteLength;\n this.prependLocalBufferToUpstream();\n // We don't know how much data has been received by the server.\n // `continueUploading` will recheck the offset via `getAndSetOffset`.\n // If `offset` < `numberBytesReceived` then we will raise a RangeError\n // as we've streamed too much data that has been missed - this should\n // not be the case for multi-chunk uploads as `lastChunkSent` is the\n // body of the entire request.\n this.offset = undefined;\n setTimeout(this.continueUploading.bind(this), retryDelay);\n }\n this.numRetries++;\n }\n else {\n this.destroy(new Error('Retry limit exceeded - ' + resp.data));\n }\n }\n /**\n * The amount of time to wait before retrying the request, in milliseconds.\n * If negative, do not retry.\n *\n * @returns the amount of time to wait, in milliseconds.\n */\n getRetryDelay() {\n const randomMs = Math.round(Math.random() * 1000);\n const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) *\n 1000 +\n randomMs;\n const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 -\n (Date.now() - this.timeOfFirstRequest);\n const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000;\n return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs);\n }\n /*\n * Prepare user-defined API endpoint for compatibility with our API.\n */\n sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Check if a given status code is 2xx\n *\n * @param status The status code to check\n * @returns if the status is 2xx\n */\n isSuccessfulResponse(status) {\n return status >= 200 && status < 300;\n }\n}\nexports.Upload = Upload;\n_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() {\n this.localWriteCache = [];\n this.localWriteCacheByteLength = 0;\n}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) {\n this.localWriteCache.push(buf);\n 
this.localWriteCacheByteLength += buf.byteLength;\n};\nfunction upload(cfg) {\n return new Upload(cfg);\n}\nexports.upload = upload;\nfunction createURI(cfg, callback) {\n const up = new Upload(cfg);\n if (!callback) {\n return up.createURI();\n }\n up.createURI().then(r => callback(null, r), callback);\n}\nexports.createURI = createURI;\n/**\n * Check the status of an existing resumable upload.\n *\n * @param cfg A configuration to use. `uri` is required.\n * @returns the current upload status\n */\nfunction checkUploadStatus(cfg) {\n const up = new Upload(cfg);\n return up.checkUploadStatus();\n}\nexports.checkUploadStatus = checkUploadStatus;\n","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0;\nconst crypto = __importStar(require(\"crypto\"));\nconst url = __importStar(require(\"url\"));\nconst storage_js_1 = require(\"./storage.js\");\nconst util_js_1 = require(\"./util.js\");\nvar SignerExceptionMessages;\n(function (SignerExceptionMessages) {\n SignerExceptionMessages[\"ACCESSIBLE_DATE_INVALID\"] = \"The accessible at date provided was invalid.\";\n SignerExceptionMessages[\"EXPIRATION_BEFORE_ACCESSIBLE_DATE\"] = \"An expiration date cannot be before accessible date.\";\n SignerExceptionMessages[\"X_GOOG_CONTENT_SHA256\"] = \"The header X-Goog-Content-SHA256 must be a hexadecimal string.\";\n})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {}));\n/*\n * Default signing version for getSignedUrl is 'v2'.\n */\nconst DEFAULT_SIGNING_VERSION = 'v2';\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\n/**\n * @const {string}\n * @private\n */\nexports.PATH_STYLED_HOST = 'https://storage.googleapis.com';\nclass URLSigner {\n constructor(authClient, bucket, file) {\n this.bucket = bucket;\n this.file = file;\n this.authClient = authClient;\n }\n getSignedUrl(cfg) {\n const expiresInSeconds = this.parseExpires(cfg.expires);\n const method = cfg.method;\n const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt);\n if (expiresInSeconds < accessibleAtInSeconds) {\n throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE);\n }\n let customHost;\n // Default style is `path`.\n const isVirtualHostedStyle = cfg.virtualHostedStyle || false;\n if (cfg.cname) {\n customHost = cfg.cname;\n }\n else if (isVirtualHostedStyle) {\n customHost = `https://${this.bucket.name}.storage.googleapis.com`;\n }\n const secondsToMilliseconds = 1000;\n const config = Object.assign({}, cfg, {\n method,\n expiration: expiresInSeconds,\n accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds),\n bucket: this.bucket.name,\n file: this.file ? (0, util_js_1.encodeURI)(this.file.name, false) : undefined,\n });\n if (customHost) {\n config.cname = customHost;\n }\n const version = cfg.version || DEFAULT_SIGNING_VERSION;\n let promise;\n if (version === 'v2') {\n promise = this.getSignedUrlV2(config);\n }\n else if (version === 'v4') {\n promise = this.getSignedUrlV4(config);\n }\n else {\n throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`);\n }\n return promise.then(query => {\n query = Object.assign(query, cfg.queryParams);\n const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n signedUrl.search = (0, util_js_1.qsStringify)(query);\n return signedUrl.href;\n });\n }\n getSignedUrlV2(config) {\n const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {});\n const resourcePath = this.getResourcePath(false, config.bucket, config.file);\n const blobToSign = [\n config.method,\n config.contentMd5 || '',\n config.contentType || '',\n config.expiration,\n canonicalHeadersString + resourcePath,\n ].join('\\n');\n const sign = async () => {\n const authClient = this.authClient;\n try {\n const signature = await authClient.sign(blobToSign);\n const credentials = await authClient.getCredentials();\n return {\n GoogleAccessId: credentials.client_email,\n Expires: config.expiration,\n Signature: signature,\n };\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n getSignedUrlV4(config) {\n config.accessibleAt = config.accessibleAt\n ? config.accessibleAt\n : new Date();\n const millisecondsToSeconds = 1.0 / 1000.0;\n const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds;\n // v4 limit expiration to be 7 days maximum\n if (expiresPeriodInSeconds > SEVEN_DAYS) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n const extensionHeaders = Object.assign({}, config.extensionHeaders);\n const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n extensionHeaders.host = fqdn.host;\n if (config.contentMd5) {\n extensionHeaders['content-md5'] = config.contentMd5;\n }\n if (config.contentType) {\n extensionHeaders['content-type'] = config.contentType;\n }\n let contentSha256;\n const sha256Header = extensionHeaders['x-goog-content-sha256'];\n if (sha256Header) {\n if (typeof sha256Header !== 'string' ||\n !/[A-Fa-f0-9]{40}/.test(sha256Header)) {\n throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256);\n }\n contentSha256 = sha256Header;\n }\n const signedHeaders = Object.keys(extensionHeaders)\n .map(header => header.toLowerCase())\n .sort()\n .join(';');\n const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders);\n const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt);\n const credentialScope = `${datestamp}/auto/storage/goog4_request`;\n const sign = async () => {\n const credentials = await this.authClient.getCredentials();\n const credential = `${credentials.client_email}/${credentialScope}`;\n const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true);\n const queryParams = {\n 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256',\n 'X-Goog-Credential': credential,\n 'X-Goog-Date': dateISO,\n 'X-Goog-Expires': expiresPeriodInSeconds.toString(10),\n 'X-Goog-SignedHeaders': signedHeaders,\n ...(config.queryParams || {}),\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const canonicalQueryParams = this.getCanonicalQueryParams(queryParams);\n const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256);\n const hash = crypto\n .createHash('sha256')\n .update(canonicalRequest)\n .digest('hex');\n const blobToSign = [\n 'GOOG4-RSA-SHA256',\n dateISO,\n credentialScope,\n hash,\n ].join('\\n');\n try {\n const signature = await this.authClient.sign(blobToSign);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n const signedQuery = Object.assign({}, queryParams, {\n 'X-Goog-Signature': signatureHex,\n });\n return signedQuery;\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n /**\n * Create canonical headers for signing v4 url.\n *\n * The canonical headers for v4-signing a request demands header names are\n * first lowercased, followed by sorting the header names.\n * Then, construct the canonical headers part of the request:\n * + \":\" + Trim() + \"\\n\"\n * ..\n * + \":\" + Trim() + \"\\n\"\n *\n * @param headers\n * @private\n */\n getCanonicalHeaders(headers) {\n // Sort headers by their lowercased names\n const sortedHeaders = (0, util_js_1.objectEntries)(headers)\n // Convert header names to lowercase\n .map(([headerName, value]) => [\n headerName.toLowerCase(),\n value,\n ])\n .sort((a, b) => a[0].localeCompare(b[0]));\n return sortedHeaders\n .filter(([, value]) => value !== undefined)\n .map(([headerName, value]) => {\n // - Convert Array (multi-valued header) into string, delimited by\n // ',' (no space).\n // - Trim leading and trailing spaces.\n // - Convert sequential (2+) spaces into a single space\n const canonicalValue = `${value}`.trim().replace(/\\s{2,}/g, ' ');\n return `${headerName}:${canonicalValue}\\n`;\n })\n .join('');\n }\n getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) {\n return [\n method,\n path,\n query,\n headers,\n signedHeaders,\n contentSha256 || 'UNSIGNED-PAYLOAD',\n ].join('\\n');\n }\n getCanonicalQueryParams(query) {\n return (0, util_js_1.objectEntries)(query)\n .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)])\n .sort((a, b) => (a[0] < b[0] ? 
-1 : 1))\n .map(([key, value]) => `${key}=${value}`)\n .join('&');\n }\n getResourcePath(cname, bucket, file) {\n if (cname) {\n return '/' + (file || '');\n }\n else if (file) {\n return `/${bucket}/${file}`;\n }\n else {\n return `/${bucket}`;\n }\n }\n parseExpires(expires, current = new Date()) {\n const expiresInMSeconds = new Date(expires).valueOf();\n if (isNaN(expiresInMSeconds)) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expiresInMSeconds < current.valueOf()) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n return Math.floor(expiresInMSeconds / 1000); // The API expects seconds.\n }\n parseAccessibleAt(accessibleAt) {\n const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf();\n if (isNaN(accessibleAtInMSeconds)) {\n throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID);\n }\n return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds.\n }\n}\nexports.URLSigner = URLSigner;\n/**\n * Custom error type for errors related to getting signed errors and policies.\n *\n * @private\n */\nclass SigningError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'SigningError';\n }\n}\nexports.SigningError = SigningError;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst stream_1 = require(\"stream\");\nconst bucket_js_1 = require(\"./bucket.js\");\nconst channel_js_1 = require(\"./channel.js\");\nconst file_js_1 = require(\"./file.js\");\nconst util_js_1 = require(\"./util.js\");\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\nconst hmacKey_js_1 = require(\"./hmacKey.js\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nvar IdempotencyStrategy;\n(function (IdempotencyStrategy) {\n IdempotencyStrategy[IdempotencyStrategy[\"RetryAlways\"] = 0] = \"RetryAlways\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryConditional\"] = 1] = \"RetryConditional\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryNever\"] = 2] = \"RetryNever\";\n})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {}));\nvar ExceptionMessages;\n(function (ExceptionMessages) {\n ExceptionMessages[\"EXPIRATION_DATE_INVALID\"] = \"The expiration date provided was invalid.\";\n 
ExceptionMessages[\"EXPIRATION_DATE_PAST\"] = \"An expiration date cannot be in the past.\";\n})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {}));\nvar StorageExceptionMessages;\n(function (StorageExceptionMessages) {\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED\"] = \"A bucket name is needed to use Cloud Storage.\";\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED_CREATE\"] = \"A name is required to create a bucket.\";\n StorageExceptionMessages[\"HMAC_SERVICE_ACCOUNT\"] = \"The first argument must be a service account email to create an HMAC key.\";\n StorageExceptionMessages[\"HMAC_ACCESS_ID\"] = \"An access ID is needed to create an HmacKey object.\";\n})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {}));\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n */\nexports.AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DEFAULT = 3;\n/**\n * Default behavior: Wait twice as long as previous retry before retrying.\n *\n * @const {number}\n */\nexports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2;\n/**\n * Default behavior: If the operation doesn't succeed after 600 seconds,\n * stop retrying.\n *\n * @const {number}\n */\nexports.TOTAL_TIMEOUT_DEFAULT = 600;\n/**\n * Default behavior: Wait no more than 64 seconds between retries.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DELAY_DEFAULT = 64;\n/**\n * Default behavior: Retry conditionally idempotent operations if correct preconditions are set.\n *\n * @const {enum}\n * @private\n */\nconst IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional;\n/**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted.\n * @const\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\nconst RETRYABLE_ERR_FN_DEFAULT = function (err) {\n var _a;\n const isConnectionProblem = (reason) => {\n return (reason.includes('eai_again') || // DNS lookup error\n reason === 'econnreset' ||\n reason === 'unexpected connection closure' ||\n reason === 'epipe' ||\n reason === 'socket connection timeout');\n };\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (typeof err.code === 'string') {\n if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) {\n return true;\n }\n const reason = err.code.toLowerCase();\n if (isConnectionProblem(reason)) {\n return true;\n }\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase();\n if (reason && isConnectionProblem(reason)) {\n return true;\n }\n }\n }\n }\n return false;\n};\nexports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT;\n/*! Developer Documentation\n *\n * Invoke this method to create a new Storage object bound with pre-determined\n * configuration options. For each object that can be created (e.g., a bucket),\n * there is an equivalent static and instance method. While they are classes,\n * they can be instantiated without use of the `new` keyword.\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. 
ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * This object provides constants to refer to the three permission levels that\n * can be granted to an entity:\n *\n * - `gcs.acl.OWNER_ROLE` - (\"OWNER\")\n * - `gcs.acl.READER_ROLE` - (\"READER\")\n * - `gcs.acl.WRITER_ROLE` - (\"WRITER\")\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists}\n *\n * @name Storage#acl\n * @type {object}\n * @property {string} OWNER_ROLE\n * @property {string} READER_ROLE\n * @property {string} WRITER_ROLE\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n *\n * //-\n * // Make all of the files currently in a bucket publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * albums.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // Make any new objects added to a bucket publicly readable.\n * //-\n * albums.acl.default.add(options, function(err, aclObject) {});\n *\n * //-\n * // Grant a user ownership permissions to a bucket.\n * //-\n * albums.acl.add({\n * entity: 'user-useremail@example.com',\n * role: storage.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * albums.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n/**\n * Get {@link Bucket} objects for all of the buckets in your project as\n * a readable object stream.\n *\n * @method Storage#getBucketsStream\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @returns {ReadableStream} A readable stream that emits {@link Bucket}\n * instances.\n *\n * @example\n * ```\n * storage.getBucketsStream()\n * .on('error', console.error)\n * .on('data', function(bucket) {\n * // bucket is a Bucket object.\n * })\n * .on('end', function() {\n * // All buckets retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getBucketsStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n * Get {@link HmacKey} objects for all of the HMAC keys in the project in a\n * readable object stream.\n *\n * @method Storage#getHmacKeysStream\n * @param {GetHmacKeysOptions} [options] Configuration options.\n * @returns {ReadableStream} A readable stream that emits {@link HmacKey}\n * instances.\n *\n * @example\n * ```\n * storage.getHmacKeysStream()\n * .on('error', console.error)\n * .on('data', function(hmacKey) {\n * // hmacKey is an HmacKey object.\n * })\n * .on('end', function() {\n * // All HmacKey retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getHmacKeysStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n *
<h4>ACLs</h4>
\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share files with other users\n * and allow other users to access your buckets and files.\n *\n * To learn more about ACLs, read this overview on\n * {@link https://cloud.google.com/storage/docs/access-control| Access Control}.\n *\n * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview}\n * See {@link https://cloud.google.com/storage/docs/access-control| Access Control}\n *\n * @class\n */\nclass Storage extends index_js_1.Service {\n getBucketsStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n getHmacKeysStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} StorageOptions\n * @property {string} [projectId] The project ID from the Google Developer's\n * Console, e.g. 'grape-spaceship-123'. We will also check the environment\n * variable `GCLOUD_PROJECT` for your project ID. If your app is running\n * in an environment which supports {@link\n * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application\n * Application Default Credentials}, your project ID will be detected\n * automatically.\n * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key\n * downloaded from the Google Developers Console. If you provide a path to\n * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and\n * .p12 require you to specify the `email` option as well.\n * @property {string} [email] Account email address. Required when using a .pem\n * or .p12 keyFilename.\n * @property {object} [credentials] Credentials object.\n * @property {string} [credentials.client_email]\n * @property {string} [credentials.private_key]\n * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors\n * will be retried with exponential delay between them dictated by the formula\n * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout\n * has been reached. Retries will only happen if autoRetry is set to true.\n * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to\n * increase the delay time between the completion of failed requests, and the\n * initiation of the subsequent retrying request.\n * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from\n * when the initial request is sent, after which an error will\n * be returned, regardless of the retrying attempts made meanwhile.\n * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests.\n * When this value is reached, ``retryDelayMultiplier`` will no longer be used to\n * increase delay time.\n * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries\n * attempted before returning the error.\n * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given\n * error should be retried and false otherwise.\n * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration\n * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways -\n * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional -\n * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never\n * retry a conditionally idempotent operation.\n * @property {string} [userAgent] The value to be prepended to the User-Agent\n * header in API requests.\n * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one.\n * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out.\n * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned.\n * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests.\n * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. 
Defaults to {@link CRC32C}\n */\n /**\n * Constructs the Storage client.\n *\n * @example\n * Create a client that uses Application Default Credentials\n * (ADC)\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * ```\n *\n * @example\n * Create a client with explicit credentials\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * keyFilename: '/path/to/keyfile.json'\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by value as a JavaScript object\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: {\n * type: 'service_account',\n * project_id: 'xxxxxxx',\n * private_key_id: 'xxxx',\n * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\\n-----END PRIVATE KEY-----\\n',\n * client_email: 'xxxx',\n * client_id: 'xxx',\n * auth_uri: 'https://accounts.google.com/o/oauth2/auth',\n * token_uri: 'https://oauth2.googleapis.com/token',\n * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs',\n * client_x509_cert_url: 'xxx',\n * }\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by loading a JSON file directly from disk\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: require('/path/to-keyfile.json')\n * });\n * ```\n *\n * @example\n * Create a client with an `AuthClient` (e.g. `DownscopedClient`)\n * ```\n * const {DownscopedClient} = require('google-auth-library');\n * const authClient = new DownscopedClient({...});\n *\n * const storage = new Storage({authClient});\n * ```\n *\n * Additional samples:\n * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1\n * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js\n *\n * @param {StorageOptions} [options] Configuration options.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p;\n let apiEndpoint = 'https://storage.googleapis.com';\n let customEndpoint = false;\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST;\n if (typeof EMULATOR_HOST === 'string') {\n apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST);\n customEndpoint = true;\n }\n if (options.apiEndpoint) {\n apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint);\n customEndpoint = true;\n }\n options = Object.assign({}, options, { apiEndpoint });\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`;\n const config = {\n apiEndpoint: options.apiEndpoint,\n retryOptions: {\n autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined\n ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry\n : exports.AUTO_RETRY_DEFAULT,\n maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)\n ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries\n : exports.MAX_RETRY_DEFAULT,\n retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier)\n ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier\n : exports.RETRY_DELAY_MULTIPLIER_DEFAULT,\n totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout)\n ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout\n : exports.TOTAL_TIMEOUT_DEFAULT,\n maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay)\n ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay\n : exports.MAX_RETRY_DELAY_DEFAULT,\n retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn)\n ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn\n : exports.RETRYABLE_ERR_FN_DEFAULT,\n idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined\n ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy\n : IDEMPOTENCY_STRATEGY_DEFAULT,\n },\n baseUrl,\n customEndpoint,\n useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint,\n projectIdRequired: false,\n scopes: [\n 'https://www.googleapis.com/auth/iam',\n 'https://www.googleapis.com/auth/cloud-platform',\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ],\n packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(),\n };\n super(config, options);\n /**\n * Reference to {@link Storage.acl}.\n *\n * @name Storage#acl\n * @see Storage.acl\n */\n this.acl = Storage.acl;\n this.crc32cGenerator =\n options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n this.retryOptions = config.retryOptions;\n this.getBucketsStream = paginator_1.paginator.streamify('getBuckets');\n this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys');\n }\n static sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Get a reference to a Cloud Storage bucket.\n *\n * @param {string} name Name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to\n * encrypt objects inserted into this bucket, if no encryption method is\n * specified.\n * @param {string} [options.userProject] User project to be billed for all\n * requests made from this Bucket object.\n * @returns {Bucket}\n * @see Bucket\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n * const photos = storage.bucket('photos');\n * ```\n */\n bucket(name, options) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED);\n }\n return new bucket_js_1.Bucket(this, name, options);\n }\n /**\n * Reference a channel to receive notifications about changes to your bucket.\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n * @returns {Channel}\n * @see Channel\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\n channel(id, resourceId) {\n return new channel_js_1.Channel(this, id, resourceId);\n }\n /**\n * @typedef {array} CreateBucketResponse\n * @property {Bucket} 0 The new {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The new {@link 
Bucket}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Metadata to set for the bucket.\n *\n * @typedef {object} CreateBucketRequest\n * @property {boolean} [archive=false] Specify the storage class as Archive.\n * @property {object} [autoclass.enabled=false] Specify whether Autoclass is\n * enabled for the bucket.\n * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if\n * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE.\n * @property {boolean} [coldline=false] Specify the storage class as Coldline.\n * @property {Cors[]} [cors=[]] Specify the CORS configuration to use.\n * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [dra=false] Specify the storage class as Durable Reduced\n * Availability.\n * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket.\n * @property {string} [location] Specify the bucket's location. If specifying\n * a dual-region, the `customPlacementConfig` property should be set in conjunction.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [multiRegional=false] Specify the storage class as\n * Multi-Regional.\n * @property {boolean} [nearline=false] Specify the storage class as Nearline.\n * @property {boolean} [regional=false] Specify the storage class as Regional.\n * @property {boolean} [requesterPays=false] **Early Access Testers Only**\n * Force the use of the User Project metadata field to assign operational\n * costs when an operation is made on a Bucket and its objects.\n * @property {string} [rpo] For dual-region buckets, controls whether turbo\n * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`).\n * @property {boolean} [standard=true] Specify the storage class as Standard.\n * @property {string} [storageClass] The new storage class. (`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @property {Versioning} [versioning=undefined] Specify the versioning status.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * Create a bucket.\n *\n * Cloud Storage uses a flat namespace, so you can't create a bucket with\n * a name that is already in use. 
For more information, see\n * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} name Name of the bucket to create.\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a name is not provided.\n * @see Bucket#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const callback = function(err, bucket, apiResponse) {\n * // `bucket` is a Bucket object.\n * };\n *\n * storage.createBucket('new-bucket', callback);\n *\n * //-\n * // Create a bucket in a specific location and region. See the \n * // Official JSON API docs for complete details on the `location`\n * option.\n * // \n * //-\n * const metadata = {\n * location: 'US-CENTRAL1',\n * regional: true\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Create a bucket with a retention policy of 6 months.\n * //-\n * const metadata = {\n * retentionPolicy: {\n * retentionPeriod: 15780000 // 6 months in seconds.\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Enable versioning on a new bucket.\n * //-\n * const metadata = {\n * versioning: {\n * enabled: true\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createBucket('new-bucket').then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_create_bucket\n * Another example:\n */\n createBucket(name, metadataOrCallback, callback) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE);\n }\n let metadata;\n if (!callback) {\n callback = metadataOrCallback;\n metadata = {};\n }\n else {\n metadata = metadataOrCallback;\n }\n const body = {\n ...metadata,\n name,\n };\n const storageClasses = {\n archive: 'ARCHIVE',\n coldline: 'COLDLINE',\n dra: 'DURABLE_REDUCED_AVAILABILITY',\n multiRegional: 'MULTI_REGIONAL',\n nearline: 'NEARLINE',\n regional: 'REGIONAL',\n standard: 'STANDARD',\n };\n const storageClassKeys = Object.keys(storageClasses);\n for (const storageClass of storageClassKeys) {\n if (body[storageClass]) {\n if (metadata.storageClass && metadata.storageClass !== storageClass) {\n throw new Error(`Both \\`${storageClass}\\` and \\`storageClass\\` were provided.`);\n }\n body.storageClass = storageClasses[storageClass];\n delete body[storageClass];\n }\n }\n if (body.requesterPays) {\n body.billing = {\n requesterPays: body.requesterPays,\n };\n delete body.requesterPays;\n }\n const query = {\n project: this.projectId,\n };\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n if (body.enableObjectRetention) {\n query.enableObjectRetention = body.enableObjectRetention;\n delete body.enableObjectRetention;\n }\n this.request({\n method: 'POST',\n uri: '/b',\n qs: query,\n json: body,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const bucket = this.bucket(name);\n bucket.metadata = 
resp;\n callback(null, bucket, resp);\n });\n }\n /**\n * @typedef {object} CreateHmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. If not provided,\n * the project ID used to instantiate the Storage client will be used.\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {object} HmacKeyMetadata\n * @property {string} accessId The access id identifies which HMAC key was\n * used to sign a request when authenticating with HMAC.\n * @property {string} etag Used to perform a read-modify-write of the key.\n * @property {string} id The resource name of the HMAC key.\n * @property {string} projectId The project ID.\n * @property {string} serviceAccountEmail The service account's email this\n * HMAC key is created for.\n * @property {string} state The state of this HMAC key. One of \"ACTIVE\",\n * \"INACTIVE\" or \"DELETED\".\n * @property {string} timeCreated The creation time of the HMAC key in\n * RFC 3339 format.\n * @property {string} [updated] The time this HMAC key was last updated in\n * RFC 3339 format.\n */\n /**\n * @typedef {array} CreateHmacKeyResponse\n * @property {HmacKey} 0 The HmacKey instance created from API response.\n * @property {string} 1 The HMAC key's secret used to access the XML API.\n * @property {object} 3 The raw API response.\n */\n /**\n * @callback CreateHmacKeyCallback Callback function.\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey The HmacKey instance created from API response.\n * @param {string} secret The HMAC key's secret used to access the XML API.\n * @param {object} apiResponse The raw API response.\n */\n /**\n * Create an HMAC key associated with an service account to authenticate\n * requests to the Cloud Storage XML API.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @param {string} serviceAccountEmail The service account's email address\n * with which the HMAC key is created for.\n * @param {CreateHmacKeyCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('google-cloud/storage');\n * const storage = new Storage();\n *\n * // Replace with your service account's email address\n * const serviceAccountEmail =\n * 'my-service-account@appspot.gserviceaccount.com';\n *\n * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) {\n * if (!err) {\n * // Securely store the secret for use with the XML API.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createHmacKey(serviceAccountEmail)\n * .then((response) => {\n * const hmacKey = response[0];\n * const secret = response[1];\n * // Securely store the secret for use with the XML API.\n * });\n * ```\n */\n createHmacKey(serviceAccountEmail, optionsOrCb, cb) {\n if (typeof serviceAccountEmail !== 'string') {\n throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT);\n }\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options, { serviceAccountEmail });\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n method: 'POST',\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n 
return;\n }\n const metadata = resp.metadata;\n const hmacKey = this.hmacKey(metadata.accessId, {\n projectId: metadata.projectId,\n });\n hmacKey.metadata = resp.metadata;\n callback(null, hmacKey, resp.secret, resp);\n });\n }\n /**\n * Query object for listing buckets.\n *\n * @typedef {object} GetBucketsRequest\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * @typedef {array} GetBucketsResponse\n * @property {Bucket[]} 0 Array of {@link Bucket} instances.\n * @property {object} 1 nextQuery A query object to receive more results.\n * @property {object} 2 The full API response.\n */\n /**\n * @callback GetBucketsCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket[]} buckets Array of {@link Bucket} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get Bucket objects for all of the buckets in your project.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation}\n *\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @param {GetBucketsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * storage.getBuckets(function(err, buckets) {\n * if (!err) {\n * // buckets is an array of Bucket objects.\n * }\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, buckets, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * storage.getBuckets(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * buckets[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * buckets[0].getMetadata(function(err, metadata, apiResponse) {});\n * };\n *\n * storage.getBuckets({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getBuckets().then(function(data) {\n * const buckets = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_list_buckets\n * Another example:\n */\n getBuckets(optionsOrCallback, cb) {\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb);\n options.project = options.project || this.projectId;\n this.request({\n uri: '/b',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? 
resp.items : [];\n const buckets = itemsArray.map((bucket) => {\n const bucketInstance = this.bucket(bucket.id);\n bucketInstance.metadata = bucket;\n return bucketInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, buckets, nextQuery, resp);\n });\n }\n getHmacKeys(optionsOrCb, cb) {\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options);\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const hmacKeys = itemsArray.map((hmacKey) => {\n const hmacKeyInstance = this.hmacKey(hmacKey.accessId, {\n projectId: hmacKey.projectId,\n });\n hmacKeyInstance.metadata = hmacKey;\n return hmacKeyInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, hmacKeys, nextQuery, resp);\n });\n }\n /**\n * @typedef {array} GetServiceAccountResponse\n * @property {object} 0 The service account resource.\n * @property {object} 1 The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * @callback GetServiceAccountCallback\n * @param {?Error} err Request error, if any.\n * @param {object} serviceAccount The serviceAccount resource.\n * @param {string} serviceAccount.emailAddress The service account email\n * address.\n * @param {object} apiResponse The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * Get the email address of this project's Google Cloud Storage service\n * account.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource}\n *\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project to be billed for this\n * request.\n * @param {GetServiceAccountCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * storage.getServiceAccount(function(err, serviceAccount, apiResponse) {\n * if (!err) {\n * const serviceAccountEmail = serviceAccount.emailAddress;\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getServiceAccount().then(function(data) {\n * const serviceAccountEmail = data[0].emailAddress;\n * const apiResponse = data[1];\n * });\n * ```\n */\n getServiceAccount(optionsOrCallback, cb) {\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb);\n this.request({\n uri: `/projects/${this.projectId}/serviceAccount`,\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const camelCaseResponse = {};\n for (const prop in resp) {\n // eslint-disable-next-line no-prototype-builtins\n if (resp.hasOwnProperty(prop)) {\n const camelCaseProp = prop.replace(/_(\\w)/g, (_, match) => match.toUpperCase());\n camelCaseResponse[camelCaseProp] = resp[prop];\n }\n }\n 
callback(null, camelCaseResponse, resp);\n });\n }\n /**\n * Get a reference to an HmacKey object.\n * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to\n * retrieve and populate the metadata.\n *\n * To get a reference to an HMAC key that's not created for a service\n * account in the same project used to instantiate the Storage client,\n * supply the project's ID as `projectId` in the `options` argument.\n *\n * @param {string} accessId The HMAC key's access ID.\n * @param {HmacKeyOptions} options HmacKey constructor options.\n * @returns {HmacKey}\n * @see HmacKey\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * ```\n */\n hmacKey(accessId, options) {\n if (!accessId) {\n throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID);\n }\n return new hmacKey_js_1.HmacKey(this, accessId, options);\n }\n}\nexports.Storage = Storage;\n/**\n * {@link Bucket} class.\n *\n * @name Storage.Bucket\n * @see Bucket\n * @type {Constructor}\n */\nStorage.Bucket = bucket_js_1.Bucket;\n/**\n * {@link Channel} class.\n *\n * @name Storage.Channel\n * @see Channel\n * @type {Constructor}\n */\nStorage.Channel = channel_js_1.Channel;\n/**\n * {@link File} class.\n *\n * @name Storage.File\n * @see File\n * @type {Constructor}\n */\nStorage.File = file_js_1.File;\n/**\n * {@link HmacKey} class.\n *\n * @name Storage.HmacKey\n * @see HmacKey\n * @type {Constructor}\n */\nStorage.HmacKey = hmacKey_js_1.HmacKey;\nStorage.acl = {\n OWNER_ROLE: 'OWNER',\n READER_ROLE: 'READER',\n WRITER_ROLE: 'WRITER',\n};\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']);\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Storage, {\n exclude: ['bucket', 'channel', 'hmacKey'],\n});\n","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TransferManager = exports.MultiPartUploadError = void 0;\nconst file_js_1 = require(\"./file.js\");\nconst p_limit_1 = __importDefault(require(\"p-limit\"));\nconst path = __importStar(require(\"path\"));\nconst fs_1 = require(\"fs\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst fast_xml_parser_1 = require(\"fast-xml-parser\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nconst crypto_1 = require(\"crypto\");\nconst util_js_1 = require(\"./nodejs-common/util.js\");\nconst util_js_2 = require(\"./util.js\");\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\nconst packageJson = (0, package_json_helper_cjs_1.getPackageJSON)();\n/**\n * Default number of concurrently executing promises to use when calling uploadManyFiles.\n *\n */\nconst DEFAULT_PARALLEL_UPLOAD_LIMIT = 5;\n/**\n * Default number of concurrently executing promises to use when calling downloadManyFiles.\n *\n */\nconst DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5;\n/**\n * Default number of concurrently executing promises to use when calling downloadFileInChunks.\n *\n */\nconst DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5;\n/**\n * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks.\n *\n */\nconst DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024;\n/**\n * The chunk size in bytes to use when calling downloadFileInChunks.\n *\n */\nconst DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024;\n/**\n * The chunk size in bytes to use when calling uploadFileInChunks.\n *\n */\nconst UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024;\n/**\n * Default number of concurrently executing promises to use when calling uploadFileInChunks.\n *\n */\nconst DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5;\nconst EMPTY_REGEX = '(?:)';\n/**\n * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header.\n * Example: `gccl-gcs-cmd/tm.upload_many`\n *\n * @see {@link GCCL_GCS_CMD}.\n * @see {@link GCCL_GCS_CMD_KEY}.\n */\nconst GCCL_GCS_CMD_FEATURE = {\n UPLOAD_MANY: 'tm.upload_many',\n 
DOWNLOAD_MANY: 'tm.download_many',\n UPLOAD_SHARDED: 'tm.upload_sharded',\n DOWNLOAD_SHARDED: 'tm.download_sharded',\n};\nconst defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => {\n return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap);\n};\nclass MultiPartUploadError extends Error {\n constructor(message, uploadId, partsMap) {\n super(message);\n this.uploadId = uploadId;\n this.partsMap = partsMap;\n }\n}\nexports.MultiPartUploadError = MultiPartUploadError;\n/**\n * Class representing an implementation of MPU in the XML API. This class is not meant for public usage.\n *\n * @private\n *\n */\nclass XMLMultiPartUploadHelper {\n constructor(bucket, fileName, uploadId, partsMap) {\n _XMLMultiPartUploadHelper_instances.add(this);\n this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth();\n this.uploadId = uploadId || '';\n this.bucket = bucket;\n this.fileName = fileName;\n this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`;\n this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' });\n this.xmlParser = new fast_xml_parser_1.XMLParser();\n this.partsMap = partsMap || new Map();\n this.retryOptions = {\n retries: this.bucket.storage.retryOptions.maxRetries,\n factor: this.bucket.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000,\n };\n }\n /**\n * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id.\n *\n * @returns {Promise}\n */\n async initiateUpload(headers = {}) {\n const url = `${this.baseUrl}?uploads`;\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers),\n method: 'POST',\n url,\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n const parsedXML = this.xmlParser.parse(res.data);\n this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId;\n }\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n }\n }, this.retryOptions);\n }\n /**\n * Uploads the provided chunk of data to the XML API using the previously created upload id.\n *\n * @param {number} partNumber the sequence number of this chunk.\n * @param {Buffer} chunk the chunk of data to be uploaded.\n * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server\n * to validate the chunk was not corrupted.\n * @returns {Promise}\n */\n async uploadPart(partNumber, chunk, validation) {\n const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`;\n let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this);\n if (validation === 'md5') {\n const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64');\n headers = {\n 'Content-MD5': hash,\n };\n }\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n url,\n method: 'PUT',\n body: chunk,\n headers,\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n this.partsMap.set(partNumber, res.headers['etag']);\n 
}\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n }\n }, this.retryOptions);\n }\n /**\n * Sends the final request of the MPU to tell GCS the upload is now complete.\n *\n * @returns {Promise}\n */\n async completeUpload() {\n const url = `${this.baseUrl}?uploadId=${this.uploadId}`;\n const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0]));\n const parts = [];\n for (const entry of sortedMap.entries()) {\n parts.push({ PartNumber: entry[0], ETag: entry[1] });\n }\n const body = `${this.xmlBuilder.build(parts)}`;\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this),\n url,\n method: 'POST',\n body,\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n return res;\n }\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n return;\n }\n }, this.retryOptions);\n }\n /**\n * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail,\n * and future requests using the upload ID fail.\n *\n * @returns {Promise}\n */\n async abortUpload() {\n const url = `${this.baseUrl}?uploadId=${this.uploadId}`;\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n url,\n method: 'DELETE',\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n }\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n return;\n }\n }, this.retryOptions);\n }\n}\n_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) {\n let headerFound = false;\n let userAgentFound = false;\n for (const [key, value] of Object.entries(headers)) {\n if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') {\n headerFound = true;\n // Prepend command feature to value, if not already there\n if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) {\n headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`;\n }\n }\n else if (key.toLocaleLowerCase().trim() === 'user-agent') {\n userAgentFound = true;\n }\n }\n // If the header isn't present, add it\n if (!headerFound) {\n headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`;\n }\n // If the User-Agent isn't present, add it\n if (!userAgentFound) {\n headers['User-Agent'] = (0, util_js_2.getUserAgentString)();\n }\n return headers;\n}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) {\n if (this.bucket.storage.retryOptions.autoRetry &&\n this.bucket.storage.retryOptions.retryableErrorFn(err)) {\n throw err;\n }\n else {\n bail(err);\n }\n};\n/**\n * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket A {@link Bucket} instance\n *\n */\nclass TransferManager {\n constructor(bucket) {\n this.bucket = 
bucket;\n }\n /**\n * @typedef {object} UploadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when uploading the files.\n * @property {boolean} [skipIfExists] Do not upload the file if it already exists in\n * the bucket. This will set the precondition ifGenerationMatch = 0.\n * @property {string} [prefix] A prefix to append to all of the uploaded files.\n * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through\n * to each individual upload operation.\n *\n */\n /**\n * Upload multiple files in parallel to the bucket. This is a convenience method\n * that utilizes {@link Bucket#upload} to perform the upload.\n *\n * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name.\n * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list.\n * to be uploaded to the bucket\n * @param {UploadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Upload multiple files in parallel.\n * //-\n * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']);\n * // Your bucket now contains:\n * // - \"local/path/file1.txt\" (with the contents of '/local/path/file1.txt')\n * // - \"local/path/file2.txt\" (with the contents of '/local/path/file2.txt')\n * const response = await transferManager.uploadManyFiles('/local/directory');\n * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure.\n * ```\n *\n */\n async uploadManyFiles(filePathsOrDirectory, options = {}) {\n var _a;\n if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? 
void 0 : _a.preconditionOpts)) {\n options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0;\n }\n else if (options.skipIfExists &&\n options.passthroughOptions === undefined) {\n options.passthroughOptions = {\n preconditionOpts: {\n ifGenerationMatch: 0,\n },\n };\n }\n const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT);\n const promises = [];\n let allPaths = [];\n if (!Array.isArray(filePathsOrDirectory)) {\n for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) {\n allPaths.push(curPath);\n }\n }\n else {\n allPaths = filePathsOrDirectory;\n }\n for (const filePath of allPaths) {\n const stat = await fs_1.promises.lstat(filePath);\n if (stat.isDirectory()) {\n continue;\n }\n const passThroughOptionsCopy = {\n ...options.passthroughOptions,\n [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY,\n };\n passThroughOptionsCopy.destination = filePath\n .split(path.sep)\n .join(path.posix.sep);\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination);\n }\n promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the files.\n * @property {string} [prefix] A prefix to append to all of the downloaded files.\n * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files.\n * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through\n * to each individual download operation.\n *\n */\n /**\n * Download multiple files in parallel to the local filesystem. This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. 
If\n * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded.\n * @param {DownloadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download multiple files in parallel.\n * //-\n * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles('test-folder');\n * // All files with GCS prefix of 'test-folder' have been downloaded.\n * ```\n *\n */\n async downloadManyFiles(filesOrFolder, options = {}) {\n const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT);\n const promises = [];\n let files = [];\n if (!Array.isArray(filesOrFolder)) {\n const directoryFiles = await this.bucket.getFiles({\n prefix: filesOrFolder,\n });\n files = directoryFiles[0];\n }\n else {\n files = filesOrFolder.map(curFile => {\n if (typeof curFile === 'string') {\n return this.bucket.file(curFile);\n }\n return curFile;\n });\n }\n const stripRegexString = options.stripPrefix\n ? `^${options.stripPrefix}`\n : EMPTY_REGEX;\n const regex = new RegExp(stripRegexString, 'g');\n for (const file of files) {\n const passThroughOptionsCopy = {\n ...options.passthroughOptions,\n [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY,\n };\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name);\n }\n if (options.stripPrefix) {\n passThroughOptionsCopy.destination = file.name.replace(regex, '');\n }\n promises.push(limit(() => file.download(passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadFileInChunksOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the file.\n * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.\n * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete.\n * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory.\n *\n */\n /**\n * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {File | string} fileOrName {@link File} to download.\n * @param {DownloadFileInChunksOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download a large file in chunks utilizing parallel operations.\n * //-\n * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt');\n * // Your local directory now contains:\n * // - \"large-file.txt\" (with the contents from my-bucket.large-file.txt)\n * ```\n *\n */\n async downloadFileInChunks(fileOrName, options = {}) {\n let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;\n let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT);\n const noReturnData = Boolean(options.noReturnData);\n const promises = [];\n const file = typeof fileOrName === 'string'\n ? this.bucket.file(fileOrName)\n : fileOrName;\n const fileInfo = await file.get();\n const size = parseInt(fileInfo[0].metadata.size.toString());\n // If the file size does not meet the threshold download it as a single chunk.\n if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) {\n limit = (0, p_limit_1.default)(1);\n chunkSize = size;\n }\n let start = 0;\n const filePath = options.destination || path.basename(file.name);\n const fileToWrite = await fs_1.promises.open(filePath, 'w');\n while (start < size) {\n const chunkStart = start;\n let chunkEnd = start + chunkSize - 1;\n chunkEnd = chunkEnd > size ? size : chunkEnd;\n promises.push(limit(async () => {\n const resp = await file.download({\n start: chunkStart,\n end: chunkEnd,\n [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED,\n });\n const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart);\n if (noReturnData)\n return;\n return result.buffer;\n }));\n start += chunkSize;\n }\n let chunks;\n try {\n chunks = await Promise.all(promises);\n }\n finally {\n await fileToWrite.close();\n }\n if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) {\n const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath);\n if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) {\n const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH);\n mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';\n throw mismatchError;\n }\n }\n if (noReturnData)\n return;\n return [Buffer.concat(chunks, size)];\n }\n /**\n * @typedef {object} UploadFileInChunksOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when uploading the file.\n * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.\n * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path.\n * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. 
If not specified\n * defaults to the specified concurrency limit.\n * @property {string} [uploadId] If specified attempts to resume a previous upload.\n * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk\n * specified in partsMap\n * @property {object} [headers] headers to be sent when initiating the multipart upload.\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload}\n * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set,\n * failures will be automatically aborted.\n *\n */\n /**\n * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and\n * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to\n * resume the upload.\n *\n * @param {string} [filePath] The path of the file to be uploaded\n * @param {UploadFileInChunksOptions} [options] Configuration options.\n * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this.\n * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Upload a large file in chunks utilizing parallel operations.\n * //-\n * const response = await transferManager.uploadFileInChunks('large-file.txt');\n * // Your bucket now contains:\n * // - \"large-file.txt\"\n * ```\n *\n *\n */\n async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) {\n const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;\n const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT);\n const maxQueueSize = options.maxQueueSize ||\n options.concurrencyLimit ||\n DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT;\n const fileName = options.uploadName || path.basename(filePath);\n const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap);\n let partNumber = 1;\n let promises = [];\n try {\n if (options.uploadId === undefined) {\n await mpuHelper.initiateUpload(options.headers);\n }\n const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize;\n const readStream = (0, fs_1.createReadStream)(filePath, {\n highWaterMark: chunkSize,\n start: startOrResumptionByte,\n });\n // p-limit only limits the number of running promises. 
We do not want to hold an entire\n // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory.\n for await (const curChunk of readStream) {\n if (promises.length >= maxQueueSize) {\n await Promise.all(promises);\n promises = [];\n }\n promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation)));\n }\n await Promise.all(promises);\n return await mpuHelper.completeUpload();\n }\n catch (e) {\n if ((options.autoAbortFailure === undefined || options.autoAbortFailure) &&\n mpuHelper.uploadId) {\n try {\n await mpuHelper.abortUpload();\n return;\n }\n catch (e) {\n throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap);\n }\n }\n throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap);\n }\n }\n async *getPathsFromDirectory(directory) {\n const filesAndSubdirectories = await fs_1.promises.readdir(directory, {\n withFileTypes: true,\n });\n for (const curFileOrDirectory of filesAndSubdirectories) {\n const fullPath = path.join(directory, curFileOrDirectory.name);\n curFileOrDirectory.isDirectory()\n ? yield* this.getPathsFromDirectory(fullPath)\n : yield fullPath;\n }\n }\n}\nexports.TransferManager = TransferManager;\n","\"use strict\";\n\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = {\n enumerable: true,\n get: function () {\n return m[k];\n }\n };\n }\n Object.defineProperty(o, k2, desc);\n} : function (o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n});\nvar __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) {\n Object.defineProperty(o, \"default\", {\n enumerable: true,\n value: v\n });\n} : function (o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = this && this.__importStar || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PassThroughShim = exports.getModuleFormat = exports.getDirName = exports.getUserAgentString = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0;\nconst path = __importStar(require(\"path\"));\nconst querystring = __importStar(require(\"querystring\"));\nconst stream_1 = require(\"stream\");\nconst url = __importStar(require(\"url\"));\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\n// Done to avoid a problem with mangling of identifiers when using esModuleInterop\nconst fileURLToPath = url.fileURLToPath;\nconst isEsm = false;\nfunction normalize(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;\n return {\n options,\n callback\n };\n}\nexports.normalize = normalize;\n/**\n * Flatten an object into an Array of arrays, [[key, value], ..].\n * Implements Object.entries() for Node.js <8\n * @internal\n */\nfunction objectEntries(obj) {\n return Object.keys(obj).map(key => [key, obj[key]]);\n}\nexports.objectEntries = objectEntries;\n/**\n * Encode `str` with encodeURIComponent, plus these\n * reserved characters: `! * ' ( )`.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent}\n *\n * @param {string} str The URI component to encode.\n * @return {string} The encoded string.\n */\nfunction fixedEncodeURIComponent(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase());\n}\nexports.fixedEncodeURIComponent = fixedEncodeURIComponent;\n/**\n * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent.\n *\n * Encode every byte except `A-Z a-Z 0-9 ~ - . _`.\n *\n * @param {string} uri The URI to encode.\n * @param [boolean=false] encodeSlash If `true`, the \"/\" character is not encoded.\n * @return {string} The encoded string.\n */\nfunction encodeURI(uri, encodeSlash) {\n // Split the string by `/`, and conditionally rejoin them with either\n // %2F if encodeSlash is `true`, or '/' if `false`.\n return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? 
'%2F' : '/');\n}\nexports.encodeURI = encodeURI;\n/**\n * Serialize an object to a URL query string using util.encodeURI(uri, true).\n * @param {string} url The object to serialize.\n * @return {string} Serialized string.\n */\nfunction qsStringify(qs) {\n return querystring.stringify(qs, '&', '=', {\n encodeURIComponent: component => encodeURI(component, true)\n });\n}\nexports.qsStringify = qsStringify;\nfunction objectKeyToLowercase(object) {\n const newObj = {};\n for (let key of Object.keys(object)) {\n const value = object[key];\n key = key.toLowerCase();\n newObj[key] = value;\n }\n return newObj;\n}\nexports.objectKeyToLowercase = objectKeyToLowercase;\n/**\n * JSON encode str, with unicode \\u+ representation.\n * @param {object} obj The object to encode.\n * @return {string} Serialized string.\n */\nfunction unicodeJSONStringify(obj) {\n return JSON.stringify(obj).replace(/[\\u0080-\\uFFFF]/g, char => '\\\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4));\n}\nexports.unicodeJSONStringify = unicodeJSONStringify;\n/**\n * Converts the given objects keys to snake_case\n * @param {object} obj object to convert keys to snake case.\n * @returns {object} object with keys converted to snake case.\n */\nfunction convertObjKeysToSnakeCase(obj) {\n if (obj instanceof Date || obj instanceof RegExp) {\n return obj;\n }\n if (Array.isArray(obj)) {\n return obj.map(convertObjKeysToSnakeCase);\n }\n if (obj instanceof Object) {\n return Object.keys(obj).reduce((acc, cur) => {\n const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => {\n return `_${p1.toLowerCase()}`;\n });\n acc[s] = convertObjKeysToSnakeCase(obj[cur]);\n return acc;\n }, Object());\n }\n return obj;\n}\nexports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase;\n/**\n * Formats the provided date object as a UTC ISO string.\n * @param {Date} dateTimeToFormat date object to be formatted.\n * @param {boolean} includeTime flag to include hours, minutes, seconds in output.\n * @param {string} dateDelimiter delimiter between date components.\n * @param {string} timeDelimiter delimiter between time components.\n * @returns {string} UTC ISO format of provided date obect.\n */\nfunction formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') {\n const year = dateTimeToFormat.getUTCFullYear();\n const month = dateTimeToFormat.getUTCMonth() + 1;\n const day = dateTimeToFormat.getUTCDate();\n const hour = dateTimeToFormat.getUTCHours();\n const minute = dateTimeToFormat.getUTCMinutes();\n const second = dateTimeToFormat.getUTCSeconds();\n let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`;\n if (includeTime) {\n resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`;\n }\n return resultString;\n}\nexports.formatAsUTCISO = formatAsUTCISO;\n/**\n * Examines the runtime environment and returns the appropriate tracking string.\n * @returns {string} metrics tracking string based on the current runtime environment.\n */\nfunction getRuntimeTrackingString() {\n if (\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n globalThis.Deno &&\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n globalThis.Deno.version &&\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // 
@ts-ignore\n globalThis.Deno.version.deno) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n return `gl-deno/${globalThis.Deno.version.deno}`;\n } else {\n return `gl-node/${process.versions.node}`;\n }\n}\nexports.getRuntimeTrackingString = getRuntimeTrackingString;\n/**\n * Looks at package.json and creates the user-agent string to be applied to request headers.\n * @returns {string} user agent string.\n */\nfunction getUserAgentString() {\n const pkg = (0, package_json_helper_cjs_1.getPackageJSON)();\n const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + pkg.version;\n}\nexports.getUserAgentString = getUserAgentString;\nfunction getDirName() {\n let dirToUse = '';\n try {\n dirToUse = __dirname;\n } catch (e) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n dirToUse = __dirname;\n }\n return dirToUse;\n}\nexports.getDirName = getDirName;\nfunction getModuleFormat() {\n return isEsm ? 'ESM' : 'CJS';\n}\nexports.getModuleFormat = getModuleFormat;\nclass PassThroughShim extends stream_1.PassThrough {\n constructor() {\n super(...arguments);\n this.shouldEmitReading = true;\n this.shouldEmitWriting = true;\n }\n _read(size) {\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n super._read(size);\n }\n _write(chunk, encoding, callback) {\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n // Per the nodejs documention, callback must be invoked on the next tick\n process.nextTick(() => {\n super._write(chunk, encoding, callback);\n });\n }\n _final(callback) {\n // If the stream is empty (i.e. 
empty file) final will be invoked before _read / _write\n // and we should still emit the proper events.\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n callback(null);\n }\n}\nexports.PassThroughShim = PassThroughShim;\n","// Copyright 2023 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/* eslint-disable node/no-missing-require */\n\nfunction getPackageJSON() {\n return require('../../../package.json');\n}\n\nexports.getPackageJSON = getPackageJSON;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/dist/main/licenses.txt b/dist/main/licenses.txt index ad7b961..fddbd30 100644 --- a/dist/main/licenses.txt +++ b/dist/main/licenses.txt @@ -47,6 +47,28 @@ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +@fastify/busboy +MIT +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. + @google-cloud/common Apache-2.0 @@ -1585,7 +1607,7 @@ MIT The MIT License (MIT) ===================== -Copyright © `<2022>` `Michael Mclaughlin` +Copyright © `<2023>` `Michael Mclaughlin` Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation @@ -2025,209 +2047,29 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -fast-text-encoding -Apache-2.0 - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. +fast-xml-parser +MIT +MIT License - Copyright {yyyy} {name of copyright owner} +Copyright (c) 2017 Amit Kumar Gupta - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: - http://www.apache.org/licenses/LICENSE-2.0 +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. gaxios @@ -2848,31 +2690,6 @@ Apache-2.0 limitations under the License. -google-p12-pem -MIT -The MIT License (MIT) - -Copyright (c) 2014 Ryan Seys - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
- - gtoken MIT The MIT License (MIT) @@ -2916,6 +2733,28 @@ MIT https-proxy-agent MIT +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. inherits ISC @@ -2937,31 +2776,6 @@ PERFORMANCE OF THIS SOFTWARE. -is-plain-object -MIT -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - is-stream MIT MIT License @@ -3041,25 +2855,6 @@ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TOR ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -lru-cache -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - mime MIT The MIT License (MIT) @@ -3190,341 +2985,6 @@ SOFTWARE. 
-node-forge -(BSD-3-Clause OR GPL-2.0) -You may use the Forge project under the terms of either the BSD License or the -GNU General Public License (GPL) Version 2. - -The BSD License is recommended for most projects. It is simple and easy to -understand and it places almost no restrictions on what you can do with the -Forge project. - -If the GPL suits your project better you are also free to use Forge under -that license. - -You don't have to do anything special to choose one license or the other and -you don't have to notify anyone which license you are using. You are free to -use this project in commercial projects as long as the copyright header is -left intact. - -If you are a commercial entity and use this set of libraries in your -commercial software then reasonable payment to Digital Bazaar, if you can -afford it, is not required but is expected and would be appreciated. If this -library saves you time, then it's saving you money. The cost of developing -the Forge software was on the order of several hundred hours and tens of -thousands of dollars. We are attempting to strike a balance between helping -the development community while not being taken advantage of by lucrative -commercial entities for our efforts. - -------------------------------------------------------------------------------- -New BSD License (3-clause) -Copyright (c) 2010, Digital Bazaar, Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Digital Bazaar, Inc. nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL DIGITAL BAZAAR BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -------------------------------------------------------------------------------- - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc. - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. 
This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. 
You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. 
You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. 
If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - - once ISC The ISC License @@ -3762,6 +3222,31 @@ IN THE SOFTWARE. +strnum +MIT +MIT License + +Copyright (c) 2021 Natural Intelligence + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + stubs MIT @@ -4012,6 +3497,31 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +undici +MIT +MIT License + +Copyright (c) Matteo Collina and Undici contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + universal-user-agent ISC # [ISC License](https://spdx.org/licenses/ISC) @@ -4124,25 +3634,6 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -yallist -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - yocto-queue MIT MIT License diff --git a/dist/post/index.js b/dist/post/index.js index 478a00f..544ba7b 100644 --- a/dist/post/index.js +++ b/dist/post/index.js @@ -42,7 +42,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GoogleCloudStorageMutex = void 0; const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); -const storage_1 = __nccwpck_require__(8174); +const storage_1 = __nccwpck_require__(7577); const common_1 = __nccwpck_require__(4777); const TIME_PERIOD_PATTERN = /^(?:(\d+)m)?(?:(\d+)s)?$/; class TimePeriodError extends Error { @@ -770,7 +770,7 @@ class OidcClient { .catch(error => { throw new Error(`Failed to get ID Token. \n Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); + Error Message: ${error.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; if (!id_token) { @@ -1223,8 +1223,8 @@ class Context { var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; @@ -1242,7 +1242,8 @@ class Context { this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; @@ -1274,7 +1275,11 @@ exports.Context = Context; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1287,7 +1292,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1304,7 +1309,7 @@ exports.context = new Context.Context(); */ function getOctokit(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); - return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } exports.getOctokit = getOctokit; //# sourceMappingURL=github.js.map @@ -1318,7 +1323,11 @@ exports.getOctokit = getOctokit; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1331,13 +1340,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; const httpClient = __importStar(__nccwpck_require__(6255)); +const undici_1 = __nccwpck_require__(1773); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -1353,6 +1372,19 @@ function getProxyAgent(destinationUrl) { return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } @@ -1368,7 +1400,11 @@ exports.getApiBaseUrl = getApiBaseUrl; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1381,7 +1417,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1398,7 +1434,8 @@ const baseUrl = Utils.getApiBaseUrl(); exports.defaults = { baseUrl, request: { - agent: Utils.getProxyAgent(baseUrl) + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } }; exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); @@ -1518,7 +1555,11 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand /* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1531,7 +1572,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1550,6 +1591,7 @@ const http = __importStar(__nccwpck_require__(3685)); const https = __importStar(__nccwpck_require__(5687)); const pm = __importStar(__nccwpck_require__(9835)); const tunnel = __importStar(__nccwpck_require__(4294)); +const undici_1 = __nccwpck_require__(1773); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -1579,16 +1621,16 @@ var HttpCodes; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); +})(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com @@ -1639,6 +1681,19 @@ class HttpClientResponse { })); }); } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { @@ -1944,6 +1999,15 @@ class HttpClient { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; @@ -2043,6 +2107,30 @@ class HttpClient { } return agent; } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); @@ -2143,7 +2231,13 @@ function getProxyUrl(reqUrl) { } })(); if (proxyVar) { - return new URL(proxyVar); + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } } else { return undefined; @@ -4062,13688 +4156,1178 @@ exports.callbackifyAll = callbackifyAll; /***/ }), -/***/ 1672: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 334: +/***/ ((module) => { "use strict"; -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AclRoleAccessorMethods = exports.Acl = void 0; -const promisify_1 = __nccwpck_require__(9203); -/** - * Attach functionality to a {@link Storage.acl} instance. This will add an - * object for each role group (owners, readers, and writers), with each object - * containing methods to add or delete a type of entity. - * - * As an example, here are a few methods that are created. - * - * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); - * - * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); - * - * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); - * - * @private - */ -class AclRoleAccessorMethods { - constructor() { - this.owners = {}; - this.readers = {}; - this.writers = {}; - /** - * An object of convenience methods to add or delete owner ACL permissions - * for a given entity. 
- * - * The supported methods include: - * - * - `myFile.acl.owners.addAllAuthenticatedUsers` - * - `myFile.acl.owners.deleteAllAuthenticatedUsers` - * - `myFile.acl.owners.addAllUsers` - * - `myFile.acl.owners.deleteAllUsers` - * - `myFile.acl.owners.addDomain` - * - `myFile.acl.owners.deleteDomain` - * - `myFile.acl.owners.addGroup` - * - `myFile.acl.owners.deleteGroup` - * - `myFile.acl.owners.addProject` - * - `myFile.acl.owners.deleteProject` - * - `myFile.acl.owners.addUser` - * - `myFile.acl.owners.deleteUser` - * - * @name Acl#owners - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * //- - * // Add a user as an owner of a file. - * //- - * const myBucket = gcs.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) - * {}); - * - * //- - * // For reference, the above command is the same as running the following. - * //- - * myFile.acl.add({ - * entity: 'user-email@example.com', - * role: gcs.acl.OWNER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.owners.addUser('email@example.com').then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - this.owners = {}; - /** - * An object of convenience methods to add or delete reader ACL permissions - * for a given entity. - * - * The supported methods include: - * - * - `myFile.acl.readers.addAllAuthenticatedUsers` - * - `myFile.acl.readers.deleteAllAuthenticatedUsers` - * - `myFile.acl.readers.addAllUsers` - * - `myFile.acl.readers.deleteAllUsers` - * - `myFile.acl.readers.addDomain` - * - `myFile.acl.readers.deleteDomain` - * - `myFile.acl.readers.addGroup` - * - `myFile.acl.readers.deleteGroup` - * - `myFile.acl.readers.addProject` - * - `myFile.acl.readers.deleteProject` - * - `myFile.acl.readers.addUser` - * - `myFile.acl.readers.deleteUser` - * - * @name Acl#readers - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * //- - * // Add a user as a reader of a file. - * //- - * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) - * {}); - * - * //- - * // For reference, the above command is the same as running the following. - * //- - * myFile.acl.add({ - * entity: 'user-email@example.com', - * role: gcs.acl.READER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.readers.addUser('email@example.com').then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - this.readers = {}; - /** - * An object of convenience methods to add or delete writer ACL permissions - * for a given entity. 
- * - * The supported methods include: - * - * - `myFile.acl.writers.addAllAuthenticatedUsers` - * - `myFile.acl.writers.deleteAllAuthenticatedUsers` - * - `myFile.acl.writers.addAllUsers` - * - `myFile.acl.writers.deleteAllUsers` - * - `myFile.acl.writers.addDomain` - * - `myFile.acl.writers.deleteDomain` - * - `myFile.acl.writers.addGroup` - * - `myFile.acl.writers.deleteGroup` - * - `myFile.acl.writers.addProject` - * - `myFile.acl.writers.deleteProject` - * - `myFile.acl.writers.addUser` - * - `myFile.acl.writers.deleteUser` - * - * @name Acl#writers - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * //- - * // Add a user as a writer of a file. - * //- - * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) - * {}); - * - * //- - * // For reference, the above command is the same as running the following. - * //- - * myFile.acl.add({ - * entity: 'user-email@example.com', - * role: gcs.acl.WRITER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.writers.addUser('email@example.com').then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - this.writers = {}; - AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 6762: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(5030); +var import_before_after_hook = __nccwpck_require__(3682); +var import_request = __nccwpck_require__(6234); +var import_graphql = __nccwpck_require__(8467); +var import_auth_token = __nccwpck_require__(334); + +// pkg/dist-src/version.js +var VERSION = "5.1.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; } - _assignAccessMethods(role) { - const accessMethods = AclRoleAccessorMethods.accessMethods; - const entities = AclRoleAccessorMethods.entities; - const roleGroup = role.toLowerCase() + 's'; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this[roleGroup] = entities.reduce((acc, entity) => { - const isPrefix = entity.charAt(entity.length - 1) === '-'; - accessMethods.forEach(accessMethod => { - let method = accessMethod + entity[0].toUpperCase() + entity.substr(1); - if (isPrefix) { - method = method.replace('-', ''); - } - // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the - // more complex API of specifying an `entity` and `role`. - // eslint-disable-next-line @typescript-eslint/no-explicit-any - acc[method] = (entityId, options, callback) => { - let apiEntity; - if (typeof options === 'function') { - callback = options; - options = {}; - } - if (isPrefix) { - apiEntity = entity + entityId; - } - else { - // If the entity is not a prefix, it is a special entity group - // that does not require further details. The accessor methods - // only accept a callback. - apiEntity = entity; - callback = entityId; - } - options = Object.assign({ - entity: apiEntity, - role, - }, options); - const args = [options]; - if (typeof callback === 'function') { - args.push(callback); - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - return this[accessMethod].apply(this, args); - }; - }); - return acc; - }, {}); + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; } -} -exports.AclRoleAccessorMethods = AclRoleAccessorMethods; -AclRoleAccessorMethods.accessMethods = ['add', 'delete']; -AclRoleAccessorMethods.entities = [ - // Special entity groups that do not require further specification. - 'allAuthenticatedUsers', - 'allUsers', - // Entity groups that require specification, e.g. `user-email@example.com`. - 'domain-', - 'group-', - 'project-', - 'user-', -]; -AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; -/** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. - * - * An ACL consists of one or more entries, where each entry grants permissions - * to an entity. Permissions define the actions that can be performed against an - * object or bucket (for example, `READ` or `WRITE`); the entity defines who the - * permission applies to (for example, a specific user or group of users). - * - * Where an `entity` value is accepted, we follow the format the Cloud Storage - * API expects. 
- * - * Refer to - * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls - * for the most up-to-date values. - * - * - `user-userId` - * - `user-email` - * - `group-groupId` - * - `group-email` - * - `domain-domain` - * - `project-team-projectId` - * - `allUsers` - * - `allAuthenticatedUsers` - * - * Examples: - * - * - The user "liz@example.com" would be `user-liz@example.com`. - * - The group "example@googlegroups.com" would be - * `group-example@googlegroups.com`. - * - To refer to all members of the Google Apps for Business domain - * "example.com", the entity would be `domain-example.com`. - * - * For more detailed information, see - * {@link http://goo.gl/6qBBPO| About Access Control Lists}. - * - * @constructor Acl - * @mixin - * @param {object} options Configuration options. - */ -class Acl extends AclRoleAccessorMethods { - constructor(options) { - super(); - this.pathPrefix = options.pathPrefix; - this.request_ = options.request; + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; } - /** - * @typedef {array} AddAclResponse - * @property {object} 0 The Acl Objects. - * @property {object} 1 The full API response. - */ - /** - * @callback AddAclCallback - * @param {?Error} err Request error, if any. - * @param {object} acl The Acl Objects. - * @param {object} apiResponse The full API response. - */ - /** - * Add access controls on a {@link Bucket} or {@link File}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} - * - * @param {object} options Configuration options. - * @param {string} options.entity Whose permissions will be added. - * @param {string} options.role Permissions allowed for the defined entity. - * See {@link https://cloud.google.com/storage/docs/access-control Access - * Control}. - * @param {number} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {AddAclCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * const options = { - * entity: 'user-useremail@example.com', - * role: gcs.acl.OWNER_ROLE - * }; - * - * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * // Here is how you would grant ownership permissions to a user on a - * specific - * // revision of a file. - * //- - * myFile.acl.add({ - * entity: 'user-useremail@example.com', - * role: gcs.acl.OWNER_ROLE, - * generation: 1 - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * myBucket.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_add_file_owner - * Example of adding an owner to a file: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_owner - * Example of adding an owner to a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_default_owner - * Example of adding a default owner to a bucket: - */ - add(options, callback) { - const query = {}; - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - this.request({ - method: 'POST', - uri: '', - qs: query, - maxRetries: 0, - json: { - entity: options.entity, - role: options.role.toUpperCase(), - }, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - callback(null, this.makeAclObject_(resp), resp); - }); - } - /** - * @typedef {array} RemoveAclResponse - * @property {object} 0 The full API response. - */ - /** - * @callback RemoveAclCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Delete access controls on a {@link Bucket} or {@link File}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} - * - * @param {object} options Configuration object. - * @param {string} options.entity Whose permissions will be revoked. - * @param {int} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {RemoveAclCallback} callback The callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * myBucket.acl.delete({ - * entity: 'user-useremail@example.com' - * }, function(err, apiResponse) {}); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * //- - * myFile.acl.delete({ - * entity: 'user-useremail@example.com', - * generation: 1 - * }, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * myFile.acl.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_owner - * Example of removing an owner from a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_default_owner - * Example of removing a default owner from a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_file_owner - * Example of removing an owner from a bucket: - */ - delete(options, callback) { - const query = {}; - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - this.request({ - method: 'DELETE', - uri: '/' + encodeURIComponent(options.entity), - qs: query, - }, (err, resp) => { - callback(err, resp); + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; } - /** - * @typedef {array} GetAclResponse - * @property {object|object[]} 0 Single or array of Acl Objects. - * @property {object} 1 The full API response. - */ - /** - * @callback GetAclCallback - * @param {?Error} err Request error, if any. - * @param {object|object[]} acl Single or array of Acl Objects. - * @param {object} apiResponse The full API response. - */ - /** - * Get access controls on a {@link Bucket} or {@link File}. If - * an entity is omitted, you will receive an array of all applicable access - * controls. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} - * - * @param {object|function} [options] Configuration options. If you want to - * receive a list of all access controls, pass the callback function as - * the only argument. - * @param {string} [options.entity] Whose permissions will be fetched. - * @param {number} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetAclCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * myBucket.acl.get({ - * entity: 'user-useremail@example.com' - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // Get all access controls. - * //- - * myBucket.acl.get(function(err, aclObjects, apiResponse) { - * // aclObjects = [ - * // { - * // entity: 'user-useremail@example.com', - * // role: 'owner' - * // } - * // ] - * }); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * //- - * myFile.acl.get({ - * entity: 'user-useremail@example.com', - * generation: 1 - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myBucket.acl.get().then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_print_file_acl - * Example of printing a file's ACL: - * - * @example include:samples/acl.js - * region_tag:storage_print_file_acl_for_user - * Example of printing a file's ACL for a specific user: - * - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl - * Example of printing a bucket's ACL: - * - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl_for_user - * Example of printing a bucket's ACL for a specific user: - */ - get(optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; - const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - let path = ''; - const query = {}; - if (options) { - path = '/' + encodeURIComponent(options.entity); - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - } - this.request({ - uri: path, - qs: query, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - let results; - if (resp.items) { - results = resp.items.map(this.makeAclObject_); - } - else { - results = this.makeAclObject_(resp); - } - callback(null, results, resp); - }); + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); } - /** - * @typedef {array} UpdateAclResponse - * @property {object} 0 The updated Acl Objects. - * @property {object} 1 The full API response. - */ - /** - * @callback UpdateAclCallback - * @param {?Error} err Request error, if any. - * @param {object} acl The updated Acl Objects. - * @param {object} apiResponse The full API response. - */ - /** - * Update access controls on a {@link Bucket} or {@link File}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} - * - * @param {object} options Configuration options. - * @param {string} options.entity Whose permissions will be updated. - * @param {string} options.role Permissions allowed for the defined entity. - * See {@link Storage.acl}. - * @param {number} [options.generation] **File Objects Only** Select a specific - * revision of this file (as opposed to the latest version, the default). 
- * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {UpdateAclCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * const myFile = myBucket.file('my-file'); - * - * const options = { - * entity: 'user-useremail@example.com', - * role: gcs.acl.WRITER_ROLE - * }; - * - * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); - * - * //- - * // For file ACL operations, you can also specify a `generation` property. - * //- - * myFile.acl.update({ - * entity: 'user-useremail@example.com', - * role: gcs.acl.WRITER_ROLE, - * generation: 1 - * }, function(err, aclObject, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myFile.acl.update(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - update(options, callback) { - const query = {}; - if (options.generation) { - query.generation = options.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - this.request({ - method: 'PUT', - uri: '/' + encodeURIComponent(options.entity), - qs: query, - json: { - role: options.role.toUpperCase(), - }, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - callback(null, this.makeAclObject_(resp), resp); - }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 9440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "9.0.4"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if 
(Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); } - /** - * Transform API responses to a consistent object format. - * - * @private - */ - makeAclObject_(accessControlObject) { - const obj = { - entity: accessControlObject.entity, - role: accessControlObject.role, - }; - if (accessControlObject.projectTeam) { - obj.projectTeam = accessControlObject.projectTeam; - } - return obj; + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; } - /** - * Patch requests up to the bucket's request object. - * - * @private - * - * @param {string} method Action. - * @param {string} path Request path. - * @param {*} query Request query object. - * @param {*} body Request body contents. - * @param {function} callback Callback function. - */ - request(reqOpts, callback) { - reqOpts.uri = this.pathPrefix + reqOpts.uri; - this.request_(reqOpts, callback); + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; } -exports.Acl = Acl; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Acl, { - exclude: ['request'], -}); -//# sourceMappingURL=acl.js.map -/***/ }), +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} -/***/ 8561: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} -"use strict"; +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const paginator_1 = __nccwpck_require__(6412); -const promisify_1 = __nccwpck_require__(9203); -const extend = __nccwpck_require__(8171); -const fs = __nccwpck_require__(7147); -const mime = __nccwpck_require__(3583); -const path = __nccwpck_require__(1017); -const pLimit = __nccwpck_require__(7684); -const util_1 = __nccwpck_require__(3837); -const retry = __nccwpck_require__(3415); -const util_2 = __nccwpck_require__(1862); -const acl_1 = __nccwpck_require__(1672); -const file_1 = __nccwpck_require__(5373); -const iam_1 = __nccwpck_require__(66); -const notification_1 = __nccwpck_require__(7523); -const storage_1 = __nccwpck_require__(346); -const signer_1 = __nccwpck_require__(9665); -const stream_1 = __nccwpck_require__(2781); -const url_1 = __nccwpck_require__(7310); -var BucketActionToHTTPMethod; -(function (BucketActionToHTTPMethod) { - BucketActionToHTTPMethod["list"] = "GET"; -})(BucketActionToHTTPMethod = exports.BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = {})); -var AvailableServiceObjectMethods; -(function (AvailableServiceObjectMethods) { - AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; - AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; -})(AvailableServiceObjectMethods = exports.AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = {})); -var BucketExceptionMessages; -(function (BucketExceptionMessages) { - BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; - BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination 
file must be specified."; - BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; - BucketExceptionMessages["CHANNEL_ADDRESS_REQUIRED"] = "An address is required to create a channel."; - BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; - BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; - BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; - BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; - BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; -})(BucketExceptionMessages = exports.BucketExceptionMessages || (exports.BucketExceptionMessages = {})); -/** - * @callback Crc32cGeneratorToStringCallback - * A method returning the CRC32C as a base64-encoded string. - * - * @returns {string} - * - * @example - * Hashing the string 'data' should return 'rth90Q==' - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.toString(); // 'rth90Q==' - * ``` - **/ -/** - * @callback Crc32cGeneratorValidateCallback - * A method validating a base64-encoded CRC32C string. - * - * @param {string} [value] base64-encoded CRC32C string to validate - * @returns {boolean} - * - * @example - * Should return `true` if the value matches, `false` otherwise - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.validate('DkjKuA=='); // false - * crc32c.validate('rth90Q=='); // true - * ``` - **/ -/** - * @callback Crc32cGeneratorUpdateCallback - * A method for passing `Buffer`s for CRC32C generation. - * - * @param {Buffer} [data] data to update CRC32C value with - * @returns {undefined} - * - * @example - * Hashing buffers from 'some ' and 'text\n' - * - * ```js - * const buffer1 = Buffer.from('some '); - * crc32c.update(buffer1); - * - * const buffer2 = Buffer.from('text\n'); - * crc32c.update(buffer2); - * - * crc32c.toString(); // 'DkjKuA==' - * ``` - **/ -/** - * @typedef {object} CRC32CValidator - * @property {Crc32cGeneratorToStringCallback} - * @property {Crc32cGeneratorValidateCallback} - * @property {Crc32cGeneratorUpdateCallback} - */ -/** - * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} - * - * @name Bucket#crc32cGenerator - * @type {CRC32CValidator} - */ -/** - * Get and set IAM policies for your bucket. - * - * @name Bucket#iam - * @mixes Iam - * - * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} - * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} - * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Get the IAM policy for your bucket. - * //- - * bucket.iam.getPolicy(function(err, policy) { - * console.log(policy); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.iam.getPolicy().then(function(data) { - * const policy = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/iam.js - * region_tag:storage_view_bucket_iam_members - * Example of retrieving a bucket's IAM policy: - * - * @example include:samples/iam.js - * region_tag:storage_add_bucket_iam_member - * Example of adding to a bucket's IAM policy: - * - * @example include:samples/iam.js - * region_tag:storage_remove_bucket_iam_member - * Example of removing from a bucket's IAM policy: - */ -/** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. - * - * An ACL consists of one or more entries, where each entry grants permissions - * to an entity. Permissions define the actions that can be performed against - * an object or bucket (for example, `READ` or `WRITE`); the entity defines - * who the permission applies to (for example, a specific user or group of - * users). - * - * The `acl` object on a Bucket instance provides methods to get you a list of - * the ACLs defined on your bucket, as well as set, update, and delete them. - * - * Buckets also have - * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} - * for all created files. Default ACLs specify permissions that all new - * objects added to the bucket will inherit by default. You can add, delete, - * get, and update entities and permissions for these as well with - * {@link Bucket#acl.default}. - * - * See {@link http://goo.gl/6qBBPO| About Access Control Lists} - * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} - * - * @name Bucket#acl - * @mixes Acl - * @property {Acl} default Cloud Storage Buckets have - * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} - * for all created files. You can add, delete, get, and update entities and - * permissions for these as well. The method signatures and examples are all - * the same, after only prefixing the method call with `default`. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Make a bucket's contents publicly readable. - * //- - * const myBucket = storage.bucket('my-bucket'); - * - * const options = { - * entity: 'allUsers', - * role: storage.acl.READER_ROLE - * }; - * - * myBucket.acl.add(options, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * myBucket.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl - * Example of printing a bucket's ACL: - * - * @example include:samples/acl.js - * region_tag:storage_print_bucket_acl_for_user - * Example of printing a bucket's ACL for a specific user: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_owner - * Example of adding an owner to a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_owner - * Example of removing an owner from a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_add_bucket_default_owner - * Example of adding a default owner to a bucket: - * - * @example include:samples/acl.js - * region_tag:storage_remove_bucket_default_owner - * Example of removing a default owner from a bucket: - */ -/** - * The API-formatted resource description of the bucket. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name Bucket#metadata - * @type {object} - */ -/** - * The bucket's name. - * @name Bucket#name - * @type {string} - */ -/** - * Get {@link File} objects for the files currently in the bucket as a - * readable object stream. - * - * @method Bucket#getFilesStream - * @param {GetFilesOptions} [query] Query object for listing files. - * @returns {ReadableStream} A readable stream that emits {@link File} instances. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getFilesStream() - * .on('error', console.error) - * .on('data', function(file) { - * // file is a File object. - * }) - * .on('end', function() { - * // All files retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * bucket.getFilesStream() - * .on('data', function(file) { - * this.end(); - * }); - * - * //- - * // If you're filtering files with a delimiter, you should use - * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to - * // preserve the `apiResponse` argument. - * //- - * const prefixes = []; - * - * function callback(err, files, nextQuery, apiResponse) { - * prefixes = prefixes.concat(apiResponse.prefixes); - * - * if (nextQuery) { - * bucket.getFiles(nextQuery, callback); - * } else { - * // prefixes = The finished array of prefixes. - * } - * } - * - * bucket.getFiles({ - * autoPaginate: false, - * delimiter: '/' - * }, callback); - * ``` - */ -/** - * Create a Bucket object to interact with a Cloud Storage bucket. - * - * @class - * @hideconstructor - * - * @param {Storage} storage A {@link Storage} instance. - * @param {string} name The name of the bucket. - * @param {object} [options] Configuration object. - * @param {string} [options.userProject] User project. 
- * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * ``` - */ -class Bucket extends nodejs_common_1.ServiceObject { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - getFilesStream(query) { - // placeholder body, overwritten in constructor - return new stream_1.Readable(); +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - constructor(storage, name, options) { - var _a, _b, _c, _d; - options = options || {}; - // Allow for "gs://"-style input, and strip any trailing slashes. - name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); - const requestQueryObject = {}; - if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { - requestQueryObject.ifGenerationMatch = - options.preconditionOpts.ifGenerationMatch; - } - if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { - requestQueryObject.ifGenerationNotMatch = - options.preconditionOpts.ifGenerationNotMatch; + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); } - if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { - requestQueryObject.ifMetagenerationMatch = - options.preconditionOpts.ifMetagenerationMatch; + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); } - if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) { - requestQueryObject.ifMetagenerationNotMatch = - options.preconditionOpts.ifMetagenerationNotMatch; + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); } - const userProject = options.userProject; - if (typeof userProject === 'string') { - requestQueryObject.userProject = userProject; + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } - const methods = { - /** - * Create a bucket. - * - * @method Bucket#create - * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. - * @param {CreateBucketCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * bucket.create(function(err, bucket, apiResponse) { - * if (!err) { - * // The bucket was created successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.create().then(function(data) { - * const bucket = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - create: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * IamDeleteBucketOptions Configuration options. - * @property {boolean} [ignoreNotFound = false] Ignore an error if - * the bucket does not exist. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} DeleteBucketResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteBucketCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Delete the bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} - * - * @method Bucket#delete - * @param {DeleteBucketOptions} [options] Configuration options. - * @param {boolean} [options.ignoreNotFound = false] Ignore an error if - * the bucket does not exist. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {DeleteBucketCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * bucket.delete(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/buckets.js - * region_tag:storage_delete_bucket - * Another example: - */ - delete: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} BucketExistsResponse - * @property {boolean} 0 Whether the {@link Bucket} exists. - */ - /** - * @callback BucketExistsCallback - * @param {?Error} err Request error, if any. - * @param {boolean} exists Whether the {@link Bucket} exists. - */ - /** - * Check if the bucket exists. - * - * @method Bucket#exists - * @param {BucketExistsOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {BucketExistsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.exists(function(err, exists) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.exists().then(function(data) { - * const exists = data[0]; - * }); - * ``` - */ - exists: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() - * @property {boolean} [autoCreate] Automatically create the object if - * it does not exist. Default: `false` - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} GetBucketResponse - * @property {Bucket} 0 The {@link Bucket}. - * @property {object} 1 The full API response. - */ - /** - * @callback GetBucketCallback - * @param {?Error} err Request error, if any. - * @param {Bucket} bucket The {@link Bucket}. - * @param {object} apiResponse The full API response. - */ - /** - * Get a bucket if it exists. - * - * You may optionally use this to "get or create" an object by providing - * an object with `autoCreate` set to `true`. Any extra configuration that - * is normally required for the `create` method must be contained within - * this object as well. - * - * @method Bucket#get - * @param {GetBucketOptions} [options] Configuration options. - * @param {boolean} [options.autoCreate] Automatically create the object if - * it does not exist. Default: `false` - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetBucketCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.get(function(err, bucket, apiResponse) { - * // `bucket.metadata` has been populated. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.get().then(function(data) { - * const bucket = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - get: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} GetBucketMetadataResponse - * @property {object} 0 The bucket metadata. - * @property {object} 1 The full API response. - */ - /** - * @callback GetBucketMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The bucket metadata. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Get the bucket's metadata. - * - * To set metadata, see {@link Bucket#setMetadata}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} - * - * @method Bucket#getMetadata - * @param {GetBucketMetadataOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getMetadata(function(err, metadata, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getMetadata().then(function(data) { - * const metadata = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/requesterPays.js - * region_tag:storage_get_requester_pays_status - * Example of retrieving the requester pays status of a bucket: - */ - getMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} SetBucketMetadataResponse - * @property {object} apiResponse The full API response. - */ - /** - * @callback SetBucketMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The bucket metadata. - */ - /** - * Set the bucket's metadata. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @method Bucket#setMetadata - * @param {object} metadata The metadata you wish to set. - * @param {SetBucketMetadataOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Set website metadata field on the bucket. - * //- - * const metadata = { - * website: { - * mainPageSuffix: 'http://example.com', - * notFoundPage: 'http://example.com/404.html' - * } - * }; - * - * bucket.setMetadata(metadata, function(err, apiResponse) {}); - * - * //- - * // Enable versioning for your bucket. 
- * //- - * bucket.setMetadata({ - * versioning: { - * enabled: true - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Enable KMS encryption for objects within this bucket. - * //- - * bucket.setMetadata({ - * encryption: { - * defaultKmsKeyName: 'projects/grape-spaceship-123/...' - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Set the default event-based hold value for new objects in this - * // bucket. - * //- - * bucket.setMetadata({ - * defaultEventBasedHold: true - * }, function(err, apiResponse) {}); - * - * //- - * // Remove object lifecycle rules. - * //- - * bucket.setMetadata({ - * lifecycle: null - * }, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setMetadata(metadata).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - }; - super({ - parent: storage, - baseUrl: '/b', - id: name, - createMethod: storage.createBucket.bind(storage), - methods, - }); - this.name = name; - this.storage = storage; - this.userProject = options.userProject; - this.acl = new acl_1.Acl({ - request: this.request.bind(this), - pathPrefix: '/acl', - }); - this.acl.default = new acl_1.Acl({ - request: this.request.bind(this), - pathPrefix: '/defaultObjectAcl', - }); - this.crc32cGenerator = - options.crc32cGenerator || this.storage.crc32cGenerator; - this.iam = new iam_1.Iam(this); - this.getFilesStream = paginator_1.paginator.streamify('getFiles'); - this.instanceRetryValue = storage.retryOptions.autoRetry; - this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } else { + return encodeReserved(literal); + } } - /** - * The bucket's Cloud Storage URI (`gs://`) - * - * @example - * ```ts - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * // `gs://my-bucket` - * const href = bucket.cloudStorageURI.href; - * ``` - */ - get cloudStorageURI() { - const uri = new url_1.URL('gs://'); - uri.host = this.name; - return uri; + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = 
previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } } - /** - * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). - * @property {boolean} [append=true] The new rules will be appended to any - * pre-existing rules. - */ - /** - * - * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects - * in this bucket. - * @property {string|object} action The action to be taken upon matching of - * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. - * **Note**: For configuring a raw-formatted rule object to be passed as `action` - * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. - * @property {object} condition Condition a bucket must meet before the - * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. - * @property {string} [storageClass] When using the `setStorageClass` - * action, provide this option to dictate which storage class the object - * should update to. Please see - * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. - */ - /** - * Add an object lifecycle management rule to the bucket. - * - * By default, an Object Lifecycle Management rule provided to this method - * will be included to the existing policy. To replace all existing rules, - * supply the `options` argument, setting `append` to `false`. - * - * To add multiple rules, pass a list to the `rule` parameter. Calling this - * function multiple times asynchronously does not guarantee that all rules - * are added correctly. - * - * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects - * in this bucket. - * @param {string|object} rule.action The action to be taken upon matching of - * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. - * **Note**: For configuring a raw-formatted rule object to be passed as `action` - * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. - * @param {object} rule.condition Condition a bucket must meet before the - * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. - * @param {string} [rule.storageClass] When using the `setStorageClass` - * action, provide this option to dictate which storage class the object - * should update to. - * @param {AddLifecycleRuleOptions} [options] Configuration object. - * @param {boolean} [options.append=true] Append the new rule to the existing - * policy. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Automatically have an object deleted from this bucket once it is 3 years - * // of age. 
- * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * age: 365 * 3 // Specified in days. - * } - * }, function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * const lifecycleRules = bucket.metadata.lifecycle.rule; - * - * // Iterate over the Object Lifecycle Management rules on this bucket. - * lifecycleRules.forEach(lifecycleRule => {}); - * }); - * - * //- - * // By default, the rule you provide will be added to the existing policy. - * // Optionally, you can disable this behavior to replace all of the - * // pre-existing rules. - * //- - * const options = { - * append: false - * }; - * - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * age: 365 * 3 // Specified in days. - * } - * }, options, function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * // All rules have been replaced with the new "delete" rule. - * - * // Iterate over the Object Lifecycle Management rules on this bucket. - * lifecycleRules.forEach(lifecycleRule => {}); - * }); - * - * //- - * // For objects created before 2018, "downgrade" the storage class. - * //- - * bucket.addLifecycleRule({ - * action: 'setStorageClass', - * storageClass: 'COLDLINE', - * condition: { - * createdBefore: new Date('2018') - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete objects created before 2016 which have the Coldline storage - * // class. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * matchesStorageClass: [ - * 'COLDLINE' - * ], - * createdBefore: new Date('2016') - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete object that has a noncurrent timestamp that is at least 100 days. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * daysSinceNoncurrentTime: 100 - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete object that has a noncurrent timestamp before 2020-01-01. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * noncurrentTimeBefore: new Date('2020-01-01') - * } - * }, function(err, apiResponse) {}); - * - * //- - * // Delete object that has a customTime that is at least 100 days. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * daysSinceCustomTime: 100 - * } - * }, function(err, apiResponse) ()); - * - * //- - * // Delete object that has a customTime before 2020-01-01. - * //- - * bucket.addLifecycleRule({ - * action: 'delete', - * condition: { - * customTimeBefore: new Date('2020-01-01') - * } - * }, function(err, apiResponse) {}); - * ``` - */ - addLifecycleRule(rule, optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - options = options || {}; - const rules = Array.isArray(rule) ? rule : [rule]; - const newLifecycleRules = rules.map(rule => { - if (typeof rule.action === 'object') { - // This is a raw-formatted rule object, the way the API expects. - // Just pass it through as-is. 
- return rule; - } - const apiFormattedRule = {}; - apiFormattedRule.condition = {}; - apiFormattedRule.action = { - type: rule.action.charAt(0).toUpperCase() + rule.action.slice(1), - }; - if (rule.storageClass) { - apiFormattedRule.action.storageClass = rule.storageClass; - } - for (const condition in rule.condition) { - if (rule.condition[condition] instanceof Date) { - apiFormattedRule.condition[condition] = rule.condition[condition] - .toISOString() - .replace(/T.+$/, ''); - } - else { - apiFormattedRule.condition[condition] = rule.condition[condition]; - } - } - return apiFormattedRule; - }); - if (options.append === false) { - this.setMetadata({ lifecycle: { rule: newLifecycleRules } }, options, callback); - return; - } - // The default behavior appends the previously-defined lifecycle rules with - // the new ones just passed in by the user. - this.getMetadata((err, metadata) => { - if (err) { - callback(err); - return; - } - const currentLifecycleRules = Array.isArray(metadata.lifecycle && metadata.lifecycle.rule) - ? metadata.lifecycle && metadata.lifecycle.rule - : []; - this.setMetadata({ - lifecycle: { - rule: currentLifecycleRules.concat(newLifecycleRules), - }, - }, options, callback); - }); + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } } - /** - * @typedef {object} CombineOptions - * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of - * the form - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, - * that will be used to encrypt the object. Overwrites the object - * metadata's `kms_key_name` value, if any. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback CombineCallback - * @param {?Error} err Request error, if any. - * @param {File} newFile The new {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} CombineResponse - * @property {File} 0 The new {@link File}. - * @property {object} 1 The full API response. - */ - /** - * Combine multiple files into one new file. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} - * - * @throws {Error} if a non-array is provided as sources argument. - * @throws {Error} if no sources are provided. - * @throws {Error} if no destination is provided. - * - * @param {string[]|File[]} sources The source files that will be - * combined. - * @param {string|File} destination The file you would like the - * source files combined into. - * @param {CombineOptions} [options] Configuration options. - * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of - * the form - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, - * that will be used to encrypt the object. Overwrites the object - * metadata's `kms_key_name` value, if any. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - - * @param {CombineCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const logBucket = storage.bucket('log-bucket'); - * - * const sources = [ - * logBucket.file('2013-logs.txt'), - * logBucket.file('2014-logs.txt') - * ]; - * - * const allLogs = logBucket.file('all-logs.txt'); - * - * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { - * // newFile === allLogs - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * logBucket.combine(sources, allLogs).then(function(data) { - * const newFile = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - combine(sources, destination, optionsOrCallback, callback) { - var _a; - if (!Array.isArray(sources) || sources.length === 0) { - throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); - } - if (!destination) { - throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); - } - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required - AvailableServiceObjectMethods.setMetadata, // Same as above - options); - const convertToFile = (file) => { - if (file instanceof file_1.File) { - return file; - } - return this.file(file); - }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - sources = sources.map(convertToFile); - const destinationFile = convertToFile(destination); - callback = callback || nodejs_common_1.util.noop; - if (!destinationFile.metadata.contentType) { - const destinationContentType = mime.contentType(destinationFile.name); - if (destinationContentType) { - destinationFile.metadata.contentType = destinationContentType; + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? 
{ request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 8467: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(6234); +var import_universal_user_agent = __nccwpck_require__(5030); + +// pkg/dist-src/version.js +var VERSION = "7.0.2"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(6234); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(6234); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof 
query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 4193: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "9.1.5"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete 
response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] } + }; } - let maxRetries = this.storage.retryOptions.maxRetries; - if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === - undefined && - options.ifGenerationMatch === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - maxRetries = 0; - } - if (options.ifGenerationMatch === undefined) { - Object.assign(options, destinationFile.instancePreconditionOpts, options); - } - // Make the request from the destination File object. - destinationFile.request({ - method: 'POST', - uri: '/compose', - maxRetries, - json: { - destination: { - contentType: destinationFile.metadata.contentType, - }, - sourceObjects: sources.map(source => { - const sourceObject = { - name: source.name, - }; - if (source.metadata && source.metadata.generation) { - sourceObject.generation = source.metadata.generation; - } - return sourceObject; - }), - }, - qs: options, - }, (err, resp) => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - if (err) { - callback(err, null, resp); - return; - } - callback(null, destinationFile, resp); - }); + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; } - /** - * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. - * - * @typedef {object} CreateChannelConfig - * @property {string} address The address where notifications are - * delivered for this channel. - * @property {string} [delimiter] Returns results in a directory-like mode. 
- * @property {number} [maxResults] Maximum number of `items` plus `prefixes` - * to return in a single page of responses. - * @property {string} [pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @property {string} [prefix] Filter results to objects whose names begin - * with this prefix. - * @property {string} [projection=noAcl] Set of properties to return. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {boolean} [versions=false] If `true`, lists all versions of an object - * as distinct results. - */ - /** - * @typedef {object} CreateChannelOptions - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} CreateChannelResponse - * @property {Channel} 0 The new {@link Channel}. - * @property {object} 1 The full API response. - */ - /** - * @callback CreateChannelCallback - * @param {?Error} err Request error, if any. - * @param {Channel} channel The new {@link Channel}. - * @param {object} apiResponse The full API response. - */ - /** - * Create a channel that will be notified when objects in this bucket changes. - * - * @throws {Error} If an ID is not provided. - * @throws {Error} If an address is not provided. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} - * - * @param {string} id The ID of the channel to create. - * @param {CreateChannelConfig} config Configuration for creating channel. - * @param {string} config.address The address where notifications are - * delivered for this channel. - * @param {string} [config.delimiter] Returns results in a directory-like mode. - * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` - * to return in a single page of responses. - * @param {string} [config.pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @param {string} [config.prefix] Filter results to objects whose names begin - * with this prefix. - * @param {string} [config.projection=noAcl] Set of properties to return. - * @param {string} [config.userProject] The ID of the project which will be - * billed for the request. - * @param {boolean} [config.versions=false] If `true`, lists all versions of an object - * as distinct results. - * @param {CreateChannelOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {CreateChannelCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const id = 'new-channel-id'; - * - * const config = { - * address: 'https://...' - * }; - * - * bucket.createChannel(id, config, function(err, channel, apiResponse) { - * if (!err) { - * // Channel created successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.createChannel(id, config).then(function(data) { - * const channel = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - createChannel(id, config, optionsOrCallback, callback) { - if (typeof id !== 'string') { - throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); - } - if (typeof config.address !== 'string') { - throw new Error(BucketExceptionMessages.CHANNEL_ADDRESS_REQUIRED); - } - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.request({ - method: 'POST', - uri: '/o/watch', - json: Object.assign({ - id, - type: 'web_hook', - }, config), - qs: options, - }, (err, apiResponse) => { - if (err) { - callback(err, null, apiResponse); - return; - } - const resourceId = apiResponse.resourceId; - const channel = this.storage.channel(id, resourceId); - channel.metadata = apiResponse; - callback(null, channel, apiResponse); - }); + let earlyExit = false; + function done() { + earlyExit = true; } - /** - * Metadata to set for the Notification. - * - * @typedef {object} CreateNotificationOptions - * @property {object} [customAttributes] An optional list of additional - * attributes to attach to each Cloud PubSub message published for this - * notification subscription. - * @property {string[]} [eventTypes] If present, only send notifications about - * listed event types. If empty, sent notifications for all event types. - * @property {string} [objectNamePrefix] If present, only apply this - * notification configuration to object names that begin with this prefix. - * @property {string} [payloadFormat] The desired content of the Payload. - * Defaults to `JSON_API_V1`. - * - * Acceptable values are: - * - `JSON_API_V1` - * - * - `NONE` - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback CreateNotificationCallback - * @param {?Error} err Request error, if any. - * @param {Notification} notification The new {@link Notification}. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} CreateNotificationResponse - * @property {Notification} 0 The new {@link Notification}. - * @property {object} 1 The full API response. - */ - /** - * Creates a notification subscription for the bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} - * - * @param {Topic|string} topic The Cloud PubSub topic to which this - * subscription publishes. If the project ID is omitted, the current - * project ID will be used. - * - * Acceptable formats are: - * - `projects/grape-spaceship-123/topics/my-topic` - * - * - `my-topic` - * @param {CreateNotificationOptions} [options] Metadata to set for the - * notification. - * @param {object} [options.customAttributes] An optional list of additional - * attributes to attach to each Cloud PubSub message published for this - * notification subscription. - * @param {string[]} [options.eventTypes] If present, only send notifications about - * listed event types. If empty, sent notifications for all event types. - * @param {string} [options.objectNamePrefix] If present, only apply this - * notification configuration to object names that begin with this prefix. - * @param {string} [options.payloadFormat] The desired content of the Payload. - * Defaults to `JSON_API_V1`. 
- * - * Acceptable values are: - * - `JSON_API_V1` - * - * - `NONE` - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {CreateNotificationCallback} [callback] Callback function. - * @returns {Promise} - * @throws {Error} If a valid topic is not provided. - * @see Notification#create - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const callback = function(err, notification, apiResponse) { - * if (!err) { - * // The notification was created successfully. - * } - * }; - * - * myBucket.createNotification('my-topic', callback); - * - * //- - * // Configure the nofiication by providing Notification metadata. - * //- - * const metadata = { - * objectNamePrefix: 'prefix-' - * }; - * - * myBucket.createNotification('my-topic', metadata, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * myBucket.createNotification('my-topic').then(function(data) { - * const notification = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/createNotification.js - * region_tag:storage_create_bucket_notifications - * Another example: - */ - createNotification(topic, optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - const topicIsObject = topic !== null && typeof topic === 'object'; - if (topicIsObject && nodejs_common_1.util.isCustomType(topic, 'pubsub/topic')) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - topic = topic.name; - } - if (typeof topic !== 'string') { - throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); - } - const body = Object.assign({ topic }, options); - if (body.topic.indexOf('projects') !== 0) { - body.topic = 'projects/{{projectId}}/topics/' + body.topic; - } - body.topic = '//pubsub.googleapis.com/' + body.topic; - if (!body.payloadFormat) { - body.payloadFormat = 'JSON_API_V1'; - } - const query = {}; - if (body.userProject) { - query.userProject = body.userProject; - delete body.userProject; - } - this.request({ - method: 'POST', - uri: '/notificationConfigs', - json: (0, util_2.convertObjKeysToSnakeCase)(body), - qs: query, - maxRetries: 0, //explicitly set this value since this is a non-idempotent function - }, (err, apiResponse) => { - if (err) { - callback(err, null, apiResponse); - return; - } - const notification = this.notification(apiResponse.id); - notification.metadata = apiResponse; - callback(null, notification, apiResponse); - }); - } - /** - * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} - * for all of the supported properties. - * @property {boolean} [force] Suppress errors until all files have been - * processed. - */ - /** - * @callback DeleteFilesCallback - * @param {?Error|?Error[]} err Request error, if any, or array of errors from - * files that were not able to be deleted. - * @param {object} [apiResponse] The full API response. - */ - /** - * Iterate over the bucket's files, calling `file.delete()` on each. - * - * This is not an atomic request. A delete attempt will be - * made for each file individually. Any one can fail, in which case only a - * portion of the files you intended to be deleted would have. - * - * Operations are performed in parallel, up to 10 at once. 
The first error - * breaks the loop and will execute the provided callback with it. Specify - * `{ force: true }` to suppress the errors until all files have had a chance - * to be processed. - * - * File preconditions cannot be passed to this function. It will not retry unless - * the idempotency strategy is set to retry always. - * - * The `query` object passed as the first argument will also be passed to - * {@link Bucket#getFiles}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} - * - * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} - * @param {boolean} [query.force] Suppress errors until all files have been - * processed. - * @param {DeleteFilesCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Delete all of the files in the bucket. - * //- - * bucket.deleteFiles(function(err) {}); - * - * //- - * // By default, if a file cannot be deleted, this method will stop deleting - * // files from your bucket. You can override this setting with `force: - * // true`. - * //- - * bucket.deleteFiles({ - * force: true - * }, function(errors) { - * // `errors`: - * // Array of errors if any occurred, otherwise null. - * }); - * - * //- - * // The first argument to this method acts as a query to - * // {@link Bucket#getFiles}. As an example, you can delete files - * // which match a prefix. - * //- - * bucket.deleteFiles({ - * prefix: 'images/' - * }, function(err) { - * if (!err) { - * // All files in the `images` directory have been deleted. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.deleteFiles().then(function() {}); - * ``` - */ - deleteFiles(queryOrCallback, callback) { - let query = {}; - if (typeof queryOrCallback === 'function') { - callback = queryOrCallback; - } - else if (queryOrCallback) { - query = queryOrCallback; - } - const MAX_PARALLEL_LIMIT = 10; - const MAX_QUEUE_SIZE = 1000; - const errors = []; - const deleteFile = (file) => { - return file.delete(query).catch(err => { - if (!query.force) { - throw err; - } - errors.push(err); - }); - }; - (async () => { - try { - let promises = []; - const limit = pLimit(MAX_PARALLEL_LIMIT); - const filesStream = this.getFilesStream(query); - for await (const curFile of filesStream) { - if (promises.length >= MAX_QUEUE_SIZE) { - await Promise.all(promises); - promises = []; - } - promises.push(limit(() => deleteFile(curFile)).catch(e => { - filesStream.destroy(); - throw e; - })); - } - await Promise.all(promises); - callback(errors.length > 0 ? errors : null); - } - catch (e) { - callback(e); - return; - } - })(); - } - /** - * @typedef {array} DeleteLabelsResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteLabelsCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata Bucket's metadata. - */ - /** - * Delete one or more labels from this bucket. - * - * @param {string|string[]} [labels] The labels to delete. If no labels are - * provided, all of the labels are removed. - * @param {DeleteLabelsCallback} [callback] Callback function. 
- * @param {DeleteLabelsOptions} [options] Options, including precondition options - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Delete all of the labels from this bucket. - * //- - * bucket.deleteLabels(function(err, apiResponse) {}); - * - * //- - * // Delete a single label. - * //- - * bucket.deleteLabels('labelone', function(err, apiResponse) {}); - * - * //- - * // Delete a specific set of labels. - * //- - * bucket.deleteLabels([ - * 'labelone', - * 'labeltwo' - * ], function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.deleteLabels().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { - let labels = new Array(); - let options = {}; - if (typeof labelsOrCallbackOrOptions === 'function') { - callback = labelsOrCallbackOrOptions; - } - else if (typeof labelsOrCallbackOrOptions === 'string') { - labels = [labelsOrCallbackOrOptions]; - } - else if (Array.isArray(labelsOrCallbackOrOptions)) { - labels = labelsOrCallbackOrOptions; - } - else if (labelsOrCallbackOrOptions) { - options = labelsOrCallbackOrOptions; - } - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - const deleteLabels = (labels) => { - const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { - nullLabelMap[labelKey] = null; - return nullLabelMap; - }, {}); - if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { - this.setLabels(nullLabelMap, options, callback); - } - else { - this.setLabels(nullLabelMap, callback); - } - }; - if (labels.length === 0) { - this.getLabels((err, labels) => { - if (err) { - callback(err); - return; - } - deleteLabels(Object.keys(labels)); - }); - } - else { - deleteLabels(labels); - } - } - /** - * @typedef {array} DisableRequesterPaysResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DisableRequesterPaysCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - *
- * Early Access Testers Only
- *
- * This feature is not yet widely-available.
- * - * Disable `requesterPays` functionality from this bucket. - * - * @param {DisableRequesterPaysCallback} [callback] Callback function. - * @param {DisableRequesterPaysOptions} [options] Options, including precondition options - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.disableRequesterPays(function(err, apiResponse) { - * if (!err) { - * // requesterPays functionality disabled successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.disableRequesterPays().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/requesterPays.js - * region_tag:storage_disable_requester_pays - * Example of disabling requester pays: - */ - disableRequesterPays(optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.setMetadata({ - billing: { - requesterPays: false, - }, - }, options, callback); - } - /** - * Configuration object for enabling logging. - * - * @typedef {object} EnableLoggingOptions - * @property {string|Bucket} [bucket] The bucket for the log entries. By - * default, the current bucket is used. - * @property {string} prefix A unique prefix for log object names. - */ - /** - * Enable logging functionality for this bucket. This will make two API - * requests, first to grant Cloud Storage WRITE permission to the bucket, then - * to set the appropriate configuration on the Bucket's metadata. - * - * @param {EnableLoggingOptions} config Configuration options. - * @param {string|Bucket} [config.bucket] The bucket for the log entries. By - * default, the current bucket is used. - * @param {string} config.prefix A unique prefix for log object names. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * const config = { - * prefix: 'log' - * }; - * - * bucket.enableLogging(config, function(err, apiResponse) { - * if (!err) { - * // Logging functionality enabled successfully. - * } - * }); - * - * ``` - * @example - * Optionally, provide a destination bucket. - * ``` - * const config = { - * prefix: 'log', - * bucket: 'destination-bucket' - * }; - * - * bucket.enableLogging(config, function(err, apiResponse) {}); - * ``` - * - * @example - * If the callback is omitted, we'll return a Promise. - * ``` - * bucket.enableLogging(config).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - enableLogging(config, callback) { - if (!config || - typeof config === 'function' || - typeof config.prefix === 'undefined') { - throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); - } - const logBucket = config.bucket - ? config.bucket.id || config.bucket - : this.id; - const options = {}; - if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { - options.ifMetagenerationMatch = config.ifMetagenerationMatch; - } - if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { - options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; - } - (async () => { - try { - const [policy] = await this.iam.getPolicy(); - policy.bindings.push({ - members: ['group:cloud-storage-analytics@google.com'], - role: 'roles/storage.objectCreator', - }); - await this.iam.setPolicy(policy); - this.setMetadata({ - logging: { - logBucket, - logObjectPrefix: config.prefix, - }, - }, options, callback); - } - catch (e) { - callback(e); - return; - } - })(); - } - /** - * @typedef {array} EnableRequesterPaysResponse - * @property {object} 0 The full API response. - */ - /** - * @callback EnableRequesterPaysCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - *
- * Early Access Testers Only
- *
- * This feature is not yet widely-available.
- * - * Enable `requesterPays` functionality for this bucket. This enables you, the - * bucket owner, to have the requesting user assume the charges for the access - * to your bucket and its contents. - * - * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] - * Callback function or precondition options. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.enableRequesterPays(function(err, apiResponse) { - * if (!err) { - * // requesterPays functionality enabled successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.enableRequesterPays().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/requesterPays.js - * region_tag:storage_enable_requester_pays - * Example of enabling requester pays: - */ - enableRequesterPays(optionsOrCallback, cb) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - cb = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.setMetadata({ - billing: { - requesterPays: true, - }, - }, options, cb); + results = results.concat( + mapFn ? mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; } - /** - * Create a {@link File} object. See {@link File} to see how to handle - * the different use cases you may have. - * - * @param {string} name The name of the file in this bucket. - * @param {FileOptions} [options] Configuration options. - * @param {string|number} [options.generation] Only use a specific revision of - * this file. - * @param {string} [options.encryptionKey] A custom encryption key. See - * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. - * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will - * be used to encrypt the object. Must be in the format: - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. - * KMS key ring must use the same location as the bucket. - * @param {string} [options.userProject] The ID of the project which will be - * billed for all requests made from File object. - * @returns {File} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const file = bucket.file('my-existing-file.png'); - * ``` - */ - file(name, options) { - if (!name) { - throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); - } - return new file_1.File(this, name, options); - } - /** - * @typedef {array} GetFilesResponse - * @property {File[]} 0 Array of {@link File} instances. - * @param {object} nextQuery 1 A query object to receive more results. - * @param {object} apiResponse 2 The full API response. - */ - /** - * @callback GetFilesCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files Array of {@link File} instances. - * @param {object} nextQuery A query object to receive more results. - * @param {object} apiResponse The full API response. - */ - /** - * Query object for listing files. - * - * @typedef {object} GetFilesOptions - * @property {boolean} [autoPaginate=true] Have pagination handled - * automatically. 
- * @property {string} [delimiter] Results will contain only objects whose - * names, aside from the prefix, do not contain delimiter. Objects whose - * names, aside from the prefix, contain delimiter will have their name - * truncated after the delimiter, returned in `apiResponse.prefixes`. - * Duplicate prefixes are omitted. - * @property {string} [endOffset] Filter results to objects whose names are - * lexicographically before endOffset. If startOffset is also set, the objects - * listed have names between startOffset (inclusive) and endOffset (exclusive). - * @property {boolean} [includeTrailingDelimiter] If true, objects that end in - * exactly one instance of delimiter have their metadata included in items[] - * in addition to the relevant part of the object name appearing in prefixes[]. - * @property {string} [prefix] Filter results to objects whose names begin - * with this prefix. - * @property {number} [maxApiCalls] Maximum number of API calls to make. - * @property {number} [maxResults] Maximum number of items plus prefixes to - * return per call. - * Note: By default will handle pagination automatically - * if more than 1 page worth of results are requested per call. - * When `autoPaginate` is set to `false` the smaller of `maxResults` - * or 1 page of results will be returned per call. - * @property {string} [pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @property {string} [startOffset] Filter results to objects whose names are - * lexicographically equal to or after startOffset. If endOffset is also set, - * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {boolean} [versions] If true, returns File objects scoped to - * their versions. - */ - /** - * Get {@link File} objects for the files currently in the bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} - * - * @param {GetFilesOptions} [query] Query object for listing files. - * @param {boolean} [query.autoPaginate=true] Have pagination handled - * automatically. - * @param {string} [query.delimiter] Results will contain only objects whose - * names, aside from the prefix, do not contain delimiter. Objects whose - * names, aside from the prefix, contain delimiter will have their name - * truncated after the delimiter, returned in `apiResponse.prefixes`. - * Duplicate prefixes are omitted. - * @param {string} [query.endOffset] Filter results to objects whose names are - * lexicographically before endOffset. If startOffset is also set, the objects - * listed have names between startOffset (inclusive) and endOffset (exclusive). - * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in - * exactly one instance of delimiter have their metadata included in items[] - * in addition to the relevant part of the object name appearing in prefixes[]. - * @param {string} [query.prefix] Filter results to objects whose names begin - * with this prefix. - * @param {number} [query.maxApiCalls] Maximum number of API calls to make. - * @param {number} [query.maxResults] Maximum number of items plus prefixes to - * return per call. - * Note: By default will handle pagination automatically - * if more than 1 page worth of results are requested per call. 
- * When `autoPaginate` is set to `false` the smaller of `maxResults` - * or 1 page of results will be returned per call. - * @param {string} [query.pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @param {string} [query.startOffset] Filter results to objects whose names are - * lexicographically equal to or after startOffset. If endOffset is also set, - * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). - * @param {string} [query.userProject] The ID of the project which will be - * billed for the request. - * @param {boolean} [query.versions] If true, returns File objects scoped to - * their versions. - * @param {GetFilesCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getFiles(function(err, files) { - * if (!err) { - * // files is an array of File objects. - * } - * }); - * - * //- - * // If your bucket has versioning enabled, you can get all of your files - * // scoped to their generation. - * //- - * bucket.getFiles({ - * versions: true - * }, function(err, files) { - * // Each file is scoped to its generation. - * }); - * - * //- - * // To control how many API requests are made and page through the results - * // manually, set `autoPaginate` to `false`. - * //- - * const callback = function(err, files, nextQuery, apiResponse) { - * if (nextQuery) { - * // More results exist. - * bucket.getFiles(nextQuery, callback); - * } - * - * // The `metadata` property is populated for you with the metadata at the - * // time of fetching. - * files[0].metadata; - * - * // However, in cases where you are concerned the metadata could have - * // changed, use the `getMetadata` method. - * files[0].getMetadata(function(err, metadata) {}); - * }; - * - * bucket.getFiles({ - * autoPaginate: false - * }, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getFiles().then(function(data) { - * const files = data[0]; - * }); - * - * ``` - * @example - *
- * Simulating a File System
- *
- * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
- *
- * Consider the following remote objects:
- *
- *   1. "a"
- *   2. "a/b/c/d"
- *   3. "b/d/e"
- *
- * Using a delimiter of `/` will return a single file, "a".
- *
- * `apiResponse.prefixes` will return the "sub-directories" that were found:
- *
- *   1. "a/"
- *   2. "b/"
- * ``` - * bucket.getFiles({ - * autoPaginate: false, - * delimiter: '/' - * }, function(err, files, nextQuery, apiResponse) { - * // files = [ - * // {File} // File object for file "a" - * // ] - * - * // apiResponse.prefixes = [ - * // 'a/', - * // 'b/' - * // ] - * }); - * ``` - * - * @example - * Using prefixes, it's now possible to simulate a file system with follow-up requests. - * ``` - * bucket.getFiles({ - * autoPaginate: false, - * delimiter: '/', - * prefix: 'a/' - * }, function(err, files, nextQuery, apiResponse) { - * // No files found within "directory" a. - * // files = [] - * - * // However, a "sub-directory" was found. - * // This prefix can be used to continue traversing the "file system". - * // apiResponse.prefixes = [ - * // 'a/b/' - * // ] - * }); - * ``` - * - * @example include:samples/files.js - * region_tag:storage_list_files - * Another example: - * - * @example include:samples/files.js - * region_tag:storage_list_files_with_prefix - * Example of listing files, filtered by a prefix: - */ - getFiles(queryOrCallback, callback) { - let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; - if (!callback) { - callback = queryOrCallback; - } - query = Object.assign({}, query); - this.request({ - uri: '/o', - qs: query, - }, (err, resp) => { - if (err) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback(err, null, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const files = itemsArray.map((file) => { - const options = {}; - if (query.versions) { - options.generation = file.generation; - } - if (file.kmsKeyName) { - options.kmsKeyName = file.kmsKeyName; - } - const fileInstance = this.file(file.name, options); - fileInstance.metadata = file; - return fileInstance; - }); - let nextQuery = null; - if (resp.nextPageToken) { - nextQuery = Object.assign({}, query, { - pageToken: resp.nextPageToken, - }); - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback(null, files, nextQuery, resp); - }); - } - /** - * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). - * @param {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} GetLabelsResponse - * @property {object} 0 Object of labels currently set on this bucket. - */ - /** - * @callback GetLabelsCallback - * @param {?Error} err Request error, if any. - * @param {object} labels Object of labels currently set on this bucket. - */ - /** - * Get the labels currently set on this bucket. - * - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetLabelsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.getLabels(function(err, labels) { - * if (err) { - * // Error handling omitted. - * } - * - * // labels = { - * // label: 'labelValue', - * // ... - * // } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.getLabels().then(function(data) { - * const labels = data[0]; - * }); - * ``` - */ - getLabels(optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.getMetadata(options, (err, metadata) => { - if (err) { - callback(err, null); - return; - } - callback(null, metadata.labels || {}); - }); - } - /** - * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback GetNotificationsCallback - * @param {?Error} err Request error, if any. - * @param {Notification[]} notifications Array of {@link Notification} - * instances. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} GetNotificationsResponse - * @property {Notification[]} 0 Array of {@link Notification} instances. - * @property {object} 1 The full API response. - */ - /** - * Retrieves a list of notification subscriptions for a given bucket. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} - * - * @param {GetNotificationsOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetNotificationsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * bucket.getNotifications(function(err, notifications, apiResponse) { - * if (!err) { - * // notifications is an array of Notification objects. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getNotifications().then(function(data) { - * const notifications = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/listNotifications.js - * region_tag:storage_list_bucket_notifications - * Another example: - */ - getNotifications(optionsOrCallback, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - this.request({ - uri: '/notificationConfigs', - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const notifications = itemsArray.map((notification) => { - const notificationInstance = this.notification(notification.id); - notificationInstance.metadata = notification; - return notificationInstance; - }); - callback(null, notifications, resp); - }); - } - /** - * @typedef {array} GetSignedUrlResponse - * @property {object} 0 The signed URL. - */ - /** - * @callback GetSignedUrlCallback - * @param {?Error} err Request error, if any. - * @param {object} url The signed URL. - */ - /** - * @typedef {object} GetBucketSignedUrlConfig - * @property {string} action Currently only supports "list" (HTTP: GET). - * @property {*} expires A timestamp when this link will expire. Any value - * given is passed to `new Date()`. - * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. - * @property {string} [version='v2'] The signing version to use, either - * 'v2' or 'v4'. 
- * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instaed of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @property {string} [cname] The cname for this bucket, i.e., - * "https://cdn.example.com". - * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} - * @property {object} [extensionHeaders] If these headers are used, the - * server will check to make sure that the client provides matching - * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} - * for the requirements of this feature, most notably: - * - The header name must be prefixed with `x-goog-` - * - The header name must be all lowercase - * - * Note: Multi-valued header passed as an array in the extensionHeaders - * object is converted into a string, delimited by `,` with - * no space. Requests made using the signed URL will need to - * delimit multi-valued headers using a single `,` as well, or - * else the server will report a mismatched signature. - * @property {object} [queryParams] Additional query parameters to include - * in the signed URL. - */ - /** - * Get a signed URL to allow limited time access to a bucket. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed URL. That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} - * - * @throws {Error} if an expiration timestamp from the past is given. - * - * @param {GetBucketSignedUrlConfig} config Configuration object. - * @param {string} config.action Currently only supports "list" (HTTP: GET). - * @param {*} config.expires A timestamp when this link will expire. Any value - * given is passed to `new Date()`. - * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. - * @param {string} [config.version='v2'] The signing version to use, either - * 'v2' or 'v4'. - * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instaed of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @param {string} [config.cname] The cname for this bucket, i.e., - * "https://cdn.example.com". - * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} - * @param {object} [config.extensionHeaders] If these headers are used, the - * server will check to make sure that the client provides matching - * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} - * for the requirements of this feature, most notably: - * - The header name must be prefixed with `x-goog-` - * - The header name must be all lowercase - * - * Note: Multi-valued header passed as an array in the extensionHeaders - * object is converted into a string, delimited by `,` with - * no space. Requests made using the signed URL will need to - * delimit multi-valued headers using a single `,` as well, or - * else the server will report a mismatched signature. - * @property {object} [config.queryParams] Additional query parameters to include - * in the signed URL. - * @param {GetSignedUrlCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * //- - * // Generate a URL that allows temporary access to list files in a bucket. - * //- - * const request = require('request'); - * - * const config = { - * action: 'list', - * expires: '03-17-2025' - * }; - * - * bucket.getSignedUrl(config, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The bucket is now available to be listed from this URL. - * request(url, function(err, resp) { - * // resp.statusCode = 200 - * }); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.getSignedUrl(config).then(function(data) { - * const url = data[0]; - * }); - * ``` - */ - getSignedUrl(cfg, callback) { - const method = BucketActionToHTTPMethod[cfg.action]; - if (!method) { - throw new Error(storage_1.ExceptionMessages.INVALID_ACTION); - } - const signConfig = { - method, - expires: cfg.expires, - version: cfg.version, - cname: cfg.cname, - extensionHeaders: cfg.extensionHeaders || {}, - queryParams: cfg.queryParams || {}, - }; - if (!this.signer) { - this.signer = new signer_1.URLSigner(this.storage.authClient, this); - } - this.signer - .getSignedUrl(signConfig) - .then(signedUrl => callback(null, signedUrl), callback); - } - /** - * @callback BucketLockCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Lock a previously-defined retention policy. This will prevent changes to - * the policy. - * - * @throws {Error} if a metageneration is not provided. - * - * @param {number|string} metageneration The bucket's metageneration. This is - * accesssible from calling {@link File#getMetadata}. - * @param {BucketLockCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * const metageneration = 2; - * - * bucket.lock(metageneration, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.lock(metageneration).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - lock(metageneration, callback) { - const metatype = typeof metageneration; - if (metatype !== 'number' && metatype !== 'string') { - throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); - } - this.request({ - method: 'POST', - uri: '/lockRetentionPolicy', - qs: { - ifMetagenerationMatch: metageneration, - }, - }, callback); - } - /** - * @typedef {array} MakeBucketPrivateResponse - * @property {File[]} 0 List of files made private. - */ - /** - * @callback MakeBucketPrivateCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files List of files made private. - */ - /** - * @typedef {object} MakeBucketPrivateOptions - * @property {boolean} [includeFiles=false] Make each file in the bucket - * private. - * @property {Metadata} [metadata] Define custom metadata properties to define - * along with the operation. - * @property {boolean} [force] Queue errors occurred while making files - * private until all files have been processed. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Make the bucket listing private. - * - * You may also choose to make the contents of the bucket private by - * specifying `includeFiles: true`. This will automatically run - * {@link File#makePrivate} for every file in the bucket. - * - * When specifying `includeFiles: true`, use `force: true` to delay execution - * of your callback until all files have been processed. By default, the - * callback is executed after the first error. Use `force` to queue such - * errors until all files have been processed, after which they will be - * returned as an array as the first argument to your callback. - * - * NOTE: This may cause the process to be long-running and use a high number - * of requests. Use with caution. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @param {MakeBucketPrivateOptions} [options] Configuration options. - * @param {boolean} [options.includeFiles=false] Make each file in the bucket - * private. - * @param {Metadata} [options.metadata] Define custom metadata properties to define - * along with the operation. - * @param {boolean} [options.force] Queue errors occurred while making files - * private until all files have been processed. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {MakeBucketPrivateCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Make the bucket private. - * //- - * bucket.makePrivate(function(err) {}); - * - * //- - * // Make the bucket and its contents private. - * //- - * const opts = { - * includeFiles: true - * }; - * - * bucket.makePrivate(opts, function(err, files) { - * // `err`: - * // The first error to occur, otherwise null. - * // - * // `files`: - * // Array of files successfully made private in the bucket. - * }); - * - * //- - * // Make the bucket and its contents private, using force to suppress errors - * // until all files have been processed. 
- * //- - * const opts = { - * includeFiles: true, - * force: true - * }; - * - * bucket.makePrivate(opts, function(errors, files) { - * // `errors`: - * // Array of errors if any occurred, otherwise null. - * // - * // `files`: - * // Array of files successfully made private in the bucket. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.makePrivate(opts).then(function(data) { - * const files = data[0]; - * }); - * ``` - */ - makePrivate(optionsOrCallback, callback) { - var _a, _b, _c, _d; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - options.private = true; - const query = { - predefinedAcl: 'projectPrivate', - }; - if (options.userProject) { - query.userProject = options.userProject; - } - if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { - query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; - } - if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { - query.ifGenerationNotMatch = - options.preconditionOpts.ifGenerationNotMatch; - } - if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { - query.ifMetagenerationMatch = - options.preconditionOpts.ifMetagenerationMatch; - } - if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { - query.ifMetagenerationNotMatch = - options.preconditionOpts.ifMetagenerationNotMatch; - } - // You aren't allowed to set both predefinedAcl & acl properties on a bucket - // so acl must explicitly be nullified. - const metadata = extend({}, options.metadata, { acl: null }); - this.setMetadata(metadata, query, err => { - if (err) { - callback(err); - } - const internalCall = () => { - if (options.includeFiles) { - return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); - } - return Promise.resolve([]); - }; - internalCall() - .then(files => callback(null, files)) - .catch(callback); - }); - } - /** - * @typedef {object} MakeBucketPublicOptions - * @property {boolean} [includeFiles=false] Make each file in the bucket - * private. - * @property {boolean} [force] Queue errors occurred while making files - * private until all files have been processed. - */ - /** - * @callback MakeBucketPublicCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files List of files made public. - */ - /** - * @typedef {array} MakeBucketPublicResponse - * @property {File[]} 0 List of files made public. - */ - /** - * Make the bucket publicly readable. - * - * You may also choose to make the contents of the bucket publicly readable by - * specifying `includeFiles: true`. This will automatically run - * {@link File#makePublic} for every file in the bucket. - * - * When specifying `includeFiles: true`, use `force: true` to delay execution - * of your callback until all files have been processed. By default, the - * callback is executed after the first error. Use `force` to queue such - * errors until all files have been processed, after which they will be - * returned as an array as the first argument to your callback. - * - * NOTE: This may cause the process to be long-running and use a high number - * of requests. Use with caution. 
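// --- Illustrative sketch added by the editor; not part of the bundled source.
// makePrivate() above copies options.preconditionOpts (ifGenerationMatch,
// ifGenerationNotMatch, ifMetagenerationMatch, ifMetagenerationNotMatch) into
// the query string before the underlying setMetadata() PATCH. A minimal,
// hedged usage sketch with a metageneration precondition; the bucket name and
// metageneration value are placeholders:
const {Storage} = require('@google-cloud/storage');
const albumsBucket = new Storage().bucket('albums');

albumsBucket.makePrivate(
  {
    includeFiles: false,
    preconditionOpts: {ifMetagenerationMatch: 2}, // fail if the bucket metadata changed underneath us
  },
  err => {
    if (err) {
      console.error('makePrivate failed:', err);
    }
  }
);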
- * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} - * - * @param {MakeBucketPublicOptions} [options] Configuration options. - * @param {boolean} [options.includeFiles=false] Make each file in the bucket - * private. - * @param {boolean} [options.force] Queue errors occurred while making files - * private until all files have been processed. - * @param {MakeBucketPublicCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Make the bucket publicly readable. - * //- - * bucket.makePublic(function(err) {}); - * - * //- - * // Make the bucket and its contents publicly readable. - * //- - * const opts = { - * includeFiles: true - * }; - * - * bucket.makePublic(opts, function(err, files) { - * // `err`: - * // The first error to occur, otherwise null. - * // - * // `files`: - * // Array of files successfully made public in the bucket. - * }); - * - * //- - * // Make the bucket and its contents publicly readable, using force to - * // suppress errors until all files have been processed. - * //- - * const opts = { - * includeFiles: true, - * force: true - * }; - * - * bucket.makePublic(opts, function(errors, files) { - * // `errors`: - * // Array of errors if any occurred, otherwise null. - * // - * // `files`: - * // Array of files successfully made public in the bucket. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.makePublic(opts).then(function(data) { - * const files = data[0]; - * }); - * ``` - */ - makePublic(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const req = extend(true, { public: true }, options); - this.acl - .add({ - entity: 'allUsers', - role: 'READER', - }) - .then(() => { - return this.acl.default.add({ - entity: 'allUsers', - role: 'READER', - }); - }) - .then(() => { - if (req.includeFiles) { - return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); - } - return []; - }) - .then(files => callback(null, files), callback); - } - /** - * Get a reference to a Cloud Pub/Sub Notification. - * - * @param {string} id ID of notification. - * @returns {Notification} - * @see Notification - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const notification = bucket.notification('1'); - * ``` - */ - notification(id) { - if (!id) { - throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); - } - return new notification_1.Notification(this, id); - } - /** - * Remove an already-existing retention policy from this bucket, if it is not - * locked. - * - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @param {SetBucketMetadataOptions} [options] Options, including precondition options - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * bucket.removeRetentionPeriod(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.removeRetentionPeriod().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - removeRetentionPeriod(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.setMetadata({ - retentionPolicy: null, - }, options, callback); - } - /** - * Makes request and applies userProject query parameter if necessary. - * - * @private - * - * @param {object} reqOpts - The request options. - * @param {function} callback - The callback function. - */ - request(reqOpts, callback) { - if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { - reqOpts.qs = extend(reqOpts.qs, { userProject: this.userProject }); - } - return super.request(reqOpts, callback); - } - /** - * @typedef {array} SetLabelsResponse - * @property {object} 0 The bucket metadata. - */ - /** - * @callback SetLabelsCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The bucket metadata. - */ - /** - * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Set labels on the bucket. - * - * This makes an underlying call to {@link Bucket#setMetadata}, which - * is a PATCH request. This means an individual label can be overwritten, but - * unmentioned labels will not be touched. - * - * @param {object} labels Labels to set on the bucket. - * @param {SetLabelsOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {SetLabelsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * const labels = { - * labelone: 'labelonevalue', - * labeltwo: 'labeltwovalue' - * }; - * - * bucket.setLabels(labels, function(err, metadata) { - * if (!err) { - * // Labels set successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setLabels(labels).then(function(data) { - * const metadata = data[0]; - * }); - * ``` - */ - setLabels(labels, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - callback = callback || nodejs_common_1.util.noop; - this.setMetadata({ labels }, options, callback); - } - setMetadata(metadata, optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = - typeof optionsOrCallback === 'function' - ? optionsOrCallback - : cb; - this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); - super - .setMetadata(metadata, options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } - /** - * Lock all objects contained in the bucket, based on their creation time. Any - * attempt to overwrite or delete objects younger than the retention period - * will result in a `PERMISSION_DENIED` error. 
- * - * An unlocked retention policy can be modified or removed from the bucket via - * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A - * locked retention policy cannot be removed or shortened in duration for the - * lifetime of the bucket. Attempting to remove or decrease period of a locked - * retention policy will result in a `PERMISSION_DENIED` error. You can still - * increase the policy. - * - * @param {*} duration In seconds, the minimum retention time for all objects - * contained in this bucket. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @param {SetBucketMetadataCallback} [options] Options, including precondition options. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * const DURATION_SECONDS = 15780000; // 6 months. - * - * //- - * // Lock the objects in this bucket for 6 months. - * //- - * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setRetentionPeriod(duration, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.setMetadata({ - retentionPolicy: { - retentionPeriod: duration, - }, - }, options, callback); - } - /** - * - * @typedef {object} Cors - * @property {number} [maxAgeSeconds] The number of seconds the browser is - * allowed to make requests before it must repeat the preflight request. - * @property {string[]} [method] HTTP method allowed for cross origin resource - * sharing with this bucket. - * @property {string[]} [origin] an origin allowed for cross origin resource - * sharing with this bucket. - * @property {string[]} [responseHeader] A header allowed for cross origin - * resource sharing with this bucket. - */ - /** - * This can be used to set the CORS configuration on the bucket. - * - * The configuration will be overwritten with the value passed into this. - * - * @param {Cors[]} corsConfiguration The new CORS configuration to set - * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is - * allowed to make requests before it must repeat the preflight request. - * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource - * sharing with this bucket. - * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource - * sharing with this bucket. - * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin - * resource sharing with this bucket. - * @param {SetBucketMetadataCallback} [callback] Callback function. - * @param {SetBucketMetadataOptions} [options] Options, including precondition options. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const bucket = storage.bucket('albums'); - * - * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour - * bucket.setCorsConfiguration(corsConfiguration); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.setMetadata({ - cors: corsConfiguration, - }, options, callback); - } - /** - * @typedef {object} SetBucketStorageClassOptions - * @property {string} [userProject] - The ID of the project which will be - * billed for the request. - */ - /** - * @callback SetBucketStorageClassCallback - * @param {?Error} err Request error, if any. - */ - /** - * Set the default storage class for new files in this bucket. - * - * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} - * - * @param {string} storageClass The new storage class. (`standard`, - * `nearline`, `coldline`, or `archive`). - * **Note:** The storage classes `multi_regional`, `regional`, and - * `durable_reduced_availability` are now legacy and will be deprecated in - * the future. - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] - The ID of the project which will be - * billed for the request. - * @param {SetStorageClassCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.setStorageClass('nearline', function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * // The storage class was updated successfully. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.setStorageClass('nearline').then(function() {}); - * ``` - */ - setStorageClass(storageClass, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - // In case we get input like `storageClass`, convert to `storage_class`. - storageClass = storageClass - .replace(/-/g, '_') - .replace(/([a-z])([A-Z])/g, (_, low, up) => { - return low + '_' + up; - }) - .toUpperCase(); - this.setMetadata({ storageClass }, options, callback); - } - /** - * Set a user project to be billed for all requests made from this Bucket - * object and any files referenced from this Bucket object. - * - * @param {string} userProject The user project. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * bucket.setUserProject('grape-spaceship-123'); - * ``` - */ - setUserProject(userProject) { - this.userProject = userProject; - const methods = [ - 'create', - 'delete', - 'exists', - 'get', - 'getMetadata', - 'setMetadata', - ]; - methods.forEach(method => { - const methodConfig = this.methods[method]; - if (typeof methodConfig === 'object') { - if (typeof methodConfig.reqOpts === 'object') { - extend(methodConfig.reqOpts.qs, { userProject }); - } - else { - methodConfig.reqOpts = { - qs: { userProject }, - }; - } - } - }); - } - /** - * @typedef {object} UploadOptions Configuration options for Bucket#upload(). - * @property {string|File} [destination] The place to save - * your file. 
If given a string, the file will be uploaded to the bucket - * using the string as a filename. When given a File object, your local - * file will be uploaded to the File object's bucket and under the File - * object's name. Lastly, when this argument is omitted, the file is uploaded - * to your bucket using the name of the local file. - * @property {string} [encryptionKey] A custom encryption key. See - * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. - * @property {boolean} [gzip] Automatically gzip the file. This will set - * `options.metadata.contentEncoding` to `gzip`. - * @property {string} [kmsKeyName] The name of the Cloud KMS key that will - * be used to encrypt the object. Must be in the format: - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. - * @property {object} [metadata] See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. - * @property {string} [offset] The starting byte of the upload stream, for - * resuming an interrupted upload. Defaults to 0. - * @property {string} [predefinedAcl] Apply a predefined set of access - * controls to this object. - * - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. - * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @property {boolean} [private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @property {boolean} [public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @property {boolean} [resumable=true] Resumable uploads are automatically - * enabled and must be shut off explicitly by setting to false. - * @property {number} [timeout=60000] Set the HTTP request timeout in - * milliseconds. This option is not available for resumable uploads. - * Default: `60000` - * @property {string} [uri] The URI for an already-created resumable - * upload. See {@link File#createResumableUpload}. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string|boolean} [validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with an - * MD5 checksum for maximum reliability. CRC32c will provide better - * performance with less reliability. You may also choose to skip - * validation completely, however this is **not recommended**. - */ - /** - * @typedef {array} UploadResponse - * @property {object} 0 The uploaded {@link File}. - * @property {object} 1 The full API response. - */ - /** - * @callback UploadCallback - * @param {?Error} err Request error, if any. - * @param {object} file The uploaded {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * Upload a file to the bucket. This is a convenience method that wraps - * {@link File#createWriteStream}. 
- * - * Resumable uploads are enabled by default - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} - * - * @param {string} pathString The fully qualified path to the file you - * wish to upload to your bucket. - * @param {UploadOptions} [options] Configuration options. - * @param {string|File} [options.destination] The place to save - * your file. If given a string, the file will be uploaded to the bucket - * using the string as a filename. When given a File object, your local - * file will be uploaded to the File object's bucket and under the File - * object's name. Lastly, when this argument is omitted, the file is uploaded - * to your bucket using the name of the local file. - * @param {string} [options.encryptionKey] A custom encryption key. See - * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. - * @param {boolean} [options.gzip] Automatically gzip the file. This will set - * `options.metadata.contentEncoding` to `gzip`. - * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will - * be used to encrypt the object. Must be in the format: - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. - * @param {object} [options.metadata] See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. - * @param {string} [options.offset] The starting byte of the upload stream, for - * resuming an interrupted upload. Defaults to 0. - * @param {string} [options.predefinedAcl] Apply a predefined set of access - * controls to this object. - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. - * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @param {boolean} [options.private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @param {boolean} [options.public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @param {boolean} [options.resumable=true] Resumable uploads are automatically - * enabled and must be shut off explicitly by setting to false. - * @param {number} [options.timeout=60000] Set the HTTP request timeout in - * milliseconds. This option is not available for resumable uploads. - * Default: `60000` - * @param {string} [options.uri] The URI for an already-created resumable - * upload. See {@link File#createResumableUpload}. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {string|boolean} [options.validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with an - * MD5 checksum for maximum reliability. 
CRC32c will provide better - * performance with less reliability. You may also choose to skip - * validation completely, however this is **not recommended**. - * @param {UploadCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * - * //- - * // Upload a file from a local path. - * //- - * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { - * // Your bucket now contains: - * // - "image.png" (with the contents of `/local/path/image.png') - * - * // `file` is an instance of a File object that refers to your new file. - * }); - * - * - * //- - * // It's not always that easy. You will likely want to specify the filename - * // used when your new file lands in your bucket. - * // - * // You may also want to set metadata or customize other options. - * //- - * const options = { - * destination: 'new-image.png', - * validation: 'crc32c', - * metadata: { - * metadata: { - * event: 'Fall trip to the zoo' - * } - * } - * }; - * - * bucket.upload('local-image.png', options, function(err, file) { - * // Your bucket now contains: - * // - "new-image.png" (with the contents of `local-image.png') - * - * // `file` is an instance of a File object that refers to your new file. - * }); - * - * //- - * // You can also have a file gzip'd on the fly. - * //- - * bucket.upload('index.html', { gzip: true }, function(err, file) { - * // Your bucket now contains: - * // - "index.html" (automatically compressed with gzip) - * - * // Downloading the file with `file.download` will automatically decode - * the - * // file. - * }); - * - * //- - * // You may also re-use a File object, {File}, that references - * // the file you wish to create or overwrite. - * //- - * const options = { - * destination: bucket.file('existing-file.png'), - * resumable: false - * }; - * - * bucket.upload('local-img.png', options, function(err, newFile) { - * // Your bucket now contains: - * // - "existing-file.png" (with the contents of `local-img.png') - * - * // Note: - * // The `newFile` parameter is equal to `file`. - * }); - * - * //- - * // To use - * // - * // Customer-supplied Encryption Keys, provide the `encryptionKey` - * option. - * //- - * const crypto = require('crypto'); - * const encryptionKey = crypto.randomBytes(32); - * - * bucket.upload('img.png', { - * encryptionKey: encryptionKey - * }, function(err, newFile) { - * // `img.png` was uploaded with your custom encryption key. - * - * // `newFile` is already configured to use the encryption key when making - * // operations on the remote object. - * - * // However, to use your encryption key later, you must create a `File` - * // instance with the `key` supplied: - * const file = bucket.file('img.png', { - * encryptionKey: encryptionKey - * }); - * - * // Or with `file#setEncryptionKey`: - * const file = bucket.file('img.png'); - * file.setEncryptionKey(encryptionKey); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.upload('local-image.png').then(function(data) { - * const file = data[0]; - * }); - * - * To upload a file from a URL, use {@link File#createWriteStream}. 
- * - * ``` - * @example include:samples/files.js - * region_tag:storage_upload_file - * Another example: - * - * @example include:samples/encryption.js - * region_tag:storage_upload_encrypted_file - * Example of uploading an encrypted file: - */ - upload(pathString, optionsOrCallback, callback) { - var _a, _b; - const upload = (numberOfRetries) => { - const returnValue = retry(async (bail) => { - await new Promise((resolve, reject) => { - var _a, _b; - if (numberOfRetries === 0 && - ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { - newFile.storage.retryOptions.autoRetry = false; - } - const writable = newFile.createWriteStream(options); - if (options.onUploadProgress) { - writable.on('progress', options.onUploadProgress); - } - fs.createReadStream(pathString) - .on('error', bail) - .pipe(writable) - .on('error', err => { - if (this.storage.retryOptions.autoRetry && - this.storage.retryOptions.retryableErrorFn(err)) { - return reject(err); - } - else { - return bail(err); - } - }) - .on('finish', () => { - return resolve(); - }); - }); - }, { - retries: numberOfRetries, - factor: this.storage.retryOptions.retryDelayMultiplier, - maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, - maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds - }); - if (!callback) { - return returnValue; - } - else { - return returnValue - .then(() => { - if (callback) { - return callback(null, newFile, newFile.metadata); - } - }) - .catch(callback); - } - }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if (global['GCLOUD_SANDBOX_ENV']) { - return; - } - let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - options = Object.assign({ - metadata: {}, - }, options); - // Do not retry if precondition option ifGenerationMatch is not set - // because this is a file operation - let maxRetries = this.storage.retryOptions.maxRetries; - if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && - ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - maxRetries = 0; - } - let newFile; - if (options.destination instanceof file_1.File) { - newFile = options.destination; - } - else if (options.destination !== null && - typeof options.destination === 'string') { - // Use the string as the name of the file. - newFile = this.file(options.destination, { - encryptionKey: options.encryptionKey, - kmsKeyName: options.kmsKeyName, - preconditionOpts: this.instancePreconditionOpts, - }); - } - else { - // Resort to using the name of the incoming file. - const destination = path.basename(pathString); - newFile = this.file(destination, { - encryptionKey: options.encryptionKey, - kmsKeyName: options.kmsKeyName, - preconditionOpts: this.instancePreconditionOpts, - }); - } - upload(maxRetries); - } - /** - * @private - * - * @typedef {object} MakeAllFilesPublicPrivateOptions - * @property {boolean} [force] Suppress errors until all files have been - * processed. 
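// --- Illustrative sketch added by the editor; not part of the bundled source.
// upload() above forces maxRetries to 0 under the RetryConditional idempotency
// strategy unless an ifGenerationMatch precondition is present on either the
// call options or the bucket instance. A hedged sketch that keeps retries
// enabled by supplying the precondition (ifGenerationMatch: 0 asks GCS to
// accept the write only if no live object with that name exists yet):
const {Storage} = require('@google-cloud/storage');
const albums = new Storage().bucket('albums');

albums.upload(
  '/local/path/image.png',
  {
    destination: 'image.png',
    preconditionOpts: {ifGenerationMatch: 0},
  },
  (err, file) => {
    if (err) {
      console.error('upload failed:', err);
      return;
    }
    console.log(`uploaded ${file.name}`); // `file` is the File object created by upload()
  }
);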
- * @property {boolean} [private] Make files private. - * @property {boolean} [public] Make files public. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @private - * - * @callback SetBucketMetadataCallback - * @param {?Error} err Request error, if any. - * @param {File[]} files Files that were updated. - */ - /** - * @typedef {array} MakeAllFilesPublicPrivateResponse - * @property {File[]} 0 List of files affected. - */ - /** - * Iterate over all of a bucket's files, calling `file.makePublic()` (public) - * or `file.makePrivate()` (private) on each. - * - * Operations are performed in parallel, up to 10 at once. The first error - * breaks the loop, and will execute the provided callback with it. Specify - * `{ force: true }` to suppress the errors. - * - * @private - * - * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. - * @param {boolean} [options.force] Suppress errors until all files have been - * processed. - * @param {boolean} [options.private] Make files private. - * @param {boolean} [options.public] Make files public. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - - * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. - * - * @return {Promise} - */ - makeAllFilesPublicPrivate_(optionsOrCallback, callback) { - const MAX_PARALLEL_LIMIT = 10; - const errors = []; - const updatedFiles = []; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const processFile = async (file) => { - try { - await (options.public ? file.makePublic() : file.makePrivate(options)); - updatedFiles.push(file); - } - catch (e) { - if (!options.force) { - throw e; - } - errors.push(e); - } - }; - this.getFiles(options) - .then(([files]) => { - const limit = pLimit(MAX_PARALLEL_LIMIT); - const promises = files.map(file => { - return limit(() => processFile(file)); - }); - return Promise.all(promises); - }) - .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); - } - getId() { - return this.id; - } - disableAutoRetryConditionallyIdempotent_( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - coreOpts, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - methodType, localPreconditionOptions) { - var _a, _b; - if (typeof coreOpts === 'object' && - ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && - (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && - (methodType === AvailableServiceObjectMethods.setMetadata || - methodType === AvailableServiceObjectMethods.delete) && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) { - this.storage.retryOptions.autoRetry = false; - } - else if (this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - this.storage.retryOptions.autoRetry = false; - } - } -} -exports.Bucket = Bucket; -/*! Developer Documentation - * - * These methods can be auto-paginated. - */ -paginator_1.paginator.extend(Bucket, 'getFiles'); -/*! 
Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Bucket, { - exclude: ['cloudStorageURI', 'request', 'file', 'notification'], -}); -//# sourceMappingURL=bucket.js.map - -/***/ }), - -/***/ 8953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Channel = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const promisify_1 = __nccwpck_require__(9203); -/** - * Create a channel object to interact with a Cloud Storage channel. - * - * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} - * - * @class - * - * @param {string} id The ID of the channel. - * @param {string} resourceId The resource ID of the channel. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const channel = storage.channel('id', 'resource-id'); - * ``` - */ -class Channel extends nodejs_common_1.ServiceObject { - constructor(storage, id, resourceId) { - const config = { - parent: storage, - baseUrl: '/channels', - // An ID shouldn't be included in the API requests. - // RE: - // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 - id: '', - methods: { - // Only need `request`. - }, - }; - super(config); - this.metadata.id = id; - this.metadata.resourceId = resourceId; - } - /** - * @typedef {array} StopResponse - * @property {object} 0 The full API response. - */ - /** - * @callback StopCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Stop this channel. - * - * @param {StopCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const channel = storage.channel('id', 'resource-id'); - * channel.stop(function(err, apiResponse) { - * if (!err) { - * // Channel stopped successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * channel.stop().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - stop(callback) { - callback = callback || nodejs_common_1.util.noop; - this.request({ - method: 'POST', - uri: '/stop', - json: this.metadata, - }, (err, apiResponse) => { - callback(err, apiResponse); - }); - } -} -exports.Channel = Channel; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. 
- */ -(0, promisify_1.promisifyAll)(Channel); -//# sourceMappingURL=channel.js.map - -/***/ }), - -/***/ 4921: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); -}; -var _CRC32C_crc32c; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; -const fs_1 = __nccwpck_require__(7147); -/** - * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} - */ -const CRC32C_EXTENSIONS = [ - 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, - 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, - 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, - 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, - 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, - 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, - 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, - 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, - 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, - 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, - 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, - 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, - 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, - 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, - 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, - 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, - 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, - 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, - 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, - 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, - 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, - 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, - 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, - 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, - 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, - 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, - 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, - 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, - 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, - 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, - 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, - 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, - 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, - 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, - 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, - 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, - 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, - 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, - 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, - 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, - 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 
0x34f4f86a, 0xc69f7b69, - 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, - 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, -]; -exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; -const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); -exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; -const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); -exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; -const CRC32C_EXCEPTION_MESSAGES = { - INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, - INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, - INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, -}; -exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; -class CRC32C { - /** - * Constructs a new `CRC32C` object. - * - * Reconstruction is recommended via the `CRC32C.from` static method. - * - * @param initialValue An initial CRC32C value - a signed 32-bit integer. - */ - constructor(initialValue = 0) { - /** Current CRC32C value */ - _CRC32C_crc32c.set(this, 0); - __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); - } - /** - * Calculates a CRC32C from a provided buffer. - * - * Implementation inspired from: - * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} - * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} - * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} - * - * @param data The `Buffer` to generate the CRC32C from - */ - update(data) { - let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; - for (const d of data) { - const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; - current = tablePoly ^ (current >>> 8); - } - __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); - } - /** - * Validates a provided input to the current CRC32C value. - * - * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer - */ - validate(input) { - if (typeof input === 'number') { - return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); - } - else if (typeof input === 'string') { - return input === this.toString(); - } - else if (Buffer.isBuffer(input)) { - return Buffer.compare(input, this.toBuffer()) === 0; - } - else { - // `CRC32C`-like object - return input.toString() === this.toString(); - } - } - /** - * Returns a `Buffer` representation of the CRC32C value - */ - toBuffer() { - const buffer = Buffer.alloc(4); - buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); - return buffer; - } - /** - * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. - * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} - */ - toJSON() { - return this.toString(); - } - /** - * Returns a base64-encoded representation of the CRC32C value. 
- * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} - */ - toString() { - return this.toBuffer().toString('base64'); - } - /** - * Returns the `number` representation of the CRC32C value as a signed 32-bit integer - * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} - */ - valueOf() { - return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); - } - /** - * Generates a `CRC32C` from a compatible buffer format. - * - * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` - */ - static fromBuffer(value) { - let buffer; - if (Buffer.isBuffer(value)) { - buffer = value; - } - else if ('buffer' in value) { - // `ArrayBufferView` - buffer = Buffer.from(value.buffer); - } - else { - // `ArrayBuffer` - buffer = Buffer.from(value); - } - if (buffer.byteLength !== 4) { - throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); - } - return new CRC32C(buffer.readInt32BE()); - } - static async fromFile(file) { - const crc32c = new CRC32C(); - await new Promise((resolve, reject) => { - (0, fs_1.createReadStream)(file) - .on('data', (d) => crc32c.update(d)) - .on('end', resolve) - .on('error', reject); - }); - return crc32c; - } - /** - * Generates a `CRC32C` from 4-byte base64-encoded data (string). - * - * @param value 4-byte base64-encoded data (string) - */ - static fromString(value) { - const buffer = Buffer.from(value, 'base64'); - if (buffer.byteLength !== 4) { - throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); - } - return this.fromBuffer(buffer); - } - /** - * Generates a `CRC32C` from a safe, unsigned 32-bit integer. - * - * @param value an unsigned 32-bit integer - */ - static fromNumber(value) { - if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { - throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); - } - return new CRC32C(value); - } - /** - * Generates a `CRC32C` from a variety of compatable types. - * Note: strings are treated as input, not as file paths to read from. - * - * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) - */ - static from(value) { - if (typeof value === 'number') { - return this.fromNumber(value); - } - else if (typeof value === 'string') { - return this.fromString(value); - } - else if ('byteLength' in value) { - // `ArrayBuffer` | `Buffer` | `ArrayBufferView` - return this.fromBuffer(value); - } - else { - // `CRC32CValidator`/`CRC32C`-like - return this.fromString(value.toString()); - } - } -} -exports.CRC32C = CRC32C; -_CRC32C_crc32c = new WeakMap(); -CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; -CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; -//# sourceMappingURL=crc32c.js.map - -/***/ }), - -/***/ 5373: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
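// --- Illustrative sketch added by the editor; not part of the bundled source.
// The CRC32C class defined above is table-driven: update() folds each byte
// through CRC32C_EXTENSION_TABLE, and toString()/validate() work on the
// big-endian, base64-encoded form of the value. A hedged usage sketch; the
// root re-export of CRC32C is assumed (recent @google-cloud/storage releases
// expose it; otherwise use the class from the crc32c module above directly):
const {CRC32C} = require('@google-cloud/storage');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('data'));
console.log(crc32c.toString());           // 'rth90Q==' per the docs below
console.log(crc32c.validate('rth90Q==')); // true: digests match
console.log(crc32c.validate('DkjKuA==')); // false: digest of different input

// Rebuild a validator from a stored 4-byte base64 digest:
const restored = CRC32C.from('rth90Q==');
console.log(restored.valueOf() === crc32c.valueOf()); // true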
-// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _File_instances, _File_validateIntegrity; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const promisify_1 = __nccwpck_require__(9203); -const compressible = __nccwpck_require__(6763); -const crypto = __nccwpck_require__(6113); -const extend = __nccwpck_require__(8171); -const fs = __nccwpck_require__(7147); -const mime = __nccwpck_require__(5377); -const resumableUpload = __nccwpck_require__(8807); -const stream_1 = __nccwpck_require__(2781); -const zlib = __nccwpck_require__(9796); -const storage_1 = __nccwpck_require__(346); -const bucket_1 = __nccwpck_require__(8561); -const acl_1 = __nccwpck_require__(1672); -const signer_1 = __nccwpck_require__(9665); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const duplexify = __nccwpck_require__(6599); -const util_1 = __nccwpck_require__(1862); -const hash_stream_validator_1 = __nccwpck_require__(8386); -const retry = __nccwpck_require__(3415); -var ActionToHTTPMethod; -(function (ActionToHTTPMethod) { - ActionToHTTPMethod["read"] = "GET"; - ActionToHTTPMethod["write"] = "PUT"; - ActionToHTTPMethod["delete"] = "DELETE"; - ActionToHTTPMethod["resumable"] = "POST"; -})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {})); -/** - * @private - */ -exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; -/** - * @private - */ -const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; -class RequestError extends Error { -} -exports.RequestError = RequestError; -const SEVEN_DAYS = 7 * 24 * 60 * 60; -var FileExceptionMessages; -(function (FileExceptionMessages) { - FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; - FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; - FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; - FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; - FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; - FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; - FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; - FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. 
To be sure the content is the same, you should download the file again."; - FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; - FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; -})(FileExceptionMessages = exports.FileExceptionMessages || (exports.FileExceptionMessages = {})); -/** - * A File object is created from your {@link Bucket} object using - * {@link Bucket#file}. - * - * @class - */ -class File extends nodejs_common_1.ServiceObject { - /** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. - * - * An ACL consists of one or more entries, where each entry grants permissions - * to an entity. Permissions define the actions that can be performed against - * an object or bucket (for example, `READ` or `WRITE`); the entity defines - * who the permission applies to (for example, a specific user or group of - * users). - * - * The `acl` object on a File instance provides methods to get you a list of - * the ACLs defined on your bucket, as well as set, update, and delete them. - * - * See {@link http://goo.gl/6qBBPO| About Access Control lists} - * - * @name File#acl - * @mixes Acl - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * //- - * // Make a file publicly readable. - * //- - * const options = { - * entity: 'allUsers', - * role: storage.acl.READER_ROLE - * }; - * - * file.acl.add(options, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - /** - * The API-formatted resource description of the file. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name File#metadata - * @type {object} - */ - /** - * The file's name. - * @name File#name - * @type {string} - */ - /** - * @callback Crc32cGeneratorToStringCallback - * A method returning the CRC32C as a base64-encoded string. - * - * @returns {string} - * - * @example - * Hashing the string 'data' should return 'rth90Q==' - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.toString(); // 'rth90Q==' - * ``` - **/ - /** - * @callback Crc32cGeneratorValidateCallback - * A method validating a base64-encoded CRC32C string. 
- * - * @param {string} [value] base64-encoded CRC32C string to validate - * @returns {boolean} - * - * @example - * Should return `true` if the value matches, `false` otherwise - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.validate('DkjKuA=='); // false - * crc32c.validate('rth90Q=='); // true - * ``` - **/ - /** - * @callback Crc32cGeneratorUpdateCallback - * A method for passing `Buffer`s for CRC32C generation. - * - * @param {Buffer} [data] data to update CRC32C value with - * @returns {undefined} - * - * @example - * Hashing buffers from 'some ' and 'text\n' - * - * ```js - * const buffer1 = Buffer.from('some '); - * crc32c.update(buffer1); - * - * const buffer2 = Buffer.from('text\n'); - * crc32c.update(buffer2); - * - * crc32c.toString(); // 'DkjKuA==' - * ``` - **/ - /** - * @typedef {object} CRC32CValidator - * @property {Crc32cGeneratorToStringCallback} - * @property {Crc32cGeneratorValidateCallback} - * @property {Crc32cGeneratorUpdateCallback} - */ - /** - * @callback Crc32cGeneratorCallback - * @returns {CRC32CValidator} - */ - /** - * @typedef {object} FileOptions Options passed to the File constructor. - * @property {string} [encryptionKey] A custom encryption key. - * @property {number} [generation] Generation to scope the file to. - * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this - * object, if the object is encrypted by such a key. Limited availability; - * usable only by enabled projects. - * @property {string} [userProject] The ID of the project which will be - * billed for all requests made from File object. - * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} - */ - /** - * Constructs a file object. - * - * @param {Bucket} bucket The Bucket instance this file is - * attached to. - * @param {string} name The name of the remote file. - * @param {FileOptions} [options] Configuration options. - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * ``` - */ - constructor(bucket, name, options = {}) { - var _a, _b; - const requestQueryObject = {}; - let generation; - if (options.generation !== null) { - if (typeof options.generation === 'string') { - generation = Number(options.generation); - } - else { - generation = options.generation; - } - if (!isNaN(generation)) { - requestQueryObject.generation = generation; - } - } - Object.assign(requestQueryObject, options.preconditionOpts); - const userProject = options.userProject || bucket.userProject; - if (typeof userProject === 'string') { - requestQueryObject.userProject = userProject; - } - const methods = { - /** - * @typedef {array} DeleteFileResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteFileCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Delete the file. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} - * - * @method File#delete - * @param {object} [options] Configuration options. - * @param {boolean} [options.ignoreNotFound = false] Ignore an error if - * the file does not exist. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. 
- * @param {DeleteFileCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * file.delete(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_delete_file - * Another example: - */ - delete: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} FileExistsResponse - * @property {boolean} 0 Whether the {@link File} exists. - */ - /** - * @callback FileExistsCallback - * @param {?Error} err Request error, if any. - * @param {boolean} exists Whether the {@link File} exists. - */ - /** - * Check if the file exists. - * - * @method File#exists - * @param {options} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {FileExistsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.exists(function(err, exists) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.exists().then(function(data) { - * const exists = data[0]; - * }); - * ``` - */ - exists: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} GetFileResponse - * @property {File} 0 The {@link File}. - * @property {object} 1 The full API response. - */ - /** - * @callback GetFileCallback - * @param {?Error} err Request error, if any. - * @param {File} file The {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * Get a file object and its metadata if it exists. - * - * @method File#get - * @param {options} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetFileCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.get(function(err, file, apiResponse) { - * // file.metadata` has been populated. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.get().then(function(data) { - * const file = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - get: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {array} GetFileMetadataResponse - * @property {object} 0 The {@link File} metadata. - * @property {object} 1 The full API response. - */ - /** - * @callback GetFileMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} metadata The {@link File} metadata. - * @param {object} apiResponse The full API response. - */ - /** - * Get the file's metadata. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} - * - * @method File#getMetadata - * @param {object} [options] Configuration options. 
- * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetFileMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.getMetadata(function(err, metadata, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.getMetadata().then(function(data) { - * const metadata = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_get_metadata - * Another example: - */ - getMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - /** - * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). - * @param {string} [userProject] The ID of the project which will be billed for the request. - */ - /** - * @callback SetFileMetadataCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} SetFileMetadataResponse - * @property {object} 0 The full API response. - */ - /** - * Merge the given metadata with the current remote file's metadata. This - * will set metadata if it was previously unset or update previously set - * metadata. To unset previously set metadata, set its value to null. - * - * You can set custom key/value pairs in the metadata key of the given - * object, however the other properties outside of this object must adhere - * to the {@link https://goo.gl/BOnnCK| official API documentation}. - * - * - * See the examples below for more information. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} - * - * @method File#setMetadata - * @param {object} [metadata] The metadata you wish to update. - * @param {SetFileMetadataOptions} [options] Configuration options. - * @param {SetFileMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * const metadata = { - * contentType: 'application/x-font-ttf', - * metadata: { - * my: 'custom', - * properties: 'go here' - * } - * }; - * - * file.setMetadata(metadata, function(err, apiResponse) {}); - * - * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } - * file.setMetadata({ - * metadata: { - * abc: '123', // will be set. - * unsetMe: null, // will be unset (deleted). - * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. - * } - * }, function(err, apiResponse) { - * // metadata should now be { abc: '123', hello: 'goodbye' } - * }); - * - * //- - * // Set a temporary hold on this file from its bucket's retention period - * // configuration. - * // - * file.setMetadata({ - * temporaryHold: true - * }, function(err, apiResponse) {}); - * - * //- - * // Alternatively, you may set a temporary hold. This will follow the - * // same behavior as an event-based hold, with the exception that the - * // bucket's retention policy will not renew for this file from the time - * // the hold is released. 
- * //- - * file.setMetadata({ - * eventBasedHold: true - * }, function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.setMetadata(metadata).then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - setMetadata: { - reqOpts: { - qs: requestQueryObject, - }, - }, - }; - super({ - parent: bucket, - baseUrl: '/o', - id: encodeURIComponent(name), - methods, - }); - _File_instances.add(this); - this.bucket = bucket; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this.storage = bucket.parent; - // @TODO Can this duplicate code from above be avoided? - if (options.generation !== null) { - let generation; - if (typeof options.generation === 'string') { - generation = Number(options.generation); - } - else { - generation = options.generation; - } - if (!isNaN(generation)) { - this.generation = generation; - } - } - this.kmsKeyName = options.kmsKeyName; - this.userProject = userProject; - this.name = name; - if (options.encryptionKey) { - this.setEncryptionKey(options.encryptionKey); - } - this.acl = new acl_1.Acl({ - request: this.request.bind(this), - pathPrefix: '/acl', - }); - this.crc32cGenerator = - options.crc32cGenerator || this.bucket.crc32cGenerator; - this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; - this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; - } - /** - * The object's Cloud Storage URI (`gs://`) - * - * @example - * ```ts - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('image.png'); - * - * // `gs://my-bucket/image.png` - * const href = file.cloudStorageURI.href; - * ``` - */ - get cloudStorageURI() { - const uri = this.bucket.cloudStorageURI; - uri.pathname = this.name; - return uri; - } - /** - * A helper method for determining if a request should be retried based on preconditions. - * This should only be used for methods where the idempotency is determined by - * `ifGenerationMatch` - * @private - * - * A request should not be retried under the following conditions: - * - if precondition option `ifGenerationMatch` is not set OR - * - if `idempotencyStrategy` is set to `RetryNever` - */ - shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { - var _a; - return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && - ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever); - } - /** - * @typedef {array} CopyResponse - * @property {File} 0 The copied {@link File}. - * @property {object} 1 The full API response. - */ - /** - * @callback CopyCallback - * @param {?Error} err Request error, if any. - * @param {File} copiedFile The copied {@link File}. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} CopyOptions Configuration options for File#copy(). See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. - * @property {string} [cacheControl] The cacheControl setting for the new file. 
- * @property {string} [contentEncoding] The contentEncoding setting for the new file. - * @property {string} [contentType] The contentType setting for the new file. - * @property {string} [destinationKmsKeyName] Resource name of the Cloud - * KMS key, of the form - * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, - * that will be used to encrypt the object. Overwrites the object - * metadata's `kms_key_name` value, if any. - * @property {Metadata} [metadata] Metadata to specify on the copied file. - * @property {string} [predefinedAcl] Set the ACL for the new file. - * @property {string} [token] A previously-returned `rewriteToken` from an - * unfinished rewrite request. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Copy this file to another file. By default, this will copy the file to the - * same bucket, but you can choose to copy it to another Bucket by providing - * a Bucket or File object or a URL starting with "gs://". - * The generation of the file will not be preserved. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} - * - * @throws {Error} If the destination file is not provided. - * - * @param {string|Bucket|File} destination Destination file. - * @param {CopyOptions} [options] Configuration options. See an - * @param {CopyCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // You can pass in a variety of types for the destination. - * // - * // For all of the below examples, assume we are working with the following - * // Bucket and File objects. - * //- - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('my-image.png'); - * - * //- - * // If you pass in a string for the destination, the file is copied to its - * // current bucket, under the new name provided. - * //- - * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { - * // `my-bucket` now contains: - * // - "my-image.png" - * // - "my-image-copy.png" - * - * // `copiedFile` is an instance of a File object that refers to your new - * // file. - * }); - * - * //- - * // If you pass in a string starting with "gs://" for the destination, the - * // file is copied to the other bucket and under the new name provided. - * //- - * const newLocation = 'gs://another-bucket/my-image-copy.png'; - * file.copy(newLocation, function(err, copiedFile, apiResponse) { - * // `my-bucket` still contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image-copy.png" - * - * // `copiedFile` is an instance of a File object that refers to your new - * // file. - * }); - * - * //- - * // If you pass in a Bucket object, the file will be copied to that bucket - * // using the same name. - * //- - * const anotherBucket = storage.bucket('another-bucket'); - * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { - * // `my-bucket` still contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image.png" - * - * // `copiedFile` is an instance of a File object that refers to your new - * // file. - * }); - * - * //- - * // If you pass in a File object, you have complete control over the new - * // bucket and filename. 
- * //- - * const anotherFile = anotherBucket.file('my-awesome-image.png'); - * file.copy(anotherFile, function(err, copiedFile, apiResponse) { - * // `my-bucket` still contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-awesome-image.png" - * - * // Note: - * // The `copiedFile` parameter is equal to `anotherFile`. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.copy(newLocation).then(function(data) { - * const newFile = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_copy_file - * Another example: - */ - copy(destination, optionsOrCallback, callback) { - var _a, _b; - const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); - if (!destination) { - throw noDestinationError; - } - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else if (optionsOrCallback) { - options = optionsOrCallback; - } - options = extend(true, {}, options); - callback = callback || nodejs_common_1.util.noop; - let destBucket; - let destName; - let newFile; - if (typeof destination === 'string') { - const parsedDestination = GS_URL_REGEXP.exec(destination); - if (parsedDestination !== null && parsedDestination.length === 3) { - destBucket = this.storage.bucket(parsedDestination[1]); - destName = parsedDestination[2]; - } - else { - destBucket = this.bucket; - destName = destination; - } - } - else if (destination instanceof bucket_1.Bucket) { - destBucket = destination; - destName = this.name; - } - else if (destination instanceof File) { - destBucket = destination.bucket; - destName = destination.name; - newFile = destination; - } - else { - throw noDestinationError; - } - const query = {}; - if (this.generation !== undefined) { - query.sourceGeneration = this.generation; - } - if (options.token !== undefined) { - query.rewriteToken = options.token; - } - if (options.userProject !== undefined) { - query.userProject = options.userProject; - delete options.userProject; - } - if (options.predefinedAcl !== undefined) { - query.destinationPredefinedAcl = options.predefinedAcl; - delete options.predefinedAcl; - } - newFile = newFile || destBucket.file(destName); - const headers = {}; - if (this.encryptionKey !== undefined) { - headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; - headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; - headers['x-goog-copy-source-encryption-key-sha256'] = - this.encryptionKeyHash; - } - if (newFile.encryptionKey !== undefined) { - this.setEncryptionKey(newFile.encryptionKey); - } - else if (options.destinationKmsKeyName !== undefined) { - query.destinationKmsKeyName = options.destinationKmsKeyName; - delete options.destinationKmsKeyName; - } - else if (newFile.kmsKeyName !== undefined) { - query.destinationKmsKeyName = newFile.kmsKeyName; - } - if (query.destinationKmsKeyName) { - this.kmsKeyName = query.destinationKmsKeyName; - const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); - if (keyIndex > -1) { - this.interceptors.splice(keyIndex, 1); - } - } - if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { - this.storage.retryOptions.autoRetry = false; - } - if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) !== undefined) { - query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; - delete options.preconditionOpts; - } - this.request({ - method: 'POST', - uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, - qs: query, - json: options, - headers, - }, (err, resp) => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - if (err) { - callback(err, null, resp); - return; - } - if (resp.rewriteToken) { - const options = { - token: resp.rewriteToken, - }; - if (query.userProject) { - options.userProject = query.userProject; - } - if (query.destinationKmsKeyName) { - options.destinationKmsKeyName = query.destinationKmsKeyName; - } - this.copy(newFile, options, callback); - return; - } - callback(null, newFile, resp); - }); - } - /** - * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string|boolean} [validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with a - * CRC32c checksum. You may use MD5 if preferred, but that hash is not - * supported for composite objects. An error will be raised if MD5 is - * specified but is not available. You may also choose to skip validation - * completely, however this is **not recommended**. - * @property {number} [start] A byte offset to begin the file's download - * from. Default is 0. NOTE: Byte ranges are inclusive; that is, - * `options.start = 0` and `options.end = 999` represent the first 1000 - * bytes in a file or object. NOTE: when specifying a byte range, data - * integrity is not available. - * @property {number} [end] A byte offset to stop reading the file at. - * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and - * `options.end = 999` represent the first 1000 bytes in a file or object. - * NOTE: when specifying a byte range, data integrity is not available. - * @property {boolean} [decompress=true] Disable auto decompression of the - * received data. By default this option is set to `true`. - * Applicable in cases where the data was uploaded with - * `gzip: true` option. See {@link File#createWriteStream}. - */ - /** - * Create a readable stream to read the contents of the remote file. It can be - * piped to a writable stream or listened to for 'data' events to read a - * file's contents. - * - * In the unlikely event there is a mismatch between what you downloaded and - * the version in your Bucket, your error handler will receive an error with - * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best - * recourse is to try downloading the file again. - * - * NOTE: Readable streams will emit the `end` event when the file is fully - * downloaded. - * - * @param {CreateReadStreamOptions} [options] Configuration options. - * @returns {ReadableStream} - * - * @example - * ``` - * //- - * //
<h4>Downloading a File</h4>
- * // - * // The example below demonstrates how we can reference a remote file, then - * // pipe its contents to a local file. This is effectively creating a local - * // backup of your remote data. - * //- - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * const fs = require('fs'); - * const remoteFile = bucket.file('image.png'); - * const localFilename = '/Users/stephen/Photos/image.png'; - * - * remoteFile.createReadStream() - * .on('error', function(err) {}) - * .on('response', function(response) { - * // Server connected and responded with the specified status and headers. - * }) - * .on('end', function() { - * // The file is fully downloaded. - * }) - * .pipe(fs.createWriteStream(localFilename)); - * - * //- - * // To limit the downloaded data to only a byte range, pass an options - * // object. - * //- - * const logFile = myBucket.file('access_log'); - * logFile.createReadStream({ - * start: 10000, - * end: 20000 - * }) - * .on('error', function(err) {}) - * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); - * - * //- - * // To read a tail byte range, specify only `options.end` as a negative - * // number. - * //- - * const logFile = myBucket.file('access_log'); - * logFile.createReadStream({ - * end: -100 - * }) - * .on('error', function(err) {}) - * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); - * ``` - */ - createReadStream(options = {}) { - options = Object.assign({ decompress: true }, options); - const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; - const tailRequest = options.end < 0; - let validateStream = undefined; - const throughStream = new util_1.PassThroughShim(); - let crc32c = true; - let md5 = false; - if (typeof options.validation === 'string') { - const value = options.validation.toLowerCase().trim(); - crc32c = value === 'crc32c'; - md5 = value === 'md5'; - } - else if (options.validation === false) { - crc32c = false; - } - const shouldRunValidation = !rangeRequest && (crc32c || md5); - if (rangeRequest) { - if (typeof options.validation === 'string' || - options.validation === true) { - throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); - } - // Range requests can't receive data integrity checks. - crc32c = false; - md5 = false; - } - const onComplete = (err) => { - if (err) { - throughStream.destroy(err); - } - }; - // We listen to the response event from the request stream so that we - // can... - // - // 1) Intercept any data from going to the user if an error occurred. - // 2) Calculate the hashes from the http.IncomingMessage response - // stream, - // which will return the bytes from the source without decompressing - // gzip'd content. We then send it through decompressed, if - // applicable, to the user. - const onResponse = (err, _body, rawResponseStream) => { - if (err) { - // Get error message from the body. - this.getBufferFromReadable(rawResponseStream).then(body => { - err.message = body.toString('utf8'); - throughStream.destroy(err); - }); - return; - } - const headers = rawResponseStream.toJSON().headers; - const isCompressed = headers['content-encoding'] === 'gzip'; - const hashes = {}; - // The object is safe to validate if: - // 1. It was stored gzip and returned to us gzip OR - // 2. 
It was never stored as gzip - const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && - isCompressed) || - headers['x-goog-stored-content-encoding'] === 'identity'; - const transformStreams = []; - if (shouldRunValidation) { - // The x-goog-hash header should be set with a crc32c and md5 hash. - // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' - if (typeof headers['x-goog-hash'] === 'string') { - headers['x-goog-hash'] - .split(',') - .forEach((hashKeyValPair) => { - const delimiterIndex = hashKeyValPair.indexOf('='); - const hashType = hashKeyValPair.substr(0, delimiterIndex); - const hashValue = hashKeyValPair.substr(delimiterIndex + 1); - hashes[hashType] = hashValue; - }); - } - validateStream = new hash_stream_validator_1.HashStreamValidator({ - crc32c, - md5, - crc32cGenerator: this.crc32cGenerator, - crc32cExpected: hashes.crc32c, - md5Expected: hashes.md5, - }); - } - if (md5 && !hashes.md5) { - const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); - hashError.code = 'MD5_NOT_AVAILABLE'; - throughStream.destroy(hashError); - return; - } - if (safeToValidate && shouldRunValidation && validateStream) { - transformStreams.push(validateStream); - } - if (isCompressed && options.decompress) { - transformStreams.push(zlib.createGunzip()); - } - (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); - }; - // Authenticate the request, then pipe the remote API request to the stream - // returned to the user. - const makeRequest = () => { - const query = { alt: 'media' }; - if (this.generation) { - query.generation = this.generation; - } - if (options.userProject) { - query.userProject = options.userProject; - } - const headers = { - 'Accept-Encoding': 'gzip', - 'Cache-Control': 'no-store', - }; - if (rangeRequest) { - const start = typeof options.start === 'number' ? options.start : '0'; - const end = typeof options.end === 'number' ? options.end : ''; - headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; - } - const reqOpts = { - uri: '', - headers, - qs: query, - }; - this.requestStream(reqOpts) - .on('error', err => { - throughStream.destroy(err); - }) - .on('response', res => { - throughStream.emit('response', res); - nodejs_common_1.util.handleResp(null, res, null, onResponse); - }) - .resume(); - }; - throughStream.on('reading', makeRequest); - return throughStream; - } - /** - * @callback CreateResumableUploadCallback - * @param {?Error} err Request error, if any. - * @param {string} uri The resumable upload's unique session URI. - */ - /** - * @typedef {array} CreateResumableUploadResponse - * @property {string} 0 The resumable upload's unique session URI. - */ - /** - * @typedef {object} CreateResumableUploadOptions - * @property {object} [metadata] Metadata to set on the file. - * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. - * @property {string} [origin] Origin header to set for the upload. - * @property {string} [predefinedAcl] Apply a predefined set of access - * controls to this object. - * - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. 
- * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @property {boolean} [private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @property {boolean} [public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string} [chunkSize] Create a separate request per chunk. This - * value is in bytes and should be a multiple of 256 KiB (2^18). - * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} - */ - /** - * Create a unique resumable upload session URI. This is the first step when - * performing a resumable upload. - * - * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} - * for more on how the entire process works. - * - *
<h4>Note</h4>
- * - * If you are just looking to perform a resumable upload without worrying - * about any of the details, see {@link File#createWriteStream}. Resumable - * uploads are performed by default. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} - * - * @param {CreateResumableUploadOptions} [options] Configuration options. - * @param {CreateResumableUploadCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * file.createResumableUpload(function(err, uri) { - * if (!err) { - * // `uri` can be used to PUT data to. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.createResumableUpload().then(function(data) { - * const uri = data[0]; - * }); - * ``` - */ - createResumableUpload(optionsOrCallback, callback) { - var _a, _b; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const retryOptions = this.storage.retryOptions; - if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && - ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - retryOptions.autoRetry = false; - } - resumableUpload.createURI({ - authClient: this.storage.authClient, - apiEndpoint: this.storage.apiEndpoint, - bucket: this.bucket.name, - customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), - file: this.name, - generation: this.generation, - key: this.encryptionKey, - kmsKeyName: this.kmsKeyName, - metadata: options.metadata, - offset: options.offset, - origin: options.origin, - predefinedAcl: options.predefinedAcl, - private: options.private, - public: options.public, - userProject: options.userProject || this.userProject, - retryOptions: retryOptions, - params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, - }, callback); - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - } - /** - * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). - * @property {string} [contentType] Alias for - * `options.metadata.contentType`. If set to `auto`, the file name is used - * to determine the contentType. - * @property {string|boolean} [gzip] If true, automatically gzip the file. - * If set to `auto`, the contentType is used to determine if the file - * should be gzipped. This will set `options.metadata.contentEncoding` to - * `gzip` if necessary. - * @property {object} [metadata] See the examples below or - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} - * for more details. - * @property {number} [offset] The starting byte of the upload stream, for - * resuming an interrupted upload. Defaults to 0. 
- * @property {string} [predefinedAcl] Apply a predefined set of access - * controls to this object. - * - * Acceptable values are: - * - **`authenticatedRead`** - Object owner gets `OWNER` access, and - * `allAuthenticatedUsers` get `READER` access. - * - * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and - * project team owners get `OWNER` access. - * - * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project - * team owners get `READER` access. - * - * - **`private`** - Object owner gets `OWNER` access. - * - * - **`projectPrivate`** - Object owner gets `OWNER` access, and project - * team members get access according to their roles. - * - * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` - * get `READER` access. - * @property {boolean} [private] Make the uploaded file private. (Alias for - * `options.predefinedAcl = 'private'`) - * @property {boolean} [public] Make the uploaded file public. (Alias for - * `options.predefinedAcl = 'publicRead'`) - * @property {boolean} [resumable] Force a resumable upload. NOTE: When - * working with streams, the file format and size is unknown until it's - * completely consumed. Because of this, it's best for you to be explicit - * for what makes sense given your input. - * @property {number} [timeout=60000] Set the HTTP request timeout in - * milliseconds. This option is not available for resumable uploads. - * Default: `60000` - * @property {string} [uri] The URI for an already-created resumable - * upload. See {@link File#createResumableUpload}. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - * @property {string|boolean} [validation] Possible values: `"md5"`, - * `"crc32c"`, or `false`. By default, data integrity is validated with a - * CRC32c checksum. You may use MD5 if preferred, but that hash is not - * supported for composite objects. An error will be raised if MD5 is - * specified but is not available. You may also choose to skip validation - * completely, however this is **not recommended**. In addition to specifying - * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will - * cause the server to perform validation in addition to client validation. - * NOTE: Validation is automatically skipped for objects that were - * uploaded using the `gzip` option and have already compressed content. - */ - /** - * Create a writable stream to overwrite the contents of the file in your - * bucket. - * - * A File object can also be used to create files for the first time. - * - * Resumable uploads are automatically enabled and must be shut off explicitly - * by setting `options.resumable` to `false`. - * - * - *
- * There is some overhead when using a resumable upload that can cause - * noticeable performance degradation while uploading a series of small - * files. When uploading files less than 10MB, it is recommended that the - * resumable feature is disabled. - *
- * - * NOTE: Writable streams will emit the `finish` event when the file is fully - * uploaded. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} - * - * @param {CreateWriteStreamOptions} [options] Configuration options. - * @returns {WritableStream} - * - * @example - * ``` - * const fs = require('fs'); - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * //
<h4>Uploading a File</h4>
- * // - * // Now, consider a case where we want to upload a file to your bucket. You - * // have the option of using {@link Bucket#upload}, but that is just - * // a convenience method which will do the following. - * //- - * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') - * .pipe(file.createWriteStream()) - * .on('error', function(err) {}) - * .on('finish', function() { - * // The file upload is complete. - * }); - * - * //- - * //
<h4>Uploading a File with gzip compression</h4>
- * //- - * fs.createReadStream('/Users/stephen/site/index.html') - * .pipe(file.createWriteStream({ gzip: true })) - * .on('error', function(err) {}) - * .on('finish', function() { - * // The file upload is complete. - * }); - * - * //- - * // Downloading the file with `createReadStream` will automatically decode - * // the file. - * //- - * - * //- - * //
<h4>Uploading a File with Metadata</h4>
- * // - * // One last case you may run into is when you want to upload a file to your - * // bucket and set its metadata at the same time. Like above, you can use - * // {@link Bucket#upload} to do this, which is just a wrapper around - * // the following. - * //- - * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') - * .pipe(file.createWriteStream({ - * metadata: { - * contentType: 'image/jpeg', - * metadata: { - * custom: 'metadata' - * } - * } - * })) - * .on('error', function(err) {}) - * .on('finish', function() { - * // The file upload is complete. - * }); - * ``` - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - createWriteStream(options = {}) { - options = extend(true, { metadata: {} }, options); - if (options.contentType) { - options.metadata.contentType = options.contentType; - } - if (!options.metadata.contentType || - options.metadata.contentType === 'auto') { - const detectedContentType = mime.getType(this.name); - if (detectedContentType) { - options.metadata.contentType = detectedContentType; - } - } - let gzip = options.gzip; - if (gzip === 'auto') { - gzip = compressible(options.metadata.contentType); - } - if (gzip) { - options.metadata.contentEncoding = 'gzip'; - } - let crc32c = true; - let md5 = false; - if (typeof options.validation === 'string') { - options.validation = options.validation.toLowerCase(); - crc32c = options.validation === 'crc32c'; - md5 = options.validation === 'md5'; - } - else if (options.validation === false) { - crc32c = false; - } - /** - * A callback for determining when the underlying pipeline is complete. - * It's possible the pipeline callback could error before the write stream - * calls `final` so by default this will destroy the write stream unless the - * write stream sets this callback via its `final` handler. - * @param error An optional error - */ - let pipelineCallback = error => { - writeStream.destroy(error || undefined); - }; - // A stream for consumer to write to - const writeStream = new stream_1.Writable({ - final(cb) { - // Set the pipeline callback to this callback so the pipeline's results - // can be populated to the consumer - pipelineCallback = cb; - emitStream.end(); - }, - write(chunk, encoding, cb) { - emitStream.write(chunk, encoding, cb); - }, - }); - const emitStream = new util_1.PassThroughShim(); - const hashCalculatingStream = new hash_stream_validator_1.HashStreamValidator({ - crc32c, - md5, - crc32cGenerator: this.crc32cGenerator, - updateHashesOnly: true, - }); - const fileWriteStream = duplexify(); - let fileWriteStreamMetadataReceived = false; - // Handing off emitted events to users - emitStream.on('reading', () => writeStream.emit('reading')); - emitStream.on('writing', () => writeStream.emit('writing')); - fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); - fileWriteStream.on('response', resp => writeStream.emit('response', resp)); - fileWriteStream.once('metadata', () => { - fileWriteStreamMetadataReceived = true; - }); - writeStream.on('writing', () => { - if (options.resumable === false) { - this.startSimpleUpload_(fileWriteStream, options); - } - else { - this.startResumableUpload_(fileWriteStream, options); - } - (0, stream_1.pipeline)(emitStream, gzip ? zlib.createGzip() : new stream_1.PassThrough(), hashCalculatingStream, fileWriteStream, async (e) => { - if (e) { - return pipelineCallback(e); - } - // We want to make sure we've received the metadata from the server in order - // to properly validate the object's integrity. 
Depending on the type of upload, - // the stream could close before the response is returned. - if (!fileWriteStreamMetadataReceived) { - try { - await new Promise((resolve, reject) => { - fileWriteStream.once('metadata', resolve); - fileWriteStream.once('error', reject); - }); - } - catch (e) { - return pipelineCallback(e); - } - } - try { - await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5 }); - pipelineCallback(); - } - catch (e) { - pipelineCallback(e); - } - }); - }); - return writeStream; - } - delete(optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; - this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_1.AvailableServiceObjectMethods.delete, options); - super - .delete(options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } - /** - * @typedef {array} DownloadResponse - * @property [0] The contents of a File. - */ - /** - * @callback DownloadCallback - * @param err Request error, if any. - * @param contents The contents of a File. - */ - /** - * Convenience method to download a file into memory or to a local - * destination. - * - * @param {object} [options] Configuration options. The arguments match those - * passed to {@link File#createReadStream}. - * @param {string} [options.destination] Local file path to write the file's - * contents to. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {DownloadCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Download a file into memory. The contents will be available as the - * second - * // argument in the demonstration below, `contents`. - * //- - * file.download(function(err, contents) {}); - * - * //- - * // Download a file to a local destination. - * //- - * file.download({ - * destination: '/Users/me/Desktop/file-backup.txt' - * }, function(err) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * file.download().then(function(data) { - * const contents = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_download_file - * Another example: - * - * @example include:samples/encryption.js - * region_tag:storage_download_encrypted_file - * Example of downloading an encrypted file: - * - * @example include:samples/requesterPays.js - * region_tag:storage_download_file_requester_pays - * Example of downloading a file where the requester pays: - */ - download(optionsOrCallback, cb) { - let options; - if (typeof optionsOrCallback === 'function') { - cb = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback; - } - let called = false; - const callback = ((...args) => { - if (!called) - cb(...args); - called = true; - }); - const destination = options.destination; - delete options.destination; - const fileStream = this.createReadStream(options); - let receivedData = false; - if (destination) { - fileStream - .on('error', callback) - .once('data', data => { - // We know that the file exists the server - now we can truncate/write to a file - receivedData = true; - const writable = fs.createWriteStream(destination); - writable.write(data); - fileStream - .pipe(writable) - .on('error', callback) - .on('finish', callback); - }) - .on('end', () => { - // In the case of an empty file no data will be received before the end event fires - if (!receivedData) { - fs.openSync(destination, 'w'); - callback(null, Buffer.alloc(0)); - } - }); - } - else { - this.getBufferFromReadable(fileStream) - .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) - .catch(callback); - } - } - /** - * The Storage API allows you to use a custom key for server-side encryption. - * - * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} - * - * @param {string|buffer} encryptionKey An AES-256 encryption key. 
- * @returns {File} - * - * @example - * ``` - * const crypto = require('crypto'); - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const encryptionKey = crypto.randomBytes(32); - * - * const fileWithCustomEncryption = myBucket.file('my-file'); - * fileWithCustomEncryption.setEncryptionKey(encryptionKey); - * - * const fileWithoutCustomEncryption = myBucket.file('my-file'); - * - * fileWithCustomEncryption.save('data', function(err) { - * // Try to download with the File object that hasn't had - * // `setEncryptionKey()` called: - * fileWithoutCustomEncryption.download(function(err) { - * // We will receive an error: - * // err.message === 'Bad Request' - * - * // Try again with the File object we called `setEncryptionKey()` on: - * fileWithCustomEncryption.download(function(err, contents) { - * // contents.toString() === 'data' - * }); - * }); - * }); - * - * ``` - * @example include:samples/encryption.js - * region_tag:storage_upload_encrypted_file - * Example of uploading an encrypted file: - * - * @example include:samples/encryption.js - * region_tag:storage_download_encrypted_file - * Example of downloading an encrypted file: - */ - setEncryptionKey(encryptionKey) { - this.encryptionKey = encryptionKey; - this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); - this.encryptionKeyHash = crypto - .createHash('sha256') - // eslint-disable-next-line @typescript-eslint/no-explicit-any - .update(this.encryptionKeyBase64, 'base64') - .digest('base64'); - this.encryptionKeyInterceptor = { - request: reqOpts => { - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; - reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; - reqOpts.headers['x-goog-encryption-key-sha256'] = - this.encryptionKeyHash; - return reqOpts; - }, - }; - this.interceptors.push(this.encryptionKeyInterceptor); - return this; - } - /** - * @typedef {array} GetExpirationDateResponse - * @property {date} 0 A Date object representing the earliest time this file's - * retention policy will expire. - */ - /** - * @callback GetExpirationDateCallback - * @param {?Error} err Request error, if any. - * @param {date} expirationDate A Date object representing the earliest time - * this file's retention policy will expire. - */ - /** - * If this bucket has a retention policy defined, use this method to get a - * Date object representing the earliest time this file will expire. - * - * @param {GetExpirationDateCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const storage = require('@google-cloud/storage')(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.getExpirationDate(function(err, expirationDate) { - * // expirationDate is a Date object. - * }); - * ``` - */ - getExpirationDate(callback) { - this.getMetadata((err, metadata, apiResponse) => { - if (err) { - callback(err, null, apiResponse); - return; - } - if (!metadata.retentionExpirationTime) { - const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); - callback(error, null, apiResponse); - return; - } - callback(null, new Date(metadata.retentionExpirationTime), apiResponse); - }); - } - /** - * @typedef {array} GenerateSignedPostPolicyV2Response - * @property {object} 0 The document policy. 
- */ - /** - * @callback GenerateSignedPostPolicyV2Callback - * @param {?Error} err Request error, if any. - * @param {object} policy The document policy. - */ - /** - * Get a signed policy document to allow a user to upload data with a POST - * request. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed policy. That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} - * - * @throws {Error} If an expiration timestamp from the past is given. - * @throws {Error} If options.equals has an array with less or more than two - * members. - * @throws {Error} If options.startsWith has an array with less or more than two - * members. - * - * @param {object} options Configuration options. - * @param {array|array[]} [options.equals] Array of request parameters and - * their expected value (e.g. [['$', '']]). Values are - * translated into equality constraints in the conditions field of the - * policy document (e.g. ['eq', '$', '']). If only one - * equality condition is to be specified, options.equals can be a one- - * dimensional array (e.g. ['$', '']). - * @param {*} options.expires - A timestamp when this policy will expire. Any - * value given is passed to `new Date()`. - * @param {array|array[]} [options.startsWith] Array of request parameters and - * their expected prefixes (e.g. [['$', '']). Values are - * translated into starts-with constraints in the conditions field of the - * policy document (e.g. ['starts-with', '$', '']). If only - * one prefix condition is to be specified, options.startsWith can be a - * one- dimensional array (e.g. ['$', '']). - * @param {string} [options.acl] ACL for the object from possibly predefined - * ACLs. - * @param {string} [options.successRedirect] The URL to which the user client - * is redirected if the upload is successful. - * @param {string} [options.successStatus] - The status of the Google Storage - * response if the upload is successful (must be string). - * @param {object} [options.contentLengthRange] - * @param {number} [options.contentLengthRange.min] Minimum value for the - * request's content length. - * @param {number} [options.contentLengthRange.max] Maximum value for the - * request's content length. - * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * const options = { - * equals: ['$Content-Type', 'image/jpeg'], - * expires: '10-25-2022', - * contentLengthRange: { - * min: 0, - * max: 1024 - * } - * }; - * - * file.generateSignedPostPolicyV2(options, function(err, policy) { - * // policy.string: the policy document in plain text. - * // policy.base64: the policy document in base64. - * // policy.signature: the policy signature in base64. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * file.generateSignedPostPolicyV2(options).then(function(data) { - * const policy = data[0]; - * }); - * ``` - */ - generateSignedPostPolicyV2(optionsOrCallback, cb) { - const args = (0, util_1.normalize)(optionsOrCallback, cb); - let options = args.options; - const callback = args.callback; - const expires = new Date(options.expires); - if (isNaN(expires.getTime())) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); - } - if (expires.valueOf() < Date.now()) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); - } - options = Object.assign({}, options); - const conditions = [ - ['eq', '$key', this.name], - { - bucket: this.bucket.name, - }, - ]; - if (Array.isArray(options.equals)) { - if (!Array.isArray(options.equals[0])) { - options.equals = [options.equals]; - } - options.equals.forEach(condition => { - if (!Array.isArray(condition) || condition.length !== 2) { - throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); - } - conditions.push(['eq', condition[0], condition[1]]); - }); - } - if (Array.isArray(options.startsWith)) { - if (!Array.isArray(options.startsWith[0])) { - options.startsWith = [options.startsWith]; - } - options.startsWith.forEach(condition => { - if (!Array.isArray(condition) || condition.length !== 2) { - throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); - } - conditions.push(['starts-with', condition[0], condition[1]]); - }); - } - if (options.acl) { - conditions.push({ - acl: options.acl, - }); - } - if (options.successRedirect) { - conditions.push({ - success_action_redirect: options.successRedirect, - }); - } - if (options.successStatus) { - conditions.push({ - success_action_status: options.successStatus, - }); - } - if (options.contentLengthRange) { - const min = options.contentLengthRange.min; - const max = options.contentLengthRange.max; - if (typeof min !== 'number' || typeof max !== 'number') { - throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); - } - conditions.push(['content-length-range', min, max]); - } - const policy = { - expiration: expires.toISOString(), - conditions, - }; - const policyString = JSON.stringify(policy); - const policyBase64 = Buffer.from(policyString).toString('base64'); - this.storage.authClient.sign(policyBase64).then(signature => { - callback(null, { - string: policyString, - base64: policyBase64, - signature, - }); - }, err => { - callback(new signer_1.SigningError(err.message)); - }); - } - /** - * @typedef {object} SignedPostPolicyV4Output - * @property {string} url The request URL. - * @property {object} fields The form fields to include in the POST request. - */ - /** - * @typedef {array} GenerateSignedPostPolicyV4Response - * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. - */ - /** - * @callback GenerateSignedPostPolicyV4Callback - * @param {?Error} err Request error, if any. - * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. - */ - /** - * Get a v4 signed policy document to allow a user to upload data with a POST - * request. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed policy. 
That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} - * - * @param {object} options Configuration options. - * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any - * value given is passed to `new Date()`. - * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style - * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style - * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs - * should generally be preferred instead of path-style URL. - * Currently defaults to `false` for path-style, although this may change in a - * future major-version release. - * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in - * the result, e.g. "https://cdn.example.com". - * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} - * to include in the signed policy. Any fields with key beginning with 'x-ignore-' - * will not be included in the policy to be signed. - * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} - * to include in the signed policy. All fields given in `config.fields` are - * automatically included in the conditions array, adding the same entry - * in both `fields` and `conditions` will result in duplicate entries. - * - * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * const options = { - * expires: '10-25-2022', - * conditions: [ - * ['eq', '$Content-Type', 'image/jpeg'], - * ['content-length-range', 0, 1024], - * ], - * fields: { - * acl: 'public-read', - * 'x-goog-meta-foo': 'bar', - * 'x-ignore-mykey': 'data' - * } - * }; - * - * file.generateSignedPostPolicyV4(options, function(err, response) { - * // response.url The request URL - * // response.fields The form fields (including the signature) to include - * // to be used to upload objects by HTML forms. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.generateSignedPostPolicyV4(options).then(function(data) { - * const response = data[0]; - * // response.url The request URL - * // response.fields The form fields (including the signature) to include - * // to be used to upload objects by HTML forms. 
- * }); - * ``` - */ - generateSignedPostPolicyV4(optionsOrCallback, cb) { - const args = (0, util_1.normalize)(optionsOrCallback, cb); - let options = args.options; - const callback = args.callback; - const expires = new Date(options.expires); - if (isNaN(expires.getTime())) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); - } - if (expires.valueOf() < Date.now()) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); - } - if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { - throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); - } - options = Object.assign({}, options); - let fields = Object.assign({}, options.fields); - const now = new Date(); - const nowISO = (0, util_1.formatAsUTCISO)(now, true); - const todayISO = (0, util_1.formatAsUTCISO)(now); - const sign = async () => { - const { client_email } = await this.storage.authClient.getCredentials(); - const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; - fields = { - ...fields, - bucket: this.bucket.name, - key: this.name, - 'x-goog-date': nowISO, - 'x-goog-credential': credential, - 'x-goog-algorithm': 'GOOG4-RSA-SHA256', - }; - const conditions = options.conditions || []; - Object.entries(fields).forEach(([key, value]) => { - if (!key.startsWith('x-ignore-')) { - conditions.push({ [key]: value }); - } - }); - delete fields.bucket; - const expiration = (0, util_1.formatAsUTCISO)(expires, true, '-', ':'); - const policy = { - conditions, - expiration, - }; - const policyString = (0, util_1.unicodeJSONStringify)(policy); - const policyBase64 = Buffer.from(policyString).toString('base64'); - try { - const signature = await this.storage.authClient.sign(policyBase64); - const signatureHex = Buffer.from(signature, 'base64').toString('hex'); - fields['policy'] = policyBase64; - fields['x-goog-signature'] = signatureHex; - let url; - if (options.virtualHostedStyle) { - url = `https://${this.bucket.name}.storage.googleapis.com/`; - } - else if (options.bucketBoundHostname) { - url = `${options.bucketBoundHostname}/`; - } - else { - url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`; - } - return { - url, - fields, - }; - } - catch (err) { - throw new signer_1.SigningError(err.message); - } - }; - sign().then(res => callback(null, res), callback); - } - /** - * @typedef {array} GetSignedUrlResponse - * @property {object} 0 The signed URL. - */ - /** - * @callback GetSignedUrlCallback - * @param {?Error} err Request error, if any. - * @param {object} url The signed URL. - */ - /** - * Get a signed URL to allow limited time access to the file. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. In those environments, we call the - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} - * to create a signed URL. That API requires either the - * `https://www.googleapis.com/auth/iam` or - * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are - * enabled. - * - * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} - * - * @throws {Error} if an expiration timestamp from the past is given. - * - * @param {object} config Configuration object. - * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or - * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). 
- * When using "resumable", the header `X-Goog-Resumable: start` has
- * to be sent when making a request with the signed URL.
- * @param {*} config.expires A timestamp when this link will expire. Any value
- * given is passed to `new Date()`.
- * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.
- * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}
- * @param {string} [config.version='v2'] The signing version to use, either
- * 'v2' or 'v4'.
- * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style
- * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style
- * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs
- * should generally be preferred instead of path-style URLs.
- * Currently defaults to `false` for path-style, although this may change in a
- * future major-version release.
- * @param {string} [config.cname] The cname for this bucket, i.e.,
- * "https://cdn.example.com".
- * @param {string} [config.contentMd5] The MD5 digest value in base64. If you
- * provide this, the client must send this HTTP header with the same value in
- * its request; if this parameter is not provided here, the client must not
- * send any value for this HTTP header in its request.
- * @param {string} [config.contentType] If you provide this, the client must
- * send this HTTP header with the same value in its request; if this parameter
- * is not provided here, the client must not send any value for this HTTP
- * header in its request.
- * @param {object} [config.extensionHeaders] If these headers are used, the
- * server will check to make sure that the client provides matching
- * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}
- * for the requirements of this feature, most notably:
- * - The header name must be prefixed with `x-goog-`
- * - The header name must be all lowercase
- *
- * Note: A multi-valued header passed as an array in the extensionHeaders
- * object is converted into a string, delimited by `,` with
- * no space. Requests made using the signed URL will need to
- * delimit multi-valued headers using a single `,` as well, or
- * else the server will report a mismatched signature.
- * @param {object} [config.queryParams] Additional query parameters to include
- * in the signed URL.
- * @param {string} [config.promptSaveAs] The filename to prompt the user to
- * save the file as when the signed url is accessed. This is ignored if
- * `config.responseDisposition` is set.
- * @param {string} [config.responseDisposition] The
- * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the
- * signed url.
- * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value
- * given is passed to `new Date()`.
- * Note: Use for 'v4' only.
- * @param {string} [config.responseType] The response-content-type parameter
- * of the signed url.
- * @param {GetSignedUrlCallback} [callback] Callback function.
- * @returns {Promise}
- *
- * @example
- * ```
- * const {Storage} = require('@google-cloud/storage');
- * const storage = new Storage();
- * const myBucket = storage.bucket('my-bucket');
- *
- * const file = myBucket.file('my-file');
- *
- * //-
- * // Generate a URL that allows temporary access to download your file.
- * //- - * const request = require('request'); - * - * const config = { - * action: 'read', - * expires: '03-17-2025', - * }; - * - * file.getSignedUrl(config, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The file is now available to read from this URL. - * request(url, function(err, resp) { - * // resp.statusCode = 200 - * }); - * }); - * - * //- - * // Generate a URL that allows temporary access to download your file. - * // Access will begin at accessibleAt and end at expires. - * //- - * const request = require('request'); - * - * const config = { - * action: 'read', - * expires: '03-17-2025', - * accessibleAt: '03-13-2025' - * }; - * - * file.getSignedUrl(config, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. - * request(url, function(err, resp) { - * // resp.statusCode = 200 - * }); - * }); - * - * //- - * // Generate a URL to allow write permissions. This means anyone with this - * URL - * // can send a POST request with new data that will overwrite the file. - * //- - * file.getSignedUrl({ - * action: 'write', - * expires: '03-17-2025' - * }, function(err, url) { - * if (err) { - * console.error(err); - * return; - * } - * - * // The file is now available to be written to. - * const writeStream = request.put(url); - * writeStream.end('New data'); - * - * writeStream.on('complete', function(resp) { - * // Confirm the new content was saved. - * file.download(function(err, fileContents) { - * console.log('Contents:', fileContents.toString()); - * // Contents: New data - * }); - * }); - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.getSignedUrl(config).then(function(data) { - * const url = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_generate_signed_url - * Another example: - */ - getSignedUrl(cfg, callback) { - const method = ActionToHTTPMethod[cfg.action]; - if (!method) { - throw new Error(storage_1.ExceptionMessages.INVALID_ACTION); - } - const extensionHeaders = (0, util_1.objectKeyToLowercase)(cfg.extensionHeaders || {}); - if (cfg.action === 'resumable') { - extensionHeaders['x-goog-resumable'] = 'start'; - } - const queryParams = Object.assign({}, cfg.queryParams); - if (typeof cfg.responseType === 'string') { - queryParams['response-content-type'] = cfg.responseType; - } - if (typeof cfg.promptSaveAs === 'string') { - queryParams['response-content-disposition'] = - 'attachment; filename="' + cfg.promptSaveAs + '"'; - } - if (typeof cfg.responseDisposition === 'string') { - queryParams['response-content-disposition'] = cfg.responseDisposition; - } - if (this.generation) { - queryParams['generation'] = this.generation.toString(); - } - const signConfig = { - method, - expires: cfg.expires, - accessibleAt: cfg.accessibleAt, - extensionHeaders, - queryParams, - contentMd5: cfg.contentMd5, - contentType: cfg.contentType, - }; - if (cfg.cname) { - signConfig.cname = cfg.cname; - } - if (cfg.version) { - signConfig.version = cfg.version; - } - if (cfg.virtualHostedStyle) { - signConfig.virtualHostedStyle = cfg.virtualHostedStyle; - } - if (!this.signer) { - this.signer = new signer_1.URLSigner(this.storage.authClient, this.bucket, this); - } - this.signer - .getSignedUrl(signConfig) - .then(signedUrl => callback(null, signedUrl), callback); - } - /** - * @callback IsPublicCallback - * @param {?Error} err Request error, if any. 
- * @param {boolean} resp Whether file is public or not. - */ - /** - * @typedef {array} IsPublicResponse - * @property {boolean} 0 Whether file is public or not. - */ - /** - * Check whether this file is public or not by sending - * a HEAD request without credentials. - * No errors from the server indicates that the current - * file is public. - * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} - * indicates that file is private. - * Any other non 403 error is propagated to user. - * - * @param {IsPublicCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Check whether the file is publicly accessible. - * //- - * file.isPublic(function(err, resp) { - * if (err) { - * console.error(err); - * return; - * } - * console.log(`the file ${file.id} is public: ${resp}`) ; - * }) - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.isPublic().then(function(data) { - * const resp = data[0]; - * }); - * ``` - */ - isPublic(callback) { - var _a; - // Build any custom headers based on the defined interceptors on the parent - // storage object and this object - const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || []; - const fileInterceptors = this.interceptors || []; - const allInterceptors = storageInterceptors.concat(fileInterceptors); - const headers = allInterceptors.reduce((acc, curInterceptor) => { - const currentHeaders = curInterceptor.request({ - uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, - }); - Object.assign(acc, currentHeaders.headers); - return acc; - }, {}); - nodejs_common_1.util.makeRequest({ - method: 'GET', - uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, - headers, - }, { - retryOptions: this.storage.retryOptions, - }, (err) => { - if (err) { - const apiError = err; - if (apiError.code === 403) { - callback(null, false); - } - else { - callback(err); - } - } - else { - callback(null, true); - } - }); - } - /** - * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). - * @property {Metadata} [metadata] Define custom metadata properties to define - * along with the operation. - * @property {boolean} [strict] If true, set the file to be private to - * only the owner user. Otherwise, it will be private to the project. - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback MakeFilePrivateCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} MakeFilePrivateResponse - * @property {object} 0 The full API response. - */ - /** - * Make a file private to the project and remove all other permissions. - * Set `options.strict` to true to make the file private to only the owner. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} - * - * @param {MakeFilePrivateOptions} [options] Configuration options. - * @param {MakeFilePrivateCallback} [callback] Callback function. 
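- *
- * In addition to the options above, the implementation forwards
- * `options.preconditionOpts.ifMetagenerationMatch` as an
- * `ifMetagenerationMatch` query parameter. A minimal sketch of a conditional
- * call (the bucket and file names are placeholders):
- *
- * ```
- * const {Storage} = require('@google-cloud/storage');
- * const storage = new Storage();
- * const file = storage.bucket('my-bucket').file('my-file');
- *
- * file.getMetadata()
- *   .then(([metadata]) =>
- *     file.makePrivate({
- *       strict: true,
- *       preconditionOpts: {ifMetagenerationMatch: metadata.metageneration},
- *     })
- *   )
- *   .then(() => {
- *     // The ACL change only succeeded because no concurrent update bumped
- *     // the file's metageneration in the meantime.
- *   });
- * ```
- *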
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * //- - * // Set the file private so only project maintainers can see and modify it. - * //- - * file.makePrivate(function(err) {}); - * - * //- - * // Set the file private so only the owner can see and modify it. - * //- - * file.makePrivate({ strict: true }, function(err) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.makePrivate().then(function(data) { - * const apiResponse = data[0]; - * }); - * ``` - */ - makePrivate(optionsOrCallback, callback) { - var _a, _b; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const query = { - predefinedAcl: options.strict ? 'private' : 'projectPrivate', - // eslint-disable-next-line @typescript-eslint/no-explicit-any - }; - if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { - query.ifMetagenerationMatch = - (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch; - delete options.preconditionOpts; - } - if (options.userProject) { - query.userProject = options.userProject; - } - // You aren't allowed to set both predefinedAcl & acl properties on a file, - // so acl must explicitly be nullified, destroying all previous acls on the - // file. - const metadata = extend({}, options.metadata, { acl: null }); - this.setMetadata(metadata, query, callback); - } - /** - * @typedef {array} MakeFilePublicResponse - * @property {object} 0 The full API response. - */ - /** - * @callback MakeFilePublicCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Set a file to be publicly readable and maintain all previous permissions. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} - * - * @param {MakeFilePublicCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * - * file.makePublic(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.makePublic().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_make_public - * Another example: - */ - makePublic(callback) { - callback = callback || nodejs_common_1.util.noop; - this.acl.add({ - entity: 'allUsers', - role: 'READER', - }, (err, acl, resp) => { - callback(err, resp); - }); - } - /** - * The public URL of this File - * Use {@link File#makePublic} to enable anonymous access via the returned URL. 
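- *
- * A minimal sketch of that flow (placeholder names; assumes the bucket uses
- * ACLs rather than uniform bucket-level access):
- *
- * ```
- * const {Storage} = require('@google-cloud/storage');
- * const storage = new Storage();
- * const file = storage.bucket('my-bucket').file('my-file');
- *
- * file.makePublic().then(() => {
- *   // Anyone can now download the object from this URL, e.g.
- *   // "https://storage.googleapis.com/my-bucket/my-file".
- *   console.log(file.publicUrl());
- * });
- * ```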
- * - * @returns {string} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const file = bucket.file('my-file'); - * - * // publicUrl will be "https://storage.googleapis.com/albums/my-file" - * const publicUrl = file.publicUrl(); - * ``` - */ - publicUrl() { - return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; - } - /** - * @typedef {array} MoveResponse - * @property {File} 0 The destination File. - * @property {object} 1 The full API response. - */ - /** - * @callback MoveCallback - * @param {?Error} err Request error, if any. - * @param {?File} destinationFile The destination File. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} MoveOptions Configuration options for File#move(). See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. - * @param {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Move this file to another location. By default, this will rename the file - * and keep it in the same bucket, but you can choose to move it to another - * Bucket by providing a Bucket or File object or a URL beginning with - * "gs://". - * - * **Warning**: - * There is currently no atomic `move` method in the Cloud Storage API, - * so this method is a composition of {@link File#copy} (to the new - * location) and {@link File#delete} (from the old location). While - * unlikely, it is possible that an error returned to your callback could be - * triggered from either one of these API calls failing, which could leave a - * duplicate file lingering. The error message will indicate what operation - * has failed. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} - * - * @throws {Error} If the destination file is not provided. - * - * @param {string|Bucket|File} destination Destination file. - * @param {MoveCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * //- - * // You can pass in a variety of types for the destination. - * // - * // For all of the below examples, assume we are working with the following - * // Bucket and File objects. - * //- - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('my-image.png'); - * - * //- - * // If you pass in a string for the destination, the file is moved to its - * // current bucket, under the new name provided. - * //- - * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // but contains instead: - * // - "my-image-new.png" - * - * // `destinationFile` is an instance of a File object that refers to your - * // new file. - * }); - * - * //- - * // If you pass in a string starting with "gs://" for the destination, the - * // file is copied to the other bucket and under the new name provided. - * //- - * const newLocation = 'gs://another-bucket/my-image-new.png'; - * file.move(newLocation, function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image-new.png" - * - * // `destinationFile` is an instance of a File object that refers to your - * // new file. 
- * }); - * - * //- - * // If you pass in a Bucket object, the file will be moved to that bucket - * // using the same name. - * //- - * const anotherBucket = gcs.bucket('another-bucket'); - * - * file.move(anotherBucket, function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-image.png" - * - * // `destinationFile` is an instance of a File object that refers to your - * // new file. - * }); - * - * //- - * // If you pass in a File object, you have complete control over the new - * // bucket and filename. - * //- - * const anotherFile = anotherBucket.file('my-awesome-image.png'); - * - * file.move(anotherFile, function(err, destinationFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // - * // `another-bucket` now contains: - * // - "my-awesome-image.png" - * - * // Note: - * // The `destinationFile` parameter is equal to `anotherFile`. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.move('my-image-new.png').then(function(data) { - * const destinationFile = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/files.js - * region_tag:storage_move_file - * Another example: - */ - move(destination, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - callback = callback || nodejs_common_1.util.noop; - this.copy(destination, options, (err, destinationFile, copyApiResponse) => { - if (err) { - err.message = 'file#copy failed with an error - ' + err.message; - callback(err, null, copyApiResponse); - return; - } - if (this.name !== destinationFile.name || - this.bucket.name !== destinationFile.bucket.name) { - this.delete(options, (err, apiResponse) => { - if (err) { - err.message = 'file#delete failed with an error - ' + err.message; - callback(err, destinationFile, apiResponse); - return; - } - callback(null, destinationFile, copyApiResponse); - }); - } - else { - callback(null, destinationFile, copyApiResponse); - } - }); - } - /** - * @typedef {array} RenameResponse - * @property {File} 0 The destination File. - * @property {object} 1 The full API response. - */ - /** - * @callback RenameCallback - * @param {?Error} err Request error, if any. - * @param {?File} destinationFile The destination File. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {object} RenameOptions Configuration options for File#move(). See an - * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. - * @param {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * Rename this file. - * - * **Warning**: - * There is currently no atomic `rename` method in the Cloud Storage API, - * so this method is an alias of {@link File#move}, which in turn is a - * composition of {@link File#copy} (to the new location) and - * {@link File#delete} (from the old location). While - * unlikely, it is possible that an error returned to your callback could be - * triggered from either one of these API calls failing, which could leave a - * duplicate file lingering. The error message will indicate what operation - * has failed. - * - * @param {string|File} destinationFile Destination file. - * @param {RenameCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // You can pass in a string or a File object. - * // - * // For all of the below examples, assume we are working with the following - * // Bucket and File objects. - * //- - * - * const bucket = storage.bucket('my-bucket'); - * const file = bucket.file('my-image.png'); - * - * //- - * // You can pass in a string for the destinationFile. - * //- - * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * // but contains instead: - * // - "renamed-image.png" - * - * // `renamedFile` is an instance of a File object that refers to your - * // renamed file. - * }); - * - * //- - * // You can pass in a File object. - * //- - * const anotherFile = anotherBucket.file('my-awesome-image.png'); - * - * file.rename(anotherFile, function(err, renamedFile, apiResponse) { - * // `my-bucket` no longer contains: - * // - "my-image.png" - * - * // Note: - * // The `renamedFile` parameter is equal to `anotherFile`. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.rename('my-renamed-image.png').then(function(data) { - * const renamedFile = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - rename(destinationFile, optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - callback = callback || nodejs_common_1.util.noop; - this.move(destinationFile, options, callback); - } - /** - * Makes request and applies userProject query parameter if necessary. - * - * @private - * - * @param {object} reqOpts - The request options. - * @param {function} callback - The callback function. - */ - request(reqOpts, callback) { - return this.parent.request.call(this, reqOpts, callback); - } - /** - * @callback RotateEncryptionKeyCallback - * @extends CopyCallback - */ - /** - * @typedef RotateEncryptionKeyResponse - * @extends CopyResponse - */ - /** - * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options - * for File#rotateEncryptionKey(). - * If a string or Buffer is provided, it is interpreted as an AES-256, - * customer-supplied encryption key. If you'd like to use a Cloud KMS key - * name, you must specify an options object with the property name: - * `kmsKeyName`. - * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. - * @param {string} [options.kmsKeyName] A Cloud KMS key name. - */ - /** - * This method allows you to update the encryption key associated with this - * file. - * - * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} - * - * @param {RotateEncryptionKeyOptions} [options] - Configuration options. - * @param {RotateEncryptionKeyCallback} [callback] - * @returns {Promise} - * - * @example include:samples/encryption.js - * region_tag:storage_rotate_encryption_key - * Example of rotating the encryption key for this file: - */ - rotateEncryptionKey(optionsOrCallback, callback) { - var _a; - callback = - typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; - let options = {}; - if (typeof optionsOrCallback === 'string' || - optionsOrCallback instanceof Buffer) { - options = { - encryptionKey: optionsOrCallback, - }; - } - else if (typeof optionsOrCallback === 'object') { - options = optionsOrCallback; - } - const newFile = this.bucket.file(this.id, options); - const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined - ? { preconditionOpts: options.preconditionOpts } - : {}; - this.copy(newFile, copyOptions, callback); - } - /** - * @typedef {object} SaveOptions - * @extends CreateWriteStreamOptions - */ - /** - * @callback SaveCallback - * @param {?Error} err Request error, if any. - */ - /** - * Write strings or buffers to a file. - * - * *This is a convenience method which wraps {@link File#createWriteStream}.* - * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. - * - * Resumable uploads are automatically enabled and must be shut off explicitly - * by setting `options.resumable` to `false`. - * - * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. - * - *

- * There is some overhead when using a resumable upload that can cause - * noticeable performance degradation while uploading a series of small - * files. When uploading files less than 10MB, it is recommended that the - * resumable feature is disabled. - *
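- * For example, a small payload can skip the resumable machinery entirely by
- * passing `resumable: false` (a minimal sketch; the bucket and file names are
- * placeholders):
- *
- * ```
- * const {Storage} = require('@google-cloud/storage');
- * const storage = new Storage();
- * const file = storage.bucket('my-bucket').file('small-config.json');
- *
- * // A small upload avoids the resumable-session overhead described above.
- * file.save(JSON.stringify({enabled: true}), {resumable: false})
- *   .then(() => {
- *     // The file was written with a single, non-resumable request.
- *   });
- * ```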

- * - * @param {string | Buffer} data The data to write to a file. - * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` - * parameter. - * @param {SaveCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file'); - * const contents = 'This is the contents of the file.'; - * - * file.save(contents, function(err) { - * if (!err) { - * // File written successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.save(contents).then(function() {}); - * ``` - */ - save(data, optionsOrCallback, callback) { - // tslint:enable:no-any - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - let maxRetries = this.storage.retryOptions.maxRetries; - if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { - maxRetries = 0; - } - const returnValue = retry(async (bail) => { - await new Promise((resolve, reject) => { - if (maxRetries === 0) { - this.storage.retryOptions.autoRetry = false; - } - const writable = this.createWriteStream(options) - .on('error', err => { - if (this.storage.retryOptions.autoRetry && - this.storage.retryOptions.retryableErrorFn(err)) { - return reject(err); - } - else { - return bail(err); - } - }) - .on('finish', () => { - return resolve(); - }); - if (options.onUploadProgress) { - writable.on('progress', options.onUploadProgress); - } - writable.end(data); - }); - }, { - retries: maxRetries, - factor: this.storage.retryOptions.retryDelayMultiplier, - maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, - maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds - }); - if (!callback) { - return returnValue; - } - else { - return returnValue - .then(() => { - if (callback) { - return callback(); - } - }) - .catch(callback); - } - } - setMetadata(metadata, optionsOrCallback, cb) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = - typeof optionsOrCallback === 'function' - ? optionsOrCallback - : cb; - this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_1.AvailableServiceObjectMethods.setMetadata, options); - super - .setMetadata(metadata, options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } - /** - * @typedef {array} SetStorageClassResponse - * @property {object} 0 The full API response. - */ - /** - * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @callback SetStorageClassCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Set the storage class for this file. - * - * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} - * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} - * - * @param {string} storageClass The new storage class. 
(`standard`, - * `nearline`, `coldline`, or `archive`) - * **Note:** The storage classes `multi_regional` and `regional` - * are now legacy and will be deprecated in the future. - * @param {SetStorageClassOptions} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {SetStorageClassCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * file.setStorageClass('nearline', function(err, apiResponse) { - * if (err) { - * // Error handling omitted. - * } - * - * // The storage class was updated successfully. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * file.setStorageClass('nearline').then(function() {}); - * ``` - */ - setStorageClass(storageClass, optionsOrCallback, callback) { - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - const req = extend(true, {}, options); - // In case we get input like `storageClass`, convert to `storage_class`. - req.storageClass = storageClass - .replace(/-/g, '_') - .replace(/([a-z])([A-Z])/g, (_, low, up) => { - return low + '_' + up; - }) - .toUpperCase(); - this.copy(this, req, (err, file, apiResponse) => { - if (err) { - callback(err, apiResponse); - return; - } - this.metadata = file.metadata; - callback(null, apiResponse); - }); - } - /** - * Set a user project to be billed for all requests made from this File - * object. - * - * @param {string} userProject The user project. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('albums'); - * const file = bucket.file('my-file'); - * - * file.setUserProject('grape-spaceship-123'); - * ``` - */ - setUserProject(userProject) { - this.bucket.setUserProject.call(this, userProject); - } - /** - * This creates a resumable-upload upload stream. - * - * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. - * @param {object=} options - Configuration object. - * - * @private - */ - startResumableUpload_(dup, options) { - options = extend(true, { - metadata: {}, - }, options); - const retryOptions = this.storage.retryOptions; - if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { - retryOptions.autoRetry = false; - } - const uploadStream = resumableUpload.upload({ - authClient: this.storage.authClient, - apiEndpoint: this.storage.apiEndpoint, - bucket: this.bucket.name, - customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), - file: this.name, - generation: this.generation, - key: this.encryptionKey, - kmsKeyName: this.kmsKeyName, - metadata: options.metadata, - offset: options.offset, - predefinedAcl: options.predefinedAcl, - private: options.private, - public: options.public, - uri: options.uri, - userProject: options.userProject || this.userProject, - retryOptions: { ...retryOptions }, - params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, - chunkSize: options === null || options === void 0 ? 
void 0 : options.chunkSize, - }); - uploadStream - .on('response', resp => { - dup.emit('response', resp); - }) - .on('metadata', metadata => { - this.metadata = metadata; - dup.emit('metadata'); - }) - .on('finish', () => { - dup.emit('complete'); - }) - .on('progress', evt => dup.emit('progress', evt)); - dup.setWritable(uploadStream); - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - } - /** - * Takes a readable stream and pipes it to a remote file. Unlike - * `startResumableUpload_`, which uses the resumable upload technique, this - * method uses a simple upload (all or nothing). - * - * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. - * @param {object=} options - Configuration object. - * - * @private - */ - startSimpleUpload_(dup, options) { - options = extend(true, { - metadata: {}, - }, options); - const apiEndpoint = this.storage.apiEndpoint; - const bucketName = this.bucket.name; - const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; - const reqOpts = { - qs: { - name: this.name, - }, - uri: uri, - }; - if (this.generation !== undefined) { - reqOpts.qs.ifGenerationMatch = this.generation; - } - if (this.kmsKeyName !== undefined) { - reqOpts.qs.kmsKeyName = this.kmsKeyName; - } - if (typeof options.timeout === 'number') { - reqOpts.timeout = options.timeout; - } - if (options.userProject || this.userProject) { - reqOpts.qs.userProject = options.userProject || this.userProject; - } - if (options.predefinedAcl) { - reqOpts.qs.predefinedAcl = options.predefinedAcl; - } - else if (options.private) { - reqOpts.qs.predefinedAcl = 'private'; - } - else if (options.public) { - reqOpts.qs.predefinedAcl = 'publicRead'; - } - Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); - nodejs_common_1.util.makeWritableStream(dup, { - makeAuthenticatedRequest: (reqOpts) => { - this.request(reqOpts, (err, body, resp) => { - if (err) { - dup.destroy(err); - return; - } - this.metadata = body; - dup.emit('metadata', body); - dup.emit('response', resp); - dup.emit('complete'); - }); - }, - metadata: options.metadata, - request: reqOpts, - }); - } - disableAutoRetryConditionallyIdempotent_( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - coreOpts, methodType, localPreconditionOptions) { - var _a, _b, _c, _d; - if ((typeof coreOpts === 'object' && - ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && - (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && - methodType === bucket_1.AvailableServiceObjectMethods.delete && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - this.storage.retryOptions.autoRetry = false; - } - if ((typeof coreOpts === 'object' && - ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && - (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && - methodType === bucket_1.AvailableServiceObjectMethods.setMetadata && - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryConditional) || - this.storage.retryOptions.idempotencyStrategy === - storage_1.IdempotencyStrategy.RetryNever) { - this.storage.retryOptions.autoRetry = false; - } - } - async getBufferFromReadable(readable) { - const buf = []; - for await (const chunk of readable) { - buf.push(chunk); - } - return Buffer.concat(buf); - } -} -exports.File = File; -_File_instances = new WeakSet(), _File_validateIntegrity = -/** - * - * @param hashCalculatingStream - * @param verify - * @returns {boolean} Returns `true` if valid, throws with error otherwise - */ -async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { - const metadata = this.metadata; - // If we're doing validation, assume the worst - let dataMismatch = !!(verify.crc32c || verify.md5); - if (verify.crc32c && metadata.crc32c) { - dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); - } - if (verify.md5 && metadata.md5Hash) { - dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); - } - if (dataMismatch) { - const errors = []; - let code = ''; - let message = ''; - try { - await this.delete(); - if (verify.md5 && !metadata.md5Hash) { - code = 'MD5_NOT_AVAILABLE'; - message = FileExceptionMessages.MD5_NOT_AVAILABLE; - } - else { - code = 'FILE_NO_UPLOAD'; - message = FileExceptionMessages.UPLOAD_MISMATCH; - } - } - catch (e) { - const error = e; - code = 'FILE_NO_UPLOAD_DELETE'; - message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; - errors.push(error); - } - const error = new RequestError(message); - error.code = code; - error.errors = errors; - throw error; - } - return true; -}; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(File, { - exclude: [ - 'cloudStorageURI', - 'publicUrl', - 'request', - 'save', - 'setEncryptionKey', - 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', - 'getBufferFromReadable', - ], -}); -//# sourceMappingURL=file.js.map - -/***/ }), - -/***/ 8386: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; -}; -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HashStreamValidator = void 0; -const crypto_1 = __nccwpck_require__(6113); -const stream_1 = __nccwpck_require__(2781); -const crc32c_1 = __nccwpck_require__(4921); -const file_1 = __nccwpck_require__(5373); -class HashStreamValidator extends stream_1.Transform { - constructor(options = {}) { - super(); - this.updateHashesOnly = false; - _HashStreamValidator_crc32cHash.set(this, undefined); - _HashStreamValidator_md5Hash.set(this, undefined); - _HashStreamValidator_md5Digest.set(this, ''); - this.crc32cEnabled = !!options.crc32c; - this.md5Enabled = !!options.md5; - this.updateHashesOnly = !!options.updateHashesOnly; - this.crc32cExpected = options.crc32cExpected; - this.md5Expected = options.md5Expected; - if (this.crc32cEnabled) { - const crc32cGenerator = options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; - __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); - } - if (this.md5Enabled) { - __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); - } - } - _flush(callback) { - if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { - __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); - } - if (this.updateHashesOnly) { - callback(); - return; - } - // If we're doing validation, assume the worst-- a data integrity - // mismatch. If not, these tests won't be performed, and we can assume - // the best. - // We must check if the server decompressed the data on serve because hash - // validation is not possible in this case. - let failed = this.crc32cEnabled || this.md5Enabled; - if (this.crc32cEnabled && this.crc32cExpected) { - failed = !this.test('crc32c', this.crc32cExpected); - } - if (this.md5Enabled && this.md5Expected) { - failed = !this.test('md5', this.md5Expected); - } - if (failed) { - const mismatchError = new file_1.RequestError(file_1.FileExceptionMessages.DOWNLOAD_MISMATCH); - mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; - callback(mismatchError); - } - else { - callback(); - } - } - _transform(chunk, encoding, callback) { - this.push(chunk, encoding); - try { - if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) - __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); - if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) - __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); - callback(); - } - catch (e) { - callback(e); - } - } - test(hash, sum) { - const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum; - if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { - return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); - } - if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { - return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; - } - return false; - } -} -exports.HashStreamValidator = HashStreamValidator; -_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); -//# sourceMappingURL=hash-stream-validator.js.map - -/***/ }), - -/***/ 9930: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HmacKey = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const storage_1 = __nccwpck_require__(346); -const promisify_1 = __nccwpck_require__(9203); -/** - * The API-formatted resource description of the HMAC key. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name HmacKey#metadata - * @type {object} - */ -/** - * An HmacKey object contains metadata of an HMAC key created from a - * service account through the {@link Storage} client using - * {@link Storage#createHmacKey}. - * - * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} - * - * @class - */ -class HmacKey extends nodejs_common_1.ServiceObject { - /** - * @typedef {object} HmacKeyOptions - * @property {string} [projectId] The project ID of the project that owns - * the service account of the requested HMAC key. If not provided, - * the project ID used to instantiate the Storage client will be used. - */ - /** - * Constructs an HmacKey object. - * - * Note: this only create a local reference to an HMAC key, to create - * an HMAC key, use {@link Storage#createHmacKey}. - * - * @param {Storage} storage The Storage instance this HMAC key is - * attached to. - * @param {string} accessId The unique accessId for this HMAC key. - * @param {HmacKeyOptions} options Constructor configurations. - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const hmacKey = storage.hmacKey('access-id'); - * ``` - */ - constructor(storage, accessId, options) { - const methods = { - /** - * @typedef {object} DeleteHmacKeyOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * @typedef {array} DeleteHmacKeyResponse - * @property {object} 0 The full API response. - */ - /** - * @callback DeleteHmacKeyCallback - * @param {?Error} err Request error, if any. - * @param {object} apiResponse The full API response. - */ - /** - * Deletes an HMAC key. 
- * Key state must be set to `INACTIVE` prior to deletion. - * Caution: HMAC keys cannot be recovered once you delete them. - * - * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. - * - * @method HmacKey#delete - * @param {DeleteHmacKeyOptions} [options] Configuration options. - * @param {DeleteHmacKeyCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Delete HMAC key after making the key inactive. - * //- - * const hmacKey = storage.hmacKey('ACCESS_ID'); - * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * hmacKey.delete((err) => { - * if (err) { - * console.error(err); - * return; - * } - * // The HMAC key is deleted. - * }); - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * const hmacKey = storage.hmacKey('ACCESS_ID'); - * hmacKey - * .setMetadata({state: 'INACTIVE'}) - * .then(() => { - * return hmacKey.delete(); - * }); - * ``` - */ - delete: true, - /** - * @callback GetHmacKeyCallback - * @param {?Error} err Request error, if any. - * @param {HmacKey} hmacKey this {@link HmacKey} instance. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} GetHmacKeyResponse - * @property {HmacKey} 0 This {@link HmacKey} instance. - * @property {object} 1 The full API response. - */ - /** - * @typedef {object} GetHmacKeyOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * Retrieves and populate an HMAC key's metadata, and return - * this {@link HmacKey} instance. - * - * HmacKey.get() does not give the HMAC key secret, as - * it is only returned on creation. - * - * The authenticated user must have `storage.hmacKeys.get` permission - * for the project in which the key exists. - * - * @method HmacKey#get - * @param {GetHmacKeyOptions} [options] Configuration options. - * @param {GetHmacKeyCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Get the HmacKey's Metadata. - * //- - * storage.hmacKey('ACCESS_ID') - * .get((err, hmacKey) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * // do something with the returned HmacKey object. - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * storage.hmacKey('ACCESS_ID') - * .get() - * .then((data) => { - * const hmacKey = data[0]; - * }); - * ``` - */ - get: true, - /** - * @typedef {object} GetHmacKeyMetadataOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * Retrieves and populate an HMAC key's metadata, and return - * the HMAC key's metadata as an object. - * - * HmacKey.getMetadata() does not give the HMAC key secret, as - * it is only returned on creation. - * - * The authenticated user must have `storage.hmacKeys.get` permission - * for the project in which the key exists. - * - * @method HmacKey#getMetadata - * @param {GetHmacKeyMetadataOptions} [options] Configuration options. - * @param {HmacKeyMetadataCallback} [callback] Callback function. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * //- - * // Get the HmacKey's metadata and populate to the metadata property. - * //- - * storage.hmacKey('ACCESS_ID') - * .getMetadata((err, hmacKeyMetadata) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * console.log(hmacKeyMetadata); - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * storage.hmacKey('ACCESS_ID') - * .getMetadata() - * .then((data) => { - * const hmacKeyMetadata = data[0]; - * console.log(hmacKeyMetadata); - * }); - * ``` - */ - getMetadata: true, - /** - * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. - * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. - * @property {string} [etag] Include an etag from a previous get HMAC key request - * to perform safe read-modify-write. - */ - /** - * @typedef {object} SetHmacKeyMetadataOptions - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * @callback HmacKeyMetadataCallback - * @param {?Error} err Request error, if any. - * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. - * @param {object} apiResponse The full API response. - */ - /** - * @typedef {array} HmacKeyMetadataResponse - * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. - * @property {object} 1 The full API response. - */ - /** - * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for - * valid states. - * - * @method HmacKey#setMetadata - * @param {SetHmacKeyMetadata} metadata The new metadata. - * @param {SetHmacKeyMetadataOptions} [options] Configuration options. - * @param {HmacKeyMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * const metadata = { - * state: 'INACTIVE', - * }; - * - * storage.hmacKey('ACCESS_ID') - * .setMetadata(metadata, (err, hmacKeyMetadata) => { - * if (err) { - * // The request was an error. - * console.error(err); - * return; - * } - * console.log(hmacKeyMetadata); - * }); - * - * //- - * // If the callback is omitted, a promise is returned. - * //- - * storage.hmacKey('ACCESS_ID') - * .setMetadata(metadata) - * .then((data) => { - * const hmacKeyMetadata = data[0]; - * console.log(hmacKeyMetadata); - * }); - * ``` - */ - setMetadata: { - reqOpts: { - method: 'PUT', - }, - }, - }; - const projectId = (options && options.projectId) || storage.projectId; - super({ - parent: storage, - id: accessId, - baseUrl: `/projects/${projectId}/hmacKeys`, - methods, - }); - this.storage = storage; - this.instanceRetryValue = storage.retryOptions.autoRetry; - } - setMetadata(metadata, optionsOrCallback, cb) { - // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways - if (this.storage.retryOptions.idempotencyStrategy !== - storage_1.IdempotencyStrategy.RetryAlways) { - this.storage.retryOptions.autoRetry = false; - } - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - cb = - typeof optionsOrCallback === 'function' - ? 
optionsOrCallback - : cb; - super - .setMetadata(metadata, options) - .then(resp => cb(null, ...resp)) - .catch(cb) - .finally(() => { - this.storage.retryOptions.autoRetry = this.instanceRetryValue; - }); - } -} -exports.HmacKey = HmacKey; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(HmacKey); -//# sourceMappingURL=hmacKey.js.map - -/***/ }), - -/***/ 66: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Iam = exports.IAMExceptionMessages = void 0; -const promisify_1 = __nccwpck_require__(9203); -const util_1 = __nccwpck_require__(1862); -var IAMExceptionMessages; -(function (IAMExceptionMessages) { - IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; - IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; -})(IAMExceptionMessages = exports.IAMExceptionMessages || (exports.IAMExceptionMessages = {})); -/** - * Get and set IAM policies for your Cloud Storage bucket. - * - * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} - * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} - * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} - * - * @constructor Iam - * - * @param {Bucket} bucket The parent instance. - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * // bucket.iam - * ``` - */ -class Iam { - constructor(bucket) { - this.request_ = bucket.request.bind(bucket); - this.resourceId_ = 'buckets/' + bucket.getId(); - } - /** - * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). - * @property {number} [requestedPolicyVersion] The version of IAM policies to - * request. If a policy with a condition is requested without setting - * this, the server will return an error. This must be set to a value - * of 3 to retrieve IAM policies containing conditions. This is to - * prevent client code that isn't aware of IAM conditions from - * interpreting and modifying policies incorrectly. The service might - * return a policy with version lower than the one that was requested, - * based on the feature syntax in the policy fetched. - * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} - * @property {string} [userProject] The ID of the project which will be - * billed for the request. - */ - /** - * @typedef {array} GetPolicyResponse - * @property {Policy} 0 The policy. - * @property {object} 1 The full API response. 
- */ - /** - * @typedef {object} Policy - * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. - * @property {string} [policy.etag] Etags are used to perform a read-modify-write. - * @property {number} [policy.version] The syntax schema version of the Policy. - * To set an IAM policy with conditional binding, this field must be set to - * 3 or greater. - * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} - */ - /** - * @typedef {object} PolicyBinding - * @property {string} role Role that is assigned to members. - * @property {string[]} members Specifies the identities requesting access for the bucket. - * @property {Expr} [condition] The condition that is associated with this binding. - */ - /** - * @typedef {object} Expr - * @property {string} [title] An optional title for the expression, i.e. a - * short string describing its purpose. This can be used e.g. in UIs - * which allow to enter the expression. - * @property {string} [description] An optional description of the - * expression. This is a longer text which describes the expression, - * e.g. when hovered over it in a UI. - * @property {string} expression Textual representation of an expression in - * Common Expression Language syntax. The application context of the - * containing message determines which well-known feature set of CEL - * is supported.The condition that is associated with this binding. - * - * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions - */ - /** - * Get the IAM policy. - * - * @param {GetPolicyOptions} [options] Request options. - * @param {GetPolicyCallback} [callback] Callback function. - * @returns {Promise} - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * bucket.iam.getPolicy( - * {requestedPolicyVersion: 3}, - * function(err, policy, apiResponse) { - * - * }, - * ); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.iam.getPolicy({requestedPolicyVersion: 3}) - * .then(function(data) { - * const policy = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/iam.js - * region_tag:storage_view_bucket_iam_members - * Example of retrieving a bucket's IAM policy: - */ - getPolicy(optionsOrCallback, callback) { - const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); - const qs = {}; - if (options.userProject) { - qs.userProject = options.userProject; - } - if (options.requestedPolicyVersion !== null && - options.requestedPolicyVersion !== undefined) { - qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; - } - this.request_({ - uri: '/iam', - qs, - }, cb); - } - /** - * Set the IAM policy. - * - * @throws {Error} If no policy is provided. - * - * @param {Policy} policy The policy. - * @param {SetPolicyOptions} [options] Configuration options. - * @param {SetPolicyCallback} callback Callback function. 
- * @returns {Promise} - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} - * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * const myPolicy = { - * bindings: [ - * { - * role: 'roles/storage.admin', - * members: - * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] - * } - * ] - * }; - * - * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.iam.setPolicy(myPolicy).then(function(data) { - * const policy = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/iam.js - * region_tag:storage_add_bucket_iam_member - * Example of adding to a bucket's IAM policy: - * - * @example include:samples/iam.js - * region_tag:storage_remove_bucket_iam_member - * Example of removing from a bucket's IAM policy: - */ - setPolicy(policy, optionsOrCallback, callback) { - if (policy === null || typeof policy !== 'object') { - throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); - } - const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); - let maxRetries; - if (policy.etag === undefined) { - maxRetries = 0; - } - this.request_({ - method: 'PUT', - uri: '/iam', - maxRetries, - json: Object.assign({ - resourceId: this.resourceId_, - }, policy), - qs: options, - }, cb); - } - /** - * Test a set of permissions for a resource. - * - * @throws {Error} If permissions are not provided. - * - * @param {string|string[]} permissions The permission(s) to test for. - * @param {TestIamPermissionsOptions} [options] Configuration object. - * @param {TestIamPermissionsCallback} [callback] Callback function. - * @returns {Promise} - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * - * //- - * // Test a single permission. - * //- - * const test = 'storage.buckets.delete'; - * - * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { - * console.log(permissions); - * // { - * // "storage.buckets.delete": true - * // } - * }); - * - * //- - * // Test several permissions at once. - * //- - * const tests = [ - * 'storage.buckets.delete', - * 'storage.buckets.get' - * ]; - * - * bucket.iam.testPermissions(tests, function(err, permissions) { - * console.log(permissions); - * // { - * // "storage.buckets.delete": false, - * // "storage.buckets.get": true - * // } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * bucket.iam.testPermissions(test).then(function(data) { - * const permissions = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - testPermissions(permissions, optionsOrCallback, callback) { - if (!Array.isArray(permissions) && typeof permissions !== 'string') { - throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); - } - const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); - const permissionsArray = Array.isArray(permissions) - ? 
permissions - : [permissions]; - const req = Object.assign({ - permissions: permissionsArray, - }, options); - this.request_({ - uri: '/iam/testPermissions', - qs: req, - useQuerystring: true, - }, (err, resp) => { - if (err) { - cb(err, null, resp); - return; - } - const availablePermissions = Array.isArray(resp.permissions) - ? resp.permissions - : []; - const permissionsHash = permissionsArray.reduce((acc, permission) => { - acc[permission] = availablePermissions.indexOf(permission) > -1; - return acc; - }, {}); - cb(null, permissionsHash, resp); - }); - } -} -exports.Iam = Iam; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Iam); -//# sourceMappingURL=iam.js.map - -/***/ }), - -/***/ 8174: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Storage = exports.IdempotencyStrategy = exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = void 0; -var bucket_1 = __nccwpck_require__(8561); -Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_1.Bucket; } })); -__exportStar(__nccwpck_require__(4921), exports); -var channel_1 = __nccwpck_require__(8953); -Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_1.Channel; } })); -var file_1 = __nccwpck_require__(5373); -Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_1.File; } })); -__exportStar(__nccwpck_require__(8386), exports); -var hmacKey_1 = __nccwpck_require__(9930); -Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_1.HmacKey; } })); -var iam_1 = __nccwpck_require__(66); -Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_1.Iam; } })); -var notification_1 = __nccwpck_require__(7523); -Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_1.Notification; } })); -var storage_1 = __nccwpck_require__(346); -Object.defineProperty(exports, 
"IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_1.IdempotencyStrategy; } })); -Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_1.Storage; } })); -__exportStar(__nccwpck_require__(9904), exports); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 2881: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; -var service_1 = __nccwpck_require__(8440); -Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_1.Service; } })); -var service_object_1 = __nccwpck_require__(822); -Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_1.ServiceObject; } })); -var util_1 = __nccwpck_require__(2469); -Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_1.ApiError; } })); -Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_1.util; } })); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 822: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ServiceObject = void 0; -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const promisify_1 = __nccwpck_require__(9203); -const events_1 = __nccwpck_require__(2361); -const extend = __nccwpck_require__(8171); -const util_1 = __nccwpck_require__(2469); -/** - * ServiceObject is a base class, meant to be inherited from by a "service - * object," like a BigQuery dataset or Storage bucket. - * - * Most of the time, these objects share common functionality; they can be - * created or deleted, and you can get or set their metadata. - * - * By inheriting from this class, a service object will be extended with these - * shared behaviors. Note that any method can be overridden when the service - * object requires specific behavior. - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -class ServiceObject extends events_1.EventEmitter { - /* - * @constructor - * @alias module:common/service-object - * - * @private - * - * @param {object} config - Configuration object. - * @param {string} config.baseUrl - The base URL to make API requests to. - * @param {string} config.createMethod - The method which creates this object. - * @param {string=} config.id - The identifier of the object. For example, the - * name of a Storage bucket or Pub/Sub topic. - * @param {object=} config.methods - A map of each method name that should be inherited. - * @param {object} config.methods[].reqOpts - Default request options for this - * particular method. A common use case is when `setMetadata` requires a - * `PUT` method to override the default `PATCH`. 
- * @param {object} config.parent - The parent service instance. For example, an - * instance of Storage if the object is Bucket. - */ - constructor(config) { - super(); - this.metadata = {}; - this.baseUrl = config.baseUrl; - this.parent = config.parent; // Parent class. - this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). - this.createMethod = config.createMethod; - this.methods = config.methods || {}; - this.interceptors = []; - this.projectId = config.projectId; - if (config.methods) { - // This filters the ServiceObject instance (e.g. a "File") to only have - // the configured methods. We make a couple of exceptions for core- - // functionality ("request()" and "getRequestInterceptors()") - Object.getOwnPropertyNames(ServiceObject.prototype) - .filter(methodName => { - return ( - // All ServiceObjects need `request` and `getRequestInterceptors`. - // clang-format off - !/^request/.test(methodName) && - !/^getRequestInterceptors/.test(methodName) && - // clang-format on - // The ServiceObject didn't redefine the method. - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this[methodName] === - // eslint-disable-next-line @typescript-eslint/no-explicit-any - ServiceObject.prototype[methodName] && - // This method isn't wanted. - !config.methods[methodName]); - }) - .forEach(methodName => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this[methodName] = undefined; - }); - } - } - create(optionsOrCallback, callback) { - // eslint-disable-next-line @typescript-eslint/no-this-alias - const self = this; - const args = [this.id]; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - if (typeof optionsOrCallback === 'object') { - args.push(optionsOrCallback); - } - // Wrap the callback to return *this* instance of the object, not the - // newly-created one. - // tslint: disable-next-line no-any - function onCreate(...args) { - const [err, instance] = args; - if (!err) { - self.metadata = instance.metadata; - if (self.id && instance.metadata) { - self.id = instance.metadata.id; - } - args[1] = self; // replace the created `instance` with this one. - } - callback(...args); - } - args.push(onCreate); - // eslint-disable-next-line prefer-spread - this.createMethod.apply(null, args); - } - delete(optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const ignoreNotFound = options.ignoreNotFound; - delete options.ignoreNotFound; - const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; - const reqOpts = extend(true, { - method: 'DELETE', - uri: '', - }, methodConfig.reqOpts, { - qs: options, - }); - // The `request` method may have been overridden to hold any special - // behavior. Ensure we call the original `request` method. 
- ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => { - if (err) { - if (err.code === 404 && ignoreNotFound) { - err = null; - } - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback(err, ...args); - }); - } - exists(optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - this.get(options, err => { - if (err) { - if (err.code === 404) { - callback(null, false); - } - else { - callback(err); - } - return; - } - callback(null, true); - }); - } - get(optionsOrCallback, cb) { - // eslint-disable-next-line @typescript-eslint/no-this-alias - const self = this; - const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const options = Object.assign({}, opts); - const autoCreate = options.autoCreate && typeof this.create === 'function'; - delete options.autoCreate; - function onCreate(err, instance, apiResponse) { - if (err) { - if (err.code === 409) { - self.get(options, callback); - return; - } - callback(err, null, apiResponse); - return; - } - callback(null, instance, apiResponse); - } - this.getMetadata(options, (err, metadata) => { - if (err) { - if (err.code === 404 && autoCreate) { - const args = []; - if (Object.keys(options).length > 0) { - args.push(options); - } - args.push(onCreate); - self.create(...args); - return; - } - callback(err, null, metadata); - return; - } - callback(null, self, metadata); - }); - } - getMetadata(optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const methodConfig = (typeof this.methods.getMetadata === 'object' && - this.methods.getMetadata) || - {}; - const reqOpts = extend(true, { - uri: '', - }, methodConfig.reqOpts, { - qs: options, - }); - // The `request` method may have been overridden to hold any special - // behavior. Ensure we call the original `request` method. - ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { - this.metadata = body; - callback(err, this.metadata, res); - }); - } - /** - * Return the user's custom request interceptors. - */ - getRequestInterceptors() { - // Interceptors should be returned in the order they were assigned. - const localInterceptors = this.interceptors - .filter(interceptor => typeof interceptor.request === 'function') - .map(interceptor => interceptor.request); - return this.parent.getRequestInterceptors().concat(localInterceptors); - } - setMetadata(metadata, optionsOrCallback, cb) { - const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); - const methodConfig = (typeof this.methods.setMetadata === 'object' && - this.methods.setMetadata) || - {}; - const reqOpts = extend(true, {}, { - method: 'PATCH', - uri: '', - }, methodConfig.reqOpts, { - json: metadata, - qs: options, - }); - // The `request` method may have been overridden to hold any special - // behavior. Ensure we call the original `request` method. 
- ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { - this.metadata = body; - callback(err, this.metadata, res); - }); - } - request_(reqOpts, callback) { - reqOpts = extend(true, {}, reqOpts); - if (this.projectId) { - reqOpts.projectId = this.projectId; - } - const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; - const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; - if (isAbsoluteUrl) { - uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); - } - reqOpts.uri = uriComponents - .filter(x => x.trim()) // Limit to non-empty strings. - .map(uriComponent => { - const trimSlashesRegex = /^\/*|\/*$/g; - return uriComponent.replace(trimSlashesRegex, ''); - }) - .join('/'); - const childInterceptors = Array.isArray(reqOpts.interceptors_) - ? reqOpts.interceptors_ - : []; - const localInterceptors = [].slice.call(this.interceptors); - reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); - if (reqOpts.shouldReturnStream) { - return this.parent.requestStream(reqOpts); - } - this.parent.request(reqOpts, callback); - } - request(reqOpts, callback) { - this.request_(reqOpts, callback); - } - /** - * Make an authenticated API request. - * - * @param {object} reqOpts - Request options that are passed to `request`. - * @param {string} reqOpts.uri - A URI relative to the baseUrl. - */ - requestStream(reqOpts) { - const opts = extend(true, reqOpts, { shouldReturnStream: true }); - return this.request_(opts); - } -} -exports.ServiceObject = ServiceObject; -(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); -//# sourceMappingURL=service-object.js.map - -/***/ }), - -/***/ 8440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const extend = __nccwpck_require__(8171); -const uuid = __nccwpck_require__(5840); -const util_1 = __nccwpck_require__(2469); -exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; -class Service { - /** - * Service is a base class, meant to be inherited from by a "service," like - * BigQuery or Storage. - * - * This handles making authenticated requests by exposing a `makeReq_` - * function. - * - * @constructor - * @alias module:common/service - * - * @param {object} config - Configuration object. - * @param {string} config.baseUrl - The base URL to make API requests to. - * @param {string[]} config.scopes - The scopes required for the request. - * @param {object=} options - [Configuration object](#/docs). - */ - constructor(config, options = {}) { - this.baseUrl = config.baseUrl; - this.apiEndpoint = config.apiEndpoint; - this.timeout = options.timeout; - this.globalInterceptors = Array.isArray(options.interceptors_) - ? 
options.interceptors_ - : []; - this.interceptors = []; - this.packageJson = config.packageJson; - this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; - this.projectIdRequired = config.projectIdRequired !== false; - this.providedUserAgent = options.userAgent; - const reqCfg = extend({}, config, { - projectIdRequired: this.projectIdRequired, - projectId: this.projectId, - authClient: options.authClient, - credentials: options.credentials, - keyFile: options.keyFilename, - email: options.email, - token: options.token, - }); - this.makeAuthenticatedRequest = - util_1.util.makeAuthenticatedRequestFactory(reqCfg); - this.authClient = this.makeAuthenticatedRequest.authClient; - this.getCredentials = this.makeAuthenticatedRequest.getCredentials; - const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; - if (isCloudFunctionEnv) { - this.interceptors.push({ - request(reqOpts) { - reqOpts.forever = false; - return reqOpts; - }, - }); - } - } - /** - * Return the user's custom request interceptors. - */ - getRequestInterceptors() { - // Interceptors should be returned in the order they were assigned. - return [].slice - .call(this.globalInterceptors) - .concat(this.interceptors) - .filter(interceptor => typeof interceptor.request === 'function') - .map(interceptor => interceptor.request); - } - getProjectId(callback) { - if (!callback) { - return this.getProjectIdAsync(); - } - this.getProjectIdAsync().then(p => callback(null, p), callback); - } - async getProjectIdAsync() { - const projectId = await this.authClient.getProjectId(); - if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { - this.projectId = projectId; - } - return this.projectId; - } - request_(reqOpts, callback) { - reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout }); - const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; - const uriComponents = [this.baseUrl]; - if (this.projectIdRequired) { - if (reqOpts.projectId) { - uriComponents.push('projects'); - uriComponents.push(reqOpts.projectId); - } - else { - uriComponents.push('projects'); - uriComponents.push(this.projectId); - } - } - uriComponents.push(reqOpts.uri); - if (isAbsoluteUrl) { - uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); - } - reqOpts.uri = uriComponents - .map(uriComponent => { - const trimSlashesRegex = /^\/*|\/*$/g; - return uriComponent.replace(trimSlashesRegex, ''); - }) - .join('/') - // Some URIs have colon separators. - // Bad: https://.../projects/:list - // Good: https://.../projects:list - .replace(/\/:/g, ':'); - const requestInterceptors = this.getRequestInterceptors(); - const interceptorArray = Array.isArray(reqOpts.interceptors_) - ? 
reqOpts.interceptors_ - : []; - interceptorArray.forEach(interceptor => { - if (typeof interceptor.request === 'function') { - requestInterceptors.push(interceptor.request); - } - }); - requestInterceptors.forEach(requestInterceptor => { - reqOpts = requestInterceptor(reqOpts); - }); - delete reqOpts.interceptors_; - const pkg = this.packageJson; - let userAgent = util_1.util.getUserAgentFromPackageJson(pkg); - if (this.providedUserAgent) { - userAgent = `${this.providedUserAgent} ${userAgent}`; - } - reqOpts.headers = extend({}, reqOpts.headers, { - 'User-Agent': userAgent, - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`, - }); - if (reqOpts.shouldReturnStream) { - return this.makeAuthenticatedRequest(reqOpts); - } - else { - this.makeAuthenticatedRequest(reqOpts, callback); - } - } - /** - * Make an authenticated API request. - * - * @param {object} reqOpts - Request options that are passed to `request`. - * @param {string} reqOpts.uri - A URI relative to the baseUrl. - * @param {function} callback - The callback function passed to `request`. - */ - request(reqOpts, callback) { - Service.prototype.request_.call(this, reqOpts, callback); - } - /** - * Make an authenticated API request. - * - * @param {object} reqOpts - Request options that are passed to `request`. - * @param {string} reqOpts.uri - A URI relative to the baseUrl. - */ - requestStream(reqOpts) { - const opts = extend(true, reqOpts, { shouldReturnStream: true }); - return Service.prototype.request_.call(this, opts); - } -} -exports.Service = Service; -//# sourceMappingURL=service.js.map - -/***/ }), - -/***/ 2469: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0; -/*! - * @module common/util - */ -const projectify_1 = __nccwpck_require__(3497); -const ent = __nccwpck_require__(1151); -const extend = __nccwpck_require__(8171); -const google_auth_library_1 = __nccwpck_require__(810); -const retryRequest = __nccwpck_require__(3515); -const stream_1 = __nccwpck_require__(2781); -const teeny_request_1 = __nccwpck_require__(6886); -const uuid = __nccwpck_require__(5840); -const service_1 = __nccwpck_require__(8440); -const packageJson = __nccwpck_require__(5458); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const duplexify = __nccwpck_require__(6599); -const requestDefaults = { - timeout: 60000, - gzip: true, - forever: true, - pool: { - maxSockets: Infinity, - }, -}; -/** - * Default behavior: Automatically retry retriable server errors. - * - * @const {boolean} - * @private - */ -const AUTO_RETRY_DEFAULT = true; -/** - * Default behavior: Only attempt to retry retriable errors 3 times. 
- * - * @const {number} - * @private - */ -const MAX_RETRY_DEFAULT = 3; -/** - * Custom error type for API errors. - * - * @param {object} errorBody - Error object. - */ -class ApiError extends Error { - constructor(errorBodyOrMessage) { - super(); - if (typeof errorBodyOrMessage !== 'object') { - this.message = errorBodyOrMessage || ''; - return; - } - const errorBody = errorBodyOrMessage; - this.code = errorBody.code; - this.errors = errorBody.errors; - this.response = errorBody.response; - try { - this.errors = JSON.parse(this.response.body).error.errors; - } - catch (e) { - this.errors = errorBody.errors; - } - this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); - Error.captureStackTrace(this); - } - /** - * Pieces together an error message by combining all unique error messages - * returned from a single GoogleError - * - * @private - * - * @param {GoogleErrorBody} err The original error. - * @param {GoogleInnerError[]} [errors] Inner errors, if any. - * @returns {string} - */ - static createMultiErrorMessage(err, errors) { - const messages = new Set(); - if (err.message) { - messages.add(err.message); - } - if (errors && errors.length) { - errors.forEach(({ message }) => messages.add(message)); - } - else if (err.response && err.response.body) { - messages.add(ent.decode(err.response.body.toString())); - } - else if (!err.message) { - messages.add('A failure occurred during this request.'); - } - let messageArr = Array.from(messages); - if (messageArr.length > 1) { - messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); - messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); - messageArr.push('\n'); - } - return messageArr.join('\n'); - } -} -exports.ApiError = ApiError; -/** - * Custom error type for partial errors returned from the API. - * - * @param {object} b - Error object. - */ -class PartialFailureError extends Error { - constructor(b) { - super(); - const errorObject = b; - this.errors = errorObject.errors; - this.name = 'PartialFailureError'; - this.response = errorObject.response; - this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); - } -} -exports.PartialFailureError = PartialFailureError; -class Util { - constructor() { - this.ApiError = ApiError; - this.PartialFailureError = PartialFailureError; - } - /** - * No op. - * - * @example - * function doSomething(callback) { - * callback = callback || noop; - * } - */ - noop() { } - /** - * Uniformly process an API response. - * - * @param {*} err - Error value. - * @param {*} resp - Response value. - * @param {*} body - Body value. - * @param {function} callback - The callback function. - */ - handleResp(err, resp, body, callback) { - callback = callback || util.noop; - const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body)); - // Assign the parsed body to resp.body, even if { json: false } was passed - // as a request option. - // We assume that nobody uses the previously unparsed value of resp.body. - if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { - parsedResp.resp.body = parsedResp.body; - } - if (parsedResp.err && resp) { - parsedResp.err.response = resp; - } - callback(parsedResp.err, parsedResp.body, parsedResp.resp); - } - /** - * Sniff an incoming HTTP response message for errors. - * - * @param {object} httpRespMessage - An incoming HTTP response message from `request`. 
- * @return {object} parsedHttpRespMessage - The parsed response. - * @param {?error} parsedHttpRespMessage.err - An error detected. - * @param {object} parsedHttpRespMessage.resp - The original response object. - */ - parseHttpRespMessage(httpRespMessage) { - const parsedHttpRespMessage = { - resp: httpRespMessage, - }; - if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { - // Unknown error. Format according to ApiError standard. - parsedHttpRespMessage.err = new ApiError({ - errors: new Array(), - code: httpRespMessage.statusCode, - message: httpRespMessage.statusMessage, - response: httpRespMessage, - }); - } - return parsedHttpRespMessage; - } - /** - * Parse the response body from an HTTP request. - * - * @param {object} body - The response body. - * @return {object} parsedHttpRespMessage - The parsed response. - * @param {?error} parsedHttpRespMessage.err - An error detected. - * @param {object} parsedHttpRespMessage.body - The original body value provided - * will try to be JSON.parse'd. If it's successful, the parsed value will - * be returned here, otherwise the original value and an error will be returned. - */ - parseHttpRespBody(body) { - const parsedHttpRespBody = { - body, - }; - if (typeof body === 'string') { - try { - parsedHttpRespBody.body = JSON.parse(body); - } - catch (err) { - parsedHttpRespBody.body = body; - } - } - if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { - // Error from JSON API. - parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); - } - return parsedHttpRespBody; - } - /** - * Take a Duplexify stream, fetch an authenticated connection header, and - * create an outgoing writable stream. - * - * @param {Duplexify} dup - Duplexify stream. - * @param {object} options - Configuration object. - * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. - * @param {object} options.metadata - Metadata to send at the head of the request. - * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. - * @param {string=} options.request.method - Default: "POST". - * @param {string=} options.request.qs.uploadType - Default: "multipart". - * @param {string=} options.streamContentType - Default: "application/octet-stream". - * @param {function} onComplete - Callback, executed after the writable Request stream has completed. 
- */ - makeWritableStream(dup, options, onComplete) { - onComplete = onComplete || util.noop; - const writeStream = new ProgressStream(); - writeStream.on('progress', evt => dup.emit('progress', evt)); - dup.setWritable(writeStream); - const defaultReqOpts = { - method: 'POST', - qs: { - uploadType: 'multipart', - }, - timeout: 0, - maxRetries: 0, - }; - const metadata = options.metadata || {}; - const reqOpts = extend(true, defaultReqOpts, options.request, { - multipart: [ - { - 'Content-Type': 'application/json', - body: JSON.stringify(metadata), - }, - { - 'Content-Type': metadata.contentType || 'application/octet-stream', - body: writeStream, - }, - ], - }); - options.makeAuthenticatedRequest(reqOpts, { - onAuthenticated(err, authenticatedReqOpts) { - if (err) { - dup.destroy(err); - return; - } - requestDefaults.headers = util._getDefaultHeaders(); - const request = teeny_request_1.teenyRequest.defaults(requestDefaults); - request(authenticatedReqOpts, (err, resp, body) => { - util.handleResp(err, resp, body, (err, data) => { - if (err) { - dup.destroy(err); - return; - } - dup.emit('response', resp); - onComplete(data); - }); - }); - }, - }); - } - /** - * Returns true if the API request should be retried, given the error that was - * given the first time the request was attempted. This is used for rate limit - * related errors as well as intermittent server errors. - * - * @param {error} err - The API error to check if it is appropriate to retry. - * @return {boolean} True if the API request should be retried, false otherwise. - */ - shouldRetryRequest(err) { - if (err) { - if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { - return true; - } - if (err.errors) { - for (const e of err.errors) { - const reason = e.reason; - if (reason === 'rateLimitExceeded') { - return true; - } - if (reason === 'userRateLimitExceeded') { - return true; - } - if (reason && reason.includes('EAI_AGAIN')) { - return true; - } - } - } - } - return false; - } - /** - * Get a function for making authenticated requests. - * - * @param {object} config - Configuration object. - * @param {boolean=} config.autoRetry - Automatically retry requests if the - * response is related to rate limits or certain intermittent server - * errors. We will exponentially backoff subsequent requests by default. - * (default: true) - * @param {object=} config.credentials - Credentials object. - * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. - * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. - * @param {string=} config.email - Account email address, required for PEM/P12 usage. - * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) - * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. - * @param {array} config.scopes - Array of scopes required for the API. 
- */ - makeAuthenticatedRequestFactory(config) { - const googleAutoAuthConfig = extend({}, config); - if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) { - delete googleAutoAuthConfig.projectId; - } - let authClient; - if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { - // Use an existing `GoogleAuth` - authClient = googleAutoAuthConfig.authClient; - } - else { - // Pass an `AuthClient` to `GoogleAuth`, if available - const config = { - ...googleAutoAuthConfig, - authClient: googleAutoAuthConfig.authClient, - }; - authClient = new google_auth_library_1.GoogleAuth(config); - } - function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { - let stream; - let projectId; - const reqConfig = extend({}, config); - let activeRequest_; - if (!optionsOrCallback) { - stream = duplexify(); - reqConfig.stream = stream; - } - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; - const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; - async function setProjectId() { - projectId = await authClient.getProjectId(); - } - const onAuthenticated = async (err, authenticatedReqOpts) => { - const authLibraryError = err; - const autoAuthFailed = err && - err.message.indexOf('Could not load the default credentials') > -1; - if (autoAuthFailed) { - // Even though authentication failed, the API might not actually - // care. - authenticatedReqOpts = reqOpts; - } - if (!err || autoAuthFailed) { - try { - // Try with existing `projectId` value - authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); - err = null; - } - catch (e) { - if (e instanceof projectify_1.MissingProjectIdError) { - // A `projectId` was required, but we don't have one. - try { - // Attempt to get the `projectId` - await setProjectId(); - authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); - err = null; - } - catch (e) { - // Re-use the "Could not load the default credentials error" if - // auto auth failed. - err = err || e; - } - } - else { - // Some other error unrelated to missing `projectId` - err = err || e; - } - } - } - if (err) { - if (stream) { - stream.destroy(err); - } - else { - const fn = options && options.onAuthenticated - ? options.onAuthenticated - : callback; - fn(err); - } - return; - } - if (options && options.onAuthenticated) { - options.onAuthenticated(null, authenticatedReqOpts); - } - else { - activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { - if (apiResponseError && - apiResponseError.code === 401 && - authLibraryError) { - // Re-use the "Could not load the default credentials error" if - // the API request failed due to missing credentials. - apiResponseError = authLibraryError; - } - callback(apiResponseError, ...params); - }); - } - }; - const prepareRequest = async () => { - try { - const getProjectId = async () => { - if (config.projectId && - config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) { - // The user provided a project ID. We don't need to check with the - // auth client, it could be incorrect. - return config.projectId; - } - if (config.projectIdRequired === false) { - // A projectId is not required. Return the default. - return service_1.DEFAULT_PROJECT_ID_TOKEN; - } - return setProjectId(); - }; - const authorizeRequest = async () => { - if (reqConfig.customEndpoint && - !reqConfig.useAuthWithCustomEndpoint) { - // Using a custom API override. 
Do not use `google-auth-library` for - // authentication. (ex: connecting to a local Datastore server) - return reqOpts; - } - else { - return authClient.authorizeRequest(reqOpts); - } - }; - const [_projectId, authorizedReqOpts] = await Promise.all([ - getProjectId(), - authorizeRequest(), - ]); - if (_projectId) { - projectId = _projectId; - } - return onAuthenticated(null, authorizedReqOpts); - } - catch (e) { - return onAuthenticated(e); - } - }; - prepareRequest(); - if (stream) { - return stream; - } - return { - abort() { - setImmediate(() => { - if (activeRequest_) { - activeRequest_.abort(); - activeRequest_ = null; - } - }); - }, - }; - } - const mar = makeAuthenticatedRequest; - mar.getCredentials = authClient.getCredentials.bind(authClient); - mar.authClient = authClient; - return mar; - } - /** - * Make a request through the `retryRequest` module with built-in error - * handling and exponential back off. - * - * @param {object} reqOpts - Request options in the format `request` expects. - * @param {object=} config - Configuration object. - * @param {boolean=} config.autoRetry - Automatically retry requests if the - * response is related to rate limits or certain intermittent server - * errors. We will exponentially backoff subsequent requests by default. - * (default: true) - * @param {number=} config.maxRetries - Maximum number of automatic retries - * attempted before returning the error. (default: 3) - * @param {object=} config.request - HTTP module for request calls. - * @param {function} callback - The callback function. - */ - makeRequest(reqOpts, config, callback) { - var _a, _b, _c, _d, _e; - let autoRetryValue = AUTO_RETRY_DEFAULT; - if (config.autoRetry !== undefined) { - autoRetryValue = config.autoRetry; - } - else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { - autoRetryValue = config.retryOptions.autoRetry; - } - let maxRetryValue = MAX_RETRY_DEFAULT; - if (config.maxRetries !== undefined) { - maxRetryValue = config.maxRetries; - } - else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { - maxRetryValue = config.retryOptions.maxRetries; - } - requestDefaults.headers = this._getDefaultHeaders(); - const options = { - request: teeny_request_1.teenyRequest.defaults(requestDefaults), - retries: autoRetryValue !== false ? maxRetryValue : 0, - noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, - shouldRetryFn(httpRespMessage) { - var _a, _b; - const err = util.parseHttpRespMessage(httpRespMessage).err; - if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { - return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); - } - return err && util.shouldRetryRequest(err); - }, - maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, - retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, - totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, - }; - if (typeof reqOpts.maxRetries === 'number') { - options.retries = reqOpts.maxRetries; - options.noResponseRetries = reqOpts.maxRetries; - } - if (!config.stream) { - return retryRequest(reqOpts, options, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (err, response, body) => { - util.handleResp(err, response, body, callback); - }); - } - const dup = config.stream; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let requestStream; - const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; - if (isGetRequest) { - requestStream = retryRequest(reqOpts, options); - dup.setReadable(requestStream); - } - else { - // Streaming writable HTTP requests cannot be retried. - requestStream = options.request(reqOpts); - dup.setWritable(requestStream); - } - // Replay the Request events back to the stream. - requestStream - .on('error', dup.destroy.bind(dup)) - .on('response', dup.emit.bind(dup, 'response')) - .on('complete', dup.emit.bind(dup, 'complete')); - dup.abort = requestStream.abort; - return dup; - } - /** - * Decorate the options about to be made in a request. - * - * @param {object} reqOpts - The options to be passed to `request`. - * @param {string} projectId - The project ID. - * @return {object} reqOpts - The decorated reqOpts. - */ - decorateRequest(reqOpts, projectId) { - delete reqOpts.autoPaginate; - delete reqOpts.autoPaginateVal; - delete reqOpts.objectMode; - if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { - delete reqOpts.qs.autoPaginate; - delete reqOpts.qs.autoPaginateVal; - reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); - } - if (Array.isArray(reqOpts.multipart)) { - reqOpts.multipart = reqOpts.multipart.map(part => { - return (0, projectify_1.replaceProjectIdToken)(part, projectId); - }); - } - if (reqOpts.json !== null && typeof reqOpts.json === 'object') { - delete reqOpts.json.autoPaginate; - delete reqOpts.json.autoPaginateVal; - reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); - } - reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); - return reqOpts; - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - isCustomType(unknown, module) { - function getConstructorName(obj) { - return obj.constructor && obj.constructor.name.toLowerCase(); - } - const moduleNameParts = module.split('/'); - const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); - const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); - if (subModuleName && getConstructorName(unknown) !== subModuleName) { - return false; - } - let walkingModule = unknown; - // eslint-disable-next-line no-constant-condition - while (true) { - if (getConstructorName(walkingModule) === parentModuleName) { - return true; - } - walkingModule = walkingModule.parent; - if (!walkingModule) { - return false; - } - } - } - /** - * Create a properly-formatted User-Agent string from a package.json file. - * - * @param {object} packageJson - A module's package.json file. - * @return {string} userAgent - The formatted User-Agent string. - */ - getUserAgentFromPackageJson(packageJson) { - const hyphenatedPackageName = packageJson.name - .replace('@google-cloud', 'gcloud-node') // For legacy purposes. - .replace('/', '-'); // For UA spec-compliance purposes. 
- return hyphenatedPackageName + '/' + packageJson.version; - } - /** - * Given two parameters, figure out if this is either: - * - Just a callback function - * - An options object, and then a callback function - * @param optionsOrCallback An options object or callback. - * @param cb A potentially undefined callback. - */ - maybeOptionsOrCallback(optionsOrCallback, cb) { - return typeof optionsOrCallback === 'function' - ? [{}, optionsOrCallback] - : [optionsOrCallback, cb]; - } - _getDefaultHeaders() { - return { - 'User-Agent': util.getUserAgentFromPackageJson(packageJson), - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`, - }; - } -} -exports.Util = Util; -/** - * Basic Passthrough Stream that records the number of bytes read - * every time the cursor is moved. - */ -class ProgressStream extends stream_1.Transform { - constructor() { - super(...arguments); - this.bytesRead = 0; - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - _transform(chunk, encoding, callback) { - this.bytesRead += chunk.length; - this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); - this.push(chunk); - callback(); - } -} -const util = new Util(); -exports.util = util; -//# sourceMappingURL=util.js.map - -/***/ }), - -/***/ 7523: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Notification = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const promisify_1 = __nccwpck_require__(9203); -/** - * The API-formatted resource description of the notification. - * - * Note: This is not guaranteed to be up-to-date when accessed. To get the - * latest record, call the `getMetadata()` method. - * - * @name Notification#metadata - * @type {object} - */ -/** - * A Notification object is created from your {@link Bucket} object using - * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub - * notifications. - * - * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} - * - * @class - * @hideconstructor - * - * @param {Bucket} bucket The bucket instance this notification is attached to. - * @param {string} id The ID of the notification. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const notification = myBucket.notification('1'); - * ``` - */ -class Notification extends nodejs_common_1.ServiceObject { - constructor(bucket, id) { - const methods = { - /** - * Creates a notification subscription for the bucket. 
- * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} - * @method Notification#create - * - * @param {Topic|string} topic The Cloud PubSub topic to which this - * subscription publishes. If the project ID is omitted, the current - * project ID will be used. - * - * Acceptable formats are: - * - `projects/grape-spaceship-123/topics/my-topic` - * - * - `my-topic` - * @param {CreateNotificationRequest} [options] Metadata to set for - * the notification. - * @param {CreateNotificationCallback} [callback] Callback function. - * @returns {Promise} - * @throws {Error} If a valid topic is not provided. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.create(function(err, notification, apiResponse) { - * if (!err) { - * // The notification was created successfully. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * notification.create().then(function(data) { - * const notification = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - create: true, - /** - * @typedef {array} NotificationExistsResponse - * @property {boolean} 0 Whether the notification exists or not. - */ - /** - * @callback NotificationExistsCallback - * @param {?Error} err Request error, if any. - * @param {boolean} exists Whether the notification exists or not. - */ - /** - * Check if the notification exists. - * - * @method Notification#exists - * @param {NotificationExistsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.exists(function(err, exists) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * notification.exists().then(function(data) { - * const exists = data[0]; - * }); - * ``` - */ - exists: true, - }; - super({ - parent: bucket, - baseUrl: '/notificationConfigs', - id: id.toString(), - createMethod: bucket.createNotification.bind(bucket), - methods, - }); - } - /** - * @typedef {array} DeleteNotificationResponse - * @property {object} 0 The full API response. - */ - /** - * Permanently deletes a notification subscription. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} - * - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {DeleteNotificationCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.delete(function(err, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * notification.delete().then(function(data) { - * const apiResponse = data[0]; - * }); - * - * ``` - * @example include:samples/deleteNotification.js - * region_tag:storage_delete_bucket_notification - * Another example: - */ - delete(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.request({ - method: 'DELETE', - uri: '', - qs: options, - }, callback || nodejs_common_1.util.noop); - } - /** - * Get a notification and its metadata if it exists. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} - * - * @param {object} [options] Configuration options. - * See {@link Bucket#createNotification} for create options. - * @param {boolean} [options.autoCreate] Automatically create the object if - * it does not exist. Default: `false`. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetNotificationCallback} [callback] Callback function. - * @return {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.get(function(err, notification, apiResponse) { - * // `notification.metadata` has been populated. - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * notification.get().then(function(data) { - * const notification = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ - get(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - const autoCreate = options.autoCreate; - delete options.autoCreate; - const onCreate = (err, notification, apiResponse) => { - if (err) { - if (err.code === 409) { - this.get(options, callback); - return; - } - callback(err, null, apiResponse); - return; - } - callback(null, notification, apiResponse); - }; - this.getMetadata(options, (err, metadata) => { - if (err) { - if (err.code === 404 && autoCreate) { - const args = []; - if (Object.keys(options).length > 0) { - args.push(options); - } - args.push(onCreate); - // eslint-disable-next-line - this.create.apply(this, args); - return; - } - callback(err, null, metadata); - return; - } - callback(null, this, metadata); - }); - } - /** - * Get the notification's metadata. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} - * - * @param {object} [options] Configuration options. - * @param {string} [options.userProject] The ID of the project which will be - * billed for the request. - * @param {GetNotificationMetadataCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * const notification = myBucket.notification('1'); - * - * notification.getMetadata(function(err, metadata, apiResponse) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. 
- * //- - * notification.getMetadata().then(function(data) { - * const metadata = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/getMetadataNotifications.js - * region_tag:storage_print_pubsub_bucket_notification - * Another example: - */ - getMetadata(optionsOrCallback, callback) { - const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; - callback = - typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; - this.request({ - uri: '', - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - this.metadata = resp; - callback(null, this.metadata, resp); - }); - } -} -exports.Notification = Notification; -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Notification); -//# sourceMappingURL=notification.js.map - -/***/ }), - -/***/ 8807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; -const abort_controller_1 = __nccwpck_require__(1659); -const crypto_1 = __nccwpck_require__(6113); -const extend = __nccwpck_require__(8171); -const gaxios = __nccwpck_require__(9555); -const google_auth_library_1 = __nccwpck_require__(810); -const stream_1 = __nccwpck_require__(2781); -const retry = __nccwpck_require__(3415); -const uuid = __nccwpck_require__(5840); -const NOT_FOUND_STATUS_CODE = 404; -const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; -const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; -const packageJson = __nccwpck_require__(5458); -exports.PROTOCOL_REGEX = /^(\w*):\/\//; -class Upload extends stream_1.Writable { - constructor(cfg) { - super(); - this.numBytesWritten = 0; - this.numRetries = 0; - this.currentInvocationId = { - chunk: uuid.v4(), - uri: uuid.v4(), - offset: uuid.v4(), - }; - this.upstreamChunkBuffer = Buffer.alloc(0); - this.chunkBufferEncoding = undefined; - this.numChunksReadInRequest = 0; - /** - * A chunk used for caching the most recent upload chunk. - * We should not assume that the server received all bytes sent in the request. 
- * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload - */ - this.lastChunkSent = Buffer.alloc(0); - this.upstreamEnded = false; - cfg = cfg || {}; - if (!cfg.bucket || !cfg.file) { - throw new Error('A bucket and file name are required'); - } - cfg.authConfig = cfg.authConfig || {}; - cfg.authConfig.scopes = [ - 'https://www.googleapis.com/auth/devstorage.full_control', - ]; - this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); - this.apiEndpoint = 'https://storage.googleapis.com'; - if (cfg.apiEndpoint) { - this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); - if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) { - this.authClient = gaxios; - } - } - this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; - this.bucket = cfg.bucket; - const cacheKeyElements = [cfg.bucket, cfg.file]; - if (typeof cfg.generation === 'number') { - cacheKeyElements.push(`${cfg.generation}`); - } - this.cacheKey = cacheKeyElements.join('/'); - this.customRequestOptions = cfg.customRequestOptions || {}; - this.file = cfg.file; - this.generation = cfg.generation; - this.kmsKeyName = cfg.kmsKeyName; - this.metadata = cfg.metadata || {}; - this.offset = cfg.offset; - this.origin = cfg.origin; - this.params = cfg.params || {}; - this.userProject = cfg.userProject; - this.chunkSize = cfg.chunkSize; - this.retryOptions = cfg.retryOptions; - if (cfg.key) { - const base64Key = Buffer.from(cfg.key).toString('base64'); - this.encryption = { - key: base64Key, - hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), - }; - } - this.predefinedAcl = cfg.predefinedAcl; - if (cfg.private) - this.predefinedAcl = 'private'; - if (cfg.public) - this.predefinedAcl = 'publicRead'; - const autoRetry = cfg.retryOptions.autoRetry; - this.uriProvidedManually = !!cfg.uri; - this.uri = cfg.uri; - this.numBytesWritten = 0; - this.numRetries = 0; // counter for number of retries currently executed - if (!autoRetry) { - cfg.retryOptions.maxRetries = 0; - } - this.timeOfFirstRequest = Date.now(); - const contentLength = cfg.metadata - ? Number(cfg.metadata.contentLength) - : NaN; - this.contentLength = isNaN(contentLength) ? '*' : contentLength; - this.once('writing', () => { - if (this.uri) { - this.continueUploading(); - } - else { - this.createURI(err => { - if (err) { - return this.destroy(err); - } - this.startUploading(); - return; - }); - } - }); - } - /** - * Prevent 'finish' event until the upload has succeeded. - * - * @param fireFinishEvent The finish callback - */ - _final(fireFinishEvent = () => { }) { - this.upstreamEnded = true; - this.once('uploadFinished', fireFinishEvent); - process.nextTick(() => { - this.emit('upstreamFinished'); - // it's possible `_write` may not be called - namely for empty object uploads - this.emit('writing'); - }); - } - /** - * Handles incoming data from upstream - * - * @param chunk The chunk to append to the buffer - * @param encoding The encoding of the chunk - * @param readCallback A callback for when the buffer has been read downstream - */ - _write(chunk, encoding, readCallback = () => { }) { - // Backwards-compatible event - this.emit('writing'); - this.upstreamChunkBuffer = Buffer.concat([ - this.upstreamChunkBuffer, - typeof chunk === 'string' ? 
Buffer.from(chunk, encoding) : chunk, - ]); - this.chunkBufferEncoding = encoding; - this.once('readFromChunkBuffer', readCallback); - process.nextTick(() => this.emit('wroteToChunkBuffer')); - } - /** - * Prepends data back to the upstream chunk buffer. - * - * @param chunk The data to prepend - */ - unshiftChunkBuffer(chunk) { - this.upstreamChunkBuffer = Buffer.concat([chunk, this.upstreamChunkBuffer]); - } - /** - * Retrieves data from upstream's buffer. - * - * @param limit The maximum amount to return from the buffer. - * @returns The data requested. - */ - pullFromChunkBuffer(limit) { - const chunk = this.upstreamChunkBuffer.slice(0, limit); - this.upstreamChunkBuffer = this.upstreamChunkBuffer.slice(limit); - // notify upstream we've read from the buffer so it can potentially - // send more data down. - process.nextTick(() => this.emit('readFromChunkBuffer')); - return chunk; - } - /** - * A handler for determining if data is ready to be read from upstream. - * - * @returns If there will be more chunks to read in the future - */ - async waitForNextChunk() { - const willBeMoreChunks = await new Promise(resolve => { - // There's data available - it should be digested - if (this.upstreamChunkBuffer.byteLength) { - return resolve(true); - } - // The upstream writable ended, we shouldn't expect any more data. - if (this.upstreamEnded) { - return resolve(false); - } - // Nothing immediate seems to be determined. We need to prepare some - // listeners to determine next steps... - const wroteToChunkBufferCallback = () => { - removeListeners(); - return resolve(true); - }; - const upstreamFinishedCallback = () => { - removeListeners(); - // this should be the last chunk, if there's anything there - if (this.upstreamChunkBuffer.length) - return resolve(true); - return resolve(false); - }; - // Remove listeners when we're ready to callback. - const removeListeners = () => { - this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); - this.removeListener('upstreamFinished', upstreamFinishedCallback); - }; - // If there's data recently written it should be digested - this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); - // If the upstream finishes let's see if there's anything to grab - this.once('upstreamFinished', upstreamFinishedCallback); - }); - return willBeMoreChunks; - } - /** - * Reads data from upstream up to the provided `limit`. - * Ends when the limit has reached or no data is expected to be pushed from upstream. - * - * @param limit The most amount of data this iterator should return. `Infinity` by default. 
- * @param oneChunkMode Determines if one, exhaustive chunk is yielded for the iterator - */ - async *upstreamIterator(limit = Infinity, oneChunkMode) { - let completeChunk = Buffer.alloc(0); - // read from upstream chunk buffer - while (limit && (await this.waitForNextChunk())) { - // read until end or limit has been reached - const chunk = this.pullFromChunkBuffer(limit); - limit -= chunk.byteLength; - if (oneChunkMode) { - // return 1 chunk at the end of iteration - completeChunk = Buffer.concat([completeChunk, chunk]); - } - else { - // return many chunks throughout iteration - yield { - chunk, - encoding: this.chunkBufferEncoding, - }; - } - } - if (oneChunkMode) { - yield { - chunk: completeChunk, - encoding: this.chunkBufferEncoding, - }; - } - } - createURI(callback) { - if (!callback) { - return this.createURIAsync(); - } - this.createURIAsync().then(r => callback(null, r), callback); - } - async createURIAsync() { - const metadata = { ...this.metadata }; - const headers = {}; - // Delete content length and content type from metadata if they exist. - // These are headers and should not be sent as part of the metadata. - if (metadata.contentLength) { - headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); - delete metadata.contentLength; - } - if (metadata.contentType) { - headers['X-Upload-Content-Type'] = metadata.contentType; - delete metadata.contentType; - } - // Check if headers already exist before creating new ones - const reqOpts = { - method: 'POST', - url: [this.baseURI, this.bucket, 'o'].join('/'), - params: Object.assign({ - name: this.file, - uploadType: 'resumable', - }, this.params), - data: metadata, - headers: { - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`, - ...headers, - }, - }; - if (metadata.contentLength) { - reqOpts.headers['X-Upload-Content-Length'] = - metadata.contentLength.toString(); - } - if (metadata.contentType) { - reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; - } - if (typeof this.generation !== 'undefined') { - reqOpts.params.ifGenerationMatch = this.generation; - } - if (this.kmsKeyName) { - reqOpts.params.kmsKeyName = this.kmsKeyName; - } - if (this.predefinedAcl) { - reqOpts.params.predefinedAcl = this.predefinedAcl; - } - if (this.origin) { - reqOpts.headers.Origin = this.origin; - } - const uri = await retry(async (bail) => { - var _a, _b, _c; - try { - const res = await this.makeRequest(reqOpts); - // We have successfully got a URI we can now create a new invocation id - this.currentInvocationId.uri = uuid.v4(); - return res.headers.location; - } - catch (err) { - const e = err; - const apiError = { - code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, - name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, - message: (_c = e.response) === null || _c === void 0 ? 
void 0 : _c.statusText, - errors: [ - { - reason: e.code, - }, - ], - }; - if (this.retryOptions.maxRetries > 0 && - this.retryOptions.retryableErrorFn(apiError)) { - throw e; - } - else { - return bail(e); - } - } - }, { - retries: this.retryOptions.maxRetries, - factor: this.retryOptions.retryDelayMultiplier, - maxTimeout: this.retryOptions.maxRetryDelay * 1000, - maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds - }); - this.uri = uri; - this.offset = 0; - return uri; - } - async continueUploading() { - if (typeof this.offset === 'number') { - this.startUploading(); - return; - } - await this.getAndSetOffset(); - this.startUploading(); - } - async startUploading() { - const multiChunkMode = !!this.chunkSize; - let responseReceived = false; - this.numChunksReadInRequest = 0; - if (!this.offset) { - this.offset = 0; - } - // Check if the offset (server) is too far behind the current stream - if (this.offset < this.numBytesWritten) { - const delta = this.numBytesWritten - this.offset; - const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; - this.emit('error', new RangeError(message)); - return; - } - // Check if we should 'fast-forward' to the relevant data to upload - if (this.numBytesWritten < this.offset) { - // 'fast-forward' to the byte where we need to upload. - // only push data from the byte after the one we left off on - const fastForwardBytes = this.offset - this.numBytesWritten; - for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { - _chunk; // discard the data up until the point we want - } - this.numBytesWritten = this.offset; - } - let expectedUploadSize = undefined; - // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available - if (typeof this.contentLength === 'number') { - expectedUploadSize = this.contentLength - this.numBytesWritten; - } - // `expectedUploadSize` should be no more than the `chunkSize`. - // It's possible this is the last chunk request for a multiple - // chunk upload, thus smaller than the chunk size. - if (this.chunkSize) { - expectedUploadSize = expectedUploadSize - ? Math.min(this.chunkSize, expectedUploadSize) - : this.chunkSize; - } - // A queue for the upstream data - const upstreamQueue = this.upstreamIterator(expectedUploadSize, multiChunkMode // multi-chunk mode should return 1 chunk per request - ); - // The primary read stream for this request. This stream retrieves no more - // than the exact requested amount from upstream. 
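        // Editor's aside (illustrative, not part of the upstream source): with
        // contentLength = 10485760 (10 MiB), numBytesWritten = 8388608 (8 MiB) and
        // chunkSize = 4194304 (4 MiB), the logic above yields
        // expectedUploadSize = Math.min(4194304, 10485760 - 8388608) = 2097152,
        // i.e. the final, smaller chunk of a multi-chunk upload.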
- const requestStream = new stream_1.Readable({ - read: async () => { - // Don't attempt to retrieve data upstream if we already have a response - if (responseReceived) - requestStream.push(null); - const result = await upstreamQueue.next(); - if (result.value) { - this.numChunksReadInRequest++; - this.lastChunkSent = result.value.chunk; - this.numBytesWritten += result.value.chunk.byteLength; - this.emit('progress', { - bytesWritten: this.numBytesWritten, - contentLength: this.contentLength, - }); - requestStream.push(result.value.chunk, result.value.encoding); - } - if (result.done) { - requestStream.push(null); - } - }, - }); - const headers = { - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`, - }; - // If using multiple chunk upload, set appropriate header - if (multiChunkMode) { - // We need to know how much data is available upstream to set the `Content-Range` header. - const oneChunkIterator = this.upstreamIterator(expectedUploadSize, true); - const { value } = await oneChunkIterator.next(); - const bytesToUpload = value.chunk.byteLength; - // Important: we want to know if the upstream has ended and the queue is empty before - // unshifting data back into the queue. This way we will know if this is the last request or not. - const isLastChunkOfUpload = !(await this.waitForNextChunk()); - // Important: put the data back in the queue for the actual upload iterator - this.unshiftChunkBuffer(value.chunk); - let totalObjectSize = this.contentLength; - if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) { - // Let's let the server know this is the last chunk since - // we didn't know the content-length beforehand. - totalObjectSize = bytesToUpload + this.numBytesWritten; - } - // `- 1` as the ending byte is inclusive in the request. - const endingByte = bytesToUpload + this.numBytesWritten - 1; - // `Content-Length` for multiple chunk uploads is the size of the chunk, - // not the overall object - headers['Content-Length'] = bytesToUpload; - headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; - } - else { - headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; - } - const reqOpts = { - method: 'PUT', - url: this.uri, - headers, - body: requestStream, - }; - try { - const resp = await this.makeRequestStream(reqOpts); - if (resp) { - responseReceived = true; - this.responseHandler(resp); - } - } - catch (e) { - const err = e; - if (this.retryOptions.retryableErrorFn(err)) { - this.attemptDelayedRetry({ - status: NaN, - data: err, - }); - return; - } - this.destroy(err); - } - } - // Process the API response to look for errors that came in - // the response body. - responseHandler(resp) { - if (resp.data.error) { - this.destroy(resp.data.error); - return; - } - // At this point we can safely create a new id for the chunk - this.currentInvocationId.chunk = uuid.v4(); - const shouldContinueWithNextMultiChunkRequest = this.chunkSize && - resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && - resp.headers.range; - if (shouldContinueWithNextMultiChunkRequest) { - // Use the upper value in this header to determine where to start the next chunk. - // We should not assume that the server received all bytes sent in the request. 
- // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload - const range = resp.headers.range; - this.offset = Number(range.split('-')[1]) + 1; - // We should not assume that the server received all bytes sent in the request. - // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload - const missingBytes = this.numBytesWritten - this.offset; - if (missingBytes) { - const dataToPrependForResending = this.lastChunkSent.slice(-missingBytes); - // As multi-chunk uploads send one chunk per request and pulls one - // chunk into the pipeline, prepending the missing bytes back should - // be fine for the next request. - this.unshiftChunkBuffer(dataToPrependForResending); - this.numBytesWritten -= missingBytes; - this.lastChunkSent = Buffer.alloc(0); - } - // continue uploading next chunk - this.continueUploading(); - } - else if (!this.isSuccessfulResponse(resp.status)) { - const err = new Error('Upload failed'); - err.code = resp.status; - err.name = 'Upload failed'; - if (resp === null || resp === void 0 ? void 0 : resp.data) { - err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; - } - this.destroy(err); - } - else { - // remove the last chunk sent to free memory - this.lastChunkSent = Buffer.alloc(0); - if (resp && resp.data) { - resp.data.size = Number(resp.data.size); - } - this.emit('metadata', resp.data); - // Allow the object (Upload) to continue naturally so the user's - // "finish" event fires. - this.emit('uploadFinished'); - } - } - async getAndSetOffset() { - const opts = { - method: 'PUT', - url: this.uri, - headers: { - 'Content-Length': 0, - 'Content-Range': 'bytes */*', - 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`, - }, - }; - try { - const resp = await this.makeRequest(opts); - // Successfully got the offset we can now create a new offset invocation id - this.currentInvocationId.offset = uuid.v4(); - if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { - if (resp.headers.range) { - const range = resp.headers.range; - this.offset = Number(range.split('-')[1]) + 1; - return; - } - } - this.offset = 0; - } - catch (e) { - const err = e; - if (this.retryOptions.retryableErrorFn(err)) { - this.attemptDelayedRetry({ - status: NaN, - data: err, - }); - return; - } - this.destroy(err); - } - } - async makeRequest(reqOpts) { - if (this.encryption) { - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; - reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); - reqOpts.headers['x-goog-encryption-key-sha256'] = - this.encryption.hash.toString(); - } - if (this.userProject) { - reqOpts.params = reqOpts.params || {}; - reqOpts.params.userProject = this.userProject; - } - // Let gaxios know we will handle a 308 error code ourselves. 
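        // Editor's aside (illustrative, not part of the upstream source): a 308
        // response carrying `range: 'bytes=0-262143'` means the server persisted
        // bytes 0-262143, so responseHandler above resumes at offset
        // Number('262143') + 1 = 262144; if numBytesWritten were 300000, the last
        // 300000 - 262144 = 37856 bytes of lastChunkSent would be unshifted back
        // onto the buffer and resent with the next chunk.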
- reqOpts.validateStatus = (status) => { - return (this.isSuccessfulResponse(status) || - status === RESUMABLE_INCOMPLETE_STATUS_CODE); - }; - const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts); - const res = await this.authClient.request(combinedReqOpts); - if (res.data && res.data.error) { - throw res.data.error; - } - return res; - } - async makeRequestStream(reqOpts) { - const controller = new abort_controller_1.default(); - const errorCallback = () => controller.abort(); - this.once('error', errorCallback); - if (this.userProject) { - reqOpts.params = reqOpts.params || {}; - reqOpts.params.userProject = this.userProject; - } - reqOpts.signal = controller.signal; - reqOpts.validateStatus = () => true; - const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts); - const res = await this.authClient.request(combinedReqOpts); - const successfulRequest = this.onResponse(res); - this.removeListener('error', errorCallback); - return successfulRequest ? res : null; - } - /** - * @return {bool} is the request good? - */ - onResponse(resp) { - if (resp.status !== 200 && - this.retryOptions.retryableErrorFn({ - code: resp.status, - message: resp.statusText, - name: resp.statusText, - })) { - this.attemptDelayedRetry(resp); - return false; - } - this.emit('response', resp); - return true; - } - /** - * @param resp GaxiosResponse object from previous attempt - */ - attemptDelayedRetry(resp) { - if (this.numRetries < this.retryOptions.maxRetries) { - if (resp.status === NOT_FOUND_STATUS_CODE && - this.numChunksReadInRequest === 0) { - this.startUploading(); - } - else { - const retryDelay = this.getRetryDelay(); - if (retryDelay <= 0) { - this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); - return; - } - // Unshift the most recent chunk back in case it's needed for the next - // request. - this.numBytesWritten -= this.lastChunkSent.byteLength; - this.unshiftChunkBuffer(this.lastChunkSent); - this.lastChunkSent = Buffer.alloc(0); - // We don't know how much data has been received by the server. - // `continueUploading` will recheck the offset via `getAndSetOffset`. - // If `offset` < `numberBytesReceived` then we will raise a RangeError - // as we've streamed too much data that has been missed - this should - // not be the case for multi-chunk uploads as `lastChunkSent` is the - // body of the entire request. - this.offset = undefined; - setTimeout(this.continueUploading.bind(this), retryDelay); - } - this.numRetries++; - } - else { - this.destroy(new Error('Retry limit exceeded - ' + resp.data)); - } - } - /** - * @returns {number} the amount of time to wait before retrying the request - */ - getRetryDelay() { - const randomMs = Math.round(Math.random() * 1000); - const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * - 1000 + - randomMs; - const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - - (Date.now() - this.timeOfFirstRequest); - const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; - return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); - } - /* - * Prepare user-defined API endpoint for compatibility with our API. 
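 * For example (illustrative): 'localhost:4443/' carries no protocol and becomes
 * 'https://localhost:4443', while 'http://example.com//' keeps its protocol and
 * only loses the trailing slashes, becoming 'http://example.com'.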
- */ - sanitizeEndpoint(url) { - if (!exports.PROTOCOL_REGEX.test(url)) { - url = `https://${url}`; - } - return url.replace(/\/+$/, ''); // Remove trailing slashes - } - /** - * Check if a given status code is 2xx - * - * @param status The status code to check - * @returns if the status is 2xx - */ - isSuccessfulResponse(status) { - return status >= 200 && status < 300; - } -} -exports.Upload = Upload; -function upload(cfg) { - return new Upload(cfg); -} -exports.upload = upload; -function createURI(cfg, callback) { - const up = new Upload(cfg); - if (!callback) { - return up.createURI(); - } - up.createURI().then(r => callback(null, r), callback); -} -exports.createURI = createURI; -//# sourceMappingURL=resumable-upload.js.map - -/***/ }), - -/***/ 9665: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; -const crypto = __nccwpck_require__(6113); -const url = __nccwpck_require__(7310); -const storage_1 = __nccwpck_require__(346); -const util_1 = __nccwpck_require__(1862); -var SignerExceptionMessages; -(function (SignerExceptionMessages) { - SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; - SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; - SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; -})(SignerExceptionMessages = exports.SignerExceptionMessages || (exports.SignerExceptionMessages = {})); -/* - * Default signing version for getSignedUrl is 'v2'. - */ -const DEFAULT_SIGNING_VERSION = 'v2'; -const SEVEN_DAYS = 7 * 24 * 60 * 60; -/** - * @const {string} - * @private - */ -exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; -class URLSigner { - constructor(authClient, bucket, file) { - this.bucket = bucket; - this.file = file; - this.authClient = authClient; - } - getSignedUrl(cfg) { - const expiresInSeconds = this.parseExpires(cfg.expires); - const method = cfg.method; - const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); - if (expiresInSeconds < accessibleAtInSeconds) { - throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); - } - let customHost; - // Default style is `path`. - const isVirtualHostedStyle = cfg.virtualHostedStyle || false; - if (cfg.cname) { - customHost = cfg.cname; - } - else if (isVirtualHostedStyle) { - customHost = `https://${this.bucket.name}.storage.googleapis.com`; - } - const secondsToMilliseconds = 1000; - const config = Object.assign({}, cfg, { - method, - expiration: expiresInSeconds, - accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), - bucket: this.bucket.name, - file: this.file ? 
(0, util_1.encodeURI)(this.file.name, false) : undefined, - }); - if (customHost) { - config.cname = customHost; - } - const version = cfg.version || DEFAULT_SIGNING_VERSION; - let promise; - if (version === 'v2') { - promise = this.getSignedUrlV2(config); - } - else if (version === 'v4') { - promise = this.getSignedUrlV4(config); - } - else { - throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); - } - return promise.then(query => { - query = Object.assign(query, cfg.queryParams); - const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST); - signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - signedUrl.search = (0, util_1.qsStringify)(query); - return signedUrl.href; - }); - } - getSignedUrlV2(config) { - const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); - const resourcePath = this.getResourcePath(false, config.bucket, config.file); - const blobToSign = [ - config.method, - config.contentMd5 || '', - config.contentType || '', - config.expiration, - canonicalHeadersString + resourcePath, - ].join('\n'); - const sign = async () => { - const authClient = this.authClient; - try { - const signature = await authClient.sign(blobToSign); - const credentials = await authClient.getCredentials(); - return { - GoogleAccessId: credentials.client_email, - Expires: config.expiration, - Signature: signature, - }; - } - catch (err) { - const error = err; - const signingErr = new SigningError(error.message); - signingErr.stack = error.stack; - throw signingErr; - } - }; - return sign(); - } - getSignedUrlV4(config) { - config.accessibleAt = config.accessibleAt - ? config.accessibleAt - : new Date(); - const millisecondsToSeconds = 1.0 / 1000.0; - const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; - // v4 limit expiration to be 7 days maximum - if (expiresPeriodInSeconds > SEVEN_DAYS) { - throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); - } - const extensionHeaders = Object.assign({}, config.extensionHeaders); - const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST); - extensionHeaders.host = fqdn.host; - if (config.contentMd5) { - extensionHeaders['content-md5'] = config.contentMd5; - } - if (config.contentType) { - extensionHeaders['content-type'] = config.contentType; - } - let contentSha256; - const sha256Header = extensionHeaders['x-goog-content-sha256']; - if (sha256Header) { - if (typeof sha256Header !== 'string' || - !/[A-Fa-f0-9]{40}/.test(sha256Header)) { - throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); - } - contentSha256 = sha256Header; - } - const signedHeaders = Object.keys(extensionHeaders) - .map(header => header.toLowerCase()) - .sort() - .join(';'); - const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); - const datestamp = (0, util_1.formatAsUTCISO)(config.accessibleAt); - const credentialScope = `${datestamp}/auto/storage/goog4_request`; - const sign = async () => { - const credentials = await this.authClient.getCredentials(); - const credential = `${credentials.client_email}/${credentialScope}`; - const dateISO = (0, util_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true); - const queryParams = { - 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', - 'X-Goog-Credential': credential, - 'X-Goog-Date': dateISO, - 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), - 'X-Goog-SignedHeaders': signedHeaders, - ...(config.queryParams || {}), - }; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); - const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); - const hash = crypto - .createHash('sha256') - .update(canonicalRequest) - .digest('hex'); - const blobToSign = [ - 'GOOG4-RSA-SHA256', - dateISO, - credentialScope, - hash, - ].join('\n'); - try { - const signature = await this.authClient.sign(blobToSign); - const signatureHex = Buffer.from(signature, 'base64').toString('hex'); - const signedQuery = Object.assign({}, queryParams, { - 'X-Goog-Signature': signatureHex, - }); - return signedQuery; - } - catch (err) { - const error = err; - const signingErr = new SigningError(error.message); - signingErr.stack = error.stack; - throw signingErr; - } - }; - return sign(); - } - /** - * Create canonical headers for signing v4 url. - * - * The canonical headers for v4-signing a request demands header names are - * first lowercased, followed by sorting the header names. - * Then, construct the canonical headers part of the request: - * + ":" + Trim() + "\n" - * .. - * + ":" + Trim() + "\n" - * - * @param headers - * @private - */ - getCanonicalHeaders(headers) { - // Sort headers by their lowercased names - const sortedHeaders = (0, util_1.objectEntries)(headers) - // Convert header names to lowercase - .map(([headerName, value]) => [ - headerName.toLowerCase(), - value, - ]) - .sort((a, b) => a[0].localeCompare(b[0])); - return sortedHeaders - .filter(([, value]) => value !== undefined) - .map(([headerName, value]) => { - // - Convert Array (multi-valued header) into string, delimited by - // ',' (no space). - // - Trim leading and trailing spaces. - // - Convert sequential (2+) spaces into a single space - const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); - return `${headerName}:${canonicalValue}\n`; - }) - .join(''); - } - getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { - return [ - method, - path, - query, - headers, - signedHeaders, - contentSha256 || 'UNSIGNED-PAYLOAD', - ].join('\n'); - } - getCanonicalQueryParams(query) { - return (0, util_1.objectEntries)(query) - .map(([key, value]) => [(0, util_1.encodeURI)(key, true), (0, util_1.encodeURI)(value, true)]) - .sort((a, b) => (a[0] < b[0] ? -1 : 1)) - .map(([key, value]) => `${key}=${value}`) - .join('&'); - } - getResourcePath(cname, bucket, file) { - if (cname) { - return '/' + (file || ''); - } - else if (file) { - return `/${bucket}/${file}`; - } - else { - return `/${bucket}`; - } - } - parseExpires(expires, current = new Date()) { - const expiresInMSeconds = new Date(expires).valueOf(); - if (isNaN(expiresInMSeconds)) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); - } - if (expiresInMSeconds < current.valueOf()) { - throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); - } - return Math.round(expiresInMSeconds / 1000); // The API expects seconds. 
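        // Editor's aside (illustrative): the value returned here is Unix time in
        // whole seconds; for v4 URLs, getSignedUrlV4 above rejects an expiration
        // more than SEVEN_DAYS (604800 seconds) after accessibleAt.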
- } - parseAccessibleAt(accessibleAt) { - const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); - if (isNaN(accessibleAtInMSeconds)) { - throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); - } - return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. - } -} -exports.URLSigner = URLSigner; -/** - * Custom error type for errors related to getting signed errors and policies. - * - * @private - */ -class SigningError extends Error { - constructor() { - super(...arguments); - this.name = 'SigningError'; - } -} -exports.SigningError = SigningError; -//# sourceMappingURL=signer.js.map - -/***/ }), - -/***/ 346: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; -const nodejs_common_1 = __nccwpck_require__(2881); -const paginator_1 = __nccwpck_require__(6412); -const promisify_1 = __nccwpck_require__(9203); -const stream_1 = __nccwpck_require__(2781); -const bucket_1 = __nccwpck_require__(8561); -const channel_1 = __nccwpck_require__(8953); -const file_1 = __nccwpck_require__(5373); -const util_1 = __nccwpck_require__(1862); -const hmacKey_1 = __nccwpck_require__(9930); -const crc32c_1 = __nccwpck_require__(4921); -var IdempotencyStrategy; -(function (IdempotencyStrategy) { - IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; - IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; - IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; -})(IdempotencyStrategy = exports.IdempotencyStrategy || (exports.IdempotencyStrategy = {})); -var ExceptionMessages; -(function (ExceptionMessages) { - ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; - ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; - ExceptionMessages["INVALID_ACTION"] = "The action is not provided or invalid."; -})(ExceptionMessages = exports.ExceptionMessages || (exports.ExceptionMessages = {})); -var StorageExceptionMessages; -(function (StorageExceptionMessages) { - StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; - StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; - StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; - StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; 
-})(StorageExceptionMessages = exports.StorageExceptionMessages || (exports.StorageExceptionMessages = {})); -exports.PROTOCOL_REGEX = /^(\w*):\/\//; -/** - * Default behavior: Automatically retry retriable server errors. - * - * @const {boolean} - */ -exports.AUTO_RETRY_DEFAULT = true; -/** - * Default behavior: Only attempt to retry retriable errors 3 times. - * - * @const {number} - */ -exports.MAX_RETRY_DEFAULT = 3; -/** - * Default behavior: Wait twice as long as previous retry before retrying. - * - * @const {number} - */ -exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; -/** - * Default behavior: If the operation doesn't succeed after 600 seconds, - * stop retrying. - * - * @const {number} - */ -exports.TOTAL_TIMEOUT_DEFAULT = 600; -/** - * Default behavior: Wait no more than 64 seconds between retries. - * - * @const {number} - */ -exports.MAX_RETRY_DELAY_DEFAULT = 64; -/** - * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. - * - * @const {enum} - * @private - */ -const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; -/** - * Returns true if the API request should be retried, given the error that was - * given the first time the request was attempted. - * @const - * @param {error} err - The API error to check if it is appropriate to retry. - * @return {boolean} True if the API request should be retried, false otherwise. - */ -const RETRYABLE_ERR_FN_DEFAULT = function (err) { - var _a; - const isConnectionProblem = (reason) => { - return (reason.includes('eai_again') || // DNS lookup error - reason === 'econnreset' || - reason === 'unexpected connection closure' || - reason === 'epipe'); - }; - if (err) { - if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { - return true; - } - if (typeof err.code === 'string') { - const reason = err.code.toLowerCase(); - if (isConnectionProblem(reason)) { - return true; - } - } - if (err.errors) { - for (const e of err.errors) { - const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); - if (reason && isConnectionProblem(reason)) { - return true; - } - } - } - } - return false; -}; -exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; -/*! Developer Documentation - * - * Invoke this method to create a new Storage object bound with pre-determined - * configuration options. For each object that can be created (e.g., a bucket), - * there is an equivalent static and instance method. While they are classes, - * they can be instantiated without use of the `new` keyword. - */ -/** - * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share objects with other - * users and allow other users to access your buckets and objects. 
- * - * This object provides constants to refer to the three permission levels that - * can be granted to an entity: - * - * - `gcs.acl.OWNER_ROLE` - ("OWNER") - * - `gcs.acl.READER_ROLE` - ("READER") - * - `gcs.acl.WRITER_ROLE` - ("WRITER") - * - * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} - * - * @name Storage#acl - * @type {object} - * @property {string} OWNER_ROLE - * @property {string} READER_ROLE - * @property {string} WRITER_ROLE - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const albums = storage.bucket('albums'); - * - * //- - * // Make all of the files currently in a bucket publicly readable. - * //- - * const options = { - * entity: 'allUsers', - * role: storage.acl.READER_ROLE - * }; - * - * albums.acl.add(options, function(err, aclObject) {}); - * - * //- - * // Make any new objects added to a bucket publicly readable. - * //- - * albums.acl.default.add(options, function(err, aclObject) {}); - * - * //- - * // Grant a user ownership permissions to a bucket. - * //- - * albums.acl.add({ - * entity: 'user-useremail@example.com', - * role: storage.acl.OWNER_ROLE - * }, function(err, aclObject) {}); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * albums.acl.add(options).then(function(data) { - * const aclObject = data[0]; - * const apiResponse = data[1]; - * }); - * ``` - */ -/** - * Get {@link Bucket} objects for all of the buckets in your project as - * a readable object stream. - * - * @method Storage#getBucketsStream - * @param {GetBucketsRequest} [query] Query object for listing buckets. - * @returns {ReadableStream} A readable stream that emits {@link Bucket} - * instances. - * - * @example - * ``` - * storage.getBucketsStream() - * .on('error', console.error) - * .on('data', function(bucket) { - * // bucket is a Bucket object. - * }) - * .on('end', function() { - * // All buckets retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * storage.getBucketsStream() - * .on('data', function(bucket) { - * this.end(); - * }); - * ``` - */ -/** - * Get {@link HmacKey} objects for all of the HMAC keys in the project in a - * readable object stream. - * - * @method Storage#getHmacKeysStream - * @param {GetHmacKeysOptions} [options] Configuration options. - * @returns {ReadableStream} A readable stream that emits {@link HmacKey} - * instances. - * - * @example - * ``` - * storage.getHmacKeysStream() - * .on('error', console.error) - * .on('data', function(hmacKey) { - * // hmacKey is an HmacKey object. - * }) - * .on('end', function() { - * // All HmacKey retrieved. - * }); - * - * //- - * // If you anticipate many results, you can end a stream early to prevent - * // unnecessary processing and API requests. - * //- - * storage.getHmacKeysStream() - * .on('data', function(bucket) { - * this.end(); - * }); - * ``` - */ -/** - *

- * <h4>ACLs</h4>
- * Cloud Storage uses access control lists (ACLs) to manage object and - * bucket access. ACLs are the mechanism you use to share files with other users - * and allow other users to access your buckets and files. - * - * To learn more about ACLs, read this overview on - * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. - * - * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} - * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} - * - * @class - */ -class Storage extends nodejs_common_1.Service { - getBucketsStream() { - // placeholder body, overwritten in constructor - return new stream_1.Readable(); - } - getHmacKeysStream() { - // placeholder body, overwritten in constructor - return new stream_1.Readable(); - } - /** - * @callback Crc32cGeneratorToStringCallback - * A method returning the CRC32C as a base64-encoded string. - * - * @returns {string} - * - * @example - * Hashing the string 'data' should return 'rth90Q==' - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.toString(); // 'rth90Q==' - * ``` - **/ - /** - * @callback Crc32cGeneratorValidateCallback - * A method validating a base64-encoded CRC32C string. - * - * @param {string} [value] base64-encoded CRC32C string to validate - * @returns {boolean} - * - * @example - * Should return `true` if the value matches, `false` otherwise - * - * ```js - * const buffer = Buffer.from('data'); - * crc32c.update(buffer); - * crc32c.validate('DkjKuA=='); // false - * crc32c.validate('rth90Q=='); // true - * ``` - **/ - /** - * @callback Crc32cGeneratorUpdateCallback - * A method for passing `Buffer`s for CRC32C generation. - * - * @param {Buffer} [data] data to update CRC32C value with - * @returns {undefined} - * - * @example - * Hashing buffers from 'some ' and 'text\n' - * - * ```js - * const buffer1 = Buffer.from('some '); - * crc32c.update(buffer1); - * - * const buffer2 = Buffer.from('text\n'); - * crc32c.update(buffer2); - * - * crc32c.toString(); // 'DkjKuA==' - * ``` - **/ - /** - * @typedef {object} CRC32CValidator - * @property {Crc32cGeneratorToStringCallback} - * @property {Crc32cGeneratorValidateCallback} - * @property {Crc32cGeneratorUpdateCallback} - */ - /** - * @callback Crc32cGeneratorCallback - * @returns {CRC32CValidator} - */ - /** - * @typedef {object} StorageOptions - * @property {string} [projectId] The project ID from the Google Developer's - * Console, e.g. 'grape-spaceship-123'. We will also check the environment - * variable `GCLOUD_PROJECT` for your project ID. If your app is running - * in an environment which supports {@link - * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application - * Application Default Credentials}, your project ID will be detected - * automatically. - * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key - * downloaded from the Google Developers Console. If you provide a path to - * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and - * .p12 require you to specify the `email` option as well. - * @property {string} [email] Account email address. Required when using a .pem - * or .p12 keyFilename. - * @property {object} [credentials] Credentials object. - * @property {string} [credentials.client_email] - * @property {string} [credentials.private_key] - * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors - * will be retried with exponential delay between them dictated by the formula - * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout - * has been reached. Retries will only happen if autoRetry is set to true. - * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the - * response is related to rate limits or certain intermittent server - * errors. We will exponentially backoff subsequent requests by default. - * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to - * increase the delay time between the completion of failed requests, and the - * initiation of the subsequent retrying request. - * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from - * when the initial request is sent, after which an error will - * be returned, regardless of the retrying attempts made meanwhile. - * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. - * When this value is reached, ``retryDelayMultiplier`` will no longer be used to - * increase delay time. - * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries - * attempted before returning the error. - * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given - * error should be retried and false otherwise. - * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration - * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - - * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - - * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never - * retry a conditionally idempotent operation. - * @property {string} [userAgent] The value to be prepended to the User-Agent - * header in API requests. - * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. - * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. - * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. - * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. - * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. - * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} - */ - /** - * Constructs the Storage client. 
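 *
 * Editor's sketch (illustrative, mirroring the retryOptions properties
 * documented above; the values shown are only examples):
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage({
 *   retryOptions: {
 *     autoRetry: true,         // retry retriable errors (default)
 *     maxRetries: 5,           // default is 3
 *     retryDelayMultiplier: 2, // exponential backoff factor
 *     maxRetryDelay: 64,       // seconds, upper bound on delay between retries
 *     totalTimeout: 600,       // seconds before giving up entirely
 *   },
 * });
 * ```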
- * - * @example - * Create a client that uses Application Default Credentials - * (ADC) - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * ``` - * - * @example - * Create a client with explicit credentials - * ``` - * const storage = new Storage({ - * projectId: 'your-project-id', - * keyFilename: '/path/to/keyfile.json' - * }); - * ``` - * - * @example - * Create a client with credentials passed - * by value as a JavaScript object - * ``` - * const storage = new Storage({ - * projectId: 'your-project-id', - * credentials: { - * type: 'service_account', - * project_id: 'xxxxxxx', - * private_key_id: 'xxxx', - * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', - * client_email: 'xxxx', - * client_id: 'xxx', - * auth_uri: 'https://accounts.google.com/o/oauth2/auth', - * token_uri: 'https://oauth2.googleapis.com/token', - * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', - * client_x509_cert_url: 'xxx', - * } - * }); - * ``` - * - * @example - * Create a client with credentials passed - * by loading a JSON file directly from disk - * ``` - * const storage = new Storage({ - * projectId: 'your-project-id', - * credentials: require('/path/to-keyfile.json') - * }); - * ``` - * - * @example - * Create a client with an `AuthClient` (e.g. `DownscopedClient`) - * ``` - * const {DownscopedClient} = require('google-auth-library'); - * const authClient = new DownscopedClient({...}); - * - * const storage = new Storage({authClient}); - * ``` - * - * Additional samples: - * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 - * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js - * - * @param {StorageOptions} [options] Configuration options. - */ - constructor(options = {}) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; - let apiEndpoint = 'https://storage.googleapis.com'; - let customEndpoint = false; - // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. - const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; - if (typeof EMULATOR_HOST === 'string') { - apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); - customEndpoint = true; - } - if (options.apiEndpoint) { - apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); - customEndpoint = true; - } - options = Object.assign({}, options, { apiEndpoint }); - // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. - const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; - const config = { - apiEndpoint: options.apiEndpoint, - retryOptions: { - autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined - ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry - : exports.AUTO_RETRY_DEFAULT, - maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) - ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries - : exports.MAX_RETRY_DEFAULT, - retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) - ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier - : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, - totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) - ? 
(_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout - : exports.TOTAL_TIMEOUT_DEFAULT, - maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) - ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay - : exports.MAX_RETRY_DELAY_DEFAULT, - retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) - ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn - : exports.RETRYABLE_ERR_FN_DEFAULT, - idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined - ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy - : IDEMPOTENCY_STRATEGY_DEFAULT, - }, - baseUrl, - customEndpoint, - useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, - projectIdRequired: false, - scopes: [ - 'https://www.googleapis.com/auth/iam', - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/devstorage.full_control', - ], - packageJson: __nccwpck_require__(5458), - }; - super(config, options); - /** - * Reference to {@link Storage.acl}. - * - * @name Storage#acl - * @see Storage.acl - */ - this.acl = Storage.acl; - this.crc32cGenerator = - options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; - this.retryOptions = config.retryOptions; - this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); - this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); - } - static sanitizeEndpoint(url) { - if (!exports.PROTOCOL_REGEX.test(url)) { - url = `https://${url}`; - } - return url.replace(/\/+$/, ''); // Remove trailing slashes - } - /** - * Get a reference to a Cloud Storage bucket. - * - * @param {string} name Name of the bucket. - * @param {object} [options] Configuration object. - * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to - * encrypt objects inserted into this bucket, if no encryption method is - * specified. - * @param {string} [options.userProject] User project to be billed for all - * requests made from this Bucket object. - * @returns {Bucket} - * @see Bucket - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const albums = storage.bucket('albums'); - * const photos = storage.bucket('photos'); - * ``` - */ - bucket(name, options) { - if (!name) { - throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); - } - return new bucket_1.Bucket(this, name, options); - } - /** - * Reference a channel to receive notifications about changes to your bucket. - * - * @param {string} id The ID of the channel. - * @param {string} resourceId The resource ID of the channel. - * @returns {Channel} - * @see Channel - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const channel = storage.channel('id', 'resource-id'); - * ``` - */ - channel(id, resourceId) { - return new channel_1.Channel(this, id, resourceId); - } - /** - * @typedef {array} CreateBucketResponse - * @property {Bucket} 0 The new {@link Bucket}. - * @property {object} 1 The full API response. - */ - /** - * @callback CreateBucketCallback - * @param {?Error} err Request error, if any. - * @param {Bucket} bucket The new {@link Bucket}. - * @param {object} apiResponse The full API response. 
- */ - /** - * Metadata to set for the bucket. - * - * @typedef {object} CreateBucketRequest - * @property {boolean} [archive=false] Specify the storage class as Archive. - * @property {object} [autoclass.enabled=false] Specify whether Autoclass is - * enabled for the bucket. - * @property {boolean} [coldline=false] Specify the storage class as Coldline. - * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. - * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. - * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. - * @property {boolean} [dra=false] Specify the storage class as Durable Reduced - * Availability. - * @property {string} [location] Specify the bucket's location. If specifying - * a dual-region, the `customPlacementConfig` property should be set in conjunction. - * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. - * @property {boolean} [multiRegional=false] Specify the storage class as - * Multi-Regional. - * @property {boolean} [nearline=false] Specify the storage class as Nearline. - * @property {boolean} [regional=false] Specify the storage class as Regional. - * @property {boolean} [requesterPays=false] **Early Access Testers Only** - * Force the use of the User Project metadata field to assign operational - * costs when an operation is made on a Bucket and its objects. - * @property {string} [rpo] For dual-region buckets, controls whether turbo - * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). - * @property {boolean} [standard=true] Specify the storage class as Standard. - * @property {string} [storageClass] The new storage class. (`standard`, - * `nearline`, `coldline`, or `archive`). - * **Note:** The storage classes `multi_regional`, `regional`, and - * `durable_reduced_availability` are now legacy and will be deprecated in - * the future. - * @property {Versioning} [versioning=undefined] Specify the versioning status. - * @property {string} [userProject] The ID of the project which will be billed - * for the request. - */ - /** - * Create a bucket. - * - * Cloud Storage uses a flat namespace, so you can't create a bucket with - * a name that is already in use. For more information, see - * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} - * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} - * - * @param {string} name Name of the bucket to create. - * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. - * @param {CreateBucketCallback} [callback] Callback function. - * @returns {Promise} - * @throws {Error} If a name is not provided. - * @see Bucket#create - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const callback = function(err, bucket, apiResponse) { - * // `bucket` is a Bucket object. - * }; - * - * storage.createBucket('new-bucket', callback); - * - * //- - * // Create a bucket in a specific location and region. See the - * // Official JSON API docs for complete details on the `location` - * option. 
- * // - * //- - * const metadata = { - * location: 'US-CENTRAL1', - * regional: true - * }; - * - * storage.createBucket('new-bucket', metadata, callback); - * - * //- - * // Create a bucket with a retention policy of 6 months. - * //- - * const metadata = { - * retentionPolicy: { - * retentionPeriod: 15780000 // 6 months in seconds. - * } - * }; - * - * storage.createBucket('new-bucket', metadata, callback); - * - * //- - * // Enable versioning on a new bucket. - * //- - * const metadata = { - * versioning: { - * enabled: true - * } - * }; - * - * storage.createBucket('new-bucket', metadata, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.createBucket('new-bucket').then(function(data) { - * const bucket = data[0]; - * const apiResponse = data[1]; - * }); - * - * ``` - * @example include:samples/buckets.js - * region_tag:storage_create_bucket - * Another example: - */ - createBucket(name, metadataOrCallback, callback) { - if (!name) { - throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); - } - let metadata; - if (!callback) { - callback = metadataOrCallback; - metadata = {}; - } - else { - metadata = metadataOrCallback; - } - const body = { - ...metadata, - name, - }; - const storageClasses = { - archive: 'ARCHIVE', - coldline: 'COLDLINE', - dra: 'DURABLE_REDUCED_AVAILABILITY', - multiRegional: 'MULTI_REGIONAL', - nearline: 'NEARLINE', - regional: 'REGIONAL', - standard: 'STANDARD', - }; - const storageClassKeys = Object.keys(storageClasses); - for (const storageClass of storageClassKeys) { - if (body[storageClass]) { - if (metadata.storageClass && metadata.storageClass !== storageClass) { - throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); - } - body.storageClass = storageClasses[storageClass]; - delete body[storageClass]; - } - } - if (body.requesterPays) { - body.billing = { - requesterPays: body.requesterPays, - }; - delete body.requesterPays; - } - const query = { - project: this.projectId, - }; - if (body.userProject) { - query.userProject = body.userProject; - delete body.userProject; - } - this.request({ - method: 'POST', - uri: '/b', - qs: query, - json: body, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - const bucket = this.bucket(name); - bucket.metadata = resp; - callback(null, bucket, resp); - }); - } - /** - * @typedef {object} CreateHmacKeyOptions - * @property {string} [projectId] The project ID of the project that owns - * the service account of the requested HMAC key. If not provided, - * the project ID used to instantiate the Storage client will be used. - * @property {string} [userProject] This parameter is currently ignored. - */ - /** - * @typedef {object} HmacKeyMetadata - * @property {string} accessId The access id identifies which HMAC key was - * used to sign a request when authenticating with HMAC. - * @property {string} etag Used to perform a read-modify-write of the key. - * @property {string} id The resource name of the HMAC key. - * @property {string} projectId The project ID. - * @property {string} serviceAccountEmail The service account's email this - * HMAC key is created for. - * @property {string} state The state of this HMAC key. One of "ACTIVE", - * "INACTIVE" or "DELETED". - * @property {string} timeCreated The creation time of the HMAC key in - * RFC 3339 format. - * @property {string} [updated] The time this HMAC key was last updated in - * RFC 3339 format. 
- */ - /** - * @typedef {array} CreateHmacKeyResponse - * @property {HmacKey} 0 The HmacKey instance created from API response. - * @property {string} 1 The HMAC key's secret used to access the XML API. - * @property {object} 3 The raw API response. - */ - /** - * @callback CreateHmacKeyCallback Callback function. - * @param {?Error} err Request error, if any. - * @param {HmacKey} hmacKey The HmacKey instance created from API response. - * @param {string} secret The HMAC key's secret used to access the XML API. - * @param {object} apiResponse The raw API response. - */ - /** - * Create an HMAC key associated with an service account to authenticate - * requests to the Cloud Storage XML API. - * - * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} - * - * @param {string} serviceAccountEmail The service account's email address - * with which the HMAC key is created for. - * @param {CreateHmacKeyCallback} [callback] Callback function. - * @return {Promise} - * - * @example - * ``` - * const {Storage} = require('google-cloud/storage'); - * const storage = new Storage(); - * - * // Replace with your service account's email address - * const serviceAccountEmail = - * 'my-service-account@appspot.gserviceaccount.com'; - * - * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { - * if (!err) { - * // Securely store the secret for use with the XML API. - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.createHmacKey(serviceAccountEmail) - * .then((response) => { - * const hmacKey = response[0]; - * const secret = response[1]; - * // Securely store the secret for use with the XML API. - * }); - * ``` - */ - createHmacKey(serviceAccountEmail, optionsOrCb, cb) { - if (typeof serviceAccountEmail !== 'string') { - throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); - } - const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb); - const query = Object.assign({}, options, { serviceAccountEmail }); - const projectId = query.projectId || this.projectId; - delete query.projectId; - this.request({ - method: 'POST', - uri: `/projects/${projectId}/hmacKeys`, - qs: query, - maxRetries: 0, //explicitly set this value since this is a non-idempotent function - }, (err, resp) => { - if (err) { - callback(err, null, null, resp); - return; - } - const metadata = resp.metadata; - const hmacKey = this.hmacKey(metadata.accessId, { - projectId: metadata.projectId, - }); - hmacKey.metadata = resp.metadata; - callback(null, hmacKey, resp.secret, resp); - }); - } - /** - * Query object for listing buckets. - * - * @typedef {object} GetBucketsRequest - * @property {boolean} [autoPaginate=true] Have pagination handled - * automatically. - * @property {number} [maxApiCalls] Maximum number of API calls to make. - * @property {number} [maxResults] Maximum number of items plus prefixes to - * return per call. - * Note: By default will handle pagination automatically - * if more than 1 page worth of results are requested per call. - * When `autoPaginate` is set to `false` the smaller of `maxResults` - * or 1 page of results will be returned per call. - * @property {string} [pageToken] A previously-returned page token - * representing part of the larger set of results to view. - * @property {string} [userProject] The ID of the project which will be billed - * for the request. 
- */ - /** - * @typedef {array} GetBucketsResponse - * @property {Bucket[]} 0 Array of {@link Bucket} instances. - * @property {object} 1 nextQuery A query object to receive more results. - * @property {object} 2 The full API response. - */ - /** - * @callback GetBucketsCallback - * @param {?Error} err Request error, if any. - * @param {Bucket[]} buckets Array of {@link Bucket} instances. - * @param {object} nextQuery A query object to receive more results. - * @param {object} apiResponse The full API response. - */ - /** - * Get Bucket objects for all of the buckets in your project. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} - * - * @param {GetBucketsRequest} [query] Query object for listing buckets. - * @param {GetBucketsCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * storage.getBuckets(function(err, buckets) { - * if (!err) { - * // buckets is an array of Bucket objects. - * } - * }); - * - * //- - * // To control how many API requests are made and page through the results - * // manually, set `autoPaginate` to `false`. - * //- - * const callback = function(err, buckets, nextQuery, apiResponse) { - * if (nextQuery) { - * // More results exist. - * storage.getBuckets(nextQuery, callback); - * } - * - * // The `metadata` property is populated for you with the metadata at the - * // time of fetching. - * buckets[0].metadata; - * - * // However, in cases where you are concerned the metadata could have - * // changed, use the `getMetadata` method. - * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); - * }; - * - * storage.getBuckets({ - * autoPaginate: false - * }, callback); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.getBuckets().then(function(data) { - * const buckets = data[0]; - * }); - * - * ``` - * @example include:samples/buckets.js - * region_tag:storage_list_buckets - * Another example: - */ - getBuckets(optionsOrCallback, cb) { - const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb); - options.project = options.project || this.projectId; - this.request({ - uri: '/b', - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const buckets = itemsArray.map((bucket) => { - const bucketInstance = this.bucket(bucket.id); - bucketInstance.metadata = bucket; - return bucketInstance; - }); - const nextQuery = resp.nextPageToken - ? Object.assign({}, options, { pageToken: resp.nextPageToken }) - : null; - callback(null, buckets, nextQuery, resp); - }); - } - getHmacKeys(optionsOrCb, cb) { - const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb); - const query = Object.assign({}, options); - const projectId = query.projectId || this.projectId; - delete query.projectId; - this.request({ - uri: `/projects/${projectId}/hmacKeys`, - qs: query, - }, (err, resp) => { - if (err) { - callback(err, null, null, resp); - return; - } - const itemsArray = resp.items ? resp.items : []; - const hmacKeys = itemsArray.map((hmacKey) => { - const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { - projectId: hmacKey.projectId, - }); - hmacKeyInstance.metadata = hmacKey; - return hmacKeyInstance; - }); - const nextQuery = resp.nextPageToken - ? 
Object.assign({}, options, { pageToken: resp.nextPageToken }) - : null; - callback(null, hmacKeys, nextQuery, resp); - }); - } - /** - * @typedef {array} GetServiceAccountResponse - * @property {object} 0 The service account resource. - * @property {object} 1 The full - * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. - */ - /** - * @callback GetServiceAccountCallback - * @param {?Error} err Request error, if any. - * @param {object} serviceAccount The serviceAccount resource. - * @param {string} serviceAccount.emailAddress The service account email - * address. - * @param {object} apiResponse The full - * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. - */ - /** - * Get the email address of this project's Google Cloud Storage service - * account. - * - * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} - * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} - * - * @param {object} [options] Configuration object. - * @param {string} [options.userProject] User project to be billed for this - * request. - * @param {GetServiceAccountCallback} [callback] Callback function. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * - * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { - * if (!err) { - * const serviceAccountEmail = serviceAccount.emailAddress; - * } - * }); - * - * //- - * // If the callback is omitted, we'll return a Promise. - * //- - * storage.getServiceAccount().then(function(data) { - * const serviceAccountEmail = data[0].emailAddress; - * const apiResponse = data[1]; - * }); - * ``` - */ - getServiceAccount(optionsOrCallback, cb) { - const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb); - this.request({ - uri: `/projects/${this.projectId}/serviceAccount`, - qs: options, - }, (err, resp) => { - if (err) { - callback(err, null, resp); - return; - } - const camelCaseResponse = {}; - for (const prop in resp) { - // eslint-disable-next-line no-prototype-builtins - if (resp.hasOwnProperty(prop)) { - const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); - camelCaseResponse[camelCaseProp] = resp[prop]; - } - } - callback(null, camelCaseResponse, resp); - }); - } - /** - * Get a reference to an HmacKey object. - * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to - * retrieve and populate the metadata. - * - * To get a reference to an HMAC key that's not created for a service - * account in the same project used to instantiate the Storage client, - * supply the project's ID as `projectId` in the `options` argument. - * - * @param {string} accessId The HMAC key's access ID. - * @param {HmacKeyOptions} options HmacKey constructor options. - * @returns {HmacKey} - * @see HmacKey - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const hmacKey = storage.hmacKey('ACCESS_ID'); - * ``` - */ - hmacKey(accessId, options) { - if (!accessId) { - throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); - } - return new hmacKey_1.HmacKey(this, accessId, options); - } -} -exports.Storage = Storage; -/** - * {@link Bucket} class. 
- * - * @name Storage.Bucket - * @see Bucket - * @type {Constructor} - */ -Storage.Bucket = bucket_1.Bucket; -/** - * {@link Channel} class. - * - * @name Storage.Channel - * @see Channel - * @type {Constructor} - */ -Storage.Channel = channel_1.Channel; -/** - * {@link File} class. - * - * @name Storage.File - * @see File - * @type {Constructor} - */ -Storage.File = file_1.File; -/** - * {@link HmacKey} class. - * - * @name Storage.HmacKey - * @see HmacKey - * @type {Constructor} - */ -Storage.HmacKey = hmacKey_1.HmacKey; -Storage.acl = { - OWNER_ROLE: 'OWNER', - READER_ROLE: 'READER', - WRITER_ROLE: 'WRITER', -}; -/*! Developer Documentation - * - * These methods can be auto-paginated. - */ -paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); -/*! Developer Documentation - * - * All async methods (except for streams) will return a Promise in the event - * that a callback is omitted. - */ -(0, promisify_1.promisifyAll)(Storage, { - exclude: ['bucket', 'channel', 'hmacKey'], -}); -//# sourceMappingURL=storage.js.map - -/***/ }), - -/***/ 9904: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/*! - * Copyright 2022 Google LLC. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TransferManager = void 0; -const pLimit = __nccwpck_require__(7684); -const path = __nccwpck_require__(1017); -const extend = __nccwpck_require__(8171); -const fs_1 = __nccwpck_require__(7147); -const crc32c_1 = __nccwpck_require__(4921); -/** - * Default number of concurrently executing promises to use when calling uploadManyFiles. - * @experimental - */ -const DEFAULT_PARALLEL_UPLOAD_LIMIT = 2; -/** - * Default number of concurrently executing promises to use when calling downloadManyFiles. - * @experimental - */ -const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 2; -/** - * Default number of concurrently executing promises to use when calling downloadFileInChunks. - * @experimental - */ -const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 2; -/** - * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. - * @experimental - */ -const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; -/** - * The chunk size in bytes to use when calling downloadFileInChunks. - * @experimental - */ -const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 10 * 1024 * 1024; -const EMPTY_REGEX = '(?:)'; -/** - * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. - * - * @class - * @hideconstructor - * - * @param {Bucket} bucket A {@link Bucket} instance - * @experimental - */ -class TransferManager { - constructor(bucket) { - this.bucket = bucket; - } - /** - * @typedef {object} UploadManyFilesOptions - * @property {number} [concurrencyLimit] The number of concurrently executing promises - * to use when uploading the files. 
- * @property {boolean} [skipIfExists] Do not upload the file if it already exists in - * the bucket. This will set the precondition ifGenerationMatch = 0. - * @property {string} [prefix] A prefix to append to all of the uploaded files. - * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through - * to each individual upload operation. - * @experimental - */ - /** - * Upload multiple files in parallel to the bucket. This is a convenience method - * that utilizes {@link Bucket#upload} to perform the upload. - * - * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. - * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. - * to be uploaded to the bucket - * @param {UploadManyFilesOptions} [options] Configuration options. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const transferManager = new TransferManager(bucket); - * - * //- - * // Upload multiple files in parallel. - * //- - * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); - * // Your bucket now contains: - * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') - * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') - * const response = await transferManager.uploadManyFiles('/local/directory'); - * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. - * ``` - * @experimental - */ - async uploadManyFiles(filePathsOrDirectory, options = {}) { - var _a; - if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { - options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; - } - else if (options.skipIfExists && - options.passthroughOptions === undefined) { - options.passthroughOptions = { - preconditionOpts: { - ifGenerationMatch: 0, - }, - }; - } - const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); - const promises = []; - let allPaths = []; - if (!Array.isArray(filePathsOrDirectory)) { - for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { - allPaths.push(curPath); - } - } - else { - allPaths = filePathsOrDirectory; - } - for (const filePath of allPaths) { - const stat = await fs_1.promises.lstat(filePath); - if (stat.isDirectory()) { - continue; - } - const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions); - passThroughOptionsCopy.destination = filePath; - if (options.prefix) { - passThroughOptionsCopy.destination = path.join(options.prefix, passThroughOptionsCopy.destination); - } - promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); - } - return Promise.all(promises); - } - /** - * @typedef {object} DownloadManyFilesOptions - * @property {number} [concurrencyLimit] The number of concurrently executing promises - * to use when downloading the files. - * @property {string} [prefix] A prefix to append to all of the downloaded files. - * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. - * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through - * to each individual download operation. 
- * @experimental - */ - /** - * Download multiple files in parallel to the local filesystem. This is a convenience method - * that utilizes {@link File#download} to perform the download. - * - * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If - * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. - * @param {DownloadManyFilesOptions} [options] Configuration options. - * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const transferManager = new TransferManager(bucket); - * - * //- - * // Download multiple files in parallel. - * //- - * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); - * // The following files have been downloaded: - * // - "file1.txt" (with the contents from my-bucket.file1.txt) - * // - "file2.txt" (with the contents from my-bucket.file2.txt) - * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); - * // The following files have been downloaded: - * // - "file1.txt" (with the contents from my-bucket.file1.txt) - * // - "file2.txt" (with the contents from my-bucket.file2.txt) - * const response = await transferManager.downloadManyFiles('test-folder'); - * // All files with GCS prefix of 'test-folder' have been downloaded. - * ``` - * @experimental - */ - async downloadManyFiles(filesOrFolder, options = {}) { - const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); - const promises = []; - let files = []; - if (!Array.isArray(filesOrFolder)) { - const directoryFiles = await this.bucket.getFiles({ - prefix: filesOrFolder, - }); - files = directoryFiles[0]; - } - else { - files = filesOrFolder.map(curFile => { - if (typeof curFile === 'string') { - return this.bucket.file(curFile); - } - return curFile; - }); - } - const stripRegexString = options.stripPrefix - ? `^${options.stripPrefix}` - : EMPTY_REGEX; - const regex = new RegExp(stripRegexString, 'g'); - for (const file of files) { - const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions); - if (options.prefix) { - passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); - } - if (options.stripPrefix) { - passThroughOptionsCopy.destination = file.name.replace(regex, ''); - } - promises.push(limit(() => file.download(passThroughOptionsCopy))); - } - return Promise.all(promises); - } - /** - * @typedef {object} DownloadFileInChunksOptions - * @property {number} [concurrencyLimit] The number of concurrently executing promises - * to use when downloading the file. - * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. - * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. - * @experimental - */ - /** - * Download a large file in chunks utilizing parallel download operations. This is a convenience method - * that utilizes {@link File#download} to perform the download. - * - * @param {object} [file | string] {@link File} to download. - * @param {DownloadFileInChunksOptions} [options] Configuration options. 
- * @returns {Promise} - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const bucket = storage.bucket('my-bucket'); - * const transferManager = new TransferManager(bucket); - * - * //- - * // Download a large file in chunks utilizing parallel operations. - * //- - * const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt'); - * // Your local directory now contains: - * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) - * ``` - * @experimental - */ - async downloadFileInChunks(fileOrName, options = {}) { - let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; - let limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); - const promises = []; - const file = typeof fileOrName === 'string' - ? this.bucket.file(fileOrName) - : fileOrName; - const fileInfo = await file.get(); - const size = parseInt(fileInfo[0].metadata.size); - // If the file size does not meet the threshold download it as a single chunk. - if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { - limit = pLimit(1); - chunkSize = size; - } - let start = 0; - const filePath = options.destination || path.basename(file.name); - const fileToWrite = await fs_1.promises.open(filePath, 'w+'); - while (start < size) { - const chunkStart = start; - let chunkEnd = start + chunkSize - 1; - chunkEnd = chunkEnd > size ? size : chunkEnd; - promises.push(limit(() => file.download({ start: chunkStart, end: chunkEnd }).then(resp => { - return fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); - }))); - start += chunkSize; - } - return new Promise((resolve, reject) => { - let results; - Promise.all(promises) - .then(data => { - results = data.map(result => result.buffer); - if (options.validation === 'crc32c') { - return crc32c_1.CRC32C.fromFile(filePath); - } - return; - }) - .then(() => { - resolve(results); - }) - .catch(e => { - reject(e); - }) - .finally(() => { - fileToWrite.close(); - }); - }); - } - async *getPathsFromDirectory(directory) { - const filesAndSubdirectories = await fs_1.promises.readdir(directory, { - withFileTypes: true, - }); - for (const curFileOrDirectory of filesAndSubdirectories) { - const fullPath = path.join(directory, curFileOrDirectory.name); - curFileOrDirectory.isDirectory() - ? yield* this.getPathsFromDirectory(fullPath) - : yield fullPath; - } - } -} -exports.TransferManager = TransferManager; -//# sourceMappingURL=transfer-manager.js.map - -/***/ }), - -/***/ 1862: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PassThroughShim = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; -const querystring = __nccwpck_require__(3477); -const stream_1 = __nccwpck_require__(2781); -function normalize(optionsOrCallback, cb) { - const options = (typeof optionsOrCallback === 'object' ? optionsOrCallback : {}); - const callback = (typeof optionsOrCallback === 'function' ? optionsOrCallback : cb); - return { options, callback }; -} -exports.normalize = normalize; -/** - * Flatten an object into an Array of arrays, [[key, value], ..]. - * Implements Object.entries() for Node.js <8 - * @internal - */ -function objectEntries(obj) { - return Object.keys(obj).map(key => [key, obj[key]]); -} -exports.objectEntries = objectEntries; -/** - * Encode `str` with encodeURIComponent, plus these - * reserved characters: `! * ' ( )`. - * - * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} - * - * @param {string} str The URI component to encode. - * @return {string} The encoded string. - */ -function fixedEncodeURIComponent(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); -} -exports.fixedEncodeURIComponent = fixedEncodeURIComponent; -/** - * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. - * - * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. - * - * @param {string} uri The URI to encode. - * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. - * @return {string} The encoded string. - */ -function encodeURI(uri, encodeSlash) { - // Split the string by `/`, and conditionally rejoin them with either - // %2F if encodeSlash is `true`, or '/' if `false`. - return uri - .split('/') - .map(fixedEncodeURIComponent) - .join(encodeSlash ? '%2F' : '/'); -} -exports.encodeURI = encodeURI; -/** - * Serialize an object to a URL query string using util.encodeURI(uri, true). - * @param {string} url The object to serialize. - * @return {string} Serialized string. - */ -function qsStringify(qs) { - return querystring.stringify(qs, '&', '=', { - encodeURIComponent: (component) => encodeURI(component, true), - }); -} -exports.qsStringify = qsStringify; -function objectKeyToLowercase(object) { - const newObj = {}; - for (let key of Object.keys(object)) { - const value = object[key]; - key = key.toLowerCase(); - newObj[key] = value; - } - return newObj; -} -exports.objectKeyToLowercase = objectKeyToLowercase; -/** - * JSON encode str, with unicode \u+ representation. - * @param {object} obj The object to encode. - * @return {string} Serialized string. - */ -function unicodeJSONStringify(obj) { - return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, (char) => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); -} -exports.unicodeJSONStringify = unicodeJSONStringify; -/** - * Converts the given objects keys to snake_case - * @param {object} obj object to convert keys to snake case. - * @returns {object} object with keys converted to snake case. 
- */ -function convertObjKeysToSnakeCase(obj) { - if (obj instanceof Date || obj instanceof RegExp) { - return obj; - } - if (Array.isArray(obj)) { - return obj.map(convertObjKeysToSnakeCase); - } - if (obj instanceof Object) { - return Object.keys(obj).reduce((acc, cur) => { - const s = cur[0].toLocaleLowerCase() + - cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { - return `_${p1.toLowerCase()}`; - }); - acc[s] = convertObjKeysToSnakeCase(obj[cur]); - return acc; - }, Object()); - } - return obj; -} -exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; -/** - * Formats the provided date object as a UTC ISO string. - * @param {Date} dateTimeToFormat date object to be formatted. - * @param {boolean} includeTime flag to include hours, minutes, seconds in output. - * @param {string} dateDelimiter delimiter between date components. - * @param {string} timeDelimiter delimiter between time components. - * @returns {string} UTC ISO format of provided date obect. - */ -function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { - const year = dateTimeToFormat.getUTCFullYear(); - const month = dateTimeToFormat.getUTCMonth() + 1; - const day = dateTimeToFormat.getUTCDate(); - const hour = dateTimeToFormat.getUTCHours(); - const minute = dateTimeToFormat.getUTCMinutes(); - const second = dateTimeToFormat.getUTCSeconds(); - let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month - .toString() - .padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; - if (includeTime) { - resultString = `${resultString}T${hour - .toString() - .padStart(2, '0')}${timeDelimiter}${minute - .toString() - .padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; - } - return resultString; -} -exports.formatAsUTCISO = formatAsUTCISO; -class PassThroughShim extends stream_1.PassThrough { - constructor() { - super(...arguments); - this.shouldEmitReading = true; - this.shouldEmitWriting = true; - } - _read(size) { - if (this.shouldEmitReading) { - this.emit('reading'); - this.shouldEmitReading = false; - } - super._read(size); - } - _write(chunk, encoding, callback) { - if (this.shouldEmitWriting) { - this.emit('writing'); - this.shouldEmitWriting = false; - } - // Per the nodejs documention, callback must be invoked on the next tick - process.nextTick(() => { - super._write(chunk, encoding, callback); - }); - } - _final(callback) { - // If the stream is empty (i.e. empty file) final will be invoked before _read / _write - // and we should still emit the proper events. - if (this.shouldEmitReading) { - this.emit('reading'); - this.shouldEmitReading = false; - } - if (this.shouldEmitWriting) { - this.emit('writing'); - this.shouldEmitWriting = false; - } - callback(null); - } -} -exports.PassThroughShim = PassThroughShim; -//# sourceMappingURL=util.js.map - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(5030); -var beforeAfterHook = __nccwpck_require__(3682); -var request = __nccwpck_require__(6234); -var graphql = __nccwpck_require__(8467); -var authToken = __nccwpck_require__(334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; -} - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: 
console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 4193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. 
- * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); + return gather(octokit, results, iterator2, mapFn); }); } -const composePaginateRest = Object.assign(paginate, { +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { iterator }); -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET 
/projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET 
/repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET 
/orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; +// pkg/dist-src/paginating-endpoints.js function isPaginatingEndpoint(arg) { if (typeof arg === "string") { return paginatingEndpoints.includes(arg); @@ -17752,11 +5336,7 @@ function isPaginatingEndpoint(arg) { } } -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - +// pkg/dist-src/index.js function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { @@ -17765,202 +5345,390 @@ function paginateRest(octokit) { }; } paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 3044: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((module) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: 
all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} +// pkg/dist-src/version.js +var VERSION = "10.3.0"; -const Endpoints = { +// pkg/dist-src/generated/endpoints.js +var Endpoints = { actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET 
/repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + 
getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: 
["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - 
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT 
/orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], getFeeds: ["GET /feeds"], getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], listEventsForAuthenticatedUser: ["GET /users/{username}/events"], listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], listPublicEvents: ["GET /events"], listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], listPublicEventsForUser: ["GET /users/{username}/events/public"], listPublicOrgEvents: ["GET /orgs/{org}/events"], listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], listReposStarredByAuthenticatedUser: ["GET /user/starred"], listReposStarredByUser: ["GET /users/{username}/starred"], listReposWatchedByUser: ["GET /users/{username}/subscriptions"], @@ -17971,18 +5739,26 @@ const Endpoints = { markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", 
"addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], checkToken: ["POST /applications/{client_id}/token"], createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteInstallation: ["DELETE /app/installations/{installation_id}"], deleteToken: ["DELETE /applications/{client_id}/token"], @@ -17991,75 +5767,132 @@ const Endpoints = { getInstallation: ["GET /app/installations/{installation_id}"], getOrgInstallation: ["GET /orgs/{org}/installation"], getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], getUserInstallation: ["GET /users/{username}/installation"], getWebhookConfigForApp: ["GET /app/hook/config"], getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], listInstallations: ["GET /app/installations"], listInstallationsForAuthenticatedUser: ["GET /user/installations"], listPlans: ["GET /marketplace_listing/plans"], listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] - }], - removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], resetToken: ["PATCH /applications/{client_id}/token"], 
revokeInstallationAccessToken: ["DELETE /installation/token"], scopeToken: ["POST /applications/{client_id}/token/scoped"], suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], - getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] }, checks: { create: ["POST /repos/{owner}/{repo}/check-runs"], createSuite: ["POST /repos/{owner}/{repo}/check-suites"], get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" 
+ ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { - renamed: ["codeScanning", "listAlertInstances"] - }], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { @@ -18067,82 +5900,190 @@ const Endpoints = { getConductCode: ["GET /codes_of_conduct/{key}"] }, codespaces: { - addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], - createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], - createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], - exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], - 
getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], - getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], - listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: ["GET /orgs/{org}/codespaces", {}, { - renamedParameters: { - org_id: "org" - } - }], - listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], - repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], - setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE 
/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, dependabot: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] + listSelectedReposForOrgSecret: [ + "GET 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] }, dependencyGraph: { - createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], - diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], - listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] }, + emojis: { get: ["GET /emojis"] }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], @@ -18188,33 +6129,52 @@ const Endpoints = { getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", 
"removeRestrictionsForAuthenticatedUser"] - }], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] }, issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], createLabel: ["POST /repos/{owner}/{repo}/labels"], createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], @@ -18226,24 +6186,38 @@ const Endpoints = { listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], listMilestones: ["GET /repos/{owner}/{repo}/milestones"], lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + removeAllLabels: [ + 
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] }, licenses: { get: ["GET /licenses/{license}"], @@ -18252,65 +6226,159 @@ const Endpoints = { }, markdown: { render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] }, meta: { get: ["GET /meta"], + getAllVersions: ["GET /versions"], getOctocat: ["GET /octocat"], getZen: ["GET /zen"], root: ["GET /"] }, migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], listForAuthenticatedUser: ["GET /user/migrations"], listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], listReposForOrg: 
["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { - renamed: ["migrations", "listReposForAuthenticatedUser"] - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] }, orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], blockUser: ["PUT /orgs/{org}/blocks/{username}"], cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET 
/orgs/{org}/properties/schema/{custom_property_name}" + ], getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], @@ -18318,55 +6386,148 @@ const Endpoints = { listMembers: ["GET /orgs/{org}/members"], listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], removeMember: ["DELETE /orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: 
["POST /orgs/{org}/personal-access-tokens"], updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] }, packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + 
getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], listPackagesForAuthenticatedUser: ["GET /user/packages"], listPackagesForOrganization: ["GET /orgs/{org}/packages"], listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] }, projects: { addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], @@ -18381,7 +6542,9 @@ const Endpoints = { get: ["GET /projects/{project_id}"], getCard: ["GET /projects/columns/cards/{card_id}"], getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], listCards: ["GET /projects/columns/{column_id}/cards"], listCollaborators: ["GET /projects/{project_id}/collaborators"], listColumns: ["GET /projects/{project_id}/columns"], @@ -18390,7 +6553,9 @@ const Endpoints = { 
listForUser: ["GET /users/{username}/projects"], moveCard: ["POST /projects/columns/cards/{card_id}/moves"], moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], update: ["PATCH /projects/{project_id}"], updateCard: ["PATCH /projects/columns/cards/{card_id}"], updateColumn: ["PATCH /projects/columns/{column_id}"] @@ -18398,191 +6563,411 @@ const Endpoints = { pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] }, + rateLimit: { get: ["GET /rate_limit"] }, reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], - deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], - listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] }, repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "acceptInvitationForAuthenticatedUser"] - }], - acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkAutomatedSecurityFixes: [ + "GET 
/repos/{owner}/{repo}/automated-security-fixes" + ], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { - renamed: ["repos", "declineInvitationForAuthenticatedUser"] - }], - declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - 
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], - disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], downloadTarballArchive: ["GET 
/repos/{owner}/{repo}/tarball/{ref}"], downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], - enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], - generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + 
], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + 
getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], listCommits: ["GET /repos/{owner}/{repo}/commits"], listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], listDeployments: ["GET /repos/{owner}/{repo}/deployments"], listForAuthenticatedUser: ["GET /user/repos"], listForOrg: ["GET /orgs/{org}/repos"], @@ -18593,67 +6978,117 @@ const Endpoints = { listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], listReleases: ["GET /repos/{owner}/{repo}/releases"], listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: 
["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], transfer: ["POST /repos/{owner}/{repo}/transfer"], update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH 
/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] }, search: { code: ["GET /search/code"], @@ -18665,390 +7100,596 @@ const Endpoints = { users: ["GET /search/users"] }, secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"], + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH 
/repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] }, teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], list: ["GET /orgs/{org}/teams"], listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE 
/orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { - addEmailForAuthenticated: ["POST /user/emails", {}, { - renamed: ["users", "addEmailForAuthenticatedUser"] - }], + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { - renamed: ["users", "createGpgKeyForAuthenticatedUser"] - }], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { - renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] - }], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { - renamed: ["users", "deleteEmailForAuthenticatedUser"] - }], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] - }], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { - renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] - }], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], follow: ["PUT /user/following/{username}"], getAuthenticated: 
["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticatedUser"] - }], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] - }], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticatedUser"] - }], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticatedUser"] - }], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticatedUser"] - }], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticatedUser"] - }], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] - }], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] - }], - setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + 
listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; +var endpoints_default = Endpoints; -const VERSION = "5.16.2"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { method, url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } - return newMethods; } - function decorate(octokit, scope, methodName, defaults, decorations) { const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = 
requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - + let options = requestWithDefaults.endpoint.merge(...args); if (decorations.mapToData) { options = Object.assign({}, options, { data: options[decorations.mapToData], - [decorations.mapToData]: undefined + [decorations.mapToData]: void 0 }); return requestWithDefaults(options); } - if (decorations.renamed) { const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); } - if (decorations.deprecated) { octokit.log.warn(decorations.deprecated); } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; } - - delete options[name]; + delete options2[name]; } } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - + return requestWithDefaults(options2); + } return requestWithDefaults(...args); } - return Object.assign(withDecorations, requestWithDefaults); } +// pkg/dist-src/index.js function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); + const api = endpointsToMethods(octokit); return { rest: api }; } restEndpointMethods.VERSION = VERSION; function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { + const api = endpointsToMethods(octokit); + return { + ...api, rest: api - }); + }; } legacyRestEndpointMethods.VERSION = VERSION; - -exports.legacyRestEndpointMethods = legacyRestEndpointMethods; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(8932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - + super(message); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } - this.name = "HttpError"; this.status = statusCode; let headers; - if ("headers" in options && typeof options.headers !== "undefined") { headers = options.headers; } - if ("response" in options) { this.response = options.response; headers = options.response.headers; - } // redact request credentials without mutating original request options - - + } const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) }); } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL 
query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; Object.defineProperty(this, "code", { get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); return statusCode; } - }); Object.defineProperty(this, "headers", { get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." + ) + ); return headers || {}; } - }); } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), /***/ 6234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -Object.defineProperty(exports, "__esModule", ({ value: true })); +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(9440); +var import_universal_user_agent = __nccwpck_require__(5030); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +// pkg/dist-src/version.js +var VERSION = "8.2.0"; -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} -const VERSION = "5.6.3"; +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(537); +// pkg/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } +// pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { + var _a, _b, _c; const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } - let headers = {}; let status; let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, headers: requestOptions.headers, - redirect: requestOptions.redirect - }, // `requestOptions.request.agent` type is incompatible - // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(async response => { + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { url = response.url; status = response.status; - for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } - if ("deprecation" in headers) { const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); const deprecationLink = matches && matches.pop(); - log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}` + ); } - if (status === 204 || status === 205) { return; - } // GitHub API returns 200 for HEAD requests - - + } if (requestOptions.method === "HEAD") { if (status < 400) { return; } - - throw new requestError.RequestError(response.statusText, status, { + throw new import_request_error.RequestError(response.statusText, status, { response: { url, status, headers, - data: undefined + data: void 0 }, request: requestOptions }); } - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { + throw new import_request_error.RequestError("Not modified", status, { response: { url, status, @@ -19058,10 +7699,9 @@ function fetchWrapper(requestOptions) { request: requestOptions }); } - if (status >= 400) { const data = await getResponseData(response); - const error = new requestError.RequestError(toErrorMessage(data), status, { + const error = new import_request_error.RequestError(toErrorMessage(data), status, { response: { url, status, @@ -19072,87 +7712,93 @@ function fetchWrapper(requestOptions) { }); throw error; } - - return getResponseData(response); - }).then(data => { + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { return { status, url, headers, data }; - }).catch(error => { - if (error instanceof requestError.RequestError) throw error; - throw new requestError.RequestError(error.message, 500, { + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { request: requestOptions }); }); } - async function getResponseData(response) { const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json(); + return response.json().catch(() => response.text()).catch(() => ""); } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { return response.text(); } - return getBufferResponse(response); } - function toErrorMessage(data) { - if (typeof data === "string") return data; // istanbul ignore else - just in case - + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } if ("message" in data) { if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; } - - return data.message; - } // istanbul ignore next - just in case - - + return `${data.message}${suffix}`; + } return `Unknown error: ${JSON.stringify(data)}`; } +// pkg/dist-src/with-defaults.js function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); + return 
fetchWrapper(endpoint2.parse(endpointOptions)); } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); - return endpointOptions.request.hook(request, endpointOptions); + return endpointOptions.request.hook(request2, endpointOptions); }; - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) }); } -const request = withDefaults(endpoint.endpoint, { +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } }); - -exports.request = request; -//# sourceMappingURL=index.js.map +// Annotate the CommonJS export names for ESM import in node: +0 && (0); /***/ }), @@ -19593,7 +8239,7 @@ module.exports = arrify; /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { // Packages -var retrier = __nccwpck_require__(4347); +var retrier = __nccwpck_require__(1604); function retry(fn, opts) { function run(resolve, reject) { @@ -20003,7 +8649,7 @@ function removeHook(state, name, method) { 'use strict'; /* - * bignumber.js v9.1.1 + * bignumber.js v9.1.2 * A JavaScript library for arbitrary-precision arithmetic. * https://github.com/MikeMcl/bignumber.js * Copyright (c) 2022 Michael Mclaughlin @@ -20638,7 +9284,7 @@ function removeHook(state, name, method) { * arguments {number|string|BigNumber} */ BigNumber.maximum = BigNumber.max = function () { - return maxOrMin(arguments, P.lt); + return maxOrMin(arguments, -1); }; @@ -20648,7 +9294,7 @@ function removeHook(state, name, method) { * arguments {number|string|BigNumber} */ BigNumber.minimum = BigNumber.min = function () { - return maxOrMin(arguments, P.gt); + return maxOrMin(arguments, 1); }; @@ -21292,24 +9938,20 @@ function removeHook(state, name, method) { // Handle BigNumber.max and BigNumber.min. - function maxOrMin(args, method) { - var n, + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, i = 1, - m = new BigNumber(args[0]); + x = new BigNumber(args[0]); for (; i < args.length; i++) { - n = new BigNumber(args[i]); - - // If any number is NaN, return NaN. - if (!n.s) { - m = n; - break; - } else if (method.call(m, n)) { - m = n; + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; } } - return m; + return x; } @@ -21428,7 +10070,7 @@ function removeHook(state, name, method) { n = xc[ni = 0]; // Get the rounding digit at index j of n. - rd = n / pows10[d - j - 1] % 10 | 0; + rd = mathfloor(n / pows10[d - j - 1] % 10); } else { ni = mathceil((i + 1) / LOG_BASE); @@ -21459,7 +10101,7 @@ function removeHook(state, name, method) { j = i - LOG_BASE + d; // Get the rounding digit at index j of n. - rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0; + rd = j < 0 ? 
0 : mathfloor(n / pows10[d - j - 1] % 10); } } @@ -22929,2684 +11571,8328 @@ function removeHook(state, name, method) { /***/ }), -/***/ 9239: +/***/ 9239: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*jshint node:true */ + +var Buffer = (__nccwpck_require__(4300).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(4300).SlowBuffer); + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. + if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; + + +/***/ }), + +/***/ 6763: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*! + * compressible + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Jeremiah Senkpiel + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __nccwpck_require__(7426) + +/** + * Module variables. + * @private + */ + +var COMPRESSIBLE_TYPE_REGEXP = /^text\/|\+(?:json|text|xml)$/i +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ + +/** + * Module exports. + * @public + */ + +module.exports = compressible + +/** + * Checks if a type is compressible. + * + * @param {string} type + * @return {Boolean} compressible + * @public + */ + +function compressible (type) { + if (!type || typeof type !== 'string') { + return false + } + + // strip parameters + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && match[1].toLowerCase() + var data = db[mime] + + // return database information + if (data && data.compressible !== undefined) { + return data.compressible + } + + // fallback to regexp or unknown + return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined +} + + +/***/ }), + +/***/ 8222: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. 
+ */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(6243)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 6243: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(900); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 8237: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(8222); +} else { + module.exports = __nccwpck_require__(4874); +} + + +/***/ }), + +/***/ 4874: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(6224); +const util = __nccwpck_require__(3837); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(9318); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(6243)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + +/***/ }), + +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 6599: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var stream = __nccwpck_require__(1642) +var eos = __nccwpck_require__(1205) +var inherits = __nccwpck_require__(4124) +var shift = __nccwpck_require__(6121) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 1728: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Buffer = (__nccwpck_require__(1867).Buffer); + +var getParamBytesForAlg = __nccwpck_require__(528); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if 
(signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; + + +/***/ }), + +/***/ 528: +/***/ ((module) => { + +"use strict"; + + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; + + +/***/ }), + +/***/ 1205: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(1223); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 5771: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var punycode = __nccwpck_require__(5477); +var entities = __nccwpck_require__(2077); + +module.exports = decode; + +function decode (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + + return str.replace(/&(#?[^;\W]+;?)/g, function (_, match) { + var m; + if (m = /^#(\d+);?$/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 10) ]); + } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 16) ]); + } else { + // named entity + var hasSemi = /;$/.test(match); + var withoutSemi = hasSemi ? match.replace(/;$/, '') : match; + var target = entities[withoutSemi] || (hasSemi && entities[match]); + + if (typeof target === 'number') { + return punycode.ucs2.encode([ target ]); + } else if (typeof target === 'string') { + return target; + } else { + return '&' + match; + } + } + }); +} + + +/***/ }), + +/***/ 6521: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var punycode = __nccwpck_require__(5477); +var revEntities = __nccwpck_require__(3123); + +module.exports = encode; + +function encode (str, opts) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + if (!opts) opts = {}; + + var numeric = true; + if (opts.named) numeric = false; + if (opts.numeric !== undefined) numeric = opts.numeric; + + var special = opts.special || { + '"': true, "'": true, + '<': true, '>': true, + '&': true + }; + + var codePoints = punycode.ucs2.decode(str); + var chars = []; + for (var i = 0; i < codePoints.length; i++) { + var cc = codePoints[i]; + var c = punycode.ucs2.encode([ cc ]); + var e = revEntities[cc]; + if (e && (cc >= 127 || special[c]) && !numeric) { + chars.push('&' + (/;$/.test(e) ? 
e : e + ';')); + } + else if (cc < 32 || cc >= 127 || special[c]) { + chars.push('&#' + cc + ';'); + } + else { + chars.push(c); + } + } + return chars.join(''); +} + + +/***/ }), + +/***/ 1151: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +exports.encode = __nccwpck_require__(6521); +exports.decode = __nccwpck_require__(5771); + + +/***/ }), + +/***/ 4697: +/***/ ((module, exports) => { + +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. 
+ * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. 
+Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. 
+ * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. 
+ * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + + +/***/ }), + +/***/ 8171: +/***/ ((module) => { + +"use strict"; + + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. + var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; + + +/***/ }), + +/***/ 2603: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(1739); +const XMLParser = __nccwpck_require__(2380); +const XMLBuilder = __nccwpck_require__(660); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 8280: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. + * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 1739: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(8280); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 660: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/*jshint node:true */ -var Buffer = (__nccwpck_require__(4300).Buffer); 
// browserify -var SlowBuffer = (__nccwpck_require__(4300).SlowBuffer); +//parse Empty Node as self closing node +const buildFromOrderedJs = __nccwpck_require__(2462); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } -module.exports = bufferEq; + this.processTextOrObjNode = processTextOrObjNode -function bufferEq(a, b) { + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} - // shortcutting on type is necessary for correctness - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - return false; +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; } +}; - // buffer sizes should be well-known information, so despite this - // shortcutting, it doesn't leak any information about the *contents* of the - // buffers. - if (a.length !== b.length) { - return false; +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup ){ + listTagVal += this.j2x(item, level + 1).val; + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, '', level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } } + return {attrStr: attrStr, val: val}; +}; - var c = 0; - for (var i = 0; i < a.length; i++) { - /*jshint bitwise:false */ - c |= a[i] ^ b[i]; // XOR +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); } - return c === 0; } -bufferEq.install = function() { - Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { - return bufferEq(this, that); - }; -}; +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ -var origBufEqual = Buffer.prototype.equal; -var origSlowBufEqual = SlowBuffer.prototype.equal; -bufferEq.restore = function() { - Buffer.prototype.equal = origBufEqual; - SlowBuffer.prototype.equal = origSlowBufEqual; + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { + +const EOL = "\n"; + +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; + + +/***/ }), + +/***/ 6072: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(8280); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 2821: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); }; +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; /***/ }), -/***/ 6763: +/***/ 5832: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/*! 
- * compressible - * Copyright(c) 2013 Jonathan Ong - * Copyright(c) 2014 Jeremiah Senkpiel - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ +///@ts-check + +const util = __nccwpck_require__(8280); +const xmlNode = __nccwpck_require__(7462); +const readDocType = __nccwpck_require__(6072); +const toNumber = __nccwpck_require__(4526); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} /** - * Module dependencies. 
- * @private + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} -var db = __nccwpck_require__(7426) +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const 
nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node 
+ if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( 
this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic /** - * Module variables. - * @private + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName */ - -var COMPRESSIBLE_TYPE_REGEXP = /^text\/|\+(?:json|text|xml)$/i -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} /** - * Module exports. - * @public + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, 
">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 2380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(2821); +const OrderedObjParser = __nccwpck_require__(5832); +const { prettify} = __nccwpck_require__(2882); +const validator = __nccwpck_require__(1739); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 2882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; -module.exports = compressible /** - * Checks if a type is compressible. 
- * - * @param {string} type - * @return {Boolean} compressible - * @public + * + * @param {array} node + * @param {any} options + * @returns */ +function prettify(node, options){ + return compress( node, options); +} -function compressible (type) { - if (!type || typeof type !== 'string') { - return false +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} - // strip parameters - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && match[1].toLowerCase() - var data = db[mime] +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} - // return database information - if (data && data.compressible !== undefined) { - return data.compressible +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } } +} - // fallback to regexp or unknown - return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; } +exports.prettify = prettify; + + +/***/ }), + +/***/ 7462: +/***/ ((module) => { + +"use strict"; -/***/ }), +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; -/***/ 8222: -/***/ ((module, exports, __nccwpck_require__) => { -/* eslint-env browser */ +module.exports = XmlNode; -/** - * This is the web browser implementation of `debug()`. - */ +/***/ }), -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.storage = localstorage(); -exports.destroy = (() => { - let warned = false; +/***/ 6129: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - return () => { - if (!warned) { - warned = true; - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } - }; -})(); +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +const url_1 = __nccwpck_require__(7310); +const util_1 = __nccwpck_require__(1980); +const extend_1 = __importDefault(__nccwpck_require__(8171)); /** - * Colors. + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. 
+ * + * @see {@link GaxiosError[Symbol.hasInstance]} */ - -exports.colors = [ - '#0000CC', - '#0000FF', - '#0033CC', - '#0033FF', - '#0066CC', - '#0066FF', - '#0099CC', - '#0099FF', - '#00CC00', - '#00CC33', - '#00CC66', - '#00CC99', - '#00CCCC', - '#00CCFF', - '#3300CC', - '#3300FF', - '#3333CC', - '#3333FF', - '#3366CC', - '#3366FF', - '#3399CC', - '#3399FF', - '#33CC00', - '#33CC33', - '#33CC66', - '#33CC99', - '#33CCCC', - '#33CCFF', - '#6600CC', - '#6600FF', - '#6633CC', - '#6633FF', - '#66CC00', - '#66CC33', - '#9900CC', - '#9900FF', - '#9933CC', - '#9933FF', - '#99CC00', - '#99CC33', - '#CC0000', - '#CC0033', - '#CC0066', - '#CC0099', - '#CC00CC', - '#CC00FF', - '#CC3300', - '#CC3333', - '#CC3366', - '#CC3399', - '#CC33CC', - '#CC33FF', - '#CC6600', - '#CC6633', - '#CC9900', - '#CC9933', - '#CCCC00', - '#CCCC33', - '#FF0000', - '#FF0033', - '#FF0066', - '#FF0099', - '#FF00CC', - '#FF00FF', - '#FF3300', - '#FF3333', - '#FF3366', - '#FF3399', - '#FF33CC', - '#FF33FF', - '#FF6600', - '#FF6633', - '#FF9900', - '#FF9933', - '#FFCC00', - '#FFCC33' -]; - +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. 
+ } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} /** - * Currently only WebKit-based Web Inspectors, Firefox >= v31, - * and the Firebug extension (any Firefox version) are known - * to support "%c" CSS customizations. + * An experimental error redactor. * - * TODO: add a `localStorage` variable to explicitly enable/disable colors + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/.test(text) || /assertion=/.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL(data.config.url || ''); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +exports.defaultErrorRedactor = defaultErrorRedactor; +//# sourceMappingURL=common.js.map -// eslint-disable-next-line complexity -function useColors() { - // NB: In an Electron preload script, document will be defined but not fully - // initialized. Since we know we're in Chrome, we'll just detect this case - // explicitly - if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { - return true; - } +/***/ }), - // Internet Explorer and Edge do not support colors. - if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } +/***/ 8133: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - // Is webkit? 
http://stackoverflow.com/a/16459606/376773 - // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 - return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || - // Is firebug? http://stackoverflow.com/a/398120/376773 - (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || - // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || - // Double check webkit in userAgent just in case we are in a worker - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(8171)); +const https_1 = __nccwpck_require__(5687); +const node_fetch_1 = __importDefault(__nccwpck_require__(467)); +const querystring_1 = __importDefault(__nccwpck_require__(3477)); +const is_stream_1 = __importDefault(__nccwpck_require__(1554)); +const url_1 = __nccwpck_require__(7310); +const common_1 = __nccwpck_require__(6129); +const retry_1 = __nccwpck_require__(1052); +const https_proxy_agent_1 = __nccwpck_require__(4522); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +let HttpsProxyAgent; +function loadProxy() { + var _a, _b, _c, _d; + const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? 
void 0 : _d.http_proxy); + if (proxy) { + HttpsProxyAgent = https_proxy_agent_1.HttpsProxyAgent; + } + return proxy; +} +loadProxy(); +function skipProxy(url) { + var _a; + const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy; + if (!noProxyEnv) { + return false; + } + const noProxyUrls = noProxyEnv.split(','); + const parsedURL = new url_1.URL(url); + return !!noProxyUrls.find(url => { + if (url.startsWith('*.') || url.startsWith('.')) { + url = url.replace(/^\*\./, '.'); + return parsedURL.hostname.endsWith(url); + } + else { + return url === parsedURL.origin || url === parsedURL.hostname; + } + }); +} +// Figure out if we should be using a proxy. Only if it's required, load +// the https-proxy-agent module as it adds startup cost. +function getProxy(url) { + // If there is a match between the no_proxy env variables and the url, then do not proxy + if (skipProxy(url)) { + return undefined; + // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy + } + else { + return loadProxy(); + } +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + this.agentCache = new Map(); + this.defaults = defaults || {}; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = this.validateOpts(opts); + return this._request(opts); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _a; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_a = err.config) === null || _a === void 0 ? 
void 0 : _a.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * Validates the options, and merges them with defaults. + * @param opts The original options passed from the client. + */ + validateOpts(options) { + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = getProxy(opts.url); + if (proxy) { + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + // Proxy is being used in conjunction with mTLS. 
+ if (opts.cert && opts.key) { + const parsedURL = new url_1.URL(proxy); + opts.agent = new HttpsProxyAgent({ + port: parsedURL.port, + host: parsedURL.host, + protocol: parsedURL.protocol, + cert: opts.cert, + key: opts.key, + }); + } + else { + opts.agent = new HttpsProxyAgent(proxy); + } + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS: + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } } +exports.Gaxios = Gaxios; +//# sourceMappingURL=gaxios.js.map -/** - * Colorize log arguments if enabled. - * - * @api public - */ - -function formatArgs(args) { - args[0] = (this.useColors ? '%c' : '') + - this.namespace + - (this.useColors ? ' %c' : ' ') + - args[0] + - (this.useColors ? 
'%c ' : ' ') + - '+' + module.exports.humanize(this.diff); - - if (!this.useColors) { - return; - } - - const c = 'color: ' + this.color; - args.splice(1, 0, c, 'color: inherit'); +/***/ }), - // The final "%c" is somewhat tricky, because there could be other - // arguments passed either before or after the %c, so we need to - // figure out the correct index to insert the CSS into - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, match => { - if (match === '%%') { - return; - } - index++; - if (match === '%c') { - // We only are interested in the *last* %c - // (the user may have provided their own) - lastC = index; - } - }); +/***/ 9555: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - args.splice(lastC, 0, c); -} +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +const gaxios_1 = __nccwpck_require__(8133); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(6129); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); /** - * Invokes `console.debug()` when available. - * No-op when `console.debug` is not a "function". - * If `console.debug` is not available, falls back - * to `console.log`. - * - * @api public + * The default instance used when the `request` method is directly + * invoked. */ -exports.log = console.debug || console.log || (() => {}); - +exports.instance = new gaxios_1.Gaxios(); /** - * Save `namespaces`. - * - * @param {String} namespaces - * @api private + * Make an HTTP request using the given options. + * @param opts Options for the request */ -function save(namespaces) { - try { - if (namespaces) { - exports.storage.setItem('debug', namespaces); - } else { - exports.storage.removeItem('debug'); - } - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +async function request(opts) { + return exports.instance.request(opts); } +exports.request = request; +//# sourceMappingURL=index.js.map -/** - * Load `namespaces`. - * - * @return {String} returns the previously persisted debug modes - * @api private - */ -function load() { - let r; - try { - r = exports.storage.getItem('debug'); - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } - - // If debug isn't set in LS, and we're in Electron, try to load $DEBUG - if (!r && typeof process !== 'undefined' && 'env' in process) { - r = process.env.DEBUG; - } +/***/ }), - return r; -} +/***/ 1052: +/***/ ((__unused_webpack_module, exports) => { -/** - * Localstorage attempts to return the localstorage. - * - * This is necessary because safari throws - * when a user disables cookies/localstorage - * and you attempt to access it. 
- * - * @return {LocalStorage} - * @api private - */ +"use strict"; -function localstorage() { - try { - // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context - // The Browser also has localStorage in the global context. - return localStorage; - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = void 0; +async function getRetryConfig(err) { + var _a; + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((2^c - 1 / 2) * 1000) + const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? 
config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; } - -module.exports = __nccwpck_require__(6243)(exports); - -const {formatters} = module.exports; - +exports.getRetryConfig = getRetryConfig; /** - * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. */ - -formatters.j = function (v) { - try { - return JSON.stringify(v); - } catch (error) { - return '[UnexpectedJSONParseError]: ' + error.message; - } -}; - - -/***/ }), - -/***/ 6243: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - - +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} /** - * This is the common logic for both the Node.js and web browser - * implementations of `debug()`. + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +//# sourceMappingURL=retry.js.map -function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = __nccwpck_require__(900); - createDebug.destroy = destroy; - - Object.keys(env).forEach(key => { - createDebug[key] = env[key]; - }); - - /** - * The currently active debug mode names, and names to skip. - */ - - createDebug.names = []; - createDebug.skips = []; - - /** - * Map of special "%n" handling functions, for the debug "format" argument. - * - * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
- */ - createDebug.formatters = {}; - - /** - * Selects a color for a debug namespace - * @param {String} namespace The namespace string for the debug instance to be colored - * @return {Number|String} An ANSI color code for the given namespace - * @api private - */ - function selectColor(namespace) { - let hash = 0; - - for (let i = 0; i < namespace.length; i++) { - hash = ((hash << 5) - hash) + namespace.charCodeAt(i); - hash |= 0; // Convert to 32bit integer - } - - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; - } - createDebug.selectColor = selectColor; - - /** - * Create a debugger with the given `namespace`. - * - * @param {String} namespace - * @return {Function} - * @api public - */ - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - - function debug(...args) { - // Disabled? - if (!debug.enabled) { - return; - } - - const self = debug; - - // Set `diff` timestamp - const curr = Number(new Date()); - const ms = curr - (prevTime || curr); - self.diff = ms; - self.prev = prevTime; - self.curr = curr; - prevTime = curr; - - args[0] = createDebug.coerce(args[0]); - - if (typeof args[0] !== 'string') { - // Anything else let's inspect with %O - args.unshift('%O'); - } - - // Apply any `formatters` transformations - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - // If we encounter an escaped % then don't increase the array index - if (match === '%%') { - return '%'; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === 'function') { - const val = args[index]; - match = formatter.call(self, val); - - // Now we need to remove `args[index]` since it's inlined in the `format` - args.splice(index, 1); - index--; - } - return match; - }); - - // Apply env-specific formatting (colors, etc.) - createDebug.formatArgs.call(self, args); - - const logFn = self.log || createDebug.log; - logFn.apply(self, args); - } - - debug.namespace = namespace; - debug.useColors = createDebug.useColors(); - debug.color = createDebug.selectColor(namespace); - debug.extend = extend; - debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. - - Object.defineProperty(debug, 'enabled', { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); - } - - return enabledCache; - }, - set: v => { - enableOverride = v; - } - }); - - // Env-specific initialization logic for debug instances - if (typeof createDebug.init === 'function') { - createDebug.init(debug); - } - - return debug; - } - - function extend(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; - } - - /** - * Enables a debug mode by namespaces. This can include modes - * separated by a colon and wildcards. - * - * @param {String} namespaces - * @api public - */ - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - - createDebug.names = []; - createDebug.skips = []; - - let i; - const split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); - const len = split.length; - - for (i = 0; i < len; i++) { - if (!split[i]) { - // ignore empty strings - continue; - } - - namespaces = split[i].replace(/\*/g, '.*?'); - - if (namespaces[0] === '-') { - createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); - } else { - createDebug.names.push(new RegExp('^' + namespaces + '$')); - } - } - } - - /** - * Disable debug output. - * - * @return {String} namespaces - * @api public - */ - function disable() { - const namespaces = [ - ...createDebug.names.map(toNamespace), - ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) - ].join(','); - createDebug.enable(''); - return namespaces; - } - - /** - * Returns true if the given mode name is enabled, false otherwise. - * - * @param {String} name - * @return {Boolean} - * @api public - */ - function enabled(name) { - if (name[name.length - 1] === '*') { - return true; - } - - let i; - let len; - - for (i = 0, len = createDebug.skips.length; i < len; i++) { - if (createDebug.skips[i].test(name)) { - return false; - } - } +/***/ }), - for (i = 0, len = createDebug.names.length; i < len; i++) { - if (createDebug.names[i].test(name)) { - return true; - } - } +/***/ 1980: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return false; - } +"use strict"; - /** - * Convert regexp to namespace - * - * @param {RegExp} regxep - * @return {String} namespace - * @api private - */ - function toNamespace(regexp) { - return regexp.toString() - .substring(2, regexp.toString().length - 2) - .replace(/\.\*\?$/, '*'); - } +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6318); +//# sourceMappingURL=util.js.map - /** - * Coerce `val`. - * - * @param {Mixed} val - * @return {Mixed} - * @api private - */ - function coerce(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; - } +/***/ }), - /** - * XXX DO NOT USE. This is a temporary stub function. - * XXX It WILL be removed in the next major release. - */ - function destroy() { - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } +/***/ 9910: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - createDebug.enable(createDebug.load()); +"use strict"; - return createDebug; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); } - -module.exports = setup; - +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map /***/ }), -/***/ 8237: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 7863: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -/** - * Detect Electron renderer / nwjs process, which is node, but we should - * treat as a browser. - */ +"use strict"; -if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { - module.exports = __nccwpck_require__(8222); -} else { - module.exports = __nccwpck_require__(4874); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const http = __importStar(__nccwpck_require__(3685)); +__exportStar(__nccwpck_require__(9910), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + if (socket instanceof http.Agent) { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, cb); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 
'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } } - +exports.Agent = Agent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 4874: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 4522: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(1808)); +const tls = __importStar(__nccwpck_require__(4404)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const agent_base_1 = __nccwpck_require__(7863); +const url_1 = __nccwpck_require__(7310); +const parse_proxy_response_1 = __nccwpck_require__(7224); +const debug = (0, debug_1.default)('https-proxy-agent'); /** - * Module dependencies. + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + const servername = this.connectOpts.servername || this.connectOpts.host; + socket = tls.connect({ + ...this.connectOpts, + servername: servername && net.isIP(servername) ? undefined : servername, + }); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? undefined : servername, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. 
+ s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map -const tty = __nccwpck_require__(6224); -const util = __nccwpck_require__(3837); +/***/ }), -/** - * This is the Node.js implementation of `debug()`. - */ +/***/ 7224: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -exports.init = init; -exports.log = log; -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.destroy = util.deprecate( - () => {}, - 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' -); +"use strict"; -/** - * Colors. - */ +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + 
current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map -exports.colors = [6, 2, 3, 4, 5, 1]; +/***/ }), -try { - // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) - // eslint-disable-next-line import/no-extraneous-dependencies - const supportsColor = __nccwpck_require__(9318); +/***/ 1904: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } -} catch (error) { - // Swallow - we only care if `supports-color` is available; it doesn't have to be. -} +"use strict"; /** - * Build up the default `inspectOpts` object from the environment variables. + * Copyright 2022 Google LLC * - * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - -exports.inspectOpts = Object.keys(process.env).filter(key => { - return /^debug_/i.test(key); -}).reduce((obj, key) => { - // Camel-case - const prop = key - .substring(6) - .toLowerCase() - .replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); - - // Coerce string value into JS value - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === 'null') { - val = null; - } else { - val = Number(val); - } - - obj[prop] = val; - return obj; -}, {}); - +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); /** - * Is stdout a TTY? Colored output is enabled when `true`. + * Known paths unique to Google Compute Engine Linux instances */ - -function useColors() { - return 'colors' in exports.inspectOpts ? 
- Boolean(exports.inspectOpts.colors) : - tty.isatty(process.stderr.fd); +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; } - +exports.isGoogleCloudServerless = isGoogleCloudServerless; /** - * Adds ANSI color escape codes if enabled. + * Determines if the process is running on a Linux Google Compute Engine instance. * - * @api public - */ - -function formatArgs(args) { - const {namespace: name, useColors} = this; - - if (useColors) { - const c = this.color; - const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); - const prefix = ` ${colorCode};1m${name} \u001B[0m`; - - args[0] = prefix + args[0].split('\n').join('\n' + prefix); - args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); - } else { - args[0] = getDate() + name + ' ' + args[0]; - } -} - -function getDate() { - if (exports.inspectOpts.hideDate) { - return ''; - } - return new Date().toISOString() + ' '; -} - -/** - * Invokes `util.format()` with the specified arguments and writes to stderr. + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. */ - -function log(...args) { - return process.stderr.write(util.format(...args) + '\n'); +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } } - +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; /** - * Save `namespaces`. + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. * - * @param {String} namespaces - * @api private + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. */ -function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; - } else { - // If you set a process.env field to null or undefined, it gets cast to the - // string 'null' or 'undefined'. 
Just delete instead. - delete process.env.DEBUG; - } +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; } - +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; /** - * Load `namespaces`. + * Determines if the process is running on a Google Compute Engine instance. * - * @return {String} returns the previously persisted debug modes - * @api private + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. */ - -function load() { - return process.env.DEBUG; +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); } - +exports.isGoogleComputeEngine = isGoogleComputeEngine; /** - * Init logic for `debug` instances. + * Determines if the process is running on Google Cloud Platform. * - * Create a new `inspectOpts` object in case `useColors` is set - * differently for a particular `debug` instance. + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. */ - -function init(debug) { - debug.inspectOpts = {}; - - const keys = Object.keys(exports.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; - } +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); } +exports.detectGCPResidency = detectGCPResidency; +//# sourceMappingURL=gcp-residency.js.map -module.exports = __nccwpck_require__(6243)(exports); - -const {formatters} = module.exports; +/***/ }), -/** - * Map %o to `util.inspect()`, all on a single line. - */ +/***/ 3563: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -formatters.o = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts) - .split('\n') - .map(str => str.trim()) - .join(' '); -}; +"use strict"; /** - * Map %O to `util.inspect()`, allowing multiple lines if needed. + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT */ - -formatters.O = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); +exports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const jsonBigint = __nccwpck_require__(5031); +const gcp_residency_1 = __nccwpck_require__(1904); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 6599: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var stream = __nccwpck_require__(1642) -var eos = __nccwpck_require__(1205) -var inherits = __nccwpck_require__(4124) -var shift = __nccwpck_require__(6121) - -var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) - ? Buffer.from([0]) - : new Buffer([0]) - -var onuncork = function(self, fn) { - if (self._corked) self.once('uncork', fn) - else fn() -} - -var autoDestroy = function (self, err) { - if (self._autoDestroy) self.destroy(err) -} - -var destroyer = function(self, end) { - return function(err) { - if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) - else if (end && !self._ended) self.end() - } -} - -var end = function(ws, fn) { - if (!ws) return fn() - if (ws._writableState && ws._writableState.finished) return fn() - if (ws._writableState) return ws.end(fn) - ws.end() - fn() -} - -var noop = function() {} - -var toStreams2 = function(rs) { - return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) -} - -var Duplexify = function(writable, readable, opts) { - if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) - stream.Duplex.call(this, opts) - - this._writable = null - this._readable = null - this._readable2 = null - - this._autoDestroy = !opts || opts.autoDestroy !== false - this._forwardDestroy = !opts || opts.destroy !== false - this._forwardEnd = !opts || opts.end !== false - this._corked = 1 // start corked - this._ondrain = null - this._drained = false - this._forwarding = false - this._unwrite = null - this._unread = null - this._ended = false - - this.destroyed = false - - if (writable) this.setWritable(writable) - if (readable) this.setReadable(readable) -} - -inherits(Duplexify, stream.Duplex) - -Duplexify.obj = function(writable, readable, opts) { - if (!opts) opts = {} - opts.objectMode = true - opts.highWaterMark = 16 - return new Duplexify(writable, readable, opts) -} - -Duplexify.prototype.cork = function() { - if (++this._corked === 1) this.emit('cork') -} - -Duplexify.prototype.uncork = function() { - if (this._corked && --this._corked === 0) this.emit('uncork') -} - -Duplexify.prototype.setWritable = function(writable) { - if (this._unwrite) this._unwrite() - - if (this.destroyed) { - if (writable && writable.destroy) writable.destroy() - return - } - - if (writable === null || writable === false) { - this.end() - return - } - - var self = this - var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) - - var ondrain = function() { - var ondrain = self._ondrain - self._ondrain = null - if (ondrain) ondrain() - } - - var clear = function() { - self._writable.removeListener('drain', ondrain) - unend() - } - - if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks - - this._writable = writable - this._writable.on('drain', ondrain) - this._unwrite = clear - - this.uncork() // always uncork setWritable -} - -Duplexify.prototype.setReadable = function(readable) { - if (this._unread) this._unread() - - if (this.destroyed) { - if (readable && readable.destroy) readable.destroy() - return - } - - if (readable === null || readable === false) { - this.push(null) - this.resume() - return - } - - var self = this - var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) - - var onreadable = function() { - self._forward() - } - - var onend = function() { - self.push(null) - } - - var clear = function() { - self._readable2.removeListener('readable', onreadable) - self._readable2.removeListener('end', onend) - unend() - } - - this._drained = true - this._readable = readable - this._readable2 = readable._readableState ? 
readable : toStreams2(readable) - this._readable2.on('readable', onreadable) - this._readable2.on('end', onend) - this._unread = clear - - this._forward() -} - -Duplexify.prototype._read = function() { - this._drained = true - this._forward() + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; } - -Duplexify.prototype._forward = function() { - if (this._forwarding || !this._readable2 || !this._drained) return - this._forwarding = true - - var data - - while (this._drained && (data = shift(this._readable2)) !== null) { - if (this.destroyed) continue - this._drained = this.push(data) - } - - this._forwarding = false +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); } - -Duplexify.prototype.destroy = function(err, cb) { - if (!cb) cb = noop - if (this.destroyed) return cb(null) - this.destroyed = true - - var self = this - process.nextTick(function() { - self._destroy(err) - cb(null) - }) +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + try { + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const res = await requestMethod({ + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + } + else if (!res.data) { + throw new Error('Invalid response from the metadata service'); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; + } + catch (e) { + const err = e; + if (err.response && err.response.status !== 200) { + err.message = `Unsuccessful response status code. 
${err.message}`; + } + throw e; + } } - -Duplexify.prototype._destroy = function(err) { - if (err) { - var ondrain = this._ondrain - this._ondrain = null - if (ondrain) ondrain(err) - else this.emit('error', err) - } - - if (this._forwardDestroy) { - if (this._readable && this._readable.destroy) this._readable.destroy() - if (this._writable && this._writable.destroy) this._writable.destroy() - } - - this.emit('close') +async function fastFailMetadataRequest(options) { + const secondaryOptions = { + ...options, + url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. + // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); } - -Duplexify.prototype._write = function(data, enc, cb) { - if (this.destroyed) return - if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) - if (data === SIGNAL_FLUSH) return this._finish(cb) - if (!this._writable) return cb() - - if (this._writable.write(data) === false) this._ondrain = cb - else if (!this.destroyed) cb() +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); } - -Duplexify.prototype._finish = function(cb) { - var self = this - this.emit('preend') - onuncork(this, function() { - end(self._forwardEnd && self._writable, function() { - // haxx to not emit prefinish twice - if (self._writableState.prefinished === false) self._writableState.prefinished = true - self.emit('prefinish') - onuncork(self, cb) - }) - }) +exports.instance = instance; +/** + * Obtain metadata for the current GCP project. 
+ * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); } - -Duplexify.prototype.end = function(data, enc, cb) { - if (typeof data === 'function') return this.end(null, null, data) - if (typeof enc === 'function') return this.end(data, null, enc) - this._ended = true - if (data) this.write(data) - if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) - return stream.Writable.prototype.end.call(this, cb) +exports.project = project; +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe_domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); } - -module.exports = Duplexify - - -/***/ }), - -/***/ 1728: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var Buffer = (__nccwpck_require__(1867).Buffer); - -var getParamBytesForAlg = __nccwpck_require__(528); - -var MAX_OCTET = 0x80, - CLASS_UNIVERSAL = 0, - PRIMITIVE_BIT = 0x20, - TAG_SEQ = 0x10, - TAG_INT = 0x02, - ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), - ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); - -function base64Url(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); +exports.universe = universe; +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; } - -function signatureAsBuffer(signature) { - if (Buffer.isBuffer(signature)) { - return signature; - } else if ('string' === typeof signature) { - return Buffer.from(signature, 'base64'); - } - - throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +exports.bulk = bulk; +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? 
Number(process.env.DETECT_GCP_RETRIES) + : 0; } - -function derToJose(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); - - // the DER encoded param should at most be the param size, plus a padding - // zero, since due to being a signed integer - var maxEncodedParamLength = paramBytes + 1; - - var inputLength = signature.length; - - var offset = 0; - if (signature[offset++] !== ENCODED_TAG_SEQ) { - throw new Error('Could not find expected "seq"'); - } - - var seqLength = signature[offset++]; - if (seqLength === (MAX_OCTET | 1)) { - seqLength = signature[offset++]; - } - - if (inputLength - offset < seqLength) { - throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); - } - - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "r"'); - } - - var rLength = signature[offset++]; - - if (inputLength - offset - 2 < rLength) { - throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); - } - - if (maxEncodedParamLength < rLength) { - throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } - - var rOffset = offset; - offset += rLength; - - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "s"'); - } - - var sLength = signature[offset++]; - - if (inputLength - offset !== sLength) { - throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); - } - - if (maxEncodedParamLength < sLength) { - throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } - - var sOffset = offset; - offset += sLength; - - if (offset !== inputLength) { - throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); - } - - var rPadding = paramBytes - rLength, - sPadding = paramBytes - sLength; - - var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); - - for (offset = 0; offset < rPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); - - offset = paramBytes; - - for (var o = offset; offset < o + sPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); - - dst = dst.toString('base64'); - dst = base64Url(dst); - - return dst; +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. 
+ if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } } - -function countPadding(buf, start, stop) { - var padding = 0; - while (start + padding < stop && buf[start + padding] === 0) { - ++padding; - } - - var needsSign = buf[start + padding] >= MAX_OCTET; - if (needsSign) { - --padding; - } - - return padding; +exports.isAvailable = isAvailable; +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; } - -function joseToDer(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); - - var signatureBytes = signature.length; - if (signatureBytes !== paramBytes * 2) { - throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); - } - - var rPadding = countPadding(signature, 0, paramBytes); - var sPadding = countPadding(signature, paramBytes, signature.length); - var rLength = paramBytes - rPadding; - var sLength = paramBytes - sPadding; - - var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; - - var shortLength = rsBytes < MAX_OCTET; - - var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); - - var offset = 0; - dst[offset++] = ENCODED_TAG_SEQ; - if (shortLength) { - // Bit 8 has value "0" - // bits 7-1 give the length. - dst[offset++] = rsBytes; - } else { - // Bit 8 of first octet has value "1" - // bits 7-1 give the number of additional length octets. - dst[offset++] = MAX_OCTET | 1; - // length, base 256 - dst[offset++] = rsBytes & 0xff; - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = rLength; - if (rPadding < 0) { - dst[offset++] = 0; - offset += signature.copy(dst, offset, 0, paramBytes); - } else { - offset += signature.copy(dst, offset, rPadding, paramBytes); - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = sLength; - if (sPadding < 0) { - dst[offset++] = 0; - signature.copy(dst, offset, paramBytes); - } else { - signature.copy(dst, offset, paramBytes + sPadding); - } - - return dst; +exports.resetIsAvailableCache = resetIsAvailableCache; +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. 
+ * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; } - -module.exports = { - derToJose: derToJose, - joseToDer: joseToDer -}; - - -/***/ }), - -/***/ 528: -/***/ ((module) => { - -"use strict"; - - -function getParamSize(keySize) { - var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); - return result; +exports.getGCPResidency = getGCPResidency; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); } - -var paramBytesForAlg = { - ES256: getParamSize(256), - ES384: getParamSize(384), - ES512: getParamSize(521) -}; - -function getParamBytesForAlg(alg) { - var paramBytes = paramBytesForAlg[alg]; - if (paramBytes) { - return paramBytes; - } - - throw new Error('Unknown algorithm "' + alg + '"'); +exports.setGCPResidency = setGCPResidency; +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; } - -module.exports = getParamBytesForAlg; - +exports.requestTimeout = requestTimeout; +__exportStar(__nccwpck_require__(1904), exports); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 1205: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var once = __nccwpck_require__(1223); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); +/***/ 4627: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; +"use strict"; -module.exports = eos; +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(2361); +const transporters_1 = __nccwpck_require__(2649); +const util_1 = __nccwpck_require__(8905); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? 
_a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. + */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } +} +exports.AuthClient = AuthClient; /***/ }), -/***/ 5771: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var punycode = __nccwpck_require__(5477); -var entities = __nccwpck_require__(2077); +/***/ 1569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = decode; +"use strict"; -function decode (str) { - if (typeof str !== 'string') { - throw new TypeError('Expected a String'); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(1754); +const baseexternalclient_1 = __nccwpck_require__(7391); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. 
+ * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + this.environmentId = options.credential_source.environment_id; + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + this.regionUrl = options.credential_source.region_url; + // This is only required if AWS security credentials are not available in + // environment variables. + this.securityCredentialsUrl = options.credential_source.url; + this.regionalCredVerificationUrl = + options.credential_source.regional_cred_verification_url; + this.imdsV2SessionTokenUrl = + options.credential_source.imdsv2_session_token_url; + this.awsRequestSigner = null; + this.region = ''; + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); } - - return str.replace(/&(#?[^;\W]+;?)/g, function (_, match) { - var m; - if (m = /^#(\d+);?$/.exec(match)) { - return punycode.ucs2.encode([ parseInt(m[1], 10) ]); - } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) { - return punycode.ucs2.encode([ parseInt(m[1], 16) ]); - } else { - // named entity - var hasSemi = /;$/.test(match); - var withoutSemi = hasSemi ? match.replace(/;$/, '') : match; - var target = entities[withoutSemi] || (hasSemi && entities[match]); - - if (typeof target === 'number') { - return punycode.ucs2.encode([ target ]); - } else if (typeof target === 'string') { - return target; - } else { - return '&' + match; + validateEnvironmentId() { + var _a; + const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this uses a serialized AWS signed request to the STS GetCallerIdentity + * endpoint. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + * @return A promise that resolves with the external subject token. 
+ */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + const metadataHeaders = {}; + // Only retrieve the IMDSv2 session token if both the security credentials and region are + // not retrievable through the environment. + // The credential config contains all the URLs by default but clients may be running this + // where the metadata server is not available and returning the credentials through the environment. + // Removing this check may break them. + if (this.shouldUseMetadataServer() && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await this.getImdsV2SessionToken(); } + this.region = await this.getAwsRegion(metadataHeaders); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (this.securityCredentialsFromEnv) { + return this.securityCredentialsFromEnv; + } + // Since the role on a VM can change, we don't need to cache it. + const roleName = await this.getAwsRoleName(metadataHeaders); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + }, this.region); } - }); + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. + const options = await this.awsRequestSigner.getRequestOptions({ + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } + /** + * @return A promise that resolves with the IMDSv2 Session Token. + */ + async getImdsV2SessionToken() { + const opts = { + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the current AWS region. 
+ */ + async getAwsRegion(headers) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (this.regionFromEnv) { + return this.regionFromEnv; + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ + async getAwsRoleName(headers) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ + async getAwsSecurityCredentials(roleName, headers) { + const response = await this.transporter.request({ + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; + } + shouldUseMetadataServer() { + // The metadata server must be used when either the AWS region or AWS security + // credentials cannot be retrieved through their defined environment variables. + return !this.regionFromEnv || !this.securityCredentialsFromEnv; + } + get regionFromEnv() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. + return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); + } + get securityCredentialsFromEnv() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; + } } +exports.AwsClient = AwsClient; +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; /***/ }), -/***/ 6521: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var punycode = __nccwpck_require__(5477); -var revEntities = __nccwpck_require__(3123); +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(8043); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? 
{ 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. + * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} -module.exports = encode; -function encode (str, opts) { - if (typeof str !== 'string') { - throw new TypeError('Expected a String'); - } - if (!opts) opts = {}; +/***/ }), - var numeric = true; - if (opts.named) numeric = false; - if (opts.numeric !== undefined) numeric = opts.numeric; +/***/ 7391: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var special = opts.special || { - '"': true, "'": true, - '<': true, '>': true, - '&': true - }; +"use strict"; - var codePoints = punycode.ucs2.decode(str); - var chars = []; - for (var i = 0; i < codePoints.length; i++) { - var cc = codePoints[i]; - var c = punycode.ucs2.encode([ cc ]); - var e = revEntities[cc]; - if (e && (cc >= 127 || special[c]) && !numeric) { - chars.push('&' + (/;$/.test(e) ? e : e + ';')); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(2781); +const authclient_1 = __nccwpck_require__(4627); +const sts = __nccwpck_require__(6308); +const util_1 = __nccwpck_require__(8905); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. + * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(1402); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(4627); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super({ ...options, ...additionalOptions }); + const opts = (0, util_1.originalOrCamelOptions)(options); + if (opts.get('type') !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); } - else if (cc < 32 || cc >= 127 || special[c]) { - chars.push('&#' + cc + ';'); + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = opts.get('token_url'); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; } else { - chars.push(c); + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; } + this.projectNumber = this.getProjectNumber(this.audience); + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. 
+ */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. 
+ */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. 
+ this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedAccessToken; + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? 
this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; } - return chars.join(''); } +exports.BaseExternalAccountClient = BaseExternalAccountClient; /***/ }), -/***/ 1151: +/***/ 6875: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.encode = __nccwpck_require__(6521); -exports.decode = __nccwpck_require__(5771); +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const gcpMetadata = __nccwpck_require__(3563); +const oauth2client_1 = __nccwpck_require__(3936); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. 
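For context on the BaseExternalAccountClient shown above (the STS token exchange plus optional service-account impersonation), here is a minimal usage sketch. It assumes the public google-auth-library entry point ExternalAccountClient.fromJSON and a hypothetical workload identity federation config at ./wif-config.json; it mirrors the getProjectId() usage pattern quoted in the JSDoc above and is an illustrative sketch, not part of the bundled diff.

const {ExternalAccountClient} = require('google-auth-library');

async function main() {
  // Hypothetical workload identity federation configuration file.
  const config = require('./wif-config.json');
  const client = ExternalAccountClient.fromJSON(config);
  client.scopes = ['https://www.googleapis.com/auth/cloud-platform'];
  // Mirrors the pattern documented on getProjectId() above.
  const projectId = await client.getProjectId();
  const res = await client.request({
    url: `https://dns.googleapis.com/dns/v1/projects/${projectId}`,
  });
  console.log(res.data);
}

main().catch(console.error);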
+ */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; /***/ }), -/***/ 4697: -/***/ ((module, exports) => { +/***/ 6270: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/** - * @author Toru Nagashima - * @copyright 2015 Toru Nagashima. All rights reserved. - * See LICENSE file in root directory for full license. - */ - +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); - -/** - * @typedef {object} PrivateData - * @property {EventTarget} eventTarget The event target. - * @property {{type:string}} event The original event object. - * @property {number} eventPhase The current event phase. - * @property {EventTarget|null} currentTarget The current event target. - * @property {boolean} canceled The flag to prevent default. - * @property {boolean} stopped The flag to stop propagation. - * @property {boolean} immediateStopped The flag to stop propagation immediately. - * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. - * @property {number} timeStamp The unix time. - * @private - */ - +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(2781); +const authclient_1 = __nccwpck_require__(4627); +const sts = __nccwpck_require__(6308); /** - * Private data for event wrappers. - * @type {WeakMap} - * @private + * The required token exchange grant_type: rfc8693#section-2.1 */ -const privateData = new WeakMap(); - +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; /** - * Cache for wrapper classes. 
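The Compute client above fetches tokens from the GCE metadata server, so a sketch like the following only works on a metadata-enabled environment (GCE, GKE, Cloud Run, etc.). The scopes and 'default' service account are passed through to the token endpoint as shown in refreshTokenNoCache(); this is an illustrative sketch, not part of the bundled diff.

const {Compute} = require('google-auth-library');

async function main() {
  const client = new Compute({
    // 'default' selects the VM's default service account.
    serviceAccountEmail: 'default',
    scopes: ['https://www.googleapis.com/auth/devstorage.read_only'],
  });
  const {token} = await client.getAccessToken();
  console.log(`Fetched an access token of length ${token.length}`);
}

main().catch(console.error);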
- * @type {WeakMap} - * @private + * The requested token exchange requested_token_type: rfc8693#section-2.1 */ -const wrappers = new WeakMap(); - +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** - * Get private data. - * @param {Event} event The event object to get private data. - * @returns {PrivateData} The private data of the event. - * @private + * The requested token exchange subject_token_type: rfc8693#section-2.1 */ -function pd(event) { - const retv = privateData.get(event); - console.assert( - retv != null, - "'this' is expected an Event object, but got", - event - ); - return retv -} - +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** - * https://dom.spec.whatwg.org/#set-the-canceled-flag - * @param data {PrivateData} private data. + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. */ -function setCancelFlag(data) { - if (data.passiveListener != null) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error( - "Unable to preventDefault inside passive event listener invocation.", - data.passiveListener - ); - } - return - } - if (!data.event.cancelable) { - return - } - - data.canceled = true; - if (typeof data.event.preventDefault === "function") { - data.event.preventDefault(); - } -} - +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; /** - * @see https://dom.spec.whatwg.org/#interface-event - * @private + * Offset to take into account network delays and server clock skews. */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; /** - * The event wrapper. - * @constructor - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Event|{type:string}} event The original event to wrap. + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. */ -function Event(eventTarget, event) { - privateData.set(this, { - eventTarget, - event, - eventPhase: 2, - currentTarget: eventTarget, - canceled: false, - stopped: false, - immediateStopped: false, - passiveListener: null, - timeStamp: event.timeStamp || Date.now(), - }); - - // https://heycam.github.io/webidl/#Unforgeable - Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); - - // Define accessors - const keys = Object.keys(event); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in this)) { - Object.defineProperty(this, key, defineRedirectDescriptor(key)); - } - } -} - -// Should be enumerable, but class methods are not enumerable. -Event.prototype = { - /** - * The type of this event. - * @type {string} - */ - get type() { - return pd(this).event.type - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get target() { - return pd(this).eventTarget - }, - - /** - * The target of this event. 
- * @type {EventTarget} - */ - get currentTarget() { - return pd(this).currentTarget - }, - +class DownscopedClient extends authclient_1.AuthClient { /** - * @returns {EventTarget[]} The composed path of this event. + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. */ - composedPath() { - const currentTarget = pd(this).currentTarget; - if (currentTarget == null) { - return [] + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); } - return [currentTarget] - }, - - /** - * Constant of NONE. - * @type {number} - */ - get NONE() { - return 0 - }, - - /** - * Constant of CAPTURING_PHASE. - * @type {number} - */ - get CAPTURING_PHASE() { - return 1 - }, - - /** - * Constant of AT_TARGET. - * @type {number} - */ - get AT_TARGET() { - return 2 - }, - - /** - * Constant of BUBBLING_PHASE. - * @type {number} - */ - get BUBBLING_PHASE() { - return 3 - }, - - /** - * The target of this event. - * @type {number} - */ - get eventPhase() { - return pd(this).eventPhase - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopPropagation() { - const data = pd(this); - - data.stopped = true; - if (typeof data.event.stopPropagation === "function") { - data.event.stopPropagation(); + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); } - }, - - /** - * Stop event bubbling. - * @returns {void} - */ - stopImmediatePropagation() { - const data = pd(this); - - data.stopped = true; - data.immediateStopped = true; - if (typeof data.event.stopImmediatePropagation === "function") { - data.event.stopImmediatePropagation(); + // Check at least one permission should be defined in each Access Boundary + // Rule. 
+ for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } } - }, - - /** - * The flag to be bubbling. - * @type {boolean} - */ - get bubbles() { - return Boolean(pd(this).event.bubbles) - }, - - /** - * The flag to be cancelable. - * @type {boolean} - */ - get cancelable() { - return Boolean(pd(this).event.cancelable) - }, - - /** - * Cancel this event. - * @returns {void} - */ - preventDefault() { - setCancelFlag(pd(this)); - }, - + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } /** - * The flag to indicate cancellation state. - * @type {boolean} + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. */ - get defaultPrevented() { - return pd(this).canceled - }, - + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } /** - * The flag to be composed. - * @type {boolean} + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } */ - get composed() { - return Boolean(pd(this).event.composed) - }, - + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } /** - * The unix time of this event. - * @type {number} + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. 
*/ - get timeStamp() { - return pd(this).timeStamp - }, - + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } /** - * The target of this event. - * @type {EventTarget} - * @deprecated + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. */ - get srcElement() { - return pd(this).eventTarget - }, - + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. 
+ this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } /** - * The flag to stop event bubbling. - * @type {boolean} - * @deprecated + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. */ - get cancelBubble() { - return pd(this).stopped - }, - set cancelBubble(value) { - if (!value) { - return - } - const data = pd(this); - - data.stopped = true; - if (typeof data.event.cancelBubble === "boolean") { - data.event.cancelBubble = true; - } - }, + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; - /** - * The flag to indicate cancellation state. - * @type {boolean} - * @deprecated - */ - get returnValue() { - return !pd(this).canceled - }, - set returnValue(value) { - if (!value) { - setCancelFlag(pd(this)); - } - }, - /** - * Initialize this event object. But do nothing under event dispatching. - * @param {string} type The event type. - * @param {boolean} [bubbles=false] The flag to be possible to bubble up. - * @param {boolean} [cancelable=false] The flag to be possible to cancel. - * @deprecated - */ - initEvent() { - // Do nothing. - }, -}; +/***/ }), -// `constructor` is not enumerable. -Object.defineProperty(Event.prototype, "constructor", { - value: Event, - configurable: true, - writable: true, -}); +/***/ 1380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// Ensure `event instanceof window.Event` is `true`. -if (typeof window !== "undefined" && typeof window.Event !== "undefined") { - Object.setPrototypeOf(Event.prototype, window.Event.prototype); +"use strict"; - // Make association for wrappers. - wrappers.set(window.Event.prototype, Event); +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEnv = exports.clear = exports.GCPEnv = void 0; +const gcpMetadata = __nccwpck_require__(3563); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; } - -/** - * Get the property descriptor to redirect a given property. 
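A minimal sketch of the DownscopedClient pattern described above: a broker credential with broad access is downscoped with a Credential Access Boundary before handing a short-lived token to a consumer. The bucket name and condition below are hypothetical; the boundary must contain 1-10 rules and each rule at least one permission, as validated in the constructor above.

const {GoogleAuth, DownscopedClient} = require('google-auth-library');

async function main() {
  // Source credential with elevated access (the "token broker" side).
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const sourceClient = await auth.getClient();
  const credentialAccessBoundary = {
    accessBoundary: {
      accessBoundaryRules: [
        {
          availableResource: '//storage.googleapis.com/projects/_/buckets/example-bucket',
          availablePermissions: ['inRole:roles/storage.objectViewer'],
          availabilityCondition: {
            expression:
              "resource.name.startsWith('projects/_/buckets/example-bucket/objects/customer-a')",
          },
        },
      ],
    },
  };
  const downscoped = new DownscopedClient(sourceClient, credentialAccessBoundary);
  const tokenResponse = await downscoped.getAccessToken();
  console.log(tokenResponse.expirationTime);
}

main().catch(console.error);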
- * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to redirect the property. - * @private - */ -function defineRedirectDescriptor(key) { - return { - get() { - return pd(this).event[key] - }, - set(value) { - pd(this).event[key] = value; - }, - configurable: true, - enumerable: true, +exports.clear = clear; +async function getEnv() { + if (envPromise) { + return envPromise; } + envPromise = getEnvMemoized(); + return envPromise; } - -/** - * Get the property descriptor to call a given method property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to call the method property. - * @private - */ -function defineCallDescriptor(key) { - return { - value() { - const event = pd(this).event; - return event[key].apply(event, arguments) - }, - configurable: true, - enumerable: true, +exports.getEnv = getEnv; +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); } - /** - * Define new wrapper class. - * @param {Function} BaseEvent The base wrapper class. - * @param {Object} proto The prototype of the original event. - * @returns {Function} The defined wrapper class. - * @private + * This check only verifies that the environment is running knative. + * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. */ -function defineWrapper(BaseEvent, proto) { - const keys = Object.keys(proto); - if (keys.length === 0) { - return BaseEvent +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; } - - /** CustomEvent */ - function CustomEvent(eventTarget, event) { - BaseEvent.call(this, eventTarget, event); + catch (e) { + return false; } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} - CustomEvent.prototype = Object.create(BaseEvent.prototype, { - constructor: { value: CustomEvent, configurable: true, writable: true }, - }); - // Define accessors. - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in BaseEvent.prototype)) { - const descriptor = Object.getOwnPropertyDescriptor(proto, key); - const isFunc = typeof descriptor.value === "function"; - Object.defineProperty( - CustomEvent.prototype, - key, - isFunc - ? defineCallDescriptor(key) - : defineRedirectDescriptor(key) - ); - } - } +/***/ }), - return CustomEvent -} +/***/ 8749: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
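The environment detection above boils down to a few environment variables plus two metadata-server probes. A standalone sketch of the same decision order, using only the gcp-metadata calls already present in this bundle (the string results simply mirror the GCPEnv values above):

const gcpMetadata = require('gcp-metadata');

async function detectRuntime() {
  if (process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME) return 'APP_ENGINE';
  if (process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET) return 'CLOUD_FUNCTIONS';
  if (await gcpMetadata.isAvailable()) {
    // GKE nodes expose the cluster name as an instance attribute.
    const onGke = await gcpMetadata
      .instance('attributes/cluster-name')
      .then(() => true, () => false);
    if (onGke) return 'KUBERNETES_ENGINE';
    // The Knative/Cloud Run check must run after the Kubernetes check,
    // otherwise it can report a false positive (see the comment above).
    if (process.env.K_CONFIGURATION) return 'CLOUD_RUN';
    return 'COMPUTE_ENGINE';
  }
  return 'NONE';
}

detectRuntime().then(console.log).catch(console.error);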
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; /** - * Get the wrapper class of a given prototype. - * @param {Object} proto The prototype of the original event to get its wrapper. - * @returns {Function} The wrapper class. - * @private + * Defines the response of a 3rd party executable run by the pluggable auth client. */ -function getWrapper(proto) { - if (proto == null || proto === Object.prototype) { - return Event +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. + if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. 
+ if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } } - - let wrapper = wrappers.get(proto); - if (wrapper == null) { - wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); - wrappers.set(proto, wrapper); + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); } - return wrapper -} - -/** - * Wrap a given event to management a dispatching. - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Object} event The event to wrap. - * @returns {Event} The wrapper instance. - * @private - */ -function wrapEvent(eventTarget, event) { - const Wrapper = getWrapper(Object.getPrototypeOf(event)); - return new Wrapper(eventTarget, event) } - +exports.ExecutableResponse = ExecutableResponse; /** - * Get the immediateStopped flag of a given event. - * @param {Event} event The event to get. - * @returns {boolean} The flag to stop propagation immediately. - * @private + * An error thrown by the ExecutableResponse class. */ -function isStopped(event) { - return pd(event).immediateStopped +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } } - +exports.ExecutableResponseError = ExecutableResponseError; /** - * Set the current event phase of a given event. - * @param {Event} event The event to set current target. - * @param {number} eventPhase New event phase. - * @returns {void} - * @private + * An error thrown when the 'version' field in an executable response is missing or invalid. */ -function setEventPhase(event, eventPhase) { - pd(event).eventPhase = eventPhase; +class InvalidVersionFieldError extends ExecutableResponseError { } - +exports.InvalidVersionFieldError = InvalidVersionFieldError; /** - * Set the current target of a given event. - * @param {Event} event The event to set current target. - * @param {EventTarget|null} currentTarget New current target. - * @returns {void} - * @private + * An error thrown when the 'success' field in an executable response is missing or invalid. */ -function setCurrentTarget(event, currentTarget) { - pd(event).currentTarget = currentTarget; +class InvalidSuccessFieldError extends ExecutableResponseError { } - +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; /** - * Set a passive listener of a given event. - * @param {Event} event The event to set current target. - * @param {Function|null} passiveListener New passive listener. - * @returns {void} - * @private + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. 
*/ -function setPassiveListener(event, passiveListener) { - pd(event).passiveListener = passiveListener; +class InvalidExpirationTimeFieldError extends ExecutableResponseError { } - -/** - * @typedef {object} ListenerNode - * @property {Function} listener - * @property {1|2|3} listenerType - * @property {boolean} passive - * @property {boolean} once - * @property {ListenerNode|null} next - * @private - */ - -/** - * @type {WeakMap>} - * @private - */ -const listenersMap = new WeakMap(); - -// Listener types -const CAPTURE = 1; -const BUBBLE = 2; -const ATTRIBUTE = 3; - +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; /** - * Check whether a given value is an object or not. - * @param {any} x The value to check. - * @returns {boolean} `true` if the value is an object. + * An error thrown when the 'token_type' field in an executable response is missing or invalid. */ -function isObject(x) { - return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +class InvalidTokenTypeFieldError extends ExecutableResponseError { } - +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; /** - * Get listeners. - * @param {EventTarget} eventTarget The event target to get. - * @returns {Map} The listeners. - * @private + * An error thrown when the 'code' field in an executable response is missing or invalid. */ -function getListeners(eventTarget) { - const listeners = listenersMap.get(eventTarget); - if (listeners == null) { - throw new TypeError( - "'this' is expected an EventTarget object, but got another value." - ) - } - return listeners +class InvalidCodeFieldError extends ExecutableResponseError { } - +exports.InvalidCodeFieldError = InvalidCodeFieldError; /** - * Get the property descriptor for the event attribute of a given event. - * @param {string} eventName The event name to get property descriptor. - * @returns {PropertyDescriptor} The property descriptor. - * @private + * An error thrown when the 'message' field in an executable response is missing or invalid. */ -function defineEventAttributeDescriptor(eventName) { - return { - get() { - const listeners = getListeners(this); - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - return node.listener - } - node = node.next; - } - return null - }, - - set(listener) { - if (typeof listener !== "function" && !isObject(listener)) { - listener = null; // eslint-disable-line no-param-reassign - } - const listeners = getListeners(this); - - // Traverse to the tail while removing old value. - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - // Remove old value. - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - node = node.next; - } - - // Add new value. - if (listener !== null) { - const newNode = { - listener, - listenerType: ATTRIBUTE, - passive: false, - once: false, - next: null, - }; - if (prev === null) { - listeners.set(eventName, newNode); - } else { - prev.next = newNode; - } - } - }, - configurable: true, - enumerable: true, - } +class InvalidMessageFieldError extends ExecutableResponseError { } - +exports.InvalidMessageFieldError = InvalidMessageFieldError; /** - * Define an event attribute (e.g. `eventTarget.onclick`). 
- * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. - * @param {string} eventName The event name to define. - * @returns {void} + * An error thrown when the subject token in an executable response is missing or invalid. */ -function defineEventAttribute(eventTargetPrototype, eventName) { - Object.defineProperty( - eventTargetPrototype, - `on${eventName}`, - defineEventAttributeDescriptor(eventName) - ); +class InvalidSubjectTokenError extends ExecutableResponseError { } +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; -/** - * Define a custom EventTarget with event attributes. - * @param {string[]} eventNames Event names for event attributes. - * @returns {EventTarget} The custom EventTarget. - * @private - */ -function defineCustomEventTarget(eventNames) { - /** CustomEventTarget */ - function CustomEventTarget() { - EventTarget.call(this); - } - CustomEventTarget.prototype = Object.create(EventTarget.prototype, { - constructor: { - value: CustomEventTarget, - configurable: true, - writable: true, - }, - }); +/***/ }), - for (let i = 0; i < eventNames.length; ++i) { - defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); - } +/***/ 8765: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return CustomEventTarget -} +"use strict"; -/** - * EventTarget. - * - * - This is constructor if no arguments. - * - This is a function which returns a CustomEventTarget constructor if there are arguments. - * - * For example: - * - * class A extends EventTarget {} - * class B extends EventTarget("message") {} - * class C extends EventTarget("message", "error") {} - * class D extends EventTarget(["message", "error"]) {} +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(4627); +const oauth2common_1 = __nccwpck_require__(9510); +const gaxios_1 = __nccwpck_require__(9555); +const stream = __nccwpck_require__(2781); +const baseexternalclient_1 = __nccwpck_require__(7391); +/** + * The credentials JSON file type for external account authorized user clients. */ -function EventTarget() { - /*eslint-disable consistent-return */ - if (this instanceof EventTarget) { - listenersMap.set(this, new Map()); - return - } - if (arguments.length === 1 && Array.isArray(arguments[0])) { - return defineCustomEventTarget(arguments[0]) +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. 
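The ExecutableResponse validation above effectively pins down the JSON contract a pluggable-auth executable has to emit. Two sketches of conforming payloads follow; all values are hypothetical and only illustrate the required fields.

// Successful OIDC response: version, success, a recognised token_type and the
// matching subject token field; expiration_time is in seconds since epoch.
const oidcSuccess = {
  version: 1,
  success: true,
  token_type: 'urn:ietf:params:oauth:token-type:id_token',
  id_token: 'eyJhbGciOi...', // hypothetical subject token
  expiration_time: Math.round(Date.now() / 1000) + 3600,
};

// Unsuccessful response: both code and message are required.
const failure = {
  version: 1,
  success: false,
  code: '401',
  message: 'Caller not authorized.',
};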
+ * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; } - if (arguments.length > 0) { - const types = new Array(arguments.length); - for (let i = 0; i < arguments.length; ++i) { - types[i] = arguments[i]; + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; } - return defineCustomEventTarget(types) } - throw new TypeError("Cannot call a class as a function") - /*eslint-enable consistent-return */ } - -// Should be enumerable, but class methods are not enumerable. -EventTarget.prototype = { +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { /** - * Add a given listener to this event target. - * @param {string} eventName The event name to add. - * @param {Function} listener The listener to add. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
*/ - addEventListener(eventName, listener, options) { - if (listener == null) { - return + constructor(options, additionalOptions) { + super({ ...options, ...additionalOptions }); + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler(options.token_url, this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; } - if (typeof listener !== "function" && !isObject(listener)) { - throw new TypeError("'listener' should be a function or an object.") + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; } - - const listeners = getListeners(this); - const optionsIsObj = isObject(options); - const capture = optionsIsObj - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - const newNode = { - listener, - listenerType, - passive: optionsIsObj && Boolean(options.passive), - once: optionsIsObj && Boolean(options.once), - next: null, + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, }; - - // Set it as the first node if the first node is null. - let node = listeners.get(eventName); - if (node === undefined) { - listeners.set(eventName, newNode); - return + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); } - - // Traverse to the tail while checking duplication.. - let prev = null; - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - // Should ignore duplication. - return - } - prev = node; - node = node.next; + else { + return this.requestAsync(opts); } - - // Add it. - prev.next = newNode; - }, - + } /** - * Remove a given listener from this event target. - * @param {string} eventName The event name to remove. - * @param {Function} listener The listener to remove. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. */ - removeEventListener(eventName, listener, options) { - if (listener == null) { - return + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); } - - const listeners = getListeners(this); - const capture = isObject(options) - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); } - return } - - prev = node; - node = node.next; + throw e; } - }, - + return response; + } /** - * Dispatch a given event. - * @param {Event|{type:string}} event The event to dispatch. - * @returns {boolean} `false` if canceled. + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. */ - dispatchEvent(event) { - if (event == null || typeof event.type !== "string") { - throw new TypeError('"event.type" should be a string.') - } - - // If listeners aren't registered, terminate. - const listeners = getListeners(this); - const eventName = event.type; - let node = listeners.get(eventName); - if (node == null) { - return true - } - - // Since we cannot rewrite several properties, so wrap object. - const wrappedEvent = wrapEvent(this, event); - - // This doesn't process capturing phase and bubbling phase. - // This isn't participating in a tree. - let prev = null; - while (node != null) { - // Remove this listener if it's once - if (node.once) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - // Call this listener - setPassiveListener( - wrappedEvent, - node.passive ? 
node.listener : null - ); - if (typeof node.listener === "function") { - try { - node.listener.call(this, wrappedEvent); - } catch (err) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error(err); - } - } - } else if ( - node.listenerType !== ATTRIBUTE && - typeof node.listener.handleEvent === "function" - ) { - node.listener.handleEvent(wrappedEvent); - } - - // Break if `event.stopImmediatePropagation` was called. - if (isStopped(wrappedEvent)) { - break - } - - node = node.next; + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; } - setPassiveListener(wrappedEvent, null); - setEventPhase(wrappedEvent, 0); - setCurrentTarget(wrappedEvent, null); - - return !wrappedEvent.defaultPrevented - }, -}; - -// `constructor` is not enumerable. -Object.defineProperty(EventTarget.prototype, "constructor", { - value: EventTarget, - configurable: true, - writable: true, -}); - -// Ensure `eventTarget instanceof window.EventTarget` is `true`. -if ( - typeof window !== "undefined" && - typeof window.EventTarget !== "undefined" -) { - Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } } - -exports.defineEventAttribute = defineEventAttribute; -exports.EventTarget = EventTarget; -exports["default"] = EventTarget; - -module.exports = EventTarget -module.exports.EventTarget = module.exports["default"] = EventTarget -module.exports.defineEventAttribute = defineEventAttribute -//# sourceMappingURL=event-target-shim.js.map - - -/***/ }), - -/***/ 8171: -/***/ ((module) => { - -"use strict"; - - -var hasOwn = Object.prototype.hasOwnProperty; -var toStr = Object.prototype.toString; -var defineProperty = Object.defineProperty; -var gOPD = Object.getOwnPropertyDescriptor; - -var isArray = function isArray(arr) { - if (typeof Array.isArray === 'function') { - return Array.isArray(arr); - } - - return toStr.call(arr) === '[object Array]'; -}; - -var isPlainObject = function isPlainObject(obj) { - if (!obj || toStr.call(obj) !== '[object Object]') { - return false; - } - - var hasOwnConstructor = hasOwn.call(obj, 'constructor'); - var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); - // Not own constructor property must be Object - if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { - return false; - } - - // Own properties are enumerated firstly, so to speed up, - // if last one is own, then all properties are own. 
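To round out the ExternalAccountAuthorizedUserClient section above, a hedged usage sketch: assuming GoogleAuth resolves a credentials file whose "type" is "external_account_authorized_user" (the constant defined above) as recent library versions do, and using a hypothetical file path. getRequestHeaders() drives the refresh flow implemented above.

const {GoogleAuth} = require('google-auth-library');

async function main() {
  const auth = new GoogleAuth({
    // Hypothetical path to a JSON file with
    // "type": "external_account_authorized_user".
    keyFilename: './external-account-authorized-user.json',
  });
  const client = await auth.getClient();
  const headers = await client.getRequestHeaders();
  console.log(Object.keys(headers)); // e.g. ['Authorization']
}

main().catch(console.error);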
- var key; - for (key in obj) { /**/ } - - return typeof key === 'undefined' || hasOwn.call(obj, key); -}; - -// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target -var setProperty = function setProperty(target, options) { - if (defineProperty && options.name === '__proto__') { - defineProperty(target, options.name, { - enumerable: true, - configurable: true, - value: options.newValue, - writable: true - }); - } else { - target[options.name] = options.newValue; - } -}; - -// Return undefined instead of __proto__ if '__proto__' is not an own property -var getProperty = function getProperty(obj, name) { - if (name === '__proto__') { - if (!hasOwn.call(obj, name)) { - return void 0; - } else if (gOPD) { - // In early versions of node, obj['__proto__'] is buggy when obj has - // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. - return gOPD(obj, name).value; - } - } - - return obj[name]; -}; - -module.exports = function extend() { - var options, name, src, copy, copyIsArray, clone; - var target = arguments[0]; - var i = 1; - var length = arguments.length; - var deep = false; - - // Handle a deep copy situation - if (typeof target === 'boolean') { - deep = target; - target = arguments[1] || {}; - // skip the boolean and the target - i = 2; - } - if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { - target = {}; - } - - for (; i < length; ++i) { - options = arguments[i]; - // Only deal with non-null/undefined values - if (options != null) { - // Extend the base object - for (name in options) { - src = getProperty(target, name); - copy = getProperty(options, name); - - // Prevent never-ending loop - if (target !== copy) { - // Recurse if we're merging plain objects or arrays - if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { - if (copyIsArray) { - copyIsArray = false; - clone = src && isArray(src) ? src : []; - } else { - clone = src && isPlainObject(src) ? 
src : {}; - } - - // Never move original objects, clone them - setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); - - // Don't bring in undefined values - } else if (typeof copy !== 'undefined') { - setProperty(target, { name: name, newValue: copy }); - } - } - } - } - } - - // Return the modified object - return target; -}; - - -/***/ }), - -/***/ 1917: -/***/ (function() { - -(function(scope) {'use strict'; -function B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.byteOffset,r.byteLength),f.toString(e)}var w=function(r){return Buffer.from(r)};function h(r){for(var e=0,f=Math.min(256*256,r.length+1),n=new Uint16Array(f),i=[],o=0;;){var t=e=f-1){var s=n.subarray(0,o),m=s;if(i.push(String.fromCharCode.apply(null,m)),!t)return i.join("");r=r.subarray(e),e=0,o=0}var a=r[e++];if((a&128)===0)n[o++]=a;else if((a&224)===192){var d=r[e++]&63;n[o++]=(a&31)<<6|d}else if((a&240)===224){var d=r[e++]&63,l=r[e++]&63;n[o++]=(a&31)<<12|d<<6|l}else if((a&248)===240){var d=r[e++]&63,l=r[e++]&63,R=r[e++]&63,c=(a&7)<<18|d<<12|l<<6|R;c>65535&&(c-=65536,n[o++]=c>>>10&1023|55296,c=56320|c&1023),n[o++]=c}}}function F(r){for(var e=0,f=r.length,n=0,i=Math.max(32,f+(f>>>1)+7),o=new Uint8Array(i>>>3<<3);e=55296&&t<=56319){if(e=55296&&t<=56319)continue}if(n+4>o.length){i+=8,i*=1+e/r.length*2,i=i>>>3<<3;var m=new Uint8Array(i);m.set(o),o=m}if((t&4294967168)===0){o[n++]=t;continue}else if((t&4294965248)===0)o[n++]=t>>>6&31|192;else if((t&4294901760)===0)o[n++]=t>>>12&15|224,o[n++]=t>>>6&63|128;else if((t&4292870144)===0)o[n++]=t>>>18&7|240,o[n++]=t>>>12&63|128,o[n++]=t>>>6&63|128;else continue;o[n++]=t&63|128}return o.slice?o.slice(0,n):o.subarray(0,n)}var u="Failed to ",p=function(r,e,f){if(r)throw new Error("".concat(u).concat(e,": the '").concat(f,"' option is unsupported."))};var x=typeof Buffer=="function"&&Buffer.from;var A=x?w:F;function v(){this.encoding="utf-8"}v.prototype.encode=function(r,e){return p(e&&e.stream,"encode","stream"),A(r)};function U(r){var e;try{var f=new Blob([r],{type:"text/plain;charset=UTF-8"});e=URL.createObjectURL(f);var n=new XMLHttpRequest;return n.open("GET",e,!1),n.send(),n.responseText}finally{e&&URL.revokeObjectURL(e)}}var O=!x&&typeof Blob=="function"&&typeof URL=="function"&&typeof URL.createObjectURL=="function",S=["utf-8","utf8","unicode-1-1-utf-8"],T=h;x?T=B:O&&(T=function(r){try{return U(r)}catch(e){return h(r)}});var y="construct 'TextDecoder'",E="".concat(u," ").concat(y,": the ");function g(r,e){p(e&&e.fatal,y,"fatal"),r=r||"utf-8";var f;if(x?f=Buffer.isEncoding(r):f=S.indexOf(r.toLowerCase())!==-1,!f)throw new RangeError("".concat(E," encoding label provided ('").concat(r,"') is invalid."));this.encoding=r,this.fatal=!1,this.ignoreBOM=!1}g.prototype.decode=function(r,e){p(e&&e.stream,"decode","stream");var f;return r instanceof Uint8Array?f=r:r.buffer instanceof ArrayBuffer?f=new Uint8Array(r.buffer):f=new Uint8Array(r),T(f,this.encoding)};scope.TextEncoder=scope.TextEncoder||v;scope.TextDecoder=scope.TextDecoder||g; -}(typeof window !== 'undefined' ? window : (typeof global !== 'undefined' ? 
global : this))); +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; /***/ }), -/***/ 6129: -/***/ ((__unused_webpack_module, exports) => { +/***/ 4381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2021 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -25614,945 +19900,1106 @@ function B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.by // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GaxiosError = void 0; -/* eslint-disable @typescript-eslint/no-explicit-any */ -class GaxiosError extends Error { - constructor(message, options, response) { - super(message); - this.response = response; - this.config = options; - this.code = response.status.toString(); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(7391); +const identitypoolclient_1 = __nccwpck_require__(117); +const awsclient_1 = __nccwpck_require__(1569); +const pluggable_auth_client_1 = __nccwpck_require__(4782); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? 
void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } } } -exports.GaxiosError = GaxiosError; -//# sourceMappingURL=common.js.map +exports.ExternalAccountClient = ExternalAccountClient; + /***/ }), -/***/ 8133: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 695: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2019 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Gaxios = void 0; -const extend_1 = __importDefault(__nccwpck_require__(8171)); -const https_1 = __nccwpck_require__(5687); -const node_fetch_1 = __importDefault(__nccwpck_require__(467)); -const querystring_1 = __importDefault(__nccwpck_require__(3477)); -const is_stream_1 = __importDefault(__nccwpck_require__(1554)); -const url_1 = __nccwpck_require__(7310); -const common_1 = __nccwpck_require__(6129); -const retry_1 = __nccwpck_require__(1052); -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fetch = hasFetch() ? window.fetch : node_fetch_1.default; -function hasWindow() { - return typeof window !== 'undefined' && !!window; -} -function hasFetch() { - return hasWindow() && !!window.fetch; -} -function hasBuffer() { - return typeof Buffer !== 'undefined'; -} -function hasHeader(options, header) { - return !!getHeader(options, header); -} -function getHeader(options, header) { - header = header.toLowerCase(); - for (const key of Object.keys((options === null || options === void 0 ? 
void 0 : options.headers) || {})) { - if (header === key.toLowerCase()) { - return options.headers[key]; +exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(2081); +const fs = __nccwpck_require__(7147); +const gcpMetadata = __nccwpck_require__(3563); +const os = __nccwpck_require__(2037); +const path = __nccwpck_require__(1017); +const crypto_1 = __nccwpck_require__(8043); +const transporters_1 = __nccwpck_require__(2649); +const computeclient_1 = __nccwpck_require__(6875); +const idtokenclient_1 = __nccwpck_require__(298); +const envDetect_1 = __nccwpck_require__(1380); +const jwtclient_1 = __nccwpck_require__(3959); +const refreshclient_1 = __nccwpck_require__(8790); +const impersonated_1 = __nccwpck_require__(1103); +const externalclient_1 = __nccwpck_require__(4381); +const baseexternalclient_1 = __nccwpck_require__(7391); +const authclient_1 = __nccwpck_require__(4627); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(8765); +const util_1 = __nccwpck_require__(8905); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +const GoogleAuthExceptionMessages = { + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisify unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. + * + * @param opts + */ + constructor(opts) { + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + this.clientOptions = {}; + opts = opts || {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.jsonContent = opts.credentials || null; + this.clientOptions = opts.clientOptions || {}; + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; } } - return undefined; -} -let HttpsProxyAgent; -function loadProxy() { - var _a, _b, _c, _d; - const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) || - ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? 
void 0 : _b.https_proxy) || - ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) || - ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy); - if (proxy) { - HttpsProxyAgent = __nccwpck_require__(7219); + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; } - return proxy; -} -loadProxy(); -function skipProxy(url) { - var _a; - const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy; - if (!noProxyEnv) { - return false; + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } } - const noProxyUrls = noProxyEnv.split(','); - const parsedURL = new url_1.URL(url); - return !!noProxyUrls.find(url => { - if (url.startsWith('*.') || url.startsWith('.')) { - url = url.replace(/^\*\./, '.'); - return parsedURL.hostname.endsWith(url); + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /* + * A private method for finding and caching a projectId. + * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; } else { - return url === parsedURL.origin || url === parsedURL.hostname; + throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); } - }); -} -// Figure out if we should be using a proxy. Only if it's required, load -// the https-proxy-agent module as it adds startup cost. 
-function getProxy(url) { - // If there is a match between the no_proxy env variables and the url, then do not proxy - if (skipProxy(url)) { - return undefined; - // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy } - else { - return loadProxy(); + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe_domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. + if (this.cachedCredential) { + return await this.prepareAndCacheADC(this.cachedCredential); + } + // Since this is a 'new' ADC to cache we will use the environment variable + // if it's available. We prefer this value over the value from ADC. + const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. 
+ credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + // set universe domain for Compute client + if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) { + options.universeDomain = + await this.getUniverseDomainFromMetadataServer(); + } + options.scopes = this.getAnyScopes(); + return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); + } + throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); + } + async prepareAndCacheADC(credential, quotaProjectIdOverride) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. 
+ if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; + } + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); + } + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d, _e; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + // Create source client for impersonation + const sourceClient = new refreshclient_1.UserRefreshClient(); + sourceClient.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extreact service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + const client = new impersonated_1.Impersonated({ + ...json, + delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [], + sourceClient: sourceClient, + targetPrincipal: targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? 
targetScopes : [targetScopes], + }); + return client; + } + /** + * Create a credentials instance using the given input options. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. + this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(s); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. 
+ * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options) { + options = options || {}; + const client = new jwtclient_1.JWT(options); + client.fromAPIKey(apiKey); + return client; } -} -class Gaxios { /** - * The Gaxios class is responsible for making HTTP requests. - * @param defaults The default set of options to be used for this instance. + * Determines whether the current operating system is Windows. + * @api private */ - constructor(defaults) { - this.agentCache = new Map(); - this.defaults = defaults || {}; + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; } /** - * Perform an HTTP request with the given options. - * @param opts Set of HTTP options that will be used for this HTTP request. + * Run the Google Cloud SDK command that prints the default project ID */ - async request(opts = {}) { - opts = this.validateOpts(opts); - return this._request(opts); + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); } - async _defaultAdapter(opts) { - const fetchImpl = opts.fetchImplementation || fetch; - const res = (await fetchImpl(opts.url, opts)); - const data = await this.getResponseData(opts, res); - return this.translateResponse(opts, res, data); + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); } /** - * Internal, retryable version of the `request` method. - * @param opts Set of HTTP options that will be used for this HTTP request. + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private */ - async _request(opts = {}) { - try { - let translatedResponse; - if (opts.adapter) { - translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); - } - else { - translatedResponse = await this._defaultAdapter(opts); - } - if (!opts.validateStatus(translatedResponse.status)) { - throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); - } - return translatedResponse; + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; } - catch (e) { - const err = e; - err.config = opts; - const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); - if (shouldRetry && config) { - err.config.retryConfig.currentRetryAttempt = - config.retryConfig.currentRetryAttempt; - return this._request(err.config); + // Ensure the projectId is loaded from the keyFile if available. 
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; } - throw err; + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; } } - async getResponseData(opts, res) { - switch (opts.responseType) { - case 'stream': - return res.body; - case 'json': { - let data = await res.text(); - try { - data = JSON.parse(data); - } - catch (_a) { - // continue - } - return data; - } - case 'arraybuffer': - return res.arrayBuffer(); - case 'blob': - return res.blob(); - default: - return res.text(); + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); } /** - * Validates the options, and merges them with defaults. - * @param opts The original options passed from the client. + * Gets the Compute Engine project ID if it can be inferred. */ - validateOpts(options) { - const opts = (0, extend_1.default)(true, {}, this.defaults, options); - if (!opts.url) { - throw new Error('URL is required.'); + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; } - // baseUrl has been deprecated, remove in 2.0 - const baseUrl = opts.baseUrl || opts.baseURL; - if (baseUrl) { - opts.url = baseUrl + opts.url; + catch (e) { + // Ignore any errors + return null; } - opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; - if (opts.params && Object.keys(opts.params).length > 0) { - let additionalQueryParams = opts.paramsSerializer(opts.params); - if (additionalQueryParams.startsWith('?')) { - additionalQueryParams = additionalQueryParams.slice(1); - } - const prefix = opts.url.includes('?') ? '&' : '?'; - opts.url = opts.url + prefix + additionalQueryParams; + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); } - if (typeof options.maxContentLength === 'number') { - opts.size = options.maxContentLength; + else { + return this.getCredentialsAsync(); } - if (typeof options.maxRedirects === 'number') { - opts.follow = options.maxRedirects; + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; } - opts.headers = opts.headers || {}; - if (opts.data) { - const isFormData = typeof FormData === 'undefined' - ? false - : (opts === null || opts === void 0 ? 
void 0 : opts.data) instanceof FormData; - if (is_stream_1.default.readable(opts.data)) { - opts.body = opts.data; - } - else if (hasBuffer() && Buffer.isBuffer(opts.data)) { - // Do not attempt to JSON.stringify() a Buffer: - opts.body = opts.data; - if (!hasHeader(opts, 'Content-Type')) { - opts.headers['Content-Type'] = 'application/json'; - } - } - else if (typeof opts.data === 'object') { - // If www-form-urlencoded content type has been set, but data is - // provided as an object, serialize the content using querystring: - if (!isFormData) { - if (getHeader(opts, 'content-type') === - 'application/x-www-form-urlencoded') { - opts.body = opts.paramsSerializer(opts.data); - } - else { - // } else if (!(opts.data instanceof FormData)) { - if (!hasHeader(opts, 'Content-Type')) { - opts.headers['Content-Type'] = 'application/json'; - } - opts.body = JSON.stringify(opts.data); - } - } - } - else { - opts.body = opts.data; + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; } } - opts.validateStatus = opts.validateStatus || this.validateStatus; - opts.responseType = opts.responseType || 'json'; - if (!opts.headers['Accept'] && opts.responseType === 'json') { - opts.headers['Accept'] = 'application/json'; + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; } - opts.method = opts.method || 'GET'; - const proxy = getProxy(opts.url); - if (proxy) { - if (this.agentCache.has(proxy)) { - opts.agent = this.agentCache.get(proxy); - } - else { - // Proxy is being used in conjunction with mTLS. - if (opts.cert && opts.key) { - const parsedURL = new url_1.URL(proxy); - opts.agent = new HttpsProxyAgent({ - port: parsedURL.port, - host: parsedURL.host, - protocol: parsedURL.protocol, - cert: opts.cert, - key: opts.key, - }); - } - else { - opts.agent = new HttpsProxyAgent(proxy); - } - this.agentCache.set(proxy, opts.agent); - } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; } - else if (opts.cert && opts.key) { - // Configure client for mTLS: - if (this.agentCache.has(opts.key)) { - opts.agent = this.agentCache.get(opts.key); + throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. 
+ */ + async getClient() { + if (!this.cachedCredential) { + if (this.jsonContent) { + this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + await this.fromStreamAsync(stream, this.clientOptions); } else { - opts.agent = new https_1.Agent({ - cert: opts.cert, - key: opts.key, - }); - this.agentCache.set(opts.key, opts.agent); + await this.getApplicationDefaultAsync(this.clientOptions); } } - return opts; + return this.cachedCredential; } /** - * By default, throw for any non-2xx status code - * @param status status code from the HTTP response + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. */ - validateStatus(status) { - return status >= 200 && status < 300; + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } /** - * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) - * @param params key value pars to encode + * Automatically obtain application default credentials, and return + * an access token for making requests. */ - paramsSerializer(params) { - return querystring_1.default.stringify(params); - } - translateResponse(opts, res, data) { - // headers need to be converted from a map to an obj - const headers = {}; - res.headers.forEach((value, key) => { - headers[key] = value; - }); - return { - config: opts, - data: data, - headers, - status: res.status, - statusText: res.statusText, - // XMLHttpRequestLike - request: { - responseURL: res.url, - }, - }; - } -} -exports.Gaxios = Gaxios; -//# sourceMappingURL=gaxios.js.map - -/***/ }), - -/***/ 9555: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2018 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; -const gaxios_1 = __nccwpck_require__(8133); -Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); -var common_1 = __nccwpck_require__(6129); -Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); -/** - * The default instance used when the `request` method is directly - * invoked. - */ -exports.instance = new gaxios_1.Gaxios(); -/** - * Make an HTTP request using the given options. 
- * @param opts Options for the request - */ -async function request(opts) { - return exports.instance.request(opts); -} -exports.request = request; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 1052: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2018 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRetryConfig = void 0; -async function getRetryConfig(err) { - var _a; - let config = getConfig(err); - if (!err || !err.config || (!config && !err.config.retry)) { - return { shouldRetry: false }; - } - config = config || {}; - config.currentRetryAttempt = config.currentRetryAttempt || 0; - config.retry = - config.retry === undefined || config.retry === null ? 3 : config.retry; - config.httpMethodsToRetry = config.httpMethodsToRetry || [ - 'GET', - 'HEAD', - 'PUT', - 'OPTIONS', - 'DELETE', - ]; - config.noResponseRetries = - config.noResponseRetries === undefined || config.noResponseRetries === null - ? 2 - : config.noResponseRetries; - // If this wasn't in the list of status codes where we want - // to automatically retry, return. - const retryRanges = [ - // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes - // 1xx - Retry (Informational, request still processing) - // 2xx - Do not retry (Success) - // 3xx - Do not retry (Redirect) - // 4xx - Do not retry (Client errors) - // 429 - Retry ("Too Many Requests") - // 5xx - Retry (Server errors) - [100, 199], - [429, 429], - [500, 599], - ]; - config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; - // Put the config back into the err - err.config.retryConfig = config; - // Determine if we should retry the request - const shouldRetryFn = config.shouldRetry || shouldRetryRequest; - if (!(await shouldRetryFn(err))) { - return { shouldRetry: false, config: err.config }; - } - // Calculate time to wait with exponential backoff. - // If this is the first retry, look for a configured retryDelay. - const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; - // Formula: retryDelay + ((2^c - 1 / 2) * 1000) - const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; - // We're going to retry! Incremenent the counter. - err.config.retryConfig.currentRetryAttempt += 1; - // Create a promise that invokes the retry after the backOffDelay - const backoff = config.retryBackoff - ? 
config.retryBackoff(err, delay) - : new Promise(resolve => { - setTimeout(resolve, delay); - }); - // Notify the user if they added an `onRetryAttempt` handler - if (config.onRetryAttempt) { - config.onRetryAttempt(err); + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; } - // Return the promise in which recalls Gaxios to retry the request - await backoff; - return { shouldRetry: true, config: err.config }; -} -exports.getRetryConfig = getRetryConfig; -/** - * Determine based on config if we should retry the request. - * @param err The GaxiosError passed to the interceptor. - */ -function shouldRetryRequest(err) { - const config = getConfig(err); - // node-fetch raises an AbortError if signaled: - // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal - if (err.name === 'AbortError') { - return false; + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); } - // If there's no config, or retries are disabled, return. - if (!config || config.retry === 0) { - return false; + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; } - // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) - if (!err.response && - (config.currentRetryAttempt || 0) >= config.noResponseRetries) { - return false; + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); } - // Only retry with configured HttpMethods. - if (!err.config.method || - config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { - return false; + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); } - // If this wasn't in the list of status codes where we want - // to automatically retry, return. - if (err.response && err.response.status) { - let isInRange = false; - for (const [min, max] of config.statusCodesToRetry) { - const status = err.response.status; - if (status >= min && status <= max) { - isInRange = true; - break; - } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; } - if (!isInRange) { - return false; + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); } + return this.signBlob(crypto, creds.client_email, data, endpoint); } - // If we are out of retry attempts, return - config.currentRetryAttempt = config.currentRetryAttempt || 0; - if (config.currentRetryAttempt >= config.retry) { - return false; + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + }); + return res.data.signedBlob; } - return true; } +exports.GoogleAuth = GoogleAuth; /** - * Acquire the raxConfig object from an GaxiosError if available. - * @param err The Gaxios error with a config object. + * Export DefaultTransporter as a static property of the class. */ -function getConfig(err) { - if (err && err.config && err.config.retryConfig) { - return err.config.retryConfig; - } - return; -} -//# sourceMappingURL=retry.js.map +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; + /***/ }), -/***/ 1904: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9735: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; -const fs_1 = __nccwpck_require__(7147); -const os_1 = __nccwpck_require__(2037); -/** - * Known paths unique to Google Compute Engine Linux instances - */ -exports.GCE_LINUX_BIOS_PATHS = { - BIOS_DATE: '/sys/class/dmi/id/bios_date', - BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', -}; -const GCE_MAC_ADDRESS_REGEX = /^42:01/; -/** - * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). - * - * Uses the: - * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. - * - * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. - */ -function isGoogleCloudServerless() { +exports.IAMAuth = void 0; +class IAMAuth { /** - * `CLOUD_RUN_JOB` is used for Cloud Run Jobs - * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - * `FUNCTION_NAME` is used in older Cloud Functions environments: - * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * IAM credentials. * - * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: - * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + * @param selector the iam authority selector + * @param token the token + * @constructor */ - const isGFEnvironment = process.env.CLOUD_RUN_JOB || - process.env.FUNCTION_NAME || - process.env.K_SERVICE; - return !!isGFEnvironment; -} -exports.isGoogleCloudServerless = isGoogleCloudServerless; -/** - * Determines if the process is running on a Linux Google Compute Engine instance. - * - * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. - */ -function isGoogleComputeEngineLinux() { - if ((0, os_1.platform)() !== 'linux') - return false; - try { - // ensure this file exist - (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); - // ensure this file exist and matches - const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); - return /Google/.test(biosVendor); - } - catch (_a) { - return false; + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; } -} -exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; -/** - * Determines if the process is running on a Google Compute Engine instance with a known - * MAC address. - * - * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. - */ -function isGoogleComputeEngineMACAddress() { - const interfaces = (0, os_1.networkInterfaces)(); - for (const item of Object.values(interfaces)) { - if (!item) - continue; - for (const { mac } of item) { - if (GCE_MAC_ADDRESS_REGEX.test(mac)) { - return true; - } - } + /** + * Acquire the HTTP headers required to make an authenticated request. 
+ */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; } - return false; -} -exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; -/** - * Determines if the process is running on a Google Compute Engine instance. - * - * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. - */ -function isGoogleComputeEngine() { - return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); -} -exports.isGoogleComputeEngine = isGoogleComputeEngine; -/** - * Determines if the process is running on Google Cloud Platform. - * - * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. - */ -function detectGCPResidency() { - return isGoogleCloudServerless() || isGoogleComputeEngine(); } -exports.detectGCPResidency = detectGCPResidency; -//# sourceMappingURL=gcp-residency.js.map +exports.IAMAuth = IAMAuth; + /***/ }), -/***/ 3563: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 117: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. - * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.requestTimeout = exports.setGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.project = exports.instance = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const jsonBigint = __nccwpck_require__(5031); -const gcp_residency_1 = __nccwpck_require__(1904); -exports.BASE_PATH = '/computeMetadata/v1'; -exports.HOST_ADDRESS = 'http://169.254.169.254'; -exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; -exports.HEADER_NAME = 'Metadata-Flavor'; -exports.HEADER_VALUE = 'Google'; -exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +exports.IdentityPoolClient = void 0; +const fs = __nccwpck_require__(7147); +const util_1 = __nccwpck_require__(3837); +const baseexternalclient_1 = __nccwpck_require__(7391); +const util_2 = __nccwpck_require__(8905); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); /** - * Returns the base URL while taking into account the GCE_METADATA_HOST - * environment variable if it exists. - * - * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. */ -function getBaseUrl(baseUrl) { - if (!baseUrl) { - baseUrl = - process.env.GCE_METADATA_IP || - process.env.GCE_METADATA_HOST || - exports.HOST_ADDRESS; - } - // If no scheme is provided default to HTTP: - if (!/^https?:\/\//.test(baseUrl)) { - baseUrl = `http://${baseUrl}`; - } - return new URL(exports.BASE_PATH, baseUrl).href; -} -// Accepts an options object passed from the user to the API. In previous -// versions of the API, it referred to a `Request` or an `Axios` request -// options object. Now it refers to an object with very limited property -// names. This is here to help ensure users don't pass invalid options when -// they upgrade from 0.4 to 0.5 to 0.8. -function validate(options) { - Object.keys(options).forEach(key => { - switch (key) { - case 'params': - case 'property': - case 'headers': - break; - case 'qs': - throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); - default: - throw new Error(`'${key}' is not a valid configuration option.`); +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. 
+ * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_2.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const credentialSourceOpts = (0, util_2.originalOrCamelOptions)(credentialSource); + this.file = credentialSourceOpts.get('file'); + this.url = credentialSourceOpts.get('url'); + this.headers = credentialSourceOpts.get('headers'); + if (this.file && this.url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); } - }); -} -async function metadataAccessor(type, options, noResponseRetries = 3, fastFail = false) { - options = options || {}; - if (typeof options === 'string') { - options = { property: options }; - } - let property = ''; - if (typeof options === 'object' && options.property) { - property = '/' + options.property; - } - validate(options); - try { - const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; - const res = await requestMethod({ - url: `${getBaseUrl()}/${type}${property}`, - headers: Object.assign({}, exports.HEADERS, options.headers), - retryConfig: { noResponseRetries }, - params: options.params, - responseType: 'text', - timeout: requestTimeout(), - }); - // NOTE: node.js converts all incoming headers to lower case. - if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { - throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + else if (this.file && !this.url) { + this.credentialSourceType = 'file'; } - else if (!res.data) { - throw new Error('Invalid response from the metadata service'); + else if (!this.file && this.url) { + this.credentialSourceType = 'url'; } - if (typeof res.data === 'string') { - try { - return jsonBigint.parse(res.data); - } - catch (_a) { - /* ignore */ - } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + const formatOpts = (0, util_2.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. + this.formatType = formatOpts.get('type') || 'text'; + this.formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (this.formatType !== 'json' && this.formatType !== 'text') { + throw new Error(`Invalid credential_source format "${this.formatType}"`); + } + if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); } - return res.data; } - catch (e) { - const err = e; - if (err.response && err.response.status !== 200) { - err.message = `Unsuccessful response status code. ${err.message}`; + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this either retrieves the local credential from a file location (k8s + * workload) or by sending a GET request to a local metadata server (Azure + * workloads). + * @return A promise that resolves with the external subject token. 
+ */ + async retrieveSubjectToken() { + if (this.file) { + return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName); } - throw e; + return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers); } -} -async function fastFailMetadataRequest(options) { - const secondaryOptions = { - ...options, - url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), - }; - // We race a connection between DNS/IP to metadata server. There are a couple - // reasons for this: - // - // 1. the DNS is slow in some GCP environments; by checking both, we might - // detect the runtime environment signficantly faster. - // 2. we can't just check the IP, which is tarpitted and slow to respond - // on a user's local machine. - // - // Additional logic has been added to make sure that we don't create an - // unhandled rejection in scenarios where a failure happens sometime - // after a success. - // - // Note, however, if a failure happens prior to a success, a rejection should - // occur, this is for folks running locally. - // - let responded = false; - const r1 = (0, gaxios_1.request)(options) - .then(res => { - responded = true; - return res; - }) - .catch(err => { - if (responded) { - return r2; + /** + * Looks up the external subject token in the file path provided and + * resolves with that token. + * @param file The file path where the external credential is located. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @return A promise that resolves with the external subject token. + */ + async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = await realpath(filePath); + if (!(await lstat(filePath)).isFile()) { + throw new Error(); + } } - else { - responded = true; + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } throw err; } - }); - const r2 = (0, gaxios_1.request)(secondaryOptions) - .then(res => { - responded = true; - return res; - }) - .catch(err => { - if (responded) { - return r1; + let subjectToken; + const rawText = await readFile(filePath, { encoding: 'utf8' }); + if (formatType === 'text') { + subjectToken = rawText; } - else { - responded = true; - throw err; + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[formatSubjectTokenFieldName]; } - }); - return Promise.race([r1, r2]); -} -/** - * Obtain metadata for the current GCE instance - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function instance(options) { - return metadataAccessor('instance', options); -} -exports.instance = instance; -/** - * Obtain metadata for the current GCP Project. - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function project(options) { - return metadataAccessor('project', options); -} -exports.project = project; -/* - * How many times should we retry detecting GCP environment. 
- */ -function detectGCPAvailableRetries() { - return process.env.DETECT_GCP_RETRIES - ? Number(process.env.DETECT_GCP_RETRIES) - : 0; -} -let cachedIsAvailableResponse; -/** - * Determine if the metadata server is currently available. - */ -async function isAvailable() { - try { - // If a user is instantiating several GCP libraries at the same time, - // this may result in multiple calls to isAvailable(), to detect the - // runtime environment. We use the same promise for each of these calls - // to reduce the network load. - if (cachedIsAvailableResponse === undefined) { - cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), - // If the default HOST_ADDRESS has been overridden, we should not - // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in - // a non-GCP environment): - !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); } - await cachedIsAvailableResponse; - return true; + return subjectToken; } - catch (e) { - const err = e; - if (process.env.DEBUG_AUTH) { - console.info(err); - } - if (err.type === 'request-timeout') { - // If running in a GCP environment, metadata endpoint should return - // within ms. - return false; - } - if (err.response && err.response.status === 404) { - return false; - } - else { - if (!(err.response && err.response.status === 404) && - // A warning is emitted if we see an unexpected err.code, or err.code - // is not populated: - (!err.code || - ![ - 'EHOSTDOWN', - 'EHOSTUNREACH', - 'ENETUNREACH', - 'ENOENT', - 'ENOTFOUND', - 'ECONNREFUSED', - ].includes(err.code))) { - let code = 'UNKNOWN'; - if (err.code) - code = err.code; - process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); - } - // Failure to resolve the metadata service means that it is not available. - return false; + /** + * Sends a GET request to the URL provided and resolves with the returned + * external subject token. + * @param url The URL to call to retrieve the subject token. This is typically + * a local metadata server. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @param headers The optional additional headers to send with the request to + * the metadata server url. + * @return A promise that resolves with the external subject token. + */ + async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) { + const opts = { + url, + method: 'GET', + headers, + responseType: formatType, + }; + let subjectToken; + if (formatType === 'text') { + const response = await this.transporter.request(opts); + subjectToken = response.data; } + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const response = await this.transporter.request(opts); + subjectToken = response.data[formatSubjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; } } -exports.isAvailable = isAvailable; -/** - * reset the memoized isAvailable() lookup. - */ -function resetIsAvailableCache() { - cachedIsAvailableResponse = undefined; -} -exports.resetIsAvailableCache = resetIsAvailableCache; -/** - * A cache for the detected GCP Residency. 
- */ -exports.gcpResidencyCache = null; -/** - * Sets the detected GCP Residency. - * Useful for forcing metadata server detection behavior. - * - * Set `null` to autodetect the environment (default behavior). - */ -function setGCPResidency(value = null) { - exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); -} -exports.setGCPResidency = setGCPResidency; -/** - * Obtain the timeout for requests to the metadata server. - * - * In certain environments and conditions requests can take longer than - * the default timeout to complete. This function will determine the - * appropriate timeout based on the environment. - * - * @returns {number} a request timeout duration in milliseconds. - */ -function requestTimeout() { - // Detecting the residency can be resource-intensive. Let's cache the result. - if (exports.gcpResidencyCache === null) { - exports.gcpResidencyCache = (0, gcp_residency_1.detectGCPResidency)(); - } - return exports.gcpResidencyCache ? 0 : 3000; -} -exports.requestTimeout = requestTimeout; -__exportStar(__nccwpck_require__(1904), exports); -//# sourceMappingURL=index.js.map +exports.IdentityPoolClient = IdentityPoolClient; + /***/ }), -/***/ 4627: +/***/ 298: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2012 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26566,53 +21013,235 @@ __exportStar(__nccwpck_require__(1904), exports); // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AuthClient = void 0; -const events_1 = __nccwpck_require__(2361); -const transporters_1 = __nccwpck_require__(2649); -class AuthClient extends events_1.EventEmitter { - constructor() { - super(...arguments); - this.transporter = new transporters_1.DefaultTransporter(); - this.credentials = {}; - this.eagerRefreshThresholdMillis = 5 * 60 * 1000; - this.forceRefreshOnFailure = false; +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
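// --- Usage sketch (illustrative, not part of the bundled output): constructing
// the file-sourced IdentityPoolClient added above. Audience, token_url and the
// file path are placeholders; real values come from an external-account JSON
// credential file, usually loaded through GoogleAuth rather than directly.
// const { IdentityPoolClient } = require('google-auth-library');
//
// const client = new IdentityPoolClient({
//   type: 'external_account',
//   audience: '//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/pool/providers/provider',
//   subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
//   token_url: 'https://sts.googleapis.com/v1/token',
//   credential_source: { file: '/var/run/service-account/token' }, // or { url, headers }
// });
// // retrieveSubjectToken() reads the file (or URL) as implemented above;
// // getAccessToken() then exchanges it for a GCP access token via STS.
// client.getAccessToken().then(({ token }) => console.log(Boolean(token)));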
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } } +} +exports.IdTokenClient = IdTokenClient; + + +/***/ }), + +/***/ 1103: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +const gaxios_1 = __nccwpck_require__(9555); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { /** - * Sets the auth credentials. + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. 
+ * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. */ - setCredentials(credentials) { - this.credentials = credentials; + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; } /** - * Append additional headers, e.g., x-goog-user-project, shared across the - * classes inheriting AuthClient. This method should be used by any method - * that overrides getRequestMetadataAsync(), which is a shared helper for - * setting request information in both gRPC and HTTP API calls. + * Signs some bytes. * - * @param headers object to append additional headers to. + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * @return denoting the keyyID and signedBlob in base64 string */ - addSharedMetadataHeaders(headers) { - // quota_project_id, stored in application_default_credentials.json, is set in - // the x-goog-user-project header, to indicate an alternate account for - // billing and quota: - if (!headers['x-goog-user-project'] && // don't override a value the user sets. - this.quotaProjectId) { - headers['x-goog-user-project'] = this.quotaProjectId; + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. 
+ * @param refreshToken Unused parameter + */ + async refreshToken(refreshToken) { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; } - return headers; + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data.token; } } -exports.AuthClient = AuthClient; -//# sourceMappingURL=authclient.js.map +exports.Impersonated = Impersonated; + /***/ }), -/***/ 1569: +/***/ 8740: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2015 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26626,254 +21255,193 @@ exports.AuthClient = AuthClient; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AwsClient = void 0; -const awsrequestsigner_1 = __nccwpck_require__(1754); -const baseexternalclient_1 = __nccwpck_require__(7391); -/** - * AWS external account client. This is used for AWS workloads, where - * AWS STS GetCallerIdentity serialized signed requests are exchanged for - * GCP access token. 
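// --- Usage sketch (illustrative, not part of the bundled output): the
// Impersonated client added above, where a source credential mints short-lived
// tokens for a target service account via the IAM Credentials API. The
// principal and scopes below are placeholders.
// const { GoogleAuth, Impersonated } = require('google-auth-library');
//
// async function impersonate() {
//   const auth = new GoogleAuth({
//     scopes: ['https://www.googleapis.com/auth/cloud-platform'],
//   });
//   const target = new Impersonated({
//     sourceClient: await auth.getClient(),
//     targetPrincipal: 'target-sa@my-project.iam.gserviceaccount.com',
//     targetScopes: ['https://www.googleapis.com/auth/cloud-platform'],
//     lifetime: 3600, // seconds; the default used by the class above
//   });
//   // refreshToken() above runs lazily; getAccessToken() triggers it.
//   const { token } = await target.getAccessToken();
//   return token;
// }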
- */ -class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(4636); +const util_1 = __nccwpck_require__(8905); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { /** - * Instantiates an AwsClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid AWS credential. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - this.environmentId = options.credential_source.environment_id; - // This is only required if the AWS region is not available in the - // AWS_REGION or AWS_DEFAULT_REGION environment variables. - this.regionUrl = options.credential_source.region_url; - // This is only required if AWS security credentials are not available in - // environment variables. - this.securityCredentialsUrl = options.credential_source.url; - this.regionalCredVerificationUrl = - options.credential_source.regional_cred_verification_url; - this.imdsV2SessionTokenUrl = - options.credential_source.imdsv2_session_token_url; - this.awsRequestSigner = null; - this.region = ''; - // data validators - this.validateEnvironmentId(); - this.validateMetadataServerURLs(); + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; } - validateEnvironmentId() { - var _a; - const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\d+)$/); - if (!match || !this.regionalCredVerificationUrl) { - throw new Error('No valid AWS "credential_source" provided'); + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; } - else if (parseInt(match[2], 10) !== 1) { - throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + else if (typeof scopes === 'string') { + cacheKey = url ? 
`${url}_${scopes}` : scopes; } - } - validateMetadataServerURLs() { - this.validateMetadataURL(this.regionUrl, 'region_url'); - this.validateMetadataURL(this.securityCredentialsUrl, 'url'); - this.validateMetadataURL(this.imdsV2SessionTokenUrl, 'imdsv2_session_token_url'); - } - validateMetadataURL(value, prop) { - if (!value) - return; - const url = new URL(value); - if (url.hostname !== AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS && - url.hostname !== `[${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}]`) { - throw new RangeError(`Invalid host "${url.hostname}" for "${prop}". Expecting ${AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS} or ${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}.`); + if (!cacheKey) { + throw Error('Scopes or url must be provided'); } + return cacheKey; } /** - * Triggered when an external subject token is needed to be exchanged for a - * GCP access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this uses a serialized AWS signed request to the STS GetCallerIdentity - * endpoint. - * The logic is summarized as: - * 1. If imdsv2_session_token_url is provided in the credential source, then - * fetch the aws session token and include it in the headers of the - * metadata requests. This is a requirement for IDMSv2 but optional - * for IDMSv1. - * 2. Retrieve AWS region from availability-zone. - * 3a. Check AWS credentials in environment variables. If not found, get - * from security-credentials endpoint. - * 3b. Get AWS credentials from security-credentials endpoint. In order - * to retrieve this, the AWS role needs to be determined by calling - * security-credentials endpoint without any argument. Then the - * credentials can be retrieved via: security-credentials/role_name - * 4. Generate the signed request to AWS STS GetCallerIdentity action. - * 5. Inject x-goog-cloud-target-resource into header and serialize the - * signed request. This will be the subject-token to pass to GCP STS. - * @return A promise that resolves with the external subject token. + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. */ - async retrieveSubjectToken() { - // Initialize AWS request signer if not already initialized. - if (!this.awsRequestSigner) { - const metadataHeaders = {}; - if (this.imdsV2SessionTokenUrl) { - metadataHeaders['x-aws-ec2-metadata-token'] = - await this.getImdsV2SessionToken(); - } - this.region = await this.getAwsRegion(metadataHeaders); - this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { - // Check environment variables for permanent credentials first. - // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html - if (process.env['AWS_ACCESS_KEY_ID'] && - process.env['AWS_SECRET_ACCESS_KEY']) { - return { - accessKeyId: process.env['AWS_ACCESS_KEY_ID'], - secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], - // This is normally not available for permanent credentials. 
- token: process.env['AWS_SESSION_TOKEN'], - }; + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); } - // Since the role on a VM can change, we don't need to cache it. - const roleName = await this.getAwsRoleName(metadataHeaders); - // Temporary credentials typically last for several hours. - // Expiration is returned in response. - // Consider future optimization of this logic to cache AWS tokens - // until their natural expiration. - const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders); - return { - accessKeyId: awsCreds.AccessKeyId, - secretAccessKey: awsCreds.SecretAccessKey, - token: awsCreds.Token, - }; - }, this.region); + } } - // Generate signed request to AWS STS GetCallerIdentity API. - // Use the required regional endpoint. Otherwise, the request will fail. - const options = await this.awsRequestSigner.getRequestOptions({ - url: this.regionalCredVerificationUrl.replace('{region}', this.region), - method: 'POST', + const header = this.keyId + ? { ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, }); - // The GCP STS endpoint expects the headers to be formatted as: - // [ - // {key: 'x-amz-date', value: '...'}, - // {key: 'Authorization', value: '...'}, - // ... - // ] - // And then serialized as: - // encodeURIComponent(JSON.stringify({ - // url: '...', - // method: 'POST', - // headers: [{key: 'x-amz-date', value: '...'}, ...] - // })) - const reformattedHeader = []; - const extendedHeaders = Object.assign({ - // The full, canonical resource name of the workload identity pool - // provider, with or without the HTTPS prefix. - // Including this header as part of the signature is recommended to - // ensure data integrity. - 'x-goog-cloud-target-resource': this.audience, - }, options.headers); - // Reformat header to GCP STS expected format. - for (const key in extendedHeaders) { - reformattedHeader.push({ - key, - value: extendedHeaders[key], - }); - } - // Serialize the reformatted signed request. 
- return encodeURIComponent(JSON.stringify({ - url: options.url, - method: options.method, - headers: reformattedHeader, - })); + return headers; } /** - * @return A promise that resolves with the IMDSv2 Session Token. + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. */ - async getImdsV2SessionToken() { - const opts = { - url: this.imdsV2SessionTokenUrl, - method: 'PUT', - responseType: 'text', - headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, - }; - const response = await this.transporter.request(opts); - return response.data; + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; } /** - * @param headers The headers to be used in the metadata request. - * @return A promise that resolves with the current AWS region. + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. */ - async getAwsRegion(headers) { - // Priority order for region determination: - // AWS_REGION > AWS_DEFAULT_REGION > metadata server. - if (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']) { - return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']); + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); } - if (!this.regionUrl) { - throw new Error('Unable to determine AWS region due to missing ' + - '"options.credential_source.region_url"'); + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); } - const opts = { - url: this.regionUrl, - method: 'GET', - responseType: 'text', - headers: headers, - }; - const response = await this.transporter.request(opts); - // Remove last character. For example, if us-east-2b is returned, - // the region would be us-east-2. - return response.data.substr(0, response.data.length - 1); + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; } - /** - * @param headers The headers to be used in the metadata request. - * @return A promise that resolves with the assigned role to the current - * AWS VM. This is needed for calling the security-credentials endpoint. - */ - async getAwsRoleName(headers) { - if (!this.securityCredentialsUrl) { - throw new Error('Unable to determine AWS role name due to missing ' + - '"options.credential_source.url"'); + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); } - const opts = { - url: this.securityCredentialsUrl, - method: 'GET', - responseType: 'text', - headers: headers, - }; - const response = await this.transporter.request(opts); - return response.data; } - /** - * Retrieves the temporary AWS credentials by calling the security-credentials - * endpoint as specified in the `credential_source` object. - * @param roleName The role attached to the current VM. - * @param headers The headers to be used in the metadata request. - * @return A promise that resolves with the temporary AWS credentials - * needed for creating the GetCallerIdentity signed request. 
- */ - async getAwsSecurityCredentials(roleName, headers) { - const response = await this.transporter.request({ - url: `${this.securityCredentialsUrl}/${roleName}`, - responseType: 'json', - headers: headers, + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); }); - return response.data; } } -exports.AwsClient = AwsClient; -AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; -AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; -//# sourceMappingURL=awsclient.js.map +exports.JWTAccess = JWTAccess; + /***/ }), -/***/ 1754: +/***/ 3959: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2013 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26887,210 +21455,346 @@ AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AwsRequestSigner = void 0; -const crypto_1 = __nccwpck_require__(8043); -/** AWS Signature Version 4 signing algorithm identifier. */ -const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; -/** - * The termination string for the AWS credential scope value as defined in - * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html - */ -const AWS_REQUEST_TYPE = 'aws4_request'; -/** - * Implements an AWS API request signer based on the AWS Signature Version 4 - * signing process. - * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html - */ -class AwsRequestSigner { +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(6031); +const jwtaccess_1 = __nccwpck_require__(8740); +const oauth2client_1 = __nccwpck_require__(3936); +const authclient_1 = __nccwpck_require__(4627); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } /** - * Instantiates an AWS API request signer used to send authenticated signed - * requests to AWS APIs based on the AWS Signature Version 4 signing process. - * This also provides a mechanism to generate the signed request without - * sending it. - * @param getCredentials A mechanism to retrieve AWS security credentials - * when needed. - * @param region The AWS region to use. + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. 
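// --- Usage sketch (illustrative, not part of the bundled output): the
// JWTAccess helper added above, which self-signs a JWT with a service-account
// key and caches the resulting Authorization header. The email and key are
// placeholders for values from a service-account JSON file.
// const { JWTAccess } = require('google-auth-library');
//
// const jwtAccess = new JWTAccess(
//   'my-sa@my-project.iam.gserviceaccount.com',
//   '-----BEGIN PRIVATE KEY-----\n<key material>\n-----END PRIVATE KEY-----\n'
// );
// // Audience-style signing (no scopes): pass the API root as `url`.
// // getRequestHeaders() is synchronous and its result is cached until the
// // token is within the eager-refresh threshold of expiring.
// const headers = jwtAccess.getRequestHeaders('https://pubsub.googleapis.com/');
// // => { Authorization: 'Bearer <signed JWT>' }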
+ * @return The cloned instance. */ - constructor(getCredentials, region) { - this.getCredentials = getCredentials; - this.region = region; - this.crypto = (0, crypto_1.createCrypto)(); + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; } /** - * Generates the signed request for the provided HTTP request for calling - * an AWS API. This follows the steps described at: - * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html - * @param amzOptions The AWS request options that need to be signed. - * @return A promise that resolves with the GaxiosOptions containing the - * signed HTTP request parameters. + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. */ - async getRequestOptions(amzOptions) { - if (!amzOptions.url) { - throw new Error('"url" is required in "amzOptions"'); + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); } - // Stringify JSON requests. This will be set in the request body of the - // generated signed request. - const requestPayloadData = typeof amzOptions.data === 'object' - ? JSON.stringify(amzOptions.data) - : amzOptions.data; - const url = amzOptions.url; - const method = amzOptions.method || 'GET'; - const requestPayload = amzOptions.body || requestPayloadData; - const additionalAmzHeaders = amzOptions.headers; - const awsSecurityCredentials = await this.getCredentials(); - const uri = new URL(url); - const headerMap = await generateAuthenticationHeaderMap({ - crypto: this.crypto, - host: uri.host, - canonicalUri: uri.pathname, - canonicalQuerystring: uri.search.substr(1), - method, - region: this.region, - securityCredentials: awsSecurityCredentials, - requestPayload, - additionalAmzHeaders, + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? 
scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, }); - // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. - const headers = Object.assign( - // Add x-amz-date if available. - headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { - Authorization: headerMap.authorizationHeader, - host: uri.host, - }, additionalAmzHeaders || {}); - if (awsSecurityCredentials.token) { - Object.assign(headers, { - 'x-amz-security-token': awsSecurityCredentials.token, - }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); } - const awsSignedReq = { - url, - method: method, - headers, + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. + */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, }; - if (typeof requestPayload !== 'undefined') { - awsSignedReq.body = requestPayload; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); } - return awsSignedReq; + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. 
+ */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); } } -exports.AwsRequestSigner = AwsRequestSigner; -/** - * Creates the HMAC-SHA256 hash of the provided message using the - * provided key. - * - * @param crypto The crypto instance used to facilitate cryptographic - * operations. - * @param key The HMAC-SHA256 key to use. - * @param msg The message to hash. - * @return The computed hash bytes. - */ -async function sign(crypto, key, msg) { - return await crypto.signWithHmacSha256(key, msg); -} -/** - * Calculates the signing key used to calculate the signature for - * AWS Signature Version 4 based on: - * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html - * - * @param crypto The crypto instance used to facilitate cryptographic - * operations. - * @param key The AWS secret access key. - * @param dateStamp The '%Y%m%d' date format. - * @param region The AWS region. - * @param serviceName The AWS service name, eg. sts. - * @return The signing key bytes. - */ -async function getSigningKey(crypto, key, dateStamp, region, serviceName) { - const kDate = await sign(crypto, `AWS4${key}`, dateStamp); - const kRegion = await sign(crypto, kDate, region); - const kService = await sign(crypto, kRegion, serviceName); - const kSigning = await sign(crypto, kService, 'aws4_request'); - return kSigning; -} -/** - * Generates the authentication header map needed for generating the AWS - * Signature Version 4 signed request. 
- * - * @param option The options needed to compute the authentication header map. - * @return The AWS authentication header map which constitutes of the following - * components: amz-date, authorization header and canonical query string. - */ -async function generateAuthenticationHeaderMap(options) { - const additionalAmzHeaders = options.additionalAmzHeaders || {}; - const requestPayload = options.requestPayload || ''; - // iam.amazonaws.com host => iam service. - // sts.us-east-2.amazonaws.com => sts service. - const serviceName = options.host.split('.')[0]; - const now = new Date(); - // Format: '%Y%m%dT%H%M%SZ'. - const amzDate = now - .toISOString() - .replace(/[-:]/g, '') - .replace(/\.[0-9]+/, ''); - // Format: '%Y%m%d'. - const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); - // Change all additional headers to be lower case. - const reformattedAdditionalAmzHeaders = {}; - Object.keys(additionalAmzHeaders).forEach(key => { - reformattedAdditionalAmzHeaders[key.toLowerCase()] = - additionalAmzHeaders[key]; - }); - // Add AWS token if available. - if (options.securityCredentials.token) { - reformattedAdditionalAmzHeaders['x-amz-security-token'] = - options.securityCredentials.token; +exports.JWT = JWT; + + +/***/ }), + +/***/ 4524: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; } - // Header keys need to be sorted alphabetically. - const amzHeaders = Object.assign({ - host: options.host, - }, - // Previously the date was not fixed with x-amz- and could be provided manually. - // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req - reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); - let canonicalHeaders = ''; - const signedHeadersList = Object.keys(amzHeaders).sort(); - signedHeadersList.forEach(key => { - canonicalHeaders += `${key}:${amzHeaders[key]}\n`; - }); - const signedHeaders = signedHeadersList.join(';'); - const payloadHash = await options.crypto.sha256DigestHex(requestPayload); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html - const canonicalRequest = `${options.method}\n` + - `${options.canonicalUri}\n` + - `${options.canonicalQuerystring}\n` + - `${canonicalHeaders}\n` + - `${signedHeaders}\n` + - `${payloadHash}`; - const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; - // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html - const stringToSign = `${AWS_ALGORITHM}\n` + - `${amzDate}\n` + - `${credentialScope}\n` + - (await options.crypto.sha256DigestHex(canonicalRequest)); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html - const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); - const signature = await sign(options.crypto, signingKey, stringToSign); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html - const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + - `${credentialScope}, SignedHeaders=${signedHeaders}, ` + - `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; - return { - // Do not return x-amz-date if date is available. - amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, - authorizationHeader, - canonicalQuerystring: options.canonicalQuerystring, - }; } -//# sourceMappingURL=awsrequestsigner.js.map +exports.LoginTicket = LoginTicket; + /***/ }), -/***/ 7391: +/***/ 3936: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27104,231 +21808,396 @@ async function generateAuthenticationHeaderMap(options) { // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaseExternalAccountClient = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const querystring = __nccwpck_require__(3477); const stream = __nccwpck_require__(2781); +const formatEcdsa = __nccwpck_require__(1728); +const crypto_1 = __nccwpck_require__(8043); const authclient_1 = __nccwpck_require__(4627); -const sts = __nccwpck_require__(6308); -/** - * The required token exchange grant_type: rfc8693#section-2.1 - */ -const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; -/** - * The requested token exchange requested_token_type: rfc8693#section-2.1 - */ -const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** The default OAuth scope to request when none is provided. */ -const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; -/** The google apis domain pattern. 
*/ -const GOOGLE_APIS_DOMAIN_PATTERN = '\\.googleapis\\.com$'; -/** The variable portion pattern in a Google APIs domain. */ -const VARIABLE_PORTION_PATTERN = '[^\\.\\s\\/\\\\]+'; -/** Default impersonated token lifespan in seconds.*/ -const DEFAULT_TOKEN_LIFESPAN = 3600; -/** - * Offset to take into account network delays and server clock skews. - */ -exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; -/** - * The credentials JSON file type for external account clients. - * There are 3 types of JSON configs: - * 1. authorized_user => Google end user credential - * 2. service_account => Google service account credential - * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) - */ -exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; -/** Cloud resource manager URL used to retrieve project information. */ -exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; -/** The workforce audience pattern. */ -const WORKFORCE_AUDIENCE_PATTERN = '//iam.googleapis.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; -/** - * Base external account client. This is used to instantiate AuthClients for - * exchanging external account credentials for GCP access token and authorizing - * requests to GCP APIs. - * The base class implements common logic for exchanging various type of - * external credentials for GCP access token. The logic of determining and - * retrieving the external credential based on the environment and - * credential_source will be left for the subclasses. - */ -class BaseExternalAccountClient extends authclient_1.AuthClient { +const loginticket_1 = __nccwpck_require__(4524); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } /** - * Instantiate a BaseExternalAccountClient instance using the provided JSON - * object loaded from an external account credentials file. 
- * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. */ - constructor(options, additionalOptions) { - var _a, _b; - super(); - if (options.type !== exports.EXTERNAL_ACCOUNT_TYPE) { - throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + - `received "${options.type}"`); - } - this.clientAuth = options.client_id - ? { - confidentialClientType: 'basic', - clientId: options.client_id, - clientSecret: options.client_secret, - } - : undefined; - if (!this.validateGoogleAPIsUrl('sts', options.token_url)) { - throw new Error(`"${options.token_url}" is not a valid token url.`); - } - this.stsCredential = new sts.StsCredentials(options.token_url, this.clientAuth); - // Default OAuth scope. This could be overridden via public property. - this.scopes = [DEFAULT_OAUTH_SCOPE]; - this.cachedAccessToken = null; - this.audience = options.audience; - this.subjectTokenType = options.subject_token_type; - this.quotaProjectId = options.quota_project_id; - this.workforcePoolUserProject = options.workforce_pool_user_project; - const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); - if (this.workforcePoolUserProject && - !this.audience.match(workforceAudiencePattern)) { - throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + - 'credentials.'); + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); } - if (typeof options.service_account_impersonation_url !== 'undefined' && - !this.validateGoogleAPIsUrl('iamcredentials', options.service_account_impersonation_url)) { - throw new Error(`"${options.service_account_impersonation_url}" is ` + - 'not a valid service account impersonation url.'); + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); } - this.serviceAccountImpersonationUrl = - options.service_account_impersonation_url; - this.serviceAccountImpersonationLifetime = - (_b = (_a = options.service_account_impersonation) === null || _a === void 0 ? void 0 : _a.token_lifetime_seconds) !== null && _b !== void 0 ? _b : DEFAULT_TOKEN_LIFESPAN; - // As threshold could be zero, - // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the - // zero value. - if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { - this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET; + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. 
+ throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); } else { - this.eagerRefreshThresholdMillis = additionalOptions - .eagerRefreshThresholdMillis; + return this.getTokenAsync(options); } - this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); - this.projectId = null; - this.projectNumber = this.getProjectNumber(this.audience); } - /** The service account email to be impersonated, if available. */ - getServiceAccountEmail() { - var _a; - if (this.serviceAccountImpersonationUrl) { - // Parse email from URL. The formal looks as follows: - // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken - const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; - const result = re.exec(this.serviceAccountImpersonationUrl); - return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const values = { + code: options.code, + client_id: options.client_id || this._clientId, + client_secret: this._clientSecret, + redirect_uri: options.redirect_uri || this.redirectUri, + grant_type: 'authorization_code', + code_verifier: options.codeVerifier, + }; + const res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(values), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; } - return null; + this.emit('tokens', tokens); + return { tokens, res }; } /** - * Provides a mechanism to inject GCP access tokens directly. - * When the provided credential expires, a new credential, using the - * external account options, is retrieved. 
- * @param credentials The Credentials object to set on the current client. + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private */ - setCredentials(credentials) { - super.setCredentials(credentials); - this.cachedAccessToken = credentials; + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; } - /** - * @return A promise that resolves with the current GCP access token - * response. If the current credential is expired, a new one is retrieved. - */ - async getAccessToken() { - // If cached access token is unavailable or expired, force refresh. - if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { - await this.refreshAccessTokenAsync(); + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); } - // Return GCP access token in GetAccessTokenResponse format. - return { - token: this.cachedAccessToken.access_token, - res: this.cachedAccessToken.res, + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', }; + let res; + try { + // request for new token + res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } } /** - * The main authentication interface. It takes an optional url which when + * The main authentication interface. It takes an optional url which when * present is the endpoint being accessed, and returns a Promise which * resolves with authorization header fields. * - * The result has the form: + * In OAuth2Client, the result has the form: * { Authorization: 'Bearer ' } + * @param url The optional url being authorized */ - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, + Authorization: credentials.token_type + ' ' + tokens.access_token, }; - return this.addSharedMetadataHeaders(headers); + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; } - request(opts, callback) { + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. 
+ * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + this.transporter + .request(opts) + .then(r => callback(null, r), callback); } else { - return this.requestAsync(opts); + return this.transporter.request(opts); } } - /** - * @return A promise that resolves with the project ID corresponding to the - * current workload identity pool or current workforce pool if - * determinable. For workforce pool credential, it returns the project ID - * corresponding to the workforcePoolUserProject. - * This is introduced to match the current pattern of using the Auth - * library: - * const projectId = await auth.getProjectId(); - * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; - * const res = await client.request({ url }); - * The resource may not have permission - * (resourcemanager.projects.get) to call this API or the required - * scopes may not be selected: - * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes - */ - async getProjectId() { - const projectNumber = this.projectNumber || this.workforcePoolUserProject; - if (this.projectId) { - // Return previously determined project ID. - return this.projectId; + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); } - else if (projectNumber) { - // Preferable not to use request() to avoid retrial policies. - const headers = await this.getRequestHeaders(); - const response = await this.transporter.request({ - headers, - url: `${exports.CLOUD_RESOURCE_MANAGER}${projectNumber}`, - responseType: 'json', + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); }); - this.projectId = response.data.projectId; - return this.projectId; } - return null; + else { + return this.requestAsync(opts); + } } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param retry Whether the current attempt is a retry after a failed attempt. - * @return A promise that resolves with the successful response. 
- */ async requestAsync(opts, retry = false) { - let response; + let r2; try { - const requestHeaders = await this.getRequestHeaders(); + const r = await this.getRequestMetadataAsync(opts.url); opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; } - response = await this.transporter.request(opts); + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); } catch (e) { const res = e.response; @@ -27338,884 +22207,357 @@ class BaseExternalAccountClient extends authclient_1.AuthClient { // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream - // - forceRefreshOnFailure is true + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. + // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; - if (!retry && + if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!retry && isAuthErr && !isReadableStream && - this.forceRefreshOnFailure) { - await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); } } throw e; } - return response; + return r2; } - /** - * Forces token refresh, even if unexpired tokens are currently cached. 
- * External credentials are exchanged for GCP access tokens via the token - * exchange endpoint and other settings provided in the client options - * object. - * If the service_account_impersonation_url is provided, an additional - * step to exchange the external account GCP access token for a service - * account impersonated token is performed. - * @return A promise that resolves with the fresh GCP access tokens. - */ - async refreshAccessTokenAsync() { - // Retrieve the external credential. - const subjectToken = await this.retrieveSubjectToken(); - // Construct the STS credentials options. - const stsCredentialsOptions = { - grantType: STS_GRANT_TYPE, - audience: this.audience, - requestedTokenType: STS_REQUEST_TOKEN_TYPE, - subjectToken, - subjectTokenType: this.subjectTokenType, - // generateAccessToken requires the provided access token to have - // scopes: - // https://www.googleapis.com/auth/iam or - // https://www.googleapis.com/auth/cloud-platform - // The new service account access token scopes will match the user - // provided ones. - scope: this.serviceAccountImpersonationUrl - ? [DEFAULT_OAUTH_SCOPE] - : this.getScopesArray(), - }; - // Exchange the external credentials for a GCP access token. - // Client auth is prioritized over passing the workforcePoolUserProject - // parameter for STS token exchange. - const additionalOptions = !this.clientAuth && this.workforcePoolUserProject - ? { userProject: this.workforcePoolUserProject } - : undefined; - const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, additionalOptions); - if (this.serviceAccountImpersonationUrl) { - this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); } - else if (stsResponse.expires_in) { - // Save response in cached access token. - this.cachedAccessToken = { - access_token: stsResponse.access_token, - expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, - res: stsResponse.res, - }; + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); } else { - // Save response in cached access token. - this.cachedAccessToken = { - access_token: stsResponse.access_token, - res: stsResponse.res, - }; + return this.verifyIdTokenAsync(options); } - // Save credentials. - this.credentials = {}; - Object.assign(this.credentials, this.cachedAccessToken); - delete this.credentials.res; - // Trigger tokens event to notify external listeners. - this.emit('tokens', { - refresh_token: null, - expiry_date: this.cachedAccessToken.expiry_date, - access_token: this.cachedAccessToken.access_token, - token_type: 'Bearer', - id_token: null, - }); - // Return the cached access token. - return this.cachedAccessToken; } - /** - * Returns the workload identity pool project number if it is determinable - * from the audience resource name. - * @param audience The STS audience used to determine the project number. - * @return The project number associated with the workload identity pool, if - * this can be determined from the STS audience field. Otherwise, null is - * returned. 
- */ - getProjectNumber(audience) { - // STS audience pattern: - // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... - const match = audience.match(/\/projects\/([^/]+)/); - if (!match) { - return null; + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); } - return match[1]; + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; } /** - * Exchanges an external account GCP access token for a service - * account impersonated access token using iamcredentials - * GenerateAccessToken API. - * @param token The access token to exchange for a service account access - * token. - * @return A promise that resolves with the service account impersonated - * credentials response. + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. */ - async getImpersonatedAccessToken(token) { - const opts = { - url: this.serviceAccountImpersonationUrl, + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ method: 'POST', headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${token}`, - }, - data: { - scope: this.getScopesArray(), - lifetime: this.serviceAccountImpersonationLifetime + 's', + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, }, - responseType: 'json', - }; - const response = await this.transporter.request(opts); - const successResponse = response.data; - return { - access_token: successResponse.accessToken, - // Convert from ISO format to timestamp. - expiry_date: new Date(successResponse.expireTime).getTime(), - res: response, - }; - } - /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param accessToken The credentials to check for expiration. - * @return Whether the credentials are expired or not. - */ - isExpired(accessToken) { - const now = new Date().getTime(); - return accessToken.expiry_date - ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis - : false; + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; } - /** - * @return The list of scopes for the requested GCP access token. - */ - getScopesArray() { - // Since scopes can be provided as string or array, the type should - // be normalized. - if (typeof this.scopes === 'string') { - return [this.scopes]; - } - else if (typeof this.scopes === 'undefined') { - return [DEFAULT_OAUTH_SCOPE]; + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); } else { - return this.scopes; - } - } - /** - * Checks whether Google APIs URL is valid. - * @param apiName The apiName of url. - * @param url The Google API URL to validate. - * @return Whether the URL is valid or not. - */ - validateGoogleAPIsUrl(apiName, url) { - let parsedUrl; - // Return false if error is thrown during parsing URL. 
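// Illustrative sketch of caller-side use of the verifyIdToken()/LoginTicket path
// in this hunk. Assumes the public 'google-auth-library' entry point; clientId
// and the function name are hypothetical, not part of the bundle.
const { OAuth2Client } = require('google-auth-library');
async function verifyGoogleIdToken(idToken, clientId) {
    const client = new OAuth2Client(clientId);
    // verifyIdToken() fetches the federated sign-on certs and checks signature,
    // issuer, expiry and audience, resolving with a LoginTicket.
    const ticket = await client.verifyIdToken({ idToken, audience: clientId });
    const payload = ticket.getPayload();
    return payload ? payload.sub : null; // stable Google account ID, if present
}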
- try { - parsedUrl = new URL(url); - } - catch (e) { - return false; - } - const urlDomain = parsedUrl.hostname; - // Check the protocol is https. - if (parsedUrl.protocol !== 'https:') { - return false; - } - const googleAPIsDomainPatterns = [ - new RegExp('^' + - VARIABLE_PORTION_PATTERN + - '\\.' + - apiName + - GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + apiName + GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + - apiName + - '\\.' + - VARIABLE_PORTION_PATTERN + - GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + - VARIABLE_PORTION_PATTERN + - '\\-' + - apiName + - GOOGLE_APIS_DOMAIN_PATTERN), - new RegExp('^' + - apiName + - '\\-' + - VARIABLE_PORTION_PATTERN + - '\\.p' + - GOOGLE_APIS_DOMAIN_PATTERN), - ]; - for (const googleAPIsDomainPattern of googleAPIsDomainPatterns) { - if (urlDomain.match(googleAPIsDomainPattern)) { - return true; - } + return this.getFederatedSignonCertsAsync(); } - return false; - } -} -exports.BaseExternalAccountClient = BaseExternalAccountClient; -//# sourceMappingURL=baseexternalclient.js.map - -/***/ }), - -/***/ 6875: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2013 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Compute = void 0; -const arrify = __nccwpck_require__(1546); -const gaxios_1 = __nccwpck_require__(9555); -const gcpMetadata = __nccwpck_require__(3563); -const oauth2client_1 = __nccwpck_require__(3936); -class Compute extends oauth2client_1.OAuth2Client { - /** - * Google Compute Engine service account credentials. - * - * Retrieve access token from the metadata server. - * See: https://developers.google.com/compute/docs/authentication - */ - constructor(options = {}) { - super(options); - // Start with an expired refresh token, which will automatically be - // refreshed before the first API call is made. - this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; - this.serviceAccountEmail = options.serviceAccountEmail || 'default'; - this.scopes = arrify(options.scopes); } - /** - * Refreshes the access token. - * @param refreshToken Unused parameter - */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; - let data; - try { - const instanceOptions = { - property: tokenPath, - }; - if (this.scopes.length > 0) { - instanceOptions.params = { - scopes: this.scopes.join(','), - }; - } - data = await gcpMetadata.instance(instanceOptions); - } - catch (e) { - if (e instanceof gaxios_1.GaxiosError) { - e.message = `Could not refresh access token: ${e.message}`; - this.wrapError(e); - } - throw e; + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? 
CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; } - const tokens = data; - if (data && data.expires_in) { - tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; - delete tokens.expires_in; + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); } - this.emit('tokens', tokens); - return { tokens, res: null }; - } - /** - * Fetches an ID token. - * @param targetAudience the audience for the fetched ID token. - */ - async fetchIdToken(targetAudience) { - const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + - `?format=full&audience=${targetAudience}`; - let idToken; try { - const instanceOptions = { - property: idTokenPath, - }; - idToken = await gcpMetadata.instance(instanceOptions); + res = await this.transporter.request({ url }); } catch (e) { if (e instanceof Error) { - e.message = `Could not fetch ID token: ${e.message}`; - } - throw e; - } - return idToken; - } - wrapError(e) { - const res = e.response; - if (res && res.status) { - e.code = res.status.toString(); - if (res.status === 403) { - e.message = - 'A Forbidden error was returned while attempting to retrieve an access ' + - 'token for the Compute Engine built-in service account. This may be because the Compute ' + - 'Engine instance does not have the correct permission scopes specified: ' + - e.message; - } - else if (res.status === 404) { - e.message = - 'A Not Found error was returned while attempting to retrieve an access' + - 'token for the Compute Engine built-in service account. This may be because the Compute ' + - 'Engine instance does not have any permission scopes specified: ' + - e.message; - } - } - } -} -exports.Compute = Compute; -//# sourceMappingURL=computeclient.js.map - -/***/ }), - -/***/ 6270: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
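// Illustrative sketch of the caching rule used by getFederatedSignonCertsAsync
// above: the certificate cache lifetime comes from the Cache-Control max-age
// directive. The helper name is hypothetical, not an API of the library.
function maxAgeMillis(cacheControlHeader) {
    const match = /max-age=([0-9]+)/.exec(cacheControlHeader || '');
    return match ? Number(match[1]) * 1000 : -1; // -1: do not cache
}
// e.g. maxAgeMillis('public, max-age=21600, must-revalidate') === 21600000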
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; -const stream = __nccwpck_require__(2781); -const authclient_1 = __nccwpck_require__(4627); -const sts = __nccwpck_require__(6308); -/** - * The required token exchange grant_type: rfc8693#section-2.1 - */ -const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; -/** - * The requested token exchange requested_token_type: rfc8693#section-2.1 - */ -const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** - * The requested token exchange subject_token_type: rfc8693#section-2.1 - */ -const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** The STS access token exchange end point. */ -const STS_ACCESS_TOKEN_URL = 'https://sts.googleapis.com/v1/token'; -/** - * The maximum number of access boundary rules a Credential Access Boundary - * can contain. - */ -exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; -/** - * Offset to take into account network delays and server clock skews. - */ -exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; -/** - * Defines a set of Google credentials that are downscoped from an existing set - * of Google OAuth2 credentials. This is useful to restrict the Identity and - * Access Management (IAM) permissions that a short-lived credential can use. - * The common pattern of usage is to have a token broker with elevated access - * generate these downscoped credentials from higher access source credentials - * and pass the downscoped short-lived access tokens to a token consumer via - * some secure authenticated channel for limited access to Google Cloud Storage - * resources. - */ -class DownscopedClient extends authclient_1.AuthClient { - /** - * Instantiates a downscoped client object using the provided source - * AuthClient and credential access boundary rules. - * To downscope permissions of a source AuthClient, a Credential Access - * Boundary that specifies which resources the new credential can access, as - * well as an upper bound on the permissions that are available on each - * resource, has to be defined. A downscoped client can then be instantiated - * using the source AuthClient and the Credential Access Boundary. - * @param authClient The source AuthClient to be downscoped based on the - * provided Credential Access Boundary rules. - * @param credentialAccessBoundary The Credential Access Boundary which - * contains a list of access boundary rules. Each rule contains information - * on the resource that the rule applies to, the upper bound of the - * permissions that are available on that resource and an optional - * condition to further restrict permissions. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. - * @param quotaProjectId Optional quota project id for setting up in the - * x-goog-user-project header. - */ - constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { - super(); - this.authClient = authClient; - this.credentialAccessBoundary = credentialAccessBoundary; - // Check 1-10 Access Boundary Rules are defined within Credential Access - // Boundary. 
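// Illustrative shape of the credentialAccessBoundary argument validated below;
// all values are placeholders. availableResource and availabilityCondition are
// the customary field names; only availablePermissions is checked here.
const exampleAccessBoundary = {
    accessBoundary: {
        accessBoundaryRules: [ // between 1 and 10 rules are accepted
            {
                availableResource: '//storage.googleapis.com/projects/_/buckets/example-bucket',
                availablePermissions: ['inRole:roles/storage.objectViewer'],
                availabilityCondition: {
                    expression: "resource.name.startsWith('projects/_/buckets/example-bucket/objects/reports/')",
                },
            },
        ],
    },
};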
- if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { - throw new Error('At least one access boundary rule needs to be defined.'); - } - else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > - exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { - throw new Error('The provided access boundary has more than ' + - `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); - } - // Check at least one permission should be defined in each Access Boundary - // Rule. - for (const rule of credentialAccessBoundary.accessBoundary - .accessBoundaryRules) { - if (rule.availablePermissions.length === 0) { - throw new Error('At least one permission should be defined in access boundary rules.'); - } - } - this.stsCredential = new sts.StsCredentials(STS_ACCESS_TOKEN_URL); - this.cachedDownscopedAccessToken = null; - // As threshold could be zero, - // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the - // zero value. - if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { - this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET; - } - else { - this.eagerRefreshThresholdMillis = additionalOptions - .eagerRefreshThresholdMillis; + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; } - this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); - this.quotaProjectId = quotaProjectId; - } - /** - * Provides a mechanism to inject Downscoped access tokens directly. - * The expiry_date field is required to facilitate determination of the token - * expiration which would make it easier for the token consumer to handle. - * @param credentials The Credentials object to set on the current client. - */ - setCredentials(credentials) { - if (!credentials.expiry_date) { - throw new Error('The access token expiry_date field is missing in the provided ' + - 'credentials.'); + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } } - super.setCredentials(credentials); - this.cachedDownscopedAccessToken = credentials; - } - async getAccessToken() { - // If the cached access token is unavailable or expired, force refresh. - // The Downscoped access token will be returned in - // DownscopedAccessTokenResponse format. - if (!this.cachedDownscopedAccessToken || - this.isExpired(this.cachedDownscopedAccessToken)) { - await this.refreshAccessTokenAsync(); + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); } - // Return Downscoped access token in DownscopedAccessTokenResponse format. - return { - token: this.cachedDownscopedAccessToken.access_token, - expirationTime: this.cachedDownscopedAccessToken.expiry_date, - res: this.cachedDownscopedAccessToken.res, - }; - } - /** - * The main authentication interface. 
It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * The result has the form: - * { Authorization: 'Bearer ' } - */ - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); - const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, - }; - return this.addSharedMetadataHeaders(headers); + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; } - request(opts, callback) { + getIapPublicKeys(callback) { if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); } else { - return this.requestAsync(opts); + return this.getIapPublicKeysAsync(); } } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param retry Whether the current attempt is a retry after a failed attempt. - * @return A promise that resolves with the successful response. - */ - async requestAsync(opts, retry = false) { - let response; + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); try { - const requestHeaders = await this.getRequestHeaders(); - opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; - } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; - } - response = await this.transporter.request(opts); + res = await this.transporter.request({ url }); } catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - forceRefreshOnFailure is true - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!retry && - isAuthErr && - !isReadableStream && - this.forceRefreshOnFailure) { - await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); - } + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; } throw e; } - return response; + return { pubkeys: res.data, res }; } - /** - * Forces token refresh, even if unexpired tokens are currently cached. - * GCP access tokens are retrieved from authclient object/source credential. - * Then GCP access tokens are exchanged for downscoped access tokens via the - * token exchange endpoint. - * @return A promise that resolves with the fresh downscoped access token. - */ - async refreshAccessTokenAsync() { - var _a; - // Retrieve GCP access token from source credential. - const subjectToken = (await this.authClient.getAccessToken()).token; - // Construct the STS credentials options. 
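// Illustrative end-to-end use of the DownscopedClient code in this hunk,
// assuming 'google-auth-library' exposes GoogleAuth and DownscopedClient as in
// its public API; the boundary object follows the example shown earlier.
const { GoogleAuth, DownscopedClient } = require('google-auth-library');
async function mintDownscopedToken(credentialAccessBoundary) {
    // Broad-scope source credential; the access boundary narrows what it can do.
    const source = await new GoogleAuth({
        scopes: 'https://www.googleapis.com/auth/cloud-platform',
    }).getClient();
    const client = new DownscopedClient(source, credentialAccessBoundary);
    // getAccessToken() drives the STS token exchange performed below and
    // resolves with { token, expirationTime }.
    const { token, expirationTime } = await client.getAccessToken();
    return { token, expirationTime };
}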
- const stsCredentialsOptions = { - grantType: STS_GRANT_TYPE, - requestedTokenType: STS_REQUEST_TOKEN_TYPE, - subjectToken: subjectToken, - subjectTokenType: STS_SUBJECT_TOKEN_TYPE, - }; - // Exchange the source AuthClient access token for a Downscoped access - // token. - const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); - /** - * The STS endpoint will only return the expiration time for the downscoped - * access token if the original access token represents a service account. - * The downscoped token's expiration time will always match the source - * credential expiration. When no expires_in is returned, we can copy the - * source credential's expiration time. - */ - const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; - const expiryDate = stsResponse.expires_in - ? new Date().getTime() + stsResponse.expires_in * 1000 - : sourceCredExpireDate; - // Save response in cached access token. - this.cachedDownscopedAccessToken = { - access_token: stsResponse.access_token, - expiry_date: expiryDate, - res: stsResponse.res, - }; - // Save credentials. - this.credentials = {}; - Object.assign(this.credentials, this.cachedDownscopedAccessToken); - delete this.credentials.res; - // Trigger tokens event to notify external listeners. - this.emit('tokens', { - refresh_token: null, - expiry_date: this.cachedDownscopedAccessToken.expiry_date, - access_token: this.cachedDownscopedAccessToken.access_token, - token_type: 'Bearer', - id_token: null, - }); - // Return the cached access token. - return this.cachedDownscopedAccessToken; + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); } /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param downscopedAccessToken The credentials to check for expiration. - * @return Whether the credentials are expired or not. + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. */ - isExpired(downscopedAccessToken) { - const now = new Date().getTime(); - return downscopedAccessToken.expiry_date - ? now >= - downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis - : false; - } -} -exports.DownscopedClient = DownscopedClient; -//# sourceMappingURL=downscopedclient.js.map - -/***/ }), - -/***/ 1380: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2018 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEnv = exports.clear = exports.GCPEnv = void 0; -const gcpMetadata = __nccwpck_require__(3563); -var GCPEnv; -(function (GCPEnv) { - GCPEnv["APP_ENGINE"] = "APP_ENGINE"; - GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; - GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; - GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; - GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; - GCPEnv["NONE"] = "NONE"; -})(GCPEnv = exports.GCPEnv || (exports.GCPEnv = {})); -let envPromise; -function clear() { - envPromise = undefined; -} -exports.clear = clear; -async function getEnv() { - if (envPromise) { - return envPromise; - } - envPromise = getEnvMemoized(); - return envPromise; -} -exports.getEnv = getEnv; -async function getEnvMemoized() { - let env = GCPEnv.NONE; - if (isAppEngine()) { - env = GCPEnv.APP_ENGINE; - } - else if (isCloudFunction()) { - env = GCPEnv.CLOUD_FUNCTIONS; - } - else if (await isComputeEngine()) { - if (await isKubernetesEngine()) { - env = GCPEnv.KUBERNETES_ENGINE; + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; } - else if (isCloudRun()) { - env = GCPEnv.CLOUD_RUN; + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); } - else { - env = GCPEnv.COMPUTE_ENGINE; + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); } - } - else { - env = GCPEnv.NONE; - } - return env; -} -function isAppEngine() { - return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); -} -function isCloudFunction() { - return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); -} -/** - * This check only verifies that the environment is running knative. - * This must be run *after* checking for Kubernetes, otherwise it will - * return a false positive. - */ -function isCloudRun() { - return !!process.env.K_CONFIGURATION; -} -async function isKubernetesEngine() { - try { - await gcpMetadata.instance('attributes/cluster-name'); - return true; - } - catch (e) { - return false; - } -} -async function isComputeEngine() { - return gcpMetadata.isAvailable(); -} -//# sourceMappingURL=envDetect.js.map - -/***/ }), - -/***/ 8749: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; -const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; -const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; -const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; -/** - * Defines the response of a 3rd party executable run by the pluggable auth client. - */ -class ExecutableResponse { - /** - * Instantiates an ExecutableResponse instance using the provided JSON object - * from the output of the executable. - * @param responseJson Response from a 3rd party executable, loaded from a - * run of the executable or a cached output file. - */ - constructor(responseJson) { - // Check that the required fields exist in the json response. - if (!responseJson.version) { - throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; } - if (responseJson.success === undefined) { - throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); } - this.version = responseJson.version; - this.success = responseJson.success; - // Validate required fields for a successful response. - if (this.success) { - this.expirationTime = responseJson.expiration_time; - this.tokenType = responseJson.token_type; - // Validate token type field. - if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && - this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && - this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { - throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + - `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; } - // Validate subject token. 
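// Illustrative executable outputs accepted by the checks in this constructor;
// all values are placeholders. expiration_time is a unix timestamp in seconds.
const oidcSuccessOutput = {
    version: 1,
    success: true,
    token_type: 'urn:ietf:params:oauth:token-type:id_token',
    id_token: 'HEADER.PAYLOAD.SIGNATURE',
    expiration_time: 1620499962,
};
const failureOutput = {
    version: 1,
    success: false,
    code: '401',
    message: 'Caller not authorized.',
};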
- if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { - if (!responseJson.saml_response) { - throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); - } - this.subjectToken = responseJson.saml_response; + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; } else { - if (!responseJson.id_token) { - throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + - `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); - } - this.subjectToken = responseJson.id_token; - } - } - else { - // Both code and message must be provided for unsuccessful responses. - if (!responseJson.code) { - throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + audVerified = aud === requiredAudience; } - if (!responseJson.message) { - throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + if (!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); } - this.errorCode = responseJson.code; - this.errorMessage = responseJson.message; } + return new loginticket_1.LoginTicket(envelope, payload); } /** - * @return A boolean representing if the response has a valid token. Returns - * true when the response was successful and the token is not expired. 
- */ - isValid() { - return !this.isExpired() && this.success; - } - /** - * @return A boolean representing if the response is expired. Returns true if the - * provided timeout has passed. + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. */ - isExpired() { - return (this.expirationTime !== undefined && - this.expirationTime < Math.round(Date.now() / 1000)); - } -} -exports.ExecutableResponse = ExecutableResponse; -/** - * An error thrown by the ExecutableResponse class. - */ -class ExecutableResponseError extends Error { - constructor(message) { - super(message); - Object.setPrototypeOf(this, new.target.prototype); - } -} -exports.ExecutableResponseError = ExecutableResponseError; -/** - * An error thrown when the 'version' field in an executable response is missing or invalid. - */ -class InvalidVersionFieldError extends ExecutableResponseError { -} -exports.InvalidVersionFieldError = InvalidVersionFieldError; -/** - * An error thrown when the 'success' field in an executable response is missing or invalid. - */ -class InvalidSuccessFieldError extends ExecutableResponseError { -} -exports.InvalidSuccessFieldError = InvalidSuccessFieldError; -/** - * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. - */ -class InvalidExpirationTimeFieldError extends ExecutableResponseError { -} -exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; -/** - * An error thrown when the 'token_type' field in an executable response is missing or invalid. - */ -class InvalidTokenTypeFieldError extends ExecutableResponseError { + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } } -exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +exports.OAuth2Client = OAuth2Client; /** - * An error thrown when the 'code' field in an executable response is missing or invalid. + * @deprecated use instance's {@link OAuth2Client.endpoints} */ -class InvalidCodeFieldError extends ExecutableResponseError { -} -exports.InvalidCodeFieldError = InvalidCodeFieldError; +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; /** - * An error thrown when the 'message' field in an executable response is missing or invalid. + * Clock skew - five minutes in seconds */ -class InvalidMessageFieldError extends ExecutableResponseError { -} -exports.InvalidMessageFieldError = InvalidMessageFieldError; +OAuth2Client.CLOCK_SKEW_SECS_ = 300; /** - * An error thrown when the subject token in an executable response is missing or invalid. 
+ * The default max Token Lifetime is one day in seconds */ -class InvalidSubjectTokenError extends ExecutableResponseError { -} -exports.InvalidSubjectTokenError = InvalidSubjectTokenError; -//# sourceMappingURL=executable-response.js.map +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; + /***/ }), -/***/ 4381: +/***/ 9510: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -28234,64 +22576,176 @@ exports.InvalidSubjectTokenError = InvalidSubjectTokenError; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ExternalAccountClient = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const identitypoolclient_1 = __nccwpck_require__(117); -const awsclient_1 = __nccwpck_require__(1569); -const pluggable_auth_client_1 = __nccwpck_require__(4782); +exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; +const querystring = __nccwpck_require__(3477); +const crypto_1 = __nccwpck_require__(8043); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; /** - * Dummy class with no constructor. Developers are expected to use fromJSON. + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. */ -class ExternalAccountClient { - constructor() { - throw new Error('ExternalAccountClients should be initialized via: ' + - 'ExternalAccountClient.fromJSON(), ' + - 'directly via explicit constructors, eg. ' + - 'new AwsClient(options), new IdentityPoolClient(options), new' + - 'PluggableAuthClientOptions, or via ' + - 'new GoogleAuth(options).getClient()'); +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); } /** - * This static method will instantiate the - * corresponding type of external account credential depending on the - * underlying credential source. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. - * @return A BaseExternalAccountClient instance or null if the options - * provided do not correspond to an external account credential. + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. */ - static fromJSON(options, additionalOptions) { - var _a, _b; - if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - if ((_a = options.credential_source) === null || _a === void 0 ? 
void 0 : _a.environment_id) { - return new awsclient_1.AwsClient(options, additionalOptions); - } - else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { - return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. 
+ let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } } else { - return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); } } - else { - return null; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); } + return newError; } -exports.ExternalAccountClient = ExternalAccountClient; -//# sourceMappingURL=externalclient.js.map +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; + /***/ }), -/***/ 695: +/***/ 4782: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2019 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -28305,736 +22759,961 @@ exports.ExternalAccountClient = ExternalAccountClient; // See the License for the specific language governing permissions and // limitations under the License. 
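// A minimal standalone sketch of the request-body client authentication performed by
// OAuthClientAuthHandler above: for an application/x-www-form-urlencoded request, the
// client credentials are merged into the outgoing body. The function and variable names
// below are illustrative only and are not part of this bundle; only Node's built-in
// 'querystring' module is assumed.
const querystring = require('querystring');
function addClientCredsToFormBody(body, clientId, clientSecret) {
    // Parse the existing form-encoded payload, append the client credentials,
    // and serialize it back, mirroring the urlencoded branch above.
    const data = querystring.parse(body);
    data.client_id = clientId;
    data.client_secret = clientSecret;
    return querystring.stringify(data);
}
// Usage sketch:
// addClientCredsToFormBody('grant_type=client_credentials', 'id', 'secret')
// => 'grant_type=client_credentials&client_id=id&client_secret=secret'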
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; -const child_process_1 = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -const gcpMetadata = __nccwpck_require__(3563); -const os = __nccwpck_require__(2037); -const path = __nccwpck_require__(1017); -const crypto_1 = __nccwpck_require__(8043); -const transporters_1 = __nccwpck_require__(2649); -const computeclient_1 = __nccwpck_require__(6875); -const idtokenclient_1 = __nccwpck_require__(298); -const envDetect_1 = __nccwpck_require__(1380); -const jwtclient_1 = __nccwpck_require__(3959); -const refreshclient_1 = __nccwpck_require__(8790); -const impersonated_1 = __nccwpck_require__(1103); -const externalclient_1 = __nccwpck_require__(4381); +exports.PluggableAuthClient = exports.ExecutableError = void 0; const baseexternalclient_1 = __nccwpck_require__(7391); -exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; -const GoogleAuthExceptionMessages = { - NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + - 'To learn more about authentication and Google APIs, visit: \n' + - 'https://cloud.google.com/docs/authentication/getting-started', -}; -class GoogleAuth { - constructor(opts) { - /** - * Caches a value indicating whether the auth layer is running on Google - * Compute Engine. - * @private - */ - this.checkIsGCE = undefined; - // To save the contents of the JSON credential file - this.jsonContent = null; - this.cachedCredential = null; - opts = opts || {}; - this._cachedProjectId = opts.projectId || null; - this.cachedCredential = opts.authClient || null; - this.keyFilename = opts.keyFilename || opts.keyFile; - this.scopes = opts.scopes; - this.jsonContent = opts.credentials || null; - this.clientOptions = opts.clientOptions; - } - // Note: this properly is only public to satisify unit tests. - // https://github.com/Microsoft/TypeScript/issues/5228 - get isGCE() { - return this.checkIsGCE; - } - // GAPIC client libraries should always use self-signed JWTs. The following - // variables are set on the JWT client in order to indicate the type of library, - // and sign the JWT with the correct audience and scopes (if not supplied). - setGapicJWTValues(client) { - client.defaultServicePath = this.defaultServicePath; - client.useJWTAccessWithScope = this.useJWTAccessWithScope; - client.defaultScopes = this.defaultScopes; - } - getProjectId(callback) { - if (callback) { - this.getProjectIdAsync().then(r => callback(null, r), callback); - } - else { - return this.getProjectIdAsync(); - } - } - /** - * A temporary method for internal `getProjectId` usages where `null` is - * acceptable. In a future major release, `getProjectId` should return `null` - * (as the `Promise` base signature describes) and this private - * method should be removed. - * - * @returns Promise that resolves with project id (or `null`) - */ - async getProjectIdOptional() { - try { - return await this.getProjectId(); - } - catch (e) { - if (e instanceof Error && - e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { - return null; - } - else { - throw e; - } - } - } - /* - * A private method for finding and caching a projectId. 
- * - * Supports environments in order of precedence: - * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable - * - GOOGLE_APPLICATION_CREDENTIALS JSON file - * - Cloud SDK: `gcloud config config-helper --format json` - * - GCE project ID from metadata server - * - * @returns projectId - */ - async findAndCacheProjectId() { - let projectId = null; - projectId || (projectId = await this.getProductionProjectId()); - projectId || (projectId = await this.getFileProjectId()); - projectId || (projectId = await this.getDefaultServiceProjectId()); - projectId || (projectId = await this.getGCEProjectId()); - projectId || (projectId = await this.getExternalAccountClientProjectId()); - if (projectId) { - this._cachedProjectId = projectId; - return projectId; - } - else { - throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); - } - } - async getProjectIdAsync() { - if (this._cachedProjectId) { - return this._cachedProjectId; - } - if (!this._findProjectIdPromise) { - this._findProjectIdPromise = this.findAndCacheProjectId(); - } - return this._findProjectIdPromise; - } - /** - * @returns Any scopes (user-specified or default scopes specified by the - * client library) that need to be set on the current Auth client. - */ - getAnyScopes() { - return this.scopes || this.defaultScopes; - } - getApplicationDefault(optionsOrCallback = {}, callback) { - let options; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else { - options = optionsOrCallback; - } - if (callback) { - this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); - } - else { - return this.getApplicationDefaultAsync(options); - } - } - async getApplicationDefaultAsync(options = {}) { - // If we've already got a cached credential, return it. - // This will also preserve one's configured quota project, in case they - // set one directly on the credential previously. - if (this.cachedCredential) { - return await this.prepareAndCacheADC(this.cachedCredential); - } - // Since this is a 'new' ADC to cache we will use the environment variable - // if it's available. We prefer this value over the value from ADC. - const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; - let credential; - // Check for the existence of a local environment variable pointing to the - // location of the credential file. This is typically used in local - // developer scenarios. - credential = - await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); - if (credential) { - if (credential instanceof jwtclient_1.JWT) { - credential.scopes = this.scopes; - } - else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { - credential.scopes = this.getAnyScopes(); - } - return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); - } - // Look in the well-known credential file location. - credential = await this._tryGetApplicationCredentialsFromWellKnownFile(options); - if (credential) { - if (credential instanceof jwtclient_1.JWT) { - credential.scopes = this.scopes; - } - else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { - credential.scopes = this.getAnyScopes(); - } - return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); - } - // Determine if we're running on GCE. 
- let isGCE; - try { - isGCE = await this._checkIsGCE(); - } - catch (e) { - if (e instanceof Error) { - e.message = `Unexpected error determining execution environment: ${e.message}`; - } - throw e; - } - if (!isGCE) { - // We failed to find the default credentials. Bail out with an error. - throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); - } - // For GCE, just return a default ComputeClient. It will take care of - // the rest. - options.scopes = this.getAnyScopes(); - return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); - } - async prepareAndCacheADC(credential, quotaProjectIdOverride) { - const projectId = await this.getProjectIdOptional(); - if (quotaProjectIdOverride) { - credential.quotaProjectId = quotaProjectIdOverride; - } - this.cachedCredential = credential; - return { credential, projectId }; - } - /** - * Determines whether the auth layer is running on Google Compute Engine. - * @returns A promise that resolves with the boolean. - * @api private - */ - async _checkIsGCE() { - if (this.checkIsGCE === undefined) { - this.checkIsGCE = await gcpMetadata.isAvailable(); - } - return this.checkIsGCE; +const executable_response_1 = __nccwpck_require__(8749); +const pluggable_auth_handler_1 = __nccwpck_require__(8941); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable + * must be set to '1'. This is for security reasons. + * + *

Both OIDC and SAML are supported. The executable must adhere to a specific response format + * defined below. + * + *

The executable must print out the 3rd party token to STDOUT in JSON format. When an + * output_file is specified in the credential configuration, the executable must also handle writing the + * JSON response to this file. + * + *

+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ * 
+ * + *

The "expiration_time" field in the JSON response is only required for successful + * responses when an output file was specified in the credential configuration + * + *

The auth libraries will populate certain environment variables that will be accessible by the + * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE, + * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and + * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE. + * + *

Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { /** - * Attempts to load default credentials from the environment variable path.. - * @returns Promise that resolves with the OAuth2Client or null. - * @api private + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. */ - async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { - const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || - process.env['google_application_credentials']; - if (!credentialsPath || credentialsPath.length === 0) { - return null; - } - try { - return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); } - catch (e) { - if (e instanceof Error) { - e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; - } - throw e; + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); } - } - /** - * Attempts to load default credentials from a well-known file location - * @return Promise that resolves with the OAuth2Client or null. - * @api private - */ - async _tryGetApplicationCredentialsFromWellKnownFile(options) { - // First, figure out the location of the file, depending upon the OS type. - let location = null; - if (this._isWindows()) { - // Windows - location = process.env['APPDATA']; + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; } else { - // Linux or Mac - const home = process.env['HOME']; - if (home) { - location = path.join(home, '.config'); - } - } - // If we found the root path, expand it. - if (location) { - location = path.join(location, 'gcloud', 'application_default_credentials.json'); - if (!fs.existsSync(location)) { - location = null; + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); } } - // The file does not exist. - if (!location) { - return null; - } - // The file seems to exist. Try to use it. 
- const client = await this._getApplicationCredentialsFromFilePath(location, options); - return client; + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; } /** - * Attempts to load default credentials from a file at the given path.. - * @param filePath The path to the file to read. - * @returns Promise that resolves with the OAuth2Client - * @api private + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. */ - async _getApplicationCredentialsFromFilePath(filePath, options = {}) { - // Make sure the path looks like a string. - if (!filePath || filePath.length === 0) { - throw new Error('The file path is invalid.'); + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); } - // Make sure there is a file at the path. lstatSync will throw if there is - // nothing there. - try { - // Resolve path to actual file in case of symlink. Expect a thrown error - // if not resolvable. - filePath = fs.realpathSync(filePath); - if (!fs.lstatSync(filePath).isFile()) { - throw new Error(); - } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); } - catch (err) { - if (err instanceof Error) { - err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + // If no response from output file, call the executable. + if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); } - throw err; - } - // Now open a read stream on the file, and parse it. - const readStream = fs.createReadStream(filePath); - return this.fromStream(readStream, options); - } - /** - * Create a credentials instance using a given impersonated input options. - * @param json The impersonated input object. 
- * @returns JWT or UserRefresh Client with data - */ - fromImpersonatedJSON(json) { - var _a, _b, _c, _d; - if (!json) { - throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); } - if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); } - if (!json.source_credentials) { - throw new Error('The incoming JSON object does not contain a source_credentials field'); + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); } - if (!json.service_account_impersonation_url) { - throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } } - // Create source client for impersonation - const sourceClient = new refreshclient_1.UserRefreshClient(json.source_credentials.client_id, json.source_credentials.client_secret, json.source_credentials.refresh_token); - // Extreact service account from service_account_impersonation_url - const targetPrincipal = (_b = (_a = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _a === void 0 ? void 0 : _a.groups) === null || _b === void 0 ? void 0 : _b.target; - if (!targetPrincipal) { - throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); } - const targetScopes = (_c = this.getAnyScopes()) !== null && _c !== void 0 ? _c : []; - const client = new impersonated_1.Impersonated({ - delegates: (_d = json.delegates) !== null && _d !== void 0 ? _d : [], - sourceClient: sourceClient, - targetPrincipal: targetPrincipal, - targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], - }); - return client; + // Return subject token from response. + return executableResponse.subjectToken; } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 8941: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(4782); +const executable_response_1 = __nccwpck_require__(8749); +const childProcess = __nccwpck_require__(2081); +const fs = __nccwpck_require__(7147); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { /** - * Create a credentials instance using the given input options. - * @param json The input object. - * @param options The JWT or UserRefresh options for the client - * @returns JWT or UserRefresh Client with data + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. */ - fromJSON(json, options) { - let client; - if (!json) { - throw new Error('Must pass in a JSON object containing the Google auth settings.'); - } - options = options || {}; - if (json.type === 'authorized_user') { - client = new refreshclient_1.UserRefreshClient(options); - client.fromJSON(json); - } - else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - client = this.fromImpersonatedJSON(json); - } - else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - client = externalclient_1.ExternalAccountClient.fromJSON(json, options); - client.scopes = this.getAnyScopes(); + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); } - else { - options.scopes = this.scopes; - client = new jwtclient_1.JWT(options); - this.setGapicJWTValues(client); - client.fromJSON(json); + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); } - return client; + this.outputFile = options.outputFile; } /** - * Return a JWT or UserRefreshClient from JavaScript object, caching both the - * object used to instantiate and the client. - * @param json The input object. - * @param options The JWT or UserRefresh options for the client - * @returns JWT or UserRefresh Client with data + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. */ - _cacheClientFromJSON(json, options) { - let client; - // create either a UserRefreshClient or JWT client. 
- options = options || {}; - if (json.type === 'authorized_user') { - client = new refreshclient_1.UserRefreshClient(options); - client.fromJSON(json); - } - else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - client = this.fromImpersonatedJSON(json); - } - else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - client = externalclient_1.ExternalAccountClient.fromJSON(json, options); - client.scopes = this.getAnyScopes(); - } - else { - options.scopes = this.scopes; - client = new jwtclient_1.JWT(options); - this.setGapicJWTValues(client); - client.fromJSON(json); - } - // cache both raw data used to instantiate client and client itself. - this.jsonContent = json; - this.cachedCredential = client; - return client; - } - fromStream(inputStream, optionsOrCallback = {}, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else { - options = optionsOrCallback; - } - if (callback) { - this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); - } - else { - return this.fromStreamAsync(inputStream, options); - } - } - fromStreamAsync(inputStream, options) { + retrieveResponseFromExecutable(envMap) { return new Promise((resolve, reject) => { - if (!inputStream) { - throw new Error('Must pass in a stream containing the Google auth settings.'); - } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. 
try { - const data = JSON.parse(s); - const r = this._cacheClientFromJSON(data, options); - return resolve(r); + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); } - catch (err) { - // If we failed parsing this.keyFileName, assume that it - // is a PEM or p12 certificate: - if (!this.keyFilename) - throw err; - const client = new jwtclient_1.JWT({ - ...this.clientOptions, - keyFile: this.keyFilename, - }); - this.cachedCredential = client; - this.setGapicJWTValues(client); - return resolve(client); + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); } } - catch (err) { - return reject(err); + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); } }); }); } /** - * Create a credentials instance using the given API key string. - * @param apiKey The API key string - * @param options An optional options object. - * @returns A JWT loaded from the key - */ - fromAPIKey(apiKey, options) { - options = options || {}; - const client = new jwtclient_1.JWT(options); - client.fromAPIKey(apiKey); - return client; - } - /** - * Determines whether the current operating system is Windows. - * @api private + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. */ - _isWindows() { - const sys = os.platform(); - if (sys && sys.length >= 3) { - if (sys.substring(0, 3).toLowerCase() === 'win') { - return true; - } + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; } - return false; - } - /** - * Run the Google Cloud SDK command that prints the default project ID - */ - async getDefaultServiceProjectId() { - return new Promise(resolve => { - (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { - if (!err && stdout) { - try { - const projectId = JSON.parse(stdout).configuration.properties.core.project; - resolve(projectId); - return; - } - catch (e) { - // ignore errors - } - } - resolve(null); - }); + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. + if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } } /** - * Loads the project id from environment variables. 
- * @api private - */ - getProductionProjectId() { - return (process.env['GCLOUD_PROJECT'] || - process.env['GOOGLE_CLOUD_PROJECT'] || - process.env['gcloud_project'] || - process.env['google_cloud_project']); - } - /** - * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. - * @api private + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. */ - async getFileProjectId() { - if (this.cachedCredential) { - // Try to read the project ID from the cached credentials file - return this.cachedCredential.projectId; + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); } - // Ensure the projectId is loaded from the keyFile if available. - if (this.keyFilename) { - const creds = await this.getClient(); - if (creds && creds.projectId) { - return creds.projectId; + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); } } - // Try to load a credentials file and read its project ID - const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); - if (r) { - return r.projectId; - } - else { - return null; - } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 8790: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; } /** - * Gets the project ID from external account client if available. + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
*/ - async getExternalAccountClientProjectId() { - if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - return null; - } - const creds = await this.getClient(); - // Do not suppress the underlying error, as the error could contain helpful - // information for debugging and fixing. This is especially true for - // external account creds as in order to get the project ID, the following - // operations have to succeed: - // 1. Valid credentials file should be supplied. - // 2. Ability to retrieve access tokens from STS token exchange API. - // 3. Ability to exchange for service account impersonated credentials (if - // enabled). - // 4. Ability to get project info using the access token from step 2 or 3. - // Without surfacing the error, it is harder for developers to determine - // which step went wrong. - return await creds.getProjectId(); + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); } /** - * Gets the Compute Engine project ID if it can be inferred. + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. */ - async getGCEProjectId() { - try { - const r = await gcpMetadata.project('project-id'); - return r; + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); } - catch (e) { - // Ignore any errors - return null; + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; } - getCredentials(callback) { + fromStream(inputStream, callback) { if (callback) { - this.getCredentialsAsync().then(r => callback(null, r), callback); + this.fromStreamAsync(inputStream).then(() => callback(), callback); } else { - return this.getCredentialsAsync(); + return this.fromStreamAsync(inputStream); } } - async getCredentialsAsync() { - const client = await this.getClient(); - if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { - const serviceAccountEmail = client.getServiceAccountEmail(); - if (serviceAccountEmail) { - return { client_email: serviceAccountEmail }; + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); } - } - if (this.jsonContent) { - const credential = { - client_email: this.jsonContent.client_email, - private_key: this.jsonContent.private_key, - }; - return credential; - } - const isGCE = await this._checkIsGCE(); - if (!isGCE) { - throw new Error('Unknown error.'); - } - // For GCE, return the service account details from the metadata server - // NOTE: The trailing '/' at the end of service-accounts/ is very important! 
- // The GCF metadata server doesn't respect querystring params if this / is - // not included. - const data = await gcpMetadata.instance({ - property: 'service-accounts/', - params: { recursive: 'true' }, + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); }); - if (!data || !data.default || !data.default.email) { - throw new Error('Failure from metadata server.'); - } - return { client_email: data.default.email }; + } +} +exports.UserRefreshClient = UserRefreshClient; + + +/***/ }), + +/***/ 6308: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const querystring = __nccwpck_require__(3477); +const transporters_1 = __nccwpck_require__(2649); +const oauth2common_1 = __nccwpck_require__(9510); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); } /** - * Automatically obtain a client based on the provided configuration. If no - * options were passed, use Application Default Credentials. + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. 
*/ - async getClient() { - if (!this.cachedCredential) { - if (this.jsonContent) { - this._cacheClientFromJSON(this.jsonContent, this.clientOptions); - } - else if (this.keyFilename) { - const filePath = path.resolve(this.keyFilename); - const stream = fs.createReadStream(filePath); - await this.fromStreamAsync(stream, this.clientOptions); + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; } - else { - await this.getApplicationDefaultAsync(this.clientOptions); + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); } + // Request could fail before the server responds. + throw error; } - return this.cachedCredential; } - /** - * Creates a client which will fetch an ID token for authorization. - * @param targetAudience the audience for the fetched ID token. - * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. - */ - async getIdTokenClient(targetAudience) { - const client = await this.getClient(); - if (!('fetchIdToken' in client)) { - throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); +} +exports.StsCredentials = StsCredentials; + + +/***/ }), + +/***/ 4693: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(6463); +const crypto_1 = __nccwpck_require__(8043); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); } - return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } - /** - * Automatically obtain application default credentials, and return - * an access token for making requests. - */ - async getAccessToken() { - const client = await this.getClient(); - return (await client.getAccessToken()).token; + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); } - /** - * Obtain the HTTP headers that will provide authorization for a given - * request. - */ - async getRequestHeaders(url) { - const client = await this.getClient(); - return client.getRequestHeaders(url); + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. 
+ const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; } /** - * Obtain credentials for a request, then attach the appropriate headers to - * the request options. - * @param opts Axios or Request options on which to attach the headers + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. */ - async authorizeRequest(opts) { - opts = opts || {}; - const url = opts.url || opts.uri; - const client = await this.getClient(); - const headers = await client.getRequestHeaders(url); - opts.headers = Object.assign(opts.headers || {}, headers); - return opts; + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); } /** - * Automatically obtain application default credentials, and make an - * HTTP request using the given options. - * @param opts Axios request options for the HTTP request. + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - async request(opts) { - const client = await this.getClient(); - return client.request(opts); + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; + + +/***/ }), + +/***/ 8043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; +const crypto_1 = __nccwpck_require__(4693); +const crypto_2 = __nccwpck_require__(757); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +exports.createCrypto = createCrypto; +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +exports.hasBrowserCrypto = hasBrowserCrypto; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} +exports.fromArrayBufferToHex = fromArrayBufferToHex; + + +/***/ }), + +/***/ 757: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6113); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); } /** - * Determine the compute environment in which the code is running. + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. 
*/ - getEnv() { - return (0, envDetect_1.getEnv)(); + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); } /** - * Sign the given data with the current private key, or go out - * to the IAM API to sign it. - * @param data The data to be signed. + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. */ - async sign(data) { - const client = await this.getClient(); - const crypto = (0, crypto_1.createCrypto)(); - if (client instanceof jwtclient_1.JWT && client.key) { - const sign = await crypto.sign(client.key, data); - return sign; - } - const creds = await this.getCredentials(); - if (!creds.client_email) { - throw new Error('Cannot sign data without `client_email`.'); - } - return this.signBlob(crypto, creds.client_email, data); - } - async signBlob(crypto, emailOrUniqueId, data) { - const url = 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + - `${emailOrUniqueId}:signBlob`; - const res = await this.request({ - method: 'POST', - url, - data: { - payload: crypto.encodeBase64StringUtf8(data), - }, - }); - return res.data.signedBlob; + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); } } -exports.GoogleAuth = GoogleAuth; +exports.NodeCrypto = NodeCrypto; /** - * Export DefaultTransporter as a static property of the class. + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. */ -GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; -//# sourceMappingURL=googleauth.js.map +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + /***/ }), -/***/ 9735: +/***/ 810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(695); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +exports.gcpMetadata = __nccwpck_require__(3563); +var authclient_1 = __nccwpck_require__(4627); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(6875); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(1380); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(9735); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(298); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(8740); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(3959); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(1103); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(3936); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +var loginticket_1 = __nccwpck_require__(4524); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(8790); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(1569); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var identitypoolclient_1 = __nccwpck_require__(117); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(4381); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(7391); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { 
return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(6270); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(4782); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +var transporters_1 = __nccwpck_require__(2649); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 6608: /***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2014 Google LLC +// Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29048,42 +23727,36 @@ GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IAMAuth = void 0; -class IAMAuth { - /** - * IAM credentials. - * - * @param selector the iam authority selector - * @param token the token - * @constructor - */ - constructor(selector, token) { - this.selector = selector; - this.token = token; - this.selector = selector; - this.token = token; - } - /** - * Acquire the HTTP headers required to make an authenticated request. - */ - getRequestHeaders() { - return { - 'x-goog-iam-authority-selector': this.selector, - 'x-goog-iam-authorization-token': this.token, - }; +exports.validate = void 0; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } } } -exports.IAMAuth = IAMAuth; -//# sourceMappingURL=iam.js.map +exports.validate = validate; + /***/ }), -/***/ 117: +/***/ 2649: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29096,159 +23769,111 @@ exports.IAMAuth = IAMAuth; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IdentityPoolClient = void 0; -const fs = __nccwpck_require__(7147); -const util_1 = __nccwpck_require__(3837); -const baseexternalclient_1 = __nccwpck_require__(7391); -// fs.readfile is undefined in browser karma tests causing -// `npm run browser-test` to fail as test.oauth2.ts imports this file via -// src/index.ts. -// Fallback to void function to avoid promisify throwing a TypeError. -const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); -const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); -const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); -/** - * Defines the Url-sourced and file-sourced external account clients mainly - * used for K8s and Azure workloads. - */ -class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(9555); +const options_1 = __nccwpck_require__(6608); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(1402); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } /** - * Instantiate an IdentityPoolClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid file-sourced or - * url-sourced credential or a workforce pool user project is provided - * with a non workforce audience. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. */ - constructor(options, additionalOptions) { - var _a, _b; - super(options, additionalOptions); - this.file = options.credential_source.file; - this.url = options.credential_source.url; - this.headers = options.credential_source.headers; - if (!this.file && !this.url) { - throw new Error('No valid Identity Pool "credential_source" provided'); - } - // Text is the default format type. - this.formatType = ((_a = options.credential_source.format) === null || _a === void 0 ? void 0 : _a.type) || 'text'; - this.formatSubjectTokenFieldName = - (_b = options.credential_source.format) === null || _b === void 0 ? 
void 0 : _b.subject_token_field_name; - if (this.formatType !== 'json' && this.formatType !== 'text') { - throw new Error(`Invalid credential_source format "${this.formatType}"`); - } - if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) { - throw new Error('Missing subject_token_field_name for JSON credential_source format'); + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } } + return opts; } /** - * Triggered when a external subject token is needed to be exchanged for a GCP - * access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this either retrieves the local credential from a file location (k8s - * workload) or by sending a GET request to a local metadata server (Azure - * workloads). - * @return A promise that resolves with the external subject token. + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. */ - async retrieveSubjectToken() { - if (this.file) { - return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName); - } - return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers); + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; } /** - * Looks up the external subject token in the file path provided and - * resolves with that token. - * @param file The file path where the external credential is located. - * @param formatType The token file or URL response type (JSON or text). - * @param formatSubjectTokenFieldName For JSON response types, this is the - * subject_token field name. For Azure, this is access_token. For text - * response types, this is ignored. - * @return A promise that resolves with the external subject token. + * Changes the error to include details from the body. */ - async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) { - // Make sure there is a file at the path. lstatSync will throw if there is - // nothing there. - try { - // Resolve path to actual file in case of symlink. Expect a thrown error - // if not resolvable. - filePath = await realpath(filePath); - if (!(await lstat(filePath)).isFile()) { - throw new Error(); + processError(e) { + const res = e.response; + const err = e; + const body = res ? 
res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; } - } - catch (err) { - if (err instanceof Error) { - err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; } - throw err; - } - let subjectToken; - const rawText = await readFile(filePath, { encoding: 'utf8' }); - if (formatType === 'text') { - subjectToken = rawText; - } - else if (formatType === 'json' && formatSubjectTokenFieldName) { - const json = JSON.parse(rawText); - subjectToken = json[formatSubjectTokenFieldName]; - } - if (!subjectToken) { - throw new Error('Unable to parse the subject_token from the credential_source file'); - } - return subjectToken; - } - /** - * Sends a GET request to the URL provided and resolves with the returned - * external subject token. - * @param url The URL to call to retrieve the subject token. This is typically - * a local metadata server. - * @param formatType The token file or URL response type (JSON or text). - * @param formatSubjectTokenFieldName For JSON response types, this is the - * subject_token field name. For Azure, this is access_token. For text - * response types, this is ignored. - * @param headers The optional additional headers to send with the request to - * the metadata server url. - * @return A promise that resolves with the external subject token. - */ - async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) { - const opts = { - url, - method: 'GET', - headers, - responseType: formatType, - }; - let subjectToken; - if (formatType === 'text') { - const response = await this.transporter.request(opts); - subjectToken = response.data; - } - else if (formatType === 'json' && formatSubjectTokenFieldName) { - const response = await this.transporter.request(opts); - subjectToken = response.data[formatSubjectTokenFieldName]; } - if (!subjectToken) { - throw new Error('Unable to parse the subject_token from the credential_source URL'); + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; } - return subjectToken; + return err; } } -exports.IdentityPoolClient = IdentityPoolClient; -//# sourceMappingURL=identitypoolclient.js.map +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + /***/ }), -/***/ 298: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 8905: +/***/ (function(__unused_webpack_module, exports) { "use strict"; -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29261,42203 +23886,32735 @@ exports.IdentityPoolClient = IdentityPoolClient; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IdTokenClient = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -class IdTokenClient extends oauth2client_1.OAuth2Client { - /** - * Google ID Token client - * - * Retrieve access token from the metadata server. - * See: https://developers.google.com/compute/docs/authentication - */ - constructor(options) { - super(); - this.targetAudience = options.targetAudience; - this.idTokenProvider = options.idTokenProvider; - } - async getRequestMetadataAsync( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - url) { - if (!this.credentials.id_token || - (this.credentials.expiry_date || 0) < Date.now()) { - const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); - this.credentials = { - id_token: idToken, - expiry_date: this.getIdTokenExpiryDate(idToken), - }; - } - const headers = { - Authorization: 'Bearer ' + this.credentials.id_token, - }; - return { headers }; - } - getIdTokenExpiryDate(idToken) { - const payloadB64 = idToken.split('.')[1]; - if (payloadB64) { - const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); - return payload.exp * 1000; - } - } -} -exports.IdTokenClient = IdTokenClient; -//# sourceMappingURL=idtokenclient.js.map - -/***/ }), - -/***/ 1103: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - +exports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0; /** - * Copyright 2021 Google LLC + * Returns the camel case of a provided string. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * @remarks * - * http://www.apache.org/licenses/LICENSE-2.0 + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -const gaxios_1 = __nccwpck_require__(9555); -exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; -class Impersonated extends oauth2client_1.OAuth2Client { +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +exports.snakeToCamel = snakeToCamel; +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. 
+ * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { /** - * Impersonated service account credentials. - * - * Create a new access token by impersonating another service account. * - * Impersonated Credentials allowing credentials issued to a user or - * service account to impersonate another. The source project using - * Impersonated Credentials must enable the "IAMCredentials" API. - * Also, the target service account must grant the orginating principal - * the "Service Account Token Creator" IAM role. - * - * @param {object} options - The configuration object. - * @param {object} [options.sourceClient] the source credential used as to - * acquire the impersonated credentials. - * @param {string} [options.targetPrincipal] the service account to - * impersonate. - * @param {string[]} [options.delegates] the chained list of delegates - * required to grant the final access_token. If set, the sequence of - * identities must have "Service Account Token Creator" capability granted to - * the preceding identity. For example, if set to [serviceAccountB, - * serviceAccountC], the sourceCredential must have the Token Creator role on - * serviceAccountB. serviceAccountB must have the Token Creator on - * serviceAccountC. Finally, C must have Token Creator on target_principal. - * If left unset, sourceCredential must have that role on targetPrincipal. - * @param {string[]} [options.targetScopes] scopes to request during the - * authorization grant. - * @param {number} [options.lifetime] number of seconds the delegated - * credential should be valid for up to 3600 seconds by default, or 43,200 - * seconds by extending the token's lifetime, see: - * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth - * @param {string} [options.endpoint] api endpoint override. + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available */ - constructor(options = {}) { - var _a, _b, _c, _d, _e, _f; - super(options); - this.credentials = { - expiry_date: 1, - refresh_token: 'impersonated-placeholder', - }; - this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); - this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; - this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; - this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; - this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; - this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; } - /** - * Refreshes the access token. 
- * @param refreshToken Unused parameter - */ - async refreshToken(refreshToken) { - var _a, _b, _c, _d, _e, _f; - try { - await this.sourceClient.getAccessToken(); - const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; - const u = `${this.endpoint}/v1/${name}:generateAccessToken`; - const body = { - delegates: this.delegates, - scope: this.targetScopes, - lifetime: this.lifetime + 's', - }; - const res = await this.sourceClient.request({ - url: u, - data: body, - method: 'POST', - }); - const tokenResponse = res.data; - this.credentials.access_token = tokenResponse.accessToken; - this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); - return { - tokens: this.credentials, - res, - }; - } - catch (error) { - if (!(error instanceof Error)) - throw error; - let status = 0; - let message = ''; - if (error instanceof gaxios_1.GaxiosError) { - status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; - message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; - } - if (status && message) { - error.message = `${status}: unable to impersonate: ${message}`; - throw error; - } - else { - error.message = `unable to impersonate: ${error}`; - throw error; - } - } + return { get }; +} +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; } /** - * Generates an OpenID Connect ID token for a service account. + * Add an item to the cache. * - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. * - * @param targetAudience the audience for the fetched ID token. - * @param options the for the request - * @return an OpenID Connect ID token + * @param key the key to retrieve */ - async fetchIdToken(targetAudience, options) { - var _a; - await this.sourceClient.getAccessToken(); - const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; - const u = `${this.endpoint}/v1/${name}:generateIdToken`; - const body = { - delegates: this.delegates, - audience: targetAudience, - includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? 
_a : true, - }; - const res = await this.sourceClient.request({ - url: u, - data: body, - method: 'POST', - }); - return res.data.token; + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; } } -exports.Impersonated = Impersonated; -//# sourceMappingURL=impersonated.js.map +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; + /***/ }), -/***/ 8740: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6031: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -// Copyright 2015 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JWTAccess = void 0; +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(7147); +const gaxios_1 = __nccwpck_require__(9555); const jws = __nccwpck_require__(4636); -const LRU = __nccwpck_require__(7129); -const DEFAULT_HEADER = { - alg: 'RS256', - typ: 'JWT', -}; -class JWTAccess { +const path = __nccwpck_require__(1017); +const util_1 = __nccwpck_require__(3837); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } /** - * JWTAccess service account credentials. - * - * Create a new access token by using the credential to create a new JWT token - * that's recognized as the access token. + * Create a GoogleToken. * - * @param email the service account email address. - * @param key the private key that will be used to sign the token. - * @param keyId the ID of the private key used to sign the token. + * @param options Configuration object. */ - constructor(email, key, keyId, eagerRefreshThresholdMillis) { - this.cache = new LRU({ - max: 500, - maxAge: 60 * 60 * 1000, - }); - this.email = email; - this.key = key; - this.keyId = keyId; - this.eagerRefreshThresholdMillis = - eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); } /** - * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * Returns whether the token has expired. * - * @param url The URI being authorized. - * @param scopes The scope or scopes being authorized - * @returns A string that returns the cached key. + * @return true if the token has expired, false otherwise. */ - getCachedKey(url, scopes) { - let cacheKey = url; - if (scopes && Array.isArray(scopes) && scopes.length) { - cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; - } - else if (typeof scopes === 'string') { - cacheKey = url ? 
`${url}_${scopes}` : scopes; + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; } - if (!cacheKey) { - throw Error('Scopes or url must be provided'); + else { + return true; } - return cacheKey; } /** - * Get a non-expired access token, after refreshing if necessary. + * Returns whether the token will expire within eagerRefreshThresholdMillis * - * @param url The URI being authorized. - * @param additionalClaims An object with a set of additional claims to - * include in the payload. - * @returns An object that includes the authorization header. + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. */ - getRequestHeaders(url, additionalClaims, scopes) { - // Return cached authorization headers, unless we are within - // eagerRefreshThresholdMillis ms of them expiring: - const key = this.getCachedKey(url, scopes); - const cachedToken = this.cache.get(key); - const now = Date.now(); - if (cachedToken && - cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { - return cachedToken.headers; - } - const iat = Math.floor(Date.now() / 1000); - const exp = JWTAccess.getExpirationTime(iat); - let defaultClaims; - // Turn scopes into space-separated string - if (Array.isArray(scopes)) { - scopes = scopes.join(' '); - } - // If scopes are specified, sign with scopes - if (scopes) { - defaultClaims = { - iss: this.email, - sub: this.email, - scope: scopes, - exp, - iat, - }; + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; } else { - defaultClaims = { - iss: this.email, - sub: this.email, - aud: url, - exp, - iat, - }; + return true; } - // if additionalClaims are provided, ensure they do not collide with - // other required claims. - if (additionalClaims) { - for (const claim in defaultClaims) { - if (additionalClaims[claim]) { - throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); - } - } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; } - const header = this.keyId - ? { ...DEFAULT_HEADER, kid: this.keyId } - : DEFAULT_HEADER; - const payload = Object.assign(defaultClaims, additionalClaims); - // Sign the jwt and add it to the cache - const signedJWT = jws.sign({ header, payload, secret: this.key }); - const headers = { Authorization: `Bearer ${signedJWT}` }; - this.cache.set(key, { - expiration: exp * 1000, - headers, - }); - return headers; + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); } /** - * Returns an expiration time for the JWT token. - * - * @param iat The issued at time for the JWT. - * @returns An expiration time for the JWT. + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. 
+ * @returns an object with privateKey and clientEmail properties */ - static getExpirationTime(iat) { - const exp = iat + 3600; // 3600 seconds = 1 hour - return exp; + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } } - /** - * Create a JWTAccess credentials instance using the given input options. - * @param json The input object. - */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the service account auth settings.'); + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; } - if (!json.client_email) { - throw new Error('The incoming JSON object does not contain a client_email field'); + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); } - if (!json.private_key) { - throw new Error('The incoming JSON object does not contain a private_key field'); + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function 
_GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. + */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; } - // Extract the relevant information from the json key file. - this.email = json.client_email; - this.key = json.private_key; - this.keyId = json.private_key_id; - this.projectId = json.project_id; + throw e; } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 1621: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; + + +/***/ }), + +/***/ 7492: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(1808)); +const tls_1 = __importDefault(__nccwpck_require__(4404)); +const url_1 = __importDefault(__nccwpck_require__(7310)); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const once_1 = __importDefault(__nccwpck_require__(1040)); +const agent_base_1 = __nccwpck_require__(9690); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); } else { - return this.fromStreamAsync(inputStream); + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; } - fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - reject(new Error('Must pass in a stream containing the service account auth settings.')); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; } - let s = ''; - inputStream - .setEncoding('utf8') - .on('data', chunk => (s += chunk)) - .on('error', reject) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - resolve(); + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); } - catch (err) { - reject(err); + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); } - }); + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; }); } } -exports.JWTAccess = JWTAccess; -//# sourceMappingURL=jwtaccess.js.map +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 3764: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(7492)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 3959: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5098: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -// Copyright 2013 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JWT = void 0; -const gtoken_1 = __nccwpck_require__(6031); -const jwtaccess_1 = __nccwpck_require__(8740); -const oauth2client_1 = __nccwpck_require__(3936); -class JWT extends oauth2client_1.OAuth2Client { - constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { - const opts = optionsOrEmail && typeof optionsOrEmail === 'object' - ? optionsOrEmail - : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; - super({ - eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis, - forceRefreshOnFailure: opts.forceRefreshOnFailure, - }); - this.email = opts.email; - this.keyFile = opts.keyFile; - this.key = opts.key; - this.keyId = opts.keyId; - this.scopes = opts.scopes; - this.subject = opts.subject; - this.additionalClaims = opts.additionalClaims; - this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; - } - /** - * Creates a copy of the credential with the specified scopes. - * @param scopes List of requested scopes or a single scope. - * @return The cloned instance. - */ - createScoped(scopes) { - return new JWT({ - email: this.email, - keyFile: this.keyFile, - key: this.key, - keyId: this.keyId, - scopes, - subject: this.subject, - additionalClaims: this.additionalClaims, - }); - } - /** - * Obtains the metadata to be sent with the request. - * - * @param url the URI being authorized. - */ - async getRequestMetadataAsync(url) { - url = this.defaultServicePath ? 
`https://${this.defaultServicePath}/` : url; - const useSelfSignedJWT = (!this.hasUserScopes() && url) || - (this.useJWTAccessWithScope && this.hasAnyScopes()); - if (!this.apiKey && useSelfSignedJWT) { - if (this.additionalClaims && - this.additionalClaims.target_audience) { - const { tokens } = await this.refreshToken(); - return { - headers: this.addSharedMetadataHeaders({ - Authorization: `Bearer ${tokens.id_token}`, - }), - }; - } - else { - // no scopes have been set, but a uri has been provided. Use JWTAccess - // credentials. - if (!this.access) { - this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); - } - let scopes; - if (this.hasUserScopes()) { - scopes = this.scopes; - } - else if (!url) { - scopes = this.defaultScopes; - } - const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, - // Scopes take precedent over audience for signing, - // so we only provide them if useJWTAccessWithScope is on - this.useJWTAccessWithScope ? scopes : undefined); - return { headers: this.addSharedMetadataHeaders(headers) }; - } - } - else if (this.hasAnyScopes() || this.apiKey) { - return super.getRequestMetadataAsync(url); +const net_1 = __importDefault(__nccwpck_require__(1808)); +const tls_1 = __importDefault(__nccwpck_require__(4404)); +const url_1 = __importDefault(__nccwpck_require__(7310)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const agent_base_1 = __nccwpck_require__(9690); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); } else { - // If no audience, apiKey, or scopes are provided, we should not attempt - // to populate any headers: - return { headers: {} }; + opts = _opts; } - } - /** - * Fetches an ID token. - * @param targetAudience the audience for the fetched ID token. - */ - async fetchIdToken(targetAudience) { - // Create a new gToken for fetching an ID token - const gtoken = new gtoken_1.GoogleToken({ - iss: this.email, - sub: this.subject, - scope: this.scopes || this.defaultScopes, - keyFile: this.keyFile, - key: this.key, - additionalClaims: { target_audience: targetAudience }, - transporter: this.transporter, - }); - await gtoken.getToken({ - forceRefresh: true, - }); - if (!gtoken.idToken) { - throw new Error('Unknown error: Failed to fetch ID token'); + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); } - return gtoken.idToken; - } - /** - * Determine if there are currently scopes available. 
- */ - hasUserScopes() { - if (!this.scopes) { - return false; + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); } - return this.scopes.length > 0; - } - /** - * Are there any default or user scopes defined. - */ - hasAnyScopes() { - if (this.scopes && this.scopes.length > 0) - return true; - if (this.defaultScopes && this.defaultScopes.length > 0) - return true; - return false; - } - authorize(callback) { - if (callback) { - this.authorizeAsync().then(r => callback(null, r), callback); + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; } - else { - return this.authorizeAsync(); + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; } - } - async authorizeAsync() { - const result = await this.refreshToken(); - if (!result) { - throw new Error('No result returned'); + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; } - this.credentials = result.tokens; - this.credentials.refresh_token = 'jwt-placeholder'; - this.key = this.gtoken.key; - this.email = this.gtoken.iss; - return result.tokens; + this.proxy = proxy; } /** - * Refreshes the access token. - * @param refreshToken ignored - * @private + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - const gtoken = this.createGToken(); - const token = await gtoken.getToken({ - forceRefresh: this.isTokenExpiring(), + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. 
+ let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; }); - const tokens = { - access_token: token.access_token, - token_type: 'Bearer', - expiry_date: gtoken.expiresAt, - id_token: gtoken.idToken, - }; - this.emit('tokens', tokens); - return { res: null, tokens }; } - /** - * Create a gToken if it doesn't already exist. - */ - createGToken() { - if (!this.gtoken) { - this.gtoken = new gtoken_1.GoogleToken({ - iss: this.email, - sub: this.subject, - scope: this.scopes || this.defaultScopes, - keyFile: this.keyFile, - key: this.key, - additionalClaims: this.additionalClaims, - transporter: this.transporter, - }); +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; } - return this.gtoken; } - /** - * Create a JWT credentials instance using the given input options. - * @param json The input object. - */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the service account auth settings.'); + return ret; +} +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 7219: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(5098)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 595: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(8237)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); } - if (!json.client_email) { - throw new Error('The incoming JSON object does not contain a client_email field'); + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); } - if (!json.private_key) { - throw new Error('The incoming JSON object does not contain a private_key field'); + function onclose(err) { + debug('onclose had error %o', err); } - // Extract the relevant information from the json key file. - this.email = json.client_email; - this.key = json.private_key; - this.keyId = json.private_key_id; - this.projectId = json.project_id; - this.quotaProjectId = json.quota_project_id; - } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); + function onend() { + debug('onend'); } - else { - return this.fromStreamAsync(inputStream); + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); } - } - fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - throw new Error('Must pass in a stream containing the service account auth settings.'); + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - resolve(); - } - catch (e) { - reject(e); - } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered }); - }); - } - /** - * Creates a JWT credentials instance using an API Key for authentication. 
- * @param apiKey The API Key in string form. - */ - fromAPIKey(apiKey) { - if (typeof apiKey !== 'string') { - throw new Error('Must provide an API Key string.'); - } - this.apiKey = apiKey; - } - /** - * Using the key or keyFile on the JWT client, obtain an object that contains - * the key and the client email. - */ - async getCredentials() { - if (this.key) { - return { private_key: this.key, client_email: this.email }; - } - else if (this.keyFile) { - const gtoken = this.createGToken(); - const creds = await gtoken.getCredentials(this.keyFile); - return { private_key: creds.privateKey, client_email: creds.clientEmail }; } - throw new Error('A key or a keyFile must be provided to getCredentials.'); - } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 4124: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(3837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(8544); } -exports.JWT = JWT; -//# sourceMappingURL=jwtclient.js.map -/***/ }), -/***/ 4524: -/***/ ((__unused_webpack_module, exports) => { +/***/ }), -"use strict"; +/***/ 8544: +/***/ ((module) => { -// Copyright 2014 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.LoginTicket = void 0; -class LoginTicket { - /** - * Create a simple class to extract user ID from an ID Token - * - * @param {string} env Envelope of the jwt - * @param {TokenPayload} pay Payload of the jwt - * @constructor - */ - constructor(env, pay) { - this.envelope = env; - this.payload = pay; - } - getEnvelope() { - return this.envelope; - } - getPayload() { - return this.payload; - } - /** - * Create a simple class to extract user ID from an ID Token - * - * @return The user ID - */ - getUserId() { - const payload = this.getPayload(); - if (payload && payload.sub) { - return payload.sub; +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true } - return null; + }) } - /** - * Returns attributes from the login ticket. This can contain - * various information about the user session. 
- * - * @return The envelope and payload - */ - getAttributes() { - return { envelope: this.getEnvelope(), payload: this.getPayload() }; + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor } + } } -exports.LoginTicket = LoginTicket; -//# sourceMappingURL=loginticket.js.map + /***/ }), -/***/ 3936: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1554: +/***/ ((module) => { "use strict"; -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const querystring = __nccwpck_require__(3477); -const stream = __nccwpck_require__(2781); -const formatEcdsa = __nccwpck_require__(1728); -const crypto_1 = __nccwpck_require__(8043); -const authclient_1 = __nccwpck_require__(4627); -const loginticket_1 = __nccwpck_require__(4524); -var CodeChallengeMethod; -(function (CodeChallengeMethod) { - CodeChallengeMethod["Plain"] = "plain"; - CodeChallengeMethod["S256"] = "S256"; -})(CodeChallengeMethod = exports.CodeChallengeMethod || (exports.CodeChallengeMethod = {})); -var CertificateFormat; -(function (CertificateFormat) { - CertificateFormat["PEM"] = "PEM"; - CertificateFormat["JWK"] = "JWK"; -})(CertificateFormat = exports.CertificateFormat || (exports.CertificateFormat = {})); -class OAuth2Client extends authclient_1.AuthClient { - constructor(optionsOrClientId, clientSecret, redirectUri) { - super(); - this.certificateCache = {}; - this.certificateExpiry = null; - this.certificateCacheFormat = CertificateFormat.PEM; - this.refreshTokenPromises = new Map(); - const opts = optionsOrClientId && typeof optionsOrClientId === 'object' - ? optionsOrClientId - : { clientId: optionsOrClientId, clientSecret, redirectUri }; - this._clientId = opts.clientId; - this._clientSecret = opts.clientSecret; - this.redirectUri = opts.redirectUri; - this.eagerRefreshThresholdMillis = - opts.eagerRefreshThresholdMillis || 5 * 60 * 1000; - this.forceRefreshOnFailure = !!opts.forceRefreshOnFailure; - } - /** - * Generates URL for consent page landing. - * @param opts Options. - * @return URL to consent page. 
- */ - generateAuthUrl(opts = {}) { - if (opts.code_challenge_method && !opts.code_challenge) { - throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); - } - opts.response_type = opts.response_type || 'code'; - opts.client_id = opts.client_id || this._clientId; - opts.redirect_uri = opts.redirect_uri || this.redirectUri; - // Allow scopes to be passed either as array or a string - if (Array.isArray(opts.scope)) { - opts.scope = opts.scope.join(' '); - } - const rootUrl = OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_; - return (rootUrl + - '?' + - querystring.stringify(opts)); - } - generateCodeVerifier() { - // To make the code compatible with browser SubtleCrypto we need to make - // this method async. - throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); - } - /** - * Convenience method to automatically generate a code_verifier, and its - * resulting SHA256. If used, this must be paired with a S256 - * code_challenge_method. - * - * For a full example see: - * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js - */ - async generateCodeVerifierAsync() { - // base64 encoding uses 6 bits per character, and we want to generate128 - // characters. 6*128/8 = 96. - const crypto = (0, crypto_1.createCrypto)(); - const randomString = crypto.randomBytesBase64(96); - // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ - // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just - // swapping out a few chars. - const codeVerifier = randomString - .replace(/\+/g, '~') - .replace(/=/g, '_') - .replace(/\//g, '-'); - // Generate the base64 encoded SHA256 - const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); - // We need to use base64UrlEncoding instead of standard base64 - const codeChallenge = unencodedCodeChallenge - .split('=')[0] - .replace(/\+/g, '-') - .replace(/\//g, '_'); - return { codeVerifier, codeChallenge }; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; + + +/***/ }), + +/***/ 5031: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var json_stringify = (__nccwpck_require__(8574).stringify); +var json_parse = __nccwpck_require__(9099); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify } - getToken(codeOrOptions, callback) { - const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; - if (callback) { - this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); - } - else { - return this.getTokenAsync(options); - } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; + + +/***/ }), + +/***/ 9099: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. +*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. 
+ + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; } - async getTokenAsync(options) { - const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_; - const values = { - code: options.code, - client_id: options.client_id || this._clientId, - client_secret: this._clientSecret, - redirect_uri: options.redirect_uri || this.redirectUri, - grant_type: 'authorization_code', - code_verifier: options.codeVerifier, - }; - const res = await this.transporter.request({ - method: 'POST', - url, - data: querystring.stringify(values), - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - }); - const tokens = res.data; - if (res.data && res.data.expires_in) { - tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res }; + if (options.storeAsString === true) { + _options.storeAsString = true; } - /** - * Refreshes the access token. - * @param refresh_token Existing refresh token. - * @private - */ - async refreshToken(refreshToken) { - if (!refreshToken) { - return this.refreshTokenNoCache(refreshToken); - } - // If a request to refresh using the same token has started, - // return the same promise. - if (this.refreshTokenPromises.has(refreshToken)) { - return this.refreshTokenPromises.get(refreshToken); - } - const p = this.refreshTokenNoCache(refreshToken).then(r => { - this.refreshTokenPromises.delete(refreshToken); - return r; - }, e => { - this.refreshTokenPromises.delete(refreshToken); - throw e; - }); - this.refreshTokenPromises.set(refreshToken, p); - return p; + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? 
options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } } - async refreshTokenNoCache(refreshToken) { - var _a; - if (!refreshToken) { - throw new Error('No refresh token is set.'); - } - const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_; - const data = { - refresh_token: refreshToken, - client_id: this._clientId, - client_secret: this._clientSecret, - grant_type: 'refresh_token', - }; - let res; - try { - // request for new token - res = await this.transporter.request({ - method: 'POST', - url, - data: querystring.stringify(data), - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - }); - } - catch (e) { - if (e instanceof gaxios_1.GaxiosError && - e.message === 'invalid_grant' && - ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && - /ReAuth/i.test(e.response.data.error_description)) { - e.message = JSON.stringify(e.response.data); - } - throw e; - } - const tokens = res.data; - // TODO: de-duplicate this code from a few spots - if (res.data && res.data.expires_in) { - tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res }; + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } } - refreshAccessToken(callback) { - if (callback) { - this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); - } - else { - return this.refreshAccessTokenAsync(); + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. 
+ + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; } - } - async refreshAccessTokenAsync() { - const r = await this.refreshToken(this.credentials.refresh_token); - const tokens = r.tokens; - tokens.refresh_token = this.credentials.refresh_token; - this.credentials = tokens; - return { credentials: this.credentials, res: r.res }; - } - getAccessToken(callback) { - if (callback) { - this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); } - else { - return this.getAccessTokenAsync(); + while (ch >= '0' && ch <= '9') { + string += ch; + next(); } - } - async getAccessTokenAsync() { - const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); - if (shouldRefresh) { - if (!this.credentials.refresh_token) { - if (this.refreshHandler) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - return { token: this.credentials.access_token }; - } - } - else { - throw new Error('No refresh token or refresh handler callback is set.'); + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(7558); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; } - const r = await this.refreshAccessTokenAsync(); - if (!r.credentials || (r.credentials && !r.credentials.access_token)) { - throw new Error('Could not refresh access token.'); - } - return { token: r.credentials.access_token, res: r.res }; + startAt = at; + } } - else { - return { token: this.credentials.access_token }; + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. 
+ + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array } - } - /** - * The main authentication interface. It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * In OAuth2Client, the result has the form: - * { Authorization: 'Bearer ' } - * @param url The optional url being authorized - */ - async getRequestHeaders(url) { - const headers = (await this.getRequestMetadataAsync(url)).headers; - return headers; - } - async getRequestMetadataAsync( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - url) { - const thisCreds = this.credentials; - if (!thisCreds.access_token && - !thisCreds.refresh_token && - !this.apiKey && - !this.refreshHandler) { - throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); } - if (thisCreds.access_token && !this.isTokenExpiring()) { - thisCreds.token_type = thisCreds.token_type || 'Bearer'; - const headers = { - Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers) }; + } + error('Bad array'); + }, + object = function () { + // Parse an object value. + + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object } - // If refreshHandler exists, call processAndValidateRefreshHandler(). - if (this.refreshHandler) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - const headers = { - Authorization: 'Bearer ' + this.credentials.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers) }; + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); } - } - if (this.apiKey) { - return { headers: { 'X-Goog-Api-Key': this.apiKey } }; - } - let r = null; - let tokens = null; - try { - r = await this.refreshToken(thisCreds.refresh_token); - tokens = r.tokens; - } - catch (err) { - const e = err; - if (e.response && - (e.response.status === 403 || e.response.status === 404)) { - e.message = `Could not refresh access token: ${e.message}`; + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); } - throw e; + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); } - const credentials = this.credentials; - credentials.token_type = credentials.token_type || 'Bearer'; - tokens.refresh_token = credentials.refresh_token; - this.credentials = tokens; - const headers = { - Authorization: credentials.token_type + ' ' + tokens.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; - } - /** - * Generates an URL to revoke the given token. - * @param token The existing token to be revoked. - */ - static getRevokeTokenUrl(token) { - const parameters = querystring.stringify({ token }); - return `${OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_}?${parameters}`; + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); } - revokeToken(token, callback) { - const opts = { - url: OAuth2Client.getRevokeTokenUrl(token), - method: 'POST', - }; - if (callback) { - this.transporter - .request(opts) - .then(r => callback(null, r), callback); - } - else { - return this.transporter.request(opts); - } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); } - revokeCredentials(callback) { - if (callback) { - this.revokeCredentialsAsync().then(res => callback(null, res), callback); - } - else { - return this.revokeCredentialsAsync(); - } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. 
+ + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; + + +/***/ }), + +/***/ 8574: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = __nccwpck_require__(7558); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. 
+ + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; } - async revokeCredentialsAsync() { - const token = this.credentials.access_token; - this.credentials = {}; - if (token) { - return this.revokeToken(token); - } - else { - throw new Error('No access token to revoke.'); - } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? 
'"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; } - request(opts, callback) { - if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); } - else { - return this.requestAsync(opts); + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); } - } - async requestAsync(opts, retry = false) { - let r2; - try { - const r = await this.getRequestMetadataAsync(opts.url); - opts.headers = opts.headers || {}; - if (r.headers && r.headers['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; - } - if (r.headers && r.headers.Authorization) { - opts.headers.Authorization = r.headers.Authorization; + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); } - if (this.apiKey) { - opts.headers['X-Goog-Api-Key'] = this.apiKey; + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; } - r2 = await this.transporter.request(opts); - } - catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - An access_token and refresh_token were available, but either no - // expiry_date was available or the forceRefreshOnFailure flag is set. - // The absent expiry_date case can happen when developers stash the - // access_token and refresh_token for later use, but the access_token - // fails on the first try because it's expired. Some developers may - // choose to enable forceRefreshOnFailure to mitigate time-related - // errors. - // Or the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. 
- // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - No refresh_token was available - // - An access_token and a refreshHandler callback were available, but - // either no expiry_date was available or the forceRefreshOnFailure - // flag is set. The access_token fails on the first try because it's - // expired. Some developers may choose to enable forceRefreshOnFailure - // to mitigate time-related errors. - const mayRequireRefresh = this.credentials && - this.credentials.access_token && - this.credentials.refresh_token && - (!this.credentials.expiry_date || this.forceRefreshOnFailure); - const mayRequireRefreshWithNoRefreshToken = this.credentials && - this.credentials.access_token && - !this.credentials.refresh_token && - (!this.credentials.expiry_date || this.forceRefreshOnFailure) && - this.refreshHandler; - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) { - await this.refreshAccessTokenAsync(); - return this.requestAsync(opts, true); + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; } - else if (!retry && - isAuthErr && - !isReadableStream && - mayRequireRefreshWithNoRefreshToken) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } } - return this.requestAsync(opts, true); } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); } - throw e; - } - return r2; - } - verifyIdToken(options, callback) { - // This function used to accept two arguments instead of an options object. - // Check the types to help users upgrade with less pain. - // This check can be removed after a 2.0 release. 
- if (callback && typeof callback !== 'function') { - throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); - } - if (callback) { - this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); - } - else { - return this.verifyIdTokenAsync(options); - } - } - async verifyIdTokenAsync(options) { - if (!options.idToken) { - throw new Error('The verifyIdToken method requires an ID Token'); - } - const response = await this.getFederatedSignonCertsAsync(); - const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, OAuth2Client.ISSUERS_, options.maxExpiry); - return login; - } - /** - * Obtains information about the provisioned access token. Especially useful - * if you want to check the scopes that were provisioned to a given token. - * - * @param accessToken Required. The Access Token for which you want to get - * user info. - */ - async getTokenInfo(accessToken) { - const { data } = await this.transporter.request({ - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - Authorization: `Bearer ${accessToken}`, - }, - url: OAuth2Client.GOOGLE_TOKEN_INFO_URL, - }); - const info = Object.assign({ - expiry_date: new Date().getTime() + data.expires_in * 1000, - scopes: data.scope.split(' '), - }, data); - delete info.expires_in; - delete info.scope; - return info; - } - getFederatedSignonCerts(callback) { - if (callback) { - this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); - } - else { - return this.getFederatedSignonCertsAsync(); + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; } } - async getFederatedSignonCertsAsync() { - const nowTime = new Date().getTime(); - const format = (0, crypto_1.hasBrowserCrypto)() - ? CertificateFormat.JWK - : CertificateFormat.PEM; - if (this.certificateExpiry && - nowTime < this.certificateExpiry.getTime() && - this.certificateCacheFormat === format) { - return { certs: this.certificateCache, format }; - } - let res; - let url; - switch (format) { - case CertificateFormat.PEM: - url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_; - break; - case CertificateFormat.JWK: - url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_; - break; - default: - throw new Error(`Unsupported certificate format ${format}`); - } - try { - res = await this.transporter.request({ url }); - } - catch (e) { - if (e instanceof Error) { - e.message = `Failed to retrieve verification certificates: ${e.message}`; + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. 
+ + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; } - throw e; - } - const cacheControl = res ? res.headers['cache-control'] : undefined; - let cacheAge = -1; - if (cacheControl) { - const pattern = new RegExp('max-age=([0-9]*)'); - const regexResult = pattern.exec(cacheControl); - if (regexResult && regexResult.length === 2) { - // Cache results with max-age (in seconds) - cacheAge = Number(regexResult[1]) * 1000; // milliseconds + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); } - } - let certificates = {}; - switch (format) { - case CertificateFormat.PEM: - certificates = res.data; - break; - case CertificateFormat.JWK: - for (const key of res.data.keys) { - certificates[key.kid] = key; - } - break; - default: - throw new Error(`Unsupported certificate format ${format}`); - } - const now = new Date(); - this.certificateExpiry = - cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); - this.certificateCache = certificates; - this.certificateCacheFormat = format; - return { certs: certificates, format, res }; + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } +}()); + + +/***/ }), + +/***/ 6010: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var bufferEqual = __nccwpck_require__(9239); +var Buffer = (__nccwpck_require__(1867).Buffer); +var crypto = __nccwpck_require__(6113); +var formatEcdsa = __nccwpck_require__(1728); +var util = __nccwpck_require__(3837); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
+var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. 
+ var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; + + +/***/ }), + +/***/ 4636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(3334); +var VerifyStream = __nccwpck_require__(5522); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; + + +/***/ }), + +/***/ 1868: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { + +/*global module, process*/ +var Buffer = (__nccwpck_require__(1867).Buffer); +var Stream = __nccwpck_require__(2781); +var util = __nccwpck_require__(3837); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; + + +/***/ }), + +/***/ 3334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(1867).Buffer); +var DataStream = __nccwpck_require__(1868); +var jwa = __nccwpck_require__(6010); +var Stream = __nccwpck_require__(2781); +var toString = __nccwpck_require__(5292); +var util = __nccwpck_require__(3837); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + 
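+// A minimal usage sketch of the jws module bundled above, kept entirely in comments so the
+// generated output's behaviour is unchanged. It assumes the standalone `jws` package entry
+// point (module 4636 above re-exports SignStream.sign and VerifyStream.verify the same way):
+//
+//   var jws = require('jws');
+//   // jws.sign === jwsSign: base64url(header) + '.' + base64url(payload) + '.' + signature
+//   var token = jws.sign({ header: { alg: 'HS256' }, payload: { hello: 'world' }, secret: 'shh' });
+//   // jws.verify === jwsVerify: re-derives the signature over the secured input and compares
+//   jws.verify(token, 'HS256', 'shh');              // => true
+//   jws.decode(token, { json: true }).payload;      // => { hello: 'world' }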
+SignStream.sign = jwsSign; + +module.exports = SignStream; + + +/***/ }), + +/***/ 5292: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(4300).Buffer); + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; + + +/***/ }), + +/***/ 5522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(1867).Buffer); +var DataStream = __nccwpck_require__(1868); +var jwa = __nccwpck_require__(6010); +var Stream = __nccwpck_require__(2781); +var toString = __nccwpck_require__(5292); +var util = __nccwpck_require__(3837); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + 
return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; + + +/***/ }), + +/***/ 7426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = __nccwpck_require__(3765) + + +/***/ }), + +/***/ 3583: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __nccwpck_require__(7426) +var extname = (__nccwpck_require__(1017).extname) + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. 
+ * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return } - getIapPublicKeys(callback) { - if (callback) { - this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); - } - else { - return this.getIapPublicKeysAsync(); + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue } + } + + // set the extension -> mime + types[extension] = type } - async getIapPublicKeysAsync() { - let res; - const url = OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_; - try { - res = await this.transporter.request({ url }); - } - catch (e) { - if (e instanceof Error) { - e.message = `Failed to retrieve verification certificates: ${e.message}`; - } - throw e; - } - return { pubkeys: res.data, res }; + }) +} + + +/***/ }), + +/***/ 6038: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' 
+ ); + } + + this._types[ext] = type; } - verifySignedJwtWithCerts() { - // To make the code compatible with browser SubtleCrypto we need to make - // this method async. - throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); } - /** - * Verify the id token is signed with the correct certificate - * and is from the correct audience. - * @param jwt The jwt to verify (The ID Token in this case). - * @param certs The array of certs to test the jwt against. - * @param requiredAudience The audience to test the jwt against. - * @param issuers The allowed issuers of the jwt (Optional). - * @param maxExpiry The max expiry the certificate can be (Optional). - * @return Returns a promise resolving to LoginTicket on verification. - */ - async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { - const crypto = (0, crypto_1.createCrypto)(); - if (!maxExpiry) { - maxExpiry = OAuth2Client.MAX_TOKEN_LIFETIME_SECS_; - } - const segments = jwt.split('.'); - if (segments.length !== 3) { - throw new Error('Wrong number of segments in token: ' + jwt); - } - const signed = segments[0] + '.' + segments[1]; - let signature = segments[2]; - let envelope; - let payload; - try { - envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); - } - catch (err) { - if (err instanceof Error) { - err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; - } - throw err; - } - if (!envelope) { - throw new Error("Can't parse token envelope: " + segments[0]); - } - try { - payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); - } - catch (err) { - if (err instanceof Error) { - err.message = `Can't parse token payload '${segments[0]}`; - } - throw err; - } - if (!payload) { - throw new Error("Can't parse token payload: " + segments[1]); - } - if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { - // If this is not present, then there's no reason to attempt verification - throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); - } - const cert = certs[envelope.kid]; - if (envelope.alg === 'ES256') { - signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); - } - const verified = await crypto.verify(cert, signed, signature); - if (!verified) { - throw new Error('Invalid token signature: ' + jwt); - } - if (!payload.iat) { - throw new Error('No issue time in token: ' + JSON.stringify(payload)); - } - if (!payload.exp) { - throw new Error('No expiration time in token: ' + JSON.stringify(payload)); - } - const iat = Number(payload.iat); - if (isNaN(iat)) - throw new Error('iat field using invalid format'); - const exp = Number(payload.exp); - if (isNaN(exp)) - throw new Error('exp field using invalid format'); - const now = new Date().getTime() / 1000; - if (exp >= now + maxExpiry) { - throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); - } - const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; - const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; - if (now < earliest) { - throw new Error('Token used too early, ' + - now + - ' < ' + - earliest + - ': ' + - JSON.stringify(payload)); - } - if (now > latest) { - throw new Error('Token used too late, ' + - now + - ' > ' + - latest + - ': ' + - JSON.stringify(payload)); - } - if (issuers && 
issuers.indexOf(payload.iss) < 0) { - throw new Error('Invalid issuer, expected one of [' + - issuers + - '], but got ' + - payload.iss); - } - // Check the audience matches if we have one - if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { - const aud = payload.aud; - let audVerified = false; - // If the requiredAudience is an array, check if it contains token - // audience - if (requiredAudience.constructor === Array) { - audVerified = requiredAudience.indexOf(aud) > -1; - } - else { - audVerified = aud === requiredAudience; - } - if (!audVerified) { - throw new Error('Wrong recipient, payload audience != requiredAudience'); - } - } - return new loginticket_1.LoginTicket(envelope, payload); + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; + + +/***/ }), + +/***/ 9994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +let Mime = __nccwpck_require__(6038); +module.exports = new Mime(__nccwpck_require__(3114), __nccwpck_require__(8809)); + + +/***/ }), + +/***/ 8809: +/***/ ((module) => { + +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"applic
ation/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"]
,"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"appli
cation/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":[
"pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"appli
cation/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-mssched
ule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"]
,"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + +/***/ }), + +/***/ 3114: +/***/ ((module) => { + +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + +/***/ }), + +/***/ 900: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + +/***/ }), + +/***/ 467: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(__nccwpck_require__(2781)); +var http = _interopDefault(__nccwpck_require__(3685)); +var Url = _interopDefault(__nccwpck_require__(7310)); +var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); +var https = _interopDefault(__nccwpck_require__(5687)); +var zlib = _interopDefault(__nccwpck_require__(9796)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, 
systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = (__nccwpck_require__(2877).convert); +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports["default"] = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), + +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + 
Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 7684: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(5185); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = new Queue(); + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.size > 0) { + queue.dequeue()(); + } + }; + + const run = async (fn, resolve, ...args) => { + activeCount++; + + const result = (async () => fn(...args))(); + + resolve(result); + + try { + await result; + } catch {} + + next(); + }; + + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); + + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. + await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); + } + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); + }); + + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); + + return generator; +}; + +module.exports = pLimit; + + +/***/ }), + +/***/ 7214: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) } - /** - * Returns a promise that resolves with AccessTokenResponse type if - * refreshHandler is defined. - * If not, nothing is returned. - */ - async processAndValidateRefreshHandler() { - if (this.refreshHandler) { - const accessTokenResponse = await this.refreshHandler(); - if (!accessTokenResponse.access_token) { - throw new Error('No access token is returned by the refreshHandler callback.'); - } - return accessTokenResponse; - } - return; + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); } - /** - * Returns true if a token is expired or will expire within - * eagerRefreshThresholdMillismilliseconds. - * If there is no expiry time, assumes the token is not expired or expiring. 
- */ - isTokenExpiring() { - const expiryDate = this.credentials.expiry_date; - return expiryDate - ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis - : false; + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; } + } else { + return `of ${thing} ${String(expected)}`; + } } -exports.OAuth2Client = OAuth2Client; -OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; -/** - * The base URL for auth endpoints. - */ -OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_ = 'https://accounts.google.com/o/oauth2/v2/auth'; -/** - * The base endpoint for token retrieval. - */ -OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_ = 'https://oauth2.googleapis.com/token'; -/** - * The base endpoint to revoke tokens. - */ -OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_ = 'https://oauth2.googleapis.com/revoke'; -/** - * Google Sign on certificates in PEM format. - */ -OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v1/certs'; -/** - * Google Sign on certificates in JWK format. - */ -OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v3/certs'; -/** - * Google Sign on certificates in JWK format. - */ -OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_ = 'https://www.gstatic.com/iap/verify/public_key'; -/** - * Clock skew - five minutes in seconds - */ -OAuth2Client.CLOCK_SKEW_SECS_ = 300; -/** - * Max Token Lifetime is one day in seconds - */ -OAuth2Client.MAX_TOKEN_LIFETIME_SECS_ = 86400; -/** - * The allowed oauth token issuers. - */ -OAuth2Client.ISSUERS_ = [ - 'accounts.google.com', - 'https://accounts.google.com', -]; -//# sourceMappingURL=oauth2client.js.map + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.q = codes; + /***/ }), -/***/ 9510: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2021 Google LLC +// Copyright Joyent, Inc. and other Node contributors. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// http://www.apache.org/licenses/LICENSE-2.0 +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; -const querystring = __nccwpck_require__(3477); -const crypto_1 = __nccwpck_require__(8043); -/** List of HTTP methods that accept request bodies. */ -const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; -/** - * Abstract class for handling client authentication in OAuth-based - * operations. - * When request-body client authentication is used, only application/json and - * application/x-www-form-urlencoded content types for HTTP methods that support - * request bodies are supported. - */ -class OAuthClientAuthHandler { - /** - * Instantiates an OAuth client authentication handler. - * @param clientAuthentication The client auth credentials. - */ - constructor(clientAuthentication) { - this.clientAuthentication = clientAuthentication; - this.crypto = (0, crypto_1.createCrypto)(); - } - /** - * Applies client authentication on the OAuth request's headers or POST - * body but does not process the request. - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - * @param bearerToken The optional bearer token to use for authentication. - * When this is used, no client authentication credentials are needed. - */ - applyClientAuthenticationOptions(opts, bearerToken) { - // Inject authenticated header. - this.injectAuthenticatedHeaders(opts, bearerToken); - // Inject authenticated request body. - if (!bearerToken) { - this.injectAuthenticatedRequestBody(opts); - } +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
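An editor's illustrative sketch (not part of the patch): the "parasitic inheritance" pattern the comment above describes, reduced to a standalone example with hypothetical names standing in for Readable/Writable.

// Hypothetical Reader/Writer constructors used only for illustration.
function Reader() {}
Reader.prototype.read = function () { return 'read'; };
function Writer() {}
Writer.prototype.write = function (v) { return 'wrote ' + v; };

// Prototypal inheritance from Reader, then parasitic copying of Writer's methods,
// mirroring what the Duplex setup below does with Writable.prototype.
function Both() { Reader.call(this); Writer.call(this); }
Object.setPrototypeOf(Both.prototype, Reader.prototype);
for (const key of Object.keys(Writer.prototype)) {
  if (!Both.prototype[key]) Both.prototype[key] = Writer.prototype[key];
}

const b = new Both();
console.log(b.read(), b.write('x')); // -> read wrote x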
+ + + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +const Readable = __nccwpck_require__(1433); +const Writable = __nccwpck_require__(6993); +__nccwpck_require__(4124)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + const keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + const method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); } - /** - * Applies client authentication on the request's header if either - * basic authentication or bearer token authentication is selected. - * - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - * @param bearerToken The optional bearer token to use for authentication. - * When this is used, no client authentication credentials are needed. - */ - injectAuthenticatedHeaders(opts, bearerToken) { - var _a; - // Bearer token prioritized higher than basic Auth. - if (bearerToken) { - opts.headers = opts.headers || {}; - Object.assign(opts.headers, { - Authorization: `Bearer ${bearerToken}}`, - }); - } - else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { - opts.headers = opts.headers || {}; - const clientId = this.clientAuthentication.clientId; - const clientSecret = this.clientAuthentication.clientSecret || ''; - const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); - Object.assign(opts.headers, { - Authorization: `Basic ${base64EncodedCreds}`, - }); - } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; } - /** - * Applies client authentication on the request's body if request-body - * client authentication is selected. - * - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - */ - injectAuthenticatedRequestBody(opts) { - var _a; - if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { - const method = (opts.method || 'GET').toUpperCase(); - // Inject authenticated request body. - if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { - // Get content-type. - let contentType; - const headers = opts.headers || {}; - for (const key in headers) { - if (key.toLowerCase() === 'content-type' && headers[key]) { - contentType = headers[key].toLowerCase(); - break; - } - } - if (contentType === 'application/x-www-form-urlencoded') { - opts.data = opts.data || ''; - const data = querystring.parse(opts.data); - Object.assign(data, { - client_id: this.clientAuthentication.clientId, - client_secret: this.clientAuthentication.clientSecret || '', - }); - opts.data = querystring.stringify(data); - } - else if (contentType === 'application/json') { - opts.data = opts.data || {}; - Object.assign(opts.data, { - client_id: this.clientAuthentication.clientId, - client_secret: this.clientAuthentication.clientSecret || '', - }); - } - else { - throw new Error(`${contentType} content-types are not supported with ` + - `${this.clientAuthentication.confidentialClientType} ` + - 'client authentication'); - } - } - else { - throw new Error(`${method} HTTP method does not support ` + - `${this.clientAuthentication.confidentialClientType} ` + - 'client authentication'); - } - } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 1542: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +const Transform = __nccwpck_require__(4415); +__nccwpck_require__(4124)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 1433: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +const EE = (__nccwpck_require__(2361).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(2387); +/**/ + +const Buffer = (__nccwpck_require__(4300).Buffer); +const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +const debugUtil = __nccwpck_require__(3837); +let debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; } -exports.OAuthClientAuthHandler = OAuthClientAuthHandler; -/** - * Converts an OAuth error response to a native JavaScript Error. - * @param resp The OAuth error response to convert to a native Error object. - * @param err The optional original error. If provided, the error properties - * will be copied to the new error. - * @return The converted native Error object. 
- */ -function getErrorFromOAuthErrorResponse(resp, err) { - // Error response. - const errorCode = resp.error; - const errorDescription = resp.error_description; - const errorUri = resp.error_uri; - let message = `Error code ${errorCode}`; - if (typeof errorDescription !== 'undefined') { - message += `: ${errorDescription}`; - } - if (typeof errorUri !== 'undefined') { - message += ` - ${errorUri}`; - } - const newError = new Error(message); - // Copy properties from original error to newly generated error. - if (err) { - const keys = Object.keys(err); - if (err.stack) { - // Copy error.stack if available. - keys.push('stack'); - } - keys.forEach(key => { - // Do not overwrite the message field. - if (key !== 'message') { - Object.defineProperty(newError, key, { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - value: err[key], - writable: false, - enumerable: true, - }); - } - }); - } - return newError; +/**/ + +const BufferList = __nccwpck_require__(6522); +const destroyImpl = __nccwpck_require__(7049); +const _require = __nccwpck_require__(9948), + getHighWaterMark = _require.getHighWaterMark; +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +let StringDecoder; +let createReadableStreamAsyncIterator; +let from; +__nccwpck_require__(4124)(Readable, Stream); +const errorOrDestroy = destroyImpl.errorOrDestroy; +const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } -exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; -//# sourceMappingURL=oauth2common.js.map +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(1359); + options = options || {}; -/***/ }), + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; -/***/ 4782: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; -"use strict"; + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PluggableAuthClient = exports.ExecutableError = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const executable_response_1 = __nccwpck_require__(8749); -const pluggable_auth_handler_1 = __nccwpck_require__(8941); -/** - * Error thrown from the executable run by PluggableAuthClient. - */ -class ExecutableError extends Error { - constructor(message, code) { - super(`The executable failed with exit code: ${code} and error message: ${message}.`); - this.code = code; - Object.setPrototypeOf(this, new.target.prototype); - } + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } } -exports.ExecutableError = ExecutableError; -/** - * The default executable timeout when none is provided, in milliseconds. - */ -const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; -/** - * The minimum allowed executable timeout in milliseconds. - */ -const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; -/** - * The maximum allowed executable timeout in milliseconds. - */ -const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; -/** - * The environment variable to check to see if executable can be run. - * Value must be set to '1' for the executable to run. - */ -const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; -/** - * The maximum currently supported executable version. - */ -const MAXIMUM_EXECUTABLE_VERSION = 1; -/** - * PluggableAuthClient enables the exchange of workload identity pool external credentials for - * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These - * scripts/executables are completely independent of the Google Cloud Auth libraries. These - * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token - * to be exchanged for a Google access token. - * - *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
- * must be set to '1'. This is for security reasons.
- *
- * Both OIDC and SAML are supported. The executable must adhere to a specific response format
- * defined below.
- *
- * The executable must print out the 3rd party token to STDOUT in JSON format. When an
- * output_file is specified in the credential configuration, the executable must also handle writing the
- * JSON response to this file.
- *
- * OIDC response sample:
- * {
- *   "version": 1,
- *   "success": true,
- *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
- *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
- *   "expiration_time": 1620433341
- * }
- *
- * SAML2 response sample:
- * {
- *   "version": 1,
- *   "success": true,
- *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
- *   "saml_response": "...",
- *   "expiration_time": 1620433341
- * }
- *
- * Error response sample:
- * {
- *   "version": 1,
- *   "success": false,
- *   "code": "401",
- *   "message": "Error message."
- * }
- *
- * The "expiration_time" field in the JSON response is only required for successful
- * responses when an output file was specified in the credential configuration
- *
- * The auth libraries will populate certain environment variables that will be accessible by the
- * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
- * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
- * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
- *
- * 
Please see this repositories README for a complete executable request/response specification. - */ -class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { - /** - * Instantiates a PluggableAuthClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid pluggable auth credential. - * @param options The external account options object typically loaded from - * the external account JSON credential file. - * @param additionalOptions Optional additional behavior customization - * options. These currently customize expiration threshold time and - * whether to retry on 401/403 API request errors. - */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - if (!options.credential_source.executable) { - throw new Error('No valid Pluggable Auth "credential_source" provided.'); - } - this.command = options.credential_source.executable.command; - if (!this.command) { - throw new Error('No valid Pluggable Auth "credential_source" provided.'); - } - // Check if the provided timeout exists and if it is valid. - if (options.credential_source.executable.timeout_millis === undefined) { - this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; - } - else { - this.timeoutMillis = options.credential_source.executable.timeout_millis; - if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || - this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { - throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + - `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); - } - } - this.outputFile = options.credential_source.executable.output_file; - this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ - command: this.command, - timeoutMillis: this.timeoutMillis, - outputFile: this.outputFile, - }); +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(1359); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + const isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + if (this._readableState === undefined) { + return false; } - /** - * Triggered when an external subject token is needed to be exchanged for a - * GCP access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this calls a user provided executable which returns the subject token. - * The logic is summarized as: - * 1. Validated that the executable is allowed to run. The - * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to - * 1 for security reasons. - * 2. If an output file is specified by the user, check the file location - * for a response. If the file exists and contains a valid response, - * return the subject token from the file. - * 3. 
Call the provided executable and return response. - * @return A promise that resolves with the external subject token. - */ - async retrieveSubjectToken() { - // Check if the executable is allowed to run. - if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { - throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + - 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + - 'Variable to 1.'); - } - let executableResponse = undefined; - // Try to get cached executable response from output file. - if (this.outputFile) { - executableResponse = await this.handler.retrieveCachedResponse(); - } - // If no response from output file, call the executable. - if (!executableResponse) { - // Set up environment map with required values for the executable. - const envMap = new Map(); - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); - // Always set to 0 because interactive mode is not supported. - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); - if (this.outputFile) { - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); - } - const serviceAccountEmail = this.getServiceAccountEmail(); - if (serviceAccountEmail) { - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); - } - executableResponse = await this.handler.retrieveResponseFromExecutable(envMap); - } - if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { - throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); - } - // Check that response was successful. - if (!executableResponse.success) { - throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); - } - // Check that response contains expiration time if output file was specified. - if (this.outputFile) { - if (!executableResponse.expirationTime) { - throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); - } - } - // Check that response is not expired. - if (executableResponse.isExpired()) { - throw new Error('Executable response is expired.'); + return this._readableState.destroyed; + }, + set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
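A minimal usage sketch (editor's addition, not part of the patch) of the push()/highWaterMark contract the comment above describes, using Node's built-in 'stream' module rather than this bundled copy; the counter source and the limit of 5 are invented for illustration.

const { Readable } = require('stream');

let i = 0;
const counter = new Readable({
  highWaterMark: 4,
  read() {
    if (i > 5) {
      this.push(null); // signal end-of-stream
      return;
    }
    // push() returns false once the buffered length reaches highWaterMark;
    // read() is called again after the consumer drains the buffer.
    this.push(String(i++));
  }
});

counter.on('data', chunk => console.log('chunk:', chunk.toString()));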
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); } - // Return subject token from response. - return executableResponse.subjectToken; + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); + const decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + let p = this._readableState.buffer.head; + let content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +const MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; } -exports.PluggableAuthClient = PluggableAuthClient; -//# sourceMappingURL=pluggable-auth-client.js.map -/***/ }), +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} -/***/ 8941: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; -"use strict"; + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
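A hypothetical companion sketch (editor's addition, not part of the patch): the kind of executable the PluggableAuthClient above would invoke, printing an OIDC-style response to STDOUT in the shape described by the class comment. The file name and the one-hour expiry are invented for illustration.

#!/usr/bin/env node
// hypothetical-token-helper.js -- emits the JSON shape the handler parses from stdout
const response = {
  version: 1,
  success: true,
  token_type: 'urn:ietf:params:oauth:token-type:id_token',
  id_token: 'HEADER.PAYLOAD.SIGNATURE', // placeholder third-party token
  // expiration_time is required for successful responses when output_file is configured
  expiration_time: Math.floor(Date.now() / 1000) + 3600
};
process.stdout.write(JSON.stringify(response));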
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PluggableAuthHandler = void 0; -const pluggable_auth_client_1 = __nccwpck_require__(4782); -const executable_response_1 = __nccwpck_require__(8749); -const childProcess = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -/** - * A handler used to retrieve 3rd party token responses from user defined - * executables and cached file output for the PluggableAuthClient class. - */ -class PluggableAuthHandler { - /** - * Instantiates a PluggableAuthHandler instance using the provided - * PluggableAuthHandlerOptions object. - */ - constructor(options) { - if (!options.command) { - throw new Error('No command provided.'); - } - this.commandComponents = PluggableAuthHandler.parseCommand(options.command); - this.timeoutMillis = options.timeoutMillis; - if (!this.timeoutMillis) { - throw new Error('No timeoutMillis provided.'); - } - this.outputFile = options.outputFile; - } - /** - * Calls user provided executable to get a 3rd party subject token and - * returns the response. - * @param envMap a Map of additional Environment Variables required for - * the executable. - * @return A promise that resolves with the executable response. - */ - retrieveResponseFromExecutable(envMap) { - return new Promise((resolve, reject) => { - // Spawn process to run executable using added environment variables. - const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { - env: { ...process.env, ...Object.fromEntries(envMap) }, - }); - let output = ''; - // Append stdout to output as executable runs. - child.stdout.on('data', (data) => { - output += data; - }); - // Append stderr as executable runs. - child.stderr.on('data', (err) => { - output += err; - }); - // Set up a timeout to end the child process and throw an error. - const timeout = setTimeout(() => { - // Kill child process and remove listeners so 'close' event doesn't get - // read after child process is killed. - child.removeAllListeners(); - child.kill(); - return reject(new Error('The executable failed to finish within the timeout specified.')); - }, this.timeoutMillis); - child.on('close', (code) => { - // Cancel timeout if executable closes before timeout is reached. - clearTimeout(timeout); - if (code === 0) { - // If the executable completed successfully, try to return the parsed response. - try { - const responseJson = JSON.parse(output); - const response = new executable_response_1.ExecutableResponse(responseJson); - return resolve(response); - } - catch (error) { - if (error instanceof executable_response_1.ExecutableResponseError) { - return reject(error); - } - return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); - } - } - else { - return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); - } - }); - }); - } - /** - * Checks user provided output file for response from previous run of - * executable and return the response if it exists, is formatted correctly, and is not expired. - */ - async retrieveCachedResponse() { - if (!this.outputFile || this.outputFile.length === 0) { - return undefined; - } - let filePath; - try { - filePath = await fs.promises.realpath(this.outputFile); - } - catch (_a) { - // If file path cannot be resolved, return undefined. - return undefined; - } - if (!(await fs.promises.lstat(filePath)).isFile()) { - // If path does not lead to file, return undefined. 
- return undefined; - } - const responseString = await fs.promises.readFile(filePath, { - encoding: 'utf8', - }); - if (responseString === '') { - return undefined; - } - try { - const responseJson = JSON.parse(responseString); - const response = new executable_response_1.ExecutableResponse(responseJson); - // Check if response is successful and unexpired. - if (response.isValid()) { - return new executable_response_1.ExecutableResponse(responseJson); - } - return undefined; - } - catch (error) { - if (error instanceof executable_response_1.ExecutableResponseError) { - throw error; - } - throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); - } + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. 
+ if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; } - /** - * Parses given command string into component array, splitting on spaces unless - * spaces are between quotation marks. - */ - static parseCommand(command) { - // Split the command into components by splitting on spaces, - // unless spaces are contained in quotation marks. - const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); - if (!components) { - throw new Error(`Provided command: "${command}" could not be parsed.`); - } - // Remove quotation marks from the beginning and end of each component if they are present. - for (let i = 0; i < components.length; i++) { - if (components[i][0] === '"' && components[i].slice(-1) === '"') { - components[i] = components[i].slice(1, -1); - } - } - return components; + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); } + } } -exports.PluggableAuthHandler = PluggableAuthHandler; -//# sourceMappingURL=pluggable-auth-handler.js.map -/***/ }), +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } -/***/ 8790: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} -"use strict"; +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. 
+ // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + const len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} -// Copyright 2015 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UserRefreshClient = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -class UserRefreshClient extends oauth2client_1.OAuth2Client { - constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { - const opts = optionsOrClientId && typeof optionsOrClientId === 'object' - ? optionsOrClientId - : { - clientId: optionsOrClientId, - clientSecret, - refreshToken, - eagerRefreshThresholdMillis, - forceRefreshOnFailure, - }; - super({ - clientId: opts.clientId, - clientSecret: opts.clientSecret, - eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis, - forceRefreshOnFailure: opts.forceRefreshOnFailure, - }); - this._refreshToken = opts.refreshToken; - this.credentials.refresh_token = opts.refreshToken; - } - /** - * Refreshes the access token. - * @param refreshToken An ignored refreshToken.. - * @param callback Optional callback. - */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - return super.refreshTokenNoCache(this._refreshToken); - } - /** - * Create a UserRefreshClient credentials instance using the given input - * options. - * @param json The input object. 
- */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the user refresh token'); - } - if (json.type !== 'authorized_user') { - throw new Error('The incoming JSON object does not have the "authorized_user" type'); - } - if (!json.client_id) { - throw new Error('The incoming JSON object does not contain a client_id field'); - } - if (!json.client_secret) { - throw new Error('The incoming JSON object does not contain a client_secret field'); - } - if (!json.refresh_token) { - throw new Error('The incoming JSON object does not contain a refresh_token field'); - } - this._clientId = json.client_id; - this._clientSecret = json.client_secret; - this._refreshToken = json.refresh_token; - this.credentials.refresh_token = json.refresh_token; - this.quotaProjectId = json.quota_project_id; - } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); - } - else { - return this.fromStreamAsync(inputStream); - } +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } } - async fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - return reject(new Error('Must pass in a stream containing the user refresh token.')); - } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - return resolve(); - } - catch (err) { - return reject(err); - } - }); - }); + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. 
+ var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); } -} -exports.UserRefreshClient = UserRefreshClient; -//# sourceMappingURL=refreshclient.js.map + } -/***/ }), + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } -/***/ 6308: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); -"use strict"; + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.StsCredentials = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const querystring = __nccwpck_require__(3477); -const transporters_1 = __nccwpck_require__(2649); -const oauth2common_1 = __nccwpck_require__(9510); -/** - * Implements the OAuth 2.0 token exchange based on - * https://tools.ietf.org/html/rfc8693 - */ -class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { - /** - * Initializes an STS credentials instance. - * @param tokenExchangeEndpoint The token exchange endpoint. - * @param clientAuthentication The client authentication credentials if - * available. - */ - constructor(tokenExchangeEndpoint, clientAuthentication) { - super(clientAuthentication); - this.tokenExchangeEndpoint = tokenExchangeEndpoint; - this.transporter = new transporters_1.DefaultTransporter(); - } - /** - * Exchanges the provided token for another type of token based on the - * rfc8693 spec. - * @param stsCredentialsOptions The token exchange options used to populate - * the token exchange request. - * @param additionalHeaders Optional additional headers to pass along the - * request. - * @param options Optional additional GCP-specific non-spec defined options - * to send with the request. - * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` - * @return A promise that resolves with the token exchange response containing - * the requested token and its expiration time. - */ - async exchangeToken(stsCredentialsOptions, additionalHeaders, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - options) { - var _a, _b, _c; - const values = { - grant_type: stsCredentialsOptions.grantType, - resource: stsCredentialsOptions.resource, - audience: stsCredentialsOptions.audience, - scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), - requested_token_type: stsCredentialsOptions.requestedTokenType, - subject_token: stsCredentialsOptions.subjectToken, - subject_token_type: stsCredentialsOptions.subjectTokenType, - actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, - actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, - // Non-standard GCP-specific options. - options: options && JSON.stringify(options), - }; - // Remove undefined fields. - Object.keys(values).forEach(key => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if (typeof values[key] === 'undefined') { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - delete values[key]; - } - }); - const headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - }; - // Inject additional STS headers if available. - Object.assign(headers, additionalHeaders || {}); - const opts = { - url: this.tokenExchangeEndpoint, - method: 'POST', - headers, - data: querystring.stringify(values), - responseType: 'json', - }; - // Apply OAuth client authentication. - this.applyClientAuthenticationOptions(opts); - try { - const response = await this.transporter.request(opts); - // Successful response. - const stsSuccessfulResponse = response.data; - stsSuccessfulResponse.res = response; - return stsSuccessfulResponse; - } - catch (error) { - // Translate error to OAuthError. 
- if (error instanceof gaxios_1.GaxiosError && error.response) { - throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, - // Preserve other fields from the original error. - error); - } - // Request could fail before the server responds. - throw error; - } + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); } + }; } -exports.StsCredentials = StsCredentials; -//# sourceMappingURL=stscredentials.js.map +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; -/***/ }), + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; -/***/ 4693: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; -"use strict"; + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -/* global window */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BrowserCrypto = void 0; -// This file implements crypto functions we need using in-browser -// SubtleCrypto interface `window.crypto.subtle`. -const base64js = __nccwpck_require__(6463); -// Not all browsers support `TextEncoder`. The following `require` will -// provide a fast UTF8-only replacement for those browsers that don't support -// text encoding natively. -// eslint-disable-next-line node/no-unsupported-features/node-builtins -if (typeof process === 'undefined' && typeof TextEncoder === 'undefined') { - __nccwpck_require__(1917); -} -const crypto_1 = __nccwpck_require__(8043); -class BrowserCrypto { - constructor() { - if (typeof window === 'undefined' || - window.crypto === undefined || - window.crypto.subtle === undefined) { - throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); - } - } - async sha256DigestBase64(str) { - // SubtleCrypto digest() method is async, so we must make - // this method async as well. - // To calculate SHA256 digest using SubtleCrypto, we first - // need to convert an input string to an ArrayBuffer: - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const inputBuffer = new TextEncoder().encode(str); - // Result is ArrayBuffer as well. 
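For context on the StsCredentials.exchangeToken() body being removed above, a loose sketch of the form-encoded RFC 8693 exchange it performs; the endpoint, audience and tokens below are placeholders (and global fetch assumes Node 18+), not values taken from this patch.

const querystring = require('querystring');

// Mirror of the `values` object assembled by the removed exchangeToken().
const body = querystring.stringify({
  grant_type: 'urn:ietf:params:oauth:grant-type:token-exchange',
  audience: '//iam.googleapis.com/projects/.../providers/my-provider', // placeholder
  scope: 'https://www.googleapis.com/auth/cloud-platform',
  requested_token_type: 'urn:ietf:params:oauth:token-type:access_token',
  subject_token: 'EXTERNAL_TOKEN',                                      // placeholder
  subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
});

(async () => {
  // Assumed endpoint; posted with the same Content-Type the removed code sets.
  const res = await fetch('https://sts.googleapis.com/v1/token', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body,
  });
  console.log(res.status);
})().catch(console.error);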
- const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); - return base64js.fromByteArray(new Uint8Array(outputBuffer)); - } - randomBytesBase64(count) { - const array = new Uint8Array(count); - window.crypto.getRandomValues(array); - return base64js.fromByteArray(array); + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn); + const state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } } - static padBase64(base64) { - // base64js requires padding, so let's add some '=' - while (base64.length % 4 !== 0) { - base64 += '='; - } - return base64; + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + const res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + const state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. 
+ state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + const state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + stream.on('end', () => { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) this.push(chunk); } - async verify(pubkey, data, signature) { - const algo = { - name: 'RSASSA-PKCS1-v1_5', - hash: { name: 'SHA-256' }, - }; - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const dataArray = new TextEncoder().encode(data); - const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); - const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); - // SubtleCrypto's verify method is async so we must make - // this method async as well. - const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); - return result; + this.push(null); + }); + stream.on('data', chunk => { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); } - async sign(privateKey, data) { - const algo = { - name: 'RSASSA-PKCS1-v1_5', - hash: { name: 'SHA-256' }, + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. 
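A small sketch of the flowing/paused switching handled above (the data values are illustrative): attaching a 'data' listener flips the stream into flowing mode, pause() stops delivery, and resume() restarts flow().

const { Readable } = require('stream');

const r = Readable.from(['one', 'two', 'three']);

r.on('data', (chunk) => {           // 'data' listener => resume() => flowing mode
  console.log('got', chunk);
  r.pause();                        // back to paused mode
  setTimeout(() => r.resume(), 20); // flow() drains the buffer again on resume
});
r.on('end', () => console.log('done'));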
+ for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); }; - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const dataArray = new TextEncoder().encode(data); - const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); - // SubtleCrypto's sign method is async so we must make - // this method async as well. - const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); - return base64js.fromByteArray(new Uint8Array(result)); - } - decodeBase64StringUtf8(base64) { - const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const result = new TextDecoder().decode(uint8array); - return result; - } - encodeBase64StringUtf8(text) { - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const uint8array = new TextEncoder().encode(text); - const result = base64js.fromByteArray(uint8array); - return result; + }(i); } - /** - * Computes the SHA-256 hash of the provided string. - * @param str The plain text string to hash. - * @return A promise that resolves with the SHA-256 hash of the provided - * string in hexadecimal encoding. - */ - async sha256DigestHex(str) { - // SubtleCrypto digest() method is async, so we must make - // this method async as well. - // To calculate SHA256 digest using SubtleCrypto, we first - // need to convert an input string to an ArrayBuffer: - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const inputBuffer = new TextEncoder().encode(str); - // Result is ArrayBuffer as well. - const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); - return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = n => { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); } - /** - * Computes the HMAC hash of a message using the provided crypto key and the - * SHA-256 algorithm. - * @param key The secret crypto key in utf-8 or ArrayBuffer format. - * @param msg The plain text message. - * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer - * format. - */ - async signWithHmacSha256(key, msg) { - // Convert key, if provided in ArrayBuffer format, to string. - const rawKey = typeof key === 'string' - ? 
key - : String.fromCharCode(...new Uint16Array(key)); - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const enc = new TextEncoder(); - const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { - name: 'HMAC', - hash: { - name: 'SHA-256', - }, - }, false, ['sign']); - return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(3306); } + return createReadableStreamAsyncIterator(this); + }; } -exports.BrowserCrypto = BrowserCrypto; -//# sourceMappingURL=crypto.js.map - -/***/ }), +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); -/***/ 8043: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._readableState.length; + } +}); -"use strict"; +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -/* global window */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; -const crypto_1 = __nccwpck_require__(4693); -const crypto_2 = __nccwpck_require__(757); -function createCrypto() { - if (hasBrowserCrypto()) { - return new crypto_1.BrowserCrypto(); + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + const wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } } - return new crypto_2.NodeCrypto(); + } } -exports.createCrypto = createCrypto; -function hasBrowserCrypto() { - return (typeof window !== 'undefined' && - typeof window.crypto !== 'undefined' && - typeof window.crypto.subtle !== 'undefined'); +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(9082); + } + return from(Readable, iterable, opts); + }; } -exports.hasBrowserCrypto = hasBrowserCrypto; -/** - * Converts an ArrayBuffer to a hexadecimal string. - * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. - * @return The hexadecimal encoding of the ArrayBuffer. - */ -function fromArrayBufferToHex(arrayBuffer) { - // Convert buffer to byte array. - const byteArray = Array.from(new Uint8Array(arrayBuffer)); - // Convert bytes to hex string. - return byteArray - .map(byte => { - return byte.toString(16).padStart(2, '0'); - }) - .join(''); +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; } -exports.fromArrayBufferToHex = fromArrayBufferToHex; -//# sourceMappingURL=crypto.js.map /***/ }), -/***/ 757: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 4415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2019 Google LLC +// Copyright Joyent, Inc. and other Node contributors. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// http://www.apache.org/licenses/LICENSE-2.0 +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeCrypto = void 0; -const crypto = __nccwpck_require__(6113); -class NodeCrypto { - async sha256DigestBase64(str) { - return crypto.createHash('sha256').update(str).digest('base64'); - } - randomBytesBase64(count) { - return crypto.randomBytes(count).toString('base64'); - } - async verify(pubkey, data, signature) { - const verifier = crypto.createVerify('sha256'); - verifier.update(data); - verifier.end(); - return verifier.verify(pubkey, signature, 'base64'); - } - async sign(privateKey, data) { - const signer = crypto.createSign('RSA-SHA256'); - signer.update(data); - signer.end(); - return signer.sign(privateKey, 'base64'); - } - decodeBase64StringUtf8(base64) { - return Buffer.from(base64, 'base64').toString('utf-8'); - } - encodeBase64StringUtf8(text) { - return Buffer.from(text, 'utf-8').toString('base64'); - } - /** - * Computes the SHA-256 hash of the provided string. - * @param str The plain text string to hash. - * @return A promise that resolves with the SHA-256 hash of the provided - * string in hexadecimal encoding. - */ - async sha256DigestHex(str) { - return crypto.createHash('sha256').update(str).digest('hex'); - } - /** - * Computes the HMAC hash of a message using the provided crypto key and the - * SHA-256 algorithm. - * @param key The secret crypto key in utf-8 or ArrayBuffer format. - * @param msg The plain text message. - * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer - * format. - */ - async signWithHmacSha256(key, msg) { - const cryptoKey = typeof key === 'string' ? key : toBuffer(key); - return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); - } -} -exports.NodeCrypto = NodeCrypto; -/** - * Converts a Node.js Buffer to an ArrayBuffer. - * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer - * @param buffer The Buffer input to covert. - * @return The ArrayBuffer representation of the input. - */ -function toArrayBuffer(buffer) { - return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); -} -/** - * Converts an ArrayBuffer to a Node.js Buffer. - * @param arrayBuffer The ArrayBuffer input to covert. - * @return The Buffer representation of the input. - */ -function toBuffer(arrayBuffer) { - return Buffer.from(arrayBuffer); -} -//# sourceMappingURL=crypto.js.map - -/***/ }), - -/***/ 810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
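The NodeCrypto class removed above is a thin wrapper over Node's built-in crypto module; roughly, its digest and HMAC helpers reduce to the following (a sketch with made-up inputs, not part of the patch):

const crypto = require('crypto');

// SHA-256 digest in hex, as in the removed sha256DigestHex().
const hex = crypto.createHash('sha256').update('some text').digest('hex');

// HMAC-SHA256, as in the removed signWithHmacSha256(); the key may be a
// utf-8 string or a Buffer converted via Buffer.from().
const mac = crypto.createHmac('sha256', 'secret-key').update('message').digest();

console.log(hex, mac.toString('base64'));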
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.AuthClient = void 0; -// Copyright 2017 Google LLC +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. // -// http://www.apache.org/licenses/LICENSE-2.0 +// Here's how this works: // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
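To make the Transform comment block above concrete, a minimal subclass sketch (illustrative only, not part of the patch): each written chunk is transformed and pushed to the readable side, and back-pressure is driven by whoever reads the output.

const { Transform } = require('stream');

class Upper extends Transform {
  _transform(chunk, encoding, callback) {
    // push() hands transformed data to the readable side;
    // callback() tells the writable side this chunk has been consumed.
    this.push(chunk.toString().toUpperCase());
    callback();
  }
}

process.stdin.pipe(new Upper()).pipe(process.stdout);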
-const googleauth_1 = __nccwpck_require__(695); -Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); -var authclient_1 = __nccwpck_require__(4627); -Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); -var computeclient_1 = __nccwpck_require__(6875); -Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); -var envDetect_1 = __nccwpck_require__(1380); -Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); -var iam_1 = __nccwpck_require__(9735); -Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); -var idtokenclient_1 = __nccwpck_require__(298); -Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); -var jwtaccess_1 = __nccwpck_require__(8740); -Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); -var jwtclient_1 = __nccwpck_require__(3959); -Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); -var impersonated_1 = __nccwpck_require__(1103); -Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); -var oauth2client_1 = __nccwpck_require__(3936); -Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); -Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); -var loginticket_1 = __nccwpck_require__(4524); -Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); -var refreshclient_1 = __nccwpck_require__(8790); -Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); -var awsclient_1 = __nccwpck_require__(1569); -Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); -var identitypoolclient_1 = __nccwpck_require__(117); -Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); -var externalclient_1 = __nccwpck_require__(4381); -Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); -var baseexternalclient_1 = __nccwpck_require__(7391); -Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); -var downscopedclient_1 = __nccwpck_require__(6270); -Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); -var pluggable_auth_client_1 = __nccwpck_require__(4782); -Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); -var transporters_1 = __nccwpck_require__(2649); -Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { 
return transporters_1.DefaultTransporter; } })); -const auth = new googleauth_1.GoogleAuth(); -exports.auth = auth; -//# sourceMappingURL=index.js.map +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. -/***/ }), -/***/ 6608: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; +module.exports = Transform; +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +const Duplex = __nccwpck_require__(1359); +__nccwpck_require__(4124)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; -// Copyright 2017 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} +function prefinish() { + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush((er, data) => { + done(this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. // -// http://www.apache.org/licenses/LICENSE-2.0 +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validate = void 0; -// Accepts an options object passed from the user to the API. In the -// previous version of the API, it referred to a `Request` options object. -// Now it refers to an Axiox Request Config object. This is here to help -// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function validate(options) { - const vpairs = [ - { invalid: 'uri', expected: 'url' }, - { invalid: 'json', expected: 'data' }, - { invalid: 'qs', expected: 'params' }, - ]; - for (const pair of vpairs) { - if (options[pair.invalid]) { - const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; - throw new Error(e); - } - } +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. 
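The same contract can also be met through the constructor options picked up above (options.transform / options.flush); a small sketch, with illustrative input, that buffers everything and emits one joined chunk when the writable side finishes ('prefinish' triggers _flush before 'finish'):

const { Transform } = require('stream');

const pieces = [];
const join = new Transform({
  transform(chunk, encoding, callback) {
    pieces.push(chunk); // hold the input; push nothing yet
    callback();
  },
  flush(callback) {
    this.push(Buffer.concat(pieces).toString()); // emit the buffered result
    callback();
  }
});

join.on('data', (out) => console.log('joined:', out.toString()));
join.end('hello, world');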
+Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, err2 => { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); } -exports.validate = validate; -//# sourceMappingURL=options.js.map /***/ }), -/***/ 2649: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6993: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2019 Google LLC +// Copyright Joyent, Inc. and other Node contributors. // -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// http://www.apache.org/licenses/LICENSE-2.0 +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultTransporter = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const options_1 = __nccwpck_require__(6608); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const pkg = __nccwpck_require__(1402); -const PRODUCT_NAME = 'google-api-nodejs-client'; -class DefaultTransporter { - /** - * Configures request options before making a request. - * @param opts GaxiosOptions options. - * @return Configured options. 
- */ - configure(opts = {}) { - opts.headers = opts.headers || {}; - if (typeof window === 'undefined') { - // set transporter user agent if not in browser - const uaValue = opts.headers['User-Agent']; - if (!uaValue) { - opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; - } - else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { - opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; - } - // track google-auth-library-nodejs version: - const authVersion = `auth/${pkg.version}`; - if (opts.headers['x-goog-api-client'] && - !opts.headers['x-goog-api-client'].includes(authVersion)) { - opts.headers['x-goog-api-client'] = `${opts.headers['x-goog-api-client']} ${authVersion}`; - } - else if (!opts.headers['x-goog-api-client']) { - const nodeVersion = process.version.replace(/^v/, ''); - opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion} ${authVersion}`; - } - } - return opts; - } - request(opts, callback) { - // ensure the user isn't passing in request-style options - opts = this.configure(opts); - try { - (0, options_1.validate)(opts); - } - catch (e) { - if (callback) { - return callback(e); - } - else { - throw e; - } - } - if (callback) { - (0, gaxios_1.request)(opts).then(r => { - callback(null, r); - }, e => { - callback(this.processError(e)); - }); - } - else { - return (0, gaxios_1.request)(opts).catch(e => { - throw this.processError(e); - }); - } - } - /** - * Changes the error to include details from the body. - */ - processError(e) { - const res = e.response; - const err = e; - const body = res ? res.data : null; - if (res && body && body.error && res.status !== 200) { - if (typeof body.error === 'string') { - err.message = body.error; - err.code = res.status.toString(); - } - else if (Array.isArray(body.error.errors)) { - err.message = body.error.errors - .map((err2) => err2.message) - .join('\n'); - err.code = body.error.code; - err.errors = body.error.errors; - } - else { - err.message = body.error.message; - err.code = body.error.code || res.status; - } - } - else if (res && res.status >= 400) { - // Consider all 4xx and 5xx responses errors. - err.message = body; - err.code = res.status.toString(); - } - return err; - } -} -exports.DefaultTransporter = DefaultTransporter; -/** - * Default user agent. - */ -DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; -//# sourceMappingURL=transporters.js.map +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -/***/ }), +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. -/***/ 2098: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. 
- * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getPem = void 0; -const fs = __nccwpck_require__(7147); -const forge = __nccwpck_require__(7655); -const util_1 = __nccwpck_require__(3837); -const readFile = (0, util_1.promisify)(fs.readFile); -function getPem(filename, callback) { - if (callback) { - getPemAsync(filename) - .then(pem => callback(null, pem)) - .catch(err => callback(err, null)); - } - else { - return getPemAsync(filename); - } +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; } -exports.getPem = getPem; -function getPemAsync(filename) { - return readFile(filename, { encoding: 'base64' }).then(keyp12 => { - return convertToPem(keyp12); - }); + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + this.next = null; + this.entry = null; + this.finish = () => { + onCorkedFinish(this, state); + }; } -/** - * Converts a P12 in base64 encoding to a pem. - * @param p12base64 String containing base64 encoded p12. - * @returns a string containing the pem. - */ -function convertToPem(p12base64) { - const p12Der = forge.util.decode64(p12base64); - const p12Asn1 = forge.asn1.fromDer(p12Der); - const p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, 'notasecret'); - const bags = p12.getBags({ friendlyName: 'privatekey' }); - if (bags.friendlyName) { - const privateKey = bags.friendlyName[0].key; - const pem = forge.pki.privateKeyToPem(privateKey); - return pem.replace(/\r\n/g, '\n'); - } - else { - throw new Error('Unable to get friendly name.'); - } +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +const internalUtil = { + deprecate: __nccwpck_require__(7127) +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(2387); +/**/ + +const Buffer = (__nccwpck_require__(4300).Buffer); +const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); } -//# sourceMappingURL=index.js.map +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +const destroyImpl = __nccwpck_require__(7049); +const _require = __nccwpck_require__(9948), + getHighWaterMark = _require.getHighWaterMark; +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +const errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(4124)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(1359); + options = options || {}; -/***/ }), + // Duplex streams are both readable and writable, but share + // the same options object. 
+ // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; -/***/ 6031: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; -"use strict"; + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. - * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleToken = void 0; -const fs = __nccwpck_require__(7147); -const gaxios_1 = __nccwpck_require__(9555); -const jws = __nccwpck_require__(4636); -const path = __nccwpck_require__(1017); -const util_1 = __nccwpck_require__(3837); -const readFile = fs.readFile - ? (0, util_1.promisify)(fs.readFile) - : async () => { - // if running in the web-browser, fs.readFile may not have been shimmed. - throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); - }; -const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; -const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; -class ErrorWithCode extends Error { - constructor(message, code) { - super(message); - this.code = code; - } + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. 
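A brief sketch of how the state recorded above surfaces to callers (the values are illustrative): write() returns false once the buffered length exceeds highWaterMark and needDrain is set, and 'drain' fires when the buffer empties.

const { Writable } = require('stream');

const sink = new Writable({
  highWaterMark: 4,         // tiny threshold so back-pressure is visible at once
  write(chunk, encoding, callback) {
    setImmediate(callback); // pretend the underlying resource is asynchronous
  }
});

const ok = sink.write('hello');              // 5 bytes > highWaterMark => false
console.log('keep writing?', ok);
sink.once('drain', () => console.log('buffer flushed, safe to write again'));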
+ this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); } -let getPem; -class GoogleToken { - /** - * Create a GoogleToken. - * - * @param options Configuration object. - */ - constructor(options) { - this.transporter = { - request: opts => (0, gaxios_1.request)(opts), - }; - this.configure(options); - } - get accessToken() { - return this.rawToken ? this.rawToken.access_token : undefined; - } - get idToken() { - return this.rawToken ? this.rawToken.id_token : undefined; - } - get tokenType() { - return this.rawToken ? this.rawToken.token_type : undefined; - } - get refreshToken() { - return this.rawToken ? this.rawToken.refresh_token : undefined; - } - /** - * Returns whether the token has expired. - * - * @return true if the token has expired, false otherwise. - */ - hasExpired() { - const now = new Date().getTime(); - if (this.rawToken && this.expiresAt) { - return now >= this.expiresAt; - } - else { - return true; - } - } - /** - * Returns whether the token will expire within eagerRefreshThresholdMillis - * - * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. - */ - isTokenExpiring() { - var _a; - const now = new Date().getTime(); - const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; - if (this.rawToken && this.expiresAt) { - return this.expiresAt <= now + eagerRefreshThresholdMillis; - } - else { - return true; - } - } - getToken(callback, opts = {}) { - if (typeof callback === 'object') { - opts = callback; - callback = undefined; - } - opts = Object.assign({ - forceRefresh: false, - }, opts); - if (callback) { - const cb = callback; - this.getTokenAsync(opts).then(t => cb(null, t), callback); - return; - } - return this.getTokenAsync(opts); - } - /** - * Given a keyFile, extract the key and client email if available - * @param keyFile Path to a json, pem, or p12 file that contains the key. 
- * @returns an object with privateKey and clientEmail properties - */ - async getCredentials(keyFile) { - const ext = path.extname(keyFile); - switch (ext) { - case '.json': { - const key = await readFile(keyFile, 'utf8'); - const body = JSON.parse(key); - const privateKey = body.private_key; - const clientEmail = body.client_email; - if (!privateKey || !clientEmail) { - throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); - } - return { privateKey, clientEmail }; - } - case '.der': - case '.crt': - case '.pem': { - const privateKey = await readFile(keyFile, 'utf8'); - return { privateKey }; - } - case '.p12': - case '.pfx': { - // NOTE: The loading of `google-p12-pem` is deferred for performance - // reasons. The `node-forge` npm module in `google-p12-pem` adds a fair - // bit time to overall module loading, and is likely not frequently - // used. In a future release, p12 support will be entirely removed. - if (!getPem) { - getPem = (await Promise.resolve().then(() => __nccwpck_require__(2098))).getPem; - } - const privateKey = await getPem(keyFile); - return { privateKey }; - } - default: - throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + - 'Current supported extensions are *.json, *.pem, and *.p12.', 'UNKNOWN_CERTIFICATE_TYPE'); - } - } - async getTokenAsync(opts) { - if (this.inFlightRequest && !opts.forceRefresh) { - return this.inFlightRequest; - } - try { - return await (this.inFlightRequest = this.getTokenAsyncInner(opts)); - } - finally { - this.inFlightRequest = undefined; - } - } - async getTokenAsyncInner(opts) { - if (this.isTokenExpiring() === false && opts.forceRefresh === false) { - return Promise.resolve(this.rawToken); - } - if (!this.key && !this.keyFile) { - throw new Error('No key or keyFile set.'); - } - if (!this.key && this.keyFile) { - const creds = await this.getCredentials(this.keyFile); - this.key = creds.privateKey; - this.iss = creds.clientEmail || this.iss; - if (!creds.clientEmail) { - this.ensureEmail(); - } - } - return this.requestToken(); - } - ensureEmail() { - if (!this.iss) { - throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); - } - } - revokeToken(callback) { - if (callback) { - this.revokeTokenAsync().then(() => callback(), callback); - return; - } - return this.revokeTokenAsync(); - } - async revokeTokenAsync() { - if (!this.accessToken) { - throw new Error('No token to revoke.'); - } - const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; - await this.transporter.request({ url }); - this.configure({ - email: this.iss, - sub: this.sub, - key: this.key, - keyFile: this.keyFile, - scope: this.scope, - additionalClaims: this.additionalClaims, - }); - } - /** - * Configure the GoogleToken for re-use. - * @param {object} options Configuration object. - */ - configure(options = {}) { - this.keyFile = options.keyFile; - this.key = options.key; - this.rawToken = undefined; - this.iss = options.email || options.iss; - this.sub = options.sub; - this.additionalClaims = options.additionalClaims; - if (typeof options.scope === 'object') { - this.scope = options.scope.join(' '); - } - else { - this.scope = options.scope; - } - this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; - if (options.transporter) { - this.transporter = options.transporter; - } - } - /** - * Request the token from Google. 
- */ - async requestToken() { - var _a, _b; - const iat = Math.floor(new Date().getTime() / 1000); - const additionalClaims = this.additionalClaims || {}; - const payload = Object.assign({ - iss: this.iss, - scope: this.scope, - aud: GOOGLE_TOKEN_URL, - exp: iat + 3600, - iat, - sub: this.sub, - }, additionalClaims); - const signedJWT = jws.sign({ - header: { alg: 'RS256' }, - payload, - secret: this.key, - }); - try { - const r = await this.transporter.request({ - method: 'POST', - url: GOOGLE_TOKEN_URL, - data: { - grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', - assertion: signedJWT, - }, - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - responseType: 'json', - }); - this.rawToken = r.data; - this.expiresAt = - r.data.expires_in === null || r.data.expires_in === undefined - ? undefined - : (iat + r.data.expires_in) * 1000; - return this.rawToken; - } - catch (e) { - this.rawToken = undefined; - this.tokenExpires = undefined; - const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) - ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data - : {}; - if (body.error) { - const desc = body.error_description - ? `: ${body.error_description}` - : ''; - e.message = `${body.error}${desc}`; - } - throw e; - } +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; } -exports.GoogleToken = GoogleToken; -//# sourceMappingURL=index.js.map +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(1359); -/***/ }), + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. -/***/ 1621: -/***/ ((module) => { + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. -"use strict"; + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + const isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + // legacy. 
+ this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} -module.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf('--'); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); }; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} - -/***/ }), - -/***/ 7492: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; +Writable.prototype.cork = function () { + this._writableState.corked++; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(1808)); -const tls_1 = __importDefault(__nccwpck_require__(4404)); -const url_1 = __importDefault(__nccwpck_require__(7310)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const once_1 = __importDefault(__nccwpck_require__(1040)); -const agent_base_1 = __nccwpck_require__(9690); -const debug = (0, debug_1.default)('http-proxy-agent'); -function isHTTPS(protocol) { - return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; -} -/** - * The `HttpProxyAgent` implements an HTTP Agent subclass that connects - * to the specified "HTTP proxy server" in order to proxy HTTP requests. - * - * @api public - */ -class HttpProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('Creating new HttpProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; - } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - const parsed = url_1.default.parse(req.path); - if (!parsed.protocol) { - parsed.protocol = 'http:'; - } - if (!parsed.hostname) { - parsed.hostname = opts.hostname || opts.host || null; - } - if (parsed.port == null && typeof opts.port) { - parsed.port = String(opts.port); - } - if (parsed.port === '80') { - // if port is 80, then we can remove the port so that the - // ":80" portion is not on the produced URL - parsed.port = ''; - } - // Change the `http.ClientRequest` instance's "path" field - // to the absolute path of the URL that will be requested. - req.path = url_1.default.format(parsed); - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); - } - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - // At this point, the http ClientRequest's internal `_header` field - // might have already been set. If this is the case then we'll need - // to re-generate the string since we just changed the `req.path`. 
- if (req._header) { - let first; - let endOfHeaders; - debug('Regenerating stored HTTP header string for request'); - req._header = null; - req._implicitHeader(); - if (req.output && req.output.length > 0) { - // Node < 12 - debug('Patching connection write() output buffer with updated header'); - first = req.output[0]; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.output[0] = req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.output); - } - else if (req.outputData && req.outputData.length > 0) { - // Node >= 12 - debug('Patching connection write() output buffer with updated header'); - first = req.outputData[0].data; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.outputData[0].data = - req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.outputData[0].data); - } - } - // Wait for the socket's `connect` event, so that this `callback()` - // function throws instead of the `http` request machinery. This is - // important for i.e. `PacProxyAgent` which determines a failed proxy - // connection via the `callback()` function throwing. - yield (0, once_1.default)(socket, 'connect'); - return socket; - }); - } -} -exports["default"] = HttpProxyAgent; -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 3764: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } }; -const agent_1 = __importDefault(__nccwpck_require__(7492)); -function createHttpProxyAgent(opts) { - return new agent_1.default(opts); +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; } -(function (createHttpProxyAgent) { - createHttpProxyAgent.HttpProxyAgent = agent_1.default; - createHttpProxyAgent.prototype = agent_1.default.prototype; -})(createHttpProxyAgent || (createHttpProxyAgent = {})); -module.exports = createHttpProxyAgent; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 5098: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(1808)); -const tls_1 = __importDefault(__nccwpck_require__(4404)); -const url_1 = __importDefault(__nccwpck_require__(7310)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const agent_base_1 = __nccwpck_require__(9690); -const parse_proxy_response_1 = __importDefault(__nccwpck_require__(595)); -const debug = debug_1.default('https-proxy-agent:agent'); -/** - * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to - * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. - * - * Outgoing HTTP requests are first tunneled through the proxy server using the - * `CONNECT` HTTP request method to establish a connection to the proxy server, - * and then the proxy server connects to the destination target and issues the - * HTTP request from the proxy server. - * - * `https:` requests have their socket connection upgraded to TLS once - * the connection to the proxy server has been established. 
- * - * @api public - */ -class HttpsProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('creating new HttpsProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - // ALPN is supported by Node.js >= v5. - // attempt to negotiate http/1.1 for proxy servers that support http/2 - if (this.secureProxy && !('ALPNProtocols' in proxy)) { - proxy.ALPNProtocols = ['http 1.1']; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - const headers = Object.assign({}, proxy.headers); - const hostname = `${opts.host}:${opts.port}`; - let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; - } - // The `Host` header should only include the port - // number when it is not the default port. - let { host, port, secureEndpoint } = opts; - if (!isDefaultPort(port, secureEndpoint)) { - host += `:${port}`; - } - headers.Host = host; - headers.Connection = 'close'; - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r\n`; - } - const proxyResponsePromise = parse_proxy_response_1.default(socket); - socket.write(`${payload}\r\n`); - const { statusCode, buffered } = yield proxyResponsePromise; - if (statusCode === 200) { - req.once('socket', resume); - if (opts.secureEndpoint) { - // The proxy is connecting to a TLS server, so upgrade - // this socket connection to a TLS connection. 
- debug('Upgrading socket connection to TLS'); - const servername = opts.servername || opts.host; - return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, - servername })); - } - return socket; - } - // Some other status code that's not 200... need to re-play the HTTP - // header "data" events onto the socket once the HTTP machinery is - // attached so that the node core `http` can parse and handle the - // error status code. - // Close the original socket, and a new "fake" socket is returned - // instead, so that the proxy doesn't get the HTTP request - // written to it (which may contain `Authorization` headers or other - // sensitive data). - // - // See: https://hackerone.com/reports/541502 - socket.destroy(); - const fakeSocket = new net_1.default.Socket({ writable: false }); - fakeSocket.readable = true; - // Need to wait for the "socket" event to re-play the "data" events. - req.once('socket', (s) => { - debug('replaying proxy buffer for failed request'); - assert_1.default(s.listenerCount('data') > 0); - // Replay the "buffered" Buffer onto the fake `socket`, since at - // this point the HTTP module machinery has been hooked up for - // the user. - s.push(buffered); - s.push(null); - }); - return fakeSocket; - }); + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk, + encoding, + isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; } -exports["default"] = HttpsProxyAgent; -function resume(socket) { - socket.resume(); +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; } -function isDefaultPort(port, secure) { - return Boolean((!secure && port === 80) || (secure && port === 443)); +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } } -function isHTTPS(protocol) { - return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; } -function omit(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); } - return ret; + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); } -//# sourceMappingURL=agent.js.map - -/***/ }), - -/***/ 7219: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { - -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(5098)); -function createHttpsProxyAgent(opts) { - return new agent_1.default(opts); +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } } -(function (createHttpsProxyAgent) { - createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; - createHttpsProxyAgent.prototype = agent_1.default.prototype; -})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); -module.exports = createHttpsProxyAgent; -//# sourceMappingURL=index.js.map -/***/ }), +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); -/***/ 595: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 
1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); -"use strict"; + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; + // ignore unnecessary end() calls. + if (!state.ending) endWritable(this, state, cb); + return this; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); -function parseProxyResponse(socket) { - return new Promise((resolve, reject) => { - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... 
- let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once('readable', read); - } - function cleanup() { - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('close', onclose); - socket.removeListener('readable', read); - } - function onclose(err) { - debug('onclose had error %o', err); - } - function onend() { - debug('onend'); - } - function onerror(err) { - cleanup(); - debug('onerror %o', err); - reject(err); - } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf('\r\n\r\n'); - if (endOfHeaders === -1) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - read(); - return; - } - const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); - const statusCode = +firstLine.split(' ')[1]; - debug('got proxy server response: %o', firstLine); - resolve({ - statusCode, - buffered - }); - } - socket.on('error', onerror); - socket.on('close', onclose); - socket.on('end', onend); - read(); - }); +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; } -exports["default"] = parseProxyResponse; -//# sourceMappingURL=parse-proxy-response.js.map - -/***/ }), - -/***/ 4124: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -try { - var util = __nccwpck_require__(3837); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __nccwpck_require__(8544); +function callFinal(stream, state) { + stream._final(err => { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); } - - -/***/ }), - -/***/ 8544: -/***/ ((module) => { - -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of 
duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + const rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } } } + return need; } - - -/***/ }), - -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; } - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; } - // Most likely a plain Object - return true; + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; } - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 1554: -/***/ ((module) => { - -"use strict"; - - -const isStream = stream => - stream !== null && - typeof stream === 'object' && - typeof stream.pipe === 'function'; - -isStream.writable = stream => - isStream(stream) && - stream.writable !== false && - typeof stream._write === 'function' && - typeof stream._writableState === 'object'; - -isStream.readable = stream => - isStream(stream) && - stream.readable !== false && - typeof stream._read === 'function' && - typeof stream._readableState === 'object'; - -isStream.duplex = stream => - isStream.writable(stream) && - isStream.readable(stream); - -isStream.transform = stream => - isStream.duplex(stream) && - typeof stream._transform === 'function'; - -module.exports = isStream; - - -/***/ }), - -/***/ 5031: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var json_stringify = (__nccwpck_require__(8574).stringify); -var json_parse = __nccwpck_require__(9099); - -module.exports = function(options) { - return { - parse: json_parse(options), - stringify: json_stringify +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; } -}; -//create the default method members with no options applied for backwards compatibility -module.exports.parse = json_parse(); -module.exports.stringify = json_stringify; - - -/***/ }), - -/***/ 9099: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var BigNumber = null; - -// regexpxs extracted from -// (c) BSD-3-Clause -// 
https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors - -const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; -const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; - -/* - json_parse.js - 2012-06-20 - - Public Domain. - - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. - - This file creates a json_parse function. - During create you can (optionally) specify some behavioural switches - - require('json-bigint')(options) - - The optional options parameter holds switches that drive certain - aspects of the parsing process: - * options.strict = true will warn about duplicate-key usage in the json. - The default (strict = false) will silently ignore those and overwrite - values for keys that are in duplicate use. - - The resulting function follows this signature: - json_parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. - - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. - - Example: - - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. - - myData = json_parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); - - This is a reference implementation. You are free to copy, modify, or - redistribute. - - This code should be minified before deployment. - See http://javascript.crockford.com/jsmin.html - - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. -*/ -/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, - hasOwnProperty, message, n, name, prototype, push, r, t, text -*/ + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; -var json_parse = function (options) { - 'use strict'; +/***/ }), - // This is a function that can parse a JSON text, producing a JavaScript - // data structure. It is a simple, recursive descent parser. It does not use - // eval or regular expressions, so it can be used as a model for implementing - // a JSON parser in other languages. +/***/ 3306: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // We are defining the function inside of another function to avoid creating - // global variables. 
+"use strict"; - // Default options one can override by passing options to the parse() - var _options = { - strict: false, // not being strict means do not generate syntax errors for "duplicate key" - storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string - alwaysParseAsBig: false, // toggles whether all numbers should be Big - useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js - protoAction: 'error', - constructorAction: 'error', - }; - // If there are options, then use them to override the default _options - if (options !== undefined && options !== null) { - if (options.strict === true) { - _options.strict = true; - } - if (options.storeAsString === true) { - _options.storeAsString = true; +const finished = __nccwpck_require__(6080); +const kLastResolve = Symbol('lastResolve'); +const kLastReject = Symbol('lastReject'); +const kError = Symbol('error'); +const kEnded = Symbol('ended'); +const kLastPromise = Symbol('lastPromise'); +const kHandlePromise = Symbol('handlePromise'); +const kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value, + done + }; +} +function readAndResolve(iter) { + const resolve = iter[kLastResolve]; + if (resolve !== null) { + const data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); } - _options.alwaysParseAsBig = - options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; - _options.useNativeBigInt = - options.useNativeBigInt === true ? options.useNativeBigInt : false; - - if (typeof options.constructorAction !== 'undefined') { - if ( - options.constructorAction === 'error' || - options.constructorAction === 'ignore' || - options.constructorAction === 'preserve' - ) { - _options.constructorAction = options.constructorAction; - } else { - throw new Error( - `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` - ); + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return (resolve, reject) => { + lastPromise.then(() => { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +const AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ + get stream() { + return this[kStream]; + }, + next() { + // if we have detected an error in the meanwhile + // reject straight away + const error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise((resolve, reject) => { + process.nextTick(() => { + if (this[kError]) { + reject(this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); } - if (typeof options.protoAction !== 'undefined') { - if ( - options.protoAction === 'error' || - options.protoAction === 'ignore' || - options.protoAction === 'preserve' - ) { - _options.protoAction = options.protoAction; - } else { - throw new Error( - `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` - ); + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + const lastPromise = this[kLastPromise]; + let promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + const data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); } + promise = new Promise(this[kHandlePromise]); } + this[kLastPromise] = promise; + return promise; + }, + [Symbol.asyncIterator]() { + return this; + }, + return() { + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise((resolve, reject) => { + this[kStream].destroy(null, err => { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); } - - var at, // The index of the current character - ch, // The current character - escapee = { - '"': '"', - '\\': '\\', - '/': '/', - b: '\b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', +}, AsyncIteratorPrototype); +const createReadableStreamAsyncIterator = stream => { + const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, { + [kStream]: { + value: stream, + writable: true }, - text, - error = function (m) { - // Call error when something is wrong. - - throw { - name: 'SyntaxError', - message: m, - at: at, - text: text, - }; + [kLastResolve]: { + value: null, + writable: true }, - next = function (c) { - // If a c parameter is provided, verify that it matches the current character. - - if (c && c !== ch) { - error("Expected '" + c + "' instead of '" + ch + "'"); - } - - // Get the next character. When there are no more characters, - // return the empty string. - - ch = text.charAt(at); - at += 1; - return ch; + [kLastReject]: { + value: null, + writable: true }, - number = function () { - // Parse a number value. 
- - var number, - string = ''; - - if (ch === '-') { - string = '-'; - next('-'); - } - while (ch >= '0' && ch <= '9') { - string += ch; - next(); - } - if (ch === '.') { - string += '.'; - while (next() && ch >= '0' && ch <= '9') { - string += ch; - } - } - if (ch === 'e' || ch === 'E') { - string += ch; - next(); - if (ch === '-' || ch === '+') { - string += ch; - next(); - } - while (ch >= '0' && ch <= '9') { - string += ch; - next(); + [kError]: { + value: null, + writable: true + }, + [kEnded]: { + value: stream._readableState.endEmitted, + writable: true + }, + // the function passed to new Promise + // is cached so we avoid allocating a new + // closure at every run + [kHandlePromise]: { + value: (resolve, reject) => { + const data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; } + }, + writable: true + } + }); + iterator[kLastPromise] = null; + finished(stream, err => { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + const reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); } - number = +string; - if (!isFinite(number)) { - error('Bad number'); - } else { - if (BigNumber == null) BigNumber = __nccwpck_require__(7558); - //if (number > 9007199254740992 || number < -9007199254740992) - // Bignumber has stricter check: everything with length > 15 digits disallowed - if (string.length > 15) - return _options.storeAsString - ? string - : _options.useNativeBigInt - ? BigInt(string) - : new BigNumber(string); - else - return !_options.alwaysParseAsBig - ? number - : _options.useNativeBigInt - ? BigInt(number) - : new BigNumber(number); - } - }, - string = function () { - // Parse a string value. + iterator[kError] = err; + return; + } + const resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; - var hex, - i, - string = '', - uffff; +/***/ }), - // When parsing for string values, we must look for " and \ characters. +/***/ 6522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (ch === '"') { - var startAt = at; - while (next()) { - if (ch === '"') { - if (at - 1 > startAt) string += text.substring(startAt, at - 1); - next(); - return string; - } - if (ch === '\\') { - if (at - 1 > startAt) string += text.substring(startAt, at - 1); - next(); - if (ch === 'u') { - uffff = 0; - for (i = 0; i < 4; i += 1) { - hex = parseInt(next(), 16); - if (!isFinite(hex)) { - break; - } - uffff = uffff * 16 + hex; - } - string += String.fromCharCode(uffff); - } else if (typeof escapee[ch] === 'string') { - string += escapee[ch]; - } else { - break; - } - startAt = at; - } - } - } - error('Bad string'); - }, - white = function () { - // Skip whitespace. +"use strict"; - while (ch && ch <= ' ') { - next(); - } - }, - word = function () { - // true, false, or null. 
- switch (ch) { - case 't': - next('t'); - next('r'); - next('u'); - next('e'); - return true; - case 'f': - next('f'); - next('a'); - next('l'); - next('s'); - next('e'); - return false; - case 'n': - next('n'); - next('u'); - next('l'); - next('l'); - return null; - } - error("Unexpected '" + ch + "'"); - }, - value, // Place holder for the value function. - array = function () { - // Parse an array value. +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const _require = __nccwpck_require__(4300), + Buffer = _require.Buffer; +const _require2 = __nccwpck_require__(3837), + inspect = _require2.inspect; +const custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = class BufferList { + constructor() { + this.head = null; + this.tail = null; + this.length = 0; + } + push(v) { + const entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + unshift(v) { + const entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + shift() { + if (this.length === 0) return; + const ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + clear() { + this.head = this.tail = null; + this.length = 0; + } + join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + concat(n) { + if (this.length === 0) return Buffer.alloc(0); + const ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } - var array = []; + // Consumes a specified amount of bytes or characters from the buffered data. 
+ consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + first() { + return this.head.data; + } - if (ch === '[') { - next('['); - white(); - if (ch === ']') { - next(']'); - return array; // empty array + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + const str = p.data; + const nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); } - while (ch) { - array.push(value()); - white(); - if (ch === ']') { - next(']'); - return array; - } - next(','); - white(); + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + const buf = p.data; + const nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); } + break; } - error('Bad array'); - }, - object = function () { - // Parse an object value. + ++c; + } + this.length -= c; + return ret; + } - var key, - object = Object.create(null); + // Make sure the linked list only shows the minimal necessary information. + [custom](_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } +}; - if (ch === '{') { - next('{'); - white(); - if (ch === '}') { - next('}'); - return object; // empty object - } - while (ch) { - key = string(); - white(); - next(':'); - if ( - _options.strict === true && - Object.hasOwnProperty.call(object, key) - ) { - error('Duplicate key "' + key + '"'); - } +/***/ }), - if (suspectProtoRx.test(key) === true) { - if (_options.protoAction === 'error') { - error('Object contains forbidden prototype property'); - } else if (_options.protoAction === 'ignore') { - value(); - } else { - object[key] = value(); - } - } else if (suspectConstructorRx.test(key) === true) { - if (_options.constructorAction === 'error') { - error('Object contains forbidden constructor property'); - } else if (_options.constructorAction === 'ignore') { - value(); - } else { - object[key] = value(); - } - } else { - object[key] = value(); - } +/***/ 7049: +/***/ ((module) => { - white(); - if (ch === '}') { - next('}'); - return object; - } - next(','); - white(); - } - } - error('Bad object'); - }; +"use strict"; - value = function () { - // Parse a JSON value. It could be an object, an array, a string, a number, - // or a word. 
- white(); - switch (ch) { - case '{': - return object(); - case '[': - return array(); - case '"': - return string(); - case '-': - return number(); - default: - return ch >= '0' && ch <= '9' ? number() : word(); +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + const readableDestroyed = this._readableState && this._readableState.destroyed; + const writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } } - }; + return this; + } - // Return the json_parse function. It will have access to all of the above - // functions and variables. + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks - return function (source, reviver) { - var result; + if (this._readableState) { + this._readableState.destroyed = true; + } - text = source + ''; - at = 0; - ch = ' '; - result = value(); - white(); - if (ch) { - error('Syntax error'); + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, err => { + if (!cb && err) { + if (!this._writableState) { + process.nextTick(emitErrorAndCloseNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, this, err); + } else { + process.nextTick(emitCloseNT, this); + } + } else if (cb) { + process.nextTick(emitCloseNT, this); + cb(err); + } else { + process.nextTick(emitCloseNT, this); } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. - // If there is a reviver function, we recursively walk the new structure, - // passing each name/value pair to the reviver function for possible - // transformation, starting with a temporary root object that holds the result - // in an empty key. If there is not a reviver function, we simply return the - // result. - - return typeof reviver === 'function' - ? 
(function walk(holder, key) { - var k, - v, - value = holder[key]; - if (value && typeof value === 'object') { - Object.keys(value).forEach(function (k) { - v = walk(value, k); - if (v !== undefined) { - value[k] = v; - } else { - delete value[k]; - } - }); - } - return reviver.call(holder, key, value); - })({ '': result }, '') - : result; - }; + const rState = stream._readableState; + const wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy, + undestroy, + errorOrDestroy }; -module.exports = json_parse; - - /***/ }), -/***/ 8574: +/***/ 6080: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var BigNumber = __nccwpck_require__(7558); - -/* - json2.js - 2013-05-26 - - Public Domain. - - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. - - See http://www.JSON.org/js.html - - - This code should be minified before deployment. - See http://javascript.crockford.com/jsmin.html +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. - This file creates a global JSON object containing two methods: stringify - and parse. +const ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(7214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE); +function once(callback) { + let called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + let readable = opts.readable || opts.readable !== false && stream.readable; + let writable = opts.writable || opts.writable !== false && stream.writable; + const onlegacyfinish = () => { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + const onfinish = () => { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + const onend = () => { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + const onerror = err => { + callback.call(stream, err); + }; + const onclose = () => { + let err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + const onrequest = () => { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); 
+ stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; - JSON.stringify(value, replacer, space) - value any JavaScript value, usually an object or array. +/***/ }), - replacer an optional parameter that determines how object - values are stringified for objects. It can be a - function or an array of strings. +/***/ 9082: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - space an optional parameter that specifies the indentation - of nested structures. If it is omitted, the text will - be packed without extra whitespace. If it is a number, - it will specify the number of spaces to indent at each - level. If it is a string (such as '\t' or ' '), - it contains the characters used to indent at each level. +"use strict"; - This method produces a JSON text from a JavaScript value. - When an object value is found, if the object contains a toJSON - method, its toJSON method will be called and the result will be - stringified. A toJSON method does not serialize: it returns the - value represented by the name/value pair that should be serialized, - or undefined if nothing should be serialized. The toJSON method - will be passed the key associated with the value, and this will be - bound to the value +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const ERR_INVALID_ARG_TYPE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE); +function from(Readable, iterable, opts) { + let iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + const readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + let reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + const _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; - For example, this would serialize Dates as ISO strings. +/***/ }), - Date.prototype.toJSON = function (key) { - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } +/***/ 6989: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return this.getUTCFullYear() + '-' + - f(this.getUTCMonth() + 1) + '-' + - f(this.getUTCDate()) + 'T' + - f(this.getUTCHours()) + ':' + - f(this.getUTCMinutes()) + ':' + - f(this.getUTCSeconds()) + 'Z'; - }; +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). - You can provide an optional replacer method. It will be passed the - key and value of each member, with this bound to the containing - object. The value that is returned from your method will be - serialized. If your method returns undefined, then the member will - be excluded from the serialization. - If the replacer parameter is an array of strings, then it will be - used to select the members to be serialized. It filters the results - such that only members with keys listed in the replacer array are - stringified. 
- Values that do not have JSON representations, such as undefined or - functions, will not be serialized. Such values in objects will be - dropped; in arrays they will be replaced with null. You can use - a replacer function to replace those with JSON values. - JSON.stringify(undefined) returns undefined. +let eos; +function once(callback) { + let called = false; + return function () { + if (called) return; + called = true; + callback(...arguments); + }; +} +const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + let closed = false; + stream.on('close', () => { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(6080); + eos(stream, { + readable: reading, + writable: writing + }, err => { + if (err) return callback(err); + closed = true; + callback(); + }); + let destroyed = false; + return err => { + if (closed) return; + if (destroyed) return; + destroyed = true; - The optional space parameter produces a stringification of the - value that is filled with line breaks and indentation to make it - easier to read. + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + const callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + let error; + const destroys = streams.map(function (stream, i) { + const reading = i < streams.length - 1; + const writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; - If the space parameter is a non-empty string, then that string will - be used for indentation. If the space parameter is a number, then - the indentation will be that many spaces. +/***/ }), - Example: +/***/ 9948: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - text = JSON.stringify(['e', {pluribus: 'unum'}]); - // text is '["e",{"pluribus":"unum"}]' +"use strict"; - text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); - // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' +const ERR_INVALID_OPT_VALUE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE); +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + const name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } - text = JSON.stringify([new Date()], function (key, value) { - return this[key] instanceof Date ? - 'Date(' + this[key] + ')' : value; - }); - // text is '["Date(---current time---)"]' + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark +}; +/***/ }), - JSON.parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. +/***/ 2387: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. +module.exports = __nccwpck_require__(2781); - Example: - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. +/***/ }), - myData = JSON.parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); +/***/ 1642: +/***/ ((module, exports, __nccwpck_require__) => { - myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { - var d; - if (typeof value === 'string' && - value.slice(0, 5) === 'Date(' && - value.slice(-1) === ')') { - d = new Date(value.slice(5, -1)); - if (d) { - return d; - } - } - return value; - }); +var Stream = __nccwpck_require__(2781); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(1433); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(6993); + exports.Duplex = __nccwpck_require__(1359); + exports.Transform = __nccwpck_require__(4415); + exports.PassThrough = __nccwpck_require__(1542); + exports.finished = __nccwpck_require__(6080); + exports.pipeline = __nccwpck_require__(6989); +} - This is a reference implementation. You are free to copy, modify, or - redistribute. -*/ +/***/ }), -/*jslint evil: true, regexp: true */ +/***/ 3515: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, - call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, - getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, - lastIndex, length, parse, prototype, push, replace, slice, stringify, - test, toJSON, toString, valueOf -*/ +"use strict"; -// Create a JSON object only if one does not already exist. We create the -// methods in a closure to avoid creating global variables. 
+const {PassThrough} = __nccwpck_require__(2781); +const extend = __nccwpck_require__(8171); -var JSON = module.exports; +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} -(function () { - 'use strict'; +const DEFAULTS = { + objectMode: false, + retries: 2, - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, - var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - gap, - indent, - meta = { // table of character substitutions - '\b': '\\b', - '\t': '\\t', - '\n': '\\n', - '\f': '\\f', - '\r': '\\r', - '"' : '\\"', - '\\': '\\\\' - }, - rep; + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, - function quote(string) { + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; -// If the string contains no control characters, no quote characters, and no -// backslash characters, then we can safely slap some quotes around it. -// Otherwise we must also replace the offending characters with safe escape -// sequences. + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); - escapable.lastIndex = 0; - return escapable.test(string) ? '"' + string.replace(escapable, function (a) { - var c = meta[a]; - return typeof c === 'string' - ? c - : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); - }) + '"' : '"' + string + '"'; + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. + return true; + } } + }, +}; +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } - function str(key, holder) { - -// Produce a string from holder[key]. - - var i, // The loop counter. - k, // The member key. - v, // The member value. - length, - mind = gap, - partial, - value = holder[key], - isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); - -// If the value has a toJSON method, call it to obtain a replacement value. 
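// A minimal sketch of the decision encoded by the default shouldRetryFn above:
// retry on informational (1xx), "Too Many Requests" (429) and server-error (5xx)
// responses; do not retry on 2xx/3xx or other 4xx. The function name and the
// sample status codes are hypothetical, not part of the bundled retry-request code.
function sketchShouldRetry(statusCode) {
  const retryRanges = [[100, 199], [429, 429], [500, 599]];
  return retryRanges.some(([min, max]) => statusCode >= min && statusCode <= max);
}
// sketchShouldRetry(503) === true; sketchShouldRetry(404) === false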
- - if (value && typeof value === 'object' && - typeof value.toJSON === 'function') { - value = value.toJSON(key); - } + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; -// If we were called with a replacer function, then call the replacer to -// obtain a replacement value. + if (typeof opts === 'function') { + callback = opts; + } - if (typeof rep === 'function') { - value = rep.call(holder, key, value); - } + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); -// What happens next depends on the value's type. + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } - switch (typeof value) { - case 'string': - if (isBigNumber) { - return value; - } else { - return quote(value); - } + let currentRetryAttempt = opts.currentRetryAttempt; - case 'number': + let numNoResponseAttempts = 0; + let streamResponseHandled = false; -// JSON numbers must be finite. Encode non-finite numbers as null. + let retryStream; + let requestStream; + let delayStream; - return isFinite(value) ? String(value) : 'null'; + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; - case 'boolean': - case 'null': - case 'bigint': + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } -// If the value is a boolean or null, convert it to a string. Note: -// typeof null does not produce 'null'. The case is included here in -// the remote chance that this gets fixed someday. + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } - return String(value); + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } -// If the type is 'object', we might be dealing with an object or an array or -// null. + function resetStreams() { + delayStream = null; - case 'object': + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); -// Due to a specification blunder in ECMAScript, typeof null is 'object', -// so watch out for that case. + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } - if (!value) { - return 'null'; - } + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); -// Make an array to hold the partial results of stringifying this object value. + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } - gap += indent; - partial = []; + if (streamMode) { + streamResponseHandled = false; -// Is the value an array? + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); - if (Object.prototype.toString.apply(value) === '[object Array]') { + setImmediate(() => { + retryStream.emit('request'); + }); -// The value is an array. Stringify every element. Use null as a placeholder -// for non-JSON values. + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. 
+ .on('error', err => { + if (streamResponseHandled) { + return; + } - length = value.length; - for (i = 0; i < length; i += 1) { - partial[i] = str(i, value) || 'null'; - } + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } -// Join all of the elements together, separated with commas, and wrap them in -// brackets. + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); - v = partial.length === 0 - ? '[]' - : gap - ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' - : '[' + partial.join(',') + ']'; - gap = mind; - return v; - } + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } -// If the replacer is an array, use it to select the members to be stringified. + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } - if (rep && typeof rep === 'object') { - length = rep.length; - for (i = 0; i < length; i += 1) { - if (typeof rep[i] === 'string') { - k = rep[i]; - v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - } - } - } else { + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); -// Otherwise, iterate through all of the keys in the object. + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } - Object.keys(value).forEach(function(k) { - var v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - }); - } + setTimeout(makeRequest, nextRetryDelay); + } -// Join all of the member texts together, separated with commas, -// and wrap them in braces. + function onResponse(err, response, body) { + // An error such as DNS resolution. + if (err) { + numNoResponseAttempts++; - v = partial.length === 0 - ? '{}' - : gap - ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' - : '{' + partial.join(',') + '}'; - gap = mind; - return v; + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); } - } - -// If the JSON object does not yet have a stringify method, give it one. - - if (typeof JSON.stringify !== 'function') { - JSON.stringify = function (value, replacer, space) { + } -// The stringify method takes a value and an optional replacer, and an optional -// space parameter, and returns a JSON text. The replacer can be a function -// that can replace values, or an array of strings that will select the keys. -// A default replacer method can be provided. Use of the space parameter can -// produce text that is more easily readable. + return; + } - var i; - gap = ''; - indent = ''; + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? 
currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } -// If the space parameter is a number, make an indent string containing that -// many spaces. + // No more attempts need to be made, just continue on. + if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} - if (typeof space === 'number') { - for (i = 0; i < space; i += 1) { - indent += ' '; - } +module.exports = retryRequest; -// If the space parameter is a string, it will be used as the indent string. +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; - } else if (typeof space === 'string') { - indent = space; - } + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; -// If there is a replacer, it must be a function or an array. -// Otherwise, throw an error. + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; - rep = replacer; - if (replacer && typeof replacer !== 'function' && - (typeof replacer !== 'object' || - typeof replacer.length !== 'number')) { - throw new Error('JSON.stringify'); - } + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); -// Make a fake root object containing our value under the key of ''. -// Return the result of stringifying the value. + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} - return str('', {'': value}); - }; - } -}()); +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; /***/ }), -/***/ 6010: +/***/ 1604: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var bufferEqual = __nccwpck_require__(9239); -var Buffer = (__nccwpck_require__(1867).Buffer); -var crypto = __nccwpck_require__(6113); -var formatEcdsa = __nccwpck_require__(1728); -var util = __nccwpck_require__(3837); - -var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
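// A minimal standalone sketch of the backoff arithmetic in getNextRetryDelay above,
// assuming the bundled defaults (retryDelayMultiplier: 2, maxRetryDelay: 64 s,
// totalTimeout: 600 s). The function name, retry numbers and elapsed times are
// hypothetical, not part of the bundled code.
function sketchNextRetryDelay(retryNumber, elapsedMs) {
  const maxRetryDelayMs = 64 * 1000;               // DEFAULTS.maxRetryDelay in ms
  const totalTimeoutMs = 600 * 1000;               // DEFAULTS.totalTimeout in ms
  const jitter = Math.floor(Math.random() * 1000); // up to 1 s of jitter, as above
  const exponentialMs = Math.pow(2, retryNumber) * 1000 + jitter;
  const remainingBudgetMs = totalTimeoutMs - elapsedMs;
  // The delay is the exponential value, capped by both the per-retry maximum and
  // the remaining overall timeout budget (mirroring the Math.min above).
  return Math.min(exponentialMs, remainingBudgetMs, maxRetryDelayMs);
}
// e.g. retry 3 after 10 s elapsed waits ~8 s (plus jitter); retry 8 is capped at 64 s.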
-var MSG_INVALID_SECRET = 'secret must be a string or buffer'; -var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; -var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; - -var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; -if (supportsKeyObjects) { - MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; - MSG_INVALID_SECRET += 'or a KeyObject'; -} +module.exports = __nccwpck_require__(6244); -function checkIsPublicKey(key) { - if (Buffer.isBuffer(key)) { - return; - } +/***/ }), - if (typeof key === 'string') { - return; - } +/***/ 6244: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +var RetryOperation = __nccwpck_require__(5369); - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; - if (typeof key.type !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); } - if (typeof key.asymmetricKeyType !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; } - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_VERIFIER_KEY); + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); } -}; -function checkIsPrivateKey(key) { - if (Buffer.isBuffer(key)) { - return; + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); } - if (typeof key === 'string') { - return; + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); } - if (typeof key === 'object') { - return; - } + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); - throw typeError(MSG_INVALID_SIGNER_KEY); + return timeouts; }; -function checkIsSecretKey(key) { - if (Buffer.isBuffer(key)) { - return; - } +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? 
(Math.random() + 1) + : 1; - if (typeof key === 'string') { - return key; - } + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_SECRET); - } + return timeout; +}; - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_SECRET); +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; } - if (key.type !== 'secret') { - throw typeError(MSG_INVALID_SECRET); + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } } - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_SECRET); - } -} + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; -function fromBase64(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); -function toBase64(base64url) { - base64url = base64url.toString(); + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); - var padding = 4 - base64url.length % 4; - if (padding !== 4) { - for (var i = 0; i < padding; ++i) { - base64url += '='; - } + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; } +}; - return base64url - .replace(/\-/g, '+') - .replace(/_/g, '/'); -} - -function typeError(template) { - var args = [].slice.call(arguments, 1); - var errMsg = util.format.bind(util, template).apply(null, args); - return new TypeError(errMsg); -} -function bufferOrString(obj) { - return Buffer.isBuffer(obj) || typeof obj === 'string'; -} +/***/ }), -function normalizeInput(thing) { - if (!bufferOrString(thing)) - thing = JSON.stringify(thing); - return thing; -} +/***/ 5369: +/***/ ((module) => { -function createHmacSigner(bits) { - return function sign(thing, secret) { - checkIsSecretKey(secret); - thing = normalizeInput(thing); - var hmac = crypto.createHmac('sha' + bits, secret); - var sig = (hmac.update(thing), hmac.digest('base64')) - return fromBase64(sig); +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; } -} -function createHmacVerifier(bits) { - return function verify(thing, signature, secret) { - var computedSig = createHmacSigner(bits)(thing, secret); - return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); - } -} + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; -function createKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - // Even though we are specifying "RSA" here, this works with ECDSA - // keys as well. 
- var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); - return fromBase64(sig); + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); } } +module.exports = RetryOperation; -function createKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify(publicKey, signature, 'base64'); - } +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); } -function createPSSKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign({ - key: privateKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, 'base64')); - return fromBase64(sig); +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); } -} - -function createPSSKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify({ - key: publicKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, signature, 'base64'); + if (this._timer) { + clearTimeout(this._timer); } -} - -function createECDSASigner(bits) { - var inner = createKeySigner(bits); - return function sign() { - var signature = inner.apply(null, arguments); - signature = formatEcdsa.derToJose(signature, 'ES' + bits); - return signature; - }; -} - -function createECDSAVerifer(bits) { - var inner = createKeyVerifier(bits); - return function verify(thing, signature, publicKey) { - signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); - var result = inner(thing, signature, publicKey); - return result; - }; -} -function createNoneSigner() { - return function sign() { - return ''; - } -} + this._timeouts = []; + this._cachedTimeouts = null; +}; -function createNoneVerifier() { - return function verify(thing, signature) { - return signature === ''; +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); } -} -module.exports = function jwa(algorithm) { - var signerFactories = { - hs: createHmacSigner, - rs: createKeySigner, - ps: createPSSKeySigner, - es: createECDSASigner, - none: createNoneSigner, + if (!err) { + return false; } - var verifierFactories = { - hs: createHmacVerifier, - rs: createKeyVerifier, - ps: createPSSKeyVerifier, - es: createECDSAVerifer, - none: createNoneVerifier, + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; } - var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); - if (!match) - throw typeError(MSG_INVALID_ALGORITHM, algorithm); - var algo = (match[1] || match[3]).toLowerCase(); - var bits = match[2]; - return { - sign: signerFactories[algo](bits), - verify: verifierFactories[algo](bits), + 
this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } } -}; + var self = this; + this._timer = setTimeout(function() { + self._attempts++; -/***/ }), + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); -/***/ 4636: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (self._options.unref) { + self._timeout.unref(); + } + } -/*global exports*/ -var SignStream = __nccwpck_require__(3334); -var VerifyStream = __nccwpck_require__(5522); + self._fn(self._attempts); + }, timeout); -var ALGORITHMS = [ - 'HS256', 'HS384', 'HS512', - 'RS256', 'RS384', 'RS512', - 'PS256', 'PS384', 'PS512', - 'ES256', 'ES384', 'ES512' -]; + if (this._options.unref) { + this._timer.unref(); + } -exports.ALGORITHMS = ALGORITHMS; -exports.sign = SignStream.sign; -exports.verify = VerifyStream.verify; -exports.decode = VerifyStream.decode; -exports.isValid = VerifyStream.isValid; -exports.createSign = function createSign(opts) { - return new SignStream(opts); -}; -exports.createVerify = function createVerify(opts) { - return new VerifyStream(opts); + return true; }; +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; -/***/ }), - -/***/ 1868: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*global module, process*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var Stream = __nccwpck_require__(2781); -var util = __nccwpck_require__(3837); - -function DataStream(data) { - this.buffer = null; - this.writable = true; - this.readable = true; - - // No input - if (!data) { - this.buffer = Buffer.alloc(0); - return this; - } - - // Stream - if (typeof data.pipe === 'function') { - this.buffer = Buffer.alloc(0); - data.pipe(this); - return this; + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } } - // Buffer or String - // or Object (assumedly a passworded key) - if (data.length || typeof data === 'object') { - this.buffer = data; - this.writable = false; - process.nextTick(function () { - this.emit('end', data); - this.readable = false; - this.emit('close'); - }.bind(this)); - return this; + var self = this; + if (this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); } - throw new TypeError('Unexpected data type ('+ typeof data + ')'); -} -util.inherits(DataStream, Stream); + this._operationStart = new Date().getTime(); -DataStream.prototype.write = function write(data) { - this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); - this.emit('data', data); + this._fn(this._attempts); }; -DataStream.prototype.end = function end(data) { - if (data) - this.write(data); - this.emit('end', data); - this.emit('close'); - this.writable = false; - this.readable = false; +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); }; -module.exports = DataStream; - - -/***/ }), - -/***/ 3334: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*global module*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var DataStream = 
__nccwpck_require__(1868); -var jwa = __nccwpck_require__(6010); -var Stream = __nccwpck_require__(2781); -var toString = __nccwpck_require__(5292); -var util = __nccwpck_require__(3837); - -function base64url(string, encoding) { - return Buffer - .from(string, encoding) - .toString('base64') - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} - -function jwsSecuredInput(header, payload, encoding) { - encoding = encoding || 'utf8'; - var encodedHeader = base64url(toString(header), 'binary'); - var encodedPayload = base64url(toString(payload), encoding); - return util.format('%s.%s', encodedHeader, encodedPayload); -} - -function jwsSign(opts) { - var header = opts.header; - var payload = opts.payload; - var secretOrKey = opts.secret || opts.privateKey; - var encoding = opts.encoding; - var algo = jwa(header.alg); - var securedInput = jwsSecuredInput(header, payload, encoding); - var signature = algo.sign(securedInput, secretOrKey); - return util.format('%s.%s', securedInput, signature); -} - -function SignStream(opts) { - var secret = opts.secret||opts.privateKey||opts.key; - var secretStream = new DataStream(secret); - this.readable = true; - this.header = opts.header; - this.encoding = opts.encoding; - this.secret = this.privateKey = this.key = secretStream; - this.payload = new DataStream(opts.payload); - this.secret.once('close', function () { - if (!this.payload.writable && this.readable) - this.sign(); - }.bind(this)); - - this.payload.once('close', function () { - if (!this.secret.writable && this.readable) - this.sign(); - }.bind(this)); -} -util.inherits(SignStream, Stream); +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; -SignStream.prototype.sign = function sign() { - try { - var signature = jwsSign({ - header: this.header, - payload: this.payload.buffer, - secret: this.secret.buffer, - encoding: this.encoding - }); - this.emit('done', signature); - this.emit('data', signature); - this.emit('end'); - this.readable = false; - return signature; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); - } +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; }; -SignStream.sign = jwsSign; +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; -module.exports = SignStream; +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + var counts = {}; + var mainError = null; + var mainErrorCount = 0; -/***/ }), + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; -/***/ 5292: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + counts[message] = count; -/*global module*/ -var Buffer = (__nccwpck_require__(4300).Buffer); + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } -module.exports = function toString(obj) { - if (typeof obj === 'string') - return obj; - if (typeof obj === 'number' || Buffer.isBuffer(obj)) - return obj.toString(); - return JSON.stringify(obj); + return mainError; }; /***/ }), -/***/ 5522: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1867: +/***/ ((module, exports, __nccwpck_require__) => { -/*global module*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var DataStream = 
__nccwpck_require__(1868); -var jwa = __nccwpck_require__(6010); -var Stream = __nccwpck_require__(2781); -var toString = __nccwpck_require__(5292); -var util = __nccwpck_require__(3837); -var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(4300) +var Buffer = buffer.Buffer -function isObject(thing) { - return Object.prototype.toString.call(thing) === '[object Object]'; +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } } - -function safeJsonParse(thing) { - if (isObject(thing)) - return thing; - try { return JSON.parse(thing); } - catch (e) { return undefined; } +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer } -function headerFromJWS(jwsSig) { - var encodedHeader = jwsSig.split('.', 1)[0]; - return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) } -function securedInputFromJWS(jwsSig) { - return jwsSig.split('.', 2).join('.'); -} +SafeBuffer.prototype = Object.create(Buffer.prototype) -function signatureFromJWS(jwsSig) { - return jwsSig.split('.')[2]; +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) } -function payloadFromJWS(jwsSig, encoding) { - encoding = encoding || 'utf8'; - var payload = jwsSig.split('.')[1]; - return Buffer.from(payload, 'base64').toString(encoding); +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf } -function isValidJws(string) { - return JWS_REGEX.test(string) && !!headerFromJWS(string); +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) } -function jwsVerify(jwsSig, algorithm, secretOrKey) { - if (!algorithm) { - var err = new Error("Missing algorithm parameter for jws.verify"); - err.code = "MISSING_ALGORITHM"; - throw err; +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') } - jwsSig = toString(jwsSig); - var signature = signatureFromJWS(jwsSig); - var securedInput = securedInputFromJWS(jwsSig); - var algo = jwa(algorithm); - return algo.verify(securedInput, signature, secretOrKey); + return buffer.SlowBuffer(size) } -function jwsDecode(jwsSig, opts) { - opts = opts || {}; - jwsSig = toString(jwsSig); - if (!isValidJws(jwsSig)) - return null; +/***/ }), - var header = headerFromJWS(jwsSig); +/***/ 9626: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!header) - return null; +"use strict"; - var payload = payloadFromJWS(jwsSig); - if (header.typ === 'JWT' || opts.json) - payload = JSON.parse(payload, opts.encoding); - 
return { - header: header, - payload: payload, - signature: signatureFromJWS(jwsSig) - }; -} +var stubs = __nccwpck_require__(1099) -function VerifyStream(opts) { - opts = opts || {}; - var secretOrKey = opts.secret||opts.publicKey||opts.key; - var secretStream = new DataStream(secretOrKey); - this.readable = true; - this.algorithm = opts.algorithm; - this.encoding = opts.encoding; - this.secret = this.publicKey = this.key = secretStream; - this.signature = new DataStream(opts.signature); - this.secret.once('close', function () { - if (!this.signature.writable && this.readable) - this.verify(); - }.bind(this)); +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this - this.signature.once('close', function () { - if (!this.secret.writable && this.readable) - this.verify(); - }.bind(this)); -} -util.inherits(VerifyStream, Stream); -VerifyStream.prototype.verify = function verify() { - try { - var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); - var obj = jwsDecode(this.signature.buffer, this.encoding); - this.emit('done', valid, obj); - this.emit('data', valid); - this.emit('end'); - this.readable = false; - return valid; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); + var cfg = { + callthrough: true, + calls: 1 } -}; -VerifyStream.decode = jwsDecode; -VerifyStream.isValid = isValidJws; -VerifyStream.verify = jwsVerify; + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) -module.exports = VerifyStream; + return stream +} + +module.exports = StreamEvents /***/ }), -/***/ 7129: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6121: +/***/ ((module) => { -"use strict"; +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } + + return state.buffer[0].length + } + return state.length +} -// A linked list to keep track of recently-used-ness -const Yallist = __nccwpck_require__(665) -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') +/***/ }), -const naiveLength = () => 1 +/***/ 4841: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. // -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. 
-class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - if (!options) - options = {} - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } +/**/ - // resize the cache when the max changes. 
- set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') +var Buffer = (__nccwpck_require__(1867).Buffer); +/**/ - this[MAX] = mL || Infinity - trim(this) +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; } - get max () { - return this[MAX] +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.s = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; } - get allowStale () { - return this[ALLOW_STALE] + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') +StringDecoder.prototype.end = utf8End; - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; - // resize the cache when the lengthCalculator changes. 
- set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; } - trim(this) + return nb; } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } + return 0; +} - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } } } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. 
+function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } } + return r; } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} - keys () { - return this[LRU_LIST].toArray().map(k => k.key) +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); } + return r; +} - values () { - return this[LRU_LIST].toArray().map(k => k.value) +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; } + return buf.toString('base64', i, buf.length - n); +} - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} - dumpLru () { - return this[LRU_LIST] - } +/***/ }), - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] +/***/ 4526: +/***/ ((module) => { - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; +// const octRegex = /0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } +//polyfill +if (!Number.parseInt && window.parseInt) { + Number.parseInt = window.parseInt; +} +if (!Number.parseFloat && window.parseFloat) { + Number.parseFloat = window.parseFloat; +} - const node = this[CACHE].get(key) - const item = node.value + +const consider = { + hex : true, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + // const options = Object.assign({}, consider); + // if(opt.leadingZeros === false){ + // options.leadingZeros = false; + // }else if(opt.hex === false){ + // options.hex = false; + // } + + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + // if(trimmedStr === "0.0") return 0; + // else if(trimmedStr === "+0.0") return 0; + // else if(trimmedStr === "-0.0") return -0; + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if (options.hex && hexRegex.test(trimmedStr)) { + return Number.parseInt(trimmedStr, 16); + // } else if (options.parseOct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + const eNotation = match[4] || match[6]; + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(eNotation){ //given number has enotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + // const decimalPart = match[5].substr(1); + // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + + + // const p = numStr.indexOf("."); + // const givenIntPart = numStr.substr(0,p); + // const givenDecPart = numStr.substr(p+1); + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + // if(numTrimmedByZeros === numStr){ + // if(options.leadingZeros) return num; + // else return str; + // }else return str; + if(numTrimmedByZeros === numStr) return num; + else if(sign+numTrimmedByZeros === numStr) return num; + else return str; + } - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } + if(trimmedStr === numStr) return num; + else if(trimmedStr === sign+numStr) return num; + // else{ + // //number with +/- sign + // trimmedStr.test(/[-+][0-9]); - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true + // } + return str; + } + // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; + + }else{ //non-numeric string + return str; + } } +} - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. - if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; } + return numStr; +} +module.exports = toNumber - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - peek (key) { - return get(this, key, false) - } +/***/ }), - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null +/***/ 1099: +/***/ ((module) => { - del(this, node) - return node.value - } +"use strict"; - del (key) { - del(this, this[CACHE].get(key)) - } - load (arr) { - // reset the cache - this.reset() +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } + if (!stub) { + stub = cfg + cfg = {} } - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} + stub = stub || function() {} -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} + cfg.callthrough = cfg.callthrough || false + 
cfg.calls = cfg.calls || 0 -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false + var norevert = cfg.calls === 0 - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} + var cached = obj[method].bind(obj) -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev - } - } -} + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) + if (cfg.callthrough) + returnVal = cached.apply(obj, args) - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} + returnVal = stub.apply(obj, args) || returnVal -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} + if (!norevert && --cfg.calls === 0) + obj[method] = cached -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined + return returnVal } - if (hit) - fn.call(thisp, hit.value, hit.key, self) } -module.exports = LRUCache - /***/ }), -/***/ 7426: +/***/ 9318: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/*! - * mime-db - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015-2022 Douglas Christopher Wilson - * MIT Licensed - */ - -/** - * Module exports. - */ - -module.exports = __nccwpck_require__(3765) - +"use strict"; -/***/ }), +const os = __nccwpck_require__(2037); +const tty = __nccwpck_require__(6224); +const hasFlag = __nccwpck_require__(1621); -/***/ 3583: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +const {env} = process; -"use strict"; -/*! - * mime-types - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else { + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} +function translateLevel(level) { + if (level === 0) { + return false; + } -/** - * Module dependencies. - * @private - */ + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} -var db = __nccwpck_require__(7426) -var extname = (__nccwpck_require__(1017).extname) +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } -/** - * Module variables. 
- * @private - */ + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ -var TEXT_TYPE_REGEXP = /^text\//i + if (hasFlag('color=256')) { + return 2; + } -/** - * Module exports. - * @public - */ + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } -exports.charset = charset -exports.charsets = { lookup: charset } -exports.contentType = contentType -exports.extension = extension -exports.extensions = Object.create(null) -exports.lookup = lookup -exports.types = Object.create(null) + const min = forceColor || 0; -// Populate the extensions/types maps -populateMaps(exports.extensions, exports.types) + if (env.TERM === 'dumb') { + return min; + } -/** - * Get the default charset for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } -function charset (type) { - if (!type || typeof type !== 'string') { - return false - } + return 1; + } - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && db[match[1].toLowerCase()] + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } - if (mime && mime.charset) { - return mime.charset - } + return min; + } - // default text/* to utf-8 - if (match && TEXT_TYPE_REGEXP.test(match[1])) { - return 'UTF-8' - } + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } - return false -} + if (env.COLORTERM === 'truecolor') { + return 3; + } -/** - * Create a full Content-Type header given a MIME type or extension. - * - * @param {string} str - * @return {boolean|string} - */ + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); -function contentType (str) { - // TODO: should this even be in this module? - if (!str || typeof str !== 'string') { - return false - } + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } - var mime = str.indexOf('/') === -1 - ? exports.lookup(str) - : str + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } - if (!mime) { - return false - } + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } - // TODO: use content-type or other module - if (mime.indexOf('charset') === -1) { - var charset = exports.charset(mime) - if (charset) mime += '; charset=' + charset.toLowerCase() - } + if ('COLORTERM' in env) { + return 1; + } - return mime + return min; } -/** - * Get the default extension for a MIME type. 
- * - * @param {string} type - * @return {boolean|string} - */ +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} -function extension (type) { - if (!type || typeof type !== 'string') { - return false - } +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - // get extensions - var exts = match && exports.extensions[match[1].toLowerCase()] +/***/ }), - if (!exts || !exts.length) { - return false - } +/***/ 4920: +/***/ ((__unused_webpack_module, exports) => { - return exts[0] -} +"use strict"; /** - * Lookup the MIME type for a file path/extension. + * @license + * Copyright 2020 Google LLC * - * @param {string} path - * @return {boolean|string} + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - -function lookup (path) { - if (!path || typeof path !== 'string') { - return false - } - - // get the extension ("ext" or ".ext" or full path) - var extension = extname('x.' + path) - .toLowerCase() - .substr(1) - - if (!extension) { - return false - } - - return exports.types[extension] || false +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. + * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } } - +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; /** - * Populate the extensions and types maps. - * @private + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. 
*/ - -function populateMaps (extensions, types) { - // source preference (least -> most) - var preference = ['nginx', 'apache', undefined, 'iana'] - - Object.keys(db).forEach(function forEachMimeType (type) { - var mime = db[type] - var exts = mime.extensions - - if (!exts || !exts.length) { - return +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); } - - // mime -> extensions - extensions[type] = exts - - // extension -> mime - for (var i = 0; i < exts.length; i++) { - var extension = exts[i] - - if (types[extension]) { - var from = preference.indexOf(db[types[extension]].source) - var to = preference.indexOf(mime.source) - - if (types[extension] !== 'application/octet-stream' && - (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { - // skip the remapping - continue + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; } - } - - // set the extension -> mime - types[extension] = type + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; } - }) } - +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map /***/ }), -/***/ 6038: -/***/ ((module) => { +/***/ 446: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -/** - * @param typeMap [Object] Map of MIME type -> Array[extensions] - * @param ... - */ -function Mime() { - this._types = Object.create(null); - this._extensions = Object.create(null); - - for (let i = 0; i < arguments.length; i++) { - this.define(arguments[i]); - } - - this.define = this.define.bind(this); - this.getType = this.getType.bind(this); - this.getExtension = this.getExtension.bind(this); -} - /** - * Define mimetype -> extension mappings. Each key is a mime-type that maps - * to an array of extensions associated with the type. The first extension is - * used as the default extension for the type. - * - * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * @license + * Copyright 2019 Google LLC * - * If a type declares an extension that has already been defined, an error will - * be thrown. To suppress this error and force the extension to be associated - * with the new type, pass `force`=true. Alternatively, you may prefix the - * extension with "*" to map the type to extension, without mapping the - * extension to the type. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * http://www.apache.org/licenses/LICENSE-2.0 * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(3685); +const https_1 = __nccwpck_require__(5687); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(7310); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. 
* - * @param map (Object) type definitions - * @param force (Boolean) if true, force overriding of existing definitions + * @param uri The request uri + * @returns {boolean} */ -Mime.prototype.define = function(typeMap, force) { - for (let type in typeMap) { - let extensions = typeMap[type].map(function(t) { - return t.toLowerCase(); - }); - type = type.toLowerCase(); - - for (let i = 0; i < extensions.length; i++) { - const ext = extensions[i]; - - // '*' prefix = not the preferred type for this extension. So fixup the - // extension, and skip it. - if (ext[0] === '*') { - continue; - } - - if (!force && (ext in this._types)) { - throw new Error( - 'Attempt to change mapping for "' + ext + - '" extension from "' + this._types[ext] + '" to "' + type + - '". Pass `force=true` to allow this, otherwise remove "' + ext + - '" from the list of extensions for "' + type + '".' - ); - } - - this._types[ext] = type; +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; } - - // Use first extension as default - if (force || !this._extensions[type]) { - const ext = extensions[0]; - this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } } - } -}; - -/** - * Lookup a mime type based on extension - */ -Mime.prototype.getType = function(path) { - path = String(path); - let last = path.replace(/^.*[/\\]/, '').toLowerCase(); - let ext = last.replace(/^.*\./, '').toLowerCase(); - - let hasPath = last.length < path.length; - let hasDot = ext.length < last.length - 1; - - return (hasDot || !hasPath) && this._types[ext] || null; -}; - + return true; +} /** - * Return file extension associated with a mime type + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} */ -Mime.prototype.getExtension = function(type) { - type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; - return type && this._extensions[type.toLowerCase()] || null; -}; - -module.exports = Mime; - +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? __nccwpck_require__(3764) + : __nccwpck_require__(7219); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? 
http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map /***/ }), -/***/ 5377: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6886: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -let Mime = __nccwpck_require__(6038); -module.exports = new Mime(__nccwpck_require__(3114), __nccwpck_require__(8809)); - - -/***/ }), - -/***/ 8809: -/***/ ((module) => { - -module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":[
"uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs"
:["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.n
oblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.s
hana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"appli
cation/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo
","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fl
y"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; - -/***/ }), - -/***/ 3114: -/***/ ((module) => { - -module.exports = {"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application
/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["
wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":
["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; - -/***/ }), - -/***/ 900: -/***/ ((module) => { - -/** - * Helpers. - */ - -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; -var w = d * 7; -var y = d * 365.25; - /** - * Parse or format the given `val`. + * @license + * Copyright 2018 Google LLC * - * Options: + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at * - * - `long` verbose formatting [false] + * http://www.apache.org/licenses/LICENSE-2.0 * - * @param {String|Number} val - * @param {Object} [options] - * @throws {Error} throw an error if val is not a non-empty string or a number - * @return {String|Number} - * @api public + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - -module.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === 'string' && val.length > 0) { - return parse(val); - } else if (type === 'number' && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - 'val is not a non-empty string or a valid number. val=' + - JSON.stringify(val) - ); -}; - +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(467); +const stream_1 = __nccwpck_require__(2781); +const uuid = __nccwpck_require__(7835); +const agents_1 = __nccwpck_require__(446); +const TeenyStatistics_1 = __nccwpck_require__(4920); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(9626); +class RequestError extends Error { +} +exports.RequestError = RequestError; /** - * Parse the given `str` and return milliseconds. 
- * - * @param {String} str - * @return {Number} - * @api private + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options */ - -function parse(str) { - str = String(str); - if (str.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type = (match[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'yrs': - case 'yr': - case 'y': - return n * y; - case 'weeks': - case 'week': - case 'w': - return n * w; - case 'days': - case 'day': - case 'd': - return n * d; - case 'hours': - case 'hour': - case 'hrs': - case 'hr': - case 'h': - return n * h; - case 'minutes': - case 'minute': - case 'mins': - case 'min': - case 'm': - return n * m; - case 'seconds': - case 'second': - case 'secs': - case 'sec': - case 's': - return n * s; - case 'milliseconds': - case 'millisecond': - case 'msecs': - case 'msec': - case 'ms': - return n; - default: - return undefined; - } +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(3477); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; } - +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; /** - * Short format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private + * Single instance of an interface for keeping track of things. */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map -function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + 'd'; +/***/ }), + +/***/ 7835: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; } - if (msAbs >= h) { - return Math.round(ms / h) + 'h'; +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; } - if (msAbs >= m) { - return Math.round(ms / m) + 'm'; +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; } - if (msAbs >= s) { - return Math.round(ms / s) + 's'; +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; } - return ms + 'ms'; -} - -/** - * Long format for `ms`. 
- * - * @param {Number} ms - * @return {String} - * @api private - */ - -function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, 'day'); +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; } - if (msAbs >= h) { - return plural(ms, msAbs, h, 'hour'); +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; } - if (msAbs >= m) { - return plural(ms, msAbs, m, 'minute'); +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; } - if (msAbs >= s) { - return plural(ms, msAbs, s, 'second'); +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; } - return ms + ' ms'; -} - -/** - * Pluralization helper. - */ - -function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); -} +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +var _v = _interopRequireDefault(__nccwpck_require__(618)); -/***/ }), +var _v2 = _interopRequireDefault(__nccwpck_require__(6888)); -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { +var _v3 = _interopRequireDefault(__nccwpck_require__(1186)); -"use strict"; +var _v4 = _interopRequireDefault(__nccwpck_require__(2385)); +var _nil = _interopRequireDefault(__nccwpck_require__(5879)); -Object.defineProperty(exports, "__esModule", ({ value: true })); +var _version = _interopRequireDefault(__nccwpck_require__(694)); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); -var Stream = _interopDefault(__nccwpck_require__(2781)); -var http = _interopDefault(__nccwpck_require__(3685)); -var Url = _interopDefault(__nccwpck_require__(7310)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(5687)); -var zlib = _interopDefault(__nccwpck_require__(9796)); +var _stringify = _interopRequireDefault(__nccwpck_require__(5802)); -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js +var _parse = _interopRequireDefault(__nccwpck_require__(893)); -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); +/***/ }), -class Blob { - constructor() { - this[TYPE] = ''; +/***/ 9576: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const blobParts = arguments[0]; - const options = arguments[1]; +"use strict"; - const buffers = []; - let size = 0; - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - this[BUFFER] = Buffer.concat(buffers); +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } + return _crypto.default.createHash('md5').update(bytes).digest(); } -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } +var _default = md5; +exports["default"] = _default; - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} +/***/ }), -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +/***/ 4974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -let convert; -try { - convert = (__nccwpck_require__(2877).convert); -} catch (e) {} +"use strict"; -const INTERNALS = Symbol('Body internals'); -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = 
Stream.PassThrough; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; +/***/ }), - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} +/***/ 5879: +/***/ ((__unused_webpack_module, exports) => { -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, +"use strict"; - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, +/***/ }), - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; +/***/ 893: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, +"use strict"; - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - /** - * Decode response as buffer (non-spec api) - * - 
* @return Promise - */ - buffer() { - return consumeBody.call(this); - }, +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ -/** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ - this[INTERNALS].disturbed = true; + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
- if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - let body = this.body; + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +var _default = parse; +exports["default"] = _default; - // body is blob - if (isBlob(body)) { - body = body.stream(); - } +/***/ }), - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } +/***/ 7658: +/***/ ((__unused_webpack_module, exports) => { - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +"use strict"; - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - return new Body.Promise(function (resolve, reject) { - let resTimeout; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } +/***/ }), - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); +/***/ 2042: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } +"use strict"; - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - accumBytes += chunk.length; - accum.push(chunk); - }); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; - body.on('end', function () { - if (abort) { - return; - } +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - clearTimeout(resTimeout); +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } +let poolPtr = rnds8Pool.length; - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } + poolPtr = 0; + } - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} - // html5 - if (!res && str) { - res = / { - if (res) { - res = /charset=(.*)/i.exec(res.pop()); - } - } +"use strict"; - // xml - if (!res && str) { - res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); - } - // found charset - if (res) { - charset = res.pop(); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - // prevent decode issues when sites use incorrect encoding - // ref: https://hsivonen.fi/encoding-menu/ - if (charset === 'gb2312' || charset === 'gbk') { - charset = 'gb18030'; - } - } +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - // turn raw buffers into a single utf-8 buffer - return convert(buffer, 'UTF-8', charset).toString(); -} +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -/** - * Detect a URLSearchParams object - * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 - * - * @param Object obj Object to detect by type or brand - * @return String - */ -function isURLSearchParams(obj) { - // Duck-typing as a necessary condition. - if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { - return false; - } +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } - // Brand-checking and more duck-typing as optional condition. 
- return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; + return _crypto.default.createHash('sha1').update(bytes).digest(); } -/** - * Check if `obj` is a W3C `Blob` object (which `File` inherits from) - * @param {*} obj - * @return {boolean} - */ -function isBlob(obj) { - return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); -} +var _default = sha1; +exports["default"] = _default; -/** - * Clone body given Res/Req instance - * - * @param Mixed instance Response or Request instance - * @return Mixed - */ -function clone(instance) { - let p1, p2; - let body = instance.body; +/***/ }), - // don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } +/***/ 5802: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if (body instanceof Stream && typeof body.getBoundary !== 'function') { - // tee instance body - p1 = new PassThrough(); - p2 = new PassThrough(); - body.pipe(p1); - body.pipe(p2); - // set instance body to teed body and return the other teed body - instance[INTERNALS].body = p1; - body = p2; - } +"use strict"; - return body; -} -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is present. - * - * @param Mixed instance Any options.body input - */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. 
- * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes - * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ -function getTotalBytes(instance) { - const body = instance.body; +const byteToHex = []; +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; } -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. - * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } + return uuid; } -// expose Promise -Body.Promise = global.Promise; - -/** - * headers.js - * - * Headers class offers convenient helpers - */ +var _default = stringify; +exports["default"] = _default; -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +/***/ }), -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} +/***/ 618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} +"use strict"; -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. 
- * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } - } - return undefined; -} -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - this[MAP] = Object.create(null); +var _rng = _interopRequireDefault(__nccwpck_require__(2042)); - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); +var _stringify = __nccwpck_require__(5802); - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - return; - } +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } +let _clockseq; // Previous uuid creation time - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 - return this[MAP][key].join(', '); - } + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : undefined; + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? key : name] = [value]; - } + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. 
- * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); + msecs += 12219292800000; // `time_low` -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` -const INTERNAL = Symbol('internal'); + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; + b[i++] = clockseq & 0xff; // `node` - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } - this[INTERNAL].index = index + 1; + return buf || (0, _stringify.unsafeStringify)(b); +} - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); +var _default = v1; +exports["default"] = _default; -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); +/***/ }), -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); +/***/ 6888: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // http.request() only supports string as Host header. 
This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } +"use strict"; - return obj; -} -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -const INTERNALS$1 = Symbol('Response internals'); +var _v = _interopRequireDefault(__nccwpck_require__(8392)); -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; +var _md = _interopRequireDefault(__nccwpck_require__(9576)); -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - Body.call(this, body, opts); +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; - const status = opts.status || 200; - const headers = new Headers(opts.headers); +/***/ }), - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } +/***/ 8392: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } +"use strict"; - get url() { - return this[INTERNALS$1].url || ''; - } - get status() { - return this[INTERNALS$1].status; - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } +var _stringify = __nccwpck_require__(5802); - get redirected() { - return this[INTERNALS$1].counter > 0; - } +var _parse = _interopRequireDefault(__nccwpck_require__(893)); - get statusText() { - return this[INTERNALS$1].statusText; - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - get headers() { - return this[INTERNALS$1].headers; - } +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; } -Body.mixIn(Response.prototype); +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + if (typeof value === 'string') { + value = stringToBytes(value); + } -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + if (buf) { + offset = offset || 0; -/** - * Check if a value is an instance of Request. 
- * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} + return buf; + } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) - let parsedURL; - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; +/***/ }), - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); +/***/ 1186: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const headers = new Headers(init.headers || input.headers || {}); +"use strict"; - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } +var _native = _interopRequireDefault(__nccwpck_require__(4974)); - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; +var _rng = _interopRequireDefault(__nccwpck_require__(2042)); - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } +var _stringify = __nccwpck_require__(5802); - get method() { - return this[INTERNALS$2].method; - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } - get headers() { - return this[INTERNALS$2].headers; - } + options = options || {}; - get redirect() { - return this[INTERNALS$2].redirect; - } + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - get signal() { - return this[INTERNALS$2].signal; - } - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided -Body.mixIn(Request.prototype); + if (buf) { + offset = offset || 0; -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); + return buf; + } -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); + return (0, _stringify.unsafeStringify)(rnds); +} - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } +var _default = v4; +exports["default"] = _default; - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } +/***/ }), - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } +/***/ 2385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } +"use strict"; - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } +var _v = _interopRequireDefault(__nccwpck_require__(8392)); - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } +var _sha = _interopRequireDefault(__nccwpck_require__(6723)); - if (!headers.has('Connection') && !agent) { - 
headers.set('Connection', 'close'); - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} +/***/ }), -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ +/***/ 4875: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); +"use strict"; - this.type = 'aborted'; - this.message = message; - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; +var _regex = _interopRequireDefault(__nccwpck_require__(7658)); -const URL$1 = Url.URL || whatwgUrl.URL; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; +var _default = validate; +exports["default"] = _default; - return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); -}; +/***/ }), -/** - * isSameProtocol reports whether the two provided URLs use the same protocol. - * - * Both domains must already be in canonical form. - * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = function isSameProtocol(destination, original) { - const orig = new URL$1(original).protocol; - const dest = new URL$1(destination).protocol; +/***/ 694: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return orig === dest; -}; +"use strict"; -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - Body.Promise = fetch.Promise; +var _validate = _interopRequireDefault(__nccwpck_require__(4875)); - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } - let response = null; + return parseInt(uuid.slice(14, 15), 16); +} - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - destroyStream(request.body, error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; +var _default = version; +exports["default"] = _default; - if (signal && signal.aborted) { - abort(); - return; - } +/***/ }), - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; +/***/ 4256: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // send request - const req = send(options); - let reqTimeout; +"use strict"; - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } +var punycode = __nccwpck_require__(5477); +var mappingTable = __nccwpck_require__(2020); - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} - if (response && response.body) { - destroyStream(response.body, err); - } +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; - finalize(); - }); + while (start <= end) { + var mid = Math.floor((start + end) / 2); - fixResponseChunkedTransferBadEnding(req, function (err) { - if (signal && signal.aborted) { - return; - } + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } - if (response && response.body) { - destroyStream(response.body, err); - } - }); + return null; +} - /* c8 ignore next 18 */ - if (parseInt(process.version.substring(1)) < 14) { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. 
- req.on('socket', function (s) { - s.addListener('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = s.listenerCount('data') > 0; +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; - // if end happened before close but the socket didn't emit an error, do it now - if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', err); - } - }); - }); - } +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} - req.on('response', function (res) { - clearTimeout(reqTimeout); +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; - const headers = createHeadersLenient(res.headers); + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } + processed += String.fromCodePoint(codePoint); + break; + } + } - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + return { + string: processed, + error: hasError + }; +} - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } +var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } + var error = false; - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } + return { + label: label, + error: error + }; +} - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); + return { + string: labels.join("."), + error: result.error + }; +} - // HTTP-network fetch step 12.1.1.4: handle content codings +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } + if (result.error) return null; + return labels.join("."); +}; - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - raw.on('end', function () { - // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
- if (!response) { - response = new Response(body, response_options); - resolve(response); - } - }); - return; - } +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } + return { + domain: result.string, + error: result.error + }; +}; - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - writeToStream(req, request); - }); -} -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - let socket; - request.on('socket', function (s) { - socket = s; - }); +/***/ }), - request.on('response', function (response) { - const headers = response.headers; +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { - response.once('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = socket.listenerCount('data') > 0; +module.exports = __nccwpck_require__(4219); - if (hasDataListener && !hadError) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(err); - } - }); - } - }); -} -function destroyStream(stream, err) { - if (stream.destroy) { - stream.destroy(err); - } else { - // node < 8 - stream.emit('error', err); - stream.end(); - } -} +/***/ }), -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// expose Promise -fetch.Promise = global.Promise; +"use strict"; -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports["default"] = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); -/***/ }), -/***/ 7994: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; -/** - * Advanced Encryption Standard (AES) implementation. - * - * This implementation is based on the public domain library 'jscrypto' which - * was written by: - * - * Emily Stark (estark@stanford.edu) - * Mike Hamburg (mhamburg@stanford.edu) - * Dan Boneh (dabo@cs.stanford.edu) - * - * Parts of this code are based on the OpenSSL implementation of AES: - * http://www.openssl.org - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7088); -__nccwpck_require__(873); -__nccwpck_require__(8339); -/* AES API */ -module.exports = forge.aes = forge.aes || {}; +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('AES-', key); - * cipher.start({iv: iv}); - * - * Creates an AES cipher object to encrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as a string of bytes, an array of bytes, - * a byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.startEncrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: false, - mode: mode - }); - cipher.start(iv); - return cipher; -}; +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('AES-', key); - * - * Creates an AES cipher object to encrypt data using the given symmetric key. - * - * The key may be given as a string of bytes, an array of bytes, a - * byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.createEncryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: false, - mode: mode - }); -}; +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('AES-', key); - * decipher.start({iv: iv}); - * - * Creates an AES cipher object to decrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as a string of bytes, an array of bytes, - * a byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.startDecrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: true, - mode: mode - }); - cipher.start(iv); - return cipher; -}; +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('AES-', key); - * - * Creates an AES cipher object to decrypt data using the given symmetric key. - * - * The key may be given as a string of bytes, an array of bytes, a - * byte buffer, or an array of 32-bit words. - * - * @param key the symmetric key to use. 
- * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.aes.createDecryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: true, - mode: mode - }); -}; -/** - * Creates a new AES cipher algorithm object. - * - * @param name the name of the algorithm. - * @param mode the mode factory function. - * - * @return the AES algorithm object. - */ -forge.aes.Algorithm = function(name, mode) { - if(!init) { - initialize(); - } +function TunnelingAgent(options) { var self = this; - self.name = name; - self.mode = new mode({ - blockSize: 16, - cipher: { - encrypt: function(inBlock, outBlock) { - return _updateBlock(self._w, inBlock, outBlock, false); - }, - decrypt: function(inBlock, outBlock) { - return _updateBlock(self._w, inBlock, outBlock, true); + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; } } + socket.destroy(); + self.removeSocket(socket); }); - self._init = false; -}; +} +util.inherits(TunnelingAgent, events.EventEmitter); -/** - * Initializes this AES algorithm by expanding its key. - * - * @param options the options to use. - * key the key to use with this algorithm. - * decrypt true if the algorithm should be initialized for decryption, - * false for encryption. - */ -forge.aes.Algorithm.prototype.initialize = function(options) { - if(this._init) { +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); return; } - var key = options.key; - var tmp; + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); - /* Note: The key may be a string of bytes, an array of bytes, a byte - buffer, or an array of 32-bit integers. If the key is in bytes, then - it must be 16, 24, or 32 bytes in length. If it is in 32-bit - integers, it must be 4, 6, or 8 integers long. 
*/ + function onFree() { + self.emit('free', socket, options); + } - if(typeof key === 'string' && - (key.length === 16 || key.length === 24 || key.length === 32)) { - // convert key string into byte buffer - key = forge.util.createBuffer(key); - } else if(forge.util.isArray(key) && - (key.length === 16 || key.length === 24 || key.length === 32)) { - // convert key integer array into byte buffer - tmp = key; - key = forge.util.createBuffer(); - for(var i = 0; i < tmp.length; ++i) { - key.putByte(tmp[i]); + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); } - } + }); +}; - // convert key byte buffer into 32-bit integer array - if(!forge.util.isArray(key)) { - tmp = key; - key = []; +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); - // key lengths of 16, 24, 32 bytes allowed - var len = tmp.length(); - if(len === 16 || len === 24 || len === 32) { - len = len >>> 2; - for(var i = 0; i < len; ++i) { - key.push(tmp.getInt32()); - } + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; } - - // key must be an array of 32-bit integers by now - if(!forge.util.isArray(key) || - !(key.length === 4 || key.length === 6 || key.length === 8)) { - throw new Error('Invalid key parameter.'); + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); } - // encryption operation is always used for these modes - var mode = this.mode.name; - var encryptOp = (['CFB', 'OFB', 'CTR', 'GCM'].indexOf(mode) !== -1); + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); - // do key expansion - this._w = _expandKey(key, options.decrypt && !encryptOp); - this._init = true; -}; + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } -/** - * Expands a key. Typically only used for testing. - * - * @param key the symmetric key to expand, as an array of 32-bit words. - * @param decrypt true to expand for decryption, false for encryption. - * - * @return the expanded key. - */ -forge.aes._expandKey = function(key, decrypt) { - if(!init) { - initialize(); + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); } - return _expandKey(key, decrypt); -}; -/** - * Updates a single block. Typically only used for testing. - * - * @param w the expanded key to use. - * @param input an array of block-size 32-bit words. - * @param output an array of block-size 32-bit words. - * @param decrypt true to decrypt, false to encrypt. 
- */ -forge.aes._updateBlock = _updateBlock; + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); -/** Register AES algorithms **/ + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } -registerAlgorithm('AES-ECB', forge.cipher.modes.ecb); -registerAlgorithm('AES-CBC', forge.cipher.modes.cbc); -registerAlgorithm('AES-CFB', forge.cipher.modes.cfb); -registerAlgorithm('AES-OFB', forge.cipher.modes.ofb); -registerAlgorithm('AES-CTR', forge.cipher.modes.ctr); -registerAlgorithm('AES-GCM', forge.cipher.modes.gcm); + function onError(cause) { + connectReq.removeAllListeners(); -function registerAlgorithm(name, mode) { - var factory = function() { - return new forge.aes.Algorithm(name, mode); - }; - forge.cipher.registerAlgorithm(name, factory); -} + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; -/** AES implementation **/ +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); -var init = false; // not yet initialized -var Nb = 4; // number of words comprising the state (AES = 4) -var sbox; // non-linear substitution table used in key expansion -var isbox; // inversion of sbox -var rcon; // round constant word array -var mix; // mix-columns table -var imix; // inverse mix-columns table + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; -/** - * Performs initialization, ie: precomputes tables to optimize for speed. - * - * One way to understand how AES works is to imagine that 'addition' and - * 'multiplication' are interfaces that require certain mathematical - * properties to hold true (ie: they are associative) but they might have - * different implementations and produce different kinds of results ... - * provided that their mathematical properties remain true. AES defines - * its own methods of addition and multiplication but keeps some important - * properties the same, ie: associativity and distributivity. The - * explanation below tries to shed some light on how AES defines addition - * and multiplication of bytes and 32-bit words in order to perform its - * encryption and decryption algorithms. 
- * - * The basics: - * - * The AES algorithm views bytes as binary representations of polynomials - * that have either 1 or 0 as the coefficients. It defines the addition - * or subtraction of two bytes as the XOR operation. It also defines the - * multiplication of two bytes as a finite field referred to as GF(2^8) - * (Note: 'GF' means "Galois Field" which is a field that contains a finite - * number of elements so GF(2^8) has 256 elements). - * - * This means that any two bytes can be represented as binary polynomials; - * when they multiplied together and modularly reduced by an irreducible - * polynomial of the 8th degree, the results are the field GF(2^8). The - * specific irreducible polynomial that AES uses in hexadecimal is 0x11b. - * This multiplication is associative with 0x01 as the identity: - * - * (b * 0x01 = GF(b, 0x01) = b). - * - * The operation GF(b, 0x02) can be performed at the byte level by left - * shifting b once and then XOR'ing it (to perform the modular reduction) - * with 0x11b if b is >= 128. Repeated application of the multiplication - * of 0x02 can be used to implement the multiplication of any two bytes. - * - * For instance, multiplying 0x57 and 0x13, denoted as GF(0x57, 0x13), can - * be performed by factoring 0x13 into 0x01, 0x02, and 0x10. Then these - * factors can each be multiplied by 0x57 and then added together. To do - * the multiplication, values for 0x57 multiplied by each of these 3 factors - * can be precomputed and stored in a table. To add them, the values from - * the table are XOR'd together. - * - * AES also defines addition and multiplication of words, that is 4-byte - * numbers represented as polynomials of 3 degrees where the coefficients - * are the values of the bytes. - * - * The word [a0, a1, a2, a3] is a polynomial a3x^3 + a2x^2 + a1x + a0. - * - * Addition is performed by XOR'ing like powers of x. Multiplication - * is performed in two steps, the first is an algebriac expansion as - * you would do normally (where addition is XOR). But the result is - * a polynomial larger than 3 degrees and thus it cannot fit in a word. So - * next the result is modularly reduced by an AES-specific polynomial of - * degree 4 which will always produce a polynomial of less than 4 degrees - * such that it will fit in a word. In AES, this polynomial is x^4 + 1. - * - * The modular product of two polynomials 'a' and 'b' is thus: - * - * d(x) = d3x^3 + d2x^2 + d1x + d0 - * with - * d0 = GF(a0, b0) ^ GF(a3, b1) ^ GF(a2, b2) ^ GF(a1, b3) - * d1 = GF(a1, b0) ^ GF(a0, b1) ^ GF(a3, b2) ^ GF(a2, b3) - * d2 = GF(a2, b0) ^ GF(a1, b1) ^ GF(a0, b2) ^ GF(a3, b3) - * d3 = GF(a3, b0) ^ GF(a2, b1) ^ GF(a1, b2) ^ GF(a0, b3) - * - * As a matrix: - * - * [d0] = [a0 a3 a2 a1][b0] - * [d1] [a1 a0 a3 a2][b1] - * [d2] [a2 a1 a0 a3][b2] - * [d3] [a3 a2 a1 a0][b3] - * - * Special polynomials defined by AES (0x02 == {02}): - * a(x) = {03}x^3 + {01}x^2 + {01}x + {02} - * a^-1(x) = {0b}x^3 + {0d}x^2 + {09}x + {0e}. - * - * These polynomials are used in the MixColumns() and InverseMixColumns() - * operations, respectively, to cause each element in the state to affect - * the output (referred to as diffusing). - * - * RotWord() uses: a0 = a1 = a2 = {00} and a3 = {01}, which is the - * polynomial x3. - * - * The ShiftRows() method modifies the last 3 rows in the state (where - * the state is 4 words with 4 bytes per word) by shifting bytes cyclically. - * The 1st byte in the second row is moved to the end of the row. 
The 1st - * and 2nd bytes in the third row are moved to the end of the row. The 1st, - * 2nd, and 3rd bytes are moved in the fourth row. - * - * More details on how AES arithmetic works: - * - * In the polynomial representation of binary numbers, XOR performs addition - * and subtraction and multiplication in GF(2^8) denoted as GF(a, b) - * corresponds with the multiplication of polynomials modulo an irreducible - * polynomial of degree 8. In other words, for AES, GF(a, b) will multiply - * polynomial 'a' with polynomial 'b' and then do a modular reduction by - * an AES-specific irreducible polynomial of degree 8. - * - * A polynomial is irreducible if its only divisors are one and itself. For - * the AES algorithm, this irreducible polynomial is: - * - * m(x) = x^8 + x^4 + x^3 + x + 1, - * - * or {01}{1b} in hexadecimal notation, where each coefficient is a bit: - * 100011011 = 283 = 0x11b. - * - * For example, GF(0x57, 0x83) = 0xc1 because - * - * 0x57 = 87 = 01010111 = x^6 + x^4 + x^2 + x + 1 - * 0x85 = 131 = 10000101 = x^7 + x + 1 - * - * (x^6 + x^4 + x^2 + x + 1) * (x^7 + x + 1) - * = x^13 + x^11 + x^9 + x^8 + x^7 + - * x^7 + x^5 + x^3 + x^2 + x + - * x^6 + x^4 + x^2 + x + 1 - * = x^13 + x^11 + x^9 + x^8 + x^6 + x^5 + x^4 + x^3 + 1 = y - * y modulo (x^8 + x^4 + x^3 + x + 1) - * = x^7 + x^6 + 1. - * - * The modular reduction by m(x) guarantees the result will be a binary - * polynomial of less than degree 8, so that it can fit in a byte. - * - * The operation to multiply a binary polynomial b with x (the polynomial - * x in binary representation is 00000010) is: - * - * b_7x^8 + b_6x^7 + b_5x^6 + b_4x^5 + b_3x^4 + b_2x^3 + b_1x^2 + b_0x^1 - * - * To get GF(b, x) we must reduce that by m(x). If b_7 is 0 (that is the - * most significant bit is 0 in b) then the result is already reduced. If - * it is 1, then we can reduce it by subtracting m(x) via an XOR. - * - * It follows that multiplication by x (00000010 or 0x02) can be implemented - * by performing a left shift followed by a conditional bitwise XOR with - * 0x1b. This operation on bytes is denoted by xtime(). Multiplication by - * higher powers of x can be implemented by repeated application of xtime(). - * - * By adding intermediate results, multiplication by any constant can be - * implemented. For instance: - * - * GF(0x57, 0x13) = 0xfe because: - * - * xtime(b) = (b & 128) ? (b << 1 ^ 0x11b) : (b << 1) - * - * Note: We XOR with 0x11b instead of 0x1b because in javascript our - * datatype for b can be larger than 1 byte, so a left shift will not - * automatically eliminate bits that overflow a byte ... by XOR'ing the - * overflow bit with 1 (the extra one from 0x11b) we zero it out. - * - * GF(0x57, 0x02) = xtime(0x57) = 0xae - * GF(0x57, 0x04) = xtime(0xae) = 0x47 - * GF(0x57, 0x08) = xtime(0x47) = 0x8e - * GF(0x57, 0x10) = xtime(0x8e) = 0x07 - * - * GF(0x57, 0x13) = GF(0x57, (0x01 ^ 0x02 ^ 0x10)) - * - * And by the distributive property (since XOR is addition and GF() is - * multiplication): - * - * = GF(0x57, 0x01) ^ GF(0x57, 0x02) ^ GF(0x57, 0x10) - * = 0x57 ^ 0xae ^ 0x07 - * = 0xfe. - */ -function initialize() { - init = true; +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); - /* Populate the Rcon table. 
These are the values given by - [x^(i-1),{00},{00},{00}] where x^(i-1) are powers of x (and x = 0x02) - in the field of GF(2^8), where i starts at 1. + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} - rcon[0] = [0x00, 0x00, 0x00, 0x00] - rcon[1] = [0x01, 0x00, 0x00, 0x00] 2^(1-1) = 2^0 = 1 - rcon[2] = [0x02, 0x00, 0x00, 0x00] 2^(2-1) = 2^1 = 2 - ... - rcon[9] = [0x1B, 0x00, 0x00, 0x00] 2^(9-1) = 2^8 = 0x1B - rcon[10] = [0x36, 0x00, 0x00, 0x00] 2^(10-1) = 2^9 = 0x36 - We only store the first byte because it is the only one used. - */ - rcon = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36]; - - // compute xtime table which maps i onto GF(i, 0x02) - var xtime = new Array(256); - for(var i = 0; i < 128; ++i) { - xtime[i] = i << 1; - xtime[i + 128] = (i + 128) << 1 ^ 0x11B; - } - - // compute all other tables - sbox = new Array(256); - isbox = new Array(256); - mix = new Array(4); - imix = new Array(4); - for(var i = 0; i < 4; ++i) { - mix[i] = new Array(256); - imix[i] = new Array(256); - } - var e = 0, ei = 0, e2, e4, e8, sx, sx2, me, ime; - for(var i = 0; i < 256; ++i) { - /* We need to generate the SubBytes() sbox and isbox tables so that - we can perform byte substitutions. This requires us to traverse - all of the elements in GF, find their multiplicative inverses, - and apply to each the following affine transformation: - - bi' = bi ^ b(i + 4) mod 8 ^ b(i + 5) mod 8 ^ b(i + 6) mod 8 ^ - b(i + 7) mod 8 ^ ci - for 0 <= i < 8, where bi is the ith bit of the byte, and ci is the - ith bit of a byte c with the value {63} or {01100011}. - - It is possible to traverse every possible value in a Galois field - using what is referred to as a 'generator'. There are many - generators (128 out of 256): 3,5,6,9,11,82 to name a few. To fully - traverse GF we iterate 255 times, multiplying by our generator - each time. - - On each iteration we can determine the multiplicative inverse for - the current element. - - Suppose there is an element in GF 'e'. For a given generator 'g', - e = g^x. The multiplicative inverse of e is g^(255 - x). It turns - out that if use the inverse of a generator as another generator - it will produce all of the corresponding multiplicative inverses - at the same time. For this reason, we choose 5 as our inverse - generator because it only requires 2 multiplies and 1 add and its - inverse, 82, requires relatively few operations as well. - - In order to apply the affine transformation, the multiplicative - inverse 'ei' of 'e' can be repeatedly XOR'd (4 times) with a - bit-cycling of 'ei'. To do this 'ei' is first stored in 's' and - 'x'. Then 's' is left shifted and the high bit of 's' is made the - low bit. The resulting value is stored in 's'. Then 'x' is XOR'd - with 's' and stored in 'x'. On each subsequent iteration the same - operation is performed. When 4 iterations are complete, 'x' is - XOR'd with 'c' (0x63) and the transformed value is stored in 'x'. - For example: - - s = 01000001 - x = 01000001 - - iteration 1: s = 10000010, x ^= s - iteration 2: s = 00000101, x ^= s - iteration 3: s = 00001010, x ^= s - iteration 4: s = 00010100, x ^= s - x ^= 0x63 - - This can be done with a loop where s = (s << 1) | (s >> 7). However, - it can also be done by using a single 16-bit (in this case 32-bit) - number 'sx'. Since XOR is an associative operation, we can set 'sx' - to 'ei' and then XOR it with 'sx' left-shifted 1,2,3, and 4 times. 
- The most significant bits will flow into the high 8 bit positions - and be correctly XOR'd with one another. All that remains will be - to cycle the high 8 bits by XOR'ing them all with the lower 8 bits - afterwards. - - At the same time we're populating sbox and isbox we can precompute - the multiplication we'll need to do to do MixColumns() later. - */ - - // apply affine transformation - sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4); - sx = (sx >> 8) ^ (sx & 255) ^ 0x63; - - // update tables - sbox[e] = sx; - isbox[sx] = e; - - /* Mixing columns is done using matrix multiplication. The columns - that are to be mixed are each a single word in the current state. - The state has Nb columns (4 columns). Therefore each column is a - 4 byte word. So to mix the columns in a single column 'c' where - its rows are r0, r1, r2, and r3, we use the following matrix - multiplication: - - [2 3 1 1]*[r0,c]=[r'0,c] - [1 2 3 1] [r1,c] [r'1,c] - [1 1 2 3] [r2,c] [r'2,c] - [3 1 1 2] [r3,c] [r'3,c] - - r0, r1, r2, and r3 are each 1 byte of one of the words in the - state (a column). To do matrix multiplication for each mixed - column c' we multiply the corresponding row from the left matrix - with the corresponding column from the right matrix. In total, we - get 4 equations: - - r0,c' = 2*r0,c + 3*r1,c + 1*r2,c + 1*r3,c - r1,c' = 1*r0,c + 2*r1,c + 3*r2,c + 1*r3,c - r2,c' = 1*r0,c + 1*r1,c + 2*r2,c + 3*r3,c - r3,c' = 3*r0,c + 1*r1,c + 1*r2,c + 2*r3,c - - As usual, the multiplication is as previously defined and the - addition is XOR. In order to optimize mixing columns we can store - the multiplication results in tables. If you think of the whole - column as a word (it might help to visualize by mentally rotating - the equations above by counterclockwise 90 degrees) then you can - see that it would be useful to map the multiplications performed on - each byte (r0, r1, r2, r3) onto a word as well. For instance, we - could map 2*r0,1*r0,1*r0,3*r0 onto a word by storing 2*r0 in the - highest 8 bits and 3*r0 in the lowest 8 bits (with the other two - respectively in the middle). This means that a table can be - constructed that uses r0 as an index to the word. We can do the - same with r1, r2, and r3, creating a total of 4 tables. - - To construct a full c', we can just look up each byte of c in - their respective tables and XOR the results together. - - Also, to build each table we only have to calculate the word - for 2,1,1,3 for every byte ... which we can do on each iteration - of this loop since we will iterate over every byte. After we have - calculated 2,1,1,3 we can get the results for the other tables - by cycling the byte at the end to the beginning. For instance - we can take the result of table 2,1,1,3 and produce table 3,2,1,1 - by moving the right most byte to the left most position just like - how you can imagine the 3 moved out of 2,1,1,3 and to the front - to produce 3,2,1,1. - - There is another optimization in that the same multiples of - the current element we need in order to advance our generator - to the next iteration can be reused in performing the 2,1,1,3 - calculation. We also calculate the inverse mix column tables, - with e,9,d,b being the inverse of 2,1,1,3. - - When we're done, and we need to actually mix columns, the first - byte of each state word should be put through mix[0] (2,1,1,3), - the second through mix[1] (3,2,1,1) and so forth. Then they should - be XOR'd together to produce the fully mixed column. 
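// A standalone sketch of mixing one column with the 2,3,1,1 matrix described
// above, using plain GF multiplication instead of the precomputed tables. The
// input column is the familiar FIPS-197 worked example; helper names are
// illustrative.
function mixSingleColumn(col) { // col = [r0, r1, r2, r3]
  var xtime = function(b) {
    return (b & 0x80) ? ((b << 1) ^ 0x11b) : (b << 1);
  };
  var mul = function(a, b) {
    var p = 0;
    while(b) {
      if(b & 1) p ^= a;
      a = xtime(a);
      b >>>= 1;
    }
    return p;
  };
  var r0 = col[0], r1 = col[1], r2 = col[2], r3 = col[3];
  return [
    mul(2, r0) ^ mul(3, r1) ^ r2 ^ r3,
    r0 ^ mul(2, r1) ^ mul(3, r2) ^ r3,
    r0 ^ r1 ^ mul(2, r2) ^ mul(3, r3),
    mul(3, r0) ^ r1 ^ r2 ^ mul(2, r3)
  ];
}
// mixSingleColumn([0xd4, 0xbf, 0x5d, 0x30]) should yield [0x04, 0x66, 0x81, 0xe5].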
- */ - - // calculate mix and imix table values - sx2 = xtime[sx]; - e2 = xtime[e]; - e4 = xtime[e2]; - e8 = xtime[e4]; - me = - (sx2 << 24) ^ // 2 - (sx << 16) ^ // 1 - (sx << 8) ^ // 1 - (sx ^ sx2); // 3 - ime = - (e2 ^ e4 ^ e8) << 24 ^ // E (14) - (e ^ e8) << 16 ^ // 9 - (e ^ e4 ^ e8) << 8 ^ // D (13) - (e ^ e2 ^ e8); // B (11) - // produce each of the mix tables by rotating the 2,1,1,3 value - for(var n = 0; n < 4; ++n) { - mix[n][e] = me; - imix[n][sx] = ime; - // cycle the right most byte to the left most position - // ie: 2,1,1,3 becomes 3,2,1,1 - me = me << 24 | me >>> 8; - ime = ime << 24 | ime >>> 8; - } - - // get next element and inverse - if(e === 0) { - // 1 is the inverse of 1 - e = ei = 1; - } else { - // e = 2e + 2*2*2*(10e)) = multiply e by 82 (chosen generator) - // ei = ei + 2*2*ei = multiply ei by 5 (inverse generator) - e = e2 ^ xtime[xtime[xtime[e2 ^ e8]]]; - ei ^= xtime[xtime[ei]]; - } +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } + return host; // for v0.11 or later } -/** - * Generates a key schedule using the AES key expansion algorithm. - * - * The AES algorithm takes the Cipher Key, K, and performs a Key Expansion - * routine to generate a key schedule. The Key Expansion generates a total - * of Nb*(Nr + 1) words: the algorithm requires an initial set of Nb words, - * and each of the Nr rounds requires Nb words of key data. The resulting - * key schedule consists of a linear array of 4-byte words, denoted [wi ], - * with i in the range 0 <= i < Nb(Nr + 1). - * - * KeyExpansion(byte key[4*Nk], word w[Nb*(Nr+1)], Nk) - * AES-128 (Nb=4, Nk=4, Nr=10) - * AES-192 (Nb=4, Nk=6, Nr=12) - * AES-256 (Nb=4, Nk=8, Nr=14) - * Note: Nr=Nk+6. - * - * Nb is the number of columns (32-bit words) comprising the State (or - * number of bytes in a block). For AES, Nb=4. - * - * @param key the key to schedule (as an array of 32-bit words). - * @param decrypt true to modify the key schedule to decrypt, false not to. - * - * @return the generated key schedule. - */ -function _expandKey(key, decrypt) { - // copy the key's words to initialize the key schedule - var w = key.slice(0); - - /* RotWord() will rotate a word, moving the first byte to the last - byte's position (shifting the other bytes left). - - We will be getting the value of Rcon at i / Nk. 'i' will iterate - from Nk to (Nb * Nr+1). Nk = 4 (4 byte key), Nb = 4 (4 words in - a block), Nr = Nk + 6 (10). Therefore 'i' will iterate from - 4 to 44 (exclusive). Each time we iterate 4 times, i / Nk will - increase by 1. We use a counter iNk to keep track of this. - */ - - // go through the rounds expanding the key - var temp, iNk = 1; - var Nk = w.length; - var Nr1 = Nk + 6 + 1; - var end = Nb * Nr1; - for(var i = Nk; i < end; ++i) { - temp = w[i - 1]; - if(i % Nk === 0) { - // temp = SubWord(RotWord(temp)) ^ Rcon[i / Nk] - temp = - sbox[temp >>> 16 & 255] << 24 ^ - sbox[temp >>> 8 & 255] << 16 ^ - sbox[temp & 255] << 8 ^ - sbox[temp >>> 24] ^ (rcon[iNk] << 24); - iNk++; - } else if(Nk > 6 && (i % Nk === 4)) { - // temp = SubWord(temp) - temp = - sbox[temp >>> 24] << 24 ^ - sbox[temp >>> 16 & 255] << 16 ^ - sbox[temp >>> 8 & 255] << 8 ^ - sbox[temp & 255]; - } - w[i] = w[i - Nk] ^ temp; - } - - /* When we are updating a cipher block we always use the code path for - encryption whether we are decrypting or not (to shorten code and - simplify the generation of look up tables). 
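// A quick illustration of the Nb*(Nr+1) sizing described above, assuming the
// standard AES parameters (Nb = 4, Nr = Nk + 6).
function keyScheduleWords(Nk) {
  var Nb = 4;
  var Nr = Nk + 6;
  return Nb * (Nr + 1);
}
// keyScheduleWords(4) === 44  (AES-128)
// keyScheduleWords(6) === 52  (AES-192)
// keyScheduleWords(8) === 60  (AES-256)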
However, because there - are differences in the decryption algorithm, other than just swapping - in different look up tables, we must transform our key schedule to - account for these changes: - - 1. The decryption algorithm gets its key rounds in reverse order. - 2. The decryption algorithm adds the round key before mixing columns - instead of afterwards. - - We don't need to modify our key schedule to handle the first case, - we can just traverse the key schedule in reverse order when decrypting. - - The second case requires a little work. - - The tables we built for performing rounds will take an input and then - perform SubBytes() and MixColumns() or, for the decrypt version, - InvSubBytes() and InvMixColumns(). But the decrypt algorithm requires - us to AddRoundKey() before InvMixColumns(). This means we'll need to - apply some transformations to the round key to inverse-mix its columns - so they'll be correct for moving AddRoundKey() to after the state has - had its columns inverse-mixed. - - To inverse-mix the columns of the state when we're decrypting we use a - lookup table that will apply InvSubBytes() and InvMixColumns() at the - same time. However, the round key's bytes are not inverse-substituted - in the decryption algorithm. To get around this problem, we can first - substitute the bytes in the round key so that when we apply the - transformation via the InvSubBytes()+InvMixColumns() table, it will - undo our substitution leaving us with the original value that we - want -- and then inverse-mix that value. - - This change will correctly alter our key schedule so that we can XOR - each round key with our already transformed decryption state. This - allows us to use the same code path as the encryption algorithm. - - We make one more change to the decryption key. Since the decryption - algorithm runs in reverse from the encryption algorithm, we reverse - the order of the round keys to avoid having to iterate over the key - schedule backwards when running the encryption algorithm later in - decryption mode. In addition to reversing the order of the round keys, - we also swap each round key's 2nd and 4th rows. See the comments - section where rounds are performed for more details about why this is - done. These changes are done inline with the other substitution - described above. 
- */ - if(decrypt) { - var tmp; - var m0 = imix[0]; - var m1 = imix[1]; - var m2 = imix[2]; - var m3 = imix[3]; - var wnew = w.slice(0); - end = w.length; - for(var i = 0, wi = end - Nb; i < end; i += Nb, wi -= Nb) { - // do not sub the first or last round key (round keys are Nb - // words) as no column mixing is performed before they are added, - // but do change the key order - if(i === 0 || i === (end - Nb)) { - wnew[i] = w[wi]; - wnew[i + 1] = w[wi + 3]; - wnew[i + 2] = w[wi + 2]; - wnew[i + 3] = w[wi + 1]; - } else { - // substitute each round key byte because the inverse-mix - // table will inverse-substitute it (effectively cancel the - // substitution because round key bytes aren't sub'd in - // decryption mode) and swap indexes 3 and 1 - for(var n = 0; n < Nb; ++n) { - tmp = w[wi + n]; - wnew[i + (3&-n)] = - m0[sbox[tmp >>> 24]] ^ - m1[sbox[tmp >>> 16 & 255]] ^ - m2[sbox[tmp >>> 8 & 255]] ^ - m3[sbox[tmp & 255]]; +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; } } } - w = wnew; } - - return w; + return target; } -/** - * Updates a single block (16 bytes) using AES. The update will either - * encrypt or decrypt the block. - * - * @param w the key schedule. - * @param input the input block (an array of 32-bit words). - * @param output the updated output block. - * @param decrypt true to decrypt the block, false to encrypt it. - */ -function _updateBlock(w, input, output, decrypt) { - /* - Cipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)]) - begin - byte state[4,Nb] - state = in - AddRoundKey(state, w[0, Nb-1]) - for round = 1 step 1 to Nr-1 - SubBytes(state) - ShiftRows(state) - MixColumns(state) - AddRoundKey(state, w[round*Nb, (round+1)*Nb-1]) - end for - SubBytes(state) - ShiftRows(state) - AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - out = state - end - - InvCipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)]) - begin - byte state[4,Nb] - state = in - AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - for round = Nr-1 step -1 downto 1 - InvShiftRows(state) - InvSubBytes(state) - AddRoundKey(state, w[round*Nb, (round+1)*Nb-1]) - InvMixColumns(state) - end for - InvShiftRows(state) - InvSubBytes(state) - AddRoundKey(state, w[0, Nb-1]) - out = state - end - */ - // Encrypt: AddRoundKey(state, w[0, Nb-1]) - // Decrypt: AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - var Nr = w.length / 4 - 1; - var m0, m1, m2, m3, sub; - if(decrypt) { - m0 = imix[0]; - m1 = imix[1]; - m2 = imix[2]; - m3 = imix[3]; - sub = isbox; - } else { - m0 = mix[0]; - m1 = mix[1]; - m2 = mix[2]; - m3 = mix[3]; - sub = sbox; - } - var a, b, c, d, a2, b2, c2; - a = input[0] ^ w[0]; - b = input[decrypt ? 3 : 1] ^ w[1]; - c = input[2] ^ w[2]; - d = input[decrypt ? 1 : 3] ^ w[3]; - var i = 3; - - /* In order to share code we follow the encryption algorithm when both - encrypting and decrypting. To account for the changes required in the - decryption algorithm, we use different lookup tables when decrypting - and use a modified key schedule to account for the difference in the - order of transformations applied when performing rounds. We also get - key rounds in reverse order (relative to encryption). */ - for(var round = 1; round < Nr; ++round) { - /* As described above, we'll be using table lookups to perform the - column mixing. 
Each column is stored as a word in the state (the - array 'input' has one column as a word at each index). In order to - mix a column, we perform these transformations on each row in c, - which is 1 byte in each word. The new column for c0 is c'0: - - m0 m1 m2 m3 - r0,c'0 = 2*r0,c0 + 3*r1,c0 + 1*r2,c0 + 1*r3,c0 - r1,c'0 = 1*r0,c0 + 2*r1,c0 + 3*r2,c0 + 1*r3,c0 - r2,c'0 = 1*r0,c0 + 1*r1,c0 + 2*r2,c0 + 3*r3,c0 - r3,c'0 = 3*r0,c0 + 1*r1,c0 + 1*r2,c0 + 2*r3,c0 - - So using mix tables where c0 is a word with r0 being its upper - 8 bits and r3 being its lower 8 bits: - - m0[c0 >> 24] will yield this word: [2*r0,1*r0,1*r0,3*r0] - ... - m3[c0 & 255] will yield this word: [1*r3,1*r3,3*r3,2*r3] - - Therefore to mix the columns in each word in the state we - do the following (& 255 omitted for brevity): - c'0,r0 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - c'0,r1 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - c'0,r2 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - c'0,r3 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] - - However, before mixing, the algorithm requires us to perform - ShiftRows(). The ShiftRows() transformation cyclically shifts the - last 3 rows of the state over different offsets. The first row - (r = 0) is not shifted. - - s'_r,c = s_r,(c + shift(r, Nb) mod Nb - for 0 < r < 4 and 0 <= c < Nb and - shift(1, 4) = 1 - shift(2, 4) = 2 - shift(3, 4) = 3. - - This causes the first byte in r = 1 to be moved to the end of - the row, the first 2 bytes in r = 2 to be moved to the end of - the row, the first 3 bytes in r = 3 to be moved to the end of - the row: - - r1: [c0 c1 c2 c3] => [c1 c2 c3 c0] - r2: [c0 c1 c2 c3] [c2 c3 c0 c1] - r3: [c0 c1 c2 c3] [c3 c0 c1 c2] - - We can make these substitutions inline with our column mixing to - generate an updated set of equations to produce each word in the - state (note the columns have changed positions): - - c0 c1 c2 c3 => c0 c1 c2 c3 - c0 c1 c2 c3 c1 c2 c3 c0 (cycled 1 byte) - c0 c1 c2 c3 c2 c3 c0 c1 (cycled 2 bytes) - c0 c1 c2 c3 c3 c0 c1 c2 (cycled 3 bytes) - - Therefore: - - c'0 = 2*r0,c0 + 3*r1,c1 + 1*r2,c2 + 1*r3,c3 - c'0 = 1*r0,c0 + 2*r1,c1 + 3*r2,c2 + 1*r3,c3 - c'0 = 1*r0,c0 + 1*r1,c1 + 2*r2,c2 + 3*r3,c3 - c'0 = 3*r0,c0 + 1*r1,c1 + 1*r2,c2 + 2*r3,c3 - - c'1 = 2*r0,c1 + 3*r1,c2 + 1*r2,c3 + 1*r3,c0 - c'1 = 1*r0,c1 + 2*r1,c2 + 3*r2,c3 + 1*r3,c0 - c'1 = 1*r0,c1 + 1*r1,c2 + 2*r2,c3 + 3*r3,c0 - c'1 = 3*r0,c1 + 1*r1,c2 + 1*r2,c3 + 2*r3,c0 - - ... and so forth for c'2 and c'3. The important distinction is - that the columns are cycling, with c0 being used with the m0 - map when calculating c0, but c1 being used with the m0 map when - calculating c1 ... and so forth. - - When performing the inverse we transform the mirror image and - skip the bottom row, instead of the top one, and move upwards: - - c3 c2 c1 c0 => c0 c3 c2 c1 (cycled 3 bytes) *same as encryption - c3 c2 c1 c0 c1 c0 c3 c2 (cycled 2 bytes) - c3 c2 c1 c0 c2 c1 c0 c3 (cycled 1 byte) *same as encryption - c3 c2 c1 c0 c3 c2 c1 c0 - - If you compare the resulting matrices for ShiftRows()+MixColumns() - and for InvShiftRows()+InvMixColumns() the 2nd and 4th columns are - different (in encrypt mode vs. decrypt mode). So in order to use - the same code to handle both encryption and decryption, we will - need to do some mapping. 
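// A plain sketch of the ShiftRows() step described above, operating on a state
// laid out as 4 rows of 4 bytes: row r is rotated left by r positions. The
// bundled code never materializes this step on its own; it is folded into the
// table lookups as explained above.
function shiftRows(rows) {
  for(var r = 1; r < 4; ++r) {
    rows[r] = rows[r].slice(r).concat(rows[r].slice(0, r));
  }
  return rows;
}
// shiftRows([[ 0,  1,  2,  3],
//            [ 4,  5,  6,  7],
//            [ 8,  9, 10, 11],
//            [12, 13, 14, 15]])
// => [[0, 1, 2, 3], [5, 6, 7, 4], [10, 11, 8, 9], [15, 12, 13, 14]]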
- - If in encryption mode we let a=c0, b=c1, c=c2, d=c3, and r be - a row number in the state, then the resulting matrix in encryption - mode for applying the above transformations would be: - - r1: a b c d - r2: b c d a - r3: c d a b - r4: d a b c - - If we did the same in decryption mode we would get: - - r1: a d c b - r2: b a d c - r3: c b a d - r4: d c b a - - If instead we swap d and b (set b=c3 and d=c1), then we get: - - r1: a b c d - r2: d a b c - r3: c d a b - r4: b c d a - - Now the 1st and 3rd rows are the same as the encryption matrix. All - we need to do then to make the mapping exactly the same is to swap - the 2nd and 4th rows when in decryption mode. To do this without - having to do it on each iteration, we swapped the 2nd and 4th rows - in the decryption key schedule. We also have to do the swap above - when we first pull in the input and when we set the final output. */ - a2 = - m0[a >>> 24] ^ - m1[b >>> 16 & 255] ^ - m2[c >>> 8 & 255] ^ - m3[d & 255] ^ w[++i]; - b2 = - m0[b >>> 24] ^ - m1[c >>> 16 & 255] ^ - m2[d >>> 8 & 255] ^ - m3[a & 255] ^ w[++i]; - c2 = - m0[c >>> 24] ^ - m1[d >>> 16 & 255] ^ - m2[a >>> 8 & 255] ^ - m3[b & 255] ^ w[++i]; - d = - m0[d >>> 24] ^ - m1[a >>> 16 & 255] ^ - m2[b >>> 8 & 255] ^ - m3[c & 255] ^ w[++i]; - a = a2; - b = b2; - c = c2; +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); } - - /* - Encrypt: - SubBytes(state) - ShiftRows(state) - AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) - - Decrypt: - InvShiftRows(state) - InvSubBytes(state) - AddRoundKey(state, w[0, Nb-1]) - */ - // Note: rows are shifted inline - output[0] = - (sub[a >>> 24] << 24) ^ - (sub[b >>> 16 & 255] << 16) ^ - (sub[c >>> 8 & 255] << 8) ^ - (sub[d & 255]) ^ w[++i]; - output[decrypt ? 3 : 1] = - (sub[b >>> 24] << 24) ^ - (sub[c >>> 16 & 255] << 16) ^ - (sub[d >>> 8 & 255] << 8) ^ - (sub[a & 255]) ^ w[++i]; - output[2] = - (sub[c >>> 24] << 24) ^ - (sub[d >>> 16 & 255] << 16) ^ - (sub[a >>> 8 & 255] << 8) ^ - (sub[b & 255]) ^ w[++i]; - output[decrypt ? 1 : 3] = - (sub[d >>> 24] << 24) ^ - (sub[a >>> 16 & 255] << 16) ^ - (sub[b >>> 8 & 255] << 8) ^ - (sub[c & 255]) ^ w[++i]; +} else { + debug = function() {}; } +exports.debug = debug; // for test -/** - * Deprecated. Instead, use: - * - * forge.cipher.createCipher('AES-', key); - * forge.cipher.createDecipher('AES-', key); - * - * Creates a deprecated AES cipher object. This object's mode will default to - * CBC (cipher-block-chaining). - * - * The key and iv may be given as a string of bytes, an array of bytes, a - * byte buffer, or an array of 32-bit words. - * - * @param options the options to use. - * key the symmetric key to use. - * output the buffer to write to. - * decrypt true for decryption, false for encryption. - * mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. 
- */ -function _createCipher(options) { - options = options || {}; - var mode = (options.mode || 'CBC').toUpperCase(); - var algorithm = 'AES-' + mode; - var cipher; - if(options.decrypt) { - cipher = forge.cipher.createDecipher(algorithm, options.key); - } else { - cipher = forge.cipher.createCipher(algorithm, options.key); - } +/***/ }), - // backwards compatible start API - var start = cipher.start; - cipher.start = function(iv, options) { - // backwards compatibility: support second arg as output buffer - var output = null; - if(options instanceof forge.util.ByteBuffer) { - output = options; - options = {}; - } - options = options || {}; - options.output = output; - options.iv = iv; - start.call(cipher, options); - }; +/***/ 1773: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return cipher; -} +"use strict"; -/***/ }), +const Client = __nccwpck_require__(3598) +const Dispatcher = __nccwpck_require__(412) +const errors = __nccwpck_require__(8045) +const Pool = __nccwpck_require__(4634) +const BalancedPool = __nccwpck_require__(7931) +const Agent = __nccwpck_require__(7890) +const util = __nccwpck_require__(3983) +const { InvalidArgumentError } = errors +const api = __nccwpck_require__(4059) +const buildConnector = __nccwpck_require__(2067) +const MockClient = __nccwpck_require__(8687) +const MockAgent = __nccwpck_require__(6771) +const MockPool = __nccwpck_require__(6193) +const mockErrors = __nccwpck_require__(888) +const ProxyAgent = __nccwpck_require__(7858) +const RetryHandler = __nccwpck_require__(2286) +const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892) +const DecoratorHandler = __nccwpck_require__(6930) +const RedirectHandler = __nccwpck_require__(2860) +const createRedirectInterceptor = __nccwpck_require__(8861) + +let hasCrypto +try { + __nccwpck_require__(6113) + hasCrypto = true +} catch { + hasCrypto = false +} -/***/ 1449: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.assign(Dispatcher.prototype, api) -/** - * A Javascript implementation of AES Cipher Suites for TLS. - * - * @author Dave Longley - * - * Copyright (c) 2009-2015 Digital Bazaar, Inc. - * - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9167); +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler -var tls = module.exports = forge.tls; +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor -/** - * Supported cipher suites. 
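// A hedged usage sketch of the forge.cipher API that the deprecated wrapper
// above delegates to. The key, IV and plaintext are arbitrary example values;
// in the bundle forge is resolved via __nccwpck_require__, here the plain
// package name is assumed.
var forge = require('node-forge');
var key = forge.random.getBytesSync(16);
var iv = forge.random.getBytesSync(16);

var cipher = forge.cipher.createCipher('AES-CBC', key);
cipher.start({iv: iv});
cipher.update(forge.util.createBuffer('some plaintext', 'utf8'));
cipher.finish();
var encrypted = cipher.output.getBytes();

var decipher = forge.cipher.createDecipher('AES-CBC', key);
decipher.start({iv: iv});
decipher.update(forge.util.createBuffer(encrypted));
decipher.finish();
// decipher.output.toString() should round-trip to 'some plaintext'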
- */ -tls.CipherSuites['TLS_RSA_WITH_AES_128_CBC_SHA'] = { - id: [0x00, 0x2f], - name: 'TLS_RSA_WITH_AES_128_CBC_SHA', - initSecurityParameters: function(sp) { - sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes; - sp.cipher_type = tls.CipherType.block; - sp.enc_key_length = 16; - sp.block_length = 16; - sp.fixed_iv_length = 16; - sp.record_iv_length = 16; - sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1; - sp.mac_length = 20; - sp.mac_key_length = 20; - }, - initConnectionState: initConnectionState -}; -tls.CipherSuites['TLS_RSA_WITH_AES_256_CBC_SHA'] = { - id: [0x00, 0x35], - name: 'TLS_RSA_WITH_AES_256_CBC_SHA', - initSecurityParameters: function(sp) { - sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes; - sp.cipher_type = tls.CipherType.block; - sp.enc_key_length = 32; - sp.block_length = 16; - sp.fixed_iv_length = 16; - sp.record_iv_length = 16; - sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1; - sp.mac_length = 20; - sp.mac_key_length = 20; - }, - initConnectionState: initConnectionState -}; +module.exports.buildConnector = buildConnector +module.exports.errors = errors -function initConnectionState(state, c, sp) { - var client = (c.entity === forge.tls.ConnectionEnd.client); +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null + } - // cipher setup - state.read.cipherState = { - init: false, - cipher: forge.cipher.createDecipher('AES-CBC', client ? - sp.keys.server_write_key : sp.keys.client_write_key), - iv: client ? sp.keys.server_write_IV : sp.keys.client_write_IV - }; - state.write.cipherState = { - init: false, - cipher: forge.cipher.createCipher('AES-CBC', client ? - sp.keys.client_write_key : sp.keys.server_write_key), - iv: client ? sp.keys.client_write_IV : sp.keys.server_write_IV - }; - state.read.cipherFunction = decrypt_aes_cbc_sha1; - state.write.cipherFunction = encrypt_aes_cbc_sha1; + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') + } - // MAC setup - state.read.macLength = state.write.macLength = sp.mac_length; - state.read.macFunction = state.write.macFunction = tls.hmac_sha1; -} + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -/** - * Encrypts the TLSCompressed record into a TLSCipherText record using AES - * in CBC mode. - * - * @param record the TLSCompressed record to encrypt. - * @param s the ConnectionState to use. - * - * @return true on success, false on failure. - */ -function encrypt_aes_cbc_sha1(record, s) { - var rval = false; - - // append MAC to fragment, update sequence number - var mac = s.macFunction(s.macKey, s.sequenceNumber, record); - record.fragment.putBytes(mac); - s.updateSequenceNumber(); - - // TLS 1.1+ use an explicit IV every time to protect against CBC attacks - var iv; - if(record.version.minor === tls.Versions.TLS_1_0.minor) { - // use the pre-generated IV when initializing for TLS 1.0, otherwise use - // the residue from the previous encryption - iv = s.cipherState.init ? 
null : s.cipherState.iv; - } else { - iv = forge.random.getBytesSync(16); - } + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } - s.cipherState.init = true; + let path = opts.path + if (!opts.path.startsWith('/')) { + path = `/${path}` + } - // start cipher - var cipher = s.cipherState.cipher; - cipher.start({iv: iv}); + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? url : {} + } - // TLS 1.1+ write IV into output - if(record.version.minor >= tls.Versions.TLS_1_1.minor) { - cipher.output.putBytes(iv); - } + url = util.parseURL(url) + } - // do encryption (default padding is appropriate) - cipher.update(record.fragment); - if(cipher.finish(encrypt_aes_cbc_sha1_padding)) { - // set record fragment to encrypted output - record.fragment = cipher.output; - record.length = record.fragment.length(); - rval = true; - } + const { agent, dispatcher = getGlobalDispatcher() } = opts - return rval; -} + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') + } -/** - * Handles padding for aes_cbc_sha1 in encrypt mode. - * - * @param blockSize the block size. - * @param input the input buffer. - * @param decrypt true in decrypt mode, false in encrypt mode. - * - * @return true on success, false on failure. - */ -function encrypt_aes_cbc_sha1_padding(blockSize, input, decrypt) { - /* The encrypted data length (TLSCiphertext.length) is one more than the sum - of SecurityParameters.block_length, TLSCompressed.length, - SecurityParameters.mac_length, and padding_length. - - The padding may be any length up to 255 bytes long, as long as it results in - the TLSCiphertext.length being an integral multiple of the block length. - Lengths longer than necessary might be desirable to frustrate attacks on a - protocol based on analysis of the lengths of exchanged messages. Each uint8 - in the padding data vector must be filled with the padding length value. - - The padding length should be such that the total size of the - GenericBlockCipher structure is a multiple of the cipher's block length. - Legal values range from zero to 255, inclusive. This length specifies the - length of the padding field exclusive of the padding_length field itself. - - This is slightly different from PKCS#7 because the padding value is 1 - less than the actual number of padding bytes if you include the - padding_length uint8 itself as a padding byte. */ - if(!decrypt) { - // get the number of padding bytes required to reach the blockSize and - // subtract 1 for the padding value (to make room for the padding_length - // uint8) - var padding = blockSize - (input.length() % blockSize); - input.fillWithByte(padding - 1, padding); + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 'PUT' : 'GET') + }, handler) } - return true; } -/** - * Handles padding for aes_cbc_sha1 in decrypt mode. - * - * @param blockSize the block size. - * @param output the output buffer. - * @param decrypt true in decrypt mode, false in encrypt mode. - * - * @return true on success, false on failure. - */ -function decrypt_aes_cbc_sha1_padding(blockSize, output, decrypt) { - var rval = true; - if(decrypt) { - /* The last byte in the output specifies the number of padding bytes not - including itself. 
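// A small sketch of the encrypt-side padding rule described above: the pad
// fills the record out to a block boundary, and every appended byte (including
// the trailing padding_length byte) carries the value padding_length, which is
// one less than the number of bytes appended.
function tlsCbcPadding(fragmentLength, blockSize) {
  var bytesAppended = blockSize - (fragmentLength % blockSize);
  return {
    bytesAppended: bytesAppended,
    paddingLength: bytesAppended - 1 // value written into each appended byte
  };
}
// tlsCbcPadding(29, 16) => { bytesAppended: 3,  paddingLength: 2 }
// tlsCbcPadding(32, 16) => { bytesAppended: 16, paddingLength: 15 }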
Each of the padding bytes has the same value as that - last byte (known as the padding_length). Here we check all padding - bytes to ensure they have the value of padding_length even if one of - them is bad in order to ward-off timing attacks. */ - var len = output.length(); - var paddingLength = output.last(); - for(var i = len - 1 - paddingLength; i < len - 1; ++i) { - rval = rval && (output.at(i) == paddingLength); - } - if(rval) { - // trim off padding bytes and last padding length byte - output.truncate(paddingLength + 1); +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher + +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = (__nccwpck_require__(4881).fetch) } - } - return rval; -} -/** - * Decrypts a TLSCipherText record into a TLSCompressed record using - * AES in CBC mode. - * - * @param record the TLSCipherText record to decrypt. - * @param s the ConnectionState to use. - * - * @return true on success, false on failure. - */ -function decrypt_aes_cbc_sha1(record, s) { - var rval = false; - - var iv; - if(record.version.minor === tls.Versions.TLS_1_0.minor) { - // use pre-generated IV when initializing for TLS 1.0, otherwise use the - // residue from the previous decryption - iv = s.cipherState.init ? null : s.cipherState.iv; - } else { - // TLS 1.1+ use an explicit IV every time to protect against CBC attacks - // that is appended to the record fragment - iv = record.fragment.getBytes(16); + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } + + throw err + } } + module.exports.Headers = __nccwpck_require__(554).Headers + module.exports.Response = __nccwpck_require__(7823).Response + module.exports.Request = __nccwpck_require__(8359).Request + module.exports.FormData = __nccwpck_require__(2015).FormData + module.exports.File = __nccwpck_require__(8511).File + module.exports.FileReader = __nccwpck_require__(1446).FileReader - s.cipherState.init = true; + const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246) - // start cipher - var cipher = s.cipherState.cipher; - cipher.start({iv: iv}); + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin - // do decryption - cipher.update(record.fragment); - rval = cipher.finish(decrypt_aes_cbc_sha1_padding); + const { CacheStorage } = __nccwpck_require__(7907) + const { kConstruct } = __nccwpck_require__(9174) - // even if decryption fails, keep going to minimize timing attacks + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. 
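// A hedged usage sketch of the fetch surface exported above (only present on
// Node >= 16.8, as the version guard above enforces). The URL is an arbitrary
// example; in the bundle the module is resolved via __nccwpck_require__, here
// the plain package name is assumed.
const { fetch, Headers } = require('undici');

async function fetchExample() {
  const res = await fetch('https://example.com', {
    headers: new Headers({ accept: 'text/html' })
  });
  console.log(res.status, res.headers.get('content-type'));
  return res.text();
}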
+ module.exports.caches = new CacheStorage(kConstruct) +} - // decrypted data: - // first (len - 20) bytes = application data - // last 20 bytes = MAC - var macLen = s.macLength; +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724) - // create a random MAC to check against should the mac length check fail - // Note: do this regardless of the failure to keep timing consistent - var mac = forge.random.getBytesSync(macLen); + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie - // get fragment and mac - var len = cipher.output.length(); - if(len >= macLen) { - record.fragment = cipher.output.getBytes(len - macLen); - mac = cipher.output.getBytes(macLen); - } else { - // bad data, but get bytes anyway to try to keep timing consistent - record.fragment = cipher.output.getBytes(); - } - record.fragment = forge.util.createBuffer(record.fragment); - record.length = record.fragment.length(); + const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) - // see if data integrity checks out, update sequence number - var mac2 = s.macFunction(s.macKey, s.sequenceNumber, record); - s.updateSequenceNumber(); - rval = compareMacs(s.macKey, mac, mac2) && rval; - return rval; + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType } -/** - * Safely compare two MACs. This function will compare two MACs in a way - * that protects against timing attacks. - * - * TODO: Expose elsewhere as a utility API. - * - * See: https://www.nccgroup.trust/us/about-us/newsroom-and-events/blog/2011/february/double-hmac-verification/ - * - * @param key the MAC key to use. - * @param mac1 as a binary-encoded string of bytes. - * @param mac2 as a binary-encoded string of bytes. - * - * @return true if the MACs are the same, false if not. - */ -function compareMacs(key, mac1, mac2) { - var hmac = forge.hmac.create(); +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = __nccwpck_require__(4284) - hmac.start('SHA1', key); - hmac.update(mac1); - mac1 = hmac.digest().getBytes(); + module.exports.WebSocket = WebSocket +} - hmac.start(null, null); - hmac.update(mac2); - mac2 = hmac.digest().getBytes(); +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) - return mac1 === mac2; -} +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors /***/ }), -/***/ 9414: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7890: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Copyright (c) 2019 Digital Bazaar, Inc. 
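// A sketch of the double-HMAC idea used by compareMacs() above, expressed with
// Node's built-in crypto instead of forge.hmac (an assumption made for
// brevity): HMAC'ing both values under the same key first makes the final
// comparison's timing independent of where the attacker-controlled MAC differs.
const crypto = require('crypto');

function compareMacsNode(key, mac1, mac2) {
  const h1 = crypto.createHmac('sha1', key).update(mac1).digest();
  const h2 = crypto.createHmac('sha1', key).update(mac2).digest();
  return h1.equals(h2);
}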
- */ +"use strict"; -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -var asn1 = forge.asn1; - -exports.privateKeyValidator = { - // PrivateKeyInfo - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // Version (INTEGER) - name: 'PrivateKeyInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyVersion' - }, { - // privateKeyAlgorithm - name: 'PrivateKeyInfo.privateKeyAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'privateKeyOid' - }] - }, { - // PrivateKey - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'privateKey' - }] -}; -exports.publicKeyValidator = { - name: 'SubjectPublicKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'subjectPublicKeyInfo', - value: [{ - name: 'SubjectPublicKeyInfo.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'publicKeyOid' - }] - }, - // capture group for ed25519PublicKey - { - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - composed: true, - captureBitStringValue: 'ed25519PublicKey' - } - // FIXME: this is capture group for rsaPublicKey, use it in this API or - // discard? - /* { - // subjectPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - value: [{ - // RSAPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - captureAsn1: 'rsaPublicKey' - }] - } */ - ] -}; +const { InvalidArgumentError } = __nccwpck_require__(8045) +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(2785) +const DispatcherBase = __nccwpck_require__(4839) +const Pool = __nccwpck_require__(4634) +const Client = __nccwpck_require__(3598) +const util = __nccwpck_require__(3983) +const createRedirectInterceptor = __nccwpck_require__(8861) +const { WeakRef, FinalizationRegistry } = __nccwpck_require__(6436)() +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') -/***/ }), +function defaultFactory (origin, opts) { + return opts && opts.connections === 1 + ? new Client(origin, opts) + : new Pool(origin, opts) +} -/***/ 9549: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() -/** - * Javascript implementation of Abstract Syntax Notation Number One. - * - * @author Dave Longley - * - * Copyright (c) 2010-2015 Digital Bazaar, Inc. 
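// A hedged sketch of how validators like privateKeyValidator above are usually
// consumed (the validator object is assumed to be in scope): forge.asn1.validate()
// walks a parsed ASN.1 object against the schema and fills the named capture
// slots declared above.
var forge = require('node-forge');

function extractPrivateKeyOid(privateKeyValidator, derBytes) {
  var obj = forge.asn1.fromDer(derBytes);
  var capture = {};
  var errors = [];
  if(!forge.asn1.validate(obj, privateKeyValidator, capture, errors)) {
    throw new Error('Unable to parse PrivateKeyInfo: ' + errors.join('; '));
  }
  // capture.privateKeyOid and capture.privateKey now hold the captured fields
  return forge.asn1.derToOid(capture.privateKeyOid);
}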
- * - * An API for storing data using the Abstract Syntax Notation Number One - * format using DER (Distinguished Encoding Rules) encoding. This encoding is - * commonly used to store data for PKI, i.e. X.509 Certificates, and this - * implementation exists for that purpose. - * - * Abstract Syntax Notation Number One (ASN.1) is used to define the abstract - * syntax of information without restricting the way the information is encoded - * for transmission. It provides a standard that allows for open systems - * communication. ASN.1 defines the syntax of information data and a number of - * simple data types as well as a notation for describing them and specifying - * values for them. - * - * The RSA algorithm creates public and private keys that are often stored in - * X.509 or PKCS#X formats -- which use ASN.1 (encoded in DER format). This - * class provides the most basic functionality required to store and load DSA - * keys that are encoded according to ASN.1. - * - * The most common binary encodings for ASN.1 are BER (Basic Encoding Rules) - * and DER (Distinguished Encoding Rules). DER is just a subset of BER that - * has stricter requirements for how data must be encoded. - * - * Each ASN.1 structure has a tag (a byte identifying the ASN.1 structure type) - * and a byte array for the value of this ASN1 structure which may be data or a - * list of ASN.1 structures. - * - * Each ASN.1 structure using BER is (Tag-Length-Value): - * - * | byte 0 | bytes X | bytes Y | - * |--------|---------|---------- - * | tag | length | value | - * - * ASN.1 allows for tags to be of "High-tag-number form" which allows a tag to - * be two or more octets, but that is not supported by this class. A tag is - * only 1 byte. Bits 1-5 give the tag number (ie the data type within a - * particular 'class'), 6 indicates whether or not the ASN.1 value is - * constructed from other ASN.1 values, and bits 7 and 8 give the 'class'. If - * bits 7 and 8 are both zero, the class is UNIVERSAL. If only bit 7 is set, - * then the class is APPLICATION. If only bit 8 is set, then the class is - * CONTEXT_SPECIFIC. If both bits 7 and 8 are set, then the class is PRIVATE. - * The tag numbers for the data types for the class UNIVERSAL are listed below: - * - * UNIVERSAL 0 Reserved for use by the encoding rules - * UNIVERSAL 1 Boolean type - * UNIVERSAL 2 Integer type - * UNIVERSAL 3 Bitstring type - * UNIVERSAL 4 Octetstring type - * UNIVERSAL 5 Null type - * UNIVERSAL 6 Object identifier type - * UNIVERSAL 7 Object descriptor type - * UNIVERSAL 8 External type and Instance-of type - * UNIVERSAL 9 Real type - * UNIVERSAL 10 Enumerated type - * UNIVERSAL 11 Embedded-pdv type - * UNIVERSAL 12 UTF8String type - * UNIVERSAL 13 Relative object identifier type - * UNIVERSAL 14-15 Reserved for future editions - * UNIVERSAL 16 Sequence and Sequence-of types - * UNIVERSAL 17 Set and Set-of types - * UNIVERSAL 18-22, 25-30 Character string types - * UNIVERSAL 23-24 Time types - * - * The length of an ASN.1 structure is specified after the tag identifier. - * There is a definite form and an indefinite form. The indefinite form may - * be used if the encoding is constructed and not all immediately available. - * The indefinite form is encoded using a length byte with only the 8th bit - * set. The end of the constructed object is marked using end-of-contents - * octets (two zero bytes). 
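// A small sketch of decoding the single tag byte described above into its
// class, constructed flag and tag number (low-tag-number form only, matching
// the limitation noted above).
function parseTagByte(b1) {
  return {
    tagClass: b1 & 0xC0,               // 0x00 UNIVERSAL, 0x40 APPLICATION,
                                       // 0x80 CONTEXT_SPECIFIC, 0xC0 PRIVATE
    constructed: (b1 & 0x20) === 0x20, // bit 6: built from other ASN.1 values
    type: b1 & 0x1F                    // bits 1-5: the tag number
  };
}
// parseTagByte(0x30) => { tagClass: 0x00, constructed: true,  type: 16 } // SEQUENCE
// parseTagByte(0x02) => { tagClass: 0x00, constructed: false, type: 2  } // INTEGER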
- * - * The definite form looks like this: - * - * The length may take up 1 or more bytes, it depends on the length of the - * value of the ASN.1 structure. DER encoding requires that if the ASN.1 - * structure has a value that has a length greater than 127, more than 1 byte - * will be used to store its length, otherwise just one byte will be used. - * This is strict. - * - * In the case that the length of the ASN.1 value is less than 127, 1 octet - * (byte) is used to store the "short form" length. The 8th bit has a value of - * 0 indicating the length is "short form" and not "long form" and bits 7-1 - * give the length of the data. (The 8th bit is the left-most, most significant - * bit: also known as big endian or network format). - * - * In the case that the length of the ASN.1 value is greater than 127, 2 to - * 127 octets (bytes) are used to store the "long form" length. The first - * byte's 8th bit is set to 1 to indicate the length is "long form." Bits 7-1 - * give the number of additional octets. All following octets are in base 256 - * with the most significant digit first (typical big-endian binary unsigned - * integer storage). So, for instance, if the length of a value was 257, the - * first byte would be set to: - * - * 10000010 = 130 = 0x82. - * - * This indicates there are 2 octets (base 256) for the length. The second and - * third bytes (the octets just mentioned) would store the length in base 256: - * - * octet 2: 00000001 = 1 * 256^1 = 256 - * octet 3: 00000001 = 1 * 256^0 = 1 - * total = 257 - * - * The algorithm for converting a js integer value of 257 to base-256 is: - * - * var value = 257; - * var bytes = []; - * bytes[0] = (value >>> 8) & 0xFF; // most significant byte first - * bytes[1] = value & 0xFF; // least significant byte last - * - * On the ASN.1 UNIVERSAL Object Identifier (OID) type: - * - * An OID can be written like: "value1.value2.value3...valueN" - * - * The DER encoding rules: - * - * The first byte has the value 40 * value1 + value2. - * The following bytes, if any, encode the remaining values. Each value is - * encoded in base 128, most significant digit first (big endian), with as - * few digits as possible, and the most significant bit of each byte set - * to 1 except the last in each value's encoding. For example: Given the - * OID "1.2.840.113549", its DER encoding is (remember each byte except the - * last one in each encoding is OR'd with 0x80): - * - * byte 1: 40 * 1 + 2 = 42 = 0x2A. - * bytes 2-3: 128 * 6 + 72 = 840 = 6 72 = 6 72 = 0x0648 = 0x8648 - * bytes 4-6: 16384 * 6 + 128 * 119 + 13 = 6 119 13 = 0x06770D = 0x86F70D - * - * The final value is: 0x2A864886F70D. - * The full OID (including ASN.1 tag and length of 6 bytes) is: - * 0x06062A864886F70D - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(1925); + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } -/* ASN.1 API */ -var asn1 = module.exports = forge.asn1 = forge.asn1 || {}; + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } -/** - * ASN.1 classes. - */ -asn1.Class = { - UNIVERSAL: 0x00, - APPLICATION: 0x40, - CONTEXT_SPECIFIC: 0x80, - PRIVATE: 0xC0 -}; + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } -/** - * ASN.1 types. 
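// A sketch of the definite-form length encoding walked through above: short
// form below 128, otherwise 0x80 | (number of length octets) followed by the
// length in big-endian base 256.
function encodeDerLength(len) {
  if(len < 128) {
    return [len];                      // short form, single octet
  }
  var bytes = [];
  while(len > 0) {
    bytes.unshift(len & 0xFF);         // big-endian base-256 digits
    len >>>= 8;
  }
  bytes.unshift(0x80 | bytes.length);  // long form marker + octet count
  return bytes;
}
// encodeDerLength(5)   => [0x05]
// encodeDerLength(257) => [0x82, 0x01, 0x01]  (matches the worked example above)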
Not all types are supported by this implementation, only - * those necessary to implement a simple PKI are implemented. - */ -asn1.Type = { - NONE: 0, - BOOLEAN: 1, - INTEGER: 2, - BITSTRING: 3, - OCTETSTRING: 4, - NULL: 5, - OID: 6, - ODESC: 7, - EXTERNAL: 8, - REAL: 9, - ENUMERATED: 10, - EMBEDDED: 11, - UTF8: 12, - ROID: 13, - SEQUENCE: 16, - SET: 17, - PRINTABLESTRING: 19, - IA5STRING: 22, - UTCTIME: 23, - GENERALIZEDTIME: 24, - BMPSTRING: 30 -}; + if (connect && typeof connect !== 'function') { + connect = { ...connect } + } -/** - * Creates a new asn1 object. - * - * @param tagClass the tag class for the object. - * @param type the data type (tag number) for the object. - * @param constructed true if the asn1 object is in constructed form. - * @param value the value for the object, if it is not constructed. - * @param [options] the options to use: - * [bitStringContents] the plain BIT STRING content including padding - * byte. - * - * @return the asn1 object. - */ -asn1.create = function(tagClass, type, constructed, value, options) { - /* An asn1 object has a tagClass, a type, a constructed flag, and a - value. The value's type depends on the constructed flag. If - constructed, it will contain a list of other asn1 objects. If not, - it will contain the ASN.1 value as an array of bytes formatted - according to the ASN.1 data type. */ - - // remove undefined values - if(forge.util.isArray(value)) { - var tmp = []; - for(var i = 0; i < value.length; ++i) { - if(value[i] !== undefined) { - tmp.push(value[i]); + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] + + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) } - } - value = tmp; - } + }) - var obj = { - tagClass: tagClass, - type: type, - constructed: constructed, - composed: constructed || forge.util.isArray(value), - value: value - }; - if(options && 'bitStringContents' in options) { - // TODO: copy byte buffer if it's a buffer not a string - obj.bitStringContents = options.bitStringContents; - // TODO: add readonly flag to avoid this overhead - // save copy to detect changes - obj.original = asn1.copy(obj); - } - return obj; -}; + const agent = this -/** - * Copies an asn1 object. - * - * @param obj the asn1 object. - * @param [options] copy options: - * [excludeBitStringContents] true to not copy bitStringContents - * - * @return the a copy of the asn1 object. 
- */ -asn1.copy = function(obj, options) { - var copy; + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } - if(forge.util.isArray(obj)) { - copy = []; - for(var i = 0; i < obj.length; ++i) { - copy.push(asn1.copy(obj[i], options)); + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) } - return copy; - } - if(typeof obj === 'string') { - // TODO: copy byte buffer if it's a buffer not a string - return obj; + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) + } } - copy = { - tagClass: obj.tagClass, - type: obj.type, - constructed: obj.constructed, - composed: obj.composed, - value: asn1.copy(obj.value, options) - }; - if(options && !options.excludeBitStringContents) { - // TODO: copy byte buffer if it's a buffer not a string - copy.bitStringContents = obj.bitStringContents; + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] + } + } + return ret } - return copy; -}; -/** - * Compares asn1 objects for equality. - * - * Note this function does not run in constant time. - * - * @param obj1 the first asn1 object. - * @param obj2 the second asn1 object. - * @param [options] compare options: - * [includeBitStringContents] true to compare bitStringContents - * - * @return true if the asn1 objects are equal. - */ -asn1.equals = function(obj1, obj2, options) { - if(forge.util.isArray(obj1)) { - if(!forge.util.isArray(obj2)) { - return false; + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') } - if(obj1.length !== obj2.length) { - return false; + + const ref = this[kClients].get(key) + + let dispatcher = ref ? 
ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) } - for(var i = 0; i < obj1.length; ++i) { - if(!asn1.equals(obj1[i], obj2[i])) { - return false; + + return dispatcher.dispatch(opts, handler) + } + + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) } } - return true; - } - if(typeof obj1 !== typeof obj2) { - return false; + await Promise.all(closePromises) } - if(typeof obj1 === 'string') { - return obj1 === obj2; + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) + } + } + + await Promise.all(destroyPromises) } +} + +module.exports = Agent + - var equal = obj1.tagClass === obj2.tagClass && - obj1.type === obj2.type && - obj1.constructed === obj2.constructed && - obj1.composed === obj2.composed && - asn1.equals(obj1.value, obj2.value); - if(options && options.includeBitStringContents) { - equal = equal && (obj1.bitStringContents === obj2.bitStringContents); +/***/ }), + +/***/ 7032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { addAbortListener } = __nccwpck_require__(3983) +const { RequestAbortedError } = __nccwpck_require__(8045) + +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') + +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) } +} - return equal; -}; +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null -/** - * Gets the length of a BER-encoded ASN.1 value. - * - * In case the length is not specified, undefined is returned. - * - * @param b the BER-encoded ASN.1 byte buffer, starting with the first - * length byte. - * - * @return the length of the BER-encoded ASN.1 value or undefined. - */ -asn1.getBerValueLength = function(b) { - // TODO: move this function and related DER/BER functions to a der.js - // file; better abstract ASN.1 away from der/ber. - var b2 = b.getByte(); - if(b2 === 0x80) { - return undefined; + if (!signal) { + return } - // see if the length is "short form" or "long form" (bit 8 set) - var length; - var longForm = b2 & 0x80; - if(!longForm) { - // length is just the first byte - length = b2; - } else { - // the number of bytes the length is specified in bits 7 through 1 - // and each length byte is in big-endian base-256 - length = b.getInt((b2 & 0x7F) << 3); + if (signal.aborted) { + abort(self) + return } - return length; -}; -/** - * Check if the byte buffer has enough bytes. Throws an Error if not. - * - * @param bytes the byte buffer to parse from. - * @param remaining the bytes remaining in the current parsing state. - * @param n the number of bytes the buffer must have. 
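// A hedged usage sketch of the Agent exported above: it keeps one Client or
// Pool per origin and can be installed as the process-wide dispatcher. The
// options and URL are arbitrary examples; the plain package name is assumed.
const { Agent, setGlobalDispatcher, request } = require('undici');

const agent = new Agent({ connections: 10, maxRedirections: 2 });
setGlobalDispatcher(agent);

async function agentExample() {
  const { statusCode, body } = await request('https://example.com');
  console.log(statusCode);
  return body.text();
}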
- */ -function _checkBufferLength(bytes, remaining, n) { - if(n > remaining) { - var error = new Error('Too few bytes to parse DER.'); - error.available = bytes.length(); - error.remaining = remaining; - error.requested = n; - throw error; + self[kSignal] = signal + self[kListener] = () => { + abort(self) } + + addAbortListener(self[kSignal], self[kListener]) } -/** - * Gets the length of a BER-encoded ASN.1 value. - * - * In case the length is not specified, undefined is returned. - * - * @param bytes the byte buffer to parse from. - * @param remaining the bytes remaining in the current parsing state. - * - * @return the length of the BER-encoded ASN.1 value or undefined. - */ -var _getValueLength = function(bytes, remaining) { - // TODO: move this function and related DER/BER functions to a der.js - // file; better abstract ASN.1 away from der/ber. - // fromDer already checked that this byte exists - var b2 = bytes.getByte(); - remaining--; - if(b2 === 0x80) { - return undefined; +function removeSignal (self) { + if (!self[kSignal]) { + return } - // see if the length is "short form" or "long form" (bit 8 set) - var length; - var longForm = b2 & 0x80; - if(!longForm) { - // length is just the first byte - length = b2; + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) } else { - // the number of bytes the length is specified in bits 7 through 1 - // and each length byte is in big-endian base-256 - var longFormBytes = b2 & 0x7F; - _checkBufferLength(bytes, remaining, longFormBytes); - length = bytes.getInt(longFormBytes << 3); + self[kSignal].removeListener('abort', self[kListener]) } - // FIXME: this will only happen for 32 bit getInt with high bit set - if(length < 0) { - throw new Error('Negative length: ' + length); - } - return length; -}; -/** - * Parses an asn1 object from a byte buffer in DER format. - * - * @param bytes the byte buffer to parse from. - * @param [strict] true to be strict when checking value lengths, false to - * allow truncated values (default: true). - * @param [options] object with options or boolean strict flag - * [strict] true to be strict when checking value lengths, false to - * allow truncated values (default: true). - * [parseAllBytes] true to ensure all bytes are parsed - * (default: true) - * [decodeBitStrings] true to attempt to decode the content of - * BIT STRINGs (not OCTET STRINGs) using strict mode. Note that - * without schema support to understand the data context this can - * erroneously decode values that happen to be valid ASN.1. This - * flag will be deprecated or removed as soon as schema support is - * available. (default: true) - * - * @throws Will throw an error for various malformed input conditions. - * - * @return the parsed asn1 object. 
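// A hedged sketch, from the caller's side, of the signal plumbing handled by
// addSignal()/removeSignal() above: undici's API methods accept an AbortSignal
// in their options, and aborting it tears down the in-flight request.
const { request } = require('undici');

async function requestWithTimeout(url, ms) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), ms);
  try {
    const { body } = await request(url, { signal: controller.signal });
    return await body.text();
  } finally {
    clearTimeout(timer);
  }
}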
- */ -asn1.fromDer = function(bytes, options) { - if(options === undefined) { - options = { - strict: true, - parseAllBytes: true, - decodeBitStrings: true - }; - } - if(typeof options === 'boolean') { - options = { - strict: options, - parseAllBytes: true, - decodeBitStrings: true - }; - } - if(!('strict' in options)) { - options.strict = true; - } - if(!('parseAllBytes' in options)) { - options.parseAllBytes = true; - } - if(!('decodeBitStrings' in options)) { - options.decodeBitStrings = true; - } + self[kSignal] = null + self[kListener] = null +} - // wrap in buffer if needed - if(typeof bytes === 'string') { - bytes = forge.util.createBuffer(bytes); - } +module.exports = { + addSignal, + removeSignal +} - var byteCount = bytes.length(); - var value = _fromDer(bytes, bytes.length(), 0, options); - if(options.parseAllBytes && bytes.length() !== 0) { - var error = new Error('Unparsed DER bytes remain after ASN.1 parsing.'); - error.byteCount = byteCount; - error.remaining = bytes.length(); - throw error; - } - return value; -}; -/** - * Internal function to parse an asn1 object from a byte buffer in DER format. - * - * @param bytes the byte buffer to parse from. - * @param remaining the number of bytes remaining for this chunk. - * @param depth the current parsing depth. - * @param options object with same options as fromDer(). - * - * @return the parsed asn1 object. - */ -function _fromDer(bytes, remaining, depth, options) { - // temporary storage for consumption calculations - var start; - - // minimum length for ASN.1 DER structure is 2 - _checkBufferLength(bytes, remaining, 2); - - // get the first byte - var b1 = bytes.getByte(); - // consumed one byte - remaining--; - - // get the tag class - var tagClass = (b1 & 0xC0); - - // get the type (bits 1-5) - var type = b1 & 0x1F; - - // get the variable value length and adjust remaining bytes - start = bytes.length(); - var length = _getValueLength(bytes, remaining); - remaining -= start - bytes.length(); - - // ensure there are enough bytes to get the value - if(length !== undefined && length > remaining) { - if(options.strict) { - var error = new Error('Too few bytes to read ASN.1 value.'); - error.available = bytes.length(); - error.remaining = remaining; - error.requested = length; - throw error; - } - // Note: be lenient with truncated values and use remaining state bytes - length = remaining; - } - - // value storage - var value; - // possible BIT STRING contents storage - var bitStringContents; - - // constructed flag is bit 6 (32 = 0x20) of the first byte - var constructed = ((b1 & 0x20) === 0x20); - if(constructed) { - // parse child asn1 objects from the value - value = []; - if(length === undefined) { - // asn1 object of indefinite length, read until end tag - for(;;) { - _checkBufferLength(bytes, remaining, 2); - if(bytes.bytes(2) === String.fromCharCode(0, 0)) { - bytes.getBytes(2); - remaining -= 2; - break; - } - start = bytes.length(); - value.push(_fromDer(bytes, remaining, depth + 1, options)); - remaining -= start - bytes.length(); - } - } else { - // parsing asn1 object of definite length - while(length > 0) { - start = bytes.length(); - value.push(_fromDer(bytes, length, depth + 1, options)); - remaining -= start - bytes.length(); - length -= start - bytes.length(); - } - } - } +/***/ }), - // if a BIT STRING, save the contents including padding - if(value === undefined && tagClass === asn1.Class.UNIVERSAL && - type === asn1.Type.BITSTRING) { - bitStringContents = bytes.bytes(length); - } - - // determine if a 
non-constructed value should be decoded as a composed - // value that contains other ASN.1 objects. BIT STRINGs (and OCTET STRINGs) - // can be used this way. - if(value === undefined && options.decodeBitStrings && - tagClass === asn1.Class.UNIVERSAL && - // FIXME: OCTET STRINGs not yet supported here - // .. other parts of forge expect to decode OCTET STRINGs manually - (type === asn1.Type.BITSTRING /*|| type === asn1.Type.OCTETSTRING*/) && - length > 1) { - // save read position - var savedRead = bytes.read; - var savedRemaining = remaining; - var unused = 0; - if(type === asn1.Type.BITSTRING) { - /* The first octet gives the number of bits by which the length of the - bit string is less than the next multiple of eight (this is called - the "number of unused bits"). - - The second and following octets give the value of the bit string - converted to an octet string. */ - _checkBufferLength(bytes, remaining, 1); - unused = bytes.getByte(); - remaining--; - } - // if all bits are used, maybe the BIT/OCTET STRING holds ASN.1 objs - if(unused === 0) { - try { - // attempt to parse child asn1 object from the value - // (stored in array to signal composed value) - start = bytes.length(); - var subOptions = { - // enforce strict mode to avoid parsing ASN.1 from plain data - strict: true, - decodeBitStrings: true - }; - var composed = _fromDer(bytes, remaining, depth + 1, subOptions); - var used = start - bytes.length(); - remaining -= used; - if(type == asn1.Type.BITSTRING) { - used++; - } +/***/ 9744: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // if the data all decoded and the class indicates UNIVERSAL or - // CONTEXT_SPECIFIC then assume we've got an encapsulated ASN.1 object - var tc = composed.tagClass; - if(used === length && - (tc === asn1.Class.UNIVERSAL || tc === asn1.Class.CONTEXT_SPECIFIC)) { - value = [composed]; - } - } catch(ex) { - } - } - if(value === undefined) { - // restore read position - bytes.read = savedRead; - remaining = savedRemaining; - } - } +"use strict"; - if(value === undefined) { - // asn1 not constructed or composed, get raw value - // TODO: do DER to OID conversion and vice-versa in .toDer? - if(length === undefined) { - if(options.strict) { - throw new Error('Non-constructed ASN.1 object of indefinite length.'); - } - // be lenient and use remaining state bytes - length = remaining; - } +const { AsyncResource } = __nccwpck_require__(852) +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { addSignal, removeSignal } = __nccwpck_require__(7032) - if(type === asn1.Type.BMPSTRING) { - value = ''; - for(; length > 0; length -= 2) { - _checkBufferLength(bytes, remaining, 2); - value += String.fromCharCode(bytes.getInt16()); - remaining -= 2; - } - } else { - value = bytes.getBytes(length); - remaining -= length; +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - } - // add BIT STRING contents if available - var asn1Options = bitStringContents === undefined ? 
null : { - bitStringContents: bitStringContents - }; + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - // create and return asn1 object - return asn1.create(tagClass, type, constructed, value, asn1Options); -} + const { signal, opaque, responseHeaders } = opts -/** - * Converts the given asn1 object to a buffer of bytes in DER format. - * - * @param asn1 the asn1 object to convert to bytes. - * - * @return the buffer of bytes. - */ -asn1.toDer = function(obj) { - var bytes = forge.util.createBuffer(); + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } - // build the first byte - var b1 = obj.tagClass | obj.type; + super('UNDICI_CONNECT') - // for storing the ASN.1 value - var value = forge.util.createBuffer(); + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null - // use BIT STRING contents if available and data not changed - var useBitStringContents = false; - if('bitStringContents' in obj) { - useBitStringContents = true; - if(obj.original) { - useBitStringContents = asn1.equals(obj, obj.original); - } + addSignal(this, signal) } - if(useBitStringContents) { - value.putBytes(obj.bitStringContents); - } else if(obj.composed) { - // if composed, use each child asn1 object's DER bytes as value - // turn on 6th bit (0x20 = 32) to indicate asn1 is constructed - // from other asn1 objects - if(obj.constructed) { - b1 |= 0x20; - } else { - // type is a bit string, add unused bits of 0x00 - value.putByte(0x00); + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() } - // add all of the child DER bytes together - for(var i = 0; i < obj.value.length; ++i) { - if(obj.value[i] !== undefined) { - value.putBuffer(asn1.toDer(obj.value[i])); - } - } - } else { - // use asn1.value directly - if(obj.type === asn1.Type.BMPSTRING) { - for(var i = 0; i < obj.value.length; ++i) { - value.putInt16(obj.value.charCodeAt(i)); - } - } else { - // ensure integer is minimally-encoded - // TODO: should all leading bytes be stripped vs just one? - // .. ex '00 00 01' => '01'? 
- if(obj.type === asn1.Type.INTEGER && - obj.value.length > 1 && - // leading 0x00 for positive integer - ((obj.value.charCodeAt(0) === 0 && - (obj.value.charCodeAt(1) & 0x80) === 0) || - // leading 0xFF for negative integer - (obj.value.charCodeAt(0) === 0xFF && - (obj.value.charCodeAt(1) & 0x80) === 0x80))) { - value.putBytes(obj.value.substr(1)); - } else { - value.putBytes(obj.value); - } - } + this.abort = abort + this.context = context } - // add tag byte - bytes.putByte(b1); + onHeaders () { + throw new SocketError('bad connect', null) + } - // use "short form" encoding - if(value.length() <= 127) { - // one byte describes the length - // bit 8 = 0 and bits 7-1 = length - bytes.putByte(value.length() & 0x7F); - } else { - // use "long form" encoding - // 2 to 127 bytes describe the length - // first byte: bit 8 = 1 and bits 7-1 = # of additional bytes - // other bytes: length in base 256, big-endian - var len = value.length(); - var lenBytes = ''; - do { - lenBytes += String.fromCharCode(len & 0xFF); - len = len >>> 8; - } while(len > 0); - - // set first byte to # bytes used to store the length and turn on - // bit 8 to indicate long-form length is used - bytes.putByte(lenBytes.length | 0x80); - - // concatenate length bytes in reverse since they were generated - // little endian and we need big endian - for(var i = lenBytes.length - 1; i >= 0; --i) { - bytes.putByte(lenBytes.charCodeAt(i)); - } - } - - // concatenate value bytes - bytes.putBuffer(value); - return bytes; -}; + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this -/** - * Converts an OID dot-separated string to a byte buffer. The byte buffer - * contains only the DER-encoded value, not any tag or length bytes. - * - * @param oid the OID dot-separated string. - * - * @return the byte buffer. - */ -asn1.oidToDer = function(oid) { - // split OID into individual values - var values = oid.split('.'); - var bytes = forge.util.createBuffer(); - - // first byte is 40 * value1 + value2 - bytes.putByte(40 * parseInt(values[0], 10) + parseInt(values[1], 10)); - // other bytes are each value in base 128 with 8th bit set except for - // the last byte for each value - var last, valueBytes, value, b; - for(var i = 2; i < values.length; ++i) { - // produce value bytes in reverse because we don't know how many - // bytes it will take to store the value - last = true; - valueBytes = []; - value = parseInt(values[i], 10); - do { - b = value & 0x7F; - value = value >>> 7; - // if value is not last, then turn on 8th bit - if(!last) { - b |= 0x80; - } - valueBytes.push(b); - last = false; - } while(value > 0); + removeSignal(this) - // add value bytes in reverse (needs to be in big endian) - for(var n = valueBytes.length - 1; n >= 0; --n) { - bytes.putByte(valueBytes[n]); + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) } - return bytes; -}; + onError (err) { + const { callback, opaque } = this -/** - * Converts a DER-encoded byte buffer to an OID dot-separated string. The - * byte buffer should contain only the DER-encoded value, not any tag or - * length bytes. - * - * @param bytes the byte buffer. - * - * @return the OID dot-separated string. 
- */ -asn1.derToOid = function(bytes) { - var oid; - - // wrap in buffer if needed - if(typeof bytes === 'string') { - bytes = forge.util.createBuffer(bytes); - } - - // first byte is 40 * value1 + value2 - var b = bytes.getByte(); - oid = Math.floor(b / 40) + '.' + (b % 40); - - // other bytes are each value in base 128 with 8th bit set except for - // the last byte for each value - var value = 0; - while(bytes.length() > 0) { - b = bytes.getByte(); - value = value << 7; - // not the last byte for the value - if(b & 0x80) { - value += b & 0x7F; - } else { - // last byte - oid += '.' + (value + b); - value = 0; + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } } +} - return oid; -}; +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } -/** - * Converts a UTCTime value to a date. - * - * Note: GeneralizedTime has 4 digits for the year and is used for X.509 - * dates past 2049. Parsing that structure hasn't been implemented yet. - * - * @param utc the UTCTime value to convert. - * - * @return the date. - */ -asn1.utcTimeToDate = function(utc) { - /* The following formats can be used: - - YYMMDDhhmmZ - YYMMDDhhmm+hh'mm' - YYMMDDhhmm-hh'mm' - YYMMDDhhmmssZ - YYMMDDhhmmss+hh'mm' - YYMMDDhhmmss-hh'mm' - - Where: - - YY is the least significant two digits of the year - MM is the month (01 to 12) - DD is the day (01 to 31) - hh is the hour (00 to 23) - mm are the minutes (00 to 59) - ss are the seconds (00 to 59) - Z indicates that local time is GMT, + indicates that local time is - later than GMT, and - indicates that local time is earlier than GMT - hh' is the absolute value of the offset from GMT in hours - mm' is the absolute value of the offset from GMT in minutes */ - var date = new Date(); - - // if YY >= 50 use 19xx, if YY < 50 use 20xx - var year = parseInt(utc.substr(0, 2), 10); - year = (year >= 50) ? 1900 + year : 2000 + year; - var MM = parseInt(utc.substr(2, 2), 10) - 1; // use 0-11 for month - var DD = parseInt(utc.substr(4, 2), 10); - var hh = parseInt(utc.substr(6, 2), 10); - var mm = parseInt(utc.substr(8, 2), 10); - var ss = 0; - - // not just YYMMDDhhmmZ - if(utc.length > 11) { - // get character after minutes - var c = utc.charAt(10); - var end = 10; - - // see if seconds are present - if(c !== '+' && c !== '-') { - // get seconds - ss = parseInt(utc.substr(10, 2), 10); - end += 2; - } - } - - // update date - date.setUTCFullYear(year, MM, DD); - date.setUTCHours(hh, mm, ss, 0); - - if(end) { - // get +/- after end of time - c = utc.charAt(end); - if(c === '+' || c === '-') { - // get hours+minutes offset - var hhoffset = parseInt(utc.substr(end + 1, 2), 10); - var mmoffset = parseInt(utc.substr(end + 4, 2), 10); - - // calculate offset in milliseconds - var offset = hhoffset * 60 + mmoffset; - offset *= 60000; - - // apply offset - if(c === '+') { - date.setTime(+date - offset); - } else { - date.setTime(+date + offset); - } + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } +} - return date; -}; +module.exports = connect -/** - * Converts a GeneralizedTime value to a date. 
- * - * @param gentime the GeneralizedTime value to convert. - * - * @return the date. - */ -asn1.generalizedTimeToDate = function(gentime) { - /* The following formats can be used: - - YYYYMMDDHHMMSS - YYYYMMDDHHMMSS.fff - YYYYMMDDHHMMSSZ - YYYYMMDDHHMMSS.fffZ - YYYYMMDDHHMMSS+hh'mm' - YYYYMMDDHHMMSS.fff+hh'mm' - YYYYMMDDHHMMSS-hh'mm' - YYYYMMDDHHMMSS.fff-hh'mm' - - Where: - - YYYY is the year - MM is the month (01 to 12) - DD is the day (01 to 31) - hh is the hour (00 to 23) - mm are the minutes (00 to 59) - ss are the seconds (00 to 59) - .fff is the second fraction, accurate to three decimal places - Z indicates that local time is GMT, + indicates that local time is - later than GMT, and - indicates that local time is earlier than GMT - hh' is the absolute value of the offset from GMT in hours - mm' is the absolute value of the offset from GMT in minutes */ - var date = new Date(); - - var YYYY = parseInt(gentime.substr(0, 4), 10); - var MM = parseInt(gentime.substr(4, 2), 10) - 1; // use 0-11 for month - var DD = parseInt(gentime.substr(6, 2), 10); - var hh = parseInt(gentime.substr(8, 2), 10); - var mm = parseInt(gentime.substr(10, 2), 10); - var ss = parseInt(gentime.substr(12, 2), 10); - var fff = 0; - var offset = 0; - var isUTC = false; - - if(gentime.charAt(gentime.length - 1) === 'Z') { - isUTC = true; - } - - var end = gentime.length - 5, c = gentime.charAt(end); - if(c === '+' || c === '-') { - // get hours+minutes offset - var hhoffset = parseInt(gentime.substr(end + 1, 2), 10); - var mmoffset = parseInt(gentime.substr(end + 4, 2), 10); - - // calculate offset in milliseconds - offset = hhoffset * 60 + mmoffset; - offset *= 60000; - - // apply offset - if(c === '+') { - offset *= -1; - } - - isUTC = true; - } - - // check for second fraction - if(gentime.charAt(14) === '.') { - fff = parseFloat(gentime.substr(14), 10) * 1000; - } - - if(isUTC) { - date.setUTCFullYear(YYYY, MM, DD); - date.setUTCHours(hh, mm, ss, fff); - - // apply offset - date.setTime(+date + offset); - } else { - date.setFullYear(YYYY, MM, DD); - date.setHours(hh, mm, ss, fff); - } - return date; -}; +/***/ }), -/** - * Converts a date to a UTCTime value. - * - * Note: GeneralizedTime has 4 digits for the year and is used for X.509 - * dates past 2049. Converting to a GeneralizedTime hasn't been - * implemented yet. - * - * @param date the date to convert. - * - * @return the UTCTime value. 
- */ -asn1.dateToUtcTime = function(date) { - // TODO: validate; currently assumes proper format - if(typeof date === 'string') { - return date; - } +/***/ 8752: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var rval = ''; +"use strict"; - // create format YYMMDDhhmmssZ - var format = []; - format.push(('' + date.getUTCFullYear()).substr(2)); - format.push('' + (date.getUTCMonth() + 1)); - format.push('' + date.getUTCDate()); - format.push('' + date.getUTCHours()); - format.push('' + date.getUTCMinutes()); - format.push('' + date.getUTCSeconds()); - // ensure 2 digits are used for each format entry - for(var i = 0; i < format.length; ++i) { - if(format[i].length < 2) { - rval += '0'; - } - rval += format[i]; - } - rval += 'Z'; +const { + Readable, + Duplex, + PassThrough +} = __nccwpck_require__(2781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) +const assert = __nccwpck_require__(9491) - return rval; -}; +const kResume = Symbol('resume') -/** - * Converts a date to a GeneralizedTime value. - * - * @param date the date to convert. - * - * @return the GeneralizedTime value as a string. - */ -asn1.dateToGeneralizedTime = function(date) { - // TODO: validate; currently assumes proper format - if(typeof date === 'string') { - return date; - } +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) - var rval = ''; + this[kResume] = null + } - // create format YYYYMMDDHHMMSSZ - var format = []; - format.push('' + date.getUTCFullYear()); - format.push('' + (date.getUTCMonth() + 1)); - format.push('' + date.getUTCDate()); - format.push('' + date.getUTCHours()); - format.push('' + date.getUTCMinutes()); - format.push('' + date.getUTCSeconds()); + _read () { + const { [kResume]: resume } = this - // ensure 2 digits are used for each format entry - for(var i = 0; i < format.length; ++i) { - if(format[i].length < 2) { - rval += '0'; + if (resume) { + this[kResume] = null + resume() } - rval += format[i]; } - rval += 'Z'; - return rval; -}; + _destroy (err, callback) { + this._read() -/** - * Converts a javascript integer to a DER-encoded byte buffer to be used - * as the value for an INTEGER type. - * - * @param x the integer. - * - * @return the byte buffer. - */ -asn1.integerToDer = function(x) { - var rval = forge.util.createBuffer(); - if(x >= -0x80 && x < 0x80) { - return rval.putSignedInt(x, 8); - } - if(x >= -0x8000 && x < 0x8000) { - return rval.putSignedInt(x, 16); - } - if(x >= -0x800000 && x < 0x800000) { - return rval.putSignedInt(x, 24); + callback(err) } - if(x >= -0x80000000 && x < 0x80000000) { - return rval.putSignedInt(x, 32); - } - var error = new Error('Integer too large; max is 32-bits.'); - error.integer = x; - throw error; -}; +} -/** - * Converts a DER-encoded byte buffer to a javascript integer. This is - * typically used to decode the value of an INTEGER type. - * - * @param bytes the byte buffer. - * - * @return the integer. 
- */ -asn1.derToInteger = function(bytes) { - // wrap in buffer if needed - if(typeof bytes === 'string') { - bytes = forge.util.createBuffer(bytes); +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume } - var n = bytes.length() * 8; - if(n > 32) { - throw new Error('Integer too large; max is 32-bits.'); + _read () { + this[kResume]() } - return bytes.getSignedInt(n); -}; -/** - * Validates that the given ASN.1 object is at least a super set of the - * given ASN.1 structure. Only tag classes and types are checked. An - * optional map may also be provided to capture ASN.1 values while the - * structure is checked. - * - * To capture an ASN.1 value, set an object in the validator's 'capture' - * parameter to the key to use in the capture map. To capture the full - * ASN.1 object, specify 'captureAsn1'. To capture BIT STRING bytes, including - * the leading unused bits counter byte, specify 'captureBitStringContents'. - * To capture BIT STRING bytes, without the leading unused bits counter byte, - * specify 'captureBitStringValue'. - * - * Objects in the validator may set a field 'optional' to true to indicate - * that it isn't necessary to pass validation. - * - * @param obj the ASN.1 object to validate. - * @param v the ASN.1 structure validator. - * @param capture an optional map to capture values in. - * @param errors an optional array for storing validation errors. - * - * @return true on success, false on failure. - */ -asn1.validate = function(obj, v, capture, errors) { - var rval = false; - - // ensure tag class and type are the same if specified - if((obj.tagClass === v.tagClass || typeof(v.tagClass) === 'undefined') && - (obj.type === v.type || typeof(v.type) === 'undefined')) { - // ensure constructed flag is the same if specified - if(obj.constructed === v.constructed || - typeof(v.constructed) === 'undefined') { - rval = true; - - // handle sub values - if(v.value && forge.util.isArray(v.value)) { - var j = 0; - for(var i = 0; rval && i < v.value.length; ++i) { - rval = v.value[i].optional || false; - if(obj.value[j]) { - rval = asn1.validate(obj.value[j], v.value[i], capture, errors); - if(rval) { - ++j; - } else if(v.value[i].optional) { - rval = true; - } - } - if(!rval && errors) { - errors.push( - '[' + v.name + '] ' + - 'Tag class "' + v.tagClass + '", type "' + - v.type + '" expected value length "' + - v.value.length + '", got "' + - obj.value.length + '"'); - } - } - } + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } - if(rval && capture) { - if(v.capture) { - capture[v.capture] = obj.value; - } - if(v.captureAsn1) { - capture[v.captureAsn1] = obj; - } - if(v.captureBitStringContents && 'bitStringContents' in obj) { - capture[v.captureBitStringContents] = obj.bitStringContents; - } - if(v.captureBitStringValue && 'bitStringContents' in obj) { - var value; - if(obj.bitStringContents.length < 2) { - capture[v.captureBitStringValue] = ''; - } else { - // FIXME: support unused bits with data shifting - var unused = obj.bitStringContents.charCodeAt(0); - if(unused !== 0) { - throw new Error( - 'captureBitStringValue only supported for zero unused bits'); - } - capture[v.captureBitStringValue] = obj.bitStringContents.slice(1); - } - } - } - } else if(errors) { - errors.push( - '[' + v.name + '] ' + - 'Expected constructed "' + v.constructed + '", got "' + - obj.constructed + '"'); - } - } else if(errors) { - if(obj.tagClass !== v.tagClass) 
{ - errors.push( - '[' + v.name + '] ' + - 'Expected tag class "' + v.tagClass + '", got "' + - obj.tagClass + '"'); - } - if(obj.type !== v.type) { - errors.push( - '[' + v.name + '] ' + - 'Expected type "' + v.type + '", got "' + obj.type + '"'); - } - } - return rval; -}; + callback(err) + } +} -// regex for testing for non-latin characters -var _nonLatinRegex = /[^\\u0000-\\u00ff]/; +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -/** - * Pretty prints an ASN.1 object to a string. - * - * @param obj the object to write out. - * @param level the level in the tree. - * @param indentation the indentation to use. - * - * @return the string. - */ -asn1.prettyPrint = function(obj, level, indentation) { - var rval = ''; - - // set default level and indentation - level = level || 0; - indentation = indentation || 2; - - // start new line for deep levels - if(level > 0) { - rval += '\n'; - } - - // create indent - var indent = ''; - for(var i = 0; i < level * indentation; ++i) { - indent += ' '; - } - - // print class:type - rval += indent + 'Tag: '; - switch(obj.tagClass) { - case asn1.Class.UNIVERSAL: - rval += 'Universal:'; - break; - case asn1.Class.APPLICATION: - rval += 'Application:'; - break; - case asn1.Class.CONTEXT_SPECIFIC: - rval += 'Context-Specific:'; - break; - case asn1.Class.PRIVATE: - rval += 'Private:'; - break; - } - - if(obj.tagClass === asn1.Class.UNIVERSAL) { - rval += obj.type; - - // known types - switch(obj.type) { - case asn1.Type.NONE: - rval += ' (None)'; - break; - case asn1.Type.BOOLEAN: - rval += ' (Boolean)'; - break; - case asn1.Type.INTEGER: - rval += ' (Integer)'; - break; - case asn1.Type.BITSTRING: - rval += ' (Bit string)'; - break; - case asn1.Type.OCTETSTRING: - rval += ' (Octet string)'; - break; - case asn1.Type.NULL: - rval += ' (Null)'; - break; - case asn1.Type.OID: - rval += ' (Object Identifier)'; - break; - case asn1.Type.ODESC: - rval += ' (Object Descriptor)'; - break; - case asn1.Type.EXTERNAL: - rval += ' (External or Instance of)'; - break; - case asn1.Type.REAL: - rval += ' (Real)'; - break; - case asn1.Type.ENUMERATED: - rval += ' (Enumerated)'; - break; - case asn1.Type.EMBEDDED: - rval += ' (Embedded PDV)'; - break; - case asn1.Type.UTF8: - rval += ' (UTF8)'; - break; - case asn1.Type.ROID: - rval += ' (Relative Object Identifier)'; - break; - case asn1.Type.SEQUENCE: - rval += ' (Sequence)'; - break; - case asn1.Type.SET: - rval += ' (Set)'; - break; - case asn1.Type.PRINTABLESTRING: - rval += ' (Printable String)'; - break; - case asn1.Type.IA5String: - rval += ' (IA5String (ASCII))'; - break; - case asn1.Type.UTCTIME: - rval += ' (UTC time)'; - break; - case asn1.Type.GENERALIZEDTIME: - rval += ' (Generalized time)'; - break; - case asn1.Type.BMPSTRING: - rval += ' (BMP String)'; - break; + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') } - } else { - rval += obj.type; - } - rval += '\n'; - rval += indent + 'Constructed: ' + obj.constructed + '\n'; + const { signal, method, opaque, onInfo, responseHeaders } = opts - if(obj.composed) { - var subvalues = 0; - var sub = ''; - for(var i = 0; i < obj.value.length; ++i) { - if(obj.value[i] !== undefined) { - subvalues += 1; - sub += asn1.prettyPrint(obj.value[i], level + 1, indentation); - if((i + 1) < obj.value.length) { - sub += ','; - } - } + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener 
!== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } - rval += indent + 'Sub values: ' + subvalues + sub; - } else { - rval += indent + 'Value: '; - if(obj.type === asn1.Type.OID) { - var oid = asn1.derToOid(obj.value); - rval += oid; - if(forge.pki && forge.pki.oids) { - if(oid in forge.pki.oids) { - rval += ' (' + forge.pki.oids[oid] + ') '; - } - } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') } - if(obj.type === asn1.Type.INTEGER) { - try { - rval += asn1.derToInteger(obj.value); - } catch(ex) { - rval += '0x' + forge.util.bytesToHex(obj.value); - } - } else if(obj.type === asn1.Type.BITSTRING) { - // TODO: shift bits as needed to display without padding - if(obj.value.length > 1) { - // remove unused bits field - rval += '0x' + forge.util.bytesToHex(obj.value.slice(1)); - } else { - rval += '(none)'; - } - // show unused bit count - if(obj.value.length > 0) { - var unused = obj.value.charCodeAt(0); - if(unused == 1) { - rval += ' (1 unused bit shown)'; - } else if(unused > 1) { - rval += ' (' + unused + ' unused bits shown)'; - } - } - } else if(obj.type === asn1.Type.OCTETSTRING) { - if(!_nonLatinRegex.test(obj.value)) { - rval += '(' + obj.value + ') '; - } - rval += '0x' + forge.util.bytesToHex(obj.value); - } else if(obj.type === asn1.Type.UTF8) { - try { - rval += forge.util.decodeUtf8(obj.value); - } catch(e) { - if(e.message === 'URI malformed') { - rval += - '0x' + forge.util.bytesToHex(obj.value) + ' (malformed UTF8)'; - } else { - throw e; - } - } - } else if(obj.type === asn1.Type.PRINTABLESTRING || - obj.type === asn1.Type.IA5String) { - rval += obj.value; - } else if(_nonLatinRegex.test(obj.value)) { - rval += '0x' + forge.util.bytesToHex(obj.value); - } else if(obj.value.length === 0) { - rval += '[null]'; - } else { - rval += obj.value; + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') } - } - return rval; -}; + super('UNDICI_PIPELINE') + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null -/***/ }), + this.req = new PipelineRequest().on('error', util.nop) -/***/ 2300: -/***/ ((module) => { + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this -/** - * Base-N/Base-X encoding/decoding functions. - * - * Original implementation from base-x: - * https://github.com/cryptocoinjs/base-x - * - * Which is MIT licensed: - * - * The MIT License (MIT) - * - * Copyright base-x contributors (c) 2016 - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - */ -var api = {}; -module.exports = api; + if (body && body.resume) { + body.resume() + } + }, + write: (chunk, encoding, callback) => { + const { req } = this + + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback + } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this -// baseN alphabet indexes -var _reverseAlphabets = {}; + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() + } -/** - * BaseN-encodes a Uint8Array using the given alphabet. - * - * @param input the Uint8Array to encode. - * @param maxline the maximum number of encoded characters per line to use, - * defaults to none. - * - * @return the baseN-encoded output string. - */ -api.encode = function(input, alphabet, maxline) { - if(typeof alphabet !== 'string') { - throw new TypeError('"alphabet" must be a string.'); - } - if(maxline !== undefined && typeof maxline !== 'number') { - throw new TypeError('"maxline" must be a number.'); - } + if (abort && err) { + abort() + } - var output = ''; + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) - if(!(input instanceof Uint8Array)) { - // assume forge byte buffer - output = _encodeWithByteBuffer(input, alphabet); - } else { - var i = 0; - var base = alphabet.length; - var first = alphabet.charAt(0); - var digits = [0]; - for(i = 0; i < input.length; ++i) { - for(var j = 0, carry = input[i]; j < digits.length; ++j) { - carry += digits[j] << 8; - digits[j] = carry % base; - carry = (carry / base) | 0; - } + removeSignal(this) - while(carry > 0) { - digits.push(carry % base); - carry = (carry / base) | 0; + callback(err) } - } + }).on('prefinish', () => { + const { req } = this - // deal with leading zeros - for(i = 0; input[i] === 0 && i < input.length - 1; ++i) { - output += first; - } - // convert digits to a string - for(i = digits.length - 1; i >= 0; --i) { - output += alphabet[digits[i]]; - } - } + // Node < 15 does not call _final in same tick. + req.push(null) + }) - if(maxline) { - var regex = new RegExp('.{1,' + maxline + '}', 'g'); - output = output.match(regex).join('\r\n'); + this.res = null + + addSignal(this, signal) } - return output; -}; + onConnect (abort, context) { + const { ret, res } = this -/** - * Decodes a baseN-encoded (using the given alphabet) string to a - * Uint8Array. - * - * @param input the baseN-encoded input string. - * - * @return the Uint8Array. 
- */ -api.decode = function(input, alphabet) { - if(typeof input !== 'string') { - throw new TypeError('"input" must be a string.'); - } - if(typeof alphabet !== 'string') { - throw new TypeError('"alphabet" must be a string.'); - } + assert(!res, 'pipeline cannot be retried') - var table = _reverseAlphabets[alphabet]; - if(!table) { - // compute reverse alphabet - table = _reverseAlphabets[alphabet] = []; - for(var i = 0; i < alphabet.length; ++i) { - table[alphabet.charCodeAt(i)] = i; + if (ret.destroyed) { + throw new RequestAbortedError() } + + this.abort = abort + this.context = context } - // remove whitespace characters - input = input.replace(/\s/g, ''); + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this - var base = alphabet.length; - var first = alphabet.charAt(0); - var bytes = [0]; - for(var i = 0; i < input.length; i++) { - var value = table[input.charCodeAt(i)]; - if(value === undefined) { - return; + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return } - for(var j = 0, carry = value; j < bytes.length; ++j) { - carry += bytes[j] * base; - bytes[j] = carry & 0xff; - carry >>= 8; + this.res = new PipelineResponse(resume) + + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err } - while(carry > 0) { - bytes.push(carry & 0xff); - carry >>= 8; + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') } - } - // deal with leading zeros - for(var k = 0; input[k] === first && k < input.length - 1; ++k) { - bytes.push(0); - } + body + .on('data', (chunk) => { + const { ret, body } = this - if(typeof Buffer !== 'undefined') { - return Buffer.from(bytes.reverse()); - } + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this - return new Uint8Array(bytes.reverse()); -}; + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this -function _encodeWithByteBuffer(input, alphabet) { - var i = 0; - var base = alphabet.length; - var first = alphabet.charAt(0); - var digits = [0]; - for(i = 0; i < input.length(); ++i) { - for(var j = 0, carry = input.at(i); j < digits.length; ++j) { - carry += digits[j] << 8; - digits[j] = carry % base; - carry = (carry / base) | 0; - } + ret.push(null) + }) + .on('close', () => { + const { ret } = this - while(carry > 0) { - digits.push(carry % base); - carry = (carry / base) | 0; - } + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) + + this.body = body } - var output = ''; + onData (chunk) { + const { res } = this + return res.push(chunk) + } - // deal with leading zeros - for(i = 0; input.at(i) === 0 && i < input.length() - 1; ++i) { - output += first; + onComplete (trailers) { + const { res } = this + res.push(null) } - // convert digits to a string - for(i = digits.length - 1; i >= 0; --i) { - output += alphabet[digits[i]]; + + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) } +} - return output; +function pipeline (opts, handler) { + try { + const pipelineHandler = new 
PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } } +module.exports = pipeline + /***/ }), -/***/ 7088: +/***/ 5448: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Cipher base API. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); +"use strict"; -module.exports = forge.cipher = forge.cipher || {}; -// registered algorithms -forge.cipher.algorithms = forge.cipher.algorithms || {}; +const Readable = __nccwpck_require__(3858) +const { + InvalidArgumentError, + RequestAbortedError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) -/** - * Creates a cipher object that can be used to encrypt data using the given - * algorithm and key. The algorithm may be provided as a string value for a - * previously registered algorithm or it may be given as a cipher algorithm - * API object. - * - * @param algorithm the algorithm to use, either a string or an algorithm API - * object. - * @param key the key to use, as a binary-encoded string of bytes or a - * byte buffer. - * - * @return the cipher. - */ -forge.cipher.createCipher = function(algorithm, key) { - var api = algorithm; - if(typeof api === 'string') { - api = forge.cipher.getAlgorithm(api); - if(api) { - api = api(); +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - } - if(!api) { - throw new Error('Unsupported algorithm: ' + algorithm); - } - // assume block cipher - return new forge.cipher.BlockCipher({ - algorithm: api, - key: key, - decrypt: false - }); -}; + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts -/** - * Creates a decipher object that can be used to decrypt data using the given - * algorithm and key. The algorithm may be provided as a string value for a - * previously registered algorithm or it may be given as a cipher algorithm - * API object. - * - * @param algorithm the algorithm to use, either a string or an algorithm API - * object. - * @param key the key to use, as a binary-encoded string of bytes or a - * byte buffer. - * - * @return the cipher. 
- */ -forge.cipher.createDecipher = function(algorithm, key) { - var api = algorithm; - if(typeof api === 'string') { - api = forge.cipher.getAlgorithm(api); - if(api) { - api = api(); + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_REQUEST') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) } + + addSignal(this, signal) } - if(!api) { - throw new Error('Unsupported algorithm: ' + algorithm); + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context } - // assume block cipher - return new forge.cipher.BlockCipher({ - algorithm: api, - key: key, - decrypt: true - }); -}; + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this -/** - * Registers an algorithm by name. If the name was already registered, the - * algorithm API object will be overwritten. - * - * @param name the name of the algorithm. - * @param algorithm the algorithm API object. - */ -forge.cipher.registerAlgorithm = function(name, algorithm) { - name = name.toUpperCase(); - forge.cipher.algorithms[name] = algorithm; -}; + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) -/** - * Gets a registered algorithm by name. - * - * @param name the name of the algorithm. - * - * @return the algorithm, if found, null if not. - */ -forge.cipher.getAlgorithm = function(name) { - name = name.toUpperCase(); - if(name in forge.cipher.algorithms) { - return forge.cipher.algorithms[name]; - } - return null; -}; + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } -var BlockCipher = forge.cipher.BlockCipher = function(options) { - this.algorithm = options.algorithm; - this.mode = this.algorithm.mode; - this.blockSize = this.mode.blockSize; - this._finish = false; - this._input = null; - this.output = null; - this._op = options.decrypt ? this.mode.decrypt : this.mode.encrypt; - this._decrypt = options.decrypt; - this.algorithm.initialize(options); -}; + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) -/** - * Starts or restarts the encryption or decryption process, whichever - * was previously configured. 
- * - * For non-GCM mode, the IV may be a binary-encoded string of bytes, an array - * of bytes, a byte buffer, or an array of 32-bit integers. If the IV is in - * bytes, then it must be Nb (16) bytes in length. If the IV is given in as - * 32-bit integers, then it must be 4 integers long. - * - * Note: an IV is not required or used in ECB mode. - * - * For GCM-mode, the IV must be given as a binary-encoded string of bytes or - * a byte buffer. The number of bytes should be 12 (96 bits) as recommended - * by NIST SP-800-38D but another length may be given. - * - * @param options the options to use: - * iv the initialization vector to use as a binary-encoded string of - * bytes, null to reuse the last ciphered block from a previous - * update() (this "residue" method is for legacy support only). - * additionalData additional authentication data as a binary-encoded - * string of bytes, for 'GCM' mode, (default: none). - * tagLength desired length of authentication tag, in bits, for - * 'GCM' mode (0-128, default: 128). - * tag the authentication tag to check if decrypting, as a - * binary-encoded string of bytes. - * output the output the buffer to write to, null to create one. - */ -BlockCipher.prototype.start = function(options) { - options = options || {}; - var opts = {}; - for(var key in options) { - opts[key] = options[key]; + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } + } } - opts.decrypt = this._decrypt; - this._finish = false; - this._input = forge.util.createBuffer(); - this.output = options.output || forge.util.createBuffer(); - this.mode.start(opts); -}; -/** - * Updates the next block according to the cipher mode. - * - * @param input the buffer to read from. - */ -BlockCipher.prototype.update = function(input) { - if(input) { - // input given, so empty it into the input buffer - this._input.putBuffer(input); + onData (chunk) { + const { res } = this + return res.push(chunk) } - // do cipher operation until it needs more input and not finished - while(!this._op.call(this.mode, this._input, this.output, this._finish) && - !this._finish) {} + onComplete (trailers) { + const { res } = this - // free consumed memory from input buffer - this._input.compact(); -}; + removeSignal(this) -/** - * Finishes encrypting or decrypting. - * - * @param pad a padding function to use in CBC mode, null for default, - * signature(blockSize, buffer, decrypt). - * - * @return true if successful, false on error. 
- */ -BlockCipher.prototype.finish = function(pad) { - // backwards-compatibility w/deprecated padding API - // Note: will overwrite padding functions even after another start() call - if(pad && (this.mode.name === 'ECB' || this.mode.name === 'CBC')) { - this.mode.pad = function(input) { - return pad(this.blockSize, input, false); - }; - this.mode.unpad = function(output) { - return pad(this.blockSize, output, true); - }; + util.parseHeaders(trailers, this.trailers) + + res.push(null) } - // build options for padding and afterFinish functions - var options = {}; - options.decrypt = this._decrypt; + onError (err) { + const { res, callback, body, opaque } = this - // get # of bytes that won't fill a block - options.overflow = this._input.length() % this.blockSize; + removeSignal(this) - if(!this._decrypt && this.mode.pad) { - if(!this.mode.pad(this._input, options)) { - return false; + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } - } - // do final update - this._finish = true; - this.update(); + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. + queueMicrotask(() => { + util.destroy(res, err) + }) + } - if(this._decrypt && this.mode.unpad) { - if(!this.mode.unpad(this.output, options)) { - return false; + if (body) { + this.body = null + util.destroy(body, err) } } +} - if(this.mode.afterFinish) { - if(!this.mode.afterFinish(this.output, options)) { - return false; +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } +} - return true; -}; +module.exports = request +module.exports.RequestHandler = RequestHandler /***/ }), -/***/ 873: +/***/ 5395: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Supported cipher modes. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); - -forge.cipher = forge.cipher || {}; - -// supported cipher modes -var modes = module.exports = forge.cipher.modes = forge.cipher.modes || {}; +"use strict"; -/** Electronic codebook (ECB) (Don't use this; it's not secure) **/ -modes.ecb = function(options) { - options = options || {}; - this.name = 'ECB'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = new Array(this._ints); - this._outBlock = new Array(this._ints); -}; +const { finished, PassThrough } = __nccwpck_require__(2781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) +const { AsyncResource } = __nccwpck_require__(852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) -modes.ecb.prototype.start = function(options) {}; +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -modes.ecb.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts - // get next block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - } + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } - // write output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i]); - } -}; + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } -modes.ecb.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } - // get next block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - } + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } - // decrypt block - this.cipher.decrypt(this._inBlock, this._outBlock); + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } - // write output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i]); + addSignal(this, signal) } -}; -modes.ecb.prototype.pad = function(input, options) { - // add PKCS#7 padding to block (each pad byte is the - // value of the number of pad bytes) - var padding = 
(input.length() === this.blockSize ? - this.blockSize : (this.blockSize - input.length())); - input.fillWithByte(padding, padding); - return true; -}; - -modes.ecb.prototype.unpad = function(output, options) { - // check for error: input data not a multiple of blockSize - if(options.overflow > 0) { - return false; - } + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } - // ensure padding byte count is valid - var len = output.length(); - var count = output.at(len - 1); - if(count > (this.blockSize << 2)) { - return false; + this.abort = abort + this.context = context } - // trim off padding bytes - output.truncate(count); - return true; -}; - -/** Cipher-block Chaining (CBC) **/ - -modes.cbc = function(options) { - options = options || {}; - this.name = 'CBC'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = new Array(this._ints); - this._outBlock = new Array(this._ints); -}; - -modes.cbc.prototype.start = function(options) { - // Note: legacy support for using IV residue (has security flaws) - // if IV is null, reuse block from previous processing - if(options.iv === null) { - // must have a previous block - if(!this._prev) { - throw new Error('Invalid IV parameter.'); - } - this._iv = this._prev.slice(0); - } else if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } else { - // save IV as "previous" block - this._iv = transformIV(options.iv, this.blockSize); - this._prev = this._iv.slice(0); - } -}; + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this -modes.cbc.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - // get next block - // CBC XOR's IV (or previous block) with plaintext - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._prev[i] ^ input.getInt32(); - } + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + this.factory = null - // write output, save previous block - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i]); - } - this._prev = this._outBlock; -}; + let res -modes.cbc.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - if(input.length() < this.blockSize && !(finish && input.length() > 0)) { - return true; - } + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() - // get next block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - } + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } - // decrypt block - this.cipher.decrypt(this._inBlock, this._outBlock); + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) - // write output, save previous ciphered block - // CBC XOR's IV (or previous block) with ciphertext - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._prev[i] ^ this._outBlock[i]); - } - this._prev = this._inBlock.slice(0); -}; + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } -modes.cbc.prototype.pad = function(input, options) { - // add PKCS#7 padding to block (each pad byte is the - // value of the number of pad bytes) - var padding = (input.length() === this.blockSize ? - this.blockSize : (this.blockSize - input.length())); - input.fillWithByte(padding, padding); - return true; -}; + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this -modes.cbc.prototype.unpad = function(output, options) { - // check for error: input data not a multiple of blockSize - if(options.overflow > 0) { - return false; - } + this.res = null + if (err || !res.readable) { + util.destroy(res, err) + } - // ensure padding byte count is valid - var len = output.length(); - var count = output.at(len - 1); - if(count > (this.blockSize << 2)) { - return false; - } + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) - // trim off padding bytes - output.truncate(count); - return true; -}; + if (err) { + abort() + } + }) + } -/** Cipher feedback (CFB) **/ + res.on('drain', resume) -modes.cfb = function(options) { - options = options || {}; - this.name = 'CFB'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = null; - this._outBlock = new Array(this._ints); - this._partialBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; -}; + this.res = res -modes.cfb.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } - // use IV as first input - this._iv = transformIV(options.iv, this.blockSize); - this._inBlock = this._iv.slice(0); - this._partialBytes = 0; -}; + const needDrain = res.writableNeedDrain !== undefined + ? 
res.writableNeedDrain + : res._writableState && res._writableState.needDrain -modes.cfb.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; + return needDrain !== true } - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + onData (chunk) { + const { res } = this - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output, write input as output - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32() ^ this._outBlock[i]; - output.putInt32(this._inBlock[i]); - } - return; + return res ? res.write(chunk) : true } - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; - } + onComplete (trailers) { + const { res } = this - // XOR input with output, write input as partial output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialBlock[i] = input.getInt32() ^ this._outBlock[i]; - this._partialOutput.putInt32(this._partialBlock[i]); - } + removeSignal(this) - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; - } else { - // block complete, update input block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._partialBlock[i]; + if (!res) { + return } - } - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); - } + this.trailers = util.parseHeaders(trailers) - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; + res.end() } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; -}; + onError (err) { + const { res, callback, opaque, body } = this -modes.cfb.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; - } + removeSignal(this) - // encrypt block (CFB always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); + this.factory = null - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output, write input as output - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = input.getInt32(); - output.putInt32(this._inBlock[i] ^ this._outBlock[i]); + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } - return; - } - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; + if (body) { + this.body = null + util.destroy(body, err) + } } +} - // XOR input with output, write input as partial output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialBlock[i] = input.getInt32(); - this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]); +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) } - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; - } else { - // block complete, update input block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._partialBlock[i]; + try { + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } +} - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); - } +module.exports = stream - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; - } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; -}; +/***/ }), -/** Output feedback (OFB) **/ +/***/ 6923: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -modes.ofb = function(options) { - options = options || {}; - this.name = 'OFB'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = null; - this._outBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; -}; +"use strict"; -modes.ofb.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } - // use IV as first input - this._iv = transformIV(options.iv, this.blockSize); - this._inBlock = this._iv.slice(0); - this._partialBytes = 0; -}; -modes.ofb.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(input.length() === 0) { - return true; - } +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) +const { AsyncResource } = __nccwpck_require__(852) +const util = __nccwpck_require__(3983) +const { addSignal, removeSignal } = __nccwpck_require__(7032) +const assert = __nccwpck_require__(9491) - // encrypt block (OFB always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output and update next input - for(var i = 0; i < this._ints; ++i) { - output.putInt32(input.getInt32() ^ this._outBlock[i]); - this._inBlock[i] = this._outBlock[i]; + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') } - return; - } - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; - } + const { signal, opaque, responseHeaders } = opts - // XOR input with output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_UPGRADE') + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + 
this.callback = callback + this.abort = null + this.context = null + + addSignal(this, signal) } - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; - } else { - // block complete, update input block - for(var i = 0; i < this._ints; ++i) { - this._inBlock[i] = this._outBlock[i]; + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() } - } - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); + this.abort = abort + this.context = null } - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; + onHeaders () { + throw new SocketError('bad upgrade', null) } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; -}; - -modes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt; + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this -/** Counter (CTR) **/ + assert.strictEqual(statusCode, 101) -modes.ctr = function(options) { - options = options || {}; - this.name = 'CTR'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = null; - this._outBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; -}; + removeSignal(this) -modes.ctr.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) } - // use IV as first input - this._iv = transformIV(options.iv, this.blockSize); - this._inBlock = this._iv.slice(0); - this._partialBytes = 0; -}; -modes.ctr.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; - } + onError (err) { + const { callback, opaque } = this - // encrypt block (CTR always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); + removeSignal(this) - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(input.getInt32() ^ this._outBlock[i]); - } - } else { - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } + } +} - // XOR input with output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); - } +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } - if(partialBytes > 0) { - // block still incomplete, restore input buffer - input.read -= this.blockSize; + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); - } +module.exports = upgrade - if(partialBytes > 0 && !finish) { - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; - } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; - } +/***/ }), - // block complete, increment counter (input block) - inc32(this._inBlock); -}; +/***/ 4059: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -modes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt; +"use strict"; -/** Galois/Counter Mode (GCM) **/ -modes.gcm = function(options) { - options = options || {}; - this.name = 'GCM'; - this.cipher = options.cipher; - this.blockSize = options.blockSize || 16; - this._ints = this.blockSize / 4; - this._inBlock = new Array(this._ints); - this._outBlock = new Array(this._ints); - this._partialOutput = forge.util.createBuffer(); - this._partialBytes = 0; - - // R is actually this value concatenated with 120 more zero bits, but - // we only XOR against R so the other zeros have no effect -- we just - // apply this value to the first integer in a block - this._R = 0xE1000000; -}; +module.exports.request = __nccwpck_require__(5448) +module.exports.stream = __nccwpck_require__(5395) +module.exports.pipeline = __nccwpck_require__(8752) +module.exports.upgrade = __nccwpck_require__(6923) +module.exports.connect = __nccwpck_require__(9744) -modes.gcm.prototype.start = function(options) { - if(!('iv' in options)) { - throw new Error('Invalid IV parameter.'); - } - // ensure IV is a byte buffer - var iv = forge.util.createBuffer(options.iv); - // no ciphered data processed yet - this._cipherLength = 0; +/***/ }), - // default additional data is none - var additionalData; - if('additionalData' in options) { - additionalData = forge.util.createBuffer(options.additionalData); - } else { - additionalData = forge.util.createBuffer(); - } +/***/ 3858: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // default tag length is 128 bits - if('tagLength' in options) { - this._tagLength = options.tagLength; - } else { - this._tagLength = 128; - } +"use strict"; +// Ported from https://github.com/nodejs/undici/pull/907 - // if tag is given, ensure tag matches tag length - this._tag = null; - if(options.decrypt) { - // save tag to check later - this._tag = forge.util.createBuffer(options.tag).getBytes(); - if(this._tag.length !== (this._tagLength / 8)) { - throw new Error('Authentication tag does not match tag length.'); - } - } - // create tmp storage for hash calculation - this._hashBlock = new Array(this._ints); - // no tag generated yet - this.tag = null; +const assert = __nccwpck_require__(9491) +const { Readable } = __nccwpck_require__(2781) +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(8045) +const util = 
__nccwpck_require__(3983) +const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3983) - // generate hash subkey - // (apply block cipher to "zero" block) - this._hashSubkey = new Array(this._ints); - this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey); +let Blob - // generate table M - // use 4-bit tables (32 component decomposition of a 16 byte value) - // 8-bit tables take more space and are known to have security - // vulnerabilities (in native implementations) - this.componentBits = 4; - this._m = this.generateHashTable(this._hashSubkey, this.componentBits); +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') - // Note: support IV length different from 96 bits? (only supporting - // 96 bits is recommended by NIST SP-800-38D) - // generate J_0 - var ivLength = iv.length(); - if(ivLength === 12) { - // 96-bit IV - this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1]; - } else { - // IV is NOT 96-bits - this._j0 = [0, 0, 0, 0]; - while(iv.length() > 0) { - this._j0 = this.ghash( - this._hashSubkey, this._j0, - [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]); - } - this._j0 = this.ghash( - this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8))); - } - - // generate ICB (initial counter block) - this._inBlock = this._j0.slice(0); - inc32(this._inBlock); - this._partialBytes = 0; - - // consume authentication data - additionalData = forge.util.createBuffer(additionalData); - // save additional data length as a BE 64-bit number - this._aDataLength = from64To32(additionalData.length() * 8); - // pad additional data to 128 bit (16 byte) block size - var overflow = additionalData.length() % this.blockSize; - if(overflow) { - additionalData.fillWithByte(0, this.blockSize - overflow); - } - this._s = [0, 0, 0, 0]; - while(additionalData.length() > 0) { - this._s = this.ghash(this._hashSubkey, this._s, [ - additionalData.getInt32(), - additionalData.getInt32(), - additionalData.getInt32(), - additionalData.getInt32() - ]); - } -}; +const noop = () => {} -modes.gcm.prototype.encrypt = function(input, output, finish) { - // not enough input to encrypt - var inputLength = input.length(); - if(inputLength === 0) { - return true; - } +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. 
+ }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) - // encrypt block - this.cipher.encrypt(this._inBlock, this._outBlock); + this._readableState.dataEmitted = false - // handle full block - if(this._partialBytes === 0 && inputLength >= this.blockSize) { - // XOR input with output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i] ^= input.getInt32()); - } - this._cipherLength += this.blockSize; - } else { - // handle partial block - var partialBytes = (this.blockSize - inputLength) % this.blockSize; - if(partialBytes > 0) { - partialBytes = this.blockSize - partialBytes; - } - - // XOR input with output - this._partialOutput.clear(); - for(var i = 0; i < this._ints; ++i) { - this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); - } - - if(partialBytes <= 0 || finish) { - // handle overflow prior to hashing - if(finish) { - // get block overflow - var overflow = inputLength % this.blockSize; - this._cipherLength += overflow; - // truncate for hash function - this._partialOutput.truncate(this.blockSize - overflow); - } else { - this._cipherLength += this.blockSize; - } + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType - // get output block for hashing - for(var i = 0; i < this._ints; ++i) { - this._outBlock[i] = this._partialOutput.getInt32(); - } - this._partialOutput.read -= this.blockSize; + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false + } + + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this } - // skip any previous partial bytes - if(this._partialBytes > 0) { - this._partialOutput.getBytes(this._partialBytes); + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() } - if(partialBytes > 0 && !finish) { - // block still incomplete, restore input buffer, get partial output, - // and return early - input.read -= this.blockSize; - output.putBytes(this._partialOutput.getBytes( - partialBytes - this._partialBytes)); - this._partialBytes = partialBytes; - return true; + if (err) { + this[kAbort]() } - output.putBytes(this._partialOutput.getBytes( - inputLength - this._partialBytes)); - this._partialBytes = 0; + return super.destroy(err) } - // update hash block S - this._s = this.ghash(this._hashSubkey, this._s, this._outBlock); - - // increment counter (input block) - inc32(this._inBlock); -}; - -modes.gcm.prototype.decrypt = function(input, output, finish) { - // not enough input to decrypt - var inputLength = input.length(); - if(inputLength < this.blockSize && !(finish && inputLength > 0)) { - return true; + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true + } + return super.emit(ev, ...args) } - // encrypt block (GCM always uses encryption mode) - this.cipher.encrypt(this._inBlock, this._outBlock); - - // increment counter (input block) - inc32(this._inBlock); + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } - // update hash block S - this._hashBlock[0] = input.getInt32(); - this._hashBlock[1] = input.getInt32(); - this._hashBlock[2] = input.getInt32(); - this._hashBlock[3] = input.getInt32(); - this._s = this.ghash(this._hashSubkey, this._s, 
this._hashBlock); + addListener (ev, ...args) { + return this.on(ev, ...args) + } - // XOR hash input with output - for(var i = 0; i < this._ints; ++i) { - output.putInt32(this._outBlock[i] ^ this._hashBlock[i]); + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret } - // increment cipher data length - if(inputLength < this.blockSize) { - this._cipherLength += inputLength % this.blockSize; - } else { - this._cipherLength += this.blockSize; + removeListener (ev, ...args) { + return this.off(ev, ...args) } -}; -modes.gcm.prototype.afterFinish = function(output, options) { - var rval = true; + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? super.push(chunk) : true + } + return super.push(chunk) + } - // handle overflow - if(options.decrypt && options.overflow) { - output.truncate(this.blockSize - options.overflow); + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') } - // handle authentication tag - this.tag = forge.util.createBuffer(); + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') + } - // concatenate additional data length with cipher length - var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8)); + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') + } - // include lengths in hash - this._s = this.ghash(this._hashSubkey, this._s, lengths); + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } - // do GCTR(J_0, S) - var tag = []; - this.cipher.encrypt(this._j0, tag); - for(var i = 0; i < this._ints; ++i) { - this.tag.putInt32(this._s[i] ^ tag[i]); + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. + throw new NotSupportedError() } - // trim tag to length - this.tag.truncate(this.tag.length() % (this._tagLength / 8)); + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } - // check authentication tag - if(options.decrypt && this.tag.bytes() !== this._tag) { - rval = false; + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] } - return rval; -}; + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal -/** - * See NIST SP-800-38D 6.3 (Algorithm 1). This function performs Galois - * field multiplication. The field, GF(2^128), is defined by the polynomial: - * - * x^128 + x^7 + x^2 + x + 1 - * - * Which is represented in little-endian binary form as: 11100001 (0xe1). When - * the value of a coefficient is 1, a bit is set. The value R, is the - * concatenation of this value and 120 zero bits, yielding a 128-bit value - * which matches the block size. - * - * This function will multiply two elements (vectors of bytes), X and Y, in - * the field GF(2^128). The result is initialized to zero. 
For each bit of - * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd) - * by the current value of Y. For each bit, the value of Y will be raised by - * a power of x (multiplied by the polynomial x). This can be achieved by - * shifting Y once to the right. If the current value of Y, prior to being - * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial. - * Otherwise, we must divide by R after shifting to find the remainder. - * - * @param x the first block to multiply by the second. - * @param y the second block to multiply by the first. - * - * @return the block result of the multiplication. - */ -modes.gcm.prototype.multiply = function(x, y) { - var z_i = [0, 0, 0, 0]; - var v_i = y.slice(0); + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') + } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) + } + } - // calculate Z_128 (block has 128 bits) - for(var i = 0; i < 128; ++i) { - // if x_i is 0, Z_{i+1} = Z_i (unchanged) - // else Z_{i+1} = Z_i ^ V_i - // get x_i by finding 32-bit int position, then left shift 1 by remainder - var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32)); - if(x_i) { - z_i[0] ^= v_i[0]; - z_i[1] ^= v_i[1]; - z_i[2] ^= v_i[2]; - z_i[3] ^= v_i[3]; + if (this.closed) { + return Promise.resolve(null) } - // if LSB(V_i) is 1, V_i = V_i >> 1 - // else V_i = (V_i >> 1) ^ R - this.pow(v_i, v_i); + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) } +} - return z_i; -}; +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} -modes.gcm.prototype.pow = function(x, out) { - // if LSB(x) is 1, x = x >>> 1 - // else x = (x >>> 1) ^ R - var lsb = x[3] & 1; +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} - // always do x >>> 1: - // starting with the rightmost integer, shift each integer to the right - // one bit, pulling in the bit from the integer to the left as its top - // most bit (do this for the last 3 integers) - for(var i = 3; i > 0; --i) { - out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31); +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') } - // shift the first integer normally - out[0] = x[0] >>> 1; - // if lsb was not set, then polynomial had a degree of 127 and doesn't - // need to divided; otherwise, XOR with R to find the remainder; we only - // need to XOR the first integer since R technically ends w/120 zero bits - if(lsb) { - out[0] ^= this._R; - } -}; + assert(!stream[kConsume]) -modes.gcm.prototype.tableMultiply = function(x) { - // assumes 4-bit tables are used - var z = [0, 0, 0, 0]; - for(var i = 0; i < 32; ++i) { - var idx = (i / 8) | 0; - var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF; - var ah = this._m[i][x_i]; - z[0] ^= ah[0]; - z[1] ^= ah[1]; - z[2] ^= ah[2]; - z[3] ^= ah[3]; - } - return z; -}; + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } -/** - * A continuing version of the GHASH algorithm that operates on a single - * block. The hash block, last hash value (Ym) and the new block to hash - * are given. - * - * @param h the hash block. - * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash. - * @param x the block to hash. - * - * @return the hashed value (Ym). - */ -modes.gcm.prototype.ghash = function(h, y, x) { - y[0] ^= x[0]; - y[1] ^= x[1]; - y[2] ^= x[2]; - y[3] ^= x[3]; - return this.tableMultiply(y); - //return this.multiply(y, h); -}; + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) -/** - * Precomputes a table for multiplying against the hash subkey. This - * mechanism provides a substantial speed increase over multiplication - * performed without a table. The table-based multiplication this table is - * for solves X * H by multiplying each component of X by H and then - * composing the results together using XOR. - * - * This function can be used to generate tables with different bit sizes - * for the components, however, this implementation assumes there are - * 32 components of X (which is a 16 byte vector), therefore each component - * takes 4-bits (so the table is constructed with bits=4). - * - * @param h the hash subkey. - * @param bits the bit size for a component. 
- */ -modes.gcm.prototype.generateHashTable = function(h, bits) { - // TODO: There are further optimizations that would use only the - // first table M_0 (or some variant) along with a remainder table; - // this can be explored in the future - var multiplier = 8 / bits; - var perInt = 4 * multiplier; - var size = 16 * multiplier; - var m = new Array(size); - for(var i = 0; i < size; ++i) { - var tmp = [0, 0, 0, 0]; - var idx = (i / perInt) | 0; - var shft = ((perInt - 1 - (i % perInt)) * bits); - tmp[idx] = (1 << (bits - 1)) << shft; - m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits); - } - return m; -}; + process.nextTick(consumeStart, stream[kConsume]) + }) +} -/** - * Generates a table for multiplying against the hash subkey for one - * particular component (out of all possible component values). - * - * @param mid the pre-multiplied value for the middle key of the table. - * @param bits the bit size for a component. - */ -modes.gcm.prototype.generateSubHashTable = function(mid, bits) { - // compute the table quickly by minimizing the number of - // POW operations -- they only need to be performed for powers of 2, - // all other entries can be composed from those powers using XOR - var size = 1 << bits; - var half = size >>> 1; - var m = new Array(size); - m[half] = mid.slice(0); - var i = half >>> 1; - while(i > 0) { - // raise m0[2 * i] and store in m0[i] - this.pow(m[2 * i], m[i] = []); - i >>= 1; - } - i = 2; - while(i < half) { - for(var j = 1; j < i; ++j) { - var m_i = m[i]; - var m_j = m[j]; - m[i + j] = [ - m_i[0] ^ m_j[0], - m_i[1] ^ m_j[1], - m_i[2] ^ m_j[2], - m_i[3] ^ m_j[3] - ]; - } - i *= 2; - } - m[0] = [0, 0, 0, 0]; - /* Note: We could avoid storing these by doing composition during multiply - calculate top half using composition by speed is preferred. 
*/ - for(i = half + 1; i < size; ++i) { - var c = m[i ^ half]; - m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]]; - } - return m; -}; +function consumeStart (consume) { + if (consume.body === null) { + return + } -/** Utility functions */ + const { _readableState: state } = consume.stream -function transformIV(iv, blockSize) { - if(typeof iv === 'string') { - // convert iv string into byte buffer - iv = forge.util.createBuffer(iv); + for (const chunk of state.buffer) { + consumePush(consume, chunk) } - if(forge.util.isArray(iv) && iv.length > 4) { - // convert iv byte array into byte buffer - var tmp = iv; - iv = forge.util.createBuffer(); - for(var i = 0; i < tmp.length; ++i) { - iv.putByte(tmp[i]); - } + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) } - if(iv.length() < blockSize) { - throw new Error( - 'Invalid IV length; got ' + iv.length() + - ' bytes and expected ' + blockSize + ' bytes.'); + consume.stream.resume() + + while (consume.stream.read() != null) { + // Loop } +} - if(!forge.util.isArray(iv)) { - // convert iv byte buffer into 32-bit integer array - var ints = []; - var blocks = blockSize / 4; - for(var i = 0; i < blocks; ++i) { - ints.push(iv.getInt32()); +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume + + try { + if (type === 'text') { + resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) + + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength + } + + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = (__nccwpck_require__(4300).Blob) + } + resolve(new Blob(body, { type: stream[kContentType] })) } - iv = ints; - } - return iv; + consumeFinish(consume) + } catch (err) { + stream.destroy(err) + } } -function inc32(block) { - // increment last 32 bits of block only - block[block.length - 1] = (block[block.length - 1] + 1) & 0xFFFFFFFF; +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) } -function from64To32(num) { - // convert 64-bit number to two BE Int32s - return [(num / 0x100000000) | 0, num & 0xFFFFFFFF]; +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } + + if (err) { + consume.reject(err) + } else { + consume.resolve() + } + + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null } /***/ }), -/***/ 7157: +/***/ 7474: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * DES (Data Encryption Standard) implementation. - * - * This implementation supports DES as well as 3DES-EDE in ECB and CBC mode. - * It is based on the BSD-licensed implementation by Paul Tero: - * - * Paul Tero, July 2001 - * http://www.tero.co.uk/des/ - * - * Optimised for performance with large blocks by - * Michael Hayworth, November 2001 - * http://www.netdealing.com - * - * THIS SOFTWARE IS PROVIDED "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * @author Stefan Siegl - * @author Dave Longley - * - * Copyright (c) 2012 Stefan Siegl - * Copyright (c) 2012-2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7088); -__nccwpck_require__(873); -__nccwpck_require__(8339); - -/* DES API */ -module.exports = forge.des = forge.des || {}; - -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('DES-', key); - * cipher.start({iv: iv}); - * - * Creates an DES cipher object to encrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as binary-encoded strings of bytes or - * byte buffers. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC' if IV is - * given, 'ECB' if null). - * - * @return the cipher. - */ -forge.des.startEncrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: false, - mode: mode || (iv === null ? 'ECB' : 'CBC') - }); - cipher.start(iv); - return cipher; -}; - -/** - * Deprecated. Instead, use: - * - * var cipher = forge.cipher.createCipher('DES-', key); - * - * Creates an DES cipher object to encrypt data using the given symmetric key. - * - * The key may be given as a binary-encoded string of bytes or a byte buffer. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.des.createEncryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: false, - mode: mode - }); -}; +const assert = __nccwpck_require__(9491) +const { + ResponseStatusCodeError +} = __nccwpck_require__(8045) +const { toUSVString } = __nccwpck_require__(3983) -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('DES-', key); - * decipher.start({iv: iv}); - * - * Creates an DES cipher object to decrypt data using the given symmetric key. - * The output will be stored in the 'output' member of the returned cipher. - * - * The key and iv may be given as binary-encoded strings of bytes or - * byte buffers. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * @param mode the cipher mode to use (default: 'CBC' if IV is - * given, 'ECB' if null). - * - * @return the cipher. - */ -forge.des.startDecrypting = function(key, iv, output, mode) { - var cipher = _createCipher({ - key: key, - output: output, - decrypt: true, - mode: mode || (iv === null ? 
'ECB' : 'CBC') - }); - cipher.start(iv); - return cipher; -}; +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) -/** - * Deprecated. Instead, use: - * - * var decipher = forge.cipher.createDecipher('DES-', key); - * - * Creates an DES cipher object to decrypt data using the given symmetric key. - * - * The key may be given as a binary-encoded string of bytes or a byte buffer. - * - * @param key the symmetric key to use (64 or 192 bits). - * @param mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -forge.des.createDecryptionCipher = function(key, mode) { - return _createCipher({ - key: key, - output: null, - decrypt: true, - mode: mode - }); -}; + let chunks = [] + let limit = 0 -/** - * Creates a new DES cipher algorithm object. - * - * @param name the name of the algorithm. - * @param mode the mode factory function. - * - * @return the DES algorithm object. - */ -forge.des.Algorithm = function(name, mode) { - var self = this; - self.name = name; - self.mode = new mode({ - blockSize: 8, - cipher: { - encrypt: function(inBlock, outBlock) { - return _updateBlock(self._keys, inBlock, outBlock, false); - }, - decrypt: function(inBlock, outBlock) { - return _updateBlock(self._keys, inBlock, outBlock, true); - } + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break } - }); - self._init = false; -}; + } -/** - * Initializes this DES algorithm by expanding its key. - * - * @param options the options to use. - * key the key to use with this algorithm. - * decrypt true if the algorithm should be initialized for decryption, - * false for encryption. - */ -forge.des.Algorithm.prototype.initialize = function(options) { - if(this._init) { - return; + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return } - var key = forge.util.createBuffer(options.key); - if(this.name.indexOf('3DES') === 0) { - if(key.length() !== 24) { - throw new Error('Invalid Triple-DES key size: ' + key.length() * 8); + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return } + } catch (err) { + // Process in a fallback if error } - // do key expansion to 16 or 48 subkeys (single or triple DES) - this._keys = _createKeys(key); - this._init = true; -}; + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers)) +} -/** Register DES algorithms **/ +module.exports = { getResolveErrorBodyCallback } -registerAlgorithm('DES-ECB', forge.cipher.modes.ecb); -registerAlgorithm('DES-CBC', forge.cipher.modes.cbc); -registerAlgorithm('DES-CFB', forge.cipher.modes.cfb); -registerAlgorithm('DES-OFB', forge.cipher.modes.ofb); -registerAlgorithm('DES-CTR', forge.cipher.modes.ctr); -registerAlgorithm('3DES-ECB', forge.cipher.modes.ecb); -registerAlgorithm('3DES-CBC', forge.cipher.modes.cbc); -registerAlgorithm('3DES-CFB', forge.cipher.modes.cfb); -registerAlgorithm('3DES-OFB', forge.cipher.modes.ofb); -registerAlgorithm('3DES-CTR', forge.cipher.modes.ctr); +/***/ }), -function registerAlgorithm(name, mode) { - var factory = function() { - return new forge.des.Algorithm(name, mode); - }; - forge.cipher.registerAlgorithm(name, factory); -} +/***/ 7931: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** DES implementation **/ +"use strict"; -var spfunction1 = [0x1010400,0,0x10000,0x1010404,0x1010004,0x10404,0x4,0x10000,0x400,0x1010400,0x1010404,0x400,0x1000404,0x1010004,0x1000000,0x4,0x404,0x1000400,0x1000400,0x10400,0x10400,0x1010000,0x1010000,0x1000404,0x10004,0x1000004,0x1000004,0x10004,0,0x404,0x10404,0x1000000,0x10000,0x1010404,0x4,0x1010000,0x1010400,0x1000000,0x1000000,0x400,0x1010004,0x10000,0x10400,0x1000004,0x400,0x4,0x1000404,0x10404,0x1010404,0x10004,0x1010000,0x1000404,0x1000004,0x404,0x10404,0x1010400,0x404,0x1000400,0x1000400,0,0x10004,0x10400,0,0x1010004]; -var spfunction2 = [-0x7fef7fe0,-0x7fff8000,0x8000,0x108020,0x100000,0x20,-0x7fefffe0,-0x7fff7fe0,-0x7fffffe0,-0x7fef7fe0,-0x7fef8000,-0x80000000,-0x7fff8000,0x100000,0x20,-0x7fefffe0,0x108000,0x100020,-0x7fff7fe0,0,-0x80000000,0x8000,0x108020,-0x7ff00000,0x100020,-0x7fffffe0,0,0x108000,0x8020,-0x7fef8000,-0x7ff00000,0x8020,0,0x108020,-0x7fefffe0,0x100000,-0x7fff7fe0,-0x7ff00000,-0x7fef8000,0x8000,-0x7ff00000,-0x7fff8000,0x20,-0x7fef7fe0,0x108020,0x20,0x8000,-0x80000000,0x8020,-0x7fef8000,0x100000,-0x7fffffe0,0x100020,-0x7fff7fe0,-0x7fffffe0,0x100020,0x108000,0,-0x7fff8000,0x8020,-0x80000000,-0x7fefffe0,-0x7fef7fe0,0x108000]; -var spfunction3 = [0x208,0x8020200,0,0x8020008,0x8000200,0,0x20208,0x8000200,0x20008,0x8000008,0x8000008,0x20000,0x8020208,0x20008,0x8020000,0x208,0x8000000,0x8,0x8020200,0x200,0x20200,0x8020000,0x8020008,0x20208,0x8000208,0x20200,0x20000,0x8000208,0x8,0x8020208,0x200,0x8000000,0x8020200,0x8000000,0x20008,0x208,0x20000,0x8020200,0x8000200,0,0x200,0x20008,0x8020208,0x8000200,0x8000008,0x200,0,0x8020008,0x8000208,0x20000,0x8000000,0x8020208,0x8,0x20208,0x20200,0x8000008,0x8020000,0x8000208,0x208,0x8020000,0x20208,0x8,0x8020008,0x20200]; -var spfunction4 = [0x802001,0x2081,0x2081,0x80,0x802080,0x800081,0x800001,0x2001,0,0x802000,0x802000,0x802081,0x81,0,0x800080,0x800001,0x1,0x2000,0x800000,0x802001,0x80,0x800000,0x2001,0x2080,0x800081,0x1,0x2080,0x800080,0x2000,0x802080,0x802081,0x81,0x800080,0x800001,0x802000,0x802081,0x81,0,0,0x802000,0x2080,0x800080,0x800081,0x1,0x802001,0x2081,0x2081,0x80,0x802081,0x81,0x1,0x2000,0x800001,0x2001,0x802080,0x800081,0x2001,0x2080,0x800000,0x802001,0x80,0x800000,0x2000,0x802080]; -var spfunction5 = 
[0x100,0x2080100,0x2080000,0x42000100,0x80000,0x100,0x40000000,0x2080000,0x40080100,0x80000,0x2000100,0x40080100,0x42000100,0x42080000,0x80100,0x40000000,0x2000000,0x40080000,0x40080000,0,0x40000100,0x42080100,0x42080100,0x2000100,0x42080000,0x40000100,0,0x42000000,0x2080100,0x2000000,0x42000000,0x80100,0x80000,0x42000100,0x100,0x2000000,0x40000000,0x2080000,0x42000100,0x40080100,0x2000100,0x40000000,0x42080000,0x2080100,0x40080100,0x100,0x2000000,0x42080000,0x42080100,0x80100,0x42000000,0x42080100,0x2080000,0,0x40080000,0x42000000,0x80100,0x2000100,0x40000100,0x80000,0,0x40080000,0x2080100,0x40000100]; -var spfunction6 = [0x20000010,0x20400000,0x4000,0x20404010,0x20400000,0x10,0x20404010,0x400000,0x20004000,0x404010,0x400000,0x20000010,0x400010,0x20004000,0x20000000,0x4010,0,0x400010,0x20004010,0x4000,0x404000,0x20004010,0x10,0x20400010,0x20400010,0,0x404010,0x20404000,0x4010,0x404000,0x20404000,0x20000000,0x20004000,0x10,0x20400010,0x404000,0x20404010,0x400000,0x4010,0x20000010,0x400000,0x20004000,0x20000000,0x4010,0x20000010,0x20404010,0x404000,0x20400000,0x404010,0x20404000,0,0x20400010,0x10,0x4000,0x20400000,0x404010,0x4000,0x400010,0x20004010,0,0x20404000,0x20000000,0x400010,0x20004010]; -var spfunction7 = [0x200000,0x4200002,0x4000802,0,0x800,0x4000802,0x200802,0x4200800,0x4200802,0x200000,0,0x4000002,0x2,0x4000000,0x4200002,0x802,0x4000800,0x200802,0x200002,0x4000800,0x4000002,0x4200000,0x4200800,0x200002,0x4200000,0x800,0x802,0x4200802,0x200800,0x2,0x4000000,0x200800,0x4000000,0x200800,0x200000,0x4000802,0x4000802,0x4200002,0x4200002,0x2,0x200002,0x4000000,0x4000800,0x200000,0x4200800,0x802,0x200802,0x4200800,0x802,0x4000002,0x4200802,0x4200000,0x200800,0,0x2,0x4200802,0,0x200802,0x4200000,0x800,0x4000002,0x4000800,0x800,0x200002]; -var spfunction8 = [0x10001040,0x1000,0x40000,0x10041040,0x10000000,0x10001040,0x40,0x10000000,0x40040,0x10040000,0x10041040,0x41000,0x10041000,0x41040,0x1000,0x40,0x10040000,0x10000040,0x10001000,0x1040,0x41000,0x40040,0x10040040,0x10041000,0x1040,0,0,0x10040040,0x10000040,0x10001000,0x41040,0x40000,0x41040,0x40000,0x10041000,0x1000,0x40,0x10040040,0x1000,0x41040,0x10001000,0x40,0x10000040,0x10040000,0x10040040,0x10000000,0x40000,0x10001040,0,0x10041040,0x40040,0x10000040,0x10040000,0x10001000,0x10001040,0,0x10041040,0x41000,0x41000,0x1040,0x1040,0x40040,0x10000000,0x10041000]; -/** - * Create necessary sub keys. - * - * @param key the 64-bit or 192-bit key. - * - * @return the expanded keys. 
- */ -function _createKeys(key) { - var pc2bytes0 = [0,0x4,0x20000000,0x20000004,0x10000,0x10004,0x20010000,0x20010004,0x200,0x204,0x20000200,0x20000204,0x10200,0x10204,0x20010200,0x20010204], - pc2bytes1 = [0,0x1,0x100000,0x100001,0x4000000,0x4000001,0x4100000,0x4100001,0x100,0x101,0x100100,0x100101,0x4000100,0x4000101,0x4100100,0x4100101], - pc2bytes2 = [0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808,0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808], - pc2bytes3 = [0,0x200000,0x8000000,0x8200000,0x2000,0x202000,0x8002000,0x8202000,0x20000,0x220000,0x8020000,0x8220000,0x22000,0x222000,0x8022000,0x8222000], - pc2bytes4 = [0,0x40000,0x10,0x40010,0,0x40000,0x10,0x40010,0x1000,0x41000,0x1010,0x41010,0x1000,0x41000,0x1010,0x41010], - pc2bytes5 = [0,0x400,0x20,0x420,0,0x400,0x20,0x420,0x2000000,0x2000400,0x2000020,0x2000420,0x2000000,0x2000400,0x2000020,0x2000420], - pc2bytes6 = [0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002,0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002], - pc2bytes7 = [0,0x10000,0x800,0x10800,0x20000000,0x20010000,0x20000800,0x20010800,0x20000,0x30000,0x20800,0x30800,0x20020000,0x20030000,0x20020800,0x20030800], - pc2bytes8 = [0,0x40000,0,0x40000,0x2,0x40002,0x2,0x40002,0x2000000,0x2040000,0x2000000,0x2040000,0x2000002,0x2040002,0x2000002,0x2040002], - pc2bytes9 = [0,0x10000000,0x8,0x10000008,0,0x10000000,0x8,0x10000008,0x400,0x10000400,0x408,0x10000408,0x400,0x10000400,0x408,0x10000408], - pc2bytes10 = [0,0x20,0,0x20,0x100000,0x100020,0x100000,0x100020,0x2000,0x2020,0x2000,0x2020,0x102000,0x102020,0x102000,0x102020], - pc2bytes11 = [0,0x1000000,0x200,0x1000200,0x200000,0x1200000,0x200200,0x1200200,0x4000000,0x5000000,0x4000200,0x5000200,0x4200000,0x5200000,0x4200200,0x5200200], - pc2bytes12 = [0,0x1000,0x8000000,0x8001000,0x80000,0x81000,0x8080000,0x8081000,0x10,0x1010,0x8000010,0x8001010,0x80010,0x81010,0x8080010,0x8081010], - pc2bytes13 = [0,0x4,0x100,0x104,0,0x4,0x100,0x104,0x1,0x5,0x101,0x105,0x1,0x5,0x101,0x105]; - - // how many iterations (1 for des, 3 for triple des) - // changed by Paul 16/6/2007 to use Triple DES for 9+ byte keys - var iterations = key.length() > 8 ? 
3 : 1; - - // stores the return keys - var keys = []; +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = __nccwpck_require__(8045) +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = __nccwpck_require__(3198) +const Pool = __nccwpck_require__(4634) +const { kUrl, kInterceptors } = __nccwpck_require__(2785) +const { parseOrigin } = __nccwpck_require__(3983) +const kFactory = Symbol('factory') - // now define the left shifts which need to be done - var shifts = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0]; +const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') - var n = 0, tmp; - for(var j = 0; j < iterations; j++) { - var left = key.getInt32(); - var right = key.getInt32(); +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} - tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; - right ^= tmp; - left ^= (tmp << 4); +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} - tmp = ((right >>> -16) ^ left) & 0x0000ffff; - left ^= tmp; - right ^= (tmp << -16); +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { + super() - tmp = ((left >>> 2) ^ right) & 0x33333333; - right ^= tmp; - left ^= (tmp << 2); + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 - tmp = ((right >>> -16) ^ left) & 0x0000ffff; - left ^= tmp; - right ^= (tmp << -16); + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } - tmp = ((right >>> 8) ^ left) & 0x00ff00ff; - left ^= tmp; - right ^= (tmp << 8); + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? 
opts.interceptors.BalancedPool + : [] + this[kFactory] = factory - // right needs to be shifted and OR'd with last four bits of left - tmp = (left << 8) | ((right >>> 20) & 0x000000f0); + for (const upstream of upstreams) { + this.addUpstream(upstream) + } + this._updateBalancedPoolStats() + } - // left needs to be put upside down - left = ((right << 24) | ((right << 8) & 0xff0000) | - ((right >>> 8) & 0xff00) | ((right >>> 24) & 0xf0)); - right = tmp; + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin - // now go through and perform these shifts on the left and right keys - for(var i = 0; i < shifts.length; ++i) { - //shift the keys either one or two bits to the left - if(shifts[i]) { - left = (left << 2) | (left >>> 26); - right = (right << 2) | (right >>> 26); - } else { - left = (left << 1) | (left >>> 27); - right = (right << 1) | (right >>> 27); - } - left &= -0xf; - right &= -0xf; - - // now apply PC-2, in such a way that E is easier when encrypting or - // decrypting this conversion will look like PC-2 except only the last 6 - // bits of each byte are used rather than 48 consecutive bits and the - // order of lines will be according to how the S selection functions will - // be applied: S2, S4, S6, S8, S1, S3, S5, S7 - var lefttmp = ( - pc2bytes0[left >>> 28] | pc2bytes1[(left >>> 24) & 0xf] | - pc2bytes2[(left >>> 20) & 0xf] | pc2bytes3[(left >>> 16) & 0xf] | - pc2bytes4[(left >>> 12) & 0xf] | pc2bytes5[(left >>> 8) & 0xf] | - pc2bytes6[(left >>> 4) & 0xf]); - var righttmp = ( - pc2bytes7[right >>> 28] | pc2bytes8[(right >>> 24) & 0xf] | - pc2bytes9[(right >>> 20) & 0xf] | pc2bytes10[(right >>> 16) & 0xf] | - pc2bytes11[(right >>> 12) & 0xf] | pc2bytes12[(right >>> 8) & 0xf] | - pc2bytes13[(right >>> 4) & 0xf]); - tmp = ((righttmp >>> 16) ^ lefttmp) & 0x0000ffff; - keys[n++] = lefttmp ^ tmp; - keys[n++] = righttmp ^ (tmp << 16); + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this } - } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) - return keys; -} + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) -/** - * Updates a single block (1 byte) using DES. The update will either - * encrypt or decrypt the block. - * - * @param keys the expanded keys. - * @param input the input block (an array of 32-bit words). - * @param output the updated output block. - * @param decrypt true to decrypt the block, false to encrypt it. - */ -function _updateBlock(keys, input, output, decrypt) { - // set up loops for single or triple DES - var iterations = keys.length === 32 ? 3 : 9; - var looping; - if(iterations === 3) { - looping = decrypt ? [30, -2, -2] : [0, 32, 2]; - } else { - looping = (decrypt ? - [94, 62, -2, 32, 64, 2, 30, -2, -2] : - [0, 32, 2, 62, 30, -2, 64, 96, 2]); - } + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + }) - var tmp; + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. 
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + } + }) - var left = input[0]; - var right = input[1]; + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] + } - // first each 64 bit chunk of the message must be permuted according to IP - tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; - right ^= tmp; - left ^= (tmp << 4); + this._updateBalancedPoolStats() - tmp = ((left >>> 16) ^ right) & 0x0000ffff; - right ^= tmp; - left ^= (tmp << 16); + return this + } - tmp = ((right >>> 2) ^ left) & 0x33333333; - left ^= tmp; - right ^= (tmp << 2); + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + } - tmp = ((right >>> 8) ^ left) & 0x00ff00ff; - left ^= tmp; - right ^= (tmp << 8); + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) - // rotate left 1 bit - left = ((left << 1) | (left >>> 31)); - right = ((right << 1) | (right >>> 31)); + if (pool) { + this[kRemoveClient](pool) + } - for(var j = 0; j < iterations; j += 3) { - var endloop = looping[j + 1]; - var loopinc = looping[j + 2]; + return this + } - // now go through and perform the encryption or decryption - for(var i = looping[j]; i != endloop; i += loopinc) { - var right1 = right ^ keys[i]; - var right2 = ((right >>> 4) | (right << 28)) ^ keys[i + 1]; + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) + } - // passing these bytes through the S selection functions - tmp = left; - left = right; - right = tmp ^ ( - spfunction2[(right1 >>> 24) & 0x3f] | - spfunction4[(right1 >>> 16) & 0x3f] | - spfunction6[(right1 >>> 8) & 0x3f] | - spfunction8[right1 & 0x3f] | - spfunction1[(right2 >>> 24) & 0x3f] | - spfunction3[(right2 >>> 16) & 0x3f] | - spfunction5[(right2 >>> 8) & 0x3f] | - spfunction7[right2 & 0x3f]); + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. 
+ if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() } - // unreverse left and right - tmp = left; - left = right; - right = tmp; - } - // rotate right 1 bit - left = ((left >>> 1) | (left << 31)); - right = ((right >>> 1) | (right << 31)); + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) - // now perform IP-1, which is IP in the opposite direction - tmp = ((left >>> 1) ^ right) & 0x55555555; - right ^= tmp; - left ^= (tmp << 1); + if (!dispatcher) { + return + } - tmp = ((right >>> 8) ^ left) & 0x00ff00ff; - left ^= tmp; - right ^= (tmp << 8); + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) - tmp = ((right >>> 2) ^ left) & 0x33333333; - left ^= tmp; - right ^= (tmp << 2); + if (allClientsBusy) { + return + } - tmp = ((left >>> 16) ^ right) & 0x0000ffff; - right ^= tmp; - left ^= (tmp << 16); + let counter = 0 - tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; - right ^= tmp; - left ^= (tmp << 4); + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) - output[0] = left; - output[1] = right; -} + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] -/** - * Deprecated. Instead, use: - * - * forge.cipher.createCipher('DES-', key); - * forge.cipher.createDecipher('DES-', key); - * - * Creates a deprecated DES cipher object. This object's mode will default to - * CBC (cipher-block-chaining). - * - * The key may be given as a binary-encoded string of bytes or a byte buffer. - * - * @param options the options to use. - * key the symmetric key to use (64 or 192 bits). - * output the buffer to write to. - * decrypt true for decryption, false for encryption. - * mode the cipher mode to use (default: 'CBC'). - * - * @return the cipher. - */ -function _createCipher(options) { - options = options || {}; - var mode = (options.mode || 'CBC').toUpperCase(); - var algorithm = 'DES-' + mode; + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] + } - var cipher; - if(options.decrypt) { - cipher = forge.cipher.createDecipher(algorithm, options.key); - } else { - cipher = forge.cipher.createCipher(algorithm, options.key); - } + // decrease the current weight every `this[kClients].length`. + if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. 
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] - // backwards compatible start API - var start = cipher.start; - cipher.start = function(iv, options) { - // backwards compatibility: support second arg as output buffer - var output = null; - if(options instanceof forge.util.ByteBuffer) { - output = options; - options = {}; + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } + } + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool + } } - options = options || {}; - options.output = output; - options.iv = iv; - start.call(cipher, options); - }; - return cipher; + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] + } } +module.exports = BalancedPool + /***/ }), -/***/ 0: +/***/ 6101: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * JavaScript implementation of Ed25519. - * - * Copyright (c) 2017-2019 Digital Bazaar, Inc. - * - * This implementation is based on the most excellent TweetNaCl which is - * in the public domain. Many thanks to its contributors: - * - * https://github.com/dchest/tweetnacl-js - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7052); -__nccwpck_require__(7821); -__nccwpck_require__(9542); -__nccwpck_require__(8339); -var asn1Validator = __nccwpck_require__(9414); -var publicKeyValidator = asn1Validator.publicKeyValidator; -var privateKeyValidator = asn1Validator.privateKeyValidator; - -if(typeof BigInteger === 'undefined') { - var BigInteger = forge.jsbn.BigInteger; -} - -var ByteBuffer = forge.util.ByteBuffer; -var NativeBuffer = typeof Buffer === 'undefined' ? Uint8Array : Buffer; - -/* - * Ed25519 algorithms, see RFC 8032: - * https://tools.ietf.org/html/rfc8032 - */ -forge.pki = forge.pki || {}; -module.exports = forge.pki.ed25519 = forge.ed25519 = forge.ed25519 || {}; -var ed25519 = forge.ed25519; - -ed25519.constants = {}; -ed25519.constants.PUBLIC_KEY_BYTE_LENGTH = 32; -ed25519.constants.PRIVATE_KEY_BYTE_LENGTH = 64; -ed25519.constants.SEED_BYTE_LENGTH = 32; -ed25519.constants.SIGN_BYTE_LENGTH = 64; -ed25519.constants.HASH_BYTE_LENGTH = 64; - -ed25519.generateKeyPair = function(options) { - options = options || {}; - var seed = options.seed; - if(seed === undefined) { - // generate seed - seed = forge.random.getBytesSync(ed25519.constants.SEED_BYTE_LENGTH); - } else if(typeof seed === 'string') { - if(seed.length !== ed25519.constants.SEED_BYTE_LENGTH) { - throw new TypeError( - '"seed" must be ' + ed25519.constants.SEED_BYTE_LENGTH + - ' bytes in length.'); - } - } else if(!(seed instanceof Uint8Array)) { - throw new TypeError( - '"seed" must be a node.js Buffer, Uint8Array, or a binary string.'); - } +"use strict"; - seed = messageToNativeBuffer({message: seed, encoding: 'binary'}); - var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); - var sk = new NativeBuffer(ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); - for(var i = 0; i < 32; ++i) { - sk[i] = seed[i]; - } - crypto_sign_keypair(pk, sk); - return {publicKey: pk, privateKey: sk}; -}; +const { kConstruct } = __nccwpck_require__(9174) +const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(2396) +const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3983) +const { kHeadersList } = __nccwpck_require__(2785) +const { webidl } = __nccwpck_require__(1744) +const { Response, cloneResponse } = __nccwpck_require__(7823) +const { Request } = 
__nccwpck_require__(8359) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) +const { fetching } = __nccwpck_require__(4881) +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(2538) +const assert = __nccwpck_require__(9491) +const { getGlobalDispatcher } = __nccwpck_require__(1892) /** - * Converts a private key from a RFC8410 ASN.1 encoding. - * - * @param obj - The asn1 representation of a private key. - * - * @returns {Object} keyInfo - The key information. - * @returns {Buffer|Uint8Array} keyInfo.privateKeyBytes - 32 private key bytes. + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options */ -ed25519.privateKeyFromAsn1 = function(obj) { - var capture = {}; - var errors = []; - var valid = forge.asn1.validate(obj, privateKeyValidator, capture, errors); - if(!valid) { - var error = new Error('Invalid Key.'); - error.errors = errors; - throw error; - } - var oid = forge.asn1.derToOid(capture.privateKeyOid); - var ed25519Oid = forge.oids.EdDSA25519; - if(oid !== ed25519Oid) { - throw new Error('Invalid OID "' + oid + '"; OID must be "' + - ed25519Oid + '".'); - } - var privateKey = capture.privateKey; - // manually extract the private key bytes from nested octet string, see FIXME: - // https://github.com/digitalbazaar/forge/blob/master/lib/asn1.js#L542 - var privateKeyBytes = messageToNativeBuffer({ - message: forge.asn1.fromDer(privateKey).value, - encoding: 'binary' - }); - // TODO: RFC8410 specifies a format for encoding the public key bytes along - // with the private key bytes. `publicKeyBytes` can be returned in the - // future. https://tools.ietf.org/html/rfc8410#section-10.3 - return {privateKeyBytes: privateKeyBytes}; -}; /** - * Converts a public key from a RFC8410 ASN.1 encoding. - * - * @param obj - The asn1 representation of a public key. - * - * @return {Buffer|Uint8Array} - 32 public key bytes. 
+ * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList */ -ed25519.publicKeyFromAsn1 = function(obj) { - // get SubjectPublicKeyInfo - var capture = {}; - var errors = []; - var valid = forge.asn1.validate(obj, publicKeyValidator, capture, errors); - if(!valid) { - var error = new Error('Invalid Key.'); - error.errors = errors; - throw error; - } - var oid = forge.asn1.derToOid(capture.publicKeyOid); - var ed25519Oid = forge.oids.EdDSA25519; - if(oid !== ed25519Oid) { - throw new Error('Invalid OID "' + oid + '"; OID must be "' + - ed25519Oid + '".'); - } - var publicKeyBytes = capture.ed25519PublicKey; - if(publicKeyBytes.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) { - throw new Error('Key length is invalid.'); - } - return messageToNativeBuffer({ - message: publicKeyBytes, - encoding: 'binary' - }); -}; -ed25519.publicKeyFromPrivateKey = function(options) { - options = options || {}; - var privateKey = messageToNativeBuffer({ - message: options.privateKey, encoding: 'binary' - }); - if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) { - throw new TypeError( - '"options.privateKey" must have a byte length of ' + - ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); - } +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList - var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); - for(var i = 0; i < pk.length; ++i) { - pk[i] = privateKey[32 + i]; - } - return pk; -}; + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } -ed25519.sign = function(options) { - options = options || {}; - var msg = messageToNativeBuffer(options); - var privateKey = messageToNativeBuffer({ - message: options.privateKey, - encoding: 'binary' - }); - if(privateKey.length === ed25519.constants.SEED_BYTE_LENGTH) { - var keyPair = ed25519.generateKeyPair({seed: privateKey}); - privateKey = keyPair.privateKey; - } else if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) { - throw new TypeError( - '"options.privateKey" must have a byte length of ' + - ed25519.constants.SEED_BYTE_LENGTH + ' or ' + - ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); + this.#relevantRequestResponseList = arguments[1] } - var signedMsg = new NativeBuffer( - ed25519.constants.SIGN_BYTE_LENGTH + msg.length); - crypto_sign(signedMsg, msg, msg.length, privateKey); + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) - var sig = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH); - for(var i = 0; i < sig.length; ++i) { - sig[i] = signedMsg[i]; - } - return sig; -}; + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) -ed25519.verify = function(options) { - options = options || {}; - var msg = messageToNativeBuffer(options); - if(options.signature === undefined) { - throw new TypeError( - '"options.signature" must be a node.js Buffer, a Uint8Array, a forge ' + - 'ByteBuffer, or a binary string.'); - } - var sig = messageToNativeBuffer({ - message: options.signature, - encoding: 'binary' - }); - if(sig.length !== ed25519.constants.SIGN_BYTE_LENGTH) { - throw new TypeError( - '"options.signature" must have a byte length of ' + - ed25519.constants.SIGN_BYTE_LENGTH); - } - var publicKey = messageToNativeBuffer({ - message: options.publicKey, - 
encoding: 'binary' - }); - if(publicKey.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) { - throw new TypeError( - '"options.publicKey" must have a byte length of ' + - ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); - } + const p = await this.matchAll(request, options) - var sm = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length); - var m = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length); - var i; - for(i = 0; i < ed25519.constants.SIGN_BYTE_LENGTH; ++i) { - sm[i] = sig[i]; - } - for(i = 0; i < msg.length; ++i) { - sm[i + ed25519.constants.SIGN_BYTE_LENGTH] = msg[i]; - } - return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); -}; + if (p.length === 0) { + return + } -function messageToNativeBuffer(options) { - var message = options.message; - if(message instanceof Uint8Array || message instanceof NativeBuffer) { - return message; + return p[0] } - var encoding = options.encoding; - if(message === undefined) { - if(options.md) { - // TODO: more rigorous validation that `md` is a MessageDigest - message = options.md.digest().getBytes(); - encoding = 'binary'; - } else { - throw new TypeError('"options.message" or "options.md" not specified.'); - } - } - - if(typeof message === 'string' && !encoding) { - throw new TypeError('"options.encoding" must be "binary" or "utf8".'); - } - - if(typeof message === 'string') { - if(typeof Buffer !== 'undefined') { - return Buffer.from(message, encoding); - } - message = new ByteBuffer(message, encoding); - } else if(!(message instanceof ByteBuffer)) { - throw new TypeError( - '"options.message" must be a node.js Buffer, a Uint8Array, a forge ' + - 'ByteBuffer, or a string with "options.encoding" specifying its ' + - 'encoding.'); - } - - // convert to native buffer - var buffer = new NativeBuffer(message.length()); - for(var i = 0; i < buffer.length; ++i) { - buffer[i] = message.at(i); - } - return buffer; -} - -var gf0 = gf(); -var gf1 = gf([1]); -var D = gf([ - 0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, - 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]); -var D2 = gf([ - 0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, - 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]); -var X = gf([ - 0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, - 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]); -var Y = gf([ - 0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, - 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]); -var L = new Float64Array([ - 0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, - 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); -var I = gf([ - 0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, - 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); - -// TODO: update forge buffer implementation to use `Buffer` or `Uint8Array`, -// whichever is available, to improve performance -function sha512(msg, msgLen) { - // Note: `out` and `msg` are NativeBuffer - var md = forge.md.sha512.create(); - var buffer = new ByteBuffer(msg); - md.update(buffer.getBytes(msgLen), 'binary'); - var hash = md.digest().getBytes(); - if(typeof Buffer !== 'undefined') { - return Buffer.from(hash, 'binary'); - } - var out = new NativeBuffer(ed25519.constants.HASH_BYTE_LENGTH); - for(var i = 0; i < 64; ++i) { - out[i] = hash.charCodeAt(i); - } - return out; -} + async matchAll (request = undefined, options = {}) { + 
webidl.brandCheck(this, Cache) -function crypto_sign_keypair(pk, sk) { - var p = [gf(), gf(), gf(), gf()]; - var i; + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) - var d = sha512(sk, 32); - d[0] &= 248; - d[31] &= 127; - d[31] |= 64; + // 1. + let r = null - scalarbase(p, d); - pack(pk, p); + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] - for(i = 0; i < 32; ++i) { - sk[i + 32] = pk[i]; - } - return 0; -} + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] + } + } -// Note: difference from C - smlen returned, not passed as argument. -function crypto_sign(sm, m, n, sk) { - var i, j, x = new Float64Array(64); - var p = [gf(), gf(), gf(), gf()]; + // 5. + // 5.1 + const responses = [] - var d = sha512(sk, 32); - d[0] &= 248; - d[31] &= 127; - d[31] |= 64; + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) - var smlen = n + 64; - for(i = 0; i < n; ++i) { - sm[64 + i] = m[i]; - } - for(i = 0; i < 32; ++i) { - sm[32 + i] = d[32 + i]; - } + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) + } + } - var r = sha512(sm.subarray(32), n + 32); - reduce(r); - scalarbase(p, r); - pack(sm, p); + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! - for(i = 32; i < 64; ++i) { - sm[i] = sk[i]; - } - var h = sha512(sm, n + 64); - reduce(h); + // 5.5.1 + const responseList = [] - for(i = 32; i < 64; ++i) { - x[i] = 0; - } - for(i = 0; i < 32; ++i) { - x[i] = r[i]; - } - for(i = 0; i < 32; ++i) { - for(j = 0; j < 32; j++) { - x[i + j] += h[i] * d[j]; + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + + responseList.push(responseObject) } + + // 6. + return Object.freeze(responseList) } - modL(sm.subarray(32), x); - return smlen; -} + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) -function crypto_sign_open(m, sm, n, pk) { - var i, mlen; - var t = new NativeBuffer(32); - var p = [gf(), gf(), gf(), gf()], - q = [gf(), gf(), gf(), gf()]; + request = webidl.converters.RequestInfo(request) - mlen = -1; - if(n < 64) { - return -1; - } + // 1. + const requests = [request] - if(unpackneg(q, pk)) { - return -1; - } + // 2. + const responseArrayPromise = this.addAll(requests) - for(i = 0; i < n; ++i) { - m[i] = sm[i]; - } - for(i = 0; i < 32; ++i) { - m[i + 32] = pk[i]; + // 3. 
+ return await responseArrayPromise } - var h = sha512(m, n); - reduce(h); - scalarmult(p, q, h); - scalarbase(q, sm.subarray(32)); - add(p, q); - pack(t, p); - - n -= 64; - if(crypto_verify_32(sm, 0, t, 0)) { - for(i = 0; i < n; ++i) { - m[i] = 0; - } - return -1; - } + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) - for(i = 0; i < n; ++i) { - m[i] = sm[i + 64]; - } - mlen = n; - return mlen; -} + requests = webidl.converters['sequence'](requests) -function modL(r, x) { - var carry, i, j, k; - for(i = 63; i >= 32; --i) { - carry = 0; - for(j = i - 32, k = i - 12; j < k; ++j) { - x[j] += carry - 16 * x[i] * L[j - (i - 32)]; - carry = (x[j] + 128) >> 8; - x[j] -= carry * 256; - } - x[j] += carry; - x[i] = 0; - } - carry = 0; - for(j = 0; j < 32; ++j) { - x[j] += carry - (x[31] >> 4) * L[j]; - carry = x[j] >> 8; - x[j] &= 255; - } - for(j = 0; j < 32; ++j) { - x[j] -= carry * L[j]; - } - for(i = 0; i < 32; ++i) { - x[i + 1] += x[i] >> 8; - r[i] = x[i] & 255; - } -} + // 1. + const responsePromises = [] -function reduce(r) { - var x = new Float64Array(64); - for(var i = 0; i < 64; ++i) { - x[i] = r[i]; - r[i] = 0; - } - modL(r, x); -} + // 2. + const requestList = [] -function add(p, q) { - var a = gf(), b = gf(), c = gf(), - d = gf(), e = gf(), f = gf(), - g = gf(), h = gf(), t = gf(); + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } - Z(a, p[1], p[0]); - Z(t, q[1], q[0]); - M(a, a, t); - A(b, p[0], p[1]); - A(t, q[0], q[1]); - M(b, b, t); - M(c, p[3], q[3]); - M(c, c, D2); - M(d, p[2], q[2]); - A(d, d, d); - Z(e, b, a); - Z(f, d, c); - A(g, d, c); - A(h, b, a); + // 3.1 + const r = request[kState] - M(p[0], e, f); - M(p[1], h, g); - M(p[2], g, f); - M(p[3], e, h); -} + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } + } -function cswap(p, q, b) { - for(var i = 0; i < 4; ++i) { - sel25519(p[i], q[i], b); - } -} + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] -function pack(r, p) { - var tx = gf(), ty = gf(), zi = gf(); - inv25519(zi, p[2]); - M(tx, p[0], zi); - M(ty, p[1], zi); - pack25519(r, ty); - r[31] ^= par25519(tx) << 7; -} + // 5. 
+ for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] -function pack25519(o, n) { - var i, j, b; - var m = gf(), t = gf(); - for(i = 0; i < 16; ++i) { - t[i] = n[i]; - } - car25519(t); - car25519(t); - car25519(t); - for(j = 0; j < 2; ++j) { - m[0] = t[0] - 0xffed; - for(i = 1; i < 15; ++i) { - m[i] = t[i] - 0xffff - ((m[i - 1] >> 16) & 1); - m[i-1] &= 0xffff; - } - m[15] = t[15] - 0x7fff - ((m[14] >> 16) & 1); - b = (m[15] >> 16) & 1; - m[14] &= 0xffff; - sel25519(t, m, 1 - b); - } - for (i = 0; i < 16; i++) { - o[2 * i] = t[i] & 0xff; - o[2 * i + 1] = t[i] >> 8; - } -} - -function unpackneg(r, p) { - var t = gf(), chk = gf(), num = gf(), - den = gf(), den2 = gf(), den4 = gf(), - den6 = gf(); - - set25519(r[2], gf1); - unpack25519(r[1], p); - S(num, r[1]); - M(den, num, D); - Z(num, num, r[2]); - A(den, r[2], den); - - S(den2, den); - S(den4, den2); - M(den6, den4, den2); - M(t, den6, num); - M(t, t, den); - - pow2523(t, t); - M(t, t, num); - M(t, t, den); - M(t, t, den); - M(r[0], t, den); - - S(chk, r[0]); - M(chk, chk, den); - if(neq25519(chk, num)) { - M(r[0], r[0], I); - } - - S(chk, r[0]); - M(chk, chk, den); - if(neq25519(chk, num)) { - return -1; - } + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } - if(par25519(r[0]) === (p[31] >> 7)) { - Z(r[0], gf0, r[0]); - } + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' + + // 5.5 + requestList.push(r) + + // 5.6 + const responsePromise = createDeferredPromise() + + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. + // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) + + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) + + for (const controller of fetchControllers) { + controller.abort() + } - M(r[3], r[0], r[1]); - return 0; -} + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } -function unpack25519(o, n) { - var i; - for(i = 0; i < 16; ++i) { - o[i] = n[2 * i] + (n[2 * i + 1] << 8); - } - o[15] &= 0x7fff; -} + // 2. + responsePromise.resolve(response) + } + })) -function pow2523(o, i) { - var c = gf(); - var a; - for(a = 0; a < 16; ++a) { - c[a] = i[a]; - } - for(a = 250; a >= 0; --a) { - S(c, c); - if(a !== 1) { - M(c, c, i); + // 5.8 + responsePromises.push(responsePromise.promise) } - } - for(a = 0; a < 16; ++a) { - o[a] = c[a]; - } -} - -function neq25519(a, b) { - var c = new NativeBuffer(32); - var d = new NativeBuffer(32); - pack25519(c, a); - pack25519(d, b); - return crypto_verify_32(c, 0, d, 0); -} -function crypto_verify_32(x, xi, y, yi) { - return vn(x, xi, y, yi, 32); -} + // 6. 
+ const p = Promise.all(responsePromises) -function vn(x, xi, y, yi, n) { - var i, d = 0; - for(i = 0; i < n; ++i) { - d |= x[xi + i] ^ y[yi + i]; - } - return (1 & ((d - 1) >>> 8)) - 1; -} + // 7. + const responses = await p -function par25519(a) { - var d = new NativeBuffer(32); - pack25519(d, a); - return d[0] & 1; -} + // 7.1 + const operations = [] -function scalarmult(p, q, s) { - var b, i; - set25519(p[0], gf0); - set25519(p[1], gf1); - set25519(p[2], gf1); - set25519(p[3], gf0); - for(i = 255; i >= 0; --i) { - b = (s[(i / 8)|0] >> (i & 7)) & 1; - cswap(p, q, b); - add(q, p); - add(p, p); - cswap(p, q, b); - } -} + // 7.2 + let index = 0 -function scalarbase(p, s) { - var q = [gf(), gf(), gf(), gf()]; - set25519(q[0], X); - set25519(q[1], Y); - set25519(q[2], gf1); - M(q[3], X, Y); - scalarmult(p, q, s); -} + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 + } -function set25519(r, a) { - var i; - for(i = 0; i < 16; i++) { - r[i] = a[i] | 0; - } -} + operations.push(operation) // 7.3.5 -function inv25519(o, i) { - var c = gf(); - var a; - for(a = 0; a < 16; ++a) { - c[a] = i[a]; - } - for(a = 253; a >= 0; --a) { - S(c, c); - if(a !== 2 && a !== 4) { - M(c, c, i); + index++ // 7.3.6 } - } - for(a = 0; a < 16; ++a) { - o[a] = c[a]; - } -} -function car25519(o) { - var i, v, c = 1; - for(i = 0; i < 16; ++i) { - v = o[i] + c + 65535; - c = Math.floor(v / 65536); - o[i] = v - c * 65536; - } - o[0] += c - 1 + 37 * (c - 1); -} + // 7.5 + const cacheJobPromise = createDeferredPromise() -function sel25519(p, q, b) { - var t, c = ~(b - 1); - for(var i = 0; i < 16; ++i) { - t = c & (p[i] ^ q[i]); - p[i] ^= t; - q[i] ^= t; - } -} + // 7.6.1 + let errorData = null -function gf(init) { - var i, r = new Float64Array(16); - if(init) { - for(i = 0; i < init.length; ++i) { - r[i] = init[i]; + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - } - return r; -} -function A(o, a, b) { - for(var i = 0; i < 16; ++i) { - o[i] = a[i] + b[i]; - } -} - -function Z(o, a, b) { - for(var i = 0; i < 16; ++i) { - o[i] = a[i] - b[i]; - } -} - -function S(o, a) { - M(o, a, a); -} - -function M(o, a, b) { - var v, c, - t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, - t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, - t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, - t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, - b0 = b[0], - b1 = b[1], - b2 = b[2], - b3 = b[3], - b4 = b[4], - b5 = b[5], - b6 = b[6], - b7 = b[7], - b8 = b[8], - b9 = b[9], - b10 = b[10], - b11 = b[11], - b12 = b[12], - b13 = b[13], - b14 = b[14], - b15 = b[15]; - - v = a[0]; - t0 += v * b0; - t1 += v * b1; - t2 += v * b2; - t3 += v * b3; - t4 += v * b4; - t5 += v * b5; - t6 += v * b6; - t7 += v * b7; - t8 += v * b8; - t9 += v * b9; - t10 += v * b10; - t11 += v * b11; - t12 += v * b12; - t13 += v * b13; - t14 += v * b14; - t15 += v * b15; - v = a[1]; - t1 += v * b0; - t2 += v * b1; - t3 += v * b2; - t4 += v * b3; - t5 += v * b4; - t6 += v * b5; - t7 += v * b6; - t8 += v * b7; - t9 += v * b8; - t10 += v * b9; - t11 += v * b10; - t12 += v * b11; - t13 += v * b12; - t14 += v * b13; - t15 += v * b14; - t16 += v * b15; - v = a[2]; - t2 += v * b0; - t3 += v * b1; - t4 += v * b2; - t5 += v * b3; - t6 += v * b4; - t7 += v * b5; - t8 += v * b6; - t9 += v * b7; - t10 += v * b8; - t11 += v * b9; 
- t12 += v * b10; - t13 += v * b11; - t14 += v * b12; - t15 += v * b13; - t16 += v * b14; - t17 += v * b15; - v = a[3]; - t3 += v * b0; - t4 += v * b1; - t5 += v * b2; - t6 += v * b3; - t7 += v * b4; - t8 += v * b5; - t9 += v * b6; - t10 += v * b7; - t11 += v * b8; - t12 += v * b9; - t13 += v * b10; - t14 += v * b11; - t15 += v * b12; - t16 += v * b13; - t17 += v * b14; - t18 += v * b15; - v = a[4]; - t4 += v * b0; - t5 += v * b1; - t6 += v * b2; - t7 += v * b3; - t8 += v * b4; - t9 += v * b5; - t10 += v * b6; - t11 += v * b7; - t12 += v * b8; - t13 += v * b9; - t14 += v * b10; - t15 += v * b11; - t16 += v * b12; - t17 += v * b13; - t18 += v * b14; - t19 += v * b15; - v = a[5]; - t5 += v * b0; - t6 += v * b1; - t7 += v * b2; - t8 += v * b3; - t9 += v * b4; - t10 += v * b5; - t11 += v * b6; - t12 += v * b7; - t13 += v * b8; - t14 += v * b9; - t15 += v * b10; - t16 += v * b11; - t17 += v * b12; - t18 += v * b13; - t19 += v * b14; - t20 += v * b15; - v = a[6]; - t6 += v * b0; - t7 += v * b1; - t8 += v * b2; - t9 += v * b3; - t10 += v * b4; - t11 += v * b5; - t12 += v * b6; - t13 += v * b7; - t14 += v * b8; - t15 += v * b9; - t16 += v * b10; - t17 += v * b11; - t18 += v * b12; - t19 += v * b13; - t20 += v * b14; - t21 += v * b15; - v = a[7]; - t7 += v * b0; - t8 += v * b1; - t9 += v * b2; - t10 += v * b3; - t11 += v * b4; - t12 += v * b5; - t13 += v * b6; - t14 += v * b7; - t15 += v * b8; - t16 += v * b9; - t17 += v * b10; - t18 += v * b11; - t19 += v * b12; - t20 += v * b13; - t21 += v * b14; - t22 += v * b15; - v = a[8]; - t8 += v * b0; - t9 += v * b1; - t10 += v * b2; - t11 += v * b3; - t12 += v * b4; - t13 += v * b5; - t14 += v * b6; - t15 += v * b7; - t16 += v * b8; - t17 += v * b9; - t18 += v * b10; - t19 += v * b11; - t20 += v * b12; - t21 += v * b13; - t22 += v * b14; - t23 += v * b15; - v = a[9]; - t9 += v * b0; - t10 += v * b1; - t11 += v * b2; - t12 += v * b3; - t13 += v * b4; - t14 += v * b5; - t15 += v * b6; - t16 += v * b7; - t17 += v * b8; - t18 += v * b9; - t19 += v * b10; - t20 += v * b11; - t21 += v * b12; - t22 += v * b13; - t23 += v * b14; - t24 += v * b15; - v = a[10]; - t10 += v * b0; - t11 += v * b1; - t12 += v * b2; - t13 += v * b3; - t14 += v * b4; - t15 += v * b5; - t16 += v * b6; - t17 += v * b7; - t18 += v * b8; - t19 += v * b9; - t20 += v * b10; - t21 += v * b11; - t22 += v * b12; - t23 += v * b13; - t24 += v * b14; - t25 += v * b15; - v = a[11]; - t11 += v * b0; - t12 += v * b1; - t13 += v * b2; - t14 += v * b3; - t15 += v * b4; - t16 += v * b5; - t17 += v * b6; - t18 += v * b7; - t19 += v * b8; - t20 += v * b9; - t21 += v * b10; - t22 += v * b11; - t23 += v * b12; - t24 += v * b13; - t25 += v * b14; - t26 += v * b15; - v = a[12]; - t12 += v * b0; - t13 += v * b1; - t14 += v * b2; - t15 += v * b3; - t16 += v * b4; - t17 += v * b5; - t18 += v * b6; - t19 += v * b7; - t20 += v * b8; - t21 += v * b9; - t22 += v * b10; - t23 += v * b11; - t24 += v * b12; - t25 += v * b13; - t26 += v * b14; - t27 += v * b15; - v = a[13]; - t13 += v * b0; - t14 += v * b1; - t15 += v * b2; - t16 += v * b3; - t17 += v * b4; - t18 += v * b5; - t19 += v * b6; - t20 += v * b7; - t21 += v * b8; - t22 += v * b9; - t23 += v * b10; - t24 += v * b11; - t25 += v * b12; - t26 += v * b13; - t27 += v * b14; - t28 += v * b15; - v = a[14]; - t14 += v * b0; - t15 += v * b1; - t16 += v * b2; - t17 += v * b3; - t18 += v * b4; - t19 += v * b5; - t20 += v * b6; - t21 += v * b7; - t22 += v * b8; - t23 += v * b9; - t24 += v * b10; - t25 += v * b11; - t26 += v * b12; - t27 += v * b13; - t28 += v * b14; - t29 
+= v * b15; - v = a[15]; - t15 += v * b0; - t16 += v * b1; - t17 += v * b2; - t18 += v * b3; - t19 += v * b4; - t20 += v * b5; - t21 += v * b6; - t22 += v * b7; - t23 += v * b8; - t24 += v * b9; - t25 += v * b10; - t26 += v * b11; - t27 += v * b12; - t28 += v * b13; - t29 += v * b14; - t30 += v * b15; - - t0 += 38 * t16; - t1 += 38 * t17; - t2 += 38 * t18; - t3 += 38 * t19; - t4 += 38 * t20; - t5 += 38 * t21; - t6 += 38 * t22; - t7 += 38 * t23; - t8 += 38 * t24; - t9 += 38 * t25; - t10 += 38 * t26; - t11 += 38 * t27; - t12 += 38 * t28; - t13 += 38 * t29; - t14 += 38 * t30; - // t15 left as is - - // first car - c = 1; - v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; - v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; - v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; - v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; - v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; - v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; - v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; - v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; - v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; - v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; - v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; - v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; - v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; - v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; - v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; - v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; - t0 += c-1 + 37 * (c-1); - - // second car - c = 1; - v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; - v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; - v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; - v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; - v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; - v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; - v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; - v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; - v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; - v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; - v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; - v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; - v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; - v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; - v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; - v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; - t0 += c-1 + 37 * (c-1); - - o[ 0] = t0; - o[ 1] = t1; - o[ 2] = t2; - o[ 3] = t3; - o[ 4] = t4; - o[ 5] = t5; - o[ 6] = t6; - o[ 7] = t7; - o[ 8] = t8; - o[ 9] = t9; - o[10] = t10; - o[11] = t11; - o[12] = t12; - o[13] = t13; - o[14] = t14; - o[15] = t15; -} + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) + // 7.7 + return cacheJobPromise.promise + } -/***/ }), + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) -/***/ 
9177: -/***/ ((module) => { + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) -/** - * Node.js module for Forge. - * - * @author Dave Longley - * - * Copyright 2011-2016 Digital Bazaar, Inc. - */ -module.exports = { - // default options - options: { - usePureJavaScript: false - } -}; + // 1. + let innerRequest = null + // 2. + if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] + } -/***/ }), + // 4. + if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Expected an http/s scheme when method is not GET' + }) + } -/***/ 5104: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 5. + const innerResponse = response[kState] -/** - * Hash-based Message Authentication Code implementation. Requires a message - * digest object that can be obtained, for example, from forge.md.sha1 or - * forge.md.md5. - * - * @author Dave Longley - * - * Copyright (c) 2010-2012 Digital Bazaar, Inc. All rights reserved. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + // 6. + if (innerResponse.status === 206) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got 206 status' + }) + } -/* HMAC API */ -var hmac = module.exports = forge.hmac = forge.hmac || {}; + // 7. + if (innerResponse.headersList.contains('vary')) { + // 7.1. + const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) -/** - * Creates an HMAC object that uses the given message digest object. - * - * @return an HMAC object. - */ -hmac.create = function() { - // the hmac key to use - var _key = null; + // 7.2. + for (const fieldValue of fieldValues) { + // 7.2.1 + if (fieldValue === '*') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got * vary field value' + }) + } + } + } - // the message digest to use - var _md = null; + // 8. + if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Response body is locked or disturbed' + }) + } - // the inner padding - var _ipadding = null; + // 9. + const clonedResponse = cloneResponse(innerResponse) - // the outer padding - var _opadding = null; + // 10. + const bodyReadPromise = createDeferredPromise() - // hmac context - var ctx = {}; + // 11. + if (innerResponse.body != null) { + // 11.1 + const stream = innerResponse.body.stream - /** - * Starts or restarts the HMAC with the given key and message digest. - * - * @param md the message digest to use, null to reuse the previous one, - * a string to use builtin 'sha1', 'md5', 'sha256'. - * @param key the key to use as a string, array of bytes, byte buffer, - * or null to reuse the previous key. 
- */ - ctx.start = function(md, key) { - if(md !== null) { - if(typeof md === 'string') { - // create builtin message digest - md = md.toLowerCase(); - if(md in forge.md.algorithms) { - _md = forge.md.algorithms[md].create(); - } else { - throw new Error('Unknown hash algorithm "' + md + '"'); - } - } else { - // store message digest - _md = md; - } - } + // 11.2 + const reader = stream.getReader() - if(key === null) { - // reuse previous key - key = _key; + // 11.3 + readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) } else { - if(typeof key === 'string') { - // convert string into byte buffer - key = forge.util.createBuffer(key); - } else if(forge.util.isArray(key)) { - // convert byte array into byte buffer - var tmp = key; - key = forge.util.createBuffer(); - for(var i = 0; i < tmp.length; ++i) { - key.putByte(tmp[i]); - } - } + bodyReadPromise.resolve(undefined) + } - // if key is longer than blocksize, hash it - var keylen = key.length(); - if(keylen > _md.blockLength) { - _md.start(); - _md.update(key.bytes()); - key = _md.digest(); - } + // 12. + /** @type {CacheBatchOperation[]} */ + const operations = [] - // mix key into inner and outer padding - // ipadding = [0x36 * blocksize] ^ key - // opadding = [0x5C * blocksize] ^ key - _ipadding = forge.util.createBuffer(); - _opadding = forge.util.createBuffer(); - keylen = key.length(); - for(var i = 0; i < keylen; ++i) { - var tmp = key.at(i); - _ipadding.putByte(0x36 ^ tmp); - _opadding.putByte(0x5C ^ tmp); - } + // 13. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 14. + request: innerRequest, // 15. + response: clonedResponse // 16. + } - // if key is shorter than blocksize, add additional padding - if(keylen < _md.blockLength) { - var tmp = _md.blockLength - keylen; - for(var i = 0; i < tmp; ++i) { - _ipadding.putByte(0x36); - _opadding.putByte(0x5C); - } - } - _key = key; - _ipadding = _ipadding.bytes(); - _opadding = _opadding.bytes(); + // 17. + operations.push(operation) + + // 19. + const bytes = await bodyReadPromise.promise + + if (clonedResponse.body != null) { + clonedResponse.body.source = bytes } - // digest is done like so: hash(opadding | hash(ipadding | message)) + // 19.1 + const cacheJobPromise = createDeferredPromise() - // prepare to do inner hash - // hash(ipadding | message) - _md.start(); - _md.update(_ipadding); - }; + // 19.2.1 + let errorData = null - /** - * Updates the HMAC with the given message bytes. - * - * @param bytes the bytes to update with. - */ - ctx.update = function(bytes) { - _md.update(bytes); - }; + // 19.2.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } - /** - * Produces the Message Authentication Code (MAC). - * - * @return a byte buffer containing the digest value. 
- */ - ctx.getMac = function() { - // digest is done like so: hash(opadding | hash(ipadding | message)) - // here we do the outer hashing - var inner = _md.digest().bytes(); - _md.start(); - _md.update(_opadding); - _md.update(inner); - return _md.digest(); - }; - // alias for getMac - ctx.digest = ctx.getMac; + // 19.2.3 + queueMicrotask(() => { + // 19.2.3.1 + if (errorData === null) { + cacheJobPromise.resolve() + } else { // 19.2.3.2 + cacheJobPromise.reject(errorData) + } + }) - return ctx; -}; + return cacheJobPromise.promise + } + async delete (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) -/***/ }), + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) -/***/ 7655: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * @type {Request} + */ + let r = null -/** - * Node.js module for Forge. - * - * @author Dave Longley - * - * Copyright 2011-2016 Digital Bazaar, Inc. - */ -module.exports = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(1449); -__nccwpck_require__(9549); -__nccwpck_require__(7088); -__nccwpck_require__(7157); -__nccwpck_require__(0); -__nccwpck_require__(5104); -__nccwpck_require__(5173); -__nccwpck_require__(9994); -__nccwpck_require__(1145); -__nccwpck_require__(3339); -__nccwpck_require__(1611); -__nccwpck_require__(154); -__nccwpck_require__(7014); -__nccwpck_require__(466); -__nccwpck_require__(4829); -__nccwpck_require__(6924); -__nccwpck_require__(6861); -__nccwpck_require__(4467); -__nccwpck_require__(4376); -__nccwpck_require__(7821); -__nccwpck_require__(9965); -__nccwpck_require__(4280); -__nccwpck_require__(9167); -__nccwpck_require__(8339); + if (request instanceof Request) { + r = request[kState] + if (r.method !== 'GET' && !options.ignoreMethod) { + return false + } + } else { + assert(typeof request === 'string') -/***/ }), + r = new Request(request)[kState] + } -/***/ 7052: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** @type {CacheBatchOperation[]} */ + const operations = [] -// Copyright (c) 2005 Tom Wu -// All Rights Reserved. -// See "LICENSE" for details. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'delete', + request: r, + options + } -// Basic JavaScript BN library - subset useful for RSA encryption. + operations.push(operation) -/* -Licensing (LICENSE) -------------------- + const cacheJobPromise = createDeferredPromise() -This software is covered under the following copyright: -*/ -/* - * Copyright (c) 2003-2005 Tom Wu - * All Rights Reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, - * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY - * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. 
- * - * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL, - * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER - * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF - * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT - * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - * - * In addition, the following condition applies: - * - * All redistributions must retain an intact copy of this copyright notice - * and disclaimer. - */ -/* -Address all questions regarding this license to: + let errorData = null + let requestResponses - Tom Wu - tjw@cs.Stanford.EDU -*/ -var forge = __nccwpck_require__(9177); - -module.exports = forge.jsbn = forge.jsbn || {}; - -// Bits per digit -var dbits; - -// JavaScript engine analysis -var canary = 0xdeadbeefcafe; -var j_lm = ((canary&0xffffff)==0xefcafe); - -// (public) Constructor -function BigInteger(a,b,c) { - this.data = []; - if(a != null) - if("number" == typeof a) this.fromNumber(a,b,c); - else if(b == null && "string" != typeof a) this.fromString(a,256); - else this.fromString(a,b); -} -forge.jsbn.BigInteger = BigInteger; - -// return new, unset BigInteger -function nbi() { return new BigInteger(null); } - -// am: Compute w_j += (x*this_i), propagate carries, -// c is initial carry, returns final carry. -// c < 3*dvalue, x < 2*dvalue, this_i < dvalue -// We need to select the fastest one that works in this environment. - -// am1: use a single mult and divide to get the high bits, -// max digit bits should be 26 because -// max internal value = 2*dvalue^2-2*dvalue (< 2^53) -function am1(i,x,w,j,c,n) { - while(--n >= 0) { - var v = x*this.data[i++]+w.data[j]+c; - c = Math.floor(v/0x4000000); - w.data[j++] = v&0x3ffffff; - } - return c; -} -// am2 avoids a big mult-and-extract completely. -// Max digit bits should be <= 30 because we do bitwise ops -// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31) -function am2(i,x,w,j,c,n) { - var xl = x&0x7fff, xh = x>>15; - while(--n >= 0) { - var l = this.data[i]&0x7fff; - var h = this.data[i++]>>15; - var m = xh*l+h*xl; - l = xl*l+((m&0x7fff)<<15)+w.data[j]+(c&0x3fffffff); - c = (l>>>30)+(m>>>15)+xh*h+(c>>>30); - w.data[j++] = l&0x3fffffff; - } - return c; -} -// Alternately, set max digit bits to 28 since some -// browsers slow down when dealing with 32-bit numbers. 
-function am3(i,x,w,j,c,n) { - var xl = x&0x3fff, xh = x>>14; - while(--n >= 0) { - var l = this.data[i]&0x3fff; - var h = this.data[i++]>>14; - var m = xh*l+h*xl; - l = xl*l+((m&0x3fff)<<14)+w.data[j]+c; - c = (l>>28)+(m>>14)+xh*h; - w.data[j++] = l&0xfffffff; - } - return c; -} - -// node.js (no browser) -if(typeof(navigator) === 'undefined') -{ - BigInteger.prototype.am = am3; - dbits = 28; -} else if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) { - BigInteger.prototype.am = am2; - dbits = 30; -} else if(j_lm && (navigator.appName != "Netscape")) { - BigInteger.prototype.am = am1; - dbits = 26; -} else { // Mozilla/Netscape seems to prefer am3 - BigInteger.prototype.am = am3; - dbits = 28; -} - -BigInteger.prototype.DB = dbits; -BigInteger.prototype.DM = ((1<= 0; --i) r.data[i] = this.data[i]; - r.t = this.t; - r.s = this.s; -} - -// (protected) set from integer value x, -DV <= x < DV -function bnpFromInt(x) { - this.t = 1; - this.s = (x<0)?-1:0; - if(x > 0) this.data[0] = x; - else if(x < -1) this.data[0] = x+this.DV; - else this.t = 0; -} - -// return bigint initialized to value -function nbv(i) { var r = nbi(); r.fromInt(i); return r; } - -// (protected) set from string and radix -function bnpFromString(s,b) { - var k; - if(b == 16) k = 4; - else if(b == 8) k = 3; - else if(b == 256) k = 8; // byte array - else if(b == 2) k = 1; - else if(b == 32) k = 5; - else if(b == 4) k = 2; - else { this.fromRadix(s,b); return; } - this.t = 0; - this.s = 0; - var i = s.length, mi = false, sh = 0; - while(--i >= 0) { - var x = (k==8)?s[i]&0xff:intAt(s,i); - if(x < 0) { - if(s.charAt(i) == "-") mi = true; - continue; + try { + requestResponses = this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - mi = false; - if(sh == 0) - this.data[this.t++] = x; - else if(sh+k > this.DB) { - this.data[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh)); - } else - this.data[this.t-1] |= x<= this.DB) sh -= this.DB; - } - if(k == 8 && (s[0]&0x80) != 0) { - this.s = -1; - if(sh > 0) this.data[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this.data[this.t-1] == c) --this.t; -} - -// (public) return string representation in given radix -function bnToString(b) { - if(this.s < 0) return "-"+this.negate().toString(b); - var k; - if(b == 16) k = 4; - else if(b == 8) k = 3; - else if(b == 2) k = 1; - else if(b == 32) k = 5; - else if(b == 4) k = 2; - else return this.toRadix(b); - var km = (1< 0) { - if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); } - while(i >= 0) { - if(p < k) { - d = (this.data[i]&((1<>(p+=this.DB-k); + + queueMicrotask(() => { + if (errorData === null) { + cacheJobPromise.resolve(!!requestResponses?.length) } else { - d = (this.data[i]>>(p-=k))&km; - if(p <= 0) { p += this.DB; --i; } + cacheJobPromise.reject(errorData) } - if(d > 0) m = true; - if(m) r += int2char(d); - } - } - return m?r:"0"; -} - -// (public) -this -function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; } + }) -// (public) |this| -function bnAbs() { return (this.s<0)?this.negate():this; } + return cacheJobPromise.promise + } -// (public) return + if this > a, - if this < a, 0 if equal -function bnCompareTo(a) { - var r = this.s-a.s; - if(r != 0) return r; - var i = this.t; - r = i-a.t; - if(r != 0) return (this.s<0)?-r:r; - while(--i >= 0) if((r=this.data[i]-a.data[i]) != 0) return r; - return 0; -} + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys + * @param {any} request + * @param {import('../../types/cache').CacheQueryOptions} 
options + * @returns {readonly Request[]} + */ + async keys (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) -// returns bit length of the integer x -function nbits(x) { - var r = 1, t; - if((t=x>>>16) != 0) { x = t; r += 16; } - if((t=x>>8) != 0) { x = t; r += 8; } - if((t=x>>4) != 0) { x = t; r += 4; } - if((t=x>>2) != 0) { x = t; r += 2; } - if((t=x>>1) != 0) { x = t; r += 1; } - return r; -} + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) -// (public) return the number of bits in "this" -function bnBitLength() { - if(this.t <= 0) return 0; - return this.DB*(this.t-1)+nbits(this.data[this.t-1]^(this.s&this.DM)); -} + // 1. + let r = null -// (protected) r = this << n*DB -function bnpDLShiftTo(n,r) { - var i; - for(i = this.t-1; i >= 0; --i) r.data[i+n] = this.data[i]; - for(i = n-1; i >= 0; --i) r.data[i] = 0; - r.t = this.t+n; - r.s = this.s; -} - -// (protected) r = this >> n*DB -function bnpDRShiftTo(n,r) { - for(var i = n; i < this.t; ++i) r.data[i-n] = this.data[i]; - r.t = Math.max(this.t-n,0); - r.s = this.s; -} - -// (protected) r = this << n -function bnpLShiftTo(n,r) { - var bs = n%this.DB; - var cbs = this.DB-bs; - var bm = (1<= 0; --i) { - r.data[i+ds+1] = (this.data[i]>>cbs)|c; - c = (this.data[i]&bm)<= 0; --i) r.data[i] = 0; - r.data[ds] = c; - r.t = this.t+ds+1; - r.s = this.s; - r.clamp(); -} - -// (protected) r = this >> n -function bnpRShiftTo(n,r) { - r.s = this.s; - var ds = Math.floor(n/this.DB); - if(ds >= this.t) { r.t = 0; return; } - var bs = n%this.DB; - var cbs = this.DB-bs; - var bm = (1<>bs; - for(var i = ds+1; i < this.t; ++i) { - r.data[i-ds-1] |= (this.data[i]&bm)<>bs; - } - if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<>= this.DB; - } - if(a.t < this.t) { - c -= a.s; - while(i < this.t) { - c += this.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c += this.s; - } else { - c += this.s; - while(i < a.t) { - c -= a.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c -= a.s; - } - r.s = (c<0)?-1:0; - if(c < -1) r.data[i++] = this.DV+c; - else if(c > 0) r.data[i++] = c; - r.t = i; - r.clamp(); -} - -// (protected) r = this * a, r != this,a (HAC 14.12) -// "this" should be the larger one if appropriate. -function bnpMultiplyTo(a,r) { - var x = this.abs(), y = a.abs(); - var i = x.t; - r.t = i+y.t; - while(--i >= 0) r.data[i] = 0; - for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t); - r.s = 0; - r.clamp(); - if(this.s != a.s) BigInteger.ZERO.subTo(r,r); -} - -// (protected) r = this^2, r != this (HAC 14.16) -function bnpSquareTo(r) { - var x = this.abs(); - var i = r.t = 2*x.t; - while(--i >= 0) r.data[i] = 0; - for(i = 0; i < x.t-1; ++i) { - var c = x.am(i,x.data[i],r,2*i,0,1); - if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) { - r.data[i+x.t] -= x.DV; - r.data[i+x.t+1] = 1; - } - } - if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1); - r.s = 0; - r.clamp(); -} - -// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20) -// r != q, this != m. q or r may be null. 
-function bnpDivRemTo(m,q,r) { - var pm = m.abs(); - if(pm.t <= 0) return; - var pt = this.abs(); - if(pt.t < pm.t) { - if(q != null) q.fromInt(0); - if(r != null) this.copyTo(r); - return; - } - if(r == null) r = nbi(); - var y = nbi(), ts = this.s, ms = m.s; - var nsh = this.DB-nbits(pm.data[pm.t-1]); // normalize modulus - if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); } - var ys = y.t; - var y0 = y.data[ys-1]; - if(y0 == 0) return; - var yt = y0*(1<1)?y.data[ys-2]>>this.F2:0); - var d1 = this.FV/yt, d2 = (1<= 0) { - r.data[r.t++] = 1; - r.subTo(t,r); - } - BigInteger.ONE.dlShiftTo(ys,t); - t.subTo(y,y); // "negative" y so we can replace sub with am later - while(y.t < ys) y.data[y.t++] = 0; - while(--j >= 0) { - // Estimate quotient digit - var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2); - if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out - y.dlShiftTo(j,t); - r.subTo(t,r); - while(r.data[i] < --qd) r.subTo(t,r); - } - } - if(q != null) { - r.drShiftTo(ys,q); - if(ts != ms) BigInteger.ZERO.subTo(q,q); - } - r.t = ys; - r.clamp(); - if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder - if(ts < 0) BigInteger.ZERO.subTo(r,r); -} - -// (public) this mod a -function bnMod(a) { - var r = nbi(); - this.abs().divRemTo(a,null,r); - if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r); - return r; -} + // 2. + if (request !== undefined) { + // 2.1 + if (request instanceof Request) { + // 2.1.1 + r = request[kState] -// Modular reduction using "classic" algorithm -function Classic(m) { this.m = m; } -function cConvert(x) { - if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m); - else return x; -} -function cRevert(x) { return x; } -function cReduce(x) { x.divRemTo(this.m,null,x); } -function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } -function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); } - -Classic.prototype.convert = cConvert; -Classic.prototype.revert = cRevert; -Classic.prototype.reduce = cReduce; -Classic.prototype.mulTo = cMulTo; -Classic.prototype.sqrTo = cSqrTo; - -// (protected) return "-1/this % 2^DB"; useful for Mont. reduction -// justification: -// xy == 1 (mod m) -// xy = 1+km -// xy(2-xy) = (1+km)(1-km) -// x[y(2-xy)] = 1-k^2m^2 -// x[y(2-xy)] == 1 (mod m^2) -// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2 -// should reduce x and y(2-xy) by m^2 at each step to keep size bounded. -// JS multiply "overflows" differently from C/C++, so care is needed here. 
-function bnpInvDigit() { - if(this.t < 1) return 0; - var x = this.data[0]; - if((x&1) == 0) return 0; - var y = x&3; // y == 1/x mod 2^2 - y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4 - y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8 - y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16 - // last step - calculate inverse mod DV directly; - // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints - y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits - // we really want the negative inverse, and -DV < y < DV - return (y>0)?this.DV-y:-y; -} - -// Montgomery reduction -function Montgomery(m) { - this.m = m; - this.mp = m.invDigit(); - this.mpl = this.mp&0x7fff; - this.mph = this.mp>>15; - this.um = (1<<(m.DB-15))-1; - this.mt2 = 2*m.t; -} - -// xR mod m -function montConvert(x) { - var r = nbi(); - x.abs().dlShiftTo(this.m.t,r); - r.divRemTo(this.m,null,r); - if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r); - return r; -} + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { // 2.2 + r = new Request(request)[kState] + } + } -// x/R mod m -function montRevert(x) { - var r = nbi(); - x.copyTo(r); - this.reduce(r); - return r; -} + // 4. + const promise = createDeferredPromise() -// x = x/R mod m (HAC 14.32) -function montReduce(x) { - while(x.t <= this.mt2) // pad x so am has enough room later - x.data[x.t++] = 0; - for(var i = 0; i < this.m.t; ++i) { - // faster way of calculating u0 = x.data[i]*mp mod DV - var j = x.data[i]&0x7fff; - var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM; - // use am to combine the multiply-shift-add into one call - j = i+this.m.t; - x.data[j] += this.m.am(0,u0,x,i,0,this.m.t); - // propagate carry - while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; } - } - x.clamp(); - x.drShiftTo(this.m.t,x); - if(x.compareTo(this.m) >= 0) x.subTo(this.m,x); -} - -// r = "x^2/R mod m"; x != r -function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); } - -// r = "xy/R mod m"; x,y != r -function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } - -Montgomery.prototype.convert = montConvert; -Montgomery.prototype.revert = montRevert; -Montgomery.prototype.reduce = montReduce; -Montgomery.prototype.mulTo = montMulTo; -Montgomery.prototype.sqrTo = montSqrTo; - -// (protected) true iff this is even -function bnpIsEven() { return ((this.t>0)?(this.data[0]&1):this.s) == 0; } - -// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79) -function bnpExp(e,z) { - if(e > 0xffffffff || e < 1) return BigInteger.ONE; - var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1; - g.copyTo(r); - while(--i >= 0) { - z.sqrTo(r,r2); - if((e&(1< 0) z.mulTo(r2,g,r); - else { var t = r; r = r2; r2 = t; } - } - return z.revert(r); -} - -// (public) this^e % m, 0 <= e < 2^32 -function bnModPowInt(e,m) { - var z; - if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m); - return this.exp(e,z); -} - -// protected -BigInteger.prototype.copyTo = bnpCopyTo; -BigInteger.prototype.fromInt = bnpFromInt; -BigInteger.prototype.fromString = bnpFromString; -BigInteger.prototype.clamp = bnpClamp; -BigInteger.prototype.dlShiftTo = bnpDLShiftTo; -BigInteger.prototype.drShiftTo = bnpDRShiftTo; -BigInteger.prototype.lShiftTo = bnpLShiftTo; -BigInteger.prototype.rShiftTo = bnpRShiftTo; -BigInteger.prototype.subTo = bnpSubTo; -BigInteger.prototype.multiplyTo = bnpMultiplyTo; -BigInteger.prototype.squareTo = bnpSquareTo; 
-BigInteger.prototype.divRemTo = bnpDivRemTo; -BigInteger.prototype.invDigit = bnpInvDigit; -BigInteger.prototype.isEven = bnpIsEven; -BigInteger.prototype.exp = bnpExp; - -// public -BigInteger.prototype.toString = bnToString; -BigInteger.prototype.negate = bnNegate; -BigInteger.prototype.abs = bnAbs; -BigInteger.prototype.compareTo = bnCompareTo; -BigInteger.prototype.bitLength = bnBitLength; -BigInteger.prototype.mod = bnMod; -BigInteger.prototype.modPowInt = bnModPowInt; - -// "constants" -BigInteger.ZERO = nbv(0); -BigInteger.ONE = nbv(1); - -// jsbn2 lib - -//Copyright (c) 2005-2009 Tom Wu -//All Rights Reserved. -//See "LICENSE" for details (See jsbn.js for LICENSE). - -//Extended JavaScript BN functions, required for RSA private ops. - -//Version 1.1: new BigInteger("0", 10) returns "proper" zero - -//(public) -function bnClone() { var r = nbi(); this.copyTo(r); return r; } - -//(public) return value as integer -function bnIntValue() { -if(this.s < 0) { - if(this.t == 1) return this.data[0]-this.DV; - else if(this.t == 0) return -1; -} else if(this.t == 1) return this.data[0]; -else if(this.t == 0) return 0; -// assumes 16 < DB < 32 -return ((this.data[1]&((1<<(32-this.DB))-1))<>24; } - -//(public) return value as short (assumes DB>=16) -function bnShortValue() { return (this.t==0)?this.s:(this.data[0]<<16)>>16; } - -//(protected) return x s.t. r^x < DV -function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); } - -//(public) 0 if this == 0, 1 if this > 0 -function bnSigNum() { -if(this.s < 0) return -1; -else if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0; -else return 1; -} - -//(protected) convert to radix string -function bnpToRadix(b) { -if(b == null) b = 10; -if(this.signum() == 0 || b < 2 || b > 36) return "0"; -var cs = this.chunkSize(b); -var a = Math.pow(b,cs); -var d = nbv(a), y = nbi(), z = nbi(), r = ""; -this.divRemTo(d,y,z); -while(y.signum() > 0) { - r = (a+z.intValue()).toString(b).substr(1) + r; - y.divRemTo(d,y,z); -} -return z.intValue().toString(b) + r; -} - -//(protected) convert from radix string -function bnpFromRadix(s,b) { -this.fromInt(0); -if(b == null) b = 10; -var cs = this.chunkSize(b); -var d = Math.pow(b,cs), mi = false, j = 0, w = 0; -for(var i = 0; i < s.length; ++i) { - var x = intAt(s,i); - if(x < 0) { - if(s.charAt(i) == "-" && this.signum() == 0) mi = true; - continue; - } - w = b*w+x; - if(++j >= cs) { - this.dMultiply(d); - this.dAddOffset(w,0); - j = 0; - w = 0; - } -} -if(j > 0) { - this.dMultiply(Math.pow(b,j)); - this.dAddOffset(w,0); -} -if(mi) BigInteger.ZERO.subTo(this,this); -} - -//(protected) alternate constructor -function bnpFromNumber(a,b,c) { -if("number" == typeof b) { - // new BigInteger(int,int,RNG) - if(a < 2) this.fromInt(1); - else { - this.fromNumber(a,c); - if(!this.testBit(a-1)) // force MSB set - this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this); - if(this.isEven()) this.dAddOffset(1,0); // force odd - while(!this.isProbablePrime(b)) { - this.dAddOffset(2,0); - if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this); - } - } -} else { - // new BigInteger(int,RNG) - var x = new Array(), t = a&7; - x.length = (a>>3)+1; - b.nextBytes(x); - if(t > 0) x[0] &= ((1< 0) { - if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p) - r[k++] = d|(this.s<<(this.DB-p)); - while(i >= 0) { - if(p < 8) { - d = (this.data[i]&((1<>(p+=this.DB-8); - } else { - d = (this.data[i]>>(p-=8))&0xff; - if(p <= 0) { p += this.DB; --i; } - } - if((d&0x80) != 0) d |= -256; - if(k == 
0 && (this.s&0x80) != (d&0x80)) ++k; - if(k > 0 || d != this.s) r[k++] = d; - } -} -return r; -} - -function bnEquals(a) { return(this.compareTo(a)==0); } -function bnMin(a) { return(this.compareTo(a)<0)?this:a; } -function bnMax(a) { return(this.compareTo(a)>0)?this:a; } - -//(protected) r = this op a (bitwise) -function bnpBitwiseTo(a,op,r) { -var i, f, m = Math.min(a.t,this.t); -for(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]); -if(a.t < this.t) { - f = a.s&this.DM; - for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f); - r.t = this.t; -} else { - f = this.s&this.DM; - for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]); - r.t = a.t; -} -r.s = op(this.s,a.s); -r.clamp(); -} + // 5. + // 5.1 + const requests = [] -//(public) this & a -function op_and(x,y) { return x&y; } -function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; } + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + // 5.2.1.1 + requests.push(requestResponse[0]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + // 5.3.2.1 + requests.push(requestResponse[0]) + } + } -//(public) this | a -function op_or(x,y) { return x|y; } -function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; } + // 5.4 + queueMicrotask(() => { + // 5.4.1 + const requestList = [] -//(public) this ^ a -function op_xor(x,y) { return x^y; } -function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; } - -//(public) this & ~a -function op_andnot(x,y) { return x&~y; } -function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; } - -//(public) ~this -function bnNot() { -var r = nbi(); -for(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i]; -r.t = this.t; -r.s = ~this.s; -return r; -} - -//(public) this << n -function bnShiftLeft(n) { -var r = nbi(); -if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r); -return r; -} - -//(public) this >> n -function bnShiftRight(n) { -var r = nbi(); -if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r); -return r; -} - -//return index of lowest 1-bit in x, x < 2^31 -function lbit(x) { -if(x == 0) return -1; -var r = 0; -if((x&0xffff) == 0) { x >>= 16; r += 16; } -if((x&0xff) == 0) { x >>= 8; r += 8; } -if((x&0xf) == 0) { x >>= 4; r += 4; } -if((x&3) == 0) { x >>= 2; r += 2; } -if((x&1) == 0) ++r; -return r; -} - -//(public) returns index of lowest 1-bit (or -1 if none) -function bnGetLowestSetBit() { -for(var i = 0; i < this.t; ++i) - if(this.data[i] != 0) return i*this.DB+lbit(this.data[i]); -if(this.s < 0) return this.t*this.DB; -return -1; -} - -//return number of 1 bits in x -function cbit(x) { -var r = 0; -while(x != 0) { x &= x-1; ++r; } -return r; -} - -//(public) return number of set bits -function bnBitCount() { -var r = 0, x = this.s&this.DM; -for(var i = 0; i < this.t; ++i) r += cbit(this.data[i]^x); -return r; -} - -//(public) true iff nth bit is set -function bnTestBit(n) { -var j = Math.floor(n/this.DB); -if(j >= this.t) return(this.s!=0); -return((this.data[j]&(1<<(n%this.DB)))!=0); -} - -//(protected) this op (1<>= this.DB; -} -if(a.t < this.t) { - c += a.s; - while(i < this.t) { - c += this.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c += this.s; -} else { - c += this.s; - while(i < a.t) { - c += a.data[i]; - r.data[i++] = c&this.DM; - c >>= this.DB; - } - c += a.s; -} -r.s = (c<0)?-1:0; -if(c > 0) r.data[i++] = c; -else 
if(c < -1) r.data[i++] = this.DV+c; -r.t = i; -r.clamp(); -} - -//(public) this + a -function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; } - -//(public) this - a -function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; } - -//(public) this * a -function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; } - -//(public) this / a -function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; } - -//(public) this % a -function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; } - -//(public) [this/a,this%a] -function bnDivideAndRemainder(a) { -var q = nbi(), r = nbi(); -this.divRemTo(a,q,r); -return new Array(q,r); -} - -//(protected) this *= n, this >= 0, 1 < n < DV -function bnpDMultiply(n) { -this.data[this.t] = this.am(0,n-1,this,0,0,this.t); -++this.t; -this.clamp(); -} - -//(protected) this += n << w words, this >= 0 -function bnpDAddOffset(n,w) { -if(n == 0) return; -while(this.t <= w) this.data[this.t++] = 0; -this.data[w] += n; -while(this.data[w] >= this.DV) { - this.data[w] -= this.DV; - if(++w >= this.t) this.data[this.t++] = 0; - ++this.data[w]; -} -} - -//A "null" reducer -function NullExp() {} -function nNop(x) { return x; } -function nMulTo(x,y,r) { x.multiplyTo(y,r); } -function nSqrTo(x,r) { x.squareTo(r); } - -NullExp.prototype.convert = nNop; -NullExp.prototype.revert = nNop; -NullExp.prototype.mulTo = nMulTo; -NullExp.prototype.sqrTo = nSqrTo; - -//(public) this^e -function bnPow(e) { return this.exp(e,new NullExp()); } - -//(protected) r = lower n words of "this * a", a.t <= n -//"this" should be the larger one if appropriate. -function bnpMultiplyLowerTo(a,n,r) { -var i = Math.min(this.t+a.t,n); -r.s = 0; // assumes a,this >= 0 -r.t = i; -while(i > 0) r.data[--i] = 0; -var j; -for(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t); -for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i); -r.clamp(); -} - -//(protected) r = "this * a" without lower n words, n > 0 -//"this" should be the larger one if appropriate. 
-function bnpMultiplyUpperTo(a,n,r) { ---n; -var i = r.t = this.t+a.t-n; -r.s = 0; // assumes a,this >= 0 -while(--i >= 0) r.data[i] = 0; -for(i = Math.max(n-this.t,0); i < a.t; ++i) - r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n); -r.clamp(); -r.drShiftTo(1,r); -} - -//Barrett modular reduction -function Barrett(m) { -// setup Barrett -this.r2 = nbi(); -this.q3 = nbi(); -BigInteger.ONE.dlShiftTo(2*m.t,this.r2); -this.mu = this.r2.divide(m); -this.m = m; -} - -function barrettConvert(x) { -if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m); -else if(x.compareTo(this.m) < 0) return x; -else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; } -} - -function barrettRevert(x) { return x; } - -//x = x mod m (HAC 14.42) -function barrettReduce(x) { -x.drShiftTo(this.m.t-1,this.r2); -if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); } -this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3); -this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2); -while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1); -x.subTo(this.r2,x); -while(x.compareTo(this.m) >= 0) x.subTo(this.m,x); -} - -//r = x^2 mod m; x != r -function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); } - -//r = x*y mod m; x,y != r -function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } - -Barrett.prototype.convert = barrettConvert; -Barrett.prototype.revert = barrettRevert; -Barrett.prototype.reduce = barrettReduce; -Barrett.prototype.mulTo = barrettMulTo; -Barrett.prototype.sqrTo = barrettSqrTo; - -//(public) this^e % m (HAC 14.85) -function bnModPow(e,m) { -var i = e.bitLength(), k, r = nbv(1), z; -if(i <= 0) return r; -else if(i < 18) k = 1; -else if(i < 48) k = 3; -else if(i < 144) k = 4; -else if(i < 768) k = 5; -else k = 6; -if(i < 8) - z = new Classic(m); -else if(m.isEven()) - z = new Barrett(m); -else - z = new Montgomery(m); - -// precomputation -var g = new Array(), n = 3, k1 = k-1, km = (1< 1) { - var g2 = nbi(); - z.sqrTo(g[1],g2); - while(n <= km) { - g[n] = nbi(); - z.mulTo(g2,g[n-2],g[n]); - n += 2; - } -} - -var j = e.t-1, w, is1 = true, r2 = nbi(), t; -i = nbits(e.data[j])-1; -while(j >= 0) { - if(i >= k1) w = (e.data[j]>>(i-k1))&km; - else { - w = (e.data[j]&((1<<(i+1))-1))<<(k1-i); - if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1); - } - - n = k; - while((w&1) == 0) { w >>= 1; --n; } - if((i -= n) < 0) { i += this.DB; --j; } - if(is1) { // ret == 1, don't bother squaring or multiplying it - g[w].copyTo(r); - is1 = false; - } else { - while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; } - if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; } - z.mulTo(r2,g[w],r); - } - - while(j >= 0 && (e.data[j]&(1< 0) { - x.rShiftTo(g,x); - y.rShiftTo(g,y); -} -while(x.signum() > 0) { - if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x); - if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y); - if(x.compareTo(y) >= 0) { - x.subTo(y,x); - x.rShiftTo(1,x); - } else { - y.subTo(x,y); - y.rShiftTo(1,y); - } -} -if(g > 0) y.lShiftTo(g,y); -return y; -} - -//(protected) this % n, n < 2^26 -function bnpModInt(n) { -if(n <= 0) return 0; -var d = this.DV%n, r = (this.s<0)?n-1:0; -if(this.t > 0) - if(d == 0) r = this.data[0]%n; - else for(var i = this.t-1; i >= 0; --i) r = (d*r+this.data[i])%n; -return r; -} - -//(public) 1/this % m (HAC 14.61) -function bnModInverse(m) { -var ac = m.isEven(); -if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO; -var u = m.clone(), v = this.clone(); -var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1); -while(u.signum() != 0) { - while(u.isEven()) { - 
u.rShiftTo(1,u); - if(ac) { - if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); } - a.rShiftTo(1,a); - } else if(!b.isEven()) b.subTo(m,b); - b.rShiftTo(1,b); - } - while(v.isEven()) { - v.rShiftTo(1,v); - if(ac) { - if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); } - c.rShiftTo(1,c); - } else if(!d.isEven()) d.subTo(m,d); - d.rShiftTo(1,d); - } - if(u.compareTo(v) >= 0) { - u.subTo(v,u); - if(ac) a.subTo(c,a); - b.subTo(d,b); - } else { - v.subTo(u,v); - if(ac) c.subTo(a,c); - d.subTo(b,d); - } -} -if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO; -if(d.compareTo(m) >= 0) return d.subtract(m); -if(d.signum() < 0) d.addTo(m,d); else return d; -if(d.signum() < 0) return d.add(m); else return d; -} - -var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509]; -var lplim = (1<<26)/lowprimes[lowprimes.length-1]; - -//(public) test primality with certainty >= 1-.5^t -function bnIsProbablePrime(t) { -var i, x = this.abs(); -if(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) { - for(i = 0; i < lowprimes.length; ++i) - if(x.data[0] == lowprimes[i]) return true; - return false; -} -if(x.isEven()) return false; -i = 1; -while(i < lowprimes.length) { - var m = lowprimes[i], j = i+1; - while(j < lowprimes.length && m < lplim) m *= lowprimes[j++]; - m = x.modInt(m); - while(i < j) if(m%lowprimes[i++] == 0) return false; -} -return x.millerRabin(t); -} - -//(protected) true if probably prime (HAC 4.24, Miller-Rabin) -function bnpMillerRabin(t) { -var n1 = this.subtract(BigInteger.ONE); -var k = n1.getLowestSetBit(); -if(k <= 0) return false; -var r = n1.shiftRight(k); -var prng = bnGetPrng(); -var a; -for(var i = 0; i < t; ++i) { - // select witness 'a' at random from between 1 and n1 - do { - a = new BigInteger(this.bitLength(), prng); - } - while(a.compareTo(BigInteger.ONE) <= 0 || a.compareTo(n1) >= 0); - var y = a.modPow(r,this); - if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) { - var j = 1; - while(j++ < k && y.compareTo(n1) != 0) { - y = y.modPowInt(2,this); - if(y.compareTo(BigInteger.ONE) == 0) return false; - } - if(y.compareTo(n1) != 0) return false; - } -} -return true; -} - -// get pseudo random number generator -function bnGetPrng() { - // create prng with api that matches BigInteger secure random - return { - // x is an array to fill with bytes - nextBytes: function(x) { - for(var i = 0; i < x.length; ++i) { - x[i] = Math.floor(Math.random() * 0x0100); + // 5.4.2.1 + requestList.push(requestObject) } - } - }; -} -//protected -BigInteger.prototype.chunkSize = bnpChunkSize; -BigInteger.prototype.toRadix = bnpToRadix; -BigInteger.prototype.fromRadix = bnpFromRadix; -BigInteger.prototype.fromNumber = bnpFromNumber; -BigInteger.prototype.bitwiseTo = bnpBitwiseTo; -BigInteger.prototype.changeBit = bnpChangeBit; -BigInteger.prototype.addTo = bnpAddTo; -BigInteger.prototype.dMultiply = bnpDMultiply; -BigInteger.prototype.dAddOffset = bnpDAddOffset; -BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo; -BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo; -BigInteger.prototype.modInt = bnpModInt; -BigInteger.prototype.millerRabin = bnpMillerRabin; - -//public -BigInteger.prototype.clone = bnClone; 
-BigInteger.prototype.intValue = bnIntValue; -BigInteger.prototype.byteValue = bnByteValue; -BigInteger.prototype.shortValue = bnShortValue; -BigInteger.prototype.signum = bnSigNum; -BigInteger.prototype.toByteArray = bnToByteArray; -BigInteger.prototype.equals = bnEquals; -BigInteger.prototype.min = bnMin; -BigInteger.prototype.max = bnMax; -BigInteger.prototype.and = bnAnd; -BigInteger.prototype.or = bnOr; -BigInteger.prototype.xor = bnXor; -BigInteger.prototype.andNot = bnAndNot; -BigInteger.prototype.not = bnNot; -BigInteger.prototype.shiftLeft = bnShiftLeft; -BigInteger.prototype.shiftRight = bnShiftRight; -BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit; -BigInteger.prototype.bitCount = bnBitCount; -BigInteger.prototype.testBit = bnTestBit; -BigInteger.prototype.setBit = bnSetBit; -BigInteger.prototype.clearBit = bnClearBit; -BigInteger.prototype.flipBit = bnFlipBit; -BigInteger.prototype.add = bnAdd; -BigInteger.prototype.subtract = bnSubtract; -BigInteger.prototype.multiply = bnMultiply; -BigInteger.prototype.divide = bnDivide; -BigInteger.prototype.remainder = bnRemainder; -BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder; -BigInteger.prototype.modPow = bnModPow; -BigInteger.prototype.modInverse = bnModInverse; -BigInteger.prototype.pow = bnPow; -BigInteger.prototype.gcd = bnGCD; -BigInteger.prototype.isProbablePrime = bnIsProbablePrime; - -//BigInteger interfaces not implemented in jsbn: - -//BigInteger(int signum, byte[] magnitude) -//double doubleValue() -//float floatValue() -//int hashCode() -//long longValue() -//static BigInteger valueOf(long val) + // 5.4.3 + promise.resolve(Object.freeze(requestList)) + }) + return promise.promise + } -/***/ }), + /** + * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm + * @param {CacheBatchOperation[]} operations + * @returns {requestResponseList} + */ + #batchCacheOperations (operations) { + // 1. + const cache = this.#relevantRequestResponseList -/***/ 5173: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 2. + const backupCache = [...cache] -/** - * Javascript implementation of RSA-KEM. - * - * @author Lautaro Cozzani Rodriguez - * @author Dave Longley - * - * Copyright (c) 2014 Lautaro Cozzani - * Copyright (c) 2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(7821); -__nccwpck_require__(7052); + // 3. + const addedItems = [] -module.exports = forge.kem = forge.kem || {}; + // 4.1 + const resultList = [] -var BigInteger = forge.jsbn.BigInteger; + try { + // 4.2 + for (const operation of operations) { + // 4.2.1 + if (operation.type !== 'delete' && operation.type !== 'put') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'operation type does not match "delete" or "put"' + }) + } -/** - * The API for the RSA Key Encapsulation Mechanism (RSA-KEM) from ISO 18033-2. - */ -forge.kem.rsa = {}; + // 4.2.2 + if (operation.type === 'delete' && operation.response != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'delete operation should not have an associated response' + }) + } -/** - * Creates an RSA KEM API object for generating a secret asymmetric key. - * - * The symmetric key may be generated via a call to 'encrypt', which will - * produce a ciphertext to be transmitted to the recipient and a key to be - * kept secret. 
The ciphertext is a parameter to be passed to 'decrypt' which - * will produce the same secret key for the recipient to use to decrypt a - * message that was encrypted with the secret key. - * - * @param kdf the KDF API to use (eg: new forge.kem.kdf1()). - * @param options the options to use. - * [prng] a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". - */ -forge.kem.rsa.create = function(kdf, options) { - options = options || {}; - var prng = options.prng || forge.random; + // 4.2.3 + if (this.#queryCache(operation.request, operation.options, addedItems).length) { + throw new DOMException('???', 'InvalidStateError') + } - var kem = {}; + // 4.2.4 + let requestResponses - /** - * Generates a secret key and its encapsulation. - * - * @param publicKey the RSA public key to encrypt with. - * @param keyLength the length, in bytes, of the secret key to generate. - * - * @return an object with: - * encapsulation: the ciphertext for generating the secret key, as a - * binary-encoded string of bytes. - * key: the secret key to use for encrypting a message. - */ - kem.encrypt = function(publicKey, keyLength) { - // generate a random r where 1 < r < n - var byteLength = Math.ceil(publicKey.n.bitLength() / 8); - var r; - do { - r = new BigInteger( - forge.util.bytesToHex(prng.getBytesSync(byteLength)), - 16).mod(publicKey.n); - } while(r.compareTo(BigInteger.ONE) <= 0); - - // prepend r with zeros - r = forge.util.hexToBytes(r.toString(16)); - var zeros = byteLength - r.length; - if(zeros > 0) { - r = forge.util.fillString(String.fromCharCode(0), zeros) + r; - } - - // encrypt the random - var encapsulation = publicKey.encrypt(r, 'NONE'); - - // generate the secret key - var key = kdf.generate(r, keyLength); - - return {encapsulation: encapsulation, key: key}; - }; + // 4.2.5 + if (operation.type === 'delete') { + // 4.2.5.1 + requestResponses = this.#queryCache(operation.request, operation.options) - /** - * Decrypts an encapsulated secret key. - * - * @param privateKey the RSA private key to decrypt with. - * @param encapsulation the ciphertext for generating the secret key, as - * a binary-encoded string of bytes. - * @param keyLength the length, in bytes, of the secret key to generate. - * - * @return the secret key as a binary-encoded string of bytes. - */ - kem.decrypt = function(privateKey, encapsulation, keyLength) { - // decrypt the encapsulation and generate the secret key - var r = privateKey.decrypt(encapsulation, 'NONE'); - return kdf.generate(r, keyLength); - }; + // TODO: the spec is wrong, this is needed to pass WPTs + if (requestResponses.length === 0) { + return [] + } - return kem; -}; + // 4.2.5.2 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) -// TODO: add forge.kem.kdf.create('KDF1', {md: ..., ...}) API? + // 4.2.5.2.1 + cache.splice(idx, 1) + } + } else if (operation.type === 'put') { // 4.2.6 + // 4.2.6.1 + if (operation.response == null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'put operation should have an associated response' + }) + } -/** - * Creates a key derivation API object that implements KDF1 per ISO 18033-2. - * - * @param md the hash API to use. - * @param [digestLength] an optional digest length that must be positive and - * less than or equal to md.digestLength. - * - * @return a KDF1 API object. 
- */ -forge.kem.kdf1 = function(md, digestLength) { - _createKDF(this, md, 0, digestLength || md.digestLength); -}; + // 4.2.6.2 + const r = operation.request -/** - * Creates a key derivation API object that implements KDF2 per ISO 18033-2. - * - * @param md the hash API to use. - * @param [digestLength] an optional digest length that must be positive and - * less than or equal to md.digestLength. - * - * @return a KDF2 API object. - */ -forge.kem.kdf2 = function(md, digestLength) { - _createKDF(this, md, 1, digestLength || md.digestLength); -}; + // 4.2.6.3 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'expected http or https scheme' + }) + } -/** - * Creates a KDF1 or KDF2 API object. - * - * @param md the hash API to use. - * @param counterStart the starting index for the counter. - * @param digestLength the digest length to use. - * - * @return the KDF API object. - */ -function _createKDF(kdf, md, counterStart, digestLength) { - /** - * Generate a key of the specified length. - * - * @param x the binary-encoded byte string to generate a key from. - * @param length the number of bytes to generate (the size of the key). - * - * @return the key as a binary-encoded string. - */ - kdf.generate = function(x, length) { - var key = new forge.util.ByteBuffer(); + // 4.2.6.4 + if (r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'not get method' + }) + } - // run counter from counterStart to ceil(length / Hash.len) - var k = Math.ceil(length / digestLength) + counterStart; + // 4.2.6.5 + if (operation.options != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'options must not be defined' + }) + } - var c = new forge.util.ByteBuffer(); - for(var i = counterStart; i < k; ++i) { - // I2OSP(i, 4): convert counter to an octet string of 4 octets - c.putInt32(i); + // 4.2.6.6 + requestResponses = this.#queryCache(operation.request) - // digest 'x' and the counter and add the result to the key - md.start(); - md.update(x + c.getBytes()); - var hash = md.digest(); - key.putBytes(hash.getBytes(digestLength)); - } + // 4.2.6.7 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) - // truncate to the correct key length - key.truncate(key.length() - length); - return key.getBytes(); - }; -} + // 4.2.6.7.1 + cache.splice(idx, 1) + } + // 4.2.6.8 + cache.push([operation.request, operation.response]) -/***/ }), + // 4.2.6.10 + addedItems.push([operation.request, operation.response]) + } -/***/ 9994: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 4.2.7 + resultList.push([operation.request, operation.response]) + } -/** - * Cross-browser support for logging in a web application. - * - * @author David I. Lehn - * - * Copyright (c) 2008-2013 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); + // 4.3 + return resultList + } catch (e) { // 5. + // 5.1 + this.#relevantRequestResponseList.length = 0 -/* LOG API */ -module.exports = forge.log = forge.log || {}; + // 5.2 + this.#relevantRequestResponseList = backupCache -/** - * Application logging system. - * - * Each logger level available as it's own function of the form: - * forge.log.level(category, args...) - * The category is an arbitrary string, and the args are the same as - * Firebug's console.log API. 
By default the call will be output as: - * 'LEVEL [category] , args[1], ...' - * This enables proper % formatting via the first argument. - * Each category is enabled by default but can be enabled or disabled with - * the setCategoryEnabled() function. - */ -// list of known levels -forge.log.levels = [ - 'none', 'error', 'warning', 'info', 'debug', 'verbose', 'max']; -// info on the levels indexed by name: -// index: level index -// name: uppercased display name -var sLevelInfo = {}; -// list of loggers -var sLoggers = []; -/** - * Standard console logger. If no console support is enabled this will - * remain null. Check before using. - */ -var sConsoleLogger = null; + // 5.3 + throw e + } + } -// logger flags -/** - * Lock the level at the current value. Used in cases where user config may - * set the level such that only critical messages are seen but more verbose - * messages are needed for debugging or other purposes. - */ -forge.log.LEVEL_LOCKED = (1 << 1); -/** - * Always call log function. By default, the logging system will check the - * message level against logger.level before calling the log function. This - * flag allows the function to do its own check. - */ -forge.log.NO_LEVEL_CHECK = (1 << 2); -/** - * Perform message interpolation with the passed arguments. "%" style - * fields in log messages will be replaced by arguments as needed. Some - * loggers, such as Firebug, may do this automatically. The original log - * message will be available as 'message' and the interpolated version will - * be available as 'fullMessage'. - */ -forge.log.INTERPOLATE = (1 << 3); + /** + * @see https://w3c.github.io/ServiceWorker/#query-cache + * @param {any} requestQuery + * @param {import('../../types/cache').CacheQueryOptions} options + * @param {requestResponseList} targetStorage + * @returns {requestResponseList} + */ + #queryCache (requestQuery, options, targetStorage) { + /** @type {requestResponseList} */ + const resultList = [] -// setup each log level -for(var i = 0; i < forge.log.levels.length; ++i) { - var level = forge.log.levels[i]; - sLevelInfo[level] = { - index: i, - name: level.toUpperCase() - }; -} + const storage = targetStorage ?? this.#relevantRequestResponseList -/** - * Message logger. Will dispatch a message to registered loggers as needed. 
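For reference, a minimal usage sketch of the RSA-KEM and KDF1 APIs documented in the removed forge.kem module above; it assumes the node-forge package is used directly and that the key pair comes from forge.pki.rsa.generateKeyPair, which is part of the wider forge API rather than this hunk:

var forge = require('node-forge');

// Any forge RSA key pair will do; generateKeyPair is assumed from the wider pki API.
var keys = forge.pki.rsa.generateKeyPair(2048);

// KDF1 over SHA-1, as suggested by the kem documentation ("new forge.kem.kdf1()").
var kdf = new forge.kem.kdf1(forge.md.sha1.create());
var kem = forge.kem.rsa.create(kdf);

// Sender: derive a 16-byte secret key plus the encapsulation to transmit.
var sent = kem.encrypt(keys.publicKey, 16);

// Receiver: recover the same secret key from the encapsulation.
var received = kem.decrypt(keys.privateKey, sent.encapsulation, 16);

// sent.key and received are the same binary-encoded 16-byte secret.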
- * - * @param message message object - */ -forge.log.logMessage = function(message) { - var messageLevelIndex = sLevelInfo[message.level].index; - for(var i = 0; i < sLoggers.length; ++i) { - var logger = sLoggers[i]; - if(logger.flags & forge.log.NO_LEVEL_CHECK) { - logger.f(message); - } else { - // get logger level - var loggerLevelIndex = sLevelInfo[logger.level].index; - // check level - if(messageLevelIndex <= loggerLevelIndex) { - // message critical enough, call logger - logger.f(logger, message); + for (const requestResponse of storage) { + const [cachedRequest, cachedResponse] = requestResponse + if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { + resultList.push(requestResponse) } } - } -}; -/** - * Sets the 'standard' key on a message object to: - * "LEVEL [category] " + message - * - * @param message a message log object - */ -forge.log.prepareStandard = function(message) { - if(!('standard' in message)) { - message.standard = - sLevelInfo[message.level].name + - //' ' + +message.timestamp + - ' [' + message.category + '] ' + - message.message; + return resultList } -}; -/** - * Sets the 'full' key on a message object to the original message - * interpolated via % formatting with the message arguments. - * - * @param message a message log object. - */ -forge.log.prepareFull = function(message) { - if(!('full' in message)) { - // copy args and insert message at the front - var args = [message.message]; - args = args.concat([] || 0); - // format the message - message.full = forge.util.format.apply(this, args); - } -}; + /** + * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm + * @param {any} requestQuery + * @param {any} request + * @param {any | null} response + * @param {import('../../types/cache').CacheQueryOptions | undefined} options + * @returns {boolean} + */ + #requestMatchesCachedItem (requestQuery, request, response = null, options) { + // if (options?.ignoreMethod === false && request.method === 'GET') { + // return false + // } -/** - * Applies both preparseStandard() and prepareFull() to a message object and - * store result in 'standardFull'. - * - * @param message a message log object. - */ -forge.log.prepareStandardFull = function(message) { - if(!('standardFull' in message)) { - // FIXME implement 'standardFull' logging - forge.log.prepareStandard(message); - message.standardFull = message.standard; - } -}; + const queryURL = new URL(requestQuery.url) -// create log level functions -if(true) { - // levels for which we want functions - var levels = ['error', 'warning', 'info', 'debug', 'verbose']; - for(var i = 0; i < levels.length; ++i) { - // wrap in a function to ensure proper level var is passed - (function(level) { - // create function for this level - forge.log[level] = function(category, message/*, args...*/) { - // convert arguments to real array, remove category and message - var args = Array.prototype.slice.call(arguments).slice(2); - // create message object - // Note: interpolation and standard formatting is done lazily - var msg = { - timestamp: new Date(), - level: level, - category: category, - message: message, - 'arguments': args - /*standard*/ - /*full*/ - /*fullMessage*/ - }; - // process this message - forge.log.logMessage(msg); - }; - })(levels[i]); - } -} + const cachedURL = new URL(request.url) -/** - * Creates a new logger with specified custom logging function. 
- * - * The logging function has a signature of: - * function(logger, message) - * logger: current logger - * message: object: - * level: level id - * category: category - * message: string message - * arguments: Array of extra arguments - * fullMessage: interpolated message and arguments if INTERPOLATE flag set - * - * @param logFunction a logging function which takes a log message object - * as a parameter. - * - * @return a logger object. - */ -forge.log.makeLogger = function(logFunction) { - var logger = { - flags: 0, - f: logFunction - }; - forge.log.setLevel(logger, 'none'); - return logger; -}; + if (options?.ignoreSearch) { + cachedURL.search = '' -/** - * Sets the current log level on a logger. - * - * @param logger the target logger. - * @param level the new maximum log level as a string. - * - * @return true if set, false if not. - */ -forge.log.setLevel = function(logger, level) { - var rval = false; - if(logger && !(logger.flags & forge.log.LEVEL_LOCKED)) { - for(var i = 0; i < forge.log.levels.length; ++i) { - var aValidLevel = forge.log.levels[i]; - if(level == aValidLevel) { - // set level - logger.level = level; - rval = true; - break; - } + queryURL.search = '' } - } - return rval; -}; + if (!urlEquals(queryURL, cachedURL, true)) { + return false + } -/** - * Locks the log level at its current value. - * - * @param logger the target logger. - * @param lock boolean lock value, default to true. - */ -forge.log.lock = function(logger, lock) { - if(typeof lock === 'undefined' || lock) { - logger.flags |= forge.log.LEVEL_LOCKED; - } else { - logger.flags &= ~forge.log.LEVEL_LOCKED; - } -}; + if ( + response == null || + options?.ignoreVary || + !response.headersList.contains('vary') + ) { + return true + } -/** - * Adds a logger. - * - * @param logger the logger object. 
- */ -forge.log.addLogger = function(logger) { - sLoggers.push(logger); -}; + const fieldValues = getFieldValues(response.headersList.get('vary')) -// setup the console logger if possible, else create fake console.log -if(typeof(console) !== 'undefined' && 'log' in console) { - var logger; - if(console.error && console.warn && console.info && console.debug) { - // looks like Firebug-style logging is available - // level handlers map - var levelHandlers = { - error: console.error, - warning: console.warn, - info: console.info, - debug: console.debug, - verbose: console.debug - }; - var f = function(logger, message) { - forge.log.prepareStandard(message); - var handler = levelHandlers[message.level]; - // prepend standard message and concat args - var args = [message.standard]; - args = args.concat(message['arguments'].slice()); - // apply to low-level console function - handler.apply(console, args); - }; - logger = forge.log.makeLogger(f); - } else { - // only appear to have basic console.log - var f = function(logger, message) { - forge.log.prepareStandardFull(message); - console.log(message.standardFull); - }; - logger = forge.log.makeLogger(f); + for (const fieldValue of fieldValues) { + if (fieldValue === '*') { + return false + } + + const requestValue = request.headersList.get(fieldValue) + const queryValue = requestQuery.headersList.get(fieldValue) + + // If one has the header and the other doesn't, or one has + // a different value than the other, return false + if (requestValue !== queryValue) { + return false + } + } + + return true } - forge.log.setLevel(logger, 'debug'); - forge.log.addLogger(logger); - sConsoleLogger = logger; -} else { - // define fake console.log to avoid potential script errors on - // browsers that do not have console logging - console = { - log: function() {} - }; } -/* - * Check for logging control query vars in current URL. - * - * console.level= - * Set's the console log level by name. Useful to override defaults and - * allow more verbose logging before a user config is loaded. - * - * console.lock= - * Lock the console log level at whatever level it is set at. This is run - * after console.level is processed. Useful to force a level of verbosity - * that could otherwise be limited by a user config. 
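The added Cache/CacheStorage code above implements the ServiceWorker cache-matching algorithm, where ignoreSearch strips query strings and ignoreVary skips Vary-header checks before an entry is considered a hit. A rough usage sketch, assuming the bundled undici build exposes its CacheStorage instance and Response class the way the published undici package does (as caches and Response):

const { caches, Response } = require('undici') // assumed exports

async function demo () {
  const cache = await caches.open('v1')

  // Store a response under a URL that carries a query string.
  await cache.put('https://example.com/data?page=1', new Response('hello'))

  // ignoreSearch blanks the search component of both URLs before comparing,
  // so a lookup with a different query string still matches the stored entry.
  const hit = await cache.match('https://example.com/data?page=2', { ignoreSearch: true })
  console.log(hit ? await hit.text() : 'miss')

  await caches.delete('v1')
}

demo()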
- */ -if(sConsoleLogger !== null && - typeof window !== 'undefined' && window.location -) { - var query = new URL(window.location.href).searchParams; - if(query.has('console.level')) { - // set with last value - forge.log.setLevel( - sConsoleLogger, query.get('console.level').slice(-1)[0]); - } - if(query.has('console.lock')) { - // set with last value - var lock = query.get('console.lock').slice(-1)[0]; - if(lock == 'true') { - forge.log.lock(sConsoleLogger); - } +Object.defineProperties(Cache.prototype, { + [Symbol.toStringTag]: { + value: 'Cache', + configurable: true + }, + match: kEnumerableProperty, + matchAll: kEnumerableProperty, + add: kEnumerableProperty, + addAll: kEnumerableProperty, + put: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +const cacheQueryOptionConverters = [ + { + key: 'ignoreSearch', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreMethod', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreVary', + converter: webidl.converters.boolean, + defaultValue: false } -} - -// provide public access to console logger -forge.log.consoleLogger = sConsoleLogger; +] +webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) -/***/ }), +webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ + ...cacheQueryOptionConverters, + { + key: 'cacheName', + converter: webidl.converters.DOMString + } +]) -/***/ 1145: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +webidl.converters.Response = webidl.interfaceConverter(Response) -/** - * Node.js module for all known Forge message digests. - * - * @author Dave Longley - * - * Copyright 2011-2017 Digital Bazaar, Inc. - */ -module.exports = __nccwpck_require__(6231); +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.RequestInfo +) -__nccwpck_require__(6594); -__nccwpck_require__(279); -__nccwpck_require__(4086); -__nccwpck_require__(9542); +module.exports = { + Cache +} /***/ }), -/***/ 6231: +/***/ 7907: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Node.js module for Forge message digests. - * - * @author Dave Longley - * - * Copyright 2011-2017 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); +"use strict"; -module.exports = forge.md = forge.md || {}; -forge.md.algorithms = forge.md.algorithms || {}; +const { kConstruct } = __nccwpck_require__(9174) +const { Cache } = __nccwpck_require__(6101) +const { webidl } = __nccwpck_require__(1744) +const { kEnumerableProperty } = __nccwpck_require__(3983) -/***/ }), +class CacheStorage { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map + * @type {Map { + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } + } -/** - * Message Digest Algorithm 5 with 128-bit digest (MD5) implementation. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + async match (request, options = {}) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' }) -var md5 = module.exports = forge.md5 = forge.md5 || {}; -forge.md.md5 = forge.md.algorithms.md5 = md5; + request = webidl.converters.RequestInfo(request) + options = webidl.converters.MultiCacheQueryOptions(options) -/** - * Creates an MD5 message digest object. - * - * @return a message digest object. - */ -md5.create = function() { - // do initialization as necessary - if(!_initialized) { - _init(); - } - - // MD5 state contains four 32-bit integers - var _state = null; - - // input buffer - var _input = forge.util.createBuffer(); - - // used for word storage - var _w = new Array(16); - - // message digest object - var md = { - algorithm: 'md5', - blockLength: 64, - digestLength: 16, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 8 - }; + // 1. + if (options.cacheName != null) { + // 1.1.1.1 + if (this.#caches.has(options.cacheName)) { + // 1.1.1.1.1 + const cacheList = this.#caches.get(options.cacheName) + const cache = new Cache(kConstruct, cacheList) + + return await cache.match(request, options) + } + } else { // 2. + // 2.2 + for (const cacheList of this.#caches.values()) { + const cache = new Cache(kConstruct, cacheList) + + // 2.2.1.2 + const response = await cache.match(request, options) + + if (response !== undefined) { + return response + } + } + } + } /** - * Starts the digest. - * - * @return this digest object. + * @see https://w3c.github.io/ServiceWorker/#cache-storage-has + * @param {string} cacheName + * @returns {Promise} */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength64 for backwards-compatibility) - md.fullMessageLength = md.messageLength64 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _state = { - h0: 0x67452301, - h1: 0xEFCDAB89, - h2: 0x98BADCFE, - h3: 0x10325476 - }; - return md; - }; - // start digest automatically for first time - md.start(); + async has (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1.1 + // 2.2 + return this.#caches.has(cacheName) + } /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. 
+ * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open + * @param {string} cacheName + * @returns {Promise} */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); - } + async open (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = (len[1] / 0x100000000) >>> 0; - } + cacheName = webidl.converters.DOMString(cacheName) - // add bytes to input buffer - _input.putBytes(msg); + // 2.1 + if (this.#caches.has(cacheName)) { + // await caches.open('v1') !== await caches.open('v1') - // process bytes - _update(_state, _w, _input); + // 2.1.1 + const cache = this.#caches.get(cacheName) - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); + // 2.1.1.1 + return new Cache(kConstruct, cache) } - return md; - }; + // 2.2 + const cache = [] + + // 2.3 + this.#caches.set(cacheName, cache) + + // 2.4 + return new Cache(kConstruct, cache) + } /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. + * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete + * @param {string} cacheName + * @returns {Promise} */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate MD5 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 448 mod 512. In other words, - the data to be digested must be a multiple of 512 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 8 bytes (64 - bits), that means that the last segment of the data must have 56 bytes - (448 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 448 mod 512 because - 512 - 128 = 448. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 448 mod 512, then 512 padding bits must be added. 
*/ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in little-endian order; since length - // is stored in bytes we multiply by 8 and add carry - var bits, carry = 0; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - bits = md.fullMessageLength[i] * 8 + carry; - carry = (bits / 0x100000000) >>> 0; - finalBlock.putInt32Le(bits >>> 0); - } - - var s2 = { - h0: _state.h0, - h1: _state.h1, - h2: _state.h2, - h3: _state.h3 - }; - _update(s2, _w, finalBlock); - var rval = forge.util.createBuffer(); - rval.putInt32Le(s2.h0); - rval.putInt32Le(s2.h1); - rval.putInt32Le(s2.h2); - rval.putInt32Le(s2.h3); - return rval; - }; - - return md; -}; + async delete (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) -// padding, constant tables for calculating md5 -var _padding = null; -var _g = null; -var _r = null; -var _k = null; -var _initialized = false; + cacheName = webidl.converters.DOMString(cacheName) -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + return this.#caches.delete(cacheName) + } - // g values - _g = [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 1, 6, 11, 0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12, - 5, 8, 11, 14, 1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 2, - 0, 7, 14, 5, 12, 3, 10, 1, 8, 15, 6, 13, 4, 11, 2, 9]; + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys + * @returns {string[]} + */ + async keys () { + webidl.brandCheck(this, CacheStorage) - // rounds table - _r = [ - 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, - 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, - 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, - 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21]; + // 2.1 + const keys = this.#caches.keys() - // get the result of abs(sin(i + 1)) as a 32-bit integer - _k = new Array(64); - for(var i = 0; i < 64; ++i) { - _k[i] = Math.floor(Math.abs(Math.sin(i + 1)) * 0x100000000); + // 2.2 + return [...keys] } - - // now initialized - _initialized = true; } -/** - * Updates an MD5 state with the given byte buffer. - * - * @param s the MD5 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. 
- */ -function _update(s, w, bytes) { - // consume 512 bit (64 byte) chunks - var t, a, b, c, d, f, r, i; - var len = bytes.length(); - while(len >= 64) { - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - - // round 1 - for(i = 0; i < 16; ++i) { - w[i] = bytes.getInt32Le(); - f = d ^ (b & (c ^ d)); - t = (a + f + _k[i] + w[i]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - // round 2 - for(; i < 32; ++i) { - f = c ^ (d & (b ^ c)); - t = (a + f + _k[i] + w[_g[i]]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - // round 3 - for(; i < 48; ++i) { - f = b ^ c ^ d; - t = (a + f + _k[i] + w[_g[i]]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - // round 4 - for(; i < 64; ++i) { - f = c ^ (b | ~d); - t = (a + f + _k[i] + w[_g[i]]); - r = _r[i]; - a = d; - d = c; - c = b; - b += (t << r) | (t >>> (32 - r)); - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - - len -= 64; - } +Object.defineProperties(CacheStorage.prototype, { + [Symbol.toStringTag]: { + value: 'CacheStorage', + configurable: true + }, + match: kEnumerableProperty, + has: kEnumerableProperty, + open: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +module.exports = { + CacheStorage } /***/ }), -/***/ 7973: +/***/ 9174: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Node.js module for Forge mask generation functions. - * - * @author Stefan Siegl - * - * Copyright 2012 Stefan Siegl - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(3339); +"use strict"; + -module.exports = forge.mgf = forge.mgf || {}; -forge.mgf.mgf1 = forge.mgf1; +module.exports = { + kConstruct: (__nccwpck_require__(2785).kConstruct) +} /***/ }), -/***/ 3339: +/***/ 2396: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + +const assert = __nccwpck_require__(9491) +const { URLSerializer } = __nccwpck_require__(685) +const { isValidHeaderName } = __nccwpck_require__(2538) + /** - * Javascript implementation of mask generation function MGF1. - * - * @author Stefan Siegl - * @author Dave Longley - * - * Copyright (c) 2012 Stefan Siegl - * Copyright (c) 2014 Digital Bazaar, Inc. + * @see https://url.spec.whatwg.org/#concept-url-equals + * @param {URL} A + * @param {URL} B + * @param {boolean | undefined} excludeFragment + * @returns {boolean} */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); +function urlEquals (A, B, excludeFragment = false) { + const serializedA = URLSerializer(A, excludeFragment) + + const serializedB = URLSerializer(B, excludeFragment) -forge.mgf = forge.mgf || {}; -var mgf1 = module.exports = forge.mgf.mgf1 = forge.mgf1 = forge.mgf1 || {}; + return serializedA === serializedB +} /** - * Creates a MGF1 mask generation function object. - * - * @param md the message digest API to use (eg: forge.md.sha1.create()). - * - * @return a mask generation function object. + * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 + * @param {string} header */ -mgf1.create = function(md) { - var mgf = { - /** - * Generate mask of specified length. - * - * @param {String} seed The seed for mask generation. - * @param maskLen Number of bytes to generate. - * @return {String} The generated mask. 
- */ - generate: function(seed, maskLen) { - /* 2. Let T be the empty octet string. */ - var t = new forge.util.ByteBuffer(); - - /* 3. For counter from 0 to ceil(maskLen / hLen), do the following: */ - var len = Math.ceil(maskLen / md.digestLength); - for(var i = 0; i < len; i++) { - /* a. Convert counter to an octet string C of length 4 octets */ - var c = new forge.util.ByteBuffer(); - c.putInt32(i); - - /* b. Concatenate the hash of the seed mgfSeed and C to the octet - * string T: */ - md.start(); - md.update(seed + c.getBytes()); - t.putBuffer(md.digest()); - } +function fieldValues (header) { + assert(header !== null) + + const values = [] - /* Output the leading maskLen octets of T as the octet string mask. */ - t.truncate(t.length() - maskLen); - return t.getBytes(); + for (let value of header.split(',')) { + value = value.trim() + + if (!value.length) { + continue + } else if (!isValidHeaderName(value)) { + continue } - }; - return mgf; -}; + values.push(value) + } + + return values +} + +module.exports = { + urlEquals, + fieldValues +} /***/ }), -/***/ 1925: +/***/ 3598: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Object IDs for ASN.1. - * - * @author Dave Longley - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); - -forge.pki = forge.pki || {}; -var oids = module.exports = forge.pki.oids = forge.oids = forge.oids || {}; - -// set id to name mapping and name to id mapping -function _IN(id, name) { - oids[id] = name; - oids[name] = id; -} -// set id to name mapping only -function _I_(id, name) { - oids[id] = name; -} - -// algorithm OIDs -_IN('1.2.840.113549.1.1.1', 'rsaEncryption'); -// Note: md2 & md4 not implemented -//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption'); -//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption'); -_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption'); -_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption'); -_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP'); -_IN('1.2.840.113549.1.1.8', 'mgf1'); -_IN('1.2.840.113549.1.1.9', 'pSpecified'); -_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS'); -_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption'); -_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption'); -_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption'); -// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519 -_IN('1.3.101.112', 'EdDSA25519'); - -_IN('1.2.840.10040.4.3', 'dsa-with-sha1'); - -_IN('1.3.14.3.2.7', 'desCBC'); - -_IN('1.3.14.3.2.26', 'sha1'); -// Deprecated equivalent of sha1WithRSAEncryption -_IN('1.3.14.3.2.29', 'sha1WithRSASignature'); -_IN('2.16.840.1.101.3.4.2.1', 'sha256'); -_IN('2.16.840.1.101.3.4.2.2', 'sha384'); -_IN('2.16.840.1.101.3.4.2.3', 'sha512'); -_IN('2.16.840.1.101.3.4.2.4', 'sha224'); -_IN('2.16.840.1.101.3.4.2.5', 'sha512-224'); -_IN('2.16.840.1.101.3.4.2.6', 'sha512-256'); -_IN('1.2.840.113549.2.2', 'md2'); -_IN('1.2.840.113549.2.5', 'md5'); - -// pkcs#7 content types -_IN('1.2.840.113549.1.7.1', 'data'); -_IN('1.2.840.113549.1.7.2', 'signedData'); -_IN('1.2.840.113549.1.7.3', 'envelopedData'); -_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData'); -_IN('1.2.840.113549.1.7.5', 'digestedData'); -_IN('1.2.840.113549.1.7.6', 'encryptedData'); - -// pkcs#9 oids -_IN('1.2.840.113549.1.9.1', 'emailAddress'); -_IN('1.2.840.113549.1.9.2', 'unstructuredName'); -_IN('1.2.840.113549.1.9.3', 'contentType'); -_IN('1.2.840.113549.1.9.4', 'messageDigest'); -_IN('1.2.840.113549.1.9.5', 'signingTime'); -_IN('1.2.840.113549.1.9.6', 'counterSignature'); 
-_IN('1.2.840.113549.1.9.7', 'challengePassword'); -_IN('1.2.840.113549.1.9.8', 'unstructuredAddress'); -_IN('1.2.840.113549.1.9.14', 'extensionRequest'); - -_IN('1.2.840.113549.1.9.20', 'friendlyName'); -_IN('1.2.840.113549.1.9.21', 'localKeyId'); -_IN('1.2.840.113549.1.9.22.1', 'x509Certificate'); - -// pkcs#12 safe bags -_IN('1.2.840.113549.1.12.10.1.1', 'keyBag'); -_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag'); -_IN('1.2.840.113549.1.12.10.1.3', 'certBag'); -_IN('1.2.840.113549.1.12.10.1.4', 'crlBag'); -_IN('1.2.840.113549.1.12.10.1.5', 'secretBag'); -_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag'); - -// password-based-encryption for pkcs#12 -_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2'); -_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2'); - -_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4'); -_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4'); -_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC'); -_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC'); -_IN('1.2.840.113549.1.12.1.5', 'pbeWithSHAAnd128BitRC2-CBC'); -_IN('1.2.840.113549.1.12.1.6', 'pbewithSHAAnd40BitRC2-CBC'); - -// hmac OIDs -_IN('1.2.840.113549.2.7', 'hmacWithSHA1'); -_IN('1.2.840.113549.2.8', 'hmacWithSHA224'); -_IN('1.2.840.113549.2.9', 'hmacWithSHA256'); -_IN('1.2.840.113549.2.10', 'hmacWithSHA384'); -_IN('1.2.840.113549.2.11', 'hmacWithSHA512'); - -// symmetric key algorithm oids -_IN('1.2.840.113549.3.7', 'des-EDE3-CBC'); -_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC'); -_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC'); -_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC'); - -// certificate issuer/subject OIDs -_IN('2.5.4.3', 'commonName'); -_IN('2.5.4.4', 'surname'); -_IN('2.5.4.5', 'serialNumber'); -_IN('2.5.4.6', 'countryName'); -_IN('2.5.4.7', 'localityName'); -_IN('2.5.4.8', 'stateOrProvinceName'); -_IN('2.5.4.9', 'streetAddress'); -_IN('2.5.4.10', 'organizationName'); -_IN('2.5.4.11', 'organizationalUnitName'); -_IN('2.5.4.12', 'title'); -_IN('2.5.4.13', 'description'); -_IN('2.5.4.15', 'businessCategory'); -_IN('2.5.4.17', 'postalCode'); -_IN('2.5.4.42', 'givenName'); -_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName'); -_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName'); - -// X.509 extension OIDs -_IN('2.16.840.1.113730.1.1', 'nsCertType'); -_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used -_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35 -_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15 -_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32 -_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15 -_I_('2.5.29.5', 'policyMapping'); // deprecated use .33 -_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30 -_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17 -_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18 -_I_('2.5.29.9', 'subjectDirectoryAttributes'); -_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19 -_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30 -_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36 -_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19 -_IN('2.5.29.14', 'subjectKeyIdentifier'); -_IN('2.5.29.15', 'keyUsage'); -_I_('2.5.29.16', 'privateKeyUsagePeriod'); -_IN('2.5.29.17', 'subjectAltName'); -_IN('2.5.29.18', 'issuerAltName'); -_IN('2.5.29.19', 'basicConstraints'); -_I_('2.5.29.20', 'cRLNumber'); -_I_('2.5.29.21', 'cRLReason'); -_I_('2.5.29.22', 'expirationDate'); 
-_I_('2.5.29.23', 'instructionCode'); -_I_('2.5.29.24', 'invalidityDate'); -_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31 -_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28 -_I_('2.5.29.27', 'deltaCRLIndicator'); -_I_('2.5.29.28', 'issuingDistributionPoint'); -_I_('2.5.29.29', 'certificateIssuer'); -_I_('2.5.29.30', 'nameConstraints'); -_IN('2.5.29.31', 'cRLDistributionPoints'); -_IN('2.5.29.32', 'certificatePolicies'); -_I_('2.5.29.33', 'policyMappings'); -_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36 -_IN('2.5.29.35', 'authorityKeyIdentifier'); -_I_('2.5.29.36', 'policyConstraints'); -_IN('2.5.29.37', 'extKeyUsage'); -_I_('2.5.29.46', 'freshestCRL'); -_I_('2.5.29.54', 'inhibitAnyPolicy'); - -// extKeyUsage purposes -_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList'); -_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess'); -_IN('1.3.6.1.5.5.7.3.1', 'serverAuth'); -_IN('1.3.6.1.5.5.7.3.2', 'clientAuth'); -_IN('1.3.6.1.5.5.7.3.3', 'codeSigning'); -_IN('1.3.6.1.5.5.7.3.4', 'emailProtection'); -_IN('1.3.6.1.5.5.7.3.8', 'timeStamping'); +"use strict"; +// @ts-check + + + +/* global WebAssembly */ + +const assert = __nccwpck_require__(9491) +const net = __nccwpck_require__(1808) +const http = __nccwpck_require__(3685) +const { pipeline } = __nccwpck_require__(2781) +const util = __nccwpck_require__(3983) +const timers = __nccwpck_require__(9459) +const Request = __nccwpck_require__(2905) +const DispatcherBase = __nccwpck_require__(4839) +const { + RequestContentLengthMismatchError, + ResponseContentLengthMismatchError, + InvalidArgumentError, + RequestAbortedError, + HeadersTimeoutError, + HeadersOverflowError, + SocketError, + InformationalError, + BodyTimeoutError, + HTTPParserError, + ResponseExceededMaxSizeError, + ClientDestroyedError +} = __nccwpck_require__(8045) +const buildConnector = __nccwpck_require__(2067) +const { + kUrl, + kReset, + kServerName, + kClient, + kBusy, + kParser, + kConnect, + kBlocking, + kResuming, + kRunning, + kPending, + kSize, + kWriting, + kQueue, + kConnected, + kConnecting, + kNeedDrain, + kNoRef, + kKeepAliveDefaultTimeout, + kHostHeader, + kPendingIdx, + kRunningIdx, + kError, + kPipelining, + kSocket, + kKeepAliveTimeoutValue, + kMaxHeadersSize, + kKeepAliveMaxTimeout, + kKeepAliveTimeoutThreshold, + kHeadersTimeout, + kBodyTimeout, + kStrictContentLength, + kConnector, + kMaxRedirections, + kMaxRequests, + kCounter, + kClose, + kDestroy, + kDispatch, + kInterceptors, + kLocalAddress, + kMaxResponseSize, + kHTTPConnVersion, + // HTTP2 + kHost, + kHTTP2Session, + kHTTP2SessionState, + kHTTP2BuildRequest, + kHTTP2CopyHeaders, + kHTTP1BuildRequest +} = __nccwpck_require__(2785) + +/** @type {import('http2')} */ +let http2 +try { + http2 = __nccwpck_require__(5158) +} catch { + // @ts-ignore + http2 = { constants: {} } +} +const { + constants: { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_EXPECT, + HTTP2_HEADER_STATUS + } +} = http2 -/***/ }), +// Experimental +let h2ExperimentalWarned = false -/***/ 1281: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const FastBuffer = Buffer[Symbol.species] -/** - * Password-based encryption functions. - * - * @author Dave Longley - * @author Stefan Siegl - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. 
- * Copyright (c) 2012 Stefan Siegl - * - * An EncryptedPrivateKeyInfo: - * - * EncryptedPrivateKeyInfo ::= SEQUENCE { - * encryptionAlgorithm EncryptionAlgorithmIdentifier, - * encryptedData EncryptedData } - * - * EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier - * - * EncryptedData ::= OCTET STRING - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9549); -__nccwpck_require__(7157); -__nccwpck_require__(6231); -__nccwpck_require__(1925); -__nccwpck_require__(1611); -__nccwpck_require__(154); -__nccwpck_require__(7821); -__nccwpck_require__(9965); -__nccwpck_require__(3921); -__nccwpck_require__(8339); - -if(typeof BigInteger === 'undefined') { - var BigInteger = forge.jsbn.BigInteger; -} - -// shortcut for asn.1 API -var asn1 = forge.asn1; - -/* Password-based encryption implementation. */ -var pki = forge.pki = forge.pki || {}; -module.exports = pki.pbe = forge.pbe = forge.pbe || {}; -var oids = pki.oids; - -// validator for an EncryptedPrivateKeyInfo structure -// Note: Currently only works w/algorithm params -var encryptedPrivateKeyValidator = { - name: 'EncryptedPrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encryptionOid' - }, { - name: 'AlgorithmIdentifier.parameters', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'encryptionParams' - }] - }, { - // encryptedData - name: 'EncryptedPrivateKeyInfo.encryptedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'encryptedData' - }] -}; +const kClosedResolve = Symbol('kClosedResolve') -// validator for a PBES2Algorithms structure -// Note: Currently only works w/PBKDF2 + AES encryption schemes -var PBES2AlgorithmsValidator = { - name: 'PBES2Algorithms', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.keyDerivationFunc', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.keyDerivationFunc.oid', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'kdfOid' - }, { - name: 'PBES2Algorithms.params', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.params.salt', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'kdfSalt' - }, { - name: 'PBES2Algorithms.params.iterationCount', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'kdfIterationCount' - }, { - name: 'PBES2Algorithms.params.keyLength', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - optional: true, - capture: 'keyLength' - }, { - // prf - name: 'PBES2Algorithms.params.prf', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - value: [{ - name: 'PBES2Algorithms.params.prf.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'prfOid' - }] - }] - }] - }, { - name: 'PBES2Algorithms.encryptionScheme', - tagClass: 
asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PBES2Algorithms.encryptionScheme.oid', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encOid' - }, { - name: 'PBES2Algorithms.encryptionScheme.iv', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'encIv' - }] - }] -}; +const channels = {} -var pkcs12PbeParamsValidator = { - name: 'pkcs-12PbeParams', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'pkcs-12PbeParams.salt', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'salt' - }, { - name: 'pkcs-12PbeParams.iterations', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'iterations' - }] -}; +try { + const diagnosticsChannel = __nccwpck_require__(7643) + channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') + channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') + channels.connectError = diagnosticsChannel.channel('undici:client:connectError') + channels.connected = diagnosticsChannel.channel('undici:client:connected') +} catch { + channels.sendHeaders = { hasSubscribers: false } + channels.beforeConnect = { hasSubscribers: false } + channels.connectError = { hasSubscribers: false } + channels.connected = { hasSubscribers: false } +} /** - * Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo. - * - * PBES2Algorithms ALGORITHM-IDENTIFIER ::= - * { {PBES2-params IDENTIFIED BY id-PBES2}, ...} - * - * id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13} - * - * PBES2-params ::= SEQUENCE { - * keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, - * encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} - * } - * - * PBES2-KDFs ALGORITHM-IDENTIFIER ::= - * { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... } - * - * PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... } - * - * PBKDF2-params ::= SEQUENCE { - * salt CHOICE { - * specified OCTET STRING, - * otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} - * }, - * iterationCount INTEGER (1..MAX), - * keyLength INTEGER (1..MAX) OPTIONAL, - * prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 - * } - * - * @param obj the ASN.1 PrivateKeyInfo object. - * @param password the password to encrypt with. - * @param options: - * algorithm the encryption algorithm to use - * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'. - * count the iteration count to use. - * saltSize the salt size to use. - * prfAlgorithm the PRF message digest algorithm to use - * ('sha1', 'sha224', 'sha256', 'sha384', 'sha512') - * - * @return the ASN.1 EncryptedPrivateKeyInfo. 
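// --- Illustrative sketch (not part of the generated bundle): how the node-forge
// PBE helpers documented above fit together. Requiring 'node-forge' directly and
// the 2048-bit key, password and option values are assumptions for the example.
const forge = require('node-forge')

const { privateKey } = forge.pki.rsa.generateKeyPair(2048)

// Wrap the key in a PrivateKeyInfo, then build a PKCS#8 EncryptedPrivateKeyInfo.
const pkInfo = forge.pki.wrapRsaPrivateKey(forge.pki.privateKeyToAsn1(privateKey))
const epki = forge.pki.encryptPrivateKeyInfo(pkInfo, 'secret', {
  algorithm: 'aes256',    // PBES2 with AES-256-CBC
  count: 10000,           // PBKDF2 iteration count
  prfAlgorithm: 'sha256'  // PBKDF2 PRF
})
const encryptedPem = forge.pki.encryptedPrivateKeyToPem(epki)

// Reverse the process and recover the RSA key object.
const decryptedInfo = forge.pki.decryptPrivateKeyInfo(
  forge.pki.encryptedPrivateKeyFromPem(encryptedPem), 'secret')
const recovered = forge.pki.privateKeyFromAsn1(decryptedInfo)
// --- end sketch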
+ * @type {import('../types/client').default} */ -pki.encryptPrivateKeyInfo = function(obj, password, options) { - // set default options - options = options || {}; - options.saltSize = options.saltSize || 8; - options.count = options.count || 2048; - options.algorithm = options.algorithm || 'aes128'; - options.prfAlgorithm = options.prfAlgorithm || 'sha1'; - - // generate PBE params - var salt = forge.random.getBytesSync(options.saltSize); - var count = options.count; - var countBytes = asn1.integerToDer(count); - var dkLen; - var encryptionAlgorithm; - var encryptedData; - if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') { - // do PBES2 - var ivLen, encOid, cipherFn; - switch(options.algorithm) { - case 'aes128': - dkLen = 16; - ivLen = 16; - encOid = oids['aes128-CBC']; - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes192': - dkLen = 24; - ivLen = 16; - encOid = oids['aes192-CBC']; - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes256': - dkLen = 32; - ivLen = 16; - encOid = oids['aes256-CBC']; - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'des': - dkLen = 8; - ivLen = 8; - encOid = oids['desCBC']; - cipherFn = forge.des.createEncryptionCipher; - break; - default: - var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.'); - error.algorithm = options.algorithm; - throw error; +class Client extends DispatcherBase { + /** + * + * @param {string|URL} url + * @param {import('../types/client').Client.Options} options + */ + constructor (url, { + interceptors, + maxHeaderSize, + headersTimeout, + socketTimeout, + requestTimeout, + connectTimeout, + bodyTimeout, + idleTimeout, + keepAlive, + keepAliveTimeout, + maxKeepAliveTimeout, + keepAliveMaxTimeout, + keepAliveTimeoutThreshold, + socketPath, + pipelining, + tls, + strictContentLength, + maxCachedSessions, + maxRedirections, + connect, + maxRequestsPerClient, + localAddress, + maxResponseSize, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + // h2 + allowH2, + maxConcurrentStreams + } = {}) { + super() + + if (keepAlive !== undefined) { + throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') } - // get PRF message digest - var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase(); - var md = prfAlgorithmToMessageDigest(prfAlgorithm); - - // encrypt private key using pbe SHA-1 and AES/DES - var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md); - var iv = forge.random.getBytesSync(ivLen); - var cipher = cipherFn(dk); - cipher.start(iv); - cipher.update(asn1.toDer(obj)); - cipher.finish(); - encryptedData = cipher.output.getBytes(); - - // get PBKDF2-params - var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm); - - encryptionAlgorithm = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oids['pkcs5PBES2']).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // keyDerivationFunc - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()), - // PBKDF2-params - params - ]), - // encryptionScheme - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(encOid).getBytes()), - // iv - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv) - ]) - ]) - ]); - } 
else if(options.algorithm === '3des') { - // Do PKCS12 PBE - dkLen = 24; - - var saltBytes = new forge.util.ByteBuffer(salt); - var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen); - var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen); - var cipher = forge.des.createEncryptionCipher(dk); - cipher.start(iv); - cipher.update(asn1.toDer(obj)); - cipher.finish(); - encryptedData = cipher.output.getBytes(); - - encryptionAlgorithm = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()), - // pkcs-12PbeParams - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // salt - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt), - // iteration count - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - countBytes.getBytes()) - ]) - ]); - } else { - var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.'); - error.algorithm = options.algorithm; - throw error; - } - - // EncryptedPrivateKeyInfo - var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // encryptionAlgorithm - encryptionAlgorithm, - // encryptedData - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData) - ]); - return rval; -}; + if (socketTimeout !== undefined) { + throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } -/** - * Decrypts a ASN.1 PrivateKeyInfo object. - * - * @param obj the ASN.1 EncryptedPrivateKeyInfo object. - * @param password the password to decrypt with. - * - * @return the ASN.1 PrivateKeyInfo on success, null on failure. - */ -pki.decryptPrivateKeyInfo = function(obj, password) { - var rval = null; + if (requestTimeout !== undefined) { + throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } - // get PBE params - var capture = {}; - var errors = []; - if(!asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors)) { - var error = new Error('Cannot read encrypted private key. ' + - 'ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); - error.errors = errors; - throw error; - } + if (idleTimeout !== undefined) { + throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + } - // get cipher - var oid = asn1.derToOid(capture.encryptionOid); - var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password); + if (maxKeepAliveTimeout !== undefined) { + throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } - // get encrypted data - var encrypted = forge.util.createBuffer(capture.encryptedData); + if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { + throw new InvalidArgumentError('invalid maxHeaderSize') + } - cipher.update(encrypted); - if(cipher.finish()) { - rval = asn1.fromDer(cipher.output); - } + if (socketPath != null && typeof socketPath !== 'string') { + throw new InvalidArgumentError('invalid socketPath') + } - return rval; -}; + if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { + throw new InvalidArgumentError('invalid connectTimeout') + } -/** - * Converts a EncryptedPrivateKeyInfo to PEM format. - * - * @param epki the EncryptedPrivateKeyInfo. - * @param maxline the maximum characters per line, defaults to 64. 
- * - * @return the PEM-formatted encrypted private key. - */ -pki.encryptedPrivateKeyToPem = function(epki, maxline) { - // convert to DER, then PEM-encode - var msg = { - type: 'ENCRYPTED PRIVATE KEY', - body: asn1.toDer(epki).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveTimeout') + } -/** - * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption - * is not performed. - * - * @param pem the EncryptedPrivateKeyInfo in PEM-format. - * - * @return the ASN.1 EncryptedPrivateKeyInfo. - */ -pki.encryptedPrivateKeyFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; + if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + } - if(msg.type !== 'ENCRYPTED PRIVATE KEY') { - var error = new Error('Could not convert encrypted private key from PEM; ' + - 'PEM header type is "ENCRYPTED PRIVATE KEY".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert encrypted private key from PEM; ' + - 'PEM is encrypted.'); - } + if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { + throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') + } - // convert DER to ASN.1 object - return asn1.fromDer(msg.body); -}; + if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') + } -/** - * Encrypts an RSA private key. By default, the key will be wrapped in - * a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo. - * This is the standard, preferred way to encrypt a private key. - * - * To produce a non-standard PEM-encrypted private key that uses encapsulated - * headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL - * private key encryption), set the 'legacy' option to true. Note: Using this - * option will cause the iteration count to be forced to 1. - * - * Note: The 'des' algorithm is supported, but it is not considered to be - * secure because it only uses a single 56-bit key. If possible, it is highly - * recommended that a different algorithm be used. - * - * @param rsaKey the RSA key to encrypt. - * @param password the password to use. - * @param options: - * algorithm: the encryption algorithm to use - * ('aes128', 'aes192', 'aes256', '3des', 'des'). - * count: the iteration count to use. - * saltSize: the salt size to use. - * legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated - * headers (DEK-Info) private key. - * - * @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo. 
- */ -pki.encryptRsaPrivateKey = function(rsaKey, password, options) { - // standard PKCS#8 - options = options || {}; - if(!options.legacy) { - // encrypt PrivateKeyInfo - var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey)); - rval = pki.encryptPrivateKeyInfo(rval, password, options); - return pki.encryptedPrivateKeyToPem(rval); - } - - // legacy non-PKCS#8 - var algorithm; - var iv; - var dkLen; - var cipherFn; - switch(options.algorithm) { - case 'aes128': - algorithm = 'AES-128-CBC'; - dkLen = 16; - iv = forge.random.getBytesSync(16); - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes192': - algorithm = 'AES-192-CBC'; - dkLen = 24; - iv = forge.random.getBytesSync(16); - cipherFn = forge.aes.createEncryptionCipher; - break; - case 'aes256': - algorithm = 'AES-256-CBC'; - dkLen = 32; - iv = forge.random.getBytesSync(16); - cipherFn = forge.aes.createEncryptionCipher; - break; - case '3des': - algorithm = 'DES-EDE3-CBC'; - dkLen = 24; - iv = forge.random.getBytesSync(8); - cipherFn = forge.des.createEncryptionCipher; - break; - case 'des': - algorithm = 'DES-CBC'; - dkLen = 8; - iv = forge.random.getBytesSync(8); - cipherFn = forge.des.createEncryptionCipher; - break; - default: - var error = new Error('Could not encrypt RSA private key; unsupported ' + - 'encryption algorithm "' + options.algorithm + '".'); - error.algorithm = options.algorithm; - throw error; - } - - // encrypt private key using OpenSSL legacy key derivation - var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen); - var cipher = cipherFn(dk); - cipher.start(iv); - cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey))); - cipher.finish(); - - var msg = { - type: 'RSA PRIVATE KEY', - procType: { - version: '4', - type: 'ENCRYPTED' - }, - dekInfo: { - algorithm: algorithm, - parameters: forge.util.bytesToHex(iv).toUpperCase() - }, - body: cipher.output.getBytes() - }; - return forge.pem.encode(msg); -}; + if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') + } -/** - * Decrypts an RSA private key. - * - * @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt. - * @param password the password to use. - * - * @return the RSA key on success, null on failure. 
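// --- Illustrative sketch (not part of the generated bundle): usage of the two
// wrappers documented above. Requiring 'node-forge' directly and the password,
// algorithms and iteration count are assumptions for the example.
const forge = require('node-forge')
const { privateKey } = forge.pki.rsa.generateKeyPair(2048)

// Standard path: PKCS#8 EncryptedPrivateKeyInfo, PEM-encoded.
const pkcs8Pem = forge.pki.encryptRsaPrivateKey(privateKey, 'secret', {
  algorithm: 'aes256',
  count: 10000
})

// Legacy path: OpenSSL-style "RSA PRIVATE KEY" block with DEK-Info headers.
const legacyPem = forge.pki.encryptRsaPrivateKey(privateKey, 'secret', {
  algorithm: '3des',
  legacy: true
})

// Per the doc comment above, decryptRsaPrivateKey accepts either form and
// returns the RSA key on success or null on failure.
const recovered = forge.pki.decryptRsaPrivateKey(pkcs8Pem, 'secret')
// --- end sketch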
- */ -pki.decryptRsaPrivateKey = function(pem, password) { - var rval = null; - - var msg = forge.pem.decode(pem)[0]; - - if(msg.type !== 'ENCRYPTED PRIVATE KEY' && - msg.type !== 'PRIVATE KEY' && - msg.type !== 'RSA PRIVATE KEY') { - var error = new Error('Could not convert private key from PEM; PEM header type ' + - 'is not "ENCRYPTED PRIVATE KEY", "PRIVATE KEY", or "RSA PRIVATE KEY".'); - error.headerType = error; - throw error; - } - - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - var dkLen; - var cipherFn; - switch(msg.dekInfo.algorithm) { - case 'DES-CBC': - dkLen = 8; - cipherFn = forge.des.createDecryptionCipher; - break; - case 'DES-EDE3-CBC': - dkLen = 24; - cipherFn = forge.des.createDecryptionCipher; - break; - case 'AES-128-CBC': - dkLen = 16; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'AES-192-CBC': - dkLen = 24; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'AES-256-CBC': - dkLen = 32; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'RC2-40-CBC': - dkLen = 5; - cipherFn = function(key) { - return forge.rc2.createDecryptionCipher(key, 40); - }; - break; - case 'RC2-64-CBC': - dkLen = 8; - cipherFn = function(key) { - return forge.rc2.createDecryptionCipher(key, 64); - }; - break; - case 'RC2-128-CBC': - dkLen = 16; - cipherFn = function(key) { - return forge.rc2.createDecryptionCipher(key, 128); - }; - break; - default: - var error = new Error('Could not decrypt private key; unsupported ' + - 'encryption algorithm "' + msg.dekInfo.algorithm + '".'); - error.algorithm = msg.dekInfo.algorithm; - throw error; + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') } - // use OpenSSL legacy key derivation - var iv = forge.util.hexToBytes(msg.dekInfo.parameters); - var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen); - var cipher = cipherFn(dk); - cipher.start(iv); - cipher.update(forge.util.createBuffer(msg.body)); - if(cipher.finish()) { - rval = cipher.output.getBytes(); - } else { - return rval; + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') } - } else { - rval = msg.body; - } - if(msg.type === 'ENCRYPTED PRIVATE KEY') { - rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password); - } else { - // decryption already performed above - rval = asn1.fromDer(rval); + if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { + throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + } + + if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { + throw new InvalidArgumentError('localAddress must be valid string IP address') + } + + if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { + throw new InvalidArgumentError('maxResponseSize must be a positive number') + } + + if ( + autoSelectFamilyAttemptTimeout != null && + (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) + ) { + throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') + } + + // h2 + if (allowH2 != null && typeof allowH2 !== 'boolean') { + throw new InvalidArgumentError('allowH2 must be a valid boolean value') + } + + if (maxConcurrentStreams != null && (typeof 
maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { + throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) + ? interceptors.Client + : [createRedirectInterceptor({ maxRedirections })] + this[kUrl] = util.parseOrigin(url) + this[kConnector] = connect + this[kSocket] = null + this[kPipelining] = pipelining != null ? pipelining : 1 + this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize + this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout + this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout + this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold + this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] + this[kServerName] = null + this[kLocalAddress] = localAddress != null ? localAddress : null + this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` + this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 + this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 + this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength + this[kMaxRedirections] = maxRedirections + this[kMaxRequests] = maxRequestsPerClient + this[kClosedResolve] = null + this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 + this[kHTTPConnVersion] = 'h1' + + // HTTP/2 + this[kHTTP2Session] = null + this[kHTTP2SessionState] = !allowH2 + ? null + : { + // streams: null, // Fixed queue of streams - For future support of `push` + openStreams: 0, // Keep track of them to decide wether or not unref the session + maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + } + this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` + + // kQueue is built up of 3 sections separated by + // the kRunningIdx and kPendingIdx indices. + // | complete | running | pending | + // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length + // kRunningIdx points to the first running element. + // kPendingIdx points to the first pending element. + // This implements a fast queue with an amortized + // time of O(1). 
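// --- Standalone sketch (illustrative names, not part of the generated bundle)
// of the index bookkeeping described in the comment above: completed slots are
// nulled out rather than shifted away, and only the two indices advance, so
// enqueue/dequeue stay amortized O(1). A production queue would also compact
// the backing array from time to time.
class FastQueue {
  constructor () {
    this.queue = []
    this.runningIdx = 0 // index of the first running element
    this.pendingIdx = 0 // index of the first pending element
  }

  push (item) { this.queue.push(item) }                      // new item is pending

  startNext () { return this.queue[this.pendingIdx++] }      // pending -> running

  completeOldest () { this.queue[this.runningIdx++] = null } // running -> complete

  get pending () { return this.queue.length - this.pendingIdx }
  get running () { return this.pendingIdx - this.runningIdx }
  get size () { return this.queue.length - this.runningIdx }
}
// --- end sketch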
+ + this[kQueue] = [] + this[kRunningIdx] = 0 + this[kPendingIdx] = 0 + } + + get pipelining () { + return this[kPipelining] + } + + set pipelining (value) { + this[kPipelining] = value + resume(this, true) + } + + get [kPending] () { + return this[kQueue].length - this[kPendingIdx] + } + + get [kRunning] () { + return this[kPendingIdx] - this[kRunningIdx] + } + + get [kSize] () { + return this[kQueue].length - this[kRunningIdx] + } + + get [kConnected] () { + return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed + } + + get [kBusy] () { + const socket = this[kSocket] + return ( + (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || + (this[kSize] >= (this[kPipelining] || 1)) || + this[kPending] > 0 + ) } - if(rval !== null) { - rval = pki.privateKeyFromAsn1(rval); + /* istanbul ignore: only used for test */ + [kConnect] (cb) { + connect(this) + this.once('connect', cb) } - return rval; -}; + [kDispatch] (opts, handler) { + const origin = opts.origin || this[kUrl].origin -/** - * Derives a PKCS#12 key. - * - * @param password the password to derive the key material from, null or - * undefined for none. - * @param salt the salt, as a ByteBuffer, to use. - * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC). - * @param iter the iteration count. - * @param n the number of bytes to derive from the password. - * @param md the message digest to use, defaults to SHA-1. - * - * @return a ByteBuffer with the bytes derived from the password. - */ -pki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) { - var j, l; - - if(typeof md === 'undefined' || md === null) { - if(!('sha1' in forge.md)) { - throw new Error('"sha1" hash algorithm unavailable.'); - } - md = forge.md.sha1.create(); - } - - var u = md.digestLength; - var v = md.blockLength; - var result = new forge.util.ByteBuffer(); - - /* Convert password to Unicode byte buffer + trailing 0-byte. */ - var passBuf = new forge.util.ByteBuffer(); - if(password !== null && password !== undefined) { - for(l = 0; l < password.length; l++) { - passBuf.putInt16(password.charCodeAt(l)); - } - passBuf.putInt16(0); - } - - /* Length of salt and password in BYTES. */ - var p = passBuf.length(); - var s = salt.length(); - - /* 1. Construct a string, D (the "diversifier"), by concatenating - v copies of ID. */ - var D = new forge.util.ByteBuffer(); - D.fillWithByte(id, v); - - /* 2. Concatenate copies of the salt together to create a string S of length - v * ceil(s / v) bytes (the final copy of the salt may be trunacted - to create S). - Note that if the salt is the empty string, then so is S. */ - var Slen = v * Math.ceil(s / v); - var S = new forge.util.ByteBuffer(); - for(l = 0; l < Slen; l++) { - S.putByte(salt.at(l % s)); - } - - /* 3. Concatenate copies of the password together to create a string P of - length v * ceil(p / v) bytes (the final copy of the password may be - truncated to create P). - Note that if the password is the empty string, then so is P. */ - var Plen = v * Math.ceil(p / v); - var P = new forge.util.ByteBuffer(); - for(l = 0; l < Plen; l++) { - P.putByte(passBuf.at(l % p)); - } - - /* 4. Set I=S||P to be the concatenation of S and P. */ - var I = S; - I.putBuffer(P); - - /* 5. Set c=ceil(n / u). */ - var c = Math.ceil(n / u); - - /* 6. For i=1, 2, ..., c, do the following: */ - for(var i = 1; i <= c; i++) { - /* a) Set Ai=H^r(D||I). (l.e. 
the rth hash of D||I, H(H(H(...H(D||I)))) */ - var buf = new forge.util.ByteBuffer(); - buf.putBytes(D.bytes()); - buf.putBytes(I.bytes()); - for(var round = 0; round < iter; round++) { - md.start(); - md.update(buf.getBytes()); - buf = md.digest(); - } - - /* b) Concatenate copies of Ai to create a string B of length v bytes (the - final copy of Ai may be truncated to create B). */ - var B = new forge.util.ByteBuffer(); - for(l = 0; l < v; l++) { - B.putByte(buf.at(l % u)); - } - - /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks, - where k=ceil(s / v) + ceil(p / v), modify I by setting - Ij=(Ij+B+1) mod 2v for each j. */ - var k = Math.ceil(s / v) + Math.ceil(p / v); - var Inew = new forge.util.ByteBuffer(); - for(j = 0; j < k; j++) { - var chunk = new forge.util.ByteBuffer(I.getBytes(v)); - var x = 0x1ff; - for(l = B.length() - 1; l >= 0; l--) { - x = x >> 8; - x += B.at(l) + chunk.at(l); - chunk.setAt(l, x & 0xff); - } - Inew.putBuffer(chunk); + const request = this[kHTTPConnVersion] === 'h2' + ? Request[kHTTP2BuildRequest](origin, opts, handler) + : Request[kHTTP1BuildRequest](origin, opts, handler) + + this[kQueue].push(request) + if (this[kResuming]) { + // Do nothing. + } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { + // Wait a tick in case stream/iterator is ended in the same tick. + this[kResuming] = 1 + process.nextTick(resume, this) + } else { + resume(this, true) } - I = Inew; - /* Add Ai to A. */ - result.putBuffer(buf); - } + if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { + this[kNeedDrain] = 2 + } - result.truncate(result.length() - n); - return result; -}; + return this[kNeedDrain] < 2 + } -/** - * Get new Forge cipher object instance. - * - * @param oid the OID (in string notation). - * @param params the ASN.1 params object. - * @param password the password to decrypt with. - * - * @return new cipher object instance. - */ -pki.pbe.getCipher = function(oid, params, password) { - switch(oid) { - case pki.oids['pkcs5PBES2']: - return pki.pbe.getCipherForPBES2(oid, params, password); - - case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']: - case pki.oids['pbewithSHAAnd40BitRC2-CBC']: - return pki.pbe.getCipherForPKCS12PBE(oid, params, password); - - default: - var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.'); - error.oid = oid; - error.supportedOids = [ - 'pkcs5PBES2', - 'pbeWithSHAAnd3-KeyTripleDES-CBC', - 'pbewithSHAAnd40BitRC2-CBC' - ]; - throw error; + async [kClose] () { + // TODO: for H2 we need to gracefully flush the remaining enqueued + // request and close each stream. + return new Promise((resolve) => { + if (!this[kSize]) { + resolve(null) + } else { + this[kClosedResolve] = resolve + } + }) } -}; -/** - * Get new Forge cipher object instance according to PBES2 params block. - * - * The returned cipher instance is already started using the IV - * from PBES2 parameter block. - * - * @param oid the PKCS#5 PBKDF2 OID (in string notation). - * @param params the ASN.1 PBES2-params object. - * @param password the password to decrypt with. - * - * @return new cipher object instance. - */ -pki.pbe.getCipherForPBES2 = function(oid, params, password) { - // get PBE params - var capture = {}; - var errors = []; - if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) { - var error = new Error('Cannot read password-based-encryption algorithm ' + - 'parameters. 
ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); - error.errors = errors; - throw error; - } - - // check oids - oid = asn1.derToOid(capture.kdfOid); - if(oid !== pki.oids['pkcs5PBKDF2']) { - var error = new Error('Cannot read encrypted private key. ' + - 'Unsupported key derivation function OID.'); - error.oid = oid; - error.supportedOids = ['pkcs5PBKDF2']; - throw error; - } - oid = asn1.derToOid(capture.encOid); - if(oid !== pki.oids['aes128-CBC'] && - oid !== pki.oids['aes192-CBC'] && - oid !== pki.oids['aes256-CBC'] && - oid !== pki.oids['des-EDE3-CBC'] && - oid !== pki.oids['desCBC']) { - var error = new Error('Cannot read encrypted private key. ' + - 'Unsupported encryption scheme OID.'); - error.oid = oid; - error.supportedOids = [ - 'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC']; - throw error; - } - - // set PBE params - var salt = capture.kdfSalt; - var count = forge.util.createBuffer(capture.kdfIterationCount); - count = count.getInt(count.length() << 3); - var dkLen; - var cipherFn; - switch(pki.oids[oid]) { - case 'aes128-CBC': - dkLen = 16; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'aes192-CBC': - dkLen = 24; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'aes256-CBC': - dkLen = 32; - cipherFn = forge.aes.createDecryptionCipher; - break; - case 'des-EDE3-CBC': - dkLen = 24; - cipherFn = forge.des.createDecryptionCipher; - break; - case 'desCBC': - dkLen = 8; - cipherFn = forge.des.createDecryptionCipher; - break; - } - - // get PRF message digest - var md = prfOidToMessageDigest(capture.prfOid); - - // decrypt private key using pbe with chosen PRF and AES/DES - var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md); - var iv = capture.encIv; - var cipher = cipherFn(dk); - cipher.start(iv); - - return cipher; -}; + async [kDestroy] (err) { + return new Promise((resolve) => { + const requests = this[kQueue].splice(this[kPendingIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } -/** - * Get new Forge cipher object instance for PKCS#12 PBE. - * - * The returned cipher instance is already started using the key & IV - * derived from the provided password and PKCS#12 PBE salt. - * - * @param oid The PKCS#12 PBE OID (in string notation). - * @param params The ASN.1 PKCS#12 PBE-params object. - * @param password The password to decrypt with. - * - * @return the new cipher object instance. - */ -pki.pbe.getCipherForPKCS12PBE = function(oid, params, password) { - // get PBE params - var capture = {}; - var errors = []; - if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) { - var error = new Error('Cannot read password-based-encryption algorithm ' + - 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); - error.errors = errors; - throw error; - } - - var salt = forge.util.createBuffer(capture.salt); - var count = forge.util.createBuffer(capture.iterations); - count = count.getInt(count.length() << 3); - - var dkLen, dIvLen, cipherFn; - switch(oid) { - case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']: - dkLen = 24; - dIvLen = 8; - cipherFn = forge.des.startDecrypting; - break; + const callback = () => { + if (this[kClosedResolve]) { + // TODO (fix): Should we error here with ClientDestroyedError? 
+ this[kClosedResolve]() + this[kClosedResolve] = null + } + resolve() + } - case pki.oids['pbewithSHAAnd40BitRC2-CBC']: - dkLen = 5; - dIvLen = 8; - cipherFn = function(key, iv) { - var cipher = forge.rc2.createDecryptionCipher(key, 40); - cipher.start(iv, null); - return cipher; - }; - break; + if (this[kHTTP2Session] != null) { + util.destroy(this[kHTTP2Session], err) + this[kHTTP2Session] = null + this[kHTTP2SessionState] = null + } - default: - var error = new Error('Cannot read PKCS #12 PBE data block. Unsupported OID.'); - error.oid = oid; - throw error; + if (!this[kSocket]) { + queueMicrotask(callback) + } else { + util.destroy(this[kSocket].on('close', callback), err) + } + + resume(this) + }) } +} - // get PRF message digest - var md = prfOidToMessageDigest(capture.prfOid); - var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md); - md.start(); - var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md); +function onHttp2SessionError (err) { + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') - return cipherFn(key, iv); -}; + this[kSocket][kError] = err -/** - * OpenSSL's legacy key derivation function. - * - * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html - * - * @param password the password to derive the key from. - * @param salt the salt to use, null for none. - * @param dkLen the number of bytes needed for the derived key. - * @param [options] the options to use: - * [md] an optional message digest object to use. - */ -pki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) { - if(typeof md === 'undefined' || md === null) { - if(!('md5' in forge.md)) { - throw new Error('"md5" hash algorithm unavailable.'); - } - md = forge.md.md5.create(); - } - if(salt === null) { - salt = ''; - } - var digests = [hash(md, password + salt)]; - for(var length = 16, i = 1; length < dkLen; ++i, length += 16) { - digests.push(hash(md, digests[i - 1] + password + salt)); + onError(this[kClient], err) +} + +function onHttp2FrameError (type, code, id) { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + + if (id === 0) { + this[kSocket][kError] = err + onError(this[kClient], err) } - return digests.join('').substr(0, dkLen); -}; +} -function hash(md, bytes) { - return md.start().update(bytes).digest().getBytes(); +function onHttp2SessionEnd () { + util.destroy(this, new SocketError('other side closed')) + util.destroy(this[kSocket], new SocketError('other side closed')) } -function prfOidToMessageDigest(prfOid) { - // get PRF algorithm, default to SHA-1 - var prfAlgorithm; - if(!prfOid) { - prfAlgorithm = 'hmacWithSHA1'; - } else { - prfAlgorithm = pki.oids[asn1.derToOid(prfOid)]; - if(!prfAlgorithm) { - var error = new Error('Unsupported PRF OID.'); - error.oid = prfOid; - error.supported = [ - 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384', - 'hmacWithSHA512']; - throw error; +function onHTTP2GoAway (code) { + const client = this[kClient] + const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) + client[kSocket] = null + client[kHTTP2Session] = null + + if (client.destroyed) { + assert(this[kPending] === 0) + + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) } + } else if (client[kRunning] > 0) { + // Fail head of pipeline. 
+ const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) } - return prfAlgorithmToMessageDigest(prfAlgorithm); -} - -function prfAlgorithmToMessageDigest(prfAlgorithm) { - var factory = forge.md; - switch(prfAlgorithm) { - case 'hmacWithSHA224': - factory = forge.md.sha512; - case 'hmacWithSHA1': - case 'hmacWithSHA256': - case 'hmacWithSHA384': - case 'hmacWithSHA512': - prfAlgorithm = prfAlgorithm.substr(8).toLowerCase(); - break; - default: - var error = new Error('Unsupported PRF algorithm.'); - error.algorithm = prfAlgorithm; - error.supported = [ - 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384', - 'hmacWithSHA512']; - throw error; - } - if(!factory || !(prfAlgorithm in factory)) { - throw new Error('Unknown hash algorithm: ' + prfAlgorithm); - } - return factory[prfAlgorithm].create(); -} - -function createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) { - var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // salt - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt), - // iteration count - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - countBytes.getBytes()) - ]); - // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm - if(prfAlgorithm !== 'hmacWithSHA1') { - params.value.push( - // key length - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(dkLen.toString(16))), - // AlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ])); - } - return params; + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', + client[kUrl], + [client], + err + ) + + resume(client) } +const constants = __nccwpck_require__(953) +const createRedirectInterceptor = __nccwpck_require__(8861) +const EMPTY_BUF = Buffer.alloc(0) -/***/ }), +async function lazyllhttp () { + const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined -/***/ 1611: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let mod + try { + mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64')) + } catch (e) { + /* istanbul ignore next */ -/** - * Password-Based Key-Derivation Function #2 implementation. - * - * See RFC 2898 for details. - * - * @author Dave Longley - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(5104); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + // We could check if the error was caused by the simd option not + // being enabled, but the occurring of this other error + // * https://github.com/emscripten-core/emscripten/issues/11495 + // got me to remove that check to avoid breaking Node 12. 
+ mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64')) + } + + return await WebAssembly.instantiate(mod, { + env: { + /* eslint-disable camelcase */ -var pkcs5 = forge.pkcs5 = forge.pkcs5 || {}; + wasm_on_url: (p, at, len) => { + /* istanbul ignore next */ + return 0 + }, + wasm_on_status: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_begin: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageBegin() || 0 + }, + wasm_on_header_field: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_header_value: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 + }, + wasm_on_body: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_complete: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageComplete() || 0 + } -var crypto; -if(forge.util.isNodejs && !forge.options.usePureJavaScript) { - crypto = __nccwpck_require__(6113); + /* eslint-enable camelcase */ + } + }) } -/** - * Derives a key from a password. - * - * @param p the password as a binary-encoded string of bytes. - * @param s the salt as a binary-encoded string of bytes. - * @param c the iteration count, a positive integer. - * @param dkLen the intended length, in bytes, of the derived key, - * (max: 2^32 - 1) * hash length of the PRF. - * @param [md] the message digest (or algorithm identifier as a string) to use - * in the PRF, defaults to SHA-1. - * @param [callback(err, key)] presence triggers asynchronous version, called - * once the operation completes. - * - * @return the derived key, as a binary-encoded string of bytes, for the - * synchronous version (if no callback is specified). 
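// --- Illustrative sketch (not part of the generated bundle): on current Node
// versions the PBKDF2 implementation described above defers to the built-in
// crypto module when the digest is given by name, so the two calls below should
// produce the same bytes. Requiring 'node-forge' directly and the password,
// salt, iteration count and key length are assumptions for the example.
const crypto = require('crypto')
const forge = require('node-forge')

const password = 'correct horse battery staple'
const salt = crypto.randomBytes(16)

// forge takes binary-encoded strings and a digest name (defaults to 'sha1').
const dkForge = forge.pkcs5.pbkdf2(password, salt.toString('binary'), 10000, 32, 'sha256')

// Node's native implementation, for comparison.
const dkNode = crypto.pbkdf2Sync(password, salt, 10000, 32, 'sha256').toString('binary')

console.log(dkForge === dkNode) // expected: true
// --- end sketch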
- */ -module.exports = forge.pbkdf2 = pkcs5.pbkdf2 = function( - p, s, c, dkLen, md, callback) { - if(typeof md === 'function') { - callback = md; - md = null; - } - - // use native implementation if possible and not disabled, note that - // some node versions only support SHA-1, others allow digest to be changed - if(forge.util.isNodejs && !forge.options.usePureJavaScript && - crypto.pbkdf2 && (md === null || typeof md !== 'object') && - (crypto.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) { - if(typeof md !== 'string') { - // default prf to SHA-1 - md = 'sha1'; - } - p = Buffer.from(p, 'binary'); - s = Buffer.from(s, 'binary'); - if(!callback) { - if(crypto.pbkdf2Sync.length === 4) { - return crypto.pbkdf2Sync(p, s, c, dkLen).toString('binary'); +let llhttpInstance = null +let llhttpPromise = lazyllhttp() +llhttpPromise.catch() + +let currentParser = null +let currentBufferRef = null +let currentBufferSize = 0 +let currentBufferPtr = null + +const TIMEOUT_HEADERS = 1 +const TIMEOUT_BODY = 2 +const TIMEOUT_IDLE = 3 + +class Parser { + constructor (client, socket, { exports }) { + assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) + + this.llhttp = exports + this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) + this.client = client + this.socket = socket + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + this.statusCode = null + this.statusText = '' + this.upgrade = false + this.headers = [] + this.headersSize = 0 + this.headersMaxSize = client[kMaxHeadersSize] + this.shouldKeepAlive = false + this.paused = false + this.resume = this.resume.bind(this) + + this.bytesRead = 0 + + this.keepAlive = '' + this.contentLength = '' + this.connection = '' + this.maxResponseSize = client[kMaxResponseSize] + } + + setTimeout (value, type) { + this.timeoutType = type + if (value !== this.timeoutValue) { + timers.clearTimeout(this.timeout) + if (value) { + this.timeout = timers.setTimeout(onParserTimeout, value, this) + // istanbul ignore else: only for jest + if (this.timeout.unref) { + this.timeout.unref() + } + } else { + this.timeout = null + } + this.timeoutValue = value + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - return crypto.pbkdf2Sync(p, s, c, dkLen, md).toString('binary'); } - if(crypto.pbkdf2Sync.length === 4) { - return crypto.pbkdf2(p, s, c, dkLen, function(err, key) { - if(err) { - return callback(err); - } - callback(null, key.toString('binary')); - }); + } + + resume () { + if (this.socket.destroyed || !this.paused) { + return } - return crypto.pbkdf2(p, s, c, dkLen, md, function(err, key) { - if(err) { - return callback(err); + + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_resume(this.ptr) + + assert(this.timeoutType === TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - callback(null, key.toString('binary')); - }); - } + } - if(typeof md === 'undefined' || md === null) { - // default prf to SHA-1 - md = 'sha1'; + this.paused = false + this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. 
+ this.readMore() } - if(typeof md === 'string') { - if(!(md in forge.md.algorithms)) { - throw new Error('Unknown hash algorithm: ' + md); + + readMore () { + while (!this.paused && this.ptr) { + const chunk = this.socket.read() + if (chunk === null) { + break + } + this.execute(chunk) } - md = forge.md[md].create(); } - var hLen = md.digestLength; + execute (data) { + assert(this.ptr != null) + assert(currentParser == null) + assert(!this.paused) - /* 1. If dkLen > (2^32 - 1) * hLen, output "derived key too long" and - stop. */ - if(dkLen > (0xFFFFFFFF * hLen)) { - var err = new Error('Derived key is too long.'); - if(callback) { - return callback(err); - } - throw err; - } + const { socket, llhttp } = this - /* 2. Let len be the number of hLen-octet blocks in the derived key, - rounding up, and let r be the number of octets in the last - block: - - len = CEIL(dkLen / hLen), - r = dkLen - (len - 1) * hLen. */ - var len = Math.ceil(dkLen / hLen); - var r = dkLen - (len - 1) * hLen; - - /* 3. For each block of the derived key apply the function F defined - below to the password P, the salt S, the iteration count c, and - the block index to compute the block: - - T_1 = F(P, S, c, 1), - T_2 = F(P, S, c, 2), - ... - T_len = F(P, S, c, len), - - where the function F is defined as the exclusive-or sum of the - first c iterates of the underlying pseudorandom function PRF - applied to the password P and the concatenation of the salt S - and the block index i: - - F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c - - where - - u_1 = PRF(P, S || INT(i)), - u_2 = PRF(P, u_1), - ... - u_c = PRF(P, u_{c-1}). - - Here, INT(i) is a four-octet encoding of the integer i, most - significant octet first. */ - var prf = forge.hmac.create(); - prf.start(md, p); - var dk = ''; - var xor, u_c, u_c1; - - // sync version - if(!callback) { - for(var i = 1; i <= len; ++i) { - // PRF(P, S || INT(i)) (first iteration) - prf.start(null, null); - prf.update(s); - prf.update(forge.util.int32ToBytes(i)); - xor = u_c1 = prf.digest().getBytes(); - - // PRF(P, u_{c-1}) (other iterations) - for(var j = 2; j <= c; ++j) { - prf.start(null, null); - prf.update(u_c1); - u_c = prf.digest().getBytes(); - // F(p, s, c, i) - xor = forge.util.xorBytes(xor, u_c, hLen); - u_c1 = u_c; + if (data.length > currentBufferSize) { + if (currentBufferPtr) { + llhttp.free(currentBufferPtr) } + currentBufferSize = Math.ceil(data.length / 4096) * 4096 + currentBufferPtr = llhttp.malloc(currentBufferSize) + } + + new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) - /* 4. Concatenate the blocks and extract the first dkLen octets to - produce a derived key DK: + // Call `execute` on the wasm parser. + // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, + // and finally the length of bytes to parse. + // The return value is an error code or `constants.ERROR.OK`. + try { + let ret + + try { + currentBufferRef = data + currentParser = this + ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) + /* eslint-disable-next-line no-useless-catch */ + } catch (err) { + /* istanbul ignore next: difficult to make a test case for */ + throw err + } finally { + currentParser = null + currentBufferRef = null + } - DK = T_1 || T_2 || ... || T_len<0..r-1> */ - dk += (i < len) ? 
xor : xor.substr(0, r); + const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr + + if (ret === constants.ERROR.PAUSED_UPGRADE) { + this.onUpgrade(data.slice(offset)) + } else if (ret === constants.ERROR.PAUSED) { + this.paused = true + socket.unshift(data.slice(offset)) + } else if (ret !== constants.ERROR.OK) { + const ptr = llhttp.llhttp_get_error_reason(this.ptr) + let message = '' + /* istanbul ignore else: difficult to make a test case for */ + if (ptr) { + const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) + message = + 'Response does not match the HTTP/1.1 protocol (' + + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + + ')' + } + throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) + } + } catch (err) { + util.destroy(socket, err) } - /* 5. Output the derived key DK. */ - return dk; } - // async version - var i = 1, j; - function outer() { - if(i > len) { - // done - return callback(null, dk); - } + destroy () { + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_free(this.ptr) + this.ptr = null - // PRF(P, S || INT(i)) (first iteration) - prf.start(null, null); - prf.update(s); - prf.update(forge.util.int32ToBytes(i)); - xor = u_c1 = prf.digest().getBytes(); + timers.clearTimeout(this.timeout) + this.timeout = null + this.timeoutValue = null + this.timeoutType = null - // PRF(P, u_{c-1}) (other iterations) - j = 2; - inner(); + this.paused = false } - function inner() { - if(j <= c) { - prf.start(null, null); - prf.update(u_c1); - u_c = prf.digest().getBytes(); - // F(p, s, c, i) - xor = forge.util.xorBytes(xor, u_c, hLen); - u_c1 = u_c; - ++j; - return forge.util.setImmediate(inner); - } + onStatus (buf) { + this.statusText = buf.toString() + } - /* 4. Concatenate the blocks and extract the first dkLen octets to - produce a derived key DK: + onMessageBegin () { + const { socket, client } = this - DK = T_1 || T_2 || ... || T_len<0..r-1> */ - dk += (i < len) ? xor : xor.substr(0, r); + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } - ++i; - outer(); + const request = client[kQueue][client[kRunningIdx]] + if (!request) { + return -1 + } } - outer(); -}; + onHeaderField (buf) { + const len = this.headers.length + if ((len & 1) === 0) { + this.headers.push(buf) + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } -/***/ }), + this.trackHeader(buf.length) + } -/***/ 154: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + onHeaderValue (buf) { + let len = this.headers.length -/** - * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms. - * - * See: RFC 1421. - * - * @author Dave Longley - * - * Copyright (c) 2013-2014 Digital Bazaar, Inc. - * - * A Forge PEM object has the following fields: - * - * type: identifies the type of message (eg: "RSA PRIVATE KEY"). - * - * procType: identifies the type of processing performed on the message, - * it has two subfields: version and type, eg: 4,ENCRYPTED. - * - * contentDomain: identifies the type of content in the message, typically - * only uses the value: "RFC822". - * - * dekInfo: identifies the message encryption algorithm and mode and includes - * any parameters for the algorithm, it has two subfields: algorithm and - * parameters, eg: DES-CBC,F8143EDE5960C597. - * - * headers: contains all other PEM encapsulated headers -- where order is - * significant (for pairing data like recipient ID + key info). 
- * - * body: the binary-encoded body. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); + if ((len & 1) === 1) { + this.headers.push(buf) + len += 1 + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } -// shortcut for pem API -var pem = module.exports = forge.pem = forge.pem || {}; + const key = this.headers[len - 2] + if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { + this.keepAlive += buf.toString() + } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { + this.connection += buf.toString() + } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { + this.contentLength += buf.toString() + } -/** - * Encodes (serializes) the given PEM object. - * - * @param msg the PEM message object to encode. - * @param options the options to use: - * maxline the maximum characters per line for the body, (default: 64). - * - * @return the PEM-formatted string. - */ -pem.encode = function(msg, options) { - options = options || {}; - var rval = '-----BEGIN ' + msg.type + '-----\r\n'; - - // encode special headers - var header; - if(msg.procType) { - header = { - name: 'Proc-Type', - values: [String(msg.procType.version), msg.procType.type] - }; - rval += foldHeader(header); - } - if(msg.contentDomain) { - header = {name: 'Content-Domain', values: [msg.contentDomain]}; - rval += foldHeader(header); + this.trackHeader(buf.length) } - if(msg.dekInfo) { - header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]}; - if(msg.dekInfo.parameters) { - header.values.push(msg.dekInfo.parameters); + + trackHeader (len) { + this.headersSize += len + if (this.headersSize >= this.headersMaxSize) { + util.destroy(this.socket, new HeadersOverflowError()) } - rval += foldHeader(header); } - if(msg.headers) { - // encode all other headers - for(var i = 0; i < msg.headers.length; ++i) { - rval += foldHeader(msg.headers[i]); + onUpgrade (head) { + const { upgrade, client, socket, headers, statusCode } = this + + assert(upgrade) + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(!socket.destroyed) + assert(socket === client[kSocket]) + assert(!this.paused) + assert(request.upgrade || request.method === 'CONNECT') + + this.statusCode = null + this.statusText = '' + this.shouldKeepAlive = null + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + socket.unshift(head) + + socket[kParser].destroy() + socket[kParser] = null + + socket[kClient] = null + socket[kError] = null + socket + .removeListener('error', onSocketError) + .removeListener('readable', onSocketReadable) + .removeListener('end', onSocketEnd) + .removeListener('close', onSocketClose) + + client[kSocket] = null + client[kQueue][client[kRunningIdx]++] = null + client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) + + try { + request.onUpgrade(statusCode, headers, socket) + } catch (err) { + util.destroy(socket, err) } - } - // terminate header - if(msg.procType) { - rval += '\r\n'; + resume(client) } - // add body - rval += forge.util.encode64(msg.body, options.maxline || 64) + '\r\n'; - - rval += '-----END ' + msg.type + '-----\r\n'; - return rval; -}; + onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { + const { client, socket, headers, statusText } = this -/** - * Decodes (deserializes) all PEM messages found in the given string. - * - * @param str the PEM-formatted string to decode. 
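// --- Illustrative sketch (not part of the generated bundle): a round trip
// through the PEM encoder/decoder described above. Requiring 'node-forge'
// directly is an assumption, and the header values and random body stand in
// for real DER-encoded key material.
const forge = require('node-forge')

const msg = {
  type: 'RSA PRIVATE KEY',
  procType: { version: '4', type: 'ENCRYPTED' },
  dekInfo: { algorithm: 'AES-128-CBC', parameters: 'F8143EDE5960C597F8143EDE5960C597' },
  body: forge.random.getBytesSync(64) // binary-encoded string body
}

const pemText = forge.pem.encode(msg, { maxline: 64 })
const [decoded] = forge.pem.decode(pemText)

console.log(decoded.type)              // 'RSA PRIVATE KEY'
console.log(decoded.dekInfo.algorithm) // 'AES-128-CBC'
console.log(decoded.body === msg.body) // expected: true
// --- end sketch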
- * - * @return the PEM message objects in an array. - */ -pem.decode = function(str) { - var rval = []; - - // split string into PEM messages (be lenient w/EOF on BEGIN line) - var rMessage = /\s*-----BEGIN ([A-Z0-9- ]+)-----\r?\n?([\x21-\x7e\s]+?(?:\r?\n\r?\n))?([:A-Za-z0-9+\/=\s]+?)-----END \1-----/g; - var rHeader = /([\x21-\x7e]+):\s*([\x21-\x7e\s^:]+)/; - var rCRLF = /\r?\n/; - var match; - while(true) { - match = rMessage.exec(str); - if(!match) { - break; + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 } - // accept "NEW CERTIFICATE REQUEST" as "CERTIFICATE REQUEST" - // https://datatracker.ietf.org/doc/html/rfc7468#section-7 - var type = match[1]; - if(type === 'NEW CERTIFICATE REQUEST') { - type = 'CERTIFICATE REQUEST'; + const request = client[kQueue][client[kRunningIdx]] + + /* istanbul ignore next: difficult to make a test case for */ + if (!request) { + return -1 } - var msg = { - type: type, - procType: null, - contentDomain: null, - dekInfo: null, - headers: [], - body: forge.util.decode64(match[3]) - }; - rval.push(msg); + assert(!this.upgrade) + assert(this.statusCode < 200) - // no headers - if(!match[2]) { - continue; + if (statusCode === 100) { + util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) + return -1 + } + + /* this can only happen if server is misbehaving */ + if (upgrade && !request.upgrade) { + util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) + return -1 } - // parse headers - var lines = match[2].split(rCRLF); - var li = 0; - while(match && li < lines.length) { - // get line, trim any rhs whitespace - var line = lines[li].replace(/\s+$/, ''); + assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) - // RFC2822 unfold any following folded lines - for(var nl = li + 1; nl < lines.length; ++nl) { - var next = lines[nl]; - if(!/\s/.test(next[0])) { - break; - } - line += next; - li = nl; + this.statusCode = statusCode + this.shouldKeepAlive = ( + shouldKeepAlive || + // Override llhttp value which does not allow keepAlive for HEAD. + (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + ) + + if (this.statusCode >= 200) { + const bodyTimeout = request.bodyTimeout != null + ? request.bodyTimeout + : client[kBodyTimeout] + this.setTimeout(bodyTimeout, TIMEOUT_BODY) + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } + } - // parse header - match = line.match(rHeader); - if(match) { - var header = {name: match[1], values: []}; - var values = match[2].split(','); - for(var vi = 0; vi < values.length; ++vi) { - header.values.push(ltrim(values[vi])); - } - - // Proc-Type must be the first header - if(!msg.procType) { - if(header.name !== 'Proc-Type') { - throw new Error('Invalid PEM formatted message. The first ' + - 'encapsulated header must be "Proc-Type".'); - } else if(header.values.length !== 2) { - throw new Error('Invalid PEM formatted message. The "Proc-Type" ' + - 'header must have two subfields.'); - } - msg.procType = {version: values[0], type: values[1]}; - } else if(!msg.contentDomain && header.name === 'Content-Domain') { - // special-case Content-Domain - msg.contentDomain = values[0] || ''; - } else if(!msg.dekInfo && header.name === 'DEK-Info') { - // special-case DEK-Info - if(header.values.length === 0) { - throw new Error('Invalid PEM formatted message. 
The "DEK-Info" ' + - 'header must have at least one subfield.'); - } - msg.dekInfo = {algorithm: values[0], parameters: values[1] || null}; + if (request.method === 'CONNECT') { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + if (upgrade) { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (this.shouldKeepAlive && client[kPipelining]) { + const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null + + if (keepAliveTimeout != null) { + const timeout = Math.min( + keepAliveTimeout - client[kKeepAliveTimeoutThreshold], + client[kKeepAliveMaxTimeout] + ) + if (timeout <= 0) { + socket[kReset] = true } else { - msg.headers.push(header); + client[kKeepAliveTimeoutValue] = timeout } + } else { + client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] } + } else { + // Stop more requests from being dispatched. + socket[kReset] = true + } - ++li; + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { + return -1 } - if(msg.procType === 'ENCRYPTED' && !msg.dekInfo) { - throw new Error('Invalid PEM formatted message. The "DEK-Info" ' + - 'header must be present if "Proc-Type" is "ENCRYPTED".'); + if (request.method === 'HEAD') { + return 1 + } + + if (statusCode < 200) { + return 1 + } + + if (socket[kBlocking]) { + socket[kBlocking] = false + resume(client) } - } - if(rval.length === 0) { - throw new Error('Invalid PEM formatted message.'); + return pause ? constants.ERROR.PAUSED : 0 } - return rval; -}; + onBody (buf) { + const { client, socket, statusCode, maxResponseSize } = this -function foldHeader(header) { - var rval = header.name + ': '; + if (socket.destroyed) { + return -1 + } - // ensure values with CRLF are folded - var values = []; - var insertSpace = function(match, $1) { - return ' ' + $1; - }; - for(var i = 0; i < header.values.length; ++i) { - values.push(header.values[i].replace(/^(\S+\r\n)/, insertSpace)); - } - rval += values.join(',') + '\r\n'; - - // do folding - var length = 0; - var candidate = -1; - for(var i = 0; i < rval.length; ++i, ++length) { - if(length > 65 && candidate !== -1) { - var insert = rval[candidate]; - if(insert === ',') { - ++candidate; - rval = rval.substr(0, candidate) + '\r\n ' + rval.substr(candidate); - } else { - rval = rval.substr(0, candidate) + - '\r\n' + insert + rval.substr(candidate + 1); + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - length = (i - candidate - 1); - candidate = -1; - ++i; - } else if(rval[i] === ' ' || rval[i] === '\t' || rval[i] === ',') { - candidate = i; + } + + assert(statusCode >= 200) + + if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { + util.destroy(socket, new ResponseExceededMaxSizeError()) + return -1 + } + + this.bytesRead += buf.length + + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED } } - return rval; -} + onMessageComplete () { + const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + + if (socket.destroyed && (!statusCode || shouldKeepAlive)) { + return -1 + } + + if (upgrade) { + return + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + 
assert(statusCode >= 100) + + this.statusCode = null + this.statusText = '' + this.bytesRead = 0 + this.contentLength = '' + this.keepAlive = '' + this.connection = '' + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (statusCode < 200) { + return + } -function ltrim(str) { - return str.replace(/^\s+/, ''); + /* istanbul ignore next: should be handled by llhttp? */ + if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { + util.destroy(socket, new ResponseContentLengthMismatchError()) + return -1 + } + + request.onComplete(headers) + + client[kQueue][client[kRunningIdx]++] = null + + if (socket[kWriting]) { + assert.strictEqual(client[kRunning], 0) + // Response completed before request. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (!shouldKeepAlive) { + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (socket[kReset] && client[kRunning] === 0) { + // Destroy socket once all requests have completed. + // The request at the tail of the pipeline is the one + // that requested reset and no further requests should + // have been queued since then. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (client[kPipelining] === 1) { + // We must wait a full event loop cycle to reuse this socket to make sure + // that non-spec compliant servers are not closing the connection even if they + // said they won't. + setImmediate(resume, client) + } else { + resume(client) + } + } } +function onParserTimeout (parser) { + const { socket, timeoutType, client } = parser -/***/ }), + /* istanbul ignore else */ + if (timeoutType === TIMEOUT_HEADERS) { + if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { + assert(!parser.paused, 'cannot be paused while waiting for headers') + util.destroy(socket, new HeadersTimeoutError()) + } + } else if (timeoutType === TIMEOUT_BODY) { + if (!parser.paused) { + util.destroy(socket, new BodyTimeoutError()) + } + } else if (timeoutType === TIMEOUT_IDLE) { + assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) + util.destroy(socket, new InformationalError('socket idle timeout')) + } +} -/***/ 7014: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function onSocketReadable () { + const { [kParser]: parser } = this + if (parser) { + parser.readMore() + } +} -/** - * Partial implementation of PKCS#1 v2.2: RSA-OEAP - * - * Modified but based on the following MIT and BSD licensed code: - * - * https://github.com/kjur/jsjws/blob/master/rsa.js: - * - * The 'jsjws'(JSON Web Signature JavaScript Library) License - * - * Copyright (c) 2012 Kenji Urushima - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - * THE SOFTWARE. - * - * http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain: - * - * RSAES-OAEP.js - * $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $ - * JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002) - * Copyright (C) Ellis Pritchard, Guardian Unlimited 2003. - * Contact: ellis@nukinetics.com - * Distributed under the BSD License. - * - * Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125 - * - * @author Evan Jones (http://evanjones.ca/) - * @author Dave Longley - * - * Copyright (c) 2013-2014 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(7821); -__nccwpck_require__(279); +function onSocketError (err) { + const { [kClient]: client, [kParser]: parser } = this -// shortcut for PKCS#1 API -var pkcs1 = module.exports = forge.pkcs1 = forge.pkcs1 || {}; + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') -/** - * Encode the given RSAES-OAEP message (M) using key, with optional label (L) - * and seed. - * - * This method does not perform RSA encryption, it only encodes the message - * using RSAES-OAEP. - * - * @param key the RSA key to use. - * @param message the message to encode. - * @param options the options to use: - * label an optional label to use. - * seed the seed to use. - * md the message digest object to use, undefined for SHA-1. - * mgf1 optional mgf1 parameters: - * md the message digest object to use for MGF1. - * - * @return the encoded message bytes. - */ -pkcs1.encode_rsa_oaep = function(key, message, options) { - // parse arguments - var label; - var seed; - var md; - var mgf1Md; - // legacy args (label, seed, md) - if(typeof options === 'string') { - label = options; - seed = arguments[3] || undefined; - md = arguments[4] || undefined; - } else if(options) { - label = options.label || undefined; - seed = options.seed || undefined; - md = options.md || undefined; - if(options.mgf1 && options.mgf1.md) { - mgf1Md = options.mgf1.md; - } - } - - // default OAEP to SHA-1 message digest - if(!md) { - md = forge.md.sha1.create(); - } else { - md.start(); + if (client[kHTTPConnVersion] !== 'h2') { + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded + // to the user. + if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so for as a valid response. 
+ parser.onMessageComplete() + return + } } - // default MGF-1 to same as OAEP - if(!mgf1Md) { - mgf1Md = md; - } + this[kError] = err - // compute length in bytes and check output - var keyLength = Math.ceil(key.n.bitLength() / 8); - var maxLength = keyLength - 2 * md.digestLength - 2; - if(message.length > maxLength) { - var error = new Error('RSAES-OAEP input message length is too long.'); - error.length = message.length; - error.maxLength = maxLength; - throw error; - } + onError(this[kClient], err) +} - if(!label) { - label = ''; - } - md.update(label, 'raw'); - var lHash = md.digest(); +function onError (client, err) { + if ( + client[kRunning] === 0 && + err.code !== 'UND_ERR_INFO' && + err.code !== 'UND_ERR_SOCKET' + ) { + // Error is not caused by running request and not a recoverable + // socket error. + + assert(client[kPendingIdx] === client[kRunningIdx]) - var PS = ''; - var PS_length = maxLength - message.length; - for(var i = 0; i < PS_length; i++) { - PS += '\x00'; + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + assert(client[kSize] === 0) } +} - var DB = lHash.getBytes() + PS + '\x01' + message; +function onSocketEnd () { + const { [kParser]: parser, [kClient]: client } = this - if(!seed) { - seed = forge.random.getBytes(md.digestLength); - } else if(seed.length !== md.digestLength) { - var error = new Error('Invalid RSAES-OAEP seed. The seed length must ' + - 'match the digest length.'); - error.seedLength = seed.length; - error.digestLength = md.digestLength; - throw error; + if (client[kHTTPConnVersion] !== 'h2') { + if (parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return + } } - var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md); - var maskedDB = forge.util.xorBytes(DB, dbMask, DB.length); + util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +} - var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md); - var maskedSeed = forge.util.xorBytes(seed, seedMask, seed.length); +function onSocketClose () { + const { [kClient]: client, [kParser]: parser } = this - // return encoded message - return '\x00' + maskedSeed + maskedDB; -}; + if (client[kHTTPConnVersion] === 'h1' && parser) { + if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + } -/** - * Decode the given RSAES-OAEP encoded message (EM) using key, with optional - * label (L). - * - * This method does not perform RSA decryption, it only decodes the message - * using RSAES-OAEP. - * - * @param key the RSA key to use. - * @param em the encoded message to decode. - * @param options the options to use: - * label an optional label to use. - * md the message digest object to use for OAEP, undefined for SHA-1. - * mgf1 optional mgf1 parameters: - * md the message digest object to use for MGF1. - * - * @return the decoded message bytes. 
- */ -pkcs1.decode_rsa_oaep = function(key, em, options) { - // parse args - var label; - var md; - var mgf1Md; - // legacy args - if(typeof options === 'string') { - label = options; - md = arguments[3] || undefined; - } else if(options) { - label = options.label || undefined; - md = options.md || undefined; - if(options.mgf1 && options.mgf1.md) { - mgf1Md = options.mgf1.md; - } - } - - // compute length in bytes - var keyLength = Math.ceil(key.n.bitLength() / 8); - - if(em.length !== keyLength) { - var error = new Error('RSAES-OAEP encoded message length is invalid.'); - error.length = em.length; - error.expectedLength = keyLength; - throw error; - } - - // default OAEP to SHA-1 message digest - if(md === undefined) { - md = forge.md.sha1.create(); - } else { - md.start(); + this[kParser].destroy() + this[kParser] = null } - // default MGF-1 to same as OAEP - if(!mgf1Md) { - mgf1Md = md; - } + const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) - if(keyLength < 2 * md.digestLength + 2) { - throw new Error('RSAES-OAEP key is too short for the hash function.'); - } + client[kSocket] = null - if(!label) { - label = ''; - } - md.update(label, 'raw'); - var lHash = md.digest().getBytes(); + if (client.destroyed) { + assert(client[kPending] === 0) - // split the message into its parts - var y = em.charAt(0); - var maskedSeed = em.substring(1, md.digestLength + 1); - var maskedDB = em.substring(1 + md.digestLength); + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { + // Fail head of pipeline. + const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null - var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md); - var seed = forge.util.xorBytes(maskedSeed, seedMask, maskedSeed.length); + errorRequest(client, request, err) + } - var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md); - var db = forge.util.xorBytes(maskedDB, dbMask, maskedDB.length); + client[kPendingIdx] = client[kRunningIdx] - var lHashPrime = db.substring(0, md.digestLength); + assert(client[kRunning] === 0) - // constant time check that all values match what is expected - var error = (y !== '\x00'); + client.emit('disconnect', client[kUrl], [client], err) - // constant time check lHash vs lHashPrime - for(var i = 0; i < md.digestLength; ++i) { - error |= (lHash.charAt(i) !== lHashPrime.charAt(i)); - } + resume(client) +} - // "constant time" find the 0x1 byte separating the padding (zeros) from the - // message - // TODO: It must be possible to do this in a better/smarter way? - var in_ps = 1; - var index = md.digestLength; - for(var j = md.digestLength; j < db.length; j++) { - var code = db.charCodeAt(j); +async function connect (client) { + assert(!client[kConnecting]) + assert(!client[kSocket]) - var is_0 = (code & 0x1) ^ 0x1; + let { host, hostname, protocol, port } = client[kUrl] - // non-zero if not 0 or 1 in the ps section - var error_mask = in_ps ? 
0xfffe : 0x0000; - error |= (code & error_mask); + // Resolve ipv6 + if (hostname[0] === '[') { + const idx = hostname.indexOf(']') - // latch in_ps to zero after we find 0x1 - in_ps = in_ps & is_0; - index += in_ps; - } + assert(idx !== -1) + const ip = hostname.substring(1, idx) - if(error || db.charCodeAt(index) !== 0x1) { - throw new Error('Invalid RSAES-OAEP padding.'); + assert(net.isIP(ip)) + hostname = ip } - return db.substring(index + 1); -}; + client[kConnecting] = true -function rsa_mgf1(seed, maskLength, hash) { - // default to SHA-1 message digest - if(!hash) { - hash = forge.md.sha1.create(); - } - var t = ''; - var count = Math.ceil(maskLength / hash.digestLength); - for(var i = 0; i < count; ++i) { - var c = String.fromCharCode( - (i >> 24) & 0xFF, (i >> 16) & 0xFF, (i >> 8) & 0xFF, i & 0xFF); - hash.start(); - hash.update(seed + c); - t += hash.digest().getBytes(); + if (channels.beforeConnect.hasSubscribers) { + channels.beforeConnect.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector] + }) } - return t.substring(0, maskLength); -} + try { + const socket = await new Promise((resolve, reject) => { + client[kConnector]({ + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, (err, socket) => { + if (err) { + reject(err) + } else { + resolve(socket) + } + }) + }) -/***/ }), + if (client.destroyed) { + util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) + return + } -/***/ 466: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + client[kConnecting] = false -/** - * Javascript implementation of PKCS#12. - * - * @author Dave Longley - * @author Stefan Siegl - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - * Copyright (c) 2012 Stefan Siegl - * - * The ASN.1 representation of PKCS#12 is as follows - * (see ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12-tc1.pdf for details) - * - * PFX ::= SEQUENCE { - * version INTEGER {v3(3)}(v3,...), - * authSafe ContentInfo, - * macData MacData OPTIONAL - * } - * - * MacData ::= SEQUENCE { - * mac DigestInfo, - * macSalt OCTET STRING, - * iterations INTEGER DEFAULT 1 - * } - * Note: The iterations default is for historical reasons and its use is - * deprecated. A higher value, like 1024, is recommended. - * - * DigestInfo is defined in PKCS#7 as follows: - * - * DigestInfo ::= SEQUENCE { - * digestAlgorithm DigestAlgorithmIdentifier, - * digest Digest - * } - * - * DigestAlgorithmIdentifier ::= AlgorithmIdentifier - * - * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters - * for the algorithm, if any. In the case of SHA1 there is none. 
- * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * Digest ::= OCTET STRING - * - * - * ContentInfo ::= SEQUENCE { - * contentType ContentType, - * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL - * } - * - * ContentType ::= OBJECT IDENTIFIER - * - * AuthenticatedSafe ::= SEQUENCE OF ContentInfo - * -- Data if unencrypted - * -- EncryptedData if password-encrypted - * -- EnvelopedData if public key-encrypted - * - * - * SafeContents ::= SEQUENCE OF SafeBag - * - * SafeBag ::= SEQUENCE { - * bagId BAG-TYPE.&id ({PKCS12BagSet}) - * bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}), - * bagAttributes SET OF PKCS12Attribute OPTIONAL - * } - * - * PKCS12Attribute ::= SEQUENCE { - * attrId ATTRIBUTE.&id ({PKCS12AttrSet}), - * attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId}) - * } -- This type is compatible with the X.500 type 'Attribute' - * - * PKCS12AttrSet ATTRIBUTE ::= { - * friendlyName | -- from PKCS #9 - * localKeyId, -- from PKCS #9 - * ... -- Other attributes are allowed - * } - * - * CertBag ::= SEQUENCE { - * certId BAG-TYPE.&id ({CertTypes}), - * certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId}) - * } - * - * x509Certificate BAG-TYPE ::= {OCTET STRING IDENTIFIED BY {certTypes 1}} - * -- DER-encoded X.509 certificate stored in OCTET STRING - * - * sdsiCertificate BAG-TYPE ::= {IA5String IDENTIFIED BY {certTypes 2}} - * -- Base64-encoded SDSI certificate stored in IA5String - * - * CertTypes BAG-TYPE ::= { - * x509Certificate | - * sdsiCertificate, - * ... -- For future extensions - * } - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(5104); -__nccwpck_require__(1925); -__nccwpck_require__(266); -__nccwpck_require__(1281); -__nccwpck_require__(7821); -__nccwpck_require__(3921); -__nccwpck_require__(279); -__nccwpck_require__(8339); -__nccwpck_require__(8180); - -// shortcut for asn.1 & PKI API -var asn1 = forge.asn1; -var pki = forge.pki; - -// shortcut for PKCS#12 API -var p12 = module.exports = forge.pkcs12 = forge.pkcs12 || {}; - -var contentInfoValidator = { - name: 'ContentInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, // a ContentInfo - constructed: true, - value: [{ - name: 'ContentInfo.contentType', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'contentType' - }, { - name: 'ContentInfo.content', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - constructed: true, - captureAsn1: 'content' - }] -}; + assert(socket) -var pfxValidator = { - name: 'PFX', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'PFX.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, - contentInfoValidator, { - name: 'PFX.macData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - captureAsn1: 'mac', - value: [{ - name: 'PFX.macData.mac', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, // DigestInfo - constructed: true, - value: [{ - name: 'PFX.macData.mac.digestAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, // DigestAlgorithmIdentifier - constructed: true, - value: [{ - name: 'PFX.macData.mac.digestAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'macAlgorithm' - }, { - name: 
'PFX.macData.mac.digestAlgorithm.parameters', - tagClass: asn1.Class.UNIVERSAL, - captureAsn1: 'macAlgorithmParameters' - }] - }, { - name: 'PFX.macData.mac.digest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'macDigest' - }] - }, { - name: 'PFX.macData.macSalt', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'macSalt' - }, { - name: 'PFX.macData.iterations', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - optional: true, - capture: 'macIterations' - }] - }] -}; + const isH2 = socket.alpnProtocol === 'h2' + if (isH2) { + if (!h2ExperimentalWarned) { + h2ExperimentalWarned = true + process.emitWarning('H2 support is experimental, expect them to change at any time.', { + code: 'UNDICI-H2' + }) + } -var safeBagValidator = { - name: 'SafeBag', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SafeBag.bagId', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'bagId' - }, { - name: 'SafeBag.bagValue', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - constructed: true, - captureAsn1: 'bagValue' - }, { - name: 'SafeBag.bagAttributes', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - optional: true, - capture: 'bagAttributes' - }] -}; + const session = http2.connect(client[kUrl], { + createConnection: () => socket, + peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams + }) -var attributeValidator = { - name: 'Attribute', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'Attribute.attrId', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'oid' - }, { - name: 'Attribute.attrValues', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - capture: 'values' - }] -}; + client[kHTTPConnVersion] = 'h2' + session[kClient] = client + session[kSocket] = socket + session.on('error', onHttp2SessionError) + session.on('frameError', onHttp2FrameError) + session.on('end', onHttp2SessionEnd) + session.on('goaway', onHTTP2GoAway) + session.on('close', onSocketClose) + session.unref() + + client[kHTTP2Session] = session + socket[kHTTP2Session] = session + } else { + if (!llhttpInstance) { + llhttpInstance = await llhttpPromise + llhttpPromise = null + } -var certBagValidator = { - name: 'CertBag', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'CertBag.certId', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'certId' - }, { - name: 'CertBag.certValue', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - constructed: true, - /* So far we only support X.509 certificates (which are wrapped in - an OCTET STRING, hence hard code that here). 
*/ - value: [{ - name: 'CertBag.certValue[0]', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.OCTETSTRING, - constructed: false, - capture: 'cert' - }] - }] -}; + socket[kNoRef] = false + socket[kWriting] = false + socket[kReset] = false + socket[kBlocking] = false + socket[kParser] = new Parser(client, socket, llhttpInstance) + } + + socket[kCounter] = 0 + socket[kMaxRequests] = client[kMaxRequests] + socket[kClient] = client + socket[kError] = null + + socket + .on('error', onSocketError) + .on('readable', onSocketReadable) + .on('end', onSocketEnd) + .on('close', onSocketClose) + + client[kSocket] = socket + + if (channels.connected.hasSubscribers) { + channels.connected.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + socket + }) + } + client.emit('connect', client[kUrl], [client]) + } catch (err) { + if (client.destroyed) { + return + } -/** - * Search SafeContents structure for bags with matching attributes. - * - * The search can optionally be narrowed by a certain bag type. - * - * @param safeContents the SafeContents structure to search in. - * @param attrName the name of the attribute to compare against. - * @param attrValue the attribute value to search for. - * @param [bagType] bag type to narrow search by. - * - * @return an array of matching bags. - */ -function _getBagsByAttribute(safeContents, attrName, attrValue, bagType) { - var result = []; + client[kConnecting] = false - for(var i = 0; i < safeContents.length; i++) { - for(var j = 0; j < safeContents[i].safeBags.length; j++) { - var bag = safeContents[i].safeBags[j]; - if(bagType !== undefined && bag.type !== bagType) { - continue; - } - // only filter by bag type, no attribute specified - if(attrName === null) { - result.push(bag); - continue; - } - if(bag.attributes[attrName] !== undefined && - bag.attributes[attrName].indexOf(attrValue) >= 0) { - result.push(bag); + if (channels.connectError.hasSubscribers) { + channels.connectError.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + error: err + }) + } + + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + assert(client[kRunning] === 0) + while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { + const request = client[kQueue][client[kPendingIdx]++] + errorRequest(client, request, err) } + } else { + onError(client, err) } + + client.emit('connectionError', client[kUrl], [client], err) } - return result; + resume(client) } -/** - * Converts a PKCS#12 PFX in ASN.1 notation into a PFX object. - * - * @param obj The PKCS#12 PFX in ASN.1 notation. - * @param strict true to use strict DER decoding, false not to (default: true). - * @param {String} password Password to decrypt with (optional). - * - * @return PKCS#12 PFX object. 
- */ -p12.pkcs12FromAsn1 = function(obj, strict, password) { - // handle args - if(typeof strict === 'string') { - password = strict; - strict = true; - } else if(strict === undefined) { - strict = true; +function emitDrain (client) { + client[kNeedDrain] = 0 + client.emit('drain', client[kUrl], [client]) +} + +function resume (client, sync) { + if (client[kResuming] === 2) { + return } - // validate PFX and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, pfxValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#12 PFX. ' + - 'ASN.1 object is not an PKCS#12 PFX.'); - error.errors = error; - throw error; + client[kResuming] = 2 + + _resume(client, sync) + client[kResuming] = 0 + + if (client[kRunningIdx] > 256) { + client[kQueue].splice(0, client[kRunningIdx]) + client[kPendingIdx] -= client[kRunningIdx] + client[kRunningIdx] = 0 } +} - var pfx = { - version: capture.version.charCodeAt(0), - safeContents: [], +function _resume (client, sync) { + while (true) { + if (client.destroyed) { + assert(client[kPending] === 0) + return + } - /** - * Gets bags with matching attributes. - * - * @param filter the attributes to filter by: - * [localKeyId] the localKeyId to search for. - * [localKeyIdHex] the localKeyId in hex to search for. - * [friendlyName] the friendly name to search for. - * [bagType] bag type to narrow each attribute search by. - * - * @return a map of attribute type to an array of matching bags or, if no - * attribute was given but a bag type, the map key will be the - * bag type. - */ - getBags: function(filter) { - var rval = {}; - - var localKeyId; - if('localKeyId' in filter) { - localKeyId = filter.localKeyId; - } else if('localKeyIdHex' in filter) { - localKeyId = forge.util.hexToBytes(filter.localKeyIdHex); - } + if (client[kClosedResolve] && !client[kSize]) { + client[kClosedResolve]() + client[kClosedResolve] = null + return + } - // filter on bagType only - if(localKeyId === undefined && !('friendlyName' in filter) && - 'bagType' in filter) { - rval[filter.bagType] = _getBagsByAttribute( - pfx.safeContents, null, null, filter.bagType); - } + const socket = client[kSocket] - if(localKeyId !== undefined) { - rval.localKeyId = _getBagsByAttribute( - pfx.safeContents, 'localKeyId', - localKeyId, filter.bagType); + if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { + if (client[kSize] === 0) { + if (!socket[kNoRef] && socket.unref) { + socket.unref() + socket[kNoRef] = true + } + } else if (socket[kNoRef] && socket.ref) { + socket.ref() + socket[kNoRef] = false } - if('friendlyName' in filter) { - rval.friendlyName = _getBagsByAttribute( - pfx.safeContents, 'friendlyName', - filter.friendlyName, filter.bagType); + + if (client[kSize] === 0) { + if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + } + } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { + if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { + const request = client[kQueue][client[kRunningIdx]] + const headersTimeout = request.headersTimeout != null + ? request.headersTimeout + : client[kHeadersTimeout] + socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + } } + } - return rval; - }, + if (client[kBusy]) { + client[kNeedDrain] = 2 + } else if (client[kNeedDrain] === 2) { + if (sync) { + client[kNeedDrain] = 1 + process.nextTick(emitDrain, client) + } else { + emitDrain(client) + } + continue + } - /** - * DEPRECATED: use getBags() instead. 
- * - * Get bags with matching friendlyName attribute. - * - * @param friendlyName the friendly name to search for. - * @param [bagType] bag type to narrow search by. - * - * @return an array of bags with matching friendlyName attribute. - */ - getBagsByFriendlyName: function(friendlyName, bagType) { - return _getBagsByAttribute( - pfx.safeContents, 'friendlyName', friendlyName, bagType); - }, + if (client[kPending] === 0) { + return + } - /** - * DEPRECATED: use getBags() instead. - * - * Get bags with matching localKeyId attribute. - * - * @param localKeyId the localKeyId to search for. - * @param [bagType] bag type to narrow search by. - * - * @return an array of bags with matching localKeyId attribute. - */ - getBagsByLocalKeyId: function(localKeyId, bagType) { - return _getBagsByAttribute( - pfx.safeContents, 'localKeyId', localKeyId, bagType); + if (client[kRunning] >= (client[kPipelining] || 1)) { + return } - }; - if(capture.version.charCodeAt(0) !== 3) { - var error = new Error('PKCS#12 PFX of version other than 3 not supported.'); - error.version = capture.version.charCodeAt(0); - throw error; - } + const request = client[kQueue][client[kPendingIdx]] - if(asn1.derToOid(capture.contentType) !== pki.oids.data) { - var error = new Error('Only PKCS#12 PFX in password integrity mode supported.'); - error.oid = asn1.derToOid(capture.contentType); - throw error; - } + if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { + if (client[kRunning] > 0) { + return + } - var data = capture.content.value[0]; - if(data.tagClass !== asn1.Class.UNIVERSAL || - data.type !== asn1.Type.OCTETSTRING) { - throw new Error('PKCS#12 authSafe content data is not an OCTET STRING.'); - } - data = _decodePkcs7Data(data); + client[kServerName] = request.servername - // check for MAC - if(capture.mac) { - var md = null; - var macKeyBytes = 0; - var macAlgorithm = asn1.derToOid(capture.macAlgorithm); - switch(macAlgorithm) { - case pki.oids.sha1: - md = forge.md.sha1.create(); - macKeyBytes = 20; - break; - case pki.oids.sha256: - md = forge.md.sha256.create(); - macKeyBytes = 32; - break; - case pki.oids.sha384: - md = forge.md.sha384.create(); - macKeyBytes = 48; - break; - case pki.oids.sha512: - md = forge.md.sha512.create(); - macKeyBytes = 64; - break; - case pki.oids.md5: - md = forge.md.md5.create(); - macKeyBytes = 16; - break; + if (socket && socket.servername !== request.servername) { + util.destroy(socket, new InformationalError('servername changed')) + return + } } - if(md === null) { - throw new Error('PKCS#12 uses unsupported MAC algorithm: ' + macAlgorithm); + + if (client[kConnecting]) { + return } - // verify MAC (iterations default to 1) - var macSalt = new forge.util.ByteBuffer(capture.macSalt); - var macIterations = (('macIterations' in capture) ? - parseInt(forge.util.bytesToHex(capture.macIterations), 16) : 1); - var macKey = p12.generateKey( - password, macSalt, 3, macIterations, macKeyBytes, md); - var mac = forge.hmac.create(); - mac.start(md, macKey); - mac.update(data.value); - var macValue = mac.getMac(); - if(macValue.getBytes() !== capture.macDigest) { - throw new Error('PKCS#12 MAC could not be verified. Invalid password?'); + if (!socket && !client[kHTTP2Session]) { + connect(client) + return } - } - _decodeAuthenticatedSafe(pfx, data.value, strict, password); - return pfx; -}; + if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { + return + } -/** - * Decodes PKCS#7 Data. 
PKCS#7 (RFC 2315) defines "Data" as an OCTET STRING, - * but it is sometimes an OCTET STRING that is composed/constructed of chunks, - * each its own OCTET STRING. This is BER-encoding vs. DER-encoding. This - * function transforms this corner-case into the usual simple, - * non-composed/constructed OCTET STRING. - * - * This function may be moved to ASN.1 at some point to better deal with - * more BER-encoding issues, should they arise. - * - * @param data the ASN.1 Data object to transform. - */ -function _decodePkcs7Data(data) { - // handle special case of "chunked" data content: an octet string composed - // of other octet strings - if(data.composed || data.constructed) { - var value = forge.util.createBuffer(); - for(var i = 0; i < data.value.length; ++i) { - value.putBytes(data.value[i].value); + if (client[kRunning] > 0 && !request.idempotent) { + // Non-idempotent request cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { + // Don't dispatch an upgrade until all preceding requests have completed. + // A misbehaving server might upgrade the connection before all pipelined + // request has completed. + return + } + + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && + (util.isStream(request.body) || util.isAsyncIterable(request.body))) { + // Request with stream or iterator body can error while other requests + // are inflight and indirectly error those as well. + // Ensure this doesn't happen by waiting for inflight + // to complete before dispatching. + + // Request with stream or iterator body cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (!request.aborted && write(client, request)) { + client[kPendingIdx]++ + } else { + client[kQueue].splice(client[kPendingIdx], 1) } - data.composed = data.constructed = false; - data.value = value.getBytes(); } - return data; } -/** - * Decode PKCS#12 AuthenticatedSafe (BER encoded) into PFX object. - * - * The AuthenticatedSafe is a BER-encoded SEQUENCE OF ContentInfo. - * - * @param pfx The PKCS#12 PFX object to fill. - * @param {String} authSafe BER-encoded AuthenticatedSafe. - * @param strict true to use strict DER decoding, false not to. - * @param {String} password Password to decrypt with (optional). 
- */ -function _decodeAuthenticatedSafe(pfx, authSafe, strict, password) { - authSafe = asn1.fromDer(authSafe, strict); /* actually it's BER encoded */ +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} - if(authSafe.tagClass !== asn1.Class.UNIVERSAL || - authSafe.type !== asn1.Type.SEQUENCE || - authSafe.constructed !== true) { - throw new Error('PKCS#12 AuthenticatedSafe expected to be a ' + - 'SEQUENCE OF ContentInfo'); +function write (client, request) { + if (client[kHTTPConnVersion] === 'h2') { + writeH2(client, client[kHTTP2Session], request) + return } - for(var i = 0; i < authSafe.value.length; i++) { - var contentInfo = authSafe.value[i]; + const { body, method, path, host, upgrade, headers, blocking, reset } = request - // validate contentInfo and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(contentInfo, contentInfoValidator, capture, errors)) { - var error = new Error('Cannot read ContentInfo.'); - error.errors = errors; - throw error; - } + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 - var obj = { - encrypted: false - }; - var safeContents = null; - var data = capture.content.value[0]; - switch(asn1.derToOid(capture.contentType)) { - case pki.oids.data: - if(data.tagClass !== asn1.Class.UNIVERSAL || - data.type !== asn1.Type.OCTETSTRING) { - throw new Error('PKCS#12 SafeContents Data is not an OCTET STRING.'); - } - safeContents = _decodePkcs7Data(data).value; - break; - case pki.oids.encryptedData: - safeContents = _decryptSafeContents(data, password); - obj.encrypted = true; - break; - default: - var error = new Error('Unsupported PKCS#12 contentType.'); - error.contentType = asn1.derToOid(capture.contentType); - throw error; - } + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. - obj.safeBags = _decodeSafeContents(safeContents, strict, password); - pfx.safeContents.push(obj); - } -} + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) -/** - * Decrypt PKCS#7 EncryptedData structure. - * - * @param data ASN.1 encoded EncryptedContentInfo object. - * @param password The user-provided password. - * - * @return The decrypted SafeContents (ASN.1 object). - */ -function _decryptSafeContents(data, password) { - var capture = {}; - var errors = []; - if(!asn1.validate( - data, forge.pkcs7.asn1.encryptedDataValidator, capture, errors)) { - var error = new Error('Cannot read EncryptedContentInfo.'); - error.errors = errors; - throw error; + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. 
+ body.read(0) } - var oid = asn1.derToOid(capture.contentType); - if(oid !== pki.oids.data) { - var error = new Error( - 'PKCS#12 EncryptedContentInfo ContentType is not Data.'); - error.oid = oid; - throw error; + const bodyLength = util.bodyLength(body) + + let contentLength = bodyLength + + if (contentLength === null) { + contentLength = request.contentLength } - // get cipher - oid = asn1.derToOid(capture.encAlgorithm); - var cipher = pki.pbe.getCipher(oid, capture.encParameter, password); + if (contentLength === 0 && !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } - // get encrypted data - var encryptedContentAsn1 = _decodePkcs7Data(capture.encryptedContentAsn1); - var encrypted = forge.util.createBuffer(encryptedContentAsn1.value); + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } - cipher.update(encrypted); - if(!cipher.finish()) { - throw new Error('Failed to decrypt PKCS#12 SafeContents.'); + process.emitWarning(new RequestContentLengthMismatchError()) } - return cipher.output.getBytes(); -} + const socket = client[kSocket] -/** - * Decode PKCS#12 SafeContents (BER-encoded) into array of Bag objects. - * - * The safeContents is a BER-encoded SEQUENCE OF SafeBag. - * - * @param {String} safeContents BER-encoded safeContents. - * @param strict true to use strict DER decoding, false not to. - * @param {String} password Password to decrypt with (optional). - * - * @return {Array} Array of Bag objects. - */ -function _decodeSafeContents(safeContents, strict, password) { - // if strict and no safe contents, return empty safes - if(!strict && safeContents.length === 0) { - return []; + try { + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + + util.destroy(socket, new InformationalError('aborted')) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false } - // actually it's BER-encoded - safeContents = asn1.fromDer(safeContents, strict); + if (method === 'HEAD') { + // https://github.com/mcollina/undici/issues/258 + // Close after a HEAD request to interop with misbehaving servers + // that may send a body in the response. - if(safeContents.tagClass !== asn1.Class.UNIVERSAL || - safeContents.type !== asn1.Type.SEQUENCE || - safeContents.constructed !== true) { - throw new Error( - 'PKCS#12 SafeContents expected to be a SEQUENCE OF SafeBag.'); + socket[kReset] = true } - var res = []; - for(var i = 0; i < safeContents.value.length; i++) { - var safeBag = safeContents.value[i]; + if (upgrade || method === 'CONNECT') { + // On CONNECT or upgrade, block pipeline from dispatching further + // requests on this connection. 
- // validate SafeBag and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(safeBag, safeBagValidator, capture, errors)) { - var error = new Error('Cannot read SafeBag.'); - error.errors = errors; - throw error; - } + socket[kReset] = true + } - /* Create bag object and push to result array. */ - var bag = { - type: asn1.derToOid(capture.bagId), - attributes: _decodeBagAttributes(capture.bagAttributes) - }; - res.push(bag); - - var validator, decoder; - var bagAsn1 = capture.bagValue.value[0]; - switch(bag.type) { - case pki.oids.pkcs8ShroudedKeyBag: - /* bagAsn1 has a EncryptedPrivateKeyInfo, which we need to decrypt. - Afterwards we can handle it like a keyBag, - which is a PrivateKeyInfo. */ - bagAsn1 = pki.decryptPrivateKeyInfo(bagAsn1, password); - if(bagAsn1 === null) { - throw new Error( - 'Unable to decrypt PKCS#8 ShroudedKeyBag, wrong password?'); - } - - /* fall through */ - case pki.oids.keyBag: - /* A PKCS#12 keyBag is a simple PrivateKeyInfo as understood by our - PKI module, hence we don't have to do validation/capturing here, - just pass what we already got. */ - try { - bag.key = pki.privateKeyFromAsn1(bagAsn1); - } catch(e) { - // ignore unknown key type, pass asn1 value - bag.key = null; - bag.asn1 = bagAsn1; - } - continue; /* Nothing more to do. */ - - case pki.oids.certBag: - /* A PKCS#12 certBag can wrap both X.509 and sdsi certificates. - Therefore put the SafeBag content through another validator to - capture the fields. Afterwards check & store the results. */ - validator = certBagValidator; - decoder = function() { - if(asn1.derToOid(capture.certId) !== pki.oids.x509Certificate) { - var error = new Error( - 'Unsupported certificate type, only X.509 supported.'); - error.oid = asn1.derToOid(capture.certId); - throw error; - } + if (reset != null) { + socket[kReset] = reset + } - // true=produce cert hash - var certAsn1 = asn1.fromDer(capture.cert, strict); - try { - bag.cert = pki.certificateFromAsn1(certAsn1, true); - } catch(e) { - // ignore unknown cert type, pass asn1 value - bag.cert = null; - bag.asn1 = certAsn1; - } - }; - break; + if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { + socket[kReset] = true + } - default: - var error = new Error('Unsupported PKCS#12 SafeBag type.'); - error.oid = bag.type; - throw error; - } + if (blocking) { + socket[kBlocking] = true + } - /* Validate SafeBag value (i.e. CertBag, etc.) and capture data if needed. */ - if(validator !== undefined && - !asn1.validate(bagAsn1, validator, capture, errors)) { - var error = new Error('Cannot read PKCS#12 ' + validator.name); - error.errors = errors; - throw error; - } + let header = `${method} ${path} HTTP/1.1\r\n` - /* Call decoder function from above to store the results. */ - decoder(); + if (typeof host === 'string') { + header += `host: ${host}\r\n` + } else { + header += client[kHostHeader] } - return res; -} + if (upgrade) { + header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` + } else if (client[kPipelining] && !socket[kReset]) { + header += 'connection: keep-alive\r\n' + } else { + header += 'connection: close\r\n' + } -/** - * Decode PKCS#12 SET OF PKCS12Attribute into JavaScript object. - * - * @param attributes SET OF PKCS12Attribute (ASN.1 object). - * - * @return the decoded attributes. 
- */ -function _decodeBagAttributes(attributes) { - var decodedAttrs = {}; - - if(attributes !== undefined) { - for(var i = 0; i < attributes.length; ++i) { - var capture = {}; - var errors = []; - if(!asn1.validate(attributes[i], attributeValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#12 BagAttribute.'); - error.errors = errors; - throw error; - } + if (headers) { + header += headers + } - var oid = asn1.derToOid(capture.oid); - if(pki.oids[oid] === undefined) { - // unsupported attribute type, ignore. - continue; - } + if (channels.sendHeaders.hasSubscribers) { + channels.sendHeaders.publish({ request, headers: header, socket }) + } - decodedAttrs[pki.oids[oid]] = []; - for(var j = 0; j < capture.values.length; ++j) { - decodedAttrs[pki.oids[oid]].push(capture.values[j].value); - } + /* istanbul ignore else: assertion */ + if (!body || bodyLength === 0) { + if (contentLength === 0) { + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + assert(contentLength === null, 'no body must not have content length') + socket.write(`${header}\r\n`, 'latin1') + } + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(body) + socket.uncork() + request.onBodySent(body) + request.onRequestSent() + if (!expectsPayload) { + socket[kReset] = true + } + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) + } else { + writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) } + } else if (util.isStream(body)) { + writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) + } else if (util.isIterable(body)) { + writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) + } else { + assert(false) } - return decodedAttrs; + return true } -/** - * Wraps a private key and certificate in a PKCS#12 PFX wrapper. If a - * password is provided then the private key will be encrypted. - * - * An entire certificate chain may also be included. To do this, pass - * an array for the "cert" parameter where the first certificate is - * the one that is paired with the private key and each subsequent one - * verifies the previous one. The certificates may be in PEM format or - * have been already parsed by Forge. - * - * @todo implement password-based-encryption for the whole package - * - * @param key the private key. - * @param cert the certificate (may be an array of certificates in order - * to specify a certificate chain). - * @param password the password to use, null for none. - * @param options: - * algorithm the encryption algorithm to use - * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'. - * count the iteration count to use. - * saltSize the salt size to use. - * useMac true to include a MAC, false not to, defaults to true. - * localKeyId the local key ID to use, in hex. - * friendlyName the friendly name to use. - * generateLocalKeyId true to generate a random local key ID, - * false not to, defaults to true. - * - * @return the PKCS#12 PFX ASN.1 object. 
- */ -p12.toPkcs12Asn1 = function(key, cert, password, options) { - // set default options - options = options || {}; - options.saltSize = options.saltSize || 8; - options.count = options.count || 2048; - options.algorithm = options.algorithm || options.encAlgorithm || 'aes128'; - if(!('useMac' in options)) { - options.useMac = true; - } - if(!('localKeyId' in options)) { - options.localKeyId = null; - } - if(!('generateLocalKeyId' in options)) { - options.generateLocalKeyId = true; - } - - var localKeyId = options.localKeyId; - var bagAttrs; - if(localKeyId !== null) { - localKeyId = forge.util.hexToBytes(localKeyId); - } else if(options.generateLocalKeyId) { - // use SHA-1 of paired cert, if available - if(cert) { - var pairedCert = forge.util.isArray(cert) ? cert[0] : cert; - if(typeof pairedCert === 'string') { - pairedCert = pki.certificateFromPem(pairedCert); +function writeH2 (client, session, request) { + const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request + + let headers + if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) + else headers = reqHeaders + + if (upgrade) { + errorRequest(client, request, new Error('Upgrade not supported for H2')) + return false + } + + try { + // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? + request.onConnect((err) => { + if (request.aborted || request.completed) { + return } - var sha1 = forge.md.sha1.create(); - sha1.update(asn1.toDer(pki.certificateToAsn1(pairedCert)).getBytes()); - localKeyId = sha1.digest().getBytes(); - } else { - // FIXME: consider using SHA-1 of public key (which can be generated - // from private key components), see: cert.generateSubjectKeyIdentifier - // generate random bytes - localKeyId = forge.random.getBytes(20); - } - } - - var attrs = []; - if(localKeyId !== null) { - attrs.push( - // localKeyID - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // attrId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.localKeyId).getBytes()), - // attrValues - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - localKeyId) - ]) - ])); - } - if('friendlyName' in options) { - attrs.push( - // friendlyName - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // attrId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.friendlyName).getBytes()), - // attrValues - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BMPSTRING, false, - options.friendlyName) - ]) - ])); + + errorRequest(client, request, err || new RequestAbortedError()) + }) + } catch (err) { + errorRequest(client, request, err) } - if(attrs.length > 0) { - bagAttrs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); + if (request.aborted) { + return false } - // collect contents for AuthenticatedSafe - var contents = []; + /** @type {import('node:http2').ClientHttp2Stream} */ + let stream + const h2State = client[kHTTP2SessionState] - // create safe bag(s) for certificate chain - var chain = []; - if(cert !== null) { - if(forge.util.isArray(cert)) { - chain = cert; - } else { - chain = [cert]; - } - } - - var certSafeBags = []; - for(var i = 0; i < chain.length; ++i) { - // convert cert from PEM as necessary - cert = chain[i]; - if(typeof cert === 'string') { - cert = pki.certificateFromPem(cert); - } - - // SafeBag - var 
certBagAttrs = (i === 0) ? bagAttrs : undefined; - var certAsn1 = pki.certificateToAsn1(cert); - var certSafeBag = - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // bagId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.certBag).getBytes()), - // bagValue - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // CertBag - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // certId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.x509Certificate).getBytes()), - // certValue (x509Certificate) - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(certAsn1).getBytes()) - ])])]), - // bagAttributes (OPTIONAL) - certBagAttrs - ]); - certSafeBags.push(certSafeBag); - } - - if(certSafeBags.length > 0) { - // SafeContents - var certSafeContents = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, certSafeBags); - - // ContentInfo - var certCI = - // PKCS#7 ContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // contentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - // OID for the content type is 'data' - asn1.oidToDer(pki.oids.data).getBytes()), - // content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(certSafeContents).getBytes()) - ]) - ]); - contents.push(certCI); - } - - // create safe contents for private key - var keyBag = null; - if(key !== null) { - // SafeBag - var pkAsn1 = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(key)); - if(password === null) { - // no encryption - keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // bagId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.keyBag).getBytes()), - // bagValue - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // PrivateKeyInfo - pkAsn1 - ]), - // bagAttributes (OPTIONAL) - bagAttrs - ]); + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] + headers[HTTP2_HEADER_METHOD] = method + + if (method === 'CONNECT') { + session.ref() + // we are already connected, streams are pending, first request + // will create a new stream. 
We trigger a request to create the stream and wait until + // `ready` event is triggered + // We disabled endStream to allow the user to write to the stream + stream = session.request(headers, { endStream: false, signal }) + + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream) + ++h2State.openStreams } else { - // encrypted PrivateKeyInfo - keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // bagId - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.pkcs8ShroudedKeyBag).getBytes()), - // bagValue - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // EncryptedPrivateKeyInfo - pki.encryptPrivateKeyInfo(pkAsn1, password, options) - ]), - // bagAttributes (OPTIONAL) - bagAttrs - ]); - } - - // SafeContents - var keySafeContents = - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [keyBag]); - - // ContentInfo - var keyCI = - // PKCS#7 ContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // contentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - // OID for the content type is 'data' - asn1.oidToDer(pki.oids.data).getBytes()), - // content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(keySafeContents).getBytes()) - ]) - ]); - contents.push(keyCI); - } - - // create AuthenticatedSafe by stringing together the contents - var safe = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, contents); - - var macData; - if(options.useMac) { - // MacData - var sha1 = forge.md.sha1.create(); - var macSalt = new forge.util.ByteBuffer( - forge.random.getBytes(options.saltSize)); - var count = options.count; - // 160-bit key - var key = p12.generateKey(password, macSalt, 3, count, 20); - var mac = forge.hmac.create(); - mac.start(sha1, key); - mac.update(asn1.toDer(safe).getBytes()); - var macValue = mac.getMac(); - macData = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // mac DigestInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // digestAlgorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm = SHA-1 - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.sha1).getBytes()), - // parameters = Null - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // digest - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, - false, macValue.getBytes()) - ]), - // macSalt OCTET STRING - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, macSalt.getBytes()), - // iterations INTEGER (XXX: Only support count < 65536) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(count).getBytes() - ) - ]); - } - - // PFX - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version (3) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(3).getBytes()), - // PKCS#7 ContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // contentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - // OID for the content type is 'data' - asn1.oidToDer(pki.oids.data).getBytes()), - // content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(safe).getBytes()) - ]) - ]), - macData - ]); -}; + stream.once('ready', () => { + request.onUpgrade(null, null, stream) + ++h2State.openStreams 
+ }) + } -/** - * Derives a PKCS#12 key. - * - * @param password the password to derive the key material from, null or - * undefined for none. - * @param salt the salt, as a ByteBuffer, to use. - * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC). - * @param iter the iteration count. - * @param n the number of bytes to derive from the password. - * @param md the message digest to use, defaults to SHA-1. - * - * @return a ByteBuffer with the bytes derived from the password. - */ -p12.generateKey = forge.pbe.generatePkcs12Key; + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) session.unref() + }) + return true + } -/***/ }), + // https://tools.ietf.org/html/rfc7540#section-8.3 + // :path and :scheme headers must be omited when sending CONNECT -/***/ 4829: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + headers[HTTP2_HEADER_PATH] = path + headers[HTTP2_HEADER_SCHEME] = 'https' -/** - * Javascript implementation of PKCS#7 v1.5. - * - * @author Stefan Siegl - * @author Dave Longley - * - * Copyright (c) 2012 Stefan Siegl - * Copyright (c) 2012-2015 Digital Bazaar, Inc. - * - * Currently this implementation only supports ContentType of EnvelopedData, - * EncryptedData, or SignedData at the root level. The top level elements may - * contain only a ContentInfo of ContentType Data, i.e. plain data. Further - * nesting is not (yet) supported. - * - * The Forge validators for PKCS #7's ASN.1 structures are available from - * a separate file pkcs7asn1.js, since those are referenced from other - * PKCS standards like PKCS #12. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9549); -__nccwpck_require__(7157); -__nccwpck_require__(1925); -__nccwpck_require__(154); -__nccwpck_require__(266); -__nccwpck_require__(7821); -__nccwpck_require__(8339); -__nccwpck_require__(8180); - -// shortcut for ASN.1 API -var asn1 = forge.asn1; - -// shortcut for PKCS#7 API -var p7 = module.exports = forge.pkcs7 = forge.pkcs7 || {}; + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 -/** - * Converts a PKCS#7 message from PEM format. - * - * @param pem the PEM-formatted PKCS#7 message. - * - * @return the PKCS#7 message. - */ -p7.messageFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. - if(msg.type !== 'PKCS7') { - var error = new Error('Could not convert PKCS#7 message from PEM; PEM ' + - 'header type is not "PKCS#7".'); - error.headerType = msg.type; - throw error; + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. 
+ body.read(0) } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert PKCS#7 message from PEM; PEM is encrypted.'); + + let contentLength = util.bodyLength(body) + + if (contentLength == null) { + contentLength = request.contentLength } - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body); + if (contentLength === 0 || !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. - return p7.messageFromAsn1(obj); -}; + contentLength = null + } -/** - * Converts a PKCS#7 message to PEM format. - * - * @param msg The PKCS#7 message object - * @param maxline The maximum characters per line, defaults to 64. - * - * @return The PEM-formatted PKCS#7 message. - */ -p7.messageToPem = function(msg, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var pemObj = { - type: 'PKCS7', - body: asn1.toDer(msg.toAsn1()).getBytes() - }; - return forge.pem.encode(pemObj, {maxline: maxline}); -}; + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } -/** - * Converts a PKCS#7 message from an ASN.1 object. - * - * @param obj the ASN.1 representation of a ContentInfo. - * - * @return the PKCS#7 message. - */ -p7.messageFromAsn1 = function(obj) { - // validate root level ContentInfo and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, p7.asn1.contentInfoValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 message. ' + - 'ASN.1 object is not an PKCS#7 ContentInfo.'); - error.errors = errors; - throw error; - } - - var contentType = asn1.derToOid(capture.contentType); - var msg; - - switch(contentType) { - case forge.pki.oids.envelopedData: - msg = p7.createEnvelopedData(); - break; + process.emitWarning(new RequestContentLengthMismatchError()) + } - case forge.pki.oids.encryptedData: - msg = p7.createEncryptedData(); - break; + if (contentLength != null) { + assert(body, 'no body must not have content length') + headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` + } - case forge.pki.oids.signedData: - msg = p7.createSignedData(); - break; + session.ref() - default: - throw new Error('Cannot read PKCS#7 message. ContentType with OID ' + - contentType + ' is not (yet) supported.'); + const shouldEndStream = method === 'GET' || method === 'HEAD' + if (expectContinue) { + headers[HTTP2_HEADER_EXPECT] = '100-continue' + stream = session.request(headers, { endStream: shouldEndStream, signal }) + + stream.once('continue', writeBodyH2) + } else { + stream = session.request(headers, { + endStream: shouldEndStream, + signal + }) + writeBodyH2() } - msg.fromAsn1(capture.content.value[0]); - return msg; -}; + // Increment counter as we have new several streams open + ++h2State.openStreams -p7.createSignedData = function() { - var msg = null; - msg = { - type: forge.pki.oids.signedData, - version: 1, - certificates: [], - crls: [], - // TODO: add json-formatted signer stuff here? 
- signers: [], - // populated during sign() - digestAlgorithmIdentifiers: [], - contentInfo: null, - signerInfos: [], - - fromAsn1: function(obj) { - // validate SignedData content block and capture data. - _fromAsn1(msg, obj, p7.asn1.signedDataValidator); - msg.certificates = []; - msg.crls = []; - msg.digestAlgorithmIdentifiers = []; - msg.contentInfo = null; - msg.signerInfos = []; - - if(msg.rawCapture.certificates) { - var certs = msg.rawCapture.certificates.value; - for(var i = 0; i < certs.length; ++i) { - msg.certificates.push(forge.pki.certificateFromAsn1(certs[i])); - } - } + stream.once('response', headers => { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers - // TODO: parse crls - }, + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { + stream.pause() + } + }) - toAsn1: function() { - // degenerate case with no content - if(!msg.contentInfo) { - msg.sign(); - } + stream.once('end', () => { + request.onComplete([]) + }) - var certs = []; - for(var i = 0; i < msg.certificates.length; ++i) { - certs.push(forge.pki.certificateToAsn1(msg.certificates[i])); - } + stream.on('data', (chunk) => { + if (request.onData(chunk) === false) { + stream.pause() + } + }) - var crls = []; - // TODO: implement CRLs - - // [0] SignedData - var signedData = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(msg.version).getBytes()), - // DigestAlgorithmIdentifiers - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SET, true, - msg.digestAlgorithmIdentifiers), - // ContentInfo - msg.contentInfo - ]) - ]); - if(certs.length > 0) { - // [0] IMPLICIT ExtendedCertificatesAndCertificates OPTIONAL - signedData.value[0].value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, certs)); - } - if(crls.length > 0) { - // [1] IMPLICIT CertificateRevocationLists OPTIONAL - signedData.value[0].value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, crls)); - } - // SignerInfos - signedData.value[0].value.push( - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, - msg.signerInfos)); - - // ContentInfo - return asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // ContentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(msg.type).getBytes()), - // [0] SignedData - signedData - ]); - }, + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) { + session.unref() + } + }) - /** - * Add (another) entity to list of signers. - * - * Note: If authenticatedAttributes are provided, then, per RFC 2315, - * they must include at least two attributes: content type and - * message digest. The message digest attribute value will be - * auto-calculated during signing and will be ignored if provided. - * - * Here's an example of providing these two attributes: - * - * forge.pkcs7.createSignedData(); - * p7.addSigner({ - * issuer: cert.issuer.attributes, - * serialNumber: cert.serialNumber, - * key: privateKey, - * digestAlgorithm: forge.pki.oids.sha1, - * authenticatedAttributes: [{ - * type: forge.pki.oids.contentType, - * value: forge.pki.oids.data - * }, { - * type: forge.pki.oids.messageDigest - * }] - * }); - * - * TODO: Support [subjectKeyIdentifier] as signer's ID. 
- * - * @param signer the signer information: - * key the signer's private key. - * [certificate] a certificate containing the public key - * associated with the signer's private key; use this option as - * an alternative to specifying signer.issuer and - * signer.serialNumber. - * [issuer] the issuer attributes (eg: cert.issuer.attributes). - * [serialNumber] the signer's certificate's serial number in - * hexadecimal (eg: cert.serialNumber). - * [digestAlgorithm] the message digest OID, as a string, to use - * (eg: forge.pki.oids.sha1). - * [authenticatedAttributes] an optional array of attributes - * to also sign along with the content. - */ - addSigner: function(signer) { - var issuer = signer.issuer; - var serialNumber = signer.serialNumber; - if(signer.certificate) { - var cert = signer.certificate; - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); - } - issuer = cert.issuer.attributes; - serialNumber = cert.serialNumber; - } - var key = signer.key; - if(!key) { - throw new Error( - 'Could not add PKCS#7 signer; no private key specified.'); - } - if(typeof key === 'string') { - key = forge.pki.privateKeyFromPem(key); - } + stream.once('error', function (err) { + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) - // ensure OID known for digest algorithm - var digestAlgorithm = signer.digestAlgorithm || forge.pki.oids.sha1; - switch(digestAlgorithm) { - case forge.pki.oids.sha1: - case forge.pki.oids.sha256: - case forge.pki.oids.sha384: - case forge.pki.oids.sha512: - case forge.pki.oids.md5: - break; - default: - throw new Error( - 'Could not add PKCS#7 signer; unknown message digest algorithm: ' + - digestAlgorithm); - } + stream.once('frameError', (type, code) => { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + errorRequest(client, request, err) - // if authenticatedAttributes is present, then the attributes - // must contain at least PKCS #9 content-type and message-digest - var authenticatedAttributes = signer.authenticatedAttributes || []; - if(authenticatedAttributes.length > 0) { - var contentType = false; - var messageDigest = false; - for(var i = 0; i < authenticatedAttributes.length; ++i) { - var attr = authenticatedAttributes[i]; - if(!contentType && attr.type === forge.pki.oids.contentType) { - contentType = true; - if(messageDigest) { - break; - } - continue; - } - if(!messageDigest && attr.type === forge.pki.oids.messageDigest) { - messageDigest = true; - if(contentType) { - break; - } - continue; - } - } + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) - if(!contentType || !messageDigest) { - throw new Error('Invalid signer.authenticatedAttributes. 
If ' + - 'signer.authenticatedAttributes is specified, then it must ' + - 'contain at least two attributes, PKCS #9 content-type and ' + - 'PKCS #9 message-digest.'); - } + // stream.on('aborted', () => { + // // TODO(HTTP/2): Support aborted + // }) + + // stream.on('timeout', () => { + // // TODO(HTTP/2): Support timeout + // }) + + // stream.on('push', headers => { + // // TODO(HTTP/2): Suppor push + // }) + + // stream.on('trailers', headers => { + // // TODO(HTTP/2): Support trailers + // }) + + return true + + function writeBodyH2 () { + /* istanbul ignore else: assertion */ + if (!body) { + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + stream.cork() + stream.write(body) + stream.uncork() + stream.end() + request.onBodySent(body) + request.onRequestSent() + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ + client, + request, + contentLength, + h2stream: stream, + expectsPayload, + body: body.stream(), + socket: client[kSocket], + header: '' + }) + } else { + writeBlob({ + body, + client, + request, + contentLength, + expectsPayload, + h2stream: stream, + header: '', + socket: client[kSocket] + }) } + } else if (util.isStream(body)) { + writeStream({ + body, + client, + request, + contentLength, + expectsPayload, + socket: client[kSocket], + h2stream: stream, + header: '' + }) + } else if (util.isIterable(body)) { + writeIterable({ + body, + client, + request, + contentLength, + expectsPayload, + header: '', + h2stream: stream, + socket: client[kSocket] + }) + } else { + assert(false) + } + } +} - msg.signers.push({ - key: key, - version: 1, - issuer: issuer, - serialNumber: serialNumber, - digestAlgorithm: digestAlgorithm, - signatureAlgorithm: forge.pki.oids.rsaEncryption, - signature: null, - authenticatedAttributes: authenticatedAttributes, - unauthenticatedAttributes: [] - }); - }, - - /** - * Signs the content. - * @param options Options to apply when signing: - * [detached] boolean. If signing should be done in detached mode. Defaults to false. 
- */ - sign: function(options) { - options = options || {}; - // auto-generate content info - if(typeof msg.content !== 'object' || msg.contentInfo === null) { - // use Data ContentInfo - msg.contentInfo = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // ContentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(forge.pki.oids.data).getBytes()) - ]); - - // add actual content, if present - if('content' in msg) { - var content; - if(msg.content instanceof forge.util.ByteBuffer) { - content = msg.content.bytes(); - } else if(typeof msg.content === 'string') { - content = forge.util.encodeUtf8(msg.content); - } +function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') - if (options.detached) { - msg.detachedContent = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, content); - } else { - msg.contentInfo.value.push( - // [0] EXPLICIT content - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - content) - ])); - } + if (client[kHTTPConnVersion] === 'h2') { + // For HTTP/2, is enough to pipe the stream + const pipe = pipeline( + body, + h2stream, + (err) => { + if (err) { + util.destroy(body, err) + util.destroy(h2stream, err) + } else { + request.onRequestSent() } } + ) - // no signers, return early (degenerate case for certificate container) - if(msg.signers.length === 0) { - return; - } + pipe.on('data', onPipeData) + pipe.once('end', () => { + pipe.removeListener('data', onPipeData) + util.destroy(pipe) + }) - // generate digest algorithm identifiers - var mds = addDigestAlgorithmIds(); + function onPipeData (chunk) { + request.onBodySent(chunk) + } - // generate signerInfos - addSignerInfos(mds); - }, + return + } - verify: function() { - throw new Error('PKCS#7 signature verification not yet implemented.'); - }, + let finished = false - /** - * Add a certificate. - * - * @param cert the certificate to add. - */ - addCertificate: function(cert) { - // convert from PEM - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + + const onData = function (chunk) { + if (finished) { + return + } + + try { + if (!writer.write(chunk) && this.pause) { + this.pause() } - msg.certificates.push(cert); - }, + } catch (err) { + util.destroy(this, err) + } + } + const onDrain = function () { + if (finished) { + return + } - /** - * Add a certificate revokation list. - * - * @param crl the certificate revokation list to add. 
- */ - addCertificateRevokationList: function(crl) { - throw new Error('PKCS#7 CRL support not yet implemented.'); + if (body.resume) { + body.resume() + } + } + const onAbort = function () { + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) + } + const onFinished = function (err) { + if (finished) { + return } - }; - return msg; - function addDigestAlgorithmIds() { - var mds = {}; + finished = true - for(var i = 0; i < msg.signers.length; ++i) { - var signer = msg.signers[i]; - var oid = signer.digestAlgorithm; - if(!(oid in mds)) { - // content digest - mds[oid] = forge.md[forge.pki.oids[oid]].create(); - } - if(signer.authenticatedAttributes.length === 0) { - // no custom attributes to digest; use content message digest - signer.md = mds[oid]; - } else { - // custom attributes to be digested; use own message digest - // TODO: optimize to just copy message digest state if that - // feature is ever supported with message digests - signer.md = forge.md[forge.pki.oids[oid]].create(); + assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) + + socket + .off('drain', onDrain) + .off('error', onFinished) + + body + .removeListener('data', onData) + .removeListener('end', onFinished) + .removeListener('error', onFinished) + .removeListener('close', onAbort) + + if (!err) { + try { + writer.end() + } catch (er) { + err = er } } - // add unique digest algorithm identifiers - msg.digestAlgorithmIdentifiers = []; - for(var oid in mds) { - msg.digestAlgorithmIdentifiers.push( - // AlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(oid).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ])); + writer.destroy(err) + + if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { + util.destroy(body, err) + } else { + util.destroy(body) } + } - return mds; + body + .on('data', onData) + .on('end', onFinished) + .on('error', onFinished) + .on('close', onAbort) + + if (body.resume) { + body.resume() } - function addSignerInfos(mds) { - var content; + socket + .on('drain', onDrain) + .on('error', onFinished) +} + +async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength === body.size, 'blob body must have content length') + + const isH2 = client[kHTTPConnVersion] === 'h2' + try { + if (contentLength != null && contentLength !== body.size) { + throw new RequestContentLengthMismatchError() + } - if (msg.detachedContent) { - // Signature has been made in detached mode. 
- content = msg.detachedContent; + const buffer = Buffer.from(await body.arrayBuffer()) + + if (isH2) { + h2stream.cork() + h2stream.write(buffer) + h2stream.uncork() } else { - // Note: ContentInfo is a SEQUENCE with 2 values, second value is - // the content field and is optional for a ContentInfo but required here - // since signers are present - // get ContentInfo content - content = msg.contentInfo.value[1]; - // skip [0] EXPLICIT content wrapper - content = content.value[0]; + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(buffer) + socket.uncork() } - if(!content) { - throw new Error( - 'Could not sign PKCS#7 message; there is no content to sign.'); + request.onBodySent(buffer) + request.onRequestSent() + + if (!expectsPayload) { + socket[kReset] = true } - // get ContentInfo content type - var contentType = asn1.derToOid(msg.contentInfo.value[0].value); + resume(client) + } catch (err) { + util.destroy(isH2 ? h2stream : socket, err) + } +} - // serialize content - var bytes = asn1.toDer(content); +async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') + + let callback = null + function onDrain () { + if (callback) { + const cb = callback + callback = null + cb() + } + } - // skip identifier and length per RFC 2315 9.3 - // skip identifier (1 byte) - bytes.getByte(); - // read and discard length bytes - asn1.getBerValueLength(bytes); - bytes = bytes.getBytes(); + const waitForDrain = () => new Promise((resolve, reject) => { + assert(callback === null) - // digest content DER value bytes - for(var oid in mds) { - mds[oid].start().update(bytes); + if (socket[kError]) { + reject(socket[kError]) + } else { + callback = resolve } + }) - // sign content - var signingTime = new Date(); - for(var i = 0; i < msg.signers.length; ++i) { - var signer = msg.signers[i]; + if (client[kHTTPConnVersion] === 'h2') { + h2stream + .on('close', onDrain) + .on('drain', onDrain) - if(signer.authenticatedAttributes.length === 0) { - // if ContentInfo content type is not "Data", then - // authenticatedAttributes must be present per RFC 2315 - if(contentType !== forge.pki.oids.data) { - throw new Error( - 'Invalid signer; authenticatedAttributes must be present ' + - 'when the ContentInfo content type is not PKCS#7 Data.'); + try { + // It's up to the user to somehow abort the async iterable. 
+ for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] } - } else { - // process authenticated attributes - // [0] IMPLICIT - signer.authenticatedAttributesAsn1 = asn1.create( - asn1.Class.CONTEXT_SPECIFIC, 0, true, []); - - // per RFC 2315, attributes are to be digested using a SET container - // not the above [0] IMPLICIT container - var attrsAsn1 = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SET, true, []); - - for(var ai = 0; ai < signer.authenticatedAttributes.length; ++ai) { - var attr = signer.authenticatedAttributes[ai]; - if(attr.type === forge.pki.oids.messageDigest) { - // use content message digest as value - attr.value = mds[signer.digestAlgorithm].digest(); - } else if(attr.type === forge.pki.oids.signingTime) { - // auto-populate signing time if not already set - if(!attr.value) { - attr.value = signingTime; - } - } - // convert to ASN.1 and push onto Attributes SET (for signing) and - // onto authenticatedAttributesAsn1 to complete SignedData ASN.1 - // TODO: optimize away duplication - attrsAsn1.value.push(_attributeToAsn1(attr)); - signer.authenticatedAttributesAsn1.value.push(_attributeToAsn1(attr)); + const res = h2stream.write(chunk) + request.onBodySent(chunk) + if (!res) { + await waitForDrain() } + } + } catch (err) { + h2stream.destroy(err) + } finally { + request.onRequestSent() + h2stream.end() + h2stream + .off('close', onDrain) + .off('drain', onDrain) + } + + return + } + + socket + .on('close', onDrain) + .on('drain', onDrain) - // DER-serialize and digest SET OF attributes only - bytes = asn1.toDer(attrsAsn1).getBytes(); - signer.md.start().update(bytes); + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] } - // sign digest - signer.signature = signer.key.sign(signer.md, 'RSASSA-PKCS1-V1_5'); + if (!writer.write(chunk)) { + await waitForDrain() + } } - // add signer info - msg.signerInfos = _signersToAsn1(msg.signers); + writer.end() + } catch (err) { + writer.destroy(err) + } finally { + socket + .off('close', onDrain) + .off('drain', onDrain) } -}; +} -/** - * Creates an empty PKCS#7 message of type EncryptedData. - * - * @return the message. - */ -p7.createEncryptedData = function() { - var msg = null; - msg = { - type: forge.pki.oids.encryptedData, - version: 0, - encryptedContent: { - algorithm: forge.pki.oids['aes256-CBC'] - }, +class AsyncWriter { + constructor ({ socket, request, contentLength, client, expectsPayload, header }) { + this.socket = socket + this.request = request + this.contentLength = contentLength + this.client = client + this.bytesWritten = 0 + this.expectsPayload = expectsPayload + this.header = header - /** - * Reads an EncryptedData content block (in ASN.1 format) - * - * @param obj The ASN.1 representation of the EncryptedData content block - */ - fromAsn1: function(obj) { - // Validate EncryptedData content block and capture data. 
- _fromAsn1(msg, obj, p7.asn1.encryptedDataValidator); - }, + socket[kWriting] = true + } - /** - * Decrypt encrypted content - * - * @param key The (symmetric) key as a byte buffer - */ - decrypt: function(key) { - if(key !== undefined) { - msg.encryptedContent.key = key; - } - _decryptContent(msg); + write (chunk) { + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return false + } + + const len = Buffer.byteLength(chunk) + if (!len) { + return true + } + + // We should defer writing chunks. + if (contentLength !== null && bytesWritten + len > contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } + + process.emitWarning(new RequestContentLengthMismatchError()) } - }; - return msg; -}; -/** - * Creates an empty PKCS#7 message of type EnvelopedData. - * - * @return the message. - */ -p7.createEnvelopedData = function() { - var msg = null; - msg = { - type: forge.pki.oids.envelopedData, - version: 0, - recipients: [], - encryptedContent: { - algorithm: forge.pki.oids['aes256-CBC'] - }, + socket.cork() - /** - * Reads an EnvelopedData content block (in ASN.1 format) - * - * @param obj the ASN.1 representation of the EnvelopedData content block. - */ - fromAsn1: function(obj) { - // validate EnvelopedData content block and capture data - var capture = _fromAsn1(msg, obj, p7.asn1.envelopedDataValidator); - msg.recipients = _recipientsFromAsn1(capture.recipientInfos.value); - }, + if (bytesWritten === 0) { + if (!expectsPayload) { + socket[kReset] = true + } - toAsn1: function() { - // ContentInfo - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // ContentType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(msg.type).getBytes()), - // [0] EnvelopedData - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(msg.version).getBytes()), - // RecipientInfos - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, - _recipientsToAsn1(msg.recipients)), - // EncryptedContentInfo - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, - _encryptedContentToAsn1(msg.encryptedContent)) - ]) - ]) - ]); - }, + if (contentLength === null) { + socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') + } else { + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + } + } - /** - * Find recipient by X.509 certificate's issuer. - * - * @param cert the certificate with the issuer to look for. - * - * @return the recipient object. 
- */ - findRecipient: function(cert) { - var sAttr = cert.issuer.attributes; + if (contentLength === null) { + socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') + } - for(var i = 0; i < msg.recipients.length; ++i) { - var r = msg.recipients[i]; - var rAttr = r.issuer; + this.bytesWritten += len - if(r.serialNumber !== cert.serialNumber) { - continue; - } + const ret = socket.write(chunk) - if(rAttr.length !== sAttr.length) { - continue; - } + socket.uncork() - var match = true; - for(var j = 0; j < sAttr.length; ++j) { - if(rAttr[j].type !== sAttr[j].type || - rAttr[j].value !== sAttr[j].value) { - match = false; - break; - } - } + request.onBodySent(chunk) - if(match) { - return r; + if (!ret) { + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() } } + } - return null; - }, + return ret + } - /** - * Decrypt enveloped content - * - * @param recipient The recipient object related to the private key - * @param privKey The (RSA) private key object - */ - decrypt: function(recipient, privKey) { - if(msg.encryptedContent.key === undefined && recipient !== undefined && - privKey !== undefined) { - switch(recipient.encryptedContent.algorithm) { - case forge.pki.oids.rsaEncryption: - case forge.pki.oids.desCBC: - var key = privKey.decrypt(recipient.encryptedContent.content); - msg.encryptedContent.key = forge.util.createBuffer(key); - break; - - default: - throw new Error('Unsupported asymmetric cipher, ' + - 'OID ' + recipient.encryptedContent.algorithm); - } - } + end () { + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this + request.onRequestSent() - _decryptContent(msg); - }, + socket[kWriting] = false - /** - * Add (another) entity to list of recipients. - * - * @param cert The certificate of the entity to add. - */ - addRecipient: function(cert) { - msg.recipients.push({ - version: 0, - issuer: cert.issuer.attributes, - serialNumber: cert.serialNumber, - encryptedContent: { - // We simply assume rsaEncryption here, since forge.pki only - // supports RSA so far. If the PKI module supports other - // ciphers one day, we need to modify this one as well. - algorithm: forge.pki.oids.rsaEncryption, - key: cert.publicKey - } - }); - }, + if (socket[kError]) { + throw socket[kError] + } - /** - * Encrypt enveloped content. - * - * This function supports two optional arguments, cipher and key, which - * can be used to influence symmetric encryption. Unless cipher is - * provided, the cipher specified in encryptedContent.algorithm is used - * (defaults to AES-256-CBC). If no key is provided, encryptedContent.key - * is (re-)used. If that one's not set, a random key will be generated - * automatically. - * - * @param [key] The key to be used for symmetric encryption. - * @param [cipher] The OID of the symmetric cipher to use. 
- */ - encrypt: function(key, cipher) { - // Part 1: Symmetric encryption - if(msg.encryptedContent.content === undefined) { - cipher = cipher || msg.encryptedContent.algorithm; - key = key || msg.encryptedContent.key; - - var keyLen, ivLen, ciphFn; - switch(cipher) { - case forge.pki.oids['aes128-CBC']: - keyLen = 16; - ivLen = 16; - ciphFn = forge.aes.createEncryptionCipher; - break; - - case forge.pki.oids['aes192-CBC']: - keyLen = 24; - ivLen = 16; - ciphFn = forge.aes.createEncryptionCipher; - break; - - case forge.pki.oids['aes256-CBC']: - keyLen = 32; - ivLen = 16; - ciphFn = forge.aes.createEncryptionCipher; - break; - - case forge.pki.oids['des-EDE3-CBC']: - keyLen = 24; - ivLen = 8; - ciphFn = forge.des.createEncryptionCipher; - break; - - default: - throw new Error('Unsupported symmetric cipher, OID ' + cipher); - } - - if(key === undefined) { - key = forge.util.createBuffer(forge.random.getBytes(keyLen)); - } else if(key.length() != keyLen) { - throw new Error('Symmetric key has wrong length; ' + - 'got ' + key.length() + ' bytes, expected ' + keyLen + '.'); - } - - // Keep a copy of the key & IV in the object, so the caller can - // use it for whatever reason. - msg.encryptedContent.algorithm = cipher; - msg.encryptedContent.key = key; - msg.encryptedContent.parameter = forge.util.createBuffer( - forge.random.getBytes(ivLen)); - - var ciph = ciphFn(key); - ciph.start(msg.encryptedContent.parameter.copy()); - ciph.update(msg.content); - - // The finish function does PKCS#7 padding by default, therefore - // no action required by us. - if(!ciph.finish()) { - throw new Error('Symmetric encryption failed.'); - } - - msg.encryptedContent.content = ciph.output; - } + if (socket.destroyed) { + return + } - // Part 2: asymmetric encryption for each recipient - for(var i = 0; i < msg.recipients.length; ++i) { - var recipient = msg.recipients[i]; + if (bytesWritten === 0) { + if (expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. - // Nothing to do, encryption already done. - if(recipient.encryptedContent.content !== undefined) { - continue; - } + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + socket.write(`${header}\r\n`, 'latin1') + } + } else if (contentLength === null) { + socket.write('\r\n0\r\n\r\n', 'latin1') + } - switch(recipient.encryptedContent.algorithm) { - case forge.pki.oids.rsaEncryption: - recipient.encryptedContent.content = - recipient.encryptedContent.key.encrypt( - msg.encryptedContent.key.data); - break; + if (contentLength !== null && bytesWritten !== contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } else { + process.emitWarning(new RequestContentLengthMismatchError()) + } + } - default: - throw new Error('Unsupported asymmetric cipher, OID ' + - recipient.encryptedContent.algorithm); - } + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() } } - }; - return msg; -}; -/** - * Converts a single recipient from an ASN.1 object. - * - * @param obj the ASN.1 RecipientInfo. - * - * @return the recipient object. 
- */ -function _recipientFromAsn1(obj) { - // validate EnvelopedData content block and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, p7.asn1.recipientInfoValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 RecipientInfo. ' + - 'ASN.1 object is not an PKCS#7 RecipientInfo.'); - error.errors = errors; - throw error; + resume(client) } - return { - version: capture.version.charCodeAt(0), - issuer: forge.pki.RDNAttributesAsArray(capture.issuer), - serialNumber: forge.util.createBuffer(capture.serial).toHex(), - encryptedContent: { - algorithm: asn1.derToOid(capture.encAlgorithm), - parameter: capture.encParameter ? capture.encParameter.value : undefined, - content: capture.encKey - } - }; -} + destroy (err) { + const { socket, client } = this -/** - * Converts a single recipient object to an ASN.1 object. - * - * @param obj the recipient object. - * - * @return the ASN.1 RecipientInfo. - */ -function _recipientToAsn1(obj) { - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(obj.version).getBytes()), - // IssuerAndSerialNumber - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Name - forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}), - // Serial - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(obj.serialNumber)) - ]), - // KeyEncryptionAlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(obj.encryptedContent.algorithm).getBytes()), - // Parameter, force NULL, only RSA supported for now. - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // EncryptedKey - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - obj.encryptedContent.content) - ]); -} + socket[kWriting] = false -/** - * Map a set of RecipientInfo ASN.1 objects to recipient objects. - * - * @param infos an array of ASN.1 representations RecipientInfo (i.e. SET OF). - * - * @return an array of recipient objects. - */ -function _recipientsFromAsn1(infos) { - var ret = []; - for(var i = 0; i < infos.length; ++i) { - ret.push(_recipientFromAsn1(infos[i])); + if (err) { + assert(client[kRunning] <= 1, 'pipeline should only contain this request') + util.destroy(socket, err) + } } - return ret; } -/** - * Map an array of recipient objects to ASN.1 RecipientInfo objects. - * - * @param recipients an array of recipientInfo objects. - * - * @return an array of ASN.1 RecipientInfos. - */ -function _recipientsToAsn1(recipients) { - var ret = []; - for(var i = 0; i < recipients.length; ++i) { - ret.push(_recipientToAsn1(recipients[i])); +function errorRequest (client, request, err) { + try { + request.onError(err) + assert(request.aborted) + } catch (err) { + client.emit('error', err) } - return ret; } -/** - * Converts a single signer from an ASN.1 object. - * - * @param obj the ASN.1 representation of a SignerInfo. - * - * @return the signer object. - */ -function _signerFromAsn1(obj) { - // validate EnvelopedData content block and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, p7.asn1.signerInfoValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 SignerInfo. 
' + - 'ASN.1 object is not an PKCS#7 SignerInfo.'); - error.errors = errors; - throw error; - } - - var rval = { - version: capture.version.charCodeAt(0), - issuer: forge.pki.RDNAttributesAsArray(capture.issuer), - serialNumber: forge.util.createBuffer(capture.serial).toHex(), - digestAlgorithm: asn1.derToOid(capture.digestAlgorithm), - signatureAlgorithm: asn1.derToOid(capture.signatureAlgorithm), - signature: capture.signature, - authenticatedAttributes: [], - unauthenticatedAttributes: [] - }; +module.exports = Client - // TODO: convert attributes - var authenticatedAttributes = capture.authenticatedAttributes || []; - var unauthenticatedAttributes = capture.unauthenticatedAttributes || []; - return rval; -} +/***/ }), -/** - * Converts a single signerInfo object to an ASN.1 object. - * - * @param obj the signerInfo object. - * - * @return the ASN.1 representation of a SignerInfo. - */ -function _signerToAsn1(obj) { - // SignerInfo - var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(obj.version).getBytes()), - // issuerAndSerialNumber - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // name - forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}), - // serial - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(obj.serialNumber)) - ]), - // digestAlgorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(obj.digestAlgorithm).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]) - ]); +/***/ 6436: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // authenticatedAttributes (OPTIONAL) - if(obj.authenticatedAttributesAsn1) { - // add ASN.1 previously generated during signing - rval.value.push(obj.authenticatedAttributesAsn1); - } +"use strict"; - // digestEncryptionAlgorithm - rval.value.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(obj.signatureAlgorithm).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ])); - // encryptedDigest - rval.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, obj.signature)); +/* istanbul ignore file: only for Node 12 */ - // unauthenticatedAttributes (OPTIONAL) - if(obj.unauthenticatedAttributes.length > 0) { - // [1] IMPLICIT - var attrsAsn1 = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, []); - for(var i = 0; i < obj.unauthenticatedAttributes.length; ++i) { - var attr = obj.unauthenticatedAttributes[i]; - attrsAsn1.values.push(_attributeToAsn1(attr)); - } - rval.value.push(attrsAsn1); - } +const { kConnected, kSize } = __nccwpck_require__(2785) - return rval; -} +class CompatWeakRef { + constructor (value) { + this.value = value + } -/** - * Map a set of SignerInfo ASN.1 objects to an array of signer objects. - * - * @param signerInfoAsn1s an array of ASN.1 SignerInfos (i.e. SET OF). - * - * @return an array of signers objects. - */ -function _signersFromAsn1(signerInfoAsn1s) { - var ret = []; - for(var i = 0; i < signerInfoAsn1s.length; ++i) { - ret.push(_signerFromAsn1(signerInfoAsn1s[i])); + deref () { + return this.value[kConnected] === 0 && this.value[kSize] === 0 + ? 
undefined + : this.value } - return ret; } -/** - * Map an array of signer objects to ASN.1 objects. - * - * @param signers an array of signer objects. - * - * @return an array of ASN.1 SignerInfos. - */ -function _signersToAsn1(signers) { - var ret = []; - for(var i = 0; i < signers.length; ++i) { - ret.push(_signerToAsn1(signers[i])); +class CompatFinalizer { + constructor (finalizer) { + this.finalizer = finalizer } - return ret; -} -/** - * Convert an attribute object to an ASN.1 Attribute. - * - * @param attr the attribute object. - * - * @return the ASN.1 Attribute. - */ -function _attributeToAsn1(attr) { - var value; - - // TODO: generalize to support more attributes - if(attr.type === forge.pki.oids.contentType) { - value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.value).getBytes()); - } else if(attr.type === forge.pki.oids.messageDigest) { - value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - attr.value.bytes()); - } else if(attr.type === forge.pki.oids.signingTime) { - /* Note per RFC 2985: Dates between 1 January 1950 and 31 December 2049 - (inclusive) MUST be encoded as UTCTime. Any dates with year values - before 1950 or after 2049 MUST be encoded as GeneralizedTime. [Further,] - UTCTime values MUST be expressed in Greenwich Mean Time (Zulu) and MUST - include seconds (i.e., times are YYMMDDHHMMSSZ), even where the - number of seconds is zero. Midnight (GMT) must be represented as - "YYMMDD000000Z". */ - // TODO: make these module-level constants - var jan_1_1950 = new Date('1950-01-01T00:00:00Z'); - var jan_1_2050 = new Date('2050-01-01T00:00:00Z'); - var date = attr.value; - if(typeof date === 'string') { - // try to parse date - var timestamp = Date.parse(date); - if(!isNaN(timestamp)) { - date = new Date(timestamp); - } else if(date.length === 13) { - // YYMMDDHHMMSSZ (13 chars for UTCTime) - date = asn1.utcTimeToDate(date); - } else { - // assume generalized time - date = asn1.generalizedTimeToDate(date); - } + register (dispatcher, key) { + if (dispatcher.on) { + dispatcher.on('disconnect', () => { + if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { + this.finalizer(key) + } + }) } - - if(date >= jan_1_1950 && date < jan_1_2050) { - value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false, - asn1.dateToUtcTime(date)); - } else { - value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false, - asn1.dateToGeneralizedTime(date)); - } - } - - // TODO: expose as common API call - // create a RelativeDistinguishedName set - // each value in the set is an AttributeTypeAndValue first - // containing the type (an OID) and second the value - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AttributeType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.type).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - // AttributeValue - value - ]) - ]); -} - -/** - * Map messages encrypted content to ASN.1 objects. - * - * @param ec The encryptedContent object of the message. - * - * @return ASN.1 representation of the encryptedContent object (SEQUENCE). 
- */ -function _encryptedContentToAsn1(ec) { - return [ - // ContentType, always Data for the moment - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(forge.pki.oids.data).getBytes()), - // ContentEncryptionAlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // Algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(ec.algorithm).getBytes()), - // Parameters (IV) - !ec.parameter ? - undefined : - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - ec.parameter.getBytes()) - ]), - // [0] EncryptedContent - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - ec.content.getBytes()) - ]) - ]; + } } -/** - * Reads the "common part" of an PKCS#7 content block (in ASN.1 format) - * - * This function reads the "common part" of the PKCS#7 content blocks - * EncryptedData and EnvelopedData, i.e. version number and symmetrically - * encrypted content block. - * - * The result of the ASN.1 validate and capture process is returned - * to allow the caller to extract further data, e.g. the list of recipients - * in case of a EnvelopedData object. - * - * @param msg the PKCS#7 object to read the data to. - * @param obj the ASN.1 representation of the content block. - * @param validator the ASN.1 structure validator object to use. - * - * @return the value map captured by validator object. - */ -function _fromAsn1(msg, obj, validator) { - var capture = {}; - var errors = []; - if(!asn1.validate(obj, validator, capture, errors)) { - var error = new Error('Cannot read PKCS#7 message. ' + - 'ASN.1 object is not a supported PKCS#7 message.'); - error.errors = error; - throw error; - } - - // Check contentType, so far we only support (raw) Data. - var contentType = asn1.derToOid(capture.contentType); - if(contentType !== forge.pki.oids.data) { - throw new Error('Unsupported PKCS#7 message. 
' + - 'Only wrapped ContentType Data supported.'); - } - - if(capture.encryptedContent) { - var content = ''; - if(forge.util.isArray(capture.encryptedContent)) { - for(var i = 0; i < capture.encryptedContent.length; ++i) { - if(capture.encryptedContent[i].type !== asn1.Type.OCTETSTRING) { - throw new Error('Malformed PKCS#7 message, expecting encrypted ' + - 'content constructed of only OCTET STRING objects.'); - } - content += capture.encryptedContent[i].value; - } - } else { - content = capture.encryptedContent; +module.exports = function () { + // FIXME: remove workaround when the Node bug is fixed + // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 + if (process.env.NODE_V8_COVERAGE) { + return { + WeakRef: CompatWeakRef, + FinalizationRegistry: CompatFinalizer } - msg.encryptedContent = { - algorithm: asn1.derToOid(capture.encAlgorithm), - parameter: forge.util.createBuffer(capture.encParameter.value), - content: forge.util.createBuffer(content) - }; } - - if(capture.content) { - var content = ''; - if(forge.util.isArray(capture.content)) { - for(var i = 0; i < capture.content.length; ++i) { - if(capture.content[i].type !== asn1.Type.OCTETSTRING) { - throw new Error('Malformed PKCS#7 message, expecting ' + - 'content constructed of only OCTET STRING objects.'); - } - content += capture.content[i].value; - } - } else { - content = capture.content; - } - msg.content = forge.util.createBuffer(content); + return { + WeakRef: global.WeakRef || CompatWeakRef, + FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer } - - msg.version = capture.version.charCodeAt(0); - msg.rawCapture = capture; - - return capture; } -/** - * Decrypt the symmetrically encrypted content block of the PKCS#7 message. - * - * Decryption is skipped in case the PKCS#7 message object already has a - * (decrypted) content attribute. The algorithm, key and cipher parameters - * (probably the iv) are taken from the encryptedContent attribute of the - * message object. - * - * @param The PKCS#7 message object. 
- */ -function _decryptContent(msg) { - if(msg.encryptedContent.key === undefined) { - throw new Error('Symmetric key not available.'); - } - if(msg.content === undefined) { - var ciph; +/***/ }), - switch(msg.encryptedContent.algorithm) { - case forge.pki.oids['aes128-CBC']: - case forge.pki.oids['aes192-CBC']: - case forge.pki.oids['aes256-CBC']: - ciph = forge.aes.createDecryptionCipher(msg.encryptedContent.key); - break; +/***/ 663: +/***/ ((module) => { - case forge.pki.oids['desCBC']: - case forge.pki.oids['des-EDE3-CBC']: - ciph = forge.des.createDecryptionCipher(msg.encryptedContent.key); - break; +"use strict"; - default: - throw new Error('Unsupported symmetric cipher, OID ' + - msg.encryptedContent.algorithm); - } - ciph.start(msg.encryptedContent.parameter); - ciph.update(msg.encryptedContent.content); - if(!ciph.finish()) { - throw new Error('Symmetric decryption failed.'); - } +// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size +const maxAttributeValueSize = 1024 - msg.content = ciph.output; - } +// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size +const maxNameValuePairSize = 4096 + +module.exports = { + maxAttributeValueSize, + maxNameValuePairSize } /***/ }), -/***/ 266: +/***/ 1724: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + +const { parseSetCookie } = __nccwpck_require__(4408) +const { stringify, getHeadersList } = __nccwpck_require__(3121) +const { webidl } = __nccwpck_require__(1744) +const { Headers } = __nccwpck_require__(554) + /** - * Javascript implementation of ASN.1 validators for PKCS#7 v1.5. - * - * @author Dave Longley - * @author Stefan Siegl - * - * Copyright (c) 2012-2015 Digital Bazaar, Inc. - * Copyright (c) 2012 Stefan Siegl - * - * The ASN.1 representation of PKCS#7 is as follows - * (see RFC #2315 for details, http://www.ietf.org/rfc/rfc2315.txt): - * - * A PKCS#7 message consists of a ContentInfo on root level, which may - * contain any number of further ContentInfo nested into it. 
- * - * ContentInfo ::= SEQUENCE { - * contentType ContentType, - * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL - * } - * - * ContentType ::= OBJECT IDENTIFIER - * - * EnvelopedData ::= SEQUENCE { - * version Version, - * recipientInfos RecipientInfos, - * encryptedContentInfo EncryptedContentInfo - * } - * - * EncryptedData ::= SEQUENCE { - * version Version, - * encryptedContentInfo EncryptedContentInfo - * } - * - * id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) - * us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 } - * - * SignedData ::= SEQUENCE { - * version INTEGER, - * digestAlgorithms DigestAlgorithmIdentifiers, - * contentInfo ContentInfo, - * certificates [0] IMPLICIT Certificates OPTIONAL, - * crls [1] IMPLICIT CertificateRevocationLists OPTIONAL, - * signerInfos SignerInfos - * } - * - * SignerInfos ::= SET OF SignerInfo - * - * SignerInfo ::= SEQUENCE { - * version Version, - * issuerAndSerialNumber IssuerAndSerialNumber, - * digestAlgorithm DigestAlgorithmIdentifier, - * authenticatedAttributes [0] IMPLICIT Attributes OPTIONAL, - * digestEncryptionAlgorithm DigestEncryptionAlgorithmIdentifier, - * encryptedDigest EncryptedDigest, - * unauthenticatedAttributes [1] IMPLICIT Attributes OPTIONAL - * } - * - * EncryptedDigest ::= OCTET STRING - * - * Attributes ::= SET OF Attribute - * - * Attribute ::= SEQUENCE { - * attrType OBJECT IDENTIFIER, - * attrValues SET OF AttributeValue - * } - * - * AttributeValue ::= ANY - * - * Version ::= INTEGER - * - * RecipientInfos ::= SET OF RecipientInfo - * - * EncryptedContentInfo ::= SEQUENCE { - * contentType ContentType, - * contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier, - * encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL - * } - * - * ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier - * - * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters - * for the algorithm, if any. In the case of AES and DES3, there is only one, - * the IV. 
- * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * EncryptedContent ::= OCTET STRING - * - * RecipientInfo ::= SEQUENCE { - * version Version, - * issuerAndSerialNumber IssuerAndSerialNumber, - * keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier, - * encryptedKey EncryptedKey - * } - * - * IssuerAndSerialNumber ::= SEQUENCE { - * issuer Name, - * serialNumber CertificateSerialNumber - * } - * - * CertificateSerialNumber ::= INTEGER - * - * KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier - * - * EncryptedKey ::= OCTET STRING + * @typedef {Object} Cookie + * @property {string} name + * @property {string} value + * @property {Date|number|undefined} expires + * @property {number|undefined} maxAge + * @property {string|undefined} domain + * @property {string|undefined} path + * @property {boolean|undefined} secure + * @property {boolean|undefined} httpOnly + * @property {'Strict'|'Lax'|'None'} sameSite + * @property {string[]} unparsed */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(8339); - -// shortcut for ASN.1 API -var asn1 = forge.asn1; - -// shortcut for PKCS#7 API -var p7v = module.exports = forge.pkcs7asn1 = forge.pkcs7asn1 || {}; -forge.pkcs7 = forge.pkcs7 || {}; -forge.pkcs7.asn1 = p7v; - -var contentInfoValidator = { - name: 'ContentInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'ContentInfo.ContentType', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'contentType' - }, { - name: 'ContentInfo.content', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - captureAsn1: 'content' - }] -}; -p7v.contentInfoValidator = contentInfoValidator; - -var encryptedContentInfoValidator = { - name: 'EncryptedContentInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedContentInfo.contentType', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'contentType' - }, { - name: 'EncryptedContentInfo.contentEncryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedContentInfo.contentEncryptionAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encAlgorithm' - }, { - name: 'EncryptedContentInfo.contentEncryptionAlgorithm.parameter', - tagClass: asn1.Class.UNIVERSAL, - captureAsn1: 'encParameter' - }] - }, { - name: 'EncryptedContentInfo.encryptedContent', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - /* The PKCS#7 structure output by OpenSSL somewhat differs from what - * other implementations do generate. - * - * OpenSSL generates a structure like this: - * SEQUENCE { - * ... - * [0] - * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38 - * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45 - * ... - * } - * - * Whereas other implementations (and this PKCS#7 module) generate: - * SEQUENCE { - * ... - * [0] { - * OCTET STRING - * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38 - * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45 - * ... - * } - * } - * - * In order to support both, we just capture the context specific - * field here. The OCTET STRING bit is removed below. 
- */ - capture: 'encryptedContent', - captureAsn1: 'encryptedContentAsn1' - }] -}; - -p7v.envelopedDataValidator = { - name: 'EnvelopedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EnvelopedData.Version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, { - name: 'EnvelopedData.RecipientInfos', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - captureAsn1: 'recipientInfos' - }].concat(encryptedContentInfoValidator) -}; -p7v.encryptedDataValidator = { - name: 'EncryptedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'EncryptedData.Version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }].concat(encryptedContentInfoValidator) -}; +/** + * @param {Headers} headers + * @returns {Record} + */ +function getCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) -var signerValidator = { - name: 'SignerInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignerInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false - }, { - name: 'SignerInfo.issuerAndSerialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignerInfo.issuerAndSerialNumber.issuer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'issuer' - }, { - name: 'SignerInfo.issuerAndSerialNumber.serialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'serial' - }] - }, { - name: 'SignerInfo.digestAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignerInfo.digestAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'digestAlgorithm' - }, { - name: 'SignerInfo.digestAlgorithm.parameter', - tagClass: asn1.Class.UNIVERSAL, - constructed: false, - captureAsn1: 'digestParameter', - optional: true - }] - }, { - name: 'SignerInfo.authenticatedAttributes', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - capture: 'authenticatedAttributes' - }, { - name: 'SignerInfo.digestEncryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - capture: 'signatureAlgorithm' - }, { - name: 'SignerInfo.encryptedDigest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'signature' - }, { - name: 'SignerInfo.unauthenticatedAttributes', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - constructed: true, - optional: true, - capture: 'unauthenticatedAttributes' - }] -}; + webidl.brandCheck(headers, Headers, { strict: false }) -p7v.signedDataValidator = { - name: 'SignedData', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'SignedData.Version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, { - name: 'SignedData.DigestAlgorithms', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true, - captureAsn1: 'digestAlgorithms' - }, - contentInfoValidator, - { - name: 'SignedData.Certificates', - tagClass: 
asn1.Class.CONTEXT_SPECIFIC, - type: 0, - optional: true, - captureAsn1: 'certificates' - }, { - name: 'SignedData.CertificateRevocationLists', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - optional: true, - captureAsn1: 'crls' - }, { - name: 'SignedData.SignerInfos', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - capture: 'signerInfos', - optional: true, - value: [signerValidator] - }] -}; + const cookie = headers.get('cookie') + const out = {} -p7v.recipientInfoValidator = { - name: 'RecipientInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'RecipientInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'version' - }, { - name: 'RecipientInfo.issuerAndSerial', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'RecipientInfo.issuerAndSerial.issuer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'issuer' - }, { - name: 'RecipientInfo.issuerAndSerial.serialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'serial' - }] - }, { - name: 'RecipientInfo.keyEncryptionAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'RecipientInfo.keyEncryptionAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'encAlgorithm' - }, { - name: 'RecipientInfo.keyEncryptionAlgorithm.parameter', - tagClass: asn1.Class.UNIVERSAL, - constructed: false, - captureAsn1: 'encParameter', - optional: true - }] - }, { - name: 'RecipientInfo.encryptedKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'encKey' - }] -}; + if (!cookie) { + return out + } + for (const piece of cookie.split(';')) { + const [name, ...value] = piece.split('=') -/***/ }), + out[name.trim()] = value.join('=') + } -/***/ 6924: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return out +} /** - * Javascript implementation of a basic Public Key Infrastructure, including - * support for RSA public and private keys. - * - * @author Dave Longley - * - * Copyright (c) 2010-2013 Digital Bazaar, Inc. + * @param {Headers} headers + * @param {string} name + * @param {{ path?: string, domain?: string }|undefined} attributes + * @returns {void} */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(1925); -__nccwpck_require__(1281); -__nccwpck_require__(154); -__nccwpck_require__(1611); -__nccwpck_require__(466); -__nccwpck_require__(4376); -__nccwpck_require__(3921); -__nccwpck_require__(8339); -__nccwpck_require__(8180); - -// shortcut for asn.1 API -var asn1 = forge.asn1; - -/* Public Key Infrastructure (PKI) implementation. 
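// A minimal sketch of the name=value splitting that getCookies() performs on
// the Cookie request header, shown here without the Headers/webidl machinery
// (illustrative helper, not part of the bundle):
function parseCookieHeaderSketch (cookieHeader) {
  const out = {}
  if (!cookieHeader) return out
  for (const piece of cookieHeader.split(';')) {
    const [name, ...value] = piece.split('=')
    out[name.trim()] = value.join('=') // later '=' characters stay in the value
  }
  return out
}
// parseCookieHeaderSketch('a=1; b=2; token=x=y') -> { a: '1', b: '2', token: 'x=y' }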
*/ -var pki = module.exports = forge.pki = forge.pki || {}; +function deleteCookie (headers, name, attributes) { + webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + name = webidl.converters.DOMString(name) + attributes = webidl.converters.DeleteCookieAttributes(attributes) + + // Matches behavior of + // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 + setCookie(headers, { + name, + value: '', + expires: new Date(0), + ...attributes + }) +} /** - * NOTE: THIS METHOD IS DEPRECATED. Use pem.decode() instead. - * - * Converts PEM-formatted data to DER. - * - * @param pem the PEM-formatted data. - * - * @return the DER-formatted data. + * @param {Headers} headers + * @returns {Cookie[]} */ -pki.pemToDer = function(pem) { - var msg = forge.pem.decode(pem)[0]; - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert PEM to DER; PEM is encrypted.'); +function getSetCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookies = getHeadersList(headers).cookies + + if (!cookies) { + return [] } - return forge.util.createBuffer(msg.body); -}; + + // In older versions of undici, cookies is a list of name:value. + return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair)) +} /** - * Converts an RSA private key from PEM format. - * - * @param pem the PEM-formatted private key. - * - * @return the private key. + * @param {Headers} headers + * @param {Cookie} cookie + * @returns {void} */ -pki.privateKeyFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; +function setCookie (headers, cookie) { + webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) - if(msg.type !== 'PRIVATE KEY' && msg.type !== 'RSA PRIVATE KEY') { - var error = new Error('Could not convert private key from PEM; PEM ' + - 'header type is not "PRIVATE KEY" or "RSA PRIVATE KEY".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert private key from PEM; PEM is encrypted.'); + webidl.brandCheck(headers, Headers, { strict: false }) + + cookie = webidl.converters.Cookie(cookie) + + const str = stringify(cookie) + + if (str) { + headers.append('Set-Cookie', stringify(cookie)) } +} - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body); +webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + } +]) - return pki.privateKeyFromAsn1(obj); -}; +webidl.converters.Cookie = webidl.dictionaryConverter([ + { + converter: webidl.converters.DOMString, + key: 'name' + }, + { + converter: webidl.converters.DOMString, + key: 'value' + }, + { + converter: webidl.nullableConverter((value) => { + if (typeof value === 'number') { + return webidl.converters['unsigned long long'](value) + } -/** - * Converts an RSA private key to PEM format. - * - * @param key the private key. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted private key. 
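// Usage sketch for the cookie helpers above (assumes an undici Headers
// instance; names and values are made up):
// const { Headers } = require('undici')
// const headers = new Headers()
// setCookie(headers, { name: 'session', value: 'abc123', path: '/' })
// getSetCookies(headers)           // -> [{ name: 'session', value: 'abc123', path: '/' }]
// deleteCookie(headers, 'session') // appends a Set-Cookie that expires immediately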
- */ -pki.privateKeyToPem = function(key, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'RSA PRIVATE KEY', - body: asn1.toDer(pki.privateKeyToAsn1(key)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + return new Date(value) + }), + key: 'expires', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters['long long']), + key: 'maxAge', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'secure', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'httpOnly', + defaultValue: null + }, + { + converter: webidl.converters.USVString, + key: 'sameSite', + allowedValues: ['Strict', 'Lax', 'None'] + }, + { + converter: webidl.sequenceConverter(webidl.converters.DOMString), + key: 'unparsed', + defaultValue: [] + } +]) -/** - * Converts a PrivateKeyInfo to PEM format. - * - * @param pki the PrivateKeyInfo. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted private key. - */ -pki.privateKeyInfoToPem = function(pki, maxline) { - // convert to DER, then PEM-encode - var msg = { - type: 'PRIVATE KEY', - body: asn1.toDer(pki).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; +module.exports = { + getCookies, + deleteCookie, + getSetCookies, + setCookie +} /***/ }), -/***/ 6861: +/***/ 4408: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + +const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(663) +const { isCTLExcludingHtab } = __nccwpck_require__(3121) +const { collectASequenceOfCodePointsFast } = __nccwpck_require__(685) +const assert = __nccwpck_require__(9491) + /** - * Prime number generation API. - * - * @author Dave Longley - * - * Copyright (c) 2014 Digital Bazaar, Inc. + * @description Parses the field-value attributes of a set-cookie header string. + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} header + * @returns if the header is invalid, null will be returned */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); -__nccwpck_require__(7052); -__nccwpck_require__(7821); +function parseSetCookie (header) { + // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F + // character (CTL characters excluding HTAB): Abort these steps and + // ignore the set-cookie-string entirely. + if (isCTLExcludingHtab(header)) { + return null + } -(function() { + let nameValuePair = '' + let unparsedAttributes = '' + let name = '' + let value = '' -// forge.prime already defined -if(forge.prime) { - module.exports = forge.prime; - return; -} + // 2. If the set-cookie-string contains a %x3B (";") character: + if (header.includes(';')) { + // 1. The name-value-pair string consists of the characters up to, + // but not including, the first %x3B (";"), and the unparsed- + // attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question). + const position = { position: 0 } + + nameValuePair = collectASequenceOfCodePointsFast(';', header, position) + unparsedAttributes = header.slice(position.position) + } else { + // Otherwise: + + // 1. 
The name-value-pair string consists of all the characters + // contained in the set-cookie-string, and the unparsed- + // attributes is the empty string. + nameValuePair = header + } + + // 3. If the name-value-pair string lacks a %x3D ("=") character, then + // the name string is empty, and the value string is the value of + // name-value-pair. + if (!nameValuePair.includes('=')) { + value = nameValuePair + } else { + // Otherwise, the name string consists of the characters up to, but + // not including, the first %x3D ("=") character, and the (possibly + // empty) value string consists of the characters after the first + // %x3D ("=") character. + const position = { position: 0 } + name = collectASequenceOfCodePointsFast( + '=', + nameValuePair, + position + ) + value = nameValuePair.slice(position.position + 1) + } -/* PRIME API */ -var prime = module.exports = forge.prime = forge.prime || {}; + // 4. Remove any leading or trailing WSP characters from the name + // string and the value string. + name = name.trim() + value = value.trim() -var BigInteger = forge.jsbn.BigInteger; + // 5. If the sum of the lengths of the name string and the value string + // is more than 4096 octets, abort these steps and ignore the set- + // cookie-string entirely. + if (name.length + value.length > maxNameValuePairSize) { + return null + } -// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 -var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; -var THIRTY = new BigInteger(null); -THIRTY.fromInt(30); -var op_or = function(x, y) {return x|y;}; + // 6. The cookie-name is the name string, and the cookie-value is the + // value string. + return { + name, value, ...parseUnparsedAttributes(unparsedAttributes) + } +} /** - * Generates a random probable prime with the given number of bits. - * - * Alternative algorithms can be specified by name as a string or as an - * object with custom options like so: - * - * { - * name: 'PRIMEINC', - * options: { - * maxBlockTime: , - * millerRabinTests: , - * workerScript: , - * workers: . - * workLoad: the size of the work load, ie: number of possible prime - * numbers for each web worker to check per work assignment, - * (default: 100). - * } - * } - * - * @param bits the number of bits for the prime number. - * @param options the options to use. - * [algorithm] the algorithm to use (default: 'PRIMEINC'). - * [prng] a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". - * - * @return callback(err, num) called once the operation completes. + * Parses the remaining attributes of a set-cookie header + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} unparsedAttributes + * @param {[Object.]={}} cookieAttributeList */ -prime.generateProbablePrime = function(bits, options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; +function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { + // 1. If the unparsed-attributes string is empty, skip the rest of + // these steps. 
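// Worked example of the splitting above (header value is made up):
// parseSetCookie('id=a3fWa; Path=/; Secure')
//   name-value-pair    -> 'id=a3fWa'
//   unparsedAttributes -> '; Path=/; Secure'
//   result             -> { name: 'id', value: 'a3fWa', path: '/', secure: true }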
+ if (unparsedAttributes.length === 0) { + return cookieAttributeList } - options = options || {}; - // default to PRIMEINC algorithm - var algorithm = options.algorithm || 'PRIMEINC'; - if(typeof algorithm === 'string') { - algorithm = {name: algorithm}; - } - algorithm.options = algorithm.options || {}; - - // create prng with api that matches BigInteger secure random - var prng = options.prng || forge.random; - var rng = { - // x is an array to fill with bytes - nextBytes: function(x) { - var b = prng.getBytesSync(x.length); - for(var i = 0; i < x.length; ++i) { - x[i] = b.charCodeAt(i); - } - } - }; + // 2. Discard the first character of the unparsed-attributes (which + // will be a %x3B (";") character). + assert(unparsedAttributes[0] === ';') + unparsedAttributes = unparsedAttributes.slice(1) - if(algorithm.name === 'PRIMEINC') { - return primeincFindPrime(bits, rng, algorithm.options, callback); - } + let cookieAv = '' - throw new Error('Invalid prime generation algorithm: ' + algorithm.name); -}; + // 3. If the remaining unparsed-attributes contains a %x3B (";") + // character: + if (unparsedAttributes.includes(';')) { + // 1. Consume the characters of the unparsed-attributes up to, but + // not including, the first %x3B (";") character. + cookieAv = collectASequenceOfCodePointsFast( + ';', + unparsedAttributes, + { position: 0 } + ) + unparsedAttributes = unparsedAttributes.slice(cookieAv.length) + } else { + // Otherwise: -function primeincFindPrime(bits, rng, options, callback) { - if('workers' in options) { - return primeincFindPrimeWithWorkers(bits, rng, options, callback); + // 1. Consume the remainder of the unparsed-attributes. + cookieAv = unparsedAttributes + unparsedAttributes = '' } - return primeincFindPrimeWithoutWorkers(bits, rng, options, callback); -} -function primeincFindPrimeWithoutWorkers(bits, rng, options, callback) { - // initialize random number - var num = generateRandom(bits, rng); + // Let the cookie-av string be the characters consumed in this step. - /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The - number we are given is always aligned at 30k + 1. Each time the number is - determined not to be prime we add to get to the next 'i', eg: if the number - was at 30k + 1 we add 6. */ - var deltaIdx = 0; + let attributeName = '' + let attributeValue = '' - // get required number of MR tests - var mrTests = getMillerRabinTests(num.bitLength()); - if('millerRabinTests' in options) { - mrTests = options.millerRabinTests; - } + // 4. If the cookie-av string contains a %x3D ("=") character: + if (cookieAv.includes('=')) { + // 1. The (possibly empty) attribute-name string consists of the + // characters up to, but not including, the first %x3D ("=") + // character, and the (possibly empty) attribute-value string + // consists of the characters after the first %x3D ("=") + // character. + const position = { position: 0 } - // find prime nearest to 'num' for maxBlockTime ms - // 10 ms gives 5ms of leeway for other calculations before dropping - // below 60fps (1000/60 == 16.67), but in reality, the number will - // likely be higher due to an 'atomic' big int modPow - var maxBlockTime = 10; - if('maxBlockTime' in options) { - maxBlockTime = options.maxBlockTime; - } + attributeName = collectASequenceOfCodePointsFast( + '=', + cookieAv, + position + ) + attributeValue = cookieAv.slice(position.position + 1) + } else { + // Otherwise: - _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback); -} + // 1. 
The attribute-name string consists of the entire cookie-av + // string, and the attribute-value string is empty. + attributeName = cookieAv + } + + // 5. Remove any leading or trailing WSP characters from the attribute- + // name string and the attribute-value string. + attributeName = attributeName.trim() + attributeValue = attributeValue.trim() + + // 6. If the attribute-value is longer than 1024 octets, ignore the + // cookie-av string and return to Step 1 of this algorithm. + if (attributeValue.length > maxAttributeValueSize) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 7. Process the attribute-name and attribute-value according to the + // requirements in the following subsections. (Notice that + // attributes with unrecognized attribute-names are ignored.) + const attributeNameLowercase = attributeName.toLowerCase() + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 + // If the attribute-name case-insensitively matches the string + // "Expires", the user agent MUST process the cookie-av as follows. + if (attributeNameLowercase === 'expires') { + // 1. Let the expiry-time be the result of parsing the attribute-value + // as cookie-date (see Section 5.1.1). + const expiryTime = new Date(attributeValue) + + // 2. If the attribute-value failed to parse as a cookie date, ignore + // the cookie-av. + + cookieAttributeList.expires = expiryTime + } else if (attributeNameLowercase === 'max-age') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 + // If the attribute-name case-insensitively matches the string "Max- + // Age", the user agent MUST process the cookie-av as follows. + + // 1. If the first character of the attribute-value is not a DIGIT or a + // "-" character, ignore the cookie-av. + const charCode = attributeValue.charCodeAt(0) + + if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 2. If the remainder of attribute-value contains a non-DIGIT + // character, ignore the cookie-av. + if (!/^\d+$/.test(attributeValue)) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 3. Let delta-seconds be the attribute-value converted to an integer. + const deltaSeconds = Number(attributeValue) + + // 4. Let cookie-age-limit be the maximum age of the cookie (which + // SHOULD be 400 days or less, see Section 4.1.2.2). + + // 5. Set delta-seconds to the smaller of its present value and cookie- + // age-limit. + // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) + + // 6. If delta-seconds is less than or equal to zero (0), let expiry- + // time be the earliest representable date and time. Otherwise, let + // the expiry-time be the current date and time plus delta-seconds + // seconds. + // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds + + // 7. Append an attribute to the cookie-attribute-list with an + // attribute-name of Max-Age and an attribute-value of expiry-time. + cookieAttributeList.maxAge = deltaSeconds + } else if (attributeNameLowercase === 'domain') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 + // If the attribute-name case-insensitively matches the string "Domain", + // the user agent MUST process the cookie-av as follows. + + // 1. Let cookie-domain be the attribute-value. + let cookieDomain = attributeValue + + // 2. 
If cookie-domain starts with %x2E ("."), let cookie-domain be + // cookie-domain without its leading %x2E ("."). + if (cookieDomain[0] === '.') { + cookieDomain = cookieDomain.slice(1) + } + + // 3. Convert the cookie-domain to lower case. + cookieDomain = cookieDomain.toLowerCase() + + // 4. Append an attribute to the cookie-attribute-list with an + // attribute-name of Domain and an attribute-value of cookie-domain. + cookieAttributeList.domain = cookieDomain + } else if (attributeNameLowercase === 'path') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 + // If the attribute-name case-insensitively matches the string "Path", + // the user agent MUST process the cookie-av as follows. + + // 1. If the attribute-value is empty or if the first character of the + // attribute-value is not %x2F ("/"): + let cookiePath = '' + if (attributeValue.length === 0 || attributeValue[0] !== '/') { + // 1. Let cookie-path be the default-path. + cookiePath = '/' + } else { + // Otherwise: -function _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) { - var start = +new Date(); - do { - // overflow, regenerate random number - if(num.bitLength() > bits) { - num = generateRandom(bits, rng); - } - // do primality test - if(num.isProbablePrime(mrTests)) { - return callback(null, num); + // 1. Let cookie-path be the attribute-value. + cookiePath = attributeValue } - // get next potential prime - num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); - } while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime)); - // keep trying later - forge.util.setImmediate(function() { - _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback); - }); -} + // 2. Append an attribute to the cookie-attribute-list with an + // attribute-name of Path and an attribute-value of cookie-path. + cookieAttributeList.path = cookiePath + } else if (attributeNameLowercase === 'secure') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 + // If the attribute-name case-insensitively matches the string "Secure", + // the user agent MUST append an attribute to the cookie-attribute-list + // with an attribute-name of Secure and an empty attribute-value. -// NOTE: This algorithm is indeterminate in nature because workers -// run in parallel looking at different segments of numbers. Even if this -// algorithm is run twice with the same input from a predictable RNG, it -// may produce different outputs. 
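// Concrete effect of the Max-Age/Domain/Path handling above (values made up):
// 'Max-Age=3600'        -> cookieAttributeList.maxAge === 3600 (delta seconds)
// 'Domain=.Example.COM' -> cookieAttributeList.domain === 'example.com'
//                          (leading '.' stripped, lower-cased)
// 'Path=docs'           -> cookieAttributeList.path === '/'
//                          (does not start with '/', so the default path is used)
// 'Path=/docs'          -> cookieAttributeList.path === '/docs'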
-function primeincFindPrimeWithWorkers(bits, rng, options, callback) { - // web workers unavailable - if(typeof Worker === 'undefined') { - return primeincFindPrimeWithoutWorkers(bits, rng, options, callback); - } - - // initialize random number - var num = generateRandom(bits, rng); - - // use web workers to generate keys - var numWorkers = options.workers; - var workLoad = options.workLoad || 100; - var range = workLoad * 30 / 8; - var workerScript = options.workerScript || 'forge/prime.worker.js'; - if(numWorkers === -1) { - return forge.util.estimateCores(function(err, cores) { - if(err) { - // default to 2 - cores = 2; - } - numWorkers = cores - 1; - generate(); - }); - } - generate(); + cookieAttributeList.secure = true + } else if (attributeNameLowercase === 'httponly') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 + // If the attribute-name case-insensitively matches the string + // "HttpOnly", the user agent MUST append an attribute to the cookie- + // attribute-list with an attribute-name of HttpOnly and an empty + // attribute-value. - function generate() { - // require at least 1 worker - numWorkers = Math.max(1, numWorkers); + cookieAttributeList.httpOnly = true + } else if (attributeNameLowercase === 'samesite') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 + // If the attribute-name case-insensitively matches the string + // "SameSite", the user agent MUST process the cookie-av as follows: - // TODO: consider optimizing by starting workers outside getPrime() ... - // note that in order to clean up they will have to be made internally - // asynchronous which may actually be slower + // 1. Let enforcement be "Default". + let enforcement = 'Default' - // start workers immediately - var workers = []; - for(var i = 0; i < numWorkers; ++i) { - // FIXME: fix path or use blob URLs - workers[i] = new Worker(workerScript); + const attributeValueLowercase = attributeValue.toLowerCase() + // 2. If cookie-av's attribute-value is a case-insensitive match for + // "None", set enforcement to "None". + if (attributeValueLowercase.includes('none')) { + enforcement = 'None' } - var running = numWorkers; - // listen for requests from workers and assign ranges to find prime - for(var i = 0; i < numWorkers; ++i) { - workers[i].addEventListener('message', workerMessage); + // 3. If cookie-av's attribute-value is a case-insensitive match for + // "Strict", set enforcement to "Strict". + if (attributeValueLowercase.includes('strict')) { + enforcement = 'Strict' } - /* Note: The distribution of random numbers is unknown. Therefore, each - web worker is continuously allocated a range of numbers to check for a - random number until one is found. + // 4. If cookie-av's attribute-value is a case-insensitive match for + // "Lax", set enforcement to "Lax". + if (attributeValueLowercase.includes('lax')) { + enforcement = 'Lax' + } - Every 30 numbers will be checked just 8 times, because prime numbers - have the form: + // 5. Append an attribute to the cookie-attribute-list with an + // attribute-name of "SameSite" and an attribute-value of + // enforcement. 
+ cookieAttributeList.sameSite = enforcement + } else { + cookieAttributeList.unparsed ??= [] - 30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this) + cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) + } - Therefore, if we want a web worker to run N checks before asking for - a new range of numbers, each range must contain N*30/8 numbers. + // 8. Return to Step 1 of this algorithm. + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) +} - For 100 checks (workLoad), this is a range of 375. */ +module.exports = { + parseSetCookie, + parseUnparsedAttributes +} - var found = false; - function workerMessage(e) { - // ignore message, prime already found - if(found) { - return; - } - --running; - var data = e.data; - if(data.found) { - // terminate all workers - for(var i = 0; i < workers.length; ++i) { - workers[i].terminate(); - } - found = true; - return callback(null, new BigInteger(data.prime, 16)); - } +/***/ }), - // overflow, regenerate random number - if(num.bitLength() > bits) { - num = generateRandom(bits, rng); - } +/***/ 3121: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // assign new range to check - var hex = num.toString(16); +"use strict"; - // start prime search - e.target.postMessage({ - hex: hex, - workLoad: workLoad - }); - num.dAddOffset(range, 0); +const assert = __nccwpck_require__(9491) +const { kHeadersList } = __nccwpck_require__(2785) + +function isCTLExcludingHtab (value) { + if (value.length === 0) { + return false + } + + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + (code >= 0x00 || code <= 0x08) || + (code >= 0x0A || code <= 0x1F) || + code === 0x7F + ) { + return false } } } /** - * Generates a random number using the given number of bits and RNG. - * - * @param bits the number of bits for the number. - * @param rng the random number generator to use. - * - * @return the random number. + CHAR = + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + * @param {string} name */ -function generateRandom(bits, rng) { - var num = new BigInteger(bits, rng); - // force MSB set - var bits1 = bits - 1; - if(!num.testBit(bits1)) { - num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num); +function validateCookieName (name) { + for (const char of name) { + const code = char.charCodeAt(0) + + if ( + (code <= 0x20 || code > 0x7F) || + char === '(' || + char === ')' || + char === '>' || + char === '<' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' + ) { + throw new Error('Invalid cookie name') + } } - // align number on 30k+1 boundary - num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0); - return num; } /** - * Returns the required number of Miller-Rabin tests to generate a - * prime with an error probability of (1/2)^80. - * - * See Handbook of Applied Cryptography Chapter 4, Table 4.4. - * - * @param bits the bit size. - * - * @return the required number of iterations. 
+ cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + ; US-ASCII characters excluding CTLs, + ; whitespace DQUOTE, comma, semicolon, + ; and backslash + * @param {string} value */ -function getMillerRabinTests(bits) { - if(bits <= 100) return 27; - if(bits <= 150) return 18; - if(bits <= 200) return 15; - if(bits <= 250) return 12; - if(bits <= 300) return 9; - if(bits <= 350) return 8; - if(bits <= 400) return 7; - if(bits <= 500) return 6; - if(bits <= 600) return 5; - if(bits <= 800) return 4; - if(bits <= 1250) return 3; - return 2; -} - -})(); - - -/***/ }), +function validateCookieValue (value) { + for (const char of value) { + const code = char.charCodeAt(0) -/***/ 4467: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if ( + code < 0x21 || // exclude CTLs (0-31) + code === 0x22 || + code === 0x2C || + code === 0x3B || + code === 0x5C || + code > 0x7E // non-ascii + ) { + throw new Error('Invalid header value') + } + } +} /** - * A javascript implementation of a cryptographically-secure - * Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed - * here though the use of SHA-256 is not enforced; when generating an - * a PRNG context, the hashing algorithm and block cipher used for - * the generator are specified via a plugin. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. + * path-value = + * @param {string} path */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); +function validateCookiePath (path) { + for (const char of path) { + const code = char.charCodeAt(0) -var _crypto = null; -if(forge.util.isNodejs && !forge.options.usePureJavaScript && - !process.versions['node-webkit']) { - _crypto = __nccwpck_require__(6113); + if (code < 0x21 || char === ';') { + throw new Error('Invalid cookie path') + } + } } -/* PRNG API */ -var prng = module.exports = forge.prng = forge.prng || {}; - /** - * Creates a new PRNG context. - * - * A PRNG plugin must be passed in that will provide: - * - * 1. A function that initializes the key and seed of a PRNG context. It - * will be given a 16 byte key and a 16 byte seed. Any key expansion - * or transformation of the seed from a byte string into an array of - * integers (or similar) should be performed. - * 2. The cryptographic function used by the generator. It takes a key and - * a seed. - * 3. A seed increment function. It takes the seed and returns seed + 1. - * 4. An api to create a message digest. - * - * For an example, see random.js. - * - * @param plugin the PRNG plugin to use. + * I have no idea why these values aren't allowed to be honest, + * but Deno tests these. - Khafra + * @param {string} domain */ -prng.create = function(plugin) { - var ctx = { - plugin: plugin, - key: null, - seed: null, - time: null, - // number of reseeds so far - reseeds: 0, - // amount of data generated so far - generated: 0, - // no initial key bytes - keyBytes: '' - }; - - // create 32 entropy pools (each is a message digest) - var md = plugin.md; - var pools = new Array(32); - for(var i = 0; i < 32; ++i) { - pools[i] = md.create(); +function validateCookieDomain (domain) { + if ( + domain.startsWith('-') || + domain.endsWith('.') || + domain.endsWith('-') + ) { + throw new Error('Invalid cookie domain') } - ctx.pools = pools; - - // entropy pools are written to cyclically, starting at index 0 - ctx.pool = 0; - - /** - * Generates random bytes. 
The bytes may be generated synchronously or - * asynchronously. Web workers must use the asynchronous interface or - * else the behavior is undefined. - * - * @param count the number of random bytes to generate. - * @param [callback(err, bytes)] called once the operation completes. - * - * @return count random bytes as a string. - */ - ctx.generate = function(count, callback) { - // do synchronously - if(!callback) { - return ctx.generateSync(count); - } - - // simple generator using counter-based CBC - var cipher = ctx.plugin.cipher; - var increment = ctx.plugin.increment; - var formatKey = ctx.plugin.formatKey; - var formatSeed = ctx.plugin.formatSeed; - var b = forge.util.createBuffer(); - - // paranoid deviation from Fortuna: - // reset key for every request to protect previously - // generated random bytes should the key be discovered; - // there is no 100ms based reseeding because of this - // forced reseed for every `generate` call - ctx.key = null; - - generate(); - - function generate(err) { - if(err) { - return callback(err); - } - - // sufficient bytes generated - if(b.length() >= count) { - return callback(null, b.getBytes(count)); - } - - // if amount of data generated is greater than 1 MiB, trigger reseed - if(ctx.generated > 0xfffff) { - ctx.key = null; - } - - if(ctx.key === null) { - // prevent stack overflow - return forge.util.nextTick(function() { - _reseed(generate); - }); - } - - // generate the random bytes - var bytes = cipher(ctx.key, ctx.seed); - ctx.generated += bytes.length; - b.putBytes(bytes); - - // generate bytes for a new key and seed - ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed))); - ctx.seed = formatSeed(cipher(ctx.key, ctx.seed)); - - forge.util.setImmediate(generate); - } - }; +} - /** - * Generates random bytes synchronously. - * - * @param count the number of random bytes to generate. - * - * @return count random bytes as a string. 
- */ - ctx.generateSync = function(count) { - // simple generator using counter-based CBC - var cipher = ctx.plugin.cipher; - var increment = ctx.plugin.increment; - var formatKey = ctx.plugin.formatKey; - var formatSeed = ctx.plugin.formatSeed; - - // paranoid deviation from Fortuna: - // reset key for every request to protect previously - // generated random bytes should the key be discovered; - // there is no 100ms based reseeding because of this - // forced reseed for every `generateSync` call - ctx.key = null; - - var b = forge.util.createBuffer(); - while(b.length() < count) { - // if amount of data generated is greater than 1 MiB, trigger reseed - if(ctx.generated > 0xfffff) { - ctx.key = null; - } +/** + * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 + * @param {number|Date} date + IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT + ; fixed length/zone/capitalization subset of the format + ; see Section 3.3 of [RFC5322] - if(ctx.key === null) { - _reseedSync(); - } + day-name = %x4D.6F.6E ; "Mon", case-sensitive + / %x54.75.65 ; "Tue", case-sensitive + / %x57.65.64 ; "Wed", case-sensitive + / %x54.68.75 ; "Thu", case-sensitive + / %x46.72.69 ; "Fri", case-sensitive + / %x53.61.74 ; "Sat", case-sensitive + / %x53.75.6E ; "Sun", case-sensitive + date1 = day SP month SP year + ; e.g., 02 Jun 1982 - // generate the random bytes - var bytes = cipher(ctx.key, ctx.seed); - ctx.generated += bytes.length; - b.putBytes(bytes); + day = 2DIGIT + month = %x4A.61.6E ; "Jan", case-sensitive + / %x46.65.62 ; "Feb", case-sensitive + / %x4D.61.72 ; "Mar", case-sensitive + / %x41.70.72 ; "Apr", case-sensitive + / %x4D.61.79 ; "May", case-sensitive + / %x4A.75.6E ; "Jun", case-sensitive + / %x4A.75.6C ; "Jul", case-sensitive + / %x41.75.67 ; "Aug", case-sensitive + / %x53.65.70 ; "Sep", case-sensitive + / %x4F.63.74 ; "Oct", case-sensitive + / %x4E.6F.76 ; "Nov", case-sensitive + / %x44.65.63 ; "Dec", case-sensitive + year = 4DIGIT - // generate bytes for a new key and seed - ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed))); - ctx.seed = formatSeed(cipher(ctx.key, ctx.seed)); - } + GMT = %x47.4D.54 ; "GMT", case-sensitive - return b.getBytes(count); - }; + time-of-day = hour ":" minute ":" second + ; 00:00:00 - 23:59:60 (leap second) - /** - * Private function that asynchronously reseeds a generator. - * - * @param callback(err) called once the operation completes. - */ - function _reseed(callback) { - if(ctx.pools[0].messageLength >= 32) { - _seed(); - return callback(); - } - // not enough seed data... - var needed = (32 - ctx.pools[0].messageLength) << 5; - ctx.seedFile(needed, function(err, bytes) { - if(err) { - return callback(err); - } - ctx.collect(bytes); - _seed(); - callback(); - }); + hour = 2DIGIT + minute = 2DIGIT + second = 2DIGIT + */ +function toIMFDate (date) { + if (typeof date === 'number') { + date = new Date(date) } - /** - * Private function that synchronously reseeds a generator. - */ - function _reseedSync() { - if(ctx.pools[0].messageLength >= 32) { - return _seed(); - } - // not enough seed data... - var needed = (32 - ctx.pools[0].messageLength) << 5; - ctx.collect(ctx.seedFileSync(needed)); - _seed(); - } + const days = [ + 'Sun', 'Mon', 'Tue', 'Wed', + 'Thu', 'Fri', 'Sat' + ] - /** - * Private function that seeds a generator once enough bytes are available. - */ - function _seed() { - // update reseed count - ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 
0 : ctx.reseeds + 1; - - // goal is to update `key` via: - // key = hash(key + s) - // where 's' is all collected entropy from selected pools, then... - - // create a plugin-based message digest - var md = ctx.plugin.md.create(); - - // consume current key bytes - md.update(ctx.keyBytes); - - // digest the entropy of pools whose index k meet the - // condition 'n mod 2^k == 0' where n is the number of reseeds - var _2powK = 1; - for(var k = 0; k < 32; ++k) { - if(ctx.reseeds % _2powK === 0) { - md.update(ctx.pools[k].digest().getBytes()); - ctx.pools[k].start(); - } - _2powK = _2powK << 1; - } + const months = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] - // get digest for key bytes - ctx.keyBytes = md.digest().getBytes(); + const dayName = days[date.getUTCDay()] + const day = date.getUTCDate().toString().padStart(2, '0') + const month = months[date.getUTCMonth()] + const year = date.getUTCFullYear() + const hour = date.getUTCHours().toString().padStart(2, '0') + const minute = date.getUTCMinutes().toString().padStart(2, '0') + const second = date.getUTCSeconds().toString().padStart(2, '0') - // paranoid deviation from Fortuna: - // update `seed` via `seed = hash(key)` - // instead of initializing to zero once and only - // ever incrementing it - md.start(); - md.update(ctx.keyBytes); - var seedBytes = md.digest().getBytes(); + return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` +} - // update state - ctx.key = ctx.plugin.formatKey(ctx.keyBytes); - ctx.seed = ctx.plugin.formatSeed(seedBytes); - ctx.generated = 0; +/** + max-age-av = "Max-Age=" non-zero-digit *DIGIT + ; In practice, both expires-av and max-age-av + ; are limited to dates representable by the + ; user agent. + * @param {number} maxAge + */ +function validateCookieMaxAge (maxAge) { + if (maxAge < 0) { + throw new Error('Invalid cookie max-age') } +} - /** - * The built-in default seedFile. This seedFile is used when entropy - * is needed immediately. - * - * @param needed the number of bytes that are needed. - * - * @return the random bytes. - */ - function defaultSeedFile(needed) { - // use window.crypto.getRandomValues strong source of entropy if available - var getRandomValues = null; - var globalScope = forge.util.globalScope; - var _crypto = globalScope.crypto || globalScope.msCrypto; - if(_crypto && _crypto.getRandomValues) { - getRandomValues = function(arr) { - return _crypto.getRandomValues(arr); - }; - } - - var b = forge.util.createBuffer(); - if(getRandomValues) { - while(b.length() < needed) { - // max byte length is 65536 before QuotaExceededError is thrown - // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues - var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4); - var entropy = new Uint32Array(Math.floor(count)); - try { - getRandomValues(entropy); - for(var i = 0; i < entropy.length; ++i) { - b.putInt32(entropy[i]); - } - } catch(e) { - /* only ignore QuotaExceededError */ - if(!(typeof QuotaExceededError !== 'undefined' && - e instanceof QuotaExceededError)) { - throw e; - } - } - } - } - - // be sad and add some weak random data - if(b.length() < needed) { - /* Draws from Park-Miller "minimal standard" 31 bit PRNG, - implemented with David G. Carta's optimization: with 32 bit math - and without division (Public Domain). 
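// Example of the IMF-fixdate formatting implemented by toIMFDate() above
// (sample date chosen purely for illustration):
// toIMFDate(new Date(Date.UTC(2015, 9, 21, 7, 28, 0)))
//   -> 'Wed, 21 Oct 2015 07:28:00 GMT'
// A numeric millisecond timestamp is accepted as well and converted to a Date first.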
*/ - var hi, lo, next; - var seed = Math.floor(Math.random() * 0x010000); - while(b.length() < needed) { - lo = 16807 * (seed & 0xFFFF); - hi = 16807 * (seed >> 16); - lo += (hi & 0x7FFF) << 16; - lo += hi >> 15; - lo = (lo & 0x7FFFFFFF) + (lo >> 31); - seed = lo & 0xFFFFFFFF; - - // consume lower 3 bytes of seed - for(var i = 0; i < 3; ++i) { - // throw in more pseudo random - next = seed >>> (i << 3); - next ^= Math.floor(Math.random() * 0x0100); - b.putByte(next & 0xFF); - } - } - } - - return b.getBytes(needed); - } - // initialize seed file APIs - if(_crypto) { - // use nodejs async API - ctx.seedFile = function(needed, callback) { - _crypto.randomBytes(needed, function(err, bytes) { - if(err) { - return callback(err); - } - callback(null, bytes.toString()); - }); - }; - // use nodejs sync API - ctx.seedFileSync = function(needed) { - return _crypto.randomBytes(needed).toString(); - }; - } else { - ctx.seedFile = function(needed, callback) { - try { - callback(null, defaultSeedFile(needed)); - } catch(e) { - callback(e); - } - }; - ctx.seedFileSync = defaultSeedFile; +/** + * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * @param {import('./index').Cookie} cookie + */ +function stringify (cookie) { + if (cookie.name.length === 0) { + return null } - /** - * Adds entropy to a prng ctx's accumulator. - * - * @param bytes the bytes of entropy as a string. - */ - ctx.collect = function(bytes) { - // iterate over pools distributing entropy cyclically - var count = bytes.length; - for(var i = 0; i < count; ++i) { - ctx.pools[ctx.pool].update(bytes.substr(i, 1)); - ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1; - } - }; + validateCookieName(cookie.name) + validateCookieValue(cookie.value) - /** - * Collects an integer of n bits. - * - * @param i the integer entropy. - * @param n the number of bits in the integer. - */ - ctx.collectInt = function(i, n) { - var bytes = ''; - for(var x = 0; x < n; x += 8) { - bytes += String.fromCharCode((i >> x) & 0xFF); - } - ctx.collect(bytes); - }; + const out = [`${cookie.name}=${cookie.value}`] - /** - * Registers a Web Worker to receive immediate entropy from the main thread. - * This method is required until Web Workers can access the native crypto - * API. This method should be called twice for each created worker, once in - * the main thread, and once in the worker itself. - * - * @param worker the worker to register. - */ - ctx.registerWorker = function(worker) { - // worker receives random bytes - if(worker === self) { - ctx.seedFile = function(needed, callback) { - function listener(e) { - var data = e.data; - if(data.forge && data.forge.prng) { - self.removeEventListener('message', listener); - callback(data.forge.prng.err, data.forge.prng.bytes); - } - } - self.addEventListener('message', listener); - self.postMessage({forge: {prng: {needed: needed}}}); - }; - } else { - // main thread sends random bytes upon request - var listener = function(e) { - var data = e.data; - if(data.forge && data.forge.prng) { - ctx.seedFile(data.forge.prng.needed, function(err, bytes) { - worker.postMessage({forge: {prng: {err: err, bytes: bytes}}}); - }); - } - }; - // TODO: do we need to remove the event listener when the worker dies? 
- worker.addEventListener('message', listener); - } - }; + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 + if (cookie.name.startsWith('__Secure-')) { + cookie.secure = true + } - return ctx; -}; + if (cookie.name.startsWith('__Host-')) { + cookie.secure = true + cookie.domain = null + cookie.path = '/' + } + if (cookie.secure) { + out.push('Secure') + } -/***/ }), + if (cookie.httpOnly) { + out.push('HttpOnly') + } -/***/ 4376: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof cookie.maxAge === 'number') { + validateCookieMaxAge(cookie.maxAge) + out.push(`Max-Age=${cookie.maxAge}`) + } -/** - * Javascript implementation of PKCS#1 PSS signature padding. - * - * @author Stefan Siegl - * - * Copyright (c) 2012 Stefan Siegl - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7821); -__nccwpck_require__(8339); + if (cookie.domain) { + validateCookieDomain(cookie.domain) + out.push(`Domain=${cookie.domain}`) + } -// shortcut for PSS API -var pss = module.exports = forge.pss = forge.pss || {}; + if (cookie.path) { + validateCookiePath(cookie.path) + out.push(`Path=${cookie.path}`) + } -/** - * Creates a PSS signature scheme object. - * - * There are several ways to provide a salt for encoding: - * - * 1. Specify the saltLength only and the built-in PRNG will generate it. - * 2. Specify the saltLength and a custom PRNG with 'getBytesSync' defined that - * will be used. - * 3. Specify the salt itself as a forge.util.ByteBuffer. - * - * @param options the options to use: - * md the message digest object to use, a forge md instance. - * mgf the mask generation function to use, a forge mgf instance. - * [saltLength] the length of the salt in octets. - * [prng] the pseudo-random number generator to use to produce a salt. - * [salt] the salt to use when encoding. - * - * @return a signature scheme object. 
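// Illustrative output of stringify() for a made-up cookie; the attribute order
// follows the push order in the code above (Secure/HttpOnly before
// Max-Age/Domain/Path/Expires/SameSite):
// stringify({ name: 'session', value: 'abc123', path: '/', secure: true,
//             httpOnly: true, sameSite: 'Lax', unparsed: [] })
//   -> 'session=abc123; Secure; HttpOnly; Path=/; SameSite=Lax'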
- */ -pss.create = function(options) { - // backwards compatibility w/legacy args: hash, mgf, sLen - if(arguments.length === 3) { - options = { - md: arguments[0], - mgf: arguments[1], - saltLength: arguments[2] - }; + if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { + out.push(`Expires=${toIMFDate(cookie.expires)}`) + } + + if (cookie.sameSite) { + out.push(`SameSite=${cookie.sameSite}`) } - var hash = options.md; - var mgf = options.mgf; - var hLen = hash.digestLength; + for (const part of cookie.unparsed) { + if (!part.includes('=')) { + throw new Error('Invalid unparsed') + } + + const [key, ...value] = part.split('=') - var salt_ = options.salt || null; - if(typeof salt_ === 'string') { - // assume binary-encoded string - salt_ = forge.util.createBuffer(salt_); + out.push(`${key.trim()}=${value.join('=')}`) } - var sLen; - if('saltLength' in options) { - sLen = options.saltLength; - } else if(salt_ !== null) { - sLen = salt_.length(); - } else { - throw new Error('Salt length not specified or specific salt not given.'); + return out.join('; ') +} + +let kHeadersListNode + +function getHeadersList (headers) { + if (headers[kHeadersList]) { + return headers[kHeadersList] } - if(salt_ !== null && salt_.length() !== sLen) { - throw new Error('Given salt length does not match length of given salt.'); + if (!kHeadersListNode) { + kHeadersListNode = Object.getOwnPropertySymbols(headers).find( + (symbol) => symbol.description === 'headers list' + ) + + assert(kHeadersListNode, 'Headers cannot be parsed') } - var prng = options.prng || forge.random; + const headersList = headers[kHeadersListNode] + assert(headersList) + + return headersList +} - var pssobj = {}; +module.exports = { + isCTLExcludingHtab, + stringify, + getHeadersList +} - /** - * Encodes a PSS signature. - * - * This function implements EMSA-PSS-ENCODE as per RFC 3447, section 9.1.1. - * - * @param md the message digest object with the hash to sign. - * @param modsBits the length of the RSA modulus in bits. - * - * @return the encoded message as a binary-encoded string of length - * ceil((modBits - 1) / 8). - */ - pssobj.encode = function(md, modBits) { - var i; - var emBits = modBits - 1; - var emLen = Math.ceil(emBits / 8); - /* 2. Let mHash = Hash(M), an octet string of length hLen. */ - var mHash = md.digest().getBytes(); +/***/ }), - /* 3. If emLen < hLen + sLen + 2, output "encoding error" and stop. */ - if(emLen < hLen + sLen + 2) { - throw new Error('Message is too long to encrypt.'); - } +/***/ 2067: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /* 4. Generate a random octet string salt of length sLen; if sLen = 0, - * then salt is the empty string. */ - var salt; - if(salt_ === null) { - salt = prng.getBytesSync(sLen); - } else { - salt = salt_.bytes(); - } - - /* 5. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt; */ - var m_ = new forge.util.ByteBuffer(); - m_.fillWithByte(0, 8); - m_.putBytes(mHash); - m_.putBytes(salt); - - /* 6. Let H = Hash(M'), an octet string of length hLen. */ - hash.start(); - hash.update(m_.getBytes()); - var h = hash.digest().getBytes(); - - /* 7. Generate an octet string PS consisting of emLen - sLen - hLen - 2 - * zero octets. The length of PS may be 0. */ - var ps = new forge.util.ByteBuffer(); - ps.fillWithByte(0, emLen - sLen - hLen - 2); - - /* 8. Let DB = PS || 0x01 || salt; DB is an octet string of length - * emLen - hLen - 1. */ - ps.putByte(0x01); - ps.putBytes(salt); - var db = ps.getBytes(); - - /* 9. 
Let dbMask = MGF(H, emLen - hLen - 1). */ - var maskLen = emLen - hLen - 1; - var dbMask = mgf.generate(h, maskLen); - - /* 10. Let maskedDB = DB \xor dbMask. */ - var maskedDB = ''; - for(i = 0; i < maskLen; i++) { - maskedDB += String.fromCharCode(db.charCodeAt(i) ^ dbMask.charCodeAt(i)); - } - - /* 11. Set the leftmost 8emLen - emBits bits of the leftmost octet in - * maskedDB to zero. */ - var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF; - maskedDB = String.fromCharCode(maskedDB.charCodeAt(0) & ~mask) + - maskedDB.substr(1); - - /* 12. Let EM = maskedDB || H || 0xbc. - * 13. Output EM. */ - return maskedDB + h + String.fromCharCode(0xbc); - }; +"use strict"; - /** - * Verifies a PSS signature. - * - * This function implements EMSA-PSS-VERIFY as per RFC 3447, section 9.1.2. - * - * @param mHash the message digest hash, as a binary-encoded string, to - * compare against the signature. - * @param em the encoded message, as a binary-encoded string - * (RSA decryption result). - * @param modsBits the length of the RSA modulus in bits. - * - * @return true if the signature was verified, false if not. - */ - pssobj.verify = function(mHash, em, modBits) { - var i; - var emBits = modBits - 1; - var emLen = Math.ceil(emBits / 8); - /* c. Convert the message representative m to an encoded message EM - * of length emLen = ceil((modBits - 1) / 8) octets, where modBits - * is the length in bits of the RSA modulus n */ - em = em.substr(-emLen); +const net = __nccwpck_require__(1808) +const assert = __nccwpck_require__(9491) +const util = __nccwpck_require__(3983) +const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(8045) + +let tls // include tls conditionally since it is not always available - /* 3. If emLen < hLen + sLen + 2, output "inconsistent" and stop. */ - if(emLen < hLen + sLen + 2) { - throw new Error('Inconsistent parameters to PSS signature verification.'); +// TODO: session re-use does not wait for the first +// connection to resolve the session and might therefore +// resolve the same servername multiple times even when +// re-use is enabled. + +let SessionCache +// FIXME: remove workaround when the Node bug is fixed +// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 +if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { + SessionCache = class WeakSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + this._sessionRegistry = new global.FinalizationRegistry((key) => { + if (this._sessionCache.size < this._maxCachedSessions) { + return + } + + const ref = this._sessionCache.get(key) + if (ref !== undefined && ref.deref() === undefined) { + this._sessionCache.delete(key) + } + }) } - /* 4. If the rightmost octet of EM does not have hexadecimal value - * 0xbc, output "inconsistent" and stop. */ - if(em.charCodeAt(emLen - 1) !== 0xbc) { - throw new Error('Encoded message does not end in 0xBC.'); + get (sessionKey) { + const ref = this._sessionCache.get(sessionKey) + return ref ? ref.deref() : null } - /* 5. Let maskedDB be the leftmost emLen - hLen - 1 octets of EM, and - * let H be the next hLen octets. 
*/ - var maskLen = emLen - hLen - 1; - var maskedDB = em.substr(0, maskLen); - var h = em.substr(maskLen, hLen); + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + this._sessionCache.set(sessionKey, new WeakRef(session)) + this._sessionRegistry.register(session, sessionKey) + } + } +} else { + SessionCache = class SimpleSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + } - /* 6. If the leftmost 8emLen - emBits bits of the leftmost octet in - * maskedDB are not all equal to zero, output "inconsistent" and stop. */ - var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF; - if((maskedDB.charCodeAt(0) & mask) !== 0) { - throw new Error('Bits beyond keysize not zero as expected.'); + get (sessionKey) { + return this._sessionCache.get(sessionKey) } - /* 7. Let dbMask = MGF(H, emLen - hLen - 1). */ - var dbMask = mgf.generate(h, maskLen); + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + if (this._sessionCache.size >= this._maxCachedSessions) { + // remove the oldest session + const { value: oldestKey } = this._sessionCache.keys().next() + this._sessionCache.delete(oldestKey) + } - /* 8. Let DB = maskedDB \xor dbMask. */ - var db = ''; - for(i = 0; i < maskLen; i++) { - db += String.fromCharCode(maskedDB.charCodeAt(i) ^ dbMask.charCodeAt(i)); + this._sessionCache.set(sessionKey, session) } + } +} - /* 9. Set the leftmost 8emLen - emBits bits of the leftmost octet - * in DB to zero. */ - db = String.fromCharCode(db.charCodeAt(0) & ~mask) + db.substr(1); +function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { + if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { + throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + } - /* 10. If the emLen - hLen - sLen - 2 leftmost octets of DB are not zero - * or if the octet at position emLen - hLen - sLen - 1 (the leftmost - * position is "position 1") does not have hexadecimal value 0x01, - * output "inconsistent" and stop. */ - var checkLen = emLen - hLen - sLen - 2; - for(i = 0; i < checkLen; i++) { - if(db.charCodeAt(i) !== 0x00) { - throw new Error('Leftmost octets not zero as expected'); + const options = { path: socketPath, ...opts } + const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) + timeout = timeout == null ? 10e3 : timeout + allowH2 = allowH2 != null ? allowH2 : false + return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { + let socket + if (protocol === 'https:') { + if (!tls) { + tls = __nccwpck_require__(4404) } + servername = servername || options.servername || util.getServerName(host) || null + + const sessionKey = servername || hostname + const session = sessionCache.get(sessionKey) || null + + assert(sessionKey) + + socket = tls.connect({ + highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... + ...options, + servername, + session, + localAddress, + // TODO(HTTP/2): Add support for h2c + ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], + socket: httpSocket, // upgrade socket connection + port: port || 443, + host: hostname + }) + + socket + .on('session', function (session) { + // TODO (fix): Can a session become invalid once established? Don't think so? 
+ sessionCache.set(sessionKey, session) + }) + } else { + assert(!httpSocket, 'httpSocket can only be sent on TLS update') + socket = net.connect({ + highWaterMark: 64 * 1024, // Same as nodejs fs streams. + ...options, + localAddress, + port: port || 80, + host: hostname + }) } - if(db.charCodeAt(checkLen) !== 0x01) { - throw new Error('Inconsistent PSS signature, 0x01 marker not found'); + // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket + if (options.keepAlive == null || options.keepAlive) { + const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay + socket.setKeepAlive(true, keepAliveInitialDelay) } - /* 11. Let salt be the last sLen octets of DB. */ - var salt = db.substr(-sLen); + const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) - /* 12. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt */ - var m_ = new forge.util.ByteBuffer(); - m_.fillWithByte(0, 8); - m_.putBytes(mHash); - m_.putBytes(salt); + socket + .setNoDelay(true) + .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { + cancelTimeout() - /* 13. Let H' = Hash(M'), an octet string of length hLen. */ - hash.start(); - hash.update(m_.getBytes()); - var h_ = hash.digest().getBytes(); + if (callback) { + const cb = callback + callback = null + cb(null, this) + } + }) + .on('error', function (err) { + cancelTimeout() - /* 14. If H = H', output "consistent." Otherwise, output "inconsistent." */ - return h === h_; - }; + if (callback) { + const cb = callback + callback = null + cb(err) + } + }) - return pssobj; -}; + return socket + } +} + +function setupTimeout (onConnectTimeout, timeout) { + if (!timeout) { + return () => {} + } + + let s1 = null + let s2 = null + const timeoutId = setTimeout(() => { + // setImmediate is added to make sure that we priotorise socket error events over timeouts + s1 = setImmediate(() => { + if (process.platform === 'win32') { + // Windows needs an extra setImmediate probably due to implementation differences in the socket logic + s2 = setImmediate(() => onConnectTimeout()) + } else { + onConnectTimeout() + } + }) + }, timeout) + return () => { + clearTimeout(timeoutId) + clearImmediate(s1) + clearImmediate(s2) + } +} + +function onConnectTimeout (socket) { + util.destroy(socket, new ConnectTimeoutError()) +} + +module.exports = buildConnector /***/ }), -/***/ 7821: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 8045: +/***/ ((module) => { -/** - * An API for getting cryptographically-secure random bytes. The bytes are - * generated using the Fortuna algorithm devised by Bruce Schneier and - * Niels Ferguson. - * - * Getting strong random bytes is not yet easy to do in javascript. The only - * truish random entropy that can be collected is from the mouse, keyboard, or - * from timing with respect to page loads, etc. This generator makes a poor - * attempt at providing random bytes when those sources haven't yet provided - * enough entropy to initially seed or to reseed the PRNG. - * - * @author Dave Longley - * - * Copyright (c) 2009-2014 Digital Bazaar, Inc. 
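For orientation, the connector module added above is a factory: buildConnector(opts) returns a connect({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) function that dials either a TLS socket (with cached session reuse) or a plain TCP socket and enforces a connect timeout. A minimal usage sketch, assuming the module is loaded as undici's lib/core/connect (the require path is an assumption; in this bundle it is resolved through __nccwpck_require__):

// Illustrative only; the require path is a guess, not part of this patch.
const buildConnector = require('undici/lib/core/connect')

const connect = buildConnector({ maxCachedSessions: 50, timeout: 5e3 })

// The callback fires with (null, socket) after 'secureConnect'/'connect',
// or with a ConnectTimeoutError / socket error otherwise.
connect(
  { hostname: 'example.com', host: 'example.com', protocol: 'https:', port: 443 },
  (err, socket) => {
    if (err) throw err
    socket.end()
  }
)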
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(4086); -__nccwpck_require__(4467); -__nccwpck_require__(8339); - -(function() { - -// forge.random already defined -if(forge.random && forge.random.getBytes) { - module.exports = forge.random; - return; -} - -(function(jQuery) { - -// the default prng plugin, uses AES-128 -var prng_aes = {}; -var _prng_aes_output = new Array(4); -var _prng_aes_buffer = forge.util.createBuffer(); -prng_aes.formatKey = function(key) { - // convert the key into 32-bit integers - var tmp = forge.util.createBuffer(key); - key = new Array(4); - key[0] = tmp.getInt32(); - key[1] = tmp.getInt32(); - key[2] = tmp.getInt32(); - key[3] = tmp.getInt32(); - - // return the expanded key - return forge.aes._expandKey(key, false); -}; -prng_aes.formatSeed = function(seed) { - // convert seed into 32-bit integers - var tmp = forge.util.createBuffer(seed); - seed = new Array(4); - seed[0] = tmp.getInt32(); - seed[1] = tmp.getInt32(); - seed[2] = tmp.getInt32(); - seed[3] = tmp.getInt32(); - return seed; -}; -prng_aes.cipher = function(key, seed) { - forge.aes._updateBlock(key, seed, _prng_aes_output, false); - _prng_aes_buffer.putInt32(_prng_aes_output[0]); - _prng_aes_buffer.putInt32(_prng_aes_output[1]); - _prng_aes_buffer.putInt32(_prng_aes_output[2]); - _prng_aes_buffer.putInt32(_prng_aes_output[3]); - return _prng_aes_buffer.getBytes(); -}; -prng_aes.increment = function(seed) { - // FIXME: do we care about carry or signed issues? - ++seed[3]; - return seed; -}; -prng_aes.md = forge.md.sha256; +"use strict"; -/** - * Creates a new PRNG. - */ -function spawnPrng() { - var ctx = forge.prng.create(prng_aes); - /** - * Gets random bytes. If a native secure crypto API is unavailable, this - * method tries to make the bytes more unpredictable by drawing from data that - * can be collected from the user of the browser, eg: mouse movement. - * - * If a callback is given, this method will be called asynchronously. - * - * @param count the number of random bytes to get. - * @param [callback(err, bytes)] called once the operation completes. - * - * @return the random bytes in a string. - */ - ctx.getBytes = function(count, callback) { - return ctx.generate(count, callback); - }; +class UndiciError extends Error { + constructor (message) { + super(message) + this.name = 'UndiciError' + this.code = 'UND_ERR' + } +} - /** - * Gets random bytes asynchronously. If a native secure crypto API is - * unavailable, this method tries to make the bytes more unpredictable by - * drawing from data that can be collected from the user of the browser, - * eg: mouse movement. - * - * @param count the number of random bytes to get. - * - * @return the random bytes in a string. 
- */ - ctx.getBytesSync = function(count) { - return ctx.generate(count); - }; +class ConnectTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ConnectTimeoutError) + this.name = 'ConnectTimeoutError' + this.message = message || 'Connect Timeout Error' + this.code = 'UND_ERR_CONNECT_TIMEOUT' + } +} - return ctx; +class HeadersTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersTimeoutError) + this.name = 'HeadersTimeoutError' + this.message = message || 'Headers Timeout Error' + this.code = 'UND_ERR_HEADERS_TIMEOUT' + } } -// create default prng context -var _ctx = spawnPrng(); +class HeadersOverflowError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersOverflowError) + this.name = 'HeadersOverflowError' + this.message = message || 'Headers Overflow Error' + this.code = 'UND_ERR_HEADERS_OVERFLOW' + } +} -// add other sources of entropy only if window.crypto.getRandomValues is not -// available -- otherwise this source will be automatically used by the prng -var getRandomValues = null; -var globalScope = forge.util.globalScope; -var _crypto = globalScope.crypto || globalScope.msCrypto; -if(_crypto && _crypto.getRandomValues) { - getRandomValues = function(arr) { - return _crypto.getRandomValues(arr); - }; +class BodyTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, BodyTimeoutError) + this.name = 'BodyTimeoutError' + this.message = message || 'Body Timeout Error' + this.code = 'UND_ERR_BODY_TIMEOUT' + } } -if(forge.options.usePureJavaScript || - (!forge.util.isNodejs && !getRandomValues)) { - // if this is a web worker, do not use weak entropy, instead register to - // receive strong entropy asynchronously from the main thread - if(typeof window === 'undefined' || window.document === undefined) { - // FIXME: +class ResponseStatusCodeError extends UndiciError { + constructor (message, statusCode, headers, body) { + super(message) + Error.captureStackTrace(this, ResponseStatusCodeError) + this.name = 'ResponseStatusCodeError' + this.message = message || 'Response Status Code Error' + this.code = 'UND_ERR_RESPONSE_STATUS_CODE' + this.body = body + this.status = statusCode + this.statusCode = statusCode + this.headers = headers } +} - // get load time entropy - _ctx.collectInt(+new Date(), 32); +class InvalidArgumentError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidArgumentError) + this.name = 'InvalidArgumentError' + this.message = message || 'Invalid Argument Error' + this.code = 'UND_ERR_INVALID_ARG' + } +} - // add some entropy from navigator object - if(typeof(navigator) !== 'undefined') { - var _navBytes = ''; - for(var key in navigator) { - try { - if(typeof(navigator[key]) == 'string') { - _navBytes += navigator[key]; - } - } catch(e) { - /* Some navigator keys might not be accessible, e.g. the geolocation - attribute throws an exception if touched in Mozilla chrome:// - context. +class InvalidReturnValueError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidReturnValueError) + this.name = 'InvalidReturnValueError' + this.message = message || 'Invalid Return Value Error' + this.code = 'UND_ERR_INVALID_RETURN_VALUE' + } +} - Silently ignore this and just don't use this as a source of - entropy. 
*/ - } - } - _ctx.collect(_navBytes); - _navBytes = null; +class RequestAbortedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestAbortedError) + this.name = 'AbortError' + this.message = message || 'Request aborted' + this.code = 'UND_ERR_ABORTED' } +} - // add mouse and keyboard collectors if jquery is available - if(jQuery) { - // set up mouse entropy capture - jQuery().mousemove(function(e) { - // add mouse coords - _ctx.collectInt(e.clientX, 16); - _ctx.collectInt(e.clientY, 16); - }); +class InformationalError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InformationalError) + this.name = 'InformationalError' + this.message = message || 'Request information' + this.code = 'UND_ERR_INFO' + } +} - // set up keyboard entropy capture - jQuery().keypress(function(e) { - _ctx.collectInt(e.charCode, 8); - }); +class RequestContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestContentLengthMismatchError) + this.name = 'RequestContentLengthMismatchError' + this.message = message || 'Request body length does not match content-length header' + this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' } } -/* Random API */ -if(!forge.random) { - forge.random = _ctx; -} else { - // extend forge.random with _ctx - for(var key in _ctx) { - forge.random[key] = _ctx[key]; +class ResponseContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseContentLengthMismatchError) + this.name = 'ResponseContentLengthMismatchError' + this.message = message || 'Response body length does not match content-length header' + this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' } } -// expose spawn PRNG -forge.random.createInstance = spawnPrng; +class ClientDestroyedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientDestroyedError) + this.name = 'ClientDestroyedError' + this.message = message || 'The client is destroyed' + this.code = 'UND_ERR_DESTROYED' + } +} -module.exports = forge.random; +class ClientClosedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientClosedError) + this.name = 'ClientClosedError' + this.message = message || 'The client is closed' + this.code = 'UND_ERR_CLOSED' + } +} -})(typeof(jQuery) !== 'undefined' ? 
jQuery : null); +class SocketError extends UndiciError { + constructor (message, socket) { + super(message) + Error.captureStackTrace(this, SocketError) + this.name = 'SocketError' + this.message = message || 'Socket error' + this.code = 'UND_ERR_SOCKET' + this.socket = socket + } +} -})(); +class NotSupportedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'NotSupportedError' + this.message = message || 'Not supported error' + this.code = 'UND_ERR_NOT_SUPPORTED' + } +} +class BalancedPoolMissingUpstreamError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'MissingUpstreamError' + this.message = message || 'No upstream has been added to the BalancedPool' + this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' + } +} -/***/ }), +class HTTPParserError extends Error { + constructor (message, code, data) { + super(message) + Error.captureStackTrace(this, HTTPParserError) + this.name = 'HTTPParserError' + this.code = code ? `HPE_${code}` : undefined + this.data = data ? data.toString() : undefined + } +} -/***/ 9965: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class ResponseExceededMaxSizeError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseExceededMaxSizeError) + this.name = 'ResponseExceededMaxSizeError' + this.message = message || 'Response content exceeded max size' + this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' + } +} -/** - * RC2 implementation. - * - * @author Stefan Siegl - * - * Copyright (c) 2012 Stefan Siegl - * - * Information on the RC2 cipher is available from RFC #2268, - * http://www.ietf.org/rfc/rfc2268.txt - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(8339); - -var piTable = [ - 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d, - 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2, - 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32, - 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82, - 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc, - 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26, - 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03, - 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7, - 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a, - 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec, - 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39, - 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31, - 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9, - 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9, - 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e, - 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad -]; +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + 
super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} -var s = [1, 2, 3, 5]; +module.exports = { + HTTPParserError, + UndiciError, + HeadersTimeoutError, + HeadersOverflowError, + BodyTimeoutError, + RequestContentLengthMismatchError, + ConnectTimeoutError, + ResponseStatusCodeError, + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError, + ClientDestroyedError, + ClientClosedError, + InformationalError, + SocketError, + NotSupportedError, + ResponseContentLengthMismatchError, + BalancedPoolMissingUpstreamError, + ResponseExceededMaxSizeError, + RequestRetryError +} -/** - * Rotate a word left by given number of bits. - * - * Bits that are shifted out on the left are put back in on the right - * hand side. - * - * @param word The word to shift left. - * @param bits The number of bits to shift by. - * @return The rotated word. - */ -var rol = function(word, bits) { - return ((word << bits) & 0xffff) | ((word & 0xffff) >> (16 - bits)); -}; -/** - * Rotate a word right by given number of bits. - * - * Bits that are shifted out on the right are put back in on the left - * hand side. - * - * @param word The word to shift right. - * @param bits The number of bits to shift by. - * @return The rotated word. - */ -var ror = function(word, bits) { - return ((word & 0xffff) >> bits) | ((word << (16 - bits)) & 0xffff); -}; +/***/ }), -/* RC2 API */ -module.exports = forge.rc2 = forge.rc2 || {}; +/***/ 2905: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Perform RC2 key expansion as per RFC #2268, section 2. - * - * @param key variable-length user key (between 1 and 128 bytes) - * @param effKeyBits number of effective key bits (default: 128) - * @return the expanded RC2 key (ByteBuffer of 128 bytes) - */ -forge.rc2.expandKey = function(key, effKeyBits) { - if(typeof key === 'string') { - key = forge.util.createBuffer(key); - } - effKeyBits = effKeyBits || 128; - - /* introduce variables that match the names used in RFC #2268 */ - var L = key; - var T = key.length(); - var T1 = effKeyBits; - var T8 = Math.ceil(T1 / 8); - var TM = 0xff >> (T1 & 0x07); - var i; +"use strict"; - for(i = T; i < 128; i++) { - L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]); - } - L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]); +const { + InvalidArgumentError, + NotSupportedError +} = __nccwpck_require__(8045) +const assert = __nccwpck_require__(9491) +const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(2785) +const util = __nccwpck_require__(3983) - for(i = 127 - T8; i >= 0; i--) { - L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]); - } +// tokenRegExp and headerCharRegex have been lifted from +// https://github.com/nodejs/node/blob/main/lib/_http_common.js - return L; -}; +/** + * Verifies that the given val is a valid HTTP token + * per the rules defined in RFC 7230 + * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + */ +const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ /** - * Creates a RC2 cipher object. - * - * @param key the symmetric key to use (as base for key generation). - * @param bits the number of effective key bits. - * @param encrypt false for decryption, true for encryption. - * - * @return the cipher. 
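Each error class introduced above extends UndiciError and carries a stable code string alongside a default message, so callers can branch on err.code rather than on instanceof checks against a bundled copy of the class. A small sketch of that pattern, using only codes defined in the module above:

// Sketch only: map a few undici error codes to human-readable causes.
function describeUndiciError (err) {
  switch (err.code) {
    case 'UND_ERR_CONNECT_TIMEOUT':
      return 'TCP/TLS connect timed out'
    case 'UND_ERR_HEADERS_TIMEOUT':
      return 'response headers were not received in time'
    case 'UND_ERR_RESPONSE_STATUS_CODE':
      // ResponseStatusCodeError also exposes statusCode, headers and body.
      return `request failed with status ${err.statusCode}`
    default:
      return err.message || 'unknown undici error'
  }
}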
+ * Matches if val contains an invalid field-vchar + * field-value = *( field-content / obs-fold ) + * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] + * field-vchar = VCHAR / obs-text */ -var createCipher = function(key, bits, encrypt) { - var _finish = false, _input = null, _output = null, _iv = null; - var mixRound, mashRound; - var i, j, K = []; +const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - /* Expand key and fill into K[] Array */ - key = forge.rc2.expandKey(key, bits); - for(i = 0; i < 64; i++) { - K.push(key.getInt16Le()); - } +// Verifies that a given path is valid does not contain control chars \x00 to \x20 +const invalidPathRegex = /[^\u0021-\u00ff]/ - if(encrypt) { - /** - * Perform one mixing round "in place". - * - * @param R Array of four words to perform mixing on. - */ - mixRound = function(R) { - for(i = 0; i < 4; i++) { - R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) + - ((~R[(i + 3) % 4]) & R[(i + 1) % 4]); - R[i] = rol(R[i], s[i]); - j++; - } - }; +const kHandler = Symbol('handler') - /** - * Perform one mashing round "in place". - * - * @param R Array of four words to perform mashing on. - */ - mashRound = function(R) { - for(i = 0; i < 4; i++) { - R[i] += K[R[(i + 3) % 4] & 63]; - } - }; - } else { - /** - * Perform one r-mixing round "in place". - * - * @param R Array of four words to perform mixing on. - */ - mixRound = function(R) { - for(i = 3; i >= 0; i--) { - R[i] = ror(R[i], s[i]); - R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) + - ((~R[(i + 3) % 4]) & R[(i + 1) % 4]); - j--; - } - }; +const channels = {} - /** - * Perform one r-mashing round "in place". - * - * @param R Array of four words to perform mashing on. - */ - mashRound = function(R) { - for(i = 3; i >= 0; i--) { - R[i] -= K[R[(i + 3) % 4] & 63]; - } - }; - } +let extractBody - /** - * Run the specified cipher execution plan. - * - * This function takes four words from the input buffer, applies the IV on - * it (if requested) and runs the provided execution plan. - * - * The plan must be put together in form of a array of arrays. Where the - * outer one is simply a list of steps to perform and the inner one needs - * to have two elements: the first one telling how many rounds to perform, - * the second one telling what to do (i.e. the function to call). - * - * @param {Array} plan The plan to execute. 
- */ - var runPlan = function(plan) { - var R = []; +try { + const diagnosticsChannel = __nccwpck_require__(7643) + channels.create = diagnosticsChannel.channel('undici:request:create') + channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') + channels.headers = diagnosticsChannel.channel('undici:request:headers') + channels.trailers = diagnosticsChannel.channel('undici:request:trailers') + channels.error = diagnosticsChannel.channel('undici:request:error') +} catch { + channels.create = { hasSubscribers: false } + channels.bodySent = { hasSubscribers: false } + channels.headers = { hasSubscribers: false } + channels.trailers = { hasSubscribers: false } + channels.error = { hasSubscribers: false } +} - /* Get data from input buffer and fill the four words into R */ - for(i = 0; i < 4; i++) { - var val = _input.getInt16Le(); +class Request { + constructor (origin, { + path, + method, + body, + headers, + query, + idempotent, + blocking, + upgrade, + headersTimeout, + bodyTimeout, + reset, + throwOnError, + expectContinue + }, handler) { + if (typeof path !== 'string') { + throw new InvalidArgumentError('path must be a string') + } else if ( + path[0] !== '/' && + !(path.startsWith('http://') || path.startsWith('https://')) && + method !== 'CONNECT' + ) { + throw new InvalidArgumentError('path must be an absolute URL or start with a slash') + } else if (invalidPathRegex.exec(path) !== null) { + throw new InvalidArgumentError('invalid request path') + } - if(_iv !== null) { - if(encrypt) { - /* We're encrypting, apply the IV first. */ - val ^= _iv.getInt16Le(); - } else { - /* We're decryption, keep cipher text for next block. */ - _iv.putInt16Le(val); - } - } + if (typeof method !== 'string') { + throw new InvalidArgumentError('method must be a string') + } else if (tokenRegExp.exec(method) === null) { + throw new InvalidArgumentError('invalid request method') + } - R.push(val & 0xffff); + if (upgrade && typeof upgrade !== 'string') { + throw new InvalidArgumentError('upgrade must be a string') } - /* Reset global "j" variable as per spec. */ - j = encrypt ? 0 : 63; + if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('invalid headersTimeout') + } - /* Run execution plan. */ - for(var ptr = 0; ptr < plan.length; ptr++) { - for(var ctr = 0; ctr < plan[ptr][0]; ctr++) { - plan[ptr][1](R); - } + if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('invalid bodyTimeout') } - /* Write back result to output buffer. */ - for(i = 0; i < 4; i++) { - if(_iv !== null) { - if(encrypt) { - /* We're encrypting in CBC-mode, feed back encrypted bytes into - IV buffer to carry it forward to next block. */ - _iv.putInt16Le(R[i]); - } else { - R[i] ^= _iv.getInt16Le(); - } - } + if (reset != null && typeof reset !== 'boolean') { + throw new InvalidArgumentError('invalid reset') + } - _output.putInt16Le(R[i]); + if (expectContinue != null && typeof expectContinue !== 'boolean') { + throw new InvalidArgumentError('invalid expectContinue') } - }; - /* Create cipher object */ - var cipher = null; - cipher = { - /** - * Starts or restarts the encryption or decryption process, whichever - * was previously configured. - * - * To use the cipher in CBC mode, iv may be given either as a string - * of bytes, or as a byte buffer. For ECB mode, give null as iv. - * - * @param iv the initialization vector to use, null for ECB mode. 
- * @param output the output the buffer to write to, null to create one. - */ - start: function(iv, output) { - if(iv) { - /* CBC mode */ - if(typeof iv === 'string') { - iv = forge.util.createBuffer(iv); - } - } + this.headersTimeout = headersTimeout - _finish = false; - _input = forge.util.createBuffer(); - _output = output || new forge.util.createBuffer(); - _iv = iv; + this.bodyTimeout = bodyTimeout - cipher.output = _output; - }, + this.throwOnError = throwOnError === true - /** - * Updates the next block. - * - * @param input the buffer to read from. - */ - update: function(input) { - if(!_finish) { - // not finishing, so fill the input buffer with more input - _input.putBuffer(input); - } + this.method = method - while(_input.length() >= 8) { - runPlan([ - [ 5, mixRound ], - [ 1, mashRound ], - [ 6, mixRound ], - [ 1, mashRound ], - [ 5, mixRound ] - ]); - } - }, + this.abort = null - /** - * Finishes encrypting or decrypting. - * - * @param pad a padding function to use, null for PKCS#7 padding, - * signature(blockSize, buffer, decrypt). - * - * @return true if successful, false on error. - */ - finish: function(pad) { - var rval = true; + if (body == null) { + this.body = null + } else if (util.isStream(body)) { + this.body = body - if(encrypt) { - if(pad) { - rval = pad(8, _input, !encrypt); - } else { - // add PKCS#7 padding to block (each pad byte is the - // value of the number of pad bytes) - var padding = (_input.length() === 8) ? 8 : (8 - _input.length()); - _input.fillWithByte(padding, padding); + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) } + this.body.on('end', this.endHandler) } - if(rval) { - // do final update - _finish = true; - cipher.update(); - } - - if(!encrypt) { - // check for error: input data not a multiple of block size - rval = (_input.length() === 0); - if(rval) { - if(pad) { - rval = pad(8, _output, !encrypt); - } else { - // ensure padding byte count is valid - var len = _output.length(); - var count = _output.at(len - 1); - - if(count > len) { - rval = false; - } else { - // trim off padding bytes - _output.truncate(count); - } - } + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err } } - - return rval; + this.body.on('error', this.errorHandler) + } else if (util.isBuffer(body)) { + this.body = body.byteLength ? body : null + } else if (ArrayBuffer.isView(body)) { + this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null + } else if (body instanceof ArrayBuffer) { + this.body = body.byteLength ? Buffer.from(body) : null + } else if (typeof body === 'string') { + this.body = body.length ? Buffer.from(body) : null + } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { + this.body = body + } else { + throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') } - }; - return cipher; -}; + this.completed = false -/** - * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the - * given symmetric key. The output will be stored in the 'output' member - * of the returned cipher. - * - * The key and iv may be given as a string of bytes or a byte buffer. - * The cipher is initialized to use 128 effective key bits. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. 
- * @param output the buffer to write to, null to create one. - * - * @return the cipher. - */ -forge.rc2.startEncrypting = function(key, iv, output) { - var cipher = forge.rc2.createEncryptionCipher(key, 128); - cipher.start(iv, output); - return cipher; -}; + this.aborted = false -/** - * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the - * given symmetric key. - * - * The key may be given as a string of bytes or a byte buffer. - * - * To start encrypting call start() on the cipher with an iv and optional - * output buffer. - * - * @param key the symmetric key to use. - * - * @return the cipher. - */ -forge.rc2.createEncryptionCipher = function(key, bits) { - return createCipher(key, bits, true); -}; + this.upgrade = upgrade || null -/** - * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the - * given symmetric key. The output will be stored in the 'output' member - * of the returned cipher. - * - * The key and iv may be given as a string of bytes or a byte buffer. - * The cipher is initialized to use 128 effective key bits. - * - * @param key the symmetric key to use. - * @param iv the initialization vector to use. - * @param output the buffer to write to, null to create one. - * - * @return the cipher. - */ -forge.rc2.startDecrypting = function(key, iv, output) { - var cipher = forge.rc2.createDecryptionCipher(key, 128); - cipher.start(iv, output); - return cipher; -}; + this.path = query ? util.buildURL(path, query) : path -/** - * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the - * given symmetric key. - * - * The key may be given as a string of bytes or a byte buffer. - * - * To start decrypting call start() on the cipher with an iv and optional - * output buffer. - * - * @param key the symmetric key to use. - * - * @return the cipher. - */ -forge.rc2.createDecryptionCipher = function(key, bits) { - return createCipher(key, bits, false); -}; + this.origin = origin + this.idempotent = idempotent == null + ? method === 'HEAD' || method === 'GET' + : idempotent -/***/ }), + this.blocking = blocking == null ? false : blocking -/***/ 3921: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.reset = reset == null ? null : reset -/** - * Javascript implementation of basic RSA algorithms. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - * - * The only algorithm currently supported for PKI is RSA. - * - * An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo - * ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier - * and a subjectPublicKey of type bit string. - * - * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters - * for the algorithm, if any. In the case of RSA, there aren't any. 
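The RC2 code being removed above exposes a start()/update()/finish() cipher object whose result accumulates in cipher.output. A hedged usage sketch against the node-forge API described in those comments (the key and IV values here are arbitrary examples):

// Illustrative only; mirrors the startEncrypting/startDecrypting contract above.
var forge = require('node-forge')
var key = forge.random.getBytesSync(16) // 128-bit key
var iv = forge.random.getBytesSync(8)   // RC2 uses an 8-byte block, so an 8-byte IV

var cipher = forge.rc2.startEncrypting(key, iv, null)
cipher.update(forge.util.createBuffer('some data', 'utf8'))
cipher.finish()
var encrypted = cipher.output.getBytes()

var decipher = forge.rc2.startDecrypting(key, iv, null)
decipher.update(forge.util.createBuffer(encrypted))
decipher.finish()
var decrypted = decipher.output.getBytes() // 'some data'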
- * - * SubjectPublicKeyInfo ::= SEQUENCE { - * algorithm AlgorithmIdentifier, - * subjectPublicKey BIT STRING - * } - * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * For an RSA public key, the subjectPublicKey is: - * - * RSAPublicKey ::= SEQUENCE { - * modulus INTEGER, -- n - * publicExponent INTEGER -- e - * } - * - * PrivateKeyInfo ::= SEQUENCE { - * version Version, - * privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, - * privateKey PrivateKey, - * attributes [0] IMPLICIT Attributes OPTIONAL - * } - * - * Version ::= INTEGER - * PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier - * PrivateKey ::= OCTET STRING - * Attributes ::= SET OF Attribute - * - * An RSA private key as the following structure: - * - * RSAPrivateKey ::= SEQUENCE { - * version Version, - * modulus INTEGER, -- n - * publicExponent INTEGER, -- e - * privateExponent INTEGER, -- d - * prime1 INTEGER, -- p - * prime2 INTEGER, -- q - * exponent1 INTEGER, -- d mod (p-1) - * exponent2 INTEGER, -- d mod (q-1) - * coefficient INTEGER -- (inverse of q) mod p - * } - * - * Version ::= INTEGER - * - * The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1 - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(7052); -__nccwpck_require__(1925); -__nccwpck_require__(7014); -__nccwpck_require__(6861); -__nccwpck_require__(7821); -__nccwpck_require__(8339); + this.host = null -if(typeof BigInteger === 'undefined') { - var BigInteger = forge.jsbn.BigInteger; -} + this.contentLength = null -var _crypto = forge.util.isNodejs ? __nccwpck_require__(6113) : null; + this.contentType = null -// shortcut for asn.1 API -var asn1 = forge.asn1; + this.headers = '' -// shortcut for util API -var util = forge.util; + // Only for H2 + this.expectContinue = expectContinue != null ? expectContinue : false -/* - * RSA encryption and decryption, see RFC 2313. 
- */ -forge.pki = forge.pki || {}; -module.exports = forge.pki.rsa = forge.rsa = forge.rsa || {}; -var pki = forge.pki; - -// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 -var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; - -// validator for a PrivateKeyInfo structure -var privateKeyValidator = { - // PrivateKeyInfo - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // Version (INTEGER) - name: 'PrivateKeyInfo.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyVersion' - }, { - // privateKeyAlgorithm - name: 'PrivateKeyInfo.privateKeyAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'privateKeyOid' - }] - }, { - // PrivateKey - name: 'PrivateKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'privateKey' - }] -}; + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(this, headers[i], headers[i + 1]) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(this, key, headers[key]) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } -// validator for an RSA private key -var rsaPrivateKeyValidator = { - // RSAPrivateKey - name: 'RSAPrivateKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // Version (INTEGER) - name: 'RSAPrivateKey.version', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyVersion' - }, { - // modulus (n) - name: 'RSAPrivateKey.modulus', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyModulus' - }, { - // publicExponent (e) - name: 'RSAPrivateKey.publicExponent', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPublicExponent' - }, { - // privateExponent (d) - name: 'RSAPrivateKey.privateExponent', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPrivateExponent' - }, { - // prime1 (p) - name: 'RSAPrivateKey.prime1', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPrime1' - }, { - // prime2 (q) - name: 'RSAPrivateKey.prime2', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyPrime2' - }, { - // exponent1 (d mod (p-1)) - name: 'RSAPrivateKey.exponent1', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyExponent1' - }, { - // exponent2 (d mod (q-1)) - name: 'RSAPrivateKey.exponent2', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyExponent2' - }, { - // coefficient ((inverse of q) mod p) - name: 'RSAPrivateKey.coefficient', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'privateKeyCoefficient' - }] -}; + if 
(util.isFormDataLike(this.body)) { + if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { + throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') + } -// validator for an RSA public key -var rsaPublicKeyValidator = { - // RSAPublicKey - name: 'RSAPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // modulus (n) - name: 'RSAPublicKey.modulus', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'publicKeyModulus' - }, { - // publicExponent (e) - name: 'RSAPublicKey.exponent', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'publicKeyExponent' - }] -}; + if (!extractBody) { + extractBody = (__nccwpck_require__(1472).extractBody) + } -// validator for an SubjectPublicKeyInfo structure -// Note: Currently only works with an RSA public key -var publicKeyValidator = forge.pki.rsa.publicKeyValidator = { - name: 'SubjectPublicKeyInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'subjectPublicKeyInfo', - value: [{ - name: 'SubjectPublicKeyInfo.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'publicKeyOid' - }] - }, { - // subjectPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - value: [{ - // RSAPublicKey - name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - optional: true, - captureAsn1: 'rsaPublicKey' - }] - }] -}; + const [bodyStream, contentType] = extractBody(body) + if (this.contentType == null) { + this.contentType = contentType + this.headers += `content-type: ${contentType}\r\n` + } + this.body = bodyStream.stream + this.contentLength = bodyStream.length + } else if (util.isBlobLike(body) && this.contentType == null && body.type) { + this.contentType = body.type + this.headers += `content-type: ${body.type}\r\n` + } -// validator for a DigestInfo structure -var digestInfoValidator = { - name: 'DigestInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'DigestInfo.DigestAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'algorithmIdentifier' - }, { - // NULL paramters - name: 'DigestInfo.DigestAlgorithm.parameters', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.NULL, - // captured only to check existence for md2 and md5 - capture: 'parameters', - optional: true, - constructed: false - }] - }, { - // digest - name: 'DigestInfo.digest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OCTETSTRING, - constructed: false, - capture: 'digest' - }] -}; + util.validateHandler(handler, method, upgrade) -/** - * Wrap digest in DigestInfo object. - * - * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447. 
- * - * DigestInfo ::= SEQUENCE { - * digestAlgorithm DigestAlgorithmIdentifier, - * digest Digest - * } - * - * DigestAlgorithmIdentifier ::= AlgorithmIdentifier - * Digest ::= OCTET STRING - * - * @param md the message digest object with the hash to sign. - * - * @return the encoded message (ready for RSA encrytion) - */ -var emsaPkcs1v15encode = function(md) { - // get the oid for the algorithm - var oid; - if(md.algorithm in pki.oids) { - oid = pki.oids[md.algorithm]; - } else { - var error = new Error('Unknown message digest algorithm.'); - error.algorithm = md.algorithm; - throw error; - } - var oidBytes = asn1.oidToDer(oid).getBytes(); - - // create the digest info - var digestInfo = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var digestAlgorithm = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - digestAlgorithm.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidBytes)); - digestAlgorithm.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')); - var digest = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, - false, md.digest().getBytes()); - digestInfo.value.push(digestAlgorithm); - digestInfo.value.push(digest); - - // encode digest info - return asn1.toDer(digestInfo).getBytes(); -}; + this.servername = util.getServerName(this.host) -/** - * Performs x^c mod n (RSA encryption or decryption operation). - * - * @param x the number to raise and mod. - * @param key the key to use. - * @param pub true if the key is public, false if private. - * - * @return the result of x^c mod n. - */ -var _modPow = function(x, key, pub) { - if(pub) { - return x.modPow(key.e, key.n); - } + this[kHandler] = handler - if(!key.p || !key.q) { - // allow calculation without CRT params (slow) - return x.modPow(key.d, key.n); + if (channels.create.hasSubscribers) { + channels.create.publish({ request: this }) + } } - // pre-compute dP, dQ, and qInv if necessary - if(!key.dP) { - key.dP = key.d.mod(key.p.subtract(BigInteger.ONE)); - } - if(!key.dQ) { - key.dQ = key.d.mod(key.q.subtract(BigInteger.ONE)); - } - if(!key.qInv) { - key.qInv = key.q.modInverse(key.p); + onBodySent (chunk) { + if (this[kHandler].onBodySent) { + try { + return this[kHandler].onBodySent(chunk) + } catch (err) { + this.abort(err) + } + } } - /* Chinese remainder theorem (CRT) states: - - Suppose n1, n2, ..., nk are positive integers which are pairwise - coprime (n1 and n2 have no common factors other than 1). For any - integers x1, x2, ..., xk there exists an integer x solving the - system of simultaneous congruences (where ~= means modularly - congruent so a ~= b mod n means a mod n = b mod n): - - x ~= x1 mod n1 - x ~= x2 mod n2 - ... - x ~= xk mod nk + onRequestSent () { + if (channels.bodySent.hasSubscribers) { + channels.bodySent.publish({ request: this }) + } - This system of congruences has a single simultaneous solution x - between 0 and n - 1. Furthermore, each xk solution and x itself - is congruent modulo the product n = n1*n2*...*nk. - So x1 mod n = x2 mod n = xk mod n = x mod n. + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) + } + } + } - The single simultaneous solution x can be solved with the following - equation: + onConnect (abort) { + assert(!this.aborted) + assert(!this.completed) - x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni. 
+ if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) + } + } - Where x is less than n, xi = x mod ni. + onHeaders (statusCode, headers, resume, statusText) { + assert(!this.aborted) + assert(!this.completed) - For RSA we are only concerned with k = 2. The modulus n = pq, where - p and q are coprime. The RSA decryption algorithm is: + if (channels.headers.hasSubscribers) { + channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) + } - y = x^d mod n + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } + } - Given the above: + onData (chunk) { + assert(!this.aborted) + assert(!this.completed) - x1 = x^d mod p - r1 = n/p = q - s1 = q^-1 mod p - x2 = x^d mod q - r2 = n/q = p - s2 = p^-1 mod q + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } + } - So y = (x1r1s1 + x2r2s2) mod n - = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n + onUpgrade (statusCode, headers, socket) { + assert(!this.aborted) + assert(!this.completed) - According to Fermat's Little Theorem, if the modulus P is prime, - for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P. - Since A is not divisible by P it follows that if: - N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore: + return this[kHandler].onUpgrade(statusCode, headers, socket) + } - A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort - to calculate). In order to calculate x^d mod p more quickly the - exponent d mod (p - 1) is stored in the RSA private key (the same - is done for x^d mod q). These values are referred to as dP and dQ - respectively. Therefore we now have: + onComplete (trailers) { + this.onFinally() - y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n + assert(!this.aborted) - Since we'll be reducing x^dP by modulo p (same for q) we can also - reduce x by p (and q respectively) before hand. Therefore, let + this.completed = true + if (channels.trailers.hasSubscribers) { + channels.trailers.publish({ request: this, trailers }) + } - xp = ((x mod p)^dP mod p), and - xq = ((x mod q)^dQ mod q), yielding: + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? + this.onError(err) + } + } - y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n + onError (error) { + this.onFinally() - This can be further reduced to a simple algorithm that only - requires 1 inverse (the q inverse is used) to be used and stored. - The algorithm is called Garner's algorithm. If qInv is the - inverse of q, we simply calculate: + if (channels.error.hasSubscribers) { + channels.error.publish({ request: this, error }) + } - y = (qInv*(xp - xq) mod p) * q + xq + if (this.aborted) { + return + } + this.aborted = true - However, there are two further complications. First, we need to - ensure that xp > xq to prevent signed BigIntegers from being used - so we add p until this is true (since we will be mod'ing with - p anyway). Then, there is a known timing attack on algorithms - using the CRT. To mitigate this risk, "cryptographic blinding" - should be used. This requires simply generating a random number r - between 0 and n-1 and its inverse and multiplying x by r^e before - calculating y and then multiplying y by r^-1 afterwards. Note that - r must be coprime with n (gcd(r, n) === 1) in order to have an - inverse. 
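To make the recombination step described in the removed forge comment above concrete, here is a minimal sketch of Garner's formula y = xq + q * (qInv * (xp - xq) mod p) using native BigInt; the names mirror the comment and are not part of forge's API, and cryptographic blinding is omitted for brevity:

// Illustrative only: CRT recombination as described in the comment above.
function modPow (base, exp, mod) {      // square-and-multiply
  let result = 1n
  base %= mod
  while (exp > 0n) {
    if (exp & 1n) result = (result * base) % mod
    base = (base * base) % mod
    exp >>= 1n
  }
  return result
}

function crtDecrypt (x, { p, q, dP, dQ, qInv }) {
  const xp = modPow(x % p, dP, p)       // x^dP mod p
  const xq = modPow(x % q, dQ, q)       // x^dQ mod q
  const h = (((xp - xq) % p + p) % p * qInv) % p
  return xq + h * q                     // y = xq + q * (qInv * (xp - xq) mod p)
}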
- */ + return this[kHandler].onError(error) + } - // cryptographic blinding - var r; - do { - r = new BigInteger( - forge.util.bytesToHex(forge.random.getBytes(key.n.bitLength() / 8)), - 16); - } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger.ONE)); - x = x.multiply(r.modPow(key.e, key.n)).mod(key.n); + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } - // calculate xp and xq - var xp = x.mod(key.p).modPow(key.dP, key.p); - var xq = x.mod(key.q).modPow(key.dQ, key.q); + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null + } + } - // xp must be larger than xq to avoid signed bit usage - while(xp.compareTo(xq) < 0) { - xp = xp.add(key.p); + // TODO: adjust to support H2 + addHeader (key, value) { + processHeader(this, key, value) + return this } - // do last step - var y = xp.subtract(xq) - .multiply(key.qInv).mod(key.p) - .multiply(key.q).add(xq); + static [kHTTP1BuildRequest] (origin, opts, handler) { + // TODO: Migrate header parsing here, to make Requests + // HTTP agnostic + return new Request(origin, opts, handler) + } - // remove effect of random for cryptographic blinding - y = y.multiply(r.modInverse(key.n)).mod(key.n); + static [kHTTP2BuildRequest] (origin, opts, handler) { + const headers = opts.headers + opts = { ...opts, headers: null } - return y; -}; + const request = new Request(origin, opts, handler) -/** - * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or - * 'encrypt' on a public key object instead. - * - * Performs RSA encryption. - * - * The parameter bt controls whether to put padding bytes before the - * message passed in. Set bt to either true or false to disable padding - * completely (in order to handle e.g. EMSA-PSS encoding seperately before), - * signaling whether the encryption operation is a public key operation - * (i.e. encrypting data) or not, i.e. private key operation (data signing). - * - * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01 - * (for signing) or 0x02 (for encryption). The key operation mode (private - * or public) is derived from this flag in that case). - * - * @param m the message to encrypt as a byte string. - * @param key the RSA key to use. - * @param bt for PKCS#1 v1.5 padding, the block type to use - * (0x01 for private key, 0x02 for public), - * to disable padding: true = public key, false = private key. - * - * @return the encrypted bytes as a string. 
- */ -pki.rsa.encrypt = function(m, key, bt) { - var pub = bt; - var eb; + request.headers = {} - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(request, headers[i], headers[i + 1], true) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(request, key, headers[key], true) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } - if(bt !== false && bt !== true) { - // legacy, default to PKCS#1 v1.5 padding - pub = (bt === 0x02); - eb = _encodePkcs1_v1_5(m, key, bt); - } else { - eb = forge.util.createBuffer(); - eb.putBytes(m); + return request } - // load encryption block as big integer 'x' - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var x = new BigInteger(eb.toHex(), 16); + static [kHTTP2CopyHeaders] (raw) { + const rawHeaders = raw.split('\r\n') + const headers = {} - // do RSA encryption - var y = _modPow(x, key, pub); + for (const header of rawHeaders) { + const [key, value] = header.split(': ') - // convert y into the encrypted data byte string, if y is shorter in - // bytes than k, then prepend zero bytes to fill up ed - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var yhex = y.toString(16); - var ed = forge.util.createBuffer(); - var zeros = k - Math.ceil(yhex.length / 2); - while(zeros > 0) { - ed.putByte(0x00); - --zeros; - } - ed.putBytes(forge.util.hexToBytes(yhex)); - return ed.getBytes(); -}; + if (value == null || value.length === 0) continue -/** - * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or - * 'verify' on a public key object instead. - * - * Performs RSA decryption. - * - * The parameter ml controls whether to apply PKCS#1 v1.5 padding - * or not. Set ml = false to disable padding removal completely - * (in order to handle e.g. EMSA-PSS later on) and simply pass back - * the RSA encryption block. - * - * @param ed the encrypted data to decrypt in as a byte string. - * @param key the RSA key to use. - * @param pub true for a public key operation, false for private. - * @param ml the message length, if known, false to disable padding. - * - * @return the decrypted message as a byte string. 
- */ -pki.rsa.decrypt = function(ed, key, pub, ml) { - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); + if (headers[key]) headers[key] += `,${value}` + else headers[key] = value + } - // error if the length of the encrypted data ED is not k - if(ed.length !== k) { - var error = new Error('Encrypted message length is invalid.'); - error.length = ed.length; - error.expected = k; - throw error; + return headers } +} - // convert encrypted data into a big integer - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var y = new BigInteger(forge.util.createBuffer(ed).toHex(), 16); - - // y must be less than the modulus or it wasn't the result of - // a previous mod operation (encryption) using that modulus - if(y.compareTo(key.n) >= 0) { - throw new Error('Encrypted message is invalid.'); +function processHeaderValue (key, val, skipAppend) { + if (val && typeof val === 'object') { + throw new InvalidArgumentError(`invalid ${key} header`) } - // do RSA decryption - var x = _modPow(y, key, pub); + val = val != null ? `${val}` : '' - // create the encryption block, if x is shorter in bytes than k, then - // prepend zero bytes to fill up eb - // FIXME: hex conversion inefficient, get BigInteger w/byte strings - var xhex = x.toString(16); - var eb = forge.util.createBuffer(); - var zeros = k - Math.ceil(xhex.length / 2); - while(zeros > 0) { - eb.putByte(0x00); - --zeros; + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) } - eb.putBytes(forge.util.hexToBytes(xhex)); - if(ml !== false) { - // legacy, default to PKCS#1 v1.5 padding - return _decodePkcs1_v1_5(eb.getBytes(), key, pub); - } - - // return message - return eb.getBytes(); -}; - -/** - * Creates an RSA key-pair generation state object. It is used to allow - * key-generation to be performed in steps. It also allows for a UI to - * display progress updates. - * - * @param bits the size for the private key in bits, defaults to 2048. - * @param e the public exponent to use, defaults to 65537 (0x10001). - * @param [options] the options to use. - * prng a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". - * algorithm the algorithm to use (default: 'PRIMEINC'). - * - * @return the state object to use to generate the key-pair. - */ -pki.rsa.createKeyPairGenerationState = function(bits, e, options) { - // TODO: migrate step-based prime generation code to forge.prime + return skipAppend ? 
val : `${key}: ${val}\r\n` +} - // set default bits - if(typeof(bits) === 'string') { - bits = parseInt(bits, 10); +function processHeader (request, key, val, skipAppend = false) { + if (val && (typeof val === 'object' && !Array.isArray(val))) { + throw new InvalidArgumentError(`invalid ${key} header`) + } else if (val === undefined) { + return } - bits = bits || 2048; - - // create prng with api that matches BigInteger secure random - options = options || {}; - var prng = options.prng || forge.random; - var rng = { - // x is an array to fill with bytes - nextBytes: function(x) { - var b = prng.getBytesSync(x.length); - for(var i = 0; i < x.length; ++i) { - x[i] = b.charCodeAt(i); - } - } - }; - var algorithm = options.algorithm || 'PRIMEINC'; - - // create PRIMEINC algorithm state - var rval; - if(algorithm === 'PRIMEINC') { - rval = { - algorithm: algorithm, - state: 0, - bits: bits, - rng: rng, - eInt: e || 65537, - e: new BigInteger(null), - p: null, - q: null, - qBits: bits >> 1, - pBits: bits - (bits >> 1), - pqState: 0, - num: null, - keys: null - }; - rval.e.fromInt(rval.eInt); + if ( + request.host === null && + key.length === 4 && + key.toLowerCase() === 'host' + ) { + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + // Consumed by Client + request.host = val + } else if ( + request.contentLength === null && + key.length === 14 && + key.toLowerCase() === 'content-length' + ) { + request.contentLength = parseInt(val, 10) + if (!Number.isFinite(request.contentLength)) { + throw new InvalidArgumentError('invalid content-length header') + } + } else if ( + request.contentType === null && + key.length === 12 && + key.toLowerCase() === 'content-type' + ) { + request.contentType = val + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } else if ( + key.length === 17 && + key.toLowerCase() === 'transfer-encoding' + ) { + throw new InvalidArgumentError('invalid transfer-encoding header') + } else if ( + key.length === 10 && + key.toLowerCase() === 'connection' + ) { + const value = typeof val === 'string' ? val.toLowerCase() : null + if (value !== 'close' && value !== 'keep-alive') { + throw new InvalidArgumentError('invalid connection header') + } else if (value === 'close') { + request.reset = true + } + } else if ( + key.length === 10 && + key.toLowerCase() === 'keep-alive' + ) { + throw new InvalidArgumentError('invalid keep-alive header') + } else if ( + key.length === 7 && + key.toLowerCase() === 'upgrade' + ) { + throw new InvalidArgumentError('invalid upgrade header') + } else if ( + key.length === 6 && + key.toLowerCase() === 'expect' + ) { + throw new NotSupportedError('expect header not supported') + } else if (tokenRegExp.exec(key) === null) { + throw new InvalidArgumentError('invalid header key') } else { - throw new Error('Invalid key generation algorithm: ' + algorithm); - } - - return rval; -}; - -/** - * Attempts to runs the key-generation algorithm for at most n seconds - * (approximately) using the given state. When key-generation has completed, - * the keys will be stored in state.keys. - * - * To use this function to update a UI while generating a key or to prevent - * causing browser lockups/warnings, set "n" to a value other than 0. 
A - * simple pattern for generating a key and showing a progress indicator is: - * - * var state = pki.rsa.createKeyPairGenerationState(2048); - * var step = function() { - * // step key-generation, run algorithm for 100 ms, repeat - * if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) { - * setTimeout(step, 1); - * } else { - * // key-generation complete - * // TODO: turn off progress indicator here - * // TODO: use the generated key-pair in "state.keys" - * } - * }; - * // TODO: turn on progress indicator here - * setTimeout(step, 0); - * - * @param state the state to use. - * @param n the maximum number of milliseconds to run the algorithm for, 0 - * to run the algorithm to completion. - * - * @return true if the key-generation completed, false if not. - */ -pki.rsa.stepKeyPairGenerationState = function(state, n) { - // set default algorithm if not set - if(!('algorithm' in state)) { - state.algorithm = 'PRIMEINC'; - } - - // TODO: migrate step-based prime generation code to forge.prime - // TODO: abstract as PRIMEINC algorithm - - // do key generation (based on Tom Wu's rsa.js, see jsbn.js license) - // with some minor optimizations and designed to run in steps - - // local state vars - var THIRTY = new BigInteger(null); - THIRTY.fromInt(30); - var deltaIdx = 0; - var op_or = function(x, y) {return x | y;}; - - // keep stepping until time limit is reached or done - var t1 = +new Date(); - var t2; - var total = 0; - while(state.keys === null && (n <= 0 || total < n)) { - // generate p or q - if(state.state === 0) { - /* Note: All primes are of the form: - - 30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i - - When we generate a random number, we always align it at 30k + 1. Each - time the number is determined not to be prime we add to get to the - next 'i', eg: if the number was at 30k + 1 we add 6. */ - var bits = (state.p === null) ? state.pBits : state.qBits; - var bits1 = bits - 1; - - // get a random number - if(state.pqState === 0) { - state.num = new BigInteger(bits, state.rng); - // force MSB set - if(!state.num.testBit(bits1)) { - state.num.bitwiseTo( - BigInteger.ONE.shiftLeft(bits1), op_or, state.num); - } - // align number on 30k+1 boundary - state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0); - deltaIdx = 0; - - ++state.pqState; - } else if(state.pqState === 1) { - // try to make the number a prime - if(state.num.bitLength() > bits) { - // overflow, try again - state.pqState = 0; - // do primality test - } else if(state.num.isProbablePrime( - _getMillerRabinTests(state.num.bitLength()))) { - ++state.pqState; - } else { - // get next potential prime - state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); - } - } else if(state.pqState === 2) { - // ensure number is coprime with e - state.pqState = - (state.num.subtract(BigInteger.ONE).gcd(state.e) - .compareTo(BigInteger.ONE) === 0) ? 
3 : 0; - } else if(state.pqState === 3) { - // store p or q - state.pqState = 0; - if(state.p === null) { - state.p = state.num; + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + if (skipAppend) { + if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` + else request.headers[key] = processHeaderValue(key, val[i], skipAppend) } else { - state.q = state.num; - } - - // advance state if both p and q are ready - if(state.p !== null && state.q !== null) { - ++state.state; + request.headers += processHeaderValue(key, val[i]) } - state.num = null; } - } else if(state.state === 1) { - // ensure p is larger than q (swap them if not) - if(state.p.compareTo(state.q) < 0) { - state.num = state.p; - state.p = state.q; - state.q = state.num; - } - ++state.state; - } else if(state.state === 2) { - // compute phi: (p - 1)(q - 1) (Euler's totient function) - state.p1 = state.p.subtract(BigInteger.ONE); - state.q1 = state.q.subtract(BigInteger.ONE); - state.phi = state.p1.multiply(state.q1); - ++state.state; - } else if(state.state === 3) { - // ensure e and phi are coprime - if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) === 0) { - // phi and e are coprime, advance - ++state.state; - } else { - // phi and e aren't coprime, so generate a new p and q - state.p = null; - state.q = null; - state.state = 0; - } - } else if(state.state === 4) { - // create n, ensure n is has the right number of bits - state.n = state.p.multiply(state.q); - - // ensure n is right number of bits - if(state.n.bitLength() === state.bits) { - // success, advance - ++state.state; - } else { - // failed, get new q - state.q = null; - state.state = 0; - } - } else if(state.state === 5) { - // set keys - var d = state.e.modInverse(state.phi); - state.keys = { - privateKey: pki.rsa.setPrivateKey( - state.n, state.e, d, state.p, state.q, - d.mod(state.p1), d.mod(state.q1), - state.q.modInverse(state.p)), - publicKey: pki.rsa.setPublicKey(state.n, state.e) - }; + } else { + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) } - - // update timing - t2 = +new Date(); - total += t2 - t1; - t1 = t2; } +} - return state.keys !== null; -}; +module.exports = Request -/** - * Generates an RSA public-private key pair in a single call. - * - * To generate a key-pair in steps (to allow for progress updates and to - * prevent blocking or warnings in slow browsers) then use the key-pair - * generation state functions. - * - * To generate a key-pair asynchronously (either through web-workers, if - * available, or by breaking up the work on the main thread), pass a - * callback function. - * - * @param [bits] the size for the private key in bits, defaults to 2048. - * @param [e] the public exponent to use, defaults to 65537. - * @param [options] options for key-pair generation, if given then 'bits' - * and 'e' must *not* be given: - * bits the size for the private key in bits, (default: 2048). - * e the public exponent to use, (default: 65537 (0x10001)). - * workerScript the worker script URL. - * workers the number of web workers (if supported) to use, - * (default: 2). - * workLoad the size of the work load, ie: number of possible prime - * numbers for each web worker to check per work assignment, - * (default: 100). - * prng a custom crypto-secure pseudo-random number generator to use, - * that must define "getBytesSync". Disables use of native APIs. 
- * algorithm the algorithm to use (default: 'PRIMEINC'). - * @param [callback(err, keypair)] called once the operation completes. - * - * @return an object with privateKey and publicKey properties. - */ -pki.rsa.generateKeyPair = function(bits, e, options, callback) { - // (bits), (options), (callback) - if(arguments.length === 1) { - if(typeof bits === 'object') { - options = bits; - bits = undefined; - } else if(typeof bits === 'function') { - callback = bits; - bits = undefined; - } - } else if(arguments.length === 2) { - // (bits, e), (bits, options), (bits, callback), (options, callback) - if(typeof bits === 'number') { - if(typeof e === 'function') { - callback = e; - e = undefined; - } else if(typeof e !== 'number') { - options = e; - e = undefined; - } - } else { - options = bits; - callback = e; - bits = undefined; - e = undefined; - } - } else if(arguments.length === 3) { - // (bits, e, options), (bits, e, callback), (bits, options, callback) - if(typeof e === 'number') { - if(typeof options === 'function') { - callback = options; - options = undefined; - } - } else { - callback = options; - options = e; - e = undefined; - } - } - options = options || {}; - if(bits === undefined) { - bits = options.bits || 2048; - } - if(e === undefined) { - e = options.e || 0x10001; - } - - // use native code if permitted, available, and parameters are acceptable - if(!forge.options.usePureJavaScript && !options.prng && - bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) { - if(callback) { - // try native async - if(_detectNodeCrypto('generateKeyPair')) { - return _crypto.generateKeyPair('rsa', { - modulusLength: bits, - publicExponent: e, - publicKeyEncoding: { - type: 'spki', - format: 'pem' - }, - privateKeyEncoding: { - type: 'pkcs8', - format: 'pem' - } - }, function(err, pub, priv) { - if(err) { - return callback(err); - } - callback(null, { - privateKey: pki.privateKeyFromPem(priv), - publicKey: pki.publicKeyFromPem(pub) - }); - }); - } - if(_detectSubtleCrypto('generateKey') && - _detectSubtleCrypto('exportKey')) { - // use standard native generateKey - return util.globalScope.crypto.subtle.generateKey({ - name: 'RSASSA-PKCS1-v1_5', - modulusLength: bits, - publicExponent: _intToUint8Array(e), - hash: {name: 'SHA-256'} - }, true /* key can be exported*/, ['sign', 'verify']) - .then(function(pair) { - return util.globalScope.crypto.subtle.exportKey( - 'pkcs8', pair.privateKey); - // avoiding catch(function(err) {...}) to support IE <= 8 - }).then(undefined, function(err) { - callback(err); - }).then(function(pkcs8) { - if(pkcs8) { - var privateKey = pki.privateKeyFromAsn1( - asn1.fromDer(forge.util.createBuffer(pkcs8))); - callback(null, { - privateKey: privateKey, - publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e) - }); - } - }); - } - if(_detectSubtleMsCrypto('generateKey') && - _detectSubtleMsCrypto('exportKey')) { - var genOp = util.globalScope.msCrypto.subtle.generateKey({ - name: 'RSASSA-PKCS1-v1_5', - modulusLength: bits, - publicExponent: _intToUint8Array(e), - hash: {name: 'SHA-256'} - }, true /* key can be exported*/, ['sign', 'verify']); - genOp.oncomplete = function(e) { - var pair = e.target.result; - var exportOp = util.globalScope.msCrypto.subtle.exportKey( - 'pkcs8', pair.privateKey); - exportOp.oncomplete = function(e) { - var pkcs8 = e.target.result; - var privateKey = pki.privateKeyFromAsn1( - asn1.fromDer(forge.util.createBuffer(pkcs8))); - callback(null, { - privateKey: privateKey, - publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e) - }); 
- }; - exportOp.onerror = function(err) { - callback(err); - }; - }; - genOp.onerror = function(err) { - callback(err); - }; - return; - } - } else { - // try native sync - if(_detectNodeCrypto('generateKeyPairSync')) { - var keypair = _crypto.generateKeyPairSync('rsa', { - modulusLength: bits, - publicExponent: e, - publicKeyEncoding: { - type: 'spki', - format: 'pem' - }, - privateKeyEncoding: { - type: 'pkcs8', - format: 'pem' - } - }); - return { - privateKey: pki.privateKeyFromPem(keypair.privateKey), - publicKey: pki.publicKeyFromPem(keypair.publicKey) - }; - } - } - } - // use JavaScript implementation - var state = pki.rsa.createKeyPairGenerationState(bits, e, options); - if(!callback) { - pki.rsa.stepKeyPairGenerationState(state, 0); - return state.keys; - } - _generateKeyPair(state, options, callback); -}; +/***/ }), + +/***/ 2785: +/***/ ((module) => { + +module.exports = { + kClose: Symbol('close'), + kDestroy: Symbol('destroy'), + kDispatch: Symbol('dispatch'), + kUrl: Symbol('url'), + kWriting: Symbol('writing'), + kResuming: Symbol('resuming'), + kQueue: Symbol('queue'), + kConnect: Symbol('connect'), + kConnecting: Symbol('connecting'), + kHeadersList: Symbol('headers list'), + kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), + kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), + kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), + kKeepAliveTimeoutValue: Symbol('keep alive timeout'), + kKeepAlive: Symbol('keep alive'), + kHeadersTimeout: Symbol('headers timeout'), + kBodyTimeout: Symbol('body timeout'), + kServerName: Symbol('server name'), + kLocalAddress: Symbol('local address'), + kHost: Symbol('host'), + kNoRef: Symbol('no ref'), + kBodyUsed: Symbol('used'), + kRunning: Symbol('running'), + kBlocking: Symbol('blocking'), + kPending: Symbol('pending'), + kSize: Symbol('size'), + kBusy: Symbol('busy'), + kQueued: Symbol('queued'), + kFree: Symbol('free'), + kConnected: Symbol('connected'), + kClosed: Symbol('closed'), + kNeedDrain: Symbol('need drain'), + kReset: Symbol('reset'), + kDestroyed: Symbol.for('nodejs.stream.destroyed'), + kMaxHeadersSize: Symbol('max headers size'), + kRunningIdx: Symbol('running index'), + kPendingIdx: Symbol('pending index'), + kError: Symbol('error'), + kClients: Symbol('clients'), + kClient: Symbol('client'), + kParser: Symbol('parser'), + kOnDestroyed: Symbol('destroy callbacks'), + kPipelining: Symbol('pipelining'), + kSocket: Symbol('socket'), + kHostHeader: Symbol('host header'), + kConnector: Symbol('connector'), + kStrictContentLength: Symbol('strict content length'), + kMaxRedirections: Symbol('maxRedirections'), + kMaxRequests: Symbol('maxRequestsPerClient'), + kProxy: Symbol('proxy agent options'), + kCounter: Symbol('socket request counter'), + kInterceptors: Symbol('dispatch interceptors'), + kMaxResponseSize: Symbol('max response size'), + kHTTP2Session: Symbol('http2Session'), + kHTTP2SessionState: Symbol('http2Session state'), + kHTTP2BuildRequest: Symbol('http2 build request'), + kHTTP1BuildRequest: Symbol('http1 build request'), + kHTTP2CopyHeaders: Symbol('http2 copy headers'), + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') +} + -/** - * Sets an RSA public key from BigIntegers modulus and exponent. - * - * @param n the modulus. - * @param e the exponent. - * - * @return the public key. 
- */ -pki.setRsaPublicKey = pki.rsa.setPublicKey = function(n, e) { - var key = { - n: n, - e: e - }; +/***/ }), - /** - * Encrypts the given data with this public key. Newer applications - * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for - * legacy applications. - * - * @param data the byte string to encrypt. - * @param scheme the encryption scheme to use: - * 'RSAES-PKCS1-V1_5' (default), - * 'RSA-OAEP', - * 'RAW', 'NONE', or null to perform raw RSA encryption, - * an object with an 'encode' property set to a function - * with the signature 'function(data, key)' that returns - * a binary-encoded string representing the encoded data. - * @param schemeOptions any scheme-specific options. - * - * @return the encrypted byte string. - */ - key.encrypt = function(data, scheme, schemeOptions) { - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); - } else if(scheme === undefined) { - scheme = 'RSAES-PKCS1-V1_5'; - } +/***/ 3983: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if(scheme === 'RSAES-PKCS1-V1_5') { - scheme = { - encode: function(m, key, pub) { - return _encodePkcs1_v1_5(m, key, 0x02).getBytes(); - } - }; - } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') { - scheme = { - encode: function(m, key) { - return forge.pkcs1.encode_rsa_oaep(key, m, schemeOptions); - } - }; - } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) { - scheme = {encode: function(e) {return e;}}; - } else if(typeof scheme === 'string') { - throw new Error('Unsupported encryption scheme: "' + scheme + '".'); - } +"use strict"; - // do scheme-based encoding then rsa encryption - var e = scheme.encode(data, key, true); - return pki.rsa.encrypt(e, key, true); - }; - /** - * Verifies the given signature against the given digest. - * - * PKCS#1 supports multiple (currently two) signature schemes: - * RSASSA-PKCS1-V1_5 and RSASSA-PSS. - * - * By default this implementation uses the "old scheme", i.e. - * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the - * signature is an OCTET STRING that holds a DigestInfo. - * - * DigestInfo ::= SEQUENCE { - * digestAlgorithm DigestAlgorithmIdentifier, - * digest Digest - * } - * DigestAlgorithmIdentifier ::= AlgorithmIdentifier - * Digest ::= OCTET STRING - * - * To perform PSS signature verification, provide an instance - * of Forge PSS object as the scheme parameter. - * - * @param digest the message digest hash to compare against the signature, - * as a binary-encoded string. - * @param signature the signature to verify, as a binary-encoded string. - * @param scheme signature verification scheme to use: - * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5, - * a Forge PSS object for RSASSA-PSS, - * 'NONE' or null for none, DigestInfo will not be expected, but - * PKCS#1 v1.5 padding will still be used. - * @param options optional verify options - * _parseAllDigestBytes testing flag to control parsing of all - * digest bytes. Unsupported and not for general usage. - * (default: true) - * - * @return true if the signature was verified, false if not. 
- */ - key.verify = function(digest, signature, scheme, options) { - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); - } else if(scheme === undefined) { - scheme = 'RSASSA-PKCS1-V1_5'; - } - if(options === undefined) { - options = { - _parseAllDigestBytes: true - }; - } - if(!('_parseAllDigestBytes' in options)) { - options._parseAllDigestBytes = true; - } +const assert = __nccwpck_require__(9491) +const { kDestroyed, kBodyUsed } = __nccwpck_require__(2785) +const { IncomingMessage } = __nccwpck_require__(3685) +const stream = __nccwpck_require__(2781) +const net = __nccwpck_require__(1808) +const { InvalidArgumentError } = __nccwpck_require__(8045) +const { Blob } = __nccwpck_require__(4300) +const nodeUtil = __nccwpck_require__(3837) +const { stringify } = __nccwpck_require__(3477) - if(scheme === 'RSASSA-PKCS1-V1_5') { - scheme = { - verify: function(digest, d) { - // remove padding - d = _decodePkcs1_v1_5(d, key, true); - // d is ASN.1 BER-encoded DigestInfo - var obj = asn1.fromDer(d, { - parseAllBytes: options._parseAllDigestBytes - }); +const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) - // validate DigestInfo - var capture = {}; - var errors = []; - if(!asn1.validate(obj, digestInfoValidator, capture, errors)) { - var error = new Error( - 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' + - 'DigestInfo value.'); - error.errors = errors; - throw error; - } - // check hash algorithm identifier - // see PKCS1-v1-5DigestAlgorithms in RFC 8017 - // FIXME: add support to vaidator for strict value choices - var oid = asn1.derToOid(capture.algorithmIdentifier); - if(!(oid === forge.oids.md2 || - oid === forge.oids.md5 || - oid === forge.oids.sha1 || - oid === forge.oids.sha224 || - oid === forge.oids.sha256 || - oid === forge.oids.sha384 || - oid === forge.oids.sha512 || - oid === forge.oids['sha512-224'] || - oid === forge.oids['sha512-256'])) { - var error = new Error( - 'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.'); - error.oid = oid; - throw error; - } +function nop () {} - // special check for md2 and md5 that NULL parameters exist - if(oid === forge.oids.md2 || oid === forge.oids.md5) { - if(!('parameters' in capture)) { - throw new Error( - 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' + - 'DigestInfo value. 
' + - 'Missing algorithm identifer NULL parameters.'); - } - } +function isStream (obj) { + return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +} - // compare the given digest to the decrypted one - return digest === capture.digest; - } - }; - } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) { - scheme = { - verify: function(digest, d) { - // remove padding - d = _decodePkcs1_v1_5(d, key, true); - return digest === d; - } - }; - } +// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) +function isBlobLike (object) { + return (Blob && object instanceof Blob) || ( + object && + typeof object === 'object' && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + /^(Blob|File)$/.test(object[Symbol.toStringTag]) + ) +} - // do rsa decryption w/o any decoding, then verify -- which does decoding - var d = pki.rsa.decrypt(signature, key, true, false); - return scheme.verify(digest, d, key.n.bitLength()); - }; +function buildURL (url, queryParams) { + if (url.includes('?') || url.includes('#')) { + throw new Error('Query params cannot be passed when url already contains "?" or "#".') + } - return key; -}; + const stringified = stringify(queryParams) -/** - * Sets an RSA private key from BigIntegers modulus, exponent, primes, - * prime exponents, and modular multiplicative inverse. - * - * @param n the modulus. - * @param e the public exponent. - * @param d the private exponent ((inverse of e) mod n). - * @param p the first prime. - * @param q the second prime. - * @param dP exponent1 (d mod (p-1)). - * @param dQ exponent2 (d mod (q-1)). - * @param qInv ((inverse of q) mod p) - * - * @return the private key. - */ -pki.setRsaPrivateKey = pki.rsa.setPrivateKey = function( - n, e, d, p, q, dP, dQ, qInv) { - var key = { - n: n, - e: e, - d: d, - p: p, - q: q, - dP: dP, - dQ: dQ, - qInv: qInv - }; + if (stringified) { + url += '?' + stringified + } - /** - * Decrypts the given data with this private key. The decryption scheme - * must match the one used to encrypt the data. - * - * @param data the byte string to decrypt. - * @param scheme the decryption scheme to use: - * 'RSAES-PKCS1-V1_5' (default), - * 'RSA-OAEP', - * 'RAW', 'NONE', or null to perform raw RSA decryption. - * @param schemeOptions any scheme-specific options. - * - * @return the decrypted byte string. 
- */ - key.decrypt = function(data, scheme, schemeOptions) { - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); - } else if(scheme === undefined) { - scheme = 'RSAES-PKCS1-V1_5'; - } + return url +} - // do rsa decryption w/o any decoding - var d = pki.rsa.decrypt(data, key, false, false); +function parseURL (url) { + if (typeof url === 'string') { + url = new URL(url) - if(scheme === 'RSAES-PKCS1-V1_5') { - scheme = {decode: _decodePkcs1_v1_5}; - } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') { - scheme = { - decode: function(d, key) { - return forge.pkcs1.decode_rsa_oaep(key, d, schemeOptions); - } - }; - } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) { - scheme = {decode: function(d) {return d;}}; - } else { - throw new Error('Unsupported encryption scheme: "' + scheme + '".'); + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') } - // decode according to scheme - return scheme.decode(d, key, false); - }; + return url + } - /** - * Signs the given digest, producing a signature. - * - * PKCS#1 supports multiple (currently two) signature schemes: - * RSASSA-PKCS1-V1_5 and RSASSA-PSS. - * - * By default this implementation uses the "old scheme", i.e. - * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide - * an instance of Forge PSS object as the scheme parameter. - * - * @param md the message digest object with the hash to sign. - * @param scheme the signature scheme to use: - * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5, - * a Forge PSS object for RSASSA-PSS, - * 'NONE' or null for none, DigestInfo will not be used but - * PKCS#1 v1.5 padding will still be used. - * - * @return the signature as a byte string. - */ - key.sign = function(md, scheme) { - /* Note: The internal implementation of RSA operations is being - transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy - code like the use of an encoding block identifier 'bt' will eventually - be removed. 
*/ + if (!url || typeof url !== 'object') { + throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + } - // private key operation - var bt = false; + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } - if(typeof scheme === 'string') { - scheme = scheme.toUpperCase(); + if (!(url instanceof URL)) { + if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { + throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') } - if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') { - scheme = {encode: emsaPkcs1v15encode}; - bt = 0x01; - } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) { - scheme = {encode: function() {return md;}}; - bt = 0x01; + if (url.path != null && typeof url.path !== 'string') { + throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') } - // encode and then encrypt - var d = scheme.encode(md, key.n.bitLength()); - return pki.rsa.encrypt(d, key, bt); - }; + if (url.pathname != null && typeof url.pathname !== 'string') { + throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') + } - return key; -}; + if (url.hostname != null && typeof url.hostname !== 'string') { + throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') + } -/** - * Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object. - * - * @param rsaKey the ASN.1 RSAPrivateKey. - * - * @return the ASN.1 PrivateKeyInfo. - */ -pki.wrapRsaPrivateKey = function(rsaKey) { - // PrivateKeyInfo - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version (0) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(0).getBytes()), - // privateKeyAlgorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.rsaEncryption).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // PrivateKey - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, - asn1.toDer(rsaKey).getBytes()) - ]); -}; + if (url.origin != null && typeof url.origin !== 'string') { + throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') + } -/** - * Converts a private key from an ASN.1 object. - * - * @param obj the ASN.1 representation of a PrivateKeyInfo containing an - * RSAPrivateKey or an RSAPrivateKey. - * - * @return the private key. - */ -pki.privateKeyFromAsn1 = function(obj) { - // get PrivateKeyInfo - var capture = {}; - var errors = []; - if(asn1.validate(obj, privateKeyValidator, capture, errors)) { - obj = asn1.fromDer(forge.util.createBuffer(capture.privateKey)); - } - - // get RSAPrivateKey - capture = {}; - errors = []; - if(!asn1.validate(obj, rsaPrivateKeyValidator, capture, errors)) { - var error = new Error('Cannot read private key. ' + - 'ASN.1 object does not contain an RSAPrivateKey.'); - error.errors = errors; - throw error; - } - - // Note: Version is currently ignored. 
- // capture.privateKeyVersion - // FIXME: inefficient, get a BigInteger that uses byte strings - var n, e, d, p, q, dP, dQ, qInv; - n = forge.util.createBuffer(capture.privateKeyModulus).toHex(); - e = forge.util.createBuffer(capture.privateKeyPublicExponent).toHex(); - d = forge.util.createBuffer(capture.privateKeyPrivateExponent).toHex(); - p = forge.util.createBuffer(capture.privateKeyPrime1).toHex(); - q = forge.util.createBuffer(capture.privateKeyPrime2).toHex(); - dP = forge.util.createBuffer(capture.privateKeyExponent1).toHex(); - dQ = forge.util.createBuffer(capture.privateKeyExponent2).toHex(); - qInv = forge.util.createBuffer(capture.privateKeyCoefficient).toHex(); - - // set private key - return pki.setRsaPrivateKey( - new BigInteger(n, 16), - new BigInteger(e, 16), - new BigInteger(d, 16), - new BigInteger(p, 16), - new BigInteger(q, 16), - new BigInteger(dP, 16), - new BigInteger(dQ, 16), - new BigInteger(qInv, 16)); -}; + const port = url.port != null + ? url.port + : (url.protocol === 'https:' ? 443 : 80) + let origin = url.origin != null + ? url.origin + : `${url.protocol}//${url.hostname}:${port}` + let path = url.path != null + ? url.path + : `${url.pathname || ''}${url.search || ''}` -/** - * Converts a private key to an ASN.1 RSAPrivateKey. - * - * @param key the private key. - * - * @return the ASN.1 representation of an RSAPrivateKey. - */ -pki.privateKeyToAsn1 = pki.privateKeyToRSAPrivateKey = function(key) { - // RSAPrivateKey - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version (0 = only 2 primes, 1 multiple primes) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(0).getBytes()), - // modulus (n) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.n)), - // publicExponent (e) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.e)), - // privateExponent (d) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.d)), - // privateKeyPrime1 (p) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.p)), - // privateKeyPrime2 (q) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.q)), - // privateKeyExponent1 (dP) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.dP)), - // privateKeyExponent2 (dQ) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.dQ)), - // coefficient (qInv) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.qInv)) - ]); -}; + if (origin.endsWith('/')) { + origin = origin.substring(0, origin.length - 1) + } -/** - * Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey. - * - * @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey. - * - * @return the public key. - */ -pki.publicKeyFromAsn1 = function(obj) { - // get SubjectPublicKeyInfo - var capture = {}; - var errors = []; - if(asn1.validate(obj, publicKeyValidator, capture, errors)) { - // get oid - var oid = asn1.derToOid(capture.publicKeyOid); - if(oid !== pki.oids.rsaEncryption) { - var error = new Error('Cannot read public key. 
Unknown OID.'); - error.oid = oid; - throw error; + if (path && !path.startsWith('/')) { + path = `/${path}` } - obj = capture.rsaPublicKey; + // new URL(path, origin) is unsafe when `path` contains an absolute URL + // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: + // If first parameter is a relative URL, second param is required, and will be used as the base URL. + // If first parameter is an absolute URL, a given second param will be ignored. + url = new URL(origin + path) } - // get RSA params - errors = []; - if(!asn1.validate(obj, rsaPublicKeyValidator, capture, errors)) { - var error = new Error('Cannot read public key. ' + - 'ASN.1 object does not contain an RSAPublicKey.'); - error.errors = errors; - throw error; + return url +} + +function parseOrigin (url) { + url = parseURL(url) + + if (url.pathname !== '/' || url.search || url.hash) { + throw new InvalidArgumentError('invalid url') } - // FIXME: inefficient, get a BigInteger that uses byte strings - var n = forge.util.createBuffer(capture.publicKeyModulus).toHex(); - var e = forge.util.createBuffer(capture.publicKeyExponent).toHex(); + return url +} - // set public key - return pki.setRsaPublicKey( - new BigInteger(n, 16), - new BigInteger(e, 16)); -}; +function getHostname (host) { + if (host[0] === '[') { + const idx = host.indexOf(']') -/** - * Converts a public key to an ASN.1 SubjectPublicKeyInfo. - * - * @param key the public key. - * - * @return the asn1 representation of a SubjectPublicKeyInfo. - */ -pki.publicKeyToAsn1 = pki.publicKeyToSubjectPublicKeyInfo = function(key) { - // SubjectPublicKeyInfo - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AlgorithmIdentifier - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(pki.oids.rsaEncryption).getBytes()), - // parameters (null) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]), - // subjectPublicKey - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [ - pki.publicKeyToRSAPublicKey(key) - ]) - ]); -}; + assert(idx !== -1) + return host.substring(1, idx) + } -/** - * Converts a public key to an ASN.1 RSAPublicKey. - * - * @param key the public key. - * - * @return the asn1 representation of a RSAPublicKey. - */ -pki.publicKeyToRSAPublicKey = function(key) { - // RSAPublicKey - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // modulus (n) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.n)), - // publicExponent (e) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - _bnToBytes(key.e)) - ]); -}; + const idx = host.indexOf(':') + if (idx === -1) return host -/** - * Encodes a message using PKCS#1 v1.5 padding. - * - * @param m the message to encode. - * @param key the RSA key to use. - * @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02 - * (for encryption). - * - * @return the padded byte buffer. 
- */ -function _encodePkcs1_v1_5(m, key, bt) { - var eb = forge.util.createBuffer(); + return host.substring(0, idx) +} + +// IP addresses are not valid server names per RFC6066 +// > Currently, the only server names supported are DNS hostnames +function getServerName (host) { + if (!host) { + return null + } - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); + assert.strictEqual(typeof host, 'string') - /* use PKCS#1 v1.5 padding */ - if(m.length > (k - 11)) { - var error = new Error('Message is too long for PKCS#1 v1.5 padding.'); - error.length = m.length; - error.max = k - 11; - throw error; + const servername = getHostname(host) + if (net.isIP(servername)) { + return '' } - /* A block type BT, a padding string PS, and the data D shall be - formatted into an octet string EB, the encryption block: + return servername +} - EB = 00 || BT || PS || 00 || D +function deepClone (obj) { + return JSON.parse(JSON.stringify(obj)) +} - The block type BT shall be a single octet indicating the structure of - the encryption block. For this version of the document it shall have - value 00, 01, or 02. For a private-key operation, the block type - shall be 00 or 01. For a public-key operation, it shall be 02. +function isAsyncIterable (obj) { + return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') +} - The padding string PS shall consist of k-3-||D|| octets. For block - type 00, the octets shall have value 00; for block type 01, they - shall have value FF; and for block type 02, they shall be - pseudorandomly generated and nonzero. This makes the length of the - encryption block EB equal to k. */ +function isIterable (obj) { + return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) +} - // build the encryption block - eb.putByte(0x00); - eb.putByte(bt); +function bodyLength (body) { + if (body == null) { + return 0 + } else if (isStream(body)) { + const state = body._readableState + return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) + ? state.length + : null + } else if (isBlobLike(body)) { + return body.size != null ? body.size : null + } else if (isBuffer(body)) { + return body.byteLength + } - // create the padding - var padNum = k - 3 - m.length; - var padByte; - // private key op - if(bt === 0x00 || bt === 0x01) { - padByte = (bt === 0x00) ? 
0x00 : 0xFF; - for(var i = 0; i < padNum; ++i) { - eb.putByte(padByte); - } - } else { - // public key op - // pad with random non-zero values - while(padNum > 0) { - var numZeros = 0; - var padBytes = forge.random.getBytes(padNum); - for(var i = 0; i < padNum; ++i) { - padByte = padBytes.charCodeAt(i); - if(padByte === 0) { - ++numZeros; - } else { - eb.putByte(padByte); - } - } - padNum = numZeros; + return null +} + +function isDestroyed (stream) { + return !stream || !!(stream.destroyed || stream[kDestroyed]) +} + +function isReadableAborted (stream) { + const state = stream && stream._readableState + return isDestroyed(stream) && state && !state.endEmitted +} + +function destroy (stream, err) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { + return + } + + if (typeof stream.destroy === 'function') { + if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { + // See: https://github.com/nodejs/node/pull/38505/files + stream.socket = null } + + stream.destroy(err) + } else if (err) { + process.nextTick((stream, err) => { + stream.emit('error', err) + }, stream, err) } - // zero followed by message - eb.putByte(0x00); - eb.putBytes(m); + if (stream.destroyed !== true) { + stream[kDestroyed] = true + } +} - return eb; +const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ +function parseKeepAliveTimeout (val) { + const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) + return m ? parseInt(m[1], 10) * 1000 : null } -/** - * Decodes a message using PKCS#1 v1.5 padding. - * - * @param em the message to decode. - * @param key the RSA key to use. - * @param pub true if the key is a public key, false if it is private. - * @param ml the message length, if specified. - * - * @return the decoded bytes. - */ -function _decodePkcs1_v1_5(em, key, pub, ml) { - // get the length of the modulus in bytes - var k = Math.ceil(key.n.bitLength() / 8); - - /* It is an error if any of the following conditions occurs: - - 1. The encryption block EB cannot be parsed unambiguously. - 2. The padding string PS consists of fewer than eight octets - or is inconsisent with the block type BT. - 3. The decryption process is a public-key operation and the block - type BT is not 00 or 01, or the decryption process is a - private-key operation and the block type is not 02. 
- */ +function parseHeaders (headers, obj = {}) { + // For H2 support + if (!Array.isArray(headers)) return headers - // parse the encryption block - var eb = forge.util.createBuffer(em); - var first = eb.getByte(); - var bt = eb.getByte(); - if(first !== 0x00 || - (pub && bt !== 0x00 && bt !== 0x01) || - (!pub && bt != 0x02) || - (pub && bt === 0x00 && typeof(ml) === 'undefined')) { - throw new Error('Encryption block is invalid.'); - } - - var padNum = 0; - if(bt === 0x00) { - // check all padding bytes for 0x00 - padNum = k - 3 - ml; - for(var i = 0; i < padNum; ++i) { - if(eb.getByte() !== 0x00) { - throw new Error('Encryption block is invalid.'); - } - } - } else if(bt === 0x01) { - // find the first byte that isn't 0xFF, should be after all padding - padNum = 0; - while(eb.length() > 1) { - if(eb.getByte() !== 0xFF) { - --eb.read; - break; + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i].toString().toLowerCase() + let val = obj[key] + + if (!val) { + if (Array.isArray(headers[i + 1])) { + obj[key] = headers[i + 1].map(x => x.toString('utf8')) + } else { + obj[key] = headers[i + 1].toString('utf8') } - ++padNum; - } - } else if(bt === 0x02) { - // look for 0x00 byte - padNum = 0; - while(eb.length() > 1) { - if(eb.getByte() === 0x00) { - --eb.read; - break; + } else { + if (!Array.isArray(val)) { + val = [val] + obj[key] = val } - ++padNum; + val.push(headers[i + 1].toString('utf8')) } } - // zero must be 0x00 and padNum must be (k - 3 - message length) - var zero = eb.getByte(); - if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) { - throw new Error('Encryption block is invalid.'); + // See https://github.com/nodejs/node/pull/46528 + if ('content-length' in obj && 'content-disposition' in obj) { + obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') } - return eb.getBytes(); + return obj } -/** - * Runs the key-generation algorithm asynchronously, either in the background - * via Web Workers, or using the main thread and setImmediate. - * - * @param state the key-pair generation state. - * @param [options] options for key-pair generation: - * workerScript the worker script URL. - * workers the number of web workers (if supported) to use, - * (default: 2, -1 to use estimated cores minus one). - * workLoad the size of the work load, ie: number of possible prime - * numbers for each web worker to check per work assignment, - * (default: 100). - * @param callback(err, keypair) called once the operation completes. 
- */ -function _generateKeyPair(state, options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; +function parseRawHeaders (headers) { + const ret = [] + let hasContentLength = false + let contentDispositionIdx = -1 - var opts = { - algorithm: { - name: options.algorithm || 'PRIMEINC', - options: { - workers: options.workers || 2, - workLoad: options.workLoad || 100, - workerScript: options.workerScript - } + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0].toString() + const val = headers[n + 1].toString('utf8') + + if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { + ret.push(key, val) + hasContentLength = true + } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { + contentDispositionIdx = ret.push(key, val) - 1 + } else { + ret.push(key, val) } - }; - if('prng' in options) { - opts.prng = options.prng; } - generate(); + // See https://github.com/nodejs/node/pull/46528 + if (hasContentLength && contentDispositionIdx !== -1) { + ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + } - function generate() { - // find p and then q (done in series to simplify) - getPrime(state.pBits, function(err, num) { - if(err) { - return callback(err); - } - state.p = num; - if(state.q !== null) { - return finish(err, state.q); - } - getPrime(state.qBits, finish); - }); + return ret +} + +function isBuffer (buffer) { + // See, https://github.com/mcollina/undici/pull/319 + return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) +} + +function validateHandler (handler, method, upgrade) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') } - function getPrime(bits, callback) { - forge.prime.generateProbablePrime(bits, opts, callback); + if (typeof handler.onConnect !== 'function') { + throw new InvalidArgumentError('invalid onConnect method') } - function finish(err, num) { - if(err) { - return callback(err); - } + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } - // set q - state.q = num; + if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { + throw new InvalidArgumentError('invalid onBodySent method') + } - // ensure p is larger than q (swap them if not) - if(state.p.compareTo(state.q) < 0) { - var tmp = state.p; - state.p = state.q; - state.q = tmp; + if (upgrade || method === 'CONNECT') { + if (typeof handler.onUpgrade !== 'function') { + throw new InvalidArgumentError('invalid onUpgrade method') } - - // ensure p is coprime with e - if(state.p.subtract(BigInteger.ONE).gcd(state.e) - .compareTo(BigInteger.ONE) !== 0) { - state.p = null; - generate(); - return; + } else { + if (typeof handler.onHeaders !== 'function') { + throw new InvalidArgumentError('invalid onHeaders method') } - // ensure q is coprime with e - if(state.q.subtract(BigInteger.ONE).gcd(state.e) - .compareTo(BigInteger.ONE) !== 0) { - state.q = null; - getPrime(state.qBits, finish); - return; + if (typeof handler.onData !== 'function') { + throw new InvalidArgumentError('invalid onData method') } - // compute phi: (p - 1)(q - 1) (Euler's totient function) - state.p1 = state.p.subtract(BigInteger.ONE); - state.q1 = state.q.subtract(BigInteger.ONE); - state.phi = state.p1.multiply(state.q1); - - // ensure e and phi are coprime - 
if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) !== 0) { - // phi and e aren't coprime, so generate a new p and q - state.p = state.q = null; - generate(); - return; + if (typeof handler.onComplete !== 'function') { + throw new InvalidArgumentError('invalid onComplete method') } + } +} - // create n, ensure n is has the right number of bits - state.n = state.p.multiply(state.q); - if(state.n.bitLength() !== state.bits) { - // failed, get new q - state.q = null; - getPrime(state.qBits, finish); - return; - } +// A body is disturbed if it has been read from and it cannot +// be re-used without losing state or data. +function isDisturbed (body) { + return !!(body && ( + stream.isDisturbed + ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? + : body[kBodyUsed] || + body.readableDidRead || + (body._readableState && body._readableState.dataEmitted) || + isReadableAborted(body) + )) +} - // set keys - var d = state.e.modInverse(state.phi); - state.keys = { - privateKey: pki.rsa.setPrivateKey( - state.n, state.e, d, state.p, state.q, - d.mod(state.p1), d.mod(state.q1), - state.q.modInverse(state.p)), - publicKey: pki.rsa.setPublicKey(state.n, state.e) - }; +function isErrored (body) { + return !!(body && ( + stream.isErrored + ? stream.isErrored(body) + : /state: 'errored'/.test(nodeUtil.inspect(body) + ))) +} - callback(null, state.keys); - } +function isReadable (body) { + return !!(body && ( + stream.isReadable + ? stream.isReadable(body) + : /state: 'readable'/.test(nodeUtil.inspect(body) + ))) } -/** - * Converts a positive BigInteger into 2's-complement big-endian bytes. - * - * @param b the big integer to convert. - * - * @return the bytes. - */ -function _bnToBytes(b) { - // prepend 0x00 if first byte >= 0x80 - var hex = b.toString(16); - if(hex[0] >= '8') { - hex = '00' + hex; - } - var bytes = forge.util.hexToBytes(hex); - - // ensure integer is minimally-encoded - if(bytes.length > 1 && - // leading 0x00 for positive integer - ((bytes.charCodeAt(0) === 0 && - (bytes.charCodeAt(1) & 0x80) === 0) || - // leading 0xFF for negative integer - (bytes.charCodeAt(0) === 0xFF && - (bytes.charCodeAt(1) & 0x80) === 0x80))) { - return bytes.substr(1); +function getSocketInfo (socket) { + return { + localAddress: socket.localAddress, + localPort: socket.localPort, + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteFamily, + timeout: socket.timeout, + bytesWritten: socket.bytesWritten, + bytesRead: socket.bytesRead } - return bytes; } -/** - * Returns the required number of Miller-Rabin tests to generate a - * prime with an error probability of (1/2)^80. - * - * See Handbook of Applied Cryptography Chapter 4, Table 4.4. - * - * @param bits the bit size. - * - * @return the required number of iterations. - */ -function _getMillerRabinTests(bits) { - if(bits <= 100) return 27; - if(bits <= 150) return 18; - if(bits <= 200) return 15; - if(bits <= 250) return 12; - if(bits <= 300) return 9; - if(bits <= 350) return 8; - if(bits <= 400) return 7; - if(bits <= 500) return 6; - if(bits <= 600) return 5; - if(bits <= 800) return 4; - if(bits <= 1250) return 3; - return 2; +async function * convertIterableToBuffer (iterable) { + for await (const chunk of iterable) { + yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + } } -/** - * Performs feature detection on the Node crypto interface. - * - * @param fn the feature (function) to detect. - * - * @return true if detected, false if not. 
- */ -function _detectNodeCrypto(fn) { - return forge.util.isNodejs && typeof _crypto[fn] === 'function'; -} +let ReadableStream +function ReadableStreamFrom (iterable) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) + } -/** - * Performs feature detection on the SubtleCrypto interface. - * - * @param fn the feature (function) to detect. - * - * @return true if detected, false if not. - */ -function _detectSubtleCrypto(fn) { - return (typeof util.globalScope !== 'undefined' && - typeof util.globalScope.crypto === 'object' && - typeof util.globalScope.crypto.subtle === 'object' && - typeof util.globalScope.crypto.subtle[fn] === 'function'); -} + if (ReadableStream.from) { + return ReadableStream.from(convertIterableToBuffer(iterable)) + } -/** - * Performs feature detection on the deprecated Microsoft Internet Explorer - * outdated SubtleCrypto interface. This function should only be used after - * checking for the modern, standard SubtleCrypto interface. - * - * @param fn the feature (function) to detect. - * - * @return true if detected, false if not. - */ -function _detectSubtleMsCrypto(fn) { - return (typeof util.globalScope !== 'undefined' && - typeof util.globalScope.msCrypto === 'object' && - typeof util.globalScope.msCrypto.subtle === 'object' && - typeof util.globalScope.msCrypto.subtle[fn] === 'function'); + let iterator + return new ReadableStream( + { + async start () { + iterator = iterable[Symbol.asyncIterator]() + }, + async pull (controller) { + const { done, value } = await iterator.next() + if (done) { + queueMicrotask(() => { + controller.close() + }) + } else { + const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) + controller.enqueue(new Uint8Array(buf)) + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + } + }, + 0 + ) } -function _intToUint8Array(x) { - var bytes = forge.util.hexToBytes(x.toString(16)); - var buffer = new Uint8Array(bytes.length); - for(var i = 0; i < bytes.length; ++i) { - buffer[i] = bytes.charCodeAt(i); +// The chunk should be a FormData instance and contains +// all the required methods. 
+function isFormDataLike (object) { + return ( + object && + typeof object === 'object' && + typeof object.append === 'function' && + typeof object.delete === 'function' && + typeof object.get === 'function' && + typeof object.getAll === 'function' && + typeof object.has === 'function' && + typeof object.set === 'function' && + object[Symbol.toStringTag] === 'FormData' + ) +} + +function throwIfAborted (signal) { + if (!signal) { return } + if (typeof signal.throwIfAborted === 'function') { + signal.throwIfAborted() + } else { + if (signal.aborted) { + // DOMException not available < v17.0.0 + const err = new Error('The operation was aborted') + err.name = 'AbortError' + throw err + } } - return buffer; } -function _privateKeyFromJwk(jwk) { - if(jwk.kty !== 'RSA') { - throw new Error( - 'Unsupported key algorithm "' + jwk.kty + '"; algorithm must be "RSA".'); +function addAbortListener (signal, listener) { + if ('addEventListener' in signal) { + signal.addEventListener('abort', listener, { once: true }) + return () => signal.removeEventListener('abort', listener) } - return pki.setRsaPrivateKey( - _base64ToBigInt(jwk.n), - _base64ToBigInt(jwk.e), - _base64ToBigInt(jwk.d), - _base64ToBigInt(jwk.p), - _base64ToBigInt(jwk.q), - _base64ToBigInt(jwk.dp), - _base64ToBigInt(jwk.dq), - _base64ToBigInt(jwk.qi)); + signal.addListener('abort', listener) + return () => signal.removeListener('abort', listener) } -function _publicKeyFromJwk(jwk) { - if(jwk.kty !== 'RSA') { - throw new Error('Key algorithm must be "RSA".'); +const hasToWellFormed = !!String.prototype.toWellFormed + +/** + * @param {string} val + */ +function toUSVString (val) { + if (hasToWellFormed) { + return `${val}`.toWellFormed() + } else if (nodeUtil.toUSVString) { + return nodeUtil.toUSVString(val) } - return pki.setRsaPublicKey( - _base64ToBigInt(jwk.n), - _base64ToBigInt(jwk.e)); + + return `${val}` +} + +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? parseInt(m[3]) : null + } + : null } -function _base64ToBigInt(b64) { - return new BigInteger(forge.util.bytesToHex(forge.util.decode64(b64)), 16); +const kEnumerableProperty = Object.create(null) +kEnumerableProperty.enumerable = true + +module.exports = { + kEnumerableProperty, + nop, + isDisturbed, + isErrored, + isReadable, + toUSVString, + isReadableAborted, + isBlobLike, + parseOrigin, + parseURL, + getServerName, + isStream, + isIterable, + isAsyncIterable, + isDestroyed, + parseRawHeaders, + parseHeaders, + parseKeepAliveTimeout, + destroy, + bodyLength, + deepClone, + ReadableStreamFrom, + isBuffer, + validateHandler, + getSocketInfo, + isFormDataLike, + buildURL, + throwIfAborted, + addAbortListener, + parseRangeHeader, + nodeMajor, + nodeMinor, + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] } /***/ }), -/***/ 279: +/***/ 4839: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Secure Hash Algorithm with 160-bit digest (SHA-1) implementation. - * - * @author Dave Longley - * - * Copyright (c) 2010-2015 Digital Bazaar, Inc. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); +"use strict"; -var sha1 = module.exports = forge.sha1 = forge.sha1 || {}; -forge.md.sha1 = forge.md.algorithms.sha1 = sha1; -/** - * Creates a SHA-1 message digest object. - * - * @return a message digest object. - */ -sha1.create = function() { - // do initialization as necessary - if(!_initialized) { - _init(); - } - - // SHA-1 state contains five 32-bit integers - var _state = null; - - // input buffer - var _input = forge.util.createBuffer(); - - // used for word storage - var _w = new Array(80); - - // message digest object - var md = { - algorithm: 'sha1', - blockLength: 64, - digestLength: 20, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 8 - }; +const Dispatcher = __nccwpck_require__(412) +const { + ClientDestroyedError, + ClientClosedError, + InvalidArgumentError +} = __nccwpck_require__(8045) +const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(2785) - /** - * Starts the digest. - * - * @return this digest object. - */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength64 for backwards-compatibility) - md.fullMessageLength = md.messageLength64 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _state = { - h0: 0x67452301, - h1: 0xEFCDAB89, - h2: 0x98BADCFE, - h3: 0x10325476, - h4: 0xC3D2E1F0 - }; - return md; - }; - // start digest automatically for first time - md.start(); +const kDestroyed = Symbol('destroyed') +const kClosed = Symbol('closed') +const kOnDestroyed = Symbol('onDestroyed') +const kOnClosed = Symbol('onClosed') +const kInterceptedDispatch = Symbol('Intercepted Dispatch') - /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. 
- */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); - } +class DispatcherBase extends Dispatcher { + constructor () { + super() - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); - } + this[kDestroyed] = false + this[kOnDestroyed] = null + this[kClosed] = false + this[kOnClosed] = [] + } - // add bytes to input buffer - _input.putBytes(msg); + get destroyed () { + return this[kDestroyed] + } + + get closed () { + return this[kClosed] + } - // process bytes - _update(_state, _w, _input); + get interceptors () { + return this[kInterceptors] + } - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); + set interceptors (newInterceptors) { + if (newInterceptors) { + for (let i = newInterceptors.length - 1; i >= 0; i--) { + const interceptor = this[kInterceptors][i] + if (typeof interceptor !== 'function') { + throw new InvalidArgumentError('interceptor must be an function') + } + } } - return md; - }; + this[kInterceptors] = newInterceptors + } - /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate SHA-1 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 448 mod 512. In other words, - the data to be digested must be a multiple of 512 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 8 bytes (64 - bits), that means that the last segment of the data must have 56 bytes - (448 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 448 mod 512 because - 512 - 128 = 448. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 448 mod 512, then 512 padding bits must be added. 
*/ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 and add carry from next int - var next, carry; - var bits = md.fullMessageLength[0] * 8; - for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { - next = md.fullMessageLength[i + 1] * 8; - carry = (next / 0x100000000) >>> 0; - bits += carry; - finalBlock.putInt32(bits >>> 0); - bits = next >>> 0; - } - finalBlock.putInt32(bits); - - var s2 = { - h0: _state.h0, - h1: _state.h1, - h2: _state.h2, - h3: _state.h3, - h4: _state.h4 - }; - _update(s2, _w, finalBlock); - var rval = forge.util.createBuffer(); - rval.putInt32(s2.h0); - rval.putInt32(s2.h1); - rval.putInt32(s2.h2); - rval.putInt32(s2.h3); - rval.putInt32(s2.h4); - return rval; - }; + close (callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.close((err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } - return md; -}; + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -// sha-1 padding bytes not initialized yet -var _padding = null; -var _initialized = false; + if (this[kDestroyed]) { + queueMicrotask(() => callback(new ClientDestroyedError(), null)) + return + } -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + if (this[kClosed]) { + if (this[kOnClosed]) { + this[kOnClosed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } - // now initialized - _initialized = true; -} + this[kClosed] = true + this[kOnClosed].push(callback) -/** - * Updates a SHA-1 state with the given byte buffer. - * - * @param s the SHA-1 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. 
- */ -function _update(s, w, bytes) { - // consume 512 bit (64 byte) chunks - var t, a, b, c, d, e, f, i; - var len = bytes.length(); - while(len >= 64) { - // the w array will be populated with sixteen 32-bit big-endian words - // and then extended into 80 32-bit words according to SHA-1 algorithm - // and for 32-79 using Max Locktyukhin's optimization - - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - e = s.h4; - - // round 1 - for(i = 0; i < 16; ++i) { - t = bytes.getInt32(); - w[i] = t; - f = d ^ (b & (c ^ d)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - for(; i < 20; ++i) { - t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); - t = (t << 1) | (t >>> 31); - w[i] = t; - f = d ^ (b & (c ^ d)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - // round 2 - for(; i < 32; ++i) { - t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); - t = (t << 1) | (t >>> 31); - w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - for(; i < 40; ++i) { - t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); - t = (t << 2) | (t >>> 30); - w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - // round 3 - for(; i < 60; ++i) { - t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); - t = (t << 2) | (t >>> 30); - w[i] = t; - f = (b & c) | (d & (b ^ c)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - // round 4 - for(; i < 80; ++i) { - t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); - t = (t << 2) | (t >>> 30); - w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t; - e = d; - d = c; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - c = ((b << 30) | (b >>> 2)) >>> 0; - b = a; - a = t; - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - s.h4 = (s.h4 + e) | 0; - - len -= 64; + const onClosed = () => { + const callbacks = this[kOnClosed] + this[kOnClosed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kClose]() + .then(() => this.destroy()) + .then(() => { + queueMicrotask(onClosed) + }) } -} + destroy (err, callback) { + if (typeof err === 'function') { + callback = err + err = null + } -/***/ }), + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.destroy(err, (err, data) => { + return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data) + }) + }) + } -/***/ 4086: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -/** - * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation. - * - * See FIPS 180-2 for details. 
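// For reference (illustrative, not part of the patch): the forge sha1/sha256 modules
// in this diff implement the standard FIPS 180 digests, so their output can be
// cross-checked against Node's built-in crypto module:
const { createHash } = require('crypto');
createHash('sha1').update('abc').digest('hex');
// 'a9993e364706816aba3e25717850c26c9cd0d89d'
createHash('sha256').update('abc').digest('hex');
// 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'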
- * - * @author Dave Longley - * - * Copyright (c) 2010-2015 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); + if (this[kDestroyed]) { + if (this[kOnDestroyed]) { + this[kOnDestroyed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } -var sha256 = module.exports = forge.sha256 = forge.sha256 || {}; -forge.md.sha256 = forge.md.algorithms.sha256 = sha256; + if (!err) { + err = new ClientDestroyedError() + } -/** - * Creates a SHA-256 message digest object. - * - * @return a message digest object. - */ -sha256.create = function() { - // do initialization as necessary - if(!_initialized) { - _init(); - } - - // SHA-256 state contains eight 32-bit integers - var _state = null; - - // input buffer - var _input = forge.util.createBuffer(); - - // used for word storage - var _w = new Array(64); - - // message digest object - var md = { - algorithm: 'sha256', - blockLength: 64, - digestLength: 32, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 8 - }; + this[kDestroyed] = true + this[kOnDestroyed] = this[kOnDestroyed] || [] + this[kOnDestroyed].push(callback) - /** - * Starts the digest. - * - * @return this digest object. - */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength64 for backwards-compatibility) - md.fullMessageLength = md.messageLength64 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _state = { - h0: 0x6A09E667, - h1: 0xBB67AE85, - h2: 0x3C6EF372, - h3: 0xA54FF53A, - h4: 0x510E527F, - h5: 0x9B05688C, - h6: 0x1F83D9AB, - h7: 0x5BE0CD19 - }; - return md; - }; - // start digest automatically for first time - md.start(); + const onDestroyed = () => { + const callbacks = this[kOnDestroyed] + this[kOnDestroyed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } - /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. - */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); + // Should not error. 
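// Usage sketch (illustrative only): as with close() above, destroy() on any
// DispatcherBase subclass instance (here called `dispatcher`, a hypothetical name)
// accepts a node-style callback or, when the callback is omitted, returns a Promise:
//   dispatcher.destroy(new Error('shutting down'), () => {})  // callback style
//   await dispatcher.destroy()                                // promise style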
+ this[kDestroy](err).then(() => { + queueMicrotask(onDestroyed) + }) + } + + [kInterceptedDispatch] (opts, handler) { + if (!this[kInterceptors] || this[kInterceptors].length === 0) { + this[kInterceptedDispatch] = this[kDispatch] + return this[kDispatch](opts, handler) + } + + let dispatch = this[kDispatch].bind(this) + for (let i = this[kInterceptors].length - 1; i >= 0; i--) { + dispatch = this[kInterceptors][i](dispatch) } + this[kInterceptedDispatch] = dispatch + return dispatch(opts, handler) + } - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); + dispatch (opts, handler) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') } - // add bytes to input buffer - _input.putBytes(msg); + try { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object.') + } + + if (this[kDestroyed] || this[kOnDestroyed]) { + throw new ClientDestroyedError() + } + + if (this[kClosed]) { + throw new ClientClosedError() + } + + return this[kInterceptedDispatch](opts, handler) + } catch (err) { + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } - // process bytes - _update(_state, _w, _input); + handler.onError(err) - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); + return false } + } +} - return md; - }; +module.exports = DispatcherBase - /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate SHA-256 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 448 mod 512. In other words, - the data to be digested must be a multiple of 512 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 8 bytes (64 - bits), that means that the last segment of the data must have 56 bytes - (448 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 448 mod 512 because - 512 - 128 = 448. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 448 mod 512, then 512 padding bits must be added. 
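// Illustrative sketch of the interceptor chain built by kInterceptedDispatch above:
// each interceptor receives the next dispatch function and returns a wrapped one,
// and because the loop walks the array from the end, interceptors[0] ends up as the
// outermost layer. The `logging` interceptor below is hypothetical:
//   const logging = (dispatch) => (opts, handler) => {
//     console.log('dispatching', opts.method, opts.path)
//     return dispatch(opts, handler)
//   }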
*/ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 and add carry from next int - var next, carry; - var bits = md.fullMessageLength[0] * 8; - for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { - next = md.fullMessageLength[i + 1] * 8; - carry = (next / 0x100000000) >>> 0; - bits += carry; - finalBlock.putInt32(bits >>> 0); - bits = next >>> 0; - } - finalBlock.putInt32(bits); - - var s2 = { - h0: _state.h0, - h1: _state.h1, - h2: _state.h2, - h3: _state.h3, - h4: _state.h4, - h5: _state.h5, - h6: _state.h6, - h7: _state.h7 - }; - _update(s2, _w, finalBlock); - var rval = forge.util.createBuffer(); - rval.putInt32(s2.h0); - rval.putInt32(s2.h1); - rval.putInt32(s2.h2); - rval.putInt32(s2.h3); - rval.putInt32(s2.h4); - rval.putInt32(s2.h5); - rval.putInt32(s2.h6); - rval.putInt32(s2.h7); - return rval; - }; - return md; -}; +/***/ }), -// sha-256 padding bytes not initialized yet -var _padding = null; -var _initialized = false; +/***/ 412: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// table of constants -var _k = null; +"use strict"; -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 64); - - // create K table for SHA-256 - _k = [ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, - 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, - 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, - 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, - 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, - 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, - 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, - 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, - 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2]; - - // now initialized - _initialized = true; -} -/** - * Updates a SHA-256 state with the given byte buffer. - * - * @param s the SHA-256 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. 
- */ -function _update(s, w, bytes) { - // consume 512 bit (64 byte) chunks - var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h; - var len = bytes.length(); - while(len >= 64) { - // the w array will be populated with sixteen 32-bit big-endian words - // and then extended into 64 32-bit words according to SHA-256 - for(i = 0; i < 16; ++i) { - w[i] = bytes.getInt32(); - } - for(; i < 64; ++i) { - // XOR word 2 words ago rot right 17, rot right 19, shft right 10 - t1 = w[i - 2]; - t1 = - ((t1 >>> 17) | (t1 << 15)) ^ - ((t1 >>> 19) | (t1 << 13)) ^ - (t1 >>> 10); - // XOR word 15 words ago rot right 7, rot right 18, shft right 3 - t2 = w[i - 15]; - t2 = - ((t2 >>> 7) | (t2 << 25)) ^ - ((t2 >>> 18) | (t2 << 14)) ^ - (t2 >>> 3); - // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32 - w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0; - } - - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - e = s.h4; - f = s.h5; - g = s.h6; - h = s.h7; - - // round function - for(i = 0; i < 64; ++i) { - // Sum1(e) - s1 = - ((e >>> 6) | (e << 26)) ^ - ((e >>> 11) | (e << 21)) ^ - ((e >>> 25) | (e << 7)); - // Ch(e, f, g) (optimized the same way as SHA-1) - ch = g ^ (e & (f ^ g)); - // Sum0(a) - s0 = - ((a >>> 2) | (a << 30)) ^ - ((a >>> 13) | (a << 19)) ^ - ((a >>> 22) | (a << 10)); - // Maj(a, b, c) (optimized the same way as SHA-1) - maj = (a & b) | (c & (a ^ b)); - - // main algorithm - t1 = h + s1 + ch + _k[i] + w[i]; - t2 = s0 + maj; - h = g; - g = f; - f = e; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - // can't truncate with `| 0` - e = (d + t1) >>> 0; - d = c; - c = b; - b = a; - // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug - // can't truncate with `| 0` - a = (t1 + t2) >>> 0; - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - s.h4 = (s.h4 + e) | 0; - s.h5 = (s.h5 + f) | 0; - s.h6 = (s.h6 + g) | 0; - s.h7 = (s.h7 + h) | 0; - len -= 64; +const EventEmitter = __nccwpck_require__(2361) + +class Dispatcher extends EventEmitter { + dispatch () { + throw new Error('not implemented') + } + + close () { + throw new Error('not implemented') + } + + destroy () { + throw new Error('not implemented') } } +module.exports = Dispatcher + /***/ }), -/***/ 9542: +/***/ 1472: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Secure Hash Algorithm with a 1024-bit block size implementation. - * - * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For - * SHA-256 (block size 512 bits), see sha256.js. - * - * See FIPS 180-4 for details. - * - * @author Dave Longley - * - * Copyright (c) 2014-2015 Digital Bazaar, Inc. 
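// Illustrative only: the SHA-512 family code that follows represents every 64-bit
// word as a [hi, lo] pair of unsigned 32-bit integers and carries low-word overflow
// into the high word; the helper name here is hypothetical:
function add64([ahi, alo], [bhi, blo]) {
  const lo = alo + blo;
  const hi = (ahi + bhi + ((lo / 0x100000000) >>> 0)) >>> 0;
  return [hi, lo >>> 0];
}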
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(6231); -__nccwpck_require__(8339); +"use strict"; -var sha512 = module.exports = forge.sha512 = forge.sha512 || {}; -// SHA-512 -forge.md.sha512 = forge.md.algorithms.sha512 = sha512; +const Busboy = __nccwpck_require__(727) +const util = __nccwpck_require__(3983) +const { + ReadableStreamFrom, + isBlobLike, + isReadableStreamLike, + readableStreamClose, + createDeferredPromise, + fullyReadBody +} = __nccwpck_require__(2538) +const { FormData } = __nccwpck_require__(2015) +const { kState } = __nccwpck_require__(5861) +const { webidl } = __nccwpck_require__(1744) +const { DOMException, structuredClone } = __nccwpck_require__(1037) +const { Blob, File: NativeFile } = __nccwpck_require__(4300) +const { kBodyUsed } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { isErrored } = __nccwpck_require__(3983) +const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) +const { File: UndiciFile } = __nccwpck_require__(8511) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) + +let ReadableStream = globalThis.ReadableStream + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() + +// https://fetch.spec.whatwg.org/#concept-bodyinit-extract +function extractBody (object, keepalive = false) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) + } + + // 1. Let stream be null. + let stream = null + + // 2. If object is a ReadableStream object, then set stream to object. + if (object instanceof ReadableStream) { + stream = object + } else if (isBlobLike(object)) { + // 3. Otherwise, if object is a Blob object, set stream to the + // result of running object’s get stream. + stream = object.stream() + } else { + // 4. Otherwise, set stream to a new ReadableStream object, and set + // up stream. + stream = new ReadableStream({ + async pull (controller) { + controller.enqueue( + typeof source === 'string' ? textEncoder.encode(source) : source + ) + queueMicrotask(() => readableStreamClose(controller)) + }, + start () {}, + type: undefined + }) + } -// SHA-384 -var sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {}; -sha384.create = function() { - return sha512.create('SHA-384'); -}; -forge.md.sha384 = forge.md.algorithms.sha384 = sha384; + // 5. Assert: stream is a ReadableStream object. + assert(isReadableStreamLike(stream)) + + // 6. Let action be null. + let action = null + + // 7. Let source be null. + let source = null + + // 8. Let length be null. + let length = null + + // 9. Let type be null. + let type = null + + // 10. Switch on object: + if (typeof object === 'string') { + // Set source to the UTF-8 encoding of object. + // Note: setting source to a Uint8Array here breaks some mocking assumptions. + source = object + + // Set type to `text/plain;charset=UTF-8`. 
+ type = 'text/plain;charset=UTF-8' + } else if (object instanceof URLSearchParams) { + // URLSearchParams + + // spec says to run application/x-www-form-urlencoded on body.list + // this is implemented in Node.js as apart of an URLSearchParams instance toString method + // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 + // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 + + // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. + source = object.toString() + + // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. + type = 'application/x-www-form-urlencoded;charset=UTF-8' + } else if (isArrayBuffer(object)) { + // BufferSource/ArrayBuffer + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.slice()) + } else if (ArrayBuffer.isView(object)) { + // BufferSource/ArrayBufferView + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) + } else if (util.isFormDataLike(object)) { + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const prefix = `--${boundary}\r\nContent-Disposition: form-data` + + /*! formdata-polyfill. MIT License. Jimmy Wärting */ + const escape = (str) => + str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') + const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') + + // Set action to this step: run the multipart/form-data + // encoding algorithm, with object’s entry list and UTF-8. + // - This ensures that the body is immutable and can't be changed afterwords + // - That the content-length is calculated in advance. + // - And that all parts are pre-encoded and ready to be sent. + + const blobParts = [] + const rn = new Uint8Array([13, 10]) // '\r\n' + length = 0 + let hasUnknownSizeValue = false + + for (const [name, value] of object) { + if (typeof value === 'string') { + const chunk = textEncoder.encode(prefix + + `; name="${escape(normalizeLinefeeds(name))}"` + + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) + blobParts.push(chunk) + length += chunk.byteLength + } else { + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + + `Content-Type: ${ + value.type || 'application/octet-stream' + }\r\n\r\n`) + blobParts.push(chunk, value, rn) + if (typeof value.size === 'number') { + length += chunk.byteLength + value.size + rn.byteLength + } else { + hasUnknownSizeValue = true + } + } + } -// SHA-512/256 -forge.sha512.sha256 = forge.sha512.sha256 || { - create: function() { - return sha512.create('SHA-512/256'); - } -}; -forge.md['sha512/256'] = forge.md.algorithms['sha512/256'] = - forge.sha512.sha256; + const chunk = textEncoder.encode(`--${boundary}--`) + blobParts.push(chunk) + length += chunk.byteLength + if (hasUnknownSizeValue) { + length = null + } -// SHA-512/224 -forge.sha512.sha224 = forge.sha512.sha224 || { - create: function() { - return sha512.create('SHA-512/224'); - } -}; -forge.md['sha512/224'] = forge.md.algorithms['sha512/224'] = - forge.sha512.sha224; + // Set source to object. + source = object -/** - * Creates a SHA-2 message digest object. 
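// Note (grounded in the FormData branch above): every part is pre-encoded into
// blobParts so the total Content-Length can be computed in advance; a part whose
// size is unknown sets hasUnknownSizeValue and forces length back to null.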
- * - * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224, - * SHA-512/256). - * - * @return a message digest object. - */ -sha512.create = function(algorithm) { - // do initialization as necessary - if(!_initialized) { - _init(); + action = async function * () { + for (const part of blobParts) { + if (part.stream) { + yield * part.stream() + } else { + yield part + } + } + } + + // Set type to `multipart/form-data; boundary=`, + // followed by the multipart/form-data boundary string generated + // by the multipart/form-data encoding algorithm. + type = 'multipart/form-data; boundary=' + boundary + } else if (isBlobLike(object)) { + // Blob + + // Set source to object. + source = object + + // Set length to object’s size. + length = object.size + + // If object’s type attribute is not the empty byte sequence, set + // type to its value. + if (object.type) { + type = object.type + } + } else if (typeof object[Symbol.asyncIterator] === 'function') { + // If keepalive is true, then throw a TypeError. + if (keepalive) { + throw new TypeError('keepalive') + } + + // If object is disturbed or locked, then throw a TypeError. + if (util.isDisturbed(object) || object.locked) { + throw new TypeError( + 'Response body object should not be disturbed or locked' + ) + } + + stream = + object instanceof ReadableStream ? object : ReadableStreamFrom(object) } - if(typeof algorithm === 'undefined') { - algorithm = 'SHA-512'; + // 11. If source is a byte sequence, then set action to a + // step that returns source and length to source’s length. + if (typeof source === 'string' || util.isBuffer(source)) { + length = Buffer.byteLength(source) } - if(!(algorithm in _states)) { - throw new Error('Invalid SHA-512 algorithm: ' + algorithm); + // 12. If action is non-null, then run these steps in in parallel: + if (action != null) { + // Run action. + let iterator + stream = new ReadableStream({ + async start () { + iterator = action(object)[Symbol.asyncIterator]() + }, + async pull (controller) { + const { value, done } = await iterator.next() + if (done) { + // When running action is done, close stream. + queueMicrotask(() => { + controller.close() + }) + } else { + // Whenever one or more bytes are available and stream is not errored, + // enqueue a Uint8Array wrapping an ArrayBuffer containing the available + // bytes into stream. + if (!isErrored(stream)) { + controller.enqueue(new Uint8Array(value)) + } + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + }, + type: undefined + }) } - // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints) - var _state = _states[algorithm]; - var _h = null; + // 13. Let body be a body whose stream is stream, source is source, + // and length is length. + const body = { stream, source, length } - // input buffer - var _input = forge.util.createBuffer(); + // 14. Return (body, type). 
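// Illustrative only: for a plain string body the pair returned here looks roughly like
//   [{ stream: /* ReadableStream */, source: 'hi', length: 2 }, 'text/plain;charset=UTF-8']
// i.e. a body object of the shape { stream, source, length } plus its MIME type string.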
+ return [body, type] +} - // used for 64-bit word storage - var _w = new Array(80); - for(var wi = 0; wi < 80; ++wi) { - _w[wi] = new Array(2); +// https://fetch.spec.whatwg.org/#bodyinit-safely-extract +function safelyExtractBody (object, keepalive = false) { + if (!ReadableStream) { + // istanbul ignore next + ReadableStream = (__nccwpck_require__(5356).ReadableStream) } - // determine digest length by algorithm name (default) - var digestLength = 64; - switch(algorithm) { - case 'SHA-384': - digestLength = 48; - break; - case 'SHA-512/256': - digestLength = 32; - break; - case 'SHA-512/224': - digestLength = 28; - break; + // To safely extract a body and a `Content-Type` value from + // a byte sequence or BodyInit object object, run these steps: + + // 1. If object is a ReadableStream object, then: + if (object instanceof ReadableStream) { + // Assert: object is neither disturbed nor locked. + // istanbul ignore next + assert(!util.isDisturbed(object), 'The body has already been consumed.') + // istanbul ignore next + assert(!object.locked, 'The stream is locked.') } - // message digest object - var md = { - // SHA-512 => sha512 - algorithm: algorithm.replace('-', '').toLowerCase(), - blockLength: 128, - digestLength: digestLength, - // 56-bit length of message so far (does not including padding) - messageLength: 0, - // true message length - fullMessageLength: null, - // size of message length in bytes - messageLengthSize: 16 - }; + // 2. Return the results of extracting object. + return extractBody(object, keepalive) +} - /** - * Starts the digest. - * - * @return this digest object. - */ - md.start = function() { - // up to 56-bit message length for convenience - md.messageLength = 0; - - // full message length (set md.messageLength128 for backwards-compatibility) - md.fullMessageLength = md.messageLength128 = []; - var int32s = md.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - md.fullMessageLength.push(0); - } - _input = forge.util.createBuffer(); - _h = new Array(_state.length); - for(var i = 0; i < _state.length; ++i) { - _h[i] = _state[i].slice(0); - } - return md; - }; - // start digest automatically for first time - md.start(); +function cloneBody (body) { + // To clone a body body, run these steps: - /** - * Updates the digest with the given message input. The given input can - * treated as raw input (no encoding will be applied) or an encoding of - * 'utf8' maybe given to encode the input using UTF-8. - * - * @param msg the message input to update with. - * @param encoding the encoding to use (default: 'raw', other: 'utf8'). - * - * @return this digest object. - */ - md.update = function(msg, encoding) { - if(encoding === 'utf8') { - msg = forge.util.encodeUtf8(msg); - } + // https://fetch.spec.whatwg.org/#concept-body-clone - // update message length - var len = msg.length; - md.messageLength += len; - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { - md.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); - md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); - } + // 1. Let « out1, out2 » be the result of teeing body’s stream. + const [out1, out2] = body.stream.tee() + const out2Clone = structuredClone(out2, { transfer: [out2] }) + // This, for whatever reasons, unrefs out2Clone which allows + // the process to exit by itself. 
+ const [, finalClone] = out2Clone.tee() - // add bytes to input buffer - _input.putBytes(msg); + // 2. Set body’s stream to out1. + body.stream = out1 - // process bytes - _update(_h, _w, _input); + // 3. Return a body whose stream is out2 and other members are copied from body. + return { + stream: finalClone, + length: body.length, + source: body.source + } +} - // compact input buffer every 2K or if empty - if(_input.read > 2048 || _input.length() === 0) { - _input.compact(); - } +async function * consumeBody (body) { + if (body) { + if (isUint8Array(body)) { + yield body + } else { + const stream = body.stream - return md; - }; + if (util.isDisturbed(stream)) { + throw new TypeError('The body has already been consumed.') + } - /** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ - md.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and - add the appropriate SHA-512 padding. Then we do the final update - on a copy of the state so that if the user wants to get - intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message - to ensure its length is congruent to 896 mod 1024. In other words, - the data to be digested must be a multiple of 1024 bits (or 128 bytes). - This data includes the message, some padding, and the length of the - message. Since the length of the message will be encoded as 16 bytes (128 - bits), that means that the last segment of the data must have 112 bytes - (896 bits) of message and padding. Therefore, the length of the message - plus the padding must be congruent to 896 mod 1024 because - 1024 - 128 = 896. - - In order to fill up the message length it must be filled with - padding that begins with 1 bit followed by all 0 bits. Padding - must *always* be present, so if the message length is already - congruent to 896 mod 1024, then 1024 padding bits must be added. */ - - var finalBlock = forge.util.createBuffer(); - finalBlock.putBytes(_input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - md.fullMessageLength[md.fullMessageLength.length - 1] + - md.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (md.blockLength - 1); - finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 and add carry from next int - var next, carry; - var bits = md.fullMessageLength[0] * 8; - for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { - next = md.fullMessageLength[i + 1] * 8; - carry = (next / 0x100000000) >>> 0; - bits += carry; - finalBlock.putInt32(bits >>> 0); - bits = next >>> 0; - } - finalBlock.putInt32(bits); - - var h = new Array(_h.length); - for(var i = 0; i < _h.length; ++i) { - h[i] = _h[i].slice(0); - } - _update(h, _w, finalBlock); - var rval = forge.util.createBuffer(); - var hlen; - if(algorithm === 'SHA-512') { - hlen = h.length; - } else if(algorithm === 'SHA-384') { - hlen = h.length - 2; - } else { - hlen = h.length - 4; - } - for(var i = 0; i < hlen; ++i) { - rval.putInt32(h[i][0]); - if(i !== hlen - 1 || algorithm !== 'SHA-512/224') { - rval.putInt32(h[i][1]); + if (stream.locked) { + throw new TypeError('The stream is locked.') } + + // Compat. 
+ stream[kBodyUsed] = true + + yield * stream } - return rval; - }; + } +} - return md; -}; +function throwIfAborted (state) { + if (state.aborted) { + throw new DOMException('The operation was aborted.', 'AbortError') + } +} -// sha-512 padding bytes not initialized yet -var _padding = null; -var _initialized = false; +function bodyMixinMethods (instance) { + const methods = { + blob () { + // The blob() method steps are to return the result of + // running consume body with this and the following step + // given a byte sequence bytes: return a Blob whose + // contents are bytes and whose type attribute is this’s + // MIME type. + return specConsumeBody(this, (bytes) => { + let mimeType = bodyMimeType(this) -// table of constants -var _k = null; + if (mimeType === 'failure') { + mimeType = '' + } else if (mimeType) { + mimeType = serializeAMimeType(mimeType) + } -// initial hash states -var _states = null; + // Return a Blob whose contents are bytes and type attribute + // is mimeType. + return new Blob([bytes], { type: mimeType }) + }, instance) + }, -/** - * Initializes the constant tables. - */ -function _init() { - // create padding - _padding = String.fromCharCode(128); - _padding += forge.util.fillString(String.fromCharCode(0x00), 128); - - // create K table for SHA-512 - _k = [ - [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd], - [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc], - [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019], - [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118], - [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe], - [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2], - [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1], - [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694], - [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3], - [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65], - [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483], - [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5], - [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210], - [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4], - [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725], - [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70], - [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926], - [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df], - [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8], - [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b], - [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001], - [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30], - [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910], - [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8], - [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53], - [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8], - [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb], - [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3], - [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60], - [0x84c87814, 0xa1f0ab72], [0x8cc70208, 0x1a6439ec], - [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9], - [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b], - [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207], - [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178], - [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6], - [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b], - [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493], - [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c], - [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a], - [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817] - ]; - - // initial hash states - _states = {}; - _states['SHA-512'] = [ - [0x6a09e667, 
0xf3bcc908], - [0xbb67ae85, 0x84caa73b], - [0x3c6ef372, 0xfe94f82b], - [0xa54ff53a, 0x5f1d36f1], - [0x510e527f, 0xade682d1], - [0x9b05688c, 0x2b3e6c1f], - [0x1f83d9ab, 0xfb41bd6b], - [0x5be0cd19, 0x137e2179] - ]; - _states['SHA-384'] = [ - [0xcbbb9d5d, 0xc1059ed8], - [0x629a292a, 0x367cd507], - [0x9159015a, 0x3070dd17], - [0x152fecd8, 0xf70e5939], - [0x67332667, 0xffc00b31], - [0x8eb44a87, 0x68581511], - [0xdb0c2e0d, 0x64f98fa7], - [0x47b5481d, 0xbefa4fa4] - ]; - _states['SHA-512/256'] = [ - [0x22312194, 0xFC2BF72C], - [0x9F555FA3, 0xC84C64C2], - [0x2393B86B, 0x6F53B151], - [0x96387719, 0x5940EABD], - [0x96283EE2, 0xA88EFFE3], - [0xBE5E1E25, 0x53863992], - [0x2B0199FC, 0x2C85B8AA], - [0x0EB72DDC, 0x81C52CA2] - ]; - _states['SHA-512/224'] = [ - [0x8C3D37C8, 0x19544DA2], - [0x73E19966, 0x89DCD4D6], - [0x1DFAB7AE, 0x32FF9C82], - [0x679DD514, 0x582F9FCF], - [0x0F6D2B69, 0x7BD44DA8], - [0x77E36F73, 0x04C48942], - [0x3F9D85A8, 0x6A1D36C8], - [0x1112E6AD, 0x91D692A1] - ]; - - // now initialized - _initialized = true; -} + arrayBuffer () { + // The arrayBuffer() method steps are to return the result + // of running consume body with this and the following step + // given a byte sequence bytes: return a new ArrayBuffer + // whose contents are bytes. + return specConsumeBody(this, (bytes) => { + return new Uint8Array(bytes).buffer + }, instance) + }, -/** - * Updates a SHA-512 state with the given byte buffer. - * - * @param s the SHA-512 state to update. - * @param w the array to use to store words. - * @param bytes the byte buffer to update with. - */ -function _update(s, w, bytes) { - // consume 512 bit (128 byte) chunks - var t1_hi, t1_lo; - var t2_hi, t2_lo; - var s0_hi, s0_lo; - var s1_hi, s1_lo; - var ch_hi, ch_lo; - var maj_hi, maj_lo; - var a_hi, a_lo; - var b_hi, b_lo; - var c_hi, c_lo; - var d_hi, d_lo; - var e_hi, e_lo; - var f_hi, f_lo; - var g_hi, g_lo; - var h_hi, h_lo; - var i, hi, lo, w2, w7, w15, w16; - var len = bytes.length(); - while(len >= 128) { - // the w array will be populated with sixteen 64-bit big-endian words - // and then extended into 64 64-bit words according to SHA-512 - for(i = 0; i < 16; ++i) { - w[i][0] = bytes.getInt32() >>> 0; - w[i][1] = bytes.getInt32() >>> 0; - } - for(; i < 80; ++i) { - // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x) - w2 = w[i - 2]; - hi = w2[0]; - lo = w2[1]; - - // high bits - t1_hi = ( - ((hi >>> 19) | (lo << 13)) ^ // ROTR 19 - ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29) - (hi >>> 6)) >>> 0; // SHR 6 - // low bits - t1_lo = ( - ((hi << 13) | (lo >>> 19)) ^ // ROTR 19 - ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29) - ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6 - - // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x) - w15 = w[i - 15]; - hi = w15[0]; - lo = w15[1]; - - // high bits - t2_hi = ( - ((hi >>> 1) | (lo << 31)) ^ // ROTR 1 - ((hi >>> 8) | (lo << 24)) ^ // ROTR 8 - (hi >>> 7)) >>> 0; // SHR 7 - // low bits - t2_lo = ( - ((hi << 31) | (lo >>> 1)) ^ // ROTR 1 - ((hi << 24) | (lo >>> 8)) ^ // ROTR 8 - ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7 - - // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow) - w7 = w[i - 7]; - w16 = w[i - 16]; - lo = (t1_lo + w7[1] + t2_lo + w16[1]); - w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] + - ((lo / 0x100000000) >>> 0)) >>> 0; - w[i][1] = lo >>> 0; - } - - // initialize hash value for this chunk - a_hi = s[0][0]; - a_lo = s[0][1]; - b_hi = s[1][0]; - b_lo = s[1][1]; - c_hi = s[2][0]; - c_lo = s[2][1]; - d_hi = s[3][0]; - d_lo = s[3][1]; - e_hi = 
s[4][0]; - e_lo = s[4][1]; - f_hi = s[5][0]; - f_lo = s[5][1]; - g_hi = s[6][0]; - g_lo = s[6][1]; - h_hi = s[7][0]; - h_lo = s[7][1]; - - // round function - for(i = 0; i < 80; ++i) { - // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e) - s1_hi = ( - ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14 - ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18 - ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9) - s1_lo = ( - ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14 - ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18 - ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9) - - // Ch(e, f, g) (optimized the same way as SHA-1) - ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0; - ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0; - - // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a) - s0_hi = ( - ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28 - ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2) - ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7) - s0_lo = ( - ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28 - ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2) - ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7) - - // Maj(a, b, c) (optimized the same way as SHA-1) - maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0; - maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0; - - // main algorithm - // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow) - lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]); - t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] + - ((lo / 0x100000000) >>> 0)) >>> 0; - t1_lo = lo >>> 0; - - // t2 = s0 + maj modulo 2^64 (carry lo overflow) - lo = s0_lo + maj_lo; - t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - t2_lo = lo >>> 0; - - h_hi = g_hi; - h_lo = g_lo; - - g_hi = f_hi; - g_lo = f_lo; - - f_hi = e_hi; - f_lo = e_lo; - - // e = (d + t1) modulo 2^64 (carry lo overflow) - lo = d_lo + t1_lo; - e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - e_lo = lo >>> 0; - - d_hi = c_hi; - d_lo = c_lo; - - c_hi = b_hi; - c_lo = b_lo; - - b_hi = a_hi; - b_lo = a_lo; - - // a = (t1 + t2) modulo 2^64 (carry lo overflow) - lo = t1_lo + t2_lo; - a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - a_lo = lo >>> 0; - } - - // update hash state (additional modulo 2^64) - lo = s[0][1] + a_lo; - s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[0][1] = lo >>> 0; - - lo = s[1][1] + b_lo; - s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[1][1] = lo >>> 0; - - lo = s[2][1] + c_lo; - s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[2][1] = lo >>> 0; - - lo = s[3][1] + d_lo; - s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[3][1] = lo >>> 0; - - lo = s[4][1] + e_lo; - s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[4][1] = lo >>> 0; - - lo = s[5][1] + f_lo; - s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[5][1] = lo >>> 0; + text () { + // The text() method steps are to return the result of running + // consume body with this and UTF-8 decode. + return specConsumeBody(this, utf8DecodeBytes, instance) + }, + + json () { + // The json() method steps are to return the result of running + // consume body with this and parse JSON from bytes. 
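// Usage sketch (illustrative only): these mixin methods end up on Response and
// Request, so a JSON body can be consumed as
//   const data = await new Response('{"ok":true}').json()   // { ok: true }
// which, per parseJSONFromBytes below, is a UTF-8 decode followed by JSON.parse.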
+ return specConsumeBody(this, parseJSONFromBytes, instance) + }, + + async formData () { + webidl.brandCheck(this, instance) + + throwIfAborted(this[kState]) + + const contentType = this.headers.get('Content-Type') + + // If mimeType’s essence is "multipart/form-data", then: + if (/multipart\/form-data/.test(contentType)) { + const headers = {} + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value + + const responseFormData = new FormData() + + let busboy + + try { + busboy = new Busboy({ + headers, + preservePath: true + }) + } catch (err) { + throw new DOMException(`${err}`, 'AbortError') + } + + busboy.on('field', (name, value) => { + responseFormData.append(name, value) + }) + busboy.on('file', (name, value, filename, encoding, mimeType) => { + const chunks = [] + + if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { + let base64chunk = '' + + value.on('data', (chunk) => { + base64chunk += chunk.toString().replace(/[\r\n]/gm, '') + + const end = base64chunk.length - base64chunk.length % 4 + chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) + + base64chunk = base64chunk.slice(end) + }) + value.on('end', () => { + chunks.push(Buffer.from(base64chunk, 'base64')) + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } else { + value.on('data', (chunk) => { + chunks.push(chunk) + }) + value.on('end', () => { + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } + }) + + const busboyResolve = new Promise((resolve, reject) => { + busboy.on('finish', resolve) + busboy.on('error', (err) => reject(new TypeError(err))) + }) + + if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) + busboy.end() + await busboyResolve + + return responseFormData + } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: + + // 1. Let entries be the result of parsing bytes. + let entries + try { + let text = '' + // application/x-www-form-urlencoded parser will keep the BOM. + // https://url.spec.whatwg.org/#concept-urlencoded-parser + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + + for await (const chunk of consumeBody(this[kState].body)) { + if (!isUint8Array(chunk)) { + throw new TypeError('Expected Uint8Array chunk') + } + text += streamingDecoder.decode(chunk, { stream: true }) + } + text += streamingDecoder.decode() + entries = new URLSearchParams(text) + } catch (err) { + // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. + // 2. If entries is failure, then throw a TypeError. + throw Object.assign(new TypeError(), { cause: err }) + } - lo = s[6][1] + g_lo; - s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[6][1] = lo >>> 0; + // 3. Return a new FormData object whose entries are entries. + const formData = new FormData() + for (const [name, value] of entries) { + formData.append(name, value) + } + return formData + } else { + // Wait a tick before checking if the request has been aborted. + // Otherwise, a TypeError can be thrown when an AbortError should. + await Promise.resolve() - lo = s[7][1] + h_lo; - s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0; - s[7][1] = lo >>> 0; + throwIfAborted(this[kState]) - len -= 128; + // Otherwise, throw a TypeError. 
+ throw webidl.errors.exception({ + header: `${instance.name}.formData`, + message: 'Could not parse content as FormData.' + }) + } + } } -} - -/***/ }), - -/***/ 4280: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/** - * Functions to output keys in SSH-friendly formats. - * - * This is part of the Forge project which may be used under the terms of - * either the BSD License or the GNU General Public License (GPL) Version 2. - * - * See: https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE - * - * @author https://github.com/shellac - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(5104); -__nccwpck_require__(6594); -__nccwpck_require__(279); -__nccwpck_require__(8339); + return methods +} -var ssh = module.exports = forge.ssh = forge.ssh || {}; +function mixinBody (prototype) { + Object.assign(prototype.prototype, bodyMixinMethods(prototype)) +} /** - * Encodes (and optionally encrypts) a private RSA key as a Putty PPK file. - * - * @param privateKey the key. - * @param passphrase a passphrase to protect the key (falsy for no encryption). - * @param comment a comment to include in the key file. - * - * @return the PPK file as a string. + * @see https://fetch.spec.whatwg.org/#concept-body-consume-body + * @param {Response|Request} object + * @param {(value: unknown) => unknown} convertBytesToJSValue + * @param {Response|Request} instance */ -ssh.privateKeyToPutty = function(privateKey, passphrase, comment) { - comment = comment || ''; - passphrase = passphrase || ''; - var algorithm = 'ssh-rsa'; - var encryptionAlgorithm = (passphrase === '') ? 'none' : 'aes256-cbc'; - - var ppk = 'PuTTY-User-Key-File-2: ' + algorithm + '\r\n'; - ppk += 'Encryption: ' + encryptionAlgorithm + '\r\n'; - ppk += 'Comment: ' + comment + '\r\n'; - - // public key into buffer for ppk - var pubbuffer = forge.util.createBuffer(); - _addStringToBuffer(pubbuffer, algorithm); - _addBigIntegerToBuffer(pubbuffer, privateKey.e); - _addBigIntegerToBuffer(pubbuffer, privateKey.n); - - // write public key - var pub = forge.util.encode64(pubbuffer.bytes(), 64); - var length = Math.floor(pub.length / 66) + 1; // 66 = 64 + \r\n - ppk += 'Public-Lines: ' + length + '\r\n'; - ppk += pub; - - // private key into a buffer - var privbuffer = forge.util.createBuffer(); - _addBigIntegerToBuffer(privbuffer, privateKey.d); - _addBigIntegerToBuffer(privbuffer, privateKey.p); - _addBigIntegerToBuffer(privbuffer, privateKey.q); - _addBigIntegerToBuffer(privbuffer, privateKey.qInv); - - // optionally encrypt the private key - var priv; - if(!passphrase) { - // use the unencrypted buffer - priv = forge.util.encode64(privbuffer.bytes(), 64); - } else { - // encrypt RSA key using passphrase - var encLen = privbuffer.length() + 16 - 1; - encLen -= encLen % 16; +async function specConsumeBody (object, convertBytesToJSValue, instance) { + webidl.brandCheck(object, instance) - // pad private key with sha1-d data -- needs to be a multiple of 16 - var padding = _sha1(privbuffer.bytes()); + throwIfAborted(object[kState]) - padding.truncate(padding.length() - encLen + privbuffer.length()); - privbuffer.putBuffer(padding); - - var aeskey = forge.util.createBuffer(); - aeskey.putBuffer(_sha1('\x00\x00\x00\x00', passphrase)); - aeskey.putBuffer(_sha1('\x00\x00\x00\x01', passphrase)); + // 1. If object is unusable, then return a promise rejected + // with a TypeError. 
+ if (bodyUnusable(object[kState].body)) { + throw new TypeError('Body is unusable') + } - // encrypt some bytes using CBC mode - // key is 40 bytes, so truncate *by* 8 bytes - var cipher = forge.aes.createEncryptionCipher(aeskey.truncate(8), 'CBC'); - cipher.start(forge.util.createBuffer().fillWithByte(0, 16)); - cipher.update(privbuffer.copy()); - cipher.finish(); - var encrypted = cipher.output; + // 2. Let promise be a new promise. + const promise = createDeferredPromise() - // Note: this appears to differ from Putty -- is forge wrong, or putty? - // due to padding we finish as an exact multiple of 16 - encrypted.truncate(16); // all padding + // 3. Let errorSteps given error be to reject promise with error. + const errorSteps = (error) => promise.reject(error) - priv = forge.util.encode64(encrypted.bytes(), 64); + // 4. Let successSteps given a byte sequence data be to resolve + // promise with the result of running convertBytesToJSValue + // with data. If that threw an exception, then run errorSteps + // with that exception. + const successSteps = (data) => { + try { + promise.resolve(convertBytesToJSValue(data)) + } catch (e) { + errorSteps(e) + } } - // output private key - length = Math.floor(priv.length / 66) + 1; // 64 + \r\n - ppk += '\r\nPrivate-Lines: ' + length + '\r\n'; - ppk += priv; - - // MAC - var mackey = _sha1('putty-private-key-file-mac-key', passphrase); - - var macbuffer = forge.util.createBuffer(); - _addStringToBuffer(macbuffer, algorithm); - _addStringToBuffer(macbuffer, encryptionAlgorithm); - _addStringToBuffer(macbuffer, comment); - macbuffer.putInt32(pubbuffer.length()); - macbuffer.putBuffer(pubbuffer); - macbuffer.putInt32(privbuffer.length()); - macbuffer.putBuffer(privbuffer); + // 5. If object’s body is null, then run successSteps with an + // empty byte sequence. + if (object[kState].body == null) { + successSteps(new Uint8Array()) + return promise.promise + } - var hmac = forge.hmac.create(); - hmac.start('sha1', mackey); - hmac.update(macbuffer.bytes()); + // 6. Otherwise, fully read object’s body given successSteps, + // errorSteps, and object’s relevant global object. + await fullyReadBody(object[kState].body, successSteps, errorSteps) - ppk += '\r\nPrivate-MAC: ' + hmac.digest().toHex() + '\r\n'; + // 7. Return promise. + return promise.promise +} - return ppk; -}; +// https://fetch.spec.whatwg.org/#body-unusable +function bodyUnusable (body) { + // An object including the Body interface mixin is + // said to be unusable if its body is non-null and + // its body’s stream is disturbed or locked. + return body != null && (body.stream.locked || util.isDisturbed(body.stream)) +} /** - * Encodes a public RSA key as an OpenSSH file. - * - * @param key the key. - * @param comment a comment. - * - * @return the public key in OpenSSH format. + * @see https://encoding.spec.whatwg.org/#utf-8-decode + * @param {Buffer} buffer */ -ssh.publicKeyToOpenSSH = function(key, comment) { - var type = 'ssh-rsa'; - comment = comment || ''; - - var buffer = forge.util.createBuffer(); - _addStringToBuffer(buffer, type); - _addBigIntegerToBuffer(buffer, key.e); - _addBigIntegerToBuffer(buffer, key.n); +function utf8DecodeBytes (buffer) { + if (buffer.length === 0) { + return '' + } - return type + ' ' + forge.util.encode64(buffer.bytes()) + ' ' + comment; -}; + // 1. Let buffer be the result of peeking three bytes from + // ioQueue, converted to a byte sequence. -/** - * Encodes a private RSA key as an OpenSSH file. - * - * @param key the key. 
- * @param passphrase a passphrase to protect the key (falsy for no encryption). - * - * @return the public key in OpenSSH format. - */ -ssh.privateKeyToOpenSSH = function(privateKey, passphrase) { - if(!passphrase) { - return forge.pki.privateKeyToPem(privateKey); + // 2. If buffer is 0xEF 0xBB 0xBF, then read three + // bytes from ioQueue. (Do nothing with those bytes.) + if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { + buffer = buffer.subarray(3) } - // OpenSSH private key is just a legacy format, it seems - return forge.pki.encryptRsaPrivateKey(privateKey, passphrase, - {legacy: true, algorithm: 'aes128'}); -}; -/** - * Gets the SSH fingerprint for the given public key. - * - * @param options the options to use. - * [md] the message digest object to use (defaults to forge.md.md5). - * [encoding] an alternative output encoding, such as 'hex' - * (defaults to none, outputs a byte buffer). - * [delimiter] the delimiter to use between bytes for 'hex' encoded - * output, eg: ':' (defaults to none). - * - * @return the fingerprint as a byte buffer or other encoding based on options. - */ -ssh.getPublicKeyFingerprint = function(key, options) { - options = options || {}; - var md = options.md || forge.md.md5.create(); - - var type = 'ssh-rsa'; - var buffer = forge.util.createBuffer(); - _addStringToBuffer(buffer, type); - _addBigIntegerToBuffer(buffer, key.e); - _addBigIntegerToBuffer(buffer, key.n); - - // hash public key bytes - md.start(); - md.update(buffer.getBytes()); - var digest = md.digest(); - if(options.encoding === 'hex') { - var hex = digest.toHex(); - if(options.delimiter) { - return hex.match(/.{2}/g).join(options.delimiter); - } - return hex; - } else if(options.encoding === 'binary') { - return digest.getBytes(); - } else if(options.encoding) { - throw new Error('Unknown encoding "' + options.encoding + '".'); - } - return digest; -}; + // 3. Process a queue with an instance of UTF-8’s + // decoder, ioQueue, output, and "replacement". + const output = textDecoder.decode(buffer) -/** - * Adds len(val) then val to a buffer. - * - * @param buffer the buffer to add to. - * @param val a big integer. - */ -function _addBigIntegerToBuffer(buffer, val) { - var hexVal = val.toString(16); - // ensure 2s complement +ve - if(hexVal[0] >= '8') { - hexVal = '00' + hexVal; - } - var bytes = forge.util.hexToBytes(hexVal); - buffer.putInt32(bytes.length); - buffer.putBytes(bytes); + // 4. Return output. + return output } /** - * Adds len(val) then val to a buffer. - * - * @param buffer the buffer to add to. - * @param val a string. + * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value + * @param {Uint8Array} bytes */ -function _addStringToBuffer(buffer, val) { - buffer.putInt32(val.length); - buffer.putString(val); +function parseJSONFromBytes (bytes) { + return JSON.parse(utf8DecodeBytes(bytes)) } /** - * Hashes the arguments into one value using SHA-1. - * - * @return the sha1 hash of the provided arguments. 
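// Note (grounded in _addBigIntegerToBuffer above): the SSH wire format writes each
// big integer as a 4-byte big-endian length followed by the magnitude bytes,
// prepending 0x00 when the top bit is set so the value is not read as negative;
// e.g. the value 0x80 is serialized as 00 00 00 02 00 80.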
+ * @see https://fetch.spec.whatwg.org/#concept-body-mime-type + * @param {import('./response').Response|import('./request').Request} object */ -function _sha1() { - var sha = forge.md.sha1.create(); - var num = arguments.length; - for (var i = 0; i < num; ++i) { - sha.update(arguments[i]); +function bodyMimeType (object) { + const { headersList } = object[kState] + const contentType = headersList.get('content-type') + + if (contentType === null) { + return 'failure' } - return sha.digest(); + + return parseMIMEType(contentType) +} + +module.exports = { + extractBody, + safelyExtractBody, + cloneBody, + mixinBody } /***/ }), -/***/ 9167: +/***/ 1037: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * A Javascript implementation of Transport Layer Security (TLS). - * - * @author Dave Longley - * - * Copyright (c) 2009-2014 Digital Bazaar, Inc. - * - * The TLS Handshake Protocol involves the following steps: - * - * - Exchange hello messages to agree on algorithms, exchange random values, - * and check for session resumption. - * - * - Exchange the necessary cryptographic parameters to allow the client and - * server to agree on a premaster secret. - * - * - Exchange certificates and cryptographic information to allow the client - * and server to authenticate themselves. - * - * - Generate a master secret from the premaster secret and exchanged random - * values. - * - * - Provide security parameters to the record layer. - * - * - Allow the client and server to verify that their peer has calculated the - * same security parameters and that the handshake occurred without tampering - * by an attacker. - * - * Up to 4 different messages may be sent during a key exchange. The server - * certificate, the server key exchange, the client certificate, and the - * client key exchange. - * - * A typical handshake (from the client's perspective). - * - * 1. Client sends ClientHello. - * 2. Client receives ServerHello. - * 3. Client receives optional Certificate. - * 4. Client receives optional ServerKeyExchange. - * 5. Client receives ServerHelloDone. - * 6. Client sends optional Certificate. - * 7. Client sends ClientKeyExchange. - * 8. Client sends optional CertificateVerify. - * 9. Client sends ChangeCipherSpec. - * 10. Client sends Finished. - * 11. Client receives ChangeCipherSpec. - * 12. Client receives Finished. - * 13. Client sends/receives application data. - * - * To reuse an existing session: - * - * 1. Client sends ClientHello with session ID for reuse. - * 2. Client receives ServerHello with same session ID if reusing. - * 3. Client receives ChangeCipherSpec message if reusing. - * 4. Client receives Finished. - * 5. Client sends ChangeCipherSpec. - * 6. Client sends Finished. - * - * Note: Client ignores HelloRequest if in the middle of a handshake. - * - * Record Layer: - * - * The record layer fragments information blocks into TLSPlaintext records - * carrying data in chunks of 2^14 bytes or less. Client message boundaries are - * not preserved in the record layer (i.e., multiple client messages of the - * same ContentType MAY be coalesced into a single TLSPlaintext record, or a - * single message MAY be fragmented across several records). - * - * struct { - * uint8 major; - * uint8 minor; - * } ProtocolVersion; - * - * struct { - * ContentType type; - * ProtocolVersion version; - * uint16 length; - * opaque fragment[TLSPlaintext.length]; - * } TLSPlaintext; - * - * type: - * The higher-level protocol used to process the enclosed fragment. 
- * - * version: - * The version of the protocol being employed. TLS Version 1.2 uses version - * {3, 3}. TLS Version 1.0 uses version {3, 1}. Note that a client that - * supports multiple versions of TLS may not know what version will be - * employed before it receives the ServerHello. - * - * length: - * The length (in bytes) of the following TLSPlaintext.fragment. The length - * MUST NOT exceed 2^14 = 16384 bytes. - * - * fragment: - * The application data. This data is transparent and treated as an - * independent block to be dealt with by the higher-level protocol specified - * by the type field. - * - * Implementations MUST NOT send zero-length fragments of Handshake, Alert, or - * ChangeCipherSpec content types. Zero-length fragments of Application data - * MAY be sent as they are potentially useful as a traffic analysis - * countermeasure. - * - * Note: Data of different TLS record layer content types MAY be interleaved. - * Application data is generally of lower precedence for transmission than - * other content types. However, records MUST be delivered to the network in - * the same order as they are protected by the record layer. Recipients MUST - * receive and process interleaved application layer traffic during handshakes - * subsequent to the first one on a connection. - * - * struct { - * ContentType type; // same as TLSPlaintext.type - * ProtocolVersion version;// same as TLSPlaintext.version - * uint16 length; - * opaque fragment[TLSCompressed.length]; - * } TLSCompressed; - * - * length: - * The length (in bytes) of the following TLSCompressed.fragment. - * The length MUST NOT exceed 2^14 + 1024. - * - * fragment: - * The compressed form of TLSPlaintext.fragment. - * - * Note: A CompressionMethod.null operation is an identity operation; no fields - * are altered. In this implementation, since no compression is supported, - * uncompressed records are always the same as compressed records. - * - * Encryption Information: - * - * The encryption and MAC functions translate a TLSCompressed structure into a - * TLSCiphertext. The decryption functions reverse the process. The MAC of the - * record also includes a sequence number so that missing, extra, or repeated - * messages are detectable. - * - * struct { - * ContentType type; - * ProtocolVersion version; - * uint16 length; - * select (SecurityParameters.cipher_type) { - * case stream: GenericStreamCipher; - * case block: GenericBlockCipher; - * case aead: GenericAEADCipher; - * } fragment; - * } TLSCiphertext; - * - * type: - * The type field is identical to TLSCompressed.type. - * - * version: - * The version field is identical to TLSCompressed.version. - * - * length: - * The length (in bytes) of the following TLSCiphertext.fragment. - * The length MUST NOT exceed 2^14 + 2048. - * - * fragment: - * The encrypted form of TLSCompressed.fragment, with the MAC. - * - * Note: Only CBC Block Ciphers are supported by this implementation. - * - * The TLSCompressed.fragment structures are converted to/from block - * TLSCiphertext.fragment structures. - * - * struct { - * opaque IV[SecurityParameters.record_iv_length]; - * block-ciphered struct { - * opaque content[TLSCompressed.length]; - * opaque MAC[SecurityParameters.mac_length]; - * uint8 padding[GenericBlockCipher.padding_length]; - * uint8 padding_length; - * }; - * } GenericBlockCipher; - * - * The MAC is generated as described in Section 6.2.3.1. - * - * IV: - * The Initialization Vector (IV) SHOULD be chosen at random, and MUST be - * unpredictable. 
Note that in versions of TLS prior to 1.1, there was no - * IV field, and the last ciphertext block of the previous record (the "CBC - * residue") was used as the IV. This was changed to prevent the attacks - * described in [CBCATT]. For block ciphers, the IV length is of length - * SecurityParameters.record_iv_length, which is equal to the - * SecurityParameters.block_size. - * - * padding: - * Padding that is added to force the length of the plaintext to be an - * integral multiple of the block cipher's block length. The padding MAY be - * any length up to 255 bytes, as long as it results in the - * TLSCiphertext.length being an integral multiple of the block length. - * Lengths longer than necessary might be desirable to frustrate attacks on - * a protocol that are based on analysis of the lengths of exchanged - * messages. Each uint8 in the padding data vector MUST be filled with the - * padding length value. The receiver MUST check this padding and MUST use - * the bad_record_mac alert to indicate padding errors. - * - * padding_length: - * The padding length MUST be such that the total size of the - * GenericBlockCipher structure is a multiple of the cipher's block length. - * Legal values range from zero to 255, inclusive. This length specifies the - * length of the padding field exclusive of the padding_length field itself. - * - * The encrypted data length (TLSCiphertext.length) is one more than the sum of - * SecurityParameters.block_length, TLSCompressed.length, - * SecurityParameters.mac_length, and padding_length. - * - * Example: If the block length is 8 bytes, the content length - * (TLSCompressed.length) is 61 bytes, and the MAC length is 20 bytes, then the - * length before padding is 82 bytes (this does not include the IV. Thus, the - * padding length modulo 8 must be equal to 6 in order to make the total length - * an even multiple of 8 bytes (the block length). The padding length can be - * 6, 14, 22, and so on, through 254. If the padding length were the minimum - * necessary, 6, the padding would be 6 bytes, each containing the value 6. - * Thus, the last 8 octets of the GenericBlockCipher before block encryption - * would be xx 06 06 06 06 06 06 06, where xx is the last octet of the MAC. - * - * Note: With block ciphers in CBC mode (Cipher Block Chaining), it is critical - * that the entire plaintext of the record be known before any ciphertext is - * transmitted. Otherwise, it is possible for the attacker to mount the attack - * described in [CBCATT]. - * - * Implementation note: Canvel et al. [CBCTIME] have demonstrated a timing - * attack on CBC padding based on the time required to compute the MAC. In - * order to defend against this attack, implementations MUST ensure that - * record processing time is essentially the same whether or not the padding - * is correct. In general, the best way to do this is to compute the MAC even - * if the padding is incorrect, and only then reject the packet. For instance, - * if the pad appears to be incorrect, the implementation might assume a - * zero-length pad and then compute the MAC. This leaves a small timing - * channel, since MAC performance depends, to some extent, on the size of the - * data fragment, but it is not believed to be large enough to be exploitable, - * due to the large block size of existing MACs and the small size of the - * timing signal. 
- */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(9549); -__nccwpck_require__(5104); -__nccwpck_require__(6594); -__nccwpck_require__(154); -__nccwpck_require__(6924); -__nccwpck_require__(7821); -__nccwpck_require__(279); -__nccwpck_require__(8339); - -/** - * Generates pseudo random bytes by mixing the result of two hash functions, - * MD5 and SHA-1. - * - * prf_TLS1(secret, label, seed) = - * P_MD5(S1, label + seed) XOR P_SHA-1(S2, label + seed); - * - * Each P_hash function functions as follows: - * - * P_hash(secret, seed) = HMAC_hash(secret, A(1) + seed) + - * HMAC_hash(secret, A(2) + seed) + - * HMAC_hash(secret, A(3) + seed) + ... - * A() is defined as: - * A(0) = seed - * A(i) = HMAC_hash(secret, A(i-1)) - * - * The '+' operator denotes concatenation. - * - * As many iterations A(N) as are needed are performed to generate enough - * pseudo random byte output. If an iteration creates more data than is - * necessary, then it is truncated. - * - * Therefore: - * A(1) = HMAC_hash(secret, A(0)) - * = HMAC_hash(secret, seed) - * A(2) = HMAC_hash(secret, A(1)) - * = HMAC_hash(secret, HMAC_hash(secret, seed)) - * - * Therefore: - * P_hash(secret, seed) = - * HMAC_hash(secret, HMAC_hash(secret, A(0)) + seed) + - * HMAC_hash(secret, HMAC_hash(secret, A(1)) + seed) + - * ... - * - * Therefore: - * P_hash(secret, seed) = - * HMAC_hash(secret, HMAC_hash(secret, seed) + seed) + - * HMAC_hash(secret, HMAC_hash(secret, HMAC_hash(secret, seed)) + seed) + - * ... - * - * @param secret the secret to use. - * @param label the label to use. - * @param seed the seed value to use. - * @param length the number of bytes to generate. - * - * @return the pseudo random bytes in a byte buffer. - */ -var prf_TLS1 = function(secret, label, seed, length) { - var rval = forge.util.createBuffer(); - - /* For TLS 1.0, the secret is split in half, into two secrets of equal - length. If the secret has an odd length then the last byte of the first - half will be the same as the first byte of the second. The length of the - two secrets is half of the secret rounded up. 
*/ - var idx = (secret.length >> 1); - var slen = idx + (secret.length & 1); - var s1 = secret.substr(0, slen); - var s2 = secret.substr(idx, slen); - var ai = forge.util.createBuffer(); - var hmac = forge.hmac.create(); - seed = label + seed; - - // determine the number of iterations that must be performed to generate - // enough output bytes, md5 creates 16 byte hashes, sha1 creates 20 - var md5itr = Math.ceil(length / 16); - var sha1itr = Math.ceil(length / 20); - - // do md5 iterations - hmac.start('MD5', s1); - var md5bytes = forge.util.createBuffer(); - ai.putBytes(seed); - for(var i = 0; i < md5itr; ++i) { - // HMAC_hash(secret, A(i-1)) - hmac.start(null, null); - hmac.update(ai.getBytes()); - ai.putBuffer(hmac.digest()); - - // HMAC_hash(secret, A(i) + seed) - hmac.start(null, null); - hmac.update(ai.bytes() + seed); - md5bytes.putBuffer(hmac.digest()); - } - - // do sha1 iterations - hmac.start('SHA1', s2); - var sha1bytes = forge.util.createBuffer(); - ai.clear(); - ai.putBytes(seed); - for(var i = 0; i < sha1itr; ++i) { - // HMAC_hash(secret, A(i-1)) - hmac.start(null, null); - hmac.update(ai.getBytes()); - ai.putBuffer(hmac.digest()); - - // HMAC_hash(secret, A(i) + seed) - hmac.start(null, null); - hmac.update(ai.bytes() + seed); - sha1bytes.putBuffer(hmac.digest()); - } - - // XOR the md5 bytes with the sha1 bytes - rval.putBytes(forge.util.xorBytes( - md5bytes.getBytes(), sha1bytes.getBytes(), length)); - - return rval; -}; - -/** - * Generates pseudo random bytes using a SHA256 algorithm. For TLS 1.2. - * - * @param secret the secret to use. - * @param label the label to use. - * @param seed the seed value to use. - * @param length the number of bytes to generate. - * - * @return the pseudo random bytes in a byte buffer. - */ -var prf_sha256 = function(secret, label, seed, length) { - // FIXME: implement me for TLS 1.2 -}; - -/** - * Gets a MAC for a record using the SHA-1 hash algorithm. - * - * @param key the mac key. - * @param state the sequence number (array of two 32-bit integers). - * @param record the record. - * - * @return the sha-1 hash (20 bytes) for the given record. - */ -var hmac_sha1 = function(key, seqNum, record) { - /* MAC is computed like so: - HMAC_hash( - key, seqNum + - TLSCompressed.type + - TLSCompressed.version + - TLSCompressed.length + - TLSCompressed.fragment) - */ - var hmac = forge.hmac.create(); - hmac.start('SHA1', key); - var b = forge.util.createBuffer(); - b.putInt32(seqNum[0]); - b.putInt32(seqNum[1]); - b.putByte(record.type); - b.putByte(record.version.major); - b.putByte(record.version.minor); - b.putInt16(record.length); - b.putBytes(record.fragment.bytes()); - hmac.update(b.getBytes()); - return hmac.digest().getBytes(); -}; +"use strict"; -/** - * Compresses the TLSPlaintext record into a TLSCompressed record using the - * deflate algorithm. - * - * @param c the TLS connection. - * @param record the TLSPlaintext record to compress. - * @param s the ConnectionState to use. - * - * @return true on success, false on failure. 
- */ -var deflate = function(c, record, s) { - var rval = false; +const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(1267) + +const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) + +const nullBodyStatus = [101, 204, 205, 304] + +const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) + +// https://fetch.spec.whatwg.org/#block-bad-port +const badPorts = [ + '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', + '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', + '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', + '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', + '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', + '10080' +] + +const badPortsSet = new Set(badPorts) + +// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies +const referrerPolicy = [ + '', + 'no-referrer', + 'no-referrer-when-downgrade', + 'same-origin', + 'origin', + 'strict-origin', + 'origin-when-cross-origin', + 'strict-origin-when-cross-origin', + 'unsafe-url' +] +const referrerPolicySet = new Set(referrerPolicy) + +const requestRedirect = ['follow', 'manual', 'error'] + +const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) + +const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + +const requestCredentials = ['omit', 'same-origin', 'include'] + +const requestCache = [ + 'default', + 'no-store', + 'reload', + 'no-cache', + 'force-cache', + 'only-if-cached' +] + +// https://fetch.spec.whatwg.org/#request-body-header-name +const requestBodyHeader = [ + 'content-encoding', + 'content-language', + 'content-location', + 'content-type', + // See https://github.com/nodejs/undici/issues/2021 + // 'Content-Length' is a forbidden header name, which is typically + // removed in the Headers implementation. However, undici doesn't + // filter out headers, so we add it here. + 'content-length' +] + +// https://fetch.spec.whatwg.org/#enumdef-requestduplex +const requestDuplex = [ + 'half' +] + +// http://fetch.spec.whatwg.org/#forbidden-method +const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) + +const subresource = [ + 'audio', + 'audioworklet', + 'font', + 'image', + 'manifest', + 'paintworklet', + 'script', + 'style', + 'track', + 'video', + 'xslt', + '' +] +const subresourceSet = new Set(subresource) + +/** @type {globalThis['DOMException']} */ +const DOMException = globalThis.DOMException ?? (() => { + // DOMException was only made a global in Node v17.0.0, + // but fetch supports >= v16.8. try { - var bytes = c.deflate(record.fragment.getBytes()); - record.fragment = forge.util.createBuffer(bytes); - record.length = bytes.length; - rval = true; - } catch(ex) { - // deflate error, fail out + atob('~') + } catch (err) { + return Object.getPrototypeOf(err).constructor } +})() - return rval; -}; +let channel -/** - * Decompresses the TLSCompressed record into a TLSPlaintext record using the - * deflate algorithm. - * - * @param c the TLS connection. - * @param record the TLSCompressed record to decompress. - * @param s the ConnectionState to use. 
- * - * @return true on success, false on failure. - */ -var inflate = function(c, record, s) { - var rval = false; +/** @type {globalThis['structuredClone']} */ +const structuredClone = + globalThis.structuredClone ?? + // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js + // structuredClone was added in v17.0.0, but fetch supports v16.8 + function structuredClone (value, options = undefined) { + if (arguments.length === 0) { + throw new TypeError('missing argument') + } - try { - var bytes = c.inflate(record.fragment.getBytes()); - record.fragment = forge.util.createBuffer(bytes); - record.length = bytes.length; - rval = true; - } catch(ex) { - // inflate error, fail out + if (!channel) { + channel = new MessageChannel() + } + channel.port1.unref() + channel.port2.unref() + channel.port1.postMessage(value, options?.transfer) + return receiveMessageOnPort(channel.port2).message } - return rval; -}; - -/** - * Reads a TLS variable-length vector from a byte buffer. - * - * Variable-length vectors are defined by specifying a subrange of legal - * lengths, inclusively, using the notation . When these are - * encoded, the actual length precedes the vector's contents in the byte - * stream. The length will be in the form of a number consuming as many bytes - * as required to hold the vector's specified maximum (ceiling) length. A - * variable-length vector with an actual length field of zero is referred to - * as an empty vector. - * - * @param b the byte buffer. - * @param lenBytes the number of bytes required to store the length. - * - * @return the resulting byte buffer. - */ -var readVector = function(b, lenBytes) { - var len = 0; - switch(lenBytes) { - case 1: - len = b.getByte(); - break; - case 2: - len = b.getInt16(); - break; - case 3: - len = b.getInt24(); - break; - case 4: - len = b.getInt32(); - break; - } - - // read vector bytes into a new buffer - return forge.util.createBuffer(b.getBytes(len)); -}; - -/** - * Writes a TLS variable-length vector to a byte buffer. - * - * @param b the byte buffer. - * @param lenBytes the number of bytes required to store the length. - * @param v the byte buffer vector. - */ -var writeVector = function(b, lenBytes, v) { - // encode length at the start of the vector, where the number of bytes for - // the length is the maximum number of bytes it would take to encode the - // vector's ceiling - b.putInt(v.length(), lenBytes << 3); - b.putBuffer(v); -}; - -/** - * The tls implementation. - */ -var tls = {}; +module.exports = { + DOMException, + structuredClone, + subresource, + forbiddenMethods, + requestBodyHeader, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + redirectStatus, + corsSafeListedMethods, + nullBodyStatus, + safeMethods, + badPorts, + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet +} -/** - * Version: TLS 1.2 = 3.3, TLS 1.1 = 3.2, TLS 1.0 = 3.1. Both TLS 1.1 and - * TLS 1.2 were still too new (ie: openSSL didn't implement them) at the time - * of this implementation so TLS 1.0 was implemented instead. - */ -tls.Versions = { - TLS_1_0: {major: 3, minor: 1}, - TLS_1_1: {major: 3, minor: 2}, - TLS_1_2: {major: 3, minor: 3} -}; -tls.SupportedVersions = [ - tls.Versions.TLS_1_1, - tls.Versions.TLS_1_0 -]; -tls.Version = tls.SupportedVersions[0]; -/** - * Maximum fragment size. 
True maximum is 16384, but we fragment before that - * to allow for unusual small increases during compression. - */ -tls.MaxFragment = 16384 - 1024; +/***/ }), -/** - * Whether this entity is considered the "client" or "server". - * enum { server, client } ConnectionEnd; - */ -tls.ConnectionEnd = { - server: 0, - client: 1 -}; +/***/ 685: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Pseudo-random function algorithm used to generate keys from the master - * secret. - * enum { tls_prf_sha256 } PRFAlgorithm; - */ -tls.PRFAlgorithm = { - tls_prf_sha256: 0 -}; +const assert = __nccwpck_require__(9491) +const { atob } = __nccwpck_require__(4300) +const { isomorphicDecode } = __nccwpck_require__(2538) -/** - * Bulk encryption algorithms. - * enum { null, rc4, des3, aes } BulkCipherAlgorithm; - */ -tls.BulkCipherAlgorithm = { - none: null, - rc4: 0, - des3: 1, - aes: 2 -}; +const encoder = new TextEncoder() /** - * Cipher types. - * enum { stream, block, aead } CipherType; + * @see https://mimesniff.spec.whatwg.org/#http-token-code-point */ -tls.CipherType = { - stream: 0, - block: 1, - aead: 2 -}; - +const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ +const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line /** - * MAC (Message Authentication Code) algorithms. - * enum { null, hmac_md5, hmac_sha1, hmac_sha256, - * hmac_sha384, hmac_sha512} MACAlgorithm; + * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point */ -tls.MACAlgorithm = { - none: null, - hmac_md5: 0, - hmac_sha1: 1, - hmac_sha256: 2, - hmac_sha384: 3, - hmac_sha512: 4 -}; +const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line -/** - * Compression algorithms. - * enum { null(0), deflate(1), (255) } CompressionMethod; - */ -tls.CompressionMethod = { - none: 0, - deflate: 1 -}; +// https://fetch.spec.whatwg.org/#data-url-processor +/** @param {URL} dataURL */ +function dataURLProcessor (dataURL) { + // 1. Assert: dataURL’s scheme is "data". + assert(dataURL.protocol === 'data:') -/** - * TLS record content types. - * enum { - * change_cipher_spec(20), alert(21), handshake(22), - * application_data(23), (255) - * } ContentType; - */ -tls.ContentType = { - change_cipher_spec: 20, - alert: 21, - handshake: 22, - application_data: 23, - heartbeat: 24 -}; + // 2. Let input be the result of running the URL + // serializer on dataURL with exclude fragment + // set to true. + let input = URLSerializer(dataURL, true) -/** - * TLS handshake types. - * enum { - * hello_request(0), client_hello(1), server_hello(2), - * certificate(11), server_key_exchange (12), - * certificate_request(13), server_hello_done(14), - * certificate_verify(15), client_key_exchange(16), - * finished(20), (255) - * } HandshakeType; - */ -tls.HandshakeType = { - hello_request: 0, - client_hello: 1, - server_hello: 2, - certificate: 11, - server_key_exchange: 12, - certificate_request: 13, - server_hello_done: 14, - certificate_verify: 15, - client_key_exchange: 16, - finished: 20 -}; + // 3. Remove the leading "data:" string from input. + input = input.slice(5) -/** - * TLS Alert Protocol. 
- * - * enum { warning(1), fatal(2), (255) } AlertLevel; - * - * enum { - * close_notify(0), - * unexpected_message(10), - * bad_record_mac(20), - * decryption_failed(21), - * record_overflow(22), - * decompression_failure(30), - * handshake_failure(40), - * bad_certificate(42), - * unsupported_certificate(43), - * certificate_revoked(44), - * certificate_expired(45), - * certificate_unknown(46), - * illegal_parameter(47), - * unknown_ca(48), - * access_denied(49), - * decode_error(50), - * decrypt_error(51), - * export_restriction(60), - * protocol_version(70), - * insufficient_security(71), - * internal_error(80), - * user_canceled(90), - * no_renegotiation(100), - * (255) - * } AlertDescription; - * - * struct { - * AlertLevel level; - * AlertDescription description; - * } Alert; - */ -tls.Alert = {}; -tls.Alert.Level = { - warning: 1, - fatal: 2 -}; -tls.Alert.Description = { - close_notify: 0, - unexpected_message: 10, - bad_record_mac: 20, - decryption_failed: 21, - record_overflow: 22, - decompression_failure: 30, - handshake_failure: 40, - bad_certificate: 42, - unsupported_certificate: 43, - certificate_revoked: 44, - certificate_expired: 45, - certificate_unknown: 46, - illegal_parameter: 47, - unknown_ca: 48, - access_denied: 49, - decode_error: 50, - decrypt_error: 51, - export_restriction: 60, - protocol_version: 70, - insufficient_security: 71, - internal_error: 80, - user_canceled: 90, - no_renegotiation: 100 -}; + // 4. Let position point at the start of input. + const position = { position: 0 } -/** - * TLS Heartbeat Message types. - * enum { - * heartbeat_request(1), - * heartbeat_response(2), - * (255) - * } HeartbeatMessageType; - */ -tls.HeartbeatMessageType = { - heartbeat_request: 1, - heartbeat_response: 2 -}; + // 5. Let mimeType be the result of collecting a + // sequence of code points that are not equal + // to U+002C (,), given position. + let mimeType = collectASequenceOfCodePointsFast( + ',', + input, + position + ) -/** - * Supported cipher suites. - */ -tls.CipherSuites = {}; + // 6. Strip leading and trailing ASCII whitespace + // from mimeType. + // Undici implementation note: we need to store the + // length because if the mimetype has spaces removed, + // the wrong amount will be sliced from the input in + // step #9 + const mimeTypeLength = mimeType.length + mimeType = removeASCIIWhitespace(mimeType, true, true) -/** - * Gets a supported cipher suite from its 2 byte ID. - * - * @param twoBytes two bytes in a string. - * - * @return the matching supported cipher suite or null. - */ -tls.getCipherSuite = function(twoBytes) { - var rval = null; - for(var key in tls.CipherSuites) { - var cs = tls.CipherSuites[key]; - if(cs.id[0] === twoBytes.charCodeAt(0) && - cs.id[1] === twoBytes.charCodeAt(1)) { - rval = cs; - break; - } + // 7. If position is past the end of input, then + // return failure + if (position.position >= input.length) { + return 'failure' } - return rval; -}; -/** - * Called when an unexpected record is encountered. - * - * @param c the connection. - * @param record the record. - */ -tls.handleUnexpected = function(c, record) { - // if connection is client and closed, ignore unexpected messages - var ignore = (!c.open && c.entity === tls.ConnectionEnd.client); - if(!ignore) { - c.error(c, { - message: 'Unexpected message. Received TLS record out of order.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.unexpected_message - } - }); - } -}; + // 8. Advance position by 1. 
+ position.position++ -/** - * Called when a client receives a HelloRequest record. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleHelloRequest = function(c, record, length) { - // ignore renegotiation requests from the server during a handshake, but - // if handshaking, send a warning alert that renegotation is denied - if(!c.handshaking && c.handshakes > 0) { - // send alert warning - tls.queue(c, tls.createAlert(c, { - level: tls.Alert.Level.warning, - description: tls.Alert.Description.no_renegotiation - })); - tls.flush(c); - } + // 9. Let encodedBody be the remainder of input. + const encodedBody = input.slice(mimeTypeLength + 1) - // continue - c.process(); -}; + // 10. Let body be the percent-decoding of encodedBody. + let body = stringPercentDecode(encodedBody) -/** - * Parses a hello message from a ClientHello or ServerHello record. - * - * @param record the record to parse. - * - * @return the parsed message. - */ -tls.parseHelloMessage = function(c, record, length) { - var msg = null; - - var client = (c.entity === tls.ConnectionEnd.client); - - // minimum of 38 bytes in message - if(length < 38) { - c.error(c, { - message: client ? - 'Invalid ServerHello message. Message too short.' : - 'Invalid ClientHello message. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } else { - // use 'remaining' to calculate # of remaining bytes in the message - var b = record.fragment; - var remaining = b.length(); - msg = { - version: { - major: b.getByte(), - minor: b.getByte() - }, - random: forge.util.createBuffer(b.getBytes(32)), - session_id: readVector(b, 1), - extensions: [] - }; - if(client) { - msg.cipher_suite = b.getBytes(2); - msg.compression_method = b.getByte(); - } else { - msg.cipher_suites = readVector(b, 2); - msg.compression_methods = readVector(b, 1); - } - - // read extensions if there are any bytes left in the message - remaining = length - (remaining - b.length()); - if(remaining > 0) { - // parse extensions - var exts = readVector(b, 2); - while(exts.length() > 0) { - msg.extensions.push({ - type: [exts.getByte(), exts.getByte()], - data: readVector(exts, 2) - }); - } + // 11. If mimeType ends with U+003B (;), followed by + // zero or more U+0020 SPACE, followed by an ASCII + // case-insensitive match for "base64", then: + if (/;(\u0020){0,}base64$/i.test(mimeType)) { + // 1. Let stringBody be the isomorphic decode of body. + const stringBody = isomorphicDecode(body) - // TODO: make extension support modular - if(!client) { - for(var i = 0; i < msg.extensions.length; ++i) { - var ext = msg.extensions[i]; - - // support SNI extension - if(ext.type[0] === 0x00 && ext.type[1] === 0x00) { - // get server name list - var snl = readVector(ext.data, 2); - while(snl.length() > 0) { - // read server name type - var snType = snl.getByte(); - - // only HostName type (0x00) is known, break out if - // another type is detected - if(snType !== 0x00) { - break; - } + // 2. Set body to the forgiving-base64 decode of + // stringBody. + body = forgivingBase64(stringBody) - // add host name to server name list - c.session.extensions.server_name.serverNameList.push( - readVector(snl, 2).getBytes()); - } - } - } - } + // 3. If body is failure, then return failure. 
+ if (body === 'failure') { + return 'failure' } - // version already set, do not allow version change - if(c.session.version) { - if(msg.version.major !== c.session.version.major || - msg.version.minor !== c.session.version.minor) { - return c.error(c, { - message: 'TLS version change is disallowed during renegotiation.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.protocol_version - } - }); - } - } + // 4. Remove the last 6 code points from mimeType. + mimeType = mimeType.slice(0, -6) - // get the chosen (ServerHello) cipher suite - if(client) { - // FIXME: should be checking configured acceptable cipher suites - c.session.cipherSuite = tls.getCipherSuite(msg.cipher_suite); - } else { - // get a supported preferred (ClientHello) cipher suite - // choose the first supported cipher suite - var tmp = forge.util.createBuffer(msg.cipher_suites.bytes()); - while(tmp.length() > 0) { - // FIXME: should be checking configured acceptable suites - // cipher suites take up 2 bytes - c.session.cipherSuite = tls.getCipherSuite(tmp.getBytes(2)); - if(c.session.cipherSuite !== null) { - break; - } - } - } + // 5. Remove trailing U+0020 SPACE code points from mimeType, + // if any. + mimeType = mimeType.replace(/(\u0020)+$/, '') - // cipher suite not supported - if(c.session.cipherSuite === null) { - return c.error(c, { - message: 'No cipher suites in common.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.handshake_failure - }, - cipherSuite: forge.util.bytesToHex(msg.cipher_suite) - }); - } + // 6. Remove the last U+003B (;) code point from mimeType. + mimeType = mimeType.slice(0, -1) + } - // TODO: handle compression methods - if(client) { - c.session.compressionMethod = msg.compression_method; - } else { - // no compression - c.session.compressionMethod = tls.CompressionMethod.none; - } + // 12. If mimeType starts with U+003B (;), then prepend + // "text/plain" to mimeType. + if (mimeType.startsWith(';')) { + mimeType = 'text/plain' + mimeType } - return msg; -}; + // 13. Let mimeTypeRecord be the result of parsing + // mimeType. + let mimeTypeRecord = parseMIMEType(mimeType) -/** - * Creates security parameters for the given connection based on the given - * hello message. - * - * @param c the TLS connection. - * @param msg the hello message. - */ -tls.createSecurityParameters = function(c, msg) { - /* Note: security params are from TLS 1.2, some values like prf_algorithm - are ignored for TLS 1.0/1.1 and the builtin as specified in the spec is - used. */ - - // TODO: handle other options from server when more supported - - // get client and server randoms - var client = (c.entity === tls.ConnectionEnd.client); - var msgRandom = msg.random.bytes(); - var cRandom = client ? c.session.sp.client_random : msgRandom; - var sRandom = client ? msgRandom : tls.createRandom().getBytes(); - - // create new security parameters - c.session.sp = { - entity: c.entity, - prf_algorithm: tls.PRFAlgorithm.tls_prf_sha256, - bulk_cipher_algorithm: null, - cipher_type: null, - enc_key_length: null, - block_length: null, - fixed_iv_length: null, - record_iv_length: null, - mac_algorithm: null, - mac_length: null, - mac_key_length: null, - compression_algorithm: c.session.compressionMethod, - pre_master_secret: null, - master_secret: null, - client_random: cRandom, - server_random: sRandom - }; -}; + // 14. If mimeTypeRecord is failure, then set + // mimeTypeRecord to text/plain;charset=US-ASCII. 
+ if (mimeTypeRecord === 'failure') { + mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + } + + // 15. Return a new data: URL struct whose MIME + // type is mimeTypeRecord and body is body. + // https://fetch.spec.whatwg.org/#data-url-struct + return { mimeType: mimeTypeRecord, body } +} +// https://url.spec.whatwg.org/#concept-url-serializer /** - * Called when a client receives a ServerHello record. - * - * When a ServerHello message will be sent: - * The server will send this message in response to a client hello message - * when it was able to find an acceptable set of algorithms. If it cannot - * find such a match, it will respond with a handshake failure alert. - * - * uint24 length; - * struct { - * ProtocolVersion server_version; - * Random random; - * SessionID session_id; - * CipherSuite cipher_suite; - * CompressionMethod compression_method; - * select(extensions_present) { - * case false: - * struct {}; - * case true: - * Extension extensions<0..2^16-1>; - * }; - * } ServerHello; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. + * @param {URL} url + * @param {boolean} excludeFragment */ -tls.handleServerHello = function(c, record, length) { - var msg = tls.parseHelloMessage(c, record, length); - if(c.fail) { - return; - } - - // ensure server version is compatible - if(msg.version.minor <= c.version.minor) { - c.version.minor = msg.version.minor; - } else { - return c.error(c, { - message: 'Incompatible TLS version.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.protocol_version - } - }); +function URLSerializer (url, excludeFragment = false) { + if (!excludeFragment) { + return url.href } - // indicate session version has been set - c.session.version = c.version; + const href = url.href + const hashLength = url.hash.length - // get the session ID from the message - var sessionId = msg.session_id.bytes(); + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) +} - // if the session ID is not blank and matches the cached one, resume - // the session - if(sessionId.length > 0 && sessionId === c.session.id) { - // resuming session, expect a ChangeCipherSpec next - c.expect = SCC; - c.session.resuming = true; +// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points +/** + * @param {(char: string) => boolean} condition + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePoints (condition, input, position) { + // 1. Let result be the empty string. + let result = '' - // get new server random - c.session.sp.server_random = msg.random.bytes(); - } else { - // not resuming, expect a server Certificate message next - c.expect = SCE; - c.session.resuming = false; + // 2. While position doesn’t point past the end of input and the + // code point at position within input meets the condition condition: + while (position.position < input.length && condition(input[position.position])) { + // 1. Append that code point to the end of result. + result += input[position.position] - // create new security parameters - tls.createSecurityParameters(c, msg); + // 2. Advance position by 1. + position.position++ } - // set new session ID - c.session.id = sessionId; - - // continue - c.process(); -}; + // 3. Return result. + return result +} /** - * Called when a server receives a ClientHello record. 
- * - * When a ClientHello message will be sent: - * When a client first connects to a server it is required to send the - * client hello as its first message. The client can also send a client - * hello in response to a hello request or on its own initiative in order - * to renegotiate the security parameters in an existing connection. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. + * A faster collectASequenceOfCodePoints that only works when comparing a single character. + * @param {string} char + * @param {string} input + * @param {{ position: number }} position */ -tls.handleClientHello = function(c, record, length) { - var msg = tls.parseHelloMessage(c, record, length); - if(c.fail) { - return; - } - - // get the session ID from the message - var sessionId = msg.session_id.bytes(); +function collectASequenceOfCodePointsFast (char, input, position) { + const idx = input.indexOf(char, position.position) + const start = position.position - // see if the given session ID is in the cache - var session = null; - if(c.sessionCache) { - session = c.sessionCache.getSession(sessionId); - if(session === null) { - // session ID not found - sessionId = ''; - } else if(session.version.major !== msg.version.major || - session.version.minor > msg.version.minor) { - // if session version is incompatible with client version, do not resume - session = null; - sessionId = ''; - } - } - - // no session found to resume, generate a new session ID - if(sessionId.length === 0) { - sessionId = forge.random.getBytes(32); + if (idx === -1) { + position.position = input.length + return input.slice(start) } - // update session - c.session.id = sessionId; - c.session.clientHelloVersion = msg.version; - c.session.sp = {}; - if(session) { - // use version and security parameters from resumed session - c.version = c.session.version = session.version; - c.session.sp = session.sp; - } else { - // use highest compatible minor version - var version; - for(var i = 1; i < tls.SupportedVersions.length; ++i) { - version = tls.SupportedVersions[i]; - if(version.minor <= msg.version.minor) { - break; - } - } - c.version = {major: version.major, minor: version.minor}; - c.session.version = c.version; - } + position.position = idx + return input.slice(start, position.position) +} - // if a session is set, resume it - if(session !== null) { - // resuming session, expect a ChangeCipherSpec next - c.expect = CCC; - c.session.resuming = true; +// https://url.spec.whatwg.org/#string-percent-decode +/** @param {string} input */ +function stringPercentDecode (input) { + // 1. Let bytes be the UTF-8 encoding of input. + const bytes = encoder.encode(input) - // get new client random - c.session.sp.client_random = msg.random.bytes(); - } else { - // not resuming, expect a Certificate or ClientKeyExchange - c.expect = (c.verifyClient !== false) ? CCE : CKE; - c.session.resuming = false; + // 2. Return the percent-decoding of bytes. + return percentDecode(bytes) +} - // create new security parameters - tls.createSecurityParameters(c, msg); - } +// https://url.spec.whatwg.org/#percent-decode +/** @param {Uint8Array} input */ +function percentDecode (input) { + // 1. Let output be an empty byte sequence. + /** @type {number[]} */ + const output = [] - // connection now open - c.open = true; + // 2. 
For each byte byte in input: + for (let i = 0; i < input.length; i++) { + const byte = input[i] - // queue server hello - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createServerHello(c) - })); + // 1. If byte is not 0x25 (%), then append byte to output. + if (byte !== 0x25) { + output.push(byte) - if(c.session.resuming) { - // queue change cipher spec message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.change_cipher_spec, - data: tls.createChangeCipherSpec() - })); + // 2. Otherwise, if byte is 0x25 (%) and the next two bytes + // after byte in input are not in the ranges + // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), + // and 0x61 (a) to 0x66 (f), all inclusive, append byte + // to output. + } else if ( + byte === 0x25 && + !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) + ) { + output.push(0x25) - // create pending state - c.state.pending = tls.createConnectionState(c); + // 3. Otherwise: + } else { + // 1. Let bytePoint be the two bytes after byte in input, + // decoded, and then interpreted as hexadecimal number. + const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) + const bytePoint = Number.parseInt(nextTwoBytes, 16) + + // 2. Append a byte whose value is bytePoint to output. + output.push(bytePoint) + + // 3. Skip the next two bytes in input. + i += 2 + } + } + + // 3. Return output. + return Uint8Array.from(output) +} + +// https://mimesniff.spec.whatwg.org/#parse-a-mime-type +/** @param {string} input */ +function parseMIMEType (input) { + // 1. Remove any leading and trailing HTTP whitespace + // from input. + input = removeHTTPWhitespace(input, true, true) + + // 2. Let position be a position variable for input, + // initially pointing at the start of input. + const position = { position: 0 } + + // 3. Let type be the result of collecting a sequence + // of code points that are not U+002F (/) from + // input, given position. + const type = collectASequenceOfCodePointsFast( + '/', + input, + position + ) + + // 4. If type is the empty string or does not solely + // contain HTTP token code points, then return failure. + // https://mimesniff.spec.whatwg.org/#http-token-code-point + if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { + return 'failure' + } + + // 5. If position is past the end of input, then return + // failure + if (position.position > input.length) { + return 'failure' + } + + // 6. Advance position by 1. (This skips past U+002F (/).) + position.position++ + + // 7. Let subtype be the result of collecting a sequence of + // code points that are not U+003B (;) from input, given + // position. + let subtype = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 8. Remove any trailing HTTP whitespace from subtype. + subtype = removeHTTPWhitespace(subtype, false, true) + + // 9. If subtype is the empty string or does not solely + // contain HTTP token code points, then return failure. + if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { + return 'failure' + } + + const typeLowercase = type.toLowerCase() + const subtypeLowercase = subtype.toLowerCase() + + // 10. Let mimeType be a new MIME type record whose type + // is type, in ASCII lowercase, and subtype is subtype, + // in ASCII lowercase. 
+ // https://mimesniff.spec.whatwg.org/#mime-type + const mimeType = { + type: typeLowercase, + subtype: subtypeLowercase, + /** @type {Map} */ + parameters: new Map(), + // https://mimesniff.spec.whatwg.org/#mime-type-essence + essence: `${typeLowercase}/${subtypeLowercase}` + } + + // 11. While position is not past the end of input: + while (position.position < input.length) { + // 1. Advance position by 1. (This skips past U+003B (;).) + position.position++ + + // 2. Collect a sequence of code points that are HTTP + // whitespace from input given position. + collectASequenceOfCodePoints( + // https://fetch.spec.whatwg.org/#http-whitespace + char => HTTP_WHITESPACE_REGEX.test(char), + input, + position + ) - // change current write state to pending write state - c.state.current.write = c.state.pending.write; + // 3. Let parameterName be the result of collecting a + // sequence of code points that are not U+003B (;) + // or U+003D (=) from input, given position. + let parameterName = collectASequenceOfCodePoints( + (char) => char !== ';' && char !== '=', + input, + position + ) - // queue finished - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createFinished(c) - })); - } else { - // queue server certificate - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificate(c) - })); + // 4. Set parameterName to parameterName, in ASCII + // lowercase. + parameterName = parameterName.toLowerCase() - if(!c.fail) { - // queue server key exchange - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createServerKeyExchange(c) - })); - - // request client certificate if set - if(c.verifyClient !== false) { - // queue certificate request - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificateRequest(c) - })); + // 5. If position is not past the end of input, then: + if (position.position < input.length) { + // 1. If the code point at position within input is + // U+003B (;), then continue. + if (input[position.position] === ';') { + continue } - // queue server hello done - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createServerHelloDone(c) - })); + // 2. Advance position by 1. (This skips past U+003D (=).) + position.position++ } - } - // send records - tls.flush(c); + // 6. If position is past the end of input, then break. + if (position.position > input.length) { + break + } - // continue - c.process(); -}; + // 7. Let parameterValue be null. + let parameterValue = null -/** - * Called when a client receives a Certificate record. - * - * When this message will be sent: - * The server must send a certificate whenever the agreed-upon key exchange - * method is not an anonymous one. This message will always immediately - * follow the server hello message. - * - * Meaning of this message: - * The certificate type must be appropriate for the selected cipher suite's - * key exchange algorithm, and is generally an X.509v3 certificate. It must - * contain a key which matches the key exchange method, as follows. Unless - * otherwise specified, the signing algorithm for the certificate must be - * the same as the algorithm for the certificate key. Unless otherwise - * specified, the public key may be of any length. - * - * opaque ASN.1Cert<1..2^24-1>; - * struct { - * ASN.1Cert certificate_list<1..2^24-1>; - * } Certificate; - * - * @param c the connection. - * @param record the record. 
- * @param length the length of the handshake message. - */ -tls.handleCertificate = function(c, record, length) { - // minimum of 3 bytes in message - if(length < 3) { - return c.error(c, { - message: 'Invalid Certificate message. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } + // 8. If the code point at position within input is + // U+0022 ("), then: + if (input[position.position] === '"') { + // 1. Set parameterValue to the result of collecting + // an HTTP quoted string from input, given position + // and the extract-value flag. + parameterValue = collectAnHTTPQuotedString(input, position, true) - var b = record.fragment; - var msg = { - certificate_list: readVector(b, 3) - }; + // 2. Collect a sequence of code points that are not + // U+003B (;) from input, given position. + collectASequenceOfCodePointsFast( + ';', + input, + position + ) - /* The sender's certificate will be first in the list (chain), each - subsequent one that follows will certify the previous one, but root - certificates (self-signed) that specify the certificate authority may - be omitted under the assumption that clients must already possess it. */ - var cert, asn1; - var certs = []; - try { - while(msg.certificate_list.length() > 0) { - // each entry in msg.certificate_list is a vector with 3 len bytes - cert = readVector(msg.certificate_list, 3); - asn1 = forge.asn1.fromDer(cert); - cert = forge.pki.certificateFromAsn1(asn1, true); - certs.push(cert); - } - } catch(ex) { - return c.error(c, { - message: 'Could not parse certificate list.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.bad_certificate - } - }); - } + // 9. Otherwise: + } else { + // 1. Set parameterValue to the result of collecting + // a sequence of code points that are not U+003B (;) + // from input, given position. + parameterValue = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 2. Remove any trailing HTTP whitespace from parameterValue. + parameterValue = removeHTTPWhitespace(parameterValue, false, true) - // ensure at least 1 certificate was provided if in client-mode - // or if verifyClient was set to true to require a certificate - // (as opposed to 'optional') - var client = (c.entity === tls.ConnectionEnd.client); - if((client || c.verifyClient === true) && certs.length === 0) { - // error, no certificate - c.error(c, { - message: client ? - 'No server certificate provided.' : - 'No client certificate provided.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter + // 3. If parameterValue is the empty string, then continue. + if (parameterValue.length === 0) { + continue } - }); - } else if(certs.length === 0) { - // no certs to verify - // expect a ServerKeyExchange or ClientKeyExchange message next - c.expect = client ? SKE : CKE; - } else { - // save certificate in session - if(client) { - c.session.serverCertificate = certs[0]; - } else { - c.session.clientCertificate = certs[0]; } - if(tls.verifyCertificateChain(c, certs)) { - // expect a ServerKeyExchange or ClientKeyExchange message next - c.expect = client ? SKE : CKE; + // 10. 
If all of the following are true + // - parameterName is not the empty string + // - parameterName solely contains HTTP token code points + // - parameterValue solely contains HTTP quoted-string token code points + // - mimeType’s parameters[parameterName] does not exist + // then set mimeType’s parameters[parameterName] to parameterValue. + if ( + parameterName.length !== 0 && + HTTP_TOKEN_CODEPOINTS.test(parameterName) && + (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && + !mimeType.parameters.has(parameterName) + ) { + mimeType.parameters.set(parameterName, parameterValue) } } - // continue - c.process(); -}; - -/** - * Called when a client receives a ServerKeyExchange record. - * - * When this message will be sent: - * This message will be sent immediately after the server certificate - * message (or the server hello message, if this is an anonymous - * negotiation). - * - * The server key exchange message is sent by the server only when the - * server certificate message (if sent) does not contain enough data to - * allow the client to exchange a premaster secret. - * - * Meaning of this message: - * This message conveys cryptographic information to allow the client to - * communicate the premaster secret: either an RSA public key to encrypt - * the premaster secret with, or a Diffie-Hellman public key with which the - * client can complete a key exchange (with the result being the premaster - * secret.) - * - * enum { - * dhe_dss, dhe_rsa, dh_anon, rsa, dh_dss, dh_rsa - * } KeyExchangeAlgorithm; - * - * struct { - * opaque dh_p<1..2^16-1>; - * opaque dh_g<1..2^16-1>; - * opaque dh_Ys<1..2^16-1>; - * } ServerDHParams; - * - * struct { - * select(KeyExchangeAlgorithm) { - * case dh_anon: - * ServerDHParams params; - * case dhe_dss: - * case dhe_rsa: - * ServerDHParams params; - * digitally-signed struct { - * opaque client_random[32]; - * opaque server_random[32]; - * ServerDHParams params; - * } signed_params; - * case rsa: - * case dh_dss: - * case dh_rsa: - * struct {}; - * }; - * } ServerKeyExchange; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleServerKeyExchange = function(c, record, length) { - // this implementation only supports RSA, no Diffie-Hellman support - // so any length > 0 is invalid - if(length > 0) { - return c.error(c, { - message: 'Invalid key parameters. Only RSA is supported.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.unsupported_certificate - } - }); - } - - // expect an optional CertificateRequest message next - c.expect = SCR; + // 12. Return mimeType. + return mimeType +} - // continue - c.process(); -}; +// https://infra.spec.whatwg.org/#forgiving-base64-decode +/** @param {string} data */ +function forgivingBase64 (data) { + // 1. Remove all ASCII whitespace from data. + data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line -/** - * Called when a client receives a ClientKeyExchange record. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleClientKeyExchange = function(c, record, length) { - // this implementation only supports RSA, no Diffie-Hellman support - // so any length < 48 is invalid - if(length < 48) { - return c.error(c, { - message: 'Invalid key parameters. 
Only RSA is supported.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.unsupported_certificate - } - }); + // 2. If data’s code point length divides by 4 leaving + // no remainder, then: + if (data.length % 4 === 0) { + // 1. If data ends with one or two U+003D (=) code points, + // then remove them from data. + data = data.replace(/=?=$/, '') } - var b = record.fragment; - var msg = { - enc_pre_master_secret: readVector(b, 2).getBytes() - }; - - // do rsa decryption - var privateKey = null; - if(c.getPrivateKey) { - try { - privateKey = c.getPrivateKey(c, c.session.serverCertificate); - privateKey = forge.pki.privateKeyFromPem(privateKey); - } catch(ex) { - c.error(c, { - message: 'Could not get private key.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } + // 3. If data’s code point length divides by 4 leaving + // a remainder of 1, then return failure. + if (data.length % 4 === 1) { + return 'failure' } - if(privateKey === null) { - return c.error(c, { - message: 'No private key set.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); + // 4. If data contains a code point that is not one of + // U+002B (+) + // U+002F (/) + // ASCII alphanumeric + // then return failure. + if (/[^+/0-9A-Za-z]/.test(data)) { + return 'failure' } - try { - // decrypt 48-byte pre-master secret - var sp = c.session.sp; - sp.pre_master_secret = privateKey.decrypt(msg.enc_pre_master_secret); - - // ensure client hello version matches first 2 bytes - var version = c.session.clientHelloVersion; - if(version.major !== sp.pre_master_secret.charCodeAt(0) || - version.minor !== sp.pre_master_secret.charCodeAt(1)) { - // error, do not send alert (see BLEI attack below) - throw new Error('TLS version rollback attack detected.'); - } - } catch(ex) { - /* Note: Daniel Bleichenbacher [BLEI] can be used to attack a - TLS server which is using PKCS#1 encoded RSA, so instead of - failing here, we generate 48 random bytes and use that as - the pre-master secret. */ - sp.pre_master_secret = forge.random.getBytes(48); - } - - // expect a CertificateVerify message if a Certificate was received that - // does not have fixed Diffie-Hellman params, otherwise expect - // ChangeCipherSpec - c.expect = CCC; - if(c.session.clientCertificate !== null) { - // only RSA support, so expect CertificateVerify - // TODO: support Diffie-Hellman - c.expect = CCV; - } - - // continue - c.process(); -}; + const binary = atob(data) + const bytes = new Uint8Array(binary.length) -/** - * Called when a client receives a CertificateRequest record. - * - * When this message will be sent: - * A non-anonymous server can optionally request a certificate from the - * client, if appropriate for the selected cipher suite. This message, if - * sent, will immediately follow the Server Key Exchange message (if it is - * sent; otherwise, the Server Certificate message). 
- * - * enum { - * rsa_sign(1), dss_sign(2), rsa_fixed_dh(3), dss_fixed_dh(4), - * rsa_ephemeral_dh_RESERVED(5), dss_ephemeral_dh_RESERVED(6), - * fortezza_dms_RESERVED(20), (255) - * } ClientCertificateType; - * - * opaque DistinguishedName<1..2^16-1>; - * - * struct { - * ClientCertificateType certificate_types<1..2^8-1>; - * SignatureAndHashAlgorithm supported_signature_algorithms<2^16-1>; - * DistinguishedName certificate_authorities<0..2^16-1>; - * } CertificateRequest; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleCertificateRequest = function(c, record, length) { - // minimum of 3 bytes in message - if(length < 3) { - return c.error(c, { - message: 'Invalid CertificateRequest. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); + for (let byte = 0; byte < binary.length; byte++) { + bytes[byte] = binary.charCodeAt(byte) } - // TODO: TLS 1.2+ has different format including - // SignatureAndHashAlgorithm after cert types - var b = record.fragment; - var msg = { - certificate_types: readVector(b, 1), - certificate_authorities: readVector(b, 2) - }; - - // save certificate request in session - c.session.certificateRequest = msg; - - // expect a ServerHelloDone message next - c.expect = SHD; - - // continue - c.process(); -}; + return bytes +} +// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string +// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string /** - * Called when a server receives a CertificateVerify record. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. + * @param {string} input + * @param {{ position: number }} position + * @param {boolean?} extractValue */ -tls.handleCertificateVerify = function(c, record, length) { - if(length < 2) { - return c.error(c, { - message: 'Invalid CertificateVerify. Message too short.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } +function collectAnHTTPQuotedString (input, position, extractValue) { + // 1. Let positionStart be position. + const positionStart = position.position - // rewind to get full bytes for message so it can be manually - // digested below (special case for CertificateVerify messages because - // they must be digested *after* handling as opposed to all others) - var b = record.fragment; - b.read -= 4; - var msgBytes = b.bytes(); - b.read += 4; + // 2. Let value be the empty string. + let value = '' - var msg = { - signature: readVector(b, 2).getBytes() - }; + // 3. Assert: the code point at position within input + // is U+0022 ("). + assert(input[position.position] === '"') - // TODO: add support for DSA + // 4. Advance position by 1. + position.position++ - // generate data to verify - var verify = forge.util.createBuffer(); - verify.putBuffer(c.session.md5.digest()); - verify.putBuffer(c.session.sha1.digest()); - verify = verify.getBytes(); + // 5. While true: + while (true) { + // 1. Append the result of collecting a sequence of code points + // that are not U+0022 (") or U+005C (\) from input, given + // position, to value. 
+ value += collectASequenceOfCodePoints( + (char) => char !== '"' && char !== '\\', + input, + position + ) - try { - var cert = c.session.clientCertificate; - /*b = forge.pki.rsa.decrypt( - msg.signature, cert.publicKey, true, verify.length); - if(b !== verify) {*/ - if(!cert.publicKey.verify(verify, msg.signature, 'NONE')) { - throw new Error('CertificateVerify signature does not match.'); - } - - // digest message now that it has been handled - c.session.md5.update(msgBytes); - c.session.sha1.update(msgBytes); - } catch(ex) { - return c.error(c, { - message: 'Bad signature in CertificateVerify.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.handshake_failure - } - }); - } + // 2. If position is past the end of input, then break. + if (position.position >= input.length) { + break + } - // expect ChangeCipherSpec - c.expect = CCC; + // 3. Let quoteOrBackslash be the code point at position within + // input. + const quoteOrBackslash = input[position.position] - // continue - c.process(); -}; + // 4. Advance position by 1. + position.position++ -/** - * Called when a client receives a ServerHelloDone record. - * - * When this message will be sent: - * The server hello done message is sent by the server to indicate the end - * of the server hello and associated messages. After sending this message - * the server will wait for a client response. - * - * Meaning of this message: - * This message means that the server is done sending messages to support - * the key exchange, and the client can proceed with its phase of the key - * exchange. - * - * Upon receipt of the server hello done message the client should verify - * that the server provided a valid certificate if required and check that - * the server hello parameters are acceptable. - * - * struct {} ServerHelloDone; - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. - */ -tls.handleServerHelloDone = function(c, record, length) { - // len must be 0 bytes - if(length > 0) { - return c.error(c, { - message: 'Invalid ServerHelloDone message. Invalid length.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.record_overflow + // 5. If quoteOrBackslash is U+005C (\), then: + if (quoteOrBackslash === '\\') { + // 1. If position is past the end of input, then append + // U+005C (\) to value and break. + if (position.position >= input.length) { + value += '\\' + break } - }); - } - if(c.serverCertificate === null) { - // no server certificate was provided - var error = { - message: 'No server certificate provided. Not enough security.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.insufficient_security - } - }; + // 2. Append the code point at position within input to value. + value += input[position.position] - // call application callback - var depth = 0; - var ret = c.verify(c, error.alert.description, depth, []); - if(ret !== true) { - // check for custom alert info - if(ret || ret === 0) { - // set custom message and alert description - if(typeof ret === 'object' && !forge.util.isArray(ret)) { - if(ret.message) { - error.message = ret.message; - } - if(ret.alert) { - error.alert.description = ret.alert; - } - } else if(typeof ret === 'number') { - // set custom alert description - error.alert.description = ret; - } - } + // 3. Advance position by 1. + position.position++ + + // 6. Otherwise: + } else { + // 1. 
Assert: quoteOrBackslash is U+0022 ("). + assert(quoteOrBackslash === '"') - // send error - return c.error(c, error); + // 2. Break. + break } } - // create client certificate message if requested - if(c.session.certificateRequest !== null) { - record = tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificate(c) - }); - tls.queue(c, record); + // 6. If the extract-value flag is set, then return value. + if (extractValue) { + return value } - // create client key exchange message - record = tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createClientKeyExchange(c) - }); - tls.queue(c, record); - - // expect no messages until the following callback has been called - c.expect = SER; - - // create callback to handle client signature (for client-certs) - var callback = function(c, signature) { - if(c.session.certificateRequest !== null && - c.session.clientCertificate !== null) { - // create certificate verify message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createCertificateVerify(c, signature) - })); - } - - // create change cipher spec message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.change_cipher_spec, - data: tls.createChangeCipherSpec() - })); + // 7. Return the code points from positionStart to position, + // inclusive, within input. + return input.slice(positionStart, position.position) +} - // create pending state - c.state.pending = tls.createConnectionState(c); +/** + * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +function serializeAMimeType (mimeType) { + assert(mimeType !== 'failure') + const { parameters, essence } = mimeType - // change current write state to pending write state - c.state.current.write = c.state.pending.write; + // 1. Let serialization be the concatenation of mimeType’s + // type, U+002F (/), and mimeType’s subtype. + let serialization = essence - // create finished message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createFinished(c) - })); + // 2. For each name → value of mimeType’s parameters: + for (let [name, value] of parameters.entries()) { + // 1. Append U+003B (;) to serialization. + serialization += ';' - // expect a server ChangeCipherSpec message next - c.expect = SCC; + // 2. Append name to serialization. + serialization += name - // send records - tls.flush(c); + // 3. Append U+003D (=) to serialization. + serialization += '=' - // continue - c.process(); - }; + // 4. If value does not solely contain HTTP token code + // points or value is the empty string, then: + if (!HTTP_TOKEN_CODEPOINTS.test(value)) { + // 1. Precede each occurence of U+0022 (") or + // U+005C (\) in value with U+005C (\). + value = value.replace(/(\\|")/g, '\\$1') - // if there is no certificate request or no client certificate, do - // callback immediately - if(c.session.certificateRequest === null || - c.session.clientCertificate === null) { - return callback(c, null); + // 2. Prepend U+0022 (") to value. + value = '"' + value + + // 3. Append U+0022 (") to value. + value += '"' + } + + // 5. Append value to serialization. + serialization += value } - // otherwise get the client signature - tls.getClientSignature(c, callback); -}; + // 3. Return serialization. + return serialization +} /** - * Called when a ChangeCipherSpec record is received. - * - * @param c the connection. - * @param record the record. 
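// Editor's note: a hedged usage sketch of the parse/serialize pair added above; it
// assumes parseMIMEType and serializeAMimeType are in scope (in the bundle they are
// reached through the webpack require wrapper rather than a plain import).
const mime = parseMIMEType('Text/HTML; charset="utf-8"; charset=ascii')
// mime.essence === 'text/html'; the quoted value is read with
// collectAnHTTPQuotedString, and the second charset is ignored because that
// parameter already exists.
serializeAMimeType(mime) // 'text/html;charset=utf-8'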
+ * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} char */ -tls.handleChangeCipherSpec = function(c, record) { - if(record.fragment.getByte() !== 0x01) { - return c.error(c, { - message: 'Invalid ChangeCipherSpec message received.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.illegal_parameter - } - }); - } - - // create pending state if: - // 1. Resuming session in client mode OR - // 2. NOT resuming session in server mode - var client = (c.entity === tls.ConnectionEnd.client); - if((c.session.resuming && client) || (!c.session.resuming && !client)) { - c.state.pending = tls.createConnectionState(c); - } +function isHTTPWhiteSpace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === ' ' +} - // change current read state to pending read state - c.state.current.read = c.state.pending.read; +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} str + */ +function removeHTTPWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 - // clear pending state if: - // 1. NOT resuming session in client mode OR - // 2. resuming a session in server mode - if((!c.session.resuming && client) || (c.session.resuming && !client)) { - c.state.pending = null; + if (leading) { + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); } - // expect a Finished record next - c.expect = client ? SFI : CFI; + if (trailing) { + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); + } - // continue - c.process(); -}; + return str.slice(lead, trail + 1) +} /** - * Called when a Finished record is received. - * - * When this message will be sent: - * A finished message is always sent immediately after a change - * cipher spec message to verify that the key exchange and - * authentication processes were successful. It is essential that a - * change cipher spec message be received between the other - * handshake messages and the Finished message. - * - * Meaning of this message: - * The finished message is the first protected with the just- - * negotiated algorithms, keys, and secrets. Recipients of finished - * messages must verify that the contents are correct. Once a side - * has sent its Finished message and received and validated the - * Finished message from its peer, it may begin to send and receive - * application data over the connection. - * - * struct { - * opaque verify_data[verify_data_length]; - * } Finished; - * - * verify_data - * PRF(master_secret, finished_label, Hash(handshake_messages)) - * [0..verify_data_length-1]; - * - * finished_label - * For Finished messages sent by the client, the string - * "client finished". For Finished messages sent by the server, the - * string "server finished". - * - * verify_data_length depends on the cipher suite. If it is not specified - * by the cipher suite, then it is 12. Versions of TLS < 1.2 always used - * 12 bytes. - * - * @param c the connection. - * @param record the record. - * @param length the length of the handshake message. 
+ * @see https://infra.spec.whatwg.org/#ascii-whitespace + * @param {string} char */ -tls.handleFinished = function(c, record, length) { - // rewind to get full bytes for message so it can be manually - // digested below (special case for Finished messages because they - // must be digested *after* handling as opposed to all others) - var b = record.fragment; - b.read -= 4; - var msgBytes = b.bytes(); - b.read += 4; - - // message contains only verify_data - var vd = record.fragment.getBytes(); - - // ensure verify data is correct - b = forge.util.createBuffer(); - b.putBuffer(c.session.md5.digest()); - b.putBuffer(c.session.sha1.digest()); - - // set label based on entity type - var client = (c.entity === tls.ConnectionEnd.client); - var label = client ? 'server finished' : 'client finished'; - - // TODO: determine prf function and verify length for TLS 1.2 - var sp = c.session.sp; - var vdl = 12; - var prf = prf_TLS1; - b = prf(sp.master_secret, label, b.getBytes(), vdl); - if(b.getBytes() !== vd) { - return c.error(c, { - message: 'Invalid verify_data in Finished message.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.decrypt_error - } - }); - } - - // digest finished message now that it has been handled - c.session.md5.update(msgBytes); - c.session.sha1.update(msgBytes); - - // resuming session as client or NOT resuming session as server - if((c.session.resuming && client) || (!c.session.resuming && !client)) { - // create change cipher spec message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.change_cipher_spec, - data: tls.createChangeCipherSpec() - })); +function isASCIIWhitespace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' +} - // change current write state to pending write state, clear pending - c.state.current.write = c.state.pending.write; - c.state.pending = null; +/** + * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace + */ +function removeASCIIWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 - // create finished message - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createFinished(c) - })); + if (leading) { + for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); } - // expect application data next - c.expect = client ? SAD : CAD; + if (trailing) { + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); + } - // handshake complete - c.handshaking = false; - ++c.handshakes; + return str.slice(lead, trail + 1) +} - // save access to peer certificate - c.peerCertificate = client ? - c.session.serverCertificate : c.session.clientCertificate; +module.exports = { + dataURLProcessor, + URLSerializer, + collectASequenceOfCodePoints, + collectASequenceOfCodePointsFast, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString, + serializeAMimeType +} - // send records - tls.flush(c); - // now connected - c.isConnected = true; - c.connected(c); +/***/ }), - // continue - c.process(); -}; +/***/ 8511: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Called when an Alert record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleAlert = function(c, record) { - // read alert - var b = record.fragment; - var alert = { - level: b.getByte(), - description: b.getByte() - }; +"use strict"; - // TODO: consider using a table? 
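// Editor's note: a small usage sketch for the trimming helpers above, assuming they
// are in scope as defined in this module. HTTP whitespace is CR, LF, TAB and SPACE,
// while ASCII whitespace additionally includes FF, hence the two separate helpers.
removeHTTPWhitespace(' \t text/plain \r\n')          // 'text/plain'
removeHTTPWhitespace(' \t text/plain ', true, false) // 'text/plain ' (leading only)
removeASCIIWhitespace('\f data:text/plain \f')       // 'data:text/plain'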
- // get appropriate message - var msg; - switch(alert.description) { - case tls.Alert.Description.close_notify: - msg = 'Connection closed.'; - break; - case tls.Alert.Description.unexpected_message: - msg = 'Unexpected message.'; - break; - case tls.Alert.Description.bad_record_mac: - msg = 'Bad record MAC.'; - break; - case tls.Alert.Description.decryption_failed: - msg = 'Decryption failed.'; - break; - case tls.Alert.Description.record_overflow: - msg = 'Record overflow.'; - break; - case tls.Alert.Description.decompression_failure: - msg = 'Decompression failed.'; - break; - case tls.Alert.Description.handshake_failure: - msg = 'Handshake failure.'; - break; - case tls.Alert.Description.bad_certificate: - msg = 'Bad certificate.'; - break; - case tls.Alert.Description.unsupported_certificate: - msg = 'Unsupported certificate.'; - break; - case tls.Alert.Description.certificate_revoked: - msg = 'Certificate revoked.'; - break; - case tls.Alert.Description.certificate_expired: - msg = 'Certificate expired.'; - break; - case tls.Alert.Description.certificate_unknown: - msg = 'Certificate unknown.'; - break; - case tls.Alert.Description.illegal_parameter: - msg = 'Illegal parameter.'; - break; - case tls.Alert.Description.unknown_ca: - msg = 'Unknown certificate authority.'; - break; - case tls.Alert.Description.access_denied: - msg = 'Access denied.'; - break; - case tls.Alert.Description.decode_error: - msg = 'Decode error.'; - break; - case tls.Alert.Description.decrypt_error: - msg = 'Decrypt error.'; - break; - case tls.Alert.Description.export_restriction: - msg = 'Export restriction.'; - break; - case tls.Alert.Description.protocol_version: - msg = 'Unsupported protocol version.'; - break; - case tls.Alert.Description.insufficient_security: - msg = 'Insufficient security.'; - break; - case tls.Alert.Description.internal_error: - msg = 'Internal error.'; - break; - case tls.Alert.Description.user_canceled: - msg = 'User canceled.'; - break; - case tls.Alert.Description.no_renegotiation: - msg = 'Renegotiation not supported.'; - break; - default: - msg = 'Unknown error.'; - break; - } - - // close connection on close_notify, not an error - if(alert.description === tls.Alert.Description.close_notify) { - return c.close(); - } - - // call error handler - c.error(c, { - message: msg, - send: false, - // origin is the opposite end - origin: (c.entity === tls.ConnectionEnd.client) ? 'server' : 'client', - alert: alert - }); - // continue - c.process(); -}; +const { Blob, File: NativeFile } = __nccwpck_require__(4300) +const { types } = __nccwpck_require__(3837) +const { kState } = __nccwpck_require__(5861) +const { isBlobLike } = __nccwpck_require__(2538) +const { webidl } = __nccwpck_require__(1744) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +const { kEnumerableProperty } = __nccwpck_require__(3983) +const encoder = new TextEncoder() + +class File extends Blob { + constructor (fileBits, fileName, options = {}) { + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + + fileBits = webidl.converters['sequence'](fileBits) + fileName = webidl.converters.USVString(fileName) + options = webidl.converters.FilePropertyBag(options) + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. 
+ // Note: Blob handles this for us + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // 2. Convert every character in t to ASCII lowercase. + let t = options.type + let d + + // eslint-disable-next-line no-labels + substep: { + if (t) { + t = parseMIMEType(t) + + if (t === 'failure') { + t = '' + // eslint-disable-next-line no-labels + break substep + } + + t = serializeAMimeType(t).toLowerCase() + } -/** - * Called when a Handshake record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleHandshake = function(c, record) { - // get the handshake type and message length - var b = record.fragment; - var type = b.getByte(); - var length = b.getInt24(); - - // see if the record fragment doesn't yet contain the full message - if(length > b.length()) { - // cache the record, clear its fragment, and reset the buffer read - // pointer before the type and length were read - c.fragmented = record; - record.fragment = forge.util.createBuffer(); - b.read -= 4; - - // continue - return c.process(); - } - - // full message now available, clear cache, reset read pointer to - // before type and length - c.fragmented = null; - b.read -= 4; - - // save the handshake bytes for digestion after handler is found - // (include type and length of handshake msg) - var bytes = b.bytes(length + 4); - - // restore read pointer - b.read += 4; - - // handle expected message - if(type in hsTable[c.entity][c.expect]) { - // initialize server session - if(c.entity === tls.ConnectionEnd.server && !c.open && !c.fail) { - c.handshaking = true; - c.session = { - version: null, - extensions: { - server_name: { - serverNameList: [] - } - }, - cipherSuite: null, - compressionMethod: null, - serverCertificate: null, - clientCertificate: null, - md5: forge.md.md5.create(), - sha1: forge.md.sha1.create() - }; + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + d = options.lastModified } - /* Update handshake messages digest. Finished and CertificateVerify - messages are not digested here. They can't be digested as part of - the verify_data that they contain. These messages are manually - digested in their handlers. HelloRequest messages are simply never - included in the handshake message digest according to spec. */ - if(type !== tls.HandshakeType.hello_request && - type !== tls.HandshakeType.certificate_verify && - type !== tls.HandshakeType.finished) { - c.session.md5.update(bytes); - c.session.sha1.update(bytes); + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. 
+ + super(processBlobParts(fileBits, options), { type: t }) + this[kState] = { + name: n, + lastModified: d, + type: t } + } - // handle specific handshake type record - hsTable[c.entity][c.expect][type](c, record, length); - } else { - // unexpected record - tls.handleUnexpected(c, record); + get name () { + webidl.brandCheck(this, File) + + return this[kState].name } -}; -/** - * Called when an ApplicationData record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleApplicationData = function(c, record) { - // buffer data, notify that its ready - c.data.putBuffer(record.fragment); - c.dataReady(c); + get lastModified () { + webidl.brandCheck(this, File) - // continue - c.process(); -}; + return this[kState].lastModified + } -/** - * Called when a Heartbeat record is received. - * - * @param c the connection. - * @param record the record. - */ -tls.handleHeartbeat = function(c, record) { - // get the heartbeat type and payload - var b = record.fragment; - var type = b.getByte(); - var length = b.getInt16(); - var payload = b.getBytes(length); - - if(type === tls.HeartbeatMessageType.heartbeat_request) { - // discard request during handshake or if length is too large - if(c.handshaking || length > payload.length) { - // continue - return c.process(); - } - // retransmit payload - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.heartbeat, - data: tls.createHeartbeat( - tls.HeartbeatMessageType.heartbeat_response, payload) - })); - tls.flush(c); - } else if(type === tls.HeartbeatMessageType.heartbeat_response) { - // check payload against expected payload, discard heartbeat if no match - if(payload !== c.expectedHeartbeatPayload) { - // continue - return c.process(); - } + get type () { + webidl.brandCheck(this, File) - // notify that a valid heartbeat was received - if(c.heartbeatReceived) { - c.heartbeatReceived(c, forge.util.createBuffer(payload)); - } + return this[kState].type } +} - // continue - c.process(); -}; +class FileLike { + constructor (blobLike, fileName, options = {}) { + // TODO: argument idl type check -/** - * The transistional state tables for receiving TLS records. It maps the - * current TLS engine state and a received record to a function to handle the - * record and update the state. - * - * For instance, if the current state is SHE, then the TLS engine is expecting - * a ServerHello record. Once a record is received, the handler function is - * looked up using the state SHE and the record's content type. - * - * The resulting function will either be an error handler or a record handler. - * The function will take whatever action is appropriate and update the state - * for the next record. - * - * The states are all based on possible server record types. Note that the - * client will never specifically expect to receive a HelloRequest or an alert - * from the server so there is no state that reflects this. These messages may - * occur at any time. - * - * There are two tables for mapping states because there is a second tier of - * types for handshake messages. Once a record with a content type of handshake - * is received, the handshake record handler will look up the handshake type in - * the secondary map to get its appropriate handler. 
- * - * Valid message orders are as follows: - * - * =======================FULL HANDSHAKE====================== - * Client Server - * - * ClientHello --------> - * ServerHello - * Certificate* - * ServerKeyExchange* - * CertificateRequest* - * <-------- ServerHelloDone - * Certificate* - * ClientKeyExchange - * CertificateVerify* - * [ChangeCipherSpec] - * Finished --------> - * [ChangeCipherSpec] - * <-------- Finished - * Application Data <-------> Application Data - * - * =====================SESSION RESUMPTION===================== - * Client Server - * - * ClientHello --------> - * ServerHello - * [ChangeCipherSpec] - * <-------- Finished - * [ChangeCipherSpec] - * Finished --------> - * Application Data <-------> Application Data - */ -// client expect states (indicate which records are expected to be received) -var SHE = 0; // rcv server hello -var SCE = 1; // rcv server certificate -var SKE = 2; // rcv server key exchange -var SCR = 3; // rcv certificate request -var SHD = 4; // rcv server hello done -var SCC = 5; // rcv change cipher spec -var SFI = 6; // rcv finished -var SAD = 7; // rcv application data -var SER = 8; // not expecting any messages at this point - -// server expect states -var CHE = 0; // rcv client hello -var CCE = 1; // rcv client certificate -var CKE = 2; // rcv client key exchange -var CCV = 3; // rcv certificate verify -var CCC = 4; // rcv change cipher spec -var CFI = 5; // rcv finished -var CAD = 6; // rcv application data -var CER = 7; // not expecting any messages at this point - -// map client current expect state and content type to function -var __ = tls.handleUnexpected; -var R0 = tls.handleChangeCipherSpec; -var R1 = tls.handleAlert; -var R2 = tls.handleHandshake; -var R3 = tls.handleApplicationData; -var R4 = tls.handleHeartbeat; -var ctTable = []; -ctTable[tls.ConnectionEnd.client] = [ -// CC,AL,HS,AD,HB -/*SHE*/[__,R1,R2,__,R4], -/*SCE*/[__,R1,R2,__,R4], -/*SKE*/[__,R1,R2,__,R4], -/*SCR*/[__,R1,R2,__,R4], -/*SHD*/[__,R1,R2,__,R4], -/*SCC*/[R0,R1,__,__,R4], -/*SFI*/[__,R1,R2,__,R4], -/*SAD*/[__,R1,R2,R3,R4], -/*SER*/[__,R1,R2,__,R4] -]; + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + + // 2. Let n be the fileName argument to the constructor. + const n = fileName -// map server current expect state and content type to function -ctTable[tls.ConnectionEnd.server] = [ -// CC,AL,HS,AD -/*CHE*/[__,R1,R2,__,R4], -/*CCE*/[__,R1,R2,__,R4], -/*CKE*/[__,R1,R2,__,R4], -/*CCV*/[__,R1,R2,__,R4], -/*CCC*/[R0,R1,__,__,R4], -/*CFI*/[__,R1,R2,__,R4], -/*CAD*/[__,R1,R2,R3,R4], -/*CER*/[__,R1,R2,__,R4] -]; + // 3. 
Process FilePropertyBag dictionary argument by running the following + // substeps: -// map client current expect state and handshake type to function -var H0 = tls.handleHelloRequest; -var H1 = tls.handleServerHello; -var H2 = tls.handleCertificate; -var H3 = tls.handleServerKeyExchange; -var H4 = tls.handleCertificateRequest; -var H5 = tls.handleServerHelloDone; -var H6 = tls.handleFinished; -var hsTable = []; -hsTable[tls.ConnectionEnd.client] = [ -// HR,01,SH,03,04,05,06,07,08,09,10,SC,SK,CR,HD,15,CK,17,18,19,FI -/*SHE*/[__,__,H1,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*SCE*/[H0,__,__,__,__,__,__,__,__,__,__,H2,H3,H4,H5,__,__,__,__,__,__], -/*SKE*/[H0,__,__,__,__,__,__,__,__,__,__,__,H3,H4,H5,__,__,__,__,__,__], -/*SCR*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,H4,H5,__,__,__,__,__,__], -/*SHD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,H5,__,__,__,__,__,__], -/*SCC*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*SFI*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6], -/*SAD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*SER*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__] -]; + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // TODO + const t = options.type -// map server current expect state and handshake type to function -// Note: CAD[CH] does not map to FB because renegotation is prohibited -var H7 = tls.handleClientHello; -var H8 = tls.handleClientKeyExchange; -var H9 = tls.handleCertificateVerify; -hsTable[tls.ConnectionEnd.server] = [ -// 01,CH,02,03,04,05,06,07,08,09,10,CC,12,13,14,CV,CK,17,18,19,FI -/*CHE*/[__,H7,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*CCE*/[__,__,__,__,__,__,__,__,__,__,__,H2,__,__,__,__,__,__,__,__,__], -/*CKE*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H8,__,__,__,__], -/*CCV*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H9,__,__,__,__,__], -/*CCC*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*CFI*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6], -/*CAD*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], -/*CER*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__] -]; + // 2. Convert every character in t to ASCII lowercase. + // TODO -/** - * Generates the master_secret and keys using the given security parameters. - * - * The security parameters for a TLS connection state are defined as such: - * - * struct { - * ConnectionEnd entity; - * PRFAlgorithm prf_algorithm; - * BulkCipherAlgorithm bulk_cipher_algorithm; - * CipherType cipher_type; - * uint8 enc_key_length; - * uint8 block_length; - * uint8 fixed_iv_length; - * uint8 record_iv_length; - * MACAlgorithm mac_algorithm; - * uint8 mac_length; - * uint8 mac_key_length; - * CompressionMethod compression_algorithm; - * opaque master_secret[48]; - * opaque client_random[32]; - * opaque server_random[32]; - * } SecurityParameters; - * - * Note that this definition is from TLS 1.2. In TLS 1.0 some of these - * parameters are ignored because, for instance, the PRFAlgorithm is a - * builtin-fixed algorithm combining iterations of MD5 and SHA-1 in TLS 1.0. - * - * The Record Protocol requires an algorithm to generate keys required by the - * current connection state. 
- * - * The master secret is expanded into a sequence of secure bytes, which is then - * split to a client write MAC key, a server write MAC key, a client write - * encryption key, and a server write encryption key. In TLS 1.0 a client write - * IV and server write IV are also generated. Each of these is generated from - * the byte sequence in that order. Unused values are empty. In TLS 1.2, some - * AEAD ciphers may additionally require a client write IV and a server write - * IV (see Section 6.2.3.3). - * - * When keys, MAC keys, and IVs are generated, the master secret is used as an - * entropy source. - * - * To generate the key material, compute: - * - * master_secret = PRF(pre_master_secret, "master secret", - * ClientHello.random + ServerHello.random) - * - * key_block = PRF(SecurityParameters.master_secret, - * "key expansion", - * SecurityParameters.server_random + - * SecurityParameters.client_random); - * - * until enough output has been generated. Then, the key_block is - * partitioned as follows: - * - * client_write_MAC_key[SecurityParameters.mac_key_length] - * server_write_MAC_key[SecurityParameters.mac_key_length] - * client_write_key[SecurityParameters.enc_key_length] - * server_write_key[SecurityParameters.enc_key_length] - * client_write_IV[SecurityParameters.fixed_iv_length] - * server_write_IV[SecurityParameters.fixed_iv_length] - * - * In TLS 1.2, the client_write_IV and server_write_IV are only generated for - * implicit nonce techniques as described in Section 3.2.1 of [AEAD]. This - * implementation uses TLS 1.0 so IVs are generated. - * - * Implementation note: The currently defined cipher suite which requires the - * most material is AES_256_CBC_SHA256. It requires 2 x 32 byte keys and 2 x 32 - * byte MAC keys, for a total 128 bytes of key material. In TLS 1.0 it also - * requires 2 x 16 byte IVs, so it actually takes 160 bytes of key material. - * - * @param c the connection. - * @param sp the security parameters to use. - * - * @return the security keys. - */ -tls.generateKeys = function(c, sp) { - // TLS_RSA_WITH_AES_128_CBC_SHA (required to be compliant with TLS 1.2) & - // TLS_RSA_WITH_AES_256_CBC_SHA are the only cipher suites implemented - // at present + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + const d = options.lastModified ?? Date.now() - // TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA is required to be compliant with - // TLS 1.0 but we don't care right now because AES is better and we have - // an implementation for it + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. 
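// Editor's note: a condensed sketch of the key_block partitioning described in the
// comment above, mirroring the generateKeys code this patch removes; sp is assumed
// to hold the negotiated security parameters and prf_TLS1 is the TLS 1.0/1.1 PRF
// defined elsewhere in this file.
var random = sp.server_random + sp.client_random;
var length = 2 * sp.mac_key_length + 2 * sp.enc_key_length + 2 * sp.fixed_iv_length;
var km = prf_TLS1(sp.master_secret, 'key expansion', random, length);
var keys = {
  client_write_MAC_key: km.getBytes(sp.mac_key_length),
  server_write_MAC_key: km.getBytes(sp.mac_key_length),
  client_write_key: km.getBytes(sp.enc_key_length),
  server_write_key: km.getBytes(sp.enc_key_length),
  client_write_IV: km.getBytes(sp.fixed_iv_length), // TLS 1.0 only
  server_write_IV: km.getBytes(sp.fixed_iv_length)  // TLS 1.0 only
};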
- // TODO: TLS 1.2 implementation - /* - // determine the PRF - var prf; - switch(sp.prf_algorithm) { - case tls.PRFAlgorithm.tls_prf_sha256: - prf = prf_sha256; - break; - default: - // should never happen - throw new Error('Invalid PRF'); + this[kState] = { + blobLike, + name: n, + type: t, + lastModified: d + } } - */ - - // TLS 1.0/1.1 implementation - var prf = prf_TLS1; - // concatenate server and client random - var random = sp.client_random + sp.server_random; + stream (...args) { + webidl.brandCheck(this, FileLike) - // only create master secret if session is new - if(!c.session.resuming) { - // create master secret, clean up pre-master secret - sp.master_secret = prf( - sp.pre_master_secret, 'master secret', random, 48).bytes(); - sp.pre_master_secret = null; + return this[kState].blobLike.stream(...args) } - // generate the amount of key material needed - random = sp.server_random + sp.client_random; - var length = 2 * sp.mac_key_length + 2 * sp.enc_key_length; + arrayBuffer (...args) { + webidl.brandCheck(this, FileLike) - // include IV for TLS/1.0 - var tls10 = (c.version.major === tls.Versions.TLS_1_0.major && - c.version.minor === tls.Versions.TLS_1_0.minor); - if(tls10) { - length += 2 * sp.fixed_iv_length; + return this[kState].blobLike.arrayBuffer(...args) } - var km = prf(sp.master_secret, 'key expansion', random, length); - // split the key material into the MAC and encryption keys - var rval = { - client_write_MAC_key: km.getBytes(sp.mac_key_length), - server_write_MAC_key: km.getBytes(sp.mac_key_length), - client_write_key: km.getBytes(sp.enc_key_length), - server_write_key: km.getBytes(sp.enc_key_length) - }; + slice (...args) { + webidl.brandCheck(this, FileLike) - // include TLS 1.0 IVs - if(tls10) { - rval.client_write_IV = km.getBytes(sp.fixed_iv_length); - rval.server_write_IV = km.getBytes(sp.fixed_iv_length); + return this[kState].blobLike.slice(...args) } - return rval; -}; + text (...args) { + webidl.brandCheck(this, FileLike) -/** - * Creates a new initialized TLS connection state. A connection state has - * a read mode and a write mode. - * - * compression state: - * The current state of the compression algorithm. - * - * cipher state: - * The current state of the encryption algorithm. This will consist of the - * scheduled key for that connection. For stream ciphers, this will also - * contain whatever state information is necessary to allow the stream to - * continue to encrypt or decrypt data. - * - * MAC key: - * The MAC key for the connection. - * - * sequence number: - * Each connection state contains a sequence number, which is maintained - * separately for read and write states. The sequence number MUST be set to - * zero whenever a connection state is made the active state. Sequence - * numbers are of type uint64 and may not exceed 2^64-1. Sequence numbers do - * not wrap. If a TLS implementation would need to wrap a sequence number, - * it must renegotiate instead. A sequence number is incremented after each - * record: specifically, the first record transmitted under a particular - * connection state MUST use sequence number 0. - * - * @param c the connection. - * - * @return the new initialized TLS connection state. 
- */ -tls.createConnectionState = function(c) { - var client = (c.entity === tls.ConnectionEnd.client); - - var createMode = function() { - var mode = { - // two 32-bit numbers, first is most significant - sequenceNumber: [0, 0], - macKey: null, - macLength: 0, - macFunction: null, - cipherState: null, - cipherFunction: function(record) {return true;}, - compressionState: null, - compressFunction: function(record) {return true;}, - updateSequenceNumber: function() { - if(mode.sequenceNumber[1] === 0xFFFFFFFF) { - mode.sequenceNumber[1] = 0; - ++mode.sequenceNumber[0]; - } else { - ++mode.sequenceNumber[1]; - } - } - }; - return mode; - }; - var state = { - read: createMode(), - write: createMode() - }; + return this[kState].blobLike.text(...args) + } - // update function in read mode will decrypt then decompress a record - state.read.update = function(c, record) { - if(!state.read.cipherFunction(record, state.read)) { - c.error(c, { - message: 'Could not decrypt record or bad MAC.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - // doesn't matter if decryption failed or MAC was - // invalid, return the same error so as not to reveal - // which one occurred - description: tls.Alert.Description.bad_record_mac - } - }); - } else if(!state.read.compressFunction(c, record, state.read)) { - c.error(c, { - message: 'Could not decompress record.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.decompression_failure - } - }); - } - return !c.fail; - }; + get size () { + webidl.brandCheck(this, FileLike) - // update function in write mode will compress then encrypt a record - state.write.update = function(c, record) { - if(!state.write.compressFunction(c, record, state.write)) { - // error, but do not send alert since it would require - // compression as well - c.error(c, { - message: 'Could not compress record.', - send: false, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } else if(!state.write.cipherFunction(record, state.write)) { - // error, but do not send alert since it would require - // encryption as well - c.error(c, { - message: 'Could not encrypt record.', - send: false, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } - return !c.fail; - }; + return this[kState].blobLike.size + } - // handle security parameters - if(c.session) { - var sp = c.session.sp; - c.session.cipherSuite.initSecurityParameters(sp); + get type () { + webidl.brandCheck(this, FileLike) - // generate keys - sp.keys = tls.generateKeys(c, sp); - state.read.macKey = client ? - sp.keys.server_write_MAC_key : sp.keys.client_write_MAC_key; - state.write.macKey = client ? - sp.keys.client_write_MAC_key : sp.keys.server_write_MAC_key; + return this[kState].blobLike.type + } - // cipher suite setup - c.session.cipherSuite.initConnectionState(state, c, sp); + get name () { + webidl.brandCheck(this, FileLike) - // compression setup - switch(sp.compression_algorithm) { - case tls.CompressionMethod.none: - break; - case tls.CompressionMethod.deflate: - state.read.compressFunction = inflate; - state.write.compressFunction = deflate; - break; - default: - throw new Error('Unsupported compression algorithm.'); - } + return this[kState].name } - return state; -}; + get lastModified () { + webidl.brandCheck(this, FileLike) -/** - * Creates a Random structure. 
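// Editor's note: a hedged usage sketch of the File class defined above (undici's
// File polyfill); the file name and contents are examples only.
const file = new File(['hello, ', 'world'], 'greeting.txt', {
  type: 'Text/Plain', // parsed and re-serialized, so it is normalized to lower case
  lastModified: 0
})
file.name         // 'greeting.txt'
file.type         // 'text/plain'
file.lastModified // 0
file.size         // 12, the byte length of the UTF-8 encoded parts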
- * - * struct { - * uint32 gmt_unix_time; - * opaque random_bytes[28]; - * } Random; - * - * gmt_unix_time: - * The current time and date in standard UNIX 32-bit format (seconds since - * the midnight starting Jan 1, 1970, UTC, ignoring leap seconds) according - * to the sender's internal clock. Clocks are not required to be set - * correctly by the basic TLS protocol; higher-level or application - * protocols may define additional requirements. Note that, for historical - * reasons, the data element is named using GMT, the predecessor of the - * current worldwide time base, UTC. - * random_bytes: - * 28 bytes generated by a secure random number generator. - * - * @return the Random structure as a byte array. - */ -tls.createRandom = function() { - // get UTC milliseconds - var d = new Date(); - var utc = +d + d.getTimezoneOffset() * 60000; - var rval = forge.util.createBuffer(); - rval.putInt32(utc); - rval.putBytes(forge.random.getBytes(28)); - return rval; -}; + return this[kState].lastModified + } -/** - * Creates a TLS record with the given type and data. - * - * @param c the connection. - * @param options: - * type: the record type. - * data: the plain text data in a byte buffer. - * - * @return the created record. - */ -tls.createRecord = function(c, options) { - if(!options.data) { - return null; + get [Symbol.toStringTag] () { + return 'File' } - var record = { - type: options.type, - version: { - major: c.version.major, - minor: c.version.minor - }, - length: options.data.length(), - fragment: options.data - }; - return record; -}; +} -/** - * Creates a TLS alert record. - * - * @param c the connection. - * @param alert: - * level: the TLS alert level. - * description: the TLS alert description. - * - * @return the created alert record. - */ -tls.createAlert = function(c, alert) { - var b = forge.util.createBuffer(); - b.putByte(alert.level); - b.putByte(alert.description); - return tls.createRecord(c, { - type: tls.ContentType.alert, - data: b - }); -}; +Object.defineProperties(File.prototype, { + [Symbol.toStringTag]: { + value: 'File', + configurable: true + }, + name: kEnumerableProperty, + lastModified: kEnumerableProperty +}) -/* The structure of a TLS handshake message. - * - * struct { - * HandshakeType msg_type; // handshake type - * uint24 length; // bytes in message - * select(HandshakeType) { - * case hello_request: HelloRequest; - * case client_hello: ClientHello; - * case server_hello: ServerHello; - * case certificate: Certificate; - * case server_key_exchange: ServerKeyExchange; - * case certificate_request: CertificateRequest; - * case server_hello_done: ServerHelloDone; - * case certificate_verify: CertificateVerify; - * case client_key_exchange: ClientKeyExchange; - * case finished: Finished; - * } body; - * } Handshake; - */ +webidl.converters.Blob = webidl.interfaceConverter(Blob) -/** - * Creates a ClientHello message. 
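// Editor's note: an illustrative call of the removed createAlert/createRecord pair
// above, as used when closing a connection; c is assumed to be an open forge TLS
// connection, and the warning level shown here is the editor's assumption for a
// close_notify alert.
tls.queue(c, tls.createAlert(c, {
  level: tls.Alert.Level.warning,
  description: tls.Alert.Description.close_notify
}));
tls.flush(c);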
- * - * opaque SessionID<0..32>; - * enum { null(0), deflate(1), (255) } CompressionMethod; - * uint8 CipherSuite[2]; - * - * struct { - * ProtocolVersion client_version; - * Random random; - * SessionID session_id; - * CipherSuite cipher_suites<2..2^16-2>; - * CompressionMethod compression_methods<1..2^8-1>; - * select(extensions_present) { - * case false: - * struct {}; - * case true: - * Extension extensions<0..2^16-1>; - * }; - * } ClientHello; - * - * The extension format for extended client hellos and server hellos is: - * - * struct { - * ExtensionType extension_type; - * opaque extension_data<0..2^16-1>; - * } Extension; - * - * Here: - * - * - "extension_type" identifies the particular extension type. - * - "extension_data" contains information specific to the particular - * extension type. - * - * The extension types defined in this document are: - * - * enum { - * server_name(0), max_fragment_length(1), - * client_certificate_url(2), trusted_ca_keys(3), - * truncated_hmac(4), status_request(5), (65535) - * } ExtensionType; - * - * @param c the connection. - * - * @return the ClientHello byte buffer. - */ -tls.createClientHello = function(c) { - // save hello version - c.session.clientHelloVersion = { - major: c.version.major, - minor: c.version.minor - }; +webidl.converters.BlobPart = function (V, opts) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } - // create supported cipher suites - var cipherSuites = forge.util.createBuffer(); - for(var i = 0; i < c.cipherSuites.length; ++i) { - var cs = c.cipherSuites[i]; - cipherSuites.putByte(cs.id[0]); - cipherSuites.putByte(cs.id[1]); - } - var cSuites = cipherSuites.length(); - - // create supported compression methods, null always supported, but - // also support deflate if connection has inflate and deflate methods - var compressionMethods = forge.util.createBuffer(); - compressionMethods.putByte(tls.CompressionMethod.none); - // FIXME: deflate support disabled until issues with raw deflate data - // without zlib headers are resolved - /* - if(c.inflate !== null && c.deflate !== null) { - compressionMethods.putByte(tls.CompressionMethod.deflate); + if ( + ArrayBuffer.isView(V) || + types.isAnyArrayBuffer(V) + ) { + return webidl.converters.BufferSource(V, opts) + } } - */ - var cMethods = compressionMethods.length(); - - // create TLS SNI (server name indication) extension if virtual host - // has been specified, see RFC 3546 - var extensions = forge.util.createBuffer(); - if(c.virtualHost) { - // create extension struct - var ext = forge.util.createBuffer(); - ext.putByte(0x00); // type server_name (ExtensionType is 2 bytes) - ext.putByte(0x00); - - /* In order to provide the server name, clients MAY include an - * extension of type "server_name" in the (extended) client hello. 
- * The "extension_data" field of this extension SHALL contain - * "ServerNameList" where: - * - * struct { - * NameType name_type; - * select(name_type) { - * case host_name: HostName; - * } name; - * } ServerName; - * - * enum { - * host_name(0), (255) - * } NameType; - * - * opaque HostName<1..2^16-1>; - * - * struct { - * ServerName server_name_list<1..2^16-1> - * } ServerNameList; - */ - var serverName = forge.util.createBuffer(); - serverName.putByte(0x00); // type host_name - writeVector(serverName, 2, forge.util.createBuffer(c.virtualHost)); - - // ServerNameList is in extension_data - var snList = forge.util.createBuffer(); - writeVector(snList, 2, serverName); - writeVector(ext, 2, snList); - extensions.putBuffer(ext); - } - var extLength = extensions.length(); - if(extLength > 0) { - // add extension vector length - extLength += 2; - } - - // determine length of the handshake message - // cipher suites and compression methods size will need to be - // updated if more get added to the list - var sessionId = c.session.id; - var length = - sessionId.length + 1 + // session ID vector - 2 + // version (major + minor) - 4 + 28 + // random time and random bytes - 2 + cSuites + // cipher suites vector - 1 + cMethods + // compression methods vector - extLength; // extensions vector - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.client_hello); - rval.putInt24(length); // handshake length - rval.putByte(c.version.major); // major version - rval.putByte(c.version.minor); // minor version - rval.putBytes(c.session.sp.client_random); // random time + bytes - writeVector(rval, 1, forge.util.createBuffer(sessionId)); - writeVector(rval, 2, cipherSuites); - writeVector(rval, 1, compressionMethods); - if(extLength > 0) { - writeVector(rval, 2, extensions); - } - return rval; -}; -/** - * Creates a ServerHello message. - * - * @param c the connection. - * - * @return the ServerHello byte buffer. - */ -tls.createServerHello = function(c) { - // determine length of the handshake message - var sessionId = c.session.id; - var length = - sessionId.length + 1 + // session ID vector - 2 + // version (major + minor) - 4 + 28 + // random time and random bytes - 2 + // chosen cipher suite - 1; // chosen compression method - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.server_hello); - rval.putInt24(length); // handshake length - rval.putByte(c.version.major); // major version - rval.putByte(c.version.minor); // minor version - rval.putBytes(c.session.sp.server_random); // random time + bytes - writeVector(rval, 1, forge.util.createBuffer(sessionId)); - rval.putByte(c.session.cipherSuite.id[0]); - rval.putByte(c.session.cipherSuite.id[1]); - rval.putByte(c.session.compressionMethod); - return rval; -}; + return webidl.converters.USVString(V, opts) +} -/** - * Creates a Certificate message. - * - * When this message will be sent: - * This is the first message the client can send after receiving a server - * hello done message and the first message the server can send after - * sending a ServerHello. This client message is only sent if the server - * requests a certificate. If no suitable certificate is available, the - * client should send a certificate message containing no certificates. If - * client authentication is required by the server for the handshake to - * continue, it may respond with a fatal handshake failure alert. 
- * - * opaque ASN.1Cert<1..2^24-1>; - * - * struct { - * ASN.1Cert certificate_list<0..2^24-1>; - * } Certificate; - * - * @param c the connection. - * - * @return the Certificate byte buffer. - */ -tls.createCertificate = function(c) { - // TODO: check certificate request to ensure types are supported - - // get a certificate (a certificate as a PEM string) - var client = (c.entity === tls.ConnectionEnd.client); - var cert = null; - if(c.getCertificate) { - var hint; - if(client) { - hint = c.session.certificateRequest; - } else { - hint = c.session.extensions.server_name.serverNameList; +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.BlobPart +) + +// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag +webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ + { + key: 'lastModified', + converter: webidl.converters['long long'], + get defaultValue () { + return Date.now() } - cert = c.getCertificate(c, hint); - } + }, + { + key: 'type', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'endings', + converter: (value) => { + value = webidl.converters.DOMString(value) + value = value.toLowerCase() - // buffer to hold certificate list - var certList = forge.util.createBuffer(); - if(cert !== null) { - try { - // normalize cert to a chain of certificates - if(!forge.util.isArray(cert)) { - cert = [cert]; + if (value !== 'native') { + value = 'transparent' } - var asn1 = null; - for(var i = 0; i < cert.length; ++i) { - var msg = forge.pem.decode(cert[i])[0]; - if(msg.type !== 'CERTIFICATE' && - msg.type !== 'X509 CERTIFICATE' && - msg.type !== 'TRUSTED CERTIFICATE') { - var error = new Error('Could not convert certificate from PEM; PEM ' + - 'header type is not "CERTIFICATE", "X509 CERTIFICATE", or ' + - '"TRUSTED CERTIFICATE".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert certificate from PEM; PEM is encrypted.'); - } - var der = forge.util.createBuffer(msg.body); - if(asn1 === null) { - asn1 = forge.asn1.fromDer(der.bytes(), false); - } + return value + }, + defaultValue: 'transparent' + } +]) - // certificate entry is itself a vector with 3 length bytes - var certBuffer = forge.util.createBuffer(); - writeVector(certBuffer, 3, der); +/** + * @see https://www.w3.org/TR/FileAPI/#process-blob-parts + * @param {(NodeJS.TypedArray|Blob|string)[]} parts + * @param {{ type: string, endings: string }} options + */ +function processBlobParts (parts, options) { + // 1. Let bytes be an empty sequence of bytes. + /** @type {NodeJS.TypedArray[]} */ + const bytes = [] - // add cert vector to cert list vector - certList.putBuffer(certBuffer); + // 2. For each element in parts: + for (const element of parts) { + // 1. If element is a USVString, run the following substeps: + if (typeof element === 'string') { + // 1. Let s be element. + let s = element + + // 2. If the endings member of options is "native", set s + // to the result of converting line endings to native + // of element. + if (options.endings === 'native') { + s = convertLineEndingsNative(s) } - // save certificate - cert = forge.pki.certificateFromAsn1(asn1); - if(client) { - c.session.clientCertificate = cert; + // 3. Append the result of UTF-8 encoding s to bytes. + bytes.push(encoder.encode(s)) + } else if ( + types.isAnyArrayBuffer(element) || + types.isTypedArray(element) + ) { + // 2. 
If element is a BufferSource, get a copy of the + // bytes held by the buffer source, and append those + // bytes to bytes. + if (!element.buffer) { // ArrayBuffer + bytes.push(new Uint8Array(element)) } else { - c.session.serverCertificate = cert; + bytes.push( + new Uint8Array(element.buffer, element.byteOffset, element.byteLength) + ) } - } catch(ex) { - return c.error(c, { - message: 'Could not send certificate list.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.bad_certificate - } - }); + } else if (isBlobLike(element)) { + // 3. If element is a Blob, append the bytes it represents + // to bytes. + bytes.push(element) } } - // determine length of the handshake message - var length = 3 + certList.length(); // cert list vector - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.certificate); - rval.putInt24(length); - writeVector(rval, 3, certList); - return rval; -}; - -/** - * Creates a ClientKeyExchange message. - * - * When this message will be sent: - * This message is always sent by the client. It will immediately follow the - * client certificate message, if it is sent. Otherwise it will be the first - * message sent by the client after it receives the server hello done - * message. - * - * Meaning of this message: - * With this message, the premaster secret is set, either though direct - * transmission of the RSA-encrypted secret, or by the transmission of - * Diffie-Hellman parameters which will allow each side to agree upon the - * same premaster secret. When the key exchange method is DH_RSA or DH_DSS, - * client certification has been requested, and the client was able to - * respond with a certificate which contained a Diffie-Hellman public key - * whose parameters (group and generator) matched those specified by the - * server in its certificate, this message will not contain any data. - * - * Meaning of this message: - * If RSA is being used for key agreement and authentication, the client - * generates a 48-byte premaster secret, encrypts it using the public key - * from the server's certificate or the temporary RSA key provided in a - * server key exchange message, and sends the result in an encrypted - * premaster secret message. This structure is a variant of the client - * key exchange message, not a message in itself. - * - * struct { - * select(KeyExchangeAlgorithm) { - * case rsa: EncryptedPreMasterSecret; - * case diffie_hellman: ClientDiffieHellmanPublic; - * } exchange_keys; - * } ClientKeyExchange; - * - * struct { - * ProtocolVersion client_version; - * opaque random[46]; - * } PreMasterSecret; - * - * struct { - * public-key-encrypted PreMasterSecret pre_master_secret; - * } EncryptedPreMasterSecret; - * - * A public-key-encrypted element is encoded as a vector <0..2^16-1>. - * - * @param c the connection. - * - * @return the ClientKeyExchange byte buffer. 
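// Editor's note: an illustrative call of processBlobParts as defined above; the
// inputs are examples only. Strings are UTF-8 encoded (with line endings converted
// when endings is 'native'), BufferSources become Uint8Array views, and Blob-like
// parts are passed through unchanged.
const parts = processBlobParts(
  ['line1\nline2', new Uint8Array([1, 2, 3])],
  { type: '', endings: 'native' }
)
// On Windows the first entry encodes 'line1\r\nline2'; the Blob constructor then
// concatenates the returned sequence into the blob's byte contents.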
- */ -tls.createClientKeyExchange = function(c) { - // create buffer to encrypt - var b = forge.util.createBuffer(); - - // add highest client-supported protocol to help server avoid version - // rollback attacks - b.putByte(c.session.clientHelloVersion.major); - b.putByte(c.session.clientHelloVersion.minor); - - // generate and add 46 random bytes - b.putBytes(forge.random.getBytes(46)); - - // save pre-master secret - var sp = c.session.sp; - sp.pre_master_secret = b.getBytes(); - - // RSA-encrypt the pre-master secret - var key = c.session.serverCertificate.publicKey; - b = key.encrypt(sp.pre_master_secret); - - /* Note: The encrypted pre-master secret will be stored in a - public-key-encrypted opaque vector that has the length prefixed using - 2 bytes, so include those 2 bytes in the handshake message length. This - is done as a minor optimization instead of calling writeVector(). */ - - // determine length of the handshake message - var length = b.length + 2; - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.client_key_exchange); - rval.putInt24(length); - // add vector length bytes - rval.putInt16(b.length); - rval.putBytes(b); - return rval; -}; + // 3. Return bytes. + return bytes +} /** - * Creates a ServerKeyExchange message. - * - * @param c the connection. - * - * @return the ServerKeyExchange byte buffer. + * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native + * @param {string} s */ -tls.createServerKeyExchange = function(c) { - // this implementation only supports RSA, no Diffie-Hellman support, - // so this record is empty - - // determine length of the handshake message - var length = 0; +function convertLineEndingsNative (s) { + // 1. Let native line ending be be the code point U+000A LF. + let nativeLineEnding = '\n' - // build record fragment - var rval = forge.util.createBuffer(); - if(length > 0) { - rval.putByte(tls.HandshakeType.server_key_exchange); - rval.putInt24(length); + // 2. If the underlying platform’s conventions are to + // represent newlines as a carriage return and line feed + // sequence, set native line ending to the code point + // U+000D CR followed by the code point U+000A LF. + if (process.platform === 'win32') { + nativeLineEnding = '\r\n' } - return rval; -}; -/** - * Gets the signed data used to verify a client-side certificate. See - * tls.createCertificateVerify() for details. - * - * @param c the connection. - * @param callback the callback to call once the signed data is ready. 
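// A minimal sketch of the RSA premaster-secret construction documented above,
// assuming node-forge is available as require('node-forge'); the generated key
// pair stands in for the public key taken from the server's certificate.
const forge = require('node-forge')

const { publicKey, privateKey } = forge.pki.rsa.generateKeyPair({ bits: 1024 })

// 48 bytes in total: the highest client-supported version (3.1 = TLS 1.0 here)
// followed by 46 random bytes.
const b = forge.util.createBuffer()
b.putByte(3)
b.putByte(1)
b.putBytes(forge.random.getBytes(46))
const preMasterSecret = b.getBytes()

// Only the RSA-encrypted copy goes on the wire; the server recovers the secret
// with its private key and both sides derive the master secret from it.
const encrypted = publicKey.encrypt(preMasterSecret)
console.log(privateKey.decrypt(encrypted) === preMasterSecret) // true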
- */ -tls.getClientSignature = function(c, callback) { - // generate data to RSA encrypt - var b = forge.util.createBuffer(); - b.putBuffer(c.session.md5.digest()); - b.putBuffer(c.session.sha1.digest()); - b = b.getBytes(); - - // create default signing function as necessary - c.getSignature = c.getSignature || function(c, b, callback) { - // do rsa encryption, call callback - var privateKey = null; - if(c.getPrivateKey) { - try { - privateKey = c.getPrivateKey(c, c.session.clientCertificate); - privateKey = forge.pki.privateKeyFromPem(privateKey); - } catch(ex) { - c.error(c, { - message: 'Could not get private key.', - cause: ex, - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } - } - if(privateKey === null) { - c.error(c, { - message: 'No private key set.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.internal_error - } - }); - } else { - b = privateKey.sign(b, null); - } - callback(c, b); - }; + return s.replace(/\r?\n/g, nativeLineEnding) +} - // get client signature - c.getSignature(c, b, callback); -}; +// If this function is moved to ./util.js, some tools (such as +// rollup) will warn about circular dependencies. See: +// https://github.com/nodejs/undici/issues/1629 +function isFileLike (object) { + return ( + (NativeFile && object instanceof NativeFile) || + object instanceof File || ( + object && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + object[Symbol.toStringTag] === 'File' + ) + ) +} -/** - * Creates a CertificateVerify message. - * - * Meaning of this message: - * This structure conveys the client's Diffie-Hellman public value - * (Yc) if it was not already included in the client's certificate. - * The encoding used for Yc is determined by the enumerated - * PublicValueEncoding. This structure is a variant of the client - * key exchange message, not a message in itself. - * - * When this message will be sent: - * This message is used to provide explicit verification of a client - * certificate. This message is only sent following a client - * certificate that has signing capability (i.e. all certificates - * except those containing fixed Diffie-Hellman parameters). When - * sent, it will immediately follow the client key exchange message. - * - * struct { - * Signature signature; - * } CertificateVerify; - * - * CertificateVerify.signature.md5_hash - * MD5(handshake_messages); - * - * Certificate.signature.sha_hash - * SHA(handshake_messages); - * - * Here handshake_messages refers to all handshake messages sent or - * received starting at client hello up to but not including this - * message, including the type and length fields of the handshake - * messages. - * - * select(SignatureAlgorithm) { - * case anonymous: struct { }; - * case rsa: - * digitally-signed struct { - * opaque md5_hash[16]; - * opaque sha_hash[20]; - * }; - * case dsa: - * digitally-signed struct { - * opaque sha_hash[20]; - * }; - * } Signature; - * - * In digital signing, one-way hash functions are used as input for a - * signing algorithm. A digitally-signed element is encoded as an opaque - * vector <0..2^16-1>, where the length is specified by the signing - * algorithm and key. - * - * In RSA signing, a 36-byte structure of two hashes (one SHA and one - * MD5) is signed (encrypted with the private key). It is encoded with - * PKCS #1 block type 0 or type 1 as described in [PKCS1]. 
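// A minimal sketch of the blob-part processing above, assuming a Node >= 18
// runtime whose buffer.Blob supports the FileAPI `endings` option: 'native'
// rewrites '\n' to '\r\n' on Windows and leaves it alone elsewhere, while
// 'transparent' (the default) keeps the bytes exactly as given.
const { Blob } = require('node:buffer')

async function demoEndings () {
  const native = new Blob(['line 1\nline 2\n'], { type: 'text/plain', endings: 'native' })
  const transparent = new Blob(['line 1\nline 2\n'], { endings: 'transparent' })

  console.log(JSON.stringify(await native.text()))      // platform line endings
  console.log(JSON.stringify(await transparent.text())) // '\n' kept as-is
}

demoEndings()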
- * - * In DSS, the 20 bytes of the SHA hash are run directly through the - * Digital Signing Algorithm with no additional hashing. - * - * @param c the connection. - * @param signature the signature to include in the message. - * - * @return the CertificateVerify byte buffer. - */ -tls.createCertificateVerify = function(c, signature) { - /* Note: The signature will be stored in a "digitally-signed" opaque - vector that has the length prefixed using 2 bytes, so include those - 2 bytes in the handshake message length. This is done as a minor - optimization instead of calling writeVector(). */ - - // determine length of the handshake message - var length = signature.length + 2; - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.certificate_verify); - rval.putInt24(length); - // add vector length bytes - rval.putInt16(signature.length); - rval.putBytes(signature); - return rval; -}; +module.exports = { File, FileLike, isFileLike } -/** - * Creates a CertificateRequest message. - * - * @param c the connection. - * - * @return the CertificateRequest byte buffer. - */ -tls.createCertificateRequest = function(c) { - // TODO: support other certificate types - var certTypes = forge.util.createBuffer(); - - // common RSA certificate type - certTypes.putByte(0x01); - - // add distinguished names from CA store - var cAs = forge.util.createBuffer(); - for(var key in c.caStore.certs) { - var cert = c.caStore.certs[key]; - var dn = forge.pki.distinguishedNameToAsn1(cert.subject); - var byteBuffer = forge.asn1.toDer(dn); - cAs.putInt16(byteBuffer.length()); - cAs.putBuffer(byteBuffer); - } - - // TODO: TLS 1.2+ has a different format - - // determine length of the handshake message - var length = - 1 + certTypes.length() + - 2 + cAs.length(); - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.certificate_request); - rval.putInt24(length); - writeVector(rval, 1, certTypes); - writeVector(rval, 2, cAs); - return rval; -}; -/** - * Creates a ServerHelloDone message. - * - * @param c the connection. - * - * @return the ServerHelloDone byte buffer. - */ -tls.createServerHelloDone = function(c) { - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.server_hello_done); - rval.putInt24(0); - return rval; -}; +/***/ }), -/** - * Creates a ChangeCipherSpec message. - * - * The change cipher spec protocol exists to signal transitions in - * ciphering strategies. The protocol consists of a single message, - * which is encrypted and compressed under the current (not the pending) - * connection state. The message consists of a single byte of value 1. - * - * struct { - * enum { change_cipher_spec(1), (255) } type; - * } ChangeCipherSpec; - * - * @return the ChangeCipherSpec byte buffer. - */ -tls.createChangeCipherSpec = function() { - var rval = forge.util.createBuffer(); - rval.putByte(0x01); - return rval; -}; +/***/ 2015: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Creates a Finished message. - * - * struct { - * opaque verify_data[12]; - * } Finished; - * - * verify_data - * PRF(master_secret, finished_label, MD5(handshake_messages) + - * SHA-1(handshake_messages)) [0..11]; - * - * finished_label - * For Finished messages sent by the client, the string "client - * finished". For Finished messages sent by the server, the - * string "server finished". 
- * - * handshake_messages - * All of the data from all handshake messages up to but not - * including this message. This is only data visible at the - * handshake layer and does not include record layer headers. - * This is the concatenation of all the Handshake structures as - * defined in 7.4 exchanged thus far. - * - * @param c the connection. - * - * @return the Finished byte buffer. - */ -tls.createFinished = function(c) { - // generate verify_data - var b = forge.util.createBuffer(); - b.putBuffer(c.session.md5.digest()); - b.putBuffer(c.session.sha1.digest()); - - // TODO: determine prf function and verify length for TLS 1.2 - var client = (c.entity === tls.ConnectionEnd.client); - var sp = c.session.sp; - var vdl = 12; - var prf = prf_TLS1; - var label = client ? 'client finished' : 'server finished'; - b = prf(sp.master_secret, label, b.getBytes(), vdl); - - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(tls.HandshakeType.finished); - rval.putInt24(b.length()); - rval.putBuffer(b); - return rval; -}; +"use strict"; -/** - * Creates a HeartbeatMessage (See RFC 6520). - * - * struct { - * HeartbeatMessageType type; - * uint16 payload_length; - * opaque payload[HeartbeatMessage.payload_length]; - * opaque padding[padding_length]; - * } HeartbeatMessage; - * - * The total length of a HeartbeatMessage MUST NOT exceed 2^14 or - * max_fragment_length when negotiated as defined in [RFC6066]. - * - * type: The message type, either heartbeat_request or heartbeat_response. - * - * payload_length: The length of the payload. - * - * payload: The payload consists of arbitrary content. - * - * padding: The padding is random content that MUST be ignored by the - * receiver. The length of a HeartbeatMessage is TLSPlaintext.length - * for TLS and DTLSPlaintext.length for DTLS. Furthermore, the - * length of the type field is 1 byte, and the length of the - * payload_length is 2. Therefore, the padding_length is - * TLSPlaintext.length - payload_length - 3 for TLS and - * DTLSPlaintext.length - payload_length - 3 for DTLS. The - * padding_length MUST be at least 16. - * - * The sender of a HeartbeatMessage MUST use a random padding of at - * least 16 bytes. The padding of a received HeartbeatMessage message - * MUST be ignored. - * - * If the payload_length of a received HeartbeatMessage is too large, - * the received HeartbeatMessage MUST be discarded silently. - * - * @param c the connection. - * @param type the tls.HeartbeatMessageType. - * @param payload the heartbeat data to send as the payload. - * @param [payloadLength] the payload length to use, defaults to the - * actual payload length. - * - * @return the HeartbeatRequest byte buffer. - */ -tls.createHeartbeat = function(type, payload, payloadLength) { - if(typeof payloadLength === 'undefined') { - payloadLength = payload.length; - } - // build record fragment - var rval = forge.util.createBuffer(); - rval.putByte(type); // heartbeat message type - rval.putInt16(payloadLength); // payload length - rval.putBytes(payload); // payload - // padding - var plaintextLength = rval.length(); - var paddingLength = Math.max(16, plaintextLength - payloadLength - 3); - rval.putBytes(forge.random.getBytes(paddingLength)); - return rval; -}; -/** - * Fragments, compresses, encrypts, and queues a record for delivery. - * - * @param c the connection. - * @param record the record to queue. 
- */ -tls.queue = function(c, record) { - // error during record creation - if(!record) { - return; - } +const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(2538) +const { kState } = __nccwpck_require__(5861) +const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(8511) +const { webidl } = __nccwpck_require__(1744) +const { Blob, File: NativeFile } = __nccwpck_require__(4300) - if(record.fragment.length() === 0) { - if(record.type === tls.ContentType.handshake || - record.type === tls.ContentType.alert || - record.type === tls.ContentType.change_cipher_spec) { - // Empty handshake, alert of change cipher spec messages are not allowed per the TLS specification and should not be sent. - return; +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile + +// https://xhr.spec.whatwg.org/#formdata +class FormData { + constructor (form) { + if (form !== undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'FormData constructor', + argument: 'Argument 1', + types: ['undefined'] + }) } - } - // if the record is a handshake record, update handshake hashes - if(record.type === tls.ContentType.handshake) { - var bytes = record.fragment.bytes(); - c.session.md5.update(bytes); - c.session.sha1.update(bytes); - bytes = null; + this[kState] = [] } - // handle record fragmentation - var records; - if(record.fragment.length() <= tls.MaxFragment) { - records = [record]; - } else { - // fragment data as long as it is too long - records = []; - var data = record.fragment.bytes(); - while(data.length > tls.MaxFragment) { - records.push(tls.createRecord(c, { - type: record.type, - data: forge.util.createBuffer(data.slice(0, tls.MaxFragment)) - })); - data = data.slice(tls.MaxFragment); - } - // add last record - if(data.length > 0) { - records.push(tls.createRecord(c, { - type: record.type, - data: forge.util.createBuffer(data) - })); - } - } - - // compress and encrypt all fragmented records - for(var i = 0; i < records.length && !c.fail; ++i) { - // update the record using current write state - var rec = records[i]; - var s = c.state.current.write; - if(s.update(c, rec)) { - // store record - c.records.push(rec); + append (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" + ) } - } -}; -/** - * Flushes all queued records to the output buffer and calls the - * tlsDataReady() handler on the given connection. - * - * @param c the connection. - * - * @return true on success, false on failure. - */ -tls.flush = function(c) { - for(var i = 0; i < c.records.length; ++i) { - var record = c.records[i]; - - // add record header and fragment - c.tlsData.putByte(record.type); - c.tlsData.putByte(record.version.major); - c.tlsData.putByte(record.version.minor); - c.tlsData.putInt16(record.fragment.length()); - c.tlsData.putBuffer(c.records[i].fragment); - } - c.records = []; - return c.tlsDataReady(c); -}; + // 1. Let value be value if given; otherwise blobValue. -/** - * Maps a pki.certificateError to a tls.Alert.Description. - * - * @param error the error to map. - * - * @return the alert description. 
- */ -var _certErrorToAlertDesc = function(error) { - switch(error) { - case true: - return true; - case forge.pki.certificateError.bad_certificate: - return tls.Alert.Description.bad_certificate; - case forge.pki.certificateError.unsupported_certificate: - return tls.Alert.Description.unsupported_certificate; - case forge.pki.certificateError.certificate_revoked: - return tls.Alert.Description.certificate_revoked; - case forge.pki.certificateError.certificate_expired: - return tls.Alert.Description.certificate_expired; - case forge.pki.certificateError.certificate_unknown: - return tls.Alert.Description.certificate_unknown; - case forge.pki.certificateError.unknown_ca: - return tls.Alert.Description.unknown_ca; - default: - return tls.Alert.Description.bad_certificate; + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? webidl.converters.USVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with + // name, value, and filename if given. + const entry = makeEntry(name, value, filename) + + // 3. Append entry to this’s entry list. + this[kState].push(entry) } -}; -/** - * Maps a tls.Alert.Description to a pki.certificateError. - * - * @param desc the alert description. - * - * @return the certificate error. - */ -var _alertDescToCertError = function(desc) { - switch(desc) { - case true: - return true; - case tls.Alert.Description.bad_certificate: - return forge.pki.certificateError.bad_certificate; - case tls.Alert.Description.unsupported_certificate: - return forge.pki.certificateError.unsupported_certificate; - case tls.Alert.Description.certificate_revoked: - return forge.pki.certificateError.certificate_revoked; - case tls.Alert.Description.certificate_expired: - return forge.pki.certificateError.certificate_expired; - case tls.Alert.Description.certificate_unknown: - return forge.pki.certificateError.certificate_unknown; - case tls.Alert.Description.unknown_ca: - return forge.pki.certificateError.unknown_ca; - default: - return forge.pki.certificateError.bad_certificate; + delete (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + + name = webidl.converters.USVString(name) + + // The delete(name) method steps are to remove all entries whose name + // is name from this’s entry list. + this[kState] = this[kState].filter(entry => entry.name !== name) } -}; -/** - * Verifies a certificate chain against the given connection's - * Certificate Authority store. - * - * @param c the TLS connection. - * @param chain the certificate chain to verify, with the root or highest - * authority at the end. - * - * @return true if successful, false if not. - */ -tls.verifyCertificateChain = function(c, chain) { - try { - // Make a copy of c.verifyOptions so that we can modify options.verify - // without modifying c.verifyOptions. 
- var options = {}; - for (var key in c.verifyOptions) { - options[key] = c.verifyOptions[key]; - } - - options.verify = function(vfd, depth, chain) { - // convert pki.certificateError to tls alert description - var desc = _certErrorToAlertDesc(vfd); - - // call application callback - var ret = c.verify(c, vfd, depth, chain); - if(ret !== true) { - if(typeof ret === 'object' && !forge.util.isArray(ret)) { - // throw custom error - var error = new Error('The application rejected the certificate.'); - error.send = true; - error.alert = { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.bad_certificate - }; - if(ret.message) { - error.message = ret.message; - } - if(ret.alert) { - error.alert.description = ret.alert; - } - throw error; - } + get (name) { + webidl.brandCheck(this, FormData) - // convert tls alert description to pki.certificateError - if(ret !== vfd) { - ret = _alertDescToCertError(ret); - } - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) - return ret; - }; + name = webidl.converters.USVString(name) - // verify chain - forge.pki.verifyCertificateChain(c.caStore, chain, options); - } catch(ex) { - // build tls error if not already customized - var err = ex; - if(typeof err !== 'object' || forge.util.isArray(err)) { - err = { - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: _certErrorToAlertDesc(ex) - } - }; - } - if(!('send' in err)) { - err.send = true; - } - if(!('alert' in err)) { - err.alert = { - level: tls.Alert.Level.fatal, - description: _certErrorToAlertDesc(err.error) - }; + // 1. If there is no entry whose name is name in this’s entry list, + // then return null. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx === -1) { + return null } - // send error - c.error(c, err); + // 2. Return the value of the first entry whose name is name from + // this’s entry list. + return this[kState][idx].value } - return !c.fail; -}; + getAll (name) { + webidl.brandCheck(this, FormData) -/** - * Creates a new TLS session cache. - * - * @param cache optional map of session ID to cached session. - * @param capacity the maximum size for the cache (default: 100). - * - * @return the new TLS session cache. - */ -tls.createSessionCache = function(cache, capacity) { - var rval = null; + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) - // assume input is already a session cache object - if(cache && cache.getSession && cache.setSession && cache.order) { - rval = cache; - } else { - // create cache - rval = {}; - rval.cache = cache || {}; - rval.capacity = Math.max(capacity || 100, 1); - rval.order = []; - - // store order for sessions, delete session overflow - for(var key in cache) { - if(rval.order.length <= capacity) { - rval.order.push(key); - } else { - delete cache[key]; - } - } + name = webidl.converters.USVString(name) - // get a session from a session ID (or get any session) - rval.getSession = function(sessionId) { - var session = null; - var key = null; + // 1. If there is no entry whose name is name in this’s entry list, + // then return the empty list. + // 2. Return the values of all entries whose name is name, in order, + // from this’s entry list. 
+ return this[kState] + .filter((entry) => entry.name === name) + .map((entry) => entry.value) + } - // if session ID provided, use it - if(sessionId) { - key = forge.util.bytesToHex(sessionId); - } else if(rval.order.length > 0) { - // get first session from cache - key = rval.order[0]; - } + has (name) { + webidl.brandCheck(this, FormData) - if(key !== null && key in rval.cache) { - // get cached session and remove from cache - session = rval.cache[key]; - delete rval.cache[key]; - for(var i in rval.order) { - if(rval.order[i] === key) { - rval.order.splice(i, 1); - break; - } - } - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) - return session; - }; + name = webidl.converters.USVString(name) - // set a session in the cache - rval.setSession = function(sessionId, session) { - // remove session from cache if at capacity - if(rval.order.length === rval.capacity) { - var key = rval.order.shift(); - delete rval.cache[key]; - } - // add session to cache - var key = forge.util.bytesToHex(sessionId); - rval.order.push(key); - rval.cache[key] = session; - }; + // The has(name) method steps are to return true if there is an entry + // whose name is name in this’s entry list; otherwise false. + return this[kState].findIndex((entry) => entry.name === name) !== -1 } - return rval; -}; + set (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) -/** - * Creates a new TLS connection. - * - * See public createConnection() docs for more details. - * - * @param options the options for this connection. - * - * @return the new TLS connection. - */ -tls.createConnection = function(options) { - var caStore = null; - if(options.caStore) { - // if CA store is an array, convert it to a CA store object - if(forge.util.isArray(options.caStore)) { - caStore = forge.pki.createCaStore(options.caStore); + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // The set(name, value) and set(name, blobValue, filename) method steps + // are: + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? toUSVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with name, value, and + // filename if given. + const entry = makeEntry(name, value, filename) + + // 3. If there are entries in this’s entry list whose name is name, then + // replace the first such entry with entry and remove the others. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx !== -1) { + this[kState] = [ + ...this[kState].slice(0, idx), + entry, + ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) + ] } else { - caStore = options.caStore; + // 4. Otherwise, append entry to this’s entry list. + this[kState].push(entry) } - } else { - // create empty CA store - caStore = forge.pki.createCaStore(); - } - - // setup default cipher suites - var cipherSuites = options.cipherSuites || null; - if(cipherSuites === null) { - cipherSuites = []; - for(var key in tls.CipherSuites) { - cipherSuites.push(tls.CipherSuites[key]); - } - } - - // set default entity - var entity = (options.server || false) ? 
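// A small sketch of the session cache above, assuming forge.tls is available
// via require('node-forge'); the session object is deliberately minimal.
const forge = require('node-forge')

const cache = forge.tls.createSessionCache({}, 5)

// setSession() keys the entry by the hex of the raw session ID bytes and evicts
// the oldest entry once the capacity (5 here) is reached.
cache.setSession('\x01\x02', {
  id: '\x01\x02',
  version: { major: 3, minor: 1 },
  sp: {}
})

// getSession() hands the cached session back and removes it from the cache, so
// a second lookup for the same ID returns null.
console.log(cache.getSession('\x01\x02') !== null) // true
console.log(cache.getSession('\x01\x02'))          // null

// The same cache object can be passed to createConnection({ sessionCache: cache, ... })
// so a later handshake can resume the stored session.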
- tls.ConnectionEnd.server : tls.ConnectionEnd.client; - - // create session cache if requested - var sessionCache = options.sessionCache ? - tls.createSessionCache(options.sessionCache) : null; - - // create TLS connection - var c = { - version: {major: tls.Version.major, minor: tls.Version.minor}, - entity: entity, - sessionId: options.sessionId, - caStore: caStore, - sessionCache: sessionCache, - cipherSuites: cipherSuites, - connected: options.connected, - virtualHost: options.virtualHost || null, - verifyClient: options.verifyClient || false, - verify: options.verify || function(cn, vfd, dpth, cts) {return vfd;}, - verifyOptions: options.verifyOptions || {}, - getCertificate: options.getCertificate || null, - getPrivateKey: options.getPrivateKey || null, - getSignature: options.getSignature || null, - input: forge.util.createBuffer(), - tlsData: forge.util.createBuffer(), - data: forge.util.createBuffer(), - tlsDataReady: options.tlsDataReady, - dataReady: options.dataReady, - heartbeatReceived: options.heartbeatReceived, - closed: options.closed, - error: function(c, ex) { - // set origin if not set - ex.origin = ex.origin || - ((c.entity === tls.ConnectionEnd.client) ? 'client' : 'server'); - - // send TLS alert - if(ex.send) { - tls.queue(c, tls.createAlert(c, ex.alert)); - tls.flush(c); - } + } - // error is fatal by default - var fatal = (ex.fatal !== false); - if(fatal) { - // set fail flag - c.fail = true; - } + entries () { + webidl.brandCheck(this, FormData) - // call error handler first - options.error(c, ex); + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key+value' + ) + } - if(fatal) { - // fatal error, close connection, do not clear fail - c.close(false); - } - }, - deflate: options.deflate || null, - inflate: options.inflate || null - }; + keys () { + webidl.brandCheck(this, FormData) - /** - * Resets a closed TLS connection for reuse. Called in c.close(). - * - * @param clearFail true to clear the fail flag (default: true). - */ - c.reset = function(clearFail) { - c.version = {major: tls.Version.major, minor: tls.Version.minor}; - c.record = null; - c.session = null; - c.peerCertificate = null; - c.state = { - pending: null, - current: null - }; - c.expect = (c.entity === tls.ConnectionEnd.client) ? SHE : CHE; - c.fragmented = null; - c.records = []; - c.open = false; - c.handshakes = 0; - c.handshaking = false; - c.isConnected = false; - c.fail = !(clearFail || typeof(clearFail) === 'undefined'); - c.input.clear(); - c.tlsData.clear(); - c.data.clear(); - c.state.current = tls.createConnectionState(c); - }; + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key' + ) + } - // do initial reset of connection - c.reset(); + values () { + webidl.brandCheck(this, FormData) - /** - * Updates the current TLS engine state based on the given record. - * - * @param c the TLS connection. - * @param record the TLS record to act on. 
- */ - var _update = function(c, record) { - // get record handler (align type in table by subtracting lowest) - var aligned = record.type - tls.ContentType.change_cipher_spec; - var handlers = ctTable[c.entity][c.expect]; - if(aligned in handlers) { - handlers[aligned](c, record); - } else { - // unexpected record - tls.handleUnexpected(c, record); - } - }; + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'value' + ) + } /** - * Reads the record header and initializes the next record on the given - * connection. - * - * @param c the TLS connection with the next record. - * - * @return 0 if the input data could be processed, otherwise the - * number of bytes required for data to be processed. + * @param {(value: string, key: string, self: FormData) => void} callbackFn + * @param {unknown} thisArg */ - var _readRecordHeader = function(c) { - var rval = 0; + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, FormData) - // get input buffer and its length - var b = c.input; - var len = b.length(); + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) - // need at least 5 bytes to initialize a record - if(len < 5) { - rval = 5 - len; - } else { - // enough bytes for header - // initialize record - c.record = { - type: b.getByte(), - version: { - major: b.getByte(), - minor: b.getByte() - }, - length: b.getInt16(), - fragment: forge.util.createBuffer(), - ready: false - }; + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." + ) + } - // check record version - var compatibleVersion = (c.record.version.major === c.version.major); - if(compatibleVersion && c.session && c.session.version) { - // session version already set, require same minor version - compatibleVersion = (c.record.version.minor === c.version.minor); - } - if(!compatibleVersion) { - c.error(c, { - message: 'Incompatible TLS version.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: tls.Alert.Description.protocol_version - } - }); - } + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) } + } +} - return rval; - }; +FormData.prototype[Symbol.iterator] = FormData.prototype.entries - /** - * Reads the next record's contents and appends its message to any - * previously fragmented message. - * - * @param c the TLS connection with the next record. - * - * @return 0 if the input data could be processed, otherwise the - * number of bytes required for data to be processed. 
- */ - var _readRecord = function(c) { - var rval = 0; - - // ensure there is enough input data to get the entire record - var b = c.input; - var len = b.length(); - if(len < c.record.length) { - // not enough data yet, return how much is required - rval = c.record.length - len; - } else { - // there is enough data to parse the pending record - // fill record fragment and compact input buffer - c.record.fragment.putBytes(b.getBytes(c.record.length)); - b.compact(); - - // update record using current read state - var s = c.state.current.read; - if(s.update(c, c.record)) { - // see if there is a previously fragmented message that the - // new record's message fragment should be appended to - if(c.fragmented !== null) { - // if the record type matches a previously fragmented - // record, append the record fragment to it - if(c.fragmented.type === c.record.type) { - // concatenate record fragments - c.fragmented.fragment.putBuffer(c.record.fragment); - c.record = c.fragmented; - } else { - // error, invalid fragmented record - c.error(c, { - message: 'Invalid fragmented record.', - send: true, - alert: { - level: tls.Alert.Level.fatal, - description: - tls.Alert.Description.unexpected_message - } - }); - } - } +Object.defineProperties(FormData.prototype, { + [Symbol.toStringTag]: { + value: 'FormData', + configurable: true + } +}) + +/** + * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry + * @param {string} name + * @param {string|Blob} value + * @param {?string} filename + * @returns + */ +function makeEntry (name, value, filename) { + // 1. Set name to the result of converting name into a scalar value string. + // "To convert a string into a scalar value string, replace any surrogates + // with U+FFFD." + // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end + name = Buffer.from(name).toString('utf8') - // record is now ready - c.record.ready = true; + // 2. If value is a string, then set value to the result of converting + // value into a scalar value string. + if (typeof value === 'string') { + value = Buffer.from(value).toString('utf8') + } else { + // 3. Otherwise: + + // 1. If value is not a File object, then set value to a new File object, + // representing the same bytes, whose name attribute value is "blob" + if (!isFileLike(value)) { + value = value instanceof Blob + ? new File([value], 'blob', { type: value.type }) + : new FileLike(value, 'blob', { type: value.type }) + } + + // 2. If filename is given, then set value to a new File object, + // representing the same bytes, whose name attribute is filename. + if (filename !== undefined) { + /** @type {FilePropertyBag} */ + const options = { + type: value.type, + lastModified: value.lastModified } + + value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile + ? new File([value], filename, options) + : new FileLike(value, filename, options) } + } - return rval; - }; + // 4. Return an entry whose name is name and whose value is value. + return { name, value } +} - /** - * Performs a handshake using the TLS Handshake Protocol, as a client. - * - * This method should only be called if the connection is in client mode. - * - * @param sessionId the session ID to use, null to start a new one. 
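// A minimal sketch of the entry-list semantics implemented above, assuming a
// runtime that exposes the WHATWG FormData and Blob globals (Node >= 18), which
// undici's FormData mirrors.
const fd = new FormData()

fd.append('tag', 'alpha')
fd.append('tag', 'beta')       // append() keeps both entries
console.log(fd.getAll('tag'))   // [ 'alpha', 'beta' ]

fd.set('tag', 'gamma')          // set() replaces the first entry and drops the rest
console.log(fd.getAll('tag'))   // [ 'gamma' ]

// Blob values are wrapped in a File named 'blob' unless a filename is given
// (the makeEntry() step above).
fd.append('upload', new Blob(['hi'], { type: 'text/plain' }), 'greeting.txt')
console.log(fd.get('upload').name) // 'greeting.txt'

for (const [name, value] of fd) {
  console.log(name, value)
}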
- */ - c.handshake = function(sessionId) { - // error to call this in non-client mode - if(c.entity !== tls.ConnectionEnd.client) { - // not fatal error - c.error(c, { - message: 'Cannot initiate handshake as a server.', - fatal: false - }); - } else if(c.handshaking) { - // handshake is already in progress, fail but not fatal error - c.error(c, { - message: 'Handshake already in progress.', - fatal: false - }); - } else { - // clear fail flag on reuse - if(c.fail && !c.open && c.handshakes === 0) { - c.fail = false; - } +module.exports = { FormData } - // now handshaking - c.handshaking = true; - // default to blank (new session) - sessionId = sessionId || ''; +/***/ }), - // if a session ID was specified, try to find it in the cache - var session = null; - if(sessionId.length > 0) { - if(c.sessionCache) { - session = c.sessionCache.getSession(sessionId); - } +/***/ 1246: +/***/ ((module) => { - // matching session not found in cache, clear session ID - if(session === null) { - sessionId = ''; - } - } +"use strict"; - // no session given, grab a session from the cache, if available - if(sessionId.length === 0 && c.sessionCache) { - session = c.sessionCache.getSession(); - if(session !== null) { - sessionId = session.id; - } - } - // set up session - c.session = { - id: sessionId, - version: null, - cipherSuite: null, - compressionMethod: null, - serverCertificate: null, - certificateRequest: null, - clientCertificate: null, - sp: {}, - md5: forge.md.md5.create(), - sha1: forge.md.sha1.create() - }; +// In case of breaking changes, increase the version +// number to avoid conflicts. +const globalOrigin = Symbol.for('undici.globalOrigin.1') - // use existing session information - if(session) { - // only update version on connection, session version not yet set - c.version = session.version; - c.session.sp = session.sp; - } +function getGlobalOrigin () { + return globalThis[globalOrigin] +} - // generate new client random - c.session.sp.client_random = tls.createRandom().getBytes(); +function setGlobalOrigin (newOrigin) { + if (newOrigin === undefined) { + Object.defineProperty(globalThis, globalOrigin, { + value: undefined, + writable: true, + enumerable: false, + configurable: false + }) - // connection now open - c.open = true; + return + } - // send hello - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.handshake, - data: tls.createClientHello(c) - })); - tls.flush(c); - } - }; + const parsedURL = new URL(newOrigin) - /** - * Called when TLS protocol data has been received from somewhere and should - * be processed by the TLS engine. - * - * @param data the TLS protocol data, as a string, to process. - * - * @return 0 if the data could be processed, otherwise the number of bytes - * required for data to be processed. 
- */ - c.process = function(data) { - var rval = 0; + if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { + throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) + } - // buffer input data - if(data) { - c.input.putBytes(data); - } + Object.defineProperty(globalThis, globalOrigin, { + value: parsedURL, + writable: true, + enumerable: false, + configurable: false + }) +} - // process next record if no failure, process will be called after - // each record is handled (since handling can be asynchronous) - if(!c.fail) { - // reset record if ready and now empty - if(c.record !== null && - c.record.ready && c.record.fragment.isEmpty()) { - c.record = null; - } +module.exports = { + getGlobalOrigin, + setGlobalOrigin +} - // if there is no pending record, try to read record header - if(c.record === null) { - rval = _readRecordHeader(c); - } - // read the next record (if record not yet ready) - if(!c.fail && c.record !== null && !c.record.ready) { - rval = _readRecord(c); - } +/***/ }), - // record ready to be handled, update engine state - if(!c.fail && c.record !== null && c.record.ready) { - _update(c, c.record); - } - } +/***/ 554: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return rval; - }; +"use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch - /** - * Requests that application data be packaged into a TLS record. The - * tlsDataReady handler will be called when the TLS record(s) have been - * prepared. - * - * @param data the application data, as a raw 'binary' encoded string, to - * be sent; to send utf-16/utf-8 string data, use the return value - * of util.encodeUtf8(str). - * - * @return true on success, false on failure. - */ - c.prepare = function(data) { - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.application_data, - data: forge.util.createBuffer(data) - })); - return tls.flush(c); - }; - /** - * Requests that a heartbeat request be packaged into a TLS record for - * transmission. The tlsDataReady handler will be called when TLS record(s) - * have been prepared. - * - * When a heartbeat response has been received, the heartbeatReceived - * handler will be called with the matching payload. This handler can - * be used to clear a retransmission timer, etc. - * - * @param payload the heartbeat data to send as the payload in the message. - * @param [payloadLength] the payload length to use, defaults to the - * actual payload length. - * - * @return true on success, false on failure. - */ - c.prepareHeartbeatRequest = function(payload, payloadLength) { - if(payload instanceof forge.util.ByteBuffer) { - payload = payload.bytes(); - } - if(typeof payloadLength === 'undefined') { - payloadLength = payload.length; - } - c.expectedHeartbeatPayload = payload; - tls.queue(c, tls.createRecord(c, { - type: tls.ContentType.heartbeat, - data: tls.createHeartbeat( - tls.HeartbeatMessageType.heartbeat_request, payload, payloadLength) - })); - return tls.flush(c); - }; - /** - * Closes the connection (sends a close_notify alert). - * - * @param clearFail true to clear the fail flag (default: true). 
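// A minimal sketch of the global-origin hook above, assuming undici's public
// setGlobalOrigin/getGlobalOrigin exports and a server listening on port 3000.
const { fetch, setGlobalOrigin, getGlobalOrigin } = require('undici')

async function demoOrigin () {
  // Only http: and https: origins are accepted; anything else throws a TypeError.
  setGlobalOrigin('http://localhost:3000')
  console.log(getGlobalOrigin().href) // 'http://localhost:3000/'

  // With a global origin set, relative URLs passed to fetch() resolve against it.
  const res = await fetch('/health')
  console.log(res.status)
}

demoOrigin().catch(console.error)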
- */ - c.close = function(clearFail) { - // save session if connection didn't fail - if(!c.fail && c.sessionCache && c.session) { - // only need to preserve session ID, version, and security params - var session = { - id: c.session.id, - version: c.session.version, - sp: c.session.sp - }; - session.sp.keys = null; - c.sessionCache.setSession(session.id, session); - } +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) +const { kGuard } = __nccwpck_require__(5861) +const { kEnumerableProperty } = __nccwpck_require__(3983) +const { + makeIterator, + isValidHeaderName, + isValidHeaderValue +} = __nccwpck_require__(2538) +const { webidl } = __nccwpck_require__(1744) +const assert = __nccwpck_require__(9491) + +const kHeadersMap = Symbol('headers map') +const kHeadersSortedMap = Symbol('headers map sorted') - if(c.open) { - // connection no longer open, clear input - c.open = false; - c.input.clear(); +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} - // if connected or handshaking, send an alert - if(c.isConnected || c.handshaking) { - c.isConnected = c.handshaking = false; +/** + * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize + * @param {string} potentialValue + */ +function headerValueNormalize (potentialValue) { + // To normalize a byte sequence potentialValue, remove + // any leading and trailing HTTP whitespace bytes from + // potentialValue. + let i = 0; let j = potentialValue.length - // send close_notify alert - tls.queue(c, tls.createAlert(c, { - level: tls.Alert.Level.warning, - description: tls.Alert.Description.close_notify - })); - tls.flush(c); - } + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i - // call handler - c.closed(c); - } + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) +} - // reset TLS connection, do not clear fail flag - c.reset(clearFail); - }; +function fill (headers, object) { + // To fill a Headers object headers with a given object object, run these steps: - return c; -}; + // 1. If object is a sequence, then for each header in object: + // Note: webidl conversion to array has already been done. + if (Array.isArray(object)) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] + // 1. If header does not contain exactly two items, then throw a TypeError. + if (header.length !== 2) { + throw webidl.errors.exception({ + header: 'Headers constructor', + message: `expected name/value pair to be length 2, found ${header.length}.` + }) + } -/* TLS API */ -module.exports = forge.tls = forge.tls || {}; + // 2. Append (header’s first item, header’s second item) to headers. + appendHeader(headers, header[0], header[1]) + } + } else if (typeof object === 'object' && object !== null) { + // Note: null should throw -// expose non-functions -for(var key in tls) { - if(typeof tls[key] !== 'function') { - forge.tls[key] = tls[key]; + // 2. 
Otherwise, object is a record, then for each key → value in object, + // append (key, value) to headers + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) + } + } else { + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) } } -// expose prf_tls1 for testing -forge.tls.prf_tls1 = prf_TLS1; +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) -// expose sha1 hmac method -forge.tls.hmac_sha1 = hmac_sha1; + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } -// expose session cache creation -forge.tls.createSessionCache = tls.createSessionCache; + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } -/** - * Creates a new TLS connection. This does not make any assumptions about the - * transport layer that TLS is working on top of, ie: it does not assume there - * is a TCP/IP connection or establish one. A TLS connection is totally - * abstracted away from the layer is runs on top of, it merely establishes a - * secure channel between a client" and a "server". - * - * A TLS connection contains 4 connection states: pending read and write, and - * current read and write. - * - * At initialization, the current read and write states will be null. Only once - * the security parameters have been set and the keys have been generated can - * the pending states be converted into current states. Current states will be - * updated for each record processed. - * - * A custom certificate verify callback may be provided to check information - * like the common name on the server's certificate. It will be called for - * every certificate in the chain. It has the following signature: - * - * variable func(c, certs, index, preVerify) - * Where: - * c The TLS connection - * verified Set to true if certificate was verified, otherwise the alert - * tls.Alert.Description for why the certificate failed. - * depth The current index in the chain, where 0 is the server's cert. - * certs The certificate chain, *NOTE* if the server was anonymous then - * the chain will be empty. - * - * The function returns true on success and on failure either the appropriate - * tls.Alert.Description or an object with 'alert' set to the appropriate - * tls.Alert.Description and 'message' set to a custom error message. If true - * is not returned then the connection will abort using, in order of - * availability, first the returned alert description, second the preVerify - * alert description, and lastly the default 'bad_certificate'. 
- * - * There are three callbacks that can be used to make use of client-side - * certificates where each takes the TLS connection as the first parameter: - * - * getCertificate(conn, hint) - * The second parameter is a hint as to which certificate should be - * returned. If the connection entity is a client, then the hint will be - * the CertificateRequest message from the server that is part of the - * TLS protocol. If the connection entity is a server, then it will be - * the servername list provided via an SNI extension the ClientHello, if - * one was provided (empty array if not). The hint can be examined to - * determine which certificate to use (advanced). Most implementations - * will just return a certificate. The return value must be a - * PEM-formatted certificate or an array of PEM-formatted certificates - * that constitute a certificate chain, with the first in the array/chain - * being the client's certificate. - * getPrivateKey(conn, certificate) - * The second parameter is an forge.pki X.509 certificate object that - * is associated with the requested private key. The return value must - * be a PEM-formatted private key. - * getSignature(conn, bytes, callback) - * This callback can be used instead of getPrivateKey if the private key - * is not directly accessible in javascript or should not be. For - * instance, a secure external web service could provide the signature - * in exchange for appropriate credentials. The second parameter is a - * string of bytes to be signed that are part of the TLS protocol. These - * bytes are used to verify that the private key for the previously - * provided client-side certificate is accessible to the client. The - * callback is a function that takes 2 parameters, the TLS connection - * and the RSA encrypted (signed) bytes as a string. This callback must - * be called once the signature is ready. - * - * @param options the options for this connection: - * server: true if the connection is server-side, false for client. - * sessionId: a session ID to reuse, null for a new connection. - * caStore: an array of certificates to trust. - * sessionCache: a session cache to use. - * cipherSuites: an optional array of cipher suites to use, - * see tls.CipherSuites. - * connected: function(conn) called when the first handshake completes. - * virtualHost: the virtual server name to use in a TLS SNI extension. - * verifyClient: true to require a client certificate in server mode, - * 'optional' to request one, false not to (default: false). - * verify: a handler used to custom verify certificates in the chain. - * verifyOptions: an object with options for the certificate chain validation. - * See documentation of pki.verifyCertificateChain for possible options. - * verifyOptions.verify is ignored. If you wish to specify a verify handler - * use the verify key. - * getCertificate: an optional callback used to get a certificate or - * a chain of certificates (as an array). - * getPrivateKey: an optional callback used to get a private key. - * getSignature: an optional callback used to get a signature. - * tlsDataReady: function(conn) called when TLS protocol data has been - * prepared and is ready to be used (typically sent over a socket - * connection to its destination), read from conn.tlsData buffer. - * dataReady: function(conn) called when application data has - * been parsed from a TLS record and should be consumed by the - * application, read from conn.data buffer. - * closed: function(conn) called when the connection has been closed. 
- * error: function(conn, error) called when there was an error. - * deflate: function(inBytes) if provided, will deflate TLS records using - * the deflate algorithm if the server supports it. - * inflate: function(inBytes) if provided, will inflate TLS records using - * the deflate algorithm if the server supports it. - * - * @return the new TLS connection. - */ -forge.tls.createConnection = tls.createConnection; + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) -/***/ }), + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} -/***/ 8339: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +class HeadersList { + /** @type {[string, string][]|null} */ + cookies = null -/** - * Utility functions for web applications. - * - * @author Dave Longley - * - * Copyright (c) 2010-2018 Digital Bazaar, Inc. - */ -var forge = __nccwpck_require__(9177); -var baseN = __nccwpck_require__(2300); - -/* Utilities API */ -var util = module.exports = forge.util = forge.util || {}; - -// define setImmediate and nextTick -(function() { - // use native nextTick (unless we're in webpack) - // webpack (or better node-libs-browser polyfill) sets process.browser. - // this way we can detect webpack properly - if(typeof process !== 'undefined' && process.nextTick && !process.browser) { - util.nextTick = process.nextTick; - if(typeof setImmediate === 'function') { - util.setImmediate = setImmediate; + constructor (init) { + if (init instanceof HeadersList) { + this[kHeadersMap] = new Map(init[kHeadersMap]) + this[kHeadersSortedMap] = init[kHeadersSortedMap] + this.cookies = init.cookies === null ? null : [...init.cookies] } else { - // polyfill setImmediate with nextTick, older versions of node - // (those w/o setImmediate) won't totally starve IO - util.setImmediate = util.nextTick; + this[kHeadersMap] = new Map(init) + this[kHeadersSortedMap] = null } - return; } - // polyfill nextTick with native setImmediate - if(typeof setImmediate === 'function') { - util.setImmediate = function() { return setImmediate.apply(undefined, arguments); }; - util.nextTick = function(callback) { - return setImmediate(callback); - }; - return; + // https://fetch.spec.whatwg.org/#header-list-contains + contains (name) { + // A header list list contains a header name name if list + // contains a header whose name is a byte-case-insensitive + // match for name. + name = name.toLowerCase() + + return this[kHeadersMap].has(name) } - /* Note: A polyfill upgrade pattern is used here to allow combining - polyfills. For example, MutationObserver is fast, but blocks UI updates, - so it needs to allow UI updates periodically, so it falls back on - postMessage or setTimeout. 
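// A client-side sketch of the forge.tls API documented above, assuming
// node-forge is available as require('node-forge'); `transport` is a
// placeholder for whatever raw socket actually carries the bytes.
const forge = require('node-forge')

const transport = {
  write (bytes) { /* hand the raw TLS records to the server here */ }
}

const client = forge.tls.createConnection({
  server: false,
  virtualHost: 'example.com',
  caStore: [/* PEM-encoded CA certificates to trust */],
  verify: function (conn, verified, depth, certs) {
    // Accept the chain-verification result; a stricter client could also
    // compare certs[0].subject against the expected host name here.
    return verified
  },
  connected: function (conn) {
    // Handshake complete: application data can now be prepared.
    conn.prepare(forge.util.encodeUtf8('GET / HTTP/1.0\r\n\r\n'))
  },
  tlsDataReady: function (conn) {
    // TLS records are ready to be sent over the transport.
    transport.write(conn.tlsData.getBytes())
  },
  dataReady: function (conn) {
    console.log('received:', forge.util.decodeUtf8(conn.data.getBytes()))
  },
  closed: function (conn) {
    console.log('connection closed')
  },
  error: function (conn, err) {
    console.error('TLS error:', err.message)
  }
})

// Start the handshake, then feed bytes arriving from the transport back into
// the engine, e.g. socket.on('data', chunk => client.process(chunk.toString('binary'))).
client.handshake()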
*/ + clear () { + this[kHeadersMap].clear() + this[kHeadersSortedMap] = null + this.cookies = null + } - // polyfill with setTimeout - util.setImmediate = function(callback) { - setTimeout(callback, 0); - }; + // https://fetch.spec.whatwg.org/#concept-header-list-append + append (name, value) { + this[kHeadersSortedMap] = null - // upgrade polyfill to use postMessage - if(typeof window !== 'undefined' && - typeof window.postMessage === 'function') { - var msg = 'forge.setImmediate'; - var callbacks = []; - util.setImmediate = function(callback) { - callbacks.push(callback); - // only send message when one hasn't been sent in - // the current turn of the event loop - if(callbacks.length === 1) { - window.postMessage(msg, '*'); - } - }; - function handler(event) { - if(event.source === window && event.data === msg) { - event.stopPropagation(); - var copy = callbacks.slice(); - callbacks.length = 0; - copy.forEach(function(callback) { - callback(); - }); - } - } - window.addEventListener('message', handler, true); - } + // 1. If list contains name, then set name to the first such + // header’s name. + const lowercaseName = name.toLowerCase() + const exists = this[kHeadersMap].get(lowercaseName) - // upgrade polyfill to use MutationObserver - if(typeof MutationObserver !== 'undefined') { - // polyfill with MutationObserver - var now = Date.now(); - var attr = true; - var div = document.createElement('div'); - var callbacks = []; - new MutationObserver(function() { - var copy = callbacks.slice(); - callbacks.length = 0; - copy.forEach(function(callback) { - callback(); - }); - }).observe(div, {attributes: true}); - var oldSetImmediate = util.setImmediate; - util.setImmediate = function(callback) { - if(Date.now() - now > 15) { - now = Date.now(); - oldSetImmediate(callback); - } else { - callbacks.push(callback); - // only trigger observer when it hasn't been triggered in - // the current turn of the event loop - if(callbacks.length === 1) { - div.setAttribute('a', attr = !attr); - } - } - }; - } + // 2. Append (name, value) to list. + if (exists) { + const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' + this[kHeadersMap].set(lowercaseName, { + name: exists.name, + value: `${exists.value}${delimiter}${value}` + }) + } else { + this[kHeadersMap].set(lowercaseName, { name, value }) + } - util.nextTick = util.setImmediate; -})(); + if (lowercaseName === 'set-cookie') { + this.cookies ??= [] + this.cookies.push(value) + } + } -// check if running under Node.js -util.isNodejs = - typeof process !== 'undefined' && process.versions && process.versions.node; + // https://fetch.spec.whatwg.org/#concept-header-list-set + set (name, value) { + this[kHeadersSortedMap] = null + const lowercaseName = name.toLowerCase() + if (lowercaseName === 'set-cookie') { + this.cookies = [value] + } -// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while -// it will point to `window` in the main thread. -// To remain compatible with older browsers, we fall back to 'window' if 'self' -// is not available. -util.globalScope = (function() { - if(util.isNodejs) { - return global; + // 1. If list contains name, then set the value of + // the first such header to value and remove the + // others. + // 2. Otherwise, append header (name, value) to list. + this[kHeadersMap].set(lowercaseName, { name, value }) } - return typeof self === 'undefined' ? 
window : self; -})(); + // https://fetch.spec.whatwg.org/#concept-header-list-delete + delete (name) { + this[kHeadersSortedMap] = null -// define isArray -util.isArray = Array.isArray || function(x) { - return Object.prototype.toString.call(x) === '[object Array]'; -}; + name = name.toLowerCase() -// define isArrayBuffer -util.isArrayBuffer = function(x) { - return typeof ArrayBuffer !== 'undefined' && x instanceof ArrayBuffer; -}; + if (name === 'set-cookie') { + this.cookies = null + } -// define isArrayBufferView -util.isArrayBufferView = function(x) { - return x && util.isArrayBuffer(x.buffer) && x.byteLength !== undefined; -}; + this[kHeadersMap].delete(name) + } -/** - * Ensure a bits param is 8, 16, 24, or 32. Used to validate input for - * algorithms where bit manipulation, JavaScript limitations, and/or algorithm - * design only allow for byte operations of a limited size. - * - * @param n number of bits. - * - * Throw Error if n invalid. - */ -function _checkBitsParam(n) { - if(!(n === 8 || n === 16 || n === 24 || n === 32)) { - throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n); + // https://fetch.spec.whatwg.org/#concept-header-list-get + get (name) { + const value = this[kHeadersMap].get(name.toLowerCase()) + + // 1. If list does not contain name, then return null. + // 2. Return the values of all headers in list whose name + // is a byte-case-insensitive match for name, + // separated from each other by 0x2C 0x20, in order. + return value === undefined ? null : value.value } -} -// TODO: set ByteBuffer to best available backing -util.ByteBuffer = ByteStringBuffer; + * [Symbol.iterator] () { + // use the lowercased name + for (const [name, { value }] of this[kHeadersMap]) { + yield [name, value] + } + } -/** Buffer w/BinaryString backing */ + get entries () { + const headers = {} -/** - * Constructor for a binary string backed byte buffer. - * - * @param [b] the bytes to wrap (either encoded as string, one byte per - * character, or as an ArrayBuffer or Typed Array). - */ -function ByteStringBuffer(b) { - // TODO: update to match DataBuffer API - - // the data in this buffer - this.data = ''; - // the pointer for reading from this buffer - this.read = 0; - - if(typeof b === 'string') { - this.data = b; - } else if(util.isArrayBuffer(b) || util.isArrayBufferView(b)) { - if(typeof Buffer !== 'undefined' && b instanceof Buffer) { - this.data = b.toString('binary'); - } else { - // convert native buffer to forge buffer - // FIXME: support native buffers internally instead - var arr = new Uint8Array(b); - try { - this.data = String.fromCharCode.apply(null, arr); - } catch(e) { - for(var i = 0; i < arr.length; ++i) { - this.putByte(arr[i]); - } + if (this[kHeadersMap].size) { + for (const { name, value } of this[kHeadersMap].values()) { + headers[name] = value } } - } else if(b instanceof ByteStringBuffer || - (typeof b === 'object' && typeof b.data === 'string' && - typeof b.read === 'number')) { - // copy existing buffer - this.data = b.data; - this.read = b.read; - } - - // used for v8 optimization - this._constructedStringLength = 0; -} -util.ByteStringBuffer = ByteStringBuffer; - -/* Note: This is an optimization for V8-based browsers. When V8 concatenates - a string, the strings are only joined logically using a "cons string" or - "constructed/concatenated string". These containers keep references to one - another and can result in very large memory usage. 
For example, if a 2MB - string is constructed by concatenating 4 bytes together at a time, the - memory usage will be ~44MB; so ~22x increase. The strings are only joined - together when an operation requiring their joining takes place, such as - substr(). This function is called when adding data to this buffer to ensure - these types of strings are periodically joined to reduce the memory - footprint. */ -var _MAX_CONSTRUCTED_STRING_LENGTH = 4096; -util.ByteStringBuffer.prototype._optimizeConstructedString = function(x) { - this._constructedStringLength += x; - if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) { - // this substr() should cause the constructed string to join - this.data.substr(0, 1); - this._constructedStringLength = 0; + + return headers } -}; +} -/** - * Gets the number of bytes in this buffer. - * - * @return the number of bytes in this buffer. - */ -util.ByteStringBuffer.prototype.length = function() { - return this.data.length - this.read; -}; +// https://fetch.spec.whatwg.org/#headers-class +class Headers { + constructor (init = undefined) { + if (init === kConstruct) { + return + } + this[kHeadersList] = new HeadersList() -/** - * Gets whether or not this buffer is empty. - * - * @return true if this buffer is empty, false if not. - */ -util.ByteStringBuffer.prototype.isEmpty = function() { - return this.length() <= 0; -}; + // The new Headers(init) constructor steps are: -/** - * Puts a byte in this buffer. - * - * @param b the byte to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putByte = function(b) { - return this.putBytes(String.fromCharCode(b)); -}; + // 1. Set this’s guard to "none". + this[kGuard] = 'none' -/** - * Puts a byte in this buffer N times. - * - * @param b the byte to put. - * @param n the number of bytes of value b to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.fillWithByte = function(b, n) { - b = String.fromCharCode(b); - var d = this.data; - while(n > 0) { - if(n & 1) { - d += b; - } - n >>>= 1; - if(n > 0) { - b += b; + // 2. If init is given, then fill this with init. + if (init !== undefined) { + init = webidl.converters.HeadersInit(init) + fill(this, init) } } - this.data = d; - this._optimizeConstructedString(n); - return this; -}; -/** - * Puts bytes in this buffer. - * - * @param bytes the bytes (as a binary encoded string) to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putBytes = function(bytes) { - this.data += bytes; - this._optimizeConstructedString(bytes.length); - return this; -}; + // https://fetch.spec.whatwg.org/#dom-headers-append + append (name, value) { + webidl.brandCheck(this, Headers) -/** - * Puts a UTF-16 encoded string into this buffer. - * - * @param str the string to put. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putString = function(str) { - return this.putBytes(util.encodeUtf8(str)); -}; + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) -/** - * Puts a 16-bit integer in this buffer in big-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt16 = function(i) { - return this.putBytes( - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) -/** - * Puts a 24-bit integer in this buffer in big-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. 
- */ -util.ByteStringBuffer.prototype.putInt24 = function(i) { - return this.putBytes( - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + return appendHeader(this, name, value) + } -/** - * Puts a 32-bit integer in this buffer in big-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt32 = function(i) { - return this.putBytes( - String.fromCharCode(i >> 24 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + // https://fetch.spec.whatwg.org/#dom-headers-delete + delete (name) { + webidl.brandCheck(this, Headers) -/** - * Puts a 16-bit integer in this buffer in little-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt16Le = function(i) { - return this.putBytes( - String.fromCharCode(i & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF)); -}; + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) -/** - * Puts a 24-bit integer in this buffer in little-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt24Le = function(i) { - return this.putBytes( - String.fromCharCode(i & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF)); -}; + name = webidl.converters.ByteString(name) -/** - * Puts a 32-bit integer in this buffer in little-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt32Le = function(i) { - return this.putBytes( - String.fromCharCode(i & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 24 & 0xFF)); -}; + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.delete', + value: name, + type: 'header name' + }) + } -/** - * Puts an n-bit integer in this buffer in big-endian order. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putInt = function(i, n) { - _checkBitsParam(n); - var bytes = ''; - do { - n -= 8; - bytes += String.fromCharCode((i >> n) & 0xFF); - } while(n > 0); - return this.putBytes(bytes); -}; + // 2. If this’s guard is "immutable", then throw a TypeError. + // 3. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 4. Otherwise, if this’s guard is "request-no-cors", name + // is not a no-CORS-safelisted request-header name, and + // name is not a privileged no-CORS request-header name, + // return. + // 5. Otherwise, if this’s guard is "response" and name is + // a forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 6. If this’s header list does not contain name, then + // return. + if (!this[kHeadersList].contains(name)) { + return + } -/** - * Puts a signed n-bit integer in this buffer in big-endian order. Two's - * complement representation is used. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return this buffer. 
- */ -util.ByteStringBuffer.prototype.putSignedInt = function(i, n) { - // putInt checks n - if(i < 0) { - i += 2 << (n - 1); + // 7. Delete name from this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this. + this[kHeadersList].delete(name) } - return this.putInt(i, n); -}; -/** - * Puts the given buffer into this buffer. - * - * @param buffer the buffer to put into this one. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.putBuffer = function(buffer) { - return this.putBytes(buffer.getBytes()); -}; - -/** - * Gets a byte from this buffer and advances the read pointer by 1. - * - * @return the byte. - */ -util.ByteStringBuffer.prototype.getByte = function() { - return this.data.charCodeAt(this.read++); -}; + // https://fetch.spec.whatwg.org/#dom-headers-get + get (name) { + webidl.brandCheck(this, Headers) -/** - * Gets a uint16 from this buffer in big-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.ByteStringBuffer.prototype.getInt16 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 8 ^ - this.data.charCodeAt(this.read + 1)); - this.read += 2; - return rval; -}; + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) -/** - * Gets a uint24 from this buffer in big-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.ByteStringBuffer.prototype.getInt24 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 16 ^ - this.data.charCodeAt(this.read + 1) << 8 ^ - this.data.charCodeAt(this.read + 2)); - this.read += 3; - return rval; -}; + name = webidl.converters.ByteString(name) -/** - * Gets a uint32 from this buffer in big-endian order and advances the read - * pointer by 4. - * - * @return the word. - */ -util.ByteStringBuffer.prototype.getInt32 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 24 ^ - this.data.charCodeAt(this.read + 1) << 16 ^ - this.data.charCodeAt(this.read + 2) << 8 ^ - this.data.charCodeAt(this.read + 3)); - this.read += 4; - return rval; -}; + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.get', + value: name, + type: 'header name' + }) + } -/** - * Gets a uint16 from this buffer in little-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.ByteStringBuffer.prototype.getInt16Le = function() { - var rval = ( - this.data.charCodeAt(this.read) ^ - this.data.charCodeAt(this.read + 1) << 8); - this.read += 2; - return rval; -}; + // 2. Return the result of getting name from this’s header + // list. + return this[kHeadersList].get(name) + } -/** - * Gets a uint24 from this buffer in little-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.ByteStringBuffer.prototype.getInt24Le = function() { - var rval = ( - this.data.charCodeAt(this.read) ^ - this.data.charCodeAt(this.read + 1) << 8 ^ - this.data.charCodeAt(this.read + 2) << 16); - this.read += 3; - return rval; -}; + // https://fetch.spec.whatwg.org/#dom-headers-has + has (name) { + webidl.brandCheck(this, Headers) -/** - * Gets a uint32 from this buffer in little-endian order and advances the read - * pointer by 4. - * - * @return the word. 
- */ -util.ByteStringBuffer.prototype.getInt32Le = function() { - var rval = ( - this.data.charCodeAt(this.read) ^ - this.data.charCodeAt(this.read + 1) << 8 ^ - this.data.charCodeAt(this.read + 2) << 16 ^ - this.data.charCodeAt(this.read + 3) << 24); - this.read += 4; - return rval; -}; + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) -/** - * Gets an n-bit integer from this buffer in big-endian order and advances the - * read pointer by ceil(n/8). - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.ByteStringBuffer.prototype.getInt = function(n) { - _checkBitsParam(n); - var rval = 0; - do { - // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits. - rval = (rval << 8) + this.data.charCodeAt(this.read++); - n -= 8; - } while(n > 0); - return rval; -}; + name = webidl.converters.ByteString(name) -/** - * Gets a signed n-bit integer from this buffer in big-endian order, using - * two's complement, and advances the read pointer by n/8. - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.ByteStringBuffer.prototype.getSignedInt = function(n) { - // getInt checks n - var x = this.getInt(n); - var max = 2 << (n - 2); - if(x >= max) { - x -= max << 1; - } - return x; -}; + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.has', + value: name, + type: 'header name' + }) + } -/** - * Reads bytes out as a binary encoded string and clears them from the - * buffer. Note that the resulting string is binary encoded (in node.js this - * encoding is referred to as `binary`, it is *not* `utf8`). - * - * @param count the number of bytes to read, undefined or null for all. - * - * @return a binary encoded string of bytes. - */ -util.ByteStringBuffer.prototype.getBytes = function(count) { - var rval; - if(count) { - // read count bytes - count = Math.min(this.length(), count); - rval = this.data.slice(this.read, this.read + count); - this.read += count; - } else if(count === 0) { - rval = ''; - } else { - // read all bytes, optimize to only copy when needed - rval = (this.read === 0) ? this.data : this.data.slice(this.read); - this.clear(); + // 2. Return true if this’s header list contains name; + // otherwise false. + return this[kHeadersList].contains(name) } - return rval; -}; -/** - * Gets a binary encoded string of the bytes from this buffer without - * modifying the read pointer. - * - * @param count the number of bytes to get, omit to get all. - * - * @return a string full of binary encoded characters. - */ -util.ByteStringBuffer.prototype.bytes = function(count) { - return (typeof(count) === 'undefined' ? - this.data.slice(this.read) : - this.data.slice(this.read, this.read + count)); -}; + // https://fetch.spec.whatwg.org/#dom-headers-set + set (name, value) { + webidl.brandCheck(this, Headers) -/** - * Gets a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * - * @return the byte. - */ -util.ByteStringBuffer.prototype.at = function(i) { - return this.data.charCodeAt(this.read + i); -}; + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) -/** - * Puts a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * @param b the byte to put. - * - * @return this buffer. 
- */ -util.ByteStringBuffer.prototype.setAt = function(i, b) { - this.data = this.data.substr(0, this.read + i) + - String.fromCharCode(b) + - this.data.substr(this.read + i + 1); - return this; -}; + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) -/** - * Gets the last byte without modifying the read pointer. - * - * @return the last byte. - */ -util.ByteStringBuffer.prototype.last = function() { - return this.data.charCodeAt(this.data.length - 1); -}; + // 1. Normalize value. + value = headerValueNormalize(value) -/** - * Creates a copy of this buffer. - * - * @return the copy. - */ -util.ByteStringBuffer.prototype.copy = function() { - var c = util.createBuffer(this.data); - c.read = this.read; - return c; -}; + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value, + type: 'header value' + }) + } -/** - * Compacts this buffer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.compact = function() { - if(this.read > 0) { - this.data = this.data.slice(this.read); - this.read = 0; + // 3. If this’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 5. Otherwise, if this’s guard is "request-no-cors" and + // name/value is not a no-CORS-safelisted request-header, + // return. + // 6. Otherwise, if this’s guard is "response" and name is a + // forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 7. Set (name, value) in this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this + this[kHeadersList].set(name, value) } - return this; -}; -/** - * Clears this buffer. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.clear = function() { - this.data = ''; - this.read = 0; - return this; -}; + // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie + getSetCookie () { + webidl.brandCheck(this, Headers) -/** - * Shortens this buffer by triming bytes off of the end of this buffer. - * - * @param count the number of bytes to trim off. - * - * @return this buffer. - */ -util.ByteStringBuffer.prototype.truncate = function(count) { - var len = Math.max(0, this.length() - count); - this.data = this.data.substr(this.read, len); - this.read = 0; - return this; -}; + // 1. If this’s header list does not contain `Set-Cookie`, then return « ». + // 2. Return the values of all headers in this’s header list whose name is + // a byte-case-insensitive match for `Set-Cookie`, in order. -/** - * Converts this buffer to a hexadecimal string. - * - * @return a hexadecimal string. - */ -util.ByteStringBuffer.prototype.toHex = function() { - var rval = ''; - for(var i = this.read; i < this.data.length; ++i) { - var b = this.data.charCodeAt(i); - if(b < 16) { - rval += '0'; + const list = this[kHeadersList].cookies + + if (list) { + return [...list] } - rval += b.toString(16); + + return [] } - return rval; -}; -/** - * Converts this buffer to a UTF-16 string (standard JavaScript string). 
- * - * @return a UTF-16 string. - */ -util.ByteStringBuffer.prototype.toString = function() { - return util.decodeUtf8(this.bytes()); -}; + // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine + get [kHeadersSortedMap] () { + if (this[kHeadersList][kHeadersSortedMap]) { + return this[kHeadersList][kHeadersSortedMap] + } -/** End Buffer w/BinaryString backing */ + // 1. Let headers be an empty list of headers with the key being the name + // and value the value. + const headers = [] -/** Buffer w/UInt8Array backing */ + // 2. Let names be the result of convert header names to a sorted-lowercase + // set with all the names of the headers in list. + const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) + const cookies = this[kHeadersList].cookies -/** - * FIXME: Experimental. Do not use yet. - * - * Constructor for an ArrayBuffer-backed byte buffer. - * - * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a - * TypedArray. - * - * If a string is given, its encoding should be provided as an option, - * otherwise it will default to 'binary'. A 'binary' string is encoded such - * that each character is one byte in length and size. - * - * If an ArrayBuffer, DataView, or TypedArray is given, it will be used - * *directly* without any copying. Note that, if a write to the buffer requires - * more space, the buffer will allocate a new backing ArrayBuffer to - * accommodate. The starting read and write offsets for the buffer may be - * given as options. - * - * @param [b] the initial bytes for this buffer. - * @param options the options to use: - * [readOffset] the starting read offset to use (default: 0). - * [writeOffset] the starting write offset to use (default: the - * length of the first parameter). - * [growSize] the minimum amount, in bytes, to grow the buffer by to - * accommodate writes (default: 1024). - * [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the - * first parameter, if it is a string (default: 'binary'). - */ -function DataBuffer(b, options) { - // default options - options = options || {}; + // 3. For each name of names: + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] + // 1. If name is `set-cookie`, then: + if (name === 'set-cookie') { + // 1. Let values be a list of all values of headers in list whose name + // is a byte-case-insensitive match for name, in order. + + // 2. For each value of values: + // 1. Append (name, value) to headers. + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) + } + } else { + // 2. Otherwise: - // pointers for read from/write to buffer - this.read = options.readOffset || 0; - this.growSize = options.growSize || 1024; + // 1. Let value be the result of getting name from list. - var isArrayBuffer = util.isArrayBuffer(b); - var isArrayBufferView = util.isArrayBufferView(b); - if(isArrayBuffer || isArrayBufferView) { - // use ArrayBuffer directly - if(isArrayBuffer) { - this.data = new DataView(b); - } else { - // TODO: adjust read/write offset based on the type of view - // or specify that this must be done in the options ... that the - // offsets are byte-based - this.data = new DataView(b.buffer, b.byteOffset, b.byteLength); - } - this.write = ('writeOffset' in options ? - options.writeOffset : this.data.byteLength); - return; - } + // 2. Assert: value is non-null. 
+ assert(value !== null) - // initialize to empty array buffer and add any given bytes using putBytes - this.data = new DataView(new ArrayBuffer(0)); - this.write = 0; + // 3. Append (name, value) to headers. + headers.push([name, value]) + } + } - if(b !== null && b !== undefined) { - this.putBytes(b); - } + this[kHeadersList][kHeadersSortedMap] = headers - if('writeOffset' in options) { - this.write = options.writeOffset; + // 4. Return headers. + return headers } -} -util.DataBuffer = DataBuffer; -/** - * Gets the number of bytes in this buffer. - * - * @return the number of bytes in this buffer. - */ -util.DataBuffer.prototype.length = function() { - return this.write - this.read; -}; + keys () { + webidl.brandCheck(this, Headers) -/** - * Gets whether or not this buffer is empty. - * - * @return true if this buffer is empty, false if not. - */ -util.DataBuffer.prototype.isEmpty = function() { - return this.length() <= 0; -}; + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } -/** - * Ensures this buffer has enough empty space to accommodate the given number - * of bytes. An optional parameter may be given that indicates a minimum - * amount to grow the buffer if necessary. If the parameter is not given, - * the buffer will be grown by some previously-specified default amount - * or heuristic. - * - * @param amount the number of bytes to accommodate. - * @param [growSize] the minimum amount, in bytes, to grow the buffer by if - * necessary. - */ -util.DataBuffer.prototype.accommodate = function(amount, growSize) { - if(this.length() >= amount) { - return this; + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key' + ) } - growSize = Math.max(growSize || this.growSize, amount); - - // grow buffer - var src = new Uint8Array( - this.data.buffer, this.data.byteOffset, this.data.byteLength); - var dst = new Uint8Array(this.length() + growSize); - dst.set(src); - this.data = new DataView(dst.buffer); - - return this; -}; -/** - * Puts a byte in this buffer. - * - * @param b the byte to put. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putByte = function(b) { - this.accommodate(1); - this.data.setUint8(this.write++, b); - return this; -}; + values () { + webidl.brandCheck(this, Headers) -/** - * Puts a byte in this buffer N times. - * - * @param b the byte to put. - * @param n the number of bytes of value b to put. - * - * @return this buffer. - */ -util.DataBuffer.prototype.fillWithByte = function(b, n) { - this.accommodate(n); - for(var i = 0; i < n; ++i) { - this.data.setUint8(b); - } - return this; -}; + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } -/** - * Puts bytes in this buffer. The bytes may be given as a string, an - * ArrayBuffer, a DataView, or a TypedArray. - * - * @param bytes the bytes to put. - * @param [encoding] the encoding for the first parameter ('binary', 'utf8', - * 'utf16', 'hex'), if it is a string (default: 'binary'). - * - * @return this buffer. 
- */ -util.DataBuffer.prototype.putBytes = function(bytes, encoding) { - if(util.isArrayBufferView(bytes)) { - var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength); - var len = src.byteLength - src.byteOffset; - this.accommodate(len); - var dst = new Uint8Array(this.data.buffer, this.write); - dst.set(src); - this.write += len; - return this; + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'value' + ) } - if(util.isArrayBuffer(bytes)) { - var src = new Uint8Array(bytes); - this.accommodate(src.byteLength); - var dst = new Uint8Array(this.data.buffer); - dst.set(src, this.write); - this.write += src.byteLength; - return this; - } + entries () { + webidl.brandCheck(this, Headers) - // bytes is a util.DataBuffer or equivalent - if(bytes instanceof util.DataBuffer || - (typeof bytes === 'object' && - typeof bytes.read === 'number' && typeof bytes.write === 'number' && - util.isArrayBufferView(bytes.data))) { - var src = new Uint8Array(bytes.data.byteLength, bytes.read, bytes.length()); - this.accommodate(src.byteLength); - var dst = new Uint8Array(bytes.data.byteLength, this.write); - dst.set(src); - this.write += src.byteLength; - return this; - } + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } - if(bytes instanceof util.ByteStringBuffer) { - // copy binary string and process as the same as a string parameter below - bytes = bytes.data; - encoding = 'binary'; + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key+value' + ) } - // string conversion - encoding = encoding || 'binary'; - if(typeof bytes === 'string') { - var view; + /** + * @param {(value: string, key: string, self: Headers) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, Headers) - // decode from string - if(encoding === 'hex') { - this.accommodate(Math.ceil(bytes.length / 2)); - view = new Uint8Array(this.data.buffer, this.write); - this.write += util.binary.hex.decode(bytes, view, this.write); - return this; - } - if(encoding === 'base64') { - this.accommodate(Math.ceil(bytes.length / 4) * 3); - view = new Uint8Array(this.data.buffer, this.write); - this.write += util.binary.base64.decode(bytes, view, this.write); - return this; - } + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) - // encode text as UTF-8 bytes - if(encoding === 'utf8') { - // encode as UTF-8 then decode string as raw binary - bytes = util.encodeUtf8(bytes); - encoding = 'binary'; + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." 
+ ) } - // decode string as raw binary - if(encoding === 'binary' || encoding === 'raw') { - // one byte per character - this.accommodate(bytes.length); - view = new Uint8Array(this.data.buffer, this.write); - this.write += util.binary.raw.decode(view); - return this; + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) } + } - // encode text as UTF-16 bytes - if(encoding === 'utf16') { - // two bytes per character - this.accommodate(bytes.length * 2); - view = new Uint16Array(this.data.buffer, this.write); - this.write += util.text.utf16.encode(view); - return this; - } + [Symbol.for('nodejs.util.inspect.custom')] () { + webidl.brandCheck(this, Headers) - throw new Error('Invalid encoding: ' + encoding); + return this[kHeadersList] } +} - throw Error('Invalid parameter: ' + bytes); -}; +Headers.prototype[Symbol.iterator] = Headers.prototype.entries -/** - * Puts the given buffer into this buffer. - * - * @param buffer the buffer to put into this one. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putBuffer = function(buffer) { - this.putBytes(buffer); - buffer.clear(); - return this; -}; +Object.defineProperties(Headers.prototype, { + append: kEnumerableProperty, + delete: kEnumerableProperty, + get: kEnumerableProperty, + has: kEnumerableProperty, + set: kEnumerableProperty, + getSetCookie: kEnumerableProperty, + keys: kEnumerableProperty, + values: kEnumerableProperty, + entries: kEnumerableProperty, + forEach: kEnumerableProperty, + [Symbol.iterator]: { enumerable: false }, + [Symbol.toStringTag]: { + value: 'Headers', + configurable: true + } +}) -/** - * Puts a string into this buffer. - * - * @param str the string to put. - * @param [encoding] the encoding for the string (default: 'utf16'). - * - * @return this buffer. - */ -util.DataBuffer.prototype.putString = function(str) { - return this.putBytes(str, 'utf16'); -}; +webidl.converters.HeadersInit = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (V[Symbol.iterator]) { + return webidl.converters['sequence>'](V) + } -/** - * Puts a 16-bit integer in this buffer in big-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt16 = function(i) { - this.accommodate(2); - this.data.setInt16(this.write, i); - this.write += 2; - return this; -}; + return webidl.converters['record'](V) + } -/** - * Puts a 24-bit integer in this buffer in big-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt24 = function(i) { - this.accommodate(3); - this.data.setInt16(this.write, i >> 8 & 0xFFFF); - this.data.setInt8(this.write, i >> 16 & 0xFF); - this.write += 3; - return this; -}; + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) +} -/** - * Puts a 32-bit integer in this buffer in big-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt32 = function(i) { - this.accommodate(4); - this.data.setInt32(this.write, i); - this.write += 4; - return this; -}; +module.exports = { + fill, + Headers, + HeadersList +} -/** - * Puts a 16-bit integer in this buffer in little-endian order. - * - * @param i the 16-bit integer. - * - * @return this buffer. 
- */ -util.DataBuffer.prototype.putInt16Le = function(i) { - this.accommodate(2); - this.data.setInt16(this.write, i, true); - this.write += 2; - return this; -}; -/** - * Puts a 24-bit integer in this buffer in little-endian order. - * - * @param i the 24-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt24Le = function(i) { - this.accommodate(3); - this.data.setInt8(this.write, i >> 16 & 0xFF); - this.data.setInt16(this.write, i >> 8 & 0xFFFF, true); - this.write += 3; - return this; -}; +/***/ }), -/** - * Puts a 32-bit integer in this buffer in little-endian order. - * - * @param i the 32-bit integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt32Le = function(i) { - this.accommodate(4); - this.data.setInt32(this.write, i, true); - this.write += 4; - return this; -}; +/***/ 4881: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Puts an n-bit integer in this buffer in big-endian order. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return this buffer. - */ -util.DataBuffer.prototype.putInt = function(i, n) { - _checkBitsParam(n); - this.accommodate(n / 8); - do { - n -= 8; - this.data.setInt8(this.write++, (i >> n) & 0xFF); - } while(n > 0); - return this; -}; +"use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch + + + +const { + Response, + makeNetworkError, + makeAppropriateNetworkError, + filterResponse, + makeResponse +} = __nccwpck_require__(7823) +const { Headers } = __nccwpck_require__(554) +const { Request, makeRequest } = __nccwpck_require__(8359) +const zlib = __nccwpck_require__(9796) +const { + bytesMatch, + makePolicyContainer, + clonePolicyContainer, + requestBadPort, + TAOCheck, + appendRequestOriginHeader, + responseLocationURL, + requestCurrentURL, + setRequestReferrerPolicyOnRedirect, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + createOpaqueTimingInfo, + appendFetchMetadata, + corsCheck, + crossOriginResourcePolicyCheck, + determineRequestsReferrer, + coarsenedSharedCurrentTime, + createDeferredPromise, + isBlobLike, + sameOrigin, + isCancelled, + isAborted, + isErrorLike, + fullyReadBody, + readableStreamClose, + isomorphicEncode, + urlIsLocal, + urlIsHttpHttpsScheme, + urlHasHttpsScheme +} = __nccwpck_require__(2538) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) +const assert = __nccwpck_require__(9491) +const { safelyExtractBody } = __nccwpck_require__(1472) +const { + redirectStatusSet, + nullBodyStatus, + safeMethodsSet, + requestBodyHeader, + subresourceSet, + DOMException +} = __nccwpck_require__(1037) +const { kHeadersList } = __nccwpck_require__(2785) +const EE = __nccwpck_require__(2361) +const { Readable, pipeline } = __nccwpck_require__(2781) +const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3983) +const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(685) +const { TransformStream } = __nccwpck_require__(5356) +const { getGlobalDispatcher } = __nccwpck_require__(1892) +const { webidl } = __nccwpck_require__(1744) +const { STATUS_CODES } = __nccwpck_require__(3685) +const GET_OR_HEAD = ['GET', 'HEAD'] + +/** @type {import('buffer').resolveObjectURL} */ +let resolveObjectURL +let ReadableStream = globalThis.ReadableStream + +class Fetch extends EE { + constructor (dispatcher) { + super() + + this.dispatcher = dispatcher + this.connection = null + this.dump = false + this.state = 'ongoing' + // 2 terminated 
listeners get added per request, + // but only 1 gets removed. If there are 20 redirects, + // 21 listeners will be added. + // See https://github.com/nodejs/undici/issues/1711 + // TODO (fix): Find and fix root cause for leaked listener. + this.setMaxListeners(21) + } + + terminate (reason) { + if (this.state !== 'ongoing') { + return + } -/** - * Puts a signed n-bit integer in this buffer in big-endian order. Two's - * complement representation is used. - * - * @param i the n-bit integer. - * @param n the number of bits in the integer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.putSignedInt = function(i, n) { - _checkBitsParam(n); - this.accommodate(n / 8); - if(i < 0) { - i += 2 << (n - 1); + this.state = 'terminated' + this.connection?.destroy(reason) + this.emit('terminated', reason) } - return this.putInt(i, n); -}; -/** - * Gets a byte from this buffer and advances the read pointer by 1. - * - * @return the byte. - */ -util.DataBuffer.prototype.getByte = function() { - return this.data.getInt8(this.read++); -}; + // https://fetch.spec.whatwg.org/#fetch-controller-abort + abort (error) { + if (this.state !== 'ongoing') { + return + } -/** - * Gets a uint16 from this buffer in big-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.DataBuffer.prototype.getInt16 = function() { - var rval = this.data.getInt16(this.read); - this.read += 2; - return rval; -}; + // 1. Set controller’s state to "aborted". + this.state = 'aborted' -/** - * Gets a uint24 from this buffer in big-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.DataBuffer.prototype.getInt24 = function() { - var rval = ( - this.data.getInt16(this.read) << 8 ^ - this.data.getInt8(this.read + 2)); - this.read += 3; - return rval; -}; + // 2. Let fallbackError be an "AbortError" DOMException. + // 3. Set error to fallbackError if it is not given. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } -/** - * Gets a uint32 from this buffer in big-endian order and advances the read - * pointer by 4. - * - * @return the word. - */ -util.DataBuffer.prototype.getInt32 = function() { - var rval = this.data.getInt32(this.read); - this.read += 4; - return rval; -}; + // 4. Let serializedError be StructuredSerialize(error). + // If that threw an exception, catch it, and let + // serializedError be StructuredSerialize(fallbackError). -/** - * Gets a uint16 from this buffer in little-endian order and advances the read - * pointer by 2. - * - * @return the uint16. - */ -util.DataBuffer.prototype.getInt16Le = function() { - var rval = this.data.getInt16(this.read, true); - this.read += 2; - return rval; -}; + // 5. Set controller’s serialized abort reason to serializedError. + this.serializedAbortReason = error -/** - * Gets a uint24 from this buffer in little-endian order and advances the read - * pointer by 3. - * - * @return the uint24. - */ -util.DataBuffer.prototype.getInt24Le = function() { - var rval = ( - this.data.getInt8(this.read) ^ - this.data.getInt16(this.read + 1, true) << 8); - this.read += 3; - return rval; -}; + this.connection?.destroy(error) + this.emit('terminated', error) + } +} -/** - * Gets a uint32 from this buffer in little-endian order and advances the read - * pointer by 4. - * - * @return the word. 
- */ -util.DataBuffer.prototype.getInt32Le = function() { - var rval = this.data.getInt32(this.read, true); - this.read += 4; - return rval; -}; +// https://fetch.spec.whatwg.org/#fetch-method +function fetch (input, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) -/** - * Gets an n-bit integer from this buffer in big-endian order and advances the - * read pointer by n/8. - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.DataBuffer.prototype.getInt = function(n) { - _checkBitsParam(n); - var rval = 0; - do { - // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits. - rval = (rval << 8) + this.data.getInt8(this.read++); - n -= 8; - } while(n > 0); - return rval; -}; + // 1. Let p be a new promise. + const p = createDeferredPromise() -/** - * Gets a signed n-bit integer from this buffer in big-endian order, using - * two's complement, and advances the read pointer by n/8. - * - * @param n the number of bits in the integer (8, 16, 24, or 32). - * - * @return the integer. - */ -util.DataBuffer.prototype.getSignedInt = function(n) { - // getInt checks n - var x = this.getInt(n); - var max = 2 << (n - 2); - if(x >= max) { - x -= max << 1; - } - return x; -}; + // 2. Let requestObject be the result of invoking the initial value of + // Request as constructor with input and init as arguments. If this throws + // an exception, reject p with it and return p. + let requestObject -/** - * Reads bytes out as a binary encoded string and clears them from the - * buffer. - * - * @param count the number of bytes to read, undefined or null for all. - * - * @return a binary encoded string of bytes. - */ -util.DataBuffer.prototype.getBytes = function(count) { - // TODO: deprecate this method, it is poorly named and - // this.toString('binary') replaces it - // add a toTypedArray()/toArrayBuffer() function - var rval; - if(count) { - // read count bytes - count = Math.min(this.length(), count); - rval = this.data.slice(this.read, this.read + count); - this.read += count; - } else if(count === 0) { - rval = ''; - } else { - // read all bytes, optimize to only copy when needed - rval = (this.read === 0) ? this.data : this.data.slice(this.read); - this.clear(); + try { + requestObject = new Request(input, init) + } catch (e) { + p.reject(e) + return p.promise } - return rval; -}; - -/** - * Gets a binary encoded string of the bytes from this buffer without - * modifying the read pointer. - * - * @param count the number of bytes to get, omit to get all. - * - * @return a string full of binary encoded characters. - */ -util.DataBuffer.prototype.bytes = function(count) { - // TODO: deprecate this method, it is poorly named, add "getString()" - return (typeof(count) === 'undefined' ? - this.data.slice(this.read) : - this.data.slice(this.read, this.read + count)); -}; -/** - * Gets a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * - * @return the byte. - */ -util.DataBuffer.prototype.at = function(i) { - return this.data.getUint8(this.read + i); -}; + // 3. Let request be requestObject’s request. + const request = requestObject[kState] -/** - * Puts a byte at the given index without modifying the read pointer. - * - * @param i the byte index. - * @param b the byte to put. - * - * @return this buffer. - */ -util.DataBuffer.prototype.setAt = function(i, b) { - this.data.setUint8(i, b); - return this; -}; + // 4. 
If requestObject’s signal’s aborted flag is set, then: + if (requestObject.signal.aborted) { + // 1. Abort the fetch() call with p, request, null, and + // requestObject’s signal’s abort reason. + abortFetch(p, request, null, requestObject.signal.reason) -/** - * Gets the last byte without modifying the read pointer. - * - * @return the last byte. - */ -util.DataBuffer.prototype.last = function() { - return this.data.getUint8(this.write - 1); -}; + // 2. Return p. + return p.promise + } -/** - * Creates a copy of this buffer. - * - * @return the copy. - */ -util.DataBuffer.prototype.copy = function() { - return new util.DataBuffer(this); -}; + // 5. Let globalObject be request’s client’s global object. + const globalObject = request.client.globalObject -/** - * Compacts this buffer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.compact = function() { - if(this.read > 0) { - var src = new Uint8Array(this.data.buffer, this.read); - var dst = new Uint8Array(src.byteLength); - dst.set(src); - this.data = new DataView(dst); - this.write -= this.read; - this.read = 0; + // 6. If globalObject is a ServiceWorkerGlobalScope object, then set + // request’s service-workers mode to "none". + if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { + request.serviceWorkers = 'none' } - return this; -}; -/** - * Clears this buffer. - * - * @return this buffer. - */ -util.DataBuffer.prototype.clear = function() { - this.data = new DataView(new ArrayBuffer(0)); - this.read = this.write = 0; - return this; -}; + // 7. Let responseObject be null. + let responseObject = null -/** - * Shortens this buffer by triming bytes off of the end of this buffer. - * - * @param count the number of bytes to trim off. - * - * @return this buffer. - */ -util.DataBuffer.prototype.truncate = function(count) { - this.write = Math.max(0, this.length() - count); - this.read = Math.min(this.read, this.write); - return this; -}; + // 8. Let relevantRealm be this’s relevant Realm. + const relevantRealm = null -/** - * Converts this buffer to a hexadecimal string. - * - * @return a hexadecimal string. - */ -util.DataBuffer.prototype.toHex = function() { - var rval = ''; - for(var i = this.read; i < this.data.byteLength; ++i) { - var b = this.data.getUint8(i); - if(b < 16) { - rval += '0'; - } - rval += b.toString(16); - } - return rval; -}; + // 9. Let locallyAborted be false. + let locallyAborted = false -/** - * Converts this buffer to a string, using the given encoding. If no - * encoding is given, 'utf8' (UTF-8) is used. - * - * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex', - * 'base64' (default: 'utf8'). - * - * @return a string representation of the bytes in this buffer. - */ -util.DataBuffer.prototype.toString = function(encoding) { - var view = new Uint8Array(this.data, this.read, this.length()); - encoding = encoding || 'utf8'; + // 10. Let controller be null. + let controller = null - // encode to string - if(encoding === 'binary' || encoding === 'raw') { - return util.binary.raw.encode(view); - } - if(encoding === 'hex') { - return util.binary.hex.encode(view); - } - if(encoding === 'base64') { - return util.binary.base64.encode(view); - } + // 11. Add the following abort steps to requestObject’s signal: + addAbortListener( + requestObject.signal, + () => { + // 1. Set locallyAborted to true. 
+ locallyAborted = true - // decode to text - if(encoding === 'utf8') { - return util.text.utf8.decode(view); - } - if(encoding === 'utf16') { - return util.text.utf16.decode(view); - } + // 2. Assert: controller is non-null. + assert(controller != null) - throw new Error('Invalid encoding: ' + encoding); -}; + // 3. Abort controller with requestObject’s signal’s abort reason. + controller.abort(requestObject.signal.reason) -/** End Buffer w/UInt8Array backing */ + // 4. Abort the fetch() call with p, request, responseObject, + // and requestObject’s signal’s abort reason. + abortFetch(p, request, responseObject, requestObject.signal.reason) + } + ) -/** - * Creates a buffer that stores bytes. A value may be given to populate the - * buffer with data. This value can either be string of encoded bytes or a - * regular string of characters. When passing a string of binary encoded - * bytes, the encoding `raw` should be given. This is also the default. When - * passing a string of characters, the encoding `utf8` should be given. - * - * @param [input] a string with encoded bytes to store in the buffer. - * @param [encoding] (default: 'raw', other: 'utf8'). - */ -util.createBuffer = function(input, encoding) { - // TODO: deprecate, use new ByteBuffer() instead - encoding = encoding || 'raw'; - if(input !== undefined && encoding === 'utf8') { - input = util.encodeUtf8(input); - } - return new util.ByteBuffer(input); -}; + // 12. Let handleFetchDone given response response be to finalize and + // report timing with response, globalObject, and "fetch". + const handleFetchDone = (response) => + finalizeAndReportTiming(response, 'fetch') -/** - * Fills a string with a particular value. If you want the string to be a byte - * string, pass in String.fromCharCode(theByte). - * - * @param c the character to fill the string with, use String.fromCharCode - * to fill the string with a byte value. - * @param n the number of characters of value c to fill with. - * - * @return the filled string. - */ -util.fillString = function(c, n) { - var s = ''; - while(n > 0) { - if(n & 1) { - s += c; - } - n >>>= 1; - if(n > 0) { - c += c; + // 13. Set controller to the result of calling fetch given request, + // with processResponseEndOfBody set to handleFetchDone, and processResponse + // given response being these substeps: + + const processResponse = (response) => { + // 1. If locallyAborted is true, terminate these substeps. + if (locallyAborted) { + return Promise.resolve() } - } - return s; -}; -/** - * Performs a per byte XOR between two byte strings and returns the result as a - * string of bytes. - * - * @param s1 first string of bytes. - * @param s2 second string of bytes. - * @param n the number of bytes to XOR. - * - * @return the XOR'd result. - */ -util.xorBytes = function(s1, s2, n) { - var s3 = ''; - var b = ''; - var t = ''; - var i = 0; - var c = 0; - for(; n > 0; --n, ++i) { - b = s1.charCodeAt(i) ^ s2.charCodeAt(i); - if(c >= 10) { - s3 += t; - t = ''; - c = 0; - } - t += String.fromCharCode(b); - ++c; - } - s3 += t; - return s3; -}; + // 2. If response’s aborted flag is set, then: + if (response.aborted) { + // 1. Let deserializedError be the result of deserialize a serialized + // abort reason given controller’s serialized abort reason and + // relevantRealm. -/** - * Converts a hex string into a 'binary' encoded string of bytes. - * - * @param hex the hexadecimal string to convert. - * - * @return the binary-encoded string of bytes. 
- */ -util.hexToBytes = function(hex) { - // TODO: deprecate: "Deprecated. Use util.binary.hex.decode instead." - var rval = ''; - var i = 0; - if(hex.length & 1 == 1) { - // odd number of characters, convert first character alone - i = 1; - rval += String.fromCharCode(parseInt(hex[0], 16)); - } - // convert 2 characters (1 byte) at a time - for(; i < hex.length; i += 2) { - rval += String.fromCharCode(parseInt(hex.substr(i, 2), 16)); - } - return rval; -}; + // 2. Abort the fetch() call with p, request, responseObject, and + // deserializedError. -/** - * Converts a 'binary' encoded string of bytes to hex. - * - * @param bytes the byte string to convert. - * - * @return the string of hexadecimal characters. - */ -util.bytesToHex = function(bytes) { - // TODO: deprecate: "Deprecated. Use util.binary.hex.encode instead." - return util.createBuffer(bytes).toHex(); -}; + abortFetch(p, request, responseObject, controller.serializedAbortReason) + return Promise.resolve() + } -/** - * Converts an 32-bit integer to 4-big-endian byte string. - * - * @param i the integer. - * - * @return the byte string. - */ -util.int32ToBytes = function(i) { - return ( - String.fromCharCode(i >> 24 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; + // 3. If response is a network error, then reject p with a TypeError + // and terminate these substeps. + if (response.type === 'error') { + p.reject( + Object.assign(new TypeError('fetch failed'), { cause: response.error }) + ) + return Promise.resolve() + } -// base64 characters, reverse mapping -var _base64 = - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; -var _base64Idx = [ -/*43 -43 = 0*/ -/*'+', 1, 2, 3,'/' */ - 62, -1, -1, -1, 63, + // 4. Set responseObject to the result of creating a Response object, + // given response, "immutable", and relevantRealm. + responseObject = new Response() + responseObject[kState] = response + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm -/*'0','1','2','3','4','5','6','7','8','9' */ - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + // 5. Resolve p with responseObject. + p.resolve(responseObject) + } -/*15, 16, 17,'=', 19, 20, 21 */ - -1, -1, -1, 64, -1, -1, -1, + controller = fetching({ + request, + processResponseEndOfBody: handleFetchDone, + processResponse, + dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici + }) -/*65 - 43 = 22*/ -/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + // 14. Return p. + return p.promise +} -/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */ - 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, +// https://fetch.spec.whatwg.org/#finalize-and-report-timing +function finalizeAndReportTiming (response, initiatorType = 'other') { + // 1. If response is an aborted network error, then return. + if (response.type === 'error' && response.aborted) { + return + } -/*91 - 43 = 48 */ -/*48, 49, 50, 51, 52, 53 */ - -1, -1, -1, -1, -1, -1, + // 2. If response’s URL list is null or empty, then return. + if (!response.urlList?.length) { + return + } -/*97 - 43 = 54*/ -/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */ - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + // 3. Let originalURL be response’s URL list[0]. 
+ const originalURL = response.urlList[0] -/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */ - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51 -]; + // 4. Let timingInfo be response’s timing info. + let timingInfo = response.timingInfo -// base58 characters (Bitcoin alphabet) -var _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'; + // 5. Let cacheState be response’s cache state. + let cacheState = response.cacheState -/** - * Base64 encodes a 'binary' encoded string of bytes. - * - * @param input the binary encoded string of bytes to base64-encode. - * @param maxline the maximum number of encoded characters per line to use, - * defaults to none. - * - * @return the base64-encoded output. - */ -util.encode64 = function(input, maxline) { - // TODO: deprecate: "Deprecated. Use util.binary.base64.encode instead." - var line = ''; - var output = ''; - var chr1, chr2, chr3; - var i = 0; - while(i < input.length) { - chr1 = input.charCodeAt(i++); - chr2 = input.charCodeAt(i++); - chr3 = input.charCodeAt(i++); - - // encode 4 character group - line += _base64.charAt(chr1 >> 2); - line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4)); - if(isNaN(chr2)) { - line += '=='; - } else { - line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6)); - line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63); - } + // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. + if (!urlIsHttpHttpsScheme(originalURL)) { + return + } - if(maxline && line.length > maxline) { - output += line.substr(0, maxline) + '\r\n'; - line = line.substr(maxline); - } + // 7. If timingInfo is null, then return. + if (timingInfo === null) { + return } - output += line; - return output; -}; -/** - * Base64 decodes a string into a 'binary' encoded string of bytes. - * - * @param input the base64-encoded input. - * - * @return the binary encoded string. - */ -util.decode64 = function(input) { - // TODO: deprecate: "Deprecated. Use util.binary.base64.decode instead." - - // remove all non-base64 characters - input = input.replace(/[^A-Za-z0-9\+\/\=]/g, ''); - - var output = ''; - var enc1, enc2, enc3, enc4; - var i = 0; - - while(i < input.length) { - enc1 = _base64Idx[input.charCodeAt(i++) - 43]; - enc2 = _base64Idx[input.charCodeAt(i++) - 43]; - enc3 = _base64Idx[input.charCodeAt(i++) - 43]; - enc4 = _base64Idx[input.charCodeAt(i++) - 43]; - - output += String.fromCharCode((enc1 << 2) | (enc2 >> 4)); - if(enc3 !== 64) { - // decoded at least 2 bytes - output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2)); - if(enc4 !== 64) { - // decoded 3 bytes - output += String.fromCharCode(((enc3 & 3) << 6) | enc4); - } - } + // 8. If response’s timing allow passed flag is not set, then: + if (!response.timingAllowPassed) { + // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. + timingInfo = createOpaqueTimingInfo({ + startTime: timingInfo.startTime + }) + + // 2. Set cacheState to the empty string. + cacheState = '' } - return output; -}; + // 9. Set timingInfo’s end time to the coarsened shared current time + // given global’s relevant settings object’s cross-origin isolated + // capability. + // TODO: given global’s relevant settings object’s cross-origin isolated + // capability? + timingInfo.endTime = coarsenedSharedCurrentTime() -/** - * Encodes the given string of characters (a standard JavaScript - * string) as a binary encoded string where the bytes represent - * a UTF-8 encoded string of characters. 
Non-ASCII characters will be - * encoded as multiple bytes according to UTF-8. - * - * @param str a standard string of characters to encode. - * - * @return the binary encoded string. - */ -util.encodeUtf8 = function(str) { - return unescape(encodeURIComponent(str)); -}; + // 10. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo -/** - * Decodes a binary encoded string that contains bytes that - * represent a UTF-8 encoded string of characters -- into a - * string of characters (a standard JavaScript string). - * - * @param str the binary encoded string to decode. - * - * @return the resulting standard string of characters. - */ -util.decodeUtf8 = function(str) { - return decodeURIComponent(escape(str)); -}; + // 11. Mark resource timing for timingInfo, originalURL, initiatorType, + // global, and cacheState. + markResourceTiming( + timingInfo, + originalURL, + initiatorType, + globalThis, + cacheState + ) +} -// binary encoding/decoding tools -// FIXME: Experimental. Do not use yet. -util.binary = { - raw: {}, - hex: {}, - base64: {}, - base58: {}, - baseN : { - encode: baseN.encode, - decode: baseN.decode +// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing +function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { + if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { + performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) } -}; +} -/** - * Encodes a Uint8Array as a binary-encoded string. This encoding uses - * a value between 0 and 255 for each character. - * - * @param bytes the Uint8Array to encode. - * - * @return the binary-encoded string. - */ -util.binary.raw.encode = function(bytes) { - return String.fromCharCode.apply(null, bytes); -}; +// https://fetch.spec.whatwg.org/#abort-fetch +function abortFetch (p, request, responseObject, error) { + // Note: AbortSignal.reason was added in node v17.2.0 + // which would give us an undefined error to reject with. + // Remove this once node v16 is no longer supported. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } -/** - * Decodes a binary-encoded string to a Uint8Array. This encoding uses - * a value between 0 and 255 for each character. - * - * @param str the binary-encoded string to decode. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.binary.raw.decode = function(str, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(str.length); + // 1. Reject promise with error. + p.reject(error) + + // 2. If request’s body is not null and is readable, then cancel request’s + // body with error. + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) } - offset = offset || 0; - var j = offset; - for(var i = 0; i < str.length; ++i) { - out[j++] = str.charCodeAt(i); + + // 3. If responseObject is null, then return. + if (responseObject == null) { + return } - return output ? (j - offset) : out; -}; -/** - * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or - * ByteBuffer as a string of hexadecimal characters. - * - * @param bytes the bytes to convert. 
- * - * @return the string of hexadecimal characters. - */ -util.binary.hex.encode = util.bytesToHex; + // 4. Let response be responseObject’s response. + const response = responseObject[kState] -/** - * Decodes a hex-encoded string to a Uint8Array. - * - * @param hex the hexadecimal string to convert. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.binary.hex.decode = function(hex, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(Math.ceil(hex.length / 2)); - } - offset = offset || 0; - var i = 0, j = offset; - if(hex.length & 1) { - // odd number of characters, convert first character alone - i = 1; - out[j++] = parseInt(hex[0], 16); - } - // convert 2 characters (1 byte) at a time - for(; i < hex.length; i += 2) { - out[j++] = parseInt(hex.substr(i, 2), 16); - } - return output ? (j - offset) : out; -}; + // 5. If response’s body is not null and is readable, then error response’s + // body with error. + if (response.body != null && isReadable(response.body?.stream)) { + response.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } +} -/** - * Base64-encodes a Uint8Array. - * - * @param input the Uint8Array to encode. - * @param maxline the maximum number of encoded characters per line to use, - * defaults to none. - * - * @return the base64-encoded output string. - */ -util.binary.base64.encode = function(input, maxline) { - var line = ''; - var output = ''; - var chr1, chr2, chr3; - var i = 0; - while(i < input.byteLength) { - chr1 = input[i++]; - chr2 = input[i++]; - chr3 = input[i++]; - - // encode 4 character group - line += _base64.charAt(chr1 >> 2); - line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4)); - if(isNaN(chr2)) { - line += '=='; - } else { - line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6)); - line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63); - } +// https://fetch.spec.whatwg.org/#fetching +function fetching ({ + request, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseEndOfBody, + processResponseConsumeBody, + useParallelQueue = false, + dispatcher // undici +}) { + // 1. Let taskDestination be null. + let taskDestination = null + + // 2. Let crossOriginIsolatedCapability be false. + let crossOriginIsolatedCapability = false + + // 3. If request’s client is non-null, then: + if (request.client != null) { + // 1. Set taskDestination to request’s client’s global object. + taskDestination = request.client.globalObject + + // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin + // isolated capability. + crossOriginIsolatedCapability = + request.client.crossOriginIsolatedCapability + } + + // 4. If useParallelQueue is true, then set taskDestination to the result of + // starting a new parallel queue. + // TODO + + // 5. Let timingInfo be a new fetch timing info whose start time and + // post-redirect start time are the coarsened shared current time given + // crossOriginIsolatedCapability. 
+ const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) + const timingInfo = createOpaqueTimingInfo({ + startTime: currenTime + }) - if(maxline && line.length > maxline) { - output += line.substr(0, maxline) + '\r\n'; - line = line.substr(maxline); + // 6. Let fetchParams be a new fetch params whose + // request is request, + // timing info is timingInfo, + // process request body chunk length is processRequestBodyChunkLength, + // process request end-of-body is processRequestEndOfBody, + // process response is processResponse, + // process response consume body is processResponseConsumeBody, + // process response end-of-body is processResponseEndOfBody, + // task destination is taskDestination, + // and cross-origin isolated capability is crossOriginIsolatedCapability. + const fetchParams = { + controller: new Fetch(dispatcher), + request, + timingInfo, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseConsumeBody, + processResponseEndOfBody, + taskDestination, + crossOriginIsolatedCapability + } + + // 7. If request’s body is a byte sequence, then set request’s body to + // request’s body as a body. + // NOTE: Since fetching is only called from fetch, body should already be + // extracted. + assert(!request.body || request.body.stream) + + // 8. If request’s window is "client", then set request’s window to request’s + // client, if request’s client’s global object is a Window object; otherwise + // "no-window". + if (request.window === 'client') { + // TODO: What if request.client is null? + request.window = + request.client?.globalObject?.constructor?.name === 'Window' + ? request.client + : 'no-window' + } + + // 9. If request’s origin is "client", then set request’s origin to request’s + // client’s origin. + if (request.origin === 'client') { + // TODO: What if request.client is null? + request.origin = request.client?.origin + } + + // 10. If all of the following conditions are true: + // TODO + + // 11. If request’s policy container is "client", then: + if (request.policyContainer === 'client') { + // 1. If request’s client is non-null, then set request’s policy + // container to a clone of request’s client’s policy container. [HTML] + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer + ) + } else { + // 2. Otherwise, set request’s policy container to a new policy + // container. + request.policyContainer = makePolicyContainer() } } - output += line; - return output; -}; -/** - * Decodes a base64-encoded string to a Uint8Array. - * - * @param input the base64-encoded input string. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. 
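For reference on the removed side of the hunk above: the node-forge hex/base64 helpers being dropped are thin binary-string encoders. A minimal sketch of their public API, assuming forge is required directly rather than through the bundle:

// Illustrative sketch of the (removed) node-forge binary-string encoders.
var forge = require('node-forge');

var bytes = forge.util.hexToBytes('48656c6c6f');            // binary string 'Hello'
var b64 = forge.util.encode64(bytes);                       // 'SGVsbG8='
var hex = forge.util.bytesToHex(forge.util.decode64(b64));  // '48656c6c6f'
console.log(b64, hex);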
- */ -util.binary.base64.decode = function(input, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(Math.ceil(input.length / 4) * 3); - } - - // remove all non-base64 characters - input = input.replace(/[^A-Za-z0-9\+\/\=]/g, ''); - - offset = offset || 0; - var enc1, enc2, enc3, enc4; - var i = 0, j = offset; - - while(i < input.length) { - enc1 = _base64Idx[input.charCodeAt(i++) - 43]; - enc2 = _base64Idx[input.charCodeAt(i++) - 43]; - enc3 = _base64Idx[input.charCodeAt(i++) - 43]; - enc4 = _base64Idx[input.charCodeAt(i++) - 43]; - - out[j++] = (enc1 << 2) | (enc2 >> 4); - if(enc3 !== 64) { - // decoded at least 2 bytes - out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2); - if(enc4 !== 64) { - // decoded 3 bytes - out[j++] = ((enc3 & 3) << 6) | enc4; - } - } - } + // 12. If request’s header list does not contain `Accept`, then: + if (!request.headersList.contains('accept')) { + // 1. Let value be `*/*`. + const value = '*/*' - // make sure result is the exact decoded length - return output ? (j - offset) : out.subarray(0, j); -}; + // 2. A user agent should set value to the first matching statement, if + // any, switching on request’s destination: + // "document" + // "frame" + // "iframe" + // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` + // "image" + // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` + // "style" + // `text/css,*/*;q=0.1` + // TODO -// add support for base58 encoding/decoding with Bitcoin alphabet -util.binary.base58.encode = function(input, maxline) { - return util.binary.baseN.encode(input, _base58, maxline); -}; -util.binary.base58.decode = function(input, maxline) { - return util.binary.baseN.decode(input, _base58, maxline); -}; + // 3. Append `Accept`/value to request’s header list. + request.headersList.append('accept', value) + } -// text encoding/decoding tools -// FIXME: Experimental. Do not use yet. -util.text = { - utf8: {}, - utf16: {} -}; + // 13. If request’s header list does not contain `Accept-Language`, then + // user agents should append `Accept-Language`/an appropriate value to + // request’s header list. + if (!request.headersList.contains('accept-language')) { + request.headersList.append('accept-language', '*') + } -/** - * Encodes the given string as UTF-8 in a Uint8Array. - * - * @param str the string to encode. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.text.utf8.encode = function(str, output, offset) { - str = util.encodeUtf8(str); - var out = output; - if(!out) { - out = new Uint8Array(str.length); - } - offset = offset || 0; - var j = offset; - for(var i = 0; i < str.length; ++i) { - out[j++] = str.charCodeAt(i); - } - return output ? (j - offset) : out; -}; + // 14. If request’s priority is null, then use request’s initiator and + // destination appropriately in setting request’s priority to a + // user-agent-defined object. + if (request.priority === null) { + // TODO + } -/** - * Decodes the UTF-8 contents from a Uint8Array. - * - * @param bytes the Uint8Array to decode. - * - * @return the resulting string. - */ -util.text.utf8.decode = function(bytes) { - return util.decodeUtf8(String.fromCharCode.apply(null, bytes)); -}; + // 15. 
If request is a subresource request, then: + if (subresourceSet.has(request.destination)) { + // TODO + } -/** - * Encodes the given string as UTF-16 in a Uint8Array. - * - * @param str the string to encode. - * @param [output] an optional Uint8Array to write the output to; if it - * is too small, an exception will be thrown. - * @param [offset] the start offset for writing to the output (default: 0). - * - * @return the Uint8Array or the number of bytes written if output was given. - */ -util.text.utf16.encode = function(str, output, offset) { - var out = output; - if(!out) { - out = new Uint8Array(str.length * 2); - } - var view = new Uint16Array(out.buffer); - offset = offset || 0; - var j = offset; - var k = offset; - for(var i = 0; i < str.length; ++i) { - view[k++] = str.charCodeAt(i); - j += 2; - } - return output ? (j - offset) : out; -}; + // 16. Run main fetch given fetchParams. + mainFetch(fetchParams) + .catch(err => { + fetchParams.controller.terminate(err) + }) -/** - * Decodes the UTF-16 contents from a Uint8Array. - * - * @param bytes the Uint8Array to decode. - * - * @return the resulting string. - */ -util.text.utf16.decode = function(bytes) { - return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer)); -}; + // 17. Return fetchParam's controller + return fetchParams.controller +} -/** - * Deflates the given data using a flash interface. - * - * @param api the flash interface. - * @param bytes the data. - * @param raw true to return only raw deflate data, false to include zlib - * header and trailer. - * - * @return the deflated data as a string. - */ -util.deflate = function(api, bytes, raw) { - bytes = util.decode64(api.deflate(util.encode64(bytes)).rval); +// https://fetch.spec.whatwg.org/#concept-main-fetch +async function mainFetch (fetchParams, recursive = false) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - // strip zlib header and trailer if necessary - if(raw) { - // zlib header is 2 bytes (CMF,FLG) where FLG indicates that - // there is a 4-byte DICT (alder-32) block before the data if - // its 5th bit is set - var start = 2; - var flg = bytes.charCodeAt(1); - if(flg & 0x20) { - start = 6; - } - // zlib trailer is 4 bytes of adler-32 - bytes = bytes.substring(start, bytes.length - 4); + // 2. Let response be null. + let response = null + + // 3. If request’s local-URLs-only flag is set and request’s current URL is + // not local, then set response to a network error. + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { + response = makeNetworkError('local URLs only') } - return bytes; -}; + // 4. Run report Content Security Policy violations for request. + // TODO -/** - * Inflates the given data using a flash interface. - * - * @param api the flash interface. - * @param bytes the data. - * @param raw true if the incoming data has no zlib header or trailer and is - * raw DEFLATE data. - * - * @return the inflated data as a string, null on error. - */ -util.inflate = function(api, bytes, raw) { - // TODO: add zlib header and trailer if necessary/possible - var rval = api.inflate(util.encode64(bytes)).rval; - return (rval === null) ? null : util.decode64(rval); -}; + // 5. Upgrade request to a potentially trustworthy URL, if appropriate. + tryUpgradeRequestToAPotentiallyTrustworthyURL(request) -/** - * Sets a storage object. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param obj the storage object, null to remove. 
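A usage-level note on the Accept / Accept-Language defaulting added a little further up (not part of the patch): the `*/*` and `*` values are appended only when the caller has not set those headers, so an explicit header wins. The URL below is a placeholder.

// Illustrative only: an explicit Accept header suppresses the '*/*' fallback.
async function fetchJson () {
  const res = await fetch('https://example.com/api', {
    headers: { accept: 'application/json' } // sent as-is; no '*/*' is appended
  })
  return res.json()
}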
- */ -var _setStorageObject = function(api, id, obj) { - if(!api) { - throw new Error('WebStorage not available.'); + // 6. If should request be blocked due to a bad port, should fetching request + // be blocked as mixed content, or should request be blocked by Content + // Security Policy returns blocked, then set response to a network error. + if (requestBadPort(request) === 'blocked') { + response = makeNetworkError('bad port') } + // TODO: should fetching request be blocked as mixed content? + // TODO: should request be blocked by Content Security Policy? - var rval; - if(obj === null) { - rval = api.removeItem(id); - } else { - // json-encode and base64-encode object - obj = util.encode64(JSON.stringify(obj)); - rval = api.setItem(id, obj); + // 7. If request’s referrer policy is the empty string, then set request’s + // referrer policy to request’s policy container’s referrer policy. + if (request.referrerPolicy === '') { + request.referrerPolicy = request.policyContainer.referrerPolicy } - // handle potential flash error - if(typeof(rval) !== 'undefined' && rval.rval !== true) { - var error = new Error(rval.error.message); - error.id = rval.error.id; - error.name = rval.error.name; - throw error; + // 8. If request’s referrer is not "no-referrer", then set request’s + // referrer to the result of invoking determine request’s referrer. + if (request.referrer !== 'no-referrer') { + request.referrer = determineRequestsReferrer(request) } -}; -/** - * Gets a storage object. - * - * @param api the storage interface. - * @param id the storage ID to use. - * - * @return the storage object entry or null if none exists. - */ -var _getStorageObject = function(api, id) { - if(!api) { - throw new Error('WebStorage not available.'); - } - - // get the existing entry - var rval = api.getItem(id); - - /* Note: We check api.init because we can't do (api == localStorage) - on IE because of "Class doesn't support Automation" exception. Only - the flash api has an init method so this works too, but we need a - better solution in the future. */ - - // flash returns item wrapped in an object, handle special case - if(api.init) { - if(rval.rval === null) { - if(rval.error) { - var error = new Error(rval.error.message); - error.id = rval.error.id; - error.name = rval.error.name; - throw error; + // 9. Set request’s current URL’s scheme to "https" if all of the following + // conditions are true: + // - request’s current URL’s scheme is "http" + // - request’s current URL’s host is a domain + // - Matching request’s current URL’s host per Known HSTS Host Domain Name + // Matching results in either a superdomain match with an asserted + // includeSubDomains directive or a congruent match (with or without an + // asserted includeSubDomains directive). [HSTS] + // TODO + + // 10. If recursive is false, then run the remaining steps in parallel. + // TODO + + // 11. 
If response is null, then set response to the result of running + // the steps corresponding to the first matching statement: + if (response === null) { + response = await (async () => { + const currentURL = requestCurrentURL(request) + + if ( + // - request’s current URL’s origin is same origin with request’s origin, + // and request’s response tainting is "basic" + (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || + // request’s current URL’s scheme is "data" + (currentURL.protocol === 'data:') || + // - request’s mode is "navigate" or "websocket" + (request.mode === 'navigate' || request.mode === 'websocket') + ) { + // 1. Set request’s response tainting to "basic". + request.responseTainting = 'basic' + + // 2. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) } - // no error, but also no item - rval = null; - } else { - rval = rval.rval; - } - } - // handle decoding - if(rval !== null) { - // base64-decode and json-decode data - rval = JSON.parse(util.decode64(rval)); - } + // request’s mode is "same-origin" + if (request.mode === 'same-origin') { + // 1. Return a network error. + return makeNetworkError('request mode cannot be "same-origin"') + } - return rval; -}; + // request’s mode is "no-cors" + if (request.mode === 'no-cors') { + // 1. If request’s redirect mode is not "follow", then return a network + // error. + if (request.redirect !== 'follow') { + return makeNetworkError( + 'redirect mode cannot be "follow" for "no-cors" request' + ) + } -/** - * Stores an item in local storage. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param key the key for the item. - * @param data the data for the item (any javascript object/primitive). - */ -var _setItem = function(api, id, key, data) { - // get storage object - var obj = _getStorageObject(api, id); - if(obj === null) { - // create a new storage object - obj = {}; - } - // update key - obj[key] = data; - - // set storage object - _setStorageObject(api, id, obj); -}; + // 2. Set request’s response tainting to "opaque". + request.responseTainting = 'opaque' -/** - * Gets an item from local storage. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param key the key for the item. - * - * @return the item. - */ -var _getItem = function(api, id, key) { - // get storage object - var rval = _getStorageObject(api, id); - if(rval !== null) { - // return data at key - rval = (key in rval) ? rval[key] : null; - } + // 3. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } - return rval; -}; + // request’s current URL’s scheme is not an HTTP(S) scheme + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { + // Return a network error. + return makeNetworkError('URL scheme must be a HTTP(S) scheme') + } -/** - * Removes an item from local storage. - * - * @param api the storage interface. - * @param id the storage ID to use. - * @param key the key for the item. 
- */ -var _removeItem = function(api, id, key) { - // get storage object - var obj = _getStorageObject(api, id); - if(obj !== null && key in obj) { - // remove key - delete obj[key]; - - // see if entry has no keys remaining - var empty = true; - for(var prop in obj) { - empty = false; - break; + // - request’s use-CORS-preflight flag is set + // - request’s unsafe-request flag is set and either request’s method is + // not a CORS-safelisted method or CORS-unsafe request-header names with + // request’s header list is not empty + // 1. Set request’s response tainting to "cors". + // 2. Let corsWithPreflightResponse be the result of running HTTP fetch + // given fetchParams and true. + // 3. If corsWithPreflightResponse is a network error, then clear cache + // entries using request. + // 4. Return corsWithPreflightResponse. + // TODO + + // Otherwise + // 1. Set request’s response tainting to "cors". + request.responseTainting = 'cors' + + // 2. Return the result of running HTTP fetch given fetchParams. + return await httpFetch(fetchParams) + })() + } + + // 12. If recursive is true, then return response. + if (recursive) { + return response + } + + // 13. If response is not a network error and response is not a filtered + // response, then: + if (response.status !== 0 && !response.internalResponse) { + // If request’s response tainting is "cors", then: + if (request.responseTainting === 'cors') { + // 1. Let headerNames be the result of extracting header list values + // given `Access-Control-Expose-Headers` and response’s header list. + // TODO + // 2. If request’s credentials mode is not "include" and headerNames + // contains `*`, then set response’s CORS-exposed header-name list to + // all unique header names in response’s header list. + // TODO + // 3. Otherwise, if headerNames is not null or failure, then set + // response’s CORS-exposed header-name list to headerNames. + // TODO + } + + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (request.responseTainting === 'basic') { + response = filterResponse(response, 'basic') + } else if (request.responseTainting === 'cors') { + response = filterResponse(response, 'cors') + } else if (request.responseTainting === 'opaque') { + response = filterResponse(response, 'opaque') + } else { + assert(false) + } + } + + // 14. Let internalResponse be response, if response is a network error, + // and response’s internal response otherwise. + let internalResponse = + response.status === 0 ? response : response.internalResponse + + // 15. If internalResponse’s URL list is empty, then set it to a clone of + // request’s URL list. + if (internalResponse.urlList.length === 0) { + internalResponse.urlList.push(...request.urlList) + } + + // 16. If request’s timing allow failed flag is unset, then set + // internalResponse’s timing allow passed flag. + if (!request.timingAllowFailed) { + response.timingAllowPassed = true + } + + // 17. If response is not a network error and any of the following returns + // blocked + // - should internalResponse to request be blocked as mixed content + // - should internalResponse to request be blocked by Content Security Policy + // - should internalResponse to request be blocked due to its MIME type + // - should internalResponse to request be blocked due to nosniff + // TODO + + // 18. 
If response’s type is "opaque", internalResponse’s status is 206, + // internalResponse’s range-requested flag is set, and request’s header + // list does not contain `Range`, then set response and internalResponse + // to a network error. + if ( + response.type === 'opaque' && + internalResponse.status === 206 && + internalResponse.rangeRequested && + !request.headers.contains('range') + ) { + response = internalResponse = makeNetworkError() + } + + // 19. If response is not a network error and either request’s method is + // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, + // set internalResponse’s body to null and disregard any enqueuing toward + // it (if any). + if ( + response.status !== 0 && + (request.method === 'HEAD' || + request.method === 'CONNECT' || + nullBodyStatus.includes(internalResponse.status)) + ) { + internalResponse.body = null + fetchParams.controller.dump = true + } + + // 20. If request’s integrity metadata is not the empty string, then: + if (request.integrity) { + // 1. Let processBodyError be this step: run fetch finale given fetchParams + // and a network error. + const processBodyError = (reason) => + fetchFinale(fetchParams, makeNetworkError(reason)) + + // 2. If request’s response tainting is "opaque", or response’s body is null, + // then run processBodyError and abort these steps. + if (request.responseTainting === 'opaque' || response.body == null) { + processBodyError(response.error) + return } - if(empty) { - // remove entry entirely if no keys are left - obj = null; + + // 3. Let processBody given bytes be these steps: + const processBody = (bytes) => { + // 1. If bytes do not match request’s integrity metadata, + // then run processBodyError and abort these steps. [SRI] + if (!bytesMatch(bytes, request.integrity)) { + processBodyError('integrity mismatch') + return + } + + // 2. Set response’s body to bytes as a body. + response.body = safelyExtractBody(bytes)[0] + + // 3. Run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) } - // set storage object - _setStorageObject(api, id, obj); + // 4. Fully read response’s body given processBody and processBodyError. + await fullyReadBody(response.body, processBody, processBodyError) + } else { + // 21. Otherwise, run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) } -}; +} -/** - * Clears the local disk storage identified by the given ID. - * - * @param api the storage interface. - * @param id the storage ID to use. - */ -var _clearItems = function(api, id) { - _setStorageObject(api, id, null); -}; +// https://fetch.spec.whatwg.org/#concept-scheme-fetch +// given a fetch params fetchParams +function schemeFetch (fetchParams) { + // Note: since the connection is destroyed on redirect, which sets fetchParams to a + // cancelled state, we do not want this condition to trigger *unless* there have been + // no redirects. See https://github.com/nodejs/undici/issues/1776 + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) + } -/** - * Calls a storage function. - * - * @param func the function to call. - * @param args the arguments for the function. - * @param location the location argument. - * - * @return the return value from the function. - */ -var _callStorageFunction = function(func, args, location) { - var rval = null; + // 2. 
Let request be fetchParams’s request. + const { request } = fetchParams - // default storage types - if(typeof(location) === 'undefined') { - location = ['web', 'flash']; - } + const { protocol: scheme } = requestCurrentURL(request) - // apply storage types in order of preference - var type; - var done = false; - var exception = null; - for(var idx in location) { - type = location[idx]; - try { - if(type === 'flash' || type === 'both') { - if(args[0] === null) { - throw new Error('Flash local storage not available.'); - } - rval = func.apply(this, args); - done = (type === 'flash'); + // 3. Switch on request’s current URL’s scheme and run the associated steps: + switch (scheme) { + case 'about:': { + // If request’s current URL’s path is the string "blank", then return a new response + // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », + // and body is the empty byte sequence as a body. + + // Otherwise, return a network error. + return Promise.resolve(makeNetworkError('about scheme is not supported')) + } + case 'blob:': { + if (!resolveObjectURL) { + resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL) } - if(type === 'web' || type === 'both') { - args[0] = localStorage; - rval = func.apply(this, args); - done = true; + + // 1. Let blobURLEntry be request’s current URL’s blob URL entry. + const blobURLEntry = requestCurrentURL(request) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + // Buffer.resolveObjectURL does not ignore URL queries. + if (blobURLEntry.search.length !== 0) { + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) } - } catch(ex) { - exception = ex; - } - if(done) { - break; - } - } - if(!done) { - throw exception; - } + const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) - return rval; -}; + // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s + // object is not a Blob object, then return a network error. + if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { + return Promise.resolve(makeNetworkError('invalid method')) + } -/** - * Stores an item on local disk. - * - * The available types of local storage include 'flash', 'web', and 'both'. - * - * The type 'flash' refers to flash local storage (SharedObject). In order - * to use flash local storage, the 'api' parameter must be valid. The type - * 'web' refers to WebStorage, if supported by the browser. The type 'both' - * refers to storing using both 'flash' and 'web', not just one or the - * other. - * - * The location array should list the storage types to use in order of - * preference: - * - * ['flash']: flash only storage - * ['web']: web only storage - * ['both']: try to store in both - * ['flash','web']: store in flash first, but if not available, 'web' - * ['web','flash']: store in web first, but if not available, 'flash' - * - * The location array defaults to: ['web', 'flash'] - * - * @param api the flash interface, null to use only WebStorage. - * @param id the storage ID to use. - * @param key the key for the item. - * @param data the data for the item (any javascript object/primitive). - * @param location an array with the preferred types of storage to use. - */ -util.setItem = function(api, id, key, data, location) { - _callStorageFunction(_setItem, arguments, location); -}; + // 3. 
Let bodyWithType be the result of safely extracting blobURLEntry’s object. + const bodyWithType = safelyExtractBody(blobURLEntryObject) -/** - * Gets an item on local disk. - * - * Set setItem() for details on storage types. - * - * @param api the flash interface, null to use only WebStorage. - * @param id the storage ID to use. - * @param key the key for the item. - * @param location an array with the preferred types of storage to use. - * - * @return the item. - */ -util.getItem = function(api, id, key, location) { - return _callStorageFunction(_getItem, arguments, location); -}; + // 4. Let body be bodyWithType’s body. + const body = bodyWithType[0] -/** - * Removes an item on local disk. - * - * Set setItem() for details on storage types. - * - * @param api the flash interface. - * @param id the storage ID to use. - * @param key the key for the item. - * @param location an array with the preferred types of storage to use. - */ -util.removeItem = function(api, id, key, location) { - _callStorageFunction(_removeItem, arguments, location); -}; + // 5. Let length be body’s length, serialized and isomorphic encoded. + const length = isomorphicEncode(`${body.length}`) -/** - * Clears the local disk storage identified by the given ID. - * - * Set setItem() for details on storage types. - * - * @param api the flash interface if flash is available. - * @param id the storage ID to use. - * @param location an array with the preferred types of storage to use. - */ -util.clearItems = function(api, id, location) { - _callStorageFunction(_clearItems, arguments, location); -}; + // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. + const type = bodyWithType[1] ?? '' -/** - * Check if an object is empty. - * - * Taken from: - * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937 - * - * @param object the object to check. - */ -util.isEmpty = function(obj) { - for(var prop in obj) { - if(obj.hasOwnProperty(prop)) { - return false; + // 7. Return a new response whose status message is `OK`, header list is + // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. + const response = makeResponse({ + statusText: 'OK', + headersList: [ + ['content-length', { name: 'Content-Length', value: length }], + ['content-type', { name: 'Content-Type', value: type }] + ] + }) + + response.body = body + + return Promise.resolve(response) } - } - return true; -}; + case 'data:': { + // 1. Let dataURLStruct be the result of running the + // data: URL processor on request’s current URL. + const currentURL = requestCurrentURL(request) + const dataURLStruct = dataURLProcessor(currentURL) -/** - * Format with simple printf-style interpolation. - * - * %%: literal '%' - * %s,%o: convert next argument into a string. - * - * @param format the string to format. - * @param ... arguments to interpolate into the format string. 
- */ -util.format = function(format) { - var re = /%./g; - // current match - var match; - // current part - var part; - // current arg index - var argi = 0; - // collected parts to recombine later - var parts = []; - // last index found - var last = 0; - // loop while matches remain - while((match = re.exec(format))) { - part = format.substring(last, re.lastIndex - 2); - // don't add empty strings (ie, parts between %s%s) - if(part.length > 0) { - parts.push(part); - } - last = re.lastIndex; - // switch on % code - var code = match[0][1]; - switch(code) { - case 's': - case 'o': - // check if enough arguments were given - if(argi < arguments.length) { - parts.push(arguments[argi++ + 1]); - } else { - parts.push(''); + // 2. If dataURLStruct is failure, then return a + // network error. + if (dataURLStruct === 'failure') { + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) } - break; - // FIXME: do proper formating for numbers, etc - //case 'f': - //case 'd': - case '%': - parts.push('%'); - break; - default: - parts.push('<%' + code + '?>'); + + // 3. Let mimeType be dataURLStruct’s MIME type, serialized. + const mimeType = serializeAMimeType(dataURLStruct.mimeType) + + // 4. Return a response whose status message is `OK`, + // header list is « (`Content-Type`, mimeType) », + // and body is dataURLStruct’s body as a body. + return Promise.resolve(makeResponse({ + statusText: 'OK', + headersList: [ + ['content-type', { name: 'Content-Type', value: mimeType }] + ], + body: safelyExtractBody(dataURLStruct.body)[0] + })) + } + case 'file:': { + // For now, unfortunate as it is, file URLs are left as an exercise for the reader. + // When in doubt, return a network error. + return Promise.resolve(makeNetworkError('not implemented... yet...')) + } + case 'http:': + case 'https:': { + // Return the result of running HTTP fetch given fetchParams. + + return httpFetch(fetchParams) + .catch((err) => makeNetworkError(err)) + } + default: { + return Promise.resolve(makeNetworkError('unknown scheme')) } } - // add trailing part of format string - parts.push(format.substring(last)); - return parts.join(''); -}; +} -/** - * Formats a number. - * - * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/ - */ -util.formatNumber = function(number, decimals, dec_point, thousands_sep) { - // http://kevin.vanzonneveld.net - // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) - // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net) - // + bugfix by: Michael White (http://crestidg.com) - // + bugfix by: Benjamin Lupton - // + bugfix by: Allan Jensen (http://www.winternet.no) - // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) - // * example 1: number_format(1234.5678, 2, '.', ''); - // * returns 1: 1234.57 - - var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals; - var d = dec_point === undefined ? ',' : dec_point; - var t = thousands_sep === undefined ? - '.' : thousands_sep, s = n < 0 ? '-' : ''; - var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + ''; - var j = (i.length > 3) ? i.length % 3 : 0; - return s + (j ? i.substr(0, j) + t : '') + - i.substr(j).replace(/(\d{3})(?=\d)/g, '$1' + t) + - (c ? d + Math.abs(n - i).toFixed(c).slice(2) : ''); -}; +// https://fetch.spec.whatwg.org/#finalize-response +function finalizeResponse (fetchParams, response) { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true -/** - * Formats a byte size. 
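As a usage-level sketch of the scheme dispatch added above (not part of the patch): the `data:` branch decodes the URL itself, so no network request is made and the response carries the embedded MIME type and body.

// Illustrative only: fetching a data: URL via the scheme fetch shown above.
async function readDataUrl () {
  const res = await fetch('data:text/plain;charset=utf-8,Hello%20world')
  console.log(res.headers.get('content-type')) // e.g. 'text/plain;charset=utf-8'
  return res.text()                            // resolves to 'Hello world'
}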
- * - * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/ - */ -util.formatSize = function(size) { - if(size >= 1073741824) { - size = util.formatNumber(size / 1073741824, 2, '.', '') + ' GiB'; - } else if(size >= 1048576) { - size = util.formatNumber(size / 1048576, 2, '.', '') + ' MiB'; - } else if(size >= 1024) { - size = util.formatNumber(size / 1024, 0) + ' KiB'; - } else { - size = util.formatNumber(size, 0) + ' bytes'; + // 2, If fetchParams’s process response done is not null, then queue a fetch + // task to run fetchParams’s process response done given response, with + // fetchParams’s task destination. + if (fetchParams.processResponseDone != null) { + queueMicrotask(() => fetchParams.processResponseDone(response)) } - return size; -}; +} -/** - * Converts an IPv4 or IPv6 string representation into bytes (in network order). - * - * @param ip the IPv4 or IPv6 address to convert. - * - * @return the 4-byte IPv6 or 16-byte IPv6 address or null if the address can't - * be parsed. - */ -util.bytesFromIP = function(ip) { - if(ip.indexOf('.') !== -1) { - return util.bytesFromIPv4(ip); - } - if(ip.indexOf(':') !== -1) { - return util.bytesFromIPv6(ip); - } - return null; -}; +// https://fetch.spec.whatwg.org/#fetch-finale +function fetchFinale (fetchParams, response) { + // 1. If response is a network error, then: + if (response.type === 'error') { + // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». + response.urlList = [fetchParams.request.urlList[0]] -/** - * Converts an IPv4 string representation into bytes (in network order). - * - * @param ip the IPv4 address to convert. - * - * @return the 4-byte address or null if the address can't be parsed. - */ -util.bytesFromIPv4 = function(ip) { - ip = ip.split('.'); - if(ip.length !== 4) { - return null; - } - var b = util.createBuffer(); - for(var i = 0; i < ip.length; ++i) { - var num = parseInt(ip[i], 10); - if(isNaN(num)) { - return null; - } - b.putByte(num); + // 2. Set response’s timing info to the result of creating an opaque timing + // info for fetchParams’s timing info. + response.timingInfo = createOpaqueTimingInfo({ + startTime: fetchParams.timingInfo.startTime + }) } - return b.getBytes(); -}; -/** - * Converts an IPv6 string representation into bytes (in network order). - * - * @param ip the IPv6 address to convert. - * - * @return the 16-byte address or null if the address can't be parsed. - */ -util.bytesFromIPv6 = function(ip) { - var blanks = 0; - ip = ip.split(':').filter(function(e) { - if(e.length === 0) ++blanks; - return true; - }); - var zeros = (8 - ip.length + blanks) * 2; - var b = util.createBuffer(); - for(var i = 0; i < 8; ++i) { - if(!ip[i] || ip[i].length === 0) { - b.fillWithByte(0, zeros); - zeros = 0; - continue; - } - var bytes = util.hexToBytes(ip[i]); - if(bytes.length < 2) { - b.putByte(0); + // 2. Let processResponseEndOfBody be the following steps: + const processResponseEndOfBody = () => { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true + + // If fetchParams’s process response end-of-body is not null, + // then queue a fetch task to run fetchParams’s process response + // end-of-body given response with fetchParams’s task destination. 
+ if (fetchParams.processResponseEndOfBody != null) { + queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) } - b.putBytes(bytes); } - return b.getBytes(); -}; -/** - * Converts 4-bytes into an IPv4 string representation or 16-bytes into - * an IPv6 string representation. The bytes must be in network order. - * - * @param bytes the bytes to convert. - * - * @return the IPv4 or IPv6 string representation if 4 or 16 bytes, - * respectively, are given, otherwise null. - */ -util.bytesToIP = function(bytes) { - if(bytes.length === 4) { - return util.bytesToIPv4(bytes); + // 3. If fetchParams’s process response is non-null, then queue a fetch task + // to run fetchParams’s process response given response, with fetchParams’s + // task destination. + if (fetchParams.processResponse != null) { + queueMicrotask(() => fetchParams.processResponse(response)) } - if(bytes.length === 16) { - return util.bytesToIPv6(bytes); - } - return null; -}; -/** - * Converts 4-bytes into an IPv4 string representation. The bytes must be - * in network order. - * - * @param bytes the bytes to convert. - * - * @return the IPv4 string representation or null for an invalid # of bytes. - */ -util.bytesToIPv4 = function(bytes) { - if(bytes.length !== 4) { - return null; - } - var ip = []; - for(var i = 0; i < bytes.length; ++i) { - ip.push(bytes.charCodeAt(i)); - } - return ip.join('.'); -}; + // 4. If response’s body is null, then run processResponseEndOfBody. + if (response.body == null) { + processResponseEndOfBody() + } else { + // 5. Otherwise: -/** - * Converts 16-bytes into an IPv16 string representation. The bytes must be - * in network order. - * - * @param bytes the bytes to convert. - * - * @return the IPv16 string representation or null for an invalid # of bytes. - */ -util.bytesToIPv6 = function(bytes) { - if(bytes.length !== 16) { - return null; - } - var ip = []; - var zeroGroups = []; - var zeroMaxGroup = 0; - for(var i = 0; i < bytes.length; i += 2) { - var hex = util.bytesToHex(bytes[i] + bytes[i + 1]); - // canonicalize zero representation - while(hex[0] === '0' && hex !== '0') { - hex = hex.substr(1); - } - if(hex === '0') { - var last = zeroGroups[zeroGroups.length - 1]; - var idx = ip.length; - if(!last || idx !== last.end + 1) { - zeroGroups.push({start: idx, end: idx}); - } else { - last.end = idx; - if((last.end - last.start) > - (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) { - zeroMaxGroup = zeroGroups.length - 1; - } - } + // 1. Let transformStream be a new a TransformStream. + + // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, + // enqueues chunk in transformStream. + const identityTransformAlgorithm = (chunk, controller) => { + controller.enqueue(chunk) } - ip.push(hex); - } - if(zeroGroups.length > 0) { - var group = zeroGroups[zeroMaxGroup]; - // only shorten group of length > 0 - if(group.end - group.start > 0) { - ip.splice(group.start, group.end - group.start + 1, ''); - if(group.start === 0) { - ip.unshift(''); + + // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm + // and flushAlgorithm set to processResponseEndOfBody. + const transformStream = new TransformStream({ + start () {}, + transform: identityTransformAlgorithm, + flush: processResponseEndOfBody + }, { + size () { + return 1 } - if(group.end === 7) { - ip.push(''); + }, { + size () { + return 1 } - } - } - return ip.join(':'); -}; + }) -/** - * Estimates the number of processes that can be run concurrently. 
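For reference on the removed side of the hunk above: the node-forge IP helpers convert between dotted/colon notation and network-order binary strings. A minimal sketch, assuming forge is required directly rather than through the bundle:

// Illustrative sketch of the (removed) node-forge IP address helpers.
var forge = require('node-forge');

var v4 = forge.util.bytesFromIP('192.168.0.1');   // 4-byte binary string
console.log(forge.util.bytesToIP(v4));            // '192.168.0.1'

var v6 = forge.util.bytesFromIP('2001:db8::1');   // 16-byte binary string
console.log(forge.util.bytesToIP(v6));            // '2001:db8::1'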
If - * creating Web Workers, keep in mind that the main JavaScript process needs - * its own core. - * - * @param options the options to use: - * update true to force an update (not use the cached value). - * @param callback(err, max) called once the operation completes. - */ -util.estimateCores = function(options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - if('cores' in util && !options.update) { - return callback(null, util.cores); - } - if(typeof navigator !== 'undefined' && - 'hardwareConcurrency' in navigator && - navigator.hardwareConcurrency > 0) { - util.cores = navigator.hardwareConcurrency; - return callback(null, util.cores); - } - if(typeof Worker === 'undefined') { - // workers not available - util.cores = 1; - return callback(null, util.cores); - } - if(typeof Blob === 'undefined') { - // can't estimate, default to 2 - util.cores = 2; - return callback(null, util.cores); - } - - // create worker concurrency estimation code as blob - var blobUrl = URL.createObjectURL(new Blob(['(', - function() { - self.addEventListener('message', function(e) { - // run worker for 4 ms - var st = Date.now(); - var et = st + 4; - while(Date.now() < et); - self.postMessage({st: st, et: et}); - }); - }.toString(), - ')()'], {type: 'application/javascript'})); - - // take 5 samples using 16 workers - sample([], 5, 16); - - function sample(max, samples, numWorkers) { - if(samples === 0) { - // get overlap average - var avg = Math.floor(max.reduce(function(avg, x) { - return avg + x; - }, 0) / max.length); - util.cores = Math.max(1, avg); - URL.revokeObjectURL(blobUrl); - return callback(null, util.cores); - } - map(numWorkers, function(err, results) { - max.push(reduce(numWorkers, results)); - sample(max, samples - 1, numWorkers); - }); + // 4. Set response’s body to the result of piping response’s body through transformStream. + response.body = { stream: response.body.stream.pipeThrough(transformStream) } } - function map(numWorkers, callback) { - var workers = []; - var results = []; - for(var i = 0; i < numWorkers; ++i) { - var worker = new Worker(blobUrl); - worker.addEventListener('message', function(e) { - results.push(e.data); - if(results.length === numWorkers) { - for(var i = 0; i < numWorkers; ++i) { - workers[i].terminate(); - } - callback(null, results); - } - }); - workers.push(worker); - } - for(var i = 0; i < numWorkers; ++i) { - workers[i].postMessage(i); - } - } + // 6. If fetchParams’s process response consume body is non-null, then: + if (fetchParams.processResponseConsumeBody != null) { + // 1. Let processBody given nullOrBytes be this step: run fetchParams’s + // process response consume body given response and nullOrBytes. + const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) - function reduce(numWorkers, results) { - // find overlapping time windows - var overlaps = []; - for(var n = 0; n < numWorkers; ++n) { - var r1 = results[n]; - var overlap = overlaps[n] = []; - for(var i = 0; i < numWorkers; ++i) { - if(n === i) { - continue; - } - var r2 = results[i]; - if((r1.st > r2.st && r1.st < r2.et) || - (r2.st > r1.st && r2.st < r1.et)) { - overlap.push(i); - } - } + // 2. Let processBodyError be this step: run fetchParams’s process + // response consume body given response and failure. + const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + + // 3. 
If response’s body is null, then queue a fetch task to run processBody + // given null, with fetchParams’s task destination. + if (response.body == null) { + queueMicrotask(() => processBody(null)) + } else { + // 4. Otherwise, fully read response’s body given processBody, processBodyError, + // and fetchParams’s task destination. + return fullyReadBody(response.body, processBody, processBodyError) } - // get maximum overlaps ... don't include overlapping worker itself - // as the main JS process was also being scheduled during the work and - // would have to be subtracted from the estimate anyway - return overlaps.reduce(function(max, overlap) { - return Math.max(max, overlap.length); - }, 0); + return Promise.resolve() } -}; +} +// https://fetch.spec.whatwg.org/#http-fetch +async function httpFetch (fetchParams) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request -/***/ }), + // 2. Let response be null. + let response = null -/***/ 8180: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 3. Let actualResponse be null. + let actualResponse = null -/** - * Javascript implementation of X.509 and related components (such as - * Certification Signing Requests) of a Public Key Infrastructure. - * - * @author Dave Longley - * - * Copyright (c) 2010-2014 Digital Bazaar, Inc. - * - * The ASN.1 representation of an X.509v3 certificate is as follows - * (see RFC 2459): - * - * Certificate ::= SEQUENCE { - * tbsCertificate TBSCertificate, - * signatureAlgorithm AlgorithmIdentifier, - * signatureValue BIT STRING - * } - * - * TBSCertificate ::= SEQUENCE { - * version [0] EXPLICIT Version DEFAULT v1, - * serialNumber CertificateSerialNumber, - * signature AlgorithmIdentifier, - * issuer Name, - * validity Validity, - * subject Name, - * subjectPublicKeyInfo SubjectPublicKeyInfo, - * issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL, - * -- If present, version shall be v2 or v3 - * subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL, - * -- If present, version shall be v2 or v3 - * extensions [3] EXPLICIT Extensions OPTIONAL - * -- If present, version shall be v3 - * } - * - * Version ::= INTEGER { v1(0), v2(1), v3(2) } - * - * CertificateSerialNumber ::= INTEGER - * - * Name ::= CHOICE { - * // only one possible choice for now - * RDNSequence - * } - * - * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName - * - * RelativeDistinguishedName ::= SET OF AttributeTypeAndValue - * - * AttributeTypeAndValue ::= SEQUENCE { - * type AttributeType, - * value AttributeValue - * } - * AttributeType ::= OBJECT IDENTIFIER - * AttributeValue ::= ANY DEFINED BY AttributeType - * - * Validity ::= SEQUENCE { - * notBefore Time, - * notAfter Time - * } - * - * Time ::= CHOICE { - * utcTime UTCTime, - * generalTime GeneralizedTime - * } - * - * UniqueIdentifier ::= BIT STRING - * - * SubjectPublicKeyInfo ::= SEQUENCE { - * algorithm AlgorithmIdentifier, - * subjectPublicKey BIT STRING - * } - * - * Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension - * - * Extension ::= SEQUENCE { - * extnID OBJECT IDENTIFIER, - * critical BOOLEAN DEFAULT FALSE, - * extnValue OCTET STRING - * } - * - * The only key algorithm currently supported for PKI is RSA. - * - * RSASSA-PSS signatures are described in RFC 3447 and RFC 4055. 
- * - * PKCS#10 v1.7 describes certificate signing requests: - * - * CertificationRequestInfo: - * - * CertificationRequestInfo ::= SEQUENCE { - * version INTEGER { v1(0) } (v1,...), - * subject Name, - * subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }}, - * attributes [0] Attributes{{ CRIAttributes }} - * } - * - * Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }} - * - * CRIAttributes ATTRIBUTE ::= { - * ... -- add any locally defined attributes here -- } - * - * Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE { - * type ATTRIBUTE.&id({IOSet}), - * values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type}) - * } - * - * CertificationRequest ::= SEQUENCE { - * certificationRequestInfo CertificationRequestInfo, - * signatureAlgorithm AlgorithmIdentifier{{ SignatureAlgorithms }}, - * signature BIT STRING - * } - */ -var forge = __nccwpck_require__(9177); -__nccwpck_require__(7994); -__nccwpck_require__(9549); -__nccwpck_require__(7157); -__nccwpck_require__(6231); -__nccwpck_require__(7973); -__nccwpck_require__(1925); -__nccwpck_require__(154); -__nccwpck_require__(4376); -__nccwpck_require__(3921); -__nccwpck_require__(8339); - -// shortcut for asn.1 API -var asn1 = forge.asn1; - -/* Public Key Infrastructure (PKI) implementation. */ -var pki = module.exports = forge.pki = forge.pki || {}; -var oids = pki.oids; - -// short name OID mappings -var _shortNames = {}; -_shortNames['CN'] = oids['commonName']; -_shortNames['commonName'] = 'CN'; -_shortNames['C'] = oids['countryName']; -_shortNames['countryName'] = 'C'; -_shortNames['L'] = oids['localityName']; -_shortNames['localityName'] = 'L'; -_shortNames['ST'] = oids['stateOrProvinceName']; -_shortNames['stateOrProvinceName'] = 'ST'; -_shortNames['O'] = oids['organizationName']; -_shortNames['organizationName'] = 'O'; -_shortNames['OU'] = oids['organizationalUnitName']; -_shortNames['organizationalUnitName'] = 'OU'; -_shortNames['E'] = oids['emailAddress']; -_shortNames['emailAddress'] = 'E'; - -// validator for an SubjectPublicKeyInfo structure -// Note: Currently only works with an RSA public key -var publicKeyValidator = forge.pki.rsa.publicKeyValidator; - -// validator for an X.509v3 certificate -var x509CertificateValidator = { - name: 'Certificate', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'Certificate.TBSCertificate', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'tbsCertificate', - value: [{ - name: 'Certificate.TBSCertificate.version', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - value: [{ - name: 'Certificate.TBSCertificate.version.integer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'certVersion' - }] - }, { - name: 'Certificate.TBSCertificate.serialNumber', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'certSerialNumber' - }, { - name: 'Certificate.TBSCertificate.signature', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'Certificate.TBSCertificate.signature.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'certinfoSignatureOid' - }, { - name: 'Certificate.TBSCertificate.signature.parameters', - tagClass: asn1.Class.UNIVERSAL, - optional: true, - captureAsn1: 'certinfoSignatureParams' - }] - }, { - name: 'Certificate.TBSCertificate.issuer', - 
tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certIssuer' - }, { - name: 'Certificate.TBSCertificate.validity', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - // Note: UTC and generalized times may both appear so the capture - // names are based on their detected order, the names used below - // are only for the common case, which validity time really means - // "notBefore" and which means "notAfter" will be determined by order - value: [{ - // notBefore (Time) (UTC time case) - name: 'Certificate.TBSCertificate.validity.notBefore (utc)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.UTCTIME, - constructed: false, - optional: true, - capture: 'certValidity1UTCTime' - }, { - // notBefore (Time) (generalized time case) - name: 'Certificate.TBSCertificate.validity.notBefore (generalized)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.GENERALIZEDTIME, - constructed: false, - optional: true, - capture: 'certValidity2GeneralizedTime' - }, { - // notAfter (Time) (only UTC time is supported) - name: 'Certificate.TBSCertificate.validity.notAfter (utc)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.UTCTIME, - constructed: false, - optional: true, - capture: 'certValidity3UTCTime' - }, { - // notAfter (Time) (only UTC time is supported) - name: 'Certificate.TBSCertificate.validity.notAfter (generalized)', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.GENERALIZEDTIME, - constructed: false, - optional: true, - capture: 'certValidity4GeneralizedTime' - }] - }, { - // Name (subject) (RDNSequence) - name: 'Certificate.TBSCertificate.subject', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certSubject' - }, - // SubjectPublicKeyInfo - publicKeyValidator, - { - // issuerUniqueID (optional) - name: 'Certificate.TBSCertificate.issuerUniqueID', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - constructed: true, - optional: true, - value: [{ - name: 'Certificate.TBSCertificate.issuerUniqueID.id', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - // TODO: support arbitrary bit length ids - captureBitStringValue: 'certIssuerUniqueId' - }] - }, { - // subjectUniqueID (optional) - name: 'Certificate.TBSCertificate.subjectUniqueID', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 2, - constructed: true, - optional: true, - value: [{ - name: 'Certificate.TBSCertificate.subjectUniqueID.id', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - // TODO: support arbitrary bit length ids - captureBitStringValue: 'certSubjectUniqueId' - }] - }, { - // Extensions (optional) - name: 'Certificate.TBSCertificate.extensions', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 3, - constructed: true, - captureAsn1: 'certExtensions', - optional: true - }] - }, { - // AlgorithmIdentifier (signature algorithm) - name: 'Certificate.signatureAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // algorithm - name: 'Certificate.signatureAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'certSignatureOid' - }, { - name: 'Certificate.TBSCertificate.signature.parameters', - tagClass: asn1.Class.UNIVERSAL, - optional: true, - captureAsn1: 'certSignatureParams' - }] - }, { - // SignatureValue - name: 'Certificate.signatureValue', - tagClass: asn1.Class.UNIVERSAL, - type: 
asn1.Type.BITSTRING, - constructed: false, - captureBitStringValue: 'certSignature' - }] -}; + // 4. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo -var rsassaPssParameterValidator = { - name: 'rsapss', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'rsapss.hashAlgorithm', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - value: [{ - name: 'rsapss.hashAlgorithm.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.SEQUENCE, - constructed: true, - optional: true, - value: [{ - name: 'rsapss.hashAlgorithm.AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'hashOid' - /* parameter block omitted, for SHA1 NULL anyhow. */ - }] - }] - }, { - name: 'rsapss.maskGenAlgorithm', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 1, - constructed: true, - value: [{ - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.SEQUENCE, - constructed: true, - optional: true, - value: [{ - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'maskGenOid' - }, { - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'maskGenHashOid' - /* parameter block omitted, for SHA1 NULL anyhow. */ - }] - }] - }] - }, { - name: 'rsapss.saltLength', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 2, - optional: true, - value: [{ - name: 'rsapss.saltLength.saltLength', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.INTEGER, - constructed: false, - capture: 'saltLength' - }] - }, { - name: 'rsapss.trailerField', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 3, - optional: true, - value: [{ - name: 'rsapss.trailer.trailer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Class.INTEGER, - constructed: false, - capture: 'trailer' - }] - }] -}; + // 5. 
If request’s service-workers mode is "all", then: + if (request.serviceWorkers === 'all') { + // TODO + } -// validator for a CertificationRequestInfo structure -var certificationRequestInfoValidator = { - name: 'CertificationRequestInfo', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certificationRequestInfo', - value: [{ - name: 'CertificationRequestInfo.integer', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.INTEGER, - constructed: false, - capture: 'certificationRequestInfoVersion' - }, { - // Name (subject) (RDNSequence) - name: 'CertificationRequestInfo.subject', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'certificationRequestInfoSubject' - }, - // SubjectPublicKeyInfo - publicKeyValidator, - { - name: 'CertificationRequestInfo.attributes', - tagClass: asn1.Class.CONTEXT_SPECIFIC, - type: 0, - constructed: true, - optional: true, - capture: 'certificationRequestInfoAttributes', - value: [{ - name: 'CertificationRequestInfo.attributes', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - name: 'CertificationRequestInfo.attributes.type', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false - }, { - name: 'CertificationRequestInfo.attributes.value', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SET, - constructed: true - }] - }] - }] -}; + // 6. If response is null, then: + if (response === null) { + // 1. If makeCORSPreflight is true and one of these conditions is true: + // TODO -// validator for a CertificationRequest structure -var certificationRequestValidator = { - name: 'CertificationRequest', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - captureAsn1: 'csr', - value: [ - certificationRequestInfoValidator, { - // AlgorithmIdentifier (signature algorithm) - name: 'CertificationRequest.signatureAlgorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.SEQUENCE, - constructed: true, - value: [{ - // algorithm - name: 'CertificationRequest.signatureAlgorithm.algorithm', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.OID, - constructed: false, - capture: 'csrSignatureOid' - }, { - name: 'CertificationRequest.signatureAlgorithm.parameters', - tagClass: asn1.Class.UNIVERSAL, - optional: true, - captureAsn1: 'csrSignatureParams' - }] - }, { - // signature - name: 'CertificationRequest.signature', - tagClass: asn1.Class.UNIVERSAL, - type: asn1.Type.BITSTRING, - constructed: false, - captureBitStringValue: 'csrSignature' + // 2. If request’s redirect mode is "follow", then set request’s + // service-workers mode to "none". + if (request.redirect === 'follow') { + request.serviceWorkers = 'none' } - ] -}; -/** - * Converts an RDNSequence of ASN.1 DER-encoded RelativeDistinguishedName - * sets into an array with objects that have type and value properties. - * - * @param rdn the RDNSequence to convert. - * @param md a message digest to append type and value to if provided. 
- */ -pki.RDNAttributesAsArray = function(rdn, md) { - var rval = []; - - // each value in 'rdn' in is a SET of RelativeDistinguishedName - var set, attr, obj; - for(var si = 0; si < rdn.value.length; ++si) { - // get the RelativeDistinguishedName set - set = rdn.value[si]; - - // each value in the SET is an AttributeTypeAndValue sequence - // containing first a type (an OID) and second a value (defined by - // the OID) - for(var i = 0; i < set.value.length; ++i) { - obj = {}; - attr = set.value[i]; - obj.type = asn1.derToOid(attr.value[0].value); - obj.value = attr.value[1].value; - obj.valueTagClass = attr.value[1].type; - // if the OID is known, get its name and short name - if(obj.type in oids) { - obj.name = oids[obj.type]; - if(obj.name in _shortNames) { - obj.shortName = _shortNames[obj.name]; - } - } - if(md) { - md.update(obj.type); - md.update(obj.value); - } - rval.push(obj); + // 3. Set response and actualResponse to the result of running + // HTTP-network-or-cache fetch given fetchParams. + actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) + + // 4. If request’s response tainting is "cors" and a CORS check + // for request and response returns failure, then return a network error. + if ( + request.responseTainting === 'cors' && + corsCheck(request, response) === 'failure' + ) { + return makeNetworkError('cors failure') + } + + // 5. If the TAO check for request and response returns failure, then set + // request’s timing allow failed flag. + if (TAOCheck(request, response) === 'failure') { + request.timingAllowFailed = true + } + } + + // 7. If either request’s response tainting or response’s type + // is "opaque", and the cross-origin resource policy check with + // request’s origin, request’s client, request’s destination, + // and actualResponse returns blocked, then return a network error. + if ( + (request.responseTainting === 'opaque' || response.type === 'opaque') && + crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, + actualResponse + ) === 'blocked' + ) { + return makeNetworkError('blocked') + } + + // 8. If actualResponse’s status is a redirect status, then: + if (redirectStatusSet.has(actualResponse.status)) { + // 1. If actualResponse’s status is not 303, request’s body is not null, + // and the connection uses HTTP/2, then user agents may, and are even + // encouraged to, transmit an RST_STREAM frame. + // See, https://github.com/whatwg/fetch/issues/1288 + if (request.redirect !== 'manual') { + fetchParams.controller.connection.destroy() + } + + // 2. Switch on request’s redirect mode: + if (request.redirect === 'error') { + // Set response to a network error. + response = makeNetworkError('unexpected redirect') + } else if (request.redirect === 'manual') { + // Set response to an opaque-redirect filtered response whose internal + // response is actualResponse. + // NOTE(spec): On the web this would return an `opaqueredirect` response, + // but that doesn't make sense server side. + // See https://github.com/nodejs/undici/issues/1193. + response = actualResponse + } else if (request.redirect === 'follow') { + // Set response to the result of running HTTP-redirect fetch given + // fetchParams and response. + response = await httpRedirectFetch(fetchParams, response) + } else { + assert(false) } } - return rval; -}; + // 9. Set response’s timing info to timingInfo. 
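The added undici code above implements the spec's redirect switch: redirect mode 'error' yields a network error, 'manual' returns the actual redirect response (not an opaque-redirect filtered response, per the NOTE), and 'follow' re-enters HTTP-redirect fetch. A minimal sketch of how those modes surface through undici's public fetch API — illustrative only, not part of the generated diff; the URL is a placeholder assumed to answer with a 3xx:

// Illustrative sketch, not part of the dist diff. Assumes undici >= 5
// (which exports a WHATWG-style fetch) and a placeholder redirecting URL.
const { fetch } = require('undici')

async function demoRedirectModes (url) {
  // 'follow' (the default) chases the redirect chain transparently.
  const followed = await fetch(url, { redirect: 'follow' })
  console.log('follow:', followed.status, followed.url)

  // 'manual' hands back the redirect response itself; per the NOTE above,
  // undici does not wrap it in an opaque-redirect filtered response.
  const manual = await fetch(url, { redirect: 'manual' })
  console.log('manual:', manual.status, manual.headers.get('location'))

  // 'error' turns any redirect into a rejected fetch (a network error).
  try {
    await fetch(url, { redirect: 'error' })
  } catch (err) {
    console.log('error mode rejected:', err.message)
  }
}

demoRedirectModes('https://example.com/some-redirecting-path').catch(console.error)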
+ response.timingInfo = timingInfo -/** - * Converts ASN.1 CRIAttributes into an array with objects that have type and - * value properties. - * - * @param attributes the CRIAttributes to convert. - */ -pki.CRIAttributesAsArray = function(attributes) { - var rval = []; - - // each value in 'attributes' in is a SEQUENCE with an OID and a SET - for(var si = 0; si < attributes.length; ++si) { - // get the attribute sequence - var seq = attributes[si]; - - // each value in the SEQUENCE containing first a type (an OID) and - // second a set of values (defined by the OID) - var type = asn1.derToOid(seq.value[0].value); - var values = seq.value[1].value; - for(var vi = 0; vi < values.length; ++vi) { - var obj = {}; - obj.type = type; - obj.value = values[vi].value; - obj.valueTagClass = values[vi].type; - // if the OID is known, get its name and short name - if(obj.type in oids) { - obj.name = oids[obj.type]; - if(obj.name in _shortNames) { - obj.shortName = _shortNames[obj.name]; - } - } - // parse extensions - if(obj.type === oids.extensionRequest) { - obj.extensions = []; - for(var ei = 0; ei < obj.value.length; ++ei) { - obj.extensions.push(pki.certificateExtensionFromAsn1(obj.value[ei])); - } - } - rval.push(obj); + // 10. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-redirect-fetch +function httpRedirectFetch (fetchParams, response) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let actualResponse be response, if response is not a filtered response, + // and response’s internal response otherwise. + const actualResponse = response.internalResponse + ? response.internalResponse + : response + + // 3. Let locationURL be actualResponse’s location URL given request’s current + // URL’s fragment. + let locationURL + + try { + locationURL = responseLocationURL( + actualResponse, + requestCurrentURL(request).hash + ) + + // 4. If locationURL is null, then return response. + if (locationURL == null) { + return response } + } catch (err) { + // 5. If locationURL is failure, then return a network error. + return Promise.resolve(makeNetworkError(err)) } - return rval; -}; - -/** - * Gets an issuer or subject attribute from its name, type, or short name. - * - * @param obj the issuer or subject object. - * @param options a short name string or an object with: - * shortName the short name for the attribute. - * name the name for the attribute. - * type the type for the attribute. - * - * @return the attribute. - */ -function _getAttribute(obj, options) { - if(typeof options === 'string') { - options = {shortName: options}; + // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network + // error. + if (!urlIsHttpHttpsScheme(locationURL)) { + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) } - var rval = null; - var attr; - for(var i = 0; rval === null && i < obj.attributes.length; ++i) { - attr = obj.attributes[i]; - if(options.type && options.type === attr.type) { - rval = attr; - } else if(options.name && options.name === attr.name) { - rval = attr; - } else if(options.shortName && options.shortName === attr.shortName) { - rval = attr; - } + // 7. If request’s redirect count is 20, then return a network error. + if (request.redirectCount === 20) { + return Promise.resolve(makeNetworkError('redirect count exceeded')) } - return rval; -} -/** - * Converts signature parameters from ASN.1 structure. - * - * Currently only RSASSA-PSS supported. 
The PKCS#1 v1.5 signature scheme had - * no parameters. - * - * RSASSA-PSS-params ::= SEQUENCE { - * hashAlgorithm [0] HashAlgorithm DEFAULT - * sha1Identifier, - * maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT - * mgf1SHA1Identifier, - * saltLength [2] INTEGER DEFAULT 20, - * trailerField [3] INTEGER DEFAULT 1 - * } - * - * HashAlgorithm ::= AlgorithmIdentifier - * - * MaskGenAlgorithm ::= AlgorithmIdentifier - * - * AlgorithmIdentifer ::= SEQUENCE { - * algorithm OBJECT IDENTIFIER, - * parameters ANY DEFINED BY algorithm OPTIONAL - * } - * - * @param oid The OID specifying the signature algorithm - * @param obj The ASN.1 structure holding the parameters - * @param fillDefaults Whether to use return default values where omitted - * @return signature parameter object - */ -var _readSignatureParameters = function(oid, obj, fillDefaults) { - var params = {}; + // 8. Increase request’s redirect count by 1. + request.redirectCount += 1 - if(oid !== oids['RSASSA-PSS']) { - return params; + // 9. If request’s mode is "cors", locationURL includes credentials, and + // request’s origin is not same origin with locationURL’s origin, then return + // a network error. + if ( + request.mode === 'cors' && + (locationURL.username || locationURL.password) && + !sameOrigin(request, locationURL) + ) { + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) } - if(fillDefaults) { - params = { - hash: { - algorithmOid: oids['sha1'] - }, - mgf: { - algorithmOid: oids['mgf1'], - hash: { - algorithmOid: oids['sha1'] - } - }, - saltLength: 20 - }; + // 10. If request’s response tainting is "cors" and locationURL includes + // credentials, then return a network error. + if ( + request.responseTainting === 'cors' && + (locationURL.username || locationURL.password) + ) { + return Promise.resolve(makeNetworkError( + 'URL cannot contain credentials for request mode "cors"' + )) } - var capture = {}; - var errors = []; - if(!asn1.validate(obj, rsassaPssParameterValidator, capture, errors)) { - var error = new Error('Cannot read RSASSA-PSS parameter block.'); - error.errors = errors; - throw error; + // 11. If actualResponse’s status is not 303, request’s body is non-null, + // and request’s body’s source is null, then return a network error. + if ( + actualResponse.status !== 303 && + request.body != null && + request.body.source == null + ) { + return Promise.resolve(makeNetworkError()) } - if(capture.hashOid !== undefined) { - params.hash = params.hash || {}; - params.hash.algorithmOid = asn1.derToOid(capture.hashOid); + // 12. If one of the following is true + // - actualResponse’s status is 301 or 302 and request’s method is `POST` + // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` + if ( + ([301, 302].includes(actualResponse.status) && request.method === 'POST') || + (actualResponse.status === 303 && + !GET_OR_HEAD.includes(request.method)) + ) { + // then: + // 1. Set request’s method to `GET` and request’s body to null. + request.method = 'GET' + request.body = null + + // 2. For each headerName of request-body-header name, delete headerName from + // request’s header list. + for (const headerName of requestBodyHeader) { + request.headersList.delete(headerName) + } } - if(capture.maskGenOid !== undefined) { - params.mgf = params.mgf || {}; - params.mgf.algorithmOid = asn1.derToOid(capture.maskGenOid); - params.mgf.hash = params.mgf.hash || {}; - params.mgf.hash.algorithmOid = asn1.derToOid(capture.maskGenHashOid); + // 13. 
If request’s current URL’s origin is not same origin with locationURL’s + // origin, then for each headerName of CORS non-wildcard request-header name, + // delete headerName from request’s header list. + if (!sameOrigin(requestCurrentURL(request), locationURL)) { + // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name + request.headersList.delete('authorization') + + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. + request.headersList.delete('cookie') + request.headersList.delete('host') } - if(capture.saltLength !== undefined) { - params.saltLength = capture.saltLength.charCodeAt(0); + // 14. If request’s body is non-null, then set request’s body to the first return + // value of safely extracting request’s body’s source. + if (request.body != null) { + assert(request.body.source != null) + request.body = safelyExtractBody(request.body.source)[0] } - return params; -}; + // 15. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo -/** - * Create signature digest for OID. - * - * @param options - * signatureOid: the OID specifying the signature algorithm. - * type: a human readable type for error messages - * @return a created md instance. throws if unknown oid. - */ -var _createSignatureDigest = function(options) { - switch(oids[options.signatureOid]) { - case 'sha1WithRSAEncryption': - // deprecated alias - case 'sha1WithRSASignature': - return forge.md.sha1.create(); - case 'md5WithRSAEncryption': - return forge.md.md5.create(); - case 'sha256WithRSAEncryption': - return forge.md.sha256.create(); - case 'sha384WithRSAEncryption': - return forge.md.sha384.create(); - case 'sha512WithRSAEncryption': - return forge.md.sha512.create(); - case 'RSASSA-PSS': - return forge.md.sha256.create(); - default: - var error = new Error( - 'Could not compute ' + options.type + ' digest. ' + - 'Unknown signature OID.'); - error.signatureOid = options.signatureOid; - throw error; + // 16. Set timingInfo’s redirect end time and post-redirect start time to the + // coarsened shared current time given fetchParams’s cross-origin isolated + // capability. + timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = + coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) + + // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s + // redirect start time to timingInfo’s start time. + if (timingInfo.redirectStartTime === 0) { + timingInfo.redirectStartTime = timingInfo.startTime } -}; -/** - * Verify signature on certificate or CSR. - * - * @param options: - * certificate the certificate or CSR to verify. - * md the signature digest. - * signature the signature - * @return a created md instance. throws if unknown oid. - */ -var _verifySignature = function(options) { - var cert = options.certificate; - var scheme; - - switch(cert.signatureOid) { - case oids.sha1WithRSAEncryption: - // deprecated alias - case oids.sha1WithRSASignature: - /* use PKCS#1 v1.5 padding scheme */ - break; - case oids['RSASSA-PSS']: - var hash, mgf; - - /* initialize mgf */ - hash = oids[cert.signatureParameters.mgf.hash.algorithmOid]; - if(hash === undefined || forge.md[hash] === undefined) { - var error = new Error('Unsupported MGF hash function.'); - error.oid = cert.signatureParameters.mgf.hash.algorithmOid; - error.name = hash; - throw error; - } + // 18. 
Append locationURL to request’s URL list. + request.urlList.push(locationURL) - mgf = oids[cert.signatureParameters.mgf.algorithmOid]; - if(mgf === undefined || forge.mgf[mgf] === undefined) { - var error = new Error('Unsupported MGF function.'); - error.oid = cert.signatureParameters.mgf.algorithmOid; - error.name = mgf; - throw error; - } + // 19. Invoke set request’s referrer policy on redirect on request and + // actualResponse. + setRequestReferrerPolicyOnRedirect(request, actualResponse) - mgf = forge.mgf[mgf].create(forge.md[hash].create()); + // 20. Return the result of running main fetch given fetchParams and true. + return mainFetch(fetchParams, true) +} + +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +async function httpNetworkOrCacheFetch ( + fetchParams, + isAuthenticationFetch = false, + isNewConnectionFetch = false +) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - /* initialize hash function */ - hash = oids[cert.signatureParameters.hash.algorithmOid]; - if(hash === undefined || forge.md[hash] === undefined) { - var error = new Error('Unsupported RSASSA-PSS hash function.'); - error.oid = cert.signatureParameters.hash.algorithmOid; - error.name = hash; - throw error; - } + // 2. Let httpFetchParams be null. + let httpFetchParams = null - scheme = forge.pss.create( - forge.md[hash].create(), mgf, cert.signatureParameters.saltLength - ); - break; - } + // 3. Let httpRequest be null. + let httpRequest = null - // verify signature on cert using public key - return cert.publicKey.verify( - options.md.digest().getBytes(), options.signature, scheme - ); -}; + // 4. Let response be null. + let response = null -/** - * Converts an X.509 certificate from PEM format. - * - * Note: If the certificate is to be verified then compute hash should - * be set to true. This will scan the TBSCertificate part of the ASN.1 - * object while it is converted so it doesn't need to be converted back - * to ASN.1-DER-encoding later. - * - * @param pem the PEM-formatted certificate. - * @param computeHash true to compute the hash for verification. - * @param strict true to be strict when checking ASN.1 value lengths, false to - * allow truncated values (default: true). - * - * @return the certificate. - */ -pki.certificateFromPem = function(pem, computeHash, strict) { - var msg = forge.pem.decode(pem)[0]; - - if(msg.type !== 'CERTIFICATE' && - msg.type !== 'X509 CERTIFICATE' && - msg.type !== 'TRUSTED CERTIFICATE') { - var error = new Error( - 'Could not convert certificate from PEM; PEM header type ' + - 'is not "CERTIFICATE", "X509 CERTIFICATE", or "TRUSTED CERTIFICATE".'); - error.headerType = msg.type; - throw error; - } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error( - 'Could not convert certificate from PEM; PEM is encrypted.'); - } + // 5. Let storedResponse be null. + // TODO: cache - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body, strict); + // 6. Let httpCache be null. + const httpCache = null - return pki.certificateFromAsn1(obj, computeHash); -}; + // 7. Let the revalidatingFlag be unset. + const revalidatingFlag = false -/** - * Converts an X.509 certificate to PEM format. - * - * @param cert the certificate. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted certificate. 
- */ -pki.certificateToPem = function(cert, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'CERTIFICATE', - body: asn1.toDer(pki.certificateToAsn1(cert)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 8. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If request’s window is "no-window" and request’s redirect mode is + // "error", then set httpFetchParams to fetchParams and httpRequest to + // request. + if (request.window === 'no-window' && request.redirect === 'error') { + httpFetchParams = fetchParams + httpRequest = request + } else { + // Otherwise: + + // 1. Set httpRequest to a clone of request. + httpRequest = makeRequest(request) + + // 2. Set httpFetchParams to a copy of fetchParams. + httpFetchParams = { ...fetchParams } + + // 3. Set httpFetchParams’s request to httpRequest. + httpFetchParams.request = httpRequest + } + + // 3. Let includeCredentials be true if one of + const includeCredentials = + request.credentials === 'include' || + (request.credentials === 'same-origin' && + request.responseTainting === 'basic') -/** - * Converts an RSA public key from PEM format. - * - * @param pem the PEM-formatted public key. - * - * @return the public key. - */ -pki.publicKeyFromPem = function(pem) { - var msg = forge.pem.decode(pem)[0]; + // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s + // body is non-null; otherwise null. + const contentLength = httpRequest.body ? httpRequest.body.length : null - if(msg.type !== 'PUBLIC KEY' && msg.type !== 'RSA PUBLIC KEY') { - var error = new Error('Could not convert public key from PEM; PEM header ' + - 'type is not "PUBLIC KEY" or "RSA PUBLIC KEY".'); - error.headerType = msg.type; - throw error; + // 5. Let contentLengthHeaderValue be null. + let contentLengthHeaderValue = null + + // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or + // `PUT`, then set contentLengthHeaderValue to `0`. + if ( + httpRequest.body == null && + ['POST', 'PUT'].includes(httpRequest.method) + ) { + contentLengthHeaderValue = '0' } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert public key from PEM; PEM is encrypted.'); + + // 7. If contentLength is non-null, then set contentLengthHeaderValue to + // contentLength, serialized and isomorphic encoded. + if (contentLength != null) { + contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) } - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body); + // 8. If contentLengthHeaderValue is non-null, then append + // `Content-Length`/contentLengthHeaderValue to httpRequest’s header + // list. + if (contentLengthHeaderValue != null) { + httpRequest.headersList.append('content-length', contentLengthHeaderValue) + } - return pki.publicKeyFromAsn1(obj); -}; + // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, + // contentLengthHeaderValue) to httpRequest’s header list. -/** - * Converts an RSA public key to PEM format (using a SubjectPublicKeyInfo). - * - * @param key the public key. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted public key. - */ -pki.publicKeyToPem = function(key, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'PUBLIC KEY', - body: asn1.toDer(pki.publicKeyToAsn1(key)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 10. 
If contentLength is non-null and httpRequest’s keepalive is true, + // then: + if (contentLength != null && httpRequest.keepalive) { + // NOTE: keepalive is a noop outside of browser context. + } -/** - * Converts an RSA public key to PEM format (using an RSAPublicKey). - * - * @param key the public key. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted public key. - */ -pki.publicKeyToRSAPublicKeyPem = function(key, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'RSA PUBLIC KEY', - body: asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 11. If httpRequest’s referrer is a URL, then append + // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, + // to httpRequest’s header list. + if (httpRequest.referrer instanceof URL) { + httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) + } -/** - * Gets a fingerprint for the given public key. - * - * @param options the options to use. - * [md] the message digest object to use (defaults to forge.md.sha1). - * [type] the type of fingerprint, such as 'RSAPublicKey', - * 'SubjectPublicKeyInfo' (defaults to 'RSAPublicKey'). - * [encoding] an alternative output encoding, such as 'hex' - * (defaults to none, outputs a byte buffer). - * [delimiter] the delimiter to use between bytes for 'hex' encoded - * output, eg: ':' (defaults to none). - * - * @return the fingerprint as a byte buffer or other encoding based on options. - */ -pki.getPublicKeyFingerprint = function(key, options) { - options = options || {}; - var md = options.md || forge.md.sha1.create(); - var type = options.type || 'RSAPublicKey'; + // 12. Append a request `Origin` header for httpRequest. + appendRequestOriginHeader(httpRequest) - var bytes; - switch(type) { - case 'RSAPublicKey': - bytes = asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes(); - break; - case 'SubjectPublicKeyInfo': - bytes = asn1.toDer(pki.publicKeyToAsn1(key)).getBytes(); - break; - default: - throw new Error('Unknown fingerprint type "' + options.type + '".'); + // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] + appendFetchMetadata(httpRequest) + + // 14. If httpRequest’s header list does not contain `User-Agent`, then + // user agents should append `User-Agent`/default `User-Agent` value to + // httpRequest’s header list. + if (!httpRequest.headersList.contains('user-agent')) { + httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') } - // hash public key bytes - md.start(); - md.update(bytes); - var digest = md.digest(); - if(options.encoding === 'hex') { - var hex = digest.toHex(); - if(options.delimiter) { - return hex.match(/.{2}/g).join(options.delimiter); - } - return hex; - } else if(options.encoding === 'binary') { - return digest.getBytes(); - } else if(options.encoding) { - throw new Error('Unknown encoding "' + options.encoding + '".'); + // 15. If httpRequest’s cache mode is "default" and httpRequest’s header + // list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set + // httpRequest’s cache mode to "no-store". 
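Steps 4 through 8 above decide the Content-Length header value: a null body on POST or PUT gets an explicit '0', and a non-null body gets its length serialized (the bundle additionally isomorphic-encodes the value). A standalone restatement of that decision, for illustration only; the helper name is invented:

// Illustrative helper, not part of the bundle: mirrors the Content-Length
// logic of steps 4-8 (minus isomorphic encoding). `body` is assumed to be
// null or an object exposing a numeric `length`, like httpRequest.body.
function contentLengthHeaderFor (method, body) {
  const contentLength = body ? body.length : null
  let value = null
  if (body == null && ['POST', 'PUT'].includes(method)) {
    value = '0'
  }
  if (contentLength != null) {
    value = `${contentLength}`
  }
  return value
}

console.log(contentLengthHeaderFor('POST', null))           // '0'
console.log(contentLengthHeaderFor('GET', null))            // null
console.log(contentLengthHeaderFor('PUT', { length: 42 }))  // '42'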
+ if ( + httpRequest.cache === 'default' && + (httpRequest.headersList.contains('if-modified-since') || + httpRequest.headersList.contains('if-none-match') || + httpRequest.headersList.contains('if-unmodified-since') || + httpRequest.headersList.contains('if-match') || + httpRequest.headersList.contains('if-range')) + ) { + httpRequest.cache = 'no-store' } - return digest; -}; -/** - * Converts a PKCS#10 certification request (CSR) from PEM format. - * - * Note: If the certification request is to be verified then compute hash - * should be set to true. This will scan the CertificationRequestInfo part of - * the ASN.1 object while it is converted so it doesn't need to be converted - * back to ASN.1-DER-encoding later. - * - * @param pem the PEM-formatted certificate. - * @param computeHash true to compute the hash for verification. - * @param strict true to be strict when checking ASN.1 value lengths, false to - * allow truncated values (default: true). - * - * @return the certification request (CSR). - */ -pki.certificationRequestFromPem = function(pem, computeHash, strict) { - var msg = forge.pem.decode(pem)[0]; + // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent + // no-cache cache-control header modification flag is unset, and + // httpRequest’s header list does not contain `Cache-Control`, then append + // `Cache-Control`/`max-age=0` to httpRequest’s header list. + if ( + httpRequest.cache === 'no-cache' && + !httpRequest.preventNoCacheCacheControlHeaderModification && + !httpRequest.headersList.contains('cache-control') + ) { + httpRequest.headersList.append('cache-control', 'max-age=0') + } + + // 17. If httpRequest’s cache mode is "no-store" or "reload", then: + if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { + // 1. If httpRequest’s header list does not contain `Pragma`, then append + // `Pragma`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('pragma')) { + httpRequest.headersList.append('pragma', 'no-cache') + } - if(msg.type !== 'CERTIFICATE REQUEST') { - var error = new Error('Could not convert certification request from PEM; ' + - 'PEM header type is not "CERTIFICATE REQUEST".'); - error.headerType = msg.type; - throw error; + // 2. If httpRequest’s header list does not contain `Cache-Control`, + // then append `Cache-Control`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('cache-control')) { + httpRequest.headersList.append('cache-control', 'no-cache') + } } - if(msg.procType && msg.procType.type === 'ENCRYPTED') { - throw new Error('Could not convert certification request from PEM; ' + - 'PEM is encrypted.'); + + // 18. If httpRequest’s header list contains `Range`, then append + // `Accept-Encoding`/`identity` to httpRequest’s header list. + if (httpRequest.headersList.contains('range')) { + httpRequest.headersList.append('accept-encoding', 'identity') } - // convert DER to ASN.1 object - var obj = asn1.fromDer(msg.body, strict); + // 19. Modify httpRequest’s header list per HTTP. Do not append a given + // header if httpRequest’s header list contains that header’s name. 
+ // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 + if (!httpRequest.headersList.contains('accept-encoding')) { + if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { + httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') + } else { + httpRequest.headersList.append('accept-encoding', 'gzip, deflate') + } + } - return pki.certificationRequestFromAsn1(obj, computeHash); -}; + httpRequest.headersList.delete('host') -/** - * Converts a PKCS#10 certification request (CSR) to PEM format. - * - * @param csr the certification request. - * @param maxline the maximum characters per line, defaults to 64. - * - * @return the PEM-formatted certification request. - */ -pki.certificationRequestToPem = function(csr, maxline) { - // convert to ASN.1, then DER, then PEM-encode - var msg = { - type: 'CERTIFICATE REQUEST', - body: asn1.toDer(pki.certificationRequestToAsn1(csr)).getBytes() - }; - return forge.pem.encode(msg, {maxline: maxline}); -}; + // 20. If includeCredentials is true, then: + if (includeCredentials) { + // 1. If the user agent is not configured to block cookies for httpRequest + // (see section 7 of [COOKIES]), then: + // TODO: credentials + // 2. If httpRequest’s header list does not contain `Authorization`, then: + // TODO: credentials + } -/** - * Creates an empty X.509v3 RSA certificate. - * - * @return the certificate. - */ -pki.createCertificate = function() { - var cert = {}; - cert.version = 0x02; - cert.serialNumber = '00'; - cert.signatureOid = null; - cert.signature = null; - cert.siginfo = {}; - cert.siginfo.algorithmOid = null; - cert.validity = {}; - cert.validity.notBefore = new Date(); - cert.validity.notAfter = new Date(); - - cert.issuer = {}; - cert.issuer.getField = function(sn) { - return _getAttribute(cert.issuer, sn); - }; - cert.issuer.addField = function(attr) { - _fillMissingFields([attr]); - cert.issuer.attributes.push(attr); - }; - cert.issuer.attributes = []; - cert.issuer.hash = null; + // 21. If there’s a proxy-authentication entry, use it as appropriate. + // TODO: proxy-authentication - cert.subject = {}; - cert.subject.getField = function(sn) { - return _getAttribute(cert.subject, sn); - }; - cert.subject.addField = function(attr) { - _fillMissingFields([attr]); - cert.subject.attributes.push(attr); - }; - cert.subject.attributes = []; - cert.subject.hash = null; + // 22. Set httpCache to the result of determining the HTTP cache + // partition, given httpRequest. + // TODO: cache - cert.extensions = []; - cert.publicKey = null; - cert.md = null; + // 23. If httpCache is null, then set httpRequest’s cache mode to + // "no-store". + if (httpCache == null) { + httpRequest.cache = 'no-store' + } - /** - * Sets the subject of this certificate. - * - * @param attrs the array of subject attributes to use. - * @param uniqueId an optional a unique ID to use. - */ - cert.setSubject = function(attrs, uniqueId) { - // set new attributes, clear hash - _fillMissingFields(attrs); - cert.subject.attributes = attrs; - delete cert.subject.uniqueId; - if(uniqueId) { - // TODO: support arbitrary bit length ids - cert.subject.uniqueId = uniqueId; - } - cert.subject.hash = null; - }; + // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", + // then: + if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { + // TODO: cache + } - /** - * Sets the issuer of this certificate. - * - * @param attrs the array of issuer attributes to use. - * @param uniqueId an optional a unique ID to use. 
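Step 19 above fills in a default Accept-Encoding only when the caller has not set one, offering brotli only over HTTPS. A simplified restatement of that choice, for illustration only; the real bundle goes through urlHasHttpsScheme and the request's header list:

// Illustrative sketch, not part of the bundle: the default Accept-Encoding
// selection from step 19, assuming a WHATWG URL instance as input.
function defaultAcceptEncoding (url) {
  return url.protocol === 'https:' ? 'br, gzip, deflate' : 'gzip, deflate'
}

console.log(defaultAcceptEncoding(new URL('https://example.com'))) // 'br, gzip, deflate'
console.log(defaultAcceptEncoding(new URL('http://example.com')))  // 'gzip, deflate'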
- */ - cert.setIssuer = function(attrs, uniqueId) { - // set new attributes, clear hash - _fillMissingFields(attrs); - cert.issuer.attributes = attrs; - delete cert.issuer.uniqueId; - if(uniqueId) { - // TODO: support arbitrary bit length ids - cert.issuer.uniqueId = uniqueId; - } - cert.issuer.hash = null; - }; + // 9. If aborted, then return the appropriate network error for fetchParams. + // TODO - /** - * Sets the extensions of this certificate. - * - * @param exts the array of extensions to use. - */ - cert.setExtensions = function(exts) { - for(var i = 0; i < exts.length; ++i) { - _fillMissingExtensionFields(exts[i], {cert: cert}); + // 10. If response is null, then: + if (response == null) { + // 1. If httpRequest’s cache mode is "only-if-cached", then return a + // network error. + if (httpRequest.mode === 'only-if-cached') { + return makeNetworkError('only if cached') } - // set new extensions - cert.extensions = exts; - }; - /** - * Gets an extension by its name or id. - * - * @param options the name to use or an object with: - * name the name to use. - * id the id to use. - * - * @return the extension or null if not found. - */ - cert.getExtension = function(options) { - if(typeof options === 'string') { - options = {name: options}; - } - - var rval = null; - var ext; - for(var i = 0; rval === null && i < cert.extensions.length; ++i) { - ext = cert.extensions[i]; - if(options.id && ext.id === options.id) { - rval = ext; - } else if(options.name && ext.name === options.name) { - rval = ext; - } - } - return rval; - }; + // 2. Let forwardResponse be the result of running HTTP-network fetch + // given httpFetchParams, includeCredentials, and isNewConnectionFetch. + const forwardResponse = await httpNetworkFetch( + httpFetchParams, + includeCredentials, + isNewConnectionFetch + ) - /** - * Signs this certificate using the given private key. - * - * @param key the private key to sign with. - * @param md the message digest object to use (defaults to forge.md.sha1). - */ - cert.sign = function(key, md) { - // TODO: get signature OID from private key - cert.md = md || forge.md.sha1.create(); - var algorithmOid = oids[cert.md.algorithm + 'WithRSAEncryption']; - if(!algorithmOid) { - var error = new Error('Could not compute certificate digest. ' + - 'Unknown message digest algorithm OID.'); - error.algorithm = cert.md.algorithm; - throw error; + // 3. If httpRequest’s method is unsafe and forwardResponse’s status is + // in the range 200 to 399, inclusive, invalidate appropriate stored + // responses in httpCache, as per the "Invalidation" chapter of HTTP + // Caching, and set storedResponse to null. [HTTP-CACHING] + if ( + !safeMethodsSet.has(httpRequest.method) && + forwardResponse.status >= 200 && + forwardResponse.status <= 399 + ) { + // TODO: cache } - cert.signatureOid = cert.siginfo.algorithmOid = algorithmOid; - // get TBSCertificate, convert to DER - cert.tbsCertificate = pki.getTBSCertificate(cert); - var bytes = asn1.toDer(cert.tbsCertificate); + // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, + // then: + if (revalidatingFlag && forwardResponse.status === 304) { + // TODO: cache + } - // digest and sign - cert.md.update(bytes.getBytes()); - cert.signature = key.sign(cert.md); - }; + // 5. If response is null, then: + if (response == null) { + // 1. Set response to forwardResponse. + response = forwardResponse - /** - * Attempts verify the signature on the passed certificate using this - * certificate's public key. 
- * - * @param child the certificate to verify. - * - * @return true if verified, false if not. - */ - cert.verify = function(child) { - var rval = false; - - if(!cert.issued(child)) { - var issuer = child.issuer; - var subject = cert.subject; - var error = new Error( - 'The parent certificate did not issue the given child ' + - 'certificate; the child certificate\'s issuer does not match the ' + - 'parent\'s subject.'); - error.expectedIssuer = subject.attributes; - error.actualIssuer = issuer.attributes; - throw error; + // 2. Store httpRequest and forwardResponse in httpCache, as per the + // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] + // TODO: cache } + } - var md = child.md; - if(md === null) { - // create digest for OID signature types - md = _createSignatureDigest({ - signatureOid: child.signatureOid, - type: 'certificate' - }); + // 11. Set response’s URL list to a clone of httpRequest’s URL list. + response.urlList = [...httpRequest.urlList] - // produce DER formatted TBSCertificate and digest it - var tbsCertificate = child.tbsCertificate || pki.getTBSCertificate(child); - var bytes = asn1.toDer(tbsCertificate); - md.update(bytes.getBytes()); - } + // 12. If httpRequest’s header list contains `Range`, then set response’s + // range-requested flag. + if (httpRequest.headersList.contains('range')) { + response.rangeRequested = true + } - if(md !== null) { - rval = _verifySignature({ - certificate: cert, md: md, signature: child.signature - }); + // 13. Set response’s request-includes-credentials to includeCredentials. + response.requestIncludesCredentials = includeCredentials + + // 14. If response’s status is 401, httpRequest’s response tainting is not + // "cors", includeCredentials is true, and request’s window is an environment + // settings object, then: + // TODO + + // 15. If response’s status is 407, then: + if (response.status === 407) { + // 1. If request’s window is "no-window", then return a network error. + if (request.window === 'no-window') { + return makeNetworkError() } - return rval; - }; + // 2. ??? - /** - * Returns true if this certificate's issuer matches the passed - * certificate's subject. Note that no signature check is performed. - * - * @param parent the certificate to check. - * - * @return true if this certificate's issuer matches the passed certificate's - * subject. - */ - cert.isIssuer = function(parent) { - var rval = false; - - var i = cert.issuer; - var s = parent.subject; - - // compare hashes if present - if(i.hash && s.hash) { - rval = (i.hash === s.hash); - } else if(i.attributes.length === s.attributes.length) { - // all attributes are the same so issuer matches subject - rval = true; - var iattr, sattr; - for(var n = 0; rval && n < i.attributes.length; ++n) { - iattr = i.attributes[n]; - sattr = s.attributes[n]; - if(iattr.type !== sattr.type || iattr.value !== sattr.value) { - // attribute mismatch - rval = false; - } - } + // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) } - return rval; - }; + // 4. Prompt the end user as appropriate in request’s window and store + // the result as a proxy-authentication entry. [HTTP-AUTH] + // TODO: Invoke some kind of callback? - /** - * Returns true if this certificate's subject matches the issuer of the - * given certificate). Note that not signature check is performed. - * - * @param child the certificate to check. 
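The removed forge code above (cert.verify, cert.isIssuer, cert.issued) belongs to node-forge's X.509 API, which the standalone node-forge package exposes unchanged. A hedged sketch of how a caller checks that a CA certificate both issued and signed a leaf certificate; caPem and leafPem are placeholder PEM strings:

// Illustrative sketch, not part of the dist diff. Uses the node-forge API
// that the removed bundle code implements; caPem/leafPem are placeholders.
const forge = require('node-forge')

function checkIssuedAndSigned (caPem, leafPem) {
  const ca = forge.pki.certificateFromPem(caPem)
  const leaf = forge.pki.certificateFromPem(leafPem, true /* computeHash */)
  // issued() only compares the leaf's issuer with the CA's subject;
  // verify() additionally checks the signature against the CA's public key
  // and throws when the issuer does not match, hence the short-circuit.
  return ca.issued(leaf) && ca.verify(leaf)
}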
- * - * @return true if this certificate's subject matches the passed - * certificate's issuer. - */ - cert.issued = function(child) { - return child.isIssuer(cert); - }; + // 5. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams. + // TODO + return makeNetworkError('proxy authentication required') + } - /** - * Generates the subjectKeyIdentifier for this certificate as byte buffer. - * - * @return the subjectKeyIdentifier for this certificate as byte buffer. - */ - cert.generateSubjectKeyIdentifier = function() { - /* See: 4.2.1.2 section of the the RFC3280, keyIdentifier is either: - - (1) The keyIdentifier is composed of the 160-bit SHA-1 hash of the - value of the BIT STRING subjectPublicKey (excluding the tag, - length, and number of unused bits). - - (2) The keyIdentifier is composed of a four bit type field with - the value 0100 followed by the least significant 60 bits of the - SHA-1 hash of the value of the BIT STRING subjectPublicKey - (excluding the tag, length, and number of unused bit string bits). - */ - - // skipping the tag, length, and number of unused bits is the same - // as just using the RSAPublicKey (for RSA keys, which are the - // only ones supported) - return pki.getPublicKeyFingerprint(cert.publicKey, {type: 'RSAPublicKey'}); - }; + // 16. If all of the following are true + if ( + // response’s status is 421 + response.status === 421 && + // isNewConnectionFetch is false + !isNewConnectionFetch && + // request’s body is null, or request’s body is non-null and request’s body’s source is non-null + (request.body == null || request.body.source != null) + ) { + // then: - /** - * Verifies the subjectKeyIdentifier extension value for this certificate - * against its public key. If no extension is found, false will be - * returned. - * - * @return true if verified, false if not. - */ - cert.verifySubjectKeyIdentifier = function() { - var oid = oids['subjectKeyIdentifier']; - for(var i = 0; i < cert.extensions.length; ++i) { - var ext = cert.extensions[i]; - if(ext.id === oid) { - var ski = cert.generateSubjectKeyIdentifier().getBytes(); - return (forge.util.hexToBytes(ext.subjectKeyIdentifier) === ski); - } + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) } - return false; - }; - return cert; -}; + // 2. Set response to the result of running HTTP-network-or-cache + // fetch given fetchParams, isAuthenticationFetch, and true. -/** - * Converts an X.509v3 RSA certificate from an ASN.1 object. - * - * Note: If the certificate is to be verified then compute hash should - * be set to true. There is currently no implementation for converting - * a certificate back to ASN.1 so the TBSCertificate part of the ASN.1 - * object needs to be scanned before the cert object is created. - * - * @param obj the asn1 representation of an X.509v3 RSA certificate. - * @param computeHash true to compute the hash for verification. - * - * @return the certificate. - */ -pki.certificateFromAsn1 = function(obj, computeHash) { - // validate certificate and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, x509CertificateValidator, capture, errors)) { - var error = new Error('Cannot read X.509 certificate. 
' + - 'ASN.1 object is not an X509v3 Certificate.'); - error.errors = errors; - throw error; - } - - // get oid - var oid = asn1.derToOid(capture.publicKeyOid); - if(oid !== pki.oids.rsaEncryption) { - throw new Error('Cannot read public key. OID is not RSA.'); - } - - // create certificate - var cert = pki.createCertificate(); - cert.version = capture.certVersion ? - capture.certVersion.charCodeAt(0) : 0; - var serial = forge.util.createBuffer(capture.certSerialNumber); - cert.serialNumber = serial.toHex(); - cert.signatureOid = forge.asn1.derToOid(capture.certSignatureOid); - cert.signatureParameters = _readSignatureParameters( - cert.signatureOid, capture.certSignatureParams, true); - cert.siginfo.algorithmOid = forge.asn1.derToOid(capture.certinfoSignatureOid); - cert.siginfo.parameters = _readSignatureParameters(cert.siginfo.algorithmOid, - capture.certinfoSignatureParams, false); - cert.signature = capture.certSignature; - - var validity = []; - if(capture.certValidity1UTCTime !== undefined) { - validity.push(asn1.utcTimeToDate(capture.certValidity1UTCTime)); - } - if(capture.certValidity2GeneralizedTime !== undefined) { - validity.push(asn1.generalizedTimeToDate( - capture.certValidity2GeneralizedTime)); - } - if(capture.certValidity3UTCTime !== undefined) { - validity.push(asn1.utcTimeToDate(capture.certValidity3UTCTime)); - } - if(capture.certValidity4GeneralizedTime !== undefined) { - validity.push(asn1.generalizedTimeToDate( - capture.certValidity4GeneralizedTime)); - } - if(validity.length > 2) { - throw new Error('Cannot read notBefore/notAfter validity times; more ' + - 'than two times were provided in the certificate.'); - } - if(validity.length < 2) { - throw new Error('Cannot read notBefore/notAfter validity times; they ' + - 'were not provided as either UTCTime or GeneralizedTime.'); - } - cert.validity.notBefore = validity[0]; - cert.validity.notAfter = validity[1]; - - // keep TBSCertificate to preserve signature when exporting - cert.tbsCertificate = capture.tbsCertificate; - - if(computeHash) { - // create digest for OID signature type - cert.md = _createSignatureDigest({ - signatureOid: cert.signatureOid, - type: 'certificate' - }); + // TODO (spec): The spec doesn't specify this but we need to cancel + // the active response before we can start a new one. 
+ // https://github.com/whatwg/fetch/issues/1293 + fetchParams.controller.connection.destroy() - // produce DER formatted TBSCertificate and digest it - var bytes = asn1.toDer(cert.tbsCertificate); - cert.md.update(bytes.getBytes()); + response = await httpNetworkOrCacheFetch( + fetchParams, + isAuthenticationFetch, + true + ) } - // handle issuer, build issuer message digest - var imd = forge.md.sha1.create(); - var ibytes = asn1.toDer(capture.certIssuer); - imd.update(ibytes.getBytes()); - cert.issuer.getField = function(sn) { - return _getAttribute(cert.issuer, sn); - }; - cert.issuer.addField = function(attr) { - _fillMissingFields([attr]); - cert.issuer.attributes.push(attr); - }; - cert.issuer.attributes = pki.RDNAttributesAsArray(capture.certIssuer); - if(capture.certIssuerUniqueId) { - cert.issuer.uniqueId = capture.certIssuerUniqueId; - } - cert.issuer.hash = imd.digest().toHex(); - - // handle subject, build subject message digest - var smd = forge.md.sha1.create(); - var sbytes = asn1.toDer(capture.certSubject); - smd.update(sbytes.getBytes()); - cert.subject.getField = function(sn) { - return _getAttribute(cert.subject, sn); - }; - cert.subject.addField = function(attr) { - _fillMissingFields([attr]); - cert.subject.attributes.push(attr); - }; - cert.subject.attributes = pki.RDNAttributesAsArray(capture.certSubject); - if(capture.certSubjectUniqueId) { - cert.subject.uniqueId = capture.certSubjectUniqueId; + // 17. If isAuthenticationFetch is true, then create an authentication entry + if (isAuthenticationFetch) { + // TODO } - cert.subject.hash = smd.digest().toHex(); - // handle extensions - if(capture.certExtensions) { - cert.extensions = pki.certificateExtensionsFromAsn1(capture.certExtensions); - } else { - cert.extensions = []; + // 18. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-network-fetch +async function httpNetworkFetch ( + fetchParams, + includeCredentials = false, + forceNewConnection = false +) { + assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) + + fetchParams.controller.connection = { + abort: null, + destroyed: false, + destroy (err) { + if (!this.destroyed) { + this.destroyed = true + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } + } } - // convert RSA public key from ASN.1 - cert.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo); + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - return cert; -}; + // 2. Let response be null. + let response = null -/** - * Converts an ASN.1 extensions object (with extension sequences as its - * values) into an array of extension objects with types and values. 
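httpNetworkFetch above starts by installing a connection handle whose destroy() runs at most once and forwards an AbortError to whatever abort callback the transport later registers. The same pattern in isolation, for illustration only; assumes Node 17+ where DOMException is a global:

// Illustrative sketch, not part of the bundle: the connection handle used
// by httpNetworkFetch. destroy() is idempotent and notifies abort().
function makeConnection () {
  return {
    abort: null,
    destroyed: false,
    destroy (err) {
      if (!this.destroyed) {
        this.destroyed = true
        this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))
      }
    }
  }
}

const conn = makeConnection()
conn.abort = (err) => console.log('aborted:', err.name) // logs 'AbortError'
conn.destroy()
conn.destroy() // second call is a no-op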
- * - * Supported extensions: - * - * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 } - * KeyUsage ::= BIT STRING { - * digitalSignature (0), - * nonRepudiation (1), - * keyEncipherment (2), - * dataEncipherment (3), - * keyAgreement (4), - * keyCertSign (5), - * cRLSign (6), - * encipherOnly (7), - * decipherOnly (8) - * } - * - * id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 } - * BasicConstraints ::= SEQUENCE { - * cA BOOLEAN DEFAULT FALSE, - * pathLenConstraint INTEGER (0..MAX) OPTIONAL - * } - * - * subjectAltName EXTENSION ::= { - * SYNTAX GeneralNames - * IDENTIFIED BY id-ce-subjectAltName - * } - * - * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName - * - * GeneralName ::= CHOICE { - * otherName [0] INSTANCE OF OTHER-NAME, - * rfc822Name [1] IA5String, - * dNSName [2] IA5String, - * x400Address [3] ORAddress, - * directoryName [4] Name, - * ediPartyName [5] EDIPartyName, - * uniformResourceIdentifier [6] IA5String, - * IPAddress [7] OCTET STRING, - * registeredID [8] OBJECT IDENTIFIER - * } - * - * OTHER-NAME ::= TYPE-IDENTIFIER - * - * EDIPartyName ::= SEQUENCE { - * nameAssigner [0] DirectoryString {ub-name} OPTIONAL, - * partyName [1] DirectoryString {ub-name} - * } - * - * @param exts the extensions ASN.1 with extension sequences to parse. - * - * @return the array. - */ -pki.certificateExtensionsFromAsn1 = function(exts) { - var rval = []; - for(var i = 0; i < exts.value.length; ++i) { - // get extension sequence - var extseq = exts.value[i]; - for(var ei = 0; ei < extseq.value.length; ++ei) { - rval.push(pki.certificateExtensionFromAsn1(extseq.value[ei])); - } - } + // 3. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo - return rval; -}; + // 4. Let httpCache be the result of determining the HTTP cache partition, + // given request. + // TODO: cache + const httpCache = null -/** - * Parses a single certificate extension from ASN.1. - * - * @param ext the extension in ASN.1 format. - * - * @return the parsed extension as an object. - */ -pki.certificateExtensionFromAsn1 = function(ext) { - // an extension has: - // [0] extnID OBJECT IDENTIFIER - // [1] critical BOOLEAN DEFAULT FALSE - // [2] extnValue OCTET STRING - var e = {}; - e.id = asn1.derToOid(ext.value[0].value); - e.critical = false; - if(ext.value[1].type === asn1.Type.BOOLEAN) { - e.critical = (ext.value[1].value.charCodeAt(0) !== 0x00); - e.value = ext.value[2].value; - } else { - e.value = ext.value[1].value; - } - // if the oid is known, get its name - if(e.id in oids) { - e.name = oids[e.id]; - - // handle key usage - if(e.name === 'keyUsage') { - // get value as BIT STRING - var ev = asn1.fromDer(e.value); - var b2 = 0x00; - var b3 = 0x00; - if(ev.value.length > 1) { - // skip first byte, just indicates unused bits which - // will be padded with 0s anyway - // get bytes with flag bits - b2 = ev.value.charCodeAt(1); - b3 = ev.value.length > 2 ? 
ev.value.charCodeAt(2) : 0; - } - // set flags - e.digitalSignature = (b2 & 0x80) === 0x80; - e.nonRepudiation = (b2 & 0x40) === 0x40; - e.keyEncipherment = (b2 & 0x20) === 0x20; - e.dataEncipherment = (b2 & 0x10) === 0x10; - e.keyAgreement = (b2 & 0x08) === 0x08; - e.keyCertSign = (b2 & 0x04) === 0x04; - e.cRLSign = (b2 & 0x02) === 0x02; - e.encipherOnly = (b2 & 0x01) === 0x01; - e.decipherOnly = (b3 & 0x80) === 0x80; - } else if(e.name === 'basicConstraints') { - // handle basic constraints - // get value as SEQUENCE - var ev = asn1.fromDer(e.value); - // get cA BOOLEAN flag (defaults to false) - if(ev.value.length > 0 && ev.value[0].type === asn1.Type.BOOLEAN) { - e.cA = (ev.value[0].value.charCodeAt(0) !== 0x00); - } else { - e.cA = false; - } - // get path length constraint - var value = null; - if(ev.value.length > 0 && ev.value[0].type === asn1.Type.INTEGER) { - value = ev.value[0].value; - } else if(ev.value.length > 1) { - value = ev.value[1].value; - } - if(value !== null) { - e.pathLenConstraint = asn1.derToInteger(value); - } - } else if(e.name === 'extKeyUsage') { - // handle extKeyUsage - // value is a SEQUENCE of OIDs - var ev = asn1.fromDer(e.value); - for(var vi = 0; vi < ev.value.length; ++vi) { - var oid = asn1.derToOid(ev.value[vi].value); - if(oid in oids) { - e[oids[oid]] = true; - } else { - e[oid] = true; - } - } - } else if(e.name === 'nsCertType') { - // handle nsCertType - // get value as BIT STRING - var ev = asn1.fromDer(e.value); - var b2 = 0x00; - if(ev.value.length > 1) { - // skip first byte, just indicates unused bits which - // will be padded with 0s anyway - // get bytes with flag bits - b2 = ev.value.charCodeAt(1); - } - // set flags - e.client = (b2 & 0x80) === 0x80; - e.server = (b2 & 0x40) === 0x40; - e.email = (b2 & 0x20) === 0x20; - e.objsign = (b2 & 0x10) === 0x10; - e.reserved = (b2 & 0x08) === 0x08; - e.sslCA = (b2 & 0x04) === 0x04; - e.emailCA = (b2 & 0x02) === 0x02; - e.objCA = (b2 & 0x01) === 0x01; - } else if( - e.name === 'subjectAltName' || - e.name === 'issuerAltName') { - // handle subjectAltName/issuerAltName - e.altNames = []; - - // ev is a SYNTAX SEQUENCE - var gn; - var ev = asn1.fromDer(e.value); - for(var n = 0; n < ev.value.length; ++n) { - // get GeneralName - gn = ev.value[n]; - - var altName = { - type: gn.type, - value: gn.value - }; - e.altNames.push(altName); - - // Note: Support for types 1,2,6,7,8 - switch(gn.type) { - // rfc822Name - case 1: - // dNSName - case 2: - // uniformResourceIdentifier (URI) - case 6: - break; - // IPAddress - case 7: - // convert to IPv4/IPv6 string representation - altName.ip = forge.util.bytesToIP(gn.value); - break; - // registeredID - case 8: - altName.oid = asn1.derToOid(gn.value); - break; - default: - // unsupported - } - } - } else if(e.name === 'subjectKeyIdentifier') { - // value is an OCTETSTRING w/the hash of the key-type specific - // public key structure (eg: RSAPublicKey) - var ev = asn1.fromDer(e.value); - e.subjectKeyIdentifier = forge.util.bytesToHex(ev.value); - } + // 5. If httpCache is null, then set request’s cache mode to "no-store". + if (httpCache == null) { + request.cache = 'no-store' } - return e; -}; -/** - * Converts a PKCS#10 certification request (CSR) from an ASN.1 object. - * - * Note: If the certification request is to be verified then compute hash - * should be set to true. 
There is currently no implementation for converting - * a certificate back to ASN.1 so the CertificationRequestInfo part of the - * ASN.1 object needs to be scanned before the csr object is created. - * - * @param obj the asn1 representation of a PKCS#10 certification request (CSR). - * @param computeHash true to compute the hash for verification. - * - * @return the certification request (CSR). - */ -pki.certificationRequestFromAsn1 = function(obj, computeHash) { - // validate certification request and capture data - var capture = {}; - var errors = []; - if(!asn1.validate(obj, certificationRequestValidator, capture, errors)) { - var error = new Error('Cannot read PKCS#10 certificate request. ' + - 'ASN.1 object is not a PKCS#10 CertificationRequest.'); - error.errors = errors; - throw error; - } - - // get oid - var oid = asn1.derToOid(capture.publicKeyOid); - if(oid !== pki.oids.rsaEncryption) { - throw new Error('Cannot read public key. OID is not RSA.'); - } - - // create certification request - var csr = pki.createCertificationRequest(); - csr.version = capture.csrVersion ? capture.csrVersion.charCodeAt(0) : 0; - csr.signatureOid = forge.asn1.derToOid(capture.csrSignatureOid); - csr.signatureParameters = _readSignatureParameters( - csr.signatureOid, capture.csrSignatureParams, true); - csr.siginfo.algorithmOid = forge.asn1.derToOid(capture.csrSignatureOid); - csr.siginfo.parameters = _readSignatureParameters( - csr.siginfo.algorithmOid, capture.csrSignatureParams, false); - csr.signature = capture.csrSignature; - - // keep CertificationRequestInfo to preserve signature when exporting - csr.certificationRequestInfo = capture.certificationRequestInfo; - - if(computeHash) { - // create digest for OID signature type - csr.md = _createSignatureDigest({ - signatureOid: csr.signatureOid, - type: 'certification request' - }); + // 6. Let networkPartitionKey be the result of determining the network + // partition key given request. + // TODO + + // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise + // "no". + const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars - // produce DER formatted CertificationRequestInfo and digest it - var bytes = asn1.toDer(csr.certificationRequestInfo); - csr.md.update(bytes.getBytes()); + // 8. Switch on request’s mode: + if (request.mode === 'websocket') { + // Let connection be the result of obtaining a WebSocket connection, + // given request’s current URL. + // TODO + } else { + // Let connection be the result of obtaining a connection, given + // networkPartitionKey, request’s current URL’s origin, + // includeCredentials, and forceNewConnection. + // TODO } - // handle subject, build subject message digest - var smd = forge.md.sha1.create(); - csr.subject.getField = function(sn) { - return _getAttribute(csr.subject, sn); - }; - csr.subject.addField = function(attr) { - _fillMissingFields([attr]); - csr.subject.attributes.push(attr); - }; - csr.subject.attributes = pki.RDNAttributesAsArray( - capture.certificationRequestInfoSubject, smd); - csr.subject.hash = smd.digest().toHex(); + // 9. Run these steps, but abort when the ongoing fetch is terminated: - // convert RSA public key from ASN.1 - csr.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo); + // 1. If connection is failure, then return a network error. 
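// Illustrative sketch (not from this bundle): the Transfer-Encoding caveat spelled
// out in the spec comments below reduces to a small predicate. When the connection
// is not HTTP/2 and the request body exists but has no re-readable source, its total
// length is unknown up front, so `chunked` framing is required. The `connection`
// object and its `isHttp2` flag are assumptions made only for this sketch.
function needsChunkedTransferEncoding (connection, request) {
  return !connection.isHttp2 &&
    request.body != null &&
    request.body.source == null
}
// e.g. if (needsChunkedTransferEncoding(connection, request)) {
//        request.headersList.append('transfer-encoding', 'chunked')
//      }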
- // convert attributes from ASN.1 - csr.getAttribute = function(sn) { - return _getAttribute(csr, sn); - }; - csr.addAttribute = function(attr) { - _fillMissingFields([attr]); - csr.attributes.push(attr); - }; - csr.attributes = pki.CRIAttributesAsArray( - capture.certificationRequestInfoAttributes || []); + // 2. Set timingInfo’s final connection timing info to the result of + // calling clamp and coarsen connection timing info with connection’s + // timing info, timingInfo’s post-redirect start time, and fetchParams’s + // cross-origin isolated capability. - return csr; -}; + // 3. If connection is not an HTTP/2 connection, request’s body is non-null, + // and request’s body’s source is null, then append (`Transfer-Encoding`, + // `chunked`) to request’s header list. -/** - * Creates an empty certification request (a CSR or certificate signing - * request). Once created, its public key and attributes can be set and then - * it can be signed. - * - * @return the empty certification request. - */ -pki.createCertificationRequest = function() { - var csr = {}; - csr.version = 0x00; - csr.signatureOid = null; - csr.signature = null; - csr.siginfo = {}; - csr.siginfo.algorithmOid = null; - - csr.subject = {}; - csr.subject.getField = function(sn) { - return _getAttribute(csr.subject, sn); - }; - csr.subject.addField = function(attr) { - _fillMissingFields([attr]); - csr.subject.attributes.push(attr); - }; - csr.subject.attributes = []; - csr.subject.hash = null; + // 4. Set timingInfo’s final network-request start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated + // capability. - csr.publicKey = null; - csr.attributes = []; - csr.getAttribute = function(sn) { - return _getAttribute(csr, sn); - }; - csr.addAttribute = function(attr) { - _fillMissingFields([attr]); - csr.attributes.push(attr); - }; - csr.md = null; + // 5. Set response to the result of making an HTTP request over connection + // using request with the following caveats: - /** - * Sets the subject of this certification request. - * - * @param attrs the array of subject attributes to use. - */ - csr.setSubject = function(attrs) { - // set new attributes - _fillMissingFields(attrs); - csr.subject.attributes = attrs; - csr.subject.hash = null; - }; + // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] + // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] - /** - * Sets the attributes of this certification request. - * - * @param attrs the array of attributes to use. - */ - csr.setAttributes = function(attrs) { - // set new attributes - _fillMissingFields(attrs); - csr.attributes = attrs; - }; + // - If request’s body is non-null, and request’s body’s source is null, + // then the user agent may have a buffer of up to 64 kibibytes and store + // a part of request’s body in that buffer. If the user agent reads from + // request’s body beyond that buffer’s size and the user agent needs to + // resend request, then instead return a network error. - /** - * Signs this certification request using the given private key. - * - * @param key the private key to sign with. - * @param md the message digest object to use (defaults to forge.md.sha1). - */ - csr.sign = function(key, md) { - // TODO: get signature OID from private key - csr.md = md || forge.md.sha1.create(); - var algorithmOid = oids[csr.md.algorithm + 'WithRSAEncryption']; - if(!algorithmOid) { - var error = new Error('Could not compute certification request digest. 
' + - 'Unknown message digest algorithm OID.'); - error.algorithm = csr.md.algorithm; - throw error; - } - csr.signatureOid = csr.siginfo.algorithmOid = algorithmOid; + // - Set timingInfo’s final network-response start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated capability, + // immediately after the user agent’s HTTP parser receives the first byte + // of the response (e.g., frame header bytes for HTTP/2 or response status + // line for HTTP/1.x). - // get CertificationRequestInfo, convert to DER - csr.certificationRequestInfo = pki.getCertificationRequestInfo(csr); - var bytes = asn1.toDer(csr.certificationRequestInfo); + // - Wait until all the headers are transmitted. - // digest and sign - csr.md.update(bytes.getBytes()); - csr.signature = key.sign(csr.md); - }; + // - Any responses whose status is in the range 100 to 199, inclusive, + // and is not 101, are to be ignored, except for the purposes of setting + // timingInfo’s final network-response start time above. - /** - * Attempts verify the signature on the passed certification request using - * its public key. - * - * A CSR that has been exported to a file in PEM format can be verified using - * OpenSSL using this command: - * - * openssl req -in -verify -noout -text - * - * @return true if verified, false if not. - */ - csr.verify = function() { - var rval = false; - - var md = csr.md; - if(md === null) { - md = _createSignatureDigest({ - signatureOid: csr.signatureOid, - type: 'certification request' - }); + // - If request’s header list contains `Transfer-Encoding`/`chunked` and + // response is transferred via HTTP/1.0 or older, then return a network + // error. - // produce DER formatted CertificationRequestInfo and digest it - var cri = csr.certificationRequestInfo || - pki.getCertificationRequestInfo(csr); - var bytes = asn1.toDer(cri); - md.update(bytes.getBytes()); - } + // - If the HTTP request results in a TLS client certificate dialog, then: - if(md !== null) { - rval = _verifySignature({ - certificate: csr, md: md, signature: csr.signature - }); - } + // 1. If request’s window is an environment settings object, make the + // dialog available in request’s window. - return rval; - }; + // 2. Otherwise, return a network error. - return csr; -}; + // To transmit request’s body body, run these steps: + let requestBody = null + // 1. If body is null and fetchParams’s process request end-of-body is + // non-null, then queue a fetch task given fetchParams’s process request + // end-of-body and fetchParams’s task destination. + if (request.body == null && fetchParams.processRequestEndOfBody) { + queueMicrotask(() => fetchParams.processRequestEndOfBody()) + } else if (request.body != null) { + // 2. Otherwise, if body is non-null: -/** - * Converts an X.509 subject or issuer to an ASN.1 RDNSequence. - * - * @param obj the subject or issuer (distinguished name). - * - * @return the ASN.1 RDNSequence. - */ -function _dnToAsn1(obj) { - // create an empty RDNSequence - var rval = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - - // iterate over attributes - var attr, set; - var attrs = obj.attributes; - for(var i = 0; i < attrs.length; ++i) { - attr = attrs[i]; - var value = attr.value; - - // reuse tag class for attribute value if available - var valueTagClass = asn1.Type.PRINTABLESTRING; - if('valueTagClass' in attr) { - valueTagClass = attr.valueTagClass; - - if(valueTagClass === asn1.Type.UTF8) { - value = forge.util.encodeUtf8(value); + // 1. 
Let processBodyChunk given bytes be these steps: + const processBodyChunk = async function * (bytes) { + // 1. If the ongoing fetch is terminated, then abort these steps. + if (isCancelled(fetchParams)) { + return } - // FIXME: handle more encodings - } - // create a RelativeDistinguishedName set - // each value in the set is an AttributeTypeAndValue first - // containing the type (an OID) and second the value - set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AttributeType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.type).getBytes()), - // AttributeValue - asn1.create(asn1.Class.UNIVERSAL, valueTagClass, false, value) - ]) - ]); - rval.value.push(set); - } + // 2. Run this step in parallel: transmit bytes. + yield bytes - return rval; -} + // 3. If fetchParams’s process request body is non-null, then run + // fetchParams’s process request body given bytes’s length. + fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) + } -/** - * Gets all printable attributes (typically of an issuer or subject) in a - * simplified JSON format for display. - * - * @param attrs the attributes. - * - * @return the JSON for display. - */ -function _getAttributesAsJson(attrs) { - var rval = {}; - for(var i = 0; i < attrs.length; ++i) { - var attr = attrs[i]; - if(attr.shortName && ( - attr.valueTagClass === asn1.Type.UTF8 || - attr.valueTagClass === asn1.Type.PRINTABLESTRING || - attr.valueTagClass === asn1.Type.IA5STRING)) { - var value = attr.value; - if(attr.valueTagClass === asn1.Type.UTF8) { - value = forge.util.encodeUtf8(attr.value); - } - if(!(attr.shortName in rval)) { - rval[attr.shortName] = value; - } else if(forge.util.isArray(rval[attr.shortName])) { - rval[attr.shortName].push(value); - } else { - rval[attr.shortName] = [rval[attr.shortName], value]; + // 2. Let processEndOfBody be these steps: + const processEndOfBody = () => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return } - } - } - return rval; -} -/** - * Fills in missing fields in attributes. - * - * @param attrs the attributes to fill missing fields in. - */ -function _fillMissingFields(attrs) { - var attr; - for(var i = 0; i < attrs.length; ++i) { - attr = attrs[i]; - - // populate missing name - if(typeof attr.name === 'undefined') { - if(attr.type && attr.type in pki.oids) { - attr.name = pki.oids[attr.type]; - } else if(attr.shortName && attr.shortName in _shortNames) { - attr.name = pki.oids[_shortNames[attr.shortName]]; + // 2. If fetchParams’s process request end-of-body is non-null, + // then run fetchParams’s process request end-of-body. + if (fetchParams.processRequestEndOfBody) { + fetchParams.processRequestEndOfBody() } } - // populate missing type (OID) - if(typeof attr.type === 'undefined') { - if(attr.name && attr.name in pki.oids) { - attr.type = pki.oids[attr.name]; - } else { - var error = new Error('Attribute type not specified.'); - error.attribute = attr; - throw error; + // 3. Let processBodyError given e be these steps: + const processBodyError = (e) => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return } - } - // populate missing shortname - if(typeof attr.shortName === 'undefined') { - if(attr.name && attr.name in _shortNames) { - attr.shortName = _shortNames[attr.name]; + // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. 
+ if (e.name === 'AbortError') { + fetchParams.controller.abort() + } else { + fetchParams.controller.terminate(e) } } - // convert extensions to value - if(attr.type === oids.extensionRequest) { - attr.valueConstructed = true; - attr.valueTagClass = asn1.Type.SEQUENCE; - if(!attr.value && attr.extensions) { - attr.value = []; - for(var ei = 0; ei < attr.extensions.length; ++ei) { - attr.value.push(pki.certificateExtensionToAsn1( - _fillMissingExtensionFields(attr.extensions[ei]))); + // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, + // processBodyError, and fetchParams’s task destination. + requestBody = (async function * () { + try { + for await (const bytes of request.body.stream) { + yield * processBodyChunk(bytes) } + processEndOfBody() + } catch (err) { + processBodyError(err) } + })() + } + + try { + // socket is only provided for websockets + const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) + + if (socket) { + response = makeResponse({ status, statusText, headersList, socket }) + } else { + const iterator = body[Symbol.asyncIterator]() + fetchParams.controller.next = () => iterator.next() + + response = makeResponse({ status, statusText, headersList }) } + } catch (err) { + // 10. If aborted, then: + if (err.name === 'AbortError') { + // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. + fetchParams.controller.connection.destroy() - if(typeof attr.value === 'undefined') { - var error = new Error('Attribute value not specified.'); - error.attribute = attr; - throw error; + // 2. Return the appropriate network error for fetchParams. + return makeAppropriateNetworkError(fetchParams, err) } + + return makeNetworkError(err) } -} -/** - * Fills in missing fields in certificate extensions. - * - * @param e the extension. - * @param [options] the options to use. - * [cert] the certificate the extensions are for. - * - * @return the extension. - */ -function _fillMissingExtensionFields(e, options) { - options = options || {}; + // 11. Let pullAlgorithm be an action that resumes the ongoing fetch + // if it is suspended. + const pullAlgorithm = () => { + fetchParams.controller.resume() + } - // populate missing name - if(typeof e.name === 'undefined') { - if(e.id && e.id in pki.oids) { - e.name = pki.oids[e.id]; - } + // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s + // controller with reason, given reason. + const cancelAlgorithm = (reason) => { + fetchParams.controller.abort(reason) } - // populate missing id - if(typeof e.id === 'undefined') { - if(e.name && e.name in pki.oids) { - e.id = pki.oids[e.name]; - } else { - var error = new Error('Extension ID not specified.'); - error.extension = e; - throw error; - } + // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by + // the user agent. + // TODO + + // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object + // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. + // TODO + + // 15. Let stream be a new ReadableStream. + // 16. Set up stream with pullAlgorithm set to pullAlgorithm, + // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to + // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. 
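// Illustrative sketch (assumptions only, not undici's internal wiring): the spec's
// pullAlgorithm/cancelAlgorithm pair maps naturally onto a ReadableStream whose
// queuing strategy is { highWaterMark: 0, size: () => 1 }, so pull only fires when
// a consumer is actively reading, which matches the strategy used by the stream
// construction that follows. resumeUpstream/abortUpstream stand in for
// fetchParams.controller.resume/abort and are placeholders for this sketch.
const { ReadableStream: SketchReadableStream } = require('node:stream/web')

function makeBackpressuredStream (resumeUpstream, abortUpstream) {
  return new SketchReadableStream(
    {
      async pull () {
        // called whenever the consumer wants more data (desiredSize > 0)
        await resumeUpstream()
      },
      async cancel (reason) {
        // consumer gave up; propagate the cancellation upstream
        abortUpstream(reason)
      }
    },
    { highWaterMark: 0, size: () => 1 }
  )
}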
+ if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) } - if(typeof e.value !== 'undefined') { - return e; - } - - // handle missing value: - - // value is a BIT STRING - if(e.name === 'keyUsage') { - // build flags - var unused = 0; - var b2 = 0x00; - var b3 = 0x00; - if(e.digitalSignature) { - b2 |= 0x80; - unused = 7; - } - if(e.nonRepudiation) { - b2 |= 0x40; - unused = 6; - } - if(e.keyEncipherment) { - b2 |= 0x20; - unused = 5; - } - if(e.dataEncipherment) { - b2 |= 0x10; - unused = 4; - } - if(e.keyAgreement) { - b2 |= 0x08; - unused = 3; - } - if(e.keyCertSign) { - b2 |= 0x04; - unused = 2; - } - if(e.cRLSign) { - b2 |= 0x02; - unused = 1; - } - if(e.encipherOnly) { - b2 |= 0x01; - unused = 0; - } - if(e.decipherOnly) { - b3 |= 0x80; - unused = 7; - } - - // create bit string - var value = String.fromCharCode(unused); - if(b3 !== 0) { - value += String.fromCharCode(b2) + String.fromCharCode(b3); - } else if(b2 !== 0) { - value += String.fromCharCode(b2); - } - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value); - } else if(e.name === 'basicConstraints') { - // basicConstraints is a SEQUENCE - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - // cA BOOLEAN flag defaults to false - if(e.cA) { - e.value.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false, - String.fromCharCode(0xFF))); - } - if('pathLenConstraint' in e) { - e.value.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(e.pathLenConstraint).getBytes())); - } - } else if(e.name === 'extKeyUsage') { - // extKeyUsage is a SEQUENCE of OIDs - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var seq = e.value.value; - for(var key in e) { - if(e[key] !== true) { - continue; + const stream = new ReadableStream( + { + async start (controller) { + fetchParams.controller.controller = controller + }, + async pull (controller) { + await pullAlgorithm(controller) + }, + async cancel (reason) { + await cancelAlgorithm(reason) } - // key is name in OID map - if(key in oids) { - seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, - false, asn1.oidToDer(oids[key]).getBytes())); - } else if(key.indexOf('.') !== -1) { - // assume key is an OID - seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, - false, asn1.oidToDer(key).getBytes())); + }, + { + highWaterMark: 0, + size () { + return 1 } } - } else if(e.name === 'nsCertType') { - // nsCertType is a BIT STRING - // build flags - var unused = 0; - var b2 = 0x00; - - if(e.client) { - b2 |= 0x80; - unused = 7; - } - if(e.server) { - b2 |= 0x40; - unused = 6; - } - if(e.email) { - b2 |= 0x20; - unused = 5; - } - if(e.objsign) { - b2 |= 0x10; - unused = 4; - } - if(e.reserved) { - b2 |= 0x08; - unused = 3; - } - if(e.sslCA) { - b2 |= 0x04; - unused = 2; - } - if(e.emailCA) { - b2 |= 0x02; - unused = 1; - } - if(e.objCA) { - b2 |= 0x01; - unused = 0; - } - - // create bit string - var value = String.fromCharCode(unused); - if(b2 !== 0) { - value += String.fromCharCode(b2); - } - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value); - } else if(e.name === 'subjectAltName' || e.name === 'issuerAltName') { - // SYNTAX SEQUENCE - e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - - var altName; - for(var n = 0; n < e.altNames.length; ++n) { - altName = e.altNames[n]; - var value = altName.value; - // handle IP - if(altName.type === 7 && altName.ip) 
{ - value = forge.util.bytesFromIP(altName.ip); - if(value === null) { - var error = new Error( - 'Extension "ip" value is not a valid IPv4 or IPv6 address.'); - error.extension = e; - throw error; - } - } else if(altName.type === 8) { - // handle OID - if(altName.oid) { - value = asn1.oidToDer(asn1.oidToDer(altName.oid)); - } else { - // deprecated ... convert value to OID - value = asn1.oidToDer(value); + ) + + // 17. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. Set response’s body to a new body whose stream is stream. + response.body = { stream } + + // 2. If response is not a network error and request’s cache mode is + // not "no-store", then update response in httpCache for request. + // TODO + + // 3. If includeCredentials is true and the user agent is not configured + // to block cookies for request (see section 7 of [COOKIES]), then run the + // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on + // the value of each header whose name is a byte-case-insensitive match for + // `Set-Cookie` in response’s header list, if any, and request’s current URL. + // TODO + + // 18. If aborted, then: + // TODO + + // 19. Run these steps in parallel: + + // 1. Run these steps, but abort when fetchParams is canceled: + fetchParams.controller.on('terminated', onAborted) + fetchParams.controller.resume = async () => { + // 1. While true + while (true) { + // 1-3. See onData... + + // 4. Set bytes to the result of handling content codings given + // codings and bytes. + let bytes + let isFailure + try { + const { done, value } = await fetchParams.controller.next() + + if (isAborted(fetchParams)) { + break } - } - e.value.value.push(asn1.create( - asn1.Class.CONTEXT_SPECIFIC, altName.type, false, - value)); - } - } else if(e.name === 'nsComment' && options.cert) { - // sanity check value is ASCII (req'd) and not too big - if(!(/^[\x00-\x7F]*$/.test(e.comment)) || - (e.comment.length < 1) || (e.comment.length > 128)) { - throw new Error('Invalid "nsComment" content.'); - } - // IA5STRING opaque comment - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.IA5STRING, false, e.comment); - } else if(e.name === 'subjectKeyIdentifier' && options.cert) { - var ski = options.cert.generateSubjectKeyIdentifier(); - e.subjectKeyIdentifier = ski.toHex(); - // OCTETSTRING w/digest - e.value = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, ski.getBytes()); - } else if(e.name === 'authorityKeyIdentifier' && options.cert) { - // SYNTAX SEQUENCE - e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var seq = e.value.value; - - if(e.keyIdentifier) { - var keyIdentifier = (e.keyIdentifier === true ? - options.cert.generateSubjectKeyIdentifier().getBytes() : - e.keyIdentifier); - seq.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, false, keyIdentifier)); - } - - if(e.authorityCertIssuer) { - var authorityCertIssuer = [ - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 4, true, [ - _dnToAsn1(e.authorityCertIssuer === true ? - options.cert.issuer : e.authorityCertIssuer) - ]) - ]; - seq.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, authorityCertIssuer)); - } - - if(e.serialNumber) { - var serialNumber = forge.util.hexToBytes(e.serialNumber === true ? 
- options.cert.serialNumber : e.serialNumber); - seq.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, false, serialNumber)); - } - } else if(e.name === 'cRLDistributionPoints') { - e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - var seq = e.value.value; - - // Create sub SEQUENCE of DistributionPointName - var subSeq = asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - - // Create fullName CHOICE - var fullNameGeneralNames = asn1.create( - asn1.Class.CONTEXT_SPECIFIC, 0, true, []); - var altName; - for(var n = 0; n < e.altNames.length; ++n) { - altName = e.altNames[n]; - var value = altName.value; - // handle IP - if(altName.type === 7 && altName.ip) { - value = forge.util.bytesFromIP(altName.ip); - if(value === null) { - var error = new Error( - 'Extension "ip" value is not a valid IPv4 or IPv6 address.'); - error.extension = e; - throw error; - } - } else if(altName.type === 8) { - // handle OID - if(altName.oid) { - value = asn1.oidToDer(asn1.oidToDer(altName.oid)); + + bytes = done ? undefined : value + } catch (err) { + if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { + // zlib doesn't like empty streams. + bytes = undefined } else { - // deprecated ... convert value to OID - value = asn1.oidToDer(value); + bytes = err + + // err may be propagated from the result of calling readablestream.cancel, + // which might not be an error. https://github.com/nodejs/undici/issues/2009 + isFailure = true } } - fullNameGeneralNames.value.push(asn1.create( - asn1.Class.CONTEXT_SPECIFIC, altName.type, false, - value)); - } - // Add to the parent SEQUENCE - subSeq.value.push(asn1.create( - asn1.Class.CONTEXT_SPECIFIC, 0, true, [fullNameGeneralNames])); - seq.push(subSeq); - } + if (bytes === undefined) { + // 2. Otherwise, if the bytes transmission for response’s message + // body is done normally and stream is readable, then close + // stream, finalize response for fetchParams and response, and + // abort these in-parallel steps. + readableStreamClose(fetchParams.controller.controller) - // ensure value has been defined by now - if(typeof e.value === 'undefined') { - var error = new Error('Extension value not specified.'); - error.extension = e; - throw error; - } + finalizeResponse(fetchParams, response) - return e; -} + return + } -/** - * Convert signature parameters object to ASN.1 - * - * @param {String} oid Signature algorithm OID - * @param params The signature parametrs object - * @return ASN.1 object representing signature parameters - */ -function _signatureParametersToAsn1(oid, params) { - switch(oid) { - case oids['RSASSA-PSS']: - var parts = []; - - if(params.hash.algorithmOid !== undefined) { - parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(params.hash.algorithmOid).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]) - ])); + // 5. Increase timingInfo’s decoded body size by bytes’s length. + timingInfo.decodedBodySize += bytes?.byteLength ?? 0 + + // 6. If bytes is failure, then terminate fetchParams’s controller. 
+ if (isFailure) { + fetchParams.controller.terminate(bytes) + return } - if(params.mgf.algorithmOid !== undefined) { - parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(params.mgf.algorithmOid).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(params.mgf.hash.algorithmOid).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') - ]) - ]) - ])); + // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes + // into stream. + fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + + // 8. If stream is errored, then terminate the ongoing fetch. + if (isErrored(stream)) { + fetchParams.controller.terminate() + return } - if(params.saltLength !== undefined) { - parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(params.saltLength).getBytes()) - ])); + // 9. If stream doesn’t need more data ask the user agent to suspend + // the ongoing fetch. + if (!fetchParams.controller.controller.desiredSize) { + return } + } + } - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, parts); + // 2. If aborted, then: + function onAborted (reason) { + // 2. If fetchParams is aborted, then: + if (isAborted(fetchParams)) { + // 1. Set response’s aborted flag. + response.aborted = true - default: - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, ''); + // 2. If stream is readable, then error stream with the result of + // deserialize a serialized abort reason given fetchParams’s + // controller’s serialized abort reason and an + // implementation-defined realm. + if (isReadable(stream)) { + fetchParams.controller.controller.error( + fetchParams.controller.serializedAbortReason + ) + } + } else { + // 3. Otherwise, if stream is readable, error stream with a TypeError. + if (isReadable(stream)) { + fetchParams.controller.controller.error(new TypeError('terminated', { + cause: isErrorLike(reason) ? reason : undefined + })) + } + } + + // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. + // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. + fetchParams.controller.connection.destroy() } -} -/** - * Converts a certification request's attributes to an ASN.1 set of - * CRIAttributes. - * - * @param csr certification request. - * - * @return the ASN.1 set of CRIAttributes. - */ -function _CRIAttributesToAsn1(csr) { - // create an empty context-specific container - var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, []); + // 20. Return response. + return response - // no attributes, return empty container - if(csr.attributes.length === 0) { - return rval; - } + async function dispatch ({ body }) { + const url = requestCurrentURL(request) + /** @type {import('../..').Agent} */ + const agent = fetchParams.controller.dispatcher - // each attribute has a sequence with a type and a set of values - var attrs = csr.attributes; - for(var i = 0; i < attrs.length; ++i) { - var attr = attrs[i]; - var value = attr.value; + return new Promise((resolve, reject) => agent.dispatch( + { + path: url.pathname + url.search, + origin: url.origin, + method: request.method, + body: fetchParams.controller.dispatcher.isMockActive ? 
request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, + maxRedirections: 0, + upgrade: request.mode === 'websocket' ? 'websocket' : undefined + }, + { + body: null, + abort: null, - // reuse tag class for attribute value if available - var valueTagClass = asn1.Type.UTF8; - if('valueTagClass' in attr) { - valueTagClass = attr.valueTagClass; - } - if(valueTagClass === asn1.Type.UTF8) { - value = forge.util.encodeUtf8(value); - } - var valueConstructed = false; - if('valueConstructed' in attr) { - valueConstructed = attr.valueConstructed; - } - // FIXME: handle more encodings + onConnect (abort) { + // TODO (fix): Do we need connection here? + const { connection } = fetchParams.controller - // create a RelativeDistinguishedName set - // each value in the set is an AttributeTypeAndValue first - // containing the type (an OID) and second the value - var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // AttributeType - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(attr.type).getBytes()), - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ - // AttributeValue - asn1.create( - asn1.Class.UNIVERSAL, valueTagClass, valueConstructed, value) - ]) - ]); - rval.value.push(seq); - } + if (connection.destroyed) { + abort(new DOMException('The operation was aborted.', 'AbortError')) + } else { + fetchParams.controller.on('terminated', abort) + this.abort = connection.abort = abort + } + }, - return rval; -} + onHeaders (status, headersList, resume, statusText) { + if (status < 200) { + return + } -var jan_1_1950 = new Date('1950-01-01T00:00:00Z'); -var jan_1_2050 = new Date('2050-01-01T00:00:00Z'); + let codings = [] + let location = '' + + const headers = new Headers() + + // For H2, the headers are a plain JS object + // We distinguish between them and iterate accordingly + if (Array.isArray(headersList)) { + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()) + } else if (key.toLowerCase() === 'location') { + location = val + } -/** - * Converts a Date object to ASN.1 - * Handles the different format before and after 1st January 2050 - * - * @param date date object. - * - * @return the ASN.1 object representing the date. - */ -function _dateToAsn1(date) { - if(date >= jan_1_1950 && date < jan_1_2050) { - return asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false, - asn1.dateToUtcTime(date)); - } else { - return asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false, - asn1.dateToGeneralizedTime(date)); - } -} + headers[kHeadersList].append(key, val) + } + } else { + const keys = Object.keys(headersList) + for (const key of keys) { + const val = headersList[key] + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() + } else if (key.toLowerCase() === 'location') { + location = val + } -/** - * Gets the ASN.1 TBSCertificate part of an X.509v3 certificate. - * - * @param cert the certificate. - * - * @return the asn1 TBSCertificate. 
- */ -pki.getTBSCertificate = function(cert) { - // TBSCertificate - var notBefore = _dateToAsn1(cert.validity.notBefore); - var notAfter = _dateToAsn1(cert.validity.notAfter); - var tbs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ - // integer - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(cert.version).getBytes()) - ]), - // serialNumber - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - forge.util.hexToBytes(cert.serialNumber)), - // signature - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(cert.siginfo.algorithmOid).getBytes()), - // parameters - _signatureParametersToAsn1( - cert.siginfo.algorithmOid, cert.siginfo.parameters) - ]), - // issuer - _dnToAsn1(cert.issuer), - // validity - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - notBefore, - notAfter - ]), - // subject - _dnToAsn1(cert.subject), - // SubjectPublicKeyInfo - pki.publicKeyToAsn1(cert.publicKey) - ]); + headers[kHeadersList].append(key, val) + } + } - if(cert.issuer.uniqueId) { - // issuerUniqueID (optional) - tbs.value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - // TODO: support arbitrary bit length ids - String.fromCharCode(0x00) + - cert.issuer.uniqueId - ) - ]) - ); - } - if(cert.subject.uniqueId) { - // subjectUniqueID (optional) - tbs.value.push( - asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [ - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - // TODO: support arbitrary bit length ids - String.fromCharCode(0x00) + - cert.subject.uniqueId - ) - ]) - ); - } + this.body = new Readable({ read: resume }) + + const decoders = [] + + const willFollow = request.redirect === 'follow' && + location && + redirectStatusSet.has(status) + + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding + if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { + for (const coding of codings) { + // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 + if (coding === 'x-gzip' || coding === 'gzip') { + decoders.push(zlib.createGunzip({ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) + } else if (coding === 'deflate') { + decoders.push(zlib.createInflate()) + } else if (coding === 'br') { + decoders.push(zlib.createBrotliDecompress()) + } else { + decoders.length = 0 + break + } + } + } - if(cert.extensions.length > 0) { - // extensions (optional) - tbs.value.push(pki.certificateExtensionsToAsn1(cert.extensions)); - } + resolve({ + status, + statusText, + headersList: headers[kHeadersList], + body: decoders.length + ? pipeline(this.body, ...decoders, () => { }) + : this.body.on('error', () => {}) + }) - return tbs; -}; + return true + }, -/** - * Gets the ASN.1 CertificationRequestInfo part of a - * PKCS#10 CertificationRequest. - * - * @param csr the certification request. - * - * @return the asn1 CertificationRequestInfo. 
- */ -pki.getCertificationRequestInfo = function(csr) { - // CertificationRequestInfo - var cri = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // version - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, - asn1.integerToDer(csr.version).getBytes()), - // subject - _dnToAsn1(csr.subject), - // SubjectPublicKeyInfo - pki.publicKeyToAsn1(csr.publicKey), - // attributes - _CRIAttributesToAsn1(csr) - ]); + onData (chunk) { + if (fetchParams.controller.dump) { + return + } - return cri; -}; + // 1. If one or more bytes have been transmitted from response’s + // message body, then: -/** - * Converts a DistinguishedName (subject or issuer) to an ASN.1 object. - * - * @param dn the DistinguishedName. - * - * @return the asn1 representation of a DistinguishedName. - */ -pki.distinguishedNameToAsn1 = function(dn) { - return _dnToAsn1(dn); -}; + // 1. Let bytes be the transmitted bytes. + const bytes = chunk -/** - * Converts an X.509v3 RSA certificate to an ASN.1 object. - * - * @param cert the certificate. - * - * @return the asn1 representation of an X.509v3 RSA certificate. - */ -pki.certificateToAsn1 = function(cert) { - // prefer cached TBSCertificate over generating one - var tbsCertificate = cert.tbsCertificate || pki.getTBSCertificate(cert); - - // Certificate - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // TBSCertificate - tbsCertificate, - // AlgorithmIdentifier (signature algorithm) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(cert.signatureOid).getBytes()), - // parameters - _signatureParametersToAsn1(cert.signatureOid, cert.signatureParameters) - ]), - // SignatureValue - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - String.fromCharCode(0x00) + cert.signature) - ]); -}; + // 2. Let codings be the result of extracting header list values + // given `Content-Encoding` and response’s header list. + // See pullAlgorithm. -/** - * Converts X.509v3 certificate extensions to ASN.1. - * - * @param exts the extensions to convert. - * - * @return the extensions in ASN.1 format. - */ -pki.certificateExtensionsToAsn1 = function(exts) { - // create top-level extension container - var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 3, true, []); + // 3. Increase timingInfo’s encoded body size by bytes’s length. + timingInfo.encodedBodySize += bytes.byteLength - // create extension sequence (stores a sequence for each extension) - var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); - rval.value.push(seq); + // 4. See pullAlgorithm... - for(var i = 0; i < exts.length; ++i) { - seq.value.push(pki.certificateExtensionToAsn1(exts[i])); - } + return this.body.push(bytes) + }, - return rval; -}; + onComplete () { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } -/** - * Converts a single certificate extension to ASN.1. - * - * @param ext the extension to convert. - * - * @return the extension in ASN.1 format. 
- */ -pki.certificateExtensionToAsn1 = function(ext) { - // create a sequence for each extension - var extseq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + fetchParams.controller.ended = true - // extnID (OID) - extseq.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(ext.id).getBytes())); + this.body.push(null) + }, - // critical defaults to false - if(ext.critical) { - // critical BOOLEAN DEFAULT FALSE - extseq.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false, - String.fromCharCode(0xFF))); - } + onError (error) { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } - var value = ext.value; - if(typeof ext.value !== 'string') { - // value is asn.1 - value = asn1.toDer(value).getBytes(); - } + this.body?.destroy(error) - // extnValue (OCTET STRING) - extseq.value.push(asn1.create( - asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, value)); + fetchParams.controller.terminate(error) - return extseq; -}; + reject(error) + }, -/** - * Converts a PKCS#10 certification request to an ASN.1 object. - * - * @param csr the certification request. - * - * @return the asn1 representation of a certification request. - */ -pki.certificationRequestToAsn1 = function(csr) { - // prefer cached CertificationRequestInfo over generating one - var cri = csr.certificationRequestInfo || - pki.getCertificationRequestInfo(csr); - - // Certificate - return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // CertificationRequestInfo - cri, - // AlgorithmIdentifier (signature algorithm) - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ - // algorithm - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, - asn1.oidToDer(csr.signatureOid).getBytes()), - // parameters - _signatureParametersToAsn1(csr.signatureOid, csr.signatureParameters) - ]), - // signature - asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, - String.fromCharCode(0x00) + csr.signature) - ]); -}; + onUpgrade (status, headersList, socket) { + if (status !== 101) { + return + } -/** - * Creates a CA store. - * - * @param certs an optional array of certificate objects or PEM-formatted - * certificate strings to add to the CA store. - * - * @return the CA store. - */ -pki.createCaStore = function(certs) { - // create CA store - var caStore = { - // stored certificates - certs: {} - }; + const headers = new Headers() - /** - * Gets the certificate that issued the passed certificate or its - * 'parent'. - * - * @param cert the certificate to get the parent for. - * - * @return the parent certificate or null if none was found. - */ - caStore.getIssuer = function(cert) { - var rval = getBySubject(cert.issuer); - - // see if there are multiple matches - /*if(forge.util.isArray(rval)) { - // TODO: resolve multiple matches by checking - // authorityKey/subjectKey/issuerUniqueID/other identifiers, etc. - // FIXME: or alternatively do authority key mapping - // if possible (X.509v1 certs can't work?) - throw new Error('Resolving multiple issuer matches not implemented yet.'); - }*/ - - return rval; - }; + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') - /** - * Adds a trusted certificate to the store. - * - * @param cert the certificate to add as a trusted certificate (either a - * pki.certificate object or a PEM-formatted certificate). 
- */ - caStore.addCertificate = function(cert) { - // convert from pem if necessary - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); - } + headers[kHeadersList].append(key, val) + } - ensureSubjectHasHash(cert.subject); + resolve({ + status, + statusText: STATUS_CODES[status], + headersList: headers[kHeadersList], + socket + }) - if(!caStore.hasCertificate(cert)) { // avoid duplicate certificates in store - if(cert.subject.hash in caStore.certs) { - // subject hash already exists, append to array - var tmp = caStore.certs[cert.subject.hash]; - if(!forge.util.isArray(tmp)) { - tmp = [tmp]; + return true } - tmp.push(cert); - caStore.certs[cert.subject.hash] = tmp; - } else { - caStore.certs[cert.subject.hash] = cert; } - } - }; + )) + } +} - /** - * Checks to see if the given certificate is in the store. - * - * @param cert the certificate to check (either a pki.certificate or a - * PEM-formatted certificate). - * - * @return true if the certificate is in the store, false if not. - */ - caStore.hasCertificate = function(cert) { - // convert from pem if necessary - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); - } +module.exports = { + fetch, + Fetch, + fetching, + finalizeAndReportTiming +} - var match = getBySubject(cert.subject); - if(!match) { - return false; + +/***/ }), + +/***/ 8359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* globals AbortController */ + + + +const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(1472) +const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(554) +const { FinalizationRegistry } = __nccwpck_require__(6436)() +const util = __nccwpck_require__(3983) +const { + isValidHTTPToken, + sameOrigin, + normalizeMethod, + makePolicyContainer, + normalizeMethodRecord +} = __nccwpck_require__(2538) +const { + forbiddenMethodsSet, + corsSafeListedMethodsSet, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + requestDuplex +} = __nccwpck_require__(1037) +const { kEnumerableProperty } = util +const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861) +const { webidl } = __nccwpck_require__(1744) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { URLSerializer } = __nccwpck_require__(685) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361) + +let TransformStream = globalThis.TransformStream + +const kAbortController = Symbol('abortController') + +const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { + signal.removeEventListener('abort', abort) +}) + +// https://fetch.spec.whatwg.org/#request-class +class Request { + // https://fetch.spec.whatwg.org/#dom-request + constructor (input, init = {}) { + if (input === kConstruct) { + return } - if(!forge.util.isArray(match)) { - match = [match]; + + webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) + + input = webidl.converters.RequestInfo(input) + init = webidl.converters.RequestInit(init) + + // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object + this[kRealm] = { + settingsObject: { + baseUrl: getGlobalOrigin(), + get origin () { + return this.baseUrl?.origin + }, + policyContainer: makePolicyContainer() + } } - // compare DER-encoding of certificates - var der1 = 
asn1.toDer(pki.certificateToAsn1(cert)).getBytes(); - for(var i = 0; i < match.length; ++i) { - var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes(); - if(der1 === der2) { - return true; + + // 1. Let request be null. + let request = null + + // 2. Let fallbackMode be null. + let fallbackMode = null + + // 3. Let baseURL be this’s relevant settings object’s API base URL. + const baseUrl = this[kRealm].settingsObject.baseUrl + + // 4. Let signal be null. + let signal = null + + // 5. If input is a string, then: + if (typeof input === 'string') { + // 1. Let parsedURL be the result of parsing input with baseURL. + // 2. If parsedURL is failure, then throw a TypeError. + let parsedURL + try { + parsedURL = new URL(input, baseUrl) + } catch (err) { + throw new TypeError('Failed to parse URL from ' + input, { cause: err }) } + + // 3. If parsedURL includes credentials, then throw a TypeError. + if (parsedURL.username || parsedURL.password) { + throw new TypeError( + 'Request cannot be constructed from a URL that includes credentials: ' + + input + ) + } + + // 4. Set request to a new request whose URL is parsedURL. + request = makeRequest({ urlList: [parsedURL] }) + + // 5. Set fallbackMode to "cors". + fallbackMode = 'cors' + } else { + // 6. Otherwise: + + // 7. Assert: input is a Request object. + assert(input instanceof Request) + + // 8. Set request to input’s request. + request = input[kState] + + // 9. Set signal to input’s signal. + signal = input[kSignal] } - return false; - }; - /** - * Lists all of the certificates kept in the store. - * - * @return an array of all of the pki.certificate objects in the store. - */ - caStore.listAllCertificates = function() { - var certList = []; - - for(var hash in caStore.certs) { - if(caStore.certs.hasOwnProperty(hash)) { - var value = caStore.certs[hash]; - if(!forge.util.isArray(value)) { - certList.push(value); - } else { - for(var i = 0; i < value.length; ++i) { - certList.push(value[i]); - } - } + // 7. Let origin be this’s relevant settings object’s origin. + const origin = this[kRealm].settingsObject.origin + + // 8. Let window be "client". + let window = 'client' + + // 9. If request’s window is an environment settings object and its origin + // is same origin with origin, then set window to request’s window. + if ( + request.window?.constructor?.name === 'EnvironmentSettingsObject' && + sameOrigin(request.window, origin) + ) { + window = request.window + } + + // 10. If init["window"] exists and is non-null, then throw a TypeError. + if (init.window != null) { + throw new TypeError(`'window' option '${window}' must be null`) + } + + // 11. If init["window"] exists, then set window to "no-window". + if ('window' in init) { + window = 'no-window' + } + + // 12. Set request to a new request with the following properties: + request = makeRequest({ + // URL request’s URL. + // undici implementation note: this is set as the first item in request's urlList in makeRequest + // method request’s method. + method: request.method, + // header list A copy of request’s header list. + // undici implementation note: headersList is cloned in makeRequest + headersList: request.headersList, + // unsafe-request flag Set. + unsafeRequest: request.unsafeRequest, + // client This’s relevant settings object. + client: this[kRealm].settingsObject, + // window window. + window, + // priority request’s priority. + priority: request.priority, + // origin request’s origin. 
The propagation of the origin is only significant for navigation requests + // being handled by a service worker. In this scenario a request can have an origin that is different + // from the current client. + origin: request.origin, + // referrer request’s referrer. + referrer: request.referrer, + // referrer policy request’s referrer policy. + referrerPolicy: request.referrerPolicy, + // mode request’s mode. + mode: request.mode, + // credentials mode request’s credentials mode. + credentials: request.credentials, + // cache mode request’s cache mode. + cache: request.cache, + // redirect mode request’s redirect mode. + redirect: request.redirect, + // integrity metadata request’s integrity metadata. + integrity: request.integrity, + // keepalive request’s keepalive. + keepalive: request.keepalive, + // reload-navigation flag request’s reload-navigation flag. + reloadNavigation: request.reloadNavigation, + // history-navigation flag request’s history-navigation flag. + historyNavigation: request.historyNavigation, + // URL list A clone of request’s URL list. + urlList: [...request.urlList] + }) + + const initHasKey = Object.keys(init).length !== 0 + + // 13. If init is not empty, then: + if (initHasKey) { + // 1. If request’s mode is "navigate", then set it to "same-origin". + if (request.mode === 'navigate') { + request.mode = 'same-origin' } + + // 2. Unset request’s reload-navigation flag. + request.reloadNavigation = false + + // 3. Unset request’s history-navigation flag. + request.historyNavigation = false + + // 4. Set request’s origin to "client". + request.origin = 'client' + + // 5. Set request’s referrer to "client" + request.referrer = 'client' + + // 6. Set request’s referrer policy to the empty string. + request.referrerPolicy = '' + + // 7. Set request’s URL to request’s current URL. + request.url = request.urlList[request.urlList.length - 1] + + // 8. Set request’s URL list to « request’s URL ». + request.urlList = [request.url] } - return certList; - }; + // 14. If init["referrer"] exists, then: + if (init.referrer !== undefined) { + // 1. Let referrer be init["referrer"]. + const referrer = init.referrer - /** - * Removes a certificate from the store. - * - * @param cert the certificate to remove (either a pki.certificate or a - * PEM-formatted certificate). - * - * @return the certificate that was removed or null if the certificate - * wasn't in store. - */ - caStore.removeCertificate = function(cert) { - var result; + // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". + if (referrer === '') { + request.referrer = 'no-referrer' + } else { + // 1. Let parsedReferrer be the result of parsing referrer with + // baseURL. + // 2. If parsedReferrer is failure, then throw a TypeError. + let parsedReferrer + try { + parsedReferrer = new URL(referrer, baseUrl) + } catch (err) { + throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) + } - // convert from pem if necessary - if(typeof cert === 'string') { - cert = forge.pki.certificateFromPem(cert); + // 3. If one of the following is true + // - parsedReferrer’s scheme is "about" and path is the string "client" + // - parsedReferrer’s origin is not same origin with origin + // then set request’s referrer to "client". + if ( + (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || + (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + ) { + request.referrer = 'client' + } else { + // 4. 
Otherwise, set request’s referrer to parsedReferrer. + request.referrer = parsedReferrer + } + } } - ensureSubjectHasHash(cert.subject); - if(!caStore.hasCertificate(cert)) { - return null; + + // 15. If init["referrerPolicy"] exists, then set request’s referrer policy + // to it. + if (init.referrerPolicy !== undefined) { + request.referrerPolicy = init.referrerPolicy } - var match = getBySubject(cert.subject); + // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. + let mode + if (init.mode !== undefined) { + mode = init.mode + } else { + mode = fallbackMode + } - if(!forge.util.isArray(match)) { - result = caStore.certs[cert.subject.hash]; - delete caStore.certs[cert.subject.hash]; - return result; + // 17. If mode is "navigate", then throw a TypeError. + if (mode === 'navigate') { + throw webidl.errors.exception({ + header: 'Request constructor', + message: 'invalid request mode navigate.' + }) } - // compare DER-encoding of certificates - var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes(); - for(var i = 0; i < match.length; ++i) { - var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes(); - if(der1 === der2) { - result = match[i]; - match.splice(i, 1); - } + // 18. If mode is non-null, set request’s mode to mode. + if (mode != null) { + request.mode = mode } - if(match.length === 0) { - delete caStore.certs[cert.subject.hash]; + + // 19. If init["credentials"] exists, then set request’s credentials mode + // to it. + if (init.credentials !== undefined) { + request.credentials = init.credentials } - return result; - }; + // 18. If init["cache"] exists, then set request’s cache mode to it. + if (init.cache !== undefined) { + request.cache = init.cache + } - function getBySubject(subject) { - ensureSubjectHasHash(subject); - return caStore.certs[subject.hash] || null; - } + // 21. If request’s cache mode is "only-if-cached" and request’s mode is + // not "same-origin", then throw a TypeError. + if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { + throw new TypeError( + "'only-if-cached' can be set only with 'same-origin' mode" + ) + } - function ensureSubjectHasHash(subject) { - // produce subject hash if it doesn't exist - if(!subject.hash) { - var md = forge.md.sha1.create(); - subject.attributes = pki.RDNAttributesAsArray(_dnToAsn1(subject), md); - subject.hash = md.digest().toHex(); + // 22. If init["redirect"] exists, then set request’s redirect mode to it. + if (init.redirect !== undefined) { + request.redirect = init.redirect } - } - // auto-add passed in certs - if(certs) { - // parse PEM-formatted certificates as necessary - for(var i = 0; i < certs.length; ++i) { - var cert = certs[i]; - caStore.addCertificate(cert); + // 23. If init["integrity"] exists, then set request’s integrity metadata to it. + if (init.integrity != null) { + request.integrity = String(init.integrity) } - } - return caStore; -}; + // 24. If init["keepalive"] exists, then set request’s keepalive to it. + if (init.keepalive !== undefined) { + request.keepalive = Boolean(init.keepalive) + } -/** - * Certificate verification errors, based on TLS. - */ -pki.certificateError = { - bad_certificate: 'forge.pki.BadCertificate', - unsupported_certificate: 'forge.pki.UnsupportedCertificate', - certificate_revoked: 'forge.pki.CertificateRevoked', - certificate_expired: 'forge.pki.CertificateExpired', - certificate_unknown: 'forge.pki.CertificateUnknown', - unknown_ca: 'forge.pki.UnknownCertificateAuthority' -}; + // 25. 
If init["method"] exists, then: + if (init.method !== undefined) { + // 1. Let method be init["method"]. + let method = init.method -/** - * Verifies a certificate chain against the given Certificate Authority store - * with an optional custom verify callback. - * - * @param caStore a certificate store to verify against. - * @param chain the certificate chain to verify, with the root or highest - * authority at the end (an array of certificates). - * @param options a callback to be called for every certificate in the chain or - * an object with: - * verify a callback to be called for every certificate in the - * chain - * validityCheckDate the date against which the certificate - * validity period should be checked. Pass null to not check - * the validity period. By default, the current date is used. - * - * The verify callback has the following signature: - * - * verified - Set to true if certificate was verified, otherwise the - * pki.certificateError for why the certificate failed. - * depth - The current index in the chain, where 0 is the end point's cert. - * certs - The certificate chain, *NOTE* an empty chain indicates an anonymous - * end point. - * - * The function returns true on success and on failure either the appropriate - * pki.certificateError or an object with 'error' set to the appropriate - * pki.certificateError and 'message' set to a custom error message. - * - * @return true if successful, error thrown if not. - */ -pki.verifyCertificateChain = function(caStore, chain, options) { - /* From: RFC3280 - Internet X.509 Public Key Infrastructure Certificate - Section 6: Certification Path Validation - See inline parentheticals related to this particular implementation. - - The primary goal of path validation is to verify the binding between - a subject distinguished name or a subject alternative name and subject - public key, as represented in the end entity certificate, based on the - public key of the trust anchor. This requires obtaining a sequence of - certificates that support that binding. That sequence should be provided - in the passed 'chain'. The trust anchor should be in the given CA - store. The 'end entity' certificate is the certificate provided by the - end point (typically a server) and is the first in the chain. - - To meet this goal, the path validation process verifies, among other - things, that a prospective certification path (a sequence of n - certificates or a 'chain') satisfies the following conditions: - - (a) for all x in {1, ..., n-1}, the subject of certificate x is - the issuer of certificate x+1; - - (b) certificate 1 is issued by the trust anchor; - - (c) certificate n is the certificate to be validated; and - - (d) for all x in {1, ..., n}, the certificate was valid at the - time in question. - - Note that here 'n' is index 0 in the chain and 1 is the last certificate - in the chain and it must be signed by a certificate in the connection's - CA store. - - The path validation process also determines the set of certificate - policies that are valid for this path, based on the certificate policies - extension, policy mapping extension, policy constraints extension, and - inhibit any-policy extension. - - Note: Policy mapping extension not supported (Not Required). - - Note: If the certificate has an unsupported critical extension, then it - must be rejected. - - Note: A certificate is self-issued if the DNs that appear in the subject - and issuer fields are identical and are not empty. 
- - The path validation algorithm assumes the following seven inputs are - provided to the path processing logic. What this specific implementation - will use is provided parenthetically: - - (a) a prospective certification path of length n (the 'chain') - (b) the current date/time: ('now'). - (c) user-initial-policy-set: A set of certificate policy identifiers - naming the policies that are acceptable to the certificate user. - The user-initial-policy-set contains the special value any-policy - if the user is not concerned about certificate policy - (Not implemented. Any policy is accepted). - (d) trust anchor information, describing a CA that serves as a trust - anchor for the certification path. The trust anchor information - includes: - - (1) the trusted issuer name, - (2) the trusted public key algorithm, - (3) the trusted public key, and - (4) optionally, the trusted public key parameters associated - with the public key. - - (Trust anchors are provided via certificates in the CA store). - - The trust anchor information may be provided to the path processing - procedure in the form of a self-signed certificate. The trusted anchor - information is trusted because it was delivered to the path processing - procedure by some trustworthy out-of-band procedure. If the trusted - public key algorithm requires parameters, then the parameters are - provided along with the trusted public key (No parameters used in this - implementation). - - (e) initial-policy-mapping-inhibit, which indicates if policy mapping is - allowed in the certification path. - (Not implemented, no policy checking) - - (f) initial-explicit-policy, which indicates if the path must be valid - for at least one of the certificate policies in the user-initial- - policy-set. - (Not implemented, no policy checking) - - (g) initial-any-policy-inhibit, which indicates whether the - anyPolicy OID should be processed if it is included in a - certificate. - (Not implemented, so any policy is valid provided that it is - not marked as critical) */ - - /* Basic Path Processing: - - For each certificate in the 'chain', the following is checked: - - 1. The certificate validity period includes the current time. - 2. The certificate was signed by its parent (where the parent is either - the next in the chain or from the CA store). Allow processing to - continue to the next step if no parent is found but the certificate is - in the CA store. - 3. TODO: The certificate has not been revoked. - 4. The certificate issuer name matches the parent's subject name. - 5. TODO: If the certificate is self-issued and not the final certificate - in the chain, skip this step, otherwise verify that the subject name - is within one of the permitted subtrees of X.500 distinguished names - and that each of the alternative names in the subjectAltName extension - (critical or non-critical) is within one of the permitted subtrees for - that name type. - 6. TODO: If the certificate is self-issued and not the final certificate - in the chain, skip this step, otherwise verify that the subject name - is not within one of the excluded subtrees for X.500 distinguished - names and none of the subjectAltName extension names are excluded for - that name type. - 7. The other steps in the algorithm for basic path processing involve - handling the policy extension which is not presently supported in this - implementation. Instead, if a critical policy extension is found, the - certificate is rejected as not supported. - 8. 
If the certificate is not the first or if its the only certificate in - the chain (having no parent from the CA store or is self-signed) and it - has a critical key usage extension, verify that the keyCertSign bit is - set. If the key usage extension exists, verify that the basic - constraints extension exists. If the basic constraints extension exists, - verify that the cA flag is set. If pathLenConstraint is set, ensure that - the number of certificates that precede in the chain (come earlier - in the chain as implemented below), excluding the very first in the - chain (typically the end-entity one), isn't greater than the - pathLenConstraint. This constraint limits the number of intermediate - CAs that may appear below a CA before only end-entity certificates - may be issued. */ - - // if a verify callback is passed as the third parameter, package it within - // the options object. This is to support a legacy function signature that - // expected the verify callback as the third parameter. - if(typeof options === 'function') { - options = {verify: options}; - } - options = options || {}; + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } - // copy cert chain references to another array to protect against changes - // in verify callback - chain = chain.slice(0); - var certs = chain.slice(0); - - var validityCheckDate = options.validityCheckDate; - // if no validityCheckDate is specified, default to the current date. Make - // sure to maintain the value null because it indicates that the validity - // period should not be checked. - if(typeof validityCheckDate === 'undefined') { - validityCheckDate = new Date(); - } - - // verify each cert in the chain using its parent, where the parent - // is either the next in the chain or from the CA store - var first = true; - var error = null; - var depth = 0; - do { - var cert = chain.shift(); - var parent = null; - var selfSigned = false; - - if(validityCheckDate) { - // 1. check valid time - if(validityCheckDate < cert.validity.notBefore || - validityCheckDate > cert.validity.notAfter) { - error = { - message: 'Certificate is not valid yet or has expired.', - error: pki.certificateError.certificate_expired, - notBefore: cert.validity.notBefore, - notAfter: cert.validity.notAfter, - // TODO: we might want to reconsider renaming 'now' to - // 'validityCheckDate' should this API be changed in the future. - now: validityCheckDate - }; + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) } + + // 3. Normalize method. + method = normalizeMethodRecord[method] ?? normalizeMethod(method) + + // 4. Set request’s method to method. + request.method = method } - // 2. verify with parent from chain or CA store - if(error === null) { - parent = chain[0] || caStore.getIssuer(cert); - if(parent === null) { - // check for self-signed cert - if(cert.isIssuer(cert)) { - selfSigned = true; - parent = cert; - } + // 26. If init["signal"] exists, then set signal to it. + if (init.signal !== undefined) { + signal = init.signal + } + + // 27. Set this’s request to request. + this[kState] = request + + // 28. Set this’s signal to a new AbortSignal object with this’s relevant + // Realm. 
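Editor's note: the removed forge block above documents RFC 3280 path validation as implemented by pki.verifyCertificateChain. A minimal usage sketch, assuming PEM strings rootPem, intermediatePem and leafPem (example names, not part of this diff):

const forge = require('node-forge');

const caStore = forge.pki.createCaStore([rootPem]);
const chain = [
  forge.pki.certificateFromPem(leafPem),         // end-entity certificate first
  forge.pki.certificateFromPem(intermediatePem)  // highest authority last
];

try {
  forge.pki.verifyCertificateChain(caStore, chain, function (verified, depth, certs) {
    // return true to accept this certificate, or a pki.certificateError value
    // (or an object with error/message) to reject it
    return verified;
  });
} catch (err) {
  // err.error is one of pki.certificateError.*; err.message explains the failure
}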
+ // TODO: could this be simplified with AbortSignal.any + // (https://dom.spec.whatwg.org/#dom-abortsignal-any) + const ac = new AbortController() + this[kSignal] = ac.signal + this[kSignal][kRealm] = this[kRealm] + + // 29. If signal is not null, then make this’s signal follow signal. + if (signal != null) { + if ( + !signal || + typeof signal.aborted !== 'boolean' || + typeof signal.addEventListener !== 'function' + ) { + throw new TypeError( + "Failed to construct 'Request': member signal is not of type AbortSignal." + ) } - if(parent) { - // FIXME: current CA store implementation might have multiple - // certificates where the issuer can't be determined from the - // certificate (happens rarely with, eg: old certificates) so normalize - // by always putting parents into an array - // TODO: there's may be an extreme degenerate case currently uncovered - // where an old intermediate certificate seems to have a matching parent - // but none of the parents actually verify ... but the intermediate - // is in the CA and it should pass this check; needs investigation - var parents = parent; - if(!forge.util.isArray(parents)) { - parents = [parents]; - } - - // try to verify with each possible parent (typically only one) - var verified = false; - while(!verified && parents.length > 0) { - parent = parents.shift(); - try { - verified = parent.verify(cert); - } catch(ex) { - // failure to verify, don't care why, try next one + if (signal.aborted) { + ac.abort(signal.reason) + } else { + // Keep a strong ref to ac while request object + // is alive. This is needed to prevent AbortController + // from being prematurely garbage collected. + // See, https://github.com/nodejs/undici/issues/1926. + this[kAbortController] = ac + + const acRef = new WeakRef(ac) + const abort = function () { + const ac = acRef.deref() + if (ac !== undefined) { + ac.abort(this.reason) } } - if(!verified) { - error = { - message: 'Certificate signature is invalid.', - error: pki.certificateError.bad_certificate - }; - } - } + // Third-party AbortControllers may not work with these. + // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. + try { + // If the max amount of listeners is equal to the default, increase it + // This is only available in node >= v19.9.0 + if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { + setMaxListeners(100, signal) + } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { + setMaxListeners(100, signal) + } + } catch {} - if(error === null && (!parent || selfSigned) && - !caStore.hasCertificate(cert)) { - // no parent issuer and certificate itself is not trusted - error = { - message: 'Certificate is not trusted.', - error: pki.certificateError.unknown_ca - }; + util.addAbortListener(signal, abort) + requestFinalizer.register(ac, { signal, abort }) } } - // TODO: 3. check revoked + // 30. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is request’s header list and guard is + // "request". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kHeadersList] = request.headersList + this[kHeaders][kGuard] = 'request' + this[kHeaders][kRealm] = this[kRealm] - // 4. check for matching issuer/subject - if(error === null && parent && !cert.isIssuer(parent)) { - // parent is not issuer - error = { - message: 'Certificate issuer is invalid.', - error: pki.certificateError.bad_certificate - }; + // 31. 
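Editor's note: the added block above makes the request's internal signal follow a caller-supplied signal while avoiding leaks (strong reference to the AbortController on the request, WeakRef inside the listener). A reduced sketch of the same "follow" pattern using only standard APIs; the function name is illustrative and is not undici's helper:

function followSignal (leader) {
  const ac = new AbortController()
  if (leader.aborted) {
    // leader already aborted: propagate the reason immediately
    ac.abort(leader.reason)
  } else {
    // otherwise abort the follower whenever the leader aborts
    leader.addEventListener('abort', () => ac.abort(leader.reason), { once: true })
  }
  return ac.signal
}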
If this’s request’s mode is "no-cors", then: + if (mode === 'no-cors') { + // 1. If this’s request’s method is not a CORS-safelisted method, + // then throw a TypeError. + if (!corsSafeListedMethodsSet.has(request.method)) { + throw new TypeError( + `'${request.method} is unsupported in no-cors mode.` + ) + } + + // 2. Set this’s headers’s guard to "request-no-cors". + this[kHeaders][kGuard] = 'request-no-cors' } - // 5. TODO: check names with permitted names tree + // 32. If init is not empty, then: + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] + // 1. Let headers be a copy of this’s headers and its associated header + // list. + // 2. If init["headers"] exists, then set headers to init["headers"]. + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) - // 6. TODO: check names against excluded names tree + // 3. Empty this’s headers’s header list. + headersList.clear() - // 7. check for unsupported critical extensions - if(error === null) { - // supported extensions - var se = { - keyUsage: true, - basicConstraints: true - }; - for(var i = 0; error === null && i < cert.extensions.length; ++i) { - var ext = cert.extensions[i]; - if(ext.critical && !(ext.name in se)) { - error = { - message: - 'Certificate has an unsupported critical extension.', - error: pki.certificateError.unsupported_certificate - }; + // 4. If headers is a Headers object, then for each header in its header + // list, append header’s name/header’s value to this’s headers. + if (headers instanceof HeadersList) { + for (const [key, val] of headers) { + headersList.append(key, val) } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies + } else { + // 5. Otherwise, fill this’s headers with headers. + fillHeaders(this[kHeaders], headers) } } - // 8. check for CA if cert is not first or is the only certificate - // remaining in chain with no parent or is self-signed - if(error === null && - (!first || (chain.length === 0 && (!parent || selfSigned)))) { - // first check keyUsage extension and then basic constraints - var bcExt = cert.getExtension('basicConstraints'); - var keyUsageExt = cert.getExtension('keyUsage'); - if(keyUsageExt !== null) { - // keyCertSign must be true and there must be a basic - // constraints extension - if(!keyUsageExt.keyCertSign || bcExt === null) { - // bad certificate - error = { - message: - 'Certificate keyUsage or basicConstraints conflict ' + - 'or indicate that the certificate is not a CA. 
' + - 'If the certificate is the only one in the chain or ' + - 'isn\'t the first then the certificate must be a ' + - 'valid CA.', - error: pki.certificateError.bad_certificate - }; - } - } - // basic constraints cA flag must be set - if(error === null && bcExt !== null && !bcExt.cA) { - // bad certificate - error = { - message: - 'Certificate basicConstraints indicates the certificate ' + - 'is not a CA.', - error: pki.certificateError.bad_certificate - }; - } - // if error is not null and keyUsage is available, then we know it - // has keyCertSign and there is a basic constraints extension too, - // which means we can check pathLenConstraint (if it exists) - if(error === null && keyUsageExt !== null && - 'pathLenConstraint' in bcExt) { - // pathLen is the maximum # of intermediate CA certs that can be - // found between the current certificate and the end-entity (depth 0) - // certificate; this number does not include the end-entity (depth 0, - // last in the chain) even if it happens to be a CA certificate itself - var pathLen = depth - 1; - if(pathLen > bcExt.pathLenConstraint) { - // pathLenConstraint violated, bad certificate - error = { - message: - 'Certificate basicConstraints pathLenConstraint violated.', - error: pki.certificateError.bad_certificate - }; - } + // 33. Let inputBody be input’s request’s body if input is a Request + // object; otherwise null. + const inputBody = input instanceof Request ? input[kState].body : null + + // 34. If either init["body"] exists and is non-null or inputBody is + // non-null, and request’s method is `GET` or `HEAD`, then throw a + // TypeError. + if ( + (init.body != null || inputBody != null) && + (request.method === 'GET' || request.method === 'HEAD') + ) { + throw new TypeError('Request with GET/HEAD method cannot have body.') + } + + // 35. Let initBody be null. + let initBody = null + + // 36. If init["body"] exists and is non-null, then: + if (init.body != null) { + // 1. Let Content-Type be null. + // 2. Set initBody and Content-Type to the result of extracting + // init["body"], with keepalive set to request’s keepalive. + const [extractedBody, contentType] = extractBody( + init.body, + request.keepalive + ) + initBody = extractedBody + + // 3, If Content-Type is non-null and this’s headers’s header list does + // not contain `Content-Type`, then append `Content-Type`/Content-Type to + // this’s headers. + if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + this[kHeaders].append('content-type', contentType) } } - // call application callback - var vfd = (error === null) ? true : error.error; - var ret = options.verify ? options.verify(vfd, depth, certs) : vfd; - if(ret === true) { - // clear any set error - error = null; - } else { - // if passed basic tests, set default message and alert - if(vfd === true) { - error = { - message: 'The application rejected the certificate.', - error: pki.certificateError.bad_certificate - }; + // 37. Let inputOrInitBody be initBody if it is non-null; otherwise + // inputBody. + const inputOrInitBody = initBody ?? inputBody + + // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is + // null, then: + if (inputOrInitBody != null && inputOrInitBody.source == null) { + // 1. If initBody is non-null and init["duplex"] does not exist, + // then throw a TypeError. 
+ if (initBody != null && init.duplex == null) { + throw new TypeError('RequestInit: duplex option is required when sending a body.') } - // check for custom error info - if(ret || ret === 0) { - // set custom message and error - if(typeof ret === 'object' && !forge.util.isArray(ret)) { - if(ret.message) { - error.message = ret.message; - } - if(ret.error) { - error.error = ret.error; - } - } else if(typeof ret === 'string') { - // set custom error - error.error = ret; - } + // 2. If this’s request’s mode is neither "same-origin" nor "cors", + // then throw a TypeError. + if (request.mode !== 'same-origin' && request.mode !== 'cors') { + throw new TypeError( + 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' + ) } - // throw error - throw error; + // 3. Set this’s request’s use-CORS-preflight flag. + request.useCORSPreflightFlag = true } - // no longer first cert in chain - first = false; - ++depth; - } while(chain.length > 0); + // 39. Let finalBody be inputOrInitBody. + let finalBody = inputOrInitBody - return true; -}; + // 40. If initBody is null and inputBody is non-null, then: + if (initBody == null && inputBody != null) { + // 1. If input is unusable, then throw a TypeError. + if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { + throw new TypeError( + 'Cannot construct a Request with a Request object that has already been used.' + ) + } + // 2. Set finalBody to the result of creating a proxy for inputBody. + if (!TransformStream) { + TransformStream = (__nccwpck_require__(5356).TransformStream) + } -/***/ }), + // https://streams.spec.whatwg.org/#readablestream-create-a-proxy + const identityTransform = new TransformStream() + inputBody.stream.pipeThrough(identityTransform) + finalBody = { + source: inputBody.source, + length: inputBody.length, + stream: identityTransform.readable + } + } -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 41. Set this’s request’s body to finalBody. + this[kState].body = finalBody + } -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) + // Returns request’s HTTP method, which is "GET" by default. + get method () { + webidl.brandCheck(this, Request) -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) + // The method getter steps are to return this’s request’s method. + return this[kState].method + } - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) + // Returns the URL of request as a string. + get url () { + webidl.brandCheck(this, Request) -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) + // The url getter steps are to return this’s request’s URL, serialized. + return URLSerializer(this[kState].url) } - f.called = false - return f -} -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) + // Returns a Headers object consisting of the headers associated with request. + // Note that headers added in the network layer by the user agent will not + // be accounted for in this object, e.g., the "Host" header. 
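Editor's note: the duplex rule in step 38 above only applies to bodies without a source, such as a ReadableStream. A usage sketch assuming Node's global ReadableStream and TextEncoder (URL and payload are example values):

const streamBody = new ReadableStream({
  start (controller) {
    controller.enqueue(new TextEncoder().encode('chunk'))
    controller.close()
  }
})

const upload = new Request('https://example.com/upload', {
  method: 'POST',
  body: streamBody,
  duplex: 'half' // omitting this throws: "duplex option is required when sending a body"
})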
+ get headers () { + webidl.brandCheck(this, Request) + + // The headers getter steps are to return this’s headers. + return this[kHeaders] } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} + // Returns the kind of resource requested by request, e.g., "document" + // or "script". + get destination () { + webidl.brandCheck(this, Request) -/***/ }), + // The destination getter are to return this’s request’s destination. + return this[kState].destination + } -/***/ 7684: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Returns the referrer of request. Its value can be a same-origin URL if + // explicitly set in init, the empty string to indicate no referrer, and + // "about:client" when defaulting to the global’s default. This is used + // during fetching to determine the value of the `Referer` header of the + // request being made. + get referrer () { + webidl.brandCheck(this, Request) -"use strict"; + // 1. If this’s request’s referrer is "no-referrer", then return the + // empty string. + if (this[kState].referrer === 'no-referrer') { + return '' + } -const Queue = __nccwpck_require__(5185); + // 2. If this’s request’s referrer is "client", then return + // "about:client". + if (this[kState].referrer === 'client') { + return 'about:client' + } -const pLimit = concurrency => { - if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { - throw new TypeError('Expected `concurrency` to be a number from 1 and up'); - } + // Return this’s request’s referrer, serialized. + return this[kState].referrer.toString() + } - const queue = new Queue(); - let activeCount = 0; + // Returns the referrer policy associated with request. + // This is used during fetching to compute the value of the request’s + // referrer. + get referrerPolicy () { + webidl.brandCheck(this, Request) - const next = () => { - activeCount--; + // The referrerPolicy getter steps are to return this’s request’s referrer policy. + return this[kState].referrerPolicy + } - if (queue.size > 0) { - queue.dequeue()(); - } - }; + // Returns the mode associated with request, which is a string indicating + // whether the request will use CORS, or will be restricted to same-origin + // URLs. + get mode () { + webidl.brandCheck(this, Request) - const run = async (fn, resolve, ...args) => { - activeCount++; + // The mode getter steps are to return this’s request’s mode. + return this[kState].mode + } - const result = (async () => fn(...args))(); + // Returns the credentials mode associated with request, + // which is a string indicating whether credentials will be sent with the + // request always, never, or only when sent to a same-origin URL. + get credentials () { + // The credentials getter steps are to return this’s request’s credentials mode. + return this[kState].credentials + } - resolve(result); + // Returns the cache mode associated with request, + // which is a string indicating how the request will + // interact with the browser’s cache when fetching. + get cache () { + webidl.brandCheck(this, Request) - try { - await result; - } catch {} + // The cache getter steps are to return this’s request’s cache mode. + return this[kState].cache + } - next(); - }; + // Returns the redirect mode associated with request, + // which is a string indicating how redirects for the + // request will be handled during fetching. A request + // will follow redirects by default. 
+ get redirect () { + webidl.brandCheck(this, Request) - const enqueue = (fn, resolve, ...args) => { - queue.enqueue(run.bind(null, fn, resolve, ...args)); + // The redirect getter steps are to return this’s request’s redirect mode. + return this[kState].redirect + } - (async () => { - // This function needs to wait until the next microtask before comparing - // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously - // when the run function is dequeued and called. The comparison in the if-statement - // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. - await Promise.resolve(); + // Returns request’s subresource integrity metadata, which is a + // cryptographic hash of the resource being fetched. Its value + // consists of multiple hashes separated by whitespace. [SRI] + get integrity () { + webidl.brandCheck(this, Request) - if (activeCount < concurrency && queue.size > 0) { - queue.dequeue()(); - } - })(); - }; + // The integrity getter steps are to return this’s request’s integrity + // metadata. + return this[kState].integrity + } - const generator = (fn, ...args) => new Promise(resolve => { - enqueue(fn, resolve, ...args); - }); + // Returns a boolean indicating whether or not request can outlive the + // global in which it was created. + get keepalive () { + webidl.brandCheck(this, Request) - Object.defineProperties(generator, { - activeCount: { - get: () => activeCount - }, - pendingCount: { - get: () => queue.size - }, - clearQueue: { - value: () => { - queue.clear(); - } - } - }); + // The keepalive getter steps are to return this’s request’s keepalive. + return this[kState].keepalive + } - return generator; -}; + // Returns a boolean indicating whether or not request is for a reload + // navigation. + get isReloadNavigation () { + webidl.brandCheck(this, Request) -module.exports = pLimit; + // The isReloadNavigation getter steps are to return true if this’s + // request’s reload-navigation flag is set; otherwise false. + return this[kState].reloadNavigation + } + // Returns a boolean indicating whether or not request is for a history + // navigation (a.k.a. back-foward navigation). + get isHistoryNavigation () { + webidl.brandCheck(this, Request) -/***/ }), + // The isHistoryNavigation getter steps are to return true if this’s request’s + // history-navigation flag is set; otherwise false. + return this[kState].historyNavigation + } -/***/ 7214: -/***/ ((module) => { + // Returns the signal associated with request, which is an AbortSignal + // object indicating whether or not request has been aborted, and its + // abort event handler. + get signal () { + webidl.brandCheck(this, Request) -"use strict"; + // The signal getter steps are to return this’s signal. + return this[kSignal] + } + get body () { + webidl.brandCheck(this, Request) -const codes = {}; + return this[kState].body ? this[kState].body.stream : null + } -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error + get bodyUsed () { + webidl.brandCheck(this, Request) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) } - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } + get duplex () { + webidl.brandCheck(this, Request) + + return 'half' } - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); + // Returns a clone of request. 
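Editor's note: the removed p-limit helper above queues tasks so that at most `concurrency` of them run at once, using a microtask delay before comparing activeCount to concurrency. A usage sketch; the limit of 2 and fetchSomething are placeholder values, not part of this diff:

const limit = pLimit(2)

const results = await Promise.all([
  limit(() => fetchSomething('a')),
  limit(() => fetchSomething('b')),
  limit(() => fetchSomething('c')) // starts only after one of the first two settles
])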
+ clone () { + webidl.brandCheck(this, Request) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || this.body?.locked) { + throw new TypeError('unusable') } - } - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; + // 2. Let clonedRequest be the result of cloning this’s request. + const clonedRequest = cloneRequest(this[kState]) - codes[code] = NodeError; -} + // 3. Let clonedRequestObject be the result of creating a Request object, + // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. + const clonedRequestObject = new Request(kConstruct) + clonedRequestObject[kState] = clonedRequest + clonedRequestObject[kRealm] = this[kRealm] + clonedRequestObject[kHeaders] = new Headers(kConstruct) + clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList + clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one of ${thing} ${expected[0]} or ${expected[1]}`; + // 4. Make clonedRequestObject’s signal follow this’s signal. + const ac = new AbortController() + if (this.signal.aborted) { + ac.abort(this.signal.reason) } else { - return `of ${thing} ${expected[0]}`; + util.addAbortListener( + this.signal, + () => { + ac.abort(this.signal.reason) + } + ) } - } else { - return `of ${thing} ${String(expected)}`; - } + clonedRequestObject[kSignal] = ac.signal + + // 4. Return clonedRequestObject. + return clonedRequestObject + } +} + +mixinBody(Request) + +function makeRequest (init) { + // https://fetch.spec.whatwg.org/#requests + const request = { + method: 'GET', + localURLsOnly: false, + unsafeRequest: false, + body: null, + client: null, + reservedClient: null, + replacesClientId: '', + window: 'client', + keepalive: false, + serviceWorkers: 'all', + initiator: '', + destination: '', + priority: null, + origin: 'client', + policyContainer: 'client', + referrer: 'client', + referrerPolicy: '', + mode: 'no-cors', + useCORSPreflightFlag: false, + credentials: 'same-origin', + useCredentials: false, + cache: 'default', + redirect: 'follow', + integrity: '', + cryptoGraphicsNonceMetadata: '', + parserMetadata: '', + reloadNavigation: false, + historyNavigation: false, + userActivation: false, + taintedOrigin: false, + redirectCount: 0, + responseTainting: 'basic', + preventNoCacheCacheControlHeaderModification: false, + done: false, + timingAllowFailed: false, + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList() + } + request.url = request.urlList[0] + return request +} + +// https://fetch.spec.whatwg.org/#concept-request-clone +function cloneRequest (request) { + // To clone a request request, run these steps: + + // 1. Let newRequest be a copy of request, except for its body. + const newRequest = makeRequest({ ...request, body: null }) + + // 2. If request’s body is non-null, set newRequest’s body to the + // result of cloning request’s body. + if (request.body != null) { + newRequest.body = cloneBody(request.body) + } + + // 3. Return newRequest. 
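Editor's note: a short usage sketch for the clone() method defined above (URL and body are example values):

const original = new Request('https://example.com/api', { method: 'POST', body: 'payload' })
const copy = original.clone()

// Each copy can be consumed independently; reading one does not disturb the other,
// so both `await original.text()` and `await copy.text()` yield 'payload'.
// Calling clone() after the body has been used or locked throws a TypeError ('unusable').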
+ return newRequest } -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; -} +Object.defineProperties(Request.prototype, { + method: kEnumerableProperty, + url: kEnumerableProperty, + headers: kEnumerableProperty, + redirect: kEnumerableProperty, + clone: kEnumerableProperty, + signal: kEnumerableProperty, + duplex: kEnumerableProperty, + destination: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + isHistoryNavigation: kEnumerableProperty, + isReloadNavigation: kEnumerableProperty, + keepalive: kEnumerableProperty, + integrity: kEnumerableProperty, + cache: kEnumerableProperty, + credentials: kEnumerableProperty, + attribute: kEnumerableProperty, + referrerPolicy: kEnumerableProperty, + referrer: kEnumerableProperty, + mode: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Request', + configurable: true + } +}) -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; -} +webidl.converters.Request = webidl.interfaceConverter( + Request +) -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; +// https://fetch.spec.whatwg.org/#requestinfo +webidl.converters.RequestInfo = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) } - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; + if (V instanceof Request) { + return webidl.converters.Request(V) } + + return webidl.converters.USVString(V) } -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } +webidl.converters.AbortSignal = webidl.interfaceConverter( + AbortSignal +) - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 
'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; +// https://fetch.spec.whatwg.org/#requestinit +webidl.converters.RequestInit = webidl.dictionaryConverter([ + { + key: 'method', + converter: webidl.converters.ByteString + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + }, + { + key: 'body', + converter: webidl.nullableConverter( + webidl.converters.BodyInit + ) + }, + { + key: 'referrer', + converter: webidl.converters.USVString + }, + { + key: 'referrerPolicy', + converter: webidl.converters.DOMString, + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy + allowedValues: referrerPolicy + }, + { + key: 'mode', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#concept-request-mode + allowedValues: requestMode + }, + { + key: 'credentials', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcredentials + allowedValues: requestCredentials + }, + { + key: 'cache', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcache + allowedValues: requestCache + }, + { + key: 'redirect', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestredirect + allowedValues: requestRedirect + }, + { + key: 'integrity', + converter: webidl.converters.DOMString + }, + { + key: 'keepalive', + converter: webidl.converters.boolean + }, + { + key: 'signal', + converter: webidl.nullableConverter( + (signal) => webidl.converters.AbortSignal( + signal, + { strict: false } + ) + ) + }, + { + key: 'window', + converter: webidl.converters.any + }, + { + key: 'duplex', + converter: webidl.converters.DOMString, + allowedValues: requestDuplex } +]) - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.q = codes; +module.exports = { Request, makeRequest } /***/ }), -/***/ 1359: +/***/ 7823: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. +const { Headers, HeadersList, fill } = __nccwpck_require__(554) +const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(1472) +const util = __nccwpck_require__(3983) +const { kEnumerableProperty } = util +const { + isValidReasonPhrase, + isCancelled, + isAborted, + isBlobLike, + serializeJavascriptValueToJSONString, + isErrorLike, + isomorphicEncode +} = __nccwpck_require__(2538) +const { + redirectStatusSet, + nullBodyStatus, + DOMException +} = __nccwpck_require__(1037) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) +const { webidl } = __nccwpck_require__(1744) +const { FormData } = __nccwpck_require__(2015) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { URLSerializer } = __nccwpck_require__(685) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { types } = __nccwpck_require__(3837) + +const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream) +const textEncoder = new TextEncoder('utf-8') + +// https://fetch.spec.whatwg.org/#response-class +class Response { + // Creates network error Response. + static error () { + // TODO + const relevantRealm = { settingsObject: {} } + + // The static error() method steps are to return the result of creating a + // Response object, given a new network error, "immutable", and this’s + // relevant Realm. + const responseObject = new Response() + responseObject[kState] = makeNetworkError() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response-json + static json (data, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) + + if (init !== null) { + init = webidl.converters.ResponseInit(init) + } + + // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. + const bytes = textEncoder.encode( + serializeJavascriptValueToJSONString(data) + ) + // 2. Let body be the result of extracting bytes. 
+ const body = extractBody(bytes) -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) keys.push(key); - return keys; -}; -/**/ + // 3. Let responseObject be the result of creating a Response object, given a new response, + // "response", and this’s relevant Realm. + const relevantRealm = { settingsObject: {} } + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'response' + responseObject[kHeaders][kRealm] = relevantRealm -module.exports = Duplex; -const Readable = __nccwpck_require__(1433); -const Writable = __nccwpck_require__(6993); -__nccwpck_require__(4124)(Duplex, Readable); -{ - // Allow the keys array to be GC'ed. - const keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - const method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). + initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) + + // 5. Return responseObject. + return responseObject } -} -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); + + // Creates a redirect Response that redirects to url with status status. + static redirect (url, status = 302) { + const relevantRealm = { settingsObject: {} } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) + + url = webidl.converters.USVString(url) + status = webidl.converters['unsigned short'](status) + + // 1. Let parsedURL be the result of parsing url with current settings + // object’s API base URL. + // 2. If parsedURL is failure, then throw a TypeError. + // TODO: base-URL? + let parsedURL + try { + parsedURL = new URL(url, getGlobalOrigin()) + } catch (err) { + throw Object.assign(new TypeError('Failed to parse URL from ' + url), { + cause: err + }) } - } -} -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._writableState.length; - } -}); -// the no-half-open enforcer -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; + // 3. If status is not a redirect status, then throw a RangeError. + if (!redirectStatusSet.has(status)) { + throw new RangeError('Invalid status code ' + status) + } - // no more data can be written. 
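Editor's note: a usage sketch for the static Response helpers defined above (status values are illustrative):

const jsonResponse = Response.json({ ok: true }, { status: 201 })
// the body is the JSON-serialized data and, unless already set,
// the content-type header defaults to 'application/json'

const redirectResponse = Response.redirect('https://example.com/next', 307)
// status is 307 and the 'location' header is the serialized parsed URL;
// a non-redirect status (e.g. 200) throws a RangeError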
- // But allow more writes to happen in this tick. - process.nextTick(onEndNT, this); -} -function onEndNT(self) { - self.end(); -} -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; + // 4. Let responseObject be the result of creating a Response object, + // given a new response, "immutable", and this’s relevant Realm. + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Set responseObject’s response’s status to status. + responseObject[kState].status = status + + // 6. Let value be parsedURL, serialized and isomorphic encoded. + const value = isomorphicEncode(URLSerializer(parsedURL)) + + // 7. Append `Location`/value to responseObject’s response’s header list. + responseObject[kState].headersList.append('location', value) + + // 8. Return responseObject. + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response + constructor (body = null, init = {}) { + if (body !== null) { + body = webidl.converters.BodyInit(body) } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; + + init = webidl.converters.ResponseInit(init) + + // TODO + this[kRealm] = { settingsObject: {} } + + // 1. Set this’s response to a new response. + this[kState] = makeResponse({}) + + // 2. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is this’s response’s header list and guard + // is "response". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kGuard] = 'response' + this[kHeaders][kHeadersList] = this[kState].headersList + this[kHeaders][kRealm] = this[kRealm] + + // 3. Let bodyWithType be null. + let bodyWithType = null + + // 4. If body is non-null, then set bodyWithType to the result of extracting body. + if (body != null) { + const [extractedBody, type] = extractBody(body) + bodyWithType = { body: extractedBody, type } } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; + // 5. Perform initialize a response given this, init, and bodyWithType. + initializeResponse(this, init, bodyWithType) } -}); -/***/ }), + // Returns response’s type, e.g., "cors". + get type () { + webidl.brandCheck(this, Response) -/***/ 1542: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // The type getter steps are to return this’s response’s type. + return this[kState].type + } -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // Returns response’s URL, if it has one; otherwise the empty string. + get url () { + webidl.brandCheck(this, Response) + + const urlList = this[kState].urlList + + // The url getter steps are to return the empty string if this’s + // response’s URL is null; otherwise this’s response’s URL, + // serialized with exclude fragment set to true. + const url = urlList[urlList.length - 1] ?? null + + if (url === null) { + return '' + } -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. + return URLSerializer(url, true) + } + // Returns whether response was obtained through a redirect. + get redirected () { + webidl.brandCheck(this, Response) + // The redirected getter steps are to return true if this’s response’s URL + // list has more than one item; otherwise false. + return this[kState].urlList.length > 1 + } -module.exports = PassThrough; -const Transform = __nccwpck_require__(4415); -__nccwpck_require__(4124)(PassThrough, Transform); -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); -} -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; + // Returns response’s status. + get status () { + webidl.brandCheck(this, Response) -/***/ }), + // The status getter steps are to return this’s response’s status. + return this[kState].status + } -/***/ 1433: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Returns whether response’s status is an ok status. + get ok () { + webidl.brandCheck(this, Response) -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // The ok getter steps are to return true if this’s response’s status is an + // ok status; otherwise false. + return this[kState].status >= 200 && this[kState].status <= 299 + } + // Returns response’s status message. + get statusText () { + webidl.brandCheck(this, Response) + // The statusText getter steps are to return this’s response’s status + // message. + return this[kState].statusText + } -module.exports = Readable; + // Returns response’s headers as Headers. + get headers () { + webidl.brandCheck(this, Response) -/**/ -var Duplex; -/**/ + // The headers getter steps are to return this’s headers. + return this[kHeaders] + } -Readable.ReadableState = ReadableState; + get body () { + webidl.brandCheck(this, Response) -/**/ -const EE = (__nccwpck_require__(2361).EventEmitter); -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ + return this[kState].body ? this[kState].body.stream : null + } -/**/ -var Stream = __nccwpck_require__(2387); -/**/ + get bodyUsed () { + webidl.brandCheck(this, Response) -const Buffer = (__nccwpck_require__(4300).Buffer); -const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } -/**/ -const debugUtil = __nccwpck_require__(3837); -let debug; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function debug() {}; -} -/**/ + // Returns a clone of response. + clone () { + webidl.brandCheck(this, Response) -const BufferList = __nccwpck_require__(6522); -const destroyImpl = __nccwpck_require__(7049); -const _require = __nccwpck_require__(9948), - getHighWaterMark = _require.getHighWaterMark; -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || (this.body && this.body.locked)) { + throw webidl.errors.exception({ + header: 'Response.clone', + message: 'Body has already been consumed.' + }) + } -// Lazy loaded to improve the startup performance. 
-let StringDecoder; -let createReadableStreamAsyncIterator; -let from; -__nccwpck_require__(4124)(Readable, Stream); -const errorOrDestroy = destroyImpl.errorOrDestroy; -const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + // 2. Let clonedResponse be the result of cloning this’s response. + const clonedResponse = cloneResponse(this[kState]) - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; + // 3. Return the result of creating a Response object, given + // clonedResponse, this’s headers’s guard, and this’s relevant Realm. + const clonedResponseObject = new Response() + clonedResponseObject[kState] = clonedResponse + clonedResponseObject[kRealm] = this[kRealm] + clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList + clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + return clonedResponseObject + } } -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(1359); - options = options || {}; - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; +mixinBody(Response) - // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; +Object.defineProperties(Response.prototype, { + type: kEnumerableProperty, + url: kEnumerableProperty, + status: kEnumerableProperty, + ok: kEnumerableProperty, + redirected: kEnumerableProperty, + statusText: kEnumerableProperty, + headers: kEnumerableProperty, + clone: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Response', + configurable: true + } +}) - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); +Object.defineProperties(Response, { + json: kEnumerableProperty, + redirect: kEnumerableProperty, + error: kEnumerableProperty +}) - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; +// https://fetch.spec.whatwg.org/#concept-response-clone +function cloneResponse (response) { + // To clone a response response, run these steps: + + // 1. If response is a filtered response, then return a new identical + // filtered response whose internal response is a clone of response’s + // internal response. + if (response.internalResponse) { + return filterResponse( + cloneResponse(response.internalResponse), + response.type + ) + } - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; + // 2. Let newResponse be a copy of response, except for its body. + const newResponse = makeResponse({ ...response, body: null }) - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; + // 3. If response’s body is non-null, then set newResponse’s body to the + // result of cloning response’s body. + if (response.body != null) { + newResponse.body = cloneBody(response.body) + } - // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; + // 4. Return newResponse. + return newResponse +} - // Should .destroy() be called after 'end' (and potentially 'finish') - this.autoDestroy = !!options.autoDestroy; +function makeResponse (init) { + return { + aborted: false, + rangeRequested: false, + timingAllowPassed: false, + requestIncludesCredentials: false, + type: 'default', + status: 200, + timingInfo: null, + cacheState: '', + statusText: '', + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList(), + urlList: init.urlList ? [...init.urlList] : [] + } +} + +function makeNetworkError (reason) { + const isError = isErrorLike(reason) + return makeResponse({ + type: 'error', + status: 0, + error: isError + ? 
reason + : new Error(reason ? String(reason) : reason), + aborted: reason && reason.name === 'AbortError' + }) +} - // has it been destroyed - this.destroyed = false; +function makeFilteredResponse (response, state) { + state = { + internalResponse: response, + ...state + } - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; + return new Proxy(response, { + get (target, p) { + return p in state ? state[p] : target[p] + }, + set (target, p, value) { + assert(!(p in state)) + target[p] = value + return true + } + }) +} - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; +// https://fetch.spec.whatwg.org/#concept-filtered-response +function filterResponse (response, type) { + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (type === 'basic') { + // A basic filtered response is a filtered response whose type is "basic" + // and header list excludes any headers in internal response’s header list + // whose name is a forbidden response-header name. - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; + // Note: undici does not implement forbidden response-header names + return makeFilteredResponse(response, { + type: 'basic', + headersList: response.headersList + }) + } else if (type === 'cors') { + // A CORS filtered response is a filtered response whose type is "cors" + // and header list excludes any headers in internal response’s header + // list whose name is not a CORS-safelisted response-header name, given + // internal response’s CORS-exposed header-name list. + + // Note: undici does not implement CORS-safelisted response-header names + return makeFilteredResponse(response, { + type: 'cors', + headersList: response.headersList + }) + } else if (type === 'opaque') { + // An opaque filtered response is a filtered response whose type is + // "opaque", URL list is the empty list, status is 0, status message + // is the empty byte sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaque', + urlList: Object.freeze([]), + status: 0, + statusText: '', + body: null + }) + } else if (type === 'opaqueredirect') { + // An opaque-redirect filtered response is a filtered response whose type + // is "opaqueredirect", status is 0, status message is the empty byte + // sequence, header list is empty, and body is null. 
+ + return makeFilteredResponse(response, { + type: 'opaqueredirect', + status: 0, + statusText: '', + headersList: [], + body: null + }) + } else { + assert(false) } } -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - if (!(this instanceof Readable)) return new Readable(options); - // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - const isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); +// https://fetch.spec.whatwg.org/#appropriate-network-error +function makeAppropriateNetworkError (fetchParams, err = null) { + // 1. Assert: fetchParams is canceled. + assert(isCancelled(fetchParams)) - // legacy - this.readable = true; - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } - Stream.call(this); + // 2. Return an aborted network error if fetchParams is aborted; + // otherwise return a network error. + return isAborted(fetchParams) + ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) + : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) } -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; +// https://whatpr.org/fetch/1392.html#initialize-a-response +function initializeResponse (response, init, body) { + // 1. If init["status"] is not in the range 200 to 599, inclusive, then + // throw a RangeError. + if (init.status !== null && (init.status < 200 || init.status > 599)) { + throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - skipChunkCheck = true; + // 2. If init["statusText"] does not match the reason-phrase token production, + // then throw a TypeError. 
+ if ('statusText' in init && init.statusText != null) { + // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: + // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) + if (!isValidReasonPhrase(String(init.statusText))) { + throw new TypeError('Invalid statusText') } - } else { - skipChunkCheck = true; } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } + // 3. Set response’s response’s status to init["status"]. + if ('status' in init && init.status != null) { + response[kState].status = init.status } - // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - return !state.ended && (state.length < state.highWaterMark || state.length === 0); -} -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); + // 4. Set response’s response’s status message to init["statusText"]. + if ('statusText' in init && init.statusText != null) { + response[kState].statusText = init.statusText } - maybeReadMore(stream, state); -} -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + + // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. + if ('headers' in init && init.headers != null) { + fill(response[kHeaders], init.headers) + } + + // 6. If body was given, then: + if (body) { + // 1. If response's status is a null body status, then throw a TypeError. 
+ if (nullBodyStatus.includes(response.status)) { + throw webidl.errors.exception({ + header: 'Response constructor', + message: 'Invalid response status code ' + response.status + }) + } + + // 2. Set response's body to body's body. + response[kState].body = body.body + + // 3. If body's type is non-null and response's header list does not contain + // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. + if (body.type != null && !response[kState].headersList.contains('Content-Type')) { + response[kState].headersList.append('content-type', body.type) + } } - return er; } -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - const decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; - // If setEncoding(null), decoder.encoding equals utf8 - this._readableState.encoding = this._readableState.decoder.encoding; +webidl.converters.ReadableStream = webidl.interfaceConverter( + ReadableStream +) - // Iterate over current buffer to convert already stored Buffers: - let p = this._readableState.buffer.head; - let content = ''; - while (p !== null) { - content += decoder.write(p.data); - p = p.next; +webidl.converters.FormData = webidl.interfaceConverter( + FormData +) + +webidl.converters.URLSearchParams = webidl.interfaceConverter( + URLSearchParams +) + +// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit +webidl.converters.XMLHttpRequestBodyInit = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) } - this._readableState.buffer.clear(); - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; -// Don't raise the hwm > 1GB -const MAX_HWM = 0x40000000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) } - return n; + + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + return webidl.converters.BufferSource(V) + } + + if (util.isFormDataLike(V)) { + return webidl.converters.FormData(V, { strict: false }) + } + + if (V instanceof URLSearchParams) { + return webidl.converters.URLSearchParams(V) + } + + return webidl.converters.DOMString(V) } -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; +// https://fetch.spec.whatwg.org/#bodyinit +webidl.converters.BodyInit = function (V) { + if (V instanceof ReadableStream) { + return webidl.converters.ReadableStream(V) } - // If we're asking for more than the current hwm, then raise the hwm. 
- if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; + + // Note: the spec doesn't include async iterables, + // this is an undici extension. + if (V?.[Symbol.asyncIterator]) { + return V } - return state.length; + + return webidl.converters.XMLHttpRequestBodyInit(V) } -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; +webidl.converters.ResponseInit = webidl.dictionaryConverter([ + { + key: 'status', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + }, + { + key: 'statusText', + converter: webidl.converters.ByteString, + defaultValue: '' + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + } +]) - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; +module.exports = { + makeNetworkError, + makeResponse, + makeAppropriateNetworkError, + filterResponse, + Response, + cloneResponse +} + + +/***/ }), + +/***/ 5861: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kUrl: Symbol('url'), + kHeaders: Symbol('headers'), + kSignal: Symbol('signal'), + kState: Symbol('state'), + kGuard: Symbol('guard'), + kRealm: Symbol('realm') +} + + +/***/ }), + +/***/ 2538: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(1037) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { performance } = __nccwpck_require__(4074) +const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983) +const assert = __nccwpck_require__(9491) +const { isUint8Array } = __nccwpck_require__(9830) + +// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable +/** @type {import('crypto')|undefined} */ +let crypto + +try { + crypto = __nccwpck_require__(6113) +} catch { + +} + +function responseURL (response) { + // https://fetch.spec.whatwg.org/#responses + // A response has an associated URL. It is a pointer to the last URL + // in response’s URL list and null if response’s URL list is empty. + const urlList = response.urlList + const length = urlList.length + return length === 0 ? null : urlList[length - 1].toString() +} + +// https://fetch.spec.whatwg.org/#concept-response-location-url +function responseLocationURL (response, requestFragment) { + // 1. If response’s status is not a redirect status, then return null. + if (!redirectStatusSet.has(response.status)) { + return null } - n = howMuchToRead(n, state); - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; + // 2. Let location be the result of extracting header list values given + // `Location` and response’s header list. 
+ let location = response.headersList.get('location') + + // 3. If location is a header value, then set location to the result of + // parsing location with response’s URL. + if (location !== null && isValidHeaderValue(location)) { + location = new URL(location, responseURL(response)) } - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. + // 4. If location is a URL whose fragment is null, then set location’s + // fragment to requestFragment. + if (location && !location.hash) { + location.hash = requestFragment + } - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); + // 5. Return location. + return location +} - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); +/** @returns {URL} */ +function requestCurrentURL (request) { + return request.urlList[request.urlList.length - 1] +} + +function requestBadPort (request) { + // 1. Let url be request’s current URL. + const url = requestCurrentURL(request) + + // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, + // then return blocked. + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { + return 'blocked' } - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); + // 3. Return allowed. + return 'allowed' +} + +function isErrorLike (object) { + return object instanceof Error || ( + object?.constructor?.name === 'Error' || + object?.constructor?.name === 'DOMException' + ) +} + +// Check whether |statusText| is a ByteString and +// matches the Reason-Phrase token production. 
+// RFC 2616: https://tools.ietf.org/html/rfc2616 +// RFC 7230: https://tools.ietf.org/html/rfc7230 +// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" +// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 +function isValidReasonPhrase (statusText) { + for (let i = 0; i < statusText.length; ++i) { + const c = statusText.charCodeAt(i) + if ( + !( + ( + c === 0x09 || // HTAB + (c >= 0x20 && c <= 0x7e) || // SP / VCHAR + (c >= 0x80 && c <= 0xff) + ) // obs-text + ) + ) { + return false + } } - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; + return true +} + +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } +} + +/** + * @param {string} characters + */ +function isValidHTTPToken (characters) { + if (characters.length === 0) { + return false + } + for (let i = 0; i < characters.length; ++i) { + if (!isTokenCharCode(characters.charCodeAt(i))) { + return false + } } - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; + return true +} - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ +function isValidHeaderName (potentialValue) { + return isValidHTTPToken(potentialValue) +} + +/** + * @see https://fetch.spec.whatwg.org/#header-value + * @param {string} potentialValue + */ +function isValidHeaderValue (potentialValue) { + // - Has no leading or trailing HTTP tab or space bytes. + // - Contains no 0x00 (NUL) or HTTP newline bytes. + if ( + potentialValue.startsWith('\t') || + potentialValue.startsWith(' ') || + potentialValue.endsWith('\t') || + potentialValue.endsWith(' ') + ) { + return false } - if (ret !== null) this.emit('data', ret); - return ret; -}; -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } + + if ( + potentialValue.includes('\0') || + potentialValue.includes('\r') || + potentialValue.includes('\n') + ) { + return false } - state.ended = true; - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. 
- state.needReadable = false; - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); + + return true +} + +// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect +function setRequestReferrerPolicyOnRedirect (request, actualResponse) { + // Given a request request and a response actualResponse, this algorithm + // updates request’s referrer policy according to the Referrer-Policy + // header (if any) in actualResponse. + + // 1. Let policy be the result of executing § 8.1 Parse a referrer policy + // from a Referrer-Policy header on actualResponse. + + // 8.1 Parse a referrer policy from a Referrer-Policy header + // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. + const { headersList } = actualResponse + // 2. Let policy be the empty string. + // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. + // 4. Return policy. + const policyHeader = (headersList.get('referrer-policy') ?? '').split(',') + + // Note: As the referrer-policy can contain multiple policies + // separated by comma, we need to loop through all of them + // and pick the first valid one. + // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy + let policy = '' + if (policyHeader.length > 0) { + // The right-most policy takes precedence. + // The left-most policy is the fallback. + for (let i = policyHeader.length; i !== 0; i--) { + const token = policyHeader[i - 1].trim() + if (referrerPolicyTokens.has(token)) { + policy = token + break + } } } -} -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); + // 2. If policy is not the empty string, then set request’s referrer policy to policy. + if (policy !== '') { + request.referrerPolicy = policy } } -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } - // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); +// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check +function crossOriginResourcePolicyCheck () { + // TODO + return 'allowed' } -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); +// https://fetch.spec.whatwg.org/#concept-cors-check +function corsCheck () { + // TODO + return 'success' +} + +// https://fetch.spec.whatwg.org/#concept-tao-check +function TAOCheck () { + // TODO + return 'success' +} + +function appendFetchMetadata (httpRequest) { + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + + // 1. Assert: r’s url is a potentially trustworthy URL. + // TODO + + // 2. Let header be a Structured Header whose value is a token. + let header = null + + // 3. Set header’s value to r’s mode. + header = httpRequest.mode + + // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. + httpRequest.headersList.set('sec-fetch-mode', header) + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header + // TODO +} + +// https://fetch.spec.whatwg.org/#append-a-request-origin-header +function appendRequestOriginHeader (request) { + // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. + let serializedOrigin = request.origin + + // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. + if (request.responseTainting === 'cors' || request.mode === 'websocket') { + if (serializedOrigin) { + request.headersList.append('origin', serializedOrigin) + } + + // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: + } else if (request.method !== 'GET' && request.method !== 'HEAD') { + // 1. Switch on request’s referrer policy: + switch (request.referrerPolicy) { + case 'no-referrer': + // Set serializedOrigin to `null`. + serializedOrigin = null + break + case 'no-referrer-when-downgrade': + case 'strict-origin': + case 'strict-origin-when-cross-origin': + // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { + serializedOrigin = null + } + break + case 'same-origin': + // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. + if (!sameOrigin(request, requestCurrentURL(request))) { + serializedOrigin = null + } + break + default: + // Do nothing. + } + + if (serializedOrigin) { + // 2. Append (`Origin`, serializedOrigin) to request’s header list. + request.headersList.append('origin', serializedOrigin) + } } } -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. 
Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - const len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break; + +function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { + // TODO + return performance.now() +} + +// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info +function createOpaqueTimingInfo (timingInfo) { + return { + startTime: timingInfo.startTime ?? 0, + redirectStartTime: 0, + redirectEndTime: 0, + postRedirectStartTime: timingInfo.startTime ?? 0, + finalServiceWorkerStartTime: 0, + finalNetworkResponseStartTime: 0, + finalNetworkRequestStartTime: 0, + endTime: 0, + encodedBodySize: 0, + decodedBodySize: 0, + finalConnectionTimingInfo: null + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#policy-container +function makePolicyContainer () { + // Note: the fetch spec doesn't make use of embedder policy or CSP list + return { + referrerPolicy: 'strict-origin-when-cross-origin' } - state.readingMore = false; } -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; +// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container +function clonePolicyContainer (policyContainer) { + return { + referrerPolicy: policyContainer.referrerPolicy } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } +} + +// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer +function determineRequestsReferrer (request) { + // 1. Let policy be request's referrer policy. 
+ const policy = request.referrerPolicy + + // Note: policy cannot (shouldn't) be null or an empty string. + assert(policy) + + // 2. Let environment be request’s client. + + let referrerSource = null + + // 3. Switch on request’s referrer: + if (request.referrer === 'client') { + // Note: node isn't a browser and doesn't implement document/iframes, + // so we bypass this step and replace it with our own. + + const globalOrigin = getGlobalOrigin() + + if (!globalOrigin || globalOrigin.origin === 'null') { + return 'no-referrer' } - } - function onend() { - debug('onend'); - dest.end(); + + // note: we need to clone it as it's mutated + referrerSource = new URL(globalOrigin) + } else if (request.referrer instanceof URL) { + // Let referrerSource be request’s referrer. + referrerSource = request.referrer } - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; + // 4. Let request’s referrerURL be the result of stripping referrerSource for + // use as a referrer. + let referrerURL = stripURLForReferrer(referrerSource) - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + // 5. Let referrerOrigin be the result of stripping referrerSource for use as + // a referrer, with the origin-only flag set to true. + const referrerOrigin = stripURLForReferrer(referrerSource, true) + + // 6. If the result of serializing referrerURL is a string whose length is + // greater than 4096, set referrerURL to referrerOrigin. + if (referrerURL.toString().length > 4096) { + referrerURL = referrerOrigin } - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; + + const areSameOrigin = sameOrigin(request, referrerURL) + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && + !isURLPotentiallyTrustworthy(request.url) + + // 8. Execute the switch statements corresponding to the value of policy: + switch (policy) { + case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) + case 'unsafe-url': return referrerURL + case 'same-origin': + return areSameOrigin ? 
referrerOrigin : 'no-referrer' + case 'origin-when-cross-origin': + return areSameOrigin ? referrerURL : referrerOrigin + case 'strict-origin-when-cross-origin': { + const currentURL = requestCurrentURL(request) + + // 1. If the origin of referrerURL and the origin of request’s current + // URL are the same, then return referrerURL. + if (sameOrigin(referrerURL, currentURL)) { + return referrerURL } - src.pause(); + + // 2. If referrerURL is a potentially trustworthy URL and request’s + // current URL is not a potentially trustworthy URL, then return no + // referrer. + if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { + return 'no-referrer' + } + + // 3. Return referrerOrigin. + return referrerOrigin } + case 'strict-origin': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + case 'no-referrer-when-downgrade': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + + default: // eslint-disable-line + return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin } +} - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); +/** + * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url + * @param {URL} url + * @param {boolean|undefined} originOnly + */ +function stripURLForReferrer (url, originOnly) { + // 1. Assert: url is a URL. + assert(url instanceof URL) + + // 2. If url’s scheme is a local scheme, then return no referrer. + if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { + return 'no-referrer' } - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); + // 3. Set url’s username to the empty string. + url.username = '' - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); + // 4. Set url’s password to the empty string. + url.password = '' + + // 5. Set url’s fragment to null. + url.hash = '' + + // 6. If the origin-only flag is true, then: + if (originOnly) { + // 1. Set url’s path to « the empty string ». + url.pathname = '' + + // 2. Set url’s query to null. + url.search = '' } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); + + // 7. Return url. + return url +} + +function isURLPotentiallyTrustworthy (url) { + if (!(url instanceof URL)) { + return false } - dest.once('finish', onfinish); - function unpipe() { - debug('unpipe'); - src.unpipe(dest); + + // If child of about, return true + if (url.href === 'about:blank' || url.href === 'about:srcdoc') { + return true } - // tell the dest that it's being piped to - dest.emit('pipe', src); + // If scheme is data, return true + if (url.protocol === 'data:') return true - // start the flow if it hasn't been started already. 
- if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - return dest; -}; -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); + // If file, return true + if (url.protocol === 'file:') return true + + return isOriginPotentiallyTrustworthy(url.origin) + + function isOriginPotentiallyTrustworthy (origin) { + // If origin is explicitly null, return false + if (origin == null || origin === 'null') return false + + const originAsURL = new URL(origin) + + // If secure, return true + if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { + return true } - }; -} -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; + // If localhost or variants, return true + if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || + (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || + (originAsURL.hostname.endsWith('.localhost'))) { + return true + } - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; + // If any other, return false + return false + } +} - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist + * @param {Uint8Array} bytes + * @param {string} metadataList + */ +function bytesMatch (bytes, metadataList) { + // If node is not built with OpenSSL support, we cannot check + // a request's integrity, so allow it by default (the spec will + // allow requests if an invalid hash is given, as precedence). + /* istanbul ignore if: only if node is built with --without-ssl */ + if (crypto === undefined) { + return true } - // slow case. multiple pipe destinations. + // 1. Let parsedMetadata be the result of parsing metadataList. + const parsedMetadata = parseMetadata(metadataList) - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - return this; + // 2. If parsedMetadata is no metadata, return true. + if (parsedMetadata === 'no metadata') { + return true } - // try to find the right one. - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; + // 3. If parsedMetadata is the empty set, return true. 
+ if (parsedMetadata.length === 0) { + return true + } -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - const res = Stream.prototype.on.call(this, ev, fn); - const state = this._readableState; - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; + // 4. Let metadata be the result of getting the strongest + // metadata from parsedMetadata. + const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) + // get the strongest algorithm + const strongest = list[0].algo + // get all entries that use the strongest algorithm; ignore weaker + const metadata = list.filter((item) => item.algo === strongest) - // Try start flowing on next tick if stream isn't explicitly paused - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); - } + // 5. For each item in metadata: + for (const item of metadata) { + // 1. Let algorithm be the alg component of item. + const algorithm = item.algo + + // 2. Let expectedValue be the val component of item. + let expectedValue = item.hash + + // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e + // "be liberal with padding". This is annoying, and it's not even in the spec. + + if (expectedValue.endsWith('==')) { + expectedValue = expectedValue.slice(0, -2) + } + + // 3. Let actualValue be the result of applying algorithm to bytes. + let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') + + if (actualValue.endsWith('==')) { + actualValue = actualValue.slice(0, -2) + } + + // 4. If actualValue is a case-sensitive match for expectedValue, + // return true. + if (actualValue === expectedValue) { + return true + } + + let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') + + if (actualBase64URL.endsWith('==')) { + actualBase64URL = actualBase64URL.slice(0, -2) + } + + if (actualBase64URL === expectedValue) { + return true } } - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; -Readable.prototype.removeListener = function (ev, fn) { - const res = Stream.prototype.removeListener.call(this, ev, fn); - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - return res; -}; -Readable.prototype.removeAllListeners = function (ev) { - const res = Stream.prototype.removeAllListeners.apply(this, arguments); - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. 
This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); + + // 6. Return false. + return false +} + +// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options +// https://www.w3.org/TR/CSP2/#source-list-syntax +// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 +const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i + +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + * @param {string} metadata + */ +function parseMetadata (metadata) { + // 1. Let result be the empty set. + /** @type {{ algo: string, hash: string }[]} */ + const result = [] + + // 2. Let empty be equal to true. + let empty = true + + const supportedHashes = crypto.getHashes() + + // 3. For each token returned by splitting metadata on spaces: + for (const token of metadata.split(' ')) { + // 1. Set empty to false. + empty = false + + // 2. Parse token as a hash-with-options. + const parsedToken = parseHashWithOptions.exec(token) + + // 3. If token does not parse, continue to the next token. + if (parsedToken === null || parsedToken.groups === undefined) { + // Note: Chromium blocks the request at this point, but Firefox + // gives a warning that an invalid integrity was given. The + // correct behavior is to ignore these, and subsequently not + // check the integrity of the resource. + continue + } + + // 4. Let algorithm be the hash-algo component of token. + const algorithm = parsedToken.groups.algo + + // 5. If algorithm is a hash function recognized by the user + // agent, add the parsed token to result. + if (supportedHashes.includes(algorithm.toLowerCase())) { + result.push(parsedToken.groups) + } } - return res; -}; -function updateReadableListening(self) { - const state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; - if (state.resumeScheduled && !state.paused) { - // flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true; - // crude way to check if we should resume - } else if (self.listenerCount('data') > 0) { - self.resume(); + // 4. Return no metadata if empty is true, otherwise return result. + if (empty === true) { + return 'no metadata' } + + return result } -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); + +// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request +function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { + // TODO } -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - // we flow only if there is no one listening - // for readable, but we still have to call - // resume() - state.flowing = !state.readableListening; - resume(this, state); +/** + * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} + * @param {URL} A + * @param {URL} B + */ +function sameOrigin (A, B) { + // 1. If A and B are the same opaque origin, then return true. + if (A.origin === B.origin && A.origin === 'null') { + return true } - state.paused = false; - return this; -}; -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); + + // 2. 
If A and B are both tuple origins and their schemes, + // hosts, and port are identical, then return true. + if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { + return true } + + // 3. Return false. + return false } -function resume_(stream, state) { - debug('resume', state.reading); - if (!state.reading) { - stream.read(0); - } - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); + +function createDeferredPromise () { + let res + let rej + const promise = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) + + return { promise, resolve: res, reject: rej } } -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - this._readableState.paused = true; - return this; -}; -function flow(stream) { - const state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null); + +function isAborted (fetchParams) { + return fetchParams.controller.state === 'aborted' } -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - stream.on('end', () => { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) this.push(chunk); - } - this.push(null); - }); - stream.on('data', chunk => { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); +function isCancelled (fetchParams) { + return fetchParams.controller.state === 'aborted' || + fetchParams.controller.state === 'terminated' +} + +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ +function normalizeMethod (method) { + return normalizeMethodRecord[method.toLowerCase()] ?? method +} - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - var ret = this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); +// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string +function serializeJavascriptValueToJSONString (value) { + // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). + const result = JSON.stringify(value) - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); - } + // 2. If result is undefined, then throw a TypeError. 
+ if (result === undefined) { + throw new TypeError('Value is not JSON serializable') } - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + // 3. Assert: result is a string. + assert(typeof result === 'string') + + // 4. Return result. + return result +} + +// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object +const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + +/** + * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + * @param {() => unknown[]} iterator + * @param {string} name name of the instance + * @param {'key'|'value'|'key+value'} kind + */ +function makeIterator (iterator, name, kind) { + const object = { + index: 0, + kind, + target: iterator } - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = n => { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); + const i = { + next () { + // 1. Let interface be the interface for which the iterator prototype object exists. + + // 2. Let thisValue be the this value. + + // 3. Let object be ? ToObject(thisValue). + + // 4. If object is a platform object, then perform a security + // check, passing: + + // 5. If object is not a default iterator object for interface, + // then throw a TypeError. + if (Object.getPrototypeOf(this) !== i) { + throw new TypeError( + `'next' called on an object that does not implement interface ${name} Iterator.` + ) + } + + // 6. Let index be object’s index. + // 7. Let kind be object’s kind. + // 8. Let values be object’s target's value pairs to iterate over. + const { index, kind, target } = object + const values = target() + + // 9. Let len be the length of values. + const len = values.length + + // 10. If index is greater than or equal to len, then return + // CreateIterResultObject(undefined, true). + if (index >= len) { + return { value: undefined, done: true } + } + + // 11. Let pair be the entry in values at index index. + const pair = values[index] + + // 12. Set object’s index to index + 1. + object.index = index + 1 + + // 13. Return the iterator result for pair and kind. + return iteratorResult(pair, kind) + }, + // The class string of an iterator prototype object for a given interface is the + // result of concatenating the identifier of the interface and the string " Iterator". + [Symbol.toStringTag]: `${name} Iterator` + } + + // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. + Object.setPrototypeOf(i, esIteratorPrototype) + // esIteratorPrototype needs to be the prototype of i + // which is the prototype of an empty object. Yes, it's confusing. + return Object.setPrototypeOf({}, i) +} + +// https://webidl.spec.whatwg.org/#iterator-result +function iteratorResult (pair, kind) { + let result + + // 1. Let result be a value determined by the value of kind: + switch (kind) { + case 'key': { + // 1. Let idlKey be pair’s key. + // 2. Let key be the result of converting idlKey to an + // ECMAScript value. + // 3. result is key. + result = pair[0] + break } - }; - return this; -}; -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = __nccwpck_require__(3306); + case 'value': { + // 1. Let idlValue be pair’s value. + // 2. 
Let value be the result of converting idlValue to + // an ECMAScript value. + // 3. result is value. + result = pair[1] + break } - return createReadableStreamAsyncIterator(this); - }; + case 'key+value': { + // 1. Let idlKey be pair’s key. + // 2. Let idlValue be pair’s value. + // 3. Let key be the result of converting idlKey to an + // ECMAScript value. + // 4. Let value be the result of converting idlValue to + // an ECMAScript value. + // 5. Let array be ! ArrayCreate(2). + // 6. Call ! CreateDataProperty(array, "0", key). + // 7. Call ! CreateDataProperty(array, "1", value). + // 8. result is array. + result = pair + break + } + } + + // 2. Return CreateIterResultObject(result, false). + return { value: result, done: false } } -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; + +/** + * @see https://fetch.spec.whatwg.org/#body-fully-read + */ +async function fullyReadBody (body, processBody, processBodyError) { + // 1. If taskDestination is null, then set taskDestination to + // the result of starting a new parallel queue. + + // 2. Let successSteps given a byte sequence bytes be to queue a + // fetch task to run processBody given bytes, with taskDestination. + const successSteps = processBody + + // 3. Let errorSteps be to queue a fetch task to run processBodyError, + // with taskDestination. + const errorSteps = processBodyError + + // 4. Let reader be the result of getting a reader for body’s stream. + // If that threw an exception, then run errorSteps with that + // exception and return. + let reader + + try { + reader = body.stream.getReader() + } catch (e) { + errorSteps(e) + return } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; + + // 5. Read all bytes from reader, given successSteps and errorSteps. + try { + const result = await readAllBytes(reader) + successSteps(result) + } catch (e) { + errorSteps(e) } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } +} + +/** @type {ReadableStream} */ +let ReadableStream = globalThis.ReadableStream + +function isReadableStreamLike (stream) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(5356).ReadableStream) } -}); -// exposed for testing purposes only. 
-Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._readableState.length; + return stream instanceof ReadableStream || ( + stream[Symbol.toStringTag] === 'ReadableStream' && + typeof stream.tee === 'function' + ) +} + +const MAXIMUM_ARGUMENT_LENGTH = 65535 + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-decode + * @param {number[]|Uint8Array} input + */ +function isomorphicDecode (input) { + // 1. To isomorphic decode a byte sequence input, return a string whose code point + // length is equal to input’s length and whose code points have the same values + // as the values of input’s bytes, in the same order. + + if (input.length < MAXIMUM_ARGUMENT_LENGTH) { + return String.fromCharCode(...input) } -}); -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); + return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +} + +/** + * @param {ReadableStreamController} controller + */ +function readableStreamClose (controller) { + try { + controller.close() + } catch (err) { + // TODO: add comment explaining why this error occurs. + if (!err.message.includes('Controller is already closed')) { + throw err + } } - return ret; } -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-encode + * @param {string} input + */ +function isomorphicEncode (input) { + // 1. Assert: input contains no code points greater than U+00FF. + for (let i = 0; i < input.length; i++) { + assert(input.charCodeAt(i) <= 0xFF) } + + // 2. Return a byte sequence whose length is equal to input’s code + // point length and whose bytes have the same values as the + // values of input’s code points, in the same order + return input } -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); - // Check that we didn't get one last unshift. 
- if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - const wState = stream._writableState; - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); - } +/** + * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes + * @see https://streams.spec.whatwg.org/#read-loop + * @param {ReadableStreamDefaultReader} reader + */ +async function readAllBytes (reader) { + const bytes = [] + let byteLength = 0 + + while (true) { + const { done, value: chunk } = await reader.read() + + if (done) { + // 1. Call successSteps with bytes. + return Buffer.concat(bytes, byteLength) + } + + // 1. If chunk is not a Uint8Array object, call failureSteps + // with a TypeError and abort these steps. + if (!isUint8Array(chunk)) { + throw new TypeError('Received non-Uint8Array chunk') } + + // 2. Append the bytes represented by chunk to bytes. + bytes.push(chunk) + byteLength += chunk.length + + // 3. Read-loop given reader, bytes, successSteps, and failureSteps. } } -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = __nccwpck_require__(9082); - } - return from(Readable, iterable, opts); - }; + +/** + * @see https://fetch.spec.whatwg.org/#is-local + * @param {URL} url + */ +function urlIsLocal (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' } -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; + +/** + * @param {string|URL} url + */ +function urlHasHttpsScheme (url) { + if (typeof url === 'string') { + return url.startsWith('https:') } - return -1; + + return url.protocol === 'https:' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-scheme + * @param {URL} url + */ +function urlIsHttpHttpsScheme (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'http:' || protocol === 'https:' +} + +/** + * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. 
+ */ +const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) + +module.exports = { + isAborted, + isCancelled, + createDeferredPromise, + ReadableStreamFrom, + toUSVString, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + coarsenedSharedCurrentTime, + determineRequestsReferrer, + makePolicyContainer, + clonePolicyContainer, + appendFetchMetadata, + appendRequestOriginHeader, + TAOCheck, + corsCheck, + crossOriginResourcePolicyCheck, + createOpaqueTimingInfo, + setRequestReferrerPolicyOnRedirect, + isValidHTTPToken, + requestBadPort, + requestCurrentURL, + responseURL, + responseLocationURL, + isBlobLike, + isURLPotentiallyTrustworthy, + isValidReasonPhrase, + sameOrigin, + normalizeMethod, + serializeJavascriptValueToJSONString, + makeIterator, + isValidHeaderName, + isValidHeaderValue, + hasOwn, + isErrorLike, + fullyReadBody, + bytesMatch, + isReadableStreamLike, + readableStreamClose, + isomorphicEncode, + isomorphicDecode, + urlIsLocal, + urlHasHttpsScheme, + urlIsHttpHttpsScheme, + readAllBytes, + normalizeMethodRecord } + /***/ }), -/***/ 4415: +/***/ 1744: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. 
If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. +const { types } = __nccwpck_require__(3837) +const { hasOwn, toUSVString } = __nccwpck_require__(2538) +/** @type {import('../../types/webidl').Webidl} */ +const webidl = {} +webidl.converters = {} +webidl.util = {} +webidl.errors = {} -module.exports = Transform; -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; -const Duplex = __nccwpck_require__(1359); -__nccwpck_require__(4124)(Transform, Duplex); -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } - ts.writechunk = null; - ts.writecb = null; - if (data != null) - // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } +webidl.errors.exception = function (message) { + return new TypeError(`${message.header}: ${message.message}`) } -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; +webidl.errors.conversionFailed = function (context) { + const plural = context.types.length === 1 ? '' : ' one of' + const message = + `${context.argument} could not be converted to` + + `${plural}: ${context.types.join(', ')}.` - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } + return webidl.errors.exception({ + header: context.prefix, + message + }) +} - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); +webidl.errors.invalidArgument = function (context) { + return webidl.errors.exception({ + header: context.prefix, + message: `"${context.value}" is an invalid ${context.type}.` + }) } -function prefinish() { - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush((er, data) => { - done(this, er, data); - }); + +// https://webidl.spec.whatwg.org/#implements +webidl.brandCheck = function (V, I, opts = undefined) { + if (opts?.strict !== false && !(V instanceof I)) { + throw new TypeError('Illegal invocation') } else { - done(this, null, null); + return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] } } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); +webidl.argumentLengthCheck = function ({ length }, min, ctx) { + if (length < min) { + throw webidl.errors.exception({ + message: `${min} argument${min !== 1 ? 's' : ''} required, ` + + `but${length ? ' only' : ''} ${length} found.`, + ...ctx + }) } -}; +} -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. 
- ts.needTransform = true; - } -}; -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, err2 => { - cb(err2); - }); -}; -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) - // single equals check for both `null` and `undefined` - stream.push(data); +webidl.illegalConstructor = function () { + throw webidl.errors.exception({ + header: 'TypeError', + message: 'Illegal constructor' + }) +} - // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); +// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +webidl.util.Type = function (V) { + switch (typeof V) { + case 'undefined': return 'Undefined' + case 'boolean': return 'Boolean' + case 'string': return 'String' + case 'symbol': return 'Symbol' + case 'number': return 'Number' + case 'bigint': return 'BigInt' + case 'function': + case 'object': { + if (V === null) { + return 'Null' + } + + return 'Object' + } + } } -/***/ }), +// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint +webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { + let upperBound + let lowerBound -/***/ 6993: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 1. If bitLength is 64, then: + if (bitLength === 64) { + // 1. Let upperBound be 2^53 − 1. + upperBound = Math.pow(2, 53) - 1 -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // 2. If signedness is "unsigned", then let lowerBound be 0. + if (signedness === 'unsigned') { + lowerBound = 0 + } else { + // 3. Otherwise let lowerBound be −2^53 + 1. + lowerBound = Math.pow(-2, 53) + 1 + } + } else if (signedness === 'unsigned') { + // 2. Otherwise, if signedness is "unsigned", then: -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. + // 1. Let lowerBound be 0. + lowerBound = 0 + // 2. Let upperBound be 2^bitLength − 1. + upperBound = Math.pow(2, bitLength) - 1 + } else { + // 3. Otherwise: + // 1. Let lowerBound be -2^bitLength − 1. 
+ lowerBound = Math.pow(-2, bitLength) - 1 -module.exports = Writable; + // 2. Let upperBound be 2^bitLength − 1 − 1. + upperBound = Math.pow(2, bitLength - 1) - 1 + } -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} + // 4. Let x be ? ToNumber(V). + let x = Number(V) -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - this.next = null; - this.entry = null; - this.finish = () => { - onCorkedFinish(this, state); - }; -} -/* */ + // 5. If x is −0, then set x to +0. + if (x === 0) { + x = 0 + } -/**/ -var Duplex; -/**/ + // 6. If the conversion is to an IDL type associated + // with the [EnforceRange] extended attribute, then: + if (opts.enforceRange === true) { + // 1. If x is NaN, +∞, or −∞, then throw a TypeError. + if ( + Number.isNaN(x) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Could not convert ${V} to an integer.` + }) + } -Writable.WritableState = WritableState; + // 2. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) -/**/ -const internalUtil = { - deprecate: __nccwpck_require__(7127) -}; -/**/ + // 3. If x < lowerBound or x > upperBound, then + // throw a TypeError. + if (x < lowerBound || x > upperBound) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` + }) + } -/**/ -var Stream = __nccwpck_require__(2387); -/**/ + // 4. Return x. + return x + } -const Buffer = (__nccwpck_require__(4300).Buffer); -const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} -const destroyImpl = __nccwpck_require__(7049); -const _require = __nccwpck_require__(9948), - getHighWaterMark = _require.getHighWaterMark; -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; -const errorOrDestroy = destroyImpl.errorOrDestroy; -__nccwpck_require__(4124)(Writable, Stream); -function nop() {} -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(1359); - options = options || {}; + // 7. If x is not NaN and the conversion is to an IDL + // type associated with the [Clamp] extended + // attribute, then: + if (!Number.isNaN(x) && opts.clamp === true) { + // 1. Set x to min(max(x, lowerBound), upperBound). + x = Math.min(Math.max(x, lowerBound), upperBound) - // Duplex streams are both readable and writable, but share - // the same options object. 
- // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + // 2. Round x to the nearest integer, choosing the + // even integer if it lies halfway between two, + // and choosing +0 rather than −0. + if (Math.floor(x) % 2 === 0) { + x = Math.floor(x) + } else { + x = Math.ceil(x) + } + + // 3. Return x. + return x + } + + // 8. If x is NaN, +0, +∞, or −∞, then return +0. + if ( + Number.isNaN(x) || + (x === 0 && Object.is(0, x)) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + return 0 + } + + // 9. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 10. Set x to x modulo 2^bitLength. + x = x % Math.pow(2, bitLength) - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, + // then return x − 2^bitLength. + if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { + return x - Math.pow(2, bitLength) + } - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + // 12. Otherwise, return x. + return x +} - // if _final has been called - this.finalCalled = false; +// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart +webidl.util.IntegerPart = function (n) { + // 1. Let r be floor(abs(n)). + const r = Math.floor(Math.abs(n)) - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; + // 2. If n < 0, then return -1 × r. + if (n < 0) { + return -1 * r + } - // has it been destroyed - this.destroyed = false; + // 3. Otherwise, return r. + return r +} - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; +// https://webidl.spec.whatwg.org/#es-sequence +webidl.sequenceConverter = function (converter) { + return (V) => { + // 1. If Type(V) is not Object, throw a TypeError. + if (webidl.util.Type(V) !== 'Object') { + throw webidl.errors.exception({ + header: 'Sequence', + message: `Value of type ${webidl.util.Type(V)} is not an Object.` + }) + } - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; + // 2. Let method be ? GetMethod(V, @@iterator). + /** @type {Generator} */ + const method = V?.[Symbol.iterator]?.() + const seq = [] - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; + // 3. If method is undefined, throw a TypeError. 
+ if ( + method === undefined || + typeof method.next !== 'function' + ) { + throw webidl.errors.exception({ + header: 'Sequence', + message: 'Object is not an iterator.' + }) + } - // a flag to see when we're in the middle of a write. - this.writing = false; + // https://webidl.spec.whatwg.org/#create-sequence-from-iterable + while (true) { + const { done, value } = method.next() - // when true all writes will be buffered until .uncork() call - this.corked = 0; + if (done) { + break + } - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; + seq.push(converter(value)) + } - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; + return seq + } +} - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; +// https://webidl.spec.whatwg.org/#es-to-record +webidl.recordConverter = function (keyConverter, valueConverter) { + return (O) => { + // 1. If Type(O) is not Object, throw a TypeError. + if (webidl.util.Type(O) !== 'Object') { + throw webidl.errors.exception({ + header: 'Record', + message: `Value of type ${webidl.util.Type(O)} is not an Object.` + }) + } - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; + // 2. Let result be a new empty instance of record. + const result = {} - // the amount that is being written when _write is called. - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; + if (!types.isProxy(O)) { + // Object.keys only returns enumerable properties + const keys = Object.keys(O) - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; + for (const key of keys) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } - // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; + // 5. Return result. + return result + } - // Should .destroy() be called after 'finish' (and potentially 'end') - this.autoDestroy = !!options.autoDestroy; + // 3. Let keys be ? O.[[OwnPropertyKeys]](). + const keys = Reflect.ownKeys(O) - // count buffered requests - this.bufferedRequestCount = 0; + // 4. For each key of keys. + for (const key of keys) { + // 1. Let desc be ? O.[[GetOwnProperty]](key). 
+ const desc = Reflect.getOwnPropertyDescriptor(O, key) - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; + // 2. If desc is not undefined and desc.[[Enumerable]] is true: + if (desc?.enumerable) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + } + + // 5. Return result. + return result } - return out; -}; -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); +} -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. -var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; +webidl.interfaceConverter = function (i) { + return (V, opts = {}) => { + if (opts.strict !== false && !(V instanceof i)) { + throw webidl.errors.exception({ + header: i.name, + message: `Expected ${V} to be an instance of ${i.name}.` + }) } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; + + return V + } } -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. +webidl.dictionaryConverter = function (converters) { + return (dictionary) => { + const type = webidl.util.Type(dictionary) + const dict = {} - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. + if (type === 'Null' || type === 'Undefined') { + return dict + } else if (type !== 'Object') { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` + }) + } - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 - const isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); + for (const options of converters) { + const { key, defaultValue, required, converter } = options - // legacy. 
- this.writable = true; - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; + if (required === true) { + if (!hasOwn(dictionary, key)) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Missing required key "${key}".` + }) + } + } + + let value = dictionary[key] + const hasDefault = hasOwn(options, 'defaultValue') + + // Only use defaultValue if value is undefined and + // a defaultValue options was provided. + if (hasDefault && value !== null) { + value = value ?? defaultValue + } + + // A key can be optional and have no default value. + // When this happens, do not perform a conversion, + // and do not assign the key a value. + if (required || hasDefault || value !== undefined) { + value = converter(value) + + if ( + options.allowedValues && + !options.allowedValues.includes(value) + ) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` + }) + } + + dict[key] = value + } + } + + return dict } - Stream.call(this); } -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); - // TODO: defer error events consistently everywhere, not just the cb - errorOrDestroy(stream, er); - process.nextTick(cb, er); +webidl.nullableConverter = function (converter) { + return (V) => { + if (V === null) { + return V + } + + return converter(V) + } } -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. -function validChunk(stream, state, chunk, cb) { - var er; - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); +// https://webidl.spec.whatwg.org/#es-DOMString +webidl.converters.DOMString = function (V, opts = {}) { + // 1. If V is null and the conversion is to an IDL type + // associated with the [LegacyNullToEmptyString] + // extended attribute, then return the DOMString value + // that represents the empty string. + if (V === null && opts.legacyNullToEmptyString) { + return '' } - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; + + // 2. Let x be ? ToString(V). + if (typeof V === 'symbol') { + throw new TypeError('Could not convert argument of type symbol to string.') } - return true; + + // 3. Return the IDL DOMString value that represents the + // same sequence of code units as the one the + // ECMAScript String value x represents. + return String(V) } -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); + +// https://webidl.spec.whatwg.org/#es-ByteString +webidl.converters.ByteString = function (V) { + // 1. Let x be ? ToString(V). + // Note: DOMString converter perform ? 
ToString(V) + const x = webidl.converters.DOMString(V) + + // 2. If the value of any element of x is greater than + // 255, then throw a TypeError. + for (let index = 0; index < x.length; index++) { + if (x.charCodeAt(index) > 255) { + throw new TypeError( + 'Cannot convert argument to a ByteString because the character at ' + + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` + ) + } } - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + + // 3. Return an IDL ByteString value whose length is the + // length of x, and where the value of each element is + // the value of the corresponding element of x. + return x +} + +// https://webidl.spec.whatwg.org/#es-USVString +webidl.converters.USVString = toUSVString + +// https://webidl.spec.whatwg.org/#es-boolean +webidl.converters.boolean = function (V) { + // 1. Let x be the result of computing ToBoolean(V). + const x = Boolean(V) + + // 2. Return the IDL boolean value that is the one that represents + // the same truth value as the ECMAScript Boolean value x. + return x +} + +// https://webidl.spec.whatwg.org/#es-any +webidl.converters.any = function (V) { + return V +} + +// https://webidl.spec.whatwg.org/#es-long-long +webidl.converters['long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "signed"). + const x = webidl.util.ConvertToInt(V, 64, 'signed') + + // 2. Return the IDL long long value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long-long +webidl.converters['unsigned long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). + const x = webidl.util.ConvertToInt(V, 64, 'unsigned') + + // 2. Return the IDL unsigned long long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long +webidl.converters['unsigned long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). + const x = webidl.util.ConvertToInt(V, 32, 'unsigned') + + // 2. Return the IDL unsigned long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-short +webidl.converters['unsigned short'] = function (V, opts) { + // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). + const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) + + // 2. Return the IDL unsigned short value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#idl-ArrayBuffer +webidl.converters.ArrayBuffer = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have an + // [[ArrayBufferData]] internal slot, then throw a + // TypeError. + // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances + // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances + if ( + webidl.util.Type(V) !== 'Object' || + !types.isAnyArrayBuffer(V) + ) { + throw webidl.errors.conversionFailed({ + prefix: `${V}`, + argument: `${V}`, + types: ['ArrayBuffer'] + }) } - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + + // 2. 
If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V) is true, then throw a + // TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) } - return ret; -}; -Writable.prototype.cork = function () { - this._writableState.corked++; -}; -Writable.prototype.uncork = function () { - var state = this._writableState; - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V) is true, then throw a + // TypeError. + // Note: resizable ArrayBuffers are currently a proposal. + + // 4. Return the IDL ArrayBuffer value that is a + // reference to the same object as V. + return V +} + +webidl.converters.TypedArray = function (V, T, opts = {}) { + // 1. Let T be the IDL type V is being converted to. + + // 2. If Type(V) is not Object, or V does not have a + // [[TypedArrayName]] internal slot with a value + // equal to T’s name, then throw a TypeError. + if ( + webidl.util.Type(V) !== 'Object' || + !types.isTypedArray(V) || + V.constructor.name !== T.name + ) { + throw webidl.errors.conversionFailed({ + prefix: `${T.name}`, + argument: `${V}`, + types: [T.name] + }) } -}; -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); + + // 3. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) } -}); -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); + + // 4. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable array buffers are currently a proposal + + // 5. Return the IDL value of type T that is a reference + // to the same object as V. + return V +} + +webidl.converters.DataView = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have a + // [[DataView]] internal slot, then throw a TypeError. 
+ if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { + throw webidl.errors.exception({ + header: 'DataView', + message: 'Object is not a DataView.' + }) } - return chunk; + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, + // then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable ArrayBuffers are currently a proposal + + // 4. Return the IDL DataView value that is a reference + // to the same object as V. + return V } -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; + +// https://webidl.spec.whatwg.org/#BufferSource +webidl.converters.BufferSource = function (V, opts = {}) { + if (types.isAnyArrayBuffer(V)) { + return webidl.converters.ArrayBuffer(V, opts) } -}); -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } + if (types.isTypedArray(V)) { + return webidl.converters.TypedArray(V, V.constructor) } - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true;
- if (state.writing || state.corked) {
- var last = state.lastBufferedRequest;
- state.lastBufferedRequest = {
- chunk,
- encoding,
- isBuf,
- callback: cb,
- next: null
- };
- if (last) {
- last.next = state.lastBufferedRequest;
- } else {
- state.bufferedRequest = state.lastBufferedRequest;
- }
- state.bufferedRequestCount += 1;
- } else {
- doWrite(stream, state, false, len, chunk, encoding, cb);
+
+ if (types.isDataView(V)) {
+ return webidl.converters.DataView(V, opts)
 }
- return ret;
+
+ throw new TypeError(`Could not convert ${V} to a BufferSource.`)
}
-function doWrite(stream, state, writev, len, chunk, encoding, cb) {
- state.writelen = len;
- state.writecb = cb;
- state.writing = true;
- state.sync = true;
- if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
- state.sync = false;
+
+webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(
+ webidl.converters.ByteString
+)
+
+webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter(
+ webidl.converters['sequence<ByteString>']
+)
+
+webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter(
+ webidl.converters.ByteString,
+ webidl.converters.ByteString
+)
+
+module.exports = {
+ webidl
}
-function onwriteError(stream, state, sync, er, cb) {
- --state.pendingcb;
- if (sync) {
- // defer the callback if we are being called synchronously
- // to avoid piling up things on the stack
- process.nextTick(cb, er);
- // this can emit finish, and it will always happen
- // after error
- process.nextTick(finishMaybe, stream, state);
- stream._writableState.errorEmitted = true;
- errorOrDestroy(stream, er);
- } else {
- // the caller expect this to happen before if
- // it is async
- cb(er);
- stream._writableState.errorEmitted = true;
- errorOrDestroy(stream, er);
- // this can emit finish, but finish must
- // always follow error
- finishMaybe(stream, state);
+
+
+/***/ }),
+
+/***/ 4854:
+/***/ ((module) => {
+
+"use strict";
+
+
+/**
+ * @see https://encoding.spec.whatwg.org/#concept-encoding-get
+ * @param {string|undefined} label
+ */
+function getEncoding (label) {
+ if (!label) {
+ return 'failure'
+ }
+
+ // 1. Remove any leading and trailing ASCII whitespace from label.
+ // 2. If label is an ASCII case-insensitive match for any of the
+ // labels listed in the table below, then return the
+ // corresponding encoding; otherwise return failure.
+ switch (label.trim().toLowerCase()) { + case 'unicode-1-1-utf-8': + case 'unicode11utf8': + case 'unicode20utf8': + case 'utf-8': + case 'utf8': + case 'x-unicode20utf8': + return 'UTF-8' + case '866': + case 'cp866': + case 'csibm866': + case 'ibm866': + return 'IBM866' + case 'csisolatin2': + case 'iso-8859-2': + case 'iso-ir-101': + case 'iso8859-2': + case 'iso88592': + case 'iso_8859-2': + case 'iso_8859-2:1987': + case 'l2': + case 'latin2': + return 'ISO-8859-2' + case 'csisolatin3': + case 'iso-8859-3': + case 'iso-ir-109': + case 'iso8859-3': + case 'iso88593': + case 'iso_8859-3': + case 'iso_8859-3:1988': + case 'l3': + case 'latin3': + return 'ISO-8859-3' + case 'csisolatin4': + case 'iso-8859-4': + case 'iso-ir-110': + case 'iso8859-4': + case 'iso88594': + case 'iso_8859-4': + case 'iso_8859-4:1988': + case 'l4': + case 'latin4': + return 'ISO-8859-4' + case 'csisolatincyrillic': + case 'cyrillic': + case 'iso-8859-5': + case 'iso-ir-144': + case 'iso8859-5': + case 'iso88595': + case 'iso_8859-5': + case 'iso_8859-5:1988': + return 'ISO-8859-5' + case 'arabic': + case 'asmo-708': + case 'csiso88596e': + case 'csiso88596i': + case 'csisolatinarabic': + case 'ecma-114': + case 'iso-8859-6': + case 'iso-8859-6-e': + case 'iso-8859-6-i': + case 'iso-ir-127': + case 'iso8859-6': + case 'iso88596': + case 'iso_8859-6': + case 'iso_8859-6:1987': + return 'ISO-8859-6' + case 'csisolatingreek': + case 'ecma-118': + case 'elot_928': + case 'greek': + case 'greek8': + case 'iso-8859-7': + case 'iso-ir-126': + case 'iso8859-7': + case 'iso88597': + case 'iso_8859-7': + case 'iso_8859-7:1987': + case 'sun_eu_greek': + return 'ISO-8859-7' + case 'csiso88598e': + case 'csisolatinhebrew': + case 'hebrew': + case 'iso-8859-8': + case 'iso-8859-8-e': + case 'iso-ir-138': + case 'iso8859-8': + case 'iso88598': + case 'iso_8859-8': + case 'iso_8859-8:1988': + case 'visual': + return 'ISO-8859-8' + case 'csiso88598i': + case 'iso-8859-8-i': + case 'logical': + return 'ISO-8859-8-I' + case 'csisolatin6': + case 'iso-8859-10': + case 'iso-ir-157': + case 'iso8859-10': + case 'iso885910': + case 'l6': + case 'latin6': + return 'ISO-8859-10' + case 'iso-8859-13': + case 'iso8859-13': + case 'iso885913': + return 'ISO-8859-13' + case 'iso-8859-14': + case 'iso8859-14': + case 'iso885914': + return 'ISO-8859-14' + case 'csisolatin9': + case 'iso-8859-15': + case 'iso8859-15': + case 'iso885915': + case 'iso_8859-15': + case 'l9': + return 'ISO-8859-15' + case 'iso-8859-16': + return 'ISO-8859-16' + case 'cskoi8r': + case 'koi': + case 'koi8': + case 'koi8-r': + case 'koi8_r': + return 'KOI8-R' + case 'koi8-ru': + case 'koi8-u': + return 'KOI8-U' + case 'csmacintosh': + case 'mac': + case 'macintosh': + case 'x-mac-roman': + return 'macintosh' + case 'iso-8859-11': + case 'iso8859-11': + case 'iso885911': + case 'tis-620': + case 'windows-874': + return 'windows-874' + case 'cp1250': + case 'windows-1250': + case 'x-cp1250': + return 'windows-1250' + case 'cp1251': + case 'windows-1251': + case 'x-cp1251': + return 'windows-1251' + case 'ansi_x3.4-1968': + case 'ascii': + case 'cp1252': + case 'cp819': + case 'csisolatin1': + case 'ibm819': + case 'iso-8859-1': + case 'iso-ir-100': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'iso_8859-1:1987': + case 'l1': + case 'latin1': + case 'us-ascii': + case 'windows-1252': + case 'x-cp1252': + return 'windows-1252' + case 'cp1253': + case 'windows-1253': + case 'x-cp1253': + return 'windows-1253' + case 'cp1254': + case 'csisolatin5': + 
case 'iso-8859-9': + case 'iso-ir-148': + case 'iso8859-9': + case 'iso88599': + case 'iso_8859-9': + case 'iso_8859-9:1989': + case 'l5': + case 'latin5': + case 'windows-1254': + case 'x-cp1254': + return 'windows-1254' + case 'cp1255': + case 'windows-1255': + case 'x-cp1255': + return 'windows-1255' + case 'cp1256': + case 'windows-1256': + case 'x-cp1256': + return 'windows-1256' + case 'cp1257': + case 'windows-1257': + case 'x-cp1257': + return 'windows-1257' + case 'cp1258': + case 'windows-1258': + case 'x-cp1258': + return 'windows-1258' + case 'x-mac-cyrillic': + case 'x-mac-ukrainian': + return 'x-mac-cyrillic' + case 'chinese': + case 'csgb2312': + case 'csiso58gb231280': + case 'gb2312': + case 'gb_2312': + case 'gb_2312-80': + case 'gbk': + case 'iso-ir-58': + case 'x-gbk': + return 'GBK' + case 'gb18030': + return 'gb18030' + case 'big5': + case 'big5-hkscs': + case 'cn-big5': + case 'csbig5': + case 'x-x-big5': + return 'Big5' + case 'cseucpkdfmtjapanese': + case 'euc-jp': + case 'x-euc-jp': + return 'EUC-JP' + case 'csiso2022jp': + case 'iso-2022-jp': + return 'ISO-2022-JP' + case 'csshiftjis': + case 'ms932': + case 'ms_kanji': + case 'shift-jis': + case 'shift_jis': + case 'sjis': + case 'windows-31j': + case 'x-sjis': + return 'Shift_JIS' + case 'cseuckr': + case 'csksc56011987': + case 'euc-kr': + case 'iso-ir-149': + case 'korean': + case 'ks_c_5601-1987': + case 'ks_c_5601-1989': + case 'ksc5601': + case 'ksc_5601': + case 'windows-949': + return 'EUC-KR' + case 'csiso2022kr': + case 'hz-gb-2312': + case 'iso-2022-cn': + case 'iso-2022-cn-ext': + case 'iso-2022-kr': + case 'replacement': + return 'replacement' + case 'unicodefffe': + case 'utf-16be': + return 'UTF-16BE' + case 'csunicode': + case 'iso-10646-ucs-2': + case 'ucs-2': + case 'unicode': + case 'unicodefeff': + case 'utf-16': + case 'utf-16le': + return 'UTF-16LE' + case 'x-user-defined': + return 'x-user-defined' + default: return 'failure' } } -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; + +module.exports = { + getEncoding } -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - if (sync) { - process.nextTick(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); + + +/***/ }), + +/***/ 1446: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} = __nccwpck_require__(7530) +const { + kState, + kError, + kResult, + kEvents, + kAborted +} = __nccwpck_require__(9054) +const { webidl } = __nccwpck_require__(1744) +const { kEnumerableProperty } = __nccwpck_require__(3983) + +class FileReader extends EventTarget { + constructor () { + super() + + this[kState] = 'empty' + this[kResult] = null + this[kError] = null + this[kEvents] = { + loadend: null, + error: null, + abort: null, + load: null, + progress: null, + loadstart: null } } -} -function afterWrite(stream, state, finished, cb) { - if (!finished) 
onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer + * @param {import('buffer').Blob} blob + */ + readAsArrayBuffer (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsArrayBuffer(blob) method, when invoked, + // must initiate a read operation for blob with ArrayBuffer. + readOperation(this, blob, 'ArrayBuffer') } -} -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); + /** + * @see https://w3c.github.io/FileAPI/#readAsBinaryString + * @param {import('buffer').Blob} blob + */ + readAsBinaryString (blob) { + webidl.brandCheck(this, FileReader) - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; - } - } - if (entry === null) state.lastBufferedRequest = null; + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsBinaryString(blob) method, when invoked, + // must initiate a read operation for blob with BinaryString. 
+ readOperation(this, blob, 'BinaryString') } - state.bufferedRequest = entry; - state.bufferProcessing = false; -} -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); -}; -Writable.prototype._writev = null; -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + + /** + * @see https://w3c.github.io/FileAPI/#readAsDataText + * @param {import('buffer').Blob} blob + * @param {string?} encoding + */ + readAsText (blob, encoding = undefined) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + if (encoding !== undefined) { + encoding = webidl.converters.DOMString(encoding) + } + + // The readAsText(blob, encoding) method, when invoked, + // must initiate a read operation for blob with Text and encoding. + readOperation(this, blob, 'Text', encoding) } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL + * @param {import('buffer').Blob} blob + */ + readAsDataURL (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsDataURL(blob) method, when invoked, must + // initiate a read operation for blob with DataURL. + readOperation(this, blob, 'DataURL') } - // ignore unnecessary end() calls. - if (!state.ending) endWritable(this, state, cb); - return this; -}; -Object.defineProperty(Writable.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - return this._writableState.length; - } -}); -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} -function callFinal(stream, state) { - stream._final(err => { - state.pendingcb--; - if (err) { - errorOrDestroy(stream, err); + /** + * @see https://w3c.github.io/FileAPI/#dfn-abort + */ + abort () { + // 1. If this's state is "empty" or if this's state is + // "done" set this's result to null and terminate + // this algorithm. + if (this[kState] === 'empty' || this[kState] === 'done') { + this[kResult] = null + return } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.pendingcb++; - state.finalCalled = true; - process.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); + + // 2. If this's state is "loading" set this's state to + // "done" and set this's result to null. + if (this[kState] === 'loading') { + this[kState] = 'done' + this[kResult] = null + } + + // 3. If there are any tasks from this on the file reading + // task source in an affiliated task queue, then remove + // those tasks from that task queue. + this[kAborted] = true + + // 4. 
Terminate the algorithm for the read method being processed. + // TODO + + // 5. Fire a progress event called abort at this. + fireAProgressEvent('abort', this) + + // 6. If this's state is not "loading", fire a progress + // event called loadend at this. + if (this[kState] !== 'loading') { + fireAProgressEvent('loadend', this) } } -} -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well - const rState = stream._readableState; - if (!rState || rState.autoDestroy && rState.endEmitted) { - stream.destroy(); - } - } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate + */ + get readyState () { + webidl.brandCheck(this, FileReader) + + switch (this[kState]) { + case 'empty': return this.EMPTY + case 'loading': return this.LOADING + case 'done': return this.DONE } } - return need; -} -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-result + */ + get result () { + webidl.brandCheck(this, FileReader) + + // The result attribute’s getter, when invoked, must return + // this's result. + return this[kResult] } - state.ended = true; - stream.writable = false; -} -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-error + */ + get error () { + webidl.brandCheck(this, FileReader) + + // The error attribute’s getter, when invoked, must return + // this's error. + return this[kError] } - // reuse the free corkReq. 
- state.corkedRequestsFree.next = corkReq; -} -Object.defineProperty(Writable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get() { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; + get onloadend () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadend + } + + set onloadend (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadend) { + this.removeEventListener('loadend', this[kEvents].loadend) } - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; + if (typeof fn === 'function') { + this[kEvents].loadend = fn + this.addEventListener('loadend', fn) + } else { + this[kEvents].loadend = null + } } -}); -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - cb(err); -}; -/***/ }), + get onerror () { + webidl.brandCheck(this, FileReader) -/***/ 3306: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return this[kEvents].error + } -"use strict"; + set onerror (fn) { + webidl.brandCheck(this, FileReader) + if (this[kEvents].error) { + this.removeEventListener('error', this[kEvents].error) + } -const finished = __nccwpck_require__(6080); -const kLastResolve = Symbol('lastResolve'); -const kLastReject = Symbol('lastReject'); -const kError = Symbol('error'); -const kEnded = Symbol('ended'); -const kLastPromise = Symbol('lastPromise'); -const kHandlePromise = Symbol('handlePromise'); -const kStream = Symbol('stream'); -function createIterResult(value, done) { - return { - value, - done - }; -} -function readAndResolve(iter) { - const resolve = iter[kLastResolve]; - if (resolve !== null) { - const data = iter[kStream].read(); - // we defer if data is null - // we can be expecting either 'end' or - // 'error' - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); + if (typeof fn === 'function') { + this[kEvents].error = fn + this.addEventListener('error', fn) + } else { + this[kEvents].error = null } } -} -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); -} -function wrapForNext(lastPromise, iter) { - return (resolve, reject) => { - lastPromise.then(() => { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } - iter[kHandlePromise](resolve, reject); - }, reject); - }; -} -const AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ - get stream() { - return this[kStream]; - }, - next() { - // if we have detected an error in the meanwhile - // reject straight away - const error = this[kError]; - if (error !== null) { - return Promise.reject(error); + + get onloadstart () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadstart + } + + set onloadstart (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadstart) { + this.removeEventListener('loadstart', this[kEvents].loadstart) } - if (this[kEnded]) { - return 
Promise.resolve(createIterResult(undefined, true)); + + if (typeof fn === 'function') { + this[kEvents].loadstart = fn + this.addEventListener('loadstart', fn) + } else { + this[kEvents].loadstart = null } - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. - return new Promise((resolve, reject) => { - process.nextTick(() => { - if (this[kError]) { - reject(this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); + } + + get onprogress () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].progress + } + + set onprogress (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].progress) { + this.removeEventListener('progress', this[kEvents].progress) } - // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - const lastPromise = this[kLastPromise]; - let promise; - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); + if (typeof fn === 'function') { + this[kEvents].progress = fn + this.addEventListener('progress', fn) } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - const data = this[kStream].read(); - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } - promise = new Promise(this[kHandlePromise]); + this[kEvents].progress = null } - this[kLastPromise] = promise; - return promise; - }, - [Symbol.asyncIterator]() { - return this; - }, - return() { - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise((resolve, reject) => { - this[kStream].destroy(null, err => { - if (err) { - reject(err); - return; - } - resolve(createIterResult(undefined, true)); - }); - }); } -}, AsyncIteratorPrototype); -const createReadableStreamAsyncIterator = stream => { - const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, { - [kStream]: { - value: stream, - writable: true - }, - [kLastResolve]: { - value: null, - writable: true - }, - [kLastReject]: { - value: null, - writable: true - }, - [kError]: { - value: null, - writable: true - }, - [kEnded]: { - value: stream._readableState.endEmitted, - writable: true - }, - // the function passed to new Promise - // is cached so we avoid allocating a new - // closure at every run - [kHandlePromise]: { - value: (resolve, reject) => { - const data = iterator[kStream].read(); - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true + + get onload () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].load + } + + set onload (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].load) { + this.removeEventListener('load', this[kEvents].load) } - }); - iterator[kLastPromise] = null; - finished(stream, err => { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - const reject = iterator[kLastReject]; - // reject if we are waiting for data in the Promise - // returned by next() and store the error - if (reject !== null) { - 
iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } - iterator[kError] = err; - return; + + if (typeof fn === 'function') { + this[kEvents].load = fn + this.addEventListener('load', fn) + } else { + this[kEvents].load = null } - const resolve = iterator[kLastResolve]; - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); + } + + get onabort () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].abort + } + + set onabort (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].abort) { + this.removeEventListener('abort', this[kEvents].abort) } - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; -}; -module.exports = createReadableStreamAsyncIterator; + + if (typeof fn === 'function') { + this[kEvents].abort = fn + this.addEventListener('abort', fn) + } else { + this[kEvents].abort = null + } + } +} + +// https://w3c.github.io/FileAPI/#dom-filereader-empty +FileReader.EMPTY = FileReader.prototype.EMPTY = 0 +// https://w3c.github.io/FileAPI/#dom-filereader-loading +FileReader.LOADING = FileReader.prototype.LOADING = 1 +// https://w3c.github.io/FileAPI/#dom-filereader-done +FileReader.DONE = FileReader.prototype.DONE = 2 + +Object.defineProperties(FileReader.prototype, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors, + readAsArrayBuffer: kEnumerableProperty, + readAsBinaryString: kEnumerableProperty, + readAsText: kEnumerableProperty, + readAsDataURL: kEnumerableProperty, + abort: kEnumerableProperty, + readyState: kEnumerableProperty, + result: kEnumerableProperty, + error: kEnumerableProperty, + onloadstart: kEnumerableProperty, + onprogress: kEnumerableProperty, + onload: kEnumerableProperty, + onabort: kEnumerableProperty, + onerror: kEnumerableProperty, + onloadend: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'FileReader', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(FileReader, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors +}) + +module.exports = { + FileReader +} + /***/ }), -/***/ 6522: +/***/ 5504: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -const _require = __nccwpck_require__(4300), - Buffer = _require.Buffer; -const _require2 = __nccwpck_require__(3837), - inspect = _require2.inspect; -const custom = inspect && inspect.custom || 'inspect'; -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} -module.exports = class BufferList { - constructor() { - this.head = null; - this.tail = null; - this.length = 0; - } - push(v) { - const entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - unshift(v) { - const entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - shift() { - if (this.length === 0) return; - const ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; +const { webidl } = __nccwpck_require__(1744) + +const kState = Symbol('ProgressEvent state') + +/** + * @see https://xhr.spec.whatwg.org/#progressevent + */ +class ProgressEvent extends Event { + constructor (type, eventInitDict = {}) { + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) + + super(type, eventInitDict) + + this[kState] = { + lengthComputable: eventInitDict.lengthComputable, + loaded: eventInitDict.loaded, + total: eventInitDict.total + } } - clear() { - this.head = this.tail = null; - this.length = 0; + + get lengthComputable () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].lengthComputable } - join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) ret += s + p.data; - return ret; + + get loaded () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].loaded } - concat(n) { - if (this.length === 0) return Buffer.alloc(0); - const ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; + + get total () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].total } +} - // Consumes a specified amount of bytes or characters from the buffered data. - consume(n, hasStrings) { - var ret; - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. 
- ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } - return ret; +webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ + { + key: 'lengthComputable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'loaded', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'total', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false } - first() { - return this.head.data; +]) + +module.exports = { + ProgressEvent +} + + +/***/ }), + +/***/ 9054: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kState: Symbol('FileReader state'), + kResult: Symbol('FileReader result'), + kError: Symbol('FileReader error'), + kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), + kEvents: Symbol('FileReader events'), + kAborted: Symbol('FileReader aborted') +} + + +/***/ }), + +/***/ 7530: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + kState, + kError, + kResult, + kAborted, + kLastProgressEventFired +} = __nccwpck_require__(9054) +const { ProgressEvent } = __nccwpck_require__(5504) +const { getEncoding } = __nccwpck_require__(4854) +const { DOMException } = __nccwpck_require__(1037) +const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(685) +const { types } = __nccwpck_require__(3837) +const { StringDecoder } = __nccwpck_require__(1576) +const { btoa } = __nccwpck_require__(4300) + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +/** + * @see https://w3c.github.io/FileAPI/#readOperation + * @param {import('./filereader').FileReader} fr + * @param {import('buffer').Blob} blob + * @param {string} type + * @param {string?} encodingName + */ +function readOperation (fr, blob, type, encodingName) { + // 1. If fr’s state is "loading", throw an InvalidStateError + // DOMException. + if (fr[kState] === 'loading') { + throw new DOMException('Invalid state', 'InvalidStateError') } - // Consumes a specified amount of characters from the buffered data. - _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - const str = p.data; - const nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); + // 2. Set fr’s state to "loading". + fr[kState] = 'loading' + + // 3. Set fr’s result to null. + fr[kResult] = null + + // 4. Set fr’s error to null. + fr[kError] = null + + // 5. Let stream be the result of calling get stream on blob. + /** @type {import('stream/web').ReadableStream} */ + const stream = blob.stream() + + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader() + + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const bytes = [] + + // 8. Let chunkPromise be the result of reading a chunk from + // stream with reader. 
+ let chunkPromise = reader.read() + + // 9. Let isFirstChunk be true. + let isFirstChunk = true + + // 10. In parallel, while true: + // Note: "In parallel" just means non-blocking + // Note 2: readOperation itself cannot be async as double + // reading the body would then reject the promise, instead + // of throwing an error. + ;(async () => { + while (!fr[kAborted]) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const { done, value } = await chunkPromise + + // 2. If chunkPromise is fulfilled, and isFirstChunk is + // true, queue a task to fire a progress event called + // loadstart at fr. + if (isFirstChunk && !fr[kAborted]) { + queueMicrotask(() => { + fireAProgressEvent('loadstart', fr) + }) + } + + // 3. Set isFirstChunk to false. + isFirstChunk = false + + // 4. If chunkPromise is fulfilled with an object whose + // done property is false and whose value property is + // a Uint8Array object, run these steps: + if (!done && types.isUint8Array(value)) { + // 1. Let bs be the byte sequence represented by the + // Uint8Array object. + + // 2. Append bs to bytes. + bytes.push(value) + + // 3. If roughly 50ms have passed since these steps + // were last invoked, queue a task to fire a + // progress event called progress at fr. + if ( + ( + fr[kLastProgressEventFired] === undefined || + Date.now() - fr[kLastProgressEventFired] >= 50 + ) && + !fr[kAborted] + ) { + fr[kLastProgressEventFired] = Date.now() + queueMicrotask(() => { + fireAProgressEvent('progress', fr) + }) + } + + // 4. Set chunkPromise to the result of reading a + // chunk from stream with reader. + chunkPromise = reader.read() + } else if (done) { + // 5. Otherwise, if chunkPromise is fulfilled with an + // object whose done property is true, queue a task + // to run the following steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Let result be the result of package data given + // bytes, type, blob’s type, and encodingName. + try { + const result = packageData(bytes, type, blob.type, encodingName) + + // 4. Else: + + if (fr[kAborted]) { + return + } + + // 1. Set fr’s result to result. + fr[kResult] = result + + // 2. Fire a progress event called load at the fr. + fireAProgressEvent('load', fr) + } catch (error) { + // 3. If package data threw an exception error: + + // 1. Set fr’s error to error. + fr[kError] = error + + // 2. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + } + + // 5. If fr’s state is not "loading", fire a progress + // event called loadend at the fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break } - break; + } catch (error) { + if (fr[kAborted]) { + return + } + + // 6. Otherwise, if chunkPromise is rejected with an + // error error, queue a task to run the following + // steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Set fr’s error to error. + fr[kError] = error + + // 3. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + + // 4. If fr’s state is not "loading", fire a progress + // event called loadend at fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break } - ++c; } - this.length -= c; - return ret; - } + })() +} - // Consumes a specified amount of bytes from the buffered data. 
- _getBuffer(n) { - const ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - const buf = p.data; - const nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - break; +/** + * @see https://w3c.github.io/FileAPI/#fire-a-progress-event + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e The name of the event + * @param {import('./filereader').FileReader} reader + */ +function fireAProgressEvent (e, reader) { + // The progress event e does not bubble. e.bubbles must be false + // The progress event e is NOT cancelable. e.cancelable must be false + const event = new ProgressEvent(e, { + bubbles: false, + cancelable: false + }) + + reader.dispatchEvent(event) +} + +/** + * @see https://w3c.github.io/FileAPI/#blob-package-data + * @param {Uint8Array[]} bytes + * @param {string} type + * @param {string?} mimeType + * @param {string?} encodingName + */ +function packageData (bytes, type, mimeType, encodingName) { + // 1. A Blob has an associated package data algorithm, given + // bytes, a type, a optional mimeType, and a optional + // encodingName, which switches on type and runs the + // associated steps: + + switch (type) { + case 'DataURL': { + // 1. Return bytes as a DataURL [RFC2397] subject to + // the considerations below: + // * Use mimeType as part of the Data URL if it is + // available in keeping with the Data URL + // specification [RFC2397]. + // * If mimeType is not available return a Data URL + // without a media-type. [RFC2397]. + + // https://datatracker.ietf.org/doc/html/rfc2397#section-3 + // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + // mediatype := [ type "/" subtype ] *( ";" parameter ) + // data := *urlchar + // parameter := attribute "=" value + let dataURL = 'data:' + + const parsed = parseMIMEType(mimeType || 'application/octet-stream') + + if (parsed !== 'failure') { + dataURL += serializeAMimeType(parsed) } - ++c; - } - this.length -= c; - return ret; - } - // Make sure the linked list only shows the minimal necessary information. - [custom](_, options) { - return inspect(this, _objectSpread(_objectSpread({}, options), {}, { - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - })); - } -}; + dataURL += ';base64,' -/***/ }), + const decoder = new StringDecoder('latin1') -/***/ 7049: -/***/ ((module) => { + for (const chunk of bytes) { + dataURL += btoa(decoder.write(chunk)) + } -"use strict"; + dataURL += btoa(decoder.end()) + return dataURL + } + case 'Text': { + // 1. Let encoding be failure + let encoding = 'failure' -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - const readableDestroyed = this._readableState && this._readableState.destroyed; - const writableDestroyed = this._writableState && this._writableState.destroyed; - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); + // 2. If the encodingName is present, set encoding to the + // result of getting an encoding from encodingName. 
+ if (encodingName) { + encoding = getEncoding(encodingName) + } + + // 3. If encoding is failure, and mimeType is present: + if (encoding === 'failure' && mimeType) { + // 1. Let type be the result of parse a MIME type + // given mimeType. + const type = parseMIMEType(mimeType) + + // 2. If type is not failure, set encoding to the result + // of getting an encoding from type’s parameters["charset"]. + if (type !== 'failure') { + encoding = getEncoding(type.parameters.get('charset')) + } + } + + // 4. If encoding is failure, then set encoding to UTF-8. + if (encoding === 'failure') { + encoding = 'UTF-8' } + + // 5. Decode bytes using fallback encoding encoding, and + // return the result. + return decode(bytes, encoding) } - return this; - } + case 'ArrayBuffer': { + // Return a new ArrayBuffer whose contents are bytes. + const sequence = combineByteSequences(bytes) - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks + return sequence.buffer + } + case 'BinaryString': { + // Return bytes as a binary string, in which every byte + // is represented by a code unit of equal value [0..255]. + let binaryString = '' - if (this._readableState) { - this._readableState.destroyed = true; - } + const decoder = new StringDecoder('latin1') - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; - } - this._destroy(err || null, err => { - if (!cb && err) { - if (!this._writableState) { - process.nextTick(emitErrorAndCloseNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, this, err); - } else { - process.nextTick(emitCloseNT, this); + for (const chunk of bytes) { + binaryString += decoder.write(chunk) } - } else if (cb) { - process.nextTick(emitCloseNT, this); - cb(err); - } else { - process.nextTick(emitCloseNT, this); + + binaryString += decoder.end() + + return binaryString } - }); - return this; -} -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); -} -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); -} -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; } - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; +} + +/** + * @see https://encoding.spec.whatwg.org/#decode + * @param {Uint8Array[]} ioQueue + * @param {string} encoding + */ +function decode (ioQueue, encoding) { + const bytes = combineByteSequences(ioQueue) + + // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. + const BOMEncoding = BOMSniffing(bytes) + + let slice = 0 + + // 2. If BOMEncoding is non-null: + if (BOMEncoding !== null) { + // 1. Set encoding to BOMEncoding. + encoding = BOMEncoding + + // 2. Read three bytes from ioQueue, if BOMEncoding is + // UTF-8; otherwise read two bytes. + // (Do nothing with those bytes.) + slice = BOMEncoding === 'UTF-8' ? 
3 : 2 } + + // 3. Process a queue with an instance of encoding’s + // decoder, ioQueue, output, and "replacement". + + // 4. Return output. + + const sliced = bytes.slice(slice) + return new TextDecoder(encoding).decode(sliced) } -function emitErrorNT(self, err) { - self.emit('error', err); + +/** + * @see https://encoding.spec.whatwg.org/#bom-sniff + * @param {Uint8Array} ioQueue + */ +function BOMSniffing (ioQueue) { + // 1. Let BOM be the result of peeking 3 bytes from ioQueue, + // converted to a byte sequence. + const [a, b, c] = ioQueue + + // 2. For each of the rows in the table below, starting with + // the first one and going down, if BOM starts with the + // bytes given in the first column, then return the + // encoding given in the cell in the second column of that + // row. Otherwise, return null. + if (a === 0xEF && b === 0xBB && c === 0xBF) { + return 'UTF-8' + } else if (a === 0xFE && b === 0xFF) { + return 'UTF-16BE' + } else if (a === 0xFF && b === 0xFE) { + return 'UTF-16LE' + } + + return null } -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. - const rState = stream._readableState; - const wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +/** + * @param {Uint8Array[]} sequences + */ +function combineByteSequences (sequences) { + const size = sequences.reduce((a, b) => { + return a + b.byteLength + }, 0) + + let offset = 0 + + return sequences.reduce((a, b) => { + a.set(b, offset) + offset += b.byteLength + return a + }, new Uint8Array(size)) } + module.exports = { - destroy, - undestroy, - errorOrDestroy -}; + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} + /***/ }), -/***/ 6080: +/***/ 1892: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). +// We include a version number for the Dispatcher API. In case of breaking changes, +// this version number must be increased to avoid conflicts. 
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1') +const { InvalidArgumentError } = __nccwpck_require__(8045) +const Agent = __nccwpck_require__(7890) -const ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(7214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE); -function once(callback) { - let called = false; - return function () { - if (called) return; - called = true; - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - callback.apply(this, args); - }; -} -function noop() {} -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; +if (getGlobalDispatcher() === undefined) { + setGlobalDispatcher(new Agent()) } -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - let readable = opts.readable || opts.readable !== false && stream.readable; - let writable = opts.writable || opts.writable !== false && stream.writable; - const onlegacyfinish = () => { - if (!stream.writable) onfinish(); - }; - var writableEnded = stream._writableState && stream._writableState.finished; - const onfinish = () => { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; - var readableEnded = stream._readableState && stream._readableState.endEmitted; - const onend = () => { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; - const onerror = err => { - callback.call(stream, err); - }; - const onclose = () => { - let err; - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; - const onrequest = () => { - stream.req.on('finish', onfinish); - }; - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); + +function setGlobalDispatcher (agent) { + if (!agent || typeof agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument agent must implement Agent') } - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; + Object.defineProperty(globalThis, globalDispatcher, { + value: agent, + writable: true, + enumerable: false, + configurable: false + }) } -module.exports = eos; + +function getGlobalDispatcher () { + return globalThis[globalDispatcher] +} + +module.exports = { + setGlobalDispatcher, + getGlobalDispatcher +} + /***/ }), -/***/ 9082: 
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 6930: +/***/ ((module) => { "use strict"; -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -const ERR_INVALID_ARG_TYPE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE); -function from(Readable, iterable, opts) { - let iterator; - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - const readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); - // Reading boolean to protect against _read - // being called before last iteration completion. 
- let reading = false; - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; - function next() { - return _next2.apply(this, arguments); +module.exports = class DecoratorHandler { + constructor (handler) { + this.handler = handler } - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - const _yield$iterator$next = yield iterator.next(), - value = _yield$iterator$next.value, - done = _yield$iterator$next.done; - if (done) { - readable.push(null); - } else if (readable.push(yield value)) { - next(); - } else { - reading = false; - } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); + + onConnect (...args) { + return this.handler.onConnect(...args) + } + + onError (...args) { + return this.handler.onError(...args) + } + + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } + + onHeaders (...args) { + return this.handler.onHeaders(...args) + } + + onData (...args) { + return this.handler.onData(...args) + } + + onComplete (...args) { + return this.handler.onComplete(...args) + } + + onBodySent (...args) { + return this.handler.onBodySent(...args) } - return readable; } -module.exports = from; + /***/ }), -/***/ 6989: +/***/ 2860: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). +const util = __nccwpck_require__(3983) +const { kBodyUsed } = __nccwpck_require__(2785) +const assert = __nccwpck_require__(9491) +const { InvalidArgumentError } = __nccwpck_require__(8045) +const EE = __nccwpck_require__(2361) -let eos; -function once(callback) { - let called = false; - return function () { - if (called) return; - called = true; - callback(...arguments); - }; -} -const _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - let closed = false; - stream.on('close', () => { - closed = true; - }); - if (eos === undefined) eos = __nccwpck_require__(6080); - eos(stream, { - readable: reading, - writable: writing - }, err => { - if (err) return callback(err); - closed = true; - callback(); - }); - let destroyed = false; - return err => { - if (closed) return; - if (destroyed) return; - destroyed = true; +const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] - // request.destroy just do .end - .abort is what we want - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; +const kBody = Symbol('body') + +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false + } + + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] + } } -function call(fn) { - fn(); + +class RedirectHandler { + constructor (dispatch, maxRedirections, opts, handler) { + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive 
number') + } + + util.validateHandler(handler, opts.method, opts.upgrade) + + this.dispatch = dispatch + this.location = null + this.abort = null + this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy + this.maxRedirections = maxRedirections + this.handler = handler + this.history = [] + + if (util.isStream(this.opts.body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? + if (util.bodyLength(this.opts.body) === 0) { + this.opts.body + .on('data', function () { + assert(false) + }) + } + + if (typeof this.opts.body.readableDidRead !== 'boolean') { + this.opts.body[kBodyUsed] = false + EE.prototype.on.call(this.opts.body, 'data', function () { + this[kBodyUsed] = true + }) + } + } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. + this.opts.body = new BodyAsyncIterable(this.opts.body) + } else if ( + this.opts.body && + typeof this.opts.body !== 'string' && + !ArrayBuffer.isView(this.opts.body) && + util.isIterable(this.opts.body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? + this.opts.body = new BodyAsyncIterable(this.opts.body) + } + } + + onConnect (abort) { + this.abort = abort + this.handler.onConnect(abort, { history: this.history }) + } + + onUpgrade (statusCode, headers, socket) { + this.handler.onUpgrade(statusCode, headers, socket) + } + + onError (error) { + this.handler.onError(error) + } + + onHeaders (statusCode, headers, resume, statusText) { + this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) + ? null + : parseLocation(statusCode, headers) + + if (this.opts.origin) { + this.history.push(new URL(this.opts.path, this.opts.origin)) + } + + if (!this.location) { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } + + const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) + const path = search ? `${pathname}${search}` : pathname + + // Remove headers referring to the original URL. + // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. + // https://tools.ietf.org/html/rfc7231#section-6.4 + this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) + this.opts.path = path + this.opts.origin = origin + this.opts.maxRedirections = 0 + this.opts.query = null + + // https://tools.ietf.org/html/rfc7231#section-6.4.4 + // In case of HTTP 303, always replace method to be either HEAD or GET + if (statusCode === 303 && this.opts.method !== 'HEAD') { + this.opts.method = 'GET' + this.opts.body = null + } + } + + onData (chunk) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response bodies. + + Redirection is used to serve the requested resource from another URL, so it is assumes that + no body is generated (and thus can be ignored). Even though generating a body is not prohibited. 
+ + For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually + (which means it's optional and not mandated) contain just an hyperlink to the value of + the Location response header, so the body can be ignored safely. + + For status 300, which is "Multiple Choices", the spec mentions both generating a Location + response header AND a response body with the other possible location to follow. + Since the spec explicitily chooses not to specify a format for such body and leave it to + servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. + */ + } else { + return this.handler.onData(chunk) + } + } + + onComplete (trailers) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections + and neither are useful if present. + + See comment on onData method above for more detailed informations. + */ + + this.location = null + this.abort = null + + this.dispatch(this.opts, this) + } else { + this.handler.onComplete(trailers) + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) { + this.handler.onBodySent(chunk) + } + } } -function pipe(from, to) { - return from.pipe(to); + +function parseLocation (statusCode, headers) { + if (redirectableStatusCodes.indexOf(statusCode) === -1) { + return null + } + + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toString().toLowerCase() === 'location') { + return headers[i + 1] + } + } } -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); + +// https://tools.ietf.org/html/rfc7231#section-6.4.4 +function shouldRemoveHeader (header, removeContent, unknownOrigin) { + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) +} + +// https://tools.ietf.org/html/rfc7231#section-6.4 +function cleanRequestHeaders (headers, removeContent, unknownOrigin) { + const ret = [] + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { + ret.push(headers[i], headers[i + 1]) + } + } + } else if (headers && typeof headers === 'object') { + for (const key of Object.keys(headers)) { + if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { + ret.push(key, headers[key]) + } + } + } else { + assert(headers == null, 'headers must be an object or an array') + } + return ret } -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; + +module.exports = RedirectHandler + + +/***/ }), + +/***/ 2286: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(9491) + +const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785) +const { RequestRetryError } = __nccwpck_require__(8045) +const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3983) + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + 
return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? [ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) } - const callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } } - let error; - const destroys = streams.map(function (stream, i) { - const reading = i < streams.length - 1; - const writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); - }); - }); - return streams.reduce(pipe); -} -module.exports = pipeline; -/***/ }), + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } -/***/ 9948: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state -"use strict"; + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } -const ERR_INVALID_OPT_VALUE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE); -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; -} -function getHighWaterMark(state, options, duplexKey, isDuplex) { - const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - const name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return } - return Math.floor(hwm); - } - // Default value - return state.objectMode ? 16 : 16 * 1024; -} -module.exports = { - getHighWaterMark -}; + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } -/***/ }), + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } -/***/ 2387: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } -module.exports = __nccwpck_require__(2781); + const retryTimeout = + retryAfterHeader > 0 + ? Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + state.currentTimeout = retryTimeout -/***/ }), + setTimeout(() => cb(null), retryTimeout) + } -/***/ 1642: -/***/ ((module, exports, __nccwpck_require__) => { + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) -var Stream = __nccwpck_require__(2781); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(1433); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(6993); - exports.Duplex = __nccwpck_require__(1359); - exports.Transform = __nccwpck_require__(4415); - exports.PassThrough = __nccwpck_require__(1542); - exports.finished = __nccwpck_require__(6080); - exports.pipeline = __nccwpck_require__(6989); -} + this.retryCount += 1 + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } -/***/ }), + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null -/***/ 3515: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (statusCode !== 206) { + return true + } -"use strict"; + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } -const {PassThrough} = __nccwpck_require__(2781); -const debug = __nccwpck_require__(8237)('retry-request'); -const extend = __nccwpck_require__(8171); + const { start, size, end 
= size } = contentRange -const DEFAULTS = { - objectMode: false, - retries: 2, + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') - /* - The maximum time to delay in seconds. If retryDelayMultiplier results in a - delay greater than maxRetryDelay, retries should delay by maxRetryDelay - seconds instead. - */ - maxRetryDelay: 64, + this.resume = resume + return true + } - /* - The multiplier by which to increase the delay time between the completion of - failed requests, and the initiation of the subsequent retrying request. - */ - retryDelayMultiplier: 2, + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) - /* - The length of time to keep retrying in seconds. The last sleep period will - be shortened as necessary, so that the last retry runs at deadline (and not - considerably beyond it). The total time starting from when the initial - request is sent, after which an error will be returned, regardless of the - retrying attempts made meanwhile. - */ - totalTimeout: 600, + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } - noResponseRetries: 2, - currentRetryAttempt: 0, - shouldRetryFn: function (response) { - const retryRanges = [ - // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes - // 1xx - Retry (Informational, request still processing) - // 2xx - Do not retry (Success) - // 3xx - Do not retry (Redirect) - // 4xx - Do not retry (Client errors) - // 429 - Retry ("Too Many Requests") - // 5xx - Retry (Server errors) - [100, 199], - [429, 429], - [500, 599], - ]; + const { start, size, end = size } = range - const statusCode = response.statusCode; - debug(`Response status: ${statusCode}`); + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) - let range; - while ((range = retryRanges.shift())) { - if (statusCode >= range[0] && statusCode <= range[1]) { - // Not a successful status or redirect. - return true; + this.start = start + this.end = end } - } - }, -}; -function retryRequest(requestOpts, opts, callback) { - const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } - if (typeof opts === 'function') { - callback = opts; - } + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) - const manualCurrentRetryAttemptWasSet = - opts && typeof opts.currentRetryAttempt === 'number'; - opts = extend({}, DEFAULTS, opts); + this.resume = resume + this.etag = headers.etag != null ? 
headers.etag : null - if (typeof opts.request === 'undefined') { - try { - // eslint-disable-next-line node/no-unpublished-require - opts.request = __nccwpck_require__(8418); - } catch (e) { - throw new Error('A request library must be provided to retry-request.'); + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) } - } - let currentRetryAttempt = opts.currentRetryAttempt; + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) - let numNoResponseAttempts = 0; - let streamResponseHandled = false; + this.abort(err) - let retryStream; - let requestStream; - let delayStream; + return false + } - let activeRequest; - const retryRequest = { - abort: function () { - if (activeRequest && activeRequest.abort) { - activeRequest.abort(); - } - }, - }; + onData (chunk) { + this.start += chunk.length - if (streamMode) { - retryStream = new PassThrough({objectMode: opts.objectMode}); - retryStream.abort = resetStreams; + return this.handler.onData(chunk) } - const timeOfFirstRequest = Date.now(); - if (currentRetryAttempt > 0) { - retryAfterDelay(currentRetryAttempt); - } else { - makeRequest(); + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) } - if (streamMode) { - return retryStream; - } else { - return retryRequest; - } + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } - function resetStreams() { - delayStream = null; + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) - if (requestStream) { - requestStream.abort && requestStream.abort(); - requestStream.cancel && requestStream.cancel(); + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } - if (requestStream.destroy) { - requestStream.destroy(); - } else if (requestStream.end) { - requestStream.end(); + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? ''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) } } } +} - function makeRequest() { - currentRetryAttempt++; - debug(`Current retry attempt: ${currentRetryAttempt}`); - - if (streamMode) { - streamResponseHandled = false; +module.exports = RetryHandler - delayStream = new PassThrough({objectMode: opts.objectMode}); - requestStream = opts.request(requestOpts); - setImmediate(() => { - retryStream.emit('request'); - }); +/***/ }), - requestStream - // gRPC via google-cloud-node can emit an `error` as well as a `response` - // Whichever it emits, we run with-- we can't run with both. That's what - // is up with the `streamResponseHandled` tracking. 
- .on('error', err => { - if (streamResponseHandled) { - return; - } +/***/ 8861: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - streamResponseHandled = true; - onResponse(err); - }) - .on('response', (resp, body) => { - if (streamResponseHandled) { - return; - } +"use strict"; - streamResponseHandled = true; - onResponse(null, resp, body); - }) - .on('complete', retryStream.emit.bind(retryStream, 'complete')); - requestStream.pipe(delayStream); - } else { - activeRequest = opts.request(requestOpts, onResponse); - } - } +const RedirectHandler = __nccwpck_require__(2860) - function retryAfterDelay(currentRetryAttempt) { - if (streamMode) { - resetStreams(); - } +function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { + return (dispatch) => { + return function Intercept (opts, handler) { + const { maxRedirections = defaultMaxRedirections } = opts - const nextRetryDelay = getNextRetryDelay({ - maxRetryDelay: opts.maxRetryDelay, - retryDelayMultiplier: opts.retryDelayMultiplier, - retryNumber: currentRetryAttempt, - timeOfFirstRequest, - totalTimeout: opts.totalTimeout, - }); - debug(`Next retry delay: ${nextRetryDelay}`); + if (!maxRedirections) { + return dispatch(opts, handler) + } - if (nextRetryDelay <= 0) { - numNoResponseAttempts = opts.noResponseRetries + 1; - return; + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) + opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. + return dispatch(opts, redirectHandler) } - - setTimeout(makeRequest, nextRetryDelay); } +} - function onResponse(err, response, body) { - // An error such as DNS resolution. - if (err) { - numNoResponseAttempts++; +module.exports = createRedirectInterceptor - if (numNoResponseAttempts <= opts.noResponseRetries) { - retryAfterDelay(numNoResponseAttempts); - } else { - if (streamMode) { - retryStream.emit('error', err); - retryStream.end(); - } else { - callback(err, response, body); - } - } - return; - } +/***/ }), - // Send the response to see if we should try again. - // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts - // the very first request sent as the first "retry". It is only accurate - // when a user provides their own "currentRetryAttempt" option at - // instantiation. - const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet - ? currentRetryAttempt - : currentRetryAttempt - 1; - if ( - adjustedCurrentRetryAttempt < opts.retries && - opts.shouldRetryFn(response) - ) { - retryAfterDelay(currentRetryAttempt); - return; - } +/***/ 953: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // No more attempts need to be made, just continue on. 
- if (streamMode) { - retryStream.emit('response', response); - delayStream.pipe(retryStream); - requestStream.on('error', err => { - retryStream.destroy(err); - }); - } else { - callback(err, response, body); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; +const utils_1 = __nccwpck_require__(1891); +// C headers +var ERROR; +(function (ERROR) { + ERROR[ERROR["OK"] = 0] = "OK"; + ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; + ERROR[ERROR["STRICT"] = 2] = "STRICT"; + ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; + ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; + ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; + ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; + ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; + ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; + ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; + ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; + ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; + ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; + ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; + ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; + ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; + ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; + ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; + ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; + ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; + ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; + ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; + ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; + ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; + ERROR[ERROR["USER"] = 24] = "USER"; +})(ERROR = exports.ERROR || (exports.ERROR = {})); +var TYPE; +(function (TYPE) { + TYPE[TYPE["BOTH"] = 0] = "BOTH"; + TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; + TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; +})(TYPE = exports.TYPE || (exports.TYPE = {})); +var FLAGS; +(function (FLAGS) { + FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; + FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; + FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; + FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; + FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; + FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; + FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; + FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; + // 1 << 8 is unused + FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; +})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); +var LENIENT_FLAGS; +(function (LENIENT_FLAGS) { + LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; + LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; + LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; 
+})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); +var METHODS; +(function (METHODS) { + METHODS[METHODS["DELETE"] = 0] = "DELETE"; + METHODS[METHODS["GET"] = 1] = "GET"; + METHODS[METHODS["HEAD"] = 2] = "HEAD"; + METHODS[METHODS["POST"] = 3] = "POST"; + METHODS[METHODS["PUT"] = 4] = "PUT"; + /* pathological */ + METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; + METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; + METHODS[METHODS["TRACE"] = 7] = "TRACE"; + /* WebDAV */ + METHODS[METHODS["COPY"] = 8] = "COPY"; + METHODS[METHODS["LOCK"] = 9] = "LOCK"; + METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; + METHODS[METHODS["MOVE"] = 11] = "MOVE"; + METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; + METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; + METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; + METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; + METHODS[METHODS["BIND"] = 16] = "BIND"; + METHODS[METHODS["REBIND"] = 17] = "REBIND"; + METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; + METHODS[METHODS["ACL"] = 19] = "ACL"; + /* subversion */ + METHODS[METHODS["REPORT"] = 20] = "REPORT"; + METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; + METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; + METHODS[METHODS["MERGE"] = 23] = "MERGE"; + /* upnp */ + METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; + METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; + METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; + METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; + /* RFC-5789 */ + METHODS[METHODS["PATCH"] = 28] = "PATCH"; + METHODS[METHODS["PURGE"] = 29] = "PURGE"; + /* CalDAV */ + METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; + /* RFC-2068, section 19.6.1.2 */ + METHODS[METHODS["LINK"] = 31] = "LINK"; + METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; + /* icecast */ + METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; + /* RFC-7540, section 11.6 */ + METHODS[METHODS["PRI"] = 34] = "PRI"; + /* RFC-2326 RTSP */ + METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; + METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; + METHODS[METHODS["SETUP"] = 37] = "SETUP"; + METHODS[METHODS["PLAY"] = 38] = "PLAY"; + METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; + METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; + METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; + METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; + METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; + METHODS[METHODS["RECORD"] = 44] = "RECORD"; + /* RAOP */ + METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; +})(METHODS = exports.METHODS || (exports.METHODS = {})); +exports.METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + METHODS.PROPFIND, + METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + // TODO(indutny): should we allow it with HTTP? 
+ METHODS.SOURCE, +]; +exports.METHODS_ICE = [ + METHODS.SOURCE, +]; +exports.METHODS_RTSP = [ + METHODS.OPTIONS, + METHODS.DESCRIBE, + METHODS.ANNOUNCE, + METHODS.SETUP, + METHODS.PLAY, + METHODS.PAUSE, + METHODS.TEARDOWN, + METHODS.GET_PARAMETER, + METHODS.SET_PARAMETER, + METHODS.REDIRECT, + METHODS.RECORD, + METHODS.FLUSH, + // For AirPlay + METHODS.GET, + METHODS.POST, +]; +exports.METHOD_MAP = utils_1.enumToMap(METHODS); +exports.H_METHOD_MAP = {}; +Object.keys(exports.METHOD_MAP).forEach((key) => { + if (/^H/.test(key)) { + exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; } - } -} +}); +var FINISH; +(function (FINISH) { + FINISH[FINISH["SAFE"] = 0] = "SAFE"; + FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; + FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; +})(FINISH = exports.FINISH || (exports.FINISH = {})); +exports.ALPHA = []; +for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { + // Upper case + exports.ALPHA.push(String.fromCharCode(i)); + // Lower case + exports.ALPHA.push(String.fromCharCode(i + 0x20)); +} +exports.NUM_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; +exports.HEX_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, + A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, + a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, +}; +exports.NUM = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; +exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); +exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; +exports.USERINFO_CHARS = exports.ALPHANUM + .concat(exports.MARK) + .concat(['%', ';', ':', '&', '=', '+', '$', ',']); +// TODO(indutny): use RFC +exports.STRICT_URL_CHAR = [ + '!', '"', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', '-', '.', '/', + ':', ';', '<', '=', '>', + '@', '[', '\\', ']', '^', '_', + '`', + '{', '|', '}', '~', +].concat(exports.ALPHANUM); +exports.URL_CHAR = exports.STRICT_URL_CHAR + .concat(['\t', '\f']); +// All characters with 0x80 bit set to 1 +for (let i = 0x80; i <= 0xff; i++) { + exports.URL_CHAR.push(i); +} +exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); +/* Tokens as defined by rfc 2616. Also lowercases them. + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" 
| "=" + * | "{" | "}" | SP | HT + */ +exports.STRICT_TOKEN = [ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +].concat(exports.ALPHANUM); +exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +exports.HEADER_CHARS = ['\t']; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + exports.HEADER_CHARS.push(i); + } +} +// ',' = \x44 +exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); +exports.MAJOR = exports.NUM_MAP; +exports.MINOR = exports.MAJOR; +var HEADER_STATE; +(function (HEADER_STATE) { + HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; + HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; + HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; + HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; + HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; + HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; + HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; +})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); +exports.SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; +//# sourceMappingURL=constants.js.map -module.exports = retryRequest; +/***/ }), -function getNextRetryDelay(config) { - const { - maxRetryDelay, - retryDelayMultiplier, - retryNumber, - timeOfFirstRequest, - totalTimeout, - } = config; +/***/ 1145: +/***/ ((module) => { - const maxRetryDelayMs = maxRetryDelay * 1000; - const totalTimeoutMs = totalTimeout * 1000; +module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQ
DAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQF
qIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIg
FqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBA
WohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0A
AEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASE
QDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZ
IBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcI
AAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzb
AQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECA
AQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5A
A2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqI
gNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtB
ACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAs
gBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQ
BBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpb
iBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9O
X05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' - const jitter = Math.floor(Math.random() * 1000); - const calculatedNextRetryDelay = - Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; - const maxAllowableDelayMs = - totalTimeoutMs - (Date.now() - timeOfFirstRequest); +/***/ }), - return Math.min( - calculatedNextRetryDelay, - maxAllowableDelayMs, - maxRetryDelayMs - ); -} +/***/ 5627: +/***/ ((module) => { -module.exports.getNextRetryDelay = getNextRetryDelay; +module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQ
DAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAE
iECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQC
ANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgA
SIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEB
aiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRA
MpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohAS
AEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAI
RAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIE
IAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCE
QIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4
CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBe
EGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABB
f0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQ
gBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIA
IhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvb
nMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBI
UEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' /***/ }), -/***/ 4347: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1891: +/***/ ((__unused_webpack_module, exports) => { -module.exports = __nccwpck_require__(6244); +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.enumToMap = void 0; +function enumToMap(obj) { + const res = {}; + Object.keys(obj).forEach((key) => { + const value = obj[key]; + if (typeof value === 'number') { + res[key] = value; + } + }); + return res; +} +exports.enumToMap = enumToMap; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 6244: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6771: +/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { -var RetryOperation = __nccwpck_require__(5369); +"use strict"; -exports.operation = function(options) { - var timeouts = exports.timeouts(options); - return new RetryOperation(timeouts, { - forever: options && (options.forever || options.retries === Infinity), - unref: options && options.unref, - maxRetryTime: options && options.maxRetryTime - }); -}; -exports.timeouts = function(options) { - if (options instanceof Array) { - return [].concat(options); +const { kClients } = __nccwpck_require__(2785) +const Agent = __nccwpck_require__(7890) +const { + kAgent, + kMockAgentSet, + kMockAgentGet, + kDispatches, + kIsMockActive, + kNetConnect, + kGetNetConnect, + kOptions, + kFactory +} = __nccwpck_require__(4347) +const MockClient = __nccwpck_require__(8687) +const MockPool = __nccwpck_require__(6193) +const { matchValue, buildMockOptions } = __nccwpck_require__(9323) +const { InvalidArgumentError, UndiciError } = __nccwpck_require__(8045) +const Dispatcher = __nccwpck_require__(412) +const Pluralizer = __nccwpck_require__(8891) +const PendingInterceptorsFormatter = __nccwpck_require__(6823) + +class FakeWeakRef { + constructor (value) { + this.value = value } - var opts = { - retries: 10, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: Infinity, - randomize: false - }; - for (var key in options) { - opts[key] = options[key]; + deref () { + return this.value } +} - if (opts.minTimeout > opts.maxTimeout) { - throw new Error('minTimeout is greater than maxTimeout'); - } +class MockAgent extends Dispatcher { + constructor (opts) { + super(opts) - var timeouts = []; - for (var i = 0; i < opts.retries; i++) { - timeouts.push(this.createTimeout(i, opts)); - } + this[kNetConnect] = true + this[kIsMockActive] = true - if (options && options.forever && !timeouts.length) { - timeouts.push(this.createTimeout(i, opts)); + // Instantiate Agent and encapsulate + if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + const agent = opts && opts.agent ? opts.agent : new Agent(opts) + this[kAgent] = agent + + this[kClients] = agent[kClients] + this[kOptions] = buildMockOptions(opts) } - // sort the array numerically ascending - timeouts.sort(function(a,b) { - return a - b; - }); + get (origin) { + let dispatcher = this[kMockAgentGet](origin) - return timeouts; -}; + if (!dispatcher) { + dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + } + return dispatcher + } -exports.createTimeout = function(attempt, opts) { - var random = (opts.randomize) - ? 
(Math.random() + 1) - : 1; + dispatch (opts, handler) { + // Call MockAgent.get to perform additional setup before dispatching as normal + this.get(opts.origin) + return this[kAgent].dispatch(opts, handler) + } - var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); - timeout = Math.min(timeout, opts.maxTimeout); + async close () { + await this[kAgent].close() + this[kClients].clear() + } - return timeout; -}; + deactivate () { + this[kIsMockActive] = false + } -exports.wrap = function(obj, options, methods) { - if (options instanceof Array) { - methods = options; - options = null; + activate () { + this[kIsMockActive] = true } - if (!methods) { - methods = []; - for (var key in obj) { - if (typeof obj[key] === 'function') { - methods.push(key); + enableNetConnect (matcher) { + if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { + if (Array.isArray(this[kNetConnect])) { + this[kNetConnect].push(matcher) + } else { + this[kNetConnect] = [matcher] } + } else if (typeof matcher === 'undefined') { + this[kNetConnect] = true + } else { + throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') } } - for (var i = 0; i < methods.length; i++) { - var method = methods[i]; - var original = obj[method]; + disableNetConnect () { + this[kNetConnect] = false + } - obj[method] = function retryWrapper(original) { - var op = exports.operation(options); - var args = Array.prototype.slice.call(arguments, 1); - var callback = args.pop(); + // This is required to bypass issues caused by using global symbols - see: + // https://github.com/nodejs/undici/issues/1447 + get isMockActive () { + return this[kIsMockActive] + } - args.push(function(err) { - if (op.retry(err)) { - return; - } - if (err) { - arguments[0] = op.mainError(); - } - callback.apply(this, arguments); - }); + [kMockAgentSet] (origin, dispatcher) { + this[kClients].set(origin, new FakeWeakRef(dispatcher)) + } - op.attempt(function() { - original.apply(obj, args); - }); - }.bind(obj, original); - obj[method].options = options; + [kFactory] (origin) { + const mockOptions = Object.assign({ agent: this }, this[kOptions]) + return this[kOptions] && this[kOptions].connections === 1 + ? 
new MockClient(origin, mockOptions) + : new MockPool(origin, mockOptions) } -}; + [kMockAgentGet] (origin) { + // First check if we can immediately find it + const ref = this[kClients].get(origin) + if (ref) { + return ref.deref() + } -/***/ }), + // If the origin is not a string create a dummy parent pool and return to user + if (typeof origin !== 'string') { + const dispatcher = this[kFactory]('http://localhost:9999') + this[kMockAgentSet](origin, dispatcher) + return dispatcher + } -/***/ 5369: -/***/ ((module) => { + // If we match, create a pool and assign the same dispatches + for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { + const nonExplicitDispatcher = nonExplicitRef.deref() + if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { + const dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] + return dispatcher + } + } + } -function RetryOperation(timeouts, options) { - // Compatibility for the old (timeouts, retryForever) signature - if (typeof options === 'boolean') { - options = { forever: options }; + [kGetNetConnect] () { + return this[kNetConnect] } - this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); - this._timeouts = timeouts; - this._options = options || {}; - this._maxRetryTime = options && options.maxRetryTime || Infinity; - this._fn = null; - this._errors = []; - this._attempts = 1; - this._operationTimeout = null; - this._operationTimeoutCb = null; - this._timeout = null; - this._operationStart = null; - this._timer = null; + pendingInterceptors () { + const mockAgentClients = this[kClients] - if (this._options.forever) { - this._cachedTimeouts = this._timeouts.slice(0); + return Array.from(mockAgentClients.entries()) + .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) + .filter(({ pending }) => pending) } -} -module.exports = RetryOperation; -RetryOperation.prototype.reset = function() { - this._attempts = 1; - this._timeouts = this._originalTimeouts.slice(0); -} + assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { + const pending = this.pendingInterceptors() -RetryOperation.prototype.stop = function() { - if (this._timeout) { - clearTimeout(this._timeout); - } - if (this._timer) { - clearTimeout(this._timer); - } + if (pending.length === 0) { + return + } - this._timeouts = []; - this._cachedTimeouts = null; -}; + const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) -RetryOperation.prototype.retry = function(err) { - if (this._timeout) { - clearTimeout(this._timeout); - } + throw new UndiciError(` +${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: - if (!err) { - return false; - } - var currentTime = new Date().getTime(); - if (err && currentTime - this._operationStart >= this._maxRetryTime) { - this._errors.push(err); - this._errors.unshift(new Error('RetryOperation timeout occurred')); - return false; +${pendingInterceptorsFormatter.format(pending)} +`.trim()) } +} - this._errors.push(err); +module.exports = MockAgent - var timeout = this._timeouts.shift(); - if (timeout === undefined) { - if (this._cachedTimeouts) { - // retry forever, only keep last error - this._errors.splice(0, this._errors.length - 1); - timeout = this._cachedTimeouts.slice(-1); - } else { - return false; - } - } - var self = this; - this._timer = setTimeout(function() { 
- self._attempts++; +/***/ }), + +/***/ 8687: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; - if (self._operationTimeoutCb) { - self._timeout = setTimeout(function() { - self._operationTimeoutCb(self._attempts); - }, self._operationTimeout); - if (self._options.unref) { - self._timeout.unref(); - } +const { promisify } = __nccwpck_require__(3837) +const Client = __nccwpck_require__(3598) +const { buildMockDispatch } = __nccwpck_require__(9323) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(4347) +const { MockInterceptor } = __nccwpck_require__(410) +const Symbols = __nccwpck_require__(2785) +const { InvalidArgumentError } = __nccwpck_require__(8045) + +/** + * MockClient provides an API that extends the Client to influence the mockDispatches. + */ +class MockClient extends Client { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') } - self._fn(self._attempts); - }, timeout); + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) - if (this._options.unref) { - this._timer.unref(); + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] } - return true; -}; - -RetryOperation.prototype.attempt = function(fn, timeoutOps) { - this._fn = fn; + get [Symbols.kConnected] () { + return this[kConnected] + } - if (timeoutOps) { - if (timeoutOps.timeout) { - this._operationTimeout = timeoutOps.timeout; - } - if (timeoutOps.cb) { - this._operationTimeoutCb = timeoutOps.cb; - } + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) } - var self = this; - if (this._operationTimeoutCb) { - this._timeout = setTimeout(function() { - self._operationTimeoutCb(); - }, self._operationTimeout); + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) } +} - this._operationStart = new Date().getTime(); +module.exports = MockClient - this._fn(this._attempts); -}; -RetryOperation.prototype.try = function(fn) { - console.log('Using RetryOperation.try() is deprecated'); - this.attempt(fn); -}; +/***/ }), -RetryOperation.prototype.start = function(fn) { - console.log('Using RetryOperation.start() is deprecated'); - this.attempt(fn); -}; +/***/ 888: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -RetryOperation.prototype.start = RetryOperation.prototype.try; +"use strict"; -RetryOperation.prototype.errors = function() { - return this._errors; -}; -RetryOperation.prototype.attempts = function() { - return this._attempts; -}; +const { UndiciError } = __nccwpck_require__(8045) -RetryOperation.prototype.mainError = function() { - if (this._errors.length === 0) { - return null; +class MockNotMatchedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, MockNotMatchedError) + this.name = 'MockNotMatchedError' + this.message = message || 'The request does not match any registered mock dispatches' + this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' } +} - var counts = {}; - var mainError = null; - var mainErrorCount = 0; +module.exports = { + MockNotMatchedError +} - for (var i = 0; i < this._errors.length; i++) { - var error = this._errors[i]; - var message = error.message; - var count = (counts[message] || 0) + 1; - counts[message] = count; +/***/ }), - if (count >= mainErrorCount) { - mainError = error; - mainErrorCount = count; +/***/ 410: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(9323) +const { + kDispatches, + kDispatchKey, + kDefaultHeaders, + kDefaultTrailers, + kContentLength, + kMockDispatch +} = __nccwpck_require__(4347) +const { InvalidArgumentError } = __nccwpck_require__(8045) +const { buildURL } = __nccwpck_require__(3983) + +/** + * Defines the scope API for an interceptor reply + */ +class MockScope { + constructor (mockDispatch) { + this[kMockDispatch] = mockDispatch + } + + /** + * Delay a reply by a set amount in ms. + */ + delay (waitInMs) { + if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { + throw new InvalidArgumentError('waitInMs must be a valid integer > 0') } + + this[kMockDispatch].delay = waitInMs + return this } - return mainError; -}; + /** + * For a defined reply, never mark as consumed. + */ + persist () { + this[kMockDispatch].persist = true + return this + } + /** + * Allow one to define a reply for a set amount of matching requests. 
+ */ + times (repeatTimes) { + if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { + throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') + } -/***/ }), + this[kMockDispatch].times = repeatTimes + return this + } +} -/***/ 1867: -/***/ ((module, exports, __nccwpck_require__) => { +/** + * Defines an interceptor for a Mock + */ +class MockInterceptor { + constructor (opts, mockDispatches) { + if (typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object') + } + if (typeof opts.path === 'undefined') { + throw new InvalidArgumentError('opts.path must be defined') + } + if (typeof opts.method === 'undefined') { + opts.method = 'GET' + } + // See https://github.com/nodejs/undici/issues/1245 + // As per RFC 3986, clients are not supposed to send URI + // fragments to servers when they retrieve a document, + if (typeof opts.path === 'string') { + if (opts.query) { + opts.path = buildURL(opts.path, opts.query) + } else { + // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 + const parsedURL = new URL(opts.path, 'data://') + opts.path = parsedURL.pathname + parsedURL.search + } + } + if (typeof opts.method === 'string') { + opts.method = opts.method.toUpperCase() + } -/*! safe-buffer. MIT License. Feross Aboukhadijeh */ -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(4300) -var Buffer = buffer.Buffer + this[kDispatchKey] = buildKey(opts) + this[kDispatches] = mockDispatches + this[kDefaultHeaders] = {} + this[kDefaultTrailers] = {} + this[kContentLength] = false + } -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] + createMockScopeDispatchData (statusCode, data, responseOptions = {}) { + const responseData = getResponseData(data) + const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} + const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } + const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } + + return { statusCode, data, headers, trailers } } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} + validateReplyParameters (statusCode, data, responseOptions) { + if (typeof statusCode === 'undefined') { + throw new InvalidArgumentError('statusCode must be defined') + } + if (typeof data === 'undefined') { + throw new InvalidArgumentError('data must be defined') + } + if (typeof responseOptions !== 'object') { + throw new InvalidArgumentError('responseOptions must be an object') + } + } -SafeBuffer.prototype = Object.create(Buffer.prototype) + /** + * Mock an undici request with a defined reply. + */ + reply (replyData) { + // Values of reply aren't available right now as they + // can only be available when the reply callback is invoked. + if (typeof replyData === 'function') { + // We'll first wrap the provided callback in another function, + // this function will properly resolve the data from the callback + // when invoked. + const wrappedDefaultsCallback = (opts) => { + // Our reply options callback contains the parameter for statusCode, data and options. 
+ const resolvedData = replyData(opts) + + // Check if it is in the right format + if (typeof resolvedData !== 'object') { + throw new InvalidArgumentError('reply options callback must return an object') + } + + const { statusCode, data = '', responseOptions = {} } = resolvedData + this.validateReplyParameters(statusCode, data, responseOptions) + // Since the values can be obtained immediately we return them + // from this higher order function that will be resolved later. + return { + ...this.createMockScopeDispatchData(statusCode, data, responseOptions) + } + } -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) + // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) + return new MockScope(newMockDispatch) + } -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') + // We can have either one or three parameters, if we get here, + // we should have 1-3 parameters. So we spread the arguments of + // this function to obtain the parameters, since replyData will always + // just be the statusCode. + const [statusCode, data = '', responseOptions = {}] = [...arguments] + this.validateReplyParameters(statusCode, data, responseOptions) + + // Send in-already provided data like usual + const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) + return new MockScope(newMockDispatch) } - return Buffer(arg, encodingOrOffset, length) -} -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + /** + * Mock an undici request with a defined error. 
+ */ + replyWithError (error) { + if (typeof error === 'undefined') { + throw new InvalidArgumentError('error must be defined') + } + + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) + return new MockScope(newMockDispatch) } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) + + /** + * Set default reply headers on the interceptor for subsequent replies + */ + defaultReplyHeaders (headers) { + if (typeof headers === 'undefined') { + throw new InvalidArgumentError('headers must be defined') } - } else { - buf.fill(0) + + this[kDefaultHeaders] = headers + return this } - return buf -} -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + /** + * Set default reply trailers on the interceptor for subsequent replies + */ + defaultReplyTrailers (trailers) { + if (typeof trailers === 'undefined') { + throw new InvalidArgumentError('trailers must be defined') + } + + this[kDefaultTrailers] = trailers + return this } - return Buffer(size) -} -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + /** + * Set reply content length header for replies on the interceptor + */ + replyContentLength () { + this[kContentLength] = true + return this } - return buffer.SlowBuffer(size) } +module.exports.MockInterceptor = MockInterceptor +module.exports.MockScope = MockScope + /***/ }), -/***/ 9626: +/***/ 6193: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var stubs = __nccwpck_require__(1099) +const { promisify } = __nccwpck_require__(3837) +const Pool = __nccwpck_require__(4634) +const { buildMockDispatch } = __nccwpck_require__(9323) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(4347) +const { MockInterceptor } = __nccwpck_require__(410) +const Symbols = __nccwpck_require__(2785) +const { InvalidArgumentError } = __nccwpck_require__(8045) -/* - * StreamEvents can be used 2 ways: - * - * 1: - * function MyStream() { - * require('stream-events').call(this) - * } - * - * 2: - * require('stream-events')(myStream) +/** + * MockPool provides an API that extends the Pool to influence the mockDispatches. */ -function StreamEvents(stream) { - stream = stream || this +class MockPool extends Pool { + constructor (origin, opts) { + super(origin, opts) - var cfg = { - callthrough: true, - calls: 1 + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] } - stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) - stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + get [Symbols.kConnected] () { + return this[kConnected] + } - return stream + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } } -module.exports = StreamEvents +module.exports = MockPool /***/ }), -/***/ 6121: +/***/ 4347: /***/ ((module) => { -module.exports = shift - -function shift (stream) { - var rs = stream._readableState - if (!rs) return null - return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) -} - -function getStateLength (state) { - if (state.buffer.length) { - // Since node 6.3.0 state.buffer is a BufferList not an array - if (state.buffer.head) { - return state.buffer.head.data.length - } +"use strict"; - return state.buffer[0].length - } - return state.length +module.exports = { + kAgent: Symbol('agent'), + kOptions: Symbol('options'), + kFactory: Symbol('factory'), + kDispatches: Symbol('dispatches'), + kDispatchKey: Symbol('dispatch key'), + kDefaultHeaders: Symbol('default headers'), + kDefaultTrailers: Symbol('default trailers'), + kContentLength: Symbol('content length'), + kMockAgent: Symbol('mock agent'), + kMockAgentSet: Symbol('mock agent set'), + kMockAgentGet: Symbol('mock agent get'), + kMockDispatch: Symbol('mock dispatch'), + kClose: Symbol('close'), + kOriginalClose: Symbol('original agent close'), + kOrigin: Symbol('origin'), + kIsMockActive: Symbol('is mock active'), + kNetConnect: Symbol('net connect'), + kGetNetConnect: Symbol('get net connect'), + kConnected: Symbol('connected') } /***/ }), -/***/ 4841: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9323: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+const { MockNotMatchedError } = __nccwpck_require__(888) +const { + kDispatches, + kMockAgent, + kOriginalDispatch, + kOrigin, + kGetNetConnect +} = __nccwpck_require__(4347) +const { buildURL, nop } = __nccwpck_require__(3983) +const { STATUS_CODES } = __nccwpck_require__(3685) +const { + types: { + isPromise + } +} = __nccwpck_require__(3837) -/**/ +function matchValue (match, value) { + if (typeof match === 'string') { + return match === value + } + if (match instanceof RegExp) { + return match.test(value) + } + if (typeof match === 'function') { + return match(value) === true + } + return false +} -var Buffer = (__nccwpck_require__(1867).Buffer); -/**/ +function lowerCaseEntries (headers) { + return Object.fromEntries( + Object.entries(headers).map(([headerName, headerValue]) => { + return [headerName.toLocaleLowerCase(), headerValue] + }) + ) +} -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; +/** + * @param {import('../../index').Headers|string[]|Record} headers + * @param {string} key + */ +function getHeaderByName (headers, key) { + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { + return headers[i + 1] + } + } + + return undefined + } else if (typeof headers.get === 'function') { + return headers.get(key) + } else { + return lowerCaseEntries(headers)[key.toLocaleLowerCase()] } -}; +} -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; +/** @param {string[]} headers */ +function buildHeadersFromArray (headers) { // fetch HeadersList + const clone = headers.slice() + const entries = [] + for (let index = 0; index < clone.length; index += 2) { + entries.push([clone[index], clone[index + 1]]) + } + return Object.fromEntries(entries) +} + +function matchHeaders (mockDispatch, headers) { + if (typeof mockDispatch.headers === 'function') { + if (Array.isArray(headers)) { // fetch HeadersList + headers = buildHeadersFromArray(headers) } + return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) + } + if (typeof mockDispatch.headers === 'undefined') { + return true + } + if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { + return false } -}; -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} + for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { + const headerValue = getHeaderByName(headers, matchHeaderName) -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. -exports.s = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; + if (!matchValue(matchHeaderValue, headerValue)) { + return false + } } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); + return true } -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; +function safeUrl (path) { + if (typeof path !== 'string') { + return path } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; - -StringDecoder.prototype.end = utf8End; -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; + const pathSegments = path.split('?') -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); + if (pathSegments.length !== 2) { + return path } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; + const qp = new URLSearchParams(pathSegments.pop()) + qp.sort() + return [...pathSegments, qp.toString()].join('?') } -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. 
-function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; +function matchKey (mockDispatch, { path, method, body, headers }) { + const pathMatch = matchValue(mockDispatch.path, path) + const methodMatch = matchValue(mockDispatch.method, method) + const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true + const headersMatch = matchHeaders(mockDispatch, headers) + return pathMatch && methodMatch && bodyMatch && headersMatch +} + +function getResponseData (data) { + if (Buffer.isBuffer(data)) { + return data + } else if (typeof data === 'object') { + return JSON.stringify(data) + } else { + return data.toString() } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; +} + +function getMockDispatch (mockDispatches, key) { + const basePath = key.query ? buildURL(key.path, key.query) : key.path + const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath + + // Match path + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; - } - return nb; + + // Match method + matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) } - return 0; -} -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; + // Match body + matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } + + // Match headers + matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? 
JSON.stringify(key.headers) : key.headers}'`) + } + + return matchedMockDispatches[0] +} + +function addMockDispatch (mockDispatches, key, data) { + const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } + const replyData = typeof data === 'function' ? { callback: data } : { ...data } + const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } + mockDispatches.push(newMockDispatch) + return newMockDispatch +} + +function deleteMockDispatch (mockDispatches, key) { + const index = mockDispatches.findIndex(dispatch => { + if (!dispatch.consumed) { + return false } + return matchKey(dispatch, key) + }) + if (index !== -1) { + mockDispatches.splice(index, 1) } } -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); +function buildKey (opts) { + const { path, method, body, headers, query } = opts + return { + path, + method, + body, + headers, + query } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; } -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); +function generateKeyValues (data) { + return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ + ...keyValuePairs, + Buffer.from(`${key}`), + Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) + ], []) } -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; +/** + * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status + * @param {number} statusCode + */ +function getStatusText (statusCode) { + return STATUS_CODES[statusCode] || 'unknown' } -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. 
-function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } +async function getResponse (body) { + const buffers = [] + for await (const data of body) { + buffers.push(data) + } + return Buffer.concat(buffers).toString('utf8') +} + +/** + * Mock dispatch function used to simulate undici dispatches + */ +function mockDispatch (opts, handler) { + // Get mock dispatch from built key + const key = buildKey(opts) + const mockDispatch = getMockDispatch(this[kDispatches], key) + + mockDispatch.timesInvoked++ + + // Here's where we resolve a callback if a callback is present for the dispatch data. + if (mockDispatch.data.callback) { + mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } + } + + // Parse mockDispatch data + const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch + const { timesInvoked, times } = mockDispatch + + // If it's used up and not persistent, mark as consumed + mockDispatch.consumed = !persist && timesInvoked >= times + mockDispatch.pending = timesInvoked < times + + // If specified, trigger dispatch error + if (error !== null) { + deleteMockDispatch(this[kDispatches], key) + handler.onError(error) + return true + } + + // Handle the request with a delay if necessary + if (typeof delay === 'number' && delay > 0) { + setTimeout(() => { + handleReply(this[kDispatches]) + }, delay) + } else { + handleReply(this[kDispatches]) + } + + function handleReply (mockDispatches, _data = data) { + // fetch's HeadersList is a 1D string array + const optsHeaders = Array.isArray(opts.headers) + ? buildHeadersFromArray(opts.headers) + : opts.headers + const body = typeof _data === 'function' + ? _data({ ...opts, headers: optsHeaders }) + : _data + + // util.types.isPromise is likely needed for jest. + if (isPromise(body)) { + // If handleReply is asynchronous, throwing an error + // in the callback will reject the promise, rather than + // synchronously throw the error, which breaks some tests. + // Rather, we wait for the callback to resolve if it is a + // promise, and then re-run handleReply with the new body. + body.then((newData) => handleReply(mockDispatches, newData)) + return } - return r; + + const responseData = getResponseData(body) + const responseHeaders = generateKeyValues(headers) + const responseTrailers = generateKeyValues(trailers) + + handler.abort = nop + handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) + handler.onData(Buffer.from(responseData)) + handler.onComplete(responseTrailers) + deleteMockDispatch(mockDispatches, key) } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); + + function resume () {} + + return true } -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? 
this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); +function buildMockDispatch () { + const agent = this[kMockAgent] + const origin = this[kOrigin] + const originalDispatch = this[kOriginalDispatch] + + return function dispatch (opts, handler) { + if (agent.isMockActive) { + try { + mockDispatch.call(this, opts, handler) + } catch (error) { + if (error instanceof MockNotMatchedError) { + const netConnect = agent[kGetNetConnect]() + if (netConnect === false) { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) + } + if (checkNetConnect(netConnect, origin)) { + originalDispatch.call(this, opts, handler) + } else { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) + } + } else { + throw error + } + } + } else { + originalDispatch.call(this, opts, handler) + } } - return r; } -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; +function checkNetConnect (netConnect, origin) { + const url = new URL(origin) + if (netConnect === true) { + return true + } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { + return true } - return buf.toString('base64', i, buf.length - n); + return false } -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; +function buildMockOptions (opts) { + if (opts) { + const { agent, ...mockOptions } = opts + return mockOptions + } } -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); +module.exports = { + getResponseData, + getMockDispatch, + addMockDispatch, + deleteMockDispatch, + buildKey, + generateKeyValues, + matchValue, + getResponse, + getStatusText, + mockDispatch, + buildMockDispatch, + checkNetConnect, + buildMockOptions, + getHeaderByName } -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} /***/ }), -/***/ 1099: -/***/ ((module) => { +/***/ 6823: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = function stubs(obj, method, cfg, stub) { - if (!obj || !method || !obj[method]) - throw new Error('You must provide an object and a key for an existing method') +const { Transform } = __nccwpck_require__(2781) +const { Console } = __nccwpck_require__(6206) - if (!stub) { - stub = cfg - cfg = {} +/** + * Gets the output of `console.table(…)` as a string. 
+ */ +module.exports = class PendingInterceptorsFormatter { + constructor ({ disableColors } = {}) { + this.transform = new Transform({ + transform (chunk, _enc, cb) { + cb(null, chunk) + } + }) + + this.logger = new Console({ + stdout: this.transform, + inspectOptions: { + colors: !disableColors && !process.env.CI + } + }) } - stub = stub || function() {} + format (pendingInterceptors) { + const withPrettyHeaders = pendingInterceptors.map( + ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + Method: method, + Origin: origin, + Path: path, + 'Status code': statusCode, + Persistent: persist ? '✅' : '❌', + Invocations: timesInvoked, + Remaining: persist ? Infinity : times - timesInvoked + })) - cfg.callthrough = cfg.callthrough || false - cfg.calls = cfg.calls || 0 + this.logger.table(withPrettyHeaders) + return this.transform.read().toString() + } +} - var norevert = cfg.calls === 0 - var cached = obj[method].bind(obj) +/***/ }), - obj[method] = function() { - var args = [].slice.call(arguments) - var returnVal +/***/ 8891: +/***/ ((module) => { - if (cfg.callthrough) - returnVal = cached.apply(obj, args) +"use strict"; - returnVal = stub.apply(obj, args) || returnVal - if (!norevert && --cfg.calls === 0) - obj[method] = cached +const singulars = { + pronoun: 'it', + is: 'is', + was: 'was', + this: 'this' +} - return returnVal +const plurals = { + pronoun: 'they', + is: 'are', + was: 'were', + this: 'these' +} + +module.exports = class Pluralizer { + constructor (singular, plural) { + this.singular = singular + this.plural = plural + } + + pluralize (count) { + const one = count === 1 + const keys = one ? singulars : plurals + const noun = one ? this.singular : this.plural + return { ...keys, count, noun } } } /***/ }), -/***/ 9318: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 8266: +/***/ ((module) => { "use strict"; +/* eslint-disable */ -const os = __nccwpck_require__(2037); -const tty = __nccwpck_require__(6224); -const hasFlag = __nccwpck_require__(1621); - -const {env} = process; -let forceColor; -if (hasFlag('no-color') || - hasFlag('no-colors') || - hasFlag('color=false') || - hasFlag('color=never')) { - forceColor = 0; -} else if (hasFlag('color') || - hasFlag('colors') || - hasFlag('color=true') || - hasFlag('color=always')) { - forceColor = 1; -} -if ('FORCE_COLOR' in env) { - if (env.FORCE_COLOR === 'true') { - forceColor = 1; - } else if (env.FORCE_COLOR === 'false') { - forceColor = 0; - } else { - forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); - } -} +// Extracted from node/lib/internal/fixed_queue.js -function translateLevel(level) { - if (level === 0) { - return false; - } +// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. +const kSize = 2048; +const kMask = kSize - 1; - return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 - }; -} +// The FixedQueue is implemented as a singly-linked list of fixed-size +// circular buffers. It looks something like this: +// +// head tail +// | | +// v v +// +-----------+ <-----\ +-----------+ <------\ +-----------+ +// | [null] | \----- | next | \------- | next | +// +-----------+ +-----------+ +-----------+ +// | item | <-- bottom | item | <-- bottom | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | bottom --> | item | +// | item | | item | | item | +// | ... | | ... | | ... 
| +// | item | | item | | item | +// | item | | item | | item | +// | [empty] | <-- top | item | | item | +// | [empty] | | item | | item | +// | [empty] | | [empty] | <-- top top --> | [empty] | +// +-----------+ +-----------+ +-----------+ +// +// Or, if there is only one circular buffer, it looks something +// like either of these: +// +// head tail head tail +// | | | | +// v v v v +// +-----------+ +-----------+ +// | [null] | | [null] | +// +-----------+ +-----------+ +// | [empty] | | item | +// | [empty] | | item | +// | item | <-- bottom top --> | [empty] | +// | item | | [empty] | +// | [empty] | <-- top bottom --> | item | +// | [empty] | | item | +// +-----------+ +-----------+ +// +// Adding a value means moving `top` forward by one, removing means +// moving `bottom` forward by one. After reaching the end, the queue +// wraps around. +// +// When `top === bottom` the current queue is empty and when +// `top + 1 === bottom` it's full. This wastes a single space of storage +// but allows much quicker checks. -function supportsColor(haveStream, streamIsTTY) { - if (forceColor === 0) { - return 0; - } +class FixedCircularBuffer { + constructor() { + this.bottom = 0; + this.top = 0; + this.list = new Array(kSize); + this.next = null; + } - if (hasFlag('color=16m') || - hasFlag('color=full') || - hasFlag('color=truecolor')) { - return 3; - } + isEmpty() { + return this.top === this.bottom; + } - if (hasFlag('color=256')) { - return 2; - } + isFull() { + return ((this.top + 1) & kMask) === this.bottom; + } - if (haveStream && !streamIsTTY && forceColor === undefined) { - return 0; - } + push(data) { + this.list[this.top] = data; + this.top = (this.top + 1) & kMask; + } - const min = forceColor || 0; + shift() { + const nextItem = this.list[this.bottom]; + if (nextItem === undefined) + return null; + this.list[this.bottom] = undefined; + this.bottom = (this.bottom + 1) & kMask; + return nextItem; + } +} - if (env.TERM === 'dumb') { - return min; - } +module.exports = class FixedQueue { + constructor() { + this.head = this.tail = new FixedCircularBuffer(); + } - if (process.platform === 'win32') { - // Windows 10 build 10586 is the first Windows release that supports 256 colors. - // Windows 10 build 14931 is the first release that supports 16m/TrueColor. - const osRelease = os.release().split('.'); - if ( - Number(osRelease[0]) >= 10 && - Number(osRelease[2]) >= 10586 - ) { - return Number(osRelease[2]) >= 14931 ? 3 : 2; - } + isEmpty() { + return this.head.isEmpty(); + } - return 1; - } + push(data) { + if (this.head.isFull()) { + // Head is full: Creates a new queue, sets the old queue's `.next` to it, + // and sets it as the new main queue. + this.head = this.head.next = new FixedCircularBuffer(); + } + this.head.push(data); + } - if ('CI' in env) { - if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { - return 1; - } + shift() { + const tail = this.tail; + const next = tail.shift(); + if (tail.isEmpty() && tail.next !== null) { + // If there is another queue, it forms the new tail. + this.tail = tail.next; + } + return next; + } +}; - return min; - } - if ('TEAMCITY_VERSION' in env) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 
1 : 0; - } +/***/ }), - if (env.COLORTERM === 'truecolor') { - return 3; - } +/***/ 3198: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if ('TERM_PROGRAM' in env) { - const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); +"use strict"; - switch (env.TERM_PROGRAM) { - case 'iTerm.app': - return version >= 3 ? 3 : 2; - case 'Apple_Terminal': - return 2; - // No default - } - } - if (/-256(color)?$/i.test(env.TERM)) { - return 2; - } +const DispatcherBase = __nccwpck_require__(4839) +const FixedQueue = __nccwpck_require__(8266) +const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(2785) +const PoolStats = __nccwpck_require__(9689) - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { - return 1; - } +const kClients = Symbol('clients') +const kNeedDrain = Symbol('needDrain') +const kQueue = Symbol('queue') +const kClosedResolve = Symbol('closed resolve') +const kOnDrain = Symbol('onDrain') +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kGetDispatcher = Symbol('get dispatcher') +const kAddClient = Symbol('add client') +const kRemoveClient = Symbol('remove client') +const kStats = Symbol('stats') - if ('COLORTERM' in env) { - return 1; - } +class PoolBase extends DispatcherBase { + constructor () { + super() - return min; -} + this[kQueue] = new FixedQueue() + this[kClients] = [] + this[kQueued] = 0 -function getSupportLevel(stream) { - const level = supportsColor(stream, stream && stream.isTTY); - return translateLevel(level); -} + const pool = this -module.exports = { - supportsColor: getSupportLevel, - stdout: translateLevel(supportsColor(true, tty.isatty(1))), - stderr: translateLevel(supportsColor(true, tty.isatty(2))) -}; + this[kOnDrain] = function onDrain (origin, targets) { + const queue = pool[kQueue] + let needDrain = false -/***/ }), + while (!needDrain) { + const item = queue.shift() + if (!item) { + break + } + pool[kQueued]-- + needDrain = !this.dispatch(item.opts, item.handler) + } -/***/ 4920: -/***/ ((__unused_webpack_module, exports) => { + this[kNeedDrain] = needDrain -"use strict"; + if (!this[kNeedDrain] && pool[kNeedDrain]) { + pool[kNeedDrain] = false + pool.emit('drain', origin, [pool, ...targets]) + } -/** - * @license - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; -/** - * @class TeenyStatisticsWarning - * @extends Error - * @description While an error, is used for emitting warnings when - * meeting certain configured thresholds. 
- * @see process.emitWarning - */ -class TeenyStatisticsWarning extends Error { - /** - * @param {string} message - */ - constructor(message) { - super(message); - this.threshold = 0; - this.type = ''; - this.value = 0; - this.name = this.constructor.name; - Error.captureStackTrace(this, this.constructor); + if (pool[kClosedResolve] && queue.isEmpty()) { + Promise + .all(pool[kClients].map(c => c.close())) + .then(pool[kClosedResolve]) + } } -} -exports.TeenyStatisticsWarning = TeenyStatisticsWarning; -TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; -/** - * @class TeenyStatistics - * @description Maintain various statistics internal to teeny-request. Tracking - * is not automatic and must be instrumented within teeny-request. - */ -class TeenyStatistics { - /** - * @param {TeenyStatisticsOptions} [opts] - */ - constructor(opts) { - /** - * @type {number} - * @private - * @default 0 - */ - this._concurrentRequests = 0; - /** - * @type {boolean} - * @private - * @default false - */ - this._didConcurrentRequestWarn = false; - this._options = TeenyStatistics._prepareOptions(opts); + + this[kOnConnect] = (origin, targets) => { + pool.emit('connect', origin, [pool, ...targets]) } - /** - * Returns a copy of the current options. - * @return {TeenyStatisticsOptions} - */ - getOptions() { - return Object.assign({}, this._options); + + this[kOnDisconnect] = (origin, targets, err) => { + pool.emit('disconnect', origin, [pool, ...targets], err) } - /** - * Change configured statistics options. This will not preserve unspecified - * options that were previously specified, i.e. this is a reset of options. - * @param {TeenyStatisticsOptions} [opts] - * @returns {TeenyStatisticsConfig} The previous options. - * @see _prepareOptions - */ - setOptions(opts) { - const oldOpts = this._options; - this._options = TeenyStatistics._prepareOptions(opts); - return oldOpts; + + this[kOnConnectionError] = (origin, targets, err) => { + pool.emit('connectionError', origin, [pool, ...targets], err) } - /** - * @readonly - * @return {TeenyStatisticsCounters} - */ - get counters() { - return { - concurrentRequests: this._concurrentRequests, - }; + + this[kStats] = new PoolStats(this) + } + + get [kBusy] () { + return this[kNeedDrain] + } + + get [kConnected] () { + return this[kClients].filter(client => client[kConnected]).length + } + + get [kFree] () { + return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length + } + + get [kPending] () { + let ret = this[kQueued] + for (const { [kPending]: pending } of this[kClients]) { + ret += pending } - /** - * @description Should call this right before making a request. - */ - requestStarting() { - this._concurrentRequests++; - if (this._options.concurrentRequests > 0 && - this._concurrentRequests >= this._options.concurrentRequests && - !this._didConcurrentRequestWarn) { - this._didConcurrentRequestWarn = true; - const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + - this._concurrentRequests + - ' requests in-flight, which exceeds the configured threshold of ' + - this._options.concurrentRequests + - '. 
Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + - 'variable or the concurrentRequests option of teeny-request to ' + - 'increase or disable (0) this warning.'); - warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; - warning.value = this._concurrentRequests; - warning.threshold = this._options.concurrentRequests; - process.emitWarning(warning); - } + return ret + } + + get [kRunning] () { + let ret = 0 + for (const { [kRunning]: running } of this[kClients]) { + ret += running } - /** - * @description When using `requestStarting`, call this after the request - * has finished. - */ - requestFinished() { - // TODO negative? - this._concurrentRequests--; + return ret + } + + get [kSize] () { + let ret = this[kQueued] + for (const { [kSize]: size } of this[kClients]) { + ret += size } - /** - * Configuration Precedence: - * 1. Dependency inversion via defined option. - * 2. Global numeric environment variable. - * 3. Built-in default. - * This will not preserve unspecified options previously specified. - * @param {TeenyStatisticsOptions} [opts] - * @returns {TeenyStatisticsOptions} - * @private - */ - static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { - let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; - const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); - if (diConcurrentRequests !== undefined) { - concurrentRequests = diConcurrentRequests; - } - else if (!Number.isNaN(envConcurrentRequests)) { - concurrentRequests = envConcurrentRequests; - } - return { concurrentRequests }; + return ret + } + + get stats () { + return this[kStats] + } + + async [kClose] () { + if (this[kQueue].isEmpty()) { + return Promise.all(this[kClients].map(c => c.close())) + } else { + return new Promise((resolve) => { + this[kClosedResolve] = resolve + }) } -} -exports.TeenyStatistics = TeenyStatistics; -/** - * @description A default threshold representing when to warn about excessive - * in-flight/concurrent requests. - * @type {number} - * @static - * @readonly - * @default 5000 - */ -TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; -//# sourceMappingURL=TeenyStatistics.js.map + } -/***/ }), + async [kDestroy] (err) { + while (true) { + const item = this[kQueue].shift() + if (!item) { + break + } + item.handler.onError(err) + } -/***/ 446: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return Promise.all(this[kClients].map(c => c.destroy(err))) + } -"use strict"; + [kDispatch] (opts, handler) { + const dispatcher = this[kGetDispatcher]() -/** - * @license - * Copyright 2019 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getAgent = exports.pool = void 0; -const http_1 = __nccwpck_require__(3685); -const https_1 = __nccwpck_require__(5687); -// eslint-disable-next-line node/no-deprecated-api -const url_1 = __nccwpck_require__(7310); -exports.pool = new Map(); -/** - * Determines if a proxy should be considered based on the environment. - * - * @param uri The request uri - * @returns {boolean} - */ -function shouldUseProxyForURI(uri) { - const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; - if (!noProxyEnv) { - return true; - } - const givenURI = new URL(uri); - for (const noProxyRaw of noProxyEnv.split(',')) { - const noProxy = noProxyRaw.trim(); - if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { - return false; - } - else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { - const noProxyWildcard = noProxy.replace(/^\*\./, '.'); - if (givenURI.hostname.endsWith(noProxyWildcard)) { - return false; - } - } - } - return true; -} -/** - * Returns a custom request Agent if one is found, otherwise returns undefined - * which will result in the global http(s) Agent being used. - * @private - * @param {string} uri The request uri - * @param {Options} reqOpts The request options - * @returns {HttpAnyAgent|undefined} - */ -function getAgent(uri, reqOpts) { - const isHttp = uri.startsWith('http://'); - const proxy = reqOpts.proxy || - process.env.HTTP_PROXY || - process.env.http_proxy || - process.env.HTTPS_PROXY || - process.env.https_proxy; - const poolOptions = Object.assign({}, reqOpts.pool); - const manuallyProvidedProxy = !!reqOpts.proxy; - const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); - if (proxy && shouldUseProxy) { - // tslint:disable-next-line variable-name - const Agent = isHttp - ? __nccwpck_require__(3764) - : __nccwpck_require__(7219); - const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; - return new Agent(proxyOpts); + if (!dispatcher) { + this[kNeedDrain] = true + this[kQueue].push({ opts, handler }) + this[kQueued]++ + } else if (!dispatcher.dispatch(opts, handler)) { + dispatcher[kNeedDrain] = true + this[kNeedDrain] = !this[kGetDispatcher]() } - let key = isHttp ? 'http' : 'https'; - if (reqOpts.forever) { - key += ':forever'; - if (!exports.pool.has(key)) { - // tslint:disable-next-line variable-name - const Agent = isHttp ? 
http_1.Agent : https_1.Agent; - exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + + return !this[kNeedDrain] + } + + [kAddClient] (client) { + client + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].push(client) + + if (this[kNeedDrain]) { + process.nextTick(() => { + if (this[kNeedDrain]) { + this[kOnDrain](client[kUrl], [this, client]) } + }) } - return exports.pool.get(key); + + return this + } + + [kRemoveClient] (client) { + client.close(() => { + const idx = this[kClients].indexOf(client) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + }) + + this[kNeedDrain] = this[kClients].some(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + } } -exports.getAgent = getAgent; -//# sourceMappingURL=agents.js.map + +module.exports = { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} + /***/ }), -/***/ 6886: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9689: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(2785) +const kPool = Symbol('pool') -/** - * @license - * Copyright 2018 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.teenyRequest = exports.RequestError = void 0; -const node_fetch_1 = __nccwpck_require__(467); -const stream_1 = __nccwpck_require__(2781); -const uuid = __nccwpck_require__(7835); -const agents_1 = __nccwpck_require__(446); -const TeenyStatistics_1 = __nccwpck_require__(4920); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const streamEvents = __nccwpck_require__(9626); -class RequestError extends Error { +class PoolStats { + constructor (pool) { + this[kPool] = pool + } + + get connected () { + return this[kPool][kConnected] + } + + get free () { + return this[kPool][kFree] + } + + get pending () { + return this[kPool][kPending] + } + + get queued () { + return this[kPool][kQueued] + } + + get running () { + return this[kPool][kRunning] + } + + get size () { + return this[kPool][kSize] + } } -exports.RequestError = RequestError; -/** - * Convert options from Request to Fetch format - * @private - * @param reqOpts Request options - */ -function requestToFetchOptions(reqOpts) { - const options = { - method: reqOpts.method || 'GET', - ...(reqOpts.timeout && { timeout: reqOpts.timeout }), - ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), - }; - if (typeof reqOpts.json === 'object') { - // Add Content-type: application/json header - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['Content-Type'] = 'application/json'; - // Set body to JSON representation of value - options.body = JSON.stringify(reqOpts.json); - } - else { - if (Buffer.isBuffer(reqOpts.body)) { - options.body = reqOpts.body; - } - else if (typeof reqOpts.body !== 'string') { - options.body = JSON.stringify(reqOpts.body); - } - else { - options.body = reqOpts.body; - } + +module.exports = PoolStats + + +/***/ }), + +/***/ 4634: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kGetDispatcher +} = __nccwpck_require__(3198) +const Client = __nccwpck_require__(3598) +const { + InvalidArgumentError +} = __nccwpck_require__(8045) +const util = __nccwpck_require__(3983) +const { kUrl, kInterceptors } = __nccwpck_require__(2785) +const buildConnector = __nccwpck_require__(2067) + +const kOptions = Symbol('options') +const kConnections = Symbol('connections') +const kFactory = Symbol('factory') + +function defaultFactory (origin, opts) { + return new Client(origin, opts) +} + +class Pool extends PoolBase { + constructor (origin, { + connections, + factory = defaultFactory, + connect, + connectTimeout, + tls, + maxCachedSessions, + socketPath, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + allowH2, + ...options + } = {}) { + super() + + if (connections != null && (!Number.isFinite(connections) || connections < 0)) { + throw new InvalidArgumentError('invalid connections') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? 
{ autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - options.headers = reqOpts.headers; - let uri = (reqOpts.uri || - reqOpts.url); - if (!uri) { - throw new Error('Missing uri or url in reqOpts.'); + + this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) + ? options.interceptors.Pool + : [] + this[kConnections] = connections || null + this[kUrl] = util.parseOrigin(origin) + this[kOptions] = { ...util.deepClone(options), connect, allowH2 } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kFactory] = factory + } + + [kGetDispatcher] () { + let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) + + if (dispatcher) { + return dispatcher } - if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const qs = __nccwpck_require__(3477); - const params = qs.stringify(reqOpts.qs); - uri = uri + '?' + params; + + if (!this[kConnections] || this[kClients].length < this[kConnections]) { + dispatcher = this[kFactory](this[kUrl], this[kOptions]) + this[kAddClient](dispatcher) } - options.agent = (0, agents_1.getAgent)(uri, reqOpts); - return { uri, options }; + + return dispatcher + } } -/** - * Convert a response from `fetch` to `request` format. - * @private - * @param opts The `request` options used to create the request. - * @param res The Fetch response - * @returns A `request` response object - */ -function fetchToRequestResponse(opts, res) { - const request = {}; - request.agent = opts.agent || false; - request.headers = (opts.headers || {}); - request.href = res.url; - // headers need to be converted from a map to an obj - const resHeaders = {}; - res.headers.forEach((value, key) => (resHeaders[key] = value)); - const response = Object.assign(res.body, { - statusCode: res.status, - statusMessage: res.statusText, - request, - body: res.body, - headers: resHeaders, - toJSON: () => ({ headers: resHeaders }), - }); - return response; + +module.exports = Pool + + +/***/ }), + +/***/ 7858: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(2785) +const { URL } = __nccwpck_require__(7310) +const Agent = __nccwpck_require__(7890) +const Pool = __nccwpck_require__(4634) +const DispatcherBase = __nccwpck_require__(4839) +const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(8045) +const buildConnector = __nccwpck_require__(2067) + +const kAgent = Symbol('proxy agent') +const kClient = Symbol('proxy client') +const kProxyHeaders = Symbol('proxy headers') +const kRequestTls = Symbol('request tls settings') +const kProxyTls = Symbol('proxy tls settings') +const kConnectEndpoint = Symbol('connect endpoint function') + +function defaultProtocolPort (protocol) { + return protocol === 'https:' ? 
443 : 80 } -/** - * Create POST body from two parts as multipart/related content-type - * @private - * @param boundary - * @param multipart - */ -function createMultipartStream(boundary, multipart) { - const finale = `--${boundary}--`; - const stream = new stream_1.PassThrough(); - for (const part of multipart) { - const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; - stream.write(preamble); - if (typeof part.body === 'string') { - stream.write(part.body); - stream.write('\r\n'); - } - else { - part.body.pipe(stream, { end: false }); - part.body.on('end', () => { - stream.write('\r\n'); - stream.write(finale); - stream.end(); - }); - } - } - return stream; + +function buildProxyOptions (opts) { + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + return { + uri: opts.uri, + protocol: opts.protocol || 'https' + } } -function teenyRequest(reqOpts, callback) { - const { uri, options } = requestToFetchOptions(reqOpts); - const multipart = reqOpts.multipart; - if (reqOpts.multipart && multipart.length === 2) { - if (!callback) { - // TODO: add support for multipart uploads through streaming - throw new Error('Multipart without callback is not implemented.'); - } - const boundary = uuid.v4(); - options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; - options.body = createMultipartStream(boundary, multipart); - // Multipart upload - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - const header = res.headers.get('content-type'); - const response = fetchToRequestResponse(options, res); - const body = response.body; - if (header === 'application/json' || - header === 'application/json; charset=utf-8') { - res.json().then(json => { - response.body = json; - callback(null, response, json); - }, (err) => { - callback(err, response, body); - }); - return; - } - res.text().then(text => { - response.body = text; - callback(null, response, text); - }, err => { - callback(err, response, body); - }); - }, err => { - teenyRequest.stats.requestFinished(); - callback(err, null, null); - }); - return; + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class ProxyAgent extends DispatcherBase { + constructor (opts) { + super(opts) + this[kProxy] = buildProxyOptions(opts) + this[kAgent] = new Agent(opts) + this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) + ? 
opts.interceptors.ProxyAgent + : [] + + if (typeof opts === 'string') { + opts = { uri: opts } } - if (callback === undefined) { - // Stream mode - const requestStream = streamEvents(new stream_1.PassThrough()); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let responseStream; - requestStream.once('reading', () => { - if (responseStream) { - (0, stream_1.pipeline)(responseStream, requestStream, () => { }); - } - else { - requestStream.once('response', () => { - (0, stream_1.pipeline)(responseStream, requestStream, () => { }); - }); - } - }); - options.compress = false; - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - responseStream = res.body; - responseStream.on('error', (err) => { - requestStream.emit('error', err); - }); - const response = fetchToRequestResponse(options, res); - requestStream.emit('response', response); - }, err => { - teenyRequest.stats.requestFinished(); - requestStream.emit('error', err); - }); - // fetch doesn't supply the raw HTTP stream, instead it - // returns a PassThrough piped from the HTTP response - // stream. - return requestStream; + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') } - // GET or POST with callback - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - const header = res.headers.get('content-type'); - const response = fetchToRequestResponse(options, res); - const body = response.body; - if (header === 'application/json' || - header === 'application/json; charset=utf-8') { - if (response.statusCode === 204) { - // Probably a DELETE - callback(null, response, body); - return; + + const { clientFactory = defaultFactory } = opts + + if (typeof clientFactory !== 'function') { + throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') + } + + this[kRequestTls] = opts.requestTls + this[kProxyTls] = opts.proxyTls + this[kProxyHeaders] = opts.headers || {} + + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + + if (opts.auth && opts.token) { + throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') + } else if (opts.auth) { + /* @deprecated in favour of opts.token */ + this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` + } else if (opts.token) { + this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` + } + + const connect = buildConnector({ ...opts.proxyTls }) + this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) + this[kClient] = clientFactory(resolvedUrl, { connect }) + this[kAgent] = new Agent({ + ...opts, + connect: async (opts, callback) => { + let requestedHost = opts.host + if (!opts.port) { + requestedHost += `:${defaultProtocolPort(opts.protocol)}` + } + try { + const { socket, statusCode } = await this[kClient].connect({ + origin, + port, + path: requestedHost, + signal: opts.signal, + headers: { + ...this[kProxyHeaders], + host } - res.json().then(json => { - response.body = json; - callback(null, response, json); - }, err => { - callback(err, response, body); - }); - return; + }) + if (statusCode !== 200) { + socket.on('error', () => {}).destroy() + callback(new 
RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) + } + if (opts.protocol !== 'https:') { + callback(null, socket) + return + } + let servername + if (this[kRequestTls]) { + servername = this[kRequestTls].servername + } else { + servername = opts.servername + } + this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) + } catch (err) { + callback(err) } - res.text().then(text => { - const response = fetchToRequestResponse(options, res); - response.body = text; - callback(null, response, text); - }, err => { - callback(err, response, body); - }); - }, err => { - teenyRequest.stats.requestFinished(); - callback(err, null, null); - }); - return; -} -exports.teenyRequest = teenyRequest; -teenyRequest.defaults = (defaults) => { - return (reqOpts, callback) => { - const opts = { ...defaults, ...reqOpts }; - if (callback === undefined) { - return teenyRequest(opts); + } + }) + } + + dispatch (opts, handler) { + const { host } = new URL(opts.origin) + const headers = buildHeaders(opts.headers) + throwIfProxyAuthIsSent(headers) + return this[kAgent].dispatch( + { + ...opts, + headers: { + ...headers, + host } - teenyRequest(opts, callback); - }; -}; + }, + handler + ) + } + + async [kClose] () { + await this[kAgent].close() + await this[kClient].close() + } + + async [kDestroy] () { + await this[kAgent].destroy() + await this[kClient].destroy() + } +} + /** - * Single instance of an interface for keeping track of things. + * @param {string[] | Record} headers + * @returns {Record} */ -teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); -teenyRequest.resetStats = () => { - teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); -}; -//# sourceMappingURL=index.js.map +function buildHeaders (headers) { + // When using undici.fetch, the headers list is stored + // as an array. + if (Array.isArray(headers)) { + /** @type {Record} */ + const headersPair = {} + + for (let i = 0; i < headers.length; i += 2) { + headersPair[headers[i]] = headers[i + 1] + } + + return headersPair + } + + return headers +} + +/** + * @param {Record} headers + * + * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers + * Nevertheless, it was changed and to avoid a security vulnerability by end users + * this check was created. 
+ * It should be removed in the next major version for performance reasons + */ +function throwIfProxyAuthIsSent (headers) { + const existProxyAuth = headers && Object.keys(headers) + .find((key) => key.toLowerCase() === 'proxy-authorization') + if (existProxyAuth) { + throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') + } +} + +module.exports = ProxyAgent + /***/ }), -/***/ 7835: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9459: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; +let fastNow = Date.now() +let fastNowTimeout + +const fastTimers = [] + +function onTimeout () { + fastNow = Date.now() + + let len = fastTimers.length + let idx = 0 + while (idx < len) { + const timer = fastTimers[idx] + + if (timer.state === 0) { + timer.state = fastNow + timer.delay + } else if (timer.state > 0 && fastNow >= timer.state) { + timer.state = -1 + timer.callback(timer.opaque) + } + + if (timer.state === -1) { + timer.state = -2 + if (idx !== len - 1) { + fastTimers[idx] = fastTimers.pop() + } else { + fastTimers.pop() + } + len -= 1 + } else { + idx += 1 + } } -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; + + if (fastTimers.length > 0) { + refreshTimeout() } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; +} + +function refreshTimeout () { + if (fastNowTimeout && fastNowTimeout.refresh) { + fastNowTimeout.refresh() + } else { + clearTimeout(fastNowTimeout) + fastNowTimeout = setTimeout(onTimeout, 1e3) + if (fastNowTimeout.unref) { + fastNowTimeout.unref() + } } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; +} + +class Timeout { + constructor (callback, delay, opaque) { + this.callback = callback + this.delay = delay + this.opaque = opaque + + // -2 not in timer list + // -1 in timer list but inactive + // 0 in timer list waiting for time + // > 0 in timer list waiting for time to expire + this.state = -2 + + this.refresh() } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; + + refresh () { + if (this.state === -2) { + fastTimers.push(this) + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + } + + this.state = 0 } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; + + clear () { + this.state = -1 } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; +} + +module.exports = { + setTimeout (callback, delay, opaque) { + return delay < 1e3 + ? 
setTimeout(callback, delay, opaque) + : new Timeout(callback, delay, opaque) + }, + clearTimeout (timeout) { + if (timeout instanceof Timeout) { + timeout.clear() + } else { + clearTimeout(timeout) + } } -})); +} -var _v = _interopRequireDefault(__nccwpck_require__(618)); -var _v2 = _interopRequireDefault(__nccwpck_require__(6888)); +/***/ }), -var _v3 = _interopRequireDefault(__nccwpck_require__(1186)); +/***/ 5354: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var _v4 = _interopRequireDefault(__nccwpck_require__(2385)); +"use strict"; -var _nil = _interopRequireDefault(__nccwpck_require__(5879)); -var _version = _interopRequireDefault(__nccwpck_require__(694)); +const diagnosticsChannel = __nccwpck_require__(7643) +const { uid, states } = __nccwpck_require__(9188) +const { + kReadyState, + kSentClose, + kByteParser, + kReceivedClose +} = __nccwpck_require__(7578) +const { fireEvent, failWebsocketConnection } = __nccwpck_require__(5515) +const { CloseEvent } = __nccwpck_require__(2611) +const { makeRequest } = __nccwpck_require__(8359) +const { fetching } = __nccwpck_require__(4881) +const { Headers } = __nccwpck_require__(554) +const { getGlobalDispatcher } = __nccwpck_require__(1892) +const { kHeadersList } = __nccwpck_require__(2785) + +const channels = {} +channels.open = diagnosticsChannel.channel('undici:websocket:open') +channels.close = diagnosticsChannel.channel('undici:websocket:close') +channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6113) +} catch { + +} + +/** + * @see https://websockets.spec.whatwg.org/#concept-websocket-establish + * @param {URL} url + * @param {string|string[]} protocols + * @param {import('./websocket').WebSocket} ws + * @param {(response: any) => void} onEstablish + * @param {Partial} options + */ +function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { + // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s + // scheme is "ws", and to "https" otherwise. + const requestURL = url + + requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:' + + // 2. Let request be a new request, whose URL is requestURL, client is client, + // service-workers mode is "none", referrer is "no-referrer", mode is + // "websocket", credentials mode is "include", cache mode is "no-store" , + // and redirect mode is "error". + const request = makeRequest({ + urlList: [requestURL], + serviceWorkers: 'none', + referrer: 'no-referrer', + mode: 'websocket', + credentials: 'include', + cache: 'no-store', + redirect: 'error' + }) -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); + // Note: undici extension, allow setting custom headers. + if (options.headers) { + const headersList = new Headers(options.headers)[kHeadersList] + + request.headersList = headersList + } + + // 3. Append (`Upgrade`, `websocket`) to request’s header list. + // 4. Append (`Connection`, `Upgrade`) to request’s header list. + // Note: both of these are handled by undici currently. + // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 + + // 5. Let keyValue be a nonce consisting of a randomly selected + // 16-byte value that has been forgiving-base64-encoded and + // isomorphic encoded. + const keyValue = crypto.randomBytes(16).toString('base64') + + // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s + // header list. 
+ request.headersList.append('sec-websocket-key', keyValue) + + // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s + // header list. + request.headersList.append('sec-websocket-version', '13') + + // 8. For each protocol in protocols, combine + // (`Sec-WebSocket-Protocol`, protocol) in request’s header + // list. + for (const protocol of protocols) { + request.headersList.append('sec-websocket-protocol', protocol) + } + + // 9. Let permessageDeflate be a user-agent defined + // "permessage-deflate" extension header value. + // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 + // TODO: enable once permessage-deflate is supported + const permessageDeflate = '' // 'permessage-deflate; 15' + + // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to + // request’s header list. + // request.headersList.append('sec-websocket-extensions', permessageDeflate) + + // 11. Fetch request with useParallelQueue set to true, and + // processResponse given response being these steps: + const controller = fetching({ + request, + useParallelQueue: true, + dispatcher: options.dispatcher ?? getGlobalDispatcher(), + processResponse (response) { + // 1. If response is a network error or its status is not 101, + // fail the WebSocket connection. + if (response.type === 'error' || response.status !== 101) { + failWebsocketConnection(ws, 'Received network error or non-101 status code.') + return + } -var _stringify = _interopRequireDefault(__nccwpck_require__(5802)); + // 2. If protocols is not the empty list and extracting header + // list values given `Sec-WebSocket-Protocol` and response’s + // header list results in null, failure, or the empty byte + // sequence, then fail the WebSocket connection. + if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Server did not respond with sent protocols.') + return + } -var _parse = _interopRequireDefault(__nccwpck_require__(893)); + // 3. Follow the requirements stated step 2 to step 6, inclusive, + // of the last set of steps in section 4.1 of The WebSocket + // Protocol to validate response. This either results in fail + // the WebSocket connection or the WebSocket connection is + // established. + + // 2. If the response lacks an |Upgrade| header field or the |Upgrade| + // header field contains a value that is not an ASCII case- + // insensitive match for the value "websocket", the client MUST + // _Fail the WebSocket Connection_. + if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { + failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') + return + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // 3. If the response lacks a |Connection| header field or the + // |Connection| header field doesn't contain a token that is an + // ASCII case-insensitive match for the value "Upgrade", the client + // MUST _Fail the WebSocket Connection_. + if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { + failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') + return + } -/***/ }), + // 4. 
If the response lacks a |Sec-WebSocket-Accept| header field or + // the |Sec-WebSocket-Accept| contains a value other than the + // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- + // Key| (as a string, not base64-decoded) with the string "258EAFA5- + // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and + // trailing whitespace, the client MUST _Fail the WebSocket + // Connection_. + const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') + const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') + if (secWSAccept !== digest) { + failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') + return + } -/***/ 9576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // 5. If the response includes a |Sec-WebSocket-Extensions| header + // field and this header field indicates the use of an extension + // that was not present in the client's handshake (the server has + // indicated an extension not requested by the client), the client + // MUST _Fail the WebSocket Connection_. (The parsing of this + // header field to determine which extensions are requested is + // discussed in Section 9.1.) + const secExtension = response.headersList.get('Sec-WebSocket-Extensions') + + if (secExtension !== null && secExtension !== permessageDeflate) { + failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') + return + } -"use strict"; + // 6. If the response includes a |Sec-WebSocket-Protocol| header field + // and this header field indicates the use of a subprotocol that was + // not present in the client's handshake (the server has indicated a + // subprotocol not requested by the client), the client MUST _Fail + // the WebSocket Connection_. + const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') + if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') + return + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + response.socket.on('data', onSocketData) + response.socket.on('close', onSocketClose) + response.socket.on('error', onSocketError) -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + if (channels.open.hasSubscribers) { + channels.open.publish({ + address: response.socket.address(), + protocol: secProtocol, + extensions: secExtension + }) + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + onEstablish(response) + } + }) -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + return controller +} + +/** + * @param {Buffer} chunk + */ +function onSocketData (chunk) { + if (!this.ws[kByteParser].write(chunk)) { + this.pause() + } +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 + */ +function onSocketClose () { + const { ws } = this + + // If the TCP connection was closed after the + // WebSocket closing handshake was completed, the WebSocket connection + // is said to have been closed _cleanly_. + const wasClean = ws[kSentClose] && ws[kReceivedClose] + + let code = 1005 + let reason = '' + + const result = ws[kByteParser].closingInfo + + if (result) { + code = result.code ?? 
1005 + reason = result.reason + } else if (!ws[kSentClose]) { + // If _The WebSocket + // Connection is Closed_ and no Close control frame was received by the + // endpoint (such as could occur if the underlying transport connection + // is lost), _The WebSocket Connection Close Code_ is considered to be + // 1006. + code = 1006 + } + + // 1. Change the ready state to CLOSED (3). + ws[kReadyState] = states.CLOSED + + // 2. If the user agent was required to fail the WebSocket + // connection, or if the WebSocket connection was closed + // after being flagged as full, fire an event named error + // at the WebSocket object. + // TODO + + // 3. Fire an event named close at the WebSocket object, + // using CloseEvent, with the wasClean attribute + // initialized to true if the connection closed cleanly + // and false otherwise, the code attribute initialized to + // the WebSocket connection close code, and the reason + // attribute initialized to the result of applying UTF-8 + // decode without BOM to the WebSocket connection close + // reason. + fireEvent('close', ws, CloseEvent, { + wasClean, code, reason + }) + + if (channels.close.hasSubscribers) { + channels.close.publish({ + websocket: ws, + code, + reason + }) + } +} + +function onSocketError (error) { + const { ws } = this + + ws[kReadyState] = states.CLOSING + + if (channels.socketError.hasSubscribers) { + channels.socketError.publish(error) } - return _crypto.default.createHash('md5').update(bytes).digest(); + this.destroy() +} + +module.exports = { + establishWebSocketConnection } -var _default = md5; -exports["default"] = _default; /***/ }), -/***/ 4974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9188: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +// This is a Globally Unique Identifier unique used +// to validate that the endpoint accepts websocket +// connections. +// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 +const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +const states = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3 +} -var _default = { - randomUUID: _crypto.default.randomUUID -}; -exports["default"] = _default; +const opcodes = { + CONTINUATION: 0x0, + TEXT: 0x1, + BINARY: 0x2, + CLOSE: 0x8, + PING: 0x9, + PONG: 0xA +} -/***/ }), +const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 -/***/ 5879: -/***/ ((__unused_webpack_module, exports) => { +const parserStates = { + INFO: 0, + PAYLOADLENGTH_16: 2, + PAYLOADLENGTH_64: 3, + READ_DATA: 4 +} -"use strict"; +const emptyBuffer = Buffer.allocUnsafe(0) +module.exports = { + uid, + staticPropertyDescriptors, + states, + opcodes, + maxUnsigned16Bit, + parserStates, + emptyBuffer +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; /***/ }), -/***/ 893: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 2611: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const { webidl } = __nccwpck_require__(1744) +const { kEnumerableProperty } = __nccwpck_require__(3983) +const { MessagePort } = __nccwpck_require__(1267) -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); +/** + * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent + */ +class MessageEvent extends Event { + #eventInit -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.MessageEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict } - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + get data () { + webidl.brandCheck(this, MessageEvent) - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ + return this.#eventInit.data + } - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ + get origin () { + webidl.brandCheck(this, MessageEvent) - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ return this.#eventInit.origin + } - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + get lastEventId () { + webidl.brandCheck(this, MessageEvent) - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} + return this.#eventInit.lastEventId + } -var _default = parse; -exports["default"] = _default; + get source () { + webidl.brandCheck(this, MessageEvent) -/***/ }), + return this.#eventInit.source + } -/***/ 7658: -/***/ ((__unused_webpack_module, exports) => { + get ports () { + webidl.brandCheck(this, MessageEvent) -"use strict"; + if (!Object.isFrozen(this.#eventInit.ports)) { + Object.freeze(this.#eventInit.ports) + } + return this.#eventInit.ports + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; + initMessageEvent ( + type, + bubbles = false, + cancelable = false, + data = null, + origin = '', + lastEventId = '', + source = null, + ports = [] + ) { + webidl.brandCheck(this, MessageEvent) -/***/ }), + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) -/***/ 2042: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return new MessageEvent(type, { + bubbles, cancelable, data, origin, lastEventId, source, ports + }) + } +} -"use strict"; +/** + * @see https://websockets.spec.whatwg.org/#the-closeevent-interface + */ +class CloseEvent extends Event { + #eventInit + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.CloseEventInit(eventInitDict) -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + super(type, eventInitDict) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + this.#eventInit = eventInitDict + } -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + get wasClean () { + webidl.brandCheck(this, CloseEvent) -let poolPtr = rnds8Pool.length; + return this.#eventInit.wasClean + } -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); + get code () { + webidl.brandCheck(this, CloseEvent) - poolPtr = 0; + return this.#eventInit.code } - return rnds8Pool.slice(poolPtr, poolPtr += 16); + get reason () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.reason + } } -/***/ }), +// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface +class ErrorEvent extends Event { + #eventInit -/***/ 6723: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + constructor (type, eventInitDict) { + webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) -"use strict"; + super(type, eventInitDict) + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? 
{}) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + this.#eventInit = eventInitDict + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + get message () { + webidl.brandCheck(this, ErrorEvent) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + return this.#eventInit.message + } -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + get filename () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.filename } - return _crypto.default.createHash('sha1').update(bytes).digest(); -} + get lineno () { + webidl.brandCheck(this, ErrorEvent) -var _default = sha1; -exports["default"] = _default; + return this.#eventInit.lineno + } -/***/ }), + get colno () { + webidl.brandCheck(this, ErrorEvent) -/***/ 5802: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return this.#eventInit.colno + } -"use strict"; + get error () { + webidl.brandCheck(this, ErrorEvent) + return this.#eventInit.error + } +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -exports.unsafeStringify = unsafeStringify; +Object.defineProperties(MessageEvent.prototype, { + [Symbol.toStringTag]: { + value: 'MessageEvent', + configurable: true + }, + data: kEnumerableProperty, + origin: kEnumerableProperty, + lastEventId: kEnumerableProperty, + source: kEnumerableProperty, + ports: kEnumerableProperty, + initMessageEvent: kEnumerableProperty +}) -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); +Object.defineProperties(CloseEvent.prototype, { + [Symbol.toStringTag]: { + value: 'CloseEvent', + configurable: true + }, + reason: kEnumerableProperty, + code: kEnumerableProperty, + wasClean: kEnumerableProperty +}) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +Object.defineProperties(ErrorEvent.prototype, { + [Symbol.toStringTag]: { + value: 'ErrorEvent', + configurable: true + }, + message: kEnumerableProperty, + filename: kEnumerableProperty, + lineno: kEnumerableProperty, + colno: kEnumerableProperty, + error: kEnumerableProperty +}) -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; +webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).slice(1)); -} +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.MessagePort +) -function unsafeStringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 - return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); -} +const eventInit = [ + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +] -function stringify(arr, offset = 0) { - const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +webidl.converters.MessageEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'data', + converter: webidl.converters.any, + defaultValue: null + }, + { + key: 'origin', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lastEventId', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'source', + // Node doesn't implement WindowProxy or ServiceWorker, so the only + // valid value for source is a MessagePort. + converter: webidl.nullableConverter(webidl.converters.MessagePort), + defaultValue: null + }, + { + key: 'ports', + converter: webidl.converters['sequence'], + get defaultValue () { + return [] + } + } +]) - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); +webidl.converters.CloseEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'wasClean', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'code', + converter: webidl.converters['unsigned short'], + defaultValue: 0 + }, + { + key: 'reason', + converter: webidl.converters.USVString, + defaultValue: '' } +]) - return uuid; +webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'message', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'filename', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lineno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'colno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'error', + converter: webidl.converters.any + } +]) + +module.exports = { + MessageEvent, + CloseEvent, + ErrorEvent } -var _default = stringify; -exports["default"] = _default; /***/ }), -/***/ 618: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5444: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const { maxUnsigned16Bit } = __nccwpck_require__(9188) -var _rng = _interopRequireDefault(__nccwpck_require__(2042)); +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6113) +} catch { -var _stringify = __nccwpck_require__(5802); +} 
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +class WebsocketFrameSend { + /** + * @param {Buffer|undefined} data + */ + constructor (data) { + this.frameData = data + this.maskKey = crypto.randomBytes(4) + } -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; + createFrame (opcode) { + const bodyLength = this.frameData?.byteLength ?? 0 -let _clockseq; // Previous uuid creation time + /** @type {number} */ + let payloadLength = bodyLength // 0-125 + let offset = 6 + if (bodyLength > maxUnsigned16Bit) { + offset += 8 // payload length is next 8 bytes + payloadLength = 127 + } else if (bodyLength > 125) { + offset += 2 // payload length is next 2 bytes + payloadLength = 126 + } -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + const buffer = Buffer.allocUnsafe(bodyLength + offset) -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 + // Clear first 2 bytes, everything else is overwritten + buffer[0] = buffer[1] = 0 + buffer[0] |= 0x80 // FIN + buffer[0] = (buffer[0] & 0xF0) + opcode // opcode - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); + /*! ws. MIT License. Einar Otto Stangvik */ + buffer[offset - 4] = this.maskKey[0] + buffer[offset - 3] = this.maskKey[1] + buffer[offset - 2] = this.maskKey[2] + buffer[offset - 1] = this.maskKey[3] - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } + buffer[1] = payloadLength - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + if (payloadLength === 126) { + buffer.writeUInt16BE(bodyLength, 2) + } else if (payloadLength === 127) { + // Clear extended payload length + buffer[2] = buffer[3] = 0 + buffer.writeUIntBE(bodyLength, 4, 6) } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + buffer[1] |= 0x80 // MASK - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock + // mask body + for (let i = 0; i < bodyLength; i++) { + buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] + } - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + return buffer + } +} - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression +module.exports = { + WebsocketFrameSend +} - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval +/***/ }), - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested +/***/ 1688: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch +const { Writable } = __nccwpck_require__(2781) +const diagnosticsChannel = __nccwpck_require__(7643) +const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(9188) +const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(7578) +const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(5515) +const { WebsocketFrameSend } = __nccwpck_require__(5444) - msecs += 12219292800000; // `time_low` +// This code was influenced by ws released under the MIT license. +// Copyright (c) 2011 Einar Otto Stangvik +// Copyright (c) 2013 Arnout Kazemier and contributors +// Copyright (c) 2016 Luigi Pinca and contributors - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` +const channels = {} +channels.ping = diagnosticsChannel.channel('undici:websocket:ping') +channels.pong = diagnosticsChannel.channel('undici:websocket:pong') - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` +class ByteParser extends Writable { + #buffers = [] + #byteOffset = 0 - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + #state = parserStates.INFO - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + #info = {} + #fragments = [] - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + constructor (ws) { + super() - b[i++] = clockseq & 0xff; // `node` + this.ws = ws + } - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; + /** + * @param {Buffer} chunk + * @param {() => void} callback + */ + _write (chunk, _, callback) { + this.#buffers.push(chunk) + this.#byteOffset += chunk.length + + this.run(callback) } - return buf || (0, _stringify.unsafeStringify)(b); -} + /** + * Runs whenever a new chunk is received. + * Callback is called whenever there are no more chunks buffering, + * or not enough bytes are buffered to parse. + */ + run (callback) { + while (true) { + if (this.#state === parserStates.INFO) { + // If there aren't enough bytes to parse the payload length, etc. 
+ if (this.#byteOffset < 2) { + return callback() + } -var _default = v1; -exports["default"] = _default; + const buffer = this.consume(2) -/***/ }), + this.#info.fin = (buffer[0] & 0x80) !== 0 + this.#info.opcode = buffer[0] & 0x0F -/***/ 6888: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // If we receive a fragmented message, we use the type of the first + // frame to parse the full message as binary/text, when it's terminated + this.#info.originalOpcode ??= this.#info.opcode -"use strict"; + this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { + // Only text and binary frames can be fragmented + failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') + return + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + const payloadLength = buffer[1] & 0x7F -var _v = _interopRequireDefault(__nccwpck_require__(8392)); + if (payloadLength <= 125) { + this.#info.payloadLength = payloadLength + this.#state = parserStates.READ_DATA + } else if (payloadLength === 126) { + this.#state = parserStates.PAYLOADLENGTH_16 + } else if (payloadLength === 127) { + this.#state = parserStates.PAYLOADLENGTH_64 + } -var _md = _interopRequireDefault(__nccwpck_require__(9576)); + if (this.#info.fragmented && payloadLength > 125) { + // A fragmented frame can't be fragmented itself + failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') + return + } else if ( + (this.#info.opcode === opcodes.PING || + this.#info.opcode === opcodes.PONG || + this.#info.opcode === opcodes.CLOSE) && + payloadLength > 125 + ) { + // Control frames can have a payload length of 125 bytes MAX + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') + return + } else if (this.#info.opcode === opcodes.CLOSE) { + if (payloadLength === 1) { + failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') + return + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + const body = this.consume(payloadLength) -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; + this.#info.closeInfo = this.parseCloseBody(false, body) -/***/ }), + if (!this.ws[kSentClose]) { + // If an endpoint receives a Close frame and did not previously send a + // Close frame, the endpoint MUST send a Close frame in response. (When + // sending a Close frame in response, the endpoint typically echos the + // status code it received.) + const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(this.#info.closeInfo.code, 0) + const closeFrame = new WebsocketFrameSend(body) -/***/ 8392: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this.ws[kResponse].socket.write( + closeFrame.createFrame(opcodes.CLOSE), + (err) => { + if (!err) { + this.ws[kSentClose] = true + } + } + ) + } -"use strict"; + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. 
+ this.ws[kReadyState] = states.CLOSING + this.ws[kReceivedClose] = true + this.end() -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports.URL = exports.DNS = void 0; -exports["default"] = v35; + return + } else if (this.#info.opcode === opcodes.PING) { + // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in + // response, unless it already received a Close frame. + // A Pong frame sent in response to a Ping frame must have identical + // "Application data" -var _stringify = __nccwpck_require__(5802); + const body = this.consume(payloadLength) -var _parse = _interopRequireDefault(__nccwpck_require__(893)); + if (!this.ws[kReceivedClose]) { + const frame = new WebsocketFrameSend(body) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape + if (channels.ping.hasSubscribers) { + channels.ping.publish({ + payload: body + }) + } + } - const bytes = []; + this.#state = parserStates.INFO - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } else if (this.#info.opcode === opcodes.PONG) { + // A Pong frame MAY be sent unsolicited. This serves as a + // unidirectional heartbeat. A response to an unsolicited Pong frame is + // not expected. - return bytes; -} + const body = this.consume(payloadLength) -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; + if (channels.pong.hasSubscribers) { + channels.pong.publish({ + payload: body + }) + } -function v35(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - var _namespace; + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } + } else if (this.#state === parserStates.PAYLOADLENGTH_16) { + if (this.#byteOffset < 2) { + return callback() + } - if (typeof value === 'string') { - value = stringToBytes(value); - } + const buffer = this.consume(2) - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } + this.#info.payloadLength = buffer.readUInt16BE(0) + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.PAYLOADLENGTH_64) { + if (this.#byteOffset < 8) { + return callback() + } - if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` + const buffer = this.consume(8) + const upper = buffer.readUInt32BE(0) + // 2^31 is the maxinimum bytes an arraybuffer can contain + // on 32-bit systems. Although, on 64-bit systems, this is + // 2^53-1 bytes. 
+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e + if (upper > 2 ** 31 - 1) { + failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') + return + } - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; + const lower = buffer.readUInt32BE(4) - if (buf) { - offset = offset || 0; + this.#info.payloadLength = (upper << 8) + lower + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.READ_DATA) { + if (this.#byteOffset < this.#info.payloadLength) { + // If there is still more data in this chunk that needs to be read + return callback() + } else if (this.#byteOffset >= this.#info.payloadLength) { + // If the server sent multiple frames in a single chunk - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; + const body = this.consume(this.#info.payloadLength) + + this.#fragments.push(body) + + // If the frame is unfragmented, or a fragmented frame was terminated, + // a message was received + if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { + const fullMessage = Buffer.concat(this.#fragments) + + websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) + + this.#info = {} + this.#fragments.length = 0 + } + + this.#state = parserStates.INFO + } } - return buf; + if (this.#byteOffset > 0) { + continue + } else { + callback() + break + } } + } - return (0, _stringify.unsafeStringify)(bytes); - } // Function#name is not settable on some platforms (#270) + /** + * Take n bytes from the buffered Buffers + * @param {number} n + * @returns {Buffer|null} + */ + consume (n) { + if (n > this.#byteOffset) { + return null + } else if (n === 0) { + return emptyBuffer + } + if (this.#buffers[0].length === n) { + this.#byteOffset -= this.#buffers[0].length + return this.#buffers.shift() + } - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support + const buffer = Buffer.allocUnsafe(n) + let offset = 0 + while (offset !== n) { + const next = this.#buffers[0] + const { length } = next - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} + if (length + offset === n) { + buffer.set(this.#buffers.shift(), offset) + break + } else if (length + offset > n) { + buffer.set(next.subarray(0, n - offset), offset) + this.#buffers[0] = next.subarray(n - offset) + break + } else { + buffer.set(this.#buffers.shift(), offset) + offset += next.length + } + } -/***/ }), + this.#byteOffset -= n -/***/ 1186: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return buffer + } -"use strict"; + parseCloseBody (onlyCode, data) { + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 + /** @type {number|undefined} */ + let code + if (data.length >= 2) { + // _The WebSocket Connection Close Code_ is + // defined as the status code (Section 7.4) contained in the first Close + // control frame received by the application + code = data.readUInt16BE(0) + } -Object.defineProperty(exports, "__esModule", 
({ - value: true -})); -exports["default"] = void 0; + if (onlyCode) { + if (!isValidStatusCode(code)) { + return null + } -var _native = _interopRequireDefault(__nccwpck_require__(4974)); + return { code } + } -var _rng = _interopRequireDefault(__nccwpck_require__(2042)); + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 + /** @type {Buffer} */ + let reason = data.subarray(2) -var _stringify = __nccwpck_require__(5802); + // Remove BOM + if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { + reason = reason.subarray(3) + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (code !== undefined && !isValidStatusCode(code)) { + return null + } -function v4(options, buf, offset) { - if (_native.default.randomUUID && !buf && !options) { - return _native.default.randomUUID(); + try { + // TODO: optimize this + reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) + } catch { + return null + } + + return { code, reason } } - options = options || {}; + get closingInfo () { + return this.#info.closeInfo + } +} - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` +module.exports = { + ByteParser +} - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided +/***/ }), - if (buf) { - offset = offset || 0; +/***/ 7578: +/***/ ((module) => { - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } +"use strict"; - return buf; - } - return (0, _stringify.unsafeStringify)(rnds); +module.exports = { + kWebSocketURL: Symbol('url'), + kReadyState: Symbol('ready state'), + kController: Symbol('controller'), + kResponse: Symbol('response'), + kBinaryType: Symbol('binary type'), + kSentClose: Symbol('sent close'), + kReceivedClose: Symbol('received close'), + kByteParser: Symbol('byte parser') } -var _default = v4; -exports["default"] = _default; /***/ }), -/***/ 2385: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5515: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(7578) +const { states, opcodes } = __nccwpck_require__(9188) +const { MessageEvent, ErrorEvent } = __nccwpck_require__(2611) -var _v = _interopRequireDefault(__nccwpck_require__(8392)); +/* globals Blob */ -var _sha = _interopRequireDefault(__nccwpck_require__(6723)); +/** + * @param {import('./websocket').WebSocket} ws + */ +function isEstablished (ws) { + // If the server's response is validated as provided for above, it is + // said that _The WebSocket Connection is Established_ and that the + // WebSocket Connection is in the OPEN state. + return ws[kReadyState] === states.OPEN +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosing (ws) { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. 
+ return ws[kReadyState] === states.CLOSING +} -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosed (ws) { + return ws[kReadyState] === states.CLOSED +} -/***/ }), +/** + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e + * @param {EventTarget} target + * @param {EventInit | undefined} eventInitDict + */ +function fireEvent (e, target, eventConstructor = Event, eventInitDict) { + // 1. If eventConstructor is not given, then let eventConstructor be Event. -/***/ 4875: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // 2. Let event be the result of creating an event given eventConstructor, + // in the relevant realm of target. + // 3. Initialize event’s type attribute to e. + const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap -"use strict"; + // 4. Initialize any other IDL attributes of event as described in the + // invocation of this algorithm. + // 5. Return the result of dispatching event at target, with legacy target + // override flag set if set. + target.dispatchEvent(event) +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @param {import('./websocket').WebSocket} ws + * @param {number} type Opcode + * @param {Buffer} data application data + */ +function websocketMessageReceived (ws, type, data) { + // 1. If ready state is not OPEN (1), then return. + if (ws[kReadyState] !== states.OPEN) { + return + } -var _regex = _interopRequireDefault(__nccwpck_require__(7658)); + // 2. Let dataForEvent be determined by switching on type and binary type: + let dataForEvent -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (type === opcodes.TEXT) { + // -> type indicates that the data is Text + // a new DOMString containing data + try { + dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) + } catch { + failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') + return + } + } else if (type === opcodes.BINARY) { + if (ws[kBinaryType] === 'blob') { + // -> type indicates that the data is Binary and binary type is "blob" + // a new Blob object, created in the relevant Realm of the WebSocket + // object, that represents data as its raw data + dataForEvent = new Blob([data]) + } else { + // -> type indicates that the data is Binary and binary type is "arraybuffer" + // a new ArrayBuffer object, created in the relevant Realm of the + // WebSocket object, whose contents are data + dataForEvent = new Uint8Array(data).buffer + } + } -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); + // 3. Fire an event named message at the WebSocket object, using MessageEvent, + // with the origin attribute initialized to the serialization of the WebSocket + // object’s url's origin, and the data attribute initialized to dataForEvent. 
+ fireEvent('message', ws, MessageEvent, { + origin: ws[kWebSocketURL].origin, + data: dataForEvent + }) } -var _default = validate; -exports["default"] = _default; +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455 + * @see https://datatracker.ietf.org/doc/html/rfc2616 + * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 + * @param {string} protocol + */ +function isValidSubprotocol (protocol) { + // If present, this value indicates one + // or more comma-separated subprotocol the client wishes to speak, + // ordered by preference. The elements that comprise this value + // MUST be non-empty strings with characters in the range U+0021 to + // U+007E not including separator characters as defined in + // [RFC2616] and MUST all be unique strings. + if (protocol.length === 0) { + return false + } -/***/ }), + for (const char of protocol) { + const code = char.charCodeAt(0) -/***/ 694: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if ( + code < 0x21 || + code > 0x7E || + char === '(' || + char === ')' || + char === '<' || + char === '>' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' || + code === 32 || // SP + code === 9 // HT + ) { + return false + } + } -"use strict"; + return true +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 + * @param {number} code + */ +function isValidStatusCode (code) { + if (code >= 1000 && code < 1015) { + return ( + code !== 1004 && // reserved + code !== 1005 && // "MUST NOT be set as a status code" + code !== 1006 // "MUST NOT be set as a status code" + ) + } + return code >= 3000 && code <= 4999 +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/** + * @param {import('./websocket').WebSocket} ws + * @param {string|undefined} reason + */ +function failWebsocketConnection (ws, reason) { + const { [kController]: controller, [kResponse]: response } = ws -var _validate = _interopRequireDefault(__nccwpck_require__(4875)); + controller.abort() -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + if (response?.socket && !response.socket.destroyed) { + response.socket.destroy() + } -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); + if (reason) { + fireEvent('error', ws, ErrorEvent, { + error: new Error(reason) + }) } +} - return parseInt(uuid.slice(14, 15), 16); +module.exports = { + isEstablished, + isClosing, + isClosed, + fireEvent, + isValidSubprotocol, + isValidStatusCode, + failWebsocketConnection, + websocketMessageReceived } -var _default = version; -exports["default"] = _default; /***/ }), -/***/ 4256: +/***/ 4284: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var punycode = __nccwpck_require__(5477); -var mappingTable = __nccwpck_require__(2020); +const { webidl } = __nccwpck_require__(1744) +const { DOMException } = __nccwpck_require__(1037) +const { URLSerializer } = __nccwpck_require__(685) +const { getGlobalOrigin } = __nccwpck_require__(1246) +const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(9188) +const { + kWebSocketURL, + kReadyState, + kController, + kBinaryType, + kResponse, + kSentClose, + kByteParser +} = __nccwpck_require__(7578) +const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(5515) +const { establishWebSocketConnection } = __nccwpck_require__(5354) +const { WebsocketFrameSend } = __nccwpck_require__(5444) +const { ByteParser } = __nccwpck_require__(1688) +const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3983) +const { getGlobalDispatcher } = __nccwpck_require__(1892) +const { types } = __nccwpck_require__(3837) + +let experimentalWarned = false + +// https://websockets.spec.whatwg.org/#interface-definition +class WebSocket extends EventTarget { + #events = { + open: null, + error: null, + close: null, + message: null + } + + #bufferedAmount = 0 + #protocol = '' + #extensions = '' -var PROCESSING_OPTIONS = { - TRANSITIONAL: 0, - NONTRANSITIONAL: 1 -}; + /** + * @param {string} url + * @param {string|string[]} protocols + */ + constructor (url, protocols = []) { + super() -function normalize(str) { // fix bug in v8 - return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); -} + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) -function findStatus(val) { - var start = 0; - var end = mappingTable.length - 1; + if (!experimentalWarned) { + experimentalWarned = true + process.emitWarning('WebSockets are experimental, expect them to change at any time.', { + code: 'UNDICI-WS' + }) + } - while (start <= end) { - var mid = Math.floor((start + end) / 2); + const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) - var target = mappingTable[mid]; - if (target[0][0] <= val && target[0][1] >= val) { - return target; - } else if (target[0][0] > val) { - end = mid - 1; - } else { - start = mid + 1; - } - } + url = webidl.converters.USVString(url) + protocols = options.protocols - return null; -} + // 1. Let baseURL be this's relevant settings object's API base URL. + const baseURL = getGlobalOrigin() -var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. 
+ let urlRecord -function countSymbols(string) { - return string - // replace every surrogate pair with a BMP symbol - .replace(regexAstralSymbols, '_') - // then get the length - .length; -} + try { + urlRecord = new URL(url, baseURL) + } catch (e) { + // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. + throw new DOMException(e, 'SyntaxError') + } -function mapChars(domain_name, useSTD3, processing_option) { - var hasError = false; - var processed = ""; + // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". + if (urlRecord.protocol === 'http:') { + urlRecord.protocol = 'ws:' + } else if (urlRecord.protocol === 'https:') { + // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". + urlRecord.protocol = 'wss:' + } - var len = countSymbols(domain_name); - for (var i = 0; i < len; ++i) { - var codePoint = domain_name.codePointAt(i); - var status = findStatus(codePoint); + // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. + if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { + throw new DOMException( + `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, + 'SyntaxError' + ) + } - switch (status[1]) { - case "disallowed": - hasError = true; - processed += String.fromCodePoint(codePoint); - break; - case "ignored": - break; - case "mapped": - processed += String.fromCodePoint.apply(String, status[2]); - break; - case "deviation": - if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { - processed += String.fromCodePoint.apply(String, status[2]); - } else { - processed += String.fromCodePoint(codePoint); - } - break; - case "valid": - processed += String.fromCodePoint(codePoint); - break; - case "disallowed_STD3_mapped": - if (useSTD3) { - hasError = true; - processed += String.fromCodePoint(codePoint); - } else { - processed += String.fromCodePoint.apply(String, status[2]); - } - break; - case "disallowed_STD3_valid": - if (useSTD3) { - hasError = true; - } + // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" + // DOMException. + if (urlRecord.hash || urlRecord.href.endsWith('#')) { + throw new DOMException('Got fragment', 'SyntaxError') + } - processed += String.fromCodePoint(codePoint); - break; + // 8. If protocols is a string, set protocols to a sequence consisting + // of just that string. + if (typeof protocols === 'string') { + protocols = [protocols] } - } - return { - string: processed, - error: hasError - }; -} + // 9. If any of the values in protocols occur more than once or otherwise + // fail to match the requirements for elements that comprise the value + // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket + // protocol, then throw a "SyntaxError" DOMException. 
+ if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } -var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } -function validateLabel(label, processing_option) { - if (label.substr(0, 4) === "xn--") { - label = punycode.toUnicode(label); - processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; - } + // 10. Set this's url to urlRecord. + this[kWebSocketURL] = new URL(urlRecord.href) - var error = false; + // 11. Let client be this's relevant settings object. 
- if (normalize(label) !== label || - (label[3] === "-" && label[4] === "-") || - label[0] === "-" || label[label.length - 1] === "-" || - label.indexOf(".") !== -1 || - label.search(combiningMarksRegex) === 0) { - error = true; - } + // 12. Run this step in parallel: - var len = countSymbols(label); - for (var i = 0; i < len; ++i) { - var status = findStatus(label.codePointAt(i)); - if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || - (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && - status[1] !== "valid" && status[1] !== "deviation")) { - error = true; - break; - } - } + // 1. Establish a WebSocket connection given urlRecord, protocols, + // and client. + this[kController] = establishWebSocketConnection( + urlRecord, + protocols, + this, + (response) => this.#onConnectionEstablished(response), + options + ) - return { - label: label, - error: error - }; -} + // Each WebSocket object has an associated ready state, which is a + // number representing the state of the connection. Initially it must + // be CONNECTING (0). + this[kReadyState] = WebSocket.CONNECTING -function processing(domain_name, useSTD3, processing_option) { - var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); + // The extensions attribute must initially return the empty string. - var labels = result.string.split("."); - for (var i = 0; i < labels.length; ++i) { - try { - var validation = validateLabel(labels[i]); - labels[i] = validation.label; - result.error = result.error || validation.error; - } catch(e) { - result.error = true; - } + // The protocol attribute must initially return the empty string. + + // Each WebSocket object has an associated binary type, which is a + // BinaryType. Initially it must be "blob". + this[kBinaryType] = 'blob' } - return { - string: labels.join("."), - error: result.error - }; -} + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-close + * @param {number|undefined} code + * @param {string|undefined} reason + */ + close (code = undefined, reason = undefined) { + webidl.brandCheck(this, WebSocket) -module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { - var result = processing(domain_name, useSTD3, processing_option); - var labels = result.string.split("."); - labels = labels.map(function(l) { - try { - return punycode.toASCII(l); - } catch(e) { - result.error = true; - return l; + if (code !== undefined) { + code = webidl.converters['unsigned short'](code, { clamp: true }) } - }); - if (verifyDnsLength) { - var total = labels.slice(0, labels.length - 1).join(".").length; - if (total.length > 253 || total.length === 0) { - result.error = true; + if (reason !== undefined) { + reason = webidl.converters.USVString(reason) } - for (var i=0; i < labels.length; ++i) { - if (labels.length > 63 || labels.length === 0) { - result.error = true; - break; + // 1. If code is present, but is neither an integer equal to 1000 nor an + // integer in the range 3000 to 4999, inclusive, throw an + // "InvalidAccessError" DOMException. + if (code !== undefined) { + if (code !== 1000 && (code < 3000 || code > 4999)) { + throw new DOMException('invalid code', 'InvalidAccessError') } } - } - if (result.error) return null; - return labels.join("."); -}; + let reasonByteLength = 0 -module.exports.toUnicode = function(domain_name, useSTD3) { - var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + // 2. 
If reason is present, then run these substeps: + if (reason !== undefined) { + // 1. Let reasonBytes be the result of encoding reason. + // 2. If reasonBytes is longer than 123 bytes, then throw a + // "SyntaxError" DOMException. + reasonByteLength = Buffer.byteLength(reason) - return { - domain: result.string, - error: result.error - }; -}; + if (reasonByteLength > 123) { + throw new DOMException( + `Reason must be less than 123 bytes; received ${reasonByteLength}`, + 'SyntaxError' + ) + } + } -module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; + // 3. Run the first matching steps from the following list: + if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(this)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). + failWebsocketConnection(this, 'Connection was closed before it was established.') + this[kReadyState] = WebSocket.CLOSING + } else if (!isClosing(this)) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + + const frame = new WebsocketFrameSend() + + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket -/***/ }), + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + this[kSentClose] = true + } + }) -/***/ 4294: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + this[kReadyState] = WebSocket.CLOSING + } + } -module.exports = __nccwpck_require__(4219); + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-send + * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data + */ + send (data) { + webidl.brandCheck(this, WebSocket) + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) -/***/ }), + data = webidl.converters.WebSocketSendData(data) -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // 1. 
If this's ready state is CONNECTING, then throw an + // "InvalidStateError" DOMException. + if (this[kReadyState] === WebSocket.CONNECTING) { + throw new DOMException('Sent before connected.', 'InvalidStateError') + } -"use strict"; + // 2. Run the appropriate set of steps from the following list: + // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + if (!isEstablished(this) || isClosing(this)) { + return + } -var net = __nccwpck_require__(1808); -var tls = __nccwpck_require__(4404); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var events = __nccwpck_require__(2361); -var assert = __nccwpck_require__(9491); -var util = __nccwpck_require__(3837); + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + // If data is a string + if (typeof data === 'string') { + // If the WebSocket connection is established and the WebSocket + // closing handshake has not yet started, then the user agent + // must send a WebSocket Message comprised of the data argument + // using a text frame opcode; if the data cannot be sent, e.g. + // because it would need to be buffered but the buffer is full, + // the user agent must flag the WebSocket as full and then close + // the WebSocket connection. Any invocation of this method with a + // string argument that does not throw an exception must increase + // the bufferedAmount attribute by the number of bytes needed to + // express the argument as UTF-8. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.TEXT) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (types.isArrayBuffer(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need + // to be buffered but the buffer is full, the user agent must flag + // the WebSocket as full and then close the WebSocket connection. + // The data to be sent is the data stored in the buffer described + // by the ArrayBuffer object. Any invocation of this method with an + // ArrayBuffer argument that does not throw an exception must + // increase the bufferedAmount attribute by the length of the + // ArrayBuffer in bytes. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (ArrayBuffer.isView(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The + // data to be sent is the data stored in the section of the buffer + // described by the ArrayBuffer object that data references. 
Any + // invocation of this method with this kind of argument that does + // not throw an exception must increase the bufferedAmount attribute + // by the length of data’s buffer in bytes. + + const ab = Buffer.from(data, data.byteOffset, data.byteLength) + + const frame = new WebsocketFrameSend(ab) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += ab.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= ab.byteLength + }) + } else if (isBlobLike(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The data + // to be sent is the raw data represented by the Blob object. Any + // invocation of this method with a Blob argument that does not throw + // an exception must increase the bufferedAmount attribute by the size + // of the Blob object’s raw data, in bytes. + + const frame = new WebsocketFrameSend() + + data.arrayBuffer().then((ab) => { + const value = Buffer.from(ab) + frame.frameData = value + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + }) + } + } + get readyState () { + webidl.brandCheck(this, WebSocket) -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; + // The readyState getter steps are to return this's ready state. + return this[kReadyState] + } + get bufferedAmount () { + webidl.brandCheck(this, WebSocket) -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} + return this.#bufferedAmount + } -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + get url () { + webidl.brandCheck(this, WebSocket) -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} + // The url getter steps are to return this's url, serialized. + return URLSerializer(this[kWebSocketURL]) + } -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} + get extensions () { + webidl.brandCheck(this, WebSocket) + return this.#extensions + } -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; + get protocol () { + webidl.brandCheck(this, WebSocket) - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. 
- self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); + return this.#protocol + } -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + get onopen () { + webidl.brandCheck(this, WebSocket) - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; + return this.#events.open } - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); + set onopen (fn) { + webidl.brandCheck(this, WebSocket) - function onFree() { - self.emit('free', socket, options); + if (this.#events.open) { + this.removeEventListener('open', this.#events.open) } - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); + if (typeof fn === 'function') { + this.#events.open = fn + this.addEventListener('open', fn) + } else { + this.#events.open = null } - }); -}; + } -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + get onerror () { + webidl.brandCheck(this, WebSocket) - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port + return this.#events.error + } + + set onerror (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.error) { + this.removeEventListener('error', this.#events.error) + } + + if (typeof fn === 'function') { + this.#events.error = fn + this.addEventListener('error', fn) + } else { + this.#events.error = null } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); + + get onclose () { + webidl.brandCheck(this, WebSocket) + + return this.#events.close } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + set onclose (fn) { + webidl.brandCheck(this, WebSocket) - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; + if (this.#events.close) { + this.removeEventListener('close', this.#events.close) + } + + if (typeof fn === 'function') { + this.#events.close = fn + this.addEventListener('close', fn) + } else { + this.#events.close = null + } } - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); + get onmessage () { + webidl.brandCheck(this, WebSocket) + + return this.#events.message } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + set onmessage (fn) { + webidl.brandCheck(this, WebSocket) - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + if (this.#events.message) { + this.removeEventListener('message', this.#events.message) } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + + if (typeof fn === 'function') { + this.#events.message = fn + this.addEventListener('message', fn) + } else { + this.#events.message = null } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); } - function onError(cause) { - connectReq.removeAllListeners(); + get binaryType () { + webidl.brandCheck(this, WebSocket) - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); + return this[kBinaryType] } -}; -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); + set binaryType (type) { + webidl.brandCheck(this, WebSocket) - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); + if (type !== 'blob' && type !== 'arraybuffer') { + this[kBinaryType] = 'blob' + } else { + this[kBinaryType] = type + } } -}; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); + /** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + */ + #onConnectionEstablished (response) { + // processResponse is called when the "response’s header list has been received and initialized." + // once this happens, the connection is open + this[kResponse] = response + + const parser = new ByteParser(this) + parser.on('drain', function onParserDrain () { + this.ws[kResponse].socket.resume() + }) - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} + response.socket.ws = this + this[kByteParser] = parser + + // 1. Change the ready state to OPEN (1). + this[kReadyState] = states.OPEN + + // 2. 
Change the extensions attribute’s value to the extensions in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 + const extensions = response.headersList.get('sec-websocket-extensions') + + if (extensions !== null) { + this.#extensions = extensions + } + + // 3. Change the protocol attribute’s value to the subprotocol in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 + const protocol = response.headersList.get('sec-websocket-protocol') + + if (protocol !== null) { + this.#protocol = protocol + } + + // 4. Fire an event named open at the WebSocket object. + fireEvent('open', this) + } +} + +// https://websockets.spec.whatwg.org/#dom-websocket-connecting +WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING +// https://websockets.spec.whatwg.org/#dom-websocket-open +WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN +// https://websockets.spec.whatwg.org/#dom-websocket-closing +WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING +// https://websockets.spec.whatwg.org/#dom-websocket-closed +WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED + +Object.defineProperties(WebSocket.prototype, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors, + url: kEnumerableProperty, + readyState: kEnumerableProperty, + bufferedAmount: kEnumerableProperty, + onopen: kEnumerableProperty, + onerror: kEnumerableProperty, + onclose: kEnumerableProperty, + close: kEnumerableProperty, + onmessage: kEnumerableProperty, + binaryType: kEnumerableProperty, + send: kEnumerableProperty, + extensions: kEnumerableProperty, + protocol: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'WebSocket', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(WebSocket, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors +}) +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.DOMString +) -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; +webidl.converters['DOMString or sequence'] = function (V) { + if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { + return webidl.converters['sequence'](V) } - return host; // for v0.11 or later + + return webidl.converters.DOMString(V) } -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } +// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 +webidl.converters.WebSocketInit = webidl.dictionaryConverter([ + { + key: 'protocols', + converter: webidl.converters['DOMString or sequence'], + get defaultValue () { + return [] + } + }, + { + key: 'dispatcher', + converter: (V) => V, + get defaultValue () { + return getGlobalDispatcher() } + }, + { + key: 'headers', + converter: webidl.nullableConverter(webidl.converters.HeadersInit) } - return target; +]) + +webidl.converters['DOMString or sequence or WebSocketInit'] = 
function (V) { + if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { + return webidl.converters.WebSocketInit(V) + } + + return { protocols: webidl.converters['DOMString or sequence'](V) } } +webidl.converters.WebSocketSendData = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + return webidl.converters.BufferSource(V) } - console.error.apply(console, args); } -} else { - debug = function() {}; + + return webidl.converters.USVString(V) +} + +module.exports = { + WebSocket } -exports.debug = debug; // for test /***/ }), @@ -71475,7 +56632,7 @@ function getUserAgent() { return navigator.userAgent; } - if (typeof process === "object" && "version" in process) { + if (typeof process === "object" && process.version !== undefined) { return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; } @@ -74080,788 +59237,15690 @@ module.exports.parseURL = function (input, options) { /***/ }), -/***/ 3185: -/***/ ((module) => { +/***/ 3185: +/***/ ((module) => { + +"use strict"; + + +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + + + +/***/ }), + +/***/ 2940: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), + +/***/ 5185: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. + this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. 
+ // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + +/***/ }), + +/***/ 2877: +/***/ ((module) => { + +module.exports = eval("require")("encoding"); + + +/***/ }), + +/***/ 9491: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 852: +/***/ ((module) => { + +"use strict"; +module.exports = require("async_hooks"); + +/***/ }), + +/***/ 4300: +/***/ ((module) => { + +"use strict"; +module.exports = require("buffer"); + +/***/ }), + +/***/ 2081: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 6206: +/***/ ((module) => { + +"use strict"; +module.exports = require("console"); + +/***/ }), + +/***/ 6113: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + +/***/ 7643: +/***/ ((module) => { + +"use strict"; +module.exports = require("diagnostics_channel"); + +/***/ }), + +/***/ 2361: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 7147: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 3685: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 5158: +/***/ ((module) => { + +"use strict"; +module.exports = require("http2"); + +/***/ }), + +/***/ 5687: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 1808: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 5673: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 4492: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:stream"); + +/***/ }), + +/***/ 7261: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + +/***/ 2037: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 1017: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 4074: +/***/ ((module) => { + +"use strict"; +module.exports = require("perf_hooks"); + +/***/ }), + +/***/ 5477: +/***/ ((module) => { + +"use strict"; +module.exports = require("punycode"); + +/***/ }), + +/***/ 3477: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + +/***/ 2781: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 5356: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream/web"); + +/***/ }), + +/***/ 1576: +/***/ ((module) => { + +"use strict"; +module.exports = require("string_decoder"); + +/***/ }), + +/***/ 4404: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 6224: +/***/ ((module) => { + +"use strict"; 
+module.exports = require("tty"); + +/***/ }), + +/***/ 7310: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 3837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 9830: +/***/ ((module) => { + +"use strict"; +module.exports = require("util/types"); + +/***/ }), + +/***/ 1267: +/***/ ((module) => { + +"use strict"; +module.exports = require("worker_threads"); + +/***/ }), + +/***/ 9796: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + +/***/ }), + +/***/ 2960: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(4492).Writable) +const inherits = (__nccwpck_require__(7261).inherits) + +const StreamSearch = __nccwpck_require__(1142) + +const PartStream = __nccwpck_require__(1620) +const HeaderParser = __nccwpck_require__(2032) + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + 
+Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer + + +/***/ }), + +/***/ 2032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = (__nccwpck_require__(5673).EventEmitter) +const inherits = (__nccwpck_require__(7261).inherits) +const getLimit = __nccwpck_require__(1467) + +const StreamSearch = __nccwpck_require__(1142) + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser + + +/***/ }), + +/***/ 1620: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const inherits = (__nccwpck_require__(7261).inherits) +const ReadableStream = (__nccwpck_require__(4492).Readable) + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream + + +/***/ }), + +/***/ 1142: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = (__nccwpck_require__(5673).EventEmitter) +const inherits = (__nccwpck_require__(7261).inherits) + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH + + +/***/ }), + +/***/ 727: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(4492).Writable) +const { inherits } = __nccwpck_require__(7261) +const Dicer = __nccwpck_require__(2960) + +const MultipartParser = __nccwpck_require__(2183) +const UrlencodedParser = __nccwpck_require__(8306) +const parseParams = __nccwpck_require__(1854) + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports["default"] = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer + + +/***/ }), + +/***/ 2183: 
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = __nccwpck_require__(4492) +const { inherits } = __nccwpck_require__(7261) + +const Dicer = __nccwpck_require__(2960) + +const parseParams = __nccwpck_require__(1854) +const decodeText = __nccwpck_require__(4619) +const basename = __nccwpck_require__(8647) +const getLimit = __nccwpck_require__(1467) + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) { + contype = 
parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (!boy._events.file) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart + + +/***/ }), + +/***/ 8306: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Decoder = __nccwpck_require__(7100) +const decodeText = __nccwpck_require__(4619) +const getLimit = __nccwpck_require__(1467) + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } 
else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded + + +/***/ }), + +/***/ 7100: +/***/ ((module) => { + +"use strict"; + + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder + + +/***/ }), + +/***/ 8647: +/***/ ((module) => { + +"use strict"; + + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? '' : path) +} + + +/***/ }), + +/***/ 4619: +/***/ (function(module) { + +"use strict"; + + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? 
+ case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch (e) { } + } + return typeof data === 'string' + ? data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText + + +/***/ }), + +/***/ 1467: +/***/ ((module) => { + +"use strict"; + + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} + + +/***/ }), + +/***/ 1854: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* eslint-disable object-property-newline */ + + +const decodeText = __nccwpck_require__(4619) + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': 
'\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + 
'%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state 
= STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams + + +/***/ }), + +/***/ 2989: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = __nccwpck_require__(9203); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. 
ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. 
+ * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); + + +/***/ }), + +/***/ 3973: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(4052); +const paginator_1 = __nccwpck_require__(6412); +const promisify_1 = __nccwpck_require__(9203); +const fs = __importStar(__nccwpck_require__(7147)); +const mime = __importStar(__nccwpck_require__(3583)); +const path = __importStar(__nccwpck_require__(1017)); +const p_limit_1 = __importDefault(__nccwpck_require__(7684)); +const util_1 = __nccwpck_require__(3837); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const util_js_1 = __nccwpck_require__(9258); +const acl_js_1 = __nccwpck_require__(2989); +const file_js_1 = __nccwpck_require__(4713); +const iam_js_1 = __nccwpck_require__(352); +const notification_js_1 = __nccwpck_require__(1178); +const storage_js_1 = __nccwpck_require__(6007); +const signer_js_1 = __nccwpck_require__(9019); +const stream_1 = __nccwpck_require__(2781); +const url_1 = __nccwpck_require__(7310); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. 
+ * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. 
You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. 
+ * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. 
Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. + */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. 
+ * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. + */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. 
+ *     **Note**: For configuring a raw-formatted rule object to be passed as `action`
+ *     please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.
+ * @property {object} condition Condition a bucket must meet before the
+ *     action occurs on the bucket. Refer to the following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.
+ * @property {string} [storageClass] When using the `setStorageClass`
+ *     action, provide this option to dictate which storage class the object
+ *     should update to. Please see
+ *     [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions.
+ */
+/**
+ * Add an object lifecycle management rule to the bucket.
+ *
+ * By default, an Object Lifecycle Management rule provided to this method
+ * will be included in the existing policy. To replace all existing rules,
+ * supply the `options` argument, setting `append` to `false`.
+ *
+ * To add multiple rules, pass a list to the `rule` parameter. Calling this
+ * function multiple times asynchronously does not guarantee that all rules
+ * are added correctly.
+ *
+ * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management}
+ * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}
+ *
+ * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects
+ *     in this bucket.
+ * @param {string|object} rule.action The action to be taken upon matching of
+ *     all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.
+ *     **Note**: For configuring a raw-formatted rule object to be passed as `action`
+ *     please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.
+ * @param {object} rule.condition Condition a bucket must meet before the
+ *     action occurs on the bucket. Refer to the following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.
+ * @param {string} [rule.storageClass] When using the `setStorageClass`
+ *     action, provide this option to dictate which storage class the object
+ *     should update to.
+ * @param {AddLifecycleRuleOptions} [options] Configuration object.
+ * @param {boolean} [options.append=true] Append the new rule to the existing
+ *     policy.
+ * @param {SetBucketMetadataCallback} [callback] Callback function.
+ * @returns {Promise}
+ *
+ * @example
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const bucket = storage.bucket('albums');
+ *
+ * //-
+ * // Automatically have an object deleted from this bucket once it is 3 years
+ * // of age.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     age: 365 * 3 // Specified in days.
+ *   }
+ * }, function(err, apiResponse) {
+ *   if (err) {
+ *     // Error handling omitted.
+ *   }
+ *
+ *   const lifecycleRules = bucket.metadata.lifecycle.rule;
+ *
+ *   // Iterate over the Object Lifecycle Management rules on this bucket.
+ *   lifecycleRules.forEach(lifecycleRule => {});
+ * });
+ *
+ * //-
+ * // By default, the rule you provide will be added to the existing policy.
+ * // Optionally, you can disable this behavior to replace all of the
+ * // pre-existing rules.
+ * //-
+ * const options = {
+ *   append: false
+ * };
+ *
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     age: 365 * 3 // Specified in days.
+ *   }
+ * }, options, function(err, apiResponse) {
+ *   if (err) {
+ *     // Error handling omitted.
+ *   }
+ *
+ *   // All rules have been replaced with the new "delete" rule.
+ *
+ *   // Iterate over the Object Lifecycle Management rules on this bucket.
+ *   lifecycleRules.forEach(lifecycleRule => {});
+ * });
+ *
+ * //-
+ * // For objects created before 2018, "downgrade" the storage class.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'setStorageClass',
+ *   storageClass: 'COLDLINE',
+ *   condition: {
+ *     createdBefore: new Date('2018')
+ *   }
+ * }, function(err, apiResponse) {});
+ *
+ * //-
+ * // Delete objects created before 2016 which have the Coldline storage
+ * // class.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     matchesStorageClass: [
+ *       'COLDLINE'
+ *     ],
+ *     createdBefore: new Date('2016')
+ *   }
+ * }, function(err, apiResponse) {});
+ *
+ * //-
+ * // Delete object that has a noncurrent timestamp that is at least 100 days.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     daysSinceNoncurrentTime: 100
+ *   }
+ * }, function(err, apiResponse) {});
+ *
+ * //-
+ * // Delete object that has a noncurrent timestamp before 2020-01-01.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     noncurrentTimeBefore: new Date('2020-01-01')
+ *   }
+ * }, function(err, apiResponse) {});
+ *
+ * //-
+ * // Delete object that has a customTime that is at least 100 days.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     daysSinceCustomTime: 100
+ *   }
+ * }, function(err, apiResponse) {});
+ *
+ * //-
+ * // Delete object that has a customTime before 2020-01-01.
+ * //-
+ * bucket.addLifecycleRule({
+ *   action: 'delete',
+ *   condition: {
+ *     customTimeBefore: new Date('2020-01-01')
+ *   }
+ * }, function(err, apiResponse) {});
+ * ```
+ */
+addLifecycleRule(rule, optionsOrCallback, callback) {
+    let options = {};
+    if (typeof optionsOrCallback === 'function') {
+        callback = optionsOrCallback;
+    }
+    else if (optionsOrCallback) {
+        options = optionsOrCallback;
+    }
+    options = options || {};
+    const rules = Array.isArray(rule) ? rule : [rule];
+    for (const curRule of rules) {
+        if (curRule.condition.createdBefore instanceof Date) {
+            curRule.condition.createdBefore = curRule.condition.createdBefore
+                .toISOString()
+                .replace(/T.+$/, '');
+        }
+        if (curRule.condition.customTimeBefore instanceof Date) {
+            curRule.condition.customTimeBefore = curRule.condition.customTimeBefore
+                .toISOString()
+                .replace(/T.+$/, '');
+        }
+        if (curRule.condition.noncurrentTimeBefore instanceof Date) {
+            curRule.condition.noncurrentTimeBefore =
+                curRule.condition.noncurrentTimeBefore
+                    .toISOString()
+                    .replace(/T.+$/, '');
+        }
+    }
+    if (options.append === false) {
+        this.setMetadata({ lifecycle: { rule: rules } }, options, callback);
+        return;
+    }
+    // The default behavior appends the previously-defined lifecycle rules with
+    // the new ones just passed in by the user.
+    this.getMetadata((err, metadata) => {
+        var _a, _b;
+        if (err) {
+            callback(err);
+            return;
+        }
+        const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule)
+            ? (_b = metadata.lifecycle) === null || _b === void 0 ?
void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime.contentType(destinationFile.name); + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [projection=noAcl] Set of properties to return. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise}
+ * @throws {Error} If a valid topic is not provided.
+ * @see Notification#create
+ *
+ * @example
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const myBucket = storage.bucket('my-bucket');
+ *
+ * const callback = function(err, notification, apiResponse) {
+ *   if (!err) {
+ *     // The notification was created successfully.
+ *   }
+ * };
+ *
+ * myBucket.createNotification('my-topic', callback);
+ *
+ * //-
+ * // Configure the notification by providing Notification metadata.
+ * //-
+ * const metadata = {
+ *   objectNamePrefix: 'prefix-'
+ * };
+ *
+ * myBucket.createNotification('my-topic', metadata, callback);
+ *
+ * //-
+ * // If the callback is omitted, we'll return a Promise.
+ * //-
+ * myBucket.createNotification('my-topic').then(function(data) {
+ *   const notification = data[0];
+ *   const apiResponse = data[1];
+ * });
+ *
+ * ```
+ * @example include:samples/createNotification.js
+ * region_tag:storage_create_bucket_notifications
+ * Another example:
+ */
+createNotification(topic, optionsOrCallback, callback) {
+    let options = {};
+    if (typeof optionsOrCallback === 'function') {
+        callback = optionsOrCallback;
+    }
+    else if (optionsOrCallback) {
+        options = optionsOrCallback;
+    }
+    const topicIsObject = topic !== null && typeof topic === 'object';
+    if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        topic = topic.name;
+    }
+    if (typeof topic !== 'string') {
+        throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED);
+    }
+    const body = Object.assign({ topic }, options);
+    if (body.topic.indexOf('projects') !== 0) {
+        body.topic = 'projects/{{projectId}}/topics/' + body.topic;
+    }
+    body.topic = '//pubsub.googleapis.com/' + body.topic;
+    if (!body.payloadFormat) {
+        body.payloadFormat = 'JSON_API_V1';
+    }
+    const query = {};
+    if (body.userProject) {
+        query.userProject = body.userProject;
+        delete body.userProject;
+    }
+    this.request({
+        method: 'POST',
+        uri: '/notificationConfigs',
+        json: (0, util_js_1.convertObjKeysToSnakeCase)(body),
+        qs: query,
+        maxRetries: 0, //explicitly set this value since this is a non-idempotent function
+    }, (err, apiResponse) => {
+        if (err) {
+            callback(err, null, apiResponse);
+            return;
+        }
+        const notification = this.notification(apiResponse.id);
+        notification.metadata = apiResponse;
+        callback(null, notification, apiResponse);
+    });
+}
+/**
+ * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles}
+ *     for all of the supported properties.
+ * @property {boolean} [force] Suppress errors until all files have been
+ *     processed.
+ */
+/**
+ * @callback DeleteFilesCallback
+ * @param {?Error|?Error[]} err Request error, if any, or array of errors from
+ *     files that were not able to be deleted.
+ * @param {object} [apiResponse] The full API response.
+ */
+/**
+ * Iterate over the bucket's files, calling `file.delete()` on each.
+ *
+ * This is not an atomic request. A delete attempt will be
+ * made for each file individually. Any one can fail, in which case only a
+ * portion of the files you intended to be deleted would have been.
+ *
+ * Operations are performed in parallel, up to 10 at once. The first error
+ * breaks the loop and will execute the provided callback with it. Specify
+ * `{ force: true }` to suppress the errors until all files have had a chance
+ * to be processed.
+ *
+ * File preconditions cannot be passed to this function.
It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. + * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. 
+ * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *

+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. 
+ * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which
+ * incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
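+ *
+ * A rough sketch of recursively walking these simulated "directories" by
+ * feeding each returned prefix back in as a new `prefix` query (the `walk`
+ * helper is hypothetical and pagination is omitted for brevity):
+ * ```
+ * // Assumes the `bucket` from the surrounding examples; with
+ * // `autoPaginate: false` the promise resolves to
+ * // [files, nextQuery, apiResponse].
+ * async function walk(prefix) {
+ *   const [files, , apiResponse] = await bucket.getFiles({
+ *     autoPaginate: false,
+ *     delimiter: '/',
+ *     prefix
+ *   });
+ *   files.forEach(file => console.log(file.name));
+ *   for (const subPrefix of apiResponse.prefixes || []) {
+ *     await walk(subPrefix);
+ *   }
+ * }
+ * ```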
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Currently only supports "list" (HTTP: GET). + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. + * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. 
+ * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. 
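+ *
+ * A rough sketch of the typical flow, with an illustrative duration and
+ * assuming the promise forms of `getMetadata()` and {@link Bucket#lock}:
+ * ```
+ * // Set a 6 month policy, then lock it at the bucket's current
+ * // metageneration. Locking is irreversible.
+ * await bucket.setRetentionPeriod(15780000);
+ * const [metadata] = await bucket.getMetadata();
+ * await bucket.lock(metadata.metageneration);
+ * ```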
+ * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. 
If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. 
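+ *
+ * A rough sketch of what this convenience method wraps (hypothetical local
+ * path, minimal error handling):
+ * ```
+ * const fs = require('fs');
+ * fs.createReadStream('/local/path/image.png')
+ *   .pipe(bucket.file('image.png').createWriteStream())
+ *   .on('error', err => {})
+ *   .on('finish', () => {
+ *     // The remote object now exists.
+ *   });
+ * ```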
+ * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. 
CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. + * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. 
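+ *
+ * //-
+ * // Editor's sketch, not from the library source: one way to stream an
+ * // upload from a URL with {@link File#createWriteStream}, assuming Node's
+ * // built-in `https` module and a hypothetical `https://example.com/image.png`.
+ * //-
+ * const https = require('https');
+ * https.get('https://example.com/image.png', function(res) {
+ *   res.pipe(bucket.file('image-from-url.png').createWriteStream())
+ *     .on('error', function(err) {})
+ *     .on('finish', function() {
+ *       // The remote file has been written to your bucket.
+ *     });
+ * });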
+ * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. 
+ const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification'], +}); + + +/***/ }), + +/***/ 2665: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Channel = void 0; +const index_js_1 = __nccwpck_require__(4052); +const promisify_1 = __nccwpck_require__(9203); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. + */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); + + +/***/ }), + +/***/ 5810: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = __nccwpck_require__(7147); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 
0x34f4f86a, 0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
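+ *
+ * Editor's sketch, added for illustration only: hashing the ASCII string
+ * 'data' yields the base64 digest 'rth90Q==' quoted in the CRC32C generator
+ * callback docs elsewhere in this bundle.
+ * ```js
+ * const crc32c = new CRC32C();
+ * crc32c.update(Buffer.from('data'));
+ * crc32c.toString();           // 'rth90Q=='
+ * crc32c.validate('rth90Q=='); // true
+ * ```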
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => crc32c.update(d)) + .on('end', resolve) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; + + +/***/ }), + +/***/ 4713: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(4052); +const promisify_1 = __nccwpck_require__(9203); +const compressible_1 = __importDefault(__nccwpck_require__(6763)); +const crypto = __importStar(__nccwpck_require__(6113)); +const fs = __importStar(__nccwpck_require__(7147)); +const mime_1 = __importDefault(__nccwpck_require__(9994)); +const resumableUpload = __importStar(__nccwpck_require__(2851)); +const stream_1 = __nccwpck_require__(2781); +const zlib = __importStar(__nccwpck_require__(9796)); +const storage_js_1 = __nccwpck_require__(6007); +const bucket_js_1 = __nccwpck_require__(3973); +const acl_js_1 = __nccwpck_require__(2989); +const signer_js_1 = __nccwpck_require__(9019); +const util_js_1 = __nccwpck_require__(8064); +const duplexify_1 = __importDefault(__nccwpck_require__(6599)); +const util_js_2 = __nccwpck_require__(9258); +const crc32c_js_1 = __nccwpck_require__(5810); +const hash_stream_validator_js_1 = __nccwpck_require__(725); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @private + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +class RequestError extends Error { +} 
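+// Editor's note, for illustration only: GS_URL_REGEXP above is the pattern
+// File#copy uses to recognize "gs://" destinations. With a hypothetical
+// bucket and object name it captures the two parts separately:
+//
+//   GS_URL_REGEXP.exec('gs://my-bucket/path/to/image.png');
+//   // => ['gs://my-bucket/path/to/image.png', 'my-bucket', 'path/to/image.png']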
+exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. + * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. 
+ * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. 
+ * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? + if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. + * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //

<h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *

<h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. + * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. 
Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
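Editorial sketch tied to the overhead note above: a small-file upload with the resumable behaviour switched off, assuming a placeholder local path and object name.

```
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('small-config.json');

// For objects well under 10MB, a simple (non-resumable) upload avoids the
// extra round trip needed to open a resumable session.
fs.createReadStream('./small-config.json')
  .pipe(file.createWriteStream({
    resumable: false,
    metadata: {contentType: 'application/json'},
  }))
  .on('error', err => console.error('upload failed', err))
  .on('finish', () => console.log('upload complete'));
```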

+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

<h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

<h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

<h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //

<h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = (0, compressible_1.default)(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + // We know that the file exists the server - now we can truncate/write to a file + receivedData = true; + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', callback) + .on('finish', callback); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
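Editorial sketch: the same customer-supplied-key flow with the AES-256 key loaded from an environment variable instead of generated inline; `CSEK_BASE64` is a hypothetical variable name, and the full upload/download example from the original docs follows below.

```
const {Storage} = require('@google-cloud/storage');

// Assumes the 32-byte key was created once, e.g. with
// crypto.randomBytes(32).toString('base64'), and stored out of band.
const encryptionKey = Buffer.from(process.env.CSEK_BASE64 || '', 'base64');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('my-file');

// Requests made through this File instance now carry the
// x-goog-encryption-* headers added by setEncryptionKey().
file.setEncryptionKey(encryptionKey);

file.download()
  .then(([contents]) => console.log(contents.toString()))
  .catch(console.error);
```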
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. 
+ */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
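Editorial sketch of the promise form of `generateSignedPostPolicyV2()`, with placeholder names and limits; the `.then()` example from the original docs continues below, and the exact browser form-field layout is defined by the XML API POST Object documentation linked above.

```
const {Storage} = require('@google-cloud/storage');

async function createUploadPolicy() {
  const storage = new Storage();
  const file = storage.bucket('my-bucket').file('user-upload.jpg');

  const [policy] = await file.generateSignedPostPolicyV2({
    equals: [['$Content-Type', 'image/jpeg']],
    expires: Date.now() + 60 * 60 * 1000, // one hour from now
    contentLengthRange: {min: 0, max: 5 * 1024 * 1024},
  });

  // policy.string    - the plain-text policy document
  // policy.base64    - the value a browser form submits as its policy field
  // policy.signature - the accompanying base64 signature
  return policy;
}

createUploadPolicy().catch(console.error);
```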
+ * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } + conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. 
That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. 
+ * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.googleapis.com/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + }; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. 
+ * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so too, if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so too, if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function.
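Editorial sketch: the examples that follow in the original docs use the legacy `request` package, so here is the same read flow with a V4 signed URL and Node's built-in `fetch` (Node 18+); bucket, object name, and expiry are placeholders.

```
const {Storage} = require('@google-cloud/storage');

async function readViaSignedUrl() {
  const storage = new Storage();
  const file = storage.bucket('my-bucket').file('my-file');

  // A read-only V4 URL that expires in 15 minutes.
  const [url] = await file.getSignedUrl({
    version: 'v4',
    action: 'read',
    expires: Date.now() + 15 * 60 * 1000,
  });

  // Anyone holding this URL can GET the object until it expires.
  const res = await fetch(url);
  console.log(res.status, (await res.text()).slice(0, 100));
}

readViaSignedUrl().catch(console.error);
```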
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). 
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *

+ * There is some overhead when using a resumable upload that can cause
+ * noticeable performance degradation while uploading a series of small
+ * files. When uploading files smaller than 10 MB, it is recommended
+ * that the resumable feature be disabled.
+ *

+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || Buffer.isBuffer(data)) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + if (metadata.retention !== undefined) { + options.overrideUnlockedRetention = true; + } + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const uploadStream = resumableUpload.upload({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }); + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + ], +}); + + +/***/ }), + +/***/ 725: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashStreamValidator = void 0; +const crypto_1 = __nccwpck_require__(6113); +const stream_1 = __nccwpck_require__(2781); +const crc32c_js_1 = __nccwpck_require__(5810); +const file_js_1 = __nccwpck_require__(4713); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. 
+ let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); + + +/***/ }), + +/***/ 4654: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HmacKey = void 0; +const index_js_1 = __nccwpck_require__(4052); +const storage_js_1 = __nccwpck_require__(6007); +const promisify_1 = __nccwpck_require__(9203); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. 
+ * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. 
+ * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); + + +/***/ }), + +/***/ 352: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = __nccwpck_require__(9203); +const util_js_1 = __nccwpck_require__(9258); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). 
+ * @property {number} [requestedPolicyVersion] The version of IAM policies to
+ * request. If a policy with a condition is requested without setting
+ * this, the server will return an error. This must be set to a value
+ * of 3 to retrieve IAM policies containing conditions. This is to
+ * prevent client code that isn't aware of IAM conditions from
+ * interpreting and modifying policies incorrectly. The service might
+ * return a policy with version lower than the one that was requested,
+ * based on the feature syntax in the policy fetched.
+ * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}
+ * @property {string} [userProject] The ID of the project which will be
+ * billed for the request.
+ */
+ /**
+ * @typedef {array} GetPolicyResponse
+ * @property {Policy} 0 The policy.
+ * @property {object} 1 The full API response.
+ */
+ /**
+ * @typedef {object} Policy
+ * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles.
+ * @property {string} [policy.etag] Etags are used to perform a read-modify-write.
+ * @property {number} [policy.version] The syntax schema version of the Policy.
+ * To set an IAM policy with conditional binding, this field must be set to
+ * 3 or greater.
+ * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}
+ */
+ /**
+ * @typedef {object} PolicyBinding
+ * @property {string} role Role that is assigned to members.
+ * @property {string[]} members Specifies the identities requesting access for the bucket.
+ * @property {Expr} [condition] The condition that is associated with this binding.
+ */
+ /**
+ * @typedef {object} Expr
+ * @property {string} [title] An optional title for the expression, i.e. a
+ * short string describing its purpose. This can be used e.g. in UIs
+ * which allow entering the expression.
+ * @property {string} [description] An optional description of the
+ * expression. This is a longer text which describes the expression,
+ * e.g. when hovered over it in a UI.
+ * @property {string} expression Textual representation of an expression in
+ * Common Expression Language syntax. The application context of the
+ * containing message determines which well-known feature set of CEL
+ * is supported.
+ *
+ * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions
+ */
+ /**
+ * Get the IAM policy.
+ *
+ * @param {GetPolicyOptions} [options] Request options.
+ * @param {GetPolicyCallback} [callback] Callback function.
+ * @returns {Promise}
+ *
+ * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: getIamPolicy API Documentation}
+ *
+ * @example
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const bucket = storage.bucket('my-bucket');
+ *
+ * bucket.iam.getPolicy(
+ *     {requestedPolicyVersion: 3},
+ *     function(err, policy, apiResponse) {
+ *
+ *     },
+ * );
+ *
+ * //-
+ * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Iam); + + +/***/ }), + +/***/ 7577: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = __nccwpck_require__(4052); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); +var storage_js_1 = __nccwpck_require__(6007); +Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); +Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); +var bucket_js_1 = __nccwpck_require__(3973); +Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); +__exportStar(__nccwpck_require__(5810), exports); +var channel_js_1 = __nccwpck_require__(2665); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); +var file_js_1 = __nccwpck_require__(4713); +Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); +__exportStar(__nccwpck_require__(725), exports); +var hmacKey_js_1 = __nccwpck_require__(4654); +Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); +var iam_js_1 = __nccwpck_require__(352); +Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); +var notification_js_1 = __nccwpck_require__(1178); +Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); +__exportStar(__nccwpck_require__(5594), exports); + + +/***/ }), + +/***/ 4052: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.ApiError = 
exports.ServiceObject = exports.Service = void 0; +var service_js_1 = __nccwpck_require__(7370); +Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); +var service_object_js_1 = __nccwpck_require__(3409); +Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); +var util_js_1 = __nccwpck_require__(8064); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); +Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); + + +/***/ }), + +/***/ 3409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = __nccwpck_require__(9203); +const events_1 = __nccwpck_require__(2361); +const util_js_1 = __nccwpck_require__(8064); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). + this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. 
a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
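+        // The PATCH response body becomes the new locally cached metadata
+        // (`this.metadata`) before the callback is invoked.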
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); + + +/***/ }), + +/***/ 7370: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +const uuid = __importStar(__nccwpck_require__(5840)); +const util_js_1 = __nccwpck_require__(8064); +const util_js_2 = __nccwpck_require__(9258); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. 
+ * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + const reqCfg = { + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + token: options.token, + }; + this.makeAuthenticatedRequest = + util_js_1.util.makeAuthenticatedRequestFactory(reqCfg); + this.authClient = this.makeAuthenticatedRequest.authClient; + this.getCredentials = this.makeAuthenticatedRequest.getCredentials; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? 
reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; + + +/***/ }), + +/***/ 8064: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = __nccwpck_require__(3497); +const ent = __importStar(__nccwpck_require__(1151)); +const google_auth_library_1 = __nccwpck_require__(810); +const retry_request_1 = __importDefault(__nccwpck_require__(3515)); +const stream_1 = __nccwpck_require__(2781); +const teeny_request_1 = __nccwpck_require__(6886); +const uuid = __importStar(__nccwpck_require__(5840)); +const service_js_1 = __nccwpck_require__(7370); +const util_js_1 = __nccwpck_require__(9258); +const duplexify_1 = __importDefault(__nccwpck_require__(6599)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. 
+ * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(ent.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. 
+ * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. 
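+     * @example
+     * // Illustrative sketch only; plain objects stand in for ApiError instances.
+     * util.shouldRetryRequest({code: 503}); // true - retriable server error
+     * util.shouldRetryRequest({code: 404}); // false
+     * util.shouldRetryRequest({code: 403, errors: [{reason: 'rateLimitExceeded'}]}); // true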
+ */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. + */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` to `GoogleAuth`, if available + const config = { + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + }; + authClient = new google_auth_library_1.GoogleAuth(config); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. 
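+                            // Ask the auth client for one, then retry decorating the request.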
+ try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. + return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. 
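+     * @example
+     * // Hedged sketch; the URI and options here are illustrative only.
+     * util.makeRequest(
+     *   {method: 'GET', uri: 'https://storage.googleapis.com/storage/v1/b/my-bucket'},
+     *   {retryOptions: {autoRetry: true, maxRetries: 3}},
+     *   (err, body, resp) => {}
+     * );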
+ */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. 
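+     * @example
+     * // Hedged sketch: the `{{projectId}}` placeholder is replaced and
+     * // pagination-only options (`autoPaginate`, etc.) are stripped.
+     * util.decorateRequest(
+     *   {uri: 'https://storage.googleapis.com/storage/v1/b/{{projectId}}-logs', autoPaginate: true},
+     *   'my-project'
+     * );
+     * // => {uri: 'https://storage.googleapis.com/storage/v1/b/my-project-logs'}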
+ */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? [{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; + + +/***/ }), + +/***/ 1178: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = void 0; +const index_js_1 = __nccwpck_require__(4052); +const promisify_1 = __nccwpck_require__(9203); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. 
+ * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); + + +/***/ }), + +/***/ 2851: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; +const abort_controller_1 = __importDefault(__nccwpck_require__(1659)); +const crypto_1 = __nccwpck_require__(6113); +const gaxios = __importStar(__nccwpck_require__(9555)); +const google_auth_library_1 = __nccwpck_require__(810); +const stream_1 = __nccwpck_require__(2781); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const uuid = __importStar(__nccwpck_require__(5840)); +const util_js_1 = __nccwpck_require__(9258); +const util_js_2 = __nccwpck_require__(8064); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. 
+ * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + this.apiEndpoint = 'https://storage.googleapis.com'; + if (cfg.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) { + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. 
+ * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. + */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. 
We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. + * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. 
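The buffer plumbing above (`_write`, `prependLocalBufferToUpstream`, `pullFromChunkBuffer`, `waitForNextChunk`) hands data downstream in request-sized slices. The standalone sketch below reproduces only the slicing rule, without the event signalling; `pullUpTo` is a hypothetical helper, not part of the library.

```js
// Illustrative sketch of the pullFromChunkBuffer slicing logic: yield whole buffers
// until `limit` bytes are consumed, splitting the buffer that crosses the limit and
// pushing the unread remainder back to the front of the queue.
function* pullUpTo(buffers, limit) {
  while (limit > 0 && buffers.length) {
    let buf = buffers.shift();
    if (buf.byteLength > limit) {
      buffers.unshift(buf.subarray(limit)); // keep the tail for the next request
      buf = buf.subarray(0, limit);
    }
    limit -= buf.byteLength;
    yield buf;
  }
}

// e.g. two queued writes, but only 5 bytes wanted for this chunk:
const queue = [Buffer.from('abcd'), Buffer.from('efgh')];
console.log([...pullUpTo(queue, 5)].map(b => b.toString())); // [ 'abcd', 'e' ]
console.log(queue.map(b => b.toString()));                   // [ 'fgh' ]
```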
+ if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. + this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. 
Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. + const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. 
+ __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. + const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. 
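The chunked-mode bookkeeping above reduces to a little arithmetic: the inclusive ending byte for `Content-Range`, and the next offset derived from the server's `Range` header after a 308. A minimal sketch with a hypothetical `chunkHeaders` helper and illustrative sizes:

```js
// Sketch of the Content-Range arithmetic used for multi-chunk requests.
const CHUNK = 8 * 1024 * 1024; // illustrative 8 MiB chunk size

function chunkHeaders(offset, bytesToUpload, totalObjectSize = '*') {
  // `- 1` because the ending byte in Content-Range is inclusive.
  const endingByte = offset + bytesToUpload - 1;
  return {
    'Content-Length': bytesToUpload,
    'Content-Range': `bytes ${offset}-${endingByte}/${totalObjectSize}`,
  };
}

console.log(chunkHeaders(0, CHUNK));
// { 'Content-Length': 8388608, 'Content-Range': 'bytes 0-8388607/*' }

// On a 308 response, the upper value of the server's Range header is the last
// byte persisted; the next chunk starts one past it.
const nextOffset = rangeHeader => Number(rangeHeader.split('-')[1]) + 1;
console.log(nextOffset('bytes=0-8388607')); // 8388608
```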
+ this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. + this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
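At the protocol level, `checkUploadStatus` above is an empty `PUT` with `Content-Range: bytes */*` against the resumable session URI; a 308 response plus a `Range` header reports how many bytes the server has persisted. A rough sketch of that probe outside the library, assuming a placeholder `sessionUri` and Node 18+ global `fetch`:

```js
// Protocol-level sketch of the status probe performed by checkUploadStatus().
async function probeUploadStatus(sessionUri) {
  const res = await fetch(sessionUri, {
    method: 'PUT',
    headers: {'Content-Range': 'bytes */*'}, // ask for status, send no data
  });
  if (res.status === 308) {
    // e.g. "bytes=0-8388607" means 8388608 bytes are already persisted.
    const range = res.headers.get('range');
    return range ? Number(range.split('-')[1]) + 1 : 0;
  }
  if (res.ok) return Infinity; // upload already finalized
  throw new Error(`Unexpected status ${res.status}`);
}
```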
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error('Retry limit exceeded - ' + resp.data)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +exports.upload = upload; +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +exports.createURI = createURI; +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} +exports.checkUploadStatus = checkUploadStatus; + + +/***/ }), + +/***/ 9019: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(__nccwpck_require__(6113)); +const url = __importStar(__nccwpck_require__(7310)); +const storage_js_1 = __nccwpck_require__(6007); +const util_js_1 = __nccwpck_require__(9258); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @private + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(authClient, bucket, file) { + this.bucket = bucket; + this.file = file; + this.authClient = authClient; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.googleapis.com`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? (0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + const authClient = this.authClient; + try { + const signature = await authClient.sign(blobToSign); + const credentials = await authClient.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST); + extensionHeaders.host = fqdn.host; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + const credentials = await this.authClient.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.authClient.sign(blobToSign); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. + // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. 
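The V4 signing path above (canonical headers, canonical request, SHA-256 digest, RSA signature) is normally reached through the public `getSignedUrl` call. A short sketch with placeholder bucket and object names and a 15-minute expiry, well inside the seven-day V4 limit enforced above:

```js
// Sketch: produce a V4 signed read URL through the public API, which is
// presumed to drive the URLSigner.getSignedUrlV4 path shown above.
const {Storage} = require('@google-cloud/storage');

async function signedReadUrl() {
  const storage = new Storage();
  const [url] = await storage
    .bucket('my-bucket')        // placeholder
    .file('report.pdf')         // placeholder
    .getSignedUrl({
      version: 'v4',
      action: 'read',
      expires: Date.now() + 15 * 60 * 1000, // 15 minutes from now
    });
  return url;
}

signedReadUrl().then(console.log).catch(console.error);
```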
+ } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; + + +/***/ }), + +/***/ 6007: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = __nccwpck_require__(4052); +const paginator_1 = __nccwpck_require__(6412); +const promisify_1 = __nccwpck_require__(9203); +const stream_1 = __nccwpck_require__(2781); +const bucket_js_1 = __nccwpck_require__(3973); +const channel_js_1 = __nccwpck_require__(2665); +const file_js_1 = __nccwpck_require__(4713); +const util_js_1 = __nccwpck_require__(9258); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const hmacKey_js_1 = __nccwpck_require__(4654); +const crc32c_js_1 = __nccwpck_require__(5810); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an 
HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. 
+ * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. + * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
+ * ACLs
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
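A sketch of overriding the retry defaults documented above when constructing the client; the values are illustrative, and anything omitted falls back to the defaults shown earlier (autoRetry=true, maxRetries=3, retryDelayMultiplier=2, totalTimeout=600 s, maxRetryDelay=64 s).

```js
// Sketch: customize retryOptions at construction time.
const {Storage} = require('@google-cloud/storage');

const storage = new Storage({
  retryOptions: {
    autoRetry: true,
    maxRetries: 5,
    retryDelayMultiplier: 2,
    totalTimeout: 300,  // seconds
    maxRetryDelay: 32,  // seconds
    // retryableErrorFn and idempotencyStrategy may also be supplied here.
  },
});

// e.g. list buckets with the customized retry behaviour
storage.getBuckets().then(([buckets]) => {
  buckets.forEach(b => console.log(b.name));
});
```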
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + let apiEndpoint = 'https://storage.googleapis.com'; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? 
(_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. 
+ * @param {object} apiResponse The full API response.
+ */
+ /**
+ * Metadata to set for the bucket.
+ *
+ * @typedef {object} CreateBucketRequest
+ * @property {boolean} [archive=false] Specify the storage class as Archive.
+ * @property {object} [autoclass.enabled=false] Specify whether Autoclass is
+ * enabled for the bucket.
+ * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if
+ * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE.
+ * @property {boolean} [coldline=false] Specify the storage class as Coldline.
+ * @property {Cors[]} [cors=[]] Specify the CORS configuration to use.
+ * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets.
+ * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.
+ * @property {boolean} [dra=false] Specify the storage class as Durable Reduced
+ * Availability.
+ * @property {boolean} [enableObjectRetention=false] Specify whether or not object retention should be enabled on this bucket.
+ * @property {string} [location] Specify the bucket's location. If specifying
+ * a dual-region, the `customPlacementConfig` property should be set in conjunction.
+ * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.
+ * @property {boolean} [multiRegional=false] Specify the storage class as
+ * Multi-Regional.
+ * @property {boolean} [nearline=false] Specify the storage class as Nearline.
+ * @property {boolean} [regional=false] Specify the storage class as Regional.
+ * @property {boolean} [requesterPays=false] **Early Access Testers Only**
+ * Force the use of the User Project metadata field to assign operational
+ * costs when an operation is made on a Bucket and its objects.
+ * @property {string} [rpo] For dual-region buckets, controls whether turbo
+ * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`).
+ * @property {boolean} [standard=true] Specify the storage class as Standard.
+ * @property {string} [storageClass] The new storage class. (`standard`,
+ * `nearline`, `coldline`, or `archive`).
+ * **Note:** The storage classes `multi_regional`, `regional`, and
+ * `durable_reduced_availability` are now legacy and will be deprecated in
+ * the future.
+ * @property {Versioning} [versioning=undefined] Specify the versioning status.
+ * @property {string} [userProject] The ID of the project which will be billed
+ * for the request.
+ */
+ /**
+ * Create a bucket.
+ *
+ * Cloud Storage uses a flat namespace, so you can't create a bucket with
+ * a name that is already in use. For more information, see
+ * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}.
+ *
+ * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation}
+ * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}
+ *
+ * @param {string} name Name of the bucket to create.
+ * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.
+ * @param {CreateBucketCallback} [callback] Callback function.
+ * @returns {Promise}
+ * @throws {Error} If a name is not provided.
+ * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. 
+ * @property {string} etag Used to perform a read-modify-write of the key.
+ * @property {string} id The resource name of the HMAC key.
+ * @property {string} projectId The project ID.
+ * @property {string} serviceAccountEmail The service account's email this
+ * HMAC key is created for.
+ * @property {string} state The state of this HMAC key. One of "ACTIVE",
+ * "INACTIVE" or "DELETED".
+ * @property {string} timeCreated The creation time of the HMAC key in
+ * RFC 3339 format.
+ * @property {string} [updated] The time this HMAC key was last updated in
+ * RFC 3339 format.
+ */
+ /**
+ * @typedef {array} CreateHmacKeyResponse
+ * @property {HmacKey} 0 The HmacKey instance created from API response.
+ * @property {string} 1 The HMAC key's secret used to access the XML API.
+ * @property {object} 3 The raw API response.
+ */
+ /**
+ * @callback CreateHmacKeyCallback Callback function.
+ * @param {?Error} err Request error, if any.
+ * @param {HmacKey} hmacKey The HmacKey instance created from API response.
+ * @param {string} secret The HMAC key's secret used to access the XML API.
+ * @param {object} apiResponse The raw API response.
+ */
+ /**
+ * Create an HMAC key associated with a service account to authenticate
+ * requests to the Cloud Storage XML API.
+ *
+ * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}
+ *
+ * @param {string} serviceAccountEmail The service account's email address
+ * for which the HMAC key is created.
+ * @param {CreateHmacKeyCallback} [callback] Callback function.
+ * @return {Promise}
+ *
+ * @example
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ *
+ * // Replace with your service account's email address
+ * const serviceAccountEmail =
+ * 'my-service-account@appspot.gserviceaccount.com';
+ *
+ * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) {
+ * if (!err) {
+ * // Securely store the secret for use with the XML API.
+ * }
+ * });
+ *
+ * //-
+ * // If the callback is omitted, we'll return a Promise.
+ * //-
+ * storage.createHmacKey(serviceAccountEmail)
+ * .then((response) => {
+ * const hmacKey = response[0];
+ * const secret = response[1];
+ * // Securely store the secret for use with the XML API.
+ * });
+ * ```
+ */
+ createHmacKey(serviceAccountEmail, optionsOrCb, cb) {
+ if (typeof serviceAccountEmail !== 'string') {
+ throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT);
+ }
+ const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb);
+ const query = Object.assign({}, options, { serviceAccountEmail });
+ const projectId = query.projectId || this.projectId;
+ delete query.projectId;
+ this.request({
+ method: 'POST',
+ uri: `/projects/${projectId}/hmacKeys`,
+ qs: query,
+ maxRetries: 0, //explicitly set this value since this is a non-idempotent function
+ }, (err, resp) => {
+ if (err) {
+ callback(err, null, null, resp);
+ return;
+ }
+ const metadata = resp.metadata;
+ const hmacKey = this.hmacKey(metadata.accessId, {
+ projectId: metadata.projectId,
+ });
+ hmacKey.metadata = resp.metadata;
+ callback(null, hmacKey, resp.secret, resp);
+ });
+ }
+ /**
+ * Query object for listing buckets.
+ *
+ * @typedef {object} GetBucketsRequest
+ * @property {boolean} [autoPaginate=true] Have pagination handled
+ * automatically.
+ * @property {number} [maxApiCalls] Maximum number of API calls to make.
+ * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? 
Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. 
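+ *
+ * A minimal promise-style sketch of that pattern ('ACCESS_ID' is a placeholder):
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const [hmacKey] = await storage.hmacKey('ACCESS_ID').get();
+ * // hmacKey.metadata is now populated with the HmacKeyMetadata fields.
+ * ```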
+ * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); + + +/***/ }), + +/***/ 5594: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; }; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; - -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; }; - - - -/***/ }), - -/***/ 2940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = __nccwpck_require__(4713); +const p_limit_1 = __importDefault(__nccwpck_require__(7684)); +const path = __importStar(__nccwpck_require__(1017)); +const fs_1 = __nccwpck_require__(7147); +const crc32c_js_1 = __nccwpck_require__(5810); +const google_auth_library_1 = __nccwpck_require__(810); +const fast_xml_parser_1 = __nccwpck_require__(2603); +const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const crypto_1 = __nccwpck_require__(6113); +const util_js_1 = __nccwpck_require__(8064); +const util_js_2 = __nccwpck_require__(9258); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. 
+ * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; } - return ret - } } - - -/***/ }), - -/***/ 4091: -/***/ ((module) => { - -"use strict"; - -module.exports = function (Yallist) { - Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. 
+ * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. 
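+ * // A sketch combining options from UploadManyFilesOptions; the directory,
+ * // prefix, and limit below are placeholders:
+ * // await transferManager.uploadManyFiles('/local/directory', {concurrencyLimit: 2, prefix: 'backup', skipIfExists: true});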
+ * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = filePath + .split(path.sep) + .join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. 
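+ * // A sketch using stripPrefix from DownloadManyFilesOptions to drop the GCS
+ * // prefix from the local file names; the folder name is a placeholder:
+ * // await transferManager.downloadManyFiles('test-folder', {stripPrefix: 'test-folder/'});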
+ * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + promises.push(limit(() => file.download(passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. 
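+ * // A sketch using chunkSizeBytes and validation from DownloadFileInChunksOptions;
+ * // the values are placeholders:
+ * // await transferManager.downloadFileInChunks(bucket.file('large-file.txt'), {chunkSizeBytes: 64 * 1024 * 1024, validation: 'crc32c'});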
+ * //-
+ * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'));
+ * // Your local directory now contains:
+ * // - "large-file.txt" (with the contents from my-bucket.large-file.txt)
+ * ```
+ *
+ */
+ async downloadFileInChunks(fileOrName, options = {}) {
+ let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;
+ let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT);
+ const noReturnData = Boolean(options.noReturnData);
+ const promises = [];
+ const file = typeof fileOrName === 'string'
+ ? this.bucket.file(fileOrName)
+ : fileOrName;
+ const fileInfo = await file.get();
+ const size = parseInt(fileInfo[0].metadata.size.toString());
+ // If the file size does not meet the threshold download it as a single chunk.
+ if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) {
+ limit = (0, p_limit_1.default)(1);
+ chunkSize = size;
+ }
+ let start = 0;
+ const filePath = options.destination || path.basename(file.name);
+ const fileToWrite = await fs_1.promises.open(filePath, 'w');
+ while (start < size) {
+ const chunkStart = start;
+ let chunkEnd = start + chunkSize - 1;
+ chunkEnd = chunkEnd > size ? size : chunkEnd;
+ promises.push(limit(async () => {
+ const resp = await file.download({
+ start: chunkStart,
+ end: chunkEnd,
+ [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED,
+ });
+ const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart);
+ if (noReturnData)
+ return;
+ return result.buffer;
+ }));
+ start += chunkSize;
+ }
+ let chunks;
+ try {
+ chunks = await Promise.all(promises);
+ }
+ finally {
+ await fileToWrite.close();
+ }
+ if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) {
+ const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath);
+ if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) {
+ const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH);
+ mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';
+ throw mismatchError;
+ }
+ }
+ if (noReturnData)
+ return;
+ return [Buffer.concat(chunks, size)];
+ }
+ /**
+ * @typedef {object} UploadFileInChunksOptions
+ * @property {number} [concurrencyLimit] The number of concurrently executing promises
+ * to use when uploading the file.
+ * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.
+ * @property {string} [uploadName] Name of the file when saving to GCS. If omitted, the name is taken from the file path.
+ * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified
+ * defaults to the specified concurrency limit.
+ * @property {string} [uploadId] If specified attempts to resume a previous upload.
+ * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk
+ * specified in partsMap
+ * @property {object} [headers] headers to be sent when initiating the multipart upload.
+ * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload}
+ * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set,
+ * failures will be automatically aborted.
+ *
+ */
+ /**
+ * Upload a large file in chunks utilizing parallel upload operations.
If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. + for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? 
yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } } - } } +exports.TransferManager = TransferManager; /***/ }), -/***/ 665: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 9258: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -module.exports = Yallist - -Yallist.Node = Node -Yallist.create = Yallist - -function Yallist (list) { - var self = this - if (!(self instanceof Yallist)) { - self = new Yallist() - } - - self.tail = null - self.head = null - self.length = 0 - - if (list && typeof list.forEach === 'function') { - list.forEach(function (item) { - self.push(item) - }) - } else if (arguments.length > 0) { - for (var i = 0, l = arguments.length; i < l; i++) { - self.push(arguments[i]) - } - } - - return self -} - -Yallist.prototype.removeNode = function (node) { - if (node.list !== this) { - throw new Error('removing node which does not belong to this list') - } - - var next = node.next - var prev = node.prev - - if (next) { - next.prev = prev - } - - if (prev) { - prev.next = next - } - - if (node === this.head) { - this.head = next - } - if (node === this.tail) { - this.tail = prev - } - - node.list.length-- - node.next = null - node.prev = null - node.list = null - - return next -} - -Yallist.prototype.unshiftNode = function (node) { - if (node === this.head) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var head = this.head - node.list = this - node.next = head - if (head) { - head.prev = node - } - - this.head = node - if (!this.tail) { - this.tail = node - } - this.length++ -} - -Yallist.prototype.pushNode = function (node) { - if (node === this.tail) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var tail = this.tail - node.list = this - node.prev = tail - if (tail) { - tail.next = node - } - this.tail = node - if (!this.head) { - this.head = node - } - this.length++ -} - -Yallist.prototype.push = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - push(this, arguments[i]) +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; } - return this.length + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.PassThroughShim = exports.getModuleFormat = exports.getDirName = exports.getUserAgentString = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; +const path = __importStar(__nccwpck_require__(1017)); +const querystring = __importStar(__nccwpck_require__(3477)); +const stream_1 = __nccwpck_require__(2781); +const url = __importStar(__nccwpck_require__(7310)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(8568); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; } - -Yallist.prototype.unshift = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - unshift(this, arguments[i]) - } - return this.length +exports.normalize = normalize; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); } - -Yallist.prototype.pop = function () { - if (!this.tail) { - return undefined - } - - var res = this.tail.value - this.tail = this.tail.prev - if (this.tail) { - this.tail.next = null - } else { - this.head = null - } - this.length-- - return res +exports.objectEntries = objectEntries; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); } - -Yallist.prototype.shift = function () { - if (!this.head) { - return undefined - } - - var res = this.head.value - this.head = this.head.next - if (this.head) { - this.head.prev = null - } else { - this.tail = null - } - this.length-- - return res +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. 
+ */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? '%2F' : '/'); } - -Yallist.prototype.forEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.head, i = 0; walker !== null; i++) { - fn.call(thisp, walker.value, i, this) - walker = walker.next - } +exports.encodeURI = encodeURI; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); } - -Yallist.prototype.forEachReverse = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { - fn.call(thisp, walker.value, i, this) - walker = walker.prev +exports.qsStringify = qsStringify; +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; } + return newObj; } - -Yallist.prototype.get = function (n) { - for (var i = 0, walker = this.head; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.next - } - if (i === n && walker !== null) { - return walker.value - } +exports.objectKeyToLowercase = objectKeyToLowercase; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); } - -Yallist.prototype.getReverse = function (n) { - for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.prev - } - if (i === n && walker !== null) { - return walker.value +exports.unicodeJSONStringify = unicodeJSONStringify; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. 
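+ *
+ * @example
+ * ```
+ * // A sketch of the expected mapping, given the implementation below:
+ * convertObjKeysToSnakeCase({maxRetries: 3, retryDelayMultiplier: 2});
+ * // => {max_retries: 3, retry_delay_multiplier: 2}
+ * ```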
+ */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; } -} - -Yallist.prototype.map = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.head; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.next + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); } - return res -} - -Yallist.prototype.mapReverse = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.tail; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.prev + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); } - return res + return obj; } - -Yallist.prototype.reduce = function (fn, initial) { - var acc - var walker = this.head - if (arguments.length > 1) { - acc = initial - } else if (this.head) { - walker = this.head.next - acc = this.head.value - } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = 0; walker !== null; i++) { - acc = fn(acc, walker.value, i) - walker = walker.next - } - - return acc +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; } - -Yallist.prototype.reduceReverse = function (fn, initial) { - var acc - var walker = this.tail - if (arguments.length > 1) { - acc = initial - } else if (this.tail) { - walker = this.tail.prev - acc = this.tail.value +exports.formatAsUTCISO = formatAsUTCISO; +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. 
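+ *
+ * @example
+ * ```
+ * // Version numbers here are illustrative; the segment depends on the runtime:
+ * getRuntimeTrackingString(); // e.g. 'gl-node/18.19.0' under Node.js, 'gl-deno/1.40.0' under Deno
+ * ```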
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = this.length - 1; walker !== null; i--) { - acc = fn(acc, walker.value, i) - walker = walker.prev - } - - return acc -} - -Yallist.prototype.toArray = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.head; walker !== null; i++) { - arr[i] = walker.value - walker = walker.next - } - return arr -} - -Yallist.prototype.toArrayReverse = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.tail; walker !== null; i++) { - arr[i] = walker.value - walker = walker.prev - } - return arr -} - -Yallist.prototype.slice = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = 0, walker = this.head; walker !== null && i < from; i++) { - walker = walker.next - } - for (; walker !== null && i < to; i++, walker = walker.next) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.sliceReverse = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { - walker = walker.prev - } - for (; walker !== null && i > from; i--, walker = walker.prev) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.splice = function (start, deleteCount, ...nodes) { - if (start > this.length) { - start = this.length - 1 - } - if (start < 0) { - start = this.length + start; - } - - for (var i = 0, walker = this.head; walker !== null && i < start; i++) { - walker = walker.next + return `gl-node/${process.versions.node}`; } - - var ret = [] - for (var i = 0; walker && i < deleteCount; i++) { - ret.push(walker.value) - walker = this.removeNode(walker) - } - if (walker === null) { - walker = this.tail - } - - if (walker !== this.head && walker !== this.tail) { - walker = walker.prev - } - - for (var i = 0; i < nodes.length; i++) { - walker = insert(this, walker, nodes[i]) - } - return ret; } - -Yallist.prototype.reverse = function () { - var head = this.head - var tail = this.tail - for (var walker = head; walker !== null; walker = walker.prev) { - var p = walker.prev - walker.prev = walker.next - walker.next = p - } - this.head = tail - this.tail = head - return this +exports.getRuntimeTrackingString = getRuntimeTrackingString; +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. 
+ */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; } - -function insert (self, node, value) { - var inserted = node === self.head ? - new Node(value, null, node, self) : - new Node(value, node, node.next, self) - - if (inserted.next === null) { - self.tail = inserted - } - if (inserted.prev === null) { - self.head = inserted +exports.getUserAgentString = getUserAgentString; +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; } - - self.length++ - - return inserted + return dirToUse; } - -function push (self, item) { - self.tail = new Node(item, self.tail, null, self) - if (!self.head) { - self.head = self.tail - } - self.length++ +exports.getDirName = getDirName; +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; } - -function unshift (self, item) { - self.head = new Node(item, null, self.head, self) - if (!self.tail) { - self.tail = self.head +exports.getModuleFormat = getModuleFormat; +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; } - self.length++ -} - -function Node (value, prev, next, list) { - if (!(this instanceof Node)) { - return new Node(value, prev, next, list) + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); } - - this.list = list - this.value = value - - if (prev) { - prev.next = this - this.prev = prev - } else { - this.prev = null + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); } - - if (next) { - next.prev = this - this.next = next - } else { - this.next = null + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); } } - -try { - // add if support for Symbol.iterator is present - __nccwpck_require__(4091)(Yallist) -} catch (er) {} +exports.PassThroughShim = PassThroughShim; /***/ }), -/***/ 5185: -/***/ ((module) => { - -class Node { - /// value; - /// next; - - constructor(value) { - this.value = value; - - // TODO: Remove this when targeting Node.js 12. - this.next = undefined; - } -} - -class Queue { - // TODO: Use private class fields when targeting Node.js 12. 
- // #_head; - // #_tail; - // #_size; - - constructor() { - this.clear(); - } - - enqueue(value) { - const node = new Node(value); - - if (this._head) { - this._tail.next = node; - this._tail = node; - } else { - this._head = node; - this._tail = node; - } - - this._size++; - } - - dequeue() { - const current = this._head; - if (!current) { - return; - } - - this._head = this._head.next; - this._size--; - return current.value; - } - - clear() { - this._head = undefined; - this._tail = undefined; - this._size = 0; - } +/***/ 8568: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - get size() { - return this._size; - } +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. - * [Symbol.iterator]() { - let current = this._head; +/* eslint-disable node/no-missing-require */ - while (current) { - yield current.value; - current = current.next; - } - } +function getPackageJSON() { + return __nccwpck_require__(5458); } -module.exports = Queue; - - -/***/ }), - -/***/ 2877: -/***/ ((module) => { - -module.exports = eval("require")("encoding"); - - -/***/ }), - -/***/ 8418: -/***/ ((module) => { - -module.exports = eval("require")("request"); - - -/***/ }), - -/***/ 9491: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 4300: -/***/ ((module) => { - -"use strict"; -module.exports = require("buffer"); - -/***/ }), - -/***/ 2081: -/***/ ((module) => { - -"use strict"; -module.exports = require("child_process"); - -/***/ }), - -/***/ 6113: -/***/ ((module) => { - -"use strict"; -module.exports = require("crypto"); - -/***/ }), - -/***/ 2361: -/***/ ((module) => { - -"use strict"; -module.exports = require("events"); - -/***/ }), - -/***/ 7147: -/***/ ((module) => { - -"use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 3685: -/***/ ((module) => { - -"use strict"; -module.exports = require("http"); - -/***/ }), - -/***/ 5687: -/***/ ((module) => { - -"use strict"; -module.exports = require("https"); - -/***/ }), - -/***/ 1808: -/***/ ((module) => { - -"use strict"; -module.exports = require("net"); - -/***/ }), - -/***/ 2037: -/***/ ((module) => { - -"use strict"; -module.exports = require("os"); - -/***/ }), - -/***/ 1017: -/***/ ((module) => { - -"use strict"; -module.exports = require("path"); - -/***/ }), - -/***/ 5477: -/***/ ((module) => { - -"use strict"; -module.exports = require("punycode"); - -/***/ }), - -/***/ 3477: -/***/ ((module) => { - -"use strict"; -module.exports = require("querystring"); - -/***/ }), - -/***/ 2781: -/***/ ((module) => { - -"use strict"; -module.exports = require("stream"); - -/***/ }), - -/***/ 4404: -/***/ ((module) => { - -"use strict"; -module.exports = require("tls"); - -/***/ }), - -/***/ 6224: -/***/ ((module) => { - -"use strict"; -module.exports = require("tty"); - -/***/ }), - -/***/ 7310: -/***/ ((module) => { - -"use strict"; -module.exports = require("url"); - -/***/ }), - -/***/ 3837: -/***/ ((module) => { +exports.getPackageJSON = 
getPackageJSON; -"use strict"; -module.exports = require("util"); /***/ }), -/***/ 9796: +/***/ 5458: /***/ ((module) => { "use strict"; -module.exports = require("zlib"); +module.exports = JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.7.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- --sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha 
build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"^4.0.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","compressible":"^2.0.12","duplexify":"^4.0.0","ent":"^2.2.0","fast-xml-parser":"^4.3.0","gaxios":"^6.0.2","google-auth-library":"^9.0.0","mime":"^3.0.0","mime-types":"^2.0.8","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/compressible":"^2.0.0","@types/ent":"^2.2.1","@types/mime":"^3.0.0","@types/mime-types":"^2.1.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^20.4.4","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^8.0.0","form-data":"^4.0.0","gapic-tools":"^0.2.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^4.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.4.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^17.0.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); /***/ }), -/***/ 5458: +/***/ 2077: /***/ ((module) => { "use strict"; -module.exports = JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"6.9.4","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=12"},"repository":"googleapis/nodejs-storage","main":"./build/src/index.js","types":"./build/src/index.d.ts","files":["build/src","!build/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"predocs":"npm run compile","docs":"jsdoc -c .jsdoc.js","system-test":"mocha build/system-test --timeout 600000 --exit","conformance-test":"mocha --parallel build/conformance-test/ --require build/conformance-test/globalHooks.js","preconformance-test":"npm run compile","presystem-test":"npm run compile","test":"c8 mocha build/test","pretest":"npm run compile","lint":"gts check","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","all-test":"npm test && npm run system-test && npm run samples-test","check":"gts check","clean":"gts clean","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","docs-test":"linkinator docs","predocs-test":"npm run docs","benchwrapper":"node bin/benchwrapper.js","prelint":"cd samples; npm link ../; npm install","precompile":"gts 
clean"},"dependencies":{"@google-cloud/paginator":"^3.0.7","@google-cloud/projectify":"^3.0.0","@google-cloud/promisify":"^3.0.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","compressible":"^2.0.12","duplexify":"^4.0.0","ent":"^2.2.0","extend":"^3.0.2","gaxios":"^5.0.0","google-auth-library":"^8.0.1","mime":"^3.0.0","mime-types":"^2.0.8","p-limit":"^3.0.1","retry-request":"^5.0.0","teeny-request":"^8.0.0","uuid":"^8.0.0"},"devDependencies":{"@google-cloud/pubsub":"^3.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/compressible":"^2.0.0","@types/ent":"^2.2.1","@types/extend":"^3.0.0","@types/mime":"^3.0.0","@types/mime-types":"^2.1.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^18.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^10.0.0","@types/tmp":"0.2.3","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^7.0.0","form-data":"^4.0.0","gts":"^3.1.0","jsdoc":"^4.0.0","jsdoc-fresh":"^2.0.0","jsdoc-region-tag":"^2.0.0","linkinator":"^4.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.3.0","node-fetch":"^2.6.7","proxyquire":"^2.1.3","sinon":"^15.0.0","tmp":"^0.2.0","typescript":"^4.6.4","yargs":"^17.3.1"}}'); +module.exports = JSON.parse('{"Aacute;":"Á","Aacute":"Á","aacute;":"á","aacute":"á","Abreve;":"Ă","abreve;":"ă","ac;":"∾","acd;":"∿","acE;":"∾̳","Acirc;":"Â","Acirc":"Â","acirc;":"â","acirc":"â","acute;":"´","acute":"´","Acy;":"А","acy;":"а","AElig;":"Æ","AElig":"Æ","aelig;":"æ","aelig":"æ","af;":"⁡","Afr;":"𝔄","afr;":"𝔞","Agrave;":"À","Agrave":"À","agrave;":"à","agrave":"à","alefsym;":"ℵ","aleph;":"ℵ","Alpha;":"Α","alpha;":"α","Amacr;":"Ā","amacr;":"ā","amalg;":"⨿","AMP;":"&","AMP":"&","amp;":"&","amp":"&","And;":"⩓","and;":"∧","andand;":"⩕","andd;":"⩜","andslope;":"⩘","andv;":"⩚","ang;":"∠","ange;":"⦤","angle;":"∠","angmsd;":"∡","angmsdaa;":"⦨","angmsdab;":"⦩","angmsdac;":"⦪","angmsdad;":"⦫","angmsdae;":"⦬","angmsdaf;":"⦭","angmsdag;":"⦮","angmsdah;":"⦯","angrt;":"∟","angrtvb;":"⊾","angrtvbd;":"⦝","angsph;":"∢","angst;":"Å","angzarr;":"⍼","Aogon;":"Ą","aogon;":"ą","Aopf;":"𝔸","aopf;":"𝕒","ap;":"≈","apacir;":"⩯","apE;":"⩰","ape;":"≊","apid;":"≋","apos;":"\'","ApplyFunction;":"⁡","approx;":"≈","approxeq;":"≊","Aring;":"Å","Aring":"Å","aring;":"å","aring":"å","Ascr;":"𝒜","ascr;":"𝒶","Assign;":"≔","ast;":"*","asymp;":"≈","asympeq;":"≍","Atilde;":"Ã","Atilde":"Ã","atilde;":"ã","atilde":"ã","Auml;":"Ä","Auml":"Ä","auml;":"ä","auml":"ä","awconint;":"∳","awint;":"⨑","backcong;":"≌","backepsilon;":"϶","backprime;":"‵","backsim;":"∽","backsimeq;":"⋍","Backslash;":"∖","Barv;":"⫧","barvee;":"⊽","Barwed;":"⌆","barwed;":"⌅","barwedge;":"⌅","bbrk;":"⎵","bbrktbrk;":"⎶","bcong;":"≌","Bcy;":"Б","bcy;":"б","bdquo;":"„","becaus;":"∵","Because;":"∵","because;":"∵","bemptyv;":"⦰","bepsi;":"϶","bernou;":"ℬ","Bernoullis;":"ℬ","Beta;":"Β","beta;":"β","beth;":"ℶ","between;":"≬","Bfr;":"𝔅","bfr;":"𝔟","bigcap;":"⋂","bigcirc;":"◯","bigcup;":"⋃","bigodot;":"⨀","bigoplus;":"⨁","bigotimes;":"⨂","bigsqcup;":"⨆","bigstar;":"★","bigtriangledown;":"▽","bigtriangleup;":"△","biguplus;":"⨄","bigvee;":"⋁","bigwedge;":"⋀","bkarow;":"⤍","blacklozenge;":"⧫","blacksquare;":"▪","blacktriangle;":"▴","blacktriangledown;":"▾","blacktriangleleft;":"◂","blacktriangleright;":"▸","blank;":"␣","blk12;":"▒","blk14;":"░","blk34;":"▓","block;":"█","bne;":"=⃥","bnequiv;":"≡⃥","bNot;":"⫭","bnot;":"⌐","Bopf;":"𝔹","bopf;":"𝕓","bot;":"⊥","bottom;":"⊥","bowtie;":"⋈","boxbox;":"⧉","boxDL;":"╗","boxDl;":"
╖","boxdL;":"╕","boxdl;":"┐","boxDR;":"╔","boxDr;":"╓","boxdR;":"╒","boxdr;":"┌","boxH;":"═","boxh;":"─","boxHD;":"╦","boxHd;":"╤","boxhD;":"╥","boxhd;":"┬","boxHU;":"╩","boxHu;":"╧","boxhU;":"╨","boxhu;":"┴","boxminus;":"⊟","boxplus;":"⊞","boxtimes;":"⊠","boxUL;":"╝","boxUl;":"╜","boxuL;":"╛","boxul;":"┘","boxUR;":"╚","boxUr;":"╙","boxuR;":"╘","boxur;":"└","boxV;":"║","boxv;":"│","boxVH;":"╬","boxVh;":"╫","boxvH;":"╪","boxvh;":"┼","boxVL;":"╣","boxVl;":"╢","boxvL;":"╡","boxvl;":"┤","boxVR;":"╠","boxVr;":"╟","boxvR;":"╞","boxvr;":"├","bprime;":"‵","Breve;":"˘","breve;":"˘","brvbar;":"¦","brvbar":"¦","Bscr;":"ℬ","bscr;":"𝒷","bsemi;":"⁏","bsim;":"∽","bsime;":"⋍","bsol;":"\\\\","bsolb;":"⧅","bsolhsub;":"⟈","bull;":"•","bullet;":"•","bump;":"≎","bumpE;":"⪮","bumpe;":"≏","Bumpeq;":"≎","bumpeq;":"≏","Cacute;":"Ć","cacute;":"ć","Cap;":"⋒","cap;":"∩","capand;":"⩄","capbrcup;":"⩉","capcap;":"⩋","capcup;":"⩇","capdot;":"⩀","CapitalDifferentialD;":"ⅅ","caps;":"∩︀","caret;":"⁁","caron;":"ˇ","Cayleys;":"ℭ","ccaps;":"⩍","Ccaron;":"Č","ccaron;":"č","Ccedil;":"Ç","Ccedil":"Ç","ccedil;":"ç","ccedil":"ç","Ccirc;":"Ĉ","ccirc;":"ĉ","Cconint;":"∰","ccups;":"⩌","ccupssm;":"⩐","Cdot;":"Ċ","cdot;":"ċ","cedil;":"¸","cedil":"¸","Cedilla;":"¸","cemptyv;":"⦲","cent;":"¢","cent":"¢","CenterDot;":"·","centerdot;":"·","Cfr;":"ℭ","cfr;":"𝔠","CHcy;":"Ч","chcy;":"ч","check;":"✓","checkmark;":"✓","Chi;":"Χ","chi;":"χ","cir;":"○","circ;":"ˆ","circeq;":"≗","circlearrowleft;":"↺","circlearrowright;":"↻","circledast;":"⊛","circledcirc;":"⊚","circleddash;":"⊝","CircleDot;":"⊙","circledR;":"®","circledS;":"Ⓢ","CircleMinus;":"⊖","CirclePlus;":"⊕","CircleTimes;":"⊗","cirE;":"⧃","cire;":"≗","cirfnint;":"⨐","cirmid;":"⫯","cirscir;":"⧂","ClockwiseContourIntegral;":"∲","CloseCurlyDoubleQuote;":"”","CloseCurlyQuote;":"’","clubs;":"♣","clubsuit;":"♣","Colon;":"∷","colon;":":","Colone;":"⩴","colone;":"≔","coloneq;":"≔","comma;":",","commat;":"@","comp;":"∁","compfn;":"∘","complement;":"∁","complexes;":"ℂ","cong;":"≅","congdot;":"⩭","Congruent;":"≡","Conint;":"∯","conint;":"∮","ContourIntegral;":"∮","Copf;":"ℂ","copf;":"𝕔","coprod;":"∐","Coproduct;":"∐","COPY;":"©","COPY":"©","copy;":"©","copy":"©","copysr;":"℗","CounterClockwiseContourIntegral;":"∳","crarr;":"↵","Cross;":"⨯","cross;":"✗","Cscr;":"𝒞","cscr;":"𝒸","csub;":"⫏","csube;":"⫑","csup;":"⫐","csupe;":"⫒","ctdot;":"⋯","cudarrl;":"⤸","cudarrr;":"⤵","cuepr;":"⋞","cuesc;":"⋟","cularr;":"↶","cularrp;":"⤽","Cup;":"⋓","cup;":"∪","cupbrcap;":"⩈","CupCap;":"≍","cupcap;":"⩆","cupcup;":"⩊","cupdot;":"⊍","cupor;":"⩅","cups;":"∪︀","curarr;":"↷","curarrm;":"⤼","curlyeqprec;":"⋞","curlyeqsucc;":"⋟","curlyvee;":"⋎","curlywedge;":"⋏","curren;":"¤","curren":"¤","curvearrowleft;":"↶","curvearrowright;":"↷","cuvee;":"⋎","cuwed;":"⋏","cwconint;":"∲","cwint;":"∱","cylcty;":"⌭","Dagger;":"‡","dagger;":"†","daleth;":"ℸ","Darr;":"↡","dArr;":"⇓","darr;":"↓","dash;":"‐","Dashv;":"⫤","dashv;":"⊣","dbkarow;":"⤏","dblac;":"˝","Dcaron;":"Ď","dcaron;":"ď","Dcy;":"Д","dcy;":"д","DD;":"ⅅ","dd;":"ⅆ","ddagger;":"‡","ddarr;":"⇊","DDotrahd;":"⤑","ddotseq;":"⩷","deg;":"°","deg":"°","Del;":"∇","Delta;":"Δ","delta;":"δ","demptyv;":"⦱","dfisht;":"⥿","Dfr;":"𝔇","dfr;":"𝔡","dHar;":"⥥","dharl;":"⇃","dharr;":"⇂","DiacriticalAcute;":"´","DiacriticalDot;":"˙","DiacriticalDoubleAcute;":"˝","DiacriticalGrave;":"`","DiacriticalTilde;":"˜","diam;":"⋄","Diamond;":"⋄","diamond;":"⋄","diamondsuit;":"♦","diams;":"♦","die;":"¨","DifferentialD;":"ⅆ","digamma;":"ϝ","disin;":"⋲","div;":"÷","divide;":"÷","divide":"÷","divideontimes;":"⋇","divo
nx;":"⋇","DJcy;":"Ђ","djcy;":"ђ","dlcorn;":"⌞","dlcrop;":"⌍","dollar;":"$","Dopf;":"𝔻","dopf;":"𝕕","Dot;":"¨","dot;":"˙","DotDot;":"⃜","doteq;":"≐","doteqdot;":"≑","DotEqual;":"≐","dotminus;":"∸","dotplus;":"∔","dotsquare;":"⊡","doublebarwedge;":"⌆","DoubleContourIntegral;":"∯","DoubleDot;":"¨","DoubleDownArrow;":"⇓","DoubleLeftArrow;":"⇐","DoubleLeftRightArrow;":"⇔","DoubleLeftTee;":"⫤","DoubleLongLeftArrow;":"⟸","DoubleLongLeftRightArrow;":"⟺","DoubleLongRightArrow;":"⟹","DoubleRightArrow;":"⇒","DoubleRightTee;":"⊨","DoubleUpArrow;":"⇑","DoubleUpDownArrow;":"⇕","DoubleVerticalBar;":"∥","DownArrow;":"↓","Downarrow;":"⇓","downarrow;":"↓","DownArrowBar;":"⤓","DownArrowUpArrow;":"⇵","DownBreve;":"̑","downdownarrows;":"⇊","downharpoonleft;":"⇃","downharpoonright;":"⇂","DownLeftRightVector;":"⥐","DownLeftTeeVector;":"⥞","DownLeftVector;":"↽","DownLeftVectorBar;":"⥖","DownRightTeeVector;":"⥟","DownRightVector;":"⇁","DownRightVectorBar;":"⥗","DownTee;":"⊤","DownTeeArrow;":"↧","drbkarow;":"⤐","drcorn;":"⌟","drcrop;":"⌌","Dscr;":"𝒟","dscr;":"𝒹","DScy;":"Ѕ","dscy;":"ѕ","dsol;":"⧶","Dstrok;":"Đ","dstrok;":"đ","dtdot;":"⋱","dtri;":"▿","dtrif;":"▾","duarr;":"⇵","duhar;":"⥯","dwangle;":"⦦","DZcy;":"Џ","dzcy;":"џ","dzigrarr;":"⟿","Eacute;":"É","Eacute":"É","eacute;":"é","eacute":"é","easter;":"⩮","Ecaron;":"Ě","ecaron;":"ě","ecir;":"≖","Ecirc;":"Ê","Ecirc":"Ê","ecirc;":"ê","ecirc":"ê","ecolon;":"≕","Ecy;":"Э","ecy;":"э","eDDot;":"⩷","Edot;":"Ė","eDot;":"≑","edot;":"ė","ee;":"ⅇ","efDot;":"≒","Efr;":"𝔈","efr;":"𝔢","eg;":"⪚","Egrave;":"È","Egrave":"È","egrave;":"è","egrave":"è","egs;":"⪖","egsdot;":"⪘","el;":"⪙","Element;":"∈","elinters;":"⏧","ell;":"ℓ","els;":"⪕","elsdot;":"⪗","Emacr;":"Ē","emacr;":"ē","empty;":"∅","emptyset;":"∅","EmptySmallSquare;":"◻","emptyv;":"∅","EmptyVerySmallSquare;":"▫","emsp;":" ","emsp13;":" ","emsp14;":" ","ENG;":"Ŋ","eng;":"ŋ","ensp;":" 
","Eogon;":"Ę","eogon;":"ę","Eopf;":"𝔼","eopf;":"𝕖","epar;":"⋕","eparsl;":"⧣","eplus;":"⩱","epsi;":"ε","Epsilon;":"Ε","epsilon;":"ε","epsiv;":"ϵ","eqcirc;":"≖","eqcolon;":"≕","eqsim;":"≂","eqslantgtr;":"⪖","eqslantless;":"⪕","Equal;":"⩵","equals;":"=","EqualTilde;":"≂","equest;":"≟","Equilibrium;":"⇌","equiv;":"≡","equivDD;":"⩸","eqvparsl;":"⧥","erarr;":"⥱","erDot;":"≓","Escr;":"ℰ","escr;":"ℯ","esdot;":"≐","Esim;":"⩳","esim;":"≂","Eta;":"Η","eta;":"η","ETH;":"Ð","ETH":"Ð","eth;":"ð","eth":"ð","Euml;":"Ë","Euml":"Ë","euml;":"ë","euml":"ë","euro;":"€","excl;":"!","exist;":"∃","Exists;":"∃","expectation;":"ℰ","ExponentialE;":"ⅇ","exponentiale;":"ⅇ","fallingdotseq;":"≒","Fcy;":"Ф","fcy;":"ф","female;":"♀","ffilig;":"ffi","fflig;":"ff","ffllig;":"ffl","Ffr;":"𝔉","ffr;":"𝔣","filig;":"fi","FilledSmallSquare;":"◼","FilledVerySmallSquare;":"▪","fjlig;":"fj","flat;":"♭","fllig;":"fl","fltns;":"▱","fnof;":"ƒ","Fopf;":"𝔽","fopf;":"𝕗","ForAll;":"∀","forall;":"∀","fork;":"⋔","forkv;":"⫙","Fouriertrf;":"ℱ","fpartint;":"⨍","frac12;":"½","frac12":"½","frac13;":"⅓","frac14;":"¼","frac14":"¼","frac15;":"⅕","frac16;":"⅙","frac18;":"⅛","frac23;":"⅔","frac25;":"⅖","frac34;":"¾","frac34":"¾","frac35;":"⅗","frac38;":"⅜","frac45;":"⅘","frac56;":"⅚","frac58;":"⅝","frac78;":"⅞","frasl;":"⁄","frown;":"⌢","Fscr;":"ℱ","fscr;":"𝒻","gacute;":"ǵ","Gamma;":"Γ","gamma;":"γ","Gammad;":"Ϝ","gammad;":"ϝ","gap;":"⪆","Gbreve;":"Ğ","gbreve;":"ğ","Gcedil;":"Ģ","Gcirc;":"Ĝ","gcirc;":"ĝ","Gcy;":"Г","gcy;":"г","Gdot;":"Ġ","gdot;":"ġ","gE;":"≧","ge;":"≥","gEl;":"⪌","gel;":"⋛","geq;":"≥","geqq;":"≧","geqslant;":"⩾","ges;":"⩾","gescc;":"⪩","gesdot;":"⪀","gesdoto;":"⪂","gesdotol;":"⪄","gesl;":"⋛︀","gesles;":"⪔","Gfr;":"𝔊","gfr;":"𝔤","Gg;":"⋙","gg;":"≫","ggg;":"⋙","gimel;":"ℷ","GJcy;":"Ѓ","gjcy;":"ѓ","gl;":"≷","gla;":"⪥","glE;":"⪒","glj;":"⪤","gnap;":"⪊","gnapprox;":"⪊","gnE;":"≩","gne;":"⪈","gneq;":"⪈","gneqq;":"≩","gnsim;":"⋧","Gopf;":"𝔾","gopf;":"𝕘","grave;":"`","GreaterEqual;":"≥","GreaterEqualLess;":"⋛","GreaterFullEqual;":"≧","GreaterGreater;":"⪢","GreaterLess;":"≷","GreaterSlantEqual;":"⩾","GreaterTilde;":"≳","Gscr;":"𝒢","gscr;":"ℊ","gsim;":"≳","gsime;":"⪎","gsiml;":"⪐","GT;":">","GT":">","Gt;":"≫","gt;":">","gt":">","gtcc;":"⪧","gtcir;":"⩺","gtdot;":"⋗","gtlPar;":"⦕","gtquest;":"⩼","gtrapprox;":"⪆","gtrarr;":"⥸","gtrdot;":"⋗","gtreqless;":"⋛","gtreqqless;":"⪌","gtrless;":"≷","gtrsim;":"≳","gvertneqq;":"≩︀","gvnE;":"≩︀","Hacek;":"ˇ","hairsp;":" 
","half;":"½","hamilt;":"ℋ","HARDcy;":"Ъ","hardcy;":"ъ","hArr;":"⇔","harr;":"↔","harrcir;":"⥈","harrw;":"↭","Hat;":"^","hbar;":"ℏ","Hcirc;":"Ĥ","hcirc;":"ĥ","hearts;":"♥","heartsuit;":"♥","hellip;":"…","hercon;":"⊹","Hfr;":"ℌ","hfr;":"𝔥","HilbertSpace;":"ℋ","hksearow;":"⤥","hkswarow;":"⤦","hoarr;":"⇿","homtht;":"∻","hookleftarrow;":"↩","hookrightarrow;":"↪","Hopf;":"ℍ","hopf;":"𝕙","horbar;":"―","HorizontalLine;":"─","Hscr;":"ℋ","hscr;":"𝒽","hslash;":"ℏ","Hstrok;":"Ħ","hstrok;":"ħ","HumpDownHump;":"≎","HumpEqual;":"≏","hybull;":"⁃","hyphen;":"‐","Iacute;":"Í","Iacute":"Í","iacute;":"í","iacute":"í","ic;":"⁣","Icirc;":"Î","Icirc":"Î","icirc;":"î","icirc":"î","Icy;":"И","icy;":"и","Idot;":"İ","IEcy;":"Е","iecy;":"е","iexcl;":"¡","iexcl":"¡","iff;":"⇔","Ifr;":"ℑ","ifr;":"𝔦","Igrave;":"Ì","Igrave":"Ì","igrave;":"ì","igrave":"ì","ii;":"ⅈ","iiiint;":"⨌","iiint;":"∭","iinfin;":"⧜","iiota;":"℩","IJlig;":"IJ","ijlig;":"ij","Im;":"ℑ","Imacr;":"Ī","imacr;":"ī","image;":"ℑ","ImaginaryI;":"ⅈ","imagline;":"ℐ","imagpart;":"ℑ","imath;":"ı","imof;":"⊷","imped;":"Ƶ","Implies;":"⇒","in;":"∈","incare;":"℅","infin;":"∞","infintie;":"⧝","inodot;":"ı","Int;":"∬","int;":"∫","intcal;":"⊺","integers;":"ℤ","Integral;":"∫","intercal;":"⊺","Intersection;":"⋂","intlarhk;":"⨗","intprod;":"⨼","InvisibleComma;":"⁣","InvisibleTimes;":"⁢","IOcy;":"Ё","iocy;":"ё","Iogon;":"Į","iogon;":"į","Iopf;":"𝕀","iopf;":"𝕚","Iota;":"Ι","iota;":"ι","iprod;":"⨼","iquest;":"¿","iquest":"¿","Iscr;":"ℐ","iscr;":"𝒾","isin;":"∈","isindot;":"⋵","isinE;":"⋹","isins;":"⋴","isinsv;":"⋳","isinv;":"∈","it;":"⁢","Itilde;":"Ĩ","itilde;":"ĩ","Iukcy;":"І","iukcy;":"і","Iuml;":"Ï","Iuml":"Ï","iuml;":"ï","iuml":"ï","Jcirc;":"Ĵ","jcirc;":"ĵ","Jcy;":"Й","jcy;":"й","Jfr;":"𝔍","jfr;":"𝔧","jmath;":"ȷ","Jopf;":"𝕁","jopf;":"𝕛","Jscr;":"𝒥","jscr;":"𝒿","Jsercy;":"Ј","jsercy;":"ј","Jukcy;":"Є","jukcy;":"є","Kappa;":"Κ","kappa;":"κ","kappav;":"ϰ","Kcedil;":"Ķ","kcedil;":"ķ","Kcy;":"К","kcy;":"к","Kfr;":"𝔎","kfr;":"𝔨","kgreen;":"ĸ","KHcy;":"Х","khcy;":"х","KJcy;":"Ќ","kjcy;":"ќ","Kopf;":"𝕂","kopf;":"𝕜","Kscr;":"𝒦","kscr;":"𝓀","lAarr;":"⇚","Lacute;":"Ĺ","lacute;":"ĺ","laemptyv;":"⦴","lagran;":"ℒ","Lambda;":"Λ","lambda;":"λ","Lang;":"⟪","lang;":"⟨","langd;":"⦑","langle;":"⟨","lap;":"⪅","Laplacetrf;":"ℒ","laquo;":"«","laquo":"«","Larr;":"↞","lArr;":"⇐","larr;":"←","larrb;":"⇤","larrbfs;":"⤟","larrfs;":"⤝","larrhk;":"↩","larrlp;":"↫","larrpl;":"⤹","larrsim;":"⥳","larrtl;":"↢","lat;":"⪫","lAtail;":"⤛","latail;":"⤙","late;":"⪭","lates;":"⪭︀","lBarr;":"⤎","lbarr;":"⤌","lbbrk;":"❲","lbrace;":"{","lbrack;":"[","lbrke;":"⦋","lbrksld;":"⦏","lbrkslu;":"⦍","Lcaron;":"Ľ","lcaron;":"ľ","Lcedil;":"Ļ","lcedil;":"ļ","lceil;":"⌈","lcub;":"{","Lcy;":"Л","lcy;":"л","ldca;":"⤶","ldquo;":"“","ldquor;":"„","ldrdhar;":"⥧","ldrushar;":"⥋","ldsh;":"↲","lE;":"≦","le;":"≤","LeftAngleBracket;":"⟨","LeftArrow;":"←","Leftarrow;":"⇐","leftarrow;":"←","LeftArrowBar;":"⇤","LeftArrowRightArrow;":"⇆","leftarrowtail;":"↢","LeftCeiling;":"⌈","LeftDoubleBracket;":"⟦","LeftDownTeeVector;":"⥡","LeftDownVector;":"⇃","LeftDownVectorBar;":"⥙","LeftFloor;":"⌊","leftharpoondown;":"↽","leftharpoonup;":"↼","leftleftarrows;":"⇇","LeftRightArrow;":"↔","Leftrightarrow;":"⇔","leftrightarrow;":"↔","leftrightarrows;":"⇆","leftrightharpoons;":"⇋","leftrightsquigarrow;":"↭","LeftRightVector;":"⥎","LeftTee;":"⊣","LeftTeeArrow;":"↤","LeftTeeVector;":"⥚","leftthreetimes;":"⋋","LeftTriangle;":"⊲","LeftTriangleBar;":"⧏","LeftTriangleEqual;":"⊴","LeftUpDownVector;":"⥑","LeftUpTeeVector;":"⥠","LeftUpVector;":"↿","LeftUpVectorBar;":
"⥘","LeftVector;":"↼","LeftVectorBar;":"⥒","lEg;":"⪋","leg;":"⋚","leq;":"≤","leqq;":"≦","leqslant;":"⩽","les;":"⩽","lescc;":"⪨","lesdot;":"⩿","lesdoto;":"⪁","lesdotor;":"⪃","lesg;":"⋚︀","lesges;":"⪓","lessapprox;":"⪅","lessdot;":"⋖","lesseqgtr;":"⋚","lesseqqgtr;":"⪋","LessEqualGreater;":"⋚","LessFullEqual;":"≦","LessGreater;":"≶","lessgtr;":"≶","LessLess;":"⪡","lesssim;":"≲","LessSlantEqual;":"⩽","LessTilde;":"≲","lfisht;":"⥼","lfloor;":"⌊","Lfr;":"𝔏","lfr;":"𝔩","lg;":"≶","lgE;":"⪑","lHar;":"⥢","lhard;":"↽","lharu;":"↼","lharul;":"⥪","lhblk;":"▄","LJcy;":"Љ","ljcy;":"љ","Ll;":"⋘","ll;":"≪","llarr;":"⇇","llcorner;":"⌞","Lleftarrow;":"⇚","llhard;":"⥫","lltri;":"◺","Lmidot;":"Ŀ","lmidot;":"ŀ","lmoust;":"⎰","lmoustache;":"⎰","lnap;":"⪉","lnapprox;":"⪉","lnE;":"≨","lne;":"⪇","lneq;":"⪇","lneqq;":"≨","lnsim;":"⋦","loang;":"⟬","loarr;":"⇽","lobrk;":"⟦","LongLeftArrow;":"⟵","Longleftarrow;":"⟸","longleftarrow;":"⟵","LongLeftRightArrow;":"⟷","Longleftrightarrow;":"⟺","longleftrightarrow;":"⟷","longmapsto;":"⟼","LongRightArrow;":"⟶","Longrightarrow;":"⟹","longrightarrow;":"⟶","looparrowleft;":"↫","looparrowright;":"↬","lopar;":"⦅","Lopf;":"𝕃","lopf;":"𝕝","loplus;":"⨭","lotimes;":"⨴","lowast;":"∗","lowbar;":"_","LowerLeftArrow;":"↙","LowerRightArrow;":"↘","loz;":"◊","lozenge;":"◊","lozf;":"⧫","lpar;":"(","lparlt;":"⦓","lrarr;":"⇆","lrcorner;":"⌟","lrhar;":"⇋","lrhard;":"⥭","lrm;":"‎","lrtri;":"⊿","lsaquo;":"‹","Lscr;":"ℒ","lscr;":"𝓁","Lsh;":"↰","lsh;":"↰","lsim;":"≲","lsime;":"⪍","lsimg;":"⪏","lsqb;":"[","lsquo;":"‘","lsquor;":"‚","Lstrok;":"Ł","lstrok;":"ł","LT;":"<","LT":"<","Lt;":"≪","lt;":"<","lt":"<","ltcc;":"⪦","ltcir;":"⩹","ltdot;":"⋖","lthree;":"⋋","ltimes;":"⋉","ltlarr;":"⥶","ltquest;":"⩻","ltri;":"◃","ltrie;":"⊴","ltrif;":"◂","ltrPar;":"⦖","lurdshar;":"⥊","luruhar;":"⥦","lvertneqq;":"≨︀","lvnE;":"≨︀","macr;":"¯","macr":"¯","male;":"♂","malt;":"✠","maltese;":"✠","Map;":"⤅","map;":"↦","mapsto;":"↦","mapstodown;":"↧","mapstoleft;":"↤","mapstoup;":"↥","marker;":"▮","mcomma;":"⨩","Mcy;":"М","mcy;":"м","mdash;":"—","mDDot;":"∺","measuredangle;":"∡","MediumSpace;":" ","Mellintrf;":"ℳ","Mfr;":"𝔐","mfr;":"𝔪","mho;":"℧","micro;":"µ","micro":"µ","mid;":"∣","midast;":"*","midcir;":"⫰","middot;":"·","middot":"·","minus;":"−","minusb;":"⊟","minusd;":"∸","minusdu;":"⨪","MinusPlus;":"∓","mlcp;":"⫛","mldr;":"…","mnplus;":"∓","models;":"⊧","Mopf;":"𝕄","mopf;":"𝕞","mp;":"∓","Mscr;":"ℳ","mscr;":"𝓂","mstpos;":"∾","Mu;":"Μ","mu;":"μ","multimap;":"⊸","mumap;":"⊸","nabla;":"∇","Nacute;":"Ń","nacute;":"ń","nang;":"∠⃒","nap;":"≉","napE;":"⩰̸","napid;":"≋̸","napos;":"ʼn","napprox;":"≉","natur;":"♮","natural;":"♮","naturals;":"ℕ","nbsp;":" ","nbsp":" 
","nbump;":"≎̸","nbumpe;":"≏̸","ncap;":"⩃","Ncaron;":"Ň","ncaron;":"ň","Ncedil;":"Ņ","ncedil;":"ņ","ncong;":"≇","ncongdot;":"⩭̸","ncup;":"⩂","Ncy;":"Н","ncy;":"н","ndash;":"–","ne;":"≠","nearhk;":"⤤","neArr;":"⇗","nearr;":"↗","nearrow;":"↗","nedot;":"≐̸","NegativeMediumSpace;":"​","NegativeThickSpace;":"​","NegativeThinSpace;":"​","NegativeVeryThinSpace;":"​","nequiv;":"≢","nesear;":"⤨","nesim;":"≂̸","NestedGreaterGreater;":"≫","NestedLessLess;":"≪","NewLine;":"\\n","nexist;":"∄","nexists;":"∄","Nfr;":"𝔑","nfr;":"𝔫","ngE;":"≧̸","nge;":"≱","ngeq;":"≱","ngeqq;":"≧̸","ngeqslant;":"⩾̸","nges;":"⩾̸","nGg;":"⋙̸","ngsim;":"≵","nGt;":"≫⃒","ngt;":"≯","ngtr;":"≯","nGtv;":"≫̸","nhArr;":"⇎","nharr;":"↮","nhpar;":"⫲","ni;":"∋","nis;":"⋼","nisd;":"⋺","niv;":"∋","NJcy;":"Њ","njcy;":"њ","nlArr;":"⇍","nlarr;":"↚","nldr;":"‥","nlE;":"≦̸","nle;":"≰","nLeftarrow;":"⇍","nleftarrow;":"↚","nLeftrightarrow;":"⇎","nleftrightarrow;":"↮","nleq;":"≰","nleqq;":"≦̸","nleqslant;":"⩽̸","nles;":"⩽̸","nless;":"≮","nLl;":"⋘̸","nlsim;":"≴","nLt;":"≪⃒","nlt;":"≮","nltri;":"⋪","nltrie;":"⋬","nLtv;":"≪̸","nmid;":"∤","NoBreak;":"⁠","NonBreakingSpace;":" ","Nopf;":"ℕ","nopf;":"𝕟","Not;":"⫬","not;":"¬","not":"¬","NotCongruent;":"≢","NotCupCap;":"≭","NotDoubleVerticalBar;":"∦","NotElement;":"∉","NotEqual;":"≠","NotEqualTilde;":"≂̸","NotExists;":"∄","NotGreater;":"≯","NotGreaterEqual;":"≱","NotGreaterFullEqual;":"≧̸","NotGreaterGreater;":"≫̸","NotGreaterLess;":"≹","NotGreaterSlantEqual;":"⩾̸","NotGreaterTilde;":"≵","NotHumpDownHump;":"≎̸","NotHumpEqual;":"≏̸","notin;":"∉","notindot;":"⋵̸","notinE;":"⋹̸","notinva;":"∉","notinvb;":"⋷","notinvc;":"⋶","NotLeftTriangle;":"⋪","NotLeftTriangleBar;":"⧏̸","NotLeftTriangleEqual;":"⋬","NotLess;":"≮","NotLessEqual;":"≰","NotLessGreater;":"≸","NotLessLess;":"≪̸","NotLessSlantEqual;":"⩽̸","NotLessTilde;":"≴","NotNestedGreaterGreater;":"⪢̸","NotNestedLessLess;":"⪡̸","notni;":"∌","notniva;":"∌","notnivb;":"⋾","notnivc;":"⋽","NotPrecedes;":"⊀","NotPrecedesEqual;":"⪯̸","NotPrecedesSlantEqual;":"⋠","NotReverseElement;":"∌","NotRightTriangle;":"⋫","NotRightTriangleBar;":"⧐̸","NotRightTriangleEqual;":"⋭","NotSquareSubset;":"⊏̸","NotSquareSubsetEqual;":"⋢","NotSquareSuperset;":"⊐̸","NotSquareSupersetEqual;":"⋣","NotSubset;":"⊂⃒","NotSubsetEqual;":"⊈","NotSucceeds;":"⊁","NotSucceedsEqual;":"⪰̸","NotSucceedsSlantEqual;":"⋡","NotSucceedsTilde;":"≿̸","NotSuperset;":"⊃⃒","NotSupersetEqual;":"⊉","NotTilde;":"≁","NotTildeEqual;":"≄","NotTildeFullEqual;":"≇","NotTildeTilde;":"≉","NotVerticalBar;":"∤","npar;":"∦","nparallel;":"∦","nparsl;":"⫽⃥","npart;":"∂̸","npolint;":"⨔","npr;":"⊀","nprcue;":"⋠","npre;":"⪯̸","nprec;":"⊀","npreceq;":"⪯̸","nrArr;":"⇏","nrarr;":"↛","nrarrc;":"⤳̸","nrarrw;":"↝̸","nRightarrow;":"⇏","nrightarrow;":"↛","nrtri;":"⋫","nrtrie;":"⋭","nsc;":"⊁","nsccue;":"⋡","nsce;":"⪰̸","Nscr;":"𝒩","nscr;":"𝓃","nshortmid;":"∤","nshortparallel;":"∦","nsim;":"≁","nsime;":"≄","nsimeq;":"≄","nsmid;":"∤","nspar;":"∦","nsqsube;":"⋢","nsqsupe;":"⋣","nsub;":"⊄","nsubE;":"⫅̸","nsube;":"⊈","nsubset;":"⊂⃒","nsubseteq;":"⊈","nsubseteqq;":"⫅̸","nsucc;":"⊁","nsucceq;":"⪰̸","nsup;":"⊅","nsupE;":"⫆̸","nsupe;":"⊉","nsupset;":"⊃⃒","nsupseteq;":"⊉","nsupseteqq;":"⫆̸","ntgl;":"≹","Ntilde;":"Ñ","Ntilde":"Ñ","ntilde;":"ñ","ntilde":"ñ","ntlg;":"≸","ntriangleleft;":"⋪","ntrianglelefteq;":"⋬","ntriangleright;":"⋫","ntrianglerighteq;":"⋭","Nu;":"Ν","nu;":"ν","num;":"#","numero;":"№","numsp;":" 
","nvap;":"≍⃒","nVDash;":"⊯","nVdash;":"⊮","nvDash;":"⊭","nvdash;":"⊬","nvge;":"≥⃒","nvgt;":">⃒","nvHarr;":"⤄","nvinfin;":"⧞","nvlArr;":"⤂","nvle;":"≤⃒","nvlt;":"<⃒","nvltrie;":"⊴⃒","nvrArr;":"⤃","nvrtrie;":"⊵⃒","nvsim;":"∼⃒","nwarhk;":"⤣","nwArr;":"⇖","nwarr;":"↖","nwarrow;":"↖","nwnear;":"⤧","Oacute;":"Ó","Oacute":"Ó","oacute;":"ó","oacute":"ó","oast;":"⊛","ocir;":"⊚","Ocirc;":"Ô","Ocirc":"Ô","ocirc;":"ô","ocirc":"ô","Ocy;":"О","ocy;":"о","odash;":"⊝","Odblac;":"Ő","odblac;":"ő","odiv;":"⨸","odot;":"⊙","odsold;":"⦼","OElig;":"Œ","oelig;":"œ","ofcir;":"⦿","Ofr;":"𝔒","ofr;":"𝔬","ogon;":"˛","Ograve;":"Ò","Ograve":"Ò","ograve;":"ò","ograve":"ò","ogt;":"⧁","ohbar;":"⦵","ohm;":"Ω","oint;":"∮","olarr;":"↺","olcir;":"⦾","olcross;":"⦻","oline;":"‾","olt;":"⧀","Omacr;":"Ō","omacr;":"ō","Omega;":"Ω","omega;":"ω","Omicron;":"Ο","omicron;":"ο","omid;":"⦶","ominus;":"⊖","Oopf;":"𝕆","oopf;":"𝕠","opar;":"⦷","OpenCurlyDoubleQuote;":"“","OpenCurlyQuote;":"‘","operp;":"⦹","oplus;":"⊕","Or;":"⩔","or;":"∨","orarr;":"↻","ord;":"⩝","order;":"ℴ","orderof;":"ℴ","ordf;":"ª","ordf":"ª","ordm;":"º","ordm":"º","origof;":"⊶","oror;":"⩖","orslope;":"⩗","orv;":"⩛","oS;":"Ⓢ","Oscr;":"𝒪","oscr;":"ℴ","Oslash;":"Ø","Oslash":"Ø","oslash;":"ø","oslash":"ø","osol;":"⊘","Otilde;":"Õ","Otilde":"Õ","otilde;":"õ","otilde":"õ","Otimes;":"⨷","otimes;":"⊗","otimesas;":"⨶","Ouml;":"Ö","Ouml":"Ö","ouml;":"ö","ouml":"ö","ovbar;":"⌽","OverBar;":"‾","OverBrace;":"⏞","OverBracket;":"⎴","OverParenthesis;":"⏜","par;":"∥","para;":"¶","para":"¶","parallel;":"∥","parsim;":"⫳","parsl;":"⫽","part;":"∂","PartialD;":"∂","Pcy;":"П","pcy;":"п","percnt;":"%","period;":".","permil;":"‰","perp;":"⊥","pertenk;":"‱","Pfr;":"𝔓","pfr;":"𝔭","Phi;":"Φ","phi;":"φ","phiv;":"ϕ","phmmat;":"ℳ","phone;":"☎","Pi;":"Π","pi;":"π","pitchfork;":"⋔","piv;":"ϖ","planck;":"ℏ","planckh;":"ℎ","plankv;":"ℏ","plus;":"+","plusacir;":"⨣","plusb;":"⊞","pluscir;":"⨢","plusdo;":"∔","plusdu;":"⨥","pluse;":"⩲","PlusMinus;":"±","plusmn;":"±","plusmn":"±","plussim;":"⨦","plustwo;":"⨧","pm;":"±","Poincareplane;":"ℌ","pointint;":"⨕","Popf;":"ℙ","popf;":"𝕡","pound;":"£","pound":"£","Pr;":"⪻","pr;":"≺","prap;":"⪷","prcue;":"≼","prE;":"⪳","pre;":"⪯","prec;":"≺","precapprox;":"⪷","preccurlyeq;":"≼","Precedes;":"≺","PrecedesEqual;":"⪯","PrecedesSlantEqual;":"≼","PrecedesTilde;":"≾","preceq;":"⪯","precnapprox;":"⪹","precneqq;":"⪵","precnsim;":"⋨","precsim;":"≾","Prime;":"″","prime;":"′","primes;":"ℙ","prnap;":"⪹","prnE;":"⪵","prnsim;":"⋨","prod;":"∏","Product;":"∏","profalar;":"⌮","profline;":"⌒","profsurf;":"⌓","prop;":"∝","Proportion;":"∷","Proportional;":"∝","propto;":"∝","prsim;":"≾","prurel;":"⊰","Pscr;":"𝒫","pscr;":"𝓅","Psi;":"Ψ","psi;":"ψ","puncsp;":" 
","Qfr;":"𝔔","qfr;":"𝔮","qint;":"⨌","Qopf;":"ℚ","qopf;":"𝕢","qprime;":"⁗","Qscr;":"𝒬","qscr;":"𝓆","quaternions;":"ℍ","quatint;":"⨖","quest;":"?","questeq;":"≟","QUOT;":"\\"","QUOT":"\\"","quot;":"\\"","quot":"\\"","rAarr;":"⇛","race;":"∽̱","Racute;":"Ŕ","racute;":"ŕ","radic;":"√","raemptyv;":"⦳","Rang;":"⟫","rang;":"⟩","rangd;":"⦒","range;":"⦥","rangle;":"⟩","raquo;":"»","raquo":"»","Rarr;":"↠","rArr;":"⇒","rarr;":"→","rarrap;":"⥵","rarrb;":"⇥","rarrbfs;":"⤠","rarrc;":"⤳","rarrfs;":"⤞","rarrhk;":"↪","rarrlp;":"↬","rarrpl;":"⥅","rarrsim;":"⥴","Rarrtl;":"⤖","rarrtl;":"↣","rarrw;":"↝","rAtail;":"⤜","ratail;":"⤚","ratio;":"∶","rationals;":"ℚ","RBarr;":"⤐","rBarr;":"⤏","rbarr;":"⤍","rbbrk;":"❳","rbrace;":"}","rbrack;":"]","rbrke;":"⦌","rbrksld;":"⦎","rbrkslu;":"⦐","Rcaron;":"Ř","rcaron;":"ř","Rcedil;":"Ŗ","rcedil;":"ŗ","rceil;":"⌉","rcub;":"}","Rcy;":"Р","rcy;":"р","rdca;":"⤷","rdldhar;":"⥩","rdquo;":"”","rdquor;":"”","rdsh;":"↳","Re;":"ℜ","real;":"ℜ","realine;":"ℛ","realpart;":"ℜ","reals;":"ℝ","rect;":"▭","REG;":"®","REG":"®","reg;":"®","reg":"®","ReverseElement;":"∋","ReverseEquilibrium;":"⇋","ReverseUpEquilibrium;":"⥯","rfisht;":"⥽","rfloor;":"⌋","Rfr;":"ℜ","rfr;":"𝔯","rHar;":"⥤","rhard;":"⇁","rharu;":"⇀","rharul;":"⥬","Rho;":"Ρ","rho;":"ρ","rhov;":"ϱ","RightAngleBracket;":"⟩","RightArrow;":"→","Rightarrow;":"⇒","rightarrow;":"→","RightArrowBar;":"⇥","RightArrowLeftArrow;":"⇄","rightarrowtail;":"↣","RightCeiling;":"⌉","RightDoubleBracket;":"⟧","RightDownTeeVector;":"⥝","RightDownVector;":"⇂","RightDownVectorBar;":"⥕","RightFloor;":"⌋","rightharpoondown;":"⇁","rightharpoonup;":"⇀","rightleftarrows;":"⇄","rightleftharpoons;":"⇌","rightrightarrows;":"⇉","rightsquigarrow;":"↝","RightTee;":"⊢","RightTeeArrow;":"↦","RightTeeVector;":"⥛","rightthreetimes;":"⋌","RightTriangle;":"⊳","RightTriangleBar;":"⧐","RightTriangleEqual;":"⊵","RightUpDownVector;":"⥏","RightUpTeeVector;":"⥜","RightUpVector;":"↾","RightUpVectorBar;":"⥔","RightVector;":"⇀","RightVectorBar;":"⥓","ring;":"˚","risingdotseq;":"≓","rlarr;":"⇄","rlhar;":"⇌","rlm;":"‏","rmoust;":"⎱","rmoustache;":"⎱","rnmid;":"⫮","roang;":"⟭","roarr;":"⇾","robrk;":"⟧","ropar;":"⦆","Ropf;":"ℝ","ropf;":"𝕣","roplus;":"⨮","rotimes;":"⨵","RoundImplies;":"⥰","rpar;":")","rpargt;":"⦔","rppolint;":"⨒","rrarr;":"⇉","Rrightarrow;":"⇛","rsaquo;":"›","Rscr;":"ℛ","rscr;":"𝓇","Rsh;":"↱","rsh;":"↱","rsqb;":"]","rsquo;":"’","rsquor;":"’","rthree;":"⋌","rtimes;":"⋊","rtri;":"▹","rtrie;":"⊵","rtrif;":"▸","rtriltri;":"⧎","RuleDelayed;":"⧴","ruluhar;":"⥨","rx;":"℞","Sacute;":"Ś","sacute;":"ś","sbquo;":"‚","Sc;":"⪼","sc;":"≻","scap;":"⪸","Scaron;":"Š","scaron;":"š","sccue;":"≽","scE;":"⪴","sce;":"⪰","Scedil;":"Ş","scedil;":"ş","Scirc;":"Ŝ","scirc;":"ŝ","scnap;":"⪺","scnE;":"⪶","scnsim;":"⋩","scpolint;":"⨓","scsim;":"≿","Scy;":"С","scy;":"с","sdot;":"⋅","sdotb;":"⊡","sdote;":"⩦","searhk;":"⤥","seArr;":"⇘","searr;":"↘","searrow;":"↘","sect;":"§","sect":"§","semi;":";","seswar;":"⤩","setminus;":"∖","setmn;":"∖","sext;":"✶","Sfr;":"𝔖","sfr;":"𝔰","sfrown;":"⌢","sharp;":"♯","SHCHcy;":"Щ","shchcy;":"щ","SHcy;":"Ш","shcy;":"ш","ShortDownArrow;":"↓","ShortLeftArrow;":"←","shortmid;":"∣","shortparallel;":"∥","ShortRightArrow;":"→","ShortUpArrow;":"↑","shy;":"­","shy":"­","Sigma;":"Σ","sigma;":"σ","sigmaf;":"ς","sigmav;":"ς","sim;":"∼","simdot;":"⩪","sime;":"≃","simeq;":"≃","simg;":"⪞","simgE;":"⪠","siml;":"⪝","simlE;":"⪟","simne;":"≆","simplus;":"⨤","simrarr;":"⥲","slarr;":"←","SmallCircle;":"∘","smallsetminus;":"∖","smashp;":"⨳","smeparsl;":"⧤","smid;":"∣","smile;":"⌣","smt;":"⪪","smt
e;":"⪬","smtes;":"⪬︀","SOFTcy;":"Ь","softcy;":"ь","sol;":"/","solb;":"⧄","solbar;":"⌿","Sopf;":"𝕊","sopf;":"𝕤","spades;":"♠","spadesuit;":"♠","spar;":"∥","sqcap;":"⊓","sqcaps;":"⊓︀","sqcup;":"⊔","sqcups;":"⊔︀","Sqrt;":"√","sqsub;":"⊏","sqsube;":"⊑","sqsubset;":"⊏","sqsubseteq;":"⊑","sqsup;":"⊐","sqsupe;":"⊒","sqsupset;":"⊐","sqsupseteq;":"⊒","squ;":"□","Square;":"□","square;":"□","SquareIntersection;":"⊓","SquareSubset;":"⊏","SquareSubsetEqual;":"⊑","SquareSuperset;":"⊐","SquareSupersetEqual;":"⊒","SquareUnion;":"⊔","squarf;":"▪","squf;":"▪","srarr;":"→","Sscr;":"𝒮","sscr;":"𝓈","ssetmn;":"∖","ssmile;":"⌣","sstarf;":"⋆","Star;":"⋆","star;":"☆","starf;":"★","straightepsilon;":"ϵ","straightphi;":"ϕ","strns;":"¯","Sub;":"⋐","sub;":"⊂","subdot;":"⪽","subE;":"⫅","sube;":"⊆","subedot;":"⫃","submult;":"⫁","subnE;":"⫋","subne;":"⊊","subplus;":"⪿","subrarr;":"⥹","Subset;":"⋐","subset;":"⊂","subseteq;":"⊆","subseteqq;":"⫅","SubsetEqual;":"⊆","subsetneq;":"⊊","subsetneqq;":"⫋","subsim;":"⫇","subsub;":"⫕","subsup;":"⫓","succ;":"≻","succapprox;":"⪸","succcurlyeq;":"≽","Succeeds;":"≻","SucceedsEqual;":"⪰","SucceedsSlantEqual;":"≽","SucceedsTilde;":"≿","succeq;":"⪰","succnapprox;":"⪺","succneqq;":"⪶","succnsim;":"⋩","succsim;":"≿","SuchThat;":"∋","Sum;":"∑","sum;":"∑","sung;":"♪","Sup;":"⋑","sup;":"⊃","sup1;":"¹","sup1":"¹","sup2;":"²","sup2":"²","sup3;":"³","sup3":"³","supdot;":"⪾","supdsub;":"⫘","supE;":"⫆","supe;":"⊇","supedot;":"⫄","Superset;":"⊃","SupersetEqual;":"⊇","suphsol;":"⟉","suphsub;":"⫗","suplarr;":"⥻","supmult;":"⫂","supnE;":"⫌","supne;":"⊋","supplus;":"⫀","Supset;":"⋑","supset;":"⊃","supseteq;":"⊇","supseteqq;":"⫆","supsetneq;":"⊋","supsetneqq;":"⫌","supsim;":"⫈","supsub;":"⫔","supsup;":"⫖","swarhk;":"⤦","swArr;":"⇙","swarr;":"↙","swarrow;":"↙","swnwar;":"⤪","szlig;":"ß","szlig":"ß","Tab;":"\\t","target;":"⌖","Tau;":"Τ","tau;":"τ","tbrk;":"⎴","Tcaron;":"Ť","tcaron;":"ť","Tcedil;":"Ţ","tcedil;":"ţ","Tcy;":"Т","tcy;":"т","tdot;":"⃛","telrec;":"⌕","Tfr;":"𝔗","tfr;":"𝔱","there4;":"∴","Therefore;":"∴","therefore;":"∴","Theta;":"Θ","theta;":"θ","thetasym;":"ϑ","thetav;":"ϑ","thickapprox;":"≈","thicksim;":"∼","ThickSpace;":"  ","thinsp;":" ","ThinSpace;":" 
","thkap;":"≈","thksim;":"∼","THORN;":"Þ","THORN":"Þ","thorn;":"þ","thorn":"þ","Tilde;":"∼","tilde;":"˜","TildeEqual;":"≃","TildeFullEqual;":"≅","TildeTilde;":"≈","times;":"×","times":"×","timesb;":"⊠","timesbar;":"⨱","timesd;":"⨰","tint;":"∭","toea;":"⤨","top;":"⊤","topbot;":"⌶","topcir;":"⫱","Topf;":"𝕋","topf;":"𝕥","topfork;":"⫚","tosa;":"⤩","tprime;":"‴","TRADE;":"™","trade;":"™","triangle;":"▵","triangledown;":"▿","triangleleft;":"◃","trianglelefteq;":"⊴","triangleq;":"≜","triangleright;":"▹","trianglerighteq;":"⊵","tridot;":"◬","trie;":"≜","triminus;":"⨺","TripleDot;":"⃛","triplus;":"⨹","trisb;":"⧍","tritime;":"⨻","trpezium;":"⏢","Tscr;":"𝒯","tscr;":"𝓉","TScy;":"Ц","tscy;":"ц","TSHcy;":"Ћ","tshcy;":"ћ","Tstrok;":"Ŧ","tstrok;":"ŧ","twixt;":"≬","twoheadleftarrow;":"↞","twoheadrightarrow;":"↠","Uacute;":"Ú","Uacute":"Ú","uacute;":"ú","uacute":"ú","Uarr;":"↟","uArr;":"⇑","uarr;":"↑","Uarrocir;":"⥉","Ubrcy;":"Ў","ubrcy;":"ў","Ubreve;":"Ŭ","ubreve;":"ŭ","Ucirc;":"Û","Ucirc":"Û","ucirc;":"û","ucirc":"û","Ucy;":"У","ucy;":"у","udarr;":"⇅","Udblac;":"Ű","udblac;":"ű","udhar;":"⥮","ufisht;":"⥾","Ufr;":"𝔘","ufr;":"𝔲","Ugrave;":"Ù","Ugrave":"Ù","ugrave;":"ù","ugrave":"ù","uHar;":"⥣","uharl;":"↿","uharr;":"↾","uhblk;":"▀","ulcorn;":"⌜","ulcorner;":"⌜","ulcrop;":"⌏","ultri;":"◸","Umacr;":"Ū","umacr;":"ū","uml;":"¨","uml":"¨","UnderBar;":"_","UnderBrace;":"⏟","UnderBracket;":"⎵","UnderParenthesis;":"⏝","Union;":"⋃","UnionPlus;":"⊎","Uogon;":"Ų","uogon;":"ų","Uopf;":"𝕌","uopf;":"𝕦","UpArrow;":"↑","Uparrow;":"⇑","uparrow;":"↑","UpArrowBar;":"⤒","UpArrowDownArrow;":"⇅","UpDownArrow;":"↕","Updownarrow;":"⇕","updownarrow;":"↕","UpEquilibrium;":"⥮","upharpoonleft;":"↿","upharpoonright;":"↾","uplus;":"⊎","UpperLeftArrow;":"↖","UpperRightArrow;":"↗","Upsi;":"ϒ","upsi;":"υ","upsih;":"ϒ","Upsilon;":"Υ","upsilon;":"υ","UpTee;":"⊥","UpTeeArrow;":"↥","upuparrows;":"⇈","urcorn;":"⌝","urcorner;":"⌝","urcrop;":"⌎","Uring;":"Ů","uring;":"ů","urtri;":"◹","Uscr;":"𝒰","uscr;":"𝓊","utdot;":"⋰","Utilde;":"Ũ","utilde;":"ũ","utri;":"▵","utrif;":"▴","uuarr;":"⇈","Uuml;":"Ü","Uuml":"Ü","uuml;":"ü","uuml":"ü","uwangle;":"⦧","vangrt;":"⦜","varepsilon;":"ϵ","varkappa;":"ϰ","varnothing;":"∅","varphi;":"ϕ","varpi;":"ϖ","varpropto;":"∝","vArr;":"⇕","varr;":"↕","varrho;":"ϱ","varsigma;":"ς","varsubsetneq;":"⊊︀","varsubsetneqq;":"⫋︀","varsupsetneq;":"⊋︀","varsupsetneqq;":"⫌︀","vartheta;":"ϑ","vartriangleleft;":"⊲","vartriangleright;":"⊳","Vbar;":"⫫","vBar;":"⫨","vBarv;":"⫩","Vcy;":"В","vcy;":"в","VDash;":"⊫","Vdash;":"⊩","vDash;":"⊨","vdash;":"⊢","Vdashl;":"⫦","Vee;":"⋁","vee;":"∨","veebar;":"⊻","veeeq;":"≚","vellip;":"⋮","Verbar;":"‖","verbar;":"|","Vert;":"‖","vert;":"|","VerticalBar;":"∣","VerticalLine;":"|","VerticalSeparator;":"❘","VerticalTilde;":"≀","VeryThinSpace;":" 
","Vfr;":"𝔙","vfr;":"𝔳","vltri;":"⊲","vnsub;":"⊂⃒","vnsup;":"⊃⃒","Vopf;":"𝕍","vopf;":"𝕧","vprop;":"∝","vrtri;":"⊳","Vscr;":"𝒱","vscr;":"𝓋","vsubnE;":"⫋︀","vsubne;":"⊊︀","vsupnE;":"⫌︀","vsupne;":"⊋︀","Vvdash;":"⊪","vzigzag;":"⦚","Wcirc;":"Ŵ","wcirc;":"ŵ","wedbar;":"⩟","Wedge;":"⋀","wedge;":"∧","wedgeq;":"≙","weierp;":"℘","Wfr;":"𝔚","wfr;":"𝔴","Wopf;":"𝕎","wopf;":"𝕨","wp;":"℘","wr;":"≀","wreath;":"≀","Wscr;":"𝒲","wscr;":"𝓌","xcap;":"⋂","xcirc;":"◯","xcup;":"⋃","xdtri;":"▽","Xfr;":"𝔛","xfr;":"𝔵","xhArr;":"⟺","xharr;":"⟷","Xi;":"Ξ","xi;":"ξ","xlArr;":"⟸","xlarr;":"⟵","xmap;":"⟼","xnis;":"⋻","xodot;":"⨀","Xopf;":"𝕏","xopf;":"𝕩","xoplus;":"⨁","xotime;":"⨂","xrArr;":"⟹","xrarr;":"⟶","Xscr;":"𝒳","xscr;":"𝓍","xsqcup;":"⨆","xuplus;":"⨄","xutri;":"△","xvee;":"⋁","xwedge;":"⋀","Yacute;":"Ý","Yacute":"Ý","yacute;":"ý","yacute":"ý","YAcy;":"Я","yacy;":"я","Ycirc;":"Ŷ","ycirc;":"ŷ","Ycy;":"Ы","ycy;":"ы","yen;":"¥","yen":"¥","Yfr;":"𝔜","yfr;":"𝔶","YIcy;":"Ї","yicy;":"ї","Yopf;":"𝕐","yopf;":"𝕪","Yscr;":"𝒴","yscr;":"𝓎","YUcy;":"Ю","yucy;":"ю","Yuml;":"Ÿ","yuml;":"ÿ","yuml":"ÿ","Zacute;":"Ź","zacute;":"ź","Zcaron;":"Ž","zcaron;":"ž","Zcy;":"З","zcy;":"з","Zdot;":"Ż","zdot;":"ż","zeetrf;":"ℨ","ZeroWidthSpace;":"​","Zeta;":"Ζ","zeta;":"ζ","Zfr;":"ℨ","zfr;":"𝔷","ZHcy;":"Ж","zhcy;":"ж","zigrarr;":"⇝","Zopf;":"ℤ","zopf;":"𝕫","Zscr;":"𝒵","zscr;":"𝓏","zwj;":"‍","zwnj;":"‌"}'); /***/ }), -/***/ 2077: +/***/ 3123: /***/ ((module) => { "use strict"; -module.exports = JSON.parse('{"Aacute;":"Á","Aacute":"Á","aacute;":"á","aacute":"á","Abreve;":"Ă","abreve;":"ă","ac;":"∾","acd;":"∿","acE;":"∾̳","Acirc;":"Â","Acirc":"Â","acirc;":"â","acirc":"â","acute;":"´","acute":"´","Acy;":"А","acy;":"а","AElig;":"Æ","AElig":"Æ","aelig;":"æ","aelig":"æ","af;":"⁡","Afr;":"𝔄","afr;":"𝔞","Agrave;":"À","Agrave":"À","agrave;":"à","agrave":"à","alefsym;":"ℵ","aleph;":"ℵ","Alpha;":"Α","alpha;":"α","Amacr;":"Ā","amacr;":"ā","amalg;":"⨿","AMP;":"&","AMP":"&","amp;":"&","amp":"&","And;":"⩓","and;":"∧","andand;":"⩕","andd;":"⩜","andslope;":"⩘","andv;":"⩚","ang;":"∠","ange;":"⦤","angle;":"∠","angmsd;":"∡","angmsdaa;":"⦨","angmsdab;":"⦩","angmsdac;":"⦪","angmsdad;":"⦫","angmsdae;":"⦬","angmsdaf;":"⦭","angmsdag;":"⦮","angmsdah;":"⦯","angrt;":"∟","angrtvb;":"⊾","angrtvbd;":"⦝","angsph;":"∢","angst;":"Å","angzarr;":"⍼","Aogon;":"Ą","aogon;":"ą","Aopf;":"𝔸","aopf;":"𝕒","ap;":"≈","apacir;":"⩯","apE;":"⩰","ape;":"≊","apid;":"≋","apos;":"\'","ApplyFunction;":"⁡","approx;":"≈","approxeq;":"≊","Aring;":"Å","Aring":"Å","aring;":"å","aring":"å","Ascr;":"𝒜","ascr;":"𝒶","Assign;":"≔","ast;":"*","asymp;":"≈","asympeq;":"≍","Atilde;":"Ã","Atilde":"Ã","atilde;":"ã","atilde":"ã","Auml;":"Ä","Auml":"Ä","auml;":"ä","auml":"ä","awconint;":"∳","awint;":"⨑","backcong;":"≌","backepsilon;":"϶","backprime;":"‵","backsim;":"∽","backsimeq;":"⋍","Backslash;":"∖","Barv;":"⫧","barvee;":"⊽","Barwed;":"⌆","barwed;":"⌅","barwedge;":"⌅","bbrk;":"⎵","bbrktbrk;":"⎶","bcong;":"≌","Bcy;":"Б","bcy;":"б","bdquo;":"„","becaus;":"∵","Because;":"∵","because;":"∵","bemptyv;":"⦰","bepsi;":"϶","bernou;":"ℬ","Bernoullis;":"ℬ","Beta;":"Β","beta;":"β","beth;":"ℶ","between;":"≬","Bfr;":"𝔅","bfr;":"𝔟","bigcap;":"⋂","bigcirc;":"◯","bigcup;":"⋃","bigodot;":"⨀","bigoplus;":"⨁","bigotimes;":"⨂","bigsqcup;":"⨆","bigstar;":"★","bigtriangledown;":"▽","bigtriangleup;":"△","biguplus;":"⨄","bigvee;":"⋁","bigwedge;":"⋀","bkarow;":"⤍","blacklozenge;":"⧫","blacksquare;":"▪","blacktriangle;":"▴","blacktriangledown;":"▾","blacktriangleleft;":"◂","blacktriangleright;":"▸","blank;":"␣","blk12;":"▒","blk14;":"░","bl
k34;":"▓","block;":"█","bne;":"=⃥","bnequiv;":"≡⃥","bNot;":"⫭","bnot;":"⌐","Bopf;":"𝔹","bopf;":"𝕓","bot;":"⊥","bottom;":"⊥","bowtie;":"⋈","boxbox;":"⧉","boxDL;":"╗","boxDl;":"╖","boxdL;":"╕","boxdl;":"┐","boxDR;":"╔","boxDr;":"╓","boxdR;":"╒","boxdr;":"┌","boxH;":"═","boxh;":"─","boxHD;":"╦","boxHd;":"╤","boxhD;":"╥","boxhd;":"┬","boxHU;":"╩","boxHu;":"╧","boxhU;":"╨","boxhu;":"┴","boxminus;":"⊟","boxplus;":"⊞","boxtimes;":"⊠","boxUL;":"╝","boxUl;":"╜","boxuL;":"╛","boxul;":"┘","boxUR;":"╚","boxUr;":"╙","boxuR;":"╘","boxur;":"└","boxV;":"║","boxv;":"│","boxVH;":"╬","boxVh;":"╫","boxvH;":"╪","boxvh;":"┼","boxVL;":"╣","boxVl;":"╢","boxvL;":"╡","boxvl;":"┤","boxVR;":"╠","boxVr;":"╟","boxvR;":"╞","boxvr;":"├","bprime;":"‵","Breve;":"˘","breve;":"˘","brvbar;":"¦","brvbar":"¦","Bscr;":"ℬ","bscr;":"𝒷","bsemi;":"⁏","bsim;":"∽","bsime;":"⋍","bsol;":"\\\\","bsolb;":"⧅","bsolhsub;":"⟈","bull;":"•","bullet;":"•","bump;":"≎","bumpE;":"⪮","bumpe;":"≏","Bumpeq;":"≎","bumpeq;":"≏","Cacute;":"Ć","cacute;":"ć","Cap;":"⋒","cap;":"∩","capand;":"⩄","capbrcup;":"⩉","capcap;":"⩋","capcup;":"⩇","capdot;":"⩀","CapitalDifferentialD;":"ⅅ","caps;":"∩︀","caret;":"⁁","caron;":"ˇ","Cayleys;":"ℭ","ccaps;":"⩍","Ccaron;":"Č","ccaron;":"č","Ccedil;":"Ç","Ccedil":"Ç","ccedil;":"ç","ccedil":"ç","Ccirc;":"Ĉ","ccirc;":"ĉ","Cconint;":"∰","ccups;":"⩌","ccupssm;":"⩐","Cdot;":"Ċ","cdot;":"ċ","cedil;":"¸","cedil":"¸","Cedilla;":"¸","cemptyv;":"⦲","cent;":"¢","cent":"¢","CenterDot;":"·","centerdot;":"·","Cfr;":"ℭ","cfr;":"𝔠","CHcy;":"Ч","chcy;":"ч","check;":"✓","checkmark;":"✓","Chi;":"Χ","chi;":"χ","cir;":"○","circ;":"ˆ","circeq;":"≗","circlearrowleft;":"↺","circlearrowright;":"↻","circledast;":"⊛","circledcirc;":"⊚","circleddash;":"⊝","CircleDot;":"⊙","circledR;":"®","circledS;":"Ⓢ","CircleMinus;":"⊖","CirclePlus;":"⊕","CircleTimes;":"⊗","cirE;":"⧃","cire;":"≗","cirfnint;":"⨐","cirmid;":"⫯","cirscir;":"⧂","ClockwiseContourIntegral;":"∲","CloseCurlyDoubleQuote;":"”","CloseCurlyQuote;":"’","clubs;":"♣","clubsuit;":"♣","Colon;":"∷","colon;":":","Colone;":"⩴","colone;":"≔","coloneq;":"≔","comma;":",","commat;":"@","comp;":"∁","compfn;":"∘","complement;":"∁","complexes;":"ℂ","cong;":"≅","congdot;":"⩭","Congruent;":"≡","Conint;":"∯","conint;":"∮","ContourIntegral;":"∮","Copf;":"ℂ","copf;":"𝕔","coprod;":"∐","Coproduct;":"∐","COPY;":"©","COPY":"©","copy;":"©","copy":"©","copysr;":"℗","CounterClockwiseContourIntegral;":"∳","crarr;":"↵","Cross;":"⨯","cross;":"✗","Cscr;":"𝒞","cscr;":"𝒸","csub;":"⫏","csube;":"⫑","csup;":"⫐","csupe;":"⫒","ctdot;":"⋯","cudarrl;":"⤸","cudarrr;":"⤵","cuepr;":"⋞","cuesc;":"⋟","cularr;":"↶","cularrp;":"⤽","Cup;":"⋓","cup;":"∪","cupbrcap;":"⩈","CupCap;":"≍","cupcap;":"⩆","cupcup;":"⩊","cupdot;":"⊍","cupor;":"⩅","cups;":"∪︀","curarr;":"↷","curarrm;":"⤼","curlyeqprec;":"⋞","curlyeqsucc;":"⋟","curlyvee;":"⋎","curlywedge;":"⋏","curren;":"¤","curren":"¤","curvearrowleft;":"↶","curvearrowright;":"↷","cuvee;":"⋎","cuwed;":"⋏","cwconint;":"∲","cwint;":"∱","cylcty;":"⌭","Dagger;":"‡","dagger;":"†","daleth;":"ℸ","Darr;":"↡","dArr;":"⇓","darr;":"↓","dash;":"‐","Dashv;":"⫤","dashv;":"⊣","dbkarow;":"⤏","dblac;":"˝","Dcaron;":"Ď","dcaron;":"ď","Dcy;":"Д","dcy;":"д","DD;":"ⅅ","dd;":"ⅆ","ddagger;":"‡","ddarr;":"⇊","DDotrahd;":"⤑","ddotseq;":"⩷","deg;":"°","deg":"°","Del;":"∇","Delta;":"Δ","delta;":"δ","demptyv;":"⦱","dfisht;":"⥿","Dfr;":"𝔇","dfr;":"𝔡","dHar;":"⥥","dharl;":"⇃","dharr;":"⇂","DiacriticalAcute;":"´","DiacriticalDot;":"˙","DiacriticalDoubleAcute;":"˝","DiacriticalGrave;":"`","DiacriticalTilde;":"˜","diam;":"⋄","Diamond;":
"⋄","diamond;":"⋄","diamondsuit;":"♦","diams;":"♦","die;":"¨","DifferentialD;":"ⅆ","digamma;":"ϝ","disin;":"⋲","div;":"÷","divide;":"÷","divide":"÷","divideontimes;":"⋇","divonx;":"⋇","DJcy;":"Ђ","djcy;":"ђ","dlcorn;":"⌞","dlcrop;":"⌍","dollar;":"$","Dopf;":"𝔻","dopf;":"𝕕","Dot;":"¨","dot;":"˙","DotDot;":"⃜","doteq;":"≐","doteqdot;":"≑","DotEqual;":"≐","dotminus;":"∸","dotplus;":"∔","dotsquare;":"⊡","doublebarwedge;":"⌆","DoubleContourIntegral;":"∯","DoubleDot;":"¨","DoubleDownArrow;":"⇓","DoubleLeftArrow;":"⇐","DoubleLeftRightArrow;":"⇔","DoubleLeftTee;":"⫤","DoubleLongLeftArrow;":"⟸","DoubleLongLeftRightArrow;":"⟺","DoubleLongRightArrow;":"⟹","DoubleRightArrow;":"⇒","DoubleRightTee;":"⊨","DoubleUpArrow;":"⇑","DoubleUpDownArrow;":"⇕","DoubleVerticalBar;":"∥","DownArrow;":"↓","Downarrow;":"⇓","downarrow;":"↓","DownArrowBar;":"⤓","DownArrowUpArrow;":"⇵","DownBreve;":"̑","downdownarrows;":"⇊","downharpoonleft;":"⇃","downharpoonright;":"⇂","DownLeftRightVector;":"⥐","DownLeftTeeVector;":"⥞","DownLeftVector;":"↽","DownLeftVectorBar;":"⥖","DownRightTeeVector;":"⥟","DownRightVector;":"⇁","DownRightVectorBar;":"⥗","DownTee;":"⊤","DownTeeArrow;":"↧","drbkarow;":"⤐","drcorn;":"⌟","drcrop;":"⌌","Dscr;":"𝒟","dscr;":"𝒹","DScy;":"Ѕ","dscy;":"ѕ","dsol;":"⧶","Dstrok;":"Đ","dstrok;":"đ","dtdot;":"⋱","dtri;":"▿","dtrif;":"▾","duarr;":"⇵","duhar;":"⥯","dwangle;":"⦦","DZcy;":"Џ","dzcy;":"џ","dzigrarr;":"⟿","Eacute;":"É","Eacute":"É","eacute;":"é","eacute":"é","easter;":"⩮","Ecaron;":"Ě","ecaron;":"ě","ecir;":"≖","Ecirc;":"Ê","Ecirc":"Ê","ecirc;":"ê","ecirc":"ê","ecolon;":"≕","Ecy;":"Э","ecy;":"э","eDDot;":"⩷","Edot;":"Ė","eDot;":"≑","edot;":"ė","ee;":"ⅇ","efDot;":"≒","Efr;":"𝔈","efr;":"𝔢","eg;":"⪚","Egrave;":"È","Egrave":"È","egrave;":"è","egrave":"è","egs;":"⪖","egsdot;":"⪘","el;":"⪙","Element;":"∈","elinters;":"⏧","ell;":"ℓ","els;":"⪕","elsdot;":"⪗","Emacr;":"Ē","emacr;":"ē","empty;":"∅","emptyset;":"∅","EmptySmallSquare;":"◻","emptyv;":"∅","EmptyVerySmallSquare;":"▫","emsp;":" ","emsp13;":" ","emsp14;":" ","ENG;":"Ŋ","eng;":"ŋ","ensp;":" 
","Eogon;":"Ę","eogon;":"ę","Eopf;":"𝔼","eopf;":"𝕖","epar;":"⋕","eparsl;":"⧣","eplus;":"⩱","epsi;":"ε","Epsilon;":"Ε","epsilon;":"ε","epsiv;":"ϵ","eqcirc;":"≖","eqcolon;":"≕","eqsim;":"≂","eqslantgtr;":"⪖","eqslantless;":"⪕","Equal;":"⩵","equals;":"=","EqualTilde;":"≂","equest;":"≟","Equilibrium;":"⇌","equiv;":"≡","equivDD;":"⩸","eqvparsl;":"⧥","erarr;":"⥱","erDot;":"≓","Escr;":"ℰ","escr;":"ℯ","esdot;":"≐","Esim;":"⩳","esim;":"≂","Eta;":"Η","eta;":"η","ETH;":"Ð","ETH":"Ð","eth;":"ð","eth":"ð","Euml;":"Ë","Euml":"Ë","euml;":"ë","euml":"ë","euro;":"€","excl;":"!","exist;":"∃","Exists;":"∃","expectation;":"ℰ","ExponentialE;":"ⅇ","exponentiale;":"ⅇ","fallingdotseq;":"≒","Fcy;":"Ф","fcy;":"ф","female;":"♀","ffilig;":"ffi","fflig;":"ff","ffllig;":"ffl","Ffr;":"𝔉","ffr;":"𝔣","filig;":"fi","FilledSmallSquare;":"◼","FilledVerySmallSquare;":"▪","fjlig;":"fj","flat;":"♭","fllig;":"fl","fltns;":"▱","fnof;":"ƒ","Fopf;":"𝔽","fopf;":"𝕗","ForAll;":"∀","forall;":"∀","fork;":"⋔","forkv;":"⫙","Fouriertrf;":"ℱ","fpartint;":"⨍","frac12;":"½","frac12":"½","frac13;":"⅓","frac14;":"¼","frac14":"¼","frac15;":"⅕","frac16;":"⅙","frac18;":"⅛","frac23;":"⅔","frac25;":"⅖","frac34;":"¾","frac34":"¾","frac35;":"⅗","frac38;":"⅜","frac45;":"⅘","frac56;":"⅚","frac58;":"⅝","frac78;":"⅞","frasl;":"⁄","frown;":"⌢","Fscr;":"ℱ","fscr;":"𝒻","gacute;":"ǵ","Gamma;":"Γ","gamma;":"γ","Gammad;":"Ϝ","gammad;":"ϝ","gap;":"⪆","Gbreve;":"Ğ","gbreve;":"ğ","Gcedil;":"Ģ","Gcirc;":"Ĝ","gcirc;":"ĝ","Gcy;":"Г","gcy;":"г","Gdot;":"Ġ","gdot;":"ġ","gE;":"≧","ge;":"≥","gEl;":"⪌","gel;":"⋛","geq;":"≥","geqq;":"≧","geqslant;":"⩾","ges;":"⩾","gescc;":"⪩","gesdot;":"⪀","gesdoto;":"⪂","gesdotol;":"⪄","gesl;":"⋛︀","gesles;":"⪔","Gfr;":"𝔊","gfr;":"𝔤","Gg;":"⋙","gg;":"≫","ggg;":"⋙","gimel;":"ℷ","GJcy;":"Ѓ","gjcy;":"ѓ","gl;":"≷","gla;":"⪥","glE;":"⪒","glj;":"⪤","gnap;":"⪊","gnapprox;":"⪊","gnE;":"≩","gne;":"⪈","gneq;":"⪈","gneqq;":"≩","gnsim;":"⋧","Gopf;":"𝔾","gopf;":"𝕘","grave;":"`","GreaterEqual;":"≥","GreaterEqualLess;":"⋛","GreaterFullEqual;":"≧","GreaterGreater;":"⪢","GreaterLess;":"≷","GreaterSlantEqual;":"⩾","GreaterTilde;":"≳","Gscr;":"𝒢","gscr;":"ℊ","gsim;":"≳","gsime;":"⪎","gsiml;":"⪐","GT;":">","GT":">","Gt;":"≫","gt;":">","gt":">","gtcc;":"⪧","gtcir;":"⩺","gtdot;":"⋗","gtlPar;":"⦕","gtquest;":"⩼","gtrapprox;":"⪆","gtrarr;":"⥸","gtrdot;":"⋗","gtreqless;":"⋛","gtreqqless;":"⪌","gtrless;":"≷","gtrsim;":"≳","gvertneqq;":"≩︀","gvnE;":"≩︀","Hacek;":"ˇ","hairsp;":" 
","half;":"½","hamilt;":"ℋ","HARDcy;":"Ъ","hardcy;":"ъ","hArr;":"⇔","harr;":"↔","harrcir;":"⥈","harrw;":"↭","Hat;":"^","hbar;":"ℏ","Hcirc;":"Ĥ","hcirc;":"ĥ","hearts;":"♥","heartsuit;":"♥","hellip;":"…","hercon;":"⊹","Hfr;":"ℌ","hfr;":"𝔥","HilbertSpace;":"ℋ","hksearow;":"⤥","hkswarow;":"⤦","hoarr;":"⇿","homtht;":"∻","hookleftarrow;":"↩","hookrightarrow;":"↪","Hopf;":"ℍ","hopf;":"𝕙","horbar;":"―","HorizontalLine;":"─","Hscr;":"ℋ","hscr;":"𝒽","hslash;":"ℏ","Hstrok;":"Ħ","hstrok;":"ħ","HumpDownHump;":"≎","HumpEqual;":"≏","hybull;":"⁃","hyphen;":"‐","Iacute;":"Í","Iacute":"Í","iacute;":"í","iacute":"í","ic;":"⁣","Icirc;":"Î","Icirc":"Î","icirc;":"î","icirc":"î","Icy;":"И","icy;":"и","Idot;":"İ","IEcy;":"Е","iecy;":"е","iexcl;":"¡","iexcl":"¡","iff;":"⇔","Ifr;":"ℑ","ifr;":"𝔦","Igrave;":"Ì","Igrave":"Ì","igrave;":"ì","igrave":"ì","ii;":"ⅈ","iiiint;":"⨌","iiint;":"∭","iinfin;":"⧜","iiota;":"℩","IJlig;":"IJ","ijlig;":"ij","Im;":"ℑ","Imacr;":"Ī","imacr;":"ī","image;":"ℑ","ImaginaryI;":"ⅈ","imagline;":"ℐ","imagpart;":"ℑ","imath;":"ı","imof;":"⊷","imped;":"Ƶ","Implies;":"⇒","in;":"∈","incare;":"℅","infin;":"∞","infintie;":"⧝","inodot;":"ı","Int;":"∬","int;":"∫","intcal;":"⊺","integers;":"ℤ","Integral;":"∫","intercal;":"⊺","Intersection;":"⋂","intlarhk;":"⨗","intprod;":"⨼","InvisibleComma;":"⁣","InvisibleTimes;":"⁢","IOcy;":"Ё","iocy;":"ё","Iogon;":"Į","iogon;":"į","Iopf;":"𝕀","iopf;":"𝕚","Iota;":"Ι","iota;":"ι","iprod;":"⨼","iquest;":"¿","iquest":"¿","Iscr;":"ℐ","iscr;":"𝒾","isin;":"∈","isindot;":"⋵","isinE;":"⋹","isins;":"⋴","isinsv;":"⋳","isinv;":"∈","it;":"⁢","Itilde;":"Ĩ","itilde;":"ĩ","Iukcy;":"І","iukcy;":"і","Iuml;":"Ï","Iuml":"Ï","iuml;":"ï","iuml":"ï","Jcirc;":"Ĵ","jcirc;":"ĵ","Jcy;":"Й","jcy;":"й","Jfr;":"𝔍","jfr;":"𝔧","jmath;":"ȷ","Jopf;":"𝕁","jopf;":"𝕛","Jscr;":"𝒥","jscr;":"𝒿","Jsercy;":"Ј","jsercy;":"ј","Jukcy;":"Є","jukcy;":"є","Kappa;":"Κ","kappa;":"κ","kappav;":"ϰ","Kcedil;":"Ķ","kcedil;":"ķ","Kcy;":"К","kcy;":"к","Kfr;":"𝔎","kfr;":"𝔨","kgreen;":"ĸ","KHcy;":"Х","khcy;":"х","KJcy;":"Ќ","kjcy;":"ќ","Kopf;":"𝕂","kopf;":"𝕜","Kscr;":"𝒦","kscr;":"𝓀","lAarr;":"⇚","Lacute;":"Ĺ","lacute;":"ĺ","laemptyv;":"⦴","lagran;":"ℒ","Lambda;":"Λ","lambda;":"λ","Lang;":"⟪","lang;":"⟨","langd;":"⦑","langle;":"⟨","lap;":"⪅","Laplacetrf;":"ℒ","laquo;":"«","laquo":"«","Larr;":"↞","lArr;":"⇐","larr;":"←","larrb;":"⇤","larrbfs;":"⤟","larrfs;":"⤝","larrhk;":"↩","larrlp;":"↫","larrpl;":"⤹","larrsim;":"⥳","larrtl;":"↢","lat;":"⪫","lAtail;":"⤛","latail;":"⤙","late;":"⪭","lates;":"⪭︀","lBarr;":"⤎","lbarr;":"⤌","lbbrk;":"❲","lbrace;":"{","lbrack;":"[","lbrke;":"⦋","lbrksld;":"⦏","lbrkslu;":"⦍","Lcaron;":"Ľ","lcaron;":"ľ","Lcedil;":"Ļ","lcedil;":"ļ","lceil;":"⌈","lcub;":"{","Lcy;":"Л","lcy;":"л","ldca;":"⤶","ldquo;":"“","ldquor;":"„","ldrdhar;":"⥧","ldrushar;":"⥋","ldsh;":"↲","lE;":"≦","le;":"≤","LeftAngleBracket;":"⟨","LeftArrow;":"←","Leftarrow;":"⇐","leftarrow;":"←","LeftArrowBar;":"⇤","LeftArrowRightArrow;":"⇆","leftarrowtail;":"↢","LeftCeiling;":"⌈","LeftDoubleBracket;":"⟦","LeftDownTeeVector;":"⥡","LeftDownVector;":"⇃","LeftDownVectorBar;":"⥙","LeftFloor;":"⌊","leftharpoondown;":"↽","leftharpoonup;":"↼","leftleftarrows;":"⇇","LeftRightArrow;":"↔","Leftrightarrow;":"⇔","leftrightarrow;":"↔","leftrightarrows;":"⇆","leftrightharpoons;":"⇋","leftrightsquigarrow;":"↭","LeftRightVector;":"⥎","LeftTee;":"⊣","LeftTeeArrow;":"↤","LeftTeeVector;":"⥚","leftthreetimes;":"⋋","LeftTriangle;":"⊲","LeftTriangleBar;":"⧏","LeftTriangleEqual;":"⊴","LeftUpDownVector;":"⥑","LeftUpTeeVector;":"⥠","LeftUpVector;":"↿","LeftUpVectorBar;":
"⥘","LeftVector;":"↼","LeftVectorBar;":"⥒","lEg;":"⪋","leg;":"⋚","leq;":"≤","leqq;":"≦","leqslant;":"⩽","les;":"⩽","lescc;":"⪨","lesdot;":"⩿","lesdoto;":"⪁","lesdotor;":"⪃","lesg;":"⋚︀","lesges;":"⪓","lessapprox;":"⪅","lessdot;":"⋖","lesseqgtr;":"⋚","lesseqqgtr;":"⪋","LessEqualGreater;":"⋚","LessFullEqual;":"≦","LessGreater;":"≶","lessgtr;":"≶","LessLess;":"⪡","lesssim;":"≲","LessSlantEqual;":"⩽","LessTilde;":"≲","lfisht;":"⥼","lfloor;":"⌊","Lfr;":"𝔏","lfr;":"𝔩","lg;":"≶","lgE;":"⪑","lHar;":"⥢","lhard;":"↽","lharu;":"↼","lharul;":"⥪","lhblk;":"▄","LJcy;":"Љ","ljcy;":"љ","Ll;":"⋘","ll;":"≪","llarr;":"⇇","llcorner;":"⌞","Lleftarrow;":"⇚","llhard;":"⥫","lltri;":"◺","Lmidot;":"Ŀ","lmidot;":"ŀ","lmoust;":"⎰","lmoustache;":"⎰","lnap;":"⪉","lnapprox;":"⪉","lnE;":"≨","lne;":"⪇","lneq;":"⪇","lneqq;":"≨","lnsim;":"⋦","loang;":"⟬","loarr;":"⇽","lobrk;":"⟦","LongLeftArrow;":"⟵","Longleftarrow;":"⟸","longleftarrow;":"⟵","LongLeftRightArrow;":"⟷","Longleftrightarrow;":"⟺","longleftrightarrow;":"⟷","longmapsto;":"⟼","LongRightArrow;":"⟶","Longrightarrow;":"⟹","longrightarrow;":"⟶","looparrowleft;":"↫","looparrowright;":"↬","lopar;":"⦅","Lopf;":"𝕃","lopf;":"𝕝","loplus;":"⨭","lotimes;":"⨴","lowast;":"∗","lowbar;":"_","LowerLeftArrow;":"↙","LowerRightArrow;":"↘","loz;":"◊","lozenge;":"◊","lozf;":"⧫","lpar;":"(","lparlt;":"⦓","lrarr;":"⇆","lrcorner;":"⌟","lrhar;":"⇋","lrhard;":"⥭","lrm;":"‎","lrtri;":"⊿","lsaquo;":"‹","Lscr;":"ℒ","lscr;":"𝓁","Lsh;":"↰","lsh;":"↰","lsim;":"≲","lsime;":"⪍","lsimg;":"⪏","lsqb;":"[","lsquo;":"‘","lsquor;":"‚","Lstrok;":"Ł","lstrok;":"ł","LT;":"<","LT":"<","Lt;":"≪","lt;":"<","lt":"<","ltcc;":"⪦","ltcir;":"⩹","ltdot;":"⋖","lthree;":"⋋","ltimes;":"⋉","ltlarr;":"⥶","ltquest;":"⩻","ltri;":"◃","ltrie;":"⊴","ltrif;":"◂","ltrPar;":"⦖","lurdshar;":"⥊","luruhar;":"⥦","lvertneqq;":"≨︀","lvnE;":"≨︀","macr;":"¯","macr":"¯","male;":"♂","malt;":"✠","maltese;":"✠","Map;":"⤅","map;":"↦","mapsto;":"↦","mapstodown;":"↧","mapstoleft;":"↤","mapstoup;":"↥","marker;":"▮","mcomma;":"⨩","Mcy;":"М","mcy;":"м","mdash;":"—","mDDot;":"∺","measuredangle;":"∡","MediumSpace;":" ","Mellintrf;":"ℳ","Mfr;":"𝔐","mfr;":"𝔪","mho;":"℧","micro;":"µ","micro":"µ","mid;":"∣","midast;":"*","midcir;":"⫰","middot;":"·","middot":"·","minus;":"−","minusb;":"⊟","minusd;":"∸","minusdu;":"⨪","MinusPlus;":"∓","mlcp;":"⫛","mldr;":"…","mnplus;":"∓","models;":"⊧","Mopf;":"𝕄","mopf;":"𝕞","mp;":"∓","Mscr;":"ℳ","mscr;":"𝓂","mstpos;":"∾","Mu;":"Μ","mu;":"μ","multimap;":"⊸","mumap;":"⊸","nabla;":"∇","Nacute;":"Ń","nacute;":"ń","nang;":"∠⃒","nap;":"≉","napE;":"⩰̸","napid;":"≋̸","napos;":"ʼn","napprox;":"≉","natur;":"♮","natural;":"♮","naturals;":"ℕ","nbsp;":" ","nbsp":" 
","nbump;":"≎̸","nbumpe;":"≏̸","ncap;":"⩃","Ncaron;":"Ň","ncaron;":"ň","Ncedil;":"Ņ","ncedil;":"ņ","ncong;":"≇","ncongdot;":"⩭̸","ncup;":"⩂","Ncy;":"Н","ncy;":"н","ndash;":"–","ne;":"≠","nearhk;":"⤤","neArr;":"⇗","nearr;":"↗","nearrow;":"↗","nedot;":"≐̸","NegativeMediumSpace;":"​","NegativeThickSpace;":"​","NegativeThinSpace;":"​","NegativeVeryThinSpace;":"​","nequiv;":"≢","nesear;":"⤨","nesim;":"≂̸","NestedGreaterGreater;":"≫","NestedLessLess;":"≪","NewLine;":"\\n","nexist;":"∄","nexists;":"∄","Nfr;":"𝔑","nfr;":"𝔫","ngE;":"≧̸","nge;":"≱","ngeq;":"≱","ngeqq;":"≧̸","ngeqslant;":"⩾̸","nges;":"⩾̸","nGg;":"⋙̸","ngsim;":"≵","nGt;":"≫⃒","ngt;":"≯","ngtr;":"≯","nGtv;":"≫̸","nhArr;":"⇎","nharr;":"↮","nhpar;":"⫲","ni;":"∋","nis;":"⋼","nisd;":"⋺","niv;":"∋","NJcy;":"Њ","njcy;":"њ","nlArr;":"⇍","nlarr;":"↚","nldr;":"‥","nlE;":"≦̸","nle;":"≰","nLeftarrow;":"⇍","nleftarrow;":"↚","nLeftrightarrow;":"⇎","nleftrightarrow;":"↮","nleq;":"≰","nleqq;":"≦̸","nleqslant;":"⩽̸","nles;":"⩽̸","nless;":"≮","nLl;":"⋘̸","nlsim;":"≴","nLt;":"≪⃒","nlt;":"≮","nltri;":"⋪","nltrie;":"⋬","nLtv;":"≪̸","nmid;":"∤","NoBreak;":"⁠","NonBreakingSpace;":" ","Nopf;":"ℕ","nopf;":"𝕟","Not;":"⫬","not;":"¬","not":"¬","NotCongruent;":"≢","NotCupCap;":"≭","NotDoubleVerticalBar;":"∦","NotElement;":"∉","NotEqual;":"≠","NotEqualTilde;":"≂̸","NotExists;":"∄","NotGreater;":"≯","NotGreaterEqual;":"≱","NotGreaterFullEqual;":"≧̸","NotGreaterGreater;":"≫̸","NotGreaterLess;":"≹","NotGreaterSlantEqual;":"⩾̸","NotGreaterTilde;":"≵","NotHumpDownHump;":"≎̸","NotHumpEqual;":"≏̸","notin;":"∉","notindot;":"⋵̸","notinE;":"⋹̸","notinva;":"∉","notinvb;":"⋷","notinvc;":"⋶","NotLeftTriangle;":"⋪","NotLeftTriangleBar;":"⧏̸","NotLeftTriangleEqual;":"⋬","NotLess;":"≮","NotLessEqual;":"≰","NotLessGreater;":"≸","NotLessLess;":"≪̸","NotLessSlantEqual;":"⩽̸","NotLessTilde;":"≴","NotNestedGreaterGreater;":"⪢̸","NotNestedLessLess;":"⪡̸","notni;":"∌","notniva;":"∌","notnivb;":"⋾","notnivc;":"⋽","NotPrecedes;":"⊀","NotPrecedesEqual;":"⪯̸","NotPrecedesSlantEqual;":"⋠","NotReverseElement;":"∌","NotRightTriangle;":"⋫","NotRightTriangleBar;":"⧐̸","NotRightTriangleEqual;":"⋭","NotSquareSubset;":"⊏̸","NotSquareSubsetEqual;":"⋢","NotSquareSuperset;":"⊐̸","NotSquareSupersetEqual;":"⋣","NotSubset;":"⊂⃒","NotSubsetEqual;":"⊈","NotSucceeds;":"⊁","NotSucceedsEqual;":"⪰̸","NotSucceedsSlantEqual;":"⋡","NotSucceedsTilde;":"≿̸","NotSuperset;":"⊃⃒","NotSupersetEqual;":"⊉","NotTilde;":"≁","NotTildeEqual;":"≄","NotTildeFullEqual;":"≇","NotTildeTilde;":"≉","NotVerticalBar;":"∤","npar;":"∦","nparallel;":"∦","nparsl;":"⫽⃥","npart;":"∂̸","npolint;":"⨔","npr;":"⊀","nprcue;":"⋠","npre;":"⪯̸","nprec;":"⊀","npreceq;":"⪯̸","nrArr;":"⇏","nrarr;":"↛","nrarrc;":"⤳̸","nrarrw;":"↝̸","nRightarrow;":"⇏","nrightarrow;":"↛","nrtri;":"⋫","nrtrie;":"⋭","nsc;":"⊁","nsccue;":"⋡","nsce;":"⪰̸","Nscr;":"𝒩","nscr;":"𝓃","nshortmid;":"∤","nshortparallel;":"∦","nsim;":"≁","nsime;":"≄","nsimeq;":"≄","nsmid;":"∤","nspar;":"∦","nsqsube;":"⋢","nsqsupe;":"⋣","nsub;":"⊄","nsubE;":"⫅̸","nsube;":"⊈","nsubset;":"⊂⃒","nsubseteq;":"⊈","nsubseteqq;":"⫅̸","nsucc;":"⊁","nsucceq;":"⪰̸","nsup;":"⊅","nsupE;":"⫆̸","nsupe;":"⊉","nsupset;":"⊃⃒","nsupseteq;":"⊉","nsupseteqq;":"⫆̸","ntgl;":"≹","Ntilde;":"Ñ","Ntilde":"Ñ","ntilde;":"ñ","ntilde":"ñ","ntlg;":"≸","ntriangleleft;":"⋪","ntrianglelefteq;":"⋬","ntriangleright;":"⋫","ntrianglerighteq;":"⋭","Nu;":"Ν","nu;":"ν","num;":"#","numero;":"№","numsp;":" 
","nvap;":"≍⃒","nVDash;":"⊯","nVdash;":"⊮","nvDash;":"⊭","nvdash;":"⊬","nvge;":"≥⃒","nvgt;":">⃒","nvHarr;":"⤄","nvinfin;":"⧞","nvlArr;":"⤂","nvle;":"≤⃒","nvlt;":"<⃒","nvltrie;":"⊴⃒","nvrArr;":"⤃","nvrtrie;":"⊵⃒","nvsim;":"∼⃒","nwarhk;":"⤣","nwArr;":"⇖","nwarr;":"↖","nwarrow;":"↖","nwnear;":"⤧","Oacute;":"Ó","Oacute":"Ó","oacute;":"ó","oacute":"ó","oast;":"⊛","ocir;":"⊚","Ocirc;":"Ô","Ocirc":"Ô","ocirc;":"ô","ocirc":"ô","Ocy;":"О","ocy;":"о","odash;":"⊝","Odblac;":"Ő","odblac;":"ő","odiv;":"⨸","odot;":"⊙","odsold;":"⦼","OElig;":"Œ","oelig;":"œ","ofcir;":"⦿","Ofr;":"𝔒","ofr;":"𝔬","ogon;":"˛","Ograve;":"Ò","Ograve":"Ò","ograve;":"ò","ograve":"ò","ogt;":"⧁","ohbar;":"⦵","ohm;":"Ω","oint;":"∮","olarr;":"↺","olcir;":"⦾","olcross;":"⦻","oline;":"‾","olt;":"⧀","Omacr;":"Ō","omacr;":"ō","Omega;":"Ω","omega;":"ω","Omicron;":"Ο","omicron;":"ο","omid;":"⦶","ominus;":"⊖","Oopf;":"𝕆","oopf;":"𝕠","opar;":"⦷","OpenCurlyDoubleQuote;":"“","OpenCurlyQuote;":"‘","operp;":"⦹","oplus;":"⊕","Or;":"⩔","or;":"∨","orarr;":"↻","ord;":"⩝","order;":"ℴ","orderof;":"ℴ","ordf;":"ª","ordf":"ª","ordm;":"º","ordm":"º","origof;":"⊶","oror;":"⩖","orslope;":"⩗","orv;":"⩛","oS;":"Ⓢ","Oscr;":"𝒪","oscr;":"ℴ","Oslash;":"Ø","Oslash":"Ø","oslash;":"ø","oslash":"ø","osol;":"⊘","Otilde;":"Õ","Otilde":"Õ","otilde;":"õ","otilde":"õ","Otimes;":"⨷","otimes;":"⊗","otimesas;":"⨶","Ouml;":"Ö","Ouml":"Ö","ouml;":"ö","ouml":"ö","ovbar;":"⌽","OverBar;":"‾","OverBrace;":"⏞","OverBracket;":"⎴","OverParenthesis;":"⏜","par;":"∥","para;":"¶","para":"¶","parallel;":"∥","parsim;":"⫳","parsl;":"⫽","part;":"∂","PartialD;":"∂","Pcy;":"П","pcy;":"п","percnt;":"%","period;":".","permil;":"‰","perp;":"⊥","pertenk;":"‱","Pfr;":"𝔓","pfr;":"𝔭","Phi;":"Φ","phi;":"φ","phiv;":"ϕ","phmmat;":"ℳ","phone;":"☎","Pi;":"Π","pi;":"π","pitchfork;":"⋔","piv;":"ϖ","planck;":"ℏ","planckh;":"ℎ","plankv;":"ℏ","plus;":"+","plusacir;":"⨣","plusb;":"⊞","pluscir;":"⨢","plusdo;":"∔","plusdu;":"⨥","pluse;":"⩲","PlusMinus;":"±","plusmn;":"±","plusmn":"±","plussim;":"⨦","plustwo;":"⨧","pm;":"±","Poincareplane;":"ℌ","pointint;":"⨕","Popf;":"ℙ","popf;":"𝕡","pound;":"£","pound":"£","Pr;":"⪻","pr;":"≺","prap;":"⪷","prcue;":"≼","prE;":"⪳","pre;":"⪯","prec;":"≺","precapprox;":"⪷","preccurlyeq;":"≼","Precedes;":"≺","PrecedesEqual;":"⪯","PrecedesSlantEqual;":"≼","PrecedesTilde;":"≾","preceq;":"⪯","precnapprox;":"⪹","precneqq;":"⪵","precnsim;":"⋨","precsim;":"≾","Prime;":"″","prime;":"′","primes;":"ℙ","prnap;":"⪹","prnE;":"⪵","prnsim;":"⋨","prod;":"∏","Product;":"∏","profalar;":"⌮","profline;":"⌒","profsurf;":"⌓","prop;":"∝","Proportion;":"∷","Proportional;":"∝","propto;":"∝","prsim;":"≾","prurel;":"⊰","Pscr;":"𝒫","pscr;":"𝓅","Psi;":"Ψ","psi;":"ψ","puncsp;":" 
","Qfr;":"𝔔","qfr;":"𝔮","qint;":"⨌","Qopf;":"ℚ","qopf;":"𝕢","qprime;":"⁗","Qscr;":"𝒬","qscr;":"𝓆","quaternions;":"ℍ","quatint;":"⨖","quest;":"?","questeq;":"≟","QUOT;":"\\"","QUOT":"\\"","quot;":"\\"","quot":"\\"","rAarr;":"⇛","race;":"∽̱","Racute;":"Ŕ","racute;":"ŕ","radic;":"√","raemptyv;":"⦳","Rang;":"⟫","rang;":"⟩","rangd;":"⦒","range;":"⦥","rangle;":"⟩","raquo;":"»","raquo":"»","Rarr;":"↠","rArr;":"⇒","rarr;":"→","rarrap;":"⥵","rarrb;":"⇥","rarrbfs;":"⤠","rarrc;":"⤳","rarrfs;":"⤞","rarrhk;":"↪","rarrlp;":"↬","rarrpl;":"⥅","rarrsim;":"⥴","Rarrtl;":"⤖","rarrtl;":"↣","rarrw;":"↝","rAtail;":"⤜","ratail;":"⤚","ratio;":"∶","rationals;":"ℚ","RBarr;":"⤐","rBarr;":"⤏","rbarr;":"⤍","rbbrk;":"❳","rbrace;":"}","rbrack;":"]","rbrke;":"⦌","rbrksld;":"⦎","rbrkslu;":"⦐","Rcaron;":"Ř","rcaron;":"ř","Rcedil;":"Ŗ","rcedil;":"ŗ","rceil;":"⌉","rcub;":"}","Rcy;":"Р","rcy;":"р","rdca;":"⤷","rdldhar;":"⥩","rdquo;":"”","rdquor;":"”","rdsh;":"↳","Re;":"ℜ","real;":"ℜ","realine;":"ℛ","realpart;":"ℜ","reals;":"ℝ","rect;":"▭","REG;":"®","REG":"®","reg;":"®","reg":"®","ReverseElement;":"∋","ReverseEquilibrium;":"⇋","ReverseUpEquilibrium;":"⥯","rfisht;":"⥽","rfloor;":"⌋","Rfr;":"ℜ","rfr;":"𝔯","rHar;":"⥤","rhard;":"⇁","rharu;":"⇀","rharul;":"⥬","Rho;":"Ρ","rho;":"ρ","rhov;":"ϱ","RightAngleBracket;":"⟩","RightArrow;":"→","Rightarrow;":"⇒","rightarrow;":"→","RightArrowBar;":"⇥","RightArrowLeftArrow;":"⇄","rightarrowtail;":"↣","RightCeiling;":"⌉","RightDoubleBracket;":"⟧","RightDownTeeVector;":"⥝","RightDownVector;":"⇂","RightDownVectorBar;":"⥕","RightFloor;":"⌋","rightharpoondown;":"⇁","rightharpoonup;":"⇀","rightleftarrows;":"⇄","rightleftharpoons;":"⇌","rightrightarrows;":"⇉","rightsquigarrow;":"↝","RightTee;":"⊢","RightTeeArrow;":"↦","RightTeeVector;":"⥛","rightthreetimes;":"⋌","RightTriangle;":"⊳","RightTriangleBar;":"⧐","RightTriangleEqual;":"⊵","RightUpDownVector;":"⥏","RightUpTeeVector;":"⥜","RightUpVector;":"↾","RightUpVectorBar;":"⥔","RightVector;":"⇀","RightVectorBar;":"⥓","ring;":"˚","risingdotseq;":"≓","rlarr;":"⇄","rlhar;":"⇌","rlm;":"‏","rmoust;":"⎱","rmoustache;":"⎱","rnmid;":"⫮","roang;":"⟭","roarr;":"⇾","robrk;":"⟧","ropar;":"⦆","Ropf;":"ℝ","ropf;":"𝕣","roplus;":"⨮","rotimes;":"⨵","RoundImplies;":"⥰","rpar;":")","rpargt;":"⦔","rppolint;":"⨒","rrarr;":"⇉","Rrightarrow;":"⇛","rsaquo;":"›","Rscr;":"ℛ","rscr;":"𝓇","Rsh;":"↱","rsh;":"↱","rsqb;":"]","rsquo;":"’","rsquor;":"’","rthree;":"⋌","rtimes;":"⋊","rtri;":"▹","rtrie;":"⊵","rtrif;":"▸","rtriltri;":"⧎","RuleDelayed;":"⧴","ruluhar;":"⥨","rx;":"℞","Sacute;":"Ś","sacute;":"ś","sbquo;":"‚","Sc;":"⪼","sc;":"≻","scap;":"⪸","Scaron;":"Š","scaron;":"š","sccue;":"≽","scE;":"⪴","sce;":"⪰","Scedil;":"Ş","scedil;":"ş","Scirc;":"Ŝ","scirc;":"ŝ","scnap;":"⪺","scnE;":"⪶","scnsim;":"⋩","scpolint;":"⨓","scsim;":"≿","Scy;":"С","scy;":"с","sdot;":"⋅","sdotb;":"⊡","sdote;":"⩦","searhk;":"⤥","seArr;":"⇘","searr;":"↘","searrow;":"↘","sect;":"§","sect":"§","semi;":";","seswar;":"⤩","setminus;":"∖","setmn;":"∖","sext;":"✶","Sfr;":"𝔖","sfr;":"𝔰","sfrown;":"⌢","sharp;":"♯","SHCHcy;":"Щ","shchcy;":"щ","SHcy;":"Ш","shcy;":"ш","ShortDownArrow;":"↓","ShortLeftArrow;":"←","shortmid;":"∣","shortparallel;":"∥","ShortRightArrow;":"→","ShortUpArrow;":"↑","shy;":"­","shy":"­","Sigma;":"Σ","sigma;":"σ","sigmaf;":"ς","sigmav;":"ς","sim;":"∼","simdot;":"⩪","sime;":"≃","simeq;":"≃","simg;":"⪞","simgE;":"⪠","siml;":"⪝","simlE;":"⪟","simne;":"≆","simplus;":"⨤","simrarr;":"⥲","slarr;":"←","SmallCircle;":"∘","smallsetminus;":"∖","smashp;":"⨳","smeparsl;":"⧤","smid;":"∣","smile;":"⌣","smt;":"⪪","smt
e;":"⪬","smtes;":"⪬︀","SOFTcy;":"Ь","softcy;":"ь","sol;":"/","solb;":"⧄","solbar;":"⌿","Sopf;":"𝕊","sopf;":"𝕤","spades;":"♠","spadesuit;":"♠","spar;":"∥","sqcap;":"⊓","sqcaps;":"⊓︀","sqcup;":"⊔","sqcups;":"⊔︀","Sqrt;":"√","sqsub;":"⊏","sqsube;":"⊑","sqsubset;":"⊏","sqsubseteq;":"⊑","sqsup;":"⊐","sqsupe;":"⊒","sqsupset;":"⊐","sqsupseteq;":"⊒","squ;":"□","Square;":"□","square;":"□","SquareIntersection;":"⊓","SquareSubset;":"⊏","SquareSubsetEqual;":"⊑","SquareSuperset;":"⊐","SquareSupersetEqual;":"⊒","SquareUnion;":"⊔","squarf;":"▪","squf;":"▪","srarr;":"→","Sscr;":"𝒮","sscr;":"𝓈","ssetmn;":"∖","ssmile;":"⌣","sstarf;":"⋆","Star;":"⋆","star;":"☆","starf;":"★","straightepsilon;":"ϵ","straightphi;":"ϕ","strns;":"¯","Sub;":"⋐","sub;":"⊂","subdot;":"⪽","subE;":"⫅","sube;":"⊆","subedot;":"⫃","submult;":"⫁","subnE;":"⫋","subne;":"⊊","subplus;":"⪿","subrarr;":"⥹","Subset;":"⋐","subset;":"⊂","subseteq;":"⊆","subseteqq;":"⫅","SubsetEqual;":"⊆","subsetneq;":"⊊","subsetneqq;":"⫋","subsim;":"⫇","subsub;":"⫕","subsup;":"⫓","succ;":"≻","succapprox;":"⪸","succcurlyeq;":"≽","Succeeds;":"≻","SucceedsEqual;":"⪰","SucceedsSlantEqual;":"≽","SucceedsTilde;":"≿","succeq;":"⪰","succnapprox;":"⪺","succneqq;":"⪶","succnsim;":"⋩","succsim;":"≿","SuchThat;":"∋","Sum;":"∑","sum;":"∑","sung;":"♪","Sup;":"⋑","sup;":"⊃","sup1;":"¹","sup1":"¹","sup2;":"²","sup2":"²","sup3;":"³","sup3":"³","supdot;":"⪾","supdsub;":"⫘","supE;":"⫆","supe;":"⊇","supedot;":"⫄","Superset;":"⊃","SupersetEqual;":"⊇","suphsol;":"⟉","suphsub;":"⫗","suplarr;":"⥻","supmult;":"⫂","supnE;":"⫌","supne;":"⊋","supplus;":"⫀","Supset;":"⋑","supset;":"⊃","supseteq;":"⊇","supseteqq;":"⫆","supsetneq;":"⊋","supsetneqq;":"⫌","supsim;":"⫈","supsub;":"⫔","supsup;":"⫖","swarhk;":"⤦","swArr;":"⇙","swarr;":"↙","swarrow;":"↙","swnwar;":"⤪","szlig;":"ß","szlig":"ß","Tab;":"\\t","target;":"⌖","Tau;":"Τ","tau;":"τ","tbrk;":"⎴","Tcaron;":"Ť","tcaron;":"ť","Tcedil;":"Ţ","tcedil;":"ţ","Tcy;":"Т","tcy;":"т","tdot;":"⃛","telrec;":"⌕","Tfr;":"𝔗","tfr;":"𝔱","there4;":"∴","Therefore;":"∴","therefore;":"∴","Theta;":"Θ","theta;":"θ","thetasym;":"ϑ","thetav;":"ϑ","thickapprox;":"≈","thicksim;":"∼","ThickSpace;":"  ","thinsp;":" ","ThinSpace;":" 
","thkap;":"≈","thksim;":"∼","THORN;":"Þ","THORN":"Þ","thorn;":"þ","thorn":"þ","Tilde;":"∼","tilde;":"˜","TildeEqual;":"≃","TildeFullEqual;":"≅","TildeTilde;":"≈","times;":"×","times":"×","timesb;":"⊠","timesbar;":"⨱","timesd;":"⨰","tint;":"∭","toea;":"⤨","top;":"⊤","topbot;":"⌶","topcir;":"⫱","Topf;":"𝕋","topf;":"𝕥","topfork;":"⫚","tosa;":"⤩","tprime;":"‴","TRADE;":"™","trade;":"™","triangle;":"▵","triangledown;":"▿","triangleleft;":"◃","trianglelefteq;":"⊴","triangleq;":"≜","triangleright;":"▹","trianglerighteq;":"⊵","tridot;":"◬","trie;":"≜","triminus;":"⨺","TripleDot;":"⃛","triplus;":"⨹","trisb;":"⧍","tritime;":"⨻","trpezium;":"⏢","Tscr;":"𝒯","tscr;":"𝓉","TScy;":"Ц","tscy;":"ц","TSHcy;":"Ћ","tshcy;":"ћ","Tstrok;":"Ŧ","tstrok;":"ŧ","twixt;":"≬","twoheadleftarrow;":"↞","twoheadrightarrow;":"↠","Uacute;":"Ú","Uacute":"Ú","uacute;":"ú","uacute":"ú","Uarr;":"↟","uArr;":"⇑","uarr;":"↑","Uarrocir;":"⥉","Ubrcy;":"Ў","ubrcy;":"ў","Ubreve;":"Ŭ","ubreve;":"ŭ","Ucirc;":"Û","Ucirc":"Û","ucirc;":"û","ucirc":"û","Ucy;":"У","ucy;":"у","udarr;":"⇅","Udblac;":"Ű","udblac;":"ű","udhar;":"⥮","ufisht;":"⥾","Ufr;":"𝔘","ufr;":"𝔲","Ugrave;":"Ù","Ugrave":"Ù","ugrave;":"ù","ugrave":"ù","uHar;":"⥣","uharl;":"↿","uharr;":"↾","uhblk;":"▀","ulcorn;":"⌜","ulcorner;":"⌜","ulcrop;":"⌏","ultri;":"◸","Umacr;":"Ū","umacr;":"ū","uml;":"¨","uml":"¨","UnderBar;":"_","UnderBrace;":"⏟","UnderBracket;":"⎵","UnderParenthesis;":"⏝","Union;":"⋃","UnionPlus;":"⊎","Uogon;":"Ų","uogon;":"ų","Uopf;":"𝕌","uopf;":"𝕦","UpArrow;":"↑","Uparrow;":"⇑","uparrow;":"↑","UpArrowBar;":"⤒","UpArrowDownArrow;":"⇅","UpDownArrow;":"↕","Updownarrow;":"⇕","updownarrow;":"↕","UpEquilibrium;":"⥮","upharpoonleft;":"↿","upharpoonright;":"↾","uplus;":"⊎","UpperLeftArrow;":"↖","UpperRightArrow;":"↗","Upsi;":"ϒ","upsi;":"υ","upsih;":"ϒ","Upsilon;":"Υ","upsilon;":"υ","UpTee;":"⊥","UpTeeArrow;":"↥","upuparrows;":"⇈","urcorn;":"⌝","urcorner;":"⌝","urcrop;":"⌎","Uring;":"Ů","uring;":"ů","urtri;":"◹","Uscr;":"𝒰","uscr;":"𝓊","utdot;":"⋰","Utilde;":"Ũ","utilde;":"ũ","utri;":"▵","utrif;":"▴","uuarr;":"⇈","Uuml;":"Ü","Uuml":"Ü","uuml;":"ü","uuml":"ü","uwangle;":"⦧","vangrt;":"⦜","varepsilon;":"ϵ","varkappa;":"ϰ","varnothing;":"∅","varphi;":"ϕ","varpi;":"ϖ","varpropto;":"∝","vArr;":"⇕","varr;":"↕","varrho;":"ϱ","varsigma;":"ς","varsubsetneq;":"⊊︀","varsubsetneqq;":"⫋︀","varsupsetneq;":"⊋︀","varsupsetneqq;":"⫌︀","vartheta;":"ϑ","vartriangleleft;":"⊲","vartriangleright;":"⊳","Vbar;":"⫫","vBar;":"⫨","vBarv;":"⫩","Vcy;":"В","vcy;":"в","VDash;":"⊫","Vdash;":"⊩","vDash;":"⊨","vdash;":"⊢","Vdashl;":"⫦","Vee;":"⋁","vee;":"∨","veebar;":"⊻","veeeq;":"≚","vellip;":"⋮","Verbar;":"‖","verbar;":"|","Vert;":"‖","vert;":"|","VerticalBar;":"∣","VerticalLine;":"|","VerticalSeparator;":"❘","VerticalTilde;":"≀","VeryThinSpace;":" 
","Vfr;":"𝔙","vfr;":"𝔳","vltri;":"⊲","vnsub;":"⊂⃒","vnsup;":"⊃⃒","Vopf;":"𝕍","vopf;":"𝕧","vprop;":"∝","vrtri;":"⊳","Vscr;":"𝒱","vscr;":"𝓋","vsubnE;":"⫋︀","vsubne;":"⊊︀","vsupnE;":"⫌︀","vsupne;":"⊋︀","Vvdash;":"⊪","vzigzag;":"⦚","Wcirc;":"Ŵ","wcirc;":"ŵ","wedbar;":"⩟","Wedge;":"⋀","wedge;":"∧","wedgeq;":"≙","weierp;":"℘","Wfr;":"𝔚","wfr;":"𝔴","Wopf;":"𝕎","wopf;":"𝕨","wp;":"℘","wr;":"≀","wreath;":"≀","Wscr;":"𝒲","wscr;":"𝓌","xcap;":"⋂","xcirc;":"◯","xcup;":"⋃","xdtri;":"▽","Xfr;":"𝔛","xfr;":"𝔵","xhArr;":"⟺","xharr;":"⟷","Xi;":"Ξ","xi;":"ξ","xlArr;":"⟸","xlarr;":"⟵","xmap;":"⟼","xnis;":"⋻","xodot;":"⨀","Xopf;":"𝕏","xopf;":"𝕩","xoplus;":"⨁","xotime;":"⨂","xrArr;":"⟹","xrarr;":"⟶","Xscr;":"𝒳","xscr;":"𝓍","xsqcup;":"⨆","xuplus;":"⨄","xutri;":"△","xvee;":"⋁","xwedge;":"⋀","Yacute;":"Ý","Yacute":"Ý","yacute;":"ý","yacute":"ý","YAcy;":"Я","yacy;":"я","Ycirc;":"Ŷ","ycirc;":"ŷ","Ycy;":"Ы","ycy;":"ы","yen;":"¥","yen":"¥","Yfr;":"𝔜","yfr;":"𝔶","YIcy;":"Ї","yicy;":"ї","Yopf;":"𝕐","yopf;":"𝕪","Yscr;":"𝒴","yscr;":"𝓎","YUcy;":"Ю","yucy;":"ю","Yuml;":"Ÿ","yuml;":"ÿ","yuml":"ÿ","Zacute;":"Ź","zacute;":"ź","Zcaron;":"Ž","zcaron;":"ž","Zcy;":"З","zcy;":"з","Zdot;":"Ż","zdot;":"ż","zeetrf;":"ℨ","ZeroWidthSpace;":"​","Zeta;":"Ζ","zeta;":"ζ","Zfr;":"ℨ","zfr;":"𝔷","ZHcy;":"Ж","zhcy;":"ж","zigrarr;":"⇝","Zopf;":"ℤ","zopf;":"𝕫","Zscr;":"𝒵","zscr;":"𝓏","zwj;":"‍","zwnj;":"‌"}'); +module.exports = JSON.parse('{"9":"Tab;","10":"NewLine;","33":"excl;","34":"quot;","35":"num;","36":"dollar;","37":"percnt;","38":"amp;","39":"apos;","40":"lpar;","41":"rpar;","42":"midast;","43":"plus;","44":"comma;","46":"period;","47":"sol;","58":"colon;","59":"semi;","60":"lt;","61":"equals;","62":"gt;","63":"quest;","64":"commat;","91":"lsqb;","92":"bsol;","93":"rsqb;","94":"Hat;","95":"UnderBar;","96":"grave;","123":"lcub;","124":"VerticalLine;","125":"rcub;","160":"NonBreakingSpace;","161":"iexcl;","162":"cent;","163":"pound;","164":"curren;","165":"yen;","166":"brvbar;","167":"sect;","168":"uml;","169":"copy;","170":"ordf;","171":"laquo;","172":"not;","173":"shy;","174":"reg;","175":"strns;","176":"deg;","177":"pm;","178":"sup2;","179":"sup3;","180":"DiacriticalAcute;","181":"micro;","182":"para;","183":"middot;","184":"Cedilla;","185":"sup1;","186":"ordm;","187":"raquo;","188":"frac14;","189":"half;","190":"frac34;","191":"iquest;","192":"Agrave;","193":"Aacute;","194":"Acirc;","195":"Atilde;","196":"Auml;","197":"Aring;","198":"AElig;","199":"Ccedil;","200":"Egrave;","201":"Eacute;","202":"Ecirc;","203":"Euml;","204":"Igrave;","205":"Iacute;","206":"Icirc;","207":"Iuml;","208":"ETH;","209":"Ntilde;","210":"Ograve;","211":"Oacute;","212":"Ocirc;","213":"Otilde;","214":"Ouml;","215":"times;","216":"Oslash;","217":"Ugrave;","218":"Uacute;","219":"Ucirc;","220":"Uuml;","221":"Yacute;","222":"THORN;","223":"szlig;","224":"agrave;","225":"aacute;","226":"acirc;","227":"atilde;","228":"auml;","229":"aring;","230":"aelig;","231":"ccedil;","232":"egrave;","233":"eacute;","234":"ecirc;","235":"euml;","236":"igrave;","237":"iacute;","238":"icirc;","239":"iuml;","240":"eth;","241":"ntilde;","242":"ograve;","243":"oacute;","244":"ocirc;","245":"otilde;","246":"ouml;","247":"divide;","248":"oslash;","249":"ugrave;","250":"uacute;","251":"ucirc;","252":"uuml;","253":"yacute;","254":"thorn;","255":"yuml;","256":"Amacr;","257":"amacr;","258":"Abreve;","259":"abreve;","260":"Aogon;","261":"aogon;","262":"Cacute;","263":"cacute;","264":"Ccirc;","265":"ccirc;","266":"Cdot;","267":"cdot;","268":"Ccaron;","269":"ccaron;","270":"Dcaron;","271":"dcaron;","
272":"Dstrok;","273":"dstrok;","274":"Emacr;","275":"emacr;","278":"Edot;","279":"edot;","280":"Eogon;","281":"eogon;","282":"Ecaron;","283":"ecaron;","284":"Gcirc;","285":"gcirc;","286":"Gbreve;","287":"gbreve;","288":"Gdot;","289":"gdot;","290":"Gcedil;","292":"Hcirc;","293":"hcirc;","294":"Hstrok;","295":"hstrok;","296":"Itilde;","297":"itilde;","298":"Imacr;","299":"imacr;","302":"Iogon;","303":"iogon;","304":"Idot;","305":"inodot;","306":"IJlig;","307":"ijlig;","308":"Jcirc;","309":"jcirc;","310":"Kcedil;","311":"kcedil;","312":"kgreen;","313":"Lacute;","314":"lacute;","315":"Lcedil;","316":"lcedil;","317":"Lcaron;","318":"lcaron;","319":"Lmidot;","320":"lmidot;","321":"Lstrok;","322":"lstrok;","323":"Nacute;","324":"nacute;","325":"Ncedil;","326":"ncedil;","327":"Ncaron;","328":"ncaron;","329":"napos;","330":"ENG;","331":"eng;","332":"Omacr;","333":"omacr;","336":"Odblac;","337":"odblac;","338":"OElig;","339":"oelig;","340":"Racute;","341":"racute;","342":"Rcedil;","343":"rcedil;","344":"Rcaron;","345":"rcaron;","346":"Sacute;","347":"sacute;","348":"Scirc;","349":"scirc;","350":"Scedil;","351":"scedil;","352":"Scaron;","353":"scaron;","354":"Tcedil;","355":"tcedil;","356":"Tcaron;","357":"tcaron;","358":"Tstrok;","359":"tstrok;","360":"Utilde;","361":"utilde;","362":"Umacr;","363":"umacr;","364":"Ubreve;","365":"ubreve;","366":"Uring;","367":"uring;","368":"Udblac;","369":"udblac;","370":"Uogon;","371":"uogon;","372":"Wcirc;","373":"wcirc;","374":"Ycirc;","375":"ycirc;","376":"Yuml;","377":"Zacute;","378":"zacute;","379":"Zdot;","380":"zdot;","381":"Zcaron;","382":"zcaron;","402":"fnof;","437":"imped;","501":"gacute;","567":"jmath;","710":"circ;","711":"Hacek;","728":"breve;","729":"dot;","730":"ring;","731":"ogon;","732":"tilde;","733":"DiacriticalDoubleAcute;","785":"DownBreve;","913":"Alpha;","914":"Beta;","915":"Gamma;","916":"Delta;","917":"Epsilon;","918":"Zeta;","919":"Eta;","920":"Theta;","921":"Iota;","922":"Kappa;","923":"Lambda;","924":"Mu;","925":"Nu;","926":"Xi;","927":"Omicron;","928":"Pi;","929":"Rho;","931":"Sigma;","932":"Tau;","933":"Upsilon;","934":"Phi;","935":"Chi;","936":"Psi;","937":"Omega;","945":"alpha;","946":"beta;","947":"gamma;","948":"delta;","949":"epsilon;","950":"zeta;","951":"eta;","952":"theta;","953":"iota;","954":"kappa;","955":"lambda;","956":"mu;","957":"nu;","958":"xi;","959":"omicron;","960":"pi;","961":"rho;","962":"varsigma;","963":"sigma;","964":"tau;","965":"upsilon;","966":"phi;","967":"chi;","968":"psi;","969":"omega;","977":"vartheta;","978":"upsih;","981":"varphi;","982":"varpi;","988":"Gammad;","989":"gammad;","1008":"varkappa;","1009":"varrho;","1013":"varepsilon;","1014":"bepsi;","1025":"IOcy;","1026":"DJcy;","1027":"GJcy;","1028":"Jukcy;","1029":"DScy;","1030":"Iukcy;","1031":"YIcy;","1032":"Jsercy;","1033":"LJcy;","1034":"NJcy;","1035":"TSHcy;","1036":"KJcy;","1038":"Ubrcy;","1039":"DZcy;","1040":"Acy;","1041":"Bcy;","1042":"Vcy;","1043":"Gcy;","1044":"Dcy;","1045":"IEcy;","1046":"ZHcy;","1047":"Zcy;","1048":"Icy;","1049":"Jcy;","1050":"Kcy;","1051":"Lcy;","1052":"Mcy;","1053":"Ncy;","1054":"Ocy;","1055":"Pcy;","1056":"Rcy;","1057":"Scy;","1058":"Tcy;","1059":"Ucy;","1060":"Fcy;","1061":"KHcy;","1062":"TScy;","1063":"CHcy;","1064":"SHcy;","1065":"SHCHcy;","1066":"HARDcy;","1067":"Ycy;","1068":"SOFTcy;","1069":"Ecy;","1070":"YUcy;","1071":"YAcy;","1072":"acy;","1073":"bcy;","1074":"vcy;","1075":"gcy;","1076":"dcy;","1077":"iecy;","1078":"zhcy;","1079":"zcy;","1080":"icy;","1081":"jcy;","1082":"kcy;","1083":"lcy;","1084":"mcy;","108
5":"ncy;","1086":"ocy;","1087":"pcy;","1088":"rcy;","1089":"scy;","1090":"tcy;","1091":"ucy;","1092":"fcy;","1093":"khcy;","1094":"tscy;","1095":"chcy;","1096":"shcy;","1097":"shchcy;","1098":"hardcy;","1099":"ycy;","1100":"softcy;","1101":"ecy;","1102":"yucy;","1103":"yacy;","1105":"iocy;","1106":"djcy;","1107":"gjcy;","1108":"jukcy;","1109":"dscy;","1110":"iukcy;","1111":"yicy;","1112":"jsercy;","1113":"ljcy;","1114":"njcy;","1115":"tshcy;","1116":"kjcy;","1118":"ubrcy;","1119":"dzcy;","8194":"ensp;","8195":"emsp;","8196":"emsp13;","8197":"emsp14;","8199":"numsp;","8200":"puncsp;","8201":"ThinSpace;","8202":"VeryThinSpace;","8203":"ZeroWidthSpace;","8204":"zwnj;","8205":"zwj;","8206":"lrm;","8207":"rlm;","8208":"hyphen;","8211":"ndash;","8212":"mdash;","8213":"horbar;","8214":"Vert;","8216":"OpenCurlyQuote;","8217":"rsquor;","8218":"sbquo;","8220":"OpenCurlyDoubleQuote;","8221":"rdquor;","8222":"ldquor;","8224":"dagger;","8225":"ddagger;","8226":"bullet;","8229":"nldr;","8230":"mldr;","8240":"permil;","8241":"pertenk;","8242":"prime;","8243":"Prime;","8244":"tprime;","8245":"bprime;","8249":"lsaquo;","8250":"rsaquo;","8254":"OverBar;","8257":"caret;","8259":"hybull;","8260":"frasl;","8271":"bsemi;","8279":"qprime;","8287":"MediumSpace;","8288":"NoBreak;","8289":"ApplyFunction;","8290":"it;","8291":"InvisibleComma;","8364":"euro;","8411":"TripleDot;","8412":"DotDot;","8450":"Copf;","8453":"incare;","8458":"gscr;","8459":"Hscr;","8460":"Poincareplane;","8461":"quaternions;","8462":"planckh;","8463":"plankv;","8464":"Iscr;","8465":"imagpart;","8466":"Lscr;","8467":"ell;","8469":"Nopf;","8470":"numero;","8471":"copysr;","8472":"wp;","8473":"primes;","8474":"rationals;","8475":"Rscr;","8476":"Rfr;","8477":"Ropf;","8478":"rx;","8482":"trade;","8484":"Zopf;","8487":"mho;","8488":"Zfr;","8489":"iiota;","8492":"Bscr;","8493":"Cfr;","8495":"escr;","8496":"expectation;","8497":"Fscr;","8499":"phmmat;","8500":"oscr;","8501":"aleph;","8502":"beth;","8503":"gimel;","8504":"daleth;","8517":"DD;","8518":"DifferentialD;","8519":"exponentiale;","8520":"ImaginaryI;","8531":"frac13;","8532":"frac23;","8533":"frac15;","8534":"frac25;","8535":"frac35;","8536":"frac45;","8537":"frac16;","8538":"frac56;","8539":"frac18;","8540":"frac38;","8541":"frac58;","8542":"frac78;","8592":"slarr;","8593":"uparrow;","8594":"srarr;","8595":"ShortDownArrow;","8596":"leftrightarrow;","8597":"varr;","8598":"UpperLeftArrow;","8599":"UpperRightArrow;","8600":"searrow;","8601":"swarrow;","8602":"nleftarrow;","8603":"nrightarrow;","8605":"rightsquigarrow;","8606":"twoheadleftarrow;","8607":"Uarr;","8608":"twoheadrightarrow;","8609":"Darr;","8610":"leftarrowtail;","8611":"rightarrowtail;","8612":"mapstoleft;","8613":"UpTeeArrow;","8614":"RightTeeArrow;","8615":"mapstodown;","8617":"larrhk;","8618":"rarrhk;","8619":"looparrowleft;","8620":"rarrlp;","8621":"leftrightsquigarrow;","8622":"nleftrightarrow;","8624":"lsh;","8625":"rsh;","8626":"ldsh;","8627":"rdsh;","8629":"crarr;","8630":"curvearrowleft;","8631":"curvearrowright;","8634":"olarr;","8635":"orarr;","8636":"lharu;","8637":"lhard;","8638":"upharpoonright;","8639":"upharpoonleft;","8640":"RightVector;","8641":"rightharpoondown;","8642":"RightDownVector;","8643":"LeftDownVector;","8644":"rlarr;","8645":"UpArrowDownArrow;","8646":"lrarr;","8647":"llarr;","8648":"uuarr;","8649":"rrarr;","8650":"downdownarrows;","8651":"ReverseEquilibrium;","8652":"rlhar;","8653":"nLeftarrow;","8654":"nLeftrightarrow;","8655":"nRightarrow;","8656":"Leftarrow;","8657":"Uparrow;","8658":"Rightarrow;"
,"8659":"Downarrow;","8660":"Leftrightarrow;","8661":"vArr;","8662":"nwArr;","8663":"neArr;","8664":"seArr;","8665":"swArr;","8666":"Lleftarrow;","8667":"Rrightarrow;","8669":"zigrarr;","8676":"LeftArrowBar;","8677":"RightArrowBar;","8693":"duarr;","8701":"loarr;","8702":"roarr;","8703":"hoarr;","8704":"forall;","8705":"complement;","8706":"PartialD;","8707":"Exists;","8708":"NotExists;","8709":"varnothing;","8711":"nabla;","8712":"isinv;","8713":"notinva;","8715":"SuchThat;","8716":"NotReverseElement;","8719":"Product;","8720":"Coproduct;","8721":"sum;","8722":"minus;","8723":"mp;","8724":"plusdo;","8726":"ssetmn;","8727":"lowast;","8728":"SmallCircle;","8730":"Sqrt;","8733":"vprop;","8734":"infin;","8735":"angrt;","8736":"angle;","8737":"measuredangle;","8738":"angsph;","8739":"VerticalBar;","8740":"nsmid;","8741":"spar;","8742":"nspar;","8743":"wedge;","8744":"vee;","8745":"cap;","8746":"cup;","8747":"Integral;","8748":"Int;","8749":"tint;","8750":"oint;","8751":"DoubleContourIntegral;","8752":"Cconint;","8753":"cwint;","8754":"cwconint;","8755":"CounterClockwiseContourIntegral;","8756":"therefore;","8757":"because;","8758":"ratio;","8759":"Proportion;","8760":"minusd;","8762":"mDDot;","8763":"homtht;","8764":"Tilde;","8765":"bsim;","8766":"mstpos;","8767":"acd;","8768":"wreath;","8769":"nsim;","8770":"esim;","8771":"TildeEqual;","8772":"nsimeq;","8773":"TildeFullEqual;","8774":"simne;","8775":"NotTildeFullEqual;","8776":"TildeTilde;","8777":"NotTildeTilde;","8778":"approxeq;","8779":"apid;","8780":"bcong;","8781":"CupCap;","8782":"HumpDownHump;","8783":"HumpEqual;","8784":"esdot;","8785":"eDot;","8786":"fallingdotseq;","8787":"risingdotseq;","8788":"coloneq;","8789":"eqcolon;","8790":"eqcirc;","8791":"cire;","8793":"wedgeq;","8794":"veeeq;","8796":"trie;","8799":"questeq;","8800":"NotEqual;","8801":"equiv;","8802":"NotCongruent;","8804":"leq;","8805":"GreaterEqual;","8806":"LessFullEqual;","8807":"GreaterFullEqual;","8808":"lneqq;","8809":"gneqq;","8810":"NestedLessLess;","8811":"NestedGreaterGreater;","8812":"twixt;","8813":"NotCupCap;","8814":"NotLess;","8815":"NotGreater;","8816":"NotLessEqual;","8817":"NotGreaterEqual;","8818":"lsim;","8819":"gtrsim;","8820":"NotLessTilde;","8821":"NotGreaterTilde;","8822":"lg;","8823":"gtrless;","8824":"ntlg;","8825":"ntgl;","8826":"Precedes;","8827":"Succeeds;","8828":"PrecedesSlantEqual;","8829":"SucceedsSlantEqual;","8830":"prsim;","8831":"succsim;","8832":"nprec;","8833":"nsucc;","8834":"subset;","8835":"supset;","8836":"nsub;","8837":"nsup;","8838":"SubsetEqual;","8839":"supseteq;","8840":"nsubseteq;","8841":"nsupseteq;","8842":"subsetneq;","8843":"supsetneq;","8845":"cupdot;","8846":"uplus;","8847":"SquareSubset;","8848":"SquareSuperset;","8849":"SquareSubsetEqual;","8850":"SquareSupersetEqual;","8851":"SquareIntersection;","8852":"SquareUnion;","8853":"oplus;","8854":"ominus;","8855":"otimes;","8856":"osol;","8857":"odot;","8858":"ocir;","8859":"oast;","8861":"odash;","8862":"plusb;","8863":"minusb;","8864":"timesb;","8865":"sdotb;","8866":"vdash;","8867":"LeftTee;","8868":"top;","8869":"UpTee;","8871":"models;","8872":"vDash;","8873":"Vdash;","8874":"Vvdash;","8875":"VDash;","8876":"nvdash;","8877":"nvDash;","8878":"nVdash;","8879":"nVDash;","8880":"prurel;","8882":"vltri;","8883":"vrtri;","8884":"trianglelefteq;","8885":"trianglerighteq;","8886":"origof;","8887":"imof;","8888":"mumap;","8889":"hercon;","8890":"intercal;","8891":"veebar;","8893":"barvee;","8894":"angrtvb;","8895":"lrtri;","8896":"xwedge;","8897":"xvee;","8898":"xcap;","8899
":"xcup;","8900":"diamond;","8901":"sdot;","8902":"Star;","8903":"divonx;","8904":"bowtie;","8905":"ltimes;","8906":"rtimes;","8907":"lthree;","8908":"rthree;","8909":"bsime;","8910":"cuvee;","8911":"cuwed;","8912":"Subset;","8913":"Supset;","8914":"Cap;","8915":"Cup;","8916":"pitchfork;","8917":"epar;","8918":"ltdot;","8919":"gtrdot;","8920":"Ll;","8921":"ggg;","8922":"LessEqualGreater;","8923":"gtreqless;","8926":"curlyeqprec;","8927":"curlyeqsucc;","8928":"nprcue;","8929":"nsccue;","8930":"nsqsube;","8931":"nsqsupe;","8934":"lnsim;","8935":"gnsim;","8936":"prnsim;","8937":"succnsim;","8938":"ntriangleleft;","8939":"ntriangleright;","8940":"ntrianglelefteq;","8941":"ntrianglerighteq;","8942":"vellip;","8943":"ctdot;","8944":"utdot;","8945":"dtdot;","8946":"disin;","8947":"isinsv;","8948":"isins;","8949":"isindot;","8950":"notinvc;","8951":"notinvb;","8953":"isinE;","8954":"nisd;","8955":"xnis;","8956":"nis;","8957":"notnivc;","8958":"notnivb;","8965":"barwedge;","8966":"doublebarwedge;","8968":"LeftCeiling;","8969":"RightCeiling;","8970":"lfloor;","8971":"RightFloor;","8972":"drcrop;","8973":"dlcrop;","8974":"urcrop;","8975":"ulcrop;","8976":"bnot;","8978":"profline;","8979":"profsurf;","8981":"telrec;","8982":"target;","8988":"ulcorner;","8989":"urcorner;","8990":"llcorner;","8991":"lrcorner;","8994":"sfrown;","8995":"ssmile;","9005":"cylcty;","9006":"profalar;","9014":"topbot;","9021":"ovbar;","9023":"solbar;","9084":"angzarr;","9136":"lmoustache;","9137":"rmoustache;","9140":"tbrk;","9141":"UnderBracket;","9142":"bbrktbrk;","9180":"OverParenthesis;","9181":"UnderParenthesis;","9182":"OverBrace;","9183":"UnderBrace;","9186":"trpezium;","9191":"elinters;","9251":"blank;","9416":"oS;","9472":"HorizontalLine;","9474":"boxv;","9484":"boxdr;","9488":"boxdl;","9492":"boxur;","9496":"boxul;","9500":"boxvr;","9508":"boxvl;","9516":"boxhd;","9524":"boxhu;","9532":"boxvh;","9552":"boxH;","9553":"boxV;","9554":"boxdR;","9555":"boxDr;","9556":"boxDR;","9557":"boxdL;","9558":"boxDl;","9559":"boxDL;","9560":"boxuR;","9561":"boxUr;","9562":"boxUR;","9563":"boxuL;","9564":"boxUl;","9565":"boxUL;","9566":"boxvR;","9567":"boxVr;","9568":"boxVR;","9569":"boxvL;","9570":"boxVl;","9571":"boxVL;","9572":"boxHd;","9573":"boxhD;","9574":"boxHD;","9575":"boxHu;","9576":"boxhU;","9577":"boxHU;","9578":"boxvH;","9579":"boxVh;","9580":"boxVH;","9600":"uhblk;","9604":"lhblk;","9608":"block;","9617":"blk14;","9618":"blk12;","9619":"blk34;","9633":"square;","9642":"squf;","9643":"EmptyVerySmallSquare;","9645":"rect;","9646":"marker;","9649":"fltns;","9651":"xutri;","9652":"utrif;","9653":"utri;","9656":"rtrif;","9657":"triangleright;","9661":"xdtri;","9662":"dtrif;","9663":"triangledown;","9666":"ltrif;","9667":"triangleleft;","9674":"lozenge;","9675":"cir;","9708":"tridot;","9711":"xcirc;","9720":"ultri;","9721":"urtri;","9722":"lltri;","9723":"EmptySmallSquare;","9724":"FilledSmallSquare;","9733":"starf;","9734":"star;","9742":"phone;","9792":"female;","9794":"male;","9824":"spadesuit;","9827":"clubsuit;","9829":"heartsuit;","9830":"diams;","9834":"sung;","9837":"flat;","9838":"natural;","9839":"sharp;","10003":"checkmark;","10007":"cross;","10016":"maltese;","10038":"sext;","10072":"VerticalSeparator;","10098":"lbbrk;","10099":"rbbrk;","10184":"bsolhsub;","10185":"suphsol;","10214":"lobrk;","10215":"robrk;","10216":"LeftAngleBracket;","10217":"RightAngleBracket;","10218":"Lang;","10219":"Rang;","10220":"loang;","10221":"roang;","10229":"xlarr;","10230":"xrarr;","10231":"xharr;","10232":"xlArr;","10233":"xrArr;","1
0234":"xhArr;","10236":"xmap;","10239":"dzigrarr;","10498":"nvlArr;","10499":"nvrArr;","10500":"nvHarr;","10501":"Map;","10508":"lbarr;","10509":"rbarr;","10510":"lBarr;","10511":"rBarr;","10512":"RBarr;","10513":"DDotrahd;","10514":"UpArrowBar;","10515":"DownArrowBar;","10518":"Rarrtl;","10521":"latail;","10522":"ratail;","10523":"lAtail;","10524":"rAtail;","10525":"larrfs;","10526":"rarrfs;","10527":"larrbfs;","10528":"rarrbfs;","10531":"nwarhk;","10532":"nearhk;","10533":"searhk;","10534":"swarhk;","10535":"nwnear;","10536":"toea;","10537":"tosa;","10538":"swnwar;","10547":"rarrc;","10549":"cudarrr;","10550":"ldca;","10551":"rdca;","10552":"cudarrl;","10553":"larrpl;","10556":"curarrm;","10557":"cularrp;","10565":"rarrpl;","10568":"harrcir;","10569":"Uarrocir;","10570":"lurdshar;","10571":"ldrushar;","10574":"LeftRightVector;","10575":"RightUpDownVector;","10576":"DownLeftRightVector;","10577":"LeftUpDownVector;","10578":"LeftVectorBar;","10579":"RightVectorBar;","10580":"RightUpVectorBar;","10581":"RightDownVectorBar;","10582":"DownLeftVectorBar;","10583":"DownRightVectorBar;","10584":"LeftUpVectorBar;","10585":"LeftDownVectorBar;","10586":"LeftTeeVector;","10587":"RightTeeVector;","10588":"RightUpTeeVector;","10589":"RightDownTeeVector;","10590":"DownLeftTeeVector;","10591":"DownRightTeeVector;","10592":"LeftUpTeeVector;","10593":"LeftDownTeeVector;","10594":"lHar;","10595":"uHar;","10596":"rHar;","10597":"dHar;","10598":"luruhar;","10599":"ldrdhar;","10600":"ruluhar;","10601":"rdldhar;","10602":"lharul;","10603":"llhard;","10604":"rharul;","10605":"lrhard;","10606":"UpEquilibrium;","10607":"ReverseUpEquilibrium;","10608":"RoundImplies;","10609":"erarr;","10610":"simrarr;","10611":"larrsim;","10612":"rarrsim;","10613":"rarrap;","10614":"ltlarr;","10616":"gtrarr;","10617":"subrarr;","10619":"suplarr;","10620":"lfisht;","10621":"rfisht;","10622":"ufisht;","10623":"dfisht;","10629":"lopar;","10630":"ropar;","10635":"lbrke;","10636":"rbrke;","10637":"lbrkslu;","10638":"rbrksld;","10639":"lbrksld;","10640":"rbrkslu;","10641":"langd;","10642":"rangd;","10643":"lparlt;","10644":"rpargt;","10645":"gtlPar;","10646":"ltrPar;","10650":"vzigzag;","10652":"vangrt;","10653":"angrtvbd;","10660":"ange;","10661":"range;","10662":"dwangle;","10663":"uwangle;","10664":"angmsdaa;","10665":"angmsdab;","10666":"angmsdac;","10667":"angmsdad;","10668":"angmsdae;","10669":"angmsdaf;","10670":"angmsdag;","10671":"angmsdah;","10672":"bemptyv;","10673":"demptyv;","10674":"cemptyv;","10675":"raemptyv;","10676":"laemptyv;","10677":"ohbar;","10678":"omid;","10679":"opar;","10681":"operp;","10683":"olcross;","10684":"odsold;","10686":"olcir;","10687":"ofcir;","10688":"olt;","10689":"ogt;","10690":"cirscir;","10691":"cirE;","10692":"solb;","10693":"bsolb;","10697":"boxbox;","10701":"trisb;","10702":"rtriltri;","10703":"LeftTriangleBar;","10704":"RightTriangleBar;","10716":"iinfin;","10717":"infintie;","10718":"nvinfin;","10723":"eparsl;","10724":"smeparsl;","10725":"eqvparsl;","10731":"lozf;","10740":"RuleDelayed;","10742":"dsol;","10752":"xodot;","10753":"xoplus;","10754":"xotime;","10756":"xuplus;","10758":"xsqcup;","10764":"qint;","10765":"fpartint;","10768":"cirfnint;","10769":"awint;","10770":"rppolint;","10771":"scpolint;","10772":"npolint;","10773":"pointint;","10774":"quatint;","10775":"intlarhk;","10786":"pluscir;","10787":"plusacir;","10788":"simplus;","10789":"plusdu;","10790":"plussim;","10791":"plustwo;","10793":"mcomma;","10794":"minusdu;","10797":"loplus;","10798":"roplus;","10799":"Cross;","10800":"tim
esd;","10801":"timesbar;","10803":"smashp;","10804":"lotimes;","10805":"rotimes;","10806":"otimesas;","10807":"Otimes;","10808":"odiv;","10809":"triplus;","10810":"triminus;","10811":"tritime;","10812":"iprod;","10815":"amalg;","10816":"capdot;","10818":"ncup;","10819":"ncap;","10820":"capand;","10821":"cupor;","10822":"cupcap;","10823":"capcup;","10824":"cupbrcap;","10825":"capbrcup;","10826":"cupcup;","10827":"capcap;","10828":"ccups;","10829":"ccaps;","10832":"ccupssm;","10835":"And;","10836":"Or;","10837":"andand;","10838":"oror;","10839":"orslope;","10840":"andslope;","10842":"andv;","10843":"orv;","10844":"andd;","10845":"ord;","10847":"wedbar;","10854":"sdote;","10858":"simdot;","10861":"congdot;","10862":"easter;","10863":"apacir;","10864":"apE;","10865":"eplus;","10866":"pluse;","10867":"Esim;","10868":"Colone;","10869":"Equal;","10871":"eDDot;","10872":"equivDD;","10873":"ltcir;","10874":"gtcir;","10875":"ltquest;","10876":"gtquest;","10877":"LessSlantEqual;","10878":"GreaterSlantEqual;","10879":"lesdot;","10880":"gesdot;","10881":"lesdoto;","10882":"gesdoto;","10883":"lesdotor;","10884":"gesdotol;","10885":"lessapprox;","10886":"gtrapprox;","10887":"lneq;","10888":"gneq;","10889":"lnapprox;","10890":"gnapprox;","10891":"lesseqqgtr;","10892":"gtreqqless;","10893":"lsime;","10894":"gsime;","10895":"lsimg;","10896":"gsiml;","10897":"lgE;","10898":"glE;","10899":"lesges;","10900":"gesles;","10901":"eqslantless;","10902":"eqslantgtr;","10903":"elsdot;","10904":"egsdot;","10905":"el;","10906":"eg;","10909":"siml;","10910":"simg;","10911":"simlE;","10912":"simgE;","10913":"LessLess;","10914":"GreaterGreater;","10916":"glj;","10917":"gla;","10918":"ltcc;","10919":"gtcc;","10920":"lescc;","10921":"gescc;","10922":"smt;","10923":"lat;","10924":"smte;","10925":"late;","10926":"bumpE;","10927":"preceq;","10928":"succeq;","10931":"prE;","10932":"scE;","10933":"prnE;","10934":"succneqq;","10935":"precapprox;","10936":"succapprox;","10937":"prnap;","10938":"succnapprox;","10939":"Pr;","10940":"Sc;","10941":"subdot;","10942":"supdot;","10943":"subplus;","10944":"supplus;","10945":"submult;","10946":"supmult;","10947":"subedot;","10948":"supedot;","10949":"subseteqq;","10950":"supseteqq;","10951":"subsim;","10952":"supsim;","10955":"subsetneqq;","10956":"supsetneqq;","10959":"csub;","10960":"csup;","10961":"csube;","10962":"csupe;","10963":"subsup;","10964":"supsub;","10965":"subsub;","10966":"supsup;","10967":"suphsub;","10968":"supdsub;","10969":"forkv;","10970":"topfork;","10971":"mlcp;","10980":"DoubleLeftTee;","10982":"Vdashl;","10983":"Barv;","10984":"vBar;","10985":"vBarv;","10987":"Vbar;","10988":"Not;","10989":"bNot;","10990":"rnmid;","10991":"cirmid;","10992":"midcir;","10993":"topcir;","10994":"nhpar;","10995":"parsim;","11005":"parsl;","64256":"fflig;","64257":"filig;","64258":"fllig;","64259":"ffilig;","64260":"ffllig;"}'); /***/ }), -/***/ 3123: +/***/ 6318: /***/ ((module) => { "use strict"; -module.exports = 
JSON.parse('{"9":"Tab;","10":"NewLine;","33":"excl;","34":"quot;","35":"num;","36":"dollar;","37":"percnt;","38":"amp;","39":"apos;","40":"lpar;","41":"rpar;","42":"midast;","43":"plus;","44":"comma;","46":"period;","47":"sol;","58":"colon;","59":"semi;","60":"lt;","61":"equals;","62":"gt;","63":"quest;","64":"commat;","91":"lsqb;","92":"bsol;","93":"rsqb;","94":"Hat;","95":"UnderBar;","96":"grave;","123":"lcub;","124":"VerticalLine;","125":"rcub;","160":"NonBreakingSpace;","161":"iexcl;","162":"cent;","163":"pound;","164":"curren;","165":"yen;","166":"brvbar;","167":"sect;","168":"uml;","169":"copy;","170":"ordf;","171":"laquo;","172":"not;","173":"shy;","174":"reg;","175":"strns;","176":"deg;","177":"pm;","178":"sup2;","179":"sup3;","180":"DiacriticalAcute;","181":"micro;","182":"para;","183":"middot;","184":"Cedilla;","185":"sup1;","186":"ordm;","187":"raquo;","188":"frac14;","189":"half;","190":"frac34;","191":"iquest;","192":"Agrave;","193":"Aacute;","194":"Acirc;","195":"Atilde;","196":"Auml;","197":"Aring;","198":"AElig;","199":"Ccedil;","200":"Egrave;","201":"Eacute;","202":"Ecirc;","203":"Euml;","204":"Igrave;","205":"Iacute;","206":"Icirc;","207":"Iuml;","208":"ETH;","209":"Ntilde;","210":"Ograve;","211":"Oacute;","212":"Ocirc;","213":"Otilde;","214":"Ouml;","215":"times;","216":"Oslash;","217":"Ugrave;","218":"Uacute;","219":"Ucirc;","220":"Uuml;","221":"Yacute;","222":"THORN;","223":"szlig;","224":"agrave;","225":"aacute;","226":"acirc;","227":"atilde;","228":"auml;","229":"aring;","230":"aelig;","231":"ccedil;","232":"egrave;","233":"eacute;","234":"ecirc;","235":"euml;","236":"igrave;","237":"iacute;","238":"icirc;","239":"iuml;","240":"eth;","241":"ntilde;","242":"ograve;","243":"oacute;","244":"ocirc;","245":"otilde;","246":"ouml;","247":"divide;","248":"oslash;","249":"ugrave;","250":"uacute;","251":"ucirc;","252":"uuml;","253":"yacute;","254":"thorn;","255":"yuml;","256":"Amacr;","257":"amacr;","258":"Abreve;","259":"abreve;","260":"Aogon;","261":"aogon;","262":"Cacute;","263":"cacute;","264":"Ccirc;","265":"ccirc;","266":"Cdot;","267":"cdot;","268":"Ccaron;","269":"ccaron;","270":"Dcaron;","271":"dcaron;","272":"Dstrok;","273":"dstrok;","274":"Emacr;","275":"emacr;","278":"Edot;","279":"edot;","280":"Eogon;","281":"eogon;","282":"Ecaron;","283":"ecaron;","284":"Gcirc;","285":"gcirc;","286":"Gbreve;","287":"gbreve;","288":"Gdot;","289":"gdot;","290":"Gcedil;","292":"Hcirc;","293":"hcirc;","294":"Hstrok;","295":"hstrok;","296":"Itilde;","297":"itilde;","298":"Imacr;","299":"imacr;","302":"Iogon;","303":"iogon;","304":"Idot;","305":"inodot;","306":"IJlig;","307":"ijlig;","308":"Jcirc;","309":"jcirc;","310":"Kcedil;","311":"kcedil;","312":"kgreen;","313":"Lacute;","314":"lacute;","315":"Lcedil;","316":"lcedil;","317":"Lcaron;","318":"lcaron;","319":"Lmidot;","320":"lmidot;","321":"Lstrok;","322":"lstrok;","323":"Nacute;","324":"nacute;","325":"Ncedil;","326":"ncedil;","327":"Ncaron;","328":"ncaron;","329":"napos;","330":"ENG;","331":"eng;","332":"Omacr;","333":"omacr;","336":"Odblac;","337":"odblac;","338":"OElig;","339":"oelig;","340":"Racute;","341":"racute;","342":"Rcedil;","343":"rcedil;","344":"Rcaron;","345":"rcaron;","346":"Sacute;","347":"sacute;","348":"Scirc;","349":"scirc;","350":"Scedil;","351":"scedil;","352":"Scaron;","353":"scaron;","354":"Tcedil;","355":"tcedil;","356":"Tcaron;","357":"tcaron;","358":"Tstrok;","359":"tstrok;","360":"Utilde;","361":"utilde;","362":"Umacr;","363":"umacr;","364":"Ubreve;","365":"ubreve;","366":"Uring;","367":"uring;","368":"Udbla
c;","369":"udblac;","370":"Uogon;","371":"uogon;","372":"Wcirc;","373":"wcirc;","374":"Ycirc;","375":"ycirc;","376":"Yuml;","377":"Zacute;","378":"zacute;","379":"Zdot;","380":"zdot;","381":"Zcaron;","382":"zcaron;","402":"fnof;","437":"imped;","501":"gacute;","567":"jmath;","710":"circ;","711":"Hacek;","728":"breve;","729":"dot;","730":"ring;","731":"ogon;","732":"tilde;","733":"DiacriticalDoubleAcute;","785":"DownBreve;","913":"Alpha;","914":"Beta;","915":"Gamma;","916":"Delta;","917":"Epsilon;","918":"Zeta;","919":"Eta;","920":"Theta;","921":"Iota;","922":"Kappa;","923":"Lambda;","924":"Mu;","925":"Nu;","926":"Xi;","927":"Omicron;","928":"Pi;","929":"Rho;","931":"Sigma;","932":"Tau;","933":"Upsilon;","934":"Phi;","935":"Chi;","936":"Psi;","937":"Omega;","945":"alpha;","946":"beta;","947":"gamma;","948":"delta;","949":"epsilon;","950":"zeta;","951":"eta;","952":"theta;","953":"iota;","954":"kappa;","955":"lambda;","956":"mu;","957":"nu;","958":"xi;","959":"omicron;","960":"pi;","961":"rho;","962":"varsigma;","963":"sigma;","964":"tau;","965":"upsilon;","966":"phi;","967":"chi;","968":"psi;","969":"omega;","977":"vartheta;","978":"upsih;","981":"varphi;","982":"varpi;","988":"Gammad;","989":"gammad;","1008":"varkappa;","1009":"varrho;","1013":"varepsilon;","1014":"bepsi;","1025":"IOcy;","1026":"DJcy;","1027":"GJcy;","1028":"Jukcy;","1029":"DScy;","1030":"Iukcy;","1031":"YIcy;","1032":"Jsercy;","1033":"LJcy;","1034":"NJcy;","1035":"TSHcy;","1036":"KJcy;","1038":"Ubrcy;","1039":"DZcy;","1040":"Acy;","1041":"Bcy;","1042":"Vcy;","1043":"Gcy;","1044":"Dcy;","1045":"IEcy;","1046":"ZHcy;","1047":"Zcy;","1048":"Icy;","1049":"Jcy;","1050":"Kcy;","1051":"Lcy;","1052":"Mcy;","1053":"Ncy;","1054":"Ocy;","1055":"Pcy;","1056":"Rcy;","1057":"Scy;","1058":"Tcy;","1059":"Ucy;","1060":"Fcy;","1061":"KHcy;","1062":"TScy;","1063":"CHcy;","1064":"SHcy;","1065":"SHCHcy;","1066":"HARDcy;","1067":"Ycy;","1068":"SOFTcy;","1069":"Ecy;","1070":"YUcy;","1071":"YAcy;","1072":"acy;","1073":"bcy;","1074":"vcy;","1075":"gcy;","1076":"dcy;","1077":"iecy;","1078":"zhcy;","1079":"zcy;","1080":"icy;","1081":"jcy;","1082":"kcy;","1083":"lcy;","1084":"mcy;","1085":"ncy;","1086":"ocy;","1087":"pcy;","1088":"rcy;","1089":"scy;","1090":"tcy;","1091":"ucy;","1092":"fcy;","1093":"khcy;","1094":"tscy;","1095":"chcy;","1096":"shcy;","1097":"shchcy;","1098":"hardcy;","1099":"ycy;","1100":"softcy;","1101":"ecy;","1102":"yucy;","1103":"yacy;","1105":"iocy;","1106":"djcy;","1107":"gjcy;","1108":"jukcy;","1109":"dscy;","1110":"iukcy;","1111":"yicy;","1112":"jsercy;","1113":"ljcy;","1114":"njcy;","1115":"tshcy;","1116":"kjcy;","1118":"ubrcy;","1119":"dzcy;","8194":"ensp;","8195":"emsp;","8196":"emsp13;","8197":"emsp14;","8199":"numsp;","8200":"puncsp;","8201":"ThinSpace;","8202":"VeryThinSpace;","8203":"ZeroWidthSpace;","8204":"zwnj;","8205":"zwj;","8206":"lrm;","8207":"rlm;","8208":"hyphen;","8211":"ndash;","8212":"mdash;","8213":"horbar;","8214":"Vert;","8216":"OpenCurlyQuote;","8217":"rsquor;","8218":"sbquo;","8220":"OpenCurlyDoubleQuote;","8221":"rdquor;","8222":"ldquor;","8224":"dagger;","8225":"ddagger;","8226":"bullet;","8229":"nldr;","8230":"mldr;","8240":"permil;","8241":"pertenk;","8242":"prime;","8243":"Prime;","8244":"tprime;","8245":"bprime;","8249":"lsaquo;","8250":"rsaquo;","8254":"OverBar;","8257":"caret;","8259":"hybull;","8260":"frasl;","8271":"bsemi;","8279":"qprime;","8287":"MediumSpace;","8288":"NoBreak;","8289":"ApplyFunction;","8290":"it;","8291":"InvisibleComma;","8364":"euro;","8411":"TripleDot;","8412":"DotDot;","
8450":"Copf;","8453":"incare;","8458":"gscr;","8459":"Hscr;","8460":"Poincareplane;","8461":"quaternions;","8462":"planckh;","8463":"plankv;","8464":"Iscr;","8465":"imagpart;","8466":"Lscr;","8467":"ell;","8469":"Nopf;","8470":"numero;","8471":"copysr;","8472":"wp;","8473":"primes;","8474":"rationals;","8475":"Rscr;","8476":"Rfr;","8477":"Ropf;","8478":"rx;","8482":"trade;","8484":"Zopf;","8487":"mho;","8488":"Zfr;","8489":"iiota;","8492":"Bscr;","8493":"Cfr;","8495":"escr;","8496":"expectation;","8497":"Fscr;","8499":"phmmat;","8500":"oscr;","8501":"aleph;","8502":"beth;","8503":"gimel;","8504":"daleth;","8517":"DD;","8518":"DifferentialD;","8519":"exponentiale;","8520":"ImaginaryI;","8531":"frac13;","8532":"frac23;","8533":"frac15;","8534":"frac25;","8535":"frac35;","8536":"frac45;","8537":"frac16;","8538":"frac56;","8539":"frac18;","8540":"frac38;","8541":"frac58;","8542":"frac78;","8592":"slarr;","8593":"uparrow;","8594":"srarr;","8595":"ShortDownArrow;","8596":"leftrightarrow;","8597":"varr;","8598":"UpperLeftArrow;","8599":"UpperRightArrow;","8600":"searrow;","8601":"swarrow;","8602":"nleftarrow;","8603":"nrightarrow;","8605":"rightsquigarrow;","8606":"twoheadleftarrow;","8607":"Uarr;","8608":"twoheadrightarrow;","8609":"Darr;","8610":"leftarrowtail;","8611":"rightarrowtail;","8612":"mapstoleft;","8613":"UpTeeArrow;","8614":"RightTeeArrow;","8615":"mapstodown;","8617":"larrhk;","8618":"rarrhk;","8619":"looparrowleft;","8620":"rarrlp;","8621":"leftrightsquigarrow;","8622":"nleftrightarrow;","8624":"lsh;","8625":"rsh;","8626":"ldsh;","8627":"rdsh;","8629":"crarr;","8630":"curvearrowleft;","8631":"curvearrowright;","8634":"olarr;","8635":"orarr;","8636":"lharu;","8637":"lhard;","8638":"upharpoonright;","8639":"upharpoonleft;","8640":"RightVector;","8641":"rightharpoondown;","8642":"RightDownVector;","8643":"LeftDownVector;","8644":"rlarr;","8645":"UpArrowDownArrow;","8646":"lrarr;","8647":"llarr;","8648":"uuarr;","8649":"rrarr;","8650":"downdownarrows;","8651":"ReverseEquilibrium;","8652":"rlhar;","8653":"nLeftarrow;","8654":"nLeftrightarrow;","8655":"nRightarrow;","8656":"Leftarrow;","8657":"Uparrow;","8658":"Rightarrow;","8659":"Downarrow;","8660":"Leftrightarrow;","8661":"vArr;","8662":"nwArr;","8663":"neArr;","8664":"seArr;","8665":"swArr;","8666":"Lleftarrow;","8667":"Rrightarrow;","8669":"zigrarr;","8676":"LeftArrowBar;","8677":"RightArrowBar;","8693":"duarr;","8701":"loarr;","8702":"roarr;","8703":"hoarr;","8704":"forall;","8705":"complement;","8706":"PartialD;","8707":"Exists;","8708":"NotExists;","8709":"varnothing;","8711":"nabla;","8712":"isinv;","8713":"notinva;","8715":"SuchThat;","8716":"NotReverseElement;","8719":"Product;","8720":"Coproduct;","8721":"sum;","8722":"minus;","8723":"mp;","8724":"plusdo;","8726":"ssetmn;","8727":"lowast;","8728":"SmallCircle;","8730":"Sqrt;","8733":"vprop;","8734":"infin;","8735":"angrt;","8736":"angle;","8737":"measuredangle;","8738":"angsph;","8739":"VerticalBar;","8740":"nsmid;","8741":"spar;","8742":"nspar;","8743":"wedge;","8744":"vee;","8745":"cap;","8746":"cup;","8747":"Integral;","8748":"Int;","8749":"tint;","8750":"oint;","8751":"DoubleContourIntegral;","8752":"Cconint;","8753":"cwint;","8754":"cwconint;","8755":"CounterClockwiseContourIntegral;","8756":"therefore;","8757":"because;","8758":"ratio;","8759":"Proportion;","8760":"minusd;","8762":"mDDot;","8763":"homtht;","8764":"Tilde;","8765":"bsim;","8766":"mstpos;","8767":"acd;","8768":"wreath;","8769":"nsim;","8770":"esim;","8771":"TildeEqual;","8772":"nsimeq;","8773":"TildeFullEqu
al;","8774":"simne;","8775":"NotTildeFullEqual;","8776":"TildeTilde;","8777":"NotTildeTilde;","8778":"approxeq;","8779":"apid;","8780":"bcong;","8781":"CupCap;","8782":"HumpDownHump;","8783":"HumpEqual;","8784":"esdot;","8785":"eDot;","8786":"fallingdotseq;","8787":"risingdotseq;","8788":"coloneq;","8789":"eqcolon;","8790":"eqcirc;","8791":"cire;","8793":"wedgeq;","8794":"veeeq;","8796":"trie;","8799":"questeq;","8800":"NotEqual;","8801":"equiv;","8802":"NotCongruent;","8804":"leq;","8805":"GreaterEqual;","8806":"LessFullEqual;","8807":"GreaterFullEqual;","8808":"lneqq;","8809":"gneqq;","8810":"NestedLessLess;","8811":"NestedGreaterGreater;","8812":"twixt;","8813":"NotCupCap;","8814":"NotLess;","8815":"NotGreater;","8816":"NotLessEqual;","8817":"NotGreaterEqual;","8818":"lsim;","8819":"gtrsim;","8820":"NotLessTilde;","8821":"NotGreaterTilde;","8822":"lg;","8823":"gtrless;","8824":"ntlg;","8825":"ntgl;","8826":"Precedes;","8827":"Succeeds;","8828":"PrecedesSlantEqual;","8829":"SucceedsSlantEqual;","8830":"prsim;","8831":"succsim;","8832":"nprec;","8833":"nsucc;","8834":"subset;","8835":"supset;","8836":"nsub;","8837":"nsup;","8838":"SubsetEqual;","8839":"supseteq;","8840":"nsubseteq;","8841":"nsupseteq;","8842":"subsetneq;","8843":"supsetneq;","8845":"cupdot;","8846":"uplus;","8847":"SquareSubset;","8848":"SquareSuperset;","8849":"SquareSubsetEqual;","8850":"SquareSupersetEqual;","8851":"SquareIntersection;","8852":"SquareUnion;","8853":"oplus;","8854":"ominus;","8855":"otimes;","8856":"osol;","8857":"odot;","8858":"ocir;","8859":"oast;","8861":"odash;","8862":"plusb;","8863":"minusb;","8864":"timesb;","8865":"sdotb;","8866":"vdash;","8867":"LeftTee;","8868":"top;","8869":"UpTee;","8871":"models;","8872":"vDash;","8873":"Vdash;","8874":"Vvdash;","8875":"VDash;","8876":"nvdash;","8877":"nvDash;","8878":"nVdash;","8879":"nVDash;","8880":"prurel;","8882":"vltri;","8883":"vrtri;","8884":"trianglelefteq;","8885":"trianglerighteq;","8886":"origof;","8887":"imof;","8888":"mumap;","8889":"hercon;","8890":"intercal;","8891":"veebar;","8893":"barvee;","8894":"angrtvb;","8895":"lrtri;","8896":"xwedge;","8897":"xvee;","8898":"xcap;","8899":"xcup;","8900":"diamond;","8901":"sdot;","8902":"Star;","8903":"divonx;","8904":"bowtie;","8905":"ltimes;","8906":"rtimes;","8907":"lthree;","8908":"rthree;","8909":"bsime;","8910":"cuvee;","8911":"cuwed;","8912":"Subset;","8913":"Supset;","8914":"Cap;","8915":"Cup;","8916":"pitchfork;","8917":"epar;","8918":"ltdot;","8919":"gtrdot;","8920":"Ll;","8921":"ggg;","8922":"LessEqualGreater;","8923":"gtreqless;","8926":"curlyeqprec;","8927":"curlyeqsucc;","8928":"nprcue;","8929":"nsccue;","8930":"nsqsube;","8931":"nsqsupe;","8934":"lnsim;","8935":"gnsim;","8936":"prnsim;","8937":"succnsim;","8938":"ntriangleleft;","8939":"ntriangleright;","8940":"ntrianglelefteq;","8941":"ntrianglerighteq;","8942":"vellip;","8943":"ctdot;","8944":"utdot;","8945":"dtdot;","8946":"disin;","8947":"isinsv;","8948":"isins;","8949":"isindot;","8950":"notinvc;","8951":"notinvb;","8953":"isinE;","8954":"nisd;","8955":"xnis;","8956":"nis;","8957":"notnivc;","8958":"notnivb;","8965":"barwedge;","8966":"doublebarwedge;","8968":"LeftCeiling;","8969":"RightCeiling;","8970":"lfloor;","8971":"RightFloor;","8972":"drcrop;","8973":"dlcrop;","8974":"urcrop;","8975":"ulcrop;","8976":"bnot;","8978":"profline;","8979":"profsurf;","8981":"telrec;","8982":"target;","8988":"ulcorner;","8989":"urcorner;","8990":"llcorner;","8991":"lrcorner;","8994":"sfrown;","8995":"ssmile;","9005":"cylcty;","9006":"profalar;","901
4":"topbot;","9021":"ovbar;","9023":"solbar;","9084":"angzarr;","9136":"lmoustache;","9137":"rmoustache;","9140":"tbrk;","9141":"UnderBracket;","9142":"bbrktbrk;","9180":"OverParenthesis;","9181":"UnderParenthesis;","9182":"OverBrace;","9183":"UnderBrace;","9186":"trpezium;","9191":"elinters;","9251":"blank;","9416":"oS;","9472":"HorizontalLine;","9474":"boxv;","9484":"boxdr;","9488":"boxdl;","9492":"boxur;","9496":"boxul;","9500":"boxvr;","9508":"boxvl;","9516":"boxhd;","9524":"boxhu;","9532":"boxvh;","9552":"boxH;","9553":"boxV;","9554":"boxdR;","9555":"boxDr;","9556":"boxDR;","9557":"boxdL;","9558":"boxDl;","9559":"boxDL;","9560":"boxuR;","9561":"boxUr;","9562":"boxUR;","9563":"boxuL;","9564":"boxUl;","9565":"boxUL;","9566":"boxvR;","9567":"boxVr;","9568":"boxVR;","9569":"boxvL;","9570":"boxVl;","9571":"boxVL;","9572":"boxHd;","9573":"boxhD;","9574":"boxHD;","9575":"boxHu;","9576":"boxhU;","9577":"boxHU;","9578":"boxvH;","9579":"boxVh;","9580":"boxVH;","9600":"uhblk;","9604":"lhblk;","9608":"block;","9617":"blk14;","9618":"blk12;","9619":"blk34;","9633":"square;","9642":"squf;","9643":"EmptyVerySmallSquare;","9645":"rect;","9646":"marker;","9649":"fltns;","9651":"xutri;","9652":"utrif;","9653":"utri;","9656":"rtrif;","9657":"triangleright;","9661":"xdtri;","9662":"dtrif;","9663":"triangledown;","9666":"ltrif;","9667":"triangleleft;","9674":"lozenge;","9675":"cir;","9708":"tridot;","9711":"xcirc;","9720":"ultri;","9721":"urtri;","9722":"lltri;","9723":"EmptySmallSquare;","9724":"FilledSmallSquare;","9733":"starf;","9734":"star;","9742":"phone;","9792":"female;","9794":"male;","9824":"spadesuit;","9827":"clubsuit;","9829":"heartsuit;","9830":"diams;","9834":"sung;","9837":"flat;","9838":"natural;","9839":"sharp;","10003":"checkmark;","10007":"cross;","10016":"maltese;","10038":"sext;","10072":"VerticalSeparator;","10098":"lbbrk;","10099":"rbbrk;","10184":"bsolhsub;","10185":"suphsol;","10214":"lobrk;","10215":"robrk;","10216":"LeftAngleBracket;","10217":"RightAngleBracket;","10218":"Lang;","10219":"Rang;","10220":"loang;","10221":"roang;","10229":"xlarr;","10230":"xrarr;","10231":"xharr;","10232":"xlArr;","10233":"xrArr;","10234":"xhArr;","10236":"xmap;","10239":"dzigrarr;","10498":"nvlArr;","10499":"nvrArr;","10500":"nvHarr;","10501":"Map;","10508":"lbarr;","10509":"rbarr;","10510":"lBarr;","10511":"rBarr;","10512":"RBarr;","10513":"DDotrahd;","10514":"UpArrowBar;","10515":"DownArrowBar;","10518":"Rarrtl;","10521":"latail;","10522":"ratail;","10523":"lAtail;","10524":"rAtail;","10525":"larrfs;","10526":"rarrfs;","10527":"larrbfs;","10528":"rarrbfs;","10531":"nwarhk;","10532":"nearhk;","10533":"searhk;","10534":"swarhk;","10535":"nwnear;","10536":"toea;","10537":"tosa;","10538":"swnwar;","10547":"rarrc;","10549":"cudarrr;","10550":"ldca;","10551":"rdca;","10552":"cudarrl;","10553":"larrpl;","10556":"curarrm;","10557":"cularrp;","10565":"rarrpl;","10568":"harrcir;","10569":"Uarrocir;","10570":"lurdshar;","10571":"ldrushar;","10574":"LeftRightVector;","10575":"RightUpDownVector;","10576":"DownLeftRightVector;","10577":"LeftUpDownVector;","10578":"LeftVectorBar;","10579":"RightVectorBar;","10580":"RightUpVectorBar;","10581":"RightDownVectorBar;","10582":"DownLeftVectorBar;","10583":"DownRightVectorBar;","10584":"LeftUpVectorBar;","10585":"LeftDownVectorBar;","10586":"LeftTeeVector;","10587":"RightTeeVector;","10588":"RightUpTeeVector;","10589":"RightDownTeeVector;","10590":"DownLeftTeeVector;","10591":"DownRightTeeVector;","10592":"LeftUpTeeVector;","10593":"LeftDownTeeVector;","10594":"lHar;
","10595":"uHar;","10596":"rHar;","10597":"dHar;","10598":"luruhar;","10599":"ldrdhar;","10600":"ruluhar;","10601":"rdldhar;","10602":"lharul;","10603":"llhard;","10604":"rharul;","10605":"lrhard;","10606":"UpEquilibrium;","10607":"ReverseUpEquilibrium;","10608":"RoundImplies;","10609":"erarr;","10610":"simrarr;","10611":"larrsim;","10612":"rarrsim;","10613":"rarrap;","10614":"ltlarr;","10616":"gtrarr;","10617":"subrarr;","10619":"suplarr;","10620":"lfisht;","10621":"rfisht;","10622":"ufisht;","10623":"dfisht;","10629":"lopar;","10630":"ropar;","10635":"lbrke;","10636":"rbrke;","10637":"lbrkslu;","10638":"rbrksld;","10639":"lbrksld;","10640":"rbrkslu;","10641":"langd;","10642":"rangd;","10643":"lparlt;","10644":"rpargt;","10645":"gtlPar;","10646":"ltrPar;","10650":"vzigzag;","10652":"vangrt;","10653":"angrtvbd;","10660":"ange;","10661":"range;","10662":"dwangle;","10663":"uwangle;","10664":"angmsdaa;","10665":"angmsdab;","10666":"angmsdac;","10667":"angmsdad;","10668":"angmsdae;","10669":"angmsdaf;","10670":"angmsdag;","10671":"angmsdah;","10672":"bemptyv;","10673":"demptyv;","10674":"cemptyv;","10675":"raemptyv;","10676":"laemptyv;","10677":"ohbar;","10678":"omid;","10679":"opar;","10681":"operp;","10683":"olcross;","10684":"odsold;","10686":"olcir;","10687":"ofcir;","10688":"olt;","10689":"ogt;","10690":"cirscir;","10691":"cirE;","10692":"solb;","10693":"bsolb;","10697":"boxbox;","10701":"trisb;","10702":"rtriltri;","10703":"LeftTriangleBar;","10704":"RightTriangleBar;","10716":"iinfin;","10717":"infintie;","10718":"nvinfin;","10723":"eparsl;","10724":"smeparsl;","10725":"eqvparsl;","10731":"lozf;","10740":"RuleDelayed;","10742":"dsol;","10752":"xodot;","10753":"xoplus;","10754":"xotime;","10756":"xuplus;","10758":"xsqcup;","10764":"qint;","10765":"fpartint;","10768":"cirfnint;","10769":"awint;","10770":"rppolint;","10771":"scpolint;","10772":"npolint;","10773":"pointint;","10774":"quatint;","10775":"intlarhk;","10786":"pluscir;","10787":"plusacir;","10788":"simplus;","10789":"plusdu;","10790":"plussim;","10791":"plustwo;","10793":"mcomma;","10794":"minusdu;","10797":"loplus;","10798":"roplus;","10799":"Cross;","10800":"timesd;","10801":"timesbar;","10803":"smashp;","10804":"lotimes;","10805":"rotimes;","10806":"otimesas;","10807":"Otimes;","10808":"odiv;","10809":"triplus;","10810":"triminus;","10811":"tritime;","10812":"iprod;","10815":"amalg;","10816":"capdot;","10818":"ncup;","10819":"ncap;","10820":"capand;","10821":"cupor;","10822":"cupcap;","10823":"capcup;","10824":"cupbrcap;","10825":"capbrcup;","10826":"cupcup;","10827":"capcap;","10828":"ccups;","10829":"ccaps;","10832":"ccupssm;","10835":"And;","10836":"Or;","10837":"andand;","10838":"oror;","10839":"orslope;","10840":"andslope;","10842":"andv;","10843":"orv;","10844":"andd;","10845":"ord;","10847":"wedbar;","10854":"sdote;","10858":"simdot;","10861":"congdot;","10862":"easter;","10863":"apacir;","10864":"apE;","10865":"eplus;","10866":"pluse;","10867":"Esim;","10868":"Colone;","10869":"Equal;","10871":"eDDot;","10872":"equivDD;","10873":"ltcir;","10874":"gtcir;","10875":"ltquest;","10876":"gtquest;","10877":"LessSlantEqual;","10878":"GreaterSlantEqual;","10879":"lesdot;","10880":"gesdot;","10881":"lesdoto;","10882":"gesdoto;","10883":"lesdotor;","10884":"gesdotol;","10885":"lessapprox;","10886":"gtrapprox;","10887":"lneq;","10888":"gneq;","10889":"lnapprox;","10890":"gnapprox;","10891":"lesseqqgtr;","10892":"gtreqqless;","10893":"lsime;","10894":"gsime;","10895":"lsimg;","10896":"gsiml;","10897":"lgE;","10898":"glE;","10899":
"lesges;","10900":"gesles;","10901":"eqslantless;","10902":"eqslantgtr;","10903":"elsdot;","10904":"egsdot;","10905":"el;","10906":"eg;","10909":"siml;","10910":"simg;","10911":"simlE;","10912":"simgE;","10913":"LessLess;","10914":"GreaterGreater;","10916":"glj;","10917":"gla;","10918":"ltcc;","10919":"gtcc;","10920":"lescc;","10921":"gescc;","10922":"smt;","10923":"lat;","10924":"smte;","10925":"late;","10926":"bumpE;","10927":"preceq;","10928":"succeq;","10931":"prE;","10932":"scE;","10933":"prnE;","10934":"succneqq;","10935":"precapprox;","10936":"succapprox;","10937":"prnap;","10938":"succnapprox;","10939":"Pr;","10940":"Sc;","10941":"subdot;","10942":"supdot;","10943":"subplus;","10944":"supplus;","10945":"submult;","10946":"supmult;","10947":"subedot;","10948":"supedot;","10949":"subseteqq;","10950":"supseteqq;","10951":"subsim;","10952":"supsim;","10955":"subsetneqq;","10956":"supsetneqq;","10959":"csub;","10960":"csup;","10961":"csube;","10962":"csupe;","10963":"subsup;","10964":"supsub;","10965":"subsub;","10966":"supsup;","10967":"suphsub;","10968":"supdsub;","10969":"forkv;","10970":"topfork;","10971":"mlcp;","10980":"DoubleLeftTee;","10982":"Vdashl;","10983":"Barv;","10984":"vBar;","10985":"vBarv;","10987":"Vbar;","10988":"Not;","10989":"bNot;","10990":"rnmid;","10991":"cirmid;","10992":"midcir;","10993":"topcir;","10994":"nhpar;","10995":"parsim;","11005":"parsl;","64256":"fflig;","64257":"filig;","64258":"fllig;","64259":"ffilig;","64260":"ffllig;"}'); +module.exports = JSON.parse('{"name":"gaxios","version":"6.2.0","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"^1.1.9","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^9.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"^5.0.0","linkinator":"^4.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^21.0.0","sinon":"^17.0.0","stream-browserify":"^3.0.0","tmp":"0.2.1","ts-loader":"^8.0.0","typescript":"^5.1.6","uuid":"^9.0.0","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9"}}'); /***/ }), @@ -74869,7 +74928,7 @@ module.exports = JSON.parse('{"9":"Tab;","10":"NewLine;","33":"excl;","34":"quot /***/ ((module) => { "use strict"; -module.exports = JSON.parse('{"name":"google-auth-library","version":"8.7.0","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=12"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"arrify":"^2.0.0","base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","fast-text-encoding":"^1.0.0","gaxios":"^5.0.0","gcp-metadata":"^5.0.0","gtoken":"^6.1.0","jws":"^4.0.0","lru-cache":"^6.0.0"},"devDependencies":{"@compodoc/compodoc":"^1.1.7","@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/lru-cache":"^5.0.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^16.0.0","@types/sinon":"^10.0.0","@types/tmp":"^0.2.0","assert-rejects":"^1.0.0","c8":"^7.0.0","chai":"^4.2.0","codecov":"^3.0.2","execa":"^5.0.0","gts":"^3.1.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.3.7","karma-webpack":"^5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^18.0.0","sinon":"^14.0.0","tmp":"^0.2.0","ts-loader":"^8.0.0","typescript":"^4.6.3","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile","docs":"compodoc src/","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts 
clean"},"license":"Apache-2.0"}'); +module.exports = JSON.parse('{"name":"google-auth-library","version":"9.6.3","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@compodoc/compodoc":"^1.1.7","@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^10.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","codecov":"^3.0.2","execa":"^5.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^21.0.0","sinon":"^15.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"compodoc src/","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); /***/ }), diff --git a/dist/post/index.js.map b/dist/post/index.js.map index f48c585..287e0a9 100644 --- a/dist/post/index.js.map +++ b/dist/post/index.js.map @@ -1 +1 @@ 
-{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9oBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9sBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACz5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3mGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjSA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9mCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACllCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC72FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACt2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpHA;AACA;AACA;;;;;;;;;ACFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/tBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7vBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1bA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/XA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;;;;;;;;ACHA;;;;;;;ACAA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1vDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClkCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACz5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACt+BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/eA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/iCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/uCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjjCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3uCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC55DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
[machine-generated source-map VLQ mappings for dist/main/index.js.map — no human-readable content; omitted]
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/nBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACrBA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChRA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC
vBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnEA;;;;;;;;AAAA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;
;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://throttle-action/./lib/locks.js","../webpack://throttle-action/./lib/post.js","../webpack://throttle-action/./node_modules/@actions/core/lib/command.js","../webpack://throttle-action/./node_modules/@actions/core/lib/core.js","../webpack://throttle-action/./node_modules/@actions/core/lib/file-command.js","../webpack://throttle-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://throttle-action/./node_modules/@actions/core/lib/path-utils.js","../webpack://throttle-action/./node_modules/@actions/core/lib/summary.js","../webpack://throttle-action/./node_modules/@actions/core/lib/utils.js","../webpack://throttle-action/./node_modules/@actions/github/lib/context.js","../webpack://throttle-action/./node_modules/@actions/github/lib/github.js","../webpack://throttle-action/./node_modules/@actions/github/lib/internal/utils.js","../webpack://throttle-action/./node_modules/@actions/github/lib/utils.js","../webpack://throttle-action/./node_modules/@actions/http-client/lib/auth.js","../webpack://throttle-action/./node_modules/@actions/http-client/lib/index.js","../webpack://throttle-action/./node_modules/@actions/http-client/lib/proxy.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/operation.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/service-object.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/service.js","../webpack://throttle-action/./node_modules/@google-cloud/common/build/src/util.js","../webpack://throttle-action/./node_modules/@google-cloud/paginator/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/paginator/build/src/resource-stream.js","../webpack://throttle-action/./node_modules/@google-cloud/projectify/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/promisify/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/acl.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/bucket.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/channel.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/crc32c.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/file.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/hash-stream-validator.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/hmacKey.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/iam.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/index.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/index.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/service.js","../webpack://t
hrottle-action/./node_modules/@google-cloud/storage/build/src/nodejs-common/util.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/notification.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/resumable-upload.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/signer.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/storage.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/transfer-manager.js","../webpack://throttle-action/./node_modules/@google-cloud/storage/build/src/util.js","../webpack://throttle-action/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/core/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://throttle-action/./node_modules/@octokit/request/dist-node/index.js","../webpack://throttle-action/./node_modules/@tootallnate/once/dist/index.js","../webpack://throttle-action/./node_modules/abort-controller/dist/abort-controller.js","../webpack://throttle-action/./node_modules/agent-base/dist/src/index.js","../webpack://throttle-action/./node_modules/agent-base/dist/src/promisify.js","../webpack://throttle-action/./node_modules/arrify/index.js","../webpack://throttle-action/./node_modules/async-retry/lib/index.js","../webpack://throttle-action/./node_modules/base64-js/index.js","../webpack://throttle-action/./node_modules/before-after-hook/index.js","../webpack://throttle-action/./node_modules/before-after-hook/lib/add.js","../webpack://throttle-action/./node_modules/before-after-hook/lib/register.js","../webpack://throttle-action/./node_modules/before-after-hook/lib/remove.js","../webpack://throttle-action/./node_modules/bignumber.js/bignumber.js","../webpack://throttle-action/./node_modules/buffer-equal-constant-time/index.js","../webpack://throttle-action/./node_modules/compressible/index.js","../webpack://throttle-action/./node_modules/debug/src/browser.js","../webpack://throttle-action/./node_modules/debug/src/common.js","../webpack://throttle-action/./node_modules/debug/src/index.js","../webpack://throttle-action/./node_modules/debug/src/node.js","../webpack://throttle-action/./node_modules/deprecation/dist-node/index.js","../webpack://throttle-action/./node_modules/duplexify/index.js","../webpack://throttle-action/./node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js","../webpack://throttle-action/./node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js","../webpack://throttle-action/./node_modules/end-of-stream/index.js","../webpack://throttle-action/./node_modules/ent/decode.js","../webpack://throttle-action/./node_modules/ent/encode.js","../webpack://throttle-action/./node_modules/ent/index.js","../webpack://throttle-action/./node_modules/event-target-shim/dist/event-target-shim.js","../webpack://throttle-action/./node_modules/extend/index.js","../webpack://throttle-action/./node_modules/fast-text-encoding/text.min.js","../webpack://throttle-action/./node_modules/gaxios/build/src/common.js","../webpack://throttle-action/./no
de_modules/gaxios/build/src/gaxios.js","../webpack://throttle-action/./node_modules/gaxios/build/src/index.js","../webpack://throttle-action/./node_modules/gaxios/build/src/retry.js","../webpack://throttle-action/./node_modules/gcp-metadata/build/src/gcp-residency.js","../webpack://throttle-action/./node_modules/gcp-metadata/build/src/index.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/authclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/awsclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/awsrequestsigner.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/baseexternalclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/computeclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/downscopedclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/envDetect.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/executable-response.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/externalclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/googleauth.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/iam.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/identitypoolclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/idtokenclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/impersonated.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/jwtaccess.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/jwtclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/loginticket.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/oauth2client.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/oauth2common.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/refreshclient.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/auth/stscredentials.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/crypto/browser/crypto.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/crypto/crypto.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/crypto/node/crypto.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/index.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/options.js","../webpack://throttle-action/./node_modules/google-auth-library/build/src/transporters.js","../webpack://throttle-action/./node_modules/google-p12-pem/build/src/index.js","../webpack://throttle-action/./node_modules/gtoken/build/src/index.js","../webpack://throttle-action/./node_modules/has-flag/index.js","../webpack://throttle-action/./node_modules/http-proxy-agent/dist/agent.js","../webpack://throttle-action/./node_modules/http-proxy-agent/dist/index.js","../webpack://throttle-a
ction/./node_modules/https-proxy-agent/dist/agent.js","../webpack://throttle-action/./node_modules/https-proxy-agent/dist/index.js","../webpack://throttle-action/./node_modules/https-proxy-agent/dist/parse-proxy-response.js","../webpack://throttle-action/./node_modules/inherits/inherits.js","../webpack://throttle-action/./node_modules/inherits/inherits_browser.js","../webpack://throttle-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://throttle-action/./node_modules/is-stream/index.js","../webpack://throttle-action/./node_modules/json-bigint/index.js","../webpack://throttle-action/./node_modules/json-bigint/lib/parse.js","../webpack://throttle-action/./node_modules/json-bigint/lib/stringify.js","../webpack://throttle-action/./node_modules/jwa/index.js","../webpack://throttle-action/./node_modules/jws/index.js","../webpack://throttle-action/./node_modules/jws/lib/data-stream.js","../webpack://throttle-action/./node_modules/jws/lib/sign-stream.js","../webpack://throttle-action/./node_modules/jws/lib/tostring.js","../webpack://throttle-action/./node_modules/jws/lib/verify-stream.js","../webpack://throttle-action/./node_modules/lru-cache/index.js","../webpack://throttle-action/./node_modules/mime-db/index.js","../webpack://throttle-action/./node_modules/mime-types/index.js","../webpack://throttle-action/./node_modules/mime/Mime.js","../webpack://throttle-action/./node_modules/mime/index.js","../webpack://throttle-action/./node_modules/mime/types/other.js","../webpack://throttle-action/./node_modules/mime/types/standard.js","../webpack://throttle-action/./node_modules/ms/index.js","../webpack://throttle-action/./node_modules/node-fetch/lib/index.js","../webpack://throttle-action/./node_modules/node-forge/lib/aes.js","../webpack://throttle-action/./node_modules/node-forge/lib/aesCipherSuites.js","../webpack://throttle-action/./node_modules/node-forge/lib/asn1-validator.js","../webpack://throttle-action/./node_modules/node-forge/lib/asn1.js","../webpack://throttle-action/./node_modules/node-forge/lib/baseN.js","../webpack://throttle-action/./node_modules/node-forge/lib/cipher.js","../webpack://throttle-action/./node_modules/node-forge/lib/cipherModes.js","../webpack://throttle-action/./node_modules/node-forge/lib/des.js","../webpack://throttle-action/./node_modules/node-forge/lib/ed25519.js","../webpack://throttle-action/./node_modules/node-forge/lib/forge.js","../webpack://throttle-action/./node_modules/node-forge/lib/hmac.js","../webpack://throttle-action/./node_modules/node-forge/lib/index.js","../webpack://throttle-action/./node_modules/node-forge/lib/jsbn.js","../webpack://throttle-action/./node_modules/node-forge/lib/kem.js","../webpack://throttle-action/./node_modules/node-forge/lib/log.js","../webpack://throttle-action/./node_modules/node-forge/lib/md.all.js","../webpack://throttle-action/./node_modules/node-forge/lib/md.js","../webpack://throttle-action/./node_modules/node-forge/lib/md5.js","../webpack://throttle-action/./node_modules/node-forge/lib/mgf.js","../webpack://throttle-action/./node_modules/node-forge/lib/mgf1.js","../webpack://throttle-action/./node_modules/node-forge/lib/oids.js","../webpack://throttle-action/./node_modules/node-forge/lib/pbe.js","../webpack://throttle-action/./node_modules/node-forge/lib/pbkdf2.js","../webpack://throttle-action/./node_modules/node-forge/lib/pem.js","../webpack://throttle-action/./node_modules/node-forge/lib/pkcs1.js","../webpack://throttle-action/./node_modules/node-forge/lib/pkcs12.js","../webpack://throttle-actio
n/./node_modules/node-forge/lib/pkcs7.js","../webpack://throttle-action/./node_modules/node-forge/lib/pkcs7asn1.js","../webpack://throttle-action/./node_modules/node-forge/lib/pki.js","../webpack://throttle-action/./node_modules/node-forge/lib/prime.js","../webpack://throttle-action/./node_modules/node-forge/lib/prng.js","../webpack://throttle-action/./node_modules/node-forge/lib/pss.js","../webpack://throttle-action/./node_modules/node-forge/lib/random.js","../webpack://throttle-action/./node_modules/node-forge/lib/rc2.js","../webpack://throttle-action/./node_modules/node-forge/lib/rsa.js","../webpack://throttle-action/./node_modules/node-forge/lib/sha1.js","../webpack://throttle-action/./node_modules/node-forge/lib/sha256.js","../webpack://throttle-action/./node_modules/node-forge/lib/sha512.js","../webpack://throttle-action/./node_modules/node-forge/lib/ssh.js","../webpack://throttle-action/./node_modules/node-forge/lib/tls.js","../webpack://throttle-action/./node_modules/node-forge/lib/util.js","../webpack://throttle-action/./node_modules/node-forge/lib/x509.js","../webpack://throttle-action/./node_modules/once/once.js","../webpack://throttle-action/./node_modules/p-limit/index.js","../webpack://throttle-action/./node_modules/readable-stream/errors.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_duplex.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_passthrough.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_readable.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_transform.js","../webpack://throttle-action/./node_modules/readable-stream/lib/_stream_writable.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/async_iterator.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/buffer_list.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/destroy.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/end-of-stream.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/from.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/pipeline.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/state.js","../webpack://throttle-action/./node_modules/readable-stream/lib/internal/streams/stream.js","../webpack://throttle-action/./node_modules/readable-stream/readable.js","../webpack://throttle-action/./node_modules/retry-request/index.js","../webpack://throttle-action/./node_modules/retry/index.js","../webpack://throttle-action/./node_modules/retry/lib/retry.js","../webpack://throttle-action/./node_modules/retry/lib/retry_operation.js","../webpack://throttle-action/./node_modules/safe-buffer/index.js","../webpack://throttle-action/./node_modules/stream-events/index.js","../webpack://throttle-action/./node_modules/stream-shift/index.js","../webpack://throttle-action/./node_modules/string_decoder/lib/string_decoder.js","../webpack://throttle-action/./node_modules/stubs/index.js","../webpack://throttle-action/./node_modules/supports-color/index.js","../webpack://throttle-action/./node_modules/teeny-request/build/src/TeenyStatistics.js","../webpack://throttle-action/./node_modules/teeny-request/build/src/agents.js","../webpack://throttle-action/./node_modules/teeny-request/build/src/index.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/
uuid/dist/index.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/md5.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/native.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/nil.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/parse.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/regex.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/rng.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/sha1.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/stringify.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v1.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v3.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v35.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v4.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/v5.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/validate.js","../webpack://throttle-action/./node_modules/teeny-request/node_modules/uuid/dist/version.js","../webpack://throttle-action/./node_modules/tr46/index.js","../webpack://throttle-action/./node_modules/tunnel/index.js","../webpack://throttle-action/./node_modules/tunnel/lib/tunnel.js","../webpack://throttle-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://throttle-action/./node_modules/util-deprecate/node.js","../webpack://throttle-action/./node_modules/uuid/dist/index.js","../webpack://throttle-action/./node_modules/uuid/dist/md5.js","../webpack://throttle-action/./node_modules/uuid/dist/nil.js","../webpack://throttle-action/./node_modules/uuid/dist/parse.js","../webpack://throttle-action/./node_modules/uuid/dist/regex.js","../webpack://throttle-action/./node_modules/uuid/dist/rng.js","../webpack://throttle-action/./node_modules/uuid/dist/sha1.js","../webpack://throttle-action/./node_modules/uuid/dist/stringify.js","../webpack://throttle-action/./node_modules/uuid/dist/v1.js","../webpack://throttle-action/./node_modules/uuid/dist/v3.js","../webpack://throttle-action/./node_modules/uuid/dist/v35.js","../webpack://throttle-action/./node_modules/uuid/dist/v4.js","../webpack://throttle-action/./node_modules/uuid/dist/v5.js","../webpack://throttle-action/./node_modules/uuid/dist/validate.js","../webpack://throttle-action/./node_modules/uuid/dist/version.js","../webpack://throttle-action/./node_modules/webidl-conversions/lib/index.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/URL-impl.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/URL.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/public-api.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://throttle-action/./node_modules/whatwg-url/lib/utils.js","../webpack://throttle-action/./node_modules/wrappy/wrappy.js","../webpack://throttle-action/./node_modules/yallist/iterator.js","../webpack://throttle-action/./node_modules/yallist/yallist.js","../webpack://throttle-action/./node_modules/yocto-queue/index.js","../webpack://throttle-action/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://throttle-action/external node-commonjs 
\"assert\"","../webpack://throttle-action/external node-commonjs \"buffer\"","../webpack://throttle-action/external node-commonjs \"child_process\"","../webpack://throttle-action/external node-commonjs \"crypto\"","../webpack://throttle-action/external node-commonjs \"events\"","../webpack://throttle-action/external node-commonjs \"fs\"","../webpack://throttle-action/external node-commonjs \"http\"","../webpack://throttle-action/external node-commonjs \"https\"","../webpack://throttle-action/external node-commonjs \"net\"","../webpack://throttle-action/external node-commonjs \"os\"","../webpack://throttle-action/external node-commonjs \"path\"","../webpack://throttle-action/external node-commonjs \"punycode\"","../webpack://throttle-action/external node-commonjs \"querystring\"","../webpack://throttle-action/external node-commonjs \"stream\"","../webpack://throttle-action/external node-commonjs \"tls\"","../webpack://throttle-action/external node-commonjs \"tty\"","../webpack://throttle-action/external node-commonjs \"url\"","../webpack://throttle-action/external node-commonjs \"util\"","../webpack://throttle-action/external node-commonjs \"zlib\"","../webpack://throttle-action/webpack/bootstrap","../webpack://throttle-action/webpack/runtime/compat","../webpack://throttle-action/webpack/before-startup","../webpack://throttle-action/webpack/startup","../webpack://throttle-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleCloudStorageMutex = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst storage_1 = require(\"@google-cloud/storage\");\nconst common_1 = require(\"@google-cloud/common\");\nconst TIME_PERIOD_PATTERN = /^(?:(\\d+)m)?(?:(\\d+)s)?$/;\nclass TimePeriodError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'TimePeriodError';\n }\n}\n/**\n * Parse a number of microseconds from a time string.\n *\n */\nfunction parseTimePeriod(period) {\n const match = period.match(TIME_PERIOD_PATTERN);\n if (match === null) {\n throw new TimePeriodError(`Invalid time period string: ${period}`);\n }\n else {\n const minutes = match[1] || 0;\n const seconds = match[2] || 0;\n return Number(minutes) * 60 + Number(seconds);\n }\n}\n/**\n * Generate a default identifier this mutex\n *\n * The identifier is used to verify that this invocation is the owner of the\n * mutex before releasing it.\n */\nfunction defaultMutexIdentifier() {\n const context = github.context;\n return `${context.repo.owner}/${context.repo.repo}/${context.workflow}/${context.runNumber}/${context.job}`;\n}\nconst INITIAL_SLEEP = 500;\nclass GoogleCloudStorageMutex {\n constructor(bucket, name, options = {}) {\n this.file = new storage_1.File(new storage_1.Bucket(new storage_1.Storage(), bucket), name, {\n generation: 0\n });\n if (options.timeout === null || options.timeout === undefined) {\n this.timeout = 5 * 60 * 1000; // 5 minutes\n }\n else if (typeof options.timeout === 'string') {\n this.timeout = parseTimePeriod(options.timeout) * 1000;\n }\n else {\n this.timeout = options.timeout;\n }\n if (options.identifier === null || options.identifier === undefined) {\n this.identifier = defaultMutexIdentifier();\n }\n else {\n this.identifier = options.identifier;\n }\n }\n acquire() {\n return __awaiter(this, void 0, void 0, function* () {\n const start = Date.now();\n let sleep = INITIAL_SLEEP;\n for (;;) {\n try {\n yield this.file.save(this.identifier, { resumable: false });\n core.info('Acquired lock');\n return;\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 412) {\n const delta = Date.now() - start;\n if (delta < this.timeout) {\n core.info(`Cannot acquire lock, sleeping for ${sleep / 1000.0}s`);\n yield new Promise(resolve => setTimeout(resolve, sleep));\n sleep = sleep * 2;\n continue;\n }\n }\n core.info('Cannot acquire lock, raising');\n throw e;\n }\n }\n });\n }\n release() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const data = yield this.file.download();\n if (data[0].toString() === this.identifier) {\n yield this.file.delete();\n }\n else {\n core.info(`Locked by someone else (${data[0]} !== ${this.identifier})`);\n }\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 404) {\n // no lock to release\n }\n else {\n throw e;\n }\n }\n });\n }\n}\nexports.GoogleCloudStorageMutex = GoogleCloudStorageMutex;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst locks_1 = require(\"./locks\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const mutex = new locks_1.GoogleCloudStorageMutex(core.getInput('bucket'), core.getInput('filename'), { timeout: core.getInput('timeout') });\n yield mutex.release();\n }\n catch (error) {\n if (error instanceof Error) {\n core.error(error.message);\n }\n else {\n core.error(`Unknown error ${error}`);\n }\n core.setFailed('Failed to release lock');\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 
'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options, ...additionalPlugins) {\n const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);\n return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nexports.defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = exports.Operation = void 0;\n/**\n * @type {module:common/operation}\n * @private\n */\nvar operation_1 = require(\"./operation\");\nObject.defineProperty(exports, \"Operation\", { enumerable: true, get: function () { return operation_1.Operation; } });\n/**\n * @type {module:common/service}\n * @private\n */\nvar service_1 = require(\"./service\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_1.Service; } });\n/**\n * @type {module:common/serviceObject}\n * @private\n */\nvar service_object_1 = require(\"./service-object\");\nObject.defineProperty(exports, \"ServiceObject\", { enumerable: true, get: function () { return service_object_1.ServiceObject; } });\n/**\n * @type {module:common/util}\n * @private\n */\nvar util_1 = require(\"./util\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_1.ApiError; } 
});\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_1.util; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Operation = void 0;\n/*!\n * @module common/operation\n */\nconst service_object_1 = require(\"./service-object\");\nconst util_1 = require(\"util\");\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass Operation extends service_object_1.ServiceObject {\n /**\n * An Operation object allows you to interact with APIs that take longer to\n * process things.\n *\n * @constructor\n * @alias module:common/operation\n *\n * @param {object} config - Configuration object.\n * @param {module:common/service|module:common/serviceObject|module:common/grpcService|module:common/grpcServiceObject} config.parent - The parent object.\n */\n constructor(config) {\n const methods = {\n /**\n * Checks to see if an operation exists.\n */\n exists: true,\n /**\n * Retrieves the operation.\n */\n get: true,\n /**\n * Retrieves metadata for the operation.\n */\n getMetadata: {\n reqOpts: {\n name: config.id,\n },\n },\n };\n config = Object.assign({\n baseUrl: '',\n }, config);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n config.methods = (config.methods || methods);\n super(config);\n this.completeListeners = 0;\n this.hasActiveListeners = false;\n this.listenForEvents_();\n }\n /**\n * Wraps the `complete` and `error` events in a Promise.\n *\n * @return {Promise}\n */\n promise() {\n return new Promise((resolve, reject) => {\n this.on('error', reject).on('complete', (metadata) => {\n resolve([metadata]);\n });\n });\n }\n /**\n * Begin listening for events on the operation. This method keeps track of how\n * many \"complete\" listeners are registered and removed, making sure polling\n * is handled automatically.\n *\n * As long as there is one active \"complete\" listener, the connection is open.\n * When there are no more listeners, the polling stops.\n *\n * @private\n */\n listenForEvents_() {\n this.on('newListener', (event) => {\n if (event === 'complete') {\n this.completeListeners++;\n if (!this.hasActiveListeners) {\n this.hasActiveListeners = true;\n this.startPolling_();\n }\n }\n });\n this.on('removeListener', (event) => {\n if (event === 'complete' && --this.completeListeners === 0) {\n this.hasActiveListeners = false;\n }\n });\n }\n /**\n * Poll for a status update. Returns null for an incomplete\n * status, and metadata for a complete status.\n *\n * @private\n */\n poll_(callback) {\n this.getMetadata((err, body) => {\n if (err || body.error) {\n callback(err || body.error);\n return;\n }\n if (!body.done) {\n callback(null);\n return;\n }\n callback(null, body);\n });\n }\n /**\n * Poll `getMetadata` to check the operation's status. 
This runs a loop to\n * ping the API on an interval.\n *\n * Note: This method is automatically called once a \"complete\" event handler\n * is registered on the operation.\n *\n * @private\n */\n async startPolling_() {\n if (!this.hasActiveListeners) {\n return;\n }\n try {\n const metadata = await (0, util_1.promisify)(this.poll_.bind(this))();\n if (!metadata) {\n setTimeout(this.startPolling_.bind(this), this.pollIntervalMs || 500);\n return;\n }\n this.emit('complete', metadata);\n }\n catch (err) {\n this.emit('error', err);\n }\n }\n}\nexports.Operation = Operation;\n//# sourceMappingURL=operation.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * @module common/service-object\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst arrify = require(\"arrify\");\nconst events_1 = require(\"events\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.pollIntervalMs = config.pollIntervalMs;\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. 
a \"File\") to only have\n // the configured methods. We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = extend(true, {\n method: 'DELETE',\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n callback(err, ...args);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = extend(true, {\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = extend(true, {}, {\n method: 'PATCH',\n uri: '',\n }, methodConfig.reqOpts, {\n json: metadata,\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts);\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = arrify(reqOpts.interceptors_);\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n//# sourceMappingURL=service-object.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\n/*!\n * @module common/service\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = arrify(options.interceptors_);\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired 
!== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = extend({}, config, {\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n });\n this.makeAuthenticatedRequest =\n util_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout });\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n arrify(reqOpts.interceptors_).forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = util_1.util.getUserAgentFromPackageJson(pkg);\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = extend({}, reqOpts.headers, {\n 'User-Agent': userAgent,\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version}`,\n });\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, 
reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n//# sourceMappingURL=service.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = require(\"ent\");\nconst extend = require(\"extend\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retryRequest = require(\"retry-request\");\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst service_1 = require(\"./service\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = 
Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body));\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. 
If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = extend(true, defaultReqOpts, options.request, {\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n });\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted. 
This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = extend({}, config);\n if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = extend({}, config);\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = duplexify();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e, _f, _g;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined &&\n ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n throw new ApiError('autoRetry is deprecated. Use retryOptions.autoRetry instead.');\n }\n else if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries && ((_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)) {\n throw new ApiError('maxRetries is deprecated. Use retryOptions.maxRetries instead.');\n }\n else if (config.maxRetries) {\n maxRetryValue = config.maxRetries;\n }\n else if ((_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.maxRetryDelay,\n retryDelayMultiplier: (_f = config.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier,\n totalTimeout: (_g = config.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return retryRequest(reqOpts, options, (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = retryRequest(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Create a properly-formatted User-Agent string from a package.json file.\n *\n * @param {object} packageJson - A module's package.json file.\n * @return {string} userAgent - The formatted User-Agent string.\n */\n getUserAgentFromPackageJson(packageJson) {\n const hyphenatedPackageName = packageJson.name\n .replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + packageJson.version;\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object 
or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? [{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n//# sourceMappingURL=util.js.map","\"use strict\";\n/*!\n * Copyright 2015 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = exports.paginator = exports.Paginator = void 0;\n/*!\n * @module common/paginator\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst resource_stream_1 = require(\"./resource-stream\");\nObject.defineProperty(exports, \"ResourceStream\", { enumerable: true, get: function () { return resource_stream_1.ResourceStream; } });\n/*! Developer Documentation\n *\n * paginator is used to auto-paginate `nextQuery` methods as well as\n * streamifying them.\n *\n * Before:\n *\n * search.query('done=true', function(err, results, nextQuery) {\n * search.query(nextQuery, function(err, results, nextQuery) {});\n * });\n *\n * After:\n *\n * search.query('done=true', function(err, results) {});\n *\n * Methods to extend should be written to accept callbacks and return a\n * `nextQuery`.\n */\nclass Paginator {\n /**\n * Cache the original method, then overwrite it on the Class's prototype.\n *\n * @param {function} Class - The parent class of the methods to extend.\n * @param {string|string[]} methodNames - Name(s) of the methods to extend.\n */\n // tslint:disable-next-line:variable-name\n extend(Class, methodNames) {\n methodNames = arrify(methodNames);\n methodNames.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n // map the original method to a private member\n Class.prototype[methodName + '_'] = originalMethod;\n // overwrite the original to auto-paginate\n /* eslint-disable @typescript-eslint/no-explicit-any */\n Class.prototype[methodName] = function (...args) {\n const parsedArguments = paginator.parseArguments_(args);\n return paginator.run_(parsedArguments, originalMethod.bind(this));\n };\n });\n }\n /**\n * Wraps paginated API calls in a readable object stream.\n *\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. 
The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {string} methodName - Name of the method to streamify.\n * @return {function} - Wrapped function.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n streamify(methodName) {\n return function (\n /* eslint-disable @typescript-eslint/no-explicit-any */\n ...args) {\n const parsedArguments = paginator.parseArguments_(args);\n const originalMethod = this[methodName + '_'] || this[methodName];\n return paginator.runAsStream_(parsedArguments, originalMethod.bind(this));\n };\n }\n /**\n * Parse a pseudo-array `arguments` for a query and callback.\n *\n * @param {array} args - The original `arguments` pseduo-array that the original\n * method received.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n parseArguments_(args) {\n let query;\n let autoPaginate = true;\n let maxApiCalls = -1;\n let maxResults = -1;\n let callback;\n const firstArgument = args[0];\n const lastArgument = args[args.length - 1];\n if (typeof firstArgument === 'function') {\n callback = firstArgument;\n }\n else {\n query = firstArgument;\n }\n if (typeof lastArgument === 'function') {\n callback = lastArgument;\n }\n if (typeof query === 'object') {\n query = extend(true, {}, query);\n // Check if the user only asked for a certain amount of results.\n if (query.maxResults && typeof query.maxResults === 'number') {\n // `maxResults` is used API-wide.\n maxResults = query.maxResults;\n }\n else if (typeof query.pageSize === 'number') {\n // `pageSize` is Pub/Sub's `maxResults`.\n maxResults = query.pageSize;\n }\n if (query.maxApiCalls && typeof query.maxApiCalls === 'number') {\n maxApiCalls = query.maxApiCalls;\n delete query.maxApiCalls;\n }\n // maxResults is the user specified limit.\n if (maxResults !== -1 || query.autoPaginate === false) {\n autoPaginate = false;\n }\n }\n const parsedArguments = {\n query: query || {},\n autoPaginate,\n maxApiCalls,\n maxResults,\n callback,\n };\n parsedArguments.streamOptions = extend(true, {}, parsedArguments.query);\n delete parsedArguments.streamOptions.autoPaginate;\n delete parsedArguments.streamOptions.maxResults;\n delete parsedArguments.streamOptions.pageSize;\n return parsedArguments;\n }\n /**\n * This simply checks to see if `autoPaginate` is set or not, if it's true\n * then we buffer all results, otherwise simply call the original method.\n *\n * @param {array} parsedArguments - Parsed arguments from the original method\n * call.\n * @param {object=|string=} parsedArguments.query - Query object. 
This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n */\n run_(parsedArguments, originalMethod) {\n const query = parsedArguments.query;\n const callback = parsedArguments.callback;\n if (!parsedArguments.autoPaginate) {\n return originalMethod(query, callback);\n }\n const results = new Array();\n const promise = new Promise((resolve, reject) => {\n paginator\n .runAsStream_(parsedArguments, originalMethod)\n .on('error', reject)\n .on('data', (data) => results.push(data))\n .on('end', () => resolve(results));\n });\n if (!callback) {\n return promise.then(results => [results]);\n }\n promise.then(results => callback(null, results), (err) => callback(err));\n }\n /**\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {object=|string=} parsedArguments.query - Query object. This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n * @return {stream} - Readable object stream.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n runAsStream_(parsedArguments, originalMethod) {\n return new resource_stream_1.ResourceStream(parsedArguments, originalMethod);\n }\n}\nexports.Paginator = Paginator;\nconst paginator = new Paginator();\nexports.paginator = paginator;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*!\n * Copyright 2019 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = void 0;\nconst stream_1 = require(\"stream\");\nclass ResourceStream extends stream_1.Transform {\n constructor(args, requestFn) {\n const options = Object.assign({ objectMode: true }, args.streamOptions);\n super(options);\n this._ended = false;\n this._maxApiCalls = args.maxApiCalls === -1 ? 
Infinity : args.maxApiCalls;\n this._nextQuery = args.query;\n this._reading = false;\n this._requestFn = requestFn;\n this._requestsMade = 0;\n this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults;\n }\n /* eslint-disable @typescript-eslint/no-explicit-any */\n end(...args) {\n this._ended = true;\n return super.end(...args);\n }\n _read() {\n if (this._reading) {\n return;\n }\n this._reading = true;\n // Wrap in a try/catch to catch input linting errors, e.g.\n // an invalid BigQuery query. These errors are thrown in an\n // async fashion, which makes them un-catchable by the user.\n try {\n this._requestFn(this._nextQuery, (err, results, nextQuery) => {\n if (err) {\n this.destroy(err);\n return;\n }\n this._nextQuery = nextQuery;\n if (this._resultsToSend !== Infinity) {\n results = results.splice(0, this._resultsToSend);\n this._resultsToSend -= results.length;\n }\n let more = true;\n for (const result of results) {\n if (this._ended) {\n break;\n }\n more = this.push(result);\n }\n const isFinished = !this._nextQuery || this._resultsToSend < 1;\n const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls;\n if (isFinished || madeMaxCalls) {\n this.end();\n }\n if (more && !this._ended) {\n setImmediate(() => this._read());\n }\n this._reading = false;\n });\n }\n catch (e) {\n this.destroy(e);\n }\n }\n}\nexports.ResourceStream = ResourceStream;\n//# sourceMappingURL=resource-stream.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MissingProjectIdError = exports.replaceProjectIdToken = void 0;\nconst stream_1 = require(\"stream\");\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/**\n * Populate the `{{projectId}}` placeholder.\n *\n * @throws {Error} If a projectId is required, but one is not provided.\n *\n * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped.\n * @param {string} projectId - A projectId. 
If not provided\n * @return {*} - The original argument with all placeholders populated.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction replaceProjectIdToken(value, projectId) {\n if (Array.isArray(value)) {\n value = value.map(v => replaceProjectIdToken(v, projectId));\n }\n if (value !== null &&\n typeof value === 'object' &&\n !(value instanceof Buffer) &&\n !(value instanceof stream_1.Stream) &&\n typeof value.hasOwnProperty === 'function') {\n for (const opt in value) {\n // eslint-disable-next-line no-prototype-builtins\n if (value.hasOwnProperty(opt)) {\n value[opt] = replaceProjectIdToken(value[opt], projectId);\n }\n }\n }\n if (typeof value === 'string' &&\n value.indexOf('{{projectId}}') > -1) {\n if (!projectId || projectId === '{{projectId}}') {\n throw new MissingProjectIdError();\n }\n value = value.replace(/{{projectId}}/g, projectId);\n }\n return value;\n}\nexports.replaceProjectIdToken = replaceProjectIdToken;\n/**\n * Custom error type for missing project ID errors.\n */\nclass MissingProjectIdError extends Error {\n constructor() {\n super(...arguments);\n this.message = `Sorry, we cannot connect to Cloud Services without a project\n ID. You may specify one with an environment variable named\n \"GOOGLE_CLOUD_PROJECT\".`.replace(/ +/g, ' ');\n }\n}\nexports.MissingProjectIdError = MissingProjectIdError;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/* eslint-disable prefer-rest-params */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0;\n/**\n * Wraps a callback style function to conditionally return a promise.\n *\n * @param {function} originalMethod - The method to promisify.\n * @param {object=} options - Promise options.\n * @param {boolean} options.singular - Resolve the promise with single arg instead of an array.\n * @return {function} wrapped\n */\nfunction promisify(originalMethod, options) {\n if (originalMethod.promisified_) {\n return originalMethod;\n }\n options = options || {};\n const slice = Array.prototype.slice;\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n let last;\n for (last = arguments.length - 1; last >= 0; last--) {\n const arg = arguments[last];\n if (typeof arg === 'undefined') {\n continue; // skip trailing undefined.\n }\n if (typeof arg !== 'function') {\n break; // non-callback last argument found.\n }\n return originalMethod.apply(this, arguments);\n }\n // peel trailing undefined.\n const args = slice.call(arguments, 0, last + 1);\n // tslint:disable-next-line:variable-name\n let PromiseCtor = Promise;\n // Because dedupe will likely create a single install of\n // @google-cloud/common to be shared amongst all modules, we need to\n // localize it at the Service level.\n if (this && this.Promise) {\n PromiseCtor = this.Promise;\n }\n return new PromiseCtor((resolve, reject) => {\n // tslint:disable-next-line:no-any\n args.push((...args) => {\n const callbackArgs = slice.call(args);\n const err = callbackArgs.shift();\n if (err) {\n return reject(err);\n }\n if (options.singular && callbackArgs.length === 1) {\n resolve(callbackArgs[0]);\n }\n else {\n resolve(callbackArgs);\n }\n });\n originalMethod.apply(this, args);\n });\n };\n wrapper.promisified_ = true;\n return wrapper;\n}\nexports.promisify = promisify;\n/**\n * Promisifies certain Class methods. 
This will not promisify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\n// tslint:disable-next-line:variable-name\nfunction promisifyAll(Class, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.promisified_) {\n Class.prototype[methodName] = exports.promisify(originalMethod, options);\n }\n });\n}\nexports.promisifyAll = promisifyAll;\n/**\n * Wraps a promisy type function to conditionally call a callback function.\n *\n * @param {function} originalMethod - The method to callbackify.\n * @param {object=} options - Callback options.\n * @param {boolean} options.singular - Pass to the callback a single arg instead of an array.\n * @return {function} wrapped\n */\nfunction callbackify(originalMethod) {\n if (originalMethod.callbackified_) {\n return originalMethod;\n }\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n if (typeof arguments[arguments.length - 1] !== 'function') {\n return originalMethod.apply(this, arguments);\n }\n const cb = Array.prototype.pop.call(arguments);\n originalMethod.apply(this, arguments).then(\n // tslint:disable-next-line:no-any\n (res) => {\n res = Array.isArray(res) ? res : [res];\n cb(null, ...res);\n }, (err) => cb(err));\n };\n wrapper.callbackified_ = true;\n return wrapper;\n}\nexports.callbackify = callbackify;\n/**\n * Callbackifies certain Class methods. 
This will not callbackify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\nfunction callbackifyAll(\n// tslint:disable-next-line:variable-name\nClass, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.callbackified_) {\n Class.prototype[methodName] = exports.callbackify(originalMethod);\n }\n });\n}\nexports.callbackifyAll = callbackifyAll;\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AclRoleAccessorMethods = exports.Acl = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Attach functionality to a {@link Storage.acl} instance. 
This will add an\n * object for each role group (owners, readers, and writers), with each object\n * containing methods to add or delete a type of entity.\n *\n * As an example, here are a few methods that are created.\n *\n * myBucket.acl.readers.deleteGroup('groupId', function(err) {});\n *\n * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {});\n *\n * myBucket.acl.writers.addDomain('example.com', function(err, acl) {});\n *\n * @private\n */\nclass AclRoleAccessorMethods {\n constructor() {\n this.owners = {};\n this.readers = {};\n this.writers = {};\n /**\n * An object of convenience methods to add or delete owner ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.owners.addAllAuthenticatedUsers`\n * - `myFile.acl.owners.deleteAllAuthenticatedUsers`\n * - `myFile.acl.owners.addAllUsers`\n * - `myFile.acl.owners.deleteAllUsers`\n * - `myFile.acl.owners.addDomain`\n * - `myFile.acl.owners.deleteDomain`\n * - `myFile.acl.owners.addGroup`\n * - `myFile.acl.owners.deleteGroup`\n * - `myFile.acl.owners.addProject`\n * - `myFile.acl.owners.deleteProject`\n * - `myFile.acl.owners.addUser`\n * - `myFile.acl.owners.deleteUser`\n *\n * @name Acl#owners\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as an owner of a file.\n * //-\n * const myBucket = gcs.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n * myFile.acl.owners.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.owners.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.owners = {};\n /**\n * An object of convenience methods to add or delete reader ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.readers.addAllAuthenticatedUsers`\n * - `myFile.acl.readers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.readers.addAllUsers`\n * - `myFile.acl.readers.deleteAllUsers`\n * - `myFile.acl.readers.addDomain`\n * - `myFile.acl.readers.deleteDomain`\n * - `myFile.acl.readers.addGroup`\n * - `myFile.acl.readers.deleteGroup`\n * - `myFile.acl.readers.addProject`\n * - `myFile.acl.readers.deleteProject`\n * - `myFile.acl.readers.addUser`\n * - `myFile.acl.readers.deleteUser`\n *\n * @name Acl#readers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a reader of a file.\n * //-\n * myFile.acl.readers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.READER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.readers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = 
data[1];\n * });\n * ```\n */\n this.readers = {};\n /**\n * An object of convenience methods to add or delete writer ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.writers.addAllAuthenticatedUsers`\n * - `myFile.acl.writers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.writers.addAllUsers`\n * - `myFile.acl.writers.deleteAllUsers`\n * - `myFile.acl.writers.addDomain`\n * - `myFile.acl.writers.deleteDomain`\n * - `myFile.acl.writers.addGroup`\n * - `myFile.acl.writers.deleteGroup`\n * - `myFile.acl.writers.addProject`\n * - `myFile.acl.writers.deleteProject`\n * - `myFile.acl.writers.addUser`\n * - `myFile.acl.writers.deleteUser`\n *\n * @name Acl#writers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a writer of a file.\n * //-\n * myFile.acl.writers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.WRITER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.writers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.writers = {};\n AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this));\n }\n _assignAccessMethods(role) {\n const accessMethods = AclRoleAccessorMethods.accessMethods;\n const entities = AclRoleAccessorMethods.entities;\n const roleGroup = role.toLowerCase() + 's';\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[roleGroup] = entities.reduce((acc, entity) => {\n const isPrefix = entity.charAt(entity.length - 1) === '-';\n accessMethods.forEach(accessMethod => {\n let method = accessMethod + entity[0].toUpperCase() + entity.substr(1);\n if (isPrefix) {\n method = method.replace('-', '');\n }\n // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the\n // more complex API of specifying an `entity` and `role`.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n acc[method] = (entityId, options, callback) => {\n let apiEntity;\n if (typeof options === 'function') {\n callback = options;\n options = {};\n }\n if (isPrefix) {\n apiEntity = entity + entityId;\n }\n else {\n // If the entity is not a prefix, it is a special entity group\n // that does not require further details. The accessor methods\n // only accept a callback.\n apiEntity = entity;\n callback = entityId;\n }\n options = Object.assign({\n entity: apiEntity,\n role,\n }, options);\n const args = [options];\n if (typeof callback === 'function') {\n args.push(callback);\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return this[accessMethod].apply(this, args);\n };\n });\n return acc;\n }, {});\n }\n}\nexports.AclRoleAccessorMethods = AclRoleAccessorMethods;\nAclRoleAccessorMethods.accessMethods = ['add', 'delete'];\nAclRoleAccessorMethods.entities = [\n // Special entity groups that do not require further specification.\n 'allAuthenticatedUsers',\n 'allUsers',\n // Entity groups that require specification, e.g. 
`user-email@example.com`.\n 'domain-',\n 'group-',\n 'project-',\n 'user-',\n];\nAclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER'];\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against an\n * object or bucket (for example, `READ` or `WRITE`); the entity defines who the\n * permission applies to (for example, a specific user or group of users).\n *\n * Where an `entity` value is accepted, we follow the format the Cloud Storage\n * API expects.\n *\n * Refer to\n * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls\n * for the most up-to-date values.\n *\n * - `user-userId`\n * - `user-email`\n * - `group-groupId`\n * - `group-email`\n * - `domain-domain`\n * - `project-team-projectId`\n * - `allUsers`\n * - `allAuthenticatedUsers`\n *\n * Examples:\n *\n * - The user \"liz@example.com\" would be `user-liz@example.com`.\n * - The group \"example@googlegroups.com\" would be\n * `group-example@googlegroups.com`.\n * - To refer to all members of the Google Apps for Business domain\n * \"example.com\", the entity would be `domain-example.com`.\n *\n * For more detailed information, see\n * {@link http://goo.gl/6qBBPO| About Access Control Lists}.\n *\n * @constructor Acl\n * @mixin\n * @param {object} options Configuration options.\n */\nclass Acl extends AclRoleAccessorMethods {\n constructor(options) {\n super();\n this.pathPrefix = options.pathPrefix;\n this.request_ = options.request;\n }\n /**\n * @typedef {array} AddAclResponse\n * @property {object} 0 The Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback AddAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Add access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be added.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link https://cloud.google.com/storage/docs/access-control Access\n * Control}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {AddAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * // Here is how 
you would grant ownership permissions to a user on a\n * specific\n * // revision of a file.\n * //-\n * myFile.acl.add({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_add_file_owner\n * Example of adding an owner to a file:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n */\n add(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'POST',\n uri: '',\n qs: query,\n maxRetries: 0,\n json: {\n entity: options.entity,\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * @typedef {array} RemoveAclResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback RemoveAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation}\n *\n * @param {object} options Configuration object.\n * @param {string} options.entity Whose permissions will be revoked.\n * @param {int} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {RemoveAclCallback} callback The callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.delete({\n * entity: 'user-useremail@example.com'\n * }, function(err, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.delete({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_file_owner\n * Example of removing an owner from a bucket:\n */\n delete(options, callback) {\n const query 
= {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'DELETE',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n }, (err, resp) => {\n callback(err, resp);\n });\n }\n /**\n * @typedef {array} GetAclResponse\n * @property {object|object[]} 0 Single or array of Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object|object[]} acl Single or array of Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get access controls on a {@link Bucket} or {@link File}. If\n * an entity is omitted, you will receive an array of all applicable access\n * controls.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation}\n *\n * @param {object|function} [options] Configuration options. If you want to\n * receive a list of all access controls, pass the callback function as\n * the only argument.\n * @param {string} [options.entity] Whose permissions will be fetched.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.get({\n * entity: 'user-useremail@example.com'\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // Get all access controls.\n * //-\n * myBucket.acl.get(function(err, aclObjects, apiResponse) {\n * // aclObjects = [\n * // {\n * // entity: 'user-useremail@example.com',\n * // role: 'owner'\n * // }\n * // ]\n * });\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.get({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.get().then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl\n * Example of printing a file's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl_for_user\n * Example of printing a file's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n */\n get(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb;\n let path = '';\n const query = {};\n if (options) {\n path = '/' + encodeURIComponent(options.entity);\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n }\n this.request({\n uri: path,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n let results;\n if (resp.items) {\n results = resp.items.map(this.makeAclObject_);\n }\n else {\n results = this.makeAclObject_(resp);\n }\n callback(null, results, resp);\n });\n }\n /**\n * @typedef {array} UpdateAclResponse\n * @property {object} 0 The updated Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UpdateAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The updated Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Update access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be updated.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link Storage.acl}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {UpdateAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE\n * };\n *\n * myBucket.acl.update(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.update({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.update(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n update(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'PUT',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n json: {\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * Transform API responses to a consistent object format.\n *\n * @private\n */\n makeAclObject_(accessControlObject) {\n const obj = {\n entity: accessControlObject.entity,\n role: accessControlObject.role,\n };\n if (accessControlObject.projectTeam) {\n obj.projectTeam = accessControlObject.projectTeam;\n }\n return obj;\n }\n /**\n * Patch requests up 
to the bucket's request object.\n *\n * @private\n *\n * @param {string} method Action.\n * @param {string} path Request path.\n * @param {*} query Request query object.\n * @param {*} body Request body contents.\n * @param {function} callback Callback function.\n */\n request(reqOpts, callback) {\n reqOpts.uri = this.pathPrefix + reqOpts.uri;\n this.request_(reqOpts, callback);\n }\n}\nexports.Acl = Acl;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Acl, {\n exclude: ['request'],\n});\n//# sourceMappingURL=acl.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst extend = require(\"extend\");\nconst fs = require(\"fs\");\nconst mime = require(\"mime-types\");\nconst path = require(\"path\");\nconst pLimit = require(\"p-limit\");\nconst util_1 = require(\"util\");\nconst retry = require(\"async-retry\");\nconst util_2 = require(\"./util\");\nconst acl_1 = require(\"./acl\");\nconst file_1 = require(\"./file\");\nconst iam_1 = require(\"./iam\");\nconst notification_1 = require(\"./notification\");\nconst storage_1 = require(\"./storage\");\nconst signer_1 = require(\"./signer\");\nconst stream_1 = require(\"stream\");\nconst url_1 = require(\"url\");\nvar BucketActionToHTTPMethod;\n(function (BucketActionToHTTPMethod) {\n BucketActionToHTTPMethod[\"list\"] = \"GET\";\n})(BucketActionToHTTPMethod = exports.BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = {}));\nvar AvailableServiceObjectMethods;\n(function (AvailableServiceObjectMethods) {\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"setMetadata\"] = 0] = \"setMetadata\";\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"delete\"] = 1] = \"delete\";\n})(AvailableServiceObjectMethods = exports.AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = {}));\nvar BucketExceptionMessages;\n(function (BucketExceptionMessages) {\n BucketExceptionMessages[\"PROVIDE_SOURCE_FILE\"] = \"You must provide at least one source file.\";\n BucketExceptionMessages[\"DESTINATION_FILE_NOT_SPECIFIED\"] = \"A destination file must be specified.\";\n BucketExceptionMessages[\"CHANNEL_ID_REQUIRED\"] = \"An ID is required to create a channel.\";\n BucketExceptionMessages[\"CHANNEL_ADDRESS_REQUIRED\"] = \"An address is required to create a channel.\";\n BucketExceptionMessages[\"TOPIC_NAME_REQUIRED\"] = \"A valid topic name is required.\";\n BucketExceptionMessages[\"CONFIGURATION_OBJECT_PREFIX_REQUIRED\"] = \"A configuration object with 
a prefix is required.\";\n BucketExceptionMessages[\"SPECIFY_FILE_NAME\"] = \"A file name must be specified.\";\n BucketExceptionMessages[\"METAGENERATION_NOT_PROVIDED\"] = \"A metageneration must be provided.\";\n BucketExceptionMessages[\"SUPPLY_NOTIFICATION_ID\"] = \"You must supply a notification ID.\";\n})(BucketExceptionMessages = exports.BucketExceptionMessages || (exports.BucketExceptionMessages = {}));\n/**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n/**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n/**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n/**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n/**\n * A function that generates a CRC32C Validator. Defaults to {@link CRC32C}\n *\n * @name Bucket#crc32cGenerator\n * @type {CRC32CValidator}\n */\n/**\n * Get and set IAM policies for your bucket.\n *\n * @name Bucket#iam\n * @mixes Iam\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Get the IAM policy for your bucket.\n * //-\n * bucket.iam.getPolicy(function(err, policy) {\n * console.log(policy);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy().then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. 
ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a Bucket instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * Buckets also have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. Default ACLs specify permissions that all new\n * objects added to the bucket will inherit by default. You can add, delete,\n * get, and update entities and permissions for these as well with\n * {@link Bucket#acl.default}.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control Lists}\n * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs}\n *\n * @name Bucket#acl\n * @mixes Acl\n * @property {Acl} default Cloud Storage Buckets have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. You can add, delete, get, and update entities and\n * permissions for these as well. The method signatures and examples are all\n * the same, after only prefixing the method call with `default`.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Make a bucket's contents publicly readable.\n * //-\n * const myBucket = storage.bucket('my-bucket');\n *\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n */\n/**\n * The API-formatted resource description of the bucket.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. 
To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Bucket#metadata\n * @type {object}\n */\n/**\n * The bucket's name.\n * @name Bucket#name\n * @type {string}\n */\n/**\n * Get {@link File} objects for the files currently in the bucket as a\n * readable object stream.\n *\n * @method Bucket#getFilesStream\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @returns {ReadableStream} A readable stream that emits {@link File} instances.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFilesStream()\n * .on('error', console.error)\n * .on('data', function(file) {\n * // file is a File object.\n * })\n * .on('end', function() {\n * // All files retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * bucket.getFilesStream()\n * .on('data', function(file) {\n * this.end();\n * });\n *\n * //-\n * // If you're filtering files with a delimiter, you should use\n * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to\n * // preserve the `apiResponse` argument.\n * //-\n * const prefixes = [];\n *\n * function callback(err, files, nextQuery, apiResponse) {\n * prefixes = prefixes.concat(apiResponse.prefixes);\n *\n * if (nextQuery) {\n * bucket.getFiles(nextQuery, callback);\n * } else {\n * // prefixes = The finished array of prefixes.\n * }\n * }\n *\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, callback);\n * ```\n */\n/**\n * Create a Bucket object to interact with a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Storage} storage A {@link Storage} instance.\n * @param {string} name The name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * ```\n */\nclass Bucket extends nodejs_common_1.ServiceObject {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n getFilesStream(query) {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n constructor(storage, name, options) {\n var _a, _b, _c, _d;\n options = options || {};\n // Allow for \"gs://\"-style input, and strip any trailing slashes.\n name = name.replace(/^gs:\\/\\//, '').replace(/\\/+$/, '');\n const requestQueryObject = {};\n if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n requestQueryObject.ifGenerationMatch =\n options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n requestQueryObject.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n requestQueryObject.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n requestQueryObject.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n const userProject = options.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * Create a bucket.\n *\n * @method Bucket#create\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.create(function(err, bucket, apiResponse) {\n * if (!err) {\n * // The bucket was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.create().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * IamDeleteBucketOptions Configuration options.\n * @property {boolean} [ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} DeleteBucketResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation}\n *\n * @method Bucket#delete\n * @param {DeleteBucketOptions} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_delete_bucket\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} BucketExistsResponse\n * @property {boolean} 0 Whether the {@link Bucket} exists.\n */\n /**\n * @callback BucketExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link Bucket} exists.\n */\n /**\n * Check if the bucket exists.\n *\n * @method Bucket#exists\n * @param {BucketExistsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {BucketExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * 
const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get()\n * @property {boolean} [autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetBucketResponse\n * @property {Bucket} 0 The {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The {@link Bucket}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a bucket if it exists.\n *\n * You may optionally use this to \"get or create\" an object by providing\n * an object with `autoCreate` set to `true`. Any extra configuration that\n * is normally required for the `create` method must be contained within\n * this object as well.\n *\n * @method Bucket#get\n * @param {GetBucketOptions} [options] Configuration options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.get(function(err, bucket, apiResponse) {\n * // `bucket.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.get().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetBucketMetadataResponse\n * @property {object} 0 The bucket metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Get the bucket's metadata.\n *\n * To set metadata, see {@link Bucket#setMetadata}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation}\n *\n * @method Bucket#getMetadata\n * @param {GetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is 
omitted, we'll return a Promise.\n * //-\n * bucket.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_get_requester_pays_status\n * Example of retrieving the requester pays status of a bucket:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} SetBucketMetadataResponse\n * @property {object} apiResponse The full API response.\n */\n /**\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * Set the bucket's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @method Bucket#setMetadata\n * @param {object} metadata The metadata you wish to set.\n * @param {SetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Set website metadata field on the bucket.\n * //-\n * const metadata = {\n * website: {\n * mainPageSuffix: 'http://example.com',\n * notFoundPage: 'http://example.com/404.html'\n * }\n * };\n *\n * bucket.setMetadata(metadata, function(err, apiResponse) {});\n *\n * //-\n * // Enable versioning for your bucket.\n * //-\n * bucket.setMetadata({\n * versioning: {\n * enabled: true\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Enable KMS encryption for objects within this bucket.\n * //-\n * bucket.setMetadata({\n * encryption: {\n * defaultKmsKeyName: 'projects/grape-spaceship-123/...'\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Set the default event-based hold value for new objects in this\n * // bucket.\n * //-\n * bucket.setMetadata({\n * defaultEventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Remove object lifecycle rules.\n * //-\n * bucket.setMetadata({\n * lifecycle: null\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: storage,\n baseUrl: '/b',\n id: name,\n createMethod: storage.createBucket.bind(storage),\n methods,\n });\n this.name = name;\n this.storage = storage;\n this.userProject = options.userProject;\n this.acl = new acl_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.acl.default = new acl_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/defaultObjectAcl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.storage.crc32cGenerator;\n this.iam = new iam_1.Iam(this);\n this.getFilesStream = paginator_1.paginator.streamify('getFiles');\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n this.instancePreconditionOpts = options === null || 
options === void 0 ? void 0 : options.preconditionOpts;\n }\n /**\n * The bucket's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * // `gs://my-bucket`\n * const href = bucket.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = new url_1.URL('gs://');\n uri.host = this.name;\n return uri;\n }\n /**\n * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule().\n * @property {boolean} [append=true] The new rules will be appended to any\n * pre-existing rules.\n */\n /**\n *\n * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects\n * in this bucket.\n * @property {string|object} action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @property {object} condition Condition a bucket must meet before the\n * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @property {string} [storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to. Please see\n * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions.\n */\n /**\n * Add an object lifecycle management rule to the bucket.\n *\n * By default, an Object Lifecycle Management rule provided to this method\n * will be included to the existing policy. To replace all existing rules,\n * supply the `options` argument, setting `append` to `false`.\n *\n * To add multiple rules, pass a list to the `rule` parameter. Calling this\n * function multiple times asynchronously does not guarantee that all rules\n * are added correctly.\n *\n * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects\n * in this bucket.\n * @param {string|object} rule.action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @param {object} rule.condition Condition a bucket must meet before the\n * action occurson the bucket. 
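 * A hedged illustration of the conversion this method performs (the rule shown is
 * hypothetical, not from the published samples): a string `action` is capitalized into
 * `{type}` and `Date` conditions are reduced to `YYYY-MM-DD` strings before the rule is
 * sent to the API.
 * ```
 * bucket.addLifecycleRule({
 *   action: 'setStorageClass',
 *   storageClass: 'COLDLINE',
 *   condition: { createdBefore: new Date('2018') }
 * }, function(err, apiResponse) {});
 *
 * // Internally this becomes, approximately:
 * // { action: { type: 'SetStorageClass', storageClass: 'COLDLINE' },
 * //   condition: { createdBefore: '2018-01-01' } }
 * ```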
Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @param {string} [rule.storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to.\n * @param {AddLifecycleRuleOptions} [options] Configuration object.\n * @param {boolean} [options.append=true] Append the new rule to the existing\n * policy.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Automatically have an object deleted from this bucket once it is 3 years\n * // of age.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * const lifecycleRules = bucket.metadata.lifecycle.rule;\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // By default, the rule you provide will be added to the existing policy.\n * // Optionally, you can disable this behavior to replace all of the\n * // pre-existing rules.\n * //-\n * const options = {\n * append: false\n * };\n *\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, options, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // All rules have been replaced with the new \"delete\" rule.\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // For objects created before 2018, \"downgrade\" the storage class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'setStorageClass',\n * storageClass: 'COLDLINE',\n * condition: {\n * createdBefore: new Date('2018')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete objects created before 2016 which have the Coldline storage\n * // class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * matchesStorageClass: [\n * 'COLDLINE'\n * ],\n * createdBefore: new Date('2016')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceNoncurrentTime: 100\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * noncurrentTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a customTime that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceCustomTime: 100\n * }\n * }, function(err, apiResponse) ());\n *\n * //-\n * // Delete object that has a customTime before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * customTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n * ```\n */\n addLifecycleRule(rule, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback 
=== 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n options = options || {};\n const rules = Array.isArray(rule) ? rule : [rule];\n const newLifecycleRules = rules.map(rule => {\n if (typeof rule.action === 'object') {\n // This is a raw-formatted rule object, the way the API expects.\n // Just pass it through as-is.\n return rule;\n }\n const apiFormattedRule = {};\n apiFormattedRule.condition = {};\n apiFormattedRule.action = {\n type: rule.action.charAt(0).toUpperCase() + rule.action.slice(1),\n };\n if (rule.storageClass) {\n apiFormattedRule.action.storageClass = rule.storageClass;\n }\n for (const condition in rule.condition) {\n if (rule.condition[condition] instanceof Date) {\n apiFormattedRule.condition[condition] = rule.condition[condition]\n .toISOString()\n .replace(/T.+$/, '');\n }\n else {\n apiFormattedRule.condition[condition] = rule.condition[condition];\n }\n }\n return apiFormattedRule;\n });\n if (options.append === false) {\n this.setMetadata({ lifecycle: { rule: newLifecycleRules } }, options, callback);\n return;\n }\n // The default behavior appends the previously-defined lifecycle rules with\n // the new ones just passed in by the user.\n this.getMetadata((err, metadata) => {\n if (err) {\n callback(err);\n return;\n }\n const currentLifecycleRules = Array.isArray(metadata.lifecycle && metadata.lifecycle.rule)\n ? metadata.lifecycle && metadata.lifecycle.rule\n : [];\n this.setMetadata({\n lifecycle: {\n rule: currentLifecycleRules.concat(newLifecycleRules),\n },\n }, options, callback);\n });\n }\n /**\n * @typedef {object} CombineOptions\n * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CombineCallback\n * @param {?Error} err Request error, if any.\n * @param {File} newFile The new {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CombineResponse\n * @property {File} 0 The new {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * Combine multiple files into one new file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation}\n *\n * @throws {Error} if a non-array is provided as sources argument.\n * @throws {Error} if no sources are provided.\n * @throws {Error} if no destination is provided.\n *\n * @param {string[]|File[]} sources The source files that will be\n * combined.\n * @param {string|File} destination The file you would like the\n * source files combined into.\n * @param {CombineOptions} [options] Configuration options.\n * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. 
Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {CombineCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const logBucket = storage.bucket('log-bucket');\n *\n * const sources = [\n * logBucket.file('2013-logs.txt'),\n * logBucket.file('2014-logs.txt')\n * ];\n *\n * const allLogs = logBucket.file('all-logs.txt');\n *\n * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) {\n * // newFile === allLogs\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * logBucket.combine(sources, allLogs).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n combine(sources, destination, optionsOrCallback, callback) {\n var _a;\n if (!Array.isArray(sources) || sources.length === 0) {\n throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE);\n }\n if (!destination) {\n throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required\n AvailableServiceObjectMethods.setMetadata, // Same as above\n options);\n const convertToFile = (file) => {\n if (file instanceof file_1.File) {\n return file;\n }\n return this.file(file);\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n sources = sources.map(convertToFile);\n const destinationFile = convertToFile(destination);\n callback = callback || nodejs_common_1.util.noop;\n if (!destinationFile.metadata.contentType) {\n const destinationContentType = mime.contentType(destinationFile.name);\n if (destinationContentType) {\n destinationFile.metadata.contentType = destinationContentType;\n }\n }\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) ===\n undefined &&\n options.ifGenerationMatch === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n if (options.ifGenerationMatch === undefined) {\n Object.assign(options, destinationFile.instancePreconditionOpts, options);\n }\n // Make the request from the destination File object.\n destinationFile.request({\n method: 'POST',\n uri: '/compose',\n maxRetries,\n json: {\n destination: {\n contentType: destinationFile.metadata.contentType,\n },\n sourceObjects: sources.map(source => {\n const sourceObject = {\n name: source.name,\n };\n if (source.metadata && source.metadata.generation) {\n sourceObject.generation = source.metadata.generation;\n }\n return sourceObject;\n }),\n },\n qs: options,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, destinationFile, resp);\n });\n }\n /**\n * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}.\n *\n * @typedef {object} CreateChannelConfig\n * @property {string} address The address where notifications are\n * delivered for this channel.\n * @property {string} [delimiter] Returns results in a directory-like mode.\n * @property {number} [maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {string} [projection=noAcl] Set of properties to return.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions=false] If `true`, lists all versions of an object\n * as distinct results.\n */\n /**\n * @typedef {object} CreateChannelOptions\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} CreateChannelResponse\n * @property {Channel} 0 The new {@link Channel}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateChannelCallback\n * @param {?Error} err Request error, if any.\n * @param {Channel} channel The new {@link Channel}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Create a channel that will be notified when objects in this bucket changes.\n *\n * @throws {Error} If an ID is not provided.\n * @throws {Error} If an address is not provided.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation}\n *\n * @param {string} id The ID of the channel to create.\n * @param {CreateChannelConfig} config Configuration for creating channel.\n * @param {string} config.address The address where notifications are\n * delivered for this channel.\n * @param {string} [config.delimiter] Returns results in a directory-like mode.\n * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @param {string} [config.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} [config.prefix] Filter results 
to objects whose names begin\n * with this prefix.\n * @param {string} [config.projection=noAcl] Set of properties to return.\n * @param {string} [config.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [config.versions=false] If `true`, lists all versions of an object\n * as distinct results.\n * @param {CreateChannelOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateChannelCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const id = 'new-channel-id';\n *\n * const config = {\n * address: 'https://...'\n * };\n *\n * bucket.createChannel(id, config, function(err, channel, apiResponse) {\n * if (!err) {\n * // Channel created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.createChannel(id, config).then(function(data) {\n * const channel = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n createChannel(id, config, optionsOrCallback, callback) {\n if (typeof id !== 'string') {\n throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED);\n }\n if (typeof config.address !== 'string') {\n throw new Error(BucketExceptionMessages.CHANNEL_ADDRESS_REQUIRED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n method: 'POST',\n uri: '/o/watch',\n json: Object.assign({\n id,\n type: 'web_hook',\n }, config),\n qs: options,\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const resourceId = apiResponse.resourceId;\n const channel = this.storage.channel(id, resourceId);\n channel.metadata = apiResponse;\n callback(null, channel, apiResponse);\n });\n }\n /**\n * Metadata to set for the Notification.\n *\n * @typedef {object} CreateNotificationOptions\n * @property {object} [customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @property {string[]} [eventTypes] If present, only send notifications about\n * listed event types. 
If empty, sent notifications for all event types.\n * @property {string} [objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @property {string} [payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CreateNotificationCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification} notification The new {@link Notification}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CreateNotificationResponse\n * @property {Notification} 0 The new {@link Notification}.\n * @property {object} 1 The full API response.\n */\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationOptions} [options] Metadata to set for the\n * notification.\n * @param {object} [options.customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @param {string[]} [options.eventTypes] If present, only send notifications about\n * listed event types. If empty, sent notifications for all event types.\n * @param {string} [options.objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @param {string} [options.payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n * @see Notification#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const callback = function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * };\n *\n * myBucket.createNotification('my-topic', callback);\n *\n * //-\n * // Configure the nofiication by providing Notification metadata.\n * //-\n * const metadata = {\n * objectNamePrefix: 'prefix-'\n * };\n *\n * myBucket.createNotification('my-topic', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.createNotification('my-topic').then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/createNotification.js\n * region_tag:storage_create_bucket_notifications\n * Another example:\n */\n createNotification(topic, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n 
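        /*
         * Descriptive note (added comment, not executed): the lines below normalize the
         * request. For a hypothetical call such as `bucket.createNotification('my-topic')`,
         * the body POSTed to /notificationConfigs ends up roughly as:
         *   { topic: '//pubsub.googleapis.com/projects/{{projectId}}/topics/my-topic',
         *     payload_format: 'JSON_API_V1' }
         * (keys are snake_cased via convertObjKeysToSnakeCase, and maxRetries is 0
         * because this request is not idempotent).
         */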
const topicIsObject = topic !== null && typeof topic === 'object';\n if (topicIsObject && nodejs_common_1.util.isCustomType(topic, 'pubsub/topic')) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n topic = topic.name;\n }\n if (typeof topic !== 'string') {\n throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED);\n }\n const body = Object.assign({ topic }, options);\n if (body.topic.indexOf('projects') !== 0) {\n body.topic = 'projects/{{projectId}}/topics/' + body.topic;\n }\n body.topic = '//pubsub.googleapis.com/' + body.topic;\n if (!body.payloadFormat) {\n body.payloadFormat = 'JSON_API_V1';\n }\n const query = {};\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n this.request({\n method: 'POST',\n uri: '/notificationConfigs',\n json: (0, util_2.convertObjKeysToSnakeCase)(body),\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const notification = this.notification(apiResponse.id);\n notification.metadata = apiResponse;\n callback(null, notification, apiResponse);\n });\n }\n /**\n * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles}\n * for all of the supported properties.\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n */\n /**\n * @callback DeleteFilesCallback\n * @param {?Error|?Error[]} err Request error, if any, or array of errors from\n * files that were not able to be deleted.\n * @param {object} [apiResponse] The full API response.\n */\n /**\n * Iterate over the bucket's files, calling `file.delete()` on each.\n *\n * This is not an atomic request. A delete attempt will be\n * made for each file individually. Any one can fail, in which case only a\n * portion of the files you intended to be deleted would have.\n *\n * Operations are performed in parallel, up to 10 at once. The first error\n * breaks the loop and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors until all files have had a chance\n * to be processed.\n *\n * File preconditions cannot be passed to this function. It will not retry unless\n * the idempotency strategy is set to retry always.\n *\n * The `query` object passed as the first argument will also be passed to\n * {@link Bucket#getFiles}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles}\n * @param {boolean} [query.force] Suppress errors until all files have been\n * processed.\n * @param {DeleteFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the files in the bucket.\n * //-\n * bucket.deleteFiles(function(err) {});\n *\n * //-\n * // By default, if a file cannot be deleted, this method will stop deleting\n * // files from your bucket. You can override this setting with `force:\n * // true`.\n * //-\n * bucket.deleteFiles({\n * force: true\n * }, function(errors) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * });\n *\n * //-\n * // The first argument to this method acts as a query to\n * // {@link Bucket#getFiles}. 
As an example, you can delete files\n * // which match a prefix.\n * //-\n * bucket.deleteFiles({\n * prefix: 'images/'\n * }, function(err) {\n * if (!err) {\n * // All files in the `images` directory have been deleted.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteFiles().then(function() {});\n * ```\n */\n deleteFiles(queryOrCallback, callback) {\n let query = {};\n if (typeof queryOrCallback === 'function') {\n callback = queryOrCallback;\n }\n else if (queryOrCallback) {\n query = queryOrCallback;\n }\n const MAX_PARALLEL_LIMIT = 10;\n const MAX_QUEUE_SIZE = 1000;\n const errors = [];\n const deleteFile = (file) => {\n return file.delete(query).catch(err => {\n if (!query.force) {\n throw err;\n }\n errors.push(err);\n });\n };\n (async () => {\n try {\n let promises = [];\n const limit = pLimit(MAX_PARALLEL_LIMIT);\n const filesStream = this.getFilesStream(query);\n for await (const curFile of filesStream) {\n if (promises.length >= MAX_QUEUE_SIZE) {\n await Promise.all(promises);\n promises = [];\n }\n promises.push(limit(() => deleteFile(curFile)).catch(e => {\n filesStream.destroy();\n throw e;\n }));\n }\n await Promise.all(promises);\n callback(errors.length > 0 ? errors : null);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @typedef {array} DeleteLabelsResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata Bucket's metadata.\n */\n /**\n * Delete one or more labels from this bucket.\n *\n * @param {string|string[]} [labels] The labels to delete. If no labels are\n * provided, all of the labels are removed.\n * @param {DeleteLabelsCallback} [callback] Callback function.\n * @param {DeleteLabelsOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the labels from this bucket.\n * //-\n * bucket.deleteLabels(function(err, apiResponse) {});\n *\n * //-\n * // Delete a single label.\n * //-\n * bucket.deleteLabels('labelone', function(err, apiResponse) {});\n *\n * //-\n * // Delete a specific set of labels.\n * //-\n * bucket.deleteLabels([\n * 'labelone',\n * 'labeltwo'\n * ], function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteLabels().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) {\n let labels = new Array();\n let options = {};\n if (typeof labelsOrCallbackOrOptions === 'function') {\n callback = labelsOrCallbackOrOptions;\n }\n else if (typeof labelsOrCallbackOrOptions === 'string') {\n labels = [labelsOrCallbackOrOptions];\n }\n else if (Array.isArray(labelsOrCallbackOrOptions)) {\n labels = labelsOrCallbackOrOptions;\n }\n else if (labelsOrCallbackOrOptions) {\n options = labelsOrCallbackOrOptions;\n }\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n const deleteLabels = (labels) => {\n const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => {\n nullLabelMap[labelKey] = null;\n return nullLabelMap;\n }, {});\n if ((options === null || options === void 0 ? 
void 0 : options.ifMetagenerationMatch) !== undefined) {\n this.setLabels(nullLabelMap, options, callback);\n }\n else {\n this.setLabels(nullLabelMap, callback);\n }\n };\n if (labels.length === 0) {\n this.getLabels((err, labels) => {\n if (err) {\n callback(err);\n return;\n }\n deleteLabels(Object.keys(labels));\n });\n }\n else {\n deleteLabels(labels);\n }\n }\n /**\n * @typedef {array} DisableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DisableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only: This feature is not yet widely-available.\n *
\n *\n * Disable `requesterPays` functionality from this bucket.\n *\n * @param {DisableRequesterPaysCallback} [callback] Callback function.\n * @param {DisableRequesterPaysOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.disableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality disabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.disableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_disable_requester_pays\n * Example of disabling requester pays:\n */\n disableRequesterPays(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: false,\n },\n }, options, callback);\n }\n /**\n * Configuration object for enabling logging.\n *\n * @typedef {object} EnableLoggingOptions\n * @property {string|Bucket} [bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @property {string} prefix A unique prefix for log object names.\n */\n /**\n * Enable logging functionality for this bucket. This will make two API\n * requests, first to grant Cloud Storage WRITE permission to the bucket, then\n * to set the appropriate configuration on the Bucket's metadata.\n *\n * @param {EnableLoggingOptions} config Configuration options.\n * @param {string|Bucket} [config.bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @param {string} config.prefix A unique prefix for log object names.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const config = {\n * prefix: 'log'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {\n * if (!err) {\n * // Logging functionality enabled successfully.\n * }\n * });\n *\n * ```\n * @example\n * Optionally, provide a destination bucket.\n * ```\n * const config = {\n * prefix: 'log',\n * bucket: 'destination-bucket'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {});\n * ```\n *\n * @example\n * If the callback is omitted, we'll return a Promise.\n * ```\n * bucket.enableLogging(config).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n enableLogging(config, callback) {\n if (!config ||\n typeof config === 'function' ||\n typeof config.prefix === 'undefined') {\n throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED);\n }\n const logBucket = config.bucket\n ? config.bucket.id || config.bucket\n : this.id;\n const options = {};\n if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) {\n options.ifMetagenerationMatch = config.ifMetagenerationMatch;\n }\n if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) {\n options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch;\n }\n (async () => {\n try {\n const [policy] = await this.iam.getPolicy();\n policy.bindings.push({\n members: ['group:cloud-storage-analytics@google.com'],\n role: 'roles/storage.objectCreator',\n });\n await this.iam.setPolicy(policy);\n this.setMetadata({\n logging: {\n logBucket,\n logObjectPrefix: config.prefix,\n },\n }, options, callback);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @typedef {array} EnableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback EnableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only: This feature is not yet widely-available.\n *
\n *\n * Enable `requesterPays` functionality for this bucket. This enables you, the\n * bucket owner, to have the requesting user assume the charges for the access\n * to your bucket and its contents.\n *\n * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback]\n * Callback function or precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.enableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality enabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.enableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_enable_requester_pays\n * Example of enabling requester pays:\n */\n enableRequesterPays(optionsOrCallback, cb) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: true,\n },\n }, options, cb);\n }\n /**\n * Create a {@link File} object. See {@link File} to see how to handle\n * the different use cases you may have.\n *\n * @param {string} name The name of the file in this bucket.\n * @param {FileOptions} [options] Configuration options.\n * @param {string|number} [options.generation] Only use a specific revision of\n * this file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * KMS key ring must use the same location as the bucket.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @returns {File}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-existing-file.png');\n * ```\n */\n file(name, options) {\n if (!name) {\n throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME);\n }\n return new file_1.File(this, name, options);\n }\n /**\n * @typedef {array} GetFilesResponse\n * @property {File[]} 0 Array of {@link File} instances.\n * @param {object} nextQuery 1 A query object to receive more results.\n * @param {object} apiResponse 2 The full API response.\n */\n /**\n * @callback GetFilesCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Array of {@link File} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Query object for listing files.\n *\n * @typedef {object} GetFilesOptions\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {string} [delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. 
Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @property {string} [endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {boolean} [includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions] If true, returns File objects scoped to\n * their versions.\n */\n /**\n * Get {@link File} objects for the files currently in the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation}\n *\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @param {boolean} [query.autoPaginate=true] Have pagination handled\n * automatically.\n * @param {string} [query.delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @param {string} [query.endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. 
If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @param {string} [query.prefix] Filter results to objects whose names begin\n * with this prefix.\n * @param {number} [query.maxApiCalls] Maximum number of API calls to make.\n * @param {number} [query.maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @param {string} [query.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} [query.startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {string} [query.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [query.versions] If true, returns File objects scoped to\n * their versions.\n * @param {GetFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFiles(function(err, files) {\n * if (!err) {\n * // files is an array of File objects.\n * }\n * });\n *\n * //-\n * // If your bucket has versioning enabled, you can get all of your files\n * // scoped to their generation.\n * //-\n * bucket.getFiles({\n * versions: true\n * }, function(err, files) {\n * // Each file is scoped to its generation.\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, files, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * bucket.getFiles(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * files[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * files[0].getMetadata(function(err, metadata) {});\n * };\n *\n * bucket.getFiles({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getFiles().then(function(data) {\n * const files = data[0];\n * });\n *\n * ```\n * @example\n *
 * Simulating a File System
 *
 * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
 *
 * Consider the following remote objects:
 *
 *   1. \"a\"
 *   2. \"a/b/c/d\"
 *   3. \"b/d/e\"
 *
 * Using a delimiter of `/` will return a single file, \"a\".
 *
 * `apiResponse.prefixes` will return the \"sub-directories\" that were found:
 *
 *   1. \"a/\"
 *   2. \"b/\"
\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // files = [\n * // {File} // File object for file \"a\"\n * // ]\n *\n * // apiResponse.prefixes = [\n * // 'a/',\n * // 'b/'\n * // ]\n * });\n * ```\n *\n * @example\n * Using prefixes, it's now possible to simulate a file system with follow-up requests.\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/',\n * prefix: 'a/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // No files found within \"directory\" a.\n * // files = []\n *\n * // However, a \"sub-directory\" was found.\n * // This prefix can be used to continue traversing the \"file system\".\n * // apiResponse.prefixes = [\n * // 'a/b/'\n * // ]\n * });\n * ```\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files\n * Another example:\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files_with_prefix\n * Example of listing files, filtered by a prefix:\n */\n getFiles(queryOrCallback, callback) {\n let query = typeof queryOrCallback === 'object' ? queryOrCallback : {};\n if (!callback) {\n callback = queryOrCallback;\n }\n query = Object.assign({}, query);\n this.request({\n uri: '/o',\n qs: query,\n }, (err, resp) => {\n if (err) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const files = itemsArray.map((file) => {\n const options = {};\n if (query.versions) {\n options.generation = file.generation;\n }\n if (file.kmsKeyName) {\n options.kmsKeyName = file.kmsKeyName;\n }\n const fileInstance = this.file(file.name, options);\n fileInstance.metadata = file;\n return fileInstance;\n });\n let nextQuery = null;\n if (resp.nextPageToken) {\n nextQuery = Object.assign({}, query, {\n pageToken: resp.nextPageToken,\n });\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(null, files, nextQuery, resp);\n });\n }\n /**\n * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels().\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetLabelsResponse\n * @property {object} 0 Object of labels currently set on this bucket.\n */\n /**\n * @callback GetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} labels Object of labels currently set on this bucket.\n */\n /**\n * Get the labels currently set on this bucket.\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getLabels(function(err, labels) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // labels = {\n * // label: 'labelValue',\n * // ...\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getLabels().then(function(data) {\n * const labels = data[0];\n * });\n * ```\n */\n getLabels(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = 
optionsOrCallback;\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n callback(err, null);\n return;\n }\n callback(null, metadata.labels || {});\n });\n }\n /**\n * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback GetNotificationsCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification[]} notifications Array of {@link Notification}\n * instances.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetNotificationsResponse\n * @property {Notification[]} 0 Array of {@link Notification} instances.\n * @property {object} 1 The full API response.\n */\n /**\n * Retrieves a list of notification subscriptions for a given bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list}\n *\n * @param {GetNotificationsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.getNotifications(function(err, notifications, apiResponse) {\n * if (!err) {\n * // notifications is an array of Notification objects.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getNotifications().then(function(data) {\n * const notifications = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/listNotifications.js\n * region_tag:storage_list_bucket_notifications\n * Another example:\n */\n getNotifications(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n uri: '/notificationConfigs',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const notifications = itemsArray.map((notification) => {\n const notificationInstance = this.notification(notification.id);\n notificationInstance.metadata = notification;\n return notificationInstance;\n });\n callback(null, notifications, resp);\n });\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * @typedef {object} GetBucketSignedUrlConfig\n * @property {string} action Currently only supports \"list\" (HTTP: GET).\n * @property {*} expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @property {string} [version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). 
Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @property {string} [cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @property {object} [extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [queryParams] Additional query parameters to include\n * in the signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to a bucket.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {GetBucketSignedUrlConfig} config Configuration object.\n * @param {string} config.action Currently only supports \"list\" (HTTP: GET).\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * //-\n * // Generate a URL that allows temporary access to list files in a bucket.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'list',\n * expires: '03-17-2025'\n * };\n *\n * bucket.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The bucket is now available to be listed from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n * ```\n */\n getSignedUrl(cfg, callback) {\n const method = BucketActionToHTTPMethod[cfg.action];\n if (!method) {\n throw new Error(storage_1.ExceptionMessages.INVALID_ACTION);\n }\n const signConfig = {\n method,\n expires: cfg.expires,\n version: cfg.version,\n cname: cfg.cname,\n extensionHeaders: cfg.extensionHeaders || {},\n queryParams: cfg.queryParams || {},\n };\n if (!this.signer) {\n this.signer = new signer_1.URLSigner(this.storage.authClient, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback BucketLockCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Lock a previously-defined retention policy. This will prevent changes to\n * the policy.\n *\n * @throws {Error} if a metageneration is not provided.\n *\n * @param {number|string} metageneration The bucket's metageneration. 
This is\n * accesssible from calling {@link File#getMetadata}.\n * @param {BucketLockCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const metageneration = 2;\n *\n * bucket.lock(metageneration, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.lock(metageneration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n lock(metageneration, callback) {\n const metatype = typeof metageneration;\n if (metatype !== 'number' && metatype !== 'string') {\n throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED);\n }\n this.request({\n method: 'POST',\n uri: '/lockRetentionPolicy',\n qs: {\n ifMetagenerationMatch: metageneration,\n },\n }, callback);\n }\n /**\n * @typedef {array} MakeBucketPrivateResponse\n * @property {File[]} 0 List of files made private.\n */\n /**\n * @callback MakeBucketPrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made private.\n */\n /**\n * @typedef {object} MakeBucketPrivateOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Make the bucket listing private.\n *\n * You may also choose to make the contents of the bucket private by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePrivate} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {Metadata} [options.metadata] Define custom metadata properties to define\n * along with the operation.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {MakeBucketPrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket private.\n * //-\n * bucket.makePrivate(function(err) {});\n *\n * //-\n * // Make the bucket and its contents private.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePrivate(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents private, using force to suppress errors\n * // until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePrivate(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePrivate(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b, _c, _d;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options.private = true;\n const query = {\n predefinedAcl: 'projectPrivate',\n };\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n query.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n query.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n query.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a bucket\n // so acl must explicitly be nullified.\n const metadata = extend({}, options.metadata, { acl: null });\n this.setMetadata(metadata, query, err => {\n if (err) {\n callback(err);\n }\n const internalCall = () => {\n if (options.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options);\n }\n return Promise.resolve([]);\n };\n internalCall()\n .then(files => callback(null, files))\n .catch(callback);\n });\n }\n /**\n * @typedef {object} MakeBucketPublicOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n */\n /**\n * @callback MakeBucketPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made public.\n */\n /**\n * @typedef {array} MakeBucketPublicResponse\n * @property {File[]} 0 List of files made public.\n */\n /**\n * Make the bucket publicly readable.\n *\n * You may also choose to make the contents of the bucket publicly readable by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePublic} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPublicOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {MakeBucketPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket publicly readable.\n * //-\n * bucket.makePublic(function(err) {});\n *\n * //-\n * // Make the bucket and its contents publicly readable.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePublic(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents publicly readable, using force to\n * // suppress errors until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePublic(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePublic(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePublic(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n const req = extend(true, { public: true }, options);\n this.acl\n .add({\n entity: 'allUsers',\n role: 'READER',\n })\n .then(() => {\n return this.acl.default.add({\n entity: 'allUsers',\n role: 'READER',\n });\n })\n .then(() => {\n if (req.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req);\n }\n return [];\n })\n .then(files => callback(null, files), callback);\n }\n /**\n * Get a reference to a Cloud Pub/Sub Notification.\n *\n * @param {string} id ID of notification.\n * @returns {Notification}\n * @see Notification\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const notification = bucket.notification('1');\n * ```\n */\n notification(id) {\n if (!id) {\n throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID);\n }\n return new notification_1.Notification(this, id);\n }\n /**\n * Remove an already-existing retention policy from this bucket, if it is not\n * locked.\n *\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * bucket.removeRetentionPeriod(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.removeRetentionPeriod().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n removeRetentionPeriod(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: null,\n }, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) {\n reqOpts.qs = extend(reqOpts.qs, { userProject: this.userProject });\n }\n return super.request(reqOpts, callback);\n }\n /**\n * @typedef {array} SetLabelsResponse\n * @property {object} 0 The bucket metadata.\n */\n /**\n * @callback SetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Set labels on the bucket.\n *\n * This makes an underlying call to {@link Bucket#setMetadata}, which\n * is a PATCH request. 
This means an individual label can be overwritten, but\n * unmentioned labels will not be touched.\n *\n * @param {object} labels Labels to set on the bucket.\n * @param {SetLabelsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const labels = {\n * labelone: 'labelonevalue',\n * labeltwo: 'labeltwovalue'\n * };\n *\n * bucket.setLabels(labels, function(err, metadata) {\n * if (!err) {\n * // Labels set successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setLabels(labels).then(function(data) {\n * const metadata = data[0];\n * });\n * ```\n */\n setLabels(labels, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || nodejs_common_1.util.noop;\n this.setMetadata({ labels }, options, callback);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options);\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * Lock all objects contained in the bucket, based on their creation time. Any\n * attempt to overwrite or delete objects younger than the retention period\n * will result in a `PERMISSION_DENIED` error.\n *\n * An unlocked retention policy can be modified or removed from the bucket via\n * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A\n * locked retention policy cannot be removed or shortened in duration for the\n * lifetime of the bucket. Attempting to remove or decrease period of a locked\n * retention policy will result in a `PERMISSION_DENIED` error. You can still\n * increase the policy.\n *\n * @param {*} duration In seconds, the minimum retention time for all objects\n * contained in this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataCallback} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const DURATION_SECONDS = 15780000; // 6 months.\n *\n * //-\n * // Lock the objects in this bucket for 6 months.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setRetentionPeriod(duration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: {\n retentionPeriod: duration,\n },\n }, options, callback);\n }\n /**\n *\n * @typedef {object} Cors\n * @property {number} [maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @property {string[]} [method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n */\n /**\n * This can be used to set the CORS configuration on the bucket.\n *\n * The configuration will be overwritten with the value passed into this.\n *\n * @param {Cors[]} corsConfiguration The new CORS configuration to set\n * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour\n * bucket.setCorsConfiguration(corsConfiguration);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setCorsConfiguration(corsConfiguration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n cors: corsConfiguration,\n }, options, callback);\n }\n /**\n * @typedef {object} SetBucketStorageClassOptions\n * @property {string} [userProject] - The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetBucketStorageClassCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Set the default storage class for new files in this bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. 
(`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] - The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n // In case we get input like `storageClass`, convert to `storage_class`.\n storageClass = storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase();\n this.setMetadata({ storageClass }, options, callback);\n }\n /**\n * Set a user project to be billed for all requests made from this Bucket\n * object and any files referenced from this Bucket object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.userProject = userProject;\n const methods = [\n 'create',\n 'delete',\n 'exists',\n 'get',\n 'getMetadata',\n 'setMetadata',\n ];\n methods.forEach(method => {\n const methodConfig = this.methods[method];\n if (typeof methodConfig === 'object') {\n if (typeof methodConfig.reqOpts === 'object') {\n extend(methodConfig.reqOpts.qs, { userProject });\n }\n else {\n methodConfig.reqOpts = {\n qs: { userProject },\n };\n }\n }\n });\n }\n /**\n * @typedef {object} UploadOptions Configuration options for Bucket#upload().\n * @property {string|File} [destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @property {string} [encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @property {boolean} [gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @property {string} [kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. 
Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @property {object} [metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @property {string} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n */\n /**\n * @typedef {array} UploadResponse\n * @property {object} 0 The uploaded {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UploadCallback\n * @param {?Error} err Request error, if any.\n * @param {object} file The uploaded {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Upload a file to the bucket. This is a convenience method that wraps\n * {@link File#createWriteStream}.\n *\n * Resumable uploads are enabled by default\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}\n *\n * @param {string} pathString The fully qualified path to the file you\n * wish to upload to your bucket.\n * @param {UploadOptions} [options] Configuration options.\n * @param {string|File} [options.destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. 
Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {boolean} [options.gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @param {object} [options.metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @param {string} [options.offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @param {string} [options.predefinedAcl] Apply a predefined set of access\n * controls to this object.\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @param {boolean} [options.private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @param {boolean} [options.public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @param {boolean} [options.resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @param {number} [options.timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @param {string} [options.uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {string|boolean} [options.validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n * @param {UploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Upload a file from a local path.\n * //-\n * bucket.upload('/local/path/image.png', function(err, file, apiResponse) {\n * // Your bucket now contains:\n * // - \"image.png\" (with the contents of `/local/path/image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n *\n * //-\n * // It's not always that easy. 
You will likely want to specify the filename\n * // used when your new file lands in your bucket.\n * //\n * // You may also want to set metadata or customize other options.\n * //-\n * const options = {\n * destination: 'new-image.png',\n * validation: 'crc32c',\n * metadata: {\n * metadata: {\n * event: 'Fall trip to the zoo'\n * }\n * }\n * };\n *\n * bucket.upload('local-image.png', options, function(err, file) {\n * // Your bucket now contains:\n * // - \"new-image.png\" (with the contents of `local-image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n * //-\n * // You can also have a file gzip'd on the fly.\n * //-\n * bucket.upload('index.html', { gzip: true }, function(err, file) {\n * // Your bucket now contains:\n * // - \"index.html\" (automatically compressed with gzip)\n *\n * // Downloading the file with `file.download` will automatically decode\n * the\n * // file.\n * });\n *\n * //-\n * // You may also re-use a File object, {File}, that references\n * // the file you wish to create or overwrite.\n * //-\n * const options = {\n * destination: bucket.file('existing-file.png'),\n * resumable: false\n * };\n *\n * bucket.upload('local-img.png', options, function(err, newFile) {\n * // Your bucket now contains:\n * // - \"existing-file.png\" (with the contents of `local-img.png')\n *\n * // Note:\n * // The `newFile` parameter is equal to `file`.\n * });\n *\n * //-\n * // To use\n * // \n * // Customer-supplied Encryption Keys, provide the `encryptionKey`\n * option.\n * //-\n * const crypto = require('crypto');\n * const encryptionKey = crypto.randomBytes(32);\n *\n * bucket.upload('img.png', {\n * encryptionKey: encryptionKey\n * }, function(err, newFile) {\n * // `img.png` was uploaded with your custom encryption key.\n *\n * // `newFile` is already configured to use the encryption key when making\n * // operations on the remote object.\n *\n * // However, to use your encryption key later, you must create a `File`\n * // instance with the `key` supplied:\n * const file = bucket.file('img.png', {\n * encryptionKey: encryptionKey\n * });\n *\n * // Or with `file#setEncryptionKey`:\n * const file = bucket.file('img.png');\n * file.setEncryptionKey(encryptionKey);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.upload('local-image.png').then(function(data) {\n * const file = data[0];\n * });\n *\n * To upload a file from a URL, use {@link File#createWriteStream}.\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_upload_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n */\n upload(pathString, optionsOrCallback, callback) {\n var _a, _b;\n const upload = (numberOfRetries) => {\n const returnValue = retry(async (bail) => {\n await new Promise((resolve, reject) => {\n var _a, _b;\n if (numberOfRetries === 0 &&\n ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? 
void 0 : _b.autoRetry)) {\n newFile.storage.retryOptions.autoRetry = false;\n }\n const writable = newFile.createWriteStream(options);\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n fs.createReadStream(pathString)\n .on('error', bail)\n .pipe(writable)\n .on('error', err => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n else {\n return bail(err);\n }\n })\n .on('finish', () => {\n return resolve();\n });\n });\n }, {\n retries: numberOfRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback(null, newFile, newFile.metadata);\n }\n })\n .catch(callback);\n }\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (global['GCLOUD_SANDBOX_ENV']) {\n return;\n }\n let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options = Object.assign({\n metadata: {},\n }, options);\n // Do not retry if precondition option ifGenerationMatch is not set\n // because this is a file operation\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n let newFile;\n if (options.destination instanceof file_1.File) {\n newFile = options.destination;\n }\n else if (options.destination !== null &&\n typeof options.destination === 'string') {\n // Use the string as the name of the file.\n newFile = this.file(options.destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n else {\n // Resort to using the name of the incoming file.\n const destination = path.basename(pathString);\n newFile = this.file(destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n upload(maxRetries);\n }\n /**\n * @private\n *\n * @typedef {object} MakeAllFilesPublicPrivateOptions\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n * @property {boolean} [private] Make files private.\n * @property {boolean} [public] Make files public.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @private\n *\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Files that were updated.\n */\n /**\n * @typedef {array} MakeAllFilesPublicPrivateResponse\n * @property {File[]} 0 List of files affected.\n */\n /**\n * Iterate over all of a bucket's files, calling `file.makePublic()` (public)\n * or `file.makePrivate()` (private) on each.\n *\n * Operations are performed 
in parallel, up to 10 at once. The first error\n * breaks the loop, and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors.\n *\n * @private\n *\n * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.force] Suppress errors until all files have been\n * processed.\n * @param {boolean} [options.private] Make files private.\n * @param {boolean} [options.public] Make files public.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {MakeAllFilesPublicPrivateCallback} callback Callback function.\n *\n * @return {Promise}\n */\n makeAllFilesPublicPrivate_(optionsOrCallback, callback) {\n const MAX_PARALLEL_LIMIT = 10;\n const errors = [];\n const updatedFiles = [];\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const processFile = async (file) => {\n try {\n await (options.public ? file.makePublic() : file.makePrivate(options));\n updatedFiles.push(file);\n }\n catch (e) {\n if (!options.force) {\n throw e;\n }\n errors.push(e);\n }\n };\n this.getFiles(options)\n .then(([files]) => {\n const limit = pLimit(MAX_PARALLEL_LIMIT);\n const promises = files.map(file => {\n return limit(() => processFile(file));\n });\n return Promise.all(promises);\n })\n .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles));\n }\n getId() {\n return this.id;\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n methodType, localPreconditionOptions) {\n var _a, _b;\n if (typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n (methodType === AvailableServiceObjectMethods.setMetadata ||\n methodType === AvailableServiceObjectMethods.delete) &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) {\n this.storage.retryOptions.autoRetry = false;\n }\n else if (this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n}\nexports.Bucket = Bucket;\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Bucket, 'getFiles');\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Bucket, {\n exclude: ['cloudStorageURI', 'request', 'file', 'notification'],\n});\n//# sourceMappingURL=bucket.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Channel = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Create a channel object to interact with a Cloud Storage channel.\n *\n * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification}\n *\n * @class\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\nclass Channel extends nodejs_common_1.ServiceObject {\n constructor(storage, id, resourceId) {\n const config = {\n parent: storage,\n baseUrl: '/channels',\n // An ID shouldn't be included in the API requests.\n // RE:\n // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145\n id: '',\n methods: {\n // Only need `request`.\n },\n };\n super(config);\n this.metadata.id = id;\n this.metadata.resourceId = resourceId;\n }\n /**\n * @typedef {array} StopResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback StopCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Stop this channel.\n *\n * @param {StopCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * channel.stop(function(err, apiResponse) {\n * if (!err) {\n * // Channel stopped successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * channel.stop().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n stop(callback) {\n callback = callback || nodejs_common_1.util.noop;\n this.request({\n method: 'POST',\n uri: '/stop',\n json: this.metadata,\n }, (err, apiResponse) => {\n callback(err, apiResponse);\n });\n }\n}\nexports.Channel = Channel;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Channel);\n//# sourceMappingURL=channel.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _CRC32C_crc32c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0;\nconst fs_1 = require(\"fs\");\n/**\n * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c}\n */\nconst CRC32C_EXTENSIONS = [\n 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c,\n 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,\n 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,\n 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,\n 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc,\n 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,\n 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,\n 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,\n 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,\n 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,\n 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf,\n 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,\n 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f,\n 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,\n 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,\n 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,\n 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e,\n 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,\n 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e,\n 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,\n 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,\n 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,\n 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4,\n 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,\n 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b,\n 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,\n 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,\n 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,\n 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975,\n 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,\n 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905,\n 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,\n 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,\n 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,\n 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,\n 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,\n 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78,\n 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,\n 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,\n 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,\n 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69,\n 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,\n 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,\n];\nexports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nconst CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS);\nexports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\nconst CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C();\nexports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR;\nconst CRC32C_EXCEPTION_MESSAGES = {\n INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`,\n};\nexports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES;\nclass CRC32C {\n /**\n * Constructs a new `CRC32C` object.\n *\n * Reconstruction is recommended via the `CRC32C.from` static method.\n *\n * @param initialValue An initial CRC32C value - a signed 32-bit integer.\n */\n constructor(initialValue = 0) {\n /** Current CRC32C value */\n _CRC32C_crc32c.set(this, 0);\n __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, \"f\");\n }\n /**\n * Calculates a CRC32C from a provided buffer.\n *\n * Implementation inspired from:\n * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c}\n * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c}\n * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage}\n *\n * @param data The `Buffer` to generate the CRC32C from\n */\n update(data) {\n let current = __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\") ^ 0xffffffff;\n for (const d of data) {\n const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff];\n current = tablePoly ^ (current >>> 8);\n }\n __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, \"f\");\n }\n /**\n * Validates a provided input to the current CRC32C value.\n *\n * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer\n */\n validate(input) {\n if (typeof input === 'number') {\n return input === __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n else if (typeof input === 'string') {\n return input === this.toString();\n }\n else if (Buffer.isBuffer(input)) {\n return Buffer.compare(input, this.toBuffer()) === 0;\n }\n else {\n // `CRC32C`-like object\n return input.toString() === this.toString();\n }\n }\n /**\n * Returns a `Buffer` representation of the CRC32C value\n */\n toBuffer() {\n const buffer = Buffer.alloc(4);\n buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, \"f\"));\n return buffer;\n }\n /**\n * Returns a JSON-compatible, base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`}\n */\n toJSON() {\n return this.toString();\n }\n /**\n * Returns a base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`}\n */\n toString() {\n return this.toBuffer().toString('base64');\n }\n /**\n * Returns the `number` representation of the CRC32C value 
as a signed 32-bit integer\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`}\n */\n valueOf() {\n return __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n /**\n * Generates a `CRC32C` from a compatible buffer format.\n *\n * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`\n */\n static fromBuffer(value) {\n let buffer;\n if (Buffer.isBuffer(value)) {\n buffer = value;\n }\n else if ('buffer' in value) {\n // `ArrayBufferView`\n buffer = Buffer.from(value.buffer);\n }\n else {\n // `ArrayBuffer`\n buffer = Buffer.from(value);\n }\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength));\n }\n return new CRC32C(buffer.readInt32BE());\n }\n static async fromFile(file) {\n const crc32c = new CRC32C();\n await new Promise((resolve, reject) => {\n (0, fs_1.createReadStream)(file)\n .on('data', (d) => crc32c.update(d))\n .on('end', resolve)\n .on('error', reject);\n });\n return crc32c;\n }\n /**\n * Generates a `CRC32C` from 4-byte base64-encoded data (string).\n *\n * @param value 4-byte base64-encoded data (string)\n */\n static fromString(value) {\n const buffer = Buffer.from(value, 'base64');\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength));\n }\n return this.fromBuffer(buffer);\n }\n /**\n * Generates a `CRC32C` from a safe, unsigned 32-bit integer.\n *\n * @param value an unsigned 32-bit integer\n */\n static fromNumber(value) {\n if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value));\n }\n return new CRC32C(value);\n }\n /**\n * Generates a `CRC32C` from a variety of compatable types.\n * Note: strings are treated as input, not as file paths to read from.\n *\n * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string)\n */\n static from(value) {\n if (typeof value === 'number') {\n return this.fromNumber(value);\n }\n else if (typeof value === 'string') {\n return this.fromString(value);\n }\n else if ('byteLength' in value) {\n // `ArrayBuffer` | `Buffer` | `ArrayBufferView`\n return this.fromBuffer(value);\n }\n else {\n // `CRC32CValidator`/`CRC32C`-like\n return this.fromString(value.toString());\n }\n }\n}\nexports.CRC32C = CRC32C;\n_CRC32C_crc32c = new WeakMap();\nCRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nCRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\n//# sourceMappingURL=crc32c.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _File_instances, _File_validateIntegrity;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst compressible = require(\"compressible\");\nconst crypto = require(\"crypto\");\nconst extend = require(\"extend\");\nconst fs = require(\"fs\");\nconst mime = require(\"mime\");\nconst resumableUpload = require(\"./resumable-upload\");\nconst stream_1 = require(\"stream\");\nconst zlib = require(\"zlib\");\nconst storage_1 = require(\"./storage\");\nconst bucket_1 = require(\"./bucket\");\nconst acl_1 = require(\"./acl\");\nconst signer_1 = require(\"./signer\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst util_1 = require(\"./util\");\nconst hash_stream_validator_1 = require(\"./hash-stream-validator\");\nconst retry = require(\"async-retry\");\nvar ActionToHTTPMethod;\n(function (ActionToHTTPMethod) {\n ActionToHTTPMethod[\"read\"] = \"GET\";\n ActionToHTTPMethod[\"write\"] = \"PUT\";\n ActionToHTTPMethod[\"delete\"] = \"DELETE\";\n ActionToHTTPMethod[\"resumable\"] = \"POST\";\n})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {}));\n/**\n * @private\n */\nexports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com';\n/**\n * @private\n */\nconst GS_URL_REGEXP = /^gs:\\/\\/([a-z0-9_.-]+)\\/(.+)$/;\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\nvar FileExceptionMessages;\n(function (FileExceptionMessages) {\n FileExceptionMessages[\"EXPIRATION_TIME_NA\"] = \"An expiration time is not available.\";\n FileExceptionMessages[\"DESTINATION_NO_NAME\"] = \"Destination file should have a name.\";\n FileExceptionMessages[\"INVALID_VALIDATION_FILE_RANGE\"] = \"Cannot use validation with file ranges (start/end).\";\n FileExceptionMessages[\"MD5_NOT_AVAILABLE\"] = \"MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.\";\n FileExceptionMessages[\"EQUALS_CONDITION_TWO_ELEMENTS\"] = \"Equals condition must be an array of 2 elements.\";\n FileExceptionMessages[\"STARTS_WITH_TWO_ELEMENTS\"] = \"StartsWith condition must be an array of 2 elements.\";\n FileExceptionMessages[\"CONTENT_LENGTH_RANGE_MIN_MAX\"] = \"ContentLengthRange must have numeric min & max fields.\";\n FileExceptionMessages[\"DOWNLOAD_MISMATCH\"] = \"The downloaded data did not match the data from the server. 
To be sure the content is the same, you should download the file again.\";\n FileExceptionMessages[\"UPLOAD_MISMATCH_DELETE_FAIL\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, we attempted to delete the file, but it was not successful.\\n To be sure the content is the same, you should try removing the file manually,\\n then uploading the file again.\\n \\n\\nThe delete attempt failed with this message:\\n\\n \";\n FileExceptionMessages[\"UPLOAD_MISMATCH\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, the file has been deleted.\\n To be sure the content is the same, you should try uploading the file again.\";\n})(FileExceptionMessages = exports.FileExceptionMessages || (exports.FileExceptionMessages = {}));\n/**\n * A File object is created from your {@link Bucket} object using\n * {@link Bucket#file}.\n *\n * @class\n */\nclass File extends nodejs_common_1.ServiceObject {\n /**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a File instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control lists}\n *\n * @name File#acl\n * @mixes Acl\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * //-\n * // Make a file publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * file.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n /**\n * The API-formatted resource description of the file.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. 
To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name File#metadata\n * @type {object}\n */\n /**\n * The file's name.\n * @name File#name\n * @type {string}\n */\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} FileOptions Options passed to the File constructor.\n * @property {string} [encryptionKey] A custom encryption key.\n * @property {number} [generation] Generation to scope the file to.\n * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this\n * object, if the object is encrypted by such a key. Limited availability;\n * usable only by enabled projects.\n * @property {string} [userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. 
Defaults to {@link CRC32C}\n */\n /**\n * Constructs a file object.\n *\n * @param {Bucket} bucket The Bucket instance this file is\n * attached to.\n * @param {string} name The name of the remote file.\n * @param {FileOptions} [options] Configuration options.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * ```\n */\n constructor(bucket, name, options = {}) {\n var _a, _b;\n const requestQueryObject = {};\n let generation;\n if (options.generation !== null) {\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n requestQueryObject.generation = generation;\n }\n }\n Object.assign(requestQueryObject, options.preconditionOpts);\n const userProject = options.userProject || bucket.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * @typedef {array} DeleteFileResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteFileCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @method File#delete\n * @param {object} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the file does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_delete_file\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} FileExistsResponse\n * @property {boolean} 0 Whether the {@link File} exists.\n */\n /**\n * @callback FileExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link File} exists.\n */\n /**\n * Check if the file exists.\n *\n * @method File#exists\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {FileExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} 
GetFileResponse\n * @property {File} 0 The {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileCallback\n * @param {?Error} err Request error, if any.\n * @param {File} file The {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a file object and its metadata if it exists.\n *\n * @method File#get\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.get(function(err, file, apiResponse) {\n * // file.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.get().then(function(data) {\n * const file = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetFileMetadataResponse\n * @property {object} 0 The {@link File} metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The {@link File} metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get the file's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation}\n *\n * @method File#getMetadata\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_get_metadata\n * Another example:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata().\n * @param {string} [userProject] The ID of the project which will be billed for the request.\n */\n /**\n * @callback SetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} SetFileMetadataResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Merge the given metadata with the current remote file's metadata. This\n * will set metadata if it was previously unset or update previously set\n * metadata. 
To unset previously set metadata, set its value to null.\n *\n * You can set custom key/value pairs in the metadata key of the given\n * object, however the other properties outside of this object must adhere\n * to the {@link https://goo.gl/BOnnCK| official API documentation}.\n *\n *\n * See the examples below for more information.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @method File#setMetadata\n * @param {object} [metadata] The metadata you wish to update.\n * @param {SetFileMetadataOptions} [options] Configuration options.\n * @param {SetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * const metadata = {\n * contentType: 'application/x-font-ttf',\n * metadata: {\n * my: 'custom',\n * properties: 'go here'\n * }\n * };\n *\n * file.setMetadata(metadata, function(err, apiResponse) {});\n *\n * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' }\n * file.setMetadata({\n * metadata: {\n * abc: '123', // will be set.\n * unsetMe: null, // will be unset (deleted).\n * hello: 'goodbye' // will be updated from 'world' to 'goodbye'.\n * }\n * }, function(err, apiResponse) {\n * // metadata should now be { abc: '123', hello: 'goodbye' }\n * });\n *\n * //-\n * // Set a temporary hold on this file from its bucket's retention period\n * // configuration.\n * //\n * file.setMetadata({\n * temporaryHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Alternatively, you may set a temporary hold. This will follow the\n * // same behavior as an event-based hold, with the exception that the\n * // bucket's retention policy will not renew for this file from the time\n * // the hold is released.\n * //-\n * file.setMetadata({\n * eventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: bucket,\n baseUrl: '/o',\n id: encodeURIComponent(name),\n methods,\n });\n _File_instances.add(this);\n this.bucket = bucket;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.storage = bucket.parent;\n // @TODO Can this duplicate code from above be avoided?\n if (options.generation !== null) {\n let generation;\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n this.generation = generation;\n }\n }\n this.kmsKeyName = options.kmsKeyName;\n this.userProject = userProject;\n this.name = name;\n if (options.encryptionKey) {\n this.setEncryptionKey(options.encryptionKey);\n }\n this.acl = new acl_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.bucket.crc32cGenerator;\n this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry;\n this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts;\n }\n /**\n * The object's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('image.png');\n *\n * // `gs://my-bucket/image.png`\n * const href = file.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = this.bucket.cloudStorageURI;\n uri.pathname = this.name;\n return uri;\n }\n /**\n * A helper method for determining if a request should be retried based on preconditions.\n * This should only be used for methods where the idempotency is determined by\n * `ifGenerationMatch`\n * @private\n *\n * A request should not be retried under the following conditions:\n * - if precondition option `ifGenerationMatch` is not set OR\n * - if `idempotencyStrategy` is set to `RetryNever`\n */\n shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) {\n var _a;\n return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined &&\n ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever);\n }\n /**\n * @typedef {array} CopyResponse\n * @property {File} 0 The copied {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CopyCallback\n * @param {?Error} err Request error, if any.\n * @param {File} copiedFile The copied {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} CopyOptions Configuration options for File#copy(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @property {string} [cacheControl] The cacheControl setting for the new file.\n * @property {string} [contentEncoding] The contentEncoding setting for the new file.\n * @property {string} [contentType] The contentType setting for the new file.\n * @property {string} [destinationKmsKeyName] Resource name of the Cloud\n * KMS key, of the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {Metadata} [metadata] Metadata to specify on the copied file.\n * @property {string} [predefinedAcl] Set the ACL for the new file.\n * @property {string} [token] A previously-returned `rewriteToken` from an\n * unfinished rewrite request.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Copy this file to another file. By default, this will copy the file to the\n * same bucket, but you can choose to copy it to another Bucket by providing\n * a Bucket or File object or a URL starting with \"gs://\".\n * The generation of the file will not be preserved.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {CopyOptions} [options] Configuration options. 
See an\n * @param {CopyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is copied to its\n * // current bucket, under the new name provided.\n * //-\n * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {\n * // `my-bucket` now contains:\n * // - \"my-image.png\"\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-copy.png';\n * file.copy(newLocation, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be copied to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = storage.bucket('another-bucket');\n * file.copy(anotherBucket, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n * file.copy(anotherFile, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `copiedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.copy(newLocation).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_copy_file\n * Another example:\n */\n copy(destination, optionsOrCallback, callback) {\n var _a, _b;\n const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME);\n if (!destination) {\n throw noDestinationError;\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n options = extend(true, {}, options);\n callback = callback || nodejs_common_1.util.noop;\n let destBucket;\n let destName;\n let newFile;\n if (typeof destination === 'string') {\n const parsedDestination = GS_URL_REGEXP.exec(destination);\n if (parsedDestination !== null && parsedDestination.length === 3) {\n destBucket = this.storage.bucket(parsedDestination[1]);\n destName = parsedDestination[2];\n }\n else {\n destBucket = 
this.bucket;\n destName = destination;\n }\n }\n else if (destination instanceof bucket_1.Bucket) {\n destBucket = destination;\n destName = this.name;\n }\n else if (destination instanceof File) {\n destBucket = destination.bucket;\n destName = destination.name;\n newFile = destination;\n }\n else {\n throw noDestinationError;\n }\n const query = {};\n if (this.generation !== undefined) {\n query.sourceGeneration = this.generation;\n }\n if (options.token !== undefined) {\n query.rewriteToken = options.token;\n }\n if (options.userProject !== undefined) {\n query.userProject = options.userProject;\n delete options.userProject;\n }\n if (options.predefinedAcl !== undefined) {\n query.destinationPredefinedAcl = options.predefinedAcl;\n delete options.predefinedAcl;\n }\n newFile = newFile || destBucket.file(destName);\n const headers = {};\n if (this.encryptionKey !== undefined) {\n headers['x-goog-copy-source-encryption-algorithm'] = 'AES256';\n headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64;\n headers['x-goog-copy-source-encryption-key-sha256'] =\n this.encryptionKeyHash;\n }\n if (newFile.encryptionKey !== undefined) {\n this.setEncryptionKey(newFile.encryptionKey);\n }\n else if (options.destinationKmsKeyName !== undefined) {\n query.destinationKmsKeyName = options.destinationKmsKeyName;\n delete options.destinationKmsKeyName;\n }\n else if (newFile.kmsKeyName !== undefined) {\n query.destinationKmsKeyName = newFile.kmsKeyName;\n }\n if (query.destinationKmsKeyName) {\n this.kmsKeyName = query.destinationKmsKeyName;\n const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor);\n if (keyIndex > -1) {\n this.interceptors.splice(keyIndex, 1);\n }\n }\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n this.storage.retryOptions.autoRetry = false;\n }\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) {\n query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch;\n delete options.preconditionOpts;\n }\n this.request({\n method: 'POST',\n uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`,\n qs: query,\n json: options,\n headers,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n if (resp.rewriteToken) {\n const options = {\n token: resp.rewriteToken,\n };\n if (query.userProject) {\n options.userProject = query.userProject;\n }\n if (query.destinationKmsKeyName) {\n options.destinationKmsKeyName = query.destinationKmsKeyName;\n }\n this.copy(newFile, options, callback);\n return;\n }\n callback(null, newFile, resp);\n });\n }\n /**\n * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**.\n * @property {number} [start] A byte offset to begin the file's download\n * from. 
Default is 0. NOTE: Byte ranges are inclusive; that is,\n * `options.start = 0` and `options.end = 999` represent the first 1000\n * bytes in a file or object. NOTE: when specifying a byte range, data\n * integrity is not available.\n * @property {number} [end] A byte offset to stop reading the file at.\n * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and\n * `options.end = 999` represent the first 1000 bytes in a file or object.\n * NOTE: when specifying a byte range, data integrity is not available.\n * @property {boolean} [decompress=true] Disable auto decompression of the\n * received data. By default this option is set to `true`.\n * Applicable in cases where the data was uploaded with\n * `gzip: true` option. See {@link File#createWriteStream}.\n */\n /**\n * Create a readable stream to read the contents of the remote file. It can be\n * piped to a writable stream or listened to for 'data' events to read a\n * file's contents.\n *\n * In the unlikely event there is a mismatch between what you downloaded and\n * the version in your Bucket, your error handler will receive an error with\n * code \"CONTENT_DOWNLOAD_MISMATCH\". If you receive this error, the best\n * recourse is to try downloading the file again.\n *\n * NOTE: Readable streams will emit the `end` event when the file is fully\n * downloaded.\n *\n * @param {CreateReadStreamOptions} [options] Configuration options.\n * @returns {ReadableStream}\n *\n * @example\n * ```\n * //-\n * //
<h4>Downloading a File</h4>
\n * //\n * // The example below demonstrates how we can reference a remote file, then\n * // pipe its contents to a local file. This is effectively creating a local\n * // backup of your remote data.\n * //-\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const fs = require('fs');\n * const remoteFile = bucket.file('image.png');\n * const localFilename = '/Users/stephen/Photos/image.png';\n *\n * remoteFile.createReadStream()\n * .on('error', function(err) {})\n * .on('response', function(response) {\n * // Server connected and responded with the specified status and headers.\n * })\n * .on('end', function() {\n * // The file is fully downloaded.\n * })\n * .pipe(fs.createWriteStream(localFilename));\n *\n * //-\n * // To limit the downloaded data to only a byte range, pass an options\n * // object.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * start: 10000,\n * end: 20000\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n *\n * //-\n * // To read a tail byte range, specify only `options.end` as a negative\n * // number.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * end: -100\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n * ```\n */\n createReadStream(options = {}) {\n options = Object.assign({ decompress: true }, options);\n const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number';\n const tailRequest = options.end < 0;\n let validateStream = undefined;\n const throughStream = new util_1.PassThroughShim();\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n const value = options.validation.toLowerCase().trim();\n crc32c = value === 'crc32c';\n md5 = value === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n }\n const shouldRunValidation = !rangeRequest && (crc32c || md5);\n if (rangeRequest) {\n if (typeof options.validation === 'string' ||\n options.validation === true) {\n throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE);\n }\n // Range requests can't receive data integrity checks.\n crc32c = false;\n md5 = false;\n }\n const onComplete = (err) => {\n if (err) {\n throughStream.destroy(err);\n }\n };\n // We listen to the response event from the request stream so that we\n // can...\n //\n // 1) Intercept any data from going to the user if an error occurred.\n // 2) Calculate the hashes from the http.IncomingMessage response\n // stream,\n // which will return the bytes from the source without decompressing\n // gzip'd content. We then send it through decompressed, if\n // applicable, to the user.\n const onResponse = (err, _body, rawResponseStream) => {\n if (err) {\n // Get error message from the body.\n this.getBufferFromReadable(rawResponseStream).then(body => {\n err.message = body.toString('utf8');\n throughStream.destroy(err);\n });\n return;\n }\n const headers = rawResponseStream.toJSON().headers;\n const isCompressed = headers['content-encoding'] === 'gzip';\n const hashes = {};\n // The object is safe to validate if:\n // 1. It was stored gzip and returned to us gzip OR\n // 2. 
It was never stored as gzip\n const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' &&\n isCompressed) ||\n headers['x-goog-stored-content-encoding'] === 'identity';\n const transformStreams = [];\n if (shouldRunValidation) {\n // The x-goog-hash header should be set with a crc32c and md5 hash.\n // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx'\n if (typeof headers['x-goog-hash'] === 'string') {\n headers['x-goog-hash']\n .split(',')\n .forEach((hashKeyValPair) => {\n const delimiterIndex = hashKeyValPair.indexOf('=');\n const hashType = hashKeyValPair.substr(0, delimiterIndex);\n const hashValue = hashKeyValPair.substr(delimiterIndex + 1);\n hashes[hashType] = hashValue;\n });\n }\n validateStream = new hash_stream_validator_1.HashStreamValidator({\n crc32c,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n crc32cExpected: hashes.crc32c,\n md5Expected: hashes.md5,\n });\n }\n if (md5 && !hashes.md5) {\n const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE);\n hashError.code = 'MD5_NOT_AVAILABLE';\n throughStream.destroy(hashError);\n return;\n }\n if (safeToValidate && shouldRunValidation && validateStream) {\n transformStreams.push(validateStream);\n }\n if (isCompressed && options.decompress) {\n transformStreams.push(zlib.createGunzip());\n }\n (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete);\n };\n // Authenticate the request, then pipe the remote API request to the stream\n // returned to the user.\n const makeRequest = () => {\n const query = { alt: 'media' };\n if (this.generation) {\n query.generation = this.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n const headers = {\n 'Accept-Encoding': 'gzip',\n 'Cache-Control': 'no-store',\n };\n if (rangeRequest) {\n const start = typeof options.start === 'number' ? options.start : '0';\n const end = typeof options.end === 'number' ? options.end : '';\n headers.Range = `bytes=${tailRequest ? 
end : `${start}-${end}`}`;\n }\n const reqOpts = {\n uri: '',\n headers,\n qs: query,\n };\n this.requestStream(reqOpts)\n .on('error', err => {\n throughStream.destroy(err);\n })\n .on('response', res => {\n throughStream.emit('response', res);\n nodejs_common_1.util.handleResp(null, res, null, onResponse);\n })\n .resume();\n };\n throughStream.on('reading', makeRequest);\n return throughStream;\n }\n /**\n * @callback CreateResumableUploadCallback\n * @param {?Error} err Request error, if any.\n * @param {string} uri The resumable upload's unique session URI.\n */\n /**\n * @typedef {array} CreateResumableUploadResponse\n * @property {string} 0 The resumable upload's unique session URI.\n */\n /**\n * @typedef {object} CreateResumableUploadOptions\n * @property {object} [metadata] Metadata to set on the file.\n * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload.\n * @property {string} [origin] Origin header to set for the upload.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string} [chunkSize] Create a separate request per chunk. This\n * value is in bytes and should be a multiple of 256 KiB (2^18).\n * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.}\n */\n /**\n * Create a unique resumable upload session URI. This is the first step when\n * performing a resumable upload.\n *\n * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n * for more on how the entire process works.\n *\n *
<h4>Note</h4>
\n *\n * If you are just looking to perform a resumable upload without worrying\n * about any of the details, see {@link File#createWriteStream}. Resumable\n * uploads are performed by default.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n *\n * @param {CreateResumableUploadOptions} [options] Configuration options.\n * @param {CreateResumableUploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.createResumableUpload(function(err, uri) {\n * if (!err) {\n * // `uri` can be used to PUT data to.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.createResumableUpload().then(function(data) {\n * const uri = data[0];\n * });\n * ```\n */\n createResumableUpload(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const retryOptions = this.storage.retryOptions;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n retryOptions.autoRetry = false;\n }\n resumableUpload.createURI({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n origin: options.origin,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n userProject: options.userProject || this.userProject,\n retryOptions: retryOptions,\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n }, callback);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().\n * @property {string} [contentType] Alias for\n * `options.metadata.contentType`. If set to `auto`, the file name is used\n * to determine the contentType.\n * @property {string|boolean} [gzip] If true, automatically gzip the file.\n * If set to `auto`, the contentType is used to determine if the file\n * should be gzipped. This will set `options.metadata.contentEncoding` to\n * `gzip` if necessary.\n * @property {object} [metadata] See the examples below or\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}\n * for more details.\n * @property {number} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. 
Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable] Force a resumable upload. NOTE: When\n * working with streams, the file format and size is unknown until it's\n * completely consumed. Because of this, it's best for you to be explicit\n * for what makes sense given your input.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**. In addition to specifying\n * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will\n * cause the server to perform validation in addition to client validation.\n * NOTE: Validation is automatically skipped for objects that were\n * uploaded using the `gzip` option and have already compressed content.\n */\n /**\n * Create a writable stream to overwrite the contents of the file in your\n * bucket.\n *\n * A File object can also be used to create files for the first time.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n *\n *
<p class=\"notice\">\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n * </p>
\n *\n * NOTE: Writable streams will emit the `finish` event when the file is fully\n * uploaded.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}\n *\n * @param {CreateWriteStreamOptions} [options] Configuration options.\n * @returns {WritableStream}\n *\n * @example\n * ```\n * const fs = require('fs');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * //
<h4>Uploading a File</h4>
\n * //\n * // Now, consider a case where we want to upload a file to your bucket. You\n * // have the option of using {@link Bucket#upload}, but that is just\n * // a convenience method which will do the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream())\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * //
<h4>Uploading a File with gzip compression</h4>
\n * //-\n * fs.createReadStream('/Users/stephen/site/index.html')\n * .pipe(file.createWriteStream({ gzip: true }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * // Downloading the file with `createReadStream` will automatically decode\n * // the file.\n * //-\n *\n * //-\n * //
<h4>Uploading a File with Metadata</h4>
\n * //\n * // One last case you may run into is when you want to upload a file to your\n * // bucket and set its metadata at the same time. Like above, you can use\n * // {@link Bucket#upload} to do this, which is just a wrapper around\n * // the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream({\n * metadata: {\n * contentType: 'image/jpeg',\n * metadata: {\n * custom: 'metadata'\n * }\n * }\n * }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n * ```\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n createWriteStream(options = {}) {\n options = extend(true, { metadata: {} }, options);\n if (options.contentType) {\n options.metadata.contentType = options.contentType;\n }\n if (!options.metadata.contentType ||\n options.metadata.contentType === 'auto') {\n const detectedContentType = mime.getType(this.name);\n if (detectedContentType) {\n options.metadata.contentType = detectedContentType;\n }\n }\n let gzip = options.gzip;\n if (gzip === 'auto') {\n gzip = compressible(options.metadata.contentType);\n }\n if (gzip) {\n options.metadata.contentEncoding = 'gzip';\n }\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n options.validation = options.validation.toLowerCase();\n crc32c = options.validation === 'crc32c';\n md5 = options.validation === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n }\n /**\n * A callback for determining when the underlying pipeline is complete.\n * It's possible the pipeline callback could error before the write stream\n * calls `final` so by default this will destroy the write stream unless the\n * write stream sets this callback via its `final` handler.\n * @param error An optional error\n */\n let pipelineCallback = error => {\n writeStream.destroy(error || undefined);\n };\n // A stream for consumer to write to\n const writeStream = new stream_1.Writable({\n final(cb) {\n // Set the pipeline callback to this callback so the pipeline's results\n // can be populated to the consumer\n pipelineCallback = cb;\n emitStream.end();\n },\n write(chunk, encoding, cb) {\n emitStream.write(chunk, encoding, cb);\n },\n });\n const emitStream = new util_1.PassThroughShim();\n const hashCalculatingStream = new hash_stream_validator_1.HashStreamValidator({\n crc32c,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n updateHashesOnly: true,\n });\n const fileWriteStream = duplexify();\n let fileWriteStreamMetadataReceived = false;\n // Handing off emitted events to users\n emitStream.on('reading', () => writeStream.emit('reading'));\n emitStream.on('writing', () => writeStream.emit('writing'));\n fileWriteStream.on('progress', evt => writeStream.emit('progress', evt));\n fileWriteStream.on('response', resp => writeStream.emit('response', resp));\n fileWriteStream.once('metadata', () => {\n fileWriteStreamMetadataReceived = true;\n });\n writeStream.on('writing', () => {\n if (options.resumable === false) {\n this.startSimpleUpload_(fileWriteStream, options);\n }\n else {\n this.startResumableUpload_(fileWriteStream, options);\n }\n (0, stream_1.pipeline)(emitStream, gzip ? zlib.createGzip() : new stream_1.PassThrough(), hashCalculatingStream, fileWriteStream, async (e) => {\n if (e) {\n return pipelineCallback(e);\n }\n // We want to make sure we've received the metadata from the server in order\n // to properly validate the object's integrity. 
Depending on the type of upload,\n // the stream could close before the response is returned.\n if (!fileWriteStreamMetadataReceived) {\n try {\n await new Promise((resolve, reject) => {\n fileWriteStream.once('metadata', resolve);\n fileWriteStream.once('error', reject);\n });\n }\n catch (e) {\n return pipelineCallback(e);\n }\n }\n try {\n await __classPrivateFieldGet(this, _File_instances, \"m\", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5 });\n pipelineCallback();\n }\n catch (e) {\n pipelineCallback(e);\n }\n });\n });\n return writeStream;\n }\n delete(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_1.AvailableServiceObjectMethods.delete, options);\n super\n .delete(options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} DownloadResponse\n * @property [0] The contents of a File.\n */\n /**\n * @callback DownloadCallback\n * @param err Request error, if any.\n * @param contents The contents of a File.\n */\n /**\n * Convenience method to download a file into memory or to a local\n * destination.\n *\n * @param {object} [options] Configuration options. The arguments match those\n * passed to {@link File#createReadStream}.\n * @param {string} [options.destination] Local file path to write the file's\n * contents to.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DownloadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Download a file into memory. 
The contents will be available as the\n * second\n * // argument in the demonstration below, `contents`.\n * //-\n * file.download(function(err, contents) {});\n *\n * //-\n * // Download a file to a local destination.\n * //-\n * file.download({\n * destination: '/Users/me/Desktop/file-backup.txt'\n * }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.download().then(function(data) {\n * const contents = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_download_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n *\n * @example include:samples/requesterPays.js\n * region_tag:storage_download_file_requester_pays\n * Example of downloading a file where the requester pays:\n */\n download(optionsOrCallback, cb) {\n let options;\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n options = {};\n }\n else {\n options = optionsOrCallback;\n }\n let called = false;\n const callback = ((...args) => {\n if (!called)\n cb(...args);\n called = true;\n });\n const destination = options.destination;\n delete options.destination;\n const fileStream = this.createReadStream(options);\n let receivedData = false;\n if (destination) {\n fileStream\n .on('error', callback)\n .once('data', data => {\n // We know that the file exists the server - now we can truncate/write to a file\n receivedData = true;\n const writable = fs.createWriteStream(destination);\n writable.write(data);\n fileStream\n .pipe(writable)\n .on('error', callback)\n .on('finish', callback);\n })\n .on('end', () => {\n // In the case of an empty file no data will be received before the end event fires\n if (!receivedData) {\n fs.openSync(destination, 'w');\n callback(null, Buffer.alloc(0));\n }\n });\n }\n else {\n this.getBufferFromReadable(fileStream)\n .then(contents => callback === null || callback === void 0 ? 
void 0 : callback(null, contents))\n .catch(callback);\n }\n }\n /**\n * The Storage API allows you to use a custom key for server-side encryption.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {string|buffer} encryptionKey An AES-256 encryption key.\n * @returns {File}\n *\n * @example\n * ```\n * const crypto = require('crypto');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const encryptionKey = crypto.randomBytes(32);\n *\n * const fileWithCustomEncryption = myBucket.file('my-file');\n * fileWithCustomEncryption.setEncryptionKey(encryptionKey);\n *\n * const fileWithoutCustomEncryption = myBucket.file('my-file');\n *\n * fileWithCustomEncryption.save('data', function(err) {\n * // Try to download with the File object that hasn't had\n * // `setEncryptionKey()` called:\n * fileWithoutCustomEncryption.download(function(err) {\n * // We will receive an error:\n * // err.message === 'Bad Request'\n *\n * // Try again with the File object we called `setEncryptionKey()` on:\n * fileWithCustomEncryption.download(function(err, contents) {\n * // contents.toString() === 'data'\n * });\n * });\n * });\n *\n * ```\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n */\n setEncryptionKey(encryptionKey) {\n this.encryptionKey = encryptionKey;\n this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64');\n this.encryptionKeyHash = crypto\n .createHash('sha256')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .update(this.encryptionKeyBase64, 'base64')\n .digest('base64');\n this.encryptionKeyInterceptor = {\n request: reqOpts => {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64;\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryptionKeyHash;\n return reqOpts;\n },\n };\n this.interceptors.push(this.encryptionKeyInterceptor);\n return this;\n }\n /**\n * @typedef {array} GetExpirationDateResponse\n * @property {date} 0 A Date object representing the earliest time this file's\n * retention policy will expire.\n */\n /**\n * @callback GetExpirationDateCallback\n * @param {?Error} err Request error, if any.\n * @param {date} expirationDate A Date object representing the earliest time\n * this file's retention policy will expire.\n */\n /**\n * If this bucket has a retention policy defined, use this method to get a\n * Date object representing the earliest time this file will expire.\n *\n * @param {GetExpirationDateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getExpirationDate(function(err, expirationDate) {\n * // expirationDate is a Date object.\n * });\n * ```\n */\n getExpirationDate(callback) {\n this.getMetadata((err, metadata, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n if (!metadata.retentionExpirationTime) {\n const error = new 
Error(FileExceptionMessages.EXPIRATION_TIME_NA);\n callback(error, null, apiResponse);\n return;\n }\n callback(null, new Date(metadata.retentionExpirationTime), apiResponse);\n });\n }\n /**\n * @typedef {array} GenerateSignedPostPolicyV2Response\n * @property {object} 0 The document policy.\n */\n /**\n * @callback GenerateSignedPostPolicyV2Callback\n * @param {?Error} err Request error, if any.\n * @param {object} policy The document policy.\n */\n /**\n * Get a signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process}\n *\n * @throws {Error} If an expiration timestamp from the past is given.\n * @throws {Error} If options.equals has an array with less or more than two\n * members.\n * @throws {Error} If options.startsWith has an array with less or more than two\n * members.\n *\n * @param {object} options Configuration options.\n * @param {array|array[]} [options.equals] Array of request parameters and\n * their expected value (e.g. [['$', '']]). Values are\n * translated into equality constraints in the conditions field of the\n * policy document (e.g. ['eq', '$', '']). If only one\n * equality condition is to be specified, options.equals can be a one-\n * dimensional array (e.g. ['$', '']).\n * @param {*} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {array|array[]} [options.startsWith] Array of request parameters and\n * their expected prefixes (e.g. [['$', '']). Values are\n * translated into starts-with constraints in the conditions field of the\n * policy document (e.g. ['starts-with', '$', '']). If only\n * one prefix condition is to be specified, options.startsWith can be a\n * one- dimensional array (e.g. 
['$', '']).\n * @param {string} [options.acl] ACL for the object from possibly predefined\n * ACLs.\n * @param {string} [options.successRedirect] The URL to which the user client\n * is redirected if the upload is successful.\n * @param {string} [options.successStatus] - The status of the Google Storage\n * response if the upload is successful (must be string).\n * @param {object} [options.contentLengthRange]\n * @param {number} [options.contentLengthRange.min] Minimum value for the\n * request's content length.\n * @param {number} [options.contentLengthRange.max] Maximum value for the\n * request's content length.\n * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * equals: ['$Content-Type', 'image/jpeg'],\n * expires: '10-25-2022',\n * contentLengthRange: {\n * min: 0,\n * max: 1024\n * }\n * };\n *\n * file.generateSignedPostPolicyV2(options, function(err, policy) {\n * // policy.string: the policy document in plain text.\n * // policy.base64: the policy document in base64.\n * // policy.signature: the policy signature in base64.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV2(options).then(function(data) {\n * const policy = data[0];\n * });\n * ```\n */\n generateSignedPostPolicyV2(optionsOrCallback, cb) {\n const args = (0, util_1.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n options = Object.assign({}, options);\n const conditions = [\n ['eq', '$key', this.name],\n {\n bucket: this.bucket.name,\n },\n ];\n if (Array.isArray(options.equals)) {\n if (!Array.isArray(options.equals[0])) {\n options.equals = [options.equals];\n }\n options.equals.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS);\n }\n conditions.push(['eq', condition[0], condition[1]]);\n });\n }\n if (Array.isArray(options.startsWith)) {\n if (!Array.isArray(options.startsWith[0])) {\n options.startsWith = [options.startsWith];\n }\n options.startsWith.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS);\n }\n conditions.push(['starts-with', condition[0], condition[1]]);\n });\n }\n if (options.acl) {\n conditions.push({\n acl: options.acl,\n });\n }\n if (options.successRedirect) {\n conditions.push({\n success_action_redirect: options.successRedirect,\n });\n }\n if (options.successStatus) {\n conditions.push({\n success_action_status: options.successStatus,\n });\n }\n if (options.contentLengthRange) {\n const min = options.contentLengthRange.min;\n const max = options.contentLengthRange.max;\n if (typeof min !== 'number' || typeof max !== 'number') {\n throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX);\n }\n conditions.push(['content-length-range', min, max]);\n }\n const policy = {\n expiration: expires.toISOString(),\n 
conditions,\n };\n const policyString = JSON.stringify(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n this.storage.authClient.sign(policyBase64).then(signature => {\n callback(null, {\n string: policyString,\n base64: policyBase64,\n signature,\n });\n }, err => {\n callback(new signer_1.SigningError(err.message));\n });\n }\n /**\n * @typedef {object} SignedPostPolicyV4Output\n * @property {string} url The request URL.\n * @property {object} fields The form fields to include in the POST request.\n */\n /**\n * @typedef {array} GenerateSignedPostPolicyV4Response\n * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields.\n */\n /**\n * @callback GenerateSignedPostPolicyV4Callback\n * @param {?Error} err Request error, if any.\n * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields.\n */\n /**\n * Get a v4 signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference}\n *\n * @param {object} options Configuration options.\n * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instead of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in\n * the result, e.g. \"https://cdn.example.com\".\n * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument}\n * to include in the signed policy. Any fields with key beginning with 'x-ignore-'\n * will not be included in the policy to be signed.\n * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document}\n * to include in the signed policy. 
All fields given in `config.fields` are\n * automatically included in the conditions array, adding the same entry\n * in both `fields` and `conditions` will result in duplicate entries.\n *\n * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * expires: '10-25-2022',\n * conditions: [\n * ['eq', '$Content-Type', 'image/jpeg'],\n * ['content-length-range', 0, 1024],\n * ],\n * fields: {\n * acl: 'public-read',\n * 'x-goog-meta-foo': 'bar',\n * 'x-ignore-mykey': 'data'\n * }\n * };\n *\n * file.generateSignedPostPolicyV4(options, function(err, response) {\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV4(options).then(function(data) {\n * const response = data[0];\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n * ```\n */\n generateSignedPostPolicyV4(optionsOrCallback, cb) {\n const args = (0, util_1.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n options = Object.assign({}, options);\n let fields = Object.assign({}, options.fields);\n const now = new Date();\n const nowISO = (0, util_1.formatAsUTCISO)(now, true);\n const todayISO = (0, util_1.formatAsUTCISO)(now);\n const sign = async () => {\n const { client_email } = await this.storage.authClient.getCredentials();\n const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`;\n fields = {\n ...fields,\n bucket: this.bucket.name,\n key: this.name,\n 'x-goog-date': nowISO,\n 'x-goog-credential': credential,\n 'x-goog-algorithm': 'GOOG4-RSA-SHA256',\n };\n const conditions = options.conditions || [];\n Object.entries(fields).forEach(([key, value]) => {\n if (!key.startsWith('x-ignore-')) {\n conditions.push({ [key]: value });\n }\n });\n delete fields.bucket;\n const expiration = (0, util_1.formatAsUTCISO)(expires, true, '-', ':');\n const policy = {\n conditions,\n expiration,\n };\n const policyString = (0, util_1.unicodeJSONStringify)(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n try {\n const signature = await this.storage.authClient.sign(policyBase64);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n fields['policy'] = policyBase64;\n fields['x-goog-signature'] = signatureHex;\n let url;\n if (options.virtualHostedStyle) {\n url = `https://${this.bucket.name}.storage.googleapis.com/`;\n }\n else if (options.bucketBoundHostname) {\n url = `${options.bucketBoundHostname}/`;\n }\n else {\n url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`;\n }\n return 
{\n url,\n fields,\n };\n }\n catch (err) {\n throw new signer_1.SigningError(err.message);\n }\n };\n sign().then(res => callback(null, res), callback);\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to the file.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {object} config Configuration object.\n * @param {string} config.action \"read\" (HTTP: GET), \"write\" (HTTP: PUT), or\n * \"delete\" (HTTP: DELETE), \"resumable\" (HTTP: POST).\n * When using \"resumable\", the header `X-Goog-Resumable: start` has\n * to be sent when making a request with the signed URL.\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instead of path-style URLs.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * @param {string} [config.contentMd5] The MD5 digest value in base64. If you\n * provide this, the client must provide this HTTP header with this same value\n * in its request; if this parameter is not provided here, the client must not\n * provide any value for this HTTP header in its request.\n * @param {string} [config.contentType] If you provide this, the client must\n * provide this HTTP header with this same value in its request; if this\n * parameter is not provided here, the client must not provide any value for\n * this HTTP header in its request.\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @param {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {string} [config.promptSaveAs] The filename to prompt the user to\n * save the file as when the signed url is accessed. This is ignored if\n * `config.responseDisposition` is set.\n * @param {string} [config.responseDisposition] The\n * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the\n * signed url.\n * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value\n * given is passed to `new Date()`.\n * Note: Use for 'v4' only.\n * @param {string} [config.responseType] The response-content-type parameter\n * of the signed url.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to read from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * // Access will begin at accessibleAt and end at expires.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * accessibleAt: '03-13-2025'\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL to allow write permissions. 
This means anyone with this\n * URL\n * // can send a POST request with new data that will overwrite the file.\n * //-\n * file.getSignedUrl({\n * action: 'write',\n * expires: '03-17-2025'\n * }, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to be written to.\n * const writeStream = request.put(url);\n * writeStream.end('New data');\n *\n * writeStream.on('complete', function(resp) {\n * // Confirm the new content was saved.\n * file.download(function(err, fileContents) {\n * console.log('Contents:', fileContents.toString());\n * // Contents: New data\n * });\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_generate_signed_url\n * Another example:\n */\n getSignedUrl(cfg, callback) {\n const method = ActionToHTTPMethod[cfg.action];\n if (!method) {\n throw new Error(storage_1.ExceptionMessages.INVALID_ACTION);\n }\n const extensionHeaders = (0, util_1.objectKeyToLowercase)(cfg.extensionHeaders || {});\n if (cfg.action === 'resumable') {\n extensionHeaders['x-goog-resumable'] = 'start';\n }\n const queryParams = Object.assign({}, cfg.queryParams);\n if (typeof cfg.responseType === 'string') {\n queryParams['response-content-type'] = cfg.responseType;\n }\n if (typeof cfg.promptSaveAs === 'string') {\n queryParams['response-content-disposition'] =\n 'attachment; filename=\"' + cfg.promptSaveAs + '\"';\n }\n if (typeof cfg.responseDisposition === 'string') {\n queryParams['response-content-disposition'] = cfg.responseDisposition;\n }\n if (this.generation) {\n queryParams['generation'] = this.generation.toString();\n }\n const signConfig = {\n method,\n expires: cfg.expires,\n accessibleAt: cfg.accessibleAt,\n extensionHeaders,\n queryParams,\n contentMd5: cfg.contentMd5,\n contentType: cfg.contentType,\n };\n if (cfg.cname) {\n signConfig.cname = cfg.cname;\n }\n if (cfg.version) {\n signConfig.version = cfg.version;\n }\n if (cfg.virtualHostedStyle) {\n signConfig.virtualHostedStyle = cfg.virtualHostedStyle;\n }\n if (!this.signer) {\n this.signer = new signer_1.URLSigner(this.storage.authClient, this.bucket, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback IsPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} resp Whether file is public or not.\n */\n /**\n * @typedef {array} IsPublicResponse\n * @property {boolean} 0 Whether file is public or not.\n */\n /**\n * Check whether this file is public or not by sending\n * a HEAD request without credentials.\n * No errors from the server indicates that the current\n * file is public.\n * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden}\n * indicates that file is private.\n * Any other non 403 error is propagated to user.\n *\n * @param {IsPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Check whether the file is publicly accessible.\n * //-\n * file.isPublic(function(err, resp) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * console.log(`the 
file ${file.id} is public: ${resp}`) ;\n * })\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.isPublic().then(function(data) {\n * const resp = data[0];\n * });\n * ```\n */\n isPublic(callback) {\n var _a;\n // Build any custom headers based on the defined interceptors on the parent\n // storage object and this object\n const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || [];\n const fileInterceptors = this.interceptors || [];\n const allInterceptors = storageInterceptors.concat(fileInterceptors);\n const headers = allInterceptors.reduce((acc, curInterceptor) => {\n const currentHeaders = curInterceptor.request({\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n });\n Object.assign(acc, currentHeaders.headers);\n return acc;\n }, {});\n nodejs_common_1.util.makeRequest({\n method: 'GET',\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n headers,\n }, {\n retryOptions: this.storage.retryOptions,\n }, (err) => {\n if (err) {\n const apiError = err;\n if (apiError.code === 403) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n }\n else {\n callback(null, true);\n }\n });\n }\n /**\n * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate().\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [strict] If true, set the file to be private to\n * only the owner user. Otherwise, it will be private to the project.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback MakeFilePrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} MakeFilePrivateResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Make a file private to the project and remove all other permissions.\n * Set `options.strict` to true to make the file private to only the owner.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @param {MakeFilePrivateOptions} [options] Configuration options.\n * @param {MakeFilePrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Set the file private so only project maintainers can see and modify it.\n * //-\n * file.makePrivate(function(err) {});\n *\n * //-\n * // Set the file private so only the owner can see and modify it.\n * //-\n * file.makePrivate({ strict: true }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePrivate().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const query = {\n predefinedAcl: options.strict ? 'private' : 'projectPrivate',\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n };\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifMetagenerationMatch) !== undefined) {\n query.ifMetagenerationMatch =\n (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch;\n delete options.preconditionOpts;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a file,\n // so acl must explicitly be nullified, destroying all previous acls on the\n // file.\n const metadata = extend({}, options.metadata, { acl: null });\n this.setMetadata(metadata, query, callback);\n }\n /**\n * @typedef {array} MakeFilePublicResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback MakeFilePublicCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set a file to be publicly readable and maintain all previous permissions.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {MakeFilePublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.makePublic(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePublic().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_make_public\n * Another example:\n */\n makePublic(callback) {\n callback = callback || nodejs_common_1.util.noop;\n this.acl.add({\n entity: 'allUsers',\n role: 'READER',\n }, (err, acl, resp) => {\n callback(err, resp);\n });\n }\n /**\n * The public URL of this File\n * Use {@link File#makePublic} to enable anonymous access via the returned URL.\n *\n * @returns {string}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * // publicUrl will be \"https://storage.googleapis.com/albums/my-file\"\n * const publicUrl = file.publicUrl();\n * ```\n */\n publicUrl() {\n return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`;\n }\n /**\n * @typedef {array} MoveResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback MoveCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} MoveOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Move this file to another location. 
By default, this will rename the file\n * and keep it in the same bucket, but you can choose to move it to another\n * Bucket by providing a Bucket or File object or a URL beginning with\n * \"gs://\".\n *\n * **Warning**:\n * There is currently no atomic `move` method in the Cloud Storage API,\n * so this method is a composition of {@link File#copy} (to the new\n * location) and {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. The error message will indicate what operation\n * has failed.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {MoveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is moved to its\n * // current bucket, under the new name provided.\n * //-\n * file.move('my-image-new.png', function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-new.png';\n * file.move(newLocation, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be moved to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = gcs.bucket('another-bucket');\n *\n * file.move(anotherBucket, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.move(anotherFile, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `destinationFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * 
file.move('my-image-new.png').then(function(data) {\n * const destinationFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_move_file\n * Another example:\n */\n move(destination, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || nodejs_common_1.util.noop;\n this.copy(destination, options, (err, destinationFile, copyApiResponse) => {\n if (err) {\n err.message = 'file#copy failed with an error - ' + err.message;\n callback(err, null, copyApiResponse);\n return;\n }\n if (this.name !== destinationFile.name ||\n this.bucket.name !== destinationFile.bucket.name) {\n this.delete(options, (err, apiResponse) => {\n if (err) {\n err.message = 'file#delete failed with an error - ' + err.message;\n callback(err, destinationFile, apiResponse);\n return;\n }\n callback(null, destinationFile, copyApiResponse);\n });\n }\n else {\n callback(null, destinationFile, copyApiResponse);\n }\n });\n }\n /**\n * @typedef {array} RenameResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback RenameCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} RenameOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Rename this file.\n *\n * **Warning**:\n * There is currently no atomic `rename` method in the Cloud Storage API,\n * so this method is an alias of {@link File#move}, which in turn is a\n * composition of {@link File#copy} (to the new location) and\n * {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. 
The error message will indicate what operation\n * has failed.\n *\n * @param {string|File} destinationFile Destination file.\n * @param {RenameCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a string or a File object.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n *\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // You can pass in a string for the destinationFile.\n * //-\n * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"renamed-image.png\"\n *\n * // `renamedFile` is an instance of a File object that refers to your\n * // renamed file.\n * });\n *\n * //-\n * // You can pass in a File object.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.rename(anotherFile, function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n *\n * // Note:\n * // The `renamedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.rename('my-renamed-image.png').then(function(data) {\n * const renamedFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n rename(destinationFile, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || nodejs_common_1.util.noop;\n this.move(destinationFile, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n return this.parent.request.call(this, reqOpts, callback);\n }\n /**\n * @callback RotateEncryptionKeyCallback\n * @extends CopyCallback\n */\n /**\n * @typedef RotateEncryptionKeyResponse\n * @extends CopyResponse\n */\n /**\n * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options\n * for File#rotateEncryptionKey().\n * If a string or Buffer is provided, it is interpreted as an AES-256,\n * customer-supplied encryption key. If you'd like to use a Cloud KMS key\n * name, you must specify an options object with the property name:\n * `kmsKeyName`.\n * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key.\n * @param {string} [options.kmsKeyName] A Cloud KMS key name.\n */\n /**\n * This method allows you to update the encryption key associated with this\n * file.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {RotateEncryptionKeyOptions} [options] - Configuration options.\n * @param {RotateEncryptionKeyCallback} [callback]\n * @returns {Promise}\n *\n * @example include:samples/encryption.js\n * region_tag:storage_rotate_encryption_key\n * Example of rotating the encryption key for this file:\n */\n rotateEncryptionKey(optionsOrCallback, callback) {\n var _a;\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n let options = {};\n if (typeof optionsOrCallback === 'string' ||\n optionsOrCallback instanceof Buffer) {\n options = {\n encryptionKey: optionsOrCallback,\n };\n }\n else if (typeof optionsOrCallback === 'object') {\n options = optionsOrCallback;\n }\n const newFile = this.bucket.file(this.id, options);\n const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined\n ? { preconditionOpts: options.preconditionOpts }\n : {};\n this.copy(newFile, copyOptions, callback);\n }\n /**\n * @typedef {object} SaveOptions\n * @extends CreateWriteStreamOptions\n */\n /**\n * @callback SaveCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Write strings or buffers to a file.\n *\n * *This is a convenience method which wraps {@link File#createWriteStream}.*\n * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff.\n *\n *
\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n *
\n *\n * @param {string | Buffer} data The data to write to a file.\n * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options`\n * parameter.\n * @param {SaveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const contents = 'This is the contents of the file.';\n *\n * file.save(contents, function(err) {\n * if (!err) {\n * // File written successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.save(contents).then(function() {});\n * ```\n */\n save(data, optionsOrCallback, callback) {\n // tslint:enable:no-any\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n let maxRetries = this.storage.retryOptions.maxRetries;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n maxRetries = 0;\n }\n const returnValue = retry(async (bail) => {\n await new Promise((resolve, reject) => {\n if (maxRetries === 0) {\n this.storage.retryOptions.autoRetry = false;\n }\n const writable = this.createWriteStream(options)\n .on('error', err => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n else {\n return bail(err);\n }\n })\n .on('finish', () => {\n return resolve();\n });\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n writable.end(data);\n });\n }, {\n retries: maxRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback();\n }\n })\n .catch(callback);\n }\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_1.AvailableServiceObjectMethods.setMetadata, options);\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} SetStorageClassResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetStorageClassCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set the storage class for this file.\n *\n * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. 
(`standard`,\n * `nearline`, `coldline`, or `archive`)\n * **Note:** The storage classes `multi_regional` and `regional`\n * are now legacy and will be deprecated in the future.\n * @param {SetStorageClassOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * file.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n const req = extend(true, {}, options);\n // In case we get input like `storageClass`, convert to `storage_class`.\n req.storageClass = storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase();\n this.copy(this, req, (err, file, apiResponse) => {\n if (err) {\n callback(err, apiResponse);\n return;\n }\n this.metadata = file.metadata;\n callback(null, apiResponse);\n });\n }\n /**\n * Set a user project to be billed for all requests made from this File\n * object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * file.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.bucket.setUserProject.call(this, userProject);\n }\n /**\n * This creates a resumable-upload upload stream.\n *\n * @param {Duplexify} stream - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startResumableUpload_(dup, options) {\n options = extend(true, {\n metadata: {},\n }, options);\n const retryOptions = this.storage.retryOptions;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n retryOptions.autoRetry = false;\n }\n const uploadStream = resumableUpload.upload({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n uri: options.uri,\n userProject: options.userProject || this.userProject,\n retryOptions: { ...retryOptions },\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n chunkSize: options === null || options === void 0 ? 
void 0 : options.chunkSize,\n });\n uploadStream\n .on('response', resp => {\n dup.emit('response', resp);\n })\n .on('metadata', metadata => {\n this.metadata = metadata;\n dup.emit('metadata');\n })\n .on('finish', () => {\n dup.emit('complete');\n })\n .on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(uploadStream);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * Takes a readable stream and pipes it to a remote file. Unlike\n * `startResumableUpload_`, which uses the resumable upload technique, this\n * method uses a simple upload (all or nothing).\n *\n * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startSimpleUpload_(dup, options) {\n options = extend(true, {\n metadata: {},\n }, options);\n const apiEndpoint = this.storage.apiEndpoint;\n const bucketName = this.bucket.name;\n const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`;\n const reqOpts = {\n qs: {\n name: this.name,\n },\n uri: uri,\n };\n if (this.generation !== undefined) {\n reqOpts.qs.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName !== undefined) {\n reqOpts.qs.kmsKeyName = this.kmsKeyName;\n }\n if (typeof options.timeout === 'number') {\n reqOpts.timeout = options.timeout;\n }\n if (options.userProject || this.userProject) {\n reqOpts.qs.userProject = options.userProject || this.userProject;\n }\n if (options.predefinedAcl) {\n reqOpts.qs.predefinedAcl = options.predefinedAcl;\n }\n else if (options.private) {\n reqOpts.qs.predefinedAcl = 'private';\n }\n else if (options.public) {\n reqOpts.qs.predefinedAcl = 'publicRead';\n }\n Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts);\n nodejs_common_1.util.makeWritableStream(dup, {\n makeAuthenticatedRequest: (reqOpts) => {\n this.request(reqOpts, (err, body, resp) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n this.metadata = body;\n dup.emit('metadata', body);\n dup.emit('response', resp);\n dup.emit('complete');\n });\n },\n metadata: options.metadata,\n request: reqOpts,\n });\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, methodType, localPreconditionOptions) {\n var _a, _b, _c, _d;\n if ((typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined &&\n methodType === bucket_1.AvailableServiceObjectMethods.delete &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n if ((typeof coreOpts === 'object' &&\n ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n methodType === bucket_1.AvailableServiceObjectMethods.setMetadata &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n async getBufferFromReadable(readable) {\n const buf = [];\n for await (const chunk of readable) {\n buf.push(chunk);\n }\n return Buffer.concat(buf);\n }\n}\nexports.File = File;\n_File_instances = new WeakSet(), _File_validateIntegrity = \n/**\n *\n * @param hashCalculatingStream\n * @param verify\n * @returns {boolean} Returns `true` if valid, throws with error otherwise\n */\nasync function _File_validateIntegrity(hashCalculatingStream, verify = {}) {\n const metadata = this.metadata;\n // If we're doing validation, assume the worst\n let dataMismatch = !!(verify.crc32c || verify.md5);\n if (verify.crc32c && metadata.crc32c) {\n dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c);\n }\n if (verify.md5 && metadata.md5Hash) {\n dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash);\n }\n if (dataMismatch) {\n const errors = [];\n let code = '';\n let message = '';\n try {\n await this.delete();\n if (verify.md5 && !metadata.md5Hash) {\n code = 'MD5_NOT_AVAILABLE';\n message = FileExceptionMessages.MD5_NOT_AVAILABLE;\n }\n else {\n code = 'FILE_NO_UPLOAD';\n message = FileExceptionMessages.UPLOAD_MISMATCH;\n }\n }\n catch (e) {\n const error = e;\n code = 'FILE_NO_UPLOAD_DELETE';\n message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`;\n errors.push(error);\n }\n const error = new RequestError(message);\n error.code = code;\n error.errors = errors;\n throw error;\n }\n return true;\n};\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(File, {\n exclude: [\n 'cloudStorageURI',\n 'publicUrl',\n 'request',\n 'save',\n 'setEncryptionKey',\n 'shouldRetryBasedOnPreconditionAndIdempotencyStrat',\n 'getBufferFromReadable',\n ],\n});\n//# sourceMappingURL=file.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HashStreamValidator = void 0;\nconst crypto_1 = require(\"crypto\");\nconst stream_1 = require(\"stream\");\nconst crc32c_1 = require(\"./crc32c\");\nconst file_1 = require(\"./file\");\nclass HashStreamValidator extends stream_1.Transform {\n constructor(options = {}) {\n super();\n this.updateHashesOnly = false;\n _HashStreamValidator_crc32cHash.set(this, undefined);\n _HashStreamValidator_md5Hash.set(this, undefined);\n _HashStreamValidator_md5Digest.set(this, '');\n this.crc32cEnabled = !!options.crc32c;\n this.md5Enabled = !!options.md5;\n this.updateHashesOnly = !!options.updateHashesOnly;\n this.crc32cExpected = options.crc32cExpected;\n this.md5Expected = options.md5Expected;\n if (this.crc32cEnabled) {\n const crc32cGenerator = options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), \"f\");\n }\n if (this.md5Enabled) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), \"f\");\n }\n }\n _flush(callback) {\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").digest('base64'), \"f\");\n }\n if (this.updateHashesOnly) {\n callback();\n return;\n }\n // If we're doing validation, assume the worst-- a data integrity\n // mismatch. If not, these tests won't be performed, and we can assume\n // the best.\n // We must check if the server decompressed the data on serve because hash\n // validation is not possible in this case.\n let failed = this.crc32cEnabled || this.md5Enabled;\n if (this.crc32cEnabled && this.crc32cExpected) {\n failed = !this.test('crc32c', this.crc32cExpected);\n }\n if (this.md5Enabled && this.md5Expected) {\n failed = !this.test('md5', this.md5Expected);\n }\n if (failed) {\n const mismatchError = new file_1.RequestError(file_1.FileExceptionMessages.DOWNLOAD_MISMATCH);\n mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';\n callback(mismatchError);\n }\n else {\n callback();\n }\n }\n _transform(chunk, encoding, callback) {\n this.push(chunk, encoding);\n try {\n if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").update(chunk);\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").update(chunk);\n callback();\n }\n catch (e) {\n callback(e);\n }\n }\n test(hash, sum) {\n const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum;\n if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").validate(check);\n }\n if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, \"f\") === check;\n }\n return false;\n }\n}\nexports.HashStreamValidator = HashStreamValidator;\n_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap();\n//# sourceMappingURL=hash-stream-validator.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HmacKey = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst storage_1 = require(\"./storage\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the HMAC key.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name HmacKey#metadata\n * @type {object}\n */\n/**\n * An HmacKey object contains metadata of an HMAC key created from a\n * service account through the {@link Storage} client using\n * {@link Storage#createHmacKey}.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @class\n */\nclass HmacKey extends nodejs_common_1.ServiceObject {\n /**\n * @typedef {object} HmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. 
If not provided,\n * the project ID used to instantiate the Storage client will be used.\n */\n /**\n * Constructs an HmacKey object.\n *\n * Note: this only create a local reference to an HMAC key, to create\n * an HMAC key, use {@link Storage#createHmacKey}.\n *\n * @param {Storage} storage The Storage instance this HMAC key is\n * attached to.\n * @param {string} accessId The unique accessId for this HMAC key.\n * @param {HmacKeyOptions} options Constructor configurations.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('access-id');\n * ```\n */\n constructor(storage, accessId, options) {\n const methods = {\n /**\n * @typedef {object} DeleteHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {array} DeleteHmacKeyResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Deletes an HMAC key.\n * Key state must be set to `INACTIVE` prior to deletion.\n * Caution: HMAC keys cannot be recovered once you delete them.\n *\n * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists.\n *\n * @method HmacKey#delete\n * @param {DeleteHmacKeyOptions} [options] Configuration options.\n * @param {DeleteHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Delete HMAC key after making the key inactive.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * hmacKey.delete((err) => {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * // The HMAC key is deleted.\n * });\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey\n * .setMetadata({state: 'INACTIVE'})\n * .then(() => {\n * return hmacKey.delete();\n * });\n * ```\n */\n delete: true,\n /**\n * @callback GetHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey this {@link HmacKey} instance.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetHmacKeyResponse\n * @property {HmacKey} 0 This {@link HmacKey} instance.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} GetHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * this {@link HmacKey} instance.\n *\n * HmacKey.get() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#get\n * @param {GetHmacKeyOptions} [options] Configuration options.\n * @param {GetHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's Metadata.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * 
.get((err, hmacKey) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * // do something with the returned HmacKey object.\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .get()\n * .then((data) => {\n * const hmacKey = data[0];\n * });\n * ```\n */\n get: true,\n /**\n * @typedef {object} GetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * the HMAC key's metadata as an object.\n *\n * HmacKey.getMetadata() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#getMetadata\n * @param {GetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's metadata and populate to the metadata property.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata((err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata()\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n getMetadata: true,\n /**\n * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update.\n * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'.\n * @property {string} [etag] Include an etag from a previous get HMAC key request\n * to perform safe read-modify-write.\n */\n /**\n * @typedef {object} SetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @callback HmacKeyMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} HmacKeyMetadataResponse\n * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object.\n * @property {object} 1 The full API response.\n */\n /**\n * Updates the state of an HMAC key. 
See {@link SetHmacKeyMetadata} for\n * valid states.\n *\n * @method HmacKey#setMetadata\n * @param {SetHmacKeyMetadata} metadata The new metadata.\n * @param {SetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * const metadata = {\n * state: 'INACTIVE',\n * };\n *\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata)\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n method: 'PUT',\n },\n },\n };\n const projectId = (options && options.projectId) || storage.projectId;\n super({\n parent: storage,\n id: accessId,\n baseUrl: `/projects/${projectId}/hmacKeys`,\n methods,\n });\n this.storage = storage;\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways\n if (this.storage.retryOptions.idempotencyStrategy !==\n storage_1.IdempotencyStrategy.RetryAlways) {\n this.storage.retryOptions.autoRetry = false;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n}\nexports.HmacKey = HmacKey;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(HmacKey);\n//# sourceMappingURL=hmacKey.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Iam = exports.IAMExceptionMessages = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst util_1 = require(\"./util\");\nvar IAMExceptionMessages;\n(function (IAMExceptionMessages) {\n IAMExceptionMessages[\"POLICY_OBJECT_REQUIRED\"] = \"A policy object is required.\";\n IAMExceptionMessages[\"PERMISSIONS_REQUIRED\"] = \"Permissions are required.\";\n})(IAMExceptionMessages = exports.IAMExceptionMessages || (exports.IAMExceptionMessages = {}));\n/**\n * Get and set IAM policies for your Cloud Storage bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @constructor Iam\n *\n * @param {Bucket} bucket The parent instance.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * // bucket.iam\n * ```\n */\nclass Iam {\n constructor(bucket) {\n this.request_ = bucket.request.bind(bucket);\n this.resourceId_ = 'buckets/' + bucket.getId();\n }\n /**\n * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy().\n * @property {number} [requestedPolicyVersion] The version of IAM policies to\n * request. If a policy with a condition is requested without setting\n * this, the server will return an error. This must be set to a value\n * of 3 to retrieve IAM policies containing conditions. This is to\n * prevent client code that isn't aware of IAM conditions from\n * interpreting and modifying policies incorrectly. 
The service might\n * return a policy with version lower than the one that was requested,\n * based on the feature syntax in the policy fetched.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetPolicyResponse\n * @property {Policy} 0 The policy.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} Policy\n * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles.\n * @property {string} [policy.etag] Etags are used to perform a read-modify-write.\n * @property {number} [policy.version] The syntax schema version of the Policy.\n * To set an IAM policy with conditional binding, this field must be set to\n * 3 or greater.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n */\n /**\n * @typedef {object} PolicyBinding\n * @property {string} role Role that is assigned to members.\n * @property {string[]} members Specifies the identities requesting access for the bucket.\n * @property {Expr} [condition] The condition that is associated with this binding.\n */\n /**\n * @typedef {object} Expr\n * @property {string} [title] An optional title for the expression, i.e. a\n * short string describing its purpose. This can be used e.g. in UIs\n * which allow to enter the expression.\n * @property {string} [description] An optional description of the\n * expression. This is a longer text which describes the expression,\n * e.g. when hovered over it in a UI.\n * @property {string} expression Textual representation of an expression in\n * Common Expression Language syntax. The application context of the\n * containing message determines which well-known feature set of CEL\n * is supported.The condition that is associated with this binding.\n *\n * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions\n */\n /**\n * Get the IAM policy.\n *\n * @param {GetPolicyOptions} [options] Request options.\n * @param {GetPolicyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.iam.getPolicy(\n * {requestedPolicyVersion: 3},\n * function(err, policy, apiResponse) {\n *\n * },\n * );\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy({requestedPolicyVersion: 3})\n * .then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n */\n getPolicy(optionsOrCallback, callback) {\n const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback);\n const qs = {};\n if (options.userProject) {\n qs.userProject = options.userProject;\n }\n if (options.requestedPolicyVersion !== null &&\n options.requestedPolicyVersion !== undefined) {\n qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion;\n }\n this.request_({\n uri: '/iam',\n qs,\n }, cb);\n }\n /**\n * Set the IAM policy.\n *\n * @throws {Error} If no policy is provided.\n *\n * @param {Policy} policy The 
policy.\n * @param {SetPolicyOptions} [options] Configuration options.\n * @param {SetPolicyCallback} callback Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const myPolicy = {\n * bindings: [\n * {\n * role: 'roles/storage.admin',\n * members:\n * ['serviceAccount:myotherproject@appspot.gserviceaccount.com']\n * }\n * ]\n * };\n *\n * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.setPolicy(myPolicy).then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n setPolicy(policy, optionsOrCallback, callback) {\n if (policy === null || typeof policy !== 'object') {\n throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED);\n }\n const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback);\n let maxRetries;\n if (policy.etag === undefined) {\n maxRetries = 0;\n }\n this.request_({\n method: 'PUT',\n uri: '/iam',\n maxRetries,\n json: Object.assign({\n resourceId: this.resourceId_,\n }, policy),\n qs: options,\n }, cb);\n }\n /**\n * Test a set of permissions for a resource.\n *\n * @throws {Error} If permissions are not provided.\n *\n * @param {string|string[]} permissions The permission(s) to test for.\n * @param {TestIamPermissionsOptions} [options] Configuration object.\n * @param {TestIamPermissionsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * //-\n * // Test a single permission.\n * //-\n * const test = 'storage.buckets.delete';\n *\n * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": true\n * // }\n * });\n *\n * //-\n * // Test several permissions at once.\n * //-\n * const tests = [\n * 'storage.buckets.delete',\n * 'storage.buckets.get'\n * ];\n *\n * bucket.iam.testPermissions(tests, function(err, permissions) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": false,\n * // \"storage.buckets.get\": true\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.testPermissions(test).then(function(data) {\n * const permissions = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n testPermissions(permissions, optionsOrCallback, callback) {\n if (!Array.isArray(permissions) && typeof permissions !== 'string') {\n throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED);\n }\n const { options, callback: cb } = (0, 
util_1.normalize)(optionsOrCallback, callback);\n const permissionsArray = Array.isArray(permissions)\n ? permissions\n : [permissions];\n const req = Object.assign({\n permissions: permissionsArray,\n }, options);\n this.request_({\n uri: '/iam/testPermissions',\n qs: req,\n useQuerystring: true,\n }, (err, resp) => {\n if (err) {\n cb(err, null, resp);\n return;\n }\n const availablePermissions = Array.isArray(resp.permissions)\n ? resp.permissions\n : [];\n const permissionsHash = permissionsArray.reduce((acc, permission) => {\n acc[permission] = availablePermissions.indexOf(permission) > -1;\n return acc;\n }, {});\n cb(null, permissionsHash, resp);\n });\n }\n}\nexports.Iam = Iam;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Iam);\n//# sourceMappingURL=iam.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Storage = exports.IdempotencyStrategy = exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = void 0;\nvar bucket_1 = require(\"./bucket\");\nObject.defineProperty(exports, \"Bucket\", { enumerable: true, get: function () { return bucket_1.Bucket; } });\n__exportStar(require(\"./crc32c\"), exports);\nvar channel_1 = require(\"./channel\");\nObject.defineProperty(exports, \"Channel\", { enumerable: true, get: function () { return channel_1.Channel; } });\nvar file_1 = require(\"./file\");\nObject.defineProperty(exports, \"File\", { enumerable: true, get: function () { return file_1.File; } });\n__exportStar(require(\"./hash-stream-validator\"), exports);\nvar hmacKey_1 = require(\"./hmacKey\");\nObject.defineProperty(exports, \"HmacKey\", { enumerable: true, get: function () { return hmacKey_1.HmacKey; } });\nvar iam_1 = require(\"./iam\");\nObject.defineProperty(exports, \"Iam\", { enumerable: true, get: function () { return iam_1.Iam; } });\nvar notification_1 = require(\"./notification\");\nObject.defineProperty(exports, \"Notification\", { enumerable: true, get: function () { return notification_1.Notification; } });\nvar storage_1 = require(\"./storage\");\nObject.defineProperty(exports, 
\"IdempotencyStrategy\", { enumerable: true, get: function () { return storage_1.IdempotencyStrategy; } });\nObject.defineProperty(exports, \"Storage\", { enumerable: true, get: function () { return storage_1.Storage; } });\n__exportStar(require(\"./transfer-manager\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0;\nvar service_1 = require(\"./service\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_1.Service; } });\nvar service_object_1 = require(\"./service-object\");\nObject.defineProperty(exports, \"ServiceObject\", { enumerable: true, get: function () { return service_object_1.ServiceObject; } });\nvar util_1 = require(\"./util\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_1.ApiError; } });\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_1.util; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst events_1 = require(\"events\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. 
For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. a \"File\") to only have\n // the configured methods. We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n if (self.id && instance.metadata) {\n self.id = instance.metadata.id;\n }\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = extend(true, {\n method: 'DELETE',\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(err, ...args);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = extend(true, {\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = extend(true, {}, {\n method: 'PATCH',\n uri: '',\n }, methodConfig.reqOpts, {\n json: metadata,\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts);\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = Array.isArray(reqOpts.interceptors_)\n ? reqOpts.interceptors_\n : [];\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n//# sourceMappingURL=service-object.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst extend = require(\"extend\");\nconst uuid = require(\"uuid\");\nconst util_1 = require(\"./util\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = Array.isArray(options.interceptors_)\n ? 
options.interceptors_\n : [];\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired !== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = extend({}, config, {\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n });\n this.makeAuthenticatedRequest =\n util_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout });\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n const interceptorArray = Array.isArray(reqOpts.interceptors_)\n ? 
reqOpts.interceptors_\n : [];\n interceptorArray.forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = util_1.util.getUserAgentFromPackageJson(pkg);\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = extend({}, reqOpts.headers, {\n 'User-Agent': userAgent,\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`,\n });\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n//# sourceMappingURL=service.js.map","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = require(\"ent\");\nconst extend = require(\"extend\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retryRequest = require(\"retry-request\");\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst uuid = require(\"uuid\");\nconst service_1 = require(\"./service\");\nconst packageJson = require('../../../package.json');\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error 
object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body));\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} 
parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = extend(true, defaultReqOpts, options.request, {\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n });\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n requestDefaults.headers = util._getDefaultHeaders();\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be 
retried, given the error that was\n * given the first time the request was attempted. This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = extend({}, config);\n if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = extend({}, config);\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = duplexify();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries !== undefined) {\n maxRetryValue = config.maxRetries;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n requestDefaults.headers = this._getDefaultHeaders();\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay,\n retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier,\n totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n options.noResponseRetries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return retryRequest(reqOpts, options, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = retryRequest(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Create a properly-formatted User-Agent string from a package.json file.\n *\n * @param {object} packageJson - A module's package.json file.\n * @return {string} userAgent - The formatted User-Agent string.\n */\n getUserAgentFromPackageJson(packageJson) {\n const hyphenatedPackageName = packageJson.name\n .replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + packageJson.version;\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a 
callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? [{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n _getDefaultHeaders() {\n return {\n 'User-Agent': util.getUserAgentFromPackageJson(packageJson),\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`,\n };\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n//# sourceMappingURL=util.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Notification = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the notification.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Notification#metadata\n * @type {object}\n */\n/**\n * A Notification object is created from your {@link Bucket} object using\n * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub\n * notifications.\n *\n * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage}\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket The bucket instance this notification is attached to.\n * @param {string} id The ID of the notification.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const notification = myBucket.notification('1');\n * ```\n */\nclass Notification extends nodejs_common_1.ServiceObject {\n constructor(bucket, id) {\n const methods = {\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n * @method Notification#create\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. 
If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationRequest} [options] Metadata to set for\n * the notification.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.create(function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.create().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: true,\n /**\n * @typedef {array} NotificationExistsResponse\n * @property {boolean} 0 Whether the notification exists or not.\n */\n /**\n * @callback NotificationExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the notification exists or not.\n */\n /**\n * Check if the notification exists.\n *\n * @method Notification#exists\n * @param {NotificationExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: true,\n };\n super({\n parent: bucket,\n baseUrl: '/notificationConfigs',\n id: id.toString(),\n createMethod: bucket.createNotification.bind(bucket),\n methods,\n });\n }\n /**\n * @typedef {array} DeleteNotificationResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Permanently deletes a notification subscription.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/deleteNotification.js\n * region_tag:storage_delete_bucket_notification\n * Another example:\n */\n delete(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.request({\n method: 'DELETE',\n uri: '',\n qs: options,\n }, callback || nodejs_common_1.util.noop);\n }\n /**\n * Get a notification and its metadata if it exists.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * See {@link Bucket#createNotification} for create options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. Default: `false`.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.get(function(err, notification, apiResponse) {\n * // `notification.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.get().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const autoCreate = options.autoCreate;\n delete options.autoCreate;\n const onCreate = (err, notification, apiResponse) => {\n if (err) {\n if (err.code === 409) {\n this.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, notification, apiResponse);\n };\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n // eslint-disable-next-line\n this.create.apply(this, args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, this, metadata);\n });\n }\n /**\n * Get the notification's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/getMetadataNotifications.js\n * region_tag:storage_print_pubsub_bucket_notification\n * Another example:\n */\n getMetadata(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.request({\n uri: '',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n this.metadata = resp;\n callback(null, this.metadata, resp);\n });\n }\n}\nexports.Notification = Notification;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Notification);\n//# sourceMappingURL=notification.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;\nconst abort_controller_1 = require(\"abort-controller\");\nconst crypto_1 = require(\"crypto\");\nconst extend = require(\"extend\");\nconst gaxios = require(\"gaxios\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst stream_1 = require(\"stream\");\nconst retry = require(\"async-retry\");\nconst uuid = require(\"uuid\");\nconst NOT_FOUND_STATUS_CODE = 404;\nconst RESUMABLE_INCOMPLETE_STATUS_CODE = 308;\nconst DEFAULT_API_ENDPOINT_REGEX = /.*\\.googleapis\\.com/;\nconst packageJson = require('../../package.json');\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\nclass Upload extends stream_1.Writable {\n constructor(cfg) {\n super();\n this.numBytesWritten = 0;\n this.numRetries = 0;\n this.currentInvocationId = {\n chunk: uuid.v4(),\n uri: uuid.v4(),\n offset: uuid.v4(),\n };\n this.upstreamChunkBuffer = Buffer.alloc(0);\n this.chunkBufferEncoding = undefined;\n this.numChunksReadInRequest = 0;\n /**\n * A chunk used for caching the most recent upload chunk.\n * We should not assume that the server received all bytes sent in the request.\n * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n */\n this.lastChunkSent = Buffer.alloc(0);\n this.upstreamEnded = false;\n cfg = cfg || {};\n if (!cfg.bucket || !cfg.file) {\n throw new Error('A bucket and file name are required');\n }\n cfg.authConfig = cfg.authConfig || {};\n cfg.authConfig.scopes = [\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ];\n this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig);\n this.apiEndpoint = 'https://storage.googleapis.com';\n if (cfg.apiEndpoint) {\n this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint);\n if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) {\n this.authClient = gaxios;\n }\n }\n this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`;\n this.bucket = cfg.bucket;\n const cacheKeyElements = [cfg.bucket, cfg.file];\n if (typeof cfg.generation === 'number') {\n cacheKeyElements.push(`${cfg.generation}`);\n }\n this.cacheKey = cacheKeyElements.join('/');\n this.customRequestOptions = cfg.customRequestOptions || {};\n this.file = cfg.file;\n this.generation = cfg.generation;\n this.kmsKeyName = cfg.kmsKeyName;\n this.metadata = 
cfg.metadata || {};\n this.offset = cfg.offset;\n this.origin = cfg.origin;\n this.params = cfg.params || {};\n this.userProject = cfg.userProject;\n this.chunkSize = cfg.chunkSize;\n this.retryOptions = cfg.retryOptions;\n if (cfg.key) {\n const base64Key = Buffer.from(cfg.key).toString('base64');\n this.encryption = {\n key: base64Key,\n hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'),\n };\n }\n this.predefinedAcl = cfg.predefinedAcl;\n if (cfg.private)\n this.predefinedAcl = 'private';\n if (cfg.public)\n this.predefinedAcl = 'publicRead';\n const autoRetry = cfg.retryOptions.autoRetry;\n this.uriProvidedManually = !!cfg.uri;\n this.uri = cfg.uri;\n this.numBytesWritten = 0;\n this.numRetries = 0; // counter for number of retries currently executed\n if (!autoRetry) {\n cfg.retryOptions.maxRetries = 0;\n }\n this.timeOfFirstRequest = Date.now();\n const contentLength = cfg.metadata\n ? Number(cfg.metadata.contentLength)\n : NaN;\n this.contentLength = isNaN(contentLength) ? '*' : contentLength;\n this.once('writing', () => {\n if (this.uri) {\n this.continueUploading();\n }\n else {\n this.createURI(err => {\n if (err) {\n return this.destroy(err);\n }\n this.startUploading();\n return;\n });\n }\n });\n }\n /**\n * Prevent 'finish' event until the upload has succeeded.\n *\n * @param fireFinishEvent The finish callback\n */\n _final(fireFinishEvent = () => { }) {\n this.upstreamEnded = true;\n this.once('uploadFinished', fireFinishEvent);\n process.nextTick(() => {\n this.emit('upstreamFinished');\n // it's possible `_write` may not be called - namely for empty object uploads\n this.emit('writing');\n });\n }\n /**\n * Handles incoming data from upstream\n *\n * @param chunk The chunk to append to the buffer\n * @param encoding The encoding of the chunk\n * @param readCallback A callback for when the buffer has been read downstream\n */\n _write(chunk, encoding, readCallback = () => { }) {\n // Backwards-compatible event\n this.emit('writing');\n this.upstreamChunkBuffer = Buffer.concat([\n this.upstreamChunkBuffer,\n typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk,\n ]);\n this.chunkBufferEncoding = encoding;\n this.once('readFromChunkBuffer', readCallback);\n process.nextTick(() => this.emit('wroteToChunkBuffer'));\n }\n /**\n * Prepends data back to the upstream chunk buffer.\n *\n * @param chunk The data to prepend\n */\n unshiftChunkBuffer(chunk) {\n this.upstreamChunkBuffer = Buffer.concat([chunk, this.upstreamChunkBuffer]);\n }\n /**\n * Retrieves data from upstream's buffer.\n *\n * @param limit The maximum amount to return from the buffer.\n * @returns The data requested.\n */\n pullFromChunkBuffer(limit) {\n const chunk = this.upstreamChunkBuffer.slice(0, limit);\n this.upstreamChunkBuffer = this.upstreamChunkBuffer.slice(limit);\n // notify upstream we've read from the buffer so it can potentially\n // send more data down.\n process.nextTick(() => this.emit('readFromChunkBuffer'));\n return chunk;\n }\n /**\n * A handler for determining if data is ready to be read from upstream.\n *\n * @returns If there will be more chunks to read in the future\n */\n async waitForNextChunk() {\n const willBeMoreChunks = await new Promise(resolve => {\n // There's data available - it should be digested\n if (this.upstreamChunkBuffer.byteLength) {\n return resolve(true);\n }\n // The upstream writable ended, we shouldn't expect any more data.\n if (this.upstreamEnded) {\n return resolve(false);\n }\n // Nothing immediate seems to be determined. 
We need to prepare some\n // listeners to determine next steps...\n const wroteToChunkBufferCallback = () => {\n removeListeners();\n return resolve(true);\n };\n const upstreamFinishedCallback = () => {\n removeListeners();\n // this should be the last chunk, if there's anything there\n if (this.upstreamChunkBuffer.length)\n return resolve(true);\n return resolve(false);\n };\n // Remove listeners when we're ready to callback.\n const removeListeners = () => {\n this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback);\n this.removeListener('upstreamFinished', upstreamFinishedCallback);\n };\n // If there's data recently written it should be digested\n this.once('wroteToChunkBuffer', wroteToChunkBufferCallback);\n // If the upstream finishes let's see if there's anything to grab\n this.once('upstreamFinished', upstreamFinishedCallback);\n });\n return willBeMoreChunks;\n }\n /**\n * Reads data from upstream up to the provided `limit`.\n * Ends when the limit has reached or no data is expected to be pushed from upstream.\n *\n * @param limit The most amount of data this iterator should return. `Infinity` by default.\n * @param oneChunkMode Determines if one, exhaustive chunk is yielded for the iterator\n */\n async *upstreamIterator(limit = Infinity, oneChunkMode) {\n let completeChunk = Buffer.alloc(0);\n // read from upstream chunk buffer\n while (limit && (await this.waitForNextChunk())) {\n // read until end or limit has been reached\n const chunk = this.pullFromChunkBuffer(limit);\n limit -= chunk.byteLength;\n if (oneChunkMode) {\n // return 1 chunk at the end of iteration\n completeChunk = Buffer.concat([completeChunk, chunk]);\n }\n else {\n // return many chunks throughout iteration\n yield {\n chunk,\n encoding: this.chunkBufferEncoding,\n };\n }\n }\n if (oneChunkMode) {\n yield {\n chunk: completeChunk,\n encoding: this.chunkBufferEncoding,\n };\n }\n }\n createURI(callback) {\n if (!callback) {\n return this.createURIAsync();\n }\n this.createURIAsync().then(r => callback(null, r), callback);\n }\n async createURIAsync() {\n const metadata = { ...this.metadata };\n const headers = {};\n // Delete content length and content type from metadata if they exist.\n // These are headers and should not be sent as part of the metadata.\n if (metadata.contentLength) {\n headers['X-Upload-Content-Length'] = metadata.contentLength.toString();\n delete metadata.contentLength;\n }\n if (metadata.contentType) {\n headers['X-Upload-Content-Type'] = metadata.contentType;\n delete metadata.contentType;\n }\n // Check if headers already exist before creating new ones\n const reqOpts = {\n method: 'POST',\n url: [this.baseURI, this.bucket, 'o'].join('/'),\n params: Object.assign({\n name: this.file,\n uploadType: 'resumable',\n }, this.params),\n data: metadata,\n headers: {\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`,\n ...headers,\n },\n };\n if (metadata.contentLength) {\n reqOpts.headers['X-Upload-Content-Length'] =\n metadata.contentLength.toString();\n }\n if (metadata.contentType) {\n reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType;\n }\n if (typeof this.generation !== 'undefined') {\n reqOpts.params.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName) {\n reqOpts.params.kmsKeyName = this.kmsKeyName;\n }\n if (this.predefinedAcl) {\n reqOpts.params.predefinedAcl = this.predefinedAcl;\n }\n if (this.origin) {\n reqOpts.headers.Origin = this.origin;\n }\n const 
uri = await retry(async (bail) => {\n var _a, _b, _c;\n try {\n const res = await this.makeRequest(reqOpts);\n // We have successfully got a URI we can now create a new invocation id\n this.currentInvocationId.uri = uuid.v4();\n return res.headers.location;\n }\n catch (err) {\n const e = err;\n const apiError = {\n code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status,\n name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText,\n message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText,\n errors: [\n {\n reason: e.code,\n },\n ],\n };\n if (this.retryOptions.maxRetries > 0 &&\n this.retryOptions.retryableErrorFn(apiError)) {\n throw e;\n }\n else {\n return bail(e);\n }\n }\n }, {\n retries: this.retryOptions.maxRetries,\n factor: this.retryOptions.retryDelayMultiplier,\n maxTimeout: this.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n this.uri = uri;\n this.offset = 0;\n return uri;\n }\n async continueUploading() {\n if (typeof this.offset === 'number') {\n this.startUploading();\n return;\n }\n await this.getAndSetOffset();\n this.startUploading();\n }\n async startUploading() {\n const multiChunkMode = !!this.chunkSize;\n let responseReceived = false;\n this.numChunksReadInRequest = 0;\n if (!this.offset) {\n this.offset = 0;\n }\n // Check if the offset (server) is too far behind the current stream\n if (this.offset < this.numBytesWritten) {\n const delta = this.numBytesWritten - this.offset;\n const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`;\n this.emit('error', new RangeError(message));\n return;\n }\n // Check if we should 'fast-forward' to the relevant data to upload\n if (this.numBytesWritten < this.offset) {\n // 'fast-forward' to the byte where we need to upload.\n // only push data from the byte after the one we left off on\n const fastForwardBytes = this.offset - this.numBytesWritten;\n for await (const _chunk of this.upstreamIterator(fastForwardBytes)) {\n _chunk; // discard the data up until the point we want\n }\n this.numBytesWritten = this.offset;\n }\n let expectedUploadSize = undefined;\n // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available\n if (typeof this.contentLength === 'number') {\n expectedUploadSize = this.contentLength - this.numBytesWritten;\n }\n // `expectedUploadSize` should be no more than the `chunkSize`.\n // It's possible this is the last chunk request for a multiple\n // chunk upload, thus smaller than the chunk size.\n if (this.chunkSize) {\n expectedUploadSize = expectedUploadSize\n ? Math.min(this.chunkSize, expectedUploadSize)\n : this.chunkSize;\n }\n // A queue for the upstream data\n const upstreamQueue = this.upstreamIterator(expectedUploadSize, multiChunkMode // multi-chunk mode should return 1 chunk per request\n );\n // The primary read stream for this request. 
This stream retrieves no more\n // than the exact requested amount from upstream.\n const requestStream = new stream_1.Readable({\n read: async () => {\n // Don't attempt to retrieve data upstream if we already have a response\n if (responseReceived)\n requestStream.push(null);\n const result = await upstreamQueue.next();\n if (result.value) {\n this.numChunksReadInRequest++;\n this.lastChunkSent = result.value.chunk;\n this.numBytesWritten += result.value.chunk.byteLength;\n this.emit('progress', {\n bytesWritten: this.numBytesWritten,\n contentLength: this.contentLength,\n });\n requestStream.push(result.value.chunk, result.value.encoding);\n }\n if (result.done) {\n requestStream.push(null);\n }\n },\n });\n const headers = {\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`,\n };\n // If using multiple chunk upload, set appropriate header\n if (multiChunkMode) {\n // We need to know how much data is available upstream to set the `Content-Range` header.\n const oneChunkIterator = this.upstreamIterator(expectedUploadSize, true);\n const { value } = await oneChunkIterator.next();\n const bytesToUpload = value.chunk.byteLength;\n // Important: we want to know if the upstream has ended and the queue is empty before\n // unshifting data back into the queue. This way we will know if this is the last request or not.\n const isLastChunkOfUpload = !(await this.waitForNextChunk());\n // Important: put the data back in the queue for the actual upload iterator\n this.unshiftChunkBuffer(value.chunk);\n let totalObjectSize = this.contentLength;\n if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) {\n // Let's let the server know this is the last chunk since\n // we didn't know the content-length beforehand.\n totalObjectSize = bytesToUpload + this.numBytesWritten;\n }\n // `- 1` as the ending byte is inclusive in the request.\n const endingByte = bytesToUpload + this.numBytesWritten - 1;\n // `Content-Length` for multiple chunk uploads is the size of the chunk,\n // not the overall object\n headers['Content-Length'] = bytesToUpload;\n headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`;\n }\n else {\n headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`;\n }\n const reqOpts = {\n method: 'PUT',\n url: this.uri,\n headers,\n body: requestStream,\n };\n try {\n const resp = await this.makeRequestStream(reqOpts);\n if (resp) {\n responseReceived = true;\n this.responseHandler(resp);\n }\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n // Process the API response to look for errors that came in\n // the response body.\n responseHandler(resp) {\n if (resp.data.error) {\n this.destroy(resp.data.error);\n return;\n }\n // At this point we can safely create a new id for the chunk\n this.currentInvocationId.chunk = uuid.v4();\n const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&\n resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&\n resp.headers.range;\n if (shouldContinueWithNextMultiChunkRequest) {\n // Use the upper value in this header to determine where to start the next chunk.\n // We should not assume that the server received all bytes sent in the request.\n // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const range = resp.headers.range;\n this.offset = 
Number(range.split('-')[1]) + 1;\n // We should not assume that the server received all bytes sent in the request.\n // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const missingBytes = this.numBytesWritten - this.offset;\n if (missingBytes) {\n const dataToPrependForResending = this.lastChunkSent.slice(-missingBytes);\n // As multi-chunk uploads send one chunk per request and pulls one\n // chunk into the pipeline, prepending the missing bytes back should\n // be fine for the next request.\n this.unshiftChunkBuffer(dataToPrependForResending);\n this.numBytesWritten -= missingBytes;\n this.lastChunkSent = Buffer.alloc(0);\n }\n // continue uploading next chunk\n this.continueUploading();\n }\n else if (!this.isSuccessfulResponse(resp.status)) {\n const err = new Error('Upload failed');\n err.code = resp.status;\n err.name = 'Upload failed';\n if (resp === null || resp === void 0 ? void 0 : resp.data) {\n err.errors = [resp === null || resp === void 0 ? void 0 : resp.data];\n }\n this.destroy(err);\n }\n else {\n // remove the last chunk sent to free memory\n this.lastChunkSent = Buffer.alloc(0);\n if (resp && resp.data) {\n resp.data.size = Number(resp.data.size);\n }\n this.emit('metadata', resp.data);\n // Allow the object (Upload) to continue naturally so the user's\n // \"finish\" event fires.\n this.emit('uploadFinished');\n }\n }\n async getAndSetOffset() {\n const opts = {\n method: 'PUT',\n url: this.uri,\n headers: {\n 'Content-Length': 0,\n 'Content-Range': 'bytes */*',\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`,\n },\n };\n try {\n const resp = await this.makeRequest(opts);\n // Successfully got the offset we can now create a new offset invocation id\n this.currentInvocationId.offset = uuid.v4();\n if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {\n if (resp.headers.range) {\n const range = resp.headers.range;\n this.offset = Number(range.split('-')[1]) + 1;\n return;\n }\n }\n this.offset = 0;\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n async makeRequest(reqOpts) {\n if (this.encryption) {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString();\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryption.hash.toString();\n }\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n // Let gaxios know we will handle a 308 error code ourselves.\n reqOpts.validateStatus = (status) => {\n return (this.isSuccessfulResponse(status) ||\n status === RESUMABLE_INCOMPLETE_STATUS_CODE);\n };\n const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts);\n const res = await this.authClient.request(combinedReqOpts);\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n return res;\n }\n async makeRequestStream(reqOpts) {\n const controller = new abort_controller_1.default();\n const errorCallback = () => controller.abort();\n this.once('error', errorCallback);\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n reqOpts.signal = controller.signal;\n reqOpts.validateStatus = () => true;\n const combinedReqOpts = 
extend(true, {}, this.customRequestOptions, reqOpts);\n const res = await this.authClient.request(combinedReqOpts);\n const successfulRequest = this.onResponse(res);\n this.removeListener('error', errorCallback);\n return successfulRequest ? res : null;\n }\n /**\n * @return {bool} is the request good?\n */\n onResponse(resp) {\n if (resp.status !== 200 &&\n this.retryOptions.retryableErrorFn({\n code: resp.status,\n message: resp.statusText,\n name: resp.statusText,\n })) {\n this.attemptDelayedRetry(resp);\n return false;\n }\n this.emit('response', resp);\n return true;\n }\n /**\n * @param resp GaxiosResponse object from previous attempt\n */\n attemptDelayedRetry(resp) {\n if (this.numRetries < this.retryOptions.maxRetries) {\n if (resp.status === NOT_FOUND_STATUS_CODE &&\n this.numChunksReadInRequest === 0) {\n this.startUploading();\n }\n else {\n const retryDelay = this.getRetryDelay();\n if (retryDelay <= 0) {\n this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`));\n return;\n }\n // Unshift the most recent chunk back in case it's needed for the next\n // request.\n this.numBytesWritten -= this.lastChunkSent.byteLength;\n this.unshiftChunkBuffer(this.lastChunkSent);\n this.lastChunkSent = Buffer.alloc(0);\n // We don't know how much data has been received by the server.\n // `continueUploading` will recheck the offset via `getAndSetOffset`.\n // If `offset` < `numberBytesReceived` then we will raise a RangeError\n // as we've streamed too much data that has been missed - this should\n // not be the case for multi-chunk uploads as `lastChunkSent` is the\n // body of the entire request.\n this.offset = undefined;\n setTimeout(this.continueUploading.bind(this), retryDelay);\n }\n this.numRetries++;\n }\n else {\n this.destroy(new Error('Retry limit exceeded - ' + resp.data));\n }\n }\n /**\n * @returns {number} the amount of time to wait before retrying the request\n */\n getRetryDelay() {\n const randomMs = Math.round(Math.random() * 1000);\n const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) *\n 1000 +\n randomMs;\n const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 -\n (Date.now() - this.timeOfFirstRequest);\n const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000;\n return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs);\n }\n /*\n * Prepare user-defined API endpoint for compatibility with our API.\n */\n sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Check if a given status code is 2xx\n *\n * @param status The status code to check\n * @returns if the status is 2xx\n */\n isSuccessfulResponse(status) {\n return status >= 200 && status < 300;\n }\n}\nexports.Upload = Upload;\nfunction upload(cfg) {\n return new Upload(cfg);\n}\nexports.upload = upload;\nfunction createURI(cfg, callback) {\n const up = new Upload(cfg);\n if (!callback) {\n return up.createURI();\n }\n up.createURI().then(r => callback(null, r), callback);\n}\nexports.createURI = createURI;\n//# sourceMappingURL=resumable-upload.js.map","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed 
under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0;\nconst crypto = require(\"crypto\");\nconst url = require(\"url\");\nconst storage_1 = require(\"./storage\");\nconst util_1 = require(\"./util\");\nvar SignerExceptionMessages;\n(function (SignerExceptionMessages) {\n SignerExceptionMessages[\"ACCESSIBLE_DATE_INVALID\"] = \"The accessible at date provided was invalid.\";\n SignerExceptionMessages[\"EXPIRATION_BEFORE_ACCESSIBLE_DATE\"] = \"An expiration date cannot be before accessible date.\";\n SignerExceptionMessages[\"X_GOOG_CONTENT_SHA256\"] = \"The header X-Goog-Content-SHA256 must be a hexadecimal string.\";\n})(SignerExceptionMessages = exports.SignerExceptionMessages || (exports.SignerExceptionMessages = {}));\n/*\n * Default signing version for getSignedUrl is 'v2'.\n */\nconst DEFAULT_SIGNING_VERSION = 'v2';\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\n/**\n * @const {string}\n * @private\n */\nexports.PATH_STYLED_HOST = 'https://storage.googleapis.com';\nclass URLSigner {\n constructor(authClient, bucket, file) {\n this.bucket = bucket;\n this.file = file;\n this.authClient = authClient;\n }\n getSignedUrl(cfg) {\n const expiresInSeconds = this.parseExpires(cfg.expires);\n const method = cfg.method;\n const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt);\n if (expiresInSeconds < accessibleAtInSeconds) {\n throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE);\n }\n let customHost;\n // Default style is `path`.\n const isVirtualHostedStyle = cfg.virtualHostedStyle || false;\n if (cfg.cname) {\n customHost = cfg.cname;\n }\n else if (isVirtualHostedStyle) {\n customHost = `https://${this.bucket.name}.storage.googleapis.com`;\n }\n const secondsToMilliseconds = 1000;\n const config = Object.assign({}, cfg, {\n method,\n expiration: expiresInSeconds,\n accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds),\n bucket: this.bucket.name,\n file: this.file ? (0, util_1.encodeURI)(this.file.name, false) : undefined,\n });\n if (customHost) {\n config.cname = customHost;\n }\n const version = cfg.version || DEFAULT_SIGNING_VERSION;\n let promise;\n if (version === 'v2') {\n promise = this.getSignedUrlV2(config);\n }\n else if (version === 'v4') {\n promise = this.getSignedUrlV4(config);\n }\n else {\n throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`);\n }\n return promise.then(query => {\n query = Object.assign(query, cfg.queryParams);\n const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n signedUrl.search = (0, util_1.qsStringify)(query);\n return signedUrl.href;\n });\n }\n getSignedUrlV2(config) {\n const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {});\n const resourcePath = this.getResourcePath(false, config.bucket, config.file);\n const blobToSign = [\n config.method,\n config.contentMd5 || '',\n config.contentType || '',\n config.expiration,\n canonicalHeadersString + resourcePath,\n ].join('\\n');\n const sign = async () => {\n const authClient = this.authClient;\n try {\n const signature = await authClient.sign(blobToSign);\n const credentials = await authClient.getCredentials();\n return {\n GoogleAccessId: credentials.client_email,\n Expires: config.expiration,\n Signature: signature,\n };\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n getSignedUrlV4(config) {\n config.accessibleAt = config.accessibleAt\n ? config.accessibleAt\n : new Date();\n const millisecondsToSeconds = 1.0 / 1000.0;\n const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds;\n // v4 limit expiration to be 7 days maximum\n if (expiresPeriodInSeconds > SEVEN_DAYS) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n const extensionHeaders = Object.assign({}, config.extensionHeaders);\n const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n extensionHeaders.host = fqdn.host;\n if (config.contentMd5) {\n extensionHeaders['content-md5'] = config.contentMd5;\n }\n if (config.contentType) {\n extensionHeaders['content-type'] = config.contentType;\n }\n let contentSha256;\n const sha256Header = extensionHeaders['x-goog-content-sha256'];\n if (sha256Header) {\n if (typeof sha256Header !== 'string' ||\n !/[A-Fa-f0-9]{40}/.test(sha256Header)) {\n throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256);\n }\n contentSha256 = sha256Header;\n }\n const signedHeaders = Object.keys(extensionHeaders)\n .map(header => header.toLowerCase())\n .sort()\n .join(';');\n const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders);\n const datestamp = (0, util_1.formatAsUTCISO)(config.accessibleAt);\n const credentialScope = `${datestamp}/auto/storage/goog4_request`;\n const sign = async () => {\n const credentials = await this.authClient.getCredentials();\n const credential = `${credentials.client_email}/${credentialScope}`;\n const dateISO = (0, util_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true);\n const queryParams = {\n 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256',\n 'X-Goog-Credential': credential,\n 'X-Goog-Date': dateISO,\n 'X-Goog-Expires': expiresPeriodInSeconds.toString(10),\n 'X-Goog-SignedHeaders': signedHeaders,\n ...(config.queryParams || {}),\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const canonicalQueryParams = this.getCanonicalQueryParams(queryParams);\n const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256);\n const hash = crypto\n .createHash('sha256')\n .update(canonicalRequest)\n .digest('hex');\n const blobToSign = [\n 'GOOG4-RSA-SHA256',\n dateISO,\n credentialScope,\n hash,\n ].join('\\n');\n try {\n const signature = await this.authClient.sign(blobToSign);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n const signedQuery = Object.assign({}, queryParams, {\n 'X-Goog-Signature': signatureHex,\n });\n return signedQuery;\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n /**\n * Create canonical headers for signing v4 url.\n *\n * The canonical headers for v4-signing a request demands header names are\n * first lowercased, followed by sorting the header names.\n * Then, construct the canonical headers part of the request:\n * + \":\" + Trim() + \"\\n\"\n * ..\n * + \":\" + Trim() + \"\\n\"\n *\n * @param headers\n * @private\n */\n getCanonicalHeaders(headers) {\n // Sort headers by their lowercased names\n const sortedHeaders = (0, util_1.objectEntries)(headers)\n // Convert header names to lowercase\n .map(([headerName, value]) => [\n headerName.toLowerCase(),\n value,\n ])\n .sort((a, b) => a[0].localeCompare(b[0]));\n return sortedHeaders\n .filter(([, value]) => value !== undefined)\n .map(([headerName, value]) => {\n // - Convert Array (multi-valued header) into string, delimited by\n // ',' (no space).\n // - Trim leading and trailing spaces.\n // - Convert sequential (2+) spaces into a single space\n const canonicalValue = `${value}`.trim().replace(/\\s{2,}/g, ' ');\n return `${headerName}:${canonicalValue}\\n`;\n })\n .join('');\n }\n getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) {\n return [\n method,\n path,\n query,\n headers,\n signedHeaders,\n contentSha256 || 'UNSIGNED-PAYLOAD',\n ].join('\\n');\n }\n getCanonicalQueryParams(query) {\n return (0, util_1.objectEntries)(query)\n .map(([key, value]) => [(0, util_1.encodeURI)(key, true), (0, util_1.encodeURI)(value, true)])\n .sort((a, b) => (a[0] < b[0] ? 
-1 : 1))\n .map(([key, value]) => `${key}=${value}`)\n .join('&');\n }\n getResourcePath(cname, bucket, file) {\n if (cname) {\n return '/' + (file || '');\n }\n else if (file) {\n return `/${bucket}/${file}`;\n }\n else {\n return `/${bucket}`;\n }\n }\n parseExpires(expires, current = new Date()) {\n const expiresInMSeconds = new Date(expires).valueOf();\n if (isNaN(expiresInMSeconds)) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expiresInMSeconds < current.valueOf()) {\n throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n return Math.round(expiresInMSeconds / 1000); // The API expects seconds.\n }\n parseAccessibleAt(accessibleAt) {\n const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf();\n if (isNaN(accessibleAtInMSeconds)) {\n throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID);\n }\n return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds.\n }\n}\nexports.URLSigner = URLSigner;\n/**\n * Custom error type for errors related to getting signed errors and policies.\n *\n * @private\n */\nclass SigningError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'SigningError';\n }\n}\nexports.SigningError = SigningError;\n//# sourceMappingURL=signer.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0;\nconst nodejs_common_1 = require(\"./nodejs-common\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst stream_1 = require(\"stream\");\nconst bucket_1 = require(\"./bucket\");\nconst channel_1 = require(\"./channel\");\nconst file_1 = require(\"./file\");\nconst util_1 = require(\"./util\");\nconst hmacKey_1 = require(\"./hmacKey\");\nconst crc32c_1 = require(\"./crc32c\");\nvar IdempotencyStrategy;\n(function (IdempotencyStrategy) {\n IdempotencyStrategy[IdempotencyStrategy[\"RetryAlways\"] = 0] = \"RetryAlways\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryConditional\"] = 1] = \"RetryConditional\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryNever\"] = 2] = \"RetryNever\";\n})(IdempotencyStrategy = exports.IdempotencyStrategy || (exports.IdempotencyStrategy = {}));\nvar ExceptionMessages;\n(function (ExceptionMessages) {\n ExceptionMessages[\"EXPIRATION_DATE_INVALID\"] = \"The expiration date provided was invalid.\";\n ExceptionMessages[\"EXPIRATION_DATE_PAST\"] = \"An expiration date cannot be in the past.\";\n ExceptionMessages[\"INVALID_ACTION\"] = \"The action is not provided or 
invalid.\";\n})(ExceptionMessages = exports.ExceptionMessages || (exports.ExceptionMessages = {}));\nvar StorageExceptionMessages;\n(function (StorageExceptionMessages) {\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED\"] = \"A bucket name is needed to use Cloud Storage.\";\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED_CREATE\"] = \"A name is required to create a bucket.\";\n StorageExceptionMessages[\"HMAC_SERVICE_ACCOUNT\"] = \"The first argument must be a service account email to create an HMAC key.\";\n StorageExceptionMessages[\"HMAC_ACCESS_ID\"] = \"An access ID is needed to create an HmacKey object.\";\n})(StorageExceptionMessages = exports.StorageExceptionMessages || (exports.StorageExceptionMessages = {}));\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n */\nexports.AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DEFAULT = 3;\n/**\n * Default behavior: Wait twice as long as previous retry before retrying.\n *\n * @const {number}\n */\nexports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2;\n/**\n * Default behavior: If the operation doesn't succeed after 600 seconds,\n * stop retrying.\n *\n * @const {number}\n */\nexports.TOTAL_TIMEOUT_DEFAULT = 600;\n/**\n * Default behavior: Wait no more than 64 seconds between retries.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DELAY_DEFAULT = 64;\n/**\n * Default behavior: Retry conditionally idempotent operations if correct preconditions are set.\n *\n * @const {enum}\n * @private\n */\nconst IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional;\n/**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted.\n * @const\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\nconst RETRYABLE_ERR_FN_DEFAULT = function (err) {\n var _a;\n const isConnectionProblem = (reason) => {\n return (reason.includes('eai_again') || // DNS lookup error\n reason === 'econnreset' ||\n reason === 'unexpected connection closure' ||\n reason === 'epipe');\n };\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (typeof err.code === 'string') {\n const reason = err.code.toLowerCase();\n if (isConnectionProblem(reason)) {\n return true;\n }\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase();\n if (reason && isConnectionProblem(reason)) {\n return true;\n }\n }\n }\n }\n return false;\n};\nexports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT;\n/*! Developer Documentation\n *\n * Invoke this method to create a new Storage object bound with pre-determined\n * configuration options. For each object that can be created (e.g., a bucket),\n * there is an equivalent static and instance method. While they are classes,\n * they can be instantiated without use of the `new` keyword.\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. 
ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * This object provides constants to refer to the three permission levels that\n * can be granted to an entity:\n *\n * - `gcs.acl.OWNER_ROLE` - (\"OWNER\")\n * - `gcs.acl.READER_ROLE` - (\"READER\")\n * - `gcs.acl.WRITER_ROLE` - (\"WRITER\")\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists}\n *\n * @name Storage#acl\n * @type {object}\n * @property {string} OWNER_ROLE\n * @property {string} READER_ROLE\n * @property {string} WRITER_ROLE\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n *\n * //-\n * // Make all of the files currently in a bucket publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * albums.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // Make any new objects added to a bucket publicly readable.\n * //-\n * albums.acl.default.add(options, function(err, aclObject) {});\n *\n * //-\n * // Grant a user ownership permissions to a bucket.\n * //-\n * albums.acl.add({\n * entity: 'user-useremail@example.com',\n * role: storage.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * albums.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n/**\n * Get {@link Bucket} objects for all of the buckets in your project as\n * a readable object stream.\n *\n * @method Storage#getBucketsStream\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @returns {ReadableStream} A readable stream that emits {@link Bucket}\n * instances.\n *\n * @example\n * ```\n * storage.getBucketsStream()\n * .on('error', console.error)\n * .on('data', function(bucket) {\n * // bucket is a Bucket object.\n * })\n * .on('end', function() {\n * // All buckets retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getBucketsStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n * Get {@link HmacKey} objects for all of the HMAC keys in the project in a\n * readable object stream.\n *\n * @method Storage#getHmacKeysStream\n * @param {GetHmacKeysOptions} [options] Configuration options.\n * @returns {ReadableStream} A readable stream that emits {@link HmacKey}\n * instances.\n *\n * @example\n * ```\n * storage.getHmacKeysStream()\n * .on('error', console.error)\n * .on('data', function(hmacKey) {\n * // hmacKey is an HmacKey object.\n * })\n * .on('end', function() {\n * // All HmacKey retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getHmacKeysStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n *
<h4>ACLs</h4>
\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share files with other users\n * and allow other users to access your buckets and files.\n *\n * To learn more about ACLs, read this overview on\n * {@link https://cloud.google.com/storage/docs/access-control| Access Control}.\n *\n * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview}\n * See {@link https://cloud.google.com/storage/docs/access-control| Access Control}\n *\n * @class\n */\nclass Storage extends nodejs_common_1.Service {\n getBucketsStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n getHmacKeysStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} StorageOptions\n * @property {string} [projectId] The project ID from the Google Developer's\n * Console, e.g. 'grape-spaceship-123'. We will also check the environment\n * variable `GCLOUD_PROJECT` for your project ID. If your app is running\n * in an environment which supports {@link\n * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application\n * Application Default Credentials}, your project ID will be detected\n * automatically.\n * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key\n * downloaded from the Google Developers Console. If you provide a path to\n * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and\n * .p12 require you to specify the `email` option as well.\n * @property {string} [email] Account email address. Required when using a .pem\n * or .p12 keyFilename.\n * @property {object} [credentials] Credentials object.\n * @property {string} [credentials.client_email]\n * @property {string} [credentials.private_key]\n * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors\n * will be retried with exponential delay between them dictated by the formula\n * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout\n * has been reached. Retries will only happen if autoRetry is set to true.\n * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to\n * increase the delay time between the completion of failed requests, and the\n * initiation of the subsequent retrying request.\n * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from\n * when the initial request is sent, after which an error will\n * be returned, regardless of the retrying attempts made meanwhile.\n * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests.\n * When this value is reached, ``retryDelayMultiplier`` will no longer be used to\n * increase delay time.\n * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries\n * attempted before returning the error.\n * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given\n * error should be retried and false otherwise.\n * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration\n * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways -\n * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional -\n * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never\n * retry a conditionally idempotent operation.\n * @property {string} [userAgent] The value to be prepended to the User-Agent\n * header in API requests.\n * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one.\n * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out.\n * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned.\n * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests.\n * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. 
Defaults to {@link CRC32C}\n */\n /**\n * Constructs the Storage client.\n *\n * @example\n * Create a client that uses Application Default Credentials\n * (ADC)\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * ```\n *\n * @example\n * Create a client with explicit credentials\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * keyFilename: '/path/to/keyfile.json'\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by value as a JavaScript object\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: {\n * type: 'service_account',\n * project_id: 'xxxxxxx',\n * private_key_id: 'xxxx',\n * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\\n-----END PRIVATE KEY-----\\n',\n * client_email: 'xxxx',\n * client_id: 'xxx',\n * auth_uri: 'https://accounts.google.com/o/oauth2/auth',\n * token_uri: 'https://oauth2.googleapis.com/token',\n * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs',\n * client_x509_cert_url: 'xxx',\n * }\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by loading a JSON file directly from disk\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: require('/path/to-keyfile.json')\n * });\n * ```\n *\n * @example\n * Create a client with an `AuthClient` (e.g. `DownscopedClient`)\n * ```\n * const {DownscopedClient} = require('google-auth-library');\n * const authClient = new DownscopedClient({...});\n *\n * const storage = new Storage({authClient});\n * ```\n *\n * Additional samples:\n * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1\n * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js\n *\n * @param {StorageOptions} [options] Configuration options.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p;\n let apiEndpoint = 'https://storage.googleapis.com';\n let customEndpoint = false;\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST;\n if (typeof EMULATOR_HOST === 'string') {\n apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST);\n customEndpoint = true;\n }\n if (options.apiEndpoint) {\n apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint);\n customEndpoint = true;\n }\n options = Object.assign({}, options, { apiEndpoint });\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`;\n const config = {\n apiEndpoint: options.apiEndpoint,\n retryOptions: {\n autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined\n ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry\n : exports.AUTO_RETRY_DEFAULT,\n maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)\n ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries\n : exports.MAX_RETRY_DEFAULT,\n retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier)\n ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier\n : exports.RETRY_DELAY_MULTIPLIER_DEFAULT,\n totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout)\n ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout\n : exports.TOTAL_TIMEOUT_DEFAULT,\n maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay)\n ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay\n : exports.MAX_RETRY_DELAY_DEFAULT,\n retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn)\n ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn\n : exports.RETRYABLE_ERR_FN_DEFAULT,\n idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined\n ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy\n : IDEMPOTENCY_STRATEGY_DEFAULT,\n },\n baseUrl,\n customEndpoint,\n useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint,\n projectIdRequired: false,\n scopes: [\n 'https://www.googleapis.com/auth/iam',\n 'https://www.googleapis.com/auth/cloud-platform',\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ],\n packageJson: require('../../package.json'),\n };\n super(config, options);\n /**\n * Reference to {@link Storage.acl}.\n *\n * @name Storage#acl\n * @see Storage.acl\n */\n this.acl = Storage.acl;\n this.crc32cGenerator =\n options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n this.retryOptions = config.retryOptions;\n this.getBucketsStream = paginator_1.paginator.streamify('getBuckets');\n this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys');\n }\n static sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Get a reference to a Cloud Storage bucket.\n *\n * @param {string} name Name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to\n * encrypt objects inserted into this bucket, if no encryption method is\n * specified.\n * @param {string} [options.userProject] User project to be billed for all\n * requests made from this Bucket object.\n * @returns {Bucket}\n * @see Bucket\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n * const photos = storage.bucket('photos');\n * ```\n */\n bucket(name, options) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED);\n }\n return new bucket_1.Bucket(this, name, options);\n }\n /**\n * Reference a channel to receive notifications about changes to your bucket.\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n * @returns {Channel}\n * @see Channel\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\n channel(id, resourceId) {\n return new channel_1.Channel(this, id, resourceId);\n }\n /**\n * @typedef {array} CreateBucketResponse\n * @property {Bucket} 0 The new {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The new {@link Bucket}.\n * @param {object} 
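// Illustrative sketch only (not part of the bundled source; the values shown are
// hypothetical): how the retryOptions documented above are typically supplied to
// the constructor. Anything omitted falls back to the exported defaults
// (AUTO_RETRY_DEFAULT, MAX_RETRY_DEFAULT, RETRY_DELAY_MULTIPLIER_DEFAULT, ...)
// exactly as the constructor body above resolves them.
const {Storage} = require('@google-cloud/storage');
const storageWithRetries = new Storage({
  retryOptions: {
    autoRetry: true,         // retry rate-limit and transient server errors
    maxRetries: 5,           // overrides the documented default of 3
    retryDelayMultiplier: 2, // backoff multiplier between attempts
    maxRetryDelay: 64,       // cap on the delay between attempts (documented default 64)
    totalTimeout: 600,       // overall retry budget (documented default 600)
  },
});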
apiResponse The full API response.\n */\n /**\n * Metadata to set for the bucket.\n *\n * @typedef {object} CreateBucketRequest\n * @property {boolean} [archive=false] Specify the storage class as Archive.\n * @property {object} [autoclass.enabled=false] Specify whether Autoclass is\n * enabled for the bucket.\n * @property {boolean} [coldline=false] Specify the storage class as Coldline.\n * @property {Cors[]} [cors=[]] Specify the CORS configuration to use.\n * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [dra=false] Specify the storage class as Durable Reduced\n * Availability.\n * @property {string} [location] Specify the bucket's location. If specifying\n * a dual-region, the `customPlacementConfig` property should be set in conjunction.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [multiRegional=false] Specify the storage class as\n * Multi-Regional.\n * @property {boolean} [nearline=false] Specify the storage class as Nearline.\n * @property {boolean} [regional=false] Specify the storage class as Regional.\n * @property {boolean} [requesterPays=false] **Early Access Testers Only**\n * Force the use of the User Project metadata field to assign operational\n * costs when an operation is made on a Bucket and its objects.\n * @property {string} [rpo] For dual-region buckets, controls whether turbo\n * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`).\n * @property {boolean} [standard=true] Specify the storage class as Standard.\n * @property {string} [storageClass] The new storage class. (`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @property {Versioning} [versioning=undefined] Specify the versioning status.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * Create a bucket.\n *\n * Cloud Storage uses a flat namespace, so you can't create a bucket with\n * a name that is already in use. For more information, see\n * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} name Name of the bucket to create.\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a name is not provided.\n * @see Bucket#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const callback = function(err, bucket, apiResponse) {\n * // `bucket` is a Bucket object.\n * };\n *\n * storage.createBucket('new-bucket', callback);\n *\n * //-\n * // Create a bucket in a specific location and region. 
See the \n * // Official JSON API docs for complete details on the `location`\n * option.\n * // \n * //-\n * const metadata = {\n * location: 'US-CENTRAL1',\n * regional: true\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Create a bucket with a retention policy of 6 months.\n * //-\n * const metadata = {\n * retentionPolicy: {\n * retentionPeriod: 15780000 // 6 months in seconds.\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Enable versioning on a new bucket.\n * //-\n * const metadata = {\n * versioning: {\n * enabled: true\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createBucket('new-bucket').then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_create_bucket\n * Another example:\n */\n createBucket(name, metadataOrCallback, callback) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE);\n }\n let metadata;\n if (!callback) {\n callback = metadataOrCallback;\n metadata = {};\n }\n else {\n metadata = metadataOrCallback;\n }\n const body = {\n ...metadata,\n name,\n };\n const storageClasses = {\n archive: 'ARCHIVE',\n coldline: 'COLDLINE',\n dra: 'DURABLE_REDUCED_AVAILABILITY',\n multiRegional: 'MULTI_REGIONAL',\n nearline: 'NEARLINE',\n regional: 'REGIONAL',\n standard: 'STANDARD',\n };\n const storageClassKeys = Object.keys(storageClasses);\n for (const storageClass of storageClassKeys) {\n if (body[storageClass]) {\n if (metadata.storageClass && metadata.storageClass !== storageClass) {\n throw new Error(`Both \\`${storageClass}\\` and \\`storageClass\\` were provided.`);\n }\n body.storageClass = storageClasses[storageClass];\n delete body[storageClass];\n }\n }\n if (body.requesterPays) {\n body.billing = {\n requesterPays: body.requesterPays,\n };\n delete body.requesterPays;\n }\n const query = {\n project: this.projectId,\n };\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n this.request({\n method: 'POST',\n uri: '/b',\n qs: query,\n json: body,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const bucket = this.bucket(name);\n bucket.metadata = resp;\n callback(null, bucket, resp);\n });\n }\n /**\n * @typedef {object} CreateHmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. If not provided,\n * the project ID used to instantiate the Storage client will be used.\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {object} HmacKeyMetadata\n * @property {string} accessId The access id identifies which HMAC key was\n * used to sign a request when authenticating with HMAC.\n * @property {string} etag Used to perform a read-modify-write of the key.\n * @property {string} id The resource name of the HMAC key.\n * @property {string} projectId The project ID.\n * @property {string} serviceAccountEmail The service account's email this\n * HMAC key is created for.\n * @property {string} state The state of this HMAC key. 
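// Illustrative sketch only (not part of the bundled source; the bucket name is
// hypothetical): per the createBucket body above, the boolean storage-class
// shorthands and `requesterPays` are rewritten before the POST /b request is sent.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
storage.createBucket(
  'my-coldline-bucket',
  {coldline: true, requesterPays: true},
  (err, bucket, apiResponse) => {
    // The JSON body sent above is equivalent to:
    //   {name: 'my-coldline-bucket', storageClass: 'COLDLINE', billing: {requesterPays: true}}
    // Supplying both `coldline: true` and a conflicting `storageClass` throws
    // "Both `coldline` and `storageClass` were provided."
    if (!err) console.log(`Created ${bucket.name}`);
  }
);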
One of \"ACTIVE\",\n * \"INACTIVE\" or \"DELETED\".\n * @property {string} timeCreated The creation time of the HMAC key in\n * RFC 3339 format.\n * @property {string} [updated] The time this HMAC key was last updated in\n * RFC 3339 format.\n */\n /**\n * @typedef {array} CreateHmacKeyResponse\n * @property {HmacKey} 0 The HmacKey instance created from API response.\n * @property {string} 1 The HMAC key's secret used to access the XML API.\n * @property {object} 3 The raw API response.\n */\n /**\n * @callback CreateHmacKeyCallback Callback function.\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey The HmacKey instance created from API response.\n * @param {string} secret The HMAC key's secret used to access the XML API.\n * @param {object} apiResponse The raw API response.\n */\n /**\n * Create an HMAC key associated with an service account to authenticate\n * requests to the Cloud Storage XML API.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @param {string} serviceAccountEmail The service account's email address\n * with which the HMAC key is created for.\n * @param {CreateHmacKeyCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('google-cloud/storage');\n * const storage = new Storage();\n *\n * // Replace with your service account's email address\n * const serviceAccountEmail =\n * 'my-service-account@appspot.gserviceaccount.com';\n *\n * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) {\n * if (!err) {\n * // Securely store the secret for use with the XML API.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createHmacKey(serviceAccountEmail)\n * .then((response) => {\n * const hmacKey = response[0];\n * const secret = response[1];\n * // Securely store the secret for use with the XML API.\n * });\n * ```\n */\n createHmacKey(serviceAccountEmail, optionsOrCb, cb) {\n if (typeof serviceAccountEmail !== 'string') {\n throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT);\n }\n const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options, { serviceAccountEmail });\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n method: 'POST',\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const metadata = resp.metadata;\n const hmacKey = this.hmacKey(metadata.accessId, {\n projectId: metadata.projectId,\n });\n hmacKey.metadata = resp.metadata;\n callback(null, hmacKey, resp.secret, resp);\n });\n }\n /**\n * Query object for listing buckets.\n *\n * @typedef {object} GetBucketsRequest\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the 
larger set of results to view.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * @typedef {array} GetBucketsResponse\n * @property {Bucket[]} 0 Array of {@link Bucket} instances.\n * @property {object} 1 nextQuery A query object to receive more results.\n * @property {object} 2 The full API response.\n */\n /**\n * @callback GetBucketsCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket[]} buckets Array of {@link Bucket} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get Bucket objects for all of the buckets in your project.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation}\n *\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @param {GetBucketsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * storage.getBuckets(function(err, buckets) {\n * if (!err) {\n * // buckets is an array of Bucket objects.\n * }\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, buckets, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * storage.getBuckets(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * buckets[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * buckets[0].getMetadata(function(err, metadata, apiResponse) {});\n * };\n *\n * storage.getBuckets({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getBuckets().then(function(data) {\n * const buckets = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_list_buckets\n * Another example:\n */\n getBuckets(optionsOrCallback, cb) {\n const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb);\n options.project = options.project || this.projectId;\n this.request({\n uri: '/b',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const buckets = itemsArray.map((bucket) => {\n const bucketInstance = this.bucket(bucket.id);\n bucketInstance.metadata = bucket;\n return bucketInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, buckets, nextQuery, resp);\n });\n }\n getHmacKeys(optionsOrCb, cb) {\n const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options);\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? 
resp.items : [];\n const hmacKeys = itemsArray.map((hmacKey) => {\n const hmacKeyInstance = this.hmacKey(hmacKey.accessId, {\n projectId: hmacKey.projectId,\n });\n hmacKeyInstance.metadata = hmacKey;\n return hmacKeyInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, hmacKeys, nextQuery, resp);\n });\n }\n /**\n * @typedef {array} GetServiceAccountResponse\n * @property {object} 0 The service account resource.\n * @property {object} 1 The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * @callback GetServiceAccountCallback\n * @param {?Error} err Request error, if any.\n * @param {object} serviceAccount The serviceAccount resource.\n * @param {string} serviceAccount.emailAddress The service account email\n * address.\n * @param {object} apiResponse The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * Get the email address of this project's Google Cloud Storage service\n * account.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource}\n *\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project to be billed for this\n * request.\n * @param {GetServiceAccountCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * storage.getServiceAccount(function(err, serviceAccount, apiResponse) {\n * if (!err) {\n * const serviceAccountEmail = serviceAccount.emailAddress;\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getServiceAccount().then(function(data) {\n * const serviceAccountEmail = data[0].emailAddress;\n * const apiResponse = data[1];\n * });\n * ```\n */\n getServiceAccount(optionsOrCallback, cb) {\n const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb);\n this.request({\n uri: `/projects/${this.projectId}/serviceAccount`,\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const camelCaseResponse = {};\n for (const prop in resp) {\n // eslint-disable-next-line no-prototype-builtins\n if (resp.hasOwnProperty(prop)) {\n const camelCaseProp = prop.replace(/_(\\w)/g, (_, match) => match.toUpperCase());\n camelCaseResponse[camelCaseProp] = resp[prop];\n }\n }\n callback(null, camelCaseResponse, resp);\n });\n }\n /**\n * Get a reference to an HmacKey object.\n * Note: this does not fetch the HMAC key's metadata. 
Use HmacKey#get() to\n * retrieve and populate the metadata.\n *\n * To get a reference to an HMAC key that's not created for a service\n * account in the same project used to instantiate the Storage client,\n * supply the project's ID as `projectId` in the `options` argument.\n *\n * @param {string} accessId The HMAC key's access ID.\n * @param {HmacKeyOptions} options HmacKey constructor options.\n * @returns {HmacKey}\n * @see HmacKey\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * ```\n */\n hmacKey(accessId, options) {\n if (!accessId) {\n throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID);\n }\n return new hmacKey_1.HmacKey(this, accessId, options);\n }\n}\nexports.Storage = Storage;\n/**\n * {@link Bucket} class.\n *\n * @name Storage.Bucket\n * @see Bucket\n * @type {Constructor}\n */\nStorage.Bucket = bucket_1.Bucket;\n/**\n * {@link Channel} class.\n *\n * @name Storage.Channel\n * @see Channel\n * @type {Constructor}\n */\nStorage.Channel = channel_1.Channel;\n/**\n * {@link File} class.\n *\n * @name Storage.File\n * @see File\n * @type {Constructor}\n */\nStorage.File = file_1.File;\n/**\n * {@link HmacKey} class.\n *\n * @name Storage.HmacKey\n * @see HmacKey\n * @type {Constructor}\n */\nStorage.HmacKey = hmacKey_1.HmacKey;\nStorage.acl = {\n OWNER_ROLE: 'OWNER',\n READER_ROLE: 'READER',\n WRITER_ROLE: 'WRITER',\n};\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']);\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Storage, {\n exclude: ['bucket', 'channel', 'hmacKey'],\n});\n//# sourceMappingURL=storage.js.map","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. 
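// Illustrative sketch only (not part of the bundled source): a consequence of the
// paginator/promisifyAll wiring just above. getBuckets and getHmacKeys paginate
// automatically and return Promises when the callback is omitted, while bucket(),
// channel() and hmacKey() stay synchronous factories that only build local references.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
async function listFirstBuckets() {
  const [buckets] = await storage.getBuckets({maxResults: 50});
  return buckets.map(b => b.name);
}
const albums = storage.bucket('albums'); // no API call happens here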
All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TransferManager = void 0;\nconst pLimit = require(\"p-limit\");\nconst path = require(\"path\");\nconst extend = require(\"extend\");\nconst fs_1 = require(\"fs\");\nconst crc32c_1 = require(\"./crc32c\");\n/**\n * Default number of concurrently executing promises to use when calling uploadManyFiles.\n * @experimental\n */\nconst DEFAULT_PARALLEL_UPLOAD_LIMIT = 2;\n/**\n * Default number of concurrently executing promises to use when calling downloadManyFiles.\n * @experimental\n */\nconst DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 2;\n/**\n * Default number of concurrently executing promises to use when calling downloadFileInChunks.\n * @experimental\n */\nconst DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 2;\n/**\n * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks.\n * @experimental\n */\nconst DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024;\n/**\n * The chunk size in bytes to use when calling downloadFileInChunks.\n * @experimental\n */\nconst DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 10 * 1024 * 1024;\nconst EMPTY_REGEX = '(?:)';\n/**\n * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket A {@link Bucket} instance\n * @experimental\n */\nclass TransferManager {\n constructor(bucket) {\n this.bucket = bucket;\n }\n /**\n * @typedef {object} UploadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when uploading the files.\n * @property {boolean} [skipIfExists] Do not upload the file if it already exists in\n * the bucket. This will set the precondition ifGenerationMatch = 0.\n * @property {string} [prefix] A prefix to append to all of the uploaded files.\n * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through\n * to each individual upload operation.\n * @experimental\n */\n /**\n * Upload multiple files in parallel to the bucket. 
This is a convenience method\n * that utilizes {@link Bucket#upload} to perform the upload.\n *\n * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name.\n * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list.\n * to be uploaded to the bucket\n * @param {UploadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Upload multiple files in parallel.\n * //-\n * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']);\n * // Your bucket now contains:\n * // - \"local/path/file1.txt\" (with the contents of '/local/path/file1.txt')\n * // - \"local/path/file2.txt\" (with the contents of '/local/path/file2.txt')\n * const response = await transferManager.uploadManyFiles('/local/directory');\n * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure.\n * ```\n * @experimental\n */\n async uploadManyFiles(filePathsOrDirectory, options = {}) {\n var _a;\n if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) {\n options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0;\n }\n else if (options.skipIfExists &&\n options.passthroughOptions === undefined) {\n options.passthroughOptions = {\n preconditionOpts: {\n ifGenerationMatch: 0,\n },\n };\n }\n const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT);\n const promises = [];\n let allPaths = [];\n if (!Array.isArray(filePathsOrDirectory)) {\n for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) {\n allPaths.push(curPath);\n }\n }\n else {\n allPaths = filePathsOrDirectory;\n }\n for (const filePath of allPaths) {\n const stat = await fs_1.promises.lstat(filePath);\n if (stat.isDirectory()) {\n continue;\n }\n const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions);\n passThroughOptionsCopy.destination = filePath;\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.join(options.prefix, passThroughOptionsCopy.destination);\n }\n promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the files.\n * @property {string} [prefix] A prefix to append to all of the downloaded files.\n * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files.\n * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through\n * to each individual download operation.\n * @experimental\n */\n /**\n * Download multiple files in parallel to the local filesystem. This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. 
If\n * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded.\n * @param {DownloadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download multiple files in parallel.\n * //-\n * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles('test-folder');\n * // All files with GCS prefix of 'test-folder' have been downloaded.\n * ```\n * @experimental\n */\n async downloadManyFiles(filesOrFolder, options = {}) {\n const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT);\n const promises = [];\n let files = [];\n if (!Array.isArray(filesOrFolder)) {\n const directoryFiles = await this.bucket.getFiles({\n prefix: filesOrFolder,\n });\n files = directoryFiles[0];\n }\n else {\n files = filesOrFolder.map(curFile => {\n if (typeof curFile === 'string') {\n return this.bucket.file(curFile);\n }\n return curFile;\n });\n }\n const stripRegexString = options.stripPrefix\n ? `^${options.stripPrefix}`\n : EMPTY_REGEX;\n const regex = new RegExp(stripRegexString, 'g');\n for (const file of files) {\n const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions);\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name);\n }\n if (options.stripPrefix) {\n passThroughOptionsCopy.destination = file.name.replace(regex, '');\n }\n promises.push(limit(() => file.download(passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadFileInChunksOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the file.\n * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.\n * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete.\n * @experimental\n */\n /**\n * Download a large file in chunks utilizing parallel download operations. 
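// Illustrative sketch only (not part of the bundled source; bucket, prefix and
// object names are hypothetical, and TransferManager is assumed to be exported
// from the package entry point): per downloadManyFiles above, a string argument
// is treated as a GCS prefix, `stripPrefix` is removed from each object name via
// a ^-anchored regex, and `prefix` would be path.join'ed onto the destination.
const {Storage, TransferManager} = require('@google-cloud/storage');
const transferManager = new TransferManager(new Storage().bucket('my-bucket'));
async function mirrorBackups() {
  // Downloads every object under 'backups/2024', writing e.g.
  // 'backups/2024/db.sql' to the local path '2024/db.sql'.
  await transferManager.downloadManyFiles('backups/2024', {
    stripPrefix: 'backups/',
    concurrencyLimit: 4,
  });
}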
This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {object} [file | string] {@link File} to download.\n * @param {DownloadFileInChunksOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download a large file in chunks utilizing parallel operations.\n * //-\n * const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt');\n * // Your local directory now contains:\n * // - \"large-file.txt\" (with the contents from my-bucket.large-file.txt)\n * ```\n * @experimental\n */\n async downloadFileInChunks(fileOrName, options = {}) {\n let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;\n let limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT);\n const promises = [];\n const file = typeof fileOrName === 'string'\n ? this.bucket.file(fileOrName)\n : fileOrName;\n const fileInfo = await file.get();\n const size = parseInt(fileInfo[0].metadata.size);\n // If the file size does not meet the threshold download it as a single chunk.\n if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) {\n limit = pLimit(1);\n chunkSize = size;\n }\n let start = 0;\n const filePath = options.destination || path.basename(file.name);\n const fileToWrite = await fs_1.promises.open(filePath, 'w+');\n while (start < size) {\n const chunkStart = start;\n let chunkEnd = start + chunkSize - 1;\n chunkEnd = chunkEnd > size ? size : chunkEnd;\n promises.push(limit(() => file.download({ start: chunkStart, end: chunkEnd }).then(resp => {\n return fileToWrite.write(resp[0], 0, resp[0].length, chunkStart);\n })));\n start += chunkSize;\n }\n return new Promise((resolve, reject) => {\n let results;\n Promise.all(promises)\n .then(data => {\n results = data.map(result => result.buffer);\n if (options.validation === 'crc32c') {\n return crc32c_1.CRC32C.fromFile(filePath);\n }\n return;\n })\n .then(() => {\n resolve(results);\n })\n .catch(e => {\n reject(e);\n })\n .finally(() => {\n fileToWrite.close();\n });\n });\n }\n async *getPathsFromDirectory(directory) {\n const filesAndSubdirectories = await fs_1.promises.readdir(directory, {\n withFileTypes: true,\n });\n for (const curFileOrDirectory of filesAndSubdirectories) {\n const fullPath = path.join(directory, curFileOrDirectory.name);\n curFileOrDirectory.isDirectory()\n ? 
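// Illustrative sketch only (not part of the bundled source; bucket and file names
// are hypothetical, and TransferManager is assumed to be exported from the package
// entry point): using downloadFileInChunks as implemented above. Files below the
// 32 MiB threshold are fetched in a single request; larger files are fetched as
// 10 MiB ranges (unless chunkSizeBytes overrides that) and written at the matching
// offsets, with an optional CRC32C check at the end.
const {Storage, TransferManager} = require('@google-cloud/storage');
const transferManager = new TransferManager(new Storage().bucket('my-bucket'));
async function fetchLargeFile() {
  await transferManager.downloadFileInChunks('large-file.bin', {
    chunkSizeBytes: 16 * 1024 * 1024, // override the default chunk size
    concurrencyLimit: 4,              // parallel range requests
    validation: 'crc32c',             // validate the reassembled file
    destination: '/tmp/large-file.bin',
  });
}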
yield* this.getPathsFromDirectory(fullPath)\n : yield fullPath;\n }\n }\n}\nexports.TransferManager = TransferManager;\n//# sourceMappingURL=transfer-manager.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PassThroughShim = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0;\nconst querystring = require(\"querystring\");\nconst stream_1 = require(\"stream\");\nfunction normalize(optionsOrCallback, cb) {\n const options = (typeof optionsOrCallback === 'object' ? optionsOrCallback : {});\n const callback = (typeof optionsOrCallback === 'function' ? optionsOrCallback : cb);\n return { options, callback };\n}\nexports.normalize = normalize;\n/**\n * Flatten an object into an Array of arrays, [[key, value], ..].\n * Implements Object.entries() for Node.js <8\n * @internal\n */\nfunction objectEntries(obj) {\n return Object.keys(obj).map(key => [key, obj[key]]);\n}\nexports.objectEntries = objectEntries;\n/**\n * Encode `str` with encodeURIComponent, plus these\n * reserved characters: `! * ' ( )`.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent}\n *\n * @param {string} str The URI component to encode.\n * @return {string} The encoded string.\n */\nfunction fixedEncodeURIComponent(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase());\n}\nexports.fixedEncodeURIComponent = fixedEncodeURIComponent;\n/**\n * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent.\n *\n * Encode every byte except `A-Z a-Z 0-9 ~ - . _`.\n *\n * @param {string} uri The URI to encode.\n * @param [boolean=false] encodeSlash If `true`, the \"/\" character is not encoded.\n * @return {string} The encoded string.\n */\nfunction encodeURI(uri, encodeSlash) {\n // Split the string by `/`, and conditionally rejoin them with either\n // %2F if encodeSlash is `true`, or '/' if `false`.\n return uri\n .split('/')\n .map(fixedEncodeURIComponent)\n .join(encodeSlash ? 
'%2F' : '/');\n}\nexports.encodeURI = encodeURI;\n/**\n * Serialize an object to a URL query string using util.encodeURI(uri, true).\n * @param {string} url The object to serialize.\n * @return {string} Serialized string.\n */\nfunction qsStringify(qs) {\n return querystring.stringify(qs, '&', '=', {\n encodeURIComponent: (component) => encodeURI(component, true),\n });\n}\nexports.qsStringify = qsStringify;\nfunction objectKeyToLowercase(object) {\n const newObj = {};\n for (let key of Object.keys(object)) {\n const value = object[key];\n key = key.toLowerCase();\n newObj[key] = value;\n }\n return newObj;\n}\nexports.objectKeyToLowercase = objectKeyToLowercase;\n/**\n * JSON encode str, with unicode \\u+ representation.\n * @param {object} obj The object to encode.\n * @return {string} Serialized string.\n */\nfunction unicodeJSONStringify(obj) {\n return JSON.stringify(obj).replace(/[\\u0080-\\uFFFF]/g, (char) => '\\\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4));\n}\nexports.unicodeJSONStringify = unicodeJSONStringify;\n/**\n * Converts the given objects keys to snake_case\n * @param {object} obj object to convert keys to snake case.\n * @returns {object} object with keys converted to snake case.\n */\nfunction convertObjKeysToSnakeCase(obj) {\n if (obj instanceof Date || obj instanceof RegExp) {\n return obj;\n }\n if (Array.isArray(obj)) {\n return obj.map(convertObjKeysToSnakeCase);\n }\n if (obj instanceof Object) {\n return Object.keys(obj).reduce((acc, cur) => {\n const s = cur[0].toLocaleLowerCase() +\n cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => {\n return `_${p1.toLowerCase()}`;\n });\n acc[s] = convertObjKeysToSnakeCase(obj[cur]);\n return acc;\n }, Object());\n }\n return obj;\n}\nexports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase;\n/**\n * Formats the provided date object as a UTC ISO string.\n * @param {Date} dateTimeToFormat date object to be formatted.\n * @param {boolean} includeTime flag to include hours, minutes, seconds in output.\n * @param {string} dateDelimiter delimiter between date components.\n * @param {string} timeDelimiter delimiter between time components.\n * @returns {string} UTC ISO format of provided date obect.\n */\nfunction formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') {\n const year = dateTimeToFormat.getUTCFullYear();\n const month = dateTimeToFormat.getUTCMonth() + 1;\n const day = dateTimeToFormat.getUTCDate();\n const hour = dateTimeToFormat.getUTCHours();\n const minute = dateTimeToFormat.getUTCMinutes();\n const second = dateTimeToFormat.getUTCSeconds();\n let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month\n .toString()\n .padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`;\n if (includeTime) {\n resultString = `${resultString}T${hour\n .toString()\n .padStart(2, '0')}${timeDelimiter}${minute\n .toString()\n .padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`;\n }\n return resultString;\n}\nexports.formatAsUTCISO = formatAsUTCISO;\nclass PassThroughShim extends stream_1.PassThrough {\n constructor() {\n super(...arguments);\n this.shouldEmitReading = true;\n this.shouldEmitWriting = true;\n }\n _read(size) {\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n super._read(size);\n }\n _write(chunk, encoding, callback) {\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n // Per the nodejs documention, callback must 
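// Illustrative sketch only (not part of the bundled source): expected results of
// the util helpers defined above, assuming they are imported from this util
// module. fixedEncodeURIComponent additionally percent-encodes ! ' ( ) *;
// encodeURI escapes '/' only when encodeSlash is true; formatAsUTCISO zero-pads
// each component and appends 'Z' when includeTime is set.
fixedEncodeURIComponent("it's (ok)*!");           // "it%27s%20%28ok%29%2A%21"
encodeURI('a b/c d', false);                      // "a%20b/c%20d"
encodeURI('a b/c d', true);                       // "a%20b%2Fc%20d"
qsStringify({prefix: 'foo/bar', delimiter: '/'}); // "prefix=foo%2Fbar&delimiter=%2F"
formatAsUTCISO(new Date(Date.UTC(2024, 1, 19)), true, '-', ':'); // "2024-02-19T00:00:00Z"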
be invoked on the next tick\n process.nextTick(() => {\n super._write(chunk, encoding, callback);\n });\n }\n _final(callback) {\n // If the stream is empty (i.e. empty file) final will be invoked before _read / _write\n // and we should still emit the proper events.\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n callback(null);\n }\n}\nexports.PassThroughShim = PassThroughShim;\n//# sourceMappingURL=util.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? \"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = 
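// Illustrative sketch only (not part of the bundled source; token values are
// placeholders): how the @octokit/auth-token module above classifies tokens.
// Three dot-separated parts => an app JWT (sent as "bearer ..."); ghs_/v1. =>
// installation; ghu_ => user-to-server; anything else => oauth (sent as "token ...").
const {createTokenAuth} = require('@octokit/auth-token');
async function classifyTokens() {
  const installation = await createTokenAuth('ghs_XXXX')();
  // => {type: 'token', token: 'ghs_XXXX', tokenType: 'installation'}
  const userToServer = await createTokenAuth('ghu_XXXX')();
  // => {type: 'token', token: 'ghu_XXXX', tokenType: 'user-to-server'}
  const appJwt = await createTokenAuth('aaa.bbb.ccc')();
  // => {type: 'token', token: 'aaa.bbb.ccc', tokenType: 'app'}
  return {installation, userToServer, appJwt};
}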
new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.21.3\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n enumerableOnly && (symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n })), keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = null != arguments[i] ? arguments[i] : {};\n i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/audit-log\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /enterprises/{enterprise}/settings/billing/advanced-security\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/audit-log\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET 
/orgs/{org}/external-groups\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/settings/billing/advanced-security\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", 
\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET 
/users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n 
enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForEnterprise: [\"GET /enterprises/{enterprise}/actions/cache/usage\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsDefaultWorkflowPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET 
/orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n 
deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubAdvancedSecurityBillingGhe: [\"GET /enterprises/{enterprise}/settings/billing/advanced-security\"],\n getGithubAdvancedSecurityBillingOrg: [\"GET /orgs/{org}/settings/billing/advanced-security\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET 
/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST 
/user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n getServerStatistics: [\"GET /enterprise-installation/{enterprise_or_org}/server-statistics\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForEnterprise: [\"DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForEnterprise: [\"PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: 
[\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE 
/repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n 
listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomRoles: [\"GET /organizations/{organization_id}/custom_roles\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n 
updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n 
createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n 
reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n 
compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n 
downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n 
renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.16.2\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n 
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n 
Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction once(emitter, name, { signal } = {}) {\n return new Promise((resolve, reject) => {\n function cleanup() {\n signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);\n emitter.removeListener(name, onEvent);\n emitter.removeListener('error', onError);\n }\n function onEvent(...args) {\n cleanup();\n resolve(args);\n }\n function onError(err) {\n cleanup();\n reject(err);\n }\n signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup);\n emitter.on(name, onEvent);\n emitter.on('error', onError);\n });\n}\nexports.default = once;\n//# sourceMappingURL=index.js.map","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? 
\"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? \"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nconst events_1 = require(\"events\");\nconst debug_1 = __importDefault(require(\"debug\"));\nconst promisify_1 = __importDefault(require(\"./promisify\"));\nconst debug = debug_1.default('agent-base');\nfunction isAgent(v) {\n return Boolean(v) && typeof v.addRequest === 'function';\n}\nfunction isSecureEndpoint() {\n const { stack } = new Error();\n if (typeof stack !== 'string')\n return false;\n return stack.split('\\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1);\n}\nfunction createAgent(callback, opts) {\n return new createAgent.Agent(callback, opts);\n}\n(function (createAgent) {\n /**\n * Base `http.Agent` implementation.\n * No pooling/keep-alive is implemented by default.\n *\n * @param {Function} callback\n * @api public\n */\n class Agent extends events_1.EventEmitter {\n constructor(callback, _opts) {\n super();\n let opts = _opts;\n if (typeof callback === 'function') {\n this.callback = callback;\n }\n else if (callback) {\n opts = callback;\n }\n // Timeout for the socket to be returned from the callback\n this.timeout = null;\n if (opts && typeof opts.timeout === 'number') {\n this.timeout = opts.timeout;\n }\n // These aren't actually used by `agent-base`, but are required\n // for the TypeScript definition files in `@types/node` :/\n this.maxFreeSockets = 1;\n this.maxSockets = 1;\n this.maxTotalSockets = Infinity;\n this.sockets = {};\n this.freeSockets = {};\n this.requests = {};\n this.options = {};\n }\n get defaultPort() {\n if (typeof this.explicitDefaultPort === 'number') {\n return this.explicitDefaultPort;\n }\n return isSecureEndpoint() ? 443 : 80;\n }\n set defaultPort(v) {\n this.explicitDefaultPort = v;\n }\n get protocol() {\n if (typeof this.explicitProtocol === 'string') {\n return this.explicitProtocol;\n }\n return isSecureEndpoint() ? 'https:' : 'http:';\n }\n set protocol(v) {\n this.explicitProtocol = v;\n }\n callback(req, opts, fn) {\n throw new Error('\"agent-base\" has no default implementation, you must subclass and override `callback()`');\n }\n /**\n * Called by node-core's \"_http_client.js\" module when creating\n * a new HTTP request with this Agent instance.\n *\n * @api public\n */\n addRequest(req, _opts) {\n const opts = Object.assign({}, _opts);\n if (typeof opts.secureEndpoint !== 'boolean') {\n opts.secureEndpoint = isSecureEndpoint();\n }\n if (opts.host == null) {\n opts.host = 'localhost';\n }\n if (opts.port == null) {\n opts.port = opts.secureEndpoint ? 443 : 80;\n }\n if (opts.protocol == null) {\n opts.protocol = opts.secureEndpoint ? 'https:' : 'http:';\n }\n if (opts.host && opts.path) {\n // If both a `host` and `path` are specified then it's most\n // likely the result of a `url.parse()` call... we need to\n // remove the `path` portion so that `net.connect()` doesn't\n // attempt to open that as a unix socket file.\n delete opts.path;\n }\n delete opts.agent;\n delete opts.hostname;\n delete opts._defaultAgent;\n delete opts.defaultPort;\n delete opts.createConnection;\n // Hint to use \"Connection: close\"\n // XXX: non-documented `http` module API :(\n req._last = true;\n req.shouldKeepAlive = false;\n let timedOut = false;\n let timeoutId = null;\n const timeoutMs = opts.timeout || this.timeout;\n const onerror = (err) => {\n if (req._hadError)\n return;\n req.emit('error', err);\n // For Safety. 
Some additional errors might fire later on\n // and we need to make sure we don't double-fire the error event.\n req._hadError = true;\n };\n const ontimeout = () => {\n timeoutId = null;\n timedOut = true;\n const err = new Error(`A \"socket\" was not created for HTTP request before ${timeoutMs}ms`);\n err.code = 'ETIMEOUT';\n onerror(err);\n };\n const callbackError = (err) => {\n if (timedOut)\n return;\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n onerror(err);\n };\n const onsocket = (socket) => {\n if (timedOut)\n return;\n if (timeoutId != null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n if (isAgent(socket)) {\n // `socket` is actually an `http.Agent` instance, so\n // relinquish responsibility for this `req` to the Agent\n // from here on\n debug('Callback returned another Agent instance %o', socket.constructor.name);\n socket.addRequest(req, opts);\n return;\n }\n if (socket) {\n socket.once('free', () => {\n this.freeSocket(socket, opts);\n });\n req.onSocket(socket);\n return;\n }\n const err = new Error(`no Duplex stream was returned to agent-base for \\`${req.method} ${req.path}\\``);\n onerror(err);\n };\n if (typeof this.callback !== 'function') {\n onerror(new Error('`callback` is not defined'));\n return;\n }\n if (!this.promisifiedCallback) {\n if (this.callback.length >= 3) {\n debug('Converting legacy callback function to promise');\n this.promisifiedCallback = promisify_1.default(this.callback);\n }\n else {\n this.promisifiedCallback = this.callback;\n }\n }\n if (typeof timeoutMs === 'number' && timeoutMs > 0) {\n timeoutId = setTimeout(ontimeout, timeoutMs);\n }\n if ('port' in opts && typeof opts.port !== 'number') {\n opts.port = Number(opts.port);\n }\n try {\n debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`);\n Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError);\n }\n catch (err) {\n Promise.reject(err).catch(callbackError);\n }\n }\n freeSocket(socket, opts) {\n debug('Freeing socket %o %o', socket.constructor.name, opts);\n socket.destroy();\n }\n destroy() {\n debug('Destroying agent %o', this.constructor.name);\n }\n }\n createAgent.Agent = Agent;\n // So that `instanceof` works correctly\n createAgent.prototype = createAgent.Agent.prototype;\n})(createAgent || (createAgent = {}));\nmodule.exports = createAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction promisify(fn) {\n return function (req, opts) {\n return new Promise((resolve, reject) => {\n fn.call(this, req, opts, (err, rtn) => {\n if (err) {\n reject(err);\n }\n else {\n resolve(rtn);\n }\n });\n });\n };\n}\nexports.default = promisify;\n//# sourceMappingURL=promisify.js.map","'use strict';\n\nconst arrify = value => {\n\tif (value === null || value === undefined) {\n\t\treturn [];\n\t}\n\n\tif (Array.isArray(value)) {\n\t\treturn value;\n\t}\n\n\tif (typeof value === 'string') {\n\t\treturn [value];\n\t}\n\n\tif (typeof value[Symbol.iterator] === 'function') {\n\t\treturn [...value];\n\t}\n\n\treturn [value];\n};\n\nmodule.exports = arrify;\n","// Packages\nvar retrier = require('retry');\n\nfunction retry(fn, opts) {\n function run(resolve, reject) {\n var options = opts || {};\n var op;\n\n // Default `randomize` to true\n if (!('randomize' in options)) {\n options.randomize = true;\n }\n\n op = retrier.operation(options);\n\n // We allow the user to abort retrying\n // this makes sense in the 
cases where\n // knowledge is obtained that retrying\n // would be futile (e.g.: auth errors)\n\n function bail(err) {\n reject(err || new Error('Aborted'));\n }\n\n function onError(err, num) {\n if (err.bail) {\n bail(err);\n return;\n }\n\n if (!op.retry(err)) {\n reject(op.mainError());\n } else if (options.onRetry) {\n options.onRetry(err, num);\n }\n }\n\n function runAttempt(num) {\n var val;\n\n try {\n val = fn(bail, num);\n } catch (err) {\n onError(err, num);\n return;\n }\n\n Promise.resolve(val)\n .then(resolve)\n .catch(function catchIt(err) {\n onError(err, num);\n });\n }\n\n op.attempt(runAttempt);\n }\n\n return new Promise(run);\n}\n\nmodule.exports = retry;\n","'use strict'\n\nexports.byteLength = byteLength\nexports.toByteArray = toByteArray\nexports.fromByteArray = fromByteArray\n\nvar lookup = []\nvar revLookup = []\nvar Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array\n\nvar code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\nfor (var i = 0, len = code.length; i < len; ++i) {\n lookup[i] = code[i]\n revLookup[code.charCodeAt(i)] = i\n}\n\n// Support decoding URL-safe base64 strings, as Node.js does.\n// See: https://en.wikipedia.org/wiki/Base64#URL_applications\nrevLookup['-'.charCodeAt(0)] = 62\nrevLookup['_'.charCodeAt(0)] = 63\n\nfunction getLens (b64) {\n var len = b64.length\n\n if (len % 4 > 0) {\n throw new Error('Invalid string. Length must be a multiple of 4')\n }\n\n // Trim off extra bytes after placeholder bytes are found\n // See: https://github.com/beatgammit/base64-js/issues/42\n var validLen = b64.indexOf('=')\n if (validLen === -1) validLen = len\n\n var placeHoldersLen = validLen === len\n ? 0\n : 4 - (validLen % 4)\n\n return [validLen, placeHoldersLen]\n}\n\n// base64 is 4/3 + up to two characters of the original data\nfunction byteLength (b64) {\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction _byteLength (b64, validLen, placeHoldersLen) {\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction toByteArray (b64) {\n var tmp\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n\n var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))\n\n var curByte = 0\n\n // if there are placeholders, only get up to the last complete 4 chars\n var len = placeHoldersLen > 0\n ? 
validLen - 4\n : validLen\n\n var i\n for (i = 0; i < len; i += 4) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 18) |\n (revLookup[b64.charCodeAt(i + 1)] << 12) |\n (revLookup[b64.charCodeAt(i + 2)] << 6) |\n revLookup[b64.charCodeAt(i + 3)]\n arr[curByte++] = (tmp >> 16) & 0xFF\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 2) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 2) |\n (revLookup[b64.charCodeAt(i + 1)] >> 4)\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 1) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 10) |\n (revLookup[b64.charCodeAt(i + 1)] << 4) |\n (revLookup[b64.charCodeAt(i + 2)] >> 2)\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n return arr\n}\n\nfunction tripletToBase64 (num) {\n return lookup[num >> 18 & 0x3F] +\n lookup[num >> 12 & 0x3F] +\n lookup[num >> 6 & 0x3F] +\n lookup[num & 0x3F]\n}\n\nfunction encodeChunk (uint8, start, end) {\n var tmp\n var output = []\n for (var i = start; i < end; i += 3) {\n tmp =\n ((uint8[i] << 16) & 0xFF0000) +\n ((uint8[i + 1] << 8) & 0xFF00) +\n (uint8[i + 2] & 0xFF)\n output.push(tripletToBase64(tmp))\n }\n return output.join('')\n}\n\nfunction fromByteArray (uint8) {\n var tmp\n var len = uint8.length\n var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes\n var parts = []\n var maxChunkLength = 16383 // must be multiple of 3\n\n // go through the array every three bytes, we'll deal with trailing stuff later\n for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {\n parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))\n }\n\n // pad the end with zeros, but make sure to not forget the extra bytes\n if (extraBytes === 1) {\n tmp = uint8[len - 1]\n parts.push(\n lookup[tmp >> 2] +\n lookup[(tmp << 4) & 0x3F] +\n '=='\n )\n } else if (extraBytes === 2) {\n tmp = (uint8[len - 2] << 8) + uint8[len - 1]\n parts.push(\n lookup[tmp >> 10] +\n lookup[(tmp >> 4) & 0x3F] +\n lookup[(tmp << 2) & 0x3F] +\n '='\n )\n }\n\n return parts.join('')\n}\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n",";(function (globalObject) {\r\n 'use strict';\r\n\r\n/*\r\n * bignumber.js v9.1.1\r\n * A JavaScript library for arbitrary-precision arithmetic.\r\n * https://github.com/MikeMcl/bignumber.js\r\n * Copyright (c) 2022 Michael Mclaughlin \r\n * MIT Licensed.\r\n *\r\n * BigNumber.prototype methods | BigNumber methods\r\n * |\r\n * absoluteValue abs | clone\r\n * comparedTo | config set\r\n * decimalPlaces dp | DECIMAL_PLACES\r\n * dividedBy div | ROUNDING_MODE\r\n * dividedToIntegerBy idiv | EXPONENTIAL_AT\r\n * exponentiatedBy pow | RANGE\r\n * integerValue | CRYPTO\r\n * isEqualTo eq | MODULO_MODE\r\n * isFinite | POW_PRECISION\r\n * isGreaterThan gt | FORMAT\r\n * isGreaterThanOrEqualTo gte | ALPHABET\r\n * isInteger | isBigNumber\r\n * isLessThan lt | maximum max\r\n * isLessThanOrEqualTo lte | minimum min\r\n * isNaN | random\r\n * isNegative | sum\r\n * isPositive |\r\n * isZero |\r\n * minus |\r\n * modulo mod |\r\n * multipliedBy times |\r\n * negated |\r\n * plus |\r\n * precision sd |\r\n * shiftedBy |\r\n * squareRoot sqrt |\r\n * toExponential |\r\n * toFixed |\r\n * toFormat |\r\n * toFraction |\r\n * toJSON |\r\n * toNumber |\r\n * toPrecision |\r\n * toString |\r\n * valueOf |\r\n *\r\n 
*/\r\n\r\n\r\n var BigNumber,\r\n isNumeric = /^-?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:e[+-]?\\d+)?$/i,\r\n mathceil = Math.ceil,\r\n mathfloor = Math.floor,\r\n\r\n bignumberError = '[BigNumber Error] ',\r\n tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ',\r\n\r\n BASE = 1e14,\r\n LOG_BASE = 14,\r\n MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1\r\n // MAX_INT32 = 0x7fffffff, // 2^31 - 1\r\n POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13],\r\n SQRT_BASE = 1e7,\r\n\r\n // EDITABLE\r\n // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and\r\n // the arguments to toExponential, toFixed, toFormat, and toPrecision.\r\n MAX = 1E9; // 0 to MAX_INT32\r\n\r\n\r\n /*\r\n * Create and return a BigNumber constructor.\r\n */\r\n function clone(configObject) {\r\n var div, convertBase, parseNumeric,\r\n P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null },\r\n ONE = new BigNumber(1),\r\n\r\n\r\n //----------------------------- EDITABLE CONFIG DEFAULTS -------------------------------\r\n\r\n\r\n // The default values below must be integers within the inclusive ranges stated.\r\n // The values can also be changed at run-time using BigNumber.set.\r\n\r\n // The maximum number of decimal places for operations involving division.\r\n DECIMAL_PLACES = 20, // 0 to MAX\r\n\r\n // The rounding mode used when rounding to the above decimal places, and when using\r\n // toExponential, toFixed, toFormat and toPrecision, and round (default value).\r\n // UP 0 Away from zero.\r\n // DOWN 1 Towards zero.\r\n // CEIL 2 Towards +Infinity.\r\n // FLOOR 3 Towards -Infinity.\r\n // HALF_UP 4 Towards nearest neighbour. If equidistant, up.\r\n // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down.\r\n // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour.\r\n // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity.\r\n // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity.\r\n ROUNDING_MODE = 4, // 0 to 8\r\n\r\n // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS]\r\n\r\n // The exponent value at and beneath which toString returns exponential notation.\r\n // Number type: -7\r\n TO_EXP_NEG = -7, // 0 to -MAX\r\n\r\n // The exponent value at and above which toString returns exponential notation.\r\n // Number type: 21\r\n TO_EXP_POS = 21, // 0 to MAX\r\n\r\n // RANGE : [MIN_EXP, MAX_EXP]\r\n\r\n // The minimum exponent value, beneath which underflow to zero occurs.\r\n // Number type: -324 (5e-324)\r\n MIN_EXP = -1e7, // -1 to -MAX\r\n\r\n // The maximum exponent value, above which overflow to Infinity occurs.\r\n // Number type: 308 (1.7976931348623157e+308)\r\n // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow.\r\n MAX_EXP = 1e7, // 1 to MAX\r\n\r\n // Whether to use cryptographically-secure random number generation, if available.\r\n CRYPTO = false, // true or false\r\n\r\n // The modulo mode used when calculating the modulus: a mod n.\r\n // The quotient (q = a / n) is calculated according to the corresponding rounding mode.\r\n // The remainder (r) is calculated as: r = a - n * q.\r\n //\r\n // UP 0 The remainder is positive if the dividend is negative, else is negative.\r\n // DOWN 1 The remainder has the same sign as the dividend.\r\n // This modulo mode is commonly known as 'truncated division' and is\r\n // equivalent to (a % n) in JavaScript.\r\n // FLOOR 3 The remainder has the same sign as the divisor (Python %).\r\n // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function.\r\n // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)).\r\n // The remainder is always positive.\r\n //\r\n // The truncated division, floored division, Euclidian division and IEEE 754 remainder\r\n // modes are commonly used for the modulus operation.\r\n // Although the other rounding modes can also be used, they may not give useful results.\r\n MODULO_MODE = 1, // 0 to 9\r\n\r\n // The maximum number of significant digits of the result of the exponentiatedBy operation.\r\n // If POW_PRECISION is 0, there will be unlimited significant digits.\r\n POW_PRECISION = 0, // 0 to MAX\r\n\r\n // The format specification used by the BigNumber.prototype.toFormat method.\r\n FORMAT = {\r\n prefix: '',\r\n groupSize: 3,\r\n secondaryGroupSize: 0,\r\n groupSeparator: ',',\r\n decimalSeparator: '.',\r\n fractionGroupSize: 0,\r\n fractionGroupSeparator: '\\xA0', // non-breaking space\r\n suffix: ''\r\n },\r\n\r\n // The alphabet used for base conversion. It must be at least 2 characters long, with no '+',\r\n // '-', '.', whitespace, or repeated character.\r\n // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_'\r\n ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz',\r\n alphabetHasNormalDecimalDigits = true;\r\n\r\n\r\n //------------------------------------------------------------------------------------------\r\n\r\n\r\n // CONSTRUCTOR\r\n\r\n\r\n /*\r\n * The BigNumber constructor and exported function.\r\n * Create and return a new instance of a BigNumber object.\r\n *\r\n * v {number|string|BigNumber} A numeric value.\r\n * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive.\r\n */\r\n function BigNumber(v, b) {\r\n var alphabet, c, caseChanged, e, i, isNum, len, str,\r\n x = this;\r\n\r\n // Enable constructor call without `new`.\r\n if (!(x instanceof BigNumber)) return new BigNumber(v, b);\r\n\r\n if (b == null) {\r\n\r\n if (v && v._isBigNumber === true) {\r\n x.s = v.s;\r\n\r\n if (!v.c || v.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else if (v.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = v.e;\r\n x.c = v.c.slice();\r\n }\r\n\r\n return;\r\n }\r\n\r\n if ((isNum = typeof v == 'number') && v * 0 == 0) {\r\n\r\n // Use `1 / n` to handle minus zero also.\r\n x.s = 1 / v < 0 ? (v = -v, -1) : 1;\r\n\r\n // Fast path for integers, where n < 2147483648 (2**31).\r\n if (v === ~~v) {\r\n for (e = 0, i = v; i >= 10; i /= 10, e++);\r\n\r\n if (e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else {\r\n x.e = e;\r\n x.c = [v];\r\n }\r\n\r\n return;\r\n }\r\n\r\n str = String(v);\r\n } else {\r\n\r\n if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum);\r\n\r\n x.s = str.charCodeAt(0) == 45 ? 
(str = str.slice(1), -1) : 1;\r\n }\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n\r\n // Exponential form?\r\n if ((i = str.search(/e/i)) > 0) {\r\n\r\n // Determine exponent.\r\n if (e < 0) e = i;\r\n e += +str.slice(i + 1);\r\n str = str.substring(0, i);\r\n } else if (e < 0) {\r\n\r\n // Integer.\r\n e = str.length;\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n\r\n // Allow exponential notation to be used with base 10 argument, while\r\n // also rounding to DECIMAL_PLACES as with other bases.\r\n if (b == 10 && alphabetHasNormalDecimalDigits) {\r\n x = new BigNumber(v);\r\n return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE);\r\n }\r\n\r\n str = String(v);\r\n\r\n if (isNum = typeof v == 'number') {\r\n\r\n // Avoid potential interpretation of Infinity and NaN as base 44+ values.\r\n if (v * 0 != 0) return parseNumeric(x, str, isNum, b);\r\n\r\n x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (BigNumber.DEBUG && str.replace(/^0\\.0*|\\./, '').length > 15) {\r\n throw Error\r\n (tooManyDigits + v);\r\n }\r\n } else {\r\n x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1;\r\n }\r\n\r\n alphabet = ALPHABET.slice(0, b);\r\n e = i = 0;\r\n\r\n // Check that str is a valid base b number.\r\n // Don't use RegExp, so alphabet can contain special characters.\r\n for (len = str.length; i < len; i++) {\r\n if (alphabet.indexOf(c = str.charAt(i)) < 0) {\r\n if (c == '.') {\r\n\r\n // If '.' is not the first character and it has not be found before.\r\n if (i > e) {\r\n e = len;\r\n continue;\r\n }\r\n } else if (!caseChanged) {\r\n\r\n // Allow e.g. 
hexadecimal 'FF' as well as 'ff'.\r\n if (str == str.toUpperCase() && (str = str.toLowerCase()) ||\r\n str == str.toLowerCase() && (str = str.toUpperCase())) {\r\n caseChanged = true;\r\n i = -1;\r\n e = 0;\r\n continue;\r\n }\r\n }\r\n\r\n return parseNumeric(x, String(v), isNum, b);\r\n }\r\n }\r\n\r\n // Prevent later check for length on converted number.\r\n isNum = false;\r\n str = convertBase(str, b, 10, x.s);\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n else e = str.length;\r\n }\r\n\r\n // Determine leading zeros.\r\n for (i = 0; str.charCodeAt(i) === 48; i++);\r\n\r\n // Determine trailing zeros.\r\n for (len = str.length; str.charCodeAt(--len) === 48;);\r\n\r\n if (str = str.slice(i, ++len)) {\r\n len -= i;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (isNum && BigNumber.DEBUG &&\r\n len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) {\r\n throw Error\r\n (tooManyDigits + (x.s * v));\r\n }\r\n\r\n // Overflow?\r\n if ((e = e - i - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n x.c = x.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = e;\r\n x.c = [];\r\n\r\n // Transform base\r\n\r\n // e is the base 10 exponent.\r\n // i is where to slice str to get the first element of the coefficient array.\r\n i = (e + 1) % LOG_BASE;\r\n if (e < 0) i += LOG_BASE; // i < 1\r\n\r\n if (i < len) {\r\n if (i) x.c.push(+str.slice(0, i));\r\n\r\n for (len -= LOG_BASE; i < len;) {\r\n x.c.push(+str.slice(i, i += LOG_BASE));\r\n }\r\n\r\n i = LOG_BASE - (str = str.slice(i)).length;\r\n } else {\r\n i -= len;\r\n }\r\n\r\n for (; i--; str += '0');\r\n x.c.push(+str);\r\n }\r\n } else {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n\r\n // CONSTRUCTOR PROPERTIES\r\n\r\n\r\n BigNumber.clone = clone;\r\n\r\n BigNumber.ROUND_UP = 0;\r\n BigNumber.ROUND_DOWN = 1;\r\n BigNumber.ROUND_CEIL = 2;\r\n BigNumber.ROUND_FLOOR = 3;\r\n BigNumber.ROUND_HALF_UP = 4;\r\n BigNumber.ROUND_HALF_DOWN = 5;\r\n BigNumber.ROUND_HALF_EVEN = 6;\r\n BigNumber.ROUND_HALF_CEIL = 7;\r\n BigNumber.ROUND_HALF_FLOOR = 8;\r\n BigNumber.EUCLID = 9;\r\n\r\n\r\n /*\r\n * Configure infrequently-changing library-wide settings.\r\n *\r\n * Accept an object with the following optional properties (if the value of a property is\r\n * a number, it must be an integer within the inclusive range stated):\r\n *\r\n * DECIMAL_PLACES {number} 0 to MAX\r\n * ROUNDING_MODE {number} 0 to 8\r\n * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX]\r\n * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX]\r\n * CRYPTO {boolean} true or false\r\n * MODULO_MODE {number} 0 to 9\r\n * POW_PRECISION {number} 0 to MAX\r\n * ALPHABET {string} A string of two or more unique characters which does\r\n * not contain '.'.\r\n * FORMAT {object} An object with some of the following properties:\r\n * prefix {string}\r\n * groupSize {number}\r\n * secondaryGroupSize {number}\r\n * groupSeparator {string}\r\n * decimalSeparator {string}\r\n * fractionGroupSize {number}\r\n * fractionGroupSeparator {string}\r\n * suffix {string}\r\n *\r\n * (The values assigned to the above FORMAT object properties are not checked for validity.)\r\n *\r\n * E.g.\r\n * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 })\r\n *\r\n * Ignore properties/parameters set to null or undefined, except for ALPHABET.\r\n *\r\n * Return an object with the properties 
current values.\r\n */\r\n BigNumber.config = BigNumber.set = function (obj) {\r\n var p, v;\r\n\r\n if (obj != null) {\r\n\r\n if (typeof obj == 'object') {\r\n\r\n // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n DECIMAL_PLACES = v;\r\n }\r\n\r\n // ROUNDING_MODE {number} Integer, 0 to 8 inclusive.\r\n // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 8, p);\r\n ROUNDING_MODE = v;\r\n }\r\n\r\n // EXPONENTIAL_AT {number|number[]}\r\n // Integer, -MAX to MAX inclusive or\r\n // [integer -MAX to 0 inclusive, 0 to MAX inclusive].\r\n // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, 0, p);\r\n intCheck(v[1], 0, MAX, p);\r\n TO_EXP_NEG = v[0];\r\n TO_EXP_POS = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v);\r\n }\r\n }\r\n\r\n // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or\r\n // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive].\r\n // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}'\r\n if (obj.hasOwnProperty(p = 'RANGE')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, -1, p);\r\n intCheck(v[1], 1, MAX, p);\r\n MIN_EXP = v[0];\r\n MAX_EXP = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n if (v) {\r\n MIN_EXP = -(MAX_EXP = v < 0 ? 
-v : v);\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' cannot be zero: ' + v);\r\n }\r\n }\r\n }\r\n\r\n // CRYPTO {boolean} true or false.\r\n // '[BigNumber Error] CRYPTO not true or false: {v}'\r\n // '[BigNumber Error] crypto unavailable'\r\n if (obj.hasOwnProperty(p = 'CRYPTO')) {\r\n v = obj[p];\r\n if (v === !!v) {\r\n if (v) {\r\n if (typeof crypto != 'undefined' && crypto &&\r\n (crypto.getRandomValues || crypto.randomBytes)) {\r\n CRYPTO = v;\r\n } else {\r\n CRYPTO = !v;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n } else {\r\n CRYPTO = v;\r\n }\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' not true or false: ' + v);\r\n }\r\n }\r\n\r\n // MODULO_MODE {number} Integer, 0 to 9 inclusive.\r\n // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'MODULO_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 9, p);\r\n MODULO_MODE = v;\r\n }\r\n\r\n // POW_PRECISION {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'POW_PRECISION')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n POW_PRECISION = v;\r\n }\r\n\r\n // FORMAT {object}\r\n // '[BigNumber Error] FORMAT not an object: {v}'\r\n if (obj.hasOwnProperty(p = 'FORMAT')) {\r\n v = obj[p];\r\n if (typeof v == 'object') FORMAT = v;\r\n else throw Error\r\n (bignumberError + p + ' not an object: ' + v);\r\n }\r\n\r\n // ALPHABET {string}\r\n // '[BigNumber Error] ALPHABET invalid: {v}'\r\n if (obj.hasOwnProperty(p = 'ALPHABET')) {\r\n v = obj[p];\r\n\r\n // Disallow if less than two characters,\r\n // or if it contains '+', '-', '.', whitespace, or a repeated character.\r\n if (typeof v == 'string' && !/^.?$|[+\\-.\\s]|(.).*\\1/.test(v)) {\r\n alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789';\r\n ALPHABET = v;\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' invalid: ' + v);\r\n }\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Object expected: {v}'\r\n throw Error\r\n (bignumberError + 'Object expected: ' + obj);\r\n }\r\n }\r\n\r\n return {\r\n DECIMAL_PLACES: DECIMAL_PLACES,\r\n ROUNDING_MODE: ROUNDING_MODE,\r\n EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS],\r\n RANGE: [MIN_EXP, MAX_EXP],\r\n CRYPTO: CRYPTO,\r\n MODULO_MODE: MODULO_MODE,\r\n POW_PRECISION: POW_PRECISION,\r\n FORMAT: FORMAT,\r\n ALPHABET: ALPHABET\r\n };\r\n };\r\n\r\n\r\n /*\r\n * Return true if v is a BigNumber instance, otherwise return false.\r\n *\r\n * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed.\r\n *\r\n * v {any}\r\n *\r\n * '[BigNumber Error] Invalid BigNumber: {v}'\r\n */\r\n BigNumber.isBigNumber = function (v) {\r\n if (!v || v._isBigNumber !== true) return false;\r\n if (!BigNumber.DEBUG) return true;\r\n\r\n var i, n,\r\n c = v.c,\r\n e = v.e,\r\n s = v.s;\r\n\r\n out: if ({}.toString.call(c) == '[object Array]') {\r\n\r\n if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) {\r\n\r\n // If the first element is zero, the BigNumber value must be zero.\r\n if (c[0] === 0) {\r\n if (e === 0 && c.length === 1) return true;\r\n break out;\r\n }\r\n\r\n // Calculate number of digits that c[0] should have, based on the exponent.\r\n i = (e + 1) % LOG_BASE;\r\n if (i < 1) i += LOG_BASE;\r\n\r\n // Calculate number of digits of c[0].\r\n //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) {\r\n if (String(c[0]).length == i) {\r\n\r\n for (i = 0; i < 
c.length; i++) {\r\n n = c[i];\r\n if (n < 0 || n >= BASE || n !== mathfloor(n)) break out;\r\n }\r\n\r\n // Last element cannot be zero, unless it is the only element.\r\n if (n !== 0) return true;\r\n }\r\n }\r\n\r\n // Infinity/NaN\r\n } else if (c === null && e === null && (s === null || s === 1 || s === -1)) {\r\n return true;\r\n }\r\n\r\n throw Error\r\n (bignumberError + 'Invalid BigNumber: ' + v);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the maximum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.maximum = BigNumber.max = function () {\r\n return maxOrMin(arguments, P.lt);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the minimum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.minimum = BigNumber.min = function () {\r\n return maxOrMin(arguments, P.gt);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber with a random value equal to or greater than 0 and less than 1,\r\n * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing\r\n * zeros are produced).\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}'\r\n * '[BigNumber Error] crypto unavailable'\r\n */\r\n BigNumber.random = (function () {\r\n var pow2_53 = 0x20000000000000;\r\n\r\n // Return a 53 bit integer n, where 0 <= n < 9007199254740992.\r\n // Check if Math.random() produces more than 32 bits of randomness.\r\n // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits.\r\n // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1.\r\n var random53bitInt = (Math.random() * pow2_53) & 0x1fffff\r\n ? function () { return mathfloor(Math.random() * pow2_53); }\r\n : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) +\r\n (Math.random() * 0x800000 | 0); };\r\n\r\n return function (dp) {\r\n var a, b, e, k, v,\r\n i = 0,\r\n c = [],\r\n rand = new BigNumber(ONE);\r\n\r\n if (dp == null) dp = DECIMAL_PLACES;\r\n else intCheck(dp, 0, MAX);\r\n\r\n k = mathceil(dp / LOG_BASE);\r\n\r\n if (CRYPTO) {\r\n\r\n // Browsers supporting crypto.getRandomValues.\r\n if (crypto.getRandomValues) {\r\n\r\n a = crypto.getRandomValues(new Uint32Array(k *= 2));\r\n\r\n for (; i < k;) {\r\n\r\n // 53 bits:\r\n // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2)\r\n // 11111 11111111 11111111 11111111 11100000 00000000 00000000\r\n // ((Math.pow(2, 32) - 1) >>> 11).toString(2)\r\n // 11111 11111111 11111111\r\n // 0x20000 is 2^21.\r\n v = a[i] * 0x20000 + (a[i + 1] >>> 11);\r\n\r\n // Rejection sampling:\r\n // 0 <= v < 9007199254740992\r\n // Probability that v >= 9e15, is\r\n // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 
1 in 1251\r\n if (v >= 9e15) {\r\n b = crypto.getRandomValues(new Uint32Array(2));\r\n a[i] = b[0];\r\n a[i + 1] = b[1];\r\n } else {\r\n\r\n // 0 <= v <= 8999999999999999\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 2;\r\n }\r\n }\r\n i = k / 2;\r\n\r\n // Node.js supporting crypto.randomBytes.\r\n } else if (crypto.randomBytes) {\r\n\r\n // buffer\r\n a = crypto.randomBytes(k *= 7);\r\n\r\n for (; i < k;) {\r\n\r\n // 0x1000000000000 is 2^48, 0x10000000000 is 2^40\r\n // 0x100000000 is 2^32, 0x1000000 is 2^24\r\n // 11111 11111111 11111111 11111111 11111111 11111111 11111111\r\n // 0 <= v < 9007199254740992\r\n v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) +\r\n (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) +\r\n (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6];\r\n\r\n if (v >= 9e15) {\r\n crypto.randomBytes(7).copy(a, i);\r\n } else {\r\n\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 7;\r\n }\r\n }\r\n i = k / 7;\r\n } else {\r\n CRYPTO = false;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n }\r\n\r\n // Use Math.random.\r\n if (!CRYPTO) {\r\n\r\n for (; i < k;) {\r\n v = random53bitInt();\r\n if (v < 9e15) c[i++] = v % 1e14;\r\n }\r\n }\r\n\r\n k = c[--i];\r\n dp %= LOG_BASE;\r\n\r\n // Convert trailing digits to zeros according to dp.\r\n if (k && dp) {\r\n v = POWS_TEN[LOG_BASE - dp];\r\n c[i] = mathfloor(k / v) * v;\r\n }\r\n\r\n // Remove trailing elements which are zero.\r\n for (; c[i] === 0; c.pop(), i--);\r\n\r\n // Zero?\r\n if (i < 0) {\r\n c = [e = 0];\r\n } else {\r\n\r\n // Remove leading elements which are zero and adjust exponent accordingly.\r\n for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE);\r\n\r\n // Count the digits of the first element of c to determine leading zeros, and...\r\n for (i = 1, v = c[0]; v >= 10; v /= 10, i++);\r\n\r\n // adjust the exponent accordingly.\r\n if (i < LOG_BASE) e -= LOG_BASE - i;\r\n }\r\n\r\n rand.e = e;\r\n rand.c = c;\r\n return rand;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the sum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.sum = function () {\r\n var i = 1,\r\n args = arguments,\r\n sum = new BigNumber(args[0]);\r\n for (; i < args.length;) sum = sum.plus(args[i++]);\r\n return sum;\r\n };\r\n\r\n\r\n // PRIVATE FUNCTIONS\r\n\r\n\r\n // Called by BigNumber and BigNumber.prototype.toString.\r\n convertBase = (function () {\r\n var decimal = '0123456789';\r\n\r\n /*\r\n * Convert string of baseIn to an array of numbers of baseOut.\r\n * Eg. toBaseOut('255', 10, 16) returns [15, 15].\r\n * Eg. 
toBaseOut('ff', 16, 10) returns [2, 5, 5].\r\n */\r\n function toBaseOut(str, baseIn, baseOut, alphabet) {\r\n var j,\r\n arr = [0],\r\n arrL,\r\n i = 0,\r\n len = str.length;\r\n\r\n for (; i < len;) {\r\n for (arrL = arr.length; arrL--; arr[arrL] *= baseIn);\r\n\r\n arr[0] += alphabet.indexOf(str.charAt(i++));\r\n\r\n for (j = 0; j < arr.length; j++) {\r\n\r\n if (arr[j] > baseOut - 1) {\r\n if (arr[j + 1] == null) arr[j + 1] = 0;\r\n arr[j + 1] += arr[j] / baseOut | 0;\r\n arr[j] %= baseOut;\r\n }\r\n }\r\n }\r\n\r\n return arr.reverse();\r\n }\r\n\r\n // Convert a numeric string of baseIn to a numeric string of baseOut.\r\n // If the caller is toString, we are converting from base 10 to baseOut.\r\n // If the caller is BigNumber, we are converting from baseIn to base 10.\r\n return function (str, baseIn, baseOut, sign, callerIsToString) {\r\n var alphabet, d, e, k, r, x, xc, y,\r\n i = str.indexOf('.'),\r\n dp = DECIMAL_PLACES,\r\n rm = ROUNDING_MODE;\r\n\r\n // Non-integer.\r\n if (i >= 0) {\r\n k = POW_PRECISION;\r\n\r\n // Unlimited precision.\r\n POW_PRECISION = 0;\r\n str = str.replace('.', '');\r\n y = new BigNumber(baseIn);\r\n x = y.pow(str.length - i);\r\n POW_PRECISION = k;\r\n\r\n // Convert str as if an integer, then restore the fraction part by dividing the\r\n // result by its base raised to a power.\r\n\r\n y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'),\r\n 10, baseOut, decimal);\r\n y.e = y.c.length;\r\n }\r\n\r\n // Convert the number as integer.\r\n\r\n xc = toBaseOut(str, baseIn, baseOut, callerIsToString\r\n ? (alphabet = ALPHABET, decimal)\r\n : (alphabet = decimal, ALPHABET));\r\n\r\n // xc now represents str as an integer and converted to baseOut. e is the exponent.\r\n e = k = xc.length;\r\n\r\n // Remove trailing zeros.\r\n for (; xc[--k] == 0; xc.pop());\r\n\r\n // Zero?\r\n if (!xc[0]) return alphabet.charAt(0);\r\n\r\n // Does str represent an integer? If so, no need for the division.\r\n if (i < 0) {\r\n --e;\r\n } else {\r\n x.c = xc;\r\n x.e = e;\r\n\r\n // The sign is needed for correct rounding.\r\n x.s = sign;\r\n x = div(x, y, dp, rm, baseOut);\r\n xc = x.c;\r\n r = x.r;\r\n e = x.e;\r\n }\r\n\r\n // xc now represents str converted to baseOut.\r\n\r\n // THe index of the rounding digit.\r\n d = e + dp + 1;\r\n\r\n // The rounding digit: the digit to the right of the digit that may be rounded up.\r\n i = xc[d];\r\n\r\n // Look at the rounding digits and mode to determine whether to round up.\r\n\r\n k = baseOut / 2;\r\n r = r || d < 0 || xc[d + 1] != null;\r\n\r\n r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n // If the index of the rounding digit is not greater than zero, or xc represents\r\n // zero, then the result of the base conversion is zero or, if rounding up, a value\r\n // such as 0.00001.\r\n if (d < 1 || !xc[0]) {\r\n\r\n // 1^-dp or 0\r\n str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0);\r\n } else {\r\n\r\n // Truncate xc to the required number of decimal places.\r\n xc.length = d;\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n // Rounding up may mean the previous digit has to be rounded up and so on.\r\n for (--baseOut; ++xc[--d] > baseOut;) {\r\n xc[d] = 0;\r\n\r\n if (!d) {\r\n ++e;\r\n xc = [1].concat(xc);\r\n }\r\n }\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (k = xc.length; !xc[--k];);\r\n\r\n // E.g. 
[4, 11, 15] becomes 4bf.\r\n for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++]));\r\n\r\n // Add leading zeros, decimal point and trailing zeros as required.\r\n str = toFixedPoint(str, e, alphabet.charAt(0));\r\n }\r\n\r\n // The caller will add the sign.\r\n return str;\r\n };\r\n })();\r\n\r\n\r\n // Perform division in the specified base. Called by div and convertBase.\r\n div = (function () {\r\n\r\n // Assume non-zero x and k.\r\n function multiply(x, k, base) {\r\n var m, temp, xlo, xhi,\r\n carry = 0,\r\n i = x.length,\r\n klo = k % SQRT_BASE,\r\n khi = k / SQRT_BASE | 0;\r\n\r\n for (x = x.slice(); i--;) {\r\n xlo = x[i] % SQRT_BASE;\r\n xhi = x[i] / SQRT_BASE | 0;\r\n m = khi * xlo + xhi * klo;\r\n temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry;\r\n carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi;\r\n x[i] = temp % base;\r\n }\r\n\r\n if (carry) x = [carry].concat(x);\r\n\r\n return x;\r\n }\r\n\r\n function compare(a, b, aL, bL) {\r\n var i, cmp;\r\n\r\n if (aL != bL) {\r\n cmp = aL > bL ? 1 : -1;\r\n } else {\r\n\r\n for (i = cmp = 0; i < aL; i++) {\r\n\r\n if (a[i] != b[i]) {\r\n cmp = a[i] > b[i] ? 1 : -1;\r\n break;\r\n }\r\n }\r\n }\r\n\r\n return cmp;\r\n }\r\n\r\n function subtract(a, b, aL, base) {\r\n var i = 0;\r\n\r\n // Subtract b from a.\r\n for (; aL--;) {\r\n a[aL] -= i;\r\n i = a[aL] < b[aL] ? 1 : 0;\r\n a[aL] = i * base + a[aL] - b[aL];\r\n }\r\n\r\n // Remove leading zeros.\r\n for (; !a[0] && a.length > 1; a.splice(0, 1));\r\n }\r\n\r\n // x: dividend, y: divisor.\r\n return function (x, y, dp, rm, base) {\r\n var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0,\r\n yL, yz,\r\n s = x.s == y.s ? 1 : -1,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n // Either NaN, Infinity or 0?\r\n if (!xc || !xc[0] || !yc || !yc[0]) {\r\n\r\n return new BigNumber(\r\n\r\n // Return NaN if either NaN, or both Infinity or 0.\r\n !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN :\r\n\r\n // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0.\r\n xc && xc[0] == 0 || !yc ? 
s * 0 : s / 0\r\n );\r\n }\r\n\r\n q = new BigNumber(s);\r\n qc = q.c = [];\r\n e = x.e - y.e;\r\n s = dp + e + 1;\r\n\r\n if (!base) {\r\n base = BASE;\r\n e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE);\r\n s = s / LOG_BASE | 0;\r\n }\r\n\r\n // Result exponent may be one less then the current value of e.\r\n // The coefficients of the BigNumbers from convertBase may have trailing zeros.\r\n for (i = 0; yc[i] == (xc[i] || 0); i++);\r\n\r\n if (yc[i] > (xc[i] || 0)) e--;\r\n\r\n if (s < 0) {\r\n qc.push(1);\r\n more = true;\r\n } else {\r\n xL = xc.length;\r\n yL = yc.length;\r\n i = 0;\r\n s += 2;\r\n\r\n // Normalise xc and yc so highest order digit of yc is >= base / 2.\r\n\r\n n = mathfloor(base / (yc[0] + 1));\r\n\r\n // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1.\r\n // if (n > 1 || n++ == 1 && yc[0] < base / 2) {\r\n if (n > 1) {\r\n yc = multiply(yc, n, base);\r\n xc = multiply(xc, n, base);\r\n yL = yc.length;\r\n xL = xc.length;\r\n }\r\n\r\n xi = yL;\r\n rem = xc.slice(0, yL);\r\n remL = rem.length;\r\n\r\n // Add zeros to make remainder as long as divisor.\r\n for (; remL < yL; rem[remL++] = 0);\r\n yz = yc.slice();\r\n yz = [0].concat(yz);\r\n yc0 = yc[0];\r\n if (yc[1] >= base / 2) yc0++;\r\n // Not necessary, but to prevent trial digit n > base, when using base 3.\r\n // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15;\r\n\r\n do {\r\n n = 0;\r\n\r\n // Compare divisor and remainder.\r\n cmp = compare(yc, rem, yL, remL);\r\n\r\n // If divisor < remainder.\r\n if (cmp < 0) {\r\n\r\n // Calculate trial digit, n.\r\n\r\n rem0 = rem[0];\r\n if (yL != remL) rem0 = rem0 * base + (rem[1] || 0);\r\n\r\n // n is how many times the divisor goes into the current remainder.\r\n n = mathfloor(rem0 / yc0);\r\n\r\n // Algorithm:\r\n // product = divisor multiplied by trial digit (n).\r\n // Compare product and remainder.\r\n // If product is greater than remainder:\r\n // Subtract divisor from product, decrement trial digit.\r\n // Subtract product from remainder.\r\n // If product was less than remainder at the last compare:\r\n // Compare new remainder and divisor.\r\n // If remainder is greater than divisor:\r\n // Subtract divisor from remainder, increment trial digit.\r\n\r\n if (n > 1) {\r\n\r\n // n may be > base only when base is 3.\r\n if (n >= base) n = base - 1;\r\n\r\n // product = divisor * trial digit.\r\n prod = multiply(yc, n, base);\r\n prodL = prod.length;\r\n remL = rem.length;\r\n\r\n // Compare product and remainder.\r\n // If product > remainder then trial digit n too high.\r\n // n is 1 too high about 5% of the time, and is not known to have\r\n // ever been more than 1 too high.\r\n while (compare(prod, rem, prodL, remL) == 1) {\r\n n--;\r\n\r\n // Subtract divisor from product.\r\n subtract(prod, yL < prodL ? 
yz : yc, prodL, base);\r\n prodL = prod.length;\r\n cmp = 1;\r\n }\r\n } else {\r\n\r\n // n is 0 or 1, cmp is -1.\r\n // If n is 0, there is no need to compare yc and rem again below,\r\n // so change cmp to 1 to avoid it.\r\n // If n is 1, leave cmp as -1, so yc and rem are compared again.\r\n if (n == 0) {\r\n\r\n // divisor < remainder, so n must be at least 1.\r\n cmp = n = 1;\r\n }\r\n\r\n // product = divisor\r\n prod = yc.slice();\r\n prodL = prod.length;\r\n }\r\n\r\n if (prodL < remL) prod = [0].concat(prod);\r\n\r\n // Subtract product from remainder.\r\n subtract(rem, prod, remL, base);\r\n remL = rem.length;\r\n\r\n // If product was < remainder.\r\n if (cmp == -1) {\r\n\r\n // Compare divisor and new remainder.\r\n // If divisor < new remainder, subtract divisor from remainder.\r\n // Trial digit n too low.\r\n // n is 1 too low about 5% of the time, and very rarely 2 too low.\r\n while (compare(yc, rem, yL, remL) < 1) {\r\n n++;\r\n\r\n // Subtract divisor from remainder.\r\n subtract(rem, yL < remL ? yz : yc, remL, base);\r\n remL = rem.length;\r\n }\r\n }\r\n } else if (cmp === 0) {\r\n n++;\r\n rem = [0];\r\n } // else cmp === 1 and n will be 0\r\n\r\n // Add the next digit, n, to the result array.\r\n qc[i++] = n;\r\n\r\n // Update the remainder.\r\n if (rem[0]) {\r\n rem[remL++] = xc[xi] || 0;\r\n } else {\r\n rem = [xc[xi]];\r\n remL = 1;\r\n }\r\n } while ((xi++ < xL || rem[0] != null) && s--);\r\n\r\n more = rem[0] != null;\r\n\r\n // Leading zero?\r\n if (!qc[0]) qc.splice(0, 1);\r\n }\r\n\r\n if (base == BASE) {\r\n\r\n // To calculate q.e, first get the number of digits of qc[0].\r\n for (i = 1, s = qc[0]; s >= 10; s /= 10, i++);\r\n\r\n round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more);\r\n\r\n // Caller is convertBase.\r\n } else {\r\n q.e = e;\r\n q.r = +more;\r\n }\r\n\r\n return q;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a string representing the value of BigNumber n in fixed-point or exponential\r\n * notation rounded to the specified decimal places or significant digits.\r\n *\r\n * n: a BigNumber.\r\n * i: the index of the last digit required (i.e. the digit that may be rounded up).\r\n * rm: the rounding mode.\r\n * id: 1 (toExponential) or 2 (toPrecision).\r\n */\r\n function format(n, i, rm, id) {\r\n var c0, e, ne, len, str;\r\n\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n if (!n.c) return n.toString();\r\n\r\n c0 = n.c[0];\r\n ne = n.e;\r\n\r\n if (i == null) {\r\n str = coeffToString(n.c);\r\n str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS)\r\n ? 
toExponential(str, ne)\r\n : toFixedPoint(str, ne, '0');\r\n } else {\r\n n = round(new BigNumber(n), i, rm);\r\n\r\n // n.e may have changed if the value was rounded up.\r\n e = n.e;\r\n\r\n str = coeffToString(n.c);\r\n len = str.length;\r\n\r\n // toPrecision returns exponential notation if the number of significant digits\r\n // specified is less than the number of digits necessary to represent the integer\r\n // part of the value in fixed-point notation.\r\n\r\n // Exponential notation.\r\n if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) {\r\n\r\n // Append zeros?\r\n for (; len < i; str += '0', len++);\r\n str = toExponential(str, e);\r\n\r\n // Fixed-point notation.\r\n } else {\r\n i -= ne;\r\n str = toFixedPoint(str, e, '0');\r\n\r\n // Append zeros?\r\n if (e + 1 > len) {\r\n if (--i > 0) for (str += '.'; i--; str += '0');\r\n } else {\r\n i += e - len;\r\n if (i > 0) {\r\n if (e + 1 == len) str += '.';\r\n for (; i--; str += '0');\r\n }\r\n }\r\n }\r\n }\r\n\r\n return n.s < 0 && c0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // Handle BigNumber.max and BigNumber.min.\r\n function maxOrMin(args, method) {\r\n var n,\r\n i = 1,\r\n m = new BigNumber(args[0]);\r\n\r\n for (; i < args.length; i++) {\r\n n = new BigNumber(args[i]);\r\n\r\n // If any number is NaN, return NaN.\r\n if (!n.s) {\r\n m = n;\r\n break;\r\n } else if (method.call(m, n)) {\r\n m = n;\r\n }\r\n }\r\n\r\n return m;\r\n }\r\n\r\n\r\n /*\r\n * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP.\r\n * Called by minus, plus and times.\r\n */\r\n function normalise(n, c, e) {\r\n var i = 1,\r\n j = c.length;\r\n\r\n // Remove trailing zeros.\r\n for (; !c[--j]; c.pop());\r\n\r\n // Calculate the base 10 exponent. First get the number of digits of c[0].\r\n for (j = c[0]; j >= 10; j /= 10, i++);\r\n\r\n // Overflow?\r\n if ((e = i + e * LOG_BASE - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n n.c = n.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n n.c = [n.e = 0];\r\n } else {\r\n n.e = e;\r\n n.c = c;\r\n }\r\n\r\n return n;\r\n }\r\n\r\n\r\n // Handle values that fail the validity test in BigNumber.\r\n parseNumeric = (function () {\r\n var basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i,\r\n dotAfter = /^([^.]+)\\.$/,\r\n dotBefore = /^\\.([^.]+)$/,\r\n isInfinityOrNaN = /^-?(Infinity|NaN)$/,\r\n whitespaceOrPlus = /^\\s*\\+(?=[\\w.])|^\\s+|\\s+$/g;\r\n\r\n return function (x, str, isNum, b) {\r\n var base,\r\n s = isNum ? str : str.replace(whitespaceOrPlus, '');\r\n\r\n // No exception on ±Infinity or NaN.\r\n if (isInfinityOrNaN.test(s)) {\r\n x.s = isNaN(s) ? null : s < 0 ? -1 : 1;\r\n } else {\r\n if (!isNum) {\r\n\r\n // basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i\r\n s = s.replace(basePrefix, function (m, p1, p2) {\r\n base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8;\r\n return !b || b == base ? p1 : m;\r\n });\r\n\r\n if (b) {\r\n base = b;\r\n\r\n // E.g. '1.' to '1', '.1' to '0.1'\r\n s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1');\r\n }\r\n\r\n if (str != s) return new BigNumber(s, base);\r\n }\r\n\r\n // '[BigNumber Error] Not a number: {n}'\r\n // '[BigNumber Error] Not a base {b} number: {n}'\r\n if (BigNumber.DEBUG) {\r\n throw Error\r\n (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str);\r\n }\r\n\r\n // NaN\r\n x.s = null;\r\n }\r\n\r\n x.c = x.e = null;\r\n }\r\n })();\r\n\r\n\r\n /*\r\n * Round x to sd significant digits using rounding mode rm. 
Check for over/under-flow.\r\n * If r is truthy, it is known that there are more digits after the rounding digit.\r\n */\r\n function round(x, sd, rm, r) {\r\n var d, i, j, k, n, ni, rd,\r\n xc = x.c,\r\n pows10 = POWS_TEN;\r\n\r\n // if x is not Infinity or NaN...\r\n if (xc) {\r\n\r\n // rd is the rounding digit, i.e. the digit after the digit that may be rounded up.\r\n // n is a base 1e14 number, the value of the element of array x.c containing rd.\r\n // ni is the index of n within x.c.\r\n // d is the number of digits of n.\r\n // i is the index of rd within n including leading zeros.\r\n // j is the actual index of rd within n (if < 0, rd is a leading zero).\r\n out: {\r\n\r\n // Get the number of digits of the first element of xc.\r\n for (d = 1, k = xc[0]; k >= 10; k /= 10, d++);\r\n i = sd - d;\r\n\r\n // If the rounding digit is in the first element of xc...\r\n if (i < 0) {\r\n i += LOG_BASE;\r\n j = sd;\r\n n = xc[ni = 0];\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = n / pows10[d - j - 1] % 10 | 0;\r\n } else {\r\n ni = mathceil((i + 1) / LOG_BASE);\r\n\r\n if (ni >= xc.length) {\r\n\r\n if (r) {\r\n\r\n // Needed by sqrt.\r\n for (; xc.length <= ni; xc.push(0));\r\n n = rd = 0;\r\n d = 1;\r\n i %= LOG_BASE;\r\n j = i - LOG_BASE + 1;\r\n } else {\r\n break out;\r\n }\r\n } else {\r\n n = k = xc[ni];\r\n\r\n // Get the number of digits of n.\r\n for (d = 1; k >= 10; k /= 10, d++);\r\n\r\n // Get the index of rd within n.\r\n i %= LOG_BASE;\r\n\r\n // Get the index of rd within n, adjusted for leading zeros.\r\n // The number of leading zeros of n is given by LOG_BASE - d.\r\n j = i - LOG_BASE + d;\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0;\r\n }\r\n }\r\n\r\n r = r || sd < 0 ||\r\n\r\n // Are there any non-zero digits after the rounding digit?\r\n // The expression n % pows10[d - j - 1] returns all digits of n to the right\r\n // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714.\r\n xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]);\r\n\r\n r = rm < 4\r\n ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 &&\r\n\r\n // Check whether the digit to the left of the rounding digit is odd.\r\n ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n if (sd < 1 || !xc[0]) {\r\n xc.length = 0;\r\n\r\n if (r) {\r\n\r\n // Convert sd to decimal places.\r\n sd -= x.e + 1;\r\n\r\n // 1, 0.1, 0.01, 0.001, 0.0001 etc.\r\n xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE];\r\n x.e = -sd || 0;\r\n } else {\r\n\r\n // Zero.\r\n xc[0] = x.e = 0;\r\n }\r\n\r\n return x;\r\n }\r\n\r\n // Remove excess digits.\r\n if (i == 0) {\r\n xc.length = ni;\r\n k = 1;\r\n ni--;\r\n } else {\r\n xc.length = ni + 1;\r\n k = pows10[LOG_BASE - i];\r\n\r\n // E.g. 56700 becomes 56000 if 7 is the rounding digit.\r\n // j > 0 means i > number of leading zeros of n.\r\n xc[ni] = j > 0 ? 
mathfloor(n / pows10[d - j] % pows10[j]) * k : 0;\r\n }\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n for (; ;) {\r\n\r\n // If the digit to be rounded up is in the first element of xc...\r\n if (ni == 0) {\r\n\r\n // i will be the length of xc[0] before k is added.\r\n for (i = 1, j = xc[0]; j >= 10; j /= 10, i++);\r\n j = xc[0] += k;\r\n for (k = 1; j >= 10; j /= 10, k++);\r\n\r\n // if i != k the length has increased.\r\n if (i != k) {\r\n x.e++;\r\n if (xc[0] == BASE) xc[0] = 1;\r\n }\r\n\r\n break;\r\n } else {\r\n xc[ni] += k;\r\n if (xc[ni] != BASE) break;\r\n xc[ni--] = 0;\r\n k = 1;\r\n }\r\n }\r\n }\r\n\r\n // Remove trailing zeros.\r\n for (i = xc.length; xc[--i] === 0; xc.pop());\r\n }\r\n\r\n // Overflow? Infinity.\r\n if (x.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n\r\n // Underflow? Zero.\r\n } else if (x.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n return x;\r\n }\r\n\r\n\r\n function valueOf(n) {\r\n var str,\r\n e = n.e;\r\n\r\n if (e === null) return n.toString();\r\n\r\n str = coeffToString(n.c);\r\n\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(str, e)\r\n : toFixedPoint(str, e, '0');\r\n\r\n return n.s < 0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // PROTOTYPE/INSTANCE METHODS\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the absolute value of this BigNumber.\r\n */\r\n P.absoluteValue = P.abs = function () {\r\n var x = new BigNumber(this);\r\n if (x.s < 0) x.s = 1;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * Return\r\n * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * -1 if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * 0 if they have the same value,\r\n * or null if the value of either is NaN.\r\n */\r\n P.comparedTo = function (y, b) {\r\n return compare(this, new BigNumber(y, b));\r\n };\r\n\r\n\r\n /*\r\n * If dp is undefined or null or true or false, return the number of decimal places of the\r\n * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n *\r\n * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * [dp] {number} Decimal places: integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.decimalPlaces = P.dp = function (dp, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), dp + x.e + 1, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE;\r\n\r\n // Subtract the number of trailing zeros of the last number.\r\n if (v = c[v]) for (; v % 10 == 0; v /= 10, n--);\r\n if (n < 0) n = 0;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * n / 0 = I\r\n * n / N = N\r\n * n / I = 0\r\n * 0 / n = 0\r\n * 0 / 0 = N\r\n * 0 / N = N\r\n * 0 / I = 0\r\n * N / n = N\r\n * N / 0 = N\r\n * N / N = N\r\n * N / I = N\r\n * I / n = I\r\n * I / 0 = I\r\n * I / N = N\r\n * I / I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber divided by the value of\r\n * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.dividedBy = P.div = function (y, b) {\r\n return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the integer part of dividing the value of this\r\n * BigNumber by the value of BigNumber(y, b).\r\n */\r\n P.dividedToIntegerBy = P.idiv = function (y, b) {\r\n return div(this, new BigNumber(y, b), 0, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the value of this BigNumber exponentiated by n.\r\n *\r\n * If m is present, return the result modulo m.\r\n * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE.\r\n * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE.\r\n *\r\n * The modular power operation works efficiently when x, n, and m are integers, otherwise it\r\n * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0.\r\n *\r\n * n {number|string|BigNumber} The exponent. An integer.\r\n * [m] {number|string|BigNumber} The modulus.\r\n *\r\n * '[BigNumber Error] Exponent not an integer: {n}'\r\n */\r\n P.exponentiatedBy = P.pow = function (n, m) {\r\n var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y,\r\n x = this;\r\n\r\n n = new BigNumber(n);\r\n\r\n // Allow NaN and ±Infinity, but not other non-integers.\r\n if (n.c && !n.isInteger()) {\r\n throw Error\r\n (bignumberError + 'Exponent not an integer: ' + valueOf(n));\r\n }\r\n\r\n if (m != null) m = new BigNumber(m);\r\n\r\n // Exponent of MAX_SAFE_INTEGER is 15.\r\n nIsBig = n.e > 14;\r\n\r\n // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0.\r\n if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) {\r\n\r\n // The sign of the result of pow when x is negative depends on the evenness of n.\r\n // If +n overflows to ±Infinity, the evenness of n would be not be known.\r\n y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n)));\r\n return m ? y.mod(m) : y;\r\n }\r\n\r\n nIsNeg = n.s < 0;\r\n\r\n if (m) {\r\n\r\n // x % m returns NaN if abs(m) is zero, or m is NaN.\r\n if (m.c ? 
!m.c[0] : !m.s) return new BigNumber(NaN);\r\n\r\n isModExp = !nIsNeg && x.isInteger() && m.isInteger();\r\n\r\n if (isModExp) x = x.mod(m);\r\n\r\n // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15.\r\n // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15.\r\n } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0\r\n // [1, 240000000]\r\n ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7\r\n // [80000000000000] [99999750000000]\r\n : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) {\r\n\r\n // If x is negative and n is odd, k = -0, else k = 0.\r\n k = x.s < 0 && isOdd(n) ? -0 : 0;\r\n\r\n // If x >= 1, k = ±Infinity.\r\n if (x.e > -1) k = 1 / k;\r\n\r\n // If n is negative return ±0, else return ±Infinity.\r\n return new BigNumber(nIsNeg ? 1 / k : k);\r\n\r\n } else if (POW_PRECISION) {\r\n\r\n // Truncating each coefficient array to a length of k after each multiplication\r\n // equates to truncating significant digits to POW_PRECISION + [28, 41],\r\n // i.e. there will be a minimum of 28 guard digits retained.\r\n k = mathceil(POW_PRECISION / LOG_BASE + 2);\r\n }\r\n\r\n if (nIsBig) {\r\n half = new BigNumber(0.5);\r\n if (nIsNeg) n.s = 1;\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = Math.abs(+valueOf(n));\r\n nIsOdd = i % 2;\r\n }\r\n\r\n y = new BigNumber(ONE);\r\n\r\n // Performs 54 loop iterations for n of 9007199254740991.\r\n for (; ;) {\r\n\r\n if (nIsOdd) {\r\n y = y.times(x);\r\n if (!y.c) break;\r\n\r\n if (k) {\r\n if (y.c.length > k) y.c.length = k;\r\n } else if (isModExp) {\r\n y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (i) {\r\n i = mathfloor(i / 2);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n } else {\r\n n = n.times(half);\r\n round(n, n.e + 1, 1);\r\n\r\n if (n.e > 14) {\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = +valueOf(n);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n }\r\n }\r\n\r\n x = x.times(x);\r\n\r\n if (k) {\r\n if (x.c && x.c.length > k) x.c.length = k;\r\n } else if (isModExp) {\r\n x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (isModExp) return y;\r\n if (nIsNeg) y = ONE.div(y);\r\n\r\n return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer\r\n * using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}'\r\n */\r\n P.integerValue = function (rm) {\r\n var n = new BigNumber(this);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n return round(n, n.e + 1, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isEqualTo = P.eq = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is a finite number, otherwise return false.\r\n */\r\n P.isFinite = function () {\r\n return !!this.c;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isGreaterThan = P.gt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isGreaterThanOrEqualTo = P.gte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0;\r\n\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is an integer, otherwise return false.\r\n */\r\n P.isInteger = function () {\r\n return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isLessThan = P.lt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isLessThanOrEqualTo = P.lte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is NaN, otherwise return false.\r\n */\r\n P.isNaN = function () {\r\n return !this.s;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is negative, otherwise return false.\r\n */\r\n P.isNegative = function () {\r\n return this.s < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is positive, otherwise return false.\r\n */\r\n P.isPositive = function () {\r\n return this.s > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is 0 or -0, otherwise return false.\r\n */\r\n P.isZero = function () {\r\n return !!this.c && this.c[0] == 0;\r\n };\r\n\r\n\r\n /*\r\n * n - 0 = n\r\n * n - N = N\r\n * n - I = -I\r\n * 0 - n = -n\r\n * 0 - 0 = 0\r\n * 0 - N = N\r\n * 0 - I = -I\r\n * N - n = N\r\n * N - 0 = N\r\n * N - N = N\r\n * N - I = N\r\n * I - n = I\r\n * I - 0 = I\r\n * I - N = N\r\n * I - I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber minus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.minus = function (y, b) {\r\n var i, j, t, xLTy,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.plus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return 
xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN);\r\n\r\n // Either zero?\r\n if (!xc[0] || !yc[0]) {\r\n\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x :\r\n\r\n // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity\r\n ROUNDING_MODE == 3 ? -0 : 0);\r\n }\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Determine which is the bigger number.\r\n if (a = xe - ye) {\r\n\r\n if (xLTy = a < 0) {\r\n a = -a;\r\n t = xc;\r\n } else {\r\n ye = xe;\r\n t = yc;\r\n }\r\n\r\n t.reverse();\r\n\r\n // Prepend zeros to equalise exponents.\r\n for (b = a; b--; t.push(0));\r\n t.reverse();\r\n } else {\r\n\r\n // Exponents equal. Check digit by digit.\r\n j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b;\r\n\r\n for (a = b = 0; b < j; b++) {\r\n\r\n if (xc[b] != yc[b]) {\r\n xLTy = xc[b] < yc[b];\r\n break;\r\n }\r\n }\r\n }\r\n\r\n // x < y? Point xc to the array of the bigger number.\r\n if (xLTy) {\r\n t = xc;\r\n xc = yc;\r\n yc = t;\r\n y.s = -y.s;\r\n }\r\n\r\n b = (j = yc.length) - (i = xc.length);\r\n\r\n // Append zeros to xc if shorter.\r\n // No need to add zeros to yc if shorter as subtract only needs to start at yc.length.\r\n if (b > 0) for (; b--; xc[i++] = 0);\r\n b = BASE - 1;\r\n\r\n // Subtract yc from xc.\r\n for (; j > a;) {\r\n\r\n if (xc[--j] < yc[j]) {\r\n for (i = j; i && !xc[--i]; xc[i] = b);\r\n --xc[i];\r\n xc[j] += BASE;\r\n }\r\n\r\n xc[j] -= yc[j];\r\n }\r\n\r\n // Remove leading zeros and adjust exponent accordingly.\r\n for (; xc[0] == 0; xc.splice(0, 1), --ye);\r\n\r\n // Zero?\r\n if (!xc[0]) {\r\n\r\n // Following IEEE 754 (2008) 6.3,\r\n // n - n = +0 but n - n = -0 when rounding towards -Infinity.\r\n y.s = ROUNDING_MODE == 3 ? -1 : 1;\r\n y.c = [y.e = 0];\r\n return y;\r\n }\r\n\r\n // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity\r\n // for finite x and y.\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * n % 0 = N\r\n * n % N = N\r\n * n % I = n\r\n * 0 % n = 0\r\n * -0 % n = -0\r\n * 0 % 0 = N\r\n * 0 % N = N\r\n * 0 % I = 0\r\n * N % n = N\r\n * N % 0 = N\r\n * N % N = N\r\n * N % I = N\r\n * I % n = N\r\n * I % 0 = N\r\n * I % N = N\r\n * I % I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber modulo the value of\r\n * BigNumber(y, b). 
The result depends on the value of MODULO_MODE.\r\n */\r\n P.modulo = P.mod = function (y, b) {\r\n var q, s,\r\n x = this;\r\n\r\n y = new BigNumber(y, b);\r\n\r\n // Return NaN if x is Infinity or NaN, or y is NaN or zero.\r\n if (!x.c || !y.s || y.c && !y.c[0]) {\r\n return new BigNumber(NaN);\r\n\r\n // Return x if y is Infinity or x is zero.\r\n } else if (!y.c || x.c && !x.c[0]) {\r\n return new BigNumber(x);\r\n }\r\n\r\n if (MODULO_MODE == 9) {\r\n\r\n // Euclidian division: q = sign(y) * floor(x / abs(y))\r\n // r = x - qy where 0 <= r < abs(y)\r\n s = y.s;\r\n y.s = 1;\r\n q = div(x, y, 0, 3);\r\n y.s = s;\r\n q.s *= s;\r\n } else {\r\n q = div(x, y, 0, MODULO_MODE);\r\n }\r\n\r\n y = x.minus(q.times(y));\r\n\r\n // To match JavaScript %, ensure sign of zero is sign of dividend.\r\n if (!y.c[0] && MODULO_MODE == 1) y.s = x.s;\r\n\r\n return y;\r\n };\r\n\r\n\r\n /*\r\n * n * 0 = 0\r\n * n * N = N\r\n * n * I = I\r\n * 0 * n = 0\r\n * 0 * 0 = 0\r\n * 0 * N = N\r\n * 0 * I = N\r\n * N * n = N\r\n * N * 0 = N\r\n * N * N = N\r\n * N * I = N\r\n * I * n = I\r\n * I * 0 = N\r\n * I * N = N\r\n * I * I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value\r\n * of BigNumber(y, b).\r\n */\r\n P.multipliedBy = P.times = function (y, b) {\r\n var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc,\r\n base, sqrtBase,\r\n x = this,\r\n xc = x.c,\r\n yc = (y = new BigNumber(y, b)).c;\r\n\r\n // Either NaN, ±Infinity or ±0?\r\n if (!xc || !yc || !xc[0] || !yc[0]) {\r\n\r\n // Return NaN if either is NaN, or one is 0 and the other is Infinity.\r\n if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) {\r\n y.c = y.e = y.s = null;\r\n } else {\r\n y.s *= x.s;\r\n\r\n // Return ±Infinity if either is ±Infinity.\r\n if (!xc || !yc) {\r\n y.c = y.e = null;\r\n\r\n // Return ±0 if either is ±0.\r\n } else {\r\n y.c = [0];\r\n y.e = 0;\r\n }\r\n }\r\n\r\n return y;\r\n }\r\n\r\n e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE);\r\n y.s *= x.s;\r\n xcL = xc.length;\r\n ycL = yc.length;\r\n\r\n // Ensure xc points to longer array and xcL to its length.\r\n if (xcL < ycL) {\r\n zc = xc;\r\n xc = yc;\r\n yc = zc;\r\n i = xcL;\r\n xcL = ycL;\r\n ycL = i;\r\n }\r\n\r\n // Initialise the result array with zeros.\r\n for (i = xcL + ycL, zc = []; i--; zc.push(0));\r\n\r\n base = BASE;\r\n sqrtBase = SQRT_BASE;\r\n\r\n for (i = ycL; --i >= 0;) {\r\n c = 0;\r\n ylo = yc[i] % sqrtBase;\r\n yhi = yc[i] / sqrtBase | 0;\r\n\r\n for (k = xcL, j = i + k; j > i;) {\r\n xlo = xc[--k] % sqrtBase;\r\n xhi = xc[k] / sqrtBase | 0;\r\n m = yhi * xlo + xhi * ylo;\r\n xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c;\r\n c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi;\r\n zc[j--] = xlo % base;\r\n }\r\n\r\n zc[j] = c;\r\n }\r\n\r\n if (c) {\r\n ++e;\r\n } else {\r\n zc.splice(0, 1);\r\n }\r\n\r\n return normalise(y, zc, e);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber negated,\r\n * i.e. 
multiplied by -1.\r\n */\r\n P.negated = function () {\r\n var x = new BigNumber(this);\r\n x.s = -x.s || null;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * n + 0 = n\r\n * n + N = N\r\n * n + I = I\r\n * 0 + n = n\r\n * 0 + 0 = 0\r\n * 0 + N = N\r\n * 0 + I = I\r\n * N + n = N\r\n * N + 0 = N\r\n * N + N = N\r\n * N + I = N\r\n * I + n = I\r\n * I + 0 = I\r\n * I + N = N\r\n * I + I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber plus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.plus = function (y, b) {\r\n var t,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.minus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Return ±Infinity if either ±Infinity.\r\n if (!xc || !yc) return new BigNumber(a / 0);\r\n\r\n // Either zero?\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0);\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts.\r\n if (a = xe - ye) {\r\n if (a > 0) {\r\n ye = xe;\r\n t = yc;\r\n } else {\r\n a = -a;\r\n t = xc;\r\n }\r\n\r\n t.reverse();\r\n for (; a--; t.push(0));\r\n t.reverse();\r\n }\r\n\r\n a = xc.length;\r\n b = yc.length;\r\n\r\n // Point xc to the longer array, and b to the shorter length.\r\n if (a - b < 0) {\r\n t = yc;\r\n yc = xc;\r\n xc = t;\r\n b = a;\r\n }\r\n\r\n // Only start adding at yc.length - 1 as the further digits of xc can be ignored.\r\n for (a = 0; b;) {\r\n a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0;\r\n xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE;\r\n }\r\n\r\n if (a) {\r\n xc = [a].concat(xc);\r\n ++ye;\r\n }\r\n\r\n // No need to check for zero, as +x + +y != 0 && -x + -y != 0\r\n // ye = MAX_EXP + 1 possible\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * If sd is undefined or null or true or false, return the number of significant digits of\r\n * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n * If sd is true include integer-part trailing zeros in the count.\r\n *\r\n * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive.\r\n * boolean: whether to count integer-part trailing zeros: true or false.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.precision = P.sd = function (sd, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (sd != null && sd !== !!sd) {\r\n intCheck(sd, 1, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), sd, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n v = c.length - 1;\r\n n = v * LOG_BASE + 1;\r\n\r\n if (v = c[v]) {\r\n\r\n // Subtract the number of trailing zeros of the last element.\r\n for (; v % 10 == 0; v /= 10, n--);\r\n\r\n // Add the number of digits of the first element.\r\n for (v = c[0]; v >= 10; v /= 10, n++);\r\n }\r\n\r\n if (sd && x.e + 1 > n) n = x.e + 1;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber shifted by k places\r\n * (powers of 10). Shift to the right if n > 0, and to the left if n < 0.\r\n *\r\n * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}'\r\n */\r\n P.shiftedBy = function (k) {\r\n intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER);\r\n return this.times('1e' + k);\r\n };\r\n\r\n\r\n /*\r\n * sqrt(-n) = N\r\n * sqrt(N) = N\r\n * sqrt(-I) = N\r\n * sqrt(I) = I\r\n * sqrt(0) = 0\r\n * sqrt(-0) = -0\r\n *\r\n * Return a new BigNumber whose value is the square root of the value of this BigNumber,\r\n * rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.squareRoot = P.sqrt = function () {\r\n var m, n, r, rep, t,\r\n x = this,\r\n c = x.c,\r\n s = x.s,\r\n e = x.e,\r\n dp = DECIMAL_PLACES + 4,\r\n half = new BigNumber('0.5');\r\n\r\n // Negative/NaN/Infinity/zero?\r\n if (s !== 1 || !c || !c[0]) {\r\n return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0);\r\n }\r\n\r\n // Initial estimate.\r\n s = Math.sqrt(+valueOf(x));\r\n\r\n // Math.sqrt underflow/overflow?\r\n // Pass x to Math.sqrt as integer, then adjust the exponent of the result.\r\n if (s == 0 || s == 1 / 0) {\r\n n = coeffToString(c);\r\n if ((n.length + e) % 2 == 0) n += '0';\r\n s = Math.sqrt(+n);\r\n e = bitFloor((e + 1) / 2) - (e < 0 || e % 2);\r\n\r\n if (s == 1 / 0) {\r\n n = '5e' + e;\r\n } else {\r\n n = s.toExponential();\r\n n = n.slice(0, n.indexOf('e') + 1) + e;\r\n }\r\n\r\n r = new BigNumber(n);\r\n } else {\r\n r = new BigNumber(s + '');\r\n }\r\n\r\n // Check for zero.\r\n // r could be zero if MIN_EXP is changed after the this value was created.\r\n // This would cause a division by zero (x/t) and hence Infinity below, which would cause\r\n // coeffToString to throw.\r\n if (r.c[0]) {\r\n e = r.e;\r\n s = e + dp;\r\n if (s < 3) s = 0;\r\n\r\n // Newton-Raphson iteration.\r\n for (; ;) {\r\n t = r;\r\n r = half.times(t.plus(div(x, t, dp, 1)));\r\n\r\n if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) {\r\n\r\n // The exponent of r may here be one less than the final result exponent,\r\n // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits\r\n // are indexed correctly.\r\n if (r.e < e) --s;\r\n n = n.slice(s - 3, s + 1);\r\n\r\n // The 4th rounding digit may be in error by -1 so if the 4 rounding digits\r\n // are 9999 or 4999 (i.e. 
approaching a rounding boundary) continue the\r\n // iteration.\r\n if (n == '9999' || !rep && n == '4999') {\r\n\r\n // On the first iteration only, check to see if rounding up gives the\r\n // exact result as the nines may infinitely repeat.\r\n if (!rep) {\r\n round(t, t.e + DECIMAL_PLACES + 2, 0);\r\n\r\n if (t.times(t).eq(x)) {\r\n r = t;\r\n break;\r\n }\r\n }\r\n\r\n dp += 4;\r\n s += 4;\r\n rep = 1;\r\n } else {\r\n\r\n // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact\r\n // result. If not, then there are further digits and m will be truthy.\r\n if (!+n || !+n.slice(1) && n.charAt(0) == '5') {\r\n\r\n // Truncate to the first rounding digit.\r\n round(r, r.e + DECIMAL_PLACES + 2, 1);\r\n m = !r.times(r).eq(x);\r\n }\r\n\r\n break;\r\n }\r\n }\r\n }\r\n }\r\n\r\n return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in exponential notation and\r\n * rounded using ROUNDING_MODE to dp fixed decimal places.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toExponential = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp++;\r\n }\r\n return format(this, dp, rm, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounding\r\n * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * Note: as with JavaScript's number type, (-0).toFixed(0) is '0',\r\n * but e.g. (-0.00001).toFixed(0) is '-0'.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toFixed = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp = dp + this.e + 1;\r\n }\r\n return format(this, dp, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounded\r\n * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties\r\n * of the format or FORMAT object (see BigNumber.set).\r\n *\r\n * The formatting object may contain some or all of the properties shown below.\r\n *\r\n * FORMAT = {\r\n * prefix: '',\r\n * groupSize: 3,\r\n * secondaryGroupSize: 0,\r\n * groupSeparator: ',',\r\n * decimalSeparator: '.',\r\n * fractionGroupSize: 0,\r\n * fractionGroupSeparator: '\\xA0', // non-breaking space\r\n * suffix: ''\r\n * };\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n * [format] {object} Formatting options. 
See FORMAT pbject above.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n * '[BigNumber Error] Argument not an object: {format}'\r\n */\r\n P.toFormat = function (dp, rm, format) {\r\n var str,\r\n x = this;\r\n\r\n if (format == null) {\r\n if (dp != null && rm && typeof rm == 'object') {\r\n format = rm;\r\n rm = null;\r\n } else if (dp && typeof dp == 'object') {\r\n format = dp;\r\n dp = rm = null;\r\n } else {\r\n format = FORMAT;\r\n }\r\n } else if (typeof format != 'object') {\r\n throw Error\r\n (bignumberError + 'Argument not an object: ' + format);\r\n }\r\n\r\n str = x.toFixed(dp, rm);\r\n\r\n if (x.c) {\r\n var i,\r\n arr = str.split('.'),\r\n g1 = +format.groupSize,\r\n g2 = +format.secondaryGroupSize,\r\n groupSeparator = format.groupSeparator || '',\r\n intPart = arr[0],\r\n fractionPart = arr[1],\r\n isNeg = x.s < 0,\r\n intDigits = isNeg ? intPart.slice(1) : intPart,\r\n len = intDigits.length;\r\n\r\n if (g2) {\r\n i = g1;\r\n g1 = g2;\r\n g2 = i;\r\n len -= i;\r\n }\r\n\r\n if (g1 > 0 && len > 0) {\r\n i = len % g1 || g1;\r\n intPart = intDigits.substr(0, i);\r\n for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1);\r\n if (g2 > 0) intPart += groupSeparator + intDigits.slice(i);\r\n if (isNeg) intPart = '-' + intPart;\r\n }\r\n\r\n str = fractionPart\r\n ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize)\r\n ? fractionPart.replace(new RegExp('\\\\d{' + g2 + '}\\\\B', 'g'),\r\n '$&' + (format.fractionGroupSeparator || ''))\r\n : fractionPart)\r\n : intPart;\r\n }\r\n\r\n return (format.prefix || '') + str + (format.suffix || '');\r\n };\r\n\r\n\r\n /*\r\n * Return an array of two BigNumbers representing the value of this BigNumber as a simple\r\n * fraction with an integer numerator and an integer denominator.\r\n * The denominator will be a positive non-zero value less than or equal to the specified\r\n * maximum denominator. If a maximum denominator is not specified, the denominator will be\r\n * the lowest value necessary to represent the number exactly.\r\n *\r\n * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator.\r\n *\r\n * '[BigNumber Error] Argument {not an integer|out of range} : {md}'\r\n */\r\n P.toFraction = function (md) {\r\n var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s,\r\n x = this,\r\n xc = x.c;\r\n\r\n if (md != null) {\r\n n = new BigNumber(md);\r\n\r\n // Throw if md is less than one or is not an integer, unless it is Infinity.\r\n if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) {\r\n throw Error\r\n (bignumberError + 'Argument ' +\r\n (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n));\r\n }\r\n }\r\n\r\n if (!xc) return new BigNumber(x);\r\n\r\n d = new BigNumber(ONE);\r\n n1 = d0 = new BigNumber(ONE);\r\n d1 = n0 = new BigNumber(ONE);\r\n s = coeffToString(xc);\r\n\r\n // Determine initial denominator.\r\n // d is a power of 10 and the minimum max denominator that specifies the value exactly.\r\n e = d.e = s.length - x.e - 1;\r\n d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp];\r\n md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n;\r\n\r\n exp = MAX_EXP;\r\n MAX_EXP = 1 / 0;\r\n n = new BigNumber(s);\r\n\r\n // n0 = d1 = 0\r\n n0.c[0] = 0;\r\n\r\n for (; ;) {\r\n q = div(n, d, 0, 1);\r\n d2 = d0.plus(q.times(d1));\r\n if (d2.comparedTo(md) == 1) break;\r\n d0 = d1;\r\n d1 = d2;\r\n n1 = n0.plus(q.times(d2 = n1));\r\n n0 = d2;\r\n d = n.minus(q.times(d2 = d));\r\n n = d2;\r\n }\r\n\r\n d2 = div(md.minus(d0), d1, 0, 1);\r\n n0 = n0.plus(d2.times(n1));\r\n d0 = d0.plus(d2.times(d1));\r\n n0.s = n1.s = x.s;\r\n e = e * 2;\r\n\r\n // Determine which fraction is closer to x, n0/d0 or n1/d1\r\n r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo(\r\n div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0];\r\n\r\n MAX_EXP = exp;\r\n\r\n return r;\r\n };\r\n\r\n\r\n /*\r\n * Return the value of this BigNumber converted to a number primitive.\r\n */\r\n P.toNumber = function () {\r\n return +valueOf(this);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber rounded to sd significant digits\r\n * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits\r\n * necessary to represent the integer part of the value in fixed-point notation, then use\r\n * exponential notation.\r\n *\r\n * [sd] {number} Significant digits. Integer, 1 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.toPrecision = function (sd, rm) {\r\n if (sd != null) intCheck(sd, 1, MAX);\r\n return format(this, sd, rm, 2);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in base b, or base 10 if b is\r\n * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and\r\n * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent\r\n * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than\r\n * TO_EXP_NEG, return exponential notation.\r\n *\r\n * [b] {number} Integer, 2 to ALPHABET.length inclusive.\r\n *\r\n * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n */\r\n P.toString = function (b) {\r\n var str,\r\n n = this,\r\n s = n.s,\r\n e = n.e;\r\n\r\n // Infinity or NaN?\r\n if (e === null) {\r\n if (s) {\r\n str = 'Infinity';\r\n if (s < 0) str = '-' + str;\r\n } else {\r\n str = 'NaN';\r\n }\r\n } else {\r\n if (b == null) {\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(coeffToString(n.c), e)\r\n : toFixedPoint(coeffToString(n.c), e, '0');\r\n } else if (b === 10 && alphabetHasNormalDecimalDigits) {\r\n n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE);\r\n str = toFixedPoint(coeffToString(n.c), n.e, '0');\r\n } else {\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true);\r\n }\r\n\r\n if (s < 0 && n.c[0]) str = '-' + str;\r\n }\r\n\r\n return str;\r\n };\r\n\r\n\r\n /*\r\n * Return as toString, but do not accept a base argument, and include the minus sign for\r\n * negative zero.\r\n */\r\n P.valueOf = P.toJSON = function () {\r\n return valueOf(this);\r\n };\r\n\r\n\r\n P._isBigNumber = true;\r\n\r\n if (configObject != null) BigNumber.set(configObject);\r\n\r\n return BigNumber;\r\n }\r\n\r\n\r\n // PRIVATE HELPER FUNCTIONS\r\n\r\n // These functions don't need access to variables,\r\n // e.g. 
DECIMAL_PLACES, in the scope of the `clone` function above.\r\n\r\n\r\n function bitFloor(n) {\r\n var i = n | 0;\r\n return n > 0 || n === i ? i : i - 1;\r\n }\r\n\r\n\r\n // Return a coefficient array as a string of base 10 digits.\r\n function coeffToString(a) {\r\n var s, z,\r\n i = 1,\r\n j = a.length,\r\n r = a[0] + '';\r\n\r\n for (; i < j;) {\r\n s = a[i++] + '';\r\n z = LOG_BASE - s.length;\r\n for (; z--; s = '0' + s);\r\n r += s;\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (j = r.length; r.charCodeAt(--j) === 48;);\r\n\r\n return r.slice(0, j + 1 || 1);\r\n }\r\n\r\n\r\n // Compare the value of BigNumbers x and y.\r\n function compare(x, y) {\r\n var a, b,\r\n xc = x.c,\r\n yc = y.c,\r\n i = x.s,\r\n j = y.s,\r\n k = x.e,\r\n l = y.e;\r\n\r\n // Either NaN?\r\n if (!i || !j) return null;\r\n\r\n a = xc && !xc[0];\r\n b = yc && !yc[0];\r\n\r\n // Either zero?\r\n if (a || b) return a ? b ? 0 : -j : i;\r\n\r\n // Signs differ?\r\n if (i != j) return i;\r\n\r\n a = i < 0;\r\n b = k == l;\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1;\r\n\r\n // Compare exponents.\r\n if (!b) return k > l ^ a ? 1 : -1;\r\n\r\n j = (k = xc.length) < (l = yc.length) ? k : l;\r\n\r\n // Compare digit by digit.\r\n for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1;\r\n\r\n // Compare lengths.\r\n return k == l ? 0 : k > l ^ a ? 1 : -1;\r\n }\r\n\r\n\r\n /*\r\n * Check that n is a primitive number, an integer, and in range, otherwise throw.\r\n */\r\n function intCheck(n, min, max, name) {\r\n if (n < min || n > max || n !== mathfloor(n)) {\r\n throw Error\r\n (bignumberError + (name || 'Argument') + (typeof n == 'number'\r\n ? n < min || n > max ? ' out of range: ' : ' not an integer: '\r\n : ' not a primitive number: ') + String(n));\r\n }\r\n }\r\n\r\n\r\n // Assumes finite n.\r\n function isOdd(n) {\r\n var k = n.c.length - 1;\r\n return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0;\r\n }\r\n\r\n\r\n function toExponential(str, e) {\r\n return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) +\r\n (e < 0 ? 'e' : 'e+') + e;\r\n }\r\n\r\n\r\n function toFixedPoint(str, e, z) {\r\n var len, zs;\r\n\r\n // Negative exponent?\r\n if (e < 0) {\r\n\r\n // Prepend zeros.\r\n for (zs = z + '.'; ++e; zs += z);\r\n str = zs + str;\r\n\r\n // Positive exponent\r\n } else {\r\n len = str.length;\r\n\r\n // Append zeros.\r\n if (++e > len) {\r\n for (zs = z, e -= len; --e; zs += z);\r\n str += zs;\r\n } else if (e < len) {\r\n str = str.slice(0, e) + '.' + str.slice(e);\r\n }\r\n }\r\n\r\n return str;\r\n }\r\n\r\n\r\n // EXPORT\r\n\r\n\r\n BigNumber = clone();\r\n BigNumber['default'] = BigNumber.BigNumber = BigNumber;\r\n\r\n // AMD.\r\n if (typeof define == 'function' && define.amd) {\r\n define(function () { return BigNumber; });\r\n\r\n // Node.js and other environments that support module.exports.\r\n } else if (typeof module != 'undefined' && module.exports) {\r\n module.exports = BigNumber;\r\n\r\n // Browser.\r\n } else {\r\n if (!globalObject) {\r\n globalObject = typeof self != 'undefined' && self ? 
self : window;\r\n }\r\n\r\n globalObject.BigNumber = BigNumber;\r\n }\r\n})(this);\r\n","/*jshint node:true */\n'use strict';\nvar Buffer = require('buffer').Buffer; // browserify\nvar SlowBuffer = require('buffer').SlowBuffer;\n\nmodule.exports = bufferEq;\n\nfunction bufferEq(a, b) {\n\n // shortcutting on type is necessary for correctness\n if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {\n return false;\n }\n\n // buffer sizes should be well-known information, so despite this\n // shortcutting, it doesn't leak any information about the *contents* of the\n // buffers.\n if (a.length !== b.length) {\n return false;\n }\n\n var c = 0;\n for (var i = 0; i < a.length; i++) {\n /*jshint bitwise:false */\n c |= a[i] ^ b[i]; // XOR\n }\n return c === 0;\n}\n\nbufferEq.install = function() {\n Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) {\n return bufferEq(this, that);\n };\n};\n\nvar origBufEqual = Buffer.prototype.equal;\nvar origSlowBufEqual = SlowBuffer.prototype.equal;\nbufferEq.restore = function() {\n Buffer.prototype.equal = origBufEqual;\n SlowBuffer.prototype.equal = origSlowBufEqual;\n};\n","/*!\n * compressible\n * Copyright(c) 2013 Jonathan Ong\n * Copyright(c) 2014 Jeremiah Senkpiel\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\n\n/**\n * Module variables.\n * @private\n */\n\nvar COMPRESSIBLE_TYPE_REGEXP = /^text\\/|\\+(?:json|text|xml)$/i\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = compressible\n\n/**\n * Checks if a type is compressible.\n *\n * @param {string} type\n * @return {Boolean} compressible\n * @public\n */\n\nfunction compressible (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // strip parameters\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && match[1].toLowerCase()\n var data = db[mime]\n\n // return database information\n if (data && data.compressible !== undefined) {\n return data.compressible\n }\n\n // fallback to regexp or unknown\n return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined\n}\n","/* eslint-env browser */\n\n/**\n * This is the web browser implementation of `debug()`.\n */\n\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.storage = localstorage();\nexports.destroy = (() => {\n\tlet warned = false;\n\n\treturn () => {\n\t\tif (!warned) {\n\t\t\twarned = true;\n\t\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.');\n\t\t}\n\t};\n})();\n\n/**\n * Colors.\n */\n\nexports.colors = [\n\t'#0000CC',\n\t'#0000FF',\n\t'#0033CC',\n\t'#0033FF',\n\t'#0066CC',\n\t'#0066FF',\n\t'#0099CC',\n\t'#0099FF',\n\t'#00CC00',\n\t'#00CC33',\n\t'#00CC66',\n\t'#00CC99',\n\t'#00CCCC',\n\t'#00CCFF',\n\t'#3300CC',\n\t'#3300FF',\n\t'#3333CC',\n\t'#3333FF',\n\t'#3366CC',\n\t'#3366FF',\n\t'#3399CC',\n\t'#3399FF',\n\t'#33CC00',\n\t'#33CC33',\n\t'#33CC66',\n\t'#33CC99',\n\t'#33CCCC',\n\t'#33CCFF',\n\t'#6600CC',\n\t'#6600FF',\n\t'#6633CC',\n\t'#6633FF',\n\t'#66CC00',\n\t'#66CC33',\n\t'#9900CC',\n\t'#9900FF',\n\t'#9933CC',\n\t'#9933FF',\n\t'#99CC00',\n\t'#99CC33',\n\t'#CC0000',\n\t'#CC0033',\n\t'#CC0066',\n\t'#CC0099',\n\t'#CC00CC',\n\t'#CC00FF',\n\t'#CC3300',\n\t'#CC3333',\n\t'#CC3366',\n\t'#CC3399',\n\t'#CC33CC',\n\t'#CC33FF',\n\t'#CC6600',\n\t'#CC6633',\n\t'#CC9900',\n\t'#CC9933',\n\t'#CCCC00',\n\t'#CCCC33',\n\t'#FF0000',\n\t'#FF0033',\n\t'#FF0066',\n\t'#FF0099',\n\t'#FF00CC',\n\t'#FF00FF',\n\t'#FF3300',\n\t'#FF3333',\n\t'#FF3366',\n\t'#FF3399',\n\t'#FF33CC',\n\t'#FF33FF',\n\t'#FF6600',\n\t'#FF6633',\n\t'#FF9900',\n\t'#FF9933',\n\t'#FFCC00',\n\t'#FFCC33'\n];\n\n/**\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\n * and the Firebug extension (any Firefox version) are known\n * to support \"%c\" CSS customizations.\n *\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\n */\n\n// eslint-disable-next-line complexity\nfunction useColors() {\n\t// NB: In an Electron preload script, document will be defined but not fully\n\t// initialized. Since we know we're in Chrome, we'll just detect this case\n\t// explicitly\n\tif (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {\n\t\treturn true;\n\t}\n\n\t// Internet Explorer and Edge do not support colors.\n\tif (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\\/(\\d+)/)) {\n\t\treturn false;\n\t}\n\n\t// Is webkit? http://stackoverflow.com/a/16459606/376773\n\t// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\n\treturn (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\n\t\t// Is firebug? http://stackoverflow.com/a/398120/376773\n\t\t(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\n\t\t// Is firefox >= v31?\n\t\t// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\/(\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\n\t\t// Double check webkit in userAgent just in case we are in a worker\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\/(\\d+)/));\n}\n\n/**\n * Colorize log arguments if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\targs[0] = (this.useColors ? '%c' : '') +\n\t\tthis.namespace +\n\t\t(this.useColors ? ' %c' : ' ') +\n\t\targs[0] +\n\t\t(this.useColors ? 
'%c ' : ' ') +\n\t\t'+' + module.exports.humanize(this.diff);\n\n\tif (!this.useColors) {\n\t\treturn;\n\t}\n\n\tconst c = 'color: ' + this.color;\n\targs.splice(1, 0, c, 'color: inherit');\n\n\t// The final \"%c\" is somewhat tricky, because there could be other\n\t// arguments passed either before or after the %c, so we need to\n\t// figure out the correct index to insert the CSS into\n\tlet index = 0;\n\tlet lastC = 0;\n\targs[0].replace(/%[a-zA-Z%]/g, match => {\n\t\tif (match === '%%') {\n\t\t\treturn;\n\t\t}\n\t\tindex++;\n\t\tif (match === '%c') {\n\t\t\t// We only are interested in the *last* %c\n\t\t\t// (the user may have provided their own)\n\t\t\tlastC = index;\n\t\t}\n\t});\n\n\targs.splice(lastC, 0, c);\n}\n\n/**\n * Invokes `console.debug()` when available.\n * No-op when `console.debug` is not a \"function\".\n * If `console.debug` is not available, falls back\n * to `console.log`.\n *\n * @api public\n */\nexports.log = console.debug || console.log || (() => {});\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\ttry {\n\t\tif (namespaces) {\n\t\t\texports.storage.setItem('debug', namespaces);\n\t\t} else {\n\t\t\texports.storage.removeItem('debug');\n\t\t}\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\nfunction load() {\n\tlet r;\n\ttry {\n\t\tr = exports.storage.getItem('debug');\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n\n\t// If debug isn't set in LS, and we're in Electron, try to load $DEBUG\n\tif (!r && typeof process !== 'undefined' && 'env' in process) {\n\t\tr = process.env.DEBUG;\n\t}\n\n\treturn r;\n}\n\n/**\n * Localstorage attempts to return the localstorage.\n *\n * This is necessary because safari throws\n * when a user disables cookies/localstorage\n * and you attempt to access it.\n *\n * @return {LocalStorage}\n * @api private\n */\n\nfunction localstorage() {\n\ttry {\n\t\t// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context\n\t\t// The Browser also has localStorage in the global context.\n\t\treturn localStorage;\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.\n */\n\nformatters.j = function (v) {\n\ttry {\n\t\treturn JSON.stringify(v);\n\t} catch (error) {\n\t\treturn '[UnexpectedJSONParseError]: ' + error.message;\n\t}\n};\n","\n/**\n * This is the common logic for both the Node.js and web browser\n * implementations of `debug()`.\n */\n\nfunction setup(env) {\n\tcreateDebug.debug = createDebug;\n\tcreateDebug.default = createDebug;\n\tcreateDebug.coerce = coerce;\n\tcreateDebug.disable = disable;\n\tcreateDebug.enable = enable;\n\tcreateDebug.enabled = enabled;\n\tcreateDebug.humanize = require('ms');\n\tcreateDebug.destroy = destroy;\n\n\tObject.keys(env).forEach(key => {\n\t\tcreateDebug[key] = env[key];\n\t});\n\n\t/**\n\t* The currently active debug mode names, and names to skip.\n\t*/\n\n\tcreateDebug.names = [];\n\tcreateDebug.skips = [];\n\n\t/**\n\t* Map of special \"%n\" handling functions, for the debug \"format\" argument.\n\t*\n\t* Valid key names are a single, lower or upper-case letter, 
i.e. \"n\" and \"N\".\n\t*/\n\tcreateDebug.formatters = {};\n\n\t/**\n\t* Selects a color for a debug namespace\n\t* @param {String} namespace The namespace string for the debug instance to be colored\n\t* @return {Number|String} An ANSI color code for the given namespace\n\t* @api private\n\t*/\n\tfunction selectColor(namespace) {\n\t\tlet hash = 0;\n\n\t\tfor (let i = 0; i < namespace.length; i++) {\n\t\t\thash = ((hash << 5) - hash) + namespace.charCodeAt(i);\n\t\t\thash |= 0; // Convert to 32bit integer\n\t\t}\n\n\t\treturn createDebug.colors[Math.abs(hash) % createDebug.colors.length];\n\t}\n\tcreateDebug.selectColor = selectColor;\n\n\t/**\n\t* Create a debugger with the given `namespace`.\n\t*\n\t* @param {String} namespace\n\t* @return {Function}\n\t* @api public\n\t*/\n\tfunction createDebug(namespace) {\n\t\tlet prevTime;\n\t\tlet enableOverride = null;\n\t\tlet namespacesCache;\n\t\tlet enabledCache;\n\n\t\tfunction debug(...args) {\n\t\t\t// Disabled?\n\t\t\tif (!debug.enabled) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst self = debug;\n\n\t\t\t// Set `diff` timestamp\n\t\t\tconst curr = Number(new Date());\n\t\t\tconst ms = curr - (prevTime || curr);\n\t\t\tself.diff = ms;\n\t\t\tself.prev = prevTime;\n\t\t\tself.curr = curr;\n\t\t\tprevTime = curr;\n\n\t\t\targs[0] = createDebug.coerce(args[0]);\n\n\t\t\tif (typeof args[0] !== 'string') {\n\t\t\t\t// Anything else let's inspect with %O\n\t\t\t\targs.unshift('%O');\n\t\t\t}\n\n\t\t\t// Apply any `formatters` transformations\n\t\t\tlet index = 0;\n\t\t\targs[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {\n\t\t\t\t// If we encounter an escaped % then don't increase the array index\n\t\t\t\tif (match === '%%') {\n\t\t\t\t\treturn '%';\n\t\t\t\t}\n\t\t\t\tindex++;\n\t\t\t\tconst formatter = createDebug.formatters[format];\n\t\t\t\tif (typeof formatter === 'function') {\n\t\t\t\t\tconst val = args[index];\n\t\t\t\t\tmatch = formatter.call(self, val);\n\n\t\t\t\t\t// Now we need to remove `args[index]` since it's inlined in the `format`\n\t\t\t\t\targs.splice(index, 1);\n\t\t\t\t\tindex--;\n\t\t\t\t}\n\t\t\t\treturn match;\n\t\t\t});\n\n\t\t\t// Apply env-specific formatting (colors, etc.)\n\t\t\tcreateDebug.formatArgs.call(self, args);\n\n\t\t\tconst logFn = self.log || createDebug.log;\n\t\t\tlogFn.apply(self, args);\n\t\t}\n\n\t\tdebug.namespace = namespace;\n\t\tdebug.useColors = createDebug.useColors();\n\t\tdebug.color = createDebug.selectColor(namespace);\n\t\tdebug.extend = extend;\n\t\tdebug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.\n\n\t\tObject.defineProperty(debug, 'enabled', {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false,\n\t\t\tget: () => {\n\t\t\t\tif (enableOverride !== null) {\n\t\t\t\t\treturn enableOverride;\n\t\t\t\t}\n\t\t\t\tif (namespacesCache !== createDebug.namespaces) {\n\t\t\t\t\tnamespacesCache = createDebug.namespaces;\n\t\t\t\t\tenabledCache = createDebug.enabled(namespace);\n\t\t\t\t}\n\n\t\t\t\treturn enabledCache;\n\t\t\t},\n\t\t\tset: v => {\n\t\t\t\tenableOverride = v;\n\t\t\t}\n\t\t});\n\n\t\t// Env-specific initialization logic for debug instances\n\t\tif (typeof createDebug.init === 'function') {\n\t\t\tcreateDebug.init(debug);\n\t\t}\n\n\t\treturn debug;\n\t}\n\n\tfunction extend(namespace, delimiter) {\n\t\tconst newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);\n\t\tnewDebug.log = this.log;\n\t\treturn newDebug;\n\t}\n\n\t/**\n\t* Enables a debug mode by namespaces. 
This can include modes\n\t* separated by a colon and wildcards.\n\t*\n\t* @param {String} namespaces\n\t* @api public\n\t*/\n\tfunction enable(namespaces) {\n\t\tcreateDebug.save(namespaces);\n\t\tcreateDebug.namespaces = namespaces;\n\n\t\tcreateDebug.names = [];\n\t\tcreateDebug.skips = [];\n\n\t\tlet i;\n\t\tconst split = (typeof namespaces === 'string' ? namespaces : '').split(/[\\s,]+/);\n\t\tconst len = split.length;\n\n\t\tfor (i = 0; i < len; i++) {\n\t\t\tif (!split[i]) {\n\t\t\t\t// ignore empty strings\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tnamespaces = split[i].replace(/\\*/g, '.*?');\n\n\t\t\tif (namespaces[0] === '-') {\n\t\t\t\tcreateDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));\n\t\t\t} else {\n\t\t\t\tcreateDebug.names.push(new RegExp('^' + namespaces + '$'));\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t* Disable debug output.\n\t*\n\t* @return {String} namespaces\n\t* @api public\n\t*/\n\tfunction disable() {\n\t\tconst namespaces = [\n\t\t\t...createDebug.names.map(toNamespace),\n\t\t\t...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)\n\t\t].join(',');\n\t\tcreateDebug.enable('');\n\t\treturn namespaces;\n\t}\n\n\t/**\n\t* Returns true if the given mode name is enabled, false otherwise.\n\t*\n\t* @param {String} name\n\t* @return {Boolean}\n\t* @api public\n\t*/\n\tfunction enabled(name) {\n\t\tif (name[name.length - 1] === '*') {\n\t\t\treturn true;\n\t\t}\n\n\t\tlet i;\n\t\tlet len;\n\n\t\tfor (i = 0, len = createDebug.skips.length; i < len; i++) {\n\t\t\tif (createDebug.skips[i].test(name)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\tfor (i = 0, len = createDebug.names.length; i < len; i++) {\n\t\t\tif (createDebug.names[i].test(name)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\treturn false;\n\t}\n\n\t/**\n\t* Convert regexp to namespace\n\t*\n\t* @param {RegExp} regxep\n\t* @return {String} namespace\n\t* @api private\n\t*/\n\tfunction toNamespace(regexp) {\n\t\treturn regexp.toString()\n\t\t\t.substring(2, regexp.toString().length - 2)\n\t\t\t.replace(/\\.\\*\\?$/, '*');\n\t}\n\n\t/**\n\t* Coerce `val`.\n\t*\n\t* @param {Mixed} val\n\t* @return {Mixed}\n\t* @api private\n\t*/\n\tfunction coerce(val) {\n\t\tif (val instanceof Error) {\n\t\t\treturn val.stack || val.message;\n\t\t}\n\t\treturn val;\n\t}\n\n\t/**\n\t* XXX DO NOT USE. This is a temporary stub function.\n\t* XXX It WILL be removed in the next major release.\n\t*/\n\tfunction destroy() {\n\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');\n\t}\n\n\tcreateDebug.enable(createDebug.load());\n\n\treturn createDebug;\n}\n\nmodule.exports = setup;\n","/**\n * Detect Electron renderer / nwjs process, which is node, but we should\n * treat as a browser.\n */\n\nif (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {\n\tmodule.exports = require('./browser.js');\n} else {\n\tmodule.exports = require('./node.js');\n}\n","/**\n * Module dependencies.\n */\n\nconst tty = require('tty');\nconst util = require('util');\n\n/**\n * This is the Node.js implementation of `debug()`.\n */\n\nexports.init = init;\nexports.log = log;\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.destroy = util.deprecate(\n\t() => {},\n\t'Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'\n);\n\n/**\n * Colors.\n */\n\nexports.colors = [6, 2, 3, 4, 5, 1];\n\ntry {\n\t// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)\n\t// eslint-disable-next-line import/no-extraneous-dependencies\n\tconst supportsColor = require('supports-color');\n\n\tif (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {\n\t\texports.colors = [\n\t\t\t20,\n\t\t\t21,\n\t\t\t26,\n\t\t\t27,\n\t\t\t32,\n\t\t\t33,\n\t\t\t38,\n\t\t\t39,\n\t\t\t40,\n\t\t\t41,\n\t\t\t42,\n\t\t\t43,\n\t\t\t44,\n\t\t\t45,\n\t\t\t56,\n\t\t\t57,\n\t\t\t62,\n\t\t\t63,\n\t\t\t68,\n\t\t\t69,\n\t\t\t74,\n\t\t\t75,\n\t\t\t76,\n\t\t\t77,\n\t\t\t78,\n\t\t\t79,\n\t\t\t80,\n\t\t\t81,\n\t\t\t92,\n\t\t\t93,\n\t\t\t98,\n\t\t\t99,\n\t\t\t112,\n\t\t\t113,\n\t\t\t128,\n\t\t\t129,\n\t\t\t134,\n\t\t\t135,\n\t\t\t148,\n\t\t\t149,\n\t\t\t160,\n\t\t\t161,\n\t\t\t162,\n\t\t\t163,\n\t\t\t164,\n\t\t\t165,\n\t\t\t166,\n\t\t\t167,\n\t\t\t168,\n\t\t\t169,\n\t\t\t170,\n\t\t\t171,\n\t\t\t172,\n\t\t\t173,\n\t\t\t178,\n\t\t\t179,\n\t\t\t184,\n\t\t\t185,\n\t\t\t196,\n\t\t\t197,\n\t\t\t198,\n\t\t\t199,\n\t\t\t200,\n\t\t\t201,\n\t\t\t202,\n\t\t\t203,\n\t\t\t204,\n\t\t\t205,\n\t\t\t206,\n\t\t\t207,\n\t\t\t208,\n\t\t\t209,\n\t\t\t214,\n\t\t\t215,\n\t\t\t220,\n\t\t\t221\n\t\t];\n\t}\n} catch (error) {\n\t// Swallow - we only care if `supports-color` is available; it doesn't have to be.\n}\n\n/**\n * Build up the default `inspectOpts` object from the environment variables.\n *\n * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js\n */\n\nexports.inspectOpts = Object.keys(process.env).filter(key => {\n\treturn /^debug_/i.test(key);\n}).reduce((obj, key) => {\n\t// Camel-case\n\tconst prop = key\n\t\t.substring(6)\n\t\t.toLowerCase()\n\t\t.replace(/_([a-z])/g, (_, k) => {\n\t\t\treturn k.toUpperCase();\n\t\t});\n\n\t// Coerce string value into JS value\n\tlet val = process.env[key];\n\tif (/^(yes|on|true|enabled)$/i.test(val)) {\n\t\tval = true;\n\t} else if (/^(no|off|false|disabled)$/i.test(val)) {\n\t\tval = false;\n\t} else if (val === 'null') {\n\t\tval = null;\n\t} else {\n\t\tval = Number(val);\n\t}\n\n\tobj[prop] = val;\n\treturn obj;\n}, {});\n\n/**\n * Is stdout a TTY? Colored output is enabled when `true`.\n */\n\nfunction useColors() {\n\treturn 'colors' in exports.inspectOpts ?\n\t\tBoolean(exports.inspectOpts.colors) :\n\t\ttty.isatty(process.stderr.fd);\n}\n\n/**\n * Adds ANSI color escape codes if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\tconst {namespace: name, useColors} = this;\n\n\tif (useColors) {\n\t\tconst c = this.color;\n\t\tconst colorCode = '\\u001B[3' + (c < 8 ? 
c : '8;5;' + c);\n\t\tconst prefix = ` ${colorCode};1m${name} \\u001B[0m`;\n\n\t\targs[0] = prefix + args[0].split('\\n').join('\\n' + prefix);\n\t\targs.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\\u001B[0m');\n\t} else {\n\t\targs[0] = getDate() + name + ' ' + args[0];\n\t}\n}\n\nfunction getDate() {\n\tif (exports.inspectOpts.hideDate) {\n\t\treturn '';\n\t}\n\treturn new Date().toISOString() + ' ';\n}\n\n/**\n * Invokes `util.format()` with the specified arguments and writes to stderr.\n */\n\nfunction log(...args) {\n\treturn process.stderr.write(util.format(...args) + '\\n');\n}\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\tif (namespaces) {\n\t\tprocess.env.DEBUG = namespaces;\n\t} else {\n\t\t// If you set a process.env field to null or undefined, it gets cast to the\n\t\t// string 'null' or 'undefined'. Just delete instead.\n\t\tdelete process.env.DEBUG;\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\n\nfunction load() {\n\treturn process.env.DEBUG;\n}\n\n/**\n * Init logic for `debug` instances.\n *\n * Create a new `inspectOpts` object in case `useColors` is set\n * differently for a particular `debug` instance.\n */\n\nfunction init(debug) {\n\tdebug.inspectOpts = {};\n\n\tconst keys = Object.keys(exports.inspectOpts);\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tdebug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %o to `util.inspect()`, all on a single line.\n */\n\nformatters.o = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts)\n\t\t.split('\\n')\n\t\t.map(str => str.trim())\n\t\t.join(' ');\n};\n\n/**\n * Map %O to `util.inspect()`, allowing multiple lines if needed.\n */\n\nformatters.O = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts);\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","var stream = require('readable-stream')\nvar eos = require('end-of-stream')\nvar inherits = require('inherits')\nvar shift = require('stream-shift')\n\nvar SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)\n ? Buffer.from([0])\n : new Buffer([0])\n\nvar onuncork = function(self, fn) {\n if (self._corked) self.once('uncork', fn)\n else fn()\n}\n\nvar autoDestroy = function (self, err) {\n if (self._autoDestroy) self.destroy(err)\n}\n\nvar destroyer = function(self, end) {\n return function(err) {\n if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err)\n else if (end && !self._ended) self.end()\n }\n}\n\nvar end = function(ws, fn) {\n if (!ws) return fn()\n if (ws._writableState && ws._writableState.finished) return fn()\n if (ws._writableState) return ws.end(fn)\n ws.end()\n fn()\n}\n\nvar noop = function() {}\n\nvar toStreams2 = function(rs) {\n return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)\n}\n\nvar Duplexify = function(writable, readable, opts) {\n if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)\n stream.Duplex.call(this, opts)\n\n this._writable = null\n this._readable = null\n this._readable2 = null\n\n this._autoDestroy = !opts || opts.autoDestroy !== false\n this._forwardDestroy = !opts || opts.destroy !== false\n this._forwardEnd = !opts || opts.end !== false\n this._corked = 1 // start corked\n this._ondrain = null\n this._drained = false\n this._forwarding = false\n this._unwrite = null\n this._unread = null\n this._ended = false\n\n this.destroyed = false\n\n if (writable) this.setWritable(writable)\n if (readable) this.setReadable(readable)\n}\n\ninherits(Duplexify, stream.Duplex)\n\nDuplexify.obj = function(writable, readable, opts) {\n if (!opts) opts = {}\n opts.objectMode = true\n opts.highWaterMark = 16\n return new Duplexify(writable, readable, opts)\n}\n\nDuplexify.prototype.cork = function() {\n if (++this._corked === 1) this.emit('cork')\n}\n\nDuplexify.prototype.uncork = function() {\n if (this._corked && --this._corked === 0) this.emit('uncork')\n}\n\nDuplexify.prototype.setWritable = function(writable) {\n if (this._unwrite) this._unwrite()\n\n if (this.destroyed) {\n if (writable && writable.destroy) writable.destroy()\n return\n }\n\n if (writable === null || writable === false) {\n this.end()\n return\n }\n\n var self = this\n var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))\n\n var ondrain = function() {\n var ondrain = self._ondrain\n self._ondrain = null\n if (ondrain) ondrain()\n }\n\n var clear = function() {\n self._writable.removeListener('drain', ondrain)\n unend()\n }\n\n if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks\n\n this._writable = writable\n this._writable.on('drain', ondrain)\n this._unwrite = clear\n\n this.uncork() // always uncork setWritable\n}\n\nDuplexify.prototype.setReadable = function(readable) {\n if (this._unread) this._unread()\n\n if (this.destroyed) {\n if (readable && readable.destroy) readable.destroy()\n return\n }\n\n if (readable === null || readable === false) {\n this.push(null)\n this.resume()\n return\n }\n\n var self = this\n var unend = eos(readable, {writable:false, readable:true}, destroyer(this))\n\n var onreadable = function() {\n self._forward()\n }\n\n var onend = function() {\n self.push(null)\n }\n\n var clear = function() {\n self._readable2.removeListener('readable', onreadable)\n self._readable2.removeListener('end', onend)\n unend()\n }\n\n this._drained = true\n this._readable = readable\n this._readable2 = readable._readableState ? 
readable : toStreams2(readable)\n this._readable2.on('readable', onreadable)\n this._readable2.on('end', onend)\n this._unread = clear\n\n this._forward()\n}\n\nDuplexify.prototype._read = function() {\n this._drained = true\n this._forward()\n}\n\nDuplexify.prototype._forward = function() {\n if (this._forwarding || !this._readable2 || !this._drained) return\n this._forwarding = true\n\n var data\n\n while (this._drained && (data = shift(this._readable2)) !== null) {\n if (this.destroyed) continue\n this._drained = this.push(data)\n }\n\n this._forwarding = false\n}\n\nDuplexify.prototype.destroy = function(err, cb) {\n if (!cb) cb = noop\n if (this.destroyed) return cb(null)\n this.destroyed = true\n\n var self = this\n process.nextTick(function() {\n self._destroy(err)\n cb(null)\n })\n}\n\nDuplexify.prototype._destroy = function(err) {\n if (err) {\n var ondrain = this._ondrain\n this._ondrain = null\n if (ondrain) ondrain(err)\n else this.emit('error', err)\n }\n\n if (this._forwardDestroy) {\n if (this._readable && this._readable.destroy) this._readable.destroy()\n if (this._writable && this._writable.destroy) this._writable.destroy()\n }\n\n this.emit('close')\n}\n\nDuplexify.prototype._write = function(data, enc, cb) {\n if (this.destroyed) return\n if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))\n if (data === SIGNAL_FLUSH) return this._finish(cb)\n if (!this._writable) return cb()\n\n if (this._writable.write(data) === false) this._ondrain = cb\n else if (!this.destroyed) cb()\n}\n\nDuplexify.prototype._finish = function(cb) {\n var self = this\n this.emit('preend')\n onuncork(this, function() {\n end(self._forwardEnd && self._writable, function() {\n // haxx to not emit prefinish twice\n if (self._writableState.prefinished === false) self._writableState.prefinished = true\n self.emit('prefinish')\n onuncork(self, cb)\n })\n })\n}\n\nDuplexify.prototype.end = function(data, enc, cb) {\n if (typeof data === 'function') return this.end(null, null, data)\n if (typeof enc === 'function') return this.end(data, null, enc)\n this._ended = true\n if (data) this.write(data)\n if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH)\n return stream.Writable.prototype.end.call(this, cb)\n}\n\nmodule.exports = Duplexify\n","'use strict';\n\nvar Buffer = require('safe-buffer').Buffer;\n\nvar getParamBytesForAlg = require('./param-bytes-for-alg');\n\nvar MAX_OCTET = 0x80,\n\tCLASS_UNIVERSAL = 0,\n\tPRIMITIVE_BIT = 0x20,\n\tTAG_SEQ = 0x10,\n\tTAG_INT = 0x02,\n\tENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6),\n\tENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6);\n\nfunction base64Url(base64) {\n\treturn base64\n\t\t.replace(/=/g, '')\n\t\t.replace(/\\+/g, '-')\n\t\t.replace(/\\//g, '_');\n}\n\nfunction signatureAsBuffer(signature) {\n\tif (Buffer.isBuffer(signature)) {\n\t\treturn signature;\n\t} else if ('string' === typeof signature) {\n\t\treturn Buffer.from(signature, 'base64');\n\t}\n\n\tthrow new TypeError('ECDSA signature must be a Base64 string or a Buffer');\n}\n\nfunction derToJose(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\t// the DER encoded param should at most be the param size, plus a padding\n\t// zero, since due to being a signed integer\n\tvar maxEncodedParamLength = paramBytes + 1;\n\n\tvar inputLength = signature.length;\n\n\tvar offset = 0;\n\tif (signature[offset++] !== ENCODED_TAG_SEQ) {\n\t\tthrow new Error('Could 
not find expected \"seq\"');\n\t}\n\n\tvar seqLength = signature[offset++];\n\tif (seqLength === (MAX_OCTET | 1)) {\n\t\tseqLength = signature[offset++];\n\t}\n\n\tif (inputLength - offset < seqLength) {\n\t\tthrow new Error('\"seq\" specified length of \"' + seqLength + '\", only \"' + (inputLength - offset) + '\" remaining');\n\t}\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"r\"');\n\t}\n\n\tvar rLength = signature[offset++];\n\n\tif (inputLength - offset - 2 < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", only \"' + (inputLength - offset - 2) + '\" available');\n\t}\n\n\tif (maxEncodedParamLength < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar rOffset = offset;\n\toffset += rLength;\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"s\"');\n\t}\n\n\tvar sLength = signature[offset++];\n\n\tif (inputLength - offset !== sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", expected \"' + (inputLength - offset) + '\"');\n\t}\n\n\tif (maxEncodedParamLength < sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar sOffset = offset;\n\toffset += sLength;\n\n\tif (offset !== inputLength) {\n\t\tthrow new Error('Expected to consume entire buffer, but \"' + (inputLength - offset) + '\" bytes remain');\n\t}\n\n\tvar rPadding = paramBytes - rLength,\n\t\tsPadding = paramBytes - sLength;\n\n\tvar dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength);\n\n\tfor (offset = 0; offset < rPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength);\n\n\toffset = paramBytes;\n\n\tfor (var o = offset; offset < o + sPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength);\n\n\tdst = dst.toString('base64');\n\tdst = base64Url(dst);\n\n\treturn dst;\n}\n\nfunction countPadding(buf, start, stop) {\n\tvar padding = 0;\n\twhile (start + padding < stop && buf[start + padding] === 0) {\n\t\t++padding;\n\t}\n\n\tvar needsSign = buf[start + padding] >= MAX_OCTET;\n\tif (needsSign) {\n\t\t--padding;\n\t}\n\n\treturn padding;\n}\n\nfunction joseToDer(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\tvar signatureBytes = signature.length;\n\tif (signatureBytes !== paramBytes * 2) {\n\t\tthrow new TypeError('\"' + alg + '\" signatures must be \"' + paramBytes * 2 + '\" bytes, saw \"' + signatureBytes + '\"');\n\t}\n\n\tvar rPadding = countPadding(signature, 0, paramBytes);\n\tvar sPadding = countPadding(signature, paramBytes, signature.length);\n\tvar rLength = paramBytes - rPadding;\n\tvar sLength = paramBytes - sPadding;\n\n\tvar rsBytes = 1 + 1 + rLength + 1 + 1 + sLength;\n\n\tvar shortLength = rsBytes < MAX_OCTET;\n\n\tvar dst = Buffer.allocUnsafe((shortLength ? 
2 : 3) + rsBytes);\n\n\tvar offset = 0;\n\tdst[offset++] = ENCODED_TAG_SEQ;\n\tif (shortLength) {\n\t\t// Bit 8 has value \"0\"\n\t\t// bits 7-1 give the length.\n\t\tdst[offset++] = rsBytes;\n\t} else {\n\t\t// Bit 8 of first octet has value \"1\"\n\t\t// bits 7-1 give the number of additional length octets.\n\t\tdst[offset++] = MAX_OCTET\t| 1;\n\t\t// length, base 256\n\t\tdst[offset++] = rsBytes & 0xff;\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = rLength;\n\tif (rPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\toffset += signature.copy(dst, offset, 0, paramBytes);\n\t} else {\n\t\toffset += signature.copy(dst, offset, rPadding, paramBytes);\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = sLength;\n\tif (sPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\tsignature.copy(dst, offset, paramBytes);\n\t} else {\n\t\tsignature.copy(dst, offset, paramBytes + sPadding);\n\t}\n\n\treturn dst;\n}\n\nmodule.exports = {\n\tderToJose: derToJose,\n\tjoseToDer: joseToDer\n};\n","'use strict';\n\nfunction getParamSize(keySize) {\n\tvar result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1);\n\treturn result;\n}\n\nvar paramBytesForAlg = {\n\tES256: getParamSize(256),\n\tES384: getParamSize(384),\n\tES512: getParamSize(521)\n};\n\nfunction getParamBytesForAlg(alg) {\n\tvar paramBytes = paramBytesForAlg[alg];\n\tif (paramBytes) {\n\t\treturn paramBytes;\n\t}\n\n\tthrow new Error('Unknown algorithm \"' + alg + '\"');\n}\n\nmodule.exports = getParamBytesForAlg;\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","var punycode = require('punycode');\nvar entities = require('./entities.json');\n\nmodule.exports = decode;\n\nfunction decode (str) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n\n return str.replace(/&(#?[^;\\W]+;?)/g, function (_, match) {\n var m;\n if (m = /^#(\\d+);?$/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 10) ]);\n } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 16) ]);\n } else {\n // named entity\n var hasSemi = /;$/.test(match);\n var withoutSemi = hasSemi ? match.replace(/;$/, '') : match;\n var target = entities[withoutSemi] || (hasSemi && entities[match]);\n\n if (typeof target === 'number') {\n return punycode.ucs2.encode([ target ]);\n } else if (typeof target === 'string') {\n return target;\n } else {\n return '&' + match;\n }\n }\n });\n}\n","var punycode = require('punycode');\nvar revEntities = require('./reversed.json');\n\nmodule.exports = encode;\n\nfunction encode (str, opts) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n if (!opts) opts = {};\n\n var numeric = true;\n if (opts.named) numeric = false;\n if (opts.numeric !== undefined) numeric = opts.numeric;\n\n var special = opts.special || {\n '\"': true, \"'\": true,\n '<': true, '>': true,\n '&': true\n };\n\n var codePoints = punycode.ucs2.decode(str);\n var chars = [];\n for (var i = 0; i < codePoints.length; i++) {\n var cc = codePoints[i];\n var c = punycode.ucs2.encode([ cc ]);\n var e = revEntities[cc];\n if (e && (cc >= 127 || special[c]) && !numeric) {\n chars.push('&' + (/;$/.test(e) ? 
e : e + ';'));\n }\n else if (cc < 32 || cc >= 127 || special[c]) {\n chars.push('&#' + cc + ';');\n }\n else {\n chars.push(c);\n }\n }\n return chars.join('');\n}\n","exports.encode = require('./encode');\nexports.decode = require('./decode');\n","/**\n * @author Toru Nagashima \n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The 
target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","'use strict';\n\nvar hasOwn = Object.prototype.hasOwnProperty;\nvar toStr = Object.prototype.toString;\nvar defineProperty = Object.defineProperty;\nvar gOPD = Object.getOwnPropertyDescriptor;\n\nvar isArray = function isArray(arr) {\n\tif (typeof Array.isArray === 'function') {\n\t\treturn Array.isArray(arr);\n\t}\n\n\treturn toStr.call(arr) === '[object Array]';\n};\n\nvar isPlainObject = function isPlainObject(obj) {\n\tif (!obj || toStr.call(obj) !== '[object Object]') {\n\t\treturn false;\n\t}\n\n\tvar hasOwnConstructor = hasOwn.call(obj, 'constructor');\n\tvar hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf');\n\t// Not own constructor property must be Object\n\tif (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {\n\t\treturn false;\n\t}\n\n\t// Own properties are enumerated firstly, so to speed up,\n\t// if last one is own, then all properties are own.\n\tvar key;\n\tfor (key in obj) { /**/ }\n\n\treturn typeof key === 'undefined' || hasOwn.call(obj, key);\n};\n\n// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target\nvar setProperty = function setProperty(target, options) {\n\tif (defineProperty && options.name === '__proto__') {\n\t\tdefineProperty(target, options.name, {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: true,\n\t\t\tvalue: options.newValue,\n\t\t\twritable: true\n\t\t});\n\t} else {\n\t\ttarget[options.name] = options.newValue;\n\t}\n};\n\n// Return undefined instead of __proto__ if '__proto__' is not an own property\nvar getProperty = function getProperty(obj, name) {\n\tif (name === '__proto__') {\n\t\tif (!hasOwn.call(obj, name)) {\n\t\t\treturn void 0;\n\t\t} else if (gOPD) {\n\t\t\t// In early versions of node, obj['__proto__'] is buggy when obj has\n\t\t\t// __proto__ as an own property. 
Object.getOwnPropertyDescriptor() works.\n\t\t\treturn gOPD(obj, name).value;\n\t\t}\n\t}\n\n\treturn obj[name];\n};\n\nmodule.exports = function extend() {\n\tvar options, name, src, copy, copyIsArray, clone;\n\tvar target = arguments[0];\n\tvar i = 1;\n\tvar length = arguments.length;\n\tvar deep = false;\n\n\t// Handle a deep copy situation\n\tif (typeof target === 'boolean') {\n\t\tdeep = target;\n\t\ttarget = arguments[1] || {};\n\t\t// skip the boolean and the target\n\t\ti = 2;\n\t}\n\tif (target == null || (typeof target !== 'object' && typeof target !== 'function')) {\n\t\ttarget = {};\n\t}\n\n\tfor (; i < length; ++i) {\n\t\toptions = arguments[i];\n\t\t// Only deal with non-null/undefined values\n\t\tif (options != null) {\n\t\t\t// Extend the base object\n\t\t\tfor (name in options) {\n\t\t\t\tsrc = getProperty(target, name);\n\t\t\t\tcopy = getProperty(options, name);\n\n\t\t\t\t// Prevent never-ending loop\n\t\t\t\tif (target !== copy) {\n\t\t\t\t\t// Recurse if we're merging plain objects or arrays\n\t\t\t\t\tif (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) {\n\t\t\t\t\t\tif (copyIsArray) {\n\t\t\t\t\t\t\tcopyIsArray = false;\n\t\t\t\t\t\t\tclone = src && isArray(src) ? src : [];\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tclone = src && isPlainObject(src) ? src : {};\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// Never move original objects, clone them\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: extend(deep, clone, copy) });\n\n\t\t\t\t\t// Don't bring in undefined values\n\t\t\t\t\t} else if (typeof copy !== 'undefined') {\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: copy });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Return the modified object\n\treturn target;\n};\n","(function(scope) {'use strict';\nfunction B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.byteOffset,r.byteLength),f.toString(e)}var w=function(r){return Buffer.from(r)};function h(r){for(var e=0,f=Math.min(256*256,r.length+1),n=new Uint16Array(f),i=[],o=0;;){var t=e=f-1){var s=n.subarray(0,o),m=s;if(i.push(String.fromCharCode.apply(null,m)),!t)return i.join(\"\");r=r.subarray(e),e=0,o=0}var a=r[e++];if((a&128)===0)n[o++]=a;else if((a&224)===192){var d=r[e++]&63;n[o++]=(a&31)<<6|d}else if((a&240)===224){var d=r[e++]&63,l=r[e++]&63;n[o++]=(a&31)<<12|d<<6|l}else if((a&248)===240){var d=r[e++]&63,l=r[e++]&63,R=r[e++]&63,c=(a&7)<<18|d<<12|l<<6|R;c>65535&&(c-=65536,n[o++]=c>>>10&1023|55296,c=56320|c&1023),n[o++]=c}}}function F(r){for(var e=0,f=r.length,n=0,i=Math.max(32,f+(f>>>1)+7),o=new Uint8Array(i>>>3<<3);e=55296&&t<=56319){if(e=55296&&t<=56319)continue}if(n+4>o.length){i+=8,i*=1+e/r.length*2,i=i>>>3<<3;var m=new Uint8Array(i);m.set(o),o=m}if((t&4294967168)===0){o[n++]=t;continue}else if((t&4294965248)===0)o[n++]=t>>>6&31|192;else if((t&4294901760)===0)o[n++]=t>>>12&15|224,o[n++]=t>>>6&63|128;else if((t&4292870144)===0)o[n++]=t>>>18&7|240,o[n++]=t>>>12&63|128,o[n++]=t>>>6&63|128;else continue;o[n++]=t&63|128}return o.slice?o.slice(0,n):o.subarray(0,n)}var u=\"Failed to \",p=function(r,e,f){if(r)throw new Error(\"\".concat(u).concat(e,\": the '\").concat(f,\"' option is unsupported.\"))};var x=typeof Buffer==\"function\"&&Buffer.from;var A=x?w:F;function v(){this.encoding=\"utf-8\"}v.prototype.encode=function(r,e){return p(e&&e.stream,\"encode\",\"stream\"),A(r)};function U(r){var e;try{var f=new Blob([r],{type:\"text/plain;charset=UTF-8\"});e=URL.createObjectURL(f);var n=new XMLHttpRequest;return 
n.open(\"GET\",e,!1),n.send(),n.responseText}finally{e&&URL.revokeObjectURL(e)}}var O=!x&&typeof Blob==\"function\"&&typeof URL==\"function\"&&typeof URL.createObjectURL==\"function\",S=[\"utf-8\",\"utf8\",\"unicode-1-1-utf-8\"],T=h;x?T=B:O&&(T=function(r){try{return U(r)}catch(e){return h(r)}});var y=\"construct 'TextDecoder'\",E=\"\".concat(u,\" \").concat(y,\": the \");function g(r,e){p(e&&e.fatal,y,\"fatal\"),r=r||\"utf-8\";var f;if(x?f=Buffer.isEncoding(r):f=S.indexOf(r.toLowerCase())!==-1,!f)throw new RangeError(\"\".concat(E,\" encoding label provided ('\").concat(r,\"') is invalid.\"));this.encoding=r,this.fatal=!1,this.ignoreBOM=!1}g.prototype.decode=function(r,e){p(e&&e.stream,\"decode\",\"stream\");var f;return r instanceof Uint8Array?f=r:r.buffer instanceof ArrayBuffer?f=new Uint8Array(r.buffer):f=new Uint8Array(r),T(f,this.encoding)};scope.TextEncoder=scope.TextEncoder||v;scope.TextDecoder=scope.TextDecoder||g;\n}(typeof window !== 'undefined' ? window : (typeof global !== 'undefined' ? global : this)));\n","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GaxiosError = void 0;\n/* eslint-disable @typescript-eslint/no-explicit-any */\nclass GaxiosError extends Error {\n constructor(message, options, response) {\n super(message);\n this.response = response;\n this.config = options;\n this.code = response.status.toString();\n }\n}\nexports.GaxiosError = GaxiosError;\n//# sourceMappingURL=common.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Gaxios = void 0;\nconst extend_1 = __importDefault(require(\"extend\"));\nconst https_1 = require(\"https\");\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst querystring_1 = __importDefault(require(\"querystring\"));\nconst is_stream_1 = __importDefault(require(\"is-stream\"));\nconst url_1 = require(\"url\");\nconst common_1 = require(\"./common\");\nconst retry_1 = require(\"./retry\");\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fetch = hasFetch() ? 
window.fetch : node_fetch_1.default;\nfunction hasWindow() {\n return typeof window !== 'undefined' && !!window;\n}\nfunction hasFetch() {\n return hasWindow() && !!window.fetch;\n}\nfunction hasBuffer() {\n return typeof Buffer !== 'undefined';\n}\nfunction hasHeader(options, header) {\n return !!getHeader(options, header);\n}\nfunction getHeader(options, header) {\n header = header.toLowerCase();\n for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) {\n if (header === key.toLowerCase()) {\n return options.headers[key];\n }\n }\n return undefined;\n}\nlet HttpsProxyAgent;\nfunction loadProxy() {\n var _a, _b, _c, _d;\n const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) ||\n ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) ||\n ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) ||\n ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy);\n if (proxy) {\n HttpsProxyAgent = require('https-proxy-agent');\n }\n return proxy;\n}\nloadProxy();\nfunction skipProxy(url) {\n var _a;\n const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy;\n if (!noProxyEnv) {\n return false;\n }\n const noProxyUrls = noProxyEnv.split(',');\n const parsedURL = new url_1.URL(url);\n return !!noProxyUrls.find(url => {\n if (url.startsWith('*.') || url.startsWith('.')) {\n url = url.replace(/^\\*\\./, '.');\n return parsedURL.hostname.endsWith(url);\n }\n else {\n return url === parsedURL.origin || url === parsedURL.hostname;\n }\n });\n}\n// Figure out if we should be using a proxy. 
Only if it's required, load\n// the https-proxy-agent module as it adds startup cost.\nfunction getProxy(url) {\n // If there is a match between the no_proxy env variables and the url, then do not proxy\n if (skipProxy(url)) {\n return undefined;\n // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy\n }\n else {\n return loadProxy();\n }\n}\nclass Gaxios {\n /**\n * The Gaxios class is responsible for making HTTP requests.\n * @param defaults The default set of options to be used for this instance.\n */\n constructor(defaults) {\n this.agentCache = new Map();\n this.defaults = defaults || {};\n }\n /**\n * Perform an HTTP request with the given options.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async request(opts = {}) {\n opts = this.validateOpts(opts);\n return this._request(opts);\n }\n async _defaultAdapter(opts) {\n const fetchImpl = opts.fetchImplementation || fetch;\n const res = (await fetchImpl(opts.url, opts));\n const data = await this.getResponseData(opts, res);\n return this.translateResponse(opts, res, data);\n }\n /**\n * Internal, retryable version of the `request` method.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async _request(opts = {}) {\n try {\n let translatedResponse;\n if (opts.adapter) {\n translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this));\n }\n else {\n translatedResponse = await this._defaultAdapter(opts);\n }\n if (!opts.validateStatus(translatedResponse.status)) {\n throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse);\n }\n return translatedResponse;\n }\n catch (e) {\n const err = e;\n err.config = opts;\n const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err);\n if (shouldRetry && config) {\n err.config.retryConfig.currentRetryAttempt =\n config.retryConfig.currentRetryAttempt;\n return this._request(err.config);\n }\n throw err;\n }\n }\n async getResponseData(opts, res) {\n switch (opts.responseType) {\n case 'stream':\n return res.body;\n case 'json': {\n let data = await res.text();\n try {\n data = JSON.parse(data);\n }\n catch (_a) {\n // continue\n }\n return data;\n }\n case 'arraybuffer':\n return res.arrayBuffer();\n case 'blob':\n return res.blob();\n default:\n return res.text();\n }\n }\n /**\n * Validates the options, and merges them with defaults.\n * @param opts The original options passed from the client.\n */\n validateOpts(options) {\n const opts = (0, extend_1.default)(true, {}, this.defaults, options);\n if (!opts.url) {\n throw new Error('URL is required.');\n }\n // baseUrl has been deprecated, remove in 2.0\n const baseUrl = opts.baseUrl || opts.baseURL;\n if (baseUrl) {\n opts.url = baseUrl + opts.url;\n }\n opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer;\n if (opts.params && Object.keys(opts.params).length > 0) {\n let additionalQueryParams = opts.paramsSerializer(opts.params);\n if (additionalQueryParams.startsWith('?')) {\n additionalQueryParams = additionalQueryParams.slice(1);\n }\n const prefix = opts.url.includes('?') ? 
'&' : '?';\n opts.url = opts.url + prefix + additionalQueryParams;\n }\n if (typeof options.maxContentLength === 'number') {\n opts.size = options.maxContentLength;\n }\n if (typeof options.maxRedirects === 'number') {\n opts.follow = options.maxRedirects;\n }\n opts.headers = opts.headers || {};\n if (opts.data) {\n const isFormData = typeof FormData === 'undefined'\n ? false\n : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData;\n if (is_stream_1.default.readable(opts.data)) {\n opts.body = opts.data;\n }\n else if (hasBuffer() && Buffer.isBuffer(opts.data)) {\n // Do not attempt to JSON.stringify() a Buffer:\n opts.body = opts.data;\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n }\n else if (typeof opts.data === 'object') {\n // If www-form-urlencoded content type has been set, but data is\n // provided as an object, serialize the content using querystring:\n if (!isFormData) {\n if (getHeader(opts, 'content-type') ===\n 'application/x-www-form-urlencoded') {\n opts.body = opts.paramsSerializer(opts.data);\n }\n else {\n // } else if (!(opts.data instanceof FormData)) {\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n opts.body = JSON.stringify(opts.data);\n }\n }\n }\n else {\n opts.body = opts.data;\n }\n }\n opts.validateStatus = opts.validateStatus || this.validateStatus;\n opts.responseType = opts.responseType || 'json';\n if (!opts.headers['Accept'] && opts.responseType === 'json') {\n opts.headers['Accept'] = 'application/json';\n }\n opts.method = opts.method || 'GET';\n const proxy = getProxy(opts.url);\n if (proxy) {\n if (this.agentCache.has(proxy)) {\n opts.agent = this.agentCache.get(proxy);\n }\n else {\n // Proxy is being used in conjunction with mTLS.\n if (opts.cert && opts.key) {\n const parsedURL = new url_1.URL(proxy);\n opts.agent = new HttpsProxyAgent({\n port: parsedURL.port,\n host: parsedURL.host,\n protocol: parsedURL.protocol,\n cert: opts.cert,\n key: opts.key,\n });\n }\n else {\n opts.agent = new HttpsProxyAgent(proxy);\n }\n this.agentCache.set(proxy, opts.agent);\n }\n }\n else if (opts.cert && opts.key) {\n // Configure client for mTLS:\n if (this.agentCache.has(opts.key)) {\n opts.agent = this.agentCache.get(opts.key);\n }\n else {\n opts.agent = new https_1.Agent({\n cert: opts.cert,\n key: opts.key,\n });\n this.agentCache.set(opts.key, opts.agent);\n }\n }\n return opts;\n }\n /**\n * By default, throw for any non-2xx status code\n * @param status status code from the HTTP response\n */\n validateStatus(status) {\n return status >= 200 && status < 300;\n }\n /**\n * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo)\n * @param params key value pars to encode\n */\n paramsSerializer(params) {\n return querystring_1.default.stringify(params);\n }\n translateResponse(opts, res, data) {\n // headers need to be converted from a map to an obj\n const headers = {};\n res.headers.forEach((value, key) => {\n headers[key] = value;\n });\n return {\n config: opts,\n data: data,\n headers,\n status: res.status,\n statusText: res.statusText,\n // XMLHttpRequestLike\n request: {\n responseURL: res.url,\n },\n };\n }\n}\nexports.Gaxios = Gaxios;\n//# sourceMappingURL=gaxios.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License 
at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0;\nconst gaxios_1 = require(\"./gaxios\");\nObject.defineProperty(exports, \"Gaxios\", { enumerable: true, get: function () { return gaxios_1.Gaxios; } });\nvar common_1 = require(\"./common\");\nObject.defineProperty(exports, \"GaxiosError\", { enumerable: true, get: function () { return common_1.GaxiosError; } });\n/**\n * The default instance used when the `request` method is directly\n * invoked.\n */\nexports.instance = new gaxios_1.Gaxios();\n/**\n * Make an HTTP request using the given options.\n * @param opts Options for the request\n */\nasync function request(opts) {\n return exports.instance.request(opts);\n}\nexports.request = request;\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRetryConfig = void 0;\nasync function getRetryConfig(err) {\n var _a;\n let config = getConfig(err);\n if (!err || !err.config || (!config && !err.config.retry)) {\n return { shouldRetry: false };\n }\n config = config || {};\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n config.retry =\n config.retry === undefined || config.retry === null ? 3 : config.retry;\n config.httpMethodsToRetry = config.httpMethodsToRetry || [\n 'GET',\n 'HEAD',\n 'PUT',\n 'OPTIONS',\n 'DELETE',\n ];\n config.noResponseRetries =\n config.noResponseRetries === undefined || config.noResponseRetries === null\n ? 
2\n : config.noResponseRetries;\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n config.statusCodesToRetry = config.statusCodesToRetry || retryRanges;\n // Put the config back into the err\n err.config.retryConfig = config;\n // Determine if we should retry the request\n const shouldRetryFn = config.shouldRetry || shouldRetryRequest;\n if (!(await shouldRetryFn(err))) {\n return { shouldRetry: false, config: err.config };\n }\n // Calculate time to wait with exponential backoff.\n // If this is the first retry, look for a configured retryDelay.\n const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100;\n // Formula: retryDelay + ((2^c - 1 / 2) * 1000)\n const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000;\n // We're going to retry! Incremenent the counter.\n err.config.retryConfig.currentRetryAttempt += 1;\n // Create a promise that invokes the retry after the backOffDelay\n const backoff = config.retryBackoff\n ? config.retryBackoff(err, delay)\n : new Promise(resolve => {\n setTimeout(resolve, delay);\n });\n // Notify the user if they added an `onRetryAttempt` handler\n if (config.onRetryAttempt) {\n config.onRetryAttempt(err);\n }\n // Return the promise in which recalls Gaxios to retry the request\n await backoff;\n return { shouldRetry: true, config: err.config };\n}\nexports.getRetryConfig = getRetryConfig;\n/**\n * Determine based on config if we should retry the request.\n * @param err The GaxiosError passed to the interceptor.\n */\nfunction shouldRetryRequest(err) {\n const config = getConfig(err);\n // node-fetch raises an AbortError if signaled:\n // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal\n if (err.name === 'AbortError') {\n return false;\n }\n // If there's no config, or retries are disabled, return.\n if (!config || config.retry === 0) {\n return false;\n }\n // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc)\n if (!err.response &&\n (config.currentRetryAttempt || 0) >= config.noResponseRetries) {\n return false;\n }\n // Only retry with configured HttpMethods.\n if (!err.config.method ||\n config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) {\n return false;\n }\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n if (err.response && err.response.status) {\n let isInRange = false;\n for (const [min, max] of config.statusCodesToRetry) {\n const status = err.response.status;\n if (status >= min && status <= max) {\n isInRange = true;\n break;\n }\n }\n if (!isInRange) {\n return false;\n }\n }\n // If we are out of retry attempts, return\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n if (config.currentRetryAttempt >= config.retry) {\n return false;\n }\n return true;\n}\n/**\n * Acquire the raxConfig object from an GaxiosError if available.\n * @param err The Gaxios error with a config object.\n */\nfunction getConfig(err) {\n if (err && err.config && err.config.retryConfig) {\n return err.config.retryConfig;\n }\n 
return;\n}\n//# sourceMappingURL=retry.js.map","\"use strict\";\n/**\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\n/**\n * Known paths unique to Google Compute Engine Linux instances\n */\nexports.GCE_LINUX_BIOS_PATHS = {\n BIOS_DATE: '/sys/class/dmi/id/bios_date',\n BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor',\n};\nconst GCE_MAC_ADDRESS_REGEX = /^42:01/;\n/**\n * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance).\n *\n * Uses the:\n * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}.\n *\n * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise.\n */\nfunction isGoogleCloudServerless() {\n /**\n * `CLOUD_RUN_JOB` is used for Cloud Run Jobs\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n *\n * `FUNCTION_NAME` is used in older Cloud Functions environments:\n * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}.\n *\n * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments:\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}.\n */\n const isGFEnvironment = process.env.CLOUD_RUN_JOB ||\n process.env.FUNCTION_NAME ||\n process.env.K_SERVICE;\n return !!isGFEnvironment;\n}\nexports.isGoogleCloudServerless = isGoogleCloudServerless;\n/**\n * Determines if the process is running on a Linux Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngineLinux() {\n if ((0, os_1.platform)() !== 'linux')\n return false;\n try {\n // ensure this file exist\n (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE);\n // ensure this file exist and matches\n const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8');\n return /Google/.test(biosVendor);\n }\n catch (_a) {\n return false;\n }\n}\nexports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux;\n/**\n * Determines if the process is running on a Google Compute Engine instance with a known\n * MAC address.\n *\n * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise.\n */\nfunction isGoogleComputeEngineMACAddress() {\n const interfaces = (0, 
os_1.networkInterfaces)();\n for (const item of Object.values(interfaces)) {\n if (!item)\n continue;\n for (const { mac } of item) {\n if (GCE_MAC_ADDRESS_REGEX.test(mac)) {\n return true;\n }\n }\n }\n return false;\n}\nexports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress;\n/**\n * Determines if the process is running on a Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngine() {\n return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress();\n}\nexports.isGoogleComputeEngine = isGoogleComputeEngine;\n/**\n * Determines if the process is running on Google Cloud Platform.\n *\n * @returns {boolean} `true` if the process is running on GCP, `false` otherwise.\n */\nfunction detectGCPResidency() {\n return isGoogleCloudServerless() || isGoogleComputeEngine();\n}\nexports.detectGCPResidency = detectGCPResidency;\n//# sourceMappingURL=gcp-residency.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.requestTimeout = exports.setGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.project = exports.instance = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst jsonBigint = require(\"json-bigint\");\nconst gcp_residency_1 = require(\"./gcp-residency\");\nexports.BASE_PATH = '/computeMetadata/v1';\nexports.HOST_ADDRESS = 'http://169.254.169.254';\nexports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.';\nexports.HEADER_NAME = 'Metadata-Flavor';\nexports.HEADER_VALUE = 'Google';\nexports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE });\n/**\n * Returns the base URL while taking into account the GCE_METADATA_HOST\n * environment variable if it exists.\n *\n * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1.\n */\nfunction getBaseUrl(baseUrl) {\n if (!baseUrl) {\n baseUrl =\n process.env.GCE_METADATA_IP ||\n process.env.GCE_METADATA_HOST ||\n exports.HOST_ADDRESS;\n }\n // If no scheme is provided default to HTTP:\n if (!/^https?:\\/\\//.test(baseUrl)) {\n baseUrl = `http://${baseUrl}`;\n }\n return new URL(exports.BASE_PATH, baseUrl).href;\n}\n// Accepts an options object passed from the user to the API. In previous\n// versions of the API, it referred to a `Request` or an `Axios` request\n// options object. Now it refers to an object with very limited property\n// names. 
This is here to help ensure users don't pass invalid options when\n// they upgrade from 0.4 to 0.5 to 0.8.\nfunction validate(options) {\n Object.keys(options).forEach(key => {\n switch (key) {\n case 'params':\n case 'property':\n case 'headers':\n break;\n case 'qs':\n throw new Error(\"'qs' is not a valid configuration option. Please use 'params' instead.\");\n default:\n throw new Error(`'${key}' is not a valid configuration option.`);\n }\n });\n}\nasync function metadataAccessor(type, options, noResponseRetries = 3, fastFail = false) {\n options = options || {};\n if (typeof options === 'string') {\n options = { property: options };\n }\n let property = '';\n if (typeof options === 'object' && options.property) {\n property = '/' + options.property;\n }\n validate(options);\n try {\n const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request;\n const res = await requestMethod({\n url: `${getBaseUrl()}/${type}${property}`,\n headers: Object.assign({}, exports.HEADERS, options.headers),\n retryConfig: { noResponseRetries },\n params: options.params,\n responseType: 'text',\n timeout: requestTimeout(),\n });\n // NOTE: node.js converts all incoming headers to lower case.\n if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) {\n throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`);\n }\n else if (!res.data) {\n throw new Error('Invalid response from the metadata service');\n }\n if (typeof res.data === 'string') {\n try {\n return jsonBigint.parse(res.data);\n }\n catch (_a) {\n /* ignore */\n }\n }\n return res.data;\n }\n catch (e) {\n const err = e;\n if (err.response && err.response.status !== 200) {\n err.message = `Unsuccessful response status code. ${err.message}`;\n }\n throw e;\n }\n}\nasync function fastFailMetadataRequest(options) {\n const secondaryOptions = {\n ...options,\n url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)),\n };\n // We race a connection between DNS/IP to metadata server. There are a couple\n // reasons for this:\n //\n // 1. the DNS is slow in some GCP environments; by checking both, we might\n // detect the runtime environment signficantly faster.\n // 2. 
we can't just check the IP, which is tarpitted and slow to respond\n // on a user's local machine.\n //\n // Additional logic has been added to make sure that we don't create an\n // unhandled rejection in scenarios where a failure happens sometime\n // after a success.\n //\n // Note, however, if a failure happens prior to a success, a rejection should\n // occur, this is for folks running locally.\n //\n let responded = false;\n const r1 = (0, gaxios_1.request)(options)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r2;\n }\n else {\n responded = true;\n throw err;\n }\n });\n const r2 = (0, gaxios_1.request)(secondaryOptions)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r1;\n }\n else {\n responded = true;\n throw err;\n }\n });\n return Promise.race([r1, r2]);\n}\n/**\n * Obtain metadata for the current GCE instance\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction instance(options) {\n return metadataAccessor('instance', options);\n}\nexports.instance = instance;\n/**\n * Obtain metadata for the current GCP Project.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction project(options) {\n return metadataAccessor('project', options);\n}\nexports.project = project;\n/*\n * How many times should we retry detecting GCP environment.\n */\nfunction detectGCPAvailableRetries() {\n return process.env.DETECT_GCP_RETRIES\n ? Number(process.env.DETECT_GCP_RETRIES)\n : 0;\n}\nlet cachedIsAvailableResponse;\n/**\n * Determine if the metadata server is currently available.\n */\nasync function isAvailable() {\n try {\n // If a user is instantiating several GCP libraries at the same time,\n // this may result in multiple calls to isAvailable(), to detect the\n // runtime environment. 
We use the same promise for each of these calls\n // to reduce the network load.\n if (cachedIsAvailableResponse === undefined) {\n cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), \n // If the default HOST_ADDRESS has been overridden, we should not\n // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in\n // a non-GCP environment):\n !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST));\n }\n await cachedIsAvailableResponse;\n return true;\n }\n catch (e) {\n const err = e;\n if (process.env.DEBUG_AUTH) {\n console.info(err);\n }\n if (err.type === 'request-timeout') {\n // If running in a GCP environment, metadata endpoint should return\n // within ms.\n return false;\n }\n if (err.response && err.response.status === 404) {\n return false;\n }\n else {\n if (!(err.response && err.response.status === 404) &&\n // A warning is emitted if we see an unexpected err.code, or err.code\n // is not populated:\n (!err.code ||\n ![\n 'EHOSTDOWN',\n 'EHOSTUNREACH',\n 'ENETUNREACH',\n 'ENOENT',\n 'ENOTFOUND',\n 'ECONNREFUSED',\n ].includes(err.code))) {\n let code = 'UNKNOWN';\n if (err.code)\n code = err.code;\n process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning');\n }\n // Failure to resolve the metadata service means that it is not available.\n return false;\n }\n }\n}\nexports.isAvailable = isAvailable;\n/**\n * reset the memoized isAvailable() lookup.\n */\nfunction resetIsAvailableCache() {\n cachedIsAvailableResponse = undefined;\n}\nexports.resetIsAvailableCache = resetIsAvailableCache;\n/**\n * A cache for the detected GCP Residency.\n */\nexports.gcpResidencyCache = null;\n/**\n * Sets the detected GCP Residency.\n * Useful for forcing metadata server detection behavior.\n *\n * Set `null` to autodetect the environment (default behavior).\n */\nfunction setGCPResidency(value = null) {\n exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)();\n}\nexports.setGCPResidency = setGCPResidency;\n/**\n * Obtain the timeout for requests to the metadata server.\n *\n * In certain environments and conditions requests can take longer than\n * the default timeout to complete. This function will determine the\n * appropriate timeout based on the environment.\n *\n * @returns {number} a request timeout duration in milliseconds.\n */\nfunction requestTimeout() {\n // Detecting the residency can be resource-intensive. Let's cache the result.\n if (exports.gcpResidencyCache === null) {\n exports.gcpResidencyCache = (0, gcp_residency_1.detectGCPResidency)();\n }\n return exports.gcpResidencyCache ? 
0 : 3000;\n}\nexports.requestTimeout = requestTimeout;\n__exportStar(require(\"./gcp-residency\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2012 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AuthClient = void 0;\nconst events_1 = require(\"events\");\nconst transporters_1 = require(\"../transporters\");\nclass AuthClient extends events_1.EventEmitter {\n constructor() {\n super(...arguments);\n this.transporter = new transporters_1.DefaultTransporter();\n this.credentials = {};\n this.eagerRefreshThresholdMillis = 5 * 60 * 1000;\n this.forceRefreshOnFailure = false;\n }\n /**\n * Sets the auth credentials.\n */\n setCredentials(credentials) {\n this.credentials = credentials;\n }\n /**\n * Append additional headers, e.g., x-goog-user-project, shared across the\n * classes inheriting AuthClient. This method should be used by any method\n * that overrides getRequestMetadataAsync(), which is a shared helper for\n * setting request information in both gRPC and HTTP API calls.\n *\n * @param headers object to append additional headers to.\n */\n addSharedMetadataHeaders(headers) {\n // quota_project_id, stored in application_default_credentials.json, is set in\n // the x-goog-user-project header, to indicate an alternate account for\n // billing and quota:\n if (!headers['x-goog-user-project'] && // don't override a value the user sets.\n this.quotaProjectId) {\n headers['x-goog-user-project'] = this.quotaProjectId;\n }\n return headers;\n }\n}\nexports.AuthClient = AuthClient;\n//# sourceMappingURL=authclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsClient = void 0;\nconst awsrequestsigner_1 = require(\"./awsrequestsigner\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n/**\n * AWS external account client. 
This is used for AWS workloads, where\n * AWS STS GetCallerIdentity serialized signed requests are exchanged for\n * GCP access token.\n */\nclass AwsClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates an AwsClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid AWS credential.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n this.environmentId = options.credential_source.environment_id;\n // This is only required if the AWS region is not available in the\n // AWS_REGION or AWS_DEFAULT_REGION environment variables.\n this.regionUrl = options.credential_source.region_url;\n // This is only required if AWS security credentials are not available in\n // environment variables.\n this.securityCredentialsUrl = options.credential_source.url;\n this.regionalCredVerificationUrl =\n options.credential_source.regional_cred_verification_url;\n this.imdsV2SessionTokenUrl =\n options.credential_source.imdsv2_session_token_url;\n this.awsRequestSigner = null;\n this.region = '';\n // data validators\n this.validateEnvironmentId();\n this.validateMetadataServerURLs();\n }\n validateEnvironmentId() {\n var _a;\n const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\\d+)$/);\n if (!match || !this.regionalCredVerificationUrl) {\n throw new Error('No valid AWS \"credential_source\" provided');\n }\n else if (parseInt(match[2], 10) !== 1) {\n throw new Error(`aws version \"${match[2]}\" is not supported in the current build.`);\n }\n }\n validateMetadataServerURLs() {\n this.validateMetadataURL(this.regionUrl, 'region_url');\n this.validateMetadataURL(this.securityCredentialsUrl, 'url');\n this.validateMetadataURL(this.imdsV2SessionTokenUrl, 'imdsv2_session_token_url');\n }\n validateMetadataURL(value, prop) {\n if (!value)\n return;\n const url = new URL(value);\n if (url.hostname !== AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS &&\n url.hostname !== `[${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}]`) {\n throw new RangeError(`Invalid host \"${url.hostname}\" for \"${prop}\". Expecting ${AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS} or ${AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS}.`);\n }\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this uses a serialized AWS signed request to the STS GetCallerIdentity\n * endpoint.\n * The logic is summarized as:\n * 1. If imdsv2_session_token_url is provided in the credential source, then\n * fetch the aws session token and include it in the headers of the\n * metadata requests. This is a requirement for IDMSv2 but optional\n * for IDMSv1.\n * 2. Retrieve AWS region from availability-zone.\n * 3a. Check AWS credentials in environment variables. If not found, get\n * from security-credentials endpoint.\n * 3b. Get AWS credentials from security-credentials endpoint. 
In order\n * to retrieve this, the AWS role needs to be determined by calling\n * security-credentials endpoint without any argument. Then the\n * credentials can be retrieved via: security-credentials/role_name\n * 4. Generate the signed request to AWS STS GetCallerIdentity action.\n * 5. Inject x-goog-cloud-target-resource into header and serialize the\n * signed request. This will be the subject-token to pass to GCP STS.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Initialize AWS request signer if not already initialized.\n if (!this.awsRequestSigner) {\n const metadataHeaders = {};\n if (this.imdsV2SessionTokenUrl) {\n metadataHeaders['x-aws-ec2-metadata-token'] =\n await this.getImdsV2SessionToken();\n }\n this.region = await this.getAwsRegion(metadataHeaders);\n this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => {\n // Check environment variables for permanent credentials first.\n // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html\n if (process.env['AWS_ACCESS_KEY_ID'] &&\n process.env['AWS_SECRET_ACCESS_KEY']) {\n return {\n accessKeyId: process.env['AWS_ACCESS_KEY_ID'],\n secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'],\n // This is normally not available for permanent credentials.\n token: process.env['AWS_SESSION_TOKEN'],\n };\n }\n // Since the role on a VM can change, we don't need to cache it.\n const roleName = await this.getAwsRoleName(metadataHeaders);\n // Temporary credentials typically last for several hours.\n // Expiration is returned in response.\n // Consider future optimization of this logic to cache AWS tokens\n // until their natural expiration.\n const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders);\n return {\n accessKeyId: awsCreds.AccessKeyId,\n secretAccessKey: awsCreds.SecretAccessKey,\n token: awsCreds.Token,\n };\n }, this.region);\n }\n // Generate signed request to AWS STS GetCallerIdentity API.\n // Use the required regional endpoint. 
Otherwise, the request will fail.\n const options = await this.awsRequestSigner.getRequestOptions({\n url: this.regionalCredVerificationUrl.replace('{region}', this.region),\n method: 'POST',\n });\n // The GCP STS endpoint expects the headers to be formatted as:\n // [\n // {key: 'x-amz-date', value: '...'},\n // {key: 'Authorization', value: '...'},\n // ...\n // ]\n // And then serialized as:\n // encodeURIComponent(JSON.stringify({\n // url: '...',\n // method: 'POST',\n // headers: [{key: 'x-amz-date', value: '...'}, ...]\n // }))\n const reformattedHeader = [];\n const extendedHeaders = Object.assign({\n // The full, canonical resource name of the workload identity pool\n // provider, with or without the HTTPS prefix.\n // Including this header as part of the signature is recommended to\n // ensure data integrity.\n 'x-goog-cloud-target-resource': this.audience,\n }, options.headers);\n // Reformat header to GCP STS expected format.\n for (const key in extendedHeaders) {\n reformattedHeader.push({\n key,\n value: extendedHeaders[key],\n });\n }\n // Serialize the reformatted signed request.\n return encodeURIComponent(JSON.stringify({\n url: options.url,\n method: options.method,\n headers: reformattedHeader,\n }));\n }\n /**\n * @return A promise that resolves with the IMDSv2 Session Token.\n */\n async getImdsV2SessionToken() {\n const opts = {\n url: this.imdsV2SessionTokenUrl,\n method: 'PUT',\n responseType: 'text',\n headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' },\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the current AWS region.\n */\n async getAwsRegion(headers) {\n // Priority order for region determination:\n // AWS_REGION > AWS_DEFAULT_REGION > metadata server.\n if (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']) {\n return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION']);\n }\n if (!this.regionUrl) {\n throw new Error('Unable to determine AWS region due to missing ' +\n '\"options.credential_source.region_url\"');\n }\n const opts = {\n url: this.regionUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n // Remove last character. For example, if us-east-2b is returned,\n // the region would be us-east-2.\n return response.data.substr(0, response.data.length - 1);\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the assigned role to the current\n * AWS VM. 
This is needed for calling the security-credentials endpoint.\n */\n async getAwsRoleName(headers) {\n if (!this.securityCredentialsUrl) {\n throw new Error('Unable to determine AWS role name due to missing ' +\n '\"options.credential_source.url\"');\n }\n const opts = {\n url: this.securityCredentialsUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * Retrieves the temporary AWS credentials by calling the security-credentials\n * endpoint as specified in the `credential_source` object.\n * @param roleName The role attached to the current VM.\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the temporary AWS credentials\n * needed for creating the GetCallerIdentity signed request.\n */\n async getAwsSecurityCredentials(roleName, headers) {\n const response = await this.transporter.request({\n url: `${this.securityCredentialsUrl}/${roleName}`,\n responseType: 'json',\n headers: headers,\n });\n return response.data;\n }\n}\nexports.AwsClient = AwsClient;\nAwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254';\nAwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254';\n//# sourceMappingURL=awsclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsRequestSigner = void 0;\nconst crypto_1 = require(\"../crypto/crypto\");\n/** AWS Signature Version 4 signing algorithm identifier. */\nconst AWS_ALGORITHM = 'AWS4-HMAC-SHA256';\n/**\n * The termination string for the AWS credential scope value as defined in\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n */\nconst AWS_REQUEST_TYPE = 'aws4_request';\n/**\n * Implements an AWS API request signer based on the AWS Signature Version 4\n * signing process.\n * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html\n */\nclass AwsRequestSigner {\n /**\n * Instantiates an AWS API request signer used to send authenticated signed\n * requests to AWS APIs based on the AWS Signature Version 4 signing process.\n * This also provides a mechanism to generate the signed request without\n * sending it.\n * @param getCredentials A mechanism to retrieve AWS security credentials\n * when needed.\n * @param region The AWS region to use.\n */\n constructor(getCredentials, region) {\n this.getCredentials = getCredentials;\n this.region = region;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Generates the signed request for the provided HTTP request for calling\n * an AWS API. 
This follows the steps described at:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html\n * @param amzOptions The AWS request options that need to be signed.\n * @return A promise that resolves with the GaxiosOptions containing the\n * signed HTTP request parameters.\n */\n async getRequestOptions(amzOptions) {\n if (!amzOptions.url) {\n throw new Error('\"url\" is required in \"amzOptions\"');\n }\n // Stringify JSON requests. This will be set in the request body of the\n // generated signed request.\n const requestPayloadData = typeof amzOptions.data === 'object'\n ? JSON.stringify(amzOptions.data)\n : amzOptions.data;\n const url = amzOptions.url;\n const method = amzOptions.method || 'GET';\n const requestPayload = amzOptions.body || requestPayloadData;\n const additionalAmzHeaders = amzOptions.headers;\n const awsSecurityCredentials = await this.getCredentials();\n const uri = new URL(url);\n const headerMap = await generateAuthenticationHeaderMap({\n crypto: this.crypto,\n host: uri.host,\n canonicalUri: uri.pathname,\n canonicalQuerystring: uri.search.substr(1),\n method,\n region: this.region,\n securityCredentials: awsSecurityCredentials,\n requestPayload,\n additionalAmzHeaders,\n });\n // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc.\n const headers = Object.assign(\n // Add x-amz-date if available.\n headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, {\n Authorization: headerMap.authorizationHeader,\n host: uri.host,\n }, additionalAmzHeaders || {});\n if (awsSecurityCredentials.token) {\n Object.assign(headers, {\n 'x-amz-security-token': awsSecurityCredentials.token,\n });\n }\n const awsSignedReq = {\n url,\n method: method,\n headers,\n };\n if (typeof requestPayload !== 'undefined') {\n awsSignedReq.body = requestPayload;\n }\n return awsSignedReq;\n }\n}\nexports.AwsRequestSigner = AwsRequestSigner;\n/**\n * Creates the HMAC-SHA256 hash of the provided message using the\n * provided key.\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The HMAC-SHA256 key to use.\n * @param msg The message to hash.\n * @return The computed hash bytes.\n */\nasync function sign(crypto, key, msg) {\n return await crypto.signWithHmacSha256(key, msg);\n}\n/**\n * Calculates the signing key used to calculate the signature for\n * AWS Signature Version 4 based on:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The AWS secret access key.\n * @param dateStamp The '%Y%m%d' date format.\n * @param region The AWS region.\n * @param serviceName The AWS service name, eg. 
sts.\n * @return The signing key bytes.\n */\nasync function getSigningKey(crypto, key, dateStamp, region, serviceName) {\n const kDate = await sign(crypto, `AWS4${key}`, dateStamp);\n const kRegion = await sign(crypto, kDate, region);\n const kService = await sign(crypto, kRegion, serviceName);\n const kSigning = await sign(crypto, kService, 'aws4_request');\n return kSigning;\n}\n/**\n * Generates the authentication header map needed for generating the AWS\n * Signature Version 4 signed request.\n *\n * @param option The options needed to compute the authentication header map.\n * @return The AWS authentication header map which constitutes of the following\n * components: amz-date, authorization header and canonical query string.\n */\nasync function generateAuthenticationHeaderMap(options) {\n const additionalAmzHeaders = options.additionalAmzHeaders || {};\n const requestPayload = options.requestPayload || '';\n // iam.amazonaws.com host => iam service.\n // sts.us-east-2.amazonaws.com => sts service.\n const serviceName = options.host.split('.')[0];\n const now = new Date();\n // Format: '%Y%m%dT%H%M%SZ'.\n const amzDate = now\n .toISOString()\n .replace(/[-:]/g, '')\n .replace(/\\.[0-9]+/, '');\n // Format: '%Y%m%d'.\n const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, '');\n // Change all additional headers to be lower case.\n const reformattedAdditionalAmzHeaders = {};\n Object.keys(additionalAmzHeaders).forEach(key => {\n reformattedAdditionalAmzHeaders[key.toLowerCase()] =\n additionalAmzHeaders[key];\n });\n // Add AWS token if available.\n if (options.securityCredentials.token) {\n reformattedAdditionalAmzHeaders['x-amz-security-token'] =\n options.securityCredentials.token;\n }\n // Header keys need to be sorted alphabetically.\n const amzHeaders = Object.assign({\n host: options.host,\n }, \n // Previously the date was not fixed with x-amz- and could be provided manually.\n // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req\n reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders);\n let canonicalHeaders = '';\n const signedHeadersList = Object.keys(amzHeaders).sort();\n signedHeadersList.forEach(key => {\n canonicalHeaders += `${key}:${amzHeaders[key]}\\n`;\n });\n const signedHeaders = signedHeadersList.join(';');\n const payloadHash = await options.crypto.sha256DigestHex(requestPayload);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html\n const canonicalRequest = `${options.method}\\n` +\n `${options.canonicalUri}\\n` +\n `${options.canonicalQuerystring}\\n` +\n `${canonicalHeaders}\\n` +\n `${signedHeaders}\\n` +\n `${payloadHash}`;\n const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`;\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n const stringToSign = `${AWS_ALGORITHM}\\n` +\n `${amzDate}\\n` +\n `${credentialScope}\\n` +\n (await options.crypto.sha256DigestHex(canonicalRequest));\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName);\n const signature = await sign(options.crypto, signingKey, stringToSign);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html\n const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` +\n `${credentialScope}, SignedHeaders=${signedHeaders}, ` +\n `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`;\n return {\n // Do not return x-amz-date if date is available.\n amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate,\n authorizationHeader,\n canonicalQuerystring: options.canonicalQuerystring,\n };\n}\n//# sourceMappingURL=awsrequestsigner.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BaseExternalAccountClient = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/** The default OAuth scope to request when none is provided. */\nconst DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform';\n/** The google apis domain pattern. */\nconst GOOGLE_APIS_DOMAIN_PATTERN = '\\\\.googleapis\\\\.com$';\n/** The variable portion pattern in a Google APIs domain. 
*/\nconst VARIABLE_PORTION_PATTERN = '[^\\\\.\\\\s\\\\/\\\\\\\\]+';\n/** Default impersonated token lifespan in seconds.*/\nconst DEFAULT_TOKEN_LIFESPAN = 3600;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * The credentials JSON file type for external account clients.\n * There are 3 types of JSON configs:\n * 1. authorized_user => Google end user credential\n * 2. service_account => Google service account credential\n * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s)\n */\nexports.EXTERNAL_ACCOUNT_TYPE = 'external_account';\n/** Cloud resource manager URL used to retrieve project information. */\nexports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/';\n/** The workforce audience pattern. */\nconst WORKFORCE_AUDIENCE_PATTERN = '//iam.googleapis.com/locations/[^/]+/workforcePools/[^/]+/providers/.+';\n/**\n * Base external account client. This is used to instantiate AuthClients for\n * exchanging external account credentials for GCP access token and authorizing\n * requests to GCP APIs.\n * The base class implements common logic for exchanging various type of\n * external credentials for GCP access token. The logic of determining and\n * retrieving the external credential based on the environment and\n * credential_source will be left for the subclasses.\n */\nclass BaseExternalAccountClient extends authclient_1.AuthClient {\n /**\n * Instantiate a BaseExternalAccountClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n var _a, _b;\n super();\n if (options.type !== exports.EXTERNAL_ACCOUNT_TYPE) {\n throw new Error(`Expected \"${exports.EXTERNAL_ACCOUNT_TYPE}\" type but ` +\n `received \"${options.type}\"`);\n }\n this.clientAuth = options.client_id\n ? {\n confidentialClientType: 'basic',\n clientId: options.client_id,\n clientSecret: options.client_secret,\n }\n : undefined;\n if (!this.validateGoogleAPIsUrl('sts', options.token_url)) {\n throw new Error(`\"${options.token_url}\" is not a valid token url.`);\n }\n this.stsCredential = new sts.StsCredentials(options.token_url, this.clientAuth);\n // Default OAuth scope. 
This could be overridden via public property.\n this.scopes = [DEFAULT_OAUTH_SCOPE];\n this.cachedAccessToken = null;\n this.audience = options.audience;\n this.subjectTokenType = options.subject_token_type;\n this.quotaProjectId = options.quota_project_id;\n this.workforcePoolUserProject = options.workforce_pool_user_project;\n const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN);\n if (this.workforcePoolUserProject &&\n !this.audience.match(workforceAudiencePattern)) {\n throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' +\n 'credentials.');\n }\n if (typeof options.service_account_impersonation_url !== 'undefined' &&\n !this.validateGoogleAPIsUrl('iamcredentials', options.service_account_impersonation_url)) {\n throw new Error(`\"${options.service_account_impersonation_url}\" is ` +\n 'not a valid service account impersonation url.');\n }\n this.serviceAccountImpersonationUrl =\n options.service_account_impersonation_url;\n this.serviceAccountImpersonationLifetime =\n (_b = (_a = options.service_account_impersonation) === null || _a === void 0 ? void 0 : _a.token_lifetime_seconds) !== null && _b !== void 0 ? _b : DEFAULT_TOKEN_LIFESPAN;\n // As threshold could be zero,\n // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the\n // zero value.\n if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {\n this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET;\n }\n else {\n this.eagerRefreshThresholdMillis = additionalOptions\n .eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure);\n this.projectId = null;\n this.projectNumber = this.getProjectNumber(this.audience);\n }\n /** The service account email to be impersonated, if available. */\n getServiceAccountEmail() {\n var _a;\n if (this.serviceAccountImpersonationUrl) {\n // Parse email from URL. The format looks as follows:\n // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken\n const re = /serviceAccounts\\/(?<email>[^:]+):generateAccessToken$/;\n const result = re.exec(this.serviceAccountImpersonationUrl);\n return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null;\n }\n return null;\n }\n /**\n * Provides a mechanism to inject GCP access tokens directly.\n * When the provided credential expires, a new credential, using the\n * external account options, is retrieved.\n * @param credentials The Credentials object to set on the current client.\n */\n setCredentials(credentials) {\n super.setCredentials(credentials);\n this.cachedAccessToken = credentials;\n }\n /**\n * @return A promise that resolves with the current GCP access token\n * response. If the current credential is expired, a new one is retrieved.\n */\n async getAccessToken() {\n // If cached access token is unavailable or expired, force refresh.\n if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return GCP access token in GetAccessTokenResponse format.\n return {\n token: this.cachedAccessToken.access_token,\n res: this.cachedAccessToken.res,\n };\n }\n /**\n * The main authentication interface. 
It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * The result has the form:\n * { Authorization: 'Bearer <access_token>' }\n */\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * @return A promise that resolves with the project ID corresponding to the\n * current workload identity pool or current workforce pool if\n * determinable. For workforce pool credential, it returns the project ID\n * corresponding to the workforcePoolUserProject.\n * This is introduced to match the current pattern of using the Auth\n * library:\n * const projectId = await auth.getProjectId();\n * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`;\n * const res = await client.request({ url });\n * The resource may not have permission\n * (resourcemanager.projects.get) to call this API or the required\n * scopes may not be selected:\n * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes\n */\n async getProjectId() {\n const projectNumber = this.projectNumber || this.workforcePoolUserProject;\n if (this.projectId) {\n // Return previously determined project ID.\n return this.projectId;\n }\n else if (projectNumber) {\n // Preferable not to use request() to avoid retrial policies.\n const headers = await this.getRequestHeaders();\n const response = await this.transporter.request({\n headers,\n url: `${exports.CLOUD_RESOURCE_MANAGER}${projectNumber}`,\n responseType: 'json',\n });\n this.projectId = response.data.projectId;\n return this.projectId;\n }\n return null;\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * External credentials are exchanged for GCP access tokens via the token\n * exchange endpoint and other settings provided in the client options\n * object.\n * If the service_account_impersonation_url is provided, an additional\n * step to exchange the external account GCP access token for a service\n * account impersonated token is performed.\n * @return A promise that resolves with the fresh GCP access tokens.\n */\n async refreshAccessTokenAsync() {\n // Retrieve the external credential.\n const subjectToken = await this.retrieveSubjectToken();\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n audience: this.audience,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken,\n subjectTokenType: this.subjectTokenType,\n // generateAccessToken requires the provided access token to have\n // scopes:\n // https://www.googleapis.com/auth/iam or\n // https://www.googleapis.com/auth/cloud-platform\n // The new service account access token scopes will match the user\n // provided ones.\n scope: this.serviceAccountImpersonationUrl\n ? [DEFAULT_OAUTH_SCOPE]\n : this.getScopesArray(),\n };\n // Exchange the external credentials for a GCP access token.\n // Client auth is prioritized over passing the workforcePoolUserProject\n // parameter for STS token exchange.\n const additionalOptions = !this.clientAuth && this.workforcePoolUserProject\n ? { userProject: this.workforcePoolUserProject }\n : undefined;\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, additionalOptions);\n if (this.serviceAccountImpersonationUrl) {\n this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token);\n }\n else if (stsResponse.expires_in) {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: new Date().getTime() + stsResponse.expires_in * 1000,\n res: stsResponse.res,\n };\n }\n else {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n res: stsResponse.res,\n };\n }\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedAccessToken.expiry_date,\n access_token: this.cachedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedAccessToken;\n }\n /**\n * Returns the workload identity pool project number if it is determinable\n * from the audience resource name.\n * @param audience The STS audience used to determine the project number.\n * @return The project number associated with the workload identity pool, if\n * this can be determined from the STS audience field. 
Otherwise, null is\n * returned.\n */\n getProjectNumber(audience) {\n // STS audience pattern:\n // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...\n const match = audience.match(/\\/projects\\/([^/]+)/);\n if (!match) {\n return null;\n }\n return match[1];\n }\n /**\n * Exchanges an external account GCP access token for a service\n * account impersonated access token using iamcredentials\n * GenerateAccessToken API.\n * @param token The access token to exchange for a service account access\n * token.\n * @return A promise that resolves with the service account impersonated\n * credentials response.\n */\n async getImpersonatedAccessToken(token) {\n const opts = {\n url: this.serviceAccountImpersonationUrl,\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n },\n data: {\n scope: this.getScopesArray(),\n lifetime: this.serviceAccountImpersonationLifetime + 's',\n },\n responseType: 'json',\n };\n const response = await this.transporter.request(opts);\n const successResponse = response.data;\n return {\n access_token: successResponse.accessToken,\n // Convert from ISO format to timestamp.\n expiry_date: new Date(successResponse.expireTime).getTime(),\n res: response,\n };\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param accessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(accessToken) {\n const now = new Date().getTime();\n return accessToken.expiry_date\n ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n /**\n * @return The list of scopes for the requested GCP access token.\n */\n getScopesArray() {\n // Since scopes can be provided as string or array, the type should\n // be normalized.\n if (typeof this.scopes === 'string') {\n return [this.scopes];\n }\n else if (typeof this.scopes === 'undefined') {\n return [DEFAULT_OAUTH_SCOPE];\n }\n else {\n return this.scopes;\n }\n }\n /**\n * Checks whether Google APIs URL is valid.\n * @param apiName The apiName of url.\n * @param url The Google API URL to validate.\n * @return Whether the URL is valid or not.\n */\n validateGoogleAPIsUrl(apiName, url) {\n let parsedUrl;\n // Return false if error is thrown during parsing URL.\n try {\n parsedUrl = new URL(url);\n }\n catch (e) {\n return false;\n }\n const urlDomain = parsedUrl.hostname;\n // Check the protocol is https.\n if (parsedUrl.protocol !== 'https:') {\n return false;\n }\n const googleAPIsDomainPatterns = [\n new RegExp('^' +\n VARIABLE_PORTION_PATTERN +\n '\\\\.' +\n apiName +\n GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' + apiName + GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' +\n apiName +\n '\\\\.' 
+\n VARIABLE_PORTION_PATTERN +\n GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' +\n VARIABLE_PORTION_PATTERN +\n '\\\\-' +\n apiName +\n GOOGLE_APIS_DOMAIN_PATTERN),\n new RegExp('^' +\n apiName +\n '\\\\-' +\n VARIABLE_PORTION_PATTERN +\n '\\\\.p' +\n GOOGLE_APIS_DOMAIN_PATTERN),\n ];\n for (const googleAPIsDomainPattern of googleAPIsDomainPatterns) {\n if (urlDomain.match(googleAPIsDomainPattern)) {\n return true;\n }\n }\n return false;\n }\n}\nexports.BaseExternalAccountClient = BaseExternalAccountClient;\n//# sourceMappingURL=baseexternalclient.js.map","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Compute = void 0;\nconst arrify = require(\"arrify\");\nconst gaxios_1 = require(\"gaxios\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst oauth2client_1 = require(\"./oauth2client\");\nclass Compute extends oauth2client_1.OAuth2Client {\n /**\n * Google Compute Engine service account credentials.\n *\n * Retrieve access token from the metadata server.\n * See: https://developers.google.com/compute/docs/authentication\n */\n constructor(options = {}) {\n super(options);\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' };\n this.serviceAccountEmail = options.serviceAccountEmail || 'default';\n this.scopes = arrify(options.scopes);\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`;\n let data;\n try {\n const instanceOptions = {\n property: tokenPath,\n };\n if (this.scopes.length > 0) {\n instanceOptions.params = {\n scopes: this.scopes.join(','),\n };\n }\n data = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError) {\n e.message = `Could not refresh access token: ${e.message}`;\n this.wrapError(e);\n }\n throw e;\n }\n const tokens = data;\n if (data && data.expires_in) {\n tokens.expiry_date = new Date().getTime() + data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res: null };\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` +\n `?format=full&audience=${targetAudience}`;\n let idToken;\n try {\n const instanceOptions = {\n property: idTokenPath,\n };\n idToken = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Could not fetch ID token: ${e.message}`;\n }\n throw e;\n }\n return idToken;\n }\n wrapError(e) {\n const res = e.response;\n if (res && 
res.status) {\n e.code = res.status.toString();\n if (res.status === 403) {\n e.message =\n 'A Forbidden error was returned while attempting to retrieve an access ' +\n 'token for the Compute Engine built-in service account. This may be because the Compute ' +\n 'Engine instance does not have the correct permission scopes specified: ' +\n e.message;\n }\n else if (res.status === 404) {\n e.message =\n 'A Not Found error was returned while attempting to retrieve an access' +\n 'token for the Compute Engine built-in service account. This may be because the Compute ' +\n 'Engine instance does not have any permission scopes specified: ' +\n e.message;\n }\n }\n }\n}\nexports.Compute = Compute;\n//# sourceMappingURL=computeclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/**\n * The requested token exchange subject_token_type: rfc8693#section-2.1\n */\nconst STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/** The STS access token exchange end point. */\nconst STS_ACCESS_TOKEN_URL = 'https://sts.googleapis.com/v1/token';\n/**\n * The maximum number of access boundary rules a Credential Access Boundary\n * can contain.\n */\nexports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * Defines a set of Google credentials that are downscoped from an existing set\n * of Google OAuth2 credentials. 
This is useful to restrict the Identity and\n * Access Management (IAM) permissions that a short-lived credential can use.\n * The common pattern of usage is to have a token broker with elevated access\n * generate these downscoped credentials from higher access source credentials\n * and pass the downscoped short-lived access tokens to a token consumer via\n * some secure authenticated channel for limited access to Google Cloud Storage\n * resources.\n */\nclass DownscopedClient extends authclient_1.AuthClient {\n /**\n * Instantiates a downscoped client object using the provided source\n * AuthClient and credential access boundary rules.\n * To downscope permissions of a source AuthClient, a Credential Access\n * Boundary that specifies which resources the new credential can access, as\n * well as an upper bound on the permissions that are available on each\n * resource, has to be defined. A downscoped client can then be instantiated\n * using the source AuthClient and the Credential Access Boundary.\n * @param authClient The source AuthClient to be downscoped based on the\n * provided Credential Access Boundary rules.\n * @param credentialAccessBoundary The Credential Access Boundary which\n * contains a list of access boundary rules. Each rule contains information\n * on the resource that the rule applies to, the upper bound of the\n * permissions that are available on that resource and an optional\n * condition to further restrict permissions.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n * @param quotaProjectId Optional quota project id for setting up in the\n * x-goog-user-project header.\n */\n constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) {\n super();\n this.authClient = authClient;\n this.credentialAccessBoundary = credentialAccessBoundary;\n // Check 1-10 Access Boundary Rules are defined within Credential Access\n // Boundary.\n if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) {\n throw new Error('At least one access boundary rule needs to be defined.');\n }\n else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length >\n exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) {\n throw new Error('The provided access boundary has more than ' +\n `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`);\n }\n // Check at least one permission should be defined in each Access Boundary\n // Rule.\n for (const rule of credentialAccessBoundary.accessBoundary\n .accessBoundaryRules) {\n if (rule.availablePermissions.length === 0) {\n throw new Error('At least one permission should be defined in access boundary rules.');\n }\n }\n this.stsCredential = new sts.StsCredentials(STS_ACCESS_TOKEN_URL);\n this.cachedDownscopedAccessToken = null;\n // As threshold could be zero,\n // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the\n // zero value.\n if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {\n this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET;\n }\n else {\n this.eagerRefreshThresholdMillis = additionalOptions\n .eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.forceRefreshOnFailure);\n this.quotaProjectId = quotaProjectId;\n }\n /**\n * Provides a mechanism to inject Downscoped access tokens directly.\n * The expiry_date field is required to facilitate determination of the token\n * expiration which would make it easier for the token consumer to handle.\n * @param credentials The Credentials object to set on the current client.\n */\n setCredentials(credentials) {\n if (!credentials.expiry_date) {\n throw new Error('The access token expiry_date field is missing in the provided ' +\n 'credentials.');\n }\n super.setCredentials(credentials);\n this.cachedDownscopedAccessToken = credentials;\n }\n async getAccessToken() {\n // If the cached access token is unavailable or expired, force refresh.\n // The Downscoped access token will be returned in\n // DownscopedAccessTokenResponse format.\n if (!this.cachedDownscopedAccessToken ||\n this.isExpired(this.cachedDownscopedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return Downscoped access token in DownscopedAccessTokenResponse format.\n return {\n token: this.cachedDownscopedAccessToken.access_token,\n expirationTime: this.cachedDownscopedAccessToken.expiry_date,\n res: this.cachedDownscopedAccessToken.res,\n };\n }\n /**\n * The main authentication interface. It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * The result has the form:\n * { Authorization: 'Bearer <access_token>' }\n */\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * GCP access tokens are retrieved from authclient object/source credential.\n * Then GCP access tokens are exchanged for downscoped access tokens via the\n * token exchange endpoint.\n * @return A promise that resolves with the fresh downscoped access token.\n */\n async refreshAccessTokenAsync() {\n var _a;\n // Retrieve GCP access token from source credential.\n const subjectToken = (await this.authClient.getAccessToken()).token;\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken: subjectToken,\n subjectTokenType: STS_SUBJECT_TOKEN_TYPE,\n };\n // Exchange the source AuthClient access token for a Downscoped access\n // token.\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary);\n /**\n * The STS endpoint will only return the expiration time for the downscoped\n * access token if the original access token represents a service account.\n * The downscoped token's expiration time will always match the source\n * credential expiration. When no expires_in is returned, we can copy the\n * source credential's expiration time.\n */\n const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null;\n const expiryDate = stsResponse.expires_in\n ? new Date().getTime() + stsResponse.expires_in * 1000\n : sourceCredExpireDate;\n // Save response in cached access token.\n this.cachedDownscopedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: expiryDate,\n res: stsResponse.res,\n };\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedDownscopedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedDownscopedAccessToken.expiry_date,\n access_token: this.cachedDownscopedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedDownscopedAccessToken;\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param downscopedAccessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(downscopedAccessToken) {\n const now = new Date().getTime();\n return downscopedAccessToken.expiry_date\n ? 
now >=\n downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.DownscopedClient = DownscopedClient;\n//# sourceMappingURL=downscopedclient.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getEnv = exports.clear = exports.GCPEnv = void 0;\nconst gcpMetadata = require(\"gcp-metadata\");\nvar GCPEnv;\n(function (GCPEnv) {\n GCPEnv[\"APP_ENGINE\"] = \"APP_ENGINE\";\n GCPEnv[\"KUBERNETES_ENGINE\"] = \"KUBERNETES_ENGINE\";\n GCPEnv[\"CLOUD_FUNCTIONS\"] = \"CLOUD_FUNCTIONS\";\n GCPEnv[\"COMPUTE_ENGINE\"] = \"COMPUTE_ENGINE\";\n GCPEnv[\"CLOUD_RUN\"] = \"CLOUD_RUN\";\n GCPEnv[\"NONE\"] = \"NONE\";\n})(GCPEnv = exports.GCPEnv || (exports.GCPEnv = {}));\nlet envPromise;\nfunction clear() {\n envPromise = undefined;\n}\nexports.clear = clear;\nasync function getEnv() {\n if (envPromise) {\n return envPromise;\n }\n envPromise = getEnvMemoized();\n return envPromise;\n}\nexports.getEnv = getEnv;\nasync function getEnvMemoized() {\n let env = GCPEnv.NONE;\n if (isAppEngine()) {\n env = GCPEnv.APP_ENGINE;\n }\n else if (isCloudFunction()) {\n env = GCPEnv.CLOUD_FUNCTIONS;\n }\n else if (await isComputeEngine()) {\n if (await isKubernetesEngine()) {\n env = GCPEnv.KUBERNETES_ENGINE;\n }\n else if (isCloudRun()) {\n env = GCPEnv.CLOUD_RUN;\n }\n else {\n env = GCPEnv.COMPUTE_ENGINE;\n }\n }\n else {\n env = GCPEnv.NONE;\n }\n return env;\n}\nfunction isAppEngine() {\n return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME);\n}\nfunction isCloudFunction() {\n return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET);\n}\n/**\n * This check only verifies that the environment is running knative.\n * This must be run *after* checking for Kubernetes, otherwise it will\n * return a false positive.\n */\nfunction isCloudRun() {\n return !!process.env.K_CONFIGURATION;\n}\nasync function isKubernetesEngine() {\n try {\n await gcpMetadata.instance('attributes/cluster-name');\n return true;\n }\n catch (e) {\n return false;\n }\n}\nasync function isComputeEngine() {\n return gcpMetadata.isAvailable();\n}\n//# sourceMappingURL=envDetect.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = 
exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0;\nconst SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2';\nconst OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token';\nconst OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt';\n/**\n * Defines the response of a 3rd party executable run by the pluggable auth client.\n */\nclass ExecutableResponse {\n /**\n * Instantiates an ExecutableResponse instance using the provided JSON object\n * from the output of the executable.\n * @param responseJson Response from a 3rd party executable, loaded from a\n * run of the executable or a cached output file.\n */\n constructor(responseJson) {\n // Check that the required fields exist in the json response.\n if (!responseJson.version) {\n throw new InvalidVersionFieldError(\"Executable response must contain a 'version' field.\");\n }\n if (responseJson.success === undefined) {\n throw new InvalidSuccessFieldError(\"Executable response must contain a 'success' field.\");\n }\n this.version = responseJson.version;\n this.success = responseJson.success;\n // Validate required fields for a successful response.\n if (this.success) {\n this.expirationTime = responseJson.expiration_time;\n this.tokenType = responseJson.token_type;\n // Validate token type field.\n if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) {\n throw new InvalidTokenTypeFieldError(\"Executable response must contain a 'token_type' field when successful \" +\n `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n // Validate subject token.\n if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) {\n if (!responseJson.saml_response) {\n throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n this.subjectToken = responseJson.saml_response;\n }\n else {\n if (!responseJson.id_token) {\n throw new InvalidSubjectTokenError(\"Executable response must contain a 'id_token' field when \" +\n `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`);\n }\n this.subjectToken = responseJson.id_token;\n }\n }\n else {\n // Both code and message must be provided for unsuccessful responses.\n if (!responseJson.code) {\n throw new InvalidCodeFieldError(\"Executable response must contain a 'code' field when unsuccessful.\");\n }\n if (!responseJson.message) {\n throw new InvalidMessageFieldError(\"Executable response must contain a 'message' field when unsuccessful.\");\n }\n this.errorCode = responseJson.code;\n this.errorMessage = responseJson.message;\n }\n }\n /**\n * @return A boolean representing if the response has a valid token. Returns\n * true when the response was successful and the token is not expired.\n */\n isValid() {\n return !this.isExpired() && this.success;\n }\n /**\n * @return A boolean representing if the response is expired. 
Returns true if the\n * provided timeout has passed.\n */\n isExpired() {\n return (this.expirationTime !== undefined &&\n this.expirationTime < Math.round(Date.now() / 1000));\n }\n}\nexports.ExecutableResponse = ExecutableResponse;\n/**\n * An error thrown by the ExecutableResponse class.\n */\nclass ExecutableResponseError extends Error {\n constructor(message) {\n super(message);\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableResponseError = ExecutableResponseError;\n/**\n * An error thrown when the 'version' field in an executable response is missing or invalid.\n */\nclass InvalidVersionFieldError extends ExecutableResponseError {\n}\nexports.InvalidVersionFieldError = InvalidVersionFieldError;\n/**\n * An error thrown when the 'success' field in an executable response is missing or invalid.\n */\nclass InvalidSuccessFieldError extends ExecutableResponseError {\n}\nexports.InvalidSuccessFieldError = InvalidSuccessFieldError;\n/**\n * An error thrown when the 'expiration_time' field in an executable response is missing or invalid.\n */\nclass InvalidExpirationTimeFieldError extends ExecutableResponseError {\n}\nexports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError;\n/**\n * An error thrown when the 'token_type' field in an executable response is missing or invalid.\n */\nclass InvalidTokenTypeFieldError extends ExecutableResponseError {\n}\nexports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError;\n/**\n * An error thrown when the 'code' field in an executable response is missing or invalid.\n */\nclass InvalidCodeFieldError extends ExecutableResponseError {\n}\nexports.InvalidCodeFieldError = InvalidCodeFieldError;\n/**\n * An error thrown when the 'message' field in an executable response is missing or invalid.\n */\nclass InvalidMessageFieldError extends ExecutableResponseError {\n}\nexports.InvalidMessageFieldError = InvalidMessageFieldError;\n/**\n * An error thrown when the subject token in an executable response is missing or invalid.\n */\nclass InvalidSubjectTokenError extends ExecutableResponseError {\n}\nexports.InvalidSubjectTokenError = InvalidSubjectTokenError;\n//# sourceMappingURL=executable-response.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ExternalAccountClient = void 0;\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst identitypoolclient_1 = require(\"./identitypoolclient\");\nconst awsclient_1 = require(\"./awsclient\");\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\n/**\n * Dummy class with no constructor. Developers are expected to use fromJSON.\n */\nclass ExternalAccountClient {\n constructor() {\n throw new Error('ExternalAccountClients should be initialized via: ' +\n 'ExternalAccountClient.fromJSON(), ' +\n 'directly via explicit constructors, eg. 
' +\n 'new AwsClient(options), new IdentityPoolClient(options), new' +\n 'PluggableAuthClientOptions, or via ' +\n 'new GoogleAuth(options).getClient()');\n }\n /**\n * This static method will instantiate the\n * corresponding type of external account credential depending on the\n * underlying credential source.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n * @return A BaseExternalAccountClient instance or null if the options\n * provided do not correspond to an external account credential.\n */\n static fromJSON(options, additionalOptions) {\n var _a, _b;\n if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) {\n return new awsclient_1.AwsClient(options, additionalOptions);\n }\n else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) {\n return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions);\n }\n else {\n return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions);\n }\n }\n else {\n return null;\n }\n }\n}\nexports.ExternalAccountClient = ExternalAccountClient;\n//# sourceMappingURL=externalclient.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0;\nconst child_process_1 = require(\"child_process\");\nconst fs = require(\"fs\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst os = require(\"os\");\nconst path = require(\"path\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst transporters_1 = require(\"../transporters\");\nconst computeclient_1 = require(\"./computeclient\");\nconst idtokenclient_1 = require(\"./idtokenclient\");\nconst envDetect_1 = require(\"./envDetect\");\nconst jwtclient_1 = require(\"./jwtclient\");\nconst refreshclient_1 = require(\"./refreshclient\");\nconst impersonated_1 = require(\"./impersonated\");\nconst externalclient_1 = require(\"./externalclient\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nexports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';\nconst GoogleAuthExceptionMessages = {\n NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. 
\\n' +\n 'To learn more about authentication and Google APIs, visit: \\n' +\n 'https://cloud.google.com/docs/authentication/getting-started',\n};\nclass GoogleAuth {\n constructor(opts) {\n /**\n * Caches a value indicating whether the auth layer is running on Google\n * Compute Engine.\n * @private\n */\n this.checkIsGCE = undefined;\n // To save the contents of the JSON credential file\n this.jsonContent = null;\n this.cachedCredential = null;\n opts = opts || {};\n this._cachedProjectId = opts.projectId || null;\n this.cachedCredential = opts.authClient || null;\n this.keyFilename = opts.keyFilename || opts.keyFile;\n this.scopes = opts.scopes;\n this.jsonContent = opts.credentials || null;\n this.clientOptions = opts.clientOptions;\n }\n // Note: this properly is only public to satisify unit tests.\n // https://github.com/Microsoft/TypeScript/issues/5228\n get isGCE() {\n return this.checkIsGCE;\n }\n // GAPIC client libraries should always use self-signed JWTs. The following\n // variables are set on the JWT client in order to indicate the type of library,\n // and sign the JWT with the correct audience and scopes (if not supplied).\n setGapicJWTValues(client) {\n client.defaultServicePath = this.defaultServicePath;\n client.useJWTAccessWithScope = this.useJWTAccessWithScope;\n client.defaultScopes = this.defaultScopes;\n }\n getProjectId(callback) {\n if (callback) {\n this.getProjectIdAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getProjectIdAsync();\n }\n }\n /**\n * A temporary method for internal `getProjectId` usages where `null` is\n * acceptable. In a future major release, `getProjectId` should return `null`\n * (as the `Promise` base signature describes) and this private\n * method should be removed.\n *\n * @returns Promise that resolves with project id (or `null`)\n */\n async getProjectIdOptional() {\n try {\n return await this.getProjectId();\n }\n catch (e) {\n if (e instanceof Error &&\n e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) {\n return null;\n }\n else {\n throw e;\n }\n }\n }\n /*\n * A private method for finding and caching a projectId.\n *\n * Supports environments in order of precedence:\n * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable\n * - GOOGLE_APPLICATION_CREDENTIALS JSON file\n * - Cloud SDK: `gcloud config config-helper --format json`\n * - GCE project ID from metadata server\n *\n * @returns projectId\n */\n async findAndCacheProjectId() {\n let projectId = null;\n projectId || (projectId = await this.getProductionProjectId());\n projectId || (projectId = await this.getFileProjectId());\n projectId || (projectId = await this.getDefaultServiceProjectId());\n projectId || (projectId = await this.getGCEProjectId());\n projectId || (projectId = await this.getExternalAccountClientProjectId());\n if (projectId) {\n this._cachedProjectId = projectId;\n return projectId;\n }\n else {\n throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND);\n }\n }\n async getProjectIdAsync() {\n if (this._cachedProjectId) {\n return this._cachedProjectId;\n }\n if (!this._findProjectIdPromise) {\n this._findProjectIdPromise = this.findAndCacheProjectId();\n }\n return this._findProjectIdPromise;\n }\n /**\n * @returns Any scopes (user-specified or default scopes specified by the\n * client library) that need to be set on the current Auth client.\n */\n getAnyScopes() {\n return this.scopes || this.defaultScopes;\n }\n getApplicationDefault(optionsOrCallback = {}, callback) {\n let options;\n if 
(typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback);\n }\n else {\n return this.getApplicationDefaultAsync(options);\n }\n }\n async getApplicationDefaultAsync(options = {}) {\n // If we've already got a cached credential, return it.\n // This will also preserve one's configured quota project, in case they\n // set one directly on the credential previously.\n if (this.cachedCredential) {\n return await this.prepareAndCacheADC(this.cachedCredential);\n }\n // Since this is a 'new' ADC to cache we will use the environment variable\n // if it's available. We prefer this value over the value from ADC.\n const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'];\n let credential;\n // Check for the existence of a local environment variable pointing to the\n // location of the credential file. This is typically used in local\n // developer scenarios.\n credential =\n await this._tryGetApplicationCredentialsFromEnvironmentVariable(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Look in the well-known credential file location.\n credential = await this._tryGetApplicationCredentialsFromWellKnownFile(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Determine if we're running on GCE.\n let isGCE;\n try {\n isGCE = await this._checkIsGCE();\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Unexpected error determining execution environment: ${e.message}`;\n }\n throw e;\n }\n if (!isGCE) {\n // We failed to find the default credentials. Bail out with an error.\n throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.');\n }\n // For GCE, just return a default ComputeClient. 
It will take care of\n // the rest.\n options.scopes = this.getAnyScopes();\n return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride);\n }\n async prepareAndCacheADC(credential, quotaProjectIdOverride) {\n const projectId = await this.getProjectIdOptional();\n if (quotaProjectIdOverride) {\n credential.quotaProjectId = quotaProjectIdOverride;\n }\n this.cachedCredential = credential;\n return { credential, projectId };\n }\n /**\n * Determines whether the auth layer is running on Google Compute Engine.\n * @returns A promise that resolves with the boolean.\n * @api private\n */\n async _checkIsGCE() {\n if (this.checkIsGCE === undefined) {\n this.checkIsGCE = await gcpMetadata.isAvailable();\n }\n return this.checkIsGCE;\n }\n /**\n * Attempts to load default credentials from the environment variable path..\n * @returns Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromEnvironmentVariable(options) {\n const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] ||\n process.env['google_application_credentials'];\n if (!credentialsPath || credentialsPath.length === 0) {\n return null;\n }\n try {\n return this._getApplicationCredentialsFromFilePath(credentialsPath, options);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`;\n }\n throw e;\n }\n }\n /**\n * Attempts to load default credentials from a well-known file location\n * @return Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromWellKnownFile(options) {\n // First, figure out the location of the file, depending upon the OS type.\n let location = null;\n if (this._isWindows()) {\n // Windows\n location = process.env['APPDATA'];\n }\n else {\n // Linux or Mac\n const home = process.env['HOME'];\n if (home) {\n location = path.join(home, '.config');\n }\n }\n // If we found the root path, expand it.\n if (location) {\n location = path.join(location, 'gcloud', 'application_default_credentials.json');\n if (!fs.existsSync(location)) {\n location = null;\n }\n }\n // The file does not exist.\n if (!location) {\n return null;\n }\n // The file seems to exist. Try to use it.\n const client = await this._getApplicationCredentialsFromFilePath(location, options);\n return client;\n }\n /**\n * Attempts to load default credentials from a file at the given path..\n * @param filePath The path to the file to read.\n * @returns Promise that resolves with the OAuth2Client\n * @api private\n */\n async _getApplicationCredentialsFromFilePath(filePath, options = {}) {\n // Make sure the path looks like a string.\n if (!filePath || filePath.length === 0) {\n throw new Error('The file path is invalid.');\n }\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. Expect a thrown error\n // if not resolvable.\n filePath = fs.realpathSync(filePath);\n if (!fs.lstatSync(filePath).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. 
${err.message}`;\n }\n throw err;\n }\n // Now open a read stream on the file, and parse it.\n const readStream = fs.createReadStream(filePath);\n return this.fromStream(readStream, options);\n }\n /**\n * Create a credentials instance using a given impersonated input options.\n * @param json The impersonated input object.\n * @returns JWT or UserRefresh Client with data\n */\n fromImpersonatedJSON(json) {\n var _a, _b, _c, _d;\n if (!json) {\n throw new Error('Must pass in a JSON object containing an impersonated refresh token');\n }\n if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n throw new Error(`The incoming JSON object does not have the \"${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}\" type`);\n }\n if (!json.source_credentials) {\n throw new Error('The incoming JSON object does not contain a source_credentials field');\n }\n if (!json.service_account_impersonation_url) {\n throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field');\n }\n // Create source client for impersonation\n const sourceClient = new refreshclient_1.UserRefreshClient(json.source_credentials.client_id, json.source_credentials.client_secret, json.source_credentials.refresh_token);\n // Extreact service account from service_account_impersonation_url\n const targetPrincipal = (_b = (_a = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _a === void 0 ? void 0 : _a.groups) === null || _b === void 0 ? void 0 : _b.target;\n if (!targetPrincipal) {\n throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`);\n }\n const targetScopes = (_c = this.getAnyScopes()) !== null && _c !== void 0 ? _c : [];\n const client = new impersonated_1.Impersonated({\n delegates: (_d = json.delegates) !== null && _d !== void 0 ? _d : [],\n sourceClient: sourceClient,\n targetPrincipal: targetPrincipal,\n targetScopes: Array.isArray(targetScopes) ? 
targetScopes : [targetScopes],\n });\n return client;\n }\n /**\n * Create a credentials instance using the given input options.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n fromJSON(json, options) {\n let client;\n if (!json) {\n throw new Error('Must pass in a JSON object containing the Google auth settings.');\n }\n options = options || {};\n if (json.type === 'authorized_user') {\n client = new refreshclient_1.UserRefreshClient(options);\n client.fromJSON(json);\n }\n else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n client = this.fromImpersonatedJSON(json);\n }\n else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n client = externalclient_1.ExternalAccountClient.fromJSON(json, options);\n client.scopes = this.getAnyScopes();\n }\n else {\n options.scopes = this.scopes;\n client = new jwtclient_1.JWT(options);\n this.setGapicJWTValues(client);\n client.fromJSON(json);\n }\n return client;\n }\n /**\n * Return a JWT or UserRefreshClient from JavaScript object, caching both the\n * object used to instantiate and the client.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n _cacheClientFromJSON(json, options) {\n let client;\n // create either a UserRefreshClient or JWT client.\n options = options || {};\n if (json.type === 'authorized_user') {\n client = new refreshclient_1.UserRefreshClient(options);\n client.fromJSON(json);\n }\n else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n client = this.fromImpersonatedJSON(json);\n }\n else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n client = externalclient_1.ExternalAccountClient.fromJSON(json, options);\n client.scopes = this.getAnyScopes();\n }\n else {\n options.scopes = this.scopes;\n client = new jwtclient_1.JWT(options);\n this.setGapicJWTValues(client);\n client.fromJSON(json);\n }\n // cache both raw data used to instantiate client and client itself.\n this.jsonContent = json;\n this.cachedCredential = client;\n return client;\n }\n fromStream(inputStream, optionsOrCallback = {}, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback);\n }\n else {\n return this.fromStreamAsync(inputStream, options);\n }\n }\n fromStreamAsync(inputStream, options) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the Google auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n try {\n const data = JSON.parse(s);\n const r = this._cacheClientFromJSON(data, options);\n return resolve(r);\n }\n catch (err) {\n // If we failed parsing this.keyFileName, assume that it\n // is a PEM or p12 certificate:\n if (!this.keyFilename)\n throw err;\n const client = new jwtclient_1.JWT({\n ...this.clientOptions,\n keyFile: this.keyFilename,\n });\n this.cachedCredential = client;\n this.setGapicJWTValues(client);\n return resolve(client);\n }\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n /**\n * Create a credentials instance using the given API key string.\n * @param apiKey The API 
key string\n * @param options An optional options object.\n * @returns A JWT loaded from the key\n */\n fromAPIKey(apiKey, options) {\n options = options || {};\n const client = new jwtclient_1.JWT(options);\n client.fromAPIKey(apiKey);\n return client;\n }\n /**\n * Determines whether the current operating system is Windows.\n * @api private\n */\n _isWindows() {\n const sys = os.platform();\n if (sys && sys.length >= 3) {\n if (sys.substring(0, 3).toLowerCase() === 'win') {\n return true;\n }\n }\n return false;\n }\n /**\n * Run the Google Cloud SDK command that prints the default project ID\n */\n async getDefaultServiceProjectId() {\n return new Promise(resolve => {\n (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => {\n if (!err && stdout) {\n try {\n const projectId = JSON.parse(stdout).configuration.properties.core.project;\n resolve(projectId);\n return;\n }\n catch (e) {\n // ignore errors\n }\n }\n resolve(null);\n });\n });\n }\n /**\n * Loads the project id from environment variables.\n * @api private\n */\n getProductionProjectId() {\n return (process.env['GCLOUD_PROJECT'] ||\n process.env['GOOGLE_CLOUD_PROJECT'] ||\n process.env['gcloud_project'] ||\n process.env['google_cloud_project']);\n }\n /**\n * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file.\n * @api private\n */\n async getFileProjectId() {\n if (this.cachedCredential) {\n // Try to read the project ID from the cached credentials file\n return this.cachedCredential.projectId;\n }\n // Ensure the projectId is loaded from the keyFile if available.\n if (this.keyFilename) {\n const creds = await this.getClient();\n if (creds && creds.projectId) {\n return creds.projectId;\n }\n }\n // Try to load a credentials file and read its project ID\n const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable();\n if (r) {\n return r.projectId;\n }\n else {\n return null;\n }\n }\n /**\n * Gets the project ID from external account client if available.\n */\n async getExternalAccountClientProjectId() {\n if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n return null;\n }\n const creds = await this.getClient();\n // Do not suppress the underlying error, as the error could contain helpful\n // information for debugging and fixing. This is especially true for\n // external account creds as in order to get the project ID, the following\n // operations have to succeed:\n // 1. Valid credentials file should be supplied.\n // 2. Ability to retrieve access tokens from STS token exchange API.\n // 3. Ability to exchange for service account impersonated credentials (if\n // enabled).\n // 4. 
Ability to get project info using the access token from step 2 or 3.\n // Without surfacing the error, it is harder for developers to determine\n // which step went wrong.\n return await creds.getProjectId();\n }\n /**\n * Gets the Compute Engine project ID if it can be inferred.\n */\n async getGCEProjectId() {\n try {\n const r = await gcpMetadata.project('project-id');\n return r;\n }\n catch (e) {\n // Ignore any errors\n return null;\n }\n }\n getCredentials(callback) {\n if (callback) {\n this.getCredentialsAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getCredentialsAsync();\n }\n }\n async getCredentialsAsync() {\n const client = await this.getClient();\n if (client instanceof baseexternalclient_1.BaseExternalAccountClient) {\n const serviceAccountEmail = client.getServiceAccountEmail();\n if (serviceAccountEmail) {\n return { client_email: serviceAccountEmail };\n }\n }\n if (this.jsonContent) {\n const credential = {\n client_email: this.jsonContent.client_email,\n private_key: this.jsonContent.private_key,\n };\n return credential;\n }\n const isGCE = await this._checkIsGCE();\n if (!isGCE) {\n throw new Error('Unknown error.');\n }\n // For GCE, return the service account details from the metadata server\n // NOTE: The trailing '/' at the end of service-accounts/ is very important!\n // The GCF metadata server doesn't respect querystring params if this / is\n // not included.\n const data = await gcpMetadata.instance({\n property: 'service-accounts/',\n params: { recursive: 'true' },\n });\n if (!data || !data.default || !data.default.email) {\n throw new Error('Failure from metadata server.');\n }\n return { client_email: data.default.email };\n }\n /**\n * Automatically obtain a client based on the provided configuration. 
If no\n * options were passed, use Application Default Credentials.\n */\n async getClient() {\n if (!this.cachedCredential) {\n if (this.jsonContent) {\n this._cacheClientFromJSON(this.jsonContent, this.clientOptions);\n }\n else if (this.keyFilename) {\n const filePath = path.resolve(this.keyFilename);\n const stream = fs.createReadStream(filePath);\n await this.fromStreamAsync(stream, this.clientOptions);\n }\n else {\n await this.getApplicationDefaultAsync(this.clientOptions);\n }\n }\n return this.cachedCredential;\n }\n /**\n * Creates a client which will fetch an ID token for authorization.\n * @param targetAudience the audience for the fetched ID token.\n * @returns IdTokenClient for making HTTP calls authenticated with ID tokens.\n */\n async getIdTokenClient(targetAudience) {\n const client = await this.getClient();\n if (!('fetchIdToken' in client)) {\n throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.');\n }\n return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client });\n }\n /**\n * Automatically obtain application default credentials, and return\n * an access token for making requests.\n */\n async getAccessToken() {\n const client = await this.getClient();\n return (await client.getAccessToken()).token;\n }\n /**\n * Obtain the HTTP headers that will provide authorization for a given\n * request.\n */\n async getRequestHeaders(url) {\n const client = await this.getClient();\n return client.getRequestHeaders(url);\n }\n /**\n * Obtain credentials for a request, then attach the appropriate headers to\n * the request options.\n * @param opts Axios or Request options on which to attach the headers\n */\n async authorizeRequest(opts) {\n opts = opts || {};\n const url = opts.url || opts.uri;\n const client = await this.getClient();\n const headers = await client.getRequestHeaders(url);\n opts.headers = Object.assign(opts.headers || {}, headers);\n return opts;\n }\n /**\n * Automatically obtain application default credentials, and make an\n * HTTP request using the given options.\n * @param opts Axios request options for the HTTP request.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n async request(opts) {\n const client = await this.getClient();\n return client.request(opts);\n }\n /**\n * Determine the compute environment in which the code is running.\n */\n getEnv() {\n return (0, envDetect_1.getEnv)();\n }\n /**\n * Sign the given data with the current private key, or go out\n * to the IAM API to sign it.\n * @param data The data to be signed.\n */\n async sign(data) {\n const client = await this.getClient();\n const crypto = (0, crypto_1.createCrypto)();\n if (client instanceof jwtclient_1.JWT && client.key) {\n const sign = await crypto.sign(client.key, data);\n return sign;\n }\n const creds = await this.getCredentials();\n if (!creds.client_email) {\n throw new Error('Cannot sign data without `client_email`.');\n }\n return this.signBlob(crypto, creds.client_email, data);\n }\n async signBlob(crypto, emailOrUniqueId, data) {\n const url = 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' +\n `${emailOrUniqueId}:signBlob`;\n const res = await this.request({\n method: 'POST',\n url,\n data: {\n payload: crypto.encodeBase64StringUtf8(data),\n },\n });\n return res.data.signedBlob;\n }\n}\nexports.GoogleAuth = GoogleAuth;\n/**\n * Export DefaultTransporter as a static property of 
the class.\n */\nGoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter;\n//# sourceMappingURL=googleauth.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IAMAuth = void 0;\nclass IAMAuth {\n /**\n * IAM credentials.\n *\n * @param selector the iam authority selector\n * @param token the token\n * @constructor\n */\n constructor(selector, token) {\n this.selector = selector;\n this.token = token;\n this.selector = selector;\n this.token = token;\n }\n /**\n * Acquire the HTTP headers required to make an authenticated request.\n */\n getRequestHeaders() {\n return {\n 'x-goog-iam-authority-selector': this.selector,\n 'x-goog-iam-authorization-token': this.token,\n };\n }\n}\nexports.IAMAuth = IAMAuth;\n//# sourceMappingURL=iam.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar _a, _b, _c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdentityPoolClient = void 0;\nconst fs = require(\"fs\");\nconst util_1 = require(\"util\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n// fs.readfile is undefined in browser karma tests causing\n// `npm run browser-test` to fail as test.oauth2.ts imports this file via\n// src/index.ts.\n// Fallback to void function to avoid promisify throwing a TypeError.\nconst readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { }));\nconst realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { }));\nconst lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { }));\n/**\n * Defines the Url-sourced and file-sourced external account clients mainly\n * used for K8s and Azure workloads.\n */\nclass IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiate an IdentityPoolClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid file-sourced or\n * url-sourced credential or a workforce pool user project is provided\n * with a non workforce audience.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. 
These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n var _a, _b;\n super(options, additionalOptions);\n this.file = options.credential_source.file;\n this.url = options.credential_source.url;\n this.headers = options.credential_source.headers;\n if (!this.file && !this.url) {\n throw new Error('No valid Identity Pool \"credential_source\" provided');\n }\n // Text is the default format type.\n this.formatType = ((_a = options.credential_source.format) === null || _a === void 0 ? void 0 : _a.type) || 'text';\n this.formatSubjectTokenFieldName =\n (_b = options.credential_source.format) === null || _b === void 0 ? void 0 : _b.subject_token_field_name;\n if (this.formatType !== 'json' && this.formatType !== 'text') {\n throw new Error(`Invalid credential_source format \"${this.formatType}\"`);\n }\n if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) {\n throw new Error('Missing subject_token_field_name for JSON credential_source format');\n }\n }\n /**\n * Triggered when a external subject token is needed to be exchanged for a GCP\n * access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this either retrieves the local credential from a file location (k8s\n * workload) or by sending a GET request to a local metadata server (Azure\n * workloads).\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n if (this.file) {\n return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName);\n }\n return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers);\n }\n /**\n * Looks up the external subject token in the file path provided and\n * resolves with that token.\n * @param file The file path where the external credential is located.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) {\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. Expect a thrown error\n // if not resolvable.\n filePath = await realpath(filePath);\n if (!(await lstat(filePath)).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`;\n }\n throw err;\n }\n let subjectToken;\n const rawText = await readFile(filePath, { encoding: 'utf8' });\n if (formatType === 'text') {\n subjectToken = rawText;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const json = JSON.parse(rawText);\n subjectToken = json[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source file');\n }\n return subjectToken;\n }\n /**\n * Sends a GET request to the URL provided and resolves with the returned\n * external subject token.\n * @param url The URL to call to retrieve the subject token. 
This is typically\n * a local metadata server.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @param headers The optional additional headers to send with the request to\n * the metadata server url.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) {\n const opts = {\n url,\n method: 'GET',\n headers,\n responseType: formatType,\n };\n let subjectToken;\n if (formatType === 'text') {\n const response = await this.transporter.request(opts);\n subjectToken = response.data;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const response = await this.transporter.request(opts);\n subjectToken = response.data[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source URL');\n }\n return subjectToken;\n }\n}\nexports.IdentityPoolClient = IdentityPoolClient;\n//# sourceMappingURL=identitypoolclient.js.map","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdTokenClient = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nclass IdTokenClient extends oauth2client_1.OAuth2Client {\n /**\n * Google ID Token client\n *\n * Retrieve access token from the metadata server.\n * See: https://developers.google.com/compute/docs/authentication\n */\n constructor(options) {\n super();\n this.targetAudience = options.targetAudience;\n this.idTokenProvider = options.idTokenProvider;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n if (!this.credentials.id_token ||\n (this.credentials.expiry_date || 0) < Date.now()) {\n const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience);\n this.credentials = {\n id_token: idToken,\n expiry_date: this.getIdTokenExpiryDate(idToken),\n };\n }\n const headers = {\n Authorization: 'Bearer ' + this.credentials.id_token,\n };\n return { headers };\n }\n getIdTokenExpiryDate(idToken) {\n const payloadB64 = idToken.split('.')[1];\n if (payloadB64) {\n const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii'));\n return payload.exp * 1000;\n }\n }\n}\nexports.IdTokenClient = IdTokenClient;\n//# sourceMappingURL=idtokenclient.js.map","\"use strict\";\n/**\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is 
distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nconst gaxios_1 = require(\"gaxios\");\nexports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account';\nclass Impersonated extends oauth2client_1.OAuth2Client {\n /**\n * Impersonated service account credentials.\n *\n * Create a new access token by impersonating another service account.\n *\n * Impersonated Credentials allowing credentials issued to a user or\n * service account to impersonate another. The source project using\n * Impersonated Credentials must enable the \"IAMCredentials\" API.\n * Also, the target service account must grant the orginating principal\n * the \"Service Account Token Creator\" IAM role.\n *\n * @param {object} options - The configuration object.\n * @param {object} [options.sourceClient] the source credential used as to\n * acquire the impersonated credentials.\n * @param {string} [options.targetPrincipal] the service account to\n * impersonate.\n * @param {string[]} [options.delegates] the chained list of delegates\n * required to grant the final access_token. If set, the sequence of\n * identities must have \"Service Account Token Creator\" capability granted to\n * the preceding identity. For example, if set to [serviceAccountB,\n * serviceAccountC], the sourceCredential must have the Token Creator role on\n * serviceAccountB. serviceAccountB must have the Token Creator on\n * serviceAccountC. Finally, C must have Token Creator on target_principal.\n * If left unset, sourceCredential must have that role on targetPrincipal.\n * @param {string[]} [options.targetScopes] scopes to request during the\n * authorization grant.\n * @param {number} [options.lifetime] number of seconds the delegated\n * credential should be valid for up to 3600 seconds by default, or 43,200\n * seconds by extending the token's lifetime, see:\n * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth\n * @param {string} [options.endpoint] api endpoint override.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f;\n super(options);\n this.credentials = {\n expiry_date: 1,\n refresh_token: 'impersonated-placeholder',\n };\n this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client();\n this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : '';\n this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : [];\n this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : [];\n this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600;\n this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? 
_f : 'https://iamcredentials.googleapis.com';\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshToken(refreshToken) {\n var _a, _b, _c, _d, _e, _f;\n try {\n await this.sourceClient.getAccessToken();\n const name = 'projects/-/serviceAccounts/' + this.targetPrincipal;\n const u = `${this.endpoint}/v1/${name}:generateAccessToken`;\n const body = {\n delegates: this.delegates,\n scope: this.targetScopes,\n lifetime: this.lifetime + 's',\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n const tokenResponse = res.data;\n this.credentials.access_token = tokenResponse.accessToken;\n this.credentials.expiry_date = Date.parse(tokenResponse.expireTime);\n return {\n tokens: this.credentials,\n res,\n };\n }\n catch (error) {\n if (!(error instanceof Error))\n throw error;\n let status = 0;\n let message = '';\n if (error instanceof gaxios_1.GaxiosError) {\n status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status;\n message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message;\n }\n if (status && message) {\n error.message = `${status}: unable to impersonate: ${message}`;\n throw error;\n }\n else {\n error.message = `unable to impersonate: ${error}`;\n throw error;\n }\n }\n }\n /**\n * Generates an OpenID Connect ID token for a service account.\n *\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation}\n *\n * @param targetAudience the audience for the fetched ID token.\n * @param options the for the request\n * @return an OpenID Connect ID token\n */\n async fetchIdToken(targetAudience, options) {\n var _a;\n await this.sourceClient.getAccessToken();\n const name = `projects/-/serviceAccounts/${this.targetPrincipal}`;\n const u = `${this.endpoint}/v1/${name}:generateIdToken`;\n const body = {\n delegates: this.delegates,\n audience: targetAudience,\n includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? 
_a : true,\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n return res.data.token;\n }\n}\nexports.Impersonated = Impersonated;\n//# sourceMappingURL=impersonated.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWTAccess = void 0;\nconst jws = require(\"jws\");\nconst LRU = require(\"lru-cache\");\nconst DEFAULT_HEADER = {\n alg: 'RS256',\n typ: 'JWT',\n};\nclass JWTAccess {\n /**\n * JWTAccess service account credentials.\n *\n * Create a new access token by using the credential to create a new JWT token\n * that's recognized as the access token.\n *\n * @param email the service account email address.\n * @param key the private key that will be used to sign the token.\n * @param keyId the ID of the private key used to sign the token.\n */\n constructor(email, key, keyId, eagerRefreshThresholdMillis) {\n this.cache = new LRU({\n max: 500,\n maxAge: 60 * 60 * 1000,\n });\n this.email = email;\n this.key = key;\n this.keyId = keyId;\n this.eagerRefreshThresholdMillis =\n eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000;\n }\n /**\n * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url\n *\n * @param url The URI being authorized.\n * @param scopes The scope or scopes being authorized\n * @returns A string that returns the cached key.\n */\n getCachedKey(url, scopes) {\n let cacheKey = url;\n if (scopes && Array.isArray(scopes) && scopes.length) {\n cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`;\n }\n else if (typeof scopes === 'string') {\n cacheKey = url ? 
`${url}_${scopes}` : scopes;\n }\n if (!cacheKey) {\n throw Error('Scopes or url must be provided');\n }\n return cacheKey;\n }\n /**\n * Get a non-expired access token, after refreshing if necessary.\n *\n * @param url The URI being authorized.\n * @param additionalClaims An object with a set of additional claims to\n * include in the payload.\n * @returns An object that includes the authorization header.\n */\n getRequestHeaders(url, additionalClaims, scopes) {\n // Return cached authorization headers, unless we are within\n // eagerRefreshThresholdMillis ms of them expiring:\n const key = this.getCachedKey(url, scopes);\n const cachedToken = this.cache.get(key);\n const now = Date.now();\n if (cachedToken &&\n cachedToken.expiration - now > this.eagerRefreshThresholdMillis) {\n return cachedToken.headers;\n }\n const iat = Math.floor(Date.now() / 1000);\n const exp = JWTAccess.getExpirationTime(iat);\n let defaultClaims;\n // Turn scopes into space-separated string\n if (Array.isArray(scopes)) {\n scopes = scopes.join(' ');\n }\n // If scopes are specified, sign with scopes\n if (scopes) {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n scope: scopes,\n exp,\n iat,\n };\n }\n else {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n aud: url,\n exp,\n iat,\n };\n }\n // if additionalClaims are provided, ensure they do not collide with\n // other required claims.\n if (additionalClaims) {\n for (const claim in defaultClaims) {\n if (additionalClaims[claim]) {\n throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`);\n }\n }\n }\n const header = this.keyId\n ? { ...DEFAULT_HEADER, kid: this.keyId }\n : DEFAULT_HEADER;\n const payload = Object.assign(defaultClaims, additionalClaims);\n // Sign the jwt and add it to the cache\n const signedJWT = jws.sign({ header, payload, secret: this.key });\n const headers = { Authorization: `Bearer ${signedJWT}` };\n this.cache.set(key, {\n expiration: exp * 1000,\n headers,\n });\n return headers;\n }\n /**\n * Returns an expiration time for the JWT token.\n *\n * @param iat The issued at time for the JWT.\n * @returns An expiration time for the JWT.\n */\n static getExpirationTime(iat) {\n const exp = iat + 3600; // 3600 seconds = 1 hour\n return exp;\n }\n /**\n * Create a JWTAccess credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n reject(new Error('Must pass in a stream containing the service account auth settings.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('data', chunk => (s += chunk))\n .on('error', reject)\n .on('end', () => {\n try {\n 
const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (err) {\n reject(err);\n }\n });\n });\n }\n}\nexports.JWTAccess = JWTAccess;\n//# sourceMappingURL=jwtaccess.js.map","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWT = void 0;\nconst gtoken_1 = require(\"gtoken\");\nconst jwtaccess_1 = require(\"./jwtaccess\");\nconst oauth2client_1 = require(\"./oauth2client\");\nclass JWT extends oauth2client_1.OAuth2Client {\n constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) {\n const opts = optionsOrEmail && typeof optionsOrEmail === 'object'\n ? optionsOrEmail\n : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject };\n super({\n eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis,\n forceRefreshOnFailure: opts.forceRefreshOnFailure,\n });\n this.email = opts.email;\n this.keyFile = opts.keyFile;\n this.key = opts.key;\n this.keyId = opts.keyId;\n this.scopes = opts.scopes;\n this.subject = opts.subject;\n this.additionalClaims = opts.additionalClaims;\n this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 };\n }\n /**\n * Creates a copy of the credential with the specified scopes.\n * @param scopes List of requested scopes or a single scope.\n * @return The cloned instance.\n */\n createScoped(scopes) {\n return new JWT({\n email: this.email,\n keyFile: this.keyFile,\n key: this.key,\n keyId: this.keyId,\n scopes,\n subject: this.subject,\n additionalClaims: this.additionalClaims,\n });\n }\n /**\n * Obtains the metadata to be sent with the request.\n *\n * @param url the URI being authorized.\n */\n async getRequestMetadataAsync(url) {\n url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url;\n const useSelfSignedJWT = (!this.hasUserScopes() && url) ||\n (this.useJWTAccessWithScope && this.hasAnyScopes());\n if (!this.apiKey && useSelfSignedJWT) {\n if (this.additionalClaims &&\n this.additionalClaims.target_audience) {\n const { tokens } = await this.refreshToken();\n return {\n headers: this.addSharedMetadataHeaders({\n Authorization: `Bearer ${tokens.id_token}`,\n }),\n };\n }\n else {\n // no scopes have been set, but a uri has been provided. Use JWTAccess\n // credentials.\n if (!this.access) {\n this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis);\n }\n let scopes;\n if (this.hasUserScopes()) {\n scopes = this.scopes;\n }\n else if (!url) {\n scopes = this.defaultScopes;\n }\n const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, \n // Scopes take precedent over audience for signing,\n // so we only provide them if useJWTAccessWithScope is on\n this.useJWTAccessWithScope ? 
scopes : undefined);\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n else if (this.hasAnyScopes() || this.apiKey) {\n return super.getRequestMetadataAsync(url);\n }\n else {\n // If no audience, apiKey, or scopes are provided, we should not attempt\n // to populate any headers:\n return { headers: {} };\n }\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n // Create a new gToken for fetching an ID token\n const gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: { target_audience: targetAudience },\n transporter: this.transporter,\n });\n await gtoken.getToken({\n forceRefresh: true,\n });\n if (!gtoken.idToken) {\n throw new Error('Unknown error: Failed to fetch ID token');\n }\n return gtoken.idToken;\n }\n /**\n * Determine if there are currently scopes available.\n */\n hasUserScopes() {\n if (!this.scopes) {\n return false;\n }\n return this.scopes.length > 0;\n }\n /**\n * Are there any default or user scopes defined.\n */\n hasAnyScopes() {\n if (this.scopes && this.scopes.length > 0)\n return true;\n if (this.defaultScopes && this.defaultScopes.length > 0)\n return true;\n return false;\n }\n authorize(callback) {\n if (callback) {\n this.authorizeAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.authorizeAsync();\n }\n }\n async authorizeAsync() {\n const result = await this.refreshToken();\n if (!result) {\n throw new Error('No result returned');\n }\n this.credentials = result.tokens;\n this.credentials.refresh_token = 'jwt-placeholder';\n this.key = this.gtoken.key;\n this.email = this.gtoken.iss;\n return result.tokens;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken ignored\n * @private\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const gtoken = this.createGToken();\n const token = await gtoken.getToken({\n forceRefresh: this.isTokenExpiring(),\n });\n const tokens = {\n access_token: token.access_token,\n token_type: 'Bearer',\n expiry_date: gtoken.expiresAt,\n id_token: gtoken.idToken,\n };\n this.emit('tokens', tokens);\n return { res: null, tokens };\n }\n /**\n * Create a gToken if it doesn't already exist.\n */\n createGToken() {\n if (!this.gtoken) {\n this.gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: this.additionalClaims,\n transporter: this.transporter,\n });\n }\n return this.gtoken;\n }\n /**\n * Create a JWT credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n this.quotaProjectId = json.quota_project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n 
this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the service account auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (e) {\n reject(e);\n }\n });\n });\n }\n /**\n * Creates a JWT credentials instance using an API Key for authentication.\n * @param apiKey The API Key in string form.\n */\n fromAPIKey(apiKey) {\n if (typeof apiKey !== 'string') {\n throw new Error('Must provide an API Key string.');\n }\n this.apiKey = apiKey;\n }\n /**\n * Using the key or keyFile on the JWT client, obtain an object that contains\n * the key and the client email.\n */\n async getCredentials() {\n if (this.key) {\n return { private_key: this.key, client_email: this.email };\n }\n else if (this.keyFile) {\n const gtoken = this.createGToken();\n const creds = await gtoken.getCredentials(this.keyFile);\n return { private_key: creds.privateKey, client_email: creds.clientEmail };\n }\n throw new Error('A key or a keyFile must be provided to getCredentials.');\n }\n}\nexports.JWT = JWT;\n//# sourceMappingURL=jwtclient.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LoginTicket = void 0;\nclass LoginTicket {\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @param {string} env Envelope of the jwt\n * @param {TokenPayload} pay Payload of the jwt\n * @constructor\n */\n constructor(env, pay) {\n this.envelope = env;\n this.payload = pay;\n }\n getEnvelope() {\n return this.envelope;\n }\n getPayload() {\n return this.payload;\n }\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @return The user ID\n */\n getUserId() {\n const payload = this.getPayload();\n if (payload && payload.sub) {\n return payload.sub;\n }\n return null;\n }\n /**\n * Returns attributes from the login ticket. 
This can contain\n * various information about the user session.\n *\n * @return The envelope and payload\n */\n getAttributes() {\n return { envelope: this.getEnvelope(), payload: this.getPayload() };\n }\n}\nexports.LoginTicket = LoginTicket;\n//# sourceMappingURL=loginticket.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst stream = require(\"stream\");\nconst formatEcdsa = require(\"ecdsa-sig-formatter\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst authclient_1 = require(\"./authclient\");\nconst loginticket_1 = require(\"./loginticket\");\nvar CodeChallengeMethod;\n(function (CodeChallengeMethod) {\n CodeChallengeMethod[\"Plain\"] = \"plain\";\n CodeChallengeMethod[\"S256\"] = \"S256\";\n})(CodeChallengeMethod = exports.CodeChallengeMethod || (exports.CodeChallengeMethod = {}));\nvar CertificateFormat;\n(function (CertificateFormat) {\n CertificateFormat[\"PEM\"] = \"PEM\";\n CertificateFormat[\"JWK\"] = \"JWK\";\n})(CertificateFormat = exports.CertificateFormat || (exports.CertificateFormat = {}));\nclass OAuth2Client extends authclient_1.AuthClient {\n constructor(optionsOrClientId, clientSecret, redirectUri) {\n super();\n this.certificateCache = {};\n this.certificateExpiry = null;\n this.certificateCacheFormat = CertificateFormat.PEM;\n this.refreshTokenPromises = new Map();\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? optionsOrClientId\n : { clientId: optionsOrClientId, clientSecret, redirectUri };\n this._clientId = opts.clientId;\n this._clientSecret = opts.clientSecret;\n this.redirectUri = opts.redirectUri;\n this.eagerRefreshThresholdMillis =\n opts.eagerRefreshThresholdMillis || 5 * 60 * 1000;\n this.forceRefreshOnFailure = !!opts.forceRefreshOnFailure;\n }\n /**\n * Generates URL for consent page landing.\n * @param opts Options.\n * @return URL to consent page.\n */\n generateAuthUrl(opts = {}) {\n if (opts.code_challenge_method && !opts.code_challenge) {\n throw new Error('If a code_challenge_method is provided, code_challenge must be included.');\n }\n opts.response_type = opts.response_type || 'code';\n opts.client_id = opts.client_id || this._clientId;\n opts.redirect_uri = opts.redirect_uri || this.redirectUri;\n // Allow scopes to be passed either as array or a string\n if (Array.isArray(opts.scope)) {\n opts.scope = opts.scope.join(' ');\n }\n const rootUrl = OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_;\n return (rootUrl +\n '?' 
+\n querystring.stringify(opts));\n }\n generateCodeVerifier() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.');\n }\n /**\n * Convenience method to automatically generate a code_verifier, and its\n * resulting SHA256. If used, this must be paired with a S256\n * code_challenge_method.\n *\n * For a full example see:\n * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js\n */\n async generateCodeVerifierAsync() {\n // base64 encoding uses 6 bits per character, and we want to generate128\n // characters. 6*128/8 = 96.\n const crypto = (0, crypto_1.createCrypto)();\n const randomString = crypto.randomBytesBase64(96);\n // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/\n // \"-\"/\".\"/\"_\"/\"~\". Base64 encoded strings are pretty close, so we're just\n // swapping out a few chars.\n const codeVerifier = randomString\n .replace(/\\+/g, '~')\n .replace(/=/g, '_')\n .replace(/\\//g, '-');\n // Generate the base64 encoded SHA256\n const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier);\n // We need to use base64UrlEncoding instead of standard base64\n const codeChallenge = unencodedCodeChallenge\n .split('=')[0]\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n return { codeVerifier, codeChallenge };\n }\n getToken(codeOrOptions, callback) {\n const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions;\n if (callback) {\n this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response));\n }\n else {\n return this.getTokenAsync(options);\n }\n }\n async getTokenAsync(options) {\n const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_;\n const values = {\n code: options.code,\n client_id: options.client_id || this._clientId,\n client_secret: this._clientSecret,\n redirect_uri: options.redirect_uri || this.redirectUri,\n grant_type: 'authorization_code',\n code_verifier: options.codeVerifier,\n };\n const res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(values),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n const tokens = res.data;\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n /**\n * Refreshes the access token.\n * @param refresh_token Existing refresh token.\n * @private\n */\n async refreshToken(refreshToken) {\n if (!refreshToken) {\n return this.refreshTokenNoCache(refreshToken);\n }\n // If a request to refresh using the same token has started,\n // return the same promise.\n if (this.refreshTokenPromises.has(refreshToken)) {\n return this.refreshTokenPromises.get(refreshToken);\n }\n const p = this.refreshTokenNoCache(refreshToken).then(r => {\n this.refreshTokenPromises.delete(refreshToken);\n return r;\n }, e => {\n this.refreshTokenPromises.delete(refreshToken);\n throw e;\n });\n this.refreshTokenPromises.set(refreshToken, p);\n return p;\n }\n async refreshTokenNoCache(refreshToken) {\n var _a;\n if (!refreshToken) {\n throw new Error('No refresh token is set.');\n }\n const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_;\n const data = {\n refresh_token: refreshToken,\n client_id: this._clientId,\n client_secret: this._clientSecret,\n 
grant_type: 'refresh_token',\n };\n let res;\n try {\n // request for new token\n res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(data),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError &&\n e.message === 'invalid_grant' &&\n ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) &&\n /ReAuth/i.test(e.response.data.error_description)) {\n e.message = JSON.stringify(e.response.data);\n }\n throw e;\n }\n const tokens = res.data;\n // TODO: de-duplicate this code from a few spots\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n refreshAccessToken(callback) {\n if (callback) {\n this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback);\n }\n else {\n return this.refreshAccessTokenAsync();\n }\n }\n async refreshAccessTokenAsync() {\n const r = await this.refreshToken(this.credentials.refresh_token);\n const tokens = r.tokens;\n tokens.refresh_token = this.credentials.refresh_token;\n this.credentials = tokens;\n return { credentials: this.credentials, res: r.res };\n }\n getAccessToken(callback) {\n if (callback) {\n this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback);\n }\n else {\n return this.getAccessTokenAsync();\n }\n }\n async getAccessTokenAsync() {\n const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring();\n if (shouldRefresh) {\n if (!this.credentials.refresh_token) {\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n return { token: this.credentials.access_token };\n }\n }\n else {\n throw new Error('No refresh token or refresh handler callback is set.');\n }\n }\n const r = await this.refreshAccessTokenAsync();\n if (!r.credentials || (r.credentials && !r.credentials.access_token)) {\n throw new Error('Could not refresh access token.');\n }\n return { token: r.credentials.access_token, res: r.res };\n }\n else {\n return { token: this.credentials.access_token };\n }\n }\n /**\n * The main authentication interface. 
It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * In OAuth2Client, the result has the form:\n * { Authorization: 'Bearer ' }\n * @param url The optional url being authorized\n */\n async getRequestHeaders(url) {\n const headers = (await this.getRequestMetadataAsync(url)).headers;\n return headers;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n const thisCreds = this.credentials;\n if (!thisCreds.access_token &&\n !thisCreds.refresh_token &&\n !this.apiKey &&\n !this.refreshHandler) {\n throw new Error('No access, refresh token, API key or refresh handler callback is set.');\n }\n if (thisCreds.access_token && !this.isTokenExpiring()) {\n thisCreds.token_type = thisCreds.token_type || 'Bearer';\n const headers = {\n Authorization: thisCreds.token_type + ' ' + thisCreds.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n // If refreshHandler exists, call processAndValidateRefreshHandler().\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n const headers = {\n Authorization: 'Bearer ' + this.credentials.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n if (this.apiKey) {\n return { headers: { 'X-Goog-Api-Key': this.apiKey } };\n }\n let r = null;\n let tokens = null;\n try {\n r = await this.refreshToken(thisCreds.refresh_token);\n tokens = r.tokens;\n }\n catch (err) {\n const e = err;\n if (e.response &&\n (e.response.status === 403 || e.response.status === 404)) {\n e.message = `Could not refresh access token: ${e.message}`;\n }\n throw e;\n }\n const credentials = this.credentials;\n credentials.token_type = credentials.token_type || 'Bearer';\n tokens.refresh_token = credentials.refresh_token;\n this.credentials = tokens;\n const headers = {\n Authorization: credentials.token_type + ' ' + tokens.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers), res: r.res };\n }\n /**\n * Generates an URL to revoke the given token.\n * @param token The existing token to be revoked.\n */\n static getRevokeTokenUrl(token) {\n const parameters = querystring.stringify({ token });\n return `${OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_}?${parameters}`;\n }\n revokeToken(token, callback) {\n const opts = {\n url: OAuth2Client.getRevokeTokenUrl(token),\n method: 'POST',\n };\n if (callback) {\n this.transporter\n .request(opts)\n .then(r => callback(null, r), callback);\n }\n else {\n return this.transporter.request(opts);\n }\n }\n revokeCredentials(callback) {\n if (callback) {\n this.revokeCredentialsAsync().then(res => callback(null, res), callback);\n }\n else {\n return this.revokeCredentialsAsync();\n }\n }\n async revokeCredentialsAsync() {\n const token = this.credentials.access_token;\n this.credentials = {};\n if (token) {\n return this.revokeToken(token);\n }\n else {\n throw new Error('No access token to revoke.');\n }\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n async requestAsync(opts, retry = false) {\n let r2;\n try {\n const r = await 
this.getRequestMetadataAsync(opts.url);\n opts.headers = opts.headers || {};\n if (r.headers && r.headers['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project'];\n }\n if (r.headers && r.headers.Authorization) {\n opts.headers.Authorization = r.headers.Authorization;\n }\n if (this.apiKey) {\n opts.headers['X-Goog-Api-Key'] = this.apiKey;\n }\n r2 = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - An access_token and refresh_token were available, but either no\n // expiry_date was available or the forceRefreshOnFailure flag is set.\n // The absent expiry_date case can happen when developers stash the\n // access_token and refresh_token for later use, but the access_token\n // fails on the first try because it's expired. Some developers may\n // choose to enable forceRefreshOnFailure to mitigate time-related\n // errors.\n // Or the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - No refresh_token was available\n // - An access_token and a refreshHandler callback were available, but\n // either no expiry_date was available or the forceRefreshOnFailure\n // flag is set. The access_token fails on the first try because it's\n // expired. Some developers may choose to enable forceRefreshOnFailure\n // to mitigate time-related errors.\n const mayRequireRefresh = this.credentials &&\n this.credentials.access_token &&\n this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure);\n const mayRequireRefreshWithNoRefreshToken = this.credentials &&\n this.credentials.access_token &&\n !this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure) &&\n this.refreshHandler;\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) {\n await this.refreshAccessTokenAsync();\n return this.requestAsync(opts, true);\n }\n else if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n mayRequireRefreshWithNoRefreshToken) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n }\n return this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return r2;\n }\n verifyIdToken(options, callback) {\n // This function used to accept two arguments instead of an options object.\n // Check the types to help users upgrade with less pain.\n // This check can be removed after a 2.0 release.\n if (callback && typeof callback !== 'function') {\n throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.');\n }\n if (callback) {\n this.verifyIdTokenAsync(options).then(r => callback(null, r), callback);\n }\n else {\n return this.verifyIdTokenAsync(options);\n }\n }\n async verifyIdTokenAsync(options) {\n if (!options.idToken) {\n throw new Error('The verifyIdToken method requires an ID Token');\n }\n const response = await this.getFederatedSignonCertsAsync();\n const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, OAuth2Client.ISSUERS_, options.maxExpiry);\n return login;\n }\n /**\n * Obtains information about the provisioned access token. Especially useful\n * if you want to check the scopes that were provisioned to a given token.\n *\n * @param accessToken Required. The Access Token for which you want to get\n * user info.\n */\n async getTokenInfo(accessToken) {\n const { data } = await this.transporter.request({\n method: 'POST',\n headers: {\n 'Content-Type': 'application/x-www-form-urlencoded',\n Authorization: `Bearer ${accessToken}`,\n },\n url: OAuth2Client.GOOGLE_TOKEN_INFO_URL,\n });\n const info = Object.assign({\n expiry_date: new Date().getTime() + data.expires_in * 1000,\n scopes: data.scope.split(' '),\n }, data);\n delete info.expires_in;\n delete info.scope;\n return info;\n }\n getFederatedSignonCerts(callback) {\n if (callback) {\n this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback);\n }\n else {\n return this.getFederatedSignonCertsAsync();\n }\n }\n async getFederatedSignonCertsAsync() {\n const nowTime = new Date().getTime();\n const format = (0, crypto_1.hasBrowserCrypto)()\n ? CertificateFormat.JWK\n : CertificateFormat.PEM;\n if (this.certificateExpiry &&\n nowTime < this.certificateExpiry.getTime() &&\n this.certificateCacheFormat === format) {\n return { certs: this.certificateCache, format };\n }\n let res;\n let url;\n switch (format) {\n case CertificateFormat.PEM:\n url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_;\n break;\n case CertificateFormat.JWK:\n url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_;\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n const cacheControl = res ? 
res.headers['cache-control'] : undefined;\n let cacheAge = -1;\n if (cacheControl) {\n const pattern = new RegExp('max-age=([0-9]*)');\n const regexResult = pattern.exec(cacheControl);\n if (regexResult && regexResult.length === 2) {\n // Cache results with max-age (in seconds)\n cacheAge = Number(regexResult[1]) * 1000; // milliseconds\n }\n }\n let certificates = {};\n switch (format) {\n case CertificateFormat.PEM:\n certificates = res.data;\n break;\n case CertificateFormat.JWK:\n for (const key of res.data.keys) {\n certificates[key.kid] = key;\n }\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n const now = new Date();\n this.certificateExpiry =\n cacheAge === -1 ? null : new Date(now.getTime() + cacheAge);\n this.certificateCache = certificates;\n this.certificateCacheFormat = format;\n return { certs: certificates, format, res };\n }\n getIapPublicKeys(callback) {\n if (callback) {\n this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback);\n }\n else {\n return this.getIapPublicKeysAsync();\n }\n }\n async getIapPublicKeysAsync() {\n let res;\n const url = OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_;\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n return { pubkeys: res.data, res };\n }\n verifySignedJwtWithCerts() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.');\n }\n /**\n * Verify the id token is signed with the correct certificate\n * and is from the correct audience.\n * @param jwt The jwt to verify (The ID Token in this case).\n * @param certs The array of certs to test the jwt against.\n * @param requiredAudience The audience to test the jwt against.\n * @param issuers The allowed issuers of the jwt (Optional).\n * @param maxExpiry The max expiry the certificate can be (Optional).\n * @return Returns a promise resolving to LoginTicket on verification.\n */\n async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) {\n const crypto = (0, crypto_1.createCrypto)();\n if (!maxExpiry) {\n maxExpiry = OAuth2Client.MAX_TOKEN_LIFETIME_SECS_;\n }\n const segments = jwt.split('.');\n if (segments.length !== 3) {\n throw new Error('Wrong number of segments in token: ' + jwt);\n }\n const signed = segments[0] + '.' 
+ segments[1];\n let signature = segments[2];\n let envelope;\n let payload;\n try {\n envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`;\n }\n throw err;\n }\n if (!envelope) {\n throw new Error(\"Can't parse token envelope: \" + segments[0]);\n }\n try {\n payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token payload '${segments[0]}`;\n }\n throw err;\n }\n if (!payload) {\n throw new Error(\"Can't parse token payload: \" + segments[1]);\n }\n if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) {\n // If this is not present, then there's no reason to attempt verification\n throw new Error('No pem found for envelope: ' + JSON.stringify(envelope));\n }\n const cert = certs[envelope.kid];\n if (envelope.alg === 'ES256') {\n signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64');\n }\n const verified = await crypto.verify(cert, signed, signature);\n if (!verified) {\n throw new Error('Invalid token signature: ' + jwt);\n }\n if (!payload.iat) {\n throw new Error('No issue time in token: ' + JSON.stringify(payload));\n }\n if (!payload.exp) {\n throw new Error('No expiration time in token: ' + JSON.stringify(payload));\n }\n const iat = Number(payload.iat);\n if (isNaN(iat))\n throw new Error('iat field using invalid format');\n const exp = Number(payload.exp);\n if (isNaN(exp))\n throw new Error('exp field using invalid format');\n const now = new Date().getTime() / 1000;\n if (exp >= now + maxExpiry) {\n throw new Error('Expiration time too far in future: ' + JSON.stringify(payload));\n }\n const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_;\n const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_;\n if (now < earliest) {\n throw new Error('Token used too early, ' +\n now +\n ' < ' +\n earliest +\n ': ' +\n JSON.stringify(payload));\n }\n if (now > latest) {\n throw new Error('Token used too late, ' +\n now +\n ' > ' +\n latest +\n ': ' +\n JSON.stringify(payload));\n }\n if (issuers && issuers.indexOf(payload.iss) < 0) {\n throw new Error('Invalid issuer, expected one of [' +\n issuers +\n '], but got ' +\n payload.iss);\n }\n // Check the audience matches if we have one\n if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) {\n const aud = payload.aud;\n let audVerified = false;\n // If the requiredAudience is an array, check if it contains token\n // audience\n if (requiredAudience.constructor === Array) {\n audVerified = requiredAudience.indexOf(aud) > -1;\n }\n else {\n audVerified = aud === requiredAudience;\n }\n if (!audVerified) {\n throw new Error('Wrong recipient, payload audience != requiredAudience');\n }\n }\n return new loginticket_1.LoginTicket(envelope, payload);\n }\n /**\n * Returns a promise that resolves with AccessTokenResponse type if\n * refreshHandler is defined.\n * If not, nothing is returned.\n */\n async processAndValidateRefreshHandler() {\n if (this.refreshHandler) {\n const accessTokenResponse = await this.refreshHandler();\n if (!accessTokenResponse.access_token) {\n throw new Error('No access token is returned by the refreshHandler callback.');\n }\n return accessTokenResponse;\n }\n return;\n }\n /**\n * Returns true if a token is expired or will expire within\n * eagerRefreshThresholdMillismilliseconds.\n * If there is no expiry time, assumes the token is not expired or 
expiring.\n */\n isTokenExpiring() {\n const expiryDate = this.credentials.expiry_date;\n return expiryDate\n ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.OAuth2Client = OAuth2Client;\nOAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo';\n/**\n * The base URL for auth endpoints.\n */\nOAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_ = 'https://accounts.google.com/o/oauth2/v2/auth';\n/**\n * The base endpoint for token retrieval.\n */\nOAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_ = 'https://oauth2.googleapis.com/token';\n/**\n * The base endpoint to revoke tokens.\n */\nOAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_ = 'https://oauth2.googleapis.com/revoke';\n/**\n * Google Sign on certificates in PEM format.\n */\nOAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v1/certs';\n/**\n * Google Sign on certificates in JWK format.\n */\nOAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v3/certs';\n/**\n * Google Sign on certificates in JWK format.\n */\nOAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_ = 'https://www.gstatic.com/iap/verify/public_key';\n/**\n * Clock skew - five minutes in seconds\n */\nOAuth2Client.CLOCK_SKEW_SECS_ = 300;\n/**\n * Max Token Lifetime is one day in seconds\n */\nOAuth2Client.MAX_TOKEN_LIFETIME_SECS_ = 86400;\n/**\n * The allowed oauth token issuers.\n */\nOAuth2Client.ISSUERS_ = [\n 'accounts.google.com',\n 'https://accounts.google.com',\n];\n//# sourceMappingURL=oauth2client.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0;\nconst querystring = require(\"querystring\");\nconst crypto_1 = require(\"../crypto/crypto\");\n/** List of HTTP methods that accept request bodies. 
*/\nconst METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH'];\n/**\n * Abstract class for handling client authentication in OAuth-based\n * operations.\n * When request-body client authentication is used, only application/json and\n * application/x-www-form-urlencoded content types for HTTP methods that support\n * request bodies are supported.\n */\nclass OAuthClientAuthHandler {\n /**\n * Instantiates an OAuth client authentication handler.\n * @param clientAuthentication The client auth credentials.\n */\n constructor(clientAuthentication) {\n this.clientAuthentication = clientAuthentication;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Applies client authentication on the OAuth request's headers or POST\n * body but does not process the request.\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n applyClientAuthenticationOptions(opts, bearerToken) {\n // Inject authenticated header.\n this.injectAuthenticatedHeaders(opts, bearerToken);\n // Inject authenticated request body.\n if (!bearerToken) {\n this.injectAuthenticatedRequestBody(opts);\n }\n }\n /**\n * Applies client authentication on the request's header if either\n * basic authentication or bearer token authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n injectAuthenticatedHeaders(opts, bearerToken) {\n var _a;\n // Bearer token prioritized higher than basic Auth.\n if (bearerToken) {\n opts.headers = opts.headers || {};\n Object.assign(opts.headers, {\n Authorization: `Bearer ${bearerToken}}`,\n });\n }\n else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') {\n opts.headers = opts.headers || {};\n const clientId = this.clientAuthentication.clientId;\n const clientSecret = this.clientAuthentication.clientSecret || '';\n const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`);\n Object.assign(opts.headers, {\n Authorization: `Basic ${base64EncodedCreds}`,\n });\n }\n }\n /**\n * Applies client authentication on the request's body if request-body\n * client authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n */\n injectAuthenticatedRequestBody(opts) {\n var _a;\n if (((_a = this.clientAuthentication) === null || _a === void 0 ? 
void 0 : _a.confidentialClientType) === 'request-body') {\n const method = (opts.method || 'GET').toUpperCase();\n // Inject authenticated request body.\n if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) {\n // Get content-type.\n let contentType;\n const headers = opts.headers || {};\n for (const key in headers) {\n if (key.toLowerCase() === 'content-type' && headers[key]) {\n contentType = headers[key].toLowerCase();\n break;\n }\n }\n if (contentType === 'application/x-www-form-urlencoded') {\n opts.data = opts.data || '';\n const data = querystring.parse(opts.data);\n Object.assign(data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n opts.data = querystring.stringify(data);\n }\n else if (contentType === 'application/json') {\n opts.data = opts.data || {};\n Object.assign(opts.data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n }\n else {\n throw new Error(`${contentType} content-types are not supported with ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n else {\n throw new Error(`${method} HTTP method does not support ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n }\n}\nexports.OAuthClientAuthHandler = OAuthClientAuthHandler;\n/**\n * Converts an OAuth error response to a native JavaScript Error.\n * @param resp The OAuth error response to convert to a native Error object.\n * @param err The optional original error. If provided, the error properties\n * will be copied to the new error.\n * @return The converted native Error object.\n */\nfunction getErrorFromOAuthErrorResponse(resp, err) {\n // Error response.\n const errorCode = resp.error;\n const errorDescription = resp.error_description;\n const errorUri = resp.error_uri;\n let message = `Error code ${errorCode}`;\n if (typeof errorDescription !== 'undefined') {\n message += `: ${errorDescription}`;\n }\n if (typeof errorUri !== 'undefined') {\n message += ` - ${errorUri}`;\n }\n const newError = new Error(message);\n // Copy properties from original error to newly generated error.\n if (err) {\n const keys = Object.keys(err);\n if (err.stack) {\n // Copy error.stack if available.\n keys.push('stack');\n }\n keys.forEach(key => {\n // Do not overwrite the message field.\n if (key !== 'message') {\n Object.defineProperty(newError, key, {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n value: err[key],\n writable: false,\n enumerable: true,\n });\n }\n });\n }\n return newError;\n}\nexports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse;\n//# sourceMappingURL=oauth2common.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthClient = exports.ExecutableError = void 0;\nconst 
baseexternalclient_1 = require(\"./baseexternalclient\");\nconst executable_response_1 = require(\"./executable-response\");\nconst pluggable_auth_handler_1 = require(\"./pluggable-auth-handler\");\n/**\n * Error thrown from the executable run by PluggableAuthClient.\n */\nclass ExecutableError extends Error {\n constructor(message, code) {\n super(`The executable failed with exit code: ${code} and error message: ${message}.`);\n this.code = code;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableError = ExecutableError;\n/**\n * The default executable timeout when none is provided, in milliseconds.\n */\nconst DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000;\n/**\n * The minimum allowed executable timeout in milliseconds.\n */\nconst MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000;\n/**\n * The maximum allowed executable timeout in milliseconds.\n */\nconst MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000;\n/**\n * The environment variable to check to see if executable can be run.\n * Value must be set to '1' for the executable to run.\n */\nconst GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES';\n/**\n * The maximum currently supported executable version.\n */\nconst MAXIMUM_EXECUTABLE_VERSION = 1;\n/**\n * PluggableAuthClient enables the exchange of workload identity pool external credentials for\n * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These\n * scripts/executables are completely independent of the Google Cloud Auth libraries. These\n * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token\n * to be exchanged for a Google access token.\n *\n *
To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable\n * must be set to '1'. This is for security reasons.\n *\n *
Both OIDC and SAML are supported. The executable must adhere to a specific response format\n * defined below.\n *\n *
The executable must print out the 3rd party token to STDOUT in JSON format. When an\n * output_file is specified in the credential configuration, the executable must also handle writing the\n * JSON response to this file.\n *\n *
\n * OIDC response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:id_token\",\n *   \"id_token\": \"HEADER.PAYLOAD.SIGNATURE\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * SAML2 response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:saml2\",\n *   \"saml_response\": \"...\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * Error response sample:\n * {\n *   \"version\": 1,\n *   \"success\": false,\n *   \"code\": \"401\",\n *   \"message\": \"Error message.\"\n * }\n * 
\n *\n *
The \"expiration_time\" field in the JSON response is only required for successful\n * responses when an output file was specified in the credential configuration\n *\n *
The auth libraries will populate certain environment variables that will be accessible by the\n * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,\n * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and\n * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.\n *\n *
Please see this repositories README for a complete executable request/response specification.\n */\nclass PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates a PluggableAuthClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid pluggable auth credential.\n * @param options The external account options object typically loaded from\n * the external account JSON credential file.\n * @param additionalOptions Optional additional behavior customization\n * options. These currently customize expiration threshold time and\n * whether to retry on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n if (!options.credential_source.executable) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n this.command = options.credential_source.executable.command;\n if (!this.command) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n // Check if the provided timeout exists and if it is valid.\n if (options.credential_source.executable.timeout_millis === undefined) {\n this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS;\n }\n else {\n this.timeoutMillis = options.credential_source.executable.timeout_millis;\n if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS ||\n this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) {\n throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` +\n `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`);\n }\n }\n this.outputFile = options.credential_source.executable.output_file;\n this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({\n command: this.command,\n timeoutMillis: this.timeoutMillis,\n outputFile: this.outputFile,\n });\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this calls a user provided executable which returns the subject token.\n * The logic is summarized as:\n * 1. Validated that the executable is allowed to run. The\n * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to\n * 1 for security reasons.\n * 2. If an output file is specified by the user, check the file location\n * for a response. If the file exists and contains a valid response,\n * return the subject token from the file.\n * 3. 
Call the provided executable and return response.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Check if the executable is allowed to run.\n if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') {\n throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' +\n 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' +\n 'Variable to 1.');\n }\n let executableResponse = undefined;\n // Try to get cached executable response from output file.\n if (this.outputFile) {\n executableResponse = await this.handler.retrieveCachedResponse();\n }\n // If no response from output file, call the executable.\n if (!executableResponse) {\n // Set up environment map with required values for the executable.\n const envMap = new Map();\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience);\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType);\n // Always set to 0 because interactive mode is not supported.\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0');\n if (this.outputFile) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile);\n }\n const serviceAccountEmail = this.getServiceAccountEmail();\n if (serviceAccountEmail) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail);\n }\n executableResponse = await this.handler.retrieveResponseFromExecutable(envMap);\n }\n if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) {\n throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`);\n }\n // Check that response was successful.\n if (!executableResponse.success) {\n throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode);\n }\n // Check that response contains expiration time if output file was specified.\n if (this.outputFile) {\n if (!executableResponse.expirationTime) {\n throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.');\n }\n }\n // Check that response is not expired.\n if (executableResponse.isExpired()) {\n throw new Error('Executable response is expired.');\n }\n // Return subject token from response.\n return executableResponse.subjectToken;\n }\n}\nexports.PluggableAuthClient = PluggableAuthClient;\n//# sourceMappingURL=pluggable-auth-client.js.map","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthHandler = void 0;\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\nconst executable_response_1 = require(\"./executable-response\");\nconst childProcess = require(\"child_process\");\nconst fs = require(\"fs\");\n/**\n * A handler used to retrieve 3rd party token 
responses from user defined\n * executables and cached file output for the PluggableAuthClient class.\n */\nclass PluggableAuthHandler {\n /**\n * Instantiates a PluggableAuthHandler instance using the provided\n * PluggableAuthHandlerOptions object.\n */\n constructor(options) {\n if (!options.command) {\n throw new Error('No command provided.');\n }\n this.commandComponents = PluggableAuthHandler.parseCommand(options.command);\n this.timeoutMillis = options.timeoutMillis;\n if (!this.timeoutMillis) {\n throw new Error('No timeoutMillis provided.');\n }\n this.outputFile = options.outputFile;\n }\n /**\n * Calls user provided executable to get a 3rd party subject token and\n * returns the response.\n * @param envMap a Map of additional Environment Variables required for\n * the executable.\n * @return A promise that resolves with the executable response.\n */\n retrieveResponseFromExecutable(envMap) {\n return new Promise((resolve, reject) => {\n // Spawn process to run executable using added environment variables.\n const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), {\n env: { ...process.env, ...Object.fromEntries(envMap) },\n });\n let output = '';\n // Append stdout to output as executable runs.\n child.stdout.on('data', (data) => {\n output += data;\n });\n // Append stderr as executable runs.\n child.stderr.on('data', (err) => {\n output += err;\n });\n // Set up a timeout to end the child process and throw an error.\n const timeout = setTimeout(() => {\n // Kill child process and remove listeners so 'close' event doesn't get\n // read after child process is killed.\n child.removeAllListeners();\n child.kill();\n return reject(new Error('The executable failed to finish within the timeout specified.'));\n }, this.timeoutMillis);\n child.on('close', (code) => {\n // Cancel timeout if executable closes before timeout is reached.\n clearTimeout(timeout);\n if (code === 0) {\n // If the executable completed successfully, try to return the parsed response.\n try {\n const responseJson = JSON.parse(output);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n return resolve(response);\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n return reject(error);\n }\n return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`));\n }\n }\n else {\n return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString()));\n }\n });\n });\n }\n /**\n * Checks user provided output file for response from previous run of\n * executable and return the response if it exists, is formatted correctly, and is not expired.\n */\n async retrieveCachedResponse() {\n if (!this.outputFile || this.outputFile.length === 0) {\n return undefined;\n }\n let filePath;\n try {\n filePath = await fs.promises.realpath(this.outputFile);\n }\n catch (_a) {\n // If file path cannot be resolved, return undefined.\n return undefined;\n }\n if (!(await fs.promises.lstat(filePath)).isFile()) {\n // If path does not lead to file, return undefined.\n return undefined;\n }\n const responseString = await fs.promises.readFile(filePath, {\n encoding: 'utf8',\n });\n if (responseString === '') {\n return undefined;\n }\n try {\n const responseJson = JSON.parse(responseString);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n // Check if response is successful and unexpired.\n if (response.isValid()) {\n return new 
executable_response_1.ExecutableResponse(responseJson);\n }\n return undefined;\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n throw error;\n }\n throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`);\n }\n }\n /**\n * Parses given command string into component array, splitting on spaces unless\n * spaces are between quotation marks.\n */\n static parseCommand(command) {\n // Split the command into components by splitting on spaces,\n // unless spaces are contained in quotation marks.\n const components = command.match(/(?:[^\\s\"]+|\"[^\"]*\")+/g);\n if (!components) {\n throw new Error(`Provided command: \"${command}\" could not be parsed.`);\n }\n // Remove quotation marks from the beginning and end of each component if they are present.\n for (let i = 0; i < components.length; i++) {\n if (components[i][0] === '\"' && components[i].slice(-1) === '\"') {\n components[i] = components[i].slice(1, -1);\n }\n }\n return components;\n }\n}\nexports.PluggableAuthHandler = PluggableAuthHandler;\n//# sourceMappingURL=pluggable-auth-handler.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.UserRefreshClient = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nclass UserRefreshClient extends oauth2client_1.OAuth2Client {\n constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) {\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? 
optionsOrClientId\n : {\n clientId: optionsOrClientId,\n clientSecret,\n refreshToken,\n eagerRefreshThresholdMillis,\n forceRefreshOnFailure,\n };\n super({\n clientId: opts.clientId,\n clientSecret: opts.clientSecret,\n eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis,\n forceRefreshOnFailure: opts.forceRefreshOnFailure,\n });\n this._refreshToken = opts.refreshToken;\n this.credentials.refresh_token = opts.refreshToken;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken An ignored refreshToken..\n * @param callback Optional callback.\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n return super.refreshTokenNoCache(this._refreshToken);\n }\n /**\n * Create a UserRefreshClient credentials instance using the given input\n * options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the user refresh token');\n }\n if (json.type !== 'authorized_user') {\n throw new Error('The incoming JSON object does not have the \"authorized_user\" type');\n }\n if (!json.client_id) {\n throw new Error('The incoming JSON object does not contain a client_id field');\n }\n if (!json.client_secret) {\n throw new Error('The incoming JSON object does not contain a client_secret field');\n }\n if (!json.refresh_token) {\n throw new Error('The incoming JSON object does not contain a refresh_token field');\n }\n this._clientId = json.client_id;\n this._clientSecret = json.client_secret;\n this._refreshToken = json.refresh_token;\n this.credentials.refresh_token = json.refresh_token;\n this.quotaProjectId = json.quota_project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n async fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n return reject(new Error('Must pass in a stream containing the user refresh token.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n return resolve();\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n}\nexports.UserRefreshClient = UserRefreshClient;\n//# sourceMappingURL=refreshclient.js.map","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.StsCredentials = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst transporters_1 = require(\"../transporters\");\nconst oauth2common_1 = require(\"./oauth2common\");\n/**\n * Implements the OAuth 2.0 token exchange based on\n * https://tools.ietf.org/html/rfc8693\n */\nclass StsCredentials extends oauth2common_1.OAuthClientAuthHandler {\n 
/**\n * Initializes an STS credentials instance.\n * @param tokenExchangeEndpoint The token exchange endpoint.\n * @param clientAuthentication The client authentication credentials if\n * available.\n */\n constructor(tokenExchangeEndpoint, clientAuthentication) {\n super(clientAuthentication);\n this.tokenExchangeEndpoint = tokenExchangeEndpoint;\n this.transporter = new transporters_1.DefaultTransporter();\n }\n /**\n * Exchanges the provided token for another type of token based on the\n * rfc8693 spec.\n * @param stsCredentialsOptions The token exchange options used to populate\n * the token exchange request.\n * @param additionalHeaders Optional additional headers to pass along the\n * request.\n * @param options Optional additional GCP-specific non-spec defined options\n * to send with the request.\n * Example: `&options=${encodeUriComponent(JSON.stringified(options))}`\n * @return A promise that resolves with the token exchange response containing\n * the requested token and its expiration time.\n */\n async exchangeToken(stsCredentialsOptions, additionalHeaders, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options) {\n var _a, _b, _c;\n const values = {\n grant_type: stsCredentialsOptions.grantType,\n resource: stsCredentialsOptions.resource,\n audience: stsCredentialsOptions.audience,\n scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '),\n requested_token_type: stsCredentialsOptions.requestedTokenType,\n subject_token: stsCredentialsOptions.subjectToken,\n subject_token_type: stsCredentialsOptions.subjectTokenType,\n actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken,\n actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? 
void 0 : _c.actorTokenType,\n // Non-standard GCP-specific options.\n options: options && JSON.stringify(options),\n };\n // Remove undefined fields.\n Object.keys(values).forEach(key => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof values[key] === 'undefined') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delete values[key];\n }\n });\n const headers = {\n 'Content-Type': 'application/x-www-form-urlencoded',\n };\n // Inject additional STS headers if available.\n Object.assign(headers, additionalHeaders || {});\n const opts = {\n url: this.tokenExchangeEndpoint,\n method: 'POST',\n headers,\n data: querystring.stringify(values),\n responseType: 'json',\n };\n // Apply OAuth client authentication.\n this.applyClientAuthenticationOptions(opts);\n try {\n const response = await this.transporter.request(opts);\n // Successful response.\n const stsSuccessfulResponse = response.data;\n stsSuccessfulResponse.res = response;\n return stsSuccessfulResponse;\n }\n catch (error) {\n // Translate error to OAuthError.\n if (error instanceof gaxios_1.GaxiosError && error.response) {\n throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, \n // Preserve other fields from the original error.\n error);\n }\n // Request could fail before the server responds.\n throw error;\n }\n }\n}\nexports.StsCredentials = StsCredentials;\n//# sourceMappingURL=stscredentials.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BrowserCrypto = void 0;\n// This file implements crypto functions we need using in-browser\n// SubtleCrypto interface `window.crypto.subtle`.\nconst base64js = require(\"base64-js\");\n// Not all browsers support `TextEncoder`. The following `require` will\n// provide a fast UTF8-only replacement for those browsers that don't support\n// text encoding natively.\n// eslint-disable-next-line node/no-unsupported-features/node-builtins\nif (typeof process === 'undefined' && typeof TextEncoder === 'undefined') {\n require('fast-text-encoding');\n}\nconst crypto_1 = require(\"../crypto\");\nclass BrowserCrypto {\n constructor() {\n if (typeof window === 'undefined' ||\n window.crypto === undefined ||\n window.crypto.subtle === undefined) {\n throw new Error(\"SubtleCrypto not found. 
Make sure it's an https:// website.\");\n }\n }\n async sha256DigestBase64(str) {\n // SubtleCrypto digest() method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return base64js.fromByteArray(new Uint8Array(outputBuffer));\n }\n randomBytesBase64(count) {\n const array = new Uint8Array(count);\n window.crypto.getRandomValues(array);\n return base64js.fromByteArray(array);\n }\n static padBase64(base64) {\n // base64js requires padding, so let's add some '='\n while (base64.length % 4 !== 0) {\n base64 += '=';\n }\n return base64;\n }\n async verify(pubkey, data, signature) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature));\n const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']);\n // SubtleCrypto's verify method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray);\n return result;\n }\n async sign(privateKey, data) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']);\n // SubtleCrypto's sign method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray);\n return base64js.fromByteArray(new Uint8Array(result));\n }\n decodeBase64StringUtf8(base64) {\n const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const result = new TextDecoder().decode(uint8array);\n return result;\n }\n encodeBase64StringUtf8(text) {\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const uint8array = new TextEncoder().encode(text);\n const result = base64js.fromByteArray(uint8array);\n return result;\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n // SubtleCrypto digest() method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return (0, crypto_1.fromArrayBufferToHex)(outputBuffer);\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n 
* @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n // Convert key, if provided in ArrayBuffer format, to string.\n const rawKey = typeof key === 'string'\n ? key\n : String.fromCharCode(...new Uint16Array(key));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const enc = new TextEncoder();\n const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), {\n name: 'HMAC',\n hash: {\n name: 'SHA-256',\n },\n }, false, ['sign']);\n return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg));\n }\n}\nexports.BrowserCrypto = BrowserCrypto;\n//# sourceMappingURL=crypto.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0;\nconst crypto_1 = require(\"./browser/crypto\");\nconst crypto_2 = require(\"./node/crypto\");\nfunction createCrypto() {\n if (hasBrowserCrypto()) {\n return new crypto_1.BrowserCrypto();\n }\n return new crypto_2.NodeCrypto();\n}\nexports.createCrypto = createCrypto;\nfunction hasBrowserCrypto() {\n return (typeof window !== 'undefined' &&\n typeof window.crypto !== 'undefined' &&\n typeof window.crypto.subtle !== 'undefined');\n}\nexports.hasBrowserCrypto = hasBrowserCrypto;\n/**\n * Converts an ArrayBuffer to a hexadecimal string.\n * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string.\n * @return The hexadecimal encoding of the ArrayBuffer.\n */\nfunction fromArrayBufferToHex(arrayBuffer) {\n // Convert buffer to byte array.\n const byteArray = Array.from(new Uint8Array(arrayBuffer));\n // Convert bytes to hex string.\n return byteArray\n .map(byte => {\n return byte.toString(16).padStart(2, '0');\n })\n .join('');\n}\nexports.fromArrayBufferToHex = fromArrayBufferToHex;\n//# sourceMappingURL=crypto.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NodeCrypto = void 0;\nconst crypto = require(\"crypto\");\nclass NodeCrypto {\n async sha256DigestBase64(str) {\n return crypto.createHash('sha256').update(str).digest('base64');\n }\n randomBytesBase64(count) {\n return crypto.randomBytes(count).toString('base64');\n 
}\n async verify(pubkey, data, signature) {\n const verifier = crypto.createVerify('sha256');\n verifier.update(data);\n verifier.end();\n return verifier.verify(pubkey, signature, 'base64');\n }\n async sign(privateKey, data) {\n const signer = crypto.createSign('RSA-SHA256');\n signer.update(data);\n signer.end();\n return signer.sign(privateKey, 'base64');\n }\n decodeBase64StringUtf8(base64) {\n return Buffer.from(base64, 'base64').toString('utf-8');\n }\n encodeBase64StringUtf8(text) {\n return Buffer.from(text, 'utf-8').toString('base64');\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n return crypto.createHash('sha256').update(str).digest('hex');\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n * @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n const cryptoKey = typeof key === 'string' ? key : toBuffer(key);\n return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest());\n }\n}\nexports.NodeCrypto = NodeCrypto;\n/**\n * Converts a Node.js Buffer to an ArrayBuffer.\n * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer\n * @param buffer The Buffer input to covert.\n * @return The ArrayBuffer representation of the input.\n */\nfunction toArrayBuffer(buffer) {\n return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);\n}\n/**\n * Converts an ArrayBuffer to a Node.js Buffer.\n * @param arrayBuffer The ArrayBuffer input to covert.\n * @return The Buffer representation of the input.\n */\nfunction toBuffer(arrayBuffer) {\n return Buffer.from(arrayBuffer);\n}\n//# sourceMappingURL=crypto.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.AuthClient = void 0;\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nconst googleauth_1 = require(\"./auth/googleauth\");\nObject.defineProperty(exports, \"GoogleAuth\", { enumerable: true, get: function () { return googleauth_1.GoogleAuth; } });\nvar authclient_1 = require(\"./auth/authclient\");\nObject.defineProperty(exports, \"AuthClient\", { 
enumerable: true, get: function () { return authclient_1.AuthClient; } });\nvar computeclient_1 = require(\"./auth/computeclient\");\nObject.defineProperty(exports, \"Compute\", { enumerable: true, get: function () { return computeclient_1.Compute; } });\nvar envDetect_1 = require(\"./auth/envDetect\");\nObject.defineProperty(exports, \"GCPEnv\", { enumerable: true, get: function () { return envDetect_1.GCPEnv; } });\nvar iam_1 = require(\"./auth/iam\");\nObject.defineProperty(exports, \"IAMAuth\", { enumerable: true, get: function () { return iam_1.IAMAuth; } });\nvar idtokenclient_1 = require(\"./auth/idtokenclient\");\nObject.defineProperty(exports, \"IdTokenClient\", { enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } });\nvar jwtaccess_1 = require(\"./auth/jwtaccess\");\nObject.defineProperty(exports, \"JWTAccess\", { enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } });\nvar jwtclient_1 = require(\"./auth/jwtclient\");\nObject.defineProperty(exports, \"JWT\", { enumerable: true, get: function () { return jwtclient_1.JWT; } });\nvar impersonated_1 = require(\"./auth/impersonated\");\nObject.defineProperty(exports, \"Impersonated\", { enumerable: true, get: function () { return impersonated_1.Impersonated; } });\nvar oauth2client_1 = require(\"./auth/oauth2client\");\nObject.defineProperty(exports, \"CodeChallengeMethod\", { enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } });\nObject.defineProperty(exports, \"OAuth2Client\", { enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } });\nvar loginticket_1 = require(\"./auth/loginticket\");\nObject.defineProperty(exports, \"LoginTicket\", { enumerable: true, get: function () { return loginticket_1.LoginTicket; } });\nvar refreshclient_1 = require(\"./auth/refreshclient\");\nObject.defineProperty(exports, \"UserRefreshClient\", { enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } });\nvar awsclient_1 = require(\"./auth/awsclient\");\nObject.defineProperty(exports, \"AwsClient\", { enumerable: true, get: function () { return awsclient_1.AwsClient; } });\nvar identitypoolclient_1 = require(\"./auth/identitypoolclient\");\nObject.defineProperty(exports, \"IdentityPoolClient\", { enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } });\nvar externalclient_1 = require(\"./auth/externalclient\");\nObject.defineProperty(exports, \"ExternalAccountClient\", { enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } });\nvar baseexternalclient_1 = require(\"./auth/baseexternalclient\");\nObject.defineProperty(exports, \"BaseExternalAccountClient\", { enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } });\nvar downscopedclient_1 = require(\"./auth/downscopedclient\");\nObject.defineProperty(exports, \"DownscopedClient\", { enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } });\nvar pluggable_auth_client_1 = require(\"./auth/pluggable-auth-client\");\nObject.defineProperty(exports, \"PluggableAuthClient\", { enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } });\nvar transporters_1 = require(\"./transporters\");\nObject.defineProperty(exports, \"DefaultTransporter\", { enumerable: true, get: function () { return transporters_1.DefaultTransporter; } });\nconst auth = new googleauth_1.GoogleAuth();\nexports.auth = auth;\n//# 
sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validate = void 0;\n// Accepts an options object passed from the user to the API. In the\n// previous version of the API, it referred to a `Request` options object.\n// Now it refers to an Axiox Request Config object. This is here to help\n// ensure users don't pass invalid options when they upgrade from 0.x to 1.x.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction validate(options) {\n const vpairs = [\n { invalid: 'uri', expected: 'url' },\n { invalid: 'json', expected: 'data' },\n { invalid: 'qs', expected: 'params' },\n ];\n for (const pair of vpairs) {\n if (options[pair.invalid]) {\n const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`;\n throw new Error(e);\n }\n }\n}\nexports.validate = validate;\n//# sourceMappingURL=options.js.map","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DefaultTransporter = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst options_1 = require(\"./options\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst pkg = require('../../package.json');\nconst PRODUCT_NAME = 'google-api-nodejs-client';\nclass DefaultTransporter {\n /**\n * Configures request options before making a request.\n * @param opts GaxiosOptions options.\n * @return Configured options.\n */\n configure(opts = {}) {\n opts.headers = opts.headers || {};\n if (typeof window === 'undefined') {\n // set transporter user agent if not in browser\n const uaValue = opts.headers['User-Agent'];\n if (!uaValue) {\n opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT;\n }\n else if (!uaValue.includes(`${PRODUCT_NAME}/`)) {\n opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`;\n }\n // track google-auth-library-nodejs version:\n const authVersion = `auth/${pkg.version}`;\n if (opts.headers['x-goog-api-client'] &&\n !opts.headers['x-goog-api-client'].includes(authVersion)) {\n opts.headers['x-goog-api-client'] = `${opts.headers['x-goog-api-client']} ${authVersion}`;\n }\n else if 
(!opts.headers['x-goog-api-client']) {\n const nodeVersion = process.version.replace(/^v/, '');\n opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion} ${authVersion}`;\n }\n }\n return opts;\n }\n request(opts, callback) {\n // ensure the user isn't passing in request-style options\n opts = this.configure(opts);\n try {\n (0, options_1.validate)(opts);\n }\n catch (e) {\n if (callback) {\n return callback(e);\n }\n else {\n throw e;\n }\n }\n if (callback) {\n (0, gaxios_1.request)(opts).then(r => {\n callback(null, r);\n }, e => {\n callback(this.processError(e));\n });\n }\n else {\n return (0, gaxios_1.request)(opts).catch(e => {\n throw this.processError(e);\n });\n }\n }\n /**\n * Changes the error to include details from the body.\n */\n processError(e) {\n const res = e.response;\n const err = e;\n const body = res ? res.data : null;\n if (res && body && body.error && res.status !== 200) {\n if (typeof body.error === 'string') {\n err.message = body.error;\n err.code = res.status.toString();\n }\n else if (Array.isArray(body.error.errors)) {\n err.message = body.error.errors\n .map((err2) => err2.message)\n .join('\\n');\n err.code = body.error.code;\n err.errors = body.error.errors;\n }\n else {\n err.message = body.error.message;\n err.code = body.error.code || res.status;\n }\n }\n else if (res && res.status >= 400) {\n // Consider all 4xx and 5xx responses errors.\n err.message = body;\n err.code = res.status.toString();\n }\n return err;\n }\n}\nexports.DefaultTransporter = DefaultTransporter;\n/**\n * Default user agent.\n */\nDefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`;\n//# sourceMappingURL=transporters.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getPem = void 0;\nconst fs = require(\"fs\");\nconst forge = require(\"node-forge\");\nconst util_1 = require(\"util\");\nconst readFile = (0, util_1.promisify)(fs.readFile);\nfunction getPem(filename, callback) {\n if (callback) {\n getPemAsync(filename)\n .then(pem => callback(null, pem))\n .catch(err => callback(err, null));\n }\n else {\n return getPemAsync(filename);\n }\n}\nexports.getPem = getPem;\nfunction getPemAsync(filename) {\n return readFile(filename, { encoding: 'base64' }).then(keyp12 => {\n return convertToPem(keyp12);\n });\n}\n/**\n * Converts a P12 in base64 encoding to a pem.\n * @param p12base64 String containing base64 encoded p12.\n * @returns a string containing the pem.\n */\nfunction convertToPem(p12base64) {\n const p12Der = forge.util.decode64(p12base64);\n const p12Asn1 = forge.asn1.fromDer(p12Der);\n const p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, 'notasecret');\n const bags = p12.getBags({ friendlyName: 'privatekey' });\n if (bags.friendlyName) {\n const privateKey = bags.friendlyName[0].key;\n const pem = forge.pki.privateKeyToPem(privateKey);\n return pem.replace(/\\r\\n/g, '\\n');\n }\n else {\n throw new Error('Unable to get friendly name.');\n }\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleToken = void 0;\nconst fs = require(\"fs\");\nconst gaxios_1 = require(\"gaxios\");\nconst jws = require(\"jws\");\nconst 
path = require(\"path\");\nconst util_1 = require(\"util\");\nconst readFile = fs.readFile\n ? (0, util_1.promisify)(fs.readFile)\n : async () => {\n // if running in the web-browser, fs.readFile may not have been shimmed.\n throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS');\n };\nconst GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token';\nconst GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token=';\nclass ErrorWithCode extends Error {\n constructor(message, code) {\n super(message);\n this.code = code;\n }\n}\nlet getPem;\nclass GoogleToken {\n /**\n * Create a GoogleToken.\n *\n * @param options Configuration object.\n */\n constructor(options) {\n this.transporter = {\n request: opts => (0, gaxios_1.request)(opts),\n };\n this.configure(options);\n }\n get accessToken() {\n return this.rawToken ? this.rawToken.access_token : undefined;\n }\n get idToken() {\n return this.rawToken ? this.rawToken.id_token : undefined;\n }\n get tokenType() {\n return this.rawToken ? this.rawToken.token_type : undefined;\n }\n get refreshToken() {\n return this.rawToken ? this.rawToken.refresh_token : undefined;\n }\n /**\n * Returns whether the token has expired.\n *\n * @return true if the token has expired, false otherwise.\n */\n hasExpired() {\n const now = new Date().getTime();\n if (this.rawToken && this.expiresAt) {\n return now >= this.expiresAt;\n }\n else {\n return true;\n }\n }\n /**\n * Returns whether the token will expire within eagerRefreshThresholdMillis\n *\n * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise.\n */\n isTokenExpiring() {\n var _a;\n const now = new Date().getTime();\n const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0;\n if (this.rawToken && this.expiresAt) {\n return this.expiresAt <= now + eagerRefreshThresholdMillis;\n }\n else {\n return true;\n }\n }\n getToken(callback, opts = {}) {\n if (typeof callback === 'object') {\n opts = callback;\n callback = undefined;\n }\n opts = Object.assign({\n forceRefresh: false,\n }, opts);\n if (callback) {\n const cb = callback;\n this.getTokenAsync(opts).then(t => cb(null, t), callback);\n return;\n }\n return this.getTokenAsync(opts);\n }\n /**\n * Given a keyFile, extract the key and client email if available\n * @param keyFile Path to a json, pem, or p12 file that contains the key.\n * @returns an object with privateKey and clientEmail properties\n */\n async getCredentials(keyFile) {\n const ext = path.extname(keyFile);\n switch (ext) {\n case '.json': {\n const key = await readFile(keyFile, 'utf8');\n const body = JSON.parse(key);\n const privateKey = body.private_key;\n const clientEmail = body.client_email;\n if (!privateKey || !clientEmail) {\n throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS');\n }\n return { privateKey, clientEmail };\n }\n case '.der':\n case '.crt':\n case '.pem': {\n const privateKey = await readFile(keyFile, 'utf8');\n return { privateKey };\n }\n case '.p12':\n case '.pfx': {\n // NOTE: The loading of `google-p12-pem` is deferred for performance\n // reasons. The `node-forge` npm module in `google-p12-pem` adds a fair\n // bit time to overall module loading, and is likely not frequently\n // used. 
In a future release, p12 support will be entirely removed.\n if (!getPem) {\n getPem = (await Promise.resolve().then(() => require('google-p12-pem'))).getPem;\n }\n const privateKey = await getPem(keyFile);\n return { privateKey };\n }\n default:\n throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' +\n 'Current supported extensions are *.json, *.pem, and *.p12.', 'UNKNOWN_CERTIFICATE_TYPE');\n }\n }\n async getTokenAsync(opts) {\n if (this.inFlightRequest && !opts.forceRefresh) {\n return this.inFlightRequest;\n }\n try {\n return await (this.inFlightRequest = this.getTokenAsyncInner(opts));\n }\n finally {\n this.inFlightRequest = undefined;\n }\n }\n async getTokenAsyncInner(opts) {\n if (this.isTokenExpiring() === false && opts.forceRefresh === false) {\n return Promise.resolve(this.rawToken);\n }\n if (!this.key && !this.keyFile) {\n throw new Error('No key or keyFile set.');\n }\n if (!this.key && this.keyFile) {\n const creds = await this.getCredentials(this.keyFile);\n this.key = creds.privateKey;\n this.iss = creds.clientEmail || this.iss;\n if (!creds.clientEmail) {\n this.ensureEmail();\n }\n }\n return this.requestToken();\n }\n ensureEmail() {\n if (!this.iss) {\n throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS');\n }\n }\n revokeToken(callback) {\n if (callback) {\n this.revokeTokenAsync().then(() => callback(), callback);\n return;\n }\n return this.revokeTokenAsync();\n }\n async revokeTokenAsync() {\n if (!this.accessToken) {\n throw new Error('No token to revoke.');\n }\n const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken;\n await this.transporter.request({ url });\n this.configure({\n email: this.iss,\n sub: this.sub,\n key: this.key,\n keyFile: this.keyFile,\n scope: this.scope,\n additionalClaims: this.additionalClaims,\n });\n }\n /**\n * Configure the GoogleToken for re-use.\n * @param {object} options Configuration object.\n */\n configure(options = {}) {\n this.keyFile = options.keyFile;\n this.key = options.key;\n this.rawToken = undefined;\n this.iss = options.email || options.iss;\n this.sub = options.sub;\n this.additionalClaims = options.additionalClaims;\n if (typeof options.scope === 'object') {\n this.scope = options.scope.join(' ');\n }\n else {\n this.scope = options.scope;\n }\n this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis;\n if (options.transporter) {\n this.transporter = options.transporter;\n }\n }\n /**\n * Request the token from Google.\n */\n async requestToken() {\n var _a, _b;\n const iat = Math.floor(new Date().getTime() / 1000);\n const additionalClaims = this.additionalClaims || {};\n const payload = Object.assign({\n iss: this.iss,\n scope: this.scope,\n aud: GOOGLE_TOKEN_URL,\n exp: iat + 3600,\n iat,\n sub: this.sub,\n }, additionalClaims);\n const signedJWT = jws.sign({\n header: { alg: 'RS256' },\n payload,\n secret: this.key,\n });\n try {\n const r = await this.transporter.request({\n method: 'POST',\n url: GOOGLE_TOKEN_URL,\n data: {\n grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',\n assertion: signedJWT,\n },\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n responseType: 'json',\n });\n this.rawToken = r.data;\n this.expiresAt =\n r.data.expires_in === null || r.data.expires_in === undefined\n ? 
undefined\n : (iat + r.data.expires_in) * 1000;\n return this.rawToken;\n }\n catch (e) {\n this.rawToken = undefined;\n this.tokenExpires = undefined;\n const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data)\n ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data\n : {};\n if (body.error) {\n const desc = body.error_description\n ? `: ${body.error_description}`\n : '';\n e.message = `${body.error}${desc}`;\n }\n throw e;\n }\n }\n}\nexports.GoogleToken = GoogleToken;\n//# sourceMappingURL=index.js.map","'use strict';\n\nmodule.exports = (flag, argv = process.argv) => {\n\tconst prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');\n\tconst position = argv.indexOf(prefix + flag);\n\tconst terminatorPosition = argv.indexOf('--');\n\treturn position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);\n};\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst once_1 = __importDefault(require(\"@tootallnate/once\"));\nconst agent_base_1 = require(\"agent-base\");\nconst debug = (0, debug_1.default)('http-proxy-agent');\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;\n}\n/**\n * The `HttpProxyAgent` implements an HTTP Agent subclass that connects\n * to the specified \"HTTP proxy server\" in order to proxy HTTP requests.\n *\n * @api public\n */\nclass HttpProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('Creating new HttpProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 443 : 80;\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... 
we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n const parsed = url_1.default.parse(req.path);\n if (!parsed.protocol) {\n parsed.protocol = 'http:';\n }\n if (!parsed.hostname) {\n parsed.hostname = opts.hostname || opts.host || null;\n }\n if (parsed.port == null && typeof opts.port) {\n parsed.port = String(opts.port);\n }\n if (parsed.port === '80') {\n // if port is 80, then we can remove the port so that the\n // \":80\" portion is not on the produced URL\n parsed.port = '';\n }\n // Change the `http.ClientRequest` instance's \"path\" field\n // to the absolute path of the URL that will be requested.\n req.path = url_1.default.format(parsed);\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`);\n }\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n // At this point, the http ClientRequest's internal `_header` field\n // might have already been set. If this is the case then we'll need\n // to re-generate the string since we just changed the `req.path`.\n if (req._header) {\n let first;\n let endOfHeaders;\n debug('Regenerating stored HTTP header string for request');\n req._header = null;\n req._implicitHeader();\n if (req.output && req.output.length > 0) {\n // Node < 12\n debug('Patching connection write() output buffer with updated header');\n first = req.output[0];\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.output[0] = req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.output);\n }\n else if (req.outputData && req.outputData.length > 0) {\n // Node >= 12\n debug('Patching connection write() output buffer with updated header');\n first = req.outputData[0].data;\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.outputData[0].data =\n req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.outputData[0].data);\n }\n }\n // Wait for the socket's `connect` event, so that this `callback()`\n // function throws instead of the `http` request machinery. This is\n // important for i.e. `PacProxyAgent` which determines a failed proxy\n // connection via the `callback()` function throwing.\n yield (0, once_1.default)(socket, 'connect');\n return socket;\n });\n }\n}\nexports.default = HttpProxyAgent;\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpProxyAgent) {\n createHttpProxyAgent.HttpProxyAgent = agent_1.default;\n createHttpProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpProxyAgent || (createHttpProxyAgent = {}));\nmodule.exports = createHttpProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst agent_base_1 = require(\"agent-base\");\nconst parse_proxy_response_1 = __importDefault(require(\"./parse-proxy-response\"));\nconst debug = debug_1.default('https-proxy-agent:agent');\n/**\n * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to\n * the specified \"HTTP(s) proxy server\" in order to proxy HTTPS requests.\n *\n * Outgoing HTTP requests are first tunneled through the proxy server using the\n * `CONNECT` HTTP request method to establish a connection to the proxy server,\n * and then the proxy server connects to the destination target and issues the\n * HTTP request from the proxy server.\n *\n * `https:` requests have their socket connection upgraded to TLS once\n * the connection to the proxy server has been established.\n *\n * @api public\n */\nclass HttpsProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('creating new HttpsProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 
443 : 80;\n }\n // ALPN is supported by Node.js >= v5.\n // attempt to negotiate http/1.1 for proxy servers that support http/2\n if (this.secureProxy && !('ALPNProtocols' in proxy)) {\n proxy.ALPNProtocols = ['http 1.1'];\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n const headers = Object.assign({}, proxy.headers);\n const hostname = `${opts.host}:${opts.port}`;\n let payload = `CONNECT ${hostname} HTTP/1.1\\r\\n`;\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`;\n }\n // The `Host` header should only include the port\n // number when it is not the default port.\n let { host, port, secureEndpoint } = opts;\n if (!isDefaultPort(port, secureEndpoint)) {\n host += `:${port}`;\n }\n headers.Host = host;\n headers.Connection = 'close';\n for (const name of Object.keys(headers)) {\n payload += `${name}: ${headers[name]}\\r\\n`;\n }\n const proxyResponsePromise = parse_proxy_response_1.default(socket);\n socket.write(`${payload}\\r\\n`);\n const { statusCode, buffered } = yield proxyResponsePromise;\n if (statusCode === 200) {\n req.once('socket', resume);\n if (opts.secureEndpoint) {\n // The proxy is connecting to a TLS server, so upgrade\n // this socket connection to a TLS connection.\n debug('Upgrading socket connection to TLS');\n const servername = opts.servername || opts.host;\n return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket,\n servername }));\n }\n return socket;\n }\n // Some other status code that's not 200... 
need to re-play the HTTP\n // header \"data\" events onto the socket once the HTTP machinery is\n // attached so that the node core `http` can parse and handle the\n // error status code.\n // Close the original socket, and a new \"fake\" socket is returned\n // instead, so that the proxy doesn't get the HTTP request\n // written to it (which may contain `Authorization` headers or other\n // sensitive data).\n //\n // See: https://hackerone.com/reports/541502\n socket.destroy();\n const fakeSocket = new net_1.default.Socket({ writable: false });\n fakeSocket.readable = true;\n // Need to wait for the \"socket\" event to re-play the \"data\" events.\n req.once('socket', (s) => {\n debug('replaying proxy buffer for failed request');\n assert_1.default(s.listenerCount('data') > 0);\n // Replay the \"buffered\" Buffer onto the fake `socket`, since at\n // this point the HTTP module machinery has been hooked up for\n // the user.\n s.push(buffered);\n s.push(null);\n });\n return fakeSocket;\n });\n }\n}\nexports.default = HttpsProxyAgent;\nfunction resume(socket) {\n socket.resume();\n}\nfunction isDefaultPort(port, secure) {\n return Boolean((!secure && port === 80) || (secure && port === 443));\n}\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;\n}\nfunction omit(obj, ...keys) {\n const ret = {};\n let key;\n for (key in obj) {\n if (!keys.includes(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n}\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpsProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpsProxyAgent) {\n createHttpsProxyAgent.HttpsProxyAgent = agent_1.default;\n createHttpsProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpsProxyAgent || (createHttpsProxyAgent = {}));\nmodule.exports = createHttpsProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst debug_1 = __importDefault(require(\"debug\"));\nconst debug = debug_1.default('https-proxy-agent:parse-proxy-response');\nfunction parseProxyResponse(socket) {\n return new Promise((resolve, reject) => {\n // we need to buffer any HTTP traffic that happens with the proxy before we get\n // the CONNECT response, so that if the response is anything other than an \"200\"\n // response code, then we can re-play the \"data\" events on the socket once the\n // HTTP parser is hooked up...\n let buffersLength = 0;\n const buffers = [];\n function read() {\n const b = socket.read();\n if (b)\n ondata(b);\n else\n socket.once('readable', read);\n }\n function cleanup() {\n socket.removeListener('end', onend);\n socket.removeListener('error', onerror);\n socket.removeListener('close', onclose);\n socket.removeListener('readable', read);\n }\n function onclose(err) {\n debug('onclose had error %o', err);\n }\n function onend() {\n debug('onend');\n }\n function onerror(err) {\n cleanup();\n debug('onerror %o', err);\n reject(err);\n }\n function ondata(b) {\n buffers.push(b);\n buffersLength += b.length;\n const buffered = Buffer.concat(buffers, buffersLength);\n const endOfHeaders = buffered.indexOf('\\r\\n\\r\\n');\n if (endOfHeaders === -1) {\n // keep buffering\n debug('have not received end of HTTP headers yet...');\n read();\n return;\n }\n const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\\r\\n'));\n const statusCode = +firstLine.split(' ')[1];\n debug('got proxy server response: %o', firstLine);\n resolve({\n statusCode,\n buffered\n });\n }\n socket.on('error', onerror);\n socket.on('close', onclose);\n socket.on('end', onend);\n read();\n });\n}\nexports.default = parseProxyResponse;\n//# sourceMappingURL=parse-proxy-response.js.map","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most 
likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nconst isStream = stream =>\n\tstream !== null &&\n\ttypeof stream === 'object' &&\n\ttypeof stream.pipe === 'function';\n\nisStream.writable = stream =>\n\tisStream(stream) &&\n\tstream.writable !== false &&\n\ttypeof stream._write === 'function' &&\n\ttypeof stream._writableState === 'object';\n\nisStream.readable = stream =>\n\tisStream(stream) &&\n\tstream.readable !== false &&\n\ttypeof stream._read === 'function' &&\n\ttypeof stream._readableState === 'object';\n\nisStream.duplex = stream =>\n\tisStream.writable(stream) &&\n\tisStream.readable(stream);\n\nisStream.transform = stream =>\n\tisStream.duplex(stream) &&\n\ttypeof stream._transform === 'function';\n\nmodule.exports = isStream;\n","var json_stringify = require('./lib/stringify.js').stringify;\nvar json_parse = require('./lib/parse.js');\n\nmodule.exports = function(options) {\n return {\n parse: json_parse(options),\n stringify: json_stringify\n }\n};\n//create the default method members with no options applied for backwards compatibility\nmodule.exports.parse = json_parse();\nmodule.exports.stringify = json_stringify;\n","var BigNumber = null;\n\n// regexpxs extracted from\n// (c) BSD-3-Clause\n// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors\n\nconst suspectProtoRx = /(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])(?:p|\\\\u0070)(?:r|\\\\u0072)(?:o|\\\\u006[Ff])(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])/;\nconst suspectConstructorRx = /(?:c|\\\\u0063)(?:o|\\\\u006[Ff])(?:n|\\\\u006[Ee])(?:s|\\\\u0073)(?:t|\\\\u0074)(?:r|\\\\u0072)(?:u|\\\\u0075)(?:c|\\\\u0063)(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:r|\\\\u0072)/;\n\n/*\n json_parse.js\n 2012-06-20\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n This file creates a json_parse function.\n During create you can (optionally) specify some behavioural switches\n\n require('json-bigint')(options)\n\n The optional options parameter holds switches that drive certain\n aspects of the parsing process:\n * options.strict = true will warn about duplicate-key usage in the json.\n The default (strict = false) will silently ignore those and overwrite\n values for keys that are in duplicate use.\n\n The resulting function follows this signature:\n json_parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = json_parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n This is a reference implementation. You are free to copy, modify, or\n redistribute.\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. 
IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n*/\n\n/*members \"\", \"\\\"\", \"\\/\", \"\\\\\", at, b, call, charAt, f, fromCharCode,\n hasOwnProperty, message, n, name, prototype, push, r, t, text\n*/\n\nvar json_parse = function (options) {\n 'use strict';\n\n // This is a function that can parse a JSON text, producing a JavaScript\n // data structure. It is a simple, recursive descent parser. It does not use\n // eval or regular expressions, so it can be used as a model for implementing\n // a JSON parser in other languages.\n\n // We are defining the function inside of another function to avoid creating\n // global variables.\n\n // Default options one can override by passing options to the parse()\n var _options = {\n strict: false, // not being strict means do not generate syntax errors for \"duplicate key\"\n storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string\n alwaysParseAsBig: false, // toggles whether all numbers should be Big\n useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js\n protoAction: 'error',\n constructorAction: 'error',\n };\n\n // If there are options, then use them to override the default _options\n if (options !== undefined && options !== null) {\n if (options.strict === true) {\n _options.strict = true;\n }\n if (options.storeAsString === true) {\n _options.storeAsString = true;\n }\n _options.alwaysParseAsBig =\n options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false;\n _options.useNativeBigInt =\n options.useNativeBigInt === true ? options.useNativeBigInt : false;\n\n if (typeof options.constructorAction !== 'undefined') {\n if (\n options.constructorAction === 'error' ||\n options.constructorAction === 'ignore' ||\n options.constructorAction === 'preserve'\n ) {\n _options.constructorAction = options.constructorAction;\n } else {\n throw new Error(\n `Incorrect value for constructorAction option, must be \"error\", \"ignore\" or undefined but passed ${options.constructorAction}`\n );\n }\n }\n\n if (typeof options.protoAction !== 'undefined') {\n if (\n options.protoAction === 'error' ||\n options.protoAction === 'ignore' ||\n options.protoAction === 'preserve'\n ) {\n _options.protoAction = options.protoAction;\n } else {\n throw new Error(\n `Incorrect value for protoAction option, must be \"error\", \"ignore\" or undefined but passed ${options.protoAction}`\n );\n }\n }\n }\n\n var at, // The index of the current character\n ch, // The current character\n escapee = {\n '\"': '\"',\n '\\\\': '\\\\',\n '/': '/',\n b: '\\b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n },\n text,\n error = function (m) {\n // Call error when something is wrong.\n\n throw {\n name: 'SyntaxError',\n message: m,\n at: at,\n text: text,\n };\n },\n next = function (c) {\n // If a c parameter is provided, verify that it matches the current character.\n\n if (c && c !== ch) {\n error(\"Expected '\" + c + \"' instead of '\" + ch + \"'\");\n }\n\n // Get the next character. 
When there are no more characters,\n // return the empty string.\n\n ch = text.charAt(at);\n at += 1;\n return ch;\n },\n number = function () {\n // Parse a number value.\n\n var number,\n string = '';\n\n if (ch === '-') {\n string = '-';\n next('-');\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n if (ch === '.') {\n string += '.';\n while (next() && ch >= '0' && ch <= '9') {\n string += ch;\n }\n }\n if (ch === 'e' || ch === 'E') {\n string += ch;\n next();\n if (ch === '-' || ch === '+') {\n string += ch;\n next();\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n }\n number = +string;\n if (!isFinite(number)) {\n error('Bad number');\n } else {\n if (BigNumber == null) BigNumber = require('bignumber.js');\n //if (number > 9007199254740992 || number < -9007199254740992)\n // Bignumber has stricter check: everything with length > 15 digits disallowed\n if (string.length > 15)\n return _options.storeAsString\n ? string\n : _options.useNativeBigInt\n ? BigInt(string)\n : new BigNumber(string);\n else\n return !_options.alwaysParseAsBig\n ? number\n : _options.useNativeBigInt\n ? BigInt(number)\n : new BigNumber(number);\n }\n },\n string = function () {\n // Parse a string value.\n\n var hex,\n i,\n string = '',\n uffff;\n\n // When parsing for string values, we must look for \" and \\ characters.\n\n if (ch === '\"') {\n var startAt = at;\n while (next()) {\n if (ch === '\"') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n return string;\n }\n if (ch === '\\\\') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n if (ch === 'u') {\n uffff = 0;\n for (i = 0; i < 4; i += 1) {\n hex = parseInt(next(), 16);\n if (!isFinite(hex)) {\n break;\n }\n uffff = uffff * 16 + hex;\n }\n string += String.fromCharCode(uffff);\n } else if (typeof escapee[ch] === 'string') {\n string += escapee[ch];\n } else {\n break;\n }\n startAt = at;\n }\n }\n }\n error('Bad string');\n },\n white = function () {\n // Skip whitespace.\n\n while (ch && ch <= ' ') {\n next();\n }\n },\n word = function () {\n // true, false, or null.\n\n switch (ch) {\n case 't':\n next('t');\n next('r');\n next('u');\n next('e');\n return true;\n case 'f':\n next('f');\n next('a');\n next('l');\n next('s');\n next('e');\n return false;\n case 'n':\n next('n');\n next('u');\n next('l');\n next('l');\n return null;\n }\n error(\"Unexpected '\" + ch + \"'\");\n },\n value, // Place holder for the value function.\n array = function () {\n // Parse an array value.\n\n var array = [];\n\n if (ch === '[') {\n next('[');\n white();\n if (ch === ']') {\n next(']');\n return array; // empty array\n }\n while (ch) {\n array.push(value());\n white();\n if (ch === ']') {\n next(']');\n return array;\n }\n next(',');\n white();\n }\n }\n error('Bad array');\n },\n object = function () {\n // Parse an object value.\n\n var key,\n object = Object.create(null);\n\n if (ch === '{') {\n next('{');\n white();\n if (ch === '}') {\n next('}');\n return object; // empty object\n }\n while (ch) {\n key = string();\n white();\n next(':');\n if (\n _options.strict === true &&\n Object.hasOwnProperty.call(object, key)\n ) {\n error('Duplicate key \"' + key + '\"');\n }\n\n if (suspectProtoRx.test(key) === true) {\n if (_options.protoAction === 'error') {\n error('Object contains forbidden prototype property');\n } else if (_options.protoAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else if (suspectConstructorRx.test(key) === 
true) {\n if (_options.constructorAction === 'error') {\n error('Object contains forbidden constructor property');\n } else if (_options.constructorAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else {\n object[key] = value();\n }\n\n white();\n if (ch === '}') {\n next('}');\n return object;\n }\n next(',');\n white();\n }\n }\n error('Bad object');\n };\n\n value = function () {\n // Parse a JSON value. It could be an object, an array, a string, a number,\n // or a word.\n\n white();\n switch (ch) {\n case '{':\n return object();\n case '[':\n return array();\n case '\"':\n return string();\n case '-':\n return number();\n default:\n return ch >= '0' && ch <= '9' ? number() : word();\n }\n };\n\n // Return the json_parse function. It will have access to all of the above\n // functions and variables.\n\n return function (source, reviver) {\n var result;\n\n text = source + '';\n at = 0;\n ch = ' ';\n result = value();\n white();\n if (ch) {\n error('Syntax error');\n }\n\n // If there is a reviver function, we recursively walk the new structure,\n // passing each name/value pair to the reviver function for possible\n // transformation, starting with a temporary root object that holds the result\n // in an empty key. If there is not a reviver function, we simply return the\n // result.\n\n return typeof reviver === 'function'\n ? (function walk(holder, key) {\n var k,\n v,\n value = holder[key];\n if (value && typeof value === 'object') {\n Object.keys(value).forEach(function (k) {\n v = walk(value, k);\n if (v !== undefined) {\n value[k] = v;\n } else {\n delete value[k];\n }\n });\n }\n return reviver.call(holder, key, value);\n })({ '': result }, '')\n : result;\n };\n};\n\nmodule.exports = json_parse;\n","var BigNumber = require('bignumber.js');\n\n/*\n json2.js\n 2013-05-26\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n See http://www.JSON.org/js.html\n\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n\n\n This file creates a global JSON object containing two methods: stringify\n and parse.\n\n JSON.stringify(value, replacer, space)\n value any JavaScript value, usually an object or array.\n\n replacer an optional parameter that determines how object\n values are stringified for objects. It can be a\n function or an array of strings.\n\n space an optional parameter that specifies the indentation\n of nested structures. If it is omitted, the text will\n be packed without extra whitespace. If it is a number,\n it will specify the number of spaces to indent at each\n level. If it is a string (such as '\\t' or ' '),\n it contains the characters used to indent at each level.\n\n This method produces a JSON text from a JavaScript value.\n\n When an object value is found, if the object contains a toJSON\n method, its toJSON method will be called and the result will be\n stringified. A toJSON method does not serialize: it returns the\n value represented by the name/value pair that should be serialized,\n or undefined if nothing should be serialized. The toJSON method\n will be passed the key associated with the value, and this will be\n bound to the value\n\n For example, this would serialize Dates as ISO strings.\n\n Date.prototype.toJSON = function (key) {\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? 
'0' + n : n;\n }\n\n return this.getUTCFullYear() + '-' +\n f(this.getUTCMonth() + 1) + '-' +\n f(this.getUTCDate()) + 'T' +\n f(this.getUTCHours()) + ':' +\n f(this.getUTCMinutes()) + ':' +\n f(this.getUTCSeconds()) + 'Z';\n };\n\n You can provide an optional replacer method. It will be passed the\n key and value of each member, with this bound to the containing\n object. The value that is returned from your method will be\n serialized. If your method returns undefined, then the member will\n be excluded from the serialization.\n\n If the replacer parameter is an array of strings, then it will be\n used to select the members to be serialized. It filters the results\n such that only members with keys listed in the replacer array are\n stringified.\n\n Values that do not have JSON representations, such as undefined or\n functions, will not be serialized. Such values in objects will be\n dropped; in arrays they will be replaced with null. You can use\n a replacer function to replace those with JSON values.\n JSON.stringify(undefined) returns undefined.\n\n The optional space parameter produces a stringification of the\n value that is filled with line breaks and indentation to make it\n easier to read.\n\n If the space parameter is a non-empty string, then that string will\n be used for indentation. If the space parameter is a number, then\n the indentation will be that many spaces.\n\n Example:\n\n text = JSON.stringify(['e', {pluribus: 'unum'}]);\n // text is '[\"e\",{\"pluribus\":\"unum\"}]'\n\n\n text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\\t');\n // text is '[\\n\\t\"e\",\\n\\t{\\n\\t\\t\"pluribus\": \"unum\"\\n\\t}\\n]'\n\n text = JSON.stringify([new Date()], function (key, value) {\n return this[key] instanceof Date ?\n 'Date(' + this[key] + ')' : value;\n });\n // text is '[\"Date(---current time---)\"]'\n\n\n JSON.parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = JSON.parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n myData = JSON.parse('[\"Date(09/09/2001)\"]', function (key, value) {\n var d;\n if (typeof value === 'string' &&\n value.slice(0, 5) === 'Date(' &&\n value.slice(-1) === ')') {\n d = new Date(value.slice(5, -1));\n if (d) {\n return d;\n }\n }\n return value;\n });\n\n\n This is a reference implementation. You are free to copy, modify, or\n redistribute.\n*/\n\n/*jslint evil: true, regexp: true */\n\n/*members \"\", \"\\b\", \"\\t\", \"\\n\", \"\\f\", \"\\r\", \"\\\"\", JSON, \"\\\\\", apply,\n call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,\n getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,\n lastIndex, length, parse, prototype, push, replace, slice, stringify,\n test, toJSON, toString, valueOf\n*/\n\n\n// Create a JSON object only if one does not already exist. 
We create the\n// methods in a closure to avoid creating global variables.\n\nvar JSON = module.exports;\n\n(function () {\n 'use strict';\n\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? '0' + n : n;\n }\n\n var cx = /[\\u0000\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n escapable = /[\\\\\\\"\\x00-\\x1f\\x7f-\\x9f\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n gap,\n indent,\n meta = { // table of character substitutions\n '\\b': '\\\\b',\n '\\t': '\\\\t',\n '\\n': '\\\\n',\n '\\f': '\\\\f',\n '\\r': '\\\\r',\n '\"' : '\\\\\"',\n '\\\\': '\\\\\\\\'\n },\n rep;\n\n\n function quote(string) {\n\n// If the string contains no control characters, no quote characters, and no\n// backslash characters, then we can safely slap some quotes around it.\n// Otherwise we must also replace the offending characters with safe escape\n// sequences.\n\n escapable.lastIndex = 0;\n return escapable.test(string) ? '\"' + string.replace(escapable, function (a) {\n var c = meta[a];\n return typeof c === 'string'\n ? c\n : '\\\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);\n }) + '\"' : '\"' + string + '\"';\n }\n\n\n function str(key, holder) {\n\n// Produce a string from holder[key].\n\n var i, // The loop counter.\n k, // The member key.\n v, // The member value.\n length,\n mind = gap,\n partial,\n value = holder[key],\n isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value));\n\n// If the value has a toJSON method, call it to obtain a replacement value.\n\n if (value && typeof value === 'object' &&\n typeof value.toJSON === 'function') {\n value = value.toJSON(key);\n }\n\n// If we were called with a replacer function, then call the replacer to\n// obtain a replacement value.\n\n if (typeof rep === 'function') {\n value = rep.call(holder, key, value);\n }\n\n// What happens next depends on the value's type.\n\n switch (typeof value) {\n case 'string':\n if (isBigNumber) {\n return value;\n } else {\n return quote(value);\n }\n\n case 'number':\n\n// JSON numbers must be finite. Encode non-finite numbers as null.\n\n return isFinite(value) ? String(value) : 'null';\n\n case 'boolean':\n case 'null':\n case 'bigint':\n\n// If the value is a boolean or null, convert it to a string. Note:\n// typeof null does not produce 'null'. The case is included here in\n// the remote chance that this gets fixed someday.\n\n return String(value);\n\n// If the type is 'object', we might be dealing with an object or an array or\n// null.\n\n case 'object':\n\n// Due to a specification blunder in ECMAScript, typeof null is 'object',\n// so watch out for that case.\n\n if (!value) {\n return 'null';\n }\n\n// Make an array to hold the partial results of stringifying this object value.\n\n gap += indent;\n partial = [];\n\n// Is the value an array?\n\n if (Object.prototype.toString.apply(value) === '[object Array]') {\n\n// The value is an array. Stringify every element. Use null as a placeholder\n// for non-JSON values.\n\n length = value.length;\n for (i = 0; i < length; i += 1) {\n partial[i] = str(i, value) || 'null';\n }\n\n// Join all of the elements together, separated with commas, and wrap them in\n// brackets.\n\n v = partial.length === 0\n ? '[]'\n : gap\n ? 
'[\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + ']'\n : '[' + partial.join(',') + ']';\n gap = mind;\n return v;\n }\n\n// If the replacer is an array, use it to select the members to be stringified.\n\n if (rep && typeof rep === 'object') {\n length = rep.length;\n for (i = 0; i < length; i += 1) {\n if (typeof rep[i] === 'string') {\n k = rep[i];\n v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n }\n }\n } else {\n\n// Otherwise, iterate through all of the keys in the object.\n\n Object.keys(value).forEach(function(k) {\n var v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n });\n }\n\n// Join all of the member texts together, separated with commas,\n// and wrap them in braces.\n\n v = partial.length === 0\n ? '{}'\n : gap\n ? '{\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + '}'\n : '{' + partial.join(',') + '}';\n gap = mind;\n return v;\n }\n }\n\n// If the JSON object does not yet have a stringify method, give it one.\n\n if (typeof JSON.stringify !== 'function') {\n JSON.stringify = function (value, replacer, space) {\n\n// The stringify method takes a value and an optional replacer, and an optional\n// space parameter, and returns a JSON text. The replacer can be a function\n// that can replace values, or an array of strings that will select the keys.\n// A default replacer method can be provided. Use of the space parameter can\n// produce text that is more easily readable.\n\n var i;\n gap = '';\n indent = '';\n\n// If the space parameter is a number, make an indent string containing that\n// many spaces.\n\n if (typeof space === 'number') {\n for (i = 0; i < space; i += 1) {\n indent += ' ';\n }\n\n// If the space parameter is a string, it will be used as the indent string.\n\n } else if (typeof space === 'string') {\n indent = space;\n }\n\n// If there is a replacer, it must be a function or an array.\n// Otherwise, throw an error.\n\n rep = replacer;\n if (replacer && typeof replacer !== 'function' &&\n (typeof replacer !== 'object' ||\n typeof replacer.length !== 'number')) {\n throw new Error('JSON.stringify');\n }\n\n// Make a fake root object containing our value under the key of ''.\n// Return the result of stringifying the value.\n\n return str('', {'': value});\n };\n }\n}());\n","var bufferEqual = require('buffer-equal-constant-time');\nvar Buffer = require('safe-buffer').Buffer;\nvar crypto = require('crypto');\nvar formatEcdsa = require('ecdsa-sig-formatter');\nvar util = require('util');\n\nvar MSG_INVALID_ALGORITHM = '\"%s\" is not a valid algorithm.\\n Supported algorithms are:\\n \"HS256\", \"HS384\", \"HS512\", \"RS256\", \"RS384\", \"RS512\", \"PS256\", \"PS384\", \"PS512\", \"ES256\", \"ES384\", \"ES512\" and \"none\".'\nvar MSG_INVALID_SECRET = 'secret must be a string or buffer';\nvar MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer';\nvar MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object';\n\nvar supportsKeyObjects = typeof crypto.createPublicKey === 'function';\nif (supportsKeyObjects) {\n MSG_INVALID_VERIFIER_KEY += ' or a KeyObject';\n MSG_INVALID_SECRET += 'or a KeyObject';\n}\n\nfunction checkIsPublicKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.type !== 'string') {\n throw 
typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.asymmetricKeyType !== 'string') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n};\n\nfunction checkIsPrivateKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (typeof key === 'object') {\n return;\n }\n\n throw typeError(MSG_INVALID_SIGNER_KEY);\n};\n\nfunction checkIsSecretKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return key;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (key.type !== 'secret') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_SECRET);\n }\n}\n\nfunction fromBase64(base64) {\n return base64\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction toBase64(base64url) {\n base64url = base64url.toString();\n\n var padding = 4 - base64url.length % 4;\n if (padding !== 4) {\n for (var i = 0; i < padding; ++i) {\n base64url += '=';\n }\n }\n\n return base64url\n .replace(/\\-/g, '+')\n .replace(/_/g, '/');\n}\n\nfunction typeError(template) {\n var args = [].slice.call(arguments, 1);\n var errMsg = util.format.bind(util, template).apply(null, args);\n return new TypeError(errMsg);\n}\n\nfunction bufferOrString(obj) {\n return Buffer.isBuffer(obj) || typeof obj === 'string';\n}\n\nfunction normalizeInput(thing) {\n if (!bufferOrString(thing))\n thing = JSON.stringify(thing);\n return thing;\n}\n\nfunction createHmacSigner(bits) {\n return function sign(thing, secret) {\n checkIsSecretKey(secret);\n thing = normalizeInput(thing);\n var hmac = crypto.createHmac('sha' + bits, secret);\n var sig = (hmac.update(thing), hmac.digest('base64'))\n return fromBase64(sig);\n }\n}\n\nfunction createHmacVerifier(bits) {\n return function verify(thing, signature, secret) {\n var computedSig = createHmacSigner(bits)(thing, secret);\n return bufferEqual(Buffer.from(signature), Buffer.from(computedSig));\n }\n}\n\nfunction createKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n // Even though we are specifying \"RSA\" here, this works with ECDSA\n // keys as well.\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign(privateKey, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify(publicKey, signature, 'base64');\n }\n}\n\nfunction createPSSKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign({\n key: privateKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createPSSKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = 
toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify({\n key: publicKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, signature, 'base64');\n }\n}\n\nfunction createECDSASigner(bits) {\n var inner = createKeySigner(bits);\n return function sign() {\n var signature = inner.apply(null, arguments);\n signature = formatEcdsa.derToJose(signature, 'ES' + bits);\n return signature;\n };\n}\n\nfunction createECDSAVerifer(bits) {\n var inner = createKeyVerifier(bits);\n return function verify(thing, signature, publicKey) {\n signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64');\n var result = inner(thing, signature, publicKey);\n return result;\n };\n}\n\nfunction createNoneSigner() {\n return function sign() {\n return '';\n }\n}\n\nfunction createNoneVerifier() {\n return function verify(thing, signature) {\n return signature === '';\n }\n}\n\nmodule.exports = function jwa(algorithm) {\n var signerFactories = {\n hs: createHmacSigner,\n rs: createKeySigner,\n ps: createPSSKeySigner,\n es: createECDSASigner,\n none: createNoneSigner,\n }\n var verifierFactories = {\n hs: createHmacVerifier,\n rs: createKeyVerifier,\n ps: createPSSKeyVerifier,\n es: createECDSAVerifer,\n none: createNoneVerifier,\n }\n var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/);\n if (!match)\n throw typeError(MSG_INVALID_ALGORITHM, algorithm);\n var algo = (match[1] || match[3]).toLowerCase();\n var bits = match[2];\n\n return {\n sign: signerFactories[algo](bits),\n verify: verifierFactories[algo](bits),\n }\n};\n","/*global exports*/\nvar SignStream = require('./lib/sign-stream');\nvar VerifyStream = require('./lib/verify-stream');\n\nvar ALGORITHMS = [\n 'HS256', 'HS384', 'HS512',\n 'RS256', 'RS384', 'RS512',\n 'PS256', 'PS384', 'PS512',\n 'ES256', 'ES384', 'ES512'\n];\n\nexports.ALGORITHMS = ALGORITHMS;\nexports.sign = SignStream.sign;\nexports.verify = VerifyStream.verify;\nexports.decode = VerifyStream.decode;\nexports.isValid = VerifyStream.isValid;\nexports.createSign = function createSign(opts) {\n return new SignStream(opts);\n};\nexports.createVerify = function createVerify(opts) {\n return new VerifyStream(opts);\n};\n","/*global module, process*/\nvar Buffer = require('safe-buffer').Buffer;\nvar Stream = require('stream');\nvar util = require('util');\n\nfunction DataStream(data) {\n this.buffer = null;\n this.writable = true;\n this.readable = true;\n\n // No input\n if (!data) {\n this.buffer = Buffer.alloc(0);\n return this;\n }\n\n // Stream\n if (typeof data.pipe === 'function') {\n this.buffer = Buffer.alloc(0);\n data.pipe(this);\n return this;\n }\n\n // Buffer or String\n // or Object (assumedly a passworded key)\n if (data.length || typeof data === 'object') {\n this.buffer = data;\n this.writable = false;\n process.nextTick(function () {\n this.emit('end', data);\n this.readable = false;\n this.emit('close');\n }.bind(this));\n return this;\n }\n\n throw new TypeError('Unexpected data type ('+ typeof data + ')');\n}\nutil.inherits(DataStream, Stream);\n\nDataStream.prototype.write = function write(data) {\n this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]);\n this.emit('data', data);\n};\n\nDataStream.prototype.end = function end(data) {\n if (data)\n this.write(data);\n this.emit('end', data);\n this.emit('close');\n this.writable = false;\n this.readable = false;\n};\n\nmodule.exports = 
DataStream;\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\n\nfunction base64url(string, encoding) {\n return Buffer\n .from(string, encoding)\n .toString('base64')\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction jwsSecuredInput(header, payload, encoding) {\n encoding = encoding || 'utf8';\n var encodedHeader = base64url(toString(header), 'binary');\n var encodedPayload = base64url(toString(payload), encoding);\n return util.format('%s.%s', encodedHeader, encodedPayload);\n}\n\nfunction jwsSign(opts) {\n var header = opts.header;\n var payload = opts.payload;\n var secretOrKey = opts.secret || opts.privateKey;\n var encoding = opts.encoding;\n var algo = jwa(header.alg);\n var securedInput = jwsSecuredInput(header, payload, encoding);\n var signature = algo.sign(securedInput, secretOrKey);\n return util.format('%s.%s', securedInput, signature);\n}\n\nfunction SignStream(opts) {\n var secret = opts.secret||opts.privateKey||opts.key;\n var secretStream = new DataStream(secret);\n this.readable = true;\n this.header = opts.header;\n this.encoding = opts.encoding;\n this.secret = this.privateKey = this.key = secretStream;\n this.payload = new DataStream(opts.payload);\n this.secret.once('close', function () {\n if (!this.payload.writable && this.readable)\n this.sign();\n }.bind(this));\n\n this.payload.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.sign();\n }.bind(this));\n}\nutil.inherits(SignStream, Stream);\n\nSignStream.prototype.sign = function sign() {\n try {\n var signature = jwsSign({\n header: this.header,\n payload: this.payload.buffer,\n secret: this.secret.buffer,\n encoding: this.encoding\n });\n this.emit('done', signature);\n this.emit('data', signature);\n this.emit('end');\n this.readable = false;\n return signature;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nSignStream.sign = jwsSign;\n\nmodule.exports = SignStream;\n","/*global module*/\nvar Buffer = require('buffer').Buffer;\n\nmodule.exports = function toString(obj) {\n if (typeof obj === 'string')\n return obj;\n if (typeof obj === 'number' || Buffer.isBuffer(obj))\n return obj.toString();\n return JSON.stringify(obj);\n};\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\nvar JWS_REGEX = /^[a-zA-Z0-9\\-_]+?\\.[a-zA-Z0-9\\-_]+?\\.([a-zA-Z0-9\\-_]+)?$/;\n\nfunction isObject(thing) {\n return Object.prototype.toString.call(thing) === '[object Object]';\n}\n\nfunction safeJsonParse(thing) {\n if (isObject(thing))\n return thing;\n try { return JSON.parse(thing); }\n catch (e) { return undefined; }\n}\n\nfunction headerFromJWS(jwsSig) {\n var encodedHeader = jwsSig.split('.', 1)[0];\n return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary'));\n}\n\nfunction securedInputFromJWS(jwsSig) {\n return jwsSig.split('.', 2).join('.');\n}\n\nfunction signatureFromJWS(jwsSig) {\n return jwsSig.split('.')[2];\n}\n\nfunction payloadFromJWS(jwsSig, encoding) {\n encoding = encoding || 'utf8';\n var payload = jwsSig.split('.')[1];\n return Buffer.from(payload, 'base64').toString(encoding);\n}\n\nfunction 
isValidJws(string) {\n return JWS_REGEX.test(string) && !!headerFromJWS(string);\n}\n\nfunction jwsVerify(jwsSig, algorithm, secretOrKey) {\n if (!algorithm) {\n var err = new Error(\"Missing algorithm parameter for jws.verify\");\n err.code = \"MISSING_ALGORITHM\";\n throw err;\n }\n jwsSig = toString(jwsSig);\n var signature = signatureFromJWS(jwsSig);\n var securedInput = securedInputFromJWS(jwsSig);\n var algo = jwa(algorithm);\n return algo.verify(securedInput, signature, secretOrKey);\n}\n\nfunction jwsDecode(jwsSig, opts) {\n opts = opts || {};\n jwsSig = toString(jwsSig);\n\n if (!isValidJws(jwsSig))\n return null;\n\n var header = headerFromJWS(jwsSig);\n\n if (!header)\n return null;\n\n var payload = payloadFromJWS(jwsSig);\n if (header.typ === 'JWT' || opts.json)\n payload = JSON.parse(payload, opts.encoding);\n\n return {\n header: header,\n payload: payload,\n signature: signatureFromJWS(jwsSig)\n };\n}\n\nfunction VerifyStream(opts) {\n opts = opts || {};\n var secretOrKey = opts.secret||opts.publicKey||opts.key;\n var secretStream = new DataStream(secretOrKey);\n this.readable = true;\n this.algorithm = opts.algorithm;\n this.encoding = opts.encoding;\n this.secret = this.publicKey = this.key = secretStream;\n this.signature = new DataStream(opts.signature);\n this.secret.once('close', function () {\n if (!this.signature.writable && this.readable)\n this.verify();\n }.bind(this));\n\n this.signature.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.verify();\n }.bind(this));\n}\nutil.inherits(VerifyStream, Stream);\nVerifyStream.prototype.verify = function verify() {\n try {\n var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer);\n var obj = jwsDecode(this.signature.buffer, this.encoding);\n this.emit('done', valid, obj);\n this.emit('data', valid);\n this.emit('end');\n this.readable = false;\n return valid;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nVerifyStream.decode = jwsDecode;\nVerifyStream.isValid = isValidJws;\nVerifyStream.verify = jwsVerify;\n\nmodule.exports = VerifyStream;\n","'use strict'\n\n// A linked list to keep track of recently-used-ness\nconst Yallist = require('yallist')\n\nconst MAX = Symbol('max')\nconst LENGTH = Symbol('length')\nconst LENGTH_CALCULATOR = Symbol('lengthCalculator')\nconst ALLOW_STALE = Symbol('allowStale')\nconst MAX_AGE = Symbol('maxAge')\nconst DISPOSE = Symbol('dispose')\nconst NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')\nconst LRU_LIST = Symbol('lruList')\nconst CACHE = Symbol('cache')\nconst UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')\n\nconst naiveLength = () => 1\n\n// lruList is a yallist where the head is the youngest\n// item, and the tail is the oldest. the list contains the Hit\n// objects as the entries.\n// Each Hit object has a reference to its Yallist.Node. This\n// never changes.\n//\n// cache is a Map (or PseudoMap) that matches the keys to\n// the Yallist.Node object.\nclass LRUCache {\n constructor (options) {\n if (typeof options === 'number')\n options = { max: options }\n\n if (!options)\n options = {}\n\n if (options.max && (typeof options.max !== 'number' || options.max < 0))\n throw new TypeError('max must be a non-negative number')\n // Kind of weird to have a default max of Infinity, but oh well.\n const max = this[MAX] = options.max || Infinity\n\n const lc = options.length || naiveLength\n this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? 
naiveLength : lc\n this[ALLOW_STALE] = options.stale || false\n if (options.maxAge && typeof options.maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n this[MAX_AGE] = options.maxAge || 0\n this[DISPOSE] = options.dispose\n this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false\n this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false\n this.reset()\n }\n\n // resize the cache when the max changes.\n set max (mL) {\n if (typeof mL !== 'number' || mL < 0)\n throw new TypeError('max must be a non-negative number')\n\n this[MAX] = mL || Infinity\n trim(this)\n }\n get max () {\n return this[MAX]\n }\n\n set allowStale (allowStale) {\n this[ALLOW_STALE] = !!allowStale\n }\n get allowStale () {\n return this[ALLOW_STALE]\n }\n\n set maxAge (mA) {\n if (typeof mA !== 'number')\n throw new TypeError('maxAge must be a non-negative number')\n\n this[MAX_AGE] = mA\n trim(this)\n }\n get maxAge () {\n return this[MAX_AGE]\n }\n\n // resize the cache when the lengthCalculator changes.\n set lengthCalculator (lC) {\n if (typeof lC !== 'function')\n lC = naiveLength\n\n if (lC !== this[LENGTH_CALCULATOR]) {\n this[LENGTH_CALCULATOR] = lC\n this[LENGTH] = 0\n this[LRU_LIST].forEach(hit => {\n hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)\n this[LENGTH] += hit.length\n })\n }\n trim(this)\n }\n get lengthCalculator () { return this[LENGTH_CALCULATOR] }\n\n get length () { return this[LENGTH] }\n get itemCount () { return this[LRU_LIST].length }\n\n rforEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].tail; walker !== null;) {\n const prev = walker.prev\n forEachStep(this, fn, walker, thisp)\n walker = prev\n }\n }\n\n forEach (fn, thisp) {\n thisp = thisp || this\n for (let walker = this[LRU_LIST].head; walker !== null;) {\n const next = walker.next\n forEachStep(this, fn, walker, thisp)\n walker = next\n }\n }\n\n keys () {\n return this[LRU_LIST].toArray().map(k => k.key)\n }\n\n values () {\n return this[LRU_LIST].toArray().map(k => k.value)\n }\n\n reset () {\n if (this[DISPOSE] &&\n this[LRU_LIST] &&\n this[LRU_LIST].length) {\n this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))\n }\n\n this[CACHE] = new Map() // hash of items by key\n this[LRU_LIST] = new Yallist() // list of items in order of use recency\n this[LENGTH] = 0 // length of items in the list\n }\n\n dump () {\n return this[LRU_LIST].map(hit =>\n isStale(this, hit) ? false : {\n k: hit.key,\n v: hit.value,\n e: hit.now + (hit.maxAge || 0)\n }).toArray().filter(h => h)\n }\n\n dumpLru () {\n return this[LRU_LIST]\n }\n\n set (key, value, maxAge) {\n maxAge = maxAge || this[MAX_AGE]\n\n if (maxAge && typeof maxAge !== 'number')\n throw new TypeError('maxAge must be a number')\n\n const now = maxAge ? 
Date.now() : 0\n const len = this[LENGTH_CALCULATOR](value, key)\n\n if (this[CACHE].has(key)) {\n if (len > this[MAX]) {\n del(this, this[CACHE].get(key))\n return false\n }\n\n const node = this[CACHE].get(key)\n const item = node.value\n\n // dispose of the old one before overwriting\n // split out into 2 ifs for better coverage tracking\n if (this[DISPOSE]) {\n if (!this[NO_DISPOSE_ON_SET])\n this[DISPOSE](key, item.value)\n }\n\n item.now = now\n item.maxAge = maxAge\n item.value = value\n this[LENGTH] += len - item.length\n item.length = len\n this.get(key)\n trim(this)\n return true\n }\n\n const hit = new Entry(key, value, len, now, maxAge)\n\n // oversized objects fall out of cache automatically.\n if (hit.length > this[MAX]) {\n if (this[DISPOSE])\n this[DISPOSE](key, value)\n\n return false\n }\n\n this[LENGTH] += hit.length\n this[LRU_LIST].unshift(hit)\n this[CACHE].set(key, this[LRU_LIST].head)\n trim(this)\n return true\n }\n\n has (key) {\n if (!this[CACHE].has(key)) return false\n const hit = this[CACHE].get(key).value\n return !isStale(this, hit)\n }\n\n get (key) {\n return get(this, key, true)\n }\n\n peek (key) {\n return get(this, key, false)\n }\n\n pop () {\n const node = this[LRU_LIST].tail\n if (!node)\n return null\n\n del(this, node)\n return node.value\n }\n\n del (key) {\n del(this, this[CACHE].get(key))\n }\n\n load (arr) {\n // reset the cache\n this.reset()\n\n const now = Date.now()\n // A previous serialized cache has the most recent items first\n for (let l = arr.length - 1; l >= 0; l--) {\n const hit = arr[l]\n const expiresAt = hit.e || 0\n if (expiresAt === 0)\n // the item was created without expiration in a non aged cache\n this.set(hit.k, hit.v)\n else {\n const maxAge = expiresAt - now\n // dont add already expired items\n if (maxAge > 0) {\n this.set(hit.k, hit.v, maxAge)\n }\n }\n }\n }\n\n prune () {\n this[CACHE].forEach((value, key) => get(this, key, false))\n }\n}\n\nconst get = (self, key, doUse) => {\n const node = self[CACHE].get(key)\n if (node) {\n const hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n return undefined\n } else {\n if (doUse) {\n if (self[UPDATE_AGE_ON_GET])\n node.value.now = Date.now()\n self[LRU_LIST].unshiftNode(node)\n }\n }\n return hit.value\n }\n}\n\nconst isStale = (self, hit) => {\n if (!hit || (!hit.maxAge && !self[MAX_AGE]))\n return false\n\n const diff = Date.now() - hit.now\n return hit.maxAge ? 
diff > hit.maxAge\n : self[MAX_AGE] && (diff > self[MAX_AGE])\n}\n\nconst trim = self => {\n if (self[LENGTH] > self[MAX]) {\n for (let walker = self[LRU_LIST].tail;\n self[LENGTH] > self[MAX] && walker !== null;) {\n // We know that we're about to delete this one, and also\n // what the next least recently used key will be, so just\n // go ahead and set it now.\n const prev = walker.prev\n del(self, walker)\n walker = prev\n }\n }\n}\n\nconst del = (self, node) => {\n if (node) {\n const hit = node.value\n if (self[DISPOSE])\n self[DISPOSE](hit.key, hit.value)\n\n self[LENGTH] -= hit.length\n self[CACHE].delete(hit.key)\n self[LRU_LIST].removeNode(node)\n }\n}\n\nclass Entry {\n constructor (key, value, length, now, maxAge) {\n this.key = key\n this.value = value\n this.length = length\n this.now = now\n this.maxAge = maxAge || 0\n }\n}\n\nconst forEachStep = (self, fn, node, thisp) => {\n let hit = node.value\n if (isStale(self, hit)) {\n del(self, node)\n if (!self[ALLOW_STALE])\n hit = undefined\n }\n if (hit)\n fn.call(thisp, hit.value, hit.key, self)\n}\n\nmodule.exports = LRUCache\n","/*!\n * mime-db\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015-2022 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if (match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? 
exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' + path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","'use strict';\n\n/**\n * @param typeMap [Object] Map of MIME type -> Array[extensions]\n * @param ...\n */\nfunction Mime() {\n this._types = Object.create(null);\n this._extensions = Object.create(null);\n\n for (let i = 0; i < arguments.length; i++) {\n this.define(arguments[i]);\n }\n\n this.define = this.define.bind(this);\n this.getType = this.getType.bind(this);\n this.getExtension = this.getExtension.bind(this);\n}\n\n/**\n * Define mimetype -> extension mappings. Each key is a mime-type that maps\n * to an array of extensions associated with the type. The first extension is\n * used as the default extension for the type.\n *\n * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']});\n *\n * If a type declares an extension that has already been defined, an error will\n * be thrown. To suppress this error and force the extension to be associated\n * with the new type, pass `force`=true. Alternatively, you may prefix the\n * extension with \"*\" to map the type to extension, without mapping the\n * extension to the type.\n *\n * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']});\n *\n *\n * @param map (Object) type definitions\n * @param force (Boolean) if true, force overriding of existing definitions\n */\nMime.prototype.define = function(typeMap, force) {\n for (let type in typeMap) {\n let extensions = typeMap[type].map(function(t) {\n return t.toLowerCase();\n });\n type = type.toLowerCase();\n\n for (let i = 0; i < extensions.length; i++) {\n const ext = extensions[i];\n\n // '*' prefix = not the preferred type for this extension. So fixup the\n // extension, and skip it.\n if (ext[0] === '*') {\n continue;\n }\n\n if (!force && (ext in this._types)) {\n throw new Error(\n 'Attempt to change mapping for \"' + ext +\n '\" extension from \"' + this._types[ext] + '\" to \"' + type +\n '\". Pass `force=true` to allow this, otherwise remove \"' + ext +\n '\" from the list of extensions for \"' + type + '\".'\n );\n }\n\n this._types[ext] = type;\n }\n\n // Use first extension as default\n if (force || !this._extensions[type]) {\n const ext = extensions[0];\n this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1);\n }\n }\n};\n\n/**\n * Lookup a mime type based on extension\n */\nMime.prototype.getType = function(path) {\n path = String(path);\n let last = path.replace(/^.*[/\\\\]/, '').toLowerCase();\n let ext = last.replace(/^.*\\./, '').toLowerCase();\n\n let hasPath = last.length < path.length;\n let hasDot = ext.length < last.length - 1;\n\n return (hasDot || !hasPath) && this._types[ext] || null;\n};\n\n/**\n * Return file extension associated with a mime type\n */\nMime.prototype.getExtension = function(type) {\n type = /^\\s*([^;\\s]*)/.test(type) && RegExp.$1;\n return type && this._extensions[type.toLowerCase()] || null;\n};\n\nmodule.exports = Mime;\n","'use strict';\n\nlet Mime = require('./Mime');\nmodule.exports = new Mime(require('./types/standard'), require('./types/other'));\n","module.exports = 
{\"application/prs.cww\":[\"cww\"],\"application/vnd.1000minds.decision-model+xml\":[\"1km\"],\"application/vnd.3gpp.pic-bw-large\":[\"plb\"],\"application/vnd.3gpp.pic-bw-small\":[\"psb\"],\"application/vnd.3gpp.pic-bw-var\":[\"pvb\"],\"application/vnd.3gpp2.tcap\":[\"tcap\"],\"application/vnd.3m.post-it-notes\":[\"pwn\"],\"application/vnd.accpac.simply.aso\":[\"aso\"],\"application/vnd.accpac.simply.imp\":[\"imp\"],\"application/vnd.acucobol\":[\"acu\"],\"application/vnd.acucorp\":[\"atc\",\"acutc\"],\"application/vnd.adobe.air-application-installer-package+zip\":[\"air\"],\"application/vnd.adobe.formscentral.fcdt\":[\"fcdt\"],\"application/vnd.adobe.fxp\":[\"fxp\",\"fxpl\"],\"application/vnd.adobe.xdp+xml\":[\"xdp\"],\"application/vnd.adobe.xfdf\":[\"xfdf\"],\"application/vnd.ahead.space\":[\"ahead\"],\"application/vnd.airzip.filesecure.azf\":[\"azf\"],\"application/vnd.airzip.filesecure.azs\":[\"azs\"],\"application/vnd.amazon.ebook\":[\"azw\"],\"application/vnd.americandynamics.acc\":[\"acc\"],\"application/vnd.amiga.ami\":[\"ami\"],\"application/vnd.android.package-archive\":[\"apk\"],\"application/vnd.anser-web-certificate-issue-initiation\":[\"cii\"],\"application/vnd.anser-web-funds-transfer-initiation\":[\"fti\"],\"application/vnd.antix.game-component\":[\"atx\"],\"application/vnd.apple.installer+xml\":[\"mpkg\"],\"application/vnd.apple.keynote\":[\"key\"],\"application/vnd.apple.mpegurl\":[\"m3u8\"],\"application/vnd.apple.numbers\":[\"numbers\"],\"application/vnd.apple.pages\":[\"pages\"],\"application/vnd.apple.pkpass\":[\"pkpass\"],\"application/vnd.aristanetworks.swi\":[\"swi\"],\"application/vnd.astraea-software.iota\":[\"iota\"],\"application/vnd.audiograph\":[\"aep\"],\"application/vnd.balsamiq.bmml+xml\":[\"bmml\"],\"application/vnd.blueice.multipass\":[\"mpm\"],\"application/vnd.bmi\":[\"bmi\"],\"application/vnd.businessobjects\":[\"rep\"],\"application/vnd.chemdraw+xml\":[\"cdxml\"],\"application/vnd.chipnuts.karaoke-mmd\":[\"mmd\"],\"application/vnd.cinderella\":[\"cdy\"],\"application/vnd.citationstyles.style+xml\":[\"csl\"],\"application/vnd.claymore\":[\"cla\"],\"application/vnd.cloanto.rp9\":[\"rp9\"],\"application/vnd.clonk.c4group\":[\"c4g\",\"c4d\",\"c4f\",\"c4p\",\"c4u\"],\"application/vnd.cluetrust.cartomobile-config\":[\"c11amc\"],\"application/vnd.cluetrust.cartomobile-config-pkg\":[\"c11amz\"],\"application/vnd.commonspace\":[\"csp\"],\"application/vnd.contact.cmsg\":[\"cdbcmsg\"],\"application/vnd.cosmocaller\":[\"cmc\"],\"application/vnd.crick.clicker\":[\"clkx\"],\"application/vnd.crick.clicker.keyboard\":[\"clkk\"],\"application/vnd.crick.clicker.palette\":[\"clkp\"],\"application/vnd.crick.clicker.template\":[\"clkt\"],\"application/vnd.crick.clicker.wordbank\":[\"clkw\"],\"application/vnd.criticaltools.wbs+xml\":[\"wbs\"],\"application/vnd.ctc-posml\":[\"pml\"],\"application/vnd.cups-ppd\":[\"ppd\"],\"application/vnd.curl.car\":[\"car\"],\"application/vnd.curl.pcurl\":[\"pcurl\"],\"application/vnd.dart\":[\"dart\"],\"application/vnd.data-vision.rdz\":[\"rdz\"],\"application/vnd.dbf\":[\"dbf\"],\"application/vnd.dece.data\":[\"uvf\",\"uvvf\",\"uvd\",\"uvvd\"],\"application/vnd.dece.ttml+xml\":[\"uvt\",\"uvvt\"],\"application/vnd.dece.unspecified\":[\"uvx\",\"uvvx\"],\"application/vnd.dece.zip\":[\"uvz\",\"uvvz\"],\"application/vnd.denovo.fcselayout-link\":[\"fe_launch\"],\"application/vnd.dna\":[\"dna\"],\"application/vnd.dolby.mlp\":[\"mlp\"],\"application/vnd.dpgraph\":[\"dpg\"],\"application/vnd.dreamfactory\":[\"dfac\"],\"application/vnd.ds-keypoin
t\":[\"kpxx\"],\"application/vnd.dvb.ait\":[\"ait\"],\"application/vnd.dvb.service\":[\"svc\"],\"application/vnd.dynageo\":[\"geo\"],\"application/vnd.ecowin.chart\":[\"mag\"],\"application/vnd.enliven\":[\"nml\"],\"application/vnd.epson.esf\":[\"esf\"],\"application/vnd.epson.msf\":[\"msf\"],\"application/vnd.epson.quickanime\":[\"qam\"],\"application/vnd.epson.salt\":[\"slt\"],\"application/vnd.epson.ssf\":[\"ssf\"],\"application/vnd.eszigno3+xml\":[\"es3\",\"et3\"],\"application/vnd.ezpix-album\":[\"ez2\"],\"application/vnd.ezpix-package\":[\"ez3\"],\"application/vnd.fdf\":[\"fdf\"],\"application/vnd.fdsn.mseed\":[\"mseed\"],\"application/vnd.fdsn.seed\":[\"seed\",\"dataless\"],\"application/vnd.flographit\":[\"gph\"],\"application/vnd.fluxtime.clip\":[\"ftc\"],\"application/vnd.framemaker\":[\"fm\",\"frame\",\"maker\",\"book\"],\"application/vnd.frogans.fnc\":[\"fnc\"],\"application/vnd.frogans.ltf\":[\"ltf\"],\"application/vnd.fsc.weblaunch\":[\"fsc\"],\"application/vnd.fujitsu.oasys\":[\"oas\"],\"application/vnd.fujitsu.oasys2\":[\"oa2\"],\"application/vnd.fujitsu.oasys3\":[\"oa3\"],\"application/vnd.fujitsu.oasysgp\":[\"fg5\"],\"application/vnd.fujitsu.oasysprs\":[\"bh2\"],\"application/vnd.fujixerox.ddd\":[\"ddd\"],\"application/vnd.fujixerox.docuworks\":[\"xdw\"],\"application/vnd.fujixerox.docuworks.binder\":[\"xbd\"],\"application/vnd.fuzzysheet\":[\"fzs\"],\"application/vnd.genomatix.tuxedo\":[\"txd\"],\"application/vnd.geogebra.file\":[\"ggb\"],\"application/vnd.geogebra.tool\":[\"ggt\"],\"application/vnd.geometry-explorer\":[\"gex\",\"gre\"],\"application/vnd.geonext\":[\"gxt\"],\"application/vnd.geoplan\":[\"g2w\"],\"application/vnd.geospace\":[\"g3w\"],\"application/vnd.gmx\":[\"gmx\"],\"application/vnd.google-apps.document\":[\"gdoc\"],\"application/vnd.google-apps.presentation\":[\"gslides\"],\"application/vnd.google-apps.spreadsheet\":[\"gsheet\"],\"application/vnd.google-earth.kml+xml\":[\"kml\"],\"application/vnd.google-earth.kmz\":[\"kmz\"],\"application/vnd.grafeq\":[\"gqf\",\"gqs\"],\"application/vnd.groove-account\":[\"gac\"],\"application/vnd.groove-help\":[\"ghf\"],\"application/vnd.groove-identity-message\":[\"gim\"],\"application/vnd.groove-injector\":[\"grv\"],\"application/vnd.groove-tool-message\":[\"gtm\"],\"application/vnd.groove-tool-template\":[\"tpl\"],\"application/vnd.groove-vcard\":[\"vcg\"],\"application/vnd.hal+xml\":[\"hal\"],\"application/vnd.handheld-entertainment+xml\":[\"zmm\"],\"application/vnd.hbci\":[\"hbci\"],\"application/vnd.hhe.lesson-player\":[\"les\"],\"application/vnd.hp-hpgl\":[\"hpgl\"],\"application/vnd.hp-hpid\":[\"hpid\"],\"application/vnd.hp-hps\":[\"hps\"],\"application/vnd.hp-jlyt\":[\"jlt\"],\"application/vnd.hp-pcl\":[\"pcl\"],\"application/vnd.hp-pclxl\":[\"pclxl\"],\"application/vnd.hydrostatix.sof-data\":[\"sfd-hdstx\"],\"application/vnd.ibm.minipay\":[\"mpy\"],\"application/vnd.ibm.modcap\":[\"afp\",\"listafp\",\"list3820\"],\"application/vnd.ibm.rights-management\":[\"irm\"],\"application/vnd.ibm.secure-container\":[\"sc\"],\"application/vnd.iccprofile\":[\"icc\",\"icm\"],\"application/vnd.igloader\":[\"igl\"],\"application/vnd.immervision-ivp\":[\"ivp\"],\"application/vnd.immervision-ivu\":[\"ivu\"],\"application/vnd.insors.igm\":[\"igm\"],\"application/vnd.intercon.formnet\":[\"xpw\",\"xpx\"],\"application/vnd.intergeo\":[\"i2g\"],\"application/vnd.intu.qbo\":[\"qbo\"],\"application/vnd.intu.qfx\":[\"qfx\"],\"application/vnd.ipunplugged.rcprofile\":[\"rcprofile\"],\"application/vnd.irepository.package+xml\":[\"irp\"],
\"application/vnd.is-xpr\":[\"xpr\"],\"application/vnd.isac.fcs\":[\"fcs\"],\"application/vnd.jam\":[\"jam\"],\"application/vnd.jcp.javame.midlet-rms\":[\"rms\"],\"application/vnd.jisp\":[\"jisp\"],\"application/vnd.joost.joda-archive\":[\"joda\"],\"application/vnd.kahootz\":[\"ktz\",\"ktr\"],\"application/vnd.kde.karbon\":[\"karbon\"],\"application/vnd.kde.kchart\":[\"chrt\"],\"application/vnd.kde.kformula\":[\"kfo\"],\"application/vnd.kde.kivio\":[\"flw\"],\"application/vnd.kde.kontour\":[\"kon\"],\"application/vnd.kde.kpresenter\":[\"kpr\",\"kpt\"],\"application/vnd.kde.kspread\":[\"ksp\"],\"application/vnd.kde.kword\":[\"kwd\",\"kwt\"],\"application/vnd.kenameaapp\":[\"htke\"],\"application/vnd.kidspiration\":[\"kia\"],\"application/vnd.kinar\":[\"kne\",\"knp\"],\"application/vnd.koan\":[\"skp\",\"skd\",\"skt\",\"skm\"],\"application/vnd.kodak-descriptor\":[\"sse\"],\"application/vnd.las.las+xml\":[\"lasxml\"],\"application/vnd.llamagraphics.life-balance.desktop\":[\"lbd\"],\"application/vnd.llamagraphics.life-balance.exchange+xml\":[\"lbe\"],\"application/vnd.lotus-1-2-3\":[\"123\"],\"application/vnd.lotus-approach\":[\"apr\"],\"application/vnd.lotus-freelance\":[\"pre\"],\"application/vnd.lotus-notes\":[\"nsf\"],\"application/vnd.lotus-organizer\":[\"org\"],\"application/vnd.lotus-screencam\":[\"scm\"],\"application/vnd.lotus-wordpro\":[\"lwp\"],\"application/vnd.macports.portpkg\":[\"portpkg\"],\"application/vnd.mapbox-vector-tile\":[\"mvt\"],\"application/vnd.mcd\":[\"mcd\"],\"application/vnd.medcalcdata\":[\"mc1\"],\"application/vnd.mediastation.cdkey\":[\"cdkey\"],\"application/vnd.mfer\":[\"mwf\"],\"application/vnd.mfmp\":[\"mfm\"],\"application/vnd.micrografx.flo\":[\"flo\"],\"application/vnd.micrografx.igx\":[\"igx\"],\"application/vnd.mif\":[\"mif\"],\"application/vnd.mobius.daf\":[\"daf\"],\"application/vnd.mobius.dis\":[\"dis\"],\"application/vnd.mobius.mbk\":[\"mbk\"],\"application/vnd.mobius.mqy\":[\"mqy\"],\"application/vnd.mobius.msl\":[\"msl\"],\"application/vnd.mobius.plc\":[\"plc\"],\"application/vnd.mobius.txf\":[\"txf\"],\"application/vnd.mophun.application\":[\"mpn\"],\"application/vnd.mophun.certificate\":[\"mpc\"],\"application/vnd.mozilla.xul+xml\":[\"xul\"],\"application/vnd.ms-artgalry\":[\"cil\"],\"application/vnd.ms-cab-compressed\":[\"cab\"],\"application/vnd.ms-excel\":[\"xls\",\"xlm\",\"xla\",\"xlc\",\"xlt\",\"xlw\"],\"application/vnd.ms-excel.addin.macroenabled.12\":[\"xlam\"],\"application/vnd.ms-excel.sheet.binary.macroenabled.12\":[\"xlsb\"],\"application/vnd.ms-excel.sheet.macroenabled.12\":[\"xlsm\"],\"application/vnd.ms-excel.template.macroenabled.12\":[\"xltm\"],\"application/vnd.ms-fontobject\":[\"eot\"],\"application/vnd.ms-htmlhelp\":[\"chm\"],\"application/vnd.ms-ims\":[\"ims\"],\"application/vnd.ms-lrm\":[\"lrm\"],\"application/vnd.ms-officetheme\":[\"thmx\"],\"application/vnd.ms-outlook\":[\"msg\"],\"application/vnd.ms-pki.seccat\":[\"cat\"],\"application/vnd.ms-pki.stl\":[\"*stl\"],\"application/vnd.ms-powerpoint\":[\"ppt\",\"pps\",\"pot\"],\"application/vnd.ms-powerpoint.addin.macroenabled.12\":[\"ppam\"],\"application/vnd.ms-powerpoint.presentation.macroenabled.12\":[\"pptm\"],\"application/vnd.ms-powerpoint.slide.macroenabled.12\":[\"sldm\"],\"application/vnd.ms-powerpoint.slideshow.macroenabled.12\":[\"ppsm\"],\"application/vnd.ms-powerpoint.template.macroenabled.12\":[\"potm\"],\"application/vnd.ms-project\":[\"mpp\",\"mpt\"],\"application/vnd.ms-word.document.macroenabled.12\":[\"docm\"],\"application/vnd.ms-word.template.macroenabled
.12\":[\"dotm\"],\"application/vnd.ms-works\":[\"wps\",\"wks\",\"wcm\",\"wdb\"],\"application/vnd.ms-wpl\":[\"wpl\"],\"application/vnd.ms-xpsdocument\":[\"xps\"],\"application/vnd.mseq\":[\"mseq\"],\"application/vnd.musician\":[\"mus\"],\"application/vnd.muvee.style\":[\"msty\"],\"application/vnd.mynfc\":[\"taglet\"],\"application/vnd.neurolanguage.nlu\":[\"nlu\"],\"application/vnd.nitf\":[\"ntf\",\"nitf\"],\"application/vnd.noblenet-directory\":[\"nnd\"],\"application/vnd.noblenet-sealer\":[\"nns\"],\"application/vnd.noblenet-web\":[\"nnw\"],\"application/vnd.nokia.n-gage.ac+xml\":[\"*ac\"],\"application/vnd.nokia.n-gage.data\":[\"ngdat\"],\"application/vnd.nokia.n-gage.symbian.install\":[\"n-gage\"],\"application/vnd.nokia.radio-preset\":[\"rpst\"],\"application/vnd.nokia.radio-presets\":[\"rpss\"],\"application/vnd.novadigm.edm\":[\"edm\"],\"application/vnd.novadigm.edx\":[\"edx\"],\"application/vnd.novadigm.ext\":[\"ext\"],\"application/vnd.oasis.opendocument.chart\":[\"odc\"],\"application/vnd.oasis.opendocument.chart-template\":[\"otc\"],\"application/vnd.oasis.opendocument.database\":[\"odb\"],\"application/vnd.oasis.opendocument.formula\":[\"odf\"],\"application/vnd.oasis.opendocument.formula-template\":[\"odft\"],\"application/vnd.oasis.opendocument.graphics\":[\"odg\"],\"application/vnd.oasis.opendocument.graphics-template\":[\"otg\"],\"application/vnd.oasis.opendocument.image\":[\"odi\"],\"application/vnd.oasis.opendocument.image-template\":[\"oti\"],\"application/vnd.oasis.opendocument.presentation\":[\"odp\"],\"application/vnd.oasis.opendocument.presentation-template\":[\"otp\"],\"application/vnd.oasis.opendocument.spreadsheet\":[\"ods\"],\"application/vnd.oasis.opendocument.spreadsheet-template\":[\"ots\"],\"application/vnd.oasis.opendocument.text\":[\"odt\"],\"application/vnd.oasis.opendocument.text-master\":[\"odm\"],\"application/vnd.oasis.opendocument.text-template\":[\"ott\"],\"application/vnd.oasis.opendocument.text-web\":[\"oth\"],\"application/vnd.olpc-sugar\":[\"xo\"],\"application/vnd.oma.dd2+xml\":[\"dd2\"],\"application/vnd.openblox.game+xml\":[\"obgx\"],\"application/vnd.openofficeorg.extension\":[\"oxt\"],\"application/vnd.openstreetmap.data+xml\":[\"osm\"],\"application/vnd.openxmlformats-officedocument.presentationml.presentation\":[\"pptx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slide\":[\"sldx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slideshow\":[\"ppsx\"],\"application/vnd.openxmlformats-officedocument.presentationml.template\":[\"potx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\":[\"xlsx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.template\":[\"xltx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.document\":[\"docx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.template\":[\"dotx\"],\"application/vnd.osgeo.mapguide.package\":[\"mgp\"],\"application/vnd.osgi.dp\":[\"dp\"],\"application/vnd.osgi.subsystem\":[\"esa\"],\"application/vnd.palm\":[\"pdb\",\"pqa\",\"oprc\"],\"application/vnd.pawaafile\":[\"paw\"],\"application/vnd.pg.format\":[\"str\"],\"application/vnd.pg.osasli\":[\"ei6\"],\"application/vnd.picsel\":[\"efif\"],\"application/vnd.pmi.widget\":[\"wg\"],\"application/vnd.pocketlearn\":[\"plf\"],\"application/vnd.powerbuilder6\":[\"pbd\"],\"application/vnd.previewsystems.box\":[\"box\"],\"application/vnd.proteus.magazine\":[\"mgz\"],\"application/vnd.publishare-delta-tree\":[\"qps\"],\"application/vnd.pvi.ptid1\":[
\"ptid\"],\"application/vnd.quark.quarkxpress\":[\"qxd\",\"qxt\",\"qwd\",\"qwt\",\"qxl\",\"qxb\"],\"application/vnd.rar\":[\"rar\"],\"application/vnd.realvnc.bed\":[\"bed\"],\"application/vnd.recordare.musicxml\":[\"mxl\"],\"application/vnd.recordare.musicxml+xml\":[\"musicxml\"],\"application/vnd.rig.cryptonote\":[\"cryptonote\"],\"application/vnd.rim.cod\":[\"cod\"],\"application/vnd.rn-realmedia\":[\"rm\"],\"application/vnd.rn-realmedia-vbr\":[\"rmvb\"],\"application/vnd.route66.link66+xml\":[\"link66\"],\"application/vnd.sailingtracker.track\":[\"st\"],\"application/vnd.seemail\":[\"see\"],\"application/vnd.sema\":[\"sema\"],\"application/vnd.semd\":[\"semd\"],\"application/vnd.semf\":[\"semf\"],\"application/vnd.shana.informed.formdata\":[\"ifm\"],\"application/vnd.shana.informed.formtemplate\":[\"itp\"],\"application/vnd.shana.informed.interchange\":[\"iif\"],\"application/vnd.shana.informed.package\":[\"ipk\"],\"application/vnd.simtech-mindmapper\":[\"twd\",\"twds\"],\"application/vnd.smaf\":[\"mmf\"],\"application/vnd.smart.teacher\":[\"teacher\"],\"application/vnd.software602.filler.form+xml\":[\"fo\"],\"application/vnd.solent.sdkm+xml\":[\"sdkm\",\"sdkd\"],\"application/vnd.spotfire.dxp\":[\"dxp\"],\"application/vnd.spotfire.sfs\":[\"sfs\"],\"application/vnd.stardivision.calc\":[\"sdc\"],\"application/vnd.stardivision.draw\":[\"sda\"],\"application/vnd.stardivision.impress\":[\"sdd\"],\"application/vnd.stardivision.math\":[\"smf\"],\"application/vnd.stardivision.writer\":[\"sdw\",\"vor\"],\"application/vnd.stardivision.writer-global\":[\"sgl\"],\"application/vnd.stepmania.package\":[\"smzip\"],\"application/vnd.stepmania.stepchart\":[\"sm\"],\"application/vnd.sun.wadl+xml\":[\"wadl\"],\"application/vnd.sun.xml.calc\":[\"sxc\"],\"application/vnd.sun.xml.calc.template\":[\"stc\"],\"application/vnd.sun.xml.draw\":[\"sxd\"],\"application/vnd.sun.xml.draw.template\":[\"std\"],\"application/vnd.sun.xml.impress\":[\"sxi\"],\"application/vnd.sun.xml.impress.template\":[\"sti\"],\"application/vnd.sun.xml.math\":[\"sxm\"],\"application/vnd.sun.xml.writer\":[\"sxw\"],\"application/vnd.sun.xml.writer.global\":[\"sxg\"],\"application/vnd.sun.xml.writer.template\":[\"stw\"],\"application/vnd.sus-calendar\":[\"sus\",\"susp\"],\"application/vnd.svd\":[\"svd\"],\"application/vnd.symbian.install\":[\"sis\",\"sisx\"],\"application/vnd.syncml+xml\":[\"xsm\"],\"application/vnd.syncml.dm+wbxml\":[\"bdm\"],\"application/vnd.syncml.dm+xml\":[\"xdm\"],\"application/vnd.syncml.dmddf+xml\":[\"ddf\"],\"application/vnd.tao.intent-module-archive\":[\"tao\"],\"application/vnd.tcpdump.pcap\":[\"pcap\",\"cap\",\"dmp\"],\"application/vnd.tmobile-livetv\":[\"tmo\"],\"application/vnd.trid.tpt\":[\"tpt\"],\"application/vnd.triscape.mxs\":[\"mxs\"],\"application/vnd.trueapp\":[\"tra\"],\"application/vnd.ufdl\":[\"ufd\",\"ufdl\"],\"application/vnd.uiq.theme\":[\"utz\"],\"application/vnd.umajin\":[\"umj\"],\"application/vnd.unity\":[\"unityweb\"],\"application/vnd.uoml+xml\":[\"uoml\"],\"application/vnd.vcx\":[\"vcx\"],\"application/vnd.visio\":[\"vsd\",\"vst\",\"vss\",\"vsw\"],\"application/vnd.visionary\":[\"vis\"],\"application/vnd.vsf\":[\"vsf\"],\"application/vnd.wap.wbxml\":[\"wbxml\"],\"application/vnd.wap.wmlc\":[\"wmlc\"],\"application/vnd.wap.wmlscriptc\":[\"wmlsc\"],\"application/vnd.webturbo\":[\"wtb\"],\"application/vnd.wolfram.player\":[\"nbp\"],\"application/vnd.wordperfect\":[\"wpd\"],\"application/vnd.wqd\":[\"wqd\"],\"application/vnd.wt.stf\":[\"stf\"],\"application/vnd.xara\":[\"xar\"],\"application/v
nd.xfdl\":[\"xfdl\"],\"application/vnd.yamaha.hv-dic\":[\"hvd\"],\"application/vnd.yamaha.hv-script\":[\"hvs\"],\"application/vnd.yamaha.hv-voice\":[\"hvp\"],\"application/vnd.yamaha.openscoreformat\":[\"osf\"],\"application/vnd.yamaha.openscoreformat.osfpvg+xml\":[\"osfpvg\"],\"application/vnd.yamaha.smaf-audio\":[\"saf\"],\"application/vnd.yamaha.smaf-phrase\":[\"spf\"],\"application/vnd.yellowriver-custom-menu\":[\"cmp\"],\"application/vnd.zul\":[\"zir\",\"zirz\"],\"application/vnd.zzazz.deck+xml\":[\"zaz\"],\"application/x-7z-compressed\":[\"7z\"],\"application/x-abiword\":[\"abw\"],\"application/x-ace-compressed\":[\"ace\"],\"application/x-apple-diskimage\":[\"*dmg\"],\"application/x-arj\":[\"arj\"],\"application/x-authorware-bin\":[\"aab\",\"x32\",\"u32\",\"vox\"],\"application/x-authorware-map\":[\"aam\"],\"application/x-authorware-seg\":[\"aas\"],\"application/x-bcpio\":[\"bcpio\"],\"application/x-bdoc\":[\"*bdoc\"],\"application/x-bittorrent\":[\"torrent\"],\"application/x-blorb\":[\"blb\",\"blorb\"],\"application/x-bzip\":[\"bz\"],\"application/x-bzip2\":[\"bz2\",\"boz\"],\"application/x-cbr\":[\"cbr\",\"cba\",\"cbt\",\"cbz\",\"cb7\"],\"application/x-cdlink\":[\"vcd\"],\"application/x-cfs-compressed\":[\"cfs\"],\"application/x-chat\":[\"chat\"],\"application/x-chess-pgn\":[\"pgn\"],\"application/x-chrome-extension\":[\"crx\"],\"application/x-cocoa\":[\"cco\"],\"application/x-conference\":[\"nsc\"],\"application/x-cpio\":[\"cpio\"],\"application/x-csh\":[\"csh\"],\"application/x-debian-package\":[\"*deb\",\"udeb\"],\"application/x-dgc-compressed\":[\"dgc\"],\"application/x-director\":[\"dir\",\"dcr\",\"dxr\",\"cst\",\"cct\",\"cxt\",\"w3d\",\"fgd\",\"swa\"],\"application/x-doom\":[\"wad\"],\"application/x-dtbncx+xml\":[\"ncx\"],\"application/x-dtbook+xml\":[\"dtb\"],\"application/x-dtbresource+xml\":[\"res\"],\"application/x-dvi\":[\"dvi\"],\"application/x-envoy\":[\"evy\"],\"application/x-eva\":[\"eva\"],\"application/x-font-bdf\":[\"bdf\"],\"application/x-font-ghostscript\":[\"gsf\"],\"application/x-font-linux-psf\":[\"psf\"],\"application/x-font-pcf\":[\"pcf\"],\"application/x-font-snf\":[\"snf\"],\"application/x-font-type1\":[\"pfa\",\"pfb\",\"pfm\",\"afm\"],\"application/x-freearc\":[\"arc\"],\"application/x-futuresplash\":[\"spl\"],\"application/x-gca-compressed\":[\"gca\"],\"application/x-glulx\":[\"ulx\"],\"application/x-gnumeric\":[\"gnumeric\"],\"application/x-gramps-xml\":[\"gramps\"],\"application/x-gtar\":[\"gtar\"],\"application/x-hdf\":[\"hdf\"],\"application/x-httpd-php\":[\"php\"],\"application/x-install-instructions\":[\"install\"],\"application/x-iso9660-image\":[\"*iso\"],\"application/x-iwork-keynote-sffkey\":[\"*key\"],\"application/x-iwork-numbers-sffnumbers\":[\"*numbers\"],\"application/x-iwork-pages-sffpages\":[\"*pages\"],\"application/x-java-archive-diff\":[\"jardiff\"],\"application/x-java-jnlp-file\":[\"jnlp\"],\"application/x-keepass2\":[\"kdbx\"],\"application/x-latex\":[\"latex\"],\"application/x-lua-bytecode\":[\"luac\"],\"application/x-lzh-compressed\":[\"lzh\",\"lha\"],\"application/x-makeself\":[\"run\"],\"application/x-mie\":[\"mie\"],\"application/x-mobipocket-ebook\":[\"prc\",\"mobi\"],\"application/x-ms-application\":[\"application\"],\"application/x-ms-shortcut\":[\"lnk\"],\"application/x-ms-wmd\":[\"wmd\"],\"application/x-ms-wmz\":[\"wmz\"],\"application/x-ms-xbap\":[\"xbap\"],\"application/x-msaccess\":[\"mdb\"],\"application/x-msbinder\":[\"obd\"],\"application/x-mscardfile\":[\"crd\"],\"application/x-msclip\":[\"clp\"],\"application/x-ms
dos-program\":[\"*exe\"],\"application/x-msdownload\":[\"*exe\",\"*dll\",\"com\",\"bat\",\"*msi\"],\"application/x-msmediaview\":[\"mvb\",\"m13\",\"m14\"],\"application/x-msmetafile\":[\"*wmf\",\"*wmz\",\"*emf\",\"emz\"],\"application/x-msmoney\":[\"mny\"],\"application/x-mspublisher\":[\"pub\"],\"application/x-msschedule\":[\"scd\"],\"application/x-msterminal\":[\"trm\"],\"application/x-mswrite\":[\"wri\"],\"application/x-netcdf\":[\"nc\",\"cdf\"],\"application/x-ns-proxy-autoconfig\":[\"pac\"],\"application/x-nzb\":[\"nzb\"],\"application/x-perl\":[\"pl\",\"pm\"],\"application/x-pilot\":[\"*prc\",\"*pdb\"],\"application/x-pkcs12\":[\"p12\",\"pfx\"],\"application/x-pkcs7-certificates\":[\"p7b\",\"spc\"],\"application/x-pkcs7-certreqresp\":[\"p7r\"],\"application/x-rar-compressed\":[\"*rar\"],\"application/x-redhat-package-manager\":[\"rpm\"],\"application/x-research-info-systems\":[\"ris\"],\"application/x-sea\":[\"sea\"],\"application/x-sh\":[\"sh\"],\"application/x-shar\":[\"shar\"],\"application/x-shockwave-flash\":[\"swf\"],\"application/x-silverlight-app\":[\"xap\"],\"application/x-sql\":[\"sql\"],\"application/x-stuffit\":[\"sit\"],\"application/x-stuffitx\":[\"sitx\"],\"application/x-subrip\":[\"srt\"],\"application/x-sv4cpio\":[\"sv4cpio\"],\"application/x-sv4crc\":[\"sv4crc\"],\"application/x-t3vm-image\":[\"t3\"],\"application/x-tads\":[\"gam\"],\"application/x-tar\":[\"tar\"],\"application/x-tcl\":[\"tcl\",\"tk\"],\"application/x-tex\":[\"tex\"],\"application/x-tex-tfm\":[\"tfm\"],\"application/x-texinfo\":[\"texinfo\",\"texi\"],\"application/x-tgif\":[\"*obj\"],\"application/x-ustar\":[\"ustar\"],\"application/x-virtualbox-hdd\":[\"hdd\"],\"application/x-virtualbox-ova\":[\"ova\"],\"application/x-virtualbox-ovf\":[\"ovf\"],\"application/x-virtualbox-vbox\":[\"vbox\"],\"application/x-virtualbox-vbox-extpack\":[\"vbox-extpack\"],\"application/x-virtualbox-vdi\":[\"vdi\"],\"application/x-virtualbox-vhd\":[\"vhd\"],\"application/x-virtualbox-vmdk\":[\"vmdk\"],\"application/x-wais-source\":[\"src\"],\"application/x-web-app-manifest+json\":[\"webapp\"],\"application/x-x509-ca-cert\":[\"der\",\"crt\",\"pem\"],\"application/x-xfig\":[\"fig\"],\"application/x-xliff+xml\":[\"*xlf\"],\"application/x-xpinstall\":[\"xpi\"],\"application/x-xz\":[\"xz\"],\"application/x-zmachine\":[\"z1\",\"z2\",\"z3\",\"z4\",\"z5\",\"z6\",\"z7\",\"z8\"],\"audio/vnd.dece.audio\":[\"uva\",\"uvva\"],\"audio/vnd.digital-winds\":[\"eol\"],\"audio/vnd.dra\":[\"dra\"],\"audio/vnd.dts\":[\"dts\"],\"audio/vnd.dts.hd\":[\"dtshd\"],\"audio/vnd.lucent.voice\":[\"lvp\"],\"audio/vnd.ms-playready.media.pya\":[\"pya\"],\"audio/vnd.nuera.ecelp4800\":[\"ecelp4800\"],\"audio/vnd.nuera.ecelp7470\":[\"ecelp7470\"],\"audio/vnd.nuera.ecelp9600\":[\"ecelp9600\"],\"audio/vnd.rip\":[\"rip\"],\"audio/x-aac\":[\"aac\"],\"audio/x-aiff\":[\"aif\",\"aiff\",\"aifc\"],\"audio/x-caf\":[\"caf\"],\"audio/x-flac\":[\"flac\"],\"audio/x-m4a\":[\"*m4a\"],\"audio/x-matroska\":[\"mka\"],\"audio/x-mpegurl\":[\"m3u\"],\"audio/x-ms-wax\":[\"wax\"],\"audio/x-ms-wma\":[\"wma\"],\"audio/x-pn-realaudio\":[\"ram\",\"ra\"],\"audio/x-pn-realaudio-plugin\":[\"rmp\"],\"audio/x-realaudio\":[\"*ra\"],\"audio/x-wav\":[\"*wav\"],\"chemical/x-cdx\":[\"cdx\"],\"chemical/x-cif\":[\"cif\"],\"chemical/x-cmdf\":[\"cmdf\"],\"chemical/x-cml\":[\"cml\"],\"chemical/x-csml\":[\"csml\"],\"chemical/x-xyz\":[\"xyz\"],\"image/prs.btif\":[\"btif\"],\"image/prs.pti\":[\"pti\"],\"image/vnd.adobe.photoshop\":[\"psd\"],\"image/vnd.airzip.accelerator.azv\":[\"azv\"],\"image/vnd.dece.gr
aphic\":[\"uvi\",\"uvvi\",\"uvg\",\"uvvg\"],\"image/vnd.djvu\":[\"djvu\",\"djv\"],\"image/vnd.dvb.subtitle\":[\"*sub\"],\"image/vnd.dwg\":[\"dwg\"],\"image/vnd.dxf\":[\"dxf\"],\"image/vnd.fastbidsheet\":[\"fbs\"],\"image/vnd.fpx\":[\"fpx\"],\"image/vnd.fst\":[\"fst\"],\"image/vnd.fujixerox.edmics-mmr\":[\"mmr\"],\"image/vnd.fujixerox.edmics-rlc\":[\"rlc\"],\"image/vnd.microsoft.icon\":[\"ico\"],\"image/vnd.ms-dds\":[\"dds\"],\"image/vnd.ms-modi\":[\"mdi\"],\"image/vnd.ms-photo\":[\"wdp\"],\"image/vnd.net-fpx\":[\"npx\"],\"image/vnd.pco.b16\":[\"b16\"],\"image/vnd.tencent.tap\":[\"tap\"],\"image/vnd.valve.source.texture\":[\"vtf\"],\"image/vnd.wap.wbmp\":[\"wbmp\"],\"image/vnd.xiff\":[\"xif\"],\"image/vnd.zbrush.pcx\":[\"pcx\"],\"image/x-3ds\":[\"3ds\"],\"image/x-cmu-raster\":[\"ras\"],\"image/x-cmx\":[\"cmx\"],\"image/x-freehand\":[\"fh\",\"fhc\",\"fh4\",\"fh5\",\"fh7\"],\"image/x-icon\":[\"*ico\"],\"image/x-jng\":[\"jng\"],\"image/x-mrsid-image\":[\"sid\"],\"image/x-ms-bmp\":[\"*bmp\"],\"image/x-pcx\":[\"*pcx\"],\"image/x-pict\":[\"pic\",\"pct\"],\"image/x-portable-anymap\":[\"pnm\"],\"image/x-portable-bitmap\":[\"pbm\"],\"image/x-portable-graymap\":[\"pgm\"],\"image/x-portable-pixmap\":[\"ppm\"],\"image/x-rgb\":[\"rgb\"],\"image/x-tga\":[\"tga\"],\"image/x-xbitmap\":[\"xbm\"],\"image/x-xpixmap\":[\"xpm\"],\"image/x-xwindowdump\":[\"xwd\"],\"message/vnd.wfa.wsc\":[\"wsc\"],\"model/vnd.collada+xml\":[\"dae\"],\"model/vnd.dwf\":[\"dwf\"],\"model/vnd.gdl\":[\"gdl\"],\"model/vnd.gtw\":[\"gtw\"],\"model/vnd.mts\":[\"mts\"],\"model/vnd.opengex\":[\"ogex\"],\"model/vnd.parasolid.transmit.binary\":[\"x_b\"],\"model/vnd.parasolid.transmit.text\":[\"x_t\"],\"model/vnd.sap.vds\":[\"vds\"],\"model/vnd.usdz+zip\":[\"usdz\"],\"model/vnd.valve.source.compiled-map\":[\"bsp\"],\"model/vnd.vtu\":[\"vtu\"],\"text/prs.lines.tag\":[\"dsc\"],\"text/vnd.curl\":[\"curl\"],\"text/vnd.curl.dcurl\":[\"dcurl\"],\"text/vnd.curl.mcurl\":[\"mcurl\"],\"text/vnd.curl.scurl\":[\"scurl\"],\"text/vnd.dvb.subtitle\":[\"sub\"],\"text/vnd.fly\":[\"fly\"],\"text/vnd.fmi.flexstor\":[\"flx\"],\"text/vnd.graphviz\":[\"gv\"],\"text/vnd.in3d.3dml\":[\"3dml\"],\"text/vnd.in3d.spot\":[\"spot\"],\"text/vnd.sun.j2me.app-descriptor\":[\"jad\"],\"text/vnd.wap.wml\":[\"wml\"],\"text/vnd.wap.wmlscript\":[\"wmls\"],\"text/x-asm\":[\"s\",\"asm\"],\"text/x-c\":[\"c\",\"cc\",\"cxx\",\"cpp\",\"h\",\"hh\",\"dic\"],\"text/x-component\":[\"htc\"],\"text/x-fortran\":[\"f\",\"for\",\"f77\",\"f90\"],\"text/x-handlebars-template\":[\"hbs\"],\"text/x-java-source\":[\"java\"],\"text/x-lua\":[\"lua\"],\"text/x-markdown\":[\"mkd\"],\"text/x-nfo\":[\"nfo\"],\"text/x-opml\":[\"opml\"],\"text/x-org\":[\"*org\"],\"text/x-pascal\":[\"p\",\"pas\"],\"text/x-processing\":[\"pde\"],\"text/x-sass\":[\"sass\"],\"text/x-scss\":[\"scss\"],\"text/x-setext\":[\"etx\"],\"text/x-sfv\":[\"sfv\"],\"text/x-suse-ymp\":[\"ymp\"],\"text/x-uuencode\":[\"uu\"],\"text/x-vcalendar\":[\"vcs\"],\"text/x-vcard\":[\"vcf\"],\"video/vnd.dece.hd\":[\"uvh\",\"uvvh\"],\"video/vnd.dece.mobile\":[\"uvm\",\"uvvm\"],\"video/vnd.dece.pd\":[\"uvp\",\"uvvp\"],\"video/vnd.dece.sd\":[\"uvs\",\"uvvs\"],\"video/vnd.dece.video\":[\"uvv\",\"uvvv\"],\"video/vnd.dvb.file\":[\"dvb\"],\"video/vnd.fvt\":[\"fvt\"],\"video/vnd.mpegurl\":[\"mxu\",\"m4u\"],\"video/vnd.ms-playready.media.pyv\":[\"pyv\"],\"video/vnd.uvvu.mp4\":[\"uvu\",\"uvvu\"],\"video/vnd.vivo\":[\"viv\"],\"video/x-f4v\":[\"f4v\"],\"video/x-fli\":[\"fli\"],\"video/x-flv\":[\"flv\"],\"video/x-m4v\":[\"m4v\"],\"video/x-matroska\":[\"mkv\",\"mk3d\",\
"mks\"],\"video/x-mng\":[\"mng\"],\"video/x-ms-asf\":[\"asf\",\"asx\"],\"video/x-ms-vob\":[\"vob\"],\"video/x-ms-wm\":[\"wm\"],\"video/x-ms-wmv\":[\"wmv\"],\"video/x-ms-wmx\":[\"wmx\"],\"video/x-ms-wvx\":[\"wvx\"],\"video/x-msvideo\":[\"avi\"],\"video/x-sgi-movie\":[\"movie\"],\"video/x-smv\":[\"smv\"],\"x-conference/x-cooltalk\":[\"ice\"]};","module.exports = {\"application/andrew-inset\":[\"ez\"],\"application/applixware\":[\"aw\"],\"application/atom+xml\":[\"atom\"],\"application/atomcat+xml\":[\"atomcat\"],\"application/atomdeleted+xml\":[\"atomdeleted\"],\"application/atomsvc+xml\":[\"atomsvc\"],\"application/atsc-dwd+xml\":[\"dwd\"],\"application/atsc-held+xml\":[\"held\"],\"application/atsc-rsat+xml\":[\"rsat\"],\"application/bdoc\":[\"bdoc\"],\"application/calendar+xml\":[\"xcs\"],\"application/ccxml+xml\":[\"ccxml\"],\"application/cdfx+xml\":[\"cdfx\"],\"application/cdmi-capability\":[\"cdmia\"],\"application/cdmi-container\":[\"cdmic\"],\"application/cdmi-domain\":[\"cdmid\"],\"application/cdmi-object\":[\"cdmio\"],\"application/cdmi-queue\":[\"cdmiq\"],\"application/cu-seeme\":[\"cu\"],\"application/dash+xml\":[\"mpd\"],\"application/davmount+xml\":[\"davmount\"],\"application/docbook+xml\":[\"dbk\"],\"application/dssc+der\":[\"dssc\"],\"application/dssc+xml\":[\"xdssc\"],\"application/ecmascript\":[\"es\",\"ecma\"],\"application/emma+xml\":[\"emma\"],\"application/emotionml+xml\":[\"emotionml\"],\"application/epub+zip\":[\"epub\"],\"application/exi\":[\"exi\"],\"application/express\":[\"exp\"],\"application/fdt+xml\":[\"fdt\"],\"application/font-tdpfr\":[\"pfr\"],\"application/geo+json\":[\"geojson\"],\"application/gml+xml\":[\"gml\"],\"application/gpx+xml\":[\"gpx\"],\"application/gxf\":[\"gxf\"],\"application/gzip\":[\"gz\"],\"application/hjson\":[\"hjson\"],\"application/hyperstudio\":[\"stk\"],\"application/inkml+xml\":[\"ink\",\"inkml\"],\"application/ipfix\":[\"ipfix\"],\"application/its+xml\":[\"its\"],\"application/java-archive\":[\"jar\",\"war\",\"ear\"],\"application/java-serialized-object\":[\"ser\"],\"application/java-vm\":[\"class\"],\"application/javascript\":[\"js\",\"mjs\"],\"application/json\":[\"json\",\"map\"],\"application/json5\":[\"json5\"],\"application/jsonml+json\":[\"jsonml\"],\"application/ld+json\":[\"jsonld\"],\"application/lgr+xml\":[\"lgr\"],\"application/lost+xml\":[\"lostxml\"],\"application/mac-binhex40\":[\"hqx\"],\"application/mac-compactpro\":[\"cpt\"],\"application/mads+xml\":[\"mads\"],\"application/manifest+json\":[\"webmanifest\"],\"application/marc\":[\"mrc\"],\"application/marcxml+xml\":[\"mrcx\"],\"application/mathematica\":[\"ma\",\"nb\",\"mb\"],\"application/mathml+xml\":[\"mathml\"],\"application/mbox\":[\"mbox\"],\"application/mediaservercontrol+xml\":[\"mscml\"],\"application/metalink+xml\":[\"metalink\"],\"application/metalink4+xml\":[\"meta4\"],\"application/mets+xml\":[\"mets\"],\"application/mmt-aei+xml\":[\"maei\"],\"application/mmt-usd+xml\":[\"musd\"],\"application/mods+xml\":[\"mods\"],\"application/mp21\":[\"m21\",\"mp21\"],\"application/mp4\":[\"mp4s\",\"m4p\"],\"application/msword\":[\"doc\",\"dot\"],\"application/mxf\":[\"mxf\"],\"application/n-quads\":[\"nq\"],\"application/n-triples\":[\"nt\"],\"application/node\":[\"cjs\"],\"application/octet-stream\":[\"bin\",\"dms\",\"lrf\",\"mar\",\"so\",\"dist\",\"distz\",\"pkg\",\"bpk\",\"dump\",\"elc\",\"deploy\",\"exe\",\"dll\",\"deb\",\"dmg\",\"iso\",\"img\",\"msi\",\"msp\",\"msm\",\"buffer\"],\"application/oda\":[\"oda\"],\"application/oebps-package+xml\":[\"opf\"],\"applic
ation/ogg\":[\"ogx\"],\"application/omdoc+xml\":[\"omdoc\"],\"application/onenote\":[\"onetoc\",\"onetoc2\",\"onetmp\",\"onepkg\"],\"application/oxps\":[\"oxps\"],\"application/p2p-overlay+xml\":[\"relo\"],\"application/patch-ops-error+xml\":[\"xer\"],\"application/pdf\":[\"pdf\"],\"application/pgp-encrypted\":[\"pgp\"],\"application/pgp-signature\":[\"asc\",\"sig\"],\"application/pics-rules\":[\"prf\"],\"application/pkcs10\":[\"p10\"],\"application/pkcs7-mime\":[\"p7m\",\"p7c\"],\"application/pkcs7-signature\":[\"p7s\"],\"application/pkcs8\":[\"p8\"],\"application/pkix-attr-cert\":[\"ac\"],\"application/pkix-cert\":[\"cer\"],\"application/pkix-crl\":[\"crl\"],\"application/pkix-pkipath\":[\"pkipath\"],\"application/pkixcmp\":[\"pki\"],\"application/pls+xml\":[\"pls\"],\"application/postscript\":[\"ai\",\"eps\",\"ps\"],\"application/provenance+xml\":[\"provx\"],\"application/pskc+xml\":[\"pskcxml\"],\"application/raml+yaml\":[\"raml\"],\"application/rdf+xml\":[\"rdf\",\"owl\"],\"application/reginfo+xml\":[\"rif\"],\"application/relax-ng-compact-syntax\":[\"rnc\"],\"application/resource-lists+xml\":[\"rl\"],\"application/resource-lists-diff+xml\":[\"rld\"],\"application/rls-services+xml\":[\"rs\"],\"application/route-apd+xml\":[\"rapd\"],\"application/route-s-tsid+xml\":[\"sls\"],\"application/route-usd+xml\":[\"rusd\"],\"application/rpki-ghostbusters\":[\"gbr\"],\"application/rpki-manifest\":[\"mft\"],\"application/rpki-roa\":[\"roa\"],\"application/rsd+xml\":[\"rsd\"],\"application/rss+xml\":[\"rss\"],\"application/rtf\":[\"rtf\"],\"application/sbml+xml\":[\"sbml\"],\"application/scvp-cv-request\":[\"scq\"],\"application/scvp-cv-response\":[\"scs\"],\"application/scvp-vp-request\":[\"spq\"],\"application/scvp-vp-response\":[\"spp\"],\"application/sdp\":[\"sdp\"],\"application/senml+xml\":[\"senmlx\"],\"application/sensml+xml\":[\"sensmlx\"],\"application/set-payment-initiation\":[\"setpay\"],\"application/set-registration-initiation\":[\"setreg\"],\"application/shf+xml\":[\"shf\"],\"application/sieve\":[\"siv\",\"sieve\"],\"application/smil+xml\":[\"smi\",\"smil\"],\"application/sparql-query\":[\"rq\"],\"application/sparql-results+xml\":[\"srx\"],\"application/srgs\":[\"gram\"],\"application/srgs+xml\":[\"grxml\"],\"application/sru+xml\":[\"sru\"],\"application/ssdl+xml\":[\"ssdl\"],\"application/ssml+xml\":[\"ssml\"],\"application/swid+xml\":[\"swidtag\"],\"application/tei+xml\":[\"tei\",\"teicorpus\"],\"application/thraud+xml\":[\"tfi\"],\"application/timestamped-data\":[\"tsd\"],\"application/toml\":[\"toml\"],\"application/trig\":[\"trig\"],\"application/ttml+xml\":[\"ttml\"],\"application/ubjson\":[\"ubj\"],\"application/urc-ressheet+xml\":[\"rsheet\"],\"application/urc-targetdesc+xml\":[\"td\"],\"application/voicexml+xml\":[\"vxml\"],\"application/wasm\":[\"wasm\"],\"application/widget\":[\"wgt\"],\"application/winhlp\":[\"hlp\"],\"application/wsdl+xml\":[\"wsdl\"],\"application/wspolicy+xml\":[\"wspolicy\"],\"application/xaml+xml\":[\"xaml\"],\"application/xcap-att+xml\":[\"xav\"],\"application/xcap-caps+xml\":[\"xca\"],\"application/xcap-diff+xml\":[\"xdf\"],\"application/xcap-el+xml\":[\"xel\"],\"application/xcap-ns+xml\":[\"xns\"],\"application/xenc+xml\":[\"xenc\"],\"application/xhtml+xml\":[\"xhtml\",\"xht\"],\"application/xliff+xml\":[\"xlf\"],\"application/xml\":[\"xml\",\"xsl\",\"xsd\",\"rng\"],\"application/xml-dtd\":[\"dtd\"],\"application/xop+xml\":[\"xop\"],\"application/xproc+xml\":[\"xpl\"],\"application/xslt+xml\":[\"*xsl\",\"xslt\"],\"application/xspf+xml\":[\"xspf\"
],\"application/xv+xml\":[\"mxml\",\"xhvml\",\"xvml\",\"xvm\"],\"application/yang\":[\"yang\"],\"application/yin+xml\":[\"yin\"],\"application/zip\":[\"zip\"],\"audio/3gpp\":[\"*3gpp\"],\"audio/adpcm\":[\"adp\"],\"audio/amr\":[\"amr\"],\"audio/basic\":[\"au\",\"snd\"],\"audio/midi\":[\"mid\",\"midi\",\"kar\",\"rmi\"],\"audio/mobile-xmf\":[\"mxmf\"],\"audio/mp3\":[\"*mp3\"],\"audio/mp4\":[\"m4a\",\"mp4a\"],\"audio/mpeg\":[\"mpga\",\"mp2\",\"mp2a\",\"mp3\",\"m2a\",\"m3a\"],\"audio/ogg\":[\"oga\",\"ogg\",\"spx\",\"opus\"],\"audio/s3m\":[\"s3m\"],\"audio/silk\":[\"sil\"],\"audio/wav\":[\"wav\"],\"audio/wave\":[\"*wav\"],\"audio/webm\":[\"weba\"],\"audio/xm\":[\"xm\"],\"font/collection\":[\"ttc\"],\"font/otf\":[\"otf\"],\"font/ttf\":[\"ttf\"],\"font/woff\":[\"woff\"],\"font/woff2\":[\"woff2\"],\"image/aces\":[\"exr\"],\"image/apng\":[\"apng\"],\"image/avif\":[\"avif\"],\"image/bmp\":[\"bmp\"],\"image/cgm\":[\"cgm\"],\"image/dicom-rle\":[\"drle\"],\"image/emf\":[\"emf\"],\"image/fits\":[\"fits\"],\"image/g3fax\":[\"g3\"],\"image/gif\":[\"gif\"],\"image/heic\":[\"heic\"],\"image/heic-sequence\":[\"heics\"],\"image/heif\":[\"heif\"],\"image/heif-sequence\":[\"heifs\"],\"image/hej2k\":[\"hej2\"],\"image/hsj2\":[\"hsj2\"],\"image/ief\":[\"ief\"],\"image/jls\":[\"jls\"],\"image/jp2\":[\"jp2\",\"jpg2\"],\"image/jpeg\":[\"jpeg\",\"jpg\",\"jpe\"],\"image/jph\":[\"jph\"],\"image/jphc\":[\"jhc\"],\"image/jpm\":[\"jpm\"],\"image/jpx\":[\"jpx\",\"jpf\"],\"image/jxr\":[\"jxr\"],\"image/jxra\":[\"jxra\"],\"image/jxrs\":[\"jxrs\"],\"image/jxs\":[\"jxs\"],\"image/jxsc\":[\"jxsc\"],\"image/jxsi\":[\"jxsi\"],\"image/jxss\":[\"jxss\"],\"image/ktx\":[\"ktx\"],\"image/ktx2\":[\"ktx2\"],\"image/png\":[\"png\"],\"image/sgi\":[\"sgi\"],\"image/svg+xml\":[\"svg\",\"svgz\"],\"image/t38\":[\"t38\"],\"image/tiff\":[\"tif\",\"tiff\"],\"image/tiff-fx\":[\"tfx\"],\"image/webp\":[\"webp\"],\"image/wmf\":[\"wmf\"],\"message/disposition-notification\":[\"disposition-notification\"],\"message/global\":[\"u8msg\"],\"message/global-delivery-status\":[\"u8dsn\"],\"message/global-disposition-notification\":[\"u8mdn\"],\"message/global-headers\":[\"u8hdr\"],\"message/rfc822\":[\"eml\",\"mime\"],\"model/3mf\":[\"3mf\"],\"model/gltf+json\":[\"gltf\"],\"model/gltf-binary\":[\"glb\"],\"model/iges\":[\"igs\",\"iges\"],\"model/mesh\":[\"msh\",\"mesh\",\"silo\"],\"model/mtl\":[\"mtl\"],\"model/obj\":[\"obj\"],\"model/step+xml\":[\"stpx\"],\"model/step+zip\":[\"stpz\"],\"model/step-xml+zip\":[\"stpxz\"],\"model/stl\":[\"stl\"],\"model/vrml\":[\"wrl\",\"vrml\"],\"model/x3d+binary\":[\"*x3db\",\"x3dbz\"],\"model/x3d+fastinfoset\":[\"x3db\"],\"model/x3d+vrml\":[\"*x3dv\",\"x3dvz\"],\"model/x3d+xml\":[\"x3d\",\"x3dz\"],\"model/x3d-vrml\":[\"x3dv\"],\"text/cache-manifest\":[\"appcache\",\"manifest\"],\"text/calendar\":[\"ics\",\"ifb\"],\"text/coffeescript\":[\"coffee\",\"litcoffee\"],\"text/css\":[\"css\"],\"text/csv\":[\"csv\"],\"text/html\":[\"html\",\"htm\",\"shtml\"],\"text/jade\":[\"jade\"],\"text/jsx\":[\"jsx\"],\"text/less\":[\"less\"],\"text/markdown\":[\"markdown\",\"md\"],\"text/mathml\":[\"mml\"],\"text/mdx\":[\"mdx\"],\"text/n3\":[\"n3\"],\"text/plain\":[\"txt\",\"text\",\"conf\",\"def\",\"list\",\"log\",\"in\",\"ini\"],\"text/richtext\":[\"rtx\"],\"text/rtf\":[\"*rtf\"],\"text/sgml\":[\"sgml\",\"sgm\"],\"text/shex\":[\"shex\"],\"text/slim\":[\"slim\",\"slm\"],\"text/spdx\":[\"spdx\"],\"text/stylus\":[\"stylus\",\"styl\"],\"text/tab-separated-values\":[\"tsv\"],\"text/troff\":[\"t\",\"tr\",\"roff\",\"man\",\"me\",\"ms\"],\"text/turtle\"
:[\"ttl\"],\"text/uri-list\":[\"uri\",\"uris\",\"urls\"],\"text/vcard\":[\"vcard\"],\"text/vtt\":[\"vtt\"],\"text/xml\":[\"*xml\"],\"text/yaml\":[\"yaml\",\"yml\"],\"video/3gpp\":[\"3gp\",\"3gpp\"],\"video/3gpp2\":[\"3g2\"],\"video/h261\":[\"h261\"],\"video/h263\":[\"h263\"],\"video/h264\":[\"h264\"],\"video/iso.segment\":[\"m4s\"],\"video/jpeg\":[\"jpgv\"],\"video/jpm\":[\"*jpm\",\"jpgm\"],\"video/mj2\":[\"mj2\",\"mjp2\"],\"video/mp2t\":[\"ts\"],\"video/mp4\":[\"mp4\",\"mp4v\",\"mpg4\"],\"video/mpeg\":[\"mpeg\",\"mpg\",\"mpe\",\"m1v\",\"m2v\"],\"video/ogg\":[\"ogv\"],\"video/quicktime\":[\"qt\",\"mov\"],\"video/webm\":[\"webm\"]};","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function(val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return 
Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true 
}\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
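/* Editor's sketch (not part of the bundled output): the header defaults that
   getNodeRequestOptions() above applies to a plain GET request with no body,
   compression enabled and no custom agent. Names and values are copied from the
   code above; this is only an illustration of its behaviour. */
var defaultOutgoingHeaders = {
  'Accept': '*/*',                                                        // fetch step 1.3
  'User-Agent': 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)', // HTTP-network-or-cache fetch step 2.11
  'Accept-Encoding': 'gzip,deflate',                                      // only when request.compress is true
  'Connection': 'close'                                                   // only when no custom agent is supplied
};
// Content-Length is never set for GET/HEAD; a POST/PUT without a body gets '0'.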
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nconst isSameProtocol = function isSameProtocol(destination, original) {\n\tconst orig = new URL$1(original).protocol;\n\tconst dest = new URL$1(destination).protocol;\n\n\treturn orig === dest;\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\tdestroyStream(request.body, error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(req, function (err) {\n\t\t\tif (signal && signal.aborted) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (parseInt(process.version.substring(1)) < 14) {\n\t\t\t// Before 
Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\treq.on('socket', function (s) {\n\t\t\t\ts.addListener('close', function (hadError) {\n\t\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\t\tconst hasDataListener = s.listenerCount('data') > 0;\n\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && hasDataListener && !hadError && !(signal && signal.aborted)) {\n\t\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', err);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: 
request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
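/* Editor's sketch (not part of the bundled output): how the redirect handling above
   looks from the caller's side, written against the standalone node-fetch v2 package
   (inside this bundle the module is inlined by ncc). The URL is a placeholder. */
const fetch = require('node-fetch');
fetch('https://example.com/old-path', { redirect: 'manual', follow: 5, compress: true })
  .then(function (res) {
    if (fetch.isRedirect(res.status)) {
      // in 'manual' mode the code above rewrites the Location header to the
      // fully resolved URL instead of following it
      console.log('would redirect to', res.headers.get('Location'));
    }
  });
// redirect: 'error' rejects with a FetchError ('no-redirect');
// redirect: 'follow' (the default) follows up to `follow` redirects, dropping
// authorization/cookie headers when the target is a different domain or protocol.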
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.on('end', function () {\n\t\t\t\t\t// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tlet socket;\n\n\trequest.on('socket', function (s) {\n\t\tsocket = s;\n\t});\n\n\trequest.on('response', function (response) {\n\t\tconst headers = response.headers;\n\n\t\tif (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {\n\t\t\tresponse.once('close', function (hadError) {\n\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\tconst hasDataListener = socket.listenerCount('data') > 0;\n\n\t\t\t\tif (hasDataListener && !hadError) {\n\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\terrorCallback(err);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n}\n\nfunction destroyStream(stream, err) {\n\tif (stream.destroy) {\n\t\tstream.destroy(err);\n\t} else {\n\t\t// node < 8\n\t\tstream.emit('error', err);\n\t\tstream.end();\n\t}\n}\n\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = 
fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","/**\n * Advanced Encryption Standard (AES) implementation.\n *\n * This implementation is based on the public domain library 'jscrypto' which\n * was written by:\n *\n * Emily Stark (estark@stanford.edu)\n * Mike Hamburg (mhamburg@stanford.edu)\n * Dan Boneh (dabo@cs.stanford.edu)\n *\n * Parts of this code are based on the OpenSSL implementation of AES:\n * http://www.openssl.org\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./cipher');\nrequire('./cipherModes');\nrequire('./util');\n\n/* AES API */\nmodule.exports = forge.aes = forge.aes || {};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('AES-', key);\n * cipher.start({iv: iv});\n *\n * Creates an AES cipher object to encrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as a string of bytes, an array of bytes,\n * a byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.startEncrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: false,\n mode: mode\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('AES-', key);\n *\n * Creates an AES cipher object to encrypt data using the given symmetric key.\n *\n * The key may be given as a string of bytes, an array of bytes, a\n * byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.createEncryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: false,\n mode: mode\n });\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('AES-', key);\n * decipher.start({iv: iv});\n *\n * Creates an AES cipher object to decrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as a string of bytes, an array of bytes,\n * a byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.startDecrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: true,\n mode: mode\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. 
Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('AES-', key);\n *\n * Creates an AES cipher object to decrypt data using the given symmetric key.\n *\n * The key may be given as a string of bytes, an array of bytes, a\n * byte buffer, or an array of 32-bit words.\n *\n * @param key the symmetric key to use.\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.aes.createDecryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: true,\n mode: mode\n });\n};\n\n/**\n * Creates a new AES cipher algorithm object.\n *\n * @param name the name of the algorithm.\n * @param mode the mode factory function.\n *\n * @return the AES algorithm object.\n */\nforge.aes.Algorithm = function(name, mode) {\n if(!init) {\n initialize();\n }\n var self = this;\n self.name = name;\n self.mode = new mode({\n blockSize: 16,\n cipher: {\n encrypt: function(inBlock, outBlock) {\n return _updateBlock(self._w, inBlock, outBlock, false);\n },\n decrypt: function(inBlock, outBlock) {\n return _updateBlock(self._w, inBlock, outBlock, true);\n }\n }\n });\n self._init = false;\n};\n\n/**\n * Initializes this AES algorithm by expanding its key.\n *\n * @param options the options to use.\n * key the key to use with this algorithm.\n * decrypt true if the algorithm should be initialized for decryption,\n * false for encryption.\n */\nforge.aes.Algorithm.prototype.initialize = function(options) {\n if(this._init) {\n return;\n }\n\n var key = options.key;\n var tmp;\n\n /* Note: The key may be a string of bytes, an array of bytes, a byte\n buffer, or an array of 32-bit integers. If the key is in bytes, then\n it must be 16, 24, or 32 bytes in length. If it is in 32-bit\n integers, it must be 4, 6, or 8 integers long. */\n\n if(typeof key === 'string' &&\n (key.length === 16 || key.length === 24 || key.length === 32)) {\n // convert key string into byte buffer\n key = forge.util.createBuffer(key);\n } else if(forge.util.isArray(key) &&\n (key.length === 16 || key.length === 24 || key.length === 32)) {\n // convert key integer array into byte buffer\n tmp = key;\n key = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n key.putByte(tmp[i]);\n }\n }\n\n // convert key byte buffer into 32-bit integer array\n if(!forge.util.isArray(key)) {\n tmp = key;\n key = [];\n\n // key lengths of 16, 24, 32 bytes allowed\n var len = tmp.length();\n if(len === 16 || len === 24 || len === 32) {\n len = len >>> 2;\n for(var i = 0; i < len; ++i) {\n key.push(tmp.getInt32());\n }\n }\n }\n\n // key must be an array of 32-bit integers by now\n if(!forge.util.isArray(key) ||\n !(key.length === 4 || key.length === 6 || key.length === 8)) {\n throw new Error('Invalid key parameter.');\n }\n\n // encryption operation is always used for these modes\n var mode = this.mode.name;\n var encryptOp = (['CFB', 'OFB', 'CTR', 'GCM'].indexOf(mode) !== -1);\n\n // do key expansion\n this._w = _expandKey(key, options.decrypt && !encryptOp);\n this._init = true;\n};\n\n/**\n * Expands a key. Typically only used for testing.\n *\n * @param key the symmetric key to expand, as an array of 32-bit words.\n * @param decrypt true to expand for decryption, false for encryption.\n *\n * @return the expanded key.\n */\nforge.aes._expandKey = function(key, decrypt) {\n if(!init) {\n initialize();\n }\n return _expandKey(key, decrypt);\n};\n\n/**\n * Updates a single block. 
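/* Editor's sketch (not part of the bundled output): the non-deprecated API that the
   comments above point to, shown against the standalone node-forge package (inside
   this bundle forge is inlined). An AES-CBC round trip with a random key and IV. */
var forge = require('node-forge');
var key = forge.random.getBytesSync(16); // 16/24/32 bytes -> AES-128/192/256
var iv = forge.random.getBytesSync(16);

var cipher = forge.cipher.createCipher('AES-CBC', key);
cipher.start({ iv: iv });
cipher.update(forge.util.createBuffer('some plaintext', 'utf8'));
cipher.finish();
var encrypted = cipher.output; // forge.util.ByteBuffer
console.log(encrypted.toHex());

var decipher = forge.cipher.createDecipher('AES-CBC', key);
decipher.start({ iv: iv });
decipher.update(forge.util.createBuffer(encrypted.getBytes()));
decipher.finish();
console.log(decipher.output.getBytes() === 'some plaintext'); // true (plain ASCII plaintext)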
Typically only used for testing.\n *\n * @param w the expanded key to use.\n * @param input an array of block-size 32-bit words.\n * @param output an array of block-size 32-bit words.\n * @param decrypt true to decrypt, false to encrypt.\n */\nforge.aes._updateBlock = _updateBlock;\n\n/** Register AES algorithms **/\n\nregisterAlgorithm('AES-ECB', forge.cipher.modes.ecb);\nregisterAlgorithm('AES-CBC', forge.cipher.modes.cbc);\nregisterAlgorithm('AES-CFB', forge.cipher.modes.cfb);\nregisterAlgorithm('AES-OFB', forge.cipher.modes.ofb);\nregisterAlgorithm('AES-CTR', forge.cipher.modes.ctr);\nregisterAlgorithm('AES-GCM', forge.cipher.modes.gcm);\n\nfunction registerAlgorithm(name, mode) {\n var factory = function() {\n return new forge.aes.Algorithm(name, mode);\n };\n forge.cipher.registerAlgorithm(name, factory);\n}\n\n/** AES implementation **/\n\nvar init = false; // not yet initialized\nvar Nb = 4; // number of words comprising the state (AES = 4)\nvar sbox; // non-linear substitution table used in key expansion\nvar isbox; // inversion of sbox\nvar rcon; // round constant word array\nvar mix; // mix-columns table\nvar imix; // inverse mix-columns table\n\n/**\n * Performs initialization, ie: precomputes tables to optimize for speed.\n *\n * One way to understand how AES works is to imagine that 'addition' and\n * 'multiplication' are interfaces that require certain mathematical\n * properties to hold true (ie: they are associative) but they might have\n * different implementations and produce different kinds of results ...\n * provided that their mathematical properties remain true. AES defines\n * its own methods of addition and multiplication but keeps some important\n * properties the same, ie: associativity and distributivity. The\n * explanation below tries to shed some light on how AES defines addition\n * and multiplication of bytes and 32-bit words in order to perform its\n * encryption and decryption algorithms.\n *\n * The basics:\n *\n * The AES algorithm views bytes as binary representations of polynomials\n * that have either 1 or 0 as the coefficients. It defines the addition\n * or subtraction of two bytes as the XOR operation. It also defines the\n * multiplication of two bytes as a finite field referred to as GF(2^8)\n * (Note: 'GF' means \"Galois Field\" which is a field that contains a finite\n * number of elements so GF(2^8) has 256 elements).\n *\n * This means that any two bytes can be represented as binary polynomials;\n * when they multiplied together and modularly reduced by an irreducible\n * polynomial of the 8th degree, the results are the field GF(2^8). The\n * specific irreducible polynomial that AES uses in hexadecimal is 0x11b.\n * This multiplication is associative with 0x01 as the identity:\n *\n * (b * 0x01 = GF(b, 0x01) = b).\n *\n * The operation GF(b, 0x02) can be performed at the byte level by left\n * shifting b once and then XOR'ing it (to perform the modular reduction)\n * with 0x11b if b is >= 128. Repeated application of the multiplication\n * of 0x02 can be used to implement the multiplication of any two bytes.\n *\n * For instance, multiplying 0x57 and 0x13, denoted as GF(0x57, 0x13), can\n * be performed by factoring 0x13 into 0x01, 0x02, and 0x10. Then these\n * factors can each be multiplied by 0x57 and then added together. To do\n * the multiplication, values for 0x57 multiplied by each of these 3 factors\n * can be precomputed and stored in a table. 
To add them, the values from\n * the table are XOR'd together.\n *\n * AES also defines addition and multiplication of words, that is 4-byte\n * numbers represented as polynomials of 3 degrees where the coefficients\n * are the values of the bytes.\n *\n * The word [a0, a1, a2, a3] is a polynomial a3x^3 + a2x^2 + a1x + a0.\n *\n * Addition is performed by XOR'ing like powers of x. Multiplication\n * is performed in two steps, the first is an algebriac expansion as\n * you would do normally (where addition is XOR). But the result is\n * a polynomial larger than 3 degrees and thus it cannot fit in a word. So\n * next the result is modularly reduced by an AES-specific polynomial of\n * degree 4 which will always produce a polynomial of less than 4 degrees\n * such that it will fit in a word. In AES, this polynomial is x^4 + 1.\n *\n * The modular product of two polynomials 'a' and 'b' is thus:\n *\n * d(x) = d3x^3 + d2x^2 + d1x + d0\n * with\n * d0 = GF(a0, b0) ^ GF(a3, b1) ^ GF(a2, b2) ^ GF(a1, b3)\n * d1 = GF(a1, b0) ^ GF(a0, b1) ^ GF(a3, b2) ^ GF(a2, b3)\n * d2 = GF(a2, b0) ^ GF(a1, b1) ^ GF(a0, b2) ^ GF(a3, b3)\n * d3 = GF(a3, b0) ^ GF(a2, b1) ^ GF(a1, b2) ^ GF(a0, b3)\n *\n * As a matrix:\n *\n * [d0] = [a0 a3 a2 a1][b0]\n * [d1] [a1 a0 a3 a2][b1]\n * [d2] [a2 a1 a0 a3][b2]\n * [d3] [a3 a2 a1 a0][b3]\n *\n * Special polynomials defined by AES (0x02 == {02}):\n * a(x) = {03}x^3 + {01}x^2 + {01}x + {02}\n * a^-1(x) = {0b}x^3 + {0d}x^2 + {09}x + {0e}.\n *\n * These polynomials are used in the MixColumns() and InverseMixColumns()\n * operations, respectively, to cause each element in the state to affect\n * the output (referred to as diffusing).\n *\n * RotWord() uses: a0 = a1 = a2 = {00} and a3 = {01}, which is the\n * polynomial x3.\n *\n * The ShiftRows() method modifies the last 3 rows in the state (where\n * the state is 4 words with 4 bytes per word) by shifting bytes cyclically.\n * The 1st byte in the second row is moved to the end of the row. The 1st\n * and 2nd bytes in the third row are moved to the end of the row. The 1st,\n * 2nd, and 3rd bytes are moved in the fourth row.\n *\n * More details on how AES arithmetic works:\n *\n * In the polynomial representation of binary numbers, XOR performs addition\n * and subtraction and multiplication in GF(2^8) denoted as GF(a, b)\n * corresponds with the multiplication of polynomials modulo an irreducible\n * polynomial of degree 8. In other words, for AES, GF(a, b) will multiply\n * polynomial 'a' with polynomial 'b' and then do a modular reduction by\n * an AES-specific irreducible polynomial of degree 8.\n *\n * A polynomial is irreducible if its only divisors are one and itself. 
For\n * the AES algorithm, this irreducible polynomial is:\n *\n * m(x) = x^8 + x^4 + x^3 + x + 1,\n *\n * or {01}{1b} in hexadecimal notation, where each coefficient is a bit:\n * 100011011 = 283 = 0x11b.\n *\n * For example, GF(0x57, 0x83) = 0xc1 because\n *\n * 0x57 = 87 = 01010111 = x^6 + x^4 + x^2 + x + 1\n * 0x85 = 131 = 10000101 = x^7 + x + 1\n *\n * (x^6 + x^4 + x^2 + x + 1) * (x^7 + x + 1)\n * = x^13 + x^11 + x^9 + x^8 + x^7 +\n * x^7 + x^5 + x^3 + x^2 + x +\n * x^6 + x^4 + x^2 + x + 1\n * = x^13 + x^11 + x^9 + x^8 + x^6 + x^5 + x^4 + x^3 + 1 = y\n * y modulo (x^8 + x^4 + x^3 + x + 1)\n * = x^7 + x^6 + 1.\n *\n * The modular reduction by m(x) guarantees the result will be a binary\n * polynomial of less than degree 8, so that it can fit in a byte.\n *\n * The operation to multiply a binary polynomial b with x (the polynomial\n * x in binary representation is 00000010) is:\n *\n * b_7x^8 + b_6x^7 + b_5x^6 + b_4x^5 + b_3x^4 + b_2x^3 + b_1x^2 + b_0x^1\n *\n * To get GF(b, x) we must reduce that by m(x). If b_7 is 0 (that is the\n * most significant bit is 0 in b) then the result is already reduced. If\n * it is 1, then we can reduce it by subtracting m(x) via an XOR.\n *\n * It follows that multiplication by x (00000010 or 0x02) can be implemented\n * by performing a left shift followed by a conditional bitwise XOR with\n * 0x1b. This operation on bytes is denoted by xtime(). Multiplication by\n * higher powers of x can be implemented by repeated application of xtime().\n *\n * By adding intermediate results, multiplication by any constant can be\n * implemented. For instance:\n *\n * GF(0x57, 0x13) = 0xfe because:\n *\n * xtime(b) = (b & 128) ? (b << 1 ^ 0x11b) : (b << 1)\n *\n * Note: We XOR with 0x11b instead of 0x1b because in javascript our\n * datatype for b can be larger than 1 byte, so a left shift will not\n * automatically eliminate bits that overflow a byte ... by XOR'ing the\n * overflow bit with 1 (the extra one from 0x11b) we zero it out.\n *\n * GF(0x57, 0x02) = xtime(0x57) = 0xae\n * GF(0x57, 0x04) = xtime(0xae) = 0x47\n * GF(0x57, 0x08) = xtime(0x47) = 0x8e\n * GF(0x57, 0x10) = xtime(0x8e) = 0x07\n *\n * GF(0x57, 0x13) = GF(0x57, (0x01 ^ 0x02 ^ 0x10))\n *\n * And by the distributive property (since XOR is addition and GF() is\n * multiplication):\n *\n * = GF(0x57, 0x01) ^ GF(0x57, 0x02) ^ GF(0x57, 0x10)\n * = 0x57 ^ 0xae ^ 0x07\n * = 0xfe.\n */\nfunction initialize() {\n init = true;\n\n /* Populate the Rcon table. 
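/* Editor's sketch (not part of the bundled output): the xtime()/GF(2^8) multiplication
   described above, written out so the worked examples can be checked. It uses the same
   conditional 0x11b reduction as the comment. */
function xtime(b) {
  b <<= 1;
  return (b & 0x100) ? (b ^ 0x11b) : b; // conditional reduction by m(x)
}
function gfmul(a, b) {
  // multiply a by each set bit of b (a power of x) and XOR the partial products
  var product = 0;
  while (b) {
    if (b & 1) product ^= a;
    a = xtime(a);
    b >>>= 1;
  }
  return product;
}
console.log(gfmul(0x57, 0x02).toString(16)); // 'ae'
console.log(gfmul(0x57, 0x13).toString(16)); // 'fe'  (matches the worked example above)
console.log(gfmul(0x57, 0x83).toString(16)); // 'c1'  (the FIPS-197 sample product)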
These are the values given by\n [x^(i-1),{00},{00},{00}] where x^(i-1) are powers of x (and x = 0x02)\n in the field of GF(2^8), where i starts at 1.\n\n rcon[0] = [0x00, 0x00, 0x00, 0x00]\n rcon[1] = [0x01, 0x00, 0x00, 0x00] 2^(1-1) = 2^0 = 1\n rcon[2] = [0x02, 0x00, 0x00, 0x00] 2^(2-1) = 2^1 = 2\n ...\n rcon[9] = [0x1B, 0x00, 0x00, 0x00] 2^(9-1) = 2^8 = 0x1B\n rcon[10] = [0x36, 0x00, 0x00, 0x00] 2^(10-1) = 2^9 = 0x36\n\n We only store the first byte because it is the only one used.\n */\n rcon = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36];\n\n // compute xtime table which maps i onto GF(i, 0x02)\n var xtime = new Array(256);\n for(var i = 0; i < 128; ++i) {\n xtime[i] = i << 1;\n xtime[i + 128] = (i + 128) << 1 ^ 0x11B;\n }\n\n // compute all other tables\n sbox = new Array(256);\n isbox = new Array(256);\n mix = new Array(4);\n imix = new Array(4);\n for(var i = 0; i < 4; ++i) {\n mix[i] = new Array(256);\n imix[i] = new Array(256);\n }\n var e = 0, ei = 0, e2, e4, e8, sx, sx2, me, ime;\n for(var i = 0; i < 256; ++i) {\n /* We need to generate the SubBytes() sbox and isbox tables so that\n we can perform byte substitutions. This requires us to traverse\n all of the elements in GF, find their multiplicative inverses,\n and apply to each the following affine transformation:\n\n bi' = bi ^ b(i + 4) mod 8 ^ b(i + 5) mod 8 ^ b(i + 6) mod 8 ^\n b(i + 7) mod 8 ^ ci\n for 0 <= i < 8, where bi is the ith bit of the byte, and ci is the\n ith bit of a byte c with the value {63} or {01100011}.\n\n It is possible to traverse every possible value in a Galois field\n using what is referred to as a 'generator'. There are many\n generators (128 out of 256): 3,5,6,9,11,82 to name a few. To fully\n traverse GF we iterate 255 times, multiplying by our generator\n each time.\n\n On each iteration we can determine the multiplicative inverse for\n the current element.\n\n Suppose there is an element in GF 'e'. For a given generator 'g',\n e = g^x. The multiplicative inverse of e is g^(255 - x). It turns\n out that if use the inverse of a generator as another generator\n it will produce all of the corresponding multiplicative inverses\n at the same time. For this reason, we choose 5 as our inverse\n generator because it only requires 2 multiplies and 1 add and its\n inverse, 82, requires relatively few operations as well.\n\n In order to apply the affine transformation, the multiplicative\n inverse 'ei' of 'e' can be repeatedly XOR'd (4 times) with a\n bit-cycling of 'ei'. To do this 'ei' is first stored in 's' and\n 'x'. Then 's' is left shifted and the high bit of 's' is made the\n low bit. The resulting value is stored in 's'. Then 'x' is XOR'd\n with 's' and stored in 'x'. On each subsequent iteration the same\n operation is performed. When 4 iterations are complete, 'x' is\n XOR'd with 'c' (0x63) and the transformed value is stored in 'x'.\n For example:\n\n s = 01000001\n x = 01000001\n\n iteration 1: s = 10000010, x ^= s\n iteration 2: s = 00000101, x ^= s\n iteration 3: s = 00001010, x ^= s\n iteration 4: s = 00010100, x ^= s\n x ^= 0x63\n\n This can be done with a loop where s = (s << 1) | (s >> 7). However,\n it can also be done by using a single 16-bit (in this case 32-bit)\n number 'sx'. Since XOR is an associative operation, we can set 'sx'\n to 'ei' and then XOR it with 'sx' left-shifted 1,2,3, and 4 times.\n The most significant bits will flow into the high 8 bit positions\n and be correctly XOR'd with one another. 
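/* Editor's sketch (not part of the bundled output): reproducing the Rcon table listed
   above by repeated xtime(), i.e. successive powers of x = 0x02 in GF(2^8). */
function xtime(b) { b <<= 1; return (b & 0x100) ? (b ^ 0x11b) : b; }
var rcon = [0x00];
for (var v = 0x01, i = 1; i <= 10; ++i) {
  rcon.push(v);
  v = xtime(v);
}
// rcon -> [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36]
// Sanity check on the affine transformation described above: with e = ei = 0 the first
// loop iteration gives sx = 0x00 ^ 0x63 = 0x63, the familiar SubBytes value for 0x00.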
All that remains will be\n to cycle the high 8 bits by XOR'ing them all with the lower 8 bits\n afterwards.\n\n At the same time we're populating sbox and isbox we can precompute\n the multiplication we'll need to do to do MixColumns() later.\n */\n\n // apply affine transformation\n sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4);\n sx = (sx >> 8) ^ (sx & 255) ^ 0x63;\n\n // update tables\n sbox[e] = sx;\n isbox[sx] = e;\n\n /* Mixing columns is done using matrix multiplication. The columns\n that are to be mixed are each a single word in the current state.\n The state has Nb columns (4 columns). Therefore each column is a\n 4 byte word. So to mix the columns in a single column 'c' where\n its rows are r0, r1, r2, and r3, we use the following matrix\n multiplication:\n\n [2 3 1 1]*[r0,c]=[r'0,c]\n [1 2 3 1] [r1,c] [r'1,c]\n [1 1 2 3] [r2,c] [r'2,c]\n [3 1 1 2] [r3,c] [r'3,c]\n\n r0, r1, r2, and r3 are each 1 byte of one of the words in the\n state (a column). To do matrix multiplication for each mixed\n column c' we multiply the corresponding row from the left matrix\n with the corresponding column from the right matrix. In total, we\n get 4 equations:\n\n r0,c' = 2*r0,c + 3*r1,c + 1*r2,c + 1*r3,c\n r1,c' = 1*r0,c + 2*r1,c + 3*r2,c + 1*r3,c\n r2,c' = 1*r0,c + 1*r1,c + 2*r2,c + 3*r3,c\n r3,c' = 3*r0,c + 1*r1,c + 1*r2,c + 2*r3,c\n\n As usual, the multiplication is as previously defined and the\n addition is XOR. In order to optimize mixing columns we can store\n the multiplication results in tables. If you think of the whole\n column as a word (it might help to visualize by mentally rotating\n the equations above by counterclockwise 90 degrees) then you can\n see that it would be useful to map the multiplications performed on\n each byte (r0, r1, r2, r3) onto a word as well. For instance, we\n could map 2*r0,1*r0,1*r0,3*r0 onto a word by storing 2*r0 in the\n highest 8 bits and 3*r0 in the lowest 8 bits (with the other two\n respectively in the middle). This means that a table can be\n constructed that uses r0 as an index to the word. We can do the\n same with r1, r2, and r3, creating a total of 4 tables.\n\n To construct a full c', we can just look up each byte of c in\n their respective tables and XOR the results together.\n\n Also, to build each table we only have to calculate the word\n for 2,1,1,3 for every byte ... which we can do on each iteration\n of this loop since we will iterate over every byte. After we have\n calculated 2,1,1,3 we can get the results for the other tables\n by cycling the byte at the end to the beginning. For instance\n we can take the result of table 2,1,1,3 and produce table 3,2,1,1\n by moving the right most byte to the left most position just like\n how you can imagine the 3 moved out of 2,1,1,3 and to the front\n to produce 3,2,1,1.\n\n There is another optimization in that the same multiples of\n the current element we need in order to advance our generator\n to the next iteration can be reused in performing the 2,1,1,3\n calculation. We also calculate the inverse mix column tables,\n with e,9,d,b being the inverse of 2,1,1,3.\n\n When we're done, and we need to actually mix columns, the first\n byte of each state word should be put through mix[0] (2,1,1,3),\n the second through mix[1] (3,2,1,1) and so forth. 
Then they should\n be XOR'd together to produce the fully mixed column.\n */\n\n // calculate mix and imix table values\n sx2 = xtime[sx];\n e2 = xtime[e];\n e4 = xtime[e2];\n e8 = xtime[e4];\n me =\n (sx2 << 24) ^ // 2\n (sx << 16) ^ // 1\n (sx << 8) ^ // 1\n (sx ^ sx2); // 3\n ime =\n (e2 ^ e4 ^ e8) << 24 ^ // E (14)\n (e ^ e8) << 16 ^ // 9\n (e ^ e4 ^ e8) << 8 ^ // D (13)\n (e ^ e2 ^ e8); // B (11)\n // produce each of the mix tables by rotating the 2,1,1,3 value\n for(var n = 0; n < 4; ++n) {\n mix[n][e] = me;\n imix[n][sx] = ime;\n // cycle the right most byte to the left most position\n // ie: 2,1,1,3 becomes 3,2,1,1\n me = me << 24 | me >>> 8;\n ime = ime << 24 | ime >>> 8;\n }\n\n // get next element and inverse\n if(e === 0) {\n // 1 is the inverse of 1\n e = ei = 1;\n } else {\n // e = 2e + 2*2*2*(10e)) = multiply e by 82 (chosen generator)\n // ei = ei + 2*2*ei = multiply ei by 5 (inverse generator)\n e = e2 ^ xtime[xtime[xtime[e2 ^ e8]]];\n ei ^= xtime[xtime[ei]];\n }\n }\n}\n\n/**\n * Generates a key schedule using the AES key expansion algorithm.\n *\n * The AES algorithm takes the Cipher Key, K, and performs a Key Expansion\n * routine to generate a key schedule. The Key Expansion generates a total\n * of Nb*(Nr + 1) words: the algorithm requires an initial set of Nb words,\n * and each of the Nr rounds requires Nb words of key data. The resulting\n * key schedule consists of a linear array of 4-byte words, denoted [wi ],\n * with i in the range 0 <= i < Nb(Nr + 1).\n *\n * KeyExpansion(byte key[4*Nk], word w[Nb*(Nr+1)], Nk)\n * AES-128 (Nb=4, Nk=4, Nr=10)\n * AES-192 (Nb=4, Nk=6, Nr=12)\n * AES-256 (Nb=4, Nk=8, Nr=14)\n * Note: Nr=Nk+6.\n *\n * Nb is the number of columns (32-bit words) comprising the State (or\n * number of bytes in a block). For AES, Nb=4.\n *\n * @param key the key to schedule (as an array of 32-bit words).\n * @param decrypt true to modify the key schedule to decrypt, false not to.\n *\n * @return the generated key schedule.\n */\nfunction _expandKey(key, decrypt) {\n // copy the key's words to initialize the key schedule\n var w = key.slice(0);\n\n /* RotWord() will rotate a word, moving the first byte to the last\n byte's position (shifting the other bytes left).\n\n We will be getting the value of Rcon at i / Nk. 'i' will iterate\n from Nk to (Nb * Nr+1). Nk = 4 (4 byte key), Nb = 4 (4 words in\n a block), Nr = Nk + 6 (10). Therefore 'i' will iterate from\n 4 to 44 (exclusive). Each time we iterate 4 times, i / Nk will\n increase by 1. We use a counter iNk to keep track of this.\n */\n\n // go through the rounds expanding the key\n var temp, iNk = 1;\n var Nk = w.length;\n var Nr1 = Nk + 6 + 1;\n var end = Nb * Nr1;\n for(var i = Nk; i < end; ++i) {\n temp = w[i - 1];\n if(i % Nk === 0) {\n // temp = SubWord(RotWord(temp)) ^ Rcon[i / Nk]\n temp =\n sbox[temp >>> 16 & 255] << 24 ^\n sbox[temp >>> 8 & 255] << 16 ^\n sbox[temp & 255] << 8 ^\n sbox[temp >>> 24] ^ (rcon[iNk] << 24);\n iNk++;\n } else if(Nk > 6 && (i % Nk === 4)) {\n // temp = SubWord(temp)\n temp =\n sbox[temp >>> 24] << 24 ^\n sbox[temp >>> 16 & 255] << 16 ^\n sbox[temp >>> 8 & 255] << 8 ^\n sbox[temp & 255];\n }\n w[i] = w[i - Nk] ^ temp;\n }\n\n /* When we are updating a cipher block we always use the code path for\n encryption whether we are decrypting or not (to shorten code and\n simplify the generation of look up tables). 
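/* Editor's sketch (not part of the bundled output): the key-schedule sizes implied by
   the comment above (Nr = Nk + 6, schedule length Nb * (Nr + 1) words, with Nb = 4). */
[4, 6, 8].forEach(function (Nk) {
  var Nr = Nk + 6;
  var words = 4 * (Nr + 1);
  console.log('AES-' + (Nk * 32) + ': Nr = ' + Nr + ', key schedule = ' + words + ' 32-bit words');
});
// AES-128: Nr = 10, 44 words
// AES-192: Nr = 12, 52 words
// AES-256: Nr = 14, 60 words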
However, because there\n are differences in the decryption algorithm, other than just swapping\n in different look up tables, we must transform our key schedule to\n account for these changes:\n\n 1. The decryption algorithm gets its key rounds in reverse order.\n 2. The decryption algorithm adds the round key before mixing columns\n instead of afterwards.\n\n We don't need to modify our key schedule to handle the first case,\n we can just traverse the key schedule in reverse order when decrypting.\n\n The second case requires a little work.\n\n The tables we built for performing rounds will take an input and then\n perform SubBytes() and MixColumns() or, for the decrypt version,\n InvSubBytes() and InvMixColumns(). But the decrypt algorithm requires\n us to AddRoundKey() before InvMixColumns(). This means we'll need to\n apply some transformations to the round key to inverse-mix its columns\n so they'll be correct for moving AddRoundKey() to after the state has\n had its columns inverse-mixed.\n\n To inverse-mix the columns of the state when we're decrypting we use a\n lookup table that will apply InvSubBytes() and InvMixColumns() at the\n same time. However, the round key's bytes are not inverse-substituted\n in the decryption algorithm. To get around this problem, we can first\n substitute the bytes in the round key so that when we apply the\n transformation via the InvSubBytes()+InvMixColumns() table, it will\n undo our substitution leaving us with the original value that we\n want -- and then inverse-mix that value.\n\n This change will correctly alter our key schedule so that we can XOR\n each round key with our already transformed decryption state. This\n allows us to use the same code path as the encryption algorithm.\n\n We make one more change to the decryption key. Since the decryption\n algorithm runs in reverse from the encryption algorithm, we reverse\n the order of the round keys to avoid having to iterate over the key\n schedule backwards when running the encryption algorithm later in\n decryption mode. In addition to reversing the order of the round keys,\n we also swap each round key's 2nd and 4th rows. See the comments\n section where rounds are performed for more details about why this is\n done. These changes are done inline with the other substitution\n described above.\n */\n if(decrypt) {\n var tmp;\n var m0 = imix[0];\n var m1 = imix[1];\n var m2 = imix[2];\n var m3 = imix[3];\n var wnew = w.slice(0);\n end = w.length;\n for(var i = 0, wi = end - Nb; i < end; i += Nb, wi -= Nb) {\n // do not sub the first or last round key (round keys are Nb\n // words) as no column mixing is performed before they are added,\n // but do change the key order\n if(i === 0 || i === (end - Nb)) {\n wnew[i] = w[wi];\n wnew[i + 1] = w[wi + 3];\n wnew[i + 2] = w[wi + 2];\n wnew[i + 3] = w[wi + 1];\n } else {\n // substitute each round key byte because the inverse-mix\n // table will inverse-substitute it (effectively cancel the\n // substitution because round key bytes aren't sub'd in\n // decryption mode) and swap indexes 3 and 1\n for(var n = 0; n < Nb; ++n) {\n tmp = w[wi + n];\n wnew[i + (3&-n)] =\n m0[sbox[tmp >>> 24]] ^\n m1[sbox[tmp >>> 16 & 255]] ^\n m2[sbox[tmp >>> 8 & 255]] ^\n m3[sbox[tmp & 255]];\n }\n }\n }\n w = wnew;\n }\n\n return w;\n}\n\n/**\n * Updates a single block (16 bytes) using AES. 
The update will either\n * encrypt or decrypt the block.\n *\n * @param w the key schedule.\n * @param input the input block (an array of 32-bit words).\n * @param output the updated output block.\n * @param decrypt true to decrypt the block, false to encrypt it.\n */\nfunction _updateBlock(w, input, output, decrypt) {\n /*\n Cipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)])\n begin\n byte state[4,Nb]\n state = in\n AddRoundKey(state, w[0, Nb-1])\n for round = 1 step 1 to Nr-1\n SubBytes(state)\n ShiftRows(state)\n MixColumns(state)\n AddRoundKey(state, w[round*Nb, (round+1)*Nb-1])\n end for\n SubBytes(state)\n ShiftRows(state)\n AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n out = state\n end\n\n InvCipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)])\n begin\n byte state[4,Nb]\n state = in\n AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n for round = Nr-1 step -1 downto 1\n InvShiftRows(state)\n InvSubBytes(state)\n AddRoundKey(state, w[round*Nb, (round+1)*Nb-1])\n InvMixColumns(state)\n end for\n InvShiftRows(state)\n InvSubBytes(state)\n AddRoundKey(state, w[0, Nb-1])\n out = state\n end\n */\n\n // Encrypt: AddRoundKey(state, w[0, Nb-1])\n // Decrypt: AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n var Nr = w.length / 4 - 1;\n var m0, m1, m2, m3, sub;\n if(decrypt) {\n m0 = imix[0];\n m1 = imix[1];\n m2 = imix[2];\n m3 = imix[3];\n sub = isbox;\n } else {\n m0 = mix[0];\n m1 = mix[1];\n m2 = mix[2];\n m3 = mix[3];\n sub = sbox;\n }\n var a, b, c, d, a2, b2, c2;\n a = input[0] ^ w[0];\n b = input[decrypt ? 3 : 1] ^ w[1];\n c = input[2] ^ w[2];\n d = input[decrypt ? 1 : 3] ^ w[3];\n var i = 3;\n\n /* In order to share code we follow the encryption algorithm when both\n encrypting and decrypting. To account for the changes required in the\n decryption algorithm, we use different lookup tables when decrypting\n and use a modified key schedule to account for the difference in the\n order of transformations applied when performing rounds. We also get\n key rounds in reverse order (relative to encryption). */\n for(var round = 1; round < Nr; ++round) {\n /* As described above, we'll be using table lookups to perform the\n column mixing. Each column is stored as a word in the state (the\n array 'input' has one column as a word at each index). In order to\n mix a column, we perform these transformations on each row in c,\n which is 1 byte in each word. The new column for c0 is c'0:\n\n m0 m1 m2 m3\n r0,c'0 = 2*r0,c0 + 3*r1,c0 + 1*r2,c0 + 1*r3,c0\n r1,c'0 = 1*r0,c0 + 2*r1,c0 + 3*r2,c0 + 1*r3,c0\n r2,c'0 = 1*r0,c0 + 1*r1,c0 + 2*r2,c0 + 3*r3,c0\n r3,c'0 = 3*r0,c0 + 1*r1,c0 + 1*r2,c0 + 2*r3,c0\n\n So using mix tables where c0 is a word with r0 being its upper\n 8 bits and r3 being its lower 8 bits:\n\n m0[c0 >> 24] will yield this word: [2*r0,1*r0,1*r0,3*r0]\n ...\n m3[c0 & 255] will yield this word: [1*r3,1*r3,3*r3,2*r3]\n\n Therefore to mix the columns in each word in the state we\n do the following (& 255 omitted for brevity):\n c'0,r0 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n c'0,r1 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n c'0,r2 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n c'0,r3 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3]\n\n However, before mixing, the algorithm requires us to perform\n ShiftRows(). The ShiftRows() transformation cyclically shifts the\n last 3 rows of the state over different offsets. 
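/* Editor's sketch (not part of the bundled output): exercising the exported test helpers
   forge.aes._expandKey / forge.aes._updateBlock on a single block, shown against the
   standalone node-forge package (inside this bundle forge is inlined). The key and
   plaintext are the FIPS-197 sample values packed as big-endian 32-bit words. */
var forge = require('node-forge');
var keyWords = [0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f]; // 128-bit key
var block    = [0x00112233, 0x44556677, 0x8899aabb, 0xccddeeff];

var encSchedule = forge.aes._expandKey(keyWords, false);
var decSchedule = forge.aes._expandKey(keyWords, true);

var ciphertext = [];
forge.aes._updateBlock(encSchedule, block, ciphertext, false); // encrypt one block

var recovered = [];
forge.aes._updateBlock(decSchedule, ciphertext, recovered, true); // decrypt it again

// `recovered` should equal `block`; with this packing the ciphertext words are expected
// to match the FIPS-197 example (0x69c4e0d8, 0x6a7b0430, 0xd8cdb780, 0x70b4c55a) --
// that exact value is an assumption about the word order, not something taken from this file.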
The first row\n (r = 0) is not shifted.\n\n s'_r,c = s_r,(c + shift(r, Nb) mod Nb\n for 0 < r < 4 and 0 <= c < Nb and\n shift(1, 4) = 1\n shift(2, 4) = 2\n shift(3, 4) = 3.\n\n This causes the first byte in r = 1 to be moved to the end of\n the row, the first 2 bytes in r = 2 to be moved to the end of\n the row, the first 3 bytes in r = 3 to be moved to the end of\n the row:\n\n r1: [c0 c1 c2 c3] => [c1 c2 c3 c0]\n r2: [c0 c1 c2 c3] [c2 c3 c0 c1]\n r3: [c0 c1 c2 c3] [c3 c0 c1 c2]\n\n We can make these substitutions inline with our column mixing to\n generate an updated set of equations to produce each word in the\n state (note the columns have changed positions):\n\n c0 c1 c2 c3 => c0 c1 c2 c3\n c0 c1 c2 c3 c1 c2 c3 c0 (cycled 1 byte)\n c0 c1 c2 c3 c2 c3 c0 c1 (cycled 2 bytes)\n c0 c1 c2 c3 c3 c0 c1 c2 (cycled 3 bytes)\n\n Therefore:\n\n c'0 = 2*r0,c0 + 3*r1,c1 + 1*r2,c2 + 1*r3,c3\n c'0 = 1*r0,c0 + 2*r1,c1 + 3*r2,c2 + 1*r3,c3\n c'0 = 1*r0,c0 + 1*r1,c1 + 2*r2,c2 + 3*r3,c3\n c'0 = 3*r0,c0 + 1*r1,c1 + 1*r2,c2 + 2*r3,c3\n\n c'1 = 2*r0,c1 + 3*r1,c2 + 1*r2,c3 + 1*r3,c0\n c'1 = 1*r0,c1 + 2*r1,c2 + 3*r2,c3 + 1*r3,c0\n c'1 = 1*r0,c1 + 1*r1,c2 + 2*r2,c3 + 3*r3,c0\n c'1 = 3*r0,c1 + 1*r1,c2 + 1*r2,c3 + 2*r3,c0\n\n ... and so forth for c'2 and c'3. The important distinction is\n that the columns are cycling, with c0 being used with the m0\n map when calculating c0, but c1 being used with the m0 map when\n calculating c1 ... and so forth.\n\n When performing the inverse we transform the mirror image and\n skip the bottom row, instead of the top one, and move upwards:\n\n c3 c2 c1 c0 => c0 c3 c2 c1 (cycled 3 bytes) *same as encryption\n c3 c2 c1 c0 c1 c0 c3 c2 (cycled 2 bytes)\n c3 c2 c1 c0 c2 c1 c0 c3 (cycled 1 byte) *same as encryption\n c3 c2 c1 c0 c3 c2 c1 c0\n\n If you compare the resulting matrices for ShiftRows()+MixColumns()\n and for InvShiftRows()+InvMixColumns() the 2nd and 4th columns are\n different (in encrypt mode vs. decrypt mode). So in order to use\n the same code to handle both encryption and decryption, we will\n need to do some mapping.\n\n If in encryption mode we let a=c0, b=c1, c=c2, d=c3, and r be\n a row number in the state, then the resulting matrix in encryption\n mode for applying the above transformations would be:\n\n r1: a b c d\n r2: b c d a\n r3: c d a b\n r4: d a b c\n\n If we did the same in decryption mode we would get:\n\n r1: a d c b\n r2: b a d c\n r3: c b a d\n r4: d c b a\n\n If instead we swap d and b (set b=c3 and d=c1), then we get:\n\n r1: a b c d\n r2: d a b c\n r3: c d a b\n r4: b c d a\n\n Now the 1st and 3rd rows are the same as the encryption matrix. All\n we need to do then to make the mapping exactly the same is to swap\n the 2nd and 4th rows when in decryption mode. To do this without\n having to do it on each iteration, we swapped the 2nd and 4th rows\n in the decryption key schedule. We also have to do the swap above\n when we first pull in the input and when we set the final output. 
*/\n a2 =\n m0[a >>> 24] ^\n m1[b >>> 16 & 255] ^\n m2[c >>> 8 & 255] ^\n m3[d & 255] ^ w[++i];\n b2 =\n m0[b >>> 24] ^\n m1[c >>> 16 & 255] ^\n m2[d >>> 8 & 255] ^\n m3[a & 255] ^ w[++i];\n c2 =\n m0[c >>> 24] ^\n m1[d >>> 16 & 255] ^\n m2[a >>> 8 & 255] ^\n m3[b & 255] ^ w[++i];\n d =\n m0[d >>> 24] ^\n m1[a >>> 16 & 255] ^\n m2[b >>> 8 & 255] ^\n m3[c & 255] ^ w[++i];\n a = a2;\n b = b2;\n c = c2;\n }\n\n /*\n Encrypt:\n SubBytes(state)\n ShiftRows(state)\n AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1])\n\n Decrypt:\n InvShiftRows(state)\n InvSubBytes(state)\n AddRoundKey(state, w[0, Nb-1])\n */\n // Note: rows are shifted inline\n output[0] =\n (sub[a >>> 24] << 24) ^\n (sub[b >>> 16 & 255] << 16) ^\n (sub[c >>> 8 & 255] << 8) ^\n (sub[d & 255]) ^ w[++i];\n output[decrypt ? 3 : 1] =\n (sub[b >>> 24] << 24) ^\n (sub[c >>> 16 & 255] << 16) ^\n (sub[d >>> 8 & 255] << 8) ^\n (sub[a & 255]) ^ w[++i];\n output[2] =\n (sub[c >>> 24] << 24) ^\n (sub[d >>> 16 & 255] << 16) ^\n (sub[a >>> 8 & 255] << 8) ^\n (sub[b & 255]) ^ w[++i];\n output[decrypt ? 1 : 3] =\n (sub[d >>> 24] << 24) ^\n (sub[a >>> 16 & 255] << 16) ^\n (sub[b >>> 8 & 255] << 8) ^\n (sub[c & 255]) ^ w[++i];\n}\n\n/**\n * Deprecated. Instead, use:\n *\n * forge.cipher.createCipher('AES-', key);\n * forge.cipher.createDecipher('AES-', key);\n *\n * Creates a deprecated AES cipher object. This object's mode will default to\n * CBC (cipher-block-chaining).\n *\n * The key and iv may be given as a string of bytes, an array of bytes, a\n * byte buffer, or an array of 32-bit words.\n *\n * @param options the options to use.\n * key the symmetric key to use.\n * output the buffer to write to.\n * decrypt true for decryption, false for encryption.\n * mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nfunction _createCipher(options) {\n options = options || {};\n var mode = (options.mode || 'CBC').toUpperCase();\n var algorithm = 'AES-' + mode;\n\n var cipher;\n if(options.decrypt) {\n cipher = forge.cipher.createDecipher(algorithm, options.key);\n } else {\n cipher = forge.cipher.createCipher(algorithm, options.key);\n }\n\n // backwards compatible start API\n var start = cipher.start;\n cipher.start = function(iv, options) {\n // backwards compatibility: support second arg as output buffer\n var output = null;\n if(options instanceof forge.util.ByteBuffer) {\n output = options;\n options = {};\n }\n options = options || {};\n options.output = output;\n options.iv = iv;\n start.call(cipher, options);\n };\n\n return cipher;\n}\n","/**\n * A Javascript implementation of AES Cipher Suites for TLS.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2015 Digital Bazaar, Inc.\n *\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./tls');\n\nvar tls = module.exports = forge.tls;\n\n/**\n * Supported cipher suites.\n */\ntls.CipherSuites['TLS_RSA_WITH_AES_128_CBC_SHA'] = {\n id: [0x00, 0x2f],\n name: 'TLS_RSA_WITH_AES_128_CBC_SHA',\n initSecurityParameters: function(sp) {\n sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes;\n sp.cipher_type = tls.CipherType.block;\n sp.enc_key_length = 16;\n sp.block_length = 16;\n sp.fixed_iv_length = 16;\n sp.record_iv_length = 16;\n sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1;\n sp.mac_length = 20;\n sp.mac_key_length = 20;\n },\n initConnectionState: initConnectionState\n};\ntls.CipherSuites['TLS_RSA_WITH_AES_256_CBC_SHA'] = {\n id: [0x00, 0x35],\n name: 'TLS_RSA_WITH_AES_256_CBC_SHA',\n initSecurityParameters: function(sp) {\n sp.bulk_cipher_algorithm = 
tls.BulkCipherAlgorithm.aes;\n sp.cipher_type = tls.CipherType.block;\n sp.enc_key_length = 32;\n sp.block_length = 16;\n sp.fixed_iv_length = 16;\n sp.record_iv_length = 16;\n sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1;\n sp.mac_length = 20;\n sp.mac_key_length = 20;\n },\n initConnectionState: initConnectionState\n};\n\nfunction initConnectionState(state, c, sp) {\n var client = (c.entity === forge.tls.ConnectionEnd.client);\n\n // cipher setup\n state.read.cipherState = {\n init: false,\n cipher: forge.cipher.createDecipher('AES-CBC', client ?\n sp.keys.server_write_key : sp.keys.client_write_key),\n iv: client ? sp.keys.server_write_IV : sp.keys.client_write_IV\n };\n state.write.cipherState = {\n init: false,\n cipher: forge.cipher.createCipher('AES-CBC', client ?\n sp.keys.client_write_key : sp.keys.server_write_key),\n iv: client ? sp.keys.client_write_IV : sp.keys.server_write_IV\n };\n state.read.cipherFunction = decrypt_aes_cbc_sha1;\n state.write.cipherFunction = encrypt_aes_cbc_sha1;\n\n // MAC setup\n state.read.macLength = state.write.macLength = sp.mac_length;\n state.read.macFunction = state.write.macFunction = tls.hmac_sha1;\n}\n\n/**\n * Encrypts the TLSCompressed record into a TLSCipherText record using AES\n * in CBC mode.\n *\n * @param record the TLSCompressed record to encrypt.\n * @param s the ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nfunction encrypt_aes_cbc_sha1(record, s) {\n var rval = false;\n\n // append MAC to fragment, update sequence number\n var mac = s.macFunction(s.macKey, s.sequenceNumber, record);\n record.fragment.putBytes(mac);\n s.updateSequenceNumber();\n\n // TLS 1.1+ use an explicit IV every time to protect against CBC attacks\n var iv;\n if(record.version.minor === tls.Versions.TLS_1_0.minor) {\n // use the pre-generated IV when initializing for TLS 1.0, otherwise use\n // the residue from the previous encryption\n iv = s.cipherState.init ? null : s.cipherState.iv;\n } else {\n iv = forge.random.getBytesSync(16);\n }\n\n s.cipherState.init = true;\n\n // start cipher\n var cipher = s.cipherState.cipher;\n cipher.start({iv: iv});\n\n // TLS 1.1+ write IV into output\n if(record.version.minor >= tls.Versions.TLS_1_1.minor) {\n cipher.output.putBytes(iv);\n }\n\n // do encryption (default padding is appropriate)\n cipher.update(record.fragment);\n if(cipher.finish(encrypt_aes_cbc_sha1_padding)) {\n // set record fragment to encrypted output\n record.fragment = cipher.output;\n record.length = record.fragment.length();\n rval = true;\n }\n\n return rval;\n}\n\n/**\n * Handles padding for aes_cbc_sha1 in encrypt mode.\n *\n * @param blockSize the block size.\n * @param input the input buffer.\n * @param decrypt true in decrypt mode, false in encrypt mode.\n *\n * @return true on success, false on failure.\n */\nfunction encrypt_aes_cbc_sha1_padding(blockSize, input, decrypt) {\n /* The encrypted data length (TLSCiphertext.length) is one more than the sum\n of SecurityParameters.block_length, TLSCompressed.length,\n SecurityParameters.mac_length, and padding_length.\n\n The padding may be any length up to 255 bytes long, as long as it results in\n the TLSCiphertext.length being an integral multiple of the block length.\n Lengths longer than necessary might be desirable to frustrate attacks on a\n protocol based on analysis of the lengths of exchanged messages. 
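/* Editor's sketch (not part of the bundled output): the padding rule implemented by
   encrypt_aes_cbc_sha1_padding above, restated as arithmetic. The fragment is padded up
   to the next block boundary and every padding byte (including the padding_length field
   itself) carries the value padding - 1. */
function tlsCbcPadding(fragmentLength, blockSize) {
  var padding = blockSize - (fragmentLength % blockSize);
  return { bytesAdded: padding, byteValue: padding - 1 };
}
console.log(tlsCbcPadding(37, 16)); // { bytesAdded: 11, byteValue: 10 }
console.log(tlsCbcPadding(48, 16)); // already aligned -> a full extra block: { bytesAdded: 16, byteValue: 15 }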
Each uint8\n in the padding data vector must be filled with the padding length value.\n\n The padding length should be such that the total size of the\n GenericBlockCipher structure is a multiple of the cipher's block length.\n Legal values range from zero to 255, inclusive. This length specifies the\n length of the padding field exclusive of the padding_length field itself.\n\n This is slightly different from PKCS#7 because the padding value is 1\n less than the actual number of padding bytes if you include the\n padding_length uint8 itself as a padding byte. */\n if(!decrypt) {\n // get the number of padding bytes required to reach the blockSize and\n // subtract 1 for the padding value (to make room for the padding_length\n // uint8)\n var padding = blockSize - (input.length() % blockSize);\n input.fillWithByte(padding - 1, padding);\n }\n return true;\n}\n\n/**\n * Handles padding for aes_cbc_sha1 in decrypt mode.\n *\n * @param blockSize the block size.\n * @param output the output buffer.\n * @param decrypt true in decrypt mode, false in encrypt mode.\n *\n * @return true on success, false on failure.\n */\nfunction decrypt_aes_cbc_sha1_padding(blockSize, output, decrypt) {\n var rval = true;\n if(decrypt) {\n /* The last byte in the output specifies the number of padding bytes not\n including itself. Each of the padding bytes has the same value as that\n last byte (known as the padding_length). Here we check all padding\n bytes to ensure they have the value of padding_length even if one of\n them is bad in order to ward-off timing attacks. */\n var len = output.length();\n var paddingLength = output.last();\n for(var i = len - 1 - paddingLength; i < len - 1; ++i) {\n rval = rval && (output.at(i) == paddingLength);\n }\n if(rval) {\n // trim off padding bytes and last padding length byte\n output.truncate(paddingLength + 1);\n }\n }\n return rval;\n}\n\n/**\n * Decrypts a TLSCipherText record into a TLSCompressed record using\n * AES in CBC mode.\n *\n * @param record the TLSCipherText record to decrypt.\n * @param s the ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nfunction decrypt_aes_cbc_sha1(record, s) {\n var rval = false;\n\n var iv;\n if(record.version.minor === tls.Versions.TLS_1_0.minor) {\n // use pre-generated IV when initializing for TLS 1.0, otherwise use the\n // residue from the previous decryption\n iv = s.cipherState.init ? 
null : s.cipherState.iv;\n } else {\n // TLS 1.1+ use an explicit IV every time to protect against CBC attacks\n // that is appended to the record fragment\n iv = record.fragment.getBytes(16);\n }\n\n s.cipherState.init = true;\n\n // start cipher\n var cipher = s.cipherState.cipher;\n cipher.start({iv: iv});\n\n // do decryption\n cipher.update(record.fragment);\n rval = cipher.finish(decrypt_aes_cbc_sha1_padding);\n\n // even if decryption fails, keep going to minimize timing attacks\n\n // decrypted data:\n // first (len - 20) bytes = application data\n // last 20 bytes = MAC\n var macLen = s.macLength;\n\n // create a random MAC to check against should the mac length check fail\n // Note: do this regardless of the failure to keep timing consistent\n var mac = forge.random.getBytesSync(macLen);\n\n // get fragment and mac\n var len = cipher.output.length();\n if(len >= macLen) {\n record.fragment = cipher.output.getBytes(len - macLen);\n mac = cipher.output.getBytes(macLen);\n } else {\n // bad data, but get bytes anyway to try to keep timing consistent\n record.fragment = cipher.output.getBytes();\n }\n record.fragment = forge.util.createBuffer(record.fragment);\n record.length = record.fragment.length();\n\n // see if data integrity checks out, update sequence number\n var mac2 = s.macFunction(s.macKey, s.sequenceNumber, record);\n s.updateSequenceNumber();\n rval = compareMacs(s.macKey, mac, mac2) && rval;\n return rval;\n}\n\n/**\n * Safely compare two MACs. This function will compare two MACs in a way\n * that protects against timing attacks.\n *\n * TODO: Expose elsewhere as a utility API.\n *\n * See: https://www.nccgroup.trust/us/about-us/newsroom-and-events/blog/2011/february/double-hmac-verification/\n *\n * @param key the MAC key to use.\n * @param mac1 as a binary-encoded string of bytes.\n * @param mac2 as a binary-encoded string of bytes.\n *\n * @return true if the MACs are the same, false if not.\n */\nfunction compareMacs(key, mac1, mac2) {\n var hmac = forge.hmac.create();\n\n hmac.start('SHA1', key);\n hmac.update(mac1);\n mac1 = hmac.digest().getBytes();\n\n hmac.start(null, null);\n hmac.update(mac2);\n mac2 = hmac.digest().getBytes();\n\n return mac1 === mac2;\n}\n","/**\n * Copyright (c) 2019 Digital Bazaar, Inc.\n */\n\nvar forge = require('./forge');\nrequire('./asn1');\nvar asn1 = forge.asn1;\n\nexports.privateKeyValidator = {\n // PrivateKeyInfo\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'PrivateKeyInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // privateKeyAlgorithm\n name: 'PrivateKeyInfo.privateKeyAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'privateKeyOid'\n }]\n }, {\n // PrivateKey\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'privateKey'\n }]\n};\n\nexports.publicKeyValidator = {\n name: 'SubjectPublicKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'subjectPublicKeyInfo',\n value: [{\n name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n 
name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'publicKeyOid'\n }]\n },\n // capture group for ed25519PublicKey\n {\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n composed: true,\n captureBitStringValue: 'ed25519PublicKey'\n }\n // FIXME: this is capture group for rsaPublicKey, use it in this API or\n // discard?\n /* {\n // subjectPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n value: [{\n // RSAPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'rsaPublicKey'\n }]\n } */\n ]\n};\n","/**\n * Javascript implementation of Abstract Syntax Notation Number One.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n *\n * An API for storing data using the Abstract Syntax Notation Number One\n * format using DER (Distinguished Encoding Rules) encoding. This encoding is\n * commonly used to store data for PKI, i.e. X.509 Certificates, and this\n * implementation exists for that purpose.\n *\n * Abstract Syntax Notation Number One (ASN.1) is used to define the abstract\n * syntax of information without restricting the way the information is encoded\n * for transmission. It provides a standard that allows for open systems\n * communication. ASN.1 defines the syntax of information data and a number of\n * simple data types as well as a notation for describing them and specifying\n * values for them.\n *\n * The RSA algorithm creates public and private keys that are often stored in\n * X.509 or PKCS#X formats -- which use ASN.1 (encoded in DER format). This\n * class provides the most basic functionality required to store and load DSA\n * keys that are encoded according to ASN.1.\n *\n * The most common binary encodings for ASN.1 are BER (Basic Encoding Rules)\n * and DER (Distinguished Encoding Rules). DER is just a subset of BER that\n * has stricter requirements for how data must be encoded.\n *\n * Each ASN.1 structure has a tag (a byte identifying the ASN.1 structure type)\n * and a byte array for the value of this ASN1 structure which may be data or a\n * list of ASN.1 structures.\n *\n * Each ASN.1 structure using BER is (Tag-Length-Value):\n *\n * | byte 0 | bytes X | bytes Y |\n * |--------|---------|----------\n * | tag | length | value |\n *\n * ASN.1 allows for tags to be of \"High-tag-number form\" which allows a tag to\n * be two or more octets, but that is not supported by this class. A tag is\n * only 1 byte. Bits 1-5 give the tag number (ie the data type within a\n * particular 'class'), 6 indicates whether or not the ASN.1 value is\n * constructed from other ASN.1 values, and bits 7 and 8 give the 'class'. If\n * bits 7 and 8 are both zero, the class is UNIVERSAL. If only bit 7 is set,\n * then the class is APPLICATION. If only bit 8 is set, then the class is\n * CONTEXT_SPECIFIC. 
If both bits 7 and 8 are set, then the class is PRIVATE.\n * The tag numbers for the data types for the class UNIVERSAL are listed below:\n *\n * UNIVERSAL 0 Reserved for use by the encoding rules\n * UNIVERSAL 1 Boolean type\n * UNIVERSAL 2 Integer type\n * UNIVERSAL 3 Bitstring type\n * UNIVERSAL 4 Octetstring type\n * UNIVERSAL 5 Null type\n * UNIVERSAL 6 Object identifier type\n * UNIVERSAL 7 Object descriptor type\n * UNIVERSAL 8 External type and Instance-of type\n * UNIVERSAL 9 Real type\n * UNIVERSAL 10 Enumerated type\n * UNIVERSAL 11 Embedded-pdv type\n * UNIVERSAL 12 UTF8String type\n * UNIVERSAL 13 Relative object identifier type\n * UNIVERSAL 14-15 Reserved for future editions\n * UNIVERSAL 16 Sequence and Sequence-of types\n * UNIVERSAL 17 Set and Set-of types\n * UNIVERSAL 18-22, 25-30 Character string types\n * UNIVERSAL 23-24 Time types\n *\n * The length of an ASN.1 structure is specified after the tag identifier.\n * There is a definite form and an indefinite form. The indefinite form may\n * be used if the encoding is constructed and not all immediately available.\n * The indefinite form is encoded using a length byte with only the 8th bit\n * set. The end of the constructed object is marked using end-of-contents\n * octets (two zero bytes).\n *\n * The definite form looks like this:\n *\n * The length may take up 1 or more bytes, it depends on the length of the\n * value of the ASN.1 structure. DER encoding requires that if the ASN.1\n * structure has a value that has a length greater than 127, more than 1 byte\n * will be used to store its length, otherwise just one byte will be used.\n * This is strict.\n *\n * In the case that the length of the ASN.1 value is less than 127, 1 octet\n * (byte) is used to store the \"short form\" length. The 8th bit has a value of\n * 0 indicating the length is \"short form\" and not \"long form\" and bits 7-1\n * give the length of the data. (The 8th bit is the left-most, most significant\n * bit: also known as big endian or network format).\n *\n * In the case that the length of the ASN.1 value is greater than 127, 2 to\n * 127 octets (bytes) are used to store the \"long form\" length. The first\n * byte's 8th bit is set to 1 to indicate the length is \"long form.\" Bits 7-1\n * give the number of additional octets. All following octets are in base 256\n * with the most significant digit first (typical big-endian binary unsigned\n * integer storage). So, for instance, if the length of a value was 257, the\n * first byte would be set to:\n *\n * 10000010 = 130 = 0x82.\n *\n * This indicates there are 2 octets (base 256) for the length. The second and\n * third bytes (the octets just mentioned) would store the length in base 256:\n *\n * octet 2: 00000001 = 1 * 256^1 = 256\n * octet 3: 00000001 = 1 * 256^0 = 1\n * total = 257\n *\n * The algorithm for converting a js integer value of 257 to base-256 is:\n *\n * var value = 257;\n * var bytes = [];\n * bytes[0] = (value >>> 8) & 0xFF; // most significant byte first\n * bytes[1] = value & 0xFF; // least significant byte last\n *\n * On the ASN.1 UNIVERSAL Object Identifier (OID) type:\n *\n * An OID can be written like: \"value1.value2.value3...valueN\"\n *\n * The DER encoding rules:\n *\n * The first byte has the value 40 * value1 + value2.\n * The following bytes, if any, encode the remaining values. 
Each value is\n * encoded in base 128, most significant digit first (big endian), with as\n * few digits as possible, and the most significant bit of each byte set\n * to 1 except the last in each value's encoding. For example: Given the\n * OID \"1.2.840.113549\", its DER encoding is (remember each byte except the\n * last one in each encoding is OR'd with 0x80):\n *\n * byte 1: 40 * 1 + 2 = 42 = 0x2A.\n * bytes 2-3: 128 * 6 + 72 = 840 = 6 72 = 6 72 = 0x0648 = 0x8648\n * bytes 4-6: 16384 * 6 + 128 * 119 + 13 = 6 119 13 = 0x06770D = 0x86F70D\n *\n * The final value is: 0x2A864886F70D.\n * The full OID (including ASN.1 tag and length of 6 bytes) is:\n * 0x06062A864886F70D\n */\nvar forge = require('./forge');\nrequire('./util');\nrequire('./oids');\n\n/* ASN.1 API */\nvar asn1 = module.exports = forge.asn1 = forge.asn1 || {};\n\n/**\n * ASN.1 classes.\n */\nasn1.Class = {\n UNIVERSAL: 0x00,\n APPLICATION: 0x40,\n CONTEXT_SPECIFIC: 0x80,\n PRIVATE: 0xC0\n};\n\n/**\n * ASN.1 types. Not all types are supported by this implementation, only\n * those necessary to implement a simple PKI are implemented.\n */\nasn1.Type = {\n NONE: 0,\n BOOLEAN: 1,\n INTEGER: 2,\n BITSTRING: 3,\n OCTETSTRING: 4,\n NULL: 5,\n OID: 6,\n ODESC: 7,\n EXTERNAL: 8,\n REAL: 9,\n ENUMERATED: 10,\n EMBEDDED: 11,\n UTF8: 12,\n ROID: 13,\n SEQUENCE: 16,\n SET: 17,\n PRINTABLESTRING: 19,\n IA5STRING: 22,\n UTCTIME: 23,\n GENERALIZEDTIME: 24,\n BMPSTRING: 30\n};\n\n/**\n * Creates a new asn1 object.\n *\n * @param tagClass the tag class for the object.\n * @param type the data type (tag number) for the object.\n * @param constructed true if the asn1 object is in constructed form.\n * @param value the value for the object, if it is not constructed.\n * @param [options] the options to use:\n * [bitStringContents] the plain BIT STRING content including padding\n * byte.\n *\n * @return the asn1 object.\n */\nasn1.create = function(tagClass, type, constructed, value, options) {\n /* An asn1 object has a tagClass, a type, a constructed flag, and a\n value. The value's type depends on the constructed flag. If\n constructed, it will contain a list of other asn1 objects. If not,\n it will contain the ASN.1 value as an array of bytes formatted\n according to the ASN.1 data type. 
*/\n\n // remove undefined values\n if(forge.util.isArray(value)) {\n var tmp = [];\n for(var i = 0; i < value.length; ++i) {\n if(value[i] !== undefined) {\n tmp.push(value[i]);\n }\n }\n value = tmp;\n }\n\n var obj = {\n tagClass: tagClass,\n type: type,\n constructed: constructed,\n composed: constructed || forge.util.isArray(value),\n value: value\n };\n if(options && 'bitStringContents' in options) {\n // TODO: copy byte buffer if it's a buffer not a string\n obj.bitStringContents = options.bitStringContents;\n // TODO: add readonly flag to avoid this overhead\n // save copy to detect changes\n obj.original = asn1.copy(obj);\n }\n return obj;\n};\n\n/**\n * Copies an asn1 object.\n *\n * @param obj the asn1 object.\n * @param [options] copy options:\n * [excludeBitStringContents] true to not copy bitStringContents\n *\n * @return the a copy of the asn1 object.\n */\nasn1.copy = function(obj, options) {\n var copy;\n\n if(forge.util.isArray(obj)) {\n copy = [];\n for(var i = 0; i < obj.length; ++i) {\n copy.push(asn1.copy(obj[i], options));\n }\n return copy;\n }\n\n if(typeof obj === 'string') {\n // TODO: copy byte buffer if it's a buffer not a string\n return obj;\n }\n\n copy = {\n tagClass: obj.tagClass,\n type: obj.type,\n constructed: obj.constructed,\n composed: obj.composed,\n value: asn1.copy(obj.value, options)\n };\n if(options && !options.excludeBitStringContents) {\n // TODO: copy byte buffer if it's a buffer not a string\n copy.bitStringContents = obj.bitStringContents;\n }\n return copy;\n};\n\n/**\n * Compares asn1 objects for equality.\n *\n * Note this function does not run in constant time.\n *\n * @param obj1 the first asn1 object.\n * @param obj2 the second asn1 object.\n * @param [options] compare options:\n * [includeBitStringContents] true to compare bitStringContents\n *\n * @return true if the asn1 objects are equal.\n */\nasn1.equals = function(obj1, obj2, options) {\n if(forge.util.isArray(obj1)) {\n if(!forge.util.isArray(obj2)) {\n return false;\n }\n if(obj1.length !== obj2.length) {\n return false;\n }\n for(var i = 0; i < obj1.length; ++i) {\n if(!asn1.equals(obj1[i], obj2[i])) {\n return false;\n }\n }\n return true;\n }\n\n if(typeof obj1 !== typeof obj2) {\n return false;\n }\n\n if(typeof obj1 === 'string') {\n return obj1 === obj2;\n }\n\n var equal = obj1.tagClass === obj2.tagClass &&\n obj1.type === obj2.type &&\n obj1.constructed === obj2.constructed &&\n obj1.composed === obj2.composed &&\n asn1.equals(obj1.value, obj2.value);\n if(options && options.includeBitStringContents) {\n equal = equal && (obj1.bitStringContents === obj2.bitStringContents);\n }\n\n return equal;\n};\n\n/**\n * Gets the length of a BER-encoded ASN.1 value.\n *\n * In case the length is not specified, undefined is returned.\n *\n * @param b the BER-encoded ASN.1 byte buffer, starting with the first\n * length byte.\n *\n * @return the length of the BER-encoded ASN.1 value or undefined.\n */\nasn1.getBerValueLength = function(b) {\n // TODO: move this function and related DER/BER functions to a der.js\n // file; better abstract ASN.1 away from der/ber.\n var b2 = b.getByte();\n if(b2 === 0x80) {\n return undefined;\n }\n\n // see if the length is \"short form\" or \"long form\" (bit 8 set)\n var length;\n var longForm = b2 & 0x80;\n if(!longForm) {\n // length is just the first byte\n length = b2;\n } else {\n // the number of bytes the length is specified in bits 7 through 1\n // and each length byte is in big-endian base-256\n length = b.getInt((b2 & 0x7F) << 3);\n }\n 
return length;\n};\n\n/**\n * Check if the byte buffer has enough bytes. Throws an Error if not.\n *\n * @param bytes the byte buffer to parse from.\n * @param remaining the bytes remaining in the current parsing state.\n * @param n the number of bytes the buffer must have.\n */\nfunction _checkBufferLength(bytes, remaining, n) {\n if(n > remaining) {\n var error = new Error('Too few bytes to parse DER.');\n error.available = bytes.length();\n error.remaining = remaining;\n error.requested = n;\n throw error;\n }\n}\n\n/**\n * Gets the length of a BER-encoded ASN.1 value.\n *\n * In case the length is not specified, undefined is returned.\n *\n * @param bytes the byte buffer to parse from.\n * @param remaining the bytes remaining in the current parsing state.\n *\n * @return the length of the BER-encoded ASN.1 value or undefined.\n */\nvar _getValueLength = function(bytes, remaining) {\n // TODO: move this function and related DER/BER functions to a der.js\n // file; better abstract ASN.1 away from der/ber.\n // fromDer already checked that this byte exists\n var b2 = bytes.getByte();\n remaining--;\n if(b2 === 0x80) {\n return undefined;\n }\n\n // see if the length is \"short form\" or \"long form\" (bit 8 set)\n var length;\n var longForm = b2 & 0x80;\n if(!longForm) {\n // length is just the first byte\n length = b2;\n } else {\n // the number of bytes the length is specified in bits 7 through 1\n // and each length byte is in big-endian base-256\n var longFormBytes = b2 & 0x7F;\n _checkBufferLength(bytes, remaining, longFormBytes);\n length = bytes.getInt(longFormBytes << 3);\n }\n // FIXME: this will only happen for 32 bit getInt with high bit set\n if(length < 0) {\n throw new Error('Negative length: ' + length);\n }\n return length;\n};\n\n/**\n * Parses an asn1 object from a byte buffer in DER format.\n *\n * @param bytes the byte buffer to parse from.\n * @param [strict] true to be strict when checking value lengths, false to\n * allow truncated values (default: true).\n * @param [options] object with options or boolean strict flag\n * [strict] true to be strict when checking value lengths, false to\n * allow truncated values (default: true).\n * [parseAllBytes] true to ensure all bytes are parsed\n * (default: true)\n * [decodeBitStrings] true to attempt to decode the content of\n * BIT STRINGs (not OCTET STRINGs) using strict mode. Note that\n * without schema support to understand the data context this can\n * erroneously decode values that happen to be valid ASN.1. This\n * flag will be deprecated or removed as soon as schema support is\n * available. 
(default: true)\n *\n * @throws Will throw an error for various malformed input conditions.\n *\n * @return the parsed asn1 object.\n */\nasn1.fromDer = function(bytes, options) {\n if(options === undefined) {\n options = {\n strict: true,\n parseAllBytes: true,\n decodeBitStrings: true\n };\n }\n if(typeof options === 'boolean') {\n options = {\n strict: options,\n parseAllBytes: true,\n decodeBitStrings: true\n };\n }\n if(!('strict' in options)) {\n options.strict = true;\n }\n if(!('parseAllBytes' in options)) {\n options.parseAllBytes = true;\n }\n if(!('decodeBitStrings' in options)) {\n options.decodeBitStrings = true;\n }\n\n // wrap in buffer if needed\n if(typeof bytes === 'string') {\n bytes = forge.util.createBuffer(bytes);\n }\n\n var byteCount = bytes.length();\n var value = _fromDer(bytes, bytes.length(), 0, options);\n if(options.parseAllBytes && bytes.length() !== 0) {\n var error = new Error('Unparsed DER bytes remain after ASN.1 parsing.');\n error.byteCount = byteCount;\n error.remaining = bytes.length();\n throw error;\n }\n return value;\n};\n\n/**\n * Internal function to parse an asn1 object from a byte buffer in DER format.\n *\n * @param bytes the byte buffer to parse from.\n * @param remaining the number of bytes remaining for this chunk.\n * @param depth the current parsing depth.\n * @param options object with same options as fromDer().\n *\n * @return the parsed asn1 object.\n */\nfunction _fromDer(bytes, remaining, depth, options) {\n // temporary storage for consumption calculations\n var start;\n\n // minimum length for ASN.1 DER structure is 2\n _checkBufferLength(bytes, remaining, 2);\n\n // get the first byte\n var b1 = bytes.getByte();\n // consumed one byte\n remaining--;\n\n // get the tag class\n var tagClass = (b1 & 0xC0);\n\n // get the type (bits 1-5)\n var type = b1 & 0x1F;\n\n // get the variable value length and adjust remaining bytes\n start = bytes.length();\n var length = _getValueLength(bytes, remaining);\n remaining -= start - bytes.length();\n\n // ensure there are enough bytes to get the value\n if(length !== undefined && length > remaining) {\n if(options.strict) {\n var error = new Error('Too few bytes to read ASN.1 value.');\n error.available = bytes.length();\n error.remaining = remaining;\n error.requested = length;\n throw error;\n }\n // Note: be lenient with truncated values and use remaining state bytes\n length = remaining;\n }\n\n // value storage\n var value;\n // possible BIT STRING contents storage\n var bitStringContents;\n\n // constructed flag is bit 6 (32 = 0x20) of the first byte\n var constructed = ((b1 & 0x20) === 0x20);\n if(constructed) {\n // parse child asn1 objects from the value\n value = [];\n if(length === undefined) {\n // asn1 object of indefinite length, read until end tag\n for(;;) {\n _checkBufferLength(bytes, remaining, 2);\n if(bytes.bytes(2) === String.fromCharCode(0, 0)) {\n bytes.getBytes(2);\n remaining -= 2;\n break;\n }\n start = bytes.length();\n value.push(_fromDer(bytes, remaining, depth + 1, options));\n remaining -= start - bytes.length();\n }\n } else {\n // parsing asn1 object of definite length\n while(length > 0) {\n start = bytes.length();\n value.push(_fromDer(bytes, length, depth + 1, options));\n remaining -= start - bytes.length();\n length -= start - bytes.length();\n }\n }\n }\n\n // if a BIT STRING, save the contents including padding\n if(value === undefined && tagClass === asn1.Class.UNIVERSAL &&\n type === asn1.Type.BITSTRING) {\n bitStringContents = bytes.bytes(length);\n 
}\n\n // determine if a non-constructed value should be decoded as a composed\n // value that contains other ASN.1 objects. BIT STRINGs (and OCTET STRINGs)\n // can be used this way.\n if(value === undefined && options.decodeBitStrings &&\n tagClass === asn1.Class.UNIVERSAL &&\n // FIXME: OCTET STRINGs not yet supported here\n // .. other parts of forge expect to decode OCTET STRINGs manually\n (type === asn1.Type.BITSTRING /*|| type === asn1.Type.OCTETSTRING*/) &&\n length > 1) {\n // save read position\n var savedRead = bytes.read;\n var savedRemaining = remaining;\n var unused = 0;\n if(type === asn1.Type.BITSTRING) {\n /* The first octet gives the number of bits by which the length of the\n bit string is less than the next multiple of eight (this is called\n the \"number of unused bits\").\n\n The second and following octets give the value of the bit string\n converted to an octet string. */\n _checkBufferLength(bytes, remaining, 1);\n unused = bytes.getByte();\n remaining--;\n }\n // if all bits are used, maybe the BIT/OCTET STRING holds ASN.1 objs\n if(unused === 0) {\n try {\n // attempt to parse child asn1 object from the value\n // (stored in array to signal composed value)\n start = bytes.length();\n var subOptions = {\n // enforce strict mode to avoid parsing ASN.1 from plain data\n strict: true,\n decodeBitStrings: true\n };\n var composed = _fromDer(bytes, remaining, depth + 1, subOptions);\n var used = start - bytes.length();\n remaining -= used;\n if(type == asn1.Type.BITSTRING) {\n used++;\n }\n\n // if the data all decoded and the class indicates UNIVERSAL or\n // CONTEXT_SPECIFIC then assume we've got an encapsulated ASN.1 object\n var tc = composed.tagClass;\n if(used === length &&\n (tc === asn1.Class.UNIVERSAL || tc === asn1.Class.CONTEXT_SPECIFIC)) {\n value = [composed];\n }\n } catch(ex) {\n }\n }\n if(value === undefined) {\n // restore read position\n bytes.read = savedRead;\n remaining = savedRemaining;\n }\n }\n\n if(value === undefined) {\n // asn1 not constructed or composed, get raw value\n // TODO: do DER to OID conversion and vice-versa in .toDer?\n\n if(length === undefined) {\n if(options.strict) {\n throw new Error('Non-constructed ASN.1 object of indefinite length.');\n }\n // be lenient and use remaining state bytes\n length = remaining;\n }\n\n if(type === asn1.Type.BMPSTRING) {\n value = '';\n for(; length > 0; length -= 2) {\n _checkBufferLength(bytes, remaining, 2);\n value += String.fromCharCode(bytes.getInt16());\n remaining -= 2;\n }\n } else {\n value = bytes.getBytes(length);\n remaining -= length;\n }\n }\n\n // add BIT STRING contents if available\n var asn1Options = bitStringContents === undefined ? 
null : {\n bitStringContents: bitStringContents\n };\n\n // create and return asn1 object\n return asn1.create(tagClass, type, constructed, value, asn1Options);\n}\n\n/**\n * Converts the given asn1 object to a buffer of bytes in DER format.\n *\n * @param asn1 the asn1 object to convert to bytes.\n *\n * @return the buffer of bytes.\n */\nasn1.toDer = function(obj) {\n var bytes = forge.util.createBuffer();\n\n // build the first byte\n var b1 = obj.tagClass | obj.type;\n\n // for storing the ASN.1 value\n var value = forge.util.createBuffer();\n\n // use BIT STRING contents if available and data not changed\n var useBitStringContents = false;\n if('bitStringContents' in obj) {\n useBitStringContents = true;\n if(obj.original) {\n useBitStringContents = asn1.equals(obj, obj.original);\n }\n }\n\n if(useBitStringContents) {\n value.putBytes(obj.bitStringContents);\n } else if(obj.composed) {\n // if composed, use each child asn1 object's DER bytes as value\n // turn on 6th bit (0x20 = 32) to indicate asn1 is constructed\n // from other asn1 objects\n if(obj.constructed) {\n b1 |= 0x20;\n } else {\n // type is a bit string, add unused bits of 0x00\n value.putByte(0x00);\n }\n\n // add all of the child DER bytes together\n for(var i = 0; i < obj.value.length; ++i) {\n if(obj.value[i] !== undefined) {\n value.putBuffer(asn1.toDer(obj.value[i]));\n }\n }\n } else {\n // use asn1.value directly\n if(obj.type === asn1.Type.BMPSTRING) {\n for(var i = 0; i < obj.value.length; ++i) {\n value.putInt16(obj.value.charCodeAt(i));\n }\n } else {\n // ensure integer is minimally-encoded\n // TODO: should all leading bytes be stripped vs just one?\n // .. ex '00 00 01' => '01'?\n if(obj.type === asn1.Type.INTEGER &&\n obj.value.length > 1 &&\n // leading 0x00 for positive integer\n ((obj.value.charCodeAt(0) === 0 &&\n (obj.value.charCodeAt(1) & 0x80) === 0) ||\n // leading 0xFF for negative integer\n (obj.value.charCodeAt(0) === 0xFF &&\n (obj.value.charCodeAt(1) & 0x80) === 0x80))) {\n value.putBytes(obj.value.substr(1));\n } else {\n value.putBytes(obj.value);\n }\n }\n }\n\n // add tag byte\n bytes.putByte(b1);\n\n // use \"short form\" encoding\n if(value.length() <= 127) {\n // one byte describes the length\n // bit 8 = 0 and bits 7-1 = length\n bytes.putByte(value.length() & 0x7F);\n } else {\n // use \"long form\" encoding\n // 2 to 127 bytes describe the length\n // first byte: bit 8 = 1 and bits 7-1 = # of additional bytes\n // other bytes: length in base 256, big-endian\n var len = value.length();\n var lenBytes = '';\n do {\n lenBytes += String.fromCharCode(len & 0xFF);\n len = len >>> 8;\n } while(len > 0);\n\n // set first byte to # bytes used to store the length and turn on\n // bit 8 to indicate long-form length is used\n bytes.putByte(lenBytes.length | 0x80);\n\n // concatenate length bytes in reverse since they were generated\n // little endian and we need big endian\n for(var i = lenBytes.length - 1; i >= 0; --i) {\n bytes.putByte(lenBytes.charCodeAt(i));\n }\n }\n\n // concatenate value bytes\n bytes.putBuffer(value);\n return bytes;\n};\n\n/**\n * Converts an OID dot-separated string to a byte buffer. 
The byte buffer\n * contains only the DER-encoded value, not any tag or length bytes.\n *\n * @param oid the OID dot-separated string.\n *\n * @return the byte buffer.\n */\nasn1.oidToDer = function(oid) {\n // split OID into individual values\n var values = oid.split('.');\n var bytes = forge.util.createBuffer();\n\n // first byte is 40 * value1 + value2\n bytes.putByte(40 * parseInt(values[0], 10) + parseInt(values[1], 10));\n // other bytes are each value in base 128 with 8th bit set except for\n // the last byte for each value\n var last, valueBytes, value, b;\n for(var i = 2; i < values.length; ++i) {\n // produce value bytes in reverse because we don't know how many\n // bytes it will take to store the value\n last = true;\n valueBytes = [];\n value = parseInt(values[i], 10);\n do {\n b = value & 0x7F;\n value = value >>> 7;\n // if value is not last, then turn on 8th bit\n if(!last) {\n b |= 0x80;\n }\n valueBytes.push(b);\n last = false;\n } while(value > 0);\n\n // add value bytes in reverse (needs to be in big endian)\n for(var n = valueBytes.length - 1; n >= 0; --n) {\n bytes.putByte(valueBytes[n]);\n }\n }\n\n return bytes;\n};\n\n/**\n * Converts a DER-encoded byte buffer to an OID dot-separated string. The\n * byte buffer should contain only the DER-encoded value, not any tag or\n * length bytes.\n *\n * @param bytes the byte buffer.\n *\n * @return the OID dot-separated string.\n */\nasn1.derToOid = function(bytes) {\n var oid;\n\n // wrap in buffer if needed\n if(typeof bytes === 'string') {\n bytes = forge.util.createBuffer(bytes);\n }\n\n // first byte is 40 * value1 + value2\n var b = bytes.getByte();\n oid = Math.floor(b / 40) + '.' + (b % 40);\n\n // other bytes are each value in base 128 with 8th bit set except for\n // the last byte for each value\n var value = 0;\n while(bytes.length() > 0) {\n b = bytes.getByte();\n value = value << 7;\n // not the last byte for the value\n if(b & 0x80) {\n value += b & 0x7F;\n } else {\n // last byte\n oid += '.' + (value + b);\n value = 0;\n }\n }\n\n return oid;\n};\n\n/**\n * Converts a UTCTime value to a date.\n *\n * Note: GeneralizedTime has 4 digits for the year and is used for X.509\n * dates past 2049. Parsing that structure hasn't been implemented yet.\n *\n * @param utc the UTCTime value to convert.\n *\n * @return the date.\n */\nasn1.utcTimeToDate = function(utc) {\n /* The following formats can be used:\n\n YYMMDDhhmmZ\n YYMMDDhhmm+hh'mm'\n YYMMDDhhmm-hh'mm'\n YYMMDDhhmmssZ\n YYMMDDhhmmss+hh'mm'\n YYMMDDhhmmss-hh'mm'\n\n Where:\n\n YY is the least significant two digits of the year\n MM is the month (01 to 12)\n DD is the day (01 to 31)\n hh is the hour (00 to 23)\n mm are the minutes (00 to 59)\n ss are the seconds (00 to 59)\n Z indicates that local time is GMT, + indicates that local time is\n later than GMT, and - indicates that local time is earlier than GMT\n hh' is the absolute value of the offset from GMT in hours\n mm' is the absolute value of the offset from GMT in minutes */\n var date = new Date();\n\n // if YY >= 50 use 19xx, if YY < 50 use 20xx\n var year = parseInt(utc.substr(0, 2), 10);\n year = (year >= 50) ? 
1900 + year : 2000 + year;\n var MM = parseInt(utc.substr(2, 2), 10) - 1; // use 0-11 for month\n var DD = parseInt(utc.substr(4, 2), 10);\n var hh = parseInt(utc.substr(6, 2), 10);\n var mm = parseInt(utc.substr(8, 2), 10);\n var ss = 0;\n\n // not just YYMMDDhhmmZ\n if(utc.length > 11) {\n // get character after minutes\n var c = utc.charAt(10);\n var end = 10;\n\n // see if seconds are present\n if(c !== '+' && c !== '-') {\n // get seconds\n ss = parseInt(utc.substr(10, 2), 10);\n end += 2;\n }\n }\n\n // update date\n date.setUTCFullYear(year, MM, DD);\n date.setUTCHours(hh, mm, ss, 0);\n\n if(end) {\n // get +/- after end of time\n c = utc.charAt(end);\n if(c === '+' || c === '-') {\n // get hours+minutes offset\n var hhoffset = parseInt(utc.substr(end + 1, 2), 10);\n var mmoffset = parseInt(utc.substr(end + 4, 2), 10);\n\n // calculate offset in milliseconds\n var offset = hhoffset * 60 + mmoffset;\n offset *= 60000;\n\n // apply offset\n if(c === '+') {\n date.setTime(+date - offset);\n } else {\n date.setTime(+date + offset);\n }\n }\n }\n\n return date;\n};\n\n/**\n * Converts a GeneralizedTime value to a date.\n *\n * @param gentime the GeneralizedTime value to convert.\n *\n * @return the date.\n */\nasn1.generalizedTimeToDate = function(gentime) {\n /* The following formats can be used:\n\n YYYYMMDDHHMMSS\n YYYYMMDDHHMMSS.fff\n YYYYMMDDHHMMSSZ\n YYYYMMDDHHMMSS.fffZ\n YYYYMMDDHHMMSS+hh'mm'\n YYYYMMDDHHMMSS.fff+hh'mm'\n YYYYMMDDHHMMSS-hh'mm'\n YYYYMMDDHHMMSS.fff-hh'mm'\n\n Where:\n\n YYYY is the year\n MM is the month (01 to 12)\n DD is the day (01 to 31)\n hh is the hour (00 to 23)\n mm are the minutes (00 to 59)\n ss are the seconds (00 to 59)\n .fff is the second fraction, accurate to three decimal places\n Z indicates that local time is GMT, + indicates that local time is\n later than GMT, and - indicates that local time is earlier than GMT\n hh' is the absolute value of the offset from GMT in hours\n mm' is the absolute value of the offset from GMT in minutes */\n var date = new Date();\n\n var YYYY = parseInt(gentime.substr(0, 4), 10);\n var MM = parseInt(gentime.substr(4, 2), 10) - 1; // use 0-11 for month\n var DD = parseInt(gentime.substr(6, 2), 10);\n var hh = parseInt(gentime.substr(8, 2), 10);\n var mm = parseInt(gentime.substr(10, 2), 10);\n var ss = parseInt(gentime.substr(12, 2), 10);\n var fff = 0;\n var offset = 0;\n var isUTC = false;\n\n if(gentime.charAt(gentime.length - 1) === 'Z') {\n isUTC = true;\n }\n\n var end = gentime.length - 5, c = gentime.charAt(end);\n if(c === '+' || c === '-') {\n // get hours+minutes offset\n var hhoffset = parseInt(gentime.substr(end + 1, 2), 10);\n var mmoffset = parseInt(gentime.substr(end + 4, 2), 10);\n\n // calculate offset in milliseconds\n offset = hhoffset * 60 + mmoffset;\n offset *= 60000;\n\n // apply offset\n if(c === '+') {\n offset *= -1;\n }\n\n isUTC = true;\n }\n\n // check for second fraction\n if(gentime.charAt(14) === '.') {\n fff = parseFloat(gentime.substr(14), 10) * 1000;\n }\n\n if(isUTC) {\n date.setUTCFullYear(YYYY, MM, DD);\n date.setUTCHours(hh, mm, ss, fff);\n\n // apply offset\n date.setTime(+date + offset);\n } else {\n date.setFullYear(YYYY, MM, DD);\n date.setHours(hh, mm, ss, fff);\n }\n\n return date;\n};\n\n/**\n * Converts a date to a UTCTime value.\n *\n * Note: GeneralizedTime has 4 digits for the year and is used for X.509\n * dates past 2049. 
Converting to a GeneralizedTime hasn't been\n * implemented yet.\n *\n * @param date the date to convert.\n *\n * @return the UTCTime value.\n */\nasn1.dateToUtcTime = function(date) {\n // TODO: validate; currently assumes proper format\n if(typeof date === 'string') {\n return date;\n }\n\n var rval = '';\n\n // create format YYMMDDhhmmssZ\n var format = [];\n format.push(('' + date.getUTCFullYear()).substr(2));\n format.push('' + (date.getUTCMonth() + 1));\n format.push('' + date.getUTCDate());\n format.push('' + date.getUTCHours());\n format.push('' + date.getUTCMinutes());\n format.push('' + date.getUTCSeconds());\n\n // ensure 2 digits are used for each format entry\n for(var i = 0; i < format.length; ++i) {\n if(format[i].length < 2) {\n rval += '0';\n }\n rval += format[i];\n }\n rval += 'Z';\n\n return rval;\n};\n\n/**\n * Converts a date to a GeneralizedTime value.\n *\n * @param date the date to convert.\n *\n * @return the GeneralizedTime value as a string.\n */\nasn1.dateToGeneralizedTime = function(date) {\n // TODO: validate; currently assumes proper format\n if(typeof date === 'string') {\n return date;\n }\n\n var rval = '';\n\n // create format YYYYMMDDHHMMSSZ\n var format = [];\n format.push('' + date.getUTCFullYear());\n format.push('' + (date.getUTCMonth() + 1));\n format.push('' + date.getUTCDate());\n format.push('' + date.getUTCHours());\n format.push('' + date.getUTCMinutes());\n format.push('' + date.getUTCSeconds());\n\n // ensure 2 digits are used for each format entry\n for(var i = 0; i < format.length; ++i) {\n if(format[i].length < 2) {\n rval += '0';\n }\n rval += format[i];\n }\n rval += 'Z';\n\n return rval;\n};\n\n/**\n * Converts a javascript integer to a DER-encoded byte buffer to be used\n * as the value for an INTEGER type.\n *\n * @param x the integer.\n *\n * @return the byte buffer.\n */\nasn1.integerToDer = function(x) {\n var rval = forge.util.createBuffer();\n if(x >= -0x80 && x < 0x80) {\n return rval.putSignedInt(x, 8);\n }\n if(x >= -0x8000 && x < 0x8000) {\n return rval.putSignedInt(x, 16);\n }\n if(x >= -0x800000 && x < 0x800000) {\n return rval.putSignedInt(x, 24);\n }\n if(x >= -0x80000000 && x < 0x80000000) {\n return rval.putSignedInt(x, 32);\n }\n var error = new Error('Integer too large; max is 32-bits.');\n error.integer = x;\n throw error;\n};\n\n/**\n * Converts a DER-encoded byte buffer to a javascript integer. This is\n * typically used to decode the value of an INTEGER type.\n *\n * @param bytes the byte buffer.\n *\n * @return the integer.\n */\nasn1.derToInteger = function(bytes) {\n // wrap in buffer if needed\n if(typeof bytes === 'string') {\n bytes = forge.util.createBuffer(bytes);\n }\n\n var n = bytes.length() * 8;\n if(n > 32) {\n throw new Error('Integer too large; max is 32-bits.');\n }\n return bytes.getSignedInt(n);\n};\n\n/**\n * Validates that the given ASN.1 object is at least a super set of the\n * given ASN.1 structure. Only tag classes and types are checked. An\n * optional map may also be provided to capture ASN.1 values while the\n * structure is checked.\n *\n * To capture an ASN.1 value, set an object in the validator's 'capture'\n * parameter to the key to use in the capture map. To capture the full\n * ASN.1 object, specify 'captureAsn1'. 
To capture BIT STRING bytes, including\n * the leading unused bits counter byte, specify 'captureBitStringContents'.\n * To capture BIT STRING bytes, without the leading unused bits counter byte,\n * specify 'captureBitStringValue'.\n *\n * Objects in the validator may set a field 'optional' to true to indicate\n * that it isn't necessary to pass validation.\n *\n * @param obj the ASN.1 object to validate.\n * @param v the ASN.1 structure validator.\n * @param capture an optional map to capture values in.\n * @param errors an optional array for storing validation errors.\n *\n * @return true on success, false on failure.\n */\nasn1.validate = function(obj, v, capture, errors) {\n var rval = false;\n\n // ensure tag class and type are the same if specified\n if((obj.tagClass === v.tagClass || typeof(v.tagClass) === 'undefined') &&\n (obj.type === v.type || typeof(v.type) === 'undefined')) {\n // ensure constructed flag is the same if specified\n if(obj.constructed === v.constructed ||\n typeof(v.constructed) === 'undefined') {\n rval = true;\n\n // handle sub values\n if(v.value && forge.util.isArray(v.value)) {\n var j = 0;\n for(var i = 0; rval && i < v.value.length; ++i) {\n rval = v.value[i].optional || false;\n if(obj.value[j]) {\n rval = asn1.validate(obj.value[j], v.value[i], capture, errors);\n if(rval) {\n ++j;\n } else if(v.value[i].optional) {\n rval = true;\n }\n }\n if(!rval && errors) {\n errors.push(\n '[' + v.name + '] ' +\n 'Tag class \"' + v.tagClass + '\", type \"' +\n v.type + '\" expected value length \"' +\n v.value.length + '\", got \"' +\n obj.value.length + '\"');\n }\n }\n }\n\n if(rval && capture) {\n if(v.capture) {\n capture[v.capture] = obj.value;\n }\n if(v.captureAsn1) {\n capture[v.captureAsn1] = obj;\n }\n if(v.captureBitStringContents && 'bitStringContents' in obj) {\n capture[v.captureBitStringContents] = obj.bitStringContents;\n }\n if(v.captureBitStringValue && 'bitStringContents' in obj) {\n var value;\n if(obj.bitStringContents.length < 2) {\n capture[v.captureBitStringValue] = '';\n } else {\n // FIXME: support unused bits with data shifting\n var unused = obj.bitStringContents.charCodeAt(0);\n if(unused !== 0) {\n throw new Error(\n 'captureBitStringValue only supported for zero unused bits');\n }\n capture[v.captureBitStringValue] = obj.bitStringContents.slice(1);\n }\n }\n }\n } else if(errors) {\n errors.push(\n '[' + v.name + '] ' +\n 'Expected constructed \"' + v.constructed + '\", got \"' +\n obj.constructed + '\"');\n }\n } else if(errors) {\n if(obj.tagClass !== v.tagClass) {\n errors.push(\n '[' + v.name + '] ' +\n 'Expected tag class \"' + v.tagClass + '\", got \"' +\n obj.tagClass + '\"');\n }\n if(obj.type !== v.type) {\n errors.push(\n '[' + v.name + '] ' +\n 'Expected type \"' + v.type + '\", got \"' + obj.type + '\"');\n }\n }\n return rval;\n};\n\n// regex for testing for non-latin characters\nvar _nonLatinRegex = /[^\\\\u0000-\\\\u00ff]/;\n\n/**\n * Pretty prints an ASN.1 object to a string.\n *\n * @param obj the object to write out.\n * @param level the level in the tree.\n * @param indentation the indentation to use.\n *\n * @return the string.\n */\nasn1.prettyPrint = function(obj, level, indentation) {\n var rval = '';\n\n // set default level and indentation\n level = level || 0;\n indentation = indentation || 2;\n\n // start new line for deep levels\n if(level > 0) {\n rval += '\\n';\n }\n\n // create indent\n var indent = '';\n for(var i = 0; i < level * indentation; ++i) {\n indent += ' ';\n }\n\n // print class:type\n rval 
+= indent + 'Tag: ';\n switch(obj.tagClass) {\n case asn1.Class.UNIVERSAL:\n rval += 'Universal:';\n break;\n case asn1.Class.APPLICATION:\n rval += 'Application:';\n break;\n case asn1.Class.CONTEXT_SPECIFIC:\n rval += 'Context-Specific:';\n break;\n case asn1.Class.PRIVATE:\n rval += 'Private:';\n break;\n }\n\n if(obj.tagClass === asn1.Class.UNIVERSAL) {\n rval += obj.type;\n\n // known types\n switch(obj.type) {\n case asn1.Type.NONE:\n rval += ' (None)';\n break;\n case asn1.Type.BOOLEAN:\n rval += ' (Boolean)';\n break;\n case asn1.Type.INTEGER:\n rval += ' (Integer)';\n break;\n case asn1.Type.BITSTRING:\n rval += ' (Bit string)';\n break;\n case asn1.Type.OCTETSTRING:\n rval += ' (Octet string)';\n break;\n case asn1.Type.NULL:\n rval += ' (Null)';\n break;\n case asn1.Type.OID:\n rval += ' (Object Identifier)';\n break;\n case asn1.Type.ODESC:\n rval += ' (Object Descriptor)';\n break;\n case asn1.Type.EXTERNAL:\n rval += ' (External or Instance of)';\n break;\n case asn1.Type.REAL:\n rval += ' (Real)';\n break;\n case asn1.Type.ENUMERATED:\n rval += ' (Enumerated)';\n break;\n case asn1.Type.EMBEDDED:\n rval += ' (Embedded PDV)';\n break;\n case asn1.Type.UTF8:\n rval += ' (UTF8)';\n break;\n case asn1.Type.ROID:\n rval += ' (Relative Object Identifier)';\n break;\n case asn1.Type.SEQUENCE:\n rval += ' (Sequence)';\n break;\n case asn1.Type.SET:\n rval += ' (Set)';\n break;\n case asn1.Type.PRINTABLESTRING:\n rval += ' (Printable String)';\n break;\n case asn1.Type.IA5String:\n rval += ' (IA5String (ASCII))';\n break;\n case asn1.Type.UTCTIME:\n rval += ' (UTC time)';\n break;\n case asn1.Type.GENERALIZEDTIME:\n rval += ' (Generalized time)';\n break;\n case asn1.Type.BMPSTRING:\n rval += ' (BMP String)';\n break;\n }\n } else {\n rval += obj.type;\n }\n\n rval += '\\n';\n rval += indent + 'Constructed: ' + obj.constructed + '\\n';\n\n if(obj.composed) {\n var subvalues = 0;\n var sub = '';\n for(var i = 0; i < obj.value.length; ++i) {\n if(obj.value[i] !== undefined) {\n subvalues += 1;\n sub += asn1.prettyPrint(obj.value[i], level + 1, indentation);\n if((i + 1) < obj.value.length) {\n sub += ',';\n }\n }\n }\n rval += indent + 'Sub values: ' + subvalues + sub;\n } else {\n rval += indent + 'Value: ';\n if(obj.type === asn1.Type.OID) {\n var oid = asn1.derToOid(obj.value);\n rval += oid;\n if(forge.pki && forge.pki.oids) {\n if(oid in forge.pki.oids) {\n rval += ' (' + forge.pki.oids[oid] + ') ';\n }\n }\n }\n if(obj.type === asn1.Type.INTEGER) {\n try {\n rval += asn1.derToInteger(obj.value);\n } catch(ex) {\n rval += '0x' + forge.util.bytesToHex(obj.value);\n }\n } else if(obj.type === asn1.Type.BITSTRING) {\n // TODO: shift bits as needed to display without padding\n if(obj.value.length > 1) {\n // remove unused bits field\n rval += '0x' + forge.util.bytesToHex(obj.value.slice(1));\n } else {\n rval += '(none)';\n }\n // show unused bit count\n if(obj.value.length > 0) {\n var unused = obj.value.charCodeAt(0);\n if(unused == 1) {\n rval += ' (1 unused bit shown)';\n } else if(unused > 1) {\n rval += ' (' + unused + ' unused bits shown)';\n }\n }\n } else if(obj.type === asn1.Type.OCTETSTRING) {\n if(!_nonLatinRegex.test(obj.value)) {\n rval += '(' + obj.value + ') ';\n }\n rval += '0x' + forge.util.bytesToHex(obj.value);\n } else if(obj.type === asn1.Type.UTF8) {\n try {\n rval += forge.util.decodeUtf8(obj.value);\n } catch(e) {\n if(e.message === 'URI malformed') {\n rval +=\n '0x' + forge.util.bytesToHex(obj.value) + ' (malformed UTF8)';\n } else {\n throw e;\n }\n }\n } 
else if(obj.type === asn1.Type.PRINTABLESTRING ||\n obj.type === asn1.Type.IA5String) {\n rval += obj.value;\n } else if(_nonLatinRegex.test(obj.value)) {\n rval += '0x' + forge.util.bytesToHex(obj.value);\n } else if(obj.value.length === 0) {\n rval += '[null]';\n } else {\n rval += obj.value;\n }\n }\n\n return rval;\n};\n","/**\n * Base-N/Base-X encoding/decoding functions.\n *\n * Original implementation from base-x:\n * https://github.com/cryptocoinjs/base-x\n *\n * Which is MIT licensed:\n *\n * The MIT License (MIT)\n *\n * Copyright base-x contributors (c) 2016\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n * DEALINGS IN THE SOFTWARE.\n */\nvar api = {};\nmodule.exports = api;\n\n// baseN alphabet indexes\nvar _reverseAlphabets = {};\n\n/**\n * BaseN-encodes a Uint8Array using the given alphabet.\n *\n * @param input the Uint8Array to encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the baseN-encoded output string.\n */\napi.encode = function(input, alphabet, maxline) {\n if(typeof alphabet !== 'string') {\n throw new TypeError('\"alphabet\" must be a string.');\n }\n if(maxline !== undefined && typeof maxline !== 'number') {\n throw new TypeError('\"maxline\" must be a number.');\n }\n\n var output = '';\n\n if(!(input instanceof Uint8Array)) {\n // assume forge byte buffer\n output = _encodeWithByteBuffer(input, alphabet);\n } else {\n var i = 0;\n var base = alphabet.length;\n var first = alphabet.charAt(0);\n var digits = [0];\n for(i = 0; i < input.length; ++i) {\n for(var j = 0, carry = input[i]; j < digits.length; ++j) {\n carry += digits[j] << 8;\n digits[j] = carry % base;\n carry = (carry / base) | 0;\n }\n\n while(carry > 0) {\n digits.push(carry % base);\n carry = (carry / base) | 0;\n }\n }\n\n // deal with leading zeros\n for(i = 0; input[i] === 0 && i < input.length - 1; ++i) {\n output += first;\n }\n // convert digits to a string\n for(i = digits.length - 1; i >= 0; --i) {\n output += alphabet[digits[i]];\n }\n }\n\n if(maxline) {\n var regex = new RegExp('.{1,' + maxline + '}', 'g');\n output = output.match(regex).join('\\r\\n');\n }\n\n return output;\n};\n\n/**\n * Decodes a baseN-encoded (using the given alphabet) string to a\n * Uint8Array.\n *\n * @param input the baseN-encoded input string.\n *\n * @return the Uint8Array.\n */\napi.decode = function(input, alphabet) {\n if(typeof input !== 'string') {\n throw new TypeError('\"input\" must be a string.');\n }\n if(typeof 
alphabet !== 'string') {\n throw new TypeError('\"alphabet\" must be a string.');\n }\n\n var table = _reverseAlphabets[alphabet];\n if(!table) {\n // compute reverse alphabet\n table = _reverseAlphabets[alphabet] = [];\n for(var i = 0; i < alphabet.length; ++i) {\n table[alphabet.charCodeAt(i)] = i;\n }\n }\n\n // remove whitespace characters\n input = input.replace(/\\s/g, '');\n\n var base = alphabet.length;\n var first = alphabet.charAt(0);\n var bytes = [0];\n for(var i = 0; i < input.length; i++) {\n var value = table[input.charCodeAt(i)];\n if(value === undefined) {\n return;\n }\n\n for(var j = 0, carry = value; j < bytes.length; ++j) {\n carry += bytes[j] * base;\n bytes[j] = carry & 0xff;\n carry >>= 8;\n }\n\n while(carry > 0) {\n bytes.push(carry & 0xff);\n carry >>= 8;\n }\n }\n\n // deal with leading zeros\n for(var k = 0; input[k] === first && k < input.length - 1; ++k) {\n bytes.push(0);\n }\n\n if(typeof Buffer !== 'undefined') {\n return Buffer.from(bytes.reverse());\n }\n\n return new Uint8Array(bytes.reverse());\n};\n\nfunction _encodeWithByteBuffer(input, alphabet) {\n var i = 0;\n var base = alphabet.length;\n var first = alphabet.charAt(0);\n var digits = [0];\n for(i = 0; i < input.length(); ++i) {\n for(var j = 0, carry = input.at(i); j < digits.length; ++j) {\n carry += digits[j] << 8;\n digits[j] = carry % base;\n carry = (carry / base) | 0;\n }\n\n while(carry > 0) {\n digits.push(carry % base);\n carry = (carry / base) | 0;\n }\n }\n\n var output = '';\n\n // deal with leading zeros\n for(i = 0; input.at(i) === 0 && i < input.length() - 1; ++i) {\n output += first;\n }\n // convert digits to a string\n for(i = digits.length - 1; i >= 0; --i) {\n output += alphabet[digits[i]];\n }\n\n return output;\n}\n","/**\n * Cipher base API.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nmodule.exports = forge.cipher = forge.cipher || {};\n\n// registered algorithms\nforge.cipher.algorithms = forge.cipher.algorithms || {};\n\n/**\n * Creates a cipher object that can be used to encrypt data using the given\n * algorithm and key. The algorithm may be provided as a string value for a\n * previously registered algorithm or it may be given as a cipher algorithm\n * API object.\n *\n * @param algorithm the algorithm to use, either a string or an algorithm API\n * object.\n * @param key the key to use, as a binary-encoded string of bytes or a\n * byte buffer.\n *\n * @return the cipher.\n */\nforge.cipher.createCipher = function(algorithm, key) {\n var api = algorithm;\n if(typeof api === 'string') {\n api = forge.cipher.getAlgorithm(api);\n if(api) {\n api = api();\n }\n }\n if(!api) {\n throw new Error('Unsupported algorithm: ' + algorithm);\n }\n\n // assume block cipher\n return new forge.cipher.BlockCipher({\n algorithm: api,\n key: key,\n decrypt: false\n });\n};\n\n/**\n * Creates a decipher object that can be used to decrypt data using the given\n * algorithm and key. 
The algorithm may be provided as a string value for a\n * previously registered algorithm or it may be given as a cipher algorithm\n * API object.\n *\n * @param algorithm the algorithm to use, either a string or an algorithm API\n * object.\n * @param key the key to use, as a binary-encoded string of bytes or a\n * byte buffer.\n *\n * @return the cipher.\n */\nforge.cipher.createDecipher = function(algorithm, key) {\n var api = algorithm;\n if(typeof api === 'string') {\n api = forge.cipher.getAlgorithm(api);\n if(api) {\n api = api();\n }\n }\n if(!api) {\n throw new Error('Unsupported algorithm: ' + algorithm);\n }\n\n // assume block cipher\n return new forge.cipher.BlockCipher({\n algorithm: api,\n key: key,\n decrypt: true\n });\n};\n\n/**\n * Registers an algorithm by name. If the name was already registered, the\n * algorithm API object will be overwritten.\n *\n * @param name the name of the algorithm.\n * @param algorithm the algorithm API object.\n */\nforge.cipher.registerAlgorithm = function(name, algorithm) {\n name = name.toUpperCase();\n forge.cipher.algorithms[name] = algorithm;\n};\n\n/**\n * Gets a registered algorithm by name.\n *\n * @param name the name of the algorithm.\n *\n * @return the algorithm, if found, null if not.\n */\nforge.cipher.getAlgorithm = function(name) {\n name = name.toUpperCase();\n if(name in forge.cipher.algorithms) {\n return forge.cipher.algorithms[name];\n }\n return null;\n};\n\nvar BlockCipher = forge.cipher.BlockCipher = function(options) {\n this.algorithm = options.algorithm;\n this.mode = this.algorithm.mode;\n this.blockSize = this.mode.blockSize;\n this._finish = false;\n this._input = null;\n this.output = null;\n this._op = options.decrypt ? this.mode.decrypt : this.mode.encrypt;\n this._decrypt = options.decrypt;\n this.algorithm.initialize(options);\n};\n\n/**\n * Starts or restarts the encryption or decryption process, whichever\n * was previously configured.\n *\n * For non-GCM mode, the IV may be a binary-encoded string of bytes, an array\n * of bytes, a byte buffer, or an array of 32-bit integers. If the IV is in\n * bytes, then it must be Nb (16) bytes in length. If the IV is given in as\n * 32-bit integers, then it must be 4 integers long.\n *\n * Note: an IV is not required or used in ECB mode.\n *\n * For GCM-mode, the IV must be given as a binary-encoded string of bytes or\n * a byte buffer. 
The number of bytes should be 12 (96 bits) as recommended\n * by NIST SP-800-38D but another length may be given.\n *\n * @param options the options to use:\n * iv the initialization vector to use as a binary-encoded string of\n * bytes, null to reuse the last ciphered block from a previous\n * update() (this \"residue\" method is for legacy support only).\n * additionalData additional authentication data as a binary-encoded\n * string of bytes, for 'GCM' mode, (default: none).\n * tagLength desired length of authentication tag, in bits, for\n * 'GCM' mode (0-128, default: 128).\n * tag the authentication tag to check if decrypting, as a\n * binary-encoded string of bytes.\n * output the output the buffer to write to, null to create one.\n */\nBlockCipher.prototype.start = function(options) {\n options = options || {};\n var opts = {};\n for(var key in options) {\n opts[key] = options[key];\n }\n opts.decrypt = this._decrypt;\n this._finish = false;\n this._input = forge.util.createBuffer();\n this.output = options.output || forge.util.createBuffer();\n this.mode.start(opts);\n};\n\n/**\n * Updates the next block according to the cipher mode.\n *\n * @param input the buffer to read from.\n */\nBlockCipher.prototype.update = function(input) {\n if(input) {\n // input given, so empty it into the input buffer\n this._input.putBuffer(input);\n }\n\n // do cipher operation until it needs more input and not finished\n while(!this._op.call(this.mode, this._input, this.output, this._finish) &&\n !this._finish) {}\n\n // free consumed memory from input buffer\n this._input.compact();\n};\n\n/**\n * Finishes encrypting or decrypting.\n *\n * @param pad a padding function to use in CBC mode, null for default,\n * signature(blockSize, buffer, decrypt).\n *\n * @return true if successful, false on error.\n */\nBlockCipher.prototype.finish = function(pad) {\n // backwards-compatibility w/deprecated padding API\n // Note: will overwrite padding functions even after another start() call\n if(pad && (this.mode.name === 'ECB' || this.mode.name === 'CBC')) {\n this.mode.pad = function(input) {\n return pad(this.blockSize, input, false);\n };\n this.mode.unpad = function(output) {\n return pad(this.blockSize, output, true);\n };\n }\n\n // build options for padding and afterFinish functions\n var options = {};\n options.decrypt = this._decrypt;\n\n // get # of bytes that won't fill a block\n options.overflow = this._input.length() % this.blockSize;\n\n if(!this._decrypt && this.mode.pad) {\n if(!this.mode.pad(this._input, options)) {\n return false;\n }\n }\n\n // do final update\n this._finish = true;\n this.update();\n\n if(this._decrypt && this.mode.unpad) {\n if(!this.mode.unpad(this.output, options)) {\n return false;\n }\n }\n\n if(this.mode.afterFinish) {\n if(!this.mode.afterFinish(this.output, options)) {\n return false;\n }\n }\n\n return true;\n};\n","/**\n * Supported cipher modes.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nforge.cipher = forge.cipher || {};\n\n// supported cipher modes\nvar modes = module.exports = forge.cipher.modes = forge.cipher.modes || {};\n\n/** Electronic codebook (ECB) (Don't use this; it's not secure) **/\n\nmodes.ecb = function(options) {\n options = options || {};\n this.name = 'ECB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new 
Array(this._ints);\n};\n\nmodes.ecb.prototype.start = function(options) {};\n\nmodes.ecb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.ecb.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher-block Chaining (CBC) **/\n\nmodes.cbc = function(options) {\n options = options || {};\n this.name = 'CBC';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n};\n\nmodes.cbc.prototype.start = function(options) {\n // Note: legacy support for using IV residue (has security flaws)\n // if IV is null, reuse block from previous processing\n if(options.iv === null) {\n // must have a previous block\n if(!this._prev) {\n throw new Error('Invalid IV parameter.');\n }\n this._iv = this._prev.slice(0);\n } else if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n } else {\n // save IV as \"previous\" block\n this._iv = transformIV(options.iv, this.blockSize);\n this._prev = this._iv.slice(0);\n }\n};\n\nmodes.cbc.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n // CBC XOR's IV (or previous block) with plaintext\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._prev[i] ^ input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output, save previous block\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n this._prev = this._outBlock;\n};\n\nmodes.cbc.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt 
block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output, save previous ciphered block\n // CBC XOR's IV (or previous block) with ciphertext\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._prev[i] ^ this._outBlock[i]);\n }\n this._prev = this._inBlock.slice(0);\n};\n\nmodes.cbc.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.cbc.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher feedback (CFB) **/\n\nmodes.cfb = function(options) {\n options = options || {};\n this.name = 'CFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32() ^ this._outBlock[i];\n output.putInt32(this._inBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32() ^ this._outBlock[i];\n this._partialOutput.putInt32(this._partialBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return 
true;\n }\n\n // encrypt block (CFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n output.putInt32(this._inBlock[i] ^ this._outBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32();\n this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\n/** Output feedback (OFB) **/\n\nmodes.ofb = function(options) {\n options = options || {};\n this.name = 'OFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(input.length() === 0) {\n return true;\n }\n\n // encrypt block (OFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output and update next input\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n this._inBlock[i] = this._outBlock[i];\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._outBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n 
output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt;\n\n/** Counter (CTR) **/\n\nmodes.ctr = function(options) {\n options = options || {};\n this.name = 'CTR';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block (CTR always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // block complete, increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt;\n\n/** Galois/Counter Mode (GCM) **/\n\nmodes.gcm = function(options) {\n options = options || {};\n this.name = 'GCM';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n\n // R is actually this value concatenated with 120 more zero bits, but\n // we only XOR against R so the other zeros have no effect -- we just\n // apply this value to the first integer in a block\n this._R = 0xE1000000;\n};\n\nmodes.gcm.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // ensure IV is a byte buffer\n var iv = forge.util.createBuffer(options.iv);\n\n // no ciphered data processed yet\n this._cipherLength = 0;\n\n // default additional data is none\n var additionalData;\n if('additionalData' in options) {\n additionalData = 
forge.util.createBuffer(options.additionalData);\n } else {\n additionalData = forge.util.createBuffer();\n }\n\n // default tag length is 128 bits\n if('tagLength' in options) {\n this._tagLength = options.tagLength;\n } else {\n this._tagLength = 128;\n }\n\n // if tag is given, ensure tag matches tag length\n this._tag = null;\n if(options.decrypt) {\n // save tag to check later\n this._tag = forge.util.createBuffer(options.tag).getBytes();\n if(this._tag.length !== (this._tagLength / 8)) {\n throw new Error('Authentication tag does not match tag length.');\n }\n }\n\n // create tmp storage for hash calculation\n this._hashBlock = new Array(this._ints);\n\n // no tag generated yet\n this.tag = null;\n\n // generate hash subkey\n // (apply block cipher to \"zero\" block)\n this._hashSubkey = new Array(this._ints);\n this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey);\n\n // generate table M\n // use 4-bit tables (32 component decomposition of a 16 byte value)\n // 8-bit tables take more space and are known to have security\n // vulnerabilities (in native implementations)\n this.componentBits = 4;\n this._m = this.generateHashTable(this._hashSubkey, this.componentBits);\n\n // Note: support IV length different from 96 bits? (only supporting\n // 96 bits is recommended by NIST SP-800-38D)\n // generate J_0\n var ivLength = iv.length();\n if(ivLength === 12) {\n // 96-bit IV\n this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1];\n } else {\n // IV is NOT 96-bits\n this._j0 = [0, 0, 0, 0];\n while(iv.length() > 0) {\n this._j0 = this.ghash(\n this._hashSubkey, this._j0,\n [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]);\n }\n this._j0 = this.ghash(\n this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8)));\n }\n\n // generate ICB (initial counter block)\n this._inBlock = this._j0.slice(0);\n inc32(this._inBlock);\n this._partialBytes = 0;\n\n // consume authentication data\n additionalData = forge.util.createBuffer(additionalData);\n // save additional data length as a BE 64-bit number\n this._aDataLength = from64To32(additionalData.length() * 8);\n // pad additional data to 128 bit (16 byte) block size\n var overflow = additionalData.length() % this.blockSize;\n if(overflow) {\n additionalData.fillWithByte(0, this.blockSize - overflow);\n }\n this._s = [0, 0, 0, 0];\n while(additionalData.length() > 0) {\n this._s = this.ghash(this._hashSubkey, this._s, [\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32()\n ]);\n }\n};\n\nmodes.gcm.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^= input.getInt32());\n }\n this._cipherLength += this.blockSize;\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes <= 0 || finish) {\n // handle overflow prior to hashing\n if(finish) {\n // get block overflow\n 
var overflow = inputLength % this.blockSize;\n this._cipherLength += overflow;\n // truncate for hash function\n this._partialOutput.truncate(this.blockSize - overflow);\n } else {\n this._cipherLength += this.blockSize;\n }\n\n // get output block for hashing\n for(var i = 0; i < this._ints; ++i) {\n this._outBlock[i] = this._partialOutput.getInt32();\n }\n this._partialOutput.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n // block still incomplete, restore input buffer, get partial output,\n // and return early\n input.read -= this.blockSize;\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // update hash block S\n this._s = this.ghash(this._hashSubkey, this._s, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.gcm.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength < this.blockSize && !(finish && inputLength > 0)) {\n return true;\n }\n\n // encrypt block (GCM always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n\n // update hash block S\n this._hashBlock[0] = input.getInt32();\n this._hashBlock[1] = input.getInt32();\n this._hashBlock[2] = input.getInt32();\n this._hashBlock[3] = input.getInt32();\n this._s = this.ghash(this._hashSubkey, this._s, this._hashBlock);\n\n // XOR hash input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^ this._hashBlock[i]);\n }\n\n // increment cipher data length\n if(inputLength < this.blockSize) {\n this._cipherLength += inputLength % this.blockSize;\n } else {\n this._cipherLength += this.blockSize;\n }\n};\n\nmodes.gcm.prototype.afterFinish = function(output, options) {\n var rval = true;\n\n // handle overflow\n if(options.decrypt && options.overflow) {\n output.truncate(this.blockSize - options.overflow);\n }\n\n // handle authentication tag\n this.tag = forge.util.createBuffer();\n\n // concatenate additional data length with cipher length\n var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8));\n\n // include lengths in hash\n this._s = this.ghash(this._hashSubkey, this._s, lengths);\n\n // do GCTR(J_0, S)\n var tag = [];\n this.cipher.encrypt(this._j0, tag);\n for(var i = 0; i < this._ints; ++i) {\n this.tag.putInt32(this._s[i] ^ tag[i]);\n }\n\n // trim tag to length\n this.tag.truncate(this.tag.length() % (this._tagLength / 8));\n\n // check authentication tag\n if(options.decrypt && this.tag.bytes() !== this._tag) {\n rval = false;\n }\n\n return rval;\n};\n\n/**\n * See NIST SP-800-38D 6.3 (Algorithm 1). This function performs Galois\n * field multiplication. The field, GF(2^128), is defined by the polynomial:\n *\n * x^128 + x^7 + x^2 + x + 1\n *\n * Which is represented in little-endian binary form as: 11100001 (0xe1). When\n * the value of a coefficient is 1, a bit is set. 
The value R, is the\n * concatenation of this value and 120 zero bits, yielding a 128-bit value\n * which matches the block size.\n *\n * This function will multiply two elements (vectors of bytes), X and Y, in\n * the field GF(2^128). The result is initialized to zero. For each bit of\n * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd)\n * by the current value of Y. For each bit, the value of Y will be raised by\n * a power of x (multiplied by the polynomial x). This can be achieved by\n * shifting Y once to the right. If the current value of Y, prior to being\n * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial.\n * Otherwise, we must divide by R after shifting to find the remainder.\n *\n * @param x the first block to multiply by the second.\n * @param y the second block to multiply by the first.\n *\n * @return the block result of the multiplication.\n */\nmodes.gcm.prototype.multiply = function(x, y) {\n var z_i = [0, 0, 0, 0];\n var v_i = y.slice(0);\n\n // calculate Z_128 (block has 128 bits)\n for(var i = 0; i < 128; ++i) {\n // if x_i is 0, Z_{i+1} = Z_i (unchanged)\n // else Z_{i+1} = Z_i ^ V_i\n // get x_i by finding 32-bit int position, then left shift 1 by remainder\n var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32));\n if(x_i) {\n z_i[0] ^= v_i[0];\n z_i[1] ^= v_i[1];\n z_i[2] ^= v_i[2];\n z_i[3] ^= v_i[3];\n }\n\n // if LSB(V_i) is 1, V_i = V_i >> 1\n // else V_i = (V_i >> 1) ^ R\n this.pow(v_i, v_i);\n }\n\n return z_i;\n};\n\nmodes.gcm.prototype.pow = function(x, out) {\n // if LSB(x) is 1, x = x >>> 1\n // else x = (x >>> 1) ^ R\n var lsb = x[3] & 1;\n\n // always do x >>> 1:\n // starting with the rightmost integer, shift each integer to the right\n // one bit, pulling in the bit from the integer to the left as its top\n // most bit (do this for the last 3 integers)\n for(var i = 3; i > 0; --i) {\n out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31);\n }\n // shift the first integer normally\n out[0] = x[0] >>> 1;\n\n // if lsb was not set, then polynomial had a degree of 127 and doesn't\n // need to divided; otherwise, XOR with R to find the remainder; we only\n // need to XOR the first integer since R technically ends w/120 zero bits\n if(lsb) {\n out[0] ^= this._R;\n }\n};\n\nmodes.gcm.prototype.tableMultiply = function(x) {\n // assumes 4-bit tables are used\n var z = [0, 0, 0, 0];\n for(var i = 0; i < 32; ++i) {\n var idx = (i / 8) | 0;\n var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF;\n var ah = this._m[i][x_i];\n z[0] ^= ah[0];\n z[1] ^= ah[1];\n z[2] ^= ah[2];\n z[3] ^= ah[3];\n }\n return z;\n};\n\n/**\n * A continuing version of the GHASH algorithm that operates on a single\n * block. The hash block, last hash value (Ym) and the new block to hash\n * are given.\n *\n * @param h the hash block.\n * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash.\n * @param x the block to hash.\n *\n * @return the hashed value (Ym).\n */\nmodes.gcm.prototype.ghash = function(h, y, x) {\n y[0] ^= x[0];\n y[1] ^= x[1];\n y[2] ^= x[2];\n y[3] ^= x[3];\n return this.tableMultiply(y);\n //return this.multiply(y, h);\n};\n\n/**\n * Precomputes a table for multiplying against the hash subkey. This\n * mechanism provides a substantial speed increase over multiplication\n * performed without a table. 
The table-based multiplication this table is\n * for solves X * H by multiplying each component of X by H and then\n * composing the results together using XOR.\n *\n * This function can be used to generate tables with different bit sizes\n * for the components, however, this implementation assumes there are\n * 32 components of X (which is a 16 byte vector), therefore each component\n * takes 4-bits (so the table is constructed with bits=4).\n *\n * @param h the hash subkey.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateHashTable = function(h, bits) {\n // TODO: There are further optimizations that would use only the\n // first table M_0 (or some variant) along with a remainder table;\n // this can be explored in the future\n var multiplier = 8 / bits;\n var perInt = 4 * multiplier;\n var size = 16 * multiplier;\n var m = new Array(size);\n for(var i = 0; i < size; ++i) {\n var tmp = [0, 0, 0, 0];\n var idx = (i / perInt) | 0;\n var shft = ((perInt - 1 - (i % perInt)) * bits);\n tmp[idx] = (1 << (bits - 1)) << shft;\n m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits);\n }\n return m;\n};\n\n/**\n * Generates a table for multiplying against the hash subkey for one\n * particular component (out of all possible component values).\n *\n * @param mid the pre-multiplied value for the middle key of the table.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateSubHashTable = function(mid, bits) {\n // compute the table quickly by minimizing the number of\n // POW operations -- they only need to be performed for powers of 2,\n // all other entries can be composed from those powers using XOR\n var size = 1 << bits;\n var half = size >>> 1;\n var m = new Array(size);\n m[half] = mid.slice(0);\n var i = half >>> 1;\n while(i > 0) {\n // raise m0[2 * i] and store in m0[i]\n this.pow(m[2 * i], m[i] = []);\n i >>= 1;\n }\n i = 2;\n while(i < half) {\n for(var j = 1; j < i; ++j) {\n var m_i = m[i];\n var m_j = m[j];\n m[i + j] = [\n m_i[0] ^ m_j[0],\n m_i[1] ^ m_j[1],\n m_i[2] ^ m_j[2],\n m_i[3] ^ m_j[3]\n ];\n }\n i *= 2;\n }\n m[0] = [0, 0, 0, 0];\n /* Note: We could avoid storing these by doing composition during multiply\n calculate top half using composition by speed is preferred. 
*/\n for(i = half + 1; i < size; ++i) {\n var c = m[i ^ half];\n m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]];\n }\n return m;\n};\n\n/** Utility functions */\n\nfunction transformIV(iv, blockSize) {\n if(typeof iv === 'string') {\n // convert iv string into byte buffer\n iv = forge.util.createBuffer(iv);\n }\n\n if(forge.util.isArray(iv) && iv.length > 4) {\n // convert iv byte array into byte buffer\n var tmp = iv;\n iv = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n iv.putByte(tmp[i]);\n }\n }\n\n if(iv.length() < blockSize) {\n throw new Error(\n 'Invalid IV length; got ' + iv.length() +\n ' bytes and expected ' + blockSize + ' bytes.');\n }\n\n if(!forge.util.isArray(iv)) {\n // convert iv byte buffer into 32-bit integer array\n var ints = [];\n var blocks = blockSize / 4;\n for(var i = 0; i < blocks; ++i) {\n ints.push(iv.getInt32());\n }\n iv = ints;\n }\n\n return iv;\n}\n\nfunction inc32(block) {\n // increment last 32 bits of block only\n block[block.length - 1] = (block[block.length - 1] + 1) & 0xFFFFFFFF;\n}\n\nfunction from64To32(num) {\n // convert 64-bit number to two BE Int32s\n return [(num / 0x100000000) | 0, num & 0xFFFFFFFF];\n}\n","/**\n * DES (Data Encryption Standard) implementation.\n *\n * This implementation supports DES as well as 3DES-EDE in ECB and CBC mode.\n * It is based on the BSD-licensed implementation by Paul Tero:\n *\n * Paul Tero, July 2001\n * http://www.tero.co.uk/des/\n *\n * Optimised for performance with large blocks by\n * Michael Hayworth, November 2001\n * http://www.netdealing.com\n *\n * THIS SOFTWARE IS PROVIDED \"AS IS\" AND\n * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS\n * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)\n * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY\n * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF\n * SUCH DAMAGE.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2012-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./cipher');\nrequire('./cipherModes');\nrequire('./util');\n\n/* DES API */\nmodule.exports = forge.des = forge.des || {};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('DES-', key);\n * cipher.start({iv: iv});\n *\n * Creates an DES cipher object to encrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as binary-encoded strings of bytes or\n * byte buffers.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC' if IV is\n * given, 'ECB' if null).\n *\n * @return the cipher.\n */\nforge.des.startEncrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: false,\n mode: mode || (iv === null ? 
'ECB' : 'CBC')\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var cipher = forge.cipher.createCipher('DES-', key);\n *\n * Creates an DES cipher object to encrypt data using the given symmetric key.\n *\n * The key may be given as a binary-encoded string of bytes or a byte buffer.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.des.createEncryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: false,\n mode: mode\n });\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('DES-', key);\n * decipher.start({iv: iv});\n *\n * Creates an DES cipher object to decrypt data using the given symmetric key.\n * The output will be stored in the 'output' member of the returned cipher.\n *\n * The key and iv may be given as binary-encoded strings of bytes or\n * byte buffers.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n * @param mode the cipher mode to use (default: 'CBC' if IV is\n * given, 'ECB' if null).\n *\n * @return the cipher.\n */\nforge.des.startDecrypting = function(key, iv, output, mode) {\n var cipher = _createCipher({\n key: key,\n output: output,\n decrypt: true,\n mode: mode || (iv === null ? 'ECB' : 'CBC')\n });\n cipher.start(iv);\n return cipher;\n};\n\n/**\n * Deprecated. Instead, use:\n *\n * var decipher = forge.cipher.createDecipher('DES-', key);\n *\n * Creates an DES cipher object to decrypt data using the given symmetric key.\n *\n * The key may be given as a binary-encoded string of bytes or a byte buffer.\n *\n * @param key the symmetric key to use (64 or 192 bits).\n * @param mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nforge.des.createDecryptionCipher = function(key, mode) {\n return _createCipher({\n key: key,\n output: null,\n decrypt: true,\n mode: mode\n });\n};\n\n/**\n * Creates a new DES cipher algorithm object.\n *\n * @param name the name of the algorithm.\n * @param mode the mode factory function.\n *\n * @return the DES algorithm object.\n */\nforge.des.Algorithm = function(name, mode) {\n var self = this;\n self.name = name;\n self.mode = new mode({\n blockSize: 8,\n cipher: {\n encrypt: function(inBlock, outBlock) {\n return _updateBlock(self._keys, inBlock, outBlock, false);\n },\n decrypt: function(inBlock, outBlock) {\n return _updateBlock(self._keys, inBlock, outBlock, true);\n }\n }\n });\n self._init = false;\n};\n\n/**\n * Initializes this DES algorithm by expanding its key.\n *\n * @param options the options to use.\n * key the key to use with this algorithm.\n * decrypt true if the algorithm should be initialized for decryption,\n * false for encryption.\n */\nforge.des.Algorithm.prototype.initialize = function(options) {\n if(this._init) {\n return;\n }\n\n var key = forge.util.createBuffer(options.key);\n if(this.name.indexOf('3DES') === 0) {\n if(key.length() !== 24) {\n throw new Error('Invalid Triple-DES key size: ' + key.length() * 8);\n }\n }\n\n // do key expansion to 16 or 48 subkeys (single or triple DES)\n this._keys = _createKeys(key);\n this._init = true;\n};\n\n/** Register DES algorithms **/\n\nregisterAlgorithm('DES-ECB', forge.cipher.modes.ecb);\nregisterAlgorithm('DES-CBC', forge.cipher.modes.cbc);\nregisterAlgorithm('DES-CFB', 
forge.cipher.modes.cfb);\nregisterAlgorithm('DES-OFB', forge.cipher.modes.ofb);\nregisterAlgorithm('DES-CTR', forge.cipher.modes.ctr);\n\nregisterAlgorithm('3DES-ECB', forge.cipher.modes.ecb);\nregisterAlgorithm('3DES-CBC', forge.cipher.modes.cbc);\nregisterAlgorithm('3DES-CFB', forge.cipher.modes.cfb);\nregisterAlgorithm('3DES-OFB', forge.cipher.modes.ofb);\nregisterAlgorithm('3DES-CTR', forge.cipher.modes.ctr);\n\nfunction registerAlgorithm(name, mode) {\n var factory = function() {\n return new forge.des.Algorithm(name, mode);\n };\n forge.cipher.registerAlgorithm(name, factory);\n}\n\n/** DES implementation **/\n\nvar spfunction1 = [0x1010400,0,0x10000,0x1010404,0x1010004,0x10404,0x4,0x10000,0x400,0x1010400,0x1010404,0x400,0x1000404,0x1010004,0x1000000,0x4,0x404,0x1000400,0x1000400,0x10400,0x10400,0x1010000,0x1010000,0x1000404,0x10004,0x1000004,0x1000004,0x10004,0,0x404,0x10404,0x1000000,0x10000,0x1010404,0x4,0x1010000,0x1010400,0x1000000,0x1000000,0x400,0x1010004,0x10000,0x10400,0x1000004,0x400,0x4,0x1000404,0x10404,0x1010404,0x10004,0x1010000,0x1000404,0x1000004,0x404,0x10404,0x1010400,0x404,0x1000400,0x1000400,0,0x10004,0x10400,0,0x1010004];\nvar spfunction2 = [-0x7fef7fe0,-0x7fff8000,0x8000,0x108020,0x100000,0x20,-0x7fefffe0,-0x7fff7fe0,-0x7fffffe0,-0x7fef7fe0,-0x7fef8000,-0x80000000,-0x7fff8000,0x100000,0x20,-0x7fefffe0,0x108000,0x100020,-0x7fff7fe0,0,-0x80000000,0x8000,0x108020,-0x7ff00000,0x100020,-0x7fffffe0,0,0x108000,0x8020,-0x7fef8000,-0x7ff00000,0x8020,0,0x108020,-0x7fefffe0,0x100000,-0x7fff7fe0,-0x7ff00000,-0x7fef8000,0x8000,-0x7ff00000,-0x7fff8000,0x20,-0x7fef7fe0,0x108020,0x20,0x8000,-0x80000000,0x8020,-0x7fef8000,0x100000,-0x7fffffe0,0x100020,-0x7fff7fe0,-0x7fffffe0,0x100020,0x108000,0,-0x7fff8000,0x8020,-0x80000000,-0x7fefffe0,-0x7fef7fe0,0x108000];\nvar spfunction3 = [0x208,0x8020200,0,0x8020008,0x8000200,0,0x20208,0x8000200,0x20008,0x8000008,0x8000008,0x20000,0x8020208,0x20008,0x8020000,0x208,0x8000000,0x8,0x8020200,0x200,0x20200,0x8020000,0x8020008,0x20208,0x8000208,0x20200,0x20000,0x8000208,0x8,0x8020208,0x200,0x8000000,0x8020200,0x8000000,0x20008,0x208,0x20000,0x8020200,0x8000200,0,0x200,0x20008,0x8020208,0x8000200,0x8000008,0x200,0,0x8020008,0x8000208,0x20000,0x8000000,0x8020208,0x8,0x20208,0x20200,0x8000008,0x8020000,0x8000208,0x208,0x8020000,0x20208,0x8,0x8020008,0x20200];\nvar spfunction4 = [0x802001,0x2081,0x2081,0x80,0x802080,0x800081,0x800001,0x2001,0,0x802000,0x802000,0x802081,0x81,0,0x800080,0x800001,0x1,0x2000,0x800000,0x802001,0x80,0x800000,0x2001,0x2080,0x800081,0x1,0x2080,0x800080,0x2000,0x802080,0x802081,0x81,0x800080,0x800001,0x802000,0x802081,0x81,0,0,0x802000,0x2080,0x800080,0x800081,0x1,0x802001,0x2081,0x2081,0x80,0x802081,0x81,0x1,0x2000,0x800001,0x2001,0x802080,0x800081,0x2001,0x2080,0x800000,0x802001,0x80,0x800000,0x2000,0x802080];\nvar spfunction5 = [0x100,0x2080100,0x2080000,0x42000100,0x80000,0x100,0x40000000,0x2080000,0x40080100,0x80000,0x2000100,0x40080100,0x42000100,0x42080000,0x80100,0x40000000,0x2000000,0x40080000,0x40080000,0,0x40000100,0x42080100,0x42080100,0x2000100,0x42080000,0x40000100,0,0x42000000,0x2080100,0x2000000,0x42000000,0x80100,0x80000,0x42000100,0x100,0x2000000,0x40000000,0x2080000,0x42000100,0x40080100,0x2000100,0x40000000,0x42080000,0x2080100,0x40080100,0x100,0x2000000,0x42080000,0x42080100,0x80100,0x42000000,0x42080100,0x2080000,0,0x40080000,0x42000000,0x80100,0x2000100,0x40000100,0x80000,0,0x40080000,0x2080100,0x40000100];\nvar spfunction6 = 
[0x20000010,0x20400000,0x4000,0x20404010,0x20400000,0x10,0x20404010,0x400000,0x20004000,0x404010,0x400000,0x20000010,0x400010,0x20004000,0x20000000,0x4010,0,0x400010,0x20004010,0x4000,0x404000,0x20004010,0x10,0x20400010,0x20400010,0,0x404010,0x20404000,0x4010,0x404000,0x20404000,0x20000000,0x20004000,0x10,0x20400010,0x404000,0x20404010,0x400000,0x4010,0x20000010,0x400000,0x20004000,0x20000000,0x4010,0x20000010,0x20404010,0x404000,0x20400000,0x404010,0x20404000,0,0x20400010,0x10,0x4000,0x20400000,0x404010,0x4000,0x400010,0x20004010,0,0x20404000,0x20000000,0x400010,0x20004010];\nvar spfunction7 = [0x200000,0x4200002,0x4000802,0,0x800,0x4000802,0x200802,0x4200800,0x4200802,0x200000,0,0x4000002,0x2,0x4000000,0x4200002,0x802,0x4000800,0x200802,0x200002,0x4000800,0x4000002,0x4200000,0x4200800,0x200002,0x4200000,0x800,0x802,0x4200802,0x200800,0x2,0x4000000,0x200800,0x4000000,0x200800,0x200000,0x4000802,0x4000802,0x4200002,0x4200002,0x2,0x200002,0x4000000,0x4000800,0x200000,0x4200800,0x802,0x200802,0x4200800,0x802,0x4000002,0x4200802,0x4200000,0x200800,0,0x2,0x4200802,0,0x200802,0x4200000,0x800,0x4000002,0x4000800,0x800,0x200002];\nvar spfunction8 = [0x10001040,0x1000,0x40000,0x10041040,0x10000000,0x10001040,0x40,0x10000000,0x40040,0x10040000,0x10041040,0x41000,0x10041000,0x41040,0x1000,0x40,0x10040000,0x10000040,0x10001000,0x1040,0x41000,0x40040,0x10040040,0x10041000,0x1040,0,0,0x10040040,0x10000040,0x10001000,0x41040,0x40000,0x41040,0x40000,0x10041000,0x1000,0x40,0x10040040,0x1000,0x41040,0x10001000,0x40,0x10000040,0x10040000,0x10040040,0x10000000,0x40000,0x10001040,0,0x10041040,0x40040,0x10000040,0x10040000,0x10001000,0x10001040,0,0x10041040,0x41000,0x41000,0x1040,0x1040,0x40040,0x10000000,0x10041000];\n\n/**\n * Create necessary sub keys.\n *\n * @param key the 64-bit or 192-bit key.\n *\n * @return the expanded keys.\n */\nfunction _createKeys(key) {\n var pc2bytes0 = [0,0x4,0x20000000,0x20000004,0x10000,0x10004,0x20010000,0x20010004,0x200,0x204,0x20000200,0x20000204,0x10200,0x10204,0x20010200,0x20010204],\n pc2bytes1 = [0,0x1,0x100000,0x100001,0x4000000,0x4000001,0x4100000,0x4100001,0x100,0x101,0x100100,0x100101,0x4000100,0x4000101,0x4100100,0x4100101],\n pc2bytes2 = [0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808,0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808],\n pc2bytes3 = [0,0x200000,0x8000000,0x8200000,0x2000,0x202000,0x8002000,0x8202000,0x20000,0x220000,0x8020000,0x8220000,0x22000,0x222000,0x8022000,0x8222000],\n pc2bytes4 = [0,0x40000,0x10,0x40010,0,0x40000,0x10,0x40010,0x1000,0x41000,0x1010,0x41010,0x1000,0x41000,0x1010,0x41010],\n pc2bytes5 = [0,0x400,0x20,0x420,0,0x400,0x20,0x420,0x2000000,0x2000400,0x2000020,0x2000420,0x2000000,0x2000400,0x2000020,0x2000420],\n pc2bytes6 = [0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002,0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002],\n pc2bytes7 = [0,0x10000,0x800,0x10800,0x20000000,0x20010000,0x20000800,0x20010800,0x20000,0x30000,0x20800,0x30800,0x20020000,0x20030000,0x20020800,0x20030800],\n pc2bytes8 = [0,0x40000,0,0x40000,0x2,0x40002,0x2,0x40002,0x2000000,0x2040000,0x2000000,0x2040000,0x2000002,0x2040002,0x2000002,0x2040002],\n pc2bytes9 = [0,0x10000000,0x8,0x10000008,0,0x10000000,0x8,0x10000008,0x400,0x10000400,0x408,0x10000408,0x400,0x10000400,0x408,0x10000408],\n pc2bytes10 = [0,0x20,0,0x20,0x100000,0x100020,0x100000,0x100020,0x2000,0x2020,0x2000,0x2020,0x102000,0x102020,0x102000,0x102020],\n pc2bytes11 = 
[0,0x1000000,0x200,0x1000200,0x200000,0x1200000,0x200200,0x1200200,0x4000000,0x5000000,0x4000200,0x5000200,0x4200000,0x5200000,0x4200200,0x5200200],\n pc2bytes12 = [0,0x1000,0x8000000,0x8001000,0x80000,0x81000,0x8080000,0x8081000,0x10,0x1010,0x8000010,0x8001010,0x80010,0x81010,0x8080010,0x8081010],\n pc2bytes13 = [0,0x4,0x100,0x104,0,0x4,0x100,0x104,0x1,0x5,0x101,0x105,0x1,0x5,0x101,0x105];\n\n // how many iterations (1 for des, 3 for triple des)\n // changed by Paul 16/6/2007 to use Triple DES for 9+ byte keys\n var iterations = key.length() > 8 ? 3 : 1;\n\n // stores the return keys\n var keys = [];\n\n // now define the left shifts which need to be done\n var shifts = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0];\n\n var n = 0, tmp;\n for(var j = 0; j < iterations; j++) {\n var left = key.getInt32();\n var right = key.getInt32();\n\n tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;\n right ^= tmp;\n left ^= (tmp << 4);\n\n tmp = ((right >>> -16) ^ left) & 0x0000ffff;\n left ^= tmp;\n right ^= (tmp << -16);\n\n tmp = ((left >>> 2) ^ right) & 0x33333333;\n right ^= tmp;\n left ^= (tmp << 2);\n\n tmp = ((right >>> -16) ^ left) & 0x0000ffff;\n left ^= tmp;\n right ^= (tmp << -16);\n\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n tmp = ((right >>> 8) ^ left) & 0x00ff00ff;\n left ^= tmp;\n right ^= (tmp << 8);\n\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n // right needs to be shifted and OR'd with last four bits of left\n tmp = (left << 8) | ((right >>> 20) & 0x000000f0);\n\n // left needs to be put upside down\n left = ((right << 24) | ((right << 8) & 0xff0000) |\n ((right >>> 8) & 0xff00) | ((right >>> 24) & 0xf0));\n right = tmp;\n\n // now go through and perform these shifts on the left and right keys\n for(var i = 0; i < shifts.length; ++i) {\n //shift the keys either one or two bits to the left\n if(shifts[i]) {\n left = (left << 2) | (left >>> 26);\n right = (right << 2) | (right >>> 26);\n } else {\n left = (left << 1) | (left >>> 27);\n right = (right << 1) | (right >>> 27);\n }\n left &= -0xf;\n right &= -0xf;\n\n // now apply PC-2, in such a way that E is easier when encrypting or\n // decrypting this conversion will look like PC-2 except only the last 6\n // bits of each byte are used rather than 48 consecutive bits and the\n // order of lines will be according to how the S selection functions will\n // be applied: S2, S4, S6, S8, S1, S3, S5, S7\n var lefttmp = (\n pc2bytes0[left >>> 28] | pc2bytes1[(left >>> 24) & 0xf] |\n pc2bytes2[(left >>> 20) & 0xf] | pc2bytes3[(left >>> 16) & 0xf] |\n pc2bytes4[(left >>> 12) & 0xf] | pc2bytes5[(left >>> 8) & 0xf] |\n pc2bytes6[(left >>> 4) & 0xf]);\n var righttmp = (\n pc2bytes7[right >>> 28] | pc2bytes8[(right >>> 24) & 0xf] |\n pc2bytes9[(right >>> 20) & 0xf] | pc2bytes10[(right >>> 16) & 0xf] |\n pc2bytes11[(right >>> 12) & 0xf] | pc2bytes12[(right >>> 8) & 0xf] |\n pc2bytes13[(right >>> 4) & 0xf]);\n tmp = ((righttmp >>> 16) ^ lefttmp) & 0x0000ffff;\n keys[n++] = lefttmp ^ tmp;\n keys[n++] = righttmp ^ (tmp << 16);\n }\n }\n\n return keys;\n}\n\n/**\n * Updates a single block (1 byte) using DES. 
The update will either\n * encrypt or decrypt the block.\n *\n * @param keys the expanded keys.\n * @param input the input block (an array of 32-bit words).\n * @param output the updated output block.\n * @param decrypt true to decrypt the block, false to encrypt it.\n */\nfunction _updateBlock(keys, input, output, decrypt) {\n // set up loops for single or triple DES\n var iterations = keys.length === 32 ? 3 : 9;\n var looping;\n if(iterations === 3) {\n looping = decrypt ? [30, -2, -2] : [0, 32, 2];\n } else {\n looping = (decrypt ?\n [94, 62, -2, 32, 64, 2, 30, -2, -2] :\n [0, 32, 2, 62, 30, -2, 64, 96, 2]);\n }\n\n var tmp;\n\n var left = input[0];\n var right = input[1];\n\n // first each 64 bit chunk of the message must be permuted according to IP\n tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;\n right ^= tmp;\n left ^= (tmp << 4);\n\n tmp = ((left >>> 16) ^ right) & 0x0000ffff;\n right ^= tmp;\n left ^= (tmp << 16);\n\n tmp = ((right >>> 2) ^ left) & 0x33333333;\n left ^= tmp;\n right ^= (tmp << 2);\n\n tmp = ((right >>> 8) ^ left) & 0x00ff00ff;\n left ^= tmp;\n right ^= (tmp << 8);\n\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n // rotate left 1 bit\n left = ((left << 1) | (left >>> 31));\n right = ((right << 1) | (right >>> 31));\n\n for(var j = 0; j < iterations; j += 3) {\n var endloop = looping[j + 1];\n var loopinc = looping[j + 2];\n\n // now go through and perform the encryption or decryption\n for(var i = looping[j]; i != endloop; i += loopinc) {\n var right1 = right ^ keys[i];\n var right2 = ((right >>> 4) | (right << 28)) ^ keys[i + 1];\n\n // passing these bytes through the S selection functions\n tmp = left;\n left = right;\n right = tmp ^ (\n spfunction2[(right1 >>> 24) & 0x3f] |\n spfunction4[(right1 >>> 16) & 0x3f] |\n spfunction6[(right1 >>> 8) & 0x3f] |\n spfunction8[right1 & 0x3f] |\n spfunction1[(right2 >>> 24) & 0x3f] |\n spfunction3[(right2 >>> 16) & 0x3f] |\n spfunction5[(right2 >>> 8) & 0x3f] |\n spfunction7[right2 & 0x3f]);\n }\n // unreverse left and right\n tmp = left;\n left = right;\n right = tmp;\n }\n\n // rotate right 1 bit\n left = ((left >>> 1) | (left << 31));\n right = ((right >>> 1) | (right << 31));\n\n // now perform IP-1, which is IP in the opposite direction\n tmp = ((left >>> 1) ^ right) & 0x55555555;\n right ^= tmp;\n left ^= (tmp << 1);\n\n tmp = ((right >>> 8) ^ left) & 0x00ff00ff;\n left ^= tmp;\n right ^= (tmp << 8);\n\n tmp = ((right >>> 2) ^ left) & 0x33333333;\n left ^= tmp;\n right ^= (tmp << 2);\n\n tmp = ((left >>> 16) ^ right) & 0x0000ffff;\n right ^= tmp;\n left ^= (tmp << 16);\n\n tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;\n right ^= tmp;\n left ^= (tmp << 4);\n\n output[0] = left;\n output[1] = right;\n}\n\n/**\n * Deprecated. Instead, use:\n *\n * forge.cipher.createCipher('DES-', key);\n * forge.cipher.createDecipher('DES-', key);\n *\n * Creates a deprecated DES cipher object. 
This object's mode will default to\n * CBC (cipher-block-chaining).\n *\n * The key may be given as a binary-encoded string of bytes or a byte buffer.\n *\n * @param options the options to use.\n * key the symmetric key to use (64 or 192 bits).\n * output the buffer to write to.\n * decrypt true for decryption, false for encryption.\n * mode the cipher mode to use (default: 'CBC').\n *\n * @return the cipher.\n */\nfunction _createCipher(options) {\n options = options || {};\n var mode = (options.mode || 'CBC').toUpperCase();\n var algorithm = 'DES-' + mode;\n\n var cipher;\n if(options.decrypt) {\n cipher = forge.cipher.createDecipher(algorithm, options.key);\n } else {\n cipher = forge.cipher.createCipher(algorithm, options.key);\n }\n\n // backwards compatible start API\n var start = cipher.start;\n cipher.start = function(iv, options) {\n // backwards compatibility: support second arg as output buffer\n var output = null;\n if(options instanceof forge.util.ByteBuffer) {\n output = options;\n options = {};\n }\n options = options || {};\n options.output = output;\n options.iv = iv;\n start.call(cipher, options);\n };\n\n return cipher;\n}\n","/**\n * JavaScript implementation of Ed25519.\n *\n * Copyright (c) 2017-2019 Digital Bazaar, Inc.\n *\n * This implementation is based on the most excellent TweetNaCl which is\n * in the public domain. Many thanks to its contributors:\n *\n * https://github.com/dchest/tweetnacl-js\n */\nvar forge = require('./forge');\nrequire('./jsbn');\nrequire('./random');\nrequire('./sha512');\nrequire('./util');\nvar asn1Validator = require('./asn1-validator');\nvar publicKeyValidator = asn1Validator.publicKeyValidator;\nvar privateKeyValidator = asn1Validator.privateKeyValidator;\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\nvar ByteBuffer = forge.util.ByteBuffer;\nvar NativeBuffer = typeof Buffer === 'undefined' ? 
Uint8Array : Buffer;\n\n/*\n * Ed25519 algorithms, see RFC 8032:\n * https://tools.ietf.org/html/rfc8032\n */\nforge.pki = forge.pki || {};\nmodule.exports = forge.pki.ed25519 = forge.ed25519 = forge.ed25519 || {};\nvar ed25519 = forge.ed25519;\n\ned25519.constants = {};\ned25519.constants.PUBLIC_KEY_BYTE_LENGTH = 32;\ned25519.constants.PRIVATE_KEY_BYTE_LENGTH = 64;\ned25519.constants.SEED_BYTE_LENGTH = 32;\ned25519.constants.SIGN_BYTE_LENGTH = 64;\ned25519.constants.HASH_BYTE_LENGTH = 64;\n\ned25519.generateKeyPair = function(options) {\n options = options || {};\n var seed = options.seed;\n if(seed === undefined) {\n // generate seed\n seed = forge.random.getBytesSync(ed25519.constants.SEED_BYTE_LENGTH);\n } else if(typeof seed === 'string') {\n if(seed.length !== ed25519.constants.SEED_BYTE_LENGTH) {\n throw new TypeError(\n '\"seed\" must be ' + ed25519.constants.SEED_BYTE_LENGTH +\n ' bytes in length.');\n }\n } else if(!(seed instanceof Uint8Array)) {\n throw new TypeError(\n '\"seed\" must be a node.js Buffer, Uint8Array, or a binary string.');\n }\n\n seed = messageToNativeBuffer({message: seed, encoding: 'binary'});\n\n var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH);\n var sk = new NativeBuffer(ed25519.constants.PRIVATE_KEY_BYTE_LENGTH);\n for(var i = 0; i < 32; ++i) {\n sk[i] = seed[i];\n }\n crypto_sign_keypair(pk, sk);\n return {publicKey: pk, privateKey: sk};\n};\n\n/**\n * Converts a private key from a RFC8410 ASN.1 encoding.\n *\n * @param obj - The asn1 representation of a private key.\n *\n * @returns {Object} keyInfo - The key information.\n * @returns {Buffer|Uint8Array} keyInfo.privateKeyBytes - 32 private key bytes.\n */\ned25519.privateKeyFromAsn1 = function(obj) {\n var capture = {};\n var errors = [];\n var valid = forge.asn1.validate(obj, privateKeyValidator, capture, errors);\n if(!valid) {\n var error = new Error('Invalid Key.');\n error.errors = errors;\n throw error;\n }\n var oid = forge.asn1.derToOid(capture.privateKeyOid);\n var ed25519Oid = forge.oids.EdDSA25519;\n if(oid !== ed25519Oid) {\n throw new Error('Invalid OID \"' + oid + '\"; OID must be \"' +\n ed25519Oid + '\".');\n }\n var privateKey = capture.privateKey;\n // manually extract the private key bytes from nested octet string, see FIXME:\n // https://github.com/digitalbazaar/forge/blob/master/lib/asn1.js#L542\n var privateKeyBytes = messageToNativeBuffer({\n message: forge.asn1.fromDer(privateKey).value,\n encoding: 'binary'\n });\n // TODO: RFC8410 specifies a format for encoding the public key bytes along\n // with the private key bytes. `publicKeyBytes` can be returned in the\n // future. 
https://tools.ietf.org/html/rfc8410#section-10.3\n return {privateKeyBytes: privateKeyBytes};\n};\n\n/**\n * Converts a public key from a RFC8410 ASN.1 encoding.\n *\n * @param obj - The asn1 representation of a public key.\n *\n * @return {Buffer|Uint8Array} - 32 public key bytes.\n */\ned25519.publicKeyFromAsn1 = function(obj) {\n // get SubjectPublicKeyInfo\n var capture = {};\n var errors = [];\n var valid = forge.asn1.validate(obj, publicKeyValidator, capture, errors);\n if(!valid) {\n var error = new Error('Invalid Key.');\n error.errors = errors;\n throw error;\n }\n var oid = forge.asn1.derToOid(capture.publicKeyOid);\n var ed25519Oid = forge.oids.EdDSA25519;\n if(oid !== ed25519Oid) {\n throw new Error('Invalid OID \"' + oid + '\"; OID must be \"' +\n ed25519Oid + '\".');\n }\n var publicKeyBytes = capture.ed25519PublicKey;\n if(publicKeyBytes.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) {\n throw new Error('Key length is invalid.');\n }\n return messageToNativeBuffer({\n message: publicKeyBytes,\n encoding: 'binary'\n });\n};\n\ned25519.publicKeyFromPrivateKey = function(options) {\n options = options || {};\n var privateKey = messageToNativeBuffer({\n message: options.privateKey, encoding: 'binary'\n });\n if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.privateKey\" must have a byte length of ' +\n ed25519.constants.PRIVATE_KEY_BYTE_LENGTH);\n }\n\n var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH);\n for(var i = 0; i < pk.length; ++i) {\n pk[i] = privateKey[32 + i];\n }\n return pk;\n};\n\ned25519.sign = function(options) {\n options = options || {};\n var msg = messageToNativeBuffer(options);\n var privateKey = messageToNativeBuffer({\n message: options.privateKey,\n encoding: 'binary'\n });\n if(privateKey.length === ed25519.constants.SEED_BYTE_LENGTH) {\n var keyPair = ed25519.generateKeyPair({seed: privateKey});\n privateKey = keyPair.privateKey;\n } else if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.privateKey\" must have a byte length of ' +\n ed25519.constants.SEED_BYTE_LENGTH + ' or ' +\n ed25519.constants.PRIVATE_KEY_BYTE_LENGTH);\n }\n\n var signedMsg = new NativeBuffer(\n ed25519.constants.SIGN_BYTE_LENGTH + msg.length);\n crypto_sign(signedMsg, msg, msg.length, privateKey);\n\n var sig = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH);\n for(var i = 0; i < sig.length; ++i) {\n sig[i] = signedMsg[i];\n }\n return sig;\n};\n\ned25519.verify = function(options) {\n options = options || {};\n var msg = messageToNativeBuffer(options);\n if(options.signature === undefined) {\n throw new TypeError(\n '\"options.signature\" must be a node.js Buffer, a Uint8Array, a forge ' +\n 'ByteBuffer, or a binary string.');\n }\n var sig = messageToNativeBuffer({\n message: options.signature,\n encoding: 'binary'\n });\n if(sig.length !== ed25519.constants.SIGN_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.signature\" must have a byte length of ' +\n ed25519.constants.SIGN_BYTE_LENGTH);\n }\n var publicKey = messageToNativeBuffer({\n message: options.publicKey,\n encoding: 'binary'\n });\n if(publicKey.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) {\n throw new TypeError(\n '\"options.publicKey\" must have a byte length of ' +\n ed25519.constants.PUBLIC_KEY_BYTE_LENGTH);\n }\n\n var sm = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length);\n var m = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + 
msg.length);\n var i;\n for(i = 0; i < ed25519.constants.SIGN_BYTE_LENGTH; ++i) {\n sm[i] = sig[i];\n }\n for(i = 0; i < msg.length; ++i) {\n sm[i + ed25519.constants.SIGN_BYTE_LENGTH] = msg[i];\n }\n return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0);\n};\n\nfunction messageToNativeBuffer(options) {\n var message = options.message;\n if(message instanceof Uint8Array || message instanceof NativeBuffer) {\n return message;\n }\n\n var encoding = options.encoding;\n if(message === undefined) {\n if(options.md) {\n // TODO: more rigorous validation that `md` is a MessageDigest\n message = options.md.digest().getBytes();\n encoding = 'binary';\n } else {\n throw new TypeError('\"options.message\" or \"options.md\" not specified.');\n }\n }\n\n if(typeof message === 'string' && !encoding) {\n throw new TypeError('\"options.encoding\" must be \"binary\" or \"utf8\".');\n }\n\n if(typeof message === 'string') {\n if(typeof Buffer !== 'undefined') {\n return Buffer.from(message, encoding);\n }\n message = new ByteBuffer(message, encoding);\n } else if(!(message instanceof ByteBuffer)) {\n throw new TypeError(\n '\"options.message\" must be a node.js Buffer, a Uint8Array, a forge ' +\n 'ByteBuffer, or a string with \"options.encoding\" specifying its ' +\n 'encoding.');\n }\n\n // convert to native buffer\n var buffer = new NativeBuffer(message.length());\n for(var i = 0; i < buffer.length; ++i) {\n buffer[i] = message.at(i);\n }\n return buffer;\n}\n\nvar gf0 = gf();\nvar gf1 = gf([1]);\nvar D = gf([\n 0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070,\n 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]);\nvar D2 = gf([\n 0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0,\n 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]);\nvar X = gf([\n 0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c,\n 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]);\nvar Y = gf([\n 0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666,\n 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]);\nvar L = new Float64Array([\n 0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58,\n 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]);\nvar I = gf([\n 0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43,\n 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]);\n\n// TODO: update forge buffer implementation to use `Buffer` or `Uint8Array`,\n// whichever is available, to improve performance\nfunction sha512(msg, msgLen) {\n // Note: `out` and `msg` are NativeBuffer\n var md = forge.md.sha512.create();\n var buffer = new ByteBuffer(msg);\n md.update(buffer.getBytes(msgLen), 'binary');\n var hash = md.digest().getBytes();\n if(typeof Buffer !== 'undefined') {\n return Buffer.from(hash, 'binary');\n }\n var out = new NativeBuffer(ed25519.constants.HASH_BYTE_LENGTH);\n for(var i = 0; i < 64; ++i) {\n out[i] = hash.charCodeAt(i);\n }\n return out;\n}\n\nfunction crypto_sign_keypair(pk, sk) {\n var p = [gf(), gf(), gf(), gf()];\n var i;\n\n var d = sha512(sk, 32);\n d[0] &= 248;\n d[31] &= 127;\n d[31] |= 64;\n\n scalarbase(p, d);\n pack(pk, p);\n\n for(i = 0; i < 32; ++i) {\n sk[i + 32] = pk[i];\n }\n return 0;\n}\n\n// Note: difference from C - smlen returned, not passed as argument.\nfunction crypto_sign(sm, m, n, sk) {\n var i, j, x = new Float64Array(64);\n var p = [gf(), gf(), gf(), gf()];\n\n var d = sha512(sk, 32);\n d[0] &= 248;\n d[31] &= 127;\n d[31] |= 
64;\n\n var smlen = n + 64;\n for(i = 0; i < n; ++i) {\n sm[64 + i] = m[i];\n }\n for(i = 0; i < 32; ++i) {\n sm[32 + i] = d[32 + i];\n }\n\n var r = sha512(sm.subarray(32), n + 32);\n reduce(r);\n scalarbase(p, r);\n pack(sm, p);\n\n for(i = 32; i < 64; ++i) {\n sm[i] = sk[i];\n }\n var h = sha512(sm, n + 64);\n reduce(h);\n\n for(i = 32; i < 64; ++i) {\n x[i] = 0;\n }\n for(i = 0; i < 32; ++i) {\n x[i] = r[i];\n }\n for(i = 0; i < 32; ++i) {\n for(j = 0; j < 32; j++) {\n x[i + j] += h[i] * d[j];\n }\n }\n\n modL(sm.subarray(32), x);\n return smlen;\n}\n\nfunction crypto_sign_open(m, sm, n, pk) {\n var i, mlen;\n var t = new NativeBuffer(32);\n var p = [gf(), gf(), gf(), gf()],\n q = [gf(), gf(), gf(), gf()];\n\n mlen = -1;\n if(n < 64) {\n return -1;\n }\n\n if(unpackneg(q, pk)) {\n return -1;\n }\n\n for(i = 0; i < n; ++i) {\n m[i] = sm[i];\n }\n for(i = 0; i < 32; ++i) {\n m[i + 32] = pk[i];\n }\n var h = sha512(m, n);\n reduce(h);\n scalarmult(p, q, h);\n\n scalarbase(q, sm.subarray(32));\n add(p, q);\n pack(t, p);\n\n n -= 64;\n if(crypto_verify_32(sm, 0, t, 0)) {\n for(i = 0; i < n; ++i) {\n m[i] = 0;\n }\n return -1;\n }\n\n for(i = 0; i < n; ++i) {\n m[i] = sm[i + 64];\n }\n mlen = n;\n return mlen;\n}\n\nfunction modL(r, x) {\n var carry, i, j, k;\n for(i = 63; i >= 32; --i) {\n carry = 0;\n for(j = i - 32, k = i - 12; j < k; ++j) {\n x[j] += carry - 16 * x[i] * L[j - (i - 32)];\n carry = (x[j] + 128) >> 8;\n x[j] -= carry * 256;\n }\n x[j] += carry;\n x[i] = 0;\n }\n carry = 0;\n for(j = 0; j < 32; ++j) {\n x[j] += carry - (x[31] >> 4) * L[j];\n carry = x[j] >> 8;\n x[j] &= 255;\n }\n for(j = 0; j < 32; ++j) {\n x[j] -= carry * L[j];\n }\n for(i = 0; i < 32; ++i) {\n x[i + 1] += x[i] >> 8;\n r[i] = x[i] & 255;\n }\n}\n\nfunction reduce(r) {\n var x = new Float64Array(64);\n for(var i = 0; i < 64; ++i) {\n x[i] = r[i];\n r[i] = 0;\n }\n modL(r, x);\n}\n\nfunction add(p, q) {\n var a = gf(), b = gf(), c = gf(),\n d = gf(), e = gf(), f = gf(),\n g = gf(), h = gf(), t = gf();\n\n Z(a, p[1], p[0]);\n Z(t, q[1], q[0]);\n M(a, a, t);\n A(b, p[0], p[1]);\n A(t, q[0], q[1]);\n M(b, b, t);\n M(c, p[3], q[3]);\n M(c, c, D2);\n M(d, p[2], q[2]);\n A(d, d, d);\n Z(e, b, a);\n Z(f, d, c);\n A(g, d, c);\n A(h, b, a);\n\n M(p[0], e, f);\n M(p[1], h, g);\n M(p[2], g, f);\n M(p[3], e, h);\n}\n\nfunction cswap(p, q, b) {\n for(var i = 0; i < 4; ++i) {\n sel25519(p[i], q[i], b);\n }\n}\n\nfunction pack(r, p) {\n var tx = gf(), ty = gf(), zi = gf();\n inv25519(zi, p[2]);\n M(tx, p[0], zi);\n M(ty, p[1], zi);\n pack25519(r, ty);\n r[31] ^= par25519(tx) << 7;\n}\n\nfunction pack25519(o, n) {\n var i, j, b;\n var m = gf(), t = gf();\n for(i = 0; i < 16; ++i) {\n t[i] = n[i];\n }\n car25519(t);\n car25519(t);\n car25519(t);\n for(j = 0; j < 2; ++j) {\n m[0] = t[0] - 0xffed;\n for(i = 1; i < 15; ++i) {\n m[i] = t[i] - 0xffff - ((m[i - 1] >> 16) & 1);\n m[i-1] &= 0xffff;\n }\n m[15] = t[15] - 0x7fff - ((m[14] >> 16) & 1);\n b = (m[15] >> 16) & 1;\n m[14] &= 0xffff;\n sel25519(t, m, 1 - b);\n }\n for (i = 0; i < 16; i++) {\n o[2 * i] = t[i] & 0xff;\n o[2 * i + 1] = t[i] >> 8;\n }\n}\n\nfunction unpackneg(r, p) {\n var t = gf(), chk = gf(), num = gf(),\n den = gf(), den2 = gf(), den4 = gf(),\n den6 = gf();\n\n set25519(r[2], gf1);\n unpack25519(r[1], p);\n S(num, r[1]);\n M(den, num, D);\n Z(num, num, r[2]);\n A(den, r[2], den);\n\n S(den2, den);\n S(den4, den2);\n M(den6, den4, den2);\n M(t, den6, num);\n M(t, t, den);\n\n pow2523(t, t);\n M(t, t, num);\n M(t, t, den);\n M(t, t, den);\n M(r[0], t, 
den);\n\n S(chk, r[0]);\n M(chk, chk, den);\n if(neq25519(chk, num)) {\n M(r[0], r[0], I);\n }\n\n S(chk, r[0]);\n M(chk, chk, den);\n if(neq25519(chk, num)) {\n return -1;\n }\n\n if(par25519(r[0]) === (p[31] >> 7)) {\n Z(r[0], gf0, r[0]);\n }\n\n M(r[3], r[0], r[1]);\n return 0;\n}\n\nfunction unpack25519(o, n) {\n var i;\n for(i = 0; i < 16; ++i) {\n o[i] = n[2 * i] + (n[2 * i + 1] << 8);\n }\n o[15] &= 0x7fff;\n}\n\nfunction pow2523(o, i) {\n var c = gf();\n var a;\n for(a = 0; a < 16; ++a) {\n c[a] = i[a];\n }\n for(a = 250; a >= 0; --a) {\n S(c, c);\n if(a !== 1) {\n M(c, c, i);\n }\n }\n for(a = 0; a < 16; ++a) {\n o[a] = c[a];\n }\n}\n\nfunction neq25519(a, b) {\n var c = new NativeBuffer(32);\n var d = new NativeBuffer(32);\n pack25519(c, a);\n pack25519(d, b);\n return crypto_verify_32(c, 0, d, 0);\n}\n\nfunction crypto_verify_32(x, xi, y, yi) {\n return vn(x, xi, y, yi, 32);\n}\n\nfunction vn(x, xi, y, yi, n) {\n var i, d = 0;\n for(i = 0; i < n; ++i) {\n d |= x[xi + i] ^ y[yi + i];\n }\n return (1 & ((d - 1) >>> 8)) - 1;\n}\n\nfunction par25519(a) {\n var d = new NativeBuffer(32);\n pack25519(d, a);\n return d[0] & 1;\n}\n\nfunction scalarmult(p, q, s) {\n var b, i;\n set25519(p[0], gf0);\n set25519(p[1], gf1);\n set25519(p[2], gf1);\n set25519(p[3], gf0);\n for(i = 255; i >= 0; --i) {\n b = (s[(i / 8)|0] >> (i & 7)) & 1;\n cswap(p, q, b);\n add(q, p);\n add(p, p);\n cswap(p, q, b);\n }\n}\n\nfunction scalarbase(p, s) {\n var q = [gf(), gf(), gf(), gf()];\n set25519(q[0], X);\n set25519(q[1], Y);\n set25519(q[2], gf1);\n M(q[3], X, Y);\n scalarmult(p, q, s);\n}\n\nfunction set25519(r, a) {\n var i;\n for(i = 0; i < 16; i++) {\n r[i] = a[i] | 0;\n }\n}\n\nfunction inv25519(o, i) {\n var c = gf();\n var a;\n for(a = 0; a < 16; ++a) {\n c[a] = i[a];\n }\n for(a = 253; a >= 0; --a) {\n S(c, c);\n if(a !== 2 && a !== 4) {\n M(c, c, i);\n }\n }\n for(a = 0; a < 16; ++a) {\n o[a] = c[a];\n }\n}\n\nfunction car25519(o) {\n var i, v, c = 1;\n for(i = 0; i < 16; ++i) {\n v = o[i] + c + 65535;\n c = Math.floor(v / 65536);\n o[i] = v - c * 65536;\n }\n o[0] += c - 1 + 37 * (c - 1);\n}\n\nfunction sel25519(p, q, b) {\n var t, c = ~(b - 1);\n for(var i = 0; i < 16; ++i) {\n t = c & (p[i] ^ q[i]);\n p[i] ^= t;\n q[i] ^= t;\n }\n}\n\nfunction gf(init) {\n var i, r = new Float64Array(16);\n if(init) {\n for(i = 0; i < init.length; ++i) {\n r[i] = init[i];\n }\n }\n return r;\n}\n\nfunction A(o, a, b) {\n for(var i = 0; i < 16; ++i) {\n o[i] = a[i] + b[i];\n }\n}\n\nfunction Z(o, a, b) {\n for(var i = 0; i < 16; ++i) {\n o[i] = a[i] - b[i];\n }\n}\n\nfunction S(o, a) {\n M(o, a, a);\n}\n\nfunction M(o, a, b) {\n var v, c,\n t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0,\n t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0,\n t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0,\n t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0,\n b0 = b[0],\n b1 = b[1],\n b2 = b[2],\n b3 = b[3],\n b4 = b[4],\n b5 = b[5],\n b6 = b[6],\n b7 = b[7],\n b8 = b[8],\n b9 = b[9],\n b10 = b[10],\n b11 = b[11],\n b12 = b[12],\n b13 = b[13],\n b14 = b[14],\n b15 = b[15];\n\n v = a[0];\n t0 += v * b0;\n t1 += v * b1;\n t2 += v * b2;\n t3 += v * b3;\n t4 += v * b4;\n t5 += v * b5;\n t6 += v * b6;\n t7 += v * b7;\n t8 += v * b8;\n t9 += v * b9;\n t10 += v * b10;\n t11 += v * b11;\n t12 += v * b12;\n t13 += v * b13;\n t14 += v * b14;\n t15 += v * b15;\n v = a[1];\n t1 += v * b0;\n t2 += v * b1;\n t3 += v * b2;\n t4 += v * b3;\n t5 += v * b4;\n t6 += v * b5;\n 
t7 += v * b6;\n t8 += v * b7;\n t9 += v * b8;\n t10 += v * b9;\n t11 += v * b10;\n t12 += v * b11;\n t13 += v * b12;\n t14 += v * b13;\n t15 += v * b14;\n t16 += v * b15;\n v = a[2];\n t2 += v * b0;\n t3 += v * b1;\n t4 += v * b2;\n t5 += v * b3;\n t6 += v * b4;\n t7 += v * b5;\n t8 += v * b6;\n t9 += v * b7;\n t10 += v * b8;\n t11 += v * b9;\n t12 += v * b10;\n t13 += v * b11;\n t14 += v * b12;\n t15 += v * b13;\n t16 += v * b14;\n t17 += v * b15;\n v = a[3];\n t3 += v * b0;\n t4 += v * b1;\n t5 += v * b2;\n t6 += v * b3;\n t7 += v * b4;\n t8 += v * b5;\n t9 += v * b6;\n t10 += v * b7;\n t11 += v * b8;\n t12 += v * b9;\n t13 += v * b10;\n t14 += v * b11;\n t15 += v * b12;\n t16 += v * b13;\n t17 += v * b14;\n t18 += v * b15;\n v = a[4];\n t4 += v * b0;\n t5 += v * b1;\n t6 += v * b2;\n t7 += v * b3;\n t8 += v * b4;\n t9 += v * b5;\n t10 += v * b6;\n t11 += v * b7;\n t12 += v * b8;\n t13 += v * b9;\n t14 += v * b10;\n t15 += v * b11;\n t16 += v * b12;\n t17 += v * b13;\n t18 += v * b14;\n t19 += v * b15;\n v = a[5];\n t5 += v * b0;\n t6 += v * b1;\n t7 += v * b2;\n t8 += v * b3;\n t9 += v * b4;\n t10 += v * b5;\n t11 += v * b6;\n t12 += v * b7;\n t13 += v * b8;\n t14 += v * b9;\n t15 += v * b10;\n t16 += v * b11;\n t17 += v * b12;\n t18 += v * b13;\n t19 += v * b14;\n t20 += v * b15;\n v = a[6];\n t6 += v * b0;\n t7 += v * b1;\n t8 += v * b2;\n t9 += v * b3;\n t10 += v * b4;\n t11 += v * b5;\n t12 += v * b6;\n t13 += v * b7;\n t14 += v * b8;\n t15 += v * b9;\n t16 += v * b10;\n t17 += v * b11;\n t18 += v * b12;\n t19 += v * b13;\n t20 += v * b14;\n t21 += v * b15;\n v = a[7];\n t7 += v * b0;\n t8 += v * b1;\n t9 += v * b2;\n t10 += v * b3;\n t11 += v * b4;\n t12 += v * b5;\n t13 += v * b6;\n t14 += v * b7;\n t15 += v * b8;\n t16 += v * b9;\n t17 += v * b10;\n t18 += v * b11;\n t19 += v * b12;\n t20 += v * b13;\n t21 += v * b14;\n t22 += v * b15;\n v = a[8];\n t8 += v * b0;\n t9 += v * b1;\n t10 += v * b2;\n t11 += v * b3;\n t12 += v * b4;\n t13 += v * b5;\n t14 += v * b6;\n t15 += v * b7;\n t16 += v * b8;\n t17 += v * b9;\n t18 += v * b10;\n t19 += v * b11;\n t20 += v * b12;\n t21 += v * b13;\n t22 += v * b14;\n t23 += v * b15;\n v = a[9];\n t9 += v * b0;\n t10 += v * b1;\n t11 += v * b2;\n t12 += v * b3;\n t13 += v * b4;\n t14 += v * b5;\n t15 += v * b6;\n t16 += v * b7;\n t17 += v * b8;\n t18 += v * b9;\n t19 += v * b10;\n t20 += v * b11;\n t21 += v * b12;\n t22 += v * b13;\n t23 += v * b14;\n t24 += v * b15;\n v = a[10];\n t10 += v * b0;\n t11 += v * b1;\n t12 += v * b2;\n t13 += v * b3;\n t14 += v * b4;\n t15 += v * b5;\n t16 += v * b6;\n t17 += v * b7;\n t18 += v * b8;\n t19 += v * b9;\n t20 += v * b10;\n t21 += v * b11;\n t22 += v * b12;\n t23 += v * b13;\n t24 += v * b14;\n t25 += v * b15;\n v = a[11];\n t11 += v * b0;\n t12 += v * b1;\n t13 += v * b2;\n t14 += v * b3;\n t15 += v * b4;\n t16 += v * b5;\n t17 += v * b6;\n t18 += v * b7;\n t19 += v * b8;\n t20 += v * b9;\n t21 += v * b10;\n t22 += v * b11;\n t23 += v * b12;\n t24 += v * b13;\n t25 += v * b14;\n t26 += v * b15;\n v = a[12];\n t12 += v * b0;\n t13 += v * b1;\n t14 += v * b2;\n t15 += v * b3;\n t16 += v * b4;\n t17 += v * b5;\n t18 += v * b6;\n t19 += v * b7;\n t20 += v * b8;\n t21 += v * b9;\n t22 += v * b10;\n t23 += v * b11;\n t24 += v * b12;\n t25 += v * b13;\n t26 += v * b14;\n t27 += v * b15;\n v = a[13];\n t13 += v * b0;\n t14 += v * b1;\n t15 += v * b2;\n t16 += v * b3;\n t17 += v * b4;\n t18 += v * b5;\n t19 += v * b6;\n t20 += v * b7;\n t21 += v * b8;\n t22 += v * b9;\n t23 += v * b10;\n t24 += v * b11;\n t25 
+= v * b12;\n t26 += v * b13;\n t27 += v * b14;\n t28 += v * b15;\n v = a[14];\n t14 += v * b0;\n t15 += v * b1;\n t16 += v * b2;\n t17 += v * b3;\n t18 += v * b4;\n t19 += v * b5;\n t20 += v * b6;\n t21 += v * b7;\n t22 += v * b8;\n t23 += v * b9;\n t24 += v * b10;\n t25 += v * b11;\n t26 += v * b12;\n t27 += v * b13;\n t28 += v * b14;\n t29 += v * b15;\n v = a[15];\n t15 += v * b0;\n t16 += v * b1;\n t17 += v * b2;\n t18 += v * b3;\n t19 += v * b4;\n t20 += v * b5;\n t21 += v * b6;\n t22 += v * b7;\n t23 += v * b8;\n t24 += v * b9;\n t25 += v * b10;\n t26 += v * b11;\n t27 += v * b12;\n t28 += v * b13;\n t29 += v * b14;\n t30 += v * b15;\n\n t0 += 38 * t16;\n t1 += 38 * t17;\n t2 += 38 * t18;\n t3 += 38 * t19;\n t4 += 38 * t20;\n t5 += 38 * t21;\n t6 += 38 * t22;\n t7 += 38 * t23;\n t8 += 38 * t24;\n t9 += 38 * t25;\n t10 += 38 * t26;\n t11 += 38 * t27;\n t12 += 38 * t28;\n t13 += 38 * t29;\n t14 += 38 * t30;\n // t15 left as is\n\n // first car\n c = 1;\n v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536;\n v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536;\n v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536;\n v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536;\n v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536;\n v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536;\n v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536;\n v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536;\n v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536;\n v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536;\n v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536;\n v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536;\n v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536;\n v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536;\n v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536;\n v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536;\n t0 += c-1 + 37 * (c-1);\n\n // second car\n c = 1;\n v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536;\n v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536;\n v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536;\n v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536;\n v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536;\n v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536;\n v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536;\n v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536;\n v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536;\n v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536;\n v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536;\n v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536;\n v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536;\n v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536;\n v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536;\n v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536;\n t0 += c-1 + 37 * (c-1);\n\n o[ 0] = t0;\n o[ 1] = t1;\n o[ 2] = t2;\n o[ 3] = t3;\n o[ 4] = t4;\n o[ 5] = t5;\n o[ 6] = t6;\n o[ 7] = t7;\n o[ 8] = t8;\n o[ 9] = t9;\n o[10] = t10;\n o[11] = t11;\n o[12] = t12;\n o[13] = t13;\n o[14] = t14;\n o[15] = t15;\n}\n","/**\n * Node.js module for 
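The Ed25519 routines above (crypto_sign_keypair, crypto_sign, crypto_sign_open and the gf() field arithmetic) back forge's high-level Ed25519 API. A minimal usage sketch, assuming the standard node-forge entry point and its documented forge.pki.ed25519 namespace (neither is defined in this hunk):

var forge = require('node-forge');
var ed25519 = forge.pki.ed25519;

// Key generation hashes a 32-byte seed with SHA-512 and clamps it, as in crypto_sign_keypair().
var seed = forge.random.getBytesSync(32);
var keypair = ed25519.generateKeyPair({seed: seed});

// sign() builds the 64-byte signature via crypto_sign(); verify() re-derives the point
// with crypto_sign_open() and compares the first 32 bytes using crypto_verify_32().
var signature = ed25519.sign({
  message: 'hello', encoding: 'utf8', privateKey: keypair.privateKey
});
var ok = ed25519.verify({
  message: 'hello', encoding: 'utf8', signature: signature, publicKey: keypair.publicKey
});
console.log('signature valid:', ok);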
Forge.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2016 Digital Bazaar, Inc.\n */\nmodule.exports = {\n // default options\n options: {\n usePureJavaScript: false\n }\n};\n","/**\n * Hash-based Message Authentication Code implementation. Requires a message\n * digest object that can be obtained, for example, from forge.md.sha1 or\n * forge.md.md5.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2012 Digital Bazaar, Inc. All rights reserved.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\n/* HMAC API */\nvar hmac = module.exports = forge.hmac = forge.hmac || {};\n\n/**\n * Creates an HMAC object that uses the given message digest object.\n *\n * @return an HMAC object.\n */\nhmac.create = function() {\n // the hmac key to use\n var _key = null;\n\n // the message digest to use\n var _md = null;\n\n // the inner padding\n var _ipadding = null;\n\n // the outer padding\n var _opadding = null;\n\n // hmac context\n var ctx = {};\n\n /**\n * Starts or restarts the HMAC with the given key and message digest.\n *\n * @param md the message digest to use, null to reuse the previous one,\n * a string to use builtin 'sha1', 'md5', 'sha256'.\n * @param key the key to use as a string, array of bytes, byte buffer,\n * or null to reuse the previous key.\n */\n ctx.start = function(md, key) {\n if(md !== null) {\n if(typeof md === 'string') {\n // create builtin message digest\n md = md.toLowerCase();\n if(md in forge.md.algorithms) {\n _md = forge.md.algorithms[md].create();\n } else {\n throw new Error('Unknown hash algorithm \"' + md + '\"');\n }\n } else {\n // store message digest\n _md = md;\n }\n }\n\n if(key === null) {\n // reuse previous key\n key = _key;\n } else {\n if(typeof key === 'string') {\n // convert string into byte buffer\n key = forge.util.createBuffer(key);\n } else if(forge.util.isArray(key)) {\n // convert byte array into byte buffer\n var tmp = key;\n key = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n key.putByte(tmp[i]);\n }\n }\n\n // if key is longer than blocksize, hash it\n var keylen = key.length();\n if(keylen > _md.blockLength) {\n _md.start();\n _md.update(key.bytes());\n key = _md.digest();\n }\n\n // mix key into inner and outer padding\n // ipadding = [0x36 * blocksize] ^ key\n // opadding = [0x5C * blocksize] ^ key\n _ipadding = forge.util.createBuffer();\n _opadding = forge.util.createBuffer();\n keylen = key.length();\n for(var i = 0; i < keylen; ++i) {\n var tmp = key.at(i);\n _ipadding.putByte(0x36 ^ tmp);\n _opadding.putByte(0x5C ^ tmp);\n }\n\n // if key is shorter than blocksize, add additional padding\n if(keylen < _md.blockLength) {\n var tmp = _md.blockLength - keylen;\n for(var i = 0; i < tmp; ++i) {\n _ipadding.putByte(0x36);\n _opadding.putByte(0x5C);\n }\n }\n _key = key;\n _ipadding = _ipadding.bytes();\n _opadding = _opadding.bytes();\n }\n\n // digest is done like so: hash(opadding | hash(ipadding | message))\n\n // prepare to do inner hash\n // hash(ipadding | message)\n _md.start();\n _md.update(_ipadding);\n };\n\n /**\n * Updates the HMAC with the given message bytes.\n *\n * @param bytes the bytes to update with.\n */\n ctx.update = function(bytes) {\n _md.update(bytes);\n };\n\n /**\n * Produces the Message Authentication Code (MAC).\n *\n * @return a byte buffer containing the digest value.\n */\n ctx.getMac = function() {\n // digest is done like so: hash(opadding | hash(ipadding | message))\n // here we do the outer hashing\n var inner = _md.digest().bytes();\n 
_md.start();\n _md.update(_opadding);\n _md.update(inner);\n return _md.digest();\n };\n // alias for getMac\n ctx.digest = ctx.getMac;\n\n return ctx;\n};\n","/**\n * Node.js module for Forge.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2016 Digital Bazaar, Inc.\n */\nmodule.exports = require('./forge');\nrequire('./aes');\nrequire('./aesCipherSuites');\nrequire('./asn1');\nrequire('./cipher');\nrequire('./des');\nrequire('./ed25519');\nrequire('./hmac');\nrequire('./kem');\nrequire('./log');\nrequire('./md.all');\nrequire('./mgf1');\nrequire('./pbkdf2');\nrequire('./pem');\nrequire('./pkcs1');\nrequire('./pkcs12');\nrequire('./pkcs7');\nrequire('./pki');\nrequire('./prime');\nrequire('./prng');\nrequire('./pss');\nrequire('./random');\nrequire('./rc2');\nrequire('./ssh');\nrequire('./tls');\nrequire('./util');\n","// Copyright (c) 2005 Tom Wu\n// All Rights Reserved.\n// See \"LICENSE\" for details.\n\n// Basic JavaScript BN library - subset useful for RSA encryption.\n\n/*\nLicensing (LICENSE)\n-------------------\n\nThis software is covered under the following copyright:\n*/\n/*\n * Copyright (c) 2003-2005 Tom Wu\n * All Rights Reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining\n * a copy of this software and associated documentation files (the\n * \"Software\"), to deal in the Software without restriction, including\n * without limitation the rights to use, copy, modify, merge, publish,\n * distribute, sublicense, and/or sell copies of the Software, and to\n * permit persons to whom the Software is furnished to do so, subject to\n * the following conditions:\n *\n * The above copyright notice and this permission notice shall be\n * included in all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS-IS\" AND WITHOUT WARRANTY OF ANY KIND,\n * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY\n * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.\n *\n * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,\n * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER\n * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF\n * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT\n * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n *\n * In addition, the following condition applies:\n *\n * All redistributions must retain an intact copy of this copyright notice\n * and disclaimer.\n */\n/*\nAddress all questions regarding this license to:\n\n Tom Wu\n tjw@cs.Stanford.EDU\n*/\nvar forge = require('./forge');\n\nmodule.exports = forge.jsbn = forge.jsbn || {};\n\n// Bits per digit\nvar dbits;\n\n// JavaScript engine analysis\nvar canary = 0xdeadbeefcafe;\nvar j_lm = ((canary&0xffffff)==0xefcafe);\n\n// (public) Constructor\nfunction BigInteger(a,b,c) {\n this.data = [];\n if(a != null)\n if(\"number\" == typeof a) this.fromNumber(a,b,c);\n else if(b == null && \"string\" != typeof a) this.fromString(a,256);\n else this.fromString(a,b);\n}\nforge.jsbn.BigInteger = BigInteger;\n\n// return new, unset BigInteger\nfunction nbi() { return new BigInteger(null); }\n\n// am: Compute w_j += (x*this_i), propagate carries,\n// c is initial carry, returns final carry.\n// c < 3*dvalue, x < 2*dvalue, this_i < dvalue\n// We need to select the fastest one that works in this environment.\n\n// am1: use a single mult and divide to get the high bits,\n// max digit bits should be 26 because\n// max internal value = 
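A short usage sketch for the HMAC API documented above, which computes hash(opadding | hash(ipadding | message)) with the key mixed into both paddings; it assumes the standard node-forge entry point:

var forge = require('node-forge');

var hmac = forge.hmac.create();
// 'sha256' selects a builtin digest via forge.md.algorithms, as ctx.start() describes;
// the key may be a string, byte array or byte buffer.
hmac.start('sha256', 'my-secret-key');
hmac.update('message bytes to authenticate');
// getMac() (alias: digest()) runs the outer hash over opadding | inner digest.
console.log(hmac.getMac().toHex());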
2*dvalue^2-2*dvalue (< 2^53)\nfunction am1(i,x,w,j,c,n) {\n while(--n >= 0) {\n var v = x*this.data[i++]+w.data[j]+c;\n c = Math.floor(v/0x4000000);\n w.data[j++] = v&0x3ffffff;\n }\n return c;\n}\n// am2 avoids a big mult-and-extract completely.\n// Max digit bits should be <= 30 because we do bitwise ops\n// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)\nfunction am2(i,x,w,j,c,n) {\n var xl = x&0x7fff, xh = x>>15;\n while(--n >= 0) {\n var l = this.data[i]&0x7fff;\n var h = this.data[i++]>>15;\n var m = xh*l+h*xl;\n l = xl*l+((m&0x7fff)<<15)+w.data[j]+(c&0x3fffffff);\n c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);\n w.data[j++] = l&0x3fffffff;\n }\n return c;\n}\n// Alternately, set max digit bits to 28 since some\n// browsers slow down when dealing with 32-bit numbers.\nfunction am3(i,x,w,j,c,n) {\n var xl = x&0x3fff, xh = x>>14;\n while(--n >= 0) {\n var l = this.data[i]&0x3fff;\n var h = this.data[i++]>>14;\n var m = xh*l+h*xl;\n l = xl*l+((m&0x3fff)<<14)+w.data[j]+c;\n c = (l>>28)+(m>>14)+xh*h;\n w.data[j++] = l&0xfffffff;\n }\n return c;\n}\n\n// node.js (no browser)\nif(typeof(navigator) === 'undefined')\n{\n BigInteger.prototype.am = am3;\n dbits = 28;\n} else if(j_lm && (navigator.appName == \"Microsoft Internet Explorer\")) {\n BigInteger.prototype.am = am2;\n dbits = 30;\n} else if(j_lm && (navigator.appName != \"Netscape\")) {\n BigInteger.prototype.am = am1;\n dbits = 26;\n} else { // Mozilla/Netscape seems to prefer am3\n BigInteger.prototype.am = am3;\n dbits = 28;\n}\n\nBigInteger.prototype.DB = dbits;\nBigInteger.prototype.DM = ((1<= 0; --i) r.data[i] = this.data[i];\n r.t = this.t;\n r.s = this.s;\n}\n\n// (protected) set from integer value x, -DV <= x < DV\nfunction bnpFromInt(x) {\n this.t = 1;\n this.s = (x<0)?-1:0;\n if(x > 0) this.data[0] = x;\n else if(x < -1) this.data[0] = x+this.DV;\n else this.t = 0;\n}\n\n// return bigint initialized to value\nfunction nbv(i) { var r = nbi(); r.fromInt(i); return r; }\n\n// (protected) set from string and radix\nfunction bnpFromString(s,b) {\n var k;\n if(b == 16) k = 4;\n else if(b == 8) k = 3;\n else if(b == 256) k = 8; // byte array\n else if(b == 2) k = 1;\n else if(b == 32) k = 5;\n else if(b == 4) k = 2;\n else { this.fromRadix(s,b); return; }\n this.t = 0;\n this.s = 0;\n var i = s.length, mi = false, sh = 0;\n while(--i >= 0) {\n var x = (k==8)?s[i]&0xff:intAt(s,i);\n if(x < 0) {\n if(s.charAt(i) == \"-\") mi = true;\n continue;\n }\n mi = false;\n if(sh == 0)\n this.data[this.t++] = x;\n else if(sh+k > this.DB) {\n this.data[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh));\n } else\n this.data[this.t-1] |= x<= this.DB) sh -= this.DB;\n }\n if(k == 8 && (s[0]&0x80) != 0) {\n this.s = -1;\n if(sh > 0) this.data[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this.data[this.t-1] == c) --this.t;\n}\n\n// (public) return string representation in given radix\nfunction bnToString(b) {\n if(this.s < 0) return \"-\"+this.negate().toString(b);\n var k;\n if(b == 16) k = 4;\n else if(b == 8) k = 3;\n else if(b == 2) k = 1;\n else if(b == 32) k = 5;\n else if(b == 4) k = 2;\n else return this.toRadix(b);\n var km = (1< 0) {\n if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); }\n while(i >= 0) {\n if(p < k) {\n d = (this.data[i]&((1<>(p+=this.DB-k);\n } else {\n d = (this.data[i]>>(p-=k))&km;\n if(p <= 0) { p += this.DB; --i; }\n }\n if(d > 0) m = true;\n if(m) r += int2char(d);\n }\n }\n return m?r:\"0\";\n}\n\n// (public) -this\nfunction bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }\n\n// 
(public) |this|\nfunction bnAbs() { return (this.s<0)?this.negate():this; }\n\n// (public) return + if this > a, - if this < a, 0 if equal\nfunction bnCompareTo(a) {\n var r = this.s-a.s;\n if(r != 0) return r;\n var i = this.t;\n r = i-a.t;\n if(r != 0) return (this.s<0)?-r:r;\n while(--i >= 0) if((r=this.data[i]-a.data[i]) != 0) return r;\n return 0;\n}\n\n// returns bit length of the integer x\nfunction nbits(x) {\n var r = 1, t;\n if((t=x>>>16) != 0) { x = t; r += 16; }\n if((t=x>>8) != 0) { x = t; r += 8; }\n if((t=x>>4) != 0) { x = t; r += 4; }\n if((t=x>>2) != 0) { x = t; r += 2; }\n if((t=x>>1) != 0) { x = t; r += 1; }\n return r;\n}\n\n// (public) return the number of bits in \"this\"\nfunction bnBitLength() {\n if(this.t <= 0) return 0;\n return this.DB*(this.t-1)+nbits(this.data[this.t-1]^(this.s&this.DM));\n}\n\n// (protected) r = this << n*DB\nfunction bnpDLShiftTo(n,r) {\n var i;\n for(i = this.t-1; i >= 0; --i) r.data[i+n] = this.data[i];\n for(i = n-1; i >= 0; --i) r.data[i] = 0;\n r.t = this.t+n;\n r.s = this.s;\n}\n\n// (protected) r = this >> n*DB\nfunction bnpDRShiftTo(n,r) {\n for(var i = n; i < this.t; ++i) r.data[i-n] = this.data[i];\n r.t = Math.max(this.t-n,0);\n r.s = this.s;\n}\n\n// (protected) r = this << n\nfunction bnpLShiftTo(n,r) {\n var bs = n%this.DB;\n var cbs = this.DB-bs;\n var bm = (1<= 0; --i) {\n r.data[i+ds+1] = (this.data[i]>>cbs)|c;\n c = (this.data[i]&bm)<= 0; --i) r.data[i] = 0;\n r.data[ds] = c;\n r.t = this.t+ds+1;\n r.s = this.s;\n r.clamp();\n}\n\n// (protected) r = this >> n\nfunction bnpRShiftTo(n,r) {\n r.s = this.s;\n var ds = Math.floor(n/this.DB);\n if(ds >= this.t) { r.t = 0; return; }\n var bs = n%this.DB;\n var cbs = this.DB-bs;\n var bm = (1<>bs;\n for(var i = ds+1; i < this.t; ++i) {\n r.data[i-ds-1] |= (this.data[i]&bm)<>bs;\n }\n if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<>= this.DB;\n }\n if(a.t < this.t) {\n c -= a.s;\n while(i < this.t) {\n c += this.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += this.s;\n } else {\n c += this.s;\n while(i < a.t) {\n c -= a.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c -= a.s;\n }\n r.s = (c<0)?-1:0;\n if(c < -1) r.data[i++] = this.DV+c;\n else if(c > 0) r.data[i++] = c;\n r.t = i;\n r.clamp();\n}\n\n// (protected) r = this * a, r != this,a (HAC 14.12)\n// \"this\" should be the larger one if appropriate.\nfunction bnpMultiplyTo(a,r) {\n var x = this.abs(), y = a.abs();\n var i = x.t;\n r.t = i+y.t;\n while(--i >= 0) r.data[i] = 0;\n for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t);\n r.s = 0;\n r.clamp();\n if(this.s != a.s) BigInteger.ZERO.subTo(r,r);\n}\n\n// (protected) r = this^2, r != this (HAC 14.16)\nfunction bnpSquareTo(r) {\n var x = this.abs();\n var i = r.t = 2*x.t;\n while(--i >= 0) r.data[i] = 0;\n for(i = 0; i < x.t-1; ++i) {\n var c = x.am(i,x.data[i],r,2*i,0,1);\n if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {\n r.data[i+x.t] -= x.DV;\n r.data[i+x.t+1] = 1;\n }\n }\n if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1);\n r.s = 0;\n r.clamp();\n}\n\n// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)\n// r != q, this != m. 
q or r may be null.\nfunction bnpDivRemTo(m,q,r) {\n var pm = m.abs();\n if(pm.t <= 0) return;\n var pt = this.abs();\n if(pt.t < pm.t) {\n if(q != null) q.fromInt(0);\n if(r != null) this.copyTo(r);\n return;\n }\n if(r == null) r = nbi();\n var y = nbi(), ts = this.s, ms = m.s;\n var nsh = this.DB-nbits(pm.data[pm.t-1]);\t// normalize modulus\n if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); }\n var ys = y.t;\n var y0 = y.data[ys-1];\n if(y0 == 0) return;\n var yt = y0*(1<1)?y.data[ys-2]>>this.F2:0);\n var d1 = this.FV/yt, d2 = (1<= 0) {\n r.data[r.t++] = 1;\n r.subTo(t,r);\n }\n BigInteger.ONE.dlShiftTo(ys,t);\n t.subTo(y,y);\t// \"negative\" y so we can replace sub with am later\n while(y.t < ys) y.data[y.t++] = 0;\n while(--j >= 0) {\n // Estimate quotient digit\n var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2);\n if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) {\t// Try it out\n y.dlShiftTo(j,t);\n r.subTo(t,r);\n while(r.data[i] < --qd) r.subTo(t,r);\n }\n }\n if(q != null) {\n r.drShiftTo(ys,q);\n if(ts != ms) BigInteger.ZERO.subTo(q,q);\n }\n r.t = ys;\n r.clamp();\n if(nsh > 0) r.rShiftTo(nsh,r);\t// Denormalize remainder\n if(ts < 0) BigInteger.ZERO.subTo(r,r);\n}\n\n// (public) this mod a\nfunction bnMod(a) {\n var r = nbi();\n this.abs().divRemTo(a,null,r);\n if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);\n return r;\n}\n\n// Modular reduction using \"classic\" algorithm\nfunction Classic(m) { this.m = m; }\nfunction cConvert(x) {\n if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);\n else return x;\n}\nfunction cRevert(x) { return x; }\nfunction cReduce(x) { x.divRemTo(this.m,null,x); }\nfunction cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\nfunction cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\nClassic.prototype.convert = cConvert;\nClassic.prototype.revert = cRevert;\nClassic.prototype.reduce = cReduce;\nClassic.prototype.mulTo = cMulTo;\nClassic.prototype.sqrTo = cSqrTo;\n\n// (protected) return \"-1/this % 2^DB\"; useful for Mont. 
reduction\n// justification:\n// xy == 1 (mod m)\n// xy = 1+km\n// xy(2-xy) = (1+km)(1-km)\n// x[y(2-xy)] = 1-k^2m^2\n// x[y(2-xy)] == 1 (mod m^2)\n// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2\n// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.\n// JS multiply \"overflows\" differently from C/C++, so care is needed here.\nfunction bnpInvDigit() {\n if(this.t < 1) return 0;\n var x = this.data[0];\n if((x&1) == 0) return 0;\n var y = x&3;\t\t// y == 1/x mod 2^2\n y = (y*(2-(x&0xf)*y))&0xf;\t// y == 1/x mod 2^4\n y = (y*(2-(x&0xff)*y))&0xff;\t// y == 1/x mod 2^8\n y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;\t// y == 1/x mod 2^16\n // last step - calculate inverse mod DV directly;\n // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints\n y = (y*(2-x*y%this.DV))%this.DV;\t\t// y == 1/x mod 2^dbits\n // we really want the negative inverse, and -DV < y < DV\n return (y>0)?this.DV-y:-y;\n}\n\n// Montgomery reduction\nfunction Montgomery(m) {\n this.m = m;\n this.mp = m.invDigit();\n this.mpl = this.mp&0x7fff;\n this.mph = this.mp>>15;\n this.um = (1<<(m.DB-15))-1;\n this.mt2 = 2*m.t;\n}\n\n// xR mod m\nfunction montConvert(x) {\n var r = nbi();\n x.abs().dlShiftTo(this.m.t,r);\n r.divRemTo(this.m,null,r);\n if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);\n return r;\n}\n\n// x/R mod m\nfunction montRevert(x) {\n var r = nbi();\n x.copyTo(r);\n this.reduce(r);\n return r;\n}\n\n// x = x/R mod m (HAC 14.32)\nfunction montReduce(x) {\n while(x.t <= this.mt2)\t// pad x so am has enough room later\n x.data[x.t++] = 0;\n for(var i = 0; i < this.m.t; ++i) {\n // faster way of calculating u0 = x.data[i]*mp mod DV\n var j = x.data[i]&0x7fff;\n var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM;\n // use am to combine the multiply-shift-add into one call\n j = i+this.m.t;\n x.data[j] += this.m.am(0,u0,x,i,0,this.m.t);\n // propagate carry\n while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; }\n }\n x.clamp();\n x.drShiftTo(this.m.t,x);\n if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);\n}\n\n// r = \"x^2/R mod m\"; x != r\nfunction montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\n// r = \"xy/R mod m\"; x,y != r\nfunction montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\n\nMontgomery.prototype.convert = montConvert;\nMontgomery.prototype.revert = montRevert;\nMontgomery.prototype.reduce = montReduce;\nMontgomery.prototype.mulTo = montMulTo;\nMontgomery.prototype.sqrTo = montSqrTo;\n\n// (protected) true iff this is even\nfunction bnpIsEven() { return ((this.t>0)?(this.data[0]&1):this.s) == 0; }\n\n// (protected) this^e, e < 2^32, doing sqr and mul with \"r\" (HAC 14.79)\nfunction bnpExp(e,z) {\n if(e > 0xffffffff || e < 1) return BigInteger.ONE;\n var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;\n g.copyTo(r);\n while(--i >= 0) {\n z.sqrTo(r,r2);\n if((e&(1< 0) z.mulTo(r2,g,r);\n else { var t = r; r = r2; r2 = t; }\n }\n return z.revert(r);\n}\n\n// (public) this^e % m, 0 <= e < 2^32\nfunction bnModPowInt(e,m) {\n var z;\n if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);\n return this.exp(e,z);\n}\n\n// protected\nBigInteger.prototype.copyTo = bnpCopyTo;\nBigInteger.prototype.fromInt = bnpFromInt;\nBigInteger.prototype.fromString = bnpFromString;\nBigInteger.prototype.clamp = bnpClamp;\nBigInteger.prototype.dlShiftTo = bnpDLShiftTo;\nBigInteger.prototype.drShiftTo = bnpDRShiftTo;\nBigInteger.prototype.lShiftTo = bnpLShiftTo;\nBigInteger.prototype.rShiftTo = 
bnpRShiftTo;\nBigInteger.prototype.subTo = bnpSubTo;\nBigInteger.prototype.multiplyTo = bnpMultiplyTo;\nBigInteger.prototype.squareTo = bnpSquareTo;\nBigInteger.prototype.divRemTo = bnpDivRemTo;\nBigInteger.prototype.invDigit = bnpInvDigit;\nBigInteger.prototype.isEven = bnpIsEven;\nBigInteger.prototype.exp = bnpExp;\n\n// public\nBigInteger.prototype.toString = bnToString;\nBigInteger.prototype.negate = bnNegate;\nBigInteger.prototype.abs = bnAbs;\nBigInteger.prototype.compareTo = bnCompareTo;\nBigInteger.prototype.bitLength = bnBitLength;\nBigInteger.prototype.mod = bnMod;\nBigInteger.prototype.modPowInt = bnModPowInt;\n\n// \"constants\"\nBigInteger.ZERO = nbv(0);\nBigInteger.ONE = nbv(1);\n\n// jsbn2 lib\n\n//Copyright (c) 2005-2009 Tom Wu\n//All Rights Reserved.\n//See \"LICENSE\" for details (See jsbn.js for LICENSE).\n\n//Extended JavaScript BN functions, required for RSA private ops.\n\n//Version 1.1: new BigInteger(\"0\", 10) returns \"proper\" zero\n\n//(public)\nfunction bnClone() { var r = nbi(); this.copyTo(r); return r; }\n\n//(public) return value as integer\nfunction bnIntValue() {\nif(this.s < 0) {\n if(this.t == 1) return this.data[0]-this.DV;\n else if(this.t == 0) return -1;\n} else if(this.t == 1) return this.data[0];\nelse if(this.t == 0) return 0;\n// assumes 16 < DB < 32\nreturn ((this.data[1]&((1<<(32-this.DB))-1))<>24; }\n\n//(public) return value as short (assumes DB>=16)\nfunction bnShortValue() { return (this.t==0)?this.s:(this.data[0]<<16)>>16; }\n\n//(protected) return x s.t. r^x < DV\nfunction bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }\n\n//(public) 0 if this == 0, 1 if this > 0\nfunction bnSigNum() {\nif(this.s < 0) return -1;\nelse if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0;\nelse return 1;\n}\n\n//(protected) convert to radix string\nfunction bnpToRadix(b) {\nif(b == null) b = 10;\nif(this.signum() == 0 || b < 2 || b > 36) return \"0\";\nvar cs = this.chunkSize(b);\nvar a = Math.pow(b,cs);\nvar d = nbv(a), y = nbi(), z = nbi(), r = \"\";\nthis.divRemTo(d,y,z);\nwhile(y.signum() > 0) {\n r = (a+z.intValue()).toString(b).substr(1) + r;\n y.divRemTo(d,y,z);\n}\nreturn z.intValue().toString(b) + r;\n}\n\n//(protected) convert from radix string\nfunction bnpFromRadix(s,b) {\nthis.fromInt(0);\nif(b == null) b = 10;\nvar cs = this.chunkSize(b);\nvar d = Math.pow(b,cs), mi = false, j = 0, w = 0;\nfor(var i = 0; i < s.length; ++i) {\n var x = intAt(s,i);\n if(x < 0) {\n if(s.charAt(i) == \"-\" && this.signum() == 0) mi = true;\n continue;\n }\n w = b*w+x;\n if(++j >= cs) {\n this.dMultiply(d);\n this.dAddOffset(w,0);\n j = 0;\n w = 0;\n }\n}\nif(j > 0) {\n this.dMultiply(Math.pow(b,j));\n this.dAddOffset(w,0);\n}\nif(mi) BigInteger.ZERO.subTo(this,this);\n}\n\n//(protected) alternate constructor\nfunction bnpFromNumber(a,b,c) {\nif(\"number\" == typeof b) {\n // new BigInteger(int,int,RNG)\n if(a < 2) this.fromInt(1);\n else {\n this.fromNumber(a,c);\n if(!this.testBit(a-1)) // force MSB set\n this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);\n if(this.isEven()) this.dAddOffset(1,0); // force odd\n while(!this.isProbablePrime(b)) {\n this.dAddOffset(2,0);\n if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);\n }\n }\n} else {\n // new BigInteger(int,RNG)\n var x = new Array(), t = a&7;\n x.length = (a>>3)+1;\n b.nextBytes(x);\n if(t > 0) x[0] &= ((1< 0) {\n if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p)\n r[k++] = d|(this.s<<(this.DB-p));\n while(i >= 0) {\n if(p < 8) {\n d 
= (this.data[i]&((1<>(p+=this.DB-8);\n } else {\n d = (this.data[i]>>(p-=8))&0xff;\n if(p <= 0) { p += this.DB; --i; }\n }\n if((d&0x80) != 0) d |= -256;\n if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;\n if(k > 0 || d != this.s) r[k++] = d;\n }\n}\nreturn r;\n}\n\nfunction bnEquals(a) { return(this.compareTo(a)==0); }\nfunction bnMin(a) { return(this.compareTo(a)<0)?this:a; }\nfunction bnMax(a) { return(this.compareTo(a)>0)?this:a; }\n\n//(protected) r = this op a (bitwise)\nfunction bnpBitwiseTo(a,op,r) {\nvar i, f, m = Math.min(a.t,this.t);\nfor(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]);\nif(a.t < this.t) {\n f = a.s&this.DM;\n for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f);\n r.t = this.t;\n} else {\n f = this.s&this.DM;\n for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]);\n r.t = a.t;\n}\nr.s = op(this.s,a.s);\nr.clamp();\n}\n\n//(public) this & a\nfunction op_and(x,y) { return x&y; }\nfunction bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }\n\n//(public) this | a\nfunction op_or(x,y) { return x|y; }\nfunction bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }\n\n//(public) this ^ a\nfunction op_xor(x,y) { return x^y; }\nfunction bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }\n\n//(public) this & ~a\nfunction op_andnot(x,y) { return x&~y; }\nfunction bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }\n\n//(public) ~this\nfunction bnNot() {\nvar r = nbi();\nfor(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i];\nr.t = this.t;\nr.s = ~this.s;\nreturn r;\n}\n\n//(public) this << n\nfunction bnShiftLeft(n) {\nvar r = nbi();\nif(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);\nreturn r;\n}\n\n//(public) this >> n\nfunction bnShiftRight(n) {\nvar r = nbi();\nif(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);\nreturn r;\n}\n\n//return index of lowest 1-bit in x, x < 2^31\nfunction lbit(x) {\nif(x == 0) return -1;\nvar r = 0;\nif((x&0xffff) == 0) { x >>= 16; r += 16; }\nif((x&0xff) == 0) { x >>= 8; r += 8; }\nif((x&0xf) == 0) { x >>= 4; r += 4; }\nif((x&3) == 0) { x >>= 2; r += 2; }\nif((x&1) == 0) ++r;\nreturn r;\n}\n\n//(public) returns index of lowest 1-bit (or -1 if none)\nfunction bnGetLowestSetBit() {\nfor(var i = 0; i < this.t; ++i)\n if(this.data[i] != 0) return i*this.DB+lbit(this.data[i]);\nif(this.s < 0) return this.t*this.DB;\nreturn -1;\n}\n\n//return number of 1 bits in x\nfunction cbit(x) {\nvar r = 0;\nwhile(x != 0) { x &= x-1; ++r; }\nreturn r;\n}\n\n//(public) return number of set bits\nfunction bnBitCount() {\nvar r = 0, x = this.s&this.DM;\nfor(var i = 0; i < this.t; ++i) r += cbit(this.data[i]^x);\nreturn r;\n}\n\n//(public) true iff nth bit is set\nfunction bnTestBit(n) {\nvar j = Math.floor(n/this.DB);\nif(j >= this.t) return(this.s!=0);\nreturn((this.data[j]&(1<<(n%this.DB)))!=0);\n}\n\n//(protected) this op (1<>= this.DB;\n}\nif(a.t < this.t) {\n c += a.s;\n while(i < this.t) {\n c += this.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += this.s;\n} else {\n c += this.s;\n while(i < a.t) {\n c += a.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += a.s;\n}\nr.s = (c<0)?-1:0;\nif(c > 0) r.data[i++] = c;\nelse if(c < -1) r.data[i++] = this.DV+c;\nr.t = i;\nr.clamp();\n}\n\n//(public) this + a\nfunction bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }\n\n//(public) this - a\nfunction bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }\n\n//(public) this * a\nfunction bnMultiply(a) { var r = nbi(); 
this.multiplyTo(a,r); return r; }\n\n//(public) this / a\nfunction bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }\n\n//(public) this % a\nfunction bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }\n\n//(public) [this/a,this%a]\nfunction bnDivideAndRemainder(a) {\nvar q = nbi(), r = nbi();\nthis.divRemTo(a,q,r);\nreturn new Array(q,r);\n}\n\n//(protected) this *= n, this >= 0, 1 < n < DV\nfunction bnpDMultiply(n) {\nthis.data[this.t] = this.am(0,n-1,this,0,0,this.t);\n++this.t;\nthis.clamp();\n}\n\n//(protected) this += n << w words, this >= 0\nfunction bnpDAddOffset(n,w) {\nif(n == 0) return;\nwhile(this.t <= w) this.data[this.t++] = 0;\nthis.data[w] += n;\nwhile(this.data[w] >= this.DV) {\n this.data[w] -= this.DV;\n if(++w >= this.t) this.data[this.t++] = 0;\n ++this.data[w];\n}\n}\n\n//A \"null\" reducer\nfunction NullExp() {}\nfunction nNop(x) { return x; }\nfunction nMulTo(x,y,r) { x.multiplyTo(y,r); }\nfunction nSqrTo(x,r) { x.squareTo(r); }\n\nNullExp.prototype.convert = nNop;\nNullExp.prototype.revert = nNop;\nNullExp.prototype.mulTo = nMulTo;\nNullExp.prototype.sqrTo = nSqrTo;\n\n//(public) this^e\nfunction bnPow(e) { return this.exp(e,new NullExp()); }\n\n//(protected) r = lower n words of \"this * a\", a.t <= n\n//\"this\" should be the larger one if appropriate.\nfunction bnpMultiplyLowerTo(a,n,r) {\nvar i = Math.min(this.t+a.t,n);\nr.s = 0; // assumes a,this >= 0\nr.t = i;\nwhile(i > 0) r.data[--i] = 0;\nvar j;\nfor(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t);\nfor(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i);\nr.clamp();\n}\n\n//(protected) r = \"this * a\" without lower n words, n > 0\n//\"this\" should be the larger one if appropriate.\nfunction bnpMultiplyUpperTo(a,n,r) {\n--n;\nvar i = r.t = this.t+a.t-n;\nr.s = 0; // assumes a,this >= 0\nwhile(--i >= 0) r.data[i] = 0;\nfor(i = Math.max(n-this.t,0); i < a.t; ++i)\n r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n);\nr.clamp();\nr.drShiftTo(1,r);\n}\n\n//Barrett modular reduction\nfunction Barrett(m) {\n// setup Barrett\nthis.r2 = nbi();\nthis.q3 = nbi();\nBigInteger.ONE.dlShiftTo(2*m.t,this.r2);\nthis.mu = this.r2.divide(m);\nthis.m = m;\n}\n\nfunction barrettConvert(x) {\nif(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);\nelse if(x.compareTo(this.m) < 0) return x;\nelse { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }\n}\n\nfunction barrettRevert(x) { return x; }\n\n//x = x mod m (HAC 14.42)\nfunction barrettReduce(x) {\nx.drShiftTo(this.m.t-1,this.r2);\nif(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }\nthis.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);\nthis.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);\nwhile(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);\nx.subTo(this.r2,x);\nwhile(x.compareTo(this.m) >= 0) x.subTo(this.m,x);\n}\n\n//r = x^2 mod m; x != r\nfunction barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\n//r = x*y mod m; x,y != r\nfunction barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\n\nBarrett.prototype.convert = barrettConvert;\nBarrett.prototype.revert = barrettRevert;\nBarrett.prototype.reduce = barrettReduce;\nBarrett.prototype.mulTo = barrettMulTo;\nBarrett.prototype.sqrTo = barrettSqrTo;\n\n//(public) this^e % m (HAC 14.85)\nfunction bnModPow(e,m) {\nvar i = e.bitLength(), k, r = nbv(1), z;\nif(i <= 0) return r;\nelse if(i < 18) k = 1;\nelse if(i < 48) k = 3;\nelse if(i < 144) k = 4;\nelse if(i < 768) k = 5;\nelse k = 6;\nif(i < 8)\n z = new 
Classic(m);\nelse if(m.isEven())\n z = new Barrett(m);\nelse\n z = new Montgomery(m);\n\n// precomputation\nvar g = new Array(), n = 3, k1 = k-1, km = (1< 1) {\n var g2 = nbi();\n z.sqrTo(g[1],g2);\n while(n <= km) {\n g[n] = nbi();\n z.mulTo(g2,g[n-2],g[n]);\n n += 2;\n }\n}\n\nvar j = e.t-1, w, is1 = true, r2 = nbi(), t;\ni = nbits(e.data[j])-1;\nwhile(j >= 0) {\n if(i >= k1) w = (e.data[j]>>(i-k1))&km;\n else {\n w = (e.data[j]&((1<<(i+1))-1))<<(k1-i);\n if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1);\n }\n\n n = k;\n while((w&1) == 0) { w >>= 1; --n; }\n if((i -= n) < 0) { i += this.DB; --j; }\n if(is1) { // ret == 1, don't bother squaring or multiplying it\n g[w].copyTo(r);\n is1 = false;\n } else {\n while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }\n if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }\n z.mulTo(r2,g[w],r);\n }\n\n while(j >= 0 && (e.data[j]&(1< 0) {\n x.rShiftTo(g,x);\n y.rShiftTo(g,y);\n}\nwhile(x.signum() > 0) {\n if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);\n if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);\n if(x.compareTo(y) >= 0) {\n x.subTo(y,x);\n x.rShiftTo(1,x);\n } else {\n y.subTo(x,y);\n y.rShiftTo(1,y);\n }\n}\nif(g > 0) y.lShiftTo(g,y);\nreturn y;\n}\n\n//(protected) this % n, n < 2^26\nfunction bnpModInt(n) {\nif(n <= 0) return 0;\nvar d = this.DV%n, r = (this.s<0)?n-1:0;\nif(this.t > 0)\n if(d == 0) r = this.data[0]%n;\n else for(var i = this.t-1; i >= 0; --i) r = (d*r+this.data[i])%n;\nreturn r;\n}\n\n//(public) 1/this % m (HAC 14.61)\nfunction bnModInverse(m) {\nvar ac = m.isEven();\nif((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;\nvar u = m.clone(), v = this.clone();\nvar a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);\nwhile(u.signum() != 0) {\n while(u.isEven()) {\n u.rShiftTo(1,u);\n if(ac) {\n if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }\n a.rShiftTo(1,a);\n } else if(!b.isEven()) b.subTo(m,b);\n b.rShiftTo(1,b);\n }\n while(v.isEven()) {\n v.rShiftTo(1,v);\n if(ac) {\n if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }\n c.rShiftTo(1,c);\n } else if(!d.isEven()) d.subTo(m,d);\n d.rShiftTo(1,d);\n }\n if(u.compareTo(v) >= 0) {\n u.subTo(v,u);\n if(ac) a.subTo(c,a);\n b.subTo(d,b);\n } else {\n v.subTo(u,v);\n if(ac) c.subTo(a,c);\n d.subTo(b,d);\n }\n}\nif(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;\nif(d.compareTo(m) >= 0) return d.subtract(m);\nif(d.signum() < 0) d.addTo(m,d); else return d;\nif(d.signum() < 0) return d.add(m); else return d;\n}\n\nvar lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509];\nvar lplim = (1<<26)/lowprimes[lowprimes.length-1];\n\n//(public) test primality with certainty >= 1-.5^t\nfunction bnIsProbablePrime(t) {\nvar i, x = this.abs();\nif(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) {\n for(i = 0; i < lowprimes.length; ++i)\n if(x.data[0] == lowprimes[i]) return true;\n return false;\n}\nif(x.isEven()) return false;\ni = 1;\nwhile(i < lowprimes.length) {\n var m = lowprimes[i], j = i+1;\n while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];\n m = x.modInt(m);\n while(i < j) if(m%lowprimes[i++] == 0) return false;\n}\nreturn x.millerRabin(t);\n}\n\n//(protected) true if probably prime (HAC 
4.24, Miller-Rabin)\nfunction bnpMillerRabin(t) {\nvar n1 = this.subtract(BigInteger.ONE);\nvar k = n1.getLowestSetBit();\nif(k <= 0) return false;\nvar r = n1.shiftRight(k);\nvar prng = bnGetPrng();\nvar a;\nfor(var i = 0; i < t; ++i) {\n // select witness 'a' at random from between 1 and n1\n do {\n a = new BigInteger(this.bitLength(), prng);\n }\n while(a.compareTo(BigInteger.ONE) <= 0 || a.compareTo(n1) >= 0);\n var y = a.modPow(r,this);\n if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {\n var j = 1;\n while(j++ < k && y.compareTo(n1) != 0) {\n y = y.modPowInt(2,this);\n if(y.compareTo(BigInteger.ONE) == 0) return false;\n }\n if(y.compareTo(n1) != 0) return false;\n }\n}\nreturn true;\n}\n\n// get pseudo random number generator\nfunction bnGetPrng() {\n // create prng with api that matches BigInteger secure random\n return {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n for(var i = 0; i < x.length; ++i) {\n x[i] = Math.floor(Math.random() * 0x0100);\n }\n }\n };\n}\n\n//protected\nBigInteger.prototype.chunkSize = bnpChunkSize;\nBigInteger.prototype.toRadix = bnpToRadix;\nBigInteger.prototype.fromRadix = bnpFromRadix;\nBigInteger.prototype.fromNumber = bnpFromNumber;\nBigInteger.prototype.bitwiseTo = bnpBitwiseTo;\nBigInteger.prototype.changeBit = bnpChangeBit;\nBigInteger.prototype.addTo = bnpAddTo;\nBigInteger.prototype.dMultiply = bnpDMultiply;\nBigInteger.prototype.dAddOffset = bnpDAddOffset;\nBigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;\nBigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;\nBigInteger.prototype.modInt = bnpModInt;\nBigInteger.prototype.millerRabin = bnpMillerRabin;\n\n//public\nBigInteger.prototype.clone = bnClone;\nBigInteger.prototype.intValue = bnIntValue;\nBigInteger.prototype.byteValue = bnByteValue;\nBigInteger.prototype.shortValue = bnShortValue;\nBigInteger.prototype.signum = bnSigNum;\nBigInteger.prototype.toByteArray = bnToByteArray;\nBigInteger.prototype.equals = bnEquals;\nBigInteger.prototype.min = bnMin;\nBigInteger.prototype.max = bnMax;\nBigInteger.prototype.and = bnAnd;\nBigInteger.prototype.or = bnOr;\nBigInteger.prototype.xor = bnXor;\nBigInteger.prototype.andNot = bnAndNot;\nBigInteger.prototype.not = bnNot;\nBigInteger.prototype.shiftLeft = bnShiftLeft;\nBigInteger.prototype.shiftRight = bnShiftRight;\nBigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;\nBigInteger.prototype.bitCount = bnBitCount;\nBigInteger.prototype.testBit = bnTestBit;\nBigInteger.prototype.setBit = bnSetBit;\nBigInteger.prototype.clearBit = bnClearBit;\nBigInteger.prototype.flipBit = bnFlipBit;\nBigInteger.prototype.add = bnAdd;\nBigInteger.prototype.subtract = bnSubtract;\nBigInteger.prototype.multiply = bnMultiply;\nBigInteger.prototype.divide = bnDivide;\nBigInteger.prototype.remainder = bnRemainder;\nBigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;\nBigInteger.prototype.modPow = bnModPow;\nBigInteger.prototype.modInverse = bnModInverse;\nBigInteger.prototype.pow = bnPow;\nBigInteger.prototype.gcd = bnGCD;\nBigInteger.prototype.isProbablePrime = bnIsProbablePrime;\n\n//BigInteger interfaces not implemented in jsbn:\n\n//BigInteger(int signum, byte[] magnitude)\n//double doubleValue()\n//float floatValue()\n//int hashCode()\n//long longValue()\n//static BigInteger valueOf(long val)\n","/**\n * Javascript implementation of RSA-KEM.\n *\n * @author Lautaro Cozzani Rodriguez\n * @author Dave Longley\n *\n * Copyright (c) 2014 Lautaro Cozzani \n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar 
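A brief sketch of the BigInteger API assembled above: modPow() chooses Classic, Barrett or Montgomery reduction from the exponent size and modulus parity, and isProbablePrime() combines trial division against lowprimes with Miller-Rabin. Assumes the standard node-forge entry point; the constants are illustrative only.

var forge = require('node-forge');
var BigInteger = forge.jsbn.BigInteger;

var p = new BigInteger('ffffffffffffffc5', 16);  // 2^64 - 59, a prime
console.log(p.isProbablePrime(20));              // true: passes trial division and Miller-Rabin

var base = new BigInteger('2', 16);
var exp = new BigInteger('10', 16);              // 16
console.log(base.modPow(exp, p).toString(16));   // '10000', i.e. 2^16 mod p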
forge = require('./forge');\nrequire('./util');\nrequire('./random');\nrequire('./jsbn');\n\nmodule.exports = forge.kem = forge.kem || {};\n\nvar BigInteger = forge.jsbn.BigInteger;\n\n/**\n * The API for the RSA Key Encapsulation Mechanism (RSA-KEM) from ISO 18033-2.\n */\nforge.kem.rsa = {};\n\n/**\n * Creates an RSA KEM API object for generating a secret asymmetric key.\n *\n * The symmetric key may be generated via a call to 'encrypt', which will\n * produce a ciphertext to be transmitted to the recipient and a key to be\n * kept secret. The ciphertext is a parameter to be passed to 'decrypt' which\n * will produce the same secret key for the recipient to use to decrypt a\n * message that was encrypted with the secret key.\n *\n * @param kdf the KDF API to use (eg: new forge.kem.kdf1()).\n * @param options the options to use.\n * [prng] a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n */\nforge.kem.rsa.create = function(kdf, options) {\n options = options || {};\n var prng = options.prng || forge.random;\n\n var kem = {};\n\n /**\n * Generates a secret key and its encapsulation.\n *\n * @param publicKey the RSA public key to encrypt with.\n * @param keyLength the length, in bytes, of the secret key to generate.\n *\n * @return an object with:\n * encapsulation: the ciphertext for generating the secret key, as a\n * binary-encoded string of bytes.\n * key: the secret key to use for encrypting a message.\n */\n kem.encrypt = function(publicKey, keyLength) {\n // generate a random r where 1 < r < n\n var byteLength = Math.ceil(publicKey.n.bitLength() / 8);\n var r;\n do {\n r = new BigInteger(\n forge.util.bytesToHex(prng.getBytesSync(byteLength)),\n 16).mod(publicKey.n);\n } while(r.compareTo(BigInteger.ONE) <= 0);\n\n // prepend r with zeros\n r = forge.util.hexToBytes(r.toString(16));\n var zeros = byteLength - r.length;\n if(zeros > 0) {\n r = forge.util.fillString(String.fromCharCode(0), zeros) + r;\n }\n\n // encrypt the random\n var encapsulation = publicKey.encrypt(r, 'NONE');\n\n // generate the secret key\n var key = kdf.generate(r, keyLength);\n\n return {encapsulation: encapsulation, key: key};\n };\n\n /**\n * Decrypts an encapsulated secret key.\n *\n * @param privateKey the RSA private key to decrypt with.\n * @param encapsulation the ciphertext for generating the secret key, as\n * a binary-encoded string of bytes.\n * @param keyLength the length, in bytes, of the secret key to generate.\n *\n * @return the secret key as a binary-encoded string of bytes.\n */\n kem.decrypt = function(privateKey, encapsulation, keyLength) {\n // decrypt the encapsulation and generate the secret key\n var r = privateKey.decrypt(encapsulation, 'NONE');\n return kdf.generate(r, keyLength);\n };\n\n return kem;\n};\n\n// TODO: add forge.kem.kdf.create('KDF1', {md: ..., ...}) API?\n\n/**\n * Creates a key derivation API object that implements KDF1 per ISO 18033-2.\n *\n * @param md the hash API to use.\n * @param [digestLength] an optional digest length that must be positive and\n * less than or equal to md.digestLength.\n *\n * @return a KDF1 API object.\n */\nforge.kem.kdf1 = function(md, digestLength) {\n _createKDF(this, md, 0, digestLength || md.digestLength);\n};\n\n/**\n * Creates a key derivation API object that implements KDF2 per ISO 18033-2.\n *\n * @param md the hash API to use.\n * @param [digestLength] an optional digest length that must be positive and\n * less than or equal to md.digestLength.\n *\n * @return a KDF2 API 
object.\n */\nforge.kem.kdf2 = function(md, digestLength) {\n _createKDF(this, md, 1, digestLength || md.digestLength);\n};\n\n/**\n * Creates a KDF1 or KDF2 API object.\n *\n * @param md the hash API to use.\n * @param counterStart the starting index for the counter.\n * @param digestLength the digest length to use.\n *\n * @return the KDF API object.\n */\nfunction _createKDF(kdf, md, counterStart, digestLength) {\n /**\n * Generate a key of the specified length.\n *\n * @param x the binary-encoded byte string to generate a key from.\n * @param length the number of bytes to generate (the size of the key).\n *\n * @return the key as a binary-encoded string.\n */\n kdf.generate = function(x, length) {\n var key = new forge.util.ByteBuffer();\n\n // run counter from counterStart to ceil(length / Hash.len)\n var k = Math.ceil(length / digestLength) + counterStart;\n\n var c = new forge.util.ByteBuffer();\n for(var i = counterStart; i < k; ++i) {\n // I2OSP(i, 4): convert counter to an octet string of 4 octets\n c.putInt32(i);\n\n // digest 'x' and the counter and add the result to the key\n md.start();\n md.update(x + c.getBytes());\n var hash = md.digest();\n key.putBytes(hash.getBytes(digestLength));\n }\n\n // truncate to the correct key length\n key.truncate(key.length() - length);\n return key.getBytes();\n };\n}\n","/**\n * Cross-browser support for logging in a web application.\n *\n * @author David I. Lehn \n *\n * Copyright (c) 2008-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\n/* LOG API */\nmodule.exports = forge.log = forge.log || {};\n\n/**\n * Application logging system.\n *\n * Each logger level available as it's own function of the form:\n * forge.log.level(category, args...)\n * The category is an arbitrary string, and the args are the same as\n * Firebug's console.log API. By default the call will be output as:\n * 'LEVEL [category] , args[1], ...'\n * This enables proper % formatting via the first argument.\n * Each category is enabled by default but can be enabled or disabled with\n * the setCategoryEnabled() function.\n */\n// list of known levels\nforge.log.levels = [\n 'none', 'error', 'warning', 'info', 'debug', 'verbose', 'max'];\n// info on the levels indexed by name:\n// index: level index\n// name: uppercased display name\nvar sLevelInfo = {};\n// list of loggers\nvar sLoggers = [];\n/**\n * Standard console logger. If no console support is enabled this will\n * remain null. Check before using.\n */\nvar sConsoleLogger = null;\n\n// logger flags\n/**\n * Lock the level at the current value. Used in cases where user config may\n * set the level such that only critical messages are seen but more verbose\n * messages are needed for debugging or other purposes.\n */\nforge.log.LEVEL_LOCKED = (1 << 1);\n/**\n * Always call log function. By default, the logging system will check the\n * message level against logger.level before calling the log function. This\n * flag allows the function to do its own check.\n */\nforge.log.NO_LEVEL_CHECK = (1 << 2);\n/**\n * Perform message interpolation with the passed arguments. \"%\" style\n * fields in log messages will be replaced by arguments as needed. Some\n * loggers, such as Firebug, may do this automatically. 
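A usage sketch for the RSA-KEM API documented above (ISO 18033-2): encrypt() draws a random r below the modulus, encapsulates it with raw RSA, and derives the shared key through the KDF; decrypt() recovers r and re-derives the same key. Assumes the standard node-forge entry point; the RSA key size and key length are illustrative only.

var forge = require('node-forge');

var rsa = forge.pki.rsa.generateKeyPair({bits: 2048, e: 0x10001});
var kdf = new forge.kem.kdf1(forge.md.sha256.create());
var kem = forge.kem.rsa.create(kdf);

// Sender: a 16-byte secret key plus the ciphertext that encapsulates it.
var sent = kem.encrypt(rsa.publicKey, 16);

// Receiver: the same 16-byte key, recovered from the encapsulation alone.
var key = kem.decrypt(rsa.privateKey, sent.encapsulation, 16);
console.log(key === sent.key);  // true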
The original log\n * message will be available as 'message' and the interpolated version will\n * be available as 'fullMessage'.\n */\nforge.log.INTERPOLATE = (1 << 3);\n\n// setup each log level\nfor(var i = 0; i < forge.log.levels.length; ++i) {\n var level = forge.log.levels[i];\n sLevelInfo[level] = {\n index: i,\n name: level.toUpperCase()\n };\n}\n\n/**\n * Message logger. Will dispatch a message to registered loggers as needed.\n *\n * @param message message object\n */\nforge.log.logMessage = function(message) {\n var messageLevelIndex = sLevelInfo[message.level].index;\n for(var i = 0; i < sLoggers.length; ++i) {\n var logger = sLoggers[i];\n if(logger.flags & forge.log.NO_LEVEL_CHECK) {\n logger.f(message);\n } else {\n // get logger level\n var loggerLevelIndex = sLevelInfo[logger.level].index;\n // check level\n if(messageLevelIndex <= loggerLevelIndex) {\n // message critical enough, call logger\n logger.f(logger, message);\n }\n }\n }\n};\n\n/**\n * Sets the 'standard' key on a message object to:\n * \"LEVEL [category] \" + message\n *\n * @param message a message log object\n */\nforge.log.prepareStandard = function(message) {\n if(!('standard' in message)) {\n message.standard =\n sLevelInfo[message.level].name +\n //' ' + +message.timestamp +\n ' [' + message.category + '] ' +\n message.message;\n }\n};\n\n/**\n * Sets the 'full' key on a message object to the original message\n * interpolated via % formatting with the message arguments.\n *\n * @param message a message log object.\n */\nforge.log.prepareFull = function(message) {\n if(!('full' in message)) {\n // copy args and insert message at the front\n var args = [message.message];\n args = args.concat([] || message['arguments']);\n // format the message\n message.full = forge.util.format.apply(this, args);\n }\n};\n\n/**\n * Applies both preparseStandard() and prepareFull() to a message object and\n * store result in 'standardFull'.\n *\n * @param message a message log object.\n */\nforge.log.prepareStandardFull = function(message) {\n if(!('standardFull' in message)) {\n // FIXME implement 'standardFull' logging\n forge.log.prepareStandard(message);\n message.standardFull = message.standard;\n }\n};\n\n// create log level functions\nif(true) {\n // levels for which we want functions\n var levels = ['error', 'warning', 'info', 'debug', 'verbose'];\n for(var i = 0; i < levels.length; ++i) {\n // wrap in a function to ensure proper level var is passed\n (function(level) {\n // create function for this level\n forge.log[level] = function(category, message/*, args...*/) {\n // convert arguments to real array, remove category and message\n var args = Array.prototype.slice.call(arguments).slice(2);\n // create message object\n // Note: interpolation and standard formatting is done lazily\n var msg = {\n timestamp: new Date(),\n level: level,\n category: category,\n message: message,\n 'arguments': args\n /*standard*/\n /*full*/\n /*fullMessage*/\n };\n // process this message\n forge.log.logMessage(msg);\n };\n })(levels[i]);\n }\n}\n\n/**\n * Creates a new logger with specified custom logging function.\n *\n * The logging function has a signature of:\n * function(logger, message)\n * logger: current logger\n * message: object:\n * level: level id\n * category: category\n * message: string message\n * arguments: Array of extra arguments\n * fullMessage: interpolated message and arguments if INTERPOLATE flag set\n *\n * @param logFunction a logging function which takes a log message object\n * as a parameter.\n *\n * 
@return a logger object.\n */\nforge.log.makeLogger = function(logFunction) {\n var logger = {\n flags: 0,\n f: logFunction\n };\n forge.log.setLevel(logger, 'none');\n return logger;\n};\n\n/**\n * Sets the current log level on a logger.\n *\n * @param logger the target logger.\n * @param level the new maximum log level as a string.\n *\n * @return true if set, false if not.\n */\nforge.log.setLevel = function(logger, level) {\n var rval = false;\n if(logger && !(logger.flags & forge.log.LEVEL_LOCKED)) {\n for(var i = 0; i < forge.log.levels.length; ++i) {\n var aValidLevel = forge.log.levels[i];\n if(level == aValidLevel) {\n // set level\n logger.level = level;\n rval = true;\n break;\n }\n }\n }\n\n return rval;\n};\n\n/**\n * Locks the log level at its current value.\n *\n * @param logger the target logger.\n * @param lock boolean lock value, default to true.\n */\nforge.log.lock = function(logger, lock) {\n if(typeof lock === 'undefined' || lock) {\n logger.flags |= forge.log.LEVEL_LOCKED;\n } else {\n logger.flags &= ~forge.log.LEVEL_LOCKED;\n }\n};\n\n/**\n * Adds a logger.\n *\n * @param logger the logger object.\n */\nforge.log.addLogger = function(logger) {\n sLoggers.push(logger);\n};\n\n// setup the console logger if possible, else create fake console.log\nif(typeof(console) !== 'undefined' && 'log' in console) {\n var logger;\n if(console.error && console.warn && console.info && console.debug) {\n // looks like Firebug-style logging is available\n // level handlers map\n var levelHandlers = {\n error: console.error,\n warning: console.warn,\n info: console.info,\n debug: console.debug,\n verbose: console.debug\n };\n var f = function(logger, message) {\n forge.log.prepareStandard(message);\n var handler = levelHandlers[message.level];\n // prepend standard message and concat args\n var args = [message.standard];\n args = args.concat(message['arguments'].slice());\n // apply to low-level console function\n handler.apply(console, args);\n };\n logger = forge.log.makeLogger(f);\n } else {\n // only appear to have basic console.log\n var f = function(logger, message) {\n forge.log.prepareStandardFull(message);\n console.log(message.standardFull);\n };\n logger = forge.log.makeLogger(f);\n }\n forge.log.setLevel(logger, 'debug');\n forge.log.addLogger(logger);\n sConsoleLogger = logger;\n} else {\n // define fake console.log to avoid potential script errors on\n // browsers that do not have console logging\n console = {\n log: function() {}\n };\n}\n\n/*\n * Check for logging control query vars in current URL.\n *\n * console.level=\n * Set's the console log level by name. Useful to override defaults and\n * allow more verbose logging before a user config is loaded.\n *\n * console.lock=\n * Lock the console log level at whatever level it is set at. This is run\n * after console.level is processed. 
Useful to force a level of verbosity\n * that could otherwise be limited by a user config.\n */\nif(sConsoleLogger !== null &&\n typeof window !== 'undefined' && window.location\n) {\n var query = new URL(window.location.href).searchParams;\n if(query.has('console.level')) {\n // set with last value\n forge.log.setLevel(\n sConsoleLogger, query.get('console.level').slice(-1)[0]);\n }\n if(query.has('console.lock')) {\n // set with last value\n var lock = query.get('console.lock').slice(-1)[0];\n if(lock == 'true') {\n forge.log.lock(sConsoleLogger);\n }\n }\n}\n\n// provide public access to console logger\nforge.log.consoleLogger = sConsoleLogger;\n","/**\n * Node.js module for all known Forge message digests.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2017 Digital Bazaar, Inc.\n */\nmodule.exports = require('./md');\n\nrequire('./md5');\nrequire('./sha1');\nrequire('./sha256');\nrequire('./sha512');\n","/**\n * Node.js module for Forge message digests.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2017 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\n\nmodule.exports = forge.md = forge.md || {};\nforge.md.algorithms = forge.md.algorithms || {};\n","/**\n * Message Digest Algorithm 5 with 128-bit digest (MD5) implementation.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar md5 = module.exports = forge.md5 = forge.md5 || {};\nforge.md.md5 = forge.md.algorithms.md5 = md5;\n\n/**\n * Creates an MD5 message digest object.\n *\n * @return a message digest object.\n */\nmd5.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // MD5 state contains four 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(16);\n\n // message digest object\n var md = {\n algorithm: 'md5',\n blockLength: 64,\n digestLength: 16,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = {\n h0: 0x67452301,\n h1: 0xEFCDAB89,\n h2: 0x98BADCFE,\n h3: 0x10325476\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. 
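The forge.log module above is a small logging facade; a hedged sketch of wiring up a custom logger (the `myLogger` name and the 'myapp' category are only illustrative):

```js
var forge = require('node-forge');

// Custom logger: receives (logger, message) per the makeLogger contract above.
var myLogger = forge.log.makeLogger(function(logger, message) {
  forge.log.prepareStandard(message); // sets message.standard = 'LEVEL [category] ...'
  console.log(message.standard);
});

forge.log.setLevel(myLogger, 'debug'); // accept 'debug' and more critical levels
forge.log.addLogger(myLogger);

// Emits: DEBUG [myapp] handshake started
// Note: the module also installs a console logger at 'debug' by default,
// so messages may additionally appear via console.debug/console.error.
forge.log.debug('myapp', 'handshake started');
forge.log.error('myapp', 'handshake failed');
```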
The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = (len[1] / 0x100000000) >>> 0;\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate MD5 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in little-endian order; since length\n // is stored in bytes we multiply by 8 and add carry\n var bits, carry = 0;\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n bits = md.fullMessageLength[i] * 8 + carry;\n carry = (bits / 0x100000000) >>> 0;\n finalBlock.putInt32Le(bits >>> 0);\n }\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32Le(s2.h0);\n rval.putInt32Le(s2.h1);\n rval.putInt32Le(s2.h2);\n rval.putInt32Le(s2.h3);\n return rval;\n };\n\n return md;\n};\n\n// padding, constant tables for calculating md5\nvar _padding = null;\nvar _g = null;\nvar _r = null;\nvar _k = null;\nvar _initialized = false;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // g values\n _g = [\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,\n 1, 6, 11, 0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12,\n 5, 8, 11, 14, 1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 2,\n 0, 7, 14, 5, 12, 3, 10, 1, 8, 15, 6, 13, 4, 11, 2, 9];\n\n // rounds table\n _r = [\n 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22,\n 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20,\n 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23,\n 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21];\n\n // get the result of abs(sin(i + 1)) as a 32-bit integer\n _k = new Array(64);\n for(var i = 0; i < 64; ++i) {\n _k[i] = Math.floor(Math.abs(Math.sin(i + 1)) * 0x100000000);\n }\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates an MD5 state with the given byte buffer.\n *\n * @param s the MD5 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t, a, b, c, d, f, r, i;\n var len = bytes.length();\n while(len >= 64) {\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n\n // round 1\n for(i = 0; i < 16; ++i) {\n w[i] = bytes.getInt32Le();\n f = d ^ (b & (c ^ d));\n t = (a + f + _k[i] + w[i]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n // round 2\n for(; i < 32; ++i) {\n f = c ^ (d & (b ^ c));\n t = (a + f + _k[i] + w[_g[i]]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n // round 3\n for(; i < 48; ++i) {\n f = b ^ c ^ d;\n t = (a + f + _k[i] + w[_g[i]]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n // round 4\n for(; i < 64; ++i) {\n f = c ^ (b | ~d);\n t = (a + f + _k[i] + w[_g[i]]);\n r = _r[i];\n a = d;\n d = c;\n c = b;\n b += (t << r) | (t >>> (32 - r));\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 
0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n\n len -= 64;\n }\n}\n","/**\n * Node.js module for Forge mask generation functions.\n *\n * @author Stefan Siegl\n *\n * Copyright 2012 Stefan Siegl \n */\nvar forge = require('./forge');\nrequire('./mgf1');\n\nmodule.exports = forge.mgf = forge.mgf || {};\nforge.mgf.mgf1 = forge.mgf1;\n","/**\n * Javascript implementation of mask generation function MGF1.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nforge.mgf = forge.mgf || {};\nvar mgf1 = module.exports = forge.mgf.mgf1 = forge.mgf1 = forge.mgf1 || {};\n\n/**\n * Creates a MGF1 mask generation function object.\n *\n * @param md the message digest API to use (eg: forge.md.sha1.create()).\n *\n * @return a mask generation function object.\n */\nmgf1.create = function(md) {\n var mgf = {\n /**\n * Generate mask of specified length.\n *\n * @param {String} seed The seed for mask generation.\n * @param maskLen Number of bytes to generate.\n * @return {String} The generated mask.\n */\n generate: function(seed, maskLen) {\n /* 2. Let T be the empty octet string. */\n var t = new forge.util.ByteBuffer();\n\n /* 3. For counter from 0 to ceil(maskLen / hLen), do the following: */\n var len = Math.ceil(maskLen / md.digestLength);\n for(var i = 0; i < len; i++) {\n /* a. Convert counter to an octet string C of length 4 octets */\n var c = new forge.util.ByteBuffer();\n c.putInt32(i);\n\n /* b. Concatenate the hash of the seed mgfSeed and C to the octet\n * string T: */\n md.start();\n md.update(seed + c.getBytes());\n t.putBuffer(md.digest());\n }\n\n /* Output the leading maskLen octets of T as the octet string mask. 
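The MD5 module above exposes the same start/update/digest interface as the bundled SHA modules. A short sketch, assuming `node-forge`:

```js
var forge = require('node-forge');

var md = forge.md.md5.create();
md.update('abc', 'utf8');          // 'utf8' encodes the input; the default is raw bytes
console.log(md.digest().toHex());  // 900150983cd24fb0d6963f7d28e17f72

// Incremental hashing: update() may be chained and called repeatedly before digest().
var md2 = forge.md.md5.create();
md2.update('a').update('b').update('c');
console.log(md2.digest().toHex()); // same digest as above
```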
*/\n t.truncate(t.length() - maskLen);\n return t.getBytes();\n }\n };\n\n return mgf;\n};\n","/**\n * Object IDs for ASN.1.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\n\nforge.pki = forge.pki || {};\nvar oids = module.exports = forge.pki.oids = forge.oids = forge.oids || {};\n\n// set id to name mapping and name to id mapping\nfunction _IN(id, name) {\n oids[id] = name;\n oids[name] = id;\n}\n// set id to name mapping only\nfunction _I_(id, name) {\n oids[id] = name;\n}\n\n// algorithm OIDs\n_IN('1.2.840.113549.1.1.1', 'rsaEncryption');\n// Note: md2 & md4 not implemented\n//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption');\n//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption');\n_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption');\n_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption');\n_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP');\n_IN('1.2.840.113549.1.1.8', 'mgf1');\n_IN('1.2.840.113549.1.1.9', 'pSpecified');\n_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS');\n_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption');\n_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption');\n_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption');\n// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519\n_IN('1.3.101.112', 'EdDSA25519');\n\n_IN('1.2.840.10040.4.3', 'dsa-with-sha1');\n\n_IN('1.3.14.3.2.7', 'desCBC');\n\n_IN('1.3.14.3.2.26', 'sha1');\n// Deprecated equivalent of sha1WithRSAEncryption\n_IN('1.3.14.3.2.29', 'sha1WithRSASignature');\n_IN('2.16.840.1.101.3.4.2.1', 'sha256');\n_IN('2.16.840.1.101.3.4.2.2', 'sha384');\n_IN('2.16.840.1.101.3.4.2.3', 'sha512');\n_IN('2.16.840.1.101.3.4.2.4', 'sha224');\n_IN('2.16.840.1.101.3.4.2.5', 'sha512-224');\n_IN('2.16.840.1.101.3.4.2.6', 'sha512-256');\n_IN('1.2.840.113549.2.2', 'md2');\n_IN('1.2.840.113549.2.5', 'md5');\n\n// pkcs#7 content types\n_IN('1.2.840.113549.1.7.1', 'data');\n_IN('1.2.840.113549.1.7.2', 'signedData');\n_IN('1.2.840.113549.1.7.3', 'envelopedData');\n_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData');\n_IN('1.2.840.113549.1.7.5', 'digestedData');\n_IN('1.2.840.113549.1.7.6', 'encryptedData');\n\n// pkcs#9 oids\n_IN('1.2.840.113549.1.9.1', 'emailAddress');\n_IN('1.2.840.113549.1.9.2', 'unstructuredName');\n_IN('1.2.840.113549.1.9.3', 'contentType');\n_IN('1.2.840.113549.1.9.4', 'messageDigest');\n_IN('1.2.840.113549.1.9.5', 'signingTime');\n_IN('1.2.840.113549.1.9.6', 'counterSignature');\n_IN('1.2.840.113549.1.9.7', 'challengePassword');\n_IN('1.2.840.113549.1.9.8', 'unstructuredAddress');\n_IN('1.2.840.113549.1.9.14', 'extensionRequest');\n\n_IN('1.2.840.113549.1.9.20', 'friendlyName');\n_IN('1.2.840.113549.1.9.21', 'localKeyId');\n_IN('1.2.840.113549.1.9.22.1', 'x509Certificate');\n\n// pkcs#12 safe bags\n_IN('1.2.840.113549.1.12.10.1.1', 'keyBag');\n_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag');\n_IN('1.2.840.113549.1.12.10.1.3', 'certBag');\n_IN('1.2.840.113549.1.12.10.1.4', 'crlBag');\n_IN('1.2.840.113549.1.12.10.1.5', 'secretBag');\n_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag');\n\n// password-based-encryption for pkcs#12\n_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2');\n_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2');\n\n_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4');\n_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4');\n_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC');\n_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC');\n_IN('1.2.840.113549.1.12.1.5', 
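The MGF1 implementation above follows the PKCS#1 (RFC 3447) mask generation function. A minimal sketch of generating a mask; the seed and output length are arbitrary illustrative values:

```js
var forge = require('node-forge');

var mgf = forge.mgf.mgf1.create(forge.md.sha1.create());

// Derive a 32-byte mask from a random 16-byte seed.
var seed = forge.random.getBytesSync(16);
var mask = mgf.generate(seed, 32);

console.log(mask.length);                 // 32 (binary-encoded string of bytes)
console.log(forge.util.bytesToHex(mask)); // hex view of the mask
```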
'pbeWithSHAAnd128BitRC2-CBC');\n_IN('1.2.840.113549.1.12.1.6', 'pbewithSHAAnd40BitRC2-CBC');\n\n// hmac OIDs\n_IN('1.2.840.113549.2.7', 'hmacWithSHA1');\n_IN('1.2.840.113549.2.8', 'hmacWithSHA224');\n_IN('1.2.840.113549.2.9', 'hmacWithSHA256');\n_IN('1.2.840.113549.2.10', 'hmacWithSHA384');\n_IN('1.2.840.113549.2.11', 'hmacWithSHA512');\n\n// symmetric key algorithm oids\n_IN('1.2.840.113549.3.7', 'des-EDE3-CBC');\n_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC');\n_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC');\n_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC');\n\n// certificate issuer/subject OIDs\n_IN('2.5.4.3', 'commonName');\n_IN('2.5.4.4', 'surname');\n_IN('2.5.4.5', 'serialNumber');\n_IN('2.5.4.6', 'countryName');\n_IN('2.5.4.7', 'localityName');\n_IN('2.5.4.8', 'stateOrProvinceName');\n_IN('2.5.4.9', 'streetAddress');\n_IN('2.5.4.10', 'organizationName');\n_IN('2.5.4.11', 'organizationalUnitName');\n_IN('2.5.4.12', 'title');\n_IN('2.5.4.13', 'description');\n_IN('2.5.4.15', 'businessCategory');\n_IN('2.5.4.17', 'postalCode');\n_IN('2.5.4.42', 'givenName');\n_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName');\n_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName');\n\n// X.509 extension OIDs\n_IN('2.16.840.1.113730.1.1', 'nsCertType');\n_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used\n_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35\n_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15\n_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32\n_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15\n_I_('2.5.29.5', 'policyMapping'); // deprecated use .33\n_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30\n_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17\n_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18\n_I_('2.5.29.9', 'subjectDirectoryAttributes');\n_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19\n_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30\n_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36\n_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19\n_IN('2.5.29.14', 'subjectKeyIdentifier');\n_IN('2.5.29.15', 'keyUsage');\n_I_('2.5.29.16', 'privateKeyUsagePeriod');\n_IN('2.5.29.17', 'subjectAltName');\n_IN('2.5.29.18', 'issuerAltName');\n_IN('2.5.29.19', 'basicConstraints');\n_I_('2.5.29.20', 'cRLNumber');\n_I_('2.5.29.21', 'cRLReason');\n_I_('2.5.29.22', 'expirationDate');\n_I_('2.5.29.23', 'instructionCode');\n_I_('2.5.29.24', 'invalidityDate');\n_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31\n_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28\n_I_('2.5.29.27', 'deltaCRLIndicator');\n_I_('2.5.29.28', 'issuingDistributionPoint');\n_I_('2.5.29.29', 'certificateIssuer');\n_I_('2.5.29.30', 'nameConstraints');\n_IN('2.5.29.31', 'cRLDistributionPoints');\n_IN('2.5.29.32', 'certificatePolicies');\n_I_('2.5.29.33', 'policyMappings');\n_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36\n_IN('2.5.29.35', 'authorityKeyIdentifier');\n_I_('2.5.29.36', 'policyConstraints');\n_IN('2.5.29.37', 'extKeyUsage');\n_I_('2.5.29.46', 'freshestCRL');\n_I_('2.5.29.54', 'inhibitAnyPolicy');\n\n// extKeyUsage purposes\n_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList');\n_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess');\n_IN('1.3.6.1.5.5.7.3.1', 'serverAuth');\n_IN('1.3.6.1.5.5.7.3.2', 'clientAuth');\n_IN('1.3.6.1.5.5.7.3.3', 'codeSigning');\n_IN('1.3.6.1.5.5.7.3.4', 
'emailProtection');\n_IN('1.3.6.1.5.5.7.3.8', 'timeStamping');\n","/**\n * Password-based encryption functions.\n *\n * @author Dave Longley\n * @author Stefan Siegl \n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * An EncryptedPrivateKeyInfo:\n *\n * EncryptedPrivateKeyInfo ::= SEQUENCE {\n * encryptionAlgorithm EncryptionAlgorithmIdentifier,\n * encryptedData EncryptedData }\n *\n * EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * EncryptedData ::= OCTET STRING\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./asn1');\nrequire('./des');\nrequire('./md');\nrequire('./oids');\nrequire('./pbkdf2');\nrequire('./pem');\nrequire('./random');\nrequire('./rc2');\nrequire('./rsa');\nrequire('./util');\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Password-based encryption implementation. */\nvar pki = forge.pki = forge.pki || {};\nmodule.exports = pki.pbe = forge.pbe = forge.pbe || {};\nvar oids = pki.oids;\n\n// validator for an EncryptedPrivateKeyInfo structure\n// Note: Currently only works w/algorithm params\nvar encryptedPrivateKeyValidator = {\n name: 'EncryptedPrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encryptionOid'\n }, {\n name: 'AlgorithmIdentifier.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'encryptionParams'\n }]\n }, {\n // encryptedData\n name: 'EncryptedPrivateKeyInfo.encryptedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encryptedData'\n }]\n};\n\n// validator for a PBES2Algorithms structure\n// Note: Currently only works w/PBKDF2 + AES encryption schemes\nvar PBES2AlgorithmsValidator = {\n name: 'PBES2Algorithms',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.keyDerivationFunc',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.keyDerivationFunc.oid',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'kdfOid'\n }, {\n name: 'PBES2Algorithms.params',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.params.salt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'kdfSalt'\n }, {\n name: 'PBES2Algorithms.params.iterationCount',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'kdfIterationCount'\n }, {\n name: 'PBES2Algorithms.params.keyLength',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n optional: true,\n capture: 'keyLength'\n }, {\n // prf\n name: 'PBES2Algorithms.params.prf',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'PBES2Algorithms.params.prf.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'prfOid'\n }]\n }]\n }]\n 
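Because `_IN` registers both directions while `_I_` registers only id-to-name, the resulting `forge.pki.oids` table can be read either by dotted OID or by name; a quick sketch:

```js
var forge = require('node-forge');

console.log(forge.pki.oids['sha256']);                 // '2.16.840.1.101.3.4.2.1'
console.log(forge.pki.oids['2.16.840.1.101.3.4.2.1']); // 'sha256'

// _I_ entries (e.g. deprecated X.509 extension OIDs) map only from OID to name;
// the name maps back to the current OID registered with _IN.
console.log(forge.pki.oids['2.5.29.1']);               // 'authorityKeyIdentifier'
console.log(forge.pki.oids['authorityKeyIdentifier']); // '2.5.29.35'
```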
}, {\n name: 'PBES2Algorithms.encryptionScheme',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.encryptionScheme.oid',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encOid'\n }, {\n name: 'PBES2Algorithms.encryptionScheme.iv',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encIv'\n }]\n }]\n};\n\nvar pkcs12PbeParamsValidator = {\n name: 'pkcs-12PbeParams',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'pkcs-12PbeParams.salt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'salt'\n }, {\n name: 'pkcs-12PbeParams.iterations',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'iterations'\n }]\n};\n\n/**\n * Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo.\n *\n * PBES2Algorithms ALGORITHM-IDENTIFIER ::=\n * { {PBES2-params IDENTIFIED BY id-PBES2}, ...}\n *\n * id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}\n *\n * PBES2-params ::= SEQUENCE {\n * keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},\n * encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}\n * }\n *\n * PBES2-KDFs ALGORITHM-IDENTIFIER ::=\n * { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... }\n *\n * PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... }\n *\n * PBKDF2-params ::= SEQUENCE {\n * salt CHOICE {\n * specified OCTET STRING,\n * otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}\n * },\n * iterationCount INTEGER (1..MAX),\n * keyLength INTEGER (1..MAX) OPTIONAL,\n * prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1\n * }\n *\n * @param obj the ASN.1 PrivateKeyInfo object.\n * @param password the password to encrypt with.\n * @param options:\n * algorithm the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'.\n * count the iteration count to use.\n * saltSize the salt size to use.\n * prfAlgorithm the PRF message digest algorithm to use\n * ('sha1', 'sha224', 'sha256', 'sha384', 'sha512')\n *\n * @return the ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptPrivateKeyInfo = function(obj, password, options) {\n // set default options\n options = options || {};\n options.saltSize = options.saltSize || 8;\n options.count = options.count || 2048;\n options.algorithm = options.algorithm || 'aes128';\n options.prfAlgorithm = options.prfAlgorithm || 'sha1';\n\n // generate PBE params\n var salt = forge.random.getBytesSync(options.saltSize);\n var count = options.count;\n var countBytes = asn1.integerToDer(count);\n var dkLen;\n var encryptionAlgorithm;\n var encryptedData;\n if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') {\n // do PBES2\n var ivLen, encOid, cipherFn;\n switch(options.algorithm) {\n case 'aes128':\n dkLen = 16;\n ivLen = 16;\n encOid = oids['aes128-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes192':\n dkLen = 24;\n ivLen = 16;\n encOid = oids['aes192-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes256':\n dkLen = 32;\n ivLen = 16;\n encOid = oids['aes256-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'des':\n dkLen = 8;\n ivLen = 8;\n encOid = oids['desCBC'];\n cipherFn = forge.des.createEncryptionCipher;\n break;\n default:\n var error = new Error('Cannot encrypt private key. 
Unknown encryption algorithm.');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // get PRF message digest\n var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase();\n var md = prfAlgorithmToMessageDigest(prfAlgorithm);\n\n // encrypt private key using pbe SHA-1 and AES/DES\n var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md);\n var iv = forge.random.getBytesSync(ivLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(obj));\n cipher.finish();\n encryptedData = cipher.output.getBytes();\n\n // get PBKDF2-params\n var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm);\n\n encryptionAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pkcs5PBES2']).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // keyDerivationFunc\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()),\n // PBKDF2-params\n params\n ]),\n // encryptionScheme\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(encOid).getBytes()),\n // iv\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv)\n ])\n ])\n ]);\n } else if(options.algorithm === '3des') {\n // Do PKCS12 PBE\n dkLen = 24;\n\n var saltBytes = new forge.util.ByteBuffer(salt);\n var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen);\n var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen);\n var cipher = forge.des.createEncryptionCipher(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(obj));\n cipher.finish();\n encryptedData = cipher.output.getBytes();\n\n encryptionAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()),\n // pkcs-12PbeParams\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // salt\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),\n // iteration count\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n countBytes.getBytes())\n ])\n ]);\n } else {\n var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // EncryptedPrivateKeyInfo\n var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // encryptionAlgorithm\n encryptionAlgorithm,\n // encryptedData\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData)\n ]);\n return rval;\n};\n\n/**\n * Decrypts a ASN.1 PrivateKeyInfo object.\n *\n * @param obj the ASN.1 EncryptedPrivateKeyInfo object.\n * @param password the password to decrypt with.\n *\n * @return the ASN.1 PrivateKeyInfo on success, null on failure.\n */\npki.decryptPrivateKeyInfo = function(obj, password) {\n var rval = null;\n\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors)) {\n var error = new Error('Cannot read encrypted private key. 
' +\n 'ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n // get cipher\n var oid = asn1.derToOid(capture.encryptionOid);\n var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password);\n\n // get encrypted data\n var encrypted = forge.util.createBuffer(capture.encryptedData);\n\n cipher.update(encrypted);\n if(cipher.finish()) {\n rval = asn1.fromDer(cipher.output);\n }\n\n return rval;\n};\n\n/**\n * Converts a EncryptedPrivateKeyInfo to PEM format.\n *\n * @param epki the EncryptedPrivateKeyInfo.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted encrypted private key.\n */\npki.encryptedPrivateKeyToPem = function(epki, maxline) {\n // convert to DER, then PEM-encode\n var msg = {\n type: 'ENCRYPTED PRIVATE KEY',\n body: asn1.toDer(epki).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption\n * is not performed.\n *\n * @param pem the EncryptedPrivateKeyInfo in PEM-format.\n *\n * @return the ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptedPrivateKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'ENCRYPTED PRIVATE KEY') {\n var error = new Error('Could not convert encrypted private key from PEM; ' +\n 'PEM header type is \"ENCRYPTED PRIVATE KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert encrypted private key from PEM; ' +\n 'PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n return asn1.fromDer(msg.body);\n};\n\n/**\n * Encrypts an RSA private key. By default, the key will be wrapped in\n * a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo.\n * This is the standard, preferred way to encrypt a private key.\n *\n * To produce a non-standard PEM-encrypted private key that uses encapsulated\n * headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL\n * private key encryption), set the 'legacy' option to true. Note: Using this\n * option will cause the iteration count to be forced to 1.\n *\n * Note: The 'des' algorithm is supported, but it is not considered to be\n * secure because it only uses a single 56-bit key. 
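Putting encryptPrivateKeyInfo, encryptedPrivateKeyToPem/FromPem and decryptPrivateKeyInfo together, a hedged sketch of the PKCS#8 round trip (the key size, password, and options are illustrative only):

```js
var forge = require('node-forge');
var pki = forge.pki;

var keypair = pki.rsa.generateKeyPair(2048);

// Wrap the RSA key in a PrivateKeyInfo and encrypt it (PBES2 with AES-256 here).
var pkInfo = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(keypair.privateKey));
var epki = pki.encryptPrivateKeyInfo(pkInfo, 'secret', {
  algorithm: 'aes256',
  count: 10000,
  prfAlgorithm: 'sha256'
});

// PEM round trip, then decrypt back to a PrivateKeyInfo and an RSA key object.
var pem = pki.encryptedPrivateKeyToPem(epki);
var decrypted = pki.decryptPrivateKeyInfo(pki.encryptedPrivateKeyFromPem(pem), 'secret');
var privateKey = pki.privateKeyFromAsn1(decrypted);
```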
If possible, it is highly\n * recommended that a different algorithm be used.\n *\n * @param rsaKey the RSA key to encrypt.\n * @param password the password to use.\n * @param options:\n * algorithm: the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des', 'des').\n * count: the iteration count to use.\n * saltSize: the salt size to use.\n * legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated\n * headers (DEK-Info) private key.\n *\n * @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptRsaPrivateKey = function(rsaKey, password, options) {\n // standard PKCS#8\n options = options || {};\n if(!options.legacy) {\n // encrypt PrivateKeyInfo\n var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey));\n rval = pki.encryptPrivateKeyInfo(rval, password, options);\n return pki.encryptedPrivateKeyToPem(rval);\n }\n\n // legacy non-PKCS#8\n var algorithm;\n var iv;\n var dkLen;\n var cipherFn;\n switch(options.algorithm) {\n case 'aes128':\n algorithm = 'AES-128-CBC';\n dkLen = 16;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes192':\n algorithm = 'AES-192-CBC';\n dkLen = 24;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes256':\n algorithm = 'AES-256-CBC';\n dkLen = 32;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case '3des':\n algorithm = 'DES-EDE3-CBC';\n dkLen = 24;\n iv = forge.random.getBytesSync(8);\n cipherFn = forge.des.createEncryptionCipher;\n break;\n case 'des':\n algorithm = 'DES-CBC';\n dkLen = 8;\n iv = forge.random.getBytesSync(8);\n cipherFn = forge.des.createEncryptionCipher;\n break;\n default:\n var error = new Error('Could not encrypt RSA private key; unsupported ' +\n 'encryption algorithm \"' + options.algorithm + '\".');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // encrypt private key using OpenSSL legacy key derivation\n var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey)));\n cipher.finish();\n\n var msg = {\n type: 'RSA PRIVATE KEY',\n procType: {\n version: '4',\n type: 'ENCRYPTED'\n },\n dekInfo: {\n algorithm: algorithm,\n parameters: forge.util.bytesToHex(iv).toUpperCase()\n },\n body: cipher.output.getBytes()\n };\n return forge.pem.encode(msg);\n};\n\n/**\n * Decrypts an RSA private key.\n *\n * @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt.\n * @param password the password to use.\n *\n * @return the RSA key on success, null on failure.\n */\npki.decryptRsaPrivateKey = function(pem, password) {\n var rval = null;\n\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'ENCRYPTED PRIVATE KEY' &&\n msg.type !== 'PRIVATE KEY' &&\n msg.type !== 'RSA PRIVATE KEY') {\n var error = new Error('Could not convert private key from PEM; PEM header type ' +\n 'is not \"ENCRYPTED PRIVATE KEY\", \"PRIVATE KEY\", or \"RSA PRIVATE KEY\".');\n error.headerType = error;\n throw error;\n }\n\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n var dkLen;\n var cipherFn;\n switch(msg.dekInfo.algorithm) {\n case 'DES-CBC':\n dkLen = 8;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'DES-EDE3-CBC':\n dkLen = 24;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'AES-128-CBC':\n dkLen = 16;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'AES-192-CBC':\n 
dkLen = 24;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'AES-256-CBC':\n dkLen = 32;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'RC2-40-CBC':\n dkLen = 5;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 40);\n };\n break;\n case 'RC2-64-CBC':\n dkLen = 8;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 64);\n };\n break;\n case 'RC2-128-CBC':\n dkLen = 16;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 128);\n };\n break;\n default:\n var error = new Error('Could not decrypt private key; unsupported ' +\n 'encryption algorithm \"' + msg.dekInfo.algorithm + '\".');\n error.algorithm = msg.dekInfo.algorithm;\n throw error;\n }\n\n // use OpenSSL legacy key derivation\n var iv = forge.util.hexToBytes(msg.dekInfo.parameters);\n var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(forge.util.createBuffer(msg.body));\n if(cipher.finish()) {\n rval = cipher.output.getBytes();\n } else {\n return rval;\n }\n } else {\n rval = msg.body;\n }\n\n if(msg.type === 'ENCRYPTED PRIVATE KEY') {\n rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password);\n } else {\n // decryption already performed above\n rval = asn1.fromDer(rval);\n }\n\n if(rval !== null) {\n rval = pki.privateKeyFromAsn1(rval);\n }\n\n return rval;\n};\n\n/**\n * Derives a PKCS#12 key.\n *\n * @param password the password to derive the key material from, null or\n * undefined for none.\n * @param salt the salt, as a ByteBuffer, to use.\n * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC).\n * @param iter the iteration count.\n * @param n the number of bytes to derive from the password.\n * @param md the message digest to use, defaults to SHA-1.\n *\n * @return a ByteBuffer with the bytes derived from the password.\n */\npki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) {\n var j, l;\n\n if(typeof md === 'undefined' || md === null) {\n if(!('sha1' in forge.md)) {\n throw new Error('\"sha1\" hash algorithm unavailable.');\n }\n md = forge.md.sha1.create();\n }\n\n var u = md.digestLength;\n var v = md.blockLength;\n var result = new forge.util.ByteBuffer();\n\n /* Convert password to Unicode byte buffer + trailing 0-byte. */\n var passBuf = new forge.util.ByteBuffer();\n if(password !== null && password !== undefined) {\n for(l = 0; l < password.length; l++) {\n passBuf.putInt16(password.charCodeAt(l));\n }\n passBuf.putInt16(0);\n }\n\n /* Length of salt and password in BYTES. */\n var p = passBuf.length();\n var s = salt.length();\n\n /* 1. Construct a string, D (the \"diversifier\"), by concatenating\n v copies of ID. */\n var D = new forge.util.ByteBuffer();\n D.fillWithByte(id, v);\n\n /* 2. Concatenate copies of the salt together to create a string S of length\n v * ceil(s / v) bytes (the final copy of the salt may be trunacted\n to create S).\n Note that if the salt is the empty string, then so is S. */\n var Slen = v * Math.ceil(s / v);\n var S = new forge.util.ByteBuffer();\n for(l = 0; l < Slen; l++) {\n S.putByte(salt.at(l % s));\n }\n\n /* 3. Concatenate copies of the password together to create a string P of\n length v * ceil(p / v) bytes (the final copy of the password may be\n truncated to create P).\n Note that if the password is the empty string, then so is P. 
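The higher-level encryptRsaPrivateKey/decryptRsaPrivateKey helpers above wrap the same machinery. A brief sketch of both the default PKCS#8 output and the legacy OpenSSL-style output (password and options are illustrative):

```js
var forge = require('node-forge');
var pki = forge.pki;

var keypair = pki.rsa.generateKeyPair(2048);

// Preferred: PKCS#8 EncryptedPrivateKeyInfo PEM ('ENCRYPTED PRIVATE KEY').
var pem = pki.encryptRsaPrivateKey(keypair.privateKey, 'secret', {algorithm: 'aes256'});

// Legacy: encapsulated headers ('RSA PRIVATE KEY' with Proc-Type/DEK-Info).
var legacyPem = pki.encryptRsaPrivateKey(keypair.privateKey, 'secret', {
  algorithm: 'aes128',
  legacy: true
});

// Both forms decrypt with the same call; per the docs above, null signals failure.
var key1 = pki.decryptRsaPrivateKey(pem, 'secret');
var key2 = pki.decryptRsaPrivateKey(legacyPem, 'secret');
```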
*/\n var Plen = v * Math.ceil(p / v);\n var P = new forge.util.ByteBuffer();\n for(l = 0; l < Plen; l++) {\n P.putByte(passBuf.at(l % p));\n }\n\n /* 4. Set I=S||P to be the concatenation of S and P. */\n var I = S;\n I.putBuffer(P);\n\n /* 5. Set c=ceil(n / u). */\n var c = Math.ceil(n / u);\n\n /* 6. For i=1, 2, ..., c, do the following: */\n for(var i = 1; i <= c; i++) {\n /* a) Set Ai=H^r(D||I). (l.e. the rth hash of D||I, H(H(H(...H(D||I)))) */\n var buf = new forge.util.ByteBuffer();\n buf.putBytes(D.bytes());\n buf.putBytes(I.bytes());\n for(var round = 0; round < iter; round++) {\n md.start();\n md.update(buf.getBytes());\n buf = md.digest();\n }\n\n /* b) Concatenate copies of Ai to create a string B of length v bytes (the\n final copy of Ai may be truncated to create B). */\n var B = new forge.util.ByteBuffer();\n for(l = 0; l < v; l++) {\n B.putByte(buf.at(l % u));\n }\n\n /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks,\n where k=ceil(s / v) + ceil(p / v), modify I by setting\n Ij=(Ij+B+1) mod 2v for each j. */\n var k = Math.ceil(s / v) + Math.ceil(p / v);\n var Inew = new forge.util.ByteBuffer();\n for(j = 0; j < k; j++) {\n var chunk = new forge.util.ByteBuffer(I.getBytes(v));\n var x = 0x1ff;\n for(l = B.length() - 1; l >= 0; l--) {\n x = x >> 8;\n x += B.at(l) + chunk.at(l);\n chunk.setAt(l, x & 0xff);\n }\n Inew.putBuffer(chunk);\n }\n I = Inew;\n\n /* Add Ai to A. */\n result.putBuffer(buf);\n }\n\n result.truncate(result.length() - n);\n return result;\n};\n\n/**\n * Get new Forge cipher object instance.\n *\n * @param oid the OID (in string notation).\n * @param params the ASN.1 params object.\n * @param password the password to decrypt with.\n *\n * @return new cipher object instance.\n */\npki.pbe.getCipher = function(oid, params, password) {\n switch(oid) {\n case pki.oids['pkcs5PBES2']:\n return pki.pbe.getCipherForPBES2(oid, params, password);\n\n case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:\n case pki.oids['pbewithSHAAnd40BitRC2-CBC']:\n return pki.pbe.getCipherForPKCS12PBE(oid, params, password);\n\n default:\n var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.');\n error.oid = oid;\n error.supportedOids = [\n 'pkcs5PBES2',\n 'pbeWithSHAAnd3-KeyTripleDES-CBC',\n 'pbewithSHAAnd40BitRC2-CBC'\n ];\n throw error;\n }\n};\n\n/**\n * Get new Forge cipher object instance according to PBES2 params block.\n *\n * The returned cipher instance is already started using the IV\n * from PBES2 parameter block.\n *\n * @param oid the PKCS#5 PBKDF2 OID (in string notation).\n * @param params the ASN.1 PBES2-params object.\n * @param password the password to decrypt with.\n *\n * @return new cipher object instance.\n */\npki.pbe.getCipherForPBES2 = function(oid, params, password) {\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) {\n var error = new Error('Cannot read password-based-encryption algorithm ' +\n 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n // check oids\n oid = asn1.derToOid(capture.kdfOid);\n if(oid !== pki.oids['pkcs5PBKDF2']) {\n var error = new Error('Cannot read encrypted private key. 
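generatePkcs12Key implements the PKCS#12 password-to-key derivation described step by step above (RFC 7292, appendix B). A small sketch deriving key and IV material the way the 3DES PBE path does; the sizes and iteration count are illustrative:

```js
var forge = require('node-forge');
var pki = forge.pki;

var password = 'secret';
var salt = new forge.util.ByteBuffer(forge.random.getBytesSync(8));

// ID 1 = key material, ID 2 = IV, ID 3 = MAC key, per the function's documentation.
var key = pki.pbe.generatePkcs12Key(password, salt, 1, 2048, 24); // 24-byte 3DES key
var iv  = pki.pbe.generatePkcs12Key(password, salt, 2, 2048, 8);  // 8-byte IV

console.log(key.toHex().length); // 48 hex chars = 24 bytes
console.log(iv.toHex().length);  // 16 hex chars = 8 bytes
```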
' +\n 'Unsupported key derivation function OID.');\n error.oid = oid;\n error.supportedOids = ['pkcs5PBKDF2'];\n throw error;\n }\n oid = asn1.derToOid(capture.encOid);\n if(oid !== pki.oids['aes128-CBC'] &&\n oid !== pki.oids['aes192-CBC'] &&\n oid !== pki.oids['aes256-CBC'] &&\n oid !== pki.oids['des-EDE3-CBC'] &&\n oid !== pki.oids['desCBC']) {\n var error = new Error('Cannot read encrypted private key. ' +\n 'Unsupported encryption scheme OID.');\n error.oid = oid;\n error.supportedOids = [\n 'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC'];\n throw error;\n }\n\n // set PBE params\n var salt = capture.kdfSalt;\n var count = forge.util.createBuffer(capture.kdfIterationCount);\n count = count.getInt(count.length() << 3);\n var dkLen;\n var cipherFn;\n switch(pki.oids[oid]) {\n case 'aes128-CBC':\n dkLen = 16;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'aes192-CBC':\n dkLen = 24;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'aes256-CBC':\n dkLen = 32;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'des-EDE3-CBC':\n dkLen = 24;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'desCBC':\n dkLen = 8;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n }\n\n // get PRF message digest\n var md = prfOidToMessageDigest(capture.prfOid);\n\n // decrypt private key using pbe with chosen PRF and AES/DES\n var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md);\n var iv = capture.encIv;\n var cipher = cipherFn(dk);\n cipher.start(iv);\n\n return cipher;\n};\n\n/**\n * Get new Forge cipher object instance for PKCS#12 PBE.\n *\n * The returned cipher instance is already started using the key & IV\n * derived from the provided password and PKCS#12 PBE salt.\n *\n * @param oid The PKCS#12 PBE OID (in string notation).\n * @param params The ASN.1 PKCS#12 PBE-params object.\n * @param password The password to decrypt with.\n *\n * @return the new cipher object instance.\n */\npki.pbe.getCipherForPKCS12PBE = function(oid, params, password) {\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) {\n var error = new Error('Cannot read password-based-encryption algorithm ' +\n 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n var salt = forge.util.createBuffer(capture.salt);\n var count = forge.util.createBuffer(capture.iterations);\n count = count.getInt(count.length() << 3);\n\n var dkLen, dIvLen, cipherFn;\n switch(oid) {\n case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:\n dkLen = 24;\n dIvLen = 8;\n cipherFn = forge.des.startDecrypting;\n break;\n\n case pki.oids['pbewithSHAAnd40BitRC2-CBC']:\n dkLen = 5;\n dIvLen = 8;\n cipherFn = function(key, iv) {\n var cipher = forge.rc2.createDecryptionCipher(key, 40);\n cipher.start(iv, null);\n return cipher;\n };\n break;\n\n default:\n var error = new Error('Cannot read PKCS #12 PBE data block. 
Unsupported OID.');\n error.oid = oid;\n throw error;\n }\n\n // get PRF message digest\n var md = prfOidToMessageDigest(capture.prfOid);\n var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md);\n md.start();\n var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md);\n\n return cipherFn(key, iv);\n};\n\n/**\n * OpenSSL's legacy key derivation function.\n *\n * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html\n *\n * @param password the password to derive the key from.\n * @param salt the salt to use, null for none.\n * @param dkLen the number of bytes needed for the derived key.\n * @param [options] the options to use:\n * [md] an optional message digest object to use.\n */\npki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) {\n if(typeof md === 'undefined' || md === null) {\n if(!('md5' in forge.md)) {\n throw new Error('\"md5\" hash algorithm unavailable.');\n }\n md = forge.md.md5.create();\n }\n if(salt === null) {\n salt = '';\n }\n var digests = [hash(md, password + salt)];\n for(var length = 16, i = 1; length < dkLen; ++i, length += 16) {\n digests.push(hash(md, digests[i - 1] + password + salt));\n }\n return digests.join('').substr(0, dkLen);\n};\n\nfunction hash(md, bytes) {\n return md.start().update(bytes).digest().getBytes();\n}\n\nfunction prfOidToMessageDigest(prfOid) {\n // get PRF algorithm, default to SHA-1\n var prfAlgorithm;\n if(!prfOid) {\n prfAlgorithm = 'hmacWithSHA1';\n } else {\n prfAlgorithm = pki.oids[asn1.derToOid(prfOid)];\n if(!prfAlgorithm) {\n var error = new Error('Unsupported PRF OID.');\n error.oid = prfOid;\n error.supported = [\n 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',\n 'hmacWithSHA512'];\n throw error;\n }\n }\n return prfAlgorithmToMessageDigest(prfAlgorithm);\n}\n\nfunction prfAlgorithmToMessageDigest(prfAlgorithm) {\n var factory = forge.md;\n switch(prfAlgorithm) {\n case 'hmacWithSHA224':\n factory = forge.md.sha512;\n case 'hmacWithSHA1':\n case 'hmacWithSHA256':\n case 'hmacWithSHA384':\n case 'hmacWithSHA512':\n prfAlgorithm = prfAlgorithm.substr(8).toLowerCase();\n break;\n default:\n var error = new Error('Unsupported PRF algorithm.');\n error.algorithm = prfAlgorithm;\n error.supported = [\n 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',\n 'hmacWithSHA512'];\n throw error;\n }\n if(!factory || !(prfAlgorithm in factory)) {\n throw new Error('Unknown hash algorithm: ' + prfAlgorithm);\n }\n return factory[prfAlgorithm].create();\n}\n\nfunction createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) {\n var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // salt\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),\n // iteration count\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n countBytes.getBytes())\n ]);\n // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm\n if(prfAlgorithm !== 'hmacWithSHA1') {\n params.value.push(\n // key length\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(dkLen.toString(16))),\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n }\n return params;\n}\n","/**\n * Password-Based Key-Derivation Function #2 implementation.\n 
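opensslDeriveBytes mirrors OpenSSL's legacy EVP_BytesToKey derivation used by the encapsulated-header PEM path above. A sketch of deriving an AES-128 key from a password and the first 8 IV bytes, as that path does (values are illustrative):

```js
var forge = require('node-forge');

var password = 'secret';
var iv = forge.random.getBytesSync(16); // AES-128-CBC IV

// Only the first 8 IV bytes serve as the salt, matching the legacy PEM code path.
var key = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), 16);

var cipher = forge.aes.createEncryptionCipher(key);
cipher.start(iv);
cipher.update(forge.util.createBuffer('attack at dawn', 'utf8'));
cipher.finish();
var ciphertext = cipher.output.getBytes();
```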
*\n * See RFC 2898 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./hmac');\nrequire('./md');\nrequire('./util');\n\nvar pkcs5 = forge.pkcs5 = forge.pkcs5 || {};\n\nvar crypto;\nif(forge.util.isNodejs && !forge.options.usePureJavaScript) {\n crypto = require('crypto');\n}\n\n/**\n * Derives a key from a password.\n *\n * @param p the password as a binary-encoded string of bytes.\n * @param s the salt as a binary-encoded string of bytes.\n * @param c the iteration count, a positive integer.\n * @param dkLen the intended length, in bytes, of the derived key,\n * (max: 2^32 - 1) * hash length of the PRF.\n * @param [md] the message digest (or algorithm identifier as a string) to use\n * in the PRF, defaults to SHA-1.\n * @param [callback(err, key)] presence triggers asynchronous version, called\n * once the operation completes.\n *\n * @return the derived key, as a binary-encoded string of bytes, for the\n * synchronous version (if no callback is specified).\n */\nmodule.exports = forge.pbkdf2 = pkcs5.pbkdf2 = function(\n p, s, c, dkLen, md, callback) {\n if(typeof md === 'function') {\n callback = md;\n md = null;\n }\n\n // use native implementation if possible and not disabled, note that\n // some node versions only support SHA-1, others allow digest to be changed\n if(forge.util.isNodejs && !forge.options.usePureJavaScript &&\n crypto.pbkdf2 && (md === null || typeof md !== 'object') &&\n (crypto.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) {\n if(typeof md !== 'string') {\n // default prf to SHA-1\n md = 'sha1';\n }\n p = Buffer.from(p, 'binary');\n s = Buffer.from(s, 'binary');\n if(!callback) {\n if(crypto.pbkdf2Sync.length === 4) {\n return crypto.pbkdf2Sync(p, s, c, dkLen).toString('binary');\n }\n return crypto.pbkdf2Sync(p, s, c, dkLen, md).toString('binary');\n }\n if(crypto.pbkdf2Sync.length === 4) {\n return crypto.pbkdf2(p, s, c, dkLen, function(err, key) {\n if(err) {\n return callback(err);\n }\n callback(null, key.toString('binary'));\n });\n }\n return crypto.pbkdf2(p, s, c, dkLen, md, function(err, key) {\n if(err) {\n return callback(err);\n }\n callback(null, key.toString('binary'));\n });\n }\n\n if(typeof md === 'undefined' || md === null) {\n // default prf to SHA-1\n md = 'sha1';\n }\n if(typeof md === 'string') {\n if(!(md in forge.md.algorithms)) {\n throw new Error('Unknown hash algorithm: ' + md);\n }\n md = forge.md[md].create();\n }\n\n var hLen = md.digestLength;\n\n /* 1. If dkLen > (2^32 - 1) * hLen, output \"derived key too long\" and\n stop. */\n if(dkLen > (0xFFFFFFFF * hLen)) {\n var err = new Error('Derived key is too long.');\n if(callback) {\n return callback(err);\n }\n throw err;\n }\n\n /* 2. Let len be the number of hLen-octet blocks in the derived key,\n rounding up, and let r be the number of octets in the last\n block:\n\n len = CEIL(dkLen / hLen),\n r = dkLen - (len - 1) * hLen. */\n var len = Math.ceil(dkLen / hLen);\n var r = dkLen - (len - 1) * hLen;\n\n /* 3. 
For each block of the derived key apply the function F defined\n below to the password P, the salt S, the iteration count c, and\n the block index to compute the block:\n\n T_1 = F(P, S, c, 1),\n T_2 = F(P, S, c, 2),\n ...\n T_len = F(P, S, c, len),\n\n where the function F is defined as the exclusive-or sum of the\n first c iterates of the underlying pseudorandom function PRF\n applied to the password P and the concatenation of the salt S\n and the block index i:\n\n F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c\n\n where\n\n u_1 = PRF(P, S || INT(i)),\n u_2 = PRF(P, u_1),\n ...\n u_c = PRF(P, u_{c-1}).\n\n Here, INT(i) is a four-octet encoding of the integer i, most\n significant octet first. */\n var prf = forge.hmac.create();\n prf.start(md, p);\n var dk = '';\n var xor, u_c, u_c1;\n\n // sync version\n if(!callback) {\n for(var i = 1; i <= len; ++i) {\n // PRF(P, S || INT(i)) (first iteration)\n prf.start(null, null);\n prf.update(s);\n prf.update(forge.util.int32ToBytes(i));\n xor = u_c1 = prf.digest().getBytes();\n\n // PRF(P, u_{c-1}) (other iterations)\n for(var j = 2; j <= c; ++j) {\n prf.start(null, null);\n prf.update(u_c1);\n u_c = prf.digest().getBytes();\n // F(p, s, c, i)\n xor = forge.util.xorBytes(xor, u_c, hLen);\n u_c1 = u_c;\n }\n\n /* 4. Concatenate the blocks and extract the first dkLen octets to\n produce a derived key DK:\n\n DK = T_1 || T_2 || ... || T_len<0..r-1> */\n dk += (i < len) ? xor : xor.substr(0, r);\n }\n /* 5. Output the derived key DK. */\n return dk;\n }\n\n // async version\n var i = 1, j;\n function outer() {\n if(i > len) {\n // done\n return callback(null, dk);\n }\n\n // PRF(P, S || INT(i)) (first iteration)\n prf.start(null, null);\n prf.update(s);\n prf.update(forge.util.int32ToBytes(i));\n xor = u_c1 = prf.digest().getBytes();\n\n // PRF(P, u_{c-1}) (other iterations)\n j = 2;\n inner();\n }\n\n function inner() {\n if(j <= c) {\n prf.start(null, null);\n prf.update(u_c1);\n u_c = prf.digest().getBytes();\n // F(p, s, c, i)\n xor = forge.util.xorBytes(xor, u_c, hLen);\n u_c1 = u_c;\n ++j;\n return forge.util.setImmediate(inner);\n }\n\n /* 4. Concatenate the blocks and extract the first dkLen octets to\n produce a derived key DK:\n\n DK = T_1 || T_2 || ... || T_len<0..r-1> */\n dk += (i < len) ? 
xor : xor.substr(0, r);\n\n ++i;\n outer();\n }\n\n outer();\n};\n","/**\n * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms.\n *\n * See: RFC 1421.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2013-2014 Digital Bazaar, Inc.\n *\n * A Forge PEM object has the following fields:\n *\n * type: identifies the type of message (eg: \"RSA PRIVATE KEY\").\n *\n * procType: identifies the type of processing performed on the message,\n * it has two subfields: version and type, eg: 4,ENCRYPTED.\n *\n * contentDomain: identifies the type of content in the message, typically\n * only uses the value: \"RFC822\".\n *\n * dekInfo: identifies the message encryption algorithm and mode and includes\n * any parameters for the algorithm, it has two subfields: algorithm and\n * parameters, eg: DES-CBC,F8143EDE5960C597.\n *\n * headers: contains all other PEM encapsulated headers -- where order is\n * significant (for pairing data like recipient ID + key info).\n *\n * body: the binary-encoded body.\n */\nvar forge = require('./forge');\nrequire('./util');\n\n// shortcut for pem API\nvar pem = module.exports = forge.pem = forge.pem || {};\n\n/**\n * Encodes (serializes) the given PEM object.\n *\n * @param msg the PEM message object to encode.\n * @param options the options to use:\n * maxline the maximum characters per line for the body, (default: 64).\n *\n * @return the PEM-formatted string.\n */\npem.encode = function(msg, options) {\n options = options || {};\n var rval = '-----BEGIN ' + msg.type + '-----\\r\\n';\n\n // encode special headers\n var header;\n if(msg.procType) {\n header = {\n name: 'Proc-Type',\n values: [String(msg.procType.version), msg.procType.type]\n };\n rval += foldHeader(header);\n }\n if(msg.contentDomain) {\n header = {name: 'Content-Domain', values: [msg.contentDomain]};\n rval += foldHeader(header);\n }\n if(msg.dekInfo) {\n header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]};\n if(msg.dekInfo.parameters) {\n header.values.push(msg.dekInfo.parameters);\n }\n rval += foldHeader(header);\n }\n\n if(msg.headers) {\n // encode all other headers\n for(var i = 0; i < msg.headers.length; ++i) {\n rval += foldHeader(msg.headers[i]);\n }\n }\n\n // terminate header\n if(msg.procType) {\n rval += '\\r\\n';\n }\n\n // add body\n rval += forge.util.encode64(msg.body, options.maxline || 64) + '\\r\\n';\n\n rval += '-----END ' + msg.type + '-----\\r\\n';\n return rval;\n};\n\n/**\n * Decodes (deserializes) all PEM messages found in the given string.\n *\n * @param str the PEM-formatted string to decode.\n *\n * @return the PEM message objects in an array.\n */\npem.decode = function(str) {\n var rval = [];\n\n // split string into PEM messages (be lenient w/EOF on BEGIN line)\n var rMessage = /\\s*-----BEGIN ([A-Z0-9- ]+)-----\\r?\\n?([\\x21-\\x7e\\s]+?(?:\\r?\\n\\r?\\n))?([:A-Za-z0-9+\\/=\\s]+?)-----END \\1-----/g;\n var rHeader = /([\\x21-\\x7e]+):\\s*([\\x21-\\x7e\\s^:]+)/;\n var rCRLF = /\\r?\\n/;\n var match;\n while(true) {\n match = rMessage.exec(str);\n if(!match) {\n break;\n }\n\n // accept \"NEW CERTIFICATE REQUEST\" as \"CERTIFICATE REQUEST\"\n // https://datatracker.ietf.org/doc/html/rfc7468#section-7\n var type = match[1];\n if(type === 'NEW CERTIFICATE REQUEST') {\n type = 'CERTIFICATE REQUEST';\n }\n\n var msg = {\n type: type,\n procType: null,\n contentDomain: null,\n dekInfo: null,\n headers: [],\n body: forge.util.decode64(match[3])\n };\n rval.push(msg);\n\n // no headers\n if(!match[2]) {\n continue;\n }\n\n // parse headers\n var 
lines = match[2].split(rCRLF);\n var li = 0;\n while(match && li < lines.length) {\n // get line, trim any rhs whitespace\n var line = lines[li].replace(/\\s+$/, '');\n\n // RFC2822 unfold any following folded lines\n for(var nl = li + 1; nl < lines.length; ++nl) {\n var next = lines[nl];\n if(!/\\s/.test(next[0])) {\n break;\n }\n line += next;\n li = nl;\n }\n\n // parse header\n match = line.match(rHeader);\n if(match) {\n var header = {name: match[1], values: []};\n var values = match[2].split(',');\n for(var vi = 0; vi < values.length; ++vi) {\n header.values.push(ltrim(values[vi]));\n }\n\n // Proc-Type must be the first header\n if(!msg.procType) {\n if(header.name !== 'Proc-Type') {\n throw new Error('Invalid PEM formatted message. The first ' +\n 'encapsulated header must be \"Proc-Type\".');\n } else if(header.values.length !== 2) {\n throw new Error('Invalid PEM formatted message. The \"Proc-Type\" ' +\n 'header must have two subfields.');\n }\n msg.procType = {version: values[0], type: values[1]};\n } else if(!msg.contentDomain && header.name === 'Content-Domain') {\n // special-case Content-Domain\n msg.contentDomain = values[0] || '';\n } else if(!msg.dekInfo && header.name === 'DEK-Info') {\n // special-case DEK-Info\n if(header.values.length === 0) {\n throw new Error('Invalid PEM formatted message. The \"DEK-Info\" ' +\n 'header must have at least one subfield.');\n }\n msg.dekInfo = {algorithm: values[0], parameters: values[1] || null};\n } else {\n msg.headers.push(header);\n }\n }\n\n ++li;\n }\n\n if(msg.procType === 'ENCRYPTED' && !msg.dekInfo) {\n throw new Error('Invalid PEM formatted message. The \"DEK-Info\" ' +\n 'header must be present if \"Proc-Type\" is \"ENCRYPTED\".');\n }\n }\n\n if(rval.length === 0) {\n throw new Error('Invalid PEM formatted message.');\n }\n\n return rval;\n};\n\nfunction foldHeader(header) {\n var rval = header.name + ': ';\n\n // ensure values with CRLF are folded\n var values = [];\n var insertSpace = function(match, $1) {\n return ' ' + $1;\n };\n for(var i = 0; i < header.values.length; ++i) {\n values.push(header.values[i].replace(/^(\\S+\\r\\n)/, insertSpace));\n }\n rval += values.join(',') + '\\r\\n';\n\n // do folding\n var length = 0;\n var candidate = -1;\n for(var i = 0; i < rval.length; ++i, ++length) {\n if(length > 65 && candidate !== -1) {\n var insert = rval[candidate];\n if(insert === ',') {\n ++candidate;\n rval = rval.substr(0, candidate) + '\\r\\n ' + rval.substr(candidate);\n } else {\n rval = rval.substr(0, candidate) +\n '\\r\\n' + insert + rval.substr(candidate + 1);\n }\n length = (i - candidate - 1);\n candidate = -1;\n ++i;\n } else if(rval[i] === ' ' || rval[i] === '\\t' || rval[i] === ',') {\n candidate = i;\n }\n }\n\n return rval;\n}\n\nfunction ltrim(str) {\n return str.replace(/^\\s+/, '');\n}\n","/**\n * Partial implementation of PKCS#1 v2.2: RSA-OEAP\n *\n * Modified but based on the following MIT and BSD licensed code:\n *\n * https://github.com/kjur/jsjws/blob/master/rsa.js:\n *\n * The 'jsjws'(JSON Web Signature JavaScript Library) License\n *\n * Copyright (c) 2012 Kenji Urushima\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to 
do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n *\n * http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain:\n *\n * RSAES-OAEP.js\n * $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $\n * JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002)\n * Copyright (C) Ellis Pritchard, Guardian Unlimited 2003.\n * Contact: ellis@nukinetics.com\n * Distributed under the BSD License.\n *\n * Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125\n *\n * @author Evan Jones (http://evanjones.ca/)\n * @author Dave Longley\n *\n * Copyright (c) 2013-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\nrequire('./random');\nrequire('./sha1');\n\n// shortcut for PKCS#1 API\nvar pkcs1 = module.exports = forge.pkcs1 = forge.pkcs1 || {};\n\n/**\n * Encode the given RSAES-OAEP message (M) using key, with optional label (L)\n * and seed.\n *\n * This method does not perform RSA encryption, it only encodes the message\n * using RSAES-OAEP.\n *\n * @param key the RSA key to use.\n * @param message the message to encode.\n * @param options the options to use:\n * label an optional label to use.\n * seed the seed to use.\n * md the message digest object to use, undefined for SHA-1.\n * mgf1 optional mgf1 parameters:\n * md the message digest object to use for MGF1.\n *\n * @return the encoded message bytes.\n */\npkcs1.encode_rsa_oaep = function(key, message, options) {\n // parse arguments\n var label;\n var seed;\n var md;\n var mgf1Md;\n // legacy args (label, seed, md)\n if(typeof options === 'string') {\n label = options;\n seed = arguments[3] || undefined;\n md = arguments[4] || undefined;\n } else if(options) {\n label = options.label || undefined;\n seed = options.seed || undefined;\n md = options.md || undefined;\n if(options.mgf1 && options.mgf1.md) {\n mgf1Md = options.mgf1.md;\n }\n }\n\n // default OAEP to SHA-1 message digest\n if(!md) {\n md = forge.md.sha1.create();\n } else {\n md.start();\n }\n\n // default MGF-1 to same as OAEP\n if(!mgf1Md) {\n mgf1Md = md;\n }\n\n // compute length in bytes and check output\n var keyLength = Math.ceil(key.n.bitLength() / 8);\n var maxLength = keyLength - 2 * md.digestLength - 2;\n if(message.length > maxLength) {\n var error = new Error('RSAES-OAEP input message length is too long.');\n error.length = message.length;\n error.maxLength = maxLength;\n throw error;\n }\n\n if(!label) {\n label = '';\n }\n md.update(label, 'raw');\n var lHash = md.digest();\n\n var PS = '';\n var PS_length = maxLength - message.length;\n for(var i = 0; i < PS_length; i++) {\n PS += '\\x00';\n }\n\n var DB = lHash.getBytes() + PS + '\\x01' + message;\n\n if(!seed) {\n seed = forge.random.getBytes(md.digestLength);\n } else if(seed.length !== md.digestLength) {\n var error = new Error('Invalid RSAES-OAEP seed. 
The seed length must ' +\n 'match the digest length.');\n error.seedLength = seed.length;\n error.digestLength = md.digestLength;\n throw error;\n }\n\n var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);\n var maskedDB = forge.util.xorBytes(DB, dbMask, DB.length);\n\n var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);\n var maskedSeed = forge.util.xorBytes(seed, seedMask, seed.length);\n\n // return encoded message\n return '\\x00' + maskedSeed + maskedDB;\n};\n\n/**\n * Decode the given RSAES-OAEP encoded message (EM) using key, with optional\n * label (L).\n *\n * This method does not perform RSA decryption, it only decodes the message\n * using RSAES-OAEP.\n *\n * @param key the RSA key to use.\n * @param em the encoded message to decode.\n * @param options the options to use:\n * label an optional label to use.\n * md the message digest object to use for OAEP, undefined for SHA-1.\n * mgf1 optional mgf1 parameters:\n * md the message digest object to use for MGF1.\n *\n * @return the decoded message bytes.\n */\npkcs1.decode_rsa_oaep = function(key, em, options) {\n // parse args\n var label;\n var md;\n var mgf1Md;\n // legacy args\n if(typeof options === 'string') {\n label = options;\n md = arguments[3] || undefined;\n } else if(options) {\n label = options.label || undefined;\n md = options.md || undefined;\n if(options.mgf1 && options.mgf1.md) {\n mgf1Md = options.mgf1.md;\n }\n }\n\n // compute length in bytes\n var keyLength = Math.ceil(key.n.bitLength() / 8);\n\n if(em.length !== keyLength) {\n var error = new Error('RSAES-OAEP encoded message length is invalid.');\n error.length = em.length;\n error.expectedLength = keyLength;\n throw error;\n }\n\n // default OAEP to SHA-1 message digest\n if(md === undefined) {\n md = forge.md.sha1.create();\n } else {\n md.start();\n }\n\n // default MGF-1 to same as OAEP\n if(!mgf1Md) {\n mgf1Md = md;\n }\n\n if(keyLength < 2 * md.digestLength + 2) {\n throw new Error('RSAES-OAEP key is too short for the hash function.');\n }\n\n if(!label) {\n label = '';\n }\n md.update(label, 'raw');\n var lHash = md.digest().getBytes();\n\n // split the message into its parts\n var y = em.charAt(0);\n var maskedSeed = em.substring(1, md.digestLength + 1);\n var maskedDB = em.substring(1 + md.digestLength);\n\n var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);\n var seed = forge.util.xorBytes(maskedSeed, seedMask, maskedSeed.length);\n\n var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);\n var db = forge.util.xorBytes(maskedDB, dbMask, maskedDB.length);\n\n var lHashPrime = db.substring(0, md.digestLength);\n\n // constant time check that all values match what is expected\n var error = (y !== '\\x00');\n\n // constant time check lHash vs lHashPrime\n for(var i = 0; i < md.digestLength; ++i) {\n error |= (lHash.charAt(i) !== lHashPrime.charAt(i));\n }\n\n // \"constant time\" find the 0x1 byte separating the padding (zeros) from the\n // message\n // TODO: It must be possible to do this in a better/smarter way?\n var in_ps = 1;\n var index = md.digestLength;\n for(var j = md.digestLength; j < db.length; j++) {\n var code = db.charCodeAt(j);\n\n var is_0 = (code & 0x1) ^ 0x1;\n\n // non-zero if not 0 or 1 in the ps section\n var error_mask = in_ps ? 
0xfffe : 0x0000;\n error |= (code & error_mask);\n\n // latch in_ps to zero after we find 0x1\n in_ps = in_ps & is_0;\n index += in_ps;\n }\n\n if(error || db.charCodeAt(index) !== 0x1) {\n throw new Error('Invalid RSAES-OAEP padding.');\n }\n\n return db.substring(index + 1);\n};\n\nfunction rsa_mgf1(seed, maskLength, hash) {\n // default to SHA-1 message digest\n if(!hash) {\n hash = forge.md.sha1.create();\n }\n var t = '';\n var count = Math.ceil(maskLength / hash.digestLength);\n for(var i = 0; i < count; ++i) {\n var c = String.fromCharCode(\n (i >> 24) & 0xFF, (i >> 16) & 0xFF, (i >> 8) & 0xFF, i & 0xFF);\n hash.start();\n hash.update(seed + c);\n t += hash.digest().getBytes();\n }\n return t.substring(0, maskLength);\n}\n","/**\n * Javascript implementation of PKCS#12.\n *\n * @author Dave Longley\n * @author Stefan Siegl \n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * The ASN.1 representation of PKCS#12 is as follows\n * (see ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12-tc1.pdf for details)\n *\n * PFX ::= SEQUENCE {\n * version INTEGER {v3(3)}(v3,...),\n * authSafe ContentInfo,\n * macData MacData OPTIONAL\n * }\n *\n * MacData ::= SEQUENCE {\n * mac DigestInfo,\n * macSalt OCTET STRING,\n * iterations INTEGER DEFAULT 1\n * }\n * Note: The iterations default is for historical reasons and its use is\n * deprecated. A higher value, like 1024, is recommended.\n *\n * DigestInfo is defined in PKCS#7 as follows:\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n *\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. In the case of SHA1 there is none.\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * Digest ::= OCTET STRING\n *\n *\n * ContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL\n * }\n *\n * ContentType ::= OBJECT IDENTIFIER\n *\n * AuthenticatedSafe ::= SEQUENCE OF ContentInfo\n * -- Data if unencrypted\n * -- EncryptedData if password-encrypted\n * -- EnvelopedData if public key-encrypted\n *\n *\n * SafeContents ::= SEQUENCE OF SafeBag\n *\n * SafeBag ::= SEQUENCE {\n * bagId BAG-TYPE.&id ({PKCS12BagSet})\n * bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}),\n * bagAttributes SET OF PKCS12Attribute OPTIONAL\n * }\n *\n * PKCS12Attribute ::= SEQUENCE {\n * attrId ATTRIBUTE.&id ({PKCS12AttrSet}),\n * attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId})\n * } -- This type is compatible with the X.500 type 'Attribute'\n *\n * PKCS12AttrSet ATTRIBUTE ::= {\n * friendlyName | -- from PKCS #9\n * localKeyId, -- from PKCS #9\n * ... -- Other attributes are allowed\n * }\n *\n * CertBag ::= SEQUENCE {\n * certId BAG-TYPE.&id ({CertTypes}),\n * certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId})\n * }\n *\n * x509Certificate BAG-TYPE ::= {OCTET STRING IDENTIFIED BY {certTypes 1}}\n * -- DER-encoded X.509 certificate stored in OCTET STRING\n *\n * sdsiCertificate BAG-TYPE ::= {IA5String IDENTIFIED BY {certTypes 2}}\n * -- Base64-encoded SDSI certificate stored in IA5String\n *\n * CertTypes BAG-TYPE ::= {\n * x509Certificate |\n * sdsiCertificate,\n * ... 
-- For future extensions\n * }\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./hmac');\nrequire('./oids');\nrequire('./pkcs7asn1');\nrequire('./pbe');\nrequire('./random');\nrequire('./rsa');\nrequire('./sha1');\nrequire('./util');\nrequire('./x509');\n\n// shortcut for asn.1 & PKI API\nvar asn1 = forge.asn1;\nvar pki = forge.pki;\n\n// shortcut for PKCS#12 API\nvar p12 = module.exports = forge.pkcs12 = forge.pkcs12 || {};\n\nvar contentInfoValidator = {\n name: 'ContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE, // a ContentInfo\n constructed: true,\n value: [{\n name: 'ContentInfo.contentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'ContentInfo.content',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n constructed: true,\n captureAsn1: 'content'\n }]\n};\n\nvar pfxValidator = {\n name: 'PFX',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PFX.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n },\n contentInfoValidator, {\n name: 'PFX.macData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'mac',\n value: [{\n name: 'PFX.macData.mac',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE, // DigestInfo\n constructed: true,\n value: [{\n name: 'PFX.macData.mac.digestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE, // DigestAlgorithmIdentifier\n constructed: true,\n value: [{\n name: 'PFX.macData.mac.digestAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'macAlgorithm'\n }, {\n name: 'PFX.macData.mac.digestAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n captureAsn1: 'macAlgorithmParameters'\n }]\n }, {\n name: 'PFX.macData.mac.digest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'macDigest'\n }]\n }, {\n name: 'PFX.macData.macSalt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'macSalt'\n }, {\n name: 'PFX.macData.iterations',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n optional: true,\n capture: 'macIterations'\n }]\n }]\n};\n\nvar safeBagValidator = {\n name: 'SafeBag',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SafeBag.bagId',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'bagId'\n }, {\n name: 'SafeBag.bagValue',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n constructed: true,\n captureAsn1: 'bagValue'\n }, {\n name: 'SafeBag.bagAttributes',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n optional: true,\n capture: 'bagAttributes'\n }]\n};\n\nvar attributeValidator = {\n name: 'Attribute',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Attribute.attrId',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'oid'\n }, {\n name: 'Attribute.attrValues',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n capture: 'values'\n }]\n};\n\nvar certBagValidator = {\n name: 'CertBag',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 
'CertBag.certId',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certId'\n }, {\n name: 'CertBag.certValue',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n constructed: true,\n /* So far we only support X.509 certificates (which are wrapped in\n an OCTET STRING, hence hard code that here). */\n value: [{\n name: 'CertBag.certValue[0]',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.OCTETSTRING,\n constructed: false,\n capture: 'cert'\n }]\n }]\n};\n\n/**\n * Search SafeContents structure for bags with matching attributes.\n *\n * The search can optionally be narrowed by a certain bag type.\n *\n * @param safeContents the SafeContents structure to search in.\n * @param attrName the name of the attribute to compare against.\n * @param attrValue the attribute value to search for.\n * @param [bagType] bag type to narrow search by.\n *\n * @return an array of matching bags.\n */\nfunction _getBagsByAttribute(safeContents, attrName, attrValue, bagType) {\n var result = [];\n\n for(var i = 0; i < safeContents.length; i++) {\n for(var j = 0; j < safeContents[i].safeBags.length; j++) {\n var bag = safeContents[i].safeBags[j];\n if(bagType !== undefined && bag.type !== bagType) {\n continue;\n }\n // only filter by bag type, no attribute specified\n if(attrName === null) {\n result.push(bag);\n continue;\n }\n if(bag.attributes[attrName] !== undefined &&\n bag.attributes[attrName].indexOf(attrValue) >= 0) {\n result.push(bag);\n }\n }\n }\n\n return result;\n}\n\n/**\n * Converts a PKCS#12 PFX in ASN.1 notation into a PFX object.\n *\n * @param obj The PKCS#12 PFX in ASN.1 notation.\n * @param strict true to use strict DER decoding, false not to (default: true).\n * @param {String} password Password to decrypt with (optional).\n *\n * @return PKCS#12 PFX object.\n */\np12.pkcs12FromAsn1 = function(obj, strict, password) {\n // handle args\n if(typeof strict === 'string') {\n password = strict;\n strict = true;\n } else if(strict === undefined) {\n strict = true;\n }\n\n // validate PFX and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, pfxValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#12 PFX. 
' +\n 'ASN.1 object is not an PKCS#12 PFX.');\n error.errors = error;\n throw error;\n }\n\n var pfx = {\n version: capture.version.charCodeAt(0),\n safeContents: [],\n\n /**\n * Gets bags with matching attributes.\n *\n * @param filter the attributes to filter by:\n * [localKeyId] the localKeyId to search for.\n * [localKeyIdHex] the localKeyId in hex to search for.\n * [friendlyName] the friendly name to search for.\n * [bagType] bag type to narrow each attribute search by.\n *\n * @return a map of attribute type to an array of matching bags or, if no\n * attribute was given but a bag type, the map key will be the\n * bag type.\n */\n getBags: function(filter) {\n var rval = {};\n\n var localKeyId;\n if('localKeyId' in filter) {\n localKeyId = filter.localKeyId;\n } else if('localKeyIdHex' in filter) {\n localKeyId = forge.util.hexToBytes(filter.localKeyIdHex);\n }\n\n // filter on bagType only\n if(localKeyId === undefined && !('friendlyName' in filter) &&\n 'bagType' in filter) {\n rval[filter.bagType] = _getBagsByAttribute(\n pfx.safeContents, null, null, filter.bagType);\n }\n\n if(localKeyId !== undefined) {\n rval.localKeyId = _getBagsByAttribute(\n pfx.safeContents, 'localKeyId',\n localKeyId, filter.bagType);\n }\n if('friendlyName' in filter) {\n rval.friendlyName = _getBagsByAttribute(\n pfx.safeContents, 'friendlyName',\n filter.friendlyName, filter.bagType);\n }\n\n return rval;\n },\n\n /**\n * DEPRECATED: use getBags() instead.\n *\n * Get bags with matching friendlyName attribute.\n *\n * @param friendlyName the friendly name to search for.\n * @param [bagType] bag type to narrow search by.\n *\n * @return an array of bags with matching friendlyName attribute.\n */\n getBagsByFriendlyName: function(friendlyName, bagType) {\n return _getBagsByAttribute(\n pfx.safeContents, 'friendlyName', friendlyName, bagType);\n },\n\n /**\n * DEPRECATED: use getBags() instead.\n *\n * Get bags with matching localKeyId attribute.\n *\n * @param localKeyId the localKeyId to search for.\n * @param [bagType] bag type to narrow search by.\n *\n * @return an array of bags with matching localKeyId attribute.\n */\n getBagsByLocalKeyId: function(localKeyId, bagType) {\n return _getBagsByAttribute(\n pfx.safeContents, 'localKeyId', localKeyId, bagType);\n }\n };\n\n if(capture.version.charCodeAt(0) !== 3) {\n var error = new Error('PKCS#12 PFX of version other than 3 not supported.');\n error.version = capture.version.charCodeAt(0);\n throw error;\n }\n\n if(asn1.derToOid(capture.contentType) !== pki.oids.data) {\n var error = new Error('Only PKCS#12 PFX in password integrity mode supported.');\n error.oid = asn1.derToOid(capture.contentType);\n throw error;\n }\n\n var data = capture.content.value[0];\n if(data.tagClass !== asn1.Class.UNIVERSAL ||\n data.type !== asn1.Type.OCTETSTRING) {\n throw new Error('PKCS#12 authSafe content data is not an OCTET STRING.');\n }\n data = _decodePkcs7Data(data);\n\n // check for MAC\n if(capture.mac) {\n var md = null;\n var macKeyBytes = 0;\n var macAlgorithm = asn1.derToOid(capture.macAlgorithm);\n switch(macAlgorithm) {\n case pki.oids.sha1:\n md = forge.md.sha1.create();\n macKeyBytes = 20;\n break;\n case pki.oids.sha256:\n md = forge.md.sha256.create();\n macKeyBytes = 32;\n break;\n case pki.oids.sha384:\n md = forge.md.sha384.create();\n macKeyBytes = 48;\n break;\n case pki.oids.sha512:\n md = forge.md.sha512.create();\n macKeyBytes = 64;\n break;\n case pki.oids.md5:\n md = forge.md.md5.create();\n macKeyBytes = 16;\n break;\n }\n if(md === null) 
{\n throw new Error('PKCS#12 uses unsupported MAC algorithm: ' + macAlgorithm);\n }\n\n // verify MAC (iterations default to 1)\n var macSalt = new forge.util.ByteBuffer(capture.macSalt);\n var macIterations = (('macIterations' in capture) ?\n parseInt(forge.util.bytesToHex(capture.macIterations), 16) : 1);\n var macKey = p12.generateKey(\n password, macSalt, 3, macIterations, macKeyBytes, md);\n var mac = forge.hmac.create();\n mac.start(md, macKey);\n mac.update(data.value);\n var macValue = mac.getMac();\n if(macValue.getBytes() !== capture.macDigest) {\n throw new Error('PKCS#12 MAC could not be verified. Invalid password?');\n }\n }\n\n _decodeAuthenticatedSafe(pfx, data.value, strict, password);\n return pfx;\n};\n\n/**\n * Decodes PKCS#7 Data. PKCS#7 (RFC 2315) defines \"Data\" as an OCTET STRING,\n * but it is sometimes an OCTET STRING that is composed/constructed of chunks,\n * each its own OCTET STRING. This is BER-encoding vs. DER-encoding. This\n * function transforms this corner-case into the usual simple,\n * non-composed/constructed OCTET STRING.\n *\n * This function may be moved to ASN.1 at some point to better deal with\n * more BER-encoding issues, should they arise.\n *\n * @param data the ASN.1 Data object to transform.\n */\nfunction _decodePkcs7Data(data) {\n // handle special case of \"chunked\" data content: an octet string composed\n // of other octet strings\n if(data.composed || data.constructed) {\n var value = forge.util.createBuffer();\n for(var i = 0; i < data.value.length; ++i) {\n value.putBytes(data.value[i].value);\n }\n data.composed = data.constructed = false;\n data.value = value.getBytes();\n }\n return data;\n}\n\n/**\n * Decode PKCS#12 AuthenticatedSafe (BER encoded) into PFX object.\n *\n * The AuthenticatedSafe is a BER-encoded SEQUENCE OF ContentInfo.\n *\n * @param pfx The PKCS#12 PFX object to fill.\n * @param {String} authSafe BER-encoded AuthenticatedSafe.\n * @param strict true to use strict DER decoding, false not to.\n * @param {String} password Password to decrypt with (optional).\n */\nfunction _decodeAuthenticatedSafe(pfx, authSafe, strict, password) {\n authSafe = asn1.fromDer(authSafe, strict); /* actually it's BER encoded */\n\n if(authSafe.tagClass !== asn1.Class.UNIVERSAL ||\n authSafe.type !== asn1.Type.SEQUENCE ||\n authSafe.constructed !== true) {\n throw new Error('PKCS#12 AuthenticatedSafe expected to be a ' +\n 'SEQUENCE OF ContentInfo');\n }\n\n for(var i = 0; i < authSafe.value.length; i++) {\n var contentInfo = authSafe.value[i];\n\n // validate contentInfo and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(contentInfo, contentInfoValidator, capture, errors)) {\n var error = new Error('Cannot read ContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var obj = {\n encrypted: false\n };\n var safeContents = null;\n var data = capture.content.value[0];\n switch(asn1.derToOid(capture.contentType)) {\n case pki.oids.data:\n if(data.tagClass !== asn1.Class.UNIVERSAL ||\n data.type !== asn1.Type.OCTETSTRING) {\n throw new Error('PKCS#12 SafeContents Data is not an OCTET STRING.');\n }\n safeContents = _decodePkcs7Data(data).value;\n break;\n case pki.oids.encryptedData:\n safeContents = _decryptSafeContents(data, password);\n obj.encrypted = true;\n break;\n default:\n var error = new Error('Unsupported PKCS#12 contentType.');\n error.contentType = asn1.derToOid(capture.contentType);\n throw error;\n }\n\n obj.safeBags = _decodeSafeContents(safeContents, strict, password);\n 
pfx.safeContents.push(obj);\n }\n}\n\n/**\n * Decrypt PKCS#7 EncryptedData structure.\n *\n * @param data ASN.1 encoded EncryptedContentInfo object.\n * @param password The user-provided password.\n *\n * @return The decrypted SafeContents (ASN.1 object).\n */\nfunction _decryptSafeContents(data, password) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(\n data, forge.pkcs7.asn1.encryptedDataValidator, capture, errors)) {\n var error = new Error('Cannot read EncryptedContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var oid = asn1.derToOid(capture.contentType);\n if(oid !== pki.oids.data) {\n var error = new Error(\n 'PKCS#12 EncryptedContentInfo ContentType is not Data.');\n error.oid = oid;\n throw error;\n }\n\n // get cipher\n oid = asn1.derToOid(capture.encAlgorithm);\n var cipher = pki.pbe.getCipher(oid, capture.encParameter, password);\n\n // get encrypted data\n var encryptedContentAsn1 = _decodePkcs7Data(capture.encryptedContentAsn1);\n var encrypted = forge.util.createBuffer(encryptedContentAsn1.value);\n\n cipher.update(encrypted);\n if(!cipher.finish()) {\n throw new Error('Failed to decrypt PKCS#12 SafeContents.');\n }\n\n return cipher.output.getBytes();\n}\n\n/**\n * Decode PKCS#12 SafeContents (BER-encoded) into array of Bag objects.\n *\n * The safeContents is a BER-encoded SEQUENCE OF SafeBag.\n *\n * @param {String} safeContents BER-encoded safeContents.\n * @param strict true to use strict DER decoding, false not to.\n * @param {String} password Password to decrypt with (optional).\n *\n * @return {Array} Array of Bag objects.\n */\nfunction _decodeSafeContents(safeContents, strict, password) {\n // if strict and no safe contents, return empty safes\n if(!strict && safeContents.length === 0) {\n return [];\n }\n\n // actually it's BER-encoded\n safeContents = asn1.fromDer(safeContents, strict);\n\n if(safeContents.tagClass !== asn1.Class.UNIVERSAL ||\n safeContents.type !== asn1.Type.SEQUENCE ||\n safeContents.constructed !== true) {\n throw new Error(\n 'PKCS#12 SafeContents expected to be a SEQUENCE OF SafeBag.');\n }\n\n var res = [];\n for(var i = 0; i < safeContents.value.length; i++) {\n var safeBag = safeContents.value[i];\n\n // validate SafeBag and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(safeBag, safeBagValidator, capture, errors)) {\n var error = new Error('Cannot read SafeBag.');\n error.errors = errors;\n throw error;\n }\n\n /* Create bag object and push to result array. */\n var bag = {\n type: asn1.derToOid(capture.bagId),\n attributes: _decodeBagAttributes(capture.bagAttributes)\n };\n res.push(bag);\n\n var validator, decoder;\n var bagAsn1 = capture.bagValue.value[0];\n switch(bag.type) {\n case pki.oids.pkcs8ShroudedKeyBag:\n /* bagAsn1 has a EncryptedPrivateKeyInfo, which we need to decrypt.\n Afterwards we can handle it like a keyBag,\n which is a PrivateKeyInfo. */\n bagAsn1 = pki.decryptPrivateKeyInfo(bagAsn1, password);\n if(bagAsn1 === null) {\n throw new Error(\n 'Unable to decrypt PKCS#8 ShroudedKeyBag, wrong password?');\n }\n\n /* fall through */\n case pki.oids.keyBag:\n /* A PKCS#12 keyBag is a simple PrivateKeyInfo as understood by our\n PKI module, hence we don't have to do validation/capturing here,\n just pass what we already got. */\n try {\n bag.key = pki.privateKeyFromAsn1(bagAsn1);\n } catch(e) {\n // ignore unknown key type, pass asn1 value\n bag.key = null;\n bag.asn1 = bagAsn1;\n }\n continue; /* Nothing more to do. 
*/\n\n case pki.oids.certBag:\n /* A PKCS#12 certBag can wrap both X.509 and sdsi certificates.\n Therefore put the SafeBag content through another validator to\n capture the fields. Afterwards check & store the results. */\n validator = certBagValidator;\n decoder = function() {\n if(asn1.derToOid(capture.certId) !== pki.oids.x509Certificate) {\n var error = new Error(\n 'Unsupported certificate type, only X.509 supported.');\n error.oid = asn1.derToOid(capture.certId);\n throw error;\n }\n\n // true=produce cert hash\n var certAsn1 = asn1.fromDer(capture.cert, strict);\n try {\n bag.cert = pki.certificateFromAsn1(certAsn1, true);\n } catch(e) {\n // ignore unknown cert type, pass asn1 value\n bag.cert = null;\n bag.asn1 = certAsn1;\n }\n };\n break;\n\n default:\n var error = new Error('Unsupported PKCS#12 SafeBag type.');\n error.oid = bag.type;\n throw error;\n }\n\n /* Validate SafeBag value (i.e. CertBag, etc.) and capture data if needed. */\n if(validator !== undefined &&\n !asn1.validate(bagAsn1, validator, capture, errors)) {\n var error = new Error('Cannot read PKCS#12 ' + validator.name);\n error.errors = errors;\n throw error;\n }\n\n /* Call decoder function from above to store the results. */\n decoder();\n }\n\n return res;\n}\n\n/**\n * Decode PKCS#12 SET OF PKCS12Attribute into JavaScript object.\n *\n * @param attributes SET OF PKCS12Attribute (ASN.1 object).\n *\n * @return the decoded attributes.\n */\nfunction _decodeBagAttributes(attributes) {\n var decodedAttrs = {};\n\n if(attributes !== undefined) {\n for(var i = 0; i < attributes.length; ++i) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(attributes[i], attributeValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#12 BagAttribute.');\n error.errors = errors;\n throw error;\n }\n\n var oid = asn1.derToOid(capture.oid);\n if(pki.oids[oid] === undefined) {\n // unsupported attribute type, ignore.\n continue;\n }\n\n decodedAttrs[pki.oids[oid]] = [];\n for(var j = 0; j < capture.values.length; ++j) {\n decodedAttrs[pki.oids[oid]].push(capture.values[j].value);\n }\n }\n }\n\n return decodedAttrs;\n}\n\n/**\n * Wraps a private key and certificate in a PKCS#12 PFX wrapper. If a\n * password is provided then the private key will be encrypted.\n *\n * An entire certificate chain may also be included. To do this, pass\n * an array for the \"cert\" parameter where the first certificate is\n * the one that is paired with the private key and each subsequent one\n * verifies the previous one. 
The certificates may be in PEM format or\n * have been already parsed by Forge.\n *\n * @todo implement password-based-encryption for the whole package\n *\n * @param key the private key.\n * @param cert the certificate (may be an array of certificates in order\n * to specify a certificate chain).\n * @param password the password to use, null for none.\n * @param options:\n * algorithm the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'.\n * count the iteration count to use.\n * saltSize the salt size to use.\n * useMac true to include a MAC, false not to, defaults to true.\n * localKeyId the local key ID to use, in hex.\n * friendlyName the friendly name to use.\n * generateLocalKeyId true to generate a random local key ID,\n * false not to, defaults to true.\n *\n * @return the PKCS#12 PFX ASN.1 object.\n */\np12.toPkcs12Asn1 = function(key, cert, password, options) {\n // set default options\n options = options || {};\n options.saltSize = options.saltSize || 8;\n options.count = options.count || 2048;\n options.algorithm = options.algorithm || options.encAlgorithm || 'aes128';\n if(!('useMac' in options)) {\n options.useMac = true;\n }\n if(!('localKeyId' in options)) {\n options.localKeyId = null;\n }\n if(!('generateLocalKeyId' in options)) {\n options.generateLocalKeyId = true;\n }\n\n var localKeyId = options.localKeyId;\n var bagAttrs;\n if(localKeyId !== null) {\n localKeyId = forge.util.hexToBytes(localKeyId);\n } else if(options.generateLocalKeyId) {\n // use SHA-1 of paired cert, if available\n if(cert) {\n var pairedCert = forge.util.isArray(cert) ? cert[0] : cert;\n if(typeof pairedCert === 'string') {\n pairedCert = pki.certificateFromPem(pairedCert);\n }\n var sha1 = forge.md.sha1.create();\n sha1.update(asn1.toDer(pki.certificateToAsn1(pairedCert)).getBytes());\n localKeyId = sha1.digest().getBytes();\n } else {\n // FIXME: consider using SHA-1 of public key (which can be generated\n // from private key components), see: cert.generateSubjectKeyIdentifier\n // generate random bytes\n localKeyId = forge.random.getBytes(20);\n }\n }\n\n var attrs = [];\n if(localKeyId !== null) {\n attrs.push(\n // localKeyID\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // attrId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.localKeyId).getBytes()),\n // attrValues\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n localKeyId)\n ])\n ]));\n }\n if('friendlyName' in options) {\n attrs.push(\n // friendlyName\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // attrId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.friendlyName).getBytes()),\n // attrValues\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BMPSTRING, false,\n options.friendlyName)\n ])\n ]));\n }\n\n if(attrs.length > 0) {\n bagAttrs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);\n }\n\n // collect contents for AuthenticatedSafe\n var contents = [];\n\n // create safe bag(s) for certificate chain\n var chain = [];\n if(cert !== null) {\n if(forge.util.isArray(cert)) {\n chain = cert;\n } else {\n chain = [cert];\n }\n }\n\n var certSafeBags = [];\n for(var i = 0; i < chain.length; ++i) {\n // convert cert from PEM as necessary\n cert = chain[i];\n if(typeof cert === 'string') {\n cert = pki.certificateFromPem(cert);\n }\n\n // 
SafeBag\n var certBagAttrs = (i === 0) ? bagAttrs : undefined;\n var certAsn1 = pki.certificateToAsn1(cert);\n var certSafeBag =\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // bagId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.certBag).getBytes()),\n // bagValue\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // CertBag\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // certId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.x509Certificate).getBytes()),\n // certValue (x509Certificate)\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(certAsn1).getBytes())\n ])])]),\n // bagAttributes (OPTIONAL)\n certBagAttrs\n ]);\n certSafeBags.push(certSafeBag);\n }\n\n if(certSafeBags.length > 0) {\n // SafeContents\n var certSafeContents = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, certSafeBags);\n\n // ContentInfo\n var certCI =\n // PKCS#7 ContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // contentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n // OID for the content type is 'data'\n asn1.oidToDer(pki.oids.data).getBytes()),\n // content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(certSafeContents).getBytes())\n ])\n ]);\n contents.push(certCI);\n }\n\n // create safe contents for private key\n var keyBag = null;\n if(key !== null) {\n // SafeBag\n var pkAsn1 = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(key));\n if(password === null) {\n // no encryption\n keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // bagId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.keyBag).getBytes()),\n // bagValue\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // PrivateKeyInfo\n pkAsn1\n ]),\n // bagAttributes (OPTIONAL)\n bagAttrs\n ]);\n } else {\n // encrypted PrivateKeyInfo\n keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // bagId\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.pkcs8ShroudedKeyBag).getBytes()),\n // bagValue\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // EncryptedPrivateKeyInfo\n pki.encryptPrivateKeyInfo(pkAsn1, password, options)\n ]),\n // bagAttributes (OPTIONAL)\n bagAttrs\n ]);\n }\n\n // SafeContents\n var keySafeContents =\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [keyBag]);\n\n // ContentInfo\n var keyCI =\n // PKCS#7 ContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // contentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n // OID for the content type is 'data'\n asn1.oidToDer(pki.oids.data).getBytes()),\n // content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(keySafeContents).getBytes())\n ])\n ]);\n contents.push(keyCI);\n }\n\n // create AuthenticatedSafe by stringing together the contents\n var safe = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, contents);\n\n var macData;\n if(options.useMac) {\n // MacData\n var sha1 = forge.md.sha1.create();\n var macSalt = new forge.util.ByteBuffer(\n forge.random.getBytes(options.saltSize));\n var count = options.count;\n // 160-bit key\n var key = p12.generateKey(password, 
macSalt, 3, count, 20);\n var mac = forge.hmac.create();\n mac.start(sha1, key);\n mac.update(asn1.toDer(safe).getBytes());\n var macValue = mac.getMac();\n macData = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // mac DigestInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // digestAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm = SHA-1\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.sha1).getBytes()),\n // parameters = Null\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // digest\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING,\n false, macValue.getBytes())\n ]),\n // macSalt OCTET STRING\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, macSalt.getBytes()),\n // iterations INTEGER (XXX: Only support count < 65536)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(count).getBytes()\n )\n ]);\n }\n\n // PFX\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (3)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(3).getBytes()),\n // PKCS#7 ContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // contentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n // OID for the content type is 'data'\n asn1.oidToDer(pki.oids.data).getBytes()),\n // content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(safe).getBytes())\n ])\n ]),\n macData\n ]);\n};\n\n/**\n * Derives a PKCS#12 key.\n *\n * @param password the password to derive the key material from, null or\n * undefined for none.\n * @param salt the salt, as a ByteBuffer, to use.\n * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC).\n * @param iter the iteration count.\n * @param n the number of bytes to derive from the password.\n * @param md the message digest to use, defaults to SHA-1.\n *\n * @return a ByteBuffer with the bytes derived from the password.\n */\np12.generateKey = forge.pbe.generatePkcs12Key;\n","/**\n * Javascript implementation of PKCS#7 v1.5.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2012-2015 Digital Bazaar, Inc.\n *\n * Currently this implementation only supports ContentType of EnvelopedData,\n * EncryptedData, or SignedData at the root level. The top level elements may\n * contain only a ContentInfo of ContentType Data, i.e. plain data. 
Further\n * nesting is not (yet) supported.\n *\n * The Forge validators for PKCS #7's ASN.1 structures are available from\n * a separate file pkcs7asn1.js, since those are referenced from other\n * PKCS standards like PKCS #12.\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./asn1');\nrequire('./des');\nrequire('./oids');\nrequire('./pem');\nrequire('./pkcs7asn1');\nrequire('./random');\nrequire('./util');\nrequire('./x509');\n\n// shortcut for ASN.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for PKCS#7 API\nvar p7 = module.exports = forge.pkcs7 = forge.pkcs7 || {};\n\n/**\n * Converts a PKCS#7 message from PEM format.\n *\n * @param pem the PEM-formatted PKCS#7 message.\n *\n * @return the PKCS#7 message.\n */\np7.messageFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'PKCS7') {\n var error = new Error('Could not convert PKCS#7 message from PEM; PEM ' +\n 'header type is not \"PKCS#7\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert PKCS#7 message from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return p7.messageFromAsn1(obj);\n};\n\n/**\n * Converts a PKCS#7 message to PEM format.\n *\n * @param msg The PKCS#7 message object\n * @param maxline The maximum characters per line, defaults to 64.\n *\n * @return The PEM-formatted PKCS#7 message.\n */\np7.messageToPem = function(msg, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var pemObj = {\n type: 'PKCS7',\n body: asn1.toDer(msg.toAsn1()).getBytes()\n };\n return forge.pem.encode(pemObj, {maxline: maxline});\n};\n\n/**\n * Converts a PKCS#7 message from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a ContentInfo.\n *\n * @return the PKCS#7 message.\n */\np7.messageFromAsn1 = function(obj) {\n // validate root level ContentInfo and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.contentInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 message. ' +\n 'ASN.1 object is not an PKCS#7 ContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var contentType = asn1.derToOid(capture.contentType);\n var msg;\n\n switch(contentType) {\n case forge.pki.oids.envelopedData:\n msg = p7.createEnvelopedData();\n break;\n\n case forge.pki.oids.encryptedData:\n msg = p7.createEncryptedData();\n break;\n\n case forge.pki.oids.signedData:\n msg = p7.createSignedData();\n break;\n\n default:\n throw new Error('Cannot read PKCS#7 message. 
ContentType with OID ' +\n contentType + ' is not (yet) supported.');\n }\n\n msg.fromAsn1(capture.content.value[0]);\n return msg;\n};\n\np7.createSignedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.signedData,\n version: 1,\n certificates: [],\n crls: [],\n // TODO: add json-formatted signer stuff here?\n signers: [],\n // populated during sign()\n digestAlgorithmIdentifiers: [],\n contentInfo: null,\n signerInfos: [],\n\n fromAsn1: function(obj) {\n // validate SignedData content block and capture data.\n _fromAsn1(msg, obj, p7.asn1.signedDataValidator);\n msg.certificates = [];\n msg.crls = [];\n msg.digestAlgorithmIdentifiers = [];\n msg.contentInfo = null;\n msg.signerInfos = [];\n\n if(msg.rawCapture.certificates) {\n var certs = msg.rawCapture.certificates.value;\n for(var i = 0; i < certs.length; ++i) {\n msg.certificates.push(forge.pki.certificateFromAsn1(certs[i]));\n }\n }\n\n // TODO: parse crls\n },\n\n toAsn1: function() {\n // degenerate case with no content\n if(!msg.contentInfo) {\n msg.sign();\n }\n\n var certs = [];\n for(var i = 0; i < msg.certificates.length; ++i) {\n certs.push(forge.pki.certificateToAsn1(msg.certificates[i]));\n }\n\n var crls = [];\n // TODO: implement CRLs\n\n // [0] SignedData\n var signedData = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(msg.version).getBytes()),\n // DigestAlgorithmIdentifiers\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n msg.digestAlgorithmIdentifiers),\n // ContentInfo\n msg.contentInfo\n ])\n ]);\n if(certs.length > 0) {\n // [0] IMPLICIT ExtendedCertificatesAndCertificates OPTIONAL\n signedData.value[0].value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, certs));\n }\n if(crls.length > 0) {\n // [1] IMPLICIT CertificateRevocationLists OPTIONAL\n signedData.value[0].value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, crls));\n }\n // SignerInfos\n signedData.value[0].value.push(\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n msg.signerInfos));\n\n // ContentInfo\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(msg.type).getBytes()),\n // [0] SignedData\n signedData\n ]);\n },\n\n /**\n * Add (another) entity to list of signers.\n *\n * Note: If authenticatedAttributes are provided, then, per RFC 2315,\n * they must include at least two attributes: content type and\n * message digest. 
The message digest attribute value will be\n * auto-calculated during signing and will be ignored if provided.\n *\n * Here's an example of providing these two attributes:\n *\n * forge.pkcs7.createSignedData();\n * p7.addSigner({\n * issuer: cert.issuer.attributes,\n * serialNumber: cert.serialNumber,\n * key: privateKey,\n * digestAlgorithm: forge.pki.oids.sha1,\n * authenticatedAttributes: [{\n * type: forge.pki.oids.contentType,\n * value: forge.pki.oids.data\n * }, {\n * type: forge.pki.oids.messageDigest\n * }]\n * });\n *\n * TODO: Support [subjectKeyIdentifier] as signer's ID.\n *\n * @param signer the signer information:\n * key the signer's private key.\n * [certificate] a certificate containing the public key\n * associated with the signer's private key; use this option as\n * an alternative to specifying signer.issuer and\n * signer.serialNumber.\n * [issuer] the issuer attributes (eg: cert.issuer.attributes).\n * [serialNumber] the signer's certificate's serial number in\n * hexadecimal (eg: cert.serialNumber).\n * [digestAlgorithm] the message digest OID, as a string, to use\n * (eg: forge.pki.oids.sha1).\n * [authenticatedAttributes] an optional array of attributes\n * to also sign along with the content.\n */\n addSigner: function(signer) {\n var issuer = signer.issuer;\n var serialNumber = signer.serialNumber;\n if(signer.certificate) {\n var cert = signer.certificate;\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n issuer = cert.issuer.attributes;\n serialNumber = cert.serialNumber;\n }\n var key = signer.key;\n if(!key) {\n throw new Error(\n 'Could not add PKCS#7 signer; no private key specified.');\n }\n if(typeof key === 'string') {\n key = forge.pki.privateKeyFromPem(key);\n }\n\n // ensure OID known for digest algorithm\n var digestAlgorithm = signer.digestAlgorithm || forge.pki.oids.sha1;\n switch(digestAlgorithm) {\n case forge.pki.oids.sha1:\n case forge.pki.oids.sha256:\n case forge.pki.oids.sha384:\n case forge.pki.oids.sha512:\n case forge.pki.oids.md5:\n break;\n default:\n throw new Error(\n 'Could not add PKCS#7 signer; unknown message digest algorithm: ' +\n digestAlgorithm);\n }\n\n // if authenticatedAttributes is present, then the attributes\n // must contain at least PKCS #9 content-type and message-digest\n var authenticatedAttributes = signer.authenticatedAttributes || [];\n if(authenticatedAttributes.length > 0) {\n var contentType = false;\n var messageDigest = false;\n for(var i = 0; i < authenticatedAttributes.length; ++i) {\n var attr = authenticatedAttributes[i];\n if(!contentType && attr.type === forge.pki.oids.contentType) {\n contentType = true;\n if(messageDigest) {\n break;\n }\n continue;\n }\n if(!messageDigest && attr.type === forge.pki.oids.messageDigest) {\n messageDigest = true;\n if(contentType) {\n break;\n }\n continue;\n }\n }\n\n if(!contentType || !messageDigest) {\n throw new Error('Invalid signer.authenticatedAttributes. If ' +\n 'signer.authenticatedAttributes is specified, then it must ' +\n 'contain at least two attributes, PKCS #9 content-type and ' +\n 'PKCS #9 message-digest.');\n }\n }\n\n msg.signers.push({\n key: key,\n version: 1,\n issuer: issuer,\n serialNumber: serialNumber,\n digestAlgorithm: digestAlgorithm,\n signatureAlgorithm: forge.pki.oids.rsaEncryption,\n signature: null,\n authenticatedAttributes: authenticatedAttributes,\n unauthenticatedAttributes: []\n });\n },\n\n /**\n * Signs the content.\n * @param options Options to apply when signing:\n * [detached] boolean. 
If signing should be done in detached mode. Defaults to false.\n */\n sign: function(options) {\n options = options || {};\n // auto-generate content info\n if(typeof msg.content !== 'object' || msg.contentInfo === null) {\n // use Data ContentInfo\n msg.contentInfo = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(forge.pki.oids.data).getBytes())\n ]);\n\n // add actual content, if present\n if('content' in msg) {\n var content;\n if(msg.content instanceof forge.util.ByteBuffer) {\n content = msg.content.bytes();\n } else if(typeof msg.content === 'string') {\n content = forge.util.encodeUtf8(msg.content);\n }\n\n if (options.detached) {\n msg.detachedContent = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, content);\n } else {\n msg.contentInfo.value.push(\n // [0] EXPLICIT content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n content)\n ]));\n }\n }\n }\n\n // no signers, return early (degenerate case for certificate container)\n if(msg.signers.length === 0) {\n return;\n }\n\n // generate digest algorithm identifiers\n var mds = addDigestAlgorithmIds();\n\n // generate signerInfos\n addSignerInfos(mds);\n },\n\n verify: function() {\n throw new Error('PKCS#7 signature verification not yet implemented.');\n },\n\n /**\n * Add a certificate.\n *\n * @param cert the certificate to add.\n */\n addCertificate: function(cert) {\n // convert from PEM\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n msg.certificates.push(cert);\n },\n\n /**\n * Add a certificate revokation list.\n *\n * @param crl the certificate revokation list to add.\n */\n addCertificateRevokationList: function(crl) {\n throw new Error('PKCS#7 CRL support not yet implemented.');\n }\n };\n return msg;\n\n function addDigestAlgorithmIds() {\n var mds = {};\n\n for(var i = 0; i < msg.signers.length; ++i) {\n var signer = msg.signers[i];\n var oid = signer.digestAlgorithm;\n if(!(oid in mds)) {\n // content digest\n mds[oid] = forge.md[forge.pki.oids[oid]].create();\n }\n if(signer.authenticatedAttributes.length === 0) {\n // no custom attributes to digest; use content message digest\n signer.md = mds[oid];\n } else {\n // custom attributes to be digested; use own message digest\n // TODO: optimize to just copy message digest state if that\n // feature is ever supported with message digests\n signer.md = forge.md[forge.pki.oids[oid]].create();\n }\n }\n\n // add unique digest algorithm identifiers\n msg.digestAlgorithmIdentifiers = [];\n for(var oid in mds) {\n msg.digestAlgorithmIdentifiers.push(\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oid).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n }\n\n return mds;\n }\n\n function addSignerInfos(mds) {\n var content;\n\n if (msg.detachedContent) {\n // Signature has been made in detached mode.\n content = msg.detachedContent;\n } else {\n // Note: ContentInfo is a SEQUENCE with 2 values, second value is\n // the content field and is optional for a ContentInfo but required here\n // since signers are present\n // get ContentInfo content\n content = msg.contentInfo.value[1];\n // skip [0] EXPLICIT content wrapper\n content = content.value[0];\n }\n\n 
if(!content) {\n throw new Error(\n 'Could not sign PKCS#7 message; there is no content to sign.');\n }\n\n // get ContentInfo content type\n var contentType = asn1.derToOid(msg.contentInfo.value[0].value);\n\n // serialize content\n var bytes = asn1.toDer(content);\n\n // skip identifier and length per RFC 2315 9.3\n // skip identifier (1 byte)\n bytes.getByte();\n // read and discard length bytes\n asn1.getBerValueLength(bytes);\n bytes = bytes.getBytes();\n\n // digest content DER value bytes\n for(var oid in mds) {\n mds[oid].start().update(bytes);\n }\n\n // sign content\n var signingTime = new Date();\n for(var i = 0; i < msg.signers.length; ++i) {\n var signer = msg.signers[i];\n\n if(signer.authenticatedAttributes.length === 0) {\n // if ContentInfo content type is not \"Data\", then\n // authenticatedAttributes must be present per RFC 2315\n if(contentType !== forge.pki.oids.data) {\n throw new Error(\n 'Invalid signer; authenticatedAttributes must be present ' +\n 'when the ContentInfo content type is not PKCS#7 Data.');\n }\n } else {\n // process authenticated attributes\n // [0] IMPLICIT\n signer.authenticatedAttributesAsn1 = asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n\n // per RFC 2315, attributes are to be digested using a SET container\n // not the above [0] IMPLICIT container\n var attrsAsn1 = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SET, true, []);\n\n for(var ai = 0; ai < signer.authenticatedAttributes.length; ++ai) {\n var attr = signer.authenticatedAttributes[ai];\n if(attr.type === forge.pki.oids.messageDigest) {\n // use content message digest as value\n attr.value = mds[signer.digestAlgorithm].digest();\n } else if(attr.type === forge.pki.oids.signingTime) {\n // auto-populate signing time if not already set\n if(!attr.value) {\n attr.value = signingTime;\n }\n }\n\n // convert to ASN.1 and push onto Attributes SET (for signing) and\n // onto authenticatedAttributesAsn1 to complete SignedData ASN.1\n // TODO: optimize away duplication\n attrsAsn1.value.push(_attributeToAsn1(attr));\n signer.authenticatedAttributesAsn1.value.push(_attributeToAsn1(attr));\n }\n\n // DER-serialize and digest SET OF attributes only\n bytes = asn1.toDer(attrsAsn1).getBytes();\n signer.md.start().update(bytes);\n }\n\n // sign digest\n signer.signature = signer.key.sign(signer.md, 'RSASSA-PKCS1-V1_5');\n }\n\n // add signer info\n msg.signerInfos = _signersToAsn1(msg.signers);\n }\n};\n\n/**\n * Creates an empty PKCS#7 message of type EncryptedData.\n *\n * @return the message.\n */\np7.createEncryptedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.encryptedData,\n version: 0,\n encryptedContent: {\n algorithm: forge.pki.oids['aes256-CBC']\n },\n\n /**\n * Reads an EncryptedData content block (in ASN.1 format)\n *\n * @param obj The ASN.1 representation of the EncryptedData content block\n */\n fromAsn1: function(obj) {\n // Validate EncryptedData content block and capture data.\n _fromAsn1(msg, obj, p7.asn1.encryptedDataValidator);\n },\n\n /**\n * Decrypt encrypted content\n *\n * @param key The (symmetric) key as a byte buffer\n */\n decrypt: function(key) {\n if(key !== undefined) {\n msg.encryptedContent.key = key;\n }\n _decryptContent(msg);\n }\n };\n return msg;\n};\n\n/**\n * Creates an empty PKCS#7 message of type EnvelopedData.\n *\n * @return the message.\n */\np7.createEnvelopedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.envelopedData,\n version: 0,\n recipients: [],\n encryptedContent: {\n algorithm: 
forge.pki.oids['aes256-CBC']\n },\n\n /**\n * Reads an EnvelopedData content block (in ASN.1 format)\n *\n * @param obj the ASN.1 representation of the EnvelopedData content block.\n */\n fromAsn1: function(obj) {\n // validate EnvelopedData content block and capture data\n var capture = _fromAsn1(msg, obj, p7.asn1.envelopedDataValidator);\n msg.recipients = _recipientsFromAsn1(capture.recipientInfos.value);\n },\n\n toAsn1: function() {\n // ContentInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(msg.type).getBytes()),\n // [0] EnvelopedData\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(msg.version).getBytes()),\n // RecipientInfos\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n _recipientsToAsn1(msg.recipients)),\n // EncryptedContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true,\n _encryptedContentToAsn1(msg.encryptedContent))\n ])\n ])\n ]);\n },\n\n /**\n * Find recipient by X.509 certificate's issuer.\n *\n * @param cert the certificate with the issuer to look for.\n *\n * @return the recipient object.\n */\n findRecipient: function(cert) {\n var sAttr = cert.issuer.attributes;\n\n for(var i = 0; i < msg.recipients.length; ++i) {\n var r = msg.recipients[i];\n var rAttr = r.issuer;\n\n if(r.serialNumber !== cert.serialNumber) {\n continue;\n }\n\n if(rAttr.length !== sAttr.length) {\n continue;\n }\n\n var match = true;\n for(var j = 0; j < sAttr.length; ++j) {\n if(rAttr[j].type !== sAttr[j].type ||\n rAttr[j].value !== sAttr[j].value) {\n match = false;\n break;\n }\n }\n\n if(match) {\n return r;\n }\n }\n\n return null;\n },\n\n /**\n * Decrypt enveloped content\n *\n * @param recipient The recipient object related to the private key\n * @param privKey The (RSA) private key object\n */\n decrypt: function(recipient, privKey) {\n if(msg.encryptedContent.key === undefined && recipient !== undefined &&\n privKey !== undefined) {\n switch(recipient.encryptedContent.algorithm) {\n case forge.pki.oids.rsaEncryption:\n case forge.pki.oids.desCBC:\n var key = privKey.decrypt(recipient.encryptedContent.content);\n msg.encryptedContent.key = forge.util.createBuffer(key);\n break;\n\n default:\n throw new Error('Unsupported asymmetric cipher, ' +\n 'OID ' + recipient.encryptedContent.algorithm);\n }\n }\n\n _decryptContent(msg);\n },\n\n /**\n * Add (another) entity to list of recipients.\n *\n * @param cert The certificate of the entity to add.\n */\n addRecipient: function(cert) {\n msg.recipients.push({\n version: 0,\n issuer: cert.issuer.attributes,\n serialNumber: cert.serialNumber,\n encryptedContent: {\n // We simply assume rsaEncryption here, since forge.pki only\n // supports RSA so far. If the PKI module supports other\n // ciphers one day, we need to modify this one as well.\n algorithm: forge.pki.oids.rsaEncryption,\n key: cert.publicKey\n }\n });\n },\n\n /**\n * Encrypt enveloped content.\n *\n * This function supports two optional arguments, cipher and key, which\n * can be used to influence symmetric encryption. Unless cipher is\n * provided, the cipher specified in encryptedContent.algorithm is used\n * (defaults to AES-256-CBC). If no key is provided, encryptedContent.key\n * is (re-)used. 
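// A minimal usage sketch of the EnvelopedData API documented above (createEnvelopedData,
// addRecipient, encrypt, toAsn1), written from a consumer's point of view against the
// published node-forge package. `certPem` and `payload` are assumed inputs supplied by
// the caller; the defaults (AES-256-CBC, random key and IV) match the code above.
var forge = require('node-forge');

function envelopeFor(certPem, payload) {
  var cert = forge.pki.certificateFromPem(certPem);
  var p7 = forge.pkcs7.createEnvelopedData();
  p7.addRecipient(cert);                        // RSA-wraps the content key for this certificate
  p7.content = forge.util.createBuffer(payload); // content must be a forge ByteBuffer or string
  p7.encrypt();                                  // symmetric encryption, AES-256-CBC by default
  // DER-encode the resulting ContentInfo for transport or storage
  return forge.asn1.toDer(p7.toAsn1()).getBytes();
}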
If that one's not set, a random key will be generated\n * automatically.\n *\n * @param [key] The key to be used for symmetric encryption.\n * @param [cipher] The OID of the symmetric cipher to use.\n */\n encrypt: function(key, cipher) {\n // Part 1: Symmetric encryption\n if(msg.encryptedContent.content === undefined) {\n cipher = cipher || msg.encryptedContent.algorithm;\n key = key || msg.encryptedContent.key;\n\n var keyLen, ivLen, ciphFn;\n switch(cipher) {\n case forge.pki.oids['aes128-CBC']:\n keyLen = 16;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['aes192-CBC']:\n keyLen = 24;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['aes256-CBC']:\n keyLen = 32;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['des-EDE3-CBC']:\n keyLen = 24;\n ivLen = 8;\n ciphFn = forge.des.createEncryptionCipher;\n break;\n\n default:\n throw new Error('Unsupported symmetric cipher, OID ' + cipher);\n }\n\n if(key === undefined) {\n key = forge.util.createBuffer(forge.random.getBytes(keyLen));\n } else if(key.length() != keyLen) {\n throw new Error('Symmetric key has wrong length; ' +\n 'got ' + key.length() + ' bytes, expected ' + keyLen + '.');\n }\n\n // Keep a copy of the key & IV in the object, so the caller can\n // use it for whatever reason.\n msg.encryptedContent.algorithm = cipher;\n msg.encryptedContent.key = key;\n msg.encryptedContent.parameter = forge.util.createBuffer(\n forge.random.getBytes(ivLen));\n\n var ciph = ciphFn(key);\n ciph.start(msg.encryptedContent.parameter.copy());\n ciph.update(msg.content);\n\n // The finish function does PKCS#7 padding by default, therefore\n // no action required by us.\n if(!ciph.finish()) {\n throw new Error('Symmetric encryption failed.');\n }\n\n msg.encryptedContent.content = ciph.output;\n }\n\n // Part 2: asymmetric encryption for each recipient\n for(var i = 0; i < msg.recipients.length; ++i) {\n var recipient = msg.recipients[i];\n\n // Nothing to do, encryption already done.\n if(recipient.encryptedContent.content !== undefined) {\n continue;\n }\n\n switch(recipient.encryptedContent.algorithm) {\n case forge.pki.oids.rsaEncryption:\n recipient.encryptedContent.content =\n recipient.encryptedContent.key.encrypt(\n msg.encryptedContent.key.data);\n break;\n\n default:\n throw new Error('Unsupported asymmetric cipher, OID ' +\n recipient.encryptedContent.algorithm);\n }\n }\n }\n };\n return msg;\n};\n\n/**\n * Converts a single recipient from an ASN.1 object.\n *\n * @param obj the ASN.1 RecipientInfo.\n *\n * @return the recipient object.\n */\nfunction _recipientFromAsn1(obj) {\n // validate EnvelopedData content block and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.recipientInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 RecipientInfo. ' +\n 'ASN.1 object is not an PKCS#7 RecipientInfo.');\n error.errors = errors;\n throw error;\n }\n\n return {\n version: capture.version.charCodeAt(0),\n issuer: forge.pki.RDNAttributesAsArray(capture.issuer),\n serialNumber: forge.util.createBuffer(capture.serial).toHex(),\n encryptedContent: {\n algorithm: asn1.derToOid(capture.encAlgorithm),\n parameter: capture.encParameter ? 
capture.encParameter.value : undefined,\n content: capture.encKey\n }\n };\n}\n\n/**\n * Converts a single recipient object to an ASN.1 object.\n *\n * @param obj the recipient object.\n *\n * @return the ASN.1 RecipientInfo.\n */\nfunction _recipientToAsn1(obj) {\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(obj.version).getBytes()),\n // IssuerAndSerialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Name\n forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}),\n // Serial\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(obj.serialNumber))\n ]),\n // KeyEncryptionAlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.encryptedContent.algorithm).getBytes()),\n // Parameter, force NULL, only RSA supported for now.\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // EncryptedKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n obj.encryptedContent.content)\n ]);\n}\n\n/**\n * Map a set of RecipientInfo ASN.1 objects to recipient objects.\n *\n * @param infos an array of ASN.1 representations RecipientInfo (i.e. SET OF).\n *\n * @return an array of recipient objects.\n */\nfunction _recipientsFromAsn1(infos) {\n var ret = [];\n for(var i = 0; i < infos.length; ++i) {\n ret.push(_recipientFromAsn1(infos[i]));\n }\n return ret;\n}\n\n/**\n * Map an array of recipient objects to ASN.1 RecipientInfo objects.\n *\n * @param recipients an array of recipientInfo objects.\n *\n * @return an array of ASN.1 RecipientInfos.\n */\nfunction _recipientsToAsn1(recipients) {\n var ret = [];\n for(var i = 0; i < recipients.length; ++i) {\n ret.push(_recipientToAsn1(recipients[i]));\n }\n return ret;\n}\n\n/**\n * Converts a single signer from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a SignerInfo.\n *\n * @return the signer object.\n */\nfunction _signerFromAsn1(obj) {\n // validate EnvelopedData content block and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.signerInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 SignerInfo. 
' +\n 'ASN.1 object is not an PKCS#7 SignerInfo.');\n error.errors = errors;\n throw error;\n }\n\n var rval = {\n version: capture.version.charCodeAt(0),\n issuer: forge.pki.RDNAttributesAsArray(capture.issuer),\n serialNumber: forge.util.createBuffer(capture.serial).toHex(),\n digestAlgorithm: asn1.derToOid(capture.digestAlgorithm),\n signatureAlgorithm: asn1.derToOid(capture.signatureAlgorithm),\n signature: capture.signature,\n authenticatedAttributes: [],\n unauthenticatedAttributes: []\n };\n\n // TODO: convert attributes\n var authenticatedAttributes = capture.authenticatedAttributes || [];\n var unauthenticatedAttributes = capture.unauthenticatedAttributes || [];\n\n return rval;\n}\n\n/**\n * Converts a single signerInfo object to an ASN.1 object.\n *\n * @param obj the signerInfo object.\n *\n * @return the ASN.1 representation of a SignerInfo.\n */\nfunction _signerToAsn1(obj) {\n // SignerInfo\n var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(obj.version).getBytes()),\n // issuerAndSerialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // name\n forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}),\n // serial\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(obj.serialNumber))\n ]),\n // digestAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.digestAlgorithm).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ]);\n\n // authenticatedAttributes (OPTIONAL)\n if(obj.authenticatedAttributesAsn1) {\n // add ASN.1 previously generated during signing\n rval.value.push(obj.authenticatedAttributesAsn1);\n }\n\n // digestEncryptionAlgorithm\n rval.value.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.signatureAlgorithm).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n\n // encryptedDigest\n rval.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, obj.signature));\n\n // unauthenticatedAttributes (OPTIONAL)\n if(obj.unauthenticatedAttributes.length > 0) {\n // [1] IMPLICIT\n var attrsAsn1 = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, []);\n for(var i = 0; i < obj.unauthenticatedAttributes.length; ++i) {\n var attr = obj.unauthenticatedAttributes[i];\n attrsAsn1.values.push(_attributeToAsn1(attr));\n }\n rval.value.push(attrsAsn1);\n }\n\n return rval;\n}\n\n/**\n * Map a set of SignerInfo ASN.1 objects to an array of signer objects.\n *\n * @param signerInfoAsn1s an array of ASN.1 SignerInfos (i.e. 
SET OF).\n *\n * @return an array of signers objects.\n */\nfunction _signersFromAsn1(signerInfoAsn1s) {\n var ret = [];\n for(var i = 0; i < signerInfoAsn1s.length; ++i) {\n ret.push(_signerFromAsn1(signerInfoAsn1s[i]));\n }\n return ret;\n}\n\n/**\n * Map an array of signer objects to ASN.1 objects.\n *\n * @param signers an array of signer objects.\n *\n * @return an array of ASN.1 SignerInfos.\n */\nfunction _signersToAsn1(signers) {\n var ret = [];\n for(var i = 0; i < signers.length; ++i) {\n ret.push(_signerToAsn1(signers[i]));\n }\n return ret;\n}\n\n/**\n * Convert an attribute object to an ASN.1 Attribute.\n *\n * @param attr the attribute object.\n *\n * @return the ASN.1 Attribute.\n */\nfunction _attributeToAsn1(attr) {\n var value;\n\n // TODO: generalize to support more attributes\n if(attr.type === forge.pki.oids.contentType) {\n value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.value).getBytes());\n } else if(attr.type === forge.pki.oids.messageDigest) {\n value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n attr.value.bytes());\n } else if(attr.type === forge.pki.oids.signingTime) {\n /* Note per RFC 2985: Dates between 1 January 1950 and 31 December 2049\n (inclusive) MUST be encoded as UTCTime. Any dates with year values\n before 1950 or after 2049 MUST be encoded as GeneralizedTime. [Further,]\n UTCTime values MUST be expressed in Greenwich Mean Time (Zulu) and MUST\n include seconds (i.e., times are YYMMDDHHMMSSZ), even where the\n number of seconds is zero. Midnight (GMT) must be represented as\n \"YYMMDD000000Z\". */\n // TODO: make these module-level constants\n var jan_1_1950 = new Date('1950-01-01T00:00:00Z');\n var jan_1_2050 = new Date('2050-01-01T00:00:00Z');\n var date = attr.value;\n if(typeof date === 'string') {\n // try to parse date\n var timestamp = Date.parse(date);\n if(!isNaN(timestamp)) {\n date = new Date(timestamp);\n } else if(date.length === 13) {\n // YYMMDDHHMMSSZ (13 chars for UTCTime)\n date = asn1.utcTimeToDate(date);\n } else {\n // assume generalized time\n date = asn1.generalizedTimeToDate(date);\n }\n }\n\n if(date >= jan_1_1950 && date < jan_1_2050) {\n value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false,\n asn1.dateToUtcTime(date));\n } else {\n value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false,\n asn1.dateToGeneralizedTime(date));\n }\n }\n\n // TODO: expose as common API call\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.type).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n // AttributeValue\n value\n ])\n ]);\n}\n\n/**\n * Map messages encrypted content to ASN.1 objects.\n *\n * @param ec The encryptedContent object of the message.\n *\n * @return ASN.1 representation of the encryptedContent object (SEQUENCE).\n */\nfunction _encryptedContentToAsn1(ec) {\n return [\n // ContentType, always Data for the moment\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(forge.pki.oids.data).getBytes()),\n // ContentEncryptionAlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(ec.algorithm).getBytes()),\n // Parameters (IV)\n !ec.parameter ?\n undefined :\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n ec.parameter.getBytes())\n ]),\n // [0] EncryptedContent\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n ec.content.getBytes())\n ])\n ];\n}\n\n/**\n * Reads the \"common part\" of an PKCS#7 content block (in ASN.1 format)\n *\n * This function reads the \"common part\" of the PKCS#7 content blocks\n * EncryptedData and EnvelopedData, i.e. version number and symmetrically\n * encrypted content block.\n *\n * The result of the ASN.1 validate and capture process is returned\n * to allow the caller to extract further data, e.g. the list of recipients\n * in case of a EnvelopedData object.\n *\n * @param msg the PKCS#7 object to read the data to.\n * @param obj the ASN.1 representation of the content block.\n * @param validator the ASN.1 structure validator object to use.\n *\n * @return the value map captured by validator object.\n */\nfunction _fromAsn1(msg, obj, validator) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, validator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 message. ' +\n 'ASN.1 object is not a supported PKCS#7 message.');\n error.errors = error;\n throw error;\n }\n\n // Check contentType, so far we only support (raw) Data.\n var contentType = asn1.derToOid(capture.contentType);\n if(contentType !== forge.pki.oids.data) {\n throw new Error('Unsupported PKCS#7 message. ' +\n 'Only wrapped ContentType Data supported.');\n }\n\n if(capture.encryptedContent) {\n var content = '';\n if(forge.util.isArray(capture.encryptedContent)) {\n for(var i = 0; i < capture.encryptedContent.length; ++i) {\n if(capture.encryptedContent[i].type !== asn1.Type.OCTETSTRING) {\n throw new Error('Malformed PKCS#7 message, expecting encrypted ' +\n 'content constructed of only OCTET STRING objects.');\n }\n content += capture.encryptedContent[i].value;\n }\n } else {\n content = capture.encryptedContent;\n }\n msg.encryptedContent = {\n algorithm: asn1.derToOid(capture.encAlgorithm),\n parameter: forge.util.createBuffer(capture.encParameter.value),\n content: forge.util.createBuffer(content)\n };\n }\n\n if(capture.content) {\n var content = '';\n if(forge.util.isArray(capture.content)) {\n for(var i = 0; i < capture.content.length; ++i) {\n if(capture.content[i].type !== asn1.Type.OCTETSTRING) {\n throw new Error('Malformed PKCS#7 message, expecting ' +\n 'content constructed of only OCTET STRING objects.');\n }\n content += capture.content[i].value;\n }\n } else {\n content = capture.content;\n }\n msg.content = forge.util.createBuffer(content);\n }\n\n msg.version = capture.version.charCodeAt(0);\n msg.rawCapture = capture;\n\n return capture;\n}\n\n/**\n * Decrypt the symmetrically encrypted content block of the PKCS#7 message.\n *\n * Decryption is skipped in case the PKCS#7 message object already has a\n * (decrypted) content attribute. 
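// A minimal sketch of the decryption path described here, assuming `p7Pem` holds a PKCS#7
// EnvelopedData message in PEM form and `certPem`/`keyPem` belong to one of its recipients.
// forge.pkcs7.messageFromPem is defined earlier in forge's pkcs7 module; decrypt() unwraps
// the symmetric key with the private key and then runs _decryptContent as documented above.
var forge = require('node-forge');

function openEnvelope(p7Pem, certPem, keyPem) {
  var p7 = forge.pkcs7.messageFromPem(p7Pem);
  var cert = forge.pki.certificateFromPem(certPem);
  var key = forge.pki.privateKeyFromPem(keyPem);
  var recipient = p7.findRecipient(cert);  // match by issuer + serial number
  p7.decrypt(recipient, key);
  return p7.content;                        // forge ByteBuffer holding the plaintext
}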
The algorithm, key and cipher parameters\n * (probably the iv) are taken from the encryptedContent attribute of the\n * message object.\n *\n * @param The PKCS#7 message object.\n */\nfunction _decryptContent(msg) {\n if(msg.encryptedContent.key === undefined) {\n throw new Error('Symmetric key not available.');\n }\n\n if(msg.content === undefined) {\n var ciph;\n\n switch(msg.encryptedContent.algorithm) {\n case forge.pki.oids['aes128-CBC']:\n case forge.pki.oids['aes192-CBC']:\n case forge.pki.oids['aes256-CBC']:\n ciph = forge.aes.createDecryptionCipher(msg.encryptedContent.key);\n break;\n\n case forge.pki.oids['desCBC']:\n case forge.pki.oids['des-EDE3-CBC']:\n ciph = forge.des.createDecryptionCipher(msg.encryptedContent.key);\n break;\n\n default:\n throw new Error('Unsupported symmetric cipher, OID ' +\n msg.encryptedContent.algorithm);\n }\n ciph.start(msg.encryptedContent.parameter);\n ciph.update(msg.encryptedContent.content);\n\n if(!ciph.finish()) {\n throw new Error('Symmetric decryption failed.');\n }\n\n msg.content = ciph.output;\n }\n}\n","/**\n * Javascript implementation of ASN.1 validators for PKCS#7 v1.5.\n *\n * @author Dave Longley\n * @author Stefan Siegl\n *\n * Copyright (c) 2012-2015 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * The ASN.1 representation of PKCS#7 is as follows\n * (see RFC #2315 for details, http://www.ietf.org/rfc/rfc2315.txt):\n *\n * A PKCS#7 message consists of a ContentInfo on root level, which may\n * contain any number of further ContentInfo nested into it.\n *\n * ContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL\n * }\n *\n * ContentType ::= OBJECT IDENTIFIER\n *\n * EnvelopedData ::= SEQUENCE {\n * version Version,\n * recipientInfos RecipientInfos,\n * encryptedContentInfo EncryptedContentInfo\n * }\n *\n * EncryptedData ::= SEQUENCE {\n * version Version,\n * encryptedContentInfo EncryptedContentInfo\n * }\n *\n * id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)\n * us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 }\n *\n * SignedData ::= SEQUENCE {\n * version INTEGER,\n * digestAlgorithms DigestAlgorithmIdentifiers,\n * contentInfo ContentInfo,\n * certificates [0] IMPLICIT Certificates OPTIONAL,\n * crls [1] IMPLICIT CertificateRevocationLists OPTIONAL,\n * signerInfos SignerInfos\n * }\n *\n * SignerInfos ::= SET OF SignerInfo\n *\n * SignerInfo ::= SEQUENCE {\n * version Version,\n * issuerAndSerialNumber IssuerAndSerialNumber,\n * digestAlgorithm DigestAlgorithmIdentifier,\n * authenticatedAttributes [0] IMPLICIT Attributes OPTIONAL,\n * digestEncryptionAlgorithm DigestEncryptionAlgorithmIdentifier,\n * encryptedDigest EncryptedDigest,\n * unauthenticatedAttributes [1] IMPLICIT Attributes OPTIONAL\n * }\n *\n * EncryptedDigest ::= OCTET STRING\n *\n * Attributes ::= SET OF Attribute\n *\n * Attribute ::= SEQUENCE {\n * attrType OBJECT IDENTIFIER,\n * attrValues SET OF AttributeValue\n * }\n *\n * AttributeValue ::= ANY\n *\n * Version ::= INTEGER\n *\n * RecipientInfos ::= SET OF RecipientInfo\n *\n * EncryptedContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,\n * encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL\n * }\n *\n * ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. 
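// A minimal sketch of how the validator objects defined below are consumed, assuming
// `derBytes` is a DER-encoded ContentInfo as a binary string. asn1.validate walks the
// parsed ASN.1 tree against a validator and fills `capture` with the fields named by
// its capture/captureAsn1 entries.
var forge = require('node-forge');

function readContentType(derBytes) {
  var asn1 = forge.asn1;
  var obj = asn1.fromDer(derBytes);
  var capture = {};
  var errors = [];
  if(!asn1.validate(obj, forge.pkcs7.asn1.contentInfoValidator, capture, errors)) {
    throw new Error('Not a PKCS#7 ContentInfo: ' + errors.join('; '));
  }
  // capture.contentType holds the DER-encoded OID, e.g. forge.pki.oids.envelopedData
  return asn1.derToOid(capture.contentType);
}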
In the case of AES and DES3, there is only one,\n * the IV.\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * EncryptedContent ::= OCTET STRING\n *\n * RecipientInfo ::= SEQUENCE {\n * version Version,\n * issuerAndSerialNumber IssuerAndSerialNumber,\n * keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,\n * encryptedKey EncryptedKey\n * }\n *\n * IssuerAndSerialNumber ::= SEQUENCE {\n * issuer Name,\n * serialNumber CertificateSerialNumber\n * }\n *\n * CertificateSerialNumber ::= INTEGER\n *\n * KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * EncryptedKey ::= OCTET STRING\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./util');\n\n// shortcut for ASN.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for PKCS#7 API\nvar p7v = module.exports = forge.pkcs7asn1 = forge.pkcs7asn1 || {};\nforge.pkcs7 = forge.pkcs7 || {};\nforge.pkcs7.asn1 = p7v;\n\nvar contentInfoValidator = {\n name: 'ContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'ContentInfo.ContentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'ContentInfo.content',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n captureAsn1: 'content'\n }]\n};\np7v.contentInfoValidator = contentInfoValidator;\n\nvar encryptedContentInfoValidator = {\n name: 'EncryptedContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedContentInfo.contentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encAlgorithm'\n }, {\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n captureAsn1: 'encParameter'\n }]\n }, {\n name: 'EncryptedContentInfo.encryptedContent',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n /* The PKCS#7 structure output by OpenSSL somewhat differs from what\n * other implementations do generate.\n *\n * OpenSSL generates a structure like this:\n * SEQUENCE {\n * ...\n * [0]\n * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38\n * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45\n * ...\n * }\n *\n * Whereas other implementations (and this PKCS#7 module) generate:\n * SEQUENCE {\n * ...\n * [0] {\n * OCTET STRING\n * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38\n * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45\n * ...\n * }\n * }\n *\n * In order to support both, we just capture the context specific\n * field here. 
The OCTET STRING bit is removed below.\n */\n capture: 'encryptedContent',\n captureAsn1: 'encryptedContentAsn1'\n }]\n};\n\np7v.envelopedDataValidator = {\n name: 'EnvelopedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EnvelopedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'EnvelopedData.RecipientInfos',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n captureAsn1: 'recipientInfos'\n }].concat(encryptedContentInfoValidator)\n};\n\np7v.encryptedDataValidator = {\n name: 'EncryptedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }].concat(encryptedContentInfoValidator)\n};\n\nvar signerValidator = {\n name: 'SignerInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false\n }, {\n name: 'SignerInfo.issuerAndSerialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.issuerAndSerialNumber.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'issuer'\n }, {\n name: 'SignerInfo.issuerAndSerialNumber.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'serial'\n }]\n }, {\n name: 'SignerInfo.digestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.digestAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'digestAlgorithm'\n }, {\n name: 'SignerInfo.digestAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n constructed: false,\n captureAsn1: 'digestParameter',\n optional: true\n }]\n }, {\n name: 'SignerInfo.authenticatedAttributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n capture: 'authenticatedAttributes'\n }, {\n name: 'SignerInfo.digestEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n capture: 'signatureAlgorithm'\n }, {\n name: 'SignerInfo.encryptedDigest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'signature'\n }, {\n name: 'SignerInfo.unauthenticatedAttributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n optional: true,\n capture: 'unauthenticatedAttributes'\n }]\n};\n\np7v.signedDataValidator = {\n name: 'SignedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'SignedData.DigestAlgorithms',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n captureAsn1: 'digestAlgorithms'\n },\n contentInfoValidator,\n {\n name: 'SignedData.Certificates',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n optional: true,\n captureAsn1: 'certificates'\n }, {\n name: 'SignedData.CertificateRevocationLists',\n tagClass: 
asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n optional: true,\n captureAsn1: 'crls'\n }, {\n name: 'SignedData.SignerInfos',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n capture: 'signerInfos',\n optional: true,\n value: [signerValidator]\n }]\n};\n\np7v.recipientInfoValidator = {\n name: 'RecipientInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'RecipientInfo.issuerAndSerial',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.issuerAndSerial.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'issuer'\n }, {\n name: 'RecipientInfo.issuerAndSerial.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'serial'\n }]\n }, {\n name: 'RecipientInfo.keyEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.keyEncryptionAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encAlgorithm'\n }, {\n name: 'RecipientInfo.keyEncryptionAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n constructed: false,\n captureAsn1: 'encParameter',\n optional: true\n }]\n }, {\n name: 'RecipientInfo.encryptedKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encKey'\n }]\n};\n","/**\n * Javascript implementation of a basic Public Key Infrastructure, including\n * support for RSA public and private keys.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./oids');\nrequire('./pbe');\nrequire('./pem');\nrequire('./pbkdf2');\nrequire('./pkcs12');\nrequire('./pss');\nrequire('./rsa');\nrequire('./util');\nrequire('./x509');\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Public Key Infrastructure (PKI) implementation. */\nvar pki = module.exports = forge.pki = forge.pki || {};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED. 
Use pem.decode() instead.\n *\n * Converts PEM-formatted data to DER.\n *\n * @param pem the PEM-formatted data.\n *\n * @return the DER-formatted data.\n */\npki.pemToDer = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert PEM to DER; PEM is encrypted.');\n }\n return forge.util.createBuffer(msg.body);\n};\n\n/**\n * Converts an RSA private key from PEM format.\n *\n * @param pem the PEM-formatted private key.\n *\n * @return the private key.\n */\npki.privateKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'PRIVATE KEY' && msg.type !== 'RSA PRIVATE KEY') {\n var error = new Error('Could not convert private key from PEM; PEM ' +\n 'header type is not \"PRIVATE KEY\" or \"RSA PRIVATE KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert private key from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return pki.privateKeyFromAsn1(obj);\n};\n\n/**\n * Converts an RSA private key to PEM format.\n *\n * @param key the private key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted private key.\n */\npki.privateKeyToPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'RSA PRIVATE KEY',\n body: asn1.toDer(pki.privateKeyToAsn1(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts a PrivateKeyInfo to PEM format.\n *\n * @param pki the PrivateKeyInfo.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted private key.\n */\npki.privateKeyInfoToPem = function(pki, maxline) {\n // convert to DER, then PEM-encode\n var msg = {\n type: 'PRIVATE KEY',\n body: asn1.toDer(pki).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n","/**\n * Prime number generation API.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\nrequire('./jsbn');\nrequire('./random');\n\n(function() {\n\n// forge.prime already defined\nif(forge.prime) {\n module.exports = forge.prime;\n return;\n}\n\n/* PRIME API */\nvar prime = module.exports = forge.prime = forge.prime || {};\n\nvar BigInteger = forge.jsbn.BigInteger;\n\n// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29\nvar GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];\nvar THIRTY = new BigInteger(null);\nTHIRTY.fromInt(30);\nvar op_or = function(x, y) {return x|y;};\n\n/**\n * Generates a random probable prime with the given number of bits.\n *\n * Alternative algorithms can be specified by name as a string or as an\n * object with custom options like so:\n *\n * {\n * name: 'PRIMEINC',\n * options: {\n * maxBlockTime: ,\n * millerRabinTests: ,\n * workerScript: ,\n * workers: .\n * workLoad: the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * }\n * }\n *\n * @param bits the number of bits for the prime number.\n * @param options the options to use.\n * [algorithm] the algorithm to use (default: 'PRIMEINC').\n * [prng] a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n *\n * @return callback(err, num) called once the operation completes.\n 
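// A minimal usage sketch of the probable-prime API documented above. The bit size is an
// illustrative choice; omitting `options` selects the default PRIMEINC algorithm, and the
// result arrives via the callback as a forge.jsbn.BigInteger.
var forge = require('node-forge');

forge.prime.generateProbablePrime(1024, function(err, num) {
  if(err) {
    return console.error(err);
  }
  console.log('probable prime (hex):', num.toString(16));
});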
*/\nprime.generateProbablePrime = function(bits, options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n\n // default to PRIMEINC algorithm\n var algorithm = options.algorithm || 'PRIMEINC';\n if(typeof algorithm === 'string') {\n algorithm = {name: algorithm};\n }\n algorithm.options = algorithm.options || {};\n\n // create prng with api that matches BigInteger secure random\n var prng = options.prng || forge.random;\n var rng = {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n var b = prng.getBytesSync(x.length);\n for(var i = 0; i < x.length; ++i) {\n x[i] = b.charCodeAt(i);\n }\n }\n };\n\n if(algorithm.name === 'PRIMEINC') {\n return primeincFindPrime(bits, rng, algorithm.options, callback);\n }\n\n throw new Error('Invalid prime generation algorithm: ' + algorithm.name);\n};\n\nfunction primeincFindPrime(bits, rng, options, callback) {\n if('workers' in options) {\n return primeincFindPrimeWithWorkers(bits, rng, options, callback);\n }\n return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);\n}\n\nfunction primeincFindPrimeWithoutWorkers(bits, rng, options, callback) {\n // initialize random number\n var num = generateRandom(bits, rng);\n\n /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The\n number we are given is always aligned at 30k + 1. Each time the number is\n determined not to be prime we add to get to the next 'i', eg: if the number\n was at 30k + 1 we add 6. */\n var deltaIdx = 0;\n\n // get required number of MR tests\n var mrTests = getMillerRabinTests(num.bitLength());\n if('millerRabinTests' in options) {\n mrTests = options.millerRabinTests;\n }\n\n // find prime nearest to 'num' for maxBlockTime ms\n // 10 ms gives 5ms of leeway for other calculations before dropping\n // below 60fps (1000/60 == 16.67), but in reality, the number will\n // likely be higher due to an 'atomic' big int modPow\n var maxBlockTime = 10;\n if('maxBlockTime' in options) {\n maxBlockTime = options.maxBlockTime;\n }\n\n _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);\n}\n\nfunction _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) {\n var start = +new Date();\n do {\n // overflow, regenerate random number\n if(num.bitLength() > bits) {\n num = generateRandom(bits, rng);\n }\n // do primality test\n if(num.isProbablePrime(mrTests)) {\n return callback(null, num);\n }\n // get next potential prime\n num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);\n } while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime));\n\n // keep trying later\n forge.util.setImmediate(function() {\n _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);\n });\n}\n\n// NOTE: This algorithm is indeterminate in nature because workers\n// run in parallel looking at different segments of numbers. 
Even if this\n// algorithm is run twice with the same input from a predictable RNG, it\n// may produce different outputs.\nfunction primeincFindPrimeWithWorkers(bits, rng, options, callback) {\n // web workers unavailable\n if(typeof Worker === 'undefined') {\n return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);\n }\n\n // initialize random number\n var num = generateRandom(bits, rng);\n\n // use web workers to generate keys\n var numWorkers = options.workers;\n var workLoad = options.workLoad || 100;\n var range = workLoad * 30 / 8;\n var workerScript = options.workerScript || 'forge/prime.worker.js';\n if(numWorkers === -1) {\n return forge.util.estimateCores(function(err, cores) {\n if(err) {\n // default to 2\n cores = 2;\n }\n numWorkers = cores - 1;\n generate();\n });\n }\n generate();\n\n function generate() {\n // require at least 1 worker\n numWorkers = Math.max(1, numWorkers);\n\n // TODO: consider optimizing by starting workers outside getPrime() ...\n // note that in order to clean up they will have to be made internally\n // asynchronous which may actually be slower\n\n // start workers immediately\n var workers = [];\n for(var i = 0; i < numWorkers; ++i) {\n // FIXME: fix path or use blob URLs\n workers[i] = new Worker(workerScript);\n }\n var running = numWorkers;\n\n // listen for requests from workers and assign ranges to find prime\n for(var i = 0; i < numWorkers; ++i) {\n workers[i].addEventListener('message', workerMessage);\n }\n\n /* Note: The distribution of random numbers is unknown. Therefore, each\n web worker is continuously allocated a range of numbers to check for a\n random number until one is found.\n\n Every 30 numbers will be checked just 8 times, because prime numbers\n have the form:\n\n 30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this)\n\n Therefore, if we want a web worker to run N checks before asking for\n a new range of numbers, each range must contain N*30/8 numbers.\n\n For 100 checks (workLoad), this is a range of 375. 
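// Worked numbers for the range sizing explained above: only 8 residues in every block of
// 30 consecutive integers can be prime, so a worker asked to run `workLoad` primality
// checks is handed a range of workLoad * 30 / 8 candidates.
var workLoad = 100;
var range = workLoad * 30 / 8;  // 375, the figure quoted in the comment above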
*/\n\n var found = false;\n function workerMessage(e) {\n // ignore message, prime already found\n if(found) {\n return;\n }\n\n --running;\n var data = e.data;\n if(data.found) {\n // terminate all workers\n for(var i = 0; i < workers.length; ++i) {\n workers[i].terminate();\n }\n found = true;\n return callback(null, new BigInteger(data.prime, 16));\n }\n\n // overflow, regenerate random number\n if(num.bitLength() > bits) {\n num = generateRandom(bits, rng);\n }\n\n // assign new range to check\n var hex = num.toString(16);\n\n // start prime search\n e.target.postMessage({\n hex: hex,\n workLoad: workLoad\n });\n\n num.dAddOffset(range, 0);\n }\n }\n}\n\n/**\n * Generates a random number using the given number of bits and RNG.\n *\n * @param bits the number of bits for the number.\n * @param rng the random number generator to use.\n *\n * @return the random number.\n */\nfunction generateRandom(bits, rng) {\n var num = new BigInteger(bits, rng);\n // force MSB set\n var bits1 = bits - 1;\n if(!num.testBit(bits1)) {\n num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num);\n }\n // align number on 30k+1 boundary\n num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0);\n return num;\n}\n\n/**\n * Returns the required number of Miller-Rabin tests to generate a\n * prime with an error probability of (1/2)^80.\n *\n * See Handbook of Applied Cryptography Chapter 4, Table 4.4.\n *\n * @param bits the bit size.\n *\n * @return the required number of iterations.\n */\nfunction getMillerRabinTests(bits) {\n if(bits <= 100) return 27;\n if(bits <= 150) return 18;\n if(bits <= 200) return 15;\n if(bits <= 250) return 12;\n if(bits <= 300) return 9;\n if(bits <= 350) return 8;\n if(bits <= 400) return 7;\n if(bits <= 500) return 6;\n if(bits <= 600) return 5;\n if(bits <= 800) return 4;\n if(bits <= 1250) return 3;\n return 2;\n}\n\n})();\n","/**\n * A javascript implementation of a cryptographically-secure\n * Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed\n * here though the use of SHA-256 is not enforced; when generating an\n * a PRNG context, the hashing algorithm and block cipher used for\n * the generator are specified via a plugin.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nvar _crypto = null;\nif(forge.util.isNodejs && !forge.options.usePureJavaScript &&\n !process.versions['node-webkit']) {\n _crypto = require('crypto');\n}\n\n/* PRNG API */\nvar prng = module.exports = forge.prng = forge.prng || {};\n\n/**\n * Creates a new PRNG context.\n *\n * A PRNG plugin must be passed in that will provide:\n *\n * 1. A function that initializes the key and seed of a PRNG context. It\n * will be given a 16 byte key and a 16 byte seed. Any key expansion\n * or transformation of the seed from a byte string into an array of\n * integers (or similar) should be performed.\n * 2. The cryptographic function used by the generator. It takes a key and\n * a seed.\n * 3. A seed increment function. It takes the seed and returns seed + 1.\n * 4. 
An api to create a message digest.\n *\n * For an example, see random.js.\n *\n * @param plugin the PRNG plugin to use.\n */\nprng.create = function(plugin) {\n var ctx = {\n plugin: plugin,\n key: null,\n seed: null,\n time: null,\n // number of reseeds so far\n reseeds: 0,\n // amount of data generated so far\n generated: 0,\n // no initial key bytes\n keyBytes: ''\n };\n\n // create 32 entropy pools (each is a message digest)\n var md = plugin.md;\n var pools = new Array(32);\n for(var i = 0; i < 32; ++i) {\n pools[i] = md.create();\n }\n ctx.pools = pools;\n\n // entropy pools are written to cyclically, starting at index 0\n ctx.pool = 0;\n\n /**\n * Generates random bytes. The bytes may be generated synchronously or\n * asynchronously. Web workers must use the asynchronous interface or\n * else the behavior is undefined.\n *\n * @param count the number of random bytes to generate.\n * @param [callback(err, bytes)] called once the operation completes.\n *\n * @return count random bytes as a string.\n */\n ctx.generate = function(count, callback) {\n // do synchronously\n if(!callback) {\n return ctx.generateSync(count);\n }\n\n // simple generator using counter-based CBC\n var cipher = ctx.plugin.cipher;\n var increment = ctx.plugin.increment;\n var formatKey = ctx.plugin.formatKey;\n var formatSeed = ctx.plugin.formatSeed;\n var b = forge.util.createBuffer();\n\n // paranoid deviation from Fortuna:\n // reset key for every request to protect previously\n // generated random bytes should the key be discovered;\n // there is no 100ms based reseeding because of this\n // forced reseed for every `generate` call\n ctx.key = null;\n\n generate();\n\n function generate(err) {\n if(err) {\n return callback(err);\n }\n\n // sufficient bytes generated\n if(b.length() >= count) {\n return callback(null, b.getBytes(count));\n }\n\n // if amount of data generated is greater than 1 MiB, trigger reseed\n if(ctx.generated > 0xfffff) {\n ctx.key = null;\n }\n\n if(ctx.key === null) {\n // prevent stack overflow\n return forge.util.nextTick(function() {\n _reseed(generate);\n });\n }\n\n // generate the random bytes\n var bytes = cipher(ctx.key, ctx.seed);\n ctx.generated += bytes.length;\n b.putBytes(bytes);\n\n // generate bytes for a new key and seed\n ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed)));\n ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));\n\n forge.util.setImmediate(generate);\n }\n };\n\n /**\n * Generates random bytes synchronously.\n *\n * @param count the number of random bytes to generate.\n *\n * @return count random bytes as a string.\n */\n ctx.generateSync = function(count) {\n // simple generator using counter-based CBC\n var cipher = ctx.plugin.cipher;\n var increment = ctx.plugin.increment;\n var formatKey = ctx.plugin.formatKey;\n var formatSeed = ctx.plugin.formatSeed;\n\n // paranoid deviation from Fortuna:\n // reset key for every request to protect previously\n // generated random bytes should the key be discovered;\n // there is no 100ms based reseeding because of this\n // forced reseed for every `generateSync` call\n ctx.key = null;\n\n var b = forge.util.createBuffer();\n while(b.length() < count) {\n // if amount of data generated is greater than 1 MiB, trigger reseed\n if(ctx.generated > 0xfffff) {\n ctx.key = null;\n }\n\n if(ctx.key === null) {\n _reseedSync();\n }\n\n // generate the random bytes\n var bytes = cipher(ctx.key, ctx.seed);\n ctx.generated += bytes.length;\n b.putBytes(bytes);\n\n // generate bytes for a new key and seed\n ctx.key = 
formatKey(cipher(ctx.key, increment(ctx.seed)));\n ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));\n }\n\n return b.getBytes(count);\n };\n\n /**\n * Private function that asynchronously reseeds a generator.\n *\n * @param callback(err) called once the operation completes.\n */\n function _reseed(callback) {\n if(ctx.pools[0].messageLength >= 32) {\n _seed();\n return callback();\n }\n // not enough seed data...\n var needed = (32 - ctx.pools[0].messageLength) << 5;\n ctx.seedFile(needed, function(err, bytes) {\n if(err) {\n return callback(err);\n }\n ctx.collect(bytes);\n _seed();\n callback();\n });\n }\n\n /**\n * Private function that synchronously reseeds a generator.\n */\n function _reseedSync() {\n if(ctx.pools[0].messageLength >= 32) {\n return _seed();\n }\n // not enough seed data...\n var needed = (32 - ctx.pools[0].messageLength) << 5;\n ctx.collect(ctx.seedFileSync(needed));\n _seed();\n }\n\n /**\n * Private function that seeds a generator once enough bytes are available.\n */\n function _seed() {\n // update reseed count\n ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 0 : ctx.reseeds + 1;\n\n // goal is to update `key` via:\n // key = hash(key + s)\n // where 's' is all collected entropy from selected pools, then...\n\n // create a plugin-based message digest\n var md = ctx.plugin.md.create();\n\n // consume current key bytes\n md.update(ctx.keyBytes);\n\n // digest the entropy of pools whose index k meet the\n // condition 'n mod 2^k == 0' where n is the number of reseeds\n var _2powK = 1;\n for(var k = 0; k < 32; ++k) {\n if(ctx.reseeds % _2powK === 0) {\n md.update(ctx.pools[k].digest().getBytes());\n ctx.pools[k].start();\n }\n _2powK = _2powK << 1;\n }\n\n // get digest for key bytes\n ctx.keyBytes = md.digest().getBytes();\n\n // paranoid deviation from Fortuna:\n // update `seed` via `seed = hash(key)`\n // instead of initializing to zero once and only\n // ever incrementing it\n md.start();\n md.update(ctx.keyBytes);\n var seedBytes = md.digest().getBytes();\n\n // update state\n ctx.key = ctx.plugin.formatKey(ctx.keyBytes);\n ctx.seed = ctx.plugin.formatSeed(seedBytes);\n ctx.generated = 0;\n }\n\n /**\n * The built-in default seedFile. This seedFile is used when entropy\n * is needed immediately.\n *\n * @param needed the number of bytes that are needed.\n *\n * @return the random bytes.\n */\n function defaultSeedFile(needed) {\n // use window.crypto.getRandomValues strong source of entropy if available\n var getRandomValues = null;\n var globalScope = forge.util.globalScope;\n var _crypto = globalScope.crypto || globalScope.msCrypto;\n if(_crypto && _crypto.getRandomValues) {\n getRandomValues = function(arr) {\n return _crypto.getRandomValues(arr);\n };\n }\n\n var b = forge.util.createBuffer();\n if(getRandomValues) {\n while(b.length() < needed) {\n // max byte length is 65536 before QuotaExceededError is thrown\n // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues\n var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4);\n var entropy = new Uint32Array(Math.floor(count));\n try {\n getRandomValues(entropy);\n for(var i = 0; i < entropy.length; ++i) {\n b.putInt32(entropy[i]);\n }\n } catch(e) {\n /* only ignore QuotaExceededError */\n if(!(typeof QuotaExceededError !== 'undefined' &&\n e instanceof QuotaExceededError)) {\n throw e;\n }\n }\n }\n }\n\n // be sad and add some weak random data\n if(b.length() < needed) {\n /* Draws from Park-Miller \"minimal standard\" 31 bit PRNG,\n implemented with David G. 
Carta's optimization: with 32 bit math\n and without division (Public Domain). */\n var hi, lo, next;\n var seed = Math.floor(Math.random() * 0x010000);\n while(b.length() < needed) {\n lo = 16807 * (seed & 0xFFFF);\n hi = 16807 * (seed >> 16);\n lo += (hi & 0x7FFF) << 16;\n lo += hi >> 15;\n lo = (lo & 0x7FFFFFFF) + (lo >> 31);\n seed = lo & 0xFFFFFFFF;\n\n // consume lower 3 bytes of seed\n for(var i = 0; i < 3; ++i) {\n // throw in more pseudo random\n next = seed >>> (i << 3);\n next ^= Math.floor(Math.random() * 0x0100);\n b.putByte(next & 0xFF);\n }\n }\n }\n\n return b.getBytes(needed);\n }\n // initialize seed file APIs\n if(_crypto) {\n // use nodejs async API\n ctx.seedFile = function(needed, callback) {\n _crypto.randomBytes(needed, function(err, bytes) {\n if(err) {\n return callback(err);\n }\n callback(null, bytes.toString());\n });\n };\n // use nodejs sync API\n ctx.seedFileSync = function(needed) {\n return _crypto.randomBytes(needed).toString();\n };\n } else {\n ctx.seedFile = function(needed, callback) {\n try {\n callback(null, defaultSeedFile(needed));\n } catch(e) {\n callback(e);\n }\n };\n ctx.seedFileSync = defaultSeedFile;\n }\n\n /**\n * Adds entropy to a prng ctx's accumulator.\n *\n * @param bytes the bytes of entropy as a string.\n */\n ctx.collect = function(bytes) {\n // iterate over pools distributing entropy cyclically\n var count = bytes.length;\n for(var i = 0; i < count; ++i) {\n ctx.pools[ctx.pool].update(bytes.substr(i, 1));\n ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1;\n }\n };\n\n /**\n * Collects an integer of n bits.\n *\n * @param i the integer entropy.\n * @param n the number of bits in the integer.\n */\n ctx.collectInt = function(i, n) {\n var bytes = '';\n for(var x = 0; x < n; x += 8) {\n bytes += String.fromCharCode((i >> x) & 0xFF);\n }\n ctx.collect(bytes);\n };\n\n /**\n * Registers a Web Worker to receive immediate entropy from the main thread.\n * This method is required until Web Workers can access the native crypto\n * API. This method should be called twice for each created worker, once in\n * the main thread, and once in the worker itself.\n *\n * @param worker the worker to register.\n */\n ctx.registerWorker = function(worker) {\n // worker receives random bytes\n if(worker === self) {\n ctx.seedFile = function(needed, callback) {\n function listener(e) {\n var data = e.data;\n if(data.forge && data.forge.prng) {\n self.removeEventListener('message', listener);\n callback(data.forge.prng.err, data.forge.prng.bytes);\n }\n }\n self.addEventListener('message', listener);\n self.postMessage({forge: {prng: {needed: needed}}});\n };\n } else {\n // main thread sends random bytes upon request\n var listener = function(e) {\n var data = e.data;\n if(data.forge && data.forge.prng) {\n ctx.seedFile(data.forge.prng.needed, function(err, bytes) {\n worker.postMessage({forge: {prng: {err: err, bytes: bytes}}});\n });\n }\n };\n // TODO: do we need to remove the event listener when the worker dies?\n worker.addEventListener('message', listener);\n }\n };\n\n return ctx;\n};\n","/**\n * Javascript implementation of PKCS#1 PSS signature padding.\n *\n * @author Stefan Siegl\n *\n * Copyright (c) 2012 Stefan Siegl \n */\nvar forge = require('./forge');\nrequire('./random');\nrequire('./util');\n\n// shortcut for PSS API\nvar pss = module.exports = forge.pss = forge.pss || {};\n\n/**\n * Creates a PSS signature scheme object.\n *\n * There are several ways to provide a salt for encoding:\n *\n * 1. 
Specify the saltLength only and the built-in PRNG will generate it.\n * 2. Specify the saltLength and a custom PRNG with 'getBytesSync' defined that\n * will be used.\n * 3. Specify the salt itself as a forge.util.ByteBuffer.\n *\n * @param options the options to use:\n * md the message digest object to use, a forge md instance.\n * mgf the mask generation function to use, a forge mgf instance.\n * [saltLength] the length of the salt in octets.\n * [prng] the pseudo-random number generator to use to produce a salt.\n * [salt] the salt to use when encoding.\n *\n * @return a signature scheme object.\n */\npss.create = function(options) {\n // backwards compatibility w/legacy args: hash, mgf, sLen\n if(arguments.length === 3) {\n options = {\n md: arguments[0],\n mgf: arguments[1],\n saltLength: arguments[2]\n };\n }\n\n var hash = options.md;\n var mgf = options.mgf;\n var hLen = hash.digestLength;\n\n var salt_ = options.salt || null;\n if(typeof salt_ === 'string') {\n // assume binary-encoded string\n salt_ = forge.util.createBuffer(salt_);\n }\n\n var sLen;\n if('saltLength' in options) {\n sLen = options.saltLength;\n } else if(salt_ !== null) {\n sLen = salt_.length();\n } else {\n throw new Error('Salt length not specified or specific salt not given.');\n }\n\n if(salt_ !== null && salt_.length() !== sLen) {\n throw new Error('Given salt length does not match length of given salt.');\n }\n\n var prng = options.prng || forge.random;\n\n var pssobj = {};\n\n /**\n * Encodes a PSS signature.\n *\n * This function implements EMSA-PSS-ENCODE as per RFC 3447, section 9.1.1.\n *\n * @param md the message digest object with the hash to sign.\n * @param modsBits the length of the RSA modulus in bits.\n *\n * @return the encoded message as a binary-encoded string of length\n * ceil((modBits - 1) / 8).\n */\n pssobj.encode = function(md, modBits) {\n var i;\n var emBits = modBits - 1;\n var emLen = Math.ceil(emBits / 8);\n\n /* 2. Let mHash = Hash(M), an octet string of length hLen. */\n var mHash = md.digest().getBytes();\n\n /* 3. If emLen < hLen + sLen + 2, output \"encoding error\" and stop. */\n if(emLen < hLen + sLen + 2) {\n throw new Error('Message is too long to encrypt.');\n }\n\n /* 4. Generate a random octet string salt of length sLen; if sLen = 0,\n * then salt is the empty string. */\n var salt;\n if(salt_ === null) {\n salt = prng.getBytesSync(sLen);\n } else {\n salt = salt_.bytes();\n }\n\n /* 5. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt; */\n var m_ = new forge.util.ByteBuffer();\n m_.fillWithByte(0, 8);\n m_.putBytes(mHash);\n m_.putBytes(salt);\n\n /* 6. Let H = Hash(M'), an octet string of length hLen. */\n hash.start();\n hash.update(m_.getBytes());\n var h = hash.digest().getBytes();\n\n /* 7. Generate an octet string PS consisting of emLen - sLen - hLen - 2\n * zero octets. The length of PS may be 0. */\n var ps = new forge.util.ByteBuffer();\n ps.fillWithByte(0, emLen - sLen - hLen - 2);\n\n /* 8. Let DB = PS || 0x01 || salt; DB is an octet string of length\n * emLen - hLen - 1. */\n ps.putByte(0x01);\n ps.putBytes(salt);\n var db = ps.getBytes();\n\n /* 9. Let dbMask = MGF(H, emLen - hLen - 1). */\n var maskLen = emLen - hLen - 1;\n var dbMask = mgf.generate(h, maskLen);\n\n /* 10. Let maskedDB = DB \\xor dbMask. */\n var maskedDB = '';\n for(i = 0; i < maskLen; i++) {\n maskedDB += String.fromCharCode(db.charCodeAt(i) ^ dbMask.charCodeAt(i));\n }\n\n /* 11. Set the leftmost 8emLen - emBits bits of the leftmost octet in\n * maskedDB to zero. 
*/\n var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF;\n maskedDB = String.fromCharCode(maskedDB.charCodeAt(0) & ~mask) +\n maskedDB.substr(1);\n\n /* 12. Let EM = maskedDB || H || 0xbc.\n * 13. Output EM. */\n return maskedDB + h + String.fromCharCode(0xbc);\n };\n\n /**\n * Verifies a PSS signature.\n *\n * This function implements EMSA-PSS-VERIFY as per RFC 3447, section 9.1.2.\n *\n * @param mHash the message digest hash, as a binary-encoded string, to\n * compare against the signature.\n * @param em the encoded message, as a binary-encoded string\n * (RSA decryption result).\n * @param modsBits the length of the RSA modulus in bits.\n *\n * @return true if the signature was verified, false if not.\n */\n pssobj.verify = function(mHash, em, modBits) {\n var i;\n var emBits = modBits - 1;\n var emLen = Math.ceil(emBits / 8);\n\n /* c. Convert the message representative m to an encoded message EM\n * of length emLen = ceil((modBits - 1) / 8) octets, where modBits\n * is the length in bits of the RSA modulus n */\n em = em.substr(-emLen);\n\n /* 3. If emLen < hLen + sLen + 2, output \"inconsistent\" and stop. */\n if(emLen < hLen + sLen + 2) {\n throw new Error('Inconsistent parameters to PSS signature verification.');\n }\n\n /* 4. If the rightmost octet of EM does not have hexadecimal value\n * 0xbc, output \"inconsistent\" and stop. */\n if(em.charCodeAt(emLen - 1) !== 0xbc) {\n throw new Error('Encoded message does not end in 0xBC.');\n }\n\n /* 5. Let maskedDB be the leftmost emLen - hLen - 1 octets of EM, and\n * let H be the next hLen octets. */\n var maskLen = emLen - hLen - 1;\n var maskedDB = em.substr(0, maskLen);\n var h = em.substr(maskLen, hLen);\n\n /* 6. If the leftmost 8emLen - emBits bits of the leftmost octet in\n * maskedDB are not all equal to zero, output \"inconsistent\" and stop. */\n var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF;\n if((maskedDB.charCodeAt(0) & mask) !== 0) {\n throw new Error('Bits beyond keysize not zero as expected.');\n }\n\n /* 7. Let dbMask = MGF(H, emLen - hLen - 1). */\n var dbMask = mgf.generate(h, maskLen);\n\n /* 8. Let DB = maskedDB \\xor dbMask. */\n var db = '';\n for(i = 0; i < maskLen; i++) {\n db += String.fromCharCode(maskedDB.charCodeAt(i) ^ dbMask.charCodeAt(i));\n }\n\n /* 9. Set the leftmost 8emLen - emBits bits of the leftmost octet\n * in DB to zero. */\n db = String.fromCharCode(db.charCodeAt(0) & ~mask) + db.substr(1);\n\n /* 10. If the emLen - hLen - sLen - 2 leftmost octets of DB are not zero\n * or if the octet at position emLen - hLen - sLen - 1 (the leftmost\n * position is \"position 1\") does not have hexadecimal value 0x01,\n * output \"inconsistent\" and stop. */\n var checkLen = emLen - hLen - sLen - 2;\n for(i = 0; i < checkLen; i++) {\n if(db.charCodeAt(i) !== 0x00) {\n throw new Error('Leftmost octets not zero as expected');\n }\n }\n\n if(db.charCodeAt(checkLen) !== 0x01) {\n throw new Error('Inconsistent PSS signature, 0x01 marker not found');\n }\n\n /* 11. Let salt be the last sLen octets of DB. */\n var salt = db.substr(-sLen);\n\n /* 12. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt */\n var m_ = new forge.util.ByteBuffer();\n m_.fillWithByte(0, 8);\n m_.putBytes(mHash);\n m_.putBytes(salt);\n\n /* 13. Let H' = Hash(M'), an octet string of length hLen. */\n hash.start();\n hash.update(m_.getBytes());\n var h_ = hash.digest().getBytes();\n\n /* 14. 
If H = H', output \"consistent.\" Otherwise, output \"inconsistent.\" */\n return h === h_;\n };\n\n return pssobj;\n};\n","/**\n * An API for getting cryptographically-secure random bytes. The bytes are\n * generated using the Fortuna algorithm devised by Bruce Schneier and\n * Niels Ferguson.\n *\n * Getting strong random bytes is not yet easy to do in javascript. The only\n * truish random entropy that can be collected is from the mouse, keyboard, or\n * from timing with respect to page loads, etc. This generator makes a poor\n * attempt at providing random bytes when those sources haven't yet provided\n * enough entropy to initially seed or to reseed the PRNG.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./sha256');\nrequire('./prng');\nrequire('./util');\n\n(function() {\n\n// forge.random already defined\nif(forge.random && forge.random.getBytes) {\n module.exports = forge.random;\n return;\n}\n\n(function(jQuery) {\n\n// the default prng plugin, uses AES-128\nvar prng_aes = {};\nvar _prng_aes_output = new Array(4);\nvar _prng_aes_buffer = forge.util.createBuffer();\nprng_aes.formatKey = function(key) {\n // convert the key into 32-bit integers\n var tmp = forge.util.createBuffer(key);\n key = new Array(4);\n key[0] = tmp.getInt32();\n key[1] = tmp.getInt32();\n key[2] = tmp.getInt32();\n key[3] = tmp.getInt32();\n\n // return the expanded key\n return forge.aes._expandKey(key, false);\n};\nprng_aes.formatSeed = function(seed) {\n // convert seed into 32-bit integers\n var tmp = forge.util.createBuffer(seed);\n seed = new Array(4);\n seed[0] = tmp.getInt32();\n seed[1] = tmp.getInt32();\n seed[2] = tmp.getInt32();\n seed[3] = tmp.getInt32();\n return seed;\n};\nprng_aes.cipher = function(key, seed) {\n forge.aes._updateBlock(key, seed, _prng_aes_output, false);\n _prng_aes_buffer.putInt32(_prng_aes_output[0]);\n _prng_aes_buffer.putInt32(_prng_aes_output[1]);\n _prng_aes_buffer.putInt32(_prng_aes_output[2]);\n _prng_aes_buffer.putInt32(_prng_aes_output[3]);\n return _prng_aes_buffer.getBytes();\n};\nprng_aes.increment = function(seed) {\n // FIXME: do we care about carry or signed issues?\n ++seed[3];\n return seed;\n};\nprng_aes.md = forge.md.sha256;\n\n/**\n * Creates a new PRNG.\n */\nfunction spawnPrng() {\n var ctx = forge.prng.create(prng_aes);\n\n /**\n * Gets random bytes. If a native secure crypto API is unavailable, this\n * method tries to make the bytes more unpredictable by drawing from data that\n * can be collected from the user of the browser, eg: mouse movement.\n *\n * If a callback is given, this method will be called asynchronously.\n *\n * @param count the number of random bytes to get.\n * @param [callback(err, bytes)] called once the operation completes.\n *\n * @return the random bytes in a string.\n */\n ctx.getBytes = function(count, callback) {\n return ctx.generate(count, callback);\n };\n\n /**\n * Gets random bytes asynchronously. 
If a native secure crypto API is\n * unavailable, this method tries to make the bytes more unpredictable by\n * drawing from data that can be collected from the user of the browser,\n * eg: mouse movement.\n *\n * @param count the number of random bytes to get.\n *\n * @return the random bytes in a string.\n */\n ctx.getBytesSync = function(count) {\n return ctx.generate(count);\n };\n\n return ctx;\n}\n\n// create default prng context\nvar _ctx = spawnPrng();\n\n// add other sources of entropy only if window.crypto.getRandomValues is not\n// available -- otherwise this source will be automatically used by the prng\nvar getRandomValues = null;\nvar globalScope = forge.util.globalScope;\nvar _crypto = globalScope.crypto || globalScope.msCrypto;\nif(_crypto && _crypto.getRandomValues) {\n getRandomValues = function(arr) {\n return _crypto.getRandomValues(arr);\n };\n}\n\nif(forge.options.usePureJavaScript ||\n (!forge.util.isNodejs && !getRandomValues)) {\n // if this is a web worker, do not use weak entropy, instead register to\n // receive strong entropy asynchronously from the main thread\n if(typeof window === 'undefined' || window.document === undefined) {\n // FIXME:\n }\n\n // get load time entropy\n _ctx.collectInt(+new Date(), 32);\n\n // add some entropy from navigator object\n if(typeof(navigator) !== 'undefined') {\n var _navBytes = '';\n for(var key in navigator) {\n try {\n if(typeof(navigator[key]) == 'string') {\n _navBytes += navigator[key];\n }\n } catch(e) {\n /* Some navigator keys might not be accessible, e.g. the geolocation\n attribute throws an exception if touched in Mozilla chrome://\n context.\n\n Silently ignore this and just don't use this as a source of\n entropy. */\n }\n }\n _ctx.collect(_navBytes);\n _navBytes = null;\n }\n\n // add mouse and keyboard collectors if jquery is available\n if(jQuery) {\n // set up mouse entropy capture\n jQuery().mousemove(function(e) {\n // add mouse coords\n _ctx.collectInt(e.clientX, 16);\n _ctx.collectInt(e.clientY, 16);\n });\n\n // set up keyboard entropy capture\n jQuery().keypress(function(e) {\n _ctx.collectInt(e.charCode, 8);\n });\n }\n}\n\n/* Random API */\nif(!forge.random) {\n forge.random = _ctx;\n} else {\n // extend forge.random with _ctx\n for(var key in _ctx) {\n forge.random[key] = _ctx[key];\n }\n}\n\n// expose spawn PRNG\nforge.random.createInstance = spawnPrng;\n\nmodule.exports = forge.random;\n\n})(typeof(jQuery) !== 'undefined' ? 
jQuery : null);\n\n})();\n","/**\n * RC2 implementation.\n *\n * @author Stefan Siegl\n *\n * Copyright (c) 2012 Stefan Siegl \n *\n * Information on the RC2 cipher is available from RFC #2268,\n * http://www.ietf.org/rfc/rfc2268.txt\n */\nvar forge = require('./forge');\nrequire('./util');\n\nvar piTable = [\n 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d,\n 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2,\n 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32,\n 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82,\n 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc,\n 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26,\n 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03,\n 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7,\n 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a,\n 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec,\n 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39,\n 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31,\n 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9,\n 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9,\n 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e,\n 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad\n];\n\nvar s = [1, 2, 3, 5];\n\n/**\n * Rotate a word left by given number of bits.\n *\n * Bits that are shifted out on the left are put back in on the right\n * hand side.\n *\n * @param word The word to shift left.\n * @param bits The number of bits to shift by.\n * @return The rotated word.\n */\nvar rol = function(word, bits) {\n return ((word << bits) & 0xffff) | ((word & 0xffff) >> (16 - bits));\n};\n\n/**\n * Rotate a word right by given number of bits.\n *\n * Bits that are shifted out on the right are put back in on the left\n * hand side.\n *\n * @param word The word to shift right.\n * @param bits The number of bits to shift by.\n * @return The rotated word.\n */\nvar ror = function(word, bits) {\n return ((word & 0xffff) >> bits) | ((word << (16 - bits)) & 0xffff);\n};\n\n/* RC2 API */\nmodule.exports = forge.rc2 = forge.rc2 || {};\n\n/**\n * Perform RC2 key expansion as per RFC #2268, section 2.\n *\n * @param key variable-length user key (between 1 and 128 bytes)\n * @param effKeyBits number of effective key bits (default: 128)\n * @return the expanded RC2 key (ByteBuffer of 128 bytes)\n */\nforge.rc2.expandKey = function(key, effKeyBits) {\n if(typeof key === 'string') {\n key = forge.util.createBuffer(key);\n }\n effKeyBits = effKeyBits || 128;\n\n /* introduce variables that match the names used in RFC #2268 */\n var L = key;\n var T = key.length();\n var T1 = effKeyBits;\n var T8 = Math.ceil(T1 / 8);\n var TM = 0xff >> (T1 & 0x07);\n var i;\n\n for(i = T; i < 128; i++) {\n L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]);\n }\n\n L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]);\n\n for(i = 127 - T8; i >= 0; i--) {\n 
L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]);\n }\n\n return L;\n};\n\n/**\n * Creates a RC2 cipher object.\n *\n * @param key the symmetric key to use (as base for key generation).\n * @param bits the number of effective key bits.\n * @param encrypt false for decryption, true for encryption.\n *\n * @return the cipher.\n */\nvar createCipher = function(key, bits, encrypt) {\n var _finish = false, _input = null, _output = null, _iv = null;\n var mixRound, mashRound;\n var i, j, K = [];\n\n /* Expand key and fill into K[] Array */\n key = forge.rc2.expandKey(key, bits);\n for(i = 0; i < 64; i++) {\n K.push(key.getInt16Le());\n }\n\n if(encrypt) {\n /**\n * Perform one mixing round \"in place\".\n *\n * @param R Array of four words to perform mixing on.\n */\n mixRound = function(R) {\n for(i = 0; i < 4; i++) {\n R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +\n ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);\n R[i] = rol(R[i], s[i]);\n j++;\n }\n };\n\n /**\n * Perform one mashing round \"in place\".\n *\n * @param R Array of four words to perform mashing on.\n */\n mashRound = function(R) {\n for(i = 0; i < 4; i++) {\n R[i] += K[R[(i + 3) % 4] & 63];\n }\n };\n } else {\n /**\n * Perform one r-mixing round \"in place\".\n *\n * @param R Array of four words to perform mixing on.\n */\n mixRound = function(R) {\n for(i = 3; i >= 0; i--) {\n R[i] = ror(R[i], s[i]);\n R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +\n ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);\n j--;\n }\n };\n\n /**\n * Perform one r-mashing round \"in place\".\n *\n * @param R Array of four words to perform mashing on.\n */\n mashRound = function(R) {\n for(i = 3; i >= 0; i--) {\n R[i] -= K[R[(i + 3) % 4] & 63];\n }\n };\n }\n\n /**\n * Run the specified cipher execution plan.\n *\n * This function takes four words from the input buffer, applies the IV on\n * it (if requested) and runs the provided execution plan.\n *\n * The plan must be put together in form of a array of arrays. Where the\n * outer one is simply a list of steps to perform and the inner one needs\n * to have two elements: the first one telling how many rounds to perform,\n * the second one telling what to do (i.e. the function to call).\n *\n * @param {Array} plan The plan to execute.\n */\n var runPlan = function(plan) {\n var R = [];\n\n /* Get data from input buffer and fill the four words into R */\n for(i = 0; i < 4; i++) {\n var val = _input.getInt16Le();\n\n if(_iv !== null) {\n if(encrypt) {\n /* We're encrypting, apply the IV first. */\n val ^= _iv.getInt16Le();\n } else {\n /* We're decryption, keep cipher text for next block. */\n _iv.putInt16Le(val);\n }\n }\n\n R.push(val & 0xffff);\n }\n\n /* Reset global \"j\" variable as per spec. */\n j = encrypt ? 0 : 63;\n\n /* Run execution plan. */\n for(var ptr = 0; ptr < plan.length; ptr++) {\n for(var ctr = 0; ctr < plan[ptr][0]; ctr++) {\n plan[ptr][1](R);\n }\n }\n\n /* Write back result to output buffer. */\n for(i = 0; i < 4; i++) {\n if(_iv !== null) {\n if(encrypt) {\n /* We're encrypting in CBC-mode, feed back encrypted bytes into\n IV buffer to carry it forward to next block. */\n _iv.putInt16Le(R[i]);\n } else {\n R[i] ^= _iv.getInt16Le();\n }\n }\n\n _output.putInt16Le(R[i]);\n }\n };\n\n /* Create cipher object */\n var cipher = null;\n cipher = {\n /**\n * Starts or restarts the encryption or decryption process, whichever\n * was previously configured.\n *\n * To use the cipher in CBC mode, iv may be given either as a string\n * of bytes, or as a byte buffer. 
For ECB mode, give null as iv.\n *\n * @param iv the initialization vector to use, null for ECB mode.\n * @param output the output the buffer to write to, null to create one.\n */\n start: function(iv, output) {\n if(iv) {\n /* CBC mode */\n if(typeof iv === 'string') {\n iv = forge.util.createBuffer(iv);\n }\n }\n\n _finish = false;\n _input = forge.util.createBuffer();\n _output = output || new forge.util.createBuffer();\n _iv = iv;\n\n cipher.output = _output;\n },\n\n /**\n * Updates the next block.\n *\n * @param input the buffer to read from.\n */\n update: function(input) {\n if(!_finish) {\n // not finishing, so fill the input buffer with more input\n _input.putBuffer(input);\n }\n\n while(_input.length() >= 8) {\n runPlan([\n [ 5, mixRound ],\n [ 1, mashRound ],\n [ 6, mixRound ],\n [ 1, mashRound ],\n [ 5, mixRound ]\n ]);\n }\n },\n\n /**\n * Finishes encrypting or decrypting.\n *\n * @param pad a padding function to use, null for PKCS#7 padding,\n * signature(blockSize, buffer, decrypt).\n *\n * @return true if successful, false on error.\n */\n finish: function(pad) {\n var rval = true;\n\n if(encrypt) {\n if(pad) {\n rval = pad(8, _input, !encrypt);\n } else {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (_input.length() === 8) ? 8 : (8 - _input.length());\n _input.fillWithByte(padding, padding);\n }\n }\n\n if(rval) {\n // do final update\n _finish = true;\n cipher.update();\n }\n\n if(!encrypt) {\n // check for error: input data not a multiple of block size\n rval = (_input.length() === 0);\n if(rval) {\n if(pad) {\n rval = pad(8, _output, !encrypt);\n } else {\n // ensure padding byte count is valid\n var len = _output.length();\n var count = _output.at(len - 1);\n\n if(count > len) {\n rval = false;\n } else {\n // trim off padding bytes\n _output.truncate(count);\n }\n }\n }\n }\n\n return rval;\n }\n };\n\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the\n * given symmetric key. The output will be stored in the 'output' member\n * of the returned cipher.\n *\n * The key and iv may be given as a string of bytes or a byte buffer.\n * The cipher is initialized to use 128 effective key bits.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n *\n * @return the cipher.\n */\nforge.rc2.startEncrypting = function(key, iv, output) {\n var cipher = forge.rc2.createEncryptionCipher(key, 128);\n cipher.start(iv, output);\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the\n * given symmetric key.\n *\n * The key may be given as a string of bytes or a byte buffer.\n *\n * To start encrypting call start() on the cipher with an iv and optional\n * output buffer.\n *\n * @param key the symmetric key to use.\n *\n * @return the cipher.\n */\nforge.rc2.createEncryptionCipher = function(key, bits) {\n return createCipher(key, bits, true);\n};\n\n/**\n * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the\n * given symmetric key. 
The output will be stored in the 'output' member\n * of the returned cipher.\n *\n * The key and iv may be given as a string of bytes or a byte buffer.\n * The cipher is initialized to use 128 effective key bits.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n *\n * @return the cipher.\n */\nforge.rc2.startDecrypting = function(key, iv, output) {\n var cipher = forge.rc2.createDecryptionCipher(key, 128);\n cipher.start(iv, output);\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the\n * given symmetric key.\n *\n * The key may be given as a string of bytes or a byte buffer.\n *\n * To start decrypting call start() on the cipher with an iv and optional\n * output buffer.\n *\n * @param key the symmetric key to use.\n *\n * @return the cipher.\n */\nforge.rc2.createDecryptionCipher = function(key, bits) {\n return createCipher(key, bits, false);\n};\n","/**\n * Javascript implementation of basic RSA algorithms.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n *\n * The only algorithm currently supported for PKI is RSA.\n *\n * An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo\n * ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier\n * and a subjectPublicKey of type bit string.\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. In the case of RSA, there aren't any.\n *\n * SubjectPublicKeyInfo ::= SEQUENCE {\n * algorithm AlgorithmIdentifier,\n * subjectPublicKey BIT STRING\n * }\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * For an RSA public key, the subjectPublicKey is:\n *\n * RSAPublicKey ::= SEQUENCE {\n * modulus INTEGER, -- n\n * publicExponent INTEGER -- e\n * }\n *\n * PrivateKeyInfo ::= SEQUENCE {\n * version Version,\n * privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,\n * privateKey PrivateKey,\n * attributes [0] IMPLICIT Attributes OPTIONAL\n * }\n *\n * Version ::= INTEGER\n * PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier\n * PrivateKey ::= OCTET STRING\n * Attributes ::= SET OF Attribute\n *\n * An RSA private key as the following structure:\n *\n * RSAPrivateKey ::= SEQUENCE {\n * version Version,\n * modulus INTEGER, -- n\n * publicExponent INTEGER, -- e\n * privateExponent INTEGER, -- d\n * prime1 INTEGER, -- p\n * prime2 INTEGER, -- q\n * exponent1 INTEGER, -- d mod (p-1)\n * exponent2 INTEGER, -- d mod (q-1)\n * coefficient INTEGER -- (inverse of q) mod p\n * }\n *\n * Version ::= INTEGER\n *\n * The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./jsbn');\nrequire('./oids');\nrequire('./pkcs1');\nrequire('./prime');\nrequire('./random');\nrequire('./util');\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\nvar _crypto = forge.util.isNodejs ? 
require('crypto') : null;\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for util API\nvar util = forge.util;\n\n/*\n * RSA encryption and decryption, see RFC 2313.\n */\nforge.pki = forge.pki || {};\nmodule.exports = forge.pki.rsa = forge.rsa = forge.rsa || {};\nvar pki = forge.pki;\n\n// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29\nvar GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];\n\n// validator for a PrivateKeyInfo structure\nvar privateKeyValidator = {\n // PrivateKeyInfo\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'PrivateKeyInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // privateKeyAlgorithm\n name: 'PrivateKeyInfo.privateKeyAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'privateKeyOid'\n }]\n }, {\n // PrivateKey\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'privateKey'\n }]\n};\n\n// validator for an RSA private key\nvar rsaPrivateKeyValidator = {\n // RSAPrivateKey\n name: 'RSAPrivateKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'RSAPrivateKey.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // modulus (n)\n name: 'RSAPrivateKey.modulus',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyModulus'\n }, {\n // publicExponent (e)\n name: 'RSAPrivateKey.publicExponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPublicExponent'\n }, {\n // privateExponent (d)\n name: 'RSAPrivateKey.privateExponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrivateExponent'\n }, {\n // prime1 (p)\n name: 'RSAPrivateKey.prime1',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrime1'\n }, {\n // prime2 (q)\n name: 'RSAPrivateKey.prime2',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrime2'\n }, {\n // exponent1 (d mod (p-1))\n name: 'RSAPrivateKey.exponent1',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyExponent1'\n }, {\n // exponent2 (d mod (q-1))\n name: 'RSAPrivateKey.exponent2',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyExponent2'\n }, {\n // coefficient ((inverse of q) mod p)\n name: 'RSAPrivateKey.coefficient',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyCoefficient'\n }]\n};\n\n// validator for an RSA public key\nvar rsaPublicKeyValidator = {\n // RSAPublicKey\n name: 'RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // modulus (n)\n name: 'RSAPublicKey.modulus',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'publicKeyModulus'\n }, {\n // 
publicExponent (e)\n name: 'RSAPublicKey.exponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'publicKeyExponent'\n }]\n};\n\n// validator for an SubjectPublicKeyInfo structure\n// Note: Currently only works with an RSA public key\nvar publicKeyValidator = forge.pki.rsa.publicKeyValidator = {\n name: 'SubjectPublicKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'subjectPublicKeyInfo',\n value: [{\n name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'publicKeyOid'\n }]\n }, {\n // subjectPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n value: [{\n // RSAPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'rsaPublicKey'\n }]\n }]\n};\n\n// validator for a DigestInfo structure\nvar digestInfoValidator = {\n name: 'DigestInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'DigestInfo.DigestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'algorithmIdentifier'\n }, {\n // NULL paramters\n name: 'DigestInfo.DigestAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.NULL,\n // captured only to check existence for md2 and md5\n capture: 'parameters',\n optional: true,\n constructed: false\n }]\n }, {\n // digest\n name: 'DigestInfo.digest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'digest'\n }]\n};\n\n/**\n * Wrap digest in DigestInfo object.\n *\n * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447.\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n *\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n * Digest ::= OCTET STRING\n *\n * @param md the message digest object with the hash to sign.\n *\n * @return the encoded message (ready for RSA encrytion)\n */\nvar emsaPkcs1v15encode = function(md) {\n // get the oid for the algorithm\n var oid;\n if(md.algorithm in pki.oids) {\n oid = pki.oids[md.algorithm];\n } else {\n var error = new Error('Unknown message digest algorithm.');\n error.algorithm = md.algorithm;\n throw error;\n }\n var oidBytes = asn1.oidToDer(oid).getBytes();\n\n // create the digest info\n var digestInfo = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var digestAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n digestAlgorithm.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidBytes));\n digestAlgorithm.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.NULL, false, ''));\n var digest = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING,\n false, md.digest().getBytes());\n digestInfo.value.push(digestAlgorithm);\n digestInfo.value.push(digest);\n\n // encode digest info\n return 
asn1.toDer(digestInfo).getBytes();\n};\n\n/**\n * Performs x^c mod n (RSA encryption or decryption operation).\n *\n * @param x the number to raise and mod.\n * @param key the key to use.\n * @param pub true if the key is public, false if private.\n *\n * @return the result of x^c mod n.\n */\nvar _modPow = function(x, key, pub) {\n if(pub) {\n return x.modPow(key.e, key.n);\n }\n\n if(!key.p || !key.q) {\n // allow calculation without CRT params (slow)\n return x.modPow(key.d, key.n);\n }\n\n // pre-compute dP, dQ, and qInv if necessary\n if(!key.dP) {\n key.dP = key.d.mod(key.p.subtract(BigInteger.ONE));\n }\n if(!key.dQ) {\n key.dQ = key.d.mod(key.q.subtract(BigInteger.ONE));\n }\n if(!key.qInv) {\n key.qInv = key.q.modInverse(key.p);\n }\n\n /* Chinese remainder theorem (CRT) states:\n\n Suppose n1, n2, ..., nk are positive integers which are pairwise\n coprime (n1 and n2 have no common factors other than 1). For any\n integers x1, x2, ..., xk there exists an integer x solving the\n system of simultaneous congruences (where ~= means modularly\n congruent so a ~= b mod n means a mod n = b mod n):\n\n x ~= x1 mod n1\n x ~= x2 mod n2\n ...\n x ~= xk mod nk\n\n This system of congruences has a single simultaneous solution x\n between 0 and n - 1. Furthermore, each xk solution and x itself\n is congruent modulo the product n = n1*n2*...*nk.\n So x1 mod n = x2 mod n = xk mod n = x mod n.\n\n The single simultaneous solution x can be solved with the following\n equation:\n\n x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni.\n\n Where x is less than n, xi = x mod ni.\n\n For RSA we are only concerned with k = 2. The modulus n = pq, where\n p and q are coprime. The RSA decryption algorithm is:\n\n y = x^d mod n\n\n Given the above:\n\n x1 = x^d mod p\n r1 = n/p = q\n s1 = q^-1 mod p\n x2 = x^d mod q\n r2 = n/q = p\n s2 = p^-1 mod q\n\n So y = (x1r1s1 + x2r2s2) mod n\n = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n\n\n According to Fermat's Little Theorem, if the modulus P is prime,\n for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P.\n Since A is not divisible by P it follows that if:\n N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore:\n\n A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort\n to calculate). In order to calculate x^d mod p more quickly the\n exponent d mod (p - 1) is stored in the RSA private key (the same\n is done for x^d mod q). These values are referred to as dP and dQ\n respectively. Therefore we now have:\n\n y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n\n\n Since we'll be reducing x^dP by modulo p (same for q) we can also\n reduce x by p (and q respectively) before hand. Therefore, let\n\n xp = ((x mod p)^dP mod p), and\n xq = ((x mod q)^dQ mod q), yielding:\n\n y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n\n\n This can be further reduced to a simple algorithm that only\n requires 1 inverse (the q inverse is used) to be used and stored.\n The algorithm is called Garner's algorithm. If qInv is the\n inverse of q, we simply calculate:\n\n y = (qInv*(xp - xq) mod p) * q + xq\n\n However, there are two further complications. First, we need to\n ensure that xp > xq to prevent signed BigIntegers from being used\n so we add p until this is true (since we will be mod'ing with\n p anyway). Then, there is a known timing attack on algorithms\n using the CRT. To mitigate this risk, \"cryptographic blinding\"\n should be used. 
This requires simply generating a random number r\n between 0 and n-1 and its inverse and multiplying x by r^e before\n calculating y and then multiplying y by r^-1 afterwards. Note that\n r must be coprime with n (gcd(r, n) === 1) in order to have an\n inverse.\n */\n\n // cryptographic blinding\n var r;\n do {\n r = new BigInteger(\n forge.util.bytesToHex(forge.random.getBytes(key.n.bitLength() / 8)),\n 16);\n } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger.ONE));\n x = x.multiply(r.modPow(key.e, key.n)).mod(key.n);\n\n // calculate xp and xq\n var xp = x.mod(key.p).modPow(key.dP, key.p);\n var xq = x.mod(key.q).modPow(key.dQ, key.q);\n\n // xp must be larger than xq to avoid signed bit usage\n while(xp.compareTo(xq) < 0) {\n xp = xp.add(key.p);\n }\n\n // do last step\n var y = xp.subtract(xq)\n .multiply(key.qInv).mod(key.p)\n .multiply(key.q).add(xq);\n\n // remove effect of random for cryptographic blinding\n y = y.multiply(r.modInverse(key.n)).mod(key.n);\n\n return y;\n};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or\n * 'encrypt' on a public key object instead.\n *\n * Performs RSA encryption.\n *\n * The parameter bt controls whether to put padding bytes before the\n * message passed in. Set bt to either true or false to disable padding\n * completely (in order to handle e.g. EMSA-PSS encoding seperately before),\n * signaling whether the encryption operation is a public key operation\n * (i.e. encrypting data) or not, i.e. private key operation (data signing).\n *\n * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01\n * (for signing) or 0x02 (for encryption). The key operation mode (private\n * or public) is derived from this flag in that case).\n *\n * @param m the message to encrypt as a byte string.\n * @param key the RSA key to use.\n * @param bt for PKCS#1 v1.5 padding, the block type to use\n * (0x01 for private key, 0x02 for public),\n * to disable padding: true = public key, false = private key.\n *\n * @return the encrypted bytes as a string.\n */\npki.rsa.encrypt = function(m, key, bt) {\n var pub = bt;\n var eb;\n\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n if(bt !== false && bt !== true) {\n // legacy, default to PKCS#1 v1.5 padding\n pub = (bt === 0x02);\n eb = _encodePkcs1_v1_5(m, key, bt);\n } else {\n eb = forge.util.createBuffer();\n eb.putBytes(m);\n }\n\n // load encryption block as big integer 'x'\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var x = new BigInteger(eb.toHex(), 16);\n\n // do RSA encryption\n var y = _modPow(x, key, pub);\n\n // convert y into the encrypted data byte string, if y is shorter in\n // bytes than k, then prepend zero bytes to fill up ed\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var yhex = y.toString(16);\n var ed = forge.util.createBuffer();\n var zeros = k - Math.ceil(yhex.length / 2);\n while(zeros > 0) {\n ed.putByte(0x00);\n --zeros;\n }\n ed.putBytes(forge.util.hexToBytes(yhex));\n return ed.getBytes();\n};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or\n * 'verify' on a public key object instead.\n *\n * Performs RSA decryption.\n *\n * The parameter ml controls whether to apply PKCS#1 v1.5 padding\n * or not. Set ml = false to disable padding removal completely\n * (in order to handle e.g. 
EMSA-PSS later on) and simply pass back\n * the RSA encryption block.\n *\n * @param ed the encrypted data to decrypt in as a byte string.\n * @param key the RSA key to use.\n * @param pub true for a public key operation, false for private.\n * @param ml the message length, if known, false to disable padding.\n *\n * @return the decrypted message as a byte string.\n */\npki.rsa.decrypt = function(ed, key, pub, ml) {\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n // error if the length of the encrypted data ED is not k\n if(ed.length !== k) {\n var error = new Error('Encrypted message length is invalid.');\n error.length = ed.length;\n error.expected = k;\n throw error;\n }\n\n // convert encrypted data into a big integer\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var y = new BigInteger(forge.util.createBuffer(ed).toHex(), 16);\n\n // y must be less than the modulus or it wasn't the result of\n // a previous mod operation (encryption) using that modulus\n if(y.compareTo(key.n) >= 0) {\n throw new Error('Encrypted message is invalid.');\n }\n\n // do RSA decryption\n var x = _modPow(y, key, pub);\n\n // create the encryption block, if x is shorter in bytes than k, then\n // prepend zero bytes to fill up eb\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var xhex = x.toString(16);\n var eb = forge.util.createBuffer();\n var zeros = k - Math.ceil(xhex.length / 2);\n while(zeros > 0) {\n eb.putByte(0x00);\n --zeros;\n }\n eb.putBytes(forge.util.hexToBytes(xhex));\n\n if(ml !== false) {\n // legacy, default to PKCS#1 v1.5 padding\n return _decodePkcs1_v1_5(eb.getBytes(), key, pub);\n }\n\n // return message\n return eb.getBytes();\n};\n\n/**\n * Creates an RSA key-pair generation state object. It is used to allow\n * key-generation to be performed in steps. 
It also allows for a UI to\n * display progress updates.\n *\n * @param bits the size for the private key in bits, defaults to 2048.\n * @param e the public exponent to use, defaults to 65537 (0x10001).\n * @param [options] the options to use.\n * prng a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n * algorithm the algorithm to use (default: 'PRIMEINC').\n *\n * @return the state object to use to generate the key-pair.\n */\npki.rsa.createKeyPairGenerationState = function(bits, e, options) {\n // TODO: migrate step-based prime generation code to forge.prime\n\n // set default bits\n if(typeof(bits) === 'string') {\n bits = parseInt(bits, 10);\n }\n bits = bits || 2048;\n\n // create prng with api that matches BigInteger secure random\n options = options || {};\n var prng = options.prng || forge.random;\n var rng = {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n var b = prng.getBytesSync(x.length);\n for(var i = 0; i < x.length; ++i) {\n x[i] = b.charCodeAt(i);\n }\n }\n };\n\n var algorithm = options.algorithm || 'PRIMEINC';\n\n // create PRIMEINC algorithm state\n var rval;\n if(algorithm === 'PRIMEINC') {\n rval = {\n algorithm: algorithm,\n state: 0,\n bits: bits,\n rng: rng,\n eInt: e || 65537,\n e: new BigInteger(null),\n p: null,\n q: null,\n qBits: bits >> 1,\n pBits: bits - (bits >> 1),\n pqState: 0,\n num: null,\n keys: null\n };\n rval.e.fromInt(rval.eInt);\n } else {\n throw new Error('Invalid key generation algorithm: ' + algorithm);\n }\n\n return rval;\n};\n\n/**\n * Attempts to runs the key-generation algorithm for at most n seconds\n * (approximately) using the given state. When key-generation has completed,\n * the keys will be stored in state.keys.\n *\n * To use this function to update a UI while generating a key or to prevent\n * causing browser lockups/warnings, set \"n\" to a value other than 0. 
A\n * simple pattern for generating a key and showing a progress indicator is:\n *\n * var state = pki.rsa.createKeyPairGenerationState(2048);\n * var step = function() {\n * // step key-generation, run algorithm for 100 ms, repeat\n * if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) {\n * setTimeout(step, 1);\n * } else {\n * // key-generation complete\n * // TODO: turn off progress indicator here\n * // TODO: use the generated key-pair in \"state.keys\"\n * }\n * };\n * // TODO: turn on progress indicator here\n * setTimeout(step, 0);\n *\n * @param state the state to use.\n * @param n the maximum number of milliseconds to run the algorithm for, 0\n * to run the algorithm to completion.\n *\n * @return true if the key-generation completed, false if not.\n */\npki.rsa.stepKeyPairGenerationState = function(state, n) {\n // set default algorithm if not set\n if(!('algorithm' in state)) {\n state.algorithm = 'PRIMEINC';\n }\n\n // TODO: migrate step-based prime generation code to forge.prime\n // TODO: abstract as PRIMEINC algorithm\n\n // do key generation (based on Tom Wu's rsa.js, see jsbn.js license)\n // with some minor optimizations and designed to run in steps\n\n // local state vars\n var THIRTY = new BigInteger(null);\n THIRTY.fromInt(30);\n var deltaIdx = 0;\n var op_or = function(x, y) {return x | y;};\n\n // keep stepping until time limit is reached or done\n var t1 = +new Date();\n var t2;\n var total = 0;\n while(state.keys === null && (n <= 0 || total < n)) {\n // generate p or q\n if(state.state === 0) {\n /* Note: All primes are of the form:\n\n 30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i\n\n When we generate a random number, we always align it at 30k + 1. Each\n time the number is determined not to be prime we add to get to the\n next 'i', eg: if the number was at 30k + 1 we add 6. */\n var bits = (state.p === null) ? state.pBits : state.qBits;\n var bits1 = bits - 1;\n\n // get a random number\n if(state.pqState === 0) {\n state.num = new BigInteger(bits, state.rng);\n // force MSB set\n if(!state.num.testBit(bits1)) {\n state.num.bitwiseTo(\n BigInteger.ONE.shiftLeft(bits1), op_or, state.num);\n }\n // align number on 30k+1 boundary\n state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0);\n deltaIdx = 0;\n\n ++state.pqState;\n } else if(state.pqState === 1) {\n // try to make the number a prime\n if(state.num.bitLength() > bits) {\n // overflow, try again\n state.pqState = 0;\n // do primality test\n } else if(state.num.isProbablePrime(\n _getMillerRabinTests(state.num.bitLength()))) {\n ++state.pqState;\n } else {\n // get next potential prime\n state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);\n }\n } else if(state.pqState === 2) {\n // ensure number is coprime with e\n state.pqState =\n (state.num.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) === 0) ? 
3 : 0;\n } else if(state.pqState === 3) {\n // store p or q\n state.pqState = 0;\n if(state.p === null) {\n state.p = state.num;\n } else {\n state.q = state.num;\n }\n\n // advance state if both p and q are ready\n if(state.p !== null && state.q !== null) {\n ++state.state;\n }\n state.num = null;\n }\n } else if(state.state === 1) {\n // ensure p is larger than q (swap them if not)\n if(state.p.compareTo(state.q) < 0) {\n state.num = state.p;\n state.p = state.q;\n state.q = state.num;\n }\n ++state.state;\n } else if(state.state === 2) {\n // compute phi: (p - 1)(q - 1) (Euler's totient function)\n state.p1 = state.p.subtract(BigInteger.ONE);\n state.q1 = state.q.subtract(BigInteger.ONE);\n state.phi = state.p1.multiply(state.q1);\n ++state.state;\n } else if(state.state === 3) {\n // ensure e and phi are coprime\n if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) === 0) {\n // phi and e are coprime, advance\n ++state.state;\n } else {\n // phi and e aren't coprime, so generate a new p and q\n state.p = null;\n state.q = null;\n state.state = 0;\n }\n } else if(state.state === 4) {\n // create n, ensure n is has the right number of bits\n state.n = state.p.multiply(state.q);\n\n // ensure n is right number of bits\n if(state.n.bitLength() === state.bits) {\n // success, advance\n ++state.state;\n } else {\n // failed, get new q\n state.q = null;\n state.state = 0;\n }\n } else if(state.state === 5) {\n // set keys\n var d = state.e.modInverse(state.phi);\n state.keys = {\n privateKey: pki.rsa.setPrivateKey(\n state.n, state.e, d, state.p, state.q,\n d.mod(state.p1), d.mod(state.q1),\n state.q.modInverse(state.p)),\n publicKey: pki.rsa.setPublicKey(state.n, state.e)\n };\n }\n\n // update timing\n t2 = +new Date();\n total += t2 - t1;\n t1 = t2;\n }\n\n return state.keys !== null;\n};\n\n/**\n * Generates an RSA public-private key pair in a single call.\n *\n * To generate a key-pair in steps (to allow for progress updates and to\n * prevent blocking or warnings in slow browsers) then use the key-pair\n * generation state functions.\n *\n * To generate a key-pair asynchronously (either through web-workers, if\n * available, or by breaking up the work on the main thread), pass a\n * callback function.\n *\n * @param [bits] the size for the private key in bits, defaults to 2048.\n * @param [e] the public exponent to use, defaults to 65537.\n * @param [options] options for key-pair generation, if given then 'bits'\n * and 'e' must *not* be given:\n * bits the size for the private key in bits, (default: 2048).\n * e the public exponent to use, (default: 65537 (0x10001)).\n * workerScript the worker script URL.\n * workers the number of web workers (if supported) to use,\n * (default: 2).\n * workLoad the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * prng a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\". 
Disables use of native APIs.\n * algorithm the algorithm to use (default: 'PRIMEINC').\n * @param [callback(err, keypair)] called once the operation completes.\n *\n * @return an object with privateKey and publicKey properties.\n */\npki.rsa.generateKeyPair = function(bits, e, options, callback) {\n // (bits), (options), (callback)\n if(arguments.length === 1) {\n if(typeof bits === 'object') {\n options = bits;\n bits = undefined;\n } else if(typeof bits === 'function') {\n callback = bits;\n bits = undefined;\n }\n } else if(arguments.length === 2) {\n // (bits, e), (bits, options), (bits, callback), (options, callback)\n if(typeof bits === 'number') {\n if(typeof e === 'function') {\n callback = e;\n e = undefined;\n } else if(typeof e !== 'number') {\n options = e;\n e = undefined;\n }\n } else {\n options = bits;\n callback = e;\n bits = undefined;\n e = undefined;\n }\n } else if(arguments.length === 3) {\n // (bits, e, options), (bits, e, callback), (bits, options, callback)\n if(typeof e === 'number') {\n if(typeof options === 'function') {\n callback = options;\n options = undefined;\n }\n } else {\n callback = options;\n options = e;\n e = undefined;\n }\n }\n options = options || {};\n if(bits === undefined) {\n bits = options.bits || 2048;\n }\n if(e === undefined) {\n e = options.e || 0x10001;\n }\n\n // use native code if permitted, available, and parameters are acceptable\n if(!forge.options.usePureJavaScript && !options.prng &&\n bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) {\n if(callback) {\n // try native async\n if(_detectNodeCrypto('generateKeyPair')) {\n return _crypto.generateKeyPair('rsa', {\n modulusLength: bits,\n publicExponent: e,\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem'\n },\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem'\n }\n }, function(err, pub, priv) {\n if(err) {\n return callback(err);\n }\n callback(null, {\n privateKey: pki.privateKeyFromPem(priv),\n publicKey: pki.publicKeyFromPem(pub)\n });\n });\n }\n if(_detectSubtleCrypto('generateKey') &&\n _detectSubtleCrypto('exportKey')) {\n // use standard native generateKey\n return util.globalScope.crypto.subtle.generateKey({\n name: 'RSASSA-PKCS1-v1_5',\n modulusLength: bits,\n publicExponent: _intToUint8Array(e),\n hash: {name: 'SHA-256'}\n }, true /* key can be exported*/, ['sign', 'verify'])\n .then(function(pair) {\n return util.globalScope.crypto.subtle.exportKey(\n 'pkcs8', pair.privateKey);\n // avoiding catch(function(err) {...}) to support IE <= 8\n }).then(undefined, function(err) {\n callback(err);\n }).then(function(pkcs8) {\n if(pkcs8) {\n var privateKey = pki.privateKeyFromAsn1(\n asn1.fromDer(forge.util.createBuffer(pkcs8)));\n callback(null, {\n privateKey: privateKey,\n publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e)\n });\n }\n });\n }\n if(_detectSubtleMsCrypto('generateKey') &&\n _detectSubtleMsCrypto('exportKey')) {\n var genOp = util.globalScope.msCrypto.subtle.generateKey({\n name: 'RSASSA-PKCS1-v1_5',\n modulusLength: bits,\n publicExponent: _intToUint8Array(e),\n hash: {name: 'SHA-256'}\n }, true /* key can be exported*/, ['sign', 'verify']);\n genOp.oncomplete = function(e) {\n var pair = e.target.result;\n var exportOp = util.globalScope.msCrypto.subtle.exportKey(\n 'pkcs8', pair.privateKey);\n exportOp.oncomplete = function(e) {\n var pkcs8 = e.target.result;\n var privateKey = pki.privateKeyFromAsn1(\n asn1.fromDer(forge.util.createBuffer(pkcs8)));\n callback(null, {\n privateKey: privateKey,\n publicKey: 
pki.setRsaPublicKey(privateKey.n, privateKey.e)\n });\n };\n exportOp.onerror = function(err) {\n callback(err);\n };\n };\n genOp.onerror = function(err) {\n callback(err);\n };\n return;\n }\n } else {\n // try native sync\n if(_detectNodeCrypto('generateKeyPairSync')) {\n var keypair = _crypto.generateKeyPairSync('rsa', {\n modulusLength: bits,\n publicExponent: e,\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem'\n },\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem'\n }\n });\n return {\n privateKey: pki.privateKeyFromPem(keypair.privateKey),\n publicKey: pki.publicKeyFromPem(keypair.publicKey)\n };\n }\n }\n }\n\n // use JavaScript implementation\n var state = pki.rsa.createKeyPairGenerationState(bits, e, options);\n if(!callback) {\n pki.rsa.stepKeyPairGenerationState(state, 0);\n return state.keys;\n }\n _generateKeyPair(state, options, callback);\n};\n\n/**\n * Sets an RSA public key from BigIntegers modulus and exponent.\n *\n * @param n the modulus.\n * @param e the exponent.\n *\n * @return the public key.\n */\npki.setRsaPublicKey = pki.rsa.setPublicKey = function(n, e) {\n var key = {\n n: n,\n e: e\n };\n\n /**\n * Encrypts the given data with this public key. Newer applications\n * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for\n * legacy applications.\n *\n * @param data the byte string to encrypt.\n * @param scheme the encryption scheme to use:\n * 'RSAES-PKCS1-V1_5' (default),\n * 'RSA-OAEP',\n * 'RAW', 'NONE', or null to perform raw RSA encryption,\n * an object with an 'encode' property set to a function\n * with the signature 'function(data, key)' that returns\n * a binary-encoded string representing the encoded data.\n * @param schemeOptions any scheme-specific options.\n *\n * @return the encrypted byte string.\n */\n key.encrypt = function(data, scheme, schemeOptions) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSAES-PKCS1-V1_5';\n }\n\n if(scheme === 'RSAES-PKCS1-V1_5') {\n scheme = {\n encode: function(m, key, pub) {\n return _encodePkcs1_v1_5(m, key, 0x02).getBytes();\n }\n };\n } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {\n scheme = {\n encode: function(m, key) {\n return forge.pkcs1.encode_rsa_oaep(key, m, schemeOptions);\n }\n };\n } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {\n scheme = {encode: function(e) {return e;}};\n } else if(typeof scheme === 'string') {\n throw new Error('Unsupported encryption scheme: \"' + scheme + '\".');\n }\n\n // do scheme-based encoding then rsa encryption\n var e = scheme.encode(data, key, true);\n return pki.rsa.encrypt(e, key, true);\n };\n\n /**\n * Verifies the given signature against the given digest.\n *\n * PKCS#1 supports multiple (currently two) signature schemes:\n * RSASSA-PKCS1-V1_5 and RSASSA-PSS.\n *\n * By default this implementation uses the \"old scheme\", i.e.\n * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the\n * signature is an OCTET STRING that holds a DigestInfo.\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n * Digest ::= OCTET STRING\n *\n * To perform PSS signature verification, provide an instance\n * of Forge PSS object as the scheme parameter.\n *\n * @param digest the message digest hash to compare against the signature,\n * as a binary-encoded string.\n * @param signature the signature to verify, as a binary-encoded string.\n * 
@param scheme signature verification scheme to use:\n * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,\n * a Forge PSS object for RSASSA-PSS,\n * 'NONE' or null for none, DigestInfo will not be expected, but\n * PKCS#1 v1.5 padding will still be used.\n * @param options optional verify options\n * _parseAllDigestBytes testing flag to control parsing of all\n * digest bytes. Unsupported and not for general usage.\n * (default: true)\n *\n * @return true if the signature was verified, false if not.\n */\n key.verify = function(digest, signature, scheme, options) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSASSA-PKCS1-V1_5';\n }\n if(options === undefined) {\n options = {\n _parseAllDigestBytes: true\n };\n }\n if(!('_parseAllDigestBytes' in options)) {\n options._parseAllDigestBytes = true;\n }\n\n if(scheme === 'RSASSA-PKCS1-V1_5') {\n scheme = {\n verify: function(digest, d) {\n // remove padding\n d = _decodePkcs1_v1_5(d, key, true);\n // d is ASN.1 BER-encoded DigestInfo\n var obj = asn1.fromDer(d, {\n parseAllBytes: options._parseAllDigestBytes\n });\n\n // validate DigestInfo\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, digestInfoValidator, capture, errors)) {\n var error = new Error(\n 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +\n 'DigestInfo value.');\n error.errors = errors;\n throw error;\n }\n // check hash algorithm identifier\n // see PKCS1-v1-5DigestAlgorithms in RFC 8017\n // FIXME: add support to vaidator for strict value choices\n var oid = asn1.derToOid(capture.algorithmIdentifier);\n if(!(oid === forge.oids.md2 ||\n oid === forge.oids.md5 ||\n oid === forge.oids.sha1 ||\n oid === forge.oids.sha224 ||\n oid === forge.oids.sha256 ||\n oid === forge.oids.sha384 ||\n oid === forge.oids.sha512 ||\n oid === forge.oids['sha512-224'] ||\n oid === forge.oids['sha512-256'])) {\n var error = new Error(\n 'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.');\n error.oid = oid;\n throw error;\n }\n\n // special check for md2 and md5 that NULL parameters exist\n if(oid === forge.oids.md2 || oid === forge.oids.md5) {\n if(!('parameters' in capture)) {\n throw new Error(\n 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +\n 'DigestInfo value. 
' +\n 'Missing algorithm identifer NULL parameters.');\n }\n }\n\n // compare the given digest to the decrypted one\n return digest === capture.digest;\n }\n };\n } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {\n scheme = {\n verify: function(digest, d) {\n // remove padding\n d = _decodePkcs1_v1_5(d, key, true);\n return digest === d;\n }\n };\n }\n\n // do rsa decryption w/o any decoding, then verify -- which does decoding\n var d = pki.rsa.decrypt(signature, key, true, false);\n return scheme.verify(digest, d, key.n.bitLength());\n };\n\n return key;\n};\n\n/**\n * Sets an RSA private key from BigIntegers modulus, exponent, primes,\n * prime exponents, and modular multiplicative inverse.\n *\n * @param n the modulus.\n * @param e the public exponent.\n * @param d the private exponent ((inverse of e) mod n).\n * @param p the first prime.\n * @param q the second prime.\n * @param dP exponent1 (d mod (p-1)).\n * @param dQ exponent2 (d mod (q-1)).\n * @param qInv ((inverse of q) mod p)\n *\n * @return the private key.\n */\npki.setRsaPrivateKey = pki.rsa.setPrivateKey = function(\n n, e, d, p, q, dP, dQ, qInv) {\n var key = {\n n: n,\n e: e,\n d: d,\n p: p,\n q: q,\n dP: dP,\n dQ: dQ,\n qInv: qInv\n };\n\n /**\n * Decrypts the given data with this private key. The decryption scheme\n * must match the one used to encrypt the data.\n *\n * @param data the byte string to decrypt.\n * @param scheme the decryption scheme to use:\n * 'RSAES-PKCS1-V1_5' (default),\n * 'RSA-OAEP',\n * 'RAW', 'NONE', or null to perform raw RSA decryption.\n * @param schemeOptions any scheme-specific options.\n *\n * @return the decrypted byte string.\n */\n key.decrypt = function(data, scheme, schemeOptions) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSAES-PKCS1-V1_5';\n }\n\n // do rsa decryption w/o any decoding\n var d = pki.rsa.decrypt(data, key, false, false);\n\n if(scheme === 'RSAES-PKCS1-V1_5') {\n scheme = {decode: _decodePkcs1_v1_5};\n } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {\n scheme = {\n decode: function(d, key) {\n return forge.pkcs1.decode_rsa_oaep(key, d, schemeOptions);\n }\n };\n } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {\n scheme = {decode: function(d) {return d;}};\n } else {\n throw new Error('Unsupported encryption scheme: \"' + scheme + '\".');\n }\n\n // decode according to scheme\n return scheme.decode(d, key, false);\n };\n\n /**\n * Signs the given digest, producing a signature.\n *\n * PKCS#1 supports multiple (currently two) signature schemes:\n * RSASSA-PKCS1-V1_5 and RSASSA-PSS.\n *\n * By default this implementation uses the \"old scheme\", i.e.\n * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide\n * an instance of Forge PSS object as the scheme parameter.\n *\n * @param md the message digest object with the hash to sign.\n * @param scheme the signature scheme to use:\n * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,\n * a Forge PSS object for RSASSA-PSS,\n * 'NONE' or null for none, DigestInfo will not be used but\n * PKCS#1 v1.5 padding will still be used.\n *\n * @return the signature as a byte string.\n */\n key.sign = function(md, scheme) {\n /* Note: The internal implementation of RSA operations is being\n transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy\n code like the use of an encoding block identifier 'bt' will eventually\n be removed. 
*/\n\n // private key operation\n var bt = false;\n\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n }\n\n if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') {\n scheme = {encode: emsaPkcs1v15encode};\n bt = 0x01;\n } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {\n scheme = {encode: function() {return md;}};\n bt = 0x01;\n }\n\n // encode and then encrypt\n var d = scheme.encode(md, key.n.bitLength());\n return pki.rsa.encrypt(d, key, bt);\n };\n\n return key;\n};\n\n/**\n * Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object.\n *\n * @param rsaKey the ASN.1 RSAPrivateKey.\n *\n * @return the ASN.1 PrivateKeyInfo.\n */\npki.wrapRsaPrivateKey = function(rsaKey) {\n // PrivateKeyInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (0)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(0).getBytes()),\n // privateKeyAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.rsaEncryption).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // PrivateKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(rsaKey).getBytes())\n ]);\n};\n\n/**\n * Converts a private key from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a PrivateKeyInfo containing an\n * RSAPrivateKey or an RSAPrivateKey.\n *\n * @return the private key.\n */\npki.privateKeyFromAsn1 = function(obj) {\n // get PrivateKeyInfo\n var capture = {};\n var errors = [];\n if(asn1.validate(obj, privateKeyValidator, capture, errors)) {\n obj = asn1.fromDer(forge.util.createBuffer(capture.privateKey));\n }\n\n // get RSAPrivateKey\n capture = {};\n errors = [];\n if(!asn1.validate(obj, rsaPrivateKeyValidator, capture, errors)) {\n var error = new Error('Cannot read private key. 
' +\n 'ASN.1 object does not contain an RSAPrivateKey.');\n error.errors = errors;\n throw error;\n }\n\n // Note: Version is currently ignored.\n // capture.privateKeyVersion\n // FIXME: inefficient, get a BigInteger that uses byte strings\n var n, e, d, p, q, dP, dQ, qInv;\n n = forge.util.createBuffer(capture.privateKeyModulus).toHex();\n e = forge.util.createBuffer(capture.privateKeyPublicExponent).toHex();\n d = forge.util.createBuffer(capture.privateKeyPrivateExponent).toHex();\n p = forge.util.createBuffer(capture.privateKeyPrime1).toHex();\n q = forge.util.createBuffer(capture.privateKeyPrime2).toHex();\n dP = forge.util.createBuffer(capture.privateKeyExponent1).toHex();\n dQ = forge.util.createBuffer(capture.privateKeyExponent2).toHex();\n qInv = forge.util.createBuffer(capture.privateKeyCoefficient).toHex();\n\n // set private key\n return pki.setRsaPrivateKey(\n new BigInteger(n, 16),\n new BigInteger(e, 16),\n new BigInteger(d, 16),\n new BigInteger(p, 16),\n new BigInteger(q, 16),\n new BigInteger(dP, 16),\n new BigInteger(dQ, 16),\n new BigInteger(qInv, 16));\n};\n\n/**\n * Converts a private key to an ASN.1 RSAPrivateKey.\n *\n * @param key the private key.\n *\n * @return the ASN.1 representation of an RSAPrivateKey.\n */\npki.privateKeyToAsn1 = pki.privateKeyToRSAPrivateKey = function(key) {\n // RSAPrivateKey\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (0 = only 2 primes, 1 multiple primes)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(0).getBytes()),\n // modulus (n)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.n)),\n // publicExponent (e)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.e)),\n // privateExponent (d)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.d)),\n // privateKeyPrime1 (p)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.p)),\n // privateKeyPrime2 (q)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.q)),\n // privateKeyExponent1 (dP)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.dP)),\n // privateKeyExponent2 (dQ)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.dQ)),\n // coefficient (qInv)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.qInv))\n ]);\n};\n\n/**\n * Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey.\n *\n * @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey.\n *\n * @return the public key.\n */\npki.publicKeyFromAsn1 = function(obj) {\n // get SubjectPublicKeyInfo\n var capture = {};\n var errors = [];\n if(asn1.validate(obj, publicKeyValidator, capture, errors)) {\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n var error = new Error('Cannot read public key. Unknown OID.');\n error.oid = oid;\n throw error;\n }\n obj = capture.rsaPublicKey;\n }\n\n // get RSA params\n errors = [];\n if(!asn1.validate(obj, rsaPublicKeyValidator, capture, errors)) {\n var error = new Error('Cannot read public key. 
' +\n 'ASN.1 object does not contain an RSAPublicKey.');\n error.errors = errors;\n throw error;\n }\n\n // FIXME: inefficient, get a BigInteger that uses byte strings\n var n = forge.util.createBuffer(capture.publicKeyModulus).toHex();\n var e = forge.util.createBuffer(capture.publicKeyExponent).toHex();\n\n // set public key\n return pki.setRsaPublicKey(\n new BigInteger(n, 16),\n new BigInteger(e, 16));\n};\n\n/**\n * Converts a public key to an ASN.1 SubjectPublicKeyInfo.\n *\n * @param key the public key.\n *\n * @return the asn1 representation of a SubjectPublicKeyInfo.\n */\npki.publicKeyToAsn1 = pki.publicKeyToSubjectPublicKeyInfo = function(key) {\n // SubjectPublicKeyInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.rsaEncryption).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // subjectPublicKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [\n pki.publicKeyToRSAPublicKey(key)\n ])\n ]);\n};\n\n/**\n * Converts a public key to an ASN.1 RSAPublicKey.\n *\n * @param key the public key.\n *\n * @return the asn1 representation of a RSAPublicKey.\n */\npki.publicKeyToRSAPublicKey = function(key) {\n // RSAPublicKey\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // modulus (n)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.n)),\n // publicExponent (e)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.e))\n ]);\n};\n\n/**\n * Encodes a message using PKCS#1 v1.5 padding.\n *\n * @param m the message to encode.\n * @param key the RSA key to use.\n * @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02\n * (for encryption).\n *\n * @return the padded byte buffer.\n */\nfunction _encodePkcs1_v1_5(m, key, bt) {\n var eb = forge.util.createBuffer();\n\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n /* use PKCS#1 v1.5 padding */\n if(m.length > (k - 11)) {\n var error = new Error('Message is too long for PKCS#1 v1.5 padding.');\n error.length = m.length;\n error.max = k - 11;\n throw error;\n }\n\n /* A block type BT, a padding string PS, and the data D shall be\n formatted into an octet string EB, the encryption block:\n\n EB = 00 || BT || PS || 00 || D\n\n The block type BT shall be a single octet indicating the structure of\n the encryption block. For this version of the document it shall have\n value 00, 01, or 02. For a private-key operation, the block type\n shall be 00 or 01. For a public-key operation, it shall be 02.\n\n The padding string PS shall consist of k-3-||D|| octets. For block\n type 00, the octets shall have value 00; for block type 01, they\n shall have value FF; and for block type 02, they shall be\n pseudorandomly generated and nonzero. This makes the length of the\n encryption block EB equal to k. */\n\n // build the encryption block\n eb.putByte(0x00);\n eb.putByte(bt);\n\n // create the padding\n var padNum = k - 3 - m.length;\n var padByte;\n // private key op\n if(bt === 0x00 || bt === 0x01) {\n padByte = (bt === 0x00) ? 
0x00 : 0xFF;\n for(var i = 0; i < padNum; ++i) {\n eb.putByte(padByte);\n }\n } else {\n // public key op\n // pad with random non-zero values\n while(padNum > 0) {\n var numZeros = 0;\n var padBytes = forge.random.getBytes(padNum);\n for(var i = 0; i < padNum; ++i) {\n padByte = padBytes.charCodeAt(i);\n if(padByte === 0) {\n ++numZeros;\n } else {\n eb.putByte(padByte);\n }\n }\n padNum = numZeros;\n }\n }\n\n // zero followed by message\n eb.putByte(0x00);\n eb.putBytes(m);\n\n return eb;\n}\n\n/**\n * Decodes a message using PKCS#1 v1.5 padding.\n *\n * @param em the message to decode.\n * @param key the RSA key to use.\n * @param pub true if the key is a public key, false if it is private.\n * @param ml the message length, if specified.\n *\n * @return the decoded bytes.\n */\nfunction _decodePkcs1_v1_5(em, key, pub, ml) {\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n /* It is an error if any of the following conditions occurs:\n\n 1. The encryption block EB cannot be parsed unambiguously.\n 2. The padding string PS consists of fewer than eight octets\n or is inconsisent with the block type BT.\n 3. The decryption process is a public-key operation and the block\n type BT is not 00 or 01, or the decryption process is a\n private-key operation and the block type is not 02.\n */\n\n // parse the encryption block\n var eb = forge.util.createBuffer(em);\n var first = eb.getByte();\n var bt = eb.getByte();\n if(first !== 0x00 ||\n (pub && bt !== 0x00 && bt !== 0x01) ||\n (!pub && bt != 0x02) ||\n (pub && bt === 0x00 && typeof(ml) === 'undefined')) {\n throw new Error('Encryption block is invalid.');\n }\n\n var padNum = 0;\n if(bt === 0x00) {\n // check all padding bytes for 0x00\n padNum = k - 3 - ml;\n for(var i = 0; i < padNum; ++i) {\n if(eb.getByte() !== 0x00) {\n throw new Error('Encryption block is invalid.');\n }\n }\n } else if(bt === 0x01) {\n // find the first byte that isn't 0xFF, should be after all padding\n padNum = 0;\n while(eb.length() > 1) {\n if(eb.getByte() !== 0xFF) {\n --eb.read;\n break;\n }\n ++padNum;\n }\n } else if(bt === 0x02) {\n // look for 0x00 byte\n padNum = 0;\n while(eb.length() > 1) {\n if(eb.getByte() === 0x00) {\n --eb.read;\n break;\n }\n ++padNum;\n }\n }\n\n // zero must be 0x00 and padNum must be (k - 3 - message length)\n var zero = eb.getByte();\n if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) {\n throw new Error('Encryption block is invalid.');\n }\n\n return eb.getBytes();\n}\n\n/**\n * Runs the key-generation algorithm asynchronously, either in the background\n * via Web Workers, or using the main thread and setImmediate.\n *\n * @param state the key-pair generation state.\n * @param [options] options for key-pair generation:\n * workerScript the worker script URL.\n * workers the number of web workers (if supported) to use,\n * (default: 2, -1 to use estimated cores minus one).\n * workLoad the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * @param callback(err, keypair) called once the operation completes.\n */\nfunction _generateKeyPair(state, options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n\n var opts = {\n algorithm: {\n name: options.algorithm || 'PRIMEINC',\n options: {\n workers: options.workers || 2,\n workLoad: options.workLoad || 100,\n workerScript: options.workerScript\n }\n }\n };\n if('prng' in options) {\n 
opts.prng = options.prng;\n }\n\n generate();\n\n function generate() {\n // find p and then q (done in series to simplify)\n getPrime(state.pBits, function(err, num) {\n if(err) {\n return callback(err);\n }\n state.p = num;\n if(state.q !== null) {\n return finish(err, state.q);\n }\n getPrime(state.qBits, finish);\n });\n }\n\n function getPrime(bits, callback) {\n forge.prime.generateProbablePrime(bits, opts, callback);\n }\n\n function finish(err, num) {\n if(err) {\n return callback(err);\n }\n\n // set q\n state.q = num;\n\n // ensure p is larger than q (swap them if not)\n if(state.p.compareTo(state.q) < 0) {\n var tmp = state.p;\n state.p = state.q;\n state.q = tmp;\n }\n\n // ensure p is coprime with e\n if(state.p.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) !== 0) {\n state.p = null;\n generate();\n return;\n }\n\n // ensure q is coprime with e\n if(state.q.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) !== 0) {\n state.q = null;\n getPrime(state.qBits, finish);\n return;\n }\n\n // compute phi: (p - 1)(q - 1) (Euler's totient function)\n state.p1 = state.p.subtract(BigInteger.ONE);\n state.q1 = state.q.subtract(BigInteger.ONE);\n state.phi = state.p1.multiply(state.q1);\n\n // ensure e and phi are coprime\n if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) !== 0) {\n // phi and e aren't coprime, so generate a new p and q\n state.p = state.q = null;\n generate();\n return;\n }\n\n // create n, ensure n is has the right number of bits\n state.n = state.p.multiply(state.q);\n if(state.n.bitLength() !== state.bits) {\n // failed, get new q\n state.q = null;\n getPrime(state.qBits, finish);\n return;\n }\n\n // set keys\n var d = state.e.modInverse(state.phi);\n state.keys = {\n privateKey: pki.rsa.setPrivateKey(\n state.n, state.e, d, state.p, state.q,\n d.mod(state.p1), d.mod(state.q1),\n state.q.modInverse(state.p)),\n publicKey: pki.rsa.setPublicKey(state.n, state.e)\n };\n\n callback(null, state.keys);\n }\n}\n\n/**\n * Converts a positive BigInteger into 2's-complement big-endian bytes.\n *\n * @param b the big integer to convert.\n *\n * @return the bytes.\n */\nfunction _bnToBytes(b) {\n // prepend 0x00 if first byte >= 0x80\n var hex = b.toString(16);\n if(hex[0] >= '8') {\n hex = '00' + hex;\n }\n var bytes = forge.util.hexToBytes(hex);\n\n // ensure integer is minimally-encoded\n if(bytes.length > 1 &&\n // leading 0x00 for positive integer\n ((bytes.charCodeAt(0) === 0 &&\n (bytes.charCodeAt(1) & 0x80) === 0) ||\n // leading 0xFF for negative integer\n (bytes.charCodeAt(0) === 0xFF &&\n (bytes.charCodeAt(1) & 0x80) === 0x80))) {\n return bytes.substr(1);\n }\n return bytes;\n}\n\n/**\n * Returns the required number of Miller-Rabin tests to generate a\n * prime with an error probability of (1/2)^80.\n *\n * See Handbook of Applied Cryptography Chapter 4, Table 4.4.\n *\n * @param bits the bit size.\n *\n * @return the required number of iterations.\n */\nfunction _getMillerRabinTests(bits) {\n if(bits <= 100) return 27;\n if(bits <= 150) return 18;\n if(bits <= 200) return 15;\n if(bits <= 250) return 12;\n if(bits <= 300) return 9;\n if(bits <= 350) return 8;\n if(bits <= 400) return 7;\n if(bits <= 500) return 6;\n if(bits <= 600) return 5;\n if(bits <= 800) return 4;\n if(bits <= 1250) return 3;\n return 2;\n}\n\n/**\n * Performs feature detection on the Node crypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectNodeCrypto(fn) {\n return 
forge.util.isNodejs && typeof _crypto[fn] === 'function';\n}\n\n/**\n * Performs feature detection on the SubtleCrypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectSubtleCrypto(fn) {\n return (typeof util.globalScope !== 'undefined' &&\n typeof util.globalScope.crypto === 'object' &&\n typeof util.globalScope.crypto.subtle === 'object' &&\n typeof util.globalScope.crypto.subtle[fn] === 'function');\n}\n\n/**\n * Performs feature detection on the deprecated Microsoft Internet Explorer\n * outdated SubtleCrypto interface. This function should only be used after\n * checking for the modern, standard SubtleCrypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectSubtleMsCrypto(fn) {\n return (typeof util.globalScope !== 'undefined' &&\n typeof util.globalScope.msCrypto === 'object' &&\n typeof util.globalScope.msCrypto.subtle === 'object' &&\n typeof util.globalScope.msCrypto.subtle[fn] === 'function');\n}\n\nfunction _intToUint8Array(x) {\n var bytes = forge.util.hexToBytes(x.toString(16));\n var buffer = new Uint8Array(bytes.length);\n for(var i = 0; i < bytes.length; ++i) {\n buffer[i] = bytes.charCodeAt(i);\n }\n return buffer;\n}\n\nfunction _privateKeyFromJwk(jwk) {\n if(jwk.kty !== 'RSA') {\n throw new Error(\n 'Unsupported key algorithm \"' + jwk.kty + '\"; algorithm must be \"RSA\".');\n }\n return pki.setRsaPrivateKey(\n _base64ToBigInt(jwk.n),\n _base64ToBigInt(jwk.e),\n _base64ToBigInt(jwk.d),\n _base64ToBigInt(jwk.p),\n _base64ToBigInt(jwk.q),\n _base64ToBigInt(jwk.dp),\n _base64ToBigInt(jwk.dq),\n _base64ToBigInt(jwk.qi));\n}\n\nfunction _publicKeyFromJwk(jwk) {\n if(jwk.kty !== 'RSA') {\n throw new Error('Key algorithm must be \"RSA\".');\n }\n return pki.setRsaPublicKey(\n _base64ToBigInt(jwk.n),\n _base64ToBigInt(jwk.e));\n}\n\nfunction _base64ToBigInt(b64) {\n return new BigInteger(forge.util.bytesToHex(forge.util.decode64(b64)), 16);\n}\n","/**\n * Secure Hash Algorithm with 160-bit digest (SHA-1) implementation.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar sha1 = module.exports = forge.sha1 = forge.sha1 || {};\nforge.md.sha1 = forge.md.algorithms.sha1 = sha1;\n\n/**\n * Creates a SHA-1 message digest object.\n *\n * @return a message digest object.\n */\nsha1.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // SHA-1 state contains five 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(80);\n\n // message digest object\n var md = {\n algorithm: 'sha1',\n blockLength: 64,\n digestLength: 20,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = 
{\n h0: 0x67452301,\n h1: 0xEFCDAB89,\n h2: 0x98BADCFE,\n h3: 0x10325476,\n h4: 0xC3D2E1F0\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-1 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3,\n h4: _state.h4\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32(s2.h0);\n rval.putInt32(s2.h1);\n rval.putInt32(s2.h2);\n rval.putInt32(s2.h3);\n rval.putInt32(s2.h4);\n return rval;\n };\n\n return md;\n};\n\n// sha-1 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-1 state with the given byte buffer.\n *\n * @param s the SHA-1 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t, a, b, c, d, e, f, i;\n var len = bytes.length();\n while(len >= 64) {\n // the w array will be populated with sixteen 32-bit big-endian words\n // and then extended into 80 32-bit words according to SHA-1 algorithm\n // and for 32-79 using Max Locktyukhin's optimization\n\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n e = s.h4;\n\n // round 1\n for(i = 0; i < 16; ++i) {\n t = bytes.getInt32();\n w[i] = t;\n f = d ^ (b & (c ^ d));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n for(; i < 20; ++i) {\n t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]);\n t = (t << 1) | (t >>> 31);\n w[i] = t;\n f = d ^ (b & (c ^ d));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 2\n for(; i < 32; ++i) {\n t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]);\n t = (t << 1) | (t >>> 31);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n for(; i < 40; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t;\n e = d;\n d = c;\n 
// `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 3\n for(; i < 60; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = (b & c) | (d & (b ^ c));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 4\n for(; i < 80; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n s.h4 = (s.h4 + e) | 0;\n\n len -= 64;\n }\n}\n","/**\n * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation.\n *\n * See FIPS 180-2 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar sha256 = module.exports = forge.sha256 = forge.sha256 || {};\nforge.md.sha256 = forge.md.algorithms.sha256 = sha256;\n\n/**\n * Creates a SHA-256 message digest object.\n *\n * @return a message digest object.\n */\nsha256.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // SHA-256 state contains eight 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(64);\n\n // message digest object\n var md = {\n algorithm: 'sha256',\n blockLength: 64,\n digestLength: 32,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = {\n h0: 0x6A09E667,\n h1: 0xBB67AE85,\n h2: 0x3C6EF372,\n h3: 0xA54FF53A,\n h4: 0x510E527F,\n h5: 0x9B05688C,\n h6: 0x1F83D9AB,\n h7: 0x5BE0CD19\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. 
The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-256 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3,\n h4: _state.h4,\n h5: _state.h5,\n h6: _state.h6,\n h7: _state.h7\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32(s2.h0);\n rval.putInt32(s2.h1);\n rval.putInt32(s2.h2);\n rval.putInt32(s2.h3);\n rval.putInt32(s2.h4);\n rval.putInt32(s2.h5);\n rval.putInt32(s2.h6);\n rval.putInt32(s2.h7);\n return rval;\n };\n\n return md;\n};\n\n// sha-256 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n// table of constants\nvar _k = null;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // create K table for SHA-256\n _k = [\n 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,\n 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,\n 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,\n 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,\n 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,\n 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,\n 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,\n 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,\n 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,\n 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,\n 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,\n 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,\n 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,\n 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,\n 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,\n 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2];\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-256 state with the given byte buffer.\n *\n * @param s the SHA-256 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h;\n var len = bytes.length();\n while(len >= 64) {\n // the w array will be populated with sixteen 32-bit big-endian words\n // and then extended into 64 32-bit words according to SHA-256\n for(i = 0; i < 16; ++i) {\n w[i] = bytes.getInt32();\n }\n for(; i < 64; ++i) {\n // XOR word 2 words ago rot right 17, rot right 19, shft right 10\n t1 = w[i - 2];\n t1 =\n ((t1 >>> 17) | (t1 << 15)) ^\n ((t1 >>> 19) | (t1 << 13)) ^\n (t1 >>> 10);\n // XOR word 15 words ago rot right 7, rot right 18, shft right 3\n 
t2 = w[i - 15];\n t2 =\n ((t2 >>> 7) | (t2 << 25)) ^\n ((t2 >>> 18) | (t2 << 14)) ^\n (t2 >>> 3);\n // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32\n w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0;\n }\n\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n e = s.h4;\n f = s.h5;\n g = s.h6;\n h = s.h7;\n\n // round function\n for(i = 0; i < 64; ++i) {\n // Sum1(e)\n s1 =\n ((e >>> 6) | (e << 26)) ^\n ((e >>> 11) | (e << 21)) ^\n ((e >>> 25) | (e << 7));\n // Ch(e, f, g) (optimized the same way as SHA-1)\n ch = g ^ (e & (f ^ g));\n // Sum0(a)\n s0 =\n ((a >>> 2) | (a << 30)) ^\n ((a >>> 13) | (a << 19)) ^\n ((a >>> 22) | (a << 10));\n // Maj(a, b, c) (optimized the same way as SHA-1)\n maj = (a & b) | (c & (a ^ b));\n\n // main algorithm\n t1 = h + s1 + ch + _k[i] + w[i];\n t2 = s0 + maj;\n h = g;\n g = f;\n f = e;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n // can't truncate with `| 0`\n e = (d + t1) >>> 0;\n d = c;\n c = b;\n b = a;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n // can't truncate with `| 0`\n a = (t1 + t2) >>> 0;\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n s.h4 = (s.h4 + e) | 0;\n s.h5 = (s.h5 + f) | 0;\n s.h6 = (s.h6 + g) | 0;\n s.h7 = (s.h7 + h) | 0;\n len -= 64;\n }\n}\n","/**\n * Secure Hash Algorithm with a 1024-bit block size implementation.\n *\n * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For\n * SHA-256 (block size 512 bits), see sha256.js.\n *\n * See FIPS 180-4 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2014-2015 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./md');\nrequire('./util');\n\nvar sha512 = module.exports = forge.sha512 = forge.sha512 || {};\n\n// SHA-512\nforge.md.sha512 = forge.md.algorithms.sha512 = sha512;\n\n// SHA-384\nvar sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {};\nsha384.create = function() {\n return sha512.create('SHA-384');\n};\nforge.md.sha384 = forge.md.algorithms.sha384 = sha384;\n\n// SHA-512/256\nforge.sha512.sha256 = forge.sha512.sha256 || {\n create: function() {\n return sha512.create('SHA-512/256');\n }\n};\nforge.md['sha512/256'] = forge.md.algorithms['sha512/256'] =\n forge.sha512.sha256;\n\n// SHA-512/224\nforge.sha512.sha224 = forge.sha512.sha224 || {\n create: function() {\n return sha512.create('SHA-512/224');\n }\n};\nforge.md['sha512/224'] = forge.md.algorithms['sha512/224'] =\n forge.sha512.sha224;\n\n/**\n * Creates a SHA-2 message digest object.\n *\n * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224,\n * SHA-512/256).\n *\n * @return a message digest object.\n */\nsha512.create = function(algorithm) {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n if(typeof algorithm === 'undefined') {\n algorithm = 'SHA-512';\n }\n\n if(!(algorithm in _states)) {\n throw new Error('Invalid SHA-512 algorithm: ' + algorithm);\n }\n\n // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints)\n var _state = _states[algorithm];\n var _h = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for 64-bit word storage\n var _w = new Array(80);\n for(var wi = 0; wi < 80; ++wi) {\n _w[wi] = new Array(2);\n }\n\n // determine digest length by algorithm name (default)\n var digestLength = 64;\n switch(algorithm) {\n case 'SHA-384':\n digestLength = 48;\n break;\n case 'SHA-512/256':\n digestLength = 
32;\n break;\n case 'SHA-512/224':\n digestLength = 28;\n break;\n }\n\n // message digest object\n var md = {\n // SHA-512 => sha512\n algorithm: algorithm.replace('-', '').toLowerCase(),\n blockLength: 128,\n digestLength: digestLength,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 16\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength128 for backwards-compatibility)\n md.fullMessageLength = md.messageLength128 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _h = new Array(_state.length);\n for(var i = 0; i < _state.length; ++i) {\n _h[i] = _state[i].slice(0);\n }\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_h, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-512 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 896 mod 1024. In other words,\n the data to be digested must be a multiple of 1024 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 16 bytes (128\n bits), that means that the last segment of the data must have 112 bytes\n (896 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 896 mod 1024 because\n 1024 - 128 = 896.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 896 mod 1024, then 1024 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var h = new Array(_h.length);\n for(var i = 0; i < _h.length; ++i) {\n h[i] = _h[i].slice(0);\n }\n _update(h, _w, finalBlock);\n var rval = forge.util.createBuffer();\n var hlen;\n if(algorithm === 'SHA-512') {\n hlen = h.length;\n } else if(algorithm === 'SHA-384') {\n hlen = h.length - 2;\n } else {\n hlen = h.length - 4;\n }\n for(var i = 0; i < hlen; ++i) {\n rval.putInt32(h[i][0]);\n if(i !== hlen - 1 || algorithm !== 'SHA-512/224') {\n rval.putInt32(h[i][1]);\n }\n }\n return rval;\n };\n\n return md;\n};\n\n// sha-512 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n// table of constants\nvar _k = null;\n\n// initial hash states\nvar _states = null;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 128);\n\n // create K table for SHA-512\n _k = [\n [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd],\n [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc],\n [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019],\n [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118],\n [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe],\n [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2],\n [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1],\n [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694],\n [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3],\n [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65],\n [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483],\n [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5],\n [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210],\n [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4],\n [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725],\n [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70],\n [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926],\n [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df],\n [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8],\n [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b],\n [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001],\n [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30],\n [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910],\n [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8],\n [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53],\n [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8],\n [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb],\n [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3],\n [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60],\n [0x84c87814, 
0xa1f0ab72], [0x8cc70208, 0x1a6439ec],\n [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9],\n [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b],\n [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207],\n [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178],\n [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6],\n [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b],\n [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493],\n [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c],\n [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a],\n [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817]\n ];\n\n // initial hash states\n _states = {};\n _states['SHA-512'] = [\n [0x6a09e667, 0xf3bcc908],\n [0xbb67ae85, 0x84caa73b],\n [0x3c6ef372, 0xfe94f82b],\n [0xa54ff53a, 0x5f1d36f1],\n [0x510e527f, 0xade682d1],\n [0x9b05688c, 0x2b3e6c1f],\n [0x1f83d9ab, 0xfb41bd6b],\n [0x5be0cd19, 0x137e2179]\n ];\n _states['SHA-384'] = [\n [0xcbbb9d5d, 0xc1059ed8],\n [0x629a292a, 0x367cd507],\n [0x9159015a, 0x3070dd17],\n [0x152fecd8, 0xf70e5939],\n [0x67332667, 0xffc00b31],\n [0x8eb44a87, 0x68581511],\n [0xdb0c2e0d, 0x64f98fa7],\n [0x47b5481d, 0xbefa4fa4]\n ];\n _states['SHA-512/256'] = [\n [0x22312194, 0xFC2BF72C],\n [0x9F555FA3, 0xC84C64C2],\n [0x2393B86B, 0x6F53B151],\n [0x96387719, 0x5940EABD],\n [0x96283EE2, 0xA88EFFE3],\n [0xBE5E1E25, 0x53863992],\n [0x2B0199FC, 0x2C85B8AA],\n [0x0EB72DDC, 0x81C52CA2]\n ];\n _states['SHA-512/224'] = [\n [0x8C3D37C8, 0x19544DA2],\n [0x73E19966, 0x89DCD4D6],\n [0x1DFAB7AE, 0x32FF9C82],\n [0x679DD514, 0x582F9FCF],\n [0x0F6D2B69, 0x7BD44DA8],\n [0x77E36F73, 0x04C48942],\n [0x3F9D85A8, 0x6A1D36C8],\n [0x1112E6AD, 0x91D692A1]\n ];\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-512 state with the given byte buffer.\n *\n * @param s the SHA-512 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (128 byte) chunks\n var t1_hi, t1_lo;\n var t2_hi, t2_lo;\n var s0_hi, s0_lo;\n var s1_hi, s1_lo;\n var ch_hi, ch_lo;\n var maj_hi, maj_lo;\n var a_hi, a_lo;\n var b_hi, b_lo;\n var c_hi, c_lo;\n var d_hi, d_lo;\n var e_hi, e_lo;\n var f_hi, f_lo;\n var g_hi, g_lo;\n var h_hi, h_lo;\n var i, hi, lo, w2, w7, w15, w16;\n var len = bytes.length();\n while(len >= 128) {\n // the w array will be populated with sixteen 64-bit big-endian words\n // and then extended into 64 64-bit words according to SHA-512\n for(i = 0; i < 16; ++i) {\n w[i][0] = bytes.getInt32() >>> 0;\n w[i][1] = bytes.getInt32() >>> 0;\n }\n for(; i < 80; ++i) {\n // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x)\n w2 = w[i - 2];\n hi = w2[0];\n lo = w2[1];\n\n // high bits\n t1_hi = (\n ((hi >>> 19) | (lo << 13)) ^ // ROTR 19\n ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29)\n (hi >>> 6)) >>> 0; // SHR 6\n // low bits\n t1_lo = (\n ((hi << 13) | (lo >>> 19)) ^ // ROTR 19\n ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29)\n ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6\n\n // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x)\n w15 = w[i - 15];\n hi = w15[0];\n lo = w15[1];\n\n // high bits\n t2_hi = (\n ((hi >>> 1) | (lo << 31)) ^ // ROTR 1\n ((hi >>> 8) | (lo << 24)) ^ // ROTR 8\n (hi >>> 7)) >>> 0; // SHR 7\n // low bits\n t2_lo = (\n ((hi << 31) | (lo >>> 1)) ^ // ROTR 1\n ((hi << 24) | (lo >>> 8)) ^ // ROTR 8\n ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7\n\n // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow)\n w7 = w[i - 7];\n w16 = w[i - 16];\n lo = 
(t1_lo + w7[1] + t2_lo + w16[1]);\n w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] +\n ((lo / 0x100000000) >>> 0)) >>> 0;\n w[i][1] = lo >>> 0;\n }\n\n // initialize hash value for this chunk\n a_hi = s[0][0];\n a_lo = s[0][1];\n b_hi = s[1][0];\n b_lo = s[1][1];\n c_hi = s[2][0];\n c_lo = s[2][1];\n d_hi = s[3][0];\n d_lo = s[3][1];\n e_hi = s[4][0];\n e_lo = s[4][1];\n f_hi = s[5][0];\n f_lo = s[5][1];\n g_hi = s[6][0];\n g_lo = s[6][1];\n h_hi = s[7][0];\n h_lo = s[7][1];\n\n // round function\n for(i = 0; i < 80; ++i) {\n // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e)\n s1_hi = (\n ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14\n ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18\n ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9)\n s1_lo = (\n ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14\n ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18\n ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9)\n\n // Ch(e, f, g) (optimized the same way as SHA-1)\n ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0;\n ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0;\n\n // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a)\n s0_hi = (\n ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28\n ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2)\n ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7)\n s0_lo = (\n ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28\n ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2)\n ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7)\n\n // Maj(a, b, c) (optimized the same way as SHA-1)\n maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0;\n maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0;\n\n // main algorithm\n // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow)\n lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]);\n t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] +\n ((lo / 0x100000000) >>> 0)) >>> 0;\n t1_lo = lo >>> 0;\n\n // t2 = s0 + maj modulo 2^64 (carry lo overflow)\n lo = s0_lo + maj_lo;\n t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n t2_lo = lo >>> 0;\n\n h_hi = g_hi;\n h_lo = g_lo;\n\n g_hi = f_hi;\n g_lo = f_lo;\n\n f_hi = e_hi;\n f_lo = e_lo;\n\n // e = (d + t1) modulo 2^64 (carry lo overflow)\n lo = d_lo + t1_lo;\n e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n e_lo = lo >>> 0;\n\n d_hi = c_hi;\n d_lo = c_lo;\n\n c_hi = b_hi;\n c_lo = b_lo;\n\n b_hi = a_hi;\n b_lo = a_lo;\n\n // a = (t1 + t2) modulo 2^64 (carry lo overflow)\n lo = t1_lo + t2_lo;\n a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n a_lo = lo >>> 0;\n }\n\n // update hash state (additional modulo 2^64)\n lo = s[0][1] + a_lo;\n s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[0][1] = lo >>> 0;\n\n lo = s[1][1] + b_lo;\n s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[1][1] = lo >>> 0;\n\n lo = s[2][1] + c_lo;\n s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[2][1] = lo >>> 0;\n\n lo = s[3][1] + d_lo;\n s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[3][1] = lo >>> 0;\n\n lo = s[4][1] + e_lo;\n s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[4][1] = lo >>> 0;\n\n lo = s[5][1] + f_lo;\n s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[5][1] = lo >>> 0;\n\n lo = s[6][1] + g_lo;\n s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[6][1] = lo >>> 0;\n\n lo = s[7][1] + h_lo;\n s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[7][1] = lo >>> 0;\n\n len -= 128;\n 
}\n}\n","/**\n * Functions to output keys in SSH-friendly formats.\n *\n * This is part of the Forge project which may be used under the terms of\n * either the BSD License or the GNU General Public License (GPL) Version 2.\n *\n * See: https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE\n *\n * @author https://github.com/shellac\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./hmac');\nrequire('./md5');\nrequire('./sha1');\nrequire('./util');\n\nvar ssh = module.exports = forge.ssh = forge.ssh || {};\n\n/**\n * Encodes (and optionally encrypts) a private RSA key as a Putty PPK file.\n *\n * @param privateKey the key.\n * @param passphrase a passphrase to protect the key (falsy for no encryption).\n * @param comment a comment to include in the key file.\n *\n * @return the PPK file as a string.\n */\nssh.privateKeyToPutty = function(privateKey, passphrase, comment) {\n comment = comment || '';\n passphrase = passphrase || '';\n var algorithm = 'ssh-rsa';\n var encryptionAlgorithm = (passphrase === '') ? 'none' : 'aes256-cbc';\n\n var ppk = 'PuTTY-User-Key-File-2: ' + algorithm + '\\r\\n';\n ppk += 'Encryption: ' + encryptionAlgorithm + '\\r\\n';\n ppk += 'Comment: ' + comment + '\\r\\n';\n\n // public key into buffer for ppk\n var pubbuffer = forge.util.createBuffer();\n _addStringToBuffer(pubbuffer, algorithm);\n _addBigIntegerToBuffer(pubbuffer, privateKey.e);\n _addBigIntegerToBuffer(pubbuffer, privateKey.n);\n\n // write public key\n var pub = forge.util.encode64(pubbuffer.bytes(), 64);\n var length = Math.floor(pub.length / 66) + 1; // 66 = 64 + \\r\\n\n ppk += 'Public-Lines: ' + length + '\\r\\n';\n ppk += pub;\n\n // private key into a buffer\n var privbuffer = forge.util.createBuffer();\n _addBigIntegerToBuffer(privbuffer, privateKey.d);\n _addBigIntegerToBuffer(privbuffer, privateKey.p);\n _addBigIntegerToBuffer(privbuffer, privateKey.q);\n _addBigIntegerToBuffer(privbuffer, privateKey.qInv);\n\n // optionally encrypt the private key\n var priv;\n if(!passphrase) {\n // use the unencrypted buffer\n priv = forge.util.encode64(privbuffer.bytes(), 64);\n } else {\n // encrypt RSA key using passphrase\n var encLen = privbuffer.length() + 16 - 1;\n encLen -= encLen % 16;\n\n // pad private key with sha1-d data -- needs to be a multiple of 16\n var padding = _sha1(privbuffer.bytes());\n\n padding.truncate(padding.length() - encLen + privbuffer.length());\n privbuffer.putBuffer(padding);\n\n var aeskey = forge.util.createBuffer();\n aeskey.putBuffer(_sha1('\\x00\\x00\\x00\\x00', passphrase));\n aeskey.putBuffer(_sha1('\\x00\\x00\\x00\\x01', passphrase));\n\n // encrypt some bytes using CBC mode\n // key is 40 bytes, so truncate *by* 8 bytes\n var cipher = forge.aes.createEncryptionCipher(aeskey.truncate(8), 'CBC');\n cipher.start(forge.util.createBuffer().fillWithByte(0, 16));\n cipher.update(privbuffer.copy());\n cipher.finish();\n var encrypted = cipher.output;\n\n // Note: this appears to differ from Putty -- is forge wrong, or putty?\n // due to padding we finish as an exact multiple of 16\n encrypted.truncate(16); // all padding\n\n priv = forge.util.encode64(encrypted.bytes(), 64);\n }\n\n // output private key\n length = Math.floor(priv.length / 66) + 1; // 64 + \\r\\n\n ppk += '\\r\\nPrivate-Lines: ' + length + '\\r\\n';\n ppk += priv;\n\n // MAC\n var mackey = _sha1('putty-private-key-file-mac-key', passphrase);\n\n var macbuffer = forge.util.createBuffer();\n _addStringToBuffer(macbuffer, algorithm);\n 
_addStringToBuffer(macbuffer, encryptionAlgorithm);\n _addStringToBuffer(macbuffer, comment);\n macbuffer.putInt32(pubbuffer.length());\n macbuffer.putBuffer(pubbuffer);\n macbuffer.putInt32(privbuffer.length());\n macbuffer.putBuffer(privbuffer);\n\n var hmac = forge.hmac.create();\n hmac.start('sha1', mackey);\n hmac.update(macbuffer.bytes());\n\n ppk += '\\r\\nPrivate-MAC: ' + hmac.digest().toHex() + '\\r\\n';\n\n return ppk;\n};\n\n/**\n * Encodes a public RSA key as an OpenSSH file.\n *\n * @param key the key.\n * @param comment a comment.\n *\n * @return the public key in OpenSSH format.\n */\nssh.publicKeyToOpenSSH = function(key, comment) {\n var type = 'ssh-rsa';\n comment = comment || '';\n\n var buffer = forge.util.createBuffer();\n _addStringToBuffer(buffer, type);\n _addBigIntegerToBuffer(buffer, key.e);\n _addBigIntegerToBuffer(buffer, key.n);\n\n return type + ' ' + forge.util.encode64(buffer.bytes()) + ' ' + comment;\n};\n\n/**\n * Encodes a private RSA key as an OpenSSH file.\n *\n * @param key the key.\n * @param passphrase a passphrase to protect the key (falsy for no encryption).\n *\n * @return the public key in OpenSSH format.\n */\nssh.privateKeyToOpenSSH = function(privateKey, passphrase) {\n if(!passphrase) {\n return forge.pki.privateKeyToPem(privateKey);\n }\n // OpenSSH private key is just a legacy format, it seems\n return forge.pki.encryptRsaPrivateKey(privateKey, passphrase,\n {legacy: true, algorithm: 'aes128'});\n};\n\n/**\n * Gets the SSH fingerprint for the given public key.\n *\n * @param options the options to use.\n * [md] the message digest object to use (defaults to forge.md.md5).\n * [encoding] an alternative output encoding, such as 'hex'\n * (defaults to none, outputs a byte buffer).\n * [delimiter] the delimiter to use between bytes for 'hex' encoded\n * output, eg: ':' (defaults to none).\n *\n * @return the fingerprint as a byte buffer or other encoding based on options.\n */\nssh.getPublicKeyFingerprint = function(key, options) {\n options = options || {};\n var md = options.md || forge.md.md5.create();\n\n var type = 'ssh-rsa';\n var buffer = forge.util.createBuffer();\n _addStringToBuffer(buffer, type);\n _addBigIntegerToBuffer(buffer, key.e);\n _addBigIntegerToBuffer(buffer, key.n);\n\n // hash public key bytes\n md.start();\n md.update(buffer.getBytes());\n var digest = md.digest();\n if(options.encoding === 'hex') {\n var hex = digest.toHex();\n if(options.delimiter) {\n return hex.match(/.{2}/g).join(options.delimiter);\n }\n return hex;\n } else if(options.encoding === 'binary') {\n return digest.getBytes();\n } else if(options.encoding) {\n throw new Error('Unknown encoding \"' + options.encoding + '\".');\n }\n return digest;\n};\n\n/**\n * Adds len(val) then val to a buffer.\n *\n * @param buffer the buffer to add to.\n * @param val a big integer.\n */\nfunction _addBigIntegerToBuffer(buffer, val) {\n var hexVal = val.toString(16);\n // ensure 2s complement +ve\n if(hexVal[0] >= '8') {\n hexVal = '00' + hexVal;\n }\n var bytes = forge.util.hexToBytes(hexVal);\n buffer.putInt32(bytes.length);\n buffer.putBytes(bytes);\n}\n\n/**\n * Adds len(val) then val to a buffer.\n *\n * @param buffer the buffer to add to.\n * @param val a string.\n */\nfunction _addStringToBuffer(buffer, val) {\n buffer.putInt32(val.length);\n buffer.putString(val);\n}\n\n/**\n * Hashes the arguments into one value using SHA-1.\n *\n * @return the sha1 hash of the provided arguments.\n */\nfunction _sha1() {\n var sha = forge.md.sha1.create();\n var num = 
arguments.length;\n for (var i = 0; i < num; ++i) {\n sha.update(arguments[i]);\n }\n return sha.digest();\n}\n","/**\n * A Javascript implementation of Transport Layer Security (TLS).\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2014 Digital Bazaar, Inc.\n *\n * The TLS Handshake Protocol involves the following steps:\n *\n * - Exchange hello messages to agree on algorithms, exchange random values,\n * and check for session resumption.\n *\n * - Exchange the necessary cryptographic parameters to allow the client and\n * server to agree on a premaster secret.\n *\n * - Exchange certificates and cryptographic information to allow the client\n * and server to authenticate themselves.\n *\n * - Generate a master secret from the premaster secret and exchanged random\n * values.\n *\n * - Provide security parameters to the record layer.\n *\n * - Allow the client and server to verify that their peer has calculated the\n * same security parameters and that the handshake occurred without tampering\n * by an attacker.\n *\n * Up to 4 different messages may be sent during a key exchange. The server\n * certificate, the server key exchange, the client certificate, and the\n * client key exchange.\n *\n * A typical handshake (from the client's perspective).\n *\n * 1. Client sends ClientHello.\n * 2. Client receives ServerHello.\n * 3. Client receives optional Certificate.\n * 4. Client receives optional ServerKeyExchange.\n * 5. Client receives ServerHelloDone.\n * 6. Client sends optional Certificate.\n * 7. Client sends ClientKeyExchange.\n * 8. Client sends optional CertificateVerify.\n * 9. Client sends ChangeCipherSpec.\n * 10. Client sends Finished.\n * 11. Client receives ChangeCipherSpec.\n * 12. Client receives Finished.\n * 13. Client sends/receives application data.\n *\n * To reuse an existing session:\n *\n * 1. Client sends ClientHello with session ID for reuse.\n * 2. Client receives ServerHello with same session ID if reusing.\n * 3. Client receives ChangeCipherSpec message if reusing.\n * 4. Client receives Finished.\n * 5. Client sends ChangeCipherSpec.\n * 6. Client sends Finished.\n *\n * Note: Client ignores HelloRequest if in the middle of a handshake.\n *\n * Record Layer:\n *\n * The record layer fragments information blocks into TLSPlaintext records\n * carrying data in chunks of 2^14 bytes or less. Client message boundaries are\n * not preserved in the record layer (i.e., multiple client messages of the\n * same ContentType MAY be coalesced into a single TLSPlaintext record, or a\n * single message MAY be fragmented across several records).\n *\n * struct {\n * uint8 major;\n * uint8 minor;\n * } ProtocolVersion;\n *\n * struct {\n * ContentType type;\n * ProtocolVersion version;\n * uint16 length;\n * opaque fragment[TLSPlaintext.length];\n * } TLSPlaintext;\n *\n * type:\n * The higher-level protocol used to process the enclosed fragment.\n *\n * version:\n * The version of the protocol being employed. TLS Version 1.2 uses version\n * {3, 3}. TLS Version 1.0 uses version {3, 1}. Note that a client that\n * supports multiple versions of TLS may not know what version will be\n * employed before it receives the ServerHello.\n *\n * length:\n * The length (in bytes) of the following TLSPlaintext.fragment. The length\n * MUST NOT exceed 2^14 = 16384 bytes.\n *\n * fragment:\n * The application data. 
This data is transparent and treated as an\n * independent block to be dealt with by the higher-level protocol specified\n * by the type field.\n *\n * Implementations MUST NOT send zero-length fragments of Handshake, Alert, or\n * ChangeCipherSpec content types. Zero-length fragments of Application data\n * MAY be sent as they are potentially useful as a traffic analysis\n * countermeasure.\n *\n * Note: Data of different TLS record layer content types MAY be interleaved.\n * Application data is generally of lower precedence for transmission than\n * other content types. However, records MUST be delivered to the network in\n * the same order as they are protected by the record layer. Recipients MUST\n * receive and process interleaved application layer traffic during handshakes\n * subsequent to the first one on a connection.\n *\n * struct {\n * ContentType type; // same as TLSPlaintext.type\n * ProtocolVersion version;// same as TLSPlaintext.version\n * uint16 length;\n * opaque fragment[TLSCompressed.length];\n * } TLSCompressed;\n *\n * length:\n * The length (in bytes) of the following TLSCompressed.fragment.\n * The length MUST NOT exceed 2^14 + 1024.\n *\n * fragment:\n * The compressed form of TLSPlaintext.fragment.\n *\n * Note: A CompressionMethod.null operation is an identity operation; no fields\n * are altered. In this implementation, since no compression is supported,\n * uncompressed records are always the same as compressed records.\n *\n * Encryption Information:\n *\n * The encryption and MAC functions translate a TLSCompressed structure into a\n * TLSCiphertext. The decryption functions reverse the process. The MAC of the\n * record also includes a sequence number so that missing, extra, or repeated\n * messages are detectable.\n *\n * struct {\n * ContentType type;\n * ProtocolVersion version;\n * uint16 length;\n * select (SecurityParameters.cipher_type) {\n * case stream: GenericStreamCipher;\n * case block: GenericBlockCipher;\n * case aead: GenericAEADCipher;\n * } fragment;\n * } TLSCiphertext;\n *\n * type:\n * The type field is identical to TLSCompressed.type.\n *\n * version:\n * The version field is identical to TLSCompressed.version.\n *\n * length:\n * The length (in bytes) of the following TLSCiphertext.fragment.\n * The length MUST NOT exceed 2^14 + 2048.\n *\n * fragment:\n * The encrypted form of TLSCompressed.fragment, with the MAC.\n *\n * Note: Only CBC Block Ciphers are supported by this implementation.\n *\n * The TLSCompressed.fragment structures are converted to/from block\n * TLSCiphertext.fragment structures.\n *\n * struct {\n * opaque IV[SecurityParameters.record_iv_length];\n * block-ciphered struct {\n * opaque content[TLSCompressed.length];\n * opaque MAC[SecurityParameters.mac_length];\n * uint8 padding[GenericBlockCipher.padding_length];\n * uint8 padding_length;\n * };\n * } GenericBlockCipher;\n *\n * The MAC is generated as described in Section 6.2.3.1.\n *\n * IV:\n * The Initialization Vector (IV) SHOULD be chosen at random, and MUST be\n * unpredictable. Note that in versions of TLS prior to 1.1, there was no\n * IV field, and the last ciphertext block of the previous record (the \"CBC\n * residue\") was used as the IV. This was changed to prevent the attacks\n * described in [CBCATT]. 
For block ciphers, the IV length is of length\n * SecurityParameters.record_iv_length, which is equal to the\n * SecurityParameters.block_size.\n *\n * padding:\n * Padding that is added to force the length of the plaintext to be an\n * integral multiple of the block cipher's block length. The padding MAY be\n * any length up to 255 bytes, as long as it results in the\n * TLSCiphertext.length being an integral multiple of the block length.\n * Lengths longer than necessary might be desirable to frustrate attacks on\n * a protocol that are based on analysis of the lengths of exchanged\n * messages. Each uint8 in the padding data vector MUST be filled with the\n * padding length value. The receiver MUST check this padding and MUST use\n * the bad_record_mac alert to indicate padding errors.\n *\n * padding_length:\n * The padding length MUST be such that the total size of the\n * GenericBlockCipher structure is a multiple of the cipher's block length.\n * Legal values range from zero to 255, inclusive. This length specifies the\n * length of the padding field exclusive of the padding_length field itself.\n *\n * The encrypted data length (TLSCiphertext.length) is one more than the sum of\n * SecurityParameters.block_length, TLSCompressed.length,\n * SecurityParameters.mac_length, and padding_length.\n *\n * Example: If the block length is 8 bytes, the content length\n * (TLSCompressed.length) is 61 bytes, and the MAC length is 20 bytes, then the\n * length before padding is 82 bytes (this does not include the IV. Thus, the\n * padding length modulo 8 must be equal to 6 in order to make the total length\n * an even multiple of 8 bytes (the block length). The padding length can be\n * 6, 14, 22, and so on, through 254. If the padding length were the minimum\n * necessary, 6, the padding would be 6 bytes, each containing the value 6.\n * Thus, the last 8 octets of the GenericBlockCipher before block encryption\n * would be xx 06 06 06 06 06 06 06, where xx is the last octet of the MAC.\n *\n * Note: With block ciphers in CBC mode (Cipher Block Chaining), it is critical\n * that the entire plaintext of the record be known before any ciphertext is\n * transmitted. Otherwise, it is possible for the attacker to mount the attack\n * described in [CBCATT].\n *\n * Implementation note: Canvel et al. [CBCTIME] have demonstrated a timing\n * attack on CBC padding based on the time required to compute the MAC. In\n * order to defend against this attack, implementations MUST ensure that\n * record processing time is essentially the same whether or not the padding\n * is correct. In general, the best way to do this is to compute the MAC even\n * if the padding is incorrect, and only then reject the packet. For instance,\n * if the pad appears to be incorrect, the implementation might assume a\n * zero-length pad and then compute the MAC. 
This leaves a small timing\n * channel, since MAC performance depends, to some extent, on the size of the\n * data fragment, but it is not believed to be large enough to be exploitable,\n * due to the large block size of existing MACs and the small size of the\n * timing signal.\n */\nvar forge = require('./forge');\nrequire('./asn1');\nrequire('./hmac');\nrequire('./md5');\nrequire('./pem');\nrequire('./pki');\nrequire('./random');\nrequire('./sha1');\nrequire('./util');\n\n/**\n * Generates pseudo random bytes by mixing the result of two hash functions,\n * MD5 and SHA-1.\n *\n * prf_TLS1(secret, label, seed) =\n * P_MD5(S1, label + seed) XOR P_SHA-1(S2, label + seed);\n *\n * Each P_hash function functions as follows:\n *\n * P_hash(secret, seed) = HMAC_hash(secret, A(1) + seed) +\n * HMAC_hash(secret, A(2) + seed) +\n * HMAC_hash(secret, A(3) + seed) + ...\n * A() is defined as:\n * A(0) = seed\n * A(i) = HMAC_hash(secret, A(i-1))\n *\n * The '+' operator denotes concatenation.\n *\n * As many iterations A(N) as are needed are performed to generate enough\n * pseudo random byte output. If an iteration creates more data than is\n * necessary, then it is truncated.\n *\n * Therefore:\n * A(1) = HMAC_hash(secret, A(0))\n * = HMAC_hash(secret, seed)\n * A(2) = HMAC_hash(secret, A(1))\n * = HMAC_hash(secret, HMAC_hash(secret, seed))\n *\n * Therefore:\n * P_hash(secret, seed) =\n * HMAC_hash(secret, HMAC_hash(secret, A(0)) + seed) +\n * HMAC_hash(secret, HMAC_hash(secret, A(1)) + seed) +\n * ...\n *\n * Therefore:\n * P_hash(secret, seed) =\n * HMAC_hash(secret, HMAC_hash(secret, seed) + seed) +\n * HMAC_hash(secret, HMAC_hash(secret, HMAC_hash(secret, seed)) + seed) +\n * ...\n *\n * @param secret the secret to use.\n * @param label the label to use.\n * @param seed the seed value to use.\n * @param length the number of bytes to generate.\n *\n * @return the pseudo random bytes in a byte buffer.\n */\nvar prf_TLS1 = function(secret, label, seed, length) {\n var rval = forge.util.createBuffer();\n\n /* For TLS 1.0, the secret is split in half, into two secrets of equal\n length. If the secret has an odd length then the last byte of the first\n half will be the same as the first byte of the second. The length of the\n two secrets is half of the secret rounded up. 
*/\n var idx = (secret.length >> 1);\n var slen = idx + (secret.length & 1);\n var s1 = secret.substr(0, slen);\n var s2 = secret.substr(idx, slen);\n var ai = forge.util.createBuffer();\n var hmac = forge.hmac.create();\n seed = label + seed;\n\n // determine the number of iterations that must be performed to generate\n // enough output bytes, md5 creates 16 byte hashes, sha1 creates 20\n var md5itr = Math.ceil(length / 16);\n var sha1itr = Math.ceil(length / 20);\n\n // do md5 iterations\n hmac.start('MD5', s1);\n var md5bytes = forge.util.createBuffer();\n ai.putBytes(seed);\n for(var i = 0; i < md5itr; ++i) {\n // HMAC_hash(secret, A(i-1))\n hmac.start(null, null);\n hmac.update(ai.getBytes());\n ai.putBuffer(hmac.digest());\n\n // HMAC_hash(secret, A(i) + seed)\n hmac.start(null, null);\n hmac.update(ai.bytes() + seed);\n md5bytes.putBuffer(hmac.digest());\n }\n\n // do sha1 iterations\n hmac.start('SHA1', s2);\n var sha1bytes = forge.util.createBuffer();\n ai.clear();\n ai.putBytes(seed);\n for(var i = 0; i < sha1itr; ++i) {\n // HMAC_hash(secret, A(i-1))\n hmac.start(null, null);\n hmac.update(ai.getBytes());\n ai.putBuffer(hmac.digest());\n\n // HMAC_hash(secret, A(i) + seed)\n hmac.start(null, null);\n hmac.update(ai.bytes() + seed);\n sha1bytes.putBuffer(hmac.digest());\n }\n\n // XOR the md5 bytes with the sha1 bytes\n rval.putBytes(forge.util.xorBytes(\n md5bytes.getBytes(), sha1bytes.getBytes(), length));\n\n return rval;\n};\n\n/**\n * Generates pseudo random bytes using a SHA256 algorithm. For TLS 1.2.\n *\n * @param secret the secret to use.\n * @param label the label to use.\n * @param seed the seed value to use.\n * @param length the number of bytes to generate.\n *\n * @return the pseudo random bytes in a byte buffer.\n */\nvar prf_sha256 = function(secret, label, seed, length) {\n // FIXME: implement me for TLS 1.2\n};\n\n/**\n * Gets a MAC for a record using the SHA-1 hash algorithm.\n *\n * @param key the mac key.\n * @param state the sequence number (array of two 32-bit integers).\n * @param record the record.\n *\n * @return the sha-1 hash (20 bytes) for the given record.\n */\nvar hmac_sha1 = function(key, seqNum, record) {\n /* MAC is computed like so:\n HMAC_hash(\n key, seqNum +\n TLSCompressed.type +\n TLSCompressed.version +\n TLSCompressed.length +\n TLSCompressed.fragment)\n */\n var hmac = forge.hmac.create();\n hmac.start('SHA1', key);\n var b = forge.util.createBuffer();\n b.putInt32(seqNum[0]);\n b.putInt32(seqNum[1]);\n b.putByte(record.type);\n b.putByte(record.version.major);\n b.putByte(record.version.minor);\n b.putInt16(record.length);\n b.putBytes(record.fragment.bytes());\n hmac.update(b.getBytes());\n return hmac.digest().getBytes();\n};\n\n/**\n * Compresses the TLSPlaintext record into a TLSCompressed record using the\n * deflate algorithm.\n *\n * @param c the TLS connection.\n * @param record the TLSPlaintext record to compress.\n * @param s the ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nvar deflate = function(c, record, s) {\n var rval = false;\n\n try {\n var bytes = c.deflate(record.fragment.getBytes());\n record.fragment = forge.util.createBuffer(bytes);\n record.length = bytes.length;\n rval = true;\n } catch(ex) {\n // deflate error, fail out\n }\n\n return rval;\n};\n\n/**\n * Decompresses the TLSCompressed record into a TLSPlaintext record using the\n * deflate algorithm.\n *\n * @param c the TLS connection.\n * @param record the TLSCompressed record to decompress.\n * @param s the 
ConnectionState to use.\n *\n * @return true on success, false on failure.\n */\nvar inflate = function(c, record, s) {\n var rval = false;\n\n try {\n var bytes = c.inflate(record.fragment.getBytes());\n record.fragment = forge.util.createBuffer(bytes);\n record.length = bytes.length;\n rval = true;\n } catch(ex) {\n // inflate error, fail out\n }\n\n return rval;\n};\n\n/**\n * Reads a TLS variable-length vector from a byte buffer.\n *\n * Variable-length vectors are defined by specifying a subrange of legal\n * lengths, inclusively, using the notation <floor..ceiling>. When these are\n * encoded, the actual length precedes the vector's contents in the byte\n * stream. The length will be in the form of a number consuming as many bytes\n * as required to hold the vector's specified maximum (ceiling) length. A\n * variable-length vector with an actual length field of zero is referred to\n * as an empty vector.\n *\n * @param b the byte buffer.\n * @param lenBytes the number of bytes required to store the length.\n *\n * @return the resulting byte buffer.\n */\nvar readVector = function(b, lenBytes) {\n var len = 0;\n switch(lenBytes) {\n case 1:\n len = b.getByte();\n break;\n case 2:\n len = b.getInt16();\n break;\n case 3:\n len = b.getInt24();\n break;\n case 4:\n len = b.getInt32();\n break;\n }\n\n // read vector bytes into a new buffer\n return forge.util.createBuffer(b.getBytes(len));\n};\n\n/**\n * Writes a TLS variable-length vector to a byte buffer.\n *\n * @param b the byte buffer.\n * @param lenBytes the number of bytes required to store the length.\n * @param v the byte buffer vector.\n */\nvar writeVector = function(b, lenBytes, v) {\n // encode length at the start of the vector, where the number of bytes for\n // the length is the maximum number of bytes it would take to encode the\n // vector's ceiling\n b.putInt(v.length(), lenBytes << 3);\n b.putBuffer(v);\n};\n\n/**\n * The tls implementation.\n */\nvar tls = {};\n\n/**\n * Version: TLS 1.2 = 3.3, TLS 1.1 = 3.2, TLS 1.0 = 3.1. Both TLS 1.1 and\n * TLS 1.2 were still too new (ie: openSSL didn't implement them) at the time\n * of this implementation so TLS 1.0 was implemented instead.\n */\ntls.Versions = {\n TLS_1_0: {major: 3, minor: 1},\n TLS_1_1: {major: 3, minor: 2},\n TLS_1_2: {major: 3, minor: 3}\n};\ntls.SupportedVersions = [\n tls.Versions.TLS_1_1,\n tls.Versions.TLS_1_0\n];\ntls.Version = tls.SupportedVersions[0];\n\n/**\n * Maximum fragment size. 
True maximum is 16384, but we fragment before that\n * to allow for unusual small increases during compression.\n */\ntls.MaxFragment = 16384 - 1024;\n\n/**\n * Whether this entity is considered the \"client\" or \"server\".\n * enum { server, client } ConnectionEnd;\n */\ntls.ConnectionEnd = {\n server: 0,\n client: 1\n};\n\n/**\n * Pseudo-random function algorithm used to generate keys from the master\n * secret.\n * enum { tls_prf_sha256 } PRFAlgorithm;\n */\ntls.PRFAlgorithm = {\n tls_prf_sha256: 0\n};\n\n/**\n * Bulk encryption algorithms.\n * enum { null, rc4, des3, aes } BulkCipherAlgorithm;\n */\ntls.BulkCipherAlgorithm = {\n none: null,\n rc4: 0,\n des3: 1,\n aes: 2\n};\n\n/**\n * Cipher types.\n * enum { stream, block, aead } CipherType;\n */\ntls.CipherType = {\n stream: 0,\n block: 1,\n aead: 2\n};\n\n/**\n * MAC (Message Authentication Code) algorithms.\n * enum { null, hmac_md5, hmac_sha1, hmac_sha256,\n * hmac_sha384, hmac_sha512} MACAlgorithm;\n */\ntls.MACAlgorithm = {\n none: null,\n hmac_md5: 0,\n hmac_sha1: 1,\n hmac_sha256: 2,\n hmac_sha384: 3,\n hmac_sha512: 4\n};\n\n/**\n * Compression algorithms.\n * enum { null(0), deflate(1), (255) } CompressionMethod;\n */\ntls.CompressionMethod = {\n none: 0,\n deflate: 1\n};\n\n/**\n * TLS record content types.\n * enum {\n * change_cipher_spec(20), alert(21), handshake(22),\n * application_data(23), (255)\n * } ContentType;\n */\ntls.ContentType = {\n change_cipher_spec: 20,\n alert: 21,\n handshake: 22,\n application_data: 23,\n heartbeat: 24\n};\n\n/**\n * TLS handshake types.\n * enum {\n * hello_request(0), client_hello(1), server_hello(2),\n * certificate(11), server_key_exchange (12),\n * certificate_request(13), server_hello_done(14),\n * certificate_verify(15), client_key_exchange(16),\n * finished(20), (255)\n * } HandshakeType;\n */\ntls.HandshakeType = {\n hello_request: 0,\n client_hello: 1,\n server_hello: 2,\n certificate: 11,\n server_key_exchange: 12,\n certificate_request: 13,\n server_hello_done: 14,\n certificate_verify: 15,\n client_key_exchange: 16,\n finished: 20\n};\n\n/**\n * TLS Alert Protocol.\n *\n * enum { warning(1), fatal(2), (255) } AlertLevel;\n *\n * enum {\n * close_notify(0),\n * unexpected_message(10),\n * bad_record_mac(20),\n * decryption_failed(21),\n * record_overflow(22),\n * decompression_failure(30),\n * handshake_failure(40),\n * bad_certificate(42),\n * unsupported_certificate(43),\n * certificate_revoked(44),\n * certificate_expired(45),\n * certificate_unknown(46),\n * illegal_parameter(47),\n * unknown_ca(48),\n * access_denied(49),\n * decode_error(50),\n * decrypt_error(51),\n * export_restriction(60),\n * protocol_version(70),\n * insufficient_security(71),\n * internal_error(80),\n * user_canceled(90),\n * no_renegotiation(100),\n * (255)\n * } AlertDescription;\n *\n * struct {\n * AlertLevel level;\n * AlertDescription description;\n * } Alert;\n */\ntls.Alert = {};\ntls.Alert.Level = {\n warning: 1,\n fatal: 2\n};\ntls.Alert.Description = {\n close_notify: 0,\n unexpected_message: 10,\n bad_record_mac: 20,\n decryption_failed: 21,\n record_overflow: 22,\n decompression_failure: 30,\n handshake_failure: 40,\n bad_certificate: 42,\n unsupported_certificate: 43,\n certificate_revoked: 44,\n certificate_expired: 45,\n certificate_unknown: 46,\n illegal_parameter: 47,\n unknown_ca: 48,\n access_denied: 49,\n decode_error: 50,\n decrypt_error: 51,\n export_restriction: 60,\n protocol_version: 70,\n insufficient_security: 71,\n internal_error: 80,\n user_canceled: 90,\n 
no_renegotiation: 100\n};\n\n/**\n * TLS Heartbeat Message types.\n * enum {\n * heartbeat_request(1),\n * heartbeat_response(2),\n * (255)\n * } HeartbeatMessageType;\n */\ntls.HeartbeatMessageType = {\n heartbeat_request: 1,\n heartbeat_response: 2\n};\n\n/**\n * Supported cipher suites.\n */\ntls.CipherSuites = {};\n\n/**\n * Gets a supported cipher suite from its 2 byte ID.\n *\n * @param twoBytes two bytes in a string.\n *\n * @return the matching supported cipher suite or null.\n */\ntls.getCipherSuite = function(twoBytes) {\n var rval = null;\n for(var key in tls.CipherSuites) {\n var cs = tls.CipherSuites[key];\n if(cs.id[0] === twoBytes.charCodeAt(0) &&\n cs.id[1] === twoBytes.charCodeAt(1)) {\n rval = cs;\n break;\n }\n }\n return rval;\n};\n\n/**\n * Called when an unexpected record is encountered.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleUnexpected = function(c, record) {\n // if connection is client and closed, ignore unexpected messages\n var ignore = (!c.open && c.entity === tls.ConnectionEnd.client);\n if(!ignore) {\n c.error(c, {\n message: 'Unexpected message. Received TLS record out of order.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.unexpected_message\n }\n });\n }\n};\n\n/**\n * Called when a client receives a HelloRequest record.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleHelloRequest = function(c, record, length) {\n // ignore renegotiation requests from the server during a handshake, but\n // if handshaking, send a warning alert that renegotation is denied\n if(!c.handshaking && c.handshakes > 0) {\n // send alert warning\n tls.queue(c, tls.createAlert(c, {\n level: tls.Alert.Level.warning,\n description: tls.Alert.Description.no_renegotiation\n }));\n tls.flush(c);\n }\n\n // continue\n c.process();\n};\n\n/**\n * Parses a hello message from a ClientHello or ServerHello record.\n *\n * @param record the record to parse.\n *\n * @return the parsed message.\n */\ntls.parseHelloMessage = function(c, record, length) {\n var msg = null;\n\n var client = (c.entity === tls.ConnectionEnd.client);\n\n // minimum of 38 bytes in message\n if(length < 38) {\n c.error(c, {\n message: client ?\n 'Invalid ServerHello message. Message too short.' :\n 'Invalid ClientHello message. 
Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n } else {\n // use 'remaining' to calculate # of remaining bytes in the message\n var b = record.fragment;\n var remaining = b.length();\n msg = {\n version: {\n major: b.getByte(),\n minor: b.getByte()\n },\n random: forge.util.createBuffer(b.getBytes(32)),\n session_id: readVector(b, 1),\n extensions: []\n };\n if(client) {\n msg.cipher_suite = b.getBytes(2);\n msg.compression_method = b.getByte();\n } else {\n msg.cipher_suites = readVector(b, 2);\n msg.compression_methods = readVector(b, 1);\n }\n\n // read extensions if there are any bytes left in the message\n remaining = length - (remaining - b.length());\n if(remaining > 0) {\n // parse extensions\n var exts = readVector(b, 2);\n while(exts.length() > 0) {\n msg.extensions.push({\n type: [exts.getByte(), exts.getByte()],\n data: readVector(exts, 2)\n });\n }\n\n // TODO: make extension support modular\n if(!client) {\n for(var i = 0; i < msg.extensions.length; ++i) {\n var ext = msg.extensions[i];\n\n // support SNI extension\n if(ext.type[0] === 0x00 && ext.type[1] === 0x00) {\n // get server name list\n var snl = readVector(ext.data, 2);\n while(snl.length() > 0) {\n // read server name type\n var snType = snl.getByte();\n\n // only HostName type (0x00) is known, break out if\n // another type is detected\n if(snType !== 0x00) {\n break;\n }\n\n // add host name to server name list\n c.session.extensions.server_name.serverNameList.push(\n readVector(snl, 2).getBytes());\n }\n }\n }\n }\n }\n\n // version already set, do not allow version change\n if(c.session.version) {\n if(msg.version.major !== c.session.version.major ||\n msg.version.minor !== c.session.version.minor) {\n return c.error(c, {\n message: 'TLS version change is disallowed during renegotiation.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.protocol_version\n }\n });\n }\n }\n\n // get the chosen (ServerHello) cipher suite\n if(client) {\n // FIXME: should be checking configured acceptable cipher suites\n c.session.cipherSuite = tls.getCipherSuite(msg.cipher_suite);\n } else {\n // get a supported preferred (ClientHello) cipher suite\n // choose the first supported cipher suite\n var tmp = forge.util.createBuffer(msg.cipher_suites.bytes());\n while(tmp.length() > 0) {\n // FIXME: should be checking configured acceptable suites\n // cipher suites take up 2 bytes\n c.session.cipherSuite = tls.getCipherSuite(tmp.getBytes(2));\n if(c.session.cipherSuite !== null) {\n break;\n }\n }\n }\n\n // cipher suite not supported\n if(c.session.cipherSuite === null) {\n return c.error(c, {\n message: 'No cipher suites in common.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.handshake_failure\n },\n cipherSuite: forge.util.bytesToHex(msg.cipher_suite)\n });\n }\n\n // TODO: handle compression methods\n if(client) {\n c.session.compressionMethod = msg.compression_method;\n } else {\n // no compression\n c.session.compressionMethod = tls.CompressionMethod.none;\n }\n }\n\n return msg;\n};\n\n/**\n * Creates security parameters for the given connection based on the given\n * hello message.\n *\n * @param c the TLS connection.\n * @param msg the hello message.\n */\ntls.createSecurityParameters = function(c, msg) {\n /* Note: security params are from TLS 1.2, some values like prf_algorithm\n are ignored for TLS 1.0/1.1 and the builtin as 
specified in the spec is\n used. */\n\n // TODO: handle other options from server when more supported\n\n // get client and server randoms\n var client = (c.entity === tls.ConnectionEnd.client);\n var msgRandom = msg.random.bytes();\n var cRandom = client ? c.session.sp.client_random : msgRandom;\n var sRandom = client ? msgRandom : tls.createRandom().getBytes();\n\n // create new security parameters\n c.session.sp = {\n entity: c.entity,\n prf_algorithm: tls.PRFAlgorithm.tls_prf_sha256,\n bulk_cipher_algorithm: null,\n cipher_type: null,\n enc_key_length: null,\n block_length: null,\n fixed_iv_length: null,\n record_iv_length: null,\n mac_algorithm: null,\n mac_length: null,\n mac_key_length: null,\n compression_algorithm: c.session.compressionMethod,\n pre_master_secret: null,\n master_secret: null,\n client_random: cRandom,\n server_random: sRandom\n };\n};\n\n/**\n * Called when a client receives a ServerHello record.\n *\n * When a ServerHello message will be sent:\n * The server will send this message in response to a client hello message\n * when it was able to find an acceptable set of algorithms. If it cannot\n * find such a match, it will respond with a handshake failure alert.\n *\n * uint24 length;\n * struct {\n * ProtocolVersion server_version;\n * Random random;\n * SessionID session_id;\n * CipherSuite cipher_suite;\n * CompressionMethod compression_method;\n * select(extensions_present) {\n * case false:\n * struct {};\n * case true:\n * Extension extensions<0..2^16-1>;\n * };\n * } ServerHello;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleServerHello = function(c, record, length) {\n var msg = tls.parseHelloMessage(c, record, length);\n if(c.fail) {\n return;\n }\n\n // ensure server version is compatible\n if(msg.version.minor <= c.version.minor) {\n c.version.minor = msg.version.minor;\n } else {\n return c.error(c, {\n message: 'Incompatible TLS version.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.protocol_version\n }\n });\n }\n\n // indicate session version has been set\n c.session.version = c.version;\n\n // get the session ID from the message\n var sessionId = msg.session_id.bytes();\n\n // if the session ID is not blank and matches the cached one, resume\n // the session\n if(sessionId.length > 0 && sessionId === c.session.id) {\n // resuming session, expect a ChangeCipherSpec next\n c.expect = SCC;\n c.session.resuming = true;\n\n // get new server random\n c.session.sp.server_random = msg.random.bytes();\n } else {\n // not resuming, expect a server Certificate message next\n c.expect = SCE;\n c.session.resuming = false;\n\n // create new security parameters\n tls.createSecurityParameters(c, msg);\n }\n\n // set new session ID\n c.session.id = sessionId;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a server receives a ClientHello record.\n *\n * When a ClientHello message will be sent:\n * When a client first connects to a server it is required to send the\n * client hello as its first message. 
The client can also send a client\n * hello in response to a hello request or on its own initiative in order\n * to renegotiate the security parameters in an existing connection.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleClientHello = function(c, record, length) {\n var msg = tls.parseHelloMessage(c, record, length);\n if(c.fail) {\n return;\n }\n\n // get the session ID from the message\n var sessionId = msg.session_id.bytes();\n\n // see if the given session ID is in the cache\n var session = null;\n if(c.sessionCache) {\n session = c.sessionCache.getSession(sessionId);\n if(session === null) {\n // session ID not found\n sessionId = '';\n } else if(session.version.major !== msg.version.major ||\n session.version.minor > msg.version.minor) {\n // if session version is incompatible with client version, do not resume\n session = null;\n sessionId = '';\n }\n }\n\n // no session found to resume, generate a new session ID\n if(sessionId.length === 0) {\n sessionId = forge.random.getBytes(32);\n }\n\n // update session\n c.session.id = sessionId;\n c.session.clientHelloVersion = msg.version;\n c.session.sp = {};\n if(session) {\n // use version and security parameters from resumed session\n c.version = c.session.version = session.version;\n c.session.sp = session.sp;\n } else {\n // use highest compatible minor version\n var version;\n for(var i = 1; i < tls.SupportedVersions.length; ++i) {\n version = tls.SupportedVersions[i];\n if(version.minor <= msg.version.minor) {\n break;\n }\n }\n c.version = {major: version.major, minor: version.minor};\n c.session.version = c.version;\n }\n\n // if a session is set, resume it\n if(session !== null) {\n // resuming session, expect a ChangeCipherSpec next\n c.expect = CCC;\n c.session.resuming = true;\n\n // get new client random\n c.session.sp.client_random = msg.random.bytes();\n } else {\n // not resuming, expect a Certificate or ClientKeyExchange\n c.expect = (c.verifyClient !== false) ? 
CCE : CKE;\n c.session.resuming = false;\n\n // create new security parameters\n tls.createSecurityParameters(c, msg);\n }\n\n // connection now open\n c.open = true;\n\n // queue server hello\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createServerHello(c)\n }));\n\n if(c.session.resuming) {\n // queue change cipher spec message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.change_cipher_spec,\n data: tls.createChangeCipherSpec()\n }));\n\n // create pending state\n c.state.pending = tls.createConnectionState(c);\n\n // change current write state to pending write state\n c.state.current.write = c.state.pending.write;\n\n // queue finished\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createFinished(c)\n }));\n } else {\n // queue server certificate\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificate(c)\n }));\n\n if(!c.fail) {\n // queue server key exchange\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createServerKeyExchange(c)\n }));\n\n // request client certificate if set\n if(c.verifyClient !== false) {\n // queue certificate request\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificateRequest(c)\n }));\n }\n\n // queue server hello done\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createServerHelloDone(c)\n }));\n }\n }\n\n // send records\n tls.flush(c);\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a Certificate record.\n *\n * When this message will be sent:\n * The server must send a certificate whenever the agreed-upon key exchange\n * method is not an anonymous one. This message will always immediately\n * follow the server hello message.\n *\n * Meaning of this message:\n * The certificate type must be appropriate for the selected cipher suite's\n * key exchange algorithm, and is generally an X.509v3 certificate. It must\n * contain a key which matches the key exchange method, as follows. Unless\n * otherwise specified, the signing algorithm for the certificate must be\n * the same as the algorithm for the certificate key. Unless otherwise\n * specified, the public key may be of any length.\n *\n * opaque ASN.1Cert<1..2^24-1>;\n * struct {\n * ASN.1Cert certificate_list<1..2^24-1>;\n * } Certificate;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleCertificate = function(c, record, length) {\n // minimum of 3 bytes in message\n if(length < 3) {\n return c.error(c, {\n message: 'Invalid Certificate message. Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n var b = record.fragment;\n var msg = {\n certificate_list: readVector(b, 3)\n };\n\n /* The sender's certificate will be first in the list (chain), each\n subsequent one that follows will certify the previous one, but root\n certificates (self-signed) that specify the certificate authority may\n be omitted under the assumption that clients must already possess it. 
*/\n var cert, asn1;\n var certs = [];\n try {\n while(msg.certificate_list.length() > 0) {\n // each entry in msg.certificate_list is a vector with 3 len bytes\n cert = readVector(msg.certificate_list, 3);\n asn1 = forge.asn1.fromDer(cert);\n cert = forge.pki.certificateFromAsn1(asn1, true);\n certs.push(cert);\n }\n } catch(ex) {\n return c.error(c, {\n message: 'Could not parse certificate list.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.bad_certificate\n }\n });\n }\n\n // ensure at least 1 certificate was provided if in client-mode\n // or if verifyClient was set to true to require a certificate\n // (as opposed to 'optional')\n var client = (c.entity === tls.ConnectionEnd.client);\n if((client || c.verifyClient === true) && certs.length === 0) {\n // error, no certificate\n c.error(c, {\n message: client ?\n 'No server certificate provided.' :\n 'No client certificate provided.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n } else if(certs.length === 0) {\n // no certs to verify\n // expect a ServerKeyExchange or ClientKeyExchange message next\n c.expect = client ? SKE : CKE;\n } else {\n // save certificate in session\n if(client) {\n c.session.serverCertificate = certs[0];\n } else {\n c.session.clientCertificate = certs[0];\n }\n\n if(tls.verifyCertificateChain(c, certs)) {\n // expect a ServerKeyExchange or ClientKeyExchange message next\n c.expect = client ? SKE : CKE;\n }\n }\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a ServerKeyExchange record.\n *\n * When this message will be sent:\n * This message will be sent immediately after the server certificate\n * message (or the server hello message, if this is an anonymous\n * negotiation).\n *\n * The server key exchange message is sent by the server only when the\n * server certificate message (if sent) does not contain enough data to\n * allow the client to exchange a premaster secret.\n *\n * Meaning of this message:\n * This message conveys cryptographic information to allow the client to\n * communicate the premaster secret: either an RSA public key to encrypt\n * the premaster secret with, or a Diffie-Hellman public key with which the\n * client can complete a key exchange (with the result being the premaster\n * secret.)\n *\n * enum {\n * dhe_dss, dhe_rsa, dh_anon, rsa, dh_dss, dh_rsa\n * } KeyExchangeAlgorithm;\n *\n * struct {\n * opaque dh_p<1..2^16-1>;\n * opaque dh_g<1..2^16-1>;\n * opaque dh_Ys<1..2^16-1>;\n * } ServerDHParams;\n *\n * struct {\n * select(KeyExchangeAlgorithm) {\n * case dh_anon:\n * ServerDHParams params;\n * case dhe_dss:\n * case dhe_rsa:\n * ServerDHParams params;\n * digitally-signed struct {\n * opaque client_random[32];\n * opaque server_random[32];\n * ServerDHParams params;\n * } signed_params;\n * case rsa:\n * case dh_dss:\n * case dh_rsa:\n * struct {};\n * };\n * } ServerKeyExchange;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleServerKeyExchange = function(c, record, length) {\n // this implementation only supports RSA, no Diffie-Hellman support\n // so any length > 0 is invalid\n if(length > 0) {\n return c.error(c, {\n message: 'Invalid key parameters. 
Only RSA is supported.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.unsupported_certificate\n }\n });\n }\n\n // expect an optional CertificateRequest message next\n c.expect = SCR;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a ClientKeyExchange record.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleClientKeyExchange = function(c, record, length) {\n // this implementation only supports RSA, no Diffie-Hellman support\n // so any length < 48 is invalid\n if(length < 48) {\n return c.error(c, {\n message: 'Invalid key parameters. Only RSA is supported.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.unsupported_certificate\n }\n });\n }\n\n var b = record.fragment;\n var msg = {\n enc_pre_master_secret: readVector(b, 2).getBytes()\n };\n\n // do rsa decryption\n var privateKey = null;\n if(c.getPrivateKey) {\n try {\n privateKey = c.getPrivateKey(c, c.session.serverCertificate);\n privateKey = forge.pki.privateKeyFromPem(privateKey);\n } catch(ex) {\n c.error(c, {\n message: 'Could not get private key.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n }\n\n if(privateKey === null) {\n return c.error(c, {\n message: 'No private key set.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n\n try {\n // decrypt 48-byte pre-master secret\n var sp = c.session.sp;\n sp.pre_master_secret = privateKey.decrypt(msg.enc_pre_master_secret);\n\n // ensure client hello version matches first 2 bytes\n var version = c.session.clientHelloVersion;\n if(version.major !== sp.pre_master_secret.charCodeAt(0) ||\n version.minor !== sp.pre_master_secret.charCodeAt(1)) {\n // error, do not send alert (see BLEI attack below)\n throw new Error('TLS version rollback attack detected.');\n }\n } catch(ex) {\n /* Note: Daniel Bleichenbacher [BLEI] can be used to attack a\n TLS server which is using PKCS#1 encoded RSA, so instead of\n failing here, we generate 48 random bytes and use that as\n the pre-master secret. */\n sp.pre_master_secret = forge.random.getBytes(48);\n }\n\n // expect a CertificateVerify message if a Certificate was received that\n // does not have fixed Diffie-Hellman params, otherwise expect\n // ChangeCipherSpec\n c.expect = CCC;\n if(c.session.clientCertificate !== null) {\n // only RSA support, so expect CertificateVerify\n // TODO: support Diffie-Hellman\n c.expect = CCV;\n }\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a CertificateRequest record.\n *\n * When this message will be sent:\n * A non-anonymous server can optionally request a certificate from the\n * client, if appropriate for the selected cipher suite. 
This message, if\n * sent, will immediately follow the Server Key Exchange message (if it is\n * sent; otherwise, the Server Certificate message).\n *\n * enum {\n * rsa_sign(1), dss_sign(2), rsa_fixed_dh(3), dss_fixed_dh(4),\n * rsa_ephemeral_dh_RESERVED(5), dss_ephemeral_dh_RESERVED(6),\n * fortezza_dms_RESERVED(20), (255)\n * } ClientCertificateType;\n *\n * opaque DistinguishedName<1..2^16-1>;\n *\n * struct {\n * ClientCertificateType certificate_types<1..2^8-1>;\n * SignatureAndHashAlgorithm supported_signature_algorithms<2^16-1>;\n * DistinguishedName certificate_authorities<0..2^16-1>;\n * } CertificateRequest;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleCertificateRequest = function(c, record, length) {\n // minimum of 3 bytes in message\n if(length < 3) {\n return c.error(c, {\n message: 'Invalid CertificateRequest. Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n // TODO: TLS 1.2+ has different format including\n // SignatureAndHashAlgorithm after cert types\n var b = record.fragment;\n var msg = {\n certificate_types: readVector(b, 1),\n certificate_authorities: readVector(b, 2)\n };\n\n // save certificate request in session\n c.session.certificateRequest = msg;\n\n // expect a ServerHelloDone message next\n c.expect = SHD;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a server receives a CertificateVerify record.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleCertificateVerify = function(c, record, length) {\n if(length < 2) {\n return c.error(c, {\n message: 'Invalid CertificateVerify. Message too short.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n // rewind to get full bytes for message so it can be manually\n // digested below (special case for CertificateVerify messages because\n // they must be digested *after* handling as opposed to all others)\n var b = record.fragment;\n b.read -= 4;\n var msgBytes = b.bytes();\n b.read += 4;\n\n var msg = {\n signature: readVector(b, 2).getBytes()\n };\n\n // TODO: add support for DSA\n\n // generate data to verify\n var verify = forge.util.createBuffer();\n verify.putBuffer(c.session.md5.digest());\n verify.putBuffer(c.session.sha1.digest());\n verify = verify.getBytes();\n\n try {\n var cert = c.session.clientCertificate;\n /*b = forge.pki.rsa.decrypt(\n msg.signature, cert.publicKey, true, verify.length);\n if(b !== verify) {*/\n if(!cert.publicKey.verify(verify, msg.signature, 'NONE')) {\n throw new Error('CertificateVerify signature does not match.');\n }\n\n // digest message now that it has been handled\n c.session.md5.update(msgBytes);\n c.session.sha1.update(msgBytes);\n } catch(ex) {\n return c.error(c, {\n message: 'Bad signature in CertificateVerify.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.handshake_failure\n }\n });\n }\n\n // expect ChangeCipherSpec\n c.expect = CCC;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a client receives a ServerHelloDone record.\n *\n * When this message will be sent:\n * The server hello done message is sent by the server to indicate the end\n * of the server hello and associated messages. 
After sending this message\n * the server will wait for a client response.\n *\n * Meaning of this message:\n * This message means that the server is done sending messages to support\n * the key exchange, and the client can proceed with its phase of the key\n * exchange.\n *\n * Upon receipt of the server hello done message the client should verify\n * that the server provided a valid certificate if required and check that\n * the server hello parameters are acceptable.\n *\n * struct {} ServerHelloDone;\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleServerHelloDone = function(c, record, length) {\n // len must be 0 bytes\n if(length > 0) {\n return c.error(c, {\n message: 'Invalid ServerHelloDone message. Invalid length.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.record_overflow\n }\n });\n }\n\n if(c.serverCertificate === null) {\n // no server certificate was provided\n var error = {\n message: 'No server certificate provided. Not enough security.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.insufficient_security\n }\n };\n\n // call application callback\n var depth = 0;\n var ret = c.verify(c, error.alert.description, depth, []);\n if(ret !== true) {\n // check for custom alert info\n if(ret || ret === 0) {\n // set custom message and alert description\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.alert) {\n error.alert.description = ret.alert;\n }\n } else if(typeof ret === 'number') {\n // set custom alert description\n error.alert.description = ret;\n }\n }\n\n // send error\n return c.error(c, error);\n }\n }\n\n // create client certificate message if requested\n if(c.session.certificateRequest !== null) {\n record = tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificate(c)\n });\n tls.queue(c, record);\n }\n\n // create client key exchange message\n record = tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createClientKeyExchange(c)\n });\n tls.queue(c, record);\n\n // expect no messages until the following callback has been called\n c.expect = SER;\n\n // create callback to handle client signature (for client-certs)\n var callback = function(c, signature) {\n if(c.session.certificateRequest !== null &&\n c.session.clientCertificate !== null) {\n // create certificate verify message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createCertificateVerify(c, signature)\n }));\n }\n\n // create change cipher spec message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.change_cipher_spec,\n data: tls.createChangeCipherSpec()\n }));\n\n // create pending state\n c.state.pending = tls.createConnectionState(c);\n\n // change current write state to pending write state\n c.state.current.write = c.state.pending.write;\n\n // create finished message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createFinished(c)\n }));\n\n // expect a server ChangeCipherSpec message next\n c.expect = SCC;\n\n // send records\n tls.flush(c);\n\n // continue\n c.process();\n };\n\n // if there is no certificate request or no client certificate, do\n // callback immediately\n if(c.session.certificateRequest === null ||\n c.session.clientCertificate === null) {\n return callback(c, null);\n }\n\n // otherwise 
get the client signature\n tls.getClientSignature(c, callback);\n};\n\n/**\n * Called when a ChangeCipherSpec record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleChangeCipherSpec = function(c, record) {\n if(record.fragment.getByte() !== 0x01) {\n return c.error(c, {\n message: 'Invalid ChangeCipherSpec message received.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.illegal_parameter\n }\n });\n }\n\n // create pending state if:\n // 1. Resuming session in client mode OR\n // 2. NOT resuming session in server mode\n var client = (c.entity === tls.ConnectionEnd.client);\n if((c.session.resuming && client) || (!c.session.resuming && !client)) {\n c.state.pending = tls.createConnectionState(c);\n }\n\n // change current read state to pending read state\n c.state.current.read = c.state.pending.read;\n\n // clear pending state if:\n // 1. NOT resuming session in client mode OR\n // 2. resuming a session in server mode\n if((!c.session.resuming && client) || (c.session.resuming && !client)) {\n c.state.pending = null;\n }\n\n // expect a Finished record next\n c.expect = client ? SFI : CFI;\n\n // continue\n c.process();\n};\n\n/**\n * Called when a Finished record is received.\n *\n * When this message will be sent:\n * A finished message is always sent immediately after a change\n * cipher spec message to verify that the key exchange and\n * authentication processes were successful. It is essential that a\n * change cipher spec message be received between the other\n * handshake messages and the Finished message.\n *\n * Meaning of this message:\n * The finished message is the first protected with the just-\n * negotiated algorithms, keys, and secrets. Recipients of finished\n * messages must verify that the contents are correct. Once a side\n * has sent its Finished message and received and validated the\n * Finished message from its peer, it may begin to send and receive\n * application data over the connection.\n *\n * struct {\n * opaque verify_data[verify_data_length];\n * } Finished;\n *\n * verify_data\n * PRF(master_secret, finished_label, Hash(handshake_messages))\n * [0..verify_data_length-1];\n *\n * finished_label\n * For Finished messages sent by the client, the string\n * \"client finished\". For Finished messages sent by the server, the\n * string \"server finished\".\n *\n * verify_data_length depends on the cipher suite. If it is not specified\n * by the cipher suite, then it is 12. Versions of TLS < 1.2 always used\n * 12 bytes.\n *\n * @param c the connection.\n * @param record the record.\n * @param length the length of the handshake message.\n */\ntls.handleFinished = function(c, record, length) {\n // rewind to get full bytes for message so it can be manually\n // digested below (special case for Finished messages because they\n // must be digested *after* handling as opposed to all others)\n var b = record.fragment;\n b.read -= 4;\n var msgBytes = b.bytes();\n b.read += 4;\n\n // message contains only verify_data\n var vd = record.fragment.getBytes();\n\n // ensure verify data is correct\n b = forge.util.createBuffer();\n b.putBuffer(c.session.md5.digest());\n b.putBuffer(c.session.sha1.digest());\n\n // set label based on entity type\n var client = (c.entity === tls.ConnectionEnd.client);\n var label = client ? 
'server finished' : 'client finished';\n\n // TODO: determine prf function and verify length for TLS 1.2\n var sp = c.session.sp;\n var vdl = 12;\n var prf = prf_TLS1;\n b = prf(sp.master_secret, label, b.getBytes(), vdl);\n if(b.getBytes() !== vd) {\n return c.error(c, {\n message: 'Invalid verify_data in Finished message.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.decrypt_error\n }\n });\n }\n\n // digest finished message now that it has been handled\n c.session.md5.update(msgBytes);\n c.session.sha1.update(msgBytes);\n\n // resuming session as client or NOT resuming session as server\n if((c.session.resuming && client) || (!c.session.resuming && !client)) {\n // create change cipher spec message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.change_cipher_spec,\n data: tls.createChangeCipherSpec()\n }));\n\n // change current write state to pending write state, clear pending\n c.state.current.write = c.state.pending.write;\n c.state.pending = null;\n\n // create finished message\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createFinished(c)\n }));\n }\n\n // expect application data next\n c.expect = client ? SAD : CAD;\n\n // handshake complete\n c.handshaking = false;\n ++c.handshakes;\n\n // save access to peer certificate\n c.peerCertificate = client ?\n c.session.serverCertificate : c.session.clientCertificate;\n\n // send records\n tls.flush(c);\n\n // now connected\n c.isConnected = true;\n c.connected(c);\n\n // continue\n c.process();\n};\n\n/**\n * Called when an Alert record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleAlert = function(c, record) {\n // read alert\n var b = record.fragment;\n var alert = {\n level: b.getByte(),\n description: b.getByte()\n };\n\n // TODO: consider using a table?\n // get appropriate message\n var msg;\n switch(alert.description) {\n case tls.Alert.Description.close_notify:\n msg = 'Connection closed.';\n break;\n case tls.Alert.Description.unexpected_message:\n msg = 'Unexpected message.';\n break;\n case tls.Alert.Description.bad_record_mac:\n msg = 'Bad record MAC.';\n break;\n case tls.Alert.Description.decryption_failed:\n msg = 'Decryption failed.';\n break;\n case tls.Alert.Description.record_overflow:\n msg = 'Record overflow.';\n break;\n case tls.Alert.Description.decompression_failure:\n msg = 'Decompression failed.';\n break;\n case tls.Alert.Description.handshake_failure:\n msg = 'Handshake failure.';\n break;\n case tls.Alert.Description.bad_certificate:\n msg = 'Bad certificate.';\n break;\n case tls.Alert.Description.unsupported_certificate:\n msg = 'Unsupported certificate.';\n break;\n case tls.Alert.Description.certificate_revoked:\n msg = 'Certificate revoked.';\n break;\n case tls.Alert.Description.certificate_expired:\n msg = 'Certificate expired.';\n break;\n case tls.Alert.Description.certificate_unknown:\n msg = 'Certificate unknown.';\n break;\n case tls.Alert.Description.illegal_parameter:\n msg = 'Illegal parameter.';\n break;\n case tls.Alert.Description.unknown_ca:\n msg = 'Unknown certificate authority.';\n break;\n case tls.Alert.Description.access_denied:\n msg = 'Access denied.';\n break;\n case tls.Alert.Description.decode_error:\n msg = 'Decode error.';\n break;\n case tls.Alert.Description.decrypt_error:\n msg = 'Decrypt error.';\n break;\n case tls.Alert.Description.export_restriction:\n msg = 'Export restriction.';\n break;\n case 
tls.Alert.Description.protocol_version:\n msg = 'Unsupported protocol version.';\n break;\n case tls.Alert.Description.insufficient_security:\n msg = 'Insufficient security.';\n break;\n case tls.Alert.Description.internal_error:\n msg = 'Internal error.';\n break;\n case tls.Alert.Description.user_canceled:\n msg = 'User canceled.';\n break;\n case tls.Alert.Description.no_renegotiation:\n msg = 'Renegotiation not supported.';\n break;\n default:\n msg = 'Unknown error.';\n break;\n }\n\n // close connection on close_notify, not an error\n if(alert.description === tls.Alert.Description.close_notify) {\n return c.close();\n }\n\n // call error handler\n c.error(c, {\n message: msg,\n send: false,\n // origin is the opposite end\n origin: (c.entity === tls.ConnectionEnd.client) ? 'server' : 'client',\n alert: alert\n });\n\n // continue\n c.process();\n};\n\n/**\n * Called when a Handshake record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleHandshake = function(c, record) {\n // get the handshake type and message length\n var b = record.fragment;\n var type = b.getByte();\n var length = b.getInt24();\n\n // see if the record fragment doesn't yet contain the full message\n if(length > b.length()) {\n // cache the record, clear its fragment, and reset the buffer read\n // pointer before the type and length were read\n c.fragmented = record;\n record.fragment = forge.util.createBuffer();\n b.read -= 4;\n\n // continue\n return c.process();\n }\n\n // full message now available, clear cache, reset read pointer to\n // before type and length\n c.fragmented = null;\n b.read -= 4;\n\n // save the handshake bytes for digestion after handler is found\n // (include type and length of handshake msg)\n var bytes = b.bytes(length + 4);\n\n // restore read pointer\n b.read += 4;\n\n // handle expected message\n if(type in hsTable[c.entity][c.expect]) {\n // initialize server session\n if(c.entity === tls.ConnectionEnd.server && !c.open && !c.fail) {\n c.handshaking = true;\n c.session = {\n version: null,\n extensions: {\n server_name: {\n serverNameList: []\n }\n },\n cipherSuite: null,\n compressionMethod: null,\n serverCertificate: null,\n clientCertificate: null,\n md5: forge.md.md5.create(),\n sha1: forge.md.sha1.create()\n };\n }\n\n /* Update handshake messages digest. Finished and CertificateVerify\n messages are not digested here. They can't be digested as part of\n the verify_data that they contain. These messages are manually\n digested in their handlers. HelloRequest messages are simply never\n included in the handshake message digest according to spec. 
*/\n if(type !== tls.HandshakeType.hello_request &&\n type !== tls.HandshakeType.certificate_verify &&\n type !== tls.HandshakeType.finished) {\n c.session.md5.update(bytes);\n c.session.sha1.update(bytes);\n }\n\n // handle specific handshake type record\n hsTable[c.entity][c.expect][type](c, record, length);\n } else {\n // unexpected record\n tls.handleUnexpected(c, record);\n }\n};\n\n/**\n * Called when an ApplicationData record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleApplicationData = function(c, record) {\n // buffer data, notify that its ready\n c.data.putBuffer(record.fragment);\n c.dataReady(c);\n\n // continue\n c.process();\n};\n\n/**\n * Called when a Heartbeat record is received.\n *\n * @param c the connection.\n * @param record the record.\n */\ntls.handleHeartbeat = function(c, record) {\n // get the heartbeat type and payload\n var b = record.fragment;\n var type = b.getByte();\n var length = b.getInt16();\n var payload = b.getBytes(length);\n\n if(type === tls.HeartbeatMessageType.heartbeat_request) {\n // discard request during handshake or if length is too large\n if(c.handshaking || length > payload.length) {\n // continue\n return c.process();\n }\n // retransmit payload\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.heartbeat,\n data: tls.createHeartbeat(\n tls.HeartbeatMessageType.heartbeat_response, payload)\n }));\n tls.flush(c);\n } else if(type === tls.HeartbeatMessageType.heartbeat_response) {\n // check payload against expected payload, discard heartbeat if no match\n if(payload !== c.expectedHeartbeatPayload) {\n // continue\n return c.process();\n }\n\n // notify that a valid heartbeat was received\n if(c.heartbeatReceived) {\n c.heartbeatReceived(c, forge.util.createBuffer(payload));\n }\n }\n\n // continue\n c.process();\n};\n\n/**\n * The transistional state tables for receiving TLS records. It maps the\n * current TLS engine state and a received record to a function to handle the\n * record and update the state.\n *\n * For instance, if the current state is SHE, then the TLS engine is expecting\n * a ServerHello record. Once a record is received, the handler function is\n * looked up using the state SHE and the record's content type.\n *\n * The resulting function will either be an error handler or a record handler.\n * The function will take whatever action is appropriate and update the state\n * for the next record.\n *\n * The states are all based on possible server record types. Note that the\n * client will never specifically expect to receive a HelloRequest or an alert\n * from the server so there is no state that reflects this. These messages may\n * occur at any time.\n *\n * There are two tables for mapping states because there is a second tier of\n * types for handshake messages. 
Once a record with a content type of handshake\n * is received, the handshake record handler will look up the handshake type in\n * the secondary map to get its appropriate handler.\n *\n * Valid message orders are as follows:\n *\n * =======================FULL HANDSHAKE======================\n * Client Server\n *\n * ClientHello -------->\n * ServerHello\n * Certificate*\n * ServerKeyExchange*\n * CertificateRequest*\n * <-------- ServerHelloDone\n * Certificate*\n * ClientKeyExchange\n * CertificateVerify*\n * [ChangeCipherSpec]\n * Finished -------->\n * [ChangeCipherSpec]\n * <-------- Finished\n * Application Data <-------> Application Data\n *\n * =====================SESSION RESUMPTION=====================\n * Client Server\n *\n * ClientHello -------->\n * ServerHello\n * [ChangeCipherSpec]\n * <-------- Finished\n * [ChangeCipherSpec]\n * Finished -------->\n * Application Data <-------> Application Data\n */\n// client expect states (indicate which records are expected to be received)\nvar SHE = 0; // rcv server hello\nvar SCE = 1; // rcv server certificate\nvar SKE = 2; // rcv server key exchange\nvar SCR = 3; // rcv certificate request\nvar SHD = 4; // rcv server hello done\nvar SCC = 5; // rcv change cipher spec\nvar SFI = 6; // rcv finished\nvar SAD = 7; // rcv application data\nvar SER = 8; // not expecting any messages at this point\n\n// server expect states\nvar CHE = 0; // rcv client hello\nvar CCE = 1; // rcv client certificate\nvar CKE = 2; // rcv client key exchange\nvar CCV = 3; // rcv certificate verify\nvar CCC = 4; // rcv change cipher spec\nvar CFI = 5; // rcv finished\nvar CAD = 6; // rcv application data\nvar CER = 7; // not expecting any messages at this point\n\n// map client current expect state and content type to function\nvar __ = tls.handleUnexpected;\nvar R0 = tls.handleChangeCipherSpec;\nvar R1 = tls.handleAlert;\nvar R2 = tls.handleHandshake;\nvar R3 = tls.handleApplicationData;\nvar R4 = tls.handleHeartbeat;\nvar ctTable = [];\nctTable[tls.ConnectionEnd.client] = [\n// CC,AL,HS,AD,HB\n/*SHE*/[__,R1,R2,__,R4],\n/*SCE*/[__,R1,R2,__,R4],\n/*SKE*/[__,R1,R2,__,R4],\n/*SCR*/[__,R1,R2,__,R4],\n/*SHD*/[__,R1,R2,__,R4],\n/*SCC*/[R0,R1,__,__,R4],\n/*SFI*/[__,R1,R2,__,R4],\n/*SAD*/[__,R1,R2,R3,R4],\n/*SER*/[__,R1,R2,__,R4]\n];\n\n// map server current expect state and content type to function\nctTable[tls.ConnectionEnd.server] = [\n// CC,AL,HS,AD\n/*CHE*/[__,R1,R2,__,R4],\n/*CCE*/[__,R1,R2,__,R4],\n/*CKE*/[__,R1,R2,__,R4],\n/*CCV*/[__,R1,R2,__,R4],\n/*CCC*/[R0,R1,__,__,R4],\n/*CFI*/[__,R1,R2,__,R4],\n/*CAD*/[__,R1,R2,R3,R4],\n/*CER*/[__,R1,R2,__,R4]\n];\n\n// map client current expect state and handshake type to function\nvar H0 = tls.handleHelloRequest;\nvar H1 = tls.handleServerHello;\nvar H2 = tls.handleCertificate;\nvar H3 = tls.handleServerKeyExchange;\nvar H4 = tls.handleCertificateRequest;\nvar H5 = tls.handleServerHelloDone;\nvar H6 = tls.handleFinished;\nvar hsTable = [];\nhsTable[tls.ConnectionEnd.client] = [\n// 
HR,01,SH,03,04,05,06,07,08,09,10,SC,SK,CR,HD,15,CK,17,18,19,FI\n/*SHE*/[__,__,H1,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*SCE*/[H0,__,__,__,__,__,__,__,__,__,__,H2,H3,H4,H5,__,__,__,__,__,__],\n/*SKE*/[H0,__,__,__,__,__,__,__,__,__,__,__,H3,H4,H5,__,__,__,__,__,__],\n/*SCR*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,H4,H5,__,__,__,__,__,__],\n/*SHD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,H5,__,__,__,__,__,__],\n/*SCC*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*SFI*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6],\n/*SAD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*SER*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__]\n];\n\n// map server current expect state and handshake type to function\n// Note: CAD[CH] does not map to FB because renegotation is prohibited\nvar H7 = tls.handleClientHello;\nvar H8 = tls.handleClientKeyExchange;\nvar H9 = tls.handleCertificateVerify;\nhsTable[tls.ConnectionEnd.server] = [\n// 01,CH,02,03,04,05,06,07,08,09,10,CC,12,13,14,CV,CK,17,18,19,FI\n/*CHE*/[__,H7,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*CCE*/[__,__,__,__,__,__,__,__,__,__,__,H2,__,__,__,__,__,__,__,__,__],\n/*CKE*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H8,__,__,__,__],\n/*CCV*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H9,__,__,__,__,__],\n/*CCC*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*CFI*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6],\n/*CAD*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__],\n/*CER*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__]\n];\n\n/**\n * Generates the master_secret and keys using the given security parameters.\n *\n * The security parameters for a TLS connection state are defined as such:\n *\n * struct {\n * ConnectionEnd entity;\n * PRFAlgorithm prf_algorithm;\n * BulkCipherAlgorithm bulk_cipher_algorithm;\n * CipherType cipher_type;\n * uint8 enc_key_length;\n * uint8 block_length;\n * uint8 fixed_iv_length;\n * uint8 record_iv_length;\n * MACAlgorithm mac_algorithm;\n * uint8 mac_length;\n * uint8 mac_key_length;\n * CompressionMethod compression_algorithm;\n * opaque master_secret[48];\n * opaque client_random[32];\n * opaque server_random[32];\n * } SecurityParameters;\n *\n * Note that this definition is from TLS 1.2. In TLS 1.0 some of these\n * parameters are ignored because, for instance, the PRFAlgorithm is a\n * builtin-fixed algorithm combining iterations of MD5 and SHA-1 in TLS 1.0.\n *\n * The Record Protocol requires an algorithm to generate keys required by the\n * current connection state.\n *\n * The master secret is expanded into a sequence of secure bytes, which is then\n * split to a client write MAC key, a server write MAC key, a client write\n * encryption key, and a server write encryption key. In TLS 1.0 a client write\n * IV and server write IV are also generated. Each of these is generated from\n * the byte sequence in that order. Unused values are empty. 
In TLS 1.2, some\n * AEAD ciphers may additionally require a client write IV and a server write\n * IV (see Section 6.2.3.3).\n *\n * When keys, MAC keys, and IVs are generated, the master secret is used as an\n * entropy source.\n *\n * To generate the key material, compute:\n *\n * master_secret = PRF(pre_master_secret, \"master secret\",\n * ClientHello.random + ServerHello.random)\n *\n * key_block = PRF(SecurityParameters.master_secret,\n * \"key expansion\",\n * SecurityParameters.server_random +\n * SecurityParameters.client_random);\n *\n * until enough output has been generated. Then, the key_block is\n * partitioned as follows:\n *\n * client_write_MAC_key[SecurityParameters.mac_key_length]\n * server_write_MAC_key[SecurityParameters.mac_key_length]\n * client_write_key[SecurityParameters.enc_key_length]\n * server_write_key[SecurityParameters.enc_key_length]\n * client_write_IV[SecurityParameters.fixed_iv_length]\n * server_write_IV[SecurityParameters.fixed_iv_length]\n *\n * In TLS 1.2, the client_write_IV and server_write_IV are only generated for\n * implicit nonce techniques as described in Section 3.2.1 of [AEAD]. This\n * implementation uses TLS 1.0 so IVs are generated.\n *\n * Implementation note: The currently defined cipher suite which requires the\n * most material is AES_256_CBC_SHA256. It requires 2 x 32 byte keys and 2 x 32\n * byte MAC keys, for a total 128 bytes of key material. In TLS 1.0 it also\n * requires 2 x 16 byte IVs, so it actually takes 160 bytes of key material.\n *\n * @param c the connection.\n * @param sp the security parameters to use.\n *\n * @return the security keys.\n */\ntls.generateKeys = function(c, sp) {\n // TLS_RSA_WITH_AES_128_CBC_SHA (required to be compliant with TLS 1.2) &\n // TLS_RSA_WITH_AES_256_CBC_SHA are the only cipher suites implemented\n // at present\n\n // TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA is required to be compliant with\n // TLS 1.0 but we don't care right now because AES is better and we have\n // an implementation for it\n\n // TODO: TLS 1.2 implementation\n /*\n // determine the PRF\n var prf;\n switch(sp.prf_algorithm) {\n case tls.PRFAlgorithm.tls_prf_sha256:\n prf = prf_sha256;\n break;\n default:\n // should never happen\n throw new Error('Invalid PRF');\n }\n */\n\n // TLS 1.0/1.1 implementation\n var prf = prf_TLS1;\n\n // concatenate server and client random\n var random = sp.client_random + sp.server_random;\n\n // only create master secret if session is new\n if(!c.session.resuming) {\n // create master secret, clean up pre-master secret\n sp.master_secret = prf(\n sp.pre_master_secret, 'master secret', random, 48).bytes();\n sp.pre_master_secret = null;\n }\n\n // generate the amount of key material needed\n random = sp.server_random + sp.client_random;\n var length = 2 * sp.mac_key_length + 2 * sp.enc_key_length;\n\n // include IV for TLS/1.0\n var tls10 = (c.version.major === tls.Versions.TLS_1_0.major &&\n c.version.minor === tls.Versions.TLS_1_0.minor);\n if(tls10) {\n length += 2 * sp.fixed_iv_length;\n }\n var km = prf(sp.master_secret, 'key expansion', random, length);\n\n // split the key material into the MAC and encryption keys\n var rval = {\n client_write_MAC_key: km.getBytes(sp.mac_key_length),\n server_write_MAC_key: km.getBytes(sp.mac_key_length),\n client_write_key: km.getBytes(sp.enc_key_length),\n server_write_key: km.getBytes(sp.enc_key_length)\n };\n\n // include TLS 1.0 IVs\n if(tls10) {\n rval.client_write_IV = km.getBytes(sp.fixed_iv_length);\n rval.server_write_IV = 
km.getBytes(sp.fixed_iv_length);\n }\n\n return rval;\n};\n\n/**\n * Creates a new initialized TLS connection state. A connection state has\n * a read mode and a write mode.\n *\n * compression state:\n * The current state of the compression algorithm.\n *\n * cipher state:\n * The current state of the encryption algorithm. This will consist of the\n * scheduled key for that connection. For stream ciphers, this will also\n * contain whatever state information is necessary to allow the stream to\n * continue to encrypt or decrypt data.\n *\n * MAC key:\n * The MAC key for the connection.\n *\n * sequence number:\n * Each connection state contains a sequence number, which is maintained\n * separately for read and write states. The sequence number MUST be set to\n * zero whenever a connection state is made the active state. Sequence\n * numbers are of type uint64 and may not exceed 2^64-1. Sequence numbers do\n * not wrap. If a TLS implementation would need to wrap a sequence number,\n * it must renegotiate instead. A sequence number is incremented after each\n * record: specifically, the first record transmitted under a particular\n * connection state MUST use sequence number 0.\n *\n * @param c the connection.\n *\n * @return the new initialized TLS connection state.\n */\ntls.createConnectionState = function(c) {\n var client = (c.entity === tls.ConnectionEnd.client);\n\n var createMode = function() {\n var mode = {\n // two 32-bit numbers, first is most significant\n sequenceNumber: [0, 0],\n macKey: null,\n macLength: 0,\n macFunction: null,\n cipherState: null,\n cipherFunction: function(record) {return true;},\n compressionState: null,\n compressFunction: function(record) {return true;},\n updateSequenceNumber: function() {\n if(mode.sequenceNumber[1] === 0xFFFFFFFF) {\n mode.sequenceNumber[1] = 0;\n ++mode.sequenceNumber[0];\n } else {\n ++mode.sequenceNumber[1];\n }\n }\n };\n return mode;\n };\n var state = {\n read: createMode(),\n write: createMode()\n };\n\n // update function in read mode will decrypt then decompress a record\n state.read.update = function(c, record) {\n if(!state.read.cipherFunction(record, state.read)) {\n c.error(c, {\n message: 'Could not decrypt record or bad MAC.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n // doesn't matter if decryption failed or MAC was\n // invalid, return the same error so as not to reveal\n // which one occurred\n description: tls.Alert.Description.bad_record_mac\n }\n });\n } else if(!state.read.compressFunction(c, record, state.read)) {\n c.error(c, {\n message: 'Could not decompress record.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.decompression_failure\n }\n });\n }\n return !c.fail;\n };\n\n // update function in write mode will compress then encrypt a record\n state.write.update = function(c, record) {\n if(!state.write.compressFunction(c, record, state.write)) {\n // error, but do not send alert since it would require\n // compression as well\n c.error(c, {\n message: 'Could not compress record.',\n send: false,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n } else if(!state.write.cipherFunction(record, state.write)) {\n // error, but do not send alert since it would require\n // encryption as well\n c.error(c, {\n message: 'Could not encrypt record.',\n send: false,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n return !c.fail;\n 
};\n\n // handle security parameters\n if(c.session) {\n var sp = c.session.sp;\n c.session.cipherSuite.initSecurityParameters(sp);\n\n // generate keys\n sp.keys = tls.generateKeys(c, sp);\n state.read.macKey = client ?\n sp.keys.server_write_MAC_key : sp.keys.client_write_MAC_key;\n state.write.macKey = client ?\n sp.keys.client_write_MAC_key : sp.keys.server_write_MAC_key;\n\n // cipher suite setup\n c.session.cipherSuite.initConnectionState(state, c, sp);\n\n // compression setup\n switch(sp.compression_algorithm) {\n case tls.CompressionMethod.none:\n break;\n case tls.CompressionMethod.deflate:\n state.read.compressFunction = inflate;\n state.write.compressFunction = deflate;\n break;\n default:\n throw new Error('Unsupported compression algorithm.');\n }\n }\n\n return state;\n};\n\n/**\n * Creates a Random structure.\n *\n * struct {\n * uint32 gmt_unix_time;\n * opaque random_bytes[28];\n * } Random;\n *\n * gmt_unix_time:\n * The current time and date in standard UNIX 32-bit format (seconds since\n * the midnight starting Jan 1, 1970, UTC, ignoring leap seconds) according\n * to the sender's internal clock. Clocks are not required to be set\n * correctly by the basic TLS protocol; higher-level or application\n * protocols may define additional requirements. Note that, for historical\n * reasons, the data element is named using GMT, the predecessor of the\n * current worldwide time base, UTC.\n * random_bytes:\n * 28 bytes generated by a secure random number generator.\n *\n * @return the Random structure as a byte array.\n */\ntls.createRandom = function() {\n // get UTC milliseconds\n var d = new Date();\n var utc = +d + d.getTimezoneOffset() * 60000;\n var rval = forge.util.createBuffer();\n rval.putInt32(utc);\n rval.putBytes(forge.random.getBytes(28));\n return rval;\n};\n\n/**\n * Creates a TLS record with the given type and data.\n *\n * @param c the connection.\n * @param options:\n * type: the record type.\n * data: the plain text data in a byte buffer.\n *\n * @return the created record.\n */\ntls.createRecord = function(c, options) {\n if(!options.data) {\n return null;\n }\n var record = {\n type: options.type,\n version: {\n major: c.version.major,\n minor: c.version.minor\n },\n length: options.data.length(),\n fragment: options.data\n };\n return record;\n};\n\n/**\n * Creates a TLS alert record.\n *\n * @param c the connection.\n * @param alert:\n * level: the TLS alert level.\n * description: the TLS alert description.\n *\n * @return the created alert record.\n */\ntls.createAlert = function(c, alert) {\n var b = forge.util.createBuffer();\n b.putByte(alert.level);\n b.putByte(alert.description);\n return tls.createRecord(c, {\n type: tls.ContentType.alert,\n data: b\n });\n};\n\n/* The structure of a TLS handshake message.\n *\n * struct {\n * HandshakeType msg_type; // handshake type\n * uint24 length; // bytes in message\n * select(HandshakeType) {\n * case hello_request: HelloRequest;\n * case client_hello: ClientHello;\n * case server_hello: ServerHello;\n * case certificate: Certificate;\n * case server_key_exchange: ServerKeyExchange;\n * case certificate_request: CertificateRequest;\n * case server_hello_done: ServerHelloDone;\n * case certificate_verify: CertificateVerify;\n * case client_key_exchange: ClientKeyExchange;\n * case finished: Finished;\n * } body;\n * } Handshake;\n */\n\n/**\n * Creates a ClientHello message.\n *\n * opaque SessionID<0..32>;\n * enum { null(0), deflate(1), (255) } CompressionMethod;\n * uint8 CipherSuite[2];\n *\n * struct 
{\n * ProtocolVersion client_version;\n * Random random;\n * SessionID session_id;\n * CipherSuite cipher_suites<2..2^16-2>;\n * CompressionMethod compression_methods<1..2^8-1>;\n * select(extensions_present) {\n * case false:\n * struct {};\n * case true:\n * Extension extensions<0..2^16-1>;\n * };\n * } ClientHello;\n *\n * The extension format for extended client hellos and server hellos is:\n *\n * struct {\n * ExtensionType extension_type;\n * opaque extension_data<0..2^16-1>;\n * } Extension;\n *\n * Here:\n *\n * - \"extension_type\" identifies the particular extension type.\n * - \"extension_data\" contains information specific to the particular\n * extension type.\n *\n * The extension types defined in this document are:\n *\n * enum {\n * server_name(0), max_fragment_length(1),\n * client_certificate_url(2), trusted_ca_keys(3),\n * truncated_hmac(4), status_request(5), (65535)\n * } ExtensionType;\n *\n * @param c the connection.\n *\n * @return the ClientHello byte buffer.\n */\ntls.createClientHello = function(c) {\n // save hello version\n c.session.clientHelloVersion = {\n major: c.version.major,\n minor: c.version.minor\n };\n\n // create supported cipher suites\n var cipherSuites = forge.util.createBuffer();\n for(var i = 0; i < c.cipherSuites.length; ++i) {\n var cs = c.cipherSuites[i];\n cipherSuites.putByte(cs.id[0]);\n cipherSuites.putByte(cs.id[1]);\n }\n var cSuites = cipherSuites.length();\n\n // create supported compression methods, null always supported, but\n // also support deflate if connection has inflate and deflate methods\n var compressionMethods = forge.util.createBuffer();\n compressionMethods.putByte(tls.CompressionMethod.none);\n // FIXME: deflate support disabled until issues with raw deflate data\n // without zlib headers are resolved\n /*\n if(c.inflate !== null && c.deflate !== null) {\n compressionMethods.putByte(tls.CompressionMethod.deflate);\n }\n */\n var cMethods = compressionMethods.length();\n\n // create TLS SNI (server name indication) extension if virtual host\n // has been specified, see RFC 3546\n var extensions = forge.util.createBuffer();\n if(c.virtualHost) {\n // create extension struct\n var ext = forge.util.createBuffer();\n ext.putByte(0x00); // type server_name (ExtensionType is 2 bytes)\n ext.putByte(0x00);\n\n /* In order to provide the server name, clients MAY include an\n * extension of type \"server_name\" in the (extended) client hello.\n * The \"extension_data\" field of this extension SHALL contain\n * \"ServerNameList\" where:\n *\n * struct {\n * NameType name_type;\n * select(name_type) {\n * case host_name: HostName;\n * } name;\n * } ServerName;\n *\n * enum {\n * host_name(0), (255)\n * } NameType;\n *\n * opaque HostName<1..2^16-1>;\n *\n * struct {\n * ServerName server_name_list<1..2^16-1>\n * } ServerNameList;\n */\n var serverName = forge.util.createBuffer();\n serverName.putByte(0x00); // type host_name\n writeVector(serverName, 2, forge.util.createBuffer(c.virtualHost));\n\n // ServerNameList is in extension_data\n var snList = forge.util.createBuffer();\n writeVector(snList, 2, serverName);\n writeVector(ext, 2, snList);\n extensions.putBuffer(ext);\n }\n var extLength = extensions.length();\n if(extLength > 0) {\n // add extension vector length\n extLength += 2;\n }\n\n // determine length of the handshake message\n // cipher suites and compression methods size will need to be\n // updated if more get added to the list\n var sessionId = c.session.id;\n var length =\n sessionId.length + 1 + // session ID 
vector\n 2 + // version (major + minor)\n 4 + 28 + // random time and random bytes\n 2 + cSuites + // cipher suites vector\n 1 + cMethods + // compression methods vector\n extLength; // extensions vector\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.client_hello);\n rval.putInt24(length); // handshake length\n rval.putByte(c.version.major); // major version\n rval.putByte(c.version.minor); // minor version\n rval.putBytes(c.session.sp.client_random); // random time + bytes\n writeVector(rval, 1, forge.util.createBuffer(sessionId));\n writeVector(rval, 2, cipherSuites);\n writeVector(rval, 1, compressionMethods);\n if(extLength > 0) {\n writeVector(rval, 2, extensions);\n }\n return rval;\n};\n\n/**\n * Creates a ServerHello message.\n *\n * @param c the connection.\n *\n * @return the ServerHello byte buffer.\n */\ntls.createServerHello = function(c) {\n // determine length of the handshake message\n var sessionId = c.session.id;\n var length =\n sessionId.length + 1 + // session ID vector\n 2 + // version (major + minor)\n 4 + 28 + // random time and random bytes\n 2 + // chosen cipher suite\n 1; // chosen compression method\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.server_hello);\n rval.putInt24(length); // handshake length\n rval.putByte(c.version.major); // major version\n rval.putByte(c.version.minor); // minor version\n rval.putBytes(c.session.sp.server_random); // random time + bytes\n writeVector(rval, 1, forge.util.createBuffer(sessionId));\n rval.putByte(c.session.cipherSuite.id[0]);\n rval.putByte(c.session.cipherSuite.id[1]);\n rval.putByte(c.session.compressionMethod);\n return rval;\n};\n\n/**\n * Creates a Certificate message.\n *\n * When this message will be sent:\n * This is the first message the client can send after receiving a server\n * hello done message and the first message the server can send after\n * sending a ServerHello. This client message is only sent if the server\n * requests a certificate. If no suitable certificate is available, the\n * client should send a certificate message containing no certificates. 
If\n * client authentication is required by the server for the handshake to\n * continue, it may respond with a fatal handshake failure alert.\n *\n * opaque ASN.1Cert<1..2^24-1>;\n *\n * struct {\n * ASN.1Cert certificate_list<0..2^24-1>;\n * } Certificate;\n *\n * @param c the connection.\n *\n * @return the Certificate byte buffer.\n */\ntls.createCertificate = function(c) {\n // TODO: check certificate request to ensure types are supported\n\n // get a certificate (a certificate as a PEM string)\n var client = (c.entity === tls.ConnectionEnd.client);\n var cert = null;\n if(c.getCertificate) {\n var hint;\n if(client) {\n hint = c.session.certificateRequest;\n } else {\n hint = c.session.extensions.server_name.serverNameList;\n }\n cert = c.getCertificate(c, hint);\n }\n\n // buffer to hold certificate list\n var certList = forge.util.createBuffer();\n if(cert !== null) {\n try {\n // normalize cert to a chain of certificates\n if(!forge.util.isArray(cert)) {\n cert = [cert];\n }\n var asn1 = null;\n for(var i = 0; i < cert.length; ++i) {\n var msg = forge.pem.decode(cert[i])[0];\n if(msg.type !== 'CERTIFICATE' &&\n msg.type !== 'X509 CERTIFICATE' &&\n msg.type !== 'TRUSTED CERTIFICATE') {\n var error = new Error('Could not convert certificate from PEM; PEM ' +\n 'header type is not \"CERTIFICATE\", \"X509 CERTIFICATE\", or ' +\n '\"TRUSTED CERTIFICATE\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert certificate from PEM; PEM is encrypted.');\n }\n\n var der = forge.util.createBuffer(msg.body);\n if(asn1 === null) {\n asn1 = forge.asn1.fromDer(der.bytes(), false);\n }\n\n // certificate entry is itself a vector with 3 length bytes\n var certBuffer = forge.util.createBuffer();\n writeVector(certBuffer, 3, der);\n\n // add cert vector to cert list vector\n certList.putBuffer(certBuffer);\n }\n\n // save certificate\n cert = forge.pki.certificateFromAsn1(asn1);\n if(client) {\n c.session.clientCertificate = cert;\n } else {\n c.session.serverCertificate = cert;\n }\n } catch(ex) {\n return c.error(c, {\n message: 'Could not send certificate list.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.bad_certificate\n }\n });\n }\n }\n\n // determine length of the handshake message\n var length = 3 + certList.length(); // cert list vector\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.certificate);\n rval.putInt24(length);\n writeVector(rval, 3, certList);\n return rval;\n};\n\n/**\n * Creates a ClientKeyExchange message.\n *\n * When this message will be sent:\n * This message is always sent by the client. It will immediately follow the\n * client certificate message, if it is sent. Otherwise it will be the first\n * message sent by the client after it receives the server hello done\n * message.\n *\n * Meaning of this message:\n * With this message, the premaster secret is set, either though direct\n * transmission of the RSA-encrypted secret, or by the transmission of\n * Diffie-Hellman parameters which will allow each side to agree upon the\n * same premaster secret. 
When the key exchange method is DH_RSA or DH_DSS,\n * client certification has been requested, and the client was able to\n * respond with a certificate which contained a Diffie-Hellman public key\n * whose parameters (group and generator) matched those specified by the\n * server in its certificate, this message will not contain any data.\n *\n * Meaning of this message:\n * If RSA is being used for key agreement and authentication, the client\n * generates a 48-byte premaster secret, encrypts it using the public key\n * from the server's certificate or the temporary RSA key provided in a\n * server key exchange message, and sends the result in an encrypted\n * premaster secret message. This structure is a variant of the client\n * key exchange message, not a message in itself.\n *\n * struct {\n * select(KeyExchangeAlgorithm) {\n * case rsa: EncryptedPreMasterSecret;\n * case diffie_hellman: ClientDiffieHellmanPublic;\n * } exchange_keys;\n * } ClientKeyExchange;\n *\n * struct {\n * ProtocolVersion client_version;\n * opaque random[46];\n * } PreMasterSecret;\n *\n * struct {\n * public-key-encrypted PreMasterSecret pre_master_secret;\n * } EncryptedPreMasterSecret;\n *\n * A public-key-encrypted element is encoded as a vector <0..2^16-1>.\n *\n * @param c the connection.\n *\n * @return the ClientKeyExchange byte buffer.\n */\ntls.createClientKeyExchange = function(c) {\n // create buffer to encrypt\n var b = forge.util.createBuffer();\n\n // add highest client-supported protocol to help server avoid version\n // rollback attacks\n b.putByte(c.session.clientHelloVersion.major);\n b.putByte(c.session.clientHelloVersion.minor);\n\n // generate and add 46 random bytes\n b.putBytes(forge.random.getBytes(46));\n\n // save pre-master secret\n var sp = c.session.sp;\n sp.pre_master_secret = b.getBytes();\n\n // RSA-encrypt the pre-master secret\n var key = c.session.serverCertificate.publicKey;\n b = key.encrypt(sp.pre_master_secret);\n\n /* Note: The encrypted pre-master secret will be stored in a\n public-key-encrypted opaque vector that has the length prefixed using\n 2 bytes, so include those 2 bytes in the handshake message length. This\n is done as a minor optimization instead of calling writeVector(). */\n\n // determine length of the handshake message\n var length = b.length + 2;\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.client_key_exchange);\n rval.putInt24(length);\n // add vector length bytes\n rval.putInt16(b.length);\n rval.putBytes(b);\n return rval;\n};\n\n/**\n * Creates a ServerKeyExchange message.\n *\n * @param c the connection.\n *\n * @return the ServerKeyExchange byte buffer.\n */\ntls.createServerKeyExchange = function(c) {\n // this implementation only supports RSA, no Diffie-Hellman support,\n // so this record is empty\n\n // determine length of the handshake message\n var length = 0;\n\n // build record fragment\n var rval = forge.util.createBuffer();\n if(length > 0) {\n rval.putByte(tls.HandshakeType.server_key_exchange);\n rval.putInt24(length);\n }\n return rval;\n};\n\n/**\n * Gets the signed data used to verify a client-side certificate. 
See\n * tls.createCertificateVerify() for details.\n *\n * @param c the connection.\n * @param callback the callback to call once the signed data is ready.\n */\ntls.getClientSignature = function(c, callback) {\n // generate data to RSA encrypt\n var b = forge.util.createBuffer();\n b.putBuffer(c.session.md5.digest());\n b.putBuffer(c.session.sha1.digest());\n b = b.getBytes();\n\n // create default signing function as necessary\n c.getSignature = c.getSignature || function(c, b, callback) {\n // do rsa encryption, call callback\n var privateKey = null;\n if(c.getPrivateKey) {\n try {\n privateKey = c.getPrivateKey(c, c.session.clientCertificate);\n privateKey = forge.pki.privateKeyFromPem(privateKey);\n } catch(ex) {\n c.error(c, {\n message: 'Could not get private key.',\n cause: ex,\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n }\n }\n if(privateKey === null) {\n c.error(c, {\n message: 'No private key set.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.internal_error\n }\n });\n } else {\n b = privateKey.sign(b, null);\n }\n callback(c, b);\n };\n\n // get client signature\n c.getSignature(c, b, callback);\n};\n\n/**\n * Creates a CertificateVerify message.\n *\n * Meaning of this message:\n * This structure conveys the client's Diffie-Hellman public value\n * (Yc) if it was not already included in the client's certificate.\n * The encoding used for Yc is determined by the enumerated\n * PublicValueEncoding. This structure is a variant of the client\n * key exchange message, not a message in itself.\n *\n * When this message will be sent:\n * This message is used to provide explicit verification of a client\n * certificate. This message is only sent following a client\n * certificate that has signing capability (i.e. all certificates\n * except those containing fixed Diffie-Hellman parameters). When\n * sent, it will immediately follow the client key exchange message.\n *\n * struct {\n * Signature signature;\n * } CertificateVerify;\n *\n * CertificateVerify.signature.md5_hash\n * MD5(handshake_messages);\n *\n * Certificate.signature.sha_hash\n * SHA(handshake_messages);\n *\n * Here handshake_messages refers to all handshake messages sent or\n * received starting at client hello up to but not including this\n * message, including the type and length fields of the handshake\n * messages.\n *\n * select(SignatureAlgorithm) {\n * case anonymous: struct { };\n * case rsa:\n * digitally-signed struct {\n * opaque md5_hash[16];\n * opaque sha_hash[20];\n * };\n * case dsa:\n * digitally-signed struct {\n * opaque sha_hash[20];\n * };\n * } Signature;\n *\n * In digital signing, one-way hash functions are used as input for a\n * signing algorithm. A digitally-signed element is encoded as an opaque\n * vector <0..2^16-1>, where the length is specified by the signing\n * algorithm and key.\n *\n * In RSA signing, a 36-byte structure of two hashes (one SHA and one\n * MD5) is signed (encrypted with the private key). 
It is encoded with\n * PKCS #1 block type 0 or type 1 as described in [PKCS1].\n *\n * In DSS, the 20 bytes of the SHA hash are run directly through the\n * Digital Signing Algorithm with no additional hashing.\n *\n * @param c the connection.\n * @param signature the signature to include in the message.\n *\n * @return the CertificateVerify byte buffer.\n */\ntls.createCertificateVerify = function(c, signature) {\n /* Note: The signature will be stored in a \"digitally-signed\" opaque\n vector that has the length prefixed using 2 bytes, so include those\n 2 bytes in the handshake message length. This is done as a minor\n optimization instead of calling writeVector(). */\n\n // determine length of the handshake message\n var length = signature.length + 2;\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.certificate_verify);\n rval.putInt24(length);\n // add vector length bytes\n rval.putInt16(signature.length);\n rval.putBytes(signature);\n return rval;\n};\n\n/**\n * Creates a CertificateRequest message.\n *\n * @param c the connection.\n *\n * @return the CertificateRequest byte buffer.\n */\ntls.createCertificateRequest = function(c) {\n // TODO: support other certificate types\n var certTypes = forge.util.createBuffer();\n\n // common RSA certificate type\n certTypes.putByte(0x01);\n\n // add distinguished names from CA store\n var cAs = forge.util.createBuffer();\n for(var key in c.caStore.certs) {\n var cert = c.caStore.certs[key];\n var dn = forge.pki.distinguishedNameToAsn1(cert.subject);\n var byteBuffer = forge.asn1.toDer(dn);\n cAs.putInt16(byteBuffer.length());\n cAs.putBuffer(byteBuffer);\n }\n\n // TODO: TLS 1.2+ has a different format\n\n // determine length of the handshake message\n var length =\n 1 + certTypes.length() +\n 2 + cAs.length();\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.certificate_request);\n rval.putInt24(length);\n writeVector(rval, 1, certTypes);\n writeVector(rval, 2, cAs);\n return rval;\n};\n\n/**\n * Creates a ServerHelloDone message.\n *\n * @param c the connection.\n *\n * @return the ServerHelloDone byte buffer.\n */\ntls.createServerHelloDone = function(c) {\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.server_hello_done);\n rval.putInt24(0);\n return rval;\n};\n\n/**\n * Creates a ChangeCipherSpec message.\n *\n * The change cipher spec protocol exists to signal transitions in\n * ciphering strategies. The protocol consists of a single message,\n * which is encrypted and compressed under the current (not the pending)\n * connection state. The message consists of a single byte of value 1.\n *\n * struct {\n * enum { change_cipher_spec(1), (255) } type;\n * } ChangeCipherSpec;\n *\n * @return the ChangeCipherSpec byte buffer.\n */\ntls.createChangeCipherSpec = function() {\n var rval = forge.util.createBuffer();\n rval.putByte(0x01);\n return rval;\n};\n\n/**\n * Creates a Finished message.\n *\n * struct {\n * opaque verify_data[12];\n * } Finished;\n *\n * verify_data\n * PRF(master_secret, finished_label, MD5(handshake_messages) +\n * SHA-1(handshake_messages)) [0..11];\n *\n * finished_label\n * For Finished messages sent by the client, the string \"client\n * finished\". For Finished messages sent by the server, the\n * string \"server finished\".\n *\n * handshake_messages\n * All of the data from all handshake messages up to but not\n * including this message. 
This is only data visible at the\n * handshake layer and does not include record layer headers.\n * This is the concatenation of all the Handshake structures as\n * defined in 7.4 exchanged thus far.\n *\n * @param c the connection.\n *\n * @return the Finished byte buffer.\n */\ntls.createFinished = function(c) {\n // generate verify_data\n var b = forge.util.createBuffer();\n b.putBuffer(c.session.md5.digest());\n b.putBuffer(c.session.sha1.digest());\n\n // TODO: determine prf function and verify length for TLS 1.2\n var client = (c.entity === tls.ConnectionEnd.client);\n var sp = c.session.sp;\n var vdl = 12;\n var prf = prf_TLS1;\n var label = client ? 'client finished' : 'server finished';\n b = prf(sp.master_secret, label, b.getBytes(), vdl);\n\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(tls.HandshakeType.finished);\n rval.putInt24(b.length());\n rval.putBuffer(b);\n return rval;\n};\n\n/**\n * Creates a HeartbeatMessage (See RFC 6520).\n *\n * struct {\n * HeartbeatMessageType type;\n * uint16 payload_length;\n * opaque payload[HeartbeatMessage.payload_length];\n * opaque padding[padding_length];\n * } HeartbeatMessage;\n *\n * The total length of a HeartbeatMessage MUST NOT exceed 2^14 or\n * max_fragment_length when negotiated as defined in [RFC6066].\n *\n * type: The message type, either heartbeat_request or heartbeat_response.\n *\n * payload_length: The length of the payload.\n *\n * payload: The payload consists of arbitrary content.\n *\n * padding: The padding is random content that MUST be ignored by the\n * receiver. The length of a HeartbeatMessage is TLSPlaintext.length\n * for TLS and DTLSPlaintext.length for DTLS. Furthermore, the\n * length of the type field is 1 byte, and the length of the\n * payload_length is 2. Therefore, the padding_length is\n * TLSPlaintext.length - payload_length - 3 for TLS and\n * DTLSPlaintext.length - payload_length - 3 for DTLS. The\n * padding_length MUST be at least 16.\n *\n * The sender of a HeartbeatMessage MUST use a random padding of at\n * least 16 bytes. 
The padding of a received HeartbeatMessage message\n * MUST be ignored.\n *\n * If the payload_length of a received HeartbeatMessage is too large,\n * the received HeartbeatMessage MUST be discarded silently.\n *\n * @param c the connection.\n * @param type the tls.HeartbeatMessageType.\n * @param payload the heartbeat data to send as the payload.\n * @param [payloadLength] the payload length to use, defaults to the\n * actual payload length.\n *\n * @return the HeartbeatRequest byte buffer.\n */\ntls.createHeartbeat = function(type, payload, payloadLength) {\n if(typeof payloadLength === 'undefined') {\n payloadLength = payload.length;\n }\n // build record fragment\n var rval = forge.util.createBuffer();\n rval.putByte(type); // heartbeat message type\n rval.putInt16(payloadLength); // payload length\n rval.putBytes(payload); // payload\n // padding\n var plaintextLength = rval.length();\n var paddingLength = Math.max(16, plaintextLength - payloadLength - 3);\n rval.putBytes(forge.random.getBytes(paddingLength));\n return rval;\n};\n\n/**\n * Fragments, compresses, encrypts, and queues a record for delivery.\n *\n * @param c the connection.\n * @param record the record to queue.\n */\ntls.queue = function(c, record) {\n // error during record creation\n if(!record) {\n return;\n }\n\n if(record.fragment.length() === 0) {\n if(record.type === tls.ContentType.handshake ||\n record.type === tls.ContentType.alert ||\n record.type === tls.ContentType.change_cipher_spec) {\n // Empty handshake, alert of change cipher spec messages are not allowed per the TLS specification and should not be sent.\n return;\n }\n }\n\n // if the record is a handshake record, update handshake hashes\n if(record.type === tls.ContentType.handshake) {\n var bytes = record.fragment.bytes();\n c.session.md5.update(bytes);\n c.session.sha1.update(bytes);\n bytes = null;\n }\n\n // handle record fragmentation\n var records;\n if(record.fragment.length() <= tls.MaxFragment) {\n records = [record];\n } else {\n // fragment data as long as it is too long\n records = [];\n var data = record.fragment.bytes();\n while(data.length > tls.MaxFragment) {\n records.push(tls.createRecord(c, {\n type: record.type,\n data: forge.util.createBuffer(data.slice(0, tls.MaxFragment))\n }));\n data = data.slice(tls.MaxFragment);\n }\n // add last record\n if(data.length > 0) {\n records.push(tls.createRecord(c, {\n type: record.type,\n data: forge.util.createBuffer(data)\n }));\n }\n }\n\n // compress and encrypt all fragmented records\n for(var i = 0; i < records.length && !c.fail; ++i) {\n // update the record using current write state\n var rec = records[i];\n var s = c.state.current.write;\n if(s.update(c, rec)) {\n // store record\n c.records.push(rec);\n }\n }\n};\n\n/**\n * Flushes all queued records to the output buffer and calls the\n * tlsDataReady() handler on the given connection.\n *\n * @param c the connection.\n *\n * @return true on success, false on failure.\n */\ntls.flush = function(c) {\n for(var i = 0; i < c.records.length; ++i) {\n var record = c.records[i];\n\n // add record header and fragment\n c.tlsData.putByte(record.type);\n c.tlsData.putByte(record.version.major);\n c.tlsData.putByte(record.version.minor);\n c.tlsData.putInt16(record.fragment.length());\n c.tlsData.putBuffer(c.records[i].fragment);\n }\n c.records = [];\n return c.tlsDataReady(c);\n};\n\n/**\n * Maps a pki.certificateError to a tls.Alert.Description.\n *\n * @param error the error to map.\n *\n * @return the alert description.\n */\nvar 
_certErrorToAlertDesc = function(error) {\n switch(error) {\n case true:\n return true;\n case forge.pki.certificateError.bad_certificate:\n return tls.Alert.Description.bad_certificate;\n case forge.pki.certificateError.unsupported_certificate:\n return tls.Alert.Description.unsupported_certificate;\n case forge.pki.certificateError.certificate_revoked:\n return tls.Alert.Description.certificate_revoked;\n case forge.pki.certificateError.certificate_expired:\n return tls.Alert.Description.certificate_expired;\n case forge.pki.certificateError.certificate_unknown:\n return tls.Alert.Description.certificate_unknown;\n case forge.pki.certificateError.unknown_ca:\n return tls.Alert.Description.unknown_ca;\n default:\n return tls.Alert.Description.bad_certificate;\n }\n};\n\n/**\n * Maps a tls.Alert.Description to a pki.certificateError.\n *\n * @param desc the alert description.\n *\n * @return the certificate error.\n */\nvar _alertDescToCertError = function(desc) {\n switch(desc) {\n case true:\n return true;\n case tls.Alert.Description.bad_certificate:\n return forge.pki.certificateError.bad_certificate;\n case tls.Alert.Description.unsupported_certificate:\n return forge.pki.certificateError.unsupported_certificate;\n case tls.Alert.Description.certificate_revoked:\n return forge.pki.certificateError.certificate_revoked;\n case tls.Alert.Description.certificate_expired:\n return forge.pki.certificateError.certificate_expired;\n case tls.Alert.Description.certificate_unknown:\n return forge.pki.certificateError.certificate_unknown;\n case tls.Alert.Description.unknown_ca:\n return forge.pki.certificateError.unknown_ca;\n default:\n return forge.pki.certificateError.bad_certificate;\n }\n};\n\n/**\n * Verifies a certificate chain against the given connection's\n * Certificate Authority store.\n *\n * @param c the TLS connection.\n * @param chain the certificate chain to verify, with the root or highest\n * authority at the end.\n *\n * @return true if successful, false if not.\n */\ntls.verifyCertificateChain = function(c, chain) {\n try {\n // Make a copy of c.verifyOptions so that we can modify options.verify\n // without modifying c.verifyOptions.\n var options = {};\n for (var key in c.verifyOptions) {\n options[key] = c.verifyOptions[key];\n }\n\n options.verify = function(vfd, depth, chain) {\n // convert pki.certificateError to tls alert description\n var desc = _certErrorToAlertDesc(vfd);\n\n // call application callback\n var ret = c.verify(c, vfd, depth, chain);\n if(ret !== true) {\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n // throw custom error\n var error = new Error('The application rejected the certificate.');\n error.send = true;\n error.alert = {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.bad_certificate\n };\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.alert) {\n error.alert.description = ret.alert;\n }\n throw error;\n }\n\n // convert tls alert description to pki.certificateError\n if(ret !== vfd) {\n ret = _alertDescToCertError(ret);\n }\n }\n\n return ret;\n };\n\n // verify chain\n forge.pki.verifyCertificateChain(c.caStore, chain, options);\n } catch(ex) {\n // build tls error if not already customized\n var err = ex;\n if(typeof err !== 'object' || forge.util.isArray(err)) {\n err = {\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: _certErrorToAlertDesc(ex)\n }\n };\n }\n if(!('send' in err)) {\n err.send = true;\n }\n if(!('alert' in err)) {\n err.alert = {\n level: 
tls.Alert.Level.fatal,\n description: _certErrorToAlertDesc(err.error)\n };\n }\n\n // send error\n c.error(c, err);\n }\n\n return !c.fail;\n};\n\n/**\n * Creates a new TLS session cache.\n *\n * @param cache optional map of session ID to cached session.\n * @param capacity the maximum size for the cache (default: 100).\n *\n * @return the new TLS session cache.\n */\ntls.createSessionCache = function(cache, capacity) {\n var rval = null;\n\n // assume input is already a session cache object\n if(cache && cache.getSession && cache.setSession && cache.order) {\n rval = cache;\n } else {\n // create cache\n rval = {};\n rval.cache = cache || {};\n rval.capacity = Math.max(capacity || 100, 1);\n rval.order = [];\n\n // store order for sessions, delete session overflow\n for(var key in cache) {\n if(rval.order.length <= capacity) {\n rval.order.push(key);\n } else {\n delete cache[key];\n }\n }\n\n // get a session from a session ID (or get any session)\n rval.getSession = function(sessionId) {\n var session = null;\n var key = null;\n\n // if session ID provided, use it\n if(sessionId) {\n key = forge.util.bytesToHex(sessionId);\n } else if(rval.order.length > 0) {\n // get first session from cache\n key = rval.order[0];\n }\n\n if(key !== null && key in rval.cache) {\n // get cached session and remove from cache\n session = rval.cache[key];\n delete rval.cache[key];\n for(var i in rval.order) {\n if(rval.order[i] === key) {\n rval.order.splice(i, 1);\n break;\n }\n }\n }\n\n return session;\n };\n\n // set a session in the cache\n rval.setSession = function(sessionId, session) {\n // remove session from cache if at capacity\n if(rval.order.length === rval.capacity) {\n var key = rval.order.shift();\n delete rval.cache[key];\n }\n // add session to cache\n var key = forge.util.bytesToHex(sessionId);\n rval.order.push(key);\n rval.cache[key] = session;\n };\n }\n\n return rval;\n};\n\n/**\n * Creates a new TLS connection.\n *\n * See public createConnection() docs for more details.\n *\n * @param options the options for this connection.\n *\n * @return the new TLS connection.\n */\ntls.createConnection = function(options) {\n var caStore = null;\n if(options.caStore) {\n // if CA store is an array, convert it to a CA store object\n if(forge.util.isArray(options.caStore)) {\n caStore = forge.pki.createCaStore(options.caStore);\n } else {\n caStore = options.caStore;\n }\n } else {\n // create empty CA store\n caStore = forge.pki.createCaStore();\n }\n\n // setup default cipher suites\n var cipherSuites = options.cipherSuites || null;\n if(cipherSuites === null) {\n cipherSuites = [];\n for(var key in tls.CipherSuites) {\n cipherSuites.push(tls.CipherSuites[key]);\n }\n }\n\n // set default entity\n var entity = (options.server || false) ?\n tls.ConnectionEnd.server : tls.ConnectionEnd.client;\n\n // create session cache if requested\n var sessionCache = options.sessionCache ?\n tls.createSessionCache(options.sessionCache) : null;\n\n // create TLS connection\n var c = {\n version: {major: tls.Version.major, minor: tls.Version.minor},\n entity: entity,\n sessionId: options.sessionId,\n caStore: caStore,\n sessionCache: sessionCache,\n cipherSuites: cipherSuites,\n connected: options.connected,\n virtualHost: options.virtualHost || null,\n verifyClient: options.verifyClient || false,\n verify: options.verify || function(cn, vfd, dpth, cts) {return vfd;},\n verifyOptions: options.verifyOptions || {},\n getCertificate: options.getCertificate || null,\n getPrivateKey: options.getPrivateKey || 
null,\n getSignature: options.getSignature || null,\n input: forge.util.createBuffer(),\n tlsData: forge.util.createBuffer(),\n data: forge.util.createBuffer(),\n tlsDataReady: options.tlsDataReady,\n dataReady: options.dataReady,\n heartbeatReceived: options.heartbeatReceived,\n closed: options.closed,\n error: function(c, ex) {\n // set origin if not set\n ex.origin = ex.origin ||\n ((c.entity === tls.ConnectionEnd.client) ? 'client' : 'server');\n\n // send TLS alert\n if(ex.send) {\n tls.queue(c, tls.createAlert(c, ex.alert));\n tls.flush(c);\n }\n\n // error is fatal by default\n var fatal = (ex.fatal !== false);\n if(fatal) {\n // set fail flag\n c.fail = true;\n }\n\n // call error handler first\n options.error(c, ex);\n\n if(fatal) {\n // fatal error, close connection, do not clear fail\n c.close(false);\n }\n },\n deflate: options.deflate || null,\n inflate: options.inflate || null\n };\n\n /**\n * Resets a closed TLS connection for reuse. Called in c.close().\n *\n * @param clearFail true to clear the fail flag (default: true).\n */\n c.reset = function(clearFail) {\n c.version = {major: tls.Version.major, minor: tls.Version.minor};\n c.record = null;\n c.session = null;\n c.peerCertificate = null;\n c.state = {\n pending: null,\n current: null\n };\n c.expect = (c.entity === tls.ConnectionEnd.client) ? SHE : CHE;\n c.fragmented = null;\n c.records = [];\n c.open = false;\n c.handshakes = 0;\n c.handshaking = false;\n c.isConnected = false;\n c.fail = !(clearFail || typeof(clearFail) === 'undefined');\n c.input.clear();\n c.tlsData.clear();\n c.data.clear();\n c.state.current = tls.createConnectionState(c);\n };\n\n // do initial reset of connection\n c.reset();\n\n /**\n * Updates the current TLS engine state based on the given record.\n *\n * @param c the TLS connection.\n * @param record the TLS record to act on.\n */\n var _update = function(c, record) {\n // get record handler (align type in table by subtracting lowest)\n var aligned = record.type - tls.ContentType.change_cipher_spec;\n var handlers = ctTable[c.entity][c.expect];\n if(aligned in handlers) {\n handlers[aligned](c, record);\n } else {\n // unexpected record\n tls.handleUnexpected(c, record);\n }\n };\n\n /**\n * Reads the record header and initializes the next record on the given\n * connection.\n *\n * @param c the TLS connection with the next record.\n *\n * @return 0 if the input data could be processed, otherwise the\n * number of bytes required for data to be processed.\n */\n var _readRecordHeader = function(c) {\n var rval = 0;\n\n // get input buffer and its length\n var b = c.input;\n var len = b.length();\n\n // need at least 5 bytes to initialize a record\n if(len < 5) {\n rval = 5 - len;\n } else {\n // enough bytes for header\n // initialize record\n c.record = {\n type: b.getByte(),\n version: {\n major: b.getByte(),\n minor: b.getByte()\n },\n length: b.getInt16(),\n fragment: forge.util.createBuffer(),\n ready: false\n };\n\n // check record version\n var compatibleVersion = (c.record.version.major === c.version.major);\n if(compatibleVersion && c.session && c.session.version) {\n // session version already set, require same minor version\n compatibleVersion = (c.record.version.minor === c.version.minor);\n }\n if(!compatibleVersion) {\n c.error(c, {\n message: 'Incompatible TLS version.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description: tls.Alert.Description.protocol_version\n }\n });\n }\n }\n\n return rval;\n };\n\n /**\n * Reads the next record's contents and appends 
its message to any\n * previously fragmented message.\n *\n * @param c the TLS connection with the next record.\n *\n * @return 0 if the input data could be processed, otherwise the\n * number of bytes required for data to be processed.\n */\n var _readRecord = function(c) {\n var rval = 0;\n\n // ensure there is enough input data to get the entire record\n var b = c.input;\n var len = b.length();\n if(len < c.record.length) {\n // not enough data yet, return how much is required\n rval = c.record.length - len;\n } else {\n // there is enough data to parse the pending record\n // fill record fragment and compact input buffer\n c.record.fragment.putBytes(b.getBytes(c.record.length));\n b.compact();\n\n // update record using current read state\n var s = c.state.current.read;\n if(s.update(c, c.record)) {\n // see if there is a previously fragmented message that the\n // new record's message fragment should be appended to\n if(c.fragmented !== null) {\n // if the record type matches a previously fragmented\n // record, append the record fragment to it\n if(c.fragmented.type === c.record.type) {\n // concatenate record fragments\n c.fragmented.fragment.putBuffer(c.record.fragment);\n c.record = c.fragmented;\n } else {\n // error, invalid fragmented record\n c.error(c, {\n message: 'Invalid fragmented record.',\n send: true,\n alert: {\n level: tls.Alert.Level.fatal,\n description:\n tls.Alert.Description.unexpected_message\n }\n });\n }\n }\n\n // record is now ready\n c.record.ready = true;\n }\n }\n\n return rval;\n };\n\n /**\n * Performs a handshake using the TLS Handshake Protocol, as a client.\n *\n * This method should only be called if the connection is in client mode.\n *\n * @param sessionId the session ID to use, null to start a new one.\n */\n c.handshake = function(sessionId) {\n // error to call this in non-client mode\n if(c.entity !== tls.ConnectionEnd.client) {\n // not fatal error\n c.error(c, {\n message: 'Cannot initiate handshake as a server.',\n fatal: false\n });\n } else if(c.handshaking) {\n // handshake is already in progress, fail but not fatal error\n c.error(c, {\n message: 'Handshake already in progress.',\n fatal: false\n });\n } else {\n // clear fail flag on reuse\n if(c.fail && !c.open && c.handshakes === 0) {\n c.fail = false;\n }\n\n // now handshaking\n c.handshaking = true;\n\n // default to blank (new session)\n sessionId = sessionId || '';\n\n // if a session ID was specified, try to find it in the cache\n var session = null;\n if(sessionId.length > 0) {\n if(c.sessionCache) {\n session = c.sessionCache.getSession(sessionId);\n }\n\n // matching session not found in cache, clear session ID\n if(session === null) {\n sessionId = '';\n }\n }\n\n // no session given, grab a session from the cache, if available\n if(sessionId.length === 0 && c.sessionCache) {\n session = c.sessionCache.getSession();\n if(session !== null) {\n sessionId = session.id;\n }\n }\n\n // set up session\n c.session = {\n id: sessionId,\n version: null,\n cipherSuite: null,\n compressionMethod: null,\n serverCertificate: null,\n certificateRequest: null,\n clientCertificate: null,\n sp: {},\n md5: forge.md.md5.create(),\n sha1: forge.md.sha1.create()\n };\n\n // use existing session information\n if(session) {\n // only update version on connection, session version not yet set\n c.version = session.version;\n c.session.sp = session.sp;\n }\n\n // generate new client random\n c.session.sp.client_random = tls.createRandom().getBytes();\n\n // connection now open\n c.open = true;\n\n 
// send hello\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.handshake,\n data: tls.createClientHello(c)\n }));\n tls.flush(c);\n }\n };\n\n /**\n * Called when TLS protocol data has been received from somewhere and should\n * be processed by the TLS engine.\n *\n * @param data the TLS protocol data, as a string, to process.\n *\n * @return 0 if the data could be processed, otherwise the number of bytes\n * required for data to be processed.\n */\n c.process = function(data) {\n var rval = 0;\n\n // buffer input data\n if(data) {\n c.input.putBytes(data);\n }\n\n // process next record if no failure, process will be called after\n // each record is handled (since handling can be asynchronous)\n if(!c.fail) {\n // reset record if ready and now empty\n if(c.record !== null &&\n c.record.ready && c.record.fragment.isEmpty()) {\n c.record = null;\n }\n\n // if there is no pending record, try to read record header\n if(c.record === null) {\n rval = _readRecordHeader(c);\n }\n\n // read the next record (if record not yet ready)\n if(!c.fail && c.record !== null && !c.record.ready) {\n rval = _readRecord(c);\n }\n\n // record ready to be handled, update engine state\n if(!c.fail && c.record !== null && c.record.ready) {\n _update(c, c.record);\n }\n }\n\n return rval;\n };\n\n /**\n * Requests that application data be packaged into a TLS record. The\n * tlsDataReady handler will be called when the TLS record(s) have been\n * prepared.\n *\n * @param data the application data, as a raw 'binary' encoded string, to\n * be sent; to send utf-16/utf-8 string data, use the return value\n * of util.encodeUtf8(str).\n *\n * @return true on success, false on failure.\n */\n c.prepare = function(data) {\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.application_data,\n data: forge.util.createBuffer(data)\n }));\n return tls.flush(c);\n };\n\n /**\n * Requests that a heartbeat request be packaged into a TLS record for\n * transmission. The tlsDataReady handler will be called when TLS record(s)\n * have been prepared.\n *\n * When a heartbeat response has been received, the heartbeatReceived\n * handler will be called with the matching payload. 
This handler can\n * be used to clear a retransmission timer, etc.\n *\n * @param payload the heartbeat data to send as the payload in the message.\n * @param [payloadLength] the payload length to use, defaults to the\n * actual payload length.\n *\n * @return true on success, false on failure.\n */\n c.prepareHeartbeatRequest = function(payload, payloadLength) {\n if(payload instanceof forge.util.ByteBuffer) {\n payload = payload.bytes();\n }\n if(typeof payloadLength === 'undefined') {\n payloadLength = payload.length;\n }\n c.expectedHeartbeatPayload = payload;\n tls.queue(c, tls.createRecord(c, {\n type: tls.ContentType.heartbeat,\n data: tls.createHeartbeat(\n tls.HeartbeatMessageType.heartbeat_request, payload, payloadLength)\n }));\n return tls.flush(c);\n };\n\n /**\n * Closes the connection (sends a close_notify alert).\n *\n * @param clearFail true to clear the fail flag (default: true).\n */\n c.close = function(clearFail) {\n // save session if connection didn't fail\n if(!c.fail && c.sessionCache && c.session) {\n // only need to preserve session ID, version, and security params\n var session = {\n id: c.session.id,\n version: c.session.version,\n sp: c.session.sp\n };\n session.sp.keys = null;\n c.sessionCache.setSession(session.id, session);\n }\n\n if(c.open) {\n // connection no longer open, clear input\n c.open = false;\n c.input.clear();\n\n // if connected or handshaking, send an alert\n if(c.isConnected || c.handshaking) {\n c.isConnected = c.handshaking = false;\n\n // send close_notify alert\n tls.queue(c, tls.createAlert(c, {\n level: tls.Alert.Level.warning,\n description: tls.Alert.Description.close_notify\n }));\n tls.flush(c);\n }\n\n // call handler\n c.closed(c);\n }\n\n // reset TLS connection, do not clear fail flag\n c.reset(clearFail);\n };\n\n return c;\n};\n\n/* TLS API */\nmodule.exports = forge.tls = forge.tls || {};\n\n// expose non-functions\nfor(var key in tls) {\n if(typeof tls[key] !== 'function') {\n forge.tls[key] = tls[key];\n }\n}\n\n// expose prf_tls1 for testing\nforge.tls.prf_tls1 = prf_TLS1;\n\n// expose sha1 hmac method\nforge.tls.hmac_sha1 = hmac_sha1;\n\n// expose session cache creation\nforge.tls.createSessionCache = tls.createSessionCache;\n\n/**\n * Creates a new TLS connection. This does not make any assumptions about the\n * transport layer that TLS is working on top of, ie: it does not assume there\n * is a TCP/IP connection or establish one. A TLS connection is totally\n * abstracted away from the layer is runs on top of, it merely establishes a\n * secure channel between a client\" and a \"server\".\n *\n * A TLS connection contains 4 connection states: pending read and write, and\n * current read and write.\n *\n * At initialization, the current read and write states will be null. Only once\n * the security parameters have been set and the keys have been generated can\n * the pending states be converted into current states. Current states will be\n * updated for each record processed.\n *\n * A custom certificate verify callback may be provided to check information\n * like the common name on the server's certificate. It will be called for\n * every certificate in the chain. 
It has the following signature:\n *\n * variable func(c, certs, index, preVerify)\n * Where:\n * c The TLS connection\n * verified Set to true if certificate was verified, otherwise the alert\n * tls.Alert.Description for why the certificate failed.\n * depth The current index in the chain, where 0 is the server's cert.\n * certs The certificate chain, *NOTE* if the server was anonymous then\n * the chain will be empty.\n *\n * The function returns true on success and on failure either the appropriate\n * tls.Alert.Description or an object with 'alert' set to the appropriate\n * tls.Alert.Description and 'message' set to a custom error message. If true\n * is not returned then the connection will abort using, in order of\n * availability, first the returned alert description, second the preVerify\n * alert description, and lastly the default 'bad_certificate'.\n *\n * There are three callbacks that can be used to make use of client-side\n * certificates where each takes the TLS connection as the first parameter:\n *\n * getCertificate(conn, hint)\n * The second parameter is a hint as to which certificate should be\n * returned. If the connection entity is a client, then the hint will be\n * the CertificateRequest message from the server that is part of the\n * TLS protocol. If the connection entity is a server, then it will be\n * the servername list provided via an SNI extension the ClientHello, if\n * one was provided (empty array if not). The hint can be examined to\n * determine which certificate to use (advanced). Most implementations\n * will just return a certificate. The return value must be a\n * PEM-formatted certificate or an array of PEM-formatted certificates\n * that constitute a certificate chain, with the first in the array/chain\n * being the client's certificate.\n * getPrivateKey(conn, certificate)\n * The second parameter is an forge.pki X.509 certificate object that\n * is associated with the requested private key. The return value must\n * be a PEM-formatted private key.\n * getSignature(conn, bytes, callback)\n * This callback can be used instead of getPrivateKey if the private key\n * is not directly accessible in javascript or should not be. For\n * instance, a secure external web service could provide the signature\n * in exchange for appropriate credentials. The second parameter is a\n * string of bytes to be signed that are part of the TLS protocol. These\n * bytes are used to verify that the private key for the previously\n * provided client-side certificate is accessible to the client. The\n * callback is a function that takes 2 parameters, the TLS connection\n * and the RSA encrypted (signed) bytes as a string. 
This callback must\n * be called once the signature is ready.\n *\n * @param options the options for this connection:\n * server: true if the connection is server-side, false for client.\n * sessionId: a session ID to reuse, null for a new connection.\n * caStore: an array of certificates to trust.\n * sessionCache: a session cache to use.\n * cipherSuites: an optional array of cipher suites to use,\n * see tls.CipherSuites.\n * connected: function(conn) called when the first handshake completes.\n * virtualHost: the virtual server name to use in a TLS SNI extension.\n * verifyClient: true to require a client certificate in server mode,\n * 'optional' to request one, false not to (default: false).\n * verify: a handler used to custom verify certificates in the chain.\n * verifyOptions: an object with options for the certificate chain validation.\n * See documentation of pki.verifyCertificateChain for possible options.\n * verifyOptions.verify is ignored. If you wish to specify a verify handler\n * use the verify key.\n * getCertificate: an optional callback used to get a certificate or\n * a chain of certificates (as an array).\n * getPrivateKey: an optional callback used to get a private key.\n * getSignature: an optional callback used to get a signature.\n * tlsDataReady: function(conn) called when TLS protocol data has been\n * prepared and is ready to be used (typically sent over a socket\n * connection to its destination), read from conn.tlsData buffer.\n * dataReady: function(conn) called when application data has\n * been parsed from a TLS record and should be consumed by the\n * application, read from conn.data buffer.\n * closed: function(conn) called when the connection has been closed.\n * error: function(conn, error) called when there was an error.\n * deflate: function(inBytes) if provided, will deflate TLS records using\n * the deflate algorithm if the server supports it.\n * inflate: function(inBytes) if provided, will inflate TLS records using\n * the deflate algorithm if the server supports it.\n *\n * @return the new TLS connection.\n */\nforge.tls.createConnection = tls.createConnection;\n","/**\n * Utility functions for web applications.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2018 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nvar baseN = require('./baseN');\n\n/* Utilities API */\nvar util = module.exports = forge.util = forge.util || {};\n\n// define setImmediate and nextTick\n(function() {\n // use native nextTick (unless we're in webpack)\n // webpack (or better node-libs-browser polyfill) sets process.browser.\n // this way we can detect webpack properly\n if(typeof process !== 'undefined' && process.nextTick && !process.browser) {\n util.nextTick = process.nextTick;\n if(typeof setImmediate === 'function') {\n util.setImmediate = setImmediate;\n } else {\n // polyfill setImmediate with nextTick, older versions of node\n // (those w/o setImmediate) won't totally starve IO\n util.setImmediate = util.nextTick;\n }\n return;\n }\n\n // polyfill nextTick with native setImmediate\n if(typeof setImmediate === 'function') {\n util.setImmediate = function() { return setImmediate.apply(undefined, arguments); };\n util.nextTick = function(callback) {\n return setImmediate(callback);\n };\n return;\n }\n\n /* Note: A polyfill upgrade pattern is used here to allow combining\n polyfills. For example, MutationObserver is fast, but blocks UI updates,\n so it needs to allow UI updates periodically, so it falls back on\n postMessage or setTimeout. 
*/\n\n // polyfill with setTimeout\n util.setImmediate = function(callback) {\n setTimeout(callback, 0);\n };\n\n // upgrade polyfill to use postMessage\n if(typeof window !== 'undefined' &&\n typeof window.postMessage === 'function') {\n var msg = 'forge.setImmediate';\n var callbacks = [];\n util.setImmediate = function(callback) {\n callbacks.push(callback);\n // only send message when one hasn't been sent in\n // the current turn of the event loop\n if(callbacks.length === 1) {\n window.postMessage(msg, '*');\n }\n };\n function handler(event) {\n if(event.source === window && event.data === msg) {\n event.stopPropagation();\n var copy = callbacks.slice();\n callbacks.length = 0;\n copy.forEach(function(callback) {\n callback();\n });\n }\n }\n window.addEventListener('message', handler, true);\n }\n\n // upgrade polyfill to use MutationObserver\n if(typeof MutationObserver !== 'undefined') {\n // polyfill with MutationObserver\n var now = Date.now();\n var attr = true;\n var div = document.createElement('div');\n var callbacks = [];\n new MutationObserver(function() {\n var copy = callbacks.slice();\n callbacks.length = 0;\n copy.forEach(function(callback) {\n callback();\n });\n }).observe(div, {attributes: true});\n var oldSetImmediate = util.setImmediate;\n util.setImmediate = function(callback) {\n if(Date.now() - now > 15) {\n now = Date.now();\n oldSetImmediate(callback);\n } else {\n callbacks.push(callback);\n // only trigger observer when it hasn't been triggered in\n // the current turn of the event loop\n if(callbacks.length === 1) {\n div.setAttribute('a', attr = !attr);\n }\n }\n };\n }\n\n util.nextTick = util.setImmediate;\n})();\n\n// check if running under Node.js\nutil.isNodejs =\n typeof process !== 'undefined' && process.versions && process.versions.node;\n\n\n// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while\n// it will point to `window` in the main thread.\n// To remain compatible with older browsers, we fall back to 'window' if 'self'\n// is not available.\nutil.globalScope = (function() {\n if(util.isNodejs) {\n return global;\n }\n\n return typeof self === 'undefined' ? window : self;\n})();\n\n// define isArray\nutil.isArray = Array.isArray || function(x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n};\n\n// define isArrayBuffer\nutil.isArrayBuffer = function(x) {\n return typeof ArrayBuffer !== 'undefined' && x instanceof ArrayBuffer;\n};\n\n// define isArrayBufferView\nutil.isArrayBufferView = function(x) {\n return x && util.isArrayBuffer(x.buffer) && x.byteLength !== undefined;\n};\n\n/**\n * Ensure a bits param is 8, 16, 24, or 32. 
Used to validate input for\n * algorithms where bit manipulation, JavaScript limitations, and/or algorithm\n * design only allow for byte operations of a limited size.\n *\n * @param n number of bits.\n *\n * Throw Error if n invalid.\n */\nfunction _checkBitsParam(n) {\n if(!(n === 8 || n === 16 || n === 24 || n === 32)) {\n throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n);\n }\n}\n\n// TODO: set ByteBuffer to best available backing\nutil.ByteBuffer = ByteStringBuffer;\n\n/** Buffer w/BinaryString backing */\n\n/**\n * Constructor for a binary string backed byte buffer.\n *\n * @param [b] the bytes to wrap (either encoded as string, one byte per\n * character, or as an ArrayBuffer or Typed Array).\n */\nfunction ByteStringBuffer(b) {\n // TODO: update to match DataBuffer API\n\n // the data in this buffer\n this.data = '';\n // the pointer for reading from this buffer\n this.read = 0;\n\n if(typeof b === 'string') {\n this.data = b;\n } else if(util.isArrayBuffer(b) || util.isArrayBufferView(b)) {\n if(typeof Buffer !== 'undefined' && b instanceof Buffer) {\n this.data = b.toString('binary');\n } else {\n // convert native buffer to forge buffer\n // FIXME: support native buffers internally instead\n var arr = new Uint8Array(b);\n try {\n this.data = String.fromCharCode.apply(null, arr);\n } catch(e) {\n for(var i = 0; i < arr.length; ++i) {\n this.putByte(arr[i]);\n }\n }\n }\n } else if(b instanceof ByteStringBuffer ||\n (typeof b === 'object' && typeof b.data === 'string' &&\n typeof b.read === 'number')) {\n // copy existing buffer\n this.data = b.data;\n this.read = b.read;\n }\n\n // used for v8 optimization\n this._constructedStringLength = 0;\n}\nutil.ByteStringBuffer = ByteStringBuffer;\n\n/* Note: This is an optimization for V8-based browsers. When V8 concatenates\n a string, the strings are only joined logically using a \"cons string\" or\n \"constructed/concatenated string\". These containers keep references to one\n another and can result in very large memory usage. For example, if a 2MB\n string is constructed by concatenating 4 bytes together at a time, the\n memory usage will be ~44MB; so ~22x increase. The strings are only joined\n together when an operation requiring their joining takes place, such as\n substr(). This function is called when adding data to this buffer to ensure\n these types of strings are periodically joined to reduce the memory\n footprint. 
*/\nvar _MAX_CONSTRUCTED_STRING_LENGTH = 4096;\nutil.ByteStringBuffer.prototype._optimizeConstructedString = function(x) {\n this._constructedStringLength += x;\n if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) {\n // this substr() should cause the constructed string to join\n this.data.substr(0, 1);\n this._constructedStringLength = 0;\n }\n};\n\n/**\n * Gets the number of bytes in this buffer.\n *\n * @return the number of bytes in this buffer.\n */\nutil.ByteStringBuffer.prototype.length = function() {\n return this.data.length - this.read;\n};\n\n/**\n * Gets whether or not this buffer is empty.\n *\n * @return true if this buffer is empty, false if not.\n */\nutil.ByteStringBuffer.prototype.isEmpty = function() {\n return this.length() <= 0;\n};\n\n/**\n * Puts a byte in this buffer.\n *\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putByte = function(b) {\n return this.putBytes(String.fromCharCode(b));\n};\n\n/**\n * Puts a byte in this buffer N times.\n *\n * @param b the byte to put.\n * @param n the number of bytes of value b to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.fillWithByte = function(b, n) {\n b = String.fromCharCode(b);\n var d = this.data;\n while(n > 0) {\n if(n & 1) {\n d += b;\n }\n n >>>= 1;\n if(n > 0) {\n b += b;\n }\n }\n this.data = d;\n this._optimizeConstructedString(n);\n return this;\n};\n\n/**\n * Puts bytes in this buffer.\n *\n * @param bytes the bytes (as a binary encoded string) to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putBytes = function(bytes) {\n this.data += bytes;\n this._optimizeConstructedString(bytes.length);\n return this;\n};\n\n/**\n * Puts a UTF-16 encoded string into this buffer.\n *\n * @param str the string to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putString = function(str) {\n return this.putBytes(util.encodeUtf8(str));\n};\n\n/**\n * Puts a 16-bit integer in this buffer in big-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt16 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 24-bit integer in this buffer in big-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt24 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 32-bit integer in this buffer in big-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt32 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 24 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 16-bit integer in this buffer in little-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt16Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF));\n};\n\n/**\n * Puts a 24-bit integer in this buffer in little-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt24Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) 
+\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF));\n};\n\n/**\n * Puts a 32-bit integer in this buffer in little-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt32Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 24 & 0xFF));\n};\n\n/**\n * Puts an n-bit integer in this buffer in big-endian order.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt = function(i, n) {\n _checkBitsParam(n);\n var bytes = '';\n do {\n n -= 8;\n bytes += String.fromCharCode((i >> n) & 0xFF);\n } while(n > 0);\n return this.putBytes(bytes);\n};\n\n/**\n * Puts a signed n-bit integer in this buffer in big-endian order. Two's\n * complement representation is used.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putSignedInt = function(i, n) {\n // putInt checks n\n if(i < 0) {\n i += 2 << (n - 1);\n }\n return this.putInt(i, n);\n};\n\n/**\n * Puts the given buffer into this buffer.\n *\n * @param buffer the buffer to put into this one.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putBuffer = function(buffer) {\n return this.putBytes(buffer.getBytes());\n};\n\n/**\n * Gets a byte from this buffer and advances the read pointer by 1.\n *\n * @return the byte.\n */\nutil.ByteStringBuffer.prototype.getByte = function() {\n return this.data.charCodeAt(this.read++);\n};\n\n/**\n * Gets a uint16 from this buffer in big-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.ByteStringBuffer.prototype.getInt16 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 8 ^\n this.data.charCodeAt(this.read + 1));\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in big-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.ByteStringBuffer.prototype.getInt24 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 16 ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2));\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in big-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.ByteStringBuffer.prototype.getInt32 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 24 ^\n this.data.charCodeAt(this.read + 1) << 16 ^\n this.data.charCodeAt(this.read + 2) << 8 ^\n this.data.charCodeAt(this.read + 3));\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets a uint16 from this buffer in little-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.ByteStringBuffer.prototype.getInt16Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in little-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.ByteStringBuffer.prototype.getInt24Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2) << 16);\n 
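As a quick illustration of the ByteStringBuffer integer helpers documented here (values chosen arbitrarily; this sketch assumes the standalone node-forge package and is not part of the bundle), big-endian and little-endian writes can be mixed freely and read back in the same order:

var forge = require('node-forge');   // assumption: standalone node-forge package

var buf = forge.util.createBuffer();
buf.putInt16(0x0102);                // big-endian: 01 02
buf.putInt32Le(0x01020304);          // little-endian: 04 03 02 01
buf.putInt(0x05, 8);                 // n-bit variant; n must be 8, 16, 24 or 32

console.log(buf.toHex());            // '01020403020105' (read pointer untouched)
console.log(buf.getInt16());         // 258      (0x0102)
console.log(buf.getInt32Le());       // 16909060 (0x01020304)
console.log(buf.getByte());          // 5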
this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in little-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.ByteStringBuffer.prototype.getInt32Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2) << 16 ^\n this.data.charCodeAt(this.read + 3) << 24);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets an n-bit integer from this buffer in big-endian order and advances the\n * read pointer by ceil(n/8).\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.ByteStringBuffer.prototype.getInt = function(n) {\n _checkBitsParam(n);\n var rval = 0;\n do {\n // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.\n rval = (rval << 8) + this.data.charCodeAt(this.read++);\n n -= 8;\n } while(n > 0);\n return rval;\n};\n\n/**\n * Gets a signed n-bit integer from this buffer in big-endian order, using\n * two's complement, and advances the read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.ByteStringBuffer.prototype.getSignedInt = function(n) {\n // getInt checks n\n var x = this.getInt(n);\n var max = 2 << (n - 2);\n if(x >= max) {\n x -= max << 1;\n }\n return x;\n};\n\n/**\n * Reads bytes out as a binary encoded string and clears them from the\n * buffer. Note that the resulting string is binary encoded (in node.js this\n * encoding is referred to as `binary`, it is *not* `utf8`).\n *\n * @param count the number of bytes to read, undefined or null for all.\n *\n * @return a binary encoded string of bytes.\n */\nutil.ByteStringBuffer.prototype.getBytes = function(count) {\n var rval;\n if(count) {\n // read count bytes\n count = Math.min(this.length(), count);\n rval = this.data.slice(this.read, this.read + count);\n this.read += count;\n } else if(count === 0) {\n rval = '';\n } else {\n // read all bytes, optimize to only copy when needed\n rval = (this.read === 0) ? 
this.data : this.data.slice(this.read);\n this.clear();\n }\n return rval;\n};\n\n/**\n * Gets a binary encoded string of the bytes from this buffer without\n * modifying the read pointer.\n *\n * @param count the number of bytes to get, omit to get all.\n *\n * @return a string full of binary encoded characters.\n */\nutil.ByteStringBuffer.prototype.bytes = function(count) {\n return (typeof(count) === 'undefined' ?\n this.data.slice(this.read) :\n this.data.slice(this.read, this.read + count));\n};\n\n/**\n * Gets a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n *\n * @return the byte.\n */\nutil.ByteStringBuffer.prototype.at = function(i) {\n return this.data.charCodeAt(this.read + i);\n};\n\n/**\n * Puts a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.setAt = function(i, b) {\n this.data = this.data.substr(0, this.read + i) +\n String.fromCharCode(b) +\n this.data.substr(this.read + i + 1);\n return this;\n};\n\n/**\n * Gets the last byte without modifying the read pointer.\n *\n * @return the last byte.\n */\nutil.ByteStringBuffer.prototype.last = function() {\n return this.data.charCodeAt(this.data.length - 1);\n};\n\n/**\n * Creates a copy of this buffer.\n *\n * @return the copy.\n */\nutil.ByteStringBuffer.prototype.copy = function() {\n var c = util.createBuffer(this.data);\n c.read = this.read;\n return c;\n};\n\n/**\n * Compacts this buffer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.compact = function() {\n if(this.read > 0) {\n this.data = this.data.slice(this.read);\n this.read = 0;\n }\n return this;\n};\n\n/**\n * Clears this buffer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.clear = function() {\n this.data = '';\n this.read = 0;\n return this;\n};\n\n/**\n * Shortens this buffer by triming bytes off of the end of this buffer.\n *\n * @param count the number of bytes to trim off.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.truncate = function(count) {\n var len = Math.max(0, this.length() - count);\n this.data = this.data.substr(this.read, len);\n this.read = 0;\n return this;\n};\n\n/**\n * Converts this buffer to a hexadecimal string.\n *\n * @return a hexadecimal string.\n */\nutil.ByteStringBuffer.prototype.toHex = function() {\n var rval = '';\n for(var i = this.read; i < this.data.length; ++i) {\n var b = this.data.charCodeAt(i);\n if(b < 16) {\n rval += '0';\n }\n rval += b.toString(16);\n }\n return rval;\n};\n\n/**\n * Converts this buffer to a UTF-16 string (standard JavaScript string).\n *\n * @return a UTF-16 string.\n */\nutil.ByteStringBuffer.prototype.toString = function() {\n return util.decodeUtf8(this.bytes());\n};\n\n/** End Buffer w/BinaryString backing */\n\n/** Buffer w/UInt8Array backing */\n\n/**\n * FIXME: Experimental. Do not use yet.\n *\n * Constructor for an ArrayBuffer-backed byte buffer.\n *\n * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a\n * TypedArray.\n *\n * If a string is given, its encoding should be provided as an option,\n * otherwise it will default to 'binary'. A 'binary' string is encoded such\n * that each character is one byte in length and size.\n *\n * If an ArrayBuffer, DataView, or TypedArray is given, it will be used\n * *directly* without any copying. 
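A short sketch of the ByteStringBuffer read-pointer semantics documented above (illustrative only, assuming the standalone node-forge package): bytes() and toHex() are non-destructive, while getBytes() consumes data and compact() drops the consumed prefix.

var forge = require('node-forge');   // assumption: standalone node-forge package

var b = forge.util.createBuffer('abc', 'utf8');
console.log(b.toHex());              // '616263', read pointer untouched
console.log(b.bytes(2));             // 'ab',     read pointer untouched
console.log(b.getBytes(2));          // 'ab',     read pointer advanced by 2
console.log(b.length());             // 1
b.compact();                         // discard the two consumed bytes from this.data
console.log(b.toString());           // 'c' (decoded as UTF-8)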
Note that, if a write to the buffer requires\n * more space, the buffer will allocate a new backing ArrayBuffer to\n * accommodate. The starting read and write offsets for the buffer may be\n * given as options.\n *\n * @param [b] the initial bytes for this buffer.\n * @param options the options to use:\n * [readOffset] the starting read offset to use (default: 0).\n * [writeOffset] the starting write offset to use (default: the\n * length of the first parameter).\n * [growSize] the minimum amount, in bytes, to grow the buffer by to\n * accommodate writes (default: 1024).\n * [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the\n * first parameter, if it is a string (default: 'binary').\n */\nfunction DataBuffer(b, options) {\n // default options\n options = options || {};\n\n // pointers for read from/write to buffer\n this.read = options.readOffset || 0;\n this.growSize = options.growSize || 1024;\n\n var isArrayBuffer = util.isArrayBuffer(b);\n var isArrayBufferView = util.isArrayBufferView(b);\n if(isArrayBuffer || isArrayBufferView) {\n // use ArrayBuffer directly\n if(isArrayBuffer) {\n this.data = new DataView(b);\n } else {\n // TODO: adjust read/write offset based on the type of view\n // or specify that this must be done in the options ... that the\n // offsets are byte-based\n this.data = new DataView(b.buffer, b.byteOffset, b.byteLength);\n }\n this.write = ('writeOffset' in options ?\n options.writeOffset : this.data.byteLength);\n return;\n }\n\n // initialize to empty array buffer and add any given bytes using putBytes\n this.data = new DataView(new ArrayBuffer(0));\n this.write = 0;\n\n if(b !== null && b !== undefined) {\n this.putBytes(b);\n }\n\n if('writeOffset' in options) {\n this.write = options.writeOffset;\n }\n}\nutil.DataBuffer = DataBuffer;\n\n/**\n * Gets the number of bytes in this buffer.\n *\n * @return the number of bytes in this buffer.\n */\nutil.DataBuffer.prototype.length = function() {\n return this.write - this.read;\n};\n\n/**\n * Gets whether or not this buffer is empty.\n *\n * @return true if this buffer is empty, false if not.\n */\nutil.DataBuffer.prototype.isEmpty = function() {\n return this.length() <= 0;\n};\n\n/**\n * Ensures this buffer has enough empty space to accommodate the given number\n * of bytes. An optional parameter may be given that indicates a minimum\n * amount to grow the buffer if necessary. 
If the parameter is not given,\n * the buffer will be grown by some previously-specified default amount\n * or heuristic.\n *\n * @param amount the number of bytes to accommodate.\n * @param [growSize] the minimum amount, in bytes, to grow the buffer by if\n * necessary.\n */\nutil.DataBuffer.prototype.accommodate = function(amount, growSize) {\n if(this.length() >= amount) {\n return this;\n }\n growSize = Math.max(growSize || this.growSize, amount);\n\n // grow buffer\n var src = new Uint8Array(\n this.data.buffer, this.data.byteOffset, this.data.byteLength);\n var dst = new Uint8Array(this.length() + growSize);\n dst.set(src);\n this.data = new DataView(dst.buffer);\n\n return this;\n};\n\n/**\n * Puts a byte in this buffer.\n *\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putByte = function(b) {\n this.accommodate(1);\n this.data.setUint8(this.write++, b);\n return this;\n};\n\n/**\n * Puts a byte in this buffer N times.\n *\n * @param b the byte to put.\n * @param n the number of bytes of value b to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.fillWithByte = function(b, n) {\n this.accommodate(n);\n for(var i = 0; i < n; ++i) {\n this.data.setUint8(b);\n }\n return this;\n};\n\n/**\n * Puts bytes in this buffer. The bytes may be given as a string, an\n * ArrayBuffer, a DataView, or a TypedArray.\n *\n * @param bytes the bytes to put.\n * @param [encoding] the encoding for the first parameter ('binary', 'utf8',\n * 'utf16', 'hex'), if it is a string (default: 'binary').\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putBytes = function(bytes, encoding) {\n if(util.isArrayBufferView(bytes)) {\n var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);\n var len = src.byteLength - src.byteOffset;\n this.accommodate(len);\n var dst = new Uint8Array(this.data.buffer, this.write);\n dst.set(src);\n this.write += len;\n return this;\n }\n\n if(util.isArrayBuffer(bytes)) {\n var src = new Uint8Array(bytes);\n this.accommodate(src.byteLength);\n var dst = new Uint8Array(this.data.buffer);\n dst.set(src, this.write);\n this.write += src.byteLength;\n return this;\n }\n\n // bytes is a util.DataBuffer or equivalent\n if(bytes instanceof util.DataBuffer ||\n (typeof bytes === 'object' &&\n typeof bytes.read === 'number' && typeof bytes.write === 'number' &&\n util.isArrayBufferView(bytes.data))) {\n var src = new Uint8Array(bytes.data.byteLength, bytes.read, bytes.length());\n this.accommodate(src.byteLength);\n var dst = new Uint8Array(bytes.data.byteLength, this.write);\n dst.set(src);\n this.write += src.byteLength;\n return this;\n }\n\n if(bytes instanceof util.ByteStringBuffer) {\n // copy binary string and process as the same as a string parameter below\n bytes = bytes.data;\n encoding = 'binary';\n }\n\n // string conversion\n encoding = encoding || 'binary';\n if(typeof bytes === 'string') {\n var view;\n\n // decode from string\n if(encoding === 'hex') {\n this.accommodate(Math.ceil(bytes.length / 2));\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.hex.decode(bytes, view, this.write);\n return this;\n }\n if(encoding === 'base64') {\n this.accommodate(Math.ceil(bytes.length / 4) * 3);\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.base64.decode(bytes, view, this.write);\n return this;\n }\n\n // encode text as UTF-8 bytes\n if(encoding === 'utf8') {\n // encode as UTF-8 then decode string as raw binary\n bytes = 
util.encodeUtf8(bytes);\n encoding = 'binary';\n }\n\n // decode string as raw binary\n if(encoding === 'binary' || encoding === 'raw') {\n // one byte per character\n this.accommodate(bytes.length);\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.raw.decode(view);\n return this;\n }\n\n // encode text as UTF-16 bytes\n if(encoding === 'utf16') {\n // two bytes per character\n this.accommodate(bytes.length * 2);\n view = new Uint16Array(this.data.buffer, this.write);\n this.write += util.text.utf16.encode(view);\n return this;\n }\n\n throw new Error('Invalid encoding: ' + encoding);\n }\n\n throw Error('Invalid parameter: ' + bytes);\n};\n\n/**\n * Puts the given buffer into this buffer.\n *\n * @param buffer the buffer to put into this one.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putBuffer = function(buffer) {\n this.putBytes(buffer);\n buffer.clear();\n return this;\n};\n\n/**\n * Puts a string into this buffer.\n *\n * @param str the string to put.\n * @param [encoding] the encoding for the string (default: 'utf16').\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putString = function(str) {\n return this.putBytes(str, 'utf16');\n};\n\n/**\n * Puts a 16-bit integer in this buffer in big-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt16 = function(i) {\n this.accommodate(2);\n this.data.setInt16(this.write, i);\n this.write += 2;\n return this;\n};\n\n/**\n * Puts a 24-bit integer in this buffer in big-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt24 = function(i) {\n this.accommodate(3);\n this.data.setInt16(this.write, i >> 8 & 0xFFFF);\n this.data.setInt8(this.write, i >> 16 & 0xFF);\n this.write += 3;\n return this;\n};\n\n/**\n * Puts a 32-bit integer in this buffer in big-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt32 = function(i) {\n this.accommodate(4);\n this.data.setInt32(this.write, i);\n this.write += 4;\n return this;\n};\n\n/**\n * Puts a 16-bit integer in this buffer in little-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt16Le = function(i) {\n this.accommodate(2);\n this.data.setInt16(this.write, i, true);\n this.write += 2;\n return this;\n};\n\n/**\n * Puts a 24-bit integer in this buffer in little-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt24Le = function(i) {\n this.accommodate(3);\n this.data.setInt8(this.write, i >> 16 & 0xFF);\n this.data.setInt16(this.write, i >> 8 & 0xFFFF, true);\n this.write += 3;\n return this;\n};\n\n/**\n * Puts a 32-bit integer in this buffer in little-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt32Le = function(i) {\n this.accommodate(4);\n this.data.setInt32(this.write, i, true);\n this.write += 4;\n return this;\n};\n\n/**\n * Puts an n-bit integer in this buffer in big-endian order.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt = function(i, n) {\n _checkBitsParam(n);\n this.accommodate(n / 8);\n do {\n n -= 8;\n this.data.setInt8(this.write++, (i >> n) & 0xFF);\n } while(n > 0);\n return this;\n};\n\n/**\n * Puts a 
signed n-bit integer in this buffer in big-endian order. Two's\n * complement representation is used.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putSignedInt = function(i, n) {\n _checkBitsParam(n);\n this.accommodate(n / 8);\n if(i < 0) {\n i += 2 << (n - 1);\n }\n return this.putInt(i, n);\n};\n\n/**\n * Gets a byte from this buffer and advances the read pointer by 1.\n *\n * @return the byte.\n */\nutil.DataBuffer.prototype.getByte = function() {\n return this.data.getInt8(this.read++);\n};\n\n/**\n * Gets a uint16 from this buffer in big-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.DataBuffer.prototype.getInt16 = function() {\n var rval = this.data.getInt16(this.read);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in big-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.DataBuffer.prototype.getInt24 = function() {\n var rval = (\n this.data.getInt16(this.read) << 8 ^\n this.data.getInt8(this.read + 2));\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in big-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.DataBuffer.prototype.getInt32 = function() {\n var rval = this.data.getInt32(this.read);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets a uint16 from this buffer in little-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.DataBuffer.prototype.getInt16Le = function() {\n var rval = this.data.getInt16(this.read, true);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in little-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.DataBuffer.prototype.getInt24Le = function() {\n var rval = (\n this.data.getInt8(this.read) ^\n this.data.getInt16(this.read + 1, true) << 8);\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in little-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.DataBuffer.prototype.getInt32Le = function() {\n var rval = this.data.getInt32(this.read, true);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets an n-bit integer from this buffer in big-endian order and advances the\n * read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.DataBuffer.prototype.getInt = function(n) {\n _checkBitsParam(n);\n var rval = 0;\n do {\n // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.\n rval = (rval << 8) + this.data.getInt8(this.read++);\n n -= 8;\n } while(n > 0);\n return rval;\n};\n\n/**\n * Gets a signed n-bit integer from this buffer in big-endian order, using\n * two's complement, and advances the read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.DataBuffer.prototype.getSignedInt = function(n) {\n // getInt checks n\n var x = this.getInt(n);\n var max = 2 << (n - 2);\n if(x >= max) {\n x -= max << 1;\n }\n return x;\n};\n\n/**\n * Reads bytes out as a binary encoded string and clears them from the\n * buffer.\n *\n * @param count the number of bytes to read, undefined or null for all.\n *\n * @return a binary encoded string of bytes.\n */\nutil.DataBuffer.prototype.getBytes = function(count) {\n // TODO: deprecate this method, it 
is poorly named and\n // this.toString('binary') replaces it\n // add a toTypedArray()/toArrayBuffer() function\n var rval;\n if(count) {\n // read count bytes\n count = Math.min(this.length(), count);\n rval = this.data.slice(this.read, this.read + count);\n this.read += count;\n } else if(count === 0) {\n rval = '';\n } else {\n // read all bytes, optimize to only copy when needed\n rval = (this.read === 0) ? this.data : this.data.slice(this.read);\n this.clear();\n }\n return rval;\n};\n\n/**\n * Gets a binary encoded string of the bytes from this buffer without\n * modifying the read pointer.\n *\n * @param count the number of bytes to get, omit to get all.\n *\n * @return a string full of binary encoded characters.\n */\nutil.DataBuffer.prototype.bytes = function(count) {\n // TODO: deprecate this method, it is poorly named, add \"getString()\"\n return (typeof(count) === 'undefined' ?\n this.data.slice(this.read) :\n this.data.slice(this.read, this.read + count));\n};\n\n/**\n * Gets a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n *\n * @return the byte.\n */\nutil.DataBuffer.prototype.at = function(i) {\n return this.data.getUint8(this.read + i);\n};\n\n/**\n * Puts a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.setAt = function(i, b) {\n this.data.setUint8(i, b);\n return this;\n};\n\n/**\n * Gets the last byte without modifying the read pointer.\n *\n * @return the last byte.\n */\nutil.DataBuffer.prototype.last = function() {\n return this.data.getUint8(this.write - 1);\n};\n\n/**\n * Creates a copy of this buffer.\n *\n * @return the copy.\n */\nutil.DataBuffer.prototype.copy = function() {\n return new util.DataBuffer(this);\n};\n\n/**\n * Compacts this buffer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.compact = function() {\n if(this.read > 0) {\n var src = new Uint8Array(this.data.buffer, this.read);\n var dst = new Uint8Array(src.byteLength);\n dst.set(src);\n this.data = new DataView(dst);\n this.write -= this.read;\n this.read = 0;\n }\n return this;\n};\n\n/**\n * Clears this buffer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.clear = function() {\n this.data = new DataView(new ArrayBuffer(0));\n this.read = this.write = 0;\n return this;\n};\n\n/**\n * Shortens this buffer by triming bytes off of the end of this buffer.\n *\n * @param count the number of bytes to trim off.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.truncate = function(count) {\n this.write = Math.max(0, this.length() - count);\n this.read = Math.min(this.read, this.write);\n return this;\n};\n\n/**\n * Converts this buffer to a hexadecimal string.\n *\n * @return a hexadecimal string.\n */\nutil.DataBuffer.prototype.toHex = function() {\n var rval = '';\n for(var i = this.read; i < this.data.byteLength; ++i) {\n var b = this.data.getUint8(i);\n if(b < 16) {\n rval += '0';\n }\n rval += b.toString(16);\n }\n return rval;\n};\n\n/**\n * Converts this buffer to a string, using the given encoding. 
If no\n * encoding is given, 'utf8' (UTF-8) is used.\n *\n * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex',\n * 'base64' (default: 'utf8').\n *\n * @return a string representation of the bytes in this buffer.\n */\nutil.DataBuffer.prototype.toString = function(encoding) {\n var view = new Uint8Array(this.data, this.read, this.length());\n encoding = encoding || 'utf8';\n\n // encode to string\n if(encoding === 'binary' || encoding === 'raw') {\n return util.binary.raw.encode(view);\n }\n if(encoding === 'hex') {\n return util.binary.hex.encode(view);\n }\n if(encoding === 'base64') {\n return util.binary.base64.encode(view);\n }\n\n // decode to text\n if(encoding === 'utf8') {\n return util.text.utf8.decode(view);\n }\n if(encoding === 'utf16') {\n return util.text.utf16.decode(view);\n }\n\n throw new Error('Invalid encoding: ' + encoding);\n};\n\n/** End Buffer w/UInt8Array backing */\n\n/**\n * Creates a buffer that stores bytes. A value may be given to populate the\n * buffer with data. This value can either be string of encoded bytes or a\n * regular string of characters. When passing a string of binary encoded\n * bytes, the encoding `raw` should be given. This is also the default. When\n * passing a string of characters, the encoding `utf8` should be given.\n *\n * @param [input] a string with encoded bytes to store in the buffer.\n * @param [encoding] (default: 'raw', other: 'utf8').\n */\nutil.createBuffer = function(input, encoding) {\n // TODO: deprecate, use new ByteBuffer() instead\n encoding = encoding || 'raw';\n if(input !== undefined && encoding === 'utf8') {\n input = util.encodeUtf8(input);\n }\n return new util.ByteBuffer(input);\n};\n\n/**\n * Fills a string with a particular value. If you want the string to be a byte\n * string, pass in String.fromCharCode(theByte).\n *\n * @param c the character to fill the string with, use String.fromCharCode\n * to fill the string with a byte value.\n * @param n the number of characters of value c to fill with.\n *\n * @return the filled string.\n */\nutil.fillString = function(c, n) {\n var s = '';\n while(n > 0) {\n if(n & 1) {\n s += c;\n }\n n >>>= 1;\n if(n > 0) {\n c += c;\n }\n }\n return s;\n};\n\n/**\n * Performs a per byte XOR between two byte strings and returns the result as a\n * string of bytes.\n *\n * @param s1 first string of bytes.\n * @param s2 second string of bytes.\n * @param n the number of bytes to XOR.\n *\n * @return the XOR'd result.\n */\nutil.xorBytes = function(s1, s2, n) {\n var s3 = '';\n var b = '';\n var t = '';\n var i = 0;\n var c = 0;\n for(; n > 0; --n, ++i) {\n b = s1.charCodeAt(i) ^ s2.charCodeAt(i);\n if(c >= 10) {\n s3 += t;\n t = '';\n c = 0;\n }\n t += String.fromCharCode(b);\n ++c;\n }\n s3 += t;\n return s3;\n};\n\n/**\n * Converts a hex string into a 'binary' encoded string of bytes.\n *\n * @param hex the hexadecimal string to convert.\n *\n * @return the binary-encoded string of bytes.\n */\nutil.hexToBytes = function(hex) {\n // TODO: deprecate: \"Deprecated. 
Use util.binary.hex.decode instead.\"\n var rval = '';\n var i = 0;\n if(hex.length & 1 == 1) {\n // odd number of characters, convert first character alone\n i = 1;\n rval += String.fromCharCode(parseInt(hex[0], 16));\n }\n // convert 2 characters (1 byte) at a time\n for(; i < hex.length; i += 2) {\n rval += String.fromCharCode(parseInt(hex.substr(i, 2), 16));\n }\n return rval;\n};\n\n/**\n * Converts a 'binary' encoded string of bytes to hex.\n *\n * @param bytes the byte string to convert.\n *\n * @return the string of hexadecimal characters.\n */\nutil.bytesToHex = function(bytes) {\n // TODO: deprecate: \"Deprecated. Use util.binary.hex.encode instead.\"\n return util.createBuffer(bytes).toHex();\n};\n\n/**\n * Converts an 32-bit integer to 4-big-endian byte string.\n *\n * @param i the integer.\n *\n * @return the byte string.\n */\nutil.int32ToBytes = function(i) {\n return (\n String.fromCharCode(i >> 24 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n// base64 characters, reverse mapping\nvar _base64 =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';\nvar _base64Idx = [\n/*43 -43 = 0*/\n/*'+', 1, 2, 3,'/' */\n 62, -1, -1, -1, 63,\n\n/*'0','1','2','3','4','5','6','7','8','9' */\n 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,\n\n/*15, 16, 17,'=', 19, 20, 21 */\n -1, -1, -1, 64, -1, -1, -1,\n\n/*65 - 43 = 22*/\n/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,\n\n/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */\n 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,\n\n/*91 - 43 = 48 */\n/*48, 49, 50, 51, 52, 53 */\n -1, -1, -1, -1, -1, -1,\n\n/*97 - 43 = 54*/\n/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */\n 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,\n\n/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */\n 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51\n];\n\n// base58 characters (Bitcoin alphabet)\nvar _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';\n\n/**\n * Base64 encodes a 'binary' encoded string of bytes.\n *\n * @param input the binary encoded string of bytes to base64-encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the base64-encoded output.\n */\nutil.encode64 = function(input, maxline) {\n // TODO: deprecate: \"Deprecated. Use util.binary.base64.encode instead.\"\n var line = '';\n var output = '';\n var chr1, chr2, chr3;\n var i = 0;\n while(i < input.length) {\n chr1 = input.charCodeAt(i++);\n chr2 = input.charCodeAt(i++);\n chr3 = input.charCodeAt(i++);\n\n // encode 4 character group\n line += _base64.charAt(chr1 >> 2);\n line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));\n if(isNaN(chr2)) {\n line += '==';\n } else {\n line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));\n line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);\n }\n\n if(maxline && line.length > maxline) {\n output += line.substr(0, maxline) + '\\r\\n';\n line = line.substr(maxline);\n }\n }\n output += line;\n return output;\n};\n\n/**\n * Base64 decodes a string into a 'binary' encoded string of bytes.\n *\n * @param input the base64-encoded input.\n *\n * @return the binary encoded string.\n */\nutil.decode64 = function(input) {\n // TODO: deprecate: \"Deprecated. 
Use util.binary.base64.decode instead.\"\n\n // remove all non-base64 characters\n input = input.replace(/[^A-Za-z0-9\\+\\/\\=]/g, '');\n\n var output = '';\n var enc1, enc2, enc3, enc4;\n var i = 0;\n\n while(i < input.length) {\n enc1 = _base64Idx[input.charCodeAt(i++) - 43];\n enc2 = _base64Idx[input.charCodeAt(i++) - 43];\n enc3 = _base64Idx[input.charCodeAt(i++) - 43];\n enc4 = _base64Idx[input.charCodeAt(i++) - 43];\n\n output += String.fromCharCode((enc1 << 2) | (enc2 >> 4));\n if(enc3 !== 64) {\n // decoded at least 2 bytes\n output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2));\n if(enc4 !== 64) {\n // decoded 3 bytes\n output += String.fromCharCode(((enc3 & 3) << 6) | enc4);\n }\n }\n }\n\n return output;\n};\n\n/**\n * Encodes the given string of characters (a standard JavaScript\n * string) as a binary encoded string where the bytes represent\n * a UTF-8 encoded string of characters. Non-ASCII characters will be\n * encoded as multiple bytes according to UTF-8.\n *\n * @param str a standard string of characters to encode.\n *\n * @return the binary encoded string.\n */\nutil.encodeUtf8 = function(str) {\n return unescape(encodeURIComponent(str));\n};\n\n/**\n * Decodes a binary encoded string that contains bytes that\n * represent a UTF-8 encoded string of characters -- into a\n * string of characters (a standard JavaScript string).\n *\n * @param str the binary encoded string to decode.\n *\n * @return the resulting standard string of characters.\n */\nutil.decodeUtf8 = function(str) {\n return decodeURIComponent(escape(str));\n};\n\n// binary encoding/decoding tools\n// FIXME: Experimental. Do not use yet.\nutil.binary = {\n raw: {},\n hex: {},\n base64: {},\n base58: {},\n baseN : {\n encode: baseN.encode,\n decode: baseN.decode\n }\n};\n\n/**\n * Encodes a Uint8Array as a binary-encoded string. This encoding uses\n * a value between 0 and 255 for each character.\n *\n * @param bytes the Uint8Array to encode.\n *\n * @return the binary-encoded string.\n */\nutil.binary.raw.encode = function(bytes) {\n return String.fromCharCode.apply(null, bytes);\n};\n\n/**\n * Decodes a binary-encoded string to a Uint8Array. This encoding uses\n * a value between 0 and 255 for each character.\n *\n * @param str the binary-encoded string to decode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.raw.decode = function(str, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length);\n }\n offset = offset || 0;\n var j = offset;\n for(var i = 0; i < str.length; ++i) {\n out[j++] = str.charCodeAt(i);\n }\n return output ? 
(j - offset) : out;\n};\n\n/**\n * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or\n * ByteBuffer as a string of hexadecimal characters.\n *\n * @param bytes the bytes to convert.\n *\n * @return the string of hexadecimal characters.\n */\nutil.binary.hex.encode = util.bytesToHex;\n\n/**\n * Decodes a hex-encoded string to a Uint8Array.\n *\n * @param hex the hexadecimal string to convert.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.hex.decode = function(hex, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(Math.ceil(hex.length / 2));\n }\n offset = offset || 0;\n var i = 0, j = offset;\n if(hex.length & 1) {\n // odd number of characters, convert first character alone\n i = 1;\n out[j++] = parseInt(hex[0], 16);\n }\n // convert 2 characters (1 byte) at a time\n for(; i < hex.length; i += 2) {\n out[j++] = parseInt(hex.substr(i, 2), 16);\n }\n return output ? (j - offset) : out;\n};\n\n/**\n * Base64-encodes a Uint8Array.\n *\n * @param input the Uint8Array to encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the base64-encoded output string.\n */\nutil.binary.base64.encode = function(input, maxline) {\n var line = '';\n var output = '';\n var chr1, chr2, chr3;\n var i = 0;\n while(i < input.byteLength) {\n chr1 = input[i++];\n chr2 = input[i++];\n chr3 = input[i++];\n\n // encode 4 character group\n line += _base64.charAt(chr1 >> 2);\n line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));\n if(isNaN(chr2)) {\n line += '==';\n } else {\n line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));\n line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);\n }\n\n if(maxline && line.length > maxline) {\n output += line.substr(0, maxline) + '\\r\\n';\n line = line.substr(maxline);\n }\n }\n output += line;\n return output;\n};\n\n/**\n * Decodes a base64-encoded string to a Uint8Array.\n *\n * @param input the base64-encoded input string.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.base64.decode = function(input, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(Math.ceil(input.length / 4) * 3);\n }\n\n // remove all non-base64 characters\n input = input.replace(/[^A-Za-z0-9\\+\\/\\=]/g, '');\n\n offset = offset || 0;\n var enc1, enc2, enc3, enc4;\n var i = 0, j = offset;\n\n while(i < input.length) {\n enc1 = _base64Idx[input.charCodeAt(i++) - 43];\n enc2 = _base64Idx[input.charCodeAt(i++) - 43];\n enc3 = _base64Idx[input.charCodeAt(i++) - 43];\n enc4 = _base64Idx[input.charCodeAt(i++) - 43];\n\n out[j++] = (enc1 << 2) | (enc2 >> 4);\n if(enc3 !== 64) {\n // decoded at least 2 bytes\n out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2);\n if(enc4 !== 64) {\n // decoded 3 bytes\n out[j++] = ((enc3 & 3) << 6) | enc4;\n }\n }\n }\n\n // make sure result is the exact decoded length\n return output ? 
(j - offset) : out.subarray(0, j);\n};\n\n// add support for base58 encoding/decoding with Bitcoin alphabet\nutil.binary.base58.encode = function(input, maxline) {\n return util.binary.baseN.encode(input, _base58, maxline);\n};\nutil.binary.base58.decode = function(input, maxline) {\n return util.binary.baseN.decode(input, _base58, maxline);\n};\n\n// text encoding/decoding tools\n// FIXME: Experimental. Do not use yet.\nutil.text = {\n utf8: {},\n utf16: {}\n};\n\n/**\n * Encodes the given string as UTF-8 in a Uint8Array.\n *\n * @param str the string to encode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.text.utf8.encode = function(str, output, offset) {\n str = util.encodeUtf8(str);\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length);\n }\n offset = offset || 0;\n var j = offset;\n for(var i = 0; i < str.length; ++i) {\n out[j++] = str.charCodeAt(i);\n }\n return output ? (j - offset) : out;\n};\n\n/**\n * Decodes the UTF-8 contents from a Uint8Array.\n *\n * @param bytes the Uint8Array to decode.\n *\n * @return the resulting string.\n */\nutil.text.utf8.decode = function(bytes) {\n return util.decodeUtf8(String.fromCharCode.apply(null, bytes));\n};\n\n/**\n * Encodes the given string as UTF-16 in a Uint8Array.\n *\n * @param str the string to encode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.text.utf16.encode = function(str, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length * 2);\n }\n var view = new Uint16Array(out.buffer);\n offset = offset || 0;\n var j = offset;\n var k = offset;\n for(var i = 0; i < str.length; ++i) {\n view[k++] = str.charCodeAt(i);\n j += 2;\n }\n return output ? 
(j - offset) : out;\n};\n\n/**\n * Decodes the UTF-16 contents from a Uint8Array.\n *\n * @param bytes the Uint8Array to decode.\n *\n * @return the resulting string.\n */\nutil.text.utf16.decode = function(bytes) {\n return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer));\n};\n\n/**\n * Deflates the given data using a flash interface.\n *\n * @param api the flash interface.\n * @param bytes the data.\n * @param raw true to return only raw deflate data, false to include zlib\n * header and trailer.\n *\n * @return the deflated data as a string.\n */\nutil.deflate = function(api, bytes, raw) {\n bytes = util.decode64(api.deflate(util.encode64(bytes)).rval);\n\n // strip zlib header and trailer if necessary\n if(raw) {\n // zlib header is 2 bytes (CMF,FLG) where FLG indicates that\n // there is a 4-byte DICT (alder-32) block before the data if\n // its 5th bit is set\n var start = 2;\n var flg = bytes.charCodeAt(1);\n if(flg & 0x20) {\n start = 6;\n }\n // zlib trailer is 4 bytes of adler-32\n bytes = bytes.substring(start, bytes.length - 4);\n }\n\n return bytes;\n};\n\n/**\n * Inflates the given data using a flash interface.\n *\n * @param api the flash interface.\n * @param bytes the data.\n * @param raw true if the incoming data has no zlib header or trailer and is\n * raw DEFLATE data.\n *\n * @return the inflated data as a string, null on error.\n */\nutil.inflate = function(api, bytes, raw) {\n // TODO: add zlib header and trailer if necessary/possible\n var rval = api.inflate(util.encode64(bytes)).rval;\n return (rval === null) ? null : util.decode64(rval);\n};\n\n/**\n * Sets a storage object.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param obj the storage object, null to remove.\n */\nvar _setStorageObject = function(api, id, obj) {\n if(!api) {\n throw new Error('WebStorage not available.');\n }\n\n var rval;\n if(obj === null) {\n rval = api.removeItem(id);\n } else {\n // json-encode and base64-encode object\n obj = util.encode64(JSON.stringify(obj));\n rval = api.setItem(id, obj);\n }\n\n // handle potential flash error\n if(typeof(rval) !== 'undefined' && rval.rval !== true) {\n var error = new Error(rval.error.message);\n error.id = rval.error.id;\n error.name = rval.error.name;\n throw error;\n }\n};\n\n/**\n * Gets a storage object.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n *\n * @return the storage object entry or null if none exists.\n */\nvar _getStorageObject = function(api, id) {\n if(!api) {\n throw new Error('WebStorage not available.');\n }\n\n // get the existing entry\n var rval = api.getItem(id);\n\n /* Note: We check api.init because we can't do (api == localStorage)\n on IE because of \"Class doesn't support Automation\" exception. Only\n the flash api has an init method so this works too, but we need a\n better solution in the future. 
*/\n\n // flash returns item wrapped in an object, handle special case\n if(api.init) {\n if(rval.rval === null) {\n if(rval.error) {\n var error = new Error(rval.error.message);\n error.id = rval.error.id;\n error.name = rval.error.name;\n throw error;\n }\n // no error, but also no item\n rval = null;\n } else {\n rval = rval.rval;\n }\n }\n\n // handle decoding\n if(rval !== null) {\n // base64-decode and json-decode data\n rval = JSON.parse(util.decode64(rval));\n }\n\n return rval;\n};\n\n/**\n * Stores an item in local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param data the data for the item (any javascript object/primitive).\n */\nvar _setItem = function(api, id, key, data) {\n // get storage object\n var obj = _getStorageObject(api, id);\n if(obj === null) {\n // create a new storage object\n obj = {};\n }\n // update key\n obj[key] = data;\n\n // set storage object\n _setStorageObject(api, id, obj);\n};\n\n/**\n * Gets an item from local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n *\n * @return the item.\n */\nvar _getItem = function(api, id, key) {\n // get storage object\n var rval = _getStorageObject(api, id);\n if(rval !== null) {\n // return data at key\n rval = (key in rval) ? rval[key] : null;\n }\n\n return rval;\n};\n\n/**\n * Removes an item from local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n */\nvar _removeItem = function(api, id, key) {\n // get storage object\n var obj = _getStorageObject(api, id);\n if(obj !== null && key in obj) {\n // remove key\n delete obj[key];\n\n // see if entry has no keys remaining\n var empty = true;\n for(var prop in obj) {\n empty = false;\n break;\n }\n if(empty) {\n // remove entry entirely if no keys are left\n obj = null;\n }\n\n // set storage object\n _setStorageObject(api, id, obj);\n }\n};\n\n/**\n * Clears the local disk storage identified by the given ID.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n */\nvar _clearItems = function(api, id) {\n _setStorageObject(api, id, null);\n};\n\n/**\n * Calls a storage function.\n *\n * @param func the function to call.\n * @param args the arguments for the function.\n * @param location the location argument.\n *\n * @return the return value from the function.\n */\nvar _callStorageFunction = function(func, args, location) {\n var rval = null;\n\n // default storage types\n if(typeof(location) === 'undefined') {\n location = ['web', 'flash'];\n }\n\n // apply storage types in order of preference\n var type;\n var done = false;\n var exception = null;\n for(var idx in location) {\n type = location[idx];\n try {\n if(type === 'flash' || type === 'both') {\n if(args[0] === null) {\n throw new Error('Flash local storage not available.');\n }\n rval = func.apply(this, args);\n done = (type === 'flash');\n }\n if(type === 'web' || type === 'both') {\n args[0] = localStorage;\n rval = func.apply(this, args);\n done = true;\n }\n } catch(ex) {\n exception = ex;\n }\n if(done) {\n break;\n }\n }\n\n if(!done) {\n throw exception;\n }\n\n return rval;\n};\n\n/**\n * Stores an item on local disk.\n *\n * The available types of local storage include 'flash', 'web', and 'both'.\n *\n * The type 'flash' refers to flash local storage (SharedObject). 
In order\n * to use flash local storage, the 'api' parameter must be valid. The type\n * 'web' refers to WebStorage, if supported by the browser. The type 'both'\n * refers to storing using both 'flash' and 'web', not just one or the\n * other.\n *\n * The location array should list the storage types to use in order of\n * preference:\n *\n * ['flash']: flash only storage\n * ['web']: web only storage\n * ['both']: try to store in both\n * ['flash','web']: store in flash first, but if not available, 'web'\n * ['web','flash']: store in web first, but if not available, 'flash'\n *\n * The location array defaults to: ['web', 'flash']\n *\n * @param api the flash interface, null to use only WebStorage.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param data the data for the item (any javascript object/primitive).\n * @param location an array with the preferred types of storage to use.\n */\nutil.setItem = function(api, id, key, data, location) {\n _callStorageFunction(_setItem, arguments, location);\n};\n\n/**\n * Gets an item on local disk.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface, null to use only WebStorage.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param location an array with the preferred types of storage to use.\n *\n * @return the item.\n */\nutil.getItem = function(api, id, key, location) {\n return _callStorageFunction(_getItem, arguments, location);\n};\n\n/**\n * Removes an item on local disk.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param location an array with the preferred types of storage to use.\n */\nutil.removeItem = function(api, id, key, location) {\n _callStorageFunction(_removeItem, arguments, location);\n};\n\n/**\n * Clears the local disk storage identified by the given ID.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface if flash is available.\n * @param id the storage ID to use.\n * @param location an array with the preferred types of storage to use.\n */\nutil.clearItems = function(api, id, location) {\n _callStorageFunction(_clearItems, arguments, location);\n};\n\n/**\n * Check if an object is empty.\n *\n * Taken from:\n * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937\n *\n * @param object the object to check.\n */\nutil.isEmpty = function(obj) {\n for(var prop in obj) {\n if(obj.hasOwnProperty(prop)) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Format with simple printf-style interpolation.\n *\n * %%: literal '%'\n * %s,%o: convert next argument into a string.\n *\n * @param format the string to format.\n * @param ... 
arguments to interpolate into the format string.\n */\nutil.format = function(format) {\n var re = /%./g;\n // current match\n var match;\n // current part\n var part;\n // current arg index\n var argi = 0;\n // collected parts to recombine later\n var parts = [];\n // last index found\n var last = 0;\n // loop while matches remain\n while((match = re.exec(format))) {\n part = format.substring(last, re.lastIndex - 2);\n // don't add empty strings (ie, parts between %s%s)\n if(part.length > 0) {\n parts.push(part);\n }\n last = re.lastIndex;\n // switch on % code\n var code = match[0][1];\n switch(code) {\n case 's':\n case 'o':\n // check if enough arguments were given\n if(argi < arguments.length) {\n parts.push(arguments[argi++ + 1]);\n } else {\n parts.push('');\n }\n break;\n // FIXME: do proper formating for numbers, etc\n //case 'f':\n //case 'd':\n case '%':\n parts.push('%');\n break;\n default:\n parts.push('<%' + code + '?>');\n }\n }\n // add trailing part of format string\n parts.push(format.substring(last));\n return parts.join('');\n};\n\n/**\n * Formats a number.\n *\n * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/\n */\nutil.formatNumber = function(number, decimals, dec_point, thousands_sep) {\n // http://kevin.vanzonneveld.net\n // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)\n // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)\n // + bugfix by: Michael White (http://crestidg.com)\n // + bugfix by: Benjamin Lupton\n // + bugfix by: Allan Jensen (http://www.winternet.no)\n // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)\n // * example 1: number_format(1234.5678, 2, '.', '');\n // * returns 1: 1234.57\n\n var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals;\n var d = dec_point === undefined ? ',' : dec_point;\n var t = thousands_sep === undefined ?\n '.' : thousands_sep, s = n < 0 ? '-' : '';\n var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + '';\n var j = (i.length > 3) ? i.length % 3 : 0;\n return s + (j ? i.substr(0, j) + t : '') +\n i.substr(j).replace(/(\\d{3})(?=\\d)/g, '$1' + t) +\n (c ? 
d + Math.abs(n - i).toFixed(c).slice(2) : '');\n};\n\n/**\n * Formats a byte size.\n *\n * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/\n */\nutil.formatSize = function(size) {\n if(size >= 1073741824) {\n size = util.formatNumber(size / 1073741824, 2, '.', '') + ' GiB';\n } else if(size >= 1048576) {\n size = util.formatNumber(size / 1048576, 2, '.', '') + ' MiB';\n } else if(size >= 1024) {\n size = util.formatNumber(size / 1024, 0) + ' KiB';\n } else {\n size = util.formatNumber(size, 0) + ' bytes';\n }\n return size;\n};\n\n/**\n * Converts an IPv4 or IPv6 string representation into bytes (in network order).\n *\n * @param ip the IPv4 or IPv6 address to convert.\n *\n * @return the 4-byte IPv6 or 16-byte IPv6 address or null if the address can't\n * be parsed.\n */\nutil.bytesFromIP = function(ip) {\n if(ip.indexOf('.') !== -1) {\n return util.bytesFromIPv4(ip);\n }\n if(ip.indexOf(':') !== -1) {\n return util.bytesFromIPv6(ip);\n }\n return null;\n};\n\n/**\n * Converts an IPv4 string representation into bytes (in network order).\n *\n * @param ip the IPv4 address to convert.\n *\n * @return the 4-byte address or null if the address can't be parsed.\n */\nutil.bytesFromIPv4 = function(ip) {\n ip = ip.split('.');\n if(ip.length !== 4) {\n return null;\n }\n var b = util.createBuffer();\n for(var i = 0; i < ip.length; ++i) {\n var num = parseInt(ip[i], 10);\n if(isNaN(num)) {\n return null;\n }\n b.putByte(num);\n }\n return b.getBytes();\n};\n\n/**\n * Converts an IPv6 string representation into bytes (in network order).\n *\n * @param ip the IPv6 address to convert.\n *\n * @return the 16-byte address or null if the address can't be parsed.\n */\nutil.bytesFromIPv6 = function(ip) {\n var blanks = 0;\n ip = ip.split(':').filter(function(e) {\n if(e.length === 0) ++blanks;\n return true;\n });\n var zeros = (8 - ip.length + blanks) * 2;\n var b = util.createBuffer();\n for(var i = 0; i < 8; ++i) {\n if(!ip[i] || ip[i].length === 0) {\n b.fillWithByte(0, zeros);\n zeros = 0;\n continue;\n }\n var bytes = util.hexToBytes(ip[i]);\n if(bytes.length < 2) {\n b.putByte(0);\n }\n b.putBytes(bytes);\n }\n return b.getBytes();\n};\n\n/**\n * Converts 4-bytes into an IPv4 string representation or 16-bytes into\n * an IPv6 string representation. The bytes must be in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv4 or IPv6 string representation if 4 or 16 bytes,\n * respectively, are given, otherwise null.\n */\nutil.bytesToIP = function(bytes) {\n if(bytes.length === 4) {\n return util.bytesToIPv4(bytes);\n }\n if(bytes.length === 16) {\n return util.bytesToIPv6(bytes);\n }\n return null;\n};\n\n/**\n * Converts 4-bytes into an IPv4 string representation. The bytes must be\n * in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv4 string representation or null for an invalid # of bytes.\n */\nutil.bytesToIPv4 = function(bytes) {\n if(bytes.length !== 4) {\n return null;\n }\n var ip = [];\n for(var i = 0; i < bytes.length; ++i) {\n ip.push(bytes.charCodeAt(i));\n }\n return ip.join('.');\n};\n\n/**\n * Converts 16-bytes into an IPv16 string representation. 
The bytes must be\n * in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv16 string representation or null for an invalid # of bytes.\n */\nutil.bytesToIPv6 = function(bytes) {\n if(bytes.length !== 16) {\n return null;\n }\n var ip = [];\n var zeroGroups = [];\n var zeroMaxGroup = 0;\n for(var i = 0; i < bytes.length; i += 2) {\n var hex = util.bytesToHex(bytes[i] + bytes[i + 1]);\n // canonicalize zero representation\n while(hex[0] === '0' && hex !== '0') {\n hex = hex.substr(1);\n }\n if(hex === '0') {\n var last = zeroGroups[zeroGroups.length - 1];\n var idx = ip.length;\n if(!last || idx !== last.end + 1) {\n zeroGroups.push({start: idx, end: idx});\n } else {\n last.end = idx;\n if((last.end - last.start) >\n (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) {\n zeroMaxGroup = zeroGroups.length - 1;\n }\n }\n }\n ip.push(hex);\n }\n if(zeroGroups.length > 0) {\n var group = zeroGroups[zeroMaxGroup];\n // only shorten group of length > 0\n if(group.end - group.start > 0) {\n ip.splice(group.start, group.end - group.start + 1, '');\n if(group.start === 0) {\n ip.unshift('');\n }\n if(group.end === 7) {\n ip.push('');\n }\n }\n }\n return ip.join(':');\n};\n\n/**\n * Estimates the number of processes that can be run concurrently. If\n * creating Web Workers, keep in mind that the main JavaScript process needs\n * its own core.\n *\n * @param options the options to use:\n * update true to force an update (not use the cached value).\n * @param callback(err, max) called once the operation completes.\n */\nutil.estimateCores = function(options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n if('cores' in util && !options.update) {\n return callback(null, util.cores);\n }\n if(typeof navigator !== 'undefined' &&\n 'hardwareConcurrency' in navigator &&\n navigator.hardwareConcurrency > 0) {\n util.cores = navigator.hardwareConcurrency;\n return callback(null, util.cores);\n }\n if(typeof Worker === 'undefined') {\n // workers not available\n util.cores = 1;\n return callback(null, util.cores);\n }\n if(typeof Blob === 'undefined') {\n // can't estimate, default to 2\n util.cores = 2;\n return callback(null, util.cores);\n }\n\n // create worker concurrency estimation code as blob\n var blobUrl = URL.createObjectURL(new Blob(['(',\n function() {\n self.addEventListener('message', function(e) {\n // run worker for 4 ms\n var st = Date.now();\n var et = st + 4;\n while(Date.now() < et);\n self.postMessage({st: st, et: et});\n });\n }.toString(),\n ')()'], {type: 'application/javascript'}));\n\n // take 5 samples using 16 workers\n sample([], 5, 16);\n\n function sample(max, samples, numWorkers) {\n if(samples === 0) {\n // get overlap average\n var avg = Math.floor(max.reduce(function(avg, x) {\n return avg + x;\n }, 0) / max.length);\n util.cores = Math.max(1, avg);\n URL.revokeObjectURL(blobUrl);\n return callback(null, util.cores);\n }\n map(numWorkers, function(err, results) {\n max.push(reduce(numWorkers, results));\n sample(max, samples - 1, numWorkers);\n });\n }\n\n function map(numWorkers, callback) {\n var workers = [];\n var results = [];\n for(var i = 0; i < numWorkers; ++i) {\n var worker = new Worker(blobUrl);\n worker.addEventListener('message', function(e) {\n results.push(e.data);\n if(results.length === numWorkers) {\n for(var i = 0; i < numWorkers; ++i) {\n workers[i].terminate();\n }\n callback(null, results);\n }\n });\n workers.push(worker);\n }\n for(var 
i = 0; i < numWorkers; ++i) {\n workers[i].postMessage(i);\n }\n }\n\n function reduce(numWorkers, results) {\n // find overlapping time windows\n var overlaps = [];\n for(var n = 0; n < numWorkers; ++n) {\n var r1 = results[n];\n var overlap = overlaps[n] = [];\n for(var i = 0; i < numWorkers; ++i) {\n if(n === i) {\n continue;\n }\n var r2 = results[i];\n if((r1.st > r2.st && r1.st < r2.et) ||\n (r2.st > r1.st && r2.st < r1.et)) {\n overlap.push(i);\n }\n }\n }\n // get maximum overlaps ... don't include overlapping worker itself\n // as the main JS process was also being scheduled during the work and\n // would have to be subtracted from the estimate anyway\n return overlaps.reduce(function(max, overlap) {\n return Math.max(max, overlap.length);\n }, 0);\n }\n};\n","/**\n * Javascript implementation of X.509 and related components (such as\n * Certification Signing Requests) of a Public Key Infrastructure.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n *\n * The ASN.1 representation of an X.509v3 certificate is as follows\n * (see RFC 2459):\n *\n * Certificate ::= SEQUENCE {\n * tbsCertificate TBSCertificate,\n * signatureAlgorithm AlgorithmIdentifier,\n * signatureValue BIT STRING\n * }\n *\n * TBSCertificate ::= SEQUENCE {\n * version [0] EXPLICIT Version DEFAULT v1,\n * serialNumber CertificateSerialNumber,\n * signature AlgorithmIdentifier,\n * issuer Name,\n * validity Validity,\n * subject Name,\n * subjectPublicKeyInfo SubjectPublicKeyInfo,\n * issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,\n * -- If present, version shall be v2 or v3\n * subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL,\n * -- If present, version shall be v2 or v3\n * extensions [3] EXPLICIT Extensions OPTIONAL\n * -- If present, version shall be v3\n * }\n *\n * Version ::= INTEGER { v1(0), v2(1), v3(2) }\n *\n * CertificateSerialNumber ::= INTEGER\n *\n * Name ::= CHOICE {\n * // only one possible choice for now\n * RDNSequence\n * }\n *\n * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName\n *\n * RelativeDistinguishedName ::= SET OF AttributeTypeAndValue\n *\n * AttributeTypeAndValue ::= SEQUENCE {\n * type AttributeType,\n * value AttributeValue\n * }\n * AttributeType ::= OBJECT IDENTIFIER\n * AttributeValue ::= ANY DEFINED BY AttributeType\n *\n * Validity ::= SEQUENCE {\n * notBefore Time,\n * notAfter Time\n * }\n *\n * Time ::= CHOICE {\n * utcTime UTCTime,\n * generalTime GeneralizedTime\n * }\n *\n * UniqueIdentifier ::= BIT STRING\n *\n * SubjectPublicKeyInfo ::= SEQUENCE {\n * algorithm AlgorithmIdentifier,\n * subjectPublicKey BIT STRING\n * }\n *\n * Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension\n *\n * Extension ::= SEQUENCE {\n * extnID OBJECT IDENTIFIER,\n * critical BOOLEAN DEFAULT FALSE,\n * extnValue OCTET STRING\n * }\n *\n * The only key algorithm currently supported for PKI is RSA.\n *\n * RSASSA-PSS signatures are described in RFC 3447 and RFC 4055.\n *\n * PKCS#10 v1.7 describes certificate signing requests:\n *\n * CertificationRequestInfo:\n *\n * CertificationRequestInfo ::= SEQUENCE {\n * version INTEGER { v1(0) } (v1,...),\n * subject Name,\n * subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }},\n * attributes [0] Attributes{{ CRIAttributes }}\n * }\n *\n * Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }}\n *\n * CRIAttributes ATTRIBUTE ::= {\n * ... 
-- add any locally defined attributes here -- }\n *\n * Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {\n * type ATTRIBUTE.&id({IOSet}),\n * values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})\n * }\n *\n * CertificationRequest ::= SEQUENCE {\n * certificationRequestInfo CertificationRequestInfo,\n * signatureAlgorithm AlgorithmIdentifier{{ SignatureAlgorithms }},\n * signature BIT STRING\n * }\n */\nvar forge = require('./forge');\nrequire('./aes');\nrequire('./asn1');\nrequire('./des');\nrequire('./md');\nrequire('./mgf');\nrequire('./oids');\nrequire('./pem');\nrequire('./pss');\nrequire('./rsa');\nrequire('./util');\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Public Key Infrastructure (PKI) implementation. */\nvar pki = module.exports = forge.pki = forge.pki || {};\nvar oids = pki.oids;\n\n// short name OID mappings\nvar _shortNames = {};\n_shortNames['CN'] = oids['commonName'];\n_shortNames['commonName'] = 'CN';\n_shortNames['C'] = oids['countryName'];\n_shortNames['countryName'] = 'C';\n_shortNames['L'] = oids['localityName'];\n_shortNames['localityName'] = 'L';\n_shortNames['ST'] = oids['stateOrProvinceName'];\n_shortNames['stateOrProvinceName'] = 'ST';\n_shortNames['O'] = oids['organizationName'];\n_shortNames['organizationName'] = 'O';\n_shortNames['OU'] = oids['organizationalUnitName'];\n_shortNames['organizationalUnitName'] = 'OU';\n_shortNames['E'] = oids['emailAddress'];\n_shortNames['emailAddress'] = 'E';\n\n// validator for an SubjectPublicKeyInfo structure\n// Note: Currently only works with an RSA public key\nvar publicKeyValidator = forge.pki.rsa.publicKeyValidator;\n\n// validator for an X.509v3 certificate\nvar x509CertificateValidator = {\n name: 'Certificate',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Certificate.TBSCertificate',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'tbsCertificate',\n value: [{\n name: 'Certificate.TBSCertificate.version',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.version.integer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certVersion'\n }]\n }, {\n name: 'Certificate.TBSCertificate.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certSerialNumber'\n }, {\n name: 'Certificate.TBSCertificate.signature',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Certificate.TBSCertificate.signature.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certinfoSignatureOid'\n }, {\n name: 'Certificate.TBSCertificate.signature.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'certinfoSignatureParams'\n }]\n }, {\n name: 'Certificate.TBSCertificate.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certIssuer'\n }, {\n name: 'Certificate.TBSCertificate.validity',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n // Note: UTC and generalized times may both appear so the capture\n // names are based on their detected order, the names used below\n // are only for the common case, which validity time really means\n // \"notBefore\" and which means \"notAfter\" will be determined by order\n value: 
[{\n // notBefore (Time) (UTC time case)\n name: 'Certificate.TBSCertificate.validity.notBefore (utc)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.UTCTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity1UTCTime'\n }, {\n // notBefore (Time) (generalized time case)\n name: 'Certificate.TBSCertificate.validity.notBefore (generalized)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.GENERALIZEDTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity2GeneralizedTime'\n }, {\n // notAfter (Time) (only UTC time is supported)\n name: 'Certificate.TBSCertificate.validity.notAfter (utc)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.UTCTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity3UTCTime'\n }, {\n // notAfter (Time) (only UTC time is supported)\n name: 'Certificate.TBSCertificate.validity.notAfter (generalized)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.GENERALIZEDTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity4GeneralizedTime'\n }]\n }, {\n // Name (subject) (RDNSequence)\n name: 'Certificate.TBSCertificate.subject',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certSubject'\n },\n // SubjectPublicKeyInfo\n publicKeyValidator,\n {\n // issuerUniqueID (optional)\n name: 'Certificate.TBSCertificate.issuerUniqueID',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.issuerUniqueID.id',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n // TODO: support arbitrary bit length ids\n captureBitStringValue: 'certIssuerUniqueId'\n }]\n }, {\n // subjectUniqueID (optional)\n name: 'Certificate.TBSCertificate.subjectUniqueID',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 2,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.subjectUniqueID.id',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n // TODO: support arbitrary bit length ids\n captureBitStringValue: 'certSubjectUniqueId'\n }]\n }, {\n // Extensions (optional)\n name: 'Certificate.TBSCertificate.extensions',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 3,\n constructed: true,\n captureAsn1: 'certExtensions',\n optional: true\n }]\n }, {\n // AlgorithmIdentifier (signature algorithm)\n name: 'Certificate.signatureAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // algorithm\n name: 'Certificate.signatureAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certSignatureOid'\n }, {\n name: 'Certificate.TBSCertificate.signature.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'certSignatureParams'\n }]\n }, {\n // SignatureValue\n name: 'Certificate.signatureValue',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n captureBitStringValue: 'certSignature'\n }]\n};\n\nvar rsassaPssParameterValidator = {\n name: 'rsapss',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'rsapss.hashAlgorithm',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n value: [{\n name: 'rsapss.hashAlgorithm.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 
'rsapss.hashAlgorithm.AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'hashOid'\n /* parameter block omitted, for SHA1 NULL anyhow. */\n }]\n }]\n }, {\n name: 'rsapss.maskGenAlgorithm',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'maskGenOid'\n }, {\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'maskGenHashOid'\n /* parameter block omitted, for SHA1 NULL anyhow. */\n }]\n }]\n }]\n }, {\n name: 'rsapss.saltLength',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 2,\n optional: true,\n value: [{\n name: 'rsapss.saltLength.saltLength',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.INTEGER,\n constructed: false,\n capture: 'saltLength'\n }]\n }, {\n name: 'rsapss.trailerField',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 3,\n optional: true,\n value: [{\n name: 'rsapss.trailer.trailer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.INTEGER,\n constructed: false,\n capture: 'trailer'\n }]\n }]\n};\n\n// validator for a CertificationRequestInfo structure\nvar certificationRequestInfoValidator = {\n name: 'CertificationRequestInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certificationRequestInfo',\n value: [{\n name: 'CertificationRequestInfo.integer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certificationRequestInfoVersion'\n }, {\n // Name (subject) (RDNSequence)\n name: 'CertificationRequestInfo.subject',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certificationRequestInfoSubject'\n },\n // SubjectPublicKeyInfo\n publicKeyValidator,\n {\n name: 'CertificationRequestInfo.attributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n capture: 'certificationRequestInfoAttributes',\n value: [{\n name: 'CertificationRequestInfo.attributes',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'CertificationRequestInfo.attributes.type',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false\n }, {\n name: 'CertificationRequestInfo.attributes.value',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true\n }]\n }]\n }]\n};\n\n// validator for a CertificationRequest structure\nvar certificationRequestValidator = {\n name: 'CertificationRequest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'csr',\n value: [\n certificationRequestInfoValidator, {\n // AlgorithmIdentifier (signature algorithm)\n name: 'CertificationRequest.signatureAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // algorithm\n name: 'CertificationRequest.signatureAlgorithm.algorithm',\n tagClass: 
asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'csrSignatureOid'\n }, {\n name: 'CertificationRequest.signatureAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'csrSignatureParams'\n }]\n }, {\n // signature\n name: 'CertificationRequest.signature',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n captureBitStringValue: 'csrSignature'\n }\n ]\n};\n\n/**\n * Converts an RDNSequence of ASN.1 DER-encoded RelativeDistinguishedName\n * sets into an array with objects that have type and value properties.\n *\n * @param rdn the RDNSequence to convert.\n * @param md a message digest to append type and value to if provided.\n */\npki.RDNAttributesAsArray = function(rdn, md) {\n var rval = [];\n\n // each value in 'rdn' in is a SET of RelativeDistinguishedName\n var set, attr, obj;\n for(var si = 0; si < rdn.value.length; ++si) {\n // get the RelativeDistinguishedName set\n set = rdn.value[si];\n\n // each value in the SET is an AttributeTypeAndValue sequence\n // containing first a type (an OID) and second a value (defined by\n // the OID)\n for(var i = 0; i < set.value.length; ++i) {\n obj = {};\n attr = set.value[i];\n obj.type = asn1.derToOid(attr.value[0].value);\n obj.value = attr.value[1].value;\n obj.valueTagClass = attr.value[1].type;\n // if the OID is known, get its name and short name\n if(obj.type in oids) {\n obj.name = oids[obj.type];\n if(obj.name in _shortNames) {\n obj.shortName = _shortNames[obj.name];\n }\n }\n if(md) {\n md.update(obj.type);\n md.update(obj.value);\n }\n rval.push(obj);\n }\n }\n\n return rval;\n};\n\n/**\n * Converts ASN.1 CRIAttributes into an array with objects that have type and\n * value properties.\n *\n * @param attributes the CRIAttributes to convert.\n */\npki.CRIAttributesAsArray = function(attributes) {\n var rval = [];\n\n // each value in 'attributes' in is a SEQUENCE with an OID and a SET\n for(var si = 0; si < attributes.length; ++si) {\n // get the attribute sequence\n var seq = attributes[si];\n\n // each value in the SEQUENCE containing first a type (an OID) and\n // second a set of values (defined by the OID)\n var type = asn1.derToOid(seq.value[0].value);\n var values = seq.value[1].value;\n for(var vi = 0; vi < values.length; ++vi) {\n var obj = {};\n obj.type = type;\n obj.value = values[vi].value;\n obj.valueTagClass = values[vi].type;\n // if the OID is known, get its name and short name\n if(obj.type in oids) {\n obj.name = oids[obj.type];\n if(obj.name in _shortNames) {\n obj.shortName = _shortNames[obj.name];\n }\n }\n // parse extensions\n if(obj.type === oids.extensionRequest) {\n obj.extensions = [];\n for(var ei = 0; ei < obj.value.length; ++ei) {\n obj.extensions.push(pki.certificateExtensionFromAsn1(obj.value[ei]));\n }\n }\n rval.push(obj);\n }\n }\n\n return rval;\n};\n\n/**\n * Gets an issuer or subject attribute from its name, type, or short name.\n *\n * @param obj the issuer or subject object.\n * @param options a short name string or an object with:\n * shortName the short name for the attribute.\n * name the name for the attribute.\n * type the type for the attribute.\n *\n * @return the attribute.\n */\nfunction _getAttribute(obj, options) {\n if(typeof options === 'string') {\n options = {shortName: options};\n }\n\n var rval = null;\n var attr;\n for(var i = 0; rval === null && i < obj.attributes.length; ++i) {\n attr = obj.attributes[i];\n if(options.type && options.type === attr.type) {\n rval = attr;\n } 
else if(options.name && options.name === attr.name) {\n rval = attr;\n } else if(options.shortName && options.shortName === attr.shortName) {\n rval = attr;\n }\n }\n return rval;\n}\n\n/**\n * Converts signature parameters from ASN.1 structure.\n *\n * Currently only RSASSA-PSS supported. The PKCS#1 v1.5 signature scheme had\n * no parameters.\n *\n * RSASSA-PSS-params ::= SEQUENCE {\n * hashAlgorithm [0] HashAlgorithm DEFAULT\n * sha1Identifier,\n * maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT\n * mgf1SHA1Identifier,\n * saltLength [2] INTEGER DEFAULT 20,\n * trailerField [3] INTEGER DEFAULT 1\n * }\n *\n * HashAlgorithm ::= AlgorithmIdentifier\n *\n * MaskGenAlgorithm ::= AlgorithmIdentifier\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * @param oid The OID specifying the signature algorithm\n * @param obj The ASN.1 structure holding the parameters\n * @param fillDefaults Whether to use return default values where omitted\n * @return signature parameter object\n */\nvar _readSignatureParameters = function(oid, obj, fillDefaults) {\n var params = {};\n\n if(oid !== oids['RSASSA-PSS']) {\n return params;\n }\n\n if(fillDefaults) {\n params = {\n hash: {\n algorithmOid: oids['sha1']\n },\n mgf: {\n algorithmOid: oids['mgf1'],\n hash: {\n algorithmOid: oids['sha1']\n }\n },\n saltLength: 20\n };\n }\n\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, rsassaPssParameterValidator, capture, errors)) {\n var error = new Error('Cannot read RSASSA-PSS parameter block.');\n error.errors = errors;\n throw error;\n }\n\n if(capture.hashOid !== undefined) {\n params.hash = params.hash || {};\n params.hash.algorithmOid = asn1.derToOid(capture.hashOid);\n }\n\n if(capture.maskGenOid !== undefined) {\n params.mgf = params.mgf || {};\n params.mgf.algorithmOid = asn1.derToOid(capture.maskGenOid);\n params.mgf.hash = params.mgf.hash || {};\n params.mgf.hash.algorithmOid = asn1.derToOid(capture.maskGenHashOid);\n }\n\n if(capture.saltLength !== undefined) {\n params.saltLength = capture.saltLength.charCodeAt(0);\n }\n\n return params;\n};\n\n/**\n * Create signature digest for OID.\n *\n * @param options\n * signatureOid: the OID specifying the signature algorithm.\n * type: a human readable type for error messages\n * @return a created md instance. throws if unknown oid.\n */\nvar _createSignatureDigest = function(options) {\n switch(oids[options.signatureOid]) {\n case 'sha1WithRSAEncryption':\n // deprecated alias\n case 'sha1WithRSASignature':\n return forge.md.sha1.create();\n case 'md5WithRSAEncryption':\n return forge.md.md5.create();\n case 'sha256WithRSAEncryption':\n return forge.md.sha256.create();\n case 'sha384WithRSAEncryption':\n return forge.md.sha384.create();\n case 'sha512WithRSAEncryption':\n return forge.md.sha512.create();\n case 'RSASSA-PSS':\n return forge.md.sha256.create();\n default:\n var error = new Error(\n 'Could not compute ' + options.type + ' digest. ' +\n 'Unknown signature OID.');\n error.signatureOid = options.signatureOid;\n throw error;\n }\n};\n\n/**\n * Verify signature on certificate or CSR.\n *\n * @param options:\n * certificate the certificate or CSR to verify.\n * md the signature digest.\n * signature the signature\n * @return a created md instance. 
throws if unknown oid.\n */\nvar _verifySignature = function(options) {\n var cert = options.certificate;\n var scheme;\n\n switch(cert.signatureOid) {\n case oids.sha1WithRSAEncryption:\n // deprecated alias\n case oids.sha1WithRSASignature:\n /* use PKCS#1 v1.5 padding scheme */\n break;\n case oids['RSASSA-PSS']:\n var hash, mgf;\n\n /* initialize mgf */\n hash = oids[cert.signatureParameters.mgf.hash.algorithmOid];\n if(hash === undefined || forge.md[hash] === undefined) {\n var error = new Error('Unsupported MGF hash function.');\n error.oid = cert.signatureParameters.mgf.hash.algorithmOid;\n error.name = hash;\n throw error;\n }\n\n mgf = oids[cert.signatureParameters.mgf.algorithmOid];\n if(mgf === undefined || forge.mgf[mgf] === undefined) {\n var error = new Error('Unsupported MGF function.');\n error.oid = cert.signatureParameters.mgf.algorithmOid;\n error.name = mgf;\n throw error;\n }\n\n mgf = forge.mgf[mgf].create(forge.md[hash].create());\n\n /* initialize hash function */\n hash = oids[cert.signatureParameters.hash.algorithmOid];\n if(hash === undefined || forge.md[hash] === undefined) {\n var error = new Error('Unsupported RSASSA-PSS hash function.');\n error.oid = cert.signatureParameters.hash.algorithmOid;\n error.name = hash;\n throw error;\n }\n\n scheme = forge.pss.create(\n forge.md[hash].create(), mgf, cert.signatureParameters.saltLength\n );\n break;\n }\n\n // verify signature on cert using public key\n return cert.publicKey.verify(\n options.md.digest().getBytes(), options.signature, scheme\n );\n};\n\n/**\n * Converts an X.509 certificate from PEM format.\n *\n * Note: If the certificate is to be verified then compute hash should\n * be set to true. This will scan the TBSCertificate part of the ASN.1\n * object while it is converted so it doesn't need to be converted back\n * to ASN.1-DER-encoding later.\n *\n * @param pem the PEM-formatted certificate.\n * @param computeHash true to compute the hash for verification.\n * @param strict true to be strict when checking ASN.1 value lengths, false to\n * allow truncated values (default: true).\n *\n * @return the certificate.\n */\npki.certificateFromPem = function(pem, computeHash, strict) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'CERTIFICATE' &&\n msg.type !== 'X509 CERTIFICATE' &&\n msg.type !== 'TRUSTED CERTIFICATE') {\n var error = new Error(\n 'Could not convert certificate from PEM; PEM header type ' +\n 'is not \"CERTIFICATE\", \"X509 CERTIFICATE\", or \"TRUSTED CERTIFICATE\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error(\n 'Could not convert certificate from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body, strict);\n\n return pki.certificateFromAsn1(obj, computeHash);\n};\n\n/**\n * Converts an X.509 certificate to PEM format.\n *\n * @param cert the certificate.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted certificate.\n */\npki.certificateToPem = function(cert, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'CERTIFICATE',\n body: asn1.toDer(pki.certificateToAsn1(cert)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts an RSA public key from PEM format.\n *\n * @param pem the PEM-formatted public key.\n *\n * @return the public key.\n */\npki.publicKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n 
if(msg.type !== 'PUBLIC KEY' && msg.type !== 'RSA PUBLIC KEY') {\n var error = new Error('Could not convert public key from PEM; PEM header ' +\n 'type is not \"PUBLIC KEY\" or \"RSA PUBLIC KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert public key from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return pki.publicKeyFromAsn1(obj);\n};\n\n/**\n * Converts an RSA public key to PEM format (using a SubjectPublicKeyInfo).\n *\n * @param key the public key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted public key.\n */\npki.publicKeyToPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'PUBLIC KEY',\n body: asn1.toDer(pki.publicKeyToAsn1(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts an RSA public key to PEM format (using an RSAPublicKey).\n *\n * @param key the public key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted public key.\n */\npki.publicKeyToRSAPublicKeyPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'RSA PUBLIC KEY',\n body: asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Gets a fingerprint for the given public key.\n *\n * @param options the options to use.\n * [md] the message digest object to use (defaults to forge.md.sha1).\n * [type] the type of fingerprint, such as 'RSAPublicKey',\n * 'SubjectPublicKeyInfo' (defaults to 'RSAPublicKey').\n * [encoding] an alternative output encoding, such as 'hex'\n * (defaults to none, outputs a byte buffer).\n * [delimiter] the delimiter to use between bytes for 'hex' encoded\n * output, eg: ':' (defaults to none).\n *\n * @return the fingerprint as a byte buffer or other encoding based on options.\n */\npki.getPublicKeyFingerprint = function(key, options) {\n options = options || {};\n var md = options.md || forge.md.sha1.create();\n var type = options.type || 'RSAPublicKey';\n\n var bytes;\n switch(type) {\n case 'RSAPublicKey':\n bytes = asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes();\n break;\n case 'SubjectPublicKeyInfo':\n bytes = asn1.toDer(pki.publicKeyToAsn1(key)).getBytes();\n break;\n default:\n throw new Error('Unknown fingerprint type \"' + options.type + '\".');\n }\n\n // hash public key bytes\n md.start();\n md.update(bytes);\n var digest = md.digest();\n if(options.encoding === 'hex') {\n var hex = digest.toHex();\n if(options.delimiter) {\n return hex.match(/.{2}/g).join(options.delimiter);\n }\n return hex;\n } else if(options.encoding === 'binary') {\n return digest.getBytes();\n } else if(options.encoding) {\n throw new Error('Unknown encoding \"' + options.encoding + '\".');\n }\n return digest;\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) from PEM format.\n *\n * Note: If the certification request is to be verified then compute hash\n * should be set to true. 
This will scan the CertificationRequestInfo part of\n * the ASN.1 object while it is converted so it doesn't need to be converted\n * back to ASN.1-DER-encoding later.\n *\n * @param pem the PEM-formatted certificate.\n * @param computeHash true to compute the hash for verification.\n * @param strict true to be strict when checking ASN.1 value lengths, false to\n * allow truncated values (default: true).\n *\n * @return the certification request (CSR).\n */\npki.certificationRequestFromPem = function(pem, computeHash, strict) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'CERTIFICATE REQUEST') {\n var error = new Error('Could not convert certification request from PEM; ' +\n 'PEM header type is not \"CERTIFICATE REQUEST\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert certification request from PEM; ' +\n 'PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body, strict);\n\n return pki.certificationRequestFromAsn1(obj, computeHash);\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) to PEM format.\n *\n * @param csr the certification request.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted certification request.\n */\npki.certificationRequestToPem = function(csr, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'CERTIFICATE REQUEST',\n body: asn1.toDer(pki.certificationRequestToAsn1(csr)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Creates an empty X.509v3 RSA certificate.\n *\n * @return the certificate.\n */\npki.createCertificate = function() {\n var cert = {};\n cert.version = 0x02;\n cert.serialNumber = '00';\n cert.signatureOid = null;\n cert.signature = null;\n cert.siginfo = {};\n cert.siginfo.algorithmOid = null;\n cert.validity = {};\n cert.validity.notBefore = new Date();\n cert.validity.notAfter = new Date();\n\n cert.issuer = {};\n cert.issuer.getField = function(sn) {\n return _getAttribute(cert.issuer, sn);\n };\n cert.issuer.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.issuer.attributes.push(attr);\n };\n cert.issuer.attributes = [];\n cert.issuer.hash = null;\n\n cert.subject = {};\n cert.subject.getField = function(sn) {\n return _getAttribute(cert.subject, sn);\n };\n cert.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.subject.attributes.push(attr);\n };\n cert.subject.attributes = [];\n cert.subject.hash = null;\n\n cert.extensions = [];\n cert.publicKey = null;\n cert.md = null;\n\n /**\n * Sets the subject of this certificate.\n *\n * @param attrs the array of subject attributes to use.\n * @param uniqueId an optional a unique ID to use.\n */\n cert.setSubject = function(attrs, uniqueId) {\n // set new attributes, clear hash\n _fillMissingFields(attrs);\n cert.subject.attributes = attrs;\n delete cert.subject.uniqueId;\n if(uniqueId) {\n // TODO: support arbitrary bit length ids\n cert.subject.uniqueId = uniqueId;\n }\n cert.subject.hash = null;\n };\n\n /**\n * Sets the issuer of this certificate.\n *\n * @param attrs the array of issuer attributes to use.\n * @param uniqueId an optional a unique ID to use.\n */\n cert.setIssuer = function(attrs, uniqueId) {\n // set new attributes, clear hash\n _fillMissingFields(attrs);\n cert.issuer.attributes = attrs;\n delete cert.issuer.uniqueId;\n if(uniqueId) {\n // TODO: support arbitrary bit 
length ids\n cert.issuer.uniqueId = uniqueId;\n }\n cert.issuer.hash = null;\n };\n\n /**\n * Sets the extensions of this certificate.\n *\n * @param exts the array of extensions to use.\n */\n cert.setExtensions = function(exts) {\n for(var i = 0; i < exts.length; ++i) {\n _fillMissingExtensionFields(exts[i], {cert: cert});\n }\n // set new extensions\n cert.extensions = exts;\n };\n\n /**\n * Gets an extension by its name or id.\n *\n * @param options the name to use or an object with:\n * name the name to use.\n * id the id to use.\n *\n * @return the extension or null if not found.\n */\n cert.getExtension = function(options) {\n if(typeof options === 'string') {\n options = {name: options};\n }\n\n var rval = null;\n var ext;\n for(var i = 0; rval === null && i < cert.extensions.length; ++i) {\n ext = cert.extensions[i];\n if(options.id && ext.id === options.id) {\n rval = ext;\n } else if(options.name && ext.name === options.name) {\n rval = ext;\n }\n }\n return rval;\n };\n\n /**\n * Signs this certificate using the given private key.\n *\n * @param key the private key to sign with.\n * @param md the message digest object to use (defaults to forge.md.sha1).\n */\n cert.sign = function(key, md) {\n // TODO: get signature OID from private key\n cert.md = md || forge.md.sha1.create();\n var algorithmOid = oids[cert.md.algorithm + 'WithRSAEncryption'];\n if(!algorithmOid) {\n var error = new Error('Could not compute certificate digest. ' +\n 'Unknown message digest algorithm OID.');\n error.algorithm = cert.md.algorithm;\n throw error;\n }\n cert.signatureOid = cert.siginfo.algorithmOid = algorithmOid;\n\n // get TBSCertificate, convert to DER\n cert.tbsCertificate = pki.getTBSCertificate(cert);\n var bytes = asn1.toDer(cert.tbsCertificate);\n\n // digest and sign\n cert.md.update(bytes.getBytes());\n cert.signature = key.sign(cert.md);\n };\n\n /**\n * Attempts verify the signature on the passed certificate using this\n * certificate's public key.\n *\n * @param child the certificate to verify.\n *\n * @return true if verified, false if not.\n */\n cert.verify = function(child) {\n var rval = false;\n\n if(!cert.issued(child)) {\n var issuer = child.issuer;\n var subject = cert.subject;\n var error = new Error(\n 'The parent certificate did not issue the given child ' +\n 'certificate; the child certificate\\'s issuer does not match the ' +\n 'parent\\'s subject.');\n error.expectedIssuer = subject.attributes;\n error.actualIssuer = issuer.attributes;\n throw error;\n }\n\n var md = child.md;\n if(md === null) {\n // create digest for OID signature types\n md = _createSignatureDigest({\n signatureOid: child.signatureOid,\n type: 'certificate'\n });\n\n // produce DER formatted TBSCertificate and digest it\n var tbsCertificate = child.tbsCertificate || pki.getTBSCertificate(child);\n var bytes = asn1.toDer(tbsCertificate);\n md.update(bytes.getBytes());\n }\n\n if(md !== null) {\n rval = _verifySignature({\n certificate: cert, md: md, signature: child.signature\n });\n }\n\n return rval;\n };\n\n /**\n * Returns true if this certificate's issuer matches the passed\n * certificate's subject. 
Note that no signature check is performed.\n *\n * @param parent the certificate to check.\n *\n * @return true if this certificate's issuer matches the passed certificate's\n * subject.\n */\n cert.isIssuer = function(parent) {\n var rval = false;\n\n var i = cert.issuer;\n var s = parent.subject;\n\n // compare hashes if present\n if(i.hash && s.hash) {\n rval = (i.hash === s.hash);\n } else if(i.attributes.length === s.attributes.length) {\n // all attributes are the same so issuer matches subject\n rval = true;\n var iattr, sattr;\n for(var n = 0; rval && n < i.attributes.length; ++n) {\n iattr = i.attributes[n];\n sattr = s.attributes[n];\n if(iattr.type !== sattr.type || iattr.value !== sattr.value) {\n // attribute mismatch\n rval = false;\n }\n }\n }\n\n return rval;\n };\n\n /**\n * Returns true if this certificate's subject matches the issuer of the\n * given certificate). Note that not signature check is performed.\n *\n * @param child the certificate to check.\n *\n * @return true if this certificate's subject matches the passed\n * certificate's issuer.\n */\n cert.issued = function(child) {\n return child.isIssuer(cert);\n };\n\n /**\n * Generates the subjectKeyIdentifier for this certificate as byte buffer.\n *\n * @return the subjectKeyIdentifier for this certificate as byte buffer.\n */\n cert.generateSubjectKeyIdentifier = function() {\n /* See: 4.2.1.2 section of the the RFC3280, keyIdentifier is either:\n\n (1) The keyIdentifier is composed of the 160-bit SHA-1 hash of the\n value of the BIT STRING subjectPublicKey (excluding the tag,\n length, and number of unused bits).\n\n (2) The keyIdentifier is composed of a four bit type field with\n the value 0100 followed by the least significant 60 bits of the\n SHA-1 hash of the value of the BIT STRING subjectPublicKey\n (excluding the tag, length, and number of unused bit string bits).\n */\n\n // skipping the tag, length, and number of unused bits is the same\n // as just using the RSAPublicKey (for RSA keys, which are the\n // only ones supported)\n return pki.getPublicKeyFingerprint(cert.publicKey, {type: 'RSAPublicKey'});\n };\n\n /**\n * Verifies the subjectKeyIdentifier extension value for this certificate\n * against its public key. If no extension is found, false will be\n * returned.\n *\n * @return true if verified, false if not.\n */\n cert.verifySubjectKeyIdentifier = function() {\n var oid = oids['subjectKeyIdentifier'];\n for(var i = 0; i < cert.extensions.length; ++i) {\n var ext = cert.extensions[i];\n if(ext.id === oid) {\n var ski = cert.generateSubjectKeyIdentifier().getBytes();\n return (forge.util.hexToBytes(ext.subjectKeyIdentifier) === ski);\n }\n }\n return false;\n };\n\n return cert;\n};\n\n/**\n * Converts an X.509v3 RSA certificate from an ASN.1 object.\n *\n * Note: If the certificate is to be verified then compute hash should\n * be set to true. There is currently no implementation for converting\n * a certificate back to ASN.1 so the TBSCertificate part of the ASN.1\n * object needs to be scanned before the cert object is created.\n *\n * @param obj the asn1 representation of an X.509v3 RSA certificate.\n * @param computeHash true to compute the hash for verification.\n *\n * @return the certificate.\n */\npki.certificateFromAsn1 = function(obj, computeHash) {\n // validate certificate and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, x509CertificateValidator, capture, errors)) {\n var error = new Error('Cannot read X.509 certificate. 
' +\n 'ASN.1 object is not an X509v3 Certificate.');\n error.errors = errors;\n throw error;\n }\n\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n throw new Error('Cannot read public key. OID is not RSA.');\n }\n\n // create certificate\n var cert = pki.createCertificate();\n cert.version = capture.certVersion ?\n capture.certVersion.charCodeAt(0) : 0;\n var serial = forge.util.createBuffer(capture.certSerialNumber);\n cert.serialNumber = serial.toHex();\n cert.signatureOid = forge.asn1.derToOid(capture.certSignatureOid);\n cert.signatureParameters = _readSignatureParameters(\n cert.signatureOid, capture.certSignatureParams, true);\n cert.siginfo.algorithmOid = forge.asn1.derToOid(capture.certinfoSignatureOid);\n cert.siginfo.parameters = _readSignatureParameters(cert.siginfo.algorithmOid,\n capture.certinfoSignatureParams, false);\n cert.signature = capture.certSignature;\n\n var validity = [];\n if(capture.certValidity1UTCTime !== undefined) {\n validity.push(asn1.utcTimeToDate(capture.certValidity1UTCTime));\n }\n if(capture.certValidity2GeneralizedTime !== undefined) {\n validity.push(asn1.generalizedTimeToDate(\n capture.certValidity2GeneralizedTime));\n }\n if(capture.certValidity3UTCTime !== undefined) {\n validity.push(asn1.utcTimeToDate(capture.certValidity3UTCTime));\n }\n if(capture.certValidity4GeneralizedTime !== undefined) {\n validity.push(asn1.generalizedTimeToDate(\n capture.certValidity4GeneralizedTime));\n }\n if(validity.length > 2) {\n throw new Error('Cannot read notBefore/notAfter validity times; more ' +\n 'than two times were provided in the certificate.');\n }\n if(validity.length < 2) {\n throw new Error('Cannot read notBefore/notAfter validity times; they ' +\n 'were not provided as either UTCTime or GeneralizedTime.');\n }\n cert.validity.notBefore = validity[0];\n cert.validity.notAfter = validity[1];\n\n // keep TBSCertificate to preserve signature when exporting\n cert.tbsCertificate = capture.tbsCertificate;\n\n if(computeHash) {\n // create digest for OID signature type\n cert.md = _createSignatureDigest({\n signatureOid: cert.signatureOid,\n type: 'certificate'\n });\n\n // produce DER formatted TBSCertificate and digest it\n var bytes = asn1.toDer(cert.tbsCertificate);\n cert.md.update(bytes.getBytes());\n }\n\n // handle issuer, build issuer message digest\n var imd = forge.md.sha1.create();\n var ibytes = asn1.toDer(capture.certIssuer);\n imd.update(ibytes.getBytes());\n cert.issuer.getField = function(sn) {\n return _getAttribute(cert.issuer, sn);\n };\n cert.issuer.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.issuer.attributes.push(attr);\n };\n cert.issuer.attributes = pki.RDNAttributesAsArray(capture.certIssuer);\n if(capture.certIssuerUniqueId) {\n cert.issuer.uniqueId = capture.certIssuerUniqueId;\n }\n cert.issuer.hash = imd.digest().toHex();\n\n // handle subject, build subject message digest\n var smd = forge.md.sha1.create();\n var sbytes = asn1.toDer(capture.certSubject);\n smd.update(sbytes.getBytes());\n cert.subject.getField = function(sn) {\n return _getAttribute(cert.subject, sn);\n };\n cert.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.subject.attributes.push(attr);\n };\n cert.subject.attributes = pki.RDNAttributesAsArray(capture.certSubject);\n if(capture.certSubjectUniqueId) {\n cert.subject.uniqueId = capture.certSubjectUniqueId;\n }\n cert.subject.hash = smd.digest().toHex();\n\n // handle extensions\n if(capture.certExtensions) 
{\n cert.extensions = pki.certificateExtensionsFromAsn1(capture.certExtensions);\n } else {\n cert.extensions = [];\n }\n\n // convert RSA public key from ASN.1\n cert.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo);\n\n return cert;\n};\n\n/**\n * Converts an ASN.1 extensions object (with extension sequences as its\n * values) into an array of extension objects with types and values.\n *\n * Supported extensions:\n *\n * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 }\n * KeyUsage ::= BIT STRING {\n * digitalSignature (0),\n * nonRepudiation (1),\n * keyEncipherment (2),\n * dataEncipherment (3),\n * keyAgreement (4),\n * keyCertSign (5),\n * cRLSign (6),\n * encipherOnly (7),\n * decipherOnly (8)\n * }\n *\n * id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 }\n * BasicConstraints ::= SEQUENCE {\n * cA BOOLEAN DEFAULT FALSE,\n * pathLenConstraint INTEGER (0..MAX) OPTIONAL\n * }\n *\n * subjectAltName EXTENSION ::= {\n * SYNTAX GeneralNames\n * IDENTIFIED BY id-ce-subjectAltName\n * }\n *\n * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName\n *\n * GeneralName ::= CHOICE {\n * otherName [0] INSTANCE OF OTHER-NAME,\n * rfc822Name [1] IA5String,\n * dNSName [2] IA5String,\n * x400Address [3] ORAddress,\n * directoryName [4] Name,\n * ediPartyName [5] EDIPartyName,\n * uniformResourceIdentifier [6] IA5String,\n * IPAddress [7] OCTET STRING,\n * registeredID [8] OBJECT IDENTIFIER\n * }\n *\n * OTHER-NAME ::= TYPE-IDENTIFIER\n *\n * EDIPartyName ::= SEQUENCE {\n * nameAssigner [0] DirectoryString {ub-name} OPTIONAL,\n * partyName [1] DirectoryString {ub-name}\n * }\n *\n * @param exts the extensions ASN.1 with extension sequences to parse.\n *\n * @return the array.\n */\npki.certificateExtensionsFromAsn1 = function(exts) {\n var rval = [];\n for(var i = 0; i < exts.value.length; ++i) {\n // get extension sequence\n var extseq = exts.value[i];\n for(var ei = 0; ei < extseq.value.length; ++ei) {\n rval.push(pki.certificateExtensionFromAsn1(extseq.value[ei]));\n }\n }\n\n return rval;\n};\n\n/**\n * Parses a single certificate extension from ASN.1.\n *\n * @param ext the extension in ASN.1 format.\n *\n * @return the parsed extension as an object.\n */\npki.certificateExtensionFromAsn1 = function(ext) {\n // an extension has:\n // [0] extnID OBJECT IDENTIFIER\n // [1] critical BOOLEAN DEFAULT FALSE\n // [2] extnValue OCTET STRING\n var e = {};\n e.id = asn1.derToOid(ext.value[0].value);\n e.critical = false;\n if(ext.value[1].type === asn1.Type.BOOLEAN) {\n e.critical = (ext.value[1].value.charCodeAt(0) !== 0x00);\n e.value = ext.value[2].value;\n } else {\n e.value = ext.value[1].value;\n }\n // if the oid is known, get its name\n if(e.id in oids) {\n e.name = oids[e.id];\n\n // handle key usage\n if(e.name === 'keyUsage') {\n // get value as BIT STRING\n var ev = asn1.fromDer(e.value);\n var b2 = 0x00;\n var b3 = 0x00;\n if(ev.value.length > 1) {\n // skip first byte, just indicates unused bits which\n // will be padded with 0s anyway\n // get bytes with flag bits\n b2 = ev.value.charCodeAt(1);\n b3 = ev.value.length > 2 ? 
ev.value.charCodeAt(2) : 0;\n }\n // set flags\n e.digitalSignature = (b2 & 0x80) === 0x80;\n e.nonRepudiation = (b2 & 0x40) === 0x40;\n e.keyEncipherment = (b2 & 0x20) === 0x20;\n e.dataEncipherment = (b2 & 0x10) === 0x10;\n e.keyAgreement = (b2 & 0x08) === 0x08;\n e.keyCertSign = (b2 & 0x04) === 0x04;\n e.cRLSign = (b2 & 0x02) === 0x02;\n e.encipherOnly = (b2 & 0x01) === 0x01;\n e.decipherOnly = (b3 & 0x80) === 0x80;\n } else if(e.name === 'basicConstraints') {\n // handle basic constraints\n // get value as SEQUENCE\n var ev = asn1.fromDer(e.value);\n // get cA BOOLEAN flag (defaults to false)\n if(ev.value.length > 0 && ev.value[0].type === asn1.Type.BOOLEAN) {\n e.cA = (ev.value[0].value.charCodeAt(0) !== 0x00);\n } else {\n e.cA = false;\n }\n // get path length constraint\n var value = null;\n if(ev.value.length > 0 && ev.value[0].type === asn1.Type.INTEGER) {\n value = ev.value[0].value;\n } else if(ev.value.length > 1) {\n value = ev.value[1].value;\n }\n if(value !== null) {\n e.pathLenConstraint = asn1.derToInteger(value);\n }\n } else if(e.name === 'extKeyUsage') {\n // handle extKeyUsage\n // value is a SEQUENCE of OIDs\n var ev = asn1.fromDer(e.value);\n for(var vi = 0; vi < ev.value.length; ++vi) {\n var oid = asn1.derToOid(ev.value[vi].value);\n if(oid in oids) {\n e[oids[oid]] = true;\n } else {\n e[oid] = true;\n }\n }\n } else if(e.name === 'nsCertType') {\n // handle nsCertType\n // get value as BIT STRING\n var ev = asn1.fromDer(e.value);\n var b2 = 0x00;\n if(ev.value.length > 1) {\n // skip first byte, just indicates unused bits which\n // will be padded with 0s anyway\n // get bytes with flag bits\n b2 = ev.value.charCodeAt(1);\n }\n // set flags\n e.client = (b2 & 0x80) === 0x80;\n e.server = (b2 & 0x40) === 0x40;\n e.email = (b2 & 0x20) === 0x20;\n e.objsign = (b2 & 0x10) === 0x10;\n e.reserved = (b2 & 0x08) === 0x08;\n e.sslCA = (b2 & 0x04) === 0x04;\n e.emailCA = (b2 & 0x02) === 0x02;\n e.objCA = (b2 & 0x01) === 0x01;\n } else if(\n e.name === 'subjectAltName' ||\n e.name === 'issuerAltName') {\n // handle subjectAltName/issuerAltName\n e.altNames = [];\n\n // ev is a SYNTAX SEQUENCE\n var gn;\n var ev = asn1.fromDer(e.value);\n for(var n = 0; n < ev.value.length; ++n) {\n // get GeneralName\n gn = ev.value[n];\n\n var altName = {\n type: gn.type,\n value: gn.value\n };\n e.altNames.push(altName);\n\n // Note: Support for types 1,2,6,7,8\n switch(gn.type) {\n // rfc822Name\n case 1:\n // dNSName\n case 2:\n // uniformResourceIdentifier (URI)\n case 6:\n break;\n // IPAddress\n case 7:\n // convert to IPv4/IPv6 string representation\n altName.ip = forge.util.bytesToIP(gn.value);\n break;\n // registeredID\n case 8:\n altName.oid = asn1.derToOid(gn.value);\n break;\n default:\n // unsupported\n }\n }\n } else if(e.name === 'subjectKeyIdentifier') {\n // value is an OCTETSTRING w/the hash of the key-type specific\n // public key structure (eg: RSAPublicKey)\n var ev = asn1.fromDer(e.value);\n e.subjectKeyIdentifier = forge.util.bytesToHex(ev.value);\n }\n }\n return e;\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) from an ASN.1 object.\n *\n * Note: If the certification request is to be verified then compute hash\n * should be set to true. 
There is currently no implementation for converting\n * a certificate back to ASN.1 so the CertificationRequestInfo part of the\n * ASN.1 object needs to be scanned before the csr object is created.\n *\n * @param obj the asn1 representation of a PKCS#10 certification request (CSR).\n * @param computeHash true to compute the hash for verification.\n *\n * @return the certification request (CSR).\n */\npki.certificationRequestFromAsn1 = function(obj, computeHash) {\n // validate certification request and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, certificationRequestValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#10 certificate request. ' +\n 'ASN.1 object is not a PKCS#10 CertificationRequest.');\n error.errors = errors;\n throw error;\n }\n\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n throw new Error('Cannot read public key. OID is not RSA.');\n }\n\n // create certification request\n var csr = pki.createCertificationRequest();\n csr.version = capture.csrVersion ? capture.csrVersion.charCodeAt(0) : 0;\n csr.signatureOid = forge.asn1.derToOid(capture.csrSignatureOid);\n csr.signatureParameters = _readSignatureParameters(\n csr.signatureOid, capture.csrSignatureParams, true);\n csr.siginfo.algorithmOid = forge.asn1.derToOid(capture.csrSignatureOid);\n csr.siginfo.parameters = _readSignatureParameters(\n csr.siginfo.algorithmOid, capture.csrSignatureParams, false);\n csr.signature = capture.csrSignature;\n\n // keep CertificationRequestInfo to preserve signature when exporting\n csr.certificationRequestInfo = capture.certificationRequestInfo;\n\n if(computeHash) {\n // create digest for OID signature type\n csr.md = _createSignatureDigest({\n signatureOid: csr.signatureOid,\n type: 'certification request'\n });\n\n // produce DER formatted CertificationRequestInfo and digest it\n var bytes = asn1.toDer(csr.certificationRequestInfo);\n csr.md.update(bytes.getBytes());\n }\n\n // handle subject, build subject message digest\n var smd = forge.md.sha1.create();\n csr.subject.getField = function(sn) {\n return _getAttribute(csr.subject, sn);\n };\n csr.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n csr.subject.attributes.push(attr);\n };\n csr.subject.attributes = pki.RDNAttributesAsArray(\n capture.certificationRequestInfoSubject, smd);\n csr.subject.hash = smd.digest().toHex();\n\n // convert RSA public key from ASN.1\n csr.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo);\n\n // convert attributes from ASN.1\n csr.getAttribute = function(sn) {\n return _getAttribute(csr, sn);\n };\n csr.addAttribute = function(attr) {\n _fillMissingFields([attr]);\n csr.attributes.push(attr);\n };\n csr.attributes = pki.CRIAttributesAsArray(\n capture.certificationRequestInfoAttributes || []);\n\n return csr;\n};\n\n/**\n * Creates an empty certification request (a CSR or certificate signing\n * request). 
Once created, its public key and attributes can be set and then\n * it can be signed.\n *\n * @return the empty certification request.\n */\npki.createCertificationRequest = function() {\n var csr = {};\n csr.version = 0x00;\n csr.signatureOid = null;\n csr.signature = null;\n csr.siginfo = {};\n csr.siginfo.algorithmOid = null;\n\n csr.subject = {};\n csr.subject.getField = function(sn) {\n return _getAttribute(csr.subject, sn);\n };\n csr.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n csr.subject.attributes.push(attr);\n };\n csr.subject.attributes = [];\n csr.subject.hash = null;\n\n csr.publicKey = null;\n csr.attributes = [];\n csr.getAttribute = function(sn) {\n return _getAttribute(csr, sn);\n };\n csr.addAttribute = function(attr) {\n _fillMissingFields([attr]);\n csr.attributes.push(attr);\n };\n csr.md = null;\n\n /**\n * Sets the subject of this certification request.\n *\n * @param attrs the array of subject attributes to use.\n */\n csr.setSubject = function(attrs) {\n // set new attributes\n _fillMissingFields(attrs);\n csr.subject.attributes = attrs;\n csr.subject.hash = null;\n };\n\n /**\n * Sets the attributes of this certification request.\n *\n * @param attrs the array of attributes to use.\n */\n csr.setAttributes = function(attrs) {\n // set new attributes\n _fillMissingFields(attrs);\n csr.attributes = attrs;\n };\n\n /**\n * Signs this certification request using the given private key.\n *\n * @param key the private key to sign with.\n * @param md the message digest object to use (defaults to forge.md.sha1).\n */\n csr.sign = function(key, md) {\n // TODO: get signature OID from private key\n csr.md = md || forge.md.sha1.create();\n var algorithmOid = oids[csr.md.algorithm + 'WithRSAEncryption'];\n if(!algorithmOid) {\n var error = new Error('Could not compute certification request digest. 
' +\n 'Unknown message digest algorithm OID.');\n error.algorithm = csr.md.algorithm;\n throw error;\n }\n csr.signatureOid = csr.siginfo.algorithmOid = algorithmOid;\n\n // get CertificationRequestInfo, convert to DER\n csr.certificationRequestInfo = pki.getCertificationRequestInfo(csr);\n var bytes = asn1.toDer(csr.certificationRequestInfo);\n\n // digest and sign\n csr.md.update(bytes.getBytes());\n csr.signature = key.sign(csr.md);\n };\n\n /**\n * Attempts verify the signature on the passed certification request using\n * its public key.\n *\n * A CSR that has been exported to a file in PEM format can be verified using\n * OpenSSL using this command:\n *\n * openssl req -in -verify -noout -text\n *\n * @return true if verified, false if not.\n */\n csr.verify = function() {\n var rval = false;\n\n var md = csr.md;\n if(md === null) {\n md = _createSignatureDigest({\n signatureOid: csr.signatureOid,\n type: 'certification request'\n });\n\n // produce DER formatted CertificationRequestInfo and digest it\n var cri = csr.certificationRequestInfo ||\n pki.getCertificationRequestInfo(csr);\n var bytes = asn1.toDer(cri);\n md.update(bytes.getBytes());\n }\n\n if(md !== null) {\n rval = _verifySignature({\n certificate: csr, md: md, signature: csr.signature\n });\n }\n\n return rval;\n };\n\n return csr;\n};\n\n/**\n * Converts an X.509 subject or issuer to an ASN.1 RDNSequence.\n *\n * @param obj the subject or issuer (distinguished name).\n *\n * @return the ASN.1 RDNSequence.\n */\nfunction _dnToAsn1(obj) {\n // create an empty RDNSequence\n var rval = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // iterate over attributes\n var attr, set;\n var attrs = obj.attributes;\n for(var i = 0; i < attrs.length; ++i) {\n attr = attrs[i];\n var value = attr.value;\n\n // reuse tag class for attribute value if available\n var valueTagClass = asn1.Type.PRINTABLESTRING;\n if('valueTagClass' in attr) {\n valueTagClass = attr.valueTagClass;\n\n if(valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(value);\n }\n // FIXME: handle more encodings\n }\n\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.type).getBytes()),\n // AttributeValue\n asn1.create(asn1.Class.UNIVERSAL, valueTagClass, false, value)\n ])\n ]);\n rval.value.push(set);\n }\n\n return rval;\n}\n\n/**\n * Gets all printable attributes (typically of an issuer or subject) in a\n * simplified JSON format for display.\n *\n * @param attrs the attributes.\n *\n * @return the JSON for display.\n */\nfunction _getAttributesAsJson(attrs) {\n var rval = {};\n for(var i = 0; i < attrs.length; ++i) {\n var attr = attrs[i];\n if(attr.shortName && (\n attr.valueTagClass === asn1.Type.UTF8 ||\n attr.valueTagClass === asn1.Type.PRINTABLESTRING ||\n attr.valueTagClass === asn1.Type.IA5STRING)) {\n var value = attr.value;\n if(attr.valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(attr.value);\n }\n if(!(attr.shortName in rval)) {\n rval[attr.shortName] = value;\n } else if(forge.util.isArray(rval[attr.shortName])) {\n rval[attr.shortName].push(value);\n } else {\n rval[attr.shortName] = [rval[attr.shortName], value];\n }\n }\n }\n return rval;\n}\n\n/**\n * Fills 
in missing fields in attributes.\n *\n * @param attrs the attributes to fill missing fields in.\n */\nfunction _fillMissingFields(attrs) {\n var attr;\n for(var i = 0; i < attrs.length; ++i) {\n attr = attrs[i];\n\n // populate missing name\n if(typeof attr.name === 'undefined') {\n if(attr.type && attr.type in pki.oids) {\n attr.name = pki.oids[attr.type];\n } else if(attr.shortName && attr.shortName in _shortNames) {\n attr.name = pki.oids[_shortNames[attr.shortName]];\n }\n }\n\n // populate missing type (OID)\n if(typeof attr.type === 'undefined') {\n if(attr.name && attr.name in pki.oids) {\n attr.type = pki.oids[attr.name];\n } else {\n var error = new Error('Attribute type not specified.');\n error.attribute = attr;\n throw error;\n }\n }\n\n // populate missing shortname\n if(typeof attr.shortName === 'undefined') {\n if(attr.name && attr.name in _shortNames) {\n attr.shortName = _shortNames[attr.name];\n }\n }\n\n // convert extensions to value\n if(attr.type === oids.extensionRequest) {\n attr.valueConstructed = true;\n attr.valueTagClass = asn1.Type.SEQUENCE;\n if(!attr.value && attr.extensions) {\n attr.value = [];\n for(var ei = 0; ei < attr.extensions.length; ++ei) {\n attr.value.push(pki.certificateExtensionToAsn1(\n _fillMissingExtensionFields(attr.extensions[ei])));\n }\n }\n }\n\n if(typeof attr.value === 'undefined') {\n var error = new Error('Attribute value not specified.');\n error.attribute = attr;\n throw error;\n }\n }\n}\n\n/**\n * Fills in missing fields in certificate extensions.\n *\n * @param e the extension.\n * @param [options] the options to use.\n * [cert] the certificate the extensions are for.\n *\n * @return the extension.\n */\nfunction _fillMissingExtensionFields(e, options) {\n options = options || {};\n\n // populate missing name\n if(typeof e.name === 'undefined') {\n if(e.id && e.id in pki.oids) {\n e.name = pki.oids[e.id];\n }\n }\n\n // populate missing id\n if(typeof e.id === 'undefined') {\n if(e.name && e.name in pki.oids) {\n e.id = pki.oids[e.name];\n } else {\n var error = new Error('Extension ID not specified.');\n error.extension = e;\n throw error;\n }\n }\n\n if(typeof e.value !== 'undefined') {\n return e;\n }\n\n // handle missing value:\n\n // value is a BIT STRING\n if(e.name === 'keyUsage') {\n // build flags\n var unused = 0;\n var b2 = 0x00;\n var b3 = 0x00;\n if(e.digitalSignature) {\n b2 |= 0x80;\n unused = 7;\n }\n if(e.nonRepudiation) {\n b2 |= 0x40;\n unused = 6;\n }\n if(e.keyEncipherment) {\n b2 |= 0x20;\n unused = 5;\n }\n if(e.dataEncipherment) {\n b2 |= 0x10;\n unused = 4;\n }\n if(e.keyAgreement) {\n b2 |= 0x08;\n unused = 3;\n }\n if(e.keyCertSign) {\n b2 |= 0x04;\n unused = 2;\n }\n if(e.cRLSign) {\n b2 |= 0x02;\n unused = 1;\n }\n if(e.encipherOnly) {\n b2 |= 0x01;\n unused = 0;\n }\n if(e.decipherOnly) {\n b3 |= 0x80;\n unused = 7;\n }\n\n // create bit string\n var value = String.fromCharCode(unused);\n if(b3 !== 0) {\n value += String.fromCharCode(b2) + String.fromCharCode(b3);\n } else if(b2 !== 0) {\n value += String.fromCharCode(b2);\n }\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value);\n } else if(e.name === 'basicConstraints') {\n // basicConstraints is a SEQUENCE\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n // cA BOOLEAN flag defaults to false\n if(e.cA) {\n e.value.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false,\n String.fromCharCode(0xFF)));\n }\n if('pathLenConstraint' in e) {\n 
e.value.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(e.pathLenConstraint).getBytes()));\n }\n } else if(e.name === 'extKeyUsage') {\n // extKeyUsage is a SEQUENCE of OIDs\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n for(var key in e) {\n if(e[key] !== true) {\n continue;\n }\n // key is name in OID map\n if(key in oids) {\n seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID,\n false, asn1.oidToDer(oids[key]).getBytes()));\n } else if(key.indexOf('.') !== -1) {\n // assume key is an OID\n seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID,\n false, asn1.oidToDer(key).getBytes()));\n }\n }\n } else if(e.name === 'nsCertType') {\n // nsCertType is a BIT STRING\n // build flags\n var unused = 0;\n var b2 = 0x00;\n\n if(e.client) {\n b2 |= 0x80;\n unused = 7;\n }\n if(e.server) {\n b2 |= 0x40;\n unused = 6;\n }\n if(e.email) {\n b2 |= 0x20;\n unused = 5;\n }\n if(e.objsign) {\n b2 |= 0x10;\n unused = 4;\n }\n if(e.reserved) {\n b2 |= 0x08;\n unused = 3;\n }\n if(e.sslCA) {\n b2 |= 0x04;\n unused = 2;\n }\n if(e.emailCA) {\n b2 |= 0x02;\n unused = 1;\n }\n if(e.objCA) {\n b2 |= 0x01;\n unused = 0;\n }\n\n // create bit string\n var value = String.fromCharCode(unused);\n if(b2 !== 0) {\n value += String.fromCharCode(b2);\n }\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value);\n } else if(e.name === 'subjectAltName' || e.name === 'issuerAltName') {\n // SYNTAX SEQUENCE\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n var altName;\n for(var n = 0; n < e.altNames.length; ++n) {\n altName = e.altNames[n];\n var value = altName.value;\n // handle IP\n if(altName.type === 7 && altName.ip) {\n value = forge.util.bytesFromIP(altName.ip);\n if(value === null) {\n var error = new Error(\n 'Extension \"ip\" value is not a valid IPv4 or IPv6 address.');\n error.extension = e;\n throw error;\n }\n } else if(altName.type === 8) {\n // handle OID\n if(altName.oid) {\n value = asn1.oidToDer(asn1.oidToDer(altName.oid));\n } else {\n // deprecated ... 
convert value to OID\n value = asn1.oidToDer(value);\n }\n }\n e.value.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, altName.type, false,\n value));\n }\n } else if(e.name === 'nsComment' && options.cert) {\n // sanity check value is ASCII (req'd) and not too big\n if(!(/^[\\x00-\\x7F]*$/.test(e.comment)) ||\n (e.comment.length < 1) || (e.comment.length > 128)) {\n throw new Error('Invalid \"nsComment\" content.');\n }\n // IA5STRING opaque comment\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.IA5STRING, false, e.comment);\n } else if(e.name === 'subjectKeyIdentifier' && options.cert) {\n var ski = options.cert.generateSubjectKeyIdentifier();\n e.subjectKeyIdentifier = ski.toHex();\n // OCTETSTRING w/digest\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, ski.getBytes());\n } else if(e.name === 'authorityKeyIdentifier' && options.cert) {\n // SYNTAX SEQUENCE\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n\n if(e.keyIdentifier) {\n var keyIdentifier = (e.keyIdentifier === true ?\n options.cert.generateSubjectKeyIdentifier().getBytes() :\n e.keyIdentifier);\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, false, keyIdentifier));\n }\n\n if(e.authorityCertIssuer) {\n var authorityCertIssuer = [\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 4, true, [\n _dnToAsn1(e.authorityCertIssuer === true ?\n options.cert.issuer : e.authorityCertIssuer)\n ])\n ];\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, authorityCertIssuer));\n }\n\n if(e.serialNumber) {\n var serialNumber = forge.util.hexToBytes(e.serialNumber === true ?\n options.cert.serialNumber : e.serialNumber);\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, false, serialNumber));\n }\n } else if(e.name === 'cRLDistributionPoints') {\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n\n // Create sub SEQUENCE of DistributionPointName\n var subSeq = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // Create fullName CHOICE\n var fullNameGeneralNames = asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n var altName;\n for(var n = 0; n < e.altNames.length; ++n) {\n altName = e.altNames[n];\n var value = altName.value;\n // handle IP\n if(altName.type === 7 && altName.ip) {\n value = forge.util.bytesFromIP(altName.ip);\n if(value === null) {\n var error = new Error(\n 'Extension \"ip\" value is not a valid IPv4 or IPv6 address.');\n error.extension = e;\n throw error;\n }\n } else if(altName.type === 8) {\n // handle OID\n if(altName.oid) {\n value = asn1.oidToDer(asn1.oidToDer(altName.oid));\n } else {\n // deprecated ... 
convert value to OID\n value = asn1.oidToDer(value);\n }\n }\n fullNameGeneralNames.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, altName.type, false,\n value));\n }\n\n // Add to the parent SEQUENCE\n subSeq.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, [fullNameGeneralNames]));\n seq.push(subSeq);\n }\n\n // ensure value has been defined by now\n if(typeof e.value === 'undefined') {\n var error = new Error('Extension value not specified.');\n error.extension = e;\n throw error;\n }\n\n return e;\n}\n\n/**\n * Convert signature parameters object to ASN.1\n *\n * @param {String} oid Signature algorithm OID\n * @param params The signature parametrs object\n * @return ASN.1 object representing signature parameters\n */\nfunction _signatureParametersToAsn1(oid, params) {\n switch(oid) {\n case oids['RSASSA-PSS']:\n var parts = [];\n\n if(params.hash.algorithmOid !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.hash.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ]));\n }\n\n if(params.mgf.algorithmOid !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.mgf.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.mgf.hash.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ])\n ]));\n }\n\n if(params.saltLength !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(params.saltLength).getBytes())\n ]));\n }\n\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, parts);\n\n default:\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '');\n }\n}\n\n/**\n * Converts a certification request's attributes to an ASN.1 set of\n * CRIAttributes.\n *\n * @param csr certification request.\n *\n * @return the ASN.1 set of CRIAttributes.\n */\nfunction _CRIAttributesToAsn1(csr) {\n // create an empty context-specific container\n var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n\n // no attributes, return empty container\n if(csr.attributes.length === 0) {\n return rval;\n }\n\n // each attribute has a sequence with a type and a set of values\n var attrs = csr.attributes;\n for(var i = 0; i < attrs.length; ++i) {\n var attr = attrs[i];\n var value = attr.value;\n\n // reuse tag class for attribute value if available\n var valueTagClass = asn1.Type.UTF8;\n if('valueTagClass' in attr) {\n valueTagClass = attr.valueTagClass;\n }\n if(valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(value);\n }\n var valueConstructed = false;\n if('valueConstructed' in attr) {\n valueConstructed = attr.valueConstructed;\n }\n // FIXME: handle more encodings\n\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(attr.type).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n // AttributeValue\n asn1.create(\n asn1.Class.UNIVERSAL, valueTagClass, valueConstructed, value)\n ])\n ]);\n rval.value.push(seq);\n }\n\n return rval;\n}\n\nvar jan_1_1950 = new Date('1950-01-01T00:00:00Z');\nvar jan_1_2050 = new Date('2050-01-01T00:00:00Z');\n\n/**\n * Converts a Date object to ASN.1\n * Handles the different format before and after 1st January 2050\n *\n * @param date date object.\n *\n * @return the ASN.1 object representing the date.\n */\nfunction _dateToAsn1(date) {\n if(date >= jan_1_1950 && date < jan_1_2050) {\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false,\n asn1.dateToUtcTime(date));\n } else {\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false,\n asn1.dateToGeneralizedTime(date));\n }\n}\n\n/**\n * Gets the ASN.1 TBSCertificate part of an X.509v3 certificate.\n *\n * @param cert the certificate.\n *\n * @return the asn1 TBSCertificate.\n */\npki.getTBSCertificate = function(cert) {\n // TBSCertificate\n var notBefore = _dateToAsn1(cert.validity.notBefore);\n var notAfter = _dateToAsn1(cert.validity.notAfter);\n var tbs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // integer\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(cert.version).getBytes())\n ]),\n // serialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(cert.serialNumber)),\n // signature\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(cert.siginfo.algorithmOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(\n cert.siginfo.algorithmOid, cert.siginfo.parameters)\n ]),\n // issuer\n _dnToAsn1(cert.issuer),\n // validity\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n notBefore,\n notAfter\n ]),\n // subject\n _dnToAsn1(cert.subject),\n // SubjectPublicKeyInfo\n pki.publicKeyToAsn1(cert.publicKey)\n ]);\n\n if(cert.issuer.uniqueId) {\n // issuerUniqueID (optional)\n tbs.value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n // TODO: support arbitrary bit length ids\n String.fromCharCode(0x00) +\n cert.issuer.uniqueId\n )\n ])\n );\n }\n if(cert.subject.uniqueId) {\n // subjectUniqueID (optional)\n tbs.value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n // TODO: support arbitrary bit length ids\n String.fromCharCode(0x00) +\n cert.subject.uniqueId\n )\n ])\n );\n }\n\n if(cert.extensions.length > 0) {\n // extensions (optional)\n tbs.value.push(pki.certificateExtensionsToAsn1(cert.extensions));\n }\n\n return tbs;\n};\n\n/**\n * Gets the ASN.1 CertificationRequestInfo part of a\n * PKCS#10 CertificationRequest.\n *\n * @param csr the certification request.\n *\n * @return the asn1 CertificationRequestInfo.\n */\npki.getCertificationRequestInfo = function(csr) {\n // CertificationRequestInfo\n var cri = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(csr.version).getBytes()),\n // subject\n _dnToAsn1(csr.subject),\n // SubjectPublicKeyInfo\n pki.publicKeyToAsn1(csr.publicKey),\n // attributes\n 
_CRIAttributesToAsn1(csr)\n ]);\n\n return cri;\n};\n\n/**\n * Converts a DistinguishedName (subject or issuer) to an ASN.1 object.\n *\n * @param dn the DistinguishedName.\n *\n * @return the asn1 representation of a DistinguishedName.\n */\npki.distinguishedNameToAsn1 = function(dn) {\n return _dnToAsn1(dn);\n};\n\n/**\n * Converts an X.509v3 RSA certificate to an ASN.1 object.\n *\n * @param cert the certificate.\n *\n * @return the asn1 representation of an X.509v3 RSA certificate.\n */\npki.certificateToAsn1 = function(cert) {\n // prefer cached TBSCertificate over generating one\n var tbsCertificate = cert.tbsCertificate || pki.getTBSCertificate(cert);\n\n // Certificate\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // TBSCertificate\n tbsCertificate,\n // AlgorithmIdentifier (signature algorithm)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(cert.signatureOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(cert.signatureOid, cert.signatureParameters)\n ]),\n // SignatureValue\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n String.fromCharCode(0x00) + cert.signature)\n ]);\n};\n\n/**\n * Converts X.509v3 certificate extensions to ASN.1.\n *\n * @param exts the extensions to convert.\n *\n * @return the extensions in ASN.1 format.\n */\npki.certificateExtensionsToAsn1 = function(exts) {\n // create top-level extension container\n var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 3, true, []);\n\n // create extension sequence (stores a sequence for each extension)\n var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n rval.value.push(seq);\n\n for(var i = 0; i < exts.length; ++i) {\n seq.value.push(pki.certificateExtensionToAsn1(exts[i]));\n }\n\n return rval;\n};\n\n/**\n * Converts a single certificate extension to ASN.1.\n *\n * @param ext the extension to convert.\n *\n * @return the extension in ASN.1 format.\n */\npki.certificateExtensionToAsn1 = function(ext) {\n // create a sequence for each extension\n var extseq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // extnID (OID)\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(ext.id).getBytes()));\n\n // critical defaults to false\n if(ext.critical) {\n // critical BOOLEAN DEFAULT FALSE\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false,\n String.fromCharCode(0xFF)));\n }\n\n var value = ext.value;\n if(typeof ext.value !== 'string') {\n // value is asn.1\n value = asn1.toDer(value).getBytes();\n }\n\n // extnValue (OCTET STRING)\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, value));\n\n return extseq;\n};\n\n/**\n * Converts a PKCS#10 certification request to an ASN.1 object.\n *\n * @param csr the certification request.\n *\n * @return the asn1 representation of a certification request.\n */\npki.certificationRequestToAsn1 = function(csr) {\n // prefer cached CertificationRequestInfo over generating one\n var cri = csr.certificationRequestInfo ||\n pki.getCertificationRequestInfo(csr);\n\n // Certificate\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // CertificationRequestInfo\n cri,\n // AlgorithmIdentifier (signature algorithm)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(csr.signatureOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(csr.signatureOid, csr.signatureParameters)\n ]),\n // signature\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n String.fromCharCode(0x00) + csr.signature)\n ]);\n};\n\n/**\n * Creates a CA store.\n *\n * @param certs an optional array of certificate objects or PEM-formatted\n * certificate strings to add to the CA store.\n *\n * @return the CA store.\n */\npki.createCaStore = function(certs) {\n // create CA store\n var caStore = {\n // stored certificates\n certs: {}\n };\n\n /**\n * Gets the certificate that issued the passed certificate or its\n * 'parent'.\n *\n * @param cert the certificate to get the parent for.\n *\n * @return the parent certificate or null if none was found.\n */\n caStore.getIssuer = function(cert) {\n var rval = getBySubject(cert.issuer);\n\n // see if there are multiple matches\n /*if(forge.util.isArray(rval)) {\n // TODO: resolve multiple matches by checking\n // authorityKey/subjectKey/issuerUniqueID/other identifiers, etc.\n // FIXME: or alternatively do authority key mapping\n // if possible (X.509v1 certs can't work?)\n throw new Error('Resolving multiple issuer matches not implemented yet.');\n }*/\n\n return rval;\n };\n\n /**\n * Adds a trusted certificate to the store.\n *\n * @param cert the certificate to add as a trusted certificate (either a\n * pki.certificate object or a PEM-formatted certificate).\n */\n caStore.addCertificate = function(cert) {\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n\n ensureSubjectHasHash(cert.subject);\n\n if(!caStore.hasCertificate(cert)) { // avoid duplicate certificates in store\n if(cert.subject.hash in caStore.certs) {\n // subject hash already exists, append to array\n var tmp = caStore.certs[cert.subject.hash];\n if(!forge.util.isArray(tmp)) {\n tmp = [tmp];\n }\n tmp.push(cert);\n caStore.certs[cert.subject.hash] = tmp;\n } else {\n caStore.certs[cert.subject.hash] = cert;\n }\n }\n };\n\n /**\n * Checks to see if the given certificate is in the store.\n *\n * @param cert the certificate to check (either a pki.certificate or a\n * PEM-formatted certificate).\n *\n * @return true if the certificate is in the store, false if not.\n */\n caStore.hasCertificate = function(cert) {\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n\n var match = getBySubject(cert.subject);\n if(!match) {\n return false;\n }\n if(!forge.util.isArray(match)) {\n match = [match];\n }\n // compare DER-encoding of certificates\n var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes();\n for(var i = 0; i < match.length; ++i) {\n var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes();\n if(der1 === der2) {\n return true;\n }\n }\n return false;\n };\n\n /**\n * Lists all of the certificates kept in the store.\n *\n * @return an array of all of the pki.certificate objects in the store.\n */\n caStore.listAllCertificates = function() {\n var certList = [];\n\n for(var hash in caStore.certs) {\n if(caStore.certs.hasOwnProperty(hash)) {\n var value = caStore.certs[hash];\n if(!forge.util.isArray(value)) {\n certList.push(value);\n } else {\n for(var i = 0; i < value.length; ++i) {\n certList.push(value[i]);\n }\n }\n }\n }\n\n return certList;\n };\n\n /**\n * Removes a certificate from the store.\n *\n * @param cert the certificate to remove (either a pki.certificate or a\n * 
PEM-formatted certificate).\n *\n * @return the certificate that was removed or null if the certificate\n * wasn't in store.\n */\n caStore.removeCertificate = function(cert) {\n var result;\n\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n ensureSubjectHasHash(cert.subject);\n if(!caStore.hasCertificate(cert)) {\n return null;\n }\n\n var match = getBySubject(cert.subject);\n\n if(!forge.util.isArray(match)) {\n result = caStore.certs[cert.subject.hash];\n delete caStore.certs[cert.subject.hash];\n return result;\n }\n\n // compare DER-encoding of certificates\n var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes();\n for(var i = 0; i < match.length; ++i) {\n var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes();\n if(der1 === der2) {\n result = match[i];\n match.splice(i, 1);\n }\n }\n if(match.length === 0) {\n delete caStore.certs[cert.subject.hash];\n }\n\n return result;\n };\n\n function getBySubject(subject) {\n ensureSubjectHasHash(subject);\n return caStore.certs[subject.hash] || null;\n }\n\n function ensureSubjectHasHash(subject) {\n // produce subject hash if it doesn't exist\n if(!subject.hash) {\n var md = forge.md.sha1.create();\n subject.attributes = pki.RDNAttributesAsArray(_dnToAsn1(subject), md);\n subject.hash = md.digest().toHex();\n }\n }\n\n // auto-add passed in certs\n if(certs) {\n // parse PEM-formatted certificates as necessary\n for(var i = 0; i < certs.length; ++i) {\n var cert = certs[i];\n caStore.addCertificate(cert);\n }\n }\n\n return caStore;\n};\n\n/**\n * Certificate verification errors, based on TLS.\n */\npki.certificateError = {\n bad_certificate: 'forge.pki.BadCertificate',\n unsupported_certificate: 'forge.pki.UnsupportedCertificate',\n certificate_revoked: 'forge.pki.CertificateRevoked',\n certificate_expired: 'forge.pki.CertificateExpired',\n certificate_unknown: 'forge.pki.CertificateUnknown',\n unknown_ca: 'forge.pki.UnknownCertificateAuthority'\n};\n\n/**\n * Verifies a certificate chain against the given Certificate Authority store\n * with an optional custom verify callback.\n *\n * @param caStore a certificate store to verify against.\n * @param chain the certificate chain to verify, with the root or highest\n * authority at the end (an array of certificates).\n * @param options a callback to be called for every certificate in the chain or\n * an object with:\n * verify a callback to be called for every certificate in the\n * chain\n * validityCheckDate the date against which the certificate\n * validity period should be checked. Pass null to not check\n * the validity period. 
By default, the current date is used.\n *\n * The verify callback has the following signature:\n *\n * verified - Set to true if certificate was verified, otherwise the\n * pki.certificateError for why the certificate failed.\n * depth - The current index in the chain, where 0 is the end point's cert.\n * certs - The certificate chain, *NOTE* an empty chain indicates an anonymous\n * end point.\n *\n * The function returns true on success and on failure either the appropriate\n * pki.certificateError or an object with 'error' set to the appropriate\n * pki.certificateError and 'message' set to a custom error message.\n *\n * @return true if successful, error thrown if not.\n */\npki.verifyCertificateChain = function(caStore, chain, options) {\n /* From: RFC3280 - Internet X.509 Public Key Infrastructure Certificate\n Section 6: Certification Path Validation\n See inline parentheticals related to this particular implementation.\n\n The primary goal of path validation is to verify the binding between\n a subject distinguished name or a subject alternative name and subject\n public key, as represented in the end entity certificate, based on the\n public key of the trust anchor. This requires obtaining a sequence of\n certificates that support that binding. That sequence should be provided\n in the passed 'chain'. The trust anchor should be in the given CA\n store. The 'end entity' certificate is the certificate provided by the\n end point (typically a server) and is the first in the chain.\n\n To meet this goal, the path validation process verifies, among other\n things, that a prospective certification path (a sequence of n\n certificates or a 'chain') satisfies the following conditions:\n\n (a) for all x in {1, ..., n-1}, the subject of certificate x is\n the issuer of certificate x+1;\n\n (b) certificate 1 is issued by the trust anchor;\n\n (c) certificate n is the certificate to be validated; and\n\n (d) for all x in {1, ..., n}, the certificate was valid at the\n time in question.\n\n Note that here 'n' is index 0 in the chain and 1 is the last certificate\n in the chain and it must be signed by a certificate in the connection's\n CA store.\n\n The path validation process also determines the set of certificate\n policies that are valid for this path, based on the certificate policies\n extension, policy mapping extension, policy constraints extension, and\n inhibit any-policy extension.\n\n Note: Policy mapping extension not supported (Not Required).\n\n Note: If the certificate has an unsupported critical extension, then it\n must be rejected.\n\n Note: A certificate is self-issued if the DNs that appear in the subject\n and issuer fields are identical and are not empty.\n\n The path validation algorithm assumes the following seven inputs are\n provided to the path processing logic. What this specific implementation\n will use is provided parenthetically:\n\n (a) a prospective certification path of length n (the 'chain')\n (b) the current date/time: ('now').\n (c) user-initial-policy-set: A set of certificate policy identifiers\n naming the policies that are acceptable to the certificate user.\n The user-initial-policy-set contains the special value any-policy\n if the user is not concerned about certificate policy\n (Not implemented. Any policy is accepted).\n (d) trust anchor information, describing a CA that serves as a trust\n anchor for the certification path. 
The trust anchor information\n includes:\n\n (1) the trusted issuer name,\n (2) the trusted public key algorithm,\n (3) the trusted public key, and\n (4) optionally, the trusted public key parameters associated\n with the public key.\n\n (Trust anchors are provided via certificates in the CA store).\n\n The trust anchor information may be provided to the path processing\n procedure in the form of a self-signed certificate. The trusted anchor\n information is trusted because it was delivered to the path processing\n procedure by some trustworthy out-of-band procedure. If the trusted\n public key algorithm requires parameters, then the parameters are\n provided along with the trusted public key (No parameters used in this\n implementation).\n\n (e) initial-policy-mapping-inhibit, which indicates if policy mapping is\n allowed in the certification path.\n (Not implemented, no policy checking)\n\n (f) initial-explicit-policy, which indicates if the path must be valid\n for at least one of the certificate policies in the user-initial-\n policy-set.\n (Not implemented, no policy checking)\n\n (g) initial-any-policy-inhibit, which indicates whether the\n anyPolicy OID should be processed if it is included in a\n certificate.\n (Not implemented, so any policy is valid provided that it is\n not marked as critical) */\n\n /* Basic Path Processing:\n\n For each certificate in the 'chain', the following is checked:\n\n 1. The certificate validity period includes the current time.\n 2. The certificate was signed by its parent (where the parent is either\n the next in the chain or from the CA store). Allow processing to\n continue to the next step if no parent is found but the certificate is\n in the CA store.\n 3. TODO: The certificate has not been revoked.\n 4. The certificate issuer name matches the parent's subject name.\n 5. TODO: If the certificate is self-issued and not the final certificate\n in the chain, skip this step, otherwise verify that the subject name\n is within one of the permitted subtrees of X.500 distinguished names\n and that each of the alternative names in the subjectAltName extension\n (critical or non-critical) is within one of the permitted subtrees for\n that name type.\n 6. TODO: If the certificate is self-issued and not the final certificate\n in the chain, skip this step, otherwise verify that the subject name\n is not within one of the excluded subtrees for X.500 distinguished\n names and none of the subjectAltName extension names are excluded for\n that name type.\n 7. The other steps in the algorithm for basic path processing involve\n handling the policy extension which is not presently supported in this\n implementation. Instead, if a critical policy extension is found, the\n certificate is rejected as not supported.\n 8. If the certificate is not the first or if its the only certificate in\n the chain (having no parent from the CA store or is self-signed) and it\n has a critical key usage extension, verify that the keyCertSign bit is\n set. If the key usage extension exists, verify that the basic\n constraints extension exists. If the basic constraints extension exists,\n verify that the cA flag is set. If pathLenConstraint is set, ensure that\n the number of certificates that precede in the chain (come earlier\n in the chain as implemented below), excluding the very first in the\n chain (typically the end-entity one), isn't greater than the\n pathLenConstraint. 
This constraint limits the number of intermediate\n CAs that may appear below a CA before only end-entity certificates\n may be issued. */\n\n // if a verify callback is passed as the third parameter, package it within\n // the options object. This is to support a legacy function signature that\n // expected the verify callback as the third parameter.\n if(typeof options === 'function') {\n options = {verify: options};\n }\n options = options || {};\n\n // copy cert chain references to another array to protect against changes\n // in verify callback\n chain = chain.slice(0);\n var certs = chain.slice(0);\n\n var validityCheckDate = options.validityCheckDate;\n // if no validityCheckDate is specified, default to the current date. Make\n // sure to maintain the value null because it indicates that the validity\n // period should not be checked.\n if(typeof validityCheckDate === 'undefined') {\n validityCheckDate = new Date();\n }\n\n // verify each cert in the chain using its parent, where the parent\n // is either the next in the chain or from the CA store\n var first = true;\n var error = null;\n var depth = 0;\n do {\n var cert = chain.shift();\n var parent = null;\n var selfSigned = false;\n\n if(validityCheckDate) {\n // 1. check valid time\n if(validityCheckDate < cert.validity.notBefore ||\n validityCheckDate > cert.validity.notAfter) {\n error = {\n message: 'Certificate is not valid yet or has expired.',\n error: pki.certificateError.certificate_expired,\n notBefore: cert.validity.notBefore,\n notAfter: cert.validity.notAfter,\n // TODO: we might want to reconsider renaming 'now' to\n // 'validityCheckDate' should this API be changed in the future.\n now: validityCheckDate\n };\n }\n }\n\n // 2. verify with parent from chain or CA store\n if(error === null) {\n parent = chain[0] || caStore.getIssuer(cert);\n if(parent === null) {\n // check for self-signed cert\n if(cert.isIssuer(cert)) {\n selfSigned = true;\n parent = cert;\n }\n }\n\n if(parent) {\n // FIXME: current CA store implementation might have multiple\n // certificates where the issuer can't be determined from the\n // certificate (happens rarely with, eg: old certificates) so normalize\n // by always putting parents into an array\n // TODO: there's may be an extreme degenerate case currently uncovered\n // where an old intermediate certificate seems to have a matching parent\n // but none of the parents actually verify ... but the intermediate\n // is in the CA and it should pass this check; needs investigation\n var parents = parent;\n if(!forge.util.isArray(parents)) {\n parents = [parents];\n }\n\n // try to verify with each possible parent (typically only one)\n var verified = false;\n while(!verified && parents.length > 0) {\n parent = parents.shift();\n try {\n verified = parent.verify(cert);\n } catch(ex) {\n // failure to verify, don't care why, try next one\n }\n }\n\n if(!verified) {\n error = {\n message: 'Certificate signature is invalid.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n\n if(error === null && (!parent || selfSigned) &&\n !caStore.hasCertificate(cert)) {\n // no parent issuer and certificate itself is not trusted\n error = {\n message: 'Certificate is not trusted.',\n error: pki.certificateError.unknown_ca\n };\n }\n }\n\n // TODO: 3. check revoked\n\n // 4. 
check for matching issuer/subject\n if(error === null && parent && !cert.isIssuer(parent)) {\n // parent is not issuer\n error = {\n message: 'Certificate issuer is invalid.',\n error: pki.certificateError.bad_certificate\n };\n }\n\n // 5. TODO: check names with permitted names tree\n\n // 6. TODO: check names against excluded names tree\n\n // 7. check for unsupported critical extensions\n if(error === null) {\n // supported extensions\n var se = {\n keyUsage: true,\n basicConstraints: true\n };\n for(var i = 0; error === null && i < cert.extensions.length; ++i) {\n var ext = cert.extensions[i];\n if(ext.critical && !(ext.name in se)) {\n error = {\n message:\n 'Certificate has an unsupported critical extension.',\n error: pki.certificateError.unsupported_certificate\n };\n }\n }\n }\n\n // 8. check for CA if cert is not first or is the only certificate\n // remaining in chain with no parent or is self-signed\n if(error === null &&\n (!first || (chain.length === 0 && (!parent || selfSigned)))) {\n // first check keyUsage extension and then basic constraints\n var bcExt = cert.getExtension('basicConstraints');\n var keyUsageExt = cert.getExtension('keyUsage');\n if(keyUsageExt !== null) {\n // keyCertSign must be true and there must be a basic\n // constraints extension\n if(!keyUsageExt.keyCertSign || bcExt === null) {\n // bad certificate\n error = {\n message:\n 'Certificate keyUsage or basicConstraints conflict ' +\n 'or indicate that the certificate is not a CA. ' +\n 'If the certificate is the only one in the chain or ' +\n 'isn\\'t the first then the certificate must be a ' +\n 'valid CA.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n // basic constraints cA flag must be set\n if(error === null && bcExt !== null && !bcExt.cA) {\n // bad certificate\n error = {\n message:\n 'Certificate basicConstraints indicates the certificate ' +\n 'is not a CA.',\n error: pki.certificateError.bad_certificate\n };\n }\n // if error is not null and keyUsage is available, then we know it\n // has keyCertSign and there is a basic constraints extension too,\n // which means we can check pathLenConstraint (if it exists)\n if(error === null && keyUsageExt !== null &&\n 'pathLenConstraint' in bcExt) {\n // pathLen is the maximum # of intermediate CA certs that can be\n // found between the current certificate and the end-entity (depth 0)\n // certificate; this number does not include the end-entity (depth 0,\n // last in the chain) even if it happens to be a CA certificate itself\n var pathLen = depth - 1;\n if(pathLen > bcExt.pathLenConstraint) {\n // pathLenConstraint violated, bad certificate\n error = {\n message:\n 'Certificate basicConstraints pathLenConstraint violated.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n }\n\n // call application callback\n var vfd = (error === null) ? true : error.error;\n var ret = options.verify ? 
options.verify(vfd, depth, certs) : vfd;\n if(ret === true) {\n // clear any set error\n error = null;\n } else {\n // if passed basic tests, set default message and alert\n if(vfd === true) {\n error = {\n message: 'The application rejected the certificate.',\n error: pki.certificateError.bad_certificate\n };\n }\n\n // check for custom error info\n if(ret || ret === 0) {\n // set custom message and error\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.error) {\n error.error = ret.error;\n }\n } else if(typeof ret === 'string') {\n // set custom error\n error.error = ret;\n }\n }\n\n // throw error\n throw error;\n }\n\n // no longer first cert in chain\n first = false;\n ++depth;\n } while(chain.length > 0);\n\n return true;\n};\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\nconst Queue = require('yocto-queue');\n\nconst pLimit = concurrency => {\n\tif (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) {\n\t\tthrow new TypeError('Expected `concurrency` to be a number from 1 and up');\n\t}\n\n\tconst queue = new Queue();\n\tlet activeCount = 0;\n\n\tconst next = () => {\n\t\tactiveCount--;\n\n\t\tif (queue.size > 0) {\n\t\t\tqueue.dequeue()();\n\t\t}\n\t};\n\n\tconst run = async (fn, resolve, ...args) => {\n\t\tactiveCount++;\n\n\t\tconst result = (async () => fn(...args))();\n\n\t\tresolve(result);\n\n\t\ttry {\n\t\t\tawait result;\n\t\t} catch {}\n\n\t\tnext();\n\t};\n\n\tconst enqueue = (fn, resolve, ...args) => {\n\t\tqueue.enqueue(run.bind(null, fn, resolve, ...args));\n\n\t\t(async () => {\n\t\t\t// This function needs to wait until the next microtask before comparing\n\t\t\t// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously\n\t\t\t// when the run function is dequeued and called. 
The comparison in the if-statement\n\t\t\t// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.\n\t\t\tawait Promise.resolve();\n\n\t\t\tif (activeCount < concurrency && queue.size > 0) {\n\t\t\t\tqueue.dequeue()();\n\t\t\t}\n\t\t})();\n\t};\n\n\tconst generator = (fn, ...args) => new Promise(resolve => {\n\t\tenqueue(fn, resolve, ...args);\n\t});\n\n\tObject.defineProperties(generator, {\n\t\tactiveCount: {\n\t\t\tget: () => activeCount\n\t\t},\n\t\tpendingCount: {\n\t\t\tget: () => queue.size\n\t\t},\n\t\tclearQueue: {\n\t\t\tvalue: () => {\n\t\t\t\tqueue.clear();\n\t\t\t}\n\t\t}\n\t});\n\n\treturn generator;\n};\n\nmodule.exports = pLimit;\n","'use strict';\n\nconst codes = {};\n\nfunction createErrorType(code, message, Base) {\n if (!Base) {\n Base = Error\n }\n\n function getMessage (arg1, arg2, arg3) {\n if (typeof message === 'string') {\n return message\n } else {\n return message(arg1, arg2, arg3)\n }\n }\n\n class NodeError extends Base {\n constructor (arg1, arg2, arg3) {\n super(getMessage(arg1, arg2, arg3));\n }\n }\n\n NodeError.prototype.name = Base.name;\n NodeError.prototype.code = code;\n\n codes[code] = NodeError;\n}\n\n// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js\nfunction oneOf(expected, thing) {\n if (Array.isArray(expected)) {\n const len = expected.length;\n expected = expected.map((i) => String(i));\n if (len > 2) {\n return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +\n expected[len - 1];\n } else if (len === 2) {\n return `one of ${thing} ${expected[0]} or ${expected[1]}`;\n } else {\n return `of ${thing} ${expected[0]}`;\n }\n } else {\n return `of ${thing} ${String(expected)}`;\n }\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith\nfunction startsWith(str, search, pos) {\n\treturn str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith\nfunction endsWith(str, search, this_len) {\n\tif (this_len === undefined || this_len > str.length) {\n\t\tthis_len = str.length;\n\t}\n\treturn str.substring(this_len - search.length, this_len) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes\nfunction includes(str, search, start) {\n if (typeof start !== 'number') {\n start = 0;\n }\n\n if (start + search.length > str.length) {\n return false;\n } else {\n return str.indexOf(search, start) !== -1;\n }\n}\n\ncreateErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {\n return 'The value \"' + value + '\" is invalid for option \"' + name + '\"'\n}, TypeError);\ncreateErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {\n // determiner: 'must be' or 'must not be'\n let determiner;\n if (typeof expected === 'string' && startsWith(expected, 'not ')) {\n determiner = 'must not be';\n expected = expected.replace(/^not /, '');\n } else {\n determiner = 'must be';\n }\n\n let msg;\n if (endsWith(name, ' argument')) {\n // For cases like 'first argument'\n msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;\n } else {\n const type = includes(name, '.') ? 'property' : 'argument';\n msg = `The \"${name}\" ${type} ${determiner} ${oneOf(expected, 'type')}`;\n }\n\n msg += `. 
Received type ${typeof actual}`;\n return msg;\n}, TypeError);\ncreateErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');\ncreateErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {\n return 'The ' + name + ' method is not implemented'\n});\ncreateErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');\ncreateErrorType('ERR_STREAM_DESTROYED', function (name) {\n return 'Cannot call ' + name + ' after a stream was destroyed';\n});\ncreateErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');\ncreateErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');\ncreateErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');\ncreateErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);\ncreateErrorType('ERR_UNKNOWN_ENCODING', function (arg) {\n return 'Unknown encoding: ' + arg\n}, TypeError);\ncreateErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');\n\nmodule.exports.codes = codes;\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n\n'use strict';\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n for (var key in obj) keys.push(key);\n return keys;\n};\n/**/\n\nmodule.exports = Duplex;\nconst Readable = require('./_stream_readable');\nconst Writable = require('./_stream_writable');\nrequire('inherits')(Duplex, Readable);\n{\n // Allow the keys array to be GC'ed.\n const keys = objectKeys(Writable.prototype);\n for (var v = 0; v < keys.length; v++) {\n const method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n Readable.call(this, options);\n Writable.call(this, options);\n this.allowHalfOpen = true;\n if (options) {\n if (options.readable === false) this.readable = false;\n if (options.writable === false) this.writable = false;\n if (options.allowHalfOpen === false) {\n this.allowHalfOpen = false;\n this.once('end', onend);\n }\n }\n}\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.highWaterMark;\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\n\n// the no-half-open enforcer\nfunction onend() {\n // If the writable side ended, then we're ok.\n if (this._writableState.ended) return;\n\n // no more data can be written.\n // But allow more writes to happen in this tick.\n process.nextTick(onEndNT, this);\n}\nfunction onEndNT(self) {\n self.end();\n}\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n'use strict';\n\nmodule.exports = PassThrough;\nconst Transform = require('./_stream_transform');\nrequire('inherits')(PassThrough, Transform);\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n Transform.call(this, options);\n}\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\nmodule.exports = Readable;\n\n/**/\nvar Duplex;\n/**/\n\nReadable.ReadableState = ReadableState;\n\n/**/\nconst EE = require('events').EventEmitter;\nvar EElistenerCount = function EElistenerCount(emitter, type) {\n return emitter.listeners(type).length;\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/**/\nconst debugUtil = require('util');\nlet debug;\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function debug() {};\n}\n/**/\n\nconst BufferList = require('./internal/streams/buffer_list');\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;\n\n// Lazy loaded to improve the startup performance.\nlet StringDecoder;\nlet createReadableStreamAsyncIterator;\nlet from;\nrequire('inherits')(Readable, Stream);\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nconst kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\nfunction ReadableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\n\n // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex);\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false;\n\n // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. 
We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n this.sync = true;\n\n // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n this.paused = true;\n\n // Should close be emitted on destroy. Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'end' (and potentially 'finish')\n this.autoDestroy = !!options.autoDestroy;\n\n // has it been destroyed\n this.destroyed = false;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // the number of writers that are awaiting a drain event in .pipe()s\n this.awaitDrain = 0;\n\n // if true, a maybeReadMore has been scheduled\n this.readingMore = false;\n this.decoder = null;\n this.encoding = null;\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\nfunction Readable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n if (!(this instanceof Readable)) return new Readable(options);\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the ReadableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n this._readableState = new ReadableState(options, this, isDuplex);\n\n // legacy\n this.readable = true;\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n Stream.call(this);\n}\nObject.defineProperty(Readable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined) {\n return false;\n }\n return this._readableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n }\n});\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\nReadable.prototype._destroy = function (err, cb) {\n cb(err);\n};\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n};\n\n// Unshift should *always* be something directly out of 
read()\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n debug('readableAddChunk', chunk);\n var state = stream._readableState;\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n if (er) {\n errorOrDestroy(stream, er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (addToFront) {\n if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());\n } else if (state.destroyed) {\n return false;\n } else {\n state.reading = false;\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n maybeReadMore(stream, state);\n }\n }\n\n // We can push more data if we are below the highWaterMark.\n // Also, if we have no data yet, we can stand some more bytes.\n // This is to work around cases where hwm=0, such as the repl.\n return !state.ended && (state.length < state.highWaterMark || state.length === 0);\n}\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n state.awaitDrain = 0;\n stream.emit('data', chunk);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 
1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n if (state.needReadable) emitReadable(stream);\n }\n maybeReadMore(stream, state);\n}\nfunction chunkInvalid(state, chunk) {\n var er;\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);\n }\n return er;\n}\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n};\n\n// backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n const decoder = new StringDecoder(enc);\n this._readableState.decoder = decoder;\n // If setEncoding(null), decoder.encoding equals utf8\n this._readableState.encoding = this._readableState.decoder.encoding;\n\n // Iterate over current buffer to convert already stored Buffers:\n let p = this._readableState.buffer.head;\n let content = '';\n while (p !== null) {\n content += decoder.write(p.data);\n p = p.next;\n }\n this._readableState.buffer.clear();\n if (content !== '') this._readableState.buffer.push(content);\n this._readableState.length = content.length;\n return this;\n};\n\n// Don't raise the hwm > 1GB\nconst MAX_HWM = 0x40000000;\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n return n;\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n }\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n;\n // Don't have enough\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n }\n return state.length;\n}\n\n// you can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n if (n !== 0) state.emittedReadable = false;\n\n // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n n = howMuchToRead(n, state);\n\n // if we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. 
The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n var doRead = state.needReadable;\n debug('need readable', doRead);\n\n // if we currently have less than the highWaterMark, then also read some\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n }\n\n // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true;\n // if the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.needReadable = true;\n // call internal read method\n this._read(state.highWaterMark);\n state.sync = false;\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n if (ret === null) {\n state.needReadable = state.length <= state.highWaterMark;\n n = 0;\n } else {\n state.length -= n;\n state.awaitDrain = 0;\n }\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true;\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this);\n }\n if (ret !== null) this.emit('data', ret);\n return ret;\n};\nfunction onEofChunk(stream, state) {\n debug('onEofChunk');\n if (state.ended) return;\n if (state.decoder) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n state.ended = true;\n if (state.sync) {\n // if we are sync, wait until next tick to emit the data.\n // Otherwise we risk emitting data in the flow()\n // the readable code triggers during a read() call\n emitReadable(stream);\n } else {\n // emit 'readable' now to make sure it gets picked up.\n state.needReadable = false;\n if (!state.emittedReadable) {\n state.emittedReadable = true;\n emitReadable_(stream);\n }\n }\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. 
This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n var state = stream._readableState;\n debug('emitReadable', state.needReadable, state.emittedReadable);\n state.needReadable = false;\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n process.nextTick(emitReadable_, stream);\n }\n}\nfunction emitReadable_(stream) {\n var state = stream._readableState;\n debug('emitReadable_', state.destroyed, state.length, state.ended);\n if (!state.destroyed && (state.length || state.ended)) {\n stream.emit('readable');\n state.emittedReadable = false;\n }\n\n // The stream needs another readable event if\n // 1. It is not flowing, as the flow mechanism will take\n // care of it.\n // 2. It is not ended.\n // 3. It is below the highWaterMark, so we can schedule\n // another readable later.\n state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;\n flow(stream);\n}\n\n// at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n process.nextTick(maybeReadMore_, stream, state);\n }\n}\nfunction maybeReadMore_(stream, state) {\n // Attempt to read more data if we should.\n //\n // The conditions for reading more data are (one of):\n // - Not enough data buffered (state.length < state.highWaterMark). The loop\n // is responsible for filling the buffer with enough data if such data\n // is available. If highWaterMark is 0 and we are not in the flowing mode\n // we should _not_ attempt to buffer any extra data. We'll get more data\n // when the stream consumer calls read() instead.\n // - No data in the buffer, and the stream is in flowing mode. In this mode\n // the loop below is responsible for ensuring read() is called. Failing to\n // call read here would abort the flow and there's no other mechanism for\n // continuing the flow if the stream consumer has just subscribed to the\n // 'data' event.\n //\n // In addition to the above conditions to keep reading data, the following\n // conditions prevent the data from being read:\n // - The stream has ended (state.ended).\n // - There is already a pending 'read' operation (state.reading). This is a\n // case where the the stream has called the implementation defined _read()\n // method, but they are processing the call asynchronously and have _not_\n // called push() with new data. In this case we skip performing more\n // read()s. The execution ends in this method again after the _read() ends\n // up calling push() with more data.\n while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {\n const len = state.length;\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length)\n // didn't get any data, stop spinning.\n break;\n }\n state.readingMore = false;\n}\n\n// abstract method. 
to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));\n};\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n default:\n state.pipes.push(dest);\n break;\n }\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n var endFn = doEnd ? onend : unpipe;\n if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);\n dest.on('unpipe', onunpipe);\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n function onend() {\n debug('onend');\n dest.end();\n }\n\n // when the dest drains, it reduces the awaitDrain counter\n // on the source. This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n var cleanedUp = false;\n function cleanup() {\n debug('cleanup');\n // cleanup event handlers once the pipe is broken\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n cleanedUp = true;\n\n // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n src.on('data', ondata);\n function ondata(chunk) {\n debug('ondata');\n var ret = dest.write(chunk);\n debug('dest.write', ret);\n if (ret === false) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', state.awaitDrain);\n state.awaitDrain++;\n }\n src.pause();\n }\n }\n\n // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror);\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n dest.once('close', onclose);\n function onfinish() {\n 
debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n dest.once('finish', onfinish);\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n }\n\n // tell the dest that it's being piped to\n dest.emit('pipe', src);\n\n // start the flow if it hasn't been started already.\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n return dest;\n};\nfunction pipeOnDrain(src) {\n return function pipeOnDrainFunctionResult() {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = {\n hasUnpiped: false\n };\n\n // if we're not piping anywhere, then do nothing.\n if (state.pipesCount === 0) return this;\n\n // just one destination. most common case.\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n if (!dest) dest = state.pipes;\n\n // got a match.\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n }\n\n // slow case. multiple pipe destinations.\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, {\n hasUnpiped: false\n });\n return this;\n }\n\n // try to find the right one.\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n dest.emit('unpipe', this, unpipeInfo);\n return this;\n};\n\n// set up data events if they are asked for\n// Ensure readable listeners eventually get something\nReadable.prototype.on = function (ev, fn) {\n const res = Stream.prototype.on.call(this, ev, fn);\n const state = this._readableState;\n if (ev === 'data') {\n // update readableListening so that resume() may be a no-op\n // a few lines down. This is needed to support once('readable').\n state.readableListening = this.listenerCount('readable') > 0;\n\n // Try start flowing on next tick if stream isn't explicitly paused\n if (state.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.flowing = false;\n state.emittedReadable = false;\n debug('on readable', state.length, state.reading);\n if (state.length) {\n emitReadable(this);\n } else if (!state.reading) {\n process.nextTick(nReadingNextTick, this);\n }\n }\n }\n return res;\n};\nReadable.prototype.addListener = Readable.prototype.on;\nReadable.prototype.removeListener = function (ev, fn) {\n const res = Stream.prototype.removeListener.call(this, ev, fn);\n if (ev === 'readable') {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. 
This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nReadable.prototype.removeAllListeners = function (ev) {\n const res = Stream.prototype.removeAllListeners.apply(this, arguments);\n if (ev === 'readable' || ev === undefined) {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nfunction updateReadableListening(self) {\n const state = self._readableState;\n state.readableListening = self.listenerCount('readable') > 0;\n if (state.resumeScheduled && !state.paused) {\n // flowing needs to be set to true now, otherwise\n // the upcoming resume will not flow.\n state.flowing = true;\n\n // crude way to check if we should resume\n } else if (self.listenerCount('data') > 0) {\n self.resume();\n }\n}\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n var state = this._readableState;\n if (!state.flowing) {\n debug('resume');\n // we flow only if there is no one listening\n // for readable, but we still have to call\n // resume()\n state.flowing = !state.readableListening;\n resume(this, state);\n }\n state.paused = false;\n return this;\n};\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n process.nextTick(resume_, stream, state);\n }\n}\nfunction resume_(stream, state) {\n debug('resume', state.reading);\n if (!state.reading) {\n stream.read(0);\n }\n state.resumeScheduled = false;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n if (this._readableState.flowing !== false) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n this._readableState.paused = true;\n return this;\n};\nfunction flow(stream) {\n const state = stream._readableState;\n debug('flow', state.flowing);\n while (state.flowing && stream.read() !== null);\n}\n\n// wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n var state = this._readableState;\n var paused = false;\n stream.on('end', () => {\n debug('wrapped end');\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) this.push(chunk);\n }\n this.push(null);\n });\n stream.on('data', chunk => {\n debug('wrapped data');\n if (state.decoder) chunk = state.decoder.write(chunk);\n\n // don't skip over falsy values in objectMode\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n var ret = this.push(chunk);\n if (!ret) {\n paused = true;\n stream.pause();\n }\n });\n\n // proxy all the other methods.\n // important when wrapping filters and duplexes.\n for (var i in stream) {\n if (this[i] === undefined && typeof 
stream[i] === 'function') {\n this[i] = function methodWrap(method) {\n return function methodWrapReturnFunction() {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n }\n\n // proxy certain important events.\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n }\n\n // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n this._read = n => {\n debug('wrapped _read', n);\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n return this;\n};\nif (typeof Symbol === 'function') {\n Readable.prototype[Symbol.asyncIterator] = function () {\n if (createReadableStreamAsyncIterator === undefined) {\n createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');\n }\n return createReadableStreamAsyncIterator(this);\n };\n}\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.highWaterMark;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState && this._readableState.buffer;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableFlowing', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.flowing;\n },\n set: function set(state) {\n if (this._readableState) {\n this._readableState.flowing = state;\n }\n }\n});\n\n// exposed for testing purposes only.\nReadable._fromList = fromList;\nObject.defineProperty(Readable.prototype, 'readableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._readableState.length;\n }\n});\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = state.buffer.consume(n, state.decoder);\n }\n return ret;\n}\nfunction endReadable(stream) {\n var state = stream._readableState;\n debug('endReadable', state.endEmitted);\n if (!state.endEmitted) {\n state.ended = true;\n process.nextTick(endReadableNT, state, stream);\n }\n}\nfunction endReadableNT(state, stream) {\n debug('endReadableNT', state.endEmitted, state.length);\n\n // Check that we didn't get one last unshift.\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the 
writable side is ready for autoDestroy as well\n const wState = stream._writableState;\n if (!wState || wState.autoDestroy && wState.finished) {\n stream.destroy();\n }\n }\n }\n}\nif (typeof Symbol === 'function') {\n Readable.from = function (iterable, opts) {\n if (from === undefined) {\n from = require('./internal/streams/from');\n }\n return from(Readable, iterable, opts);\n };\n}\nfunction indexOf(xs, x) {\n for (var i = 0, l = xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n return -1;\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. 
In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n'use strict';\n\nmodule.exports = Transform;\nconst _require$codes = require('../errors').codes,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,\n ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;\nconst Duplex = require('./_stream_duplex');\nrequire('inherits')(Transform, Duplex);\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n var cb = ts.writecb;\n if (cb === null) {\n return this.emit('error', new ERR_MULTIPLE_CALLBACK());\n }\n ts.writechunk = null;\n ts.writecb = null;\n if (data != null)\n // single equals check for both `null` and `undefined`\n this.push(data);\n cb(er);\n var rs = this._readableState;\n rs.reading = false;\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n Duplex.call(this, options);\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n };\n\n // start out asking for a readable event once data is transformed.\n this._readableState.needReadable = true;\n\n // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false;\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n if (typeof options.flush === 'function') this._flush = options.flush;\n }\n\n // When the writable side finishes, then flush out anything remaining.\n this.on('prefinish', prefinish);\n}\nfunction prefinish() {\n if (typeof this._flush === 'function' && !this._readableState.destroyed) {\n this._flush((er, data) => {\n done(this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n};\n\n// This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. 
If you\n// never call cb(), then you'll never get another chunk.\nTransform.prototype._transform = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));\n};\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n};\n\n// Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n if (ts.writechunk !== null && !ts.transforming) {\n ts.transforming = true;\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\nTransform.prototype._destroy = function (err, cb) {\n Duplex.prototype._destroy.call(this, err, err2 => {\n cb(err2);\n });\n};\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n if (data != null)\n // single equals check for both `null` and `undefined`\n stream.push(data);\n\n // TODO(BridgeAR): Write a test for these two error cases\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();\n if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();\n return stream.push(null);\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n'use strict';\n\nmodule.exports = Writable;\n\n/* */\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n}\n\n// It seems a linked list but it is not\n// there will be only 2 of these for each stream\nfunction CorkedRequest(state) {\n this.next = null;\n this.entry = null;\n this.finish = () => {\n onCorkedFinish(this, state);\n };\n}\n/* */\n\n/**/\nvar Duplex;\n/**/\n\nWritable.WritableState = WritableState;\n\n/**/\nconst internalUtil = {\n deprecate: require('util-deprecate')\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,\n ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,\n ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,\n ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nrequire('inherits')(Writable, Stream);\nfunction nop() {}\nfunction WritableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream,\n // e.g. options.readableObjectMode vs. 
options.writableObjectMode, etc.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\n\n // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);\n\n // if _final has been called\n this.finalCalled = false;\n\n // drain event flag.\n this.needDrain = false;\n // at the start of calling end()\n this.ending = false;\n // when end() has been called, and returned\n this.ended = false;\n // when 'finish' is emitted\n this.finished = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0;\n\n // a flag to see when we're in the middle of a write.\n this.writing = false;\n\n // when true all writes will be buffered until .uncork() call\n this.corked = 0;\n\n // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true;\n\n // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false;\n\n // the callback that's passed to _write(chunk,cb)\n this.onwrite = function (er) {\n onwrite(stream, er);\n };\n\n // the callback that the user supplies to write(chunk,encoding,cb)\n this.writecb = null;\n\n // the amount that is being written when _write is called.\n this.writelen = 0;\n this.bufferedRequest = null;\n this.lastBufferedRequest = null;\n\n // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n this.pendingcb = 0;\n\n // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n this.prefinished = false;\n\n // True if the error was already emitted and should not be thrown again\n this.errorEmitted = false;\n\n // Should close be emitted on destroy. 
Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'finish' (and potentially 'end')\n this.autoDestroy = !!options.autoDestroy;\n\n // count buffered requests\n this.bufferedRequestCount = 0;\n\n // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n this.corkedRequestsFree = new CorkedRequest(this);\n}\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n while (current) {\n out.push(current);\n current = current.next;\n }\n return out;\n};\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function writableStateBufferGetter() {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})();\n\n// Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\nvar realHasInstance;\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function value(object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function realHasInstance(object) {\n return object instanceof this;\n };\n}\nfunction Writable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the WritableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);\n this._writableState = new WritableState(options, this, isDuplex);\n\n // legacy.\n this.writable = true;\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n if (typeof options.writev === 'function') this._writev = options.writev;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n if (typeof options.final === 'function') this._final = options.final;\n }\n Stream.call(this);\n}\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());\n};\nfunction writeAfterEnd(stream, cb) {\n var er = new ERR_STREAM_WRITE_AFTER_END();\n // TODO: defer error events consistently everywhere, not just the cb\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n}\n\n// Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. 
Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\nfunction validChunk(stream, state, chunk, cb) {\n var er;\n if (chunk === null) {\n er = new ERR_STREAM_NULL_VALUES();\n } else if (typeof chunk !== 'string' && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);\n }\n if (er) {\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n return false;\n }\n return true;\n}\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n if (typeof cb !== 'function') cb = nop;\n if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n return ret;\n};\nWritable.prototype.cork = function () {\n this._writableState.corked++;\n};\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n if (state.corked) {\n state.corked--;\n if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n return chunk;\n}\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.highWaterMark;\n }\n});\n\n// if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n var len = state.objectMode ? 
1 : chunk.length;\n state.length += len;\n var ret = state.length < state.highWaterMark;\n // we must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true;\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk,\n encoding,\n isBuf,\n callback: cb,\n next: null\n };\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n return ret;\n}\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n process.nextTick(cb, er);\n // this can emit finish, and it will always happen\n // after error\n process.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n // this can emit finish, but finish must\n // always follow error\n finishMaybe(stream, state);\n }\n}\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();\n onwriteStateUpdate(state);\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state) || stream.destroyed;\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n if (sync) {\n process.nextTick(afterWrite, stream, state, finished, cb);\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n finishMaybe(stream, state);\n}\n\n// Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n}\n\n// if there's something in the buffer waiting, then process it\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n var count = 0;\n var allBuffers = true;\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n entry = entry.next;\n count += 1;\n }\n 
buffer.allBuffers = allBuffers;\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\n\n // doWrite is almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n state.pendingcb++;\n state.lastBufferedRequest = null;\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--;\n // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n if (state.writing) {\n break;\n }\n }\n if (entry === null) state.lastBufferedRequest = null;\n }\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));\n};\nWritable.prototype._writev = null;\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\n\n // .end() fully uncorks\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n }\n\n // ignore unnecessary end() calls.\n if (!state.ending) endWritable(this, state, cb);\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\nfunction callFinal(stream, state) {\n stream._final(err => {\n state.pendingcb--;\n if (err) {\n errorOrDestroy(stream, err);\n }\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function' && !state.destroyed) {\n state.pendingcb++;\n state.finalCalled = true;\n process.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\nfunction finishMaybe(stream, state) {\n var need = needFinish(state);\n if (need) {\n prefinish(stream, state);\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the readable side is ready for autoDestroy as well\n const rState = stream._readableState;\n if (!rState || rState.autoDestroy && rState.endEmitted) {\n stream.destroy();\n }\n }\n }\n }\n return need;\n}\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n if (cb) {\n if (state.finished) process.nextTick(cb);else stream.once('finish', cb);\n }\n state.ended = true;\n stream.writable 
= false;\n}\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n }\n\n // reuse the free corkReq.\n state.corkedRequestsFree.next = corkReq;\n}\nObject.defineProperty(Writable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._writableState === undefined) {\n return false;\n }\n return this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._writableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._writableState.destroyed = value;\n }\n});\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\nWritable.prototype._destroy = function (err, cb) {\n cb(err);\n};","'use strict';\n\nconst finished = require('./end-of-stream');\nconst kLastResolve = Symbol('lastResolve');\nconst kLastReject = Symbol('lastReject');\nconst kError = Symbol('error');\nconst kEnded = Symbol('ended');\nconst kLastPromise = Symbol('lastPromise');\nconst kHandlePromise = Symbol('handlePromise');\nconst kStream = Symbol('stream');\nfunction createIterResult(value, done) {\n return {\n value,\n done\n };\n}\nfunction readAndResolve(iter) {\n const resolve = iter[kLastResolve];\n if (resolve !== null) {\n const data = iter[kStream].read();\n // we defer if data is null\n // we can be expecting either 'end' or\n // 'error'\n if (data !== null) {\n iter[kLastPromise] = null;\n iter[kLastResolve] = null;\n iter[kLastReject] = null;\n resolve(createIterResult(data, false));\n }\n }\n}\nfunction onReadable(iter) {\n // we wait for the next tick, because it might\n // emit an error with process.nextTick\n process.nextTick(readAndResolve, iter);\n}\nfunction wrapForNext(lastPromise, iter) {\n return (resolve, reject) => {\n lastPromise.then(() => {\n if (iter[kEnded]) {\n resolve(createIterResult(undefined, true));\n return;\n }\n iter[kHandlePromise](resolve, reject);\n }, reject);\n };\n}\nconst AsyncIteratorPrototype = Object.getPrototypeOf(function () {});\nconst ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({\n get stream() {\n return this[kStream];\n },\n next() {\n // if we have detected an error in the meanwhile\n // reject straight away\n const error = this[kError];\n if (error !== null) {\n return Promise.reject(error);\n }\n if (this[kEnded]) {\n return Promise.resolve(createIterResult(undefined, true));\n }\n if (this[kStream].destroyed) {\n // We need to defer via nextTick because if .destroy(err) is\n // called, the error will be emitted via nextTick, and\n // we cannot guarantee that there is no error lingering around\n // waiting to be emitted.\n return new Promise((resolve, reject) => {\n process.nextTick(() => {\n if (this[kError]) {\n reject(this[kError]);\n } else {\n resolve(createIterResult(undefined, true));\n }\n });\n });\n }\n\n // if we have multiple next() calls\n // we will wait for the previous Promise to finish\n // this logic is optimized to support for await loops,\n // where next() is only called once at a time\n const lastPromise = this[kLastPromise];\n let promise;\n if (lastPromise) {\n promise = new Promise(wrapForNext(lastPromise, this));\n } else {\n // fast path needed to support multiple 
this.push()\n // without triggering the next() queue\n const data = this[kStream].read();\n if (data !== null) {\n return Promise.resolve(createIterResult(data, false));\n }\n promise = new Promise(this[kHandlePromise]);\n }\n this[kLastPromise] = promise;\n return promise;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n return() {\n // destroy(err, cb) is a private API\n // we can guarantee we have that here, because we control the\n // Readable class this is attached to\n return new Promise((resolve, reject) => {\n this[kStream].destroy(null, err => {\n if (err) {\n reject(err);\n return;\n }\n resolve(createIterResult(undefined, true));\n });\n });\n }\n}, AsyncIteratorPrototype);\nconst createReadableStreamAsyncIterator = stream => {\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, {\n [kStream]: {\n value: stream,\n writable: true\n },\n [kLastResolve]: {\n value: null,\n writable: true\n },\n [kLastReject]: {\n value: null,\n writable: true\n },\n [kError]: {\n value: null,\n writable: true\n },\n [kEnded]: {\n value: stream._readableState.endEmitted,\n writable: true\n },\n // the function passed to new Promise\n // is cached so we avoid allocating a new\n // closure at every run\n [kHandlePromise]: {\n value: (resolve, reject) => {\n const data = iterator[kStream].read();\n if (data) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(data, false));\n } else {\n iterator[kLastResolve] = resolve;\n iterator[kLastReject] = reject;\n }\n },\n writable: true\n }\n });\n iterator[kLastPromise] = null;\n finished(stream, err => {\n if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {\n const reject = iterator[kLastReject];\n // reject if we are waiting for data in the Promise\n // returned by next() and store the error\n if (reject !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n reject(err);\n }\n iterator[kError] = err;\n return;\n }\n const resolve = iterator[kLastResolve];\n if (resolve !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(undefined, true));\n }\n iterator[kEnded] = true;\n });\n stream.on('readable', onReadable.bind(null, iterator));\n return iterator;\n};\nmodule.exports = createReadableStreamAsyncIterator;","'use strict';\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? String : Number)(input); }\nconst _require = require('buffer'),\n Buffer = _require.Buffer;\nconst _require2 = require('util'),\n inspect = _require2.inspect;\nconst custom = inspect && inspect.custom || 'inspect';\nfunction copyBuffer(src, target, offset) {\n Buffer.prototype.copy.call(src, target, offset);\n}\nmodule.exports = class BufferList {\n constructor() {\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n push(v) {\n const entry = {\n data: v,\n next: null\n };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n }\n unshift(v) {\n const entry = {\n data: v,\n next: this.head\n };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n }\n shift() {\n if (this.length === 0) return;\n const ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n }\n clear() {\n this.head = this.tail = null;\n this.length = 0;\n }\n join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n while (p = p.next) ret += s + p.data;\n return ret;\n }\n concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n const ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n return ret;\n }\n\n // Consumes a specified amount of bytes or characters from the buffered data.\n consume(n, hasStrings) {\n var ret;\n if (n < this.head.data.length) {\n // `slice` is the same for buffers and strings.\n ret = this.head.data.slice(0, n);\n this.head.data = this.head.data.slice(n);\n } else if (n === this.head.data.length) {\n // First chunk is a perfect match.\n ret = this.shift();\n } else {\n // Result spans more than one buffer.\n ret = hasStrings ? this._getString(n) : this._getBuffer(n);\n }\n return ret;\n }\n first() {\n return this.head.data;\n }\n\n // Consumes a specified amount of characters from the buffered data.\n _getString(n) {\n var p = this.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n const str = p.data;\n const nb = n > str.length ? 
str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Consumes a specified amount of bytes from the buffered data.\n _getBuffer(n) {\n const ret = Buffer.allocUnsafe(n);\n var p = this.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n while (p = p.next) {\n const buf = p.data;\n const nb = n > buf.length ? buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = buf.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Make sure the linked list only shows the minimal necessary information.\n [custom](_, options) {\n return inspect(this, _objectSpread(_objectSpread({}, options), {}, {\n // Only inspect one level.\n depth: 0,\n // It should not recurse.\n customInspect: false\n }));\n }\n};","'use strict';\n\n// undocumented cb() API, needed for core, not for public API\nfunction destroy(err, cb) {\n const readableDestroyed = this._readableState && this._readableState.destroyed;\n const writableDestroyed = this._writableState && this._writableState.destroyed;\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err) {\n if (!this._writableState) {\n process.nextTick(emitErrorNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorNT, this, err);\n }\n }\n return this;\n }\n\n // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n }\n\n // if this is a duplex stream mark the writable part as destroyed as well\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n this._destroy(err || null, err => {\n if (!cb && err) {\n if (!this._writableState) {\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n } else if (cb) {\n process.nextTick(emitCloseNT, this);\n cb(err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n });\n return this;\n}\nfunction emitErrorAndCloseNT(self, err) {\n emitErrorNT(self, err);\n emitCloseNT(self);\n}\nfunction emitCloseNT(self) {\n if (self._writableState && !self._writableState.emitClose) return;\n if (self._readableState && !self._readableState.emitClose) return;\n self.emit('close');\n}\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finalCalled = false;\n this._writableState.prefinished = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\nfunction emitErrorNT(self, err) {\n self.emit('error', 
err);\n}\nfunction errorOrDestroy(stream, err) {\n // We have tests that rely on errors being emitted\n // in the same tick, so changing this is semver major.\n // For now when you opt-in to autoDestroy we allow\n // the error to be emitted nextTick. In a future\n // semver major update we should change the default to this.\n\n const rState = stream._readableState;\n const wState = stream._writableState;\n if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);\n}\nmodule.exports = {\n destroy,\n undestroy,\n errorOrDestroy\n};","// Ported from https://github.com/mafintosh/end-of-stream with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nconst ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n callback.apply(this, args);\n };\n}\nfunction noop() {}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction eos(stream, opts, callback) {\n if (typeof opts === 'function') return eos(stream, null, opts);\n if (!opts) opts = {};\n callback = once(callback || noop);\n let readable = opts.readable || opts.readable !== false && stream.readable;\n let writable = opts.writable || opts.writable !== false && stream.writable;\n const onlegacyfinish = () => {\n if (!stream.writable) onfinish();\n };\n var writableEnded = stream._writableState && stream._writableState.finished;\n const onfinish = () => {\n writable = false;\n writableEnded = true;\n if (!readable) callback.call(stream);\n };\n var readableEnded = stream._readableState && stream._readableState.endEmitted;\n const onend = () => {\n readable = false;\n readableEnded = true;\n if (!writable) callback.call(stream);\n };\n const onerror = err => {\n callback.call(stream, err);\n };\n const onclose = () => {\n let err;\n if (readable && !readableEnded) {\n if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n if (writable && !writableEnded) {\n if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n };\n const onrequest = () => {\n stream.req.on('finish', onfinish);\n };\n if (isRequest(stream)) {\n stream.on('complete', onfinish);\n stream.on('abort', onclose);\n if (stream.req) onrequest();else stream.on('request', onrequest);\n } else if (writable && !stream._writableState) {\n // legacy streams\n stream.on('end', onlegacyfinish);\n stream.on('close', onlegacyfinish);\n }\n stream.on('end', onend);\n stream.on('finish', onfinish);\n if (opts.error !== false) stream.on('error', onerror);\n stream.on('close', onclose);\n return function () {\n stream.removeListener('complete', onfinish);\n stream.removeListener('abort', onclose);\n stream.removeListener('request', onrequest);\n if (stream.req) stream.req.removeListener('finish', onfinish);\n stream.removeListener('end', onlegacyfinish);\n stream.removeListener('close', onlegacyfinish);\n stream.removeListener('finish', onfinish);\n stream.removeListener('end', onend);\n stream.removeListener('error', onerror);\n stream.removeListener('close', onclose);\n };\n}\nmodule.exports = eos;","'use 
strict';\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? 
String : Number)(input); }\nconst ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;\nfunction from(Readable, iterable, opts) {\n let iterator;\n if (iterable && typeof iterable.next === 'function') {\n iterator = iterable;\n } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);\n const readable = new Readable(_objectSpread({\n objectMode: true\n }, opts));\n // Reading boolean to protect against _read\n // being called before last iteration completion.\n let reading = false;\n readable._read = function () {\n if (!reading) {\n reading = true;\n next();\n }\n };\n function next() {\n return _next2.apply(this, arguments);\n }\n function _next2() {\n _next2 = _asyncToGenerator(function* () {\n try {\n const _yield$iterator$next = yield iterator.next(),\n value = _yield$iterator$next.value,\n done = _yield$iterator$next.done;\n if (done) {\n readable.push(null);\n } else if (readable.push(yield value)) {\n next();\n } else {\n reading = false;\n }\n } catch (err) {\n readable.destroy(err);\n }\n });\n return _next2.apply(this, arguments);\n }\n return readable;\n}\nmodule.exports = from;","// Ported from https://github.com/mafintosh/pump with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nlet eos;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n callback(...arguments);\n };\n}\nconst _require$codes = require('../../../errors').codes,\n ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;\nfunction noop(err) {\n // Rethrow the error if it exists to avoid swallowing it\n if (err) throw err;\n}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction destroyer(stream, reading, writing, callback) {\n callback = once(callback);\n let closed = false;\n stream.on('close', () => {\n closed = true;\n });\n if (eos === undefined) eos = require('./end-of-stream');\n eos(stream, {\n readable: reading,\n writable: writing\n }, err => {\n if (err) return callback(err);\n closed = true;\n callback();\n });\n let destroyed = false;\n return err => {\n if (closed) return;\n if (destroyed) return;\n destroyed = true;\n\n // request.destroy just do .end - .abort is what we want\n if (isRequest(stream)) return stream.abort();\n if (typeof stream.destroy === 'function') return stream.destroy();\n callback(err || new ERR_STREAM_DESTROYED('pipe'));\n };\n}\nfunction call(fn) {\n fn();\n}\nfunction pipe(from, to) {\n return from.pipe(to);\n}\nfunction popCallback(streams) {\n if (!streams.length) return noop;\n if (typeof streams[streams.length - 1] !== 'function') return noop;\n return streams.pop();\n}\nfunction pipeline() {\n for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {\n streams[_key] = arguments[_key];\n }\n const callback = popCallback(streams);\n if (Array.isArray(streams[0])) streams = streams[0];\n if (streams.length < 2) {\n throw new ERR_MISSING_ARGS('streams');\n }\n let error;\n const destroys = streams.map(function (stream, i) {\n const reading = i < streams.length - 1;\n const writing = i > 0;\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err;\n if (err) 
destroys.forEach(call);\n if (reading) return;\n destroys.forEach(call);\n callback(error);\n });\n });\n return streams.reduce(pipe);\n}\nmodule.exports = pipeline;","'use strict';\n\nconst ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;\nfunction highWaterMarkFrom(options, isDuplex, duplexKey) {\n return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;\n}\nfunction getHighWaterMark(state, options, duplexKey, isDuplex) {\n const hwm = highWaterMarkFrom(options, isDuplex, duplexKey);\n if (hwm != null) {\n if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {\n const name = isDuplex ? duplexKey : 'highWaterMark';\n throw new ERR_INVALID_OPT_VALUE(name, hwm);\n }\n return Math.floor(hwm);\n }\n\n // Default value\n return state.objectMode ? 16 : 16 * 1024;\n}\nmodule.exports = {\n getHighWaterMark\n};","module.exports = require('stream');\n","var Stream = require('stream');\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream.Readable;\n Object.assign(module.exports, Stream);\n module.exports.Stream = Stream;\n} else {\n exports = module.exports = require('./lib/_stream_readable.js');\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = require('./lib/_stream_writable.js');\n exports.Duplex = require('./lib/_stream_duplex.js');\n exports.Transform = require('./lib/_stream_transform.js');\n exports.PassThrough = require('./lib/_stream_passthrough.js');\n exports.finished = require('./lib/internal/streams/end-of-stream.js');\n exports.pipeline = require('./lib/internal/streams/pipeline.js');\n}\n","'use strict';\n\nconst {PassThrough} = require('stream');\nconst debug = require('debug')('retry-request');\nconst extend = require('extend');\n\nconst DEFAULTS = {\n objectMode: false,\n retries: 2,\n\n /*\n The maximum time to delay in seconds. If retryDelayMultiplier results in a\n delay greater than maxRetryDelay, retries should delay by maxRetryDelay\n seconds instead.\n */\n maxRetryDelay: 64,\n\n /*\n The multiplier by which to increase the delay time between the completion of\n failed requests, and the initiation of the subsequent retrying request.\n */\n retryDelayMultiplier: 2,\n\n /*\n The length of time to keep retrying in seconds. The last sleep period will\n be shortened as necessary, so that the last retry runs at deadline (and not\n considerably beyond it). 
The total time starting from when the initial\n request is sent, after which an error will be returned, regardless of the\n retrying attempts made meanwhile.\n */\n totalTimeout: 600,\n\n noResponseRetries: 2,\n currentRetryAttempt: 0,\n shouldRetryFn: function (response) {\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n\n const statusCode = response.statusCode;\n debug(`Response status: ${statusCode}`);\n\n let range;\n while ((range = retryRanges.shift())) {\n if (statusCode >= range[0] && statusCode <= range[1]) {\n // Not a successful status or redirect.\n return true;\n }\n }\n },\n};\n\nfunction retryRequest(requestOpts, opts, callback) {\n const streamMode = typeof arguments[arguments.length - 1] !== 'function';\n\n if (typeof opts === 'function') {\n callback = opts;\n }\n\n const manualCurrentRetryAttemptWasSet =\n opts && typeof opts.currentRetryAttempt === 'number';\n opts = extend({}, DEFAULTS, opts);\n\n if (typeof opts.request === 'undefined') {\n try {\n // eslint-disable-next-line node/no-unpublished-require\n opts.request = require('request');\n } catch (e) {\n throw new Error('A request library must be provided to retry-request.');\n }\n }\n\n let currentRetryAttempt = opts.currentRetryAttempt;\n\n let numNoResponseAttempts = 0;\n let streamResponseHandled = false;\n\n let retryStream;\n let requestStream;\n let delayStream;\n\n let activeRequest;\n const retryRequest = {\n abort: function () {\n if (activeRequest && activeRequest.abort) {\n activeRequest.abort();\n }\n },\n };\n\n if (streamMode) {\n retryStream = new PassThrough({objectMode: opts.objectMode});\n retryStream.abort = resetStreams;\n }\n\n const timeOfFirstRequest = Date.now();\n if (currentRetryAttempt > 0) {\n retryAfterDelay(currentRetryAttempt);\n } else {\n makeRequest();\n }\n\n if (streamMode) {\n return retryStream;\n } else {\n return retryRequest;\n }\n\n function resetStreams() {\n delayStream = null;\n\n if (requestStream) {\n requestStream.abort && requestStream.abort();\n requestStream.cancel && requestStream.cancel();\n\n if (requestStream.destroy) {\n requestStream.destroy();\n } else if (requestStream.end) {\n requestStream.end();\n }\n }\n }\n\n function makeRequest() {\n currentRetryAttempt++;\n debug(`Current retry attempt: ${currentRetryAttempt}`);\n\n if (streamMode) {\n streamResponseHandled = false;\n\n delayStream = new PassThrough({objectMode: opts.objectMode});\n requestStream = opts.request(requestOpts);\n\n setImmediate(() => {\n retryStream.emit('request');\n });\n\n requestStream\n // gRPC via google-cloud-node can emit an `error` as well as a `response`\n // Whichever it emits, we run with-- we can't run with both. 
That's what\n // is up with the `streamResponseHandled` tracking.\n .on('error', err => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(err);\n })\n .on('response', (resp, body) => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(null, resp, body);\n })\n .on('complete', retryStream.emit.bind(retryStream, 'complete'));\n\n requestStream.pipe(delayStream);\n } else {\n activeRequest = opts.request(requestOpts, onResponse);\n }\n }\n\n function retryAfterDelay(currentRetryAttempt) {\n if (streamMode) {\n resetStreams();\n }\n\n const nextRetryDelay = getNextRetryDelay({\n maxRetryDelay: opts.maxRetryDelay,\n retryDelayMultiplier: opts.retryDelayMultiplier,\n retryNumber: currentRetryAttempt,\n timeOfFirstRequest,\n totalTimeout: opts.totalTimeout,\n });\n debug(`Next retry delay: ${nextRetryDelay}`);\n\n if (nextRetryDelay <= 0) {\n numNoResponseAttempts = opts.noResponseRetries + 1;\n return;\n }\n\n setTimeout(makeRequest, nextRetryDelay);\n }\n\n function onResponse(err, response, body) {\n // An error such as DNS resolution.\n if (err) {\n numNoResponseAttempts++;\n\n if (numNoResponseAttempts <= opts.noResponseRetries) {\n retryAfterDelay(numNoResponseAttempts);\n } else {\n if (streamMode) {\n retryStream.emit('error', err);\n retryStream.end();\n } else {\n callback(err, response, body);\n }\n }\n\n return;\n }\n\n // Send the response to see if we should try again.\n // NOTE: \"currentRetryAttempt\" isn't accurate by default, as it counts\n // the very first request sent as the first \"retry\". It is only accurate\n // when a user provides their own \"currentRetryAttempt\" option at\n // instantiation.\n const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet\n ? 
currentRetryAttempt\n : currentRetryAttempt - 1;\n if (\n adjustedCurrentRetryAttempt < opts.retries &&\n opts.shouldRetryFn(response)\n ) {\n retryAfterDelay(currentRetryAttempt);\n return;\n }\n\n // No more attempts need to be made, just continue on.\n if (streamMode) {\n retryStream.emit('response', response);\n delayStream.pipe(retryStream);\n requestStream.on('error', err => {\n retryStream.destroy(err);\n });\n } else {\n callback(err, response, body);\n }\n }\n}\n\nmodule.exports = retryRequest;\n\nfunction getNextRetryDelay(config) {\n const {\n maxRetryDelay,\n retryDelayMultiplier,\n retryNumber,\n timeOfFirstRequest,\n totalTimeout,\n } = config;\n\n const maxRetryDelayMs = maxRetryDelay * 1000;\n const totalTimeoutMs = totalTimeout * 1000;\n\n const jitter = Math.floor(Math.random() * 1000);\n const calculatedNextRetryDelay =\n Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter;\n\n const maxAllowableDelayMs =\n totalTimeoutMs - (Date.now() - timeOfFirstRequest);\n\n return Math.min(\n calculatedNextRetryDelay,\n maxAllowableDelayMs,\n maxRetryDelayMs\n );\n}\n\nmodule.exports.getNextRetryDelay = getNextRetryDelay;\n","module.exports = require('./lib/retry');","var RetryOperation = require('./retry_operation');\n\nexports.operation = function(options) {\n var timeouts = exports.timeouts(options);\n return new RetryOperation(timeouts, {\n forever: options && (options.forever || options.retries === Infinity),\n unref: options && options.unref,\n maxRetryTime: options && options.maxRetryTime\n });\n};\n\nexports.timeouts = function(options) {\n if (options instanceof Array) {\n return [].concat(options);\n }\n\n var opts = {\n retries: 10,\n factor: 2,\n minTimeout: 1 * 1000,\n maxTimeout: Infinity,\n randomize: false\n };\n for (var key in options) {\n opts[key] = options[key];\n }\n\n if (opts.minTimeout > opts.maxTimeout) {\n throw new Error('minTimeout is greater than maxTimeout');\n }\n\n var timeouts = [];\n for (var i = 0; i < opts.retries; i++) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n if (options && options.forever && !timeouts.length) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n // sort the array numerically ascending\n timeouts.sort(function(a,b) {\n return a - b;\n });\n\n return timeouts;\n};\n\nexports.createTimeout = function(attempt, opts) {\n var random = (opts.randomize)\n ? 
(Math.random() + 1)\n : 1;\n\n var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt));\n timeout = Math.min(timeout, opts.maxTimeout);\n\n return timeout;\n};\n\nexports.wrap = function(obj, options, methods) {\n if (options instanceof Array) {\n methods = options;\n options = null;\n }\n\n if (!methods) {\n methods = [];\n for (var key in obj) {\n if (typeof obj[key] === 'function') {\n methods.push(key);\n }\n }\n }\n\n for (var i = 0; i < methods.length; i++) {\n var method = methods[i];\n var original = obj[method];\n\n obj[method] = function retryWrapper(original) {\n var op = exports.operation(options);\n var args = Array.prototype.slice.call(arguments, 1);\n var callback = args.pop();\n\n args.push(function(err) {\n if (op.retry(err)) {\n return;\n }\n if (err) {\n arguments[0] = op.mainError();\n }\n callback.apply(this, arguments);\n });\n\n op.attempt(function() {\n original.apply(obj, args);\n });\n }.bind(obj, original);\n obj[method].options = options;\n }\n};\n","function RetryOperation(timeouts, options) {\n // Compatibility for the old (timeouts, retryForever) signature\n if (typeof options === 'boolean') {\n options = { forever: options };\n }\n\n this._originalTimeouts = JSON.parse(JSON.stringify(timeouts));\n this._timeouts = timeouts;\n this._options = options || {};\n this._maxRetryTime = options && options.maxRetryTime || Infinity;\n this._fn = null;\n this._errors = [];\n this._attempts = 1;\n this._operationTimeout = null;\n this._operationTimeoutCb = null;\n this._timeout = null;\n this._operationStart = null;\n this._timer = null;\n\n if (this._options.forever) {\n this._cachedTimeouts = this._timeouts.slice(0);\n }\n}\nmodule.exports = RetryOperation;\n\nRetryOperation.prototype.reset = function() {\n this._attempts = 1;\n this._timeouts = this._originalTimeouts.slice(0);\n}\n\nRetryOperation.prototype.stop = function() {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n if (this._timer) {\n clearTimeout(this._timer);\n }\n\n this._timeouts = [];\n this._cachedTimeouts = null;\n};\n\nRetryOperation.prototype.retry = function(err) {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n\n if (!err) {\n return false;\n }\n var currentTime = new Date().getTime();\n if (err && currentTime - this._operationStart >= this._maxRetryTime) {\n this._errors.push(err);\n this._errors.unshift(new Error('RetryOperation timeout occurred'));\n return false;\n }\n\n this._errors.push(err);\n\n var timeout = this._timeouts.shift();\n if (timeout === undefined) {\n if (this._cachedTimeouts) {\n // retry forever, only keep last error\n this._errors.splice(0, this._errors.length - 1);\n timeout = this._cachedTimeouts.slice(-1);\n } else {\n return false;\n }\n }\n\n var self = this;\n this._timer = setTimeout(function() {\n self._attempts++;\n\n if (self._operationTimeoutCb) {\n self._timeout = setTimeout(function() {\n self._operationTimeoutCb(self._attempts);\n }, self._operationTimeout);\n\n if (self._options.unref) {\n self._timeout.unref();\n }\n }\n\n self._fn(self._attempts);\n }, timeout);\n\n if (this._options.unref) {\n this._timer.unref();\n }\n\n return true;\n};\n\nRetryOperation.prototype.attempt = function(fn, timeoutOps) {\n this._fn = fn;\n\n if (timeoutOps) {\n if (timeoutOps.timeout) {\n this._operationTimeout = timeoutOps.timeout;\n }\n if (timeoutOps.cb) {\n this._operationTimeoutCb = timeoutOps.cb;\n }\n }\n\n var self = this;\n if (this._operationTimeoutCb) {\n this._timeout = setTimeout(function() {\n 
self._operationTimeoutCb();\n }, self._operationTimeout);\n }\n\n this._operationStart = new Date().getTime();\n\n this._fn(this._attempts);\n};\n\nRetryOperation.prototype.try = function(fn) {\n console.log('Using RetryOperation.try() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = function(fn) {\n console.log('Using RetryOperation.start() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = RetryOperation.prototype.try;\n\nRetryOperation.prototype.errors = function() {\n return this._errors;\n};\n\nRetryOperation.prototype.attempts = function() {\n return this._attempts;\n};\n\nRetryOperation.prototype.mainError = function() {\n if (this._errors.length === 0) {\n return null;\n }\n\n var counts = {};\n var mainError = null;\n var mainErrorCount = 0;\n\n for (var i = 0; i < this._errors.length; i++) {\n var error = this._errors[i];\n var message = error.message;\n var count = (counts[message] || 0) + 1;\n\n counts[message] = count;\n\n if (count >= mainErrorCount) {\n mainError = error;\n mainErrorCount = count;\n }\n }\n\n return mainError;\n};\n","/*! safe-buffer. MIT License. Feross Aboukhadijeh */\n/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.prototype = Object.create(Buffer.prototype)\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n","'use strict';\n\nvar stubs = require('stubs')\n\n/*\n * StreamEvents can be used 2 ways:\n *\n * 1:\n * function MyStream() {\n * require('stream-events').call(this)\n * }\n *\n * 2:\n * require('stream-events')(myStream)\n */\nfunction StreamEvents(stream) {\n stream = stream || this\n\n var cfg = {\n callthrough: true,\n calls: 1\n }\n\n stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading'))\n stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing'))\n\n return stream\n}\n\nmodule.exports = StreamEvents\n","module.exports = shift\n\nfunction shift (stream) {\n var rs = stream._readableState\n if (!rs) return null\n return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs))\n}\n\nfunction getStateLength (state) {\n if (state.buffer.length) {\n // Since node 6.3.0 state.buffer is a BufferList not an array\n if (state.buffer.head) {\n return state.buffer.head.data.length\n }\n\n return state.buffer[0].length\n }\n\n return state.length\n}\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/**/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n 
var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? 
this.write(buf) : '';\n}","'use strict'\n\nmodule.exports = function stubs(obj, method, cfg, stub) {\n if (!obj || !method || !obj[method])\n throw new Error('You must provide an object and a key for an existing method')\n\n if (!stub) {\n stub = cfg\n cfg = {}\n }\n\n stub = stub || function() {}\n\n cfg.callthrough = cfg.callthrough || false\n cfg.calls = cfg.calls || 0\n\n var norevert = cfg.calls === 0\n\n var cached = obj[method].bind(obj)\n\n obj[method] = function() {\n var args = [].slice.call(arguments)\n var returnVal\n\n if (cfg.callthrough)\n returnVal = cached.apply(obj, args)\n\n returnVal = stub.apply(obj, args) || returnVal\n\n if (!norevert && --cfg.calls === 0)\n obj[method] = cached\n\n return returnVal\n }\n}\n","'use strict';\nconst os = require('os');\nconst tty = require('tty');\nconst hasFlag = require('has-flag');\n\nconst {env} = process;\n\nlet forceColor;\nif (hasFlag('no-color') ||\n\thasFlag('no-colors') ||\n\thasFlag('color=false') ||\n\thasFlag('color=never')) {\n\tforceColor = 0;\n} else if (hasFlag('color') ||\n\thasFlag('colors') ||\n\thasFlag('color=true') ||\n\thasFlag('color=always')) {\n\tforceColor = 1;\n}\n\nif ('FORCE_COLOR' in env) {\n\tif (env.FORCE_COLOR === 'true') {\n\t\tforceColor = 1;\n\t} else if (env.FORCE_COLOR === 'false') {\n\t\tforceColor = 0;\n\t} else {\n\t\tforceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);\n\t}\n}\n\nfunction translateLevel(level) {\n\tif (level === 0) {\n\t\treturn false;\n\t}\n\n\treturn {\n\t\tlevel,\n\t\thasBasic: true,\n\t\thas256: level >= 2,\n\t\thas16m: level >= 3\n\t};\n}\n\nfunction supportsColor(haveStream, streamIsTTY) {\n\tif (forceColor === 0) {\n\t\treturn 0;\n\t}\n\n\tif (hasFlag('color=16m') ||\n\t\thasFlag('color=full') ||\n\t\thasFlag('color=truecolor')) {\n\t\treturn 3;\n\t}\n\n\tif (hasFlag('color=256')) {\n\t\treturn 2;\n\t}\n\n\tif (haveStream && !streamIsTTY && forceColor === undefined) {\n\t\treturn 0;\n\t}\n\n\tconst min = forceColor || 0;\n\n\tif (env.TERM === 'dumb') {\n\t\treturn min;\n\t}\n\n\tif (process.platform === 'win32') {\n\t\t// Windows 10 build 10586 is the first Windows release that supports 256 colors.\n\t\t// Windows 10 build 14931 is the first release that supports 16m/TrueColor.\n\t\tconst osRelease = os.release().split('.');\n\t\tif (\n\t\t\tNumber(osRelease[0]) >= 10 &&\n\t\t\tNumber(osRelease[2]) >= 10586\n\t\t) {\n\t\t\treturn Number(osRelease[2]) >= 14931 ? 3 : 2;\n\t\t}\n\n\t\treturn 1;\n\t}\n\n\tif ('CI' in env) {\n\t\tif (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {\n\t\t\treturn 1;\n\t\t}\n\n\t\treturn min;\n\t}\n\n\tif ('TEAMCITY_VERSION' in env) {\n\t\treturn /^(9\\.(0*[1-9]\\d*)\\.|\\d{2,}\\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;\n\t}\n\n\tif (env.COLORTERM === 'truecolor') {\n\t\treturn 3;\n\t}\n\n\tif ('TERM_PROGRAM' in env) {\n\t\tconst version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);\n\n\t\tswitch (env.TERM_PROGRAM) {\n\t\t\tcase 'iTerm.app':\n\t\t\t\treturn version >= 3 ? 
3 : 2;\n\t\t\tcase 'Apple_Terminal':\n\t\t\t\treturn 2;\n\t\t\t// No default\n\t\t}\n\t}\n\n\tif (/-256(color)?$/i.test(env.TERM)) {\n\t\treturn 2;\n\t}\n\n\tif (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {\n\t\treturn 1;\n\t}\n\n\tif ('COLORTERM' in env) {\n\t\treturn 1;\n\t}\n\n\treturn min;\n}\n\nfunction getSupportLevel(stream) {\n\tconst level = supportsColor(stream, stream && stream.isTTY);\n\treturn translateLevel(level);\n}\n\nmodule.exports = {\n\tsupportsColor: getSupportLevel,\n\tstdout: translateLevel(supportsColor(true, tty.isatty(1))),\n\tstderr: translateLevel(supportsColor(true, tty.isatty(2)))\n};\n","\"use strict\";\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0;\n/**\n * @class TeenyStatisticsWarning\n * @extends Error\n * @description While an error, is used for emitting warnings when\n * meeting certain configured thresholds.\n * @see process.emitWarning\n */\nclass TeenyStatisticsWarning extends Error {\n /**\n * @param {string} message\n */\n constructor(message) {\n super(message);\n this.threshold = 0;\n this.type = '';\n this.value = 0;\n this.name = this.constructor.name;\n Error.captureStackTrace(this, this.constructor);\n }\n}\nexports.TeenyStatisticsWarning = TeenyStatisticsWarning;\nTeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning';\n/**\n * @class TeenyStatistics\n * @description Maintain various statistics internal to teeny-request. Tracking\n * is not automatic and must be instrumented within teeny-request.\n */\nclass TeenyStatistics {\n /**\n * @param {TeenyStatisticsOptions} [opts]\n */\n constructor(opts) {\n /**\n * @type {number}\n * @private\n * @default 0\n */\n this._concurrentRequests = 0;\n /**\n * @type {boolean}\n * @private\n * @default false\n */\n this._didConcurrentRequestWarn = false;\n this._options = TeenyStatistics._prepareOptions(opts);\n }\n /**\n * Returns a copy of the current options.\n * @return {TeenyStatisticsOptions}\n */\n getOptions() {\n return Object.assign({}, this._options);\n }\n /**\n * Change configured statistics options. This will not preserve unspecified\n * options that were previously specified, i.e. 
this is a reset of options.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsConfig} The previous options.\n * @see _prepareOptions\n */\n setOptions(opts) {\n const oldOpts = this._options;\n this._options = TeenyStatistics._prepareOptions(opts);\n return oldOpts;\n }\n /**\n * @readonly\n * @return {TeenyStatisticsCounters}\n */\n get counters() {\n return {\n concurrentRequests: this._concurrentRequests,\n };\n }\n /**\n * @description Should call this right before making a request.\n */\n requestStarting() {\n this._concurrentRequests++;\n if (this._options.concurrentRequests > 0 &&\n this._concurrentRequests >= this._options.concurrentRequests &&\n !this._didConcurrentRequestWarn) {\n this._didConcurrentRequestWarn = true;\n const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' +\n this._concurrentRequests +\n ' requests in-flight, which exceeds the configured threshold of ' +\n this._options.concurrentRequests +\n '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' +\n 'variable or the concurrentRequests option of teeny-request to ' +\n 'increase or disable (0) this warning.');\n warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS;\n warning.value = this._concurrentRequests;\n warning.threshold = this._options.concurrentRequests;\n process.emitWarning(warning);\n }\n }\n /**\n * @description When using `requestStarting`, call this after the request\n * has finished.\n */\n requestFinished() {\n // TODO negative?\n this._concurrentRequests--;\n }\n /**\n * Configuration Precedence:\n * 1. Dependency inversion via defined option.\n * 2. Global numeric environment variable.\n * 3. Built-in default.\n * This will not preserve unspecified options previously specified.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsOptions}\n * @private\n */\n static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) {\n let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS;\n const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS);\n if (diConcurrentRequests !== undefined) {\n concurrentRequests = diConcurrentRequests;\n }\n else if (!Number.isNaN(envConcurrentRequests)) {\n concurrentRequests = envConcurrentRequests;\n }\n return { concurrentRequests };\n }\n}\nexports.TeenyStatistics = TeenyStatistics;\n/**\n * @description A default threshold representing when to warn about excessive\n * in-flight/concurrent requests.\n * @type {number}\n * @static\n * @readonly\n * @default 5000\n */\nTeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000;\n//# sourceMappingURL=TeenyStatistics.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getAgent = exports.pool = void 0;\nconst http_1 = require(\"http\");\nconst https_1 = require(\"https\");\n// eslint-disable-next-line 
node/no-deprecated-api\nconst url_1 = require(\"url\");\nexports.pool = new Map();\n/**\n * Determines if a proxy should be considered based on the environment.\n *\n * @param uri The request uri\n * @returns {boolean}\n */\nfunction shouldUseProxyForURI(uri) {\n const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy;\n if (!noProxyEnv) {\n return true;\n }\n const givenURI = new URL(uri);\n for (const noProxyRaw of noProxyEnv.split(',')) {\n const noProxy = noProxyRaw.trim();\n if (noProxy === givenURI.origin || noProxy === givenURI.hostname) {\n return false;\n }\n else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) {\n const noProxyWildcard = noProxy.replace(/^\\*\\./, '.');\n if (givenURI.hostname.endsWith(noProxyWildcard)) {\n return false;\n }\n }\n }\n return true;\n}\n/**\n * Returns a custom request Agent if one is found, otherwise returns undefined\n * which will result in the global http(s) Agent being used.\n * @private\n * @param {string} uri The request uri\n * @param {Options} reqOpts The request options\n * @returns {HttpAnyAgent|undefined}\n */\nfunction getAgent(uri, reqOpts) {\n const isHttp = uri.startsWith('http://');\n const proxy = reqOpts.proxy ||\n process.env.HTTP_PROXY ||\n process.env.http_proxy ||\n process.env.HTTPS_PROXY ||\n process.env.https_proxy;\n const poolOptions = Object.assign({}, reqOpts.pool);\n const manuallyProvidedProxy = !!reqOpts.proxy;\n const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri);\n if (proxy && shouldUseProxy) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp\n ? require('http-proxy-agent')\n : require('https-proxy-agent');\n const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions };\n return new Agent(proxyOpts);\n }\n let key = isHttp ? 'http' : 'https';\n if (reqOpts.forever) {\n key += ':forever';\n if (!exports.pool.has(key)) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp ? 
http_1.Agent : https_1.Agent;\n exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true }));\n }\n }\n return exports.pool.get(key);\n}\nexports.getAgent = getAgent;\n//# sourceMappingURL=agents.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.teenyRequest = exports.RequestError = void 0;\nconst node_fetch_1 = require(\"node-fetch\");\nconst stream_1 = require(\"stream\");\nconst uuid = require(\"uuid\");\nconst agents_1 = require(\"./agents\");\nconst TeenyStatistics_1 = require(\"./TeenyStatistics\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst streamEvents = require('stream-events');\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\n/**\n * Convert options from Request to Fetch format\n * @private\n * @param reqOpts Request options\n */\nfunction requestToFetchOptions(reqOpts) {\n const options = {\n method: reqOpts.method || 'GET',\n ...(reqOpts.timeout && { timeout: reqOpts.timeout }),\n ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }),\n };\n if (typeof reqOpts.json === 'object') {\n // Add Content-type: application/json header\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['Content-Type'] = 'application/json';\n // Set body to JSON representation of value\n options.body = JSON.stringify(reqOpts.json);\n }\n else {\n if (Buffer.isBuffer(reqOpts.body)) {\n options.body = reqOpts.body;\n }\n else if (typeof reqOpts.body !== 'string') {\n options.body = JSON.stringify(reqOpts.body);\n }\n else {\n options.body = reqOpts.body;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options.headers = reqOpts.headers;\n let uri = (reqOpts.uri ||\n reqOpts.url);\n if (!uri) {\n throw new Error('Missing uri or url in reqOpts.');\n }\n if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const qs = require('querystring');\n const params = qs.stringify(reqOpts.qs);\n uri = uri + '?' 
+ params;\n }\n options.agent = (0, agents_1.getAgent)(uri, reqOpts);\n return { uri, options };\n}\n/**\n * Convert a response from `fetch` to `request` format.\n * @private\n * @param opts The `request` options used to create the request.\n * @param res The Fetch response\n * @returns A `request` response object\n */\nfunction fetchToRequestResponse(opts, res) {\n const request = {};\n request.agent = opts.agent || false;\n request.headers = (opts.headers || {});\n request.href = res.url;\n // headers need to be converted from a map to an obj\n const resHeaders = {};\n res.headers.forEach((value, key) => (resHeaders[key] = value));\n const response = Object.assign(res.body, {\n statusCode: res.status,\n statusMessage: res.statusText,\n request,\n body: res.body,\n headers: resHeaders,\n toJSON: () => ({ headers: resHeaders }),\n });\n return response;\n}\n/**\n * Create POST body from two parts as multipart/related content-type\n * @private\n * @param boundary\n * @param multipart\n */\nfunction createMultipartStream(boundary, multipart) {\n const finale = `--${boundary}--`;\n const stream = new stream_1.PassThrough();\n for (const part of multipart) {\n const preamble = `--${boundary}\\r\\nContent-Type: ${part['Content-Type']}\\r\\n\\r\\n`;\n stream.write(preamble);\n if (typeof part.body === 'string') {\n stream.write(part.body);\n stream.write('\\r\\n');\n }\n else {\n part.body.pipe(stream, { end: false });\n part.body.on('end', () => {\n stream.write('\\r\\n');\n stream.write(finale);\n stream.end();\n });\n }\n }\n return stream;\n}\nfunction teenyRequest(reqOpts, callback) {\n const { uri, options } = requestToFetchOptions(reqOpts);\n const multipart = reqOpts.multipart;\n if (reqOpts.multipart && multipart.length === 2) {\n if (!callback) {\n // TODO: add support for multipart uploads through streaming\n throw new Error('Multipart without callback is not implemented.');\n }\n const boundary = uuid.v4();\n options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`;\n options.body = createMultipartStream(boundary, multipart);\n // Multipart upload\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, (err) => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n }\n if (callback === undefined) {\n // Stream mode\n const requestStream = streamEvents(new stream_1.PassThrough());\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let responseStream;\n requestStream.once('reading', () => {\n if (responseStream) {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n }\n else {\n requestStream.once('response', () => {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n });\n }\n });\n options.compress = false;\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n responseStream = res.body;\n 
responseStream.on('error', (err) => {\n requestStream.emit('error', err);\n });\n const response = fetchToRequestResponse(options, res);\n requestStream.emit('response', response);\n }, err => {\n teenyRequest.stats.requestFinished();\n requestStream.emit('error', err);\n });\n // fetch doesn't supply the raw HTTP stream, instead it\n // returns a PassThrough piped from the HTTP response\n // stream.\n return requestStream;\n }\n // GET or POST with callback\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n if (response.statusCode === 204) {\n // Probably a DELETE\n callback(null, response, body);\n return;\n }\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, err => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n const response = fetchToRequestResponse(options, res);\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n}\nexports.teenyRequest = teenyRequest;\nteenyRequest.defaults = (defaults) => {\n return (reqOpts, callback) => {\n const opts = { ...defaults, ...reqOpts };\n if (callback === undefined) {\n return teenyRequest(opts);\n }\n teenyRequest(opts, callback);\n };\n};\n/**\n * Single instance of an interface for keeping track of things.\n */\nteenyRequest.stats = new TeenyStatistics_1.TeenyStatistics();\nteenyRequest.resetStats = () => {\n teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions());\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar 
_stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nvar _default = {\n randomUUID: _crypto.default.randomUUID\n};\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nexports.unsafeStringify = unsafeStringify;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).slice(1));\n}\n\nfunction unsafeStringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase();\n}\n\nfunction stringify(arr, offset = 0) {\n const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.unsafeStringify)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.URL = exports.DNS = void 0;\nexports.default = v35;\n\nvar _stringify = require(\"./stringify.js\");\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction v35(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n var _namespace;\n\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... 
value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _native = _interopRequireDefault(require(\"./native.js\"));\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n if (_native.default.randomUUID && !buf && !options) {\n return _native.default.randomUUID();\n }\n\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.slice(14, 15), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","\n/**\n * For Node.js, simply re-export the core `util.deprecate` function.\n */\n\nmodule.exports = require('util').deprecate;\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = 
_interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
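The preceding strings are the vendored `uuid` module: v1 (time-based), v3/v5 (namespace hashing via md5/sha1), v4 (random), plus parse/stringify/validate helpers. A minimal sketch of that public API, assuming the published `uuid` package is required directly rather than through the bundle:

const { v4: uuidv4, v5: uuidv5, validate, parse, stringify } = require('uuid');

const id = uuidv4();                // random (version 4) UUID string
console.log(validate(id));          // true
console.log(stringify(parse(id)));  // byte round-trip returns the same lowercase string

// v5 is deterministic: the same name + namespace always hash to the same UUID
const URL_NS = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; // URL namespace defined above
console.log(uuidv5('https://example.com', URL_NS));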
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return 
x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return 
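The conversions table above is the vendored `webidl-conversions` package used by the bundled whatwg-url polyfill: integer types wrap modulo 2^bits unless enforceRange or clamp is requested, and USVString replaces lone surrogates with U+FFFD. A small sketch of that behaviour, assuming the published package is required directly:

const conversions = require('webidl-conversions');

console.log(conversions['octet'](300));                        // 44 (300 mod 256)
console.log(conversions['unsigned long'](-1));                 // 4294967295 (wraps modulo 2^32)
console.log(conversions['long'](-1, { enforceRange: true }));  // -1; out-of-range values throw TypeError
console.log(conversions['USVString']('a\uD800b'));             // 'a\uFFFDb' (lone surrogate replaced)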
usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = 
conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) 
? undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
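parseHost above returns a number for IPv4 addresses (including hex and octal dotted forms), an array of eight 16-bit pieces for IPv6, and a string for domains; serializeHost renders each back in canonical form. A sketch of that behaviour through the helpers this bundle re-exports, assuming the published `whatwg-url` package is required directly:

const { basicURLParse, serializeHost } = require('whatwg-url');

const a = basicURLParse('http://0x7f.0.0.1/');
console.log(a.host, serializeHost(a.host));   // 2130706433 '127.0.0.1'

const b = basicURLParse('http://[2001:db8:0:0:0:0:0:1]/');
console.log(serializeHost(b.host));           // '[2001:db8::1]' (longest zero run compressed)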
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
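serializeURL and serializeURLOrigin above rebuild a URL record produced by basicURLParse, lowercasing the scheme and host, dropping default ports, and resolving dot segments in the path. A short sketch, again assuming the published `whatwg-url` package is required directly:

const { basicURLParse, serializeURL, serializeURLOrigin } = require('whatwg-url');

const rec = basicURLParse('HTTPS://user:pw@Example.COM:443/a/../b?q=1#frag');
console.log(serializeURL(rec));       // 'https://user:pw@example.com/b?q=1#frag'
console.log(serializeURLOrigin(rec)); // 'https://example.com'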
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","'use strict'\nmodule.exports = function (Yallist) {\n Yallist.prototype[Symbol.iterator] = function* () {\n for (let walker = this.head; walker; walker = walker.next) {\n yield walker.value\n }\n }\n}\n","'use strict'\nmodule.exports = Yallist\n\nYallist.Node = Node\nYallist.create = Yallist\n\nfunction Yallist (list) {\n var self = this\n if (!(self instanceof Yallist)) {\n self = new Yallist()\n }\n\n self.tail = null\n self.head = null\n self.length = 0\n\n if (list && typeof list.forEach === 'function') {\n list.forEach(function (item) {\n self.push(item)\n })\n } else if (arguments.length > 0) {\n for (var i = 0, l = arguments.length; i < l; i++) {\n self.push(arguments[i])\n }\n }\n\n return self\n}\n\nYallist.prototype.removeNode = function (node) {\n if (node.list !== this) {\n throw new Error('removing node which does not belong to this list')\n }\n\n var next = node.next\n var prev = node.prev\n\n if (next) {\n next.prev = prev\n 
}\n\n if (prev) {\n prev.next = next\n }\n\n if (node === this.head) {\n this.head = next\n }\n if (node === this.tail) {\n this.tail = prev\n }\n\n node.list.length--\n node.next = null\n node.prev = null\n node.list = null\n\n return next\n}\n\nYallist.prototype.unshiftNode = function (node) {\n if (node === this.head) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var head = this.head\n node.list = this\n node.next = head\n if (head) {\n head.prev = node\n }\n\n this.head = node\n if (!this.tail) {\n this.tail = node\n }\n this.length++\n}\n\nYallist.prototype.pushNode = function (node) {\n if (node === this.tail) {\n return\n }\n\n if (node.list) {\n node.list.removeNode(node)\n }\n\n var tail = this.tail\n node.list = this\n node.prev = tail\n if (tail) {\n tail.next = node\n }\n\n this.tail = node\n if (!this.head) {\n this.head = node\n }\n this.length++\n}\n\nYallist.prototype.push = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n push(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.unshift = function () {\n for (var i = 0, l = arguments.length; i < l; i++) {\n unshift(this, arguments[i])\n }\n return this.length\n}\n\nYallist.prototype.pop = function () {\n if (!this.tail) {\n return undefined\n }\n\n var res = this.tail.value\n this.tail = this.tail.prev\n if (this.tail) {\n this.tail.next = null\n } else {\n this.head = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.shift = function () {\n if (!this.head) {\n return undefined\n }\n\n var res = this.head.value\n this.head = this.head.next\n if (this.head) {\n this.head.prev = null\n } else {\n this.tail = null\n }\n this.length--\n return res\n}\n\nYallist.prototype.forEach = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.head, i = 0; walker !== null; i++) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.next\n }\n}\n\nYallist.prototype.forEachReverse = function (fn, thisp) {\n thisp = thisp || this\n for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {\n fn.call(thisp, walker.value, i, this)\n walker = walker.prev\n }\n}\n\nYallist.prototype.get = function (n) {\n for (var i = 0, walker = this.head; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.next\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.getReverse = function (n) {\n for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {\n // abort out of the list early if we hit a cycle\n walker = walker.prev\n }\n if (i === n && walker !== null) {\n return walker.value\n }\n}\n\nYallist.prototype.map = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.head; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.next\n }\n return res\n}\n\nYallist.prototype.mapReverse = function (fn, thisp) {\n thisp = thisp || this\n var res = new Yallist()\n for (var walker = this.tail; walker !== null;) {\n res.push(fn.call(thisp, walker.value, this))\n walker = walker.prev\n }\n return res\n}\n\nYallist.prototype.reduce = function (fn, initial) {\n var acc\n var walker = this.head\n if (arguments.length > 1) {\n acc = initial\n } else if (this.head) {\n walker = this.head.next\n acc = this.head.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = 0; walker !== null; i++) {\n acc = fn(acc, walker.value, i)\n walker = 
walker.next\n }\n\n return acc\n}\n\nYallist.prototype.reduceReverse = function (fn, initial) {\n var acc\n var walker = this.tail\n if (arguments.length > 1) {\n acc = initial\n } else if (this.tail) {\n walker = this.tail.prev\n acc = this.tail.value\n } else {\n throw new TypeError('Reduce of empty list with no initial value')\n }\n\n for (var i = this.length - 1; walker !== null; i--) {\n acc = fn(acc, walker.value, i)\n walker = walker.prev\n }\n\n return acc\n}\n\nYallist.prototype.toArray = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.head; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.next\n }\n return arr\n}\n\nYallist.prototype.toArrayReverse = function () {\n var arr = new Array(this.length)\n for (var i = 0, walker = this.tail; walker !== null; i++) {\n arr[i] = walker.value\n walker = walker.prev\n }\n return arr\n}\n\nYallist.prototype.slice = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = 0, walker = this.head; walker !== null && i < from; i++) {\n walker = walker.next\n }\n for (; walker !== null && i < to; i++, walker = walker.next) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.sliceReverse = function (from, to) {\n to = to || this.length\n if (to < 0) {\n to += this.length\n }\n from = from || 0\n if (from < 0) {\n from += this.length\n }\n var ret = new Yallist()\n if (to < from || to < 0) {\n return ret\n }\n if (from < 0) {\n from = 0\n }\n if (to > this.length) {\n to = this.length\n }\n for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {\n walker = walker.prev\n }\n for (; walker !== null && i > from; i--, walker = walker.prev) {\n ret.push(walker.value)\n }\n return ret\n}\n\nYallist.prototype.splice = function (start, deleteCount, ...nodes) {\n if (start > this.length) {\n start = this.length - 1\n }\n if (start < 0) {\n start = this.length + start;\n }\n\n for (var i = 0, walker = this.head; walker !== null && i < start; i++) {\n walker = walker.next\n }\n\n var ret = []\n for (var i = 0; walker && i < deleteCount; i++) {\n ret.push(walker.value)\n walker = this.removeNode(walker)\n }\n if (walker === null) {\n walker = this.tail\n }\n\n if (walker !== this.head && walker !== this.tail) {\n walker = walker.prev\n }\n\n for (var i = 0; i < nodes.length; i++) {\n walker = insert(this, walker, nodes[i])\n }\n return ret;\n}\n\nYallist.prototype.reverse = function () {\n var head = this.head\n var tail = this.tail\n for (var walker = head; walker !== null; walker = walker.prev) {\n var p = walker.prev\n walker.prev = walker.next\n walker.next = p\n }\n this.head = tail\n this.tail = head\n return this\n}\n\nfunction insert (self, node, value) {\n var inserted = node === self.head ?\n new Node(value, null, node, self) :\n new Node(value, node, node.next, self)\n\n if (inserted.next === null) {\n self.tail = inserted\n }\n if (inserted.prev === null) {\n self.head = inserted\n }\n\n self.length++\n\n return inserted\n}\n\nfunction push (self, item) {\n self.tail = new Node(item, self.tail, null, self)\n if (!self.head) {\n self.head = self.tail\n }\n self.length++\n}\n\nfunction unshift (self, item) {\n self.head = new Node(item, null, self.head, self)\n if (!self.tail) {\n self.tail = self.head\n 
}\n self.length++\n}\n\nfunction Node (value, prev, next, list) {\n if (!(this instanceof Node)) {\n return new Node(value, prev, next, list)\n }\n\n this.list = list\n this.value = value\n\n if (prev) {\n prev.next = this\n this.prev = prev\n } else {\n this.prev = null\n }\n\n if (next) {\n next.prev = this\n this.next = next\n } else {\n this.next = null\n }\n}\n\ntry {\n // add if support for Symbol.iterator is present\n require('./iterator.js')(Yallist)\n} catch (er) {}\n","class Node {\n\t/// value;\n\t/// next;\n\n\tconstructor(value) {\n\t\tthis.value = value;\n\n\t\t// TODO: Remove this when targeting Node.js 12.\n\t\tthis.next = undefined;\n\t}\n}\n\nclass Queue {\n\t// TODO: Use private class fields when targeting Node.js 12.\n\t// #_head;\n\t// #_tail;\n\t// #_size;\n\n\tconstructor() {\n\t\tthis.clear();\n\t}\n\n\tenqueue(value) {\n\t\tconst node = new Node(value);\n\n\t\tif (this._head) {\n\t\t\tthis._tail.next = node;\n\t\t\tthis._tail = node;\n\t\t} else {\n\t\t\tthis._head = node;\n\t\t\tthis._tail = node;\n\t\t}\n\n\t\tthis._size++;\n\t}\n\n\tdequeue() {\n\t\tconst current = this._head;\n\t\tif (!current) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis._head = this._head.next;\n\t\tthis._size--;\n\t\treturn current.value;\n\t}\n\n\tclear() {\n\t\tthis._head = undefined;\n\t\tthis._tail = undefined;\n\t\tthis._size = 0;\n\t}\n\n\tget size() {\n\t\treturn this._size;\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tlet current = this._head;\n\n\t\twhile (current) {\n\t\t\tyield current.value;\n\t\t\tcurrent = current.next;\n\t\t}\n\t}\n}\n\nmodule.exports = Queue;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"buffer\");","module.exports = require(\"child_process\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"querystring\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"tty\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(95);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;;;;;;;;AC/oBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9oBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;;;;;;;;;AChKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpkEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACz2FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACt2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9kBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1WA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7aA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpyBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3GA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1bA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/XA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;;;;;;;;ACHA;;;;;;;ACAA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1vDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjgCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/nBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACrBA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpSA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACvSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC
xGA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACr0BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/IA;AACA;AACA;AACA;AACA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1uEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClfA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9fA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5kBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnmEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACj7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1jBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9iCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACroBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvYA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACrRA;;;;;;;;ACAA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9VA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9SA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChoBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;
;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnEA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7sBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACl7GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChuGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnSA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;;;;;;;;;ACv2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1nCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC
vpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":[".././lib/locks.js",".././lib/post.js",".././node_modules/@actions/core/lib/command.js",".././node_modules/@actions/core/lib/core.js",".././node_modules/@actions/core/lib/file-command.js",".././node_modules/@actions/core/lib/oidc-utils.js",".././node_modules/@actions/core/lib/path-utils.js",".././node_modules/@actions/core/lib/summary.js",".././node_modules/@actions/core/lib/utils.js",".././node_modules/@actions/github/lib/context.js",".././node_modules/@actions/github/lib/github.js",".././node_modules/@actions/github/lib/internal/utils.js",".././node_modules/@actions/github/lib/utils.js",".././node_modules/@actions/http-client/lib/auth.js",".././node_modules/@actions/http-client/lib/index.js",".././node_modules/@actions/http-client/lib/proxy.js",".././node_modules/@google-cloud/common/build/src/index.js",".././node_modules/@google-cloud/common/build/src/operation.js",".././node_modules/@google-cloud/common/build/src/service-object.js",".././node_modules/@google-cloud/common/build/src/service.js",".././node_modules/@google-cloud/common/build/src/util.js",".././node_modules/@google-cloud/paginator/build/src/index.js",".././node_modules/@google-cloud/paginator/build/src/resource-stream.js",".././node_modules/@google-cloud/projectify/build/src/index.js",".././node_modules/@google-cloud/promisify/build/src/index.js",".././node_modules/@octokit/auth-token/dist-node/index.js",".././node_modules/@octokit/core/dist-node/index.js",".././node_modules/@octokit/endpoint/dist-node/index.js",".././node_modules/@octokit/graphql/dist-node/index.js",".././node_modules/@octokit/plugin-paginate-rest/dist-node/index.js",".././node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js",".././node_modules/@octokit/request-error/dist-node/index.js",".././node_modules/@octokit/request/dist-node/index.js",".././no
de_modules/@tootallnate/once/dist/index.js",".././node_modules/abort-controller/dist/abort-controller.js",".././node_modules/agent-base/dist/src/index.js",".././node_modules/agent-base/dist/src/promisify.js",".././node_modules/arrify/index.js",".././node_modules/async-retry/lib/index.js",".././node_modules/base64-js/index.js",".././node_modules/before-after-hook/index.js",".././node_modules/before-after-hook/lib/add.js",".././node_modules/before-after-hook/lib/register.js",".././node_modules/before-after-hook/lib/remove.js",".././node_modules/bignumber.js/bignumber.js",".././node_modules/buffer-equal-constant-time/index.js",".././node_modules/compressible/index.js",".././node_modules/debug/src/browser.js",".././node_modules/debug/src/common.js",".././node_modules/debug/src/index.js",".././node_modules/debug/src/node.js",".././node_modules/deprecation/dist-node/index.js",".././node_modules/duplexify/index.js",".././node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js",".././node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js",".././node_modules/end-of-stream/index.js",".././node_modules/ent/decode.js",".././node_modules/ent/encode.js",".././node_modules/ent/index.js",".././node_modules/event-target-shim/dist/event-target-shim.js",".././node_modules/extend/index.js",".././node_modules/fast-xml-parser/src/fxp.js",".././node_modules/fast-xml-parser/src/util.js",".././node_modules/fast-xml-parser/src/validator.js",".././node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js",".././node_modules/fast-xml-parser/src/xmlbuilder/orderedJs2Xml.js",".././node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js",".././node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js",".././node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js",".././node_modules/fast-xml-parser/src/xmlparser/XMLParser.js",".././node_modules/fast-xml-parser/src/xmlparser/node2json.js",".././node_modules/fast-xml-parser/src/xmlparser/xmlNode.js",".././node_modules/gaxios/build/src/common.js",".././node_modules/gaxios/build/src/gaxios.js",".././node_modules/gaxios/build/src/index.js",".././node_modules/gaxios/build/src/retry.js",".././node_modules/gaxios/build/src/util.js",".././node_modules/gaxios/node_modules/agent-base/dist/helpers.js",".././node_modules/gaxios/node_modules/agent-base/dist/index.js",".././node_modules/gaxios/node_modules/https-proxy-agent/dist/index.js",".././node_modules/gaxios/node_modules/https-proxy-agent/dist/parse-proxy-response.js",".././node_modules/gcp-metadata/build/src/gcp-residency.js",".././node_modules/gcp-metadata/build/src/index.js",".././node_modules/google-auth-library/build/src/auth/authclient.js",".././node_modules/google-auth-library/build/src/auth/awsclient.js",".././node_modules/google-auth-library/build/src/auth/awsrequestsigner.js",".././node_modules/google-auth-library/build/src/auth/baseexternalclient.js",".././node_modules/google-auth-library/build/src/auth/computeclient.js",".././node_modules/google-auth-library/build/src/auth/downscopedclient.js",".././node_modules/google-auth-library/build/src/auth/envDetect.js",".././node_modules/google-auth-library/build/src/auth/executable-response.js",".././node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js",".././node_modules/google-auth-library/build/src/auth/externalclient.js",".././node_modules/google-auth-library/build/src/auth/googleauth.js",".././node_modules/google-auth-library/build/src/auth/iam.js",".././node_modules/google-auth-library/build/src/auth/identitypoolclie
nt.js",".././node_modules/google-auth-library/build/src/auth/idtokenclient.js",".././node_modules/google-auth-library/build/src/auth/impersonated.js",".././node_modules/google-auth-library/build/src/auth/jwtaccess.js",".././node_modules/google-auth-library/build/src/auth/jwtclient.js",".././node_modules/google-auth-library/build/src/auth/loginticket.js",".././node_modules/google-auth-library/build/src/auth/oauth2client.js",".././node_modules/google-auth-library/build/src/auth/oauth2common.js",".././node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js",".././node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js",".././node_modules/google-auth-library/build/src/auth/refreshclient.js",".././node_modules/google-auth-library/build/src/auth/stscredentials.js",".././node_modules/google-auth-library/build/src/crypto/browser/crypto.js",".././node_modules/google-auth-library/build/src/crypto/crypto.js",".././node_modules/google-auth-library/build/src/crypto/node/crypto.js",".././node_modules/google-auth-library/build/src/index.js",".././node_modules/google-auth-library/build/src/options.js",".././node_modules/google-auth-library/build/src/transporters.js",".././node_modules/google-auth-library/build/src/util.js",".././node_modules/gtoken/build/src/index.js",".././node_modules/has-flag/index.js",".././node_modules/http-proxy-agent/dist/agent.js",".././node_modules/http-proxy-agent/dist/index.js",".././node_modules/https-proxy-agent/dist/agent.js",".././node_modules/https-proxy-agent/dist/index.js",".././node_modules/https-proxy-agent/dist/parse-proxy-response.js",".././node_modules/inherits/inherits.js",".././node_modules/inherits/inherits_browser.js",".././node_modules/is-stream/index.js",".././node_modules/json-bigint/index.js",".././node_modules/json-bigint/lib/parse.js",".././node_modules/json-bigint/lib/stringify.js",".././node_modules/jwa/index.js",".././node_modules/jws/index.js",".././node_modules/jws/lib/data-stream.js",".././node_modules/jws/lib/sign-stream.js",".././node_modules/jws/lib/tostring.js",".././node_modules/jws/lib/verify-stream.js",".././node_modules/mime-db/index.js",".././node_modules/mime-types/index.js",".././node_modules/mime/Mime.js",".././node_modules/mime/index.js",".././node_modules/mime/types/other.js",".././node_modules/mime/types/standard.js",".././node_modules/ms/index.js",".././node_modules/node-fetch/lib/index.js",".././node_modules/once/once.js",".././node_modules/p-limit/index.js",".././node_modules/readable-stream/errors.js",".././node_modules/readable-stream/lib/_stream_duplex.js",".././node_modules/readable-stream/lib/_stream_passthrough.js",".././node_modules/readable-stream/lib/_stream_readable.js",".././node_modules/readable-stream/lib/_stream_transform.js",".././node_modules/readable-stream/lib/_stream_writable.js",".././node_modules/readable-stream/lib/internal/streams/async_iterator.js",".././node_modules/readable-stream/lib/internal/streams/buffer_list.js",".././node_modules/readable-stream/lib/internal/streams/destroy.js",".././node_modules/readable-stream/lib/internal/streams/end-of-stream.js",".././node_modules/readable-stream/lib/internal/streams/from.js",".././node_modules/readable-stream/lib/internal/streams/pipeline.js",".././node_modules/readable-stream/lib/internal/streams/state.js",".././node_modules/readable-stream/lib/internal/streams/stream.js",".././node_modules/readable-stream/readable.js",".././node_modules/retry-request/index.js",".././node_modules/retry/index.js",".././node_modules/retry/lib/ret
ry.js",".././node_modules/retry/lib/retry_operation.js",".././node_modules/safe-buffer/index.js",".././node_modules/stream-events/index.js",".././node_modules/stream-shift/index.js",".././node_modules/string_decoder/lib/string_decoder.js",".././node_modules/strnum/strnum.js",".././node_modules/stubs/index.js",".././node_modules/supports-color/index.js",".././node_modules/teeny-request/build/src/TeenyStatistics.js",".././node_modules/teeny-request/build/src/agents.js",".././node_modules/teeny-request/build/src/index.js",".././node_modules/teeny-request/node_modules/uuid/dist/index.js",".././node_modules/teeny-request/node_modules/uuid/dist/md5.js",".././node_modules/teeny-request/node_modules/uuid/dist/native.js",".././node_modules/teeny-request/node_modules/uuid/dist/nil.js",".././node_modules/teeny-request/node_modules/uuid/dist/parse.js",".././node_modules/teeny-request/node_modules/uuid/dist/regex.js",".././node_modules/teeny-request/node_modules/uuid/dist/rng.js",".././node_modules/teeny-request/node_modules/uuid/dist/sha1.js",".././node_modules/teeny-request/node_modules/uuid/dist/stringify.js",".././node_modules/teeny-request/node_modules/uuid/dist/v1.js",".././node_modules/teeny-request/node_modules/uuid/dist/v3.js",".././node_modules/teeny-request/node_modules/uuid/dist/v35.js",".././node_modules/teeny-request/node_modules/uuid/dist/v4.js",".././node_modules/teeny-request/node_modules/uuid/dist/v5.js",".././node_modules/teeny-request/node_modules/uuid/dist/validate.js",".././node_modules/teeny-request/node_modules/uuid/dist/version.js",".././node_modules/tr46/index.js",".././node_modules/tunnel/index.js",".././node_modules/tunnel/lib/tunnel.js",".././node_modules/undici/index.js",".././node_modules/undici/lib/agent.js",".././node_modules/undici/lib/api/abort-signal.js",".././node_modules/undici/lib/api/api-connect.js",".././node_modules/undici/lib/api/api-pipeline.js",".././node_modules/undici/lib/api/api-request.js",".././node_modules/undici/lib/api/api-stream.js",".././node_modules/undici/lib/api/api-upgrade.js",".././node_modules/undici/lib/api/index.js",".././node_modules/undici/lib/api/readable.js",".././node_modules/undici/lib/api/util.js",".././node_modules/undici/lib/balanced-pool.js",".././node_modules/undici/lib/cache/cache.js",".././node_modules/undici/lib/cache/cachestorage.js",".././node_modules/undici/lib/cache/symbols.js",".././node_modules/undici/lib/cache/util.js",".././node_modules/undici/lib/client.js",".././node_modules/undici/lib/compat/dispatcher-weakref.js",".././node_modules/undici/lib/cookies/constants.js",".././node_modules/undici/lib/cookies/index.js",".././node_modules/undici/lib/cookies/parse.js",".././node_modules/undici/lib/cookies/util.js",".././node_modules/undici/lib/core/connect.js",".././node_modules/undici/lib/core/errors.js",".././node_modules/undici/lib/core/request.js",".././node_modules/undici/lib/core/symbols.js",".././node_modules/undici/lib/core/util.js",".././node_modules/undici/lib/dispatcher-base.js",".././node_modules/undici/lib/dispatcher.js",".././node_modules/undici/lib/fetch/body.js",".././node_modules/undici/lib/fetch/constants.js",".././node_modules/undici/lib/fetch/dataURL.js",".././node_modules/undici/lib/fetch/file.js",".././node_modules/undici/lib/fetch/formdata.js",".././node_modules/undici/lib/fetch/global.js",".././node_modules/undici/lib/fetch/headers.js",".././node_modules/undici/lib/fetch/index.js",".././node_modules/undici/lib/fetch/request.js",".././node_modules/undici/lib/fetch/response.js",".././node_modules/undici/
lib/fetch/symbols.js",".././node_modules/undici/lib/fetch/util.js",".././node_modules/undici/lib/fetch/webidl.js",".././node_modules/undici/lib/fileapi/encoding.js",".././node_modules/undici/lib/fileapi/filereader.js",".././node_modules/undici/lib/fileapi/progressevent.js",".././node_modules/undici/lib/fileapi/symbols.js",".././node_modules/undici/lib/fileapi/util.js",".././node_modules/undici/lib/global.js",".././node_modules/undici/lib/handler/DecoratorHandler.js",".././node_modules/undici/lib/handler/RedirectHandler.js",".././node_modules/undici/lib/handler/RetryHandler.js",".././node_modules/undici/lib/interceptor/redirectInterceptor.js",".././node_modules/undici/lib/llhttp/constants.js",".././node_modules/undici/lib/llhttp/llhttp-wasm.js",".././node_modules/undici/lib/llhttp/llhttp_simd-wasm.js",".././node_modules/undici/lib/llhttp/utils.js",".././node_modules/undici/lib/mock/mock-agent.js",".././node_modules/undici/lib/mock/mock-client.js",".././node_modules/undici/lib/mock/mock-errors.js",".././node_modules/undici/lib/mock/mock-interceptor.js",".././node_modules/undici/lib/mock/mock-pool.js",".././node_modules/undici/lib/mock/mock-symbols.js",".././node_modules/undici/lib/mock/mock-utils.js",".././node_modules/undici/lib/mock/pending-interceptors-formatter.js",".././node_modules/undici/lib/mock/pluralizer.js",".././node_modules/undici/lib/node/fixed-queue.js",".././node_modules/undici/lib/pool-base.js",".././node_modules/undici/lib/pool-stats.js",".././node_modules/undici/lib/pool.js",".././node_modules/undici/lib/proxy-agent.js",".././node_modules/undici/lib/timers.js",".././node_modules/undici/lib/websocket/connection.js",".././node_modules/undici/lib/websocket/constants.js",".././node_modules/undici/lib/websocket/events.js",".././node_modules/undici/lib/websocket/frame.js",".././node_modules/undici/lib/websocket/receiver.js",".././node_modules/undici/lib/websocket/symbols.js",".././node_modules/undici/lib/websocket/util.js",".././node_modules/undici/lib/websocket/websocket.js",".././node_modules/universal-user-agent/dist-node/index.js",".././node_modules/util-deprecate/node.js",".././node_modules/uuid/dist/index.js",".././node_modules/uuid/dist/md5.js",".././node_modules/uuid/dist/nil.js",".././node_modules/uuid/dist/parse.js",".././node_modules/uuid/dist/regex.js",".././node_modules/uuid/dist/rng.js",".././node_modules/uuid/dist/sha1.js",".././node_modules/uuid/dist/stringify.js",".././node_modules/uuid/dist/v1.js",".././node_modules/uuid/dist/v3.js",".././node_modules/uuid/dist/v35.js",".././node_modules/uuid/dist/v4.js",".././node_modules/uuid/dist/v5.js",".././node_modules/uuid/dist/validate.js",".././node_modules/uuid/dist/version.js",".././node_modules/webidl-conversions/lib/index.js",".././node_modules/whatwg-url/lib/URL-impl.js",".././node_modules/whatwg-url/lib/URL.js",".././node_modules/whatwg-url/lib/public-api.js",".././node_modules/whatwg-url/lib/url-state-machine.js",".././node_modules/whatwg-url/lib/utils.js",".././node_modules/wrappy/wrappy.js",".././node_modules/yocto-queue/index.js",".././node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../external node-commonjs \"assert\"","../external node-commonjs \"async_hooks\"","../external node-commonjs \"buffer\"","../external node-commonjs \"child_process\"","../external node-commonjs \"console\"","../external node-commonjs \"crypto\"","../external node-commonjs \"diagnostics_channel\"","../external node-commonjs \"events\"","../external node-commonjs \"fs\"","../external node-commonjs \"http\"","../external 
node-commonjs \"http2\"","../external node-commonjs \"https\"","../external node-commonjs \"net\"","../external node-commonjs \"node:events\"","../external node-commonjs \"node:stream\"","../external node-commonjs \"node:util\"","../external node-commonjs \"os\"","../external node-commonjs \"path\"","../external node-commonjs \"perf_hooks\"","../external node-commonjs \"punycode\"","../external node-commonjs \"querystring\"","../external node-commonjs \"stream\"","../external node-commonjs \"stream/web\"","../external node-commonjs \"string_decoder\"","../external node-commonjs \"tls\"","../external node-commonjs \"tty\"","../external node-commonjs \"url\"","../external node-commonjs \"util\"","../external node-commonjs \"util/types\"","../external node-commonjs \"worker_threads\"","../external node-commonjs \"zlib\"",".././node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js",".././node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js",".././node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js",".././node_modules/@fastify/busboy/deps/streamsearch/sbmh.js",".././node_modules/@fastify/busboy/lib/main.js",".././node_modules/@fastify/busboy/lib/types/multipart.js",".././node_modules/@fastify/busboy/lib/types/urlencoded.js",".././node_modules/@fastify/busboy/lib/utils/Decoder.js",".././node_modules/@fastify/busboy/lib/utils/basename.js",".././node_modules/@fastify/busboy/lib/utils/decodeText.js",".././node_modules/@fastify/busboy/lib/utils/getLimit.js",".././node_modules/@fastify/busboy/lib/utils/parseParams.js",".././node_modules/@google-cloud/storage/build/cjs/src/acl.js",".././node_modules/@google-cloud/storage/build/cjs/src/bucket.js",".././node_modules/@google-cloud/storage/build/cjs/src/channel.js",".././node_modules/@google-cloud/storage/build/cjs/src/crc32c.js",".././node_modules/@google-cloud/storage/build/cjs/src/file.js",".././node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.js",".././node_modules/@google-cloud/storage/build/cjs/src/hmacKey.js",".././node_modules/@google-cloud/storage/build/cjs/src/iam.js",".././node_modules/@google-cloud/storage/build/cjs/src/index.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.js",".././node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.js",".././node_modules/@google-cloud/storage/build/cjs/src/notification.js",".././node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.js",".././node_modules/@google-cloud/storage/build/cjs/src/signer.js",".././node_modules/@google-cloud/storage/build/cjs/src/storage.js",".././node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.js",".././node_modules/@google-cloud/storage/build/cjs/src/util.js",".././node_modules/@google-cloud/storage/build/cjs/src/package-json-helper.cjs","../webpack/bootstrap","../webpack/runtime/compat","../webpack/before-startup","../webpack/startup","../webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleCloudStorageMutex = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst storage_1 = require(\"@google-cloud/storage\");\nconst common_1 = require(\"@google-cloud/common\");\nconst TIME_PERIOD_PATTERN = /^(?:(\\d+)m)?(?:(\\d+)s)?$/;\nclass TimePeriodError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'TimePeriodError';\n }\n}\n/**\n * Parse a number of microseconds from a time string.\n *\n */\nfunction parseTimePeriod(period) {\n const match = period.match(TIME_PERIOD_PATTERN);\n if (match === null) {\n throw new TimePeriodError(`Invalid time period string: ${period}`);\n }\n else {\n const minutes = match[1] || 0;\n const seconds = match[2] || 0;\n return Number(minutes) * 60 + Number(seconds);\n }\n}\n/**\n * Generate a default identifier this mutex\n *\n * The identifier is used to verify that this invocation is the owner of the\n * mutex before releasing it.\n */\nfunction defaultMutexIdentifier() {\n const context = github.context;\n return `${context.repo.owner}/${context.repo.repo}/${context.workflow}/${context.runNumber}/${context.job}`;\n}\nconst INITIAL_SLEEP = 500;\nclass GoogleCloudStorageMutex {\n constructor(bucket, name, options = {}) {\n this.file = new storage_1.File(new storage_1.Bucket(new storage_1.Storage(), bucket), name, {\n generation: 0\n });\n if (options.timeout === null || options.timeout === undefined) {\n this.timeout = 5 * 60 * 1000; // 5 minutes\n }\n else if (typeof options.timeout === 'string') {\n this.timeout = parseTimePeriod(options.timeout) * 1000;\n }\n else {\n this.timeout = options.timeout;\n }\n if (options.identifier === null || options.identifier === undefined) {\n this.identifier = defaultMutexIdentifier();\n }\n else {\n this.identifier = options.identifier;\n }\n }\n acquire() {\n return __awaiter(this, void 0, void 0, function* () {\n const start = Date.now();\n let sleep = INITIAL_SLEEP;\n for (;;) {\n try {\n yield 
this.file.save(this.identifier, { resumable: false });\n core.info('Acquired lock');\n return;\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 412) {\n const delta = Date.now() - start;\n if (delta < this.timeout) {\n core.info(`Cannot acquire lock, sleeping for ${sleep / 1000.0}s`);\n yield new Promise(resolve => setTimeout(resolve, sleep));\n sleep = sleep * 2;\n continue;\n }\n }\n core.info('Cannot acquire lock, raising');\n throw e;\n }\n }\n });\n }\n release() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const data = yield this.file.download();\n if (data[0].toString() === this.identifier) {\n yield this.file.delete();\n }\n else {\n core.info(`Locked by someone else (${data[0]} !== ${this.identifier})`);\n }\n }\n catch (e) {\n if (e instanceof common_1.ApiError && e.code === 404) {\n // no lock to release\n }\n else {\n throw e;\n }\n }\n });\n }\n}\nexports.GoogleCloudStorageMutex = GoogleCloudStorageMutex;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst locks_1 = require(\"./locks\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const mutex = new locks_1.GoogleCloudStorageMutex(core.getInput('bucket'), core.getInput('filename'), { timeout: core.getInput('timeout') });\n yield mutex.release();\n }\n catch (error) {\n if (error instanceof Error) {\n core.error(error.message);\n }\n else {\n core.error(`Unknown error ${error}`);\n }\n core.setFailed('Failed to release lock');\n }\n });\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 
'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise
<Summary>
} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (
<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl =\n (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options, ...additionalPlugins) {\n const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);\n return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nconst undici_1 = require(\"undici\");\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getProxyAgentDispatcher(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgentDispatcher(destinationUrl);\n}\nexports.getProxyAgentDispatcher = getProxyAgentDispatcher;\nfunction getProxyFetch(destinationUrl) {\n const httpDispatcher = getProxyAgentDispatcher(destinationUrl);\n const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () {\n return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher }));\n });\n return proxyFetch;\n}\nexports.getProxyFetch = getProxyFetch;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nexports.defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl),\n fetch: Utils.getProxyFetch(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nconst undici_1 = require(\"undici\");\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes || (exports.HttpCodes = HttpCodes = 
{}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers || (exports.Headers = Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n readBodyBuffer() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n const chunks = [];\n this.message.on('data', (chunk) => {\n chunks.push(chunk);\n });\n this.message.on('end', () => {\n resolve(Buffer.concat(chunks));\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] 
= this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. 
If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n getAgentDispatcher(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (!useProxy) {\n return;\n }\n return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? 
https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _getProxyAgentDispatcher(parsedUrl, proxyUrl) {\n let proxyAgent;\n if (this._keepAlive) {\n proxyAgent = this._proxyAgentDispatcher;\n }\n // if agent is already assigned use that agent.\n if (proxyAgent) {\n return proxyAgent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {\n token: `${proxyUrl.username}:${proxyUrl.password}`\n })));\n this._proxyAgentDispatcher = proxyAgent;\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {\n rejectUnauthorized: false\n });\n }\n return proxyAgent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use 
strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n try {\n return new URL(proxyVar);\n }\n catch (_a) {\n if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))\n return new URL(`http://${proxyVar}`);\n }\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = exports.Operation = void 0;\n/**\n * @type {module:common/operation}\n * @private\n */\nvar operation_1 = require(\"./operation\");\nObject.defineProperty(exports, \"Operation\", { enumerable: true, get: function () { return operation_1.Operation; } });\n/**\n * @type {module:common/service}\n * @private\n */\nvar service_1 = require(\"./service\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_1.Service; } });\n/**\n * @type {module:common/serviceObject}\n * @private\n */\nvar service_object_1 = require(\"./service-object\");\nObject.defineProperty(exports, 
\"ServiceObject\", { enumerable: true, get: function () { return service_object_1.ServiceObject; } });\n/**\n * @type {module:common/util}\n * @private\n */\nvar util_1 = require(\"./util\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_1.ApiError; } });\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_1.util; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2016 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Operation = void 0;\n/*!\n * @module common/operation\n */\nconst service_object_1 = require(\"./service-object\");\nconst util_1 = require(\"util\");\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass Operation extends service_object_1.ServiceObject {\n /**\n * An Operation object allows you to interact with APIs that take longer to\n * process things.\n *\n * @constructor\n * @alias module:common/operation\n *\n * @param {object} config - Configuration object.\n * @param {module:common/service|module:common/serviceObject|module:common/grpcService|module:common/grpcServiceObject} config.parent - The parent object.\n */\n constructor(config) {\n const methods = {\n /**\n * Checks to see if an operation exists.\n */\n exists: true,\n /**\n * Retrieves the operation.\n */\n get: true,\n /**\n * Retrieves metadata for the operation.\n */\n getMetadata: {\n reqOpts: {\n name: config.id,\n },\n },\n };\n config = Object.assign({\n baseUrl: '',\n }, config);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n config.methods = (config.methods || methods);\n super(config);\n this.completeListeners = 0;\n this.hasActiveListeners = false;\n this.listenForEvents_();\n }\n /**\n * Wraps the `complete` and `error` events in a Promise.\n *\n * @return {Promise}\n */\n promise() {\n return new Promise((resolve, reject) => {\n this.on('error', reject).on('complete', (metadata) => {\n resolve([metadata]);\n });\n });\n }\n /**\n * Begin listening for events on the operation. This method keeps track of how\n * many \"complete\" listeners are registered and removed, making sure polling\n * is handled automatically.\n *\n * As long as there is one active \"complete\" listener, the connection is open.\n * When there are no more listeners, the polling stops.\n *\n * @private\n */\n listenForEvents_() {\n this.on('newListener', (event) => {\n if (event === 'complete') {\n this.completeListeners++;\n if (!this.hasActiveListeners) {\n this.hasActiveListeners = true;\n this.startPolling_();\n }\n }\n });\n this.on('removeListener', (event) => {\n if (event === 'complete' && --this.completeListeners === 0) {\n this.hasActiveListeners = false;\n }\n });\n }\n /**\n * Poll for a status update. 
Returns null for an incomplete\n * status, and metadata for a complete status.\n *\n * @private\n */\n poll_(callback) {\n this.getMetadata((err, body) => {\n if (err || body.error) {\n callback(err || body.error);\n return;\n }\n if (!body.done) {\n callback(null);\n return;\n }\n callback(null, body);\n });\n }\n /**\n * Poll `getMetadata` to check the operation's status. This runs a loop to\n * ping the API on an interval.\n *\n * Note: This method is automatically called once a \"complete\" event handler\n * is registered on the operation.\n *\n * @private\n */\n async startPolling_() {\n if (!this.hasActiveListeners) {\n return;\n }\n try {\n const metadata = await (0, util_1.promisify)(this.poll_.bind(this))();\n if (!metadata) {\n setTimeout(this.startPolling_.bind(this), this.pollIntervalMs || 500);\n return;\n }\n this.emit('complete', metadata);\n }\n catch (err) {\n this.emit('error', err);\n }\n }\n}\nexports.Operation = Operation;\n//# sourceMappingURL=operation.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * @module common/service-object\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst arrify = require(\"arrify\");\nconst events_1 = require(\"events\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. 
For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.pollIntervalMs = config.pollIntervalMs;\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. a \"File\") to only have\n // the configured methods. We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = extend(true, {\n method: 'DELETE',\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n callback(err, ...args);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = extend(true, {\n uri: '',\n }, methodConfig.reqOpts, {\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = extend(true, {}, {\n method: 'PATCH',\n uri: '',\n }, methodConfig.reqOpts, {\n json: metadata,\n qs: options,\n });\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts);\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = arrify(reqOpts.interceptors_);\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n//# sourceMappingURL=service-object.js.map","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\n/*!\n * @module common/service\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst util_1 = require(\"./util\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = arrify(options.interceptors_);\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired 
!== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = extend({}, config, {\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n });\n this.makeAuthenticatedRequest =\n util_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout });\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n arrify(reqOpts.interceptors_).forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = util_1.util.getUserAgentFromPackageJson(pkg);\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = extend({}, reqOpts.headers, {\n 'User-Agent': userAgent,\n 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version}`,\n });\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, 
reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = extend(true, reqOpts, { shouldReturnStream: true });\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n//# sourceMappingURL=service.js.map","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = require(\"ent\");\nconst extend = require(\"extend\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retryRequest = require(\"retry-request\");\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst service_1 = require(\"./service\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst duplexify = require('duplexify');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = 
Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body));\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. 
If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = extend(true, defaultReqOpts, options.request, {\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n });\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted. 
This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = extend({}, config);\n if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = extend({}, config);\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = duplexify();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e, _f, _g;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined &&\n ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n throw new ApiError('autoRetry is deprecated. Use retryOptions.autoRetry instead.');\n }\n else if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries && ((_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)) {\n throw new ApiError('maxRetries is deprecated. Use retryOptions.maxRetries instead.');\n }\n else if (config.maxRetries) {\n maxRetryValue = config.maxRetries;\n }\n else if ((_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.maxRetryDelay,\n retryDelayMultiplier: (_f = config.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier,\n totalTimeout: (_g = config.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return retryRequest(reqOpts, options, (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = retryRequest(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Create a properly-formatted User-Agent string from a package.json file.\n *\n * @param {object} packageJson - A module's package.json file.\n * @return {string} userAgent - The formatted User-Agent string.\n */\n getUserAgentFromPackageJson(packageJson) {\n const hyphenatedPackageName = packageJson.name\n .replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + packageJson.version;\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object 
or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? [{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n//# sourceMappingURL=util.js.map","\"use strict\";\n/*!\n * Copyright 2015 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = exports.paginator = exports.Paginator = void 0;\n/*!\n * @module common/paginator\n */\nconst arrify = require(\"arrify\");\nconst extend = require(\"extend\");\nconst resource_stream_1 = require(\"./resource-stream\");\nObject.defineProperty(exports, \"ResourceStream\", { enumerable: true, get: function () { return resource_stream_1.ResourceStream; } });\n/*! Developer Documentation\n *\n * paginator is used to auto-paginate `nextQuery` methods as well as\n * streamifying them.\n *\n * Before:\n *\n * search.query('done=true', function(err, results, nextQuery) {\n * search.query(nextQuery, function(err, results, nextQuery) {});\n * });\n *\n * After:\n *\n * search.query('done=true', function(err, results) {});\n *\n * Methods to extend should be written to accept callbacks and return a\n * `nextQuery`.\n */\nclass Paginator {\n /**\n * Cache the original method, then overwrite it on the Class's prototype.\n *\n * @param {function} Class - The parent class of the methods to extend.\n * @param {string|string[]} methodNames - Name(s) of the methods to extend.\n */\n // tslint:disable-next-line:variable-name\n extend(Class, methodNames) {\n methodNames = arrify(methodNames);\n methodNames.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n // map the original method to a private member\n Class.prototype[methodName + '_'] = originalMethod;\n // overwrite the original to auto-paginate\n /* eslint-disable @typescript-eslint/no-explicit-any */\n Class.prototype[methodName] = function (...args) {\n const parsedArguments = paginator.parseArguments_(args);\n return paginator.run_(parsedArguments, originalMethod.bind(this));\n };\n });\n }\n /**\n * Wraps paginated API calls in a readable object stream.\n *\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. 
The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {string} methodName - Name of the method to streamify.\n * @return {function} - Wrapped function.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n streamify(methodName) {\n return function (\n /* eslint-disable @typescript-eslint/no-explicit-any */\n ...args) {\n const parsedArguments = paginator.parseArguments_(args);\n const originalMethod = this[methodName + '_'] || this[methodName];\n return paginator.runAsStream_(parsedArguments, originalMethod.bind(this));\n };\n }\n /**\n * Parse a pseudo-array `arguments` for a query and callback.\n *\n * @param {array} args - The original `arguments` pseduo-array that the original\n * method received.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n parseArguments_(args) {\n let query;\n let autoPaginate = true;\n let maxApiCalls = -1;\n let maxResults = -1;\n let callback;\n const firstArgument = args[0];\n const lastArgument = args[args.length - 1];\n if (typeof firstArgument === 'function') {\n callback = firstArgument;\n }\n else {\n query = firstArgument;\n }\n if (typeof lastArgument === 'function') {\n callback = lastArgument;\n }\n if (typeof query === 'object') {\n query = extend(true, {}, query);\n // Check if the user only asked for a certain amount of results.\n if (query.maxResults && typeof query.maxResults === 'number') {\n // `maxResults` is used API-wide.\n maxResults = query.maxResults;\n }\n else if (typeof query.pageSize === 'number') {\n // `pageSize` is Pub/Sub's `maxResults`.\n maxResults = query.pageSize;\n }\n if (query.maxApiCalls && typeof query.maxApiCalls === 'number') {\n maxApiCalls = query.maxApiCalls;\n delete query.maxApiCalls;\n }\n // maxResults is the user specified limit.\n if (maxResults !== -1 || query.autoPaginate === false) {\n autoPaginate = false;\n }\n }\n const parsedArguments = {\n query: query || {},\n autoPaginate,\n maxApiCalls,\n maxResults,\n callback,\n };\n parsedArguments.streamOptions = extend(true, {}, parsedArguments.query);\n delete parsedArguments.streamOptions.autoPaginate;\n delete parsedArguments.streamOptions.maxResults;\n delete parsedArguments.streamOptions.pageSize;\n return parsedArguments;\n }\n /**\n * This simply checks to see if `autoPaginate` is set or not, if it's true\n * then we buffer all results, otherwise simply call the original method.\n *\n * @param {array} parsedArguments - Parsed arguments from the original method\n * call.\n * @param {object=|string=} parsedArguments.query - Query object. 
This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n */\n run_(parsedArguments, originalMethod) {\n const query = parsedArguments.query;\n const callback = parsedArguments.callback;\n if (!parsedArguments.autoPaginate) {\n return originalMethod(query, callback);\n }\n const results = new Array();\n const promise = new Promise((resolve, reject) => {\n paginator\n .runAsStream_(parsedArguments, originalMethod)\n .on('error', reject)\n .on('data', (data) => results.push(data))\n .on('end', () => resolve(results));\n });\n if (!callback) {\n return promise.then(results => [results]);\n }\n promise.then(results => callback(null, results), (err) => callback(err));\n }\n /**\n * This method simply calls the nextQuery recursively, emitting results to a\n * stream. The stream ends when `nextQuery` is null.\n *\n * `maxResults` will act as a cap for how many results are fetched and emitted\n * to the stream.\n *\n * @param {object=|string=} parsedArguments.query - Query object. This is most\n * commonly an object, but to make the API more simple, it can also be a\n * string in some places.\n * @param {function=} parsedArguments.callback - Callback function.\n * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled.\n * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make.\n * @param {number} parsedArguments.maxResults - Maximum results to return.\n * @param {function} originalMethod - The cached method that accepts a callback\n * and returns `nextQuery` to receive more results.\n * @return {stream} - Readable object stream.\n */\n /* eslint-disable @typescript-eslint/no-explicit-any */\n runAsStream_(parsedArguments, originalMethod) {\n return new resource_stream_1.ResourceStream(parsedArguments, originalMethod);\n }\n}\nexports.Paginator = Paginator;\nconst paginator = new Paginator();\nexports.paginator = paginator;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/*!\n * Copyright 2019 Google Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ResourceStream = void 0;\nconst stream_1 = require(\"stream\");\nclass ResourceStream extends stream_1.Transform {\n constructor(args, requestFn) {\n const options = Object.assign({ objectMode: true }, args.streamOptions);\n super(options);\n this._ended = false;\n this._maxApiCalls = args.maxApiCalls === -1 ? 
Infinity : args.maxApiCalls;\n this._nextQuery = args.query;\n this._reading = false;\n this._requestFn = requestFn;\n this._requestsMade = 0;\n this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults;\n }\n /* eslint-disable @typescript-eslint/no-explicit-any */\n end(...args) {\n this._ended = true;\n return super.end(...args);\n }\n _read() {\n if (this._reading) {\n return;\n }\n this._reading = true;\n // Wrap in a try/catch to catch input linting errors, e.g.\n // an invalid BigQuery query. These errors are thrown in an\n // async fashion, which makes them un-catchable by the user.\n try {\n this._requestFn(this._nextQuery, (err, results, nextQuery) => {\n if (err) {\n this.destroy(err);\n return;\n }\n this._nextQuery = nextQuery;\n if (this._resultsToSend !== Infinity) {\n results = results.splice(0, this._resultsToSend);\n this._resultsToSend -= results.length;\n }\n let more = true;\n for (const result of results) {\n if (this._ended) {\n break;\n }\n more = this.push(result);\n }\n const isFinished = !this._nextQuery || this._resultsToSend < 1;\n const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls;\n if (isFinished || madeMaxCalls) {\n this.end();\n }\n if (more && !this._ended) {\n setImmediate(() => this._read());\n }\n this._reading = false;\n });\n }\n catch (e) {\n this.destroy(e);\n }\n }\n}\nexports.ResourceStream = ResourceStream;\n//# sourceMappingURL=resource-stream.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MissingProjectIdError = exports.replaceProjectIdToken = void 0;\nconst stream_1 = require(\"stream\");\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/**\n * Populate the `{{projectId}}` placeholder.\n *\n * @throws {Error} If a projectId is required, but one is not provided.\n *\n * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped.\n * @param {string} projectId - A projectId. 
If not provided\n * @return {*} - The original argument with all placeholders populated.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction replaceProjectIdToken(value, projectId) {\n if (Array.isArray(value)) {\n value = value.map(v => replaceProjectIdToken(v, projectId));\n }\n if (value !== null &&\n typeof value === 'object' &&\n !(value instanceof Buffer) &&\n !(value instanceof stream_1.Stream) &&\n typeof value.hasOwnProperty === 'function') {\n for (const opt in value) {\n // eslint-disable-next-line no-prototype-builtins\n if (value.hasOwnProperty(opt)) {\n value[opt] = replaceProjectIdToken(value[opt], projectId);\n }\n }\n }\n if (typeof value === 'string' &&\n value.indexOf('{{projectId}}') > -1) {\n if (!projectId || projectId === '{{projectId}}') {\n throw new MissingProjectIdError();\n }\n value = value.replace(/{{projectId}}/g, projectId);\n }\n return value;\n}\nexports.replaceProjectIdToken = replaceProjectIdToken;\n/**\n * Custom error type for missing project ID errors.\n */\nclass MissingProjectIdError extends Error {\n constructor() {\n super(...arguments);\n this.message = `Sorry, we cannot connect to Cloud Services without a project\n ID. You may specify one with an environment variable named\n \"GOOGLE_CLOUD_PROJECT\".`.replace(/ +/g, ' ');\n }\n}\nexports.MissingProjectIdError = MissingProjectIdError;\n//# sourceMappingURL=index.js.map","\"use strict\";\n/* eslint-disable prefer-rest-params */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0;\n/**\n * Wraps a callback style function to conditionally return a promise.\n *\n * @param {function} originalMethod - The method to promisify.\n * @param {object=} options - Promise options.\n * @param {boolean} options.singular - Resolve the promise with single arg instead of an array.\n * @return {function} wrapped\n */\nfunction promisify(originalMethod, options) {\n if (originalMethod.promisified_) {\n return originalMethod;\n }\n options = options || {};\n const slice = Array.prototype.slice;\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n let last;\n for (last = arguments.length - 1; last >= 0; last--) {\n const arg = arguments[last];\n if (typeof arg === 'undefined') {\n continue; // skip trailing undefined.\n }\n if (typeof arg !== 'function') {\n break; // non-callback last argument found.\n }\n return originalMethod.apply(this, arguments);\n }\n // peel trailing undefined.\n const args = slice.call(arguments, 0, last + 1);\n // tslint:disable-next-line:variable-name\n let PromiseCtor = Promise;\n // Because dedupe will likely create a single install of\n // @google-cloud/common to be shared amongst all modules, we need to\n // localize it at the Service level.\n if (this && this.Promise) {\n PromiseCtor = this.Promise;\n }\n return new PromiseCtor((resolve, reject) => {\n // tslint:disable-next-line:no-any\n args.push((...args) => {\n const callbackArgs = slice.call(args);\n const err = callbackArgs.shift();\n if (err) {\n return reject(err);\n }\n if (options.singular && callbackArgs.length === 1) {\n resolve(callbackArgs[0]);\n }\n else {\n resolve(callbackArgs);\n }\n });\n originalMethod.apply(this, args);\n });\n };\n wrapper.promisified_ = true;\n return wrapper;\n}\nexports.promisify = promisify;\n/**\n * Promisifies certain Class methods. 
This will not promisify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\n// tslint:disable-next-line:variable-name\nfunction promisifyAll(Class, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.promisified_) {\n Class.prototype[methodName] = exports.promisify(originalMethod, options);\n }\n });\n}\nexports.promisifyAll = promisifyAll;\n/**\n * Wraps a promisy type function to conditionally call a callback function.\n *\n * @param {function} originalMethod - The method to callbackify.\n * @param {object=} options - Callback options.\n * @param {boolean} options.singular - Pass to the callback a single arg instead of an array.\n * @return {function} wrapped\n */\nfunction callbackify(originalMethod) {\n if (originalMethod.callbackified_) {\n return originalMethod;\n }\n // tslint:disable-next-line:no-any\n const wrapper = function () {\n if (typeof arguments[arguments.length - 1] !== 'function') {\n return originalMethod.apply(this, arguments);\n }\n const cb = Array.prototype.pop.call(arguments);\n originalMethod.apply(this, arguments).then(\n // tslint:disable-next-line:no-any\n (res) => {\n res = Array.isArray(res) ? res : [res];\n cb(null, ...res);\n }, (err) => cb(err));\n };\n wrapper.callbackified_ = true;\n return wrapper;\n}\nexports.callbackify = callbackify;\n/**\n * Callbackifies certain Class methods. 
This will not callbackify private or\n * streaming methods.\n *\n * @param {module:common/service} Class - Service class.\n * @param {object=} options - Configuration object.\n */\nfunction callbackifyAll(\n// tslint:disable-next-line:variable-name\nClass, options) {\n const exclude = (options && options.exclude) || [];\n const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype);\n const methods = ownPropertyNames.filter(methodName => {\n // clang-format off\n return (!exclude.includes(methodName) &&\n typeof Class.prototype[methodName] === 'function' && // is it a function?\n !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable?\n );\n // clang-format on\n });\n methods.forEach(methodName => {\n const originalMethod = Class.prototype[methodName];\n if (!originalMethod.callbackified_) {\n Class.prototype[methodName] = exports.callbackify(originalMethod);\n }\n });\n}\nexports.callbackifyAll = callbackifyAll;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n createTokenAuth: () => createTokenAuth\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/auth.js\nvar REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nvar REGEX_IS_INSTALLATION = /^ghs_/;\nvar REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token,\n tokenType\n };\n}\n\n// pkg/dist-src/with-authorization-prefix.js\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n\n// pkg/dist-src/hook.js\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\n// pkg/dist-src/index.js\nvar createTokenAuth = function createTokenAuth2(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\n \"[@octokit/auth-token] Token passed to createTokenAuth is not a string\"\n );\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n createTokenAuth\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n Octokit: () => Octokit\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_universal_user_agent = require(\"universal-user-agent\");\nvar import_before_after_hook = require(\"before-after-hook\");\nvar import_request = require(\"@octokit/request\");\nvar import_graphql = require(\"@octokit/graphql\");\nvar import_auth_token = require(\"@octokit/auth-token\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"5.1.0\";\n\n// pkg/dist-src/index.js\nvar noop = () => {\n};\nvar consoleWarn = console.warn.bind(console);\nvar consoleError = console.error.bind(console);\nvar userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;\nvar Octokit = class {\n static {\n this.VERSION = VERSION;\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(\n Object.assign(\n {},\n defaults,\n options,\n options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null\n )\n );\n }\n };\n return OctokitWithDefaults;\n }\n static {\n this.plugins = [];\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n const currentPlugins = this.plugins;\n const NewOctokit = class extends this {\n static {\n this.plugins = currentPlugins.concat(\n newPlugins.filter((plugin) => !currentPlugins.includes(plugin))\n );\n }\n };\n return NewOctokit;\n }\n constructor(options = {}) {\n const hook = new import_before_after_hook.Collection();\n const requestDefaults = {\n baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n };\n requestDefaults.headers[\"user-agent\"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail;\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = import_request.request.defaults(requestDefaults);\n this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);\n this.log = Object.assign(\n {\n debug: noop,\n info: noop,\n warn: consoleWarn,\n error: consoleError\n },\n options.log\n );\n this.hook = hook;\n if (!options.authStrategy) {\n if (!options.auth) {\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n const auth = (0, import_auth_token.createTokenAuth)(options.auth);\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(\n Object.assign(\n {\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n },\n options.auth\n )\n );\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n const classConstructor = this.constructor;\n for (let i = 0; i < classConstructor.plugins.length; ++i) {\n Object.assign(this, classConstructor.plugins[i](this, options));\n }\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n Octokit\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n endpoint: () => endpoint\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/defaults.js\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.0.4\";\n\n// pkg/dist-src/defaults.js\nvar userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;\nvar DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\"\n }\n};\n\n// pkg/dist-src/util/lowercase-keys.js\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\n// pkg/dist-src/util/is-plain-object.js\nfunction isPlainObject(value) {\n if (typeof value !== \"object\" || value === null)\n return false;\n if (Object.prototype.toString.call(value) !== \"[object Object]\")\n return false;\n const proto = Object.getPrototypeOf(value);\n if (proto === null)\n return true;\n const Ctor = Object.prototype.hasOwnProperty.call(proto, \"constructor\") && proto.constructor;\n return typeof Ctor === \"function\" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);\n}\n\n// pkg/dist-src/util/merge-deep.js\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n\n// pkg/dist-src/util/remove-undefined-properties.js\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === void 0) {\n delete obj[key];\n }\n }\n return obj;\n}\n\n// pkg/dist-src/merge.js\nfunction merge(defaults, route, options) {\n if (typeof route 
=== \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n } else {\n options = Object.assign({}, route);\n }\n options.headers = lowercaseKeys(options.headers);\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n if (options.url === \"/graphql\") {\n if (defaults && defaults.mediaType.previews?.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(\n (preview) => !mergedOptions.mediaType.previews.includes(preview)\n ).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, \"\"));\n }\n return mergedOptions;\n}\n\n// pkg/dist-src/util/add-query-parameters.js\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map((name) => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\n// pkg/dist-src/util/extract-url-variable-names.js\nvar urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\n// pkg/dist-src/util/omit.js\nfunction omit(object, keysToOmit) {\n const result = { __proto__: null };\n for (const key of Object.keys(object)) {\n if (keysToOmit.indexOf(key) === -1) {\n result[key] = object[key];\n }\n }\n return result;\n}\n\n// pkg/dist-src/util/url-template.js\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== void 0 && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(\n encodeValue(operator, value, isKeyOperator(operator) ? key : \"\")\n );\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n result.push(\n encodeValue(operator, value2, isKeyOperator(operator) ? 
key : \"\")\n );\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n tmp.push(encodeValue(operator, value2));\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n template = template.replace(\n /\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g,\n function(_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function(variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n }\n );\n if (template === \"/\") {\n return template;\n } else {\n return template.replace(/\\/$/, \"\");\n }\n}\n\n// pkg/dist-src/parse.js\nfunction parse(options) {\n let method = options.method.toUpperCase();\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\"\n ]);\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n headers.accept = headers.accept.split(/,/).map(\n (format) => format.replace(\n /application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/,\n `application/vnd$1$2.${options.mediaType.format}`\n )\n ).join(\",\");\n }\n if (url.endsWith(\"/graphql\")) {\n if (options.mediaType.previews?.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {\n const format = options.mediaType.format ? 
`.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n }\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n return Object.assign(\n { method, url, headers },\n typeof body !== \"undefined\" ? { body } : null,\n options.request ? { request: options.request } : null\n );\n}\n\n// pkg/dist-src/endpoint-with-defaults.js\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS2 = merge(oldDefaults, newDefaults);\n const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);\n return Object.assign(endpoint2, {\n DEFAULTS: DEFAULTS2,\n defaults: withDefaults.bind(null, DEFAULTS2),\n merge: merge.bind(null, DEFAULTS2),\n parse\n });\n}\n\n// pkg/dist-src/index.js\nvar endpoint = withDefaults(null, DEFAULTS);\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n endpoint\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n GraphqlResponseError: () => GraphqlResponseError,\n graphql: () => graphql2,\n withCustomRequest: () => withCustomRequest\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_request3 = require(\"@octokit/request\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"7.0.2\";\n\n// pkg/dist-src/with-defaults.js\nvar import_request2 = require(\"@octokit/request\");\n\n// pkg/dist-src/graphql.js\nvar import_request = require(\"@octokit/request\");\n\n// pkg/dist-src/error.js\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\n` + data.errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n}\nvar GraphqlResponseError = class extends Error {\n constructor(request2, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request2;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n this.errors = response.errors;\n this.data = response.data;\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, 
this.constructor);\n }\n }\n};\n\n// pkg/dist-src/graphql.js\nvar NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\"\n];\nvar FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nvar GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request2, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(\n new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`)\n );\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(\n new Error(\n `[@octokit/graphql] \"${key}\" cannot be used as variable name`\n )\n );\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(\n parsedOptions\n ).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request2(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(\n requestOptions,\n headers,\n response.data\n );\n }\n return response.data.data;\n });\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(request2, newDefaults) {\n const newRequest = request2.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\n\n// pkg/dist-src/index.js\nvar graphql2 = withDefaults(import_request3.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n GraphqlResponseError,\n graphql,\n withCustomRequest\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n composePaginateRest: () => composePaginateRest,\n isPaginatingEndpoint: () 
=> isPaginatingEndpoint,\n paginateRest: () => paginateRest,\n paginatingEndpoints: () => paginatingEndpoints\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"9.1.5\";\n\n// pkg/dist-src/normalize-paginated-list-response.js\nfunction normalizePaginatedListResponse(response) {\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n\n// pkg/dist-src/iterator.js\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n url = ((normalizedResponse.headers.link || \"\").match(\n /<([^>]+)>;\\s*rel=\"next\"/\n ) || [])[1];\n return { value: normalizedResponse };\n } catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\n\n// pkg/dist-src/paginate.js\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = void 0;\n }\n return gather(\n octokit,\n [],\n iterator(octokit, route, parameters)[Symbol.asyncIterator](),\n mapFn\n );\n}\nfunction gather(octokit, results, iterator2, mapFn) {\n return iterator2.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(\n mapFn ? 
mapFn(result.value, done) : result.value.data\n );\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator2, mapFn);\n });\n}\n\n// pkg/dist-src/compose-paginate.js\nvar composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n// pkg/dist-src/generated/paginating-endpoints.js\nvar paginatingEndpoints = [\n \"GET /advisories\",\n \"GET /app/hook/deliveries\",\n \"GET /app/installation-requests\",\n \"GET /app/installations\",\n \"GET /assignments/{assignment_id}/accepted_assignments\",\n \"GET /classrooms\",\n \"GET /classrooms/{classroom_id}/assignments\",\n \"GET /enterprises/{enterprise}/dependabot/alerts\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/actions/variables\",\n \"GET /orgs/{org}/actions/variables/{name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET /orgs/{org}/codespaces\",\n \"GET /orgs/{org}/codespaces/secrets\",\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/copilot/billing/seats\",\n \"GET /orgs/{org}/dependabot/alerts\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/members/{username}/codespaces\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/personal-access-token-requests\",\n \"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories\",\n \"GET /orgs/{org}/personal-access-tokens\",\n \"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/properties/values\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/rulesets\",\n \"GET /orgs/{org}/rulesets/rule-suites\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/security-advisories\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\",\n \"GET /repos/{owner}/{repo}/actions/organization-variables\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/variables\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/activity\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/dependabot/alerts\",\n \"GET /repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/rules/branches/{branch}\",\n \"GET /repos/{owner}/{repo}/rulesets\",\n \"GET /repos/{owner}/{repo}/rulesets/rule-suites\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/security-advisories\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/social_accounts\",\n \"GET /user/ssh_signing_keys\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n 
\"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/social_accounts\",\n \"GET /users/{username}/ssh_signing_keys\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\"\n];\n\n// pkg/dist-src/paginating-endpoints.js\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n// pkg/dist-src/index.js\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n composePaginateRest,\n isPaginatingEndpoint,\n paginateRest,\n paginatingEndpoints\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n legacyRestEndpointMethods: () => legacyRestEndpointMethods,\n restEndpointMethods: () => restEndpointMethods\n});\nmodule.exports = __toCommonJS(dist_src_exports);\n\n// pkg/dist-src/version.js\nvar VERSION = \"10.3.0\";\n\n// pkg/dist-src/generated/endpoints.js\nvar Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"\n ],\n createEnvironmentVariable: [\n \"POST /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\n \"POST 
/orgs/{org}/actions/runners/registration-token\"\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\"\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\"\n ],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n deleteEnvironmentVariable: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n deleteRepoVariable: [\n \"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\"\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"\n ],\n forceCancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel\"\n ],\n generateRunnerJitconfigForOrg: [\n \"POST /orgs/{org}/actions/runners/generate-jitconfig\"\n ],\n generateRunnerJitconfigForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig\"\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\"\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\"\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n getArtifact: [\"GET 
/repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getCustomOidcSubClaimForRepo: [\n \"GET /repos/{owner}/{repo}/actions/oidc/customization/sub\"\n ],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n getEnvironmentVariable: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\"\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\"\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\"\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] }\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"\n ],\n listEnvironmentVariables: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoOrganizationSecrets: [\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\"\n ],\n 
listRepoOrganizationVariables: [\n \"GET /repos/{owner}/{repo}/actions/organization-variables\"\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\"\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n listSelectedReposForOrgVariable: [\n \"GET /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\"\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgVariable: [\n \"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n reviewCustomGatesForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule\"\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\"\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n setCustomOidcSubClaimForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/oidc/customization/sub\"\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\"\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\"\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT 
/orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\"\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n updateEnvironmentVariable: [\n \"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\n \"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"\n ]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\"\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\"\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\"\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\"\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\"\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\"\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] }\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\"\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n 
deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\"\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\"\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\"\n ],\n listInstallationRequestsForAuthenticatedApp: [\n \"GET /app/installation-requests\"\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\"\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\"\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] }\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\"\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\"\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\"\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\"\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\"\n ]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"\n ],\n listForRef: [\"GET 
/repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } }\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"\n ],\n getCodeqlDatabase: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"\n ],\n getDefaultSetup: [\"GET /repos/{owner}/{repo}/code-scanning/default-setup\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] }\n ],\n listCodeqlDatabases: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases\"\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"\n ],\n updateDefaultSetup: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/default-setup\"\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n checkPermissionsForDevcontainer: [\n \"GET /repos/{owner}/{repo}/codespaces/permissions_check\"\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\"\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\"\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\"\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE 
/repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\"\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\"\n ],\n getCodespacesForUserInOrg: [\n \"GET /orgs/{org}/members/{username}/codespaces\"\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\"\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\"\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\"\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\"\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } }\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n preFlightWithRepoForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/new\"\n ],\n publishForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/publish\"\n ],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\"\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n copilot: {\n addCopilotForBusinessSeatsForTeams: [\n \"POST /orgs/{org}/copilot/billing/selected_teams\"\n ],\n addCopilotForBusinessSeatsForUsers: [\n \"POST /orgs/{org}/copilot/billing/selected_users\"\n ],\n cancelCopilotSeatAssignmentForTeams: [\n \"DELETE /orgs/{org}/copilot/billing/selected_teams\"\n ],\n cancelCopilotSeatAssignmentForUsers: [\n \"DELETE /orgs/{org}/copilot/billing/selected_users\"\n ],\n getCopilotOrganizationDetails: [\"GET /orgs/{org}/copilot/billing\"],\n getCopilotSeatDetailsForUser: [\n \"GET /orgs/{org}/members/{username}/copilot\"\n ],\n listCopilotSeats: [\"GET 
/orgs/{org}/copilot/billing/seats\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/dependabot/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"\n ]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"\n ],\n exportSbom: [\"GET /repos/{owner}/{repo}/dependency-graph/sbom\"]\n },\n emojis: { get: [\"GET /emojis\"] },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET 
/repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] }\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\"\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] }\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] }\n ]\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"\n ],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET 
/repos/{owner}/{repo}/milestones/{milestone_number}/labels\"\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } }\n ]\n },\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\n \"DELETE /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import\"\n }\n ],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\"\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\"\n ],\n getCommitAuthors: [\n \"GET /repos/{owner}/{repo}/import/authors\",\n {},\n {\n deprecated: \"octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors\"\n }\n ],\n getImportStatus: [\n \"GET /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status\"\n }\n ],\n getLargeFiles: [\n \"GET /repos/{owner}/{repo}/import/large_files\",\n {},\n {\n deprecated: \"octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files\"\n }\n ],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\"\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] }\n ],\n mapCommitAuthor: [\n \"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\",\n {},\n {\n 
deprecated: \"octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author\"\n }\n ],\n setLfsPreference: [\n \"PATCH /repos/{owner}/{repo}/import/lfs\",\n {},\n {\n deprecated: \"octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference\"\n }\n ],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\n \"PUT /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import\"\n }\n ],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n updateImport: [\n \"PATCH /repos/{owner}/{repo}/import\",\n {},\n {\n deprecated: \"octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import\"\n }\n ]\n },\n oidc: {\n getOidcCustomSubTemplateForOrg: [\n \"GET /orgs/{org}/actions/oidc/customization/sub\"\n ],\n updateOidcCustomSubTemplateForOrg: [\n \"PUT /orgs/{org}/actions/oidc/customization/sub\"\n ]\n },\n orgs: {\n addSecurityManagerTeam: [\n \"PUT /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\"\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createOrUpdateCustomProperties: [\"PATCH /orgs/{org}/properties/schema\"],\n createOrUpdateCustomPropertiesValuesForRepos: [\n \"PATCH /orgs/{org}/properties/values\"\n ],\n createOrUpdateCustomProperty: [\n \"PUT /orgs/{org}/properties/schema/{custom_property_name}\"\n ],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n delete: [\"DELETE /orgs/{org}\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\n \"POST /orgs/{org}/{security_product}/{enablement}\"\n ],\n get: [\"GET /orgs/{org}\"],\n getAllCustomProperties: [\"GET /orgs/{org}/properties/schema\"],\n getCustomProperty: [\n \"GET /orgs/{org}/properties/schema/{custom_property_name}\"\n ],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listCustomPropertiesValuesForRepos: [\"GET /orgs/{org}/properties/values\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET 
/orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPatGrantRepositories: [\n \"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories\"\n ],\n listPatGrantRequestRepositories: [\n \"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories\"\n ],\n listPatGrantRequests: [\"GET /orgs/{org}/personal-access-token-requests\"],\n listPatGrants: [\"GET /orgs/{org}/personal-access-tokens\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeCustomProperty: [\n \"DELETE /orgs/{org}/properties/schema/{custom_property_name}\"\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\"\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\"\n ],\n removeSecurityManagerTeam: [\n \"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n reviewPatGrantRequest: [\n \"POST /orgs/{org}/personal-access-token-requests/{pat_request_id}\"\n ],\n reviewPatGrantRequestsInBulk: [\n \"POST /orgs/{org}/personal-access-token-requests\"\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\"\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\"\n ],\n updatePatAccess: [\"POST /orgs/{org}/personal-access-tokens/{pat_id}\"],\n updatePatAccesses: [\"POST /orgs/{org}/personal-access-tokens\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\"\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\"\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] }\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"\n ]\n }\n ],\n 
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\"\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\"\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\"\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n listDockerMigrationConflictingPackagesForAuthenticatedUser: [\n \"GET /user/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForOrganization: [\n \"GET /orgs/{org}/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForUser: [\n \"GET /users/{username}/docker/conflicts\"\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\"\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: 
[\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\"\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ]\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ],\n deleteForCommitComment: [\n \"DELETE 
/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ]\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] }\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\"\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n checkAutomatedSecurityFixes: [\n \"GET /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\"\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentBranchPolicy: 
[\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n createDeploymentProtectionRule: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createOrgRuleset: [\"POST /orgs/{org}/rulesets\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createRepoRuleset: [\"POST /repos/{owner}/{repo}/rulesets\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\"\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] }\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\"\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"\n ],\n deleteDeploymentBranchPolicy: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n deleteOrgRuleset: [\"DELETE /orgs/{org}/rulesets/{ruleset_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n deleteRepoRuleset: [\"DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n 
disableDeploymentProtectionRule: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n disablePrivateVulnerabilityReporting: [\n \"DELETE /repos/{owner}/{repo}/private-vulnerability-reporting\"\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] }\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n enablePrivateVulnerabilityReporting: [\n \"PUT /repos/{owner}/{repo}/private-vulnerability-reporting\"\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\"\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n getAllDeploymentProtectionRules: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n getBranchRules: [\"GET /repos/{owner}/{repo}/rules/branches/{branch}\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getCustomDeploymentProtectionRule: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n getCustomPropertiesValues: [\"GET /repos/{owner}/{repo}/properties/values\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n getDeploymentStatus: [\n \"GET 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getOrgRuleSuite: [\"GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}\"],\n getOrgRuleSuites: [\"GET /orgs/{org}/rulesets/rule-suites\"],\n getOrgRuleset: [\"GET /orgs/{org}/rulesets/{ruleset_id}\"],\n getOrgRulesets: [\"GET /orgs/{org}/rulesets\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getRepoRuleSuite: [\n \"GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}\"\n ],\n getRepoRuleSuites: [\"GET /repos/{owner}/{repo}/rulesets/rule-suites\"],\n getRepoRuleset: [\"GET /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n getRepoRulesets: [\"GET /repos/{owner}/{repo}/rulesets\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n listActivities: [\"GET /repos/{owner}/{repo}/activity\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listCustomDeploymentRuleIntegrations: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\"\n ],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\n \"GET 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\"\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH 
/repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n updateOrgRuleset: [\"PUT /orgs/{org}/rulesets/{ruleset_id}\"],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n updateRepoRuleset: [\"PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] }\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" }\n ]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ]\n },\n securityAdvisories: {\n createPrivateVulnerabilityReport: [\n \"POST /repos/{owner}/{repo}/security-advisories/reports\"\n ],\n createRepositoryAdvisory: [\n \"POST /repos/{owner}/{repo}/security-advisories\"\n ],\n createRepositoryAdvisoryCveRequest: [\n \"POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve\"\n ],\n getGlobalAdvisory: [\"GET /advisories/{ghsa_id}\"],\n getRepositoryAdvisory: [\n \"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ],\n listGlobalAdvisories: [\"GET /advisories\"],\n listOrgRepositoryAdvisories: [\"GET /orgs/{org}/security-advisories\"],\n listRepositoryAdvisories: [\"GET /repos/{owner}/{repo}/security-advisories\"],\n updateRepositoryAdvisory: [\n \"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n 
checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\"\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] }\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n addSocialAccountForAuthenticatedUser: [\"POST /user/social_accounts\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] }\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] }\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] }\n ],\n deleteEmailForAuthenticatedUser: 
[\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] }\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] }\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSocialAccountForAuthenticatedUser: [\"DELETE /user/social_accounts\"],\n deleteSshSigningKeyForAuthenticatedUser: [\n \"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] }\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] }\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\n \"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] }\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] }\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] }\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] }\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] }\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] }\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSocialAccountsForAuthenticatedUser: [\"GET /user/social_accounts\"],\n listSocialAccountsForUser: [\"GET /users/{username}/social_accounts\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] }\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\"\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: 
[\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\nvar endpoints_default = Endpoints;\n\n// pkg/dist-src/endpoints-to-methods.js\nvar endpointMethodsMap = /* @__PURE__ */ new Map();\nfor (const [scope, endpoints] of Object.entries(endpoints_default)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign(\n {\n method,\n url\n },\n defaults\n );\n if (!endpointMethodsMap.has(scope)) {\n endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());\n }\n endpointMethodsMap.get(scope).set(methodName, {\n scope,\n methodName,\n endpointDefaults,\n decorations\n });\n }\n}\nvar handler = {\n has({ scope }, methodName) {\n return endpointMethodsMap.get(scope).has(methodName);\n },\n getOwnPropertyDescriptor(target, methodName) {\n return {\n value: this.get(target, methodName),\n // ensures method is in the cache\n configurable: true,\n writable: true,\n enumerable: true\n };\n },\n defineProperty(target, methodName, descriptor) {\n Object.defineProperty(target.cache, methodName, descriptor);\n return true;\n },\n deleteProperty(target, methodName) {\n delete target.cache[methodName];\n return true;\n },\n ownKeys({ scope }) {\n return [...endpointMethodsMap.get(scope).keys()];\n },\n set(target, methodName, value) {\n return target.cache[methodName] = value;\n },\n get({ octokit, scope, cache }, methodName) {\n if (cache[methodName]) {\n return cache[methodName];\n }\n const method = endpointMethodsMap.get(scope).get(methodName);\n if (!method) {\n return void 0;\n }\n const { endpointDefaults, decorations } = method;\n if (decorations) {\n cache[methodName] = decorate(\n octokit,\n scope,\n methodName,\n endpointDefaults,\n decorations\n );\n } else {\n cache[methodName] = octokit.request.defaults(endpointDefaults);\n }\n return cache[methodName];\n }\n};\nfunction endpointsToMethods(octokit) {\n const newMethods = {};\n for (const scope of endpointMethodsMap.keys()) {\n newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n function withDecorations(...args) {\n let options = requestWithDefaults.endpoint.merge(...args);\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: void 0\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(\n `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`\n );\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n const options2 = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(\n decorations.renamedParameters\n )) {\n if (name in options2) {\n octokit.log.warn(\n `\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`\n );\n if (!(alias in options2)) {\n options2[alias] = options2[name];\n }\n delete options2[name];\n }\n }\n return requestWithDefaults(options2);\n }\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n// pkg/dist-src/index.js\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n legacyRestEndpointMethods,\n restEndpointMethods\n});\n","\"use strict\";\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(\n // If the importer is in node compatibility mode or this is not an ESM\n // file that has been converted to a CommonJS file using a Babel-\n // compatible transform (i.e. \"__esModule\" has not been set), then set\n // \"default\" to the CommonJS \"module.exports\" for node compatibility.\n isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, \"default\", { value: mod, enumerable: true }) : target,\n mod\n));\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n RequestError: () => RequestError\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_deprecation = require(\"deprecation\");\nvar import_once = __toESM(require(\"once\"));\nvar logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));\nvar RequestError = class extends Error {\n constructor(message, statusCode, options) {\n super(message);\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(\n / .*$/,\n \" [REDACTED]\"\n )\n });\n }\n requestCopy.url = requestCopy.url.replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\").replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"\n )\n );\n return statusCode;\n }\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(\n new import_deprecation.Deprecation(\n \"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"\n )\n );\n return headers || {};\n }\n });\n }\n};\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n RequestError\n});\n","\"use strict\";\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toCommonJS = (mod) => __copyProps(__defProp({}, \"__esModule\", { value: true }), mod);\n\n// pkg/dist-src/index.js\nvar dist_src_exports = {};\n__export(dist_src_exports, {\n request: () => request\n});\nmodule.exports = __toCommonJS(dist_src_exports);\nvar import_endpoint = require(\"@octokit/endpoint\");\nvar import_universal_user_agent = require(\"universal-user-agent\");\n\n// pkg/dist-src/version.js\nvar VERSION = \"8.2.0\";\n\n// pkg/dist-src/is-plain-object.js\nfunction isPlainObject(value) {\n if (typeof value !== \"object\" || value === null)\n return false;\n if (Object.prototype.toString.call(value) !== \"[object Object]\")\n return 
false;\n const proto = Object.getPrototypeOf(value);\n if (proto === null)\n return true;\n const Ctor = Object.prototype.hasOwnProperty.call(proto, \"constructor\") && proto.constructor;\n return typeof Ctor === \"function\" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);\n}\n\n// pkg/dist-src/fetch-wrapper.js\nvar import_request_error = require(\"@octokit/request-error\");\n\n// pkg/dist-src/get-buffer-response.js\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\n// pkg/dist-src/fetch-wrapper.js\nfunction fetchWrapper(requestOptions) {\n var _a, _b, _c;\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;\n if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n let { fetch } = globalThis;\n if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {\n fetch = requestOptions.request.fetch;\n }\n if (!fetch) {\n throw new Error(\n \"fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing\"\n );\n }\n return fetch(requestOptions.url, {\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,\n // duplex must be set if request.body is ReadableStream or Async Iterables.\n // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.\n ...requestOptions.body && { duplex: \"half\" }\n }).then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(\n `[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`\n );\n }\n if (status === 204 || status === 205) {\n return;\n }\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new import_request_error.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: void 0\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new import_request_error.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new import_request_error.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return parseSuccessResponseBody ? 
await getResponseData(response) : response.body;\n }).then((data) => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch((error) => {\n if (error instanceof import_request_error.RequestError)\n throw error;\n else if (error.name === \"AbortError\")\n throw error;\n let message = error.message;\n if (error.name === \"TypeError\" && \"cause\" in error) {\n if (error.cause instanceof Error) {\n message = error.cause.message;\n } else if (typeof error.cause === \"string\") {\n message = error.cause;\n }\n }\n throw new import_request_error.RequestError(message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json().catch(() => response.text()).catch(() => \"\");\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBufferResponse(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n let suffix;\n if (\"documentation_url\" in data) {\n suffix = ` - ${data.documentation_url}`;\n } else {\n suffix = \"\";\n }\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}${suffix}`;\n }\n return `${data.message}${suffix}`;\n }\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\n// pkg/dist-src/with-defaults.js\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint2 = oldEndpoint.defaults(newDefaults);\n const newApi = function(route, parameters) {\n const endpointOptions = endpoint2.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint2.parse(endpointOptions));\n }\n const request2 = (route2, parameters2) => {\n return fetchWrapper(\n endpoint2.parse(endpoint2.merge(route2, parameters2))\n );\n };\n Object.assign(request2, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n return endpointOptions.request.hook(request2, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint: endpoint2,\n defaults: withDefaults.bind(null, endpoint2)\n });\n}\n\n// pkg/dist-src/index.js\nvar request = withDefaults(import_endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`\n }\n});\n// Annotate the CommonJS export names for ESM import in node:\n0 && (module.exports = {\n request\n});\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction once(emitter, name, { signal } = {}) {\n return new Promise((resolve, reject) => {\n function cleanup() {\n signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);\n emitter.removeListener(name, onEvent);\n emitter.removeListener('error', onError);\n }\n function onEvent(...args) {\n cleanup();\n resolve(args);\n }\n function onError(err) {\n cleanup();\n reject(err);\n }\n signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup);\n emitter.on(name, onEvent);\n emitter.on('error', onError);\n });\n}\nexports.default = once;\n//# sourceMappingURL=index.js.map","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? \"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? 
\"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst events_1 = require(\"events\");\nconst debug_1 = __importDefault(require(\"debug\"));\nconst promisify_1 = __importDefault(require(\"./promisify\"));\nconst debug = debug_1.default('agent-base');\nfunction isAgent(v) {\n return Boolean(v) && typeof v.addRequest === 'function';\n}\nfunction isSecureEndpoint() {\n const { stack } = new Error();\n if (typeof stack !== 'string')\n return false;\n return stack.split('\\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1);\n}\nfunction createAgent(callback, opts) {\n return new createAgent.Agent(callback, opts);\n}\n(function (createAgent) {\n /**\n * Base `http.Agent` implementation.\n * No pooling/keep-alive is implemented by default.\n *\n * @param {Function} callback\n * @api public\n */\n class Agent extends events_1.EventEmitter {\n constructor(callback, _opts) {\n super();\n let opts = _opts;\n if (typeof callback === 'function') {\n this.callback = callback;\n }\n else if (callback) {\n opts = callback;\n }\n // Timeout for the socket to be returned from the callback\n this.timeout = null;\n if (opts && typeof opts.timeout === 'number') {\n this.timeout = opts.timeout;\n }\n // These aren't actually used by `agent-base`, but are required\n // for the TypeScript definition files in `@types/node` :/\n this.maxFreeSockets = 1;\n this.maxSockets = 1;\n this.maxTotalSockets = Infinity;\n this.sockets = {};\n this.freeSockets = {};\n this.requests = {};\n this.options = {};\n }\n get defaultPort() {\n if (typeof this.explicitDefaultPort === 'number') {\n return this.explicitDefaultPort;\n }\n return isSecureEndpoint() ? 443 : 80;\n }\n set defaultPort(v) {\n this.explicitDefaultPort = v;\n }\n get protocol() {\n if (typeof this.explicitProtocol === 'string') {\n return this.explicitProtocol;\n }\n return isSecureEndpoint() ? 'https:' : 'http:';\n }\n set protocol(v) {\n this.explicitProtocol = v;\n }\n callback(req, opts, fn) {\n throw new Error('\"agent-base\" has no default implementation, you must subclass and override `callback()`');\n }\n /**\n * Called by node-core's \"_http_client.js\" module when creating\n * a new HTTP request with this Agent instance.\n *\n * @api public\n */\n addRequest(req, _opts) {\n const opts = Object.assign({}, _opts);\n if (typeof opts.secureEndpoint !== 'boolean') {\n opts.secureEndpoint = isSecureEndpoint();\n }\n if (opts.host == null) {\n opts.host = 'localhost';\n }\n if (opts.port == null) {\n opts.port = opts.secureEndpoint ? 443 : 80;\n }\n if (opts.protocol == null) {\n opts.protocol = opts.secureEndpoint ? 
'https:' : 'http:';\n }\n if (opts.host && opts.path) {\n // If both a `host` and `path` are specified then it's most\n // likely the result of a `url.parse()` call... we need to\n // remove the `path` portion so that `net.connect()` doesn't\n // attempt to open that as a unix socket file.\n delete opts.path;\n }\n delete opts.agent;\n delete opts.hostname;\n delete opts._defaultAgent;\n delete opts.defaultPort;\n delete opts.createConnection;\n // Hint to use \"Connection: close\"\n // XXX: non-documented `http` module API :(\n req._last = true;\n req.shouldKeepAlive = false;\n let timedOut = false;\n let timeoutId = null;\n const timeoutMs = opts.timeout || this.timeout;\n const onerror = (err) => {\n if (req._hadError)\n return;\n req.emit('error', err);\n // For Safety. Some additional errors might fire later on\n // and we need to make sure we don't double-fire the error event.\n req._hadError = true;\n };\n const ontimeout = () => {\n timeoutId = null;\n timedOut = true;\n const err = new Error(`A \"socket\" was not created for HTTP request before ${timeoutMs}ms`);\n err.code = 'ETIMEOUT';\n onerror(err);\n };\n const callbackError = (err) => {\n if (timedOut)\n return;\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n onerror(err);\n };\n const onsocket = (socket) => {\n if (timedOut)\n return;\n if (timeoutId != null) {\n clearTimeout(timeoutId);\n timeoutId = null;\n }\n if (isAgent(socket)) {\n // `socket` is actually an `http.Agent` instance, so\n // relinquish responsibility for this `req` to the Agent\n // from here on\n debug('Callback returned another Agent instance %o', socket.constructor.name);\n socket.addRequest(req, opts);\n return;\n }\n if (socket) {\n socket.once('free', () => {\n this.freeSocket(socket, opts);\n });\n req.onSocket(socket);\n return;\n }\n const err = new Error(`no Duplex stream was returned to agent-base for \\`${req.method} ${req.path}\\``);\n onerror(err);\n };\n if (typeof this.callback !== 'function') {\n onerror(new Error('`callback` is not defined'));\n return;\n }\n if (!this.promisifiedCallback) {\n if (this.callback.length >= 3) {\n debug('Converting legacy callback function to promise');\n this.promisifiedCallback = promisify_1.default(this.callback);\n }\n else {\n this.promisifiedCallback = this.callback;\n }\n }\n if (typeof timeoutMs === 'number' && timeoutMs > 0) {\n timeoutId = setTimeout(ontimeout, timeoutMs);\n }\n if ('port' in opts && typeof opts.port !== 'number') {\n opts.port = Number(opts.port);\n }\n try {\n debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`);\n Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError);\n }\n catch (err) {\n Promise.reject(err).catch(callbackError);\n }\n }\n freeSocket(socket, opts) {\n debug('Freeing socket %o %o', socket.constructor.name, opts);\n socket.destroy();\n }\n destroy() {\n debug('Destroying agent %o', this.constructor.name);\n }\n }\n createAgent.Agent = Agent;\n // So that `instanceof` works correctly\n createAgent.prototype = createAgent.Agent.prototype;\n})(createAgent || (createAgent = {}));\nmodule.exports = createAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction promisify(fn) {\n return function (req, opts) {\n return new Promise((resolve, reject) => {\n fn.call(this, req, opts, (err, rtn) => {\n if (err) {\n reject(err);\n }\n else {\n resolve(rtn);\n }\n });\n });\n };\n}\nexports.default = 
promisify;\n//# sourceMappingURL=promisify.js.map","'use strict';\n\nconst arrify = value => {\n\tif (value === null || value === undefined) {\n\t\treturn [];\n\t}\n\n\tif (Array.isArray(value)) {\n\t\treturn value;\n\t}\n\n\tif (typeof value === 'string') {\n\t\treturn [value];\n\t}\n\n\tif (typeof value[Symbol.iterator] === 'function') {\n\t\treturn [...value];\n\t}\n\n\treturn [value];\n};\n\nmodule.exports = arrify;\n","// Packages\nvar retrier = require('retry');\n\nfunction retry(fn, opts) {\n function run(resolve, reject) {\n var options = opts || {};\n var op;\n\n // Default `randomize` to true\n if (!('randomize' in options)) {\n options.randomize = true;\n }\n\n op = retrier.operation(options);\n\n // We allow the user to abort retrying\n // this makes sense in the cases where\n // knowledge is obtained that retrying\n // would be futile (e.g.: auth errors)\n\n function bail(err) {\n reject(err || new Error('Aborted'));\n }\n\n function onError(err, num) {\n if (err.bail) {\n bail(err);\n return;\n }\n\n if (!op.retry(err)) {\n reject(op.mainError());\n } else if (options.onRetry) {\n options.onRetry(err, num);\n }\n }\n\n function runAttempt(num) {\n var val;\n\n try {\n val = fn(bail, num);\n } catch (err) {\n onError(err, num);\n return;\n }\n\n Promise.resolve(val)\n .then(resolve)\n .catch(function catchIt(err) {\n onError(err, num);\n });\n }\n\n op.attempt(runAttempt);\n }\n\n return new Promise(run);\n}\n\nmodule.exports = retry;\n","'use strict'\n\nexports.byteLength = byteLength\nexports.toByteArray = toByteArray\nexports.fromByteArray = fromByteArray\n\nvar lookup = []\nvar revLookup = []\nvar Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array\n\nvar code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\nfor (var i = 0, len = code.length; i < len; ++i) {\n lookup[i] = code[i]\n revLookup[code.charCodeAt(i)] = i\n}\n\n// Support decoding URL-safe base64 strings, as Node.js does.\n// See: https://en.wikipedia.org/wiki/Base64#URL_applications\nrevLookup['-'.charCodeAt(0)] = 62\nrevLookup['_'.charCodeAt(0)] = 63\n\nfunction getLens (b64) {\n var len = b64.length\n\n if (len % 4 > 0) {\n throw new Error('Invalid string. Length must be a multiple of 4')\n }\n\n // Trim off extra bytes after placeholder bytes are found\n // See: https://github.com/beatgammit/base64-js/issues/42\n var validLen = b64.indexOf('=')\n if (validLen === -1) validLen = len\n\n var placeHoldersLen = validLen === len\n ? 0\n : 4 - (validLen % 4)\n\n return [validLen, placeHoldersLen]\n}\n\n// base64 is 4/3 + up to two characters of the original data\nfunction byteLength (b64) {\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction _byteLength (b64, validLen, placeHoldersLen) {\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction toByteArray (b64) {\n var tmp\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n\n var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))\n\n var curByte = 0\n\n // if there are placeholders, only get up to the last complete 4 chars\n var len = placeHoldersLen > 0\n ? 
validLen - 4\n : validLen\n\n var i\n for (i = 0; i < len; i += 4) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 18) |\n (revLookup[b64.charCodeAt(i + 1)] << 12) |\n (revLookup[b64.charCodeAt(i + 2)] << 6) |\n revLookup[b64.charCodeAt(i + 3)]\n arr[curByte++] = (tmp >> 16) & 0xFF\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 2) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 2) |\n (revLookup[b64.charCodeAt(i + 1)] >> 4)\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 1) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 10) |\n (revLookup[b64.charCodeAt(i + 1)] << 4) |\n (revLookup[b64.charCodeAt(i + 2)] >> 2)\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n return arr\n}\n\nfunction tripletToBase64 (num) {\n return lookup[num >> 18 & 0x3F] +\n lookup[num >> 12 & 0x3F] +\n lookup[num >> 6 & 0x3F] +\n lookup[num & 0x3F]\n}\n\nfunction encodeChunk (uint8, start, end) {\n var tmp\n var output = []\n for (var i = start; i < end; i += 3) {\n tmp =\n ((uint8[i] << 16) & 0xFF0000) +\n ((uint8[i + 1] << 8) & 0xFF00) +\n (uint8[i + 2] & 0xFF)\n output.push(tripletToBase64(tmp))\n }\n return output.join('')\n}\n\nfunction fromByteArray (uint8) {\n var tmp\n var len = uint8.length\n var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes\n var parts = []\n var maxChunkLength = 16383 // must be multiple of 3\n\n // go through the array every three bytes, we'll deal with trailing stuff later\n for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {\n parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))\n }\n\n // pad the end with zeros, but make sure to not forget the extra bytes\n if (extraBytes === 1) {\n tmp = uint8[len - 1]\n parts.push(\n lookup[tmp >> 2] +\n lookup[(tmp << 4) & 0x3F] +\n '=='\n )\n } else if (extraBytes === 2) {\n tmp = (uint8[len - 2] << 8) + uint8[len - 1]\n parts.push(\n lookup[tmp >> 10] +\n lookup[(tmp >> 4) & 0x3F] +\n lookup[(tmp << 2) & 0x3F] +\n '='\n )\n }\n\n return parts.join('')\n}\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n",";(function (globalObject) {\r\n 'use strict';\r\n\r\n/*\r\n * bignumber.js v9.1.2\r\n * A JavaScript library for arbitrary-precision arithmetic.\r\n * https://github.com/MikeMcl/bignumber.js\r\n * Copyright (c) 2022 Michael Mclaughlin \r\n * MIT Licensed.\r\n *\r\n * BigNumber.prototype methods | BigNumber methods\r\n * |\r\n * absoluteValue abs | clone\r\n * comparedTo | config set\r\n * decimalPlaces dp | DECIMAL_PLACES\r\n * dividedBy div | ROUNDING_MODE\r\n * dividedToIntegerBy idiv | EXPONENTIAL_AT\r\n * exponentiatedBy pow | RANGE\r\n * integerValue | CRYPTO\r\n * isEqualTo eq | MODULO_MODE\r\n * isFinite | POW_PRECISION\r\n * isGreaterThan gt | FORMAT\r\n * isGreaterThanOrEqualTo gte | ALPHABET\r\n * isInteger | isBigNumber\r\n * isLessThan lt | maximum max\r\n * isLessThanOrEqualTo lte | minimum min\r\n * isNaN | random\r\n * isNegative | sum\r\n * isPositive |\r\n * isZero |\r\n * minus |\r\n * modulo mod |\r\n * multipliedBy times |\r\n * negated |\r\n * plus |\r\n * precision sd |\r\n * shiftedBy |\r\n * squareRoot sqrt |\r\n * toExponential |\r\n * toFixed |\r\n * toFormat |\r\n * toFraction |\r\n * toJSON |\r\n * toNumber |\r\n * toPrecision |\r\n * toString |\r\n * valueOf |\r\n *\r\n 
*/\r\n\r\n\r\n var BigNumber,\r\n isNumeric = /^-?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:e[+-]?\\d+)?$/i,\r\n mathceil = Math.ceil,\r\n mathfloor = Math.floor,\r\n\r\n bignumberError = '[BigNumber Error] ',\r\n tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ',\r\n\r\n BASE = 1e14,\r\n LOG_BASE = 14,\r\n MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1\r\n // MAX_INT32 = 0x7fffffff, // 2^31 - 1\r\n POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13],\r\n SQRT_BASE = 1e7,\r\n\r\n // EDITABLE\r\n // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and\r\n // the arguments to toExponential, toFixed, toFormat, and toPrecision.\r\n MAX = 1E9; // 0 to MAX_INT32\r\n\r\n\r\n /*\r\n * Create and return a BigNumber constructor.\r\n */\r\n function clone(configObject) {\r\n var div, convertBase, parseNumeric,\r\n P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null },\r\n ONE = new BigNumber(1),\r\n\r\n\r\n //----------------------------- EDITABLE CONFIG DEFAULTS -------------------------------\r\n\r\n\r\n // The default values below must be integers within the inclusive ranges stated.\r\n // The values can also be changed at run-time using BigNumber.set.\r\n\r\n // The maximum number of decimal places for operations involving division.\r\n DECIMAL_PLACES = 20, // 0 to MAX\r\n\r\n // The rounding mode used when rounding to the above decimal places, and when using\r\n // toExponential, toFixed, toFormat and toPrecision, and round (default value).\r\n // UP 0 Away from zero.\r\n // DOWN 1 Towards zero.\r\n // CEIL 2 Towards +Infinity.\r\n // FLOOR 3 Towards -Infinity.\r\n // HALF_UP 4 Towards nearest neighbour. If equidistant, up.\r\n // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down.\r\n // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour.\r\n // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity.\r\n // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity.\r\n ROUNDING_MODE = 4, // 0 to 8\r\n\r\n // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS]\r\n\r\n // The exponent value at and beneath which toString returns exponential notation.\r\n // Number type: -7\r\n TO_EXP_NEG = -7, // 0 to -MAX\r\n\r\n // The exponent value at and above which toString returns exponential notation.\r\n // Number type: 21\r\n TO_EXP_POS = 21, // 0 to MAX\r\n\r\n // RANGE : [MIN_EXP, MAX_EXP]\r\n\r\n // The minimum exponent value, beneath which underflow to zero occurs.\r\n // Number type: -324 (5e-324)\r\n MIN_EXP = -1e7, // -1 to -MAX\r\n\r\n // The maximum exponent value, above which overflow to Infinity occurs.\r\n // Number type: 308 (1.7976931348623157e+308)\r\n // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow.\r\n MAX_EXP = 1e7, // 1 to MAX\r\n\r\n // Whether to use cryptographically-secure random number generation, if available.\r\n CRYPTO = false, // true or false\r\n\r\n // The modulo mode used when calculating the modulus: a mod n.\r\n // The quotient (q = a / n) is calculated according to the corresponding rounding mode.\r\n // The remainder (r) is calculated as: r = a - n * q.\r\n //\r\n // UP 0 The remainder is positive if the dividend is negative, else is negative.\r\n // DOWN 1 The remainder has the same sign as the dividend.\r\n // This modulo mode is commonly known as 'truncated division' and is\r\n // equivalent to (a % n) in JavaScript.\r\n // FLOOR 3 The remainder has the same sign as the divisor (Python %).\r\n // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function.\r\n // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)).\r\n // The remainder is always positive.\r\n //\r\n // The truncated division, floored division, Euclidian division and IEEE 754 remainder\r\n // modes are commonly used for the modulus operation.\r\n // Although the other rounding modes can also be used, they may not give useful results.\r\n MODULO_MODE = 1, // 0 to 9\r\n\r\n // The maximum number of significant digits of the result of the exponentiatedBy operation.\r\n // If POW_PRECISION is 0, there will be unlimited significant digits.\r\n POW_PRECISION = 0, // 0 to MAX\r\n\r\n // The format specification used by the BigNumber.prototype.toFormat method.\r\n FORMAT = {\r\n prefix: '',\r\n groupSize: 3,\r\n secondaryGroupSize: 0,\r\n groupSeparator: ',',\r\n decimalSeparator: '.',\r\n fractionGroupSize: 0,\r\n fractionGroupSeparator: '\\xA0', // non-breaking space\r\n suffix: ''\r\n },\r\n\r\n // The alphabet used for base conversion. It must be at least 2 characters long, with no '+',\r\n // '-', '.', whitespace, or repeated character.\r\n // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_'\r\n ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz',\r\n alphabetHasNormalDecimalDigits = true;\r\n\r\n\r\n //------------------------------------------------------------------------------------------\r\n\r\n\r\n // CONSTRUCTOR\r\n\r\n\r\n /*\r\n * The BigNumber constructor and exported function.\r\n * Create and return a new instance of a BigNumber object.\r\n *\r\n * v {number|string|BigNumber} A numeric value.\r\n * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive.\r\n */\r\n function BigNumber(v, b) {\r\n var alphabet, c, caseChanged, e, i, isNum, len, str,\r\n x = this;\r\n\r\n // Enable constructor call without `new`.\r\n if (!(x instanceof BigNumber)) return new BigNumber(v, b);\r\n\r\n if (b == null) {\r\n\r\n if (v && v._isBigNumber === true) {\r\n x.s = v.s;\r\n\r\n if (!v.c || v.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else if (v.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = v.e;\r\n x.c = v.c.slice();\r\n }\r\n\r\n return;\r\n }\r\n\r\n if ((isNum = typeof v == 'number') && v * 0 == 0) {\r\n\r\n // Use `1 / n` to handle minus zero also.\r\n x.s = 1 / v < 0 ? (v = -v, -1) : 1;\r\n\r\n // Fast path for integers, where n < 2147483648 (2**31).\r\n if (v === ~~v) {\r\n for (e = 0, i = v; i >= 10; i /= 10, e++);\r\n\r\n if (e > MAX_EXP) {\r\n x.c = x.e = null;\r\n } else {\r\n x.e = e;\r\n x.c = [v];\r\n }\r\n\r\n return;\r\n }\r\n\r\n str = String(v);\r\n } else {\r\n\r\n if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum);\r\n\r\n x.s = str.charCodeAt(0) == 45 ? 
(str = str.slice(1), -1) : 1;\r\n }\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n\r\n // Exponential form?\r\n if ((i = str.search(/e/i)) > 0) {\r\n\r\n // Determine exponent.\r\n if (e < 0) e = i;\r\n e += +str.slice(i + 1);\r\n str = str.substring(0, i);\r\n } else if (e < 0) {\r\n\r\n // Integer.\r\n e = str.length;\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n\r\n // Allow exponential notation to be used with base 10 argument, while\r\n // also rounding to DECIMAL_PLACES as with other bases.\r\n if (b == 10 && alphabetHasNormalDecimalDigits) {\r\n x = new BigNumber(v);\r\n return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE);\r\n }\r\n\r\n str = String(v);\r\n\r\n if (isNum = typeof v == 'number') {\r\n\r\n // Avoid potential interpretation of Infinity and NaN as base 44+ values.\r\n if (v * 0 != 0) return parseNumeric(x, str, isNum, b);\r\n\r\n x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (BigNumber.DEBUG && str.replace(/^0\\.0*|\\./, '').length > 15) {\r\n throw Error\r\n (tooManyDigits + v);\r\n }\r\n } else {\r\n x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1;\r\n }\r\n\r\n alphabet = ALPHABET.slice(0, b);\r\n e = i = 0;\r\n\r\n // Check that str is a valid base b number.\r\n // Don't use RegExp, so alphabet can contain special characters.\r\n for (len = str.length; i < len; i++) {\r\n if (alphabet.indexOf(c = str.charAt(i)) < 0) {\r\n if (c == '.') {\r\n\r\n // If '.' is not the first character and it has not be found before.\r\n if (i > e) {\r\n e = len;\r\n continue;\r\n }\r\n } else if (!caseChanged) {\r\n\r\n // Allow e.g. 
hexadecimal 'FF' as well as 'ff'.\r\n if (str == str.toUpperCase() && (str = str.toLowerCase()) ||\r\n str == str.toLowerCase() && (str = str.toUpperCase())) {\r\n caseChanged = true;\r\n i = -1;\r\n e = 0;\r\n continue;\r\n }\r\n }\r\n\r\n return parseNumeric(x, String(v), isNum, b);\r\n }\r\n }\r\n\r\n // Prevent later check for length on converted number.\r\n isNum = false;\r\n str = convertBase(str, b, 10, x.s);\r\n\r\n // Decimal point?\r\n if ((e = str.indexOf('.')) > -1) str = str.replace('.', '');\r\n else e = str.length;\r\n }\r\n\r\n // Determine leading zeros.\r\n for (i = 0; str.charCodeAt(i) === 48; i++);\r\n\r\n // Determine trailing zeros.\r\n for (len = str.length; str.charCodeAt(--len) === 48;);\r\n\r\n if (str = str.slice(i, ++len)) {\r\n len -= i;\r\n\r\n // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}'\r\n if (isNum && BigNumber.DEBUG &&\r\n len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) {\r\n throw Error\r\n (tooManyDigits + (x.s * v));\r\n }\r\n\r\n // Overflow?\r\n if ((e = e - i - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n x.c = x.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n } else {\r\n x.e = e;\r\n x.c = [];\r\n\r\n // Transform base\r\n\r\n // e is the base 10 exponent.\r\n // i is where to slice str to get the first element of the coefficient array.\r\n i = (e + 1) % LOG_BASE;\r\n if (e < 0) i += LOG_BASE; // i < 1\r\n\r\n if (i < len) {\r\n if (i) x.c.push(+str.slice(0, i));\r\n\r\n for (len -= LOG_BASE; i < len;) {\r\n x.c.push(+str.slice(i, i += LOG_BASE));\r\n }\r\n\r\n i = LOG_BASE - (str = str.slice(i)).length;\r\n } else {\r\n i -= len;\r\n }\r\n\r\n for (; i--; str += '0');\r\n x.c.push(+str);\r\n }\r\n } else {\r\n\r\n // Zero.\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n\r\n // CONSTRUCTOR PROPERTIES\r\n\r\n\r\n BigNumber.clone = clone;\r\n\r\n BigNumber.ROUND_UP = 0;\r\n BigNumber.ROUND_DOWN = 1;\r\n BigNumber.ROUND_CEIL = 2;\r\n BigNumber.ROUND_FLOOR = 3;\r\n BigNumber.ROUND_HALF_UP = 4;\r\n BigNumber.ROUND_HALF_DOWN = 5;\r\n BigNumber.ROUND_HALF_EVEN = 6;\r\n BigNumber.ROUND_HALF_CEIL = 7;\r\n BigNumber.ROUND_HALF_FLOOR = 8;\r\n BigNumber.EUCLID = 9;\r\n\r\n\r\n /*\r\n * Configure infrequently-changing library-wide settings.\r\n *\r\n * Accept an object with the following optional properties (if the value of a property is\r\n * a number, it must be an integer within the inclusive range stated):\r\n *\r\n * DECIMAL_PLACES {number} 0 to MAX\r\n * ROUNDING_MODE {number} 0 to 8\r\n * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX]\r\n * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX]\r\n * CRYPTO {boolean} true or false\r\n * MODULO_MODE {number} 0 to 9\r\n * POW_PRECISION {number} 0 to MAX\r\n * ALPHABET {string} A string of two or more unique characters which does\r\n * not contain '.'.\r\n * FORMAT {object} An object with some of the following properties:\r\n * prefix {string}\r\n * groupSize {number}\r\n * secondaryGroupSize {number}\r\n * groupSeparator {string}\r\n * decimalSeparator {string}\r\n * fractionGroupSize {number}\r\n * fractionGroupSeparator {string}\r\n * suffix {string}\r\n *\r\n * (The values assigned to the above FORMAT object properties are not checked for validity.)\r\n *\r\n * E.g.\r\n * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 })\r\n *\r\n * Ignore properties/parameters set to null or undefined, except for ALPHABET.\r\n *\r\n * Return an object with the properties 
current values.\r\n */\r\n BigNumber.config = BigNumber.set = function (obj) {\r\n var p, v;\r\n\r\n if (obj != null) {\r\n\r\n if (typeof obj == 'object') {\r\n\r\n // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n DECIMAL_PLACES = v;\r\n }\r\n\r\n // ROUNDING_MODE {number} Integer, 0 to 8 inclusive.\r\n // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 8, p);\r\n ROUNDING_MODE = v;\r\n }\r\n\r\n // EXPONENTIAL_AT {number|number[]}\r\n // Integer, -MAX to MAX inclusive or\r\n // [integer -MAX to 0 inclusive, 0 to MAX inclusive].\r\n // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, 0, p);\r\n intCheck(v[1], 0, MAX, p);\r\n TO_EXP_NEG = v[0];\r\n TO_EXP_POS = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v);\r\n }\r\n }\r\n\r\n // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or\r\n // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive].\r\n // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}'\r\n if (obj.hasOwnProperty(p = 'RANGE')) {\r\n v = obj[p];\r\n if (v && v.pop) {\r\n intCheck(v[0], -MAX, -1, p);\r\n intCheck(v[1], 1, MAX, p);\r\n MIN_EXP = v[0];\r\n MAX_EXP = v[1];\r\n } else {\r\n intCheck(v, -MAX, MAX, p);\r\n if (v) {\r\n MIN_EXP = -(MAX_EXP = v < 0 ? 
-v : v);\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' cannot be zero: ' + v);\r\n }\r\n }\r\n }\r\n\r\n // CRYPTO {boolean} true or false.\r\n // '[BigNumber Error] CRYPTO not true or false: {v}'\r\n // '[BigNumber Error] crypto unavailable'\r\n if (obj.hasOwnProperty(p = 'CRYPTO')) {\r\n v = obj[p];\r\n if (v === !!v) {\r\n if (v) {\r\n if (typeof crypto != 'undefined' && crypto &&\r\n (crypto.getRandomValues || crypto.randomBytes)) {\r\n CRYPTO = v;\r\n } else {\r\n CRYPTO = !v;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n } else {\r\n CRYPTO = v;\r\n }\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' not true or false: ' + v);\r\n }\r\n }\r\n\r\n // MODULO_MODE {number} Integer, 0 to 9 inclusive.\r\n // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'MODULO_MODE')) {\r\n v = obj[p];\r\n intCheck(v, 0, 9, p);\r\n MODULO_MODE = v;\r\n }\r\n\r\n // POW_PRECISION {number} Integer, 0 to MAX inclusive.\r\n // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}'\r\n if (obj.hasOwnProperty(p = 'POW_PRECISION')) {\r\n v = obj[p];\r\n intCheck(v, 0, MAX, p);\r\n POW_PRECISION = v;\r\n }\r\n\r\n // FORMAT {object}\r\n // '[BigNumber Error] FORMAT not an object: {v}'\r\n if (obj.hasOwnProperty(p = 'FORMAT')) {\r\n v = obj[p];\r\n if (typeof v == 'object') FORMAT = v;\r\n else throw Error\r\n (bignumberError + p + ' not an object: ' + v);\r\n }\r\n\r\n // ALPHABET {string}\r\n // '[BigNumber Error] ALPHABET invalid: {v}'\r\n if (obj.hasOwnProperty(p = 'ALPHABET')) {\r\n v = obj[p];\r\n\r\n // Disallow if less than two characters,\r\n // or if it contains '+', '-', '.', whitespace, or a repeated character.\r\n if (typeof v == 'string' && !/^.?$|[+\\-.\\s]|(.).*\\1/.test(v)) {\r\n alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789';\r\n ALPHABET = v;\r\n } else {\r\n throw Error\r\n (bignumberError + p + ' invalid: ' + v);\r\n }\r\n }\r\n\r\n } else {\r\n\r\n // '[BigNumber Error] Object expected: {v}'\r\n throw Error\r\n (bignumberError + 'Object expected: ' + obj);\r\n }\r\n }\r\n\r\n return {\r\n DECIMAL_PLACES: DECIMAL_PLACES,\r\n ROUNDING_MODE: ROUNDING_MODE,\r\n EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS],\r\n RANGE: [MIN_EXP, MAX_EXP],\r\n CRYPTO: CRYPTO,\r\n MODULO_MODE: MODULO_MODE,\r\n POW_PRECISION: POW_PRECISION,\r\n FORMAT: FORMAT,\r\n ALPHABET: ALPHABET\r\n };\r\n };\r\n\r\n\r\n /*\r\n * Return true if v is a BigNumber instance, otherwise return false.\r\n *\r\n * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed.\r\n *\r\n * v {any}\r\n *\r\n * '[BigNumber Error] Invalid BigNumber: {v}'\r\n */\r\n BigNumber.isBigNumber = function (v) {\r\n if (!v || v._isBigNumber !== true) return false;\r\n if (!BigNumber.DEBUG) return true;\r\n\r\n var i, n,\r\n c = v.c,\r\n e = v.e,\r\n s = v.s;\r\n\r\n out: if ({}.toString.call(c) == '[object Array]') {\r\n\r\n if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) {\r\n\r\n // If the first element is zero, the BigNumber value must be zero.\r\n if (c[0] === 0) {\r\n if (e === 0 && c.length === 1) return true;\r\n break out;\r\n }\r\n\r\n // Calculate number of digits that c[0] should have, based on the exponent.\r\n i = (e + 1) % LOG_BASE;\r\n if (i < 1) i += LOG_BASE;\r\n\r\n // Calculate number of digits of c[0].\r\n //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) {\r\n if (String(c[0]).length == i) {\r\n\r\n for (i = 0; i < 
c.length; i++) {\r\n n = c[i];\r\n if (n < 0 || n >= BASE || n !== mathfloor(n)) break out;\r\n }\r\n\r\n // Last element cannot be zero, unless it is the only element.\r\n if (n !== 0) return true;\r\n }\r\n }\r\n\r\n // Infinity/NaN\r\n } else if (c === null && e === null && (s === null || s === 1 || s === -1)) {\r\n return true;\r\n }\r\n\r\n throw Error\r\n (bignumberError + 'Invalid BigNumber: ' + v);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the maximum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.maximum = BigNumber.max = function () {\r\n return maxOrMin(arguments, -1);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the minimum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.minimum = BigNumber.min = function () {\r\n return maxOrMin(arguments, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber with a random value equal to or greater than 0 and less than 1,\r\n * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing\r\n * zeros are produced).\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}'\r\n * '[BigNumber Error] crypto unavailable'\r\n */\r\n BigNumber.random = (function () {\r\n var pow2_53 = 0x20000000000000;\r\n\r\n // Return a 53 bit integer n, where 0 <= n < 9007199254740992.\r\n // Check if Math.random() produces more than 32 bits of randomness.\r\n // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits.\r\n // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1.\r\n var random53bitInt = (Math.random() * pow2_53) & 0x1fffff\r\n ? function () { return mathfloor(Math.random() * pow2_53); }\r\n : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) +\r\n (Math.random() * 0x800000 | 0); };\r\n\r\n return function (dp) {\r\n var a, b, e, k, v,\r\n i = 0,\r\n c = [],\r\n rand = new BigNumber(ONE);\r\n\r\n if (dp == null) dp = DECIMAL_PLACES;\r\n else intCheck(dp, 0, MAX);\r\n\r\n k = mathceil(dp / LOG_BASE);\r\n\r\n if (CRYPTO) {\r\n\r\n // Browsers supporting crypto.getRandomValues.\r\n if (crypto.getRandomValues) {\r\n\r\n a = crypto.getRandomValues(new Uint32Array(k *= 2));\r\n\r\n for (; i < k;) {\r\n\r\n // 53 bits:\r\n // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2)\r\n // 11111 11111111 11111111 11111111 11100000 00000000 00000000\r\n // ((Math.pow(2, 32) - 1) >>> 11).toString(2)\r\n // 11111 11111111 11111111\r\n // 0x20000 is 2^21.\r\n v = a[i] * 0x20000 + (a[i + 1] >>> 11);\r\n\r\n // Rejection sampling:\r\n // 0 <= v < 9007199254740992\r\n // Probability that v >= 9e15, is\r\n // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 
1 in 1251\r\n if (v >= 9e15) {\r\n b = crypto.getRandomValues(new Uint32Array(2));\r\n a[i] = b[0];\r\n a[i + 1] = b[1];\r\n } else {\r\n\r\n // 0 <= v <= 8999999999999999\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 2;\r\n }\r\n }\r\n i = k / 2;\r\n\r\n // Node.js supporting crypto.randomBytes.\r\n } else if (crypto.randomBytes) {\r\n\r\n // buffer\r\n a = crypto.randomBytes(k *= 7);\r\n\r\n for (; i < k;) {\r\n\r\n // 0x1000000000000 is 2^48, 0x10000000000 is 2^40\r\n // 0x100000000 is 2^32, 0x1000000 is 2^24\r\n // 11111 11111111 11111111 11111111 11111111 11111111 11111111\r\n // 0 <= v < 9007199254740992\r\n v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) +\r\n (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) +\r\n (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6];\r\n\r\n if (v >= 9e15) {\r\n crypto.randomBytes(7).copy(a, i);\r\n } else {\r\n\r\n // 0 <= (v % 1e14) <= 99999999999999\r\n c.push(v % 1e14);\r\n i += 7;\r\n }\r\n }\r\n i = k / 7;\r\n } else {\r\n CRYPTO = false;\r\n throw Error\r\n (bignumberError + 'crypto unavailable');\r\n }\r\n }\r\n\r\n // Use Math.random.\r\n if (!CRYPTO) {\r\n\r\n for (; i < k;) {\r\n v = random53bitInt();\r\n if (v < 9e15) c[i++] = v % 1e14;\r\n }\r\n }\r\n\r\n k = c[--i];\r\n dp %= LOG_BASE;\r\n\r\n // Convert trailing digits to zeros according to dp.\r\n if (k && dp) {\r\n v = POWS_TEN[LOG_BASE - dp];\r\n c[i] = mathfloor(k / v) * v;\r\n }\r\n\r\n // Remove trailing elements which are zero.\r\n for (; c[i] === 0; c.pop(), i--);\r\n\r\n // Zero?\r\n if (i < 0) {\r\n c = [e = 0];\r\n } else {\r\n\r\n // Remove leading elements which are zero and adjust exponent accordingly.\r\n for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE);\r\n\r\n // Count the digits of the first element of c to determine leading zeros, and...\r\n for (i = 1, v = c[0]; v >= 10; v /= 10, i++);\r\n\r\n // adjust the exponent accordingly.\r\n if (i < LOG_BASE) e -= LOG_BASE - i;\r\n }\r\n\r\n rand.e = e;\r\n rand.c = c;\r\n return rand;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the sum of the arguments.\r\n *\r\n * arguments {number|string|BigNumber}\r\n */\r\n BigNumber.sum = function () {\r\n var i = 1,\r\n args = arguments,\r\n sum = new BigNumber(args[0]);\r\n for (; i < args.length;) sum = sum.plus(args[i++]);\r\n return sum;\r\n };\r\n\r\n\r\n // PRIVATE FUNCTIONS\r\n\r\n\r\n // Called by BigNumber and BigNumber.prototype.toString.\r\n convertBase = (function () {\r\n var decimal = '0123456789';\r\n\r\n /*\r\n * Convert string of baseIn to an array of numbers of baseOut.\r\n * Eg. toBaseOut('255', 10, 16) returns [15, 15].\r\n * Eg. 
toBaseOut('ff', 16, 10) returns [2, 5, 5].\r\n */\r\n function toBaseOut(str, baseIn, baseOut, alphabet) {\r\n var j,\r\n arr = [0],\r\n arrL,\r\n i = 0,\r\n len = str.length;\r\n\r\n for (; i < len;) {\r\n for (arrL = arr.length; arrL--; arr[arrL] *= baseIn);\r\n\r\n arr[0] += alphabet.indexOf(str.charAt(i++));\r\n\r\n for (j = 0; j < arr.length; j++) {\r\n\r\n if (arr[j] > baseOut - 1) {\r\n if (arr[j + 1] == null) arr[j + 1] = 0;\r\n arr[j + 1] += arr[j] / baseOut | 0;\r\n arr[j] %= baseOut;\r\n }\r\n }\r\n }\r\n\r\n return arr.reverse();\r\n }\r\n\r\n // Convert a numeric string of baseIn to a numeric string of baseOut.\r\n // If the caller is toString, we are converting from base 10 to baseOut.\r\n // If the caller is BigNumber, we are converting from baseIn to base 10.\r\n return function (str, baseIn, baseOut, sign, callerIsToString) {\r\n var alphabet, d, e, k, r, x, xc, y,\r\n i = str.indexOf('.'),\r\n dp = DECIMAL_PLACES,\r\n rm = ROUNDING_MODE;\r\n\r\n // Non-integer.\r\n if (i >= 0) {\r\n k = POW_PRECISION;\r\n\r\n // Unlimited precision.\r\n POW_PRECISION = 0;\r\n str = str.replace('.', '');\r\n y = new BigNumber(baseIn);\r\n x = y.pow(str.length - i);\r\n POW_PRECISION = k;\r\n\r\n // Convert str as if an integer, then restore the fraction part by dividing the\r\n // result by its base raised to a power.\r\n\r\n y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'),\r\n 10, baseOut, decimal);\r\n y.e = y.c.length;\r\n }\r\n\r\n // Convert the number as integer.\r\n\r\n xc = toBaseOut(str, baseIn, baseOut, callerIsToString\r\n ? (alphabet = ALPHABET, decimal)\r\n : (alphabet = decimal, ALPHABET));\r\n\r\n // xc now represents str as an integer and converted to baseOut. e is the exponent.\r\n e = k = xc.length;\r\n\r\n // Remove trailing zeros.\r\n for (; xc[--k] == 0; xc.pop());\r\n\r\n // Zero?\r\n if (!xc[0]) return alphabet.charAt(0);\r\n\r\n // Does str represent an integer? If so, no need for the division.\r\n if (i < 0) {\r\n --e;\r\n } else {\r\n x.c = xc;\r\n x.e = e;\r\n\r\n // The sign is needed for correct rounding.\r\n x.s = sign;\r\n x = div(x, y, dp, rm, baseOut);\r\n xc = x.c;\r\n r = x.r;\r\n e = x.e;\r\n }\r\n\r\n // xc now represents str converted to baseOut.\r\n\r\n // THe index of the rounding digit.\r\n d = e + dp + 1;\r\n\r\n // The rounding digit: the digit to the right of the digit that may be rounded up.\r\n i = xc[d];\r\n\r\n // Look at the rounding digits and mode to determine whether to round up.\r\n\r\n k = baseOut / 2;\r\n r = r || d < 0 || xc[d + 1] != null;\r\n\r\n r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n // If the index of the rounding digit is not greater than zero, or xc represents\r\n // zero, then the result of the base conversion is zero or, if rounding up, a value\r\n // such as 0.00001.\r\n if (d < 1 || !xc[0]) {\r\n\r\n // 1^-dp or 0\r\n str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0);\r\n } else {\r\n\r\n // Truncate xc to the required number of decimal places.\r\n xc.length = d;\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n // Rounding up may mean the previous digit has to be rounded up and so on.\r\n for (--baseOut; ++xc[--d] > baseOut;) {\r\n xc[d] = 0;\r\n\r\n if (!d) {\r\n ++e;\r\n xc = [1].concat(xc);\r\n }\r\n }\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (k = xc.length; !xc[--k];);\r\n\r\n // E.g. 
[4, 11, 15] becomes 4bf.\r\n for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++]));\r\n\r\n // Add leading zeros, decimal point and trailing zeros as required.\r\n str = toFixedPoint(str, e, alphabet.charAt(0));\r\n }\r\n\r\n // The caller will add the sign.\r\n return str;\r\n };\r\n })();\r\n\r\n\r\n // Perform division in the specified base. Called by div and convertBase.\r\n div = (function () {\r\n\r\n // Assume non-zero x and k.\r\n function multiply(x, k, base) {\r\n var m, temp, xlo, xhi,\r\n carry = 0,\r\n i = x.length,\r\n klo = k % SQRT_BASE,\r\n khi = k / SQRT_BASE | 0;\r\n\r\n for (x = x.slice(); i--;) {\r\n xlo = x[i] % SQRT_BASE;\r\n xhi = x[i] / SQRT_BASE | 0;\r\n m = khi * xlo + xhi * klo;\r\n temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry;\r\n carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi;\r\n x[i] = temp % base;\r\n }\r\n\r\n if (carry) x = [carry].concat(x);\r\n\r\n return x;\r\n }\r\n\r\n function compare(a, b, aL, bL) {\r\n var i, cmp;\r\n\r\n if (aL != bL) {\r\n cmp = aL > bL ? 1 : -1;\r\n } else {\r\n\r\n for (i = cmp = 0; i < aL; i++) {\r\n\r\n if (a[i] != b[i]) {\r\n cmp = a[i] > b[i] ? 1 : -1;\r\n break;\r\n }\r\n }\r\n }\r\n\r\n return cmp;\r\n }\r\n\r\n function subtract(a, b, aL, base) {\r\n var i = 0;\r\n\r\n // Subtract b from a.\r\n for (; aL--;) {\r\n a[aL] -= i;\r\n i = a[aL] < b[aL] ? 1 : 0;\r\n a[aL] = i * base + a[aL] - b[aL];\r\n }\r\n\r\n // Remove leading zeros.\r\n for (; !a[0] && a.length > 1; a.splice(0, 1));\r\n }\r\n\r\n // x: dividend, y: divisor.\r\n return function (x, y, dp, rm, base) {\r\n var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0,\r\n yL, yz,\r\n s = x.s == y.s ? 1 : -1,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n // Either NaN, Infinity or 0?\r\n if (!xc || !xc[0] || !yc || !yc[0]) {\r\n\r\n return new BigNumber(\r\n\r\n // Return NaN if either NaN, or both Infinity or 0.\r\n !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN :\r\n\r\n // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0.\r\n xc && xc[0] == 0 || !yc ? 
s * 0 : s / 0\r\n );\r\n }\r\n\r\n q = new BigNumber(s);\r\n qc = q.c = [];\r\n e = x.e - y.e;\r\n s = dp + e + 1;\r\n\r\n if (!base) {\r\n base = BASE;\r\n e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE);\r\n s = s / LOG_BASE | 0;\r\n }\r\n\r\n // Result exponent may be one less then the current value of e.\r\n // The coefficients of the BigNumbers from convertBase may have trailing zeros.\r\n for (i = 0; yc[i] == (xc[i] || 0); i++);\r\n\r\n if (yc[i] > (xc[i] || 0)) e--;\r\n\r\n if (s < 0) {\r\n qc.push(1);\r\n more = true;\r\n } else {\r\n xL = xc.length;\r\n yL = yc.length;\r\n i = 0;\r\n s += 2;\r\n\r\n // Normalise xc and yc so highest order digit of yc is >= base / 2.\r\n\r\n n = mathfloor(base / (yc[0] + 1));\r\n\r\n // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1.\r\n // if (n > 1 || n++ == 1 && yc[0] < base / 2) {\r\n if (n > 1) {\r\n yc = multiply(yc, n, base);\r\n xc = multiply(xc, n, base);\r\n yL = yc.length;\r\n xL = xc.length;\r\n }\r\n\r\n xi = yL;\r\n rem = xc.slice(0, yL);\r\n remL = rem.length;\r\n\r\n // Add zeros to make remainder as long as divisor.\r\n for (; remL < yL; rem[remL++] = 0);\r\n yz = yc.slice();\r\n yz = [0].concat(yz);\r\n yc0 = yc[0];\r\n if (yc[1] >= base / 2) yc0++;\r\n // Not necessary, but to prevent trial digit n > base, when using base 3.\r\n // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15;\r\n\r\n do {\r\n n = 0;\r\n\r\n // Compare divisor and remainder.\r\n cmp = compare(yc, rem, yL, remL);\r\n\r\n // If divisor < remainder.\r\n if (cmp < 0) {\r\n\r\n // Calculate trial digit, n.\r\n\r\n rem0 = rem[0];\r\n if (yL != remL) rem0 = rem0 * base + (rem[1] || 0);\r\n\r\n // n is how many times the divisor goes into the current remainder.\r\n n = mathfloor(rem0 / yc0);\r\n\r\n // Algorithm:\r\n // product = divisor multiplied by trial digit (n).\r\n // Compare product and remainder.\r\n // If product is greater than remainder:\r\n // Subtract divisor from product, decrement trial digit.\r\n // Subtract product from remainder.\r\n // If product was less than remainder at the last compare:\r\n // Compare new remainder and divisor.\r\n // If remainder is greater than divisor:\r\n // Subtract divisor from remainder, increment trial digit.\r\n\r\n if (n > 1) {\r\n\r\n // n may be > base only when base is 3.\r\n if (n >= base) n = base - 1;\r\n\r\n // product = divisor * trial digit.\r\n prod = multiply(yc, n, base);\r\n prodL = prod.length;\r\n remL = rem.length;\r\n\r\n // Compare product and remainder.\r\n // If product > remainder then trial digit n too high.\r\n // n is 1 too high about 5% of the time, and is not known to have\r\n // ever been more than 1 too high.\r\n while (compare(prod, rem, prodL, remL) == 1) {\r\n n--;\r\n\r\n // Subtract divisor from product.\r\n subtract(prod, yL < prodL ? 
yz : yc, prodL, base);\r\n prodL = prod.length;\r\n cmp = 1;\r\n }\r\n } else {\r\n\r\n // n is 0 or 1, cmp is -1.\r\n // If n is 0, there is no need to compare yc and rem again below,\r\n // so change cmp to 1 to avoid it.\r\n // If n is 1, leave cmp as -1, so yc and rem are compared again.\r\n if (n == 0) {\r\n\r\n // divisor < remainder, so n must be at least 1.\r\n cmp = n = 1;\r\n }\r\n\r\n // product = divisor\r\n prod = yc.slice();\r\n prodL = prod.length;\r\n }\r\n\r\n if (prodL < remL) prod = [0].concat(prod);\r\n\r\n // Subtract product from remainder.\r\n subtract(rem, prod, remL, base);\r\n remL = rem.length;\r\n\r\n // If product was < remainder.\r\n if (cmp == -1) {\r\n\r\n // Compare divisor and new remainder.\r\n // If divisor < new remainder, subtract divisor from remainder.\r\n // Trial digit n too low.\r\n // n is 1 too low about 5% of the time, and very rarely 2 too low.\r\n while (compare(yc, rem, yL, remL) < 1) {\r\n n++;\r\n\r\n // Subtract divisor from remainder.\r\n subtract(rem, yL < remL ? yz : yc, remL, base);\r\n remL = rem.length;\r\n }\r\n }\r\n } else if (cmp === 0) {\r\n n++;\r\n rem = [0];\r\n } // else cmp === 1 and n will be 0\r\n\r\n // Add the next digit, n, to the result array.\r\n qc[i++] = n;\r\n\r\n // Update the remainder.\r\n if (rem[0]) {\r\n rem[remL++] = xc[xi] || 0;\r\n } else {\r\n rem = [xc[xi]];\r\n remL = 1;\r\n }\r\n } while ((xi++ < xL || rem[0] != null) && s--);\r\n\r\n more = rem[0] != null;\r\n\r\n // Leading zero?\r\n if (!qc[0]) qc.splice(0, 1);\r\n }\r\n\r\n if (base == BASE) {\r\n\r\n // To calculate q.e, first get the number of digits of qc[0].\r\n for (i = 1, s = qc[0]; s >= 10; s /= 10, i++);\r\n\r\n round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more);\r\n\r\n // Caller is convertBase.\r\n } else {\r\n q.e = e;\r\n q.r = +more;\r\n }\r\n\r\n return q;\r\n };\r\n })();\r\n\r\n\r\n /*\r\n * Return a string representing the value of BigNumber n in fixed-point or exponential\r\n * notation rounded to the specified decimal places or significant digits.\r\n *\r\n * n: a BigNumber.\r\n * i: the index of the last digit required (i.e. the digit that may be rounded up).\r\n * rm: the rounding mode.\r\n * id: 1 (toExponential) or 2 (toPrecision).\r\n */\r\n function format(n, i, rm, id) {\r\n var c0, e, ne, len, str;\r\n\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n if (!n.c) return n.toString();\r\n\r\n c0 = n.c[0];\r\n ne = n.e;\r\n\r\n if (i == null) {\r\n str = coeffToString(n.c);\r\n str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS)\r\n ? 
toExponential(str, ne)\r\n : toFixedPoint(str, ne, '0');\r\n } else {\r\n n = round(new BigNumber(n), i, rm);\r\n\r\n // n.e may have changed if the value was rounded up.\r\n e = n.e;\r\n\r\n str = coeffToString(n.c);\r\n len = str.length;\r\n\r\n // toPrecision returns exponential notation if the number of significant digits\r\n // specified is less than the number of digits necessary to represent the integer\r\n // part of the value in fixed-point notation.\r\n\r\n // Exponential notation.\r\n if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) {\r\n\r\n // Append zeros?\r\n for (; len < i; str += '0', len++);\r\n str = toExponential(str, e);\r\n\r\n // Fixed-point notation.\r\n } else {\r\n i -= ne;\r\n str = toFixedPoint(str, e, '0');\r\n\r\n // Append zeros?\r\n if (e + 1 > len) {\r\n if (--i > 0) for (str += '.'; i--; str += '0');\r\n } else {\r\n i += e - len;\r\n if (i > 0) {\r\n if (e + 1 == len) str += '.';\r\n for (; i--; str += '0');\r\n }\r\n }\r\n }\r\n }\r\n\r\n return n.s < 0 && c0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // Handle BigNumber.max and BigNumber.min.\r\n // If any number is NaN, return NaN.\r\n function maxOrMin(args, n) {\r\n var k, y,\r\n i = 1,\r\n x = new BigNumber(args[0]);\r\n\r\n for (; i < args.length; i++) {\r\n y = new BigNumber(args[i]);\r\n if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) {\r\n x = y;\r\n }\r\n }\r\n\r\n return x;\r\n }\r\n\r\n\r\n /*\r\n * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP.\r\n * Called by minus, plus and times.\r\n */\r\n function normalise(n, c, e) {\r\n var i = 1,\r\n j = c.length;\r\n\r\n // Remove trailing zeros.\r\n for (; !c[--j]; c.pop());\r\n\r\n // Calculate the base 10 exponent. First get the number of digits of c[0].\r\n for (j = c[0]; j >= 10; j /= 10, i++);\r\n\r\n // Overflow?\r\n if ((e = i + e * LOG_BASE - 1) > MAX_EXP) {\r\n\r\n // Infinity.\r\n n.c = n.e = null;\r\n\r\n // Underflow?\r\n } else if (e < MIN_EXP) {\r\n\r\n // Zero.\r\n n.c = [n.e = 0];\r\n } else {\r\n n.e = e;\r\n n.c = c;\r\n }\r\n\r\n return n;\r\n }\r\n\r\n\r\n // Handle values that fail the validity test in BigNumber.\r\n parseNumeric = (function () {\r\n var basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i,\r\n dotAfter = /^([^.]+)\\.$/,\r\n dotBefore = /^\\.([^.]+)$/,\r\n isInfinityOrNaN = /^-?(Infinity|NaN)$/,\r\n whitespaceOrPlus = /^\\s*\\+(?=[\\w.])|^\\s+|\\s+$/g;\r\n\r\n return function (x, str, isNum, b) {\r\n var base,\r\n s = isNum ? str : str.replace(whitespaceOrPlus, '');\r\n\r\n // No exception on ±Infinity or NaN.\r\n if (isInfinityOrNaN.test(s)) {\r\n x.s = isNaN(s) ? null : s < 0 ? -1 : 1;\r\n } else {\r\n if (!isNum) {\r\n\r\n // basePrefix = /^(-?)0([xbo])(?=\\w[\\w.]*$)/i\r\n s = s.replace(basePrefix, function (m, p1, p2) {\r\n base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8;\r\n return !b || b == base ? p1 : m;\r\n });\r\n\r\n if (b) {\r\n base = b;\r\n\r\n // E.g. '1.' to '1', '.1' to '0.1'\r\n s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1');\r\n }\r\n\r\n if (str != s) return new BigNumber(s, base);\r\n }\r\n\r\n // '[BigNumber Error] Not a number: {n}'\r\n // '[BigNumber Error] Not a base {b} number: {n}'\r\n if (BigNumber.DEBUG) {\r\n throw Error\r\n (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str);\r\n }\r\n\r\n // NaN\r\n x.s = null;\r\n }\r\n\r\n x.c = x.e = null;\r\n }\r\n })();\r\n\r\n\r\n /*\r\n * Round x to sd significant digits using rounding mode rm. 
Check for over/under-flow.\r\n * If r is truthy, it is known that there are more digits after the rounding digit.\r\n */\r\n function round(x, sd, rm, r) {\r\n var d, i, j, k, n, ni, rd,\r\n xc = x.c,\r\n pows10 = POWS_TEN;\r\n\r\n // if x is not Infinity or NaN...\r\n if (xc) {\r\n\r\n // rd is the rounding digit, i.e. the digit after the digit that may be rounded up.\r\n // n is a base 1e14 number, the value of the element of array x.c containing rd.\r\n // ni is the index of n within x.c.\r\n // d is the number of digits of n.\r\n // i is the index of rd within n including leading zeros.\r\n // j is the actual index of rd within n (if < 0, rd is a leading zero).\r\n out: {\r\n\r\n // Get the number of digits of the first element of xc.\r\n for (d = 1, k = xc[0]; k >= 10; k /= 10, d++);\r\n i = sd - d;\r\n\r\n // If the rounding digit is in the first element of xc...\r\n if (i < 0) {\r\n i += LOG_BASE;\r\n j = sd;\r\n n = xc[ni = 0];\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = mathfloor(n / pows10[d - j - 1] % 10);\r\n } else {\r\n ni = mathceil((i + 1) / LOG_BASE);\r\n\r\n if (ni >= xc.length) {\r\n\r\n if (r) {\r\n\r\n // Needed by sqrt.\r\n for (; xc.length <= ni; xc.push(0));\r\n n = rd = 0;\r\n d = 1;\r\n i %= LOG_BASE;\r\n j = i - LOG_BASE + 1;\r\n } else {\r\n break out;\r\n }\r\n } else {\r\n n = k = xc[ni];\r\n\r\n // Get the number of digits of n.\r\n for (d = 1; k >= 10; k /= 10, d++);\r\n\r\n // Get the index of rd within n.\r\n i %= LOG_BASE;\r\n\r\n // Get the index of rd within n, adjusted for leading zeros.\r\n // The number of leading zeros of n is given by LOG_BASE - d.\r\n j = i - LOG_BASE + d;\r\n\r\n // Get the rounding digit at index j of n.\r\n rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10);\r\n }\r\n }\r\n\r\n r = r || sd < 0 ||\r\n\r\n // Are there any non-zero digits after the rounding digit?\r\n // The expression n % pows10[d - j - 1] returns all digits of n to the right\r\n // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714.\r\n xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]);\r\n\r\n r = rm < 4\r\n ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2))\r\n : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 &&\r\n\r\n // Check whether the digit to the left of the rounding digit is odd.\r\n ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 ||\r\n rm == (x.s < 0 ? 8 : 7));\r\n\r\n if (sd < 1 || !xc[0]) {\r\n xc.length = 0;\r\n\r\n if (r) {\r\n\r\n // Convert sd to decimal places.\r\n sd -= x.e + 1;\r\n\r\n // 1, 0.1, 0.01, 0.001, 0.0001 etc.\r\n xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE];\r\n x.e = -sd || 0;\r\n } else {\r\n\r\n // Zero.\r\n xc[0] = x.e = 0;\r\n }\r\n\r\n return x;\r\n }\r\n\r\n // Remove excess digits.\r\n if (i == 0) {\r\n xc.length = ni;\r\n k = 1;\r\n ni--;\r\n } else {\r\n xc.length = ni + 1;\r\n k = pows10[LOG_BASE - i];\r\n\r\n // E.g. 56700 becomes 56000 if 7 is the rounding digit.\r\n // j > 0 means i > number of leading zeros of n.\r\n xc[ni] = j > 0 ? 
mathfloor(n / pows10[d - j] % pows10[j]) * k : 0;\r\n }\r\n\r\n // Round up?\r\n if (r) {\r\n\r\n for (; ;) {\r\n\r\n // If the digit to be rounded up is in the first element of xc...\r\n if (ni == 0) {\r\n\r\n // i will be the length of xc[0] before k is added.\r\n for (i = 1, j = xc[0]; j >= 10; j /= 10, i++);\r\n j = xc[0] += k;\r\n for (k = 1; j >= 10; j /= 10, k++);\r\n\r\n // if i != k the length has increased.\r\n if (i != k) {\r\n x.e++;\r\n if (xc[0] == BASE) xc[0] = 1;\r\n }\r\n\r\n break;\r\n } else {\r\n xc[ni] += k;\r\n if (xc[ni] != BASE) break;\r\n xc[ni--] = 0;\r\n k = 1;\r\n }\r\n }\r\n }\r\n\r\n // Remove trailing zeros.\r\n for (i = xc.length; xc[--i] === 0; xc.pop());\r\n }\r\n\r\n // Overflow? Infinity.\r\n if (x.e > MAX_EXP) {\r\n x.c = x.e = null;\r\n\r\n // Underflow? Zero.\r\n } else if (x.e < MIN_EXP) {\r\n x.c = [x.e = 0];\r\n }\r\n }\r\n\r\n return x;\r\n }\r\n\r\n\r\n function valueOf(n) {\r\n var str,\r\n e = n.e;\r\n\r\n if (e === null) return n.toString();\r\n\r\n str = coeffToString(n.c);\r\n\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(str, e)\r\n : toFixedPoint(str, e, '0');\r\n\r\n return n.s < 0 ? '-' + str : str;\r\n }\r\n\r\n\r\n // PROTOTYPE/INSTANCE METHODS\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the absolute value of this BigNumber.\r\n */\r\n P.absoluteValue = P.abs = function () {\r\n var x = new BigNumber(this);\r\n if (x.s < 0) x.s = 1;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * Return\r\n * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * -1 if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * 0 if they have the same value,\r\n * or null if the value of either is NaN.\r\n */\r\n P.comparedTo = function (y, b) {\r\n return compare(this, new BigNumber(y, b));\r\n };\r\n\r\n\r\n /*\r\n * If dp is undefined or null or true or false, return the number of decimal places of the\r\n * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n *\r\n * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * [dp] {number} Decimal places: integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.decimalPlaces = P.dp = function (dp, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), dp + x.e + 1, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE;\r\n\r\n // Subtract the number of trailing zeros of the last number.\r\n if (v = c[v]) for (; v % 10 == 0; v /= 10, n--);\r\n if (n < 0) n = 0;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * n / 0 = I\r\n * n / N = N\r\n * n / I = 0\r\n * 0 / n = 0\r\n * 0 / 0 = N\r\n * 0 / N = N\r\n * 0 / I = 0\r\n * N / n = N\r\n * N / 0 = N\r\n * N / N = N\r\n * N / I = N\r\n * I / n = I\r\n * I / 0 = I\r\n * I / N = N\r\n * I / I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber divided by the value of\r\n * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.dividedBy = P.div = function (y, b) {\r\n return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the integer part of dividing the value of this\r\n * BigNumber by the value of BigNumber(y, b).\r\n */\r\n P.dividedToIntegerBy = P.idiv = function (y, b) {\r\n return div(this, new BigNumber(y, b), 0, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a BigNumber whose value is the value of this BigNumber exponentiated by n.\r\n *\r\n * If m is present, return the result modulo m.\r\n * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE.\r\n * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE.\r\n *\r\n * The modular power operation works efficiently when x, n, and m are integers, otherwise it\r\n * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0.\r\n *\r\n * n {number|string|BigNumber} The exponent. An integer.\r\n * [m] {number|string|BigNumber} The modulus.\r\n *\r\n * '[BigNumber Error] Exponent not an integer: {n}'\r\n */\r\n P.exponentiatedBy = P.pow = function (n, m) {\r\n var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y,\r\n x = this;\r\n\r\n n = new BigNumber(n);\r\n\r\n // Allow NaN and ±Infinity, but not other non-integers.\r\n if (n.c && !n.isInteger()) {\r\n throw Error\r\n (bignumberError + 'Exponent not an integer: ' + valueOf(n));\r\n }\r\n\r\n if (m != null) m = new BigNumber(m);\r\n\r\n // Exponent of MAX_SAFE_INTEGER is 15.\r\n nIsBig = n.e > 14;\r\n\r\n // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0.\r\n if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) {\r\n\r\n // The sign of the result of pow when x is negative depends on the evenness of n.\r\n // If +n overflows to ±Infinity, the evenness of n would be not be known.\r\n y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n)));\r\n return m ? y.mod(m) : y;\r\n }\r\n\r\n nIsNeg = n.s < 0;\r\n\r\n if (m) {\r\n\r\n // x % m returns NaN if abs(m) is zero, or m is NaN.\r\n if (m.c ? 
!m.c[0] : !m.s) return new BigNumber(NaN);\r\n\r\n isModExp = !nIsNeg && x.isInteger() && m.isInteger();\r\n\r\n if (isModExp) x = x.mod(m);\r\n\r\n // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15.\r\n // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15.\r\n } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0\r\n // [1, 240000000]\r\n ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7\r\n // [80000000000000] [99999750000000]\r\n : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) {\r\n\r\n // If x is negative and n is odd, k = -0, else k = 0.\r\n k = x.s < 0 && isOdd(n) ? -0 : 0;\r\n\r\n // If x >= 1, k = ±Infinity.\r\n if (x.e > -1) k = 1 / k;\r\n\r\n // If n is negative return ±0, else return ±Infinity.\r\n return new BigNumber(nIsNeg ? 1 / k : k);\r\n\r\n } else if (POW_PRECISION) {\r\n\r\n // Truncating each coefficient array to a length of k after each multiplication\r\n // equates to truncating significant digits to POW_PRECISION + [28, 41],\r\n // i.e. there will be a minimum of 28 guard digits retained.\r\n k = mathceil(POW_PRECISION / LOG_BASE + 2);\r\n }\r\n\r\n if (nIsBig) {\r\n half = new BigNumber(0.5);\r\n if (nIsNeg) n.s = 1;\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = Math.abs(+valueOf(n));\r\n nIsOdd = i % 2;\r\n }\r\n\r\n y = new BigNumber(ONE);\r\n\r\n // Performs 54 loop iterations for n of 9007199254740991.\r\n for (; ;) {\r\n\r\n if (nIsOdd) {\r\n y = y.times(x);\r\n if (!y.c) break;\r\n\r\n if (k) {\r\n if (y.c.length > k) y.c.length = k;\r\n } else if (isModExp) {\r\n y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (i) {\r\n i = mathfloor(i / 2);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n } else {\r\n n = n.times(half);\r\n round(n, n.e + 1, 1);\r\n\r\n if (n.e > 14) {\r\n nIsOdd = isOdd(n);\r\n } else {\r\n i = +valueOf(n);\r\n if (i === 0) break;\r\n nIsOdd = i % 2;\r\n }\r\n }\r\n\r\n x = x.times(x);\r\n\r\n if (k) {\r\n if (x.c && x.c.length > k) x.c.length = k;\r\n } else if (isModExp) {\r\n x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m));\r\n }\r\n }\r\n\r\n if (isModExp) return y;\r\n if (nIsNeg) y = ONE.div(y);\r\n\r\n return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer\r\n * using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}'\r\n */\r\n P.integerValue = function (rm) {\r\n var n = new BigNumber(this);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n return round(n, n.e + 1, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isEqualTo = P.eq = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is a finite number, otherwise return false.\r\n */\r\n P.isFinite = function () {\r\n return !!this.c;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isGreaterThan = P.gt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is greater than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isGreaterThanOrEqualTo = P.gte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0;\r\n\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is an integer, otherwise return false.\r\n */\r\n P.isInteger = function () {\r\n return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than the value of BigNumber(y, b),\r\n * otherwise return false.\r\n */\r\n P.isLessThan = P.lt = function (y, b) {\r\n return compare(this, new BigNumber(y, b)) < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is less than or equal to the value of\r\n * BigNumber(y, b), otherwise return false.\r\n */\r\n P.isLessThanOrEqualTo = P.lte = function (y, b) {\r\n return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is NaN, otherwise return false.\r\n */\r\n P.isNaN = function () {\r\n return !this.s;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is negative, otherwise return false.\r\n */\r\n P.isNegative = function () {\r\n return this.s < 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is positive, otherwise return false.\r\n */\r\n P.isPositive = function () {\r\n return this.s > 0;\r\n };\r\n\r\n\r\n /*\r\n * Return true if the value of this BigNumber is 0 or -0, otherwise return false.\r\n */\r\n P.isZero = function () {\r\n return !!this.c && this.c[0] == 0;\r\n };\r\n\r\n\r\n /*\r\n * n - 0 = n\r\n * n - N = N\r\n * n - I = -I\r\n * 0 - n = -n\r\n * 0 - 0 = 0\r\n * 0 - N = N\r\n * 0 - I = -I\r\n * N - n = N\r\n * N - 0 = N\r\n * N - N = N\r\n * N - I = N\r\n * I - n = I\r\n * I - 0 = I\r\n * I - N = N\r\n * I - I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber minus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.minus = function (y, b) {\r\n var i, j, t, xLTy,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.plus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return 
xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN);\r\n\r\n // Either zero?\r\n if (!xc[0] || !yc[0]) {\r\n\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x :\r\n\r\n // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity\r\n ROUNDING_MODE == 3 ? -0 : 0);\r\n }\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Determine which is the bigger number.\r\n if (a = xe - ye) {\r\n\r\n if (xLTy = a < 0) {\r\n a = -a;\r\n t = xc;\r\n } else {\r\n ye = xe;\r\n t = yc;\r\n }\r\n\r\n t.reverse();\r\n\r\n // Prepend zeros to equalise exponents.\r\n for (b = a; b--; t.push(0));\r\n t.reverse();\r\n } else {\r\n\r\n // Exponents equal. Check digit by digit.\r\n j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b;\r\n\r\n for (a = b = 0; b < j; b++) {\r\n\r\n if (xc[b] != yc[b]) {\r\n xLTy = xc[b] < yc[b];\r\n break;\r\n }\r\n }\r\n }\r\n\r\n // x < y? Point xc to the array of the bigger number.\r\n if (xLTy) {\r\n t = xc;\r\n xc = yc;\r\n yc = t;\r\n y.s = -y.s;\r\n }\r\n\r\n b = (j = yc.length) - (i = xc.length);\r\n\r\n // Append zeros to xc if shorter.\r\n // No need to add zeros to yc if shorter as subtract only needs to start at yc.length.\r\n if (b > 0) for (; b--; xc[i++] = 0);\r\n b = BASE - 1;\r\n\r\n // Subtract yc from xc.\r\n for (; j > a;) {\r\n\r\n if (xc[--j] < yc[j]) {\r\n for (i = j; i && !xc[--i]; xc[i] = b);\r\n --xc[i];\r\n xc[j] += BASE;\r\n }\r\n\r\n xc[j] -= yc[j];\r\n }\r\n\r\n // Remove leading zeros and adjust exponent accordingly.\r\n for (; xc[0] == 0; xc.splice(0, 1), --ye);\r\n\r\n // Zero?\r\n if (!xc[0]) {\r\n\r\n // Following IEEE 754 (2008) 6.3,\r\n // n - n = +0 but n - n = -0 when rounding towards -Infinity.\r\n y.s = ROUNDING_MODE == 3 ? -1 : 1;\r\n y.c = [y.e = 0];\r\n return y;\r\n }\r\n\r\n // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity\r\n // for finite x and y.\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * n % 0 = N\r\n * n % N = N\r\n * n % I = n\r\n * 0 % n = 0\r\n * -0 % n = -0\r\n * 0 % 0 = N\r\n * 0 % N = N\r\n * 0 % I = 0\r\n * N % n = N\r\n * N % 0 = N\r\n * N % N = N\r\n * N % I = N\r\n * I % n = N\r\n * I % 0 = N\r\n * I % N = N\r\n * I % I = N\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber modulo the value of\r\n * BigNumber(y, b). 
The result depends on the value of MODULO_MODE.\r\n */\r\n P.modulo = P.mod = function (y, b) {\r\n var q, s,\r\n x = this;\r\n\r\n y = new BigNumber(y, b);\r\n\r\n // Return NaN if x is Infinity or NaN, or y is NaN or zero.\r\n if (!x.c || !y.s || y.c && !y.c[0]) {\r\n return new BigNumber(NaN);\r\n\r\n // Return x if y is Infinity or x is zero.\r\n } else if (!y.c || x.c && !x.c[0]) {\r\n return new BigNumber(x);\r\n }\r\n\r\n if (MODULO_MODE == 9) {\r\n\r\n // Euclidian division: q = sign(y) * floor(x / abs(y))\r\n // r = x - qy where 0 <= r < abs(y)\r\n s = y.s;\r\n y.s = 1;\r\n q = div(x, y, 0, 3);\r\n y.s = s;\r\n q.s *= s;\r\n } else {\r\n q = div(x, y, 0, MODULO_MODE);\r\n }\r\n\r\n y = x.minus(q.times(y));\r\n\r\n // To match JavaScript %, ensure sign of zero is sign of dividend.\r\n if (!y.c[0] && MODULO_MODE == 1) y.s = x.s;\r\n\r\n return y;\r\n };\r\n\r\n\r\n /*\r\n * n * 0 = 0\r\n * n * N = N\r\n * n * I = I\r\n * 0 * n = 0\r\n * 0 * 0 = 0\r\n * 0 * N = N\r\n * 0 * I = N\r\n * N * n = N\r\n * N * 0 = N\r\n * N * N = N\r\n * N * I = N\r\n * I * n = I\r\n * I * 0 = N\r\n * I * N = N\r\n * I * I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value\r\n * of BigNumber(y, b).\r\n */\r\n P.multipliedBy = P.times = function (y, b) {\r\n var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc,\r\n base, sqrtBase,\r\n x = this,\r\n xc = x.c,\r\n yc = (y = new BigNumber(y, b)).c;\r\n\r\n // Either NaN, ±Infinity or ±0?\r\n if (!xc || !yc || !xc[0] || !yc[0]) {\r\n\r\n // Return NaN if either is NaN, or one is 0 and the other is Infinity.\r\n if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) {\r\n y.c = y.e = y.s = null;\r\n } else {\r\n y.s *= x.s;\r\n\r\n // Return ±Infinity if either is ±Infinity.\r\n if (!xc || !yc) {\r\n y.c = y.e = null;\r\n\r\n // Return ±0 if either is ±0.\r\n } else {\r\n y.c = [0];\r\n y.e = 0;\r\n }\r\n }\r\n\r\n return y;\r\n }\r\n\r\n e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE);\r\n y.s *= x.s;\r\n xcL = xc.length;\r\n ycL = yc.length;\r\n\r\n // Ensure xc points to longer array and xcL to its length.\r\n if (xcL < ycL) {\r\n zc = xc;\r\n xc = yc;\r\n yc = zc;\r\n i = xcL;\r\n xcL = ycL;\r\n ycL = i;\r\n }\r\n\r\n // Initialise the result array with zeros.\r\n for (i = xcL + ycL, zc = []; i--; zc.push(0));\r\n\r\n base = BASE;\r\n sqrtBase = SQRT_BASE;\r\n\r\n for (i = ycL; --i >= 0;) {\r\n c = 0;\r\n ylo = yc[i] % sqrtBase;\r\n yhi = yc[i] / sqrtBase | 0;\r\n\r\n for (k = xcL, j = i + k; j > i;) {\r\n xlo = xc[--k] % sqrtBase;\r\n xhi = xc[k] / sqrtBase | 0;\r\n m = yhi * xlo + xhi * ylo;\r\n xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c;\r\n c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi;\r\n zc[j--] = xlo % base;\r\n }\r\n\r\n zc[j] = c;\r\n }\r\n\r\n if (c) {\r\n ++e;\r\n } else {\r\n zc.splice(0, 1);\r\n }\r\n\r\n return normalise(y, zc, e);\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber negated,\r\n * i.e. 
multiplied by -1.\r\n */\r\n P.negated = function () {\r\n var x = new BigNumber(this);\r\n x.s = -x.s || null;\r\n return x;\r\n };\r\n\r\n\r\n /*\r\n * n + 0 = n\r\n * n + N = N\r\n * n + I = I\r\n * 0 + n = n\r\n * 0 + 0 = 0\r\n * 0 + N = N\r\n * 0 + I = I\r\n * N + n = N\r\n * N + 0 = N\r\n * N + N = N\r\n * N + I = N\r\n * I + n = I\r\n * I + 0 = I\r\n * I + N = N\r\n * I + I = I\r\n *\r\n * Return a new BigNumber whose value is the value of this BigNumber plus the value of\r\n * BigNumber(y, b).\r\n */\r\n P.plus = function (y, b) {\r\n var t,\r\n x = this,\r\n a = x.s;\r\n\r\n y = new BigNumber(y, b);\r\n b = y.s;\r\n\r\n // Either NaN?\r\n if (!a || !b) return new BigNumber(NaN);\r\n\r\n // Signs differ?\r\n if (a != b) {\r\n y.s = -b;\r\n return x.minus(y);\r\n }\r\n\r\n var xe = x.e / LOG_BASE,\r\n ye = y.e / LOG_BASE,\r\n xc = x.c,\r\n yc = y.c;\r\n\r\n if (!xe || !ye) {\r\n\r\n // Return ±Infinity if either ±Infinity.\r\n if (!xc || !yc) return new BigNumber(a / 0);\r\n\r\n // Either zero?\r\n // Return y if y is non-zero, x if x is non-zero, or zero if both are zero.\r\n if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0);\r\n }\r\n\r\n xe = bitFloor(xe);\r\n ye = bitFloor(ye);\r\n xc = xc.slice();\r\n\r\n // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts.\r\n if (a = xe - ye) {\r\n if (a > 0) {\r\n ye = xe;\r\n t = yc;\r\n } else {\r\n a = -a;\r\n t = xc;\r\n }\r\n\r\n t.reverse();\r\n for (; a--; t.push(0));\r\n t.reverse();\r\n }\r\n\r\n a = xc.length;\r\n b = yc.length;\r\n\r\n // Point xc to the longer array, and b to the shorter length.\r\n if (a - b < 0) {\r\n t = yc;\r\n yc = xc;\r\n xc = t;\r\n b = a;\r\n }\r\n\r\n // Only start adding at yc.length - 1 as the further digits of xc can be ignored.\r\n for (a = 0; b;) {\r\n a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0;\r\n xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE;\r\n }\r\n\r\n if (a) {\r\n xc = [a].concat(xc);\r\n ++ye;\r\n }\r\n\r\n // No need to check for zero, as +x + +y != 0 && -x + -y != 0\r\n // ye = MAX_EXP + 1 possible\r\n return normalise(y, xc, ye);\r\n };\r\n\r\n\r\n /*\r\n * If sd is undefined or null or true or false, return the number of significant digits of\r\n * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN.\r\n * If sd is true include integer-part trailing zeros in the count.\r\n *\r\n * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this\r\n * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or\r\n * ROUNDING_MODE if rm is omitted.\r\n *\r\n * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive.\r\n * boolean: whether to count integer-part trailing zeros: true or false.\r\n * [rm] {number} Rounding mode. 
Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.precision = P.sd = function (sd, rm) {\r\n var c, n, v,\r\n x = this;\r\n\r\n if (sd != null && sd !== !!sd) {\r\n intCheck(sd, 1, MAX);\r\n if (rm == null) rm = ROUNDING_MODE;\r\n else intCheck(rm, 0, 8);\r\n\r\n return round(new BigNumber(x), sd, rm);\r\n }\r\n\r\n if (!(c = x.c)) return null;\r\n v = c.length - 1;\r\n n = v * LOG_BASE + 1;\r\n\r\n if (v = c[v]) {\r\n\r\n // Subtract the number of trailing zeros of the last element.\r\n for (; v % 10 == 0; v /= 10, n--);\r\n\r\n // Add the number of digits of the first element.\r\n for (v = c[0]; v >= 10; v /= 10, n++);\r\n }\r\n\r\n if (sd && x.e + 1 > n) n = x.e + 1;\r\n\r\n return n;\r\n };\r\n\r\n\r\n /*\r\n * Return a new BigNumber whose value is the value of this BigNumber shifted by k places\r\n * (powers of 10). Shift to the right if n > 0, and to the left if n < 0.\r\n *\r\n * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}'\r\n */\r\n P.shiftedBy = function (k) {\r\n intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER);\r\n return this.times('1e' + k);\r\n };\r\n\r\n\r\n /*\r\n * sqrt(-n) = N\r\n * sqrt(N) = N\r\n * sqrt(-I) = N\r\n * sqrt(I) = I\r\n * sqrt(0) = 0\r\n * sqrt(-0) = -0\r\n *\r\n * Return a new BigNumber whose value is the square root of the value of this BigNumber,\r\n * rounded according to DECIMAL_PLACES and ROUNDING_MODE.\r\n */\r\n P.squareRoot = P.sqrt = function () {\r\n var m, n, r, rep, t,\r\n x = this,\r\n c = x.c,\r\n s = x.s,\r\n e = x.e,\r\n dp = DECIMAL_PLACES + 4,\r\n half = new BigNumber('0.5');\r\n\r\n // Negative/NaN/Infinity/zero?\r\n if (s !== 1 || !c || !c[0]) {\r\n return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0);\r\n }\r\n\r\n // Initial estimate.\r\n s = Math.sqrt(+valueOf(x));\r\n\r\n // Math.sqrt underflow/overflow?\r\n // Pass x to Math.sqrt as integer, then adjust the exponent of the result.\r\n if (s == 0 || s == 1 / 0) {\r\n n = coeffToString(c);\r\n if ((n.length + e) % 2 == 0) n += '0';\r\n s = Math.sqrt(+n);\r\n e = bitFloor((e + 1) / 2) - (e < 0 || e % 2);\r\n\r\n if (s == 1 / 0) {\r\n n = '5e' + e;\r\n } else {\r\n n = s.toExponential();\r\n n = n.slice(0, n.indexOf('e') + 1) + e;\r\n }\r\n\r\n r = new BigNumber(n);\r\n } else {\r\n r = new BigNumber(s + '');\r\n }\r\n\r\n // Check for zero.\r\n // r could be zero if MIN_EXP is changed after the this value was created.\r\n // This would cause a division by zero (x/t) and hence Infinity below, which would cause\r\n // coeffToString to throw.\r\n if (r.c[0]) {\r\n e = r.e;\r\n s = e + dp;\r\n if (s < 3) s = 0;\r\n\r\n // Newton-Raphson iteration.\r\n for (; ;) {\r\n t = r;\r\n r = half.times(t.plus(div(x, t, dp, 1)));\r\n\r\n if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) {\r\n\r\n // The exponent of r may here be one less than the final result exponent,\r\n // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits\r\n // are indexed correctly.\r\n if (r.e < e) --s;\r\n n = n.slice(s - 3, s + 1);\r\n\r\n // The 4th rounding digit may be in error by -1 so if the 4 rounding digits\r\n // are 9999 or 4999 (i.e. 
approaching a rounding boundary) continue the\r\n // iteration.\r\n if (n == '9999' || !rep && n == '4999') {\r\n\r\n // On the first iteration only, check to see if rounding up gives the\r\n // exact result as the nines may infinitely repeat.\r\n if (!rep) {\r\n round(t, t.e + DECIMAL_PLACES + 2, 0);\r\n\r\n if (t.times(t).eq(x)) {\r\n r = t;\r\n break;\r\n }\r\n }\r\n\r\n dp += 4;\r\n s += 4;\r\n rep = 1;\r\n } else {\r\n\r\n // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact\r\n // result. If not, then there are further digits and m will be truthy.\r\n if (!+n || !+n.slice(1) && n.charAt(0) == '5') {\r\n\r\n // Truncate to the first rounding digit.\r\n round(r, r.e + DECIMAL_PLACES + 2, 1);\r\n m = !r.times(r).eq(x);\r\n }\r\n\r\n break;\r\n }\r\n }\r\n }\r\n }\r\n\r\n return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in exponential notation and\r\n * rounded using ROUNDING_MODE to dp fixed decimal places.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toExponential = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp++;\r\n }\r\n return format(this, dp, rm, 1);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounding\r\n * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted.\r\n *\r\n * Note: as with JavaScript's number type, (-0).toFixed(0) is '0',\r\n * but e.g. (-0.00001).toFixed(0) is '-0'.\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n */\r\n P.toFixed = function (dp, rm) {\r\n if (dp != null) {\r\n intCheck(dp, 0, MAX);\r\n dp = dp + this.e + 1;\r\n }\r\n return format(this, dp, rm);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in fixed-point notation rounded\r\n * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties\r\n * of the format or FORMAT object (see BigNumber.set).\r\n *\r\n * The formatting object may contain some or all of the properties shown below.\r\n *\r\n * FORMAT = {\r\n * prefix: '',\r\n * groupSize: 3,\r\n * secondaryGroupSize: 0,\r\n * groupSeparator: ',',\r\n * decimalSeparator: '.',\r\n * fractionGroupSize: 0,\r\n * fractionGroupSeparator: '\\xA0', // non-breaking space\r\n * suffix: ''\r\n * };\r\n *\r\n * [dp] {number} Decimal places. Integer, 0 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n * [format] {object} Formatting options. 
See FORMAT pbject above.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}'\r\n * '[BigNumber Error] Argument not an object: {format}'\r\n */\r\n P.toFormat = function (dp, rm, format) {\r\n var str,\r\n x = this;\r\n\r\n if (format == null) {\r\n if (dp != null && rm && typeof rm == 'object') {\r\n format = rm;\r\n rm = null;\r\n } else if (dp && typeof dp == 'object') {\r\n format = dp;\r\n dp = rm = null;\r\n } else {\r\n format = FORMAT;\r\n }\r\n } else if (typeof format != 'object') {\r\n throw Error\r\n (bignumberError + 'Argument not an object: ' + format);\r\n }\r\n\r\n str = x.toFixed(dp, rm);\r\n\r\n if (x.c) {\r\n var i,\r\n arr = str.split('.'),\r\n g1 = +format.groupSize,\r\n g2 = +format.secondaryGroupSize,\r\n groupSeparator = format.groupSeparator || '',\r\n intPart = arr[0],\r\n fractionPart = arr[1],\r\n isNeg = x.s < 0,\r\n intDigits = isNeg ? intPart.slice(1) : intPart,\r\n len = intDigits.length;\r\n\r\n if (g2) {\r\n i = g1;\r\n g1 = g2;\r\n g2 = i;\r\n len -= i;\r\n }\r\n\r\n if (g1 > 0 && len > 0) {\r\n i = len % g1 || g1;\r\n intPart = intDigits.substr(0, i);\r\n for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1);\r\n if (g2 > 0) intPart += groupSeparator + intDigits.slice(i);\r\n if (isNeg) intPart = '-' + intPart;\r\n }\r\n\r\n str = fractionPart\r\n ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize)\r\n ? fractionPart.replace(new RegExp('\\\\d{' + g2 + '}\\\\B', 'g'),\r\n '$&' + (format.fractionGroupSeparator || ''))\r\n : fractionPart)\r\n : intPart;\r\n }\r\n\r\n return (format.prefix || '') + str + (format.suffix || '');\r\n };\r\n\r\n\r\n /*\r\n * Return an array of two BigNumbers representing the value of this BigNumber as a simple\r\n * fraction with an integer numerator and an integer denominator.\r\n * The denominator will be a positive non-zero value less than or equal to the specified\r\n * maximum denominator. If a maximum denominator is not specified, the denominator will be\r\n * the lowest value necessary to represent the number exactly.\r\n *\r\n * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator.\r\n *\r\n * '[BigNumber Error] Argument {not an integer|out of range} : {md}'\r\n */\r\n P.toFraction = function (md) {\r\n var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s,\r\n x = this,\r\n xc = x.c;\r\n\r\n if (md != null) {\r\n n = new BigNumber(md);\r\n\r\n // Throw if md is less than one or is not an integer, unless it is Infinity.\r\n if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) {\r\n throw Error\r\n (bignumberError + 'Argument ' +\r\n (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n));\r\n }\r\n }\r\n\r\n if (!xc) return new BigNumber(x);\r\n\r\n d = new BigNumber(ONE);\r\n n1 = d0 = new BigNumber(ONE);\r\n d1 = n0 = new BigNumber(ONE);\r\n s = coeffToString(xc);\r\n\r\n // Determine initial denominator.\r\n // d is a power of 10 and the minimum max denominator that specifies the value exactly.\r\n e = d.e = s.length - x.e - 1;\r\n d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp];\r\n md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n;\r\n\r\n exp = MAX_EXP;\r\n MAX_EXP = 1 / 0;\r\n n = new BigNumber(s);\r\n\r\n // n0 = d1 = 0\r\n n0.c[0] = 0;\r\n\r\n for (; ;) {\r\n q = div(n, d, 0, 1);\r\n d2 = d0.plus(q.times(d1));\r\n if (d2.comparedTo(md) == 1) break;\r\n d0 = d1;\r\n d1 = d2;\r\n n1 = n0.plus(q.times(d2 = n1));\r\n n0 = d2;\r\n d = n.minus(q.times(d2 = d));\r\n n = d2;\r\n }\r\n\r\n d2 = div(md.minus(d0), d1, 0, 1);\r\n n0 = n0.plus(d2.times(n1));\r\n d0 = d0.plus(d2.times(d1));\r\n n0.s = n1.s = x.s;\r\n e = e * 2;\r\n\r\n // Determine which fraction is closer to x, n0/d0 or n1/d1\r\n r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo(\r\n div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0];\r\n\r\n MAX_EXP = exp;\r\n\r\n return r;\r\n };\r\n\r\n\r\n /*\r\n * Return the value of this BigNumber converted to a number primitive.\r\n */\r\n P.toNumber = function () {\r\n return +valueOf(this);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber rounded to sd significant digits\r\n * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits\r\n * necessary to represent the integer part of the value in fixed-point notation, then use\r\n * exponential notation.\r\n *\r\n * [sd] {number} Significant digits. Integer, 1 to MAX inclusive.\r\n * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive.\r\n *\r\n * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}'\r\n */\r\n P.toPrecision = function (sd, rm) {\r\n if (sd != null) intCheck(sd, 1, MAX);\r\n return format(this, sd, rm, 2);\r\n };\r\n\r\n\r\n /*\r\n * Return a string representing the value of this BigNumber in base b, or base 10 if b is\r\n * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and\r\n * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent\r\n * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than\r\n * TO_EXP_NEG, return exponential notation.\r\n *\r\n * [b] {number} Integer, 2 to ALPHABET.length inclusive.\r\n *\r\n * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}'\r\n */\r\n P.toString = function (b) {\r\n var str,\r\n n = this,\r\n s = n.s,\r\n e = n.e;\r\n\r\n // Infinity or NaN?\r\n if (e === null) {\r\n if (s) {\r\n str = 'Infinity';\r\n if (s < 0) str = '-' + str;\r\n } else {\r\n str = 'NaN';\r\n }\r\n } else {\r\n if (b == null) {\r\n str = e <= TO_EXP_NEG || e >= TO_EXP_POS\r\n ? toExponential(coeffToString(n.c), e)\r\n : toFixedPoint(coeffToString(n.c), e, '0');\r\n } else if (b === 10 && alphabetHasNormalDecimalDigits) {\r\n n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE);\r\n str = toFixedPoint(coeffToString(n.c), n.e, '0');\r\n } else {\r\n intCheck(b, 2, ALPHABET.length, 'Base');\r\n str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true);\r\n }\r\n\r\n if (s < 0 && n.c[0]) str = '-' + str;\r\n }\r\n\r\n return str;\r\n };\r\n\r\n\r\n /*\r\n * Return as toString, but do not accept a base argument, and include the minus sign for\r\n * negative zero.\r\n */\r\n P.valueOf = P.toJSON = function () {\r\n return valueOf(this);\r\n };\r\n\r\n\r\n P._isBigNumber = true;\r\n\r\n if (configObject != null) BigNumber.set(configObject);\r\n\r\n return BigNumber;\r\n }\r\n\r\n\r\n // PRIVATE HELPER FUNCTIONS\r\n\r\n // These functions don't need access to variables,\r\n // e.g. 
DECIMAL_PLACES, in the scope of the `clone` function above.\r\n\r\n\r\n function bitFloor(n) {\r\n var i = n | 0;\r\n return n > 0 || n === i ? i : i - 1;\r\n }\r\n\r\n\r\n // Return a coefficient array as a string of base 10 digits.\r\n function coeffToString(a) {\r\n var s, z,\r\n i = 1,\r\n j = a.length,\r\n r = a[0] + '';\r\n\r\n for (; i < j;) {\r\n s = a[i++] + '';\r\n z = LOG_BASE - s.length;\r\n for (; z--; s = '0' + s);\r\n r += s;\r\n }\r\n\r\n // Determine trailing zeros.\r\n for (j = r.length; r.charCodeAt(--j) === 48;);\r\n\r\n return r.slice(0, j + 1 || 1);\r\n }\r\n\r\n\r\n // Compare the value of BigNumbers x and y.\r\n function compare(x, y) {\r\n var a, b,\r\n xc = x.c,\r\n yc = y.c,\r\n i = x.s,\r\n j = y.s,\r\n k = x.e,\r\n l = y.e;\r\n\r\n // Either NaN?\r\n if (!i || !j) return null;\r\n\r\n a = xc && !xc[0];\r\n b = yc && !yc[0];\r\n\r\n // Either zero?\r\n if (a || b) return a ? b ? 0 : -j : i;\r\n\r\n // Signs differ?\r\n if (i != j) return i;\r\n\r\n a = i < 0;\r\n b = k == l;\r\n\r\n // Either Infinity?\r\n if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1;\r\n\r\n // Compare exponents.\r\n if (!b) return k > l ^ a ? 1 : -1;\r\n\r\n j = (k = xc.length) < (l = yc.length) ? k : l;\r\n\r\n // Compare digit by digit.\r\n for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1;\r\n\r\n // Compare lengths.\r\n return k == l ? 0 : k > l ^ a ? 1 : -1;\r\n }\r\n\r\n\r\n /*\r\n * Check that n is a primitive number, an integer, and in range, otherwise throw.\r\n */\r\n function intCheck(n, min, max, name) {\r\n if (n < min || n > max || n !== mathfloor(n)) {\r\n throw Error\r\n (bignumberError + (name || 'Argument') + (typeof n == 'number'\r\n ? n < min || n > max ? ' out of range: ' : ' not an integer: '\r\n : ' not a primitive number: ') + String(n));\r\n }\r\n }\r\n\r\n\r\n // Assumes finite n.\r\n function isOdd(n) {\r\n var k = n.c.length - 1;\r\n return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0;\r\n }\r\n\r\n\r\n function toExponential(str, e) {\r\n return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) +\r\n (e < 0 ? 'e' : 'e+') + e;\r\n }\r\n\r\n\r\n function toFixedPoint(str, e, z) {\r\n var len, zs;\r\n\r\n // Negative exponent?\r\n if (e < 0) {\r\n\r\n // Prepend zeros.\r\n for (zs = z + '.'; ++e; zs += z);\r\n str = zs + str;\r\n\r\n // Positive exponent\r\n } else {\r\n len = str.length;\r\n\r\n // Append zeros.\r\n if (++e > len) {\r\n for (zs = z, e -= len; --e; zs += z);\r\n str += zs;\r\n } else if (e < len) {\r\n str = str.slice(0, e) + '.' + str.slice(e);\r\n }\r\n }\r\n\r\n return str;\r\n }\r\n\r\n\r\n // EXPORT\r\n\r\n\r\n BigNumber = clone();\r\n BigNumber['default'] = BigNumber.BigNumber = BigNumber;\r\n\r\n // AMD.\r\n if (typeof define == 'function' && define.amd) {\r\n define(function () { return BigNumber; });\r\n\r\n // Node.js and other environments that support module.exports.\r\n } else if (typeof module != 'undefined' && module.exports) {\r\n module.exports = BigNumber;\r\n\r\n // Browser.\r\n } else {\r\n if (!globalObject) {\r\n globalObject = typeof self != 'undefined' && self ? 
self : window;\r\n }\r\n\r\n globalObject.BigNumber = BigNumber;\r\n }\r\n})(this);\r\n","/*jshint node:true */\n'use strict';\nvar Buffer = require('buffer').Buffer; // browserify\nvar SlowBuffer = require('buffer').SlowBuffer;\n\nmodule.exports = bufferEq;\n\nfunction bufferEq(a, b) {\n\n // shortcutting on type is necessary for correctness\n if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {\n return false;\n }\n\n // buffer sizes should be well-known information, so despite this\n // shortcutting, it doesn't leak any information about the *contents* of the\n // buffers.\n if (a.length !== b.length) {\n return false;\n }\n\n var c = 0;\n for (var i = 0; i < a.length; i++) {\n /*jshint bitwise:false */\n c |= a[i] ^ b[i]; // XOR\n }\n return c === 0;\n}\n\nbufferEq.install = function() {\n Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) {\n return bufferEq(this, that);\n };\n};\n\nvar origBufEqual = Buffer.prototype.equal;\nvar origSlowBufEqual = SlowBuffer.prototype.equal;\nbufferEq.restore = function() {\n Buffer.prototype.equal = origBufEqual;\n SlowBuffer.prototype.equal = origSlowBufEqual;\n};\n","/*!\n * compressible\n * Copyright(c) 2013 Jonathan Ong\n * Copyright(c) 2014 Jeremiah Senkpiel\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\n\n/**\n * Module variables.\n * @private\n */\n\nvar COMPRESSIBLE_TYPE_REGEXP = /^text\\/|\\+(?:json|text|xml)$/i\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = compressible\n\n/**\n * Checks if a type is compressible.\n *\n * @param {string} type\n * @return {Boolean} compressible\n * @public\n */\n\nfunction compressible (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // strip parameters\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && match[1].toLowerCase()\n var data = db[mime]\n\n // return database information\n if (data && data.compressible !== undefined) {\n return data.compressible\n }\n\n // fallback to regexp or unknown\n return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined\n}\n","/* eslint-env browser */\n\n/**\n * This is the web browser implementation of `debug()`.\n */\n\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.storage = localstorage();\nexports.destroy = (() => {\n\tlet warned = false;\n\n\treturn () => {\n\t\tif (!warned) {\n\t\t\twarned = true;\n\t\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.');\n\t\t}\n\t};\n})();\n\n/**\n * Colors.\n */\n\nexports.colors = [\n\t'#0000CC',\n\t'#0000FF',\n\t'#0033CC',\n\t'#0033FF',\n\t'#0066CC',\n\t'#0066FF',\n\t'#0099CC',\n\t'#0099FF',\n\t'#00CC00',\n\t'#00CC33',\n\t'#00CC66',\n\t'#00CC99',\n\t'#00CCCC',\n\t'#00CCFF',\n\t'#3300CC',\n\t'#3300FF',\n\t'#3333CC',\n\t'#3333FF',\n\t'#3366CC',\n\t'#3366FF',\n\t'#3399CC',\n\t'#3399FF',\n\t'#33CC00',\n\t'#33CC33',\n\t'#33CC66',\n\t'#33CC99',\n\t'#33CCCC',\n\t'#33CCFF',\n\t'#6600CC',\n\t'#6600FF',\n\t'#6633CC',\n\t'#6633FF',\n\t'#66CC00',\n\t'#66CC33',\n\t'#9900CC',\n\t'#9900FF',\n\t'#9933CC',\n\t'#9933FF',\n\t'#99CC00',\n\t'#99CC33',\n\t'#CC0000',\n\t'#CC0033',\n\t'#CC0066',\n\t'#CC0099',\n\t'#CC00CC',\n\t'#CC00FF',\n\t'#CC3300',\n\t'#CC3333',\n\t'#CC3366',\n\t'#CC3399',\n\t'#CC33CC',\n\t'#CC33FF',\n\t'#CC6600',\n\t'#CC6633',\n\t'#CC9900',\n\t'#CC9933',\n\t'#CCCC00',\n\t'#CCCC33',\n\t'#FF0000',\n\t'#FF0033',\n\t'#FF0066',\n\t'#FF0099',\n\t'#FF00CC',\n\t'#FF00FF',\n\t'#FF3300',\n\t'#FF3333',\n\t'#FF3366',\n\t'#FF3399',\n\t'#FF33CC',\n\t'#FF33FF',\n\t'#FF6600',\n\t'#FF6633',\n\t'#FF9900',\n\t'#FF9933',\n\t'#FFCC00',\n\t'#FFCC33'\n];\n\n/**\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\n * and the Firebug extension (any Firefox version) are known\n * to support \"%c\" CSS customizations.\n *\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\n */\n\n// eslint-disable-next-line complexity\nfunction useColors() {\n\t// NB: In an Electron preload script, document will be defined but not fully\n\t// initialized. Since we know we're in Chrome, we'll just detect this case\n\t// explicitly\n\tif (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {\n\t\treturn true;\n\t}\n\n\t// Internet Explorer and Edge do not support colors.\n\tif (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\\/(\\d+)/)) {\n\t\treturn false;\n\t}\n\n\t// Is webkit? http://stackoverflow.com/a/16459606/376773\n\t// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\n\treturn (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\n\t\t// Is firebug? http://stackoverflow.com/a/398120/376773\n\t\t(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\n\t\t// Is firefox >= v31?\n\t\t// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\/(\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\n\t\t// Double check webkit in userAgent just in case we are in a worker\n\t\t(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\/(\\d+)/));\n}\n\n/**\n * Colorize log arguments if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\targs[0] = (this.useColors ? '%c' : '') +\n\t\tthis.namespace +\n\t\t(this.useColors ? ' %c' : ' ') +\n\t\targs[0] +\n\t\t(this.useColors ? 
'%c ' : ' ') +\n\t\t'+' + module.exports.humanize(this.diff);\n\n\tif (!this.useColors) {\n\t\treturn;\n\t}\n\n\tconst c = 'color: ' + this.color;\n\targs.splice(1, 0, c, 'color: inherit');\n\n\t// The final \"%c\" is somewhat tricky, because there could be other\n\t// arguments passed either before or after the %c, so we need to\n\t// figure out the correct index to insert the CSS into\n\tlet index = 0;\n\tlet lastC = 0;\n\targs[0].replace(/%[a-zA-Z%]/g, match => {\n\t\tif (match === '%%') {\n\t\t\treturn;\n\t\t}\n\t\tindex++;\n\t\tif (match === '%c') {\n\t\t\t// We only are interested in the *last* %c\n\t\t\t// (the user may have provided their own)\n\t\t\tlastC = index;\n\t\t}\n\t});\n\n\targs.splice(lastC, 0, c);\n}\n\n/**\n * Invokes `console.debug()` when available.\n * No-op when `console.debug` is not a \"function\".\n * If `console.debug` is not available, falls back\n * to `console.log`.\n *\n * @api public\n */\nexports.log = console.debug || console.log || (() => {});\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\ttry {\n\t\tif (namespaces) {\n\t\t\texports.storage.setItem('debug', namespaces);\n\t\t} else {\n\t\t\texports.storage.removeItem('debug');\n\t\t}\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\nfunction load() {\n\tlet r;\n\ttry {\n\t\tr = exports.storage.getItem('debug');\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n\n\t// If debug isn't set in LS, and we're in Electron, try to load $DEBUG\n\tif (!r && typeof process !== 'undefined' && 'env' in process) {\n\t\tr = process.env.DEBUG;\n\t}\n\n\treturn r;\n}\n\n/**\n * Localstorage attempts to return the localstorage.\n *\n * This is necessary because safari throws\n * when a user disables cookies/localstorage\n * and you attempt to access it.\n *\n * @return {LocalStorage}\n * @api private\n */\n\nfunction localstorage() {\n\ttry {\n\t\t// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context\n\t\t// The Browser also has localStorage in the global context.\n\t\treturn localStorage;\n\t} catch (error) {\n\t\t// Swallow\n\t\t// XXX (@Qix-) should we be logging these?\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.\n */\n\nformatters.j = function (v) {\n\ttry {\n\t\treturn JSON.stringify(v);\n\t} catch (error) {\n\t\treturn '[UnexpectedJSONParseError]: ' + error.message;\n\t}\n};\n","\n/**\n * This is the common logic for both the Node.js and web browser\n * implementations of `debug()`.\n */\n\nfunction setup(env) {\n\tcreateDebug.debug = createDebug;\n\tcreateDebug.default = createDebug;\n\tcreateDebug.coerce = coerce;\n\tcreateDebug.disable = disable;\n\tcreateDebug.enable = enable;\n\tcreateDebug.enabled = enabled;\n\tcreateDebug.humanize = require('ms');\n\tcreateDebug.destroy = destroy;\n\n\tObject.keys(env).forEach(key => {\n\t\tcreateDebug[key] = env[key];\n\t});\n\n\t/**\n\t* The currently active debug mode names, and names to skip.\n\t*/\n\n\tcreateDebug.names = [];\n\tcreateDebug.skips = [];\n\n\t/**\n\t* Map of special \"%n\" handling functions, for the debug \"format\" argument.\n\t*\n\t* Valid key names are a single, lower or upper-case letter, 
i.e. \"n\" and \"N\".\n\t*/\n\tcreateDebug.formatters = {};\n\n\t/**\n\t* Selects a color for a debug namespace\n\t* @param {String} namespace The namespace string for the debug instance to be colored\n\t* @return {Number|String} An ANSI color code for the given namespace\n\t* @api private\n\t*/\n\tfunction selectColor(namespace) {\n\t\tlet hash = 0;\n\n\t\tfor (let i = 0; i < namespace.length; i++) {\n\t\t\thash = ((hash << 5) - hash) + namespace.charCodeAt(i);\n\t\t\thash |= 0; // Convert to 32bit integer\n\t\t}\n\n\t\treturn createDebug.colors[Math.abs(hash) % createDebug.colors.length];\n\t}\n\tcreateDebug.selectColor = selectColor;\n\n\t/**\n\t* Create a debugger with the given `namespace`.\n\t*\n\t* @param {String} namespace\n\t* @return {Function}\n\t* @api public\n\t*/\n\tfunction createDebug(namespace) {\n\t\tlet prevTime;\n\t\tlet enableOverride = null;\n\t\tlet namespacesCache;\n\t\tlet enabledCache;\n\n\t\tfunction debug(...args) {\n\t\t\t// Disabled?\n\t\t\tif (!debug.enabled) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst self = debug;\n\n\t\t\t// Set `diff` timestamp\n\t\t\tconst curr = Number(new Date());\n\t\t\tconst ms = curr - (prevTime || curr);\n\t\t\tself.diff = ms;\n\t\t\tself.prev = prevTime;\n\t\t\tself.curr = curr;\n\t\t\tprevTime = curr;\n\n\t\t\targs[0] = createDebug.coerce(args[0]);\n\n\t\t\tif (typeof args[0] !== 'string') {\n\t\t\t\t// Anything else let's inspect with %O\n\t\t\t\targs.unshift('%O');\n\t\t\t}\n\n\t\t\t// Apply any `formatters` transformations\n\t\t\tlet index = 0;\n\t\t\targs[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {\n\t\t\t\t// If we encounter an escaped % then don't increase the array index\n\t\t\t\tif (match === '%%') {\n\t\t\t\t\treturn '%';\n\t\t\t\t}\n\t\t\t\tindex++;\n\t\t\t\tconst formatter = createDebug.formatters[format];\n\t\t\t\tif (typeof formatter === 'function') {\n\t\t\t\t\tconst val = args[index];\n\t\t\t\t\tmatch = formatter.call(self, val);\n\n\t\t\t\t\t// Now we need to remove `args[index]` since it's inlined in the `format`\n\t\t\t\t\targs.splice(index, 1);\n\t\t\t\t\tindex--;\n\t\t\t\t}\n\t\t\t\treturn match;\n\t\t\t});\n\n\t\t\t// Apply env-specific formatting (colors, etc.)\n\t\t\tcreateDebug.formatArgs.call(self, args);\n\n\t\t\tconst logFn = self.log || createDebug.log;\n\t\t\tlogFn.apply(self, args);\n\t\t}\n\n\t\tdebug.namespace = namespace;\n\t\tdebug.useColors = createDebug.useColors();\n\t\tdebug.color = createDebug.selectColor(namespace);\n\t\tdebug.extend = extend;\n\t\tdebug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.\n\n\t\tObject.defineProperty(debug, 'enabled', {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false,\n\t\t\tget: () => {\n\t\t\t\tif (enableOverride !== null) {\n\t\t\t\t\treturn enableOverride;\n\t\t\t\t}\n\t\t\t\tif (namespacesCache !== createDebug.namespaces) {\n\t\t\t\t\tnamespacesCache = createDebug.namespaces;\n\t\t\t\t\tenabledCache = createDebug.enabled(namespace);\n\t\t\t\t}\n\n\t\t\t\treturn enabledCache;\n\t\t\t},\n\t\t\tset: v => {\n\t\t\t\tenableOverride = v;\n\t\t\t}\n\t\t});\n\n\t\t// Env-specific initialization logic for debug instances\n\t\tif (typeof createDebug.init === 'function') {\n\t\t\tcreateDebug.init(debug);\n\t\t}\n\n\t\treturn debug;\n\t}\n\n\tfunction extend(namespace, delimiter) {\n\t\tconst newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);\n\t\tnewDebug.log = this.log;\n\t\treturn newDebug;\n\t}\n\n\t/**\n\t* Enables a debug mode by namespaces. 
This can include modes\n\t* separated by a colon and wildcards.\n\t*\n\t* @param {String} namespaces\n\t* @api public\n\t*/\n\tfunction enable(namespaces) {\n\t\tcreateDebug.save(namespaces);\n\t\tcreateDebug.namespaces = namespaces;\n\n\t\tcreateDebug.names = [];\n\t\tcreateDebug.skips = [];\n\n\t\tlet i;\n\t\tconst split = (typeof namespaces === 'string' ? namespaces : '').split(/[\\s,]+/);\n\t\tconst len = split.length;\n\n\t\tfor (i = 0; i < len; i++) {\n\t\t\tif (!split[i]) {\n\t\t\t\t// ignore empty strings\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tnamespaces = split[i].replace(/\\*/g, '.*?');\n\n\t\t\tif (namespaces[0] === '-') {\n\t\t\t\tcreateDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));\n\t\t\t} else {\n\t\t\t\tcreateDebug.names.push(new RegExp('^' + namespaces + '$'));\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t* Disable debug output.\n\t*\n\t* @return {String} namespaces\n\t* @api public\n\t*/\n\tfunction disable() {\n\t\tconst namespaces = [\n\t\t\t...createDebug.names.map(toNamespace),\n\t\t\t...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)\n\t\t].join(',');\n\t\tcreateDebug.enable('');\n\t\treturn namespaces;\n\t}\n\n\t/**\n\t* Returns true if the given mode name is enabled, false otherwise.\n\t*\n\t* @param {String} name\n\t* @return {Boolean}\n\t* @api public\n\t*/\n\tfunction enabled(name) {\n\t\tif (name[name.length - 1] === '*') {\n\t\t\treturn true;\n\t\t}\n\n\t\tlet i;\n\t\tlet len;\n\n\t\tfor (i = 0, len = createDebug.skips.length; i < len; i++) {\n\t\t\tif (createDebug.skips[i].test(name)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\tfor (i = 0, len = createDebug.names.length; i < len; i++) {\n\t\t\tif (createDebug.names[i].test(name)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\treturn false;\n\t}\n\n\t/**\n\t* Convert regexp to namespace\n\t*\n\t* @param {RegExp} regxep\n\t* @return {String} namespace\n\t* @api private\n\t*/\n\tfunction toNamespace(regexp) {\n\t\treturn regexp.toString()\n\t\t\t.substring(2, regexp.toString().length - 2)\n\t\t\t.replace(/\\.\\*\\?$/, '*');\n\t}\n\n\t/**\n\t* Coerce `val`.\n\t*\n\t* @param {Mixed} val\n\t* @return {Mixed}\n\t* @api private\n\t*/\n\tfunction coerce(val) {\n\t\tif (val instanceof Error) {\n\t\t\treturn val.stack || val.message;\n\t\t}\n\t\treturn val;\n\t}\n\n\t/**\n\t* XXX DO NOT USE. This is a temporary stub function.\n\t* XXX It WILL be removed in the next major release.\n\t*/\n\tfunction destroy() {\n\t\tconsole.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');\n\t}\n\n\tcreateDebug.enable(createDebug.load());\n\n\treturn createDebug;\n}\n\nmodule.exports = setup;\n","/**\n * Detect Electron renderer / nwjs process, which is node, but we should\n * treat as a browser.\n */\n\nif (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {\n\tmodule.exports = require('./browser.js');\n} else {\n\tmodule.exports = require('./node.js');\n}\n","/**\n * Module dependencies.\n */\n\nconst tty = require('tty');\nconst util = require('util');\n\n/**\n * This is the Node.js implementation of `debug()`.\n */\n\nexports.init = init;\nexports.log = log;\nexports.formatArgs = formatArgs;\nexports.save = save;\nexports.load = load;\nexports.useColors = useColors;\nexports.destroy = util.deprecate(\n\t() => {},\n\t'Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'\n);\n\n/**\n * Colors.\n */\n\nexports.colors = [6, 2, 3, 4, 5, 1];\n\ntry {\n\t// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)\n\t// eslint-disable-next-line import/no-extraneous-dependencies\n\tconst supportsColor = require('supports-color');\n\n\tif (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {\n\t\texports.colors = [\n\t\t\t20,\n\t\t\t21,\n\t\t\t26,\n\t\t\t27,\n\t\t\t32,\n\t\t\t33,\n\t\t\t38,\n\t\t\t39,\n\t\t\t40,\n\t\t\t41,\n\t\t\t42,\n\t\t\t43,\n\t\t\t44,\n\t\t\t45,\n\t\t\t56,\n\t\t\t57,\n\t\t\t62,\n\t\t\t63,\n\t\t\t68,\n\t\t\t69,\n\t\t\t74,\n\t\t\t75,\n\t\t\t76,\n\t\t\t77,\n\t\t\t78,\n\t\t\t79,\n\t\t\t80,\n\t\t\t81,\n\t\t\t92,\n\t\t\t93,\n\t\t\t98,\n\t\t\t99,\n\t\t\t112,\n\t\t\t113,\n\t\t\t128,\n\t\t\t129,\n\t\t\t134,\n\t\t\t135,\n\t\t\t148,\n\t\t\t149,\n\t\t\t160,\n\t\t\t161,\n\t\t\t162,\n\t\t\t163,\n\t\t\t164,\n\t\t\t165,\n\t\t\t166,\n\t\t\t167,\n\t\t\t168,\n\t\t\t169,\n\t\t\t170,\n\t\t\t171,\n\t\t\t172,\n\t\t\t173,\n\t\t\t178,\n\t\t\t179,\n\t\t\t184,\n\t\t\t185,\n\t\t\t196,\n\t\t\t197,\n\t\t\t198,\n\t\t\t199,\n\t\t\t200,\n\t\t\t201,\n\t\t\t202,\n\t\t\t203,\n\t\t\t204,\n\t\t\t205,\n\t\t\t206,\n\t\t\t207,\n\t\t\t208,\n\t\t\t209,\n\t\t\t214,\n\t\t\t215,\n\t\t\t220,\n\t\t\t221\n\t\t];\n\t}\n} catch (error) {\n\t// Swallow - we only care if `supports-color` is available; it doesn't have to be.\n}\n\n/**\n * Build up the default `inspectOpts` object from the environment variables.\n *\n * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js\n */\n\nexports.inspectOpts = Object.keys(process.env).filter(key => {\n\treturn /^debug_/i.test(key);\n}).reduce((obj, key) => {\n\t// Camel-case\n\tconst prop = key\n\t\t.substring(6)\n\t\t.toLowerCase()\n\t\t.replace(/_([a-z])/g, (_, k) => {\n\t\t\treturn k.toUpperCase();\n\t\t});\n\n\t// Coerce string value into JS value\n\tlet val = process.env[key];\n\tif (/^(yes|on|true|enabled)$/i.test(val)) {\n\t\tval = true;\n\t} else if (/^(no|off|false|disabled)$/i.test(val)) {\n\t\tval = false;\n\t} else if (val === 'null') {\n\t\tval = null;\n\t} else {\n\t\tval = Number(val);\n\t}\n\n\tobj[prop] = val;\n\treturn obj;\n}, {});\n\n/**\n * Is stdout a TTY? Colored output is enabled when `true`.\n */\n\nfunction useColors() {\n\treturn 'colors' in exports.inspectOpts ?\n\t\tBoolean(exports.inspectOpts.colors) :\n\t\ttty.isatty(process.stderr.fd);\n}\n\n/**\n * Adds ANSI color escape codes if enabled.\n *\n * @api public\n */\n\nfunction formatArgs(args) {\n\tconst {namespace: name, useColors} = this;\n\n\tif (useColors) {\n\t\tconst c = this.color;\n\t\tconst colorCode = '\\u001B[3' + (c < 8 ? 
c : '8;5;' + c);\n\t\tconst prefix = ` ${colorCode};1m${name} \\u001B[0m`;\n\n\t\targs[0] = prefix + args[0].split('\\n').join('\\n' + prefix);\n\t\targs.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\\u001B[0m');\n\t} else {\n\t\targs[0] = getDate() + name + ' ' + args[0];\n\t}\n}\n\nfunction getDate() {\n\tif (exports.inspectOpts.hideDate) {\n\t\treturn '';\n\t}\n\treturn new Date().toISOString() + ' ';\n}\n\n/**\n * Invokes `util.format()` with the specified arguments and writes to stderr.\n */\n\nfunction log(...args) {\n\treturn process.stderr.write(util.format(...args) + '\\n');\n}\n\n/**\n * Save `namespaces`.\n *\n * @param {String} namespaces\n * @api private\n */\nfunction save(namespaces) {\n\tif (namespaces) {\n\t\tprocess.env.DEBUG = namespaces;\n\t} else {\n\t\t// If you set a process.env field to null or undefined, it gets cast to the\n\t\t// string 'null' or 'undefined'. Just delete instead.\n\t\tdelete process.env.DEBUG;\n\t}\n}\n\n/**\n * Load `namespaces`.\n *\n * @return {String} returns the previously persisted debug modes\n * @api private\n */\n\nfunction load() {\n\treturn process.env.DEBUG;\n}\n\n/**\n * Init logic for `debug` instances.\n *\n * Create a new `inspectOpts` object in case `useColors` is set\n * differently for a particular `debug` instance.\n */\n\nfunction init(debug) {\n\tdebug.inspectOpts = {};\n\n\tconst keys = Object.keys(exports.inspectOpts);\n\tfor (let i = 0; i < keys.length; i++) {\n\t\tdebug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];\n\t}\n}\n\nmodule.exports = require('./common')(exports);\n\nconst {formatters} = module.exports;\n\n/**\n * Map %o to `util.inspect()`, all on a single line.\n */\n\nformatters.o = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts)\n\t\t.split('\\n')\n\t\t.map(str => str.trim())\n\t\t.join(' ');\n};\n\n/**\n * Map %O to `util.inspect()`, allowing multiple lines if needed.\n */\n\nformatters.O = function (v) {\n\tthis.inspectOpts.colors = this.useColors;\n\treturn util.inspect(v, this.inspectOpts);\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","var stream = require('readable-stream')\nvar eos = require('end-of-stream')\nvar inherits = require('inherits')\nvar shift = require('stream-shift')\n\nvar SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)\n ? Buffer.from([0])\n : new Buffer([0])\n\nvar onuncork = function(self, fn) {\n if (self._corked) self.once('uncork', fn)\n else fn()\n}\n\nvar autoDestroy = function (self, err) {\n if (self._autoDestroy) self.destroy(err)\n}\n\nvar destroyer = function(self, end) {\n return function(err) {\n if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err)\n else if (end && !self._ended) self.end()\n }\n}\n\nvar end = function(ws, fn) {\n if (!ws) return fn()\n if (ws._writableState && ws._writableState.finished) return fn()\n if (ws._writableState) return ws.end(fn)\n ws.end()\n fn()\n}\n\nvar noop = function() {}\n\nvar toStreams2 = function(rs) {\n return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)\n}\n\nvar Duplexify = function(writable, readable, opts) {\n if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)\n stream.Duplex.call(this, opts)\n\n this._writable = null\n this._readable = null\n this._readable2 = null\n\n this._autoDestroy = !opts || opts.autoDestroy !== false\n this._forwardDestroy = !opts || opts.destroy !== false\n this._forwardEnd = !opts || opts.end !== false\n this._corked = 1 // start corked\n this._ondrain = null\n this._drained = false\n this._forwarding = false\n this._unwrite = null\n this._unread = null\n this._ended = false\n\n this.destroyed = false\n\n if (writable) this.setWritable(writable)\n if (readable) this.setReadable(readable)\n}\n\ninherits(Duplexify, stream.Duplex)\n\nDuplexify.obj = function(writable, readable, opts) {\n if (!opts) opts = {}\n opts.objectMode = true\n opts.highWaterMark = 16\n return new Duplexify(writable, readable, opts)\n}\n\nDuplexify.prototype.cork = function() {\n if (++this._corked === 1) this.emit('cork')\n}\n\nDuplexify.prototype.uncork = function() {\n if (this._corked && --this._corked === 0) this.emit('uncork')\n}\n\nDuplexify.prototype.setWritable = function(writable) {\n if (this._unwrite) this._unwrite()\n\n if (this.destroyed) {\n if (writable && writable.destroy) writable.destroy()\n return\n }\n\n if (writable === null || writable === false) {\n this.end()\n return\n }\n\n var self = this\n var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))\n\n var ondrain = function() {\n var ondrain = self._ondrain\n self._ondrain = null\n if (ondrain) ondrain()\n }\n\n var clear = function() {\n self._writable.removeListener('drain', ondrain)\n unend()\n }\n\n if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks\n\n this._writable = writable\n this._writable.on('drain', ondrain)\n this._unwrite = clear\n\n this.uncork() // always uncork setWritable\n}\n\nDuplexify.prototype.setReadable = function(readable) {\n if (this._unread) this._unread()\n\n if (this.destroyed) {\n if (readable && readable.destroy) readable.destroy()\n return\n }\n\n if (readable === null || readable === false) {\n this.push(null)\n this.resume()\n return\n }\n\n var self = this\n var unend = eos(readable, {writable:false, readable:true}, destroyer(this))\n\n var onreadable = function() {\n self._forward()\n }\n\n var onend = function() {\n self.push(null)\n }\n\n var clear = function() {\n self._readable2.removeListener('readable', onreadable)\n self._readable2.removeListener('end', onend)\n unend()\n }\n\n this._drained = true\n this._readable = readable\n this._readable2 = readable._readableState ? 
readable : toStreams2(readable)\n this._readable2.on('readable', onreadable)\n this._readable2.on('end', onend)\n this._unread = clear\n\n this._forward()\n}\n\nDuplexify.prototype._read = function() {\n this._drained = true\n this._forward()\n}\n\nDuplexify.prototype._forward = function() {\n if (this._forwarding || !this._readable2 || !this._drained) return\n this._forwarding = true\n\n var data\n\n while (this._drained && (data = shift(this._readable2)) !== null) {\n if (this.destroyed) continue\n this._drained = this.push(data)\n }\n\n this._forwarding = false\n}\n\nDuplexify.prototype.destroy = function(err, cb) {\n if (!cb) cb = noop\n if (this.destroyed) return cb(null)\n this.destroyed = true\n\n var self = this\n process.nextTick(function() {\n self._destroy(err)\n cb(null)\n })\n}\n\nDuplexify.prototype._destroy = function(err) {\n if (err) {\n var ondrain = this._ondrain\n this._ondrain = null\n if (ondrain) ondrain(err)\n else this.emit('error', err)\n }\n\n if (this._forwardDestroy) {\n if (this._readable && this._readable.destroy) this._readable.destroy()\n if (this._writable && this._writable.destroy) this._writable.destroy()\n }\n\n this.emit('close')\n}\n\nDuplexify.prototype._write = function(data, enc, cb) {\n if (this.destroyed) return\n if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))\n if (data === SIGNAL_FLUSH) return this._finish(cb)\n if (!this._writable) return cb()\n\n if (this._writable.write(data) === false) this._ondrain = cb\n else if (!this.destroyed) cb()\n}\n\nDuplexify.prototype._finish = function(cb) {\n var self = this\n this.emit('preend')\n onuncork(this, function() {\n end(self._forwardEnd && self._writable, function() {\n // haxx to not emit prefinish twice\n if (self._writableState.prefinished === false) self._writableState.prefinished = true\n self.emit('prefinish')\n onuncork(self, cb)\n })\n })\n}\n\nDuplexify.prototype.end = function(data, enc, cb) {\n if (typeof data === 'function') return this.end(null, null, data)\n if (typeof enc === 'function') return this.end(data, null, enc)\n this._ended = true\n if (data) this.write(data)\n if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH)\n return stream.Writable.prototype.end.call(this, cb)\n}\n\nmodule.exports = Duplexify\n","'use strict';\n\nvar Buffer = require('safe-buffer').Buffer;\n\nvar getParamBytesForAlg = require('./param-bytes-for-alg');\n\nvar MAX_OCTET = 0x80,\n\tCLASS_UNIVERSAL = 0,\n\tPRIMITIVE_BIT = 0x20,\n\tTAG_SEQ = 0x10,\n\tTAG_INT = 0x02,\n\tENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6),\n\tENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6);\n\nfunction base64Url(base64) {\n\treturn base64\n\t\t.replace(/=/g, '')\n\t\t.replace(/\\+/g, '-')\n\t\t.replace(/\\//g, '_');\n}\n\nfunction signatureAsBuffer(signature) {\n\tif (Buffer.isBuffer(signature)) {\n\t\treturn signature;\n\t} else if ('string' === typeof signature) {\n\t\treturn Buffer.from(signature, 'base64');\n\t}\n\n\tthrow new TypeError('ECDSA signature must be a Base64 string or a Buffer');\n}\n\nfunction derToJose(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\t// the DER encoded param should at most be the param size, plus a padding\n\t// zero, since due to being a signed integer\n\tvar maxEncodedParamLength = paramBytes + 1;\n\n\tvar inputLength = signature.length;\n\n\tvar offset = 0;\n\tif (signature[offset++] !== ENCODED_TAG_SEQ) {\n\t\tthrow new Error('Could 
not find expected \"seq\"');\n\t}\n\n\tvar seqLength = signature[offset++];\n\tif (seqLength === (MAX_OCTET | 1)) {\n\t\tseqLength = signature[offset++];\n\t}\n\n\tif (inputLength - offset < seqLength) {\n\t\tthrow new Error('\"seq\" specified length of \"' + seqLength + '\", only \"' + (inputLength - offset) + '\" remaining');\n\t}\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"r\"');\n\t}\n\n\tvar rLength = signature[offset++];\n\n\tif (inputLength - offset - 2 < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", only \"' + (inputLength - offset - 2) + '\" available');\n\t}\n\n\tif (maxEncodedParamLength < rLength) {\n\t\tthrow new Error('\"r\" specified length of \"' + rLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar rOffset = offset;\n\toffset += rLength;\n\n\tif (signature[offset++] !== ENCODED_TAG_INT) {\n\t\tthrow new Error('Could not find expected \"int\" for \"s\"');\n\t}\n\n\tvar sLength = signature[offset++];\n\n\tif (inputLength - offset !== sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", expected \"' + (inputLength - offset) + '\"');\n\t}\n\n\tif (maxEncodedParamLength < sLength) {\n\t\tthrow new Error('\"s\" specified length of \"' + sLength + '\", max of \"' + maxEncodedParamLength + '\" is acceptable');\n\t}\n\n\tvar sOffset = offset;\n\toffset += sLength;\n\n\tif (offset !== inputLength) {\n\t\tthrow new Error('Expected to consume entire buffer, but \"' + (inputLength - offset) + '\" bytes remain');\n\t}\n\n\tvar rPadding = paramBytes - rLength,\n\t\tsPadding = paramBytes - sLength;\n\n\tvar dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength);\n\n\tfor (offset = 0; offset < rPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength);\n\n\toffset = paramBytes;\n\n\tfor (var o = offset; offset < o + sPadding; ++offset) {\n\t\tdst[offset] = 0;\n\t}\n\tsignature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength);\n\n\tdst = dst.toString('base64');\n\tdst = base64Url(dst);\n\n\treturn dst;\n}\n\nfunction countPadding(buf, start, stop) {\n\tvar padding = 0;\n\twhile (start + padding < stop && buf[start + padding] === 0) {\n\t\t++padding;\n\t}\n\n\tvar needsSign = buf[start + padding] >= MAX_OCTET;\n\tif (needsSign) {\n\t\t--padding;\n\t}\n\n\treturn padding;\n}\n\nfunction joseToDer(signature, alg) {\n\tsignature = signatureAsBuffer(signature);\n\tvar paramBytes = getParamBytesForAlg(alg);\n\n\tvar signatureBytes = signature.length;\n\tif (signatureBytes !== paramBytes * 2) {\n\t\tthrow new TypeError('\"' + alg + '\" signatures must be \"' + paramBytes * 2 + '\" bytes, saw \"' + signatureBytes + '\"');\n\t}\n\n\tvar rPadding = countPadding(signature, 0, paramBytes);\n\tvar sPadding = countPadding(signature, paramBytes, signature.length);\n\tvar rLength = paramBytes - rPadding;\n\tvar sLength = paramBytes - sPadding;\n\n\tvar rsBytes = 1 + 1 + rLength + 1 + 1 + sLength;\n\n\tvar shortLength = rsBytes < MAX_OCTET;\n\n\tvar dst = Buffer.allocUnsafe((shortLength ? 
2 : 3) + rsBytes);\n\n\tvar offset = 0;\n\tdst[offset++] = ENCODED_TAG_SEQ;\n\tif (shortLength) {\n\t\t// Bit 8 has value \"0\"\n\t\t// bits 7-1 give the length.\n\t\tdst[offset++] = rsBytes;\n\t} else {\n\t\t// Bit 8 of first octet has value \"1\"\n\t\t// bits 7-1 give the number of additional length octets.\n\t\tdst[offset++] = MAX_OCTET\t| 1;\n\t\t// length, base 256\n\t\tdst[offset++] = rsBytes & 0xff;\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = rLength;\n\tif (rPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\toffset += signature.copy(dst, offset, 0, paramBytes);\n\t} else {\n\t\toffset += signature.copy(dst, offset, rPadding, paramBytes);\n\t}\n\tdst[offset++] = ENCODED_TAG_INT;\n\tdst[offset++] = sLength;\n\tif (sPadding < 0) {\n\t\tdst[offset++] = 0;\n\t\tsignature.copy(dst, offset, paramBytes);\n\t} else {\n\t\tsignature.copy(dst, offset, paramBytes + sPadding);\n\t}\n\n\treturn dst;\n}\n\nmodule.exports = {\n\tderToJose: derToJose,\n\tjoseToDer: joseToDer\n};\n","'use strict';\n\nfunction getParamSize(keySize) {\n\tvar result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1);\n\treturn result;\n}\n\nvar paramBytesForAlg = {\n\tES256: getParamSize(256),\n\tES384: getParamSize(384),\n\tES512: getParamSize(521)\n};\n\nfunction getParamBytesForAlg(alg) {\n\tvar paramBytes = paramBytesForAlg[alg];\n\tif (paramBytes) {\n\t\treturn paramBytes;\n\t}\n\n\tthrow new Error('Unknown algorithm \"' + alg + '\"');\n}\n\nmodule.exports = getParamBytesForAlg;\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","var punycode = require('punycode');\nvar entities = require('./entities.json');\n\nmodule.exports = decode;\n\nfunction decode (str) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n\n return str.replace(/&(#?[^;\\W]+;?)/g, function (_, match) {\n var m;\n if (m = /^#(\\d+);?$/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 10) ]);\n } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) {\n return punycode.ucs2.encode([ parseInt(m[1], 16) ]);\n } else {\n // named entity\n var hasSemi = /;$/.test(match);\n var withoutSemi = hasSemi ? match.replace(/;$/, '') : match;\n var target = entities[withoutSemi] || (hasSemi && entities[match]);\n\n if (typeof target === 'number') {\n return punycode.ucs2.encode([ target ]);\n } else if (typeof target === 'string') {\n return target;\n } else {\n return '&' + match;\n }\n }\n });\n}\n","var punycode = require('punycode');\nvar revEntities = require('./reversed.json');\n\nmodule.exports = encode;\n\nfunction encode (str, opts) {\n if (typeof str !== 'string') {\n throw new TypeError('Expected a String');\n }\n if (!opts) opts = {};\n\n var numeric = true;\n if (opts.named) numeric = false;\n if (opts.numeric !== undefined) numeric = opts.numeric;\n\n var special = opts.special || {\n '\"': true, \"'\": true,\n '<': true, '>': true,\n '&': true\n };\n\n var codePoints = punycode.ucs2.decode(str);\n var chars = [];\n for (var i = 0; i < codePoints.length; i++) {\n var cc = codePoints[i];\n var c = punycode.ucs2.encode([ cc ]);\n var e = revEntities[cc];\n if (e && (cc >= 127 || special[c]) && !numeric) {\n chars.push('&' + (/;$/.test(e) ? 
e : e + ';'));\n }\n else if (cc < 32 || cc >= 127 || special[c]) {\n chars.push('&#' + cc + ';');\n }\n else {\n chars.push(c);\n }\n }\n return chars.join('');\n}\n","exports.encode = require('./encode');\nexports.decode = require('./decode');\n","/**\n * @author Toru Nagashima \n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The 
target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","'use strict';\n\nvar hasOwn = Object.prototype.hasOwnProperty;\nvar toStr = Object.prototype.toString;\nvar defineProperty = Object.defineProperty;\nvar gOPD = Object.getOwnPropertyDescriptor;\n\nvar isArray = function isArray(arr) {\n\tif (typeof Array.isArray === 'function') {\n\t\treturn Array.isArray(arr);\n\t}\n\n\treturn toStr.call(arr) === '[object Array]';\n};\n\nvar isPlainObject = function isPlainObject(obj) {\n\tif (!obj || toStr.call(obj) !== '[object Object]') {\n\t\treturn false;\n\t}\n\n\tvar hasOwnConstructor = hasOwn.call(obj, 'constructor');\n\tvar hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf');\n\t// Not own constructor property must be Object\n\tif (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {\n\t\treturn false;\n\t}\n\n\t// Own properties are enumerated firstly, so to speed up,\n\t// if last one is own, then all properties are own.\n\tvar key;\n\tfor (key in obj) { /**/ }\n\n\treturn typeof key === 'undefined' || hasOwn.call(obj, key);\n};\n\n// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target\nvar setProperty = function setProperty(target, options) {\n\tif (defineProperty && options.name === '__proto__') {\n\t\tdefineProperty(target, options.name, {\n\t\t\tenumerable: true,\n\t\t\tconfigurable: true,\n\t\t\tvalue: options.newValue,\n\t\t\twritable: true\n\t\t});\n\t} else {\n\t\ttarget[options.name] = options.newValue;\n\t}\n};\n\n// Return undefined instead of __proto__ if '__proto__' is not an own property\nvar getProperty = function getProperty(obj, name) {\n\tif (name === '__proto__') {\n\t\tif (!hasOwn.call(obj, name)) {\n\t\t\treturn void 0;\n\t\t} else if (gOPD) {\n\t\t\t// In early versions of node, obj['__proto__'] is buggy when obj has\n\t\t\t// __proto__ as an own property. 
Object.getOwnPropertyDescriptor() works.\n\t\t\treturn gOPD(obj, name).value;\n\t\t}\n\t}\n\n\treturn obj[name];\n};\n\nmodule.exports = function extend() {\n\tvar options, name, src, copy, copyIsArray, clone;\n\tvar target = arguments[0];\n\tvar i = 1;\n\tvar length = arguments.length;\n\tvar deep = false;\n\n\t// Handle a deep copy situation\n\tif (typeof target === 'boolean') {\n\t\tdeep = target;\n\t\ttarget = arguments[1] || {};\n\t\t// skip the boolean and the target\n\t\ti = 2;\n\t}\n\tif (target == null || (typeof target !== 'object' && typeof target !== 'function')) {\n\t\ttarget = {};\n\t}\n\n\tfor (; i < length; ++i) {\n\t\toptions = arguments[i];\n\t\t// Only deal with non-null/undefined values\n\t\tif (options != null) {\n\t\t\t// Extend the base object\n\t\t\tfor (name in options) {\n\t\t\t\tsrc = getProperty(target, name);\n\t\t\t\tcopy = getProperty(options, name);\n\n\t\t\t\t// Prevent never-ending loop\n\t\t\t\tif (target !== copy) {\n\t\t\t\t\t// Recurse if we're merging plain objects or arrays\n\t\t\t\t\tif (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) {\n\t\t\t\t\t\tif (copyIsArray) {\n\t\t\t\t\t\t\tcopyIsArray = false;\n\t\t\t\t\t\t\tclone = src && isArray(src) ? src : [];\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tclone = src && isPlainObject(src) ? src : {};\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// Never move original objects, clone them\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: extend(deep, clone, copy) });\n\n\t\t\t\t\t// Don't bring in undefined values\n\t\t\t\t\t} else if (typeof copy !== 'undefined') {\n\t\t\t\t\t\tsetProperty(target, { name: name, newValue: copy });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Return the modified object\n\treturn target;\n};\n","'use strict';\n\nconst validator = require('./validator');\nconst XMLParser = require('./xmlparser/XMLParser');\nconst XMLBuilder = require('./xmlbuilder/json2xml');\n\nmodule.exports = {\n XMLParser: XMLParser,\n XMLValidator: validator,\n XMLBuilder: XMLBuilder\n}","'use strict';\n\nconst nameStartChar = ':A-Za-z_\\\\u00C0-\\\\u00D6\\\\u00D8-\\\\u00F6\\\\u00F8-\\\\u02FF\\\\u0370-\\\\u037D\\\\u037F-\\\\u1FFF\\\\u200C-\\\\u200D\\\\u2070-\\\\u218F\\\\u2C00-\\\\u2FEF\\\\u3001-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFFD';\nconst nameChar = nameStartChar + '\\\\-.\\\\d\\\\u00B7\\\\u0300-\\\\u036F\\\\u203F-\\\\u2040';\nconst nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*'\nconst regexName = new RegExp('^' + nameRegexp + '$');\n\nconst getAllMatches = function(string, regex) {\n const matches = [];\n let match = regex.exec(string);\n while (match) {\n const allmatches = [];\n allmatches.startIndex = regex.lastIndex - match[0].length;\n const len = match.length;\n for (let index = 0; index < len; index++) {\n allmatches.push(match[index]);\n }\n matches.push(allmatches);\n match = regex.exec(string);\n }\n return matches;\n};\n\nconst isName = function(string) {\n const match = regexName.exec(string);\n return !(match === null || typeof match === 'undefined');\n};\n\nexports.isExist = function(v) {\n return typeof v !== 'undefined';\n};\n\nexports.isEmptyObject = function(obj) {\n return Object.keys(obj).length === 0;\n};\n\n/**\n * Copy all the properties of a into b.\n * @param {*} target\n * @param {*} a\n */\nexports.merge = function(target, a, arrayMode) {\n if (a) {\n const keys = Object.keys(a); // will return an array of own properties\n const len = keys.length; //don't make it inline\n for (let i = 0; i < len; i++) {\n if (arrayMode === 'strict') {\n 
target[keys[i]] = [ a[keys[i]] ];\n } else {\n target[keys[i]] = a[keys[i]];\n }\n }\n }\n};\n/* exports.merge =function (b,a){\n return Object.assign(b,a);\n} */\n\nexports.getValue = function(v) {\n if (exports.isExist(v)) {\n return v;\n } else {\n return '';\n }\n};\n\n// const fakeCall = function(a) {return a;};\n// const fakeCallNoReturn = function() {};\n\nexports.isName = isName;\nexports.getAllMatches = getAllMatches;\nexports.nameRegexp = nameRegexp;\n","'use strict';\n\nconst util = require('./util');\n\nconst defaultOptions = {\n allowBooleanAttributes: false, //A tag can have attributes without any value\n unpairedTags: []\n};\n\n//const tagsPattern = new RegExp(\"<\\\\/?([\\\\w:\\\\-_\\.]+)\\\\s*\\/?>\",\"g\");\nexports.validate = function (xmlData, options) {\n options = Object.assign({}, defaultOptions, options);\n\n //xmlData = xmlData.replace(/(\\r\\n|\\n|\\r)/gm,\"\");//make it single line\n //xmlData = xmlData.replace(/(^\\s*<\\?xml.*?\\?>)/g,\"\");//Remove XML starting tag\n //xmlData = xmlData.replace(/()/g,\"\");//Remove DOCTYPE\n const tags = [];\n let tagFound = false;\n\n //indicates that the root tag has been closed (aka. depth 0 has been reached)\n let reachedRoot = false;\n\n if (xmlData[0] === '\\ufeff') {\n // check for byte order mark (BOM)\n xmlData = xmlData.substr(1);\n }\n \n for (let i = 0; i < xmlData.length; i++) {\n\n if (xmlData[i] === '<' && xmlData[i+1] === '?') {\n i+=2;\n i = readPI(xmlData,i);\n if (i.err) return i;\n }else if (xmlData[i] === '<') {\n //starting of tag\n //read until you reach to '>' avoiding any '>' in attribute value\n let tagStartPos = i;\n i++;\n \n if (xmlData[i] === '!') {\n i = readCommentAndCDATA(xmlData, i);\n continue;\n } else {\n let closingTag = false;\n if (xmlData[i] === '/') {\n //closing tag\n closingTag = true;\n i++;\n }\n //read tagname\n let tagName = '';\n for (; i < xmlData.length &&\n xmlData[i] !== '>' &&\n xmlData[i] !== ' ' &&\n xmlData[i] !== '\\t' &&\n xmlData[i] !== '\\n' &&\n xmlData[i] !== '\\r'; i++\n ) {\n tagName += xmlData[i];\n }\n tagName = tagName.trim();\n //console.log(tagName);\n\n if (tagName[tagName.length - 1] === '/') {\n //self closing tag without attributes\n tagName = tagName.substring(0, tagName.length - 1);\n //continue;\n i--;\n }\n if (!validateTagName(tagName)) {\n let msg;\n if (tagName.trim().length === 0) {\n msg = \"Invalid space after '<'.\";\n } else {\n msg = \"Tag '\"+tagName+\"' is an invalid name.\";\n }\n return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i));\n }\n\n const result = readAttributeStr(xmlData, i);\n if (result === false) {\n return getErrorObject('InvalidAttr', \"Attributes for '\"+tagName+\"' have open quote.\", getLineNumberForPosition(xmlData, i));\n }\n let attrStr = result.value;\n i = result.index;\n\n if (attrStr[attrStr.length - 1] === '/') {\n //self closing tag\n const attrStrStart = i - attrStr.length;\n attrStr = attrStr.substring(0, attrStr.length - 1);\n const isValid = validateAttributeString(attrStr, options);\n if (isValid === true) {\n tagFound = true;\n //continue; //text may presents after self closing tag\n } else {\n //the result from the nested function returns the position of the error within the attribute\n //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute\n //this gives us the absolute index in the entire xml, which we can use to find the line at last\n return 
getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line));\n }\n } else if (closingTag) {\n if (!result.tagClosed) {\n return getErrorObject('InvalidTag', \"Closing tag '\"+tagName+\"' doesn't have proper closing.\", getLineNumberForPosition(xmlData, i));\n } else if (attrStr.trim().length > 0) {\n return getErrorObject('InvalidTag', \"Closing tag '\"+tagName+\"' can't have attributes or invalid starting.\", getLineNumberForPosition(xmlData, tagStartPos));\n } else {\n const otg = tags.pop();\n if (tagName !== otg.tagName) {\n let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos);\n return getErrorObject('InvalidTag',\n \"Expected closing tag '\"+otg.tagName+\"' (opened in line \"+openPos.line+\", col \"+openPos.col+\") instead of closing tag '\"+tagName+\"'.\",\n getLineNumberForPosition(xmlData, tagStartPos));\n }\n\n //when there are no more tags, we reached the root level.\n if (tags.length == 0) {\n reachedRoot = true;\n }\n }\n } else {\n const isValid = validateAttributeString(attrStr, options);\n if (isValid !== true) {\n //the result from the nested function returns the position of the error within the attribute\n //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute\n //this gives us the absolute index in the entire xml, which we can use to find the line at last\n return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line));\n }\n\n //if the root level has been reached before ...\n if (reachedRoot === true) {\n return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i));\n } else if(options.unpairedTags.indexOf(tagName) !== -1){\n //don't push into stack\n } else {\n tags.push({tagName, tagStartPos});\n }\n tagFound = true;\n }\n\n //skip tag text value\n //It may include comments and CDATA value\n for (i++; i < xmlData.length; i++) {\n if (xmlData[i] === '<') {\n if (xmlData[i + 1] === '!') {\n //comment or CADATA\n i++;\n i = readCommentAndCDATA(xmlData, i);\n continue;\n } else if (xmlData[i+1] === '?') {\n i = readPI(xmlData, ++i);\n if (i.err) return i;\n } else{\n break;\n }\n } else if (xmlData[i] === '&') {\n const afterAmp = validateAmpersand(xmlData, i);\n if (afterAmp == -1)\n return getErrorObject('InvalidChar', \"char '&' is not expected.\", getLineNumberForPosition(xmlData, i));\n i = afterAmp;\n }else{\n if (reachedRoot === true && !isWhiteSpace(xmlData[i])) {\n return getErrorObject('InvalidXml', \"Extra text at the end\", getLineNumberForPosition(xmlData, i));\n }\n }\n } //end of reading tag text value\n if (xmlData[i] === '<') {\n i--;\n }\n }\n } else {\n if ( isWhiteSpace(xmlData[i])) {\n continue;\n }\n return getErrorObject('InvalidChar', \"char '\"+xmlData[i]+\"' is not expected.\", getLineNumberForPosition(xmlData, i));\n }\n }\n\n if (!tagFound) {\n return getErrorObject('InvalidXml', 'Start tag expected.', 1);\n }else if (tags.length == 1) {\n return getErrorObject('InvalidTag', \"Unclosed tag '\"+tags[0].tagName+\"'.\", getLineNumberForPosition(xmlData, tags[0].tagStartPos));\n }else if (tags.length > 0) {\n return getErrorObject('InvalidXml', \"Invalid '\"+\n JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\\r?\\n/g, '')+\n \"' found.\", {line: 1, col: 1});\n }\n\n return true;\n};\n\nfunction isWhiteSpace(char){\n return char === ' ' 
|| char === '\\t' || char === '\\n' || char === '\\r';\n}\n/**\n * Read Processing insstructions and skip\n * @param {*} xmlData\n * @param {*} i\n */\nfunction readPI(xmlData, i) {\n const start = i;\n for (; i < xmlData.length; i++) {\n if (xmlData[i] == '?' || xmlData[i] == ' ') {\n //tagname\n const tagname = xmlData.substr(start, i - start);\n if (i > 5 && tagname === 'xml') {\n return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i));\n } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') {\n //check if valid attribut string\n i++;\n break;\n } else {\n continue;\n }\n }\n }\n return i;\n}\n\nfunction readCommentAndCDATA(xmlData, i) {\n if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') {\n //comment\n for (i += 3; i < xmlData.length; i++) {\n if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') {\n i += 2;\n break;\n }\n }\n } else if (\n xmlData.length > i + 8 &&\n xmlData[i + 1] === 'D' &&\n xmlData[i + 2] === 'O' &&\n xmlData[i + 3] === 'C' &&\n xmlData[i + 4] === 'T' &&\n xmlData[i + 5] === 'Y' &&\n xmlData[i + 6] === 'P' &&\n xmlData[i + 7] === 'E'\n ) {\n let angleBracketsCount = 1;\n for (i += 8; i < xmlData.length; i++) {\n if (xmlData[i] === '<') {\n angleBracketsCount++;\n } else if (xmlData[i] === '>') {\n angleBracketsCount--;\n if (angleBracketsCount === 0) {\n break;\n }\n }\n }\n } else if (\n xmlData.length > i + 9 &&\n xmlData[i + 1] === '[' &&\n xmlData[i + 2] === 'C' &&\n xmlData[i + 3] === 'D' &&\n xmlData[i + 4] === 'A' &&\n xmlData[i + 5] === 'T' &&\n xmlData[i + 6] === 'A' &&\n xmlData[i + 7] === '['\n ) {\n for (i += 8; i < xmlData.length; i++) {\n if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') {\n i += 2;\n break;\n }\n }\n }\n\n return i;\n}\n\nconst doubleQuote = '\"';\nconst singleQuote = \"'\";\n\n/**\n * Keep reading xmlData until '<' is found outside the attribute value.\n * @param {string} xmlData\n * @param {number} i\n */\nfunction readAttributeStr(xmlData, i) {\n let attrStr = '';\n let startChar = '';\n let tagClosed = false;\n for (; i < xmlData.length; i++) {\n if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) {\n if (startChar === '') {\n startChar = xmlData[i];\n } else if (startChar !== xmlData[i]) {\n //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa\n } else {\n startChar = '';\n }\n } else if (xmlData[i] === '>') {\n if (startChar === '') {\n tagClosed = true;\n break;\n }\n }\n attrStr += xmlData[i];\n }\n if (startChar !== '') {\n return false;\n }\n\n return {\n value: attrStr,\n index: i,\n tagClosed: tagClosed\n };\n}\n\n/**\n * Select all the attributes whether valid or invalid.\n */\nconst validAttrStrRegxp = new RegExp('(\\\\s*)([^\\\\s=]+)(\\\\s*=)?(\\\\s*([\\'\"])(([\\\\s\\\\S])*?)\\\\5)?', 'g');\n\n//attr, =\"sd\", a=\"amit's\", a=\"sd\"b=\"saf\", ab cd=\"\"\n\nfunction validateAttributeString(attrStr, options) {\n //console.log(\"start:\"+attrStr+\":end\");\n\n //if(attrStr.trim().length === 0) return true; //empty string\n\n const matches = util.getAllMatches(attrStr, validAttrStrRegxp);\n const attrNames = {};\n\n for (let i = 0; i < matches.length; i++) {\n if (matches[i][1].length === 0) {\n //nospace before attribute name: a=\"sd\"b=\"saf\"\n return getErrorObject('InvalidAttr', \"Attribute '\"+matches[i][2]+\"' has no space in starting.\", getPositionFromMatch(matches[i]))\n } else if (matches[i][3] 
!== undefined && matches[i][4] === undefined) {\n return getErrorObject('InvalidAttr', \"Attribute '\"+matches[i][2]+\"' is without value.\", getPositionFromMatch(matches[i]));\n } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) {\n //independent attribute: ab\n return getErrorObject('InvalidAttr', \"boolean attribute '\"+matches[i][2]+\"' is not allowed.\", getPositionFromMatch(matches[i]));\n }\n /* else if(matches[i][6] === undefined){//attribute without value: ab=\n return { err: { code:\"InvalidAttr\",msg:\"attribute \" + matches[i][2] + \" has no value assigned.\"}};\n } */\n const attrName = matches[i][2];\n if (!validateAttrName(attrName)) {\n return getErrorObject('InvalidAttr', \"Attribute '\"+attrName+\"' is an invalid name.\", getPositionFromMatch(matches[i]));\n }\n if (!attrNames.hasOwnProperty(attrName)) {\n //check for duplicate attribute.\n attrNames[attrName] = 1;\n } else {\n return getErrorObject('InvalidAttr', \"Attribute '\"+attrName+\"' is repeated.\", getPositionFromMatch(matches[i]));\n }\n }\n\n return true;\n}\n\nfunction validateNumberAmpersand(xmlData, i) {\n let re = /\\d/;\n if (xmlData[i] === 'x') {\n i++;\n re = /[\\da-fA-F]/;\n }\n for (; i < xmlData.length; i++) {\n if (xmlData[i] === ';')\n return i;\n if (!xmlData[i].match(re))\n break;\n }\n return -1;\n}\n\nfunction validateAmpersand(xmlData, i) {\n // https://www.w3.org/TR/xml/#dt-charref\n i++;\n if (xmlData[i] === ';')\n return -1;\n if (xmlData[i] === '#') {\n i++;\n return validateNumberAmpersand(xmlData, i);\n }\n let count = 0;\n for (; i < xmlData.length; i++, count++) {\n if (xmlData[i].match(/\\w/) && count < 20)\n continue;\n if (xmlData[i] === ';')\n break;\n return -1;\n }\n return i;\n}\n\nfunction getErrorObject(code, message, lineNumber) {\n return {\n err: {\n code: code,\n msg: message,\n line: lineNumber.line || lineNumber,\n col: lineNumber.col,\n },\n };\n}\n\nfunction validateAttrName(attrName) {\n return util.isName(attrName);\n}\n\n// const startsWithXML = /^xml/i;\n\nfunction validateTagName(tagname) {\n return util.isName(tagname) /* && !tagname.match(startsWithXML) */;\n}\n\n//this function returns the line number for the character at the given index\nfunction getLineNumberForPosition(xmlData, index) {\n const lines = xmlData.substring(0, index).split(/\\r?\\n/);\n return {\n line: lines.length,\n\n // column number is last line's length + 1, because column numbering starts at 1:\n col: lines[lines.length - 1].length + 1\n };\n}\n\n//this function returns the position of the first character of match within attrStr\nfunction getPositionFromMatch(match) {\n return match.startIndex + match[1].length;\n}\n","'use strict';\n//parse Empty Node as self closing node\nconst buildFromOrderedJs = require('./orderedJs2Xml');\n\nconst defaultOptions = {\n attributeNamePrefix: '@_',\n attributesGroupName: false,\n textNodeName: '#text',\n ignoreAttributes: true,\n cdataPropName: false,\n format: false,\n indentBy: ' ',\n suppressEmptyNode: false,\n suppressUnpairedNode: true,\n suppressBooleanAttributes: true,\n tagValueProcessor: function(key, a) {\n return a;\n },\n attributeValueProcessor: function(attrName, a) {\n return a;\n },\n preserveOrder: false,\n commentPropName: false,\n unpairedTags: [],\n entities: [\n { regex: new RegExp(\"&\", \"g\"), val: \"&\" },//it must be on top\n { regex: new RegExp(\">\", \"g\"), val: \">\" },\n { regex: new RegExp(\"<\", \"g\"), val: \"<\" },\n { regex: new RegExp(\"\\'\", \"g\"), val: \"'\" },\n { regex: new 
RegExp(\"\\\"\", \"g\"), val: \""\" }\n ],\n processEntities: true,\n stopNodes: [],\n // transformTagName: false,\n // transformAttributeName: false,\n oneListGroup: false\n};\n\nfunction Builder(options) {\n this.options = Object.assign({}, defaultOptions, options);\n if (this.options.ignoreAttributes || this.options.attributesGroupName) {\n this.isAttribute = function(/*a*/) {\n return false;\n };\n } else {\n this.attrPrefixLen = this.options.attributeNamePrefix.length;\n this.isAttribute = isAttribute;\n }\n\n this.processTextOrObjNode = processTextOrObjNode\n\n if (this.options.format) {\n this.indentate = indentate;\n this.tagEndChar = '>\\n';\n this.newLine = '\\n';\n } else {\n this.indentate = function() {\n return '';\n };\n this.tagEndChar = '>';\n this.newLine = '';\n }\n}\n\nBuilder.prototype.build = function(jObj) {\n if(this.options.preserveOrder){\n return buildFromOrderedJs(jObj, this.options);\n }else {\n if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){\n jObj = {\n [this.options.arrayNodeName] : jObj\n }\n }\n return this.j2x(jObj, 0).val;\n }\n};\n\nBuilder.prototype.j2x = function(jObj, level) {\n let attrStr = '';\n let val = '';\n for (let key in jObj) {\n if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue;\n if (typeof jObj[key] === 'undefined') {\n // supress undefined node only if it is not an attribute\n if (this.isAttribute(key)) {\n val += '';\n }\n } else if (jObj[key] === null) {\n // null attribute should be ignored by the attribute list, but should not cause the tag closing\n if (this.isAttribute(key)) {\n val += '';\n } else if (key[0] === '?') {\n val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;\n } else {\n val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n }\n // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n } else if (jObj[key] instanceof Date) {\n val += this.buildTextValNode(jObj[key], key, '', level);\n } else if (typeof jObj[key] !== 'object') {\n //premitive type\n const attr = this.isAttribute(key);\n if (attr) {\n attrStr += this.buildAttrPairStr(attr, '' + jObj[key]);\n }else {\n //tag value\n if (key === this.options.textNodeName) {\n let newval = this.options.tagValueProcessor(key, '' + jObj[key]);\n val += this.replaceEntitiesValue(newval);\n } else {\n val += this.buildTextValNode(jObj[key], key, '', level);\n }\n }\n } else if (Array.isArray(jObj[key])) {\n //repeated nodes\n const arrLen = jObj[key].length;\n let listTagVal = \"\";\n for (let j = 0; j < arrLen; j++) {\n const item = jObj[key][j];\n if (typeof item === 'undefined') {\n // supress undefined node\n } else if (item === null) {\n if(key[0] === \"?\") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar;\n else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;\n } else if (typeof item === 'object') {\n if(this.options.oneListGroup ){\n listTagVal += this.j2x(item, level + 1).val;\n }else{\n listTagVal += this.processTextOrObjNode(item, key, level)\n }\n } else {\n listTagVal += this.buildTextValNode(item, key, '', level);\n }\n }\n if(this.options.oneListGroup){\n listTagVal = this.buildObjectNode(listTagVal, key, '', level);\n }\n val += listTagVal;\n } else {\n //nested node\n if (this.options.attributesGroupName && key === this.options.attributesGroupName) {\n const Ks = Object.keys(jObj[key]);\n const L = Ks.length;\n for (let j = 0; j < L; j++) {\n attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);\n }\n } else {\n val += this.processTextOrObjNode(jObj[key], key, level)\n }\n }\n }\n return {attrStr: attrStr, val: val};\n};\n\nBuilder.prototype.buildAttrPairStr = function(attrName, val){\n val = this.options.attributeValueProcessor(attrName, '' + val);\n val = this.replaceEntitiesValue(val);\n if (this.options.suppressBooleanAttributes && val === \"true\") {\n return ' ' + attrName;\n } else return ' ' + attrName + '=\"' + val + '\"';\n}\n\nfunction processTextOrObjNode (object, key, level) {\n const result = this.j2x(object, level + 1);\n if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {\n return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);\n } else {\n return this.buildObjectNode(result.val, key, result.attrStr, level);\n }\n}\n\nBuilder.prototype.buildObjectNode = function(val, key, attrStr, level) {\n if(val === \"\"){\n if(key[0] === \"?\") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;\n else {\n return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;\n }\n }else{\n\n let tagEndExp = '' + val + tagEndExp );\n } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {\n return this.indentate(level) + `` + this.newLine;\n }else {\n return (\n this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +\n val +\n this.indentate(level) + tagEndExp );\n }\n }\n}\n\nBuilder.prototype.closeTag = function(key){\n let closeTag = \"\";\n if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired\n if(!this.options.suppressUnpairedNode) closeTag = \"/\"\n }else if(this.options.suppressEmptyNode){ //empty\n closeTag = \"/\";\n }else{\n closeTag = `>` + this.newLine;\n }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {\n return this.indentate(level) + `` + this.newLine;\n }else if(key[0] === \"?\") {//PI tag\n return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; \n }else{\n let textValue = this.options.tagValueProcessor(key, val);\n textValue = this.replaceEntitiesValue(textValue);\n \n if( textValue === ''){\n return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;\n }else{\n return this.indentate(level) + '<' + key + attrStr + '>' +\n textValue +\n ' 0 && this.options.processEntities){\n for (let i=0; i 0) {\n indentation = EOL;\n }\n return arrToStr(jArray, options, \"\", indentation);\n}\n\nfunction arrToStr(arr, options, jPath, indentation) {\n let xmlStr = \"\";\n let isPreviousElementTag = false;\n\n for (let i = 0; i < arr.length; i++) {\n const tagObj = arr[i];\n const tagName = propName(tagObj);\n if(tagName === undefined) continue;\n\n let newJPath = \"\";\n if (jPath.length === 0) newJPath = tagName\n else newJPath = `${jPath}.${tagName}`;\n\n if (tagName === options.textNodeName) {\n let tagText = tagObj[tagName];\n if (!isStopNode(newJPath, options)) {\n tagText = options.tagValueProcessor(tagName, tagText);\n tagText = replaceEntitiesValue(tagText, options);\n }\n if (isPreviousElementTag) {\n xmlStr += indentation;\n }\n xmlStr += tagText;\n isPreviousElementTag = false;\n continue;\n } else if (tagName === options.cdataPropName) {\n if (isPreviousElementTag) {\n xmlStr += indentation;\n }\n xmlStr += ``;\n isPreviousElementTag = false;\n continue;\n } else if (tagName === options.commentPropName) {\n xmlStr += indentation + ``;\n isPreviousElementTag = true;\n continue;\n } else if (tagName[0] === \"?\") {\n const attStr = attr_to_str(tagObj[\":@\"], options);\n const tempInd = tagName === \"?xml\" ? \"\" : indentation;\n let piTextNodeName = tagObj[tagName][0][options.textNodeName];\n piTextNodeName = piTextNodeName.length !== 0 ? \" \" + piTextNodeName : \"\"; //remove extra spacing\n xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;\n isPreviousElementTag = true;\n continue;\n }\n let newIdentation = indentation;\n if (newIdentation !== \"\") {\n newIdentation += options.indentBy;\n }\n const attStr = attr_to_str(tagObj[\":@\"], options);\n const tagStart = indentation + `<${tagName}${attStr}`;\n const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);\n if (options.unpairedTags.indexOf(tagName) !== -1) {\n if (options.suppressUnpairedNode) xmlStr += tagStart + \">\";\n else xmlStr += tagStart + \"/>\";\n } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {\n xmlStr += tagStart + \"/>\";\n } else if (tagValue && tagValue.endsWith(\">\")) {\n xmlStr += tagStart + `>${tagValue}${indentation}`;\n } else {\n xmlStr += tagStart + \">\";\n if (tagValue && indentation !== \"\" && (tagValue.includes(\"/>\") || tagValue.includes(\"`;\n }\n isPreviousElementTag = true;\n }\n\n return xmlStr;\n}\n\nfunction propName(obj) {\n const keys = Object.keys(obj);\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i];\n if(!obj.hasOwnProperty(key)) continue;\n if (key !== \":@\") return key;\n }\n}\n\nfunction attr_to_str(attrMap, options) {\n let attrStr = \"\";\n if (attrMap && !options.ignoreAttributes) {\n for (let attr in attrMap) {\n if(!attrMap.hasOwnProperty(attr)) continue;\n let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);\n attrVal = replaceEntitiesValue(attrVal, options);\n if (attrVal === true && options.suppressBooleanAttributes) {\n attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;\n } else {\n attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}=\"${attrVal}\"`;\n 
}\n }\n }\n return attrStr;\n}\n\nfunction isStopNode(jPath, options) {\n jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);\n let tagName = jPath.substr(jPath.lastIndexOf(\".\") + 1);\n for (let index in options.stopNodes) {\n if (options.stopNodes[index] === jPath || options.stopNodes[index] === \"*.\" + tagName) return true;\n }\n return false;\n}\n\nfunction replaceEntitiesValue(textValue, options) {\n if (textValue && textValue.length > 0 && options.processEntities) {\n for (let i = 0; i < options.entities.length; i++) {\n const entity = options.entities[i];\n textValue = textValue.replace(entity.regex, entity.val);\n }\n }\n return textValue;\n}\nmodule.exports = toXml;\n","const util = require('../util');\n\n//TODO: handle comments\nfunction readDocType(xmlData, i){\n \n const entities = {};\n if( xmlData[i + 3] === 'O' &&\n xmlData[i + 4] === 'C' &&\n xmlData[i + 5] === 'T' &&\n xmlData[i + 6] === 'Y' &&\n xmlData[i + 7] === 'P' &&\n xmlData[i + 8] === 'E')\n { \n i = i+9;\n let angleBracketsCount = 1;\n let hasBody = false, comment = false;\n let exp = \"\";\n for(;i') { //Read tag content\n if(comment){\n if( xmlData[i - 1] === \"-\" && xmlData[i - 2] === \"-\"){\n comment = false;\n angleBracketsCount--;\n }\n }else{\n angleBracketsCount--;\n }\n if (angleBracketsCount === 0) {\n break;\n }\n }else if( xmlData[i] === '['){\n hasBody = true;\n }else{\n exp += xmlData[i];\n }\n }\n if(angleBracketsCount !== 0){\n throw new Error(`Unclosed DOCTYPE`);\n }\n }else{\n throw new Error(`Invalid Tag instead of DOCTYPE`);\n }\n return {entities, i};\n}\n\nfunction readEntityExp(xmlData,i){\n //External entities are not supported\n // \n\n //Parameter entities are not supported\n // \n\n //Internal entities are supported\n // \n \n //read EntityName\n let entityName = \"\";\n for (; i < xmlData.length && (xmlData[i] !== \"'\" && xmlData[i] !== '\"' ); i++) {\n // if(xmlData[i] === \" \") continue;\n // else \n entityName += xmlData[i];\n }\n entityName = entityName.trim();\n if(entityName.indexOf(\" \") !== -1) throw new Error(\"External entites are not supported\");\n\n //read Entity Value\n const startChar = xmlData[i++];\n let val = \"\"\n for (; i < xmlData.length && xmlData[i] !== startChar ; i++) {\n val += xmlData[i];\n }\n return [entityName, val, i];\n}\n\nfunction isComment(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === '-' &&\n xmlData[i+3] === '-') return true\n return false\n}\nfunction isEntity(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === 'E' &&\n xmlData[i+3] === 'N' &&\n xmlData[i+4] === 'T' &&\n xmlData[i+5] === 'I' &&\n xmlData[i+6] === 'T' &&\n xmlData[i+7] === 'Y') return true\n return false\n}\nfunction isElement(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === 'E' &&\n xmlData[i+3] === 'L' &&\n xmlData[i+4] === 'E' &&\n xmlData[i+5] === 'M' &&\n xmlData[i+6] === 'E' &&\n xmlData[i+7] === 'N' &&\n xmlData[i+8] === 'T') return true\n return false\n}\n\nfunction isAttlist(xmlData, i){\n if(xmlData[i+1] === '!' &&\n xmlData[i+2] === 'A' &&\n xmlData[i+3] === 'T' &&\n xmlData[i+4] === 'T' &&\n xmlData[i+5] === 'L' &&\n xmlData[i+6] === 'I' &&\n xmlData[i+7] === 'S' &&\n xmlData[i+8] === 'T') return true\n return false\n}\nfunction isNotation(xmlData, i){\n if(xmlData[i+1] === '!' 
&&\n xmlData[i+2] === 'N' &&\n xmlData[i+3] === 'O' &&\n xmlData[i+4] === 'T' &&\n xmlData[i+5] === 'A' &&\n xmlData[i+6] === 'T' &&\n xmlData[i+7] === 'I' &&\n xmlData[i+8] === 'O' &&\n xmlData[i+9] === 'N') return true\n return false\n}\n\nfunction validateEntityName(name){\n if (util.isName(name))\n\treturn name;\n else\n throw new Error(`Invalid entity name ${name}`);\n}\n\nmodule.exports = readDocType;\n","\nconst defaultOptions = {\n preserveOrder: false,\n attributeNamePrefix: '@_',\n attributesGroupName: false,\n textNodeName: '#text',\n ignoreAttributes: true,\n removeNSPrefix: false, // remove NS from tag name or attribute name if true\n allowBooleanAttributes: false, //a tag can have attributes without any value\n //ignoreRootElement : false,\n parseTagValue: true,\n parseAttributeValue: false,\n trimValues: true, //Trim string values of tag and attributes\n cdataPropName: false,\n numberParseOptions: {\n hex: true,\n leadingZeros: true,\n eNotation: true\n },\n tagValueProcessor: function(tagName, val) {\n return val;\n },\n attributeValueProcessor: function(attrName, val) {\n return val;\n },\n stopNodes: [], //nested tags will not be parsed even for errors\n alwaysCreateTextNode: false,\n isArray: () => false,\n commentPropName: false,\n unpairedTags: [],\n processEntities: true,\n htmlEntities: false,\n ignoreDeclaration: false,\n ignorePiTags: false,\n transformTagName: false,\n transformAttributeName: false,\n updateTag: function(tagName, jPath, attrs){\n return tagName\n },\n // skipEmptyListItem: false\n};\n \nconst buildOptions = function(options) {\n return Object.assign({}, defaultOptions, options);\n};\n\nexports.buildOptions = buildOptions;\nexports.defaultOptions = defaultOptions;","'use strict';\n///@ts-check\n\nconst util = require('../util');\nconst xmlNode = require('./xmlNode');\nconst readDocType = require(\"./DocTypeReader\");\nconst toNumber = require(\"strnum\");\n\n// const regx =\n// '<((!\\\\[CDATA\\\\[([\\\\s\\\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\\\/)(NAME)\\\\s*>))([^<]*)'\n// .replace(/NAME/g, util.nameRegexp);\n\n//const tagsRegx = new RegExp(\"<(\\\\/?[\\\\w:\\\\-\\._]+)([^>]*)>(\\\\s*\"+cdataRegx+\")*([^<]+)?\",\"g\");\n//const tagsRegx = new RegExp(\"<(\\\\/?)((\\\\w*:)?([\\\\w:\\\\-\\._]+))([^>]*)>([^<]*)(\"+cdataRegx+\"([^<]*))*([^<]+)?\",\"g\");\n\nclass OrderedObjParser{\n constructor(options){\n this.options = options;\n this.currentNode = null;\n this.tagsNodeStack = [];\n this.docTypeEntities = {};\n this.lastEntities = {\n \"apos\" : { regex: /&(apos|#39|#x27);/g, val : \"'\"},\n \"gt\" : { regex: /&(gt|#62|#x3E);/g, val : \">\"},\n \"lt\" : { regex: /&(lt|#60|#x3C);/g, val : \"<\"},\n \"quot\" : { regex: /&(quot|#34|#x22);/g, val : \"\\\"\"},\n };\n this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : \"&\"};\n this.htmlEntities = {\n \"space\": { regex: /&(nbsp|#160);/g, val: \" \" },\n // \"lt\" : { regex: /&(lt|#60);/g, val: \"<\" },\n // \"gt\" : { regex: /&(gt|#62);/g, val: \">\" },\n // \"amp\" : { regex: /&(amp|#38);/g, val: \"&\" },\n // \"quot\" : { regex: /&(quot|#34);/g, val: \"\\\"\" },\n // \"apos\" : { regex: /&(apos|#39);/g, val: \"'\" },\n \"cent\" : { regex: /&(cent|#162);/g, val: \"¢\" },\n \"pound\" : { regex: /&(pound|#163);/g, val: \"£\" },\n \"yen\" : { regex: /&(yen|#165);/g, val: \"¥\" },\n \"euro\" : { regex: /&(euro|#8364);/g, val: \"€\" },\n \"copyright\" : { regex: /&(copy|#169);/g, val: \"©\" },\n \"reg\" : { regex: /&(reg|#174);/g, val: \"®\" },\n \"inr\" : { regex: /&(inr|#8377);/g, val: \"₹\" },\n 
};\n this.addExternalEntities = addExternalEntities;\n this.parseXml = parseXml;\n this.parseTextData = parseTextData;\n this.resolveNameSpace = resolveNameSpace;\n this.buildAttributesMap = buildAttributesMap;\n this.isItStopNode = isItStopNode;\n this.replaceEntitiesValue = replaceEntitiesValue;\n this.readStopNodeData = readStopNodeData;\n this.saveTextToParentTag = saveTextToParentTag;\n this.addChild = addChild;\n }\n\n}\n\nfunction addExternalEntities(externalEntities){\n const entKeys = Object.keys(externalEntities);\n for (let i = 0; i < entKeys.length; i++) {\n const ent = entKeys[i];\n this.lastEntities[ent] = {\n regex: new RegExp(\"&\"+ent+\";\",\"g\"),\n val : externalEntities[ent]\n }\n }\n}\n\n/**\n * @param {string} val\n * @param {string} tagName\n * @param {string} jPath\n * @param {boolean} dontTrim\n * @param {boolean} hasAttributes\n * @param {boolean} isLeafNode\n * @param {boolean} escapeEntities\n */\nfunction parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) {\n if (val !== undefined) {\n if (this.options.trimValues && !dontTrim) {\n val = val.trim();\n }\n if(val.length > 0){\n if(!escapeEntities) val = this.replaceEntitiesValue(val);\n \n const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode);\n if(newval === null || newval === undefined){\n //don't parse\n return val;\n }else if(typeof newval !== typeof val || newval !== val){\n //overwrite\n return newval;\n }else if(this.options.trimValues){\n return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions);\n }else{\n const trimmedVal = val.trim();\n if(trimmedVal === val){\n return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions);\n }else{\n return val;\n }\n }\n }\n }\n}\n\nfunction resolveNameSpace(tagname) {\n if (this.options.removeNSPrefix) {\n const tags = tagname.split(':');\n const prefix = tagname.charAt(0) === '/' ? 
'/' : '';\n if (tags[0] === 'xmlns') {\n return '';\n }\n if (tags.length === 2) {\n tagname = prefix + tags[1];\n }\n }\n return tagname;\n}\n\n//TODO: change regex to capture NS\n//const attrsRegx = new RegExp(\"([\\\\w\\\\-\\\\.\\\\:]+)\\\\s*=\\\\s*(['\\\"])((.|\\n)*?)\\\\2\",\"gm\");\nconst attrsRegx = new RegExp('([^\\\\s=]+)\\\\s*(=\\\\s*([\\'\"])([\\\\s\\\\S]*?)\\\\3)?', 'gm');\n\nfunction buildAttributesMap(attrStr, jPath, tagName) {\n if (!this.options.ignoreAttributes && typeof attrStr === 'string') {\n // attrStr = attrStr.replace(/\\r?\\n/g, ' ');\n //attrStr = attrStr || attrStr.trim();\n\n const matches = util.getAllMatches(attrStr, attrsRegx);\n const len = matches.length; //don't make it inline\n const attrs = {};\n for (let i = 0; i < len; i++) {\n const attrName = this.resolveNameSpace(matches[i][1]);\n let oldVal = matches[i][4];\n let aName = this.options.attributeNamePrefix + attrName;\n if (attrName.length) {\n if (this.options.transformAttributeName) {\n aName = this.options.transformAttributeName(aName);\n }\n if(aName === \"__proto__\") aName = \"#__proto__\";\n if (oldVal !== undefined) {\n if (this.options.trimValues) {\n oldVal = oldVal.trim();\n }\n oldVal = this.replaceEntitiesValue(oldVal);\n const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath);\n if(newVal === null || newVal === undefined){\n //don't parse\n attrs[aName] = oldVal;\n }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){\n //overwrite\n attrs[aName] = newVal;\n }else{\n //parse\n attrs[aName] = parseValue(\n oldVal,\n this.options.parseAttributeValue,\n this.options.numberParseOptions\n );\n }\n } else if (this.options.allowBooleanAttributes) {\n attrs[aName] = true;\n }\n }\n }\n if (!Object.keys(attrs).length) {\n return;\n }\n if (this.options.attributesGroupName) {\n const attrCollection = {};\n attrCollection[this.options.attributesGroupName] = attrs;\n return attrCollection;\n }\n return attrs\n }\n}\n\nconst parseXml = function(xmlData) {\n xmlData = xmlData.replace(/\\r\\n?/g, \"\\n\"); //TODO: remove this line\n const xmlObj = new xmlNode('!xml');\n let currentNode = xmlObj;\n let textData = \"\";\n let jPath = \"\";\n for(let i=0; i< xmlData.length; i++){//for each char in XML data\n const ch = xmlData[i];\n if(ch === '<'){\n // const nextIndex = i+1;\n // const _2ndChar = xmlData[nextIndex];\n if( xmlData[i+1] === '/') {//Closing Tag\n const closeIndex = findClosingIndex(xmlData, \">\", i, \"Closing Tag is not closed.\")\n let tagName = xmlData.substring(i+2,closeIndex).trim();\n\n if(this.options.removeNSPrefix){\n const colonIndex = tagName.indexOf(\":\");\n if(colonIndex !== -1){\n tagName = tagName.substr(colonIndex+1);\n }\n }\n\n if(this.options.transformTagName) {\n tagName = this.options.transformTagName(tagName);\n }\n\n if(currentNode){\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n }\n\n //check if last tag of nested tag was unpaired tag\n const lastTagName = jPath.substring(jPath.lastIndexOf(\".\")+1);\n if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){\n throw new Error(`Unpaired tag can not be used as closing tag: `);\n }\n let propIndex = 0\n if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){\n propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1)\n this.tagsNodeStack.pop();\n }else{\n propIndex = jPath.lastIndexOf(\".\");\n }\n jPath = jPath.substring(0, propIndex);\n\n currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope\n textData 
= \"\";\n i = closeIndex;\n } else if( xmlData[i+1] === '?') {\n\n let tagData = readTagExp(xmlData,i, false, \"?>\");\n if(!tagData) throw new Error(\"Pi Tag is not closed.\");\n\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n if( (this.options.ignoreDeclaration && tagData.tagName === \"?xml\") || this.options.ignorePiTags){\n\n }else{\n \n const childNode = new xmlNode(tagData.tagName);\n childNode.add(this.options.textNodeName, \"\");\n \n if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName);\n }\n this.addChild(currentNode, childNode, jPath)\n\n }\n\n\n i = tagData.closeIndex + 1;\n } else if(xmlData.substr(i + 1, 3) === '!--') {\n const endIndex = findClosingIndex(xmlData, \"-->\", i+4, \"Comment is not closed.\")\n if(this.options.commentPropName){\n const comment = xmlData.substring(i + 4, endIndex - 2);\n\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n\n currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]);\n }\n i = endIndex;\n } else if( xmlData.substr(i + 1, 2) === '!D') {\n const result = readDocType(xmlData, i);\n this.docTypeEntities = result.entities;\n i = result.i;\n }else if(xmlData.substr(i + 1, 2) === '![') {\n const closeIndex = findClosingIndex(xmlData, \"]]>\", i, \"CDATA is not closed.\") - 2;\n const tagExp = xmlData.substring(i + 9,closeIndex);\n\n textData = this.saveTextToParentTag(textData, currentNode, jPath);\n\n let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true);\n if(val == undefined) val = \"\";\n\n //cdata should be set even if it is 0 length string\n if(this.options.cdataPropName){\n currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]);\n }else{\n currentNode.add(this.options.textNodeName, val);\n }\n \n i = closeIndex + 2;\n }else {//Opening tag\n let result = readTagExp(xmlData,i, this.options.removeNSPrefix);\n let tagName= result.tagName;\n const rawTagName = result.rawTagName;\n let tagExp = result.tagExp;\n let attrExpPresent = result.attrExpPresent;\n let closeIndex = result.closeIndex;\n\n if (this.options.transformTagName) {\n tagName = this.options.transformTagName(tagName);\n }\n \n //save text as child node\n if (currentNode && textData) {\n if(currentNode.tagname !== '!xml'){\n //when nested tag is found\n textData = this.saveTextToParentTag(textData, currentNode, jPath, false);\n }\n }\n\n //check if last tag was unpaired tag\n const lastTag = currentNode;\n if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){\n currentNode = this.tagsNodeStack.pop();\n jPath = jPath.substring(0, jPath.lastIndexOf(\".\"));\n }\n if(tagName !== xmlObj.tagname){\n jPath += jPath ? 
\".\" + tagName : tagName;\n }\n if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) {\n let tagContent = \"\";\n //self-closing tag\n if(tagExp.length > 0 && tagExp.lastIndexOf(\"/\") === tagExp.length - 1){\n i = result.closeIndex;\n }\n //unpaired tag\n else if(this.options.unpairedTags.indexOf(tagName) !== -1){\n i = result.closeIndex;\n }\n //normal tag\n else{\n //read until closing tag is found\n const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1);\n if(!result) throw new Error(`Unexpected end of ${rawTagName}`);\n i = result.i;\n tagContent = result.tagContent;\n }\n\n const childNode = new xmlNode(tagName);\n if(tagName !== tagExp && attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagExp, jPath, tagName);\n }\n if(tagContent) {\n tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true);\n }\n \n jPath = jPath.substr(0, jPath.lastIndexOf(\".\"));\n childNode.add(this.options.textNodeName, tagContent);\n \n this.addChild(currentNode, childNode, jPath)\n }else{\n //selfClosing tag\n if(tagExp.length > 0 && tagExp.lastIndexOf(\"/\") === tagExp.length - 1){\n if(tagName[tagName.length - 1] === \"/\"){ //remove trailing '/'\n tagName = tagName.substr(0, tagName.length - 1);\n jPath = jPath.substr(0, jPath.length - 1);\n tagExp = tagName;\n }else{\n tagExp = tagExp.substr(0, tagExp.length - 1);\n }\n \n if(this.options.transformTagName) {\n tagName = this.options.transformTagName(tagName);\n }\n\n const childNode = new xmlNode(tagName);\n if(tagName !== tagExp && attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagExp, jPath, tagName);\n }\n this.addChild(currentNode, childNode, jPath)\n jPath = jPath.substr(0, jPath.lastIndexOf(\".\"));\n }\n //opening tag\n else{\n const childNode = new xmlNode( tagName);\n this.tagsNodeStack.push(currentNode);\n \n if(tagName !== tagExp && attrExpPresent){\n childNode[\":@\"] = this.buildAttributesMap(tagExp, jPath, tagName);\n }\n this.addChild(currentNode, childNode, jPath)\n currentNode = childNode;\n }\n textData = \"\";\n i = closeIndex;\n }\n }\n }else{\n textData += xmlData[i];\n }\n }\n return xmlObj.child;\n}\n\nfunction addChild(currentNode, childNode, jPath){\n const result = this.options.updateTag(childNode.tagname, jPath, childNode[\":@\"])\n if(result === false){\n }else if(typeof result === \"string\"){\n childNode.tagname = result\n currentNode.addChild(childNode);\n }else{\n currentNode.addChild(childNode);\n }\n}\n\nconst replaceEntitiesValue = function(val){\n\n if(this.options.processEntities){\n for(let entityName in this.docTypeEntities){\n const entity = this.docTypeEntities[entityName];\n val = val.replace( entity.regx, entity.val);\n }\n for(let entityName in this.lastEntities){\n const entity = this.lastEntities[entityName];\n val = val.replace( entity.regex, entity.val);\n }\n if(this.options.htmlEntities){\n for(let entityName in this.htmlEntities){\n const entity = this.htmlEntities[entityName];\n val = val.replace( entity.regex, entity.val);\n }\n }\n val = val.replace( this.ampEntity.regex, this.ampEntity.val);\n }\n return val;\n}\nfunction saveTextToParentTag(textData, currentNode, jPath, isLeafNode) {\n if (textData) { //store previously collected data as textNode\n if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0\n \n textData = this.parseTextData(textData,\n currentNode.tagname,\n jPath,\n false,\n currentNode[\":@\"] ? 
Object.keys(currentNode[\":@\"]).length !== 0 : false,\n isLeafNode);\n\n if (textData !== undefined && textData !== \"\")\n currentNode.add(this.options.textNodeName, textData);\n textData = \"\";\n }\n return textData;\n}\n\n//TODO: use jPath to simplify the logic\n/**\n * \n * @param {string[]} stopNodes \n * @param {string} jPath\n * @param {string} currentTagName \n */\nfunction isItStopNode(stopNodes, jPath, currentTagName){\n const allNodesExp = \"*.\" + currentTagName;\n for (const stopNodePath in stopNodes) {\n const stopNodeExp = stopNodes[stopNodePath];\n if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true;\n }\n return false;\n}\n\n/**\n * Returns the tag Expression and where it is ending handling single-double quotes situation\n * @param {string} xmlData \n * @param {number} i starting index\n * @returns \n */\nfunction tagExpWithClosingIndex(xmlData, i, closingChar = \">\"){\n let attrBoundary;\n let tagExp = \"\";\n for (let index = i; index < xmlData.length; index++) {\n let ch = xmlData[index];\n if (attrBoundary) {\n if (ch === attrBoundary) attrBoundary = \"\";//reset\n } else if (ch === '\"' || ch === \"'\") {\n attrBoundary = ch;\n } else if (ch === closingChar[0]) {\n if(closingChar[1]){\n if(xmlData[index + 1] === closingChar[1]){\n return {\n data: tagExp,\n index: index\n }\n }\n }else{\n return {\n data: tagExp,\n index: index\n }\n }\n } else if (ch === '\\t') {\n ch = \" \"\n }\n tagExp += ch;\n }\n}\n\nfunction findClosingIndex(xmlData, str, i, errMsg){\n const closingIndex = xmlData.indexOf(str, i);\n if(closingIndex === -1){\n throw new Error(errMsg)\n }else{\n return closingIndex + str.length - 1;\n }\n}\n\nfunction readTagExp(xmlData,i, removeNSPrefix, closingChar = \">\"){\n const result = tagExpWithClosingIndex(xmlData, i+1, closingChar);\n if(!result) return;\n let tagExp = result.data;\n const closeIndex = result.index;\n const separatorIndex = tagExp.search(/\\s/);\n let tagName = tagExp;\n let attrExpPresent = true;\n if(separatorIndex !== -1){//separate tag name and attributes expression\n tagName = tagExp.substring(0, separatorIndex);\n tagExp = tagExp.substring(separatorIndex + 1).trimStart();\n }\n\n const rawTagName = tagName;\n if(removeNSPrefix){\n const colonIndex = tagName.indexOf(\":\");\n if(colonIndex !== -1){\n tagName = tagName.substr(colonIndex+1);\n attrExpPresent = tagName !== result.data.substr(colonIndex + 1);\n }\n }\n\n return {\n tagName: tagName,\n tagExp: tagExp,\n closeIndex: closeIndex,\n attrExpPresent: attrExpPresent,\n rawTagName: rawTagName,\n }\n}\n/**\n * find paired tag for a stop node\n * @param {string} xmlData \n * @param {string} tagName \n * @param {number} i \n */\nfunction readStopNodeData(xmlData, tagName, i){\n const startIndex = i;\n // Starting at 1 since we already have an open tag\n let openTagCount = 1;\n\n for (; i < xmlData.length; i++) {\n if( xmlData[i] === \"<\"){ \n if (xmlData[i+1] === \"/\") {//close tag\n const closeIndex = findClosingIndex(xmlData, \">\", i, `${tagName} is not closed`);\n let closeTagName = xmlData.substring(i+2,closeIndex).trim();\n if(closeTagName === tagName){\n openTagCount--;\n if (openTagCount === 0) {\n return {\n tagContent: xmlData.substring(startIndex, i),\n i : closeIndex\n }\n }\n }\n i=closeIndex;\n } else if(xmlData[i+1] === '?') { \n const closeIndex = findClosingIndex(xmlData, \"?>\", i+1, \"StopNode is not closed.\")\n i=closeIndex;\n } else if(xmlData.substr(i + 1, 3) === '!--') { \n const closeIndex = findClosingIndex(xmlData, \"-->\", i+3, 
\"StopNode is not closed.\")\n i=closeIndex;\n } else if(xmlData.substr(i + 1, 2) === '![') { \n const closeIndex = findClosingIndex(xmlData, \"]]>\", i, \"StopNode is not closed.\") - 2;\n i=closeIndex;\n } else {\n const tagData = readTagExp(xmlData, i, '>')\n\n if (tagData) {\n const openTagName = tagData && tagData.tagName;\n if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== \"/\") {\n openTagCount++;\n }\n i=tagData.closeIndex;\n }\n }\n }\n }//end for loop\n}\n\nfunction parseValue(val, shouldParse, options) {\n if (shouldParse && typeof val === 'string') {\n //console.log(options)\n const newval = val.trim();\n if(newval === 'true' ) return true;\n else if(newval === 'false' ) return false;\n else return toNumber(val, options);\n } else {\n if (util.isExist(val)) {\n return val;\n } else {\n return '';\n }\n }\n}\n\n\nmodule.exports = OrderedObjParser;\n","const { buildOptions} = require(\"./OptionsBuilder\");\nconst OrderedObjParser = require(\"./OrderedObjParser\");\nconst { prettify} = require(\"./node2json\");\nconst validator = require('../validator');\n\nclass XMLParser{\n \n constructor(options){\n this.externalEntities = {};\n this.options = buildOptions(options);\n \n }\n /**\n * Parse XML dats to JS object \n * @param {string|Buffer} xmlData \n * @param {boolean|Object} validationOption \n */\n parse(xmlData,validationOption){\n if(typeof xmlData === \"string\"){\n }else if( xmlData.toString){\n xmlData = xmlData.toString();\n }else{\n throw new Error(\"XML data is accepted in String or Bytes[] form.\")\n }\n if( validationOption){\n if(validationOption === true) validationOption = {}; //validate with default options\n \n const result = validator.validate(xmlData, validationOption);\n if (result !== true) {\n throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` )\n }\n }\n const orderedObjParser = new OrderedObjParser(this.options);\n orderedObjParser.addExternalEntities(this.externalEntities);\n const orderedResult = orderedObjParser.parseXml(xmlData);\n if(this.options.preserveOrder || orderedResult === undefined) return orderedResult;\n else return prettify(orderedResult, this.options);\n }\n\n /**\n * Add Entity which is not by default supported by this library\n * @param {string} key \n * @param {string} value \n */\n addEntity(key, value){\n if(value.indexOf(\"&\") !== -1){\n throw new Error(\"Entity value can't have '&'\")\n }else if(key.indexOf(\"&\") !== -1 || key.indexOf(\";\") !== -1){\n throw new Error(\"An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '\")\n }else if(value === \"&\"){\n throw new Error(\"An entity with value '&' is not permitted\");\n }else{\n this.externalEntities[key] = value;\n }\n }\n}\n\nmodule.exports = XMLParser;","'use strict';\n\n/**\n * \n * @param {array} node \n * @param {any} options \n * @returns \n */\nfunction prettify(node, options){\n return compress( node, options);\n}\n\n/**\n * \n * @param {array} arr \n * @param {object} options \n * @param {string} jPath \n * @returns object\n */\nfunction compress(arr, options, jPath){\n let text;\n const compressedObj = {};\n for (let i = 0; i < arr.length; i++) {\n const tagObj = arr[i];\n const property = propName(tagObj);\n let newJpath = \"\";\n if(jPath === undefined) newJpath = property;\n else newJpath = jPath + \".\" + property;\n\n if(property === options.textNodeName){\n if(text === undefined) text = tagObj[property];\n else text += \"\" + tagObj[property];\n }else if(property === undefined){\n continue;\n }else if(tagObj[property]){\n \n let val = compress(tagObj[property], options, newJpath);\n const isLeaf = isLeafTag(val, options);\n\n if(tagObj[\":@\"]){\n assignAttributes( val, tagObj[\":@\"], newJpath, options);\n }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){\n val = val[options.textNodeName];\n }else if(Object.keys(val).length === 0){\n if(options.alwaysCreateTextNode) val[options.textNodeName] = \"\";\n else val = \"\";\n }\n\n if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) {\n if(!Array.isArray(compressedObj[property])) {\n compressedObj[property] = [ compressedObj[property] ];\n }\n compressedObj[property].push(val);\n }else{\n //TODO: if a node is not an array, then check if it should be an array\n //also determine if it is a leaf node\n if (options.isArray(property, newJpath, isLeaf )) {\n compressedObj[property] = [val];\n }else{\n compressedObj[property] = val;\n }\n }\n }\n \n }\n // if(text && text.length > 0) compressedObj[options.textNodeName] = text;\n if(typeof text === \"string\"){\n if(text.length > 0) compressedObj[options.textNodeName] = text;\n }else if(text !== undefined) compressedObj[options.textNodeName] = text;\n return compressedObj;\n}\n\nfunction propName(obj){\n const keys = Object.keys(obj);\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i];\n if(key !== \":@\") return key;\n }\n}\n\nfunction assignAttributes(obj, attrMap, jpath, options){\n if (attrMap) {\n const keys = Object.keys(attrMap);\n const len = keys.length; //don't make it inline\n for (let i = 0; i < len; i++) {\n const atrrName = keys[i];\n if (options.isArray(atrrName, jpath + \".\" + atrrName, true, true)) {\n obj[atrrName] = [ attrMap[atrrName] ];\n } else {\n obj[atrrName] = attrMap[atrrName];\n }\n }\n }\n}\n\nfunction isLeafTag(obj, options){\n const { textNodeName } = options;\n const propCount = Object.keys(obj).length;\n \n if (propCount === 0) {\n return true;\n }\n\n if (\n propCount === 1 &&\n (obj[textNodeName] || typeof obj[textNodeName] === \"boolean\" || obj[textNodeName] === 0)\n ) {\n return true;\n }\n\n return false;\n}\nexports.prettify = prettify;\n","'use strict';\n\nclass XmlNode{\n constructor(tagname) {\n this.tagname = tagname;\n this.child = []; //nested tags, text, cdata, comments in order\n this[\":@\"] = {}; //attributes map\n }\n add(key,val){\n // this.child.push( {name : key, val: val, isCdata: isCdata });\n if(key === \"__proto__\") key = \"#__proto__\";\n this.child.push( {[key]: 
val });\n }\n addChild(node) {\n if(node.tagname === \"__proto__\") node.tagname = \"#__proto__\";\n if(node[\":@\"] && Object.keys(node[\":@\"]).length > 0){\n this.child.push( { [node.tagname]: node.child, [\":@\"]: node[\":@\"] });\n }else{\n this.child.push( { [node.tagname]: node.child });\n }\n };\n};\n\n\nmodule.exports = XmlNode;","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0;\nconst url_1 = require(\"url\");\nconst util_1 = require(\"./util\");\nconst extend_1 = __importDefault(require(\"extend\"));\n/**\n * Support `instanceof` operator for `GaxiosError`s in different versions of this library.\n *\n * @see {@link GaxiosError[Symbol.hasInstance]}\n */\nexports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`);\n/* eslint-disable-next-line @typescript-eslint/no-explicit-any */\nclass GaxiosError extends Error {\n /**\n * Support `instanceof` operator for `GaxiosError` across builds/duplicated files.\n *\n * @see {@link GAXIOS_ERROR_SYMBOL}\n * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]}\n */\n static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) {\n if (instance &&\n typeof instance === 'object' &&\n exports.GAXIOS_ERROR_SYMBOL in instance &&\n instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) {\n return true;\n }\n // fallback to native\n return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance);\n }\n constructor(message, config, response, error) {\n var _b;\n super(message);\n this.config = config;\n this.response = response;\n this.error = error;\n /**\n * Support `instanceof` operator for `GaxiosError` across builds/duplicated files.\n *\n * @see {@link GAXIOS_ERROR_SYMBOL}\n * @see {@link GaxiosError[Symbol.hasInstance]}\n * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200}\n * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof}\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior}\n */\n this[_a] = util_1.pkg.version;\n // deep-copy config as we do not want to mutate\n // the existing config for future retries/use\n this.config = (0, extend_1.default)(true, {}, config);\n if (this.response) {\n this.response.config = (0, extend_1.default)(true, {}, this.response.config);\n }\n if (this.response) {\n try {\n this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? 
void 0 : _b.data);\n }\n catch (_c) {\n // best effort - don't throw an error within an error\n // we could set `this.response.config.responseType = 'unknown'`, but\n // that would mutate future calls with this config object.\n }\n this.status = this.response.status;\n }\n if (error && 'code' in error && error.code) {\n this.code = error.code;\n }\n if (config.errorRedactor) {\n config.errorRedactor({\n config: this.config,\n response: this.response,\n });\n }\n }\n}\nexports.GaxiosError = GaxiosError;\nfunction translateData(responseType, data) {\n switch (responseType) {\n case 'stream':\n return data;\n case 'json':\n return JSON.parse(JSON.stringify(data));\n case 'arraybuffer':\n return JSON.parse(Buffer.from(data).toString('utf8'));\n case 'blob':\n return JSON.parse(data.text());\n default:\n return data;\n }\n}\n/**\n * An experimental error redactor.\n *\n * @param config Config to potentially redact properties of\n * @param response Config to potentially redact properties of\n *\n * @experimental\n */\nfunction defaultErrorRedactor(data) {\n const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.';\n function redactHeaders(headers) {\n if (!headers)\n return;\n for (const key of Object.keys(headers)) {\n // any casing of `Authentication`\n if (/^authentication$/.test(key)) {\n headers[key] = REDACT;\n }\n }\n }\n function redactString(obj, key) {\n if (typeof obj === 'object' &&\n obj !== null &&\n typeof obj[key] === 'string') {\n const text = obj[key];\n if (/grant_type=/.test(text) || /assertion=/.test(text)) {\n obj[key] = REDACT;\n }\n }\n }\n function redactObject(obj) {\n if (typeof obj === 'object' && obj !== null) {\n if ('grant_type' in obj) {\n obj['grant_type'] = REDACT;\n }\n if ('assertion' in obj) {\n obj['assertion'] = REDACT;\n }\n }\n }\n if (data.config) {\n redactHeaders(data.config.headers);\n redactString(data.config, 'data');\n redactObject(data.config.data);\n redactString(data.config, 'body');\n redactObject(data.config.body);\n try {\n const url = new url_1.URL(data.config.url || '');\n if (url.searchParams.has('token')) {\n url.searchParams.set('token', REDACT);\n }\n data.config.url = url.toString();\n }\n catch (_b) {\n // ignore error - no need to parse an invalid URL\n }\n }\n if (data.response) {\n defaultErrorRedactor({ config: data.response.config });\n redactHeaders(data.response.headers);\n redactString(data.response, 'data');\n redactObject(data.response.data);\n }\n return data;\n}\nexports.defaultErrorRedactor = defaultErrorRedactor;\n//# sourceMappingURL=common.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Gaxios = void 0;\nconst extend_1 = __importDefault(require(\"extend\"));\nconst https_1 = require(\"https\");\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst querystring_1 = __importDefault(require(\"querystring\"));\nconst is_stream_1 = __importDefault(require(\"is-stream\"));\nconst url_1 = require(\"url\");\nconst common_1 = require(\"./common\");\nconst retry_1 = require(\"./retry\");\nconst https_proxy_agent_1 = require(\"https-proxy-agent\");\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fetch = hasFetch() ? window.fetch : node_fetch_1.default;\nfunction hasWindow() {\n return typeof window !== 'undefined' && !!window;\n}\nfunction hasFetch() {\n return hasWindow() && !!window.fetch;\n}\nfunction hasBuffer() {\n return typeof Buffer !== 'undefined';\n}\nfunction hasHeader(options, header) {\n return !!getHeader(options, header);\n}\nfunction getHeader(options, header) {\n header = header.toLowerCase();\n for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) {\n if (header === key.toLowerCase()) {\n return options.headers[key];\n }\n }\n return undefined;\n}\nlet HttpsProxyAgent;\nfunction loadProxy() {\n var _a, _b, _c, _d;\n const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) ||\n ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) ||\n ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) ||\n ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy);\n if (proxy) {\n HttpsProxyAgent = https_proxy_agent_1.HttpsProxyAgent;\n }\n return proxy;\n}\nloadProxy();\nfunction skipProxy(url) {\n var _a;\n const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy;\n if (!noProxyEnv) {\n return false;\n }\n const noProxyUrls = noProxyEnv.split(',');\n const parsedURL = new url_1.URL(url);\n return !!noProxyUrls.find(url => {\n if (url.startsWith('*.') || url.startsWith('.')) {\n url = url.replace(/^\\*\\./, '.');\n return parsedURL.hostname.endsWith(url);\n }\n else {\n return url === parsedURL.origin || url === parsedURL.hostname;\n }\n });\n}\n// Figure out if we should be using a proxy. 
Only if it's required, load\n// the https-proxy-agent module as it adds startup cost.\nfunction getProxy(url) {\n // If there is a match between the no_proxy env variables and the url, then do not proxy\n if (skipProxy(url)) {\n return undefined;\n // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy\n }\n else {\n return loadProxy();\n }\n}\nclass Gaxios {\n /**\n * The Gaxios class is responsible for making HTTP requests.\n * @param defaults The default set of options to be used for this instance.\n */\n constructor(defaults) {\n this.agentCache = new Map();\n this.defaults = defaults || {};\n }\n /**\n * Perform an HTTP request with the given options.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async request(opts = {}) {\n opts = this.validateOpts(opts);\n return this._request(opts);\n }\n async _defaultAdapter(opts) {\n const fetchImpl = opts.fetchImplementation || fetch;\n const res = (await fetchImpl(opts.url, opts));\n const data = await this.getResponseData(opts, res);\n return this.translateResponse(opts, res, data);\n }\n /**\n * Internal, retryable version of the `request` method.\n * @param opts Set of HTTP options that will be used for this HTTP request.\n */\n async _request(opts = {}) {\n var _a;\n try {\n let translatedResponse;\n if (opts.adapter) {\n translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this));\n }\n else {\n translatedResponse = await this._defaultAdapter(opts);\n }\n if (!opts.validateStatus(translatedResponse.status)) {\n if (opts.responseType === 'stream') {\n let response = '';\n await new Promise(resolve => {\n (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => {\n response += chunk;\n });\n (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve);\n });\n translatedResponse.data = response;\n }\n throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse);\n }\n return translatedResponse;\n }\n catch (e) {\n const err = e instanceof common_1.GaxiosError\n ? e\n : new common_1.GaxiosError(e.message, opts, undefined, e);\n const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err);\n if (shouldRetry && config) {\n err.config.retryConfig.currentRetryAttempt =\n config.retryConfig.currentRetryAttempt;\n // The error's config could be redacted - therefore we only want to\n // copy the retry state over to the existing config\n opts.retryConfig = (_a = err.config) === null || _a === void 0 ? 
void 0 : _a.retryConfig;\n return this._request(opts);\n }\n throw err;\n }\n }\n async getResponseData(opts, res) {\n switch (opts.responseType) {\n case 'stream':\n return res.body;\n case 'json': {\n let data = await res.text();\n try {\n data = JSON.parse(data);\n }\n catch (_a) {\n // continue\n }\n return data;\n }\n case 'arraybuffer':\n return res.arrayBuffer();\n case 'blob':\n return res.blob();\n case 'text':\n return res.text();\n default:\n return this.getResponseDataFromContentType(res);\n }\n }\n /**\n * Validates the options, and merges them with defaults.\n * @param opts The original options passed from the client.\n */\n validateOpts(options) {\n const opts = (0, extend_1.default)(true, {}, this.defaults, options);\n if (!opts.url) {\n throw new Error('URL is required.');\n }\n // baseUrl has been deprecated, remove in 2.0\n const baseUrl = opts.baseUrl || opts.baseURL;\n if (baseUrl) {\n opts.url = baseUrl + opts.url;\n }\n opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer;\n if (opts.params && Object.keys(opts.params).length > 0) {\n let additionalQueryParams = opts.paramsSerializer(opts.params);\n if (additionalQueryParams.startsWith('?')) {\n additionalQueryParams = additionalQueryParams.slice(1);\n }\n const prefix = opts.url.includes('?') ? '&' : '?';\n opts.url = opts.url + prefix + additionalQueryParams;\n }\n if (typeof options.maxContentLength === 'number') {\n opts.size = options.maxContentLength;\n }\n if (typeof options.maxRedirects === 'number') {\n opts.follow = options.maxRedirects;\n }\n opts.headers = opts.headers || {};\n if (opts.data) {\n const isFormData = typeof FormData === 'undefined'\n ? false\n : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData;\n if (is_stream_1.default.readable(opts.data)) {\n opts.body = opts.data;\n }\n else if (hasBuffer() && Buffer.isBuffer(opts.data)) {\n // Do not attempt to JSON.stringify() a Buffer:\n opts.body = opts.data;\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n }\n else if (typeof opts.data === 'object') {\n // If www-form-urlencoded content type has been set, but data is\n // provided as an object, serialize the content using querystring:\n if (!isFormData) {\n if (getHeader(opts, 'content-type') ===\n 'application/x-www-form-urlencoded') {\n opts.body = opts.paramsSerializer(opts.data);\n }\n else {\n // } else if (!(opts.data instanceof FormData)) {\n if (!hasHeader(opts, 'Content-Type')) {\n opts.headers['Content-Type'] = 'application/json';\n }\n opts.body = JSON.stringify(opts.data);\n }\n }\n }\n else {\n opts.body = opts.data;\n }\n }\n opts.validateStatus = opts.validateStatus || this.validateStatus;\n opts.responseType = opts.responseType || 'unknown';\n if (!opts.headers['Accept'] && opts.responseType === 'json') {\n opts.headers['Accept'] = 'application/json';\n }\n opts.method = opts.method || 'GET';\n const proxy = getProxy(opts.url);\n if (proxy) {\n if (this.agentCache.has(proxy)) {\n opts.agent = this.agentCache.get(proxy);\n }\n else {\n // Proxy is being used in conjunction with mTLS.\n if (opts.cert && opts.key) {\n const parsedURL = new url_1.URL(proxy);\n opts.agent = new HttpsProxyAgent({\n port: parsedURL.port,\n host: parsedURL.host,\n protocol: parsedURL.protocol,\n cert: opts.cert,\n key: opts.key,\n });\n }\n else {\n opts.agent = new HttpsProxyAgent(proxy);\n }\n this.agentCache.set(proxy, opts.agent);\n }\n }\n else if (opts.cert && opts.key) {\n // Configure client for mTLS:\n if 
(this.agentCache.has(opts.key)) {\n opts.agent = this.agentCache.get(opts.key);\n }\n else {\n opts.agent = new https_1.Agent({\n cert: opts.cert,\n key: opts.key,\n });\n this.agentCache.set(opts.key, opts.agent);\n }\n }\n if (typeof opts.errorRedactor !== 'function' &&\n opts.errorRedactor !== false) {\n opts.errorRedactor = common_1.defaultErrorRedactor;\n }\n return opts;\n }\n /**\n * By default, throw for any non-2xx status code\n * @param status status code from the HTTP response\n */\n validateStatus(status) {\n return status >= 200 && status < 300;\n }\n /**\n * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo)\n * @param params key value pars to encode\n */\n paramsSerializer(params) {\n return querystring_1.default.stringify(params);\n }\n translateResponse(opts, res, data) {\n // headers need to be converted from a map to an obj\n const headers = {};\n res.headers.forEach((value, key) => {\n headers[key] = value;\n });\n return {\n config: opts,\n data: data,\n headers,\n status: res.status,\n statusText: res.statusText,\n // XMLHttpRequestLike\n request: {\n responseURL: res.url,\n },\n };\n }\n /**\n * Attempts to parse a response by looking at the Content-Type header.\n * @param {FetchResponse} response the HTTP response.\n * @returns {Promise} a promise that resolves to the response data.\n */\n async getResponseDataFromContentType(response) {\n let contentType = response.headers.get('Content-Type');\n if (contentType === null) {\n // Maintain existing functionality by calling text()\n return response.text();\n }\n contentType = contentType.toLowerCase();\n if (contentType.includes('application/json')) {\n let data = await response.text();\n try {\n data = JSON.parse(data);\n }\n catch (_a) {\n // continue\n }\n return data;\n }\n else if (contentType.match(/^text\\//)) {\n return response.text();\n }\n else {\n // If the content type is something not easily handled, just return the raw data (blob)\n return response.blob();\n }\n }\n}\nexports.Gaxios = Gaxios;\n//# sourceMappingURL=gaxios.js.map","\"use strict\";\n// Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0;\nconst gaxios_1 = require(\"./gaxios\");\nObject.defineProperty(exports, \"Gaxios\", { enumerable: true, get: function () { return gaxios_1.Gaxios; } });\nvar common_1 = require(\"./common\");\nObject.defineProperty(exports, \"GaxiosError\", { enumerable: true, get: function () { return common_1.GaxiosError; } });\n/**\n * The default instance used when the `request` method is directly\n * invoked.\n */\nexports.instance = new gaxios_1.Gaxios();\n/**\n * Make an HTTP request using the given options.\n * @param opts Options for the request\n */\nasync function request(opts) {\n return exports.instance.request(opts);\n}\nexports.request = request;\n//# sourceMappingURL=index.js.map","\"use strict\";\n// 
Copyright 2018 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRetryConfig = void 0;\nasync function getRetryConfig(err) {\n var _a;\n let config = getConfig(err);\n if (!err || !err.config || (!config && !err.config.retry)) {\n return { shouldRetry: false };\n }\n config = config || {};\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n config.retry =\n config.retry === undefined || config.retry === null ? 3 : config.retry;\n config.httpMethodsToRetry = config.httpMethodsToRetry || [\n 'GET',\n 'HEAD',\n 'PUT',\n 'OPTIONS',\n 'DELETE',\n ];\n config.noResponseRetries =\n config.noResponseRetries === undefined || config.noResponseRetries === null\n ? 2\n : config.noResponseRetries;\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n config.statusCodesToRetry = config.statusCodesToRetry || retryRanges;\n // Put the config back into the err\n err.config.retryConfig = config;\n // Determine if we should retry the request\n const shouldRetryFn = config.shouldRetry || shouldRetryRequest;\n if (!(await shouldRetryFn(err))) {\n return { shouldRetry: false, config: err.config };\n }\n // Calculate time to wait with exponential backoff.\n // If this is the first retry, look for a configured retryDelay.\n const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100;\n // Formula: retryDelay + ((2^c - 1 / 2) * 1000)\n const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000;\n // We're going to retry! Incremenent the counter.\n err.config.retryConfig.currentRetryAttempt += 1;\n // Create a promise that invokes the retry after the backOffDelay\n const backoff = config.retryBackoff\n ? 
config.retryBackoff(err, delay)\n : new Promise(resolve => {\n setTimeout(resolve, delay);\n });\n // Notify the user if they added an `onRetryAttempt` handler\n if (config.onRetryAttempt) {\n config.onRetryAttempt(err);\n }\n // Return the promise in which recalls Gaxios to retry the request\n await backoff;\n return { shouldRetry: true, config: err.config };\n}\nexports.getRetryConfig = getRetryConfig;\n/**\n * Determine based on config if we should retry the request.\n * @param err The GaxiosError passed to the interceptor.\n */\nfunction shouldRetryRequest(err) {\n var _a;\n const config = getConfig(err);\n // node-fetch raises an AbortError if signaled:\n // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal\n if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') {\n return false;\n }\n // If there's no config, or retries are disabled, return.\n if (!config || config.retry === 0) {\n return false;\n }\n // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc)\n if (!err.response &&\n (config.currentRetryAttempt || 0) >= config.noResponseRetries) {\n return false;\n }\n // Only retry with configured HttpMethods.\n if (!err.config.method ||\n config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) {\n return false;\n }\n // If this wasn't in the list of status codes where we want\n // to automatically retry, return.\n if (err.response && err.response.status) {\n let isInRange = false;\n for (const [min, max] of config.statusCodesToRetry) {\n const status = err.response.status;\n if (status >= min && status <= max) {\n isInRange = true;\n break;\n }\n }\n if (!isInRange) {\n return false;\n }\n }\n // If we are out of retry attempts, return\n config.currentRetryAttempt = config.currentRetryAttempt || 0;\n if (config.currentRetryAttempt >= config.retry) {\n return false;\n }\n return true;\n}\n/**\n * Acquire the raxConfig object from an GaxiosError if available.\n * @param err The Gaxios error with a config object.\n */\nfunction getConfig(err) {\n if (err && err.config && err.config.retryConfig) {\n return err.config.retryConfig;\n }\n return;\n}\n//# sourceMappingURL=retry.js.map","\"use strict\";\n// Copyright 2023 Google LLC\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.pkg = void 0;\nexports.pkg = require('../../package.json');\n//# sourceMappingURL=util.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.req = exports.json = exports.toBuffer = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nasync function toBuffer(stream) {\n let length = 0;\n const chunks = [];\n for await (const chunk of stream) {\n length += chunk.length;\n chunks.push(chunk);\n }\n return Buffer.concat(chunks, length);\n}\nexports.toBuffer = toBuffer;\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nasync function json(stream) {\n const buf = await toBuffer(stream);\n const str = buf.toString('utf8');\n try {\n return JSON.parse(str);\n }\n catch (_err) {\n const err = _err;\n err.message += ` (input: ${str})`;\n throw err;\n }\n}\nexports.json = json;\nfunction req(url, opts = {}) {\n const href = typeof url === 'string' ? url : url.href;\n const req = (href.startsWith('https:') ? https : http).request(url, opts);\n const promise = new Promise((resolve, reject) => {\n req\n .once('response', resolve)\n .once('error', reject)\n .end();\n });\n req.then = promise.then.bind(promise);\n return req;\n}\nexports.req = req;\n//# sourceMappingURL=helpers.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Agent = void 0;\nconst http = __importStar(require(\"http\"));\n__exportStar(require(\"./helpers\"), exports);\nconst INTERNAL = Symbol('AgentBaseInternalState');\nclass Agent extends http.Agent {\n constructor(opts) {\n super(opts);\n this[INTERNAL] = {};\n }\n /**\n * Determine whether this is an `http` or `https` request.\n */\n isSecureEndpoint(options) {\n if (options) {\n // First check the `secureEndpoint` property explicitly, since this\n // means that a parent `Agent` is \"passing through\" to this instance.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof options.secureEndpoint === 'boolean') {\n return options.secureEndpoint;\n }\n // If no explicit `secure` endpoint, check if `protocol` property is\n // set. This will usually be the case since using a full string URL\n // or `URL` instance should be the most common usage.\n if (typeof options.protocol === 'string') {\n return options.protocol === 'https:';\n }\n }\n // Finally, if no `protocol` property was set, then fall back to\n // checking the stack trace of the current call stack, and try to\n // detect the \"https\" module.\n const { stack } = new Error();\n if (typeof stack !== 'string')\n return false;\n return stack\n .split('\\n')\n .some((l) => l.indexOf('(https.js:') !== -1 ||\n l.indexOf('node:https:') !== -1);\n }\n createSocket(req, options, cb) {\n const connectOpts = {\n ...options,\n secureEndpoint: this.isSecureEndpoint(options),\n };\n Promise.resolve()\n .then(() => this.connect(req, connectOpts))\n .then((socket) => {\n if (socket instanceof http.Agent) {\n // @ts-expect-error `addRequest()` isn't defined in `@types/node`\n return socket.addRequest(req, connectOpts);\n }\n this[INTERNAL].currentSocket = socket;\n // @ts-expect-error `createSocket()` isn't defined in `@types/node`\n super.createSocket(req, options, cb);\n }, cb);\n }\n createConnection() {\n const socket = this[INTERNAL].currentSocket;\n this[INTERNAL].currentSocket = undefined;\n if (!socket) {\n throw new Error('No socket was returned in the `connect()` function');\n }\n return socket;\n }\n get defaultPort() {\n return (this[INTERNAL].defaultPort ??\n (this.protocol === 'https:' ? 443 : 80));\n }\n set defaultPort(v) {\n if (this[INTERNAL]) {\n this[INTERNAL].defaultPort = v;\n }\n }\n get protocol() {\n return (this[INTERNAL].protocol ??\n (this.isSecureEndpoint() ? 'https:' : 'http:'));\n }\n set protocol(v) {\n if (this[INTERNAL]) {\n this[INTERNAL].protocol = v;\n }\n }\n}\nexports.Agent = Agent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpsProxyAgent = void 0;\nconst net = __importStar(require(\"net\"));\nconst tls = __importStar(require(\"tls\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst agent_base_1 = require(\"agent-base\");\nconst url_1 = require(\"url\");\nconst parse_proxy_response_1 = require(\"./parse-proxy-response\");\nconst debug = (0, debug_1.default)('https-proxy-agent');\n/**\n * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to\n * the specified \"HTTP(s) proxy server\" in order to proxy HTTPS requests.\n *\n * Outgoing HTTP requests are first tunneled through the proxy server using the\n * `CONNECT` HTTP request method to establish a connection to the proxy server,\n * and then the proxy server connects to the destination target and issues the\n * HTTP request from the proxy server.\n *\n * `https:` requests have their socket connection upgraded to TLS once\n * the connection to the proxy server has been established.\n */\nclass HttpsProxyAgent extends agent_base_1.Agent {\n constructor(proxy, opts) {\n super(opts);\n this.options = { path: undefined };\n this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy;\n this.proxyHeaders = opts?.headers ?? {};\n debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href);\n // Trim off the brackets from IPv6 addresses\n const host = (this.proxy.hostname || this.proxy.host).replace(/^\\[|\\]$/g, '');\n const port = this.proxy.port\n ? parseInt(this.proxy.port, 10)\n : this.proxy.protocol === 'https:'\n ? 443\n : 80;\n this.connectOpts = {\n // Attempt to negotiate http/1.1 for proxy servers that support http/2\n ALPNProtocols: ['http/1.1'],\n ...(opts ? omit(opts, 'headers') : null),\n host,\n port,\n };\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n */\n async connect(req, opts) {\n const { proxy } = this;\n if (!opts.host) {\n throw new TypeError('No \"host\" provided');\n }\n // Create a socket connection to the proxy server.\n let socket;\n if (proxy.protocol === 'https:') {\n debug('Creating `tls.Socket`: %o', this.connectOpts);\n const servername = this.connectOpts.servername || this.connectOpts.host;\n socket = tls.connect({\n ...this.connectOpts,\n servername: servername && net.isIP(servername) ? 
undefined : servername,\n });\n }\n else {\n debug('Creating `net.Socket`: %o', this.connectOpts);\n socket = net.connect(this.connectOpts);\n }\n const headers = typeof this.proxyHeaders === 'function'\n ? this.proxyHeaders()\n : { ...this.proxyHeaders };\n const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host;\n let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\\r\\n`;\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.username || proxy.password) {\n const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;\n headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;\n }\n headers.Host = `${host}:${opts.port}`;\n if (!headers['Proxy-Connection']) {\n headers['Proxy-Connection'] = this.keepAlive\n ? 'Keep-Alive'\n : 'close';\n }\n for (const name of Object.keys(headers)) {\n payload += `${name}: ${headers[name]}\\r\\n`;\n }\n const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket);\n socket.write(`${payload}\\r\\n`);\n const { connect, buffered } = await proxyResponsePromise;\n req.emit('proxyConnect', connect);\n this.emit('proxyConnect', connect, req);\n if (connect.statusCode === 200) {\n req.once('socket', resume);\n if (opts.secureEndpoint) {\n // The proxy is connecting to a TLS server, so upgrade\n // this socket connection to a TLS connection.\n debug('Upgrading socket connection to TLS');\n const servername = opts.servername || opts.host;\n return tls.connect({\n ...omit(opts, 'host', 'path', 'port'),\n socket,\n servername: net.isIP(servername) ? undefined : servername,\n });\n }\n return socket;\n }\n // Some other status code that's not 200... need to re-play the HTTP\n // header \"data\" events onto the socket once the HTTP machinery is\n // attached so that the node core `http` can parse and handle the\n // error status code.\n // Close the original socket, and a new \"fake\" socket is returned\n // instead, so that the proxy doesn't get the HTTP request\n // written to it (which may contain `Authorization` headers or other\n // sensitive data).\n //\n // See: https://hackerone.com/reports/541502\n socket.destroy();\n const fakeSocket = new net.Socket({ writable: false });\n fakeSocket.readable = true;\n // Need to wait for the \"socket\" event to re-play the \"data\" events.\n req.once('socket', (s) => {\n debug('Replaying proxy buffer for failed request');\n (0, assert_1.default)(s.listenerCount('data') > 0);\n // Replay the \"buffered\" Buffer onto the fake `socket`, since at\n // this point the HTTP module machinery has been hooked up for\n // the user.\n s.push(buffered);\n s.push(null);\n });\n return fakeSocket;\n }\n}\nHttpsProxyAgent.protocols = ['http', 'https'];\nexports.HttpsProxyAgent = HttpsProxyAgent;\nfunction resume(socket) {\n socket.resume();\n}\nfunction omit(obj, ...keys) {\n const ret = {};\n let key;\n for (key in obj) {\n if (!keys.includes(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.parseProxyResponse = void 0;\nconst debug_1 = __importDefault(require(\"debug\"));\nconst debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response');\nfunction parseProxyResponse(socket) {\n return new Promise((resolve, reject) => {\n // we need to buffer any HTTP traffic that happens with the proxy before we get\n // the CONNECT response, so that if the response is anything other than an \"200\"\n // response code, then we can re-play the \"data\" events on the socket once the\n // HTTP parser is hooked up...\n let buffersLength = 0;\n const buffers = [];\n function read() {\n const b = socket.read();\n if (b)\n ondata(b);\n else\n socket.once('readable', read);\n }\n function cleanup() {\n socket.removeListener('end', onend);\n socket.removeListener('error', onerror);\n socket.removeListener('readable', read);\n }\n function onend() {\n cleanup();\n debug('onend');\n reject(new Error('Proxy connection ended before receiving CONNECT response'));\n }\n function onerror(err) {\n cleanup();\n debug('onerror %o', err);\n reject(err);\n }\n function ondata(b) {\n buffers.push(b);\n buffersLength += b.length;\n const buffered = Buffer.concat(buffers, buffersLength);\n const endOfHeaders = buffered.indexOf('\\r\\n\\r\\n');\n if (endOfHeaders === -1) {\n // keep buffering\n debug('have not received end of HTTP headers yet...');\n read();\n return;\n }\n const headerParts = buffered\n .slice(0, endOfHeaders)\n .toString('ascii')\n .split('\\r\\n');\n const firstLine = headerParts.shift();\n if (!firstLine) {\n socket.destroy();\n return reject(new Error('No header received from proxy CONNECT response'));\n }\n const firstLineParts = firstLine.split(' ');\n const statusCode = +firstLineParts[1];\n const statusText = firstLineParts.slice(2).join(' ');\n const headers = {};\n for (const header of headerParts) {\n if (!header)\n continue;\n const firstColon = header.indexOf(':');\n if (firstColon === -1) {\n socket.destroy();\n return reject(new Error(`Invalid header from proxy CONNECT response: \"${header}\"`));\n }\n const key = header.slice(0, firstColon).toLowerCase();\n const value = header.slice(firstColon + 1).trimStart();\n const current = headers[key];\n if (typeof current === 'string') {\n headers[key] = [current, value];\n }\n else if (Array.isArray(current)) {\n current.push(value);\n }\n else {\n headers[key] = value;\n }\n }\n debug('got proxy server response: %o %o', firstLine, headers);\n cleanup();\n resolve({\n connect: {\n statusCode,\n statusText,\n headers,\n },\n buffered,\n });\n }\n socket.on('error', onerror);\n socket.on('end', onend);\n read();\n });\n}\nexports.parseProxyResponse = parseProxyResponse;\n//# sourceMappingURL=parse-proxy-response.js.map","\"use strict\";\n/**\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true 
});\nexports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\n/**\n * Known paths unique to Google Compute Engine Linux instances\n */\nexports.GCE_LINUX_BIOS_PATHS = {\n BIOS_DATE: '/sys/class/dmi/id/bios_date',\n BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor',\n};\nconst GCE_MAC_ADDRESS_REGEX = /^42:01/;\n/**\n * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance).\n *\n * Uses the:\n * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}.\n *\n * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise.\n */\nfunction isGoogleCloudServerless() {\n /**\n * `CLOUD_RUN_JOB` is used for Cloud Run Jobs\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n *\n * `FUNCTION_NAME` is used in older Cloud Functions environments:\n * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}.\n *\n * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments:\n * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}.\n * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}.\n */\n const isGFEnvironment = process.env.CLOUD_RUN_JOB ||\n process.env.FUNCTION_NAME ||\n process.env.K_SERVICE;\n return !!isGFEnvironment;\n}\nexports.isGoogleCloudServerless = isGoogleCloudServerless;\n/**\n * Determines if the process is running on a Linux Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngineLinux() {\n if ((0, os_1.platform)() !== 'linux')\n return false;\n try {\n // ensure this file exist\n (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE);\n // ensure this file exist and matches\n const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8');\n return /Google/.test(biosVendor);\n }\n catch (_a) {\n return false;\n }\n}\nexports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux;\n/**\n * Determines if the process is running on a Google Compute Engine instance with a known\n * MAC address.\n *\n * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise.\n */\nfunction isGoogleComputeEngineMACAddress() {\n const interfaces = (0, os_1.networkInterfaces)();\n for (const item of Object.values(interfaces)) {\n if (!item)\n continue;\n for (const { mac } of item) {\n if (GCE_MAC_ADDRESS_REGEX.test(mac)) {\n return true;\n }\n }\n }\n return false;\n}\nexports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress;\n/**\n * Determines if the process is running on a Google Compute Engine instance.\n *\n * @returns {boolean} `true` if the process is running on GCE, `false` otherwise.\n */\nfunction isGoogleComputeEngine() {\n return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress();\n}\nexports.isGoogleComputeEngine = isGoogleComputeEngine;\n/**\n * Determines if the process is running on Google Cloud Platform.\n *\n * @returns 
{boolean} `true` if the process is running on GCP, `false` otherwise.\n */\nfunction detectGCPResidency() {\n return isGoogleCloudServerless() || isGoogleComputeEngine();\n}\nexports.detectGCPResidency = detectGCPResidency;\n//# sourceMappingURL=gcp-residency.js.map","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst jsonBigint = require(\"json-bigint\");\nconst gcp_residency_1 = require(\"./gcp-residency\");\nexports.BASE_PATH = '/computeMetadata/v1';\nexports.HOST_ADDRESS = 'http://169.254.169.254';\nexports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.';\nexports.HEADER_NAME = 'Metadata-Flavor';\nexports.HEADER_VALUE = 'Google';\nexports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE });\n/**\n * Metadata server detection override options.\n *\n * Available via `process.env.METADATA_SERVER_DETECTION`.\n */\nexports.METADATA_SERVER_DETECTION = Object.freeze({\n 'assume-present': \"don't try to ping the metadata server, but assume it's present\",\n none: \"don't try to ping the metadata server, but don't try to use it either\",\n 'bios-only': \"treat the result of a BIOS probe as canonical (don't fall back to pinging)\",\n 'ping-only': 'skip the BIOS probe, and go straight to pinging',\n});\n/**\n * Returns the base URL while taking into account the GCE_METADATA_HOST\n * environment variable if it exists.\n *\n * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1.\n */\nfunction getBaseUrl(baseUrl) {\n if (!baseUrl) {\n baseUrl =\n process.env.GCE_METADATA_IP ||\n process.env.GCE_METADATA_HOST ||\n exports.HOST_ADDRESS;\n }\n // If no scheme is provided default to HTTP:\n if (!/^https?:\\/\\//.test(baseUrl)) {\n baseUrl = `http://${baseUrl}`;\n }\n return new URL(exports.BASE_PATH, baseUrl).href;\n}\n// Accepts an options object passed from the user to the API. In previous\n// versions of the API, it referred to a `Request` or an `Axios` request\n// options object. Now it refers to an object with very limited property\n// names. 
This is here to help ensure users don't pass invalid options when\n// they upgrade from 0.4 to 0.5 to 0.8.\nfunction validate(options) {\n Object.keys(options).forEach(key => {\n switch (key) {\n case 'params':\n case 'property':\n case 'headers':\n break;\n case 'qs':\n throw new Error(\"'qs' is not a valid configuration option. Please use 'params' instead.\");\n default:\n throw new Error(`'${key}' is not a valid configuration option.`);\n }\n });\n}\nasync function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) {\n let metadataKey = '';\n let params = {};\n let headers = {};\n if (typeof type === 'object') {\n const metadataAccessor = type;\n metadataKey = metadataAccessor.metadataKey;\n params = metadataAccessor.params || params;\n headers = metadataAccessor.headers || headers;\n noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries;\n fastFail = metadataAccessor.fastFail || fastFail;\n }\n else {\n metadataKey = type;\n }\n if (typeof options === 'string') {\n metadataKey += `/${options}`;\n }\n else {\n validate(options);\n if (options.property) {\n metadataKey += `/${options.property}`;\n }\n headers = options.headers || headers;\n params = options.params || params;\n }\n try {\n const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request;\n const res = await requestMethod({\n url: `${getBaseUrl()}/${metadataKey}`,\n headers: { ...exports.HEADERS, ...headers },\n retryConfig: { noResponseRetries },\n params,\n responseType: 'text',\n timeout: requestTimeout(),\n });\n // NOTE: node.js converts all incoming headers to lower case.\n if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) {\n throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`);\n }\n else if (!res.data) {\n throw new Error('Invalid response from the metadata service');\n }\n if (typeof res.data === 'string') {\n try {\n return jsonBigint.parse(res.data);\n }\n catch (_a) {\n /* ignore */\n }\n }\n return res.data;\n }\n catch (e) {\n const err = e;\n if (err.response && err.response.status !== 200) {\n err.message = `Unsuccessful response status code. ${err.message}`;\n }\n throw e;\n }\n}\nasync function fastFailMetadataRequest(options) {\n const secondaryOptions = {\n ...options,\n url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)),\n };\n // We race a connection between DNS/IP to metadata server. There are a couple\n // reasons for this:\n //\n // 1. the DNS is slow in some GCP environments; by checking both, we might\n // detect the runtime environment signficantly faster.\n // 2. 
we can't just check the IP, which is tarpitted and slow to respond\n // on a user's local machine.\n //\n // Additional logic has been added to make sure that we don't create an\n // unhandled rejection in scenarios where a failure happens sometime\n // after a success.\n //\n // Note, however, if a failure happens prior to a success, a rejection should\n // occur, this is for folks running locally.\n //\n let responded = false;\n const r1 = (0, gaxios_1.request)(options)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r2;\n }\n else {\n responded = true;\n throw err;\n }\n });\n const r2 = (0, gaxios_1.request)(secondaryOptions)\n .then(res => {\n responded = true;\n return res;\n })\n .catch(err => {\n if (responded) {\n return r1;\n }\n else {\n responded = true;\n throw err;\n }\n });\n return Promise.race([r1, r2]);\n}\n/**\n * Obtain metadata for the current GCE instance.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const serviceAccount: {} = await instance('service-accounts/');\n * const serviceAccountEmail: string = await instance('service-accounts/default/email');\n * ```\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction instance(options) {\n return metadataAccessor('instance', options);\n}\nexports.instance = instance;\n/**\n * Obtain metadata for the current GCP project.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const projectId: string = await project('project-id');\n * const numericProjectId: number = await project('numeric-project-id');\n * ```\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction project(options) {\n return metadataAccessor('project', options);\n}\nexports.project = project;\n/**\n * Obtain metadata for the current universe.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const universeDomain: string = await universe('universe_domain');\n * ```\n */\nfunction universe(options) {\n return metadataAccessor('universe', options);\n}\nexports.universe = universe;\n/**\n * Retrieve metadata items in parallel.\n *\n * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys}\n *\n * @example\n * ```\n * const data = await bulk([\n * {\n * metadataKey: 'instance',\n * },\n * {\n * metadataKey: 'project/project-id',\n * },\n * ] as const);\n *\n * // data.instance;\n * // data['project/project-id'];\n * ```\n *\n * @param properties The metadata properties to retrieve\n * @returns The metadata in `metadatakey:value` format\n */\nasync function bulk(properties) {\n const r = {};\n await Promise.all(properties.map(item => {\n return (async () => {\n const res = await metadataAccessor(item);\n const key = item.metadataKey;\n r[key] = res;\n })();\n }));\n return r;\n}\nexports.bulk = bulk;\n/*\n * How many times should we retry detecting GCP environment.\n */\nfunction detectGCPAvailableRetries() {\n return process.env.DETECT_GCP_RETRIES\n ? 
Number(process.env.DETECT_GCP_RETRIES)\n : 0;\n}\nlet cachedIsAvailableResponse;\n/**\n * Determine if the metadata server is currently available.\n */\nasync function isAvailable() {\n if (process.env.METADATA_SERVER_DETECTION) {\n const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase();\n if (!(value in exports.METADATA_SERVER_DETECTION)) {\n throw new RangeError(`Unknown \\`METADATA_SERVER_DETECTION\\` env variable. Got \\`${value}\\`, but it should be \\`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\\`, or unset`);\n }\n switch (value) {\n case 'assume-present':\n return true;\n case 'none':\n return false;\n case 'bios-only':\n return getGCPResidency();\n case 'ping-only':\n // continue, we want to ping the server\n }\n }\n try {\n // If a user is instantiating several GCP libraries at the same time,\n // this may result in multiple calls to isAvailable(), to detect the\n // runtime environment. We use the same promise for each of these calls\n // to reduce the network load.\n if (cachedIsAvailableResponse === undefined) {\n cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), \n // If the default HOST_ADDRESS has been overridden, we should not\n // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in\n // a non-GCP environment):\n !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST));\n }\n await cachedIsAvailableResponse;\n return true;\n }\n catch (e) {\n const err = e;\n if (process.env.DEBUG_AUTH) {\n console.info(err);\n }\n if (err.type === 'request-timeout') {\n // If running in a GCP environment, metadata endpoint should return\n // within ms.\n return false;\n }\n if (err.response && err.response.status === 404) {\n return false;\n }\n else {\n if (!(err.response && err.response.status === 404) &&\n // A warning is emitted if we see an unexpected err.code, or err.code\n // is not populated:\n (!err.code ||\n ![\n 'EHOSTDOWN',\n 'EHOSTUNREACH',\n 'ENETUNREACH',\n 'ENOENT',\n 'ENOTFOUND',\n 'ECONNREFUSED',\n ].includes(err.code))) {\n let code = 'UNKNOWN';\n if (err.code)\n code = err.code;\n process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning');\n }\n // Failure to resolve the metadata service means that it is not available.\n return false;\n }\n }\n}\nexports.isAvailable = isAvailable;\n/**\n * reset the memoized isAvailable() lookup.\n */\nfunction resetIsAvailableCache() {\n cachedIsAvailableResponse = undefined;\n}\nexports.resetIsAvailableCache = resetIsAvailableCache;\n/**\n * A cache for the detected GCP Residency.\n */\nexports.gcpResidencyCache = null;\n/**\n * Detects GCP Residency.\n * Caches results to reduce costs for subsequent calls.\n *\n * @see setGCPResidency for setting\n */\nfunction getGCPResidency() {\n if (exports.gcpResidencyCache === null) {\n setGCPResidency();\n }\n return exports.gcpResidencyCache;\n}\nexports.getGCPResidency = getGCPResidency;\n/**\n * Sets the detected GCP Residency.\n * Useful for forcing metadata server detection behavior.\n *\n * Set `null` to autodetect the environment (default behavior).\n * @see getGCPResidency for getting\n */\nfunction setGCPResidency(value = null) {\n exports.gcpResidencyCache = value !== null ? 
value : (0, gcp_residency_1.detectGCPResidency)();\n}\nexports.setGCPResidency = setGCPResidency;\n/**\n * Obtain the timeout for requests to the metadata server.\n *\n * In certain environments and conditions requests can take longer than\n * the default timeout to complete. This function will determine the\n * appropriate timeout based on the environment.\n *\n * @returns {number} a request timeout duration in milliseconds.\n */\nfunction requestTimeout() {\n return getGCPResidency() ? 0 : 3000;\n}\nexports.requestTimeout = requestTimeout;\n__exportStar(require(\"./gcp-residency\"), exports);\n//# sourceMappingURL=index.js.map","\"use strict\";\n// Copyright 2012 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0;\nconst events_1 = require(\"events\");\nconst transporters_1 = require(\"../transporters\");\nconst util_1 = require(\"../util\");\n/**\n * The default cloud universe\n *\n * @see {@link AuthJSONOptions.universe_domain}\n */\nexports.DEFAULT_UNIVERSE = 'googleapis.com';\n/**\n * The default {@link AuthClientOptions.eagerRefreshThresholdMillis}\n */\nexports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000;\nclass AuthClient extends events_1.EventEmitter {\n constructor(opts = {}) {\n var _a, _b, _c, _d, _e;\n super();\n this.credentials = {};\n this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS;\n this.forceRefreshOnFailure = false;\n this.universeDomain = exports.DEFAULT_UNIVERSE;\n const options = (0, util_1.originalOrCamelOptions)(opts);\n // Shared auth options\n this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null;\n this.quotaProjectId = options.get('quota_project_id');\n this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {};\n this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE;\n // Shared client options\n this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter();\n if (opts.transporterOptions) {\n this.transporter.defaults = opts.transporterOptions;\n }\n if (opts.eagerRefreshThresholdMillis) {\n this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false;\n }\n /**\n * Sets the auth credentials.\n */\n setCredentials(credentials) {\n this.credentials = credentials;\n }\n /**\n * Append additional headers, e.g., x-goog-user-project, shared across the\n * classes inheriting AuthClient. 
This method should be used by any method\n * that overrides getRequestMetadataAsync(), which is a shared helper for\n * setting request information in both gRPC and HTTP API calls.\n *\n * @param headers object to append additional headers to.\n */\n addSharedMetadataHeaders(headers) {\n // quota_project_id, stored in application_default_credentials.json, is set in\n // the x-goog-user-project header, to indicate an alternate account for\n // billing and quota:\n if (!headers['x-goog-user-project'] && // don't override a value the user sets.\n this.quotaProjectId) {\n headers['x-goog-user-project'] = this.quotaProjectId;\n }\n return headers;\n }\n}\nexports.AuthClient = AuthClient;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsClient = void 0;\nconst awsrequestsigner_1 = require(\"./awsrequestsigner\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n/**\n * AWS external account client. This is used for AWS workloads, where\n * AWS STS GetCallerIdentity serialized signed requests are exchanged for\n * GCP access token.\n */\nclass AwsClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates an AwsClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid AWS credential.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n this.environmentId = options.credential_source.environment_id;\n // This is only required if the AWS region is not available in the\n // AWS_REGION or AWS_DEFAULT_REGION environment variables.\n this.regionUrl = options.credential_source.region_url;\n // This is only required if AWS security credentials are not available in\n // environment variables.\n this.securityCredentialsUrl = options.credential_source.url;\n this.regionalCredVerificationUrl =\n options.credential_source.regional_cred_verification_url;\n this.imdsV2SessionTokenUrl =\n options.credential_source.imdsv2_session_token_url;\n this.awsRequestSigner = null;\n this.region = '';\n this.credentialSourceType = 'aws';\n // Data validators.\n this.validateEnvironmentId();\n }\n validateEnvironmentId() {\n var _a;\n const match = (_a = this.environmentId) === null || _a === void 0 ? 
void 0 : _a.match(/^(aws)(\\d+)$/);\n if (!match || !this.regionalCredVerificationUrl) {\n throw new Error('No valid AWS \"credential_source\" provided');\n }\n else if (parseInt(match[2], 10) !== 1) {\n throw new Error(`aws version \"${match[2]}\" is not supported in the current build.`);\n }\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this uses a serialized AWS signed request to the STS GetCallerIdentity\n * endpoint.\n * The logic is summarized as:\n * 1. If imdsv2_session_token_url is provided in the credential source, then\n * fetch the aws session token and include it in the headers of the\n * metadata requests. This is a requirement for IDMSv2 but optional\n * for IDMSv1.\n * 2. Retrieve AWS region from availability-zone.\n * 3a. Check AWS credentials in environment variables. If not found, get\n * from security-credentials endpoint.\n * 3b. Get AWS credentials from security-credentials endpoint. In order\n * to retrieve this, the AWS role needs to be determined by calling\n * security-credentials endpoint without any argument. Then the\n * credentials can be retrieved via: security-credentials/role_name\n * 4. Generate the signed request to AWS STS GetCallerIdentity action.\n * 5. Inject x-goog-cloud-target-resource into header and serialize the\n * signed request. This will be the subject-token to pass to GCP STS.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Initialize AWS request signer if not already initialized.\n if (!this.awsRequestSigner) {\n const metadataHeaders = {};\n // Only retrieve the IMDSv2 session token if both the security credentials and region are\n // not retrievable through the environment.\n // The credential config contains all the URLs by default but clients may be running this\n // where the metadata server is not available and returning the credentials through the environment.\n // Removing this check may break them.\n if (this.shouldUseMetadataServer() && this.imdsV2SessionTokenUrl) {\n metadataHeaders['x-aws-ec2-metadata-token'] =\n await this.getImdsV2SessionToken();\n }\n this.region = await this.getAwsRegion(metadataHeaders);\n this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => {\n // Check environment variables for permanent credentials first.\n // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html\n if (this.securityCredentialsFromEnv) {\n return this.securityCredentialsFromEnv;\n }\n // Since the role on a VM can change, we don't need to cache it.\n const roleName = await this.getAwsRoleName(metadataHeaders);\n // Temporary credentials typically last for several hours.\n // Expiration is returned in response.\n // Consider future optimization of this logic to cache AWS tokens\n // until their natural expiration.\n const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders);\n return {\n accessKeyId: awsCreds.AccessKeyId,\n secretAccessKey: awsCreds.SecretAccessKey,\n token: awsCreds.Token,\n };\n }, this.region);\n }\n // Generate signed request to AWS STS GetCallerIdentity API.\n // Use the required regional endpoint. 
Otherwise, the request will fail.\n const options = await this.awsRequestSigner.getRequestOptions({\n url: this.regionalCredVerificationUrl.replace('{region}', this.region),\n method: 'POST',\n });\n // The GCP STS endpoint expects the headers to be formatted as:\n // [\n // {key: 'x-amz-date', value: '...'},\n // {key: 'Authorization', value: '...'},\n // ...\n // ]\n // And then serialized as:\n // encodeURIComponent(JSON.stringify({\n // url: '...',\n // method: 'POST',\n // headers: [{key: 'x-amz-date', value: '...'}, ...]\n // }))\n const reformattedHeader = [];\n const extendedHeaders = Object.assign({\n // The full, canonical resource name of the workload identity pool\n // provider, with or without the HTTPS prefix.\n // Including this header as part of the signature is recommended to\n // ensure data integrity.\n 'x-goog-cloud-target-resource': this.audience,\n }, options.headers);\n // Reformat header to GCP STS expected format.\n for (const key in extendedHeaders) {\n reformattedHeader.push({\n key,\n value: extendedHeaders[key],\n });\n }\n // Serialize the reformatted signed request.\n return encodeURIComponent(JSON.stringify({\n url: options.url,\n method: options.method,\n headers: reformattedHeader,\n }));\n }\n /**\n * @return A promise that resolves with the IMDSv2 Session Token.\n */\n async getImdsV2SessionToken() {\n const opts = {\n url: this.imdsV2SessionTokenUrl,\n method: 'PUT',\n responseType: 'text',\n headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' },\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the current AWS region.\n */\n async getAwsRegion(headers) {\n // Priority order for region determination:\n // AWS_REGION > AWS_DEFAULT_REGION > metadata server.\n if (this.regionFromEnv) {\n return this.regionFromEnv;\n }\n if (!this.regionUrl) {\n throw new Error('Unable to determine AWS region due to missing ' +\n '\"options.credential_source.region_url\"');\n }\n const opts = {\n url: this.regionUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n // Remove last character. For example, if us-east-2b is returned,\n // the region would be us-east-2.\n return response.data.substr(0, response.data.length - 1);\n }\n /**\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the assigned role to the current\n * AWS VM. 
This is needed for calling the security-credentials endpoint.\n */\n async getAwsRoleName(headers) {\n if (!this.securityCredentialsUrl) {\n throw new Error('Unable to determine AWS role name due to missing ' +\n '\"options.credential_source.url\"');\n }\n const opts = {\n url: this.securityCredentialsUrl,\n method: 'GET',\n responseType: 'text',\n headers: headers,\n };\n const response = await this.transporter.request(opts);\n return response.data;\n }\n /**\n * Retrieves the temporary AWS credentials by calling the security-credentials\n * endpoint as specified in the `credential_source` object.\n * @param roleName The role attached to the current VM.\n * @param headers The headers to be used in the metadata request.\n * @return A promise that resolves with the temporary AWS credentials\n * needed for creating the GetCallerIdentity signed request.\n */\n async getAwsSecurityCredentials(roleName, headers) {\n const response = await this.transporter.request({\n url: `${this.securityCredentialsUrl}/${roleName}`,\n responseType: 'json',\n headers: headers,\n });\n return response.data;\n }\n shouldUseMetadataServer() {\n // The metadata server must be used when either the AWS region or AWS security\n // credentials cannot be retrieved through their defined environment variables.\n return !this.regionFromEnv || !this.securityCredentialsFromEnv;\n }\n get regionFromEnv() {\n // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION.\n // Only one is required.\n return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null);\n }\n get securityCredentialsFromEnv() {\n // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required.\n if (process.env['AWS_ACCESS_KEY_ID'] &&\n process.env['AWS_SECRET_ACCESS_KEY']) {\n return {\n accessKeyId: process.env['AWS_ACCESS_KEY_ID'],\n secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'],\n token: process.env['AWS_SESSION_TOKEN'],\n };\n }\n return null;\n }\n}\nexports.AwsClient = AwsClient;\nAwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254';\nAwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254';\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AwsRequestSigner = void 0;\nconst crypto_1 = require(\"../crypto/crypto\");\n/** AWS Signature Version 4 signing algorithm identifier. 
*/\nconst AWS_ALGORITHM = 'AWS4-HMAC-SHA256';\n/**\n * The termination string for the AWS credential scope value as defined in\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n */\nconst AWS_REQUEST_TYPE = 'aws4_request';\n/**\n * Implements an AWS API request signer based on the AWS Signature Version 4\n * signing process.\n * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html\n */\nclass AwsRequestSigner {\n /**\n * Instantiates an AWS API request signer used to send authenticated signed\n * requests to AWS APIs based on the AWS Signature Version 4 signing process.\n * This also provides a mechanism to generate the signed request without\n * sending it.\n * @param getCredentials A mechanism to retrieve AWS security credentials\n * when needed.\n * @param region The AWS region to use.\n */\n constructor(getCredentials, region) {\n this.getCredentials = getCredentials;\n this.region = region;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Generates the signed request for the provided HTTP request for calling\n * an AWS API. This follows the steps described at:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html\n * @param amzOptions The AWS request options that need to be signed.\n * @return A promise that resolves with the GaxiosOptions containing the\n * signed HTTP request parameters.\n */\n async getRequestOptions(amzOptions) {\n if (!amzOptions.url) {\n throw new Error('\"url\" is required in \"amzOptions\"');\n }\n // Stringify JSON requests. This will be set in the request body of the\n // generated signed request.\n const requestPayloadData = typeof amzOptions.data === 'object'\n ? JSON.stringify(amzOptions.data)\n : amzOptions.data;\n const url = amzOptions.url;\n const method = amzOptions.method || 'GET';\n const requestPayload = amzOptions.body || requestPayloadData;\n const additionalAmzHeaders = amzOptions.headers;\n const awsSecurityCredentials = await this.getCredentials();\n const uri = new URL(url);\n const headerMap = await generateAuthenticationHeaderMap({\n crypto: this.crypto,\n host: uri.host,\n canonicalUri: uri.pathname,\n canonicalQuerystring: uri.search.substr(1),\n method,\n region: this.region,\n securityCredentials: awsSecurityCredentials,\n requestPayload,\n additionalAmzHeaders,\n });\n // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc.\n const headers = Object.assign(\n // Add x-amz-date if available.\n headerMap.amzDate ? 
{ 'x-amz-date': headerMap.amzDate } : {}, {\n Authorization: headerMap.authorizationHeader,\n host: uri.host,\n }, additionalAmzHeaders || {});\n if (awsSecurityCredentials.token) {\n Object.assign(headers, {\n 'x-amz-security-token': awsSecurityCredentials.token,\n });\n }\n const awsSignedReq = {\n url,\n method: method,\n headers,\n };\n if (typeof requestPayload !== 'undefined') {\n awsSignedReq.body = requestPayload;\n }\n return awsSignedReq;\n }\n}\nexports.AwsRequestSigner = AwsRequestSigner;\n/**\n * Creates the HMAC-SHA256 hash of the provided message using the\n * provided key.\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The HMAC-SHA256 key to use.\n * @param msg The message to hash.\n * @return The computed hash bytes.\n */\nasync function sign(crypto, key, msg) {\n return await crypto.signWithHmacSha256(key, msg);\n}\n/**\n * Calculates the signing key used to calculate the signature for\n * AWS Signature Version 4 based on:\n * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n *\n * @param crypto The crypto instance used to facilitate cryptographic\n * operations.\n * @param key The AWS secret access key.\n * @param dateStamp The '%Y%m%d' date format.\n * @param region The AWS region.\n * @param serviceName The AWS service name, eg. sts.\n * @return The signing key bytes.\n */\nasync function getSigningKey(crypto, key, dateStamp, region, serviceName) {\n const kDate = await sign(crypto, `AWS4${key}`, dateStamp);\n const kRegion = await sign(crypto, kDate, region);\n const kService = await sign(crypto, kRegion, serviceName);\n const kSigning = await sign(crypto, kService, 'aws4_request');\n return kSigning;\n}\n/**\n * Generates the authentication header map needed for generating the AWS\n * Signature Version 4 signed request.\n *\n * @param option The options needed to compute the authentication header map.\n * @return The AWS authentication header map which constitutes of the following\n * components: amz-date, authorization header and canonical query string.\n */\nasync function generateAuthenticationHeaderMap(options) {\n const additionalAmzHeaders = options.additionalAmzHeaders || {};\n const requestPayload = options.requestPayload || '';\n // iam.amazonaws.com host => iam service.\n // sts.us-east-2.amazonaws.com => sts service.\n const serviceName = options.host.split('.')[0];\n const now = new Date();\n // Format: '%Y%m%dT%H%M%SZ'.\n const amzDate = now\n .toISOString()\n .replace(/[-:]/g, '')\n .replace(/\\.[0-9]+/, '');\n // Format: '%Y%m%d'.\n const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, '');\n // Change all additional headers to be lower case.\n const reformattedAdditionalAmzHeaders = {};\n Object.keys(additionalAmzHeaders).forEach(key => {\n reformattedAdditionalAmzHeaders[key.toLowerCase()] =\n additionalAmzHeaders[key];\n });\n // Add AWS token if available.\n if (options.securityCredentials.token) {\n reformattedAdditionalAmzHeaders['x-amz-security-token'] =\n options.securityCredentials.token;\n }\n // Header keys need to be sorted alphabetically.\n const amzHeaders = Object.assign({\n host: options.host,\n }, \n // Previously the date was not fixed with x-amz- and could be provided manually.\n // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req\n reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders);\n let canonicalHeaders = '';\n const signedHeadersList = Object.keys(amzHeaders).sort();\n signedHeadersList.forEach(key => {\n canonicalHeaders += `${key}:${amzHeaders[key]}\\n`;\n });\n const signedHeaders = signedHeadersList.join(';');\n const payloadHash = await options.crypto.sha256DigestHex(requestPayload);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html\n const canonicalRequest = `${options.method}\\n` +\n `${options.canonicalUri}\\n` +\n `${options.canonicalQuerystring}\\n` +\n `${canonicalHeaders}\\n` +\n `${signedHeaders}\\n` +\n `${payloadHash}`;\n const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`;\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html\n const stringToSign = `${AWS_ALGORITHM}\\n` +\n `${amzDate}\\n` +\n `${credentialScope}\\n` +\n (await options.crypto.sha256DigestHex(canonicalRequest));\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html\n const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName);\n const signature = await sign(options.crypto, signingKey, stringToSign);\n // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html\n const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` +\n `${credentialScope}, SignedHeaders=${signedHeaders}, ` +\n `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`;\n return {\n // Do not return x-amz-date if date is available.\n amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate,\n authorizationHeader,\n canonicalQuerystring: options.canonicalQuerystring,\n };\n}\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\nconst util_1 = require(\"../util\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/** The default OAuth scope to request when none is provided. 
*/\nconst DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform';\n/** Default impersonated token lifespan in seconds.*/\nconst DEFAULT_TOKEN_LIFESPAN = 3600;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * The credentials JSON file type for external account clients.\n * There are 3 types of JSON configs:\n * 1. authorized_user => Google end user credential\n * 2. service_account => Google service account credential\n * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s)\n */\nexports.EXTERNAL_ACCOUNT_TYPE = 'external_account';\n/**\n * Cloud resource manager URL used to retrieve project information.\n *\n * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead\n **/\nexports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/';\n/** The workforce audience pattern. */\nconst WORKFORCE_AUDIENCE_PATTERN = '//iam\\\\.googleapis\\\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+';\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst pkg = require('../../../package.json');\n/**\n * For backwards compatibility.\n */\nvar authclient_2 = require(\"./authclient\");\nObject.defineProperty(exports, \"DEFAULT_UNIVERSE\", { enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } });\n/**\n * Base external account client. This is used to instantiate AuthClients for\n * exchanging external account credentials for GCP access token and authorizing\n * requests to GCP APIs.\n * The base class implements common logic for exchanging various type of\n * external credentials for GCP access token. The logic of determining and\n * retrieving the external credential based on the environment and\n * credential_source will be left for the subclasses.\n */\nclass BaseExternalAccountClient extends authclient_1.AuthClient {\n /**\n * Instantiate a BaseExternalAccountClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file. 
The camelCased options\n * are aliases for the snake_cased options.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super({ ...options, ...additionalOptions });\n const opts = (0, util_1.originalOrCamelOptions)(options);\n if (opts.get('type') !== exports.EXTERNAL_ACCOUNT_TYPE) {\n throw new Error(`Expected \"${exports.EXTERNAL_ACCOUNT_TYPE}\" type but ` +\n `received \"${options.type}\"`);\n }\n const clientId = opts.get('client_id');\n const clientSecret = opts.get('client_secret');\n const tokenUrl = opts.get('token_url');\n const subjectTokenType = opts.get('subject_token_type');\n const workforcePoolUserProject = opts.get('workforce_pool_user_project');\n const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url');\n const serviceAccountImpersonation = opts.get('service_account_impersonation');\n const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds');\n this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') ||\n `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`);\n if (clientId) {\n this.clientAuth = {\n confidentialClientType: 'basic',\n clientId,\n clientSecret,\n };\n }\n this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth);\n this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE];\n this.cachedAccessToken = null;\n this.audience = opts.get('audience');\n this.subjectTokenType = subjectTokenType;\n this.workforcePoolUserProject = workforcePoolUserProject;\n const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN);\n if (this.workforcePoolUserProject &&\n !this.audience.match(workforceAudiencePattern)) {\n throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' +\n 'credentials.');\n }\n this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl;\n this.serviceAccountImpersonationLifetime =\n serviceAccountImpersonationLifetime;\n if (this.serviceAccountImpersonationLifetime) {\n this.configLifetimeRequested = true;\n }\n else {\n this.configLifetimeRequested = false;\n this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN;\n }\n this.projectNumber = this.getProjectNumber(this.audience);\n }\n /** The service account email to be impersonated, if available. */\n getServiceAccountEmail() {\n var _a;\n if (this.serviceAccountImpersonationUrl) {\n if (this.serviceAccountImpersonationUrl.length > 256) {\n /**\n * Prevents DOS attacks.\n * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84}\n **/\n throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`);\n }\n // Parse email from URL. The formal looks as follows:\n // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken\n const re = /serviceAccounts\\/(?[^:]+):generateAccessToken$/;\n const result = re.exec(this.serviceAccountImpersonationUrl);\n return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? 
void 0 : _a.email) || null;\n }\n return null;\n }\n /**\n * Provides a mechanism to inject GCP access tokens directly.\n * When the provided credential expires, a new credential, using the\n * external account options, is retrieved.\n * @param credentials The Credentials object to set on the current client.\n */\n setCredentials(credentials) {\n super.setCredentials(credentials);\n this.cachedAccessToken = credentials;\n }\n /**\n * @return A promise that resolves with the current GCP access token\n * response. If the current credential is expired, a new one is retrieved.\n */\n async getAccessToken() {\n // If cached access token is unavailable or expired, force refresh.\n if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return GCP access token in GetAccessTokenResponse format.\n return {\n token: this.cachedAccessToken.access_token,\n res: this.cachedAccessToken.res,\n };\n }\n /**\n * The main authentication interface. It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * The result has the form:\n * { Authorization: 'Bearer ' }\n */\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * @return A promise that resolves with the project ID corresponding to the\n * current workload identity pool or current workforce pool if\n * determinable. 
For workforce pool credential, it returns the project ID\n * corresponding to the workforcePoolUserProject.\n * This is introduced to match the current pattern of using the Auth\n * library:\n * const projectId = await auth.getProjectId();\n * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`;\n * const res = await client.request({ url });\n * The resource may not have permission\n * (resourcemanager.projects.get) to call this API or the required\n * scopes may not be selected:\n * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes\n */\n async getProjectId() {\n const projectNumber = this.projectNumber || this.workforcePoolUserProject;\n if (this.projectId) {\n // Return previously determined project ID.\n return this.projectId;\n }\n else if (projectNumber) {\n // Preferable not to use request() to avoid retrial policies.\n const headers = await this.getRequestHeaders();\n const response = await this.transporter.request({\n headers,\n url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`,\n responseType: 'json',\n });\n this.projectId = response.data.projectId;\n return this.projectId;\n }\n return null;\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * External credentials are exchanged for GCP access tokens via the token\n * exchange endpoint and other settings provided in the client options\n * object.\n * If the service_account_impersonation_url is provided, an additional\n * step to exchange the external account GCP access token for a service\n * account impersonated token is performed.\n * @return A promise that resolves with the fresh GCP access tokens.\n */\n async refreshAccessTokenAsync() {\n // Retrieve the external credential.\n const subjectToken = await this.retrieveSubjectToken();\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n audience: this.audience,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken,\n subjectTokenType: this.subjectTokenType,\n // generateAccessToken requires the provided access token to have\n // scopes:\n // https://www.googleapis.com/auth/iam or\n // https://www.googleapis.com/auth/cloud-platform\n // The new service account access token scopes will match the user\n // provided ones.\n scope: this.serviceAccountImpersonationUrl\n ? [DEFAULT_OAUTH_SCOPE]\n : this.getScopesArray(),\n };\n // Exchange the external credentials for a GCP access token.\n // Client auth is prioritized over passing the workforcePoolUserProject\n // parameter for STS token exchange.\n const additionalOptions = !this.clientAuth && this.workforcePoolUserProject\n ? 
{ userProject: this.workforcePoolUserProject }\n : undefined;\n const additionalHeaders = {\n 'x-goog-api-client': this.getMetricsHeaderValue(),\n };\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions);\n if (this.serviceAccountImpersonationUrl) {\n this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token);\n }\n else if (stsResponse.expires_in) {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: new Date().getTime() + stsResponse.expires_in * 1000,\n res: stsResponse.res,\n };\n }\n else {\n // Save response in cached access token.\n this.cachedAccessToken = {\n access_token: stsResponse.access_token,\n res: stsResponse.res,\n };\n }\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedAccessToken.expiry_date,\n access_token: this.cachedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedAccessToken;\n }\n /**\n * Returns the workload identity pool project number if it is determinable\n * from the audience resource name.\n * @param audience The STS audience used to determine the project number.\n * @return The project number associated with the workload identity pool, if\n * this can be determined from the STS audience field. Otherwise, null is\n * returned.\n */\n getProjectNumber(audience) {\n // STS audience pattern:\n // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...\n const match = audience.match(/\\/projects\\/([^/]+)/);\n if (!match) {\n return null;\n }\n return match[1];\n }\n /**\n * Exchanges an external account GCP access token for a service\n * account impersonated access token using iamcredentials\n * GenerateAccessToken API.\n * @param token The access token to exchange for a service account access\n * token.\n * @return A promise that resolves with the service account impersonated\n * credentials response.\n */\n async getImpersonatedAccessToken(token) {\n const opts = {\n url: this.serviceAccountImpersonationUrl,\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n },\n data: {\n scope: this.getScopesArray(),\n lifetime: this.serviceAccountImpersonationLifetime + 's',\n },\n responseType: 'json',\n };\n const response = await this.transporter.request(opts);\n const successResponse = response.data;\n return {\n access_token: successResponse.accessToken,\n // Convert from ISO format to timestamp.\n expiry_date: new Date(successResponse.expireTime).getTime(),\n res: response,\n };\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param accessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(accessToken) {\n const now = new Date().getTime();\n return accessToken.expiry_date\n ? 
now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n /**\n * @return The list of scopes for the requested GCP access token.\n */\n getScopesArray() {\n // Since scopes can be provided as string or array, the type should\n // be normalized.\n if (typeof this.scopes === 'string') {\n return [this.scopes];\n }\n return this.scopes || [DEFAULT_OAUTH_SCOPE];\n }\n getMetricsHeaderValue() {\n const nodeVersion = process.version.replace(/^v/, '');\n const saImpersonation = this.serviceAccountImpersonationUrl !== undefined;\n const credentialSourceType = this.credentialSourceType\n ? this.credentialSourceType\n : 'unknown';\n return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`;\n }\n}\nexports.BaseExternalAccountClient = BaseExternalAccountClient;\n","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Compute = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst oauth2client_1 = require(\"./oauth2client\");\nclass Compute extends oauth2client_1.OAuth2Client {\n /**\n * Google Compute Engine service account credentials.\n *\n * Retrieve access token from the metadata server.\n * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications\n */\n constructor(options = {}) {\n super(options);\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' };\n this.serviceAccountEmail = options.serviceAccountEmail || 'default';\n this.scopes = Array.isArray(options.scopes)\n ? options.scopes\n : options.scopes\n ? 
[options.scopes]\n : [];\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`;\n let data;\n try {\n const instanceOptions = {\n property: tokenPath,\n };\n if (this.scopes.length > 0) {\n instanceOptions.params = {\n scopes: this.scopes.join(','),\n };\n }\n data = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError) {\n e.message = `Could not refresh access token: ${e.message}`;\n this.wrapError(e);\n }\n throw e;\n }\n const tokens = data;\n if (data && data.expires_in) {\n tokens.expiry_date = new Date().getTime() + data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res: null };\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` +\n `?format=full&audience=${targetAudience}`;\n let idToken;\n try {\n const instanceOptions = {\n property: idTokenPath,\n };\n idToken = await gcpMetadata.instance(instanceOptions);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Could not fetch ID token: ${e.message}`;\n }\n throw e;\n }\n return idToken;\n }\n wrapError(e) {\n const res = e.response;\n if (res && res.status) {\n e.status = res.status;\n if (res.status === 403) {\n e.message =\n 'A Forbidden error was returned while attempting to retrieve an access ' +\n 'token for the Compute Engine built-in service account. This may be because the Compute ' +\n 'Engine instance does not have the correct permission scopes specified: ' +\n e.message;\n }\n else if (res.status === 404) {\n e.message =\n 'A Not Found error was returned while attempting to retrieve an access' +\n 'token for the Compute Engine built-in service account. 
This may be because the Compute ' +\n 'Engine instance does not have any permission scopes specified: ' +\n e.message;\n }\n }\n }\n}\nexports.Compute = Compute;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0;\nconst stream = require(\"stream\");\nconst authclient_1 = require(\"./authclient\");\nconst sts = require(\"./stscredentials\");\n/**\n * The required token exchange grant_type: rfc8693#section-2.1\n */\nconst STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange';\n/**\n * The requested token exchange requested_token_type: rfc8693#section-2.1\n */\nconst STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/**\n * The requested token exchange subject_token_type: rfc8693#section-2.1\n */\nconst STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token';\n/**\n * The maximum number of access boundary rules a Credential Access Boundary\n * can contain.\n */\nexports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10;\n/**\n * Offset to take into account network delays and server clock skews.\n */\nexports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000;\n/**\n * Defines a set of Google credentials that are downscoped from an existing set\n * of Google OAuth2 credentials. This is useful to restrict the Identity and\n * Access Management (IAM) permissions that a short-lived credential can use.\n * The common pattern of usage is to have a token broker with elevated access\n * generate these downscoped credentials from higher access source credentials\n * and pass the downscoped short-lived access tokens to a token consumer via\n * some secure authenticated channel for limited access to Google Cloud Storage\n * resources.\n */\nclass DownscopedClient extends authclient_1.AuthClient {\n /**\n * Instantiates a downscoped client object using the provided source\n * AuthClient and credential access boundary rules.\n * To downscope permissions of a source AuthClient, a Credential Access\n * Boundary that specifies which resources the new credential can access, as\n * well as an upper bound on the permissions that are available on each\n * resource, has to be defined. A downscoped client can then be instantiated\n * using the source AuthClient and the Credential Access Boundary.\n * @param authClient The source AuthClient to be downscoped based on the\n * provided Credential Access Boundary rules.\n * @param credentialAccessBoundary The Credential Access Boundary which\n * contains a list of access boundary rules. 
Each rule contains information\n * on the resource that the rule applies to, the upper bound of the\n * permissions that are available on that resource and an optional\n * condition to further restrict permissions.\n * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.**\n * Optional additional behavior customization options.\n * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.**\n * Optional quota project id for setting up in the x-goog-user-project header.\n */\n constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) {\n super({ ...additionalOptions, quotaProjectId });\n this.authClient = authClient;\n this.credentialAccessBoundary = credentialAccessBoundary;\n // Check 1-10 Access Boundary Rules are defined within Credential Access\n // Boundary.\n if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) {\n throw new Error('At least one access boundary rule needs to be defined.');\n }\n else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length >\n exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) {\n throw new Error('The provided access boundary has more than ' +\n `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`);\n }\n // Check at least one permission should be defined in each Access Boundary\n // Rule.\n for (const rule of credentialAccessBoundary.accessBoundary\n .accessBoundaryRules) {\n if (rule.availablePermissions.length === 0) {\n throw new Error('At least one permission should be defined in access boundary rules.');\n }\n }\n this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`);\n this.cachedDownscopedAccessToken = null;\n }\n /**\n * Provides a mechanism to inject Downscoped access tokens directly.\n * The expiry_date field is required to facilitate determination of the token\n * expiration which would make it easier for the token consumer to handle.\n * @param credentials The Credentials object to set on the current client.\n */\n setCredentials(credentials) {\n if (!credentials.expiry_date) {\n throw new Error('The access token expiry_date field is missing in the provided ' +\n 'credentials.');\n }\n super.setCredentials(credentials);\n this.cachedDownscopedAccessToken = credentials;\n }\n async getAccessToken() {\n // If the cached access token is unavailable or expired, force refresh.\n // The Downscoped access token will be returned in\n // DownscopedAccessTokenResponse format.\n if (!this.cachedDownscopedAccessToken ||\n this.isExpired(this.cachedDownscopedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return Downscoped access token in DownscopedAccessTokenResponse format.\n return {\n token: this.cachedDownscopedAccessToken.access_token,\n expirationTime: this.cachedDownscopedAccessToken.expiry_date,\n res: this.cachedDownscopedAccessToken.res,\n };\n }\n /**\n * The main authentication interface. 
It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * The result has the form:\n * { Authorization: 'Bearer <access_token_value>' }\n */\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * GCP access tokens are retrieved from authclient object/source credential.\n * Then GCP access tokens are exchanged for downscoped access tokens via the\n * token exchange endpoint.\n * @return A promise that resolves with the fresh downscoped access token.\n */\n async refreshAccessTokenAsync() {\n var _a;\n // Retrieve GCP access token from source credential.\n const subjectToken = (await this.authClient.getAccessToken()).token;\n // Construct the STS credentials options.\n const stsCredentialsOptions = {\n grantType: STS_GRANT_TYPE,\n requestedTokenType: STS_REQUEST_TOKEN_TYPE,\n subjectToken: subjectToken,\n subjectTokenType: STS_SUBJECT_TOKEN_TYPE,\n };\n // Exchange the source AuthClient access token for a Downscoped access\n // token.\n const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary);\n /**\n * The STS endpoint will only return the expiration time for the downscoped\n * access token if the original access token represents a service account.\n * The downscoped token's expiration time will always match the source\n * credential expiration. When no expires_in is returned, we can copy the\n * source credential's expiration time.\n */\n const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? 
void 0 : _a.expiry_date) || null;\n const expiryDate = stsResponse.expires_in\n ? new Date().getTime() + stsResponse.expires_in * 1000\n : sourceCredExpireDate;\n // Save response in cached access token.\n this.cachedDownscopedAccessToken = {\n access_token: stsResponse.access_token,\n expiry_date: expiryDate,\n res: stsResponse.res,\n };\n // Save credentials.\n this.credentials = {};\n Object.assign(this.credentials, this.cachedDownscopedAccessToken);\n delete this.credentials.res;\n // Trigger tokens event to notify external listeners.\n this.emit('tokens', {\n refresh_token: null,\n expiry_date: this.cachedDownscopedAccessToken.expiry_date,\n access_token: this.cachedDownscopedAccessToken.access_token,\n token_type: 'Bearer',\n id_token: null,\n });\n // Return the cached access token.\n return this.cachedDownscopedAccessToken;\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param downscopedAccessToken The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(downscopedAccessToken) {\n const now = new Date().getTime();\n return downscopedAccessToken.expiry_date\n ? now >=\n downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.DownscopedClient = DownscopedClient;\n","\"use strict\";\n// Copyright 2018 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getEnv = exports.clear = exports.GCPEnv = void 0;\nconst gcpMetadata = require(\"gcp-metadata\");\nvar GCPEnv;\n(function (GCPEnv) {\n GCPEnv[\"APP_ENGINE\"] = \"APP_ENGINE\";\n GCPEnv[\"KUBERNETES_ENGINE\"] = \"KUBERNETES_ENGINE\";\n GCPEnv[\"CLOUD_FUNCTIONS\"] = \"CLOUD_FUNCTIONS\";\n GCPEnv[\"COMPUTE_ENGINE\"] = \"COMPUTE_ENGINE\";\n GCPEnv[\"CLOUD_RUN\"] = \"CLOUD_RUN\";\n GCPEnv[\"NONE\"] = \"NONE\";\n})(GCPEnv || (exports.GCPEnv = GCPEnv = {}));\nlet envPromise;\nfunction clear() {\n envPromise = undefined;\n}\nexports.clear = clear;\nasync function getEnv() {\n if (envPromise) {\n return envPromise;\n }\n envPromise = getEnvMemoized();\n return envPromise;\n}\nexports.getEnv = getEnv;\nasync function getEnvMemoized() {\n let env = GCPEnv.NONE;\n if (isAppEngine()) {\n env = GCPEnv.APP_ENGINE;\n }\n else if (isCloudFunction()) {\n env = GCPEnv.CLOUD_FUNCTIONS;\n }\n else if (await isComputeEngine()) {\n if (await isKubernetesEngine()) {\n env = GCPEnv.KUBERNETES_ENGINE;\n }\n else if (isCloudRun()) {\n env = GCPEnv.CLOUD_RUN;\n }\n else {\n env = GCPEnv.COMPUTE_ENGINE;\n }\n }\n else {\n env = GCPEnv.NONE;\n }\n return env;\n}\nfunction isAppEngine() {\n return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME);\n}\nfunction isCloudFunction() {\n return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET);\n}\n/**\n * This check only verifies that the environment is running knative.\n * This must be run *after* checking 
for Kubernetes, otherwise it will\n * return a false positive.\n */\nfunction isCloudRun() {\n return !!process.env.K_CONFIGURATION;\n}\nasync function isKubernetesEngine() {\n try {\n await gcpMetadata.instance('attributes/cluster-name');\n return true;\n }\n catch (e) {\n return false;\n }\n}\nasync function isComputeEngine() {\n return gcpMetadata.isAvailable();\n}\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0;\nconst SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2';\nconst OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token';\nconst OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt';\n/**\n * Defines the response of a 3rd party executable run by the pluggable auth client.\n */\nclass ExecutableResponse {\n /**\n * Instantiates an ExecutableResponse instance using the provided JSON object\n * from the output of the executable.\n * @param responseJson Response from a 3rd party executable, loaded from a\n * run of the executable or a cached output file.\n */\n constructor(responseJson) {\n // Check that the required fields exist in the json response.\n if (!responseJson.version) {\n throw new InvalidVersionFieldError(\"Executable response must contain a 'version' field.\");\n }\n if (responseJson.success === undefined) {\n throw new InvalidSuccessFieldError(\"Executable response must contain a 'success' field.\");\n }\n this.version = responseJson.version;\n this.success = responseJson.success;\n // Validate required fields for a successful response.\n if (this.success) {\n this.expirationTime = responseJson.expiration_time;\n this.tokenType = responseJson.token_type;\n // Validate token type field.\n if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 &&\n this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) {\n throw new InvalidTokenTypeFieldError(\"Executable response must contain a 'token_type' field when successful \" +\n `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n // Validate subject token.\n if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) {\n if (!responseJson.saml_response) {\n throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`);\n }\n this.subjectToken = responseJson.saml_response;\n }\n else {\n if (!responseJson.id_token) {\n throw new InvalidSubjectTokenError(\"Executable response must contain a 'id_token' field when \" +\n `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or 
${OIDC_SUBJECT_TOKEN_TYPE2}.`);\n }\n this.subjectToken = responseJson.id_token;\n }\n }\n else {\n // Both code and message must be provided for unsuccessful responses.\n if (!responseJson.code) {\n throw new InvalidCodeFieldError(\"Executable response must contain a 'code' field when unsuccessful.\");\n }\n if (!responseJson.message) {\n throw new InvalidMessageFieldError(\"Executable response must contain a 'message' field when unsuccessful.\");\n }\n this.errorCode = responseJson.code;\n this.errorMessage = responseJson.message;\n }\n }\n /**\n * @return A boolean representing if the response has a valid token. Returns\n * true when the response was successful and the token is not expired.\n */\n isValid() {\n return !this.isExpired() && this.success;\n }\n /**\n * @return A boolean representing if the response is expired. Returns true if the\n * provided timeout has passed.\n */\n isExpired() {\n return (this.expirationTime !== undefined &&\n this.expirationTime < Math.round(Date.now() / 1000));\n }\n}\nexports.ExecutableResponse = ExecutableResponse;\n/**\n * An error thrown by the ExecutableResponse class.\n */\nclass ExecutableResponseError extends Error {\n constructor(message) {\n super(message);\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableResponseError = ExecutableResponseError;\n/**\n * An error thrown when the 'version' field in an executable response is missing or invalid.\n */\nclass InvalidVersionFieldError extends ExecutableResponseError {\n}\nexports.InvalidVersionFieldError = InvalidVersionFieldError;\n/**\n * An error thrown when the 'success' field in an executable response is missing or invalid.\n */\nclass InvalidSuccessFieldError extends ExecutableResponseError {\n}\nexports.InvalidSuccessFieldError = InvalidSuccessFieldError;\n/**\n * An error thrown when the 'expiration_time' field in an executable response is missing or invalid.\n */\nclass InvalidExpirationTimeFieldError extends ExecutableResponseError {\n}\nexports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError;\n/**\n * An error thrown when the 'token_type' field in an executable response is missing or invalid.\n */\nclass InvalidTokenTypeFieldError extends ExecutableResponseError {\n}\nexports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError;\n/**\n * An error thrown when the 'code' field in an executable response is missing or invalid.\n */\nclass InvalidCodeFieldError extends ExecutableResponseError {\n}\nexports.InvalidCodeFieldError = InvalidCodeFieldError;\n/**\n * An error thrown when the 'message' field in an executable response is missing or invalid.\n */\nclass InvalidMessageFieldError extends ExecutableResponseError {\n}\nexports.InvalidMessageFieldError = InvalidMessageFieldError;\n/**\n * An error thrown when the subject token in an executable response is missing or invalid.\n */\nclass InvalidSubjectTokenError extends ExecutableResponseError {\n}\nexports.InvalidSubjectTokenError = InvalidSubjectTokenError;\n","\"use strict\";\n// Copyright 2023 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for 
the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0;\nconst authclient_1 = require(\"./authclient\");\nconst oauth2common_1 = require(\"./oauth2common\");\nconst gaxios_1 = require(\"gaxios\");\nconst stream = require(\"stream\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\n/**\n * The credentials JSON file type for external account authorized user clients.\n */\nexports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user';\n/**\n * Handler for token refresh requests sent to the token_url endpoint for external\n * authorized user credentials.\n */\nclass ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler {\n /**\n * Initializes an ExternalAccountAuthorizedUserHandler instance.\n * @param url The URL of the token refresh endpoint.\n * @param transporter The transporter to use for the refresh request.\n * @param clientAuthentication The client authentication credentials to use\n * for the refresh request.\n */\n constructor(url, transporter, clientAuthentication) {\n super(clientAuthentication);\n this.url = url;\n this.transporter = transporter;\n }\n /**\n * Requests a new access token from the token_url endpoint using the provided\n * refresh token.\n * @param refreshToken The refresh token to use to generate a new access token.\n * @param additionalHeaders Optional additional headers to pass along the\n * request.\n * @return A promise that resolves with the token refresh response containing\n * the requested access token and its expiration time.\n */\n async refreshToken(refreshToken, additionalHeaders) {\n const values = new URLSearchParams({\n grant_type: 'refresh_token',\n refresh_token: refreshToken,\n });\n const headers = {\n 'Content-Type': 'application/x-www-form-urlencoded',\n ...additionalHeaders,\n };\n const opts = {\n url: this.url,\n method: 'POST',\n headers,\n data: values.toString(),\n responseType: 'json',\n };\n // Apply OAuth client authentication.\n this.applyClientAuthenticationOptions(opts);\n try {\n const response = await this.transporter.request(opts);\n // Successful response.\n const tokenRefreshResponse = response.data;\n tokenRefreshResponse.res = response;\n return tokenRefreshResponse;\n }\n catch (error) {\n // Translate error to OAuthError.\n if (error instanceof gaxios_1.GaxiosError && error.response) {\n throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, \n // Preserve other fields from the original error.\n error);\n }\n // Request could fail before the server responds.\n throw error;\n }\n }\n}\n/**\n * External Account Authorized User Client. 
This is used for OAuth2 credentials\n * sourced using external identities through Workforce Identity Federation.\n * Obtaining the initial access and refresh token can be done through the\n * Google Cloud CLI.\n */\nclass ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient {\n /**\n * Instantiates an ExternalAccountAuthorizedUserClient instances using the\n * provided JSON object loaded from a credentials files.\n * An error is throws if the credential is not valid.\n * @param options The external account authorized user option object typically\n * from the external accoutn authorized user JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super({ ...options, ...additionalOptions });\n this.refreshToken = options.refresh_token;\n const clientAuth = {\n confidentialClientType: 'basic',\n clientId: options.client_id,\n clientSecret: options.client_secret,\n };\n this.externalAccountAuthorizedUserHandler =\n new ExternalAccountAuthorizedUserHandler(options.token_url, this.transporter, clientAuth);\n this.cachedAccessToken = null;\n this.quotaProjectId = options.quota_project_id;\n // As threshold could be zero,\n // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the\n // zero value.\n if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {\n this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET;\n }\n else {\n this.eagerRefreshThresholdMillis = additionalOptions\n .eagerRefreshThresholdMillis;\n }\n this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.forceRefreshOnFailure);\n if (options.universe_domain) {\n this.universeDomain = options.universe_domain;\n }\n }\n async getAccessToken() {\n // If cached access token is unavailable or expired, force refresh.\n if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {\n await this.refreshAccessTokenAsync();\n }\n // Return GCP access token in GetAccessTokenResponse format.\n return {\n token: this.cachedAccessToken.access_token,\n res: this.cachedAccessToken.res,\n };\n }\n async getRequestHeaders() {\n const accessTokenResponse = await this.getAccessToken();\n const headers = {\n Authorization: `Bearer ${accessTokenResponse.token}`,\n };\n return this.addSharedMetadataHeaders(headers);\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n /**\n * Authenticates the provided HTTP request, processes it and resolves with the\n * returned response.\n * @param opts The HTTP request options.\n * @param retry Whether the current attempt is a retry after a failed attempt.\n * @return A promise that resolves with the successful response.\n */\n async requestAsync(opts, retry = false) {\n let response;\n try {\n const requestHeaders = await this.getRequestHeaders();\n opts.headers = opts.headers || {};\n if (requestHeaders && requestHeaders['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] =\n requestHeaders['x-goog-user-project'];\n }\n if (requestHeaders && requestHeaders.Authorization) {\n opts.headers.Authorization = requestHeaders.Authorization;\n }\n response = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - forceRefreshOnFailure is true\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n this.forceRefreshOnFailure) {\n await this.refreshAccessTokenAsync();\n return await this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return response;\n }\n /**\n * Forces token refresh, even if unexpired tokens are currently cached.\n * @return A promise that resolves with the refreshed credential.\n */\n async refreshAccessTokenAsync() {\n // Refresh the access token using the refresh token.\n const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken);\n this.cachedAccessToken = {\n access_token: refreshResponse.access_token,\n expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000,\n res: refreshResponse.res,\n };\n if (refreshResponse.refresh_token !== undefined) {\n this.refreshToken = refreshResponse.refresh_token;\n }\n return this.cachedAccessToken;\n }\n /**\n * Returns whether the provided credentials are expired or not.\n * If there is no expiry time, assumes the token is not expired or expiring.\n * @param credentials The credentials to check for expiration.\n * @return Whether the credentials are expired or not.\n */\n isExpired(credentials) {\n const now = new Date().getTime();\n return credentials.expiry_date\n ? 
now >= credentials.expiry_date - this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ExternalAccountClient = void 0;\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst identitypoolclient_1 = require(\"./identitypoolclient\");\nconst awsclient_1 = require(\"./awsclient\");\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\n/**\n * Dummy class with no constructor. Developers are expected to use fromJSON.\n */\nclass ExternalAccountClient {\n constructor() {\n throw new Error('ExternalAccountClients should be initialized via: ' +\n 'ExternalAccountClient.fromJSON(), ' +\n 'directly via explicit constructors, eg. ' +\n 'new AwsClient(options), new IdentityPoolClient(options), new' +\n 'PluggableAuthClientOptions, or via ' +\n 'new GoogleAuth(options).getClient()');\n }\n /**\n * This static method will instantiate the\n * corresponding type of external account credential depending on the\n * underlying credential source.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n * @return A BaseExternalAccountClient instance or null if the options\n * provided do not correspond to an external account credential.\n */\n static fromJSON(options, additionalOptions) {\n var _a, _b;\n if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) {\n return new awsclient_1.AwsClient(options, additionalOptions);\n }\n else if ((_b = options.credential_source) === null || _b === void 0 ? 
void 0 : _b.executable) {\n return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions);\n }\n else {\n return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions);\n }\n }\n else {\n return null;\n }\n }\n}\nexports.ExternalAccountClient = ExternalAccountClient;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0;\nconst child_process_1 = require(\"child_process\");\nconst fs = require(\"fs\");\nconst gcpMetadata = require(\"gcp-metadata\");\nconst os = require(\"os\");\nconst path = require(\"path\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst transporters_1 = require(\"../transporters\");\nconst computeclient_1 = require(\"./computeclient\");\nconst idtokenclient_1 = require(\"./idtokenclient\");\nconst envDetect_1 = require(\"./envDetect\");\nconst jwtclient_1 = require(\"./jwtclient\");\nconst refreshclient_1 = require(\"./refreshclient\");\nconst impersonated_1 = require(\"./impersonated\");\nconst externalclient_1 = require(\"./externalclient\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst authclient_1 = require(\"./authclient\");\nconst externalAccountAuthorizedUserClient_1 = require(\"./externalAccountAuthorizedUserClient\");\nconst util_1 = require(\"../util\");\nexports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';\nconst GoogleAuthExceptionMessages = {\n NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \\n' +\n 'To learn more about authentication and Google APIs, visit: \\n' +\n 'https://cloud.google.com/docs/authentication/getting-started',\n NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. 
\\n' +\n 'To learn more about authentication and Google APIs, visit: \\n' +\n 'https://cloud.google.com/docs/authentication/getting-started',\n NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\\n' +\n 'To learn more about Universe Domain retrieval, visit: \\n' +\n 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys',\n};\nclass GoogleAuth {\n // Note: this properly is only public to satisify unit tests.\n // https://github.com/Microsoft/TypeScript/issues/5228\n get isGCE() {\n return this.checkIsGCE;\n }\n /**\n * Configuration is resolved in the following order of precedence:\n * - {@link GoogleAuthOptions.credentials `credentials`}\n * - {@link GoogleAuthOptions.keyFilename `keyFilename`}\n * - {@link GoogleAuthOptions.keyFile `keyFile`}\n *\n * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the\n * {@link AuthClient `AuthClient`s}.\n *\n * @param opts\n */\n constructor(opts) {\n /**\n * Caches a value indicating whether the auth layer is running on Google\n * Compute Engine.\n * @private\n */\n this.checkIsGCE = undefined;\n // To save the contents of the JSON credential file\n this.jsonContent = null;\n this.cachedCredential = null;\n this.clientOptions = {};\n opts = opts || {};\n this._cachedProjectId = opts.projectId || null;\n this.cachedCredential = opts.authClient || null;\n this.keyFilename = opts.keyFilename || opts.keyFile;\n this.scopes = opts.scopes;\n this.jsonContent = opts.credentials || null;\n this.clientOptions = opts.clientOptions || {};\n if (opts.universeDomain) {\n this.clientOptions.universeDomain = opts.universeDomain;\n }\n }\n // GAPIC client libraries should always use self-signed JWTs. The following\n // variables are set on the JWT client in order to indicate the type of library,\n // and sign the JWT with the correct audience and scopes (if not supplied).\n setGapicJWTValues(client) {\n client.defaultServicePath = this.defaultServicePath;\n client.useJWTAccessWithScope = this.useJWTAccessWithScope;\n client.defaultScopes = this.defaultScopes;\n }\n getProjectId(callback) {\n if (callback) {\n this.getProjectIdAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getProjectIdAsync();\n }\n }\n /**\n * A temporary method for internal `getProjectId` usages where `null` is\n * acceptable. 
In a future major release, `getProjectId` should return `null`\n * (as the `Promise` base signature describes) and this private\n * method should be removed.\n *\n * @returns Promise that resolves with project id (or `null`)\n */\n async getProjectIdOptional() {\n try {\n return await this.getProjectId();\n }\n catch (e) {\n if (e instanceof Error &&\n e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) {\n return null;\n }\n else {\n throw e;\n }\n }\n }\n /*\n * A private method for finding and caching a projectId.\n *\n * Supports environments in order of precedence:\n * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable\n * - GOOGLE_APPLICATION_CREDENTIALS JSON file\n * - Cloud SDK: `gcloud config config-helper --format json`\n * - GCE project ID from metadata server\n *\n * @returns projectId\n */\n async findAndCacheProjectId() {\n let projectId = null;\n projectId || (projectId = await this.getProductionProjectId());\n projectId || (projectId = await this.getFileProjectId());\n projectId || (projectId = await this.getDefaultServiceProjectId());\n projectId || (projectId = await this.getGCEProjectId());\n projectId || (projectId = await this.getExternalAccountClientProjectId());\n if (projectId) {\n this._cachedProjectId = projectId;\n return projectId;\n }\n else {\n throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND);\n }\n }\n async getProjectIdAsync() {\n if (this._cachedProjectId) {\n return this._cachedProjectId;\n }\n if (!this._findProjectIdPromise) {\n this._findProjectIdPromise = this.findAndCacheProjectId();\n }\n return this._findProjectIdPromise;\n }\n /**\n * Retrieves a universe domain from the metadata server via\n * {@link gcpMetadata.universe}.\n *\n * @returns a universe domain\n */\n async getUniverseDomainFromMetadataServer() {\n var _a;\n let universeDomain;\n try {\n universeDomain = await gcpMetadata.universe('universe_domain');\n universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE);\n }\n catch (e) {\n if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {\n universeDomain = authclient_1.DEFAULT_UNIVERSE;\n }\n else {\n throw e;\n }\n }\n return universeDomain;\n }\n /**\n * Retrieves, caches, and returns the universe domain in the following order\n * of precedence:\n * - The universe domain in {@link GoogleAuth.clientOptions}\n * - An existing or ADC {@link AuthClient}'s universe domain\n * - {@link gcpMetadata.universe}, if {@link Compute} client\n *\n * @returns The universe domain\n */\n async getUniverseDomain() {\n let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain');\n try {\n universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain);\n }\n catch (_a) {\n // client or ADC is not available\n universeDomain !== null && universeDomain !== void 0 ? 
universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE);\n }\n return universeDomain;\n }\n /**\n * @returns Any scopes (user-specified or default scopes specified by the\n * client library) that need to be set on the current Auth client.\n */\n getAnyScopes() {\n return this.scopes || this.defaultScopes;\n }\n getApplicationDefault(optionsOrCallback = {}, callback) {\n let options;\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback);\n }\n else {\n return this.getApplicationDefaultAsync(options);\n }\n }\n async getApplicationDefaultAsync(options = {}) {\n // If we've already got a cached credential, return it.\n // This will also preserve one's configured quota project, in case they\n // set one directly on the credential previously.\n if (this.cachedCredential) {\n return await this.prepareAndCacheADC(this.cachedCredential);\n }\n // Since this is a 'new' ADC to cache we will use the environment variable\n // if it's available. We prefer this value over the value from ADC.\n const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'];\n let credential;\n // Check for the existence of a local environment variable pointing to the\n // location of the credential file. This is typically used in local\n // developer scenarios.\n credential =\n await this._tryGetApplicationCredentialsFromEnvironmentVariable(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Look in the well-known credential file location.\n credential =\n await this._tryGetApplicationCredentialsFromWellKnownFile(options);\n if (credential) {\n if (credential instanceof jwtclient_1.JWT) {\n credential.scopes = this.scopes;\n }\n else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) {\n credential.scopes = this.getAnyScopes();\n }\n return await this.prepareAndCacheADC(credential, quotaProjectIdOverride);\n }\n // Determine if we're running on GCE.\n if (await this._checkIsGCE()) {\n // set universe domain for Compute client\n if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) {\n options.universeDomain =\n await this.getUniverseDomainFromMetadataServer();\n }\n options.scopes = this.getAnyScopes();\n return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride);\n }\n throw new Error('Could not load the default credentials. 
Browse to https://cloud.google.com/docs/authentication/getting-started for more information.');\n }\n async prepareAndCacheADC(credential, quotaProjectIdOverride) {\n const projectId = await this.getProjectIdOptional();\n if (quotaProjectIdOverride) {\n credential.quotaProjectId = quotaProjectIdOverride;\n }\n this.cachedCredential = credential;\n return { credential, projectId };\n }\n /**\n * Determines whether the auth layer is running on Google Compute Engine.\n * Checks for GCP Residency, then fallback to checking if metadata server\n * is available.\n *\n * @returns A promise that resolves with the boolean.\n * @api private\n */\n async _checkIsGCE() {\n if (this.checkIsGCE === undefined) {\n this.checkIsGCE =\n gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable());\n }\n return this.checkIsGCE;\n }\n /**\n * Attempts to load default credentials from the environment variable path..\n * @returns Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromEnvironmentVariable(options) {\n const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] ||\n process.env['google_application_credentials'];\n if (!credentialsPath || credentialsPath.length === 0) {\n return null;\n }\n try {\n return this._getApplicationCredentialsFromFilePath(credentialsPath, options);\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`;\n }\n throw e;\n }\n }\n /**\n * Attempts to load default credentials from a well-known file location\n * @return Promise that resolves with the OAuth2Client or null.\n * @api private\n */\n async _tryGetApplicationCredentialsFromWellKnownFile(options) {\n // First, figure out the location of the file, depending upon the OS type.\n let location = null;\n if (this._isWindows()) {\n // Windows\n location = process.env['APPDATA'];\n }\n else {\n // Linux or Mac\n const home = process.env['HOME'];\n if (home) {\n location = path.join(home, '.config');\n }\n }\n // If we found the root path, expand it.\n if (location) {\n location = path.join(location, 'gcloud', 'application_default_credentials.json');\n if (!fs.existsSync(location)) {\n location = null;\n }\n }\n // The file does not exist.\n if (!location) {\n return null;\n }\n // The file seems to exist. Try to use it.\n const client = await this._getApplicationCredentialsFromFilePath(location, options);\n return client;\n }\n /**\n * Attempts to load default credentials from a file at the given path..\n * @param filePath The path to the file to read.\n * @returns Promise that resolves with the OAuth2Client\n * @api private\n */\n async _getApplicationCredentialsFromFilePath(filePath, options = {}) {\n // Make sure the path looks like a string.\n if (!filePath || filePath.length === 0) {\n throw new Error('The file path is invalid.');\n }\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. Expect a thrown error\n // if not resolvable.\n filePath = fs.realpathSync(filePath);\n if (!fs.lstatSync(filePath).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. 
${err.message}`;\n }\n throw err;\n }\n // Now open a read stream on the file, and parse it.\n const readStream = fs.createReadStream(filePath);\n return this.fromStream(readStream, options);\n }\n /**\n * Create a credentials instance using a given impersonated input options.\n * @param json The impersonated input object.\n * @returns JWT or UserRefresh Client with data\n */\n fromImpersonatedJSON(json) {\n var _a, _b, _c, _d, _e;\n if (!json) {\n throw new Error('Must pass in a JSON object containing an impersonated refresh token');\n }\n if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n throw new Error(`The incoming JSON object does not have the \"${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}\" type`);\n }\n if (!json.source_credentials) {\n throw new Error('The incoming JSON object does not contain a source_credentials field');\n }\n if (!json.service_account_impersonation_url) {\n throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field');\n }\n // Create source client for impersonation\n const sourceClient = new refreshclient_1.UserRefreshClient();\n sourceClient.fromJSON(json.source_credentials);\n if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) {\n /**\n * Prevents DOS attacks.\n * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85}\n **/\n throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`);\n }\n // Extreact service account from service_account_impersonation_url\n const targetPrincipal = (_c = (_b = /(?<target>[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target;\n if (!targetPrincipal) {\n throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`);\n }\n const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : [];\n const client = new impersonated_1.Impersonated({\n ...json,\n delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [],\n sourceClient: sourceClient,\n targetPrincipal: targetPrincipal,\n targetScopes: Array.isArray(targetScopes) ? 
targetScopes : [targetScopes],\n });\n return client;\n }\n /**\n * Create a credentials instance using the given input options.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n fromJSON(json, options = {}) {\n let client;\n // user's preferred universe domain\n const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain');\n if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) {\n client = new refreshclient_1.UserRefreshClient(options);\n client.fromJSON(json);\n }\n else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {\n client = this.fromImpersonatedJSON(json);\n }\n else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n client = externalclient_1.ExternalAccountClient.fromJSON(json, options);\n client.scopes = this.getAnyScopes();\n }\n else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) {\n client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options);\n }\n else {\n options.scopes = this.scopes;\n client = new jwtclient_1.JWT(options);\n this.setGapicJWTValues(client);\n client.fromJSON(json);\n }\n if (preferredUniverseDomain) {\n client.universeDomain = preferredUniverseDomain;\n }\n return client;\n }\n /**\n * Return a JWT or UserRefreshClient from JavaScript object, caching both the\n * object used to instantiate and the client.\n * @param json The input object.\n * @param options The JWT or UserRefresh options for the client\n * @returns JWT or UserRefresh Client with data\n */\n _cacheClientFromJSON(json, options) {\n const client = this.fromJSON(json, options);\n // cache both raw data used to instantiate client and client itself.\n this.jsonContent = json;\n this.cachedCredential = client;\n return client;\n }\n fromStream(inputStream, optionsOrCallback = {}, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else {\n options = optionsOrCallback;\n }\n if (callback) {\n this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback);\n }\n else {\n return this.fromStreamAsync(inputStream, options);\n }\n }\n fromStreamAsync(inputStream, options) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the Google auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n try {\n const data = JSON.parse(s);\n const r = this._cacheClientFromJSON(data, options);\n return resolve(r);\n }\n catch (err) {\n // If we failed parsing this.keyFileName, assume that it\n // is a PEM or p12 certificate:\n if (!this.keyFilename)\n throw err;\n const client = new jwtclient_1.JWT({\n ...this.clientOptions,\n keyFile: this.keyFilename,\n });\n this.cachedCredential = client;\n this.setGapicJWTValues(client);\n return resolve(client);\n }\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n /**\n * Create a credentials instance using the given API key string.\n * @param apiKey The API key string\n * @param options An optional options object.\n * @returns A JWT loaded from the key\n */\n fromAPIKey(apiKey, options) {\n options = options || {};\n const client = new jwtclient_1.JWT(options);\n client.fromAPIKey(apiKey);\n return client;\n }\n /**\n * Determines whether the 
current operating system is Windows.\n * @api private\n */\n _isWindows() {\n const sys = os.platform();\n if (sys && sys.length >= 3) {\n if (sys.substring(0, 3).toLowerCase() === 'win') {\n return true;\n }\n }\n return false;\n }\n /**\n * Run the Google Cloud SDK command that prints the default project ID\n */\n async getDefaultServiceProjectId() {\n return new Promise(resolve => {\n (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => {\n if (!err && stdout) {\n try {\n const projectId = JSON.parse(stdout).configuration.properties.core.project;\n resolve(projectId);\n return;\n }\n catch (e) {\n // ignore errors\n }\n }\n resolve(null);\n });\n });\n }\n /**\n * Loads the project id from environment variables.\n * @api private\n */\n getProductionProjectId() {\n return (process.env['GCLOUD_PROJECT'] ||\n process.env['GOOGLE_CLOUD_PROJECT'] ||\n process.env['gcloud_project'] ||\n process.env['google_cloud_project']);\n }\n /**\n * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file.\n * @api private\n */\n async getFileProjectId() {\n if (this.cachedCredential) {\n // Try to read the project ID from the cached credentials file\n return this.cachedCredential.projectId;\n }\n // Ensure the projectId is loaded from the keyFile if available.\n if (this.keyFilename) {\n const creds = await this.getClient();\n if (creds && creds.projectId) {\n return creds.projectId;\n }\n }\n // Try to load a credentials file and read its project ID\n const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable();\n if (r) {\n return r.projectId;\n }\n else {\n return null;\n }\n }\n /**\n * Gets the project ID from external account client if available.\n */\n async getExternalAccountClientProjectId() {\n if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {\n return null;\n }\n const creds = await this.getClient();\n // Do not suppress the underlying error, as the error could contain helpful\n // information for debugging and fixing. This is especially true for\n // external account creds as in order to get the project ID, the following\n // operations have to succeed:\n // 1. Valid credentials file should be supplied.\n // 2. Ability to retrieve access tokens from STS token exchange API.\n // 3. Ability to exchange for service account impersonated credentials (if\n // enabled).\n // 4. 
Ability to get project info using the access token from step 2 or 3.\n // Without surfacing the error, it is harder for developers to determine\n // which step went wrong.\n return await creds.getProjectId();\n }\n /**\n * Gets the Compute Engine project ID if it can be inferred.\n */\n async getGCEProjectId() {\n try {\n const r = await gcpMetadata.project('project-id');\n return r;\n }\n catch (e) {\n // Ignore any errors\n return null;\n }\n }\n getCredentials(callback) {\n if (callback) {\n this.getCredentialsAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.getCredentialsAsync();\n }\n }\n async getCredentialsAsync() {\n const client = await this.getClient();\n if (client instanceof impersonated_1.Impersonated) {\n return { client_email: client.getTargetPrincipal() };\n }\n if (client instanceof baseexternalclient_1.BaseExternalAccountClient) {\n const serviceAccountEmail = client.getServiceAccountEmail();\n if (serviceAccountEmail) {\n return {\n client_email: serviceAccountEmail,\n universe_domain: client.universeDomain,\n };\n }\n }\n if (this.jsonContent) {\n return {\n client_email: this.jsonContent.client_email,\n private_key: this.jsonContent.private_key,\n universe_domain: this.jsonContent.universe_domain,\n };\n }\n if (await this._checkIsGCE()) {\n const [client_email, universe_domain] = await Promise.all([\n gcpMetadata.instance('service-accounts/default/email'),\n this.getUniverseDomain(),\n ]);\n return { client_email, universe_domain };\n }\n throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND);\n }\n /**\n * Automatically obtain an {@link AuthClient `AuthClient`} based on the\n * provided configuration. If no options were passed, use Application\n * Default Credentials.\n */\n async getClient() {\n if (!this.cachedCredential) {\n if (this.jsonContent) {\n this._cacheClientFromJSON(this.jsonContent, this.clientOptions);\n }\n else if (this.keyFilename) {\n const filePath = path.resolve(this.keyFilename);\n const stream = fs.createReadStream(filePath);\n await this.fromStreamAsync(stream, this.clientOptions);\n }\n else {\n await this.getApplicationDefaultAsync(this.clientOptions);\n }\n }\n return this.cachedCredential;\n }\n /**\n * Creates a client which will fetch an ID token for authorization.\n * @param targetAudience the audience for the fetched ID token.\n * @returns IdTokenClient for making HTTP calls authenticated with ID tokens.\n */\n async getIdTokenClient(targetAudience) {\n const client = await this.getClient();\n if (!('fetchIdToken' in client)) {\n throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.');\n }\n return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client });\n }\n /**\n * Automatically obtain application default credentials, and return\n * an access token for making requests.\n */\n async getAccessToken() {\n const client = await this.getClient();\n return (await client.getAccessToken()).token;\n }\n /**\n * Obtain the HTTP headers that will provide authorization for a given\n * request.\n */\n async getRequestHeaders(url) {\n const client = await this.getClient();\n return client.getRequestHeaders(url);\n }\n /**\n * Obtain credentials for a request, then attach the appropriate headers to\n * the request options.\n * @param opts Axios or Request options on which to attach the headers\n */\n async authorizeRequest(opts) {\n opts = opts || {};\n const url = 
opts.url || opts.uri;\n const client = await this.getClient();\n const headers = await client.getRequestHeaders(url);\n opts.headers = Object.assign(opts.headers || {}, headers);\n return opts;\n }\n /**\n * Automatically obtain application default credentials, and make an\n * HTTP request using the given options.\n * @param opts Axios request options for the HTTP request.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n async request(opts) {\n const client = await this.getClient();\n return client.request(opts);\n }\n /**\n * Determine the compute environment in which the code is running.\n */\n getEnv() {\n return (0, envDetect_1.getEnv)();\n }\n /**\n * Sign the given data with the current private key, or go out\n * to the IAM API to sign it.\n * @param data The data to be signed.\n * @param endpoint A custom endpoint to use.\n *\n * @example\n * ```\n * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/');\n * ```\n */\n async sign(data, endpoint) {\n const client = await this.getClient();\n const universe = await this.getUniverseDomain();\n endpoint =\n endpoint ||\n `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`;\n if (client instanceof impersonated_1.Impersonated) {\n const signed = await client.sign(data);\n return signed.signedBlob;\n }\n const crypto = (0, crypto_1.createCrypto)();\n if (client instanceof jwtclient_1.JWT && client.key) {\n const sign = await crypto.sign(client.key, data);\n return sign;\n }\n const creds = await this.getCredentials();\n if (!creds.client_email) {\n throw new Error('Cannot sign data without `client_email`.');\n }\n return this.signBlob(crypto, creds.client_email, data, endpoint);\n }\n async signBlob(crypto, emailOrUniqueId, data, endpoint) {\n const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`);\n const res = await this.request({\n method: 'POST',\n url: url.href,\n data: {\n payload: crypto.encodeBase64StringUtf8(data),\n },\n });\n return res.data.signedBlob;\n }\n}\nexports.GoogleAuth = GoogleAuth;\n/**\n * Export DefaultTransporter as a static property of the class.\n */\nGoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter;\n","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IAMAuth = void 0;\nclass IAMAuth {\n /**\n * IAM credentials.\n *\n * @param selector the iam authority selector\n * @param token the token\n * @constructor\n */\n constructor(selector, token) {\n this.selector = selector;\n this.token = token;\n this.selector = selector;\n this.token = token;\n }\n /**\n * Acquire the HTTP headers required to make an authenticated request.\n */\n getRequestHeaders() {\n return {\n 'x-goog-iam-authority-selector': this.selector,\n 'x-goog-iam-authorization-token': this.token,\n };\n }\n}\nexports.IAMAuth = IAMAuth;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, 
Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar _a, _b, _c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdentityPoolClient = void 0;\nconst fs = require(\"fs\");\nconst util_1 = require(\"util\");\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst util_2 = require(\"../util\");\n// fs.readfile is undefined in browser karma tests causing\n// `npm run browser-test` to fail as test.oauth2.ts imports this file via\n// src/index.ts.\n// Fallback to void function to avoid promisify throwing a TypeError.\nconst readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { }));\nconst realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { }));\nconst lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { }));\n/**\n * Defines the Url-sourced and file-sourced external account clients mainly\n * used for K8s and Azure workloads.\n */\nclass IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiate an IdentityPoolClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid file-sourced or\n * url-sourced credential or a workforce pool user project is provided\n * with a non workforce audience.\n * @param options The external account options object typically loaded\n * from the external account JSON credential file. 
The camelCased options\n * are aliases for the snake_cased options.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n const opts = (0, util_2.originalOrCamelOptions)(options);\n const credentialSource = opts.get('credential_source');\n const credentialSourceOpts = (0, util_2.originalOrCamelOptions)(credentialSource);\n this.file = credentialSourceOpts.get('file');\n this.url = credentialSourceOpts.get('url');\n this.headers = credentialSourceOpts.get('headers');\n if (this.file && this.url) {\n throw new Error('No valid Identity Pool \"credential_source\" provided, must be either file or url.');\n }\n else if (this.file && !this.url) {\n this.credentialSourceType = 'file';\n }\n else if (!this.file && this.url) {\n this.credentialSourceType = 'url';\n }\n else {\n throw new Error('No valid Identity Pool \"credential_source\" provided, must be either file or url.');\n }\n const formatOpts = (0, util_2.originalOrCamelOptions)(credentialSourceOpts.get('format'));\n // Text is the default format type.\n this.formatType = formatOpts.get('type') || 'text';\n this.formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name');\n if (this.formatType !== 'json' && this.formatType !== 'text') {\n throw new Error(`Invalid credential_source format \"${this.formatType}\"`);\n }\n if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) {\n throw new Error('Missing subject_token_field_name for JSON credential_source format');\n }\n }\n /**\n * Triggered when a external subject token is needed to be exchanged for a GCP\n * access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this either retrieves the local credential from a file location (k8s\n * workload) or by sending a GET request to a local metadata server (Azure\n * workloads).\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n if (this.file) {\n return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName);\n }\n return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers);\n }\n /**\n * Looks up the external subject token in the file path provided and\n * resolves with that token.\n * @param file The file path where the external credential is located.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) {\n // Make sure there is a file at the path. lstatSync will throw if there is\n // nothing there.\n try {\n // Resolve path to actual file in case of symlink. 
Expect a thrown error\n // if not resolvable.\n filePath = await realpath(filePath);\n if (!(await lstat(filePath)).isFile()) {\n throw new Error();\n }\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`;\n }\n throw err;\n }\n let subjectToken;\n const rawText = await readFile(filePath, { encoding: 'utf8' });\n if (formatType === 'text') {\n subjectToken = rawText;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const json = JSON.parse(rawText);\n subjectToken = json[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source file');\n }\n return subjectToken;\n }\n /**\n * Sends a GET request to the URL provided and resolves with the returned\n * external subject token.\n * @param url The URL to call to retrieve the subject token. This is typically\n * a local metadata server.\n * @param formatType The token file or URL response type (JSON or text).\n * @param formatSubjectTokenFieldName For JSON response types, this is the\n * subject_token field name. For Azure, this is access_token. For text\n * response types, this is ignored.\n * @param headers The optional additional headers to send with the request to\n * the metadata server url.\n * @return A promise that resolves with the external subject token.\n */\n async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) {\n const opts = {\n url,\n method: 'GET',\n headers,\n responseType: formatType,\n };\n let subjectToken;\n if (formatType === 'text') {\n const response = await this.transporter.request(opts);\n subjectToken = response.data;\n }\n else if (formatType === 'json' && formatSubjectTokenFieldName) {\n const response = await this.transporter.request(opts);\n subjectToken = response.data[formatSubjectTokenFieldName];\n }\n if (!subjectToken) {\n throw new Error('Unable to parse the subject_token from the credential_source URL');\n }\n return subjectToken;\n }\n}\nexports.IdentityPoolClient = IdentityPoolClient;\n","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.IdTokenClient = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nclass IdTokenClient extends oauth2client_1.OAuth2Client {\n /**\n * Google ID Token client\n *\n * Retrieve ID token from the metadata server.\n * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server\n */\n constructor(options) {\n super(options);\n this.targetAudience = options.targetAudience;\n this.idTokenProvider = options.idTokenProvider;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n if (!this.credentials.id_token ||\n !this.credentials.expiry_date ||\n this.isTokenExpiring()) {\n const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience);\n 
this.credentials = {\n id_token: idToken,\n expiry_date: this.getIdTokenExpiryDate(idToken),\n };\n }\n const headers = {\n Authorization: 'Bearer ' + this.credentials.id_token,\n };\n return { headers };\n }\n getIdTokenExpiryDate(idToken) {\n const payloadB64 = idToken.split('.')[1];\n if (payloadB64) {\n const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii'));\n return payload.exp * 1000;\n }\n }\n}\nexports.IdTokenClient = IdTokenClient;\n","\"use strict\";\n/**\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nconst gaxios_1 = require(\"gaxios\");\nexports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account';\nclass Impersonated extends oauth2client_1.OAuth2Client {\n /**\n * Impersonated service account credentials.\n *\n * Create a new access token by impersonating another service account.\n *\n * Impersonated Credentials allowing credentials issued to a user or\n * service account to impersonate another. The source project using\n * Impersonated Credentials must enable the \"IAMCredentials\" API.\n * Also, the target service account must grant the orginating principal\n * the \"Service Account Token Creator\" IAM role.\n *\n * @param {object} options - The configuration object.\n * @param {object} [options.sourceClient] the source credential used as to\n * acquire the impersonated credentials.\n * @param {string} [options.targetPrincipal] the service account to\n * impersonate.\n * @param {string[]} [options.delegates] the chained list of delegates\n * required to grant the final access_token. If set, the sequence of\n * identities must have \"Service Account Token Creator\" capability granted to\n * the preceding identity. For example, if set to [serviceAccountB,\n * serviceAccountC], the sourceCredential must have the Token Creator role on\n * serviceAccountB. serviceAccountB must have the Token Creator on\n * serviceAccountC. 
Finally, C must have Token Creator on target_principal.\n * If left unset, sourceCredential must have that role on targetPrincipal.\n * @param {string[]} [options.targetScopes] scopes to request during the\n * authorization grant.\n * @param {number} [options.lifetime] number of seconds the delegated\n * credential should be valid for up to 3600 seconds by default, or 43,200\n * seconds by extending the token's lifetime, see:\n * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth\n * @param {string} [options.endpoint] api endpoint override.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f;\n super(options);\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = {\n expiry_date: 1,\n refresh_token: 'impersonated-placeholder',\n };\n this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client();\n this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : '';\n this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : [];\n this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : [];\n this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600;\n this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com';\n }\n /**\n * Signs some bytes.\n *\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation}\n * @param blobToSign String to sign.\n * @return denoting the keyyID and signedBlob in base64 string\n */\n async sign(blobToSign) {\n await this.sourceClient.getAccessToken();\n const name = `projects/-/serviceAccounts/${this.targetPrincipal}`;\n const u = `${this.endpoint}/v1/${name}:signBlob`;\n const body = {\n delegates: this.delegates,\n payload: Buffer.from(blobToSign).toString('base64'),\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n return res.data;\n }\n /** The service account email to be impersonated. */\n getTargetPrincipal() {\n return this.targetPrincipal;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken Unused parameter\n */\n async refreshToken(refreshToken) {\n var _a, _b, _c, _d, _e, _f;\n try {\n await this.sourceClient.getAccessToken();\n const name = 'projects/-/serviceAccounts/' + this.targetPrincipal;\n const u = `${this.endpoint}/v1/${name}:generateAccessToken`;\n const body = {\n delegates: this.delegates,\n scope: this.targetScopes,\n lifetime: this.lifetime + 's',\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n const tokenResponse = res.data;\n this.credentials.access_token = tokenResponse.accessToken;\n this.credentials.expiry_date = Date.parse(tokenResponse.expireTime);\n return {\n tokens: this.credentials,\n res,\n };\n }\n catch (error) {\n if (!(error instanceof Error))\n throw error;\n let status = 0;\n let message = '';\n if (error instanceof gaxios_1.GaxiosError) {\n status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status;\n message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? 
void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message;\n }\n if (status && message) {\n error.message = `${status}: unable to impersonate: ${message}`;\n throw error;\n }\n else {\n error.message = `unable to impersonate: ${error}`;\n throw error;\n }\n }\n }\n /**\n * Generates an OpenID Connect ID token for a service account.\n *\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation}\n *\n * @param targetAudience the audience for the fetched ID token.\n * @param options the for the request\n * @return an OpenID Connect ID token\n */\n async fetchIdToken(targetAudience, options) {\n var _a;\n await this.sourceClient.getAccessToken();\n const name = `projects/-/serviceAccounts/${this.targetPrincipal}`;\n const u = `${this.endpoint}/v1/${name}:generateIdToken`;\n const body = {\n delegates: this.delegates,\n audience: targetAudience,\n includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true,\n };\n const res = await this.sourceClient.request({\n url: u,\n data: body,\n method: 'POST',\n });\n return res.data.token;\n }\n}\nexports.Impersonated = Impersonated;\n","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWTAccess = void 0;\nconst jws = require(\"jws\");\nconst util_1 = require(\"../util\");\nconst DEFAULT_HEADER = {\n alg: 'RS256',\n typ: 'JWT',\n};\nclass JWTAccess {\n /**\n * JWTAccess service account credentials.\n *\n * Create a new access token by using the credential to create a new JWT token\n * that's recognized as the access token.\n *\n * @param email the service account email address.\n * @param key the private key that will be used to sign the token.\n * @param keyId the ID of the private key used to sign the token.\n */\n constructor(email, key, keyId, eagerRefreshThresholdMillis) {\n this.cache = new util_1.LRUCache({\n capacity: 500,\n maxAge: 60 * 60 * 1000,\n });\n this.email = email;\n this.key = key;\n this.keyId = keyId;\n this.eagerRefreshThresholdMillis =\n eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000;\n }\n /**\n * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url\n *\n * @param url The URI being authorized.\n * @param scopes The scope or scopes being authorized\n * @returns A string that returns the cached key.\n */\n getCachedKey(url, scopes) {\n let cacheKey = url;\n if (scopes && Array.isArray(scopes) && scopes.length) {\n cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`;\n }\n else if (typeof scopes === 'string') {\n cacheKey = url ? 
`${url}_${scopes}` : scopes;\n }\n if (!cacheKey) {\n throw Error('Scopes or url must be provided');\n }\n return cacheKey;\n }\n /**\n * Get a non-expired access token, after refreshing if necessary.\n *\n * @param url The URI being authorized.\n * @param additionalClaims An object with a set of additional claims to\n * include in the payload.\n * @returns An object that includes the authorization header.\n */\n getRequestHeaders(url, additionalClaims, scopes) {\n // Return cached authorization headers, unless we are within\n // eagerRefreshThresholdMillis ms of them expiring:\n const key = this.getCachedKey(url, scopes);\n const cachedToken = this.cache.get(key);\n const now = Date.now();\n if (cachedToken &&\n cachedToken.expiration - now > this.eagerRefreshThresholdMillis) {\n return cachedToken.headers;\n }\n const iat = Math.floor(Date.now() / 1000);\n const exp = JWTAccess.getExpirationTime(iat);\n let defaultClaims;\n // Turn scopes into space-separated string\n if (Array.isArray(scopes)) {\n scopes = scopes.join(' ');\n }\n // If scopes are specified, sign with scopes\n if (scopes) {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n scope: scopes,\n exp,\n iat,\n };\n }\n else {\n defaultClaims = {\n iss: this.email,\n sub: this.email,\n aud: url,\n exp,\n iat,\n };\n }\n // if additionalClaims are provided, ensure they do not collide with\n // other required claims.\n if (additionalClaims) {\n for (const claim in defaultClaims) {\n if (additionalClaims[claim]) {\n throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`);\n }\n }\n }\n const header = this.keyId\n ? { ...DEFAULT_HEADER, kid: this.keyId }\n : DEFAULT_HEADER;\n const payload = Object.assign(defaultClaims, additionalClaims);\n // Sign the jwt and add it to the cache\n const signedJWT = jws.sign({ header, payload, secret: this.key });\n const headers = { Authorization: `Bearer ${signedJWT}` };\n this.cache.set(key, {\n expiration: exp * 1000,\n headers,\n });\n return headers;\n }\n /**\n * Returns an expiration time for the JWT token.\n *\n * @param iat The issued at time for the JWT.\n * @returns An expiration time for the JWT.\n */\n static getExpirationTime(iat) {\n const exp = iat + 3600; // 3600 seconds = 1 hour\n return exp;\n }\n /**\n * Create a JWTAccess credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n reject(new Error('Must pass in a stream containing the service account auth settings.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('data', chunk => (s += chunk))\n .on('error', reject)\n .on('end', () => {\n try {\n 
const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (err) {\n reject(err);\n }\n });\n });\n }\n}\nexports.JWTAccess = JWTAccess;\n","\"use strict\";\n// Copyright 2013 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JWT = void 0;\nconst gtoken_1 = require(\"gtoken\");\nconst jwtaccess_1 = require(\"./jwtaccess\");\nconst oauth2client_1 = require(\"./oauth2client\");\nconst authclient_1 = require(\"./authclient\");\nclass JWT extends oauth2client_1.OAuth2Client {\n constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) {\n const opts = optionsOrEmail && typeof optionsOrEmail === 'object'\n ? optionsOrEmail\n : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject };\n super(opts);\n this.email = opts.email;\n this.keyFile = opts.keyFile;\n this.key = opts.key;\n this.keyId = opts.keyId;\n this.scopes = opts.scopes;\n this.subject = opts.subject;\n this.additionalClaims = opts.additionalClaims;\n // Start with an expired refresh token, which will automatically be\n // refreshed before the first API call is made.\n this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 };\n }\n /**\n * Creates a copy of the credential with the specified scopes.\n * @param scopes List of requested scopes or a single scope.\n * @return The cloned instance.\n */\n createScoped(scopes) {\n const jwt = new JWT(this);\n jwt.scopes = scopes;\n return jwt;\n }\n /**\n * Obtains the metadata to be sent with the request.\n *\n * @param url the URI being authorized.\n */\n async getRequestMetadataAsync(url) {\n url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url;\n const useSelfSignedJWT = (!this.hasUserScopes() && url) ||\n (this.useJWTAccessWithScope && this.hasAnyScopes()) ||\n this.universeDomain !== authclient_1.DEFAULT_UNIVERSE;\n if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) {\n throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`);\n }\n if (!this.apiKey && useSelfSignedJWT) {\n if (this.additionalClaims &&\n this.additionalClaims.target_audience) {\n const { tokens } = await this.refreshToken();\n return {\n headers: this.addSharedMetadataHeaders({\n Authorization: `Bearer ${tokens.id_token}`,\n }),\n };\n }\n else {\n // no scopes have been set, but a uri has been provided. Use JWTAccess\n // credentials.\n if (!this.access) {\n this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis);\n }\n let scopes;\n if (this.hasUserScopes()) {\n scopes = this.scopes;\n }\n else if (!url) {\n scopes = this.defaultScopes;\n }\n const useScopes = this.useJWTAccessWithScope ||\n this.universeDomain !== authclient_1.DEFAULT_UNIVERSE;\n const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? 
url : undefined, this.additionalClaims, \n // Scopes take precedent over audience for signing,\n // so we only provide them if `useJWTAccessWithScope` is on or\n // if we are in a non-default universe\n useScopes ? scopes : undefined);\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n else if (this.hasAnyScopes() || this.apiKey) {\n return super.getRequestMetadataAsync(url);\n }\n else {\n // If no audience, apiKey, or scopes are provided, we should not attempt\n // to populate any headers:\n return { headers: {} };\n }\n }\n /**\n * Fetches an ID token.\n * @param targetAudience the audience for the fetched ID token.\n */\n async fetchIdToken(targetAudience) {\n // Create a new gToken for fetching an ID token\n const gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: { target_audience: targetAudience },\n transporter: this.transporter,\n });\n await gtoken.getToken({\n forceRefresh: true,\n });\n if (!gtoken.idToken) {\n throw new Error('Unknown error: Failed to fetch ID token');\n }\n return gtoken.idToken;\n }\n /**\n * Determine if there are currently scopes available.\n */\n hasUserScopes() {\n if (!this.scopes) {\n return false;\n }\n return this.scopes.length > 0;\n }\n /**\n * Are there any default or user scopes defined.\n */\n hasAnyScopes() {\n if (this.scopes && this.scopes.length > 0)\n return true;\n if (this.defaultScopes && this.defaultScopes.length > 0)\n return true;\n return false;\n }\n authorize(callback) {\n if (callback) {\n this.authorizeAsync().then(r => callback(null, r), callback);\n }\n else {\n return this.authorizeAsync();\n }\n }\n async authorizeAsync() {\n const result = await this.refreshToken();\n if (!result) {\n throw new Error('No result returned');\n }\n this.credentials = result.tokens;\n this.credentials.refresh_token = 'jwt-placeholder';\n this.key = this.gtoken.key;\n this.email = this.gtoken.iss;\n return result.tokens;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken ignored\n * @private\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n const gtoken = this.createGToken();\n const token = await gtoken.getToken({\n forceRefresh: this.isTokenExpiring(),\n });\n const tokens = {\n access_token: token.access_token,\n token_type: 'Bearer',\n expiry_date: gtoken.expiresAt,\n id_token: gtoken.idToken,\n };\n this.emit('tokens', tokens);\n return { res: null, tokens };\n }\n /**\n * Create a gToken if it doesn't already exist.\n */\n createGToken() {\n if (!this.gtoken) {\n this.gtoken = new gtoken_1.GoogleToken({\n iss: this.email,\n sub: this.subject,\n scope: this.scopes || this.defaultScopes,\n keyFile: this.keyFile,\n key: this.key,\n additionalClaims: this.additionalClaims,\n transporter: this.transporter,\n });\n }\n return this.gtoken;\n }\n /**\n * Create a JWT credentials instance using the given input options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the service account auth settings.');\n }\n if (!json.client_email) {\n throw new Error('The incoming JSON object does not contain a client_email field');\n }\n if (!json.private_key) {\n throw new Error('The incoming JSON object does not contain a private_key field');\n }\n // Extract the relevant information from the json key file.\n this.email = json.client_email;\n 
this.key = json.private_key;\n this.keyId = json.private_key_id;\n this.projectId = json.project_id;\n this.quotaProjectId = json.quota_project_id;\n this.universeDomain = json.universe_domain || this.universeDomain;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n throw new Error('Must pass in a stream containing the service account auth settings.');\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n resolve();\n }\n catch (e) {\n reject(e);\n }\n });\n });\n }\n /**\n * Creates a JWT credentials instance using an API Key for authentication.\n * @param apiKey The API Key in string form.\n */\n fromAPIKey(apiKey) {\n if (typeof apiKey !== 'string') {\n throw new Error('Must provide an API Key string.');\n }\n this.apiKey = apiKey;\n }\n /**\n * Using the key or keyFile on the JWT client, obtain an object that contains\n * the key and the client email.\n */\n async getCredentials() {\n if (this.key) {\n return { private_key: this.key, client_email: this.email };\n }\n else if (this.keyFile) {\n const gtoken = this.createGToken();\n const creds = await gtoken.getCredentials(this.keyFile);\n return { private_key: creds.privateKey, client_email: creds.clientEmail };\n }\n throw new Error('A key or a keyFile must be provided to getCredentials.');\n }\n}\nexports.JWT = JWT;\n","\"use strict\";\n// Copyright 2014 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LoginTicket = void 0;\nclass LoginTicket {\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @param {string} env Envelope of the jwt\n * @param {TokenPayload} pay Payload of the jwt\n * @constructor\n */\n constructor(env, pay) {\n this.envelope = env;\n this.payload = pay;\n }\n getEnvelope() {\n return this.envelope;\n }\n getPayload() {\n return this.payload;\n }\n /**\n * Create a simple class to extract user ID from an ID Token\n *\n * @return The user ID\n */\n getUserId() {\n const payload = this.getPayload();\n if (payload && payload.sub) {\n return payload.sub;\n }\n return null;\n }\n /**\n * Returns attributes from the login ticket. 
This can contain\n * various information about the user session.\n *\n * @return The envelope and payload\n */\n getAttributes() {\n return { envelope: this.getEnvelope(), payload: this.getPayload() };\n }\n}\nexports.LoginTicket = LoginTicket;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst stream = require(\"stream\");\nconst formatEcdsa = require(\"ecdsa-sig-formatter\");\nconst crypto_1 = require(\"../crypto/crypto\");\nconst authclient_1 = require(\"./authclient\");\nconst loginticket_1 = require(\"./loginticket\");\nvar CodeChallengeMethod;\n(function (CodeChallengeMethod) {\n CodeChallengeMethod[\"Plain\"] = \"plain\";\n CodeChallengeMethod[\"S256\"] = \"S256\";\n})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {}));\nvar CertificateFormat;\n(function (CertificateFormat) {\n CertificateFormat[\"PEM\"] = \"PEM\";\n CertificateFormat[\"JWK\"] = \"JWK\";\n})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {}));\nclass OAuth2Client extends authclient_1.AuthClient {\n constructor(optionsOrClientId, clientSecret, redirectUri) {\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? 
optionsOrClientId\n : { clientId: optionsOrClientId, clientSecret, redirectUri };\n super(opts);\n this.certificateCache = {};\n this.certificateExpiry = null;\n this.certificateCacheFormat = CertificateFormat.PEM;\n this.refreshTokenPromises = new Map();\n this._clientId = opts.clientId;\n this._clientSecret = opts.clientSecret;\n this.redirectUri = opts.redirectUri;\n this.endpoints = {\n tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo',\n oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth',\n oauth2TokenUrl: 'https://oauth2.googleapis.com/token',\n oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke',\n oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs',\n oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs',\n oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key',\n ...opts.endpoints,\n };\n this.issuers = opts.issuers || [\n 'accounts.google.com',\n 'https://accounts.google.com',\n this.universeDomain,\n ];\n }\n /**\n * Generates URL for consent page landing.\n * @param opts Options.\n * @return URL to consent page.\n */\n generateAuthUrl(opts = {}) {\n if (opts.code_challenge_method && !opts.code_challenge) {\n throw new Error('If a code_challenge_method is provided, code_challenge must be included.');\n }\n opts.response_type = opts.response_type || 'code';\n opts.client_id = opts.client_id || this._clientId;\n opts.redirect_uri = opts.redirect_uri || this.redirectUri;\n // Allow scopes to be passed either as array or a string\n if (Array.isArray(opts.scope)) {\n opts.scope = opts.scope.join(' ');\n }\n const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString();\n return (rootUrl +\n '?' +\n querystring.stringify(opts));\n }\n generateCodeVerifier() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.');\n }\n /**\n * Convenience method to automatically generate a code_verifier, and its\n * resulting SHA256. If used, this must be paired with a S256\n * code_challenge_method.\n *\n * For a full example see:\n * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js\n */\n async generateCodeVerifierAsync() {\n // base64 encoding uses 6 bits per character, and we want to generate128\n // characters. 6*128/8 = 96.\n const crypto = (0, crypto_1.createCrypto)();\n const randomString = crypto.randomBytesBase64(96);\n // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/\n // \"-\"/\".\"/\"_\"/\"~\". Base64 encoded strings are pretty close, so we're just\n // swapping out a few chars.\n const codeVerifier = randomString\n .replace(/\\+/g, '~')\n .replace(/=/g, '_')\n .replace(/\\//g, '-');\n // Generate the base64 encoded SHA256\n const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier);\n // We need to use base64UrlEncoding instead of standard base64\n const codeChallenge = unencodedCodeChallenge\n .split('=')[0]\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n return { codeVerifier, codeChallenge };\n }\n getToken(codeOrOptions, callback) {\n const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions;\n if (callback) {\n this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response));\n }\n else {\n return this.getTokenAsync(options);\n }\n }\n async getTokenAsync(options) {\n const url = this.endpoints.oauth2TokenUrl.toString();\n const values = {\n code: options.code,\n client_id: options.client_id || this._clientId,\n client_secret: this._clientSecret,\n redirect_uri: options.redirect_uri || this.redirectUri,\n grant_type: 'authorization_code',\n code_verifier: options.codeVerifier,\n };\n const res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(values),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n const tokens = res.data;\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n /**\n * Refreshes the access token.\n * @param refresh_token Existing refresh token.\n * @private\n */\n async refreshToken(refreshToken) {\n if (!refreshToken) {\n return this.refreshTokenNoCache(refreshToken);\n }\n // If a request to refresh using the same token has started,\n // return the same promise.\n if (this.refreshTokenPromises.has(refreshToken)) {\n return this.refreshTokenPromises.get(refreshToken);\n }\n const p = this.refreshTokenNoCache(refreshToken).then(r => {\n this.refreshTokenPromises.delete(refreshToken);\n return r;\n }, e => {\n this.refreshTokenPromises.delete(refreshToken);\n throw e;\n });\n this.refreshTokenPromises.set(refreshToken, p);\n return p;\n }\n async refreshTokenNoCache(refreshToken) {\n var _a;\n if (!refreshToken) {\n throw new Error('No refresh token is set.');\n }\n const url = this.endpoints.oauth2TokenUrl.toString();\n const data = {\n refresh_token: refreshToken,\n client_id: this._clientId,\n client_secret: this._clientSecret,\n grant_type: 'refresh_token',\n };\n let res;\n try {\n // request for new token\n res = await this.transporter.request({\n method: 'POST',\n url,\n data: querystring.stringify(data),\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n });\n }\n catch (e) {\n if (e instanceof gaxios_1.GaxiosError &&\n e.message === 'invalid_grant' &&\n ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) &&\n /ReAuth/i.test(e.response.data.error_description)) {\n e.message = JSON.stringify(e.response.data);\n }\n throw e;\n }\n const tokens = res.data;\n // TODO: de-duplicate this code from a few spots\n if (res.data && res.data.expires_in) {\n tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000;\n delete tokens.expires_in;\n }\n this.emit('tokens', tokens);\n return { tokens, res };\n }\n refreshAccessToken(callback) {\n if (callback) {\n this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback);\n }\n else {\n return this.refreshAccessTokenAsync();\n }\n }\n async refreshAccessTokenAsync() {\n const r = await this.refreshToken(this.credentials.refresh_token);\n const tokens = r.tokens;\n tokens.refresh_token = this.credentials.refresh_token;\n this.credentials = tokens;\n return { credentials: this.credentials, res: r.res };\n }\n getAccessToken(callback) {\n if (callback) {\n this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback);\n }\n else {\n return this.getAccessTokenAsync();\n }\n }\n async getAccessTokenAsync() {\n const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring();\n if (shouldRefresh) {\n if (!this.credentials.refresh_token) {\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n return { token: this.credentials.access_token };\n }\n }\n else {\n throw new Error('No refresh token or refresh handler callback is set.');\n }\n }\n const r = await this.refreshAccessTokenAsync();\n if (!r.credentials || (r.credentials && !r.credentials.access_token)) {\n throw new Error('Could not refresh access token.');\n }\n return { token: r.credentials.access_token, res: r.res };\n }\n else {\n return { token: this.credentials.access_token };\n }\n }\n /**\n * The main authentication interface. It takes an optional url which when\n * present is the endpoint being accessed, and returns a Promise which\n * resolves with authorization header fields.\n *\n * In OAuth2Client, the result has the form:\n * { Authorization: 'Bearer ' }\n * @param url The optional url being authorized\n */\n async getRequestHeaders(url) {\n const headers = (await this.getRequestMetadataAsync(url)).headers;\n return headers;\n }\n async getRequestMetadataAsync(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n url) {\n const thisCreds = this.credentials;\n if (!thisCreds.access_token &&\n !thisCreds.refresh_token &&\n !this.apiKey &&\n !this.refreshHandler) {\n throw new Error('No access, refresh token, API key or refresh handler callback is set.');\n }\n if (thisCreds.access_token && !this.isTokenExpiring()) {\n thisCreds.token_type = thisCreds.token_type || 'Bearer';\n const headers = {\n Authorization: thisCreds.token_type + ' ' + thisCreds.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n // If refreshHandler exists, call processAndValidateRefreshHandler().\n if (this.refreshHandler) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n const headers = {\n Authorization: 'Bearer ' + this.credentials.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers) };\n }\n }\n if (this.apiKey) {\n return { headers: { 'X-Goog-Api-Key': this.apiKey } };\n }\n let r = null;\n let tokens = null;\n try {\n r = await this.refreshToken(thisCreds.refresh_token);\n tokens = r.tokens;\n }\n catch (err) {\n const e = err;\n if (e.response &&\n (e.response.status === 403 || e.response.status === 404)) {\n e.message = `Could not refresh access token: ${e.message}`;\n }\n throw e;\n }\n const credentials = this.credentials;\n credentials.token_type = credentials.token_type || 'Bearer';\n tokens.refresh_token = credentials.refresh_token;\n this.credentials = tokens;\n const headers = {\n Authorization: credentials.token_type + ' ' + tokens.access_token,\n };\n return { headers: this.addSharedMetadataHeaders(headers), res: r.res };\n }\n /**\n * Generates an URL to revoke the given token.\n * @param token The existing token to be revoked.\n *\n * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL}\n */\n static getRevokeTokenUrl(token) {\n return new OAuth2Client().getRevokeTokenURL(token).toString();\n }\n /**\n * Generates a URL to revoke the given token.\n *\n * @param token The existing token to be revoked.\n */\n getRevokeTokenURL(token) {\n const url = new URL(this.endpoints.oauth2RevokeUrl);\n url.searchParams.append('token', token);\n return url;\n }\n revokeToken(token, callback) {\n const opts = {\n url: this.getRevokeTokenURL(token).toString(),\n method: 'POST',\n };\n if (callback) {\n this.transporter\n .request(opts)\n .then(r => callback(null, r), callback);\n }\n else {\n return this.transporter.request(opts);\n }\n }\n revokeCredentials(callback) {\n if (callback) {\n this.revokeCredentialsAsync().then(res => callback(null, res), callback);\n }\n else {\n return this.revokeCredentialsAsync();\n }\n }\n async revokeCredentialsAsync() {\n const token = this.credentials.access_token;\n this.credentials = {};\n if (token) {\n return this.revokeToken(token);\n }\n else {\n throw new Error('No access token to revoke.');\n }\n }\n request(opts, callback) {\n if (callback) {\n this.requestAsync(opts).then(r => callback(null, r), e => {\n return callback(e, e.response);\n });\n }\n else {\n return this.requestAsync(opts);\n }\n }\n async requestAsync(opts, retry = false) {\n let r2;\n try {\n const r = await this.getRequestMetadataAsync(opts.url);\n opts.headers = opts.headers || {};\n if (r.headers && r.headers['x-goog-user-project']) {\n opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project'];\n }\n if (r.headers && r.headers.Authorization) {\n opts.headers.Authorization = r.headers.Authorization;\n }\n if (this.apiKey) {\n opts.headers['X-Goog-Api-Key'] = this.apiKey;\n }\n r2 = await this.transporter.request(opts);\n }\n catch (e) {\n const res = e.response;\n if (res) {\n const statusCode = res.status;\n // Retry the request for metadata if the following criteria are true:\n // - We haven't already retried. 
It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - An access_token and refresh_token were available, but either no\n // expiry_date was available or the forceRefreshOnFailure flag is set.\n // The absent expiry_date case can happen when developers stash the\n // access_token and refresh_token for later use, but the access_token\n // fails on the first try because it's expired. Some developers may\n // choose to enable forceRefreshOnFailure to mitigate time-related\n // errors.\n // Or the following criteria are true:\n // - We haven't already retried. It only makes sense to retry once.\n // - The response was a 401 or a 403\n // - The request didn't send a readableStream\n // - No refresh_token was available\n // - An access_token and a refreshHandler callback were available, but\n // either no expiry_date was available or the forceRefreshOnFailure\n // flag is set. The access_token fails on the first try because it's\n // expired. Some developers may choose to enable forceRefreshOnFailure\n // to mitigate time-related errors.\n const mayRequireRefresh = this.credentials &&\n this.credentials.access_token &&\n this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure);\n const mayRequireRefreshWithNoRefreshToken = this.credentials &&\n this.credentials.access_token &&\n !this.credentials.refresh_token &&\n (!this.credentials.expiry_date || this.forceRefreshOnFailure) &&\n this.refreshHandler;\n const isReadableStream = res.config.data instanceof stream.Readable;\n const isAuthErr = statusCode === 401 || statusCode === 403;\n if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) {\n await this.refreshAccessTokenAsync();\n return this.requestAsync(opts, true);\n }\n else if (!retry &&\n isAuthErr &&\n !isReadableStream &&\n mayRequireRefreshWithNoRefreshToken) {\n const refreshedAccessToken = await this.processAndValidateRefreshHandler();\n if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) {\n this.setCredentials(refreshedAccessToken);\n }\n return this.requestAsync(opts, true);\n }\n }\n throw e;\n }\n return r2;\n }\n verifyIdToken(options, callback) {\n // This function used to accept two arguments instead of an options object.\n // Check the types to help users upgrade with less pain.\n // This check can be removed after a 2.0 release.\n if (callback && typeof callback !== 'function') {\n throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.');\n }\n if (callback) {\n this.verifyIdTokenAsync(options).then(r => callback(null, r), callback);\n }\n else {\n return this.verifyIdTokenAsync(options);\n }\n }\n async verifyIdTokenAsync(options) {\n if (!options.idToken) {\n throw new Error('The verifyIdToken method requires an ID Token');\n }\n const response = await this.getFederatedSignonCertsAsync();\n const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry);\n return login;\n }\n /**\n * Obtains information about the provisioned access token. Especially useful\n * if you want to check the scopes that were provisioned to a given token.\n *\n * @param accessToken Required. 
The Access Token for which you want to get\n * user info.\n */\n async getTokenInfo(accessToken) {\n const { data } = await this.transporter.request({\n method: 'POST',\n headers: {\n 'Content-Type': 'application/x-www-form-urlencoded',\n Authorization: `Bearer ${accessToken}`,\n },\n url: this.endpoints.tokenInfoUrl.toString(),\n });\n const info = Object.assign({\n expiry_date: new Date().getTime() + data.expires_in * 1000,\n scopes: data.scope.split(' '),\n }, data);\n delete info.expires_in;\n delete info.scope;\n return info;\n }\n getFederatedSignonCerts(callback) {\n if (callback) {\n this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback);\n }\n else {\n return this.getFederatedSignonCertsAsync();\n }\n }\n async getFederatedSignonCertsAsync() {\n const nowTime = new Date().getTime();\n const format = (0, crypto_1.hasBrowserCrypto)()\n ? CertificateFormat.JWK\n : CertificateFormat.PEM;\n if (this.certificateExpiry &&\n nowTime < this.certificateExpiry.getTime() &&\n this.certificateCacheFormat === format) {\n return { certs: this.certificateCache, format };\n }\n let res;\n let url;\n switch (format) {\n case CertificateFormat.PEM:\n url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString();\n break;\n case CertificateFormat.JWK:\n url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString();\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n const cacheControl = res ? res.headers['cache-control'] : undefined;\n let cacheAge = -1;\n if (cacheControl) {\n const pattern = new RegExp('max-age=([0-9]*)');\n const regexResult = pattern.exec(cacheControl);\n if (regexResult && regexResult.length === 2) {\n // Cache results with max-age (in seconds)\n cacheAge = Number(regexResult[1]) * 1000; // milliseconds\n }\n }\n let certificates = {};\n switch (format) {\n case CertificateFormat.PEM:\n certificates = res.data;\n break;\n case CertificateFormat.JWK:\n for (const key of res.data.keys) {\n certificates[key.kid] = key;\n }\n break;\n default:\n throw new Error(`Unsupported certificate format ${format}`);\n }\n const now = new Date();\n this.certificateExpiry =\n cacheAge === -1 ? 
null : new Date(now.getTime() + cacheAge);\n this.certificateCache = certificates;\n this.certificateCacheFormat = format;\n return { certs: certificates, format, res };\n }\n getIapPublicKeys(callback) {\n if (callback) {\n this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback);\n }\n else {\n return this.getIapPublicKeysAsync();\n }\n }\n async getIapPublicKeysAsync() {\n let res;\n const url = this.endpoints.oauth2IapPublicKeyUrl.toString();\n try {\n res = await this.transporter.request({ url });\n }\n catch (e) {\n if (e instanceof Error) {\n e.message = `Failed to retrieve verification certificates: ${e.message}`;\n }\n throw e;\n }\n return { pubkeys: res.data, res };\n }\n verifySignedJwtWithCerts() {\n // To make the code compatible with browser SubtleCrypto we need to make\n // this method async.\n throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.');\n }\n /**\n * Verify the id token is signed with the correct certificate\n * and is from the correct audience.\n * @param jwt The jwt to verify (The ID Token in this case).\n * @param certs The array of certs to test the jwt against.\n * @param requiredAudience The audience to test the jwt against.\n * @param issuers The allowed issuers of the jwt (Optional).\n * @param maxExpiry The max expiry the certificate can be (Optional).\n * @return Returns a promise resolving to LoginTicket on verification.\n */\n async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) {\n const crypto = (0, crypto_1.createCrypto)();\n if (!maxExpiry) {\n maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_;\n }\n const segments = jwt.split('.');\n if (segments.length !== 3) {\n throw new Error('Wrong number of segments in token: ' + jwt);\n }\n const signed = segments[0] + '.' 
+ segments[1];\n let signature = segments[2];\n let envelope;\n let payload;\n try {\n envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`;\n }\n throw err;\n }\n if (!envelope) {\n throw new Error(\"Can't parse token envelope: \" + segments[0]);\n }\n try {\n payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1]));\n }\n catch (err) {\n if (err instanceof Error) {\n err.message = `Can't parse token payload '${segments[0]}`;\n }\n throw err;\n }\n if (!payload) {\n throw new Error(\"Can't parse token payload: \" + segments[1]);\n }\n if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) {\n // If this is not present, then there's no reason to attempt verification\n throw new Error('No pem found for envelope: ' + JSON.stringify(envelope));\n }\n const cert = certs[envelope.kid];\n if (envelope.alg === 'ES256') {\n signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64');\n }\n const verified = await crypto.verify(cert, signed, signature);\n if (!verified) {\n throw new Error('Invalid token signature: ' + jwt);\n }\n if (!payload.iat) {\n throw new Error('No issue time in token: ' + JSON.stringify(payload));\n }\n if (!payload.exp) {\n throw new Error('No expiration time in token: ' + JSON.stringify(payload));\n }\n const iat = Number(payload.iat);\n if (isNaN(iat))\n throw new Error('iat field using invalid format');\n const exp = Number(payload.exp);\n if (isNaN(exp))\n throw new Error('exp field using invalid format');\n const now = new Date().getTime() / 1000;\n if (exp >= now + maxExpiry) {\n throw new Error('Expiration time too far in future: ' + JSON.stringify(payload));\n }\n const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_;\n const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_;\n if (now < earliest) {\n throw new Error('Token used too early, ' +\n now +\n ' < ' +\n earliest +\n ': ' +\n JSON.stringify(payload));\n }\n if (now > latest) {\n throw new Error('Token used too late, ' +\n now +\n ' > ' +\n latest +\n ': ' +\n JSON.stringify(payload));\n }\n if (issuers && issuers.indexOf(payload.iss) < 0) {\n throw new Error('Invalid issuer, expected one of [' +\n issuers +\n '], but got ' +\n payload.iss);\n }\n // Check the audience matches if we have one\n if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) {\n const aud = payload.aud;\n let audVerified = false;\n // If the requiredAudience is an array, check if it contains token\n // audience\n if (requiredAudience.constructor === Array) {\n audVerified = requiredAudience.indexOf(aud) > -1;\n }\n else {\n audVerified = aud === requiredAudience;\n }\n if (!audVerified) {\n throw new Error('Wrong recipient, payload audience != requiredAudience');\n }\n }\n return new loginticket_1.LoginTicket(envelope, payload);\n }\n /**\n * Returns a promise that resolves with AccessTokenResponse type if\n * refreshHandler is defined.\n * If not, nothing is returned.\n */\n async processAndValidateRefreshHandler() {\n if (this.refreshHandler) {\n const accessTokenResponse = await this.refreshHandler();\n if (!accessTokenResponse.access_token) {\n throw new Error('No access token is returned by the refreshHandler callback.');\n }\n return accessTokenResponse;\n }\n return;\n }\n /**\n * Returns true if a token is expired or will expire within\n * eagerRefreshThresholdMillismilliseconds.\n * If there is no expiry time, assumes the token is not expired or 
expiring.\n */\n isTokenExpiring() {\n const expiryDate = this.credentials.expiry_date;\n return expiryDate\n ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis\n : false;\n }\n}\nexports.OAuth2Client = OAuth2Client;\n/**\n * @deprecated use instance's {@link OAuth2Client.endpoints}\n */\nOAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo';\n/**\n * Clock skew - five minutes in seconds\n */\nOAuth2Client.CLOCK_SKEW_SECS_ = 300;\n/**\n * The default max Token Lifetime is one day in seconds\n */\nOAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0;\nconst querystring = require(\"querystring\");\nconst crypto_1 = require(\"../crypto/crypto\");\n/** List of HTTP methods that accept request bodies. */\nconst METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH'];\n/**\n * Abstract class for handling client authentication in OAuth-based\n * operations.\n * When request-body client authentication is used, only application/json and\n * application/x-www-form-urlencoded content types for HTTP methods that support\n * request bodies are supported.\n */\nclass OAuthClientAuthHandler {\n /**\n * Instantiates an OAuth client authentication handler.\n * @param clientAuthentication The client auth credentials.\n */\n constructor(clientAuthentication) {\n this.clientAuthentication = clientAuthentication;\n this.crypto = (0, crypto_1.createCrypto)();\n }\n /**\n * Applies client authentication on the OAuth request's headers or POST\n * body but does not process the request.\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n applyClientAuthenticationOptions(opts, bearerToken) {\n // Inject authenticated header.\n this.injectAuthenticatedHeaders(opts, bearerToken);\n // Inject authenticated request body.\n if (!bearerToken) {\n this.injectAuthenticatedRequestBody(opts);\n }\n }\n /**\n * Applies client authentication on the request's header if either\n * basic authentication or bearer token authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n * @param bearerToken The optional bearer token to use for authentication.\n * When this is used, no client authentication credentials are needed.\n */\n injectAuthenticatedHeaders(opts, bearerToken) {\n var _a;\n // Bearer token prioritized higher than basic Auth.\n if (bearerToken) {\n opts.headers = opts.headers || {};\n Object.assign(opts.headers, {\n Authorization: `Bearer ${bearerToken}}`,\n });\n 
}\n else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') {\n opts.headers = opts.headers || {};\n const clientId = this.clientAuthentication.clientId;\n const clientSecret = this.clientAuthentication.clientSecret || '';\n const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`);\n Object.assign(opts.headers, {\n Authorization: `Basic ${base64EncodedCreds}`,\n });\n }\n }\n /**\n * Applies client authentication on the request's body if request-body\n * client authentication is selected.\n *\n * @param opts The GaxiosOptions whose headers or data are to be modified\n * depending on the client authentication mechanism to be used.\n */\n injectAuthenticatedRequestBody(opts) {\n var _a;\n if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') {\n const method = (opts.method || 'GET').toUpperCase();\n // Inject authenticated request body.\n if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) {\n // Get content-type.\n let contentType;\n const headers = opts.headers || {};\n for (const key in headers) {\n if (key.toLowerCase() === 'content-type' && headers[key]) {\n contentType = headers[key].toLowerCase();\n break;\n }\n }\n if (contentType === 'application/x-www-form-urlencoded') {\n opts.data = opts.data || '';\n const data = querystring.parse(opts.data);\n Object.assign(data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n opts.data = querystring.stringify(data);\n }\n else if (contentType === 'application/json') {\n opts.data = opts.data || {};\n Object.assign(opts.data, {\n client_id: this.clientAuthentication.clientId,\n client_secret: this.clientAuthentication.clientSecret || '',\n });\n }\n else {\n throw new Error(`${contentType} content-types are not supported with ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n else {\n throw new Error(`${method} HTTP method does not support ` +\n `${this.clientAuthentication.confidentialClientType} ` +\n 'client authentication');\n }\n }\n }\n}\nexports.OAuthClientAuthHandler = OAuthClientAuthHandler;\n/**\n * Converts an OAuth error response to a native JavaScript Error.\n * @param resp The OAuth error response to convert to a native Error object.\n * @param err The optional original error. 
If provided, the error properties\n * will be copied to the new error.\n * @return The converted native Error object.\n */\nfunction getErrorFromOAuthErrorResponse(resp, err) {\n // Error response.\n const errorCode = resp.error;\n const errorDescription = resp.error_description;\n const errorUri = resp.error_uri;\n let message = `Error code ${errorCode}`;\n if (typeof errorDescription !== 'undefined') {\n message += `: ${errorDescription}`;\n }\n if (typeof errorUri !== 'undefined') {\n message += ` - ${errorUri}`;\n }\n const newError = new Error(message);\n // Copy properties from original error to newly generated error.\n if (err) {\n const keys = Object.keys(err);\n if (err.stack) {\n // Copy error.stack if available.\n keys.push('stack');\n }\n keys.forEach(key => {\n // Do not overwrite the message field.\n if (key !== 'message') {\n Object.defineProperty(newError, key, {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n value: err[key],\n writable: false,\n enumerable: true,\n });\n }\n });\n }\n return newError;\n}\nexports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse;\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthClient = exports.ExecutableError = void 0;\nconst baseexternalclient_1 = require(\"./baseexternalclient\");\nconst executable_response_1 = require(\"./executable-response\");\nconst pluggable_auth_handler_1 = require(\"./pluggable-auth-handler\");\n/**\n * Error thrown from the executable run by PluggableAuthClient.\n */\nclass ExecutableError extends Error {\n constructor(message, code) {\n super(`The executable failed with exit code: ${code} and error message: ${message}.`);\n this.code = code;\n Object.setPrototypeOf(this, new.target.prototype);\n }\n}\nexports.ExecutableError = ExecutableError;\n/**\n * The default executable timeout when none is provided, in milliseconds.\n */\nconst DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000;\n/**\n * The minimum allowed executable timeout in milliseconds.\n */\nconst MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000;\n/**\n * The maximum allowed executable timeout in milliseconds.\n */\nconst MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000;\n/**\n * The environment variable to check to see if executable can be run.\n * Value must be set to '1' for the executable to run.\n */\nconst GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES';\n/**\n * The maximum currently supported executable version.\n */\nconst MAXIMUM_EXECUTABLE_VERSION = 1;\n/**\n * PluggableAuthClient enables the exchange of workload identity pool external credentials for\n * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These\n * scripts/executables are completely independent of the Google Cloud Auth libraries. 
These\n * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token\n * to be exchanged for a Google access token.\n *\n *
<p>
To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable\n * must be set to '1'. This is for security reasons.\n *\n *
<p>
Both OIDC and SAML are supported. The executable must adhere to a specific response format\n * defined below.\n *\n *
<p>
The executable must print out the 3rd party token to STDOUT in JSON format. When an\n * output_file is specified in the credential configuration, the executable must also handle writing the\n * JSON response to this file.\n *\n *
<pre>
\n * OIDC response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:id_token\",\n *   \"id_token\": \"HEADER.PAYLOAD.SIGNATURE\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * SAML2 response sample:\n * {\n *   \"version\": 1,\n *   \"success\": true,\n *   \"token_type\": \"urn:ietf:params:oauth:token-type:saml2\",\n *   \"saml_response\": \"...\",\n *   \"expiration_time\": 1620433341\n * }\n *\n * Error response sample:\n * {\n *   \"version\": 1,\n *   \"success\": false,\n *   \"code\": \"401\",\n *   \"message\": \"Error message.\"\n * }\n * 
</pre>\n *\n *
<p>
The \"expiration_time\" field in the JSON response is only required for successful\n * responses when an output file was specified in the credential configuration\n *\n *
<p>
The auth libraries will populate certain environment variables that will be accessible by the\n * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,\n * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and\n * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.\n *\n *
<p>
Please see this repositories README for a complete executable request/response specification.\n */\nclass PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient {\n /**\n * Instantiates a PluggableAuthClient instance using the provided JSON\n * object loaded from an external account credentials file.\n * An error is thrown if the credential is not a valid pluggable auth credential.\n * @param options The external account options object typically loaded from\n * the external account JSON credential file.\n * @param additionalOptions **DEPRECATED, all options are available in the\n * `options` parameter.** Optional additional behavior customization options.\n * These currently customize expiration threshold time and whether to retry\n * on 401/403 API request errors.\n */\n constructor(options, additionalOptions) {\n super(options, additionalOptions);\n if (!options.credential_source.executable) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n this.command = options.credential_source.executable.command;\n if (!this.command) {\n throw new Error('No valid Pluggable Auth \"credential_source\" provided.');\n }\n // Check if the provided timeout exists and if it is valid.\n if (options.credential_source.executable.timeout_millis === undefined) {\n this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS;\n }\n else {\n this.timeoutMillis = options.credential_source.executable.timeout_millis;\n if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS ||\n this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) {\n throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` +\n `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`);\n }\n }\n this.outputFile = options.credential_source.executable.output_file;\n this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({\n command: this.command,\n timeoutMillis: this.timeoutMillis,\n outputFile: this.outputFile,\n });\n this.credentialSourceType = 'executable';\n }\n /**\n * Triggered when an external subject token is needed to be exchanged for a\n * GCP access token via GCP STS endpoint.\n * This uses the `options.credential_source` object to figure out how\n * to retrieve the token using the current environment. In this case,\n * this calls a user provided executable which returns the subject token.\n * The logic is summarized as:\n * 1. Validated that the executable is allowed to run. The\n * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to\n * 1 for security reasons.\n * 2. If an output file is specified by the user, check the file location\n * for a response. If the file exists and contains a valid response,\n * return the subject token from the file.\n * 3. 
Call the provided executable and return response.\n * @return A promise that resolves with the external subject token.\n */\n async retrieveSubjectToken() {\n // Check if the executable is allowed to run.\n if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') {\n throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' +\n 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' +\n 'Variable to 1.');\n }\n let executableResponse = undefined;\n // Try to get cached executable response from output file.\n if (this.outputFile) {\n executableResponse = await this.handler.retrieveCachedResponse();\n }\n // If no response from output file, call the executable.\n if (!executableResponse) {\n // Set up environment map with required values for the executable.\n const envMap = new Map();\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience);\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType);\n // Always set to 0 because interactive mode is not supported.\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0');\n if (this.outputFile) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile);\n }\n const serviceAccountEmail = this.getServiceAccountEmail();\n if (serviceAccountEmail) {\n envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail);\n }\n executableResponse =\n await this.handler.retrieveResponseFromExecutable(envMap);\n }\n if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) {\n throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`);\n }\n // Check that response was successful.\n if (!executableResponse.success) {\n throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode);\n }\n // Check that response contains expiration time if output file was specified.\n if (this.outputFile) {\n if (!executableResponse.expirationTime) {\n throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.');\n }\n }\n // Check that response is not expired.\n if (executableResponse.isExpired()) {\n throw new Error('Executable response is expired.');\n }\n // Return subject token from response.\n return executableResponse.subjectToken;\n }\n}\nexports.PluggableAuthClient = PluggableAuthClient;\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PluggableAuthHandler = void 0;\nconst pluggable_auth_client_1 = require(\"./pluggable-auth-client\");\nconst executable_response_1 = require(\"./executable-response\");\nconst childProcess = require(\"child_process\");\nconst fs = require(\"fs\");\n/**\n * A handler used to retrieve 3rd party token responses from user defined\n * executables and cached 
file output for the PluggableAuthClient class.\n */\nclass PluggableAuthHandler {\n /**\n * Instantiates a PluggableAuthHandler instance using the provided\n * PluggableAuthHandlerOptions object.\n */\n constructor(options) {\n if (!options.command) {\n throw new Error('No command provided.');\n }\n this.commandComponents = PluggableAuthHandler.parseCommand(options.command);\n this.timeoutMillis = options.timeoutMillis;\n if (!this.timeoutMillis) {\n throw new Error('No timeoutMillis provided.');\n }\n this.outputFile = options.outputFile;\n }\n /**\n * Calls user provided executable to get a 3rd party subject token and\n * returns the response.\n * @param envMap a Map of additional Environment Variables required for\n * the executable.\n * @return A promise that resolves with the executable response.\n */\n retrieveResponseFromExecutable(envMap) {\n return new Promise((resolve, reject) => {\n // Spawn process to run executable using added environment variables.\n const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), {\n env: { ...process.env, ...Object.fromEntries(envMap) },\n });\n let output = '';\n // Append stdout to output as executable runs.\n child.stdout.on('data', (data) => {\n output += data;\n });\n // Append stderr as executable runs.\n child.stderr.on('data', (err) => {\n output += err;\n });\n // Set up a timeout to end the child process and throw an error.\n const timeout = setTimeout(() => {\n // Kill child process and remove listeners so 'close' event doesn't get\n // read after child process is killed.\n child.removeAllListeners();\n child.kill();\n return reject(new Error('The executable failed to finish within the timeout specified.'));\n }, this.timeoutMillis);\n child.on('close', (code) => {\n // Cancel timeout if executable closes before timeout is reached.\n clearTimeout(timeout);\n if (code === 0) {\n // If the executable completed successfully, try to return the parsed response.\n try {\n const responseJson = JSON.parse(output);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n return resolve(response);\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n return reject(error);\n }\n return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`));\n }\n }\n else {\n return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString()));\n }\n });\n });\n }\n /**\n * Checks user provided output file for response from previous run of\n * executable and return the response if it exists, is formatted correctly, and is not expired.\n */\n async retrieveCachedResponse() {\n if (!this.outputFile || this.outputFile.length === 0) {\n return undefined;\n }\n let filePath;\n try {\n filePath = await fs.promises.realpath(this.outputFile);\n }\n catch (_a) {\n // If file path cannot be resolved, return undefined.\n return undefined;\n }\n if (!(await fs.promises.lstat(filePath)).isFile()) {\n // If path does not lead to file, return undefined.\n return undefined;\n }\n const responseString = await fs.promises.readFile(filePath, {\n encoding: 'utf8',\n });\n if (responseString === '') {\n return undefined;\n }\n try {\n const responseJson = JSON.parse(responseString);\n const response = new executable_response_1.ExecutableResponse(responseJson);\n // Check if response is successful and unexpired.\n if (response.isValid()) {\n return new executable_response_1.ExecutableResponse(responseJson);\n 
}\n return undefined;\n }\n catch (error) {\n if (error instanceof executable_response_1.ExecutableResponseError) {\n throw error;\n }\n throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`);\n }\n }\n /**\n * Parses given command string into component array, splitting on spaces unless\n * spaces are between quotation marks.\n */\n static parseCommand(command) {\n // Split the command into components by splitting on spaces,\n // unless spaces are contained in quotation marks.\n const components = command.match(/(?:[^\\s\"]+|\"[^\"]*\")+/g);\n if (!components) {\n throw new Error(`Provided command: \"${command}\" could not be parsed.`);\n }\n // Remove quotation marks from the beginning and end of each component if they are present.\n for (let i = 0; i < components.length; i++) {\n if (components[i][0] === '\"' && components[i].slice(-1) === '\"') {\n components[i] = components[i].slice(1, -1);\n }\n }\n return components;\n }\n}\nexports.PluggableAuthHandler = PluggableAuthHandler;\n","\"use strict\";\n// Copyright 2015 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0;\nconst oauth2client_1 = require(\"./oauth2client\");\nexports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user';\nclass UserRefreshClient extends oauth2client_1.OAuth2Client {\n constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) {\n const opts = optionsOrClientId && typeof optionsOrClientId === 'object'\n ? 
optionsOrClientId\n : {\n clientId: optionsOrClientId,\n clientSecret,\n refreshToken,\n eagerRefreshThresholdMillis,\n forceRefreshOnFailure,\n };\n super(opts);\n this._refreshToken = opts.refreshToken;\n this.credentials.refresh_token = opts.refreshToken;\n }\n /**\n * Refreshes the access token.\n * @param refreshToken An ignored refreshToken..\n * @param callback Optional callback.\n */\n async refreshTokenNoCache(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n refreshToken) {\n return super.refreshTokenNoCache(this._refreshToken);\n }\n /**\n * Create a UserRefreshClient credentials instance using the given input\n * options.\n * @param json The input object.\n */\n fromJSON(json) {\n if (!json) {\n throw new Error('Must pass in a JSON object containing the user refresh token');\n }\n if (json.type !== 'authorized_user') {\n throw new Error('The incoming JSON object does not have the \"authorized_user\" type');\n }\n if (!json.client_id) {\n throw new Error('The incoming JSON object does not contain a client_id field');\n }\n if (!json.client_secret) {\n throw new Error('The incoming JSON object does not contain a client_secret field');\n }\n if (!json.refresh_token) {\n throw new Error('The incoming JSON object does not contain a refresh_token field');\n }\n this._clientId = json.client_id;\n this._clientSecret = json.client_secret;\n this._refreshToken = json.refresh_token;\n this.credentials.refresh_token = json.refresh_token;\n this.quotaProjectId = json.quota_project_id;\n this.universeDomain = json.universe_domain || this.universeDomain;\n }\n fromStream(inputStream, callback) {\n if (callback) {\n this.fromStreamAsync(inputStream).then(() => callback(), callback);\n }\n else {\n return this.fromStreamAsync(inputStream);\n }\n }\n async fromStreamAsync(inputStream) {\n return new Promise((resolve, reject) => {\n if (!inputStream) {\n return reject(new Error('Must pass in a stream containing the user refresh token.'));\n }\n let s = '';\n inputStream\n .setEncoding('utf8')\n .on('error', reject)\n .on('data', chunk => (s += chunk))\n .on('end', () => {\n try {\n const data = JSON.parse(s);\n this.fromJSON(data);\n return resolve();\n }\n catch (err) {\n return reject(err);\n }\n });\n });\n }\n}\nexports.UserRefreshClient = UserRefreshClient;\n","\"use strict\";\n// Copyright 2021 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.StsCredentials = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst querystring = require(\"querystring\");\nconst transporters_1 = require(\"../transporters\");\nconst oauth2common_1 = require(\"./oauth2common\");\n/**\n * Implements the OAuth 2.0 token exchange based on\n * https://tools.ietf.org/html/rfc8693\n */\nclass StsCredentials extends oauth2common_1.OAuthClientAuthHandler {\n /**\n * Initializes an STS credentials instance.\n * @param tokenExchangeEndpoint The token exchange endpoint.\n * @param clientAuthentication The client 
authentication credentials if\n * available.\n */\n constructor(tokenExchangeEndpoint, clientAuthentication) {\n super(clientAuthentication);\n this.tokenExchangeEndpoint = tokenExchangeEndpoint;\n this.transporter = new transporters_1.DefaultTransporter();\n }\n /**\n * Exchanges the provided token for another type of token based on the\n * rfc8693 spec.\n * @param stsCredentialsOptions The token exchange options used to populate\n * the token exchange request.\n * @param additionalHeaders Optional additional headers to pass along the\n * request.\n * @param options Optional additional GCP-specific non-spec defined options\n * to send with the request.\n * Example: `&options=${encodeUriComponent(JSON.stringified(options))}`\n * @return A promise that resolves with the token exchange response containing\n * the requested token and its expiration time.\n */\n async exchangeToken(stsCredentialsOptions, additionalHeaders, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options) {\n var _a, _b, _c;\n const values = {\n grant_type: stsCredentialsOptions.grantType,\n resource: stsCredentialsOptions.resource,\n audience: stsCredentialsOptions.audience,\n scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '),\n requested_token_type: stsCredentialsOptions.requestedTokenType,\n subject_token: stsCredentialsOptions.subjectToken,\n subject_token_type: stsCredentialsOptions.subjectTokenType,\n actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken,\n actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType,\n // Non-standard GCP-specific options.\n options: options && JSON.stringify(options),\n };\n // Remove undefined fields.\n Object.keys(values).forEach(key => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof values[key] === 'undefined') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delete values[key];\n }\n });\n const headers = {\n 'Content-Type': 'application/x-www-form-urlencoded',\n };\n // Inject additional STS headers if available.\n Object.assign(headers, additionalHeaders || {});\n const opts = {\n url: this.tokenExchangeEndpoint.toString(),\n method: 'POST',\n headers,\n data: querystring.stringify(values),\n responseType: 'json',\n };\n // Apply OAuth client authentication.\n this.applyClientAuthenticationOptions(opts);\n try {\n const response = await this.transporter.request(opts);\n // Successful response.\n const stsSuccessfulResponse = response.data;\n stsSuccessfulResponse.res = response;\n return stsSuccessfulResponse;\n }\n catch (error) {\n // Translate error to OAuthError.\n if (error instanceof gaxios_1.GaxiosError && error.response) {\n throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, \n // Preserve other fields from the original error.\n error);\n }\n // Request could fail before the server responds.\n throw error;\n }\n }\n}\nexports.StsCredentials = StsCredentials;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.BrowserCrypto = void 0;\n// This file implements crypto functions we need using in-browser\n// SubtleCrypto interface `window.crypto.subtle`.\nconst base64js = require(\"base64-js\");\nconst crypto_1 = require(\"../crypto\");\nclass BrowserCrypto {\n constructor() {\n if (typeof window === 'undefined' ||\n window.crypto === undefined ||\n window.crypto.subtle === undefined) {\n throw new Error(\"SubtleCrypto not found. Make sure it's an https:// website.\");\n }\n }\n async sha256DigestBase64(str) {\n // SubtleCrypto digest() method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return base64js.fromByteArray(new Uint8Array(outputBuffer));\n }\n randomBytesBase64(count) {\n const array = new Uint8Array(count);\n window.crypto.getRandomValues(array);\n return base64js.fromByteArray(array);\n }\n static padBase64(base64) {\n // base64js requires padding, so let's add some '='\n while (base64.length % 4 !== 0) {\n base64 += '=';\n }\n return base64;\n }\n async verify(pubkey, data, signature) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature));\n const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']);\n // SubtleCrypto's verify method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray);\n return result;\n }\n async sign(privateKey, data) {\n const algo = {\n name: 'RSASSA-PKCS1-v1_5',\n hash: { name: 'SHA-256' },\n };\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const dataArray = new TextEncoder().encode(data);\n const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']);\n // SubtleCrypto's sign method is async so we must make\n // this method async as well.\n const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray);\n return base64js.fromByteArray(new Uint8Array(result));\n }\n decodeBase64StringUtf8(base64) {\n const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const result = new TextDecoder().decode(uint8array);\n return result;\n }\n encodeBase64StringUtf8(text) {\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const uint8array = new TextEncoder().encode(text);\n const result = base64js.fromByteArray(uint8array);\n return result;\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n // SubtleCrypto digest() 
method is async, so we must make\n // this method async as well.\n // To calculate SHA256 digest using SubtleCrypto, we first\n // need to convert an input string to an ArrayBuffer:\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const inputBuffer = new TextEncoder().encode(str);\n // Result is ArrayBuffer as well.\n const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer);\n return (0, crypto_1.fromArrayBufferToHex)(outputBuffer);\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n * @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n // Convert key, if provided in ArrayBuffer format, to string.\n const rawKey = typeof key === 'string'\n ? key\n : String.fromCharCode(...new Uint16Array(key));\n // eslint-disable-next-line node/no-unsupported-features/node-builtins\n const enc = new TextEncoder();\n const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), {\n name: 'HMAC',\n hash: {\n name: 'SHA-256',\n },\n }, false, ['sign']);\n return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg));\n }\n}\nexports.BrowserCrypto = BrowserCrypto;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n/* global window */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0;\nconst crypto_1 = require(\"./browser/crypto\");\nconst crypto_2 = require(\"./node/crypto\");\nfunction createCrypto() {\n if (hasBrowserCrypto()) {\n return new crypto_1.BrowserCrypto();\n }\n return new crypto_2.NodeCrypto();\n}\nexports.createCrypto = createCrypto;\nfunction hasBrowserCrypto() {\n return (typeof window !== 'undefined' &&\n typeof window.crypto !== 'undefined' &&\n typeof window.crypto.subtle !== 'undefined');\n}\nexports.hasBrowserCrypto = hasBrowserCrypto;\n/**\n * Converts an ArrayBuffer to a hexadecimal string.\n * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string.\n * @return The hexadecimal encoding of the ArrayBuffer.\n */\nfunction fromArrayBufferToHex(arrayBuffer) {\n // Convert buffer to byte array.\n const byteArray = Array.from(new Uint8Array(arrayBuffer));\n // Convert bytes to hex string.\n return byteArray\n .map(byte => {\n return byte.toString(16).padStart(2, '0');\n })\n .join('');\n}\nexports.fromArrayBufferToHex = fromArrayBufferToHex;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, 
software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.NodeCrypto = void 0;\nconst crypto = require(\"crypto\");\nclass NodeCrypto {\n async sha256DigestBase64(str) {\n return crypto.createHash('sha256').update(str).digest('base64');\n }\n randomBytesBase64(count) {\n return crypto.randomBytes(count).toString('base64');\n }\n async verify(pubkey, data, signature) {\n const verifier = crypto.createVerify('RSA-SHA256');\n verifier.update(data);\n verifier.end();\n return verifier.verify(pubkey, signature, 'base64');\n }\n async sign(privateKey, data) {\n const signer = crypto.createSign('RSA-SHA256');\n signer.update(data);\n signer.end();\n return signer.sign(privateKey, 'base64');\n }\n decodeBase64StringUtf8(base64) {\n return Buffer.from(base64, 'base64').toString('utf-8');\n }\n encodeBase64StringUtf8(text) {\n return Buffer.from(text, 'utf-8').toString('base64');\n }\n /**\n * Computes the SHA-256 hash of the provided string.\n * @param str The plain text string to hash.\n * @return A promise that resolves with the SHA-256 hash of the provided\n * string in hexadecimal encoding.\n */\n async sha256DigestHex(str) {\n return crypto.createHash('sha256').update(str).digest('hex');\n }\n /**\n * Computes the HMAC hash of a message using the provided crypto key and the\n * SHA-256 algorithm.\n * @param key The secret crypto key in utf-8 or ArrayBuffer format.\n * @param msg The plain text message.\n * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer\n * format.\n */\n async signWithHmacSha256(key, msg) {\n const cryptoKey = typeof key === 'string' ? 
key : toBuffer(key);\n return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest());\n }\n}\nexports.NodeCrypto = NodeCrypto;\n/**\n * Converts a Node.js Buffer to an ArrayBuffer.\n * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer\n * @param buffer The Buffer input to covert.\n * @return The ArrayBuffer representation of the input.\n */\nfunction toArrayBuffer(buffer) {\n return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);\n}\n/**\n * Converts an ArrayBuffer to a Node.js Buffer.\n * @param arrayBuffer The ArrayBuffer input to covert.\n * @return The Buffer representation of the input.\n */\nfunction toBuffer(arrayBuffer) {\n return Buffer.from(arrayBuffer);\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gcpMetadata = void 0;\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nconst googleauth_1 = require(\"./auth/googleauth\");\nObject.defineProperty(exports, \"GoogleAuth\", { enumerable: true, get: function () { return googleauth_1.GoogleAuth; } });\nexports.gcpMetadata = require(\"gcp-metadata\");\nvar authclient_1 = require(\"./auth/authclient\");\nObject.defineProperty(exports, \"AuthClient\", { enumerable: true, get: function () { return authclient_1.AuthClient; } });\nObject.defineProperty(exports, \"DEFAULT_UNIVERSE\", { enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } });\nvar computeclient_1 = require(\"./auth/computeclient\");\nObject.defineProperty(exports, \"Compute\", { enumerable: true, get: function () { return computeclient_1.Compute; } });\nvar envDetect_1 = require(\"./auth/envDetect\");\nObject.defineProperty(exports, \"GCPEnv\", { enumerable: true, get: function () { return envDetect_1.GCPEnv; } });\nvar iam_1 = require(\"./auth/iam\");\nObject.defineProperty(exports, \"IAMAuth\", { enumerable: true, get: function () { return iam_1.IAMAuth; } });\nvar idtokenclient_1 = require(\"./auth/idtokenclient\");\nObject.defineProperty(exports, \"IdTokenClient\", { enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } });\nvar jwtaccess_1 = require(\"./auth/jwtaccess\");\nObject.defineProperty(exports, \"JWTAccess\", { enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } });\nvar jwtclient_1 = require(\"./auth/jwtclient\");\nObject.defineProperty(exports, \"JWT\", { enumerable: true, get: function () { 
return jwtclient_1.JWT; } });\nvar impersonated_1 = require(\"./auth/impersonated\");\nObject.defineProperty(exports, \"Impersonated\", { enumerable: true, get: function () { return impersonated_1.Impersonated; } });\nvar oauth2client_1 = require(\"./auth/oauth2client\");\nObject.defineProperty(exports, \"CodeChallengeMethod\", { enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } });\nObject.defineProperty(exports, \"OAuth2Client\", { enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } });\nvar loginticket_1 = require(\"./auth/loginticket\");\nObject.defineProperty(exports, \"LoginTicket\", { enumerable: true, get: function () { return loginticket_1.LoginTicket; } });\nvar refreshclient_1 = require(\"./auth/refreshclient\");\nObject.defineProperty(exports, \"UserRefreshClient\", { enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } });\nvar awsclient_1 = require(\"./auth/awsclient\");\nObject.defineProperty(exports, \"AwsClient\", { enumerable: true, get: function () { return awsclient_1.AwsClient; } });\nvar identitypoolclient_1 = require(\"./auth/identitypoolclient\");\nObject.defineProperty(exports, \"IdentityPoolClient\", { enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } });\nvar externalclient_1 = require(\"./auth/externalclient\");\nObject.defineProperty(exports, \"ExternalAccountClient\", { enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } });\nvar baseexternalclient_1 = require(\"./auth/baseexternalclient\");\nObject.defineProperty(exports, \"BaseExternalAccountClient\", { enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } });\nvar downscopedclient_1 = require(\"./auth/downscopedclient\");\nObject.defineProperty(exports, \"DownscopedClient\", { enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } });\nvar pluggable_auth_client_1 = require(\"./auth/pluggable-auth-client\");\nObject.defineProperty(exports, \"PluggableAuthClient\", { enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } });\nvar transporters_1 = require(\"./transporters\");\nObject.defineProperty(exports, \"DefaultTransporter\", { enumerable: true, get: function () { return transporters_1.DefaultTransporter; } });\nconst auth = new googleauth_1.GoogleAuth();\nexports.auth = auth;\n","\"use strict\";\n// Copyright 2017 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validate = void 0;\n// Accepts an options object passed from the user to the API. In the\n// previous version of the API, it referred to a `Request` options object.\n// Now it refers to an Axiox Request Config object. 
This is here to help\n// ensure users don't pass invalid options when they upgrade from 0.x to 1.x.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction validate(options) {\n const vpairs = [\n { invalid: 'uri', expected: 'url' },\n { invalid: 'json', expected: 'data' },\n { invalid: 'qs', expected: 'params' },\n ];\n for (const pair of vpairs) {\n if (options[pair.invalid]) {\n const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`;\n throw new Error(e);\n }\n }\n}\nexports.validate = validate;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DefaultTransporter = void 0;\nconst gaxios_1 = require(\"gaxios\");\nconst options_1 = require(\"./options\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst pkg = require('../../package.json');\nconst PRODUCT_NAME = 'google-api-nodejs-client';\nclass DefaultTransporter {\n constructor() {\n /**\n * A configurable, replacable `Gaxios` instance.\n */\n this.instance = new gaxios_1.Gaxios();\n }\n /**\n * Configures request options before making a request.\n * @param opts GaxiosOptions options.\n * @return Configured options.\n */\n configure(opts = {}) {\n opts.headers = opts.headers || {};\n if (typeof window === 'undefined') {\n // set transporter user agent if not in browser\n const uaValue = opts.headers['User-Agent'];\n if (!uaValue) {\n opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT;\n }\n else if (!uaValue.includes(`${PRODUCT_NAME}/`)) {\n opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`;\n }\n // track google-auth-library-nodejs version:\n if (!opts.headers['x-goog-api-client']) {\n const nodeVersion = process.version.replace(/^v/, '');\n opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`;\n }\n }\n return opts;\n }\n /**\n * Makes a request using Gaxios with given options.\n * @param opts GaxiosOptions options.\n * @param callback optional callback that contains GaxiosResponse object.\n * @return GaxiosPromise, assuming no callback is passed.\n */\n request(opts) {\n // ensure the user isn't passing in request-style options\n opts = this.configure(opts);\n (0, options_1.validate)(opts);\n return this.instance.request(opts).catch(e => {\n throw this.processError(e);\n });\n }\n get defaults() {\n return this.instance.defaults;\n }\n set defaults(opts) {\n this.instance.defaults = opts;\n }\n /**\n * Changes the error to include details from the body.\n */\n processError(e) {\n const res = e.response;\n const err = e;\n const body = res ? 
res.data : null;\n if (res && body && body.error && res.status !== 200) {\n if (typeof body.error === 'string') {\n err.message = body.error;\n err.status = res.status;\n }\n else if (Array.isArray(body.error.errors)) {\n err.message = body.error.errors\n .map((err2) => err2.message)\n .join('\\n');\n err.code = body.error.code;\n err.errors = body.error.errors;\n }\n else {\n err.message = body.error.message;\n err.code = body.error.code;\n }\n }\n else if (res && res.status >= 400) {\n // Consider all 4xx and 5xx responses errors.\n err.message = body;\n err.status = res.status;\n }\n return err;\n }\n}\nexports.DefaultTransporter = DefaultTransporter;\n/**\n * Default user agent.\n */\nDefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`;\n","\"use strict\";\n// Copyright 2023 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0;\n/**\n * Returns the camel case of a provided string.\n *\n * @remarks\n *\n * Match any `_` and not `_` pair, then return the uppercase of the not `_`\n * character.\n *\n * @internal\n *\n * @param str the string to convert\n * @returns the camelCase'd string\n */\nfunction snakeToCamel(str) {\n return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase());\n}\nexports.snakeToCamel = snakeToCamel;\n/**\n * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference\n * for original, non-camelCase key.\n *\n * @param obj object to lookup a value in\n * @returns a `get` function for getting `obj[key || snakeKey]`, if available\n */\nfunction originalOrCamelOptions(obj) {\n /**\n *\n * @param key an index of object, preferably snake_case\n * @returns the value `obj[key || snakeKey]`, if available\n */\n function get(key) {\n var _a;\n const o = (obj || {});\n return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)];\n }\n return { get };\n}\nexports.originalOrCamelOptions = originalOrCamelOptions;\n/**\n * A simple LRU cache utility.\n * Not meant for external usage.\n *\n * @experimental\n * @internal\n */\nclass LRUCache {\n constructor(options) {\n _LRUCache_instances.add(this);\n /**\n * Maps are in order. 
Thus, the older item is the first item.\n *\n * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map}\n */\n _LRUCache_cache.set(this, new Map());\n this.capacity = options.capacity;\n this.maxAge = options.maxAge;\n }\n /**\n * Add an item to the cache.\n *\n * @param key the key to upsert\n * @param value the value of the key\n */\n set(key, value) {\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_moveToEnd).call(this, key, value);\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_evict).call(this);\n }\n /**\n * Get an item from the cache.\n *\n * @param key the key to retrieve\n */\n get(key) {\n const item = __classPrivateFieldGet(this, _LRUCache_cache, \"f\").get(key);\n if (!item)\n return;\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_moveToEnd).call(this, key, item.value);\n __classPrivateFieldGet(this, _LRUCache_instances, \"m\", _LRUCache_evict).call(this);\n return item.value;\n }\n}\nexports.LRUCache = LRUCache;\n_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) {\n __classPrivateFieldGet(this, _LRUCache_cache, \"f\").delete(key);\n __classPrivateFieldGet(this, _LRUCache_cache, \"f\").set(key, {\n value,\n lastAccessed: Date.now(),\n });\n}, _LRUCache_evict = function _LRUCache_evict() {\n const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0;\n /**\n * Because we know Maps are in order, this item is both the\n * last item in the list (capacity) and oldest (maxAge).\n */\n let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, \"f\").entries().next();\n while (!oldestItem.done &&\n (__classPrivateFieldGet(this, _LRUCache_cache, \"f\").size > this.capacity || // too many\n oldestItem.value[1].lastAccessed < cutoffDate) // too old\n ) {\n __classPrivateFieldGet(this, _LRUCache_cache, \"f\").delete(oldestItem.value[0]);\n oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, \"f\").entries().next();\n }\n};\n","\"use strict\";\n/**\n * Copyright 2018 Google LLC\n *\n * Distributed under MIT license.\n * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT\n */\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GoogleToken = void 0;\nconst fs = require(\"fs\");\nconst gaxios_1 = require(\"gaxios\");\nconst jws = require(\"jws\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst readFile = fs.readFile\n ? (0, util_1.promisify)(fs.readFile)\n : async () => {\n // if running in the web-browser, fs.readFile may not have been shimmed.\n throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS');\n };\nconst GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token';\nconst GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token=';\nclass ErrorWithCode extends Error {\n constructor(message, code) {\n super(message);\n this.code = code;\n }\n}\nclass GoogleToken {\n get accessToken() {\n return this.rawToken ? this.rawToken.access_token : undefined;\n }\n get idToken() {\n return this.rawToken ? this.rawToken.id_token : undefined;\n }\n get tokenType() {\n return this.rawToken ? this.rawToken.token_type : undefined;\n }\n get refreshToken() {\n return this.rawToken ? this.rawToken.refresh_token : undefined;\n }\n /**\n * Create a GoogleToken.\n *\n * @param options Configuration object.\n */\n constructor(options) {\n _GoogleToken_instances.add(this);\n this.transporter = {\n request: opts => (0, gaxios_1.request)(opts),\n };\n _GoogleToken_inFlightRequest.set(this, void 0);\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_configure).call(this, options);\n }\n /**\n * Returns whether the token has expired.\n *\n * @return true if the token has expired, false otherwise.\n */\n hasExpired() {\n const now = new Date().getTime();\n if (this.rawToken && this.expiresAt) {\n return now >= this.expiresAt;\n }\n else {\n return true;\n }\n }\n /**\n * Returns whether the token will expire within eagerRefreshThresholdMillis\n *\n * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise.\n */\n isTokenExpiring() {\n var _a;\n const now = new Date().getTime();\n const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? 
_a : 0;\n if (this.rawToken && this.expiresAt) {\n return this.expiresAt <= now + eagerRefreshThresholdMillis;\n }\n else {\n return true;\n }\n }\n getToken(callback, opts = {}) {\n if (typeof callback === 'object') {\n opts = callback;\n callback = undefined;\n }\n opts = Object.assign({\n forceRefresh: false,\n }, opts);\n if (callback) {\n const cb = callback;\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback);\n return;\n }\n return __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_getTokenAsync).call(this, opts);\n }\n /**\n * Given a keyFile, extract the key and client email if available\n * @param keyFile Path to a json, pem, or p12 file that contains the key.\n * @returns an object with privateKey and clientEmail properties\n */\n async getCredentials(keyFile) {\n const ext = path.extname(keyFile);\n switch (ext) {\n case '.json': {\n const key = await readFile(keyFile, 'utf8');\n const body = JSON.parse(key);\n const privateKey = body.private_key;\n const clientEmail = body.client_email;\n if (!privateKey || !clientEmail) {\n throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS');\n }\n return { privateKey, clientEmail };\n }\n case '.der':\n case '.crt':\n case '.pem': {\n const privateKey = await readFile(keyFile, 'utf8');\n return { privateKey };\n }\n case '.p12':\n case '.pfx': {\n throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' +\n 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE');\n }\n default:\n throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' +\n 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE');\n }\n }\n revokeToken(callback) {\n if (callback) {\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback);\n return;\n }\n return __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_revokeTokenAsync).call(this);\n }\n}\nexports.GoogleToken = GoogleToken;\n_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) {\n if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, \"f\") && !opts.forceRefresh) {\n return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, \"f\");\n }\n try {\n return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_getTokenAsyncInner).call(this, opts), \"f\"));\n }\n finally {\n __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, \"f\");\n }\n}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) {\n if (this.isTokenExpiring() === false && opts.forceRefresh === false) {\n return Promise.resolve(this.rawToken);\n }\n if (!this.key && !this.keyFile) {\n throw new Error('No key or keyFile set.');\n }\n if (!this.key && this.keyFile) {\n const creds = await this.getCredentials(this.keyFile);\n this.key = creds.privateKey;\n this.iss = creds.clientEmail || this.iss;\n if (!creds.clientEmail) {\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_ensureEmail).call(this);\n }\n }\n return __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_requestToken).call(this);\n}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() {\n if (!this.iss) {\n throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS');\n }\n}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() {\n if (!this.accessToken) {\n throw new Error('No token to revoke.');\n }\n const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken;\n await this.transporter.request({\n url,\n retry: true,\n });\n __classPrivateFieldGet(this, _GoogleToken_instances, \"m\", _GoogleToken_configure).call(this, {\n email: this.iss,\n sub: this.sub,\n key: this.key,\n keyFile: this.keyFile,\n scope: this.scope,\n additionalClaims: this.additionalClaims,\n });\n}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) {\n this.keyFile = options.keyFile;\n this.key = options.key;\n this.rawToken = undefined;\n this.iss = options.email || options.iss;\n this.sub = options.sub;\n this.additionalClaims = options.additionalClaims;\n if (typeof options.scope === 'object') {\n this.scope = options.scope.join(' ');\n }\n else {\n this.scope = options.scope;\n }\n this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis;\n if (options.transporter) {\n this.transporter = options.transporter;\n }\n}, _GoogleToken_requestToken = \n/**\n * Request the token from Google.\n */\nasync function _GoogleToken_requestToken() {\n var _a, _b;\n const iat = Math.floor(new Date().getTime() / 1000);\n const additionalClaims = this.additionalClaims || {};\n const payload = Object.assign({\n iss: this.iss,\n scope: this.scope,\n aud: GOOGLE_TOKEN_URL,\n exp: iat + 3600,\n iat,\n sub: this.sub,\n }, additionalClaims);\n const signedJWT = jws.sign({\n header: { alg: 'RS256' },\n payload,\n secret: 
this.key,\n });\n try {\n const r = await this.transporter.request({\n method: 'POST',\n url: GOOGLE_TOKEN_URL,\n data: {\n grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',\n assertion: signedJWT,\n },\n headers: { 'Content-Type': 'application/x-www-form-urlencoded' },\n responseType: 'json',\n retryConfig: {\n httpMethodsToRetry: ['POST'],\n },\n });\n this.rawToken = r.data;\n this.expiresAt =\n r.data.expires_in === null || r.data.expires_in === undefined\n ? undefined\n : (iat + r.data.expires_in) * 1000;\n return this.rawToken;\n }\n catch (e) {\n this.rawToken = undefined;\n this.tokenExpires = undefined;\n const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data)\n ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data\n : {};\n if (body.error) {\n const desc = body.error_description\n ? `: ${body.error_description}`\n : '';\n e.message = `${body.error}${desc}`;\n }\n throw e;\n }\n};\n//# sourceMappingURL=index.js.map","'use strict';\n\nmodule.exports = (flag, argv = process.argv) => {\n\tconst prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');\n\tconst position = argv.indexOf(prefix + flag);\n\tconst terminatorPosition = argv.indexOf('--');\n\treturn position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);\n};\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst once_1 = __importDefault(require(\"@tootallnate/once\"));\nconst agent_base_1 = require(\"agent-base\");\nconst debug = (0, debug_1.default)('http-proxy-agent');\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false;\n}\n/**\n * The `HttpProxyAgent` implements an HTTP Agent subclass that connects\n * to the specified \"HTTP proxy server\" in order to proxy HTTP requests.\n *\n * @api public\n */\nclass HttpProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('Creating new HttpProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 443 : 80;\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n const parsed = url_1.default.parse(req.path);\n if (!parsed.protocol) {\n parsed.protocol = 'http:';\n }\n if (!parsed.hostname) {\n parsed.hostname = opts.hostname || opts.host || null;\n }\n if (parsed.port == null && typeof opts.port) {\n parsed.port = String(opts.port);\n }\n if (parsed.port === '80') {\n // if port is 80, then we can remove the port so that the\n // \":80\" portion is not on the produced URL\n parsed.port = '';\n }\n // Change the `http.ClientRequest` instance's \"path\" field\n // to the absolute path of the URL that will be requested.\n req.path = url_1.default.format(parsed);\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`);\n }\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n // At this point, the http ClientRequest's internal `_header` field\n // might have already been set. 
If this is the case then we'll need\n // to re-generate the string since we just changed the `req.path`.\n if (req._header) {\n let first;\n let endOfHeaders;\n debug('Regenerating stored HTTP header string for request');\n req._header = null;\n req._implicitHeader();\n if (req.output && req.output.length > 0) {\n // Node < 12\n debug('Patching connection write() output buffer with updated header');\n first = req.output[0];\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.output[0] = req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.output);\n }\n else if (req.outputData && req.outputData.length > 0) {\n // Node >= 12\n debug('Patching connection write() output buffer with updated header');\n first = req.outputData[0].data;\n endOfHeaders = first.indexOf('\\r\\n\\r\\n') + 4;\n req.outputData[0].data =\n req._header + first.substring(endOfHeaders);\n debug('Output buffer: %o', req.outputData[0].data);\n }\n }\n // Wait for the socket's `connect` event, so that this `callback()`\n // function throws instead of the `http` request machinery. This is\n // important for i.e. `PacProxyAgent` which determines a failed proxy\n // connection via the `callback()` function throwing.\n yield (0, once_1.default)(socket, 'connect');\n return socket;\n });\n }\n}\nexports.default = HttpProxyAgent;\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpProxyAgent) {\n createHttpProxyAgent.HttpProxyAgent = agent_1.default;\n createHttpProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpProxyAgent || (createHttpProxyAgent = {}));\nmodule.exports = createHttpProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst net_1 = __importDefault(require(\"net\"));\nconst tls_1 = __importDefault(require(\"tls\"));\nconst url_1 = __importDefault(require(\"url\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst debug_1 = __importDefault(require(\"debug\"));\nconst agent_base_1 = require(\"agent-base\");\nconst parse_proxy_response_1 = __importDefault(require(\"./parse-proxy-response\"));\nconst debug = debug_1.default('https-proxy-agent:agent');\n/**\n * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to\n * the specified \"HTTP(s) proxy server\" in order to proxy HTTPS requests.\n *\n * Outgoing HTTP requests are first tunneled through the proxy server using the\n * `CONNECT` HTTP request method to establish a connection to the proxy server,\n * and then the proxy server connects to the destination target and issues the\n * HTTP request from the proxy server.\n *\n * `https:` requests have their socket connection upgraded to TLS once\n * the connection to the proxy server has been established.\n *\n * @api public\n */\nclass HttpsProxyAgent extends agent_base_1.Agent {\n constructor(_opts) {\n let opts;\n if (typeof _opts === 'string') {\n opts = url_1.default.parse(_opts);\n }\n else {\n opts = _opts;\n }\n if (!opts) {\n throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');\n }\n debug('creating new HttpsProxyAgent instance: %o', opts);\n super(opts);\n const proxy = Object.assign({}, opts);\n // If `true`, then connect to the proxy server over TLS.\n // Defaults to `false`.\n this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);\n // Prefer `hostname` over `host`, and set the `port` if needed.\n proxy.host = proxy.hostname || proxy.host;\n if (typeof proxy.port === 'string') {\n proxy.port = parseInt(proxy.port, 10);\n }\n if (!proxy.port && proxy.host) {\n proxy.port = this.secureProxy ? 443 : 80;\n }\n // ALPN is supported by Node.js >= v5.\n // attempt to negotiate http/1.1 for proxy servers that support http/2\n if (this.secureProxy && !('ALPNProtocols' in proxy)) {\n proxy.ALPNProtocols = ['http 1.1'];\n }\n if (proxy.host && proxy.path) {\n // If both a `host` and `path` are specified then it's most likely\n // the result of a `url.parse()` call... 
we need to remove the\n // `path` portion so that `net.connect()` doesn't attempt to open\n // that as a Unix socket file.\n delete proxy.path;\n delete proxy.pathname;\n }\n this.proxy = proxy;\n }\n /**\n * Called when the node-core HTTP client library is creating a\n * new HTTP request.\n *\n * @api protected\n */\n callback(req, opts) {\n return __awaiter(this, void 0, void 0, function* () {\n const { proxy, secureProxy } = this;\n // Create a socket connection to the proxy server.\n let socket;\n if (secureProxy) {\n debug('Creating `tls.Socket`: %o', proxy);\n socket = tls_1.default.connect(proxy);\n }\n else {\n debug('Creating `net.Socket`: %o', proxy);\n socket = net_1.default.connect(proxy);\n }\n const headers = Object.assign({}, proxy.headers);\n const hostname = `${opts.host}:${opts.port}`;\n let payload = `CONNECT ${hostname} HTTP/1.1\\r\\n`;\n // Inject the `Proxy-Authorization` header if necessary.\n if (proxy.auth) {\n headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`;\n }\n // The `Host` header should only include the port\n // number when it is not the default port.\n let { host, port, secureEndpoint } = opts;\n if (!isDefaultPort(port, secureEndpoint)) {\n host += `:${port}`;\n }\n headers.Host = host;\n headers.Connection = 'close';\n for (const name of Object.keys(headers)) {\n payload += `${name}: ${headers[name]}\\r\\n`;\n }\n const proxyResponsePromise = parse_proxy_response_1.default(socket);\n socket.write(`${payload}\\r\\n`);\n const { statusCode, buffered } = yield proxyResponsePromise;\n if (statusCode === 200) {\n req.once('socket', resume);\n if (opts.secureEndpoint) {\n // The proxy is connecting to a TLS server, so upgrade\n // this socket connection to a TLS connection.\n debug('Upgrading socket connection to TLS');\n const servername = opts.servername || opts.host;\n return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket,\n servername }));\n }\n return socket;\n }\n // Some other status code that's not 200... need to re-play the HTTP\n // header \"data\" events onto the socket once the HTTP machinery is\n // attached so that the node core `http` can parse and handle the\n // error status code.\n // Close the original socket, and a new \"fake\" socket is returned\n // instead, so that the proxy doesn't get the HTTP request\n // written to it (which may contain `Authorization` headers or other\n // sensitive data).\n //\n // See: https://hackerone.com/reports/541502\n socket.destroy();\n const fakeSocket = new net_1.default.Socket({ writable: false });\n fakeSocket.readable = true;\n // Need to wait for the \"socket\" event to re-play the \"data\" events.\n req.once('socket', (s) => {\n debug('replaying proxy buffer for failed request');\n assert_1.default(s.listenerCount('data') > 0);\n // Replay the \"buffered\" Buffer onto the fake `socket`, since at\n // this point the HTTP module machinery has been hooked up for\n // the user.\n s.push(buffered);\n s.push(null);\n });\n return fakeSocket;\n });\n }\n}\nexports.default = HttpsProxyAgent;\nfunction resume(socket) {\n socket.resume();\n}\nfunction isDefaultPort(port, secure) {\n return Boolean((!secure && port === 80) || (secure && port === 443));\n}\nfunction isHTTPS(protocol) {\n return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false;\n}\nfunction omit(obj, ...keys) {\n const ret = {};\n let key;\n for (key in obj) {\n if (!keys.includes(key)) {\n ret[key] = obj[key];\n }\n }\n return ret;\n}\n//# sourceMappingURL=agent.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst agent_1 = __importDefault(require(\"./agent\"));\nfunction createHttpsProxyAgent(opts) {\n return new agent_1.default(opts);\n}\n(function (createHttpsProxyAgent) {\n createHttpsProxyAgent.HttpsProxyAgent = agent_1.default;\n createHttpsProxyAgent.prototype = agent_1.default.prototype;\n})(createHttpsProxyAgent || (createHttpsProxyAgent = {}));\nmodule.exports = createHttpsProxyAgent;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst debug_1 = __importDefault(require(\"debug\"));\nconst debug = debug_1.default('https-proxy-agent:parse-proxy-response');\nfunction parseProxyResponse(socket) {\n return new Promise((resolve, reject) => {\n // we need to buffer any HTTP traffic that happens with the proxy before we get\n // the CONNECT response, so that if the response is anything other than an \"200\"\n // response code, then we can re-play the \"data\" events on the socket once the\n // HTTP parser is hooked up...\n let buffersLength = 0;\n const buffers = [];\n function read() {\n const b = socket.read();\n if (b)\n ondata(b);\n else\n socket.once('readable', read);\n }\n function cleanup() {\n socket.removeListener('end', onend);\n socket.removeListener('error', onerror);\n socket.removeListener('close', onclose);\n socket.removeListener('readable', read);\n }\n function onclose(err) {\n debug('onclose had error %o', err);\n }\n function onend() {\n debug('onend');\n }\n function onerror(err) {\n cleanup();\n debug('onerror %o', err);\n reject(err);\n }\n function ondata(b) {\n buffers.push(b);\n buffersLength += b.length;\n const buffered = Buffer.concat(buffers, buffersLength);\n const endOfHeaders = buffered.indexOf('\\r\\n\\r\\n');\n if (endOfHeaders === -1) {\n // keep buffering\n debug('have not received end of HTTP headers yet...');\n read();\n return;\n }\n const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\\r\\n'));\n const statusCode = +firstLine.split(' ')[1];\n debug('got proxy server response: %o', firstLine);\n resolve({\n statusCode,\n buffered\n });\n }\n socket.on('error', onerror);\n socket.on('close', onclose);\n socket.on('end', onend);\n read();\n });\n}\nexports.default = parseProxyResponse;\n//# sourceMappingURL=parse-proxy-response.js.map","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, 
superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","'use strict';\n\nconst isStream = stream =>\n\tstream !== null &&\n\ttypeof stream === 'object' &&\n\ttypeof stream.pipe === 'function';\n\nisStream.writable = stream =>\n\tisStream(stream) &&\n\tstream.writable !== false &&\n\ttypeof stream._write === 'function' &&\n\ttypeof stream._writableState === 'object';\n\nisStream.readable = stream =>\n\tisStream(stream) &&\n\tstream.readable !== false &&\n\ttypeof stream._read === 'function' &&\n\ttypeof stream._readableState === 'object';\n\nisStream.duplex = stream =>\n\tisStream.writable(stream) &&\n\tisStream.readable(stream);\n\nisStream.transform = stream =>\n\tisStream.duplex(stream) &&\n\ttypeof stream._transform === 'function';\n\nmodule.exports = isStream;\n","var json_stringify = require('./lib/stringify.js').stringify;\nvar json_parse = require('./lib/parse.js');\n\nmodule.exports = function(options) {\n return {\n parse: json_parse(options),\n stringify: json_stringify\n }\n};\n//create the default method members with no options applied for backwards compatibility\nmodule.exports.parse = json_parse();\nmodule.exports.stringify = json_stringify;\n","var BigNumber = null;\n\n// regexpxs extracted from\n// (c) BSD-3-Clause\n// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors\n\nconst suspectProtoRx = /(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])(?:p|\\\\u0070)(?:r|\\\\u0072)(?:o|\\\\u006[Ff])(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:_|\\\\u005[Ff])(?:_|\\\\u005[Ff])/;\nconst suspectConstructorRx = /(?:c|\\\\u0063)(?:o|\\\\u006[Ff])(?:n|\\\\u006[Ee])(?:s|\\\\u0073)(?:t|\\\\u0074)(?:r|\\\\u0072)(?:u|\\\\u0075)(?:c|\\\\u0063)(?:t|\\\\u0074)(?:o|\\\\u006[Ff])(?:r|\\\\u0072)/;\n\n/*\n json_parse.js\n 2012-06-20\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n This file creates a json_parse function.\n During create you can (optionally) specify some behavioural switches\n\n require('json-bigint')(options)\n\n The optional options parameter holds switches that drive certain\n aspects of the parsing process:\n * options.strict = true will warn about duplicate-key usage in the json.\n The default (strict = false) will silently ignore those and overwrite\n values for keys that are in duplicate use.\n\n The resulting function follows this signature:\n json_parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = json_parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n This is a reference implementation. 
You are free to copy, modify, or\n redistribute.\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n*/\n\n/*members \"\", \"\\\"\", \"\\/\", \"\\\\\", at, b, call, charAt, f, fromCharCode,\n hasOwnProperty, message, n, name, prototype, push, r, t, text\n*/\n\nvar json_parse = function (options) {\n 'use strict';\n\n // This is a function that can parse a JSON text, producing a JavaScript\n // data structure. It is a simple, recursive descent parser. It does not use\n // eval or regular expressions, so it can be used as a model for implementing\n // a JSON parser in other languages.\n\n // We are defining the function inside of another function to avoid creating\n // global variables.\n\n // Default options one can override by passing options to the parse()\n var _options = {\n strict: false, // not being strict means do not generate syntax errors for \"duplicate key\"\n storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string\n alwaysParseAsBig: false, // toggles whether all numbers should be Big\n useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js\n protoAction: 'error',\n constructorAction: 'error',\n };\n\n // If there are options, then use them to override the default _options\n if (options !== undefined && options !== null) {\n if (options.strict === true) {\n _options.strict = true;\n }\n if (options.storeAsString === true) {\n _options.storeAsString = true;\n }\n _options.alwaysParseAsBig =\n options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false;\n _options.useNativeBigInt =\n options.useNativeBigInt === true ? options.useNativeBigInt : false;\n\n if (typeof options.constructorAction !== 'undefined') {\n if (\n options.constructorAction === 'error' ||\n options.constructorAction === 'ignore' ||\n options.constructorAction === 'preserve'\n ) {\n _options.constructorAction = options.constructorAction;\n } else {\n throw new Error(\n `Incorrect value for constructorAction option, must be \"error\", \"ignore\" or undefined but passed ${options.constructorAction}`\n );\n }\n }\n\n if (typeof options.protoAction !== 'undefined') {\n if (\n options.protoAction === 'error' ||\n options.protoAction === 'ignore' ||\n options.protoAction === 'preserve'\n ) {\n _options.protoAction = options.protoAction;\n } else {\n throw new Error(\n `Incorrect value for protoAction option, must be \"error\", \"ignore\" or undefined but passed ${options.protoAction}`\n );\n }\n }\n }\n\n var at, // The index of the current character\n ch, // The current character\n escapee = {\n '\"': '\"',\n '\\\\': '\\\\',\n '/': '/',\n b: '\\b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n },\n text,\n error = function (m) {\n // Call error when something is wrong.\n\n throw {\n name: 'SyntaxError',\n message: m,\n at: at,\n text: text,\n };\n },\n next = function (c) {\n // If a c parameter is provided, verify that it matches the current character.\n\n if (c && c !== ch) {\n error(\"Expected '\" + c + \"' instead of '\" + ch + \"'\");\n }\n\n // Get the next character. 
When there are no more characters,\n // return the empty string.\n\n ch = text.charAt(at);\n at += 1;\n return ch;\n },\n number = function () {\n // Parse a number value.\n\n var number,\n string = '';\n\n if (ch === '-') {\n string = '-';\n next('-');\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n if (ch === '.') {\n string += '.';\n while (next() && ch >= '0' && ch <= '9') {\n string += ch;\n }\n }\n if (ch === 'e' || ch === 'E') {\n string += ch;\n next();\n if (ch === '-' || ch === '+') {\n string += ch;\n next();\n }\n while (ch >= '0' && ch <= '9') {\n string += ch;\n next();\n }\n }\n number = +string;\n if (!isFinite(number)) {\n error('Bad number');\n } else {\n if (BigNumber == null) BigNumber = require('bignumber.js');\n //if (number > 9007199254740992 || number < -9007199254740992)\n // Bignumber has stricter check: everything with length > 15 digits disallowed\n if (string.length > 15)\n return _options.storeAsString\n ? string\n : _options.useNativeBigInt\n ? BigInt(string)\n : new BigNumber(string);\n else\n return !_options.alwaysParseAsBig\n ? number\n : _options.useNativeBigInt\n ? BigInt(number)\n : new BigNumber(number);\n }\n },\n string = function () {\n // Parse a string value.\n\n var hex,\n i,\n string = '',\n uffff;\n\n // When parsing for string values, we must look for \" and \\ characters.\n\n if (ch === '\"') {\n var startAt = at;\n while (next()) {\n if (ch === '\"') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n return string;\n }\n if (ch === '\\\\') {\n if (at - 1 > startAt) string += text.substring(startAt, at - 1);\n next();\n if (ch === 'u') {\n uffff = 0;\n for (i = 0; i < 4; i += 1) {\n hex = parseInt(next(), 16);\n if (!isFinite(hex)) {\n break;\n }\n uffff = uffff * 16 + hex;\n }\n string += String.fromCharCode(uffff);\n } else if (typeof escapee[ch] === 'string') {\n string += escapee[ch];\n } else {\n break;\n }\n startAt = at;\n }\n }\n }\n error('Bad string');\n },\n white = function () {\n // Skip whitespace.\n\n while (ch && ch <= ' ') {\n next();\n }\n },\n word = function () {\n // true, false, or null.\n\n switch (ch) {\n case 't':\n next('t');\n next('r');\n next('u');\n next('e');\n return true;\n case 'f':\n next('f');\n next('a');\n next('l');\n next('s');\n next('e');\n return false;\n case 'n':\n next('n');\n next('u');\n next('l');\n next('l');\n return null;\n }\n error(\"Unexpected '\" + ch + \"'\");\n },\n value, // Place holder for the value function.\n array = function () {\n // Parse an array value.\n\n var array = [];\n\n if (ch === '[') {\n next('[');\n white();\n if (ch === ']') {\n next(']');\n return array; // empty array\n }\n while (ch) {\n array.push(value());\n white();\n if (ch === ']') {\n next(']');\n return array;\n }\n next(',');\n white();\n }\n }\n error('Bad array');\n },\n object = function () {\n // Parse an object value.\n\n var key,\n object = Object.create(null);\n\n if (ch === '{') {\n next('{');\n white();\n if (ch === '}') {\n next('}');\n return object; // empty object\n }\n while (ch) {\n key = string();\n white();\n next(':');\n if (\n _options.strict === true &&\n Object.hasOwnProperty.call(object, key)\n ) {\n error('Duplicate key \"' + key + '\"');\n }\n\n if (suspectProtoRx.test(key) === true) {\n if (_options.protoAction === 'error') {\n error('Object contains forbidden prototype property');\n } else if (_options.protoAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else if (suspectConstructorRx.test(key) === 
true) {\n if (_options.constructorAction === 'error') {\n error('Object contains forbidden constructor property');\n } else if (_options.constructorAction === 'ignore') {\n value();\n } else {\n object[key] = value();\n }\n } else {\n object[key] = value();\n }\n\n white();\n if (ch === '}') {\n next('}');\n return object;\n }\n next(',');\n white();\n }\n }\n error('Bad object');\n };\n\n value = function () {\n // Parse a JSON value. It could be an object, an array, a string, a number,\n // or a word.\n\n white();\n switch (ch) {\n case '{':\n return object();\n case '[':\n return array();\n case '\"':\n return string();\n case '-':\n return number();\n default:\n return ch >= '0' && ch <= '9' ? number() : word();\n }\n };\n\n // Return the json_parse function. It will have access to all of the above\n // functions and variables.\n\n return function (source, reviver) {\n var result;\n\n text = source + '';\n at = 0;\n ch = ' ';\n result = value();\n white();\n if (ch) {\n error('Syntax error');\n }\n\n // If there is a reviver function, we recursively walk the new structure,\n // passing each name/value pair to the reviver function for possible\n // transformation, starting with a temporary root object that holds the result\n // in an empty key. If there is not a reviver function, we simply return the\n // result.\n\n return typeof reviver === 'function'\n ? (function walk(holder, key) {\n var k,\n v,\n value = holder[key];\n if (value && typeof value === 'object') {\n Object.keys(value).forEach(function (k) {\n v = walk(value, k);\n if (v !== undefined) {\n value[k] = v;\n } else {\n delete value[k];\n }\n });\n }\n return reviver.call(holder, key, value);\n })({ '': result }, '')\n : result;\n };\n};\n\nmodule.exports = json_parse;\n","var BigNumber = require('bignumber.js');\n\n/*\n json2.js\n 2013-05-26\n\n Public Domain.\n\n NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.\n\n See http://www.JSON.org/js.html\n\n\n This code should be minified before deployment.\n See http://javascript.crockford.com/jsmin.html\n\n USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO\n NOT CONTROL.\n\n\n This file creates a global JSON object containing two methods: stringify\n and parse.\n\n JSON.stringify(value, replacer, space)\n value any JavaScript value, usually an object or array.\n\n replacer an optional parameter that determines how object\n values are stringified for objects. It can be a\n function or an array of strings.\n\n space an optional parameter that specifies the indentation\n of nested structures. If it is omitted, the text will\n be packed without extra whitespace. If it is a number,\n it will specify the number of spaces to indent at each\n level. If it is a string (such as '\\t' or ' '),\n it contains the characters used to indent at each level.\n\n This method produces a JSON text from a JavaScript value.\n\n When an object value is found, if the object contains a toJSON\n method, its toJSON method will be called and the result will be\n stringified. A toJSON method does not serialize: it returns the\n value represented by the name/value pair that should be serialized,\n or undefined if nothing should be serialized. The toJSON method\n will be passed the key associated with the value, and this will be\n bound to the value\n\n For example, this would serialize Dates as ISO strings.\n\n Date.prototype.toJSON = function (key) {\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? 
'0' + n : n;\n }\n\n return this.getUTCFullYear() + '-' +\n f(this.getUTCMonth() + 1) + '-' +\n f(this.getUTCDate()) + 'T' +\n f(this.getUTCHours()) + ':' +\n f(this.getUTCMinutes()) + ':' +\n f(this.getUTCSeconds()) + 'Z';\n };\n\n You can provide an optional replacer method. It will be passed the\n key and value of each member, with this bound to the containing\n object. The value that is returned from your method will be\n serialized. If your method returns undefined, then the member will\n be excluded from the serialization.\n\n If the replacer parameter is an array of strings, then it will be\n used to select the members to be serialized. It filters the results\n such that only members with keys listed in the replacer array are\n stringified.\n\n Values that do not have JSON representations, such as undefined or\n functions, will not be serialized. Such values in objects will be\n dropped; in arrays they will be replaced with null. You can use\n a replacer function to replace those with JSON values.\n JSON.stringify(undefined) returns undefined.\n\n The optional space parameter produces a stringification of the\n value that is filled with line breaks and indentation to make it\n easier to read.\n\n If the space parameter is a non-empty string, then that string will\n be used for indentation. If the space parameter is a number, then\n the indentation will be that many spaces.\n\n Example:\n\n text = JSON.stringify(['e', {pluribus: 'unum'}]);\n // text is '[\"e\",{\"pluribus\":\"unum\"}]'\n\n\n text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\\t');\n // text is '[\\n\\t\"e\",\\n\\t{\\n\\t\\t\"pluribus\": \"unum\"\\n\\t}\\n]'\n\n text = JSON.stringify([new Date()], function (key, value) {\n return this[key] instanceof Date ?\n 'Date(' + this[key] + ')' : value;\n });\n // text is '[\"Date(---current time---)\"]'\n\n\n JSON.parse(text, reviver)\n This method parses a JSON text to produce an object or array.\n It can throw a SyntaxError exception.\n\n The optional reviver parameter is a function that can filter and\n transform the results. It receives each of the keys and values,\n and its return value is used instead of the original value.\n If it returns what it received, then the structure is not modified.\n If it returns undefined then the member is deleted.\n\n Example:\n\n // Parse the text. Values that look like ISO date strings will\n // be converted to Date objects.\n\n myData = JSON.parse(text, function (key, value) {\n var a;\n if (typeof value === 'string') {\n a =\n/^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}(?:\\.\\d*)?)Z$/.exec(value);\n if (a) {\n return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],\n +a[5], +a[6]));\n }\n }\n return value;\n });\n\n myData = JSON.parse('[\"Date(09/09/2001)\"]', function (key, value) {\n var d;\n if (typeof value === 'string' &&\n value.slice(0, 5) === 'Date(' &&\n value.slice(-1) === ')') {\n d = new Date(value.slice(5, -1));\n if (d) {\n return d;\n }\n }\n return value;\n });\n\n\n This is a reference implementation. You are free to copy, modify, or\n redistribute.\n*/\n\n/*jslint evil: true, regexp: true */\n\n/*members \"\", \"\\b\", \"\\t\", \"\\n\", \"\\f\", \"\\r\", \"\\\"\", JSON, \"\\\\\", apply,\n call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,\n getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,\n lastIndex, length, parse, prototype, push, replace, slice, stringify,\n test, toJSON, toString, valueOf\n*/\n\n\n// Create a JSON object only if one does not already exist. 
We create the\n// methods in a closure to avoid creating global variables.\n\nvar JSON = module.exports;\n\n(function () {\n 'use strict';\n\n function f(n) {\n // Format integers to have at least two digits.\n return n < 10 ? '0' + n : n;\n }\n\n var cx = /[\\u0000\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n escapable = /[\\\\\\\"\\x00-\\x1f\\x7f-\\x9f\\u00ad\\u0600-\\u0604\\u070f\\u17b4\\u17b5\\u200c-\\u200f\\u2028-\\u202f\\u2060-\\u206f\\ufeff\\ufff0-\\uffff]/g,\n gap,\n indent,\n meta = { // table of character substitutions\n '\\b': '\\\\b',\n '\\t': '\\\\t',\n '\\n': '\\\\n',\n '\\f': '\\\\f',\n '\\r': '\\\\r',\n '\"' : '\\\\\"',\n '\\\\': '\\\\\\\\'\n },\n rep;\n\n\n function quote(string) {\n\n// If the string contains no control characters, no quote characters, and no\n// backslash characters, then we can safely slap some quotes around it.\n// Otherwise we must also replace the offending characters with safe escape\n// sequences.\n\n escapable.lastIndex = 0;\n return escapable.test(string) ? '\"' + string.replace(escapable, function (a) {\n var c = meta[a];\n return typeof c === 'string'\n ? c\n : '\\\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);\n }) + '\"' : '\"' + string + '\"';\n }\n\n\n function str(key, holder) {\n\n// Produce a string from holder[key].\n\n var i, // The loop counter.\n k, // The member key.\n v, // The member value.\n length,\n mind = gap,\n partial,\n value = holder[key],\n isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value));\n\n// If the value has a toJSON method, call it to obtain a replacement value.\n\n if (value && typeof value === 'object' &&\n typeof value.toJSON === 'function') {\n value = value.toJSON(key);\n }\n\n// If we were called with a replacer function, then call the replacer to\n// obtain a replacement value.\n\n if (typeof rep === 'function') {\n value = rep.call(holder, key, value);\n }\n\n// What happens next depends on the value's type.\n\n switch (typeof value) {\n case 'string':\n if (isBigNumber) {\n return value;\n } else {\n return quote(value);\n }\n\n case 'number':\n\n// JSON numbers must be finite. Encode non-finite numbers as null.\n\n return isFinite(value) ? String(value) : 'null';\n\n case 'boolean':\n case 'null':\n case 'bigint':\n\n// If the value is a boolean or null, convert it to a string. Note:\n// typeof null does not produce 'null'. The case is included here in\n// the remote chance that this gets fixed someday.\n\n return String(value);\n\n// If the type is 'object', we might be dealing with an object or an array or\n// null.\n\n case 'object':\n\n// Due to a specification blunder in ECMAScript, typeof null is 'object',\n// so watch out for that case.\n\n if (!value) {\n return 'null';\n }\n\n// Make an array to hold the partial results of stringifying this object value.\n\n gap += indent;\n partial = [];\n\n// Is the value an array?\n\n if (Object.prototype.toString.apply(value) === '[object Array]') {\n\n// The value is an array. Stringify every element. Use null as a placeholder\n// for non-JSON values.\n\n length = value.length;\n for (i = 0; i < length; i += 1) {\n partial[i] = str(i, value) || 'null';\n }\n\n// Join all of the elements together, separated with commas, and wrap them in\n// brackets.\n\n v = partial.length === 0\n ? '[]'\n : gap\n ? 
'[\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + ']'\n : '[' + partial.join(',') + ']';\n gap = mind;\n return v;\n }\n\n// If the replacer is an array, use it to select the members to be stringified.\n\n if (rep && typeof rep === 'object') {\n length = rep.length;\n for (i = 0; i < length; i += 1) {\n if (typeof rep[i] === 'string') {\n k = rep[i];\n v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n }\n }\n } else {\n\n// Otherwise, iterate through all of the keys in the object.\n\n Object.keys(value).forEach(function(k) {\n var v = str(k, value);\n if (v) {\n partial.push(quote(k) + (gap ? ': ' : ':') + v);\n }\n });\n }\n\n// Join all of the member texts together, separated with commas,\n// and wrap them in braces.\n\n v = partial.length === 0\n ? '{}'\n : gap\n ? '{\\n' + gap + partial.join(',\\n' + gap) + '\\n' + mind + '}'\n : '{' + partial.join(',') + '}';\n gap = mind;\n return v;\n }\n }\n\n// If the JSON object does not yet have a stringify method, give it one.\n\n if (typeof JSON.stringify !== 'function') {\n JSON.stringify = function (value, replacer, space) {\n\n// The stringify method takes a value and an optional replacer, and an optional\n// space parameter, and returns a JSON text. The replacer can be a function\n// that can replace values, or an array of strings that will select the keys.\n// A default replacer method can be provided. Use of the space parameter can\n// produce text that is more easily readable.\n\n var i;\n gap = '';\n indent = '';\n\n// If the space parameter is a number, make an indent string containing that\n// many spaces.\n\n if (typeof space === 'number') {\n for (i = 0; i < space; i += 1) {\n indent += ' ';\n }\n\n// If the space parameter is a string, it will be used as the indent string.\n\n } else if (typeof space === 'string') {\n indent = space;\n }\n\n// If there is a replacer, it must be a function or an array.\n// Otherwise, throw an error.\n\n rep = replacer;\n if (replacer && typeof replacer !== 'function' &&\n (typeof replacer !== 'object' ||\n typeof replacer.length !== 'number')) {\n throw new Error('JSON.stringify');\n }\n\n// Make a fake root object containing our value under the key of ''.\n// Return the result of stringifying the value.\n\n return str('', {'': value});\n };\n }\n}());\n","var bufferEqual = require('buffer-equal-constant-time');\nvar Buffer = require('safe-buffer').Buffer;\nvar crypto = require('crypto');\nvar formatEcdsa = require('ecdsa-sig-formatter');\nvar util = require('util');\n\nvar MSG_INVALID_ALGORITHM = '\"%s\" is not a valid algorithm.\\n Supported algorithms are:\\n \"HS256\", \"HS384\", \"HS512\", \"RS256\", \"RS384\", \"RS512\", \"PS256\", \"PS384\", \"PS512\", \"ES256\", \"ES384\", \"ES512\" and \"none\".'\nvar MSG_INVALID_SECRET = 'secret must be a string or buffer';\nvar MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer';\nvar MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object';\n\nvar supportsKeyObjects = typeof crypto.createPublicKey === 'function';\nif (supportsKeyObjects) {\n MSG_INVALID_VERIFIER_KEY += ' or a KeyObject';\n MSG_INVALID_SECRET += 'or a KeyObject';\n}\n\nfunction checkIsPublicKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.type !== 'string') {\n throw 
typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.asymmetricKeyType !== 'string') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_VERIFIER_KEY);\n }\n};\n\nfunction checkIsPrivateKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return;\n }\n\n if (typeof key === 'object') {\n return;\n }\n\n throw typeError(MSG_INVALID_SIGNER_KEY);\n};\n\nfunction checkIsSecretKey(key) {\n if (Buffer.isBuffer(key)) {\n return;\n }\n\n if (typeof key === 'string') {\n return key;\n }\n\n if (!supportsKeyObjects) {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key !== 'object') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (key.type !== 'secret') {\n throw typeError(MSG_INVALID_SECRET);\n }\n\n if (typeof key.export !== 'function') {\n throw typeError(MSG_INVALID_SECRET);\n }\n}\n\nfunction fromBase64(base64) {\n return base64\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction toBase64(base64url) {\n base64url = base64url.toString();\n\n var padding = 4 - base64url.length % 4;\n if (padding !== 4) {\n for (var i = 0; i < padding; ++i) {\n base64url += '=';\n }\n }\n\n return base64url\n .replace(/\\-/g, '+')\n .replace(/_/g, '/');\n}\n\nfunction typeError(template) {\n var args = [].slice.call(arguments, 1);\n var errMsg = util.format.bind(util, template).apply(null, args);\n return new TypeError(errMsg);\n}\n\nfunction bufferOrString(obj) {\n return Buffer.isBuffer(obj) || typeof obj === 'string';\n}\n\nfunction normalizeInput(thing) {\n if (!bufferOrString(thing))\n thing = JSON.stringify(thing);\n return thing;\n}\n\nfunction createHmacSigner(bits) {\n return function sign(thing, secret) {\n checkIsSecretKey(secret);\n thing = normalizeInput(thing);\n var hmac = crypto.createHmac('sha' + bits, secret);\n var sig = (hmac.update(thing), hmac.digest('base64'))\n return fromBase64(sig);\n }\n}\n\nfunction createHmacVerifier(bits) {\n return function verify(thing, signature, secret) {\n var computedSig = createHmacSigner(bits)(thing, secret);\n return bufferEqual(Buffer.from(signature), Buffer.from(computedSig));\n }\n}\n\nfunction createKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n // Even though we are specifying \"RSA\" here, this works with ECDSA\n // keys as well.\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign(privateKey, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify(publicKey, signature, 'base64');\n }\n}\n\nfunction createPSSKeySigner(bits) {\n return function sign(thing, privateKey) {\n checkIsPrivateKey(privateKey);\n thing = normalizeInput(thing);\n var signer = crypto.createSign('RSA-SHA' + bits);\n var sig = (signer.update(thing), signer.sign({\n key: privateKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, 'base64'));\n return fromBase64(sig);\n }\n}\n\nfunction createPSSKeyVerifier(bits) {\n return function verify(thing, signature, publicKey) {\n checkIsPublicKey(publicKey);\n thing = normalizeInput(thing);\n signature = 
toBase64(signature);\n var verifier = crypto.createVerify('RSA-SHA' + bits);\n verifier.update(thing);\n return verifier.verify({\n key: publicKey,\n padding: crypto.constants.RSA_PKCS1_PSS_PADDING,\n saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST\n }, signature, 'base64');\n }\n}\n\nfunction createECDSASigner(bits) {\n var inner = createKeySigner(bits);\n return function sign() {\n var signature = inner.apply(null, arguments);\n signature = formatEcdsa.derToJose(signature, 'ES' + bits);\n return signature;\n };\n}\n\nfunction createECDSAVerifer(bits) {\n var inner = createKeyVerifier(bits);\n return function verify(thing, signature, publicKey) {\n signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64');\n var result = inner(thing, signature, publicKey);\n return result;\n };\n}\n\nfunction createNoneSigner() {\n return function sign() {\n return '';\n }\n}\n\nfunction createNoneVerifier() {\n return function verify(thing, signature) {\n return signature === '';\n }\n}\n\nmodule.exports = function jwa(algorithm) {\n var signerFactories = {\n hs: createHmacSigner,\n rs: createKeySigner,\n ps: createPSSKeySigner,\n es: createECDSASigner,\n none: createNoneSigner,\n }\n var verifierFactories = {\n hs: createHmacVerifier,\n rs: createKeyVerifier,\n ps: createPSSKeyVerifier,\n es: createECDSAVerifer,\n none: createNoneVerifier,\n }\n var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/);\n if (!match)\n throw typeError(MSG_INVALID_ALGORITHM, algorithm);\n var algo = (match[1] || match[3]).toLowerCase();\n var bits = match[2];\n\n return {\n sign: signerFactories[algo](bits),\n verify: verifierFactories[algo](bits),\n }\n};\n","/*global exports*/\nvar SignStream = require('./lib/sign-stream');\nvar VerifyStream = require('./lib/verify-stream');\n\nvar ALGORITHMS = [\n 'HS256', 'HS384', 'HS512',\n 'RS256', 'RS384', 'RS512',\n 'PS256', 'PS384', 'PS512',\n 'ES256', 'ES384', 'ES512'\n];\n\nexports.ALGORITHMS = ALGORITHMS;\nexports.sign = SignStream.sign;\nexports.verify = VerifyStream.verify;\nexports.decode = VerifyStream.decode;\nexports.isValid = VerifyStream.isValid;\nexports.createSign = function createSign(opts) {\n return new SignStream(opts);\n};\nexports.createVerify = function createVerify(opts) {\n return new VerifyStream(opts);\n};\n","/*global module, process*/\nvar Buffer = require('safe-buffer').Buffer;\nvar Stream = require('stream');\nvar util = require('util');\n\nfunction DataStream(data) {\n this.buffer = null;\n this.writable = true;\n this.readable = true;\n\n // No input\n if (!data) {\n this.buffer = Buffer.alloc(0);\n return this;\n }\n\n // Stream\n if (typeof data.pipe === 'function') {\n this.buffer = Buffer.alloc(0);\n data.pipe(this);\n return this;\n }\n\n // Buffer or String\n // or Object (assumedly a passworded key)\n if (data.length || typeof data === 'object') {\n this.buffer = data;\n this.writable = false;\n process.nextTick(function () {\n this.emit('end', data);\n this.readable = false;\n this.emit('close');\n }.bind(this));\n return this;\n }\n\n throw new TypeError('Unexpected data type ('+ typeof data + ')');\n}\nutil.inherits(DataStream, Stream);\n\nDataStream.prototype.write = function write(data) {\n this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]);\n this.emit('data', data);\n};\n\nDataStream.prototype.end = function end(data) {\n if (data)\n this.write(data);\n this.emit('end', data);\n this.emit('close');\n this.writable = false;\n this.readable = false;\n};\n\nmodule.exports = 
DataStream;\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\n\nfunction base64url(string, encoding) {\n return Buffer\n .from(string, encoding)\n .toString('base64')\n .replace(/=/g, '')\n .replace(/\\+/g, '-')\n .replace(/\\//g, '_');\n}\n\nfunction jwsSecuredInput(header, payload, encoding) {\n encoding = encoding || 'utf8';\n var encodedHeader = base64url(toString(header), 'binary');\n var encodedPayload = base64url(toString(payload), encoding);\n return util.format('%s.%s', encodedHeader, encodedPayload);\n}\n\nfunction jwsSign(opts) {\n var header = opts.header;\n var payload = opts.payload;\n var secretOrKey = opts.secret || opts.privateKey;\n var encoding = opts.encoding;\n var algo = jwa(header.alg);\n var securedInput = jwsSecuredInput(header, payload, encoding);\n var signature = algo.sign(securedInput, secretOrKey);\n return util.format('%s.%s', securedInput, signature);\n}\n\nfunction SignStream(opts) {\n var secret = opts.secret||opts.privateKey||opts.key;\n var secretStream = new DataStream(secret);\n this.readable = true;\n this.header = opts.header;\n this.encoding = opts.encoding;\n this.secret = this.privateKey = this.key = secretStream;\n this.payload = new DataStream(opts.payload);\n this.secret.once('close', function () {\n if (!this.payload.writable && this.readable)\n this.sign();\n }.bind(this));\n\n this.payload.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.sign();\n }.bind(this));\n}\nutil.inherits(SignStream, Stream);\n\nSignStream.prototype.sign = function sign() {\n try {\n var signature = jwsSign({\n header: this.header,\n payload: this.payload.buffer,\n secret: this.secret.buffer,\n encoding: this.encoding\n });\n this.emit('done', signature);\n this.emit('data', signature);\n this.emit('end');\n this.readable = false;\n return signature;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nSignStream.sign = jwsSign;\n\nmodule.exports = SignStream;\n","/*global module*/\nvar Buffer = require('buffer').Buffer;\n\nmodule.exports = function toString(obj) {\n if (typeof obj === 'string')\n return obj;\n if (typeof obj === 'number' || Buffer.isBuffer(obj))\n return obj.toString();\n return JSON.stringify(obj);\n};\n","/*global module*/\nvar Buffer = require('safe-buffer').Buffer;\nvar DataStream = require('./data-stream');\nvar jwa = require('jwa');\nvar Stream = require('stream');\nvar toString = require('./tostring');\nvar util = require('util');\nvar JWS_REGEX = /^[a-zA-Z0-9\\-_]+?\\.[a-zA-Z0-9\\-_]+?\\.([a-zA-Z0-9\\-_]+)?$/;\n\nfunction isObject(thing) {\n return Object.prototype.toString.call(thing) === '[object Object]';\n}\n\nfunction safeJsonParse(thing) {\n if (isObject(thing))\n return thing;\n try { return JSON.parse(thing); }\n catch (e) { return undefined; }\n}\n\nfunction headerFromJWS(jwsSig) {\n var encodedHeader = jwsSig.split('.', 1)[0];\n return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary'));\n}\n\nfunction securedInputFromJWS(jwsSig) {\n return jwsSig.split('.', 2).join('.');\n}\n\nfunction signatureFromJWS(jwsSig) {\n return jwsSig.split('.')[2];\n}\n\nfunction payloadFromJWS(jwsSig, encoding) {\n encoding = encoding || 'utf8';\n var payload = jwsSig.split('.')[1];\n return Buffer.from(payload, 'base64').toString(encoding);\n}\n\nfunction 
isValidJws(string) {\n return JWS_REGEX.test(string) && !!headerFromJWS(string);\n}\n\nfunction jwsVerify(jwsSig, algorithm, secretOrKey) {\n if (!algorithm) {\n var err = new Error(\"Missing algorithm parameter for jws.verify\");\n err.code = \"MISSING_ALGORITHM\";\n throw err;\n }\n jwsSig = toString(jwsSig);\n var signature = signatureFromJWS(jwsSig);\n var securedInput = securedInputFromJWS(jwsSig);\n var algo = jwa(algorithm);\n return algo.verify(securedInput, signature, secretOrKey);\n}\n\nfunction jwsDecode(jwsSig, opts) {\n opts = opts || {};\n jwsSig = toString(jwsSig);\n\n if (!isValidJws(jwsSig))\n return null;\n\n var header = headerFromJWS(jwsSig);\n\n if (!header)\n return null;\n\n var payload = payloadFromJWS(jwsSig);\n if (header.typ === 'JWT' || opts.json)\n payload = JSON.parse(payload, opts.encoding);\n\n return {\n header: header,\n payload: payload,\n signature: signatureFromJWS(jwsSig)\n };\n}\n\nfunction VerifyStream(opts) {\n opts = opts || {};\n var secretOrKey = opts.secret||opts.publicKey||opts.key;\n var secretStream = new DataStream(secretOrKey);\n this.readable = true;\n this.algorithm = opts.algorithm;\n this.encoding = opts.encoding;\n this.secret = this.publicKey = this.key = secretStream;\n this.signature = new DataStream(opts.signature);\n this.secret.once('close', function () {\n if (!this.signature.writable && this.readable)\n this.verify();\n }.bind(this));\n\n this.signature.once('close', function () {\n if (!this.secret.writable && this.readable)\n this.verify();\n }.bind(this));\n}\nutil.inherits(VerifyStream, Stream);\nVerifyStream.prototype.verify = function verify() {\n try {\n var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer);\n var obj = jwsDecode(this.signature.buffer, this.encoding);\n this.emit('done', valid, obj);\n this.emit('data', valid);\n this.emit('end');\n this.readable = false;\n return valid;\n } catch (e) {\n this.readable = false;\n this.emit('error', e);\n this.emit('close');\n }\n};\n\nVerifyStream.decode = jwsDecode;\nVerifyStream.isValid = isValidJws;\nVerifyStream.verify = jwsVerify;\n\nmodule.exports = VerifyStream;\n","/*!\n * mime-db\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015-2022 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if 
(match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' + path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","'use strict';\n\n/**\n * @param typeMap [Object] Map of MIME type -> Array[extensions]\n * @param ...\n */\nfunction Mime() {\n this._types = Object.create(null);\n this._extensions = Object.create(null);\n\n for (let i = 0; i < arguments.length; i++) {\n this.define(arguments[i]);\n }\n\n this.define = this.define.bind(this);\n this.getType = this.getType.bind(this);\n this.getExtension = this.getExtension.bind(this);\n}\n\n/**\n * Define mimetype -> extension mappings. Each key is a mime-type that maps\n * to an array of extensions associated with the type. The first extension is\n * used as the default extension for the type.\n *\n * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']});\n *\n * If a type declares an extension that has already been defined, an error will\n * be thrown. To suppress this error and force the extension to be associated\n * with the new type, pass `force`=true. Alternatively, you may prefix the\n * extension with \"*\" to map the type to extension, without mapping the\n * extension to the type.\n *\n * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']});\n *\n *\n * @param map (Object) type definitions\n * @param force (Boolean) if true, force overriding of existing definitions\n */\nMime.prototype.define = function(typeMap, force) {\n for (let type in typeMap) {\n let extensions = typeMap[type].map(function(t) {\n return t.toLowerCase();\n });\n type = type.toLowerCase();\n\n for (let i = 0; i < extensions.length; i++) {\n const ext = extensions[i];\n\n // '*' prefix = not the preferred type for this extension. So fixup the\n // extension, and skip it.\n if (ext[0] === '*') {\n continue;\n }\n\n if (!force && (ext in this._types)) {\n throw new Error(\n 'Attempt to change mapping for \"' + ext +\n '\" extension from \"' + this._types[ext] + '\" to \"' + type +\n '\". Pass `force=true` to allow this, otherwise remove \"' + ext +\n '\" from the list of extensions for \"' + type + '\".'\n );\n }\n\n this._types[ext] = type;\n }\n\n // Use first extension as default\n if (force || !this._extensions[type]) {\n const ext = extensions[0];\n this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1);\n }\n }\n};\n\n/**\n * Lookup a mime type based on extension\n */\nMime.prototype.getType = function(path) {\n path = String(path);\n let last = path.replace(/^.*[/\\\\]/, '').toLowerCase();\n let ext = last.replace(/^.*\\./, '').toLowerCase();\n\n let hasPath = last.length < path.length;\n let hasDot = ext.length < last.length - 1;\n\n return (hasDot || !hasPath) && this._types[ext] || null;\n};\n\n/**\n * Return file extension associated with a mime type\n */\nMime.prototype.getExtension = function(type) {\n type = /^\\s*([^;\\s]*)/.test(type) && RegExp.$1;\n return type && this._extensions[type.toLowerCase()] || null;\n};\n\nmodule.exports = Mime;\n","'use strict';\n\nlet Mime = require('./Mime');\nmodule.exports = new Mime(require('./types/standard'), require('./types/other'));\n","module.exports = 
{\"application/prs.cww\":[\"cww\"],\"application/vnd.1000minds.decision-model+xml\":[\"1km\"],\"application/vnd.3gpp.pic-bw-large\":[\"plb\"],\"application/vnd.3gpp.pic-bw-small\":[\"psb\"],\"application/vnd.3gpp.pic-bw-var\":[\"pvb\"],\"application/vnd.3gpp2.tcap\":[\"tcap\"],\"application/vnd.3m.post-it-notes\":[\"pwn\"],\"application/vnd.accpac.simply.aso\":[\"aso\"],\"application/vnd.accpac.simply.imp\":[\"imp\"],\"application/vnd.acucobol\":[\"acu\"],\"application/vnd.acucorp\":[\"atc\",\"acutc\"],\"application/vnd.adobe.air-application-installer-package+zip\":[\"air\"],\"application/vnd.adobe.formscentral.fcdt\":[\"fcdt\"],\"application/vnd.adobe.fxp\":[\"fxp\",\"fxpl\"],\"application/vnd.adobe.xdp+xml\":[\"xdp\"],\"application/vnd.adobe.xfdf\":[\"xfdf\"],\"application/vnd.ahead.space\":[\"ahead\"],\"application/vnd.airzip.filesecure.azf\":[\"azf\"],\"application/vnd.airzip.filesecure.azs\":[\"azs\"],\"application/vnd.amazon.ebook\":[\"azw\"],\"application/vnd.americandynamics.acc\":[\"acc\"],\"application/vnd.amiga.ami\":[\"ami\"],\"application/vnd.android.package-archive\":[\"apk\"],\"application/vnd.anser-web-certificate-issue-initiation\":[\"cii\"],\"application/vnd.anser-web-funds-transfer-initiation\":[\"fti\"],\"application/vnd.antix.game-component\":[\"atx\"],\"application/vnd.apple.installer+xml\":[\"mpkg\"],\"application/vnd.apple.keynote\":[\"key\"],\"application/vnd.apple.mpegurl\":[\"m3u8\"],\"application/vnd.apple.numbers\":[\"numbers\"],\"application/vnd.apple.pages\":[\"pages\"],\"application/vnd.apple.pkpass\":[\"pkpass\"],\"application/vnd.aristanetworks.swi\":[\"swi\"],\"application/vnd.astraea-software.iota\":[\"iota\"],\"application/vnd.audiograph\":[\"aep\"],\"application/vnd.balsamiq.bmml+xml\":[\"bmml\"],\"application/vnd.blueice.multipass\":[\"mpm\"],\"application/vnd.bmi\":[\"bmi\"],\"application/vnd.businessobjects\":[\"rep\"],\"application/vnd.chemdraw+xml\":[\"cdxml\"],\"application/vnd.chipnuts.karaoke-mmd\":[\"mmd\"],\"application/vnd.cinderella\":[\"cdy\"],\"application/vnd.citationstyles.style+xml\":[\"csl\"],\"application/vnd.claymore\":[\"cla\"],\"application/vnd.cloanto.rp9\":[\"rp9\"],\"application/vnd.clonk.c4group\":[\"c4g\",\"c4d\",\"c4f\",\"c4p\",\"c4u\"],\"application/vnd.cluetrust.cartomobile-config\":[\"c11amc\"],\"application/vnd.cluetrust.cartomobile-config-pkg\":[\"c11amz\"],\"application/vnd.commonspace\":[\"csp\"],\"application/vnd.contact.cmsg\":[\"cdbcmsg\"],\"application/vnd.cosmocaller\":[\"cmc\"],\"application/vnd.crick.clicker\":[\"clkx\"],\"application/vnd.crick.clicker.keyboard\":[\"clkk\"],\"application/vnd.crick.clicker.palette\":[\"clkp\"],\"application/vnd.crick.clicker.template\":[\"clkt\"],\"application/vnd.crick.clicker.wordbank\":[\"clkw\"],\"application/vnd.criticaltools.wbs+xml\":[\"wbs\"],\"application/vnd.ctc-posml\":[\"pml\"],\"application/vnd.cups-ppd\":[\"ppd\"],\"application/vnd.curl.car\":[\"car\"],\"application/vnd.curl.pcurl\":[\"pcurl\"],\"application/vnd.dart\":[\"dart\"],\"application/vnd.data-vision.rdz\":[\"rdz\"],\"application/vnd.dbf\":[\"dbf\"],\"application/vnd.dece.data\":[\"uvf\",\"uvvf\",\"uvd\",\"uvvd\"],\"application/vnd.dece.ttml+xml\":[\"uvt\",\"uvvt\"],\"application/vnd.dece.unspecified\":[\"uvx\",\"uvvx\"],\"application/vnd.dece.zip\":[\"uvz\",\"uvvz\"],\"application/vnd.denovo.fcselayout-link\":[\"fe_launch\"],\"application/vnd.dna\":[\"dna\"],\"application/vnd.dolby.mlp\":[\"mlp\"],\"application/vnd.dpgraph\":[\"dpg\"],\"application/vnd.dreamfactory\":[\"dfac\"],\"application/vnd.ds-keypoin
t\":[\"kpxx\"],\"application/vnd.dvb.ait\":[\"ait\"],\"application/vnd.dvb.service\":[\"svc\"],\"application/vnd.dynageo\":[\"geo\"],\"application/vnd.ecowin.chart\":[\"mag\"],\"application/vnd.enliven\":[\"nml\"],\"application/vnd.epson.esf\":[\"esf\"],\"application/vnd.epson.msf\":[\"msf\"],\"application/vnd.epson.quickanime\":[\"qam\"],\"application/vnd.epson.salt\":[\"slt\"],\"application/vnd.epson.ssf\":[\"ssf\"],\"application/vnd.eszigno3+xml\":[\"es3\",\"et3\"],\"application/vnd.ezpix-album\":[\"ez2\"],\"application/vnd.ezpix-package\":[\"ez3\"],\"application/vnd.fdf\":[\"fdf\"],\"application/vnd.fdsn.mseed\":[\"mseed\"],\"application/vnd.fdsn.seed\":[\"seed\",\"dataless\"],\"application/vnd.flographit\":[\"gph\"],\"application/vnd.fluxtime.clip\":[\"ftc\"],\"application/vnd.framemaker\":[\"fm\",\"frame\",\"maker\",\"book\"],\"application/vnd.frogans.fnc\":[\"fnc\"],\"application/vnd.frogans.ltf\":[\"ltf\"],\"application/vnd.fsc.weblaunch\":[\"fsc\"],\"application/vnd.fujitsu.oasys\":[\"oas\"],\"application/vnd.fujitsu.oasys2\":[\"oa2\"],\"application/vnd.fujitsu.oasys3\":[\"oa3\"],\"application/vnd.fujitsu.oasysgp\":[\"fg5\"],\"application/vnd.fujitsu.oasysprs\":[\"bh2\"],\"application/vnd.fujixerox.ddd\":[\"ddd\"],\"application/vnd.fujixerox.docuworks\":[\"xdw\"],\"application/vnd.fujixerox.docuworks.binder\":[\"xbd\"],\"application/vnd.fuzzysheet\":[\"fzs\"],\"application/vnd.genomatix.tuxedo\":[\"txd\"],\"application/vnd.geogebra.file\":[\"ggb\"],\"application/vnd.geogebra.tool\":[\"ggt\"],\"application/vnd.geometry-explorer\":[\"gex\",\"gre\"],\"application/vnd.geonext\":[\"gxt\"],\"application/vnd.geoplan\":[\"g2w\"],\"application/vnd.geospace\":[\"g3w\"],\"application/vnd.gmx\":[\"gmx\"],\"application/vnd.google-apps.document\":[\"gdoc\"],\"application/vnd.google-apps.presentation\":[\"gslides\"],\"application/vnd.google-apps.spreadsheet\":[\"gsheet\"],\"application/vnd.google-earth.kml+xml\":[\"kml\"],\"application/vnd.google-earth.kmz\":[\"kmz\"],\"application/vnd.grafeq\":[\"gqf\",\"gqs\"],\"application/vnd.groove-account\":[\"gac\"],\"application/vnd.groove-help\":[\"ghf\"],\"application/vnd.groove-identity-message\":[\"gim\"],\"application/vnd.groove-injector\":[\"grv\"],\"application/vnd.groove-tool-message\":[\"gtm\"],\"application/vnd.groove-tool-template\":[\"tpl\"],\"application/vnd.groove-vcard\":[\"vcg\"],\"application/vnd.hal+xml\":[\"hal\"],\"application/vnd.handheld-entertainment+xml\":[\"zmm\"],\"application/vnd.hbci\":[\"hbci\"],\"application/vnd.hhe.lesson-player\":[\"les\"],\"application/vnd.hp-hpgl\":[\"hpgl\"],\"application/vnd.hp-hpid\":[\"hpid\"],\"application/vnd.hp-hps\":[\"hps\"],\"application/vnd.hp-jlyt\":[\"jlt\"],\"application/vnd.hp-pcl\":[\"pcl\"],\"application/vnd.hp-pclxl\":[\"pclxl\"],\"application/vnd.hydrostatix.sof-data\":[\"sfd-hdstx\"],\"application/vnd.ibm.minipay\":[\"mpy\"],\"application/vnd.ibm.modcap\":[\"afp\",\"listafp\",\"list3820\"],\"application/vnd.ibm.rights-management\":[\"irm\"],\"application/vnd.ibm.secure-container\":[\"sc\"],\"application/vnd.iccprofile\":[\"icc\",\"icm\"],\"application/vnd.igloader\":[\"igl\"],\"application/vnd.immervision-ivp\":[\"ivp\"],\"application/vnd.immervision-ivu\":[\"ivu\"],\"application/vnd.insors.igm\":[\"igm\"],\"application/vnd.intercon.formnet\":[\"xpw\",\"xpx\"],\"application/vnd.intergeo\":[\"i2g\"],\"application/vnd.intu.qbo\":[\"qbo\"],\"application/vnd.intu.qfx\":[\"qfx\"],\"application/vnd.ipunplugged.rcprofile\":[\"rcprofile\"],\"application/vnd.irepository.package+xml\":[\"irp\"],
\"application/vnd.is-xpr\":[\"xpr\"],\"application/vnd.isac.fcs\":[\"fcs\"],\"application/vnd.jam\":[\"jam\"],\"application/vnd.jcp.javame.midlet-rms\":[\"rms\"],\"application/vnd.jisp\":[\"jisp\"],\"application/vnd.joost.joda-archive\":[\"joda\"],\"application/vnd.kahootz\":[\"ktz\",\"ktr\"],\"application/vnd.kde.karbon\":[\"karbon\"],\"application/vnd.kde.kchart\":[\"chrt\"],\"application/vnd.kde.kformula\":[\"kfo\"],\"application/vnd.kde.kivio\":[\"flw\"],\"application/vnd.kde.kontour\":[\"kon\"],\"application/vnd.kde.kpresenter\":[\"kpr\",\"kpt\"],\"application/vnd.kde.kspread\":[\"ksp\"],\"application/vnd.kde.kword\":[\"kwd\",\"kwt\"],\"application/vnd.kenameaapp\":[\"htke\"],\"application/vnd.kidspiration\":[\"kia\"],\"application/vnd.kinar\":[\"kne\",\"knp\"],\"application/vnd.koan\":[\"skp\",\"skd\",\"skt\",\"skm\"],\"application/vnd.kodak-descriptor\":[\"sse\"],\"application/vnd.las.las+xml\":[\"lasxml\"],\"application/vnd.llamagraphics.life-balance.desktop\":[\"lbd\"],\"application/vnd.llamagraphics.life-balance.exchange+xml\":[\"lbe\"],\"application/vnd.lotus-1-2-3\":[\"123\"],\"application/vnd.lotus-approach\":[\"apr\"],\"application/vnd.lotus-freelance\":[\"pre\"],\"application/vnd.lotus-notes\":[\"nsf\"],\"application/vnd.lotus-organizer\":[\"org\"],\"application/vnd.lotus-screencam\":[\"scm\"],\"application/vnd.lotus-wordpro\":[\"lwp\"],\"application/vnd.macports.portpkg\":[\"portpkg\"],\"application/vnd.mapbox-vector-tile\":[\"mvt\"],\"application/vnd.mcd\":[\"mcd\"],\"application/vnd.medcalcdata\":[\"mc1\"],\"application/vnd.mediastation.cdkey\":[\"cdkey\"],\"application/vnd.mfer\":[\"mwf\"],\"application/vnd.mfmp\":[\"mfm\"],\"application/vnd.micrografx.flo\":[\"flo\"],\"application/vnd.micrografx.igx\":[\"igx\"],\"application/vnd.mif\":[\"mif\"],\"application/vnd.mobius.daf\":[\"daf\"],\"application/vnd.mobius.dis\":[\"dis\"],\"application/vnd.mobius.mbk\":[\"mbk\"],\"application/vnd.mobius.mqy\":[\"mqy\"],\"application/vnd.mobius.msl\":[\"msl\"],\"application/vnd.mobius.plc\":[\"plc\"],\"application/vnd.mobius.txf\":[\"txf\"],\"application/vnd.mophun.application\":[\"mpn\"],\"application/vnd.mophun.certificate\":[\"mpc\"],\"application/vnd.mozilla.xul+xml\":[\"xul\"],\"application/vnd.ms-artgalry\":[\"cil\"],\"application/vnd.ms-cab-compressed\":[\"cab\"],\"application/vnd.ms-excel\":[\"xls\",\"xlm\",\"xla\",\"xlc\",\"xlt\",\"xlw\"],\"application/vnd.ms-excel.addin.macroenabled.12\":[\"xlam\"],\"application/vnd.ms-excel.sheet.binary.macroenabled.12\":[\"xlsb\"],\"application/vnd.ms-excel.sheet.macroenabled.12\":[\"xlsm\"],\"application/vnd.ms-excel.template.macroenabled.12\":[\"xltm\"],\"application/vnd.ms-fontobject\":[\"eot\"],\"application/vnd.ms-htmlhelp\":[\"chm\"],\"application/vnd.ms-ims\":[\"ims\"],\"application/vnd.ms-lrm\":[\"lrm\"],\"application/vnd.ms-officetheme\":[\"thmx\"],\"application/vnd.ms-outlook\":[\"msg\"],\"application/vnd.ms-pki.seccat\":[\"cat\"],\"application/vnd.ms-pki.stl\":[\"*stl\"],\"application/vnd.ms-powerpoint\":[\"ppt\",\"pps\",\"pot\"],\"application/vnd.ms-powerpoint.addin.macroenabled.12\":[\"ppam\"],\"application/vnd.ms-powerpoint.presentation.macroenabled.12\":[\"pptm\"],\"application/vnd.ms-powerpoint.slide.macroenabled.12\":[\"sldm\"],\"application/vnd.ms-powerpoint.slideshow.macroenabled.12\":[\"ppsm\"],\"application/vnd.ms-powerpoint.template.macroenabled.12\":[\"potm\"],\"application/vnd.ms-project\":[\"mpp\",\"mpt\"],\"application/vnd.ms-word.document.macroenabled.12\":[\"docm\"],\"application/vnd.ms-word.template.macroenabled
.12\":[\"dotm\"],\"application/vnd.ms-works\":[\"wps\",\"wks\",\"wcm\",\"wdb\"],\"application/vnd.ms-wpl\":[\"wpl\"],\"application/vnd.ms-xpsdocument\":[\"xps\"],\"application/vnd.mseq\":[\"mseq\"],\"application/vnd.musician\":[\"mus\"],\"application/vnd.muvee.style\":[\"msty\"],\"application/vnd.mynfc\":[\"taglet\"],\"application/vnd.neurolanguage.nlu\":[\"nlu\"],\"application/vnd.nitf\":[\"ntf\",\"nitf\"],\"application/vnd.noblenet-directory\":[\"nnd\"],\"application/vnd.noblenet-sealer\":[\"nns\"],\"application/vnd.noblenet-web\":[\"nnw\"],\"application/vnd.nokia.n-gage.ac+xml\":[\"*ac\"],\"application/vnd.nokia.n-gage.data\":[\"ngdat\"],\"application/vnd.nokia.n-gage.symbian.install\":[\"n-gage\"],\"application/vnd.nokia.radio-preset\":[\"rpst\"],\"application/vnd.nokia.radio-presets\":[\"rpss\"],\"application/vnd.novadigm.edm\":[\"edm\"],\"application/vnd.novadigm.edx\":[\"edx\"],\"application/vnd.novadigm.ext\":[\"ext\"],\"application/vnd.oasis.opendocument.chart\":[\"odc\"],\"application/vnd.oasis.opendocument.chart-template\":[\"otc\"],\"application/vnd.oasis.opendocument.database\":[\"odb\"],\"application/vnd.oasis.opendocument.formula\":[\"odf\"],\"application/vnd.oasis.opendocument.formula-template\":[\"odft\"],\"application/vnd.oasis.opendocument.graphics\":[\"odg\"],\"application/vnd.oasis.opendocument.graphics-template\":[\"otg\"],\"application/vnd.oasis.opendocument.image\":[\"odi\"],\"application/vnd.oasis.opendocument.image-template\":[\"oti\"],\"application/vnd.oasis.opendocument.presentation\":[\"odp\"],\"application/vnd.oasis.opendocument.presentation-template\":[\"otp\"],\"application/vnd.oasis.opendocument.spreadsheet\":[\"ods\"],\"application/vnd.oasis.opendocument.spreadsheet-template\":[\"ots\"],\"application/vnd.oasis.opendocument.text\":[\"odt\"],\"application/vnd.oasis.opendocument.text-master\":[\"odm\"],\"application/vnd.oasis.opendocument.text-template\":[\"ott\"],\"application/vnd.oasis.opendocument.text-web\":[\"oth\"],\"application/vnd.olpc-sugar\":[\"xo\"],\"application/vnd.oma.dd2+xml\":[\"dd2\"],\"application/vnd.openblox.game+xml\":[\"obgx\"],\"application/vnd.openofficeorg.extension\":[\"oxt\"],\"application/vnd.openstreetmap.data+xml\":[\"osm\"],\"application/vnd.openxmlformats-officedocument.presentationml.presentation\":[\"pptx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slide\":[\"sldx\"],\"application/vnd.openxmlformats-officedocument.presentationml.slideshow\":[\"ppsx\"],\"application/vnd.openxmlformats-officedocument.presentationml.template\":[\"potx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\":[\"xlsx\"],\"application/vnd.openxmlformats-officedocument.spreadsheetml.template\":[\"xltx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.document\":[\"docx\"],\"application/vnd.openxmlformats-officedocument.wordprocessingml.template\":[\"dotx\"],\"application/vnd.osgeo.mapguide.package\":[\"mgp\"],\"application/vnd.osgi.dp\":[\"dp\"],\"application/vnd.osgi.subsystem\":[\"esa\"],\"application/vnd.palm\":[\"pdb\",\"pqa\",\"oprc\"],\"application/vnd.pawaafile\":[\"paw\"],\"application/vnd.pg.format\":[\"str\"],\"application/vnd.pg.osasli\":[\"ei6\"],\"application/vnd.picsel\":[\"efif\"],\"application/vnd.pmi.widget\":[\"wg\"],\"application/vnd.pocketlearn\":[\"plf\"],\"application/vnd.powerbuilder6\":[\"pbd\"],\"application/vnd.previewsystems.box\":[\"box\"],\"application/vnd.proteus.magazine\":[\"mgz\"],\"application/vnd.publishare-delta-tree\":[\"qps\"],\"application/vnd.pvi.ptid1\":[
\"ptid\"],\"application/vnd.quark.quarkxpress\":[\"qxd\",\"qxt\",\"qwd\",\"qwt\",\"qxl\",\"qxb\"],\"application/vnd.rar\":[\"rar\"],\"application/vnd.realvnc.bed\":[\"bed\"],\"application/vnd.recordare.musicxml\":[\"mxl\"],\"application/vnd.recordare.musicxml+xml\":[\"musicxml\"],\"application/vnd.rig.cryptonote\":[\"cryptonote\"],\"application/vnd.rim.cod\":[\"cod\"],\"application/vnd.rn-realmedia\":[\"rm\"],\"application/vnd.rn-realmedia-vbr\":[\"rmvb\"],\"application/vnd.route66.link66+xml\":[\"link66\"],\"application/vnd.sailingtracker.track\":[\"st\"],\"application/vnd.seemail\":[\"see\"],\"application/vnd.sema\":[\"sema\"],\"application/vnd.semd\":[\"semd\"],\"application/vnd.semf\":[\"semf\"],\"application/vnd.shana.informed.formdata\":[\"ifm\"],\"application/vnd.shana.informed.formtemplate\":[\"itp\"],\"application/vnd.shana.informed.interchange\":[\"iif\"],\"application/vnd.shana.informed.package\":[\"ipk\"],\"application/vnd.simtech-mindmapper\":[\"twd\",\"twds\"],\"application/vnd.smaf\":[\"mmf\"],\"application/vnd.smart.teacher\":[\"teacher\"],\"application/vnd.software602.filler.form+xml\":[\"fo\"],\"application/vnd.solent.sdkm+xml\":[\"sdkm\",\"sdkd\"],\"application/vnd.spotfire.dxp\":[\"dxp\"],\"application/vnd.spotfire.sfs\":[\"sfs\"],\"application/vnd.stardivision.calc\":[\"sdc\"],\"application/vnd.stardivision.draw\":[\"sda\"],\"application/vnd.stardivision.impress\":[\"sdd\"],\"application/vnd.stardivision.math\":[\"smf\"],\"application/vnd.stardivision.writer\":[\"sdw\",\"vor\"],\"application/vnd.stardivision.writer-global\":[\"sgl\"],\"application/vnd.stepmania.package\":[\"smzip\"],\"application/vnd.stepmania.stepchart\":[\"sm\"],\"application/vnd.sun.wadl+xml\":[\"wadl\"],\"application/vnd.sun.xml.calc\":[\"sxc\"],\"application/vnd.sun.xml.calc.template\":[\"stc\"],\"application/vnd.sun.xml.draw\":[\"sxd\"],\"application/vnd.sun.xml.draw.template\":[\"std\"],\"application/vnd.sun.xml.impress\":[\"sxi\"],\"application/vnd.sun.xml.impress.template\":[\"sti\"],\"application/vnd.sun.xml.math\":[\"sxm\"],\"application/vnd.sun.xml.writer\":[\"sxw\"],\"application/vnd.sun.xml.writer.global\":[\"sxg\"],\"application/vnd.sun.xml.writer.template\":[\"stw\"],\"application/vnd.sus-calendar\":[\"sus\",\"susp\"],\"application/vnd.svd\":[\"svd\"],\"application/vnd.symbian.install\":[\"sis\",\"sisx\"],\"application/vnd.syncml+xml\":[\"xsm\"],\"application/vnd.syncml.dm+wbxml\":[\"bdm\"],\"application/vnd.syncml.dm+xml\":[\"xdm\"],\"application/vnd.syncml.dmddf+xml\":[\"ddf\"],\"application/vnd.tao.intent-module-archive\":[\"tao\"],\"application/vnd.tcpdump.pcap\":[\"pcap\",\"cap\",\"dmp\"],\"application/vnd.tmobile-livetv\":[\"tmo\"],\"application/vnd.trid.tpt\":[\"tpt\"],\"application/vnd.triscape.mxs\":[\"mxs\"],\"application/vnd.trueapp\":[\"tra\"],\"application/vnd.ufdl\":[\"ufd\",\"ufdl\"],\"application/vnd.uiq.theme\":[\"utz\"],\"application/vnd.umajin\":[\"umj\"],\"application/vnd.unity\":[\"unityweb\"],\"application/vnd.uoml+xml\":[\"uoml\"],\"application/vnd.vcx\":[\"vcx\"],\"application/vnd.visio\":[\"vsd\",\"vst\",\"vss\",\"vsw\"],\"application/vnd.visionary\":[\"vis\"],\"application/vnd.vsf\":[\"vsf\"],\"application/vnd.wap.wbxml\":[\"wbxml\"],\"application/vnd.wap.wmlc\":[\"wmlc\"],\"application/vnd.wap.wmlscriptc\":[\"wmlsc\"],\"application/vnd.webturbo\":[\"wtb\"],\"application/vnd.wolfram.player\":[\"nbp\"],\"application/vnd.wordperfect\":[\"wpd\"],\"application/vnd.wqd\":[\"wqd\"],\"application/vnd.wt.stf\":[\"stf\"],\"application/vnd.xara\":[\"xar\"],\"application/v
nd.xfdl\":[\"xfdl\"],\"application/vnd.yamaha.hv-dic\":[\"hvd\"],\"application/vnd.yamaha.hv-script\":[\"hvs\"],\"application/vnd.yamaha.hv-voice\":[\"hvp\"],\"application/vnd.yamaha.openscoreformat\":[\"osf\"],\"application/vnd.yamaha.openscoreformat.osfpvg+xml\":[\"osfpvg\"],\"application/vnd.yamaha.smaf-audio\":[\"saf\"],\"application/vnd.yamaha.smaf-phrase\":[\"spf\"],\"application/vnd.yellowriver-custom-menu\":[\"cmp\"],\"application/vnd.zul\":[\"zir\",\"zirz\"],\"application/vnd.zzazz.deck+xml\":[\"zaz\"],\"application/x-7z-compressed\":[\"7z\"],\"application/x-abiword\":[\"abw\"],\"application/x-ace-compressed\":[\"ace\"],\"application/x-apple-diskimage\":[\"*dmg\"],\"application/x-arj\":[\"arj\"],\"application/x-authorware-bin\":[\"aab\",\"x32\",\"u32\",\"vox\"],\"application/x-authorware-map\":[\"aam\"],\"application/x-authorware-seg\":[\"aas\"],\"application/x-bcpio\":[\"bcpio\"],\"application/x-bdoc\":[\"*bdoc\"],\"application/x-bittorrent\":[\"torrent\"],\"application/x-blorb\":[\"blb\",\"blorb\"],\"application/x-bzip\":[\"bz\"],\"application/x-bzip2\":[\"bz2\",\"boz\"],\"application/x-cbr\":[\"cbr\",\"cba\",\"cbt\",\"cbz\",\"cb7\"],\"application/x-cdlink\":[\"vcd\"],\"application/x-cfs-compressed\":[\"cfs\"],\"application/x-chat\":[\"chat\"],\"application/x-chess-pgn\":[\"pgn\"],\"application/x-chrome-extension\":[\"crx\"],\"application/x-cocoa\":[\"cco\"],\"application/x-conference\":[\"nsc\"],\"application/x-cpio\":[\"cpio\"],\"application/x-csh\":[\"csh\"],\"application/x-debian-package\":[\"*deb\",\"udeb\"],\"application/x-dgc-compressed\":[\"dgc\"],\"application/x-director\":[\"dir\",\"dcr\",\"dxr\",\"cst\",\"cct\",\"cxt\",\"w3d\",\"fgd\",\"swa\"],\"application/x-doom\":[\"wad\"],\"application/x-dtbncx+xml\":[\"ncx\"],\"application/x-dtbook+xml\":[\"dtb\"],\"application/x-dtbresource+xml\":[\"res\"],\"application/x-dvi\":[\"dvi\"],\"application/x-envoy\":[\"evy\"],\"application/x-eva\":[\"eva\"],\"application/x-font-bdf\":[\"bdf\"],\"application/x-font-ghostscript\":[\"gsf\"],\"application/x-font-linux-psf\":[\"psf\"],\"application/x-font-pcf\":[\"pcf\"],\"application/x-font-snf\":[\"snf\"],\"application/x-font-type1\":[\"pfa\",\"pfb\",\"pfm\",\"afm\"],\"application/x-freearc\":[\"arc\"],\"application/x-futuresplash\":[\"spl\"],\"application/x-gca-compressed\":[\"gca\"],\"application/x-glulx\":[\"ulx\"],\"application/x-gnumeric\":[\"gnumeric\"],\"application/x-gramps-xml\":[\"gramps\"],\"application/x-gtar\":[\"gtar\"],\"application/x-hdf\":[\"hdf\"],\"application/x-httpd-php\":[\"php\"],\"application/x-install-instructions\":[\"install\"],\"application/x-iso9660-image\":[\"*iso\"],\"application/x-iwork-keynote-sffkey\":[\"*key\"],\"application/x-iwork-numbers-sffnumbers\":[\"*numbers\"],\"application/x-iwork-pages-sffpages\":[\"*pages\"],\"application/x-java-archive-diff\":[\"jardiff\"],\"application/x-java-jnlp-file\":[\"jnlp\"],\"application/x-keepass2\":[\"kdbx\"],\"application/x-latex\":[\"latex\"],\"application/x-lua-bytecode\":[\"luac\"],\"application/x-lzh-compressed\":[\"lzh\",\"lha\"],\"application/x-makeself\":[\"run\"],\"application/x-mie\":[\"mie\"],\"application/x-mobipocket-ebook\":[\"prc\",\"mobi\"],\"application/x-ms-application\":[\"application\"],\"application/x-ms-shortcut\":[\"lnk\"],\"application/x-ms-wmd\":[\"wmd\"],\"application/x-ms-wmz\":[\"wmz\"],\"application/x-ms-xbap\":[\"xbap\"],\"application/x-msaccess\":[\"mdb\"],\"application/x-msbinder\":[\"obd\"],\"application/x-mscardfile\":[\"crd\"],\"application/x-msclip\":[\"clp\"],\"application/x-ms
dos-program\":[\"*exe\"],\"application/x-msdownload\":[\"*exe\",\"*dll\",\"com\",\"bat\",\"*msi\"],\"application/x-msmediaview\":[\"mvb\",\"m13\",\"m14\"],\"application/x-msmetafile\":[\"*wmf\",\"*wmz\",\"*emf\",\"emz\"],\"application/x-msmoney\":[\"mny\"],\"application/x-mspublisher\":[\"pub\"],\"application/x-msschedule\":[\"scd\"],\"application/x-msterminal\":[\"trm\"],\"application/x-mswrite\":[\"wri\"],\"application/x-netcdf\":[\"nc\",\"cdf\"],\"application/x-ns-proxy-autoconfig\":[\"pac\"],\"application/x-nzb\":[\"nzb\"],\"application/x-perl\":[\"pl\",\"pm\"],\"application/x-pilot\":[\"*prc\",\"*pdb\"],\"application/x-pkcs12\":[\"p12\",\"pfx\"],\"application/x-pkcs7-certificates\":[\"p7b\",\"spc\"],\"application/x-pkcs7-certreqresp\":[\"p7r\"],\"application/x-rar-compressed\":[\"*rar\"],\"application/x-redhat-package-manager\":[\"rpm\"],\"application/x-research-info-systems\":[\"ris\"],\"application/x-sea\":[\"sea\"],\"application/x-sh\":[\"sh\"],\"application/x-shar\":[\"shar\"],\"application/x-shockwave-flash\":[\"swf\"],\"application/x-silverlight-app\":[\"xap\"],\"application/x-sql\":[\"sql\"],\"application/x-stuffit\":[\"sit\"],\"application/x-stuffitx\":[\"sitx\"],\"application/x-subrip\":[\"srt\"],\"application/x-sv4cpio\":[\"sv4cpio\"],\"application/x-sv4crc\":[\"sv4crc\"],\"application/x-t3vm-image\":[\"t3\"],\"application/x-tads\":[\"gam\"],\"application/x-tar\":[\"tar\"],\"application/x-tcl\":[\"tcl\",\"tk\"],\"application/x-tex\":[\"tex\"],\"application/x-tex-tfm\":[\"tfm\"],\"application/x-texinfo\":[\"texinfo\",\"texi\"],\"application/x-tgif\":[\"*obj\"],\"application/x-ustar\":[\"ustar\"],\"application/x-virtualbox-hdd\":[\"hdd\"],\"application/x-virtualbox-ova\":[\"ova\"],\"application/x-virtualbox-ovf\":[\"ovf\"],\"application/x-virtualbox-vbox\":[\"vbox\"],\"application/x-virtualbox-vbox-extpack\":[\"vbox-extpack\"],\"application/x-virtualbox-vdi\":[\"vdi\"],\"application/x-virtualbox-vhd\":[\"vhd\"],\"application/x-virtualbox-vmdk\":[\"vmdk\"],\"application/x-wais-source\":[\"src\"],\"application/x-web-app-manifest+json\":[\"webapp\"],\"application/x-x509-ca-cert\":[\"der\",\"crt\",\"pem\"],\"application/x-xfig\":[\"fig\"],\"application/x-xliff+xml\":[\"*xlf\"],\"application/x-xpinstall\":[\"xpi\"],\"application/x-xz\":[\"xz\"],\"application/x-zmachine\":[\"z1\",\"z2\",\"z3\",\"z4\",\"z5\",\"z6\",\"z7\",\"z8\"],\"audio/vnd.dece.audio\":[\"uva\",\"uvva\"],\"audio/vnd.digital-winds\":[\"eol\"],\"audio/vnd.dra\":[\"dra\"],\"audio/vnd.dts\":[\"dts\"],\"audio/vnd.dts.hd\":[\"dtshd\"],\"audio/vnd.lucent.voice\":[\"lvp\"],\"audio/vnd.ms-playready.media.pya\":[\"pya\"],\"audio/vnd.nuera.ecelp4800\":[\"ecelp4800\"],\"audio/vnd.nuera.ecelp7470\":[\"ecelp7470\"],\"audio/vnd.nuera.ecelp9600\":[\"ecelp9600\"],\"audio/vnd.rip\":[\"rip\"],\"audio/x-aac\":[\"aac\"],\"audio/x-aiff\":[\"aif\",\"aiff\",\"aifc\"],\"audio/x-caf\":[\"caf\"],\"audio/x-flac\":[\"flac\"],\"audio/x-m4a\":[\"*m4a\"],\"audio/x-matroska\":[\"mka\"],\"audio/x-mpegurl\":[\"m3u\"],\"audio/x-ms-wax\":[\"wax\"],\"audio/x-ms-wma\":[\"wma\"],\"audio/x-pn-realaudio\":[\"ram\",\"ra\"],\"audio/x-pn-realaudio-plugin\":[\"rmp\"],\"audio/x-realaudio\":[\"*ra\"],\"audio/x-wav\":[\"*wav\"],\"chemical/x-cdx\":[\"cdx\"],\"chemical/x-cif\":[\"cif\"],\"chemical/x-cmdf\":[\"cmdf\"],\"chemical/x-cml\":[\"cml\"],\"chemical/x-csml\":[\"csml\"],\"chemical/x-xyz\":[\"xyz\"],\"image/prs.btif\":[\"btif\"],\"image/prs.pti\":[\"pti\"],\"image/vnd.adobe.photoshop\":[\"psd\"],\"image/vnd.airzip.accelerator.azv\":[\"azv\"],\"image/vnd.dece.gr
aphic\":[\"uvi\",\"uvvi\",\"uvg\",\"uvvg\"],\"image/vnd.djvu\":[\"djvu\",\"djv\"],\"image/vnd.dvb.subtitle\":[\"*sub\"],\"image/vnd.dwg\":[\"dwg\"],\"image/vnd.dxf\":[\"dxf\"],\"image/vnd.fastbidsheet\":[\"fbs\"],\"image/vnd.fpx\":[\"fpx\"],\"image/vnd.fst\":[\"fst\"],\"image/vnd.fujixerox.edmics-mmr\":[\"mmr\"],\"image/vnd.fujixerox.edmics-rlc\":[\"rlc\"],\"image/vnd.microsoft.icon\":[\"ico\"],\"image/vnd.ms-dds\":[\"dds\"],\"image/vnd.ms-modi\":[\"mdi\"],\"image/vnd.ms-photo\":[\"wdp\"],\"image/vnd.net-fpx\":[\"npx\"],\"image/vnd.pco.b16\":[\"b16\"],\"image/vnd.tencent.tap\":[\"tap\"],\"image/vnd.valve.source.texture\":[\"vtf\"],\"image/vnd.wap.wbmp\":[\"wbmp\"],\"image/vnd.xiff\":[\"xif\"],\"image/vnd.zbrush.pcx\":[\"pcx\"],\"image/x-3ds\":[\"3ds\"],\"image/x-cmu-raster\":[\"ras\"],\"image/x-cmx\":[\"cmx\"],\"image/x-freehand\":[\"fh\",\"fhc\",\"fh4\",\"fh5\",\"fh7\"],\"image/x-icon\":[\"*ico\"],\"image/x-jng\":[\"jng\"],\"image/x-mrsid-image\":[\"sid\"],\"image/x-ms-bmp\":[\"*bmp\"],\"image/x-pcx\":[\"*pcx\"],\"image/x-pict\":[\"pic\",\"pct\"],\"image/x-portable-anymap\":[\"pnm\"],\"image/x-portable-bitmap\":[\"pbm\"],\"image/x-portable-graymap\":[\"pgm\"],\"image/x-portable-pixmap\":[\"ppm\"],\"image/x-rgb\":[\"rgb\"],\"image/x-tga\":[\"tga\"],\"image/x-xbitmap\":[\"xbm\"],\"image/x-xpixmap\":[\"xpm\"],\"image/x-xwindowdump\":[\"xwd\"],\"message/vnd.wfa.wsc\":[\"wsc\"],\"model/vnd.collada+xml\":[\"dae\"],\"model/vnd.dwf\":[\"dwf\"],\"model/vnd.gdl\":[\"gdl\"],\"model/vnd.gtw\":[\"gtw\"],\"model/vnd.mts\":[\"mts\"],\"model/vnd.opengex\":[\"ogex\"],\"model/vnd.parasolid.transmit.binary\":[\"x_b\"],\"model/vnd.parasolid.transmit.text\":[\"x_t\"],\"model/vnd.sap.vds\":[\"vds\"],\"model/vnd.usdz+zip\":[\"usdz\"],\"model/vnd.valve.source.compiled-map\":[\"bsp\"],\"model/vnd.vtu\":[\"vtu\"],\"text/prs.lines.tag\":[\"dsc\"],\"text/vnd.curl\":[\"curl\"],\"text/vnd.curl.dcurl\":[\"dcurl\"],\"text/vnd.curl.mcurl\":[\"mcurl\"],\"text/vnd.curl.scurl\":[\"scurl\"],\"text/vnd.dvb.subtitle\":[\"sub\"],\"text/vnd.fly\":[\"fly\"],\"text/vnd.fmi.flexstor\":[\"flx\"],\"text/vnd.graphviz\":[\"gv\"],\"text/vnd.in3d.3dml\":[\"3dml\"],\"text/vnd.in3d.spot\":[\"spot\"],\"text/vnd.sun.j2me.app-descriptor\":[\"jad\"],\"text/vnd.wap.wml\":[\"wml\"],\"text/vnd.wap.wmlscript\":[\"wmls\"],\"text/x-asm\":[\"s\",\"asm\"],\"text/x-c\":[\"c\",\"cc\",\"cxx\",\"cpp\",\"h\",\"hh\",\"dic\"],\"text/x-component\":[\"htc\"],\"text/x-fortran\":[\"f\",\"for\",\"f77\",\"f90\"],\"text/x-handlebars-template\":[\"hbs\"],\"text/x-java-source\":[\"java\"],\"text/x-lua\":[\"lua\"],\"text/x-markdown\":[\"mkd\"],\"text/x-nfo\":[\"nfo\"],\"text/x-opml\":[\"opml\"],\"text/x-org\":[\"*org\"],\"text/x-pascal\":[\"p\",\"pas\"],\"text/x-processing\":[\"pde\"],\"text/x-sass\":[\"sass\"],\"text/x-scss\":[\"scss\"],\"text/x-setext\":[\"etx\"],\"text/x-sfv\":[\"sfv\"],\"text/x-suse-ymp\":[\"ymp\"],\"text/x-uuencode\":[\"uu\"],\"text/x-vcalendar\":[\"vcs\"],\"text/x-vcard\":[\"vcf\"],\"video/vnd.dece.hd\":[\"uvh\",\"uvvh\"],\"video/vnd.dece.mobile\":[\"uvm\",\"uvvm\"],\"video/vnd.dece.pd\":[\"uvp\",\"uvvp\"],\"video/vnd.dece.sd\":[\"uvs\",\"uvvs\"],\"video/vnd.dece.video\":[\"uvv\",\"uvvv\"],\"video/vnd.dvb.file\":[\"dvb\"],\"video/vnd.fvt\":[\"fvt\"],\"video/vnd.mpegurl\":[\"mxu\",\"m4u\"],\"video/vnd.ms-playready.media.pyv\":[\"pyv\"],\"video/vnd.uvvu.mp4\":[\"uvu\",\"uvvu\"],\"video/vnd.vivo\":[\"viv\"],\"video/x-f4v\":[\"f4v\"],\"video/x-fli\":[\"fli\"],\"video/x-flv\":[\"flv\"],\"video/x-m4v\":[\"m4v\"],\"video/x-matroska\":[\"mkv\",\"mk3d\",\
"mks\"],\"video/x-mng\":[\"mng\"],\"video/x-ms-asf\":[\"asf\",\"asx\"],\"video/x-ms-vob\":[\"vob\"],\"video/x-ms-wm\":[\"wm\"],\"video/x-ms-wmv\":[\"wmv\"],\"video/x-ms-wmx\":[\"wmx\"],\"video/x-ms-wvx\":[\"wvx\"],\"video/x-msvideo\":[\"avi\"],\"video/x-sgi-movie\":[\"movie\"],\"video/x-smv\":[\"smv\"],\"x-conference/x-cooltalk\":[\"ice\"]};","module.exports = {\"application/andrew-inset\":[\"ez\"],\"application/applixware\":[\"aw\"],\"application/atom+xml\":[\"atom\"],\"application/atomcat+xml\":[\"atomcat\"],\"application/atomdeleted+xml\":[\"atomdeleted\"],\"application/atomsvc+xml\":[\"atomsvc\"],\"application/atsc-dwd+xml\":[\"dwd\"],\"application/atsc-held+xml\":[\"held\"],\"application/atsc-rsat+xml\":[\"rsat\"],\"application/bdoc\":[\"bdoc\"],\"application/calendar+xml\":[\"xcs\"],\"application/ccxml+xml\":[\"ccxml\"],\"application/cdfx+xml\":[\"cdfx\"],\"application/cdmi-capability\":[\"cdmia\"],\"application/cdmi-container\":[\"cdmic\"],\"application/cdmi-domain\":[\"cdmid\"],\"application/cdmi-object\":[\"cdmio\"],\"application/cdmi-queue\":[\"cdmiq\"],\"application/cu-seeme\":[\"cu\"],\"application/dash+xml\":[\"mpd\"],\"application/davmount+xml\":[\"davmount\"],\"application/docbook+xml\":[\"dbk\"],\"application/dssc+der\":[\"dssc\"],\"application/dssc+xml\":[\"xdssc\"],\"application/ecmascript\":[\"es\",\"ecma\"],\"application/emma+xml\":[\"emma\"],\"application/emotionml+xml\":[\"emotionml\"],\"application/epub+zip\":[\"epub\"],\"application/exi\":[\"exi\"],\"application/express\":[\"exp\"],\"application/fdt+xml\":[\"fdt\"],\"application/font-tdpfr\":[\"pfr\"],\"application/geo+json\":[\"geojson\"],\"application/gml+xml\":[\"gml\"],\"application/gpx+xml\":[\"gpx\"],\"application/gxf\":[\"gxf\"],\"application/gzip\":[\"gz\"],\"application/hjson\":[\"hjson\"],\"application/hyperstudio\":[\"stk\"],\"application/inkml+xml\":[\"ink\",\"inkml\"],\"application/ipfix\":[\"ipfix\"],\"application/its+xml\":[\"its\"],\"application/java-archive\":[\"jar\",\"war\",\"ear\"],\"application/java-serialized-object\":[\"ser\"],\"application/java-vm\":[\"class\"],\"application/javascript\":[\"js\",\"mjs\"],\"application/json\":[\"json\",\"map\"],\"application/json5\":[\"json5\"],\"application/jsonml+json\":[\"jsonml\"],\"application/ld+json\":[\"jsonld\"],\"application/lgr+xml\":[\"lgr\"],\"application/lost+xml\":[\"lostxml\"],\"application/mac-binhex40\":[\"hqx\"],\"application/mac-compactpro\":[\"cpt\"],\"application/mads+xml\":[\"mads\"],\"application/manifest+json\":[\"webmanifest\"],\"application/marc\":[\"mrc\"],\"application/marcxml+xml\":[\"mrcx\"],\"application/mathematica\":[\"ma\",\"nb\",\"mb\"],\"application/mathml+xml\":[\"mathml\"],\"application/mbox\":[\"mbox\"],\"application/mediaservercontrol+xml\":[\"mscml\"],\"application/metalink+xml\":[\"metalink\"],\"application/metalink4+xml\":[\"meta4\"],\"application/mets+xml\":[\"mets\"],\"application/mmt-aei+xml\":[\"maei\"],\"application/mmt-usd+xml\":[\"musd\"],\"application/mods+xml\":[\"mods\"],\"application/mp21\":[\"m21\",\"mp21\"],\"application/mp4\":[\"mp4s\",\"m4p\"],\"application/msword\":[\"doc\",\"dot\"],\"application/mxf\":[\"mxf\"],\"application/n-quads\":[\"nq\"],\"application/n-triples\":[\"nt\"],\"application/node\":[\"cjs\"],\"application/octet-stream\":[\"bin\",\"dms\",\"lrf\",\"mar\",\"so\",\"dist\",\"distz\",\"pkg\",\"bpk\",\"dump\",\"elc\",\"deploy\",\"exe\",\"dll\",\"deb\",\"dmg\",\"iso\",\"img\",\"msi\",\"msp\",\"msm\",\"buffer\"],\"application/oda\":[\"oda\"],\"application/oebps-package+xml\":[\"opf\"],\"applic
ation/ogg\":[\"ogx\"],\"application/omdoc+xml\":[\"omdoc\"],\"application/onenote\":[\"onetoc\",\"onetoc2\",\"onetmp\",\"onepkg\"],\"application/oxps\":[\"oxps\"],\"application/p2p-overlay+xml\":[\"relo\"],\"application/patch-ops-error+xml\":[\"xer\"],\"application/pdf\":[\"pdf\"],\"application/pgp-encrypted\":[\"pgp\"],\"application/pgp-signature\":[\"asc\",\"sig\"],\"application/pics-rules\":[\"prf\"],\"application/pkcs10\":[\"p10\"],\"application/pkcs7-mime\":[\"p7m\",\"p7c\"],\"application/pkcs7-signature\":[\"p7s\"],\"application/pkcs8\":[\"p8\"],\"application/pkix-attr-cert\":[\"ac\"],\"application/pkix-cert\":[\"cer\"],\"application/pkix-crl\":[\"crl\"],\"application/pkix-pkipath\":[\"pkipath\"],\"application/pkixcmp\":[\"pki\"],\"application/pls+xml\":[\"pls\"],\"application/postscript\":[\"ai\",\"eps\",\"ps\"],\"application/provenance+xml\":[\"provx\"],\"application/pskc+xml\":[\"pskcxml\"],\"application/raml+yaml\":[\"raml\"],\"application/rdf+xml\":[\"rdf\",\"owl\"],\"application/reginfo+xml\":[\"rif\"],\"application/relax-ng-compact-syntax\":[\"rnc\"],\"application/resource-lists+xml\":[\"rl\"],\"application/resource-lists-diff+xml\":[\"rld\"],\"application/rls-services+xml\":[\"rs\"],\"application/route-apd+xml\":[\"rapd\"],\"application/route-s-tsid+xml\":[\"sls\"],\"application/route-usd+xml\":[\"rusd\"],\"application/rpki-ghostbusters\":[\"gbr\"],\"application/rpki-manifest\":[\"mft\"],\"application/rpki-roa\":[\"roa\"],\"application/rsd+xml\":[\"rsd\"],\"application/rss+xml\":[\"rss\"],\"application/rtf\":[\"rtf\"],\"application/sbml+xml\":[\"sbml\"],\"application/scvp-cv-request\":[\"scq\"],\"application/scvp-cv-response\":[\"scs\"],\"application/scvp-vp-request\":[\"spq\"],\"application/scvp-vp-response\":[\"spp\"],\"application/sdp\":[\"sdp\"],\"application/senml+xml\":[\"senmlx\"],\"application/sensml+xml\":[\"sensmlx\"],\"application/set-payment-initiation\":[\"setpay\"],\"application/set-registration-initiation\":[\"setreg\"],\"application/shf+xml\":[\"shf\"],\"application/sieve\":[\"siv\",\"sieve\"],\"application/smil+xml\":[\"smi\",\"smil\"],\"application/sparql-query\":[\"rq\"],\"application/sparql-results+xml\":[\"srx\"],\"application/srgs\":[\"gram\"],\"application/srgs+xml\":[\"grxml\"],\"application/sru+xml\":[\"sru\"],\"application/ssdl+xml\":[\"ssdl\"],\"application/ssml+xml\":[\"ssml\"],\"application/swid+xml\":[\"swidtag\"],\"application/tei+xml\":[\"tei\",\"teicorpus\"],\"application/thraud+xml\":[\"tfi\"],\"application/timestamped-data\":[\"tsd\"],\"application/toml\":[\"toml\"],\"application/trig\":[\"trig\"],\"application/ttml+xml\":[\"ttml\"],\"application/ubjson\":[\"ubj\"],\"application/urc-ressheet+xml\":[\"rsheet\"],\"application/urc-targetdesc+xml\":[\"td\"],\"application/voicexml+xml\":[\"vxml\"],\"application/wasm\":[\"wasm\"],\"application/widget\":[\"wgt\"],\"application/winhlp\":[\"hlp\"],\"application/wsdl+xml\":[\"wsdl\"],\"application/wspolicy+xml\":[\"wspolicy\"],\"application/xaml+xml\":[\"xaml\"],\"application/xcap-att+xml\":[\"xav\"],\"application/xcap-caps+xml\":[\"xca\"],\"application/xcap-diff+xml\":[\"xdf\"],\"application/xcap-el+xml\":[\"xel\"],\"application/xcap-ns+xml\":[\"xns\"],\"application/xenc+xml\":[\"xenc\"],\"application/xhtml+xml\":[\"xhtml\",\"xht\"],\"application/xliff+xml\":[\"xlf\"],\"application/xml\":[\"xml\",\"xsl\",\"xsd\",\"rng\"],\"application/xml-dtd\":[\"dtd\"],\"application/xop+xml\":[\"xop\"],\"application/xproc+xml\":[\"xpl\"],\"application/xslt+xml\":[\"*xsl\",\"xslt\"],\"application/xspf+xml\":[\"xspf\"
],\"application/xv+xml\":[\"mxml\",\"xhvml\",\"xvml\",\"xvm\"],\"application/yang\":[\"yang\"],\"application/yin+xml\":[\"yin\"],\"application/zip\":[\"zip\"],\"audio/3gpp\":[\"*3gpp\"],\"audio/adpcm\":[\"adp\"],\"audio/amr\":[\"amr\"],\"audio/basic\":[\"au\",\"snd\"],\"audio/midi\":[\"mid\",\"midi\",\"kar\",\"rmi\"],\"audio/mobile-xmf\":[\"mxmf\"],\"audio/mp3\":[\"*mp3\"],\"audio/mp4\":[\"m4a\",\"mp4a\"],\"audio/mpeg\":[\"mpga\",\"mp2\",\"mp2a\",\"mp3\",\"m2a\",\"m3a\"],\"audio/ogg\":[\"oga\",\"ogg\",\"spx\",\"opus\"],\"audio/s3m\":[\"s3m\"],\"audio/silk\":[\"sil\"],\"audio/wav\":[\"wav\"],\"audio/wave\":[\"*wav\"],\"audio/webm\":[\"weba\"],\"audio/xm\":[\"xm\"],\"font/collection\":[\"ttc\"],\"font/otf\":[\"otf\"],\"font/ttf\":[\"ttf\"],\"font/woff\":[\"woff\"],\"font/woff2\":[\"woff2\"],\"image/aces\":[\"exr\"],\"image/apng\":[\"apng\"],\"image/avif\":[\"avif\"],\"image/bmp\":[\"bmp\"],\"image/cgm\":[\"cgm\"],\"image/dicom-rle\":[\"drle\"],\"image/emf\":[\"emf\"],\"image/fits\":[\"fits\"],\"image/g3fax\":[\"g3\"],\"image/gif\":[\"gif\"],\"image/heic\":[\"heic\"],\"image/heic-sequence\":[\"heics\"],\"image/heif\":[\"heif\"],\"image/heif-sequence\":[\"heifs\"],\"image/hej2k\":[\"hej2\"],\"image/hsj2\":[\"hsj2\"],\"image/ief\":[\"ief\"],\"image/jls\":[\"jls\"],\"image/jp2\":[\"jp2\",\"jpg2\"],\"image/jpeg\":[\"jpeg\",\"jpg\",\"jpe\"],\"image/jph\":[\"jph\"],\"image/jphc\":[\"jhc\"],\"image/jpm\":[\"jpm\"],\"image/jpx\":[\"jpx\",\"jpf\"],\"image/jxr\":[\"jxr\"],\"image/jxra\":[\"jxra\"],\"image/jxrs\":[\"jxrs\"],\"image/jxs\":[\"jxs\"],\"image/jxsc\":[\"jxsc\"],\"image/jxsi\":[\"jxsi\"],\"image/jxss\":[\"jxss\"],\"image/ktx\":[\"ktx\"],\"image/ktx2\":[\"ktx2\"],\"image/png\":[\"png\"],\"image/sgi\":[\"sgi\"],\"image/svg+xml\":[\"svg\",\"svgz\"],\"image/t38\":[\"t38\"],\"image/tiff\":[\"tif\",\"tiff\"],\"image/tiff-fx\":[\"tfx\"],\"image/webp\":[\"webp\"],\"image/wmf\":[\"wmf\"],\"message/disposition-notification\":[\"disposition-notification\"],\"message/global\":[\"u8msg\"],\"message/global-delivery-status\":[\"u8dsn\"],\"message/global-disposition-notification\":[\"u8mdn\"],\"message/global-headers\":[\"u8hdr\"],\"message/rfc822\":[\"eml\",\"mime\"],\"model/3mf\":[\"3mf\"],\"model/gltf+json\":[\"gltf\"],\"model/gltf-binary\":[\"glb\"],\"model/iges\":[\"igs\",\"iges\"],\"model/mesh\":[\"msh\",\"mesh\",\"silo\"],\"model/mtl\":[\"mtl\"],\"model/obj\":[\"obj\"],\"model/step+xml\":[\"stpx\"],\"model/step+zip\":[\"stpz\"],\"model/step-xml+zip\":[\"stpxz\"],\"model/stl\":[\"stl\"],\"model/vrml\":[\"wrl\",\"vrml\"],\"model/x3d+binary\":[\"*x3db\",\"x3dbz\"],\"model/x3d+fastinfoset\":[\"x3db\"],\"model/x3d+vrml\":[\"*x3dv\",\"x3dvz\"],\"model/x3d+xml\":[\"x3d\",\"x3dz\"],\"model/x3d-vrml\":[\"x3dv\"],\"text/cache-manifest\":[\"appcache\",\"manifest\"],\"text/calendar\":[\"ics\",\"ifb\"],\"text/coffeescript\":[\"coffee\",\"litcoffee\"],\"text/css\":[\"css\"],\"text/csv\":[\"csv\"],\"text/html\":[\"html\",\"htm\",\"shtml\"],\"text/jade\":[\"jade\"],\"text/jsx\":[\"jsx\"],\"text/less\":[\"less\"],\"text/markdown\":[\"markdown\",\"md\"],\"text/mathml\":[\"mml\"],\"text/mdx\":[\"mdx\"],\"text/n3\":[\"n3\"],\"text/plain\":[\"txt\",\"text\",\"conf\",\"def\",\"list\",\"log\",\"in\",\"ini\"],\"text/richtext\":[\"rtx\"],\"text/rtf\":[\"*rtf\"],\"text/sgml\":[\"sgml\",\"sgm\"],\"text/shex\":[\"shex\"],\"text/slim\":[\"slim\",\"slm\"],\"text/spdx\":[\"spdx\"],\"text/stylus\":[\"stylus\",\"styl\"],\"text/tab-separated-values\":[\"tsv\"],\"text/troff\":[\"t\",\"tr\",\"roff\",\"man\",\"me\",\"ms\"],\"text/turtle\"
:[\"ttl\"],\"text/uri-list\":[\"uri\",\"uris\",\"urls\"],\"text/vcard\":[\"vcard\"],\"text/vtt\":[\"vtt\"],\"text/xml\":[\"*xml\"],\"text/yaml\":[\"yaml\",\"yml\"],\"video/3gpp\":[\"3gp\",\"3gpp\"],\"video/3gpp2\":[\"3g2\"],\"video/h261\":[\"h261\"],\"video/h263\":[\"h263\"],\"video/h264\":[\"h264\"],\"video/iso.segment\":[\"m4s\"],\"video/jpeg\":[\"jpgv\"],\"video/jpm\":[\"*jpm\",\"jpgm\"],\"video/mj2\":[\"mj2\",\"mjp2\"],\"video/mp2t\":[\"ts\"],\"video/mp4\":[\"mp4\",\"mp4v\",\"mpg4\"],\"video/mpeg\":[\"mpeg\",\"mpg\",\"mpe\",\"m1v\",\"m2v\"],\"video/ogg\":[\"ogv\"],\"video/quicktime\":[\"qt\",\"mov\"],\"video/webm\":[\"webm\"]};","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function(val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return 
Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true 
}\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence<sequence<ByteString>>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record<ByteString, ByteString>\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ?
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nconst isSameProtocol = function isSameProtocol(destination, original) {\n\tconst orig = new URL$1(original).protocol;\n\tconst dest = new URL$1(destination).protocol;\n\n\treturn orig === dest;\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\tdestroyStream(request.body, error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(req, function (err) {\n\t\t\tif (signal && signal.aborted) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (parseInt(process.version.substring(1)) < 14) {\n\t\t\t// Before 
Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\treq.on('socket', function (s) {\n\t\t\t\ts.addListener('close', function (hadError) {\n\t\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\t\tconst hasDataListener = s.listenerCount('data') > 0;\n\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && hasDataListener && !hadError && !(signal && signal.aborted)) {\n\t\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', err);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: 
request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.on('end', function () {\n\t\t\t\t\t// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tlet socket;\n\n\trequest.on('socket', function (s) {\n\t\tsocket = s;\n\t});\n\n\trequest.on('response', function (response) {\n\t\tconst headers = response.headers;\n\n\t\tif (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {\n\t\t\tresponse.once('close', function (hadError) {\n\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\tconst hasDataListener = socket.listenerCount('data') > 0;\n\n\t\t\t\tif (hasDataListener && !hadError) {\n\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\terrorCallback(err);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n}\n\nfunction destroyStream(stream, err) {\n\tif (stream.destroy) {\n\t\tstream.destroy(err);\n\t} else {\n\t\t// node < 8\n\t\tstream.emit('error', err);\n\t\tstream.end();\n\t}\n}\n\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = 
fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\nconst Queue = require('yocto-queue');\n\nconst pLimit = concurrency => {\n\tif (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) {\n\t\tthrow new TypeError('Expected `concurrency` to be a number from 1 and up');\n\t}\n\n\tconst queue = new Queue();\n\tlet activeCount = 0;\n\n\tconst next = () => {\n\t\tactiveCount--;\n\n\t\tif (queue.size > 0) {\n\t\t\tqueue.dequeue()();\n\t\t}\n\t};\n\n\tconst run = async (fn, resolve, ...args) => {\n\t\tactiveCount++;\n\n\t\tconst result = (async () => fn(...args))();\n\n\t\tresolve(result);\n\n\t\ttry {\n\t\t\tawait result;\n\t\t} catch {}\n\n\t\tnext();\n\t};\n\n\tconst enqueue = (fn, resolve, ...args) => {\n\t\tqueue.enqueue(run.bind(null, fn, resolve, ...args));\n\n\t\t(async () => {\n\t\t\t// This function needs to wait until the next microtask before comparing\n\t\t\t// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously\n\t\t\t// when the run function is dequeued and called. 
The comparison in the if-statement\n\t\t\t// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.\n\t\t\tawait Promise.resolve();\n\n\t\t\tif (activeCount < concurrency && queue.size > 0) {\n\t\t\t\tqueue.dequeue()();\n\t\t\t}\n\t\t})();\n\t};\n\n\tconst generator = (fn, ...args) => new Promise(resolve => {\n\t\tenqueue(fn, resolve, ...args);\n\t});\n\n\tObject.defineProperties(generator, {\n\t\tactiveCount: {\n\t\t\tget: () => activeCount\n\t\t},\n\t\tpendingCount: {\n\t\t\tget: () => queue.size\n\t\t},\n\t\tclearQueue: {\n\t\t\tvalue: () => {\n\t\t\t\tqueue.clear();\n\t\t\t}\n\t\t}\n\t});\n\n\treturn generator;\n};\n\nmodule.exports = pLimit;\n","'use strict';\n\nconst codes = {};\n\nfunction createErrorType(code, message, Base) {\n if (!Base) {\n Base = Error\n }\n\n function getMessage (arg1, arg2, arg3) {\n if (typeof message === 'string') {\n return message\n } else {\n return message(arg1, arg2, arg3)\n }\n }\n\n class NodeError extends Base {\n constructor (arg1, arg2, arg3) {\n super(getMessage(arg1, arg2, arg3));\n }\n }\n\n NodeError.prototype.name = Base.name;\n NodeError.prototype.code = code;\n\n codes[code] = NodeError;\n}\n\n// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js\nfunction oneOf(expected, thing) {\n if (Array.isArray(expected)) {\n const len = expected.length;\n expected = expected.map((i) => String(i));\n if (len > 2) {\n return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +\n expected[len - 1];\n } else if (len === 2) {\n return `one of ${thing} ${expected[0]} or ${expected[1]}`;\n } else {\n return `of ${thing} ${expected[0]}`;\n }\n } else {\n return `of ${thing} ${String(expected)}`;\n }\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith\nfunction startsWith(str, search, pos) {\n\treturn str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith\nfunction endsWith(str, search, this_len) {\n\tif (this_len === undefined || this_len > str.length) {\n\t\tthis_len = str.length;\n\t}\n\treturn str.substring(this_len - search.length, this_len) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes\nfunction includes(str, search, start) {\n if (typeof start !== 'number') {\n start = 0;\n }\n\n if (start + search.length > str.length) {\n return false;\n } else {\n return str.indexOf(search, start) !== -1;\n }\n}\n\ncreateErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {\n return 'The value \"' + value + '\" is invalid for option \"' + name + '\"'\n}, TypeError);\ncreateErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {\n // determiner: 'must be' or 'must not be'\n let determiner;\n if (typeof expected === 'string' && startsWith(expected, 'not ')) {\n determiner = 'must not be';\n expected = expected.replace(/^not /, '');\n } else {\n determiner = 'must be';\n }\n\n let msg;\n if (endsWith(name, ' argument')) {\n // For cases like 'first argument'\n msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;\n } else {\n const type = includes(name, '.') ? 'property' : 'argument';\n msg = `The \"${name}\" ${type} ${determiner} ${oneOf(expected, 'type')}`;\n }\n\n msg += `. 
Received type ${typeof actual}`;\n return msg;\n}, TypeError);\ncreateErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');\ncreateErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {\n return 'The ' + name + ' method is not implemented'\n});\ncreateErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');\ncreateErrorType('ERR_STREAM_DESTROYED', function (name) {\n return 'Cannot call ' + name + ' after a stream was destroyed';\n});\ncreateErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');\ncreateErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');\ncreateErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');\ncreateErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);\ncreateErrorType('ERR_UNKNOWN_ENCODING', function (arg) {\n return 'Unknown encoding: ' + arg\n}, TypeError);\ncreateErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');\n\nmodule.exports.codes = codes;\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n\n'use strict';\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n for (var key in obj) keys.push(key);\n return keys;\n};\n/**/\n\nmodule.exports = Duplex;\nconst Readable = require('./_stream_readable');\nconst Writable = require('./_stream_writable');\nrequire('inherits')(Duplex, Readable);\n{\n // Allow the keys array to be GC'ed.\n const keys = objectKeys(Writable.prototype);\n for (var v = 0; v < keys.length; v++) {\n const method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n Readable.call(this, options);\n Writable.call(this, options);\n this.allowHalfOpen = true;\n if (options) {\n if (options.readable === false) this.readable = false;\n if (options.writable === false) this.writable = false;\n if (options.allowHalfOpen === false) {\n this.allowHalfOpen = false;\n this.once('end', onend);\n }\n }\n}\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.highWaterMark;\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\n\n// the no-half-open enforcer\nfunction onend() {\n // If the writable side ended, then we're ok.\n if (this._writableState.ended) return;\n\n // no more data can be written.\n // But allow more writes to happen in this tick.\n process.nextTick(onEndNT, this);\n}\nfunction onEndNT(self) {\n self.end();\n}\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n'use strict';\n\nmodule.exports = PassThrough;\nconst Transform = require('./_stream_transform');\nrequire('inherits')(PassThrough, Transform);\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n Transform.call(this, options);\n}\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\nmodule.exports = Readable;\n\n/**/\nvar Duplex;\n/**/\n\nReadable.ReadableState = ReadableState;\n\n/**/\nconst EE = require('events').EventEmitter;\nvar EElistenerCount = function EElistenerCount(emitter, type) {\n return emitter.listeners(type).length;\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\n/**/\nconst debugUtil = require('util');\nlet debug;\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function debug() {};\n}\n/**/\n\nconst BufferList = require('./internal/streams/buffer_list');\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;\n\n// Lazy loaded to improve the startup performance.\nlet StringDecoder;\nlet createReadableStreamAsyncIterator;\nlet from;\nrequire('inherits')(Readable, Stream);\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nconst kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\nfunction ReadableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;\n\n // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex);\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false;\n\n // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. 
We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n this.sync = true;\n\n // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n this.paused = true;\n\n // Should close be emitted on destroy. Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'end' (and potentially 'finish')\n this.autoDestroy = !!options.autoDestroy;\n\n // has it been destroyed\n this.destroyed = false;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // the number of writers that are awaiting a drain event in .pipe()s\n this.awaitDrain = 0;\n\n // if true, a maybeReadMore has been scheduled\n this.readingMore = false;\n this.decoder = null;\n this.encoding = null;\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\nfunction Readable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n if (!(this instanceof Readable)) return new Readable(options);\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the ReadableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n this._readableState = new ReadableState(options, this, isDuplex);\n\n // legacy\n this.readable = true;\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n Stream.call(this);\n}\nObject.defineProperty(Readable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._readableState === undefined) {\n return false;\n }\n return this._readableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._readableState.destroyed = value;\n }\n});\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\nReadable.prototype._destroy = function (err, cb) {\n cb(err);\n};\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n};\n\n// Unshift should *always* be something directly out of 
read()\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n debug('readableAddChunk', chunk);\n var state = stream._readableState;\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n if (er) {\n errorOrDestroy(stream, er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (addToFront) {\n if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());\n } else if (state.destroyed) {\n return false;\n } else {\n state.reading = false;\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n maybeReadMore(stream, state);\n }\n }\n\n // We can push more data if we are below the highWaterMark.\n // Also, if we have no data yet, we can stand some more bytes.\n // This is to work around cases where hwm=0, such as the repl.\n return !state.ended && (state.length < state.highWaterMark || state.length === 0);\n}\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n state.awaitDrain = 0;\n stream.emit('data', chunk);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 
1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n if (state.needReadable) emitReadable(stream);\n }\n maybeReadMore(stream, state);\n}\nfunction chunkInvalid(state, chunk) {\n var er;\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);\n }\n return er;\n}\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n};\n\n// backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n const decoder = new StringDecoder(enc);\n this._readableState.decoder = decoder;\n // If setEncoding(null), decoder.encoding equals utf8\n this._readableState.encoding = this._readableState.decoder.encoding;\n\n // Iterate over current buffer to convert already stored Buffers:\n let p = this._readableState.buffer.head;\n let content = '';\n while (p !== null) {\n content += decoder.write(p.data);\n p = p.next;\n }\n this._readableState.buffer.clear();\n if (content !== '') this._readableState.buffer.push(content);\n this._readableState.length = content.length;\n return this;\n};\n\n// Don't raise the hwm > 1GB\nconst MAX_HWM = 0x40000000;\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n return n;\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n }\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n;\n // Don't have enough\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n }\n return state.length;\n}\n\n// you can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n if (n !== 0) state.emittedReadable = false;\n\n // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n n = howMuchToRead(n, state);\n\n // if we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. 
The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n var doRead = state.needReadable;\n debug('need readable', doRead);\n\n // if we currently have less than the highWaterMark, then also read some\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n }\n\n // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true;\n // if the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.needReadable = true;\n // call internal read method\n this._read(state.highWaterMark);\n state.sync = false;\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n if (ret === null) {\n state.needReadable = state.length <= state.highWaterMark;\n n = 0;\n } else {\n state.length -= n;\n state.awaitDrain = 0;\n }\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true;\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this);\n }\n if (ret !== null) this.emit('data', ret);\n return ret;\n};\nfunction onEofChunk(stream, state) {\n debug('onEofChunk');\n if (state.ended) return;\n if (state.decoder) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n state.ended = true;\n if (state.sync) {\n // if we are sync, wait until next tick to emit the data.\n // Otherwise we risk emitting data in the flow()\n // the readable code triggers during a read() call\n emitReadable(stream);\n } else {\n // emit 'readable' now to make sure it gets picked up.\n state.needReadable = false;\n if (!state.emittedReadable) {\n state.emittedReadable = true;\n emitReadable_(stream);\n }\n }\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. 
This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n var state = stream._readableState;\n debug('emitReadable', state.needReadable, state.emittedReadable);\n state.needReadable = false;\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n process.nextTick(emitReadable_, stream);\n }\n}\nfunction emitReadable_(stream) {\n var state = stream._readableState;\n debug('emitReadable_', state.destroyed, state.length, state.ended);\n if (!state.destroyed && (state.length || state.ended)) {\n stream.emit('readable');\n state.emittedReadable = false;\n }\n\n // The stream needs another readable event if\n // 1. It is not flowing, as the flow mechanism will take\n // care of it.\n // 2. It is not ended.\n // 3. It is below the highWaterMark, so we can schedule\n // another readable later.\n state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;\n flow(stream);\n}\n\n// at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n process.nextTick(maybeReadMore_, stream, state);\n }\n}\nfunction maybeReadMore_(stream, state) {\n // Attempt to read more data if we should.\n //\n // The conditions for reading more data are (one of):\n // - Not enough data buffered (state.length < state.highWaterMark). The loop\n // is responsible for filling the buffer with enough data if such data\n // is available. If highWaterMark is 0 and we are not in the flowing mode\n // we should _not_ attempt to buffer any extra data. We'll get more data\n // when the stream consumer calls read() instead.\n // - No data in the buffer, and the stream is in flowing mode. In this mode\n // the loop below is responsible for ensuring read() is called. Failing to\n // call read here would abort the flow and there's no other mechanism for\n // continuing the flow if the stream consumer has just subscribed to the\n // 'data' event.\n //\n // In addition to the above conditions to keep reading data, the following\n // conditions prevent the data from being read:\n // - The stream has ended (state.ended).\n // - There is already a pending 'read' operation (state.reading). This is a\n // case where the the stream has called the implementation defined _read()\n // method, but they are processing the call asynchronously and have _not_\n // called push() with new data. In this case we skip performing more\n // read()s. The execution ends in this method again after the _read() ends\n // up calling push() with more data.\n while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {\n const len = state.length;\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length)\n // didn't get any data, stop spinning.\n break;\n }\n state.readingMore = false;\n}\n\n// abstract method. 
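// --- Illustrative sketch (not part of the bundled readable-stream source) ---
// _read() is the "abstract method" referred to here: subclasses (or the
// options.read form) supply it and call push() to hand data to the buffer.
// A minimal object-mode counting stream, assuming Node's core 'stream' module:
const { Readable } = require('stream');

class Counter extends Readable {
  constructor(limit) {
    super({ objectMode: true });
    this.current = 1;
    this.limit = limit;
  }
  _read() {
    // push(null) signals EOF; otherwise the stream calls _read() again
    // whenever its buffer drops below highWaterMark.
    if (this.current > this.limit) this.push(null);
    else this.push(this.current++);
  }
}

new Counter(3).on('data', n => console.log(n)); // 1, 2, 3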
to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));\n};\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n default:\n state.pipes.push(dest);\n break;\n }\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n var endFn = doEnd ? onend : unpipe;\n if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);\n dest.on('unpipe', onunpipe);\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n function onend() {\n debug('onend');\n dest.end();\n }\n\n // when the dest drains, it reduces the awaitDrain counter\n // on the source. This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n var cleanedUp = false;\n function cleanup() {\n debug('cleanup');\n // cleanup event handlers once the pipe is broken\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n cleanedUp = true;\n\n // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n src.on('data', ondata);\n function ondata(chunk) {\n debug('ondata');\n var ret = dest.write(chunk);\n debug('dest.write', ret);\n if (ret === false) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', state.awaitDrain);\n state.awaitDrain++;\n }\n src.pause();\n }\n }\n\n // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror);\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n dest.once('close', onclose);\n function onfinish() {\n 
debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n dest.once('finish', onfinish);\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n }\n\n // tell the dest that it's being piped to\n dest.emit('pipe', src);\n\n // start the flow if it hasn't been started already.\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n return dest;\n};\nfunction pipeOnDrain(src) {\n return function pipeOnDrainFunctionResult() {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = {\n hasUnpiped: false\n };\n\n // if we're not piping anywhere, then do nothing.\n if (state.pipesCount === 0) return this;\n\n // just one destination. most common case.\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n if (!dest) dest = state.pipes;\n\n // got a match.\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n }\n\n // slow case. multiple pipe destinations.\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, {\n hasUnpiped: false\n });\n return this;\n }\n\n // try to find the right one.\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n dest.emit('unpipe', this, unpipeInfo);\n return this;\n};\n\n// set up data events if they are asked for\n// Ensure readable listeners eventually get something\nReadable.prototype.on = function (ev, fn) {\n const res = Stream.prototype.on.call(this, ev, fn);\n const state = this._readableState;\n if (ev === 'data') {\n // update readableListening so that resume() may be a no-op\n // a few lines down. This is needed to support once('readable').\n state.readableListening = this.listenerCount('readable') > 0;\n\n // Try start flowing on next tick if stream isn't explicitly paused\n if (state.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.flowing = false;\n state.emittedReadable = false;\n debug('on readable', state.length, state.reading);\n if (state.length) {\n emitReadable(this);\n } else if (!state.reading) {\n process.nextTick(nReadingNextTick, this);\n }\n }\n }\n return res;\n};\nReadable.prototype.addListener = Readable.prototype.on;\nReadable.prototype.removeListener = function (ev, fn) {\n const res = Stream.prototype.removeListener.call(this, ev, fn);\n if (ev === 'readable') {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. 
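// --- Illustrative sketch (not part of the bundled readable-stream source) ---
// The pipe()/unpipe()/pause()/resume() plumbing above is what the usual
// piping pattern relies on: when dest.write() returns false the source is
// paused (awaitDrain), and dest's 'drain' event resumes it. Uses Node's core
// 'fs' streams; the file names are placeholders.
const fs = require('fs');

const source = fs.createReadStream('input.txt');         // hypothetical path
const destination = fs.createWriteStream('output.txt');  // hypothetical path

source.pipe(destination);
// Passing { end: false } would keep the destination open after the source
// ends, matching the doEnd/onend logic in the bundled code:
//   source.pipe(destination, { end: false });

destination.on('finish', () => console.log('all data flushed'));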
This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nReadable.prototype.removeAllListeners = function (ev) {\n const res = Stream.prototype.removeAllListeners.apply(this, arguments);\n if (ev === 'readable' || ev === undefined) {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n return res;\n};\nfunction updateReadableListening(self) {\n const state = self._readableState;\n state.readableListening = self.listenerCount('readable') > 0;\n if (state.resumeScheduled && !state.paused) {\n // flowing needs to be set to true now, otherwise\n // the upcoming resume will not flow.\n state.flowing = true;\n\n // crude way to check if we should resume\n } else if (self.listenerCount('data') > 0) {\n self.resume();\n }\n}\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n var state = this._readableState;\n if (!state.flowing) {\n debug('resume');\n // we flow only if there is no one listening\n // for readable, but we still have to call\n // resume()\n state.flowing = !state.readableListening;\n resume(this, state);\n }\n state.paused = false;\n return this;\n};\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n process.nextTick(resume_, stream, state);\n }\n}\nfunction resume_(stream, state) {\n debug('resume', state.reading);\n if (!state.reading) {\n stream.read(0);\n }\n state.resumeScheduled = false;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n if (this._readableState.flowing !== false) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n this._readableState.paused = true;\n return this;\n};\nfunction flow(stream) {\n const state = stream._readableState;\n debug('flow', state.flowing);\n while (state.flowing && stream.read() !== null);\n}\n\n// wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n var state = this._readableState;\n var paused = false;\n stream.on('end', () => {\n debug('wrapped end');\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) this.push(chunk);\n }\n this.push(null);\n });\n stream.on('data', chunk => {\n debug('wrapped data');\n if (state.decoder) chunk = state.decoder.write(chunk);\n\n // don't skip over falsy values in objectMode\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n var ret = this.push(chunk);\n if (!ret) {\n paused = true;\n stream.pause();\n }\n });\n\n // proxy all the other methods.\n // important when wrapping filters and duplexes.\n for (var i in stream) {\n if (this[i] === undefined && typeof 
stream[i] === 'function') {\n this[i] = function methodWrap(method) {\n return function methodWrapReturnFunction() {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n }\n\n // proxy certain important events.\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n }\n\n // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n this._read = n => {\n debug('wrapped _read', n);\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n return this;\n};\nif (typeof Symbol === 'function') {\n Readable.prototype[Symbol.asyncIterator] = function () {\n if (createReadableStreamAsyncIterator === undefined) {\n createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');\n }\n return createReadableStreamAsyncIterator(this);\n };\n}\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.highWaterMark;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState && this._readableState.buffer;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableFlowing', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.flowing;\n },\n set: function set(state) {\n if (this._readableState) {\n this._readableState.flowing = state;\n }\n }\n});\n\n// exposed for testing purposes only.\nReadable._fromList = fromList;\nObject.defineProperty(Readable.prototype, 'readableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._readableState.length;\n }\n});\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = state.buffer.consume(n, state.decoder);\n }\n return ret;\n}\nfunction endReadable(stream) {\n var state = stream._readableState;\n debug('endReadable', state.endEmitted);\n if (!state.endEmitted) {\n state.ended = true;\n process.nextTick(endReadableNT, state, stream);\n }\n}\nfunction endReadableNT(state, stream) {\n debug('endReadableNT', state.endEmitted, state.length);\n\n // Check that we didn't get one last unshift.\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the 
writable side is ready for autoDestroy as well\n const wState = stream._writableState;\n if (!wState || wState.autoDestroy && wState.finished) {\n stream.destroy();\n }\n }\n }\n}\nif (typeof Symbol === 'function') {\n Readable.from = function (iterable, opts) {\n if (from === undefined) {\n from = require('./internal/streams/from');\n }\n return from(Readable, iterable, opts);\n };\n}\nfunction indexOf(xs, x) {\n for (var i = 0, l = xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n return -1;\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. 
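// --- Illustrative sketch (not part of the bundled readable-stream source) ---
// Readable.from(), defined just above, wraps any iterable or async iterable
// as a readable stream. Assumes Node's core 'stream' module.
const { Readable } = require('stream');

async function* generate() {
  yield 'hello';
  yield 'world';
}

Readable.from(generate()).on('data', chunk => console.log(chunk));
// -> 'hello', then 'world'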
In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n'use strict';\n\nmodule.exports = Transform;\nconst _require$codes = require('../errors').codes,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,\n ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;\nconst Duplex = require('./_stream_duplex');\nrequire('inherits')(Transform, Duplex);\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n var cb = ts.writecb;\n if (cb === null) {\n return this.emit('error', new ERR_MULTIPLE_CALLBACK());\n }\n ts.writechunk = null;\n ts.writecb = null;\n if (data != null)\n // single equals check for both `null` and `undefined`\n this.push(data);\n cb(er);\n var rs = this._readableState;\n rs.reading = false;\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n Duplex.call(this, options);\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n };\n\n // start out asking for a readable event once data is transformed.\n this._readableState.needReadable = true;\n\n // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false;\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n if (typeof options.flush === 'function') this._flush = options.flush;\n }\n\n // When the writable side finishes, then flush out anything remaining.\n this.on('prefinish', prefinish);\n}\nfunction prefinish() {\n if (typeof this._flush === 'function' && !this._readableState.destroyed) {\n this._flush((er, data) => {\n done(this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n};\n\n// This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. 
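// --- Illustrative sketch (not part of the bundled readable-stream source) ---
// The long comment above describes how a Transform buffers written chunks
// until the readable side pulls them. A minimal uppercasing transform using
// the options form (options.transform / options.flush), assuming Node's core
// 'stream' module:
const { Transform } = require('stream');

const upper = new Transform({
  transform(chunk, encoding, callback) {
    // callback(err, data) both acks the written chunk and, when data is
    // given, pushes it to the readable side; passing an error fails the
    // whole stream.
    callback(null, chunk.toString().toUpperCase());
  },
  flush(callback) {
    // runs after the writable side ends, before 'end' is emitted
    callback(null, '\n-- done --\n');
  }
});

process.stdin.pipe(upper).pipe(process.stdout);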
If you\n// never call cb(), then you'll never get another chunk.\nTransform.prototype._transform = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));\n};\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n};\n\n// Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n if (ts.writechunk !== null && !ts.transforming) {\n ts.transforming = true;\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\nTransform.prototype._destroy = function (err, cb) {\n Duplex.prototype._destroy.call(this, err, err2 => {\n cb(err2);\n });\n};\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n if (data != null)\n // single equals check for both `null` and `undefined`\n stream.push(data);\n\n // TODO(BridgeAR): Write a test for these two error cases\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();\n if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();\n return stream.push(null);\n}","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n'use strict';\n\nmodule.exports = Writable;\n\n/* */\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n}\n\n// It seems a linked list but it is not\n// there will be only 2 of these for each stream\nfunction CorkedRequest(state) {\n this.next = null;\n this.entry = null;\n this.finish = () => {\n onCorkedFinish(this, state);\n };\n}\n/* */\n\n/**/\nvar Duplex;\n/**/\n\nWritable.WritableState = WritableState;\n\n/**/\nconst internalUtil = {\n deprecate: require('util-deprecate')\n};\n/**/\n\n/**/\nvar Stream = require('./internal/streams/stream');\n/**/\n\nconst Buffer = require('buffer').Buffer;\nconst OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\nconst destroyImpl = require('./internal/streams/destroy');\nconst _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\nconst _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,\n ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,\n ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,\n ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;\nconst errorOrDestroy = destroyImpl.errorOrDestroy;\nrequire('inherits')(Writable, Stream);\nfunction nop() {}\nfunction WritableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {};\n\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream,\n // e.g. options.readableObjectMode vs. 
options.writableObjectMode, etc.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;\n\n // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;\n\n // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);\n\n // if _final has been called\n this.finalCalled = false;\n\n // drain event flag.\n this.needDrain = false;\n // at the start of calling end()\n this.ending = false;\n // when end() has been called, and returned\n this.ended = false;\n // when 'finish' is emitted\n this.finished = false;\n\n // has it been destroyed\n this.destroyed = false;\n\n // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode;\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = options.defaultEncoding || 'utf8';\n\n // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0;\n\n // a flag to see when we're in the middle of a write.\n this.writing = false;\n\n // when true all writes will be buffered until .uncork() call\n this.corked = 0;\n\n // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true;\n\n // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false;\n\n // the callback that's passed to _write(chunk,cb)\n this.onwrite = function (er) {\n onwrite(stream, er);\n };\n\n // the callback that the user supplies to write(chunk,encoding,cb)\n this.writecb = null;\n\n // the amount that is being written when _write is called.\n this.writelen = 0;\n this.bufferedRequest = null;\n this.lastBufferedRequest = null;\n\n // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n this.pendingcb = 0;\n\n // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n this.prefinished = false;\n\n // True if the error was already emitted and should not be thrown again\n this.errorEmitted = false;\n\n // Should close be emitted on destroy. 
Defaults to true.\n this.emitClose = options.emitClose !== false;\n\n // Should .destroy() be called after 'finish' (and potentially 'end')\n this.autoDestroy = !!options.autoDestroy;\n\n // count buffered requests\n this.bufferedRequestCount = 0;\n\n // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n this.corkedRequestsFree = new CorkedRequest(this);\n}\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n while (current) {\n out.push(current);\n current = current.next;\n }\n return out;\n};\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function writableStateBufferGetter() {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})();\n\n// Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\nvar realHasInstance;\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function value(object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function realHasInstance(object) {\n return object instanceof this;\n };\n}\nfunction Writable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the WritableState constructor, at least with V8 6.5\n const isDuplex = this instanceof Duplex;\n if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);\n this._writableState = new WritableState(options, this, isDuplex);\n\n // legacy.\n this.writable = true;\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n if (typeof options.writev === 'function') this._writev = options.writev;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n if (typeof options.final === 'function') this._final = options.final;\n }\n Stream.call(this);\n}\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());\n};\nfunction writeAfterEnd(stream, cb) {\n var er = new ERR_STREAM_WRITE_AFTER_END();\n // TODO: defer error events consistently everywhere, not just the cb\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n}\n\n// Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. 
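// --- Illustrative sketch (not part of the bundled readable-stream source) ---
// A custom Writable built on the _write() contract implemented above, showing
// the write()/'drain' backpressure handshake that WritableState tracks via
// highWaterMark and needDrain. Assumes Node's core 'stream' module.
const { Writable } = require('stream');

const sink = new Writable({
  highWaterMark: 4, // tiny buffer so write() returns false almost immediately
  write(chunk, encoding, callback) {
    // pretend the underlying resource is slow
    setTimeout(() => {
      console.log('wrote %d bytes', chunk.length);
      callback(); // must be called exactly once per chunk
    }, 10);
  }
});

// write() returns false once the buffered length reaches highWaterMark;
// a well-behaved producer then waits for 'drain' before writing more.
if (!sink.write(Buffer.alloc(16))) {
  sink.once('drain', () => {
    sink.write('one more chunk');
    sink.end(() => console.log('finished'));
  });
} else {
  sink.end();
}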
Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\nfunction validChunk(stream, state, chunk, cb) {\n var er;\n if (chunk === null) {\n er = new ERR_STREAM_NULL_VALUES();\n } else if (typeof chunk !== 'string' && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);\n }\n if (er) {\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n return false;\n }\n return true;\n}\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n if (typeof cb !== 'function') cb = nop;\n if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n return ret;\n};\nWritable.prototype.cork = function () {\n this._writableState.corked++;\n};\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n if (state.corked) {\n state.corked--;\n if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n return chunk;\n}\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.highWaterMark;\n }\n});\n\n// if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n var len = state.objectMode ? 
1 : chunk.length;\n state.length += len;\n var ret = state.length < state.highWaterMark;\n // we must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true;\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk,\n encoding,\n isBuf,\n callback: cb,\n next: null\n };\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n return ret;\n}\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n process.nextTick(cb, er);\n // this can emit finish, and it will always happen\n // after error\n process.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n // this can emit finish, but finish must\n // always follow error\n finishMaybe(stream, state);\n }\n}\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();\n onwriteStateUpdate(state);\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state) || stream.destroyed;\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n if (sync) {\n process.nextTick(afterWrite, stream, state, finished, cb);\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n finishMaybe(stream, state);\n}\n\n// Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n}\n\n// if there's something in the buffer waiting, then process it\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n var count = 0;\n var allBuffers = true;\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n entry = entry.next;\n count += 1;\n }\n 
buffer.allBuffers = allBuffers;\n doWrite(stream, state, true, state.length, buffer, '', holder.finish);\n\n // doWrite is almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n state.pendingcb++;\n state.lastBufferedRequest = null;\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--;\n // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n if (state.writing) {\n break;\n }\n }\n if (entry === null) state.lastBufferedRequest = null;\n }\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));\n};\nWritable.prototype._writev = null;\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);\n\n // .end() fully uncorks\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n }\n\n // ignore unnecessary end() calls.\n if (!state.ending) endWritable(this, state, cb);\n return this;\n};\nObject.defineProperty(Writable.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n return this._writableState.length;\n }\n});\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\nfunction callFinal(stream, state) {\n stream._final(err => {\n state.pendingcb--;\n if (err) {\n errorOrDestroy(stream, err);\n }\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function' && !state.destroyed) {\n state.pendingcb++;\n state.finalCalled = true;\n process.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\nfunction finishMaybe(stream, state) {\n var need = needFinish(state);\n if (need) {\n prefinish(stream, state);\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the readable side is ready for autoDestroy as well\n const rState = stream._readableState;\n if (!rState || rState.autoDestroy && rState.endEmitted) {\n stream.destroy();\n }\n }\n }\n }\n return need;\n}\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n if (cb) {\n if (state.finished) process.nextTick(cb);else stream.once('finish', cb);\n }\n state.ended = true;\n stream.writable 
= false;\n}\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n }\n\n // reuse the free corkReq.\n state.corkedRequestsFree.next = corkReq;\n}\nObject.defineProperty(Writable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get() {\n if (this._writableState === undefined) {\n return false;\n }\n return this._writableState.destroyed;\n },\n set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._writableState) {\n return;\n }\n\n // backward compatibility, the user is explicitly\n // managing destroyed\n this._writableState.destroyed = value;\n }\n});\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\nWritable.prototype._destroy = function (err, cb) {\n cb(err);\n};","'use strict';\n\nconst finished = require('./end-of-stream');\nconst kLastResolve = Symbol('lastResolve');\nconst kLastReject = Symbol('lastReject');\nconst kError = Symbol('error');\nconst kEnded = Symbol('ended');\nconst kLastPromise = Symbol('lastPromise');\nconst kHandlePromise = Symbol('handlePromise');\nconst kStream = Symbol('stream');\nfunction createIterResult(value, done) {\n return {\n value,\n done\n };\n}\nfunction readAndResolve(iter) {\n const resolve = iter[kLastResolve];\n if (resolve !== null) {\n const data = iter[kStream].read();\n // we defer if data is null\n // we can be expecting either 'end' or\n // 'error'\n if (data !== null) {\n iter[kLastPromise] = null;\n iter[kLastResolve] = null;\n iter[kLastReject] = null;\n resolve(createIterResult(data, false));\n }\n }\n}\nfunction onReadable(iter) {\n // we wait for the next tick, because it might\n // emit an error with process.nextTick\n process.nextTick(readAndResolve, iter);\n}\nfunction wrapForNext(lastPromise, iter) {\n return (resolve, reject) => {\n lastPromise.then(() => {\n if (iter[kEnded]) {\n resolve(createIterResult(undefined, true));\n return;\n }\n iter[kHandlePromise](resolve, reject);\n }, reject);\n };\n}\nconst AsyncIteratorPrototype = Object.getPrototypeOf(function () {});\nconst ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({\n get stream() {\n return this[kStream];\n },\n next() {\n // if we have detected an error in the meanwhile\n // reject straight away\n const error = this[kError];\n if (error !== null) {\n return Promise.reject(error);\n }\n if (this[kEnded]) {\n return Promise.resolve(createIterResult(undefined, true));\n }\n if (this[kStream].destroyed) {\n // We need to defer via nextTick because if .destroy(err) is\n // called, the error will be emitted via nextTick, and\n // we cannot guarantee that there is no error lingering around\n // waiting to be emitted.\n return new Promise((resolve, reject) => {\n process.nextTick(() => {\n if (this[kError]) {\n reject(this[kError]);\n } else {\n resolve(createIterResult(undefined, true));\n }\n });\n });\n }\n\n // if we have multiple next() calls\n // we will wait for the previous Promise to finish\n // this logic is optimized to support for await loops,\n // where next() is only called once at a time\n const lastPromise = this[kLastPromise];\n let promise;\n if (lastPromise) {\n promise = new Promise(wrapForNext(lastPromise, this));\n } else {\n // fast path needed to support multiple 
this.push()\n // without triggering the next() queue\n const data = this[kStream].read();\n if (data !== null) {\n return Promise.resolve(createIterResult(data, false));\n }\n promise = new Promise(this[kHandlePromise]);\n }\n this[kLastPromise] = promise;\n return promise;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n return() {\n // destroy(err, cb) is a private API\n // we can guarantee we have that here, because we control the\n // Readable class this is attached to\n return new Promise((resolve, reject) => {\n this[kStream].destroy(null, err => {\n if (err) {\n reject(err);\n return;\n }\n resolve(createIterResult(undefined, true));\n });\n });\n }\n}, AsyncIteratorPrototype);\nconst createReadableStreamAsyncIterator = stream => {\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, {\n [kStream]: {\n value: stream,\n writable: true\n },\n [kLastResolve]: {\n value: null,\n writable: true\n },\n [kLastReject]: {\n value: null,\n writable: true\n },\n [kError]: {\n value: null,\n writable: true\n },\n [kEnded]: {\n value: stream._readableState.endEmitted,\n writable: true\n },\n // the function passed to new Promise\n // is cached so we avoid allocating a new\n // closure at every run\n [kHandlePromise]: {\n value: (resolve, reject) => {\n const data = iterator[kStream].read();\n if (data) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(data, false));\n } else {\n iterator[kLastResolve] = resolve;\n iterator[kLastReject] = reject;\n }\n },\n writable: true\n }\n });\n iterator[kLastPromise] = null;\n finished(stream, err => {\n if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {\n const reject = iterator[kLastReject];\n // reject if we are waiting for data in the Promise\n // returned by next() and store the error\n if (reject !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n reject(err);\n }\n iterator[kError] = err;\n return;\n }\n const resolve = iterator[kLastResolve];\n if (resolve !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(undefined, true));\n }\n iterator[kEnded] = true;\n });\n stream.on('readable', onReadable.bind(null, iterator));\n return iterator;\n};\nmodule.exports = createReadableStreamAsyncIterator;","'use strict';\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? String : Number)(input); }\nconst _require = require('buffer'),\n Buffer = _require.Buffer;\nconst _require2 = require('util'),\n inspect = _require2.inspect;\nconst custom = inspect && inspect.custom || 'inspect';\nfunction copyBuffer(src, target, offset) {\n Buffer.prototype.copy.call(src, target, offset);\n}\nmodule.exports = class BufferList {\n constructor() {\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n push(v) {\n const entry = {\n data: v,\n next: null\n };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n }\n unshift(v) {\n const entry = {\n data: v,\n next: this.head\n };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n }\n shift() {\n if (this.length === 0) return;\n const ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n }\n clear() {\n this.head = this.tail = null;\n this.length = 0;\n }\n join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n while (p = p.next) ret += s + p.data;\n return ret;\n }\n concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n const ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n return ret;\n }\n\n // Consumes a specified amount of bytes or characters from the buffered data.\n consume(n, hasStrings) {\n var ret;\n if (n < this.head.data.length) {\n // `slice` is the same for buffers and strings.\n ret = this.head.data.slice(0, n);\n this.head.data = this.head.data.slice(n);\n } else if (n === this.head.data.length) {\n // First chunk is a perfect match.\n ret = this.shift();\n } else {\n // Result spans more than one buffer.\n ret = hasStrings ? this._getString(n) : this._getBuffer(n);\n }\n return ret;\n }\n first() {\n return this.head.data;\n }\n\n // Consumes a specified amount of characters from the buffered data.\n _getString(n) {\n var p = this.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n const str = p.data;\n const nb = n > str.length ? 
str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Consumes a specified amount of bytes from the buffered data.\n _getBuffer(n) {\n const ret = Buffer.allocUnsafe(n);\n var p = this.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n while (p = p.next) {\n const buf = p.data;\n const nb = n > buf.length ? buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = buf.slice(nb);\n }\n break;\n }\n ++c;\n }\n this.length -= c;\n return ret;\n }\n\n // Make sure the linked list only shows the minimal necessary information.\n [custom](_, options) {\n return inspect(this, _objectSpread(_objectSpread({}, options), {}, {\n // Only inspect one level.\n depth: 0,\n // It should not recurse.\n customInspect: false\n }));\n }\n};","'use strict';\n\n// undocumented cb() API, needed for core, not for public API\nfunction destroy(err, cb) {\n const readableDestroyed = this._readableState && this._readableState.destroyed;\n const writableDestroyed = this._writableState && this._writableState.destroyed;\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err) {\n if (!this._writableState) {\n process.nextTick(emitErrorNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorNT, this, err);\n }\n }\n return this;\n }\n\n // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n }\n\n // if this is a duplex stream mark the writable part as destroyed as well\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n this._destroy(err || null, err => {\n if (!cb && err) {\n if (!this._writableState) {\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorAndCloseNT, this, err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n } else if (cb) {\n process.nextTick(emitCloseNT, this);\n cb(err);\n } else {\n process.nextTick(emitCloseNT, this);\n }\n });\n return this;\n}\nfunction emitErrorAndCloseNT(self, err) {\n emitErrorNT(self, err);\n emitCloseNT(self);\n}\nfunction emitCloseNT(self) {\n if (self._writableState && !self._writableState.emitClose) return;\n if (self._readableState && !self._readableState.emitClose) return;\n self.emit('close');\n}\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finalCalled = false;\n this._writableState.prefinished = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\nfunction emitErrorNT(self, err) {\n self.emit('error', 
err);\n}\nfunction errorOrDestroy(stream, err) {\n // We have tests that rely on errors being emitted\n // in the same tick, so changing this is semver major.\n // For now when you opt-in to autoDestroy we allow\n // the error to be emitted nextTick. In a future\n // semver major update we should change the default to this.\n\n const rState = stream._readableState;\n const wState = stream._writableState;\n if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);\n}\nmodule.exports = {\n destroy,\n undestroy,\n errorOrDestroy\n};","// Ported from https://github.com/mafintosh/end-of-stream with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nconst ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n callback.apply(this, args);\n };\n}\nfunction noop() {}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction eos(stream, opts, callback) {\n if (typeof opts === 'function') return eos(stream, null, opts);\n if (!opts) opts = {};\n callback = once(callback || noop);\n let readable = opts.readable || opts.readable !== false && stream.readable;\n let writable = opts.writable || opts.writable !== false && stream.writable;\n const onlegacyfinish = () => {\n if (!stream.writable) onfinish();\n };\n var writableEnded = stream._writableState && stream._writableState.finished;\n const onfinish = () => {\n writable = false;\n writableEnded = true;\n if (!readable) callback.call(stream);\n };\n var readableEnded = stream._readableState && stream._readableState.endEmitted;\n const onend = () => {\n readable = false;\n readableEnded = true;\n if (!writable) callback.call(stream);\n };\n const onerror = err => {\n callback.call(stream, err);\n };\n const onclose = () => {\n let err;\n if (readable && !readableEnded) {\n if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n if (writable && !writableEnded) {\n if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n };\n const onrequest = () => {\n stream.req.on('finish', onfinish);\n };\n if (isRequest(stream)) {\n stream.on('complete', onfinish);\n stream.on('abort', onclose);\n if (stream.req) onrequest();else stream.on('request', onrequest);\n } else if (writable && !stream._writableState) {\n // legacy streams\n stream.on('end', onlegacyfinish);\n stream.on('close', onlegacyfinish);\n }\n stream.on('end', onend);\n stream.on('finish', onfinish);\n if (opts.error !== false) stream.on('error', onerror);\n stream.on('close', onclose);\n return function () {\n stream.removeListener('complete', onfinish);\n stream.removeListener('abort', onclose);\n stream.removeListener('request', onrequest);\n if (stream.req) stream.req.removeListener('finish', onfinish);\n stream.removeListener('end', onlegacyfinish);\n stream.removeListener('close', onlegacyfinish);\n stream.removeListener('finish', onfinish);\n stream.removeListener('end', onend);\n stream.removeListener('error', onerror);\n stream.removeListener('close', onclose);\n };\n}\nmodule.exports = eos;","'use 
strict';\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nfunction _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\nfunction _toPropertyKey(arg) { var key = _toPrimitive(arg, \"string\"); return typeof key === \"symbol\" ? key : String(key); }\nfunction _toPrimitive(input, hint) { if (typeof input !== \"object\" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || \"default\"); if (typeof res !== \"object\") return res; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (hint === \"string\" ? 
String : Number)(input); }\nconst ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;\nfunction from(Readable, iterable, opts) {\n let iterator;\n if (iterable && typeof iterable.next === 'function') {\n iterator = iterable;\n } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);\n const readable = new Readable(_objectSpread({\n objectMode: true\n }, opts));\n // Reading boolean to protect against _read\n // being called before last iteration completion.\n let reading = false;\n readable._read = function () {\n if (!reading) {\n reading = true;\n next();\n }\n };\n function next() {\n return _next2.apply(this, arguments);\n }\n function _next2() {\n _next2 = _asyncToGenerator(function* () {\n try {\n const _yield$iterator$next = yield iterator.next(),\n value = _yield$iterator$next.value,\n done = _yield$iterator$next.done;\n if (done) {\n readable.push(null);\n } else if (readable.push(yield value)) {\n next();\n } else {\n reading = false;\n }\n } catch (err) {\n readable.destroy(err);\n }\n });\n return _next2.apply(this, arguments);\n }\n return readable;\n}\nmodule.exports = from;","// Ported from https://github.com/mafintosh/pump with\n// permission from the author, Mathias Buus (@mafintosh).\n\n'use strict';\n\nlet eos;\nfunction once(callback) {\n let called = false;\n return function () {\n if (called) return;\n called = true;\n callback(...arguments);\n };\n}\nconst _require$codes = require('../../../errors').codes,\n ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;\nfunction noop(err) {\n // Rethrow the error if it exists to avoid swallowing it\n if (err) throw err;\n}\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\nfunction destroyer(stream, reading, writing, callback) {\n callback = once(callback);\n let closed = false;\n stream.on('close', () => {\n closed = true;\n });\n if (eos === undefined) eos = require('./end-of-stream');\n eos(stream, {\n readable: reading,\n writable: writing\n }, err => {\n if (err) return callback(err);\n closed = true;\n callback();\n });\n let destroyed = false;\n return err => {\n if (closed) return;\n if (destroyed) return;\n destroyed = true;\n\n // request.destroy just do .end - .abort is what we want\n if (isRequest(stream)) return stream.abort();\n if (typeof stream.destroy === 'function') return stream.destroy();\n callback(err || new ERR_STREAM_DESTROYED('pipe'));\n };\n}\nfunction call(fn) {\n fn();\n}\nfunction pipe(from, to) {\n return from.pipe(to);\n}\nfunction popCallback(streams) {\n if (!streams.length) return noop;\n if (typeof streams[streams.length - 1] !== 'function') return noop;\n return streams.pop();\n}\nfunction pipeline() {\n for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {\n streams[_key] = arguments[_key];\n }\n const callback = popCallback(streams);\n if (Array.isArray(streams[0])) streams = streams[0];\n if (streams.length < 2) {\n throw new ERR_MISSING_ARGS('streams');\n }\n let error;\n const destroys = streams.map(function (stream, i) {\n const reading = i < streams.length - 1;\n const writing = i > 0;\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err;\n if (err) 
destroys.forEach(call);\n if (reading) return;\n destroys.forEach(call);\n callback(error);\n });\n });\n return streams.reduce(pipe);\n}\nmodule.exports = pipeline;","'use strict';\n\nconst ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;\nfunction highWaterMarkFrom(options, isDuplex, duplexKey) {\n return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;\n}\nfunction getHighWaterMark(state, options, duplexKey, isDuplex) {\n const hwm = highWaterMarkFrom(options, isDuplex, duplexKey);\n if (hwm != null) {\n if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {\n const name = isDuplex ? duplexKey : 'highWaterMark';\n throw new ERR_INVALID_OPT_VALUE(name, hwm);\n }\n return Math.floor(hwm);\n }\n\n // Default value\n return state.objectMode ? 16 : 16 * 1024;\n}\nmodule.exports = {\n getHighWaterMark\n};","module.exports = require('stream');\n","var Stream = require('stream');\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream.Readable;\n Object.assign(module.exports, Stream);\n module.exports.Stream = Stream;\n} else {\n exports = module.exports = require('./lib/_stream_readable.js');\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = require('./lib/_stream_writable.js');\n exports.Duplex = require('./lib/_stream_duplex.js');\n exports.Transform = require('./lib/_stream_transform.js');\n exports.PassThrough = require('./lib/_stream_passthrough.js');\n exports.finished = require('./lib/internal/streams/end-of-stream.js');\n exports.pipeline = require('./lib/internal/streams/pipeline.js');\n}\n","'use strict';\n\nconst {PassThrough} = require('stream');\nconst extend = require('extend');\n\nlet debug = () => {};\nif (\n typeof process !== 'undefined' &&\n 'env' in process &&\n typeof process.env === 'object' &&\n process.env.DEBUG === 'retry-request'\n) {\n debug = message => {\n console.log('retry-request:', message);\n };\n}\n\nconst DEFAULTS = {\n objectMode: false,\n retries: 2,\n\n /*\n The maximum time to delay in seconds. If retryDelayMultiplier results in a\n delay greater than maxRetryDelay, retries should delay by maxRetryDelay\n seconds instead.\n */\n maxRetryDelay: 64,\n\n /*\n The multiplier by which to increase the delay time between the completion of\n failed requests, and the initiation of the subsequent retrying request.\n */\n retryDelayMultiplier: 2,\n\n /*\n The length of time to keep retrying in seconds. The last sleep period will\n be shortened as necessary, so that the last retry runs at deadline (and not\n considerably beyond it). 
The total time starting from when the initial\n request is sent, after which an error will be returned, regardless of the\n retrying attempts made meanwhile.\n */\n totalTimeout: 600,\n\n noResponseRetries: 2,\n currentRetryAttempt: 0,\n shouldRetryFn: function (response) {\n const retryRanges = [\n // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes\n // 1xx - Retry (Informational, request still processing)\n // 2xx - Do not retry (Success)\n // 3xx - Do not retry (Redirect)\n // 4xx - Do not retry (Client errors)\n // 429 - Retry (\"Too Many Requests\")\n // 5xx - Retry (Server errors)\n [100, 199],\n [429, 429],\n [500, 599],\n ];\n\n const statusCode = response.statusCode;\n debug(`Response status: ${statusCode}`);\n\n let range;\n while ((range = retryRanges.shift())) {\n if (statusCode >= range[0] && statusCode <= range[1]) {\n // Not a successful status or redirect.\n return true;\n }\n }\n },\n};\n\nfunction retryRequest(requestOpts, opts, callback) {\n if (typeof requestOpts === 'string') {\n requestOpts = {url: requestOpts};\n }\n\n const streamMode = typeof arguments[arguments.length - 1] !== 'function';\n\n if (typeof opts === 'function') {\n callback = opts;\n }\n\n const manualCurrentRetryAttemptWasSet =\n opts && typeof opts.currentRetryAttempt === 'number';\n opts = extend({}, DEFAULTS, opts);\n\n if (typeof opts.request === 'undefined') {\n throw new Error('A request library must be provided to retry-request.');\n }\n\n let currentRetryAttempt = opts.currentRetryAttempt;\n\n let numNoResponseAttempts = 0;\n let streamResponseHandled = false;\n\n let retryStream;\n let requestStream;\n let delayStream;\n\n let activeRequest;\n const retryRequest = {\n abort: function () {\n if (activeRequest && activeRequest.abort) {\n activeRequest.abort();\n }\n },\n };\n\n if (streamMode) {\n retryStream = new PassThrough({objectMode: opts.objectMode});\n retryStream.abort = resetStreams;\n }\n\n const timeOfFirstRequest = Date.now();\n if (currentRetryAttempt > 0) {\n retryAfterDelay(currentRetryAttempt);\n } else {\n makeRequest();\n }\n\n if (streamMode) {\n return retryStream;\n } else {\n return retryRequest;\n }\n\n function resetStreams() {\n delayStream = null;\n\n if (requestStream) {\n requestStream.abort && requestStream.abort();\n requestStream.cancel && requestStream.cancel();\n\n if (requestStream.destroy) {\n requestStream.destroy();\n } else if (requestStream.end) {\n requestStream.end();\n }\n }\n }\n\n function makeRequest() {\n let finishHandled = false;\n currentRetryAttempt++;\n debug(`Current retry attempt: ${currentRetryAttempt}`);\n\n function handleFinish(args = []) {\n if (!finishHandled) {\n finishHandled = true;\n retryStream.emit('complete', ...args);\n }\n }\n\n if (streamMode) {\n streamResponseHandled = false;\n\n delayStream = new PassThrough({objectMode: opts.objectMode});\n requestStream = opts.request(requestOpts);\n\n setImmediate(() => {\n retryStream.emit('request');\n });\n\n requestStream\n // gRPC via google-cloud-node can emit an `error` as well as a `response`\n // Whichever it emits, we run with-- we can't run with both. 
That's what\n // is up with the `streamResponseHandled` tracking.\n .on('error', err => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(err);\n })\n .on('response', (resp, body) => {\n if (streamResponseHandled) {\n return;\n }\n\n streamResponseHandled = true;\n onResponse(null, resp, body);\n })\n .on('complete', (...params) => handleFinish(params))\n .on('finish', (...params) => handleFinish(params));\n\n requestStream.pipe(delayStream);\n } else {\n activeRequest = opts.request(requestOpts, onResponse);\n }\n }\n\n function retryAfterDelay(currentRetryAttempt) {\n if (streamMode) {\n resetStreams();\n }\n\n const nextRetryDelay = getNextRetryDelay({\n maxRetryDelay: opts.maxRetryDelay,\n retryDelayMultiplier: opts.retryDelayMultiplier,\n retryNumber: currentRetryAttempt,\n timeOfFirstRequest,\n totalTimeout: opts.totalTimeout,\n });\n debug(`Next retry delay: ${nextRetryDelay}`);\n\n if (nextRetryDelay <= 0) {\n numNoResponseAttempts = opts.noResponseRetries + 1;\n return;\n }\n\n setTimeout(makeRequest, nextRetryDelay);\n }\n\n function onResponse(err, response, body) {\n // An error such as DNS resolution.\n if (err) {\n numNoResponseAttempts++;\n\n if (numNoResponseAttempts <= opts.noResponseRetries) {\n retryAfterDelay(numNoResponseAttempts);\n } else {\n if (streamMode) {\n retryStream.emit('error', err);\n retryStream.end();\n } else {\n callback(err, response, body);\n }\n }\n\n return;\n }\n\n // Send the response to see if we should try again.\n // NOTE: \"currentRetryAttempt\" isn't accurate by default, as it counts\n // the very first request sent as the first \"retry\". It is only accurate\n // when a user provides their own \"currentRetryAttempt\" option at\n // instantiation.\n const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet\n ? 
currentRetryAttempt\n : currentRetryAttempt - 1;\n if (\n adjustedCurrentRetryAttempt < opts.retries &&\n opts.shouldRetryFn(response)\n ) {\n retryAfterDelay(currentRetryAttempt);\n return;\n }\n\n // No more attempts need to be made, just continue on.\n if (streamMode) {\n retryStream.emit('response', response);\n delayStream.pipe(retryStream);\n requestStream.on('error', err => {\n retryStream.destroy(err);\n });\n } else {\n callback(err, response, body);\n }\n }\n}\n\nmodule.exports = retryRequest;\n\nfunction getNextRetryDelay(config) {\n const {\n maxRetryDelay,\n retryDelayMultiplier,\n retryNumber,\n timeOfFirstRequest,\n totalTimeout,\n } = config;\n\n const maxRetryDelayMs = maxRetryDelay * 1000;\n const totalTimeoutMs = totalTimeout * 1000;\n\n const jitter = Math.floor(Math.random() * 1000);\n const calculatedNextRetryDelay =\n Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter;\n\n const maxAllowableDelayMs =\n totalTimeoutMs - (Date.now() - timeOfFirstRequest);\n\n return Math.min(\n calculatedNextRetryDelay,\n maxAllowableDelayMs,\n maxRetryDelayMs\n );\n}\n\nmodule.exports.defaults = DEFAULTS;\nmodule.exports.getNextRetryDelay = getNextRetryDelay;\n","module.exports = require('./lib/retry');","var RetryOperation = require('./retry_operation');\n\nexports.operation = function(options) {\n var timeouts = exports.timeouts(options);\n return new RetryOperation(timeouts, {\n forever: options && (options.forever || options.retries === Infinity),\n unref: options && options.unref,\n maxRetryTime: options && options.maxRetryTime\n });\n};\n\nexports.timeouts = function(options) {\n if (options instanceof Array) {\n return [].concat(options);\n }\n\n var opts = {\n retries: 10,\n factor: 2,\n minTimeout: 1 * 1000,\n maxTimeout: Infinity,\n randomize: false\n };\n for (var key in options) {\n opts[key] = options[key];\n }\n\n if (opts.minTimeout > opts.maxTimeout) {\n throw new Error('minTimeout is greater than maxTimeout');\n }\n\n var timeouts = [];\n for (var i = 0; i < opts.retries; i++) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n if (options && options.forever && !timeouts.length) {\n timeouts.push(this.createTimeout(i, opts));\n }\n\n // sort the array numerically ascending\n timeouts.sort(function(a,b) {\n return a - b;\n });\n\n return timeouts;\n};\n\nexports.createTimeout = function(attempt, opts) {\n var random = (opts.randomize)\n ? 
(Math.random() + 1)\n : 1;\n\n var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt));\n timeout = Math.min(timeout, opts.maxTimeout);\n\n return timeout;\n};\n\nexports.wrap = function(obj, options, methods) {\n if (options instanceof Array) {\n methods = options;\n options = null;\n }\n\n if (!methods) {\n methods = [];\n for (var key in obj) {\n if (typeof obj[key] === 'function') {\n methods.push(key);\n }\n }\n }\n\n for (var i = 0; i < methods.length; i++) {\n var method = methods[i];\n var original = obj[method];\n\n obj[method] = function retryWrapper(original) {\n var op = exports.operation(options);\n var args = Array.prototype.slice.call(arguments, 1);\n var callback = args.pop();\n\n args.push(function(err) {\n if (op.retry(err)) {\n return;\n }\n if (err) {\n arguments[0] = op.mainError();\n }\n callback.apply(this, arguments);\n });\n\n op.attempt(function() {\n original.apply(obj, args);\n });\n }.bind(obj, original);\n obj[method].options = options;\n }\n};\n","function RetryOperation(timeouts, options) {\n // Compatibility for the old (timeouts, retryForever) signature\n if (typeof options === 'boolean') {\n options = { forever: options };\n }\n\n this._originalTimeouts = JSON.parse(JSON.stringify(timeouts));\n this._timeouts = timeouts;\n this._options = options || {};\n this._maxRetryTime = options && options.maxRetryTime || Infinity;\n this._fn = null;\n this._errors = [];\n this._attempts = 1;\n this._operationTimeout = null;\n this._operationTimeoutCb = null;\n this._timeout = null;\n this._operationStart = null;\n this._timer = null;\n\n if (this._options.forever) {\n this._cachedTimeouts = this._timeouts.slice(0);\n }\n}\nmodule.exports = RetryOperation;\n\nRetryOperation.prototype.reset = function() {\n this._attempts = 1;\n this._timeouts = this._originalTimeouts.slice(0);\n}\n\nRetryOperation.prototype.stop = function() {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n if (this._timer) {\n clearTimeout(this._timer);\n }\n\n this._timeouts = [];\n this._cachedTimeouts = null;\n};\n\nRetryOperation.prototype.retry = function(err) {\n if (this._timeout) {\n clearTimeout(this._timeout);\n }\n\n if (!err) {\n return false;\n }\n var currentTime = new Date().getTime();\n if (err && currentTime - this._operationStart >= this._maxRetryTime) {\n this._errors.push(err);\n this._errors.unshift(new Error('RetryOperation timeout occurred'));\n return false;\n }\n\n this._errors.push(err);\n\n var timeout = this._timeouts.shift();\n if (timeout === undefined) {\n if (this._cachedTimeouts) {\n // retry forever, only keep last error\n this._errors.splice(0, this._errors.length - 1);\n timeout = this._cachedTimeouts.slice(-1);\n } else {\n return false;\n }\n }\n\n var self = this;\n this._timer = setTimeout(function() {\n self._attempts++;\n\n if (self._operationTimeoutCb) {\n self._timeout = setTimeout(function() {\n self._operationTimeoutCb(self._attempts);\n }, self._operationTimeout);\n\n if (self._options.unref) {\n self._timeout.unref();\n }\n }\n\n self._fn(self._attempts);\n }, timeout);\n\n if (this._options.unref) {\n this._timer.unref();\n }\n\n return true;\n};\n\nRetryOperation.prototype.attempt = function(fn, timeoutOps) {\n this._fn = fn;\n\n if (timeoutOps) {\n if (timeoutOps.timeout) {\n this._operationTimeout = timeoutOps.timeout;\n }\n if (timeoutOps.cb) {\n this._operationTimeoutCb = timeoutOps.cb;\n }\n }\n\n var self = this;\n if (this._operationTimeoutCb) {\n this._timeout = setTimeout(function() {\n 
self._operationTimeoutCb();\n }, self._operationTimeout);\n }\n\n this._operationStart = new Date().getTime();\n\n this._fn(this._attempts);\n};\n\nRetryOperation.prototype.try = function(fn) {\n console.log('Using RetryOperation.try() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = function(fn) {\n console.log('Using RetryOperation.start() is deprecated');\n this.attempt(fn);\n};\n\nRetryOperation.prototype.start = RetryOperation.prototype.try;\n\nRetryOperation.prototype.errors = function() {\n return this._errors;\n};\n\nRetryOperation.prototype.attempts = function() {\n return this._attempts;\n};\n\nRetryOperation.prototype.mainError = function() {\n if (this._errors.length === 0) {\n return null;\n }\n\n var counts = {};\n var mainError = null;\n var mainErrorCount = 0;\n\n for (var i = 0; i < this._errors.length; i++) {\n var error = this._errors[i];\n var message = error.message;\n var count = (counts[message] || 0) + 1;\n\n counts[message] = count;\n\n if (count >= mainErrorCount) {\n mainError = error;\n mainErrorCount = count;\n }\n }\n\n return mainError;\n};\n","/*! safe-buffer. MIT License. Feross Aboukhadijeh */\n/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.prototype = Object.create(Buffer.prototype)\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n","'use strict';\n\nvar stubs = require('stubs')\n\n/*\n * StreamEvents can be used 2 ways:\n *\n * 1:\n * function MyStream() {\n * require('stream-events').call(this)\n * }\n *\n * 2:\n * require('stream-events')(myStream)\n */\nfunction StreamEvents(stream) {\n stream = stream || this\n\n var cfg = {\n callthrough: true,\n calls: 1\n }\n\n stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading'))\n stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing'))\n\n return stream\n}\n\nmodule.exports = StreamEvents\n","module.exports = shift\n\nfunction shift (stream) {\n var rs = stream._readableState\n if (!rs) return null\n return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs))\n}\n\nfunction getStateLength (state) {\n if (state.buffer.length) {\n // Since node 6.3.0 state.buffer is a BufferList not an array\n if (state.buffer.head) {\n return state.buffer.head.data.length\n }\n\n return state.buffer[0].length\n }\n\n return state.length\n}\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/**/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n 
var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? 
this.write(buf) : '';\n}","const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/;\nconst numRegex = /^([\\-\\+])?(0*)(\\.[0-9]+([eE]\\-?[0-9]+)?|[0-9]+(\\.[0-9]+([eE]\\-?[0-9]+)?)?)$/;\n// const octRegex = /0x[a-z0-9]+/;\n// const binRegex = /0x[a-z0-9]+/;\n\n\n//polyfill\nif (!Number.parseInt && window.parseInt) {\n Number.parseInt = window.parseInt;\n}\nif (!Number.parseFloat && window.parseFloat) {\n Number.parseFloat = window.parseFloat;\n}\n\n \nconst consider = {\n hex : true,\n leadingZeros: true,\n decimalPoint: \"\\.\",\n eNotation: true\n //skipLike: /regex/\n};\n\nfunction toNumber(str, options = {}){\n // const options = Object.assign({}, consider);\n // if(opt.leadingZeros === false){\n // options.leadingZeros = false;\n // }else if(opt.hex === false){\n // options.hex = false;\n // }\n\n options = Object.assign({}, consider, options );\n if(!str || typeof str !== \"string\" ) return str;\n \n let trimmedStr = str.trim();\n // if(trimmedStr === \"0.0\") return 0;\n // else if(trimmedStr === \"+0.0\") return 0;\n // else if(trimmedStr === \"-0.0\") return -0;\n\n if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str;\n else if (options.hex && hexRegex.test(trimmedStr)) {\n return Number.parseInt(trimmedStr, 16);\n // } else if (options.parseOct && octRegex.test(str)) {\n // return Number.parseInt(val, 8);\n // }else if (options.parseBin && binRegex.test(str)) {\n // return Number.parseInt(val, 2);\n }else{\n //separate negative sign, leading zeros, and rest number\n const match = numRegex.exec(trimmedStr);\n if(match){\n const sign = match[1];\n const leadingZeros = match[2];\n let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros\n //trim ending zeros for floating number\n \n const eNotation = match[4] || match[6];\n if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== \".\") return str; //-0123\n else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== \".\") return str; //0123\n else{//no leading zeros or leading zeros are allowed\n const num = Number(trimmedStr);\n const numStr = \"\" + num;\n if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation\n if(options.eNotation) return num;\n else return str;\n }else if(eNotation){ //given number has enotation\n if(options.eNotation) return num;\n else return str;\n }else if(trimmedStr.indexOf(\".\") !== -1){ //floating number\n // const decimalPart = match[5].substr(1);\n // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(\".\"));\n\n \n // const p = numStr.indexOf(\".\");\n // const givenIntPart = numStr.substr(0,p);\n // const givenDecPart = numStr.substr(p+1);\n if(numStr === \"0\" && (numTrimmedByZeros === \"\") ) return num; //0.0\n else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000\n else if( sign && numStr === \"-\"+numTrimmedByZeros) return num;\n else return str;\n }\n \n if(leadingZeros){\n // if(numTrimmedByZeros === numStr){\n // if(options.leadingZeros) return num;\n // else return str;\n // }else return str;\n if(numTrimmedByZeros === numStr) return num;\n else if(sign+numTrimmedByZeros === numStr) return num;\n else return str;\n }\n\n if(trimmedStr === numStr) return num;\n else if(trimmedStr === sign+numStr) return num;\n // else{\n // //number with +/- sign\n // trimmedStr.test(/[-+][0-9]);\n\n // }\n return str;\n }\n // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str;\n \n }else{ //non-numeric string\n return str;\n }\n }\n}\n\n/**\n * \n * @param {string} numStr without leading zeros\n * @returns \n */\nfunction trimZeros(numStr){\n if(numStr && numStr.indexOf(\".\") !== -1){//float\n numStr = numStr.replace(/0+$/, \"\"); //remove ending zeros\n if(numStr === \".\") numStr = \"0\";\n else if(numStr[0] === \".\") numStr = \"0\"+numStr;\n else if(numStr[numStr.length-1] === \".\") numStr = numStr.substr(0,numStr.length-1);\n return numStr;\n }\n return numStr;\n}\nmodule.exports = toNumber\n","'use strict'\n\nmodule.exports = function stubs(obj, method, cfg, stub) {\n if (!obj || !method || !obj[method])\n throw new Error('You must provide an object and a key for an existing method')\n\n if (!stub) {\n stub = cfg\n cfg = {}\n }\n\n stub = stub || function() {}\n\n cfg.callthrough = cfg.callthrough || false\n cfg.calls = cfg.calls || 0\n\n var norevert = cfg.calls === 0\n\n var cached = obj[method].bind(obj)\n\n obj[method] = function() {\n var args = [].slice.call(arguments)\n var returnVal\n\n if (cfg.callthrough)\n returnVal = cached.apply(obj, args)\n\n returnVal = stub.apply(obj, args) || returnVal\n\n if (!norevert && --cfg.calls === 0)\n obj[method] = cached\n\n return returnVal\n }\n}\n","'use strict';\nconst os = require('os');\nconst tty = require('tty');\nconst hasFlag = require('has-flag');\n\nconst {env} = process;\n\nlet forceColor;\nif (hasFlag('no-color') ||\n\thasFlag('no-colors') ||\n\thasFlag('color=false') ||\n\thasFlag('color=never')) {\n\tforceColor = 0;\n} else if (hasFlag('color') ||\n\thasFlag('colors') ||\n\thasFlag('color=true') ||\n\thasFlag('color=always')) {\n\tforceColor = 1;\n}\n\nif ('FORCE_COLOR' in env) {\n\tif (env.FORCE_COLOR === 'true') {\n\t\tforceColor = 1;\n\t} else if (env.FORCE_COLOR === 'false') {\n\t\tforceColor = 0;\n\t} else {\n\t\tforceColor = env.FORCE_COLOR.length === 0 ? 
1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);\n\t}\n}\n\nfunction translateLevel(level) {\n\tif (level === 0) {\n\t\treturn false;\n\t}\n\n\treturn {\n\t\tlevel,\n\t\thasBasic: true,\n\t\thas256: level >= 2,\n\t\thas16m: level >= 3\n\t};\n}\n\nfunction supportsColor(haveStream, streamIsTTY) {\n\tif (forceColor === 0) {\n\t\treturn 0;\n\t}\n\n\tif (hasFlag('color=16m') ||\n\t\thasFlag('color=full') ||\n\t\thasFlag('color=truecolor')) {\n\t\treturn 3;\n\t}\n\n\tif (hasFlag('color=256')) {\n\t\treturn 2;\n\t}\n\n\tif (haveStream && !streamIsTTY && forceColor === undefined) {\n\t\treturn 0;\n\t}\n\n\tconst min = forceColor || 0;\n\n\tif (env.TERM === 'dumb') {\n\t\treturn min;\n\t}\n\n\tif (process.platform === 'win32') {\n\t\t// Windows 10 build 10586 is the first Windows release that supports 256 colors.\n\t\t// Windows 10 build 14931 is the first release that supports 16m/TrueColor.\n\t\tconst osRelease = os.release().split('.');\n\t\tif (\n\t\t\tNumber(osRelease[0]) >= 10 &&\n\t\t\tNumber(osRelease[2]) >= 10586\n\t\t) {\n\t\t\treturn Number(osRelease[2]) >= 14931 ? 3 : 2;\n\t\t}\n\n\t\treturn 1;\n\t}\n\n\tif ('CI' in env) {\n\t\tif (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {\n\t\t\treturn 1;\n\t\t}\n\n\t\treturn min;\n\t}\n\n\tif ('TEAMCITY_VERSION' in env) {\n\t\treturn /^(9\\.(0*[1-9]\\d*)\\.|\\d{2,}\\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;\n\t}\n\n\tif (env.COLORTERM === 'truecolor') {\n\t\treturn 3;\n\t}\n\n\tif ('TERM_PROGRAM' in env) {\n\t\tconst version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);\n\n\t\tswitch (env.TERM_PROGRAM) {\n\t\t\tcase 'iTerm.app':\n\t\t\t\treturn version >= 3 ? 3 : 2;\n\t\t\tcase 'Apple_Terminal':\n\t\t\t\treturn 2;\n\t\t\t// No default\n\t\t}\n\t}\n\n\tif (/-256(color)?$/i.test(env.TERM)) {\n\t\treturn 2;\n\t}\n\n\tif (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {\n\t\treturn 1;\n\t}\n\n\tif ('COLORTERM' in env) {\n\t\treturn 1;\n\t}\n\n\treturn min;\n}\n\nfunction getSupportLevel(stream) {\n\tconst level = supportsColor(stream, stream && stream.isTTY);\n\treturn translateLevel(level);\n}\n\nmodule.exports = {\n\tsupportsColor: getSupportLevel,\n\tstdout: translateLevel(supportsColor(true, tty.isatty(1))),\n\tstderr: translateLevel(supportsColor(true, tty.isatty(2)))\n};\n","\"use strict\";\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0;\n/**\n * @class TeenyStatisticsWarning\n * @extends Error\n * @description While an error, is used for emitting warnings when\n * meeting certain configured thresholds.\n * @see process.emitWarning\n */\nclass TeenyStatisticsWarning extends Error {\n /**\n * @param {string} message\n */\n constructor(message) {\n super(message);\n this.threshold = 0;\n this.type = '';\n this.value 
= 0;\n this.name = this.constructor.name;\n Error.captureStackTrace(this, this.constructor);\n }\n}\nexports.TeenyStatisticsWarning = TeenyStatisticsWarning;\nTeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning';\n/**\n * @class TeenyStatistics\n * @description Maintain various statistics internal to teeny-request. Tracking\n * is not automatic and must be instrumented within teeny-request.\n */\nclass TeenyStatistics {\n /**\n * @param {TeenyStatisticsOptions} [opts]\n */\n constructor(opts) {\n /**\n * @type {number}\n * @private\n * @default 0\n */\n this._concurrentRequests = 0;\n /**\n * @type {boolean}\n * @private\n * @default false\n */\n this._didConcurrentRequestWarn = false;\n this._options = TeenyStatistics._prepareOptions(opts);\n }\n /**\n * Returns a copy of the current options.\n * @return {TeenyStatisticsOptions}\n */\n getOptions() {\n return Object.assign({}, this._options);\n }\n /**\n * Change configured statistics options. This will not preserve unspecified\n * options that were previously specified, i.e. this is a reset of options.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsConfig} The previous options.\n * @see _prepareOptions\n */\n setOptions(opts) {\n const oldOpts = this._options;\n this._options = TeenyStatistics._prepareOptions(opts);\n return oldOpts;\n }\n /**\n * @readonly\n * @return {TeenyStatisticsCounters}\n */\n get counters() {\n return {\n concurrentRequests: this._concurrentRequests,\n };\n }\n /**\n * @description Should call this right before making a request.\n */\n requestStarting() {\n this._concurrentRequests++;\n if (this._options.concurrentRequests > 0 &&\n this._concurrentRequests >= this._options.concurrentRequests &&\n !this._didConcurrentRequestWarn) {\n this._didConcurrentRequestWarn = true;\n const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' +\n this._concurrentRequests +\n ' requests in-flight, which exceeds the configured threshold of ' +\n this._options.concurrentRequests +\n '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' +\n 'variable or the concurrentRequests option of teeny-request to ' +\n 'increase or disable (0) this warning.');\n warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS;\n warning.value = this._concurrentRequests;\n warning.threshold = this._options.concurrentRequests;\n process.emitWarning(warning);\n }\n }\n /**\n * @description When using `requestStarting`, call this after the request\n * has finished.\n */\n requestFinished() {\n // TODO negative?\n this._concurrentRequests--;\n }\n /**\n * Configuration Precedence:\n * 1. Dependency inversion via defined option.\n * 2. Global numeric environment variable.\n * 3. 
Built-in default.\n * This will not preserve unspecified options previously specified.\n * @param {TeenyStatisticsOptions} [opts]\n * @returns {TeenyStatisticsOptions}\n * @private\n */\n static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) {\n let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS;\n const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS);\n if (diConcurrentRequests !== undefined) {\n concurrentRequests = diConcurrentRequests;\n }\n else if (!Number.isNaN(envConcurrentRequests)) {\n concurrentRequests = envConcurrentRequests;\n }\n return { concurrentRequests };\n }\n}\nexports.TeenyStatistics = TeenyStatistics;\n/**\n * @description A default threshold representing when to warn about excessive\n * in-flight/concurrent requests.\n * @type {number}\n * @static\n * @readonly\n * @default 5000\n */\nTeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000;\n//# sourceMappingURL=TeenyStatistics.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getAgent = exports.pool = void 0;\nconst http_1 = require(\"http\");\nconst https_1 = require(\"https\");\n// eslint-disable-next-line node/no-deprecated-api\nconst url_1 = require(\"url\");\nexports.pool = new Map();\n/**\n * Determines if a proxy should be considered based on the environment.\n *\n * @param uri The request uri\n * @returns {boolean}\n */\nfunction shouldUseProxyForURI(uri) {\n const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy;\n if (!noProxyEnv) {\n return true;\n }\n const givenURI = new URL(uri);\n for (const noProxyRaw of noProxyEnv.split(',')) {\n const noProxy = noProxyRaw.trim();\n if (noProxy === givenURI.origin || noProxy === givenURI.hostname) {\n return false;\n }\n else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) {\n const noProxyWildcard = noProxy.replace(/^\\*\\./, '.');\n if (givenURI.hostname.endsWith(noProxyWildcard)) {\n return false;\n }\n }\n }\n return true;\n}\n/**\n * Returns a custom request Agent if one is found, otherwise returns undefined\n * which will result in the global http(s) Agent being used.\n * @private\n * @param {string} uri The request uri\n * @param {Options} reqOpts The request options\n * @returns {HttpAnyAgent|undefined}\n */\nfunction getAgent(uri, reqOpts) {\n const isHttp = uri.startsWith('http://');\n const proxy = reqOpts.proxy ||\n process.env.HTTP_PROXY ||\n process.env.http_proxy ||\n process.env.HTTPS_PROXY ||\n process.env.https_proxy;\n const poolOptions = Object.assign({}, reqOpts.pool);\n const manuallyProvidedProxy = !!reqOpts.proxy;\n const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri);\n if (proxy && shouldUseProxy) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp\n ? 
require('http-proxy-agent')\n : require('https-proxy-agent');\n const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions };\n return new Agent(proxyOpts);\n }\n let key = isHttp ? 'http' : 'https';\n if (reqOpts.forever) {\n key += ':forever';\n if (!exports.pool.has(key)) {\n // tslint:disable-next-line variable-name\n const Agent = isHttp ? http_1.Agent : https_1.Agent;\n exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true }));\n }\n }\n return exports.pool.get(key);\n}\nexports.getAgent = getAgent;\n//# sourceMappingURL=agents.js.map","\"use strict\";\n/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.teenyRequest = exports.RequestError = void 0;\nconst node_fetch_1 = require(\"node-fetch\");\nconst stream_1 = require(\"stream\");\nconst uuid = require(\"uuid\");\nconst agents_1 = require(\"./agents\");\nconst TeenyStatistics_1 = require(\"./TeenyStatistics\");\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nconst streamEvents = require('stream-events');\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\n/**\n * Convert options from Request to Fetch format\n * @private\n * @param reqOpts Request options\n */\nfunction requestToFetchOptions(reqOpts) {\n const options = {\n method: reqOpts.method || 'GET',\n ...(reqOpts.timeout && { timeout: reqOpts.timeout }),\n ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }),\n };\n if (typeof reqOpts.json === 'object') {\n // Add Content-type: application/json header\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['Content-Type'] = 'application/json';\n // Set body to JSON representation of value\n options.body = JSON.stringify(reqOpts.json);\n }\n else {\n if (Buffer.isBuffer(reqOpts.body)) {\n options.body = reqOpts.body;\n }\n else if (typeof reqOpts.body !== 'string') {\n options.body = JSON.stringify(reqOpts.body);\n }\n else {\n options.body = reqOpts.body;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options.headers = reqOpts.headers;\n let uri = (reqOpts.uri ||\n reqOpts.url);\n if (!uri) {\n throw new Error('Missing uri or url in reqOpts.');\n }\n if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const qs = require('querystring');\n const params = qs.stringify(reqOpts.qs);\n uri = uri + '?' 
+ params;\n }\n options.agent = (0, agents_1.getAgent)(uri, reqOpts);\n return { uri, options };\n}\n/**\n * Convert a response from `fetch` to `request` format.\n * @private\n * @param opts The `request` options used to create the request.\n * @param res The Fetch response\n * @returns A `request` response object\n */\nfunction fetchToRequestResponse(opts, res) {\n const request = {};\n request.agent = opts.agent || false;\n request.headers = (opts.headers || {});\n request.href = res.url;\n // headers need to be converted from a map to an obj\n const resHeaders = {};\n res.headers.forEach((value, key) => (resHeaders[key] = value));\n const response = Object.assign(res.body, {\n statusCode: res.status,\n statusMessage: res.statusText,\n request,\n body: res.body,\n headers: resHeaders,\n toJSON: () => ({ headers: resHeaders }),\n });\n return response;\n}\n/**\n * Create POST body from two parts as multipart/related content-type\n * @private\n * @param boundary\n * @param multipart\n */\nfunction createMultipartStream(boundary, multipart) {\n const finale = `--${boundary}--`;\n const stream = new stream_1.PassThrough();\n for (const part of multipart) {\n const preamble = `--${boundary}\\r\\nContent-Type: ${part['Content-Type']}\\r\\n\\r\\n`;\n stream.write(preamble);\n if (typeof part.body === 'string') {\n stream.write(part.body);\n stream.write('\\r\\n');\n }\n else {\n part.body.pipe(stream, { end: false });\n part.body.on('end', () => {\n stream.write('\\r\\n');\n stream.write(finale);\n stream.end();\n });\n }\n }\n return stream;\n}\nfunction teenyRequest(reqOpts, callback) {\n const { uri, options } = requestToFetchOptions(reqOpts);\n const multipart = reqOpts.multipart;\n if (reqOpts.multipart && multipart.length === 2) {\n if (!callback) {\n // TODO: add support for multipart uploads through streaming\n throw new Error('Multipart without callback is not implemented.');\n }\n const boundary = uuid.v4();\n options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`;\n options.body = createMultipartStream(boundary, multipart);\n // Multipart upload\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, (err) => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n }\n if (callback === undefined) {\n // Stream mode\n const requestStream = streamEvents(new stream_1.PassThrough());\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let responseStream;\n requestStream.once('reading', () => {\n if (responseStream) {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n }\n else {\n requestStream.once('response', () => {\n (0, stream_1.pipeline)(responseStream, requestStream, () => { });\n });\n }\n });\n options.compress = false;\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n responseStream = res.body;\n 
responseStream.on('error', (err) => {\n requestStream.emit('error', err);\n });\n const response = fetchToRequestResponse(options, res);\n requestStream.emit('response', response);\n }, err => {\n teenyRequest.stats.requestFinished();\n requestStream.emit('error', err);\n });\n // fetch doesn't supply the raw HTTP stream, instead it\n // returns a PassThrough piped from the HTTP response\n // stream.\n return requestStream;\n }\n // GET or POST with callback\n teenyRequest.stats.requestStarting();\n (0, node_fetch_1.default)(uri, options).then(res => {\n teenyRequest.stats.requestFinished();\n const header = res.headers.get('content-type');\n const response = fetchToRequestResponse(options, res);\n const body = response.body;\n if (header === 'application/json' ||\n header === 'application/json; charset=utf-8') {\n if (response.statusCode === 204) {\n // Probably a DELETE\n callback(null, response, body);\n return;\n }\n res.json().then(json => {\n response.body = json;\n callback(null, response, json);\n }, err => {\n callback(err, response, body);\n });\n return;\n }\n res.text().then(text => {\n const response = fetchToRequestResponse(options, res);\n response.body = text;\n callback(null, response, text);\n }, err => {\n callback(err, response, body);\n });\n }, err => {\n teenyRequest.stats.requestFinished();\n callback(err, null, null);\n });\n return;\n}\nexports.teenyRequest = teenyRequest;\nteenyRequest.defaults = (defaults) => {\n return (reqOpts, callback) => {\n const opts = { ...defaults, ...reqOpts };\n if (callback === undefined) {\n return teenyRequest(opts);\n }\n teenyRequest(opts, callback);\n };\n};\n/**\n * Single instance of an interface for keeping track of things.\n */\nteenyRequest.stats = new TeenyStatistics_1.TeenyStatistics();\nteenyRequest.resetStats = () => {\n teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions());\n};\n//# sourceMappingURL=index.js.map","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar 
_stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nvar _default = {\n randomUUID: _crypto.default.randomUUID\n};\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nexports.unsafeStringify = unsafeStringify;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).slice(1));\n}\n\nfunction unsafeStringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]];\n}\n\nfunction stringify(arr, offset = 0) {\n const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.unsafeStringify)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.URL = exports.DNS = void 0;\nexports.default = v35;\n\nvar _stringify = require(\"./stringify.js\");\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction v35(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n var _namespace;\n\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... 
value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _native = _interopRequireDefault(require(\"./native.js\"));\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = require(\"./stringify.js\");\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n if (_native.default.randomUUID && !buf && !options) {\n return _native.default.randomUUID();\n }\n\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.unsafeStringify)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.slice(14, 15), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict'\n\nconst Client = require('./lib/client')\nconst Dispatcher = require('./lib/dispatcher')\nconst errors = require('./lib/core/errors')\nconst Pool = require('./lib/pool')\nconst BalancedPool = require('./lib/balanced-pool')\nconst Agent = require('./lib/agent')\nconst util = require('./lib/core/util')\nconst { InvalidArgumentError } = errors\nconst api = require('./lib/api')\nconst buildConnector = require('./lib/core/connect')\nconst MockClient = require('./lib/mock/mock-client')\nconst MockAgent = require('./lib/mock/mock-agent')\nconst MockPool = require('./lib/mock/mock-pool')\nconst mockErrors = require('./lib/mock/mock-errors')\nconst ProxyAgent = require('./lib/proxy-agent')\nconst RetryHandler = require('./lib/handler/RetryHandler')\nconst { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')\nconst DecoratorHandler = require('./lib/handler/DecoratorHandler')\nconst RedirectHandler = require('./lib/handler/RedirectHandler')\nconst createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor')\n\nlet hasCrypto\ntry {\n require('crypto')\n hasCrypto = true\n} catch {\n hasCrypto = false\n}\n\nObject.assign(Dispatcher.prototype, api)\n\nmodule.exports.Dispatcher = Dispatcher\nmodule.exports.Client = Client\nmodule.exports.Pool = Pool\nmodule.exports.BalancedPool = BalancedPool\nmodule.exports.Agent = Agent\nmodule.exports.ProxyAgent = ProxyAgent\nmodule.exports.RetryHandler = RetryHandler\n\nmodule.exports.DecoratorHandler = DecoratorHandler\nmodule.exports.RedirectHandler = RedirectHandler\nmodule.exports.createRedirectInterceptor = createRedirectInterceptor\n\nmodule.exports.buildConnector = buildConnector\nmodule.exports.errors = errors\n\nfunction makeDispatcher (fn) {\n return (url, opts, handler) => {\n if (typeof opts === 'function') {\n handler = opts\n opts = null\n }\n\n if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {\n throw new InvalidArgumentError('invalid url')\n }\n\n if (opts != null && typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (opts && opts.path != null) {\n if (typeof opts.path !== 'string') {\n throw new InvalidArgumentError('invalid opts.path')\n }\n\n let path = opts.path\n if (!opts.path.startsWith('/')) {\n path = `/${path}`\n 
}\n\n url = new URL(util.parseOrigin(url).origin + path)\n } else {\n if (!opts) {\n opts = typeof url === 'object' ? url : {}\n }\n\n url = util.parseURL(url)\n }\n\n const { agent, dispatcher = getGlobalDispatcher() } = opts\n\n if (agent) {\n throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')\n }\n\n return fn.call(dispatcher, {\n ...opts,\n origin: url.origin,\n path: url.search ? `${url.pathname}${url.search}` : url.pathname,\n method: opts.method || (opts.body ? 'PUT' : 'GET')\n }, handler)\n }\n}\n\nmodule.exports.setGlobalDispatcher = setGlobalDispatcher\nmodule.exports.getGlobalDispatcher = getGlobalDispatcher\n\nif (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {\n let fetchImpl = null\n module.exports.fetch = async function fetch (resource) {\n if (!fetchImpl) {\n fetchImpl = require('./lib/fetch').fetch\n }\n\n try {\n return await fetchImpl(...arguments)\n } catch (err) {\n if (typeof err === 'object') {\n Error.captureStackTrace(err, this)\n }\n\n throw err\n }\n }\n module.exports.Headers = require('./lib/fetch/headers').Headers\n module.exports.Response = require('./lib/fetch/response').Response\n module.exports.Request = require('./lib/fetch/request').Request\n module.exports.FormData = require('./lib/fetch/formdata').FormData\n module.exports.File = require('./lib/fetch/file').File\n module.exports.FileReader = require('./lib/fileapi/filereader').FileReader\n\n const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global')\n\n module.exports.setGlobalOrigin = setGlobalOrigin\n module.exports.getGlobalOrigin = getGlobalOrigin\n\n const { CacheStorage } = require('./lib/cache/cachestorage')\n const { kConstruct } = require('./lib/cache/symbols')\n\n // Cache & CacheStorage are tightly coupled with fetch. 
Even if it may run\n // in an older version of Node, it doesn't have any use without fetch.\n module.exports.caches = new CacheStorage(kConstruct)\n}\n\nif (util.nodeMajor >= 16) {\n const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies')\n\n module.exports.deleteCookie = deleteCookie\n module.exports.getCookies = getCookies\n module.exports.getSetCookies = getSetCookies\n module.exports.setCookie = setCookie\n\n const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL')\n\n module.exports.parseMIMEType = parseMIMEType\n module.exports.serializeAMimeType = serializeAMimeType\n}\n\nif (util.nodeMajor >= 18 && hasCrypto) {\n const { WebSocket } = require('./lib/websocket/websocket')\n\n module.exports.WebSocket = WebSocket\n}\n\nmodule.exports.request = makeDispatcher(api.request)\nmodule.exports.stream = makeDispatcher(api.stream)\nmodule.exports.pipeline = makeDispatcher(api.pipeline)\nmodule.exports.connect = makeDispatcher(api.connect)\nmodule.exports.upgrade = makeDispatcher(api.upgrade)\n\nmodule.exports.MockClient = MockClient\nmodule.exports.MockPool = MockPool\nmodule.exports.MockAgent = MockAgent\nmodule.exports.mockErrors = mockErrors\n","'use strict'\n\nconst { InvalidArgumentError } = require('./core/errors')\nconst { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')\nconst DispatcherBase = require('./dispatcher-base')\nconst Pool = require('./pool')\nconst Client = require('./client')\nconst util = require('./core/util')\nconst createRedirectInterceptor = require('./interceptor/redirectInterceptor')\nconst { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()\n\nconst kOnConnect = Symbol('onConnect')\nconst kOnDisconnect = Symbol('onDisconnect')\nconst kOnConnectionError = Symbol('onConnectionError')\nconst kMaxRedirections = Symbol('maxRedirections')\nconst kOnDrain = Symbol('onDrain')\nconst kFactory = Symbol('factory')\nconst kFinalizer = Symbol('finalizer')\nconst kOptions = Symbol('options')\n\nfunction defaultFactory (origin, opts) {\n return opts && opts.connections === 1\n ? new Client(origin, opts)\n : new Pool(origin, opts)\n}\n\nclass Agent extends DispatcherBase {\n constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {\n super()\n\n if (typeof factory !== 'function') {\n throw new InvalidArgumentError('factory must be a function.')\n }\n\n if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {\n throw new InvalidArgumentError('connect must be a function or an object')\n }\n\n if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {\n throw new InvalidArgumentError('maxRedirections must be a positive number')\n }\n\n if (connect && typeof connect !== 'function') {\n connect = { ...connect }\n }\n\n this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)\n ? options.interceptors.Agent\n : [createRedirectInterceptor({ maxRedirections })]\n\n this[kOptions] = { ...util.deepClone(options), connect }\n this[kOptions].interceptors = options.interceptors\n ? 
{ ...options.interceptors }\n : undefined\n this[kMaxRedirections] = maxRedirections\n this[kFactory] = factory\n this[kClients] = new Map()\n this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {\n const ref = this[kClients].get(key)\n if (ref !== undefined && ref.deref() === undefined) {\n this[kClients].delete(key)\n }\n })\n\n const agent = this\n\n this[kOnDrain] = (origin, targets) => {\n agent.emit('drain', origin, [agent, ...targets])\n }\n\n this[kOnConnect] = (origin, targets) => {\n agent.emit('connect', origin, [agent, ...targets])\n }\n\n this[kOnDisconnect] = (origin, targets, err) => {\n agent.emit('disconnect', origin, [agent, ...targets], err)\n }\n\n this[kOnConnectionError] = (origin, targets, err) => {\n agent.emit('connectionError', origin, [agent, ...targets], err)\n }\n }\n\n get [kRunning] () {\n let ret = 0\n for (const ref of this[kClients].values()) {\n const client = ref.deref()\n /* istanbul ignore next: gc is undeterministic */\n if (client) {\n ret += client[kRunning]\n }\n }\n return ret\n }\n\n [kDispatch] (opts, handler) {\n let key\n if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {\n key = String(opts.origin)\n } else {\n throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')\n }\n\n const ref = this[kClients].get(key)\n\n let dispatcher = ref ? ref.deref() : null\n if (!dispatcher) {\n dispatcher = this[kFactory](opts.origin, this[kOptions])\n .on('drain', this[kOnDrain])\n .on('connect', this[kOnConnect])\n .on('disconnect', this[kOnDisconnect])\n .on('connectionError', this[kOnConnectionError])\n\n this[kClients].set(key, new WeakRef(dispatcher))\n this[kFinalizer].register(dispatcher, key)\n }\n\n return dispatcher.dispatch(opts, handler)\n }\n\n async [kClose] () {\n const closePromises = []\n for (const ref of this[kClients].values()) {\n const client = ref.deref()\n /* istanbul ignore else: gc is undeterministic */\n if (client) {\n closePromises.push(client.close())\n }\n }\n\n await Promise.all(closePromises)\n }\n\n async [kDestroy] (err) {\n const destroyPromises = []\n for (const ref of this[kClients].values()) {\n const client = ref.deref()\n /* istanbul ignore else: gc is undeterministic */\n if (client) {\n destroyPromises.push(client.destroy(err))\n }\n }\n\n await Promise.all(destroyPromises)\n }\n}\n\nmodule.exports = Agent\n","const { addAbortListener } = require('../core/util')\nconst { RequestAbortedError } = require('../core/errors')\n\nconst kListener = Symbol('kListener')\nconst kSignal = Symbol('kSignal')\n\nfunction abort (self) {\n if (self.abort) {\n self.abort()\n } else {\n self.onError(new RequestAbortedError())\n }\n}\n\nfunction addSignal (self, signal) {\n self[kSignal] = null\n self[kListener] = null\n\n if (!signal) {\n return\n }\n\n if (signal.aborted) {\n abort(self)\n return\n }\n\n self[kSignal] = signal\n self[kListener] = () => {\n abort(self)\n }\n\n addAbortListener(self[kSignal], self[kListener])\n}\n\nfunction removeSignal (self) {\n if (!self[kSignal]) {\n return\n }\n\n if ('removeEventListener' in self[kSignal]) {\n self[kSignal].removeEventListener('abort', self[kListener])\n } else {\n self[kSignal].removeListener('abort', self[kListener])\n }\n\n self[kSignal] = null\n self[kListener] = null\n}\n\nmodule.exports = {\n addSignal,\n removeSignal\n}\n","'use strict'\n\nconst { AsyncResource } = require('async_hooks')\nconst { InvalidArgumentError, RequestAbortedError, SocketError } = 
require('../core/errors')\nconst util = require('../core/util')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass ConnectHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n const { signal, opaque, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n super('UNDICI_CONNECT')\n\n this.opaque = opaque || null\n this.responseHeaders = responseHeaders || null\n this.callback = callback\n this.abort = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders () {\n throw new SocketError('bad connect', null)\n }\n\n onUpgrade (statusCode, rawHeaders, socket) {\n const { callback, opaque, context } = this\n\n removeSignal(this)\n\n this.callback = null\n\n let headers = rawHeaders\n // Indicates is an HTTP2Session\n if (headers != null) {\n headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n }\n\n this.runInAsyncScope(callback, null, null, {\n statusCode,\n headers,\n socket,\n opaque,\n context\n })\n }\n\n onError (err) {\n const { callback, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n }\n}\n\nfunction connect (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n connect.call(this, opts, (err, data) => {\n return err ? 
reject(err) : resolve(data)\n })\n })\n }\n\n try {\n const connectHandler = new ConnectHandler(opts, callback)\n this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = connect\n","'use strict'\n\nconst {\n Readable,\n Duplex,\n PassThrough\n} = require('stream')\nconst {\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { AsyncResource } = require('async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\nconst assert = require('assert')\n\nconst kResume = Symbol('resume')\n\nclass PipelineRequest extends Readable {\n constructor () {\n super({ autoDestroy: true })\n\n this[kResume] = null\n }\n\n _read () {\n const { [kResume]: resume } = this\n\n if (resume) {\n this[kResume] = null\n resume()\n }\n }\n\n _destroy (err, callback) {\n this._read()\n\n callback(err)\n }\n}\n\nclass PipelineResponse extends Readable {\n constructor (resume) {\n super({ autoDestroy: true })\n this[kResume] = resume\n }\n\n _read () {\n this[kResume]()\n }\n\n _destroy (err, callback) {\n if (!err && !this._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n callback(err)\n }\n}\n\nclass PipelineHandler extends AsyncResource {\n constructor (opts, handler) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof handler !== 'function') {\n throw new InvalidArgumentError('invalid handler')\n }\n\n const { signal, method, opaque, onInfo, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_PIPELINE')\n\n this.opaque = opaque || null\n this.responseHeaders = responseHeaders || null\n this.handler = handler\n this.abort = null\n this.context = null\n this.onInfo = onInfo || null\n\n this.req = new PipelineRequest().on('error', util.nop)\n\n this.ret = new Duplex({\n readableObjectMode: opts.objectMode,\n autoDestroy: true,\n read: () => {\n const { body } = this\n\n if (body && body.resume) {\n body.resume()\n }\n },\n write: (chunk, encoding, callback) => {\n const { req } = this\n\n if (req.push(chunk, encoding) || req._readableState.destroyed) {\n callback()\n } else {\n req[kResume] = callback\n }\n },\n destroy: (err, callback) => {\n const { body, req, res, ret, abort } = this\n\n if (!err && !ret._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n if (abort && err) {\n abort()\n }\n\n util.destroy(body, err)\n util.destroy(req, err)\n util.destroy(res, err)\n\n removeSignal(this)\n\n callback(err)\n }\n }).on('prefinish', () => {\n const { req } = this\n\n // Node < 15 does not call _final in same tick.\n req.push(null)\n })\n\n this.res = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n const { ret, res } = this\n\n assert(!res, 'pipeline cannot be retried')\n\n if (ret.destroyed) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders (statusCode, rawHeaders, 
resume) {\n const { opaque, handler, context } = this\n\n if (statusCode < 200) {\n if (this.onInfo) {\n const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n this.res = new PipelineResponse(resume)\n\n let body\n try {\n this.handler = null\n const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n body = this.runInAsyncScope(handler, null, {\n statusCode,\n headers,\n opaque,\n body: this.res,\n context\n })\n } catch (err) {\n this.res.on('error', util.nop)\n throw err\n }\n\n if (!body || typeof body.on !== 'function') {\n throw new InvalidReturnValueError('expected Readable')\n }\n\n body\n .on('data', (chunk) => {\n const { ret, body } = this\n\n if (!ret.push(chunk) && body.pause) {\n body.pause()\n }\n })\n .on('error', (err) => {\n const { ret } = this\n\n util.destroy(ret, err)\n })\n .on('end', () => {\n const { ret } = this\n\n ret.push(null)\n })\n .on('close', () => {\n const { ret } = this\n\n if (!ret._readableState.ended) {\n util.destroy(ret, new RequestAbortedError())\n }\n })\n\n this.body = body\n }\n\n onData (chunk) {\n const { res } = this\n return res.push(chunk)\n }\n\n onComplete (trailers) {\n const { res } = this\n res.push(null)\n }\n\n onError (err) {\n const { ret } = this\n this.handler = null\n util.destroy(ret, err)\n }\n}\n\nfunction pipeline (opts, handler) {\n try {\n const pipelineHandler = new PipelineHandler(opts, handler)\n this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)\n return pipelineHandler.ret\n } catch (err) {\n return new PassThrough().destroy(err)\n }\n}\n\nmodule.exports = pipeline\n","'use strict'\n\nconst Readable = require('./readable')\nconst {\n InvalidArgumentError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { getResolveErrorBodyCallback } = require('./util')\nconst { AsyncResource } = require('async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass RequestHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts\n\n try {\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {\n throw new InvalidArgumentError('invalid highWaterMark')\n }\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_REQUEST')\n } catch (err) {\n if (util.isStream(body)) {\n util.destroy(body.on('error', util.nop), err)\n }\n throw err\n }\n\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.callback = callback\n this.res = null\n this.abort = null\n this.body = body\n this.trailers = {}\n this.context = null\n this.onInfo = onInfo || null\n this.throwOnError = throwOnError\n this.highWaterMark = highWaterMark\n\n if (util.isStream(body)) {\n body.on('error', (err) => 
{\n this.onError(err)\n })\n }\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders (statusCode, rawHeaders, resume, statusMessage) {\n const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this\n\n const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n\n if (statusCode < 200) {\n if (this.onInfo) {\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers\n const contentType = parsedHeaders['content-type']\n const body = new Readable({ resume, abort, contentType, highWaterMark })\n\n this.callback = null\n this.res = body\n if (callback !== null) {\n if (this.throwOnError && statusCode >= 400) {\n this.runInAsyncScope(getResolveErrorBodyCallback, null,\n { callback, body, contentType, statusCode, statusMessage, headers }\n )\n } else {\n this.runInAsyncScope(callback, null, null, {\n statusCode,\n headers,\n trailers: this.trailers,\n opaque,\n body,\n context\n })\n }\n }\n }\n\n onData (chunk) {\n const { res } = this\n return res.push(chunk)\n }\n\n onComplete (trailers) {\n const { res } = this\n\n removeSignal(this)\n\n util.parseHeaders(trailers, this.trailers)\n\n res.push(null)\n }\n\n onError (err) {\n const { res, callback, body, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n // TODO: Does this need queueMicrotask?\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n\n if (res) {\n this.res = null\n // Ensure all queued handlers are invoked before destroying res.\n queueMicrotask(() => {\n util.destroy(res, err)\n })\n }\n\n if (body) {\n this.body = null\n util.destroy(body, err)\n }\n }\n}\n\nfunction request (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n request.call(this, opts, (err, data) => {\n return err ? 
reject(err) : resolve(data)\n })\n })\n }\n\n try {\n this.dispatch(opts, new RequestHandler(opts, callback))\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = request\nmodule.exports.RequestHandler = RequestHandler\n","'use strict'\n\nconst { finished, PassThrough } = require('stream')\nconst {\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError\n} = require('../core/errors')\nconst util = require('../core/util')\nconst { getResolveErrorBodyCallback } = require('./util')\nconst { AsyncResource } = require('async_hooks')\nconst { addSignal, removeSignal } = require('./abort-signal')\n\nclass StreamHandler extends AsyncResource {\n constructor (opts, factory, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts\n\n try {\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (typeof factory !== 'function') {\n throw new InvalidArgumentError('invalid factory')\n }\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n if (method === 'CONNECT') {\n throw new InvalidArgumentError('invalid method')\n }\n\n if (onInfo && typeof onInfo !== 'function') {\n throw new InvalidArgumentError('invalid onInfo callback')\n }\n\n super('UNDICI_STREAM')\n } catch (err) {\n if (util.isStream(body)) {\n util.destroy(body.on('error', util.nop), err)\n }\n throw err\n }\n\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.factory = factory\n this.callback = callback\n this.res = null\n this.abort = null\n this.context = null\n this.trailers = null\n this.body = body\n this.onInfo = onInfo || null\n this.throwOnError = throwOnError || false\n\n if (util.isStream(body)) {\n body.on('error', (err) => {\n this.onError(err)\n })\n }\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = context\n }\n\n onHeaders (statusCode, rawHeaders, resume, statusMessage) {\n const { factory, opaque, context, callback, responseHeaders } = this\n\n const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n\n if (statusCode < 200) {\n if (this.onInfo) {\n this.onInfo({ statusCode, headers })\n }\n return\n }\n\n this.factory = null\n\n let res\n\n if (this.throwOnError && statusCode >= 400) {\n const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers\n const contentType = parsedHeaders['content-type']\n res = new PassThrough()\n\n this.callback = null\n this.runInAsyncScope(getResolveErrorBodyCallback, null,\n { callback, body: res, contentType, statusCode, statusMessage, headers }\n )\n } else {\n if (factory === null) {\n return\n }\n\n res = this.runInAsyncScope(factory, null, {\n statusCode,\n headers,\n opaque,\n context\n })\n\n if (\n !res ||\n typeof res.write !== 'function' ||\n typeof res.end !== 'function' ||\n typeof res.on !== 'function'\n ) {\n throw new InvalidReturnValueError('expected Writable')\n }\n\n // TODO: Avoid finished. 
It registers an unnecessary amount of listeners.\n finished(res, { readable: false }, (err) => {\n const { callback, res, opaque, trailers, abort } = this\n\n this.res = null\n if (err || !res.readable) {\n util.destroy(res, err)\n }\n\n this.callback = null\n this.runInAsyncScope(callback, null, err || null, { opaque, trailers })\n\n if (err) {\n abort()\n }\n })\n }\n\n res.on('drain', resume)\n\n this.res = res\n\n const needDrain = res.writableNeedDrain !== undefined\n ? res.writableNeedDrain\n : res._writableState && res._writableState.needDrain\n\n return needDrain !== true\n }\n\n onData (chunk) {\n const { res } = this\n\n return res ? res.write(chunk) : true\n }\n\n onComplete (trailers) {\n const { res } = this\n\n removeSignal(this)\n\n if (!res) {\n return\n }\n\n this.trailers = util.parseHeaders(trailers)\n\n res.end()\n }\n\n onError (err) {\n const { res, callback, opaque, body } = this\n\n removeSignal(this)\n\n this.factory = null\n\n if (res) {\n this.res = null\n util.destroy(res, err)\n } else if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n\n if (body) {\n this.body = null\n util.destroy(body, err)\n }\n }\n}\n\nfunction stream (opts, factory, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n stream.call(this, opts, factory, (err, data) => {\n return err ? reject(err) : resolve(data)\n })\n })\n }\n\n try {\n this.dispatch(opts, new StreamHandler(opts, factory, callback))\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = stream\n","'use strict'\n\nconst { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')\nconst { AsyncResource } = require('async_hooks')\nconst util = require('../core/util')\nconst { addSignal, removeSignal } = require('./abort-signal')\nconst assert = require('assert')\n\nclass UpgradeHandler extends AsyncResource {\n constructor (opts, callback) {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('invalid opts')\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n const { signal, opaque, responseHeaders } = opts\n\n if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {\n throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')\n }\n\n super('UNDICI_UPGRADE')\n\n this.responseHeaders = responseHeaders || null\n this.opaque = opaque || null\n this.callback = callback\n this.abort = null\n this.context = null\n\n addSignal(this, signal)\n }\n\n onConnect (abort, context) {\n if (!this.callback) {\n throw new RequestAbortedError()\n }\n\n this.abort = abort\n this.context = null\n }\n\n onHeaders () {\n throw new SocketError('bad upgrade', null)\n }\n\n onUpgrade (statusCode, rawHeaders, socket) {\n const { callback, opaque, context } = this\n\n assert.strictEqual(statusCode, 101)\n\n removeSignal(this)\n\n this.callback = null\n const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)\n this.runInAsyncScope(callback, null, null, {\n headers,\n socket,\n opaque,\n context\n })\n }\n\n onError (err) {\n const { callback, opaque } = this\n\n removeSignal(this)\n\n if (callback) {\n this.callback = null\n queueMicrotask(() => {\n this.runInAsyncScope(callback, null, err, { opaque })\n })\n }\n }\n}\n\nfunction upgrade (opts, callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n upgrade.call(this, opts, (err, data) => {\n return err ? reject(err) : resolve(data)\n })\n })\n }\n\n try {\n const upgradeHandler = new UpgradeHandler(opts, callback)\n this.dispatch({\n ...opts,\n method: opts.method || 'GET',\n upgrade: opts.protocol || 'Websocket'\n }, upgradeHandler)\n } catch (err) {\n if (typeof callback !== 'function') {\n throw err\n }\n const opaque = opts && opts.opaque\n queueMicrotask(() => callback(err, { opaque }))\n }\n}\n\nmodule.exports = upgrade\n","'use strict'\n\nmodule.exports.request = require('./api-request')\nmodule.exports.stream = require('./api-stream')\nmodule.exports.pipeline = require('./api-pipeline')\nmodule.exports.upgrade = require('./api-upgrade')\nmodule.exports.connect = require('./api-connect')\n","// Ported from https://github.com/nodejs/undici/pull/907\n\n'use strict'\n\nconst assert = require('assert')\nconst { Readable } = require('stream')\nconst { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')\nconst util = require('../core/util')\nconst { ReadableStreamFrom, toUSVString } = require('../core/util')\n\nlet Blob\n\nconst kConsume = Symbol('kConsume')\nconst kReading = Symbol('kReading')\nconst kBody = Symbol('kBody')\nconst kAbort = Symbol('abort')\nconst kContentType = Symbol('kContentType')\n\nconst noop = () => {}\n\nmodule.exports = class BodyReadable extends Readable {\n constructor ({\n resume,\n abort,\n contentType = '',\n highWaterMark = 64 * 1024 // Same as nodejs fs streams.\n }) {\n super({\n autoDestroy: true,\n read: resume,\n highWaterMark\n })\n\n this._readableState.dataEmitted = false\n\n this[kAbort] = abort\n this[kConsume] = null\n this[kBody] = null\n this[kContentType] = contentType\n\n // Is stream being consumed through Readable API?\n // This is an optimization so that we avoid checking\n // for 'data' and 'readable' listeners in the hot path\n // inside push().\n this[kReading] = false\n }\n\n destroy (err) {\n if (this.destroyed) {\n // Node < 16\n return this\n }\n\n if (!err && !this._readableState.endEmitted) {\n err = new RequestAbortedError()\n }\n\n if (err) {\n this[kAbort]()\n }\n\n return super.destroy(err)\n }\n\n emit (ev, ...args) {\n if (ev === 'data') {\n // Node < 16.7\n this._readableState.dataEmitted = true\n } else if (ev === 'error') {\n // Node < 16\n this._readableState.errorEmitted = true\n }\n return super.emit(ev, ...args)\n }\n\n on (ev, ...args) {\n if (ev === 'data' || ev === 'readable') {\n this[kReading] = true\n }\n return super.on(ev, ...args)\n }\n\n addListener (ev, ...args) {\n return this.on(ev, ...args)\n }\n\n off (ev, ...args) {\n const ret = super.off(ev, ...args)\n if (ev === 'data' || ev === 'readable') {\n this[kReading] = (\n this.listenerCount('data') > 0 ||\n this.listenerCount('readable') > 0\n )\n }\n return ret\n }\n\n removeListener (ev, ...args) {\n return this.off(ev, ...args)\n }\n\n push (chunk) {\n if (this[kConsume] && chunk !== null && this.readableLength === 0) {\n consumePush(this[kConsume], chunk)\n return 
this[kReading] ? super.push(chunk) : true\n }\n return super.push(chunk)\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-text\n async text () {\n return consume(this, 'text')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-json\n async json () {\n return consume(this, 'json')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-blob\n async blob () {\n return consume(this, 'blob')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-arraybuffer\n async arrayBuffer () {\n return consume(this, 'arrayBuffer')\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-formdata\n async formData () {\n // TODO: Implement.\n throw new NotSupportedError()\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-bodyused\n get bodyUsed () {\n return util.isDisturbed(this)\n }\n\n // https://fetch.spec.whatwg.org/#dom-body-body\n get body () {\n if (!this[kBody]) {\n this[kBody] = ReadableStreamFrom(this)\n if (this[kConsume]) {\n // TODO: Is this the best way to force a lock?\n this[kBody].getReader() // Ensure stream is locked.\n assert(this[kBody].locked)\n }\n }\n return this[kBody]\n }\n\n dump (opts) {\n let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144\n const signal = opts && opts.signal\n\n if (signal) {\n try {\n if (typeof signal !== 'object' || !('aborted' in signal)) {\n throw new InvalidArgumentError('signal must be an AbortSignal')\n }\n util.throwIfAborted(signal)\n } catch (err) {\n return Promise.reject(err)\n }\n }\n\n if (this.closed) {\n return Promise.resolve(null)\n }\n\n return new Promise((resolve, reject) => {\n const signalListenerCleanup = signal\n ? util.addAbortListener(signal, () => {\n this.destroy()\n })\n : noop\n\n this\n .on('close', function () {\n signalListenerCleanup()\n if (signal && signal.aborted) {\n reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))\n } else {\n resolve(null)\n }\n })\n .on('error', noop)\n .on('data', function (chunk) {\n limit -= chunk.length\n if (limit <= 0) {\n this.destroy()\n }\n })\n .resume()\n })\n }\n}\n\n// https://streams.spec.whatwg.org/#readablestream-locked\nfunction isLocked (self) {\n // Consume is an implicit lock.\n return (self[kBody] && self[kBody].locked === true) || self[kConsume]\n}\n\n// https://fetch.spec.whatwg.org/#body-unusable\nfunction isUnusable (self) {\n return util.isDisturbed(self) || isLocked(self)\n}\n\nasync function consume (stream, type) {\n if (isUnusable(stream)) {\n throw new TypeError('unusable')\n }\n\n assert(!stream[kConsume])\n\n return new Promise((resolve, reject) => {\n stream[kConsume] = {\n type,\n stream,\n resolve,\n reject,\n length: 0,\n body: []\n }\n\n stream\n .on('error', function (err) {\n consumeFinish(this[kConsume], err)\n })\n .on('close', function () {\n if (this[kConsume].body !== null) {\n consumeFinish(this[kConsume], new RequestAbortedError())\n }\n })\n\n process.nextTick(consumeStart, stream[kConsume])\n })\n}\n\nfunction consumeStart (consume) {\n if (consume.body === null) {\n return\n }\n\n const { _readableState: state } = consume.stream\n\n for (const chunk of state.buffer) {\n consumePush(consume, chunk)\n }\n\n if (state.endEmitted) {\n consumeEnd(this[kConsume])\n } else {\n consume.stream.on('end', function () {\n consumeEnd(this[kConsume])\n })\n }\n\n consume.stream.resume()\n\n while (consume.stream.read() != null) {\n // Loop\n }\n}\n\nfunction consumeEnd (consume) {\n const { type, body, resolve, stream, length } = consume\n\n try {\n if (type === 'text') {\n 
resolve(toUSVString(Buffer.concat(body)))\n } else if (type === 'json') {\n resolve(JSON.parse(Buffer.concat(body)))\n } else if (type === 'arrayBuffer') {\n const dst = new Uint8Array(length)\n\n let pos = 0\n for (const buf of body) {\n dst.set(buf, pos)\n pos += buf.byteLength\n }\n\n resolve(dst.buffer)\n } else if (type === 'blob') {\n if (!Blob) {\n Blob = require('buffer').Blob\n }\n resolve(new Blob(body, { type: stream[kContentType] }))\n }\n\n consumeFinish(consume)\n } catch (err) {\n stream.destroy(err)\n }\n}\n\nfunction consumePush (consume, chunk) {\n consume.length += chunk.length\n consume.body.push(chunk)\n}\n\nfunction consumeFinish (consume, err) {\n if (consume.body === null) {\n return\n }\n\n if (err) {\n consume.reject(err)\n } else {\n consume.resolve()\n }\n\n consume.type = null\n consume.stream = null\n consume.resolve = null\n consume.reject = null\n consume.length = 0\n consume.body = null\n}\n","const assert = require('assert')\nconst {\n ResponseStatusCodeError\n} = require('../core/errors')\nconst { toUSVString } = require('../core/util')\n\nasync function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {\n assert(body)\n\n let chunks = []\n let limit = 0\n\n for await (const chunk of body) {\n chunks.push(chunk)\n limit += chunk.length\n if (limit > 128 * 1024) {\n chunks = null\n break\n }\n }\n\n if (statusCode === 204 || !contentType || !chunks) {\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))\n return\n }\n\n try {\n if (contentType.startsWith('application/json')) {\n const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))\n return\n }\n\n if (contentType.startsWith('text/')) {\n const payload = toUSVString(Buffer.concat(chunks))\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))\n return\n }\n } catch (err) {\n // Process in a fallback if error\n }\n\n process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers))\n}\n\nmodule.exports = { getResolveErrorBodyCallback }\n","'use strict'\n\nconst {\n BalancedPoolMissingUpstreamError,\n InvalidArgumentError\n} = require('./core/errors')\nconst {\n PoolBase,\n kClients,\n kNeedDrain,\n kAddClient,\n kRemoveClient,\n kGetDispatcher\n} = require('./pool-base')\nconst Pool = require('./pool')\nconst { kUrl, kInterceptors } = require('./core/symbols')\nconst { parseOrigin } = require('./core/util')\nconst kFactory = Symbol('factory')\n\nconst kOptions = Symbol('options')\nconst kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')\nconst kCurrentWeight = Symbol('kCurrentWeight')\nconst kIndex = Symbol('kIndex')\nconst kWeight = Symbol('kWeight')\nconst kMaxWeightPerServer = Symbol('kMaxWeightPerServer')\nconst kErrorPenalty = Symbol('kErrorPenalty')\n\nfunction getGreatestCommonDivisor (a, b) {\n if (b === 0) return a\n return getGreatestCommonDivisor(b, a % b)\n}\n\nfunction defaultFactory (origin, opts) {\n return new Pool(origin, opts)\n}\n\nclass BalancedPool extends PoolBase {\n constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {\n super()\n\n this[kOptions] = opts\n this[kIndex] = -1\n this[kCurrentWeight] = 0\n\n this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100\n this[kErrorPenalty] = this[kOptions].errorPenalty || 15\n\n if (!Array.isArray(upstreams)) {\n upstreams = [upstreams]\n }\n\n if (typeof factory !== 'function') {\n throw new InvalidArgumentError('factory must be a function.')\n }\n\n this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)\n ? opts.interceptors.BalancedPool\n : []\n this[kFactory] = factory\n\n for (const upstream of upstreams) {\n this.addUpstream(upstream)\n }\n this._updateBalancedPoolStats()\n }\n\n addUpstream (upstream) {\n const upstreamOrigin = parseOrigin(upstream).origin\n\n if (this[kClients].find((pool) => (\n pool[kUrl].origin === upstreamOrigin &&\n pool.closed !== true &&\n pool.destroyed !== true\n ))) {\n return this\n }\n const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))\n\n this[kAddClient](pool)\n pool.on('connect', () => {\n pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])\n })\n\n pool.on('connectionError', () => {\n pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])\n this._updateBalancedPoolStats()\n })\n\n pool.on('disconnect', (...args) => {\n const err = args[2]\n if (err && err.code === 'UND_ERR_SOCKET') {\n // decrease the weight of the pool.\n pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])\n this._updateBalancedPoolStats()\n }\n })\n\n for (const client of this[kClients]) {\n client[kWeight] = this[kMaxWeightPerServer]\n }\n\n this._updateBalancedPoolStats()\n\n return this\n }\n\n _updateBalancedPoolStats () {\n this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)\n }\n\n removeUpstream (upstream) {\n const upstreamOrigin = parseOrigin(upstream).origin\n\n const pool = this[kClients].find((pool) => (\n pool[kUrl].origin === upstreamOrigin &&\n pool.closed !== true &&\n pool.destroyed !== true\n ))\n\n if (pool) {\n this[kRemoveClient](pool)\n }\n\n return this\n }\n\n get upstreams () {\n return this[kClients]\n .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)\n .map((p) => p[kUrl].origin)\n }\n\n [kGetDispatcher] () {\n // We validate that pools is 
greater than 0,\n // otherwise we would have to wait until an upstream\n // is added, which might never happen.\n if (this[kClients].length === 0) {\n throw new BalancedPoolMissingUpstreamError()\n }\n\n const dispatcher = this[kClients].find(dispatcher => (\n !dispatcher[kNeedDrain] &&\n dispatcher.closed !== true &&\n dispatcher.destroyed !== true\n ))\n\n if (!dispatcher) {\n return\n }\n\n const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)\n\n if (allClientsBusy) {\n return\n }\n\n let counter = 0\n\n let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])\n\n while (counter++ < this[kClients].length) {\n this[kIndex] = (this[kIndex] + 1) % this[kClients].length\n const pool = this[kClients][this[kIndex]]\n\n // find pool index with the largest weight\n if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {\n maxWeightIndex = this[kIndex]\n }\n\n // decrease the current weight every `this[kClients].length`.\n if (this[kIndex] === 0) {\n // Set the current weight to the next lower weight.\n this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]\n\n if (this[kCurrentWeight] <= 0) {\n this[kCurrentWeight] = this[kMaxWeightPerServer]\n }\n }\n if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {\n return pool\n }\n }\n\n this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]\n this[kIndex] = maxWeightIndex\n return this[kClients][maxWeightIndex]\n }\n}\n\nmodule.exports = BalancedPool\n","'use strict'\n\nconst { kConstruct } = require('./symbols')\nconst { urlEquals, fieldValues: getFieldValues } = require('./util')\nconst { kEnumerableProperty, isDisturbed } = require('../core/util')\nconst { kHeadersList } = require('../core/symbols')\nconst { webidl } = require('../fetch/webidl')\nconst { Response, cloneResponse } = require('../fetch/response')\nconst { Request } = require('../fetch/request')\nconst { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')\nconst { fetching } = require('../fetch/index')\nconst { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')\nconst assert = require('assert')\nconst { getGlobalDispatcher } = require('../global')\n\n/**\n * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation\n * @typedef {Object} CacheBatchOperation\n * @property {'delete' | 'put'} type\n * @property {any} request\n * @property {any} response\n * @property {import('../../types/cache').CacheQueryOptions} options\n */\n\n/**\n * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list\n * @typedef {[any, any][]} requestResponseList\n */\n\nclass Cache {\n /**\n * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list\n * @type {requestResponseList}\n */\n #relevantRequestResponseList\n\n constructor () {\n if (arguments[0] !== kConstruct) {\n webidl.illegalConstructor()\n }\n\n this.#relevantRequestResponseList = arguments[1]\n }\n\n async match (request, options = {}) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })\n\n request = webidl.converters.RequestInfo(request)\n options = webidl.converters.CacheQueryOptions(options)\n\n const p = await this.matchAll(request, options)\n\n if (p.length === 0) {\n return\n }\n\n return p[0]\n }\n\n async matchAll (request = undefined, options = {}) {\n webidl.brandCheck(this, Cache)\n\n if (request !== undefined) request = webidl.converters.RequestInfo(request)\n 
options = webidl.converters.CacheQueryOptions(options)\n\n // 1.\n let r = null\n\n // 2.\n if (request !== undefined) {\n if (request instanceof Request) {\n // 2.1.1\n r = request[kState]\n\n // 2.1.2\n if (r.method !== 'GET' && !options.ignoreMethod) {\n return []\n }\n } else if (typeof request === 'string') {\n // 2.2.1\n r = new Request(request)[kState]\n }\n }\n\n // 5.\n // 5.1\n const responses = []\n\n // 5.2\n if (request === undefined) {\n // 5.2.1\n for (const requestResponse of this.#relevantRequestResponseList) {\n responses.push(requestResponse[1])\n }\n } else { // 5.3\n // 5.3.1\n const requestResponses = this.#queryCache(r, options)\n\n // 5.3.2\n for (const requestResponse of requestResponses) {\n responses.push(requestResponse[1])\n }\n }\n\n // 5.4\n // We don't implement CORs so we don't need to loop over the responses, yay!\n\n // 5.5.1\n const responseList = []\n\n // 5.5.2\n for (const response of responses) {\n // 5.5.2.1\n const responseObject = new Response(response.body?.source ?? null)\n const body = responseObject[kState].body\n responseObject[kState] = response\n responseObject[kState].body = body\n responseObject[kHeaders][kHeadersList] = response.headersList\n responseObject[kHeaders][kGuard] = 'immutable'\n\n responseList.push(responseObject)\n }\n\n // 6.\n return Object.freeze(responseList)\n }\n\n async add (request) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })\n\n request = webidl.converters.RequestInfo(request)\n\n // 1.\n const requests = [request]\n\n // 2.\n const responseArrayPromise = this.addAll(requests)\n\n // 3.\n return await responseArrayPromise\n }\n\n async addAll (requests) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })\n\n requests = webidl.converters['sequence'](requests)\n\n // 1.\n const responsePromises = []\n\n // 2.\n const requestList = []\n\n // 3.\n for (const request of requests) {\n if (typeof request === 'string') {\n continue\n }\n\n // 3.1\n const r = request[kState]\n\n // 3.2\n if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {\n throw webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'Expected http/s scheme when method is not GET.'\n })\n }\n }\n\n // 4.\n /** @type {ReturnType[]} */\n const fetchControllers = []\n\n // 5.\n for (const request of requests) {\n // 5.1\n const r = new Request(request)[kState]\n\n // 5.2\n if (!urlIsHttpHttpsScheme(r.url)) {\n throw webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'Expected http/s scheme.'\n })\n }\n\n // 5.4\n r.initiator = 'fetch'\n r.destination = 'subresource'\n\n // 5.5\n requestList.push(r)\n\n // 5.6\n const responsePromise = createDeferredPromise()\n\n // 5.7\n fetchControllers.push(fetching({\n request: r,\n dispatcher: getGlobalDispatcher(),\n processResponse (response) {\n // 1.\n if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {\n responsePromise.reject(webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'Received an invalid status code or the request failed.'\n }))\n } else if (response.headersList.contains('vary')) { // 2.\n // 2.1\n const fieldValues = getFieldValues(response.headersList.get('vary'))\n\n // 2.2\n for (const fieldValue of fieldValues) {\n // 2.2.1\n if (fieldValue === '*') {\n responsePromise.reject(webidl.errors.exception({\n header: 'Cache.addAll',\n message: 'invalid vary field value'\n }))\n\n for (const controller of 
fetchControllers) {\n controller.abort()\n }\n\n return\n }\n }\n }\n },\n processResponseEndOfBody (response) {\n // 1.\n if (response.aborted) {\n responsePromise.reject(new DOMException('aborted', 'AbortError'))\n return\n }\n\n // 2.\n responsePromise.resolve(response)\n }\n }))\n\n // 5.8\n responsePromises.push(responsePromise.promise)\n }\n\n // 6.\n const p = Promise.all(responsePromises)\n\n // 7.\n const responses = await p\n\n // 7.1\n const operations = []\n\n // 7.2\n let index = 0\n\n // 7.3\n for (const response of responses) {\n // 7.3.1\n /** @type {CacheBatchOperation} */\n const operation = {\n type: 'put', // 7.3.2\n request: requestList[index], // 7.3.3\n response // 7.3.4\n }\n\n operations.push(operation) // 7.3.5\n\n index++ // 7.3.6\n }\n\n // 7.5\n const cacheJobPromise = createDeferredPromise()\n\n // 7.6.1\n let errorData = null\n\n // 7.6.2\n try {\n this.#batchCacheOperations(operations)\n } catch (e) {\n errorData = e\n }\n\n // 7.6.3\n queueMicrotask(() => {\n // 7.6.3.1\n if (errorData === null) {\n cacheJobPromise.resolve(undefined)\n } else {\n // 7.6.3.2\n cacheJobPromise.reject(errorData)\n }\n })\n\n // 7.7\n return cacheJobPromise.promise\n }\n\n async put (request, response) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })\n\n request = webidl.converters.RequestInfo(request)\n response = webidl.converters.Response(response)\n\n // 1.\n let innerRequest = null\n\n // 2.\n if (request instanceof Request) {\n innerRequest = request[kState]\n } else { // 3.\n innerRequest = new Request(request)[kState]\n }\n\n // 4.\n if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Expected an http/s scheme when method is not GET'\n })\n }\n\n // 5.\n const innerResponse = response[kState]\n\n // 6.\n if (innerResponse.status === 206) {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Got 206 status'\n })\n }\n\n // 7.\n if (innerResponse.headersList.contains('vary')) {\n // 7.1.\n const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))\n\n // 7.2.\n for (const fieldValue of fieldValues) {\n // 7.2.1\n if (fieldValue === '*') {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Got * vary field value'\n })\n }\n }\n }\n\n // 8.\n if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {\n throw webidl.errors.exception({\n header: 'Cache.put',\n message: 'Response body is locked or disturbed'\n })\n }\n\n // 9.\n const clonedResponse = cloneResponse(innerResponse)\n\n // 10.\n const bodyReadPromise = createDeferredPromise()\n\n // 11.\n if (innerResponse.body != null) {\n // 11.1\n const stream = innerResponse.body.stream\n\n // 11.2\n const reader = stream.getReader()\n\n // 11.3\n readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)\n } else {\n bodyReadPromise.resolve(undefined)\n }\n\n // 12.\n /** @type {CacheBatchOperation[]} */\n const operations = []\n\n // 13.\n /** @type {CacheBatchOperation} */\n const operation = {\n type: 'put', // 14.\n request: innerRequest, // 15.\n response: clonedResponse // 16.\n }\n\n // 17.\n operations.push(operation)\n\n // 19.\n const bytes = await bodyReadPromise.promise\n\n if (clonedResponse.body != null) {\n clonedResponse.body.source = bytes\n }\n\n // 19.1\n const cacheJobPromise = createDeferredPromise()\n\n // 19.2.1\n let errorData = null\n\n // 
19.2.2\n try {\n this.#batchCacheOperations(operations)\n } catch (e) {\n errorData = e\n }\n\n // 19.2.3\n queueMicrotask(() => {\n // 19.2.3.1\n if (errorData === null) {\n cacheJobPromise.resolve()\n } else { // 19.2.3.2\n cacheJobPromise.reject(errorData)\n }\n })\n\n return cacheJobPromise.promise\n }\n\n async delete (request, options = {}) {\n webidl.brandCheck(this, Cache)\n webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })\n\n request = webidl.converters.RequestInfo(request)\n options = webidl.converters.CacheQueryOptions(options)\n\n /**\n * @type {Request}\n */\n let r = null\n\n if (request instanceof Request) {\n r = request[kState]\n\n if (r.method !== 'GET' && !options.ignoreMethod) {\n return false\n }\n } else {\n assert(typeof request === 'string')\n\n r = new Request(request)[kState]\n }\n\n /** @type {CacheBatchOperation[]} */\n const operations = []\n\n /** @type {CacheBatchOperation} */\n const operation = {\n type: 'delete',\n request: r,\n options\n }\n\n operations.push(operation)\n\n const cacheJobPromise = createDeferredPromise()\n\n let errorData = null\n let requestResponses\n\n try {\n requestResponses = this.#batchCacheOperations(operations)\n } catch (e) {\n errorData = e\n }\n\n queueMicrotask(() => {\n if (errorData === null) {\n cacheJobPromise.resolve(!!requestResponses?.length)\n } else {\n cacheJobPromise.reject(errorData)\n }\n })\n\n return cacheJobPromise.promise\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys\n * @param {any} request\n * @param {import('../../types/cache').CacheQueryOptions} options\n * @returns {readonly Request[]}\n */\n async keys (request = undefined, options = {}) {\n webidl.brandCheck(this, Cache)\n\n if (request !== undefined) request = webidl.converters.RequestInfo(request)\n options = webidl.converters.CacheQueryOptions(options)\n\n // 1.\n let r = null\n\n // 2.\n if (request !== undefined) {\n // 2.1\n if (request instanceof Request) {\n // 2.1.1\n r = request[kState]\n\n // 2.1.2\n if (r.method !== 'GET' && !options.ignoreMethod) {\n return []\n }\n } else if (typeof request === 'string') { // 2.2\n r = new Request(request)[kState]\n }\n }\n\n // 4.\n const promise = createDeferredPromise()\n\n // 5.\n // 5.1\n const requests = []\n\n // 5.2\n if (request === undefined) {\n // 5.2.1\n for (const requestResponse of this.#relevantRequestResponseList) {\n // 5.2.1.1\n requests.push(requestResponse[0])\n }\n } else { // 5.3\n // 5.3.1\n const requestResponses = this.#queryCache(r, options)\n\n // 5.3.2\n for (const requestResponse of requestResponses) {\n // 5.3.2.1\n requests.push(requestResponse[0])\n }\n }\n\n // 5.4\n queueMicrotask(() => {\n // 5.4.1\n const requestList = []\n\n // 5.4.2\n for (const request of requests) {\n const requestObject = new Request('https://a')\n requestObject[kState] = request\n requestObject[kHeaders][kHeadersList] = request.headersList\n requestObject[kHeaders][kGuard] = 'immutable'\n requestObject[kRealm] = request.client\n\n // 5.4.2.1\n requestList.push(requestObject)\n }\n\n // 5.4.3\n promise.resolve(Object.freeze(requestList))\n })\n\n return promise.promise\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm\n * @param {CacheBatchOperation[]} operations\n * @returns {requestResponseList}\n */\n #batchCacheOperations (operations) {\n // 1.\n const cache = this.#relevantRequestResponseList\n\n // 2.\n const backupCache = [...cache]\n\n // 3.\n const addedItems = []\n\n // 4.1\n const resultList = []\n\n 
try {\n // 4.2\n for (const operation of operations) {\n // 4.2.1\n if (operation.type !== 'delete' && operation.type !== 'put') {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'operation type does not match \"delete\" or \"put\"'\n })\n }\n\n // 4.2.2\n if (operation.type === 'delete' && operation.response != null) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'delete operation should not have an associated response'\n })\n }\n\n // 4.2.3\n if (this.#queryCache(operation.request, operation.options, addedItems).length) {\n throw new DOMException('???', 'InvalidStateError')\n }\n\n // 4.2.4\n let requestResponses\n\n // 4.2.5\n if (operation.type === 'delete') {\n // 4.2.5.1\n requestResponses = this.#queryCache(operation.request, operation.options)\n\n // TODO: the spec is wrong, this is needed to pass WPTs\n if (requestResponses.length === 0) {\n return []\n }\n\n // 4.2.5.2\n for (const requestResponse of requestResponses) {\n const idx = cache.indexOf(requestResponse)\n assert(idx !== -1)\n\n // 4.2.5.2.1\n cache.splice(idx, 1)\n }\n } else if (operation.type === 'put') { // 4.2.6\n // 4.2.6.1\n if (operation.response == null) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'put operation should have an associated response'\n })\n }\n\n // 4.2.6.2\n const r = operation.request\n\n // 4.2.6.3\n if (!urlIsHttpHttpsScheme(r.url)) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'expected http or https scheme'\n })\n }\n\n // 4.2.6.4\n if (r.method !== 'GET') {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'not get method'\n })\n }\n\n // 4.2.6.5\n if (operation.options != null) {\n throw webidl.errors.exception({\n header: 'Cache.#batchCacheOperations',\n message: 'options must not be defined'\n })\n }\n\n // 4.2.6.6\n requestResponses = this.#queryCache(operation.request)\n\n // 4.2.6.7\n for (const requestResponse of requestResponses) {\n const idx = cache.indexOf(requestResponse)\n assert(idx !== -1)\n\n // 4.2.6.7.1\n cache.splice(idx, 1)\n }\n\n // 4.2.6.8\n cache.push([operation.request, operation.response])\n\n // 4.2.6.10\n addedItems.push([operation.request, operation.response])\n }\n\n // 4.2.7\n resultList.push([operation.request, operation.response])\n }\n\n // 4.3\n return resultList\n } catch (e) { // 5.\n // 5.1\n this.#relevantRequestResponseList.length = 0\n\n // 5.2\n this.#relevantRequestResponseList = backupCache\n\n // 5.3\n throw e\n }\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#query-cache\n * @param {any} requestQuery\n * @param {import('../../types/cache').CacheQueryOptions} options\n * @param {requestResponseList} targetStorage\n * @returns {requestResponseList}\n */\n #queryCache (requestQuery, options, targetStorage) {\n /** @type {requestResponseList} */\n const resultList = []\n\n const storage = targetStorage ?? 
this.#relevantRequestResponseList\n\n for (const requestResponse of storage) {\n const [cachedRequest, cachedResponse] = requestResponse\n if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {\n resultList.push(requestResponse)\n }\n }\n\n return resultList\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm\n * @param {any} requestQuery\n * @param {any} request\n * @param {any | null} response\n * @param {import('../../types/cache').CacheQueryOptions | undefined} options\n * @returns {boolean}\n */\n #requestMatchesCachedItem (requestQuery, request, response = null, options) {\n // if (options?.ignoreMethod === false && request.method === 'GET') {\n // return false\n // }\n\n const queryURL = new URL(requestQuery.url)\n\n const cachedURL = new URL(request.url)\n\n if (options?.ignoreSearch) {\n cachedURL.search = ''\n\n queryURL.search = ''\n }\n\n if (!urlEquals(queryURL, cachedURL, true)) {\n return false\n }\n\n if (\n response == null ||\n options?.ignoreVary ||\n !response.headersList.contains('vary')\n ) {\n return true\n }\n\n const fieldValues = getFieldValues(response.headersList.get('vary'))\n\n for (const fieldValue of fieldValues) {\n if (fieldValue === '*') {\n return false\n }\n\n const requestValue = request.headersList.get(fieldValue)\n const queryValue = requestQuery.headersList.get(fieldValue)\n\n // If one has the header and the other doesn't, or one has\n // a different value than the other, return false\n if (requestValue !== queryValue) {\n return false\n }\n }\n\n return true\n }\n}\n\nObject.defineProperties(Cache.prototype, {\n [Symbol.toStringTag]: {\n value: 'Cache',\n configurable: true\n },\n match: kEnumerableProperty,\n matchAll: kEnumerableProperty,\n add: kEnumerableProperty,\n addAll: kEnumerableProperty,\n put: kEnumerableProperty,\n delete: kEnumerableProperty,\n keys: kEnumerableProperty\n})\n\nconst cacheQueryOptionConverters = [\n {\n key: 'ignoreSearch',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'ignoreMethod',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'ignoreVary',\n converter: webidl.converters.boolean,\n defaultValue: false\n }\n]\n\nwebidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)\n\nwebidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([\n ...cacheQueryOptionConverters,\n {\n key: 'cacheName',\n converter: webidl.converters.DOMString\n }\n])\n\nwebidl.converters.Response = webidl.interfaceConverter(Response)\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.RequestInfo\n)\n\nmodule.exports = {\n Cache\n}\n","'use strict'\n\nconst { kConstruct } = require('./symbols')\nconst { Cache } = require('./cache')\nconst { webidl } = require('../fetch/webidl')\nconst { kEnumerableProperty } = require('../core/util')\n\nclass CacheStorage {\n /**\n * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map\n * @type {Map}\n */\n async has (cacheName) {\n webidl.brandCheck(this, CacheStorage)\n webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })\n\n cacheName = webidl.converters.DOMString(cacheName)\n\n // 2.1.1\n // 2.2\n return this.#caches.has(cacheName)\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open\n * @param {string} cacheName\n * @returns {Promise}\n */\n async open (cacheName) {\n webidl.brandCheck(this, CacheStorage)\n 
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })\n\n cacheName = webidl.converters.DOMString(cacheName)\n\n // 2.1\n if (this.#caches.has(cacheName)) {\n // await caches.open('v1') !== await caches.open('v1')\n\n // 2.1.1\n const cache = this.#caches.get(cacheName)\n\n // 2.1.1.1\n return new Cache(kConstruct, cache)\n }\n\n // 2.2\n const cache = []\n\n // 2.3\n this.#caches.set(cacheName, cache)\n\n // 2.4\n return new Cache(kConstruct, cache)\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete\n * @param {string} cacheName\n * @returns {Promise}\n */\n async delete (cacheName) {\n webidl.brandCheck(this, CacheStorage)\n webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })\n\n cacheName = webidl.converters.DOMString(cacheName)\n\n return this.#caches.delete(cacheName)\n }\n\n /**\n * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys\n * @returns {string[]}\n */\n async keys () {\n webidl.brandCheck(this, CacheStorage)\n\n // 2.1\n const keys = this.#caches.keys()\n\n // 2.2\n return [...keys]\n }\n}\n\nObject.defineProperties(CacheStorage.prototype, {\n [Symbol.toStringTag]: {\n value: 'CacheStorage',\n configurable: true\n },\n match: kEnumerableProperty,\n has: kEnumerableProperty,\n open: kEnumerableProperty,\n delete: kEnumerableProperty,\n keys: kEnumerableProperty\n})\n\nmodule.exports = {\n CacheStorage\n}\n","'use strict'\n\nmodule.exports = {\n kConstruct: require('../core/symbols').kConstruct\n}\n","'use strict'\n\nconst assert = require('assert')\nconst { URLSerializer } = require('../fetch/dataURL')\nconst { isValidHeaderName } = require('../fetch/util')\n\n/**\n * @see https://url.spec.whatwg.org/#concept-url-equals\n * @param {URL} A\n * @param {URL} B\n * @param {boolean | undefined} excludeFragment\n * @returns {boolean}\n */\nfunction urlEquals (A, B, excludeFragment = false) {\n const serializedA = URLSerializer(A, excludeFragment)\n\n const serializedB = URLSerializer(B, excludeFragment)\n\n return serializedA === serializedB\n}\n\n/**\n * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262\n * @param {string} header\n */\nfunction fieldValues (header) {\n assert(header !== null)\n\n const values = []\n\n for (let value of header.split(',')) {\n value = value.trim()\n\n if (!value.length) {\n continue\n } else if (!isValidHeaderName(value)) {\n continue\n }\n\n values.push(value)\n }\n\n return values\n}\n\nmodule.exports = {\n urlEquals,\n fieldValues\n}\n","// @ts-check\n\n'use strict'\n\n/* global WebAssembly */\n\nconst assert = require('assert')\nconst net = require('net')\nconst http = require('http')\nconst { pipeline } = require('stream')\nconst util = require('./core/util')\nconst timers = require('./timers')\nconst Request = require('./core/request')\nconst DispatcherBase = require('./dispatcher-base')\nconst {\n RequestContentLengthMismatchError,\n ResponseContentLengthMismatchError,\n InvalidArgumentError,\n RequestAbortedError,\n HeadersTimeoutError,\n HeadersOverflowError,\n SocketError,\n InformationalError,\n BodyTimeoutError,\n HTTPParserError,\n ResponseExceededMaxSizeError,\n ClientDestroyedError\n} = require('./core/errors')\nconst buildConnector = require('./core/connect')\nconst {\n kUrl,\n kReset,\n kServerName,\n kClient,\n kBusy,\n kParser,\n kConnect,\n kBlocking,\n kResuming,\n kRunning,\n kPending,\n kSize,\n kWriting,\n kQueue,\n kConnected,\n 
kConnecting,\n kNeedDrain,\n kNoRef,\n kKeepAliveDefaultTimeout,\n kHostHeader,\n kPendingIdx,\n kRunningIdx,\n kError,\n kPipelining,\n kSocket,\n kKeepAliveTimeoutValue,\n kMaxHeadersSize,\n kKeepAliveMaxTimeout,\n kKeepAliveTimeoutThreshold,\n kHeadersTimeout,\n kBodyTimeout,\n kStrictContentLength,\n kConnector,\n kMaxRedirections,\n kMaxRequests,\n kCounter,\n kClose,\n kDestroy,\n kDispatch,\n kInterceptors,\n kLocalAddress,\n kMaxResponseSize,\n kHTTPConnVersion,\n // HTTP2\n kHost,\n kHTTP2Session,\n kHTTP2SessionState,\n kHTTP2BuildRequest,\n kHTTP2CopyHeaders,\n kHTTP1BuildRequest\n} = require('./core/symbols')\n\n/** @type {import('http2')} */\nlet http2\ntry {\n http2 = require('http2')\n} catch {\n // @ts-ignore\n http2 = { constants: {} }\n}\n\nconst {\n constants: {\n HTTP2_HEADER_AUTHORITY,\n HTTP2_HEADER_METHOD,\n HTTP2_HEADER_PATH,\n HTTP2_HEADER_SCHEME,\n HTTP2_HEADER_CONTENT_LENGTH,\n HTTP2_HEADER_EXPECT,\n HTTP2_HEADER_STATUS\n }\n} = http2\n\n// Experimental\nlet h2ExperimentalWarned = false\n\nconst FastBuffer = Buffer[Symbol.species]\n\nconst kClosedResolve = Symbol('kClosedResolve')\n\nconst channels = {}\n\ntry {\n const diagnosticsChannel = require('diagnostics_channel')\n channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')\n channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')\n channels.connectError = diagnosticsChannel.channel('undici:client:connectError')\n channels.connected = diagnosticsChannel.channel('undici:client:connected')\n} catch {\n channels.sendHeaders = { hasSubscribers: false }\n channels.beforeConnect = { hasSubscribers: false }\n channels.connectError = { hasSubscribers: false }\n channels.connected = { hasSubscribers: false }\n}\n\n/**\n * @type {import('../types/client').default}\n */\nclass Client extends DispatcherBase {\n /**\n *\n * @param {string|URL} url\n * @param {import('../types/client').Client.Options} options\n */\n constructor (url, {\n interceptors,\n maxHeaderSize,\n headersTimeout,\n socketTimeout,\n requestTimeout,\n connectTimeout,\n bodyTimeout,\n idleTimeout,\n keepAlive,\n keepAliveTimeout,\n maxKeepAliveTimeout,\n keepAliveMaxTimeout,\n keepAliveTimeoutThreshold,\n socketPath,\n pipelining,\n tls,\n strictContentLength,\n maxCachedSessions,\n maxRedirections,\n connect,\n maxRequestsPerClient,\n localAddress,\n maxResponseSize,\n autoSelectFamily,\n autoSelectFamilyAttemptTimeout,\n // h2\n allowH2,\n maxConcurrentStreams\n } = {}) {\n super()\n\n if (keepAlive !== undefined) {\n throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')\n }\n\n if (socketTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')\n }\n\n if (requestTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')\n }\n\n if (idleTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')\n }\n\n if (maxKeepAliveTimeout !== undefined) {\n throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')\n }\n\n if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {\n throw new InvalidArgumentError('invalid maxHeaderSize')\n }\n\n if (socketPath != null && typeof socketPath !== 'string') {\n throw new InvalidArgumentError('invalid socketPath')\n }\n\n if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 
0)) {\n throw new InvalidArgumentError('invalid connectTimeout')\n }\n\n if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {\n throw new InvalidArgumentError('invalid keepAliveTimeout')\n }\n\n if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {\n throw new InvalidArgumentError('invalid keepAliveMaxTimeout')\n }\n\n if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {\n throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')\n }\n\n if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {\n throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')\n }\n\n if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {\n throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')\n }\n\n if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {\n throw new InvalidArgumentError('connect must be a function or an object')\n }\n\n if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {\n throw new InvalidArgumentError('maxRedirections must be a positive number')\n }\n\n if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {\n throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')\n }\n\n if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {\n throw new InvalidArgumentError('localAddress must be valid string IP address')\n }\n\n if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {\n throw new InvalidArgumentError('maxResponseSize must be a positive number')\n }\n\n if (\n autoSelectFamilyAttemptTimeout != null &&\n (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)\n ) {\n throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')\n }\n\n // h2\n if (allowH2 != null && typeof allowH2 !== 'boolean') {\n throw new InvalidArgumentError('allowH2 must be a valid boolean value')\n }\n\n if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {\n throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0')\n }\n\n if (typeof connect !== 'function') {\n connect = buildConnector({\n ...tls,\n maxCachedSessions,\n allowH2,\n socketPath,\n timeout: connectTimeout,\n ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),\n ...connect\n })\n }\n\n this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)\n ? interceptors.Client\n : [createRedirectInterceptor({ maxRedirections })]\n this[kUrl] = util.parseOrigin(url)\n this[kConnector] = connect\n this[kSocket] = null\n this[kPipelining] = pipelining != null ? pipelining : 1\n this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize\n this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout\n this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout\n this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 
1e3 : keepAliveTimeoutThreshold\n this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]\n this[kServerName] = null\n this[kLocalAddress] = localAddress != null ? localAddress : null\n this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming\n this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming\n this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\\r\\n`\n this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3\n this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3\n this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength\n this[kMaxRedirections] = maxRedirections\n this[kMaxRequests] = maxRequestsPerClient\n this[kClosedResolve] = null\n this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1\n this[kHTTPConnVersion] = 'h1'\n\n // HTTP/2\n this[kHTTP2Session] = null\n this[kHTTP2SessionState] = !allowH2\n ? null\n : {\n // streams: null, // Fixed queue of streams - For future support of `push`\n openStreams: 0, // Keep track of them to decide wether or not unref the session\n maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server\n }\n this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`\n\n // kQueue is built up of 3 sections separated by\n // the kRunningIdx and kPendingIdx indices.\n // | complete | running | pending |\n // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length\n // kRunningIdx points to the first running element.\n // kPendingIdx points to the first pending element.\n // This implements a fast queue with an amortized\n // time of O(1).\n\n this[kQueue] = []\n this[kRunningIdx] = 0\n this[kPendingIdx] = 0\n }\n\n get pipelining () {\n return this[kPipelining]\n }\n\n set pipelining (value) {\n this[kPipelining] = value\n resume(this, true)\n }\n\n get [kPending] () {\n return this[kQueue].length - this[kPendingIdx]\n }\n\n get [kRunning] () {\n return this[kPendingIdx] - this[kRunningIdx]\n }\n\n get [kSize] () {\n return this[kQueue].length - this[kRunningIdx]\n }\n\n get [kConnected] () {\n return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed\n }\n\n get [kBusy] () {\n const socket = this[kSocket]\n return (\n (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||\n (this[kSize] >= (this[kPipelining] || 1)) ||\n this[kPending] > 0\n )\n }\n\n /* istanbul ignore: only used for test */\n [kConnect] (cb) {\n connect(this)\n this.once('connect', cb)\n }\n\n [kDispatch] (opts, handler) {\n const origin = opts.origin || this[kUrl].origin\n\n const request = this[kHTTPConnVersion] === 'h2'\n ? 
Request[kHTTP2BuildRequest](origin, opts, handler)\n : Request[kHTTP1BuildRequest](origin, opts, handler)\n\n this[kQueue].push(request)\n if (this[kResuming]) {\n // Do nothing.\n } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {\n // Wait a tick in case stream/iterator is ended in the same tick.\n this[kResuming] = 1\n process.nextTick(resume, this)\n } else {\n resume(this, true)\n }\n\n if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {\n this[kNeedDrain] = 2\n }\n\n return this[kNeedDrain] < 2\n }\n\n async [kClose] () {\n // TODO: for H2 we need to gracefully flush the remaining enqueued\n // request and close each stream.\n return new Promise((resolve) => {\n if (!this[kSize]) {\n resolve(null)\n } else {\n this[kClosedResolve] = resolve\n }\n })\n }\n\n async [kDestroy] (err) {\n return new Promise((resolve) => {\n const requests = this[kQueue].splice(this[kPendingIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(this, request, err)\n }\n\n const callback = () => {\n if (this[kClosedResolve]) {\n // TODO (fix): Should we error here with ClientDestroyedError?\n this[kClosedResolve]()\n this[kClosedResolve] = null\n }\n resolve()\n }\n\n if (this[kHTTP2Session] != null) {\n util.destroy(this[kHTTP2Session], err)\n this[kHTTP2Session] = null\n this[kHTTP2SessionState] = null\n }\n\n if (!this[kSocket]) {\n queueMicrotask(callback)\n } else {\n util.destroy(this[kSocket].on('close', callback), err)\n }\n\n resume(this)\n })\n }\n}\n\nfunction onHttp2SessionError (err) {\n assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')\n\n this[kSocket][kError] = err\n\n onError(this[kClient], err)\n}\n\nfunction onHttp2FrameError (type, code, id) {\n const err = new InformationalError(`HTTP/2: \"frameError\" received - type ${type}, code ${code}`)\n\n if (id === 0) {\n this[kSocket][kError] = err\n onError(this[kClient], err)\n }\n}\n\nfunction onHttp2SessionEnd () {\n util.destroy(this, new SocketError('other side closed'))\n util.destroy(this[kSocket], new SocketError('other side closed'))\n}\n\nfunction onHTTP2GoAway (code) {\n const client = this[kClient]\n const err = new InformationalError(`HTTP/2: \"GOAWAY\" frame received with code ${code}`)\n client[kSocket] = null\n client[kHTTP2Session] = null\n\n if (client.destroyed) {\n assert(this[kPending] === 0)\n\n // Fail entire queue.\n const requests = client[kQueue].splice(client[kRunningIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(this, request, err)\n }\n } else if (client[kRunning] > 0) {\n // Fail head of pipeline.\n const request = client[kQueue][client[kRunningIdx]]\n client[kQueue][client[kRunningIdx]++] = null\n\n errorRequest(client, request, err)\n }\n\n client[kPendingIdx] = client[kRunningIdx]\n\n assert(client[kRunning] === 0)\n\n client.emit('disconnect',\n client[kUrl],\n [client],\n err\n )\n\n resume(client)\n}\n\nconst constants = require('./llhttp/constants')\nconst createRedirectInterceptor = require('./interceptor/redirectInterceptor')\nconst EMPTY_BUF = Buffer.alloc(0)\n\nasync function lazyllhttp () {\n const llhttpWasmData = process.env.JEST_WORKER_ID ? 
require('./llhttp/llhttp-wasm.js') : undefined\n\n let mod\n try {\n mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd-wasm.js'), 'base64'))\n } catch (e) {\n /* istanbul ignore next */\n\n // We could check if the error was caused by the simd option not\n // being enabled, but the occurring of this other error\n // * https://github.com/emscripten-core/emscripten/issues/11495\n // got me to remove that check to avoid breaking Node 12.\n mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp-wasm.js'), 'base64'))\n }\n\n return await WebAssembly.instantiate(mod, {\n env: {\n /* eslint-disable camelcase */\n\n wasm_on_url: (p, at, len) => {\n /* istanbul ignore next */\n return 0\n },\n wasm_on_status: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_message_begin: (p) => {\n assert.strictEqual(currentParser.ptr, p)\n return currentParser.onMessageBegin() || 0\n },\n wasm_on_header_field: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_header_value: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {\n assert.strictEqual(currentParser.ptr, p)\n return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0\n },\n wasm_on_body: (p, at, len) => {\n assert.strictEqual(currentParser.ptr, p)\n const start = at - currentBufferPtr + currentBufferRef.byteOffset\n return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0\n },\n wasm_on_message_complete: (p) => {\n assert.strictEqual(currentParser.ptr, p)\n return currentParser.onMessageComplete() || 0\n }\n\n /* eslint-enable camelcase */\n }\n })\n}\n\nlet llhttpInstance = null\nlet llhttpPromise = lazyllhttp()\nllhttpPromise.catch()\n\nlet currentParser = null\nlet currentBufferRef = null\nlet currentBufferSize = 0\nlet currentBufferPtr = null\n\nconst TIMEOUT_HEADERS = 1\nconst TIMEOUT_BODY = 2\nconst TIMEOUT_IDLE = 3\n\nclass Parser {\n constructor (client, socket, { exports }) {\n assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)\n\n this.llhttp = exports\n this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)\n this.client = client\n this.socket = socket\n this.timeout = null\n this.timeoutValue = null\n this.timeoutType = null\n this.statusCode = null\n this.statusText = ''\n this.upgrade = false\n this.headers = []\n this.headersSize = 0\n this.headersMaxSize = client[kMaxHeadersSize]\n this.shouldKeepAlive = false\n this.paused = false\n this.resume = this.resume.bind(this)\n\n this.bytesRead = 0\n\n this.keepAlive = ''\n this.contentLength = ''\n this.connection = ''\n this.maxResponseSize = client[kMaxResponseSize]\n }\n\n setTimeout (value, type) {\n this.timeoutType = type\n if (value !== this.timeoutValue) {\n timers.clearTimeout(this.timeout)\n if (value) {\n this.timeout = timers.setTimeout(onParserTimeout, value, this)\n // istanbul ignore else: only for 
jest\n if (this.timeout.unref) {\n this.timeout.unref()\n }\n } else {\n this.timeout = null\n }\n this.timeoutValue = value\n } else if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n }\n\n resume () {\n if (this.socket.destroyed || !this.paused) {\n return\n }\n\n assert(this.ptr != null)\n assert(currentParser == null)\n\n this.llhttp.llhttp_resume(this.ptr)\n\n assert(this.timeoutType === TIMEOUT_BODY)\n if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n\n this.paused = false\n this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.\n this.readMore()\n }\n\n readMore () {\n while (!this.paused && this.ptr) {\n const chunk = this.socket.read()\n if (chunk === null) {\n break\n }\n this.execute(chunk)\n }\n }\n\n execute (data) {\n assert(this.ptr != null)\n assert(currentParser == null)\n assert(!this.paused)\n\n const { socket, llhttp } = this\n\n if (data.length > currentBufferSize) {\n if (currentBufferPtr) {\n llhttp.free(currentBufferPtr)\n }\n currentBufferSize = Math.ceil(data.length / 4096) * 4096\n currentBufferPtr = llhttp.malloc(currentBufferSize)\n }\n\n new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)\n\n // Call `execute` on the wasm parser.\n // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,\n // and finally the length of bytes to parse.\n // The return value is an error code or `constants.ERROR.OK`.\n try {\n let ret\n\n try {\n currentBufferRef = data\n currentParser = this\n ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)\n /* eslint-disable-next-line no-useless-catch */\n } catch (err) {\n /* istanbul ignore next: difficult to make a test case for */\n throw err\n } finally {\n currentParser = null\n currentBufferRef = null\n }\n\n const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr\n\n if (ret === constants.ERROR.PAUSED_UPGRADE) {\n this.onUpgrade(data.slice(offset))\n } else if (ret === constants.ERROR.PAUSED) {\n this.paused = true\n socket.unshift(data.slice(offset))\n } else if (ret !== constants.ERROR.OK) {\n const ptr = llhttp.llhttp_get_error_reason(this.ptr)\n let message = ''\n /* istanbul ignore else: difficult to make a test case for */\n if (ptr) {\n const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)\n message =\n 'Response does not match the HTTP/1.1 protocol (' +\n Buffer.from(llhttp.memory.buffer, ptr, len).toString() +\n ')'\n }\n throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))\n }\n } catch (err) {\n util.destroy(socket, err)\n }\n }\n\n destroy () {\n assert(this.ptr != null)\n assert(currentParser == null)\n\n this.llhttp.llhttp_free(this.ptr)\n this.ptr = null\n\n timers.clearTimeout(this.timeout)\n this.timeout = null\n this.timeoutValue = null\n this.timeoutType = null\n\n this.paused = false\n }\n\n onStatus (buf) {\n this.statusText = buf.toString()\n }\n\n onMessageBegin () {\n const { socket, client } = this\n\n /* istanbul ignore next: difficult to make a test case for */\n if (socket.destroyed) {\n return -1\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n if (!request) {\n return -1\n }\n }\n\n onHeaderField (buf) {\n const len = this.headers.length\n\n if ((len & 1) === 0) {\n this.headers.push(buf)\n } else {\n this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])\n }\n\n 
this.trackHeader(buf.length)\n }\n\n onHeaderValue (buf) {\n let len = this.headers.length\n\n if ((len & 1) === 1) {\n this.headers.push(buf)\n len += 1\n } else {\n this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])\n }\n\n const key = this.headers[len - 2]\n if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {\n this.keepAlive += buf.toString()\n } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {\n this.connection += buf.toString()\n } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {\n this.contentLength += buf.toString()\n }\n\n this.trackHeader(buf.length)\n }\n\n trackHeader (len) {\n this.headersSize += len\n if (this.headersSize >= this.headersMaxSize) {\n util.destroy(this.socket, new HeadersOverflowError())\n }\n }\n\n onUpgrade (head) {\n const { upgrade, client, socket, headers, statusCode } = this\n\n assert(upgrade)\n\n const request = client[kQueue][client[kRunningIdx]]\n assert(request)\n\n assert(!socket.destroyed)\n assert(socket === client[kSocket])\n assert(!this.paused)\n assert(request.upgrade || request.method === 'CONNECT')\n\n this.statusCode = null\n this.statusText = ''\n this.shouldKeepAlive = null\n\n assert(this.headers.length % 2 === 0)\n this.headers = []\n this.headersSize = 0\n\n socket.unshift(head)\n\n socket[kParser].destroy()\n socket[kParser] = null\n\n socket[kClient] = null\n socket[kError] = null\n socket\n .removeListener('error', onSocketError)\n .removeListener('readable', onSocketReadable)\n .removeListener('end', onSocketEnd)\n .removeListener('close', onSocketClose)\n\n client[kSocket] = null\n client[kQueue][client[kRunningIdx]++] = null\n client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))\n\n try {\n request.onUpgrade(statusCode, headers, socket)\n } catch (err) {\n util.destroy(socket, err)\n }\n\n resume(client)\n }\n\n onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {\n const { client, socket, headers, statusText } = this\n\n /* istanbul ignore next: difficult to make a test case for */\n if (socket.destroyed) {\n return -1\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n\n /* istanbul ignore next: difficult to make a test case for */\n if (!request) {\n return -1\n }\n\n assert(!this.upgrade)\n assert(this.statusCode < 200)\n\n if (statusCode === 100) {\n util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))\n return -1\n }\n\n /* this can only happen if server is misbehaving */\n if (upgrade && !request.upgrade) {\n util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))\n return -1\n }\n\n assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)\n\n this.statusCode = statusCode\n this.shouldKeepAlive = (\n shouldKeepAlive ||\n // Override llhttp value which does not allow keepAlive for HEAD.\n (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')\n )\n\n if (this.statusCode >= 200) {\n const bodyTimeout = request.bodyTimeout != null\n ? 
request.bodyTimeout\n : client[kBodyTimeout]\n this.setTimeout(bodyTimeout, TIMEOUT_BODY)\n } else if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n\n if (request.method === 'CONNECT') {\n assert(client[kRunning] === 1)\n this.upgrade = true\n return 2\n }\n\n if (upgrade) {\n assert(client[kRunning] === 1)\n this.upgrade = true\n return 2\n }\n\n assert(this.headers.length % 2 === 0)\n this.headers = []\n this.headersSize = 0\n\n if (this.shouldKeepAlive && client[kPipelining]) {\n const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null\n\n if (keepAliveTimeout != null) {\n const timeout = Math.min(\n keepAliveTimeout - client[kKeepAliveTimeoutThreshold],\n client[kKeepAliveMaxTimeout]\n )\n if (timeout <= 0) {\n socket[kReset] = true\n } else {\n client[kKeepAliveTimeoutValue] = timeout\n }\n } else {\n client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]\n }\n } else {\n // Stop more requests from being dispatched.\n socket[kReset] = true\n }\n\n const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false\n\n if (request.aborted) {\n return -1\n }\n\n if (request.method === 'HEAD') {\n return 1\n }\n\n if (statusCode < 200) {\n return 1\n }\n\n if (socket[kBlocking]) {\n socket[kBlocking] = false\n resume(client)\n }\n\n return pause ? constants.ERROR.PAUSED : 0\n }\n\n onBody (buf) {\n const { client, socket, statusCode, maxResponseSize } = this\n\n if (socket.destroyed) {\n return -1\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n assert(request)\n\n assert.strictEqual(this.timeoutType, TIMEOUT_BODY)\n if (this.timeout) {\n // istanbul ignore else: only for jest\n if (this.timeout.refresh) {\n this.timeout.refresh()\n }\n }\n\n assert(statusCode >= 200)\n\n if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {\n util.destroy(socket, new ResponseExceededMaxSizeError())\n return -1\n }\n\n this.bytesRead += buf.length\n\n if (request.onData(buf) === false) {\n return constants.ERROR.PAUSED\n }\n }\n\n onMessageComplete () {\n const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this\n\n if (socket.destroyed && (!statusCode || shouldKeepAlive)) {\n return -1\n }\n\n if (upgrade) {\n return\n }\n\n const request = client[kQueue][client[kRunningIdx]]\n assert(request)\n\n assert(statusCode >= 100)\n\n this.statusCode = null\n this.statusText = ''\n this.bytesRead = 0\n this.contentLength = ''\n this.keepAlive = ''\n this.connection = ''\n\n assert(this.headers.length % 2 === 0)\n this.headers = []\n this.headersSize = 0\n\n if (statusCode < 200) {\n return\n }\n\n /* istanbul ignore next: should be handled by llhttp? 
*/\n if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {\n util.destroy(socket, new ResponseContentLengthMismatchError())\n return -1\n }\n\n request.onComplete(headers)\n\n client[kQueue][client[kRunningIdx]++] = null\n\n if (socket[kWriting]) {\n assert.strictEqual(client[kRunning], 0)\n // Response completed before request.\n util.destroy(socket, new InformationalError('reset'))\n return constants.ERROR.PAUSED\n } else if (!shouldKeepAlive) {\n util.destroy(socket, new InformationalError('reset'))\n return constants.ERROR.PAUSED\n } else if (socket[kReset] && client[kRunning] === 0) {\n // Destroy socket once all requests have completed.\n // The request at the tail of the pipeline is the one\n // that requested reset and no further requests should\n // have been queued since then.\n util.destroy(socket, new InformationalError('reset'))\n return constants.ERROR.PAUSED\n } else if (client[kPipelining] === 1) {\n // We must wait a full event loop cycle to reuse this socket to make sure\n // that non-spec compliant servers are not closing the connection even if they\n // said they won't.\n setImmediate(resume, client)\n } else {\n resume(client)\n }\n }\n}\n\nfunction onParserTimeout (parser) {\n const { socket, timeoutType, client } = parser\n\n /* istanbul ignore else */\n if (timeoutType === TIMEOUT_HEADERS) {\n if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {\n assert(!parser.paused, 'cannot be paused while waiting for headers')\n util.destroy(socket, new HeadersTimeoutError())\n }\n } else if (timeoutType === TIMEOUT_BODY) {\n if (!parser.paused) {\n util.destroy(socket, new BodyTimeoutError())\n }\n } else if (timeoutType === TIMEOUT_IDLE) {\n assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])\n util.destroy(socket, new InformationalError('socket idle timeout'))\n }\n}\n\nfunction onSocketReadable () {\n const { [kParser]: parser } = this\n if (parser) {\n parser.readMore()\n }\n}\n\nfunction onSocketError (err) {\n const { [kClient]: client, [kParser]: parser } = this\n\n assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')\n\n if (client[kHTTPConnVersion] !== 'h2') {\n // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded\n // to the user.\n if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {\n // We treat all incoming data so for as a valid response.\n parser.onMessageComplete()\n return\n }\n }\n\n this[kError] = err\n\n onError(this[kClient], err)\n}\n\nfunction onError (client, err) {\n if (\n client[kRunning] === 0 &&\n err.code !== 'UND_ERR_INFO' &&\n err.code !== 'UND_ERR_SOCKET'\n ) {\n // Error is not caused by running request and not a recoverable\n // socket error.\n\n assert(client[kPendingIdx] === client[kRunningIdx])\n\n const requests = client[kQueue].splice(client[kRunningIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(client, request, err)\n }\n assert(client[kSize] === 0)\n }\n}\n\nfunction onSocketEnd () {\n const { [kParser]: parser, [kClient]: client } = this\n\n if (client[kHTTPConnVersion] !== 'h2') {\n if (parser.statusCode && !parser.shouldKeepAlive) {\n // We treat all incoming data so far as a valid response.\n parser.onMessageComplete()\n return\n }\n }\n\n util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))\n}\n\nfunction onSocketClose () {\n const { [kClient]: client, [kParser]: parser } = this\n\n if (client[kHTTPConnVersion] === 'h1' 
&& parser) {\n if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {\n // We treat all incoming data so far as a valid response.\n parser.onMessageComplete()\n }\n\n this[kParser].destroy()\n this[kParser] = null\n }\n\n const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))\n\n client[kSocket] = null\n\n if (client.destroyed) {\n assert(client[kPending] === 0)\n\n // Fail entire queue.\n const requests = client[kQueue].splice(client[kRunningIdx])\n for (let i = 0; i < requests.length; i++) {\n const request = requests[i]\n errorRequest(client, request, err)\n }\n } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') {\n // Fail head of pipeline.\n const request = client[kQueue][client[kRunningIdx]]\n client[kQueue][client[kRunningIdx]++] = null\n\n errorRequest(client, request, err)\n }\n\n client[kPendingIdx] = client[kRunningIdx]\n\n assert(client[kRunning] === 0)\n\n client.emit('disconnect', client[kUrl], [client], err)\n\n resume(client)\n}\n\nasync function connect (client) {\n assert(!client[kConnecting])\n assert(!client[kSocket])\n\n let { host, hostname, protocol, port } = client[kUrl]\n\n // Resolve ipv6\n if (hostname[0] === '[') {\n const idx = hostname.indexOf(']')\n\n assert(idx !== -1)\n const ip = hostname.substring(1, idx)\n\n assert(net.isIP(ip))\n hostname = ip\n }\n\n client[kConnecting] = true\n\n if (channels.beforeConnect.hasSubscribers) {\n channels.beforeConnect.publish({\n connectParams: {\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n },\n connector: client[kConnector]\n })\n }\n\n try {\n const socket = await new Promise((resolve, reject) => {\n client[kConnector]({\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n }, (err, socket) => {\n if (err) {\n reject(err)\n } else {\n resolve(socket)\n }\n })\n })\n\n if (client.destroyed) {\n util.destroy(socket.on('error', () => {}), new ClientDestroyedError())\n return\n }\n\n client[kConnecting] = false\n\n assert(socket)\n\n const isH2 = socket.alpnProtocol === 'h2'\n if (isH2) {\n if (!h2ExperimentalWarned) {\n h2ExperimentalWarned = true\n process.emitWarning('H2 support is experimental, expect them to change at any time.', {\n code: 'UNDICI-H2'\n })\n }\n\n const session = http2.connect(client[kUrl], {\n createConnection: () => socket,\n peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams\n })\n\n client[kHTTPConnVersion] = 'h2'\n session[kClient] = client\n session[kSocket] = socket\n session.on('error', onHttp2SessionError)\n session.on('frameError', onHttp2FrameError)\n session.on('end', onHttp2SessionEnd)\n session.on('goaway', onHTTP2GoAway)\n session.on('close', onSocketClose)\n session.unref()\n\n client[kHTTP2Session] = session\n socket[kHTTP2Session] = session\n } else {\n if (!llhttpInstance) {\n llhttpInstance = await llhttpPromise\n llhttpPromise = null\n }\n\n socket[kNoRef] = false\n socket[kWriting] = false\n socket[kReset] = false\n socket[kBlocking] = false\n socket[kParser] = new Parser(client, socket, llhttpInstance)\n }\n\n socket[kCounter] = 0\n socket[kMaxRequests] = client[kMaxRequests]\n socket[kClient] = client\n socket[kError] = null\n\n socket\n .on('error', onSocketError)\n .on('readable', onSocketReadable)\n .on('end', onSocketEnd)\n .on('close', onSocketClose)\n\n client[kSocket] = socket\n\n if (channels.connected.hasSubscribers) {\n channels.connected.publish({\n connectParams: 
{\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n },\n connector: client[kConnector],\n socket\n })\n }\n client.emit('connect', client[kUrl], [client])\n } catch (err) {\n if (client.destroyed) {\n return\n }\n\n client[kConnecting] = false\n\n if (channels.connectError.hasSubscribers) {\n channels.connectError.publish({\n connectParams: {\n host,\n hostname,\n protocol,\n port,\n servername: client[kServerName],\n localAddress: client[kLocalAddress]\n },\n connector: client[kConnector],\n error: err\n })\n }\n\n if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {\n assert(client[kRunning] === 0)\n while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {\n const request = client[kQueue][client[kPendingIdx]++]\n errorRequest(client, request, err)\n }\n } else {\n onError(client, err)\n }\n\n client.emit('connectionError', client[kUrl], [client], err)\n }\n\n resume(client)\n}\n\nfunction emitDrain (client) {\n client[kNeedDrain] = 0\n client.emit('drain', client[kUrl], [client])\n}\n\nfunction resume (client, sync) {\n if (client[kResuming] === 2) {\n return\n }\n\n client[kResuming] = 2\n\n _resume(client, sync)\n client[kResuming] = 0\n\n if (client[kRunningIdx] > 256) {\n client[kQueue].splice(0, client[kRunningIdx])\n client[kPendingIdx] -= client[kRunningIdx]\n client[kRunningIdx] = 0\n }\n}\n\nfunction _resume (client, sync) {\n while (true) {\n if (client.destroyed) {\n assert(client[kPending] === 0)\n return\n }\n\n if (client[kClosedResolve] && !client[kSize]) {\n client[kClosedResolve]()\n client[kClosedResolve] = null\n return\n }\n\n const socket = client[kSocket]\n\n if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {\n if (client[kSize] === 0) {\n if (!socket[kNoRef] && socket.unref) {\n socket.unref()\n socket[kNoRef] = true\n }\n } else if (socket[kNoRef] && socket.ref) {\n socket.ref()\n socket[kNoRef] = false\n }\n\n if (client[kSize] === 0) {\n if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {\n socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)\n }\n } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {\n if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {\n const request = client[kQueue][client[kRunningIdx]]\n const headersTimeout = request.headersTimeout != null\n ? 
request.headersTimeout\n : client[kHeadersTimeout]\n socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)\n }\n }\n }\n\n if (client[kBusy]) {\n client[kNeedDrain] = 2\n } else if (client[kNeedDrain] === 2) {\n if (sync) {\n client[kNeedDrain] = 1\n process.nextTick(emitDrain, client)\n } else {\n emitDrain(client)\n }\n continue\n }\n\n if (client[kPending] === 0) {\n return\n }\n\n if (client[kRunning] >= (client[kPipelining] || 1)) {\n return\n }\n\n const request = client[kQueue][client[kPendingIdx]]\n\n if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {\n if (client[kRunning] > 0) {\n return\n }\n\n client[kServerName] = request.servername\n\n if (socket && socket.servername !== request.servername) {\n util.destroy(socket, new InformationalError('servername changed'))\n return\n }\n }\n\n if (client[kConnecting]) {\n return\n }\n\n if (!socket && !client[kHTTP2Session]) {\n connect(client)\n return\n }\n\n if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {\n return\n }\n\n if (client[kRunning] > 0 && !request.idempotent) {\n // Non-idempotent request cannot be retried.\n // Ensure that no other requests are inflight and\n // could cause failure.\n return\n }\n\n if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {\n // Don't dispatch an upgrade until all preceding requests have completed.\n // A misbehaving server might upgrade the connection before all pipelined\n // request has completed.\n return\n }\n\n if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 &&\n (util.isStream(request.body) || util.isAsyncIterable(request.body))) {\n // Request with stream or iterator body can error while other requests\n // are inflight and indirectly error those as well.\n // Ensure this doesn't happen by waiting for inflight\n // to complete before dispatching.\n\n // Request with stream or iterator body cannot be retried.\n // Ensure that no other requests are inflight and\n // could cause failure.\n return\n }\n\n if (!request.aborted && write(client, request)) {\n client[kPendingIdx]++\n } else {\n client[kQueue].splice(client[kPendingIdx], 1)\n }\n }\n}\n\n// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2\nfunction shouldSendContentLength (method) {\n return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'\n}\n\nfunction write (client, request) {\n if (client[kHTTPConnVersion] === 'h2') {\n writeH2(client, client[kHTTP2Session], request)\n return\n }\n\n const { body, method, path, host, upgrade, headers, blocking, reset } = request\n\n // https://tools.ietf.org/html/rfc7231#section-4.3.1\n // https://tools.ietf.org/html/rfc7231#section-4.3.2\n // https://tools.ietf.org/html/rfc7231#section-4.3.5\n\n // Sending a payload body on a request that does not\n // expect it can cause undefined behavior on some\n // servers and corrupt connection state. 
Do not\n // re-use the connection for further requests.\n\n const expectsPayload = (\n method === 'PUT' ||\n method === 'POST' ||\n method === 'PATCH'\n )\n\n if (body && typeof body.read === 'function') {\n // Try to read EOF in order to get length.\n body.read(0)\n }\n\n const bodyLength = util.bodyLength(body)\n\n let contentLength = bodyLength\n\n if (contentLength === null) {\n contentLength = request.contentLength\n }\n\n if (contentLength === 0 && !expectsPayload) {\n // https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD NOT send a Content-Length header field when\n // the request message does not contain a payload body and the method\n // semantics do not anticipate such a body.\n\n contentLength = null\n }\n\n // https://github.com/nodejs/undici/issues/2046\n // A user agent may send a Content-Length header with 0 value, this should be allowed.\n if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) {\n if (client[kStrictContentLength]) {\n errorRequest(client, request, new RequestContentLengthMismatchError())\n return false\n }\n\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n\n const socket = client[kSocket]\n\n try {\n request.onConnect((err) => {\n if (request.aborted || request.completed) {\n return\n }\n\n errorRequest(client, request, err || new RequestAbortedError())\n\n util.destroy(socket, new InformationalError('aborted'))\n })\n } catch (err) {\n errorRequest(client, request, err)\n }\n\n if (request.aborted) {\n return false\n }\n\n if (method === 'HEAD') {\n // https://github.com/mcollina/undici/issues/258\n // Close after a HEAD request to interop with misbehaving servers\n // that may send a body in the response.\n\n socket[kReset] = true\n }\n\n if (upgrade || method === 'CONNECT') {\n // On CONNECT or upgrade, block pipeline from dispatching further\n // requests on this connection.\n\n socket[kReset] = true\n }\n\n if (reset != null) {\n socket[kReset] = reset\n }\n\n if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) {\n socket[kReset] = true\n }\n\n if (blocking) {\n socket[kBlocking] = true\n }\n\n let header = `${method} ${path} HTTP/1.1\\r\\n`\n\n if (typeof host === 'string') {\n header += `host: ${host}\\r\\n`\n } else {\n header += client[kHostHeader]\n }\n\n if (upgrade) {\n header += `connection: upgrade\\r\\nupgrade: ${upgrade}\\r\\n`\n } else if (client[kPipelining] && !socket[kReset]) {\n header += 'connection: keep-alive\\r\\n'\n } else {\n header += 'connection: close\\r\\n'\n }\n\n if (headers) {\n header += headers\n }\n\n if (channels.sendHeaders.hasSubscribers) {\n channels.sendHeaders.publish({ request, headers: header, socket })\n }\n\n /* istanbul ignore else: assertion */\n if (!body || bodyLength === 0) {\n if (contentLength === 0) {\n socket.write(`${header}content-length: 0\\r\\n\\r\\n`, 'latin1')\n } else {\n assert(contentLength === null, 'no body must not have content length')\n socket.write(`${header}\\r\\n`, 'latin1')\n }\n request.onRequestSent()\n } else if (util.isBuffer(body)) {\n assert(contentLength === body.byteLength, 'buffer body must have content length')\n\n socket.cork()\n socket.write(`${header}content-length: ${contentLength}\\r\\n\\r\\n`, 'latin1')\n socket.write(body)\n socket.uncork()\n request.onBodySent(body)\n request.onRequestSent()\n if (!expectsPayload) {\n socket[kReset] = true\n }\n } else if (util.isBlobLike(body)) {\n if (typeof body.stream === 
'function') {\n writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload })\n } else {\n writeBlob({ body, client, request, socket, contentLength, header, expectsPayload })\n }\n } else if (util.isStream(body)) {\n writeStream({ body, client, request, socket, contentLength, header, expectsPayload })\n } else if (util.isIterable(body)) {\n writeIterable({ body, client, request, socket, contentLength, header, expectsPayload })\n } else {\n assert(false)\n }\n\n return true\n}\n\nfunction writeH2 (client, session, request) {\n const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request\n\n let headers\n if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())\n else headers = reqHeaders\n\n if (upgrade) {\n errorRequest(client, request, new Error('Upgrade not supported for H2'))\n return false\n }\n\n try {\n // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?\n request.onConnect((err) => {\n if (request.aborted || request.completed) {\n return\n }\n\n errorRequest(client, request, err || new RequestAbortedError())\n })\n } catch (err) {\n errorRequest(client, request, err)\n }\n\n if (request.aborted) {\n return false\n }\n\n /** @type {import('node:http2').ClientHttp2Stream} */\n let stream\n const h2State = client[kHTTP2SessionState]\n\n headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]\n headers[HTTP2_HEADER_METHOD] = method\n\n if (method === 'CONNECT') {\n session.ref()\n // we are already connected, streams are pending, first request\n // will create a new stream. We trigger a request to create the stream and wait until\n // `ready` event is triggered\n // We disabled endStream to allow the user to write to the stream\n stream = session.request(headers, { endStream: false, signal })\n\n if (stream.id && !stream.pending) {\n request.onUpgrade(null, null, stream)\n ++h2State.openStreams\n } else {\n stream.once('ready', () => {\n request.onUpgrade(null, null, stream)\n ++h2State.openStreams\n })\n }\n\n stream.once('close', () => {\n h2State.openStreams -= 1\n // TODO(HTTP/2): unref only if current streams count is 0\n if (h2State.openStreams === 0) session.unref()\n })\n\n return true\n }\n\n // https://tools.ietf.org/html/rfc7540#section-8.3\n // :path and :scheme headers must be omited when sending CONNECT\n\n headers[HTTP2_HEADER_PATH] = path\n headers[HTTP2_HEADER_SCHEME] = 'https'\n\n // https://tools.ietf.org/html/rfc7231#section-4.3.1\n // https://tools.ietf.org/html/rfc7231#section-4.3.2\n // https://tools.ietf.org/html/rfc7231#section-4.3.5\n\n // Sending a payload body on a request that does not\n // expect it can cause undefined behavior on some\n // servers and corrupt connection state. 
Do not\n // re-use the connection for further requests.\n\n const expectsPayload = (\n method === 'PUT' ||\n method === 'POST' ||\n method === 'PATCH'\n )\n\n if (body && typeof body.read === 'function') {\n // Try to read EOF in order to get length.\n body.read(0)\n }\n\n let contentLength = util.bodyLength(body)\n\n if (contentLength == null) {\n contentLength = request.contentLength\n }\n\n if (contentLength === 0 || !expectsPayload) {\n // https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD NOT send a Content-Length header field when\n // the request message does not contain a payload body and the method\n // semantics do not anticipate such a body.\n\n contentLength = null\n }\n\n // https://github.com/nodejs/undici/issues/2046\n // A user agent may send a Content-Length header with 0 value, this should be allowed.\n if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {\n if (client[kStrictContentLength]) {\n errorRequest(client, request, new RequestContentLengthMismatchError())\n return false\n }\n\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n\n if (contentLength != null) {\n assert(body, 'no body must not have content length')\n headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`\n }\n\n session.ref()\n\n const shouldEndStream = method === 'GET' || method === 'HEAD'\n if (expectContinue) {\n headers[HTTP2_HEADER_EXPECT] = '100-continue'\n stream = session.request(headers, { endStream: shouldEndStream, signal })\n\n stream.once('continue', writeBodyH2)\n } else {\n stream = session.request(headers, {\n endStream: shouldEndStream,\n signal\n })\n writeBodyH2()\n }\n\n // Increment counter as we have new several streams open\n ++h2State.openStreams\n\n stream.once('response', headers => {\n const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers\n\n if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {\n stream.pause()\n }\n })\n\n stream.once('end', () => {\n request.onComplete([])\n })\n\n stream.on('data', (chunk) => {\n if (request.onData(chunk) === false) {\n stream.pause()\n }\n })\n\n stream.once('close', () => {\n h2State.openStreams -= 1\n // TODO(HTTP/2): unref only if current streams count is 0\n if (h2State.openStreams === 0) {\n session.unref()\n }\n })\n\n stream.once('error', function (err) {\n if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {\n h2State.streams -= 1\n util.destroy(stream, err)\n }\n })\n\n stream.once('frameError', (type, code) => {\n const err = new InformationalError(`HTTP/2: \"frameError\" received - type ${type}, code ${code}`)\n errorRequest(client, request, err)\n\n if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {\n h2State.streams -= 1\n util.destroy(stream, err)\n }\n })\n\n // stream.on('aborted', () => {\n // // TODO(HTTP/2): Support aborted\n // })\n\n // stream.on('timeout', () => {\n // // TODO(HTTP/2): Support timeout\n // })\n\n // stream.on('push', headers => {\n // // TODO(HTTP/2): Suppor push\n // })\n\n // stream.on('trailers', headers => {\n // // TODO(HTTP/2): Support trailers\n // })\n\n return true\n\n function writeBodyH2 () {\n /* istanbul ignore else: assertion */\n if (!body) {\n request.onRequestSent()\n } else if (util.isBuffer(body)) {\n assert(contentLength === body.byteLength, 'buffer body must have content length')\n 
stream.cork()\n stream.write(body)\n stream.uncork()\n stream.end()\n request.onBodySent(body)\n request.onRequestSent()\n } else if (util.isBlobLike(body)) {\n if (typeof body.stream === 'function') {\n writeIterable({\n client,\n request,\n contentLength,\n h2stream: stream,\n expectsPayload,\n body: body.stream(),\n socket: client[kSocket],\n header: ''\n })\n } else {\n writeBlob({\n body,\n client,\n request,\n contentLength,\n expectsPayload,\n h2stream: stream,\n header: '',\n socket: client[kSocket]\n })\n }\n } else if (util.isStream(body)) {\n writeStream({\n body,\n client,\n request,\n contentLength,\n expectsPayload,\n socket: client[kSocket],\n h2stream: stream,\n header: ''\n })\n } else if (util.isIterable(body)) {\n writeIterable({\n body,\n client,\n request,\n contentLength,\n expectsPayload,\n header: '',\n h2stream: stream,\n socket: client[kSocket]\n })\n } else {\n assert(false)\n }\n }\n}\n\nfunction writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {\n assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')\n\n if (client[kHTTPConnVersion] === 'h2') {\n // For HTTP/2, is enough to pipe the stream\n const pipe = pipeline(\n body,\n h2stream,\n (err) => {\n if (err) {\n util.destroy(body, err)\n util.destroy(h2stream, err)\n } else {\n request.onRequestSent()\n }\n }\n )\n\n pipe.on('data', onPipeData)\n pipe.once('end', () => {\n pipe.removeListener('data', onPipeData)\n util.destroy(pipe)\n })\n\n function onPipeData (chunk) {\n request.onBodySent(chunk)\n }\n\n return\n }\n\n let finished = false\n\n const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })\n\n const onData = function (chunk) {\n if (finished) {\n return\n }\n\n try {\n if (!writer.write(chunk) && this.pause) {\n this.pause()\n }\n } catch (err) {\n util.destroy(this, err)\n }\n }\n const onDrain = function () {\n if (finished) {\n return\n }\n\n if (body.resume) {\n body.resume()\n }\n }\n const onAbort = function () {\n if (finished) {\n return\n }\n const err = new RequestAbortedError()\n queueMicrotask(() => onFinished(err))\n }\n const onFinished = function (err) {\n if (finished) {\n return\n }\n\n finished = true\n\n assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1))\n\n socket\n .off('drain', onDrain)\n .off('error', onFinished)\n\n body\n .removeListener('data', onData)\n .removeListener('end', onFinished)\n .removeListener('error', onFinished)\n .removeListener('close', onAbort)\n\n if (!err) {\n try {\n writer.end()\n } catch (er) {\n err = er\n }\n }\n\n writer.destroy(err)\n\n if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) {\n util.destroy(body, err)\n } else {\n util.destroy(body)\n }\n }\n\n body\n .on('data', onData)\n .on('end', onFinished)\n .on('error', onFinished)\n .on('close', onAbort)\n\n if (body.resume) {\n body.resume()\n }\n\n socket\n .on('drain', onDrain)\n .on('error', onFinished)\n}\n\nasync function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {\n assert(contentLength === body.size, 'blob body must have content length')\n\n const isH2 = client[kHTTPConnVersion] === 'h2'\n try {\n if (contentLength != null && contentLength !== body.size) {\n throw new RequestContentLengthMismatchError()\n }\n\n const buffer = Buffer.from(await body.arrayBuffer())\n\n if (isH2) {\n h2stream.cork()\n h2stream.write(buffer)\n h2stream.uncork()\n } else {\n socket.cork()\n 
socket.write(`${header}content-length: ${contentLength}\\r\\n\\r\\n`, 'latin1')\n socket.write(buffer)\n socket.uncork()\n }\n\n request.onBodySent(buffer)\n request.onRequestSent()\n\n if (!expectsPayload) {\n socket[kReset] = true\n }\n\n resume(client)\n } catch (err) {\n util.destroy(isH2 ? h2stream : socket, err)\n }\n}\n\nasync function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {\n assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')\n\n let callback = null\n function onDrain () {\n if (callback) {\n const cb = callback\n callback = null\n cb()\n }\n }\n\n const waitForDrain = () => new Promise((resolve, reject) => {\n assert(callback === null)\n\n if (socket[kError]) {\n reject(socket[kError])\n } else {\n callback = resolve\n }\n })\n\n if (client[kHTTPConnVersion] === 'h2') {\n h2stream\n .on('close', onDrain)\n .on('drain', onDrain)\n\n try {\n // It's up to the user to somehow abort the async iterable.\n for await (const chunk of body) {\n if (socket[kError]) {\n throw socket[kError]\n }\n\n const res = h2stream.write(chunk)\n request.onBodySent(chunk)\n if (!res) {\n await waitForDrain()\n }\n }\n } catch (err) {\n h2stream.destroy(err)\n } finally {\n request.onRequestSent()\n h2stream.end()\n h2stream\n .off('close', onDrain)\n .off('drain', onDrain)\n }\n\n return\n }\n\n socket\n .on('close', onDrain)\n .on('drain', onDrain)\n\n const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })\n try {\n // It's up to the user to somehow abort the async iterable.\n for await (const chunk of body) {\n if (socket[kError]) {\n throw socket[kError]\n }\n\n if (!writer.write(chunk)) {\n await waitForDrain()\n }\n }\n\n writer.end()\n } catch (err) {\n writer.destroy(err)\n } finally {\n socket\n .off('close', onDrain)\n .off('drain', onDrain)\n }\n}\n\nclass AsyncWriter {\n constructor ({ socket, request, contentLength, client, expectsPayload, header }) {\n this.socket = socket\n this.request = request\n this.contentLength = contentLength\n this.client = client\n this.bytesWritten = 0\n this.expectsPayload = expectsPayload\n this.header = header\n\n socket[kWriting] = true\n }\n\n write (chunk) {\n const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this\n\n if (socket[kError]) {\n throw socket[kError]\n }\n\n if (socket.destroyed) {\n return false\n }\n\n const len = Buffer.byteLength(chunk)\n if (!len) {\n return true\n }\n\n // We should defer writing chunks.\n if (contentLength !== null && bytesWritten + len > contentLength) {\n if (client[kStrictContentLength]) {\n throw new RequestContentLengthMismatchError()\n }\n\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n\n socket.cork()\n\n if (bytesWritten === 0) {\n if (!expectsPayload) {\n socket[kReset] = true\n }\n\n if (contentLength === null) {\n socket.write(`${header}transfer-encoding: chunked\\r\\n`, 'latin1')\n } else {\n socket.write(`${header}content-length: ${contentLength}\\r\\n\\r\\n`, 'latin1')\n }\n }\n\n if (contentLength === null) {\n socket.write(`\\r\\n${len.toString(16)}\\r\\n`, 'latin1')\n }\n\n this.bytesWritten += len\n\n const ret = socket.write(chunk)\n\n socket.uncork()\n\n request.onBodySent(chunk)\n\n if (!ret) {\n if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {\n // istanbul ignore else: only for jest\n if (socket[kParser].timeout.refresh) {\n socket[kParser].timeout.refresh()\n }\n 
}\n }\n\n return ret\n }\n\n end () {\n const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this\n request.onRequestSent()\n\n socket[kWriting] = false\n\n if (socket[kError]) {\n throw socket[kError]\n }\n\n if (socket.destroyed) {\n return\n }\n\n if (bytesWritten === 0) {\n if (expectsPayload) {\n // https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD send a Content-Length in a request message when\n // no Transfer-Encoding is sent and the request method defines a meaning\n // for an enclosed payload body.\n\n socket.write(`${header}content-length: 0\\r\\n\\r\\n`, 'latin1')\n } else {\n socket.write(`${header}\\r\\n`, 'latin1')\n }\n } else if (contentLength === null) {\n socket.write('\\r\\n0\\r\\n\\r\\n', 'latin1')\n }\n\n if (contentLength !== null && bytesWritten !== contentLength) {\n if (client[kStrictContentLength]) {\n throw new RequestContentLengthMismatchError()\n } else {\n process.emitWarning(new RequestContentLengthMismatchError())\n }\n }\n\n if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {\n // istanbul ignore else: only for jest\n if (socket[kParser].timeout.refresh) {\n socket[kParser].timeout.refresh()\n }\n }\n\n resume(client)\n }\n\n destroy (err) {\n const { socket, client } = this\n\n socket[kWriting] = false\n\n if (err) {\n assert(client[kRunning] <= 1, 'pipeline should only contain this request')\n util.destroy(socket, err)\n }\n }\n}\n\nfunction errorRequest (client, request, err) {\n try {\n request.onError(err)\n assert(request.aborted)\n } catch (err) {\n client.emit('error', err)\n }\n}\n\nmodule.exports = Client\n","'use strict'\n\n/* istanbul ignore file: only for Node 12 */\n\nconst { kConnected, kSize } = require('../core/symbols')\n\nclass CompatWeakRef {\n constructor (value) {\n this.value = value\n }\n\n deref () {\n return this.value[kConnected] === 0 && this.value[kSize] === 0\n ? 
undefined\n : this.value\n }\n}\n\nclass CompatFinalizer {\n constructor (finalizer) {\n this.finalizer = finalizer\n }\n\n register (dispatcher, key) {\n if (dispatcher.on) {\n dispatcher.on('disconnect', () => {\n if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {\n this.finalizer(key)\n }\n })\n }\n }\n}\n\nmodule.exports = function () {\n // FIXME: remove workaround when the Node bug is fixed\n // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308\n if (process.env.NODE_V8_COVERAGE) {\n return {\n WeakRef: CompatWeakRef,\n FinalizationRegistry: CompatFinalizer\n }\n }\n return {\n WeakRef: global.WeakRef || CompatWeakRef,\n FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer\n }\n}\n","'use strict'\n\n// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size\nconst maxAttributeValueSize = 1024\n\n// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size\nconst maxNameValuePairSize = 4096\n\nmodule.exports = {\n maxAttributeValueSize,\n maxNameValuePairSize\n}\n","'use strict'\n\nconst { parseSetCookie } = require('./parse')\nconst { stringify, getHeadersList } = require('./util')\nconst { webidl } = require('../fetch/webidl')\nconst { Headers } = require('../fetch/headers')\n\n/**\n * @typedef {Object} Cookie\n * @property {string} name\n * @property {string} value\n * @property {Date|number|undefined} expires\n * @property {number|undefined} maxAge\n * @property {string|undefined} domain\n * @property {string|undefined} path\n * @property {boolean|undefined} secure\n * @property {boolean|undefined} httpOnly\n * @property {'Strict'|'Lax'|'None'} sameSite\n * @property {string[]} unparsed\n */\n\n/**\n * @param {Headers} headers\n * @returns {Record}\n */\nfunction getCookies (headers) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n const cookie = headers.get('cookie')\n const out = {}\n\n if (!cookie) {\n return out\n }\n\n for (const piece of cookie.split(';')) {\n const [name, ...value] = piece.split('=')\n\n out[name.trim()] = value.join('=')\n }\n\n return out\n}\n\n/**\n * @param {Headers} headers\n * @param {string} name\n * @param {{ path?: string, domain?: string }|undefined} attributes\n * @returns {void}\n */\nfunction deleteCookie (headers, name, attributes) {\n webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n name = webidl.converters.DOMString(name)\n attributes = webidl.converters.DeleteCookieAttributes(attributes)\n\n // Matches behavior of\n // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278\n setCookie(headers, {\n name,\n value: '',\n expires: new Date(0),\n ...attributes\n })\n}\n\n/**\n * @param {Headers} headers\n * @returns {Cookie[]}\n */\nfunction getSetCookies (headers) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n const cookies = getHeadersList(headers).cookies\n\n if (!cookies) {\n return []\n }\n\n // In older versions of undici, cookies is a list of name:value.\n return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? 
pair[1] : pair))\n}\n\n/**\n * @param {Headers} headers\n * @param {Cookie} cookie\n * @returns {void}\n */\nfunction setCookie (headers, cookie) {\n webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })\n\n webidl.brandCheck(headers, Headers, { strict: false })\n\n cookie = webidl.converters.Cookie(cookie)\n\n const str = stringify(cookie)\n\n if (str) {\n headers.append('Set-Cookie', stringify(cookie))\n }\n}\n\nwebidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'path',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'domain',\n defaultValue: null\n }\n])\n\nwebidl.converters.Cookie = webidl.dictionaryConverter([\n {\n converter: webidl.converters.DOMString,\n key: 'name'\n },\n {\n converter: webidl.converters.DOMString,\n key: 'value'\n },\n {\n converter: webidl.nullableConverter((value) => {\n if (typeof value === 'number') {\n return webidl.converters['unsigned long long'](value)\n }\n\n return new Date(value)\n }),\n key: 'expires',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters['long long']),\n key: 'maxAge',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'domain',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.DOMString),\n key: 'path',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.boolean),\n key: 'secure',\n defaultValue: null\n },\n {\n converter: webidl.nullableConverter(webidl.converters.boolean),\n key: 'httpOnly',\n defaultValue: null\n },\n {\n converter: webidl.converters.USVString,\n key: 'sameSite',\n allowedValues: ['Strict', 'Lax', 'None']\n },\n {\n converter: webidl.sequenceConverter(webidl.converters.DOMString),\n key: 'unparsed',\n defaultValue: []\n }\n])\n\nmodule.exports = {\n getCookies,\n deleteCookie,\n getSetCookies,\n setCookie\n}\n","'use strict'\n\nconst { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')\nconst { isCTLExcludingHtab } = require('./util')\nconst { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')\nconst assert = require('assert')\n\n/**\n * @description Parses the field-value attributes of a set-cookie header string.\n * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4\n * @param {string} header\n * @returns if the header is invalid, null will be returned\n */\nfunction parseSetCookie (header) {\n // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F\n // character (CTL characters excluding HTAB): Abort these steps and\n // ignore the set-cookie-string entirely.\n if (isCTLExcludingHtab(header)) {\n return null\n }\n\n let nameValuePair = ''\n let unparsedAttributes = ''\n let name = ''\n let value = ''\n\n // 2. If the set-cookie-string contains a %x3B (\";\") character:\n if (header.includes(';')) {\n // 1. The name-value-pair string consists of the characters up to,\n // but not including, the first %x3B (\";\"), and the unparsed-\n // attributes consist of the remainder of the set-cookie-string\n // (including the %x3B (\";\") in question).\n const position = { position: 0 }\n\n nameValuePair = collectASequenceOfCodePointsFast(';', header, position)\n unparsedAttributes = header.slice(position.position)\n } else {\n // Otherwise:\n\n // 1. 
The name-value-pair string consists of all the characters\n // contained in the set-cookie-string, and the unparsed-\n // attributes is the empty string.\n nameValuePair = header\n }\n\n // 3. If the name-value-pair string lacks a %x3D (\"=\") character, then\n // the name string is empty, and the value string is the value of\n // name-value-pair.\n if (!nameValuePair.includes('=')) {\n value = nameValuePair\n } else {\n // Otherwise, the name string consists of the characters up to, but\n // not including, the first %x3D (\"=\") character, and the (possibly\n // empty) value string consists of the characters after the first\n // %x3D (\"=\") character.\n const position = { position: 0 }\n name = collectASequenceOfCodePointsFast(\n '=',\n nameValuePair,\n position\n )\n value = nameValuePair.slice(position.position + 1)\n }\n\n // 4. Remove any leading or trailing WSP characters from the name\n // string and the value string.\n name = name.trim()\n value = value.trim()\n\n // 5. If the sum of the lengths of the name string and the value string\n // is more than 4096 octets, abort these steps and ignore the set-\n // cookie-string entirely.\n if (name.length + value.length > maxNameValuePairSize) {\n return null\n }\n\n // 6. The cookie-name is the name string, and the cookie-value is the\n // value string.\n return {\n name, value, ...parseUnparsedAttributes(unparsedAttributes)\n }\n}\n\n/**\n * Parses the remaining attributes of a set-cookie header\n * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4\n * @param {string} unparsedAttributes\n * @param {[Object.]={}} cookieAttributeList\n */\nfunction parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {\n // 1. If the unparsed-attributes string is empty, skip the rest of\n // these steps.\n if (unparsedAttributes.length === 0) {\n return cookieAttributeList\n }\n\n // 2. Discard the first character of the unparsed-attributes (which\n // will be a %x3B (\";\") character).\n assert(unparsedAttributes[0] === ';')\n unparsedAttributes = unparsedAttributes.slice(1)\n\n let cookieAv = ''\n\n // 3. If the remaining unparsed-attributes contains a %x3B (\";\")\n // character:\n if (unparsedAttributes.includes(';')) {\n // 1. Consume the characters of the unparsed-attributes up to, but\n // not including, the first %x3B (\";\") character.\n cookieAv = collectASequenceOfCodePointsFast(\n ';',\n unparsedAttributes,\n { position: 0 }\n )\n unparsedAttributes = unparsedAttributes.slice(cookieAv.length)\n } else {\n // Otherwise:\n\n // 1. Consume the remainder of the unparsed-attributes.\n cookieAv = unparsedAttributes\n unparsedAttributes = ''\n }\n\n // Let the cookie-av string be the characters consumed in this step.\n\n let attributeName = ''\n let attributeValue = ''\n\n // 4. If the cookie-av string contains a %x3D (\"=\") character:\n if (cookieAv.includes('=')) {\n // 1. The (possibly empty) attribute-name string consists of the\n // characters up to, but not including, the first %x3D (\"=\")\n // character, and the (possibly empty) attribute-value string\n // consists of the characters after the first %x3D (\"=\")\n // character.\n const position = { position: 0 }\n\n attributeName = collectASequenceOfCodePointsFast(\n '=',\n cookieAv,\n position\n )\n attributeValue = cookieAv.slice(position.position + 1)\n } else {\n // Otherwise:\n\n // 1. 
The attribute-name string consists of the entire cookie-av\n // string, and the attribute-value string is empty.\n attributeName = cookieAv\n }\n\n // 5. Remove any leading or trailing WSP characters from the attribute-\n // name string and the attribute-value string.\n attributeName = attributeName.trim()\n attributeValue = attributeValue.trim()\n\n // 6. If the attribute-value is longer than 1024 octets, ignore the\n // cookie-av string and return to Step 1 of this algorithm.\n if (attributeValue.length > maxAttributeValueSize) {\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n }\n\n // 7. Process the attribute-name and attribute-value according to the\n // requirements in the following subsections. (Notice that\n // attributes with unrecognized attribute-names are ignored.)\n const attributeNameLowercase = attributeName.toLowerCase()\n\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1\n // If the attribute-name case-insensitively matches the string\n // \"Expires\", the user agent MUST process the cookie-av as follows.\n if (attributeNameLowercase === 'expires') {\n // 1. Let the expiry-time be the result of parsing the attribute-value\n // as cookie-date (see Section 5.1.1).\n const expiryTime = new Date(attributeValue)\n\n // 2. If the attribute-value failed to parse as a cookie date, ignore\n // the cookie-av.\n\n cookieAttributeList.expires = expiryTime\n } else if (attributeNameLowercase === 'max-age') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2\n // If the attribute-name case-insensitively matches the string \"Max-\n // Age\", the user agent MUST process the cookie-av as follows.\n\n // 1. If the first character of the attribute-value is not a DIGIT or a\n // \"-\" character, ignore the cookie-av.\n const charCode = attributeValue.charCodeAt(0)\n\n if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n }\n\n // 2. If the remainder of attribute-value contains a non-DIGIT\n // character, ignore the cookie-av.\n if (!/^\\d+$/.test(attributeValue)) {\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n }\n\n // 3. Let delta-seconds be the attribute-value converted to an integer.\n const deltaSeconds = Number(attributeValue)\n\n // 4. Let cookie-age-limit be the maximum age of the cookie (which\n // SHOULD be 400 days or less, see Section 4.1.2.2).\n\n // 5. Set delta-seconds to the smaller of its present value and cookie-\n // age-limit.\n // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)\n\n // 6. If delta-seconds is less than or equal to zero (0), let expiry-\n // time be the earliest representable date and time. Otherwise, let\n // the expiry-time be the current date and time plus delta-seconds\n // seconds.\n // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds\n\n // 7. Append an attribute to the cookie-attribute-list with an\n // attribute-name of Max-Age and an attribute-value of expiry-time.\n cookieAttributeList.maxAge = deltaSeconds\n } else if (attributeNameLowercase === 'domain') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3\n // If the attribute-name case-insensitively matches the string \"Domain\",\n // the user agent MUST process the cookie-av as follows.\n\n // 1. Let cookie-domain be the attribute-value.\n let cookieDomain = attributeValue\n\n // 2. 
If cookie-domain starts with %x2E (\".\"), let cookie-domain be\n // cookie-domain without its leading %x2E (\".\").\n if (cookieDomain[0] === '.') {\n cookieDomain = cookieDomain.slice(1)\n }\n\n // 3. Convert the cookie-domain to lower case.\n cookieDomain = cookieDomain.toLowerCase()\n\n // 4. Append an attribute to the cookie-attribute-list with an\n // attribute-name of Domain and an attribute-value of cookie-domain.\n cookieAttributeList.domain = cookieDomain\n } else if (attributeNameLowercase === 'path') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4\n // If the attribute-name case-insensitively matches the string \"Path\",\n // the user agent MUST process the cookie-av as follows.\n\n // 1. If the attribute-value is empty or if the first character of the\n // attribute-value is not %x2F (\"/\"):\n let cookiePath = ''\n if (attributeValue.length === 0 || attributeValue[0] !== '/') {\n // 1. Let cookie-path be the default-path.\n cookiePath = '/'\n } else {\n // Otherwise:\n\n // 1. Let cookie-path be the attribute-value.\n cookiePath = attributeValue\n }\n\n // 2. Append an attribute to the cookie-attribute-list with an\n // attribute-name of Path and an attribute-value of cookie-path.\n cookieAttributeList.path = cookiePath\n } else if (attributeNameLowercase === 'secure') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5\n // If the attribute-name case-insensitively matches the string \"Secure\",\n // the user agent MUST append an attribute to the cookie-attribute-list\n // with an attribute-name of Secure and an empty attribute-value.\n\n cookieAttributeList.secure = true\n } else if (attributeNameLowercase === 'httponly') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6\n // If the attribute-name case-insensitively matches the string\n // \"HttpOnly\", the user agent MUST append an attribute to the cookie-\n // attribute-list with an attribute-name of HttpOnly and an empty\n // attribute-value.\n\n cookieAttributeList.httpOnly = true\n } else if (attributeNameLowercase === 'samesite') {\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7\n // If the attribute-name case-insensitively matches the string\n // \"SameSite\", the user agent MUST process the cookie-av as follows:\n\n // 1. Let enforcement be \"Default\".\n let enforcement = 'Default'\n\n const attributeValueLowercase = attributeValue.toLowerCase()\n // 2. If cookie-av's attribute-value is a case-insensitive match for\n // \"None\", set enforcement to \"None\".\n if (attributeValueLowercase.includes('none')) {\n enforcement = 'None'\n }\n\n // 3. If cookie-av's attribute-value is a case-insensitive match for\n // \"Strict\", set enforcement to \"Strict\".\n if (attributeValueLowercase.includes('strict')) {\n enforcement = 'Strict'\n }\n\n // 4. If cookie-av's attribute-value is a case-insensitive match for\n // \"Lax\", set enforcement to \"Lax\".\n if (attributeValueLowercase.includes('lax')) {\n enforcement = 'Lax'\n }\n\n // 5. Append an attribute to the cookie-attribute-list with an\n // attribute-name of \"SameSite\" and an attribute-value of\n // enforcement.\n cookieAttributeList.sameSite = enforcement\n } else {\n cookieAttributeList.unparsed ??= []\n\n cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)\n }\n\n // 8. 
Return to Step 1 of this algorithm.\n return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)\n}\n\nmodule.exports = {\n parseSetCookie,\n parseUnparsedAttributes\n}\n","'use strict'\n\nconst assert = require('assert')\nconst { kHeadersList } = require('../core/symbols')\n\nfunction isCTLExcludingHtab (value) {\n if (value.length === 0) {\n return false\n }\n\n for (const char of value) {\n const code = char.charCodeAt(0)\n\n if (\n (code >= 0x00 || code <= 0x08) ||\n (code >= 0x0A || code <= 0x1F) ||\n code === 0x7F\n ) {\n return false\n }\n }\n}\n\n/**\n CHAR = \n token = 1*\n separators = \"(\" | \")\" | \"<\" | \">\" | \"@\"\n | \",\" | \";\" | \":\" | \"\\\" | <\">\n | \"/\" | \"[\" | \"]\" | \"?\" | \"=\"\n | \"{\" | \"}\" | SP | HT\n * @param {string} name\n */\nfunction validateCookieName (name) {\n for (const char of name) {\n const code = char.charCodeAt(0)\n\n if (\n (code <= 0x20 || code > 0x7F) ||\n char === '(' ||\n char === ')' ||\n char === '>' ||\n char === '<' ||\n char === '@' ||\n char === ',' ||\n char === ';' ||\n char === ':' ||\n char === '\\\\' ||\n char === '\"' ||\n char === '/' ||\n char === '[' ||\n char === ']' ||\n char === '?' ||\n char === '=' ||\n char === '{' ||\n char === '}'\n ) {\n throw new Error('Invalid cookie name')\n }\n }\n}\n\n/**\n cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )\n cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E\n ; US-ASCII characters excluding CTLs,\n ; whitespace DQUOTE, comma, semicolon,\n ; and backslash\n * @param {string} value\n */\nfunction validateCookieValue (value) {\n for (const char of value) {\n const code = char.charCodeAt(0)\n\n if (\n code < 0x21 || // exclude CTLs (0-31)\n code === 0x22 ||\n code === 0x2C ||\n code === 0x3B ||\n code === 0x5C ||\n code > 0x7E // non-ascii\n ) {\n throw new Error('Invalid header value')\n }\n }\n}\n\n/**\n * path-value = \n * @param {string} path\n */\nfunction validateCookiePath (path) {\n for (const char of path) {\n const code = char.charCodeAt(0)\n\n if (code < 0x21 || char === ';') {\n throw new Error('Invalid cookie path')\n }\n }\n}\n\n/**\n * I have no idea why these values aren't allowed to be honest,\n * but Deno tests these. 
- Khafra\n * @param {string} domain\n */\nfunction validateCookieDomain (domain) {\n if (\n domain.startsWith('-') ||\n domain.endsWith('.') ||\n domain.endsWith('-')\n ) {\n throw new Error('Invalid cookie domain')\n }\n}\n\n/**\n * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1\n * @param {number|Date} date\n IMF-fixdate = day-name \",\" SP date1 SP time-of-day SP GMT\n ; fixed length/zone/capitalization subset of the format\n ; see Section 3.3 of [RFC5322]\n\n day-name = %x4D.6F.6E ; \"Mon\", case-sensitive\n / %x54.75.65 ; \"Tue\", case-sensitive\n / %x57.65.64 ; \"Wed\", case-sensitive\n / %x54.68.75 ; \"Thu\", case-sensitive\n / %x46.72.69 ; \"Fri\", case-sensitive\n / %x53.61.74 ; \"Sat\", case-sensitive\n / %x53.75.6E ; \"Sun\", case-sensitive\n date1 = day SP month SP year\n ; e.g., 02 Jun 1982\n\n day = 2DIGIT\n month = %x4A.61.6E ; \"Jan\", case-sensitive\n / %x46.65.62 ; \"Feb\", case-sensitive\n / %x4D.61.72 ; \"Mar\", case-sensitive\n / %x41.70.72 ; \"Apr\", case-sensitive\n / %x4D.61.79 ; \"May\", case-sensitive\n / %x4A.75.6E ; \"Jun\", case-sensitive\n / %x4A.75.6C ; \"Jul\", case-sensitive\n / %x41.75.67 ; \"Aug\", case-sensitive\n / %x53.65.70 ; \"Sep\", case-sensitive\n / %x4F.63.74 ; \"Oct\", case-sensitive\n / %x4E.6F.76 ; \"Nov\", case-sensitive\n / %x44.65.63 ; \"Dec\", case-sensitive\n year = 4DIGIT\n\n GMT = %x47.4D.54 ; \"GMT\", case-sensitive\n\n time-of-day = hour \":\" minute \":\" second\n ; 00:00:00 - 23:59:60 (leap second)\n\n hour = 2DIGIT\n minute = 2DIGIT\n second = 2DIGIT\n */\nfunction toIMFDate (date) {\n if (typeof date === 'number') {\n date = new Date(date)\n }\n\n const days = [\n 'Sun', 'Mon', 'Tue', 'Wed',\n 'Thu', 'Fri', 'Sat'\n ]\n\n const months = [\n 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',\n 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'\n ]\n\n const dayName = days[date.getUTCDay()]\n const day = date.getUTCDate().toString().padStart(2, '0')\n const month = months[date.getUTCMonth()]\n const year = date.getUTCFullYear()\n const hour = date.getUTCHours().toString().padStart(2, '0')\n const minute = date.getUTCMinutes().toString().padStart(2, '0')\n const second = date.getUTCSeconds().toString().padStart(2, '0')\n\n return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`\n}\n\n/**\n max-age-av = \"Max-Age=\" non-zero-digit *DIGIT\n ; In practice, both expires-av and max-age-av\n ; are limited to dates representable by the\n ; user agent.\n * @param {number} maxAge\n */\nfunction validateCookieMaxAge (maxAge) {\n if (maxAge < 0) {\n throw new Error('Invalid cookie max-age')\n }\n}\n\n/**\n * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1\n * @param {import('./index').Cookie} cookie\n */\nfunction stringify (cookie) {\n if (cookie.name.length === 0) {\n return null\n }\n\n validateCookieName(cookie.name)\n validateCookieValue(cookie.value)\n\n const out = [`${cookie.name}=${cookie.value}`]\n\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1\n // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2\n if (cookie.name.startsWith('__Secure-')) {\n cookie.secure = true\n }\n\n if (cookie.name.startsWith('__Host-')) {\n cookie.secure = true\n cookie.domain = null\n cookie.path = '/'\n }\n\n if (cookie.secure) {\n out.push('Secure')\n }\n\n if (cookie.httpOnly) {\n out.push('HttpOnly')\n }\n\n if (typeof cookie.maxAge === 'number') {\n validateCookieMaxAge(cookie.maxAge)\n out.push(`Max-Age=${cookie.maxAge}`)\n }\n\n if 
(cookie.domain) {\n validateCookieDomain(cookie.domain)\n out.push(`Domain=${cookie.domain}`)\n }\n\n if (cookie.path) {\n validateCookiePath(cookie.path)\n out.push(`Path=${cookie.path}`)\n }\n\n if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {\n out.push(`Expires=${toIMFDate(cookie.expires)}`)\n }\n\n if (cookie.sameSite) {\n out.push(`SameSite=${cookie.sameSite}`)\n }\n\n for (const part of cookie.unparsed) {\n if (!part.includes('=')) {\n throw new Error('Invalid unparsed')\n }\n\n const [key, ...value] = part.split('=')\n\n out.push(`${key.trim()}=${value.join('=')}`)\n }\n\n return out.join('; ')\n}\n\nlet kHeadersListNode\n\nfunction getHeadersList (headers) {\n if (headers[kHeadersList]) {\n return headers[kHeadersList]\n }\n\n if (!kHeadersListNode) {\n kHeadersListNode = Object.getOwnPropertySymbols(headers).find(\n (symbol) => symbol.description === 'headers list'\n )\n\n assert(kHeadersListNode, 'Headers cannot be parsed')\n }\n\n const headersList = headers[kHeadersListNode]\n assert(headersList)\n\n return headersList\n}\n\nmodule.exports = {\n isCTLExcludingHtab,\n stringify,\n getHeadersList\n}\n","'use strict'\n\nconst net = require('net')\nconst assert = require('assert')\nconst util = require('./util')\nconst { InvalidArgumentError, ConnectTimeoutError } = require('./errors')\n\nlet tls // include tls conditionally since it is not always available\n\n// TODO: session re-use does not wait for the first\n// connection to resolve the session and might therefore\n// resolve the same servername multiple times even when\n// re-use is enabled.\n\nlet SessionCache\n// FIXME: remove workaround when the Node bug is fixed\n// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308\nif (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {\n SessionCache = class WeakSessionCache {\n constructor (maxCachedSessions) {\n this._maxCachedSessions = maxCachedSessions\n this._sessionCache = new Map()\n this._sessionRegistry = new global.FinalizationRegistry((key) => {\n if (this._sessionCache.size < this._maxCachedSessions) {\n return\n }\n\n const ref = this._sessionCache.get(key)\n if (ref !== undefined && ref.deref() === undefined) {\n this._sessionCache.delete(key)\n }\n })\n }\n\n get (sessionKey) {\n const ref = this._sessionCache.get(sessionKey)\n return ref ? 
ref.deref() : null\n }\n\n set (sessionKey, session) {\n if (this._maxCachedSessions === 0) {\n return\n }\n\n this._sessionCache.set(sessionKey, new WeakRef(session))\n this._sessionRegistry.register(session, sessionKey)\n }\n }\n} else {\n SessionCache = class SimpleSessionCache {\n constructor (maxCachedSessions) {\n this._maxCachedSessions = maxCachedSessions\n this._sessionCache = new Map()\n }\n\n get (sessionKey) {\n return this._sessionCache.get(sessionKey)\n }\n\n set (sessionKey, session) {\n if (this._maxCachedSessions === 0) {\n return\n }\n\n if (this._sessionCache.size >= this._maxCachedSessions) {\n // remove the oldest session\n const { value: oldestKey } = this._sessionCache.keys().next()\n this._sessionCache.delete(oldestKey)\n }\n\n this._sessionCache.set(sessionKey, session)\n }\n }\n}\n\nfunction buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {\n if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {\n throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')\n }\n\n const options = { path: socketPath, ...opts }\n const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)\n timeout = timeout == null ? 10e3 : timeout\n allowH2 = allowH2 != null ? allowH2 : false\n return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {\n let socket\n if (protocol === 'https:') {\n if (!tls) {\n tls = require('tls')\n }\n servername = servername || options.servername || util.getServerName(host) || null\n\n const sessionKey = servername || hostname\n const session = sessionCache.get(sessionKey) || null\n\n assert(sessionKey)\n\n socket = tls.connect({\n highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...\n ...options,\n servername,\n session,\n localAddress,\n // TODO(HTTP/2): Add support for h2c\n ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],\n socket: httpSocket, // upgrade socket connection\n port: port || 443,\n host: hostname\n })\n\n socket\n .on('session', function (session) {\n // TODO (fix): Can a session become invalid once established? Don't think so?\n sessionCache.set(sessionKey, session)\n })\n } else {\n assert(!httpSocket, 'httpSocket can only be sent on TLS update')\n socket = net.connect({\n highWaterMark: 64 * 1024, // Same as nodejs fs streams.\n ...options,\n localAddress,\n port: port || 80,\n host: hostname\n })\n }\n\n // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket\n if (options.keepAlive == null || options.keepAlive) {\n const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay\n socket.setKeepAlive(true, keepAliveInitialDelay)\n }\n\n const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)\n\n socket\n .setNoDelay(true)\n .once(protocol === 'https:' ? 
'secureConnect' : 'connect', function () {\n cancelTimeout()\n\n if (callback) {\n const cb = callback\n callback = null\n cb(null, this)\n }\n })\n .on('error', function (err) {\n cancelTimeout()\n\n if (callback) {\n const cb = callback\n callback = null\n cb(err)\n }\n })\n\n return socket\n }\n}\n\nfunction setupTimeout (onConnectTimeout, timeout) {\n if (!timeout) {\n return () => {}\n }\n\n let s1 = null\n let s2 = null\n const timeoutId = setTimeout(() => {\n // setImmediate is added to make sure that we priotorise socket error events over timeouts\n s1 = setImmediate(() => {\n if (process.platform === 'win32') {\n // Windows needs an extra setImmediate probably due to implementation differences in the socket logic\n s2 = setImmediate(() => onConnectTimeout())\n } else {\n onConnectTimeout()\n }\n })\n }, timeout)\n return () => {\n clearTimeout(timeoutId)\n clearImmediate(s1)\n clearImmediate(s2)\n }\n}\n\nfunction onConnectTimeout (socket) {\n util.destroy(socket, new ConnectTimeoutError())\n}\n\nmodule.exports = buildConnector\n","'use strict'\n\nclass UndiciError extends Error {\n constructor (message) {\n super(message)\n this.name = 'UndiciError'\n this.code = 'UND_ERR'\n }\n}\n\nclass ConnectTimeoutError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ConnectTimeoutError)\n this.name = 'ConnectTimeoutError'\n this.message = message || 'Connect Timeout Error'\n this.code = 'UND_ERR_CONNECT_TIMEOUT'\n }\n}\n\nclass HeadersTimeoutError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, HeadersTimeoutError)\n this.name = 'HeadersTimeoutError'\n this.message = message || 'Headers Timeout Error'\n this.code = 'UND_ERR_HEADERS_TIMEOUT'\n }\n}\n\nclass HeadersOverflowError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, HeadersOverflowError)\n this.name = 'HeadersOverflowError'\n this.message = message || 'Headers Overflow Error'\n this.code = 'UND_ERR_HEADERS_OVERFLOW'\n }\n}\n\nclass BodyTimeoutError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, BodyTimeoutError)\n this.name = 'BodyTimeoutError'\n this.message = message || 'Body Timeout Error'\n this.code = 'UND_ERR_BODY_TIMEOUT'\n }\n}\n\nclass ResponseStatusCodeError extends UndiciError {\n constructor (message, statusCode, headers, body) {\n super(message)\n Error.captureStackTrace(this, ResponseStatusCodeError)\n this.name = 'ResponseStatusCodeError'\n this.message = message || 'Response Status Code Error'\n this.code = 'UND_ERR_RESPONSE_STATUS_CODE'\n this.body = body\n this.status = statusCode\n this.statusCode = statusCode\n this.headers = headers\n }\n}\n\nclass InvalidArgumentError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, InvalidArgumentError)\n this.name = 'InvalidArgumentError'\n this.message = message || 'Invalid Argument Error'\n this.code = 'UND_ERR_INVALID_ARG'\n }\n}\n\nclass InvalidReturnValueError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, InvalidReturnValueError)\n this.name = 'InvalidReturnValueError'\n this.message = message || 'Invalid Return Value Error'\n this.code = 'UND_ERR_INVALID_RETURN_VALUE'\n }\n}\n\nclass RequestAbortedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, RequestAbortedError)\n this.name = 'AbortError'\n this.message = message || 
'Request aborted'\n this.code = 'UND_ERR_ABORTED'\n }\n}\n\nclass InformationalError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, InformationalError)\n this.name = 'InformationalError'\n this.message = message || 'Request information'\n this.code = 'UND_ERR_INFO'\n }\n}\n\nclass RequestContentLengthMismatchError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, RequestContentLengthMismatchError)\n this.name = 'RequestContentLengthMismatchError'\n this.message = message || 'Request body length does not match content-length header'\n this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'\n }\n}\n\nclass ResponseContentLengthMismatchError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ResponseContentLengthMismatchError)\n this.name = 'ResponseContentLengthMismatchError'\n this.message = message || 'Response body length does not match content-length header'\n this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'\n }\n}\n\nclass ClientDestroyedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ClientDestroyedError)\n this.name = 'ClientDestroyedError'\n this.message = message || 'The client is destroyed'\n this.code = 'UND_ERR_DESTROYED'\n }\n}\n\nclass ClientClosedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ClientClosedError)\n this.name = 'ClientClosedError'\n this.message = message || 'The client is closed'\n this.code = 'UND_ERR_CLOSED'\n }\n}\n\nclass SocketError extends UndiciError {\n constructor (message, socket) {\n super(message)\n Error.captureStackTrace(this, SocketError)\n this.name = 'SocketError'\n this.message = message || 'Socket error'\n this.code = 'UND_ERR_SOCKET'\n this.socket = socket\n }\n}\n\nclass NotSupportedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, NotSupportedError)\n this.name = 'NotSupportedError'\n this.message = message || 'Not supported error'\n this.code = 'UND_ERR_NOT_SUPPORTED'\n }\n}\n\nclass BalancedPoolMissingUpstreamError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, NotSupportedError)\n this.name = 'MissingUpstreamError'\n this.message = message || 'No upstream has been added to the BalancedPool'\n this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'\n }\n}\n\nclass HTTPParserError extends Error {\n constructor (message, code, data) {\n super(message)\n Error.captureStackTrace(this, HTTPParserError)\n this.name = 'HTTPParserError'\n this.code = code ? `HPE_${code}` : undefined\n this.data = data ? 
data.toString() : undefined\n }\n}\n\nclass ResponseExceededMaxSizeError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, ResponseExceededMaxSizeError)\n this.name = 'ResponseExceededMaxSizeError'\n this.message = message || 'Response content exceeded max size'\n this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'\n }\n}\n\nclass RequestRetryError extends UndiciError {\n constructor (message, code, { headers, data }) {\n super(message)\n Error.captureStackTrace(this, RequestRetryError)\n this.name = 'RequestRetryError'\n this.message = message || 'Request retry error'\n this.code = 'UND_ERR_REQ_RETRY'\n this.statusCode = code\n this.data = data\n this.headers = headers\n }\n}\n\nmodule.exports = {\n HTTPParserError,\n UndiciError,\n HeadersTimeoutError,\n HeadersOverflowError,\n BodyTimeoutError,\n RequestContentLengthMismatchError,\n ConnectTimeoutError,\n ResponseStatusCodeError,\n InvalidArgumentError,\n InvalidReturnValueError,\n RequestAbortedError,\n ClientDestroyedError,\n ClientClosedError,\n InformationalError,\n SocketError,\n NotSupportedError,\n ResponseContentLengthMismatchError,\n BalancedPoolMissingUpstreamError,\n ResponseExceededMaxSizeError,\n RequestRetryError\n}\n","'use strict'\n\nconst {\n InvalidArgumentError,\n NotSupportedError\n} = require('./errors')\nconst assert = require('assert')\nconst { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')\nconst util = require('./util')\n\n// tokenRegExp and headerCharRegex have been lifted from\n// https://github.com/nodejs/node/blob/main/lib/_http_common.js\n\n/**\n * Verifies that the given val is a valid HTTP token\n * per the rules defined in RFC 7230\n * See https://tools.ietf.org/html/rfc7230#section-3.2.6\n */\nconst tokenRegExp = /^[\\^_`a-zA-Z\\-0-9!#$%&'*+.|~]+$/\n\n/**\n * Matches if val contains an invalid field-vchar\n * field-value = *( field-content / obs-fold )\n * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]\n * field-vchar = VCHAR / obs-text\n */\nconst headerCharRegex = /[^\\t\\x20-\\x7e\\x80-\\xff]/\n\n// Verifies that a given path is valid does not contain control chars \\x00 to \\x20\nconst invalidPathRegex = /[^\\u0021-\\u00ff]/\n\nconst kHandler = Symbol('handler')\n\nconst channels = {}\n\nlet extractBody\n\ntry {\n const diagnosticsChannel = require('diagnostics_channel')\n channels.create = diagnosticsChannel.channel('undici:request:create')\n channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')\n channels.headers = diagnosticsChannel.channel('undici:request:headers')\n channels.trailers = diagnosticsChannel.channel('undici:request:trailers')\n channels.error = diagnosticsChannel.channel('undici:request:error')\n} catch {\n channels.create = { hasSubscribers: false }\n channels.bodySent = { hasSubscribers: false }\n channels.headers = { hasSubscribers: false }\n channels.trailers = { hasSubscribers: false }\n channels.error = { hasSubscribers: false }\n}\n\nclass Request {\n constructor (origin, {\n path,\n method,\n body,\n headers,\n query,\n idempotent,\n blocking,\n upgrade,\n headersTimeout,\n bodyTimeout,\n reset,\n throwOnError,\n expectContinue\n }, handler) {\n if (typeof path !== 'string') {\n throw new InvalidArgumentError('path must be a string')\n } else if (\n path[0] !== '/' &&\n !(path.startsWith('http://') || path.startsWith('https://')) &&\n method !== 'CONNECT'\n ) {\n throw new InvalidArgumentError('path must be an absolute URL or start with a slash')\n } else if 
(invalidPathRegex.exec(path) !== null) {\n throw new InvalidArgumentError('invalid request path')\n }\n\n if (typeof method !== 'string') {\n throw new InvalidArgumentError('method must be a string')\n } else if (tokenRegExp.exec(method) === null) {\n throw new InvalidArgumentError('invalid request method')\n }\n\n if (upgrade && typeof upgrade !== 'string') {\n throw new InvalidArgumentError('upgrade must be a string')\n }\n\n if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {\n throw new InvalidArgumentError('invalid headersTimeout')\n }\n\n if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {\n throw new InvalidArgumentError('invalid bodyTimeout')\n }\n\n if (reset != null && typeof reset !== 'boolean') {\n throw new InvalidArgumentError('invalid reset')\n }\n\n if (expectContinue != null && typeof expectContinue !== 'boolean') {\n throw new InvalidArgumentError('invalid expectContinue')\n }\n\n this.headersTimeout = headersTimeout\n\n this.bodyTimeout = bodyTimeout\n\n this.throwOnError = throwOnError === true\n\n this.method = method\n\n this.abort = null\n\n if (body == null) {\n this.body = null\n } else if (util.isStream(body)) {\n this.body = body\n\n const rState = this.body._readableState\n if (!rState || !rState.autoDestroy) {\n this.endHandler = function autoDestroy () {\n util.destroy(this)\n }\n this.body.on('end', this.endHandler)\n }\n\n this.errorHandler = err => {\n if (this.abort) {\n this.abort(err)\n } else {\n this.error = err\n }\n }\n this.body.on('error', this.errorHandler)\n } else if (util.isBuffer(body)) {\n this.body = body.byteLength ? body : null\n } else if (ArrayBuffer.isView(body)) {\n this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null\n } else if (body instanceof ArrayBuffer) {\n this.body = body.byteLength ? Buffer.from(body) : null\n } else if (typeof body === 'string') {\n this.body = body.length ? Buffer.from(body) : null\n } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {\n this.body = body\n } else {\n throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')\n }\n\n this.completed = false\n\n this.aborted = false\n\n this.upgrade = upgrade || null\n\n this.path = query ? util.buildURL(path, query) : path\n\n this.origin = origin\n\n this.idempotent = idempotent == null\n ? method === 'HEAD' || method === 'GET'\n : idempotent\n\n this.blocking = blocking == null ? false : blocking\n\n this.reset = reset == null ? null : reset\n\n this.host = null\n\n this.contentLength = null\n\n this.contentType = null\n\n this.headers = ''\n\n // Only for H2\n this.expectContinue = expectContinue != null ? 
expectContinue : false\n\n if (Array.isArray(headers)) {\n if (headers.length % 2 !== 0) {\n throw new InvalidArgumentError('headers array must be even')\n }\n for (let i = 0; i < headers.length; i += 2) {\n processHeader(this, headers[i], headers[i + 1])\n }\n } else if (headers && typeof headers === 'object') {\n const keys = Object.keys(headers)\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i]\n processHeader(this, key, headers[key])\n }\n } else if (headers != null) {\n throw new InvalidArgumentError('headers must be an object or an array')\n }\n\n if (util.isFormDataLike(this.body)) {\n if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {\n throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')\n }\n\n if (!extractBody) {\n extractBody = require('../fetch/body.js').extractBody\n }\n\n const [bodyStream, contentType] = extractBody(body)\n if (this.contentType == null) {\n this.contentType = contentType\n this.headers += `content-type: ${contentType}\\r\\n`\n }\n this.body = bodyStream.stream\n this.contentLength = bodyStream.length\n } else if (util.isBlobLike(body) && this.contentType == null && body.type) {\n this.contentType = body.type\n this.headers += `content-type: ${body.type}\\r\\n`\n }\n\n util.validateHandler(handler, method, upgrade)\n\n this.servername = util.getServerName(this.host)\n\n this[kHandler] = handler\n\n if (channels.create.hasSubscribers) {\n channels.create.publish({ request: this })\n }\n }\n\n onBodySent (chunk) {\n if (this[kHandler].onBodySent) {\n try {\n return this[kHandler].onBodySent(chunk)\n } catch (err) {\n this.abort(err)\n }\n }\n }\n\n onRequestSent () {\n if (channels.bodySent.hasSubscribers) {\n channels.bodySent.publish({ request: this })\n }\n\n if (this[kHandler].onRequestSent) {\n try {\n return this[kHandler].onRequestSent()\n } catch (err) {\n this.abort(err)\n }\n }\n }\n\n onConnect (abort) {\n assert(!this.aborted)\n assert(!this.completed)\n\n if (this.error) {\n abort(this.error)\n } else {\n this.abort = abort\n return this[kHandler].onConnect(abort)\n }\n }\n\n onHeaders (statusCode, headers, resume, statusText) {\n assert(!this.aborted)\n assert(!this.completed)\n\n if (channels.headers.hasSubscribers) {\n channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })\n }\n\n try {\n return this[kHandler].onHeaders(statusCode, headers, resume, statusText)\n } catch (err) {\n this.abort(err)\n }\n }\n\n onData (chunk) {\n assert(!this.aborted)\n assert(!this.completed)\n\n try {\n return this[kHandler].onData(chunk)\n } catch (err) {\n this.abort(err)\n return false\n }\n }\n\n onUpgrade (statusCode, headers, socket) {\n assert(!this.aborted)\n assert(!this.completed)\n\n return this[kHandler].onUpgrade(statusCode, headers, socket)\n }\n\n onComplete (trailers) {\n this.onFinally()\n\n assert(!this.aborted)\n\n this.completed = true\n if (channels.trailers.hasSubscribers) {\n channels.trailers.publish({ request: this, trailers })\n }\n\n try {\n return this[kHandler].onComplete(trailers)\n } catch (err) {\n // TODO (fix): This might be a bad idea?\n this.onError(err)\n }\n }\n\n onError (error) {\n this.onFinally()\n\n if (channels.error.hasSubscribers) {\n channels.error.publish({ request: this, error })\n }\n\n if (this.aborted) {\n return\n }\n this.aborted = true\n\n return this[kHandler].onError(error)\n }\n\n onFinally () {\n if (this.errorHandler) {\n this.body.off('error', this.errorHandler)\n this.errorHandler = 
null\n }\n\n if (this.endHandler) {\n this.body.off('end', this.endHandler)\n this.endHandler = null\n }\n }\n\n // TODO: adjust to support H2\n addHeader (key, value) {\n processHeader(this, key, value)\n return this\n }\n\n static [kHTTP1BuildRequest] (origin, opts, handler) {\n // TODO: Migrate header parsing here, to make Requests\n // HTTP agnostic\n return new Request(origin, opts, handler)\n }\n\n static [kHTTP2BuildRequest] (origin, opts, handler) {\n const headers = opts.headers\n opts = { ...opts, headers: null }\n\n const request = new Request(origin, opts, handler)\n\n request.headers = {}\n\n if (Array.isArray(headers)) {\n if (headers.length % 2 !== 0) {\n throw new InvalidArgumentError('headers array must be even')\n }\n for (let i = 0; i < headers.length; i += 2) {\n processHeader(request, headers[i], headers[i + 1], true)\n }\n } else if (headers && typeof headers === 'object') {\n const keys = Object.keys(headers)\n for (let i = 0; i < keys.length; i++) {\n const key = keys[i]\n processHeader(request, key, headers[key], true)\n }\n } else if (headers != null) {\n throw new InvalidArgumentError('headers must be an object or an array')\n }\n\n return request\n }\n\n static [kHTTP2CopyHeaders] (raw) {\n const rawHeaders = raw.split('\\r\\n')\n const headers = {}\n\n for (const header of rawHeaders) {\n const [key, value] = header.split(': ')\n\n if (value == null || value.length === 0) continue\n\n if (headers[key]) headers[key] += `,${value}`\n else headers[key] = value\n }\n\n return headers\n }\n}\n\nfunction processHeaderValue (key, val, skipAppend) {\n if (val && typeof val === 'object') {\n throw new InvalidArgumentError(`invalid ${key} header`)\n }\n\n val = val != null ? `${val}` : ''\n\n if (headerCharRegex.exec(val) !== null) {\n throw new InvalidArgumentError(`invalid ${key} header`)\n }\n\n return skipAppend ? val : `${key}: ${val}\\r\\n`\n}\n\nfunction processHeader (request, key, val, skipAppend = false) {\n if (val && (typeof val === 'object' && !Array.isArray(val))) {\n throw new InvalidArgumentError(`invalid ${key} header`)\n } else if (val === undefined) {\n return\n }\n\n if (\n request.host === null &&\n key.length === 4 &&\n key.toLowerCase() === 'host'\n ) {\n if (headerCharRegex.exec(val) !== null) {\n throw new InvalidArgumentError(`invalid ${key} header`)\n }\n // Consumed by Client\n request.host = val\n } else if (\n request.contentLength === null &&\n key.length === 14 &&\n key.toLowerCase() === 'content-length'\n ) {\n request.contentLength = parseInt(val, 10)\n if (!Number.isFinite(request.contentLength)) {\n throw new InvalidArgumentError('invalid content-length header')\n }\n } else if (\n request.contentType === null &&\n key.length === 12 &&\n key.toLowerCase() === 'content-type'\n ) {\n request.contentType = val\n if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)\n else request.headers += processHeaderValue(key, val)\n } else if (\n key.length === 17 &&\n key.toLowerCase() === 'transfer-encoding'\n ) {\n throw new InvalidArgumentError('invalid transfer-encoding header')\n } else if (\n key.length === 10 &&\n key.toLowerCase() === 'connection'\n ) {\n const value = typeof val === 'string' ? 
val.toLowerCase() : null\n if (value !== 'close' && value !== 'keep-alive') {\n throw new InvalidArgumentError('invalid connection header')\n } else if (value === 'close') {\n request.reset = true\n }\n } else if (\n key.length === 10 &&\n key.toLowerCase() === 'keep-alive'\n ) {\n throw new InvalidArgumentError('invalid keep-alive header')\n } else if (\n key.length === 7 &&\n key.toLowerCase() === 'upgrade'\n ) {\n throw new InvalidArgumentError('invalid upgrade header')\n } else if (\n key.length === 6 &&\n key.toLowerCase() === 'expect'\n ) {\n throw new NotSupportedError('expect header not supported')\n } else if (tokenRegExp.exec(key) === null) {\n throw new InvalidArgumentError('invalid header key')\n } else {\n if (Array.isArray(val)) {\n for (let i = 0; i < val.length; i++) {\n if (skipAppend) {\n if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`\n else request.headers[key] = processHeaderValue(key, val[i], skipAppend)\n } else {\n request.headers += processHeaderValue(key, val[i])\n }\n }\n } else {\n if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)\n else request.headers += processHeaderValue(key, val)\n }\n }\n}\n\nmodule.exports = Request\n","module.exports = {\n kClose: Symbol('close'),\n kDestroy: Symbol('destroy'),\n kDispatch: Symbol('dispatch'),\n kUrl: Symbol('url'),\n kWriting: Symbol('writing'),\n kResuming: Symbol('resuming'),\n kQueue: Symbol('queue'),\n kConnect: Symbol('connect'),\n kConnecting: Symbol('connecting'),\n kHeadersList: Symbol('headers list'),\n kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),\n kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),\n kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),\n kKeepAliveTimeoutValue: Symbol('keep alive timeout'),\n kKeepAlive: Symbol('keep alive'),\n kHeadersTimeout: Symbol('headers timeout'),\n kBodyTimeout: Symbol('body timeout'),\n kServerName: Symbol('server name'),\n kLocalAddress: Symbol('local address'),\n kHost: Symbol('host'),\n kNoRef: Symbol('no ref'),\n kBodyUsed: Symbol('used'),\n kRunning: Symbol('running'),\n kBlocking: Symbol('blocking'),\n kPending: Symbol('pending'),\n kSize: Symbol('size'),\n kBusy: Symbol('busy'),\n kQueued: Symbol('queued'),\n kFree: Symbol('free'),\n kConnected: Symbol('connected'),\n kClosed: Symbol('closed'),\n kNeedDrain: Symbol('need drain'),\n kReset: Symbol('reset'),\n kDestroyed: Symbol.for('nodejs.stream.destroyed'),\n kMaxHeadersSize: Symbol('max headers size'),\n kRunningIdx: Symbol('running index'),\n kPendingIdx: Symbol('pending index'),\n kError: Symbol('error'),\n kClients: Symbol('clients'),\n kClient: Symbol('client'),\n kParser: Symbol('parser'),\n kOnDestroyed: Symbol('destroy callbacks'),\n kPipelining: Symbol('pipelining'),\n kSocket: Symbol('socket'),\n kHostHeader: Symbol('host header'),\n kConnector: Symbol('connector'),\n kStrictContentLength: Symbol('strict content length'),\n kMaxRedirections: Symbol('maxRedirections'),\n kMaxRequests: Symbol('maxRequestsPerClient'),\n kProxy: Symbol('proxy agent options'),\n kCounter: Symbol('socket request counter'),\n kInterceptors: Symbol('dispatch interceptors'),\n kMaxResponseSize: Symbol('max response size'),\n kHTTP2Session: Symbol('http2Session'),\n kHTTP2SessionState: Symbol('http2Session state'),\n kHTTP2BuildRequest: Symbol('http2 build request'),\n kHTTP1BuildRequest: Symbol('http1 build request'),\n kHTTP2CopyHeaders: Symbol('http2 copy headers'),\n kHTTPConnVersion: Symbol('http 
connection version'),\n kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),\n kConstruct: Symbol('constructable')\n}\n","'use strict'\n\nconst assert = require('assert')\nconst { kDestroyed, kBodyUsed } = require('./symbols')\nconst { IncomingMessage } = require('http')\nconst stream = require('stream')\nconst net = require('net')\nconst { InvalidArgumentError } = require('./errors')\nconst { Blob } = require('buffer')\nconst nodeUtil = require('util')\nconst { stringify } = require('querystring')\n\nconst [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))\n\nfunction nop () {}\n\nfunction isStream (obj) {\n return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'\n}\n\n// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)\nfunction isBlobLike (object) {\n return (Blob && object instanceof Blob) || (\n object &&\n typeof object === 'object' &&\n (typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function') &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n}\n\nfunction buildURL (url, queryParams) {\n if (url.includes('?') || url.includes('#')) {\n throw new Error('Query params cannot be passed when url already contains \"?\" or \"#\".')\n }\n\n const stringified = stringify(queryParams)\n\n if (stringified) {\n url += '?' + stringified\n }\n\n return url\n}\n\nfunction parseURL (url) {\n if (typeof url === 'string') {\n url = new URL(url)\n\n if (!/^https?:/.test(url.origin || url.protocol)) {\n throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')\n }\n\n return url\n }\n\n if (!url || typeof url !== 'object') {\n throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')\n }\n\n if (!/^https?:/.test(url.origin || url.protocol)) {\n throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')\n }\n\n if (!(url instanceof URL)) {\n if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {\n throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')\n }\n\n if (url.path != null && typeof url.path !== 'string') {\n throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')\n }\n\n if (url.pathname != null && typeof url.pathname !== 'string') {\n throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')\n }\n\n if (url.hostname != null && typeof url.hostname !== 'string') {\n throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')\n }\n\n if (url.origin != null && typeof url.origin !== 'string') {\n throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')\n }\n\n const port = url.port != null\n ? url.port\n : (url.protocol === 'https:' ? 443 : 80)\n let origin = url.origin != null\n ? url.origin\n : `${url.protocol}//${url.hostname}:${port}`\n let path = url.path != null\n ? 
url.path\n : `${url.pathname || ''}${url.search || ''}`\n\n if (origin.endsWith('/')) {\n origin = origin.substring(0, origin.length - 1)\n }\n\n if (path && !path.startsWith('/')) {\n path = `/${path}`\n }\n // new URL(path, origin) is unsafe when `path` contains an absolute URL\n // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:\n // If first parameter is a relative URL, second param is required, and will be used as the base URL.\n // If first parameter is an absolute URL, a given second param will be ignored.\n url = new URL(origin + path)\n }\n\n return url\n}\n\nfunction parseOrigin (url) {\n url = parseURL(url)\n\n if (url.pathname !== '/' || url.search || url.hash) {\n throw new InvalidArgumentError('invalid url')\n }\n\n return url\n}\n\nfunction getHostname (host) {\n if (host[0] === '[') {\n const idx = host.indexOf(']')\n\n assert(idx !== -1)\n return host.substring(1, idx)\n }\n\n const idx = host.indexOf(':')\n if (idx === -1) return host\n\n return host.substring(0, idx)\n}\n\n// IP addresses are not valid server names per RFC6066\n// > Currently, the only server names supported are DNS hostnames\nfunction getServerName (host) {\n if (!host) {\n return null\n }\n\n assert.strictEqual(typeof host, 'string')\n\n const servername = getHostname(host)\n if (net.isIP(servername)) {\n return ''\n }\n\n return servername\n}\n\nfunction deepClone (obj) {\n return JSON.parse(JSON.stringify(obj))\n}\n\nfunction isAsyncIterable (obj) {\n return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')\n}\n\nfunction isIterable (obj) {\n return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))\n}\n\nfunction bodyLength (body) {\n if (body == null) {\n return 0\n } else if (isStream(body)) {\n const state = body._readableState\n return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)\n ? state.length\n : null\n } else if (isBlobLike(body)) {\n return body.size != null ? body.size : null\n } else if (isBuffer(body)) {\n return body.byteLength\n }\n\n return null\n}\n\nfunction isDestroyed (stream) {\n return !stream || !!(stream.destroyed || stream[kDestroyed])\n}\n\nfunction isReadableAborted (stream) {\n const state = stream && stream._readableState\n return isDestroyed(stream) && state && !state.endEmitted\n}\n\nfunction destroy (stream, err) {\n if (stream == null || !isStream(stream) || isDestroyed(stream)) {\n return\n }\n\n if (typeof stream.destroy === 'function') {\n if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {\n // See: https://github.com/nodejs/node/pull/38505/files\n stream.socket = null\n }\n\n stream.destroy(err)\n } else if (err) {\n process.nextTick((stream, err) => {\n stream.emit('error', err)\n }, stream, err)\n }\n\n if (stream.destroyed !== true) {\n stream[kDestroyed] = true\n }\n}\n\nconst KEEPALIVE_TIMEOUT_EXPR = /timeout=(\\d+)/\nfunction parseKeepAliveTimeout (val) {\n const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)\n return m ? 
parseInt(m[1], 10) * 1000 : null\n}\n\nfunction parseHeaders (headers, obj = {}) {\n // For H2 support\n if (!Array.isArray(headers)) return headers\n\n for (let i = 0; i < headers.length; i += 2) {\n const key = headers[i].toString().toLowerCase()\n let val = obj[key]\n\n if (!val) {\n if (Array.isArray(headers[i + 1])) {\n obj[key] = headers[i + 1].map(x => x.toString('utf8'))\n } else {\n obj[key] = headers[i + 1].toString('utf8')\n }\n } else {\n if (!Array.isArray(val)) {\n val = [val]\n obj[key] = val\n }\n val.push(headers[i + 1].toString('utf8'))\n }\n }\n\n // See https://github.com/nodejs/node/pull/46528\n if ('content-length' in obj && 'content-disposition' in obj) {\n obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')\n }\n\n return obj\n}\n\nfunction parseRawHeaders (headers) {\n const ret = []\n let hasContentLength = false\n let contentDispositionIdx = -1\n\n for (let n = 0; n < headers.length; n += 2) {\n const key = headers[n + 0].toString()\n const val = headers[n + 1].toString('utf8')\n\n if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {\n ret.push(key, val)\n hasContentLength = true\n } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {\n contentDispositionIdx = ret.push(key, val) - 1\n } else {\n ret.push(key, val)\n }\n }\n\n // See https://github.com/nodejs/node/pull/46528\n if (hasContentLength && contentDispositionIdx !== -1) {\n ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')\n }\n\n return ret\n}\n\nfunction isBuffer (buffer) {\n // See, https://github.com/mcollina/undici/pull/319\n return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)\n}\n\nfunction validateHandler (handler, method, upgrade) {\n if (!handler || typeof handler !== 'object') {\n throw new InvalidArgumentError('handler must be an object')\n }\n\n if (typeof handler.onConnect !== 'function') {\n throw new InvalidArgumentError('invalid onConnect method')\n }\n\n if (typeof handler.onError !== 'function') {\n throw new InvalidArgumentError('invalid onError method')\n }\n\n if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {\n throw new InvalidArgumentError('invalid onBodySent method')\n }\n\n if (upgrade || method === 'CONNECT') {\n if (typeof handler.onUpgrade !== 'function') {\n throw new InvalidArgumentError('invalid onUpgrade method')\n }\n } else {\n if (typeof handler.onHeaders !== 'function') {\n throw new InvalidArgumentError('invalid onHeaders method')\n }\n\n if (typeof handler.onData !== 'function') {\n throw new InvalidArgumentError('invalid onData method')\n }\n\n if (typeof handler.onComplete !== 'function') {\n throw new InvalidArgumentError('invalid onComplete method')\n }\n }\n}\n\n// A body is disturbed if it has been read from and it cannot\n// be re-used without losing state or data.\nfunction isDisturbed (body) {\n return !!(body && (\n stream.isDisturbed\n ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?\n : body[kBodyUsed] ||\n body.readableDidRead ||\n (body._readableState && body._readableState.dataEmitted) ||\n isReadableAborted(body)\n ))\n}\n\nfunction isErrored (body) {\n return !!(body && (\n stream.isErrored\n ? stream.isErrored(body)\n : /state: 'errored'/.test(nodeUtil.inspect(body)\n )))\n}\n\nfunction isReadable (body) {\n return !!(body && (\n stream.isReadable\n ? 
stream.isReadable(body)\n : /state: 'readable'/.test(nodeUtil.inspect(body)\n )))\n}\n\nfunction getSocketInfo (socket) {\n return {\n localAddress: socket.localAddress,\n localPort: socket.localPort,\n remoteAddress: socket.remoteAddress,\n remotePort: socket.remotePort,\n remoteFamily: socket.remoteFamily,\n timeout: socket.timeout,\n bytesWritten: socket.bytesWritten,\n bytesRead: socket.bytesRead\n }\n}\n\nasync function * convertIterableToBuffer (iterable) {\n for await (const chunk of iterable) {\n yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)\n }\n}\n\nlet ReadableStream\nfunction ReadableStreamFrom (iterable) {\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n if (ReadableStream.from) {\n return ReadableStream.from(convertIterableToBuffer(iterable))\n }\n\n let iterator\n return new ReadableStream(\n {\n async start () {\n iterator = iterable[Symbol.asyncIterator]()\n },\n async pull (controller) {\n const { done, value } = await iterator.next()\n if (done) {\n queueMicrotask(() => {\n controller.close()\n })\n } else {\n const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)\n controller.enqueue(new Uint8Array(buf))\n }\n return controller.desiredSize > 0\n },\n async cancel (reason) {\n await iterator.return()\n }\n },\n 0\n )\n}\n\n// The chunk should be a FormData instance and contains\n// all the required methods.\nfunction isFormDataLike (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.append === 'function' &&\n typeof object.delete === 'function' &&\n typeof object.get === 'function' &&\n typeof object.getAll === 'function' &&\n typeof object.has === 'function' &&\n typeof object.set === 'function' &&\n object[Symbol.toStringTag] === 'FormData'\n )\n}\n\nfunction throwIfAborted (signal) {\n if (!signal) { return }\n if (typeof signal.throwIfAborted === 'function') {\n signal.throwIfAborted()\n } else {\n if (signal.aborted) {\n // DOMException not available < v17.0.0\n const err = new Error('The operation was aborted')\n err.name = 'AbortError'\n throw err\n }\n }\n}\n\nfunction addAbortListener (signal, listener) {\n if ('addEventListener' in signal) {\n signal.addEventListener('abort', listener, { once: true })\n return () => signal.removeEventListener('abort', listener)\n }\n signal.addListener('abort', listener)\n return () => signal.removeListener('abort', listener)\n}\n\nconst hasToWellFormed = !!String.prototype.toWellFormed\n\n/**\n * @param {string} val\n */\nfunction toUSVString (val) {\n if (hasToWellFormed) {\n return `${val}`.toWellFormed()\n } else if (nodeUtil.toUSVString) {\n return nodeUtil.toUSVString(val)\n }\n\n return `${val}`\n}\n\n// Parsed accordingly to RFC 9110\n// https://www.rfc-editor.org/rfc/rfc9110#field.content-range\nfunction parseRangeHeader (range) {\n if (range == null || range === '') return { start: 0, end: null, size: null }\n\n const m = range ? range.match(/^bytes (\\d+)-(\\d+)\\/(\\d+)?$/) : null\n return m\n ? {\n start: parseInt(m[1]),\n end: m[2] ? parseInt(m[2]) : null,\n size: m[3] ? 
parseInt(m[3]) : null\n }\n : null\n}\n\nconst kEnumerableProperty = Object.create(null)\nkEnumerableProperty.enumerable = true\n\nmodule.exports = {\n kEnumerableProperty,\n nop,\n isDisturbed,\n isErrored,\n isReadable,\n toUSVString,\n isReadableAborted,\n isBlobLike,\n parseOrigin,\n parseURL,\n getServerName,\n isStream,\n isIterable,\n isAsyncIterable,\n isDestroyed,\n parseRawHeaders,\n parseHeaders,\n parseKeepAliveTimeout,\n destroy,\n bodyLength,\n deepClone,\n ReadableStreamFrom,\n isBuffer,\n validateHandler,\n getSocketInfo,\n isFormDataLike,\n buildURL,\n throwIfAborted,\n addAbortListener,\n parseRangeHeader,\n nodeMajor,\n nodeMinor,\n nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),\n safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']\n}\n","'use strict'\n\nconst Dispatcher = require('./dispatcher')\nconst {\n ClientDestroyedError,\n ClientClosedError,\n InvalidArgumentError\n} = require('./core/errors')\nconst { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')\n\nconst kDestroyed = Symbol('destroyed')\nconst kClosed = Symbol('closed')\nconst kOnDestroyed = Symbol('onDestroyed')\nconst kOnClosed = Symbol('onClosed')\nconst kInterceptedDispatch = Symbol('Intercepted Dispatch')\n\nclass DispatcherBase extends Dispatcher {\n constructor () {\n super()\n\n this[kDestroyed] = false\n this[kOnDestroyed] = null\n this[kClosed] = false\n this[kOnClosed] = []\n }\n\n get destroyed () {\n return this[kDestroyed]\n }\n\n get closed () {\n return this[kClosed]\n }\n\n get interceptors () {\n return this[kInterceptors]\n }\n\n set interceptors (newInterceptors) {\n if (newInterceptors) {\n for (let i = newInterceptors.length - 1; i >= 0; i--) {\n const interceptor = this[kInterceptors][i]\n if (typeof interceptor !== 'function') {\n throw new InvalidArgumentError('interceptor must be an function')\n }\n }\n }\n\n this[kInterceptors] = newInterceptors\n }\n\n close (callback) {\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n this.close((err, data) => {\n return err ? reject(err) : resolve(data)\n })\n })\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (this[kDestroyed]) {\n queueMicrotask(() => callback(new ClientDestroyedError(), null))\n return\n }\n\n if (this[kClosed]) {\n if (this[kOnClosed]) {\n this[kOnClosed].push(callback)\n } else {\n queueMicrotask(() => callback(null, null))\n }\n return\n }\n\n this[kClosed] = true\n this[kOnClosed].push(callback)\n\n const onClosed = () => {\n const callbacks = this[kOnClosed]\n this[kOnClosed] = null\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i](null, null)\n }\n }\n\n // Should not error.\n this[kClose]()\n .then(() => this.destroy())\n .then(() => {\n queueMicrotask(onClosed)\n })\n }\n\n destroy (err, callback) {\n if (typeof err === 'function') {\n callback = err\n err = null\n }\n\n if (callback === undefined) {\n return new Promise((resolve, reject) => {\n this.destroy(err, (err, data) => {\n return err ? 
/* istanbul ignore next: should never error */ reject(err) : resolve(data)\n })\n })\n }\n\n if (typeof callback !== 'function') {\n throw new InvalidArgumentError('invalid callback')\n }\n\n if (this[kDestroyed]) {\n if (this[kOnDestroyed]) {\n this[kOnDestroyed].push(callback)\n } else {\n queueMicrotask(() => callback(null, null))\n }\n return\n }\n\n if (!err) {\n err = new ClientDestroyedError()\n }\n\n this[kDestroyed] = true\n this[kOnDestroyed] = this[kOnDestroyed] || []\n this[kOnDestroyed].push(callback)\n\n const onDestroyed = () => {\n const callbacks = this[kOnDestroyed]\n this[kOnDestroyed] = null\n for (let i = 0; i < callbacks.length; i++) {\n callbacks[i](null, null)\n }\n }\n\n // Should not error.\n this[kDestroy](err).then(() => {\n queueMicrotask(onDestroyed)\n })\n }\n\n [kInterceptedDispatch] (opts, handler) {\n if (!this[kInterceptors] || this[kInterceptors].length === 0) {\n this[kInterceptedDispatch] = this[kDispatch]\n return this[kDispatch](opts, handler)\n }\n\n let dispatch = this[kDispatch].bind(this)\n for (let i = this[kInterceptors].length - 1; i >= 0; i--) {\n dispatch = this[kInterceptors][i](dispatch)\n }\n this[kInterceptedDispatch] = dispatch\n return dispatch(opts, handler)\n }\n\n dispatch (opts, handler) {\n if (!handler || typeof handler !== 'object') {\n throw new InvalidArgumentError('handler must be an object')\n }\n\n try {\n if (!opts || typeof opts !== 'object') {\n throw new InvalidArgumentError('opts must be an object.')\n }\n\n if (this[kDestroyed] || this[kOnDestroyed]) {\n throw new ClientDestroyedError()\n }\n\n if (this[kClosed]) {\n throw new ClientClosedError()\n }\n\n return this[kInterceptedDispatch](opts, handler)\n } catch (err) {\n if (typeof handler.onError !== 'function') {\n throw new InvalidArgumentError('invalid onError method')\n }\n\n handler.onError(err)\n\n return false\n }\n }\n}\n\nmodule.exports = DispatcherBase\n","'use strict'\n\nconst EventEmitter = require('events')\n\nclass Dispatcher extends EventEmitter {\n dispatch () {\n throw new Error('not implemented')\n }\n\n close () {\n throw new Error('not implemented')\n }\n\n destroy () {\n throw new Error('not implemented')\n }\n}\n\nmodule.exports = Dispatcher\n","'use strict'\n\nconst Busboy = require('@fastify/busboy')\nconst util = require('../core/util')\nconst {\n ReadableStreamFrom,\n isBlobLike,\n isReadableStreamLike,\n readableStreamClose,\n createDeferredPromise,\n fullyReadBody\n} = require('./util')\nconst { FormData } = require('./formdata')\nconst { kState } = require('./symbols')\nconst { webidl } = require('./webidl')\nconst { DOMException, structuredClone } = require('./constants')\nconst { Blob, File: NativeFile } = require('buffer')\nconst { kBodyUsed } = require('../core/symbols')\nconst assert = require('assert')\nconst { isErrored } = require('../core/util')\nconst { isUint8Array, isArrayBuffer } = require('util/types')\nconst { File: UndiciFile } = require('./file')\nconst { parseMIMEType, serializeAMimeType } = require('./dataURL')\n\nlet ReadableStream = globalThis.ReadableStream\n\n/** @type {globalThis['File']} */\nconst File = NativeFile ?? UndiciFile\nconst textEncoder = new TextEncoder()\nconst textDecoder = new TextDecoder()\n\n// https://fetch.spec.whatwg.org/#concept-bodyinit-extract\nfunction extractBody (object, keepalive = false) {\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n // 1. Let stream be null.\n let stream = null\n\n // 2. 
If object is a ReadableStream object, then set stream to object.\n if (object instanceof ReadableStream) {\n stream = object\n } else if (isBlobLike(object)) {\n // 3. Otherwise, if object is a Blob object, set stream to the\n // result of running object’s get stream.\n stream = object.stream()\n } else {\n // 4. Otherwise, set stream to a new ReadableStream object, and set\n // up stream.\n stream = new ReadableStream({\n async pull (controller) {\n controller.enqueue(\n typeof source === 'string' ? textEncoder.encode(source) : source\n )\n queueMicrotask(() => readableStreamClose(controller))\n },\n start () {},\n type: undefined\n })\n }\n\n // 5. Assert: stream is a ReadableStream object.\n assert(isReadableStreamLike(stream))\n\n // 6. Let action be null.\n let action = null\n\n // 7. Let source be null.\n let source = null\n\n // 8. Let length be null.\n let length = null\n\n // 9. Let type be null.\n let type = null\n\n // 10. Switch on object:\n if (typeof object === 'string') {\n // Set source to the UTF-8 encoding of object.\n // Note: setting source to a Uint8Array here breaks some mocking assumptions.\n source = object\n\n // Set type to `text/plain;charset=UTF-8`.\n type = 'text/plain;charset=UTF-8'\n } else if (object instanceof URLSearchParams) {\n // URLSearchParams\n\n // spec says to run application/x-www-form-urlencoded on body.list\n // this is implemented in Node.js as apart of an URLSearchParams instance toString method\n // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490\n // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100\n\n // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.\n source = object.toString()\n\n // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.\n type = 'application/x-www-form-urlencoded;charset=UTF-8'\n } else if (isArrayBuffer(object)) {\n // BufferSource/ArrayBuffer\n\n // Set source to a copy of the bytes held by object.\n source = new Uint8Array(object.slice())\n } else if (ArrayBuffer.isView(object)) {\n // BufferSource/ArrayBufferView\n\n // Set source to a copy of the bytes held by object.\n source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))\n } else if (util.isFormDataLike(object)) {\n const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`\n const prefix = `--${boundary}\\r\\nContent-Disposition: form-data`\n\n /*! formdata-polyfill. MIT License. 
Jimmy Wärting */\n const escape = (str) =>\n str.replace(/\\n/g, '%0A').replace(/\\r/g, '%0D').replace(/\"/g, '%22')\n const normalizeLinefeeds = (value) => value.replace(/\\r?\\n|\\r/g, '\\r\\n')\n\n // Set action to this step: run the multipart/form-data\n // encoding algorithm, with object’s entry list and UTF-8.\n // - This ensures that the body is immutable and can't be changed afterwords\n // - That the content-length is calculated in advance.\n // - And that all parts are pre-encoded and ready to be sent.\n\n const blobParts = []\n const rn = new Uint8Array([13, 10]) // '\\r\\n'\n length = 0\n let hasUnknownSizeValue = false\n\n for (const [name, value] of object) {\n if (typeof value === 'string') {\n const chunk = textEncoder.encode(prefix +\n `; name=\"${escape(normalizeLinefeeds(name))}\"` +\n `\\r\\n\\r\\n${normalizeLinefeeds(value)}\\r\\n`)\n blobParts.push(chunk)\n length += chunk.byteLength\n } else {\n const chunk = textEncoder.encode(`${prefix}; name=\"${escape(normalizeLinefeeds(name))}\"` +\n (value.name ? `; filename=\"${escape(value.name)}\"` : '') + '\\r\\n' +\n `Content-Type: ${\n value.type || 'application/octet-stream'\n }\\r\\n\\r\\n`)\n blobParts.push(chunk, value, rn)\n if (typeof value.size === 'number') {\n length += chunk.byteLength + value.size + rn.byteLength\n } else {\n hasUnknownSizeValue = true\n }\n }\n }\n\n const chunk = textEncoder.encode(`--${boundary}--`)\n blobParts.push(chunk)\n length += chunk.byteLength\n if (hasUnknownSizeValue) {\n length = null\n }\n\n // Set source to object.\n source = object\n\n action = async function * () {\n for (const part of blobParts) {\n if (part.stream) {\n yield * part.stream()\n } else {\n yield part\n }\n }\n }\n\n // Set type to `multipart/form-data; boundary=`,\n // followed by the multipart/form-data boundary string generated\n // by the multipart/form-data encoding algorithm.\n type = 'multipart/form-data; boundary=' + boundary\n } else if (isBlobLike(object)) {\n // Blob\n\n // Set source to object.\n source = object\n\n // Set length to object’s size.\n length = object.size\n\n // If object’s type attribute is not the empty byte sequence, set\n // type to its value.\n if (object.type) {\n type = object.type\n }\n } else if (typeof object[Symbol.asyncIterator] === 'function') {\n // If keepalive is true, then throw a TypeError.\n if (keepalive) {\n throw new TypeError('keepalive')\n }\n\n // If object is disturbed or locked, then throw a TypeError.\n if (util.isDisturbed(object) || object.locked) {\n throw new TypeError(\n 'Response body object should not be disturbed or locked'\n )\n }\n\n stream =\n object instanceof ReadableStream ? object : ReadableStreamFrom(object)\n }\n\n // 11. If source is a byte sequence, then set action to a\n // step that returns source and length to source’s length.\n if (typeof source === 'string' || util.isBuffer(source)) {\n length = Buffer.byteLength(source)\n }\n\n // 12. 
If action is non-null, then run these steps in in parallel:\n if (action != null) {\n // Run action.\n let iterator\n stream = new ReadableStream({\n async start () {\n iterator = action(object)[Symbol.asyncIterator]()\n },\n async pull (controller) {\n const { value, done } = await iterator.next()\n if (done) {\n // When running action is done, close stream.\n queueMicrotask(() => {\n controller.close()\n })\n } else {\n // Whenever one or more bytes are available and stream is not errored,\n // enqueue a Uint8Array wrapping an ArrayBuffer containing the available\n // bytes into stream.\n if (!isErrored(stream)) {\n controller.enqueue(new Uint8Array(value))\n }\n }\n return controller.desiredSize > 0\n },\n async cancel (reason) {\n await iterator.return()\n },\n type: undefined\n })\n }\n\n // 13. Let body be a body whose stream is stream, source is source,\n // and length is length.\n const body = { stream, source, length }\n\n // 14. Return (body, type).\n return [body, type]\n}\n\n// https://fetch.spec.whatwg.org/#bodyinit-safely-extract\nfunction safelyExtractBody (object, keepalive = false) {\n if (!ReadableStream) {\n // istanbul ignore next\n ReadableStream = require('stream/web').ReadableStream\n }\n\n // To safely extract a body and a `Content-Type` value from\n // a byte sequence or BodyInit object object, run these steps:\n\n // 1. If object is a ReadableStream object, then:\n if (object instanceof ReadableStream) {\n // Assert: object is neither disturbed nor locked.\n // istanbul ignore next\n assert(!util.isDisturbed(object), 'The body has already been consumed.')\n // istanbul ignore next\n assert(!object.locked, 'The stream is locked.')\n }\n\n // 2. Return the results of extracting object.\n return extractBody(object, keepalive)\n}\n\nfunction cloneBody (body) {\n // To clone a body body, run these steps:\n\n // https://fetch.spec.whatwg.org/#concept-body-clone\n\n // 1. Let « out1, out2 » be the result of teeing body’s stream.\n const [out1, out2] = body.stream.tee()\n const out2Clone = structuredClone(out2, { transfer: [out2] })\n // This, for whatever reasons, unrefs out2Clone which allows\n // the process to exit by itself.\n const [, finalClone] = out2Clone.tee()\n\n // 2. Set body’s stream to out1.\n body.stream = out1\n\n // 3. 
Return a body whose stream is out2 and other members are copied from body.\n return {\n stream: finalClone,\n length: body.length,\n source: body.source\n }\n}\n\nasync function * consumeBody (body) {\n if (body) {\n if (isUint8Array(body)) {\n yield body\n } else {\n const stream = body.stream\n\n if (util.isDisturbed(stream)) {\n throw new TypeError('The body has already been consumed.')\n }\n\n if (stream.locked) {\n throw new TypeError('The stream is locked.')\n }\n\n // Compat.\n stream[kBodyUsed] = true\n\n yield * stream\n }\n }\n}\n\nfunction throwIfAborted (state) {\n if (state.aborted) {\n throw new DOMException('The operation was aborted.', 'AbortError')\n }\n}\n\nfunction bodyMixinMethods (instance) {\n const methods = {\n blob () {\n // The blob() method steps are to return the result of\n // running consume body with this and the following step\n // given a byte sequence bytes: return a Blob whose\n // contents are bytes and whose type attribute is this’s\n // MIME type.\n return specConsumeBody(this, (bytes) => {\n let mimeType = bodyMimeType(this)\n\n if (mimeType === 'failure') {\n mimeType = ''\n } else if (mimeType) {\n mimeType = serializeAMimeType(mimeType)\n }\n\n // Return a Blob whose contents are bytes and type attribute\n // is mimeType.\n return new Blob([bytes], { type: mimeType })\n }, instance)\n },\n\n arrayBuffer () {\n // The arrayBuffer() method steps are to return the result\n // of running consume body with this and the following step\n // given a byte sequence bytes: return a new ArrayBuffer\n // whose contents are bytes.\n return specConsumeBody(this, (bytes) => {\n return new Uint8Array(bytes).buffer\n }, instance)\n },\n\n text () {\n // The text() method steps are to return the result of running\n // consume body with this and UTF-8 decode.\n return specConsumeBody(this, utf8DecodeBytes, instance)\n },\n\n json () {\n // The json() method steps are to return the result of running\n // consume body with this and parse JSON from bytes.\n return specConsumeBody(this, parseJSONFromBytes, instance)\n },\n\n async formData () {\n webidl.brandCheck(this, instance)\n\n throwIfAborted(this[kState])\n\n const contentType = this.headers.get('Content-Type')\n\n // If mimeType’s essence is \"multipart/form-data\", then:\n if (/multipart\\/form-data/.test(contentType)) {\n const headers = {}\n for (const [key, value] of this.headers) headers[key.toLowerCase()] = value\n\n const responseFormData = new FormData()\n\n let busboy\n\n try {\n busboy = new Busboy({\n headers,\n preservePath: true\n })\n } catch (err) {\n throw new DOMException(`${err}`, 'AbortError')\n }\n\n busboy.on('field', (name, value) => {\n responseFormData.append(name, value)\n })\n busboy.on('file', (name, value, filename, encoding, mimeType) => {\n const chunks = []\n\n if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {\n let base64chunk = ''\n\n value.on('data', (chunk) => {\n base64chunk += chunk.toString().replace(/[\\r\\n]/gm, '')\n\n const end = base64chunk.length - base64chunk.length % 4\n chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))\n\n base64chunk = base64chunk.slice(end)\n })\n value.on('end', () => {\n chunks.push(Buffer.from(base64chunk, 'base64'))\n responseFormData.append(name, new File(chunks, filename, { type: mimeType }))\n })\n } else {\n value.on('data', (chunk) => {\n chunks.push(chunk)\n })\n value.on('end', () => {\n responseFormData.append(name, new File(chunks, filename, { type: mimeType }))\n })\n }\n })\n\n const busboyResolve = new 
Promise((resolve, reject) => {\n busboy.on('finish', resolve)\n busboy.on('error', (err) => reject(new TypeError(err)))\n })\n\n if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)\n busboy.end()\n await busboyResolve\n\n return responseFormData\n } else if (/application\\/x-www-form-urlencoded/.test(contentType)) {\n // Otherwise, if mimeType’s essence is \"application/x-www-form-urlencoded\", then:\n\n // 1. Let entries be the result of parsing bytes.\n let entries\n try {\n let text = ''\n // application/x-www-form-urlencoded parser will keep the BOM.\n // https://url.spec.whatwg.org/#concept-urlencoded-parser\n // Note that streaming decoder is stateful and cannot be reused\n const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })\n\n for await (const chunk of consumeBody(this[kState].body)) {\n if (!isUint8Array(chunk)) {\n throw new TypeError('Expected Uint8Array chunk')\n }\n text += streamingDecoder.decode(chunk, { stream: true })\n }\n text += streamingDecoder.decode()\n entries = new URLSearchParams(text)\n } catch (err) {\n // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.\n // 2. If entries is failure, then throw a TypeError.\n throw Object.assign(new TypeError(), { cause: err })\n }\n\n // 3. Return a new FormData object whose entries are entries.\n const formData = new FormData()\n for (const [name, value] of entries) {\n formData.append(name, value)\n }\n return formData\n } else {\n // Wait a tick before checking if the request has been aborted.\n // Otherwise, a TypeError can be thrown when an AbortError should.\n await Promise.resolve()\n\n throwIfAborted(this[kState])\n\n // Otherwise, throw a TypeError.\n throw webidl.errors.exception({\n header: `${instance.name}.formData`,\n message: 'Could not parse content as FormData.'\n })\n }\n }\n }\n\n return methods\n}\n\nfunction mixinBody (prototype) {\n Object.assign(prototype.prototype, bodyMixinMethods(prototype))\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-body-consume-body\n * @param {Response|Request} object\n * @param {(value: unknown) => unknown} convertBytesToJSValue\n * @param {Response|Request} instance\n */\nasync function specConsumeBody (object, convertBytesToJSValue, instance) {\n webidl.brandCheck(object, instance)\n\n throwIfAborted(object[kState])\n\n // 1. If object is unusable, then return a promise rejected\n // with a TypeError.\n if (bodyUnusable(object[kState].body)) {\n throw new TypeError('Body is unusable')\n }\n\n // 2. Let promise be a new promise.\n const promise = createDeferredPromise()\n\n // 3. Let errorSteps given error be to reject promise with error.\n const errorSteps = (error) => promise.reject(error)\n\n // 4. Let successSteps given a byte sequence data be to resolve\n // promise with the result of running convertBytesToJSValue\n // with data. If that threw an exception, then run errorSteps\n // with that exception.\n const successSteps = (data) => {\n try {\n promise.resolve(convertBytesToJSValue(data))\n } catch (e) {\n errorSteps(e)\n }\n }\n\n // 5. If object’s body is null, then run successSteps with an\n // empty byte sequence.\n if (object[kState].body == null) {\n successSteps(new Uint8Array())\n return promise.promise\n }\n\n // 6. Otherwise, fully read object’s body given successSteps,\n // errorSteps, and object’s relevant global object.\n await fullyReadBody(object[kState].body, successSteps, errorSteps)\n\n // 7. 
Return promise.\n return promise.promise\n}\n\n// https://fetch.spec.whatwg.org/#body-unusable\nfunction bodyUnusable (body) {\n // An object including the Body interface mixin is\n // said to be unusable if its body is non-null and\n // its body’s stream is disturbed or locked.\n return body != null && (body.stream.locked || util.isDisturbed(body.stream))\n}\n\n/**\n * @see https://encoding.spec.whatwg.org/#utf-8-decode\n * @param {Buffer} buffer\n */\nfunction utf8DecodeBytes (buffer) {\n if (buffer.length === 0) {\n return ''\n }\n\n // 1. Let buffer be the result of peeking three bytes from\n // ioQueue, converted to a byte sequence.\n\n // 2. If buffer is 0xEF 0xBB 0xBF, then read three\n // bytes from ioQueue. (Do nothing with those bytes.)\n if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {\n buffer = buffer.subarray(3)\n }\n\n // 3. Process a queue with an instance of UTF-8’s\n // decoder, ioQueue, output, and \"replacement\".\n const output = textDecoder.decode(buffer)\n\n // 4. Return output.\n return output\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value\n * @param {Uint8Array} bytes\n */\nfunction parseJSONFromBytes (bytes) {\n return JSON.parse(utf8DecodeBytes(bytes))\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-body-mime-type\n * @param {import('./response').Response|import('./request').Request} object\n */\nfunction bodyMimeType (object) {\n const { headersList } = object[kState]\n const contentType = headersList.get('content-type')\n\n if (contentType === null) {\n return 'failure'\n }\n\n return parseMIMEType(contentType)\n}\n\nmodule.exports = {\n extractBody,\n safelyExtractBody,\n cloneBody,\n mixinBody\n}\n","'use strict'\n\nconst { MessageChannel, receiveMessageOnPort } = require('worker_threads')\n\nconst corsSafeListedMethods = ['GET', 'HEAD', 'POST']\nconst corsSafeListedMethodsSet = new Set(corsSafeListedMethods)\n\nconst nullBodyStatus = [101, 204, 205, 304]\n\nconst redirectStatus = [301, 302, 303, 307, 308]\nconst redirectStatusSet = new Set(redirectStatus)\n\n// https://fetch.spec.whatwg.org/#block-bad-port\nconst badPorts = [\n '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',\n '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',\n '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',\n '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',\n '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',\n '10080'\n]\n\nconst badPortsSet = new Set(badPorts)\n\n// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies\nconst referrerPolicy = [\n '',\n 'no-referrer',\n 'no-referrer-when-downgrade',\n 'same-origin',\n 'origin',\n 'strict-origin',\n 'origin-when-cross-origin',\n 'strict-origin-when-cross-origin',\n 'unsafe-url'\n]\nconst referrerPolicySet = new Set(referrerPolicy)\n\nconst requestRedirect = ['follow', 'manual', 'error']\n\nconst safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']\nconst safeMethodsSet = new Set(safeMethods)\n\nconst requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']\n\nconst requestCredentials = ['omit', 'same-origin', 'include']\n\nconst requestCache = [\n 'default',\n 'no-store',\n 'reload',\n 'no-cache',\n 'force-cache',\n 'only-if-cached'\n]\n\n// 
https://fetch.spec.whatwg.org/#request-body-header-name\nconst requestBodyHeader = [\n 'content-encoding',\n 'content-language',\n 'content-location',\n 'content-type',\n // See https://github.com/nodejs/undici/issues/2021\n // 'Content-Length' is a forbidden header name, which is typically\n // removed in the Headers implementation. However, undici doesn't\n // filter out headers, so we add it here.\n 'content-length'\n]\n\n// https://fetch.spec.whatwg.org/#enumdef-requestduplex\nconst requestDuplex = [\n 'half'\n]\n\n// http://fetch.spec.whatwg.org/#forbidden-method\nconst forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']\nconst forbiddenMethodsSet = new Set(forbiddenMethods)\n\nconst subresource = [\n 'audio',\n 'audioworklet',\n 'font',\n 'image',\n 'manifest',\n 'paintworklet',\n 'script',\n 'style',\n 'track',\n 'video',\n 'xslt',\n ''\n]\nconst subresourceSet = new Set(subresource)\n\n/** @type {globalThis['DOMException']} */\nconst DOMException = globalThis.DOMException ?? (() => {\n // DOMException was only made a global in Node v17.0.0,\n // but fetch supports >= v16.8.\n try {\n atob('~')\n } catch (err) {\n return Object.getPrototypeOf(err).constructor\n }\n})()\n\nlet channel\n\n/** @type {globalThis['structuredClone']} */\nconst structuredClone =\n globalThis.structuredClone ??\n // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js\n // structuredClone was added in v17.0.0, but fetch supports v16.8\n function structuredClone (value, options = undefined) {\n if (arguments.length === 0) {\n throw new TypeError('missing argument')\n }\n\n if (!channel) {\n channel = new MessageChannel()\n }\n channel.port1.unref()\n channel.port2.unref()\n channel.port1.postMessage(value, options?.transfer)\n return receiveMessageOnPort(channel.port2).message\n }\n\nmodule.exports = {\n DOMException,\n structuredClone,\n subresource,\n forbiddenMethods,\n requestBodyHeader,\n referrerPolicy,\n requestRedirect,\n requestMode,\n requestCredentials,\n requestCache,\n redirectStatus,\n corsSafeListedMethods,\n nullBodyStatus,\n safeMethods,\n badPorts,\n requestDuplex,\n subresourceSet,\n badPortsSet,\n redirectStatusSet,\n corsSafeListedMethodsSet,\n safeMethodsSet,\n forbiddenMethodsSet,\n referrerPolicySet\n}\n","const assert = require('assert')\nconst { atob } = require('buffer')\nconst { isomorphicDecode } = require('./util')\n\nconst encoder = new TextEncoder()\n\n/**\n * @see https://mimesniff.spec.whatwg.org/#http-token-code-point\n */\nconst HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/\nconst HTTP_WHITESPACE_REGEX = /(\\u000A|\\u000D|\\u0009|\\u0020)/ // eslint-disable-line\n/**\n * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point\n */\nconst HTTP_QUOTED_STRING_TOKENS = /[\\u0009|\\u0020-\\u007E|\\u0080-\\u00FF]/ // eslint-disable-line\n\n// https://fetch.spec.whatwg.org/#data-url-processor\n/** @param {URL} dataURL */\nfunction dataURLProcessor (dataURL) {\n // 1. Assert: dataURL’s scheme is \"data\".\n assert(dataURL.protocol === 'data:')\n\n // 2. Let input be the result of running the URL\n // serializer on dataURL with exclude fragment\n // set to true.\n let input = URLSerializer(dataURL, true)\n\n // 3. Remove the leading \"data:\" string from input.\n input = input.slice(5)\n\n // 4. Let position point at the start of input.\n const position = { position: 0 }\n\n // 5. 
Let mimeType be the result of collecting a\n // sequence of code points that are not equal\n // to U+002C (,), given position.\n let mimeType = collectASequenceOfCodePointsFast(\n ',',\n input,\n position\n )\n\n // 6. Strip leading and trailing ASCII whitespace\n // from mimeType.\n // Undici implementation note: we need to store the\n // length because if the mimetype has spaces removed,\n // the wrong amount will be sliced from the input in\n // step #9\n const mimeTypeLength = mimeType.length\n mimeType = removeASCIIWhitespace(mimeType, true, true)\n\n // 7. If position is past the end of input, then\n // return failure\n if (position.position >= input.length) {\n return 'failure'\n }\n\n // 8. Advance position by 1.\n position.position++\n\n // 9. Let encodedBody be the remainder of input.\n const encodedBody = input.slice(mimeTypeLength + 1)\n\n // 10. Let body be the percent-decoding of encodedBody.\n let body = stringPercentDecode(encodedBody)\n\n // 11. If mimeType ends with U+003B (;), followed by\n // zero or more U+0020 SPACE, followed by an ASCII\n // case-insensitive match for \"base64\", then:\n if (/;(\\u0020){0,}base64$/i.test(mimeType)) {\n // 1. Let stringBody be the isomorphic decode of body.\n const stringBody = isomorphicDecode(body)\n\n // 2. Set body to the forgiving-base64 decode of\n // stringBody.\n body = forgivingBase64(stringBody)\n\n // 3. If body is failure, then return failure.\n if (body === 'failure') {\n return 'failure'\n }\n\n // 4. Remove the last 6 code points from mimeType.\n mimeType = mimeType.slice(0, -6)\n\n // 5. Remove trailing U+0020 SPACE code points from mimeType,\n // if any.\n mimeType = mimeType.replace(/(\\u0020)+$/, '')\n\n // 6. Remove the last U+003B (;) code point from mimeType.\n mimeType = mimeType.slice(0, -1)\n }\n\n // 12. If mimeType starts with U+003B (;), then prepend\n // \"text/plain\" to mimeType.\n if (mimeType.startsWith(';')) {\n mimeType = 'text/plain' + mimeType\n }\n\n // 13. Let mimeTypeRecord be the result of parsing\n // mimeType.\n let mimeTypeRecord = parseMIMEType(mimeType)\n\n // 14. If mimeTypeRecord is failure, then set\n // mimeTypeRecord to text/plain;charset=US-ASCII.\n if (mimeTypeRecord === 'failure') {\n mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII')\n }\n\n // 15. Return a new data: URL struct whose MIME\n // type is mimeTypeRecord and body is body.\n // https://fetch.spec.whatwg.org/#data-url-struct\n return { mimeType: mimeTypeRecord, body }\n}\n\n// https://url.spec.whatwg.org/#concept-url-serializer\n/**\n * @param {URL} url\n * @param {boolean} excludeFragment\n */\nfunction URLSerializer (url, excludeFragment = false) {\n if (!excludeFragment) {\n return url.href\n }\n\n const href = url.href\n const hashLength = url.hash.length\n\n return hashLength === 0 ? href : href.substring(0, href.length - hashLength)\n}\n\n// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points\n/**\n * @param {(char: string) => boolean} condition\n * @param {string} input\n * @param {{ position: number }} position\n */\nfunction collectASequenceOfCodePoints (condition, input, position) {\n // 1. Let result be the empty string.\n let result = ''\n\n // 2. While position doesn’t point past the end of input and the\n // code point at position within input meets the condition condition:\n while (position.position < input.length && condition(input[position.position])) {\n // 1. Append that code point to the end of result.\n result += input[position.position]\n\n // 2. 
Advance position by 1.\n position.position++\n }\n\n // 3. Return result.\n return result\n}\n\n/**\n * A faster collectASequenceOfCodePoints that only works when comparing a single character.\n * @param {string} char\n * @param {string} input\n * @param {{ position: number }} position\n */\nfunction collectASequenceOfCodePointsFast (char, input, position) {\n const idx = input.indexOf(char, position.position)\n const start = position.position\n\n if (idx === -1) {\n position.position = input.length\n return input.slice(start)\n }\n\n position.position = idx\n return input.slice(start, position.position)\n}\n\n// https://url.spec.whatwg.org/#string-percent-decode\n/** @param {string} input */\nfunction stringPercentDecode (input) {\n // 1. Let bytes be the UTF-8 encoding of input.\n const bytes = encoder.encode(input)\n\n // 2. Return the percent-decoding of bytes.\n return percentDecode(bytes)\n}\n\n// https://url.spec.whatwg.org/#percent-decode\n/** @param {Uint8Array} input */\nfunction percentDecode (input) {\n // 1. Let output be an empty byte sequence.\n /** @type {number[]} */\n const output = []\n\n // 2. For each byte byte in input:\n for (let i = 0; i < input.length; i++) {\n const byte = input[i]\n\n // 1. If byte is not 0x25 (%), then append byte to output.\n if (byte !== 0x25) {\n output.push(byte)\n\n // 2. Otherwise, if byte is 0x25 (%) and the next two bytes\n // after byte in input are not in the ranges\n // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F),\n // and 0x61 (a) to 0x66 (f), all inclusive, append byte\n // to output.\n } else if (\n byte === 0x25 &&\n !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2]))\n ) {\n output.push(0x25)\n\n // 3. Otherwise:\n } else {\n // 1. Let bytePoint be the two bytes after byte in input,\n // decoded, and then interpreted as hexadecimal number.\n const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2])\n const bytePoint = Number.parseInt(nextTwoBytes, 16)\n\n // 2. Append a byte whose value is bytePoint to output.\n output.push(bytePoint)\n\n // 3. Skip the next two bytes in input.\n i += 2\n }\n }\n\n // 3. Return output.\n return Uint8Array.from(output)\n}\n\n// https://mimesniff.spec.whatwg.org/#parse-a-mime-type\n/** @param {string} input */\nfunction parseMIMEType (input) {\n // 1. Remove any leading and trailing HTTP whitespace\n // from input.\n input = removeHTTPWhitespace(input, true, true)\n\n // 2. Let position be a position variable for input,\n // initially pointing at the start of input.\n const position = { position: 0 }\n\n // 3. Let type be the result of collecting a sequence\n // of code points that are not U+002F (/) from\n // input, given position.\n const type = collectASequenceOfCodePointsFast(\n '/',\n input,\n position\n )\n\n // 4. If type is the empty string or does not solely\n // contain HTTP token code points, then return failure.\n // https://mimesniff.spec.whatwg.org/#http-token-code-point\n if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {\n return 'failure'\n }\n\n // 5. If position is past the end of input, then return\n // failure\n if (position.position > input.length) {\n return 'failure'\n }\n\n // 6. Advance position by 1. (This skips past U+002F (/).)\n position.position++\n\n // 7. Let subtype be the result of collecting a sequence of\n // code points that are not U+003B (;) from input, given\n // position.\n let subtype = collectASequenceOfCodePointsFast(\n ';',\n input,\n position\n )\n\n // 8. 
Remove any trailing HTTP whitespace from subtype.\n subtype = removeHTTPWhitespace(subtype, false, true)\n\n // 9. If subtype is the empty string or does not solely\n // contain HTTP token code points, then return failure.\n if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {\n return 'failure'\n }\n\n const typeLowercase = type.toLowerCase()\n const subtypeLowercase = subtype.toLowerCase()\n\n // 10. Let mimeType be a new MIME type record whose type\n // is type, in ASCII lowercase, and subtype is subtype,\n // in ASCII lowercase.\n // https://mimesniff.spec.whatwg.org/#mime-type\n const mimeType = {\n type: typeLowercase,\n subtype: subtypeLowercase,\n /** @type {Map} */\n parameters: new Map(),\n // https://mimesniff.spec.whatwg.org/#mime-type-essence\n essence: `${typeLowercase}/${subtypeLowercase}`\n }\n\n // 11. While position is not past the end of input:\n while (position.position < input.length) {\n // 1. Advance position by 1. (This skips past U+003B (;).)\n position.position++\n\n // 2. Collect a sequence of code points that are HTTP\n // whitespace from input given position.\n collectASequenceOfCodePoints(\n // https://fetch.spec.whatwg.org/#http-whitespace\n char => HTTP_WHITESPACE_REGEX.test(char),\n input,\n position\n )\n\n // 3. Let parameterName be the result of collecting a\n // sequence of code points that are not U+003B (;)\n // or U+003D (=) from input, given position.\n let parameterName = collectASequenceOfCodePoints(\n (char) => char !== ';' && char !== '=',\n input,\n position\n )\n\n // 4. Set parameterName to parameterName, in ASCII\n // lowercase.\n parameterName = parameterName.toLowerCase()\n\n // 5. If position is not past the end of input, then:\n if (position.position < input.length) {\n // 1. If the code point at position within input is\n // U+003B (;), then continue.\n if (input[position.position] === ';') {\n continue\n }\n\n // 2. Advance position by 1. (This skips past U+003D (=).)\n position.position++\n }\n\n // 6. If position is past the end of input, then break.\n if (position.position > input.length) {\n break\n }\n\n // 7. Let parameterValue be null.\n let parameterValue = null\n\n // 8. If the code point at position within input is\n // U+0022 (\"), then:\n if (input[position.position] === '\"') {\n // 1. Set parameterValue to the result of collecting\n // an HTTP quoted string from input, given position\n // and the extract-value flag.\n parameterValue = collectAnHTTPQuotedString(input, position, true)\n\n // 2. Collect a sequence of code points that are not\n // U+003B (;) from input, given position.\n collectASequenceOfCodePointsFast(\n ';',\n input,\n position\n )\n\n // 9. Otherwise:\n } else {\n // 1. Set parameterValue to the result of collecting\n // a sequence of code points that are not U+003B (;)\n // from input, given position.\n parameterValue = collectASequenceOfCodePointsFast(\n ';',\n input,\n position\n )\n\n // 2. Remove any trailing HTTP whitespace from parameterValue.\n parameterValue = removeHTTPWhitespace(parameterValue, false, true)\n\n // 3. If parameterValue is the empty string, then continue.\n if (parameterValue.length === 0) {\n continue\n }\n }\n\n // 10. 
If all of the following are true\n // - parameterName is not the empty string\n // - parameterName solely contains HTTP token code points\n // - parameterValue solely contains HTTP quoted-string token code points\n // - mimeType’s parameters[parameterName] does not exist\n // then set mimeType’s parameters[parameterName] to parameterValue.\n if (\n parameterName.length !== 0 &&\n HTTP_TOKEN_CODEPOINTS.test(parameterName) &&\n (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&\n !mimeType.parameters.has(parameterName)\n ) {\n mimeType.parameters.set(parameterName, parameterValue)\n }\n }\n\n // 12. Return mimeType.\n return mimeType\n}\n\n// https://infra.spec.whatwg.org/#forgiving-base64-decode\n/** @param {string} data */\nfunction forgivingBase64 (data) {\n // 1. Remove all ASCII whitespace from data.\n data = data.replace(/[\\u0009\\u000A\\u000C\\u000D\\u0020]/g, '') // eslint-disable-line\n\n // 2. If data’s code point length divides by 4 leaving\n // no remainder, then:\n if (data.length % 4 === 0) {\n // 1. If data ends with one or two U+003D (=) code points,\n // then remove them from data.\n data = data.replace(/=?=$/, '')\n }\n\n // 3. If data’s code point length divides by 4 leaving\n // a remainder of 1, then return failure.\n if (data.length % 4 === 1) {\n return 'failure'\n }\n\n // 4. If data contains a code point that is not one of\n // U+002B (+)\n // U+002F (/)\n // ASCII alphanumeric\n // then return failure.\n if (/[^+/0-9A-Za-z]/.test(data)) {\n return 'failure'\n }\n\n const binary = atob(data)\n const bytes = new Uint8Array(binary.length)\n\n for (let byte = 0; byte < binary.length; byte++) {\n bytes[byte] = binary.charCodeAt(byte)\n }\n\n return bytes\n}\n\n// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string\n// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string\n/**\n * @param {string} input\n * @param {{ position: number }} position\n * @param {boolean?} extractValue\n */\nfunction collectAnHTTPQuotedString (input, position, extractValue) {\n // 1. Let positionStart be position.\n const positionStart = position.position\n\n // 2. Let value be the empty string.\n let value = ''\n\n // 3. Assert: the code point at position within input\n // is U+0022 (\").\n assert(input[position.position] === '\"')\n\n // 4. Advance position by 1.\n position.position++\n\n // 5. While true:\n while (true) {\n // 1. Append the result of collecting a sequence of code points\n // that are not U+0022 (\") or U+005C (\\) from input, given\n // position, to value.\n value += collectASequenceOfCodePoints(\n (char) => char !== '\"' && char !== '\\\\',\n input,\n position\n )\n\n // 2. If position is past the end of input, then break.\n if (position.position >= input.length) {\n break\n }\n\n // 3. Let quoteOrBackslash be the code point at position within\n // input.\n const quoteOrBackslash = input[position.position]\n\n // 4. Advance position by 1.\n position.position++\n\n // 5. If quoteOrBackslash is U+005C (\\), then:\n if (quoteOrBackslash === '\\\\') {\n // 1. If position is past the end of input, then append\n // U+005C (\\) to value and break.\n if (position.position >= input.length) {\n value += '\\\\'\n break\n }\n\n // 2. Append the code point at position within input to value.\n value += input[position.position]\n\n // 3. Advance position by 1.\n position.position++\n\n // 6. Otherwise:\n } else {\n // 1. Assert: quoteOrBackslash is U+0022 (\").\n assert(quoteOrBackslash === '\"')\n\n // 2. 
Break.\n break\n }\n }\n\n // 6. If the extract-value flag is set, then return value.\n if (extractValue) {\n return value\n }\n\n // 7. Return the code points from positionStart to position,\n // inclusive, within input.\n return input.slice(positionStart, position.position)\n}\n\n/**\n * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type\n */\nfunction serializeAMimeType (mimeType) {\n assert(mimeType !== 'failure')\n const { parameters, essence } = mimeType\n\n // 1. Let serialization be the concatenation of mimeType’s\n // type, U+002F (/), and mimeType’s subtype.\n let serialization = essence\n\n // 2. For each name → value of mimeType’s parameters:\n for (let [name, value] of parameters.entries()) {\n // 1. Append U+003B (;) to serialization.\n serialization += ';'\n\n // 2. Append name to serialization.\n serialization += name\n\n // 3. Append U+003D (=) to serialization.\n serialization += '='\n\n // 4. If value does not solely contain HTTP token code\n // points or value is the empty string, then:\n if (!HTTP_TOKEN_CODEPOINTS.test(value)) {\n // 1. Precede each occurence of U+0022 (\") or\n // U+005C (\\) in value with U+005C (\\).\n value = value.replace(/(\\\\|\")/g, '\\\\$1')\n\n // 2. Prepend U+0022 (\") to value.\n value = '\"' + value\n\n // 3. Append U+0022 (\") to value.\n value += '\"'\n }\n\n // 5. Append value to serialization.\n serialization += value\n }\n\n // 3. Return serialization.\n return serialization\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#http-whitespace\n * @param {string} char\n */\nfunction isHTTPWhiteSpace (char) {\n return char === '\\r' || char === '\\n' || char === '\\t' || char === ' '\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#http-whitespace\n * @param {string} str\n */\nfunction removeHTTPWhitespace (str, leading = true, trailing = true) {\n let lead = 0\n let trail = str.length - 1\n\n if (leading) {\n for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);\n }\n\n if (trailing) {\n for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);\n }\n\n return str.slice(lead, trail + 1)\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#ascii-whitespace\n * @param {string} char\n */\nfunction isASCIIWhitespace (char) {\n return char === '\\r' || char === '\\n' || char === '\\t' || char === '\\f' || char === ' '\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace\n */\nfunction removeASCIIWhitespace (str, leading = true, trailing = true) {\n let lead = 0\n let trail = str.length - 1\n\n if (leading) {\n for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);\n }\n\n if (trailing) {\n for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);\n }\n\n return str.slice(lead, trail + 1)\n}\n\nmodule.exports = {\n dataURLProcessor,\n URLSerializer,\n collectASequenceOfCodePoints,\n collectASequenceOfCodePointsFast,\n stringPercentDecode,\n parseMIMEType,\n collectAnHTTPQuotedString,\n serializeAMimeType\n}\n","'use strict'\n\nconst { Blob, File: NativeFile } = require('buffer')\nconst { types } = require('util')\nconst { kState } = require('./symbols')\nconst { isBlobLike } = require('./util')\nconst { webidl } = require('./webidl')\nconst { parseMIMEType, serializeAMimeType } = require('./dataURL')\nconst { kEnumerableProperty } = require('../core/util')\nconst encoder = new TextEncoder()\n\nclass File extends Blob {\n constructor (fileBits, fileName, options = {}) {\n // The File constructor is invoked with two or three parameters, depending\n // on whether the 
optional dictionary parameter is used. When the File()\n // constructor is invoked, user agents must run the following steps:\n webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })\n\n fileBits = webidl.converters['sequence'](fileBits)\n fileName = webidl.converters.USVString(fileName)\n options = webidl.converters.FilePropertyBag(options)\n\n // 1. Let bytes be the result of processing blob parts given fileBits and\n // options.\n // Note: Blob handles this for us\n\n // 2. Let n be the fileName argument to the constructor.\n const n = fileName\n\n // 3. Process FilePropertyBag dictionary argument by running the following\n // substeps:\n\n // 1. If the type member is provided and is not the empty string, let t\n // be set to the type dictionary member. If t contains any characters\n // outside the range U+0020 to U+007E, then set t to the empty string\n // and return from these substeps.\n // 2. Convert every character in t to ASCII lowercase.\n let t = options.type\n let d\n\n // eslint-disable-next-line no-labels\n substep: {\n if (t) {\n t = parseMIMEType(t)\n\n if (t === 'failure') {\n t = ''\n // eslint-disable-next-line no-labels\n break substep\n }\n\n t = serializeAMimeType(t).toLowerCase()\n }\n\n // 3. If the lastModified member is provided, let d be set to the\n // lastModified dictionary member. If it is not provided, set d to the\n // current date and time represented as the number of milliseconds since\n // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).\n d = options.lastModified\n }\n\n // 4. Return a new File object F such that:\n // F refers to the bytes byte sequence.\n // F.size is set to the number of total bytes in bytes.\n // F.name is set to n.\n // F.type is set to t.\n // F.lastModified is set to d.\n\n super(processBlobParts(fileBits, options), { type: t })\n this[kState] = {\n name: n,\n lastModified: d,\n type: t\n }\n }\n\n get name () {\n webidl.brandCheck(this, File)\n\n return this[kState].name\n }\n\n get lastModified () {\n webidl.brandCheck(this, File)\n\n return this[kState].lastModified\n }\n\n get type () {\n webidl.brandCheck(this, File)\n\n return this[kState].type\n }\n}\n\nclass FileLike {\n constructor (blobLike, fileName, options = {}) {\n // TODO: argument idl type check\n\n // The File constructor is invoked with two or three parameters, depending\n // on whether the optional dictionary parameter is used. When the File()\n // constructor is invoked, user agents must run the following steps:\n\n // 1. Let bytes be the result of processing blob parts given fileBits and\n // options.\n\n // 2. Let n be the fileName argument to the constructor.\n const n = fileName\n\n // 3. Process FilePropertyBag dictionary argument by running the following\n // substeps:\n\n // 1. If the type member is provided and is not the empty string, let t\n // be set to the type dictionary member. If t contains any characters\n // outside the range U+0020 to U+007E, then set t to the empty string\n // and return from these substeps.\n // TODO\n const t = options.type\n\n // 2. Convert every character in t to ASCII lowercase.\n // TODO\n\n // 3. If the lastModified member is provided, let d be set to the\n // lastModified dictionary member. If it is not provided, set d to the\n // current date and time represented as the number of milliseconds since\n // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).\n const d = options.lastModified ?? Date.now()\n\n // 4. 
Return a new File object F such that:\n // F refers to the bytes byte sequence.\n // F.size is set to the number of total bytes in bytes.\n // F.name is set to n.\n // F.type is set to t.\n // F.lastModified is set to d.\n\n this[kState] = {\n blobLike,\n name: n,\n type: t,\n lastModified: d\n }\n }\n\n stream (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.stream(...args)\n }\n\n arrayBuffer (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.arrayBuffer(...args)\n }\n\n slice (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.slice(...args)\n }\n\n text (...args) {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.text(...args)\n }\n\n get size () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.size\n }\n\n get type () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].blobLike.type\n }\n\n get name () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].name\n }\n\n get lastModified () {\n webidl.brandCheck(this, FileLike)\n\n return this[kState].lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n}\n\nObject.defineProperties(File.prototype, {\n [Symbol.toStringTag]: {\n value: 'File',\n configurable: true\n },\n name: kEnumerableProperty,\n lastModified: kEnumerableProperty\n})\n\nwebidl.converters.Blob = webidl.interfaceConverter(Blob)\n\nwebidl.converters.BlobPart = function (V, opts) {\n if (webidl.util.Type(V) === 'Object') {\n if (isBlobLike(V)) {\n return webidl.converters.Blob(V, { strict: false })\n }\n\n if (\n ArrayBuffer.isView(V) ||\n types.isAnyArrayBuffer(V)\n ) {\n return webidl.converters.BufferSource(V, opts)\n }\n }\n\n return webidl.converters.USVString(V, opts)\n}\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.BlobPart\n)\n\n// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag\nwebidl.converters.FilePropertyBag = webidl.dictionaryConverter([\n {\n key: 'lastModified',\n converter: webidl.converters['long long'],\n get defaultValue () {\n return Date.now()\n }\n },\n {\n key: 'type',\n converter: webidl.converters.DOMString,\n defaultValue: ''\n },\n {\n key: 'endings',\n converter: (value) => {\n value = webidl.converters.DOMString(value)\n value = value.toLowerCase()\n\n if (value !== 'native') {\n value = 'transparent'\n }\n\n return value\n },\n defaultValue: 'transparent'\n }\n])\n\n/**\n * @see https://www.w3.org/TR/FileAPI/#process-blob-parts\n * @param {(NodeJS.TypedArray|Blob|string)[]} parts\n * @param {{ type: string, endings: string }} options\n */\nfunction processBlobParts (parts, options) {\n // 1. Let bytes be an empty sequence of bytes.\n /** @type {NodeJS.TypedArray[]} */\n const bytes = []\n\n // 2. For each element in parts:\n for (const element of parts) {\n // 1. If element is a USVString, run the following substeps:\n if (typeof element === 'string') {\n // 1. Let s be element.\n let s = element\n\n // 2. If the endings member of options is \"native\", set s\n // to the result of converting line endings to native\n // of element.\n if (options.endings === 'native') {\n s = convertLineEndingsNative(s)\n }\n\n // 3. Append the result of UTF-8 encoding s to bytes.\n bytes.push(encoder.encode(s))\n } else if (\n types.isAnyArrayBuffer(element) ||\n types.isTypedArray(element)\n ) {\n // 2. 
If element is a BufferSource, get a copy of the\n // bytes held by the buffer source, and append those\n // bytes to bytes.\n if (!element.buffer) { // ArrayBuffer\n bytes.push(new Uint8Array(element))\n } else {\n bytes.push(\n new Uint8Array(element.buffer, element.byteOffset, element.byteLength)\n )\n }\n } else if (isBlobLike(element)) {\n // 3. If element is a Blob, append the bytes it represents\n // to bytes.\n bytes.push(element)\n }\n }\n\n // 3. Return bytes.\n return bytes\n}\n\n/**\n * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native\n * @param {string} s\n */\nfunction convertLineEndingsNative (s) {\n // 1. Let native line ending be be the code point U+000A LF.\n let nativeLineEnding = '\\n'\n\n // 2. If the underlying platform’s conventions are to\n // represent newlines as a carriage return and line feed\n // sequence, set native line ending to the code point\n // U+000D CR followed by the code point U+000A LF.\n if (process.platform === 'win32') {\n nativeLineEnding = '\\r\\n'\n }\n\n return s.replace(/\\r?\\n/g, nativeLineEnding)\n}\n\n// If this function is moved to ./util.js, some tools (such as\n// rollup) will warn about circular dependencies. See:\n// https://github.com/nodejs/undici/issues/1629\nfunction isFileLike (object) {\n return (\n (NativeFile && object instanceof NativeFile) ||\n object instanceof File || (\n object &&\n (typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function') &&\n object[Symbol.toStringTag] === 'File'\n )\n )\n}\n\nmodule.exports = { File, FileLike, isFileLike }\n","'use strict'\n\nconst { isBlobLike, toUSVString, makeIterator } = require('./util')\nconst { kState } = require('./symbols')\nconst { File: UndiciFile, FileLike, isFileLike } = require('./file')\nconst { webidl } = require('./webidl')\nconst { Blob, File: NativeFile } = require('buffer')\n\n/** @type {globalThis['File']} */\nconst File = NativeFile ?? UndiciFile\n\n// https://xhr.spec.whatwg.org/#formdata\nclass FormData {\n constructor (form) {\n if (form !== undefined) {\n throw webidl.errors.conversionFailed({\n prefix: 'FormData constructor',\n argument: 'Argument 1',\n types: ['undefined']\n })\n }\n\n this[kState] = []\n }\n\n append (name, value, filename = undefined) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })\n\n if (arguments.length === 3 && !isBlobLike(value)) {\n throw new TypeError(\n \"Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'\"\n )\n }\n\n // 1. Let value be value if given; otherwise blobValue.\n\n name = webidl.converters.USVString(name)\n value = isBlobLike(value)\n ? webidl.converters.Blob(value, { strict: false })\n : webidl.converters.USVString(value)\n filename = arguments.length === 3\n ? webidl.converters.USVString(filename)\n : undefined\n\n // 2. Let entry be the result of creating an entry with\n // name, value, and filename if given.\n const entry = makeEntry(name, value, filename)\n\n // 3. 
Append entry to this’s entry list.\n this[kState].push(entry)\n }\n\n delete (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })\n\n name = webidl.converters.USVString(name)\n\n // The delete(name) method steps are to remove all entries whose name\n // is name from this’s entry list.\n this[kState] = this[kState].filter(entry => entry.name !== name)\n }\n\n get (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })\n\n name = webidl.converters.USVString(name)\n\n // 1. If there is no entry whose name is name in this’s entry list,\n // then return null.\n const idx = this[kState].findIndex((entry) => entry.name === name)\n if (idx === -1) {\n return null\n }\n\n // 2. Return the value of the first entry whose name is name from\n // this’s entry list.\n return this[kState][idx].value\n }\n\n getAll (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })\n\n name = webidl.converters.USVString(name)\n\n // 1. If there is no entry whose name is name in this’s entry list,\n // then return the empty list.\n // 2. Return the values of all entries whose name is name, in order,\n // from this’s entry list.\n return this[kState]\n .filter((entry) => entry.name === name)\n .map((entry) => entry.value)\n }\n\n has (name) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })\n\n name = webidl.converters.USVString(name)\n\n // The has(name) method steps are to return true if there is an entry\n // whose name is name in this’s entry list; otherwise false.\n return this[kState].findIndex((entry) => entry.name === name) !== -1\n }\n\n set (name, value, filename = undefined) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })\n\n if (arguments.length === 3 && !isBlobLike(value)) {\n throw new TypeError(\n \"Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'\"\n )\n }\n\n // The set(name, value) and set(name, blobValue, filename) method steps\n // are:\n\n // 1. Let value be value if given; otherwise blobValue.\n\n name = webidl.converters.USVString(name)\n value = isBlobLike(value)\n ? webidl.converters.Blob(value, { strict: false })\n : webidl.converters.USVString(value)\n filename = arguments.length === 3\n ? toUSVString(filename)\n : undefined\n\n // 2. Let entry be the result of creating an entry with name, value, and\n // filename if given.\n const entry = makeEntry(name, value, filename)\n\n // 3. If there are entries in this’s entry list whose name is name, then\n // replace the first such entry with entry and remove the others.\n const idx = this[kState].findIndex((entry) => entry.name === name)\n if (idx !== -1) {\n this[kState] = [\n ...this[kState].slice(0, idx),\n entry,\n ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)\n ]\n } else {\n // 4. 
Otherwise, append entry to this’s entry list.\n this[kState].push(entry)\n }\n }\n\n entries () {\n webidl.brandCheck(this, FormData)\n\n return makeIterator(\n () => this[kState].map(pair => [pair.name, pair.value]),\n 'FormData',\n 'key+value'\n )\n }\n\n keys () {\n webidl.brandCheck(this, FormData)\n\n return makeIterator(\n () => this[kState].map(pair => [pair.name, pair.value]),\n 'FormData',\n 'key'\n )\n }\n\n values () {\n webidl.brandCheck(this, FormData)\n\n return makeIterator(\n () => this[kState].map(pair => [pair.name, pair.value]),\n 'FormData',\n 'value'\n )\n }\n\n /**\n * @param {(value: string, key: string, self: FormData) => void} callbackFn\n * @param {unknown} thisArg\n */\n forEach (callbackFn, thisArg = globalThis) {\n webidl.brandCheck(this, FormData)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })\n\n if (typeof callbackFn !== 'function') {\n throw new TypeError(\n \"Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'.\"\n )\n }\n\n for (const [key, value] of this) {\n callbackFn.apply(thisArg, [value, key, this])\n }\n }\n}\n\nFormData.prototype[Symbol.iterator] = FormData.prototype.entries\n\nObject.defineProperties(FormData.prototype, {\n [Symbol.toStringTag]: {\n value: 'FormData',\n configurable: true\n }\n})\n\n/**\n * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry\n * @param {string} name\n * @param {string|Blob} value\n * @param {?string} filename\n * @returns\n */\nfunction makeEntry (name, value, filename) {\n // 1. Set name to the result of converting name into a scalar value string.\n // \"To convert a string into a scalar value string, replace any surrogates\n // with U+FFFD.\"\n // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end\n name = Buffer.from(name).toString('utf8')\n\n // 2. If value is a string, then set value to the result of converting\n // value into a scalar value string.\n if (typeof value === 'string') {\n value = Buffer.from(value).toString('utf8')\n } else {\n // 3. Otherwise:\n\n // 1. If value is not a File object, then set value to a new File object,\n // representing the same bytes, whose name attribute value is \"blob\"\n if (!isFileLike(value)) {\n value = value instanceof Blob\n ? new File([value], 'blob', { type: value.type })\n : new FileLike(value, 'blob', { type: value.type })\n }\n\n // 2. If filename is given, then set value to a new File object,\n // representing the same bytes, whose name attribute is filename.\n if (filename !== undefined) {\n /** @type {FilePropertyBag} */\n const options = {\n type: value.type,\n lastModified: value.lastModified\n }\n\n value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile\n ? new File([value], filename, options)\n : new FileLike(value, filename, options)\n }\n }\n\n // 4. 
Return an entry whose name is name and whose value is value.\n return { name, value }\n}\n\nmodule.exports = { FormData }\n","'use strict'\n\n// In case of breaking changes, increase the version\n// number to avoid conflicts.\nconst globalOrigin = Symbol.for('undici.globalOrigin.1')\n\nfunction getGlobalOrigin () {\n return globalThis[globalOrigin]\n}\n\nfunction setGlobalOrigin (newOrigin) {\n if (newOrigin === undefined) {\n Object.defineProperty(globalThis, globalOrigin, {\n value: undefined,\n writable: true,\n enumerable: false,\n configurable: false\n })\n\n return\n }\n\n const parsedURL = new URL(newOrigin)\n\n if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {\n throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)\n }\n\n Object.defineProperty(globalThis, globalOrigin, {\n value: parsedURL,\n writable: true,\n enumerable: false,\n configurable: false\n })\n}\n\nmodule.exports = {\n getGlobalOrigin,\n setGlobalOrigin\n}\n","// https://github.com/Ethan-Arrowood/undici-fetch\n\n'use strict'\n\nconst { kHeadersList, kConstruct } = require('../core/symbols')\nconst { kGuard } = require('./symbols')\nconst { kEnumerableProperty } = require('../core/util')\nconst {\n makeIterator,\n isValidHeaderName,\n isValidHeaderValue\n} = require('./util')\nconst { webidl } = require('./webidl')\nconst assert = require('assert')\n\nconst kHeadersMap = Symbol('headers map')\nconst kHeadersSortedMap = Symbol('headers map sorted')\n\n/**\n * @param {number} code\n */\nfunction isHTTPWhiteSpaceCharCode (code) {\n return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize\n * @param {string} potentialValue\n */\nfunction headerValueNormalize (potentialValue) {\n // To normalize a byte sequence potentialValue, remove\n // any leading and trailing HTTP whitespace bytes from\n // potentialValue.\n let i = 0; let j = potentialValue.length\n\n while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j\n while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i\n\n return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)\n}\n\nfunction fill (headers, object) {\n // To fill a Headers object headers with a given object object, run these steps:\n\n // 1. If object is a sequence, then for each header in object:\n // Note: webidl conversion to array has already been done.\n if (Array.isArray(object)) {\n for (let i = 0; i < object.length; ++i) {\n const header = object[i]\n // 1. If header does not contain exactly two items, then throw a TypeError.\n if (header.length !== 2) {\n throw webidl.errors.exception({\n header: 'Headers constructor',\n message: `expected name/value pair to be length 2, found ${header.length}.`\n })\n }\n\n // 2. Append (header’s first item, header’s second item) to headers.\n appendHeader(headers, header[0], header[1])\n }\n } else if (typeof object === 'object' && object !== null) {\n // Note: null should throw\n\n // 2. 
Otherwise, object is a record, then for each key → value in object,\n // append (key, value) to headers\n const keys = Object.keys(object)\n for (let i = 0; i < keys.length; ++i) {\n appendHeader(headers, keys[i], object[keys[i]])\n }\n } else {\n throw webidl.errors.conversionFailed({\n prefix: 'Headers constructor',\n argument: 'Argument 1',\n types: ['sequence>', 'record']\n })\n }\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-headers-append\n */\nfunction appendHeader (headers, name, value) {\n // 1. Normalize value.\n value = headerValueNormalize(value)\n\n // 2. If name is not a header name or value is not a\n // header value, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.append',\n value: name,\n type: 'header name'\n })\n } else if (!isValidHeaderValue(value)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.append',\n value,\n type: 'header value'\n })\n }\n\n // 3. If headers’s guard is \"immutable\", then throw a TypeError.\n // 4. Otherwise, if headers’s guard is \"request\" and name is a\n // forbidden header name, return.\n // Note: undici does not implement forbidden header names\n if (headers[kGuard] === 'immutable') {\n throw new TypeError('immutable')\n } else if (headers[kGuard] === 'request-no-cors') {\n // 5. Otherwise, if headers’s guard is \"request-no-cors\":\n // TODO\n }\n\n // 6. Otherwise, if headers’s guard is \"response\" and name is a\n // forbidden response-header name, return.\n\n // 7. Append (name, value) to headers’s header list.\n return headers[kHeadersList].append(name, value)\n\n // 8. If headers’s guard is \"request-no-cors\", then remove\n // privileged no-CORS request headers from headers\n}\n\nclass HeadersList {\n /** @type {[string, string][]|null} */\n cookies = null\n\n constructor (init) {\n if (init instanceof HeadersList) {\n this[kHeadersMap] = new Map(init[kHeadersMap])\n this[kHeadersSortedMap] = init[kHeadersSortedMap]\n this.cookies = init.cookies === null ? null : [...init.cookies]\n } else {\n this[kHeadersMap] = new Map(init)\n this[kHeadersSortedMap] = null\n }\n }\n\n // https://fetch.spec.whatwg.org/#header-list-contains\n contains (name) {\n // A header list list contains a header name name if list\n // contains a header whose name is a byte-case-insensitive\n // match for name.\n name = name.toLowerCase()\n\n return this[kHeadersMap].has(name)\n }\n\n clear () {\n this[kHeadersMap].clear()\n this[kHeadersSortedMap] = null\n this.cookies = null\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-append\n append (name, value) {\n this[kHeadersSortedMap] = null\n\n // 1. If list contains name, then set name to the first such\n // header’s name.\n const lowercaseName = name.toLowerCase()\n const exists = this[kHeadersMap].get(lowercaseName)\n\n // 2. Append (name, value) to list.\n if (exists) {\n const delimiter = lowercaseName === 'cookie' ? '; ' : ', '\n this[kHeadersMap].set(lowercaseName, {\n name: exists.name,\n value: `${exists.value}${delimiter}${value}`\n })\n } else {\n this[kHeadersMap].set(lowercaseName, { name, value })\n }\n\n if (lowercaseName === 'set-cookie') {\n this.cookies ??= []\n this.cookies.push(value)\n }\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-set\n set (name, value) {\n this[kHeadersSortedMap] = null\n const lowercaseName = name.toLowerCase()\n\n if (lowercaseName === 'set-cookie') {\n this.cookies = [value]\n }\n\n // 1. 
If list contains name, then set the value of\n // the first such header to value and remove the\n // others.\n // 2. Otherwise, append header (name, value) to list.\n this[kHeadersMap].set(lowercaseName, { name, value })\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-delete\n delete (name) {\n this[kHeadersSortedMap] = null\n\n name = name.toLowerCase()\n\n if (name === 'set-cookie') {\n this.cookies = null\n }\n\n this[kHeadersMap].delete(name)\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-get\n get (name) {\n const value = this[kHeadersMap].get(name.toLowerCase())\n\n // 1. If list does not contain name, then return null.\n // 2. Return the values of all headers in list whose name\n // is a byte-case-insensitive match for name,\n // separated from each other by 0x2C 0x20, in order.\n return value === undefined ? null : value.value\n }\n\n * [Symbol.iterator] () {\n // use the lowercased name\n for (const [name, { value }] of this[kHeadersMap]) {\n yield [name, value]\n }\n }\n\n get entries () {\n const headers = {}\n\n if (this[kHeadersMap].size) {\n for (const { name, value } of this[kHeadersMap].values()) {\n headers[name] = value\n }\n }\n\n return headers\n }\n}\n\n// https://fetch.spec.whatwg.org/#headers-class\nclass Headers {\n constructor (init = undefined) {\n if (init === kConstruct) {\n return\n }\n this[kHeadersList] = new HeadersList()\n\n // The new Headers(init) constructor steps are:\n\n // 1. Set this’s guard to \"none\".\n this[kGuard] = 'none'\n\n // 2. If init is given, then fill this with init.\n if (init !== undefined) {\n init = webidl.converters.HeadersInit(init)\n fill(this, init)\n }\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-append\n append (name, value) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })\n\n name = webidl.converters.ByteString(name)\n value = webidl.converters.ByteString(value)\n\n return appendHeader(this, name, value)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-delete\n delete (name) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })\n\n name = webidl.converters.ByteString(name)\n\n // 1. If name is not a header name, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.delete',\n value: name,\n type: 'header name'\n })\n }\n\n // 2. If this’s guard is \"immutable\", then throw a TypeError.\n // 3. Otherwise, if this’s guard is \"request\" and name is a\n // forbidden header name, return.\n // 4. Otherwise, if this’s guard is \"request-no-cors\", name\n // is not a no-CORS-safelisted request-header name, and\n // name is not a privileged no-CORS request-header name,\n // return.\n // 5. Otherwise, if this’s guard is \"response\" and name is\n // a forbidden response-header name, return.\n // Note: undici does not implement forbidden header names\n if (this[kGuard] === 'immutable') {\n throw new TypeError('immutable')\n } else if (this[kGuard] === 'request-no-cors') {\n // TODO\n }\n\n // 6. If this’s header list does not contain name, then\n // return.\n if (!this[kHeadersList].contains(name)) {\n return\n }\n\n // 7. Delete name from this’s header list.\n // 8. 
If this’s guard is \"request-no-cors\", then remove\n // privileged no-CORS request headers from this.\n this[kHeadersList].delete(name)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-get\n get (name) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })\n\n name = webidl.converters.ByteString(name)\n\n // 1. If name is not a header name, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.get',\n value: name,\n type: 'header name'\n })\n }\n\n // 2. Return the result of getting name from this’s header\n // list.\n return this[kHeadersList].get(name)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-has\n has (name) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })\n\n name = webidl.converters.ByteString(name)\n\n // 1. If name is not a header name, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.has',\n value: name,\n type: 'header name'\n })\n }\n\n // 2. Return true if this’s header list contains name;\n // otherwise false.\n return this[kHeadersList].contains(name)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-set\n set (name, value) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })\n\n name = webidl.converters.ByteString(name)\n value = webidl.converters.ByteString(value)\n\n // 1. Normalize value.\n value = headerValueNormalize(value)\n\n // 2. If name is not a header name or value is not a\n // header value, then throw a TypeError.\n if (!isValidHeaderName(name)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.set',\n value: name,\n type: 'header name'\n })\n } else if (!isValidHeaderValue(value)) {\n throw webidl.errors.invalidArgument({\n prefix: 'Headers.set',\n value,\n type: 'header value'\n })\n }\n\n // 3. If this’s guard is \"immutable\", then throw a TypeError.\n // 4. Otherwise, if this’s guard is \"request\" and name is a\n // forbidden header name, return.\n // 5. Otherwise, if this’s guard is \"request-no-cors\" and\n // name/value is not a no-CORS-safelisted request-header,\n // return.\n // 6. Otherwise, if this’s guard is \"response\" and name is a\n // forbidden response-header name, return.\n // Note: undici does not implement forbidden header names\n if (this[kGuard] === 'immutable') {\n throw new TypeError('immutable')\n } else if (this[kGuard] === 'request-no-cors') {\n // TODO\n }\n\n // 7. Set (name, value) in this’s header list.\n // 8. If this’s guard is \"request-no-cors\", then remove\n // privileged no-CORS request headers from this\n this[kHeadersList].set(name, value)\n }\n\n // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie\n getSetCookie () {\n webidl.brandCheck(this, Headers)\n\n // 1. If this’s header list does not contain `Set-Cookie`, then return « ».\n // 2. Return the values of all headers in this’s header list whose name is\n // a byte-case-insensitive match for `Set-Cookie`, in order.\n\n const list = this[kHeadersList].cookies\n\n if (list) {\n return [...list]\n }\n\n return []\n }\n\n // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine\n get [kHeadersSortedMap] () {\n if (this[kHeadersList][kHeadersSortedMap]) {\n return this[kHeadersList][kHeadersSortedMap]\n }\n\n // 1. 
Let headers be an empty list of headers with the key being the name\n // and value the value.\n const headers = []\n\n // 2. Let names be the result of convert header names to a sorted-lowercase\n // set with all the names of the headers in list.\n const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)\n const cookies = this[kHeadersList].cookies\n\n // 3. For each name of names:\n for (let i = 0; i < names.length; ++i) {\n const [name, value] = names[i]\n // 1. If name is `set-cookie`, then:\n if (name === 'set-cookie') {\n // 1. Let values be a list of all values of headers in list whose name\n // is a byte-case-insensitive match for name, in order.\n\n // 2. For each value of values:\n // 1. Append (name, value) to headers.\n for (let j = 0; j < cookies.length; ++j) {\n headers.push([name, cookies[j]])\n }\n } else {\n // 2. Otherwise:\n\n // 1. Let value be the result of getting name from list.\n\n // 2. Assert: value is non-null.\n assert(value !== null)\n\n // 3. Append (name, value) to headers.\n headers.push([name, value])\n }\n }\n\n this[kHeadersList][kHeadersSortedMap] = headers\n\n // 4. Return headers.\n return headers\n }\n\n keys () {\n webidl.brandCheck(this, Headers)\n\n if (this[kGuard] === 'immutable') {\n const value = this[kHeadersSortedMap]\n return makeIterator(() => value, 'Headers',\n 'key')\n }\n\n return makeIterator(\n () => [...this[kHeadersSortedMap].values()],\n 'Headers',\n 'key'\n )\n }\n\n values () {\n webidl.brandCheck(this, Headers)\n\n if (this[kGuard] === 'immutable') {\n const value = this[kHeadersSortedMap]\n return makeIterator(() => value, 'Headers',\n 'value')\n }\n\n return makeIterator(\n () => [...this[kHeadersSortedMap].values()],\n 'Headers',\n 'value'\n )\n }\n\n entries () {\n webidl.brandCheck(this, Headers)\n\n if (this[kGuard] === 'immutable') {\n const value = this[kHeadersSortedMap]\n return makeIterator(() => value, 'Headers',\n 'key+value')\n }\n\n return makeIterator(\n () => [...this[kHeadersSortedMap].values()],\n 'Headers',\n 'key+value'\n )\n }\n\n /**\n * @param {(value: string, key: string, self: Headers) => void} callbackFn\n * @param {unknown} thisArg\n */\n forEach (callbackFn, thisArg = globalThis) {\n webidl.brandCheck(this, Headers)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })\n\n if (typeof callbackFn !== 'function') {\n throw new TypeError(\n \"Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'.\"\n )\n }\n\n for (const [key, value] of this) {\n callbackFn.apply(thisArg, [value, key, this])\n }\n }\n\n [Symbol.for('nodejs.util.inspect.custom')] () {\n webidl.brandCheck(this, Headers)\n\n return this[kHeadersList]\n }\n}\n\nHeaders.prototype[Symbol.iterator] = Headers.prototype.entries\n\nObject.defineProperties(Headers.prototype, {\n append: kEnumerableProperty,\n delete: kEnumerableProperty,\n get: kEnumerableProperty,\n has: kEnumerableProperty,\n set: kEnumerableProperty,\n getSetCookie: kEnumerableProperty,\n keys: kEnumerableProperty,\n values: kEnumerableProperty,\n entries: kEnumerableProperty,\n forEach: kEnumerableProperty,\n [Symbol.iterator]: { enumerable: false },\n [Symbol.toStringTag]: {\n value: 'Headers',\n configurable: true\n }\n})\n\nwebidl.converters.HeadersInit = function (V) {\n if (webidl.util.Type(V) === 'Object') {\n if (V[Symbol.iterator]) {\n return webidl.converters['sequence>'](V)\n }\n\n return webidl.converters['record'](V)\n }\n\n throw webidl.errors.conversionFailed({\n prefix: 'Headers constructor',\n 
argument: 'Argument 1',\n types: ['sequence>', 'record']\n })\n}\n\nmodule.exports = {\n fill,\n Headers,\n HeadersList\n}\n","// https://github.com/Ethan-Arrowood/undici-fetch\n\n'use strict'\n\nconst {\n Response,\n makeNetworkError,\n makeAppropriateNetworkError,\n filterResponse,\n makeResponse\n} = require('./response')\nconst { Headers } = require('./headers')\nconst { Request, makeRequest } = require('./request')\nconst zlib = require('zlib')\nconst {\n bytesMatch,\n makePolicyContainer,\n clonePolicyContainer,\n requestBadPort,\n TAOCheck,\n appendRequestOriginHeader,\n responseLocationURL,\n requestCurrentURL,\n setRequestReferrerPolicyOnRedirect,\n tryUpgradeRequestToAPotentiallyTrustworthyURL,\n createOpaqueTimingInfo,\n appendFetchMetadata,\n corsCheck,\n crossOriginResourcePolicyCheck,\n determineRequestsReferrer,\n coarsenedSharedCurrentTime,\n createDeferredPromise,\n isBlobLike,\n sameOrigin,\n isCancelled,\n isAborted,\n isErrorLike,\n fullyReadBody,\n readableStreamClose,\n isomorphicEncode,\n urlIsLocal,\n urlIsHttpHttpsScheme,\n urlHasHttpsScheme\n} = require('./util')\nconst { kState, kHeaders, kGuard, kRealm } = require('./symbols')\nconst assert = require('assert')\nconst { safelyExtractBody } = require('./body')\nconst {\n redirectStatusSet,\n nullBodyStatus,\n safeMethodsSet,\n requestBodyHeader,\n subresourceSet,\n DOMException\n} = require('./constants')\nconst { kHeadersList } = require('../core/symbols')\nconst EE = require('events')\nconst { Readable, pipeline } = require('stream')\nconst { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util')\nconst { dataURLProcessor, serializeAMimeType } = require('./dataURL')\nconst { TransformStream } = require('stream/web')\nconst { getGlobalDispatcher } = require('../global')\nconst { webidl } = require('./webidl')\nconst { STATUS_CODES } = require('http')\nconst GET_OR_HEAD = ['GET', 'HEAD']\n\n/** @type {import('buffer').resolveObjectURL} */\nlet resolveObjectURL\nlet ReadableStream = globalThis.ReadableStream\n\nclass Fetch extends EE {\n constructor (dispatcher) {\n super()\n\n this.dispatcher = dispatcher\n this.connection = null\n this.dump = false\n this.state = 'ongoing'\n // 2 terminated listeners get added per request,\n // but only 1 gets removed. If there are 20 redirects,\n // 21 listeners will be added.\n // See https://github.com/nodejs/undici/issues/1711\n // TODO (fix): Find and fix root cause for leaked listener.\n this.setMaxListeners(21)\n }\n\n terminate (reason) {\n if (this.state !== 'ongoing') {\n return\n }\n\n this.state = 'terminated'\n this.connection?.destroy(reason)\n this.emit('terminated', reason)\n }\n\n // https://fetch.spec.whatwg.org/#fetch-controller-abort\n abort (error) {\n if (this.state !== 'ongoing') {\n return\n }\n\n // 1. Set controller’s state to \"aborted\".\n this.state = 'aborted'\n\n // 2. Let fallbackError be an \"AbortError\" DOMException.\n // 3. Set error to fallbackError if it is not given.\n if (!error) {\n error = new DOMException('The operation was aborted.', 'AbortError')\n }\n\n // 4. Let serializedError be StructuredSerialize(error).\n // If that threw an exception, catch it, and let\n // serializedError be StructuredSerialize(fallbackError).\n\n // 5. 
Set controller’s serialized abort reason to serializedError.\n this.serializedAbortReason = error\n\n this.connection?.destroy(error)\n this.emit('terminated', error)\n }\n}\n\n// https://fetch.spec.whatwg.org/#fetch-method\nfunction fetch (input, init = {}) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })\n\n // 1. Let p be a new promise.\n const p = createDeferredPromise()\n\n // 2. Let requestObject be the result of invoking the initial value of\n // Request as constructor with input and init as arguments. If this throws\n // an exception, reject p with it and return p.\n let requestObject\n\n try {\n requestObject = new Request(input, init)\n } catch (e) {\n p.reject(e)\n return p.promise\n }\n\n // 3. Let request be requestObject’s request.\n const request = requestObject[kState]\n\n // 4. If requestObject’s signal’s aborted flag is set, then:\n if (requestObject.signal.aborted) {\n // 1. Abort the fetch() call with p, request, null, and\n // requestObject’s signal’s abort reason.\n abortFetch(p, request, null, requestObject.signal.reason)\n\n // 2. Return p.\n return p.promise\n }\n\n // 5. Let globalObject be request’s client’s global object.\n const globalObject = request.client.globalObject\n\n // 6. If globalObject is a ServiceWorkerGlobalScope object, then set\n // request’s service-workers mode to \"none\".\n if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {\n request.serviceWorkers = 'none'\n }\n\n // 7. Let responseObject be null.\n let responseObject = null\n\n // 8. Let relevantRealm be this’s relevant Realm.\n const relevantRealm = null\n\n // 9. Let locallyAborted be false.\n let locallyAborted = false\n\n // 10. Let controller be null.\n let controller = null\n\n // 11. Add the following abort steps to requestObject’s signal:\n addAbortListener(\n requestObject.signal,\n () => {\n // 1. Set locallyAborted to true.\n locallyAborted = true\n\n // 2. Assert: controller is non-null.\n assert(controller != null)\n\n // 3. Abort controller with requestObject’s signal’s abort reason.\n controller.abort(requestObject.signal.reason)\n\n // 4. Abort the fetch() call with p, request, responseObject,\n // and requestObject’s signal’s abort reason.\n abortFetch(p, request, responseObject, requestObject.signal.reason)\n }\n )\n\n // 12. Let handleFetchDone given response response be to finalize and\n // report timing with response, globalObject, and \"fetch\".\n const handleFetchDone = (response) =>\n finalizeAndReportTiming(response, 'fetch')\n\n // 13. Set controller to the result of calling fetch given request,\n // with processResponseEndOfBody set to handleFetchDone, and processResponse\n // given response being these substeps:\n\n const processResponse = (response) => {\n // 1. If locallyAborted is true, terminate these substeps.\n if (locallyAborted) {\n return Promise.resolve()\n }\n\n // 2. If response’s aborted flag is set, then:\n if (response.aborted) {\n // 1. Let deserializedError be the result of deserialize a serialized\n // abort reason given controller’s serialized abort reason and\n // relevantRealm.\n\n // 2. Abort the fetch() call with p, request, responseObject, and\n // deserializedError.\n\n abortFetch(p, request, responseObject, controller.serializedAbortReason)\n return Promise.resolve()\n }\n\n // 3. 
If response is a network error, then reject p with a TypeError\n // and terminate these substeps.\n if (response.type === 'error') {\n p.reject(\n Object.assign(new TypeError('fetch failed'), { cause: response.error })\n )\n return Promise.resolve()\n }\n\n // 4. Set responseObject to the result of creating a Response object,\n // given response, \"immutable\", and relevantRealm.\n responseObject = new Response()\n responseObject[kState] = response\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kHeadersList] = response.headersList\n responseObject[kHeaders][kGuard] = 'immutable'\n responseObject[kHeaders][kRealm] = relevantRealm\n\n // 5. Resolve p with responseObject.\n p.resolve(responseObject)\n }\n\n controller = fetching({\n request,\n processResponseEndOfBody: handleFetchDone,\n processResponse,\n dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici\n })\n\n // 14. Return p.\n return p.promise\n}\n\n// https://fetch.spec.whatwg.org/#finalize-and-report-timing\nfunction finalizeAndReportTiming (response, initiatorType = 'other') {\n // 1. If response is an aborted network error, then return.\n if (response.type === 'error' && response.aborted) {\n return\n }\n\n // 2. If response’s URL list is null or empty, then return.\n if (!response.urlList?.length) {\n return\n }\n\n // 3. Let originalURL be response’s URL list[0].\n const originalURL = response.urlList[0]\n\n // 4. Let timingInfo be response’s timing info.\n let timingInfo = response.timingInfo\n\n // 5. Let cacheState be response’s cache state.\n let cacheState = response.cacheState\n\n // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return.\n if (!urlIsHttpHttpsScheme(originalURL)) {\n return\n }\n\n // 7. If timingInfo is null, then return.\n if (timingInfo === null) {\n return\n }\n\n // 8. If response’s timing allow passed flag is not set, then:\n if (!response.timingAllowPassed) {\n // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo.\n timingInfo = createOpaqueTimingInfo({\n startTime: timingInfo.startTime\n })\n\n // 2. Set cacheState to the empty string.\n cacheState = ''\n }\n\n // 9. Set timingInfo’s end time to the coarsened shared current time\n // given global’s relevant settings object’s cross-origin isolated\n // capability.\n // TODO: given global’s relevant settings object’s cross-origin isolated\n // capability?\n timingInfo.endTime = coarsenedSharedCurrentTime()\n\n // 10. Set response’s timing info to timingInfo.\n response.timingInfo = timingInfo\n\n // 11. Mark resource timing for timingInfo, originalURL, initiatorType,\n // global, and cacheState.\n markResourceTiming(\n timingInfo,\n originalURL,\n initiatorType,\n globalThis,\n cacheState\n )\n}\n\n// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing\nfunction markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {\n if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {\n performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)\n }\n}\n\n// https://fetch.spec.whatwg.org/#abort-fetch\nfunction abortFetch (p, request, responseObject, error) {\n // Note: AbortSignal.reason was added in node v17.2.0\n // which would give us an undefined error to reject with.\n // Remove this once node v16 is no longer supported.\n if (!error) {\n error = new DOMException('The operation was aborted.', 'AbortError')\n }\n\n // 1. Reject promise with error.\n p.reject(error)\n\n // 2. 
If request’s body is not null and is readable, then cancel request’s\n // body with error.\n if (request.body != null && isReadable(request.body?.stream)) {\n request.body.stream.cancel(error).catch((err) => {\n if (err.code === 'ERR_INVALID_STATE') {\n // Node bug?\n return\n }\n throw err\n })\n }\n\n // 3. If responseObject is null, then return.\n if (responseObject == null) {\n return\n }\n\n // 4. Let response be responseObject’s response.\n const response = responseObject[kState]\n\n // 5. If response’s body is not null and is readable, then error response’s\n // body with error.\n if (response.body != null && isReadable(response.body?.stream)) {\n response.body.stream.cancel(error).catch((err) => {\n if (err.code === 'ERR_INVALID_STATE') {\n // Node bug?\n return\n }\n throw err\n })\n }\n}\n\n// https://fetch.spec.whatwg.org/#fetching\nfunction fetching ({\n request,\n processRequestBodyChunkLength,\n processRequestEndOfBody,\n processResponse,\n processResponseEndOfBody,\n processResponseConsumeBody,\n useParallelQueue = false,\n dispatcher // undici\n}) {\n // 1. Let taskDestination be null.\n let taskDestination = null\n\n // 2. Let crossOriginIsolatedCapability be false.\n let crossOriginIsolatedCapability = false\n\n // 3. If request’s client is non-null, then:\n if (request.client != null) {\n // 1. Set taskDestination to request’s client’s global object.\n taskDestination = request.client.globalObject\n\n // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin\n // isolated capability.\n crossOriginIsolatedCapability =\n request.client.crossOriginIsolatedCapability\n }\n\n // 4. If useParallelQueue is true, then set taskDestination to the result of\n // starting a new parallel queue.\n // TODO\n\n // 5. Let timingInfo be a new fetch timing info whose start time and\n // post-redirect start time are the coarsened shared current time given\n // crossOriginIsolatedCapability.\n const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)\n const timingInfo = createOpaqueTimingInfo({\n startTime: currenTime\n })\n\n // 6. Let fetchParams be a new fetch params whose\n // request is request,\n // timing info is timingInfo,\n // process request body chunk length is processRequestBodyChunkLength,\n // process request end-of-body is processRequestEndOfBody,\n // process response is processResponse,\n // process response consume body is processResponseConsumeBody,\n // process response end-of-body is processResponseEndOfBody,\n // task destination is taskDestination,\n // and cross-origin isolated capability is crossOriginIsolatedCapability.\n const fetchParams = {\n controller: new Fetch(dispatcher),\n request,\n timingInfo,\n processRequestBodyChunkLength,\n processRequestEndOfBody,\n processResponse,\n processResponseConsumeBody,\n processResponseEndOfBody,\n taskDestination,\n crossOriginIsolatedCapability\n }\n\n // 7. If request’s body is a byte sequence, then set request’s body to\n // request’s body as a body.\n // NOTE: Since fetching is only called from fetch, body should already be\n // extracted.\n assert(!request.body || request.body.stream)\n\n // 8. If request’s window is \"client\", then set request’s window to request’s\n // client, if request’s client’s global object is a Window object; otherwise\n // \"no-window\".\n if (request.window === 'client') {\n // TODO: What if request.client is null?\n request.window =\n request.client?.globalObject?.constructor?.name === 'Window'\n ? request.client\n : 'no-window'\n }\n\n // 9. 
If request’s origin is \"client\", then set request’s origin to request’s\n // client’s origin.\n if (request.origin === 'client') {\n // TODO: What if request.client is null?\n request.origin = request.client?.origin\n }\n\n // 10. If all of the following conditions are true:\n // TODO\n\n // 11. If request’s policy container is \"client\", then:\n if (request.policyContainer === 'client') {\n // 1. If request’s client is non-null, then set request’s policy\n // container to a clone of request’s client’s policy container. [HTML]\n if (request.client != null) {\n request.policyContainer = clonePolicyContainer(\n request.client.policyContainer\n )\n } else {\n // 2. Otherwise, set request’s policy container to a new policy\n // container.\n request.policyContainer = makePolicyContainer()\n }\n }\n\n // 12. If request’s header list does not contain `Accept`, then:\n if (!request.headersList.contains('accept')) {\n // 1. Let value be `*/*`.\n const value = '*/*'\n\n // 2. A user agent should set value to the first matching statement, if\n // any, switching on request’s destination:\n // \"document\"\n // \"frame\"\n // \"iframe\"\n // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8`\n // \"image\"\n // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5`\n // \"style\"\n // `text/css,*/*;q=0.1`\n // TODO\n\n // 3. Append `Accept`/value to request’s header list.\n request.headersList.append('accept', value)\n }\n\n // 13. If request’s header list does not contain `Accept-Language`, then\n // user agents should append `Accept-Language`/an appropriate value to\n // request’s header list.\n if (!request.headersList.contains('accept-language')) {\n request.headersList.append('accept-language', '*')\n }\n\n // 14. If request’s priority is null, then use request’s initiator and\n // destination appropriately in setting request’s priority to a\n // user-agent-defined object.\n if (request.priority === null) {\n // TODO\n }\n\n // 15. If request is a subresource request, then:\n if (subresourceSet.has(request.destination)) {\n // TODO\n }\n\n // 16. Run main fetch given fetchParams.\n mainFetch(fetchParams)\n .catch(err => {\n fetchParams.controller.terminate(err)\n })\n\n // 17. Return fetchParam's controller\n return fetchParams.controller\n}\n\n// https://fetch.spec.whatwg.org/#concept-main-fetch\nasync function mainFetch (fetchParams, recursive = false) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. If request’s local-URLs-only flag is set and request’s current URL is\n // not local, then set response to a network error.\n if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {\n response = makeNetworkError('local URLs only')\n }\n\n // 4. Run report Content Security Policy violations for request.\n // TODO\n\n // 5. Upgrade request to a potentially trustworthy URL, if appropriate.\n tryUpgradeRequestToAPotentiallyTrustworthyURL(request)\n\n // 6. If should request be blocked due to a bad port, should fetching request\n // be blocked as mixed content, or should request be blocked by Content\n // Security Policy returns blocked, then set response to a network error.\n if (requestBadPort(request) === 'blocked') {\n response = makeNetworkError('bad port')\n }\n // TODO: should fetching request be blocked as mixed content?\n // TODO: should request be blocked by Content Security Policy?\n\n // 7. 
If request’s referrer policy is the empty string, then set request’s\n // referrer policy to request’s policy container’s referrer policy.\n if (request.referrerPolicy === '') {\n request.referrerPolicy = request.policyContainer.referrerPolicy\n }\n\n // 8. If request’s referrer is not \"no-referrer\", then set request’s\n // referrer to the result of invoking determine request’s referrer.\n if (request.referrer !== 'no-referrer') {\n request.referrer = determineRequestsReferrer(request)\n }\n\n // 9. Set request’s current URL’s scheme to \"https\" if all of the following\n // conditions are true:\n // - request’s current URL’s scheme is \"http\"\n // - request’s current URL’s host is a domain\n // - Matching request’s current URL’s host per Known HSTS Host Domain Name\n // Matching results in either a superdomain match with an asserted\n // includeSubDomains directive or a congruent match (with or without an\n // asserted includeSubDomains directive). [HSTS]\n // TODO\n\n // 10. If recursive is false, then run the remaining steps in parallel.\n // TODO\n\n // 11. If response is null, then set response to the result of running\n // the steps corresponding to the first matching statement:\n if (response === null) {\n response = await (async () => {\n const currentURL = requestCurrentURL(request)\n\n if (\n // - request’s current URL’s origin is same origin with request’s origin,\n // and request’s response tainting is \"basic\"\n (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||\n // request’s current URL’s scheme is \"data\"\n (currentURL.protocol === 'data:') ||\n // - request’s mode is \"navigate\" or \"websocket\"\n (request.mode === 'navigate' || request.mode === 'websocket')\n ) {\n // 1. Set request’s response tainting to \"basic\".\n request.responseTainting = 'basic'\n\n // 2. Return the result of running scheme fetch given fetchParams.\n return await schemeFetch(fetchParams)\n }\n\n // request’s mode is \"same-origin\"\n if (request.mode === 'same-origin') {\n // 1. Return a network error.\n return makeNetworkError('request mode cannot be \"same-origin\"')\n }\n\n // request’s mode is \"no-cors\"\n if (request.mode === 'no-cors') {\n // 1. If request’s redirect mode is not \"follow\", then return a network\n // error.\n if (request.redirect !== 'follow') {\n return makeNetworkError(\n 'redirect mode cannot be \"follow\" for \"no-cors\" request'\n )\n }\n\n // 2. Set request’s response tainting to \"opaque\".\n request.responseTainting = 'opaque'\n\n // 3. Return the result of running scheme fetch given fetchParams.\n return await schemeFetch(fetchParams)\n }\n\n // request’s current URL’s scheme is not an HTTP(S) scheme\n if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {\n // Return a network error.\n return makeNetworkError('URL scheme must be a HTTP(S) scheme')\n }\n\n // - request’s use-CORS-preflight flag is set\n // - request’s unsafe-request flag is set and either request’s method is\n // not a CORS-safelisted method or CORS-unsafe request-header names with\n // request’s header list is not empty\n // 1. Set request’s response tainting to \"cors\".\n // 2. Let corsWithPreflightResponse be the result of running HTTP fetch\n // given fetchParams and true.\n // 3. If corsWithPreflightResponse is a network error, then clear cache\n // entries using request.\n // 4. Return corsWithPreflightResponse.\n // TODO\n\n // Otherwise\n // 1. Set request’s response tainting to \"cors\".\n request.responseTainting = 'cors'\n\n // 2. 
Return the result of running HTTP fetch given fetchParams.\n return await httpFetch(fetchParams)\n })()\n }\n\n // 12. If recursive is true, then return response.\n if (recursive) {\n return response\n }\n\n // 13. If response is not a network error and response is not a filtered\n // response, then:\n if (response.status !== 0 && !response.internalResponse) {\n // If request’s response tainting is \"cors\", then:\n if (request.responseTainting === 'cors') {\n // 1. Let headerNames be the result of extracting header list values\n // given `Access-Control-Expose-Headers` and response’s header list.\n // TODO\n // 2. If request’s credentials mode is not \"include\" and headerNames\n // contains `*`, then set response’s CORS-exposed header-name list to\n // all unique header names in response’s header list.\n // TODO\n // 3. Otherwise, if headerNames is not null or failure, then set\n // response’s CORS-exposed header-name list to headerNames.\n // TODO\n }\n\n // Set response to the following filtered response with response as its\n // internal response, depending on request’s response tainting:\n if (request.responseTainting === 'basic') {\n response = filterResponse(response, 'basic')\n } else if (request.responseTainting === 'cors') {\n response = filterResponse(response, 'cors')\n } else if (request.responseTainting === 'opaque') {\n response = filterResponse(response, 'opaque')\n } else {\n assert(false)\n }\n }\n\n // 14. Let internalResponse be response, if response is a network error,\n // and response’s internal response otherwise.\n let internalResponse =\n response.status === 0 ? response : response.internalResponse\n\n // 15. If internalResponse’s URL list is empty, then set it to a clone of\n // request’s URL list.\n if (internalResponse.urlList.length === 0) {\n internalResponse.urlList.push(...request.urlList)\n }\n\n // 16. If request’s timing allow failed flag is unset, then set\n // internalResponse’s timing allow passed flag.\n if (!request.timingAllowFailed) {\n response.timingAllowPassed = true\n }\n\n // 17. If response is not a network error and any of the following returns\n // blocked\n // - should internalResponse to request be blocked as mixed content\n // - should internalResponse to request be blocked by Content Security Policy\n // - should internalResponse to request be blocked due to its MIME type\n // - should internalResponse to request be blocked due to nosniff\n // TODO\n\n // 18. If response’s type is \"opaque\", internalResponse’s status is 206,\n // internalResponse’s range-requested flag is set, and request’s header\n // list does not contain `Range`, then set response and internalResponse\n // to a network error.\n if (\n response.type === 'opaque' &&\n internalResponse.status === 206 &&\n internalResponse.rangeRequested &&\n !request.headers.contains('range')\n ) {\n response = internalResponse = makeNetworkError()\n }\n\n // 19. If response is not a network error and either request’s method is\n // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status,\n // set internalResponse’s body to null and disregard any enqueuing toward\n // it (if any).\n if (\n response.status !== 0 &&\n (request.method === 'HEAD' ||\n request.method === 'CONNECT' ||\n nullBodyStatus.includes(internalResponse.status))\n ) {\n internalResponse.body = null\n fetchParams.controller.dump = true\n }\n\n // 20. If request’s integrity metadata is not the empty string, then:\n if (request.integrity) {\n // 1. 
Let processBodyError be this step: run fetch finale given fetchParams\n // and a network error.\n const processBodyError = (reason) =>\n fetchFinale(fetchParams, makeNetworkError(reason))\n\n // 2. If request’s response tainting is \"opaque\", or response’s body is null,\n // then run processBodyError and abort these steps.\n if (request.responseTainting === 'opaque' || response.body == null) {\n processBodyError(response.error)\n return\n }\n\n // 3. Let processBody given bytes be these steps:\n const processBody = (bytes) => {\n // 1. If bytes do not match request’s integrity metadata,\n // then run processBodyError and abort these steps. [SRI]\n if (!bytesMatch(bytes, request.integrity)) {\n processBodyError('integrity mismatch')\n return\n }\n\n // 2. Set response’s body to bytes as a body.\n response.body = safelyExtractBody(bytes)[0]\n\n // 3. Run fetch finale given fetchParams and response.\n fetchFinale(fetchParams, response)\n }\n\n // 4. Fully read response’s body given processBody and processBodyError.\n await fullyReadBody(response.body, processBody, processBodyError)\n } else {\n // 21. Otherwise, run fetch finale given fetchParams and response.\n fetchFinale(fetchParams, response)\n }\n}\n\n// https://fetch.spec.whatwg.org/#concept-scheme-fetch\n// given a fetch params fetchParams\nfunction schemeFetch (fetchParams) {\n // Note: since the connection is destroyed on redirect, which sets fetchParams to a\n // cancelled state, we do not want this condition to trigger *unless* there have been\n // no redirects. See https://github.com/nodejs/undici/issues/1776\n // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.\n if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {\n return Promise.resolve(makeAppropriateNetworkError(fetchParams))\n }\n\n // 2. Let request be fetchParams’s request.\n const { request } = fetchParams\n\n const { protocol: scheme } = requestCurrentURL(request)\n\n // 3. Switch on request’s current URL’s scheme and run the associated steps:\n switch (scheme) {\n case 'about:': {\n // If request’s current URL’s path is the string \"blank\", then return a new response\n // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) »,\n // and body is the empty byte sequence as a body.\n\n // Otherwise, return a network error.\n return Promise.resolve(makeNetworkError('about scheme is not supported'))\n }\n case 'blob:': {\n if (!resolveObjectURL) {\n resolveObjectURL = require('buffer').resolveObjectURL\n }\n\n // 1. Let blobURLEntry be request’s current URL’s blob URL entry.\n const blobURLEntry = requestCurrentURL(request)\n\n // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56\n // Buffer.resolveObjectURL does not ignore URL queries.\n if (blobURLEntry.search.length !== 0) {\n return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.'))\n }\n\n const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())\n\n // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s\n // object is not a Blob object, then return a network error.\n if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {\n return Promise.resolve(makeNetworkError('invalid method'))\n }\n\n // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object.\n const bodyWithType = safelyExtractBody(blobURLEntryObject)\n\n // 4. 
Let body be bodyWithType’s body.\n const body = bodyWithType[0]\n\n // 5. Let length be body’s length, serialized and isomorphic encoded.\n const length = isomorphicEncode(`${body.length}`)\n\n // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence.\n const type = bodyWithType[1] ?? ''\n\n // 7. Return a new response whose status message is `OK`, header list is\n // « (`Content-Length`, length), (`Content-Type`, type) », and body is body.\n const response = makeResponse({\n statusText: 'OK',\n headersList: [\n ['content-length', { name: 'Content-Length', value: length }],\n ['content-type', { name: 'Content-Type', value: type }]\n ]\n })\n\n response.body = body\n\n return Promise.resolve(response)\n }\n case 'data:': {\n // 1. Let dataURLStruct be the result of running the\n // data: URL processor on request’s current URL.\n const currentURL = requestCurrentURL(request)\n const dataURLStruct = dataURLProcessor(currentURL)\n\n // 2. If dataURLStruct is failure, then return a\n // network error.\n if (dataURLStruct === 'failure') {\n return Promise.resolve(makeNetworkError('failed to fetch the data URL'))\n }\n\n // 3. Let mimeType be dataURLStruct’s MIME type, serialized.\n const mimeType = serializeAMimeType(dataURLStruct.mimeType)\n\n // 4. Return a response whose status message is `OK`,\n // header list is « (`Content-Type`, mimeType) »,\n // and body is dataURLStruct’s body as a body.\n return Promise.resolve(makeResponse({\n statusText: 'OK',\n headersList: [\n ['content-type', { name: 'Content-Type', value: mimeType }]\n ],\n body: safelyExtractBody(dataURLStruct.body)[0]\n }))\n }\n case 'file:': {\n // For now, unfortunate as it is, file URLs are left as an exercise for the reader.\n // When in doubt, return a network error.\n return Promise.resolve(makeNetworkError('not implemented... yet...'))\n }\n case 'http:':\n case 'https:': {\n // Return the result of running HTTP fetch given fetchParams.\n\n return httpFetch(fetchParams)\n .catch((err) => makeNetworkError(err))\n }\n default: {\n return Promise.resolve(makeNetworkError('unknown scheme'))\n }\n }\n}\n\n// https://fetch.spec.whatwg.org/#finalize-response\nfunction finalizeResponse (fetchParams, response) {\n // 1. Set fetchParams’s request’s done flag.\n fetchParams.request.done = true\n\n // 2, If fetchParams’s process response done is not null, then queue a fetch\n // task to run fetchParams’s process response done given response, with\n // fetchParams’s task destination.\n if (fetchParams.processResponseDone != null) {\n queueMicrotask(() => fetchParams.processResponseDone(response))\n }\n}\n\n// https://fetch.spec.whatwg.org/#fetch-finale\nfunction fetchFinale (fetchParams, response) {\n // 1. If response is a network error, then:\n if (response.type === 'error') {\n // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ».\n response.urlList = [fetchParams.request.urlList[0]]\n\n // 2. Set response’s timing info to the result of creating an opaque timing\n // info for fetchParams’s timing info.\n response.timingInfo = createOpaqueTimingInfo({\n startTime: fetchParams.timingInfo.startTime\n })\n }\n\n // 2. Let processResponseEndOfBody be the following steps:\n const processResponseEndOfBody = () => {\n // 1. 
Set fetchParams’s request’s done flag.\n fetchParams.request.done = true\n\n // If fetchParams’s process response end-of-body is not null,\n // then queue a fetch task to run fetchParams’s process response\n // end-of-body given response with fetchParams’s task destination.\n if (fetchParams.processResponseEndOfBody != null) {\n queueMicrotask(() => fetchParams.processResponseEndOfBody(response))\n }\n }\n\n // 3. If fetchParams’s process response is non-null, then queue a fetch task\n // to run fetchParams’s process response given response, with fetchParams’s\n // task destination.\n if (fetchParams.processResponse != null) {\n queueMicrotask(() => fetchParams.processResponse(response))\n }\n\n // 4. If response’s body is null, then run processResponseEndOfBody.\n if (response.body == null) {\n processResponseEndOfBody()\n } else {\n // 5. Otherwise:\n\n // 1. Let transformStream be a new a TransformStream.\n\n // 2. Let identityTransformAlgorithm be an algorithm which, given chunk,\n // enqueues chunk in transformStream.\n const identityTransformAlgorithm = (chunk, controller) => {\n controller.enqueue(chunk)\n }\n\n // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm\n // and flushAlgorithm set to processResponseEndOfBody.\n const transformStream = new TransformStream({\n start () {},\n transform: identityTransformAlgorithm,\n flush: processResponseEndOfBody\n }, {\n size () {\n return 1\n }\n }, {\n size () {\n return 1\n }\n })\n\n // 4. Set response’s body to the result of piping response’s body through transformStream.\n response.body = { stream: response.body.stream.pipeThrough(transformStream) }\n }\n\n // 6. If fetchParams’s process response consume body is non-null, then:\n if (fetchParams.processResponseConsumeBody != null) {\n // 1. Let processBody given nullOrBytes be this step: run fetchParams’s\n // process response consume body given response and nullOrBytes.\n const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes)\n\n // 2. Let processBodyError be this step: run fetchParams’s process\n // response consume body given response and failure.\n const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure)\n\n // 3. If response’s body is null, then queue a fetch task to run processBody\n // given null, with fetchParams’s task destination.\n if (response.body == null) {\n queueMicrotask(() => processBody(null))\n } else {\n // 4. Otherwise, fully read response’s body given processBody, processBodyError,\n // and fetchParams’s task destination.\n return fullyReadBody(response.body, processBody, processBodyError)\n }\n return Promise.resolve()\n }\n}\n\n// https://fetch.spec.whatwg.org/#http-fetch\nasync function httpFetch (fetchParams) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. Let actualResponse be null.\n let actualResponse = null\n\n // 4. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 5. If request’s service-workers mode is \"all\", then:\n if (request.serviceWorkers === 'all') {\n // TODO\n }\n\n // 6. If response is null, then:\n if (response === null) {\n // 1. If makeCORSPreflight is true and one of these conditions is true:\n // TODO\n\n // 2. 
If request’s redirect mode is \"follow\", then set request’s\n // service-workers mode to \"none\".\n if (request.redirect === 'follow') {\n request.serviceWorkers = 'none'\n }\n\n // 3. Set response and actualResponse to the result of running\n // HTTP-network-or-cache fetch given fetchParams.\n actualResponse = response = await httpNetworkOrCacheFetch(fetchParams)\n\n // 4. If request’s response tainting is \"cors\" and a CORS check\n // for request and response returns failure, then return a network error.\n if (\n request.responseTainting === 'cors' &&\n corsCheck(request, response) === 'failure'\n ) {\n return makeNetworkError('cors failure')\n }\n\n // 5. If the TAO check for request and response returns failure, then set\n // request’s timing allow failed flag.\n if (TAOCheck(request, response) === 'failure') {\n request.timingAllowFailed = true\n }\n }\n\n // 7. If either request’s response tainting or response’s type\n // is \"opaque\", and the cross-origin resource policy check with\n // request’s origin, request’s client, request’s destination,\n // and actualResponse returns blocked, then return a network error.\n if (\n (request.responseTainting === 'opaque' || response.type === 'opaque') &&\n crossOriginResourcePolicyCheck(\n request.origin,\n request.client,\n request.destination,\n actualResponse\n ) === 'blocked'\n ) {\n return makeNetworkError('blocked')\n }\n\n // 8. If actualResponse’s status is a redirect status, then:\n if (redirectStatusSet.has(actualResponse.status)) {\n // 1. If actualResponse’s status is not 303, request’s body is not null,\n // and the connection uses HTTP/2, then user agents may, and are even\n // encouraged to, transmit an RST_STREAM frame.\n // See, https://github.com/whatwg/fetch/issues/1288\n if (request.redirect !== 'manual') {\n fetchParams.controller.connection.destroy()\n }\n\n // 2. Switch on request’s redirect mode:\n if (request.redirect === 'error') {\n // Set response to a network error.\n response = makeNetworkError('unexpected redirect')\n } else if (request.redirect === 'manual') {\n // Set response to an opaque-redirect filtered response whose internal\n // response is actualResponse.\n // NOTE(spec): On the web this would return an `opaqueredirect` response,\n // but that doesn't make sense server side.\n // See https://github.com/nodejs/undici/issues/1193.\n response = actualResponse\n } else if (request.redirect === 'follow') {\n // Set response to the result of running HTTP-redirect fetch given\n // fetchParams and response.\n response = await httpRedirectFetch(fetchParams, response)\n } else {\n assert(false)\n }\n }\n\n // 9. Set response’s timing info to timingInfo.\n response.timingInfo = timingInfo\n\n // 10. Return response.\n return response\n}\n\n// https://fetch.spec.whatwg.org/#http-redirect-fetch\nfunction httpRedirectFetch (fetchParams, response) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let actualResponse be response, if response is not a filtered response,\n // and response’s internal response otherwise.\n const actualResponse = response.internalResponse\n ? response.internalResponse\n : response\n\n // 3. Let locationURL be actualResponse’s location URL given request’s current\n // URL’s fragment.\n let locationURL\n\n try {\n locationURL = responseLocationURL(\n actualResponse,\n requestCurrentURL(request).hash\n )\n\n // 4. 
If locationURL is null, then return response.\n if (locationURL == null) {\n return response\n }\n } catch (err) {\n // 5. If locationURL is failure, then return a network error.\n return Promise.resolve(makeNetworkError(err))\n }\n\n // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network\n // error.\n if (!urlIsHttpHttpsScheme(locationURL)) {\n return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme'))\n }\n\n // 7. If request’s redirect count is 20, then return a network error.\n if (request.redirectCount === 20) {\n return Promise.resolve(makeNetworkError('redirect count exceeded'))\n }\n\n // 8. Increase request’s redirect count by 1.\n request.redirectCount += 1\n\n // 9. If request’s mode is \"cors\", locationURL includes credentials, and\n // request’s origin is not same origin with locationURL’s origin, then return\n // a network error.\n if (\n request.mode === 'cors' &&\n (locationURL.username || locationURL.password) &&\n !sameOrigin(request, locationURL)\n ) {\n return Promise.resolve(makeNetworkError('cross origin not allowed for request mode \"cors\"'))\n }\n\n // 10. If request’s response tainting is \"cors\" and locationURL includes\n // credentials, then return a network error.\n if (\n request.responseTainting === 'cors' &&\n (locationURL.username || locationURL.password)\n ) {\n return Promise.resolve(makeNetworkError(\n 'URL cannot contain credentials for request mode \"cors\"'\n ))\n }\n\n // 11. If actualResponse’s status is not 303, request’s body is non-null,\n // and request’s body’s source is null, then return a network error.\n if (\n actualResponse.status !== 303 &&\n request.body != null &&\n request.body.source == null\n ) {\n return Promise.resolve(makeNetworkError())\n }\n\n // 12. If one of the following is true\n // - actualResponse’s status is 301 or 302 and request’s method is `POST`\n // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD`\n if (\n ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||\n (actualResponse.status === 303 &&\n !GET_OR_HEAD.includes(request.method))\n ) {\n // then:\n // 1. Set request’s method to `GET` and request’s body to null.\n request.method = 'GET'\n request.body = null\n\n // 2. For each headerName of request-body-header name, delete headerName from\n // request’s header list.\n for (const headerName of requestBodyHeader) {\n request.headersList.delete(headerName)\n }\n }\n\n // 13. If request’s current URL’s origin is not same origin with locationURL’s\n // origin, then for each headerName of CORS non-wildcard request-header name,\n // delete headerName from request’s header list.\n if (!sameOrigin(requestCurrentURL(request), locationURL)) {\n // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name\n request.headersList.delete('authorization')\n\n // https://fetch.spec.whatwg.org/#authentication-entries\n request.headersList.delete('proxy-authorization', true)\n\n // \"Cookie\" and \"Host\" are forbidden request-headers, which undici doesn't implement.\n request.headersList.delete('cookie')\n request.headersList.delete('host')\n }\n\n // 14. If request’s body is non-null, then set request’s body to the first return\n // value of safely extracting request’s body’s source.\n if (request.body != null) {\n assert(request.body.source != null)\n request.body = safelyExtractBody(request.body.source)[0]\n }\n\n // 15. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 16. 
Set timingInfo’s redirect end time and post-redirect start time to the\n // coarsened shared current time given fetchParams’s cross-origin isolated\n // capability.\n timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =\n coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)\n\n // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s\n // redirect start time to timingInfo’s start time.\n if (timingInfo.redirectStartTime === 0) {\n timingInfo.redirectStartTime = timingInfo.startTime\n }\n\n // 18. Append locationURL to request’s URL list.\n request.urlList.push(locationURL)\n\n // 19. Invoke set request’s referrer policy on redirect on request and\n // actualResponse.\n setRequestReferrerPolicyOnRedirect(request, actualResponse)\n\n // 20. Return the result of running main fetch given fetchParams and true.\n return mainFetch(fetchParams, true)\n}\n\n// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch\nasync function httpNetworkOrCacheFetch (\n fetchParams,\n isAuthenticationFetch = false,\n isNewConnectionFetch = false\n) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let httpFetchParams be null.\n let httpFetchParams = null\n\n // 3. Let httpRequest be null.\n let httpRequest = null\n\n // 4. Let response be null.\n let response = null\n\n // 5. Let storedResponse be null.\n // TODO: cache\n\n // 6. Let httpCache be null.\n const httpCache = null\n\n // 7. Let the revalidatingFlag be unset.\n const revalidatingFlag = false\n\n // 8. Run these steps, but abort when the ongoing fetch is terminated:\n\n // 1. If request’s window is \"no-window\" and request’s redirect mode is\n // \"error\", then set httpFetchParams to fetchParams and httpRequest to\n // request.\n if (request.window === 'no-window' && request.redirect === 'error') {\n httpFetchParams = fetchParams\n httpRequest = request\n } else {\n // Otherwise:\n\n // 1. Set httpRequest to a clone of request.\n httpRequest = makeRequest(request)\n\n // 2. Set httpFetchParams to a copy of fetchParams.\n httpFetchParams = { ...fetchParams }\n\n // 3. Set httpFetchParams’s request to httpRequest.\n httpFetchParams.request = httpRequest\n }\n\n // 3. Let includeCredentials be true if one of\n const includeCredentials =\n request.credentials === 'include' ||\n (request.credentials === 'same-origin' &&\n request.responseTainting === 'basic')\n\n // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s\n // body is non-null; otherwise null.\n const contentLength = httpRequest.body ? httpRequest.body.length : null\n\n // 5. Let contentLengthHeaderValue be null.\n let contentLengthHeaderValue = null\n\n // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or\n // `PUT`, then set contentLengthHeaderValue to `0`.\n if (\n httpRequest.body == null &&\n ['POST', 'PUT'].includes(httpRequest.method)\n ) {\n contentLengthHeaderValue = '0'\n }\n\n // 7. If contentLength is non-null, then set contentLengthHeaderValue to\n // contentLength, serialized and isomorphic encoded.\n if (contentLength != null) {\n contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)\n }\n\n // 8. If contentLengthHeaderValue is non-null, then append\n // `Content-Length`/contentLengthHeaderValue to httpRequest’s header\n // list.\n if (contentLengthHeaderValue != null) {\n httpRequest.headersList.append('content-length', contentLengthHeaderValue)\n }\n\n // 9. 
If contentLengthHeaderValue is non-null, then append (`Content-Length`,\n // contentLengthHeaderValue) to httpRequest’s header list.\n\n // 10. If contentLength is non-null and httpRequest’s keepalive is true,\n // then:\n if (contentLength != null && httpRequest.keepalive) {\n // NOTE: keepalive is a noop outside of browser context.\n }\n\n // 11. If httpRequest’s referrer is a URL, then append\n // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,\n // to httpRequest’s header list.\n if (httpRequest.referrer instanceof URL) {\n httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))\n }\n\n // 12. Append a request `Origin` header for httpRequest.\n appendRequestOriginHeader(httpRequest)\n\n // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]\n appendFetchMetadata(httpRequest)\n\n // 14. If httpRequest’s header list does not contain `User-Agent`, then\n // user agents should append `User-Agent`/default `User-Agent` value to\n // httpRequest’s header list.\n if (!httpRequest.headersList.contains('user-agent')) {\n httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node')\n }\n\n // 15. If httpRequest’s cache mode is \"default\" and httpRequest’s header\n // list contains `If-Modified-Since`, `If-None-Match`,\n // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set\n // httpRequest’s cache mode to \"no-store\".\n if (\n httpRequest.cache === 'default' &&\n (httpRequest.headersList.contains('if-modified-since') ||\n httpRequest.headersList.contains('if-none-match') ||\n httpRequest.headersList.contains('if-unmodified-since') ||\n httpRequest.headersList.contains('if-match') ||\n httpRequest.headersList.contains('if-range'))\n ) {\n httpRequest.cache = 'no-store'\n }\n\n // 16. If httpRequest’s cache mode is \"no-cache\", httpRequest’s prevent\n // no-cache cache-control header modification flag is unset, and\n // httpRequest’s header list does not contain `Cache-Control`, then append\n // `Cache-Control`/`max-age=0` to httpRequest’s header list.\n if (\n httpRequest.cache === 'no-cache' &&\n !httpRequest.preventNoCacheCacheControlHeaderModification &&\n !httpRequest.headersList.contains('cache-control')\n ) {\n httpRequest.headersList.append('cache-control', 'max-age=0')\n }\n\n // 17. If httpRequest’s cache mode is \"no-store\" or \"reload\", then:\n if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {\n // 1. If httpRequest’s header list does not contain `Pragma`, then append\n // `Pragma`/`no-cache` to httpRequest’s header list.\n if (!httpRequest.headersList.contains('pragma')) {\n httpRequest.headersList.append('pragma', 'no-cache')\n }\n\n // 2. If httpRequest’s header list does not contain `Cache-Control`,\n // then append `Cache-Control`/`no-cache` to httpRequest’s header list.\n if (!httpRequest.headersList.contains('cache-control')) {\n httpRequest.headersList.append('cache-control', 'no-cache')\n }\n }\n\n // 18. If httpRequest’s header list contains `Range`, then append\n // `Accept-Encoding`/`identity` to httpRequest’s header list.\n if (httpRequest.headersList.contains('range')) {\n httpRequest.headersList.append('accept-encoding', 'identity')\n }\n\n // 19. Modify httpRequest’s header list per HTTP. 
Do not append a given\n // header if httpRequest’s header list contains that header’s name.\n // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129\n if (!httpRequest.headersList.contains('accept-encoding')) {\n if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {\n httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')\n } else {\n httpRequest.headersList.append('accept-encoding', 'gzip, deflate')\n }\n }\n\n httpRequest.headersList.delete('host')\n\n // 20. If includeCredentials is true, then:\n if (includeCredentials) {\n // 1. If the user agent is not configured to block cookies for httpRequest\n // (see section 7 of [COOKIES]), then:\n // TODO: credentials\n // 2. If httpRequest’s header list does not contain `Authorization`, then:\n // TODO: credentials\n }\n\n // 21. If there’s a proxy-authentication entry, use it as appropriate.\n // TODO: proxy-authentication\n\n // 22. Set httpCache to the result of determining the HTTP cache\n // partition, given httpRequest.\n // TODO: cache\n\n // 23. If httpCache is null, then set httpRequest’s cache mode to\n // \"no-store\".\n if (httpCache == null) {\n httpRequest.cache = 'no-store'\n }\n\n // 24. If httpRequest’s cache mode is neither \"no-store\" nor \"reload\",\n // then:\n if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') {\n // TODO: cache\n }\n\n // 9. If aborted, then return the appropriate network error for fetchParams.\n // TODO\n\n // 10. If response is null, then:\n if (response == null) {\n // 1. If httpRequest’s cache mode is \"only-if-cached\", then return a\n // network error.\n if (httpRequest.mode === 'only-if-cached') {\n return makeNetworkError('only if cached')\n }\n\n // 2. Let forwardResponse be the result of running HTTP-network fetch\n // given httpFetchParams, includeCredentials, and isNewConnectionFetch.\n const forwardResponse = await httpNetworkFetch(\n httpFetchParams,\n includeCredentials,\n isNewConnectionFetch\n )\n\n // 3. If httpRequest’s method is unsafe and forwardResponse’s status is\n // in the range 200 to 399, inclusive, invalidate appropriate stored\n // responses in httpCache, as per the \"Invalidation\" chapter of HTTP\n // Caching, and set storedResponse to null. [HTTP-CACHING]\n if (\n !safeMethodsSet.has(httpRequest.method) &&\n forwardResponse.status >= 200 &&\n forwardResponse.status <= 399\n ) {\n // TODO: cache\n }\n\n // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,\n // then:\n if (revalidatingFlag && forwardResponse.status === 304) {\n // TODO: cache\n }\n\n // 5. If response is null, then:\n if (response == null) {\n // 1. Set response to forwardResponse.\n response = forwardResponse\n\n // 2. Store httpRequest and forwardResponse in httpCache, as per the\n // \"Storing Responses in Caches\" chapter of HTTP Caching. [HTTP-CACHING]\n // TODO: cache\n }\n }\n\n // 11. Set response’s URL list to a clone of httpRequest’s URL list.\n response.urlList = [...httpRequest.urlList]\n\n // 12. If httpRequest’s header list contains `Range`, then set response’s\n // range-requested flag.\n if (httpRequest.headersList.contains('range')) {\n response.rangeRequested = true\n }\n\n // 13. Set response’s request-includes-credentials to includeCredentials.\n response.requestIncludesCredentials = includeCredentials\n\n // 14. 
If response’s status is 401, httpRequest’s response tainting is not\n // \"cors\", includeCredentials is true, and request’s window is an environment\n // settings object, then:\n // TODO\n\n // 15. If response’s status is 407, then:\n if (response.status === 407) {\n // 1. If request’s window is \"no-window\", then return a network error.\n if (request.window === 'no-window') {\n return makeNetworkError()\n }\n\n // 2. ???\n\n // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.\n if (isCancelled(fetchParams)) {\n return makeAppropriateNetworkError(fetchParams)\n }\n\n // 4. Prompt the end user as appropriate in request’s window and store\n // the result as a proxy-authentication entry. [HTTP-AUTH]\n // TODO: Invoke some kind of callback?\n\n // 5. Set response to the result of running HTTP-network-or-cache fetch given\n // fetchParams.\n // TODO\n return makeNetworkError('proxy authentication required')\n }\n\n // 16. If all of the following are true\n if (\n // response’s status is 421\n response.status === 421 &&\n // isNewConnectionFetch is false\n !isNewConnectionFetch &&\n // request’s body is null, or request’s body is non-null and request’s body’s source is non-null\n (request.body == null || request.body.source != null)\n ) {\n // then:\n\n // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.\n if (isCancelled(fetchParams)) {\n return makeAppropriateNetworkError(fetchParams)\n }\n\n // 2. Set response to the result of running HTTP-network-or-cache\n // fetch given fetchParams, isAuthenticationFetch, and true.\n\n // TODO (spec): The spec doesn't specify this but we need to cancel\n // the active response before we can start a new one.\n // https://github.com/whatwg/fetch/issues/1293\n fetchParams.controller.connection.destroy()\n\n response = await httpNetworkOrCacheFetch(\n fetchParams,\n isAuthenticationFetch,\n true\n )\n }\n\n // 17. If isAuthenticationFetch is true, then create an authentication entry\n if (isAuthenticationFetch) {\n // TODO\n }\n\n // 18. Return response.\n return response\n}\n\n// https://fetch.spec.whatwg.org/#http-network-fetch\nasync function httpNetworkFetch (\n fetchParams,\n includeCredentials = false,\n forceNewConnection = false\n) {\n assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed)\n\n fetchParams.controller.connection = {\n abort: null,\n destroyed: false,\n destroy (err) {\n if (!this.destroyed) {\n this.destroyed = true\n this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))\n }\n }\n }\n\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 4. Let httpCache be the result of determining the HTTP cache partition,\n // given request.\n // TODO: cache\n const httpCache = null\n\n // 5. If httpCache is null, then set request’s cache mode to \"no-store\".\n if (httpCache == null) {\n request.cache = 'no-store'\n }\n\n // 6. Let networkPartitionKey be the result of determining the network\n // partition key given request.\n // TODO\n\n // 7. Let newConnection be \"yes\" if forceNewConnection is true; otherwise\n // \"no\".\n const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars\n\n // 8. 
Switch on request’s mode:\n if (request.mode === 'websocket') {\n // Let connection be the result of obtaining a WebSocket connection,\n // given request’s current URL.\n // TODO\n } else {\n // Let connection be the result of obtaining a connection, given\n // networkPartitionKey, request’s current URL’s origin,\n // includeCredentials, and forceNewConnection.\n // TODO\n }\n\n // 9. Run these steps, but abort when the ongoing fetch is terminated:\n\n // 1. If connection is failure, then return a network error.\n\n // 2. Set timingInfo’s final connection timing info to the result of\n // calling clamp and coarsen connection timing info with connection’s\n // timing info, timingInfo’s post-redirect start time, and fetchParams’s\n // cross-origin isolated capability.\n\n // 3. If connection is not an HTTP/2 connection, request’s body is non-null,\n // and request’s body’s source is null, then append (`Transfer-Encoding`,\n // `chunked`) to request’s header list.\n\n // 4. Set timingInfo’s final network-request start time to the coarsened\n // shared current time given fetchParams’s cross-origin isolated\n // capability.\n\n // 5. Set response to the result of making an HTTP request over connection\n // using request with the following caveats:\n\n // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]\n // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]\n\n // - If request’s body is non-null, and request’s body’s source is null,\n // then the user agent may have a buffer of up to 64 kibibytes and store\n // a part of request’s body in that buffer. If the user agent reads from\n // request’s body beyond that buffer’s size and the user agent needs to\n // resend request, then instead return a network error.\n\n // - Set timingInfo’s final network-response start time to the coarsened\n // shared current time given fetchParams’s cross-origin isolated capability,\n // immediately after the user agent’s HTTP parser receives the first byte\n // of the response (e.g., frame header bytes for HTTP/2 or response status\n // line for HTTP/1.x).\n\n // - Wait until all the headers are transmitted.\n\n // - Any responses whose status is in the range 100 to 199, inclusive,\n // and is not 101, are to be ignored, except for the purposes of setting\n // timingInfo’s final network-response start time above.\n\n // - If request’s header list contains `Transfer-Encoding`/`chunked` and\n // response is transferred via HTTP/1.0 or older, then return a network\n // error.\n\n // - If the HTTP request results in a TLS client certificate dialog, then:\n\n // 1. If request’s window is an environment settings object, make the\n // dialog available in request’s window.\n\n // 2. Otherwise, return a network error.\n\n // To transmit request’s body body, run these steps:\n let requestBody = null\n // 1. If body is null and fetchParams’s process request end-of-body is\n // non-null, then queue a fetch task given fetchParams’s process request\n // end-of-body and fetchParams’s task destination.\n if (request.body == null && fetchParams.processRequestEndOfBody) {\n queueMicrotask(() => fetchParams.processRequestEndOfBody())\n } else if (request.body != null) {\n // 2. Otherwise, if body is non-null:\n\n // 1. Let processBodyChunk given bytes be these steps:\n const processBodyChunk = async function * (bytes) {\n // 1. If the ongoing fetch is terminated, then abort these steps.\n if (isCancelled(fetchParams)) {\n return\n }\n\n // 2. Run this step in parallel: transmit bytes.\n yield bytes\n\n // 3. 
If fetchParams’s process request body is non-null, then run\n // fetchParams’s process request body given bytes’s length.\n fetchParams.processRequestBodyChunkLength?.(bytes.byteLength)\n }\n\n // 2. Let processEndOfBody be these steps:\n const processEndOfBody = () => {\n // 1. If fetchParams is canceled, then abort these steps.\n if (isCancelled(fetchParams)) {\n return\n }\n\n // 2. If fetchParams’s process request end-of-body is non-null,\n // then run fetchParams’s process request end-of-body.\n if (fetchParams.processRequestEndOfBody) {\n fetchParams.processRequestEndOfBody()\n }\n }\n\n // 3. Let processBodyError given e be these steps:\n const processBodyError = (e) => {\n // 1. If fetchParams is canceled, then abort these steps.\n if (isCancelled(fetchParams)) {\n return\n }\n\n // 2. If e is an \"AbortError\" DOMException, then abort fetchParams’s controller.\n if (e.name === 'AbortError') {\n fetchParams.controller.abort()\n } else {\n fetchParams.controller.terminate(e)\n }\n }\n\n // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody,\n // processBodyError, and fetchParams’s task destination.\n requestBody = (async function * () {\n try {\n for await (const bytes of request.body.stream) {\n yield * processBodyChunk(bytes)\n }\n processEndOfBody()\n } catch (err) {\n processBodyError(err)\n }\n })()\n }\n\n try {\n // socket is only provided for websockets\n const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody })\n\n if (socket) {\n response = makeResponse({ status, statusText, headersList, socket })\n } else {\n const iterator = body[Symbol.asyncIterator]()\n fetchParams.controller.next = () => iterator.next()\n\n response = makeResponse({ status, statusText, headersList })\n }\n } catch (err) {\n // 10. If aborted, then:\n if (err.name === 'AbortError') {\n // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.\n fetchParams.controller.connection.destroy()\n\n // 2. Return the appropriate network error for fetchParams.\n return makeAppropriateNetworkError(fetchParams, err)\n }\n\n return makeNetworkError(err)\n }\n\n // 11. Let pullAlgorithm be an action that resumes the ongoing fetch\n // if it is suspended.\n const pullAlgorithm = () => {\n fetchParams.controller.resume()\n }\n\n // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s\n // controller with reason, given reason.\n const cancelAlgorithm = (reason) => {\n fetchParams.controller.abort(reason)\n }\n\n // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by\n // the user agent.\n // TODO\n\n // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object\n // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.\n // TODO\n\n // 15. Let stream be a new ReadableStream.\n // 16. Set up stream with pullAlgorithm set to pullAlgorithm,\n // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to\n // highWaterMark, and sizeAlgorithm set to sizeAlgorithm.\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n const stream = new ReadableStream(\n {\n async start (controller) {\n fetchParams.controller.controller = controller\n },\n async pull (controller) {\n await pullAlgorithm(controller)\n },\n async cancel (reason) {\n await cancelAlgorithm(reason)\n }\n },\n {\n highWaterMark: 0,\n size () {\n return 1\n }\n }\n )\n\n // 17. Run these steps, but abort when the ongoing fetch is terminated:\n\n // 1. 
Set response’s body to a new body whose stream is stream.\n response.body = { stream }\n\n // 2. If response is not a network error and request’s cache mode is\n // not \"no-store\", then update response in httpCache for request.\n // TODO\n\n // 3. If includeCredentials is true and the user agent is not configured\n // to block cookies for request (see section 7 of [COOKIES]), then run the\n // \"set-cookie-string\" parsing algorithm (see section 5.2 of [COOKIES]) on\n // the value of each header whose name is a byte-case-insensitive match for\n // `Set-Cookie` in response’s header list, if any, and request’s current URL.\n // TODO\n\n // 18. If aborted, then:\n // TODO\n\n // 19. Run these steps in parallel:\n\n // 1. Run these steps, but abort when fetchParams is canceled:\n fetchParams.controller.on('terminated', onAborted)\n fetchParams.controller.resume = async () => {\n // 1. While true\n while (true) {\n // 1-3. See onData...\n\n // 4. Set bytes to the result of handling content codings given\n // codings and bytes.\n let bytes\n let isFailure\n try {\n const { done, value } = await fetchParams.controller.next()\n\n if (isAborted(fetchParams)) {\n break\n }\n\n bytes = done ? undefined : value\n } catch (err) {\n if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {\n // zlib doesn't like empty streams.\n bytes = undefined\n } else {\n bytes = err\n\n // err may be propagated from the result of calling readablestream.cancel,\n // which might not be an error. https://github.com/nodejs/undici/issues/2009\n isFailure = true\n }\n }\n\n if (bytes === undefined) {\n // 2. Otherwise, if the bytes transmission for response’s message\n // body is done normally and stream is readable, then close\n // stream, finalize response for fetchParams and response, and\n // abort these in-parallel steps.\n readableStreamClose(fetchParams.controller.controller)\n\n finalizeResponse(fetchParams, response)\n\n return\n }\n\n // 5. Increase timingInfo’s decoded body size by bytes’s length.\n timingInfo.decodedBodySize += bytes?.byteLength ?? 0\n\n // 6. If bytes is failure, then terminate fetchParams’s controller.\n if (isFailure) {\n fetchParams.controller.terminate(bytes)\n return\n }\n\n // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes\n // into stream.\n fetchParams.controller.controller.enqueue(new Uint8Array(bytes))\n\n // 8. If stream is errored, then terminate the ongoing fetch.\n if (isErrored(stream)) {\n fetchParams.controller.terminate()\n return\n }\n\n // 9. If stream doesn’t need more data ask the user agent to suspend\n // the ongoing fetch.\n if (!fetchParams.controller.controller.desiredSize) {\n return\n }\n }\n }\n\n // 2. If aborted, then:\n function onAborted (reason) {\n // 2. If fetchParams is aborted, then:\n if (isAborted(fetchParams)) {\n // 1. Set response’s aborted flag.\n response.aborted = true\n\n // 2. If stream is readable, then error stream with the result of\n // deserialize a serialized abort reason given fetchParams’s\n // controller’s serialized abort reason and an\n // implementation-defined realm.\n if (isReadable(stream)) {\n fetchParams.controller.controller.error(\n fetchParams.controller.serializedAbortReason\n )\n }\n } else {\n // 3. Otherwise, if stream is readable, error stream with a TypeError.\n if (isReadable(stream)) {\n fetchParams.controller.controller.error(new TypeError('terminated', {\n cause: isErrorLike(reason) ? reason : undefined\n }))\n }\n }\n\n // 4. 
If connection uses HTTP/2, then transmit an RST_STREAM frame.\n // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.\n fetchParams.controller.connection.destroy()\n }\n\n // 20. Return response.\n return response\n\n async function dispatch ({ body }) {\n const url = requestCurrentURL(request)\n /** @type {import('../..').Agent} */\n const agent = fetchParams.controller.dispatcher\n\n return new Promise((resolve, reject) => agent.dispatch(\n {\n path: url.pathname + url.search,\n origin: url.origin,\n method: request.method,\n body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,\n headers: request.headersList.entries,\n maxRedirections: 0,\n upgrade: request.mode === 'websocket' ? 'websocket' : undefined\n },\n {\n body: null,\n abort: null,\n\n onConnect (abort) {\n // TODO (fix): Do we need connection here?\n const { connection } = fetchParams.controller\n\n if (connection.destroyed) {\n abort(new DOMException('The operation was aborted.', 'AbortError'))\n } else {\n fetchParams.controller.on('terminated', abort)\n this.abort = connection.abort = abort\n }\n },\n\n onHeaders (status, headersList, resume, statusText) {\n if (status < 200) {\n return\n }\n\n let codings = []\n let location = ''\n\n const headers = new Headers()\n\n // For H2, the headers are a plain JS object\n // We distinguish between them and iterate accordingly\n if (Array.isArray(headersList)) {\n for (let n = 0; n < headersList.length; n += 2) {\n const key = headersList[n + 0].toString('latin1')\n const val = headersList[n + 1].toString('latin1')\n if (key.toLowerCase() === 'content-encoding') {\n // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1\n // \"All content-coding values are case-insensitive...\"\n codings = val.toLowerCase().split(',').map((x) => x.trim())\n } else if (key.toLowerCase() === 'location') {\n location = val\n }\n\n headers[kHeadersList].append(key, val)\n }\n } else {\n const keys = Object.keys(headersList)\n for (const key of keys) {\n const val = headersList[key]\n if (key.toLowerCase() === 'content-encoding') {\n // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1\n // \"All content-coding values are case-insensitive...\"\n codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()\n } else if (key.toLowerCase() === 'location') {\n location = val\n }\n\n headers[kHeadersList].append(key, val)\n }\n }\n\n this.body = new Readable({ read: resume })\n\n const decoders = []\n\n const willFollow = request.redirect === 'follow' &&\n location &&\n redirectStatusSet.has(status)\n\n // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding\n if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {\n for (const coding of codings) {\n // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2\n if (coding === 'x-gzip' || coding === 'gzip') {\n decoders.push(zlib.createGunzip({\n // Be less strict when decoding compressed responses, since sometimes\n // servers send slightly invalid responses that are still accepted\n // by common browsers.\n // Always using Z_SYNC_FLUSH is what cURL does.\n flush: zlib.constants.Z_SYNC_FLUSH,\n finishFlush: zlib.constants.Z_SYNC_FLUSH\n }))\n } else if (coding === 'deflate') {\n decoders.push(zlib.createInflate())\n } else if (coding === 'br') {\n decoders.push(zlib.createBrotliDecompress())\n } else {\n decoders.length = 0\n break\n }\n }\n 
}\n\n resolve({\n status,\n statusText,\n headersList: headers[kHeadersList],\n body: decoders.length\n ? pipeline(this.body, ...decoders, () => { })\n : this.body.on('error', () => {})\n })\n\n return true\n },\n\n onData (chunk) {\n if (fetchParams.controller.dump) {\n return\n }\n\n // 1. If one or more bytes have been transmitted from response’s\n // message body, then:\n\n // 1. Let bytes be the transmitted bytes.\n const bytes = chunk\n\n // 2. Let codings be the result of extracting header list values\n // given `Content-Encoding` and response’s header list.\n // See pullAlgorithm.\n\n // 3. Increase timingInfo’s encoded body size by bytes’s length.\n timingInfo.encodedBodySize += bytes.byteLength\n\n // 4. See pullAlgorithm...\n\n return this.body.push(bytes)\n },\n\n onComplete () {\n if (this.abort) {\n fetchParams.controller.off('terminated', this.abort)\n }\n\n fetchParams.controller.ended = true\n\n this.body.push(null)\n },\n\n onError (error) {\n if (this.abort) {\n fetchParams.controller.off('terminated', this.abort)\n }\n\n this.body?.destroy(error)\n\n fetchParams.controller.terminate(error)\n\n reject(error)\n },\n\n onUpgrade (status, headersList, socket) {\n if (status !== 101) {\n return\n }\n\n const headers = new Headers()\n\n for (let n = 0; n < headersList.length; n += 2) {\n const key = headersList[n + 0].toString('latin1')\n const val = headersList[n + 1].toString('latin1')\n\n headers[kHeadersList].append(key, val)\n }\n\n resolve({\n status,\n statusText: STATUS_CODES[status],\n headersList: headers[kHeadersList],\n socket\n })\n\n return true\n }\n }\n ))\n }\n}\n\nmodule.exports = {\n fetch,\n Fetch,\n fetching,\n finalizeAndReportTiming\n}\n","/* globals AbortController */\n\n'use strict'\n\nconst { extractBody, mixinBody, cloneBody } = require('./body')\nconst { Headers, fill: fillHeaders, HeadersList } = require('./headers')\nconst { FinalizationRegistry } = require('../compat/dispatcher-weakref')()\nconst util = require('../core/util')\nconst {\n isValidHTTPToken,\n sameOrigin,\n normalizeMethod,\n makePolicyContainer,\n normalizeMethodRecord\n} = require('./util')\nconst {\n forbiddenMethodsSet,\n corsSafeListedMethodsSet,\n referrerPolicy,\n requestRedirect,\n requestMode,\n requestCredentials,\n requestCache,\n requestDuplex\n} = require('./constants')\nconst { kEnumerableProperty } = util\nconst { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols')\nconst { webidl } = require('./webidl')\nconst { getGlobalOrigin } = require('./global')\nconst { URLSerializer } = require('./dataURL')\nconst { kHeadersList, kConstruct } = require('../core/symbols')\nconst assert = require('assert')\nconst { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events')\n\nlet TransformStream = globalThis.TransformStream\n\nconst kAbortController = Symbol('abortController')\n\nconst requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {\n signal.removeEventListener('abort', abort)\n})\n\n// https://fetch.spec.whatwg.org/#request-class\nclass Request {\n // https://fetch.spec.whatwg.org/#dom-request\n constructor (input, init = {}) {\n if (input === kConstruct) {\n return\n }\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })\n\n input = webidl.converters.RequestInfo(input)\n init = webidl.converters.RequestInit(init)\n\n // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object\n this[kRealm] = {\n settingsObject: {\n baseUrl: getGlobalOrigin(),\n get 
origin () {\n return this.baseUrl?.origin\n },\n policyContainer: makePolicyContainer()\n }\n }\n\n // 1. Let request be null.\n let request = null\n\n // 2. Let fallbackMode be null.\n let fallbackMode = null\n\n // 3. Let baseURL be this’s relevant settings object’s API base URL.\n const baseUrl = this[kRealm].settingsObject.baseUrl\n\n // 4. Let signal be null.\n let signal = null\n\n // 5. If input is a string, then:\n if (typeof input === 'string') {\n // 1. Let parsedURL be the result of parsing input with baseURL.\n // 2. If parsedURL is failure, then throw a TypeError.\n let parsedURL\n try {\n parsedURL = new URL(input, baseUrl)\n } catch (err) {\n throw new TypeError('Failed to parse URL from ' + input, { cause: err })\n }\n\n // 3. If parsedURL includes credentials, then throw a TypeError.\n if (parsedURL.username || parsedURL.password) {\n throw new TypeError(\n 'Request cannot be constructed from a URL that includes credentials: ' +\n input\n )\n }\n\n // 4. Set request to a new request whose URL is parsedURL.\n request = makeRequest({ urlList: [parsedURL] })\n\n // 5. Set fallbackMode to \"cors\".\n fallbackMode = 'cors'\n } else {\n // 6. Otherwise:\n\n // 7. Assert: input is a Request object.\n assert(input instanceof Request)\n\n // 8. Set request to input’s request.\n request = input[kState]\n\n // 9. Set signal to input’s signal.\n signal = input[kSignal]\n }\n\n // 7. Let origin be this’s relevant settings object’s origin.\n const origin = this[kRealm].settingsObject.origin\n\n // 8. Let window be \"client\".\n let window = 'client'\n\n // 9. If request’s window is an environment settings object and its origin\n // is same origin with origin, then set window to request’s window.\n if (\n request.window?.constructor?.name === 'EnvironmentSettingsObject' &&\n sameOrigin(request.window, origin)\n ) {\n window = request.window\n }\n\n // 10. If init[\"window\"] exists and is non-null, then throw a TypeError.\n if (init.window != null) {\n throw new TypeError(`'window' option '${window}' must be null`)\n }\n\n // 11. If init[\"window\"] exists, then set window to \"no-window\".\n if ('window' in init) {\n window = 'no-window'\n }\n\n // 12. Set request to a new request with the following properties:\n request = makeRequest({\n // URL request’s URL.\n // undici implementation note: this is set as the first item in request's urlList in makeRequest\n // method request’s method.\n method: request.method,\n // header list A copy of request’s header list.\n // undici implementation note: headersList is cloned in makeRequest\n headersList: request.headersList,\n // unsafe-request flag Set.\n unsafeRequest: request.unsafeRequest,\n // client This’s relevant settings object.\n client: this[kRealm].settingsObject,\n // window window.\n window,\n // priority request’s priority.\n priority: request.priority,\n // origin request’s origin. The propagation of the origin is only significant for navigation requests\n // being handled by a service worker. 
In this scenario a request can have an origin that is different\n // from the current client.\n origin: request.origin,\n // referrer request’s referrer.\n referrer: request.referrer,\n // referrer policy request’s referrer policy.\n referrerPolicy: request.referrerPolicy,\n // mode request’s mode.\n mode: request.mode,\n // credentials mode request’s credentials mode.\n credentials: request.credentials,\n // cache mode request’s cache mode.\n cache: request.cache,\n // redirect mode request’s redirect mode.\n redirect: request.redirect,\n // integrity metadata request’s integrity metadata.\n integrity: request.integrity,\n // keepalive request’s keepalive.\n keepalive: request.keepalive,\n // reload-navigation flag request’s reload-navigation flag.\n reloadNavigation: request.reloadNavigation,\n // history-navigation flag request’s history-navigation flag.\n historyNavigation: request.historyNavigation,\n // URL list A clone of request’s URL list.\n urlList: [...request.urlList]\n })\n\n const initHasKey = Object.keys(init).length !== 0\n\n // 13. If init is not empty, then:\n if (initHasKey) {\n // 1. If request’s mode is \"navigate\", then set it to \"same-origin\".\n if (request.mode === 'navigate') {\n request.mode = 'same-origin'\n }\n\n // 2. Unset request’s reload-navigation flag.\n request.reloadNavigation = false\n\n // 3. Unset request’s history-navigation flag.\n request.historyNavigation = false\n\n // 4. Set request’s origin to \"client\".\n request.origin = 'client'\n\n // 5. Set request’s referrer to \"client\"\n request.referrer = 'client'\n\n // 6. Set request’s referrer policy to the empty string.\n request.referrerPolicy = ''\n\n // 7. Set request’s URL to request’s current URL.\n request.url = request.urlList[request.urlList.length - 1]\n\n // 8. Set request’s URL list to « request’s URL ».\n request.urlList = [request.url]\n }\n\n // 14. If init[\"referrer\"] exists, then:\n if (init.referrer !== undefined) {\n // 1. Let referrer be init[\"referrer\"].\n const referrer = init.referrer\n\n // 2. If referrer is the empty string, then set request’s referrer to \"no-referrer\".\n if (referrer === '') {\n request.referrer = 'no-referrer'\n } else {\n // 1. Let parsedReferrer be the result of parsing referrer with\n // baseURL.\n // 2. If parsedReferrer is failure, then throw a TypeError.\n let parsedReferrer\n try {\n parsedReferrer = new URL(referrer, baseUrl)\n } catch (err) {\n throw new TypeError(`Referrer \"${referrer}\" is not a valid URL.`, { cause: err })\n }\n\n // 3. If one of the following is true\n // - parsedReferrer’s scheme is \"about\" and path is the string \"client\"\n // - parsedReferrer’s origin is not same origin with origin\n // then set request’s referrer to \"client\".\n if (\n (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') ||\n (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl))\n ) {\n request.referrer = 'client'\n } else {\n // 4. Otherwise, set request’s referrer to parsedReferrer.\n request.referrer = parsedReferrer\n }\n }\n }\n\n // 15. If init[\"referrerPolicy\"] exists, then set request’s referrer policy\n // to it.\n if (init.referrerPolicy !== undefined) {\n request.referrerPolicy = init.referrerPolicy\n }\n\n // 16. Let mode be init[\"mode\"] if it exists, and fallbackMode otherwise.\n let mode\n if (init.mode !== undefined) {\n mode = init.mode\n } else {\n mode = fallbackMode\n }\n\n // 17. 
If mode is \"navigate\", then throw a TypeError.\n if (mode === 'navigate') {\n throw webidl.errors.exception({\n header: 'Request constructor',\n message: 'invalid request mode navigate.'\n })\n }\n\n // 18. If mode is non-null, set request’s mode to mode.\n if (mode != null) {\n request.mode = mode\n }\n\n // 19. If init[\"credentials\"] exists, then set request’s credentials mode\n // to it.\n if (init.credentials !== undefined) {\n request.credentials = init.credentials\n }\n\n // 18. If init[\"cache\"] exists, then set request’s cache mode to it.\n if (init.cache !== undefined) {\n request.cache = init.cache\n }\n\n // 21. If request’s cache mode is \"only-if-cached\" and request’s mode is\n // not \"same-origin\", then throw a TypeError.\n if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {\n throw new TypeError(\n \"'only-if-cached' can be set only with 'same-origin' mode\"\n )\n }\n\n // 22. If init[\"redirect\"] exists, then set request’s redirect mode to it.\n if (init.redirect !== undefined) {\n request.redirect = init.redirect\n }\n\n // 23. If init[\"integrity\"] exists, then set request’s integrity metadata to it.\n if (init.integrity != null) {\n request.integrity = String(init.integrity)\n }\n\n // 24. If init[\"keepalive\"] exists, then set request’s keepalive to it.\n if (init.keepalive !== undefined) {\n request.keepalive = Boolean(init.keepalive)\n }\n\n // 25. If init[\"method\"] exists, then:\n if (init.method !== undefined) {\n // 1. Let method be init[\"method\"].\n let method = init.method\n\n // 2. If method is not a method or method is a forbidden method, then\n // throw a TypeError.\n if (!isValidHTTPToken(method)) {\n throw new TypeError(`'${method}' is not a valid HTTP method.`)\n }\n\n if (forbiddenMethodsSet.has(method.toUpperCase())) {\n throw new TypeError(`'${method}' HTTP method is unsupported.`)\n }\n\n // 3. Normalize method.\n method = normalizeMethodRecord[method] ?? normalizeMethod(method)\n\n // 4. Set request’s method to method.\n request.method = method\n }\n\n // 26. If init[\"signal\"] exists, then set signal to it.\n if (init.signal !== undefined) {\n signal = init.signal\n }\n\n // 27. Set this’s request to request.\n this[kState] = request\n\n // 28. Set this’s signal to a new AbortSignal object with this’s relevant\n // Realm.\n // TODO: could this be simplified with AbortSignal.any\n // (https://dom.spec.whatwg.org/#dom-abortsignal-any)\n const ac = new AbortController()\n this[kSignal] = ac.signal\n this[kSignal][kRealm] = this[kRealm]\n\n // 29. If signal is not null, then make this’s signal follow signal.\n if (signal != null) {\n if (\n !signal ||\n typeof signal.aborted !== 'boolean' ||\n typeof signal.addEventListener !== 'function'\n ) {\n throw new TypeError(\n \"Failed to construct 'Request': member signal is not of type AbortSignal.\"\n )\n }\n\n if (signal.aborted) {\n ac.abort(signal.reason)\n } else {\n // Keep a strong ref to ac while request object\n // is alive. 
This is needed to prevent AbortController\n // from being prematurely garbage collected.\n // See, https://github.com/nodejs/undici/issues/1926.\n this[kAbortController] = ac\n\n const acRef = new WeakRef(ac)\n const abort = function () {\n const ac = acRef.deref()\n if (ac !== undefined) {\n ac.abort(this.reason)\n }\n }\n\n // Third-party AbortControllers may not work with these.\n // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.\n try {\n // If the max amount of listeners is equal to the default, increase it\n // This is only available in node >= v19.9.0\n if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {\n setMaxListeners(100, signal)\n } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {\n setMaxListeners(100, signal)\n }\n } catch {}\n\n util.addAbortListener(signal, abort)\n requestFinalizer.register(ac, { signal, abort })\n }\n }\n\n // 30. Set this’s headers to a new Headers object with this’s relevant\n // Realm, whose header list is request’s header list and guard is\n // \"request\".\n this[kHeaders] = new Headers(kConstruct)\n this[kHeaders][kHeadersList] = request.headersList\n this[kHeaders][kGuard] = 'request'\n this[kHeaders][kRealm] = this[kRealm]\n\n // 31. If this’s request’s mode is \"no-cors\", then:\n if (mode === 'no-cors') {\n // 1. If this’s request’s method is not a CORS-safelisted method,\n // then throw a TypeError.\n if (!corsSafeListedMethodsSet.has(request.method)) {\n throw new TypeError(\n `'${request.method} is unsupported in no-cors mode.`\n )\n }\n\n // 2. Set this’s headers’s guard to \"request-no-cors\".\n this[kHeaders][kGuard] = 'request-no-cors'\n }\n\n // 32. If init is not empty, then:\n if (initHasKey) {\n /** @type {HeadersList} */\n const headersList = this[kHeaders][kHeadersList]\n // 1. Let headers be a copy of this’s headers and its associated header\n // list.\n // 2. If init[\"headers\"] exists, then set headers to init[\"headers\"].\n const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)\n\n // 3. Empty this’s headers’s header list.\n headersList.clear()\n\n // 4. If headers is a Headers object, then for each header in its header\n // list, append header’s name/header’s value to this’s headers.\n if (headers instanceof HeadersList) {\n for (const [key, val] of headers) {\n headersList.append(key, val)\n }\n // Note: Copy the `set-cookie` meta-data.\n headersList.cookies = headers.cookies\n } else {\n // 5. Otherwise, fill this’s headers with headers.\n fillHeaders(this[kHeaders], headers)\n }\n }\n\n // 33. Let inputBody be input’s request’s body if input is a Request\n // object; otherwise null.\n const inputBody = input instanceof Request ? input[kState].body : null\n\n // 34. If either init[\"body\"] exists and is non-null or inputBody is\n // non-null, and request’s method is `GET` or `HEAD`, then throw a\n // TypeError.\n if (\n (init.body != null || inputBody != null) &&\n (request.method === 'GET' || request.method === 'HEAD')\n ) {\n throw new TypeError('Request with GET/HEAD method cannot have body.')\n }\n\n // 35. Let initBody be null.\n let initBody = null\n\n // 36. If init[\"body\"] exists and is non-null, then:\n if (init.body != null) {\n // 1. Let Content-Type be null.\n // 2. 
Set initBody and Content-Type to the result of extracting\n // init[\"body\"], with keepalive set to request’s keepalive.\n const [extractedBody, contentType] = extractBody(\n init.body,\n request.keepalive\n )\n initBody = extractedBody\n\n // 3, If Content-Type is non-null and this’s headers’s header list does\n // not contain `Content-Type`, then append `Content-Type`/Content-Type to\n // this’s headers.\n if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {\n this[kHeaders].append('content-type', contentType)\n }\n }\n\n // 37. Let inputOrInitBody be initBody if it is non-null; otherwise\n // inputBody.\n const inputOrInitBody = initBody ?? inputBody\n\n // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is\n // null, then:\n if (inputOrInitBody != null && inputOrInitBody.source == null) {\n // 1. If initBody is non-null and init[\"duplex\"] does not exist,\n // then throw a TypeError.\n if (initBody != null && init.duplex == null) {\n throw new TypeError('RequestInit: duplex option is required when sending a body.')\n }\n\n // 2. If this’s request’s mode is neither \"same-origin\" nor \"cors\",\n // then throw a TypeError.\n if (request.mode !== 'same-origin' && request.mode !== 'cors') {\n throw new TypeError(\n 'If request is made from ReadableStream, mode should be \"same-origin\" or \"cors\"'\n )\n }\n\n // 3. Set this’s request’s use-CORS-preflight flag.\n request.useCORSPreflightFlag = true\n }\n\n // 39. Let finalBody be inputOrInitBody.\n let finalBody = inputOrInitBody\n\n // 40. If initBody is null and inputBody is non-null, then:\n if (initBody == null && inputBody != null) {\n // 1. If input is unusable, then throw a TypeError.\n if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {\n throw new TypeError(\n 'Cannot construct a Request with a Request object that has already been used.'\n )\n }\n\n // 2. Set finalBody to the result of creating a proxy for inputBody.\n if (!TransformStream) {\n TransformStream = require('stream/web').TransformStream\n }\n\n // https://streams.spec.whatwg.org/#readablestream-create-a-proxy\n const identityTransform = new TransformStream()\n inputBody.stream.pipeThrough(identityTransform)\n finalBody = {\n source: inputBody.source,\n length: inputBody.length,\n stream: identityTransform.readable\n }\n }\n\n // 41. Set this’s request’s body to finalBody.\n this[kState].body = finalBody\n }\n\n // Returns request’s HTTP method, which is \"GET\" by default.\n get method () {\n webidl.brandCheck(this, Request)\n\n // The method getter steps are to return this’s request’s method.\n return this[kState].method\n }\n\n // Returns the URL of request as a string.\n get url () {\n webidl.brandCheck(this, Request)\n\n // The url getter steps are to return this’s request’s URL, serialized.\n return URLSerializer(this[kState].url)\n }\n\n // Returns a Headers object consisting of the headers associated with request.\n // Note that headers added in the network layer by the user agent will not\n // be accounted for in this object, e.g., the \"Host\" header.\n get headers () {\n webidl.brandCheck(this, Request)\n\n // The headers getter steps are to return this’s headers.\n return this[kHeaders]\n }\n\n // Returns the kind of resource requested by request, e.g., \"document\"\n // or \"script\".\n get destination () {\n webidl.brandCheck(this, Request)\n\n // The destination getter are to return this’s request’s destination.\n return this[kState].destination\n }\n\n // Returns the referrer of request. 
Its value can be a same-origin URL if\n // explicitly set in init, the empty string to indicate no referrer, and\n // \"about:client\" when defaulting to the global’s default. This is used\n // during fetching to determine the value of the `Referer` header of the\n // request being made.\n get referrer () {\n webidl.brandCheck(this, Request)\n\n // 1. If this’s request’s referrer is \"no-referrer\", then return the\n // empty string.\n if (this[kState].referrer === 'no-referrer') {\n return ''\n }\n\n // 2. If this’s request’s referrer is \"client\", then return\n // \"about:client\".\n if (this[kState].referrer === 'client') {\n return 'about:client'\n }\n\n // Return this’s request’s referrer, serialized.\n return this[kState].referrer.toString()\n }\n\n // Returns the referrer policy associated with request.\n // This is used during fetching to compute the value of the request’s\n // referrer.\n get referrerPolicy () {\n webidl.brandCheck(this, Request)\n\n // The referrerPolicy getter steps are to return this’s request’s referrer policy.\n return this[kState].referrerPolicy\n }\n\n // Returns the mode associated with request, which is a string indicating\n // whether the request will use CORS, or will be restricted to same-origin\n // URLs.\n get mode () {\n webidl.brandCheck(this, Request)\n\n // The mode getter steps are to return this’s request’s mode.\n return this[kState].mode\n }\n\n // Returns the credentials mode associated with request,\n // which is a string indicating whether credentials will be sent with the\n // request always, never, or only when sent to a same-origin URL.\n get credentials () {\n // The credentials getter steps are to return this’s request’s credentials mode.\n return this[kState].credentials\n }\n\n // Returns the cache mode associated with request,\n // which is a string indicating how the request will\n // interact with the browser’s cache when fetching.\n get cache () {\n webidl.brandCheck(this, Request)\n\n // The cache getter steps are to return this’s request’s cache mode.\n return this[kState].cache\n }\n\n // Returns the redirect mode associated with request,\n // which is a string indicating how redirects for the\n // request will be handled during fetching. A request\n // will follow redirects by default.\n get redirect () {\n webidl.brandCheck(this, Request)\n\n // The redirect getter steps are to return this’s request’s redirect mode.\n return this[kState].redirect\n }\n\n // Returns request’s subresource integrity metadata, which is a\n // cryptographic hash of the resource being fetched. Its value\n // consists of multiple hashes separated by whitespace. [SRI]\n get integrity () {\n webidl.brandCheck(this, Request)\n\n // The integrity getter steps are to return this’s request’s integrity\n // metadata.\n return this[kState].integrity\n }\n\n // Returns a boolean indicating whether or not request can outlive the\n // global in which it was created.\n get keepalive () {\n webidl.brandCheck(this, Request)\n\n // The keepalive getter steps are to return this’s request’s keepalive.\n return this[kState].keepalive\n }\n\n // Returns a boolean indicating whether or not request is for a reload\n // navigation.\n get isReloadNavigation () {\n webidl.brandCheck(this, Request)\n\n // The isReloadNavigation getter steps are to return true if this’s\n // request’s reload-navigation flag is set; otherwise false.\n return this[kState].reloadNavigation\n }\n\n // Returns a boolean indicating whether or not request is for a history\n // navigation (a.k.a. 
back-foward navigation).\n get isHistoryNavigation () {\n webidl.brandCheck(this, Request)\n\n // The isHistoryNavigation getter steps are to return true if this’s request’s\n // history-navigation flag is set; otherwise false.\n return this[kState].historyNavigation\n }\n\n // Returns the signal associated with request, which is an AbortSignal\n // object indicating whether or not request has been aborted, and its\n // abort event handler.\n get signal () {\n webidl.brandCheck(this, Request)\n\n // The signal getter steps are to return this’s signal.\n return this[kSignal]\n }\n\n get body () {\n webidl.brandCheck(this, Request)\n\n return this[kState].body ? this[kState].body.stream : null\n }\n\n get bodyUsed () {\n webidl.brandCheck(this, Request)\n\n return !!this[kState].body && util.isDisturbed(this[kState].body.stream)\n }\n\n get duplex () {\n webidl.brandCheck(this, Request)\n\n return 'half'\n }\n\n // Returns a clone of request.\n clone () {\n webidl.brandCheck(this, Request)\n\n // 1. If this is unusable, then throw a TypeError.\n if (this.bodyUsed || this.body?.locked) {\n throw new TypeError('unusable')\n }\n\n // 2. Let clonedRequest be the result of cloning this’s request.\n const clonedRequest = cloneRequest(this[kState])\n\n // 3. Let clonedRequestObject be the result of creating a Request object,\n // given clonedRequest, this’s headers’s guard, and this’s relevant Realm.\n const clonedRequestObject = new Request(kConstruct)\n clonedRequestObject[kState] = clonedRequest\n clonedRequestObject[kRealm] = this[kRealm]\n clonedRequestObject[kHeaders] = new Headers(kConstruct)\n clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList\n clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]\n clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]\n\n // 4. Make clonedRequestObject’s signal follow this’s signal.\n const ac = new AbortController()\n if (this.signal.aborted) {\n ac.abort(this.signal.reason)\n } else {\n util.addAbortListener(\n this.signal,\n () => {\n ac.abort(this.signal.reason)\n }\n )\n }\n clonedRequestObject[kSignal] = ac.signal\n\n // 4. Return clonedRequestObject.\n return clonedRequestObject\n }\n}\n\nmixinBody(Request)\n\nfunction makeRequest (init) {\n // https://fetch.spec.whatwg.org/#requests\n const request = {\n method: 'GET',\n localURLsOnly: false,\n unsafeRequest: false,\n body: null,\n client: null,\n reservedClient: null,\n replacesClientId: '',\n window: 'client',\n keepalive: false,\n serviceWorkers: 'all',\n initiator: '',\n destination: '',\n priority: null,\n origin: 'client',\n policyContainer: 'client',\n referrer: 'client',\n referrerPolicy: '',\n mode: 'no-cors',\n useCORSPreflightFlag: false,\n credentials: 'same-origin',\n useCredentials: false,\n cache: 'default',\n redirect: 'follow',\n integrity: '',\n cryptoGraphicsNonceMetadata: '',\n parserMetadata: '',\n reloadNavigation: false,\n historyNavigation: false,\n userActivation: false,\n taintedOrigin: false,\n redirectCount: 0,\n responseTainting: 'basic',\n preventNoCacheCacheControlHeaderModification: false,\n done: false,\n timingAllowFailed: false,\n ...init,\n headersList: init.headersList\n ? new HeadersList(init.headersList)\n : new HeadersList()\n }\n request.url = request.urlList[0]\n return request\n}\n\n// https://fetch.spec.whatwg.org/#concept-request-clone\nfunction cloneRequest (request) {\n // To clone a request request, run these steps:\n\n // 1. 
Let newRequest be a copy of request, except for its body.\n const newRequest = makeRequest({ ...request, body: null })\n\n // 2. If request’s body is non-null, set newRequest’s body to the\n // result of cloning request’s body.\n if (request.body != null) {\n newRequest.body = cloneBody(request.body)\n }\n\n // 3. Return newRequest.\n return newRequest\n}\n\nObject.defineProperties(Request.prototype, {\n method: kEnumerableProperty,\n url: kEnumerableProperty,\n headers: kEnumerableProperty,\n redirect: kEnumerableProperty,\n clone: kEnumerableProperty,\n signal: kEnumerableProperty,\n duplex: kEnumerableProperty,\n destination: kEnumerableProperty,\n body: kEnumerableProperty,\n bodyUsed: kEnumerableProperty,\n isHistoryNavigation: kEnumerableProperty,\n isReloadNavigation: kEnumerableProperty,\n keepalive: kEnumerableProperty,\n integrity: kEnumerableProperty,\n cache: kEnumerableProperty,\n credentials: kEnumerableProperty,\n attribute: kEnumerableProperty,\n referrerPolicy: kEnumerableProperty,\n referrer: kEnumerableProperty,\n mode: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'Request',\n configurable: true\n }\n})\n\nwebidl.converters.Request = webidl.interfaceConverter(\n Request\n)\n\n// https://fetch.spec.whatwg.org/#requestinfo\nwebidl.converters.RequestInfo = function (V) {\n if (typeof V === 'string') {\n return webidl.converters.USVString(V)\n }\n\n if (V instanceof Request) {\n return webidl.converters.Request(V)\n }\n\n return webidl.converters.USVString(V)\n}\n\nwebidl.converters.AbortSignal = webidl.interfaceConverter(\n AbortSignal\n)\n\n// https://fetch.spec.whatwg.org/#requestinit\nwebidl.converters.RequestInit = webidl.dictionaryConverter([\n {\n key: 'method',\n converter: webidl.converters.ByteString\n },\n {\n key: 'headers',\n converter: webidl.converters.HeadersInit\n },\n {\n key: 'body',\n converter: webidl.nullableConverter(\n webidl.converters.BodyInit\n )\n },\n {\n key: 'referrer',\n converter: webidl.converters.USVString\n },\n {\n key: 'referrerPolicy',\n converter: webidl.converters.DOMString,\n // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy\n allowedValues: referrerPolicy\n },\n {\n key: 'mode',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#concept-request-mode\n allowedValues: requestMode\n },\n {\n key: 'credentials',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#requestcredentials\n allowedValues: requestCredentials\n },\n {\n key: 'cache',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#requestcache\n allowedValues: requestCache\n },\n {\n key: 'redirect',\n converter: webidl.converters.DOMString,\n // https://fetch.spec.whatwg.org/#requestredirect\n allowedValues: requestRedirect\n },\n {\n key: 'integrity',\n converter: webidl.converters.DOMString\n },\n {\n key: 'keepalive',\n converter: webidl.converters.boolean\n },\n {\n key: 'signal',\n converter: webidl.nullableConverter(\n (signal) => webidl.converters.AbortSignal(\n signal,\n { strict: false }\n )\n )\n },\n {\n key: 'window',\n converter: webidl.converters.any\n },\n {\n key: 'duplex',\n converter: webidl.converters.DOMString,\n allowedValues: requestDuplex\n }\n])\n\nmodule.exports = { Request, makeRequest }\n","'use strict'\n\nconst { Headers, HeadersList, fill } = require('./headers')\nconst { extractBody, cloneBody, mixinBody } = require('./body')\nconst util = require('../core/util')\nconst { kEnumerableProperty } = util\nconst {\n isValidReasonPhrase,\n 
isCancelled,\n isAborted,\n isBlobLike,\n serializeJavascriptValueToJSONString,\n isErrorLike,\n isomorphicEncode\n} = require('./util')\nconst {\n redirectStatusSet,\n nullBodyStatus,\n DOMException\n} = require('./constants')\nconst { kState, kHeaders, kGuard, kRealm } = require('./symbols')\nconst { webidl } = require('./webidl')\nconst { FormData } = require('./formdata')\nconst { getGlobalOrigin } = require('./global')\nconst { URLSerializer } = require('./dataURL')\nconst { kHeadersList, kConstruct } = require('../core/symbols')\nconst assert = require('assert')\nconst { types } = require('util')\n\nconst ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream\nconst textEncoder = new TextEncoder('utf-8')\n\n// https://fetch.spec.whatwg.org/#response-class\nclass Response {\n // Creates network error Response.\n static error () {\n // TODO\n const relevantRealm = { settingsObject: {} }\n\n // The static error() method steps are to return the result of creating a\n // Response object, given a new network error, \"immutable\", and this’s\n // relevant Realm.\n const responseObject = new Response()\n responseObject[kState] = makeNetworkError()\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList\n responseObject[kHeaders][kGuard] = 'immutable'\n responseObject[kHeaders][kRealm] = relevantRealm\n return responseObject\n }\n\n // https://fetch.spec.whatwg.org/#dom-response-json\n static json (data, init = {}) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })\n\n if (init !== null) {\n init = webidl.converters.ResponseInit(init)\n }\n\n // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data.\n const bytes = textEncoder.encode(\n serializeJavascriptValueToJSONString(data)\n )\n\n // 2. Let body be the result of extracting bytes.\n const body = extractBody(bytes)\n\n // 3. Let responseObject be the result of creating a Response object, given a new response,\n // \"response\", and this’s relevant Realm.\n const relevantRealm = { settingsObject: {} }\n const responseObject = new Response()\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kGuard] = 'response'\n responseObject[kHeaders][kRealm] = relevantRealm\n\n // 4. Perform initialize a response given responseObject, init, and (body, \"application/json\").\n initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })\n\n // 5. Return responseObject.\n return responseObject\n }\n\n // Creates a redirect Response that redirects to url with status status.\n static redirect (url, status = 302) {\n const relevantRealm = { settingsObject: {} }\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' })\n\n url = webidl.converters.USVString(url)\n status = webidl.converters['unsigned short'](status)\n\n // 1. Let parsedURL be the result of parsing url with current settings\n // object’s API base URL.\n // 2. If parsedURL is failure, then throw a TypeError.\n // TODO: base-URL?\n let parsedURL\n try {\n parsedURL = new URL(url, getGlobalOrigin())\n } catch (err) {\n throw Object.assign(new TypeError('Failed to parse URL from ' + url), {\n cause: err\n })\n }\n\n // 3. If status is not a redirect status, then throw a RangeError.\n if (!redirectStatusSet.has(status)) {\n throw new RangeError('Invalid status code ' + status)\n }\n\n // 4. 
Let responseObject be the result of creating a Response object,\n // given a new response, \"immutable\", and this’s relevant Realm.\n const responseObject = new Response()\n responseObject[kRealm] = relevantRealm\n responseObject[kHeaders][kGuard] = 'immutable'\n responseObject[kHeaders][kRealm] = relevantRealm\n\n // 5. Set responseObject’s response’s status to status.\n responseObject[kState].status = status\n\n // 6. Let value be parsedURL, serialized and isomorphic encoded.\n const value = isomorphicEncode(URLSerializer(parsedURL))\n\n // 7. Append `Location`/value to responseObject’s response’s header list.\n responseObject[kState].headersList.append('location', value)\n\n // 8. Return responseObject.\n return responseObject\n }\n\n // https://fetch.spec.whatwg.org/#dom-response\n constructor (body = null, init = {}) {\n if (body !== null) {\n body = webidl.converters.BodyInit(body)\n }\n\n init = webidl.converters.ResponseInit(init)\n\n // TODO\n this[kRealm] = { settingsObject: {} }\n\n // 1. Set this’s response to a new response.\n this[kState] = makeResponse({})\n\n // 2. Set this’s headers to a new Headers object with this’s relevant\n // Realm, whose header list is this’s response’s header list and guard\n // is \"response\".\n this[kHeaders] = new Headers(kConstruct)\n this[kHeaders][kGuard] = 'response'\n this[kHeaders][kHeadersList] = this[kState].headersList\n this[kHeaders][kRealm] = this[kRealm]\n\n // 3. Let bodyWithType be null.\n let bodyWithType = null\n\n // 4. If body is non-null, then set bodyWithType to the result of extracting body.\n if (body != null) {\n const [extractedBody, type] = extractBody(body)\n bodyWithType = { body: extractedBody, type }\n }\n\n // 5. Perform initialize a response given this, init, and bodyWithType.\n initializeResponse(this, init, bodyWithType)\n }\n\n // Returns response’s type, e.g., \"cors\".\n get type () {\n webidl.brandCheck(this, Response)\n\n // The type getter steps are to return this’s response’s type.\n return this[kState].type\n }\n\n // Returns response’s URL, if it has one; otherwise the empty string.\n get url () {\n webidl.brandCheck(this, Response)\n\n const urlList = this[kState].urlList\n\n // The url getter steps are to return the empty string if this’s\n // response’s URL is null; otherwise this’s response’s URL,\n // serialized with exclude fragment set to true.\n const url = urlList[urlList.length - 1] ?? 
null\n\n if (url === null) {\n return ''\n }\n\n return URLSerializer(url, true)\n }\n\n // Returns whether response was obtained through a redirect.\n get redirected () {\n webidl.brandCheck(this, Response)\n\n // The redirected getter steps are to return true if this’s response’s URL\n // list has more than one item; otherwise false.\n return this[kState].urlList.length > 1\n }\n\n // Returns response’s status.\n get status () {\n webidl.brandCheck(this, Response)\n\n // The status getter steps are to return this’s response’s status.\n return this[kState].status\n }\n\n // Returns whether response’s status is an ok status.\n get ok () {\n webidl.brandCheck(this, Response)\n\n // The ok getter steps are to return true if this’s response’s status is an\n // ok status; otherwise false.\n return this[kState].status >= 200 && this[kState].status <= 299\n }\n\n // Returns response’s status message.\n get statusText () {\n webidl.brandCheck(this, Response)\n\n // The statusText getter steps are to return this’s response’s status\n // message.\n return this[kState].statusText\n }\n\n // Returns response’s headers as Headers.\n get headers () {\n webidl.brandCheck(this, Response)\n\n // The headers getter steps are to return this’s headers.\n return this[kHeaders]\n }\n\n get body () {\n webidl.brandCheck(this, Response)\n\n return this[kState].body ? this[kState].body.stream : null\n }\n\n get bodyUsed () {\n webidl.brandCheck(this, Response)\n\n return !!this[kState].body && util.isDisturbed(this[kState].body.stream)\n }\n\n // Returns a clone of response.\n clone () {\n webidl.brandCheck(this, Response)\n\n // 1. If this is unusable, then throw a TypeError.\n if (this.bodyUsed || (this.body && this.body.locked)) {\n throw webidl.errors.exception({\n header: 'Response.clone',\n message: 'Body has already been consumed.'\n })\n }\n\n // 2. Let clonedResponse be the result of cloning this’s response.\n const clonedResponse = cloneResponse(this[kState])\n\n // 3. Return the result of creating a Response object, given\n // clonedResponse, this’s headers’s guard, and this’s relevant Realm.\n const clonedResponseObject = new Response()\n clonedResponseObject[kState] = clonedResponse\n clonedResponseObject[kRealm] = this[kRealm]\n clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList\n clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard]\n clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm]\n\n return clonedResponseObject\n }\n}\n\nmixinBody(Response)\n\nObject.defineProperties(Response.prototype, {\n type: kEnumerableProperty,\n url: kEnumerableProperty,\n status: kEnumerableProperty,\n ok: kEnumerableProperty,\n redirected: kEnumerableProperty,\n statusText: kEnumerableProperty,\n headers: kEnumerableProperty,\n clone: kEnumerableProperty,\n body: kEnumerableProperty,\n bodyUsed: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'Response',\n configurable: true\n }\n})\n\nObject.defineProperties(Response, {\n json: kEnumerableProperty,\n redirect: kEnumerableProperty,\n error: kEnumerableProperty\n})\n\n// https://fetch.spec.whatwg.org/#concept-response-clone\nfunction cloneResponse (response) {\n // To clone a response response, run these steps:\n\n // 1. If response is a filtered response, then return a new identical\n // filtered response whose internal response is a clone of response’s\n // internal response.\n if (response.internalResponse) {\n return filterResponse(\n cloneResponse(response.internalResponse),\n response.type\n )\n }\n\n // 2. 
Let newResponse be a copy of response, except for its body.\n const newResponse = makeResponse({ ...response, body: null })\n\n // 3. If response’s body is non-null, then set newResponse’s body to the\n // result of cloning response’s body.\n if (response.body != null) {\n newResponse.body = cloneBody(response.body)\n }\n\n // 4. Return newResponse.\n return newResponse\n}\n\nfunction makeResponse (init) {\n return {\n aborted: false,\n rangeRequested: false,\n timingAllowPassed: false,\n requestIncludesCredentials: false,\n type: 'default',\n status: 200,\n timingInfo: null,\n cacheState: '',\n statusText: '',\n ...init,\n headersList: init.headersList\n ? new HeadersList(init.headersList)\n : new HeadersList(),\n urlList: init.urlList ? [...init.urlList] : []\n }\n}\n\nfunction makeNetworkError (reason) {\n const isError = isErrorLike(reason)\n return makeResponse({\n type: 'error',\n status: 0,\n error: isError\n ? reason\n : new Error(reason ? String(reason) : reason),\n aborted: reason && reason.name === 'AbortError'\n })\n}\n\nfunction makeFilteredResponse (response, state) {\n state = {\n internalResponse: response,\n ...state\n }\n\n return new Proxy(response, {\n get (target, p) {\n return p in state ? state[p] : target[p]\n },\n set (target, p, value) {\n assert(!(p in state))\n target[p] = value\n return true\n }\n })\n}\n\n// https://fetch.spec.whatwg.org/#concept-filtered-response\nfunction filterResponse (response, type) {\n // Set response to the following filtered response with response as its\n // internal response, depending on request’s response tainting:\n if (type === 'basic') {\n // A basic filtered response is a filtered response whose type is \"basic\"\n // and header list excludes any headers in internal response’s header list\n // whose name is a forbidden response-header name.\n\n // Note: undici does not implement forbidden response-header names\n return makeFilteredResponse(response, {\n type: 'basic',\n headersList: response.headersList\n })\n } else if (type === 'cors') {\n // A CORS filtered response is a filtered response whose type is \"cors\"\n // and header list excludes any headers in internal response’s header\n // list whose name is not a CORS-safelisted response-header name, given\n // internal response’s CORS-exposed header-name list.\n\n // Note: undici does not implement CORS-safelisted response-header names\n return makeFilteredResponse(response, {\n type: 'cors',\n headersList: response.headersList\n })\n } else if (type === 'opaque') {\n // An opaque filtered response is a filtered response whose type is\n // \"opaque\", URL list is the empty list, status is 0, status message\n // is the empty byte sequence, header list is empty, and body is null.\n\n return makeFilteredResponse(response, {\n type: 'opaque',\n urlList: Object.freeze([]),\n status: 0,\n statusText: '',\n body: null\n })\n } else if (type === 'opaqueredirect') {\n // An opaque-redirect filtered response is a filtered response whose type\n // is \"opaqueredirect\", status is 0, status message is the empty byte\n // sequence, header list is empty, and body is null.\n\n return makeFilteredResponse(response, {\n type: 'opaqueredirect',\n status: 0,\n statusText: '',\n headersList: [],\n body: null\n })\n } else {\n assert(false)\n }\n}\n\n// https://fetch.spec.whatwg.org/#appropriate-network-error\nfunction makeAppropriateNetworkError (fetchParams, err = null) {\n // 1. Assert: fetchParams is canceled.\n assert(isCancelled(fetchParams))\n\n // 2. 
Return an aborted network error if fetchParams is aborted;\n // otherwise return a network error.\n return isAborted(fetchParams)\n ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))\n : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))\n}\n\n// https://whatpr.org/fetch/1392.html#initialize-a-response\nfunction initializeResponse (response, init, body) {\n // 1. If init[\"status\"] is not in the range 200 to 599, inclusive, then\n // throw a RangeError.\n if (init.status !== null && (init.status < 200 || init.status > 599)) {\n throw new RangeError('init[\"status\"] must be in the range of 200 to 599, inclusive.')\n }\n\n // 2. If init[\"statusText\"] does not match the reason-phrase token production,\n // then throw a TypeError.\n if ('statusText' in init && init.statusText != null) {\n // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:\n // reason-phrase = *( HTAB / SP / VCHAR / obs-text )\n if (!isValidReasonPhrase(String(init.statusText))) {\n throw new TypeError('Invalid statusText')\n }\n }\n\n // 3. Set response’s response’s status to init[\"status\"].\n if ('status' in init && init.status != null) {\n response[kState].status = init.status\n }\n\n // 4. Set response’s response’s status message to init[\"statusText\"].\n if ('statusText' in init && init.statusText != null) {\n response[kState].statusText = init.statusText\n }\n\n // 5. If init[\"headers\"] exists, then fill response’s headers with init[\"headers\"].\n if ('headers' in init && init.headers != null) {\n fill(response[kHeaders], init.headers)\n }\n\n // 6. If body was given, then:\n if (body) {\n // 1. If response's status is a null body status, then throw a TypeError.\n if (nullBodyStatus.includes(response.status)) {\n throw webidl.errors.exception({\n header: 'Response constructor',\n message: 'Invalid response status code ' + response.status\n })\n }\n\n // 2. Set response's body to body's body.\n response[kState].body = body.body\n\n // 3. 
If body's type is non-null and response's header list does not contain\n // `Content-Type`, then append (`Content-Type`, body's type) to response's header list.\n if (body.type != null && !response[kState].headersList.contains('Content-Type')) {\n response[kState].headersList.append('content-type', body.type)\n }\n }\n}\n\nwebidl.converters.ReadableStream = webidl.interfaceConverter(\n ReadableStream\n)\n\nwebidl.converters.FormData = webidl.interfaceConverter(\n FormData\n)\n\nwebidl.converters.URLSearchParams = webidl.interfaceConverter(\n URLSearchParams\n)\n\n// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit\nwebidl.converters.XMLHttpRequestBodyInit = function (V) {\n if (typeof V === 'string') {\n return webidl.converters.USVString(V)\n }\n\n if (isBlobLike(V)) {\n return webidl.converters.Blob(V, { strict: false })\n }\n\n if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {\n return webidl.converters.BufferSource(V)\n }\n\n if (util.isFormDataLike(V)) {\n return webidl.converters.FormData(V, { strict: false })\n }\n\n if (V instanceof URLSearchParams) {\n return webidl.converters.URLSearchParams(V)\n }\n\n return webidl.converters.DOMString(V)\n}\n\n// https://fetch.spec.whatwg.org/#bodyinit\nwebidl.converters.BodyInit = function (V) {\n if (V instanceof ReadableStream) {\n return webidl.converters.ReadableStream(V)\n }\n\n // Note: the spec doesn't include async iterables,\n // this is an undici extension.\n if (V?.[Symbol.asyncIterator]) {\n return V\n }\n\n return webidl.converters.XMLHttpRequestBodyInit(V)\n}\n\nwebidl.converters.ResponseInit = webidl.dictionaryConverter([\n {\n key: 'status',\n converter: webidl.converters['unsigned short'],\n defaultValue: 200\n },\n {\n key: 'statusText',\n converter: webidl.converters.ByteString,\n defaultValue: ''\n },\n {\n key: 'headers',\n converter: webidl.converters.HeadersInit\n }\n])\n\nmodule.exports = {\n makeNetworkError,\n makeResponse,\n makeAppropriateNetworkError,\n filterResponse,\n Response,\n cloneResponse\n}\n","'use strict'\n\nmodule.exports = {\n kUrl: Symbol('url'),\n kHeaders: Symbol('headers'),\n kSignal: Symbol('signal'),\n kState: Symbol('state'),\n kGuard: Symbol('guard'),\n kRealm: Symbol('realm')\n}\n","'use strict'\n\nconst { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require('./constants')\nconst { getGlobalOrigin } = require('./global')\nconst { performance } = require('perf_hooks')\nconst { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util')\nconst assert = require('assert')\nconst { isUint8Array } = require('util/types')\n\n// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable\n/** @type {import('crypto')|undefined} */\nlet crypto\n\ntry {\n crypto = require('crypto')\n} catch {\n\n}\n\nfunction responseURL (response) {\n // https://fetch.spec.whatwg.org/#responses\n // A response has an associated URL. It is a pointer to the last URL\n // in response’s URL list and null if response’s URL list is empty.\n const urlList = response.urlList\n const length = urlList.length\n return length === 0 ? null : urlList[length - 1].toString()\n}\n\n// https://fetch.spec.whatwg.org/#concept-response-location-url\nfunction responseLocationURL (response, requestFragment) {\n // 1. If response’s status is not a redirect status, then return null.\n if (!redirectStatusSet.has(response.status)) {\n return null\n }\n\n // 2. 
Let location be the result of extracting header list values given\n // `Location` and response’s header list.\n let location = response.headersList.get('location')\n\n // 3. If location is a header value, then set location to the result of\n // parsing location with response’s URL.\n if (location !== null && isValidHeaderValue(location)) {\n location = new URL(location, responseURL(response))\n }\n\n // 4. If location is a URL whose fragment is null, then set location’s\n // fragment to requestFragment.\n if (location && !location.hash) {\n location.hash = requestFragment\n }\n\n // 5. Return location.\n return location\n}\n\n/** @returns {URL} */\nfunction requestCurrentURL (request) {\n return request.urlList[request.urlList.length - 1]\n}\n\nfunction requestBadPort (request) {\n // 1. Let url be request’s current URL.\n const url = requestCurrentURL(request)\n\n // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port,\n // then return blocked.\n if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) {\n return 'blocked'\n }\n\n // 3. Return allowed.\n return 'allowed'\n}\n\nfunction isErrorLike (object) {\n return object instanceof Error || (\n object?.constructor?.name === 'Error' ||\n object?.constructor?.name === 'DOMException'\n )\n}\n\n// Check whether |statusText| is a ByteString and\n// matches the Reason-Phrase token production.\n// RFC 2616: https://tools.ietf.org/html/rfc2616\n// RFC 7230: https://tools.ietf.org/html/rfc7230\n// \"reason-phrase = *( HTAB / SP / VCHAR / obs-text )\"\n// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116\nfunction isValidReasonPhrase (statusText) {\n for (let i = 0; i < statusText.length; ++i) {\n const c = statusText.charCodeAt(i)\n if (\n !(\n (\n c === 0x09 || // HTAB\n (c >= 0x20 && c <= 0x7e) || // SP / VCHAR\n (c >= 0x80 && c <= 0xff)\n ) // obs-text\n )\n ) {\n return false\n }\n }\n return true\n}\n\n/**\n * @see https://tools.ietf.org/html/rfc7230#section-3.2.6\n * @param {number} c\n */\nfunction isTokenCharCode (c) {\n switch (c) {\n case 0x22:\n case 0x28:\n case 0x29:\n case 0x2c:\n case 0x2f:\n case 0x3a:\n case 0x3b:\n case 0x3c:\n case 0x3d:\n case 0x3e:\n case 0x3f:\n case 0x40:\n case 0x5b:\n case 0x5c:\n case 0x5d:\n case 0x7b:\n case 0x7d:\n // DQUOTE and \"(),/:;<=>?@[\\]{}\"\n return false\n default:\n // VCHAR %x21-7E\n return c >= 0x21 && c <= 0x7e\n }\n}\n\n/**\n * @param {string} characters\n */\nfunction isValidHTTPToken (characters) {\n if (characters.length === 0) {\n return false\n }\n for (let i = 0; i < characters.length; ++i) {\n if (!isTokenCharCode(characters.charCodeAt(i))) {\n return false\n }\n }\n return true\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#header-name\n * @param {string} potentialValue\n */\nfunction isValidHeaderName (potentialValue) {\n return isValidHTTPToken(potentialValue)\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#header-value\n * @param {string} potentialValue\n */\nfunction isValidHeaderValue (potentialValue) {\n // - Has no leading or trailing HTTP tab or space bytes.\n // - Contains no 0x00 (NUL) or HTTP newline bytes.\n if (\n potentialValue.startsWith('\\t') ||\n potentialValue.startsWith(' ') ||\n potentialValue.endsWith('\\t') ||\n potentialValue.endsWith(' ')\n ) {\n return false\n }\n\n if (\n potentialValue.includes('\\0') ||\n potentialValue.includes('\\r') ||\n potentialValue.includes('\\n')\n ) {\n return false\n }\n\n return true\n}\n\n// 
https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect\nfunction setRequestReferrerPolicyOnRedirect (request, actualResponse) {\n // Given a request request and a response actualResponse, this algorithm\n // updates request’s referrer policy according to the Referrer-Policy\n // header (if any) in actualResponse.\n\n // 1. Let policy be the result of executing § 8.1 Parse a referrer policy\n // from a Referrer-Policy header on actualResponse.\n\n // 8.1 Parse a referrer policy from a Referrer-Policy header\n // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list.\n const { headersList } = actualResponse\n // 2. Let policy be the empty string.\n // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token.\n // 4. Return policy.\n const policyHeader = (headersList.get('referrer-policy') ?? '').split(',')\n\n // Note: As the referrer-policy can contain multiple policies\n // separated by comma, we need to loop through all of them\n // and pick the first valid one.\n // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy\n let policy = ''\n if (policyHeader.length > 0) {\n // The right-most policy takes precedence.\n // The left-most policy is the fallback.\n for (let i = policyHeader.length; i !== 0; i--) {\n const token = policyHeader[i - 1].trim()\n if (referrerPolicyTokens.has(token)) {\n policy = token\n break\n }\n }\n }\n\n // 2. If policy is not the empty string, then set request’s referrer policy to policy.\n if (policy !== '') {\n request.referrerPolicy = policy\n }\n}\n\n// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check\nfunction crossOriginResourcePolicyCheck () {\n // TODO\n return 'allowed'\n}\n\n// https://fetch.spec.whatwg.org/#concept-cors-check\nfunction corsCheck () {\n // TODO\n return 'success'\n}\n\n// https://fetch.spec.whatwg.org/#concept-tao-check\nfunction TAOCheck () {\n // TODO\n return 'success'\n}\n\nfunction appendFetchMetadata (httpRequest) {\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header\n // TODO\n\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header\n\n // 1. Assert: r’s url is a potentially trustworthy URL.\n // TODO\n\n // 2. Let header be a Structured Header whose value is a token.\n let header = null\n\n // 3. Set header’s value to r’s mode.\n header = httpRequest.mode\n\n // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list.\n httpRequest.headersList.set('sec-fetch-mode', header)\n\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header\n // TODO\n\n // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header\n // TODO\n}\n\n// https://fetch.spec.whatwg.org/#append-a-request-origin-header\nfunction appendRequestOriginHeader (request) {\n // 1. Let serializedOrigin be the result of byte-serializing a request origin with request.\n let serializedOrigin = request.origin\n\n // 2. If request’s response tainting is \"cors\" or request’s mode is \"websocket\", then append (`Origin`, serializedOrigin) to request’s header list.\n if (request.responseTainting === 'cors' || request.mode === 'websocket') {\n if (serializedOrigin) {\n request.headersList.append('origin', serializedOrigin)\n }\n\n // 3. 
Otherwise, if request’s method is neither `GET` nor `HEAD`, then:\n } else if (request.method !== 'GET' && request.method !== 'HEAD') {\n // 1. Switch on request’s referrer policy:\n switch (request.referrerPolicy) {\n case 'no-referrer':\n // Set serializedOrigin to `null`.\n serializedOrigin = null\n break\n case 'no-referrer-when-downgrade':\n case 'strict-origin':\n case 'strict-origin-when-cross-origin':\n // If request’s origin is a tuple origin, its scheme is \"https\", and request’s current URL’s scheme is not \"https\", then set serializedOrigin to `null`.\n if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {\n serializedOrigin = null\n }\n break\n case 'same-origin':\n // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`.\n if (!sameOrigin(request, requestCurrentURL(request))) {\n serializedOrigin = null\n }\n break\n default:\n // Do nothing.\n }\n\n if (serializedOrigin) {\n // 2. Append (`Origin`, serializedOrigin) to request’s header list.\n request.headersList.append('origin', serializedOrigin)\n }\n }\n}\n\nfunction coarsenedSharedCurrentTime (crossOriginIsolatedCapability) {\n // TODO\n return performance.now()\n}\n\n// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info\nfunction createOpaqueTimingInfo (timingInfo) {\n return {\n startTime: timingInfo.startTime ?? 0,\n redirectStartTime: 0,\n redirectEndTime: 0,\n postRedirectStartTime: timingInfo.startTime ?? 0,\n finalServiceWorkerStartTime: 0,\n finalNetworkResponseStartTime: 0,\n finalNetworkRequestStartTime: 0,\n endTime: 0,\n encodedBodySize: 0,\n decodedBodySize: 0,\n finalConnectionTimingInfo: null\n }\n}\n\n// https://html.spec.whatwg.org/multipage/origin.html#policy-container\nfunction makePolicyContainer () {\n // Note: the fetch spec doesn't make use of embedder policy or CSP list\n return {\n referrerPolicy: 'strict-origin-when-cross-origin'\n }\n}\n\n// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container\nfunction clonePolicyContainer (policyContainer) {\n return {\n referrerPolicy: policyContainer.referrerPolicy\n }\n}\n\n// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer\nfunction determineRequestsReferrer (request) {\n // 1. Let policy be request's referrer policy.\n const policy = request.referrerPolicy\n\n // Note: policy cannot (shouldn't) be null or an empty string.\n assert(policy)\n\n // 2. Let environment be request’s client.\n\n let referrerSource = null\n\n // 3. Switch on request’s referrer:\n if (request.referrer === 'client') {\n // Note: node isn't a browser and doesn't implement document/iframes,\n // so we bypass this step and replace it with our own.\n\n const globalOrigin = getGlobalOrigin()\n\n if (!globalOrigin || globalOrigin.origin === 'null') {\n return 'no-referrer'\n }\n\n // note: we need to clone it as it's mutated\n referrerSource = new URL(globalOrigin)\n } else if (request.referrer instanceof URL) {\n // Let referrerSource be request’s referrer.\n referrerSource = request.referrer\n }\n\n // 4. Let request’s referrerURL be the result of stripping referrerSource for\n // use as a referrer.\n let referrerURL = stripURLForReferrer(referrerSource)\n\n // 5. Let referrerOrigin be the result of stripping referrerSource for use as\n // a referrer, with the origin-only flag set to true.\n const referrerOrigin = stripURLForReferrer(referrerSource, true)\n\n // 6. 
If the result of serializing referrerURL is a string whose length is\n // greater than 4096, set referrerURL to referrerOrigin.\n if (referrerURL.toString().length > 4096) {\n referrerURL = referrerOrigin\n }\n\n const areSameOrigin = sameOrigin(request, referrerURL)\n const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&\n !isURLPotentiallyTrustworthy(request.url)\n\n // 8. Execute the switch statements corresponding to the value of policy:\n switch (policy) {\n case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)\n case 'unsafe-url': return referrerURL\n case 'same-origin':\n return areSameOrigin ? referrerOrigin : 'no-referrer'\n case 'origin-when-cross-origin':\n return areSameOrigin ? referrerURL : referrerOrigin\n case 'strict-origin-when-cross-origin': {\n const currentURL = requestCurrentURL(request)\n\n // 1. If the origin of referrerURL and the origin of request’s current\n // URL are the same, then return referrerURL.\n if (sameOrigin(referrerURL, currentURL)) {\n return referrerURL\n }\n\n // 2. If referrerURL is a potentially trustworthy URL and request’s\n // current URL is not a potentially trustworthy URL, then return no\n // referrer.\n if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {\n return 'no-referrer'\n }\n\n // 3. Return referrerOrigin.\n return referrerOrigin\n }\n case 'strict-origin': // eslint-disable-line\n /**\n * 1. If referrerURL is a potentially trustworthy URL and\n * request’s current URL is not a potentially trustworthy URL,\n * then return no referrer.\n * 2. Return referrerOrigin\n */\n case 'no-referrer-when-downgrade': // eslint-disable-line\n /**\n * 1. If referrerURL is a potentially trustworthy URL and\n * request’s current URL is not a potentially trustworthy URL,\n * then return no referrer.\n * 2. Return referrerOrigin\n */\n\n default: // eslint-disable-line\n return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin\n }\n}\n\n/**\n * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url\n * @param {URL} url\n * @param {boolean|undefined} originOnly\n */\nfunction stripURLForReferrer (url, originOnly) {\n // 1. Assert: url is a URL.\n assert(url instanceof URL)\n\n // 2. If url’s scheme is a local scheme, then return no referrer.\n if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') {\n return 'no-referrer'\n }\n\n // 3. Set url’s username to the empty string.\n url.username = ''\n\n // 4. Set url’s password to the empty string.\n url.password = ''\n\n // 5. Set url’s fragment to null.\n url.hash = ''\n\n // 6. If the origin-only flag is true, then:\n if (originOnly) {\n // 1. Set url’s path to « the empty string ».\n url.pathname = ''\n\n // 2. Set url’s query to null.\n url.search = ''\n }\n\n // 7. 
Return url.\n return url\n}\n\nfunction isURLPotentiallyTrustworthy (url) {\n if (!(url instanceof URL)) {\n return false\n }\n\n // If child of about, return true\n if (url.href === 'about:blank' || url.href === 'about:srcdoc') {\n return true\n }\n\n // If scheme is data, return true\n if (url.protocol === 'data:') return true\n\n // If file, return true\n if (url.protocol === 'file:') return true\n\n return isOriginPotentiallyTrustworthy(url.origin)\n\n function isOriginPotentiallyTrustworthy (origin) {\n // If origin is explicitly null, return false\n if (origin == null || origin === 'null') return false\n\n const originAsURL = new URL(origin)\n\n // If secure, return true\n if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') {\n return true\n }\n\n // If localhost or variants, return true\n if (/^127(?:\\.[0-9]+){0,2}\\.[0-9]+$|^\\[(?:0*:)*?:?0*1\\]$/.test(originAsURL.hostname) ||\n (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) ||\n (originAsURL.hostname.endsWith('.localhost'))) {\n return true\n }\n\n // If any other, return false\n return false\n }\n}\n\n/**\n * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist\n * @param {Uint8Array} bytes\n * @param {string} metadataList\n */\nfunction bytesMatch (bytes, metadataList) {\n // If node is not built with OpenSSL support, we cannot check\n // a request's integrity, so allow it by default (the spec will\n // allow requests if an invalid hash is given, as precedence).\n /* istanbul ignore if: only if node is built with --without-ssl */\n if (crypto === undefined) {\n return true\n }\n\n // 1. Let parsedMetadata be the result of parsing metadataList.\n const parsedMetadata = parseMetadata(metadataList)\n\n // 2. If parsedMetadata is no metadata, return true.\n if (parsedMetadata === 'no metadata') {\n return true\n }\n\n // 3. If parsedMetadata is the empty set, return true.\n if (parsedMetadata.length === 0) {\n return true\n }\n\n // 4. Let metadata be the result of getting the strongest\n // metadata from parsedMetadata.\n const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))\n // get the strongest algorithm\n const strongest = list[0].algo\n // get all entries that use the strongest algorithm; ignore weaker\n const metadata = list.filter((item) => item.algo === strongest)\n\n // 5. For each item in metadata:\n for (const item of metadata) {\n // 1. Let algorithm be the alg component of item.\n const algorithm = item.algo\n\n // 2. Let expectedValue be the val component of item.\n let expectedValue = item.hash\n\n // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e\n // \"be liberal with padding\". This is annoying, and it's not even in the spec.\n\n if (expectedValue.endsWith('==')) {\n expectedValue = expectedValue.slice(0, -2)\n }\n\n // 3. Let actualValue be the result of applying algorithm to bytes.\n let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')\n\n if (actualValue.endsWith('==')) {\n actualValue = actualValue.slice(0, -2)\n }\n\n // 4. If actualValue is a case-sensitive match for expectedValue,\n // return true.\n if (actualValue === expectedValue) {\n return true\n }\n\n let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')\n\n if (actualBase64URL.endsWith('==')) {\n actualBase64URL = actualBase64URL.slice(0, -2)\n }\n\n if (actualBase64URL === expectedValue) {\n return true\n }\n }\n\n // 6. 
Return false.\n return false\n}\n\n// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options\n// https://www.w3.org/TR/CSP2/#source-list-syntax\n// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1\nconst parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\\x21-\\x7e]?)?/i\n\n/**\n * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata\n * @param {string} metadata\n */\nfunction parseMetadata (metadata) {\n // 1. Let result be the empty set.\n /** @type {{ algo: string, hash: string }[]} */\n const result = []\n\n // 2. Let empty be equal to true.\n let empty = true\n\n const supportedHashes = crypto.getHashes()\n\n // 3. For each token returned by splitting metadata on spaces:\n for (const token of metadata.split(' ')) {\n // 1. Set empty to false.\n empty = false\n\n // 2. Parse token as a hash-with-options.\n const parsedToken = parseHashWithOptions.exec(token)\n\n // 3. If token does not parse, continue to the next token.\n if (parsedToken === null || parsedToken.groups === undefined) {\n // Note: Chromium blocks the request at this point, but Firefox\n // gives a warning that an invalid integrity was given. The\n // correct behavior is to ignore these, and subsequently not\n // check the integrity of the resource.\n continue\n }\n\n // 4. Let algorithm be the hash-algo component of token.\n const algorithm = parsedToken.groups.algo\n\n // 5. If algorithm is a hash function recognized by the user\n // agent, add the parsed token to result.\n if (supportedHashes.includes(algorithm.toLowerCase())) {\n result.push(parsedToken.groups)\n }\n }\n\n // 4. Return no metadata if empty is true, otherwise return result.\n if (empty === true) {\n return 'no metadata'\n }\n\n return result\n}\n\n// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request\nfunction tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {\n // TODO\n}\n\n/**\n * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}\n * @param {URL} A\n * @param {URL} B\n */\nfunction sameOrigin (A, B) {\n // 1. If A and B are the same opaque origin, then return true.\n if (A.origin === B.origin && A.origin === 'null') {\n return true\n }\n\n // 2. If A and B are both tuple origins and their schemes,\n // hosts, and port are identical, then return true.\n if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) {\n return true\n }\n\n // 3. Return false.\n return false\n}\n\nfunction createDeferredPromise () {\n let res\n let rej\n const promise = new Promise((resolve, reject) => {\n res = resolve\n rej = reject\n })\n\n return { promise, resolve: res, reject: rej }\n}\n\nfunction isAborted (fetchParams) {\n return fetchParams.controller.state === 'aborted'\n}\n\nfunction isCancelled (fetchParams) {\n return fetchParams.controller.state === 'aborted' ||\n fetchParams.controller.state === 'terminated'\n}\n\nconst normalizeMethodRecord = {\n delete: 'DELETE',\n DELETE: 'DELETE',\n get: 'GET',\n GET: 'GET',\n head: 'HEAD',\n HEAD: 'HEAD',\n options: 'OPTIONS',\n OPTIONS: 'OPTIONS',\n post: 'POST',\n POST: 'POST',\n put: 'PUT',\n PUT: 'PUT'\n}\n\n// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.\nObject.setPrototypeOf(normalizeMethodRecord, null)\n\n/**\n * @see https://fetch.spec.whatwg.org/#concept-method-normalize\n * @param {string} method\n */\nfunction normalizeMethod (method) {\n return normalizeMethodRecord[method.toLowerCase()] ?? 
method\n}\n\n// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string\nfunction serializeJavascriptValueToJSONString (value) {\n // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »).\n const result = JSON.stringify(value)\n\n // 2. If result is undefined, then throw a TypeError.\n if (result === undefined) {\n throw new TypeError('Value is not JSON serializable')\n }\n\n // 3. Assert: result is a string.\n assert(typeof result === 'string')\n\n // 4. Return result.\n return result\n}\n\n// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object\nconst esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))\n\n/**\n * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object\n * @param {() => unknown[]} iterator\n * @param {string} name name of the instance\n * @param {'key'|'value'|'key+value'} kind\n */\nfunction makeIterator (iterator, name, kind) {\n const object = {\n index: 0,\n kind,\n target: iterator\n }\n\n const i = {\n next () {\n // 1. Let interface be the interface for which the iterator prototype object exists.\n\n // 2. Let thisValue be the this value.\n\n // 3. Let object be ? ToObject(thisValue).\n\n // 4. If object is a platform object, then perform a security\n // check, passing:\n\n // 5. If object is not a default iterator object for interface,\n // then throw a TypeError.\n if (Object.getPrototypeOf(this) !== i) {\n throw new TypeError(\n `'next' called on an object that does not implement interface ${name} Iterator.`\n )\n }\n\n // 6. Let index be object’s index.\n // 7. Let kind be object’s kind.\n // 8. Let values be object’s target's value pairs to iterate over.\n const { index, kind, target } = object\n const values = target()\n\n // 9. Let len be the length of values.\n const len = values.length\n\n // 10. If index is greater than or equal to len, then return\n // CreateIterResultObject(undefined, true).\n if (index >= len) {\n return { value: undefined, done: true }\n }\n\n // 11. Let pair be the entry in values at index index.\n const pair = values[index]\n\n // 12. Set object’s index to index + 1.\n object.index = index + 1\n\n // 13. Return the iterator result for pair and kind.\n return iteratorResult(pair, kind)\n },\n // The class string of an iterator prototype object for a given interface is the\n // result of concatenating the identifier of the interface and the string \" Iterator\".\n [Symbol.toStringTag]: `${name} Iterator`\n }\n\n // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.\n Object.setPrototypeOf(i, esIteratorPrototype)\n // esIteratorPrototype needs to be the prototype of i\n // which is the prototype of an empty object. Yes, it's confusing.\n return Object.setPrototypeOf({}, i)\n}\n\n// https://webidl.spec.whatwg.org/#iterator-result\nfunction iteratorResult (pair, kind) {\n let result\n\n // 1. Let result be a value determined by the value of kind:\n switch (kind) {\n case 'key': {\n // 1. Let idlKey be pair’s key.\n // 2. Let key be the result of converting idlKey to an\n // ECMAScript value.\n // 3. result is key.\n result = pair[0]\n break\n }\n case 'value': {\n // 1. Let idlValue be pair’s value.\n // 2. Let value be the result of converting idlValue to\n // an ECMAScript value.\n // 3. result is value.\n result = pair[1]\n break\n }\n case 'key+value': {\n // 1. Let idlKey be pair’s key.\n // 2. Let idlValue be pair’s value.\n // 3. 
Let key be the result of converting idlKey to an\n // ECMAScript value.\n // 4. Let value be the result of converting idlValue to\n // an ECMAScript value.\n // 5. Let array be ! ArrayCreate(2).\n // 6. Call ! CreateDataProperty(array, \"0\", key).\n // 7. Call ! CreateDataProperty(array, \"1\", value).\n // 8. result is array.\n result = pair\n break\n }\n }\n\n // 2. Return CreateIterResultObject(result, false).\n return { value: result, done: false }\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#body-fully-read\n */\nasync function fullyReadBody (body, processBody, processBodyError) {\n // 1. If taskDestination is null, then set taskDestination to\n // the result of starting a new parallel queue.\n\n // 2. Let successSteps given a byte sequence bytes be to queue a\n // fetch task to run processBody given bytes, with taskDestination.\n const successSteps = processBody\n\n // 3. Let errorSteps be to queue a fetch task to run processBodyError,\n // with taskDestination.\n const errorSteps = processBodyError\n\n // 4. Let reader be the result of getting a reader for body’s stream.\n // If that threw an exception, then run errorSteps with that\n // exception and return.\n let reader\n\n try {\n reader = body.stream.getReader()\n } catch (e) {\n errorSteps(e)\n return\n }\n\n // 5. Read all bytes from reader, given successSteps and errorSteps.\n try {\n const result = await readAllBytes(reader)\n successSteps(result)\n } catch (e) {\n errorSteps(e)\n }\n}\n\n/** @type {ReadableStream} */\nlet ReadableStream = globalThis.ReadableStream\n\nfunction isReadableStreamLike (stream) {\n if (!ReadableStream) {\n ReadableStream = require('stream/web').ReadableStream\n }\n\n return stream instanceof ReadableStream || (\n stream[Symbol.toStringTag] === 'ReadableStream' &&\n typeof stream.tee === 'function'\n )\n}\n\nconst MAXIMUM_ARGUMENT_LENGTH = 65535\n\n/**\n * @see https://infra.spec.whatwg.org/#isomorphic-decode\n * @param {number[]|Uint8Array} input\n */\nfunction isomorphicDecode (input) {\n // 1. To isomorphic decode a byte sequence input, return a string whose code point\n // length is equal to input’s length and whose code points have the same values\n // as the values of input’s bytes, in the same order.\n\n if (input.length < MAXIMUM_ARGUMENT_LENGTH) {\n return String.fromCharCode(...input)\n }\n\n return input.reduce((previous, current) => previous + String.fromCharCode(current), '')\n}\n\n/**\n * @param {ReadableStreamController} controller\n */\nfunction readableStreamClose (controller) {\n try {\n controller.close()\n } catch (err) {\n // TODO: add comment explaining why this error occurs.\n if (!err.message.includes('Controller is already closed')) {\n throw err\n }\n }\n}\n\n/**\n * @see https://infra.spec.whatwg.org/#isomorphic-encode\n * @param {string} input\n */\nfunction isomorphicEncode (input) {\n // 1. Assert: input contains no code points greater than U+00FF.\n for (let i = 0; i < input.length; i++) {\n assert(input.charCodeAt(i) <= 0xFF)\n }\n\n // 2. 
Return a byte sequence whose length is equal to input’s code\n // point length and whose bytes have the same values as the\n // values of input’s code points, in the same order\n return input\n}\n\n/**\n * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes\n * @see https://streams.spec.whatwg.org/#read-loop\n * @param {ReadableStreamDefaultReader} reader\n */\nasync function readAllBytes (reader) {\n const bytes = []\n let byteLength = 0\n\n while (true) {\n const { done, value: chunk } = await reader.read()\n\n if (done) {\n // 1. Call successSteps with bytes.\n return Buffer.concat(bytes, byteLength)\n }\n\n // 1. If chunk is not a Uint8Array object, call failureSteps\n // with a TypeError and abort these steps.\n if (!isUint8Array(chunk)) {\n throw new TypeError('Received non-Uint8Array chunk')\n }\n\n // 2. Append the bytes represented by chunk to bytes.\n bytes.push(chunk)\n byteLength += chunk.length\n\n // 3. Read-loop given reader, bytes, successSteps, and failureSteps.\n }\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#is-local\n * @param {URL} url\n */\nfunction urlIsLocal (url) {\n assert('protocol' in url) // ensure it's a url object\n\n const protocol = url.protocol\n\n return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:'\n}\n\n/**\n * @param {string|URL} url\n */\nfunction urlHasHttpsScheme (url) {\n if (typeof url === 'string') {\n return url.startsWith('https:')\n }\n\n return url.protocol === 'https:'\n}\n\n/**\n * @see https://fetch.spec.whatwg.org/#http-scheme\n * @param {URL} url\n */\nfunction urlIsHttpHttpsScheme (url) {\n assert('protocol' in url) // ensure it's a url object\n\n const protocol = url.protocol\n\n return protocol === 'http:' || protocol === 'https:'\n}\n\n/**\n * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.\n */\nconst hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key))\n\nmodule.exports = {\n isAborted,\n isCancelled,\n createDeferredPromise,\n ReadableStreamFrom,\n toUSVString,\n tryUpgradeRequestToAPotentiallyTrustworthyURL,\n coarsenedSharedCurrentTime,\n determineRequestsReferrer,\n makePolicyContainer,\n clonePolicyContainer,\n appendFetchMetadata,\n appendRequestOriginHeader,\n TAOCheck,\n corsCheck,\n crossOriginResourcePolicyCheck,\n createOpaqueTimingInfo,\n setRequestReferrerPolicyOnRedirect,\n isValidHTTPToken,\n requestBadPort,\n requestCurrentURL,\n responseURL,\n responseLocationURL,\n isBlobLike,\n isURLPotentiallyTrustworthy,\n isValidReasonPhrase,\n sameOrigin,\n normalizeMethod,\n serializeJavascriptValueToJSONString,\n makeIterator,\n isValidHeaderName,\n isValidHeaderValue,\n hasOwn,\n isErrorLike,\n fullyReadBody,\n bytesMatch,\n isReadableStreamLike,\n readableStreamClose,\n isomorphicEncode,\n isomorphicDecode,\n urlIsLocal,\n urlHasHttpsScheme,\n urlIsHttpHttpsScheme,\n readAllBytes,\n normalizeMethodRecord\n}\n","'use strict'\n\nconst { types } = require('util')\nconst { hasOwn, toUSVString } = require('./util')\n\n/** @type {import('../../types/webidl').Webidl} */\nconst webidl = {}\nwebidl.converters = {}\nwebidl.util = {}\nwebidl.errors = {}\n\nwebidl.errors.exception = function (message) {\n return new TypeError(`${message.header}: ${message.message}`)\n}\n\nwebidl.errors.conversionFailed = function (context) {\n const plural = context.types.length === 1 ? 
'' : ' one of'\n const message =\n `${context.argument} could not be converted to` +\n `${plural}: ${context.types.join(', ')}.`\n\n return webidl.errors.exception({\n header: context.prefix,\n message\n })\n}\n\nwebidl.errors.invalidArgument = function (context) {\n return webidl.errors.exception({\n header: context.prefix,\n message: `\"${context.value}\" is an invalid ${context.type}.`\n })\n}\n\n// https://webidl.spec.whatwg.org/#implements\nwebidl.brandCheck = function (V, I, opts = undefined) {\n if (opts?.strict !== false && !(V instanceof I)) {\n throw new TypeError('Illegal invocation')\n } else {\n return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]\n }\n}\n\nwebidl.argumentLengthCheck = function ({ length }, min, ctx) {\n if (length < min) {\n throw webidl.errors.exception({\n message: `${min} argument${min !== 1 ? 's' : ''} required, ` +\n `but${length ? ' only' : ''} ${length} found.`,\n ...ctx\n })\n }\n}\n\nwebidl.illegalConstructor = function () {\n throw webidl.errors.exception({\n header: 'TypeError',\n message: 'Illegal constructor'\n })\n}\n\n// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values\nwebidl.util.Type = function (V) {\n switch (typeof V) {\n case 'undefined': return 'Undefined'\n case 'boolean': return 'Boolean'\n case 'string': return 'String'\n case 'symbol': return 'Symbol'\n case 'number': return 'Number'\n case 'bigint': return 'BigInt'\n case 'function':\n case 'object': {\n if (V === null) {\n return 'Null'\n }\n\n return 'Object'\n }\n }\n}\n\n// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint\nwebidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {\n let upperBound\n let lowerBound\n\n // 1. If bitLength is 64, then:\n if (bitLength === 64) {\n // 1. Let upperBound be 2^53 − 1.\n upperBound = Math.pow(2, 53) - 1\n\n // 2. If signedness is \"unsigned\", then let lowerBound be 0.\n if (signedness === 'unsigned') {\n lowerBound = 0\n } else {\n // 3. Otherwise let lowerBound be −2^53 + 1.\n lowerBound = Math.pow(-2, 53) + 1\n }\n } else if (signedness === 'unsigned') {\n // 2. Otherwise, if signedness is \"unsigned\", then:\n\n // 1. Let lowerBound be 0.\n lowerBound = 0\n\n // 2. Let upperBound be 2^bitLength − 1.\n upperBound = Math.pow(2, bitLength) - 1\n } else {\n // 3. Otherwise:\n\n // 1. Let lowerBound be -2^bitLength − 1.\n lowerBound = Math.pow(-2, bitLength) - 1\n\n // 2. Let upperBound be 2^bitLength − 1 − 1.\n upperBound = Math.pow(2, bitLength - 1) - 1\n }\n\n // 4. Let x be ? ToNumber(V).\n let x = Number(V)\n\n // 5. If x is −0, then set x to +0.\n if (x === 0) {\n x = 0\n }\n\n // 6. If the conversion is to an IDL type associated\n // with the [EnforceRange] extended attribute, then:\n if (opts.enforceRange === true) {\n // 1. If x is NaN, +∞, or −∞, then throw a TypeError.\n if (\n Number.isNaN(x) ||\n x === Number.POSITIVE_INFINITY ||\n x === Number.NEGATIVE_INFINITY\n ) {\n throw webidl.errors.exception({\n header: 'Integer conversion',\n message: `Could not convert ${V} to an integer.`\n })\n }\n\n // 2. Set x to IntegerPart(x).\n x = webidl.util.IntegerPart(x)\n\n // 3. If x < lowerBound or x > upperBound, then\n // throw a TypeError.\n if (x < lowerBound || x > upperBound) {\n throw webidl.errors.exception({\n header: 'Integer conversion',\n message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`\n })\n }\n\n // 4. Return x.\n return x\n }\n\n // 7. 
If x is not NaN and the conversion is to an IDL\n // type associated with the [Clamp] extended\n // attribute, then:\n if (!Number.isNaN(x) && opts.clamp === true) {\n // 1. Set x to min(max(x, lowerBound), upperBound).\n x = Math.min(Math.max(x, lowerBound), upperBound)\n\n // 2. Round x to the nearest integer, choosing the\n // even integer if it lies halfway between two,\n // and choosing +0 rather than −0.\n if (Math.floor(x) % 2 === 0) {\n x = Math.floor(x)\n } else {\n x = Math.ceil(x)\n }\n\n // 3. Return x.\n return x\n }\n\n // 8. If x is NaN, +0, +∞, or −∞, then return +0.\n if (\n Number.isNaN(x) ||\n (x === 0 && Object.is(0, x)) ||\n x === Number.POSITIVE_INFINITY ||\n x === Number.NEGATIVE_INFINITY\n ) {\n return 0\n }\n\n // 9. Set x to IntegerPart(x).\n x = webidl.util.IntegerPart(x)\n\n // 10. Set x to x modulo 2^bitLength.\n x = x % Math.pow(2, bitLength)\n\n // 11. If signedness is \"signed\" and x ≥ 2^bitLength − 1,\n // then return x − 2^bitLength.\n if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) {\n return x - Math.pow(2, bitLength)\n }\n\n // 12. Otherwise, return x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart\nwebidl.util.IntegerPart = function (n) {\n // 1. Let r be floor(abs(n)).\n const r = Math.floor(Math.abs(n))\n\n // 2. If n < 0, then return -1 × r.\n if (n < 0) {\n return -1 * r\n }\n\n // 3. Otherwise, return r.\n return r\n}\n\n// https://webidl.spec.whatwg.org/#es-sequence\nwebidl.sequenceConverter = function (converter) {\n return (V) => {\n // 1. If Type(V) is not Object, throw a TypeError.\n if (webidl.util.Type(V) !== 'Object') {\n throw webidl.errors.exception({\n header: 'Sequence',\n message: `Value of type ${webidl.util.Type(V)} is not an Object.`\n })\n }\n\n // 2. Let method be ? GetMethod(V, @@iterator).\n /** @type {Generator} */\n const method = V?.[Symbol.iterator]?.()\n const seq = []\n\n // 3. If method is undefined, throw a TypeError.\n if (\n method === undefined ||\n typeof method.next !== 'function'\n ) {\n throw webidl.errors.exception({\n header: 'Sequence',\n message: 'Object is not an iterator.'\n })\n }\n\n // https://webidl.spec.whatwg.org/#create-sequence-from-iterable\n while (true) {\n const { done, value } = method.next()\n\n if (done) {\n break\n }\n\n seq.push(converter(value))\n }\n\n return seq\n }\n}\n\n// https://webidl.spec.whatwg.org/#es-to-record\nwebidl.recordConverter = function (keyConverter, valueConverter) {\n return (O) => {\n // 1. If Type(O) is not Object, throw a TypeError.\n if (webidl.util.Type(O) !== 'Object') {\n throw webidl.errors.exception({\n header: 'Record',\n message: `Value of type ${webidl.util.Type(O)} is not an Object.`\n })\n }\n\n // 2. Let result be a new empty instance of record.\n const result = {}\n\n if (!types.isProxy(O)) {\n // Object.keys only returns enumerable properties\n const keys = Object.keys(O)\n\n for (const key of keys) {\n // 1. Let typedKey be key converted to an IDL value of type K.\n const typedKey = keyConverter(key)\n\n // 2. Let value be ? Get(O, key).\n // 3. Let typedValue be value converted to an IDL value of type V.\n const typedValue = valueConverter(O[key])\n\n // 4. Set result[typedKey] to typedValue.\n result[typedKey] = typedValue\n }\n\n // 5. Return result.\n return result\n }\n\n // 3. Let keys be ? O.[[OwnPropertyKeys]]().\n const keys = Reflect.ownKeys(O)\n\n // 4. For each key of keys.\n for (const key of keys) {\n // 1. Let desc be ? 
O.[[GetOwnProperty]](key).\n const desc = Reflect.getOwnPropertyDescriptor(O, key)\n\n // 2. If desc is not undefined and desc.[[Enumerable]] is true:\n if (desc?.enumerable) {\n // 1. Let typedKey be key converted to an IDL value of type K.\n const typedKey = keyConverter(key)\n\n // 2. Let value be ? Get(O, key).\n // 3. Let typedValue be value converted to an IDL value of type V.\n const typedValue = valueConverter(O[key])\n\n // 4. Set result[typedKey] to typedValue.\n result[typedKey] = typedValue\n }\n }\n\n // 5. Return result.\n return result\n }\n}\n\nwebidl.interfaceConverter = function (i) {\n return (V, opts = {}) => {\n if (opts.strict !== false && !(V instanceof i)) {\n throw webidl.errors.exception({\n header: i.name,\n message: `Expected ${V} to be an instance of ${i.name}.`\n })\n }\n\n return V\n }\n}\n\nwebidl.dictionaryConverter = function (converters) {\n return (dictionary) => {\n const type = webidl.util.Type(dictionary)\n const dict = {}\n\n if (type === 'Null' || type === 'Undefined') {\n return dict\n } else if (type !== 'Object') {\n throw webidl.errors.exception({\n header: 'Dictionary',\n message: `Expected ${dictionary} to be one of: Null, Undefined, Object.`\n })\n }\n\n for (const options of converters) {\n const { key, defaultValue, required, converter } = options\n\n if (required === true) {\n if (!hasOwn(dictionary, key)) {\n throw webidl.errors.exception({\n header: 'Dictionary',\n message: `Missing required key \"${key}\".`\n })\n }\n }\n\n let value = dictionary[key]\n const hasDefault = hasOwn(options, 'defaultValue')\n\n // Only use defaultValue if value is undefined and\n // a defaultValue options was provided.\n if (hasDefault && value !== null) {\n value = value ?? defaultValue\n }\n\n // A key can be optional and have no default value.\n // When this happens, do not perform a conversion,\n // and do not assign the key a value.\n if (required || hasDefault || value !== undefined) {\n value = converter(value)\n\n if (\n options.allowedValues &&\n !options.allowedValues.includes(value)\n ) {\n throw webidl.errors.exception({\n header: 'Dictionary',\n message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.`\n })\n }\n\n dict[key] = value\n }\n }\n\n return dict\n }\n}\n\nwebidl.nullableConverter = function (converter) {\n return (V) => {\n if (V === null) {\n return V\n }\n\n return converter(V)\n }\n}\n\n// https://webidl.spec.whatwg.org/#es-DOMString\nwebidl.converters.DOMString = function (V, opts = {}) {\n // 1. If V is null and the conversion is to an IDL type\n // associated with the [LegacyNullToEmptyString]\n // extended attribute, then return the DOMString value\n // that represents the empty string.\n if (V === null && opts.legacyNullToEmptyString) {\n return ''\n }\n\n // 2. Let x be ? ToString(V).\n if (typeof V === 'symbol') {\n throw new TypeError('Could not convert argument of type symbol to string.')\n }\n\n // 3. Return the IDL DOMString value that represents the\n // same sequence of code units as the one the\n // ECMAScript String value x represents.\n return String(V)\n}\n\n// https://webidl.spec.whatwg.org/#es-ByteString\nwebidl.converters.ByteString = function (V) {\n // 1. Let x be ? ToString(V).\n // Note: DOMString converter perform ? ToString(V)\n const x = webidl.converters.DOMString(V)\n\n // 2. 
If the value of any element of x is greater than\n // 255, then throw a TypeError.\n for (let index = 0; index < x.length; index++) {\n if (x.charCodeAt(index) > 255) {\n throw new TypeError(\n 'Cannot convert argument to a ByteString because the character at ' +\n `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.`\n )\n }\n }\n\n // 3. Return an IDL ByteString value whose length is the\n // length of x, and where the value of each element is\n // the value of the corresponding element of x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-USVString\nwebidl.converters.USVString = toUSVString\n\n// https://webidl.spec.whatwg.org/#es-boolean\nwebidl.converters.boolean = function (V) {\n // 1. Let x be the result of computing ToBoolean(V).\n const x = Boolean(V)\n\n // 2. Return the IDL boolean value that is the one that represents\n // the same truth value as the ECMAScript Boolean value x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-any\nwebidl.converters.any = function (V) {\n return V\n}\n\n// https://webidl.spec.whatwg.org/#es-long-long\nwebidl.converters['long long'] = function (V) {\n // 1. Let x be ? ConvertToInt(V, 64, \"signed\").\n const x = webidl.util.ConvertToInt(V, 64, 'signed')\n\n // 2. Return the IDL long long value that represents\n // the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-unsigned-long-long\nwebidl.converters['unsigned long long'] = function (V) {\n // 1. Let x be ? ConvertToInt(V, 64, \"unsigned\").\n const x = webidl.util.ConvertToInt(V, 64, 'unsigned')\n\n // 2. Return the IDL unsigned long long value that\n // represents the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-unsigned-long\nwebidl.converters['unsigned long'] = function (V) {\n // 1. Let x be ? ConvertToInt(V, 32, \"unsigned\").\n const x = webidl.util.ConvertToInt(V, 32, 'unsigned')\n\n // 2. Return the IDL unsigned long value that\n // represents the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#es-unsigned-short\nwebidl.converters['unsigned short'] = function (V, opts) {\n // 1. Let x be ? ConvertToInt(V, 16, \"unsigned\").\n const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts)\n\n // 2. Return the IDL unsigned short value that represents\n // the same numeric value as x.\n return x\n}\n\n// https://webidl.spec.whatwg.org/#idl-ArrayBuffer\nwebidl.converters.ArrayBuffer = function (V, opts = {}) {\n // 1. If Type(V) is not Object, or V does not have an\n // [[ArrayBufferData]] internal slot, then throw a\n // TypeError.\n // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances\n // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances\n if (\n webidl.util.Type(V) !== 'Object' ||\n !types.isAnyArrayBuffer(V)\n ) {\n throw webidl.errors.conversionFailed({\n prefix: `${V}`,\n argument: `${V}`,\n types: ['ArrayBuffer']\n })\n }\n\n // 2. If the conversion is not to an IDL type associated\n // with the [AllowShared] extended attribute, and\n // IsSharedArrayBuffer(V) is true, then throw a\n // TypeError.\n if (opts.allowShared === false && types.isSharedArrayBuffer(V)) {\n throw webidl.errors.exception({\n header: 'ArrayBuffer',\n message: 'SharedArrayBuffer is not allowed.'\n })\n }\n\n // 3. 
If the conversion is not to an IDL type associated\n // with the [AllowResizable] extended attribute, and\n // IsResizableArrayBuffer(V) is true, then throw a\n // TypeError.\n // Note: resizable ArrayBuffers are currently a proposal.\n\n // 4. Return the IDL ArrayBuffer value that is a\n // reference to the same object as V.\n return V\n}\n\nwebidl.converters.TypedArray = function (V, T, opts = {}) {\n // 1. Let T be the IDL type V is being converted to.\n\n // 2. If Type(V) is not Object, or V does not have a\n // [[TypedArrayName]] internal slot with a value\n // equal to T’s name, then throw a TypeError.\n if (\n webidl.util.Type(V) !== 'Object' ||\n !types.isTypedArray(V) ||\n V.constructor.name !== T.name\n ) {\n throw webidl.errors.conversionFailed({\n prefix: `${T.name}`,\n argument: `${V}`,\n types: [T.name]\n })\n }\n\n // 3. If the conversion is not to an IDL type associated\n // with the [AllowShared] extended attribute, and\n // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is\n // true, then throw a TypeError.\n if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {\n throw webidl.errors.exception({\n header: 'ArrayBuffer',\n message: 'SharedArrayBuffer is not allowed.'\n })\n }\n\n // 4. If the conversion is not to an IDL type associated\n // with the [AllowResizable] extended attribute, and\n // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is\n // true, then throw a TypeError.\n // Note: resizable array buffers are currently a proposal\n\n // 5. Return the IDL value of type T that is a reference\n // to the same object as V.\n return V\n}\n\nwebidl.converters.DataView = function (V, opts = {}) {\n // 1. If Type(V) is not Object, or V does not have a\n // [[DataView]] internal slot, then throw a TypeError.\n if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {\n throw webidl.errors.exception({\n header: 'DataView',\n message: 'Object is not a DataView.'\n })\n }\n\n // 2. If the conversion is not to an IDL type associated\n // with the [AllowShared] extended attribute, and\n // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,\n // then throw a TypeError.\n if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {\n throw webidl.errors.exception({\n header: 'ArrayBuffer',\n message: 'SharedArrayBuffer is not allowed.'\n })\n }\n\n // 3. If the conversion is not to an IDL type associated\n // with the [AllowResizable] extended attribute, and\n // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is\n // true, then throw a TypeError.\n // Note: resizable ArrayBuffers are currently a proposal\n\n // 4. 
Return the IDL DataView value that is a reference\n // to the same object as V.\n return V\n}\n\n// https://webidl.spec.whatwg.org/#BufferSource\nwebidl.converters.BufferSource = function (V, opts = {}) {\n if (types.isAnyArrayBuffer(V)) {\n return webidl.converters.ArrayBuffer(V, opts)\n }\n\n if (types.isTypedArray(V)) {\n return webidl.converters.TypedArray(V, V.constructor)\n }\n\n if (types.isDataView(V)) {\n return webidl.converters.DataView(V, opts)\n }\n\n throw new TypeError(`Could not convert ${V} to a BufferSource.`)\n}\n\nwebidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(\n webidl.converters.ByteString\n)\n\nwebidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter(\n webidl.converters['sequence<ByteString>']\n)\n\nwebidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter(\n webidl.converters.ByteString,\n webidl.converters.ByteString\n)\n\nmodule.exports = {\n webidl\n}\n","'use strict'\n\n/**\n * @see https://encoding.spec.whatwg.org/#concept-encoding-get\n * @param {string|undefined} label\n */\nfunction getEncoding (label) {\n if (!label) {\n return 'failure'\n }\n\n // 1. Remove any leading and trailing ASCII whitespace from label.\n // 2. If label is an ASCII case-insensitive match for any of the\n // labels listed in the table below, then return the\n // corresponding encoding; otherwise return failure.\n switch (label.trim().toLowerCase()) {\n case 'unicode-1-1-utf-8':\n case 'unicode11utf8':\n case 'unicode20utf8':\n case 'utf-8':\n case 'utf8':\n case 'x-unicode20utf8':\n return 'UTF-8'\n case '866':\n case 'cp866':\n case 'csibm866':\n case 'ibm866':\n return 'IBM866'\n case 'csisolatin2':\n case 'iso-8859-2':\n case 'iso-ir-101':\n case 'iso8859-2':\n case 'iso88592':\n case 'iso_8859-2':\n case 'iso_8859-2:1987':\n case 'l2':\n case 'latin2':\n return 'ISO-8859-2'\n case 'csisolatin3':\n case 'iso-8859-3':\n case 'iso-ir-109':\n case 'iso8859-3':\n case 'iso88593':\n case 'iso_8859-3':\n case 'iso_8859-3:1988':\n case 'l3':\n case 'latin3':\n return 'ISO-8859-3'\n case 'csisolatin4':\n case 'iso-8859-4':\n case 'iso-ir-110':\n case 'iso8859-4':\n case 'iso88594':\n case 'iso_8859-4':\n case 'iso_8859-4:1988':\n case 'l4':\n case 'latin4':\n return 'ISO-8859-4'\n case 'csisolatincyrillic':\n case 'cyrillic':\n case 'iso-8859-5':\n case 'iso-ir-144':\n case 'iso8859-5':\n case 'iso88595':\n case 'iso_8859-5':\n case 'iso_8859-5:1988':\n return 'ISO-8859-5'\n case 'arabic':\n case 'asmo-708':\n case 'csiso88596e':\n case 'csiso88596i':\n case 'csisolatinarabic':\n case 'ecma-114':\n case 'iso-8859-6':\n case 'iso-8859-6-e':\n case 'iso-8859-6-i':\n case 'iso-ir-127':\n case 'iso8859-6':\n case 'iso88596':\n case 'iso_8859-6':\n case 'iso_8859-6:1987':\n return 'ISO-8859-6'\n case 'csisolatingreek':\n case 'ecma-118':\n case 'elot_928':\n case 'greek':\n case 'greek8':\n case 'iso-8859-7':\n case 'iso-ir-126':\n case 'iso8859-7':\n case 'iso88597':\n case 'iso_8859-7':\n case 'iso_8859-7:1987':\n case 'sun_eu_greek':\n return 'ISO-8859-7'\n case 'csiso88598e':\n case 'csisolatinhebrew':\n case 'hebrew':\n case 'iso-8859-8':\n case 'iso-8859-8-e':\n case 'iso-ir-138':\n case 'iso8859-8':\n case 'iso88598':\n case 'iso_8859-8':\n case 'iso_8859-8:1988':\n case 'visual':\n return 'ISO-8859-8'\n case 'csiso88598i':\n case 'iso-8859-8-i':\n case 'logical':\n return 'ISO-8859-8-I'\n case 'csisolatin6':\n case 'iso-8859-10':\n case 'iso-ir-157':\n case 'iso8859-10':\n case 'iso885910':\n case 'l6':\n case 'latin6':\n return 'ISO-8859-10'\n case 'iso-8859-13':\n case 'iso8859-13':\n 
case 'iso885913':\n return 'ISO-8859-13'\n case 'iso-8859-14':\n case 'iso8859-14':\n case 'iso885914':\n return 'ISO-8859-14'\n case 'csisolatin9':\n case 'iso-8859-15':\n case 'iso8859-15':\n case 'iso885915':\n case 'iso_8859-15':\n case 'l9':\n return 'ISO-8859-15'\n case 'iso-8859-16':\n return 'ISO-8859-16'\n case 'cskoi8r':\n case 'koi':\n case 'koi8':\n case 'koi8-r':\n case 'koi8_r':\n return 'KOI8-R'\n case 'koi8-ru':\n case 'koi8-u':\n return 'KOI8-U'\n case 'csmacintosh':\n case 'mac':\n case 'macintosh':\n case 'x-mac-roman':\n return 'macintosh'\n case 'iso-8859-11':\n case 'iso8859-11':\n case 'iso885911':\n case 'tis-620':\n case 'windows-874':\n return 'windows-874'\n case 'cp1250':\n case 'windows-1250':\n case 'x-cp1250':\n return 'windows-1250'\n case 'cp1251':\n case 'windows-1251':\n case 'x-cp1251':\n return 'windows-1251'\n case 'ansi_x3.4-1968':\n case 'ascii':\n case 'cp1252':\n case 'cp819':\n case 'csisolatin1':\n case 'ibm819':\n case 'iso-8859-1':\n case 'iso-ir-100':\n case 'iso8859-1':\n case 'iso88591':\n case 'iso_8859-1':\n case 'iso_8859-1:1987':\n case 'l1':\n case 'latin1':\n case 'us-ascii':\n case 'windows-1252':\n case 'x-cp1252':\n return 'windows-1252'\n case 'cp1253':\n case 'windows-1253':\n case 'x-cp1253':\n return 'windows-1253'\n case 'cp1254':\n case 'csisolatin5':\n case 'iso-8859-9':\n case 'iso-ir-148':\n case 'iso8859-9':\n case 'iso88599':\n case 'iso_8859-9':\n case 'iso_8859-9:1989':\n case 'l5':\n case 'latin5':\n case 'windows-1254':\n case 'x-cp1254':\n return 'windows-1254'\n case 'cp1255':\n case 'windows-1255':\n case 'x-cp1255':\n return 'windows-1255'\n case 'cp1256':\n case 'windows-1256':\n case 'x-cp1256':\n return 'windows-1256'\n case 'cp1257':\n case 'windows-1257':\n case 'x-cp1257':\n return 'windows-1257'\n case 'cp1258':\n case 'windows-1258':\n case 'x-cp1258':\n return 'windows-1258'\n case 'x-mac-cyrillic':\n case 'x-mac-ukrainian':\n return 'x-mac-cyrillic'\n case 'chinese':\n case 'csgb2312':\n case 'csiso58gb231280':\n case 'gb2312':\n case 'gb_2312':\n case 'gb_2312-80':\n case 'gbk':\n case 'iso-ir-58':\n case 'x-gbk':\n return 'GBK'\n case 'gb18030':\n return 'gb18030'\n case 'big5':\n case 'big5-hkscs':\n case 'cn-big5':\n case 'csbig5':\n case 'x-x-big5':\n return 'Big5'\n case 'cseucpkdfmtjapanese':\n case 'euc-jp':\n case 'x-euc-jp':\n return 'EUC-JP'\n case 'csiso2022jp':\n case 'iso-2022-jp':\n return 'ISO-2022-JP'\n case 'csshiftjis':\n case 'ms932':\n case 'ms_kanji':\n case 'shift-jis':\n case 'shift_jis':\n case 'sjis':\n case 'windows-31j':\n case 'x-sjis':\n return 'Shift_JIS'\n case 'cseuckr':\n case 'csksc56011987':\n case 'euc-kr':\n case 'iso-ir-149':\n case 'korean':\n case 'ks_c_5601-1987':\n case 'ks_c_5601-1989':\n case 'ksc5601':\n case 'ksc_5601':\n case 'windows-949':\n return 'EUC-KR'\n case 'csiso2022kr':\n case 'hz-gb-2312':\n case 'iso-2022-cn':\n case 'iso-2022-cn-ext':\n case 'iso-2022-kr':\n case 'replacement':\n return 'replacement'\n case 'unicodefffe':\n case 'utf-16be':\n return 'UTF-16BE'\n case 'csunicode':\n case 'iso-10646-ucs-2':\n case 'ucs-2':\n case 'unicode':\n case 'unicodefeff':\n case 'utf-16':\n case 'utf-16le':\n return 'UTF-16LE'\n case 'x-user-defined':\n return 'x-user-defined'\n default: return 'failure'\n }\n}\n\nmodule.exports = {\n getEncoding\n}\n","'use strict'\n\nconst {\n staticPropertyDescriptors,\n readOperation,\n fireAProgressEvent\n} = require('./util')\nconst {\n kState,\n kError,\n kResult,\n kEvents,\n kAborted\n} = 
require('./symbols')\nconst { webidl } = require('../fetch/webidl')\nconst { kEnumerableProperty } = require('../core/util')\n\nclass FileReader extends EventTarget {\n constructor () {\n super()\n\n this[kState] = 'empty'\n this[kResult] = null\n this[kError] = null\n this[kEvents] = {\n loadend: null,\n error: null,\n abort: null,\n load: null,\n progress: null,\n loadstart: null\n }\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer\n * @param {import('buffer').Blob} blob\n */\n readAsArrayBuffer (blob) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n // The readAsArrayBuffer(blob) method, when invoked,\n // must initiate a read operation for blob with ArrayBuffer.\n readOperation(this, blob, 'ArrayBuffer')\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#readAsBinaryString\n * @param {import('buffer').Blob} blob\n */\n readAsBinaryString (blob) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n // The readAsBinaryString(blob) method, when invoked,\n // must initiate a read operation for blob with BinaryString.\n readOperation(this, blob, 'BinaryString')\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#readAsDataText\n * @param {import('buffer').Blob} blob\n * @param {string?} encoding\n */\n readAsText (blob, encoding = undefined) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n if (encoding !== undefined) {\n encoding = webidl.converters.DOMString(encoding)\n }\n\n // The readAsText(blob, encoding) method, when invoked,\n // must initiate a read operation for blob with Text and encoding.\n readOperation(this, blob, 'Text', encoding)\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL\n * @param {import('buffer').Blob} blob\n */\n readAsDataURL (blob) {\n webidl.brandCheck(this, FileReader)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })\n\n blob = webidl.converters.Blob(blob, { strict: false })\n\n // The readAsDataURL(blob) method, when invoked, must\n // initiate a read operation for blob with DataURL.\n readOperation(this, blob, 'DataURL')\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dfn-abort\n */\n abort () {\n // 1. If this's state is \"empty\" or if this's state is\n // \"done\" set this's result to null and terminate\n // this algorithm.\n if (this[kState] === 'empty' || this[kState] === 'done') {\n this[kResult] = null\n return\n }\n\n // 2. If this's state is \"loading\" set this's state to\n // \"done\" and set this's result to null.\n if (this[kState] === 'loading') {\n this[kState] = 'done'\n this[kResult] = null\n }\n\n // 3. If there are any tasks from this on the file reading\n // task source in an affiliated task queue, then remove\n // those tasks from that task queue.\n this[kAborted] = true\n\n // 4. Terminate the algorithm for the read method being processed.\n // TODO\n\n // 5. Fire a progress event called abort at this.\n fireAProgressEvent('abort', this)\n\n // 6. 
If this's state is not \"loading\", fire a progress\n // event called loadend at this.\n if (this[kState] !== 'loading') {\n fireAProgressEvent('loadend', this)\n }\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate\n */\n get readyState () {\n webidl.brandCheck(this, FileReader)\n\n switch (this[kState]) {\n case 'empty': return this.EMPTY\n case 'loading': return this.LOADING\n case 'done': return this.DONE\n }\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dom-filereader-result\n */\n get result () {\n webidl.brandCheck(this, FileReader)\n\n // The result attribute’s getter, when invoked, must return\n // this's result.\n return this[kResult]\n }\n\n /**\n * @see https://w3c.github.io/FileAPI/#dom-filereader-error\n */\n get error () {\n webidl.brandCheck(this, FileReader)\n\n // The error attribute’s getter, when invoked, must return\n // this's error.\n return this[kError]\n }\n\n get onloadend () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].loadend\n }\n\n set onloadend (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].loadend) {\n this.removeEventListener('loadend', this[kEvents].loadend)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].loadend = fn\n this.addEventListener('loadend', fn)\n } else {\n this[kEvents].loadend = null\n }\n }\n\n get onerror () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].error\n }\n\n set onerror (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].error) {\n this.removeEventListener('error', this[kEvents].error)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].error = fn\n this.addEventListener('error', fn)\n } else {\n this[kEvents].error = null\n }\n }\n\n get onloadstart () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].loadstart\n }\n\n set onloadstart (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].loadstart) {\n this.removeEventListener('loadstart', this[kEvents].loadstart)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].loadstart = fn\n this.addEventListener('loadstart', fn)\n } else {\n this[kEvents].loadstart = null\n }\n }\n\n get onprogress () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].progress\n }\n\n set onprogress (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].progress) {\n this.removeEventListener('progress', this[kEvents].progress)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].progress = fn\n this.addEventListener('progress', fn)\n } else {\n this[kEvents].progress = null\n }\n }\n\n get onload () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].load\n }\n\n set onload (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].load) {\n this.removeEventListener('load', this[kEvents].load)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].load = fn\n this.addEventListener('load', fn)\n } else {\n this[kEvents].load = null\n }\n }\n\n get onabort () {\n webidl.brandCheck(this, FileReader)\n\n return this[kEvents].abort\n }\n\n set onabort (fn) {\n webidl.brandCheck(this, FileReader)\n\n if (this[kEvents].abort) {\n this.removeEventListener('abort', this[kEvents].abort)\n }\n\n if (typeof fn === 'function') {\n this[kEvents].abort = fn\n this.addEventListener('abort', fn)\n } else {\n this[kEvents].abort = null\n }\n }\n}\n\n// https://w3c.github.io/FileAPI/#dom-filereader-empty\nFileReader.EMPTY = FileReader.prototype.EMPTY = 0\n// 
https://w3c.github.io/FileAPI/#dom-filereader-loading\nFileReader.LOADING = FileReader.prototype.LOADING = 1\n// https://w3c.github.io/FileAPI/#dom-filereader-done\nFileReader.DONE = FileReader.prototype.DONE = 2\n\nObject.defineProperties(FileReader.prototype, {\n EMPTY: staticPropertyDescriptors,\n LOADING: staticPropertyDescriptors,\n DONE: staticPropertyDescriptors,\n readAsArrayBuffer: kEnumerableProperty,\n readAsBinaryString: kEnumerableProperty,\n readAsText: kEnumerableProperty,\n readAsDataURL: kEnumerableProperty,\n abort: kEnumerableProperty,\n readyState: kEnumerableProperty,\n result: kEnumerableProperty,\n error: kEnumerableProperty,\n onloadstart: kEnumerableProperty,\n onprogress: kEnumerableProperty,\n onload: kEnumerableProperty,\n onabort: kEnumerableProperty,\n onerror: kEnumerableProperty,\n onloadend: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'FileReader',\n writable: false,\n enumerable: false,\n configurable: true\n }\n})\n\nObject.defineProperties(FileReader, {\n EMPTY: staticPropertyDescriptors,\n LOADING: staticPropertyDescriptors,\n DONE: staticPropertyDescriptors\n})\n\nmodule.exports = {\n FileReader\n}\n","'use strict'\n\nconst { webidl } = require('../fetch/webidl')\n\nconst kState = Symbol('ProgressEvent state')\n\n/**\n * @see https://xhr.spec.whatwg.org/#progressevent\n */\nclass ProgressEvent extends Event {\n constructor (type, eventInitDict = {}) {\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {})\n\n super(type, eventInitDict)\n\n this[kState] = {\n lengthComputable: eventInitDict.lengthComputable,\n loaded: eventInitDict.loaded,\n total: eventInitDict.total\n }\n }\n\n get lengthComputable () {\n webidl.brandCheck(this, ProgressEvent)\n\n return this[kState].lengthComputable\n }\n\n get loaded () {\n webidl.brandCheck(this, ProgressEvent)\n\n return this[kState].loaded\n }\n\n get total () {\n webidl.brandCheck(this, ProgressEvent)\n\n return this[kState].total\n }\n}\n\nwebidl.converters.ProgressEventInit = webidl.dictionaryConverter([\n {\n key: 'lengthComputable',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'loaded',\n converter: webidl.converters['unsigned long long'],\n defaultValue: 0\n },\n {\n key: 'total',\n converter: webidl.converters['unsigned long long'],\n defaultValue: 0\n },\n {\n key: 'bubbles',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'cancelable',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'composed',\n converter: webidl.converters.boolean,\n defaultValue: false\n }\n])\n\nmodule.exports = {\n ProgressEvent\n}\n","'use strict'\n\nmodule.exports = {\n kState: Symbol('FileReader state'),\n kResult: Symbol('FileReader result'),\n kError: Symbol('FileReader error'),\n kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),\n kEvents: Symbol('FileReader events'),\n kAborted: Symbol('FileReader aborted')\n}\n","'use strict'\n\nconst {\n kState,\n kError,\n kResult,\n kAborted,\n kLastProgressEventFired\n} = require('./symbols')\nconst { ProgressEvent } = require('./progressevent')\nconst { getEncoding } = require('./encoding')\nconst { DOMException } = require('../fetch/constants')\nconst { serializeAMimeType, parseMIMEType } = require('../fetch/dataURL')\nconst { types } = require('util')\nconst { StringDecoder } = require('string_decoder')\nconst { btoa } = require('buffer')\n\n/** @type {PropertyDescriptor} */\nconst 
staticPropertyDescriptors = {\n enumerable: true,\n writable: false,\n configurable: false\n}\n\n/**\n * @see https://w3c.github.io/FileAPI/#readOperation\n * @param {import('./filereader').FileReader} fr\n * @param {import('buffer').Blob} blob\n * @param {string} type\n * @param {string?} encodingName\n */\nfunction readOperation (fr, blob, type, encodingName) {\n // 1. If fr’s state is \"loading\", throw an InvalidStateError\n // DOMException.\n if (fr[kState] === 'loading') {\n throw new DOMException('Invalid state', 'InvalidStateError')\n }\n\n // 2. Set fr’s state to \"loading\".\n fr[kState] = 'loading'\n\n // 3. Set fr’s result to null.\n fr[kResult] = null\n\n // 4. Set fr’s error to null.\n fr[kError] = null\n\n // 5. Let stream be the result of calling get stream on blob.\n /** @type {import('stream/web').ReadableStream} */\n const stream = blob.stream()\n\n // 6. Let reader be the result of getting a reader from stream.\n const reader = stream.getReader()\n\n // 7. Let bytes be an empty byte sequence.\n /** @type {Uint8Array[]} */\n const bytes = []\n\n // 8. Let chunkPromise be the result of reading a chunk from\n // stream with reader.\n let chunkPromise = reader.read()\n\n // 9. Let isFirstChunk be true.\n let isFirstChunk = true\n\n // 10. In parallel, while true:\n // Note: \"In parallel\" just means non-blocking\n // Note 2: readOperation itself cannot be async as double\n // reading the body would then reject the promise, instead\n // of throwing an error.\n ;(async () => {\n while (!fr[kAborted]) {\n // 1. Wait for chunkPromise to be fulfilled or rejected.\n try {\n const { done, value } = await chunkPromise\n\n // 2. If chunkPromise is fulfilled, and isFirstChunk is\n // true, queue a task to fire a progress event called\n // loadstart at fr.\n if (isFirstChunk && !fr[kAborted]) {\n queueMicrotask(() => {\n fireAProgressEvent('loadstart', fr)\n })\n }\n\n // 3. Set isFirstChunk to false.\n isFirstChunk = false\n\n // 4. If chunkPromise is fulfilled with an object whose\n // done property is false and whose value property is\n // a Uint8Array object, run these steps:\n if (!done && types.isUint8Array(value)) {\n // 1. Let bs be the byte sequence represented by the\n // Uint8Array object.\n\n // 2. Append bs to bytes.\n bytes.push(value)\n\n // 3. If roughly 50ms have passed since these steps\n // were last invoked, queue a task to fire a\n // progress event called progress at fr.\n if (\n (\n fr[kLastProgressEventFired] === undefined ||\n Date.now() - fr[kLastProgressEventFired] >= 50\n ) &&\n !fr[kAborted]\n ) {\n fr[kLastProgressEventFired] = Date.now()\n queueMicrotask(() => {\n fireAProgressEvent('progress', fr)\n })\n }\n\n // 4. Set chunkPromise to the result of reading a\n // chunk from stream with reader.\n chunkPromise = reader.read()\n } else if (done) {\n // 5. Otherwise, if chunkPromise is fulfilled with an\n // object whose done property is true, queue a task\n // to run the following steps and abort this algorithm:\n queueMicrotask(() => {\n // 1. Set fr’s state to \"done\".\n fr[kState] = 'done'\n\n // 2. Let result be the result of package data given\n // bytes, type, blob’s type, and encodingName.\n try {\n const result = packageData(bytes, type, blob.type, encodingName)\n\n // 4. Else:\n\n if (fr[kAborted]) {\n return\n }\n\n // 1. Set fr’s result to result.\n fr[kResult] = result\n\n // 2. Fire a progress event called load at the fr.\n fireAProgressEvent('load', fr)\n } catch (error) {\n // 3. If package data threw an exception error:\n\n // 1. 
Set fr’s error to error.\n fr[kError] = error\n\n // 2. Fire a progress event called error at fr.\n fireAProgressEvent('error', fr)\n }\n\n // 5. If fr’s state is not \"loading\", fire a progress\n // event called loadend at the fr.\n if (fr[kState] !== 'loading') {\n fireAProgressEvent('loadend', fr)\n }\n })\n\n break\n }\n } catch (error) {\n if (fr[kAborted]) {\n return\n }\n\n // 6. Otherwise, if chunkPromise is rejected with an\n // error error, queue a task to run the following\n // steps and abort this algorithm:\n queueMicrotask(() => {\n // 1. Set fr’s state to \"done\".\n fr[kState] = 'done'\n\n // 2. Set fr’s error to error.\n fr[kError] = error\n\n // 3. Fire a progress event called error at fr.\n fireAProgressEvent('error', fr)\n\n // 4. If fr’s state is not \"loading\", fire a progress\n // event called loadend at fr.\n if (fr[kState] !== 'loading') {\n fireAProgressEvent('loadend', fr)\n }\n })\n\n break\n }\n }\n })()\n}\n\n/**\n * @see https://w3c.github.io/FileAPI/#fire-a-progress-event\n * @see https://dom.spec.whatwg.org/#concept-event-fire\n * @param {string} e The name of the event\n * @param {import('./filereader').FileReader} reader\n */\nfunction fireAProgressEvent (e, reader) {\n // The progress event e does not bubble. e.bubbles must be false\n // The progress event e is NOT cancelable. e.cancelable must be false\n const event = new ProgressEvent(e, {\n bubbles: false,\n cancelable: false\n })\n\n reader.dispatchEvent(event)\n}\n\n/**\n * @see https://w3c.github.io/FileAPI/#blob-package-data\n * @param {Uint8Array[]} bytes\n * @param {string} type\n * @param {string?} mimeType\n * @param {string?} encodingName\n */\nfunction packageData (bytes, type, mimeType, encodingName) {\n // 1. A Blob has an associated package data algorithm, given\n // bytes, a type, a optional mimeType, and a optional\n // encodingName, which switches on type and runs the\n // associated steps:\n\n switch (type) {\n case 'DataURL': {\n // 1. Return bytes as a DataURL [RFC2397] subject to\n // the considerations below:\n // * Use mimeType as part of the Data URL if it is\n // available in keeping with the Data URL\n // specification [RFC2397].\n // * If mimeType is not available return a Data URL\n // without a media-type. [RFC2397].\n\n // https://datatracker.ietf.org/doc/html/rfc2397#section-3\n // dataurl := \"data:\" [ mediatype ] [ \";base64\" ] \",\" data\n // mediatype := [ type \"/\" subtype ] *( \";\" parameter )\n // data := *urlchar\n // parameter := attribute \"=\" value\n let dataURL = 'data:'\n\n const parsed = parseMIMEType(mimeType || 'application/octet-stream')\n\n if (parsed !== 'failure') {\n dataURL += serializeAMimeType(parsed)\n }\n\n dataURL += ';base64,'\n\n const decoder = new StringDecoder('latin1')\n\n for (const chunk of bytes) {\n dataURL += btoa(decoder.write(chunk))\n }\n\n dataURL += btoa(decoder.end())\n\n return dataURL\n }\n case 'Text': {\n // 1. Let encoding be failure\n let encoding = 'failure'\n\n // 2. If the encodingName is present, set encoding to the\n // result of getting an encoding from encodingName.\n if (encodingName) {\n encoding = getEncoding(encodingName)\n }\n\n // 3. If encoding is failure, and mimeType is present:\n if (encoding === 'failure' && mimeType) {\n // 1. Let type be the result of parse a MIME type\n // given mimeType.\n const type = parseMIMEType(mimeType)\n\n // 2. 
If type is not failure, set encoding to the result\n // of getting an encoding from type’s parameters[\"charset\"].\n if (type !== 'failure') {\n encoding = getEncoding(type.parameters.get('charset'))\n }\n }\n\n // 4. If encoding is failure, then set encoding to UTF-8.\n if (encoding === 'failure') {\n encoding = 'UTF-8'\n }\n\n // 5. Decode bytes using fallback encoding encoding, and\n // return the result.\n return decode(bytes, encoding)\n }\n case 'ArrayBuffer': {\n // Return a new ArrayBuffer whose contents are bytes.\n const sequence = combineByteSequences(bytes)\n\n return sequence.buffer\n }\n case 'BinaryString': {\n // Return bytes as a binary string, in which every byte\n // is represented by a code unit of equal value [0..255].\n let binaryString = ''\n\n const decoder = new StringDecoder('latin1')\n\n for (const chunk of bytes) {\n binaryString += decoder.write(chunk)\n }\n\n binaryString += decoder.end()\n\n return binaryString\n }\n }\n}\n\n/**\n * @see https://encoding.spec.whatwg.org/#decode\n * @param {Uint8Array[]} ioQueue\n * @param {string} encoding\n */\nfunction decode (ioQueue, encoding) {\n const bytes = combineByteSequences(ioQueue)\n\n // 1. Let BOMEncoding be the result of BOM sniffing ioQueue.\n const BOMEncoding = BOMSniffing(bytes)\n\n let slice = 0\n\n // 2. If BOMEncoding is non-null:\n if (BOMEncoding !== null) {\n // 1. Set encoding to BOMEncoding.\n encoding = BOMEncoding\n\n // 2. Read three bytes from ioQueue, if BOMEncoding is\n // UTF-8; otherwise read two bytes.\n // (Do nothing with those bytes.)\n slice = BOMEncoding === 'UTF-8' ? 3 : 2\n }\n\n // 3. Process a queue with an instance of encoding’s\n // decoder, ioQueue, output, and \"replacement\".\n\n // 4. Return output.\n\n const sliced = bytes.slice(slice)\n return new TextDecoder(encoding).decode(sliced)\n}\n\n/**\n * @see https://encoding.spec.whatwg.org/#bom-sniff\n * @param {Uint8Array} ioQueue\n */\nfunction BOMSniffing (ioQueue) {\n // 1. Let BOM be the result of peeking 3 bytes from ioQueue,\n // converted to a byte sequence.\n const [a, b, c] = ioQueue\n\n // 2. For each of the rows in the table below, starting with\n // the first one and going down, if BOM starts with the\n // bytes given in the first column, then return the\n // encoding given in the cell in the second column of that\n // row. Otherwise, return null.\n if (a === 0xEF && b === 0xBB && c === 0xBF) {\n return 'UTF-8'\n } else if (a === 0xFE && b === 0xFF) {\n return 'UTF-16BE'\n } else if (a === 0xFF && b === 0xFE) {\n return 'UTF-16LE'\n }\n\n return null\n}\n\n/**\n * @param {Uint8Array[]} sequences\n */\nfunction combineByteSequences (sequences) {\n const size = sequences.reduce((a, b) => {\n return a + b.byteLength\n }, 0)\n\n let offset = 0\n\n return sequences.reduce((a, b) => {\n a.set(b, offset)\n offset += b.byteLength\n return a\n }, new Uint8Array(size))\n}\n\nmodule.exports = {\n staticPropertyDescriptors,\n readOperation,\n fireAProgressEvent\n}\n","'use strict'\n\n// We include a version number for the Dispatcher API. 
In case of breaking changes,\n// this version number must be increased to avoid conflicts.\nconst globalDispatcher = Symbol.for('undici.globalDispatcher.1')\nconst { InvalidArgumentError } = require('./core/errors')\nconst Agent = require('./agent')\n\nif (getGlobalDispatcher() === undefined) {\n setGlobalDispatcher(new Agent())\n}\n\nfunction setGlobalDispatcher (agent) {\n if (!agent || typeof agent.dispatch !== 'function') {\n throw new InvalidArgumentError('Argument agent must implement Agent')\n }\n Object.defineProperty(globalThis, globalDispatcher, {\n value: agent,\n writable: true,\n enumerable: false,\n configurable: false\n })\n}\n\nfunction getGlobalDispatcher () {\n return globalThis[globalDispatcher]\n}\n\nmodule.exports = {\n setGlobalDispatcher,\n getGlobalDispatcher\n}\n","'use strict'\n\nmodule.exports = class DecoratorHandler {\n constructor (handler) {\n this.handler = handler\n }\n\n onConnect (...args) {\n return this.handler.onConnect(...args)\n }\n\n onError (...args) {\n return this.handler.onError(...args)\n }\n\n onUpgrade (...args) {\n return this.handler.onUpgrade(...args)\n }\n\n onHeaders (...args) {\n return this.handler.onHeaders(...args)\n }\n\n onData (...args) {\n return this.handler.onData(...args)\n }\n\n onComplete (...args) {\n return this.handler.onComplete(...args)\n }\n\n onBodySent (...args) {\n return this.handler.onBodySent(...args)\n }\n}\n","'use strict'\n\nconst util = require('../core/util')\nconst { kBodyUsed } = require('../core/symbols')\nconst assert = require('assert')\nconst { InvalidArgumentError } = require('../core/errors')\nconst EE = require('events')\n\nconst redirectableStatusCodes = [300, 301, 302, 303, 307, 308]\n\nconst kBody = Symbol('body')\n\nclass BodyAsyncIterable {\n constructor (body) {\n this[kBody] = body\n this[kBodyUsed] = false\n }\n\n async * [Symbol.asyncIterator] () {\n assert(!this[kBodyUsed], 'disturbed')\n this[kBodyUsed] = true\n yield * this[kBody]\n }\n}\n\nclass RedirectHandler {\n constructor (dispatch, maxRedirections, opts, handler) {\n if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {\n throw new InvalidArgumentError('maxRedirections must be a positive number')\n }\n\n util.validateHandler(handler, opts.method, opts.upgrade)\n\n this.dispatch = dispatch\n this.location = null\n this.abort = null\n this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy\n this.maxRedirections = maxRedirections\n this.handler = handler\n this.history = []\n\n if (util.isStream(this.opts.body)) {\n // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp\n // so that it can be dispatched again?\n // TODO (fix): Do we need 100-expect support to provide a way to do this properly?\n if (util.bodyLength(this.opts.body) === 0) {\n this.opts.body\n .on('data', function () {\n assert(false)\n })\n }\n\n if (typeof this.opts.body.readableDidRead !== 'boolean') {\n this.opts.body[kBodyUsed] = false\n EE.prototype.on.call(this.opts.body, 'data', function () {\n this[kBodyUsed] = true\n })\n }\n } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {\n // TODO (fix): We can't access ReadableStream internal state\n // to determine whether or not it has been disturbed. 
This is just\n // a workaround.\n this.opts.body = new BodyAsyncIterable(this.opts.body)\n } else if (\n this.opts.body &&\n typeof this.opts.body !== 'string' &&\n !ArrayBuffer.isView(this.opts.body) &&\n util.isIterable(this.opts.body)\n ) {\n // TODO: Should we allow re-using iterable if !this.opts.idempotent\n // or through some other flag?\n this.opts.body = new BodyAsyncIterable(this.opts.body)\n }\n }\n\n onConnect (abort) {\n this.abort = abort\n this.handler.onConnect(abort, { history: this.history })\n }\n\n onUpgrade (statusCode, headers, socket) {\n this.handler.onUpgrade(statusCode, headers, socket)\n }\n\n onError (error) {\n this.handler.onError(error)\n }\n\n onHeaders (statusCode, headers, resume, statusText) {\n this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)\n ? null\n : parseLocation(statusCode, headers)\n\n if (this.opts.origin) {\n this.history.push(new URL(this.opts.path, this.opts.origin))\n }\n\n if (!this.location) {\n return this.handler.onHeaders(statusCode, headers, resume, statusText)\n }\n\n const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))\n const path = search ? `${pathname}${search}` : pathname\n\n // Remove headers referring to the original URL.\n // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers.\n // https://tools.ietf.org/html/rfc7231#section-6.4\n this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)\n this.opts.path = path\n this.opts.origin = origin\n this.opts.maxRedirections = 0\n this.opts.query = null\n\n // https://tools.ietf.org/html/rfc7231#section-6.4.4\n // In case of HTTP 303, always replace method to be either HEAD or GET\n if (statusCode === 303 && this.opts.method !== 'HEAD') {\n this.opts.method = 'GET'\n this.opts.body = null\n }\n }\n\n onData (chunk) {\n if (this.location) {\n /*\n https://tools.ietf.org/html/rfc7231#section-6.4\n\n TLDR: undici always ignores 3xx response bodies.\n\n Redirection is used to serve the requested resource from another URL, so it is assumes that\n no body is generated (and thus can be ignored). 
Even though generating a body is not prohibited.\n\n For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually\n (which means it's optional and not mandated) contain just an hyperlink to the value of\n the Location response header, so the body can be ignored safely.\n\n For status 300, which is \"Multiple Choices\", the spec mentions both generating a Location\n response header AND a response body with the other possible location to follow.\n Since the spec explicitily chooses not to specify a format for such body and leave it to\n servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.\n */\n } else {\n return this.handler.onData(chunk)\n }\n }\n\n onComplete (trailers) {\n if (this.location) {\n /*\n https://tools.ietf.org/html/rfc7231#section-6.4\n\n TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections\n and neither are useful if present.\n\n See comment on onData method above for more detailed informations.\n */\n\n this.location = null\n this.abort = null\n\n this.dispatch(this.opts, this)\n } else {\n this.handler.onComplete(trailers)\n }\n }\n\n onBodySent (chunk) {\n if (this.handler.onBodySent) {\n this.handler.onBodySent(chunk)\n }\n }\n}\n\nfunction parseLocation (statusCode, headers) {\n if (redirectableStatusCodes.indexOf(statusCode) === -1) {\n return null\n }\n\n for (let i = 0; i < headers.length; i += 2) {\n if (headers[i].toString().toLowerCase() === 'location') {\n return headers[i + 1]\n }\n }\n}\n\n// https://tools.ietf.org/html/rfc7231#section-6.4.4\nfunction shouldRemoveHeader (header, removeContent, unknownOrigin) {\n return (\n (header.length === 4 && header.toString().toLowerCase() === 'host') ||\n (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||\n (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||\n (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')\n )\n}\n\n// https://tools.ietf.org/html/rfc7231#section-6.4\nfunction cleanRequestHeaders (headers, removeContent, unknownOrigin) {\n const ret = []\n if (Array.isArray(headers)) {\n for (let i = 0; i < headers.length; i += 2) {\n if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {\n ret.push(headers[i], headers[i + 1])\n }\n }\n } else if (headers && typeof headers === 'object') {\n for (const key of Object.keys(headers)) {\n if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {\n ret.push(key, headers[key])\n }\n }\n } else {\n assert(headers == null, 'headers must be an object or an array')\n }\n return ret\n}\n\nmodule.exports = RedirectHandler\n","const assert = require('assert')\n\nconst { kRetryHandlerDefaultRetry } = require('../core/symbols')\nconst { RequestRetryError } = require('../core/errors')\nconst { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util')\n\nfunction calculateRetryAfterHeader (retryAfter) {\n const current = Date.now()\n const diff = new Date(retryAfter).getTime() - current\n\n return diff\n}\n\nclass RetryHandler {\n constructor (opts, handlers) {\n const { retryOptions, ...dispatchOpts } = opts\n const {\n // Retry scoped\n retry: retryFn,\n maxRetries,\n maxTimeout,\n minTimeout,\n timeoutFactor,\n // Response scoped\n methods,\n errorCodes,\n retryAfter,\n statusCodes\n } = retryOptions ?? 
{}\n\n this.dispatch = handlers.dispatch\n this.handler = handlers.handler\n this.opts = dispatchOpts\n this.abort = null\n this.aborted = false\n this.retryOpts = {\n retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],\n retryAfter: retryAfter ?? true,\n maxTimeout: maxTimeout ?? 30 * 1000, // 30s,\n timeout: minTimeout ?? 500, // .5s\n timeoutFactor: timeoutFactor ?? 2,\n maxRetries: maxRetries ?? 5,\n // What errors we should retry\n methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],\n // Indicates which errors to retry\n statusCodes: statusCodes ?? [500, 502, 503, 504, 429],\n // List of errors to retry\n errorCodes: errorCodes ?? [\n 'ECONNRESET',\n 'ECONNREFUSED',\n 'ENOTFOUND',\n 'ENETDOWN',\n 'ENETUNREACH',\n 'EHOSTDOWN',\n 'EHOSTUNREACH',\n 'EPIPE'\n ]\n }\n\n this.retryCount = 0\n this.start = 0\n this.end = null\n this.etag = null\n this.resume = null\n\n // Handle possible onConnect duplication\n this.handler.onConnect(reason => {\n this.aborted = true\n if (this.abort) {\n this.abort(reason)\n } else {\n this.reason = reason\n }\n })\n }\n\n onRequestSent () {\n if (this.handler.onRequestSent) {\n this.handler.onRequestSent()\n }\n }\n\n onUpgrade (statusCode, headers, socket) {\n if (this.handler.onUpgrade) {\n this.handler.onUpgrade(statusCode, headers, socket)\n }\n }\n\n onConnect (abort) {\n if (this.aborted) {\n abort(this.reason)\n } else {\n this.abort = abort\n }\n }\n\n onBodySent (chunk) {\n if (this.handler.onBodySent) return this.handler.onBodySent(chunk)\n }\n\n static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {\n const { statusCode, code, headers } = err\n const { method, retryOptions } = opts\n const {\n maxRetries,\n timeout,\n maxTimeout,\n timeoutFactor,\n statusCodes,\n errorCodes,\n methods\n } = retryOptions\n let { counter, currentTimeout } = state\n\n currentTimeout =\n currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout\n\n // Any code that is not a Undici's originated and allowed to retry\n if (\n code &&\n code !== 'UND_ERR_REQ_RETRY' &&\n code !== 'UND_ERR_SOCKET' &&\n !errorCodes.includes(code)\n ) {\n cb(err)\n return\n }\n\n // If a set of method are provided and the current method is not in the list\n if (Array.isArray(methods) && !methods.includes(method)) {\n cb(err)\n return\n }\n\n // If a set of status code are provided and the current status code is not in the list\n if (\n statusCode != null &&\n Array.isArray(statusCodes) &&\n !statusCodes.includes(statusCode)\n ) {\n cb(err)\n return\n }\n\n // If we reached the max number of retries\n if (counter > maxRetries) {\n cb(err)\n return\n }\n\n let retryAfterHeader = headers != null && headers['retry-after']\n if (retryAfterHeader) {\n retryAfterHeader = Number(retryAfterHeader)\n retryAfterHeader = isNaN(retryAfterHeader)\n ? calculateRetryAfterHeader(retryAfterHeader)\n : retryAfterHeader * 1e3 // Retry-After is in seconds\n }\n\n const retryTimeout =\n retryAfterHeader > 0\n ? 
Math.min(retryAfterHeader, maxTimeout)\n : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)\n\n state.currentTimeout = retryTimeout\n\n setTimeout(() => cb(null), retryTimeout)\n }\n\n onHeaders (statusCode, rawHeaders, resume, statusMessage) {\n const headers = parseHeaders(rawHeaders)\n\n this.retryCount += 1\n\n if (statusCode >= 300) {\n this.abort(\n new RequestRetryError('Request failed', statusCode, {\n headers,\n count: this.retryCount\n })\n )\n return false\n }\n\n // Checkpoint for resume from where we left it\n if (this.resume != null) {\n this.resume = null\n\n if (statusCode !== 206) {\n return true\n }\n\n const contentRange = parseRangeHeader(headers['content-range'])\n // If no content range\n if (!contentRange) {\n this.abort(\n new RequestRetryError('Content-Range mismatch', statusCode, {\n headers,\n count: this.retryCount\n })\n )\n return false\n }\n\n // Let's start with a weak etag check\n if (this.etag != null && this.etag !== headers.etag) {\n this.abort(\n new RequestRetryError('ETag mismatch', statusCode, {\n headers,\n count: this.retryCount\n })\n )\n return false\n }\n\n const { start, size, end = size } = contentRange\n\n assert(this.start === start, 'content-range mismatch')\n assert(this.end == null || this.end === end, 'content-range mismatch')\n\n this.resume = resume\n return true\n }\n\n if (this.end == null) {\n if (statusCode === 206) {\n // First time we receive 206\n const range = parseRangeHeader(headers['content-range'])\n\n if (range == null) {\n return this.handler.onHeaders(\n statusCode,\n rawHeaders,\n resume,\n statusMessage\n )\n }\n\n const { start, size, end = size } = range\n\n assert(\n start != null && Number.isFinite(start) && this.start !== start,\n 'content-range mismatch'\n )\n assert(Number.isFinite(start))\n assert(\n end != null && Number.isFinite(end) && this.end !== end,\n 'invalid content-length'\n )\n\n this.start = start\n this.end = end\n }\n\n // We make our best to checkpoint the body for further range headers\n if (this.end == null) {\n const contentLength = headers['content-length']\n this.end = contentLength != null ? Number(contentLength) : null\n }\n\n assert(Number.isFinite(this.start))\n assert(\n this.end == null || Number.isFinite(this.end),\n 'invalid content-length'\n )\n\n this.resume = resume\n this.etag = headers.etag != null ? headers.etag : null\n\n return this.handler.onHeaders(\n statusCode,\n rawHeaders,\n resume,\n statusMessage\n )\n }\n\n const err = new RequestRetryError('Request failed', statusCode, {\n headers,\n count: this.retryCount\n })\n\n this.abort(err)\n\n return false\n }\n\n onData (chunk) {\n this.start += chunk.length\n\n return this.handler.onData(chunk)\n }\n\n onComplete (rawTrailers) {\n this.retryCount = 0\n return this.handler.onComplete(rawTrailers)\n }\n\n onError (err) {\n if (this.aborted || isDisturbed(this.opts.body)) {\n return this.handler.onError(err)\n }\n\n this.retryOpts.retry(\n err,\n {\n state: { counter: this.retryCount++, currentTimeout: this.retryAfter },\n opts: { retryOptions: this.retryOpts, ...this.opts }\n },\n onRetry.bind(this)\n )\n\n function onRetry (err) {\n if (err != null || this.aborted || isDisturbed(this.opts.body)) {\n return this.handler.onError(err)\n }\n\n if (this.start !== 0) {\n this.opts = {\n ...this.opts,\n headers: {\n ...this.opts.headers,\n range: `bytes=${this.start}-${this.end ?? 
''}`\n }\n }\n }\n\n try {\n this.dispatch(this.opts, this)\n } catch (err) {\n this.handler.onError(err)\n }\n }\n }\n}\n\nmodule.exports = RetryHandler\n","'use strict'\n\nconst RedirectHandler = require('../handler/RedirectHandler')\n\nfunction createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {\n return (dispatch) => {\n return function Intercept (opts, handler) {\n const { maxRedirections = defaultMaxRedirections } = opts\n\n if (!maxRedirections) {\n return dispatch(opts, handler)\n }\n\n const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)\n opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.\n return dispatch(opts, redirectHandler)\n }\n }\n}\n\nmodule.exports = createRedirectInterceptor\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;\nconst utils_1 = require(\"./utils\");\n// C headers\nvar ERROR;\n(function (ERROR) {\n ERROR[ERROR[\"OK\"] = 0] = \"OK\";\n ERROR[ERROR[\"INTERNAL\"] = 1] = \"INTERNAL\";\n ERROR[ERROR[\"STRICT\"] = 2] = \"STRICT\";\n ERROR[ERROR[\"LF_EXPECTED\"] = 3] = \"LF_EXPECTED\";\n ERROR[ERROR[\"UNEXPECTED_CONTENT_LENGTH\"] = 4] = \"UNEXPECTED_CONTENT_LENGTH\";\n ERROR[ERROR[\"CLOSED_CONNECTION\"] = 5] = \"CLOSED_CONNECTION\";\n ERROR[ERROR[\"INVALID_METHOD\"] = 6] = \"INVALID_METHOD\";\n ERROR[ERROR[\"INVALID_URL\"] = 7] = \"INVALID_URL\";\n ERROR[ERROR[\"INVALID_CONSTANT\"] = 8] = \"INVALID_CONSTANT\";\n ERROR[ERROR[\"INVALID_VERSION\"] = 9] = \"INVALID_VERSION\";\n ERROR[ERROR[\"INVALID_HEADER_TOKEN\"] = 10] = \"INVALID_HEADER_TOKEN\";\n ERROR[ERROR[\"INVALID_CONTENT_LENGTH\"] = 11] = \"INVALID_CONTENT_LENGTH\";\n ERROR[ERROR[\"INVALID_CHUNK_SIZE\"] = 12] = \"INVALID_CHUNK_SIZE\";\n ERROR[ERROR[\"INVALID_STATUS\"] = 13] = \"INVALID_STATUS\";\n ERROR[ERROR[\"INVALID_EOF_STATE\"] = 14] = \"INVALID_EOF_STATE\";\n ERROR[ERROR[\"INVALID_TRANSFER_ENCODING\"] = 15] = \"INVALID_TRANSFER_ENCODING\";\n ERROR[ERROR[\"CB_MESSAGE_BEGIN\"] = 16] = \"CB_MESSAGE_BEGIN\";\n ERROR[ERROR[\"CB_HEADERS_COMPLETE\"] = 17] = \"CB_HEADERS_COMPLETE\";\n ERROR[ERROR[\"CB_MESSAGE_COMPLETE\"] = 18] = \"CB_MESSAGE_COMPLETE\";\n ERROR[ERROR[\"CB_CHUNK_HEADER\"] = 19] = \"CB_CHUNK_HEADER\";\n ERROR[ERROR[\"CB_CHUNK_COMPLETE\"] = 20] = \"CB_CHUNK_COMPLETE\";\n ERROR[ERROR[\"PAUSED\"] = 21] = \"PAUSED\";\n ERROR[ERROR[\"PAUSED_UPGRADE\"] = 22] = \"PAUSED_UPGRADE\";\n ERROR[ERROR[\"PAUSED_H2_UPGRADE\"] = 23] = \"PAUSED_H2_UPGRADE\";\n ERROR[ERROR[\"USER\"] = 24] = \"USER\";\n})(ERROR = exports.ERROR || (exports.ERROR = {}));\nvar TYPE;\n(function (TYPE) {\n TYPE[TYPE[\"BOTH\"] = 0] = \"BOTH\";\n TYPE[TYPE[\"REQUEST\"] = 1] = \"REQUEST\";\n TYPE[TYPE[\"RESPONSE\"] = 2] = \"RESPONSE\";\n})(TYPE = exports.TYPE || (exports.TYPE = {}));\nvar FLAGS;\n(function (FLAGS) {\n FLAGS[FLAGS[\"CONNECTION_KEEP_ALIVE\"] = 1] = \"CONNECTION_KEEP_ALIVE\";\n FLAGS[FLAGS[\"CONNECTION_CLOSE\"] = 2] = 
\"CONNECTION_CLOSE\";\n FLAGS[FLAGS[\"CONNECTION_UPGRADE\"] = 4] = \"CONNECTION_UPGRADE\";\n FLAGS[FLAGS[\"CHUNKED\"] = 8] = \"CHUNKED\";\n FLAGS[FLAGS[\"UPGRADE\"] = 16] = \"UPGRADE\";\n FLAGS[FLAGS[\"CONTENT_LENGTH\"] = 32] = \"CONTENT_LENGTH\";\n FLAGS[FLAGS[\"SKIPBODY\"] = 64] = \"SKIPBODY\";\n FLAGS[FLAGS[\"TRAILING\"] = 128] = \"TRAILING\";\n // 1 << 8 is unused\n FLAGS[FLAGS[\"TRANSFER_ENCODING\"] = 512] = \"TRANSFER_ENCODING\";\n})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));\nvar LENIENT_FLAGS;\n(function (LENIENT_FLAGS) {\n LENIENT_FLAGS[LENIENT_FLAGS[\"HEADERS\"] = 1] = \"HEADERS\";\n LENIENT_FLAGS[LENIENT_FLAGS[\"CHUNKED_LENGTH\"] = 2] = \"CHUNKED_LENGTH\";\n LENIENT_FLAGS[LENIENT_FLAGS[\"KEEP_ALIVE\"] = 4] = \"KEEP_ALIVE\";\n})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));\nvar METHODS;\n(function (METHODS) {\n METHODS[METHODS[\"DELETE\"] = 0] = \"DELETE\";\n METHODS[METHODS[\"GET\"] = 1] = \"GET\";\n METHODS[METHODS[\"HEAD\"] = 2] = \"HEAD\";\n METHODS[METHODS[\"POST\"] = 3] = \"POST\";\n METHODS[METHODS[\"PUT\"] = 4] = \"PUT\";\n /* pathological */\n METHODS[METHODS[\"CONNECT\"] = 5] = \"CONNECT\";\n METHODS[METHODS[\"OPTIONS\"] = 6] = \"OPTIONS\";\n METHODS[METHODS[\"TRACE\"] = 7] = \"TRACE\";\n /* WebDAV */\n METHODS[METHODS[\"COPY\"] = 8] = \"COPY\";\n METHODS[METHODS[\"LOCK\"] = 9] = \"LOCK\";\n METHODS[METHODS[\"MKCOL\"] = 10] = \"MKCOL\";\n METHODS[METHODS[\"MOVE\"] = 11] = \"MOVE\";\n METHODS[METHODS[\"PROPFIND\"] = 12] = \"PROPFIND\";\n METHODS[METHODS[\"PROPPATCH\"] = 13] = \"PROPPATCH\";\n METHODS[METHODS[\"SEARCH\"] = 14] = \"SEARCH\";\n METHODS[METHODS[\"UNLOCK\"] = 15] = \"UNLOCK\";\n METHODS[METHODS[\"BIND\"] = 16] = \"BIND\";\n METHODS[METHODS[\"REBIND\"] = 17] = \"REBIND\";\n METHODS[METHODS[\"UNBIND\"] = 18] = \"UNBIND\";\n METHODS[METHODS[\"ACL\"] = 19] = \"ACL\";\n /* subversion */\n METHODS[METHODS[\"REPORT\"] = 20] = \"REPORT\";\n METHODS[METHODS[\"MKACTIVITY\"] = 21] = \"MKACTIVITY\";\n METHODS[METHODS[\"CHECKOUT\"] = 22] = \"CHECKOUT\";\n METHODS[METHODS[\"MERGE\"] = 23] = \"MERGE\";\n /* upnp */\n METHODS[METHODS[\"M-SEARCH\"] = 24] = \"M-SEARCH\";\n METHODS[METHODS[\"NOTIFY\"] = 25] = \"NOTIFY\";\n METHODS[METHODS[\"SUBSCRIBE\"] = 26] = \"SUBSCRIBE\";\n METHODS[METHODS[\"UNSUBSCRIBE\"] = 27] = \"UNSUBSCRIBE\";\n /* RFC-5789 */\n METHODS[METHODS[\"PATCH\"] = 28] = \"PATCH\";\n METHODS[METHODS[\"PURGE\"] = 29] = \"PURGE\";\n /* CalDAV */\n METHODS[METHODS[\"MKCALENDAR\"] = 30] = \"MKCALENDAR\";\n /* RFC-2068, section 19.6.1.2 */\n METHODS[METHODS[\"LINK\"] = 31] = \"LINK\";\n METHODS[METHODS[\"UNLINK\"] = 32] = \"UNLINK\";\n /* icecast */\n METHODS[METHODS[\"SOURCE\"] = 33] = \"SOURCE\";\n /* RFC-7540, section 11.6 */\n METHODS[METHODS[\"PRI\"] = 34] = \"PRI\";\n /* RFC-2326 RTSP */\n METHODS[METHODS[\"DESCRIBE\"] = 35] = \"DESCRIBE\";\n METHODS[METHODS[\"ANNOUNCE\"] = 36] = \"ANNOUNCE\";\n METHODS[METHODS[\"SETUP\"] = 37] = \"SETUP\";\n METHODS[METHODS[\"PLAY\"] = 38] = \"PLAY\";\n METHODS[METHODS[\"PAUSE\"] = 39] = \"PAUSE\";\n METHODS[METHODS[\"TEARDOWN\"] = 40] = \"TEARDOWN\";\n METHODS[METHODS[\"GET_PARAMETER\"] = 41] = \"GET_PARAMETER\";\n METHODS[METHODS[\"SET_PARAMETER\"] = 42] = \"SET_PARAMETER\";\n METHODS[METHODS[\"REDIRECT\"] = 43] = \"REDIRECT\";\n METHODS[METHODS[\"RECORD\"] = 44] = \"RECORD\";\n /* RAOP */\n METHODS[METHODS[\"FLUSH\"] = 45] = \"FLUSH\";\n})(METHODS = exports.METHODS || (exports.METHODS = {}));\nexports.METHODS_HTTP = [\n METHODS.DELETE,\n METHODS.GET,\n METHODS.HEAD,\n METHODS.POST,\n 
METHODS.PUT,\n METHODS.CONNECT,\n METHODS.OPTIONS,\n METHODS.TRACE,\n METHODS.COPY,\n METHODS.LOCK,\n METHODS.MKCOL,\n METHODS.MOVE,\n METHODS.PROPFIND,\n METHODS.PROPPATCH,\n METHODS.SEARCH,\n METHODS.UNLOCK,\n METHODS.BIND,\n METHODS.REBIND,\n METHODS.UNBIND,\n METHODS.ACL,\n METHODS.REPORT,\n METHODS.MKACTIVITY,\n METHODS.CHECKOUT,\n METHODS.MERGE,\n METHODS['M-SEARCH'],\n METHODS.NOTIFY,\n METHODS.SUBSCRIBE,\n METHODS.UNSUBSCRIBE,\n METHODS.PATCH,\n METHODS.PURGE,\n METHODS.MKCALENDAR,\n METHODS.LINK,\n METHODS.UNLINK,\n METHODS.PRI,\n // TODO(indutny): should we allow it with HTTP?\n METHODS.SOURCE,\n];\nexports.METHODS_ICE = [\n METHODS.SOURCE,\n];\nexports.METHODS_RTSP = [\n METHODS.OPTIONS,\n METHODS.DESCRIBE,\n METHODS.ANNOUNCE,\n METHODS.SETUP,\n METHODS.PLAY,\n METHODS.PAUSE,\n METHODS.TEARDOWN,\n METHODS.GET_PARAMETER,\n METHODS.SET_PARAMETER,\n METHODS.REDIRECT,\n METHODS.RECORD,\n METHODS.FLUSH,\n // For AirPlay\n METHODS.GET,\n METHODS.POST,\n];\nexports.METHOD_MAP = utils_1.enumToMap(METHODS);\nexports.H_METHOD_MAP = {};\nObject.keys(exports.METHOD_MAP).forEach((key) => {\n if (/^H/.test(key)) {\n exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];\n }\n});\nvar FINISH;\n(function (FINISH) {\n FINISH[FINISH[\"SAFE\"] = 0] = \"SAFE\";\n FINISH[FINISH[\"SAFE_WITH_CB\"] = 1] = \"SAFE_WITH_CB\";\n FINISH[FINISH[\"UNSAFE\"] = 2] = \"UNSAFE\";\n})(FINISH = exports.FINISH || (exports.FINISH = {}));\nexports.ALPHA = [];\nfor (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {\n // Upper case\n exports.ALPHA.push(String.fromCharCode(i));\n // Lower case\n exports.ALPHA.push(String.fromCharCode(i + 0x20));\n}\nexports.NUM_MAP = {\n 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,\n 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,\n};\nexports.HEX_MAP = {\n 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,\n 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,\n A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,\n a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,\n};\nexports.NUM = [\n '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',\n];\nexports.ALPHANUM = exports.ALPHA.concat(exports.NUM);\nexports.MARK = ['-', '_', '.', '!', '~', '*', '\\'', '(', ')'];\nexports.USERINFO_CHARS = exports.ALPHANUM\n .concat(exports.MARK)\n .concat(['%', ';', ':', '&', '=', '+', '$', ',']);\n// TODO(indutny): use RFC\nexports.STRICT_URL_CHAR = [\n '!', '\"', '$', '%', '&', '\\'',\n '(', ')', '*', '+', ',', '-', '.', '/',\n ':', ';', '<', '=', '>',\n '@', '[', '\\\\', ']', '^', '_',\n '`',\n '{', '|', '}', '~',\n].concat(exports.ALPHANUM);\nexports.URL_CHAR = exports.STRICT_URL_CHAR\n .concat(['\\t', '\\f']);\n// All characters with 0x80 bit set to 1\nfor (let i = 0x80; i <= 0xff; i++) {\n exports.URL_CHAR.push(i);\n}\nexports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);\n/* Tokens as defined by rfc 2616. 
Also lowercases them.\n * token = 1*\n * separators = \"(\" | \")\" | \"<\" | \">\" | \"@\"\n * | \",\" | \";\" | \":\" | \"\\\" | <\">\n * | \"/\" | \"[\" | \"]\" | \"?\" | \"=\"\n * | \"{\" | \"}\" | SP | HT\n */\nexports.STRICT_TOKEN = [\n '!', '#', '$', '%', '&', '\\'',\n '*', '+', '-', '.',\n '^', '_', '`',\n '|', '~',\n].concat(exports.ALPHANUM);\nexports.TOKEN = exports.STRICT_TOKEN.concat([' ']);\n/*\n * Verify that a char is a valid visible (printable) US-ASCII\n * character or %x80-FF\n */\nexports.HEADER_CHARS = ['\\t'];\nfor (let i = 32; i <= 255; i++) {\n if (i !== 127) {\n exports.HEADER_CHARS.push(i);\n }\n}\n// ',' = \\x44\nexports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);\nexports.MAJOR = exports.NUM_MAP;\nexports.MINOR = exports.MAJOR;\nvar HEADER_STATE;\n(function (HEADER_STATE) {\n HEADER_STATE[HEADER_STATE[\"GENERAL\"] = 0] = \"GENERAL\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION\"] = 1] = \"CONNECTION\";\n HEADER_STATE[HEADER_STATE[\"CONTENT_LENGTH\"] = 2] = \"CONTENT_LENGTH\";\n HEADER_STATE[HEADER_STATE[\"TRANSFER_ENCODING\"] = 3] = \"TRANSFER_ENCODING\";\n HEADER_STATE[HEADER_STATE[\"UPGRADE\"] = 4] = \"UPGRADE\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION_KEEP_ALIVE\"] = 5] = \"CONNECTION_KEEP_ALIVE\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION_CLOSE\"] = 6] = \"CONNECTION_CLOSE\";\n HEADER_STATE[HEADER_STATE[\"CONNECTION_UPGRADE\"] = 7] = \"CONNECTION_UPGRADE\";\n HEADER_STATE[HEADER_STATE[\"TRANSFER_ENCODING_CHUNKED\"] = 8] = \"TRANSFER_ENCODING_CHUNKED\";\n})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));\nexports.SPECIAL_HEADERS = {\n 'connection': HEADER_STATE.CONNECTION,\n 'content-length': HEADER_STATE.CONTENT_LENGTH,\n 'proxy-connection': HEADER_STATE.CONNECTION,\n 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,\n 'upgrade': HEADER_STATE.UPGRADE,\n};\n//# sourceMappingURL=constants.js.map","module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQ
DAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQF
qIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIg
FqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBA
WohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0A
AEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASE
QDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZ
IBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcI
AAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzb
AQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECA
AQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5A
A2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqI
gNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtB
ACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAs
gBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQ
BBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpb
iBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9O
X05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8='\n","module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQ
DAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAE
iECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQC
ANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgA
SIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEB
aiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRA
MpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohAS
AEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAI
RAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIE
IAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCE
QIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4
CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBe
EGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABB
f0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQ
gBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIA
IhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvb
nMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBI
UEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw=='\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.enumToMap = void 0;\nfunction enumToMap(obj) {\n const res = {};\n Object.keys(obj).forEach((key) => {\n const value = obj[key];\n if (typeof value === 'number') {\n res[key] = value;\n }\n });\n return res;\n}\nexports.enumToMap = enumToMap;\n//# sourceMappingURL=utils.js.map","'use strict'\n\nconst { kClients } = require('../core/symbols')\nconst Agent = require('../agent')\nconst {\n kAgent,\n kMockAgentSet,\n kMockAgentGet,\n kDispatches,\n kIsMockActive,\n kNetConnect,\n kGetNetConnect,\n kOptions,\n kFactory\n} = require('./mock-symbols')\nconst MockClient = require('./mock-client')\nconst MockPool = 
require('./mock-pool')\nconst { matchValue, buildMockOptions } = require('./mock-utils')\nconst { InvalidArgumentError, UndiciError } = require('../core/errors')\nconst Dispatcher = require('../dispatcher')\nconst Pluralizer = require('./pluralizer')\nconst PendingInterceptorsFormatter = require('./pending-interceptors-formatter')\n\nclass FakeWeakRef {\n constructor (value) {\n this.value = value\n }\n\n deref () {\n return this.value\n }\n}\n\nclass MockAgent extends Dispatcher {\n constructor (opts) {\n super(opts)\n\n this[kNetConnect] = true\n this[kIsMockActive] = true\n\n // Instantiate Agent and encapsulate\n if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {\n throw new InvalidArgumentError('Argument opts.agent must implement Agent')\n }\n const agent = opts && opts.agent ? opts.agent : new Agent(opts)\n this[kAgent] = agent\n\n this[kClients] = agent[kClients]\n this[kOptions] = buildMockOptions(opts)\n }\n\n get (origin) {\n let dispatcher = this[kMockAgentGet](origin)\n\n if (!dispatcher) {\n dispatcher = this[kFactory](origin)\n this[kMockAgentSet](origin, dispatcher)\n }\n return dispatcher\n }\n\n dispatch (opts, handler) {\n // Call MockAgent.get to perform additional setup before dispatching as normal\n this.get(opts.origin)\n return this[kAgent].dispatch(opts, handler)\n }\n\n async close () {\n await this[kAgent].close()\n this[kClients].clear()\n }\n\n deactivate () {\n this[kIsMockActive] = false\n }\n\n activate () {\n this[kIsMockActive] = true\n }\n\n enableNetConnect (matcher) {\n if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {\n if (Array.isArray(this[kNetConnect])) {\n this[kNetConnect].push(matcher)\n } else {\n this[kNetConnect] = [matcher]\n }\n } else if (typeof matcher === 'undefined') {\n this[kNetConnect] = true\n } else {\n throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')\n }\n }\n\n disableNetConnect () {\n this[kNetConnect] = false\n }\n\n // This is required to bypass issues caused by using global symbols - see:\n // https://github.com/nodejs/undici/issues/1447\n get isMockActive () {\n return this[kIsMockActive]\n }\n\n [kMockAgentSet] (origin, dispatcher) {\n this[kClients].set(origin, new FakeWeakRef(dispatcher))\n }\n\n [kFactory] (origin) {\n const mockOptions = Object.assign({ agent: this }, this[kOptions])\n return this[kOptions] && this[kOptions].connections === 1\n ? 
new MockClient(origin, mockOptions)\n : new MockPool(origin, mockOptions)\n }\n\n [kMockAgentGet] (origin) {\n // First check if we can immediately find it\n const ref = this[kClients].get(origin)\n if (ref) {\n return ref.deref()\n }\n\n // If the origin is not a string create a dummy parent pool and return to user\n if (typeof origin !== 'string') {\n const dispatcher = this[kFactory]('http://localhost:9999')\n this[kMockAgentSet](origin, dispatcher)\n return dispatcher\n }\n\n // If we match, create a pool and assign the same dispatches\n for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {\n const nonExplicitDispatcher = nonExplicitRef.deref()\n if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {\n const dispatcher = this[kFactory](origin)\n this[kMockAgentSet](origin, dispatcher)\n dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]\n return dispatcher\n }\n }\n }\n\n [kGetNetConnect] () {\n return this[kNetConnect]\n }\n\n pendingInterceptors () {\n const mockAgentClients = this[kClients]\n\n return Array.from(mockAgentClients.entries())\n .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))\n .filter(({ pending }) => pending)\n }\n\n assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {\n const pending = this.pendingInterceptors()\n\n if (pending.length === 0) {\n return\n }\n\n const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)\n\n throw new UndiciError(`\n${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:\n\n${pendingInterceptorsFormatter.format(pending)}\n`.trim())\n }\n}\n\nmodule.exports = MockAgent\n","'use strict'\n\nconst { promisify } = require('util')\nconst Client = require('../client')\nconst { buildMockDispatch } = require('./mock-utils')\nconst {\n kDispatches,\n kMockAgent,\n kClose,\n kOriginalClose,\n kOrigin,\n kOriginalDispatch,\n kConnected\n} = require('./mock-symbols')\nconst { MockInterceptor } = require('./mock-interceptor')\nconst Symbols = require('../core/symbols')\nconst { InvalidArgumentError } = require('../core/errors')\n\n/**\n * MockClient provides an API that extends the Client to influence the mockDispatches.\n */\nclass MockClient extends Client {\n constructor (origin, opts) {\n super(origin, opts)\n\n if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {\n throw new InvalidArgumentError('Argument opts.agent must implement Agent')\n }\n\n this[kMockAgent] = opts.agent\n this[kOrigin] = origin\n this[kDispatches] = []\n this[kConnected] = 1\n this[kOriginalDispatch] = this.dispatch\n this[kOriginalClose] = this.close.bind(this)\n\n this.dispatch = buildMockDispatch.call(this)\n this.close = this[kClose]\n }\n\n get [Symbols.kConnected] () {\n return this[kConnected]\n }\n\n /**\n * Sets up the base interceptor for mocking replies from undici.\n */\n intercept (opts) {\n return new MockInterceptor(opts, this[kDispatches])\n }\n\n async [kClose] () {\n await promisify(this[kOriginalClose])()\n this[kConnected] = 0\n this[kMockAgent][Symbols.kClients].delete(this[kOrigin])\n }\n}\n\nmodule.exports = MockClient\n","'use strict'\n\nconst { UndiciError } = require('../core/errors')\n\nclass MockNotMatchedError extends UndiciError {\n constructor (message) {\n super(message)\n Error.captureStackTrace(this, MockNotMatchedError)\n this.name = 'MockNotMatchedError'\n this.message = message || 'The request does not 
match any registered mock dispatches'\n this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'\n }\n}\n\nmodule.exports = {\n MockNotMatchedError\n}\n","'use strict'\n\nconst { getResponseData, buildKey, addMockDispatch } = require('./mock-utils')\nconst {\n kDispatches,\n kDispatchKey,\n kDefaultHeaders,\n kDefaultTrailers,\n kContentLength,\n kMockDispatch\n} = require('./mock-symbols')\nconst { InvalidArgumentError } = require('../core/errors')\nconst { buildURL } = require('../core/util')\n\n/**\n * Defines the scope API for an interceptor reply\n */\nclass MockScope {\n constructor (mockDispatch) {\n this[kMockDispatch] = mockDispatch\n }\n\n /**\n * Delay a reply by a set amount in ms.\n */\n delay (waitInMs) {\n if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {\n throw new InvalidArgumentError('waitInMs must be a valid integer > 0')\n }\n\n this[kMockDispatch].delay = waitInMs\n return this\n }\n\n /**\n * For a defined reply, never mark as consumed.\n */\n persist () {\n this[kMockDispatch].persist = true\n return this\n }\n\n /**\n * Allow one to define a reply for a set amount of matching requests.\n */\n times (repeatTimes) {\n if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {\n throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')\n }\n\n this[kMockDispatch].times = repeatTimes\n return this\n }\n}\n\n/**\n * Defines an interceptor for a Mock\n */\nclass MockInterceptor {\n constructor (opts, mockDispatches) {\n if (typeof opts !== 'object') {\n throw new InvalidArgumentError('opts must be an object')\n }\n if (typeof opts.path === 'undefined') {\n throw new InvalidArgumentError('opts.path must be defined')\n }\n if (typeof opts.method === 'undefined') {\n opts.method = 'GET'\n }\n // See https://github.com/nodejs/undici/issues/1245\n // As per RFC 3986, clients are not supposed to send URI\n // fragments to servers when they retrieve a document,\n if (typeof opts.path === 'string') {\n if (opts.query) {\n opts.path = buildURL(opts.path, opts.query)\n } else {\n // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811\n const parsedURL = new URL(opts.path, 'data://')\n opts.path = parsedURL.pathname + parsedURL.search\n }\n }\n if (typeof opts.method === 'string') {\n opts.method = opts.method.toUpperCase()\n }\n\n this[kDispatchKey] = buildKey(opts)\n this[kDispatches] = mockDispatches\n this[kDefaultHeaders] = {}\n this[kDefaultTrailers] = {}\n this[kContentLength] = false\n }\n\n createMockScopeDispatchData (statusCode, data, responseOptions = {}) {\n const responseData = getResponseData(data)\n const contentLength = this[kContentLength] ? 
{ 'content-length': responseData.length } : {}\n const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }\n const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }\n\n return { statusCode, data, headers, trailers }\n }\n\n validateReplyParameters (statusCode, data, responseOptions) {\n if (typeof statusCode === 'undefined') {\n throw new InvalidArgumentError('statusCode must be defined')\n }\n if (typeof data === 'undefined') {\n throw new InvalidArgumentError('data must be defined')\n }\n if (typeof responseOptions !== 'object') {\n throw new InvalidArgumentError('responseOptions must be an object')\n }\n }\n\n /**\n * Mock an undici request with a defined reply.\n */\n reply (replyData) {\n // Values of reply aren't available right now as they\n // can only be available when the reply callback is invoked.\n if (typeof replyData === 'function') {\n // We'll first wrap the provided callback in another function,\n // this function will properly resolve the data from the callback\n // when invoked.\n const wrappedDefaultsCallback = (opts) => {\n // Our reply options callback contains the parameter for statusCode, data and options.\n const resolvedData = replyData(opts)\n\n // Check if it is in the right format\n if (typeof resolvedData !== 'object') {\n throw new InvalidArgumentError('reply options callback must return an object')\n }\n\n const { statusCode, data = '', responseOptions = {} } = resolvedData\n this.validateReplyParameters(statusCode, data, responseOptions)\n // Since the values can be obtained immediately we return them\n // from this higher order function that will be resolved later.\n return {\n ...this.createMockScopeDispatchData(statusCode, data, responseOptions)\n }\n }\n\n // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data.\n const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)\n return new MockScope(newMockDispatch)\n }\n\n // We can have either one or three parameters, if we get here,\n // we should have 1-3 parameters. 
So we spread the arguments of\n // this function to obtain the parameters, since replyData will always\n // just be the statusCode.\n const [statusCode, data = '', responseOptions = {}] = [...arguments]\n this.validateReplyParameters(statusCode, data, responseOptions)\n\n // Send in-already provided data like usual\n const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)\n const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)\n return new MockScope(newMockDispatch)\n }\n\n /**\n * Mock an undici request with a defined error.\n */\n replyWithError (error) {\n if (typeof error === 'undefined') {\n throw new InvalidArgumentError('error must be defined')\n }\n\n const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })\n return new MockScope(newMockDispatch)\n }\n\n /**\n * Set default reply headers on the interceptor for subsequent replies\n */\n defaultReplyHeaders (headers) {\n if (typeof headers === 'undefined') {\n throw new InvalidArgumentError('headers must be defined')\n }\n\n this[kDefaultHeaders] = headers\n return this\n }\n\n /**\n * Set default reply trailers on the interceptor for subsequent replies\n */\n defaultReplyTrailers (trailers) {\n if (typeof trailers === 'undefined') {\n throw new InvalidArgumentError('trailers must be defined')\n }\n\n this[kDefaultTrailers] = trailers\n return this\n }\n\n /**\n * Set reply content length header for replies on the interceptor\n */\n replyContentLength () {\n this[kContentLength] = true\n return this\n }\n}\n\nmodule.exports.MockInterceptor = MockInterceptor\nmodule.exports.MockScope = MockScope\n","'use strict'\n\nconst { promisify } = require('util')\nconst Pool = require('../pool')\nconst { buildMockDispatch } = require('./mock-utils')\nconst {\n kDispatches,\n kMockAgent,\n kClose,\n kOriginalClose,\n kOrigin,\n kOriginalDispatch,\n kConnected\n} = require('./mock-symbols')\nconst { MockInterceptor } = require('./mock-interceptor')\nconst Symbols = require('../core/symbols')\nconst { InvalidArgumentError } = require('../core/errors')\n\n/**\n * MockPool provides an API that extends the Pool to influence the mockDispatches.\n */\nclass MockPool extends Pool {\n constructor (origin, opts) {\n super(origin, opts)\n\n if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {\n throw new InvalidArgumentError('Argument opts.agent must implement Agent')\n }\n\n this[kMockAgent] = opts.agent\n this[kOrigin] = origin\n this[kDispatches] = []\n this[kConnected] = 1\n this[kOriginalDispatch] = this.dispatch\n this[kOriginalClose] = this.close.bind(this)\n\n this.dispatch = buildMockDispatch.call(this)\n this.close = this[kClose]\n }\n\n get [Symbols.kConnected] () {\n return this[kConnected]\n }\n\n /**\n * Sets up the base interceptor for mocking replies from undici.\n */\n intercept (opts) {\n return new MockInterceptor(opts, this[kDispatches])\n }\n\n async [kClose] () {\n await promisify(this[kOriginalClose])()\n this[kConnected] = 0\n this[kMockAgent][Symbols.kClients].delete(this[kOrigin])\n }\n}\n\nmodule.exports = MockPool\n","'use strict'\n\nmodule.exports = {\n kAgent: Symbol('agent'),\n kOptions: Symbol('options'),\n kFactory: Symbol('factory'),\n kDispatches: Symbol('dispatches'),\n kDispatchKey: Symbol('dispatch key'),\n kDefaultHeaders: Symbol('default headers'),\n kDefaultTrailers: Symbol('default trailers'),\n kContentLength: Symbol('content length'),\n kMockAgent: Symbol('mock agent'),\n kMockAgentSet: 
Symbol('mock agent set'),\n kMockAgentGet: Symbol('mock agent get'),\n kMockDispatch: Symbol('mock dispatch'),\n kClose: Symbol('close'),\n kOriginalClose: Symbol('original agent close'),\n kOrigin: Symbol('origin'),\n kIsMockActive: Symbol('is mock active'),\n kNetConnect: Symbol('net connect'),\n kGetNetConnect: Symbol('get net connect'),\n kConnected: Symbol('connected')\n}\n","'use strict'\n\nconst { MockNotMatchedError } = require('./mock-errors')\nconst {\n kDispatches,\n kMockAgent,\n kOriginalDispatch,\n kOrigin,\n kGetNetConnect\n} = require('./mock-symbols')\nconst { buildURL, nop } = require('../core/util')\nconst { STATUS_CODES } = require('http')\nconst {\n types: {\n isPromise\n }\n} = require('util')\n\nfunction matchValue (match, value) {\n if (typeof match === 'string') {\n return match === value\n }\n if (match instanceof RegExp) {\n return match.test(value)\n }\n if (typeof match === 'function') {\n return match(value) === true\n }\n return false\n}\n\nfunction lowerCaseEntries (headers) {\n return Object.fromEntries(\n Object.entries(headers).map(([headerName, headerValue]) => {\n return [headerName.toLocaleLowerCase(), headerValue]\n })\n )\n}\n\n/**\n * @param {import('../../index').Headers|string[]|Record} headers\n * @param {string} key\n */\nfunction getHeaderByName (headers, key) {\n if (Array.isArray(headers)) {\n for (let i = 0; i < headers.length; i += 2) {\n if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) {\n return headers[i + 1]\n }\n }\n\n return undefined\n } else if (typeof headers.get === 'function') {\n return headers.get(key)\n } else {\n return lowerCaseEntries(headers)[key.toLocaleLowerCase()]\n }\n}\n\n/** @param {string[]} headers */\nfunction buildHeadersFromArray (headers) { // fetch HeadersList\n const clone = headers.slice()\n const entries = []\n for (let index = 0; index < clone.length; index += 2) {\n entries.push([clone[index], clone[index + 1]])\n }\n return Object.fromEntries(entries)\n}\n\nfunction matchHeaders (mockDispatch, headers) {\n if (typeof mockDispatch.headers === 'function') {\n if (Array.isArray(headers)) { // fetch HeadersList\n headers = buildHeadersFromArray(headers)\n }\n return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})\n }\n if (typeof mockDispatch.headers === 'undefined') {\n return true\n }\n if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {\n return false\n }\n\n for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {\n const headerValue = getHeaderByName(headers, matchHeaderName)\n\n if (!matchValue(matchHeaderValue, headerValue)) {\n return false\n }\n }\n return true\n}\n\nfunction safeUrl (path) {\n if (typeof path !== 'string') {\n return path\n }\n\n const pathSegments = path.split('?')\n\n if (pathSegments.length !== 2) {\n return path\n }\n\n const qp = new URLSearchParams(pathSegments.pop())\n qp.sort()\n return [...pathSegments, qp.toString()].join('?')\n}\n\nfunction matchKey (mockDispatch, { path, method, body, headers }) {\n const pathMatch = matchValue(mockDispatch.path, path)\n const methodMatch = matchValue(mockDispatch.method, method)\n const bodyMatch = typeof mockDispatch.body !== 'undefined' ? 
matchValue(mockDispatch.body, body) : true\n const headersMatch = matchHeaders(mockDispatch, headers)\n return pathMatch && methodMatch && bodyMatch && headersMatch\n}\n\nfunction getResponseData (data) {\n if (Buffer.isBuffer(data)) {\n return data\n } else if (typeof data === 'object') {\n return JSON.stringify(data)\n } else {\n return data.toString()\n }\n}\n\nfunction getMockDispatch (mockDispatches, key) {\n const basePath = key.query ? buildURL(key.path, key.query) : key.path\n const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath\n\n // Match path\n let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)\n }\n\n // Match method\n matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)\n }\n\n // Match body\n matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)\n }\n\n // Match headers\n matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))\n if (matchedMockDispatches.length === 0) {\n throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)\n }\n\n return matchedMockDispatches[0]\n}\n\nfunction addMockDispatch (mockDispatches, key, data) {\n const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }\n const replyData = typeof data === 'function' ? { callback: data } : { ...data }\n const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }\n mockDispatches.push(newMockDispatch)\n return newMockDispatch\n}\n\nfunction deleteMockDispatch (mockDispatches, key) {\n const index = mockDispatches.findIndex(dispatch => {\n if (!dispatch.consumed) {\n return false\n }\n return matchKey(dispatch, key)\n })\n if (index !== -1) {\n mockDispatches.splice(index, 1)\n }\n}\n\nfunction buildKey (opts) {\n const { path, method, body, headers, query } = opts\n return {\n path,\n method,\n body,\n headers,\n query\n }\n}\n\nfunction generateKeyValues (data) {\n return Object.entries(data).reduce((keyValuePairs, [key, value]) => [\n ...keyValuePairs,\n Buffer.from(`${key}`),\n Array.isArray(value) ? 
value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)\n ], [])\n}\n\n/**\n * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status\n * @param {number} statusCode\n */\nfunction getStatusText (statusCode) {\n return STATUS_CODES[statusCode] || 'unknown'\n}\n\nasync function getResponse (body) {\n const buffers = []\n for await (const data of body) {\n buffers.push(data)\n }\n return Buffer.concat(buffers).toString('utf8')\n}\n\n/**\n * Mock dispatch function used to simulate undici dispatches\n */\nfunction mockDispatch (opts, handler) {\n // Get mock dispatch from built key\n const key = buildKey(opts)\n const mockDispatch = getMockDispatch(this[kDispatches], key)\n\n mockDispatch.timesInvoked++\n\n // Here's where we resolve a callback if a callback is present for the dispatch data.\n if (mockDispatch.data.callback) {\n mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }\n }\n\n // Parse mockDispatch data\n const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch\n const { timesInvoked, times } = mockDispatch\n\n // If it's used up and not persistent, mark as consumed\n mockDispatch.consumed = !persist && timesInvoked >= times\n mockDispatch.pending = timesInvoked < times\n\n // If specified, trigger dispatch error\n if (error !== null) {\n deleteMockDispatch(this[kDispatches], key)\n handler.onError(error)\n return true\n }\n\n // Handle the request with a delay if necessary\n if (typeof delay === 'number' && delay > 0) {\n setTimeout(() => {\n handleReply(this[kDispatches])\n }, delay)\n } else {\n handleReply(this[kDispatches])\n }\n\n function handleReply (mockDispatches, _data = data) {\n // fetch's HeadersList is a 1D string array\n const optsHeaders = Array.isArray(opts.headers)\n ? buildHeadersFromArray(opts.headers)\n : opts.headers\n const body = typeof _data === 'function'\n ? 
_data({ ...opts, headers: optsHeaders })\n : _data\n\n // util.types.isPromise is likely needed for jest.\n if (isPromise(body)) {\n // If handleReply is asynchronous, throwing an error\n // in the callback will reject the promise, rather than\n // synchronously throw the error, which breaks some tests.\n // Rather, we wait for the callback to resolve if it is a\n // promise, and then re-run handleReply with the new body.\n body.then((newData) => handleReply(mockDispatches, newData))\n return\n }\n\n const responseData = getResponseData(body)\n const responseHeaders = generateKeyValues(headers)\n const responseTrailers = generateKeyValues(trailers)\n\n handler.abort = nop\n handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))\n handler.onData(Buffer.from(responseData))\n handler.onComplete(responseTrailers)\n deleteMockDispatch(mockDispatches, key)\n }\n\n function resume () {}\n\n return true\n}\n\nfunction buildMockDispatch () {\n const agent = this[kMockAgent]\n const origin = this[kOrigin]\n const originalDispatch = this[kOriginalDispatch]\n\n return function dispatch (opts, handler) {\n if (agent.isMockActive) {\n try {\n mockDispatch.call(this, opts, handler)\n } catch (error) {\n if (error instanceof MockNotMatchedError) {\n const netConnect = agent[kGetNetConnect]()\n if (netConnect === false) {\n throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)\n }\n if (checkNetConnect(netConnect, origin)) {\n originalDispatch.call(this, opts, handler)\n } else {\n throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)\n }\n } else {\n throw error\n }\n }\n } else {\n originalDispatch.call(this, opts, handler)\n }\n }\n}\n\nfunction checkNetConnect (netConnect, origin) {\n const url = new URL(origin)\n if (netConnect === true) {\n return true\n } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) {\n return true\n }\n return false\n}\n\nfunction buildMockOptions (opts) {\n if (opts) {\n const { agent, ...mockOptions } = opts\n return mockOptions\n }\n}\n\nmodule.exports = {\n getResponseData,\n getMockDispatch,\n addMockDispatch,\n deleteMockDispatch,\n buildKey,\n generateKeyValues,\n matchValue,\n getResponse,\n getStatusText,\n mockDispatch,\n buildMockDispatch,\n checkNetConnect,\n buildMockOptions,\n getHeaderByName\n}\n","'use strict'\n\nconst { Transform } = require('stream')\nconst { Console } = require('console')\n\n/**\n * Gets the output of `console.table(…)` as a string.\n */\nmodule.exports = class PendingInterceptorsFormatter {\n constructor ({ disableColors } = {}) {\n this.transform = new Transform({\n transform (chunk, _enc, cb) {\n cb(null, chunk)\n }\n })\n\n this.logger = new Console({\n stdout: this.transform,\n inspectOptions: {\n colors: !disableColors && !process.env.CI\n }\n })\n }\n\n format (pendingInterceptors) {\n const withPrettyHeaders = pendingInterceptors.map(\n ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({\n Method: method,\n Origin: origin,\n Path: path,\n 'Status code': statusCode,\n Persistent: persist ? '✅' : '❌',\n Invocations: timesInvoked,\n Remaining: persist ? 
Infinity : times - timesInvoked\n }))\n\n this.logger.table(withPrettyHeaders)\n return this.transform.read().toString()\n }\n}\n","'use strict'\n\nconst singulars = {\n pronoun: 'it',\n is: 'is',\n was: 'was',\n this: 'this'\n}\n\nconst plurals = {\n pronoun: 'they',\n is: 'are',\n was: 'were',\n this: 'these'\n}\n\nmodule.exports = class Pluralizer {\n constructor (singular, plural) {\n this.singular = singular\n this.plural = plural\n }\n\n pluralize (count) {\n const one = count === 1\n const keys = one ? singulars : plurals\n const noun = one ? this.singular : this.plural\n return { ...keys, count, noun }\n }\n}\n","/* eslint-disable */\n\n'use strict'\n\n// Extracted from node/lib/internal/fixed_queue.js\n\n// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.\nconst kSize = 2048;\nconst kMask = kSize - 1;\n\n// The FixedQueue is implemented as a singly-linked list of fixed-size\n// circular buffers. It looks something like this:\n//\n// head tail\n// | |\n// v v\n// +-----------+ <-----\\ +-----------+ <------\\ +-----------+\n// | [null] | \\----- | next | \\------- | next |\n// +-----------+ +-----------+ +-----------+\n// | item | <-- bottom | item | <-- bottom | [empty] |\n// | item | | item | | [empty] |\n// | item | | item | | [empty] |\n// | item | | item | | [empty] |\n// | item | | item | bottom --> | item |\n// | item | | item | | item |\n// | ... | | ... | | ... |\n// | item | | item | | item |\n// | item | | item | | item |\n// | [empty] | <-- top | item | | item |\n// | [empty] | | item | | item |\n// | [empty] | | [empty] | <-- top top --> | [empty] |\n// +-----------+ +-----------+ +-----------+\n//\n// Or, if there is only one circular buffer, it looks something\n// like either of these:\n//\n// head tail head tail\n// | | | |\n// v v v v\n// +-----------+ +-----------+\n// | [null] | | [null] |\n// +-----------+ +-----------+\n// | [empty] | | item |\n// | [empty] | | item |\n// | item | <-- bottom top --> | [empty] |\n// | item | | [empty] |\n// | [empty] | <-- top bottom --> | item |\n// | [empty] | | item |\n// +-----------+ +-----------+\n//\n// Adding a value means moving `top` forward by one, removing means\n// moving `bottom` forward by one. After reaching the end, the queue\n// wraps around.\n//\n// When `top === bottom` the current queue is empty and when\n// `top + 1 === bottom` it's full. 
This wastes a single space of storage\n// but allows much quicker checks.\n\nclass FixedCircularBuffer {\n constructor() {\n this.bottom = 0;\n this.top = 0;\n this.list = new Array(kSize);\n this.next = null;\n }\n\n isEmpty() {\n return this.top === this.bottom;\n }\n\n isFull() {\n return ((this.top + 1) & kMask) === this.bottom;\n }\n\n push(data) {\n this.list[this.top] = data;\n this.top = (this.top + 1) & kMask;\n }\n\n shift() {\n const nextItem = this.list[this.bottom];\n if (nextItem === undefined)\n return null;\n this.list[this.bottom] = undefined;\n this.bottom = (this.bottom + 1) & kMask;\n return nextItem;\n }\n}\n\nmodule.exports = class FixedQueue {\n constructor() {\n this.head = this.tail = new FixedCircularBuffer();\n }\n\n isEmpty() {\n return this.head.isEmpty();\n }\n\n push(data) {\n if (this.head.isFull()) {\n // Head is full: Creates a new queue, sets the old queue's `.next` to it,\n // and sets it as the new main queue.\n this.head = this.head.next = new FixedCircularBuffer();\n }\n this.head.push(data);\n }\n\n shift() {\n const tail = this.tail;\n const next = tail.shift();\n if (tail.isEmpty() && tail.next !== null) {\n // If there is another queue, it forms the new tail.\n this.tail = tail.next;\n }\n return next;\n }\n};\n","'use strict'\n\nconst DispatcherBase = require('./dispatcher-base')\nconst FixedQueue = require('./node/fixed-queue')\nconst { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols')\nconst PoolStats = require('./pool-stats')\n\nconst kClients = Symbol('clients')\nconst kNeedDrain = Symbol('needDrain')\nconst kQueue = Symbol('queue')\nconst kClosedResolve = Symbol('closed resolve')\nconst kOnDrain = Symbol('onDrain')\nconst kOnConnect = Symbol('onConnect')\nconst kOnDisconnect = Symbol('onDisconnect')\nconst kOnConnectionError = Symbol('onConnectionError')\nconst kGetDispatcher = Symbol('get dispatcher')\nconst kAddClient = Symbol('add client')\nconst kRemoveClient = Symbol('remove client')\nconst kStats = Symbol('stats')\n\nclass PoolBase extends DispatcherBase {\n constructor () {\n super()\n\n this[kQueue] = new FixedQueue()\n this[kClients] = []\n this[kQueued] = 0\n\n const pool = this\n\n this[kOnDrain] = function onDrain (origin, targets) {\n const queue = pool[kQueue]\n\n let needDrain = false\n\n while (!needDrain) {\n const item = queue.shift()\n if (!item) {\n break\n }\n pool[kQueued]--\n needDrain = !this.dispatch(item.opts, item.handler)\n }\n\n this[kNeedDrain] = needDrain\n\n if (!this[kNeedDrain] && pool[kNeedDrain]) {\n pool[kNeedDrain] = false\n pool.emit('drain', origin, [pool, ...targets])\n }\n\n if (pool[kClosedResolve] && queue.isEmpty()) {\n Promise\n .all(pool[kClients].map(c => c.close()))\n .then(pool[kClosedResolve])\n }\n }\n\n this[kOnConnect] = (origin, targets) => {\n pool.emit('connect', origin, [pool, ...targets])\n }\n\n this[kOnDisconnect] = (origin, targets, err) => {\n pool.emit('disconnect', origin, [pool, ...targets], err)\n }\n\n this[kOnConnectionError] = (origin, targets, err) => {\n pool.emit('connectionError', origin, [pool, ...targets], err)\n }\n\n this[kStats] = new PoolStats(this)\n }\n\n get [kBusy] () {\n return this[kNeedDrain]\n }\n\n get [kConnected] () {\n return this[kClients].filter(client => client[kConnected]).length\n }\n\n get [kFree] () {\n return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length\n }\n\n get [kPending] () {\n let ret = this[kQueued]\n for (const { [kPending]: 
pending } of this[kClients]) {\n ret += pending\n }\n return ret\n }\n\n get [kRunning] () {\n let ret = 0\n for (const { [kRunning]: running } of this[kClients]) {\n ret += running\n }\n return ret\n }\n\n get [kSize] () {\n let ret = this[kQueued]\n for (const { [kSize]: size } of this[kClients]) {\n ret += size\n }\n return ret\n }\n\n get stats () {\n return this[kStats]\n }\n\n async [kClose] () {\n if (this[kQueue].isEmpty()) {\n return Promise.all(this[kClients].map(c => c.close()))\n } else {\n return new Promise((resolve) => {\n this[kClosedResolve] = resolve\n })\n }\n }\n\n async [kDestroy] (err) {\n while (true) {\n const item = this[kQueue].shift()\n if (!item) {\n break\n }\n item.handler.onError(err)\n }\n\n return Promise.all(this[kClients].map(c => c.destroy(err)))\n }\n\n [kDispatch] (opts, handler) {\n const dispatcher = this[kGetDispatcher]()\n\n if (!dispatcher) {\n this[kNeedDrain] = true\n this[kQueue].push({ opts, handler })\n this[kQueued]++\n } else if (!dispatcher.dispatch(opts, handler)) {\n dispatcher[kNeedDrain] = true\n this[kNeedDrain] = !this[kGetDispatcher]()\n }\n\n return !this[kNeedDrain]\n }\n\n [kAddClient] (client) {\n client\n .on('drain', this[kOnDrain])\n .on('connect', this[kOnConnect])\n .on('disconnect', this[kOnDisconnect])\n .on('connectionError', this[kOnConnectionError])\n\n this[kClients].push(client)\n\n if (this[kNeedDrain]) {\n process.nextTick(() => {\n if (this[kNeedDrain]) {\n this[kOnDrain](client[kUrl], [this, client])\n }\n })\n }\n\n return this\n }\n\n [kRemoveClient] (client) {\n client.close(() => {\n const idx = this[kClients].indexOf(client)\n if (idx !== -1) {\n this[kClients].splice(idx, 1)\n }\n })\n\n this[kNeedDrain] = this[kClients].some(dispatcher => (\n !dispatcher[kNeedDrain] &&\n dispatcher.closed !== true &&\n dispatcher.destroyed !== true\n ))\n }\n}\n\nmodule.exports = {\n PoolBase,\n kClients,\n kNeedDrain,\n kAddClient,\n kRemoveClient,\n kGetDispatcher\n}\n","const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols')\nconst kPool = Symbol('pool')\n\nclass PoolStats {\n constructor (pool) {\n this[kPool] = pool\n }\n\n get connected () {\n return this[kPool][kConnected]\n }\n\n get free () {\n return this[kPool][kFree]\n }\n\n get pending () {\n return this[kPool][kPending]\n }\n\n get queued () {\n return this[kPool][kQueued]\n }\n\n get running () {\n return this[kPool][kRunning]\n }\n\n get size () {\n return this[kPool][kSize]\n }\n}\n\nmodule.exports = PoolStats\n","'use strict'\n\nconst {\n PoolBase,\n kClients,\n kNeedDrain,\n kAddClient,\n kGetDispatcher\n} = require('./pool-base')\nconst Client = require('./client')\nconst {\n InvalidArgumentError\n} = require('./core/errors')\nconst util = require('./core/util')\nconst { kUrl, kInterceptors } = require('./core/symbols')\nconst buildConnector = require('./core/connect')\n\nconst kOptions = Symbol('options')\nconst kConnections = Symbol('connections')\nconst kFactory = Symbol('factory')\n\nfunction defaultFactory (origin, opts) {\n return new Client(origin, opts)\n}\n\nclass Pool extends PoolBase {\n constructor (origin, {\n connections,\n factory = defaultFactory,\n connect,\n connectTimeout,\n tls,\n maxCachedSessions,\n socketPath,\n autoSelectFamily,\n autoSelectFamilyAttemptTimeout,\n allowH2,\n ...options\n } = {}) {\n super()\n\n if (connections != null && (!Number.isFinite(connections) || connections < 0)) {\n throw new InvalidArgumentError('invalid connections')\n }\n\n if (typeof factory !== 'function') {\n 
throw new InvalidArgumentError('factory must be a function.')\n }\n\n if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {\n throw new InvalidArgumentError('connect must be a function or an object')\n }\n\n if (typeof connect !== 'function') {\n connect = buildConnector({\n ...tls,\n maxCachedSessions,\n allowH2,\n socketPath,\n timeout: connectTimeout,\n ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),\n ...connect\n })\n }\n\n this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)\n ? options.interceptors.Pool\n : []\n this[kConnections] = connections || null\n this[kUrl] = util.parseOrigin(origin)\n this[kOptions] = { ...util.deepClone(options), connect, allowH2 }\n this[kOptions].interceptors = options.interceptors\n ? { ...options.interceptors }\n : undefined\n this[kFactory] = factory\n }\n\n [kGetDispatcher] () {\n let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])\n\n if (dispatcher) {\n return dispatcher\n }\n\n if (!this[kConnections] || this[kClients].length < this[kConnections]) {\n dispatcher = this[kFactory](this[kUrl], this[kOptions])\n this[kAddClient](dispatcher)\n }\n\n return dispatcher\n }\n}\n\nmodule.exports = Pool\n","'use strict'\n\nconst { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')\nconst { URL } = require('url')\nconst Agent = require('./agent')\nconst Pool = require('./pool')\nconst DispatcherBase = require('./dispatcher-base')\nconst { InvalidArgumentError, RequestAbortedError } = require('./core/errors')\nconst buildConnector = require('./core/connect')\n\nconst kAgent = Symbol('proxy agent')\nconst kClient = Symbol('proxy client')\nconst kProxyHeaders = Symbol('proxy headers')\nconst kRequestTls = Symbol('request tls settings')\nconst kProxyTls = Symbol('proxy tls settings')\nconst kConnectEndpoint = Symbol('connect endpoint function')\n\nfunction defaultProtocolPort (protocol) {\n return protocol === 'https:' ? 443 : 80\n}\n\nfunction buildProxyOptions (opts) {\n if (typeof opts === 'string') {\n opts = { uri: opts }\n }\n\n if (!opts || !opts.uri) {\n throw new InvalidArgumentError('Proxy opts.uri is mandatory')\n }\n\n return {\n uri: opts.uri,\n protocol: opts.protocol || 'https'\n }\n}\n\nfunction defaultFactory (origin, opts) {\n return new Pool(origin, opts)\n}\n\nclass ProxyAgent extends DispatcherBase {\n constructor (opts) {\n super(opts)\n this[kProxy] = buildProxyOptions(opts)\n this[kAgent] = new Agent(opts)\n this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)\n ? 
opts.interceptors.ProxyAgent\n : []\n\n if (typeof opts === 'string') {\n opts = { uri: opts }\n }\n\n if (!opts || !opts.uri) {\n throw new InvalidArgumentError('Proxy opts.uri is mandatory')\n }\n\n const { clientFactory = defaultFactory } = opts\n\n if (typeof clientFactory !== 'function') {\n throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')\n }\n\n this[kRequestTls] = opts.requestTls\n this[kProxyTls] = opts.proxyTls\n this[kProxyHeaders] = opts.headers || {}\n\n const resolvedUrl = new URL(opts.uri)\n const { origin, port, host, username, password } = resolvedUrl\n\n if (opts.auth && opts.token) {\n throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')\n } else if (opts.auth) {\n /* @deprecated in favour of opts.token */\n this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`\n } else if (opts.token) {\n this[kProxyHeaders]['proxy-authorization'] = opts.token\n } else if (username && password) {\n this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`\n }\n\n const connect = buildConnector({ ...opts.proxyTls })\n this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })\n this[kClient] = clientFactory(resolvedUrl, { connect })\n this[kAgent] = new Agent({\n ...opts,\n connect: async (opts, callback) => {\n let requestedHost = opts.host\n if (!opts.port) {\n requestedHost += `:${defaultProtocolPort(opts.protocol)}`\n }\n try {\n const { socket, statusCode } = await this[kClient].connect({\n origin,\n port,\n path: requestedHost,\n signal: opts.signal,\n headers: {\n ...this[kProxyHeaders],\n host\n }\n })\n if (statusCode !== 200) {\n socket.on('error', () => {}).destroy()\n callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))\n }\n if (opts.protocol !== 'https:') {\n callback(null, socket)\n return\n }\n let servername\n if (this[kRequestTls]) {\n servername = this[kRequestTls].servername\n } else {\n servername = opts.servername\n }\n this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)\n } catch (err) {\n callback(err)\n }\n }\n })\n }\n\n dispatch (opts, handler) {\n const { host } = new URL(opts.origin)\n const headers = buildHeaders(opts.headers)\n throwIfProxyAuthIsSent(headers)\n return this[kAgent].dispatch(\n {\n ...opts,\n headers: {\n ...headers,\n host\n }\n },\n handler\n )\n }\n\n async [kClose] () {\n await this[kAgent].close()\n await this[kClient].close()\n }\n\n async [kDestroy] () {\n await this[kAgent].destroy()\n await this[kClient].destroy()\n }\n}\n\n/**\n * @param {string[] | Record} headers\n * @returns {Record}\n */\nfunction buildHeaders (headers) {\n // When using undici.fetch, the headers list is stored\n // as an array.\n if (Array.isArray(headers)) {\n /** @type {Record} */\n const headersPair = {}\n\n for (let i = 0; i < headers.length; i += 2) {\n headersPair[headers[i]] = headers[i + 1]\n }\n\n return headersPair\n }\n\n return headers\n}\n\n/**\n * @param {Record} headers\n *\n * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers\n * Nevertheless, it was changed and to avoid a security vulnerability by end users\n * this check was created.\n * It should be removed in the next major version for performance reasons\n */\nfunction throwIfProxyAuthIsSent (headers) {\n const existProxyAuth = headers && Object.keys(headers)\n .find((key) => key.toLowerCase() === 
'proxy-authorization')\n if (existProxyAuth) {\n throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')\n }\n}\n\nmodule.exports = ProxyAgent\n","'use strict'\n\nlet fastNow = Date.now()\nlet fastNowTimeout\n\nconst fastTimers = []\n\nfunction onTimeout () {\n fastNow = Date.now()\n\n let len = fastTimers.length\n let idx = 0\n while (idx < len) {\n const timer = fastTimers[idx]\n\n if (timer.state === 0) {\n timer.state = fastNow + timer.delay\n } else if (timer.state > 0 && fastNow >= timer.state) {\n timer.state = -1\n timer.callback(timer.opaque)\n }\n\n if (timer.state === -1) {\n timer.state = -2\n if (idx !== len - 1) {\n fastTimers[idx] = fastTimers.pop()\n } else {\n fastTimers.pop()\n }\n len -= 1\n } else {\n idx += 1\n }\n }\n\n if (fastTimers.length > 0) {\n refreshTimeout()\n }\n}\n\nfunction refreshTimeout () {\n if (fastNowTimeout && fastNowTimeout.refresh) {\n fastNowTimeout.refresh()\n } else {\n clearTimeout(fastNowTimeout)\n fastNowTimeout = setTimeout(onTimeout, 1e3)\n if (fastNowTimeout.unref) {\n fastNowTimeout.unref()\n }\n }\n}\n\nclass Timeout {\n constructor (callback, delay, opaque) {\n this.callback = callback\n this.delay = delay\n this.opaque = opaque\n\n // -2 not in timer list\n // -1 in timer list but inactive\n // 0 in timer list waiting for time\n // > 0 in timer list waiting for time to expire\n this.state = -2\n\n this.refresh()\n }\n\n refresh () {\n if (this.state === -2) {\n fastTimers.push(this)\n if (!fastNowTimeout || fastTimers.length === 1) {\n refreshTimeout()\n }\n }\n\n this.state = 0\n }\n\n clear () {\n this.state = -1\n }\n}\n\nmodule.exports = {\n setTimeout (callback, delay, opaque) {\n return delay < 1e3\n ? setTimeout(callback, delay, opaque)\n : new Timeout(callback, delay, opaque)\n },\n clearTimeout (timeout) {\n if (timeout instanceof Timeout) {\n timeout.clear()\n } else {\n clearTimeout(timeout)\n }\n }\n}\n","'use strict'\n\nconst diagnosticsChannel = require('diagnostics_channel')\nconst { uid, states } = require('./constants')\nconst {\n kReadyState,\n kSentClose,\n kByteParser,\n kReceivedClose\n} = require('./symbols')\nconst { fireEvent, failWebsocketConnection } = require('./util')\nconst { CloseEvent } = require('./events')\nconst { makeRequest } = require('../fetch/request')\nconst { fetching } = require('../fetch/index')\nconst { Headers } = require('../fetch/headers')\nconst { getGlobalDispatcher } = require('../global')\nconst { kHeadersList } = require('../core/symbols')\n\nconst channels = {}\nchannels.open = diagnosticsChannel.channel('undici:websocket:open')\nchannels.close = diagnosticsChannel.channel('undici:websocket:close')\nchannels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')\n\n/** @type {import('crypto')} */\nlet crypto\ntry {\n crypto = require('crypto')\n} catch {\n\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#concept-websocket-establish\n * @param {URL} url\n * @param {string|string[]} protocols\n * @param {import('./websocket').WebSocket} ws\n * @param {(response: any) => void} onEstablish\n * @param {Partial} options\n */\nfunction establishWebSocketConnection (url, protocols, ws, onEstablish, options) {\n // 1. Let requestURL be a copy of url, with its scheme set to \"http\", if url’s\n // scheme is \"ws\", and to \"https\" otherwise.\n const requestURL = url\n\n requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:'\n\n // 2. 
Let request be a new request, whose URL is requestURL, client is client,\n // service-workers mode is \"none\", referrer is \"no-referrer\", mode is\n // \"websocket\", credentials mode is \"include\", cache mode is \"no-store\" ,\n // and redirect mode is \"error\".\n const request = makeRequest({\n urlList: [requestURL],\n serviceWorkers: 'none',\n referrer: 'no-referrer',\n mode: 'websocket',\n credentials: 'include',\n cache: 'no-store',\n redirect: 'error'\n })\n\n // Note: undici extension, allow setting custom headers.\n if (options.headers) {\n const headersList = new Headers(options.headers)[kHeadersList]\n\n request.headersList = headersList\n }\n\n // 3. Append (`Upgrade`, `websocket`) to request’s header list.\n // 4. Append (`Connection`, `Upgrade`) to request’s header list.\n // Note: both of these are handled by undici currently.\n // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397\n\n // 5. Let keyValue be a nonce consisting of a randomly selected\n // 16-byte value that has been forgiving-base64-encoded and\n // isomorphic encoded.\n const keyValue = crypto.randomBytes(16).toString('base64')\n\n // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s\n // header list.\n request.headersList.append('sec-websocket-key', keyValue)\n\n // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s\n // header list.\n request.headersList.append('sec-websocket-version', '13')\n\n // 8. For each protocol in protocols, combine\n // (`Sec-WebSocket-Protocol`, protocol) in request’s header\n // list.\n for (const protocol of protocols) {\n request.headersList.append('sec-websocket-protocol', protocol)\n }\n\n // 9. Let permessageDeflate be a user-agent defined\n // \"permessage-deflate\" extension header value.\n // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673\n // TODO: enable once permessage-deflate is supported\n const permessageDeflate = '' // 'permessage-deflate; 15'\n\n // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to\n // request’s header list.\n // request.headersList.append('sec-websocket-extensions', permessageDeflate)\n\n // 11. Fetch request with useParallelQueue set to true, and\n // processResponse given response being these steps:\n const controller = fetching({\n request,\n useParallelQueue: true,\n dispatcher: options.dispatcher ?? getGlobalDispatcher(),\n processResponse (response) {\n // 1. If response is a network error or its status is not 101,\n // fail the WebSocket connection.\n if (response.type === 'error' || response.status !== 101) {\n failWebsocketConnection(ws, 'Received network error or non-101 status code.')\n return\n }\n\n // 2. If protocols is not the empty list and extracting header\n // list values given `Sec-WebSocket-Protocol` and response’s\n // header list results in null, failure, or the empty byte\n // sequence, then fail the WebSocket connection.\n if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) {\n failWebsocketConnection(ws, 'Server did not respond with sent protocols.')\n return\n }\n\n // 3. Follow the requirements stated step 2 to step 6, inclusive,\n // of the last set of steps in section 4.1 of The WebSocket\n // Protocol to validate response. This either results in fail\n // the WebSocket connection or the WebSocket connection is\n // established.\n\n // 2. 
If the response lacks an |Upgrade| header field or the |Upgrade|\n // header field contains a value that is not an ASCII case-\n // insensitive match for the value \"websocket\", the client MUST\n // _Fail the WebSocket Connection_.\n if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') {\n failWebsocketConnection(ws, 'Server did not set Upgrade header to \"websocket\".')\n return\n }\n\n // 3. If the response lacks a |Connection| header field or the\n // |Connection| header field doesn't contain a token that is an\n // ASCII case-insensitive match for the value \"Upgrade\", the client\n // MUST _Fail the WebSocket Connection_.\n if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') {\n failWebsocketConnection(ws, 'Server did not set Connection header to \"upgrade\".')\n return\n }\n\n // 4. If the response lacks a |Sec-WebSocket-Accept| header field or\n // the |Sec-WebSocket-Accept| contains a value other than the\n // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-\n // Key| (as a string, not base64-decoded) with the string \"258EAFA5-\n // E914-47DA-95CA-C5AB0DC85B11\" but ignoring any leading and\n // trailing whitespace, the client MUST _Fail the WebSocket\n // Connection_.\n const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')\n const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')\n if (secWSAccept !== digest) {\n failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')\n return\n }\n\n // 5. If the response includes a |Sec-WebSocket-Extensions| header\n // field and this header field indicates the use of an extension\n // that was not present in the client's handshake (the server has\n // indicated an extension not requested by the client), the client\n // MUST _Fail the WebSocket Connection_. (The parsing of this\n // header field to determine which extensions are requested is\n // discussed in Section 9.1.)\n const secExtension = response.headersList.get('Sec-WebSocket-Extensions')\n\n if (secExtension !== null && secExtension !== permessageDeflate) {\n failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.')\n return\n }\n\n // 6. 
If the response includes a |Sec-WebSocket-Protocol| header field\n // and this header field indicates the use of a subprotocol that was\n // not present in the client's handshake (the server has indicated a\n // subprotocol not requested by the client), the client MUST _Fail\n // the WebSocket Connection_.\n const secProtocol = response.headersList.get('Sec-WebSocket-Protocol')\n\n if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) {\n failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.')\n return\n }\n\n response.socket.on('data', onSocketData)\n response.socket.on('close', onSocketClose)\n response.socket.on('error', onSocketError)\n\n if (channels.open.hasSubscribers) {\n channels.open.publish({\n address: response.socket.address(),\n protocol: secProtocol,\n extensions: secExtension\n })\n }\n\n onEstablish(response)\n }\n })\n\n return controller\n}\n\n/**\n * @param {Buffer} chunk\n */\nfunction onSocketData (chunk) {\n if (!this.ws[kByteParser].write(chunk)) {\n this.pause()\n }\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol\n * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4\n */\nfunction onSocketClose () {\n const { ws } = this\n\n // If the TCP connection was closed after the\n // WebSocket closing handshake was completed, the WebSocket connection\n // is said to have been closed _cleanly_.\n const wasClean = ws[kSentClose] && ws[kReceivedClose]\n\n let code = 1005\n let reason = ''\n\n const result = ws[kByteParser].closingInfo\n\n if (result) {\n code = result.code ?? 1005\n reason = result.reason\n } else if (!ws[kSentClose]) {\n // If _The WebSocket\n // Connection is Closed_ and no Close control frame was received by the\n // endpoint (such as could occur if the underlying transport connection\n // is lost), _The WebSocket Connection Close Code_ is considered to be\n // 1006.\n code = 1006\n }\n\n // 1. Change the ready state to CLOSED (3).\n ws[kReadyState] = states.CLOSED\n\n // 2. If the user agent was required to fail the WebSocket\n // connection, or if the WebSocket connection was closed\n // after being flagged as full, fire an event named error\n // at the WebSocket object.\n // TODO\n\n // 3. 
Fire an event named close at the WebSocket object,\n // using CloseEvent, with the wasClean attribute\n // initialized to true if the connection closed cleanly\n // and false otherwise, the code attribute initialized to\n // the WebSocket connection close code, and the reason\n // attribute initialized to the result of applying UTF-8\n // decode without BOM to the WebSocket connection close\n // reason.\n fireEvent('close', ws, CloseEvent, {\n wasClean, code, reason\n })\n\n if (channels.close.hasSubscribers) {\n channels.close.publish({\n websocket: ws,\n code,\n reason\n })\n }\n}\n\nfunction onSocketError (error) {\n const { ws } = this\n\n ws[kReadyState] = states.CLOSING\n\n if (channels.socketError.hasSubscribers) {\n channels.socketError.publish(error)\n }\n\n this.destroy()\n}\n\nmodule.exports = {\n establishWebSocketConnection\n}\n","'use strict'\n\n// This is a Globally Unique Identifier unique used\n// to validate that the endpoint accepts websocket\n// connections.\n// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3\nconst uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'\n\n/** @type {PropertyDescriptor} */\nconst staticPropertyDescriptors = {\n enumerable: true,\n writable: false,\n configurable: false\n}\n\nconst states = {\n CONNECTING: 0,\n OPEN: 1,\n CLOSING: 2,\n CLOSED: 3\n}\n\nconst opcodes = {\n CONTINUATION: 0x0,\n TEXT: 0x1,\n BINARY: 0x2,\n CLOSE: 0x8,\n PING: 0x9,\n PONG: 0xA\n}\n\nconst maxUnsigned16Bit = 2 ** 16 - 1 // 65535\n\nconst parserStates = {\n INFO: 0,\n PAYLOADLENGTH_16: 2,\n PAYLOADLENGTH_64: 3,\n READ_DATA: 4\n}\n\nconst emptyBuffer = Buffer.allocUnsafe(0)\n\nmodule.exports = {\n uid,\n staticPropertyDescriptors,\n states,\n opcodes,\n maxUnsigned16Bit,\n parserStates,\n emptyBuffer\n}\n","'use strict'\n\nconst { webidl } = require('../fetch/webidl')\nconst { kEnumerableProperty } = require('../core/util')\nconst { MessagePort } = require('worker_threads')\n\n/**\n * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent\n */\nclass MessageEvent extends Event {\n #eventInit\n\n constructor (type, eventInitDict = {}) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' })\n\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.MessageEventInit(eventInitDict)\n\n super(type, eventInitDict)\n\n this.#eventInit = eventInitDict\n }\n\n get data () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.data\n }\n\n get origin () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.origin\n }\n\n get lastEventId () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.lastEventId\n }\n\n get source () {\n webidl.brandCheck(this, MessageEvent)\n\n return this.#eventInit.source\n }\n\n get ports () {\n webidl.brandCheck(this, MessageEvent)\n\n if (!Object.isFrozen(this.#eventInit.ports)) {\n Object.freeze(this.#eventInit.ports)\n }\n\n return this.#eventInit.ports\n }\n\n initMessageEvent (\n type,\n bubbles = false,\n cancelable = false,\n data = null,\n origin = '',\n lastEventId = '',\n source = null,\n ports = []\n ) {\n webidl.brandCheck(this, MessageEvent)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' })\n\n return new MessageEvent(type, {\n bubbles, cancelable, data, origin, lastEventId, source, ports\n })\n }\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#the-closeevent-interface\n */\nclass CloseEvent extends Event {\n #eventInit\n\n constructor (type, eventInitDict = {}) {\n 
webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' })\n\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.CloseEventInit(eventInitDict)\n\n super(type, eventInitDict)\n\n this.#eventInit = eventInitDict\n }\n\n get wasClean () {\n webidl.brandCheck(this, CloseEvent)\n\n return this.#eventInit.wasClean\n }\n\n get code () {\n webidl.brandCheck(this, CloseEvent)\n\n return this.#eventInit.code\n }\n\n get reason () {\n webidl.brandCheck(this, CloseEvent)\n\n return this.#eventInit.reason\n }\n}\n\n// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface\nclass ErrorEvent extends Event {\n #eventInit\n\n constructor (type, eventInitDict) {\n webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' })\n\n super(type, eventInitDict)\n\n type = webidl.converters.DOMString(type)\n eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {})\n\n this.#eventInit = eventInitDict\n }\n\n get message () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.message\n }\n\n get filename () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.filename\n }\n\n get lineno () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.lineno\n }\n\n get colno () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.colno\n }\n\n get error () {\n webidl.brandCheck(this, ErrorEvent)\n\n return this.#eventInit.error\n }\n}\n\nObject.defineProperties(MessageEvent.prototype, {\n [Symbol.toStringTag]: {\n value: 'MessageEvent',\n configurable: true\n },\n data: kEnumerableProperty,\n origin: kEnumerableProperty,\n lastEventId: kEnumerableProperty,\n source: kEnumerableProperty,\n ports: kEnumerableProperty,\n initMessageEvent: kEnumerableProperty\n})\n\nObject.defineProperties(CloseEvent.prototype, {\n [Symbol.toStringTag]: {\n value: 'CloseEvent',\n configurable: true\n },\n reason: kEnumerableProperty,\n code: kEnumerableProperty,\n wasClean: kEnumerableProperty\n})\n\nObject.defineProperties(ErrorEvent.prototype, {\n [Symbol.toStringTag]: {\n value: 'ErrorEvent',\n configurable: true\n },\n message: kEnumerableProperty,\n filename: kEnumerableProperty,\n lineno: kEnumerableProperty,\n colno: kEnumerableProperty,\n error: kEnumerableProperty\n})\n\nwebidl.converters.MessagePort = webidl.interfaceConverter(MessagePort)\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.MessagePort\n)\n\nconst eventInit = [\n {\n key: 'bubbles',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'cancelable',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'composed',\n converter: webidl.converters.boolean,\n defaultValue: false\n }\n]\n\nwebidl.converters.MessageEventInit = webidl.dictionaryConverter([\n ...eventInit,\n {\n key: 'data',\n converter: webidl.converters.any,\n defaultValue: null\n },\n {\n key: 'origin',\n converter: webidl.converters.USVString,\n defaultValue: ''\n },\n {\n key: 'lastEventId',\n converter: webidl.converters.DOMString,\n defaultValue: ''\n },\n {\n key: 'source',\n // Node doesn't implement WindowProxy or ServiceWorker, so the only\n // valid value for source is a MessagePort.\n converter: webidl.nullableConverter(webidl.converters.MessagePort),\n defaultValue: null\n },\n {\n key: 'ports',\n converter: webidl.converters['sequence'],\n get defaultValue () {\n return []\n }\n }\n])\n\nwebidl.converters.CloseEventInit = webidl.dictionaryConverter([\n 
...eventInit,\n {\n key: 'wasClean',\n converter: webidl.converters.boolean,\n defaultValue: false\n },\n {\n key: 'code',\n converter: webidl.converters['unsigned short'],\n defaultValue: 0\n },\n {\n key: 'reason',\n converter: webidl.converters.USVString,\n defaultValue: ''\n }\n])\n\nwebidl.converters.ErrorEventInit = webidl.dictionaryConverter([\n ...eventInit,\n {\n key: 'message',\n converter: webidl.converters.DOMString,\n defaultValue: ''\n },\n {\n key: 'filename',\n converter: webidl.converters.USVString,\n defaultValue: ''\n },\n {\n key: 'lineno',\n converter: webidl.converters['unsigned long'],\n defaultValue: 0\n },\n {\n key: 'colno',\n converter: webidl.converters['unsigned long'],\n defaultValue: 0\n },\n {\n key: 'error',\n converter: webidl.converters.any\n }\n])\n\nmodule.exports = {\n MessageEvent,\n CloseEvent,\n ErrorEvent\n}\n","'use strict'\n\nconst { maxUnsigned16Bit } = require('./constants')\n\n/** @type {import('crypto')} */\nlet crypto\ntry {\n crypto = require('crypto')\n} catch {\n\n}\n\nclass WebsocketFrameSend {\n /**\n * @param {Buffer|undefined} data\n */\n constructor (data) {\n this.frameData = data\n this.maskKey = crypto.randomBytes(4)\n }\n\n createFrame (opcode) {\n const bodyLength = this.frameData?.byteLength ?? 0\n\n /** @type {number} */\n let payloadLength = bodyLength // 0-125\n let offset = 6\n\n if (bodyLength > maxUnsigned16Bit) {\n offset += 8 // payload length is next 8 bytes\n payloadLength = 127\n } else if (bodyLength > 125) {\n offset += 2 // payload length is next 2 bytes\n payloadLength = 126\n }\n\n const buffer = Buffer.allocUnsafe(bodyLength + offset)\n\n // Clear first 2 bytes, everything else is overwritten\n buffer[0] = buffer[1] = 0\n buffer[0] |= 0x80 // FIN\n buffer[0] = (buffer[0] & 0xF0) + opcode // opcode\n\n /*! ws. MIT License. 
Einar Otto Stangvik */\n buffer[offset - 4] = this.maskKey[0]\n buffer[offset - 3] = this.maskKey[1]\n buffer[offset - 2] = this.maskKey[2]\n buffer[offset - 1] = this.maskKey[3]\n\n buffer[1] = payloadLength\n\n if (payloadLength === 126) {\n buffer.writeUInt16BE(bodyLength, 2)\n } else if (payloadLength === 127) {\n // Clear extended payload length\n buffer[2] = buffer[3] = 0\n buffer.writeUIntBE(bodyLength, 4, 6)\n }\n\n buffer[1] |= 0x80 // MASK\n\n // mask body\n for (let i = 0; i < bodyLength; i++) {\n buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4]\n }\n\n return buffer\n }\n}\n\nmodule.exports = {\n WebsocketFrameSend\n}\n","'use strict'\n\nconst { Writable } = require('stream')\nconst diagnosticsChannel = require('diagnostics_channel')\nconst { parserStates, opcodes, states, emptyBuffer } = require('./constants')\nconst { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols')\nconst { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require('./util')\nconst { WebsocketFrameSend } = require('./frame')\n\n// This code was influenced by ws released under the MIT license.\n// Copyright (c) 2011 Einar Otto Stangvik \n// Copyright (c) 2013 Arnout Kazemier and contributors\n// Copyright (c) 2016 Luigi Pinca and contributors\n\nconst channels = {}\nchannels.ping = diagnosticsChannel.channel('undici:websocket:ping')\nchannels.pong = diagnosticsChannel.channel('undici:websocket:pong')\n\nclass ByteParser extends Writable {\n #buffers = []\n #byteOffset = 0\n\n #state = parserStates.INFO\n\n #info = {}\n #fragments = []\n\n constructor (ws) {\n super()\n\n this.ws = ws\n }\n\n /**\n * @param {Buffer} chunk\n * @param {() => void} callback\n */\n _write (chunk, _, callback) {\n this.#buffers.push(chunk)\n this.#byteOffset += chunk.length\n\n this.run(callback)\n }\n\n /**\n * Runs whenever a new chunk is received.\n * Callback is called whenever there are no more chunks buffering,\n * or not enough bytes are buffered to parse.\n */\n run (callback) {\n while (true) {\n if (this.#state === parserStates.INFO) {\n // If there aren't enough bytes to parse the payload length, etc.\n if (this.#byteOffset < 2) {\n return callback()\n }\n\n const buffer = this.consume(2)\n\n this.#info.fin = (buffer[0] & 0x80) !== 0\n this.#info.opcode = buffer[0] & 0x0F\n\n // If we receive a fragmented message, we use the type of the first\n // frame to parse the full message as binary/text, when it's terminated\n this.#info.originalOpcode ??= this.#info.opcode\n\n this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION\n\n if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {\n // Only text and binary frames can be fragmented\n failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.')\n return\n }\n\n const payloadLength = buffer[1] & 0x7F\n\n if (payloadLength <= 125) {\n this.#info.payloadLength = payloadLength\n this.#state = parserStates.READ_DATA\n } else if (payloadLength === 126) {\n this.#state = parserStates.PAYLOADLENGTH_16\n } else if (payloadLength === 127) {\n this.#state = parserStates.PAYLOADLENGTH_64\n }\n\n if (this.#info.fragmented && payloadLength > 125) {\n // A fragmented frame can't be fragmented itself\n failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.')\n return\n } else if (\n (this.#info.opcode === opcodes.PING ||\n this.#info.opcode === opcodes.PONG ||\n this.#info.opcode === opcodes.CLOSE) &&\n payloadLength > 
125\n ) {\n // Control frames can have a payload length of 125 bytes MAX\n failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.')\n return\n } else if (this.#info.opcode === opcodes.CLOSE) {\n if (payloadLength === 1) {\n failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.')\n return\n }\n\n const body = this.consume(payloadLength)\n\n this.#info.closeInfo = this.parseCloseBody(false, body)\n\n if (!this.ws[kSentClose]) {\n // If an endpoint receives a Close frame and did not previously send a\n // Close frame, the endpoint MUST send a Close frame in response. (When\n // sending a Close frame in response, the endpoint typically echos the\n // status code it received.)\n const body = Buffer.allocUnsafe(2)\n body.writeUInt16BE(this.#info.closeInfo.code, 0)\n const closeFrame = new WebsocketFrameSend(body)\n\n this.ws[kResponse].socket.write(\n closeFrame.createFrame(opcodes.CLOSE),\n (err) => {\n if (!err) {\n this.ws[kSentClose] = true\n }\n }\n )\n }\n\n // Upon either sending or receiving a Close control frame, it is said\n // that _The WebSocket Closing Handshake is Started_ and that the\n // WebSocket connection is in the CLOSING state.\n this.ws[kReadyState] = states.CLOSING\n this.ws[kReceivedClose] = true\n\n this.end()\n\n return\n } else if (this.#info.opcode === opcodes.PING) {\n // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in\n // response, unless it already received a Close frame.\n // A Pong frame sent in response to a Ping frame must have identical\n // \"Application data\"\n\n const body = this.consume(payloadLength)\n\n if (!this.ws[kReceivedClose]) {\n const frame = new WebsocketFrameSend(body)\n\n this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG))\n\n if (channels.ping.hasSubscribers) {\n channels.ping.publish({\n payload: body\n })\n }\n }\n\n this.#state = parserStates.INFO\n\n if (this.#byteOffset > 0) {\n continue\n } else {\n callback()\n return\n }\n } else if (this.#info.opcode === opcodes.PONG) {\n // A Pong frame MAY be sent unsolicited. This serves as a\n // unidirectional heartbeat. A response to an unsolicited Pong frame is\n // not expected.\n\n const body = this.consume(payloadLength)\n\n if (channels.pong.hasSubscribers) {\n channels.pong.publish({\n payload: body\n })\n }\n\n if (this.#byteOffset > 0) {\n continue\n } else {\n callback()\n return\n }\n }\n } else if (this.#state === parserStates.PAYLOADLENGTH_16) {\n if (this.#byteOffset < 2) {\n return callback()\n }\n\n const buffer = this.consume(2)\n\n this.#info.payloadLength = buffer.readUInt16BE(0)\n this.#state = parserStates.READ_DATA\n } else if (this.#state === parserStates.PAYLOADLENGTH_64) {\n if (this.#byteOffset < 8) {\n return callback()\n }\n\n const buffer = this.consume(8)\n const upper = buffer.readUInt32BE(0)\n\n // 2^31 is the maxinimum bytes an arraybuffer can contain\n // on 32-bit systems. 
Although, on 64-bit systems, this is\n // 2^53-1 bytes.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length\n // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275\n // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e\n if (upper > 2 ** 31 - 1) {\n failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.')\n return\n }\n\n const lower = buffer.readUInt32BE(4)\n\n this.#info.payloadLength = (upper << 8) + lower\n this.#state = parserStates.READ_DATA\n } else if (this.#state === parserStates.READ_DATA) {\n if (this.#byteOffset < this.#info.payloadLength) {\n // If there is still more data in this chunk that needs to be read\n return callback()\n } else if (this.#byteOffset >= this.#info.payloadLength) {\n // If the server sent multiple frames in a single chunk\n\n const body = this.consume(this.#info.payloadLength)\n\n this.#fragments.push(body)\n\n // If the frame is unfragmented, or a fragmented frame was terminated,\n // a message was received\n if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) {\n const fullMessage = Buffer.concat(this.#fragments)\n\n websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage)\n\n this.#info = {}\n this.#fragments.length = 0\n }\n\n this.#state = parserStates.INFO\n }\n }\n\n if (this.#byteOffset > 0) {\n continue\n } else {\n callback()\n break\n }\n }\n }\n\n /**\n * Take n bytes from the buffered Buffers\n * @param {number} n\n * @returns {Buffer|null}\n */\n consume (n) {\n if (n > this.#byteOffset) {\n return null\n } else if (n === 0) {\n return emptyBuffer\n }\n\n if (this.#buffers[0].length === n) {\n this.#byteOffset -= this.#buffers[0].length\n return this.#buffers.shift()\n }\n\n const buffer = Buffer.allocUnsafe(n)\n let offset = 0\n\n while (offset !== n) {\n const next = this.#buffers[0]\n const { length } = next\n\n if (length + offset === n) {\n buffer.set(this.#buffers.shift(), offset)\n break\n } else if (length + offset > n) {\n buffer.set(next.subarray(0, n - offset), offset)\n this.#buffers[0] = next.subarray(n - offset)\n break\n } else {\n buffer.set(this.#buffers.shift(), offset)\n offset += next.length\n }\n }\n\n this.#byteOffset -= n\n\n return buffer\n }\n\n parseCloseBody (onlyCode, data) {\n // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5\n /** @type {number|undefined} */\n let code\n\n if (data.length >= 2) {\n // _The WebSocket Connection Close Code_ is\n // defined as the status code (Section 7.4) contained in the first Close\n // control frame received by the application\n code = data.readUInt16BE(0)\n }\n\n if (onlyCode) {\n if (!isValidStatusCode(code)) {\n return null\n }\n\n return { code }\n }\n\n // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6\n /** @type {Buffer} */\n let reason = data.subarray(2)\n\n // Remove BOM\n if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) {\n reason = reason.subarray(3)\n }\n\n if (code !== undefined && !isValidStatusCode(code)) {\n return null\n }\n\n try {\n // TODO: optimize this\n reason = new TextDecoder('utf-8', { fatal: true }).decode(reason)\n } catch {\n return null\n }\n\n return { code, reason }\n }\n\n get closingInfo () {\n return this.#info.closeInfo\n }\n}\n\nmodule.exports = {\n ByteParser\n}\n","'use 
strict'\n\nmodule.exports = {\n kWebSocketURL: Symbol('url'),\n kReadyState: Symbol('ready state'),\n kController: Symbol('controller'),\n kResponse: Symbol('response'),\n kBinaryType: Symbol('binary type'),\n kSentClose: Symbol('sent close'),\n kReceivedClose: Symbol('received close'),\n kByteParser: Symbol('byte parser')\n}\n","'use strict'\n\nconst { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = require('./symbols')\nconst { states, opcodes } = require('./constants')\nconst { MessageEvent, ErrorEvent } = require('./events')\n\n/* globals Blob */\n\n/**\n * @param {import('./websocket').WebSocket} ws\n */\nfunction isEstablished (ws) {\n // If the server's response is validated as provided for above, it is\n // said that _The WebSocket Connection is Established_ and that the\n // WebSocket Connection is in the OPEN state.\n return ws[kReadyState] === states.OPEN\n}\n\n/**\n * @param {import('./websocket').WebSocket} ws\n */\nfunction isClosing (ws) {\n // Upon either sending or receiving a Close control frame, it is said\n // that _The WebSocket Closing Handshake is Started_ and that the\n // WebSocket connection is in the CLOSING state.\n return ws[kReadyState] === states.CLOSING\n}\n\n/**\n * @param {import('./websocket').WebSocket} ws\n */\nfunction isClosed (ws) {\n return ws[kReadyState] === states.CLOSED\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#concept-event-fire\n * @param {string} e\n * @param {EventTarget} target\n * @param {EventInit | undefined} eventInitDict\n */\nfunction fireEvent (e, target, eventConstructor = Event, eventInitDict) {\n // 1. If eventConstructor is not given, then let eventConstructor be Event.\n\n // 2. Let event be the result of creating an event given eventConstructor,\n // in the relevant realm of target.\n // 3. Initialize event’s type attribute to e.\n const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap\n\n // 4. Initialize any other IDL attributes of event as described in the\n // invocation of this algorithm.\n\n // 5. Return the result of dispatching event at target, with legacy target\n // override flag set if set.\n target.dispatchEvent(event)\n}\n\n/**\n * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol\n * @param {import('./websocket').WebSocket} ws\n * @param {number} type Opcode\n * @param {Buffer} data application data\n */\nfunction websocketMessageReceived (ws, type, data) {\n // 1. If ready state is not OPEN (1), then return.\n if (ws[kReadyState] !== states.OPEN) {\n return\n }\n\n // 2. Let dataForEvent be determined by switching on type and binary type:\n let dataForEvent\n\n if (type === opcodes.TEXT) {\n // -> type indicates that the data is Text\n // a new DOMString containing data\n try {\n dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data)\n } catch {\n failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.')\n return\n }\n } else if (type === opcodes.BINARY) {\n if (ws[kBinaryType] === 'blob') {\n // -> type indicates that the data is Binary and binary type is \"blob\"\n // a new Blob object, created in the relevant Realm of the WebSocket\n // object, that represents data as its raw data\n dataForEvent = new Blob([data])\n } else {\n // -> type indicates that the data is Binary and binary type is \"arraybuffer\"\n // a new ArrayBuffer object, created in the relevant Realm of the\n // WebSocket object, whose contents are data\n dataForEvent = new Uint8Array(data).buffer\n }\n }\n\n // 3. 
Fire an event named message at the WebSocket object, using MessageEvent,\n // with the origin attribute initialized to the serialization of the WebSocket\n // object’s url's origin, and the data attribute initialized to dataForEvent.\n fireEvent('message', ws, MessageEvent, {\n origin: ws[kWebSocketURL].origin,\n data: dataForEvent\n })\n}\n\n/**\n * @see https://datatracker.ietf.org/doc/html/rfc6455\n * @see https://datatracker.ietf.org/doc/html/rfc2616\n * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407\n * @param {string} protocol\n */\nfunction isValidSubprotocol (protocol) {\n // If present, this value indicates one\n // or more comma-separated subprotocol the client wishes to speak,\n // ordered by preference. The elements that comprise this value\n // MUST be non-empty strings with characters in the range U+0021 to\n // U+007E not including separator characters as defined in\n // [RFC2616] and MUST all be unique strings.\n if (protocol.length === 0) {\n return false\n }\n\n for (const char of protocol) {\n const code = char.charCodeAt(0)\n\n if (\n code < 0x21 ||\n code > 0x7E ||\n char === '(' ||\n char === ')' ||\n char === '<' ||\n char === '>' ||\n char === '@' ||\n char === ',' ||\n char === ';' ||\n char === ':' ||\n char === '\\\\' ||\n char === '\"' ||\n char === '/' ||\n char === '[' ||\n char === ']' ||\n char === '?' ||\n char === '=' ||\n char === '{' ||\n char === '}' ||\n code === 32 || // SP\n code === 9 // HT\n ) {\n return false\n }\n }\n\n return true\n}\n\n/**\n * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4\n * @param {number} code\n */\nfunction isValidStatusCode (code) {\n if (code >= 1000 && code < 1015) {\n return (\n code !== 1004 && // reserved\n code !== 1005 && // \"MUST NOT be set as a status code\"\n code !== 1006 // \"MUST NOT be set as a status code\"\n )\n }\n\n return code >= 3000 && code <= 4999\n}\n\n/**\n * @param {import('./websocket').WebSocket} ws\n * @param {string|undefined} reason\n */\nfunction failWebsocketConnection (ws, reason) {\n const { [kController]: controller, [kResponse]: response } = ws\n\n controller.abort()\n\n if (response?.socket && !response.socket.destroyed) {\n response.socket.destroy()\n }\n\n if (reason) {\n fireEvent('error', ws, ErrorEvent, {\n error: new Error(reason)\n })\n }\n}\n\nmodule.exports = {\n isEstablished,\n isClosing,\n isClosed,\n fireEvent,\n isValidSubprotocol,\n isValidStatusCode,\n failWebsocketConnection,\n websocketMessageReceived\n}\n","'use strict'\n\nconst { webidl } = require('../fetch/webidl')\nconst { DOMException } = require('../fetch/constants')\nconst { URLSerializer } = require('../fetch/dataURL')\nconst { getGlobalOrigin } = require('../fetch/global')\nconst { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require('./constants')\nconst {\n kWebSocketURL,\n kReadyState,\n kController,\n kBinaryType,\n kResponse,\n kSentClose,\n kByteParser\n} = require('./symbols')\nconst { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require('./util')\nconst { establishWebSocketConnection } = require('./connection')\nconst { WebsocketFrameSend } = require('./frame')\nconst { ByteParser } = require('./receiver')\nconst { kEnumerableProperty, isBlobLike } = require('../core/util')\nconst { getGlobalDispatcher } = require('../global')\nconst { types } = require('util')\n\nlet experimentalWarned = false\n\n// https://websockets.spec.whatwg.org/#interface-definition\nclass WebSocket extends EventTarget {\n #events = {\n 
open: null,\n error: null,\n close: null,\n message: null\n }\n\n #bufferedAmount = 0\n #protocol = ''\n #extensions = ''\n\n /**\n * @param {string} url\n * @param {string|string[]} protocols\n */\n constructor (url, protocols = []) {\n super()\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' })\n\n if (!experimentalWarned) {\n experimentalWarned = true\n process.emitWarning('WebSockets are experimental, expect them to change at any time.', {\n code: 'UNDICI-WS'\n })\n }\n\n const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols)\n\n url = webidl.converters.USVString(url)\n protocols = options.protocols\n\n // 1. Let baseURL be this's relevant settings object's API base URL.\n const baseURL = getGlobalOrigin()\n\n // 1. Let urlRecord be the result of applying the URL parser to url with baseURL.\n let urlRecord\n\n try {\n urlRecord = new URL(url, baseURL)\n } catch (e) {\n // 3. If urlRecord is failure, then throw a \"SyntaxError\" DOMException.\n throw new DOMException(e, 'SyntaxError')\n }\n\n // 4. If urlRecord’s scheme is \"http\", then set urlRecord’s scheme to \"ws\".\n if (urlRecord.protocol === 'http:') {\n urlRecord.protocol = 'ws:'\n } else if (urlRecord.protocol === 'https:') {\n // 5. Otherwise, if urlRecord’s scheme is \"https\", set urlRecord’s scheme to \"wss\".\n urlRecord.protocol = 'wss:'\n }\n\n // 6. If urlRecord’s scheme is not \"ws\" or \"wss\", then throw a \"SyntaxError\" DOMException.\n if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') {\n throw new DOMException(\n `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`,\n 'SyntaxError'\n )\n }\n\n // 7. If urlRecord’s fragment is non-null, then throw a \"SyntaxError\"\n // DOMException.\n if (urlRecord.hash || urlRecord.href.endsWith('#')) {\n throw new DOMException('Got fragment', 'SyntaxError')\n }\n\n // 8. If protocols is a string, set protocols to a sequence consisting\n // of just that string.\n if (typeof protocols === 'string') {\n protocols = [protocols]\n }\n\n // 9. If any of the values in protocols occur more than once or otherwise\n // fail to match the requirements for elements that comprise the value\n // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket\n // protocol, then throw a \"SyntaxError\" DOMException.\n if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) {\n throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')\n }\n\n if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) {\n throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')\n }\n\n // 10. Set this's url to urlRecord.\n this[kWebSocketURL] = new URL(urlRecord.href)\n\n // 11. Let client be this's relevant settings object.\n\n // 12. Run this step in parallel:\n\n // 1. Establish a WebSocket connection given urlRecord, protocols,\n // and client.\n this[kController] = establishWebSocketConnection(\n urlRecord,\n protocols,\n this,\n (response) => this.#onConnectionEstablished(response),\n options\n )\n\n // Each WebSocket object has an associated ready state, which is a\n // number representing the state of the connection. Initially it must\n // be CONNECTING (0).\n this[kReadyState] = WebSocket.CONNECTING\n\n // The extensions attribute must initially return the empty string.\n\n // The protocol attribute must initially return the empty string.\n\n // Each WebSocket object has an associated binary type, which is a\n // BinaryType. 
Initially it must be \"blob\".\n this[kBinaryType] = 'blob'\n }\n\n /**\n * @see https://websockets.spec.whatwg.org/#dom-websocket-close\n * @param {number|undefined} code\n * @param {string|undefined} reason\n */\n close (code = undefined, reason = undefined) {\n webidl.brandCheck(this, WebSocket)\n\n if (code !== undefined) {\n code = webidl.converters['unsigned short'](code, { clamp: true })\n }\n\n if (reason !== undefined) {\n reason = webidl.converters.USVString(reason)\n }\n\n // 1. If code is present, but is neither an integer equal to 1000 nor an\n // integer in the range 3000 to 4999, inclusive, throw an\n // \"InvalidAccessError\" DOMException.\n if (code !== undefined) {\n if (code !== 1000 && (code < 3000 || code > 4999)) {\n throw new DOMException('invalid code', 'InvalidAccessError')\n }\n }\n\n let reasonByteLength = 0\n\n // 2. If reason is present, then run these substeps:\n if (reason !== undefined) {\n // 1. Let reasonBytes be the result of encoding reason.\n // 2. If reasonBytes is longer than 123 bytes, then throw a\n // \"SyntaxError\" DOMException.\n reasonByteLength = Buffer.byteLength(reason)\n\n if (reasonByteLength > 123) {\n throw new DOMException(\n `Reason must be less than 123 bytes; received ${reasonByteLength}`,\n 'SyntaxError'\n )\n }\n }\n\n // 3. Run the first matching steps from the following list:\n if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) {\n // If this's ready state is CLOSING (2) or CLOSED (3)\n // Do nothing.\n } else if (!isEstablished(this)) {\n // If the WebSocket connection is not yet established\n // Fail the WebSocket connection and set this's ready state\n // to CLOSING (2).\n failWebsocketConnection(this, 'Connection was closed before it was established.')\n this[kReadyState] = WebSocket.CLOSING\n } else if (!isClosing(this)) {\n // If the WebSocket closing handshake has not yet been started\n // Start the WebSocket closing handshake and set this's ready\n // state to CLOSING (2).\n // - If neither code nor reason is present, the WebSocket Close\n // message must not have a body.\n // - If code is present, then the status code to use in the\n // WebSocket Close message must be the integer given by code.\n // - If reason is also present, then reasonBytes must be\n // provided in the Close message after the status code.\n\n const frame = new WebsocketFrameSend()\n\n // If neither code nor reason is present, the WebSocket Close\n // message must not have a body.\n\n // If code is present, then the status code to use in the\n // WebSocket Close message must be the integer given by code.\n if (code !== undefined && reason === undefined) {\n frame.frameData = Buffer.allocUnsafe(2)\n frame.frameData.writeUInt16BE(code, 0)\n } else if (code !== undefined && reason !== undefined) {\n // If reason is also present, then reasonBytes must be\n // provided in the Close message after the status code.\n frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength)\n frame.frameData.writeUInt16BE(code, 0)\n // the body MAY contain UTF-8-encoded data with value /reason/\n frame.frameData.write(reason, 2, 'utf-8')\n } else {\n frame.frameData = emptyBuffer\n }\n\n /** @type {import('stream').Duplex} */\n const socket = this[kResponse].socket\n\n socket.write(frame.createFrame(opcodes.CLOSE), (err) => {\n if (!err) {\n this[kSentClose] = true\n }\n })\n\n // Upon either sending or receiving a Close control frame, it is said\n // that _The WebSocket Closing Handshake is Started_ and that the\n // WebSocket connection 
is in the CLOSING state.\n this[kReadyState] = states.CLOSING\n } else {\n // Otherwise\n // Set this's ready state to CLOSING (2).\n this[kReadyState] = WebSocket.CLOSING\n }\n }\n\n /**\n * @see https://websockets.spec.whatwg.org/#dom-websocket-send\n * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data\n */\n send (data) {\n webidl.brandCheck(this, WebSocket)\n\n webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' })\n\n data = webidl.converters.WebSocketSendData(data)\n\n // 1. If this's ready state is CONNECTING, then throw an\n // \"InvalidStateError\" DOMException.\n if (this[kReadyState] === WebSocket.CONNECTING) {\n throw new DOMException('Sent before connected.', 'InvalidStateError')\n }\n\n // 2. Run the appropriate set of steps from the following list:\n // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1\n // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2\n\n if (!isEstablished(this) || isClosing(this)) {\n return\n }\n\n /** @type {import('stream').Duplex} */\n const socket = this[kResponse].socket\n\n // If data is a string\n if (typeof data === 'string') {\n // If the WebSocket connection is established and the WebSocket\n // closing handshake has not yet started, then the user agent\n // must send a WebSocket Message comprised of the data argument\n // using a text frame opcode; if the data cannot be sent, e.g.\n // because it would need to be buffered but the buffer is full,\n // the user agent must flag the WebSocket as full and then close\n // the WebSocket connection. Any invocation of this method with a\n // string argument that does not throw an exception must increase\n // the bufferedAmount attribute by the number of bytes needed to\n // express the argument as UTF-8.\n\n const value = Buffer.from(data)\n const frame = new WebsocketFrameSend(value)\n const buffer = frame.createFrame(opcodes.TEXT)\n\n this.#bufferedAmount += value.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= value.byteLength\n })\n } else if (types.isArrayBuffer(data)) {\n // If the WebSocket connection is established, and the WebSocket\n // closing handshake has not yet started, then the user agent must\n // send a WebSocket Message comprised of data using a binary frame\n // opcode; if the data cannot be sent, e.g. because it would need\n // to be buffered but the buffer is full, the user agent must flag\n // the WebSocket as full and then close the WebSocket connection.\n // The data to be sent is the data stored in the buffer described\n // by the ArrayBuffer object. Any invocation of this method with an\n // ArrayBuffer argument that does not throw an exception must\n // increase the bufferedAmount attribute by the length of the\n // ArrayBuffer in bytes.\n\n const value = Buffer.from(data)\n const frame = new WebsocketFrameSend(value)\n const buffer = frame.createFrame(opcodes.BINARY)\n\n this.#bufferedAmount += value.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= value.byteLength\n })\n } else if (ArrayBuffer.isView(data)) {\n // If the WebSocket connection is established, and the WebSocket\n // closing handshake has not yet started, then the user agent must\n // send a WebSocket Message comprised of data using a binary frame\n // opcode; if the data cannot be sent, e.g. because it would need to\n // be buffered but the buffer is full, the user agent must flag the\n // WebSocket as full and then close the WebSocket connection. 
The\n // data to be sent is the data stored in the section of the buffer\n // described by the ArrayBuffer object that data references. Any\n // invocation of this method with this kind of argument that does\n // not throw an exception must increase the bufferedAmount attribute\n // by the length of data’s buffer in bytes.\n\n const ab = Buffer.from(data, data.byteOffset, data.byteLength)\n\n const frame = new WebsocketFrameSend(ab)\n const buffer = frame.createFrame(opcodes.BINARY)\n\n this.#bufferedAmount += ab.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= ab.byteLength\n })\n } else if (isBlobLike(data)) {\n // If the WebSocket connection is established, and the WebSocket\n // closing handshake has not yet started, then the user agent must\n // send a WebSocket Message comprised of data using a binary frame\n // opcode; if the data cannot be sent, e.g. because it would need to\n // be buffered but the buffer is full, the user agent must flag the\n // WebSocket as full and then close the WebSocket connection. The data\n // to be sent is the raw data represented by the Blob object. Any\n // invocation of this method with a Blob argument that does not throw\n // an exception must increase the bufferedAmount attribute by the size\n // of the Blob object’s raw data, in bytes.\n\n const frame = new WebsocketFrameSend()\n\n data.arrayBuffer().then((ab) => {\n const value = Buffer.from(ab)\n frame.frameData = value\n const buffer = frame.createFrame(opcodes.BINARY)\n\n this.#bufferedAmount += value.byteLength\n socket.write(buffer, () => {\n this.#bufferedAmount -= value.byteLength\n })\n })\n }\n }\n\n get readyState () {\n webidl.brandCheck(this, WebSocket)\n\n // The readyState getter steps are to return this's ready state.\n return this[kReadyState]\n }\n\n get bufferedAmount () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#bufferedAmount\n }\n\n get url () {\n webidl.brandCheck(this, WebSocket)\n\n // The url getter steps are to return this's url, serialized.\n return URLSerializer(this[kWebSocketURL])\n }\n\n get extensions () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#extensions\n }\n\n get protocol () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#protocol\n }\n\n get onopen () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.open\n }\n\n set onopen (fn) {\n webidl.brandCheck(this, WebSocket)\n\n if (this.#events.open) {\n this.removeEventListener('open', this.#events.open)\n }\n\n if (typeof fn === 'function') {\n this.#events.open = fn\n this.addEventListener('open', fn)\n } else {\n this.#events.open = null\n }\n }\n\n get onerror () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.error\n }\n\n set onerror (fn) {\n webidl.brandCheck(this, WebSocket)\n\n if (this.#events.error) {\n this.removeEventListener('error', this.#events.error)\n }\n\n if (typeof fn === 'function') {\n this.#events.error = fn\n this.addEventListener('error', fn)\n } else {\n this.#events.error = null\n }\n }\n\n get onclose () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.close\n }\n\n set onclose (fn) {\n webidl.brandCheck(this, WebSocket)\n\n if (this.#events.close) {\n this.removeEventListener('close', this.#events.close)\n }\n\n if (typeof fn === 'function') {\n this.#events.close = fn\n this.addEventListener('close', fn)\n } else {\n this.#events.close = null\n }\n }\n\n get onmessage () {\n webidl.brandCheck(this, WebSocket)\n\n return this.#events.message\n }\n\n set onmessage (fn) {\n 
webidl.brandCheck(this, WebSocket)\n\n if (this.#events.message) {\n this.removeEventListener('message', this.#events.message)\n }\n\n if (typeof fn === 'function') {\n this.#events.message = fn\n this.addEventListener('message', fn)\n } else {\n this.#events.message = null\n }\n }\n\n get binaryType () {\n webidl.brandCheck(this, WebSocket)\n\n return this[kBinaryType]\n }\n\n set binaryType (type) {\n webidl.brandCheck(this, WebSocket)\n\n if (type !== 'blob' && type !== 'arraybuffer') {\n this[kBinaryType] = 'blob'\n } else {\n this[kBinaryType] = type\n }\n }\n\n /**\n * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol\n */\n #onConnectionEstablished (response) {\n // processResponse is called when the \"response’s header list has been received and initialized.\"\n // once this happens, the connection is open\n this[kResponse] = response\n\n const parser = new ByteParser(this)\n parser.on('drain', function onParserDrain () {\n this.ws[kResponse].socket.resume()\n })\n\n response.socket.ws = this\n this[kByteParser] = parser\n\n // 1. Change the ready state to OPEN (1).\n this[kReadyState] = states.OPEN\n\n // 2. Change the extensions attribute’s value to the extensions in use, if\n // it is not the null value.\n // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1\n const extensions = response.headersList.get('sec-websocket-extensions')\n\n if (extensions !== null) {\n this.#extensions = extensions\n }\n\n // 3. Change the protocol attribute’s value to the subprotocol in use, if\n // it is not the null value.\n // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9\n const protocol = response.headersList.get('sec-websocket-protocol')\n\n if (protocol !== null) {\n this.#protocol = protocol\n }\n\n // 4. Fire an event named open at the WebSocket object.\n fireEvent('open', this)\n }\n}\n\n// https://websockets.spec.whatwg.org/#dom-websocket-connecting\nWebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING\n// https://websockets.spec.whatwg.org/#dom-websocket-open\nWebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN\n// https://websockets.spec.whatwg.org/#dom-websocket-closing\nWebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING\n// https://websockets.spec.whatwg.org/#dom-websocket-closed\nWebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED\n\nObject.defineProperties(WebSocket.prototype, {\n CONNECTING: staticPropertyDescriptors,\n OPEN: staticPropertyDescriptors,\n CLOSING: staticPropertyDescriptors,\n CLOSED: staticPropertyDescriptors,\n url: kEnumerableProperty,\n readyState: kEnumerableProperty,\n bufferedAmount: kEnumerableProperty,\n onopen: kEnumerableProperty,\n onerror: kEnumerableProperty,\n onclose: kEnumerableProperty,\n close: kEnumerableProperty,\n onmessage: kEnumerableProperty,\n binaryType: kEnumerableProperty,\n send: kEnumerableProperty,\n extensions: kEnumerableProperty,\n protocol: kEnumerableProperty,\n [Symbol.toStringTag]: {\n value: 'WebSocket',\n writable: false,\n enumerable: false,\n configurable: true\n }\n})\n\nObject.defineProperties(WebSocket, {\n CONNECTING: staticPropertyDescriptors,\n OPEN: staticPropertyDescriptors,\n CLOSING: staticPropertyDescriptors,\n CLOSED: staticPropertyDescriptors\n})\n\nwebidl.converters['sequence'] = webidl.sequenceConverter(\n webidl.converters.DOMString\n)\n\nwebidl.converters['DOMString or sequence'] = function (V) {\n if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) {\n return webidl.converters['sequence'](V)\n }\n\n return 
webidl.converters.DOMString(V)\n}\n\n// This implements the propsal made in https://github.com/whatwg/websockets/issues/42\nwebidl.converters.WebSocketInit = webidl.dictionaryConverter([\n {\n key: 'protocols',\n converter: webidl.converters['DOMString or sequence'],\n get defaultValue () {\n return []\n }\n },\n {\n key: 'dispatcher',\n converter: (V) => V,\n get defaultValue () {\n return getGlobalDispatcher()\n }\n },\n {\n key: 'headers',\n converter: webidl.nullableConverter(webidl.converters.HeadersInit)\n }\n])\n\nwebidl.converters['DOMString or sequence or WebSocketInit'] = function (V) {\n if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {\n return webidl.converters.WebSocketInit(V)\n }\n\n return { protocols: webidl.converters['DOMString or sequence'](V) }\n}\n\nwebidl.converters.WebSocketSendData = function (V) {\n if (webidl.util.Type(V) === 'Object') {\n if (isBlobLike(V)) {\n return webidl.converters.Blob(V, { strict: false })\n }\n\n if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) {\n return webidl.converters.BufferSource(V)\n }\n }\n\n return webidl.converters.USVString(V)\n}\n\nmodule.exports = {\n WebSocket\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && process.version !== undefined) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","\n/**\n * For Node.js, simply re-export the core `util.deprecate` function.\n */\n\nmodule.exports = require('util').deprecate;\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return 
x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return 
usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = 
conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) 
? undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","class Node {\n\t/// value;\n\t/// next;\n\n\tconstructor(value) {\n\t\tthis.value = value;\n\n\t\t// TODO: Remove this when targeting Node.js 12.\n\t\tthis.next = undefined;\n\t}\n}\n\nclass Queue {\n\t// TODO: Use private class fields when targeting Node.js 12.\n\t// #_head;\n\t// #_tail;\n\t// #_size;\n\n\tconstructor() {\n\t\tthis.clear();\n\t}\n\n\tenqueue(value) {\n\t\tconst node = new Node(value);\n\n\t\tif (this._head) {\n\t\t\tthis._tail.next = node;\n\t\t\tthis._tail = node;\n\t\t} else {\n\t\t\tthis._head = node;\n\t\t\tthis._tail = node;\n\t\t}\n\n\t\tthis._size++;\n\t}\n\n\tdequeue() {\n\t\tconst current = this._head;\n\t\tif (!current) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis._head = this._head.next;\n\t\tthis._size--;\n\t\treturn current.value;\n\t}\n\n\tclear() {\n\t\tthis._head = undefined;\n\t\tthis._tail = undefined;\n\t\tthis._size = 0;\n\t}\n\n\tget size() {\n\t\treturn this._size;\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tlet 
current = this._head;\n\n\t\twhile (current) {\n\t\t\tyield current.value;\n\t\t\tcurrent = current.next;\n\t\t}\n\t}\n}\n\nmodule.exports = Queue;\n",null,"module.exports = require(\"assert\");","module.exports = require(\"async_hooks\");","module.exports = require(\"buffer\");","module.exports = require(\"child_process\");","module.exports = require(\"console\");","module.exports = require(\"crypto\");","module.exports = require(\"diagnostics_channel\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"http2\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:events\");","module.exports = require(\"node:stream\");","module.exports = require(\"node:util\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"perf_hooks\");","module.exports = require(\"punycode\");","module.exports = require(\"querystring\");","module.exports = require(\"stream\");","module.exports = require(\"stream/web\");","module.exports = require(\"string_decoder\");","module.exports = require(\"tls\");","module.exports = require(\"tty\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"util/types\");","module.exports = require(\"worker_threads\");","module.exports = require(\"zlib\");","'use strict'\n\nconst WritableStream = require('node:stream').Writable\nconst inherits = require('node:util').inherits\n\nconst StreamSearch = require('../../streamsearch/sbmh')\n\nconst PartStream = require('./PartStream')\nconst HeaderParser = require('./HeaderParser')\n\nconst DASH = 45\nconst B_ONEDASH = Buffer.from('-')\nconst B_CRLF = Buffer.from('\\r\\n')\nconst EMPTY_FN = function () {}\n\nfunction Dicer (cfg) {\n if (!(this instanceof Dicer)) { return new Dicer(cfg) }\n WritableStream.call(this, cfg)\n\n if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }\n\n if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }\n\n this._headerFirst = cfg.headerFirst\n\n this._dashes = 0\n this._parts = 0\n this._finished = false\n this._realFinish = false\n this._isPreamble = true\n this._justMatched = false\n this._firstWrite = true\n this._inHeader = true\n this._part = undefined\n this._cb = undefined\n this._ignoreData = false\n this._partOpts = { highWaterMark: cfg.partHwm }\n this._pause = false\n\n const self = this\n this._hparser = new HeaderParser(cfg)\n this._hparser.on('header', function (header) {\n self._inHeader = false\n self._part.emit('header', header)\n })\n}\ninherits(Dicer, WritableStream)\n\nDicer.prototype.emit = function (ev) {\n if (ev === 'finish' && !this._realFinish) {\n if (!this._finished) {\n const self = this\n process.nextTick(function () {\n self.emit('error', new Error('Unexpected end of multipart data'))\n if (self._part && !self._ignoreData) {\n const type = (self._isPreamble ? 
'Preamble' : 'Part')\n self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))\n self._part.push(null)\n process.nextTick(function () {\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n })\n return\n }\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n })\n }\n } else { WritableStream.prototype.emit.apply(this, arguments) }\n}\n\nDicer.prototype._write = function (data, encoding, cb) {\n // ignore unexpected data (e.g. extra trailer data after finished)\n if (!this._hparser && !this._bparser) { return cb() }\n\n if (this._headerFirst && this._isPreamble) {\n if (!this._part) {\n this._part = new PartStream(this._partOpts)\n if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }\n }\n const r = this._hparser.push(data)\n if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }\n }\n\n // allows for \"easier\" testing\n if (this._firstWrite) {\n this._bparser.push(B_CRLF)\n this._firstWrite = false\n }\n\n this._bparser.push(data)\n\n if (this._pause) { this._cb = cb } else { cb() }\n}\n\nDicer.prototype.reset = function () {\n this._part = undefined\n this._bparser = undefined\n this._hparser = undefined\n}\n\nDicer.prototype.setBoundary = function (boundary) {\n const self = this\n this._bparser = new StreamSearch('\\r\\n--' + boundary)\n this._bparser.on('info', function (isMatch, data, start, end) {\n self._oninfo(isMatch, data, start, end)\n })\n}\n\nDicer.prototype._ignore = function () {\n if (this._part && !this._ignoreData) {\n this._ignoreData = true\n this._part.on('error', EMPTY_FN)\n // we must perform some kind of read on the stream even though we are\n // ignoring the data, otherwise node's Readable stream will not emit 'end'\n // after pushing null to the stream\n this._part.resume()\n }\n}\n\nDicer.prototype._oninfo = function (isMatch, data, start, end) {\n let buf; const self = this; let i = 0; let r; let shouldWriteMore = true\n\n if (!this._part && this._justMatched && data) {\n while (this._dashes < 2 && (start + i) < end) {\n if (data[start + i] === DASH) {\n ++i\n ++this._dashes\n } else {\n if (this._dashes) { buf = B_ONEDASH }\n this._dashes = 0\n break\n }\n }\n if (this._dashes === 2) {\n if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }\n this.reset()\n this._finished = true\n // no more parts will be added\n if (self._parts === 0) {\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n }\n }\n if (this._dashes) { return }\n }\n if (this._justMatched) { this._justMatched = false }\n if (!this._part) {\n this._part = new PartStream(this._partOpts)\n this._part._read = function (n) {\n self._unpause()\n }\n if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }\n if (!this._isPreamble) { this._inHeader = true }\n }\n if (data && start < end && !this._ignoreData) {\n if (this._isPreamble || !this._inHeader) {\n if (buf) { shouldWriteMore = this._part.push(buf) }\n shouldWriteMore = this._part.push(data.slice(start, end))\n if (!shouldWriteMore) { this._pause = true }\n } else if (!this._isPreamble && this._inHeader) {\n if (buf) { this._hparser.push(buf) }\n r = this._hparser.push(data.slice(start, end))\n if (!this._inHeader && r !== undefined && r < end) { 
this._oninfo(false, data, start + r, end) }\n }\n }\n if (isMatch) {\n this._hparser.reset()\n if (this._isPreamble) { this._isPreamble = false } else {\n if (start !== end) {\n ++this._parts\n this._part.on('end', function () {\n if (--self._parts === 0) {\n if (self._finished) {\n self._realFinish = true\n self.emit('finish')\n self._realFinish = false\n } else {\n self._unpause()\n }\n }\n })\n }\n }\n this._part.push(null)\n this._part = undefined\n this._ignoreData = false\n this._justMatched = true\n this._dashes = 0\n }\n}\n\nDicer.prototype._unpause = function () {\n if (!this._pause) { return }\n\n this._pause = false\n if (this._cb) {\n const cb = this._cb\n this._cb = undefined\n cb()\n }\n}\n\nmodule.exports = Dicer\n","'use strict'\n\nconst EventEmitter = require('node:events').EventEmitter\nconst inherits = require('node:util').inherits\nconst getLimit = require('../../../lib/utils/getLimit')\n\nconst StreamSearch = require('../../streamsearch/sbmh')\n\nconst B_DCRLF = Buffer.from('\\r\\n\\r\\n')\nconst RE_CRLF = /\\r\\n/g\nconst RE_HDR = /^([^:]+):[ \\t]?([\\x00-\\xFF]+)?$/ // eslint-disable-line no-control-regex\n\nfunction HeaderParser (cfg) {\n EventEmitter.call(this)\n\n cfg = cfg || {}\n const self = this\n this.nread = 0\n this.maxed = false\n this.npairs = 0\n this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)\n this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)\n this.buffer = ''\n this.header = {}\n this.finished = false\n this.ss = new StreamSearch(B_DCRLF)\n this.ss.on('info', function (isMatch, data, start, end) {\n if (data && !self.maxed) {\n if (self.nread + end - start >= self.maxHeaderSize) {\n end = self.maxHeaderSize - self.nread + start\n self.nread = self.maxHeaderSize\n self.maxed = true\n } else { self.nread += (end - start) }\n\n self.buffer += data.toString('binary', start, end)\n }\n if (isMatch) { self._finish() }\n })\n}\ninherits(HeaderParser, EventEmitter)\n\nHeaderParser.prototype.push = function (data) {\n const r = this.ss.push(data)\n if (this.finished) { return r }\n}\n\nHeaderParser.prototype.reset = function () {\n this.finished = false\n this.buffer = ''\n this.header = {}\n this.ss.reset()\n}\n\nHeaderParser.prototype._finish = function () {\n if (this.buffer) { this._parseHeader() }\n this.ss.matches = this.ss.maxMatches\n const header = this.header\n this.header = {}\n this.buffer = ''\n this.finished = true\n this.nread = this.npairs = 0\n this.maxed = false\n this.emit('header', header)\n}\n\nHeaderParser.prototype._parseHeader = function () {\n if (this.npairs === this.maxHeaderPairs) { return }\n\n const lines = this.buffer.split(RE_CRLF)\n const len = lines.length\n let m, h\n\n for (var i = 0; i < len; ++i) { // eslint-disable-line no-var\n if (lines[i].length === 0) { continue }\n if (lines[i][0] === '\\t' || lines[i][0] === ' ') {\n // folded header content\n // RFC2822 says to just remove the CRLF and not the whitespace following\n // it, so we follow the RFC and include the leading whitespace ...\n if (h) {\n this.header[h][this.header[h].length - 1] += lines[i]\n continue\n }\n }\n\n const posColon = lines[i].indexOf(':')\n if (\n posColon === -1 ||\n posColon === 0\n ) {\n return\n }\n m = RE_HDR.exec(lines[i])\n h = m[1].toLowerCase()\n this.header[h] = this.header[h] || []\n this.header[h].push((m[2] || ''))\n if (++this.npairs === this.maxHeaderPairs) { break }\n }\n}\n\nmodule.exports = HeaderParser\n","'use strict'\n\nconst inherits = require('node:util').inherits\nconst ReadableStream = 
require('node:stream').Readable\n\nfunction PartStream (opts) {\n ReadableStream.call(this, opts)\n}\ninherits(PartStream, ReadableStream)\n\nPartStream.prototype._read = function (n) {}\n\nmodule.exports = PartStream\n","'use strict'\n\n/**\n * Copyright Brian White. All rights reserved.\n *\n * @see https://github.com/mscdex/streamsearch\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n *\n * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation\n * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool\n */\nconst EventEmitter = require('node:events').EventEmitter\nconst inherits = require('node:util').inherits\n\nfunction SBMH (needle) {\n if (typeof needle === 'string') {\n needle = Buffer.from(needle)\n }\n\n if (!Buffer.isBuffer(needle)) {\n throw new TypeError('The needle has to be a String or a Buffer.')\n }\n\n const needleLength = needle.length\n\n if (needleLength === 0) {\n throw new Error('The needle cannot be an empty String/Buffer.')\n }\n\n if (needleLength > 256) {\n throw new Error('The needle cannot have a length bigger than 256.')\n }\n\n this.maxMatches = Infinity\n this.matches = 0\n\n this._occ = new Array(256)\n .fill(needleLength) // Initialize occurrence table.\n this._lookbehind_size = 0\n this._needle = needle\n this._bufpos = 0\n\n this._lookbehind = Buffer.alloc(needleLength)\n\n // Populate occurrence table with analysis of the needle,\n // ignoring last letter.\n for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var\n this._occ[needle[i]] = needleLength - 1 - i\n }\n}\ninherits(SBMH, EventEmitter)\n\nSBMH.prototype.reset = function () {\n this._lookbehind_size = 0\n this.matches = 0\n this._bufpos = 0\n}\n\nSBMH.prototype.push = function (chunk, pos) {\n if (!Buffer.isBuffer(chunk)) {\n chunk = Buffer.from(chunk, 'binary')\n }\n const chlen = chunk.length\n this._bufpos = pos || 0\n let r\n while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }\n return r\n}\n\nSBMH.prototype._sbmh_feed = function (data) {\n const len = data.length\n const needle = this._needle\n const needleLength = needle.length\n const lastNeedleChar = needle[needleLength - 1]\n\n // Positive: points to a position in `data`\n // pos == 3 points to data[3]\n // Negative: points to a position in the lookbehind buffer\n // pos == -2 points to lookbehind[lookbehind_size - 2]\n let pos = -this._lookbehind_size\n let ch\n\n if (pos < 0) {\n // Lookbehind buffer is not 
empty. Perform Boyer-Moore-Horspool\n // search with character lookup code that considers both the\n // lookbehind buffer and the current round's haystack data.\n //\n // Loop until\n // there is a match.\n // or until\n // we've moved past the position that requires the\n // lookbehind buffer. In this case we switch to the\n // optimized loop.\n // or until\n // the character to look at lies outside the haystack.\n while (pos < 0 && pos <= len - needleLength) {\n ch = this._sbmh_lookup_char(data, pos + needleLength - 1)\n\n if (\n ch === lastNeedleChar &&\n this._sbmh_memcmp(data, pos, needleLength - 1)\n ) {\n this._lookbehind_size = 0\n ++this.matches\n this.emit('info', true)\n\n return (this._bufpos = pos + needleLength)\n }\n pos += this._occ[ch]\n }\n\n // No match.\n\n if (pos < 0) {\n // There's too few data for Boyer-Moore-Horspool to run,\n // so let's use a different algorithm to skip as much as\n // we can.\n // Forward pos until\n // the trailing part of lookbehind + data\n // looks like the beginning of the needle\n // or until\n // pos == 0\n while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }\n }\n\n if (pos >= 0) {\n // Discard lookbehind buffer.\n this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)\n this._lookbehind_size = 0\n } else {\n // Cut off part of the lookbehind buffer that has\n // been processed and append the entire haystack\n // into it.\n const bytesToCutOff = this._lookbehind_size + pos\n if (bytesToCutOff > 0) {\n // The cut off data is guaranteed not to contain the needle.\n this.emit('info', false, this._lookbehind, 0, bytesToCutOff)\n }\n\n this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,\n this._lookbehind_size - bytesToCutOff)\n this._lookbehind_size -= bytesToCutOff\n\n data.copy(this._lookbehind, this._lookbehind_size)\n this._lookbehind_size += len\n\n this._bufpos = len\n return len\n }\n }\n\n pos += (pos >= 0) * this._bufpos\n\n // Lookbehind buffer is now empty. We only need to check if the\n // needle is in the haystack.\n if (data.indexOf(needle, pos) !== -1) {\n pos = data.indexOf(needle, pos)\n ++this.matches\n if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }\n\n return (this._bufpos = pos + needleLength)\n } else {\n pos = len - needleLength\n }\n\n // There was no match. If there's trailing haystack data that we cannot\n // match yet using the Boyer-Moore-Horspool algorithm (because the trailing\n // data is less than the needle size) then match using a modified\n // algorithm that starts matching from the beginning instead of the end.\n // Whatever trailing data is left after running this algorithm is added to\n // the lookbehind buffer.\n while (\n pos < len &&\n (\n data[pos] !== needle[0] ||\n (\n (Buffer.compare(\n data.subarray(pos, pos + len - pos),\n needle.subarray(0, len - pos)\n ) !== 0)\n )\n )\n ) {\n ++pos\n }\n if (pos < len) {\n data.copy(this._lookbehind, 0, pos, pos + (len - pos))\n this._lookbehind_size = len - pos\n }\n\n // Everything until pos is guaranteed not to contain needle data.\n if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }\n\n this._bufpos = len\n return len\n}\n\nSBMH.prototype._sbmh_lookup_char = function (data, pos) {\n return (pos < 0)\n ? 
this._lookbehind[this._lookbehind_size + pos]\n : data[pos]\n}\n\nSBMH.prototype._sbmh_memcmp = function (data, pos, len) {\n for (var i = 0; i < len; ++i) { // eslint-disable-line no-var\n if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }\n }\n return true\n}\n\nmodule.exports = SBMH\n","'use strict'\n\nconst WritableStream = require('node:stream').Writable\nconst { inherits } = require('node:util')\nconst Dicer = require('../deps/dicer/lib/Dicer')\n\nconst MultipartParser = require('./types/multipart')\nconst UrlencodedParser = require('./types/urlencoded')\nconst parseParams = require('./utils/parseParams')\n\nfunction Busboy (opts) {\n if (!(this instanceof Busboy)) { return new Busboy(opts) }\n\n if (typeof opts !== 'object') {\n throw new TypeError('Busboy expected an options-Object.')\n }\n if (typeof opts.headers !== 'object') {\n throw new TypeError('Busboy expected an options-Object with headers-attribute.')\n }\n if (typeof opts.headers['content-type'] !== 'string') {\n throw new TypeError('Missing Content-Type-header.')\n }\n\n const {\n headers,\n ...streamOptions\n } = opts\n\n this.opts = {\n autoDestroy: false,\n ...streamOptions\n }\n WritableStream.call(this, this.opts)\n\n this._done = false\n this._parser = this.getParserByHeaders(headers)\n this._finished = false\n}\ninherits(Busboy, WritableStream)\n\nBusboy.prototype.emit = function (ev) {\n if (ev === 'finish') {\n if (!this._done) {\n this._parser?.end()\n return\n } else if (this._finished) {\n return\n }\n this._finished = true\n }\n WritableStream.prototype.emit.apply(this, arguments)\n}\n\nBusboy.prototype.getParserByHeaders = function (headers) {\n const parsed = parseParams(headers['content-type'])\n\n const cfg = {\n defCharset: this.opts.defCharset,\n fileHwm: this.opts.fileHwm,\n headers,\n highWaterMark: this.opts.highWaterMark,\n isPartAFile: this.opts.isPartAFile,\n limits: this.opts.limits,\n parsedConType: parsed,\n preservePath: this.opts.preservePath\n }\n\n if (MultipartParser.detect.test(parsed[0])) {\n return new MultipartParser(this, cfg)\n }\n if (UrlencodedParser.detect.test(parsed[0])) {\n return new UrlencodedParser(this, cfg)\n }\n throw new Error('Unsupported Content-Type.')\n}\n\nBusboy.prototype._write = function (chunk, encoding, cb) {\n this._parser.write(chunk, cb)\n}\n\nmodule.exports = Busboy\nmodule.exports.default = Busboy\nmodule.exports.Busboy = Busboy\n\nmodule.exports.Dicer = Dicer\n","'use strict'\n\n// TODO:\n// * support 1 nested multipart level\n// (see second multipart example here:\n// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)\n// * support limits.fieldNameSize\n// -- this will require modifications to utils.parseParams\n\nconst { Readable } = require('node:stream')\nconst { inherits } = require('node:util')\n\nconst Dicer = require('../../deps/dicer/lib/Dicer')\n\nconst parseParams = require('../utils/parseParams')\nconst decodeText = require('../utils/decodeText')\nconst basename = require('../utils/basename')\nconst getLimit = require('../utils/getLimit')\n\nconst RE_BOUNDARY = /^boundary$/i\nconst RE_FIELD = /^form-data$/i\nconst RE_CHARSET = /^charset$/i\nconst RE_FILENAME = /^filename$/i\nconst RE_NAME = /^name$/i\n\nMultipart.detect = /^multipart\\/form-data/i\nfunction Multipart (boy, cfg) {\n let i\n let len\n const self = this\n let boundary\n const limits = cfg.limits\n const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || 
fileName !== undefined))\n const parsedConType = cfg.parsedConType || []\n const defCharset = cfg.defCharset || 'utf8'\n const preservePath = cfg.preservePath\n const fileOpts = { highWaterMark: cfg.fileHwm }\n\n for (i = 0, len = parsedConType.length; i < len; ++i) {\n if (Array.isArray(parsedConType[i]) &&\n RE_BOUNDARY.test(parsedConType[i][0])) {\n boundary = parsedConType[i][1]\n break\n }\n }\n\n function checkFinished () {\n if (nends === 0 && finished && !boy._done) {\n finished = false\n self.end()\n }\n }\n\n if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }\n\n const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)\n const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)\n const filesLimit = getLimit(limits, 'files', Infinity)\n const fieldsLimit = getLimit(limits, 'fields', Infinity)\n const partsLimit = getLimit(limits, 'parts', Infinity)\n const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)\n const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)\n\n let nfiles = 0\n let nfields = 0\n let nends = 0\n let curFile\n let curField\n let finished = false\n\n this._needDrain = false\n this._pause = false\n this._cb = undefined\n this._nparts = 0\n this._boy = boy\n\n const parserCfg = {\n boundary,\n maxHeaderPairs: headerPairsLimit,\n maxHeaderSize: headerSizeLimit,\n partHwm: fileOpts.highWaterMark,\n highWaterMark: cfg.highWaterMark\n }\n\n this.parser = new Dicer(parserCfg)\n this.parser.on('drain', function () {\n self._needDrain = false\n if (self._cb && !self._pause) {\n const cb = self._cb\n self._cb = undefined\n cb()\n }\n }).on('part', function onPart (part) {\n if (++self._nparts > partsLimit) {\n self.parser.removeListener('part', onPart)\n self.parser.on('part', skipPart)\n boy.hitPartsLimit = true\n boy.emit('partsLimit')\n return skipPart(part)\n }\n\n // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let\n // us emit 'end' early since we know the part has ended if we are already\n // seeing the next part\n if (curField) {\n const field = curField\n field.emit('end')\n field.removeAllListeners('end')\n }\n\n part.on('header', function (header) {\n let contype\n let fieldname\n let parsed\n let charset\n let encoding\n let filename\n let nsize = 0\n\n if (header['content-type']) {\n parsed = parseParams(header['content-type'][0])\n if (parsed[0]) {\n contype = parsed[0].toLowerCase()\n for (i = 0, len = parsed.length; i < len; ++i) {\n if (RE_CHARSET.test(parsed[i][0])) {\n charset = parsed[i][1].toLowerCase()\n break\n }\n }\n }\n }\n\n if (contype === undefined) { contype = 'text/plain' }\n if (charset === undefined) { charset = defCharset }\n\n if (header['content-disposition']) {\n parsed = parseParams(header['content-disposition'][0])\n if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }\n for (i = 0, len = parsed.length; i < len; ++i) {\n if (RE_NAME.test(parsed[i][0])) {\n fieldname = parsed[i][1]\n } else if (RE_FILENAME.test(parsed[i][0])) {\n filename = parsed[i][1]\n if (!preservePath) { filename = basename(filename) }\n }\n }\n } else { return skipPart(part) }\n\n if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }\n\n let onData,\n onEnd\n\n if (isPartAFile(fieldname, contype, filename)) {\n // file/binary field\n if (nfiles === filesLimit) {\n if (!boy.hitFilesLimit) {\n boy.hitFilesLimit = true\n boy.emit('filesLimit')\n }\n return skipPart(part)\n }\n\n 
++nfiles\n\n if (!boy._events.file) {\n self.parser._ignore()\n return\n }\n\n ++nends\n const file = new FileStream(fileOpts)\n curFile = file\n file.on('end', function () {\n --nends\n self._pause = false\n checkFinished()\n if (self._cb && !self._needDrain) {\n const cb = self._cb\n self._cb = undefined\n cb()\n }\n })\n file._read = function (n) {\n if (!self._pause) { return }\n self._pause = false\n if (self._cb && !self._needDrain) {\n const cb = self._cb\n self._cb = undefined\n cb()\n }\n }\n boy.emit('file', fieldname, file, filename, encoding, contype)\n\n onData = function (data) {\n if ((nsize += data.length) > fileSizeLimit) {\n const extralen = fileSizeLimit - nsize + data.length\n if (extralen > 0) { file.push(data.slice(0, extralen)) }\n file.truncated = true\n file.bytesRead = fileSizeLimit\n part.removeAllListeners('data')\n file.emit('limit')\n return\n } else if (!file.push(data)) { self._pause = true }\n\n file.bytesRead = nsize\n }\n\n onEnd = function () {\n curFile = undefined\n file.push(null)\n }\n } else {\n // non-file field\n if (nfields === fieldsLimit) {\n if (!boy.hitFieldsLimit) {\n boy.hitFieldsLimit = true\n boy.emit('fieldsLimit')\n }\n return skipPart(part)\n }\n\n ++nfields\n ++nends\n let buffer = ''\n let truncated = false\n curField = part\n\n onData = function (data) {\n if ((nsize += data.length) > fieldSizeLimit) {\n const extralen = (fieldSizeLimit - (nsize - data.length))\n buffer += data.toString('binary', 0, extralen)\n truncated = true\n part.removeAllListeners('data')\n } else { buffer += data.toString('binary') }\n }\n\n onEnd = function () {\n curField = undefined\n if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }\n boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)\n --nends\n checkFinished()\n }\n }\n\n /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become\n broken. 
Streams2/streams3 is a huge black box of confusion, but\n somehow overriding the sync state seems to fix things again (and still\n seems to work for previous node versions).\n */\n part._readableState.sync = false\n\n part.on('data', onData)\n part.on('end', onEnd)\n }).on('error', function (err) {\n if (curFile) { curFile.emit('error', err) }\n })\n }).on('error', function (err) {\n boy.emit('error', err)\n }).on('finish', function () {\n finished = true\n checkFinished()\n })\n}\n\nMultipart.prototype.write = function (chunk, cb) {\n const r = this.parser.write(chunk)\n if (r && !this._pause) {\n cb()\n } else {\n this._needDrain = !r\n this._cb = cb\n }\n}\n\nMultipart.prototype.end = function () {\n const self = this\n\n if (self.parser.writable) {\n self.parser.end()\n } else if (!self._boy._done) {\n process.nextTick(function () {\n self._boy._done = true\n self._boy.emit('finish')\n })\n }\n}\n\nfunction skipPart (part) {\n part.resume()\n}\n\nfunction FileStream (opts) {\n Readable.call(this, opts)\n\n this.bytesRead = 0\n\n this.truncated = false\n}\n\ninherits(FileStream, Readable)\n\nFileStream.prototype._read = function (n) {}\n\nmodule.exports = Multipart\n","'use strict'\n\nconst Decoder = require('../utils/Decoder')\nconst decodeText = require('../utils/decodeText')\nconst getLimit = require('../utils/getLimit')\n\nconst RE_CHARSET = /^charset$/i\n\nUrlEncoded.detect = /^application\\/x-www-form-urlencoded/i\nfunction UrlEncoded (boy, cfg) {\n const limits = cfg.limits\n const parsedConType = cfg.parsedConType\n this.boy = boy\n\n this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)\n this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)\n this.fieldsLimit = getLimit(limits, 'fields', Infinity)\n\n let charset\n for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var\n if (Array.isArray(parsedConType[i]) &&\n RE_CHARSET.test(parsedConType[i][0])) {\n charset = parsedConType[i][1].toLowerCase()\n break\n }\n }\n\n if (charset === undefined) { charset = cfg.defCharset || 'utf8' }\n\n this.decoder = new Decoder()\n this.charset = charset\n this._fields = 0\n this._state = 'key'\n this._checkingBytes = true\n this._bytesKey = 0\n this._bytesVal = 0\n this._key = ''\n this._val = ''\n this._keyTrunc = false\n this._valTrunc = false\n this._hitLimit = false\n}\n\nUrlEncoded.prototype.write = function (data, cb) {\n if (this._fields === this.fieldsLimit) {\n if (!this.boy.hitFieldsLimit) {\n this.boy.hitFieldsLimit = true\n this.boy.emit('fieldsLimit')\n }\n return cb()\n }\n\n let idxeq; let idxamp; let i; let p = 0; const len = data.length\n\n while (p < len) {\n if (this._state === 'key') {\n idxeq = idxamp = undefined\n for (i = p; i < len; ++i) {\n if (!this._checkingBytes) { ++p }\n if (data[i] === 0x3D/* = */) {\n idxeq = i\n break\n } else if (data[i] === 0x26/* & */) {\n idxamp = i\n break\n }\n if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {\n this._hitLimit = true\n break\n } else if (this._checkingBytes) { ++this._bytesKey }\n }\n\n if (idxeq !== undefined) {\n // key with assignment\n if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }\n this._state = 'val'\n\n this._hitLimit = false\n this._checkingBytes = true\n this._val = ''\n this._bytesVal = 0\n this._valTrunc = false\n this.decoder.reset()\n\n p = idxeq + 1\n } else if (idxamp !== undefined) {\n // key with no assignment\n ++this._fields\n let key; const keyTrunc = this._keyTrunc\n if (idxamp > 
p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }\n\n this._hitLimit = false\n this._checkingBytes = true\n this._key = ''\n this._bytesKey = 0\n this._keyTrunc = false\n this.decoder.reset()\n\n if (key.length) {\n this.boy.emit('field', decodeText(key, 'binary', this.charset),\n '',\n keyTrunc,\n false)\n }\n\n p = idxamp + 1\n if (this._fields === this.fieldsLimit) { return cb() }\n } else if (this._hitLimit) {\n // we may not have hit the actual limit if there are encoded bytes...\n if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }\n p = i\n if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {\n // yep, we actually did hit the limit\n this._checkingBytes = false\n this._keyTrunc = true\n }\n } else {\n if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }\n p = len\n }\n } else {\n idxamp = undefined\n for (i = p; i < len; ++i) {\n if (!this._checkingBytes) { ++p }\n if (data[i] === 0x26/* & */) {\n idxamp = i\n break\n }\n if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {\n this._hitLimit = true\n break\n } else if (this._checkingBytes) { ++this._bytesVal }\n }\n\n if (idxamp !== undefined) {\n ++this._fields\n if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }\n this.boy.emit('field', decodeText(this._key, 'binary', this.charset),\n decodeText(this._val, 'binary', this.charset),\n this._keyTrunc,\n this._valTrunc)\n this._state = 'key'\n\n this._hitLimit = false\n this._checkingBytes = true\n this._key = ''\n this._bytesKey = 0\n this._keyTrunc = false\n this.decoder.reset()\n\n p = idxamp + 1\n if (this._fields === this.fieldsLimit) { return cb() }\n } else if (this._hitLimit) {\n // we may not have hit the actual limit if there are encoded bytes...\n if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }\n p = i\n if ((this._val === '' && this.fieldSizeLimit === 0) ||\n (this._bytesVal = this._val.length) === this.fieldSizeLimit) {\n // yep, we actually did hit the limit\n this._checkingBytes = false\n this._valTrunc = true\n }\n } else {\n if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }\n p = len\n }\n }\n }\n cb()\n}\n\nUrlEncoded.prototype.end = function () {\n if (this.boy._done) { return }\n\n if (this._state === 'key' && this._key.length > 0) {\n this.boy.emit('field', decodeText(this._key, 'binary', this.charset),\n '',\n this._keyTrunc,\n false)\n } else if (this._state === 'val') {\n this.boy.emit('field', decodeText(this._key, 'binary', this.charset),\n decodeText(this._val, 'binary', this.charset),\n this._keyTrunc,\n this._valTrunc)\n }\n this.boy._done = true\n this.boy.emit('finish')\n}\n\nmodule.exports = UrlEncoded\n","'use strict'\n\nconst RE_PLUS = /\\+/g\n\nconst HEX = [\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,\n 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n]\n\nfunction Decoder () {\n this.buffer = undefined\n}\nDecoder.prototype.write = function (str) {\n // Replace '+' with ' ' before decoding\n str = str.replace(RE_PLUS, ' ')\n let res = ''\n let i = 0; let p = 0; const len = str.length\n for (; i < len; ++i) {\n if 
(this.buffer !== undefined) {\n if (!HEX[str.charCodeAt(i)]) {\n res += '%' + this.buffer\n this.buffer = undefined\n --i // retry character\n } else {\n this.buffer += str[i]\n ++p\n if (this.buffer.length === 2) {\n res += String.fromCharCode(parseInt(this.buffer, 16))\n this.buffer = undefined\n }\n }\n } else if (str[i] === '%') {\n if (i > p) {\n res += str.substring(p, i)\n p = i\n }\n this.buffer = ''\n ++p\n }\n }\n if (p < len && this.buffer === undefined) { res += str.substring(p) }\n return res\n}\nDecoder.prototype.reset = function () {\n this.buffer = undefined\n}\n\nmodule.exports = Decoder\n","'use strict'\n\nmodule.exports = function basename (path) {\n if (typeof path !== 'string') { return '' }\n for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var\n switch (path.charCodeAt(i)) {\n case 0x2F: // '/'\n case 0x5C: // '\\'\n path = path.slice(i + 1)\n return (path === '..' || path === '.' ? '' : path)\n }\n }\n return (path === '..' || path === '.' ? '' : path)\n}\n","'use strict'\n\n// Node has always utf-8\nconst utf8Decoder = new TextDecoder('utf-8')\nconst textDecoders = new Map([\n ['utf-8', utf8Decoder],\n ['utf8', utf8Decoder]\n])\n\nfunction getDecoder (charset) {\n let lc\n while (true) {\n switch (charset) {\n case 'utf-8':\n case 'utf8':\n return decoders.utf8\n case 'latin1':\n case 'ascii': // TODO: Make these a separate, strict decoder?\n case 'us-ascii':\n case 'iso-8859-1':\n case 'iso8859-1':\n case 'iso88591':\n case 'iso_8859-1':\n case 'windows-1252':\n case 'iso_8859-1:1987':\n case 'cp1252':\n case 'x-cp1252':\n return decoders.latin1\n case 'utf16le':\n case 'utf-16le':\n case 'ucs2':\n case 'ucs-2':\n return decoders.utf16le\n case 'base64':\n return decoders.base64\n default:\n if (lc === undefined) {\n lc = true\n charset = charset.toLowerCase()\n continue\n }\n return decoders.other.bind(charset)\n }\n }\n}\n\nconst decoders = {\n utf8: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n return data.utf8Slice(0, data.length)\n },\n\n latin1: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n return data\n }\n return data.latin1Slice(0, data.length)\n },\n\n utf16le: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n return data.ucs2Slice(0, data.length)\n },\n\n base64: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n return data.base64Slice(0, data.length)\n },\n\n other: (data, sourceEncoding) => {\n if (data.length === 0) {\n return ''\n }\n if (typeof data === 'string') {\n data = Buffer.from(data, sourceEncoding)\n }\n\n if (textDecoders.has(this.toString())) {\n try {\n return textDecoders.get(this).decode(data)\n } catch (e) { }\n }\n return typeof data === 'string'\n ? 
data\n : data.toString()\n }\n}\n\nfunction decodeText (text, sourceEncoding, destEncoding) {\n if (text) {\n return getDecoder(destEncoding)(text, sourceEncoding)\n }\n return text\n}\n\nmodule.exports = decodeText\n","'use strict'\n\nmodule.exports = function getLimit (limits, name, defaultLimit) {\n if (\n !limits ||\n limits[name] === undefined ||\n limits[name] === null\n ) { return defaultLimit }\n\n if (\n typeof limits[name] !== 'number' ||\n isNaN(limits[name])\n ) { throw new TypeError('Limit ' + name + ' is not a valid number') }\n\n return limits[name]\n}\n","/* eslint-disable object-property-newline */\n'use strict'\n\nconst decodeText = require('./decodeText')\n\nconst RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g\n\nconst EncodedLookup = {\n '%00': '\\x00', '%01': '\\x01', '%02': '\\x02', '%03': '\\x03', '%04': '\\x04',\n '%05': '\\x05', '%06': '\\x06', '%07': '\\x07', '%08': '\\x08', '%09': '\\x09',\n '%0a': '\\x0a', '%0A': '\\x0a', '%0b': '\\x0b', '%0B': '\\x0b', '%0c': '\\x0c',\n '%0C': '\\x0c', '%0d': '\\x0d', '%0D': '\\x0d', '%0e': '\\x0e', '%0E': '\\x0e',\n '%0f': '\\x0f', '%0F': '\\x0f', '%10': '\\x10', '%11': '\\x11', '%12': '\\x12',\n '%13': '\\x13', '%14': '\\x14', '%15': '\\x15', '%16': '\\x16', '%17': '\\x17',\n '%18': '\\x18', '%19': '\\x19', '%1a': '\\x1a', '%1A': '\\x1a', '%1b': '\\x1b',\n '%1B': '\\x1b', '%1c': '\\x1c', '%1C': '\\x1c', '%1d': '\\x1d', '%1D': '\\x1d',\n '%1e': '\\x1e', '%1E': '\\x1e', '%1f': '\\x1f', '%1F': '\\x1f', '%20': '\\x20',\n '%21': '\\x21', '%22': '\\x22', '%23': '\\x23', '%24': '\\x24', '%25': '\\x25',\n '%26': '\\x26', '%27': '\\x27', '%28': '\\x28', '%29': '\\x29', '%2a': '\\x2a',\n '%2A': '\\x2a', '%2b': '\\x2b', '%2B': '\\x2b', '%2c': '\\x2c', '%2C': '\\x2c',\n '%2d': '\\x2d', '%2D': '\\x2d', '%2e': '\\x2e', '%2E': '\\x2e', '%2f': '\\x2f',\n '%2F': '\\x2f', '%30': '\\x30', '%31': '\\x31', '%32': '\\x32', '%33': '\\x33',\n '%34': '\\x34', '%35': '\\x35', '%36': '\\x36', '%37': '\\x37', '%38': '\\x38',\n '%39': '\\x39', '%3a': '\\x3a', '%3A': '\\x3a', '%3b': '\\x3b', '%3B': '\\x3b',\n '%3c': '\\x3c', '%3C': '\\x3c', '%3d': '\\x3d', '%3D': '\\x3d', '%3e': '\\x3e',\n '%3E': '\\x3e', '%3f': '\\x3f', '%3F': '\\x3f', '%40': '\\x40', '%41': '\\x41',\n '%42': '\\x42', '%43': '\\x43', '%44': '\\x44', '%45': '\\x45', '%46': '\\x46',\n '%47': '\\x47', '%48': '\\x48', '%49': '\\x49', '%4a': '\\x4a', '%4A': '\\x4a',\n '%4b': '\\x4b', '%4B': '\\x4b', '%4c': '\\x4c', '%4C': '\\x4c', '%4d': '\\x4d',\n '%4D': '\\x4d', '%4e': '\\x4e', '%4E': '\\x4e', '%4f': '\\x4f', '%4F': '\\x4f',\n '%50': '\\x50', '%51': '\\x51', '%52': '\\x52', '%53': '\\x53', '%54': '\\x54',\n '%55': '\\x55', '%56': '\\x56', '%57': '\\x57', '%58': '\\x58', '%59': '\\x59',\n '%5a': '\\x5a', '%5A': '\\x5a', '%5b': '\\x5b', '%5B': '\\x5b', '%5c': '\\x5c',\n '%5C': '\\x5c', '%5d': '\\x5d', '%5D': '\\x5d', '%5e': '\\x5e', '%5E': '\\x5e',\n '%5f': '\\x5f', '%5F': '\\x5f', '%60': '\\x60', '%61': '\\x61', '%62': '\\x62',\n '%63': '\\x63', '%64': '\\x64', '%65': '\\x65', '%66': '\\x66', '%67': '\\x67',\n '%68': '\\x68', '%69': '\\x69', '%6a': '\\x6a', '%6A': '\\x6a', '%6b': '\\x6b',\n '%6B': '\\x6b', '%6c': '\\x6c', '%6C': '\\x6c', '%6d': '\\x6d', '%6D': '\\x6d',\n '%6e': '\\x6e', '%6E': '\\x6e', '%6f': '\\x6f', '%6F': '\\x6f', '%70': '\\x70',\n '%71': '\\x71', '%72': '\\x72', '%73': '\\x73', '%74': '\\x74', '%75': '\\x75',\n '%76': '\\x76', '%77': '\\x77', '%78': '\\x78', '%79': '\\x79', '%7a': '\\x7a',\n '%7A': '\\x7a', '%7b': '\\x7b', '%7B': '\\x7b', '%7c': '\\x7c', '%7C': '\\x7c',\n 
'%7d': '\\x7d', '%7D': '\\x7d', '%7e': '\\x7e', '%7E': '\\x7e', '%7f': '\\x7f',\n '%7F': '\\x7f', '%80': '\\x80', '%81': '\\x81', '%82': '\\x82', '%83': '\\x83',\n '%84': '\\x84', '%85': '\\x85', '%86': '\\x86', '%87': '\\x87', '%88': '\\x88',\n '%89': '\\x89', '%8a': '\\x8a', '%8A': '\\x8a', '%8b': '\\x8b', '%8B': '\\x8b',\n '%8c': '\\x8c', '%8C': '\\x8c', '%8d': '\\x8d', '%8D': '\\x8d', '%8e': '\\x8e',\n '%8E': '\\x8e', '%8f': '\\x8f', '%8F': '\\x8f', '%90': '\\x90', '%91': '\\x91',\n '%92': '\\x92', '%93': '\\x93', '%94': '\\x94', '%95': '\\x95', '%96': '\\x96',\n '%97': '\\x97', '%98': '\\x98', '%99': '\\x99', '%9a': '\\x9a', '%9A': '\\x9a',\n '%9b': '\\x9b', '%9B': '\\x9b', '%9c': '\\x9c', '%9C': '\\x9c', '%9d': '\\x9d',\n '%9D': '\\x9d', '%9e': '\\x9e', '%9E': '\\x9e', '%9f': '\\x9f', '%9F': '\\x9f',\n '%a0': '\\xa0', '%A0': '\\xa0', '%a1': '\\xa1', '%A1': '\\xa1', '%a2': '\\xa2',\n '%A2': '\\xa2', '%a3': '\\xa3', '%A3': '\\xa3', '%a4': '\\xa4', '%A4': '\\xa4',\n '%a5': '\\xa5', '%A5': '\\xa5', '%a6': '\\xa6', '%A6': '\\xa6', '%a7': '\\xa7',\n '%A7': '\\xa7', '%a8': '\\xa8', '%A8': '\\xa8', '%a9': '\\xa9', '%A9': '\\xa9',\n '%aa': '\\xaa', '%Aa': '\\xaa', '%aA': '\\xaa', '%AA': '\\xaa', '%ab': '\\xab',\n '%Ab': '\\xab', '%aB': '\\xab', '%AB': '\\xab', '%ac': '\\xac', '%Ac': '\\xac',\n '%aC': '\\xac', '%AC': '\\xac', '%ad': '\\xad', '%Ad': '\\xad', '%aD': '\\xad',\n '%AD': '\\xad', '%ae': '\\xae', '%Ae': '\\xae', '%aE': '\\xae', '%AE': '\\xae',\n '%af': '\\xaf', '%Af': '\\xaf', '%aF': '\\xaf', '%AF': '\\xaf', '%b0': '\\xb0',\n '%B0': '\\xb0', '%b1': '\\xb1', '%B1': '\\xb1', '%b2': '\\xb2', '%B2': '\\xb2',\n '%b3': '\\xb3', '%B3': '\\xb3', '%b4': '\\xb4', '%B4': '\\xb4', '%b5': '\\xb5',\n '%B5': '\\xb5', '%b6': '\\xb6', '%B6': '\\xb6', '%b7': '\\xb7', '%B7': '\\xb7',\n '%b8': '\\xb8', '%B8': '\\xb8', '%b9': '\\xb9', '%B9': '\\xb9', '%ba': '\\xba',\n '%Ba': '\\xba', '%bA': '\\xba', '%BA': '\\xba', '%bb': '\\xbb', '%Bb': '\\xbb',\n '%bB': '\\xbb', '%BB': '\\xbb', '%bc': '\\xbc', '%Bc': '\\xbc', '%bC': '\\xbc',\n '%BC': '\\xbc', '%bd': '\\xbd', '%Bd': '\\xbd', '%bD': '\\xbd', '%BD': '\\xbd',\n '%be': '\\xbe', '%Be': '\\xbe', '%bE': '\\xbe', '%BE': '\\xbe', '%bf': '\\xbf',\n '%Bf': '\\xbf', '%bF': '\\xbf', '%BF': '\\xbf', '%c0': '\\xc0', '%C0': '\\xc0',\n '%c1': '\\xc1', '%C1': '\\xc1', '%c2': '\\xc2', '%C2': '\\xc2', '%c3': '\\xc3',\n '%C3': '\\xc3', '%c4': '\\xc4', '%C4': '\\xc4', '%c5': '\\xc5', '%C5': '\\xc5',\n '%c6': '\\xc6', '%C6': '\\xc6', '%c7': '\\xc7', '%C7': '\\xc7', '%c8': '\\xc8',\n '%C8': '\\xc8', '%c9': '\\xc9', '%C9': '\\xc9', '%ca': '\\xca', '%Ca': '\\xca',\n '%cA': '\\xca', '%CA': '\\xca', '%cb': '\\xcb', '%Cb': '\\xcb', '%cB': '\\xcb',\n '%CB': '\\xcb', '%cc': '\\xcc', '%Cc': '\\xcc', '%cC': '\\xcc', '%CC': '\\xcc',\n '%cd': '\\xcd', '%Cd': '\\xcd', '%cD': '\\xcd', '%CD': '\\xcd', '%ce': '\\xce',\n '%Ce': '\\xce', '%cE': '\\xce', '%CE': '\\xce', '%cf': '\\xcf', '%Cf': '\\xcf',\n '%cF': '\\xcf', '%CF': '\\xcf', '%d0': '\\xd0', '%D0': '\\xd0', '%d1': '\\xd1',\n '%D1': '\\xd1', '%d2': '\\xd2', '%D2': '\\xd2', '%d3': '\\xd3', '%D3': '\\xd3',\n '%d4': '\\xd4', '%D4': '\\xd4', '%d5': '\\xd5', '%D5': '\\xd5', '%d6': '\\xd6',\n '%D6': '\\xd6', '%d7': '\\xd7', '%D7': '\\xd7', '%d8': '\\xd8', '%D8': '\\xd8',\n '%d9': '\\xd9', '%D9': '\\xd9', '%da': '\\xda', '%Da': '\\xda', '%dA': '\\xda',\n '%DA': '\\xda', '%db': '\\xdb', '%Db': '\\xdb', '%dB': '\\xdb', '%DB': '\\xdb',\n '%dc': '\\xdc', '%Dc': '\\xdc', '%dC': '\\xdc', '%DC': '\\xdc', '%dd': '\\xdd',\n '%Dd': '\\xdd', '%dD': 
'\\xdd', '%DD': '\\xdd', '%de': '\\xde', '%De': '\\xde',\n '%dE': '\\xde', '%DE': '\\xde', '%df': '\\xdf', '%Df': '\\xdf', '%dF': '\\xdf',\n '%DF': '\\xdf', '%e0': '\\xe0', '%E0': '\\xe0', '%e1': '\\xe1', '%E1': '\\xe1',\n '%e2': '\\xe2', '%E2': '\\xe2', '%e3': '\\xe3', '%E3': '\\xe3', '%e4': '\\xe4',\n '%E4': '\\xe4', '%e5': '\\xe5', '%E5': '\\xe5', '%e6': '\\xe6', '%E6': '\\xe6',\n '%e7': '\\xe7', '%E7': '\\xe7', '%e8': '\\xe8', '%E8': '\\xe8', '%e9': '\\xe9',\n '%E9': '\\xe9', '%ea': '\\xea', '%Ea': '\\xea', '%eA': '\\xea', '%EA': '\\xea',\n '%eb': '\\xeb', '%Eb': '\\xeb', '%eB': '\\xeb', '%EB': '\\xeb', '%ec': '\\xec',\n '%Ec': '\\xec', '%eC': '\\xec', '%EC': '\\xec', '%ed': '\\xed', '%Ed': '\\xed',\n '%eD': '\\xed', '%ED': '\\xed', '%ee': '\\xee', '%Ee': '\\xee', '%eE': '\\xee',\n '%EE': '\\xee', '%ef': '\\xef', '%Ef': '\\xef', '%eF': '\\xef', '%EF': '\\xef',\n '%f0': '\\xf0', '%F0': '\\xf0', '%f1': '\\xf1', '%F1': '\\xf1', '%f2': '\\xf2',\n '%F2': '\\xf2', '%f3': '\\xf3', '%F3': '\\xf3', '%f4': '\\xf4', '%F4': '\\xf4',\n '%f5': '\\xf5', '%F5': '\\xf5', '%f6': '\\xf6', '%F6': '\\xf6', '%f7': '\\xf7',\n '%F7': '\\xf7', '%f8': '\\xf8', '%F8': '\\xf8', '%f9': '\\xf9', '%F9': '\\xf9',\n '%fa': '\\xfa', '%Fa': '\\xfa', '%fA': '\\xfa', '%FA': '\\xfa', '%fb': '\\xfb',\n '%Fb': '\\xfb', '%fB': '\\xfb', '%FB': '\\xfb', '%fc': '\\xfc', '%Fc': '\\xfc',\n '%fC': '\\xfc', '%FC': '\\xfc', '%fd': '\\xfd', '%Fd': '\\xfd', '%fD': '\\xfd',\n '%FD': '\\xfd', '%fe': '\\xfe', '%Fe': '\\xfe', '%fE': '\\xfe', '%FE': '\\xfe',\n '%ff': '\\xff', '%Ff': '\\xff', '%fF': '\\xff', '%FF': '\\xff'\n}\n\nfunction encodedReplacer (match) {\n return EncodedLookup[match]\n}\n\nconst STATE_KEY = 0\nconst STATE_VALUE = 1\nconst STATE_CHARSET = 2\nconst STATE_LANG = 3\n\nfunction parseParams (str) {\n const res = []\n let state = STATE_KEY\n let charset = ''\n let inquote = false\n let escaping = false\n let p = 0\n let tmp = ''\n const len = str.length\n\n for (var i = 0; i < len; ++i) { // eslint-disable-line no-var\n const char = str[i]\n if (char === '\\\\' && inquote) {\n if (escaping) { escaping = false } else {\n escaping = true\n continue\n }\n } else if (char === '\"') {\n if (!escaping) {\n if (inquote) {\n inquote = false\n state = STATE_KEY\n } else { inquote = true }\n continue\n } else { escaping = false }\n } else {\n if (escaping && inquote) { tmp += '\\\\' }\n escaping = false\n if ((state === STATE_CHARSET || state === STATE_LANG) && char === \"'\") {\n if (state === STATE_CHARSET) {\n state = STATE_LANG\n charset = tmp.substring(1)\n } else { state = STATE_VALUE }\n tmp = ''\n continue\n } else if (state === STATE_KEY &&\n (char === '*' || char === '=') &&\n res.length) {\n state = char === '*'\n ? 
STATE_CHARSET\n : STATE_VALUE\n res[p] = [tmp, undefined]\n tmp = ''\n continue\n } else if (!inquote && char === ';') {\n state = STATE_KEY\n if (charset) {\n if (tmp.length) {\n tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),\n 'binary',\n charset)\n }\n charset = ''\n } else if (tmp.length) {\n tmp = decodeText(tmp, 'binary', 'utf8')\n }\n if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }\n tmp = ''\n ++p\n continue\n } else if (!inquote && (char === ' ' || char === '\\t')) { continue }\n }\n tmp += char\n }\n if (charset && tmp.length) {\n tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),\n 'binary',\n charset)\n } else if (tmp) {\n tmp = decodeText(tmp, 'binary', 'utf8')\n }\n\n if (res[p] === undefined) {\n if (tmp) { res[p] = tmp }\n } else { res[p][1] = tmp }\n\n return res\n}\n\nmodule.exports = parseParams\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AclRoleAccessorMethods = exports.Acl = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Attach functionality to a {@link Storage.acl} instance. This will add an\n * object for each role group (owners, readers, and writers), with each object\n * containing methods to add or delete a type of entity.\n *\n * As an example, here are a few methods that are created.\n *\n * myBucket.acl.readers.deleteGroup('groupId', function(err) {});\n *\n * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {});\n *\n * myBucket.acl.writers.addDomain('example.com', function(err, acl) {});\n *\n * @private\n */\nclass AclRoleAccessorMethods {\n constructor() {\n this.owners = {};\n this.readers = {};\n this.writers = {};\n /**\n * An object of convenience methods to add or delete owner ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.owners.addAllAuthenticatedUsers`\n * - `myFile.acl.owners.deleteAllAuthenticatedUsers`\n * - `myFile.acl.owners.addAllUsers`\n * - `myFile.acl.owners.deleteAllUsers`\n * - `myFile.acl.owners.addDomain`\n * - `myFile.acl.owners.deleteDomain`\n * - `myFile.acl.owners.addGroup`\n * - `myFile.acl.owners.deleteGroup`\n * - `myFile.acl.owners.addProject`\n * - `myFile.acl.owners.deleteProject`\n * - `myFile.acl.owners.addUser`\n * - `myFile.acl.owners.deleteUser`\n *\n * @name Acl#owners\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as an owner of a file.\n * //-\n * const myBucket = gcs.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n * myFile.acl.owners.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: 
gcs.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.owners.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.owners = {};\n /**\n * An object of convenience methods to add or delete reader ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.readers.addAllAuthenticatedUsers`\n * - `myFile.acl.readers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.readers.addAllUsers`\n * - `myFile.acl.readers.deleteAllUsers`\n * - `myFile.acl.readers.addDomain`\n * - `myFile.acl.readers.deleteDomain`\n * - `myFile.acl.readers.addGroup`\n * - `myFile.acl.readers.deleteGroup`\n * - `myFile.acl.readers.addProject`\n * - `myFile.acl.readers.deleteProject`\n * - `myFile.acl.readers.addUser`\n * - `myFile.acl.readers.deleteUser`\n *\n * @name Acl#readers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a reader of a file.\n * //-\n * myFile.acl.readers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.READER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.readers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.readers = {};\n /**\n * An object of convenience methods to add or delete writer ACL permissions\n * for a given entity.\n *\n * The supported methods include:\n *\n * - `myFile.acl.writers.addAllAuthenticatedUsers`\n * - `myFile.acl.writers.deleteAllAuthenticatedUsers`\n * - `myFile.acl.writers.addAllUsers`\n * - `myFile.acl.writers.deleteAllUsers`\n * - `myFile.acl.writers.addDomain`\n * - `myFile.acl.writers.deleteDomain`\n * - `myFile.acl.writers.addGroup`\n * - `myFile.acl.writers.deleteGroup`\n * - `myFile.acl.writers.addProject`\n * - `myFile.acl.writers.deleteProject`\n * - `myFile.acl.writers.addUser`\n * - `myFile.acl.writers.deleteUser`\n *\n * @name Acl#writers\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * //-\n * // Add a user as a writer of a file.\n * //-\n * myFile.acl.writers.addUser('email@example.com', function(err, aclObject)\n * {});\n *\n * //-\n * // For reference, the above command is the same as running the following.\n * //-\n * myFile.acl.add({\n * entity: 'user-email@example.com',\n * role: gcs.acl.WRITER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.writers.addUser('email@example.com').then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n this.writers = {};\n AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this));\n }\n _assignAccessMethods(role) {\n const accessMethods = AclRoleAccessorMethods.accessMethods;\n const entities = AclRoleAccessorMethods.entities;\n const roleGroup = role.toLowerCase() + 's';\n // 
eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[roleGroup] = entities.reduce((acc, entity) => {\n const isPrefix = entity.charAt(entity.length - 1) === '-';\n accessMethods.forEach(accessMethod => {\n let method = accessMethod + entity[0].toUpperCase() + entity.substring(1);\n if (isPrefix) {\n method = method.replace('-', '');\n }\n // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the\n // more complex API of specifying an `entity` and `role`.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n acc[method] = (entityId, options, callback) => {\n let apiEntity;\n if (typeof options === 'function') {\n callback = options;\n options = {};\n }\n if (isPrefix) {\n apiEntity = entity + entityId;\n }\n else {\n // If the entity is not a prefix, it is a special entity group\n // that does not require further details. The accessor methods\n // only accept a callback.\n apiEntity = entity;\n callback = entityId;\n }\n options = Object.assign({\n entity: apiEntity,\n role,\n }, options);\n const args = [options];\n if (typeof callback === 'function') {\n args.push(callback);\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return this[accessMethod].apply(this, args);\n };\n });\n return acc;\n }, {});\n }\n}\nexports.AclRoleAccessorMethods = AclRoleAccessorMethods;\nAclRoleAccessorMethods.accessMethods = ['add', 'delete'];\nAclRoleAccessorMethods.entities = [\n // Special entity groups that do not require further specification.\n 'allAuthenticatedUsers',\n 'allUsers',\n // Entity groups that require specification, e.g. `user-email@example.com`.\n 'domain-',\n 'group-',\n 'project-',\n 'user-',\n];\nAclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER'];\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. 
Permissions define the actions that can be performed against an\n * object or bucket (for example, `READ` or `WRITE`); the entity defines who the\n * permission applies to (for example, a specific user or group of users).\n *\n * Where an `entity` value is accepted, we follow the format the Cloud Storage\n * API expects.\n *\n * Refer to\n * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls\n * for the most up-to-date values.\n *\n * - `user-userId`\n * - `user-email`\n * - `group-groupId`\n * - `group-email`\n * - `domain-domain`\n * - `project-team-projectId`\n * - `allUsers`\n * - `allAuthenticatedUsers`\n *\n * Examples:\n *\n * - The user \"liz@example.com\" would be `user-liz@example.com`.\n * - The group \"example@googlegroups.com\" would be\n * `group-example@googlegroups.com`.\n * - To refer to all members of the Google Apps for Business domain\n * \"example.com\", the entity would be `domain-example.com`.\n *\n * For more detailed information, see\n * {@link http://goo.gl/6qBBPO| About Access Control Lists}.\n *\n * @constructor Acl\n * @mixin\n * @param {object} options Configuration options.\n */\nclass Acl extends AclRoleAccessorMethods {\n constructor(options) {\n super();\n this.pathPrefix = options.pathPrefix;\n this.request_ = options.request;\n }\n /**\n * @typedef {array} AddAclResponse\n * @property {object} 0 The Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback AddAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Add access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be added.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link https://cloud.google.com/storage/docs/access-control Access\n * Control}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {AddAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * // Here is how you would grant ownership permissions to a user on a\n * specific\n * // revision of a file.\n * //-\n * myFile.acl.add({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.OWNER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n 
*\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_add_file_owner\n * Example of adding an owner to a file:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n */\n add(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'POST',\n uri: '',\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n json: {\n entity: options.entity,\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * @typedef {array} RemoveAclResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback RemoveAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation}\n *\n * @param {object} options Configuration object.\n * @param {string} options.entity Whose permissions will be revoked.\n * @param {int} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {RemoveAclCallback} callback The callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.delete({\n * entity: 'user-useremail@example.com'\n * }, function(err, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.delete({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_file_owner\n * Example of removing an owner from a bucket:\n */\n delete(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'DELETE',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n }, (err, resp) => {\n callback(err, resp);\n });\n }\n /**\n * @typedef {array} GetAclResponse\n * @property {object|object[]} 0 Single or 
array of Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object|object[]} acl Single or array of Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get access controls on a {@link Bucket} or {@link File}. If\n * an entity is omitted, you will receive an array of all applicable access\n * controls.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation}\n *\n * @param {object|function} [options] Configuration options. If you want to\n * receive a list of all access controls, pass the callback function as\n * the only argument.\n * @param {string} options.entity Whose permissions will be fetched.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * myBucket.acl.get({\n * entity: 'user-useremail@example.com'\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // Get all access controls.\n * //-\n * myBucket.acl.get(function(err, aclObjects, apiResponse) {\n * // aclObjects = [\n * // {\n * // entity: 'user-useremail@example.com',\n * // role: 'owner'\n * // }\n * // ]\n * });\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.get({\n * entity: 'user-useremail@example.com',\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.get().then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl\n * Example of printing a file's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_file_acl_for_user\n * Example of printing a file's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n */\n get(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb;\n let path = '';\n const query = {};\n if (options) {\n path = '/' + encodeURIComponent(options.entity);\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n }\n this.request({\n uri: path,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n let results;\n if (resp.items) {\n results = resp.items.map(this.makeAclObject_);\n }\n else {\n results = this.makeAclObject_(resp);\n }\n callback(null, results, resp);\n });\n }\n /**\n * @typedef {array} UpdateAclResponse\n * @property {object} 0 The updated Acl Objects.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UpdateAclCallback\n * @param {?Error} err Request error, if any.\n * @param {object} acl The updated Acl Objects.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Update access controls on a {@link Bucket} or {@link File}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation}\n *\n * @param {object} options Configuration options.\n * @param {string} options.entity Whose permissions will be updated.\n * @param {string} options.role Permissions allowed for the defined entity.\n * See {@link Storage.acl}.\n * @param {number} [options.generation] **File Objects Only** Select a specific\n * revision of this file (as opposed to the latest version, the default).\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {UpdateAclCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n * const myFile = myBucket.file('my-file');\n *\n * const options = {\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE\n * };\n *\n * myBucket.acl.update(options, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // For file ACL operations, you can also specify a `generation` property.\n * //-\n * myFile.acl.update({\n * entity: 'user-useremail@example.com',\n * role: gcs.acl.WRITER_ROLE,\n * generation: 1\n * }, function(err, aclObject, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myFile.acl.update(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n update(options, callback) {\n const query = {};\n if (options.generation) {\n query.generation = options.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n this.request({\n method: 'PUT',\n uri: '/' + encodeURIComponent(options.entity),\n qs: query,\n json: {\n role: options.role.toUpperCase(),\n },\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, this.makeAclObject_(resp), resp);\n });\n }\n /**\n * Transform API responses to a consistent object format.\n *\n * @private\n */\n makeAclObject_(accessControlObject) {\n const obj = {\n entity: accessControlObject.entity,\n role: accessControlObject.role,\n };\n if (accessControlObject.projectTeam) {\n obj.projectTeam = accessControlObject.projectTeam;\n }\n return obj;\n }\n /**\n * Patch requests up 
to the bucket's request object.\n *\n * @private\n *\n * @param {string} method Action.\n * @param {string} path Request path.\n * @param {*} query Request query object.\n * @param {*} body Request body contents.\n * @param {function} callback Callback function.\n */\n request(reqOpts, callback) {\n reqOpts.uri = this.pathPrefix + reqOpts.uri;\n this.request_(reqOpts, callback);\n }\n}\nexports.Acl = Acl;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Acl, {\n exclude: ['request'],\n});\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
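// A short sketch of the Acl surface implemented above, following the usage shown
// in its JSDoc examples; the bucket name and entity are illustrative.
const { Storage } = require('@google-cloud/storage');
const storageClient = new Storage();
const aclDemoBucket = storageClient.bucket('albums');

// Grant read access to all users, then list every applicable ACL entry.
aclDemoBucket.acl
  .add({ entity: 'allUsers', role: storageClient.acl.READER_ROLE })
  .then(([aclObject, apiResponse]) => console.log(aclObject));

aclDemoBucket.acl.get().then(([aclObjects]) => console.log(aclObjects));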
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst fs = __importStar(require(\"fs\"));\nconst mime = __importStar(require(\"mime-types\"));\nconst path = __importStar(require(\"path\"));\nconst p_limit_1 = __importDefault(require(\"p-limit\"));\nconst util_1 = require(\"util\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nconst util_js_1 = require(\"./util.js\");\nconst acl_js_1 = require(\"./acl.js\");\nconst file_js_1 = require(\"./file.js\");\nconst iam_js_1 = require(\"./iam.js\");\nconst notification_js_1 = require(\"./notification.js\");\nconst storage_js_1 = require(\"./storage.js\");\nconst signer_js_1 = require(\"./signer.js\");\nconst stream_1 = require(\"stream\");\nconst url_1 = require(\"url\");\nvar BucketActionToHTTPMethod;\n(function (BucketActionToHTTPMethod) {\n BucketActionToHTTPMethod[\"list\"] = \"GET\";\n})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {}));\nvar AvailableServiceObjectMethods;\n(function (AvailableServiceObjectMethods) {\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"setMetadata\"] = 0] = \"setMetadata\";\n AvailableServiceObjectMethods[AvailableServiceObjectMethods[\"delete\"] = 1] = \"delete\";\n})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {}));\nvar BucketExceptionMessages;\n(function (BucketExceptionMessages) {\n BucketExceptionMessages[\"PROVIDE_SOURCE_FILE\"] = \"You must provide at least one source file.\";\n BucketExceptionMessages[\"DESTINATION_FILE_NOT_SPECIFIED\"] = \"A destination file must be specified.\";\n BucketExceptionMessages[\"CHANNEL_ID_REQUIRED\"] = \"An ID is required to create a channel.\";\n BucketExceptionMessages[\"TOPIC_NAME_REQUIRED\"] = \"A valid topic name is required.\";\n BucketExceptionMessages[\"CONFIGURATION_OBJECT_PREFIX_REQUIRED\"] = \"A configuration object with a prefix is required.\";\n BucketExceptionMessages[\"SPECIFY_FILE_NAME\"] = \"A file name must be specified.\";\n BucketExceptionMessages[\"METAGENERATION_NOT_PROVIDED\"] = \"A metageneration must be provided.\";\n BucketExceptionMessages[\"SUPPLY_NOTIFICATION_ID\"] = \"You must supply a notification ID.\";\n})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {}));\n/**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n/**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n/**\n * @callback Crc32cGeneratorUpdateCallback\n * A 
method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n/**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n/**\n * A function that generates a CRC32C Validator. Defaults to {@link CRC32C}\n *\n * @name Bucket#crc32cGenerator\n * @type {CRC32CValidator}\n */\n/**\n * Get and set IAM policies for your bucket.\n *\n * @name Bucket#iam\n * @mixes Iam\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Get the IAM policy for your bucket.\n * //-\n * bucket.iam.getPolicy(function(err, policy) {\n * console.log(policy);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy().then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a Bucket instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * Buckets also have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. Default ACLs specify permissions that all new\n * objects added to the bucket will inherit by default. 
You can add, delete,\n * get, and update entities and permissions for these as well with\n * {@link Bucket#acl.default}.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control Lists}\n * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs}\n *\n * @name Bucket#acl\n * @mixes Acl\n * @property {Acl} default Cloud Storage Buckets have\n * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs}\n * for all created files. You can add, delete, get, and update entities and\n * permissions for these as well. The method signatures and examples are all\n * the same, after only prefixing the method call with `default`.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Make a bucket's contents publicly readable.\n * //-\n * const myBucket = storage.bucket('my-bucket');\n *\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * myBucket.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl\n * Example of printing a bucket's ACL:\n *\n * @example include:samples/acl.js\n * region_tag:storage_print_bucket_acl_for_user\n * Example of printing a bucket's ACL for a specific user:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_owner\n * Example of adding an owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_owner\n * Example of removing an owner from a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_add_bucket_default_owner\n * Example of adding a default owner to a bucket:\n *\n * @example include:samples/acl.js\n * region_tag:storage_remove_bucket_default_owner\n * Example of removing a default owner from a bucket:\n */\n/**\n * The API-formatted resource description of the bucket.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. 
To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Bucket#metadata\n * @type {object}\n */\n/**\n * The bucket's name.\n * @name Bucket#name\n * @type {string}\n */\n/**\n * Get {@link File} objects for the files currently in the bucket as a\n * readable object stream.\n *\n * @method Bucket#getFilesStream\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @returns {ReadableStream} A readable stream that emits {@link File} instances.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFilesStream()\n * .on('error', console.error)\n * .on('data', function(file) {\n * // file is a File object.\n * })\n * .on('end', function() {\n * // All files retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * bucket.getFilesStream()\n * .on('data', function(file) {\n * this.end();\n * });\n *\n * //-\n * // If you're filtering files with a delimiter, you should use\n * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to\n * // preserve the `apiResponse` argument.\n * //-\n * const prefixes = [];\n *\n * function callback(err, files, nextQuery, apiResponse) {\n * prefixes = prefixes.concat(apiResponse.prefixes);\n *\n * if (nextQuery) {\n * bucket.getFiles(nextQuery, callback);\n * } else {\n * // prefixes = The finished array of prefixes.\n * }\n * }\n *\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, callback);\n * ```\n */\n/**\n * Create a Bucket object to interact with a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Storage} storage A {@link Storage} instance.\n * @param {string} name The name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * ```\n */\nclass Bucket extends index_js_1.ServiceObject {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n getFilesStream(query) {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n constructor(storage, name, options) {\n var _a, _b, _c, _d;\n options = options || {};\n // Allow for \"gs://\"-style input, and strip any trailing slashes.\n name = name.replace(/^gs:\\/\\//, '').replace(/\\/+$/, '');\n const requestQueryObject = {};\n if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n requestQueryObject.ifGenerationMatch =\n options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n requestQueryObject.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n requestQueryObject.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n requestQueryObject.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n const userProject = options.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * Create a bucket.\n *\n * @method Bucket#create\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.create(function(err, bucket, apiResponse) {\n * if (!err) {\n * // The bucket was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.create().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * IamDeleteBucketOptions Configuration options.\n * @property {boolean} [ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} DeleteBucketResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation}\n *\n * @method Bucket#delete\n * @param {DeleteBucketOptions} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the bucket does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * bucket.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_delete_bucket\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} BucketExistsResponse\n * @property {boolean} 0 Whether the {@link Bucket} exists.\n */\n /**\n * @callback BucketExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link Bucket} exists.\n */\n /**\n * Check if the bucket exists.\n *\n * @method Bucket#exists\n * @param {BucketExistsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {BucketExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * 
const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get()\n * @property {boolean} [autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetBucketResponse\n * @property {Bucket} 0 The {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The {@link Bucket}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a bucket if it exists.\n *\n * You may optionally use this to \"get or create\" an object by providing\n * an object with `autoCreate` set to `true`. Any extra configuration that\n * is normally required for the `create` method must be contained within\n * this object as well.\n *\n * @method Bucket#get\n * @param {GetBucketOptions} [options] Configuration options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. Default: `false`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.get(function(err, bucket, apiResponse) {\n * // `bucket.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.get().then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetBucketMetadataResponse\n * @property {object} 0 The bucket metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Get the bucket's metadata.\n *\n * To set metadata, see {@link Bucket#setMetadata}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation}\n *\n * @method Bucket#getMetadata\n * @param {GetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is 
omitted, we'll return a Promise.\n * //-\n * bucket.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_get_requester_pays_status\n * Example of retrieving the requester pays status of a bucket:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} SetBucketMetadataResponse\n * @property {object} apiResponse The full API response.\n */\n /**\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * Set the bucket's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @method Bucket#setMetadata\n * @param {object} metadata The metadata you wish to set.\n * @param {SetBucketMetadataOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Set website metadata field on the bucket.\n * //-\n * const metadata = {\n * website: {\n * mainPageSuffix: 'http://example.com',\n * notFoundPage: 'http://example.com/404.html'\n * }\n * };\n *\n * bucket.setMetadata(metadata, function(err, apiResponse) {});\n *\n * //-\n * // Enable versioning for your bucket.\n * //-\n * bucket.setMetadata({\n * versioning: {\n * enabled: true\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Enable KMS encryption for objects within this bucket.\n * //-\n * bucket.setMetadata({\n * encryption: {\n * defaultKmsKeyName: 'projects/grape-spaceship-123/...'\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Set the default event-based hold value for new objects in this\n * // bucket.\n * //-\n * bucket.setMetadata({\n * defaultEventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Remove object lifecycle rules.\n * //-\n * bucket.setMetadata({\n * lifecycle: null\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: storage,\n baseUrl: '/b',\n id: name,\n createMethod: storage.createBucket.bind(storage),\n methods,\n });\n this.name = name;\n this.storage = storage;\n this.userProject = options.userProject;\n this.acl = new acl_js_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.acl.default = new acl_js_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/defaultObjectAcl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.storage.crc32cGenerator;\n this.iam = new iam_js_1.Iam(this);\n this.getFilesStream = paginator_1.paginator.streamify('getFiles');\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n this.instancePreconditionOpts = options === 
null || options === void 0 ? void 0 : options.preconditionOpts;\n }\n /**\n * The bucket's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * // `gs://my-bucket`\n * const href = bucket.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = new url_1.URL('gs://');\n uri.host = this.name;\n return uri;\n }\n /**\n * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule().\n * @property {boolean} [append=true] The new rules will be appended to any\n * pre-existing rules.\n */\n /**\n *\n * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects\n * in this bucket.\n * @property {string|object} action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @property {object} condition Condition a bucket must meet before the\n * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @property {string} [storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to. Please see\n * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions.\n */\n /**\n * Add an object lifecycle management rule to the bucket.\n *\n * By default, an Object Lifecycle Management rule provided to this method\n * will be included to the existing policy. To replace all existing rules,\n * supply the `options` argument, setting `append` to `false`.\n *\n * To add multiple rules, pass a list to the `rule` parameter. Calling this\n * function multiple times asynchronously does not guarantee that all rules\n * are added correctly.\n *\n * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects\n * in this bucket.\n * @param {string|object} rule.action The action to be taken upon matching of\n * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'.\n * **Note**: For configuring a raw-formatted rule object to be passed as `action`\n * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}.\n * @param {object} rule.condition Condition a bucket must meet before the\n * action occurson the bucket. 
Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}.\n * @param {string} [rule.storageClass] When using the `setStorageClass`\n * action, provide this option to dictate which storage class the object\n * should update to.\n * @param {AddLifecycleRuleOptions} [options] Configuration object.\n * @param {boolean} [options.append=true] Append the new rule to the existing\n * policy.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Automatically have an object deleted from this bucket once it is 3 years\n * // of age.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * const lifecycleRules = bucket.metadata.lifecycle.rule;\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // By default, the rule you provide will be added to the existing policy.\n * // Optionally, you can disable this behavior to replace all of the\n * // pre-existing rules.\n * //-\n * const options = {\n * append: false\n * };\n *\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * age: 365 * 3 // Specified in days.\n * }\n * }, options, function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // All rules have been replaced with the new \"delete\" rule.\n *\n * // Iterate over the Object Lifecycle Management rules on this bucket.\n * lifecycleRules.forEach(lifecycleRule => {});\n * });\n *\n * //-\n * // For objects created before 2018, \"downgrade\" the storage class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'setStorageClass',\n * storageClass: 'COLDLINE',\n * condition: {\n * createdBefore: new Date('2018')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete objects created before 2016 which have the Coldline storage\n * // class.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * matchesStorageClass: [\n * 'COLDLINE'\n * ],\n * createdBefore: new Date('2016')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceNoncurrentTime: 100\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a noncurrent timestamp before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * noncurrentTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Delete object that has a customTime that is at least 100 days.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * daysSinceCustomTime: 100\n * }\n * }, function(err, apiResponse) ());\n *\n * //-\n * // Delete object that has a customTime before 2020-01-01.\n * //-\n * bucket.addLifecycleRule({\n * action: 'delete',\n * condition: {\n * customTimeBefore: new Date('2020-01-01')\n * }\n * }, function(err, apiResponse) {});\n * ```\n */\n addLifecycleRule(rule, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback 
=== 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n options = options || {};\n const rules = Array.isArray(rule) ? rule : [rule];\n for (const curRule of rules) {\n if (curRule.condition.createdBefore instanceof Date) {\n curRule.condition.createdBefore = curRule.condition.createdBefore\n .toISOString()\n .replace(/T.+$/, '');\n }\n if (curRule.condition.customTimeBefore instanceof Date) {\n curRule.condition.customTimeBefore = curRule.condition.customTimeBefore\n .toISOString()\n .replace(/T.+$/, '');\n }\n if (curRule.condition.noncurrentTimeBefore instanceof Date) {\n curRule.condition.noncurrentTimeBefore =\n curRule.condition.noncurrentTimeBefore\n .toISOString()\n .replace(/T.+$/, '');\n }\n }\n if (options.append === false) {\n this.setMetadata({ lifecycle: { rule: rules } }, options, callback);\n return;\n }\n // The default behavior appends the previously-defined lifecycle rules with\n // the new ones just passed in by the user.\n this.getMetadata((err, metadata) => {\n var _a, _b;\n if (err) {\n callback(err);\n return;\n }\n const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule)\n ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule\n : [];\n this.setMetadata({\n lifecycle: { rule: currentLifecycleRules.concat(rules) },\n }, options, callback);\n });\n }\n /**\n * @typedef {object} CombineOptions\n * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CombineCallback\n * @param {?Error} err Request error, if any.\n * @param {File} newFile The new {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CombineResponse\n * @property {File} 0 The new {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * Combine multiple files into one new file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation}\n *\n * @throws {Error} if a non-array is provided as sources argument.\n * @throws {Error} if no sources are provided.\n * @throws {Error} if no destination is provided.\n *\n * @param {string[]|File[]} sources The source files that will be\n * combined.\n * @param {string|File} destination The file you would like the\n * source files combined into.\n * @param {CombineOptions} [options] Configuration options.\n * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of\n * the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. 
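// A condensed sketch of addLifecycleRule() as implemented above: Date-valued
// conditions are trimmed to YYYY-MM-DD strings and, unless { append: false } is
// passed, the new rules are merged with the bucket's existing lifecycle policy.
// The bucket name and 30-day age are illustrative.
const { Storage } = require('@google-cloud/storage');
const lifecycleDemoBucket = new Storage().bucket('my-logs-bucket');
lifecycleDemoBucket.addLifecycleRule(
  { action: 'delete', condition: { age: 30 } }, // delete objects older than 30 days
  { append: true },                             // keep any pre-existing rules (the default)
  (err, apiResponse) => {
    if (err) console.error(err);
  }
);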
Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {CombineCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const logBucket = storage.bucket('log-bucket');\n *\n * const sources = [\n * logBucket.file('2013-logs.txt'),\n * logBucket.file('2014-logs.txt')\n * ];\n *\n * const allLogs = logBucket.file('all-logs.txt');\n *\n * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) {\n * // newFile === allLogs\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * logBucket.combine(sources, allLogs).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n combine(sources, destination, optionsOrCallback, callback) {\n var _a;\n if (!Array.isArray(sources) || sources.length === 0) {\n throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE);\n }\n if (!destination) {\n throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required\n AvailableServiceObjectMethods.setMetadata, // Same as above\n options);\n const convertToFile = (file) => {\n if (file instanceof file_js_1.File) {\n return file;\n }\n return this.file(file);\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n sources = sources.map(convertToFile);\n const destinationFile = convertToFile(destination);\n callback = callback || index_js_1.util.noop;\n if (!destinationFile.metadata.contentType) {\n const destinationContentType = mime.contentType(destinationFile.name);\n if (destinationContentType) {\n destinationFile.metadata.contentType = destinationContentType;\n }\n }\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) ===\n undefined &&\n options.ifGenerationMatch === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n if (options.ifGenerationMatch === undefined) {\n Object.assign(options, destinationFile.instancePreconditionOpts, options);\n }\n // Make the request from the destination File object.\n destinationFile.request({\n method: 'POST',\n uri: '/compose',\n maxRetries,\n json: {\n destination: {\n contentType: destinationFile.metadata.contentType,\n },\n sourceObjects: sources.map(source => {\n const sourceObject = {\n name: source.name,\n };\n if (source.metadata && source.metadata.generation) {\n sourceObject.generation = parseInt(source.metadata.generation.toString());\n }\n return sourceObject;\n }),\n },\n qs: options,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n callback(null, destinationFile, resp);\n });\n }\n /**\n * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}.\n *\n * @typedef {object} CreateChannelConfig\n * @property {string} address The address where notifications are\n * delivered for this channel.\n * @property {string} [delimiter] Returns results in a directory-like mode.\n * @property {number} [maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {string} [projection=noAcl] Set of properties to return.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions=false] If `true`, lists all versions of an object\n * as distinct results.\n */\n /**\n * @typedef {object} CreateChannelOptions\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} CreateChannelResponse\n * @property {Channel} 0 The new {@link Channel}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateChannelCallback\n * @param {?Error} err Request error, if any.\n * @param {Channel} channel The new {@link Channel}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Create a channel that will be notified when objects in this bucket changes.\n *\n * @throws {Error} If an ID is not provided.\n * @throws {Error} If an address is not provided.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation}\n *\n * @param {string} id The ID of the channel to create.\n * @param {CreateChannelConfig} config Configuration for creating channel.\n * @param {string} config.address The address where notifications are\n * delivered for this channel.\n * @param {string} [config.delimiter] Returns results in a directory-like mode.\n * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes`\n * to return in a single page of responses.\n * @param {string} [config.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} 
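// A sketch of combine() as implemented above: several source objects are
// composed into a single destination object through the JSON API's /compose
// endpoint, and the destination's contentType is inferred when it is not set.
// Bucket and file names mirror the illustrative JSDoc example.
const { Storage } = require('@google-cloud/storage');
const logBucket = new Storage().bucket('log-bucket');
const logSources = [logBucket.file('2013-logs.txt'), logBucket.file('2014-logs.txt')];
logBucket.combine(logSources, logBucket.file('all-logs.txt'))
  .then(([newFile, apiResponse]) => {
    // newFile refers to the composed all-logs.txt object
  });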
[config.prefix] Filter results to objects whose names begin\n * with this prefix.\n * @param {string} [config.projection=noAcl] Set of properties to return.\n * @param {string} [config.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [config.versions=false] If `true`, lists all versions of an object\n * as distinct results.\n * @param {CreateChannelOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateChannelCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const id = 'new-channel-id';\n *\n * const config = {\n * address: 'https://...'\n * };\n *\n * bucket.createChannel(id, config, function(err, channel, apiResponse) {\n * if (!err) {\n * // Channel created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.createChannel(id, config).then(function(data) {\n * const channel = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n createChannel(id, config, optionsOrCallback, callback) {\n if (typeof id !== 'string') {\n throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED);\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n method: 'POST',\n uri: '/o/watch',\n json: Object.assign({\n id,\n type: 'web_hook',\n }, config),\n qs: options,\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const resourceId = apiResponse.resourceId;\n const channel = this.storage.channel(id, resourceId);\n channel.metadata = apiResponse;\n callback(null, channel, apiResponse);\n });\n }\n /**\n * Metadata to set for the Notification.\n *\n * @typedef {object} CreateNotificationOptions\n * @property {object} [customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @property {string[]} [eventTypes] If present, only send notifications about\n * listed event types. If empty, sent notifications for all event types.\n * @property {string} [objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @property {string} [payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback CreateNotificationCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification} notification The new {@link Notification}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} CreateNotificationResponse\n * @property {Notification} 0 The new {@link Notification}.\n * @property {object} 1 The full API response.\n */\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. 
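// Illustrative async/await sketch of the createChannel() call documented above
// (not part of the bundled action code). Bucket name, channel id and webhook
// address are placeholders.
const {Storage} = require('@google-cloud/storage');

async function watchBucket() {
  const storage = new Storage();
  const bucket = storage.bucket('albums');
  // With no callback, the Promise form resolves with [channel, apiResponse].
  const [channel, apiResponse] = await bucket.createChannel('new-channel-id', {
    address: 'https://example.com/storage-notifications', // placeholder endpoint
  });
  console.log('watching bucket via channel', channel.metadata.id, apiResponse.resourceId);
}

watchBucket().catch(console.error);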
If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationOptions} [options] Metadata to set for the\n * notification.\n * @param {object} [options.customAttributes] An optional list of additional\n * attributes to attach to each Cloud PubSub message published for this\n * notification subscription.\n * @param {string[]} [options.eventTypes] If present, only send notifications about\n * listed event types. If empty, sent notifications for all event types.\n * @param {string} [options.objectNamePrefix] If present, only apply this\n * notification configuration to object names that begin with this prefix.\n * @param {string} [options.payloadFormat] The desired content of the Payload.\n * Defaults to `JSON_API_V1`.\n *\n * Acceptable values are:\n * - `JSON_API_V1`\n *\n * - `NONE`\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n * @see Notification#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const callback = function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * };\n *\n * myBucket.createNotification('my-topic', callback);\n *\n * //-\n * // Configure the nofiication by providing Notification metadata.\n * //-\n * const metadata = {\n * objectNamePrefix: 'prefix-'\n * };\n *\n * myBucket.createNotification('my-topic', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * myBucket.createNotification('my-topic').then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/createNotification.js\n * region_tag:storage_create_bucket_notifications\n * Another example:\n */\n createNotification(topic, optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n const topicIsObject = topic !== null && typeof topic === 'object';\n if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n topic = topic.name;\n }\n if (typeof topic !== 'string') {\n throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED);\n }\n const body = Object.assign({ topic }, options);\n if (body.topic.indexOf('projects') !== 0) {\n body.topic = 'projects/{{projectId}}/topics/' + body.topic;\n }\n body.topic = '//pubsub.googleapis.com/' + body.topic;\n if (!body.payloadFormat) {\n body.payloadFormat = 'JSON_API_V1';\n }\n const query = {};\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n this.request({\n method: 'POST',\n uri: '/notificationConfigs',\n json: (0, util_js_1.convertObjKeysToSnakeCase)(body),\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n const notification = this.notification(apiResponse.id);\n notification.metadata = 
apiResponse;\n callback(null, notification, apiResponse);\n });\n }\n /**\n * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles}\n * for all of the supported properties.\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n */\n /**\n * @callback DeleteFilesCallback\n * @param {?Error|?Error[]} err Request error, if any, or array of errors from\n * files that were not able to be deleted.\n * @param {object} [apiResponse] The full API response.\n */\n /**\n * Iterate over the bucket's files, calling `file.delete()` on each.\n *\n * This is not an atomic request. A delete attempt will be\n * made for each file individually. Any one can fail, in which case only a\n * portion of the files you intended to be deleted would have.\n *\n * Operations are performed in parallel, up to 10 at once. The first error\n * breaks the loop and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors until all files have had a chance\n * to be processed.\n *\n * File preconditions cannot be passed to this function. It will not retry unless\n * the idempotency strategy is set to retry always.\n *\n * The `query` object passed as the first argument will also be passed to\n * {@link Bucket#getFiles}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles}\n * @param {boolean} [query.force] Suppress errors until all files have been\n * processed.\n * @param {DeleteFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the files in the bucket.\n * //-\n * bucket.deleteFiles(function(err) {});\n *\n * //-\n * // By default, if a file cannot be deleted, this method will stop deleting\n * // files from your bucket. You can override this setting with `force:\n * // true`.\n * //-\n * bucket.deleteFiles({\n * force: true\n * }, function(errors) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * });\n *\n * //-\n * // The first argument to this method acts as a query to\n * // {@link Bucket#getFiles}. 
As an example, you can delete files\n * // which match a prefix.\n * //-\n * bucket.deleteFiles({\n * prefix: 'images/'\n * }, function(err) {\n * if (!err) {\n * // All files in the `images` directory have been deleted.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteFiles().then(function() {});\n * ```\n */\n deleteFiles(queryOrCallback, callback) {\n let query = {};\n if (typeof queryOrCallback === 'function') {\n callback = queryOrCallback;\n }\n else if (queryOrCallback) {\n query = queryOrCallback;\n }\n const MAX_PARALLEL_LIMIT = 10;\n const MAX_QUEUE_SIZE = 1000;\n const errors = [];\n const deleteFile = (file) => {\n return file.delete(query).catch(err => {\n if (!query.force) {\n throw err;\n }\n errors.push(err);\n });\n };\n (async () => {\n try {\n let promises = [];\n const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT);\n const filesStream = this.getFilesStream(query);\n for await (const curFile of filesStream) {\n if (promises.length >= MAX_QUEUE_SIZE) {\n await Promise.all(promises);\n promises = [];\n }\n promises.push(limit(() => deleteFile(curFile)).catch(e => {\n filesStream.destroy();\n throw e;\n }));\n }\n await Promise.all(promises);\n callback(errors.length > 0 ? errors : null);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @deprecated\n * @typedef {array} DeleteLabelsResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @deprecated\n * @callback DeleteLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata Bucket's metadata.\n */\n /**\n * @deprecated Use setMetadata directly\n * Delete one or more labels from this bucket.\n *\n * @param {string|string[]} [labels] The labels to delete. 
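// Hedged sketch of the deleteFiles() behaviour documented above: remove every
// object under a prefix, queueing per-file errors instead of stopping at the
// first one. Bucket name and prefix are placeholders.
const {Storage} = require('@google-cloud/storage');

async function purgePrefix() {
  const storage = new Storage();
  const bucket = storage.bucket('albums');
  // `force: true` collects individual delete errors; if any occurred, the
  // returned Promise rejects with the array of errors once all files were tried.
  await bucket.deleteFiles({prefix: 'images/', force: true});
  console.log('all objects under images/ processed');
}

purgePrefix().catch(errors => console.error('some deletes failed:', errors));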
If no labels are\n * provided, all of the labels are removed.\n * @param {DeleteLabelsCallback} [callback] Callback function.\n * @param {DeleteLabelsOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Delete all of the labels from this bucket.\n * //-\n * bucket.deleteLabels(function(err, apiResponse) {});\n *\n * //-\n * // Delete a single label.\n * //-\n * bucket.deleteLabels('labelone', function(err, apiResponse) {});\n *\n * //-\n * // Delete a specific set of labels.\n * //-\n * bucket.deleteLabels([\n * 'labelone',\n * 'labeltwo'\n * ], function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.deleteLabels().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) {\n let labels = new Array();\n let options = {};\n if (typeof labelsOrCallbackOrOptions === 'function') {\n callback = labelsOrCallbackOrOptions;\n }\n else if (typeof labelsOrCallbackOrOptions === 'string') {\n labels = [labelsOrCallbackOrOptions];\n }\n else if (Array.isArray(labelsOrCallbackOrOptions)) {\n labels = labelsOrCallbackOrOptions;\n }\n else if (labelsOrCallbackOrOptions) {\n options = labelsOrCallbackOrOptions;\n }\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n const deleteLabels = (labels) => {\n const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => {\n nullLabelMap[labelKey] = null;\n return nullLabelMap;\n }, {});\n if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) {\n this.setLabels(nullLabelMap, options, callback);\n }\n else {\n this.setLabels(nullLabelMap, callback);\n }\n };\n if (labels.length === 0) {\n this.getLabels((err, labels) => {\n if (err) {\n callback(err);\n return;\n }\n deleteLabels(Object.keys(labels));\n });\n }\n else {\n deleteLabels(labels);\n }\n }\n /**\n * @typedef {array} DisableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DisableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only\n *\n * This feature is not yet widely-available.\n *
\n *\n * Disable `requesterPays` functionality from this bucket.\n *\n * @param {DisableRequesterPaysCallback} [callback] Callback function.\n * @param {DisableRequesterPaysOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.disableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality disabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.disableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_disable_requester_pays\n * Example of disabling requester pays:\n */\n disableRequesterPays(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: false,\n },\n }, options, callback);\n }\n /**\n * Configuration object for enabling logging.\n *\n * @typedef {object} EnableLoggingOptions\n * @property {string|Bucket} [bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @property {string} prefix A unique prefix for log object names.\n */\n /**\n * Enable logging functionality for this bucket. This will make two API\n * requests, first to grant Cloud Storage WRITE permission to the bucket, then\n * to set the appropriate configuration on the Bucket's metadata.\n *\n * @param {EnableLoggingOptions} config Configuration options.\n * @param {string|Bucket} [config.bucket] The bucket for the log entries. By\n * default, the current bucket is used.\n * @param {string} config.prefix A unique prefix for log object names.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const config = {\n * prefix: 'log'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {\n * if (!err) {\n * // Logging functionality enabled successfully.\n * }\n * });\n *\n * ```\n * @example\n * Optionally, provide a destination bucket.\n * ```\n * const config = {\n * prefix: 'log',\n * bucket: 'destination-bucket'\n * };\n *\n * bucket.enableLogging(config, function(err, apiResponse) {});\n * ```\n *\n * @example\n * If the callback is omitted, we'll return a Promise.\n * ```\n * bucket.enableLogging(config).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n enableLogging(config, callback) {\n if (!config ||\n typeof config === 'function' ||\n typeof config.prefix === 'undefined') {\n throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED);\n }\n let logBucket = this.id;\n if (config.bucket && config.bucket instanceof Bucket) {\n logBucket = config.bucket.id;\n }\n else if (config.bucket && typeof config.bucket === 'string') {\n logBucket = config.bucket;\n }\n const options = {};\n if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) {\n options.ifMetagenerationMatch = config.ifMetagenerationMatch;\n }\n if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) {\n options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch;\n }\n (async () => {\n try {\n const [policy] = await this.iam.getPolicy();\n policy.bindings.push({\n members: ['group:cloud-storage-analytics@google.com'],\n role: 'roles/storage.objectCreator',\n });\n await this.iam.setPolicy(policy);\n this.setMetadata({\n logging: {\n logBucket,\n logObjectPrefix: config.prefix,\n },\n }, options, callback);\n }\n catch (e) {\n callback(e);\n return;\n }\n })();\n }\n /**\n * @typedef {array} EnableRequesterPaysResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback EnableRequesterPaysCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n *
\n * Early Access Testers Only\n *\n * This feature is not yet widely-available.\n *
\n *\n * Enable `requesterPays` functionality for this bucket. This enables you, the\n * bucket owner, to have the requesting user assume the charges for the access\n * to your bucket and its contents.\n *\n * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback]\n * Callback function or precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.enableRequesterPays(function(err, apiResponse) {\n * if (!err) {\n * // requesterPays functionality enabled successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.enableRequesterPays().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/requesterPays.js\n * region_tag:storage_enable_requester_pays\n * Example of enabling requester pays:\n */\n enableRequesterPays(optionsOrCallback, cb) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.setMetadata({\n billing: {\n requesterPays: true,\n },\n }, options, cb);\n }\n /**\n * Create a {@link File} object. See {@link File} to see how to handle\n * the different use cases you may have.\n *\n * @param {string} name The name of the file in this bucket.\n * @param {FileOptions} [options] Configuration options.\n * @param {string|number} [options.generation] Only use a specific revision of\n * this file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * KMS key ring must use the same location as the bucket.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @returns {File}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-existing-file.png');\n * ```\n */\n file(name, options) {\n if (!name) {\n throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME);\n }\n return new file_js_1.File(this, name, options);\n }\n /**\n * @typedef {array} GetFilesResponse\n * @property {File[]} 0 Array of {@link File} instances.\n * @param {object} nextQuery 1 A query object to receive more results.\n * @param {object} apiResponse 2 The full API response.\n */\n /**\n * @callback GetFilesCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Array of {@link File} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Query object for listing files.\n *\n * @typedef {object} GetFilesOptions\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {string} [delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. 
Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @property {string} [endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {boolean} [includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @property {string} [prefix] Filter results to objects whose names begin\n * with this prefix.\n * @property {string} [matchGlob] A glob pattern used to filter results,\n * for example foo*bar\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {boolean} [versions] If true, returns File objects scoped to\n * their versions.\n */\n /**\n * Get {@link File} objects for the files currently in the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation}\n *\n * @param {GetFilesOptions} [query] Query object for listing files.\n * @param {boolean} [query.autoPaginate=true] Have pagination handled\n * automatically.\n * @param {string} [query.delimiter] Results will contain only objects whose\n * names, aside from the prefix, do not contain delimiter. Objects whose\n * names, aside from the prefix, contain delimiter will have their name\n * truncated after the delimiter, returned in `apiResponse.prefixes`.\n * Duplicate prefixes are omitted.\n * @param {string} [query.endOffset] Filter results to objects whose names are\n * lexicographically before endOffset. 
If startOffset is also set, the objects\n * listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in\n * exactly one instance of delimiter have their metadata included in items[]\n * in addition to the relevant part of the object name appearing in prefixes[].\n * @param {string} [query.prefix] Filter results to objects whose names begin\n * with this prefix.\n * @param {number} [query.maxApiCalls] Maximum number of API calls to make.\n * @param {number} [query.maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @param {string} [query.pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @param {string} [query.startOffset] Filter results to objects whose names are\n * lexicographically equal to or after startOffset. If endOffset is also set,\n * the objects listed have names between startOffset (inclusive) and endOffset (exclusive).\n * @param {string} [query.userProject] The ID of the project which will be\n * billed for the request.\n * @param {boolean} [query.versions] If true, returns File objects scoped to\n * their versions.\n * @param {GetFilesCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getFiles(function(err, files) {\n * if (!err) {\n * // files is an array of File objects.\n * }\n * });\n *\n * //-\n * // If your bucket has versioning enabled, you can get all of your files\n * // scoped to their generation.\n * //-\n * bucket.getFiles({\n * versions: true\n * }, function(err, files) {\n * // Each file is scoped to its generation.\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, files, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * bucket.getFiles(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * files[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * files[0].getMetadata(function(err, metadata) {});\n * };\n *\n * bucket.getFiles({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getFiles().then(function(data) {\n * const files = data[0];\n * });\n *\n * ```\n * @example\n *
Simulating a File System\n *\n * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.\n *\n * Consider the following remote objects:\n *\n *   1. \"a\"\n *   2. \"a/b/c/d\"\n *   3. \"b/d/e\"\n *\n * Using a delimiter of `/` will return a single file, \"a\".\n *\n * `apiResponse.prefixes` will return the \"sub-directories\" that were found:\n *\n *   1. \"a/\"\n *   2. \"b/\"
\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // files = [\n * // {File} // File object for file \"a\"\n * // ]\n *\n * // apiResponse.prefixes = [\n * // 'a/',\n * // 'b/'\n * // ]\n * });\n * ```\n *\n * @example\n * Using prefixes, it's now possible to simulate a file system with follow-up requests.\n * ```\n * bucket.getFiles({\n * autoPaginate: false,\n * delimiter: '/',\n * prefix: 'a/'\n * }, function(err, files, nextQuery, apiResponse) {\n * // No files found within \"directory\" a.\n * // files = []\n *\n * // However, a \"sub-directory\" was found.\n * // This prefix can be used to continue traversing the \"file system\".\n * // apiResponse.prefixes = [\n * // 'a/b/'\n * // ]\n * });\n * ```\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files\n * Another example:\n *\n * @example include:samples/files.js\n * region_tag:storage_list_files_with_prefix\n * Example of listing files, filtered by a prefix:\n */\n getFiles(queryOrCallback, callback) {\n let query = typeof queryOrCallback === 'object' ? queryOrCallback : {};\n if (!callback) {\n callback = queryOrCallback;\n }\n query = Object.assign({}, query);\n this.request({\n uri: '/o',\n qs: query,\n }, (err, resp) => {\n if (err) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const files = itemsArray.map((file) => {\n const options = {};\n if (query.versions) {\n options.generation = file.generation;\n }\n if (file.kmsKeyName) {\n options.kmsKeyName = file.kmsKeyName;\n }\n const fileInstance = this.file(file.name, options);\n fileInstance.metadata = file;\n return fileInstance;\n });\n let nextQuery = null;\n if (resp.nextPageToken) {\n nextQuery = Object.assign({}, query, {\n pageToken: resp.nextPageToken,\n });\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n callback(null, files, nextQuery, resp);\n });\n }\n /**\n * @deprecated\n * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels().\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @deprecated\n * @typedef {array} GetLabelsResponse\n * @property {object} 0 Object of labels currently set on this bucket.\n */\n /**\n * @deprecated\n * @callback GetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} labels Object of labels currently set on this bucket.\n */\n /**\n * @deprecated Use getMetadata directly.\n * Get the labels currently set on this bucket.\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.getLabels(function(err, labels) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // labels = {\n * // label: 'labelValue',\n * // ...\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getLabels().then(function(data) {\n * const labels = data[0];\n * });\n * ```\n */\n getLabels(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n 
callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n callback(err, null);\n return;\n }\n callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {});\n });\n }\n /**\n * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback GetNotificationsCallback\n * @param {?Error} err Request error, if any.\n * @param {Notification[]} notifications Array of {@link Notification}\n * instances.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetNotificationsResponse\n * @property {Notification[]} 0 Array of {@link Notification} instances.\n * @property {object} 1 The full API response.\n */\n /**\n * Retrieves a list of notification subscriptions for a given bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list}\n *\n * @param {GetNotificationsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.getNotifications(function(err, notifications, apiResponse) {\n * if (!err) {\n * // notifications is an array of Notification objects.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getNotifications().then(function(data) {\n * const notifications = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/listNotifications.js\n * region_tag:storage_list_bucket_notifications\n * Another example:\n */\n getNotifications(optionsOrCallback, callback) {\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = optionsOrCallback;\n }\n this.request({\n uri: '/notificationConfigs',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const notifications = itemsArray.map((notification) => {\n const notificationInstance = this.notification(notification.id);\n notificationInstance.metadata = notification;\n return notificationInstance;\n });\n callback(null, notifications, resp);\n });\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * @typedef {object} GetBucketSignedUrlConfig\n * @property {string} action Currently only supports \"list\" (HTTP: GET).\n * @property {*} expires A timestamp when this link will expire. 
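// Minimal sketch of getFiles() as documented above, filtering by prefix and
// capping the page size; bucket name and prefix are placeholders.
const {Storage} = require('@google-cloud/storage');

async function listLogs() {
  const storage = new Storage();
  const bucket = storage.bucket('log-bucket');
  const [files] = await bucket.getFiles({prefix: '2013-', maxResults: 50});
  for (const file of files) {
    // `metadata` is populated at fetch time, as noted in the JSDoc above.
    console.log(file.name, file.metadata.size);
  }
}

listLogs().catch(console.error);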
Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @property {string} [version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @property {string} [cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @property {object} [extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [queryParams] Additional query parameters to include\n * in the signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to a bucket.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {GetBucketSignedUrlConfig} config Configuration object.\n * @param {string} config.action Currently only supports \"list\" (HTTP: GET).\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). 
Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @property {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * //-\n * // Generate a URL that allows temporary access to list files in a bucket.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'list',\n * expires: '03-17-2025'\n * };\n *\n * bucket.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The bucket is now available to be listed from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n * ```\n */\n getSignedUrl(cfg, callback) {\n const method = BucketActionToHTTPMethod[cfg.action];\n const signConfig = {\n method,\n expires: cfg.expires,\n version: cfg.version,\n cname: cfg.cname,\n extensionHeaders: cfg.extensionHeaders || {},\n queryParams: cfg.queryParams || {},\n };\n if (!this.signer) {\n this.signer = new signer_js_1.URLSigner(this.storage.authClient, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback BucketLockCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Lock a previously-defined retention policy. This will prevent changes to\n * the policy.\n *\n * @throws {Error} if a metageneration is not provided.\n *\n * @param {number|string} metageneration The bucket's metageneration. 
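// Hedged sketch of the bucket-level getSignedUrl() documented above: a v4 URL
// that permits listing the bucket for ten minutes. Bucket name is a placeholder.
const {Storage} = require('@google-cloud/storage');

async function signedListUrl() {
  const storage = new Storage();
  const bucket = storage.bucket('albums');
  const [url] = await bucket.getSignedUrl({
    action: 'list',                        // only 'list' (HTTP GET) is supported here
    version: 'v4',                         // v4 limits expiration to 7 days from now
    expires: Date.now() + 10 * 60 * 1000,  // any value accepted by new Date()
  });
  console.log('GET this URL to list objects:', url);
}

signedListUrl().catch(console.error);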
This is\n * accesssible from calling {@link File#getMetadata}.\n * @param {BucketLockCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const metageneration = 2;\n *\n * bucket.lock(metageneration, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.lock(metageneration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n lock(metageneration, callback) {\n const metatype = typeof metageneration;\n if (metatype !== 'number' && metatype !== 'string') {\n throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED);\n }\n this.request({\n method: 'POST',\n uri: '/lockRetentionPolicy',\n qs: {\n ifMetagenerationMatch: metageneration,\n },\n }, callback);\n }\n /**\n * @typedef {array} MakeBucketPrivateResponse\n * @property {File[]} 0 List of files made private.\n */\n /**\n * @callback MakeBucketPrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made private.\n */\n /**\n * @typedef {object} MakeBucketPrivateOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Make the bucket listing private.\n *\n * You may also choose to make the contents of the bucket private by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePrivate} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {Metadata} [options.metadata] Define custom metadata properties to define\n * along with the operation.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {MakeBucketPrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket private.\n * //-\n * bucket.makePrivate(function(err) {});\n *\n * //-\n * // Make the bucket and its contents private.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePrivate(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents private, using force to suppress errors\n * // until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePrivate(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made private in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePrivate(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b, _c, _d;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options.private = true;\n const query = {\n predefinedAcl: 'projectPrivate',\n };\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) {\n query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch;\n }\n if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) {\n query.ifGenerationNotMatch =\n options.preconditionOpts.ifGenerationNotMatch;\n }\n if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) {\n query.ifMetagenerationMatch =\n options.preconditionOpts.ifMetagenerationMatch;\n }\n if ((_d = options.preconditionOpts) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationNotMatch) {\n query.ifMetagenerationNotMatch =\n options.preconditionOpts.ifMetagenerationNotMatch;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a bucket\n // so acl must explicitly be nullified.\n const metadata = { ...options.metadata, acl: null };\n this.setMetadata(metadata, query, (err) => {\n if (err) {\n callback(err);\n }\n const internalCall = () => {\n if (options.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options);\n }\n return Promise.resolve([]);\n };\n internalCall()\n .then(files => callback(null, files))\n .catch(callback);\n });\n }\n /**\n * @typedef {object} MakeBucketPublicOptions\n * @property {boolean} [includeFiles=false] Make each file in the bucket\n * private.\n * @property {boolean} [force] Queue errors occurred while making files\n * private until all files have been processed.\n */\n /**\n * @callback MakeBucketPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files List of files made public.\n */\n /**\n * @typedef {array} MakeBucketPublicResponse\n * @property {File[]} 0 List of files made public.\n */\n /**\n * Make the bucket publicly readable.\n *\n * You may also choose to make the contents of the bucket publicly readable by\n * specifying `includeFiles: true`. This will automatically run\n * {@link File#makePublic} for every file in the bucket.\n *\n * When specifying `includeFiles: true`, use `force: true` to delay execution\n * of your callback until all files have been processed. By default, the\n * callback is executed after the first error. Use `force` to queue such\n * errors until all files have been processed, after which they will be\n * returned as an array as the first argument to your callback.\n *\n * NOTE: This may cause the process to be long-running and use a high number\n * of requests. 
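// Sketch of makePrivate() with the options documented above: also make every
// file private and queue errors until all files have been processed.
// Bucket name is a placeholder.
const {Storage} = require('@google-cloud/storage');

async function lockDownBucket() {
  const storage = new Storage();
  const bucket = storage.bucket('albums');
  const [files] = await bucket.makePrivate({includeFiles: true, force: true});
  console.log(`${files.length} files made private`);
}

lockDownBucket().catch(console.error);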
Use with caution.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation}\n *\n * @param {MakeBucketPublicOptions} [options] Configuration options.\n * @param {boolean} [options.includeFiles=false] Make each file in the bucket\n * private.\n * @param {boolean} [options.force] Queue errors occurred while making files\n * private until all files have been processed.\n * @param {MakeBucketPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Make the bucket publicly readable.\n * //-\n * bucket.makePublic(function(err) {});\n *\n * //-\n * // Make the bucket and its contents publicly readable.\n * //-\n * const opts = {\n * includeFiles: true\n * };\n *\n * bucket.makePublic(opts, function(err, files) {\n * // `err`:\n * // The first error to occur, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // Make the bucket and its contents publicly readable, using force to\n * // suppress errors until all files have been processed.\n * //-\n * const opts = {\n * includeFiles: true,\n * force: true\n * };\n *\n * bucket.makePublic(opts, function(errors, files) {\n * // `errors`:\n * // Array of errors if any occurred, otherwise null.\n * //\n * // `files`:\n * // Array of files successfully made public in the bucket.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.makePublic(opts).then(function(data) {\n * const files = data[0];\n * });\n * ```\n */\n makePublic(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n const req = { public: true, ...options };\n this.acl\n .add({\n entity: 'allUsers',\n role: 'READER',\n })\n .then(() => {\n return this.acl.default.add({\n entity: 'allUsers',\n role: 'READER',\n });\n })\n .then(() => {\n if (req.includeFiles) {\n return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req);\n }\n return [];\n })\n .then(files => callback(null, files), callback);\n }\n /**\n * Get a reference to a Cloud Pub/Sub Notification.\n *\n * @param {string} id ID of notification.\n * @returns {Notification}\n * @see Notification\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const notification = bucket.notification('1');\n * ```\n */\n notification(id) {\n if (!id) {\n throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID);\n }\n return new notification_js_1.Notification(this, id);\n }\n /**\n * Remove an already-existing retention policy from this bucket, if it is not\n * locked.\n *\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * bucket.removeRetentionPeriod(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.removeRetentionPeriod().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n removeRetentionPeriod(optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: null,\n }, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) {\n reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject };\n }\n return super.request(reqOpts, callback);\n }\n /**\n * @deprecated\n * @typedef {array} SetLabelsResponse\n * @property {object} 0 The bucket metadata.\n */\n /**\n * @deprecated\n * @callback SetLabelsCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The bucket metadata.\n */\n /**\n * @deprecated\n * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @deprecated Use setMetadata directly.\n * Set labels on the bucket.\n *\n * This makes an underlying call to {@link Bucket#setMetadata}, which\n * is a PATCH request. 
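// Because setLabels()/getLabels() are flagged deprecated above in favour of
// setMetadata(), a PATCH-style label update can be sketched like this;
// bucket name and label values are placeholders.
const {Storage} = require('@google-cloud/storage');

async function tagBucket() {
  const storage = new Storage();
  const bucket = storage.bucket('albums');
  // Only the listed labels are touched; labels not mentioned here are left alone.
  const [metadata] = await bucket.setMetadata({labels: {env: 'prod', team: 'storage'}});
  console.log('labels now:', metadata.labels);
}

tagBucket().catch(console.error);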
This means an individual label can be overwritten, but\n * unmentioned labels will not be touched.\n *\n * @param {object} labels Labels to set on the bucket.\n * @param {SetLabelsOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetLabelsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * const labels = {\n * labelone: 'labelonevalue',\n * labeltwo: 'labeltwovalue'\n * };\n *\n * bucket.setLabels(labels, function(err, metadata) {\n * if (!err) {\n * // Labels set successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setLabels(labels).then(function(data) {\n * const metadata = data[0];\n * });\n * ```\n */\n setLabels(labels, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || index_js_1.util.noop;\n this.setMetadata({ labels }, options, callback);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options);\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * Lock all objects contained in the bucket, based on their creation time. Any\n * attempt to overwrite or delete objects younger than the retention period\n * will result in a `PERMISSION_DENIED` error.\n *\n * An unlocked retention policy can be modified or removed from the bucket via\n * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A\n * locked retention policy cannot be removed or shortened in duration for the\n * lifetime of the bucket. Attempting to remove or decrease period of a locked\n * retention policy will result in a `PERMISSION_DENIED` error. You can still\n * increase the policy.\n *\n * @param {*} duration In seconds, the minimum retention time for all objects\n * contained in this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataCallback} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const DURATION_SECONDS = 15780000; // 6 months.\n *\n * //-\n * // Lock the objects in this bucket for 6 months.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setRetentionPeriod(duration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n this.setMetadata({\n retentionPolicy: {\n retentionPeriod: duration.toString(),\n },\n }, options, callback);\n }\n /**\n *\n * @typedef {object} Cors\n * @property {number} [maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @property {string[]} [method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @property {string[]} [responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n */\n /**\n * This can be used to set the CORS configuration on the bucket.\n *\n * The configuration will be overwritten with the value passed into this.\n *\n * @param {Cors[]} corsConfiguration The new CORS configuration to set\n * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is\n * allowed to make requests before it must repeat the preflight request.\n * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource\n * sharing with this bucket.\n * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin\n * resource sharing with this bucket.\n * @param {SetBucketMetadataCallback} [callback] Callback function.\n * @param {SetBucketMetadataOptions} [options] Options, including precondition options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const bucket = storage.bucket('albums');\n *\n * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour\n * bucket.setCorsConfiguration(corsConfiguration);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setCorsConfiguration(corsConfiguration).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n this.setMetadata({\n cors: corsConfiguration,\n }, options, callback);\n }\n /**\n * @typedef {object} SetBucketStorageClassOptions\n * @property {string} [userProject] - The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetBucketStorageClassCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Set the default storage class for new files in this bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. 
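// Sketch of setCorsConfiguration() using the Cors fields documented above;
// origin and headers are placeholders. Note the call overwrites the bucket's
// entire CORS configuration.
const {Storage} = require('@google-cloud/storage');

async function allowBrowserReads() {
  const storage = new Storage();
  const bucket = storage.bucket('albums');
  const [apiResponse] = await bucket.setCorsConfiguration([
    {
      maxAgeSeconds: 3600,
      method: ['GET', 'HEAD'],
      origin: ['https://app.example.com'],
      responseHeader: ['Content-Type'],
    },
  ]);
  console.log('CORS configuration set:', apiResponse.cors);
}

allowBrowserReads().catch(console.error);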
(`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] - The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n // In case we get input like `storageClass`, convert to `storage_class`.\n storageClass = storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase();\n this.setMetadata({ storageClass }, options, callback);\n }\n /**\n * Set a user project to be billed for all requests made from this Bucket\n * object and any files referenced from this Bucket object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * bucket.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.userProject = userProject;\n const methods = [\n 'create',\n 'delete',\n 'exists',\n 'get',\n 'getMetadata',\n 'setMetadata',\n ];\n methods.forEach(method => {\n const methodConfig = this.methods[method];\n if (typeof methodConfig === 'object') {\n if (typeof methodConfig.reqOpts === 'object') {\n Object.assign(methodConfig.reqOpts.qs, { userProject });\n }\n else {\n methodConfig.reqOpts = {\n qs: { userProject },\n };\n }\n }\n });\n }\n /**\n * @typedef {object} UploadOptions Configuration options for Bucket#upload().\n * @property {string|File} [destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @property {string} [encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @property {boolean} [gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @property {string} [kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. 
Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @property {object} [metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @property {string} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n */\n /**\n * @typedef {array} UploadResponse\n * @property {object} 0 The uploaded {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback UploadCallback\n * @param {?Error} err Request error, if any.\n * @param {object} file The uploaded {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Upload a file to the bucket. This is a convenience method that wraps\n * {@link File#createWriteStream}.\n *\n * Resumable uploads are enabled by default\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}\n *\n * @param {string} pathString The fully qualified path to the file you\n * wish to upload to your bucket.\n * @param {UploadOptions} [options] Configuration options.\n * @param {string|File} [options.destination] The place to save\n * your file. If given a string, the file will be uploaded to the bucket\n * using the string as a filename. When given a File object, your local\n * file will be uploaded to the File object's bucket and under the File\n * object's name. 
Lastly, when this argument is omitted, the file is uploaded\n * to your bucket using the name of the local file.\n * @param {string} [options.encryptionKey] A custom encryption key. See\n * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.\n * @param {boolean} [options.gzip] Automatically gzip the file. This will set\n * `options.metadata.contentEncoding` to `gzip`.\n * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will\n * be used to encrypt the object. Must be in the format:\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.\n * @param {object} [options.metadata] See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}.\n * @param {string} [options.offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @param {string} [options.predefinedAcl] Apply a predefined set of access\n * controls to this object.\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @param {boolean} [options.private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @param {boolean} [options.public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @param {boolean} [options.resumable=true] Resumable uploads are automatically\n * enabled and must be shut off explicitly by setting to false.\n * @param {number} [options.timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @param {string} [options.uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {string|boolean} [options.validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with an\n * MD5 checksum for maximum reliability. CRC32c will provide better\n * performance with less reliability. You may also choose to skip\n * validation completely, however this is **not recommended**.\n * @param {UploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n *\n * //-\n * // Upload a file from a local path.\n * //-\n * bucket.upload('/local/path/image.png', function(err, file, apiResponse) {\n * // Your bucket now contains:\n * // - \"image.png\" (with the contents of `/local/path/image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n *\n * //-\n * // It's not always that easy. 
You will likely want to specify the filename\n * // used when your new file lands in your bucket.\n * //\n * // You may also want to set metadata or customize other options.\n * //-\n * const options = {\n * destination: 'new-image.png',\n * validation: 'crc32c',\n * metadata: {\n * metadata: {\n * event: 'Fall trip to the zoo'\n * }\n * }\n * };\n *\n * bucket.upload('local-image.png', options, function(err, file) {\n * // Your bucket now contains:\n * // - \"new-image.png\" (with the contents of `local-image.png')\n *\n * // `file` is an instance of a File object that refers to your new file.\n * });\n *\n * //-\n * // You can also have a file gzip'd on the fly.\n * //-\n * bucket.upload('index.html', { gzip: true }, function(err, file) {\n * // Your bucket now contains:\n * // - \"index.html\" (automatically compressed with gzip)\n *\n * // Downloading the file with `file.download` will automatically decode\n * the\n * // file.\n * });\n *\n * //-\n * // You may also re-use a File object, {File}, that references\n * // the file you wish to create or overwrite.\n * //-\n * const options = {\n * destination: bucket.file('existing-file.png'),\n * resumable: false\n * };\n *\n * bucket.upload('local-img.png', options, function(err, newFile) {\n * // Your bucket now contains:\n * // - \"existing-file.png\" (with the contents of `local-img.png')\n *\n * // Note:\n * // The `newFile` parameter is equal to `file`.\n * });\n *\n * //-\n * // To use\n * // \n * // Customer-supplied Encryption Keys, provide the `encryptionKey`\n * option.\n * //-\n * const crypto = require('crypto');\n * const encryptionKey = crypto.randomBytes(32);\n *\n * bucket.upload('img.png', {\n * encryptionKey: encryptionKey\n * }, function(err, newFile) {\n * // `img.png` was uploaded with your custom encryption key.\n *\n * // `newFile` is already configured to use the encryption key when making\n * // operations on the remote object.\n *\n * // However, to use your encryption key later, you must create a `File`\n * // instance with the `key` supplied:\n * const file = bucket.file('img.png', {\n * encryptionKey: encryptionKey\n * });\n *\n * // Or with `file#setEncryptionKey`:\n * const file = bucket.file('img.png');\n * file.setEncryptionKey(encryptionKey);\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.upload('local-image.png').then(function(data) {\n * const file = data[0];\n * });\n *\n * To upload a file from a URL, use {@link File#createWriteStream}.\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_upload_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n */\n upload(pathString, optionsOrCallback, callback) {\n var _a, _b;\n const upload = (numberOfRetries) => {\n const returnValue = (0, async_retry_1.default)(async (bail) => {\n await new Promise((resolve, reject) => {\n var _a, _b;\n if (numberOfRetries === 0 &&\n ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? 
void 0 : _b.autoRetry)) {\n newFile.storage.retryOptions.autoRetry = false;\n }\n const writable = newFile.createWriteStream(options);\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n fs.createReadStream(pathString)\n .on('error', bail)\n .pipe(writable)\n .on('error', err => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n else {\n return bail(err);\n }\n })\n .on('finish', () => {\n return resolve();\n });\n });\n }, {\n retries: numberOfRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback(null, newFile, newFile.metadata);\n }\n })\n .catch(callback);\n }\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (global['GCLOUD_SANDBOX_ENV']) {\n return;\n }\n let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n options = Object.assign({\n metadata: {},\n }, options);\n // Do not retry if precondition option ifGenerationMatch is not set\n // because this is a file operation\n let maxRetries = this.storage.retryOptions.maxRetries;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n maxRetries = 0;\n }\n let newFile;\n if (options.destination instanceof file_js_1.File) {\n newFile = options.destination;\n }\n else if (options.destination !== null &&\n typeof options.destination === 'string') {\n // Use the string as the name of the file.\n newFile = this.file(options.destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n else {\n // Resort to using the name of the incoming file.\n const destination = path.basename(pathString);\n newFile = this.file(destination, {\n encryptionKey: options.encryptionKey,\n kmsKeyName: options.kmsKeyName,\n preconditionOpts: this.instancePreconditionOpts,\n });\n }\n upload(maxRetries);\n }\n /**\n * @private\n *\n * @typedef {object} MakeAllFilesPublicPrivateOptions\n * @property {boolean} [force] Suppress errors until all files have been\n * processed.\n * @property {boolean} [private] Make files private.\n * @property {boolean} [public] Make files public.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @private\n *\n * @callback SetBucketMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {File[]} files Files that were updated.\n */\n /**\n * @typedef {array} MakeAllFilesPublicPrivateResponse\n * @property {File[]} 0 List of files affected.\n */\n /**\n * Iterate over all of a bucket's files, calling `file.makePublic()` (public)\n * or `file.makePrivate()` (private) on 
each.\n *\n * Operations are performed in parallel, up to 10 at once. The first error\n * breaks the loop, and will execute the provided callback with it. Specify\n * `{ force: true }` to suppress the errors.\n *\n * @private\n *\n * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options.\n * @param {boolean} [options.force] Suppress errors until all files have been\n * processed.\n * @param {boolean} [options.private] Make files private.\n * @param {boolean} [options.public] Make files public.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n \n * @param {MakeAllFilesPublicPrivateCallback} callback Callback function.\n *\n * @return {Promise}\n */\n makeAllFilesPublicPrivate_(optionsOrCallback, callback) {\n const MAX_PARALLEL_LIMIT = 10;\n const errors = [];\n const updatedFiles = [];\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const processFile = async (file) => {\n try {\n await (options.public ? file.makePublic() : file.makePrivate(options));\n updatedFiles.push(file);\n }\n catch (e) {\n if (!options.force) {\n throw e;\n }\n errors.push(e);\n }\n };\n this.getFiles(options)\n .then(([files]) => {\n const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT);\n const promises = files.map(file => {\n return limit(() => processFile(file));\n });\n return Promise.all(promises);\n })\n .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles));\n }\n getId() {\n return this.id;\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n methodType, localPreconditionOptions) {\n var _a, _b;\n if (typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n (methodType === AvailableServiceObjectMethods.setMetadata ||\n methodType === AvailableServiceObjectMethods.delete) &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) {\n this.storage.retryOptions.autoRetry = false;\n }\n else if (this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n}\nexports.Bucket = Bucket;\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Bucket, 'getFiles');\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Bucket, {\n exclude: ['cloudStorageURI', 'request', 'file', 'notification'],\n});\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Channel = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * Create a channel object to interact with a Cloud Storage channel.\n *\n * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification}\n *\n * @class\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\nclass Channel extends index_js_1.ServiceObject {\n constructor(storage, id, resourceId) {\n const config = {\n parent: storage,\n baseUrl: '/channels',\n // An ID shouldn't be included in the API requests.\n // RE:\n // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145\n id: '',\n methods: {\n // Only need `request`.\n },\n };\n super(config);\n this.metadata.id = id;\n this.metadata.resourceId = resourceId;\n }\n /**\n * @typedef {array} StopResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback StopCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Stop this channel.\n *\n * @param {StopCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * channel.stop(function(err, apiResponse) {\n * if (!err) {\n * // Channel stopped successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * channel.stop().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n stop(callback) {\n callback = callback || index_js_1.util.noop;\n this.request({\n method: 'POST',\n uri: '/stop',\n json: this.metadata,\n }, (err, apiResponse) => {\n callback(err, apiResponse);\n });\n }\n}\nexports.Channel = Channel;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Channel);\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _CRC32C_crc32c;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0;\nconst fs_1 = require(\"fs\");\n/**\n * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c}\n */\nconst CRC32C_EXTENSIONS = [\n 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c,\n 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,\n 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,\n 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,\n 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc,\n 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,\n 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512,\n 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,\n 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,\n 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,\n 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf,\n 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,\n 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f,\n 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,\n 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,\n 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,\n 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e,\n 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,\n 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e,\n 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,\n 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,\n 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,\n 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4,\n 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,\n 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b,\n 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,\n 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,\n 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,\n 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975,\n 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,\n 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905,\n 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,\n 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,\n 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,\n 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8,\n 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,\n 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78,\n 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,\n 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,\n 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,\n 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69,\n 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,\n 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,\n];\nexports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nconst CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS);\nexports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\nconst CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C();\nexports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR;\nconst CRC32C_EXCEPTION_MESSAGES = {\n INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`,\n INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`,\n};\nexports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES;\nclass CRC32C {\n /**\n * Constructs a new `CRC32C` object.\n *\n * Reconstruction is recommended via the `CRC32C.from` static method.\n *\n * @param initialValue An initial CRC32C value - a signed 32-bit integer.\n */\n constructor(initialValue = 0) {\n /** Current CRC32C value */\n _CRC32C_crc32c.set(this, 0);\n __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, \"f\");\n }\n /**\n * Calculates a CRC32C from a provided buffer.\n *\n * Implementation inspired from:\n * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c}\n * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c}\n * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage}\n *\n * @param data The `Buffer` to generate the CRC32C from\n */\n update(data) {\n let current = __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\") ^ 0xffffffff;\n for (const d of data) {\n const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff];\n current = tablePoly ^ (current >>> 8);\n }\n __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, \"f\");\n }\n /**\n * Validates a provided input to the current CRC32C value.\n *\n * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer\n */\n validate(input) {\n if (typeof input === 'number') {\n return input === __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n else if (typeof input === 'string') {\n return input === this.toString();\n }\n else if (Buffer.isBuffer(input)) {\n return Buffer.compare(input, this.toBuffer()) === 0;\n }\n else {\n // `CRC32C`-like object\n return input.toString() === this.toString();\n }\n }\n /**\n * Returns a `Buffer` representation of the CRC32C value\n */\n toBuffer() {\n const buffer = Buffer.alloc(4);\n buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, \"f\"));\n return buffer;\n }\n /**\n * Returns a JSON-compatible, base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`}\n */\n toJSON() {\n return this.toString();\n }\n /**\n * Returns a base64-encoded representation of the CRC32C value.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`}\n */\n toString() {\n return this.toBuffer().toString('base64');\n }\n /**\n * Returns the `number` representation of the CRC32C value 
as a signed 32-bit integer\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`}\n */\n valueOf() {\n return __classPrivateFieldGet(this, _CRC32C_crc32c, \"f\");\n }\n /**\n * Generates a `CRC32C` from a compatible buffer format.\n *\n * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`\n */\n static fromBuffer(value) {\n let buffer;\n if (Buffer.isBuffer(value)) {\n buffer = value;\n }\n else if ('buffer' in value) {\n // `ArrayBufferView`\n buffer = Buffer.from(value.buffer);\n }\n else {\n // `ArrayBuffer`\n buffer = Buffer.from(value);\n }\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength));\n }\n return new CRC32C(buffer.readInt32BE());\n }\n static async fromFile(file) {\n const crc32c = new CRC32C();\n await new Promise((resolve, reject) => {\n (0, fs_1.createReadStream)(file)\n .on('data', (d) => crc32c.update(d))\n .on('end', resolve)\n .on('error', reject);\n });\n return crc32c;\n }\n /**\n * Generates a `CRC32C` from 4-byte base64-encoded data (string).\n *\n * @param value 4-byte base64-encoded data (string)\n */\n static fromString(value) {\n const buffer = Buffer.from(value, 'base64');\n if (buffer.byteLength !== 4) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength));\n }\n return this.fromBuffer(buffer);\n }\n /**\n * Generates a `CRC32C` from a safe, unsigned 32-bit integer.\n *\n * @param value an unsigned 32-bit integer\n */\n static fromNumber(value) {\n if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) {\n throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value));\n }\n return new CRC32C(value);\n }\n /**\n * Generates a `CRC32C` from a variety of compatable types.\n * Note: strings are treated as input, not as file paths to read from.\n *\n * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string)\n */\n static from(value) {\n if (typeof value === 'number') {\n return this.fromNumber(value);\n }\n else if (typeof value === 'string') {\n return this.fromString(value);\n }\n else if ('byteLength' in value) {\n // `ArrayBuffer` | `Buffer` | `ArrayBufferView`\n return this.fromBuffer(value);\n }\n else {\n // `CRC32CValidator`/`CRC32C`-like\n return this.fromString(value.toString());\n }\n }\n}\nexports.CRC32C = CRC32C;\n_CRC32C_crc32c = new WeakMap();\nCRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;\nCRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _File_instances, _File_validateIntegrity;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst compressible_1 = __importDefault(require(\"compressible\"));\nconst crypto = __importStar(require(\"crypto\"));\nconst fs = __importStar(require(\"fs\"));\nconst mime_1 = __importDefault(require(\"mime\"));\nconst resumableUpload = __importStar(require(\"./resumable-upload.js\"));\nconst stream_1 = require(\"stream\");\nconst zlib = __importStar(require(\"zlib\"));\nconst storage_js_1 = require(\"./storage.js\");\nconst bucket_js_1 = require(\"./bucket.js\");\nconst acl_js_1 = require(\"./acl.js\");\nconst signer_js_1 = require(\"./signer.js\");\nconst util_js_1 = require(\"./nodejs-common/util.js\");\nconst duplexify_1 = __importDefault(require(\"duplexify\"));\nconst util_js_2 = require(\"./util.js\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nconst hash_stream_validator_js_1 = require(\"./hash-stream-validator.js\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nvar ActionToHTTPMethod;\n(function (ActionToHTTPMethod) {\n ActionToHTTPMethod[\"read\"] = \"GET\";\n ActionToHTTPMethod[\"write\"] = \"PUT\";\n ActionToHTTPMethod[\"delete\"] = \"DELETE\";\n ActionToHTTPMethod[\"resumable\"] = \"POST\";\n})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {}));\n/**\n * @private\n */\nexports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com';\n/**\n * @private\n */\nconst GS_URL_REGEXP = /^gs:\\/\\/([a-z0-9_.-]+)\\/(.+)$/;\nclass RequestError extends Error {\n}\nexports.RequestError = RequestError;\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\nvar FileExceptionMessages;\n(function (FileExceptionMessages) {\n FileExceptionMessages[\"EXPIRATION_TIME_NA\"] = \"An expiration time is not available.\";\n FileExceptionMessages[\"DESTINATION_NO_NAME\"] = 
\"Destination file should have a name.\";\n FileExceptionMessages[\"INVALID_VALIDATION_FILE_RANGE\"] = \"Cannot use validation with file ranges (start/end).\";\n FileExceptionMessages[\"MD5_NOT_AVAILABLE\"] = \"MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.\";\n FileExceptionMessages[\"EQUALS_CONDITION_TWO_ELEMENTS\"] = \"Equals condition must be an array of 2 elements.\";\n FileExceptionMessages[\"STARTS_WITH_TWO_ELEMENTS\"] = \"StartsWith condition must be an array of 2 elements.\";\n FileExceptionMessages[\"CONTENT_LENGTH_RANGE_MIN_MAX\"] = \"ContentLengthRange must have numeric min & max fields.\";\n FileExceptionMessages[\"DOWNLOAD_MISMATCH\"] = \"The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.\";\n FileExceptionMessages[\"UPLOAD_MISMATCH_DELETE_FAIL\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, we attempted to delete the file, but it was not successful.\\n To be sure the content is the same, you should try removing the file manually,\\n then uploading the file again.\\n \\n\\nThe delete attempt failed with this message:\\n\\n \";\n FileExceptionMessages[\"UPLOAD_MISMATCH\"] = \"The uploaded data did not match the data from the server.\\n As a precaution, the file has been deleted.\\n To be sure the content is the same, you should try uploading the file again.\";\n FileExceptionMessages[\"MD5_RESUMED_UPLOAD\"] = \"MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value\";\n FileExceptionMessages[\"MISSING_RESUME_CRC32C_FINAL_UPLOAD\"] = \"The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`.\";\n})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {}));\n/**\n * A File object is created from your {@link Bucket} object using\n * {@link Bucket#file}.\n *\n * @class\n */\nclass File extends index_js_1.ServiceObject {\n /**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * An ACL consists of one or more entries, where each entry grants permissions\n * to an entity. 
Permissions define the actions that can be performed against\n * an object or bucket (for example, `READ` or `WRITE`); the entity defines\n * who the permission applies to (for example, a specific user or group of\n * users).\n *\n * The `acl` object on a File instance provides methods to get you a list of\n * the ACLs defined on your bucket, as well as set, update, and delete them.\n *\n * See {@link http://goo.gl/6qBBPO| About Access Control lists}\n *\n * @name File#acl\n * @mixes Acl\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * //-\n * // Make a file publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * file.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n /**\n * The API-formatted resource description of the file.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name File#metadata\n * @type {object}\n */\n /**\n * The file's name.\n * @name File#name\n * @type {string}\n */\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} FileOptions Options passed to the File constructor.\n * @property {string} [encryptionKey] A custom encryption key.\n * @property {number} [generation] Generation to scope the file to.\n * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this\n * object, if the object is encrypted by such a key. 
Limited availability;\n * usable only by enabled projects.\n * @property {string} [userProject] The ID of the project which will be\n * billed for all requests made from File object.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C}\n */\n /**\n * Constructs a file object.\n *\n * @param {Bucket} bucket The Bucket instance this file is\n * attached to.\n * @param {string} name The name of the remote file.\n * @param {FileOptions} [options] Configuration options.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * ```\n */\n constructor(bucket, name, options = {}) {\n var _a, _b;\n const requestQueryObject = {};\n let generation;\n if (options.generation !== null) {\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n requestQueryObject.generation = generation;\n }\n }\n Object.assign(requestQueryObject, options.preconditionOpts);\n const userProject = options.userProject || bucket.userProject;\n if (typeof userProject === 'string') {\n requestQueryObject.userProject = userProject;\n }\n const methods = {\n /**\n * @typedef {array} DeleteFileResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteFileCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Delete the file.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}\n *\n * @method File#delete\n * @param {object} [options] Configuration options.\n * @param {boolean} [options.ignoreNotFound = false] Ignore an error if\n * the file does not exist.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_delete_file\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} FileExistsResponse\n * @property {boolean} 0 Whether the {@link File} exists.\n */\n /**\n * @callback FileExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the {@link File} exists.\n */\n /**\n * Check if the file exists.\n *\n * @method File#exists\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {FileExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.exists(function(err, 
exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetFileResponse\n * @property {File} 0 The {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileCallback\n * @param {?Error} err Request error, if any.\n * @param {File} file The {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get a file object and its metadata if it exists.\n *\n * @method File#get\n * @param {options} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.get(function(err, file, apiResponse) {\n * // file.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.get().then(function(data) {\n * const file = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} GetFileMetadataResponse\n * @property {object} 0 The {@link File} metadata.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback GetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} metadata The {@link File} metadata.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get the file's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation}\n *\n * @method File#getMetadata\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_get_metadata\n * Another example:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata().\n * @param {string} [userProject] The ID of the project which will be billed for the request.\n */\n /**\n * @callback SetFileMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} SetFileMetadataResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Merge the given metadata with the current remote file's metadata. This\n * will set metadata if it was previously unset or update previously set\n * metadata. 
To unset previously set metadata, set its value to null.\n *\n * You can set custom key/value pairs in the metadata key of the given\n * object, however the other properties outside of this object must adhere\n * to the {@link https://goo.gl/BOnnCK| official API documentation}.\n *\n *\n * See the examples below for more information.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @method File#setMetadata\n * @param {object} [metadata] The metadata you wish to update.\n * @param {SetFileMetadataOptions} [options] Configuration options.\n * @param {SetFileMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * const metadata = {\n * contentType: 'application/x-font-ttf',\n * metadata: {\n * my: 'custom',\n * properties: 'go here'\n * }\n * };\n *\n * file.setMetadata(metadata, function(err, apiResponse) {});\n *\n * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' }\n * file.setMetadata({\n * metadata: {\n * abc: '123', // will be set.\n * unsetMe: null, // will be unset (deleted).\n * hello: 'goodbye' // will be updated from 'world' to 'goodbye'.\n * }\n * }, function(err, apiResponse) {\n * // metadata should now be { abc: '123', hello: 'goodbye' }\n * });\n *\n * //-\n * // Set a temporary hold on this file from its bucket's retention period\n * // configuration.\n * //\n * file.setMetadata({\n * temporaryHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // Alternatively, you may set a temporary hold. This will follow the\n * // same behavior as an event-based hold, with the exception that the\n * // bucket's retention policy will not renew for this file from the time\n * // the hold is released.\n * //-\n * file.setMetadata({\n * eventBasedHold: true\n * }, function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setMetadata(metadata).then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n };\n super({\n parent: bucket,\n baseUrl: '/o',\n id: encodeURIComponent(name),\n methods,\n });\n _File_instances.add(this);\n this.bucket = bucket;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.storage = bucket.parent;\n // @TODO Can this duplicate code from above be avoided?\n if (options.generation !== null) {\n let generation;\n if (typeof options.generation === 'string') {\n generation = Number(options.generation);\n }\n else {\n generation = options.generation;\n }\n if (!isNaN(generation)) {\n this.generation = generation;\n }\n }\n this.kmsKeyName = options.kmsKeyName;\n this.userProject = userProject;\n this.name = name;\n if (options.encryptionKey) {\n this.setEncryptionKey(options.encryptionKey);\n }\n this.acl = new acl_js_1.Acl({\n request: this.request.bind(this),\n pathPrefix: '/acl',\n });\n this.crc32cGenerator =\n options.crc32cGenerator || this.bucket.crc32cGenerator;\n this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry;\n this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts;\n }\n /**\n * The object's Cloud Storage URI (`gs://`)\n *\n * @example\n * ```ts\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('image.png');\n *\n * // `gs://my-bucket/image.png`\n * const href = file.cloudStorageURI.href;\n * ```\n */\n get cloudStorageURI() {\n const uri = this.bucket.cloudStorageURI;\n uri.pathname = this.name;\n return uri;\n }\n /**\n * A helper method for determining if a request should be retried based on preconditions.\n * This should only be used for methods where the idempotency is determined by\n * `ifGenerationMatch`\n * @private\n *\n * A request should not be retried under the following conditions:\n * - if precondition option `ifGenerationMatch` is not set OR\n * - if `idempotencyStrategy` is set to `RetryNever`\n */\n shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) {\n var _a;\n return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined &&\n ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever);\n }\n /**\n * @typedef {array} CopyResponse\n * @property {File} 0 The copied {@link File}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CopyCallback\n * @param {?Error} err Request error, if any.\n * @param {File} copiedFile The copied {@link File}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} CopyOptions Configuration options for File#copy(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @property {string} [cacheControl] The cacheControl setting for the new file.\n * @property {string} [contentEncoding] The contentEncoding setting for the new file.\n * @property {string} [contentType] The contentType setting for the new file.\n * @property {string} [destinationKmsKeyName] Resource name of the Cloud\n * KMS key, of the form\n * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,\n * that will be used to encrypt the object. Overwrites the object\n * metadata's `kms_key_name` value, if any.\n * @property {Metadata} [metadata] Metadata to specify on the copied file.\n * @property {string} [predefinedAcl] Set the ACL for the new file.\n * @property {string} [token] A previously-returned `rewriteToken` from an\n * unfinished rewrite request.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Copy this file to another file. By default, this will copy the file to the\n * same bucket, but you can choose to copy it to another Bucket by providing\n * a Bucket or File object or a URL starting with \"gs://\".\n * The generation of the file will not be preserved.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {CopyOptions} [options] Configuration options. 
See an\n * @param {CopyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is copied to its\n * // current bucket, under the new name provided.\n * //-\n * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {\n * // `my-bucket` now contains:\n * // - \"my-image.png\"\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-copy.png';\n * file.copy(newLocation, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-copy.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be copied to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = storage.bucket('another-bucket');\n * file.copy(anotherBucket, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `copiedFile` is an instance of a File object that refers to your new\n * // file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n * file.copy(anotherFile, function(err, copiedFile, apiResponse) {\n * // `my-bucket` still contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `copiedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.copy(newLocation).then(function(data) {\n * const newFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_copy_file\n * Another example:\n */\n copy(destination, optionsOrCallback, callback) {\n var _a, _b;\n const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME);\n if (!destination) {\n throw noDestinationError;\n }\n let options = {};\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n else if (optionsOrCallback) {\n options = { ...optionsOrCallback };\n }\n callback = callback || index_js_1.util.noop;\n let destBucket;\n let destName;\n let newFile;\n if (typeof destination === 'string') {\n const parsedDestination = GS_URL_REGEXP.exec(destination);\n if (parsedDestination !== null && parsedDestination.length === 3) {\n destBucket = this.storage.bucket(parsedDestination[1]);\n destName = parsedDestination[2];\n }\n else {\n destBucket = this.bucket;\n destName = destination;\n 
}\n }\n else if (destination instanceof bucket_js_1.Bucket) {\n destBucket = destination;\n destName = this.name;\n }\n else if (destination instanceof File) {\n destBucket = destination.bucket;\n destName = destination.name;\n newFile = destination;\n }\n else {\n throw noDestinationError;\n }\n const query = {};\n if (this.generation !== undefined) {\n query.sourceGeneration = this.generation;\n }\n if (options.token !== undefined) {\n query.rewriteToken = options.token;\n }\n if (options.userProject !== undefined) {\n query.userProject = options.userProject;\n delete options.userProject;\n }\n if (options.predefinedAcl !== undefined) {\n query.destinationPredefinedAcl = options.predefinedAcl;\n delete options.predefinedAcl;\n }\n newFile = newFile || destBucket.file(destName);\n const headers = {};\n if (this.encryptionKey !== undefined) {\n headers['x-goog-copy-source-encryption-algorithm'] = 'AES256';\n headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64;\n headers['x-goog-copy-source-encryption-key-sha256'] =\n this.encryptionKeyHash;\n }\n if (newFile.encryptionKey !== undefined) {\n this.setEncryptionKey(newFile.encryptionKey);\n }\n else if (options.destinationKmsKeyName !== undefined) {\n query.destinationKmsKeyName = options.destinationKmsKeyName;\n delete options.destinationKmsKeyName;\n }\n else if (newFile.kmsKeyName !== undefined) {\n query.destinationKmsKeyName = newFile.kmsKeyName;\n }\n if (query.destinationKmsKeyName) {\n this.kmsKeyName = query.destinationKmsKeyName;\n const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor);\n if (keyIndex > -1) {\n this.interceptors.splice(keyIndex, 1);\n }\n }\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n this.storage.retryOptions.autoRetry = false;\n }\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) {\n query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch;\n delete options.preconditionOpts;\n }\n this.request({\n method: 'POST',\n uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`,\n qs: query,\n json: options,\n headers,\n }, (err, resp) => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n if (err) {\n callback(err, null, resp);\n return;\n }\n if (resp.rewriteToken) {\n const options = {\n token: resp.rewriteToken,\n };\n if (query.userProject) {\n options.userProject = query.userProject;\n }\n if (query.destinationKmsKeyName) {\n options.destinationKmsKeyName = query.destinationKmsKeyName;\n }\n this.copy(newFile, options, callback);\n return;\n }\n callback(null, newFile, resp);\n });\n }\n /**\n * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**.\n * @property {number} [start] A byte offset to begin the file's download\n * from. Default is 0. 
NOTE: Byte ranges are inclusive; that is,\n * `options.start = 0` and `options.end = 999` represent the first 1000\n * bytes in a file or object. NOTE: when specifying a byte range, data\n * integrity is not available.\n * @property {number} [end] A byte offset to stop reading the file at.\n * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and\n * `options.end = 999` represent the first 1000 bytes in a file or object.\n * NOTE: when specifying a byte range, data integrity is not available.\n * @property {boolean} [decompress=true] Disable auto decompression of the\n * received data. By default this option is set to `true`.\n * Applicable in cases where the data was uploaded with\n * `gzip: true` option. See {@link File#createWriteStream}.\n */\n /**\n * Create a readable stream to read the contents of the remote file. It can be\n * piped to a writable stream or listened to for 'data' events to read a\n * file's contents.\n *\n * In the unlikely event there is a mismatch between what you downloaded and\n * the version in your Bucket, your error handler will receive an error with\n * code \"CONTENT_DOWNLOAD_MISMATCH\". If you receive this error, the best\n * recourse is to try downloading the file again.\n *\n * NOTE: Readable streams will emit the `end` event when the file is fully\n * downloaded.\n *\n * @param {CreateReadStreamOptions} [options] Configuration options.\n * @returns {ReadableStream}\n *\n * @example\n * ```\n * //-\n * //
<h4>Downloading a File</h4>
\n * //\n * // The example below demonstrates how we can reference a remote file, then\n * // pipe its contents to a local file. This is effectively creating a local\n * // backup of your remote data.\n * //-\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const fs = require('fs');\n * const remoteFile = bucket.file('image.png');\n * const localFilename = '/Users/stephen/Photos/image.png';\n *\n * remoteFile.createReadStream()\n * .on('error', function(err) {})\n * .on('response', function(response) {\n * // Server connected and responded with the specified status and headers.\n * })\n * .on('end', function() {\n * // The file is fully downloaded.\n * })\n * .pipe(fs.createWriteStream(localFilename));\n *\n * //-\n * // To limit the downloaded data to only a byte range, pass an options\n * // object.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * start: 10000,\n * end: 20000\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n *\n * //-\n * // To read a tail byte range, specify only `options.end` as a negative\n * // number.\n * //-\n * const logFile = myBucket.file('access_log');\n * logFile.createReadStream({\n * end: -100\n * })\n * .on('error', function(err) {})\n * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));\n * ```\n */\n createReadStream(options = {}) {\n options = Object.assign({ decompress: true }, options);\n const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number';\n const tailRequest = options.end < 0;\n let validateStream = undefined;\n let request = undefined;\n const throughStream = new util_js_2.PassThroughShim();\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n const value = options.validation.toLowerCase().trim();\n crc32c = value === 'crc32c';\n md5 = value === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n }\n const shouldRunValidation = !rangeRequest && (crc32c || md5);\n if (rangeRequest) {\n if (typeof options.validation === 'string' ||\n options.validation === true) {\n throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE);\n }\n // Range requests can't receive data integrity checks.\n crc32c = false;\n md5 = false;\n }\n const onComplete = (err) => {\n if (err) {\n // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed.\n // This causes a memory leak, so cleanup the sockets manually here by destroying the agent.\n if (request === null || request === void 0 ? void 0 : request.agent) {\n request.agent.destroy();\n }\n throughStream.destroy(err);\n }\n };\n // We listen to the response event from the request stream so that we\n // can...\n //\n // 1) Intercept any data from going to the user if an error occurred.\n // 2) Calculate the hashes from the http.IncomingMessage response\n // stream,\n // which will return the bytes from the source without decompressing\n // gzip'd content. 
We then send it through decompressed, if\n // applicable, to the user.\n const onResponse = (err, _body, rawResponseStream) => {\n if (err) {\n // Get error message from the body.\n this.getBufferFromReadable(rawResponseStream).then(body => {\n err.message = body.toString('utf8');\n throughStream.destroy(err);\n });\n return;\n }\n request = rawResponseStream.request;\n const headers = rawResponseStream.toJSON().headers;\n const isCompressed = headers['content-encoding'] === 'gzip';\n const hashes = {};\n // The object is safe to validate if:\n // 1. It was stored gzip and returned to us gzip OR\n // 2. It was never stored as gzip\n const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' &&\n isCompressed) ||\n headers['x-goog-stored-content-encoding'] === 'identity';\n const transformStreams = [];\n if (shouldRunValidation) {\n // The x-goog-hash header should be set with a crc32c and md5 hash.\n // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx'\n if (typeof headers['x-goog-hash'] === 'string') {\n headers['x-goog-hash']\n .split(',')\n .forEach((hashKeyValPair) => {\n const delimiterIndex = hashKeyValPair.indexOf('=');\n const hashType = hashKeyValPair.substring(0, delimiterIndex);\n const hashValue = hashKeyValPair.substring(delimiterIndex + 1);\n hashes[hashType] = hashValue;\n });\n }\n validateStream = new hash_stream_validator_js_1.HashStreamValidator({\n crc32c,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n crc32cExpected: hashes.crc32c,\n md5Expected: hashes.md5,\n });\n }\n if (md5 && !hashes.md5) {\n const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE);\n hashError.code = 'MD5_NOT_AVAILABLE';\n throughStream.destroy(hashError);\n return;\n }\n if (safeToValidate && shouldRunValidation && validateStream) {\n transformStreams.push(validateStream);\n }\n if (isCompressed && options.decompress) {\n transformStreams.push(zlib.createGunzip());\n }\n (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete);\n };\n // Authenticate the request, then pipe the remote API request to the stream\n // returned to the user.\n const makeRequest = () => {\n const query = { alt: 'media' };\n if (this.generation) {\n query.generation = this.generation;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n const headers = {\n 'Accept-Encoding': 'gzip',\n 'Cache-Control': 'no-store',\n };\n if (rangeRequest) {\n const start = typeof options.start === 'number' ? options.start : '0';\n const end = typeof options.end === 'number' ? options.end : '';\n headers.Range = `bytes=${tailRequest ? 
end : `${start}-${end}`}`;\n }\n const reqOpts = {\n uri: '',\n headers,\n qs: query,\n };\n if (options[util_js_1.GCCL_GCS_CMD_KEY]) {\n reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY];\n }\n this.requestStream(reqOpts)\n .on('error', err => {\n throughStream.destroy(err);\n })\n .on('response', res => {\n throughStream.emit('response', res);\n index_js_1.util.handleResp(null, res, null, onResponse);\n })\n .resume();\n };\n throughStream.on('reading', makeRequest);\n return throughStream;\n }\n /**\n * @callback CreateResumableUploadCallback\n * @param {?Error} err Request error, if any.\n * @param {string} uri The resumable upload's unique session URI.\n */\n /**\n * @typedef {array} CreateResumableUploadResponse\n * @property {string} 0 The resumable upload's unique session URI.\n */\n /**\n * @typedef {object} CreateResumableUploadOptions\n * @property {object} [metadata] Metadata to set on the file.\n * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload.\n * @property {string} [origin] Origin header to set for the upload.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string} [chunkSize] Create a separate request per chunk. This\n * value is in bytes and should be a multiple of 256 KiB (2^18).\n * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.}\n */\n /**\n * Create a unique resumable upload session URI. This is the first step when\n * performing a resumable upload.\n *\n * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n * for more on how the entire process works.\n *\n *
<h4>Note</h4>
\n *\n * If you are just looking to perform a resumable upload without worrying\n * about any of the details, see {@link File#createWriteStream}. Resumable\n * uploads are performed by default.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}\n *\n * @param {CreateResumableUploadOptions} [options] Configuration options.\n * @param {CreateResumableUploadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * file.createResumableUpload(function(err, uri) {\n * if (!err) {\n * // `uri` can be used to PUT data to.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.createResumableUpload().then(function(data) {\n * const uri = data[0];\n * });\n * ```\n */\n createResumableUpload(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const retryOptions = this.storage.retryOptions;\n if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&\n ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n retryOptions.autoRetry = false;\n }\n resumableUpload.createURI({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n origin: options.origin,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n userProject: options.userProject || this.userProject,\n retryOptions: retryOptions,\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY],\n }, callback);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().\n * @property {string} [contentType] Alias for\n * `options.metadata.contentType`. If set to `auto`, the file name is used\n * to determine the contentType.\n * @property {string|boolean} [gzip] If true, automatically gzip the file.\n * If set to `auto`, the contentType is used to determine if the file\n * should be gzipped. 
This will set `options.metadata.contentEncoding` to\n * `gzip` if necessary.\n * @property {object} [metadata] See the examples below or\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}\n * for more details.\n * @property {number} [offset] The starting byte of the upload stream, for\n * resuming an interrupted upload. Defaults to 0.\n * @property {string} [predefinedAcl] Apply a predefined set of access\n * controls to this object.\n *\n * Acceptable values are:\n * - **`authenticatedRead`** - Object owner gets `OWNER` access, and\n * `allAuthenticatedUsers` get `READER` access.\n *\n * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and\n * project team owners get `OWNER` access.\n *\n * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project\n * team owners get `READER` access.\n *\n * - **`private`** - Object owner gets `OWNER` access.\n *\n * - **`projectPrivate`** - Object owner gets `OWNER` access, and project\n * team members get access according to their roles.\n *\n * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`\n * get `READER` access.\n * @property {boolean} [private] Make the uploaded file private. (Alias for\n * `options.predefinedAcl = 'private'`)\n * @property {boolean} [public] Make the uploaded file public. (Alias for\n * `options.predefinedAcl = 'publicRead'`)\n * @property {boolean} [resumable] Force a resumable upload. NOTE: When\n * working with streams, the file format and size is unknown until it's\n * completely consumed. Because of this, it's best for you to be explicit\n * for what makes sense given your input.\n * @property {number} [timeout=60000] Set the HTTP request timeout in\n * milliseconds. This option is not available for resumable uploads.\n * Default: `60000`\n * @property {string} [uri] The URI for an already-created resumable\n * upload. See {@link File#createResumableUpload}.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n * @property {string|boolean} [validation] Possible values: `\"md5\"`,\n * `\"crc32c\"`, or `false`. By default, data integrity is validated with a\n * CRC32c checksum. You may use MD5 if preferred, but that hash is not\n * supported for composite objects. An error will be raised if MD5 is\n * specified but is not available. You may also choose to skip validation\n * completely, however this is **not recommended**. In addition to specifying\n * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will\n * cause the server to perform validation in addition to client validation.\n * NOTE: Validation is automatically skipped for objects that were\n * uploaded using the `gzip` option and have already compressed content.\n */\n /**\n * Create a writable stream to overwrite the contents of the file in your\n * bucket.\n *\n * A File object can also be used to create files for the first time.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n *\n *
<p>\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n * </p>
\n *\n * NOTE: Writable streams will emit the `finish` event when the file is fully\n * uploaded.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation}\n *\n * @param {CreateWriteStreamOptions} [options] Configuration options.\n * @returns {WritableStream}\n *\n * @example\n * ```\n * const fs = require('fs');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * //
<h4>Uploading a File</h4>
\n * //\n * // Now, consider a case where we want to upload a file to your bucket. You\n * // have the option of using {@link Bucket#upload}, but that is just\n * // a convenience method which will do the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream())\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * //
<h4>Uploading a File with gzip compression</h4>
\n * //-\n * fs.createReadStream('/Users/stephen/site/index.html')\n * .pipe(file.createWriteStream({ gzip: true }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n *\n * //-\n * // Downloading the file with `createReadStream` will automatically decode\n * // the file.\n * //-\n *\n * //-\n * //
<h4>Uploading a File with Metadata</h4>
\n * //\n * // One last case you may run into is when you want to upload a file to your\n * // bucket and set its metadata at the same time. Like above, you can use\n * // {@link Bucket#upload} to do this, which is just a wrapper around\n * // the following.\n * //-\n * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')\n * .pipe(file.createWriteStream({\n * metadata: {\n * contentType: 'image/jpeg',\n * metadata: {\n * custom: 'metadata'\n * }\n * }\n * }))\n * .on('error', function(err) {})\n * .on('finish', function() {\n * // The file upload is complete.\n * });\n * ```\n *\n * //-\n * //
<h4>Continuing a Resumable Upload</h4>
\n * //\n * // One can capture a `uri` from a resumable upload to reuse later.\n * // Additionally, for validation, one can also capture and pass `crc32c`.\n * //-\n * let uri: string | undefined = undefined;\n * let resumeCRC32C: string | undefined = undefined;\n *\n * fs.createWriteStream()\n * .on('uri', link => {uri = link})\n * .on('crc32', crc32c => {resumeCRC32C = crc32c});\n *\n * // later...\n * fs.createWriteStream({uri, resumeCRC32C});\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n createWriteStream(options = {}) {\n var _a;\n (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {});\n if (options.contentType) {\n options.metadata.contentType = options.contentType;\n }\n if (!options.metadata.contentType ||\n options.metadata.contentType === 'auto') {\n const detectedContentType = mime_1.default.getType(this.name);\n if (detectedContentType) {\n options.metadata.contentType = detectedContentType;\n }\n }\n let gzip = options.gzip;\n if (gzip === 'auto') {\n gzip = (0, compressible_1.default)(options.metadata.contentType || '');\n }\n if (gzip) {\n options.metadata.contentEncoding = 'gzip';\n }\n let crc32c = true;\n let md5 = false;\n if (typeof options.validation === 'string') {\n options.validation = options.validation.toLowerCase();\n crc32c = options.validation === 'crc32c';\n md5 = options.validation === 'md5';\n }\n else if (options.validation === false) {\n crc32c = false;\n md5 = false;\n }\n if (options.offset) {\n if (md5) {\n throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD);\n }\n if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) {\n throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD);\n }\n }\n /**\n * A callback for determining when the underlying pipeline is complete.\n * It's possible the pipeline callback could error before the write stream\n * calls `final` so by default this will destroy the write stream unless the\n * write stream sets this callback via its `final` handler.\n * @param error An optional error\n */\n let pipelineCallback = error => {\n writeStream.destroy(error || undefined);\n };\n // A stream for consumer to write to\n const writeStream = new stream_1.Writable({\n final(cb) {\n // Set the pipeline callback to this callback so the pipeline's results\n // can be populated to the consumer\n pipelineCallback = cb;\n emitStream.end();\n },\n write(chunk, encoding, cb) {\n emitStream.write(chunk, encoding, cb);\n },\n });\n const transformStreams = [];\n if (gzip) {\n transformStreams.push(zlib.createGzip());\n }\n const emitStream = new util_js_2.PassThroughShim();\n let hashCalculatingStream = null;\n if (crc32c || md5) {\n const crc32cInstance = options.resumeCRC32C\n ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C)\n : undefined;\n hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({\n crc32c,\n crc32cInstance,\n md5,\n crc32cGenerator: this.crc32cGenerator,\n updateHashesOnly: true,\n });\n transformStreams.push(hashCalculatingStream);\n }\n const fileWriteStream = (0, duplexify_1.default)();\n let fileWriteStreamMetadataReceived = false;\n // Handing off emitted events to users\n emitStream.on('reading', () => writeStream.emit('reading'));\n emitStream.on('writing', () => writeStream.emit('writing'));\n fileWriteStream.on('uri', evt => writeStream.emit('uri', evt));\n fileWriteStream.on('progress', evt => writeStream.emit('progress', evt));\n fileWriteStream.on('response', resp => writeStream.emit('response', resp));\n fileWriteStream.once('metadata', () => {\n fileWriteStreamMetadataReceived = true;\n });\n writeStream.once('writing', () => {\n if (options.resumable === false) {\n this.startSimpleUpload_(fileWriteStream, options);\n }\n else {\n this.startResumableUpload_(fileWriteStream, options);\n }\n (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => {\n if (e) {\n return pipelineCallback(e);\n }\n // We want to make sure we've received the metadata from the server in order\n // to properly validate the object's integrity. Depending on the type of upload,\n // the stream could close before the response is returned.\n if (!fileWriteStreamMetadataReceived) {\n try {\n await new Promise((resolve, reject) => {\n fileWriteStream.once('metadata', resolve);\n fileWriteStream.once('error', reject);\n });\n }\n catch (e) {\n return pipelineCallback(e);\n }\n }\n // Emit the local CRC32C value for future validation, if validation is enabled.\n if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) {\n writeStream.emit('crc32c', hashCalculatingStream.crc32c);\n }\n try {\n // Metadata may not be ready if the upload is a partial upload,\n // nothing to validate yet.\n const metadataNotReady = options.isPartialUpload && !this.metadata;\n if (hashCalculatingStream && !metadataNotReady) {\n await __classPrivateFieldGet(this, _File_instances, \"m\", _File_validateIntegrity).call(this, hashCalculatingStream, {\n crc32c,\n md5,\n });\n }\n pipelineCallback();\n }\n catch (e) {\n pipelineCallback(e);\n }\n });\n });\n return writeStream;\n }\n delete(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options);\n super\n .delete(options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} DownloadResponse\n * @property [0] The contents of a File.\n */\n /**\n * @callback DownloadCallback\n * @param err Request error, if any.\n * @param contents The contents of a File.\n */\n /**\n * Convenience method to download a file into memory or to a local\n * destination.\n *\n * @param {object} [options] Configuration options. 
The arguments match those\n * passed to {@link File#createReadStream}.\n * @param {string} [options.destination] Local file path to write the file's\n * contents to.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DownloadCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Download a file into memory. The contents will be available as the\n * second\n * // argument in the demonstration below, `contents`.\n * //-\n * file.download(function(err, contents) {});\n *\n * //-\n * // Download a file to a local destination.\n * //-\n * file.download({\n * destination: '/Users/me/Desktop/file-backup.txt'\n * }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.download().then(function(data) {\n * const contents = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_download_file\n * Another example:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n *\n * @example include:samples/requesterPays.js\n * region_tag:storage_download_file_requester_pays\n * Example of downloading a file where the requester pays:\n */\n download(optionsOrCallback, cb) {\n let options;\n if (typeof optionsOrCallback === 'function') {\n cb = optionsOrCallback;\n options = {};\n }\n else {\n options = optionsOrCallback;\n }\n let called = false;\n const callback = ((...args) => {\n if (!called)\n cb(...args);\n called = true;\n });\n const destination = options.destination;\n delete options.destination;\n const fileStream = this.createReadStream(options);\n let receivedData = false;\n if (destination) {\n fileStream\n .on('error', callback)\n .once('data', data => {\n // We know that the file exists the server - now we can truncate/write to a file\n receivedData = true;\n const writable = fs.createWriteStream(destination);\n writable.write(data);\n fileStream\n .pipe(writable)\n .on('error', callback)\n .on('finish', callback);\n })\n .on('end', () => {\n // In the case of an empty file no data will be received before the end event fires\n if (!receivedData) {\n const data = Buffer.alloc(0);\n try {\n fs.writeFileSync(destination, data);\n callback(null, data);\n }\n catch (e) {\n callback(e, data);\n }\n }\n });\n }\n else {\n this.getBufferFromReadable(fileStream)\n .then(contents => callback === null || callback === void 0 ? 
void 0 : callback(null, contents))\n .catch(callback);\n }\n }\n /**\n * The Storage API allows you to use a custom key for server-side encryption.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {string|buffer} encryptionKey An AES-256 encryption key.\n * @returns {File}\n *\n * @example\n * ```\n * const crypto = require('crypto');\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const encryptionKey = crypto.randomBytes(32);\n *\n * const fileWithCustomEncryption = myBucket.file('my-file');\n * fileWithCustomEncryption.setEncryptionKey(encryptionKey);\n *\n * const fileWithoutCustomEncryption = myBucket.file('my-file');\n *\n * fileWithCustomEncryption.save('data', function(err) {\n * // Try to download with the File object that hasn't had\n * // `setEncryptionKey()` called:\n * fileWithoutCustomEncryption.download(function(err) {\n * // We will receive an error:\n * // err.message === 'Bad Request'\n *\n * // Try again with the File object we called `setEncryptionKey()` on:\n * fileWithCustomEncryption.download(function(err, contents) {\n * // contents.toString() === 'data'\n * });\n * });\n * });\n *\n * ```\n * @example include:samples/encryption.js\n * region_tag:storage_upload_encrypted_file\n * Example of uploading an encrypted file:\n *\n * @example include:samples/encryption.js\n * region_tag:storage_download_encrypted_file\n * Example of downloading an encrypted file:\n */\n setEncryptionKey(encryptionKey) {\n this.encryptionKey = encryptionKey;\n this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64');\n this.encryptionKeyHash = crypto\n .createHash('sha256')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .update(this.encryptionKeyBase64, 'base64')\n .digest('base64');\n this.encryptionKeyInterceptor = {\n request: reqOpts => {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64;\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryptionKeyHash;\n return reqOpts;\n },\n };\n this.interceptors.push(this.encryptionKeyInterceptor);\n return this;\n }\n /**\n * @typedef {array} GetExpirationDateResponse\n * @property {date} 0 A Date object representing the earliest time this file's\n * retention policy will expire.\n */\n /**\n * @callback GetExpirationDateCallback\n * @param {?Error} err Request error, if any.\n * @param {date} expirationDate A Date object representing the earliest time\n * this file's retention policy will expire.\n */\n /**\n * If this bucket has a retention policy defined, use this method to get a\n * Date object representing the earliest time this file will expire.\n *\n * @param {GetExpirationDateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const storage = require('@google-cloud/storage')();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.getExpirationDate(function(err, expirationDate) {\n * // expirationDate is a Date object.\n * });\n * ```\n */\n getExpirationDate(callback) {\n this.getMetadata((err, metadata, apiResponse) => {\n if (err) {\n callback(err, null, apiResponse);\n return;\n }\n if (!metadata.retentionExpirationTime) {\n const error = new 
Error(FileExceptionMessages.EXPIRATION_TIME_NA);\n callback(error, null, apiResponse);\n return;\n }\n callback(null, new Date(metadata.retentionExpirationTime), apiResponse);\n });\n }\n /**\n * @typedef {array} GenerateSignedPostPolicyV2Response\n * @property {object} 0 The document policy.\n */\n /**\n * @callback GenerateSignedPostPolicyV2Callback\n * @param {?Error} err Request error, if any.\n * @param {object} policy The document policy.\n */\n /**\n * Get a signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process}\n *\n * @throws {Error} If an expiration timestamp from the past is given.\n * @throws {Error} If options.equals has an array with less or more than two\n * members.\n * @throws {Error} If options.startsWith has an array with less or more than two\n * members.\n *\n * @param {object} options Configuration options.\n * @param {array|array[]} [options.equals] Array of request parameters and\n * their expected value (e.g. [['$', '']]). Values are\n * translated into equality constraints in the conditions field of the\n * policy document (e.g. ['eq', '$', '']). If only one\n * equality condition is to be specified, options.equals can be a one-\n * dimensional array (e.g. ['$', '']).\n * @param {*} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {array|array[]} [options.startsWith] Array of request parameters and\n * their expected prefixes (e.g. [['$', '']). Values are\n * translated into starts-with constraints in the conditions field of the\n * policy document (e.g. ['starts-with', '$', '']). If only\n * one prefix condition is to be specified, options.startsWith can be a\n * one- dimensional array (e.g. 
['$', '']).\n * @param {string} [options.acl] ACL for the object from possibly predefined\n * ACLs.\n * @param {string} [options.successRedirect] The URL to which the user client\n * is redirected if the upload is successful.\n * @param {string} [options.successStatus] - The status of the Google Storage\n * response if the upload is successful (must be string).\n * @param {object} [options.contentLengthRange]\n * @param {number} [options.contentLengthRange.min] Minimum value for the\n * request's content length.\n * @param {number} [options.contentLengthRange.max] Maximum value for the\n * request's content length.\n * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * equals: ['$Content-Type', 'image/jpeg'],\n * expires: '10-25-2022',\n * contentLengthRange: {\n * min: 0,\n * max: 1024\n * }\n * };\n *\n * file.generateSignedPostPolicyV2(options, function(err, policy) {\n * // policy.string: the policy document in plain text.\n * // policy.base64: the policy document in base64.\n * // policy.signature: the policy signature in base64.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV2(options).then(function(data) {\n * const policy = data[0];\n * });\n * ```\n */\n generateSignedPostPolicyV2(optionsOrCallback, cb) {\n const args = (0, util_js_2.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n options = Object.assign({}, options);\n const conditions = [\n ['eq', '$key', this.name],\n {\n bucket: this.bucket.name,\n },\n ];\n if (Array.isArray(options.equals)) {\n if (!Array.isArray(options.equals[0])) {\n options.equals = [options.equals];\n }\n options.equals.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS);\n }\n conditions.push(['eq', condition[0], condition[1]]);\n });\n }\n if (Array.isArray(options.startsWith)) {\n if (!Array.isArray(options.startsWith[0])) {\n options.startsWith = [options.startsWith];\n }\n options.startsWith.forEach(condition => {\n if (!Array.isArray(condition) || condition.length !== 2) {\n throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS);\n }\n conditions.push(['starts-with', condition[0], condition[1]]);\n });\n }\n if (options.acl) {\n conditions.push({\n acl: options.acl,\n });\n }\n if (options.successRedirect) {\n conditions.push({\n success_action_redirect: options.successRedirect,\n });\n }\n if (options.successStatus) {\n conditions.push({\n success_action_status: options.successStatus,\n });\n }\n if (options.contentLengthRange) {\n const min = options.contentLengthRange.min;\n const max = options.contentLengthRange.max;\n if (typeof min !== 'number' || typeof max !== 'number') {\n throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX);\n }\n conditions.push(['content-length-range', min, max]);\n }\n const policy = {\n expiration: 
expires.toISOString(),\n conditions,\n };\n const policyString = JSON.stringify(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n this.storage.authClient.sign(policyBase64).then(signature => {\n callback(null, {\n string: policyString,\n base64: policyBase64,\n signature,\n });\n }, err => {\n callback(new signer_js_1.SigningError(err.message));\n });\n }\n /**\n * @typedef {object} SignedPostPolicyV4Output\n * @property {string} url The request URL.\n * @property {object} fields The form fields to include in the POST request.\n */\n /**\n * @typedef {array} GenerateSignedPostPolicyV4Response\n * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields.\n */\n /**\n * @callback GenerateSignedPostPolicyV4Callback\n * @param {?Error} err Request error, if any.\n * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields.\n */\n /**\n * Get a v4 signed policy document to allow a user to upload data with a POST\n * request.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed policy. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference}\n *\n * @param {object} options Configuration options.\n * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any\n * value given is passed to `new Date()`.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instead of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in\n * the result, e.g. \"https://cdn.example.com\".\n * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument}\n * to include in the signed policy. Any fields with key beginning with 'x-ignore-'\n * will not be included in the policy to be signed.\n * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document}\n * to include in the signed policy. 
All fields given in `config.fields` are\n * automatically included in the conditions array, adding the same entry\n * in both `fields` and `conditions` will result in duplicate entries.\n *\n * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const options = {\n * expires: '10-25-2022',\n * conditions: [\n * ['eq', '$Content-Type', 'image/jpeg'],\n * ['content-length-range', 0, 1024],\n * ],\n * fields: {\n * acl: 'public-read',\n * 'x-goog-meta-foo': 'bar',\n * 'x-ignore-mykey': 'data'\n * }\n * };\n *\n * file.generateSignedPostPolicyV4(options, function(err, response) {\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.generateSignedPostPolicyV4(options).then(function(data) {\n * const response = data[0];\n * // response.url The request URL\n * // response.fields The form fields (including the signature) to include\n * // to be used to upload objects by HTML forms.\n * });\n * ```\n */\n generateSignedPostPolicyV4(optionsOrCallback, cb) {\n const args = (0, util_js_2.normalize)(optionsOrCallback, cb);\n let options = args.options;\n const callback = args.callback;\n const expires = new Date(options.expires);\n if (isNaN(expires.getTime())) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expires.valueOf() < Date.now()) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n options = Object.assign({}, options);\n let fields = Object.assign({}, options.fields);\n const now = new Date();\n const nowISO = (0, util_js_2.formatAsUTCISO)(now, true);\n const todayISO = (0, util_js_2.formatAsUTCISO)(now);\n const sign = async () => {\n const { client_email } = await this.storage.authClient.getCredentials();\n const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`;\n fields = {\n ...fields,\n bucket: this.bucket.name,\n key: this.name,\n 'x-goog-date': nowISO,\n 'x-goog-credential': credential,\n 'x-goog-algorithm': 'GOOG4-RSA-SHA256',\n };\n const conditions = options.conditions || [];\n Object.entries(fields).forEach(([key, value]) => {\n if (!key.startsWith('x-ignore-')) {\n conditions.push({ [key]: value });\n }\n });\n delete fields.bucket;\n const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':');\n const policy = {\n conditions,\n expiration,\n };\n const policyString = (0, util_js_2.unicodeJSONStringify)(policy);\n const policyBase64 = Buffer.from(policyString).toString('base64');\n try {\n const signature = await this.storage.authClient.sign(policyBase64);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n fields['policy'] = policyBase64;\n fields['x-goog-signature'] = signatureHex;\n let url;\n if (options.virtualHostedStyle) {\n url = `https://${this.bucket.name}.storage.googleapis.com/`;\n }\n else if (options.bucketBoundHostname) {\n url = `${options.bucketBoundHostname}/`;\n }\n else {\n url = 
`${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`;\n }\n return {\n url,\n fields,\n };\n }\n catch (err) {\n throw new signer_js_1.SigningError(err.message);\n }\n };\n sign().then(res => callback(null, res), callback);\n }\n /**\n * @typedef {array} GetSignedUrlResponse\n * @property {object} 0 The signed URL.\n */\n /**\n * @callback GetSignedUrlCallback\n * @param {?Error} err Request error, if any.\n * @param {object} url The signed URL.\n */\n /**\n * Get a signed URL to allow limited time access to the file.\n *\n * In Google Cloud Platform environments, such as Cloud Functions and App\n * Engine, you usually don't provide a `keyFilename` or `credentials` during\n * instantiation. In those environments, we call the\n * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}\n * to create a signed URL. That API requires either the\n * `https://www.googleapis.com/auth/iam` or\n * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are\n * enabled.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}\n *\n * @throws {Error} if an expiration timestamp from the past is given.\n *\n * @param {object} config Configuration object.\n * @param {string} config.action \"read\" (HTTP: GET), \"write\" (HTTP: PUT), or\n * \"delete\" (HTTP: DELETE), \"resumable\" (HTTP: POST).\n * When using \"resumable\", the header `X-Goog-Resumable: start` has\n * to be sent when making a request with the signed URL.\n * @param {*} config.expires A timestamp when this link will expire. Any value\n * given is passed to `new Date()`.\n * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.\n * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}\n * @param {string} [config.version='v2'] The signing version to use, either\n * 'v2' or 'v4'.\n * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style\n * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style\n * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs\n * should generally be preferred instaed of path-style URL.\n * Currently defaults to `false` for path-style, although this may change in a\n * future major-version release.\n * @param {string} [config.cname] The cname for this bucket, i.e.,\n * \"https://cdn.example.com\".\n * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like\n * if you provide this, the client must provide this HTTP header with this same\n * value in its request, so to if this parameter is not provided here,\n * the client must not provide any value for this HTTP header in its request.\n * @param {string} [config.contentType] Just like if you provide this, the client\n * must provide this HTTP header with this same value in its request, so to if\n * this parameter is not provided here, the client must not provide any value\n * for this HTTP header in its request.\n * @param {object} [config.extensionHeaders] If these headers are used, the\n * server will check to make sure that the client provides matching\n * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}\n * for the requirements of this feature, most notably:\n * - The header name must be prefixed with `x-goog-`\n * - The header name must be all lowercase\n *\n * Note: Multi-valued header passed as an array in the extensionHeaders\n * object is converted into a string, delimited by `,` with\n * no space. Requests made using the signed URL will need to\n * delimit multi-valued headers using a single `,` as well, or\n * else the server will report a mismatched signature.\n * @param {object} [config.queryParams] Additional query parameters to include\n * in the signed URL.\n * @param {string} [config.promptSaveAs] The filename to prompt the user to\n * save the file as when the signed url is accessed. This is ignored if\n * `config.responseDisposition` is set.\n * @param {string} [config.responseDisposition] The\n * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the\n * signed url.\n * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value\n * given is passed to `new Date()`.\n * Note: Use for 'v4' only.\n * @param {string} [config.responseType] The response-content-type parameter\n * of the signed url.\n * @param {GetSignedUrlCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to read from this URL.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL that allows temporary access to download your file.\n * // Access will begin at accessibleAt and end at expires.\n * //-\n * const request = require('request');\n *\n * const config = {\n * action: 'read',\n * expires: '03-17-2025',\n * accessibleAt: '03-13-2025'\n * };\n *\n * file.getSignedUrl(config, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025.\n * request(url, function(err, resp) {\n * // resp.statusCode = 200\n * });\n * });\n *\n * //-\n * // Generate a URL to allow write permissions. 
This means anyone with this\n * URL\n * // can send a POST request with new data that will overwrite the file.\n * //-\n * file.getSignedUrl({\n * action: 'write',\n * expires: '03-17-2025'\n * }, function(err, url) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n *\n * // The file is now available to be written to.\n * const writeStream = request.put(url);\n * writeStream.end('New data');\n *\n * writeStream.on('complete', function(resp) {\n * // Confirm the new content was saved.\n * file.download(function(err, fileContents) {\n * console.log('Contents:', fileContents.toString());\n * // Contents: New data\n * });\n * });\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.getSignedUrl(config).then(function(data) {\n * const url = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_generate_signed_url\n * Another example:\n */\n getSignedUrl(cfg, callback) {\n const method = ActionToHTTPMethod[cfg.action];\n const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {});\n if (cfg.action === 'resumable') {\n extensionHeaders['x-goog-resumable'] = 'start';\n }\n const queryParams = Object.assign({}, cfg.queryParams);\n if (typeof cfg.responseType === 'string') {\n queryParams['response-content-type'] = cfg.responseType;\n }\n if (typeof cfg.promptSaveAs === 'string') {\n queryParams['response-content-disposition'] =\n 'attachment; filename=\"' + cfg.promptSaveAs + '\"';\n }\n if (typeof cfg.responseDisposition === 'string') {\n queryParams['response-content-disposition'] = cfg.responseDisposition;\n }\n if (this.generation) {\n queryParams['generation'] = this.generation.toString();\n }\n const signConfig = {\n method,\n expires: cfg.expires,\n accessibleAt: cfg.accessibleAt,\n extensionHeaders,\n queryParams,\n contentMd5: cfg.contentMd5,\n contentType: cfg.contentType,\n };\n if (cfg.cname) {\n signConfig.cname = cfg.cname;\n }\n if (cfg.version) {\n signConfig.version = cfg.version;\n }\n if (cfg.virtualHostedStyle) {\n signConfig.virtualHostedStyle = cfg.virtualHostedStyle;\n }\n if (!this.signer) {\n this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this);\n }\n this.signer\n .getSignedUrl(signConfig)\n .then(signedUrl => callback(null, signedUrl), callback);\n }\n /**\n * @callback IsPublicCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} resp Whether file is public or not.\n */\n /**\n * @typedef {array} IsPublicResponse\n * @property {boolean} 0 Whether file is public or not.\n */\n /**\n * Check whether this file is public or not by sending\n * a HEAD request without credentials.\n * No errors from the server indicates that the current\n * file is public.\n * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden}\n * indicates that file is private.\n * Any other non 403 error is propagated to user.\n *\n * @param {IsPublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Check whether the file is publicly accessible.\n * //-\n * file.isPublic(function(err, resp) {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * console.log(`the file ${file.id} is public: ${resp}`) ;\n * })\n * //-\n * // If the callback 
is omitted, we'll return a Promise.\n * //-\n * file.isPublic().then(function(data) {\n * const resp = data[0];\n * });\n * ```\n */\n isPublic(callback) {\n var _a;\n // Build any custom headers based on the defined interceptors on the parent\n // storage object and this object\n const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || [];\n const fileInterceptors = this.interceptors || [];\n const allInterceptors = storageInterceptors.concat(fileInterceptors);\n const headers = allInterceptors.reduce((acc, curInterceptor) => {\n const currentHeaders = curInterceptor.request({\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n });\n Object.assign(acc, currentHeaders.headers);\n return acc;\n }, {});\n index_js_1.util.makeRequest({\n method: 'GET',\n uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,\n headers,\n }, {\n retryOptions: this.storage.retryOptions,\n }, (err) => {\n if (err) {\n const apiError = err;\n if (apiError.code === 403) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n }\n else {\n callback(null, true);\n }\n });\n }\n /**\n * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate().\n * @property {Metadata} [metadata] Define custom metadata properties to define\n * along with the operation.\n * @property {boolean} [strict] If true, set the file to be private to\n * only the owner user. Otherwise, it will be private to the project.\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback MakeFilePrivateCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} MakeFilePrivateResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Make a file private to the project and remove all other permissions.\n * Set `options.strict` to true to make the file private to only the owner.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}\n *\n * @param {MakeFilePrivateOptions} [options] Configuration options.\n * @param {MakeFilePrivateCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * //-\n * // Set the file private so only project maintainers can see and modify it.\n * //-\n * file.makePrivate(function(err) {});\n *\n * //-\n * // Set the file private so only the owner can see and modify it.\n * //-\n * file.makePrivate({ strict: true }, function(err) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePrivate().then(function(data) {\n * const apiResponse = data[0];\n * });\n * ```\n */\n makePrivate(optionsOrCallback, callback) {\n var _a, _b;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const query = {\n predefinedAcl: options.strict ? 'private' : 'projectPrivate',\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n };\n if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifMetagenerationMatch) !== undefined) {\n query.ifMetagenerationMatch =\n (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch;\n delete options.preconditionOpts;\n }\n if (options.userProject) {\n query.userProject = options.userProject;\n }\n // You aren't allowed to set both predefinedAcl & acl properties on a file,\n // so acl must explicitly be nullified, destroying all previous acls on the\n // file.\n const metadata = { ...options.metadata, acl: null };\n this.setMetadata(metadata, query, callback);\n }\n /**\n * @typedef {array} MakeFilePublicResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback MakeFilePublicCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set a file to be publicly readable and maintain all previous permissions.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}\n *\n * @param {MakeFilePublicCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n *\n * file.makePublic(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.makePublic().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_make_public\n * Another example:\n */\n makePublic(callback) {\n callback = callback || index_js_1.util.noop;\n this.acl.add({\n entity: 'allUsers',\n role: 'READER',\n }, (err, acl, resp) => {\n callback(err, resp);\n });\n }\n /**\n * The public URL of this File\n * Use {@link File#makePublic} to enable anonymous access via the returned URL.\n *\n * @returns {string}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * // publicUrl will be \"https://storage.googleapis.com/albums/my-file\"\n * const publicUrl = file.publicUrl();\n * ```\n */\n publicUrl() {\n return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`;\n }\n /**\n * @typedef {array} MoveResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback MoveCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} MoveOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Move this file to another location. 
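// --- Editor's illustrative sketch (not part of the bundled source above) ---
// A minimal promise-style use of the File#makePublic and File#publicUrl methods
// documented in the preceding JSDoc. The bucket and object names are placeholders,
// and Application Default Credentials are assumed to be configured.
const {Storage} = require('@google-cloud/storage');

(async () => {
  const file = new Storage().bucket('my-bucket').file('my-file');
  await file.makePublic();         // adds allUsers with the READER role to the object ACL
  console.log(file.publicUrl());   // e.g. https://storage.googleapis.com/my-bucket/my-file
})();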
By default, this will rename the file\n * and keep it in the same bucket, but you can choose to move it to another\n * Bucket by providing a Bucket or File object or a URL beginning with\n * \"gs://\".\n *\n * **Warning**:\n * There is currently no atomic `move` method in the Cloud Storage API,\n * so this method is a composition of {@link File#copy} (to the new\n * location) and {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. The error message will indicate what operation\n * has failed.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation}\n *\n * @throws {Error} If the destination file is not provided.\n *\n * @param {string|Bucket|File} destination Destination file.\n * @param {MoveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * //-\n * // You can pass in a variety of types for the destination.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // If you pass in a string for the destination, the file is moved to its\n * // current bucket, under the new name provided.\n * //-\n * file.move('my-image-new.png', function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a string starting with \"gs://\" for the destination, the\n * // file is copied to the other bucket and under the new name provided.\n * //-\n * const newLocation = 'gs://another-bucket/my-image-new.png';\n * file.move(newLocation, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image-new.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a Bucket object, the file will be moved to that bucket\n * // using the same name.\n * //-\n * const anotherBucket = gcs.bucket('another-bucket');\n *\n * file.move(anotherBucket, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-image.png\"\n *\n * // `destinationFile` is an instance of a File object that refers to your\n * // new file.\n * });\n *\n * //-\n * // If you pass in a File object, you have complete control over the new\n * // bucket and filename.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.move(anotherFile, function(err, destinationFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * //\n * // `another-bucket` now contains:\n * // - \"my-awesome-image.png\"\n *\n * // Note:\n * // The `destinationFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * 
file.move('my-image-new.png').then(function(data) {\n * const destinationFile = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/files.js\n * region_tag:storage_move_file\n * Another example:\n */\n move(destination, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || index_js_1.util.noop;\n this.copy(destination, options, (err, destinationFile, copyApiResponse) => {\n if (err) {\n err.message = 'file#copy failed with an error - ' + err.message;\n callback(err, null, copyApiResponse);\n return;\n }\n if (this.name !== destinationFile.name ||\n this.bucket.name !== destinationFile.bucket.name) {\n this.delete(options, (err, apiResponse) => {\n if (err) {\n err.message = 'file#delete failed with an error - ' + err.message;\n callback(err, destinationFile, apiResponse);\n return;\n }\n callback(null, destinationFile, copyApiResponse);\n });\n }\n else {\n callback(null, destinationFile, copyApiResponse);\n }\n });\n }\n /**\n * @typedef {array} RenameResponse\n * @property {File} 0 The destination File.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback RenameCallback\n * @param {?Error} err Request error, if any.\n * @param {?File} destinationFile The destination File.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {object} RenameOptions Configuration options for File#move(). See an\n * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.\n * @param {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * Rename this file.\n *\n * **Warning**:\n * There is currently no atomic `rename` method in the Cloud Storage API,\n * so this method is an alias of {@link File#move}, which in turn is a\n * composition of {@link File#copy} (to the new location) and\n * {@link File#delete} (from the old location). While\n * unlikely, it is possible that an error returned to your callback could be\n * triggered from either one of these API calls failing, which could leave a\n * duplicate file lingering. 
The error message will indicate what operation\n * has failed.\n *\n * @param {string|File} destinationFile Destination file.\n * @param {RenameCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // You can pass in a string or a File object.\n * //\n * // For all of the below examples, assume we are working with the following\n * // Bucket and File objects.\n * //-\n *\n * const bucket = storage.bucket('my-bucket');\n * const file = bucket.file('my-image.png');\n *\n * //-\n * // You can pass in a string for the destinationFile.\n * //-\n * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n * // but contains instead:\n * // - \"renamed-image.png\"\n *\n * // `renamedFile` is an instance of a File object that refers to your\n * // renamed file.\n * });\n *\n * //-\n * // You can pass in a File object.\n * //-\n * const anotherFile = anotherBucket.file('my-awesome-image.png');\n *\n * file.rename(anotherFile, function(err, renamedFile, apiResponse) {\n * // `my-bucket` no longer contains:\n * // - \"my-image.png\"\n *\n * // Note:\n * // The `renamedFile` parameter is equal to `anotherFile`.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.rename('my-renamed-image.png').then(function(data) {\n * const renamedFile = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n rename(destinationFile, optionsOrCallback, callback) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n callback = callback || index_js_1.util.noop;\n this.move(destinationFile, options, callback);\n }\n /**\n * Makes request and applies userProject query parameter if necessary.\n *\n * @private\n *\n * @param {object} reqOpts - The request options.\n * @param {function} callback - The callback function.\n */\n request(reqOpts, callback) {\n return this.parent.request.call(this, reqOpts, callback);\n }\n /**\n * @callback RotateEncryptionKeyCallback\n * @extends CopyCallback\n */\n /**\n * @typedef RotateEncryptionKeyResponse\n * @extends CopyResponse\n */\n /**\n * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options\n * for File#rotateEncryptionKey().\n * If a string or Buffer is provided, it is interpreted as an AES-256,\n * customer-supplied encryption key. If you'd like to use a Cloud KMS key\n * name, you must specify an options object with the property name:\n * `kmsKeyName`.\n * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key.\n * @param {string} [options.kmsKeyName] A Cloud KMS key name.\n */\n /**\n * This method allows you to update the encryption key associated with this\n * file.\n *\n * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}\n *\n * @param {RotateEncryptionKeyOptions} [options] - Configuration options.\n * @param {RotateEncryptionKeyCallback} [callback]\n * @returns {Promise}\n *\n * @example include:samples/encryption.js\n * region_tag:storage_rotate_encryption_key\n * Example of rotating the encryption key for this file:\n */\n rotateEncryptionKey(optionsOrCallback, callback) {\n var _a;\n callback =\n typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback;\n let options = {};\n if (typeof optionsOrCallback === 'string' ||\n optionsOrCallback instanceof Buffer) {\n options = {\n encryptionKey: optionsOrCallback,\n };\n }\n else if (typeof optionsOrCallback === 'object') {\n options = optionsOrCallback;\n }\n const newFile = this.bucket.file(this.id, options);\n const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined\n ? { preconditionOpts: options.preconditionOpts }\n : {};\n this.copy(newFile, copyOptions, callback);\n }\n /**\n * @typedef {object} SaveOptions\n * @extends CreateWriteStreamOptions\n */\n /**\n * @callback SaveCallback\n * @param {?Error} err Request error, if any.\n */\n /**\n * Write strings or buffers to a file.\n *\n * *This is a convenience method which wraps {@link File#createWriteStream}.*\n * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly.\n *\n * Resumable uploads are automatically enabled and must be shut off explicitly\n * by setting `options.resumable` to `false`.\n *\n * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff.\n *\n *
\n * There is some overhead when using a resumable upload that can cause\n * noticeable performance degradation while uploading a series of small\n * files. When uploading files less than 10MB, it is recommended that the\n * resumable feature is disabled.\n *
\n *\n * @param {SaveData} data The data to write to a file.\n * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options`\n * parameter.\n * @param {SaveCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const file = myBucket.file('my-file');\n * const contents = 'This is the contents of the file.';\n *\n * file.save(contents, function(err) {\n * if (!err) {\n * // File written successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.save(contents).then(function() {});\n * ```\n */\n save(data, optionsOrCallback, callback) {\n // tslint:enable:no-any\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n let maxRetries = this.storage.retryOptions.maxRetries;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {\n maxRetries = 0;\n }\n const returnValue = (0, async_retry_1.default)(async (bail) => {\n return new Promise((resolve, reject) => {\n if (maxRetries === 0) {\n this.storage.retryOptions.autoRetry = false;\n }\n const writable = this.createWriteStream(options);\n if (options.onUploadProgress) {\n writable.on('progress', options.onUploadProgress);\n }\n const handleError = (err) => {\n if (this.storage.retryOptions.autoRetry &&\n this.storage.retryOptions.retryableErrorFn(err)) {\n return reject(err);\n }\n return bail(err);\n };\n if (typeof data === 'string' || Buffer.isBuffer(data)) {\n writable\n .on('error', handleError)\n .on('finish', () => resolve())\n .end(data);\n }\n else {\n (0, stream_1.pipeline)(data, writable, err => {\n if (err) {\n if (typeof data !== 'function') {\n // Only PipelineSourceFunction can be retried. Async-iterables\n // and Readable streams can only be consumed once.\n return bail(err);\n }\n handleError(err);\n }\n else {\n resolve();\n }\n });\n }\n });\n }, {\n retries: maxRetries,\n factor: this.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds\n maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n if (!callback) {\n return returnValue;\n }\n else {\n return returnValue\n .then(() => {\n if (callback) {\n return callback();\n }\n })\n .catch(callback);\n }\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? 
optionsOrCallback\n : cb;\n this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options);\n if (metadata.retention !== undefined) {\n options.overrideUnlockedRetention = true;\n }\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n /**\n * @typedef {array} SetStorageClassResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass().\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @callback SetStorageClassCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Set the storage class for this file.\n *\n * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} storageClass The new storage class. (`standard`,\n * `nearline`, `coldline`, or `archive`)\n * **Note:** The storage classes `multi_regional` and `regional`\n * are now legacy and will be deprecated in the future.\n * @param {SetStorageClassOptions} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {SetStorageClassCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * file.setStorageClass('nearline', function(err, apiResponse) {\n * if (err) {\n * // Error handling omitted.\n * }\n *\n * // The storage class was updated successfully.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * file.setStorageClass('nearline').then(function() {});\n * ```\n */\n setStorageClass(storageClass, optionsOrCallback, callback) {\n callback =\n typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n const req = {\n ...options,\n // In case we get input like `storageClass`, convert to `storage_class`.\n storageClass: storageClass\n .replace(/-/g, '_')\n .replace(/([a-z])([A-Z])/g, (_, low, up) => {\n return low + '_' + up;\n })\n .toUpperCase(),\n };\n this.copy(this, req, (err, file, apiResponse) => {\n if (err) {\n callback(err, apiResponse);\n return;\n }\n this.metadata = file.metadata;\n callback(null, apiResponse);\n });\n }\n /**\n * Set a user project to be billed for all requests made from this File\n * object.\n *\n * @param {string} userProject The user project.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('albums');\n * const file = bucket.file('my-file');\n *\n * file.setUserProject('grape-spaceship-123');\n * ```\n */\n setUserProject(userProject) {\n this.bucket.setUserProject.call(this, userProject);\n }\n /**\n * This creates a resumable-upload upload stream.\n *\n * @param {Duplexify} stream - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startResumableUpload_(dup, options = {}) {\n var _a;\n (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {});\n const retryOptions = this.storage.retryOptions;\n if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) {\n retryOptions.autoRetry = false;\n }\n const uploadStream = resumableUpload.upload({\n authClient: this.storage.authClient,\n apiEndpoint: this.storage.apiEndpoint,\n bucket: this.bucket.name,\n customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),\n file: this.name,\n generation: this.generation,\n isPartialUpload: options.isPartialUpload,\n key: this.encryptionKey,\n kmsKeyName: this.kmsKeyName,\n metadata: options.metadata,\n offset: options.offset,\n predefinedAcl: options.predefinedAcl,\n private: options.private,\n public: options.public,\n uri: options.uri,\n userProject: options.userProject || this.userProject,\n retryOptions: { ...retryOptions },\n params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,\n chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize,\n highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark,\n [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY],\n });\n uploadStream\n .on('response', resp => {\n dup.emit('response', resp);\n })\n .on('uri', uri => {\n dup.emit('uri', uri);\n })\n .on('metadata', metadata => {\n this.metadata = metadata;\n dup.emit('metadata');\n })\n .on('finish', () => {\n dup.emit('complete');\n })\n .on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(uploadStream);\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n }\n /**\n * Takes a readable stream and pipes it to a remote file. Unlike\n * `startResumableUpload_`, which uses the resumable upload technique, this\n * method uses a simple upload (all or nothing).\n *\n * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.\n * @param {object=} options - Configuration object.\n *\n * @private\n */\n startSimpleUpload_(dup, options = {}) {\n var _a;\n (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {});\n const apiEndpoint = this.storage.apiEndpoint;\n const bucketName = this.bucket.name;\n const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`;\n const reqOpts = {\n qs: {\n name: this.name,\n },\n uri: uri,\n [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY],\n };\n if (this.generation !== undefined) {\n reqOpts.qs.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName !== undefined) {\n reqOpts.qs.kmsKeyName = this.kmsKeyName;\n }\n if (typeof options.timeout === 'number') {\n reqOpts.timeout = options.timeout;\n }\n if (options.userProject || this.userProject) {\n reqOpts.qs.userProject = options.userProject || this.userProject;\n }\n if (options.predefinedAcl) {\n reqOpts.qs.predefinedAcl = options.predefinedAcl;\n }\n else if (options.private) {\n reqOpts.qs.predefinedAcl = 'private';\n }\n else if (options.public) {\n reqOpts.qs.predefinedAcl = 'publicRead';\n }\n Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts);\n index_js_1.util.makeWritableStream(dup, {\n makeAuthenticatedRequest: (reqOpts) => {\n this.request(reqOpts, (err, body, resp) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n this.metadata = body;\n dup.emit('metadata', body);\n dup.emit('response', resp);\n dup.emit('complete');\n });\n },\n metadata: options.metadata,\n request: reqOpts,\n });\n }\n disableAutoRetryConditionallyIdempotent_(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n coreOpts, methodType, localPreconditionOptions) {\n var _a, _b, _c, _d;\n if ((typeof coreOpts === 'object' &&\n ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined &&\n methodType === bucket_js_1.AvailableServiceObjectMethods.delete &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n if ((typeof coreOpts === 'object' &&\n ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined &&\n (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&\n methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata &&\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryConditional) ||\n this.storage.retryOptions.idempotencyStrategy ===\n storage_js_1.IdempotencyStrategy.RetryNever) {\n this.storage.retryOptions.autoRetry = false;\n }\n }\n async getBufferFromReadable(readable) {\n const buf = [];\n for await (const chunk of readable) {\n buf.push(chunk);\n }\n return Buffer.concat(buf);\n }\n}\nexports.File = File;\n_File_instances = new WeakSet(), _File_validateIntegrity = \n/**\n *\n * @param hashCalculatingStream\n * @param verify\n * @returns {boolean} Returns `true` if valid, throws with error otherwise\n */\nasync function _File_validateIntegrity(hashCalculatingStream, verify = {}) {\n const metadata = this.metadata;\n // If we're doing validation, assume the worst\n let dataMismatch = !!(verify.crc32c || verify.md5);\n if (verify.crc32c && metadata.crc32c) {\n dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c);\n }\n if (verify.md5 && metadata.md5Hash) {\n dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash);\n }\n if (dataMismatch) {\n const errors = [];\n let code = '';\n let message = '';\n try {\n await this.delete();\n if (verify.md5 && !metadata.md5Hash) {\n code = 'MD5_NOT_AVAILABLE';\n message = FileExceptionMessages.MD5_NOT_AVAILABLE;\n }\n else {\n code = 'FILE_NO_UPLOAD';\n message = FileExceptionMessages.UPLOAD_MISMATCH;\n }\n }\n catch (e) {\n const error = e;\n code = 'FILE_NO_UPLOAD_DELETE';\n message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`;\n errors.push(error);\n }\n const error = new RequestError(message);\n error.code = code;\n error.errors = errors;\n throw error;\n }\n return true;\n};\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(File, {\n exclude: [\n 'cloudStorageURI',\n 'publicUrl',\n 'request',\n 'save',\n 'setEncryptionKey',\n 'shouldRetryBasedOnPreconditionAndIdempotencyStrat',\n 'getBufferFromReadable',\n ],\n});\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HashStreamValidator = void 0;\nconst crypto_1 = require(\"crypto\");\nconst stream_1 = require(\"stream\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nconst file_js_1 = require(\"./file.js\");\nclass HashStreamValidator extends stream_1.Transform {\n constructor(options = {}) {\n super();\n this.updateHashesOnly = false;\n _HashStreamValidator_crc32cHash.set(this, undefined);\n _HashStreamValidator_md5Hash.set(this, undefined);\n _HashStreamValidator_md5Digest.set(this, '');\n this.crc32cEnabled = !!options.crc32c;\n this.md5Enabled = !!options.md5;\n this.updateHashesOnly = !!options.updateHashesOnly;\n this.crc32cExpected = options.crc32cExpected;\n this.md5Expected = options.md5Expected;\n if (this.crc32cEnabled) {\n if (options.crc32cInstance) {\n __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, \"f\");\n }\n else {\n const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), \"f\");\n }\n }\n if (this.md5Enabled) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), \"f\");\n }\n }\n /**\n * Return the current CRC32C value, if available.\n */\n get crc32c() {\n var _a;\n return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\")) === null || _a === void 0 ? void 0 : _a.toString();\n }\n _flush(callback) {\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").digest('base64'), \"f\");\n }\n if (this.updateHashesOnly) {\n callback();\n return;\n }\n // If we're doing validation, assume the worst-- a data integrity\n // mismatch. 
If not, these tests won't be performed, and we can assume\n // the best.\n // We must check if the server decompressed the data on serve because hash\n // validation is not possible in this case.\n let failed = this.crc32cEnabled || this.md5Enabled;\n if (this.crc32cEnabled && this.crc32cExpected) {\n failed = !this.test('crc32c', this.crc32cExpected);\n }\n if (this.md5Enabled && this.md5Expected) {\n failed = !this.test('md5', this.md5Expected);\n }\n if (failed) {\n const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH);\n mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';\n callback(mismatchError);\n }\n else {\n callback();\n }\n }\n _transform(chunk, encoding, callback) {\n this.push(chunk, encoding);\n try {\n if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").update(chunk);\n if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\"))\n __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\").update(chunk);\n callback();\n }\n catch (e) {\n callback(e);\n }\n }\n test(hash, sum) {\n const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum;\n if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, \"f\").validate(check);\n }\n if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, \"f\")) {\n return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, \"f\") === check;\n }\n return false;\n }\n}\nexports.HashStreamValidator = HashStreamValidator;\n_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap();\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HmacKey = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst storage_js_1 = require(\"./storage.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the HMAC key.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name HmacKey#metadata\n * @type {object}\n */\n/**\n * An HmacKey object contains metadata of an HMAC key created from a\n * service account through the {@link Storage} client using\n * {@link Storage#createHmacKey}.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @class\n */\nclass HmacKey extends index_js_1.ServiceObject {\n /**\n * @typedef {object} HmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. 
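// --- Editor's illustrative sketch (not part of the bundled source above) ---
// Shows the HashStreamValidator Transform defined above computing a CRC32C value for a
// local file. `updateHashesOnly: true` skips the mismatch check in _flush, since no
// expected checksum is supplied here; the file path is a placeholder.
const fs = require('node:fs');
const {finished} = require('node:stream/promises');
const {HashStreamValidator} = require('@google-cloud/storage');

(async () => {
  const validator = new HashStreamValidator({crc32c: true, updateHashesOnly: true});
  fs.createReadStream('./some-local-file').pipe(validator).resume(); // drain the readable side
  await finished(validator);
  console.log('crc32c:', validator.crc32c); // running CRC32C value for the streamed bytes
})();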
If not provided,\n * the project ID used to instantiate the Storage client will be used.\n */\n /**\n * Constructs an HmacKey object.\n *\n * Note: this only create a local reference to an HMAC key, to create\n * an HMAC key, use {@link Storage#createHmacKey}.\n *\n * @param {Storage} storage The Storage instance this HMAC key is\n * attached to.\n * @param {string} accessId The unique accessId for this HMAC key.\n * @param {HmacKeyOptions} options Constructor configurations.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('access-id');\n * ```\n */\n constructor(storage, accessId, options) {\n const methods = {\n /**\n * @typedef {object} DeleteHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {array} DeleteHmacKeyResponse\n * @property {object} 0 The full API response.\n */\n /**\n * @callback DeleteHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Deletes an HMAC key.\n * Key state must be set to `INACTIVE` prior to deletion.\n * Caution: HMAC keys cannot be recovered once you delete them.\n *\n * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists.\n *\n * @method HmacKey#delete\n * @param {DeleteHmacKeyOptions} [options] Configuration options.\n * @param {DeleteHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Delete HMAC key after making the key inactive.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * hmacKey.delete((err) => {\n * if (err) {\n * console.error(err);\n * return;\n * }\n * // The HMAC key is deleted.\n * });\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * hmacKey\n * .setMetadata({state: 'INACTIVE'})\n * .then(() => {\n * return hmacKey.delete();\n * });\n * ```\n */\n delete: true,\n /**\n * @callback GetHmacKeyCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey this {@link HmacKey} instance.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} GetHmacKeyResponse\n * @property {HmacKey} 0 This {@link HmacKey} instance.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} GetHmacKeyOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * this {@link HmacKey} instance.\n *\n * HmacKey.get() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#get\n * @param {GetHmacKeyOptions} [options] Configuration options.\n * @param {GetHmacKeyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's Metadata.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * 
.get((err, hmacKey) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * // do something with the returned HmacKey object.\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .get()\n * .then((data) => {\n * const hmacKey = data[0];\n * });\n * ```\n */\n get: true,\n /**\n * @typedef {object} GetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * Retrieves and populate an HMAC key's metadata, and return\n * the HMAC key's metadata as an object.\n *\n * HmacKey.getMetadata() does not give the HMAC key secret, as\n * it is only returned on creation.\n *\n * The authenticated user must have `storage.hmacKeys.get` permission\n * for the project in which the key exists.\n *\n * @method HmacKey#getMetadata\n * @param {GetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * //-\n * // Get the HmacKey's metadata and populate to the metadata property.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata((err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .getMetadata()\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n getMetadata: true,\n /**\n * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update.\n * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'.\n * @property {string} [etag] Include an etag from a previous get HMAC key request\n * to perform safe read-modify-write.\n */\n /**\n * @typedef {object} SetHmacKeyMetadataOptions\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @callback HmacKeyMetadataCallback\n * @param {?Error} err Request error, if any.\n * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object.\n * @param {object} apiResponse The full API response.\n */\n /**\n * @typedef {array} HmacKeyMetadataResponse\n * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object.\n * @property {object} 1 The full API response.\n */\n /**\n * Updates the state of an HMAC key. 
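// --- Editor's illustrative sketch (not part of the bundled source above) ---
// Promise form of the HmacKey#setMetadata / HmacKey#delete flow shown in the JSDoc
// above: a key must be set to INACTIVE before it can be deleted. 'ACCESS_ID' is a
// placeholder for a real HMAC key access ID.
const {Storage} = require('@google-cloud/storage');

(async () => {
  const hmacKey = new Storage().hmacKey('ACCESS_ID');
  await hmacKey.setMetadata({state: 'INACTIVE'}); // deactivate first
  await hmacKey.delete();                         // deletion cannot be undone
})();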
See {@link SetHmacKeyMetadata} for\n * valid states.\n *\n * @method HmacKey#setMetadata\n * @param {SetHmacKeyMetadata} metadata The new metadata.\n * @param {SetHmacKeyMetadataOptions} [options] Configuration options.\n * @param {HmacKeyMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * const metadata = {\n * state: 'INACTIVE',\n * };\n *\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata, (err, hmacKeyMetadata) => {\n * if (err) {\n * // The request was an error.\n * console.error(err);\n * return;\n * }\n * console.log(hmacKeyMetadata);\n * });\n *\n * //-\n * // If the callback is omitted, a promise is returned.\n * //-\n * storage.hmacKey('ACCESS_ID')\n * .setMetadata(metadata)\n * .then((data) => {\n * const hmacKeyMetadata = data[0];\n * console.log(hmacKeyMetadata);\n * });\n * ```\n */\n setMetadata: {\n reqOpts: {\n method: 'PUT',\n },\n },\n };\n const projectId = (options && options.projectId) || storage.projectId;\n super({\n parent: storage,\n id: accessId,\n baseUrl: `/projects/${projectId}/hmacKeys`,\n methods,\n });\n this.storage = storage;\n this.instanceRetryValue = storage.retryOptions.autoRetry;\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways\n if (this.storage.retryOptions.idempotencyStrategy !==\n storage_js_1.IdempotencyStrategy.RetryAlways) {\n this.storage.retryOptions.autoRetry = false;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n cb =\n typeof optionsOrCallback === 'function'\n ? optionsOrCallback\n : cb;\n super\n .setMetadata(metadata, options)\n .then(resp => cb(null, ...resp))\n .catch(cb)\n .finally(() => {\n this.storage.retryOptions.autoRetry = this.instanceRetryValue;\n });\n }\n}\nexports.HmacKey = HmacKey;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(HmacKey);\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Iam = exports.IAMExceptionMessages = void 0;\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst util_js_1 = require(\"./util.js\");\nvar IAMExceptionMessages;\n(function (IAMExceptionMessages) {\n IAMExceptionMessages[\"POLICY_OBJECT_REQUIRED\"] = \"A policy object is required.\";\n IAMExceptionMessages[\"PERMISSIONS_REQUIRED\"] = \"Permissions are required.\";\n})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {}));\n/**\n * Get and set IAM policies for your Cloud Storage bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management}\n * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @constructor Iam\n *\n * @param {Bucket} bucket The parent instance.\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * // bucket.iam\n * ```\n */\nclass Iam {\n constructor(bucket) {\n this.request_ = bucket.request.bind(bucket);\n this.resourceId_ = 'buckets/' + bucket.getId();\n }\n /**\n * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy().\n * @property {number} [requestedPolicyVersion] The version of IAM policies to\n * request. If a policy with a condition is requested without setting\n * this, the server will return an error. This must be set to a value\n * of 3 to retrieve IAM policies containing conditions. This is to\n * prevent client code that isn't aware of IAM conditions from\n * interpreting and modifying policies incorrectly. 
The service might\n * return a policy with version lower than the one that was requested,\n * based on the feature syntax in the policy fetched.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n * @property {string} [userProject] The ID of the project which will be\n * billed for the request.\n */\n /**\n * @typedef {array} GetPolicyResponse\n * @property {Policy} 0 The policy.\n * @property {object} 1 The full API response.\n */\n /**\n * @typedef {object} Policy\n * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles.\n * @property {string} [policy.etag] Etags are used to perform a read-modify-write.\n * @property {number} [policy.version] The syntax schema version of the Policy.\n * To set an IAM policy with conditional binding, this field must be set to\n * 3 or greater.\n * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions}\n */\n /**\n * @typedef {object} PolicyBinding\n * @property {string} role Role that is assigned to members.\n * @property {string[]} members Specifies the identities requesting access for the bucket.\n * @property {Expr} [condition] The condition that is associated with this binding.\n */\n /**\n * @typedef {object} Expr\n * @property {string} [title] An optional title for the expression, i.e. a\n * short string describing its purpose. This can be used e.g. in UIs\n * which allow to enter the expression.\n * @property {string} [description] An optional description of the\n * expression. This is a longer text which describes the expression,\n * e.g. when hovered over it in a UI.\n * @property {string} expression Textual representation of an expression in\n * Common Expression Language syntax. The application context of the\n * containing message determines which well-known feature set of CEL\n * is supported.The condition that is associated with this binding.\n *\n * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions\n */\n /**\n * Get the IAM policy.\n *\n * @param {GetPolicyOptions} [options] Request options.\n * @param {GetPolicyCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * bucket.iam.getPolicy(\n * {requestedPolicyVersion: 3},\n * function(err, policy, apiResponse) {\n *\n * },\n * );\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.getPolicy({requestedPolicyVersion: 3})\n * .then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_view_bucket_iam_members\n * Example of retrieving a bucket's IAM policy:\n */\n getPolicy(optionsOrCallback, callback) {\n const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback);\n const qs = {};\n if (options.userProject) {\n qs.userProject = options.userProject;\n }\n if (options.requestedPolicyVersion !== null &&\n options.requestedPolicyVersion !== undefined) {\n qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion;\n }\n this.request_({\n uri: '/iam',\n qs,\n }, cb);\n }\n /**\n * Set the IAM policy.\n *\n * @throws {Error} If no policy is provided.\n *\n * @param {Policy} policy The 
policy.\n * @param {SetPolicyOptions} [options] Configuration options.\n * @param {SetPolicyCallback} callback Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation}\n * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * const myPolicy = {\n * bindings: [\n * {\n * role: 'roles/storage.admin',\n * members:\n * ['serviceAccount:myotherproject@appspot.gserviceaccount.com']\n * }\n * ]\n * };\n *\n * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.setPolicy(myPolicy).then(function(data) {\n * const policy = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/iam.js\n * region_tag:storage_add_bucket_iam_member\n * Example of adding to a bucket's IAM policy:\n *\n * @example include:samples/iam.js\n * region_tag:storage_remove_bucket_iam_member\n * Example of removing from a bucket's IAM policy:\n */\n setPolicy(policy, optionsOrCallback, callback) {\n if (policy === null || typeof policy !== 'object') {\n throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED);\n }\n const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback);\n let maxRetries;\n if (policy.etag === undefined) {\n maxRetries = 0;\n }\n this.request_({\n method: 'PUT',\n uri: '/iam',\n maxRetries,\n json: Object.assign({\n resourceId: this.resourceId_,\n }, policy),\n qs: options,\n }, cb);\n }\n /**\n * Test a set of permissions for a resource.\n *\n * @throws {Error} If permissions are not provided.\n *\n * @param {string|string[]} permissions The permission(s) to test for.\n * @param {TestIamPermissionsOptions} [options] Configuration object.\n * @param {TestIamPermissionsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n *\n * //-\n * // Test a single permission.\n * //-\n * const test = 'storage.buckets.delete';\n *\n * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": true\n * // }\n * });\n *\n * //-\n * // Test several permissions at once.\n * //-\n * const tests = [\n * 'storage.buckets.delete',\n * 'storage.buckets.get'\n * ];\n *\n * bucket.iam.testPermissions(tests, function(err, permissions) {\n * console.log(permissions);\n * // {\n * // \"storage.buckets.delete\": false,\n * // \"storage.buckets.get\": true\n * // }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * bucket.iam.testPermissions(test).then(function(data) {\n * const permissions = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n testPermissions(permissions, optionsOrCallback, callback) {\n if (!Array.isArray(permissions) && typeof permissions !== 'string') {\n throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED);\n }\n const { options, callback: cb } = (0, 
util_js_1.normalize)(optionsOrCallback, callback);\n const permissionsArray = Array.isArray(permissions)\n ? permissions\n : [permissions];\n const req = Object.assign({\n permissions: permissionsArray,\n }, options);\n this.request_({\n uri: '/iam/testPermissions',\n qs: req,\n useQuerystring: true,\n }, (err, resp) => {\n if (err) {\n cb(err, null, resp);\n return;\n }\n const availablePermissions = Array.isArray(resp.permissions)\n ? resp.permissions\n : [];\n const permissionsHash = permissionsArray.reduce((acc, permission) => {\n acc[permission] = availablePermissions.indexOf(permission) > -1;\n return acc;\n }, {});\n cb(null, permissionsHash, resp);\n });\n }\n}\nexports.Iam = Iam;\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Iam);\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.IdempotencyStrategy = exports.ApiError = void 0;\n/**\n * The `@google-cloud/storage` package has a single named export which is the\n * {@link Storage} (ES6) class, which should be instantiated with `new`.\n *\n * See {@link Storage} and {@link ClientConfig} for client methods and\n * configuration options.\n *\n * @module {Storage} @google-cloud/storage\n * @alias nodejs-storage\n *\n * @example\n * Install the client library with npm:\n * ```\n * npm install --save @google-cloud/storage\n * ```\n *\n * @example\n * Import the client library\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * ```\n *\n * @example\n * Create a client that uses Application\n * Default Credentials (ADC):\n * ```\n * const storage = new Storage();\n * ```\n *\n * @example\n * Create a client with explicit\n * credentials:\n * ```\n * const storage = new Storage({ projectId:\n * 'your-project-id', keyFilename: '/path/to/keyfile.json'\n * });\n * ```\n *\n * @example include:samples/quickstart.js\n * region_tag:storage_quickstart\n * Full quickstart example:\n */\nvar index_js_1 = require(\"./nodejs-common/index.js\");\nObject.defineProperty(exports, \"ApiError\", { 
enumerable: true, get: function () { return index_js_1.ApiError; } });\nvar storage_js_1 = require(\"./storage.js\");\nObject.defineProperty(exports, \"IdempotencyStrategy\", { enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } });\nObject.defineProperty(exports, \"Storage\", { enumerable: true, get: function () { return storage_js_1.Storage; } });\nvar bucket_js_1 = require(\"./bucket.js\");\nObject.defineProperty(exports, \"Bucket\", { enumerable: true, get: function () { return bucket_js_1.Bucket; } });\n__exportStar(require(\"./crc32c.js\"), exports);\nvar channel_js_1 = require(\"./channel.js\");\nObject.defineProperty(exports, \"Channel\", { enumerable: true, get: function () { return channel_js_1.Channel; } });\nvar file_js_1 = require(\"./file.js\");\nObject.defineProperty(exports, \"File\", { enumerable: true, get: function () { return file_js_1.File; } });\n__exportStar(require(\"./hash-stream-validator.js\"), exports);\nvar hmacKey_js_1 = require(\"./hmacKey.js\");\nObject.defineProperty(exports, \"HmacKey\", { enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } });\nvar iam_js_1 = require(\"./iam.js\");\nObject.defineProperty(exports, \"Iam\", { enumerable: true, get: function () { return iam_js_1.Iam; } });\nvar notification_js_1 = require(\"./notification.js\");\nObject.defineProperty(exports, \"Notification\", { enumerable: true, get: function () { return notification_js_1.Notification; } });\n__exportStar(require(\"./transfer-manager.js\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0;\nvar service_js_1 = require(\"./service.js\");\nObject.defineProperty(exports, \"Service\", { enumerable: true, get: function () { return service_js_1.Service; } });\nvar service_object_js_1 = require(\"./service-object.js\");\nObject.defineProperty(exports, \"ServiceObject\", { enumerable: true, get: function () { return service_object_js_1.ServiceObject; } });\nvar util_js_1 = require(\"./util.js\");\nObject.defineProperty(exports, \"ApiError\", { enumerable: true, get: function () { return util_js_1.ApiError; } });\nObject.defineProperty(exports, \"util\", { enumerable: true, get: function () { return util_js_1.util; } });\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ServiceObject = void 0;\n/*!\n * Copyright 2022 Google LLC. 
All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst events_1 = require(\"events\");\nconst util_js_1 = require(\"./util.js\");\n/**\n * ServiceObject is a base class, meant to be inherited from by a \"service\n * object,\" like a BigQuery dataset or Storage bucket.\n *\n * Most of the time, these objects share common functionality; they can be\n * created or deleted, and you can get or set their metadata.\n *\n * By inheriting from this class, a service object will be extended with these\n * shared behaviors. Note that any method can be overridden when the service\n * object requires specific behavior.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nclass ServiceObject extends events_1.EventEmitter {\n /*\n * @constructor\n * @alias module:common/service-object\n *\n * @private\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string} config.createMethod - The method which creates this object.\n * @param {string=} config.id - The identifier of the object. For example, the\n * name of a Storage bucket or Pub/Sub topic.\n * @param {object=} config.methods - A map of each method name that should be inherited.\n * @param {object} config.methods[].reqOpts - Default request options for this\n * particular method. A common use case is when `setMetadata` requires a\n * `PUT` method to override the default `PATCH`.\n * @param {object} config.parent - The parent service instance. For example, an\n * instance of Storage if the object is Bucket.\n */\n constructor(config) {\n super();\n this.metadata = {};\n this.baseUrl = config.baseUrl;\n this.parent = config.parent; // Parent class.\n this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc).\n this.createMethod = config.createMethod;\n this.methods = config.methods || {};\n this.interceptors = [];\n this.projectId = config.projectId;\n if (config.methods) {\n // This filters the ServiceObject instance (e.g. a \"File\") to only have\n // the configured methods. 
We make a couple of exceptions for core-\n // functionality (\"request()\" and \"getRequestInterceptors()\")\n Object.getOwnPropertyNames(ServiceObject.prototype)\n .filter(methodName => {\n return (\n // All ServiceObjects need `request` and `getRequestInterceptors`.\n // clang-format off\n !/^request/.test(methodName) &&\n !/^getRequestInterceptors/.test(methodName) &&\n // clang-format on\n // The ServiceObject didn't redefine the method.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] ===\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ServiceObject.prototype[methodName] &&\n // This method isn't wanted.\n !config.methods[methodName]);\n })\n .forEach(methodName => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this[methodName] = undefined;\n });\n }\n }\n create(optionsOrCallback, callback) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const args = [this.id];\n if (typeof optionsOrCallback === 'function') {\n callback = optionsOrCallback;\n }\n if (typeof optionsOrCallback === 'object') {\n args.push(optionsOrCallback);\n }\n // Wrap the callback to return *this* instance of the object, not the\n // newly-created one.\n // tslint: disable-next-line no-any\n function onCreate(...args) {\n const [err, instance] = args;\n if (!err) {\n self.metadata = instance.metadata;\n if (self.id && instance.metadata) {\n self.id = instance.metadata.id;\n }\n args[1] = self; // replace the created `instance` with this one.\n }\n callback(...args);\n }\n args.push(onCreate);\n // eslint-disable-next-line prefer-spread\n this.createMethod.apply(null, args);\n }\n delete(optionsOrCallback, cb) {\n var _a;\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const ignoreNotFound = options.ignoreNotFound;\n delete options.ignoreNotFound;\n const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {};\n const reqOpts = {\n method: 'DELETE',\n uri: '',\n ...methodConfig.reqOpts,\n qs: {\n ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs,\n ...options,\n },\n };\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n if (err) {\n if (err.code === 404 && ignoreNotFound) {\n err = null;\n }\n }\n callback(err, res);\n });\n }\n exists(optionsOrCallback, cb) {\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n this.get(options, err => {\n if (err) {\n if (err.code === 404) {\n callback(null, false);\n }\n else {\n callback(err);\n }\n return;\n }\n callback(null, true);\n });\n }\n get(optionsOrCallback, cb) {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const options = Object.assign({}, opts);\n const autoCreate = options.autoCreate && typeof this.create === 'function';\n delete options.autoCreate;\n function onCreate(err, instance, apiResponse) {\n if (err) {\n if (err.code === 409) {\n self.get(options, callback);\n return;\n }\n callback(err, null, apiResponse);\n return;\n }\n callback(null, instance, apiResponse);\n }\n this.getMetadata(options, (err, metadata) => {\n if (err) {\n if (err.code === 404 && autoCreate) {\n const args = [];\n if (Object.keys(options).length > 0) {\n args.push(options);\n }\n args.push(onCreate);\n self.create(...args);\n return;\n }\n callback(err, null, metadata);\n return;\n }\n callback(null, self, metadata);\n });\n }\n getMetadata(optionsOrCallback, cb) {\n var _a;\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.getMetadata === 'object' &&\n this.methods.getMetadata) ||\n {};\n const reqOpts = {\n uri: '',\n ...methodConfig.reqOpts,\n qs: {\n ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs,\n ...options,\n },\n };\n // The `request` method may have been overridden to hold any special\n // behavior. Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n const localInterceptors = this.interceptors\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n return this.parent.getRequestInterceptors().concat(localInterceptors);\n }\n setMetadata(metadata, optionsOrCallback, cb) {\n var _a, _b;\n const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb);\n const methodConfig = (typeof this.methods.setMetadata === 'object' &&\n this.methods.setMetadata) ||\n {};\n const reqOpts = {\n method: 'PATCH',\n uri: '',\n ...methodConfig.reqOpts,\n json: {\n ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json,\n ...metadata,\n },\n qs: {\n ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs,\n ...options,\n },\n };\n // The `request` method may have been overridden to hold any special\n // behavior. 
Ensure we call the original `request` method.\n ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => {\n this.metadata = body;\n callback(err, this.metadata, res);\n });\n }\n request_(reqOpts, callback) {\n reqOpts = { ...reqOpts };\n if (this.projectId) {\n reqOpts.projectId = this.projectId;\n }\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri];\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .filter(x => x.trim()) // Limit to non-empty strings.\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/');\n const childInterceptors = Array.isArray(reqOpts.interceptors_)\n ? reqOpts.interceptors_\n : [];\n const localInterceptors = [].slice.call(this.interceptors);\n reqOpts.interceptors_ = childInterceptors.concat(localInterceptors);\n if (reqOpts.shouldReturnStream) {\n return this.parent.requestStream(reqOpts);\n }\n this.parent.request(reqOpts, callback);\n }\n request(reqOpts, callback) {\n this.request_(reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = { ...reqOpts, shouldReturnStream: true };\n return this.request_(opts);\n }\n}\nexports.ServiceObject = ServiceObject;\n(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] });\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0;\nconst uuid = __importStar(require(\"uuid\"));\nconst util_js_1 = require(\"./util.js\");\nconst util_js_2 = require(\"../util.js\");\nexports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';\nclass Service {\n /**\n * Service is a base class, meant to be inherited from by a \"service,\" like\n * BigQuery or Storage.\n *\n * This handles making authenticated requests by exposing a `makeReq_`\n * function.\n *\n * @constructor\n * @alias module:common/service\n *\n * @param {object} config - Configuration object.\n * @param {string} config.baseUrl - The base URL to make API requests to.\n * @param {string[]} config.scopes - The scopes required for the request.\n * @param {object=} options - [Configuration object](#/docs).\n */\n constructor(config, options = {}) {\n this.baseUrl = config.baseUrl;\n this.apiEndpoint = config.apiEndpoint;\n this.timeout = options.timeout;\n this.globalInterceptors = Array.isArray(options.interceptors_)\n ? options.interceptors_\n : [];\n this.interceptors = [];\n this.packageJson = config.packageJson;\n this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN;\n this.projectIdRequired = config.projectIdRequired !== false;\n this.providedUserAgent = options.userAgent;\n const reqCfg = {\n ...config,\n projectIdRequired: this.projectIdRequired,\n projectId: this.projectId,\n authClient: options.authClient || config.authClient,\n credentials: options.credentials,\n keyFile: options.keyFilename,\n email: options.email,\n token: options.token,\n };\n this.makeAuthenticatedRequest =\n util_js_1.util.makeAuthenticatedRequestFactory(reqCfg);\n this.authClient = this.makeAuthenticatedRequest.authClient;\n this.getCredentials = this.makeAuthenticatedRequest.getCredentials;\n const isCloudFunctionEnv = !!process.env.FUNCTION_NAME;\n if (isCloudFunctionEnv) {\n this.interceptors.push({\n request(reqOpts) {\n reqOpts.forever = false;\n return reqOpts;\n },\n });\n }\n }\n /**\n * Return the user's custom request interceptors.\n */\n getRequestInterceptors() {\n // Interceptors should be returned in the order they were assigned.\n return [].slice\n .call(this.globalInterceptors)\n .concat(this.interceptors)\n .filter(interceptor => typeof interceptor.request === 'function')\n .map(interceptor => interceptor.request);\n }\n getProjectId(callback) {\n if (!callback) {\n return this.getProjectIdAsync();\n }\n this.getProjectIdAsync().then(p => callback(null, p), callback);\n }\n async getProjectIdAsync() {\n const projectId = await this.authClient.getProjectId();\n if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) {\n this.projectId = projectId;\n }\n return this.projectId;\n }\n request_(reqOpts, callback) {\n reqOpts = { ...reqOpts, timeout: this.timeout };\n const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0;\n const uriComponents = [this.baseUrl];\n if (this.projectIdRequired) {\n if (reqOpts.projectId) {\n uriComponents.push('projects');\n 
uriComponents.push(reqOpts.projectId);\n }\n else {\n uriComponents.push('projects');\n uriComponents.push(this.projectId);\n }\n }\n uriComponents.push(reqOpts.uri);\n if (isAbsoluteUrl) {\n uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri));\n }\n reqOpts.uri = uriComponents\n .map(uriComponent => {\n const trimSlashesRegex = /^\\/*|\\/*$/g;\n return uriComponent.replace(trimSlashesRegex, '');\n })\n .join('/')\n // Some URIs have colon separators.\n // Bad: https://.../projects/:list\n // Good: https://.../projects:list\n .replace(/\\/:/g, ':');\n const requestInterceptors = this.getRequestInterceptors();\n const interceptorArray = Array.isArray(reqOpts.interceptors_)\n ? reqOpts.interceptors_\n : [];\n interceptorArray.forEach(interceptor => {\n if (typeof interceptor.request === 'function') {\n requestInterceptors.push(interceptor.request);\n }\n });\n requestInterceptors.forEach(requestInterceptor => {\n reqOpts = requestInterceptor(reqOpts);\n });\n delete reqOpts.interceptors_;\n const pkg = this.packageJson;\n let userAgent = (0, util_js_2.getUserAgentString)();\n if (this.providedUserAgent) {\n userAgent = `${this.providedUserAgent} ${userAgent}`;\n }\n reqOpts.headers = {\n ...reqOpts.headers,\n 'User-Agent': userAgent,\n 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`,\n };\n if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) {\n reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`;\n }\n if (reqOpts.shouldReturnStream) {\n return this.makeAuthenticatedRequest(reqOpts);\n }\n else {\n this.makeAuthenticatedRequest(reqOpts, callback);\n }\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n * @param {function} callback - The callback function passed to `request`.\n */\n request(reqOpts, callback) {\n Service.prototype.request_.call(this, reqOpts, callback);\n }\n /**\n * Make an authenticated API request.\n *\n * @param {object} reqOpts - Request options that are passed to `request`.\n * @param {string} reqOpts.uri - A URI relative to the baseUrl.\n */\n requestStream(reqOpts) {\n const opts = { ...reqOpts, shouldReturnStream: true };\n return Service.prototype.request_.call(this, opts);\n }\n}\nexports.Service = Service;\n","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0;\n/*!\n * @module common/util\n */\nconst projectify_1 = require(\"@google-cloud/projectify\");\nconst ent = __importStar(require(\"ent\"));\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst retry_request_1 = __importDefault(require(\"retry-request\"));\nconst stream_1 = require(\"stream\");\nconst teeny_request_1 = require(\"teeny-request\");\nconst uuid = __importStar(require(\"uuid\"));\nconst service_js_1 = require(\"./service.js\");\nconst util_js_1 = require(\"../util.js\");\nconst duplexify_1 = __importDefault(require(\"duplexify\"));\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"../package-json-helper.cjs\");\nconst packageJson = (0, package_json_helper_cjs_1.getPackageJSON)();\n/**\n * A unique symbol for providing a `gccl-gcs-cmd` value\n * for the `X-Goog-API-Client` header.\n *\n * E.g. 
the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V`\n **/\nexports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD');\nconst requestDefaults = {\n timeout: 60000,\n gzip: true,\n forever: true,\n pool: {\n maxSockets: Infinity,\n },\n};\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n * @private\n */\nconst AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n * @private\n */\nconst MAX_RETRY_DEFAULT = 3;\n/**\n * Custom error type for API errors.\n *\n * @param {object} errorBody - Error object.\n */\nclass ApiError extends Error {\n constructor(errorBodyOrMessage) {\n super();\n if (typeof errorBodyOrMessage !== 'object') {\n this.message = errorBodyOrMessage || '';\n return;\n }\n const errorBody = errorBodyOrMessage;\n this.code = errorBody.code;\n this.errors = errorBody.errors;\n this.response = errorBody.response;\n try {\n this.errors = JSON.parse(this.response.body).error.errors;\n }\n catch (e) {\n this.errors = errorBody.errors;\n }\n this.message = ApiError.createMultiErrorMessage(errorBody, this.errors);\n Error.captureStackTrace(this);\n }\n /**\n * Pieces together an error message by combining all unique error messages\n * returned from a single GoogleError\n *\n * @private\n *\n * @param {GoogleErrorBody} err The original error.\n * @param {GoogleInnerError[]} [errors] Inner errors, if any.\n * @returns {string}\n */\n static createMultiErrorMessage(err, errors) {\n const messages = new Set();\n if (err.message) {\n messages.add(err.message);\n }\n if (errors && errors.length) {\n errors.forEach(({ message }) => messages.add(message));\n }\n else if (err.response && err.response.body) {\n messages.add(ent.decode(err.response.body.toString()));\n }\n else if (!err.message) {\n messages.add('A failure occurred during this request.');\n }\n let messageArr = Array.from(messages);\n if (messageArr.length > 1) {\n messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`);\n messageArr.unshift('Multiple errors occurred during the request. 
Please see the `errors` array for complete details.\\n');\n messageArr.push('\\n');\n }\n return messageArr.join('\\n');\n }\n}\nexports.ApiError = ApiError;\n/**\n * Custom error type for partial errors returned from the API.\n *\n * @param {object} b - Error object.\n */\nclass PartialFailureError extends Error {\n constructor(b) {\n super();\n const errorObject = b;\n this.errors = errorObject.errors;\n this.name = 'PartialFailureError';\n this.response = errorObject.response;\n this.message = ApiError.createMultiErrorMessage(errorObject, this.errors);\n }\n}\nexports.PartialFailureError = PartialFailureError;\nclass Util {\n constructor() {\n this.ApiError = ApiError;\n this.PartialFailureError = PartialFailureError;\n }\n /**\n * No op.\n *\n * @example\n * function doSomething(callback) {\n * callback = callback || noop;\n * }\n */\n noop() { }\n /**\n * Uniformly process an API response.\n *\n * @param {*} err - Error value.\n * @param {*} resp - Response value.\n * @param {*} body - Body value.\n * @param {function} callback - The callback function.\n */\n handleResp(err, resp, body, callback) {\n callback = callback || util.noop;\n const parsedResp = {\n err: err || null,\n ...(resp && util.parseHttpRespMessage(resp)),\n ...(body && util.parseHttpRespBody(body)),\n };\n // Assign the parsed body to resp.body, even if { json: false } was passed\n // as a request option.\n // We assume that nobody uses the previously unparsed value of resp.body.\n if (!parsedResp.err && resp && typeof parsedResp.body === 'object') {\n parsedResp.resp.body = parsedResp.body;\n }\n if (parsedResp.err && resp) {\n parsedResp.err.response = resp;\n }\n callback(parsedResp.err, parsedResp.body, parsedResp.resp);\n }\n /**\n * Sniff an incoming HTTP response message for errors.\n *\n * @param {object} httpRespMessage - An incoming HTTP response message from `request`.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.resp - The original response object.\n */\n parseHttpRespMessage(httpRespMessage) {\n const parsedHttpRespMessage = {\n resp: httpRespMessage,\n };\n if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) {\n // Unknown error. Format according to ApiError standard.\n parsedHttpRespMessage.err = new ApiError({\n errors: new Array(),\n code: httpRespMessage.statusCode,\n message: httpRespMessage.statusMessage,\n response: httpRespMessage,\n });\n }\n return parsedHttpRespMessage;\n }\n /**\n * Parse the response body from an HTTP request.\n *\n * @param {object} body - The response body.\n * @return {object} parsedHttpRespMessage - The parsed response.\n * @param {?error} parsedHttpRespMessage.err - An error detected.\n * @param {object} parsedHttpRespMessage.body - The original body value provided\n * will try to be JSON.parse'd. 
If it's successful, the parsed value will\n * be returned here, otherwise the original value and an error will be returned.\n */\n parseHttpRespBody(body) {\n const parsedHttpRespBody = {\n body,\n };\n if (typeof body === 'string') {\n try {\n parsedHttpRespBody.body = JSON.parse(body);\n }\n catch (err) {\n parsedHttpRespBody.body = body;\n }\n }\n if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) {\n // Error from JSON API.\n parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error);\n }\n return parsedHttpRespBody;\n }\n /**\n * Take a Duplexify stream, fetch an authenticated connection header, and\n * create an outgoing writable stream.\n *\n * @param {Duplexify} dup - Duplexify stream.\n * @param {object} options - Configuration object.\n * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through.\n * @param {object} options.metadata - Metadata to send at the head of the request.\n * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object.\n * @param {string=} options.request.method - Default: \"POST\".\n * @param {string=} options.request.qs.uploadType - Default: \"multipart\".\n * @param {string=} options.streamContentType - Default: \"application/octet-stream\".\n * @param {function} onComplete - Callback, executed after the writable Request stream has completed.\n */\n makeWritableStream(dup, options, onComplete) {\n var _a;\n onComplete = onComplete || util.noop;\n const writeStream = new ProgressStream();\n writeStream.on('progress', evt => dup.emit('progress', evt));\n dup.setWritable(writeStream);\n const defaultReqOpts = {\n method: 'POST',\n qs: {\n uploadType: 'multipart',\n },\n timeout: 0,\n maxRetries: 0,\n };\n const metadata = options.metadata || {};\n const reqOpts = {\n ...defaultReqOpts,\n ...options.request,\n qs: {\n ...defaultReqOpts.qs,\n ...(_a = options.request) === null || _a === void 0 ? void 0 : _a.qs,\n },\n multipart: [\n {\n 'Content-Type': 'application/json',\n body: JSON.stringify(metadata),\n },\n {\n 'Content-Type': metadata.contentType || 'application/octet-stream',\n body: writeStream,\n },\n ],\n };\n options.makeAuthenticatedRequest(reqOpts, {\n onAuthenticated(err, authenticatedReqOpts) {\n if (err) {\n dup.destroy(err);\n return;\n }\n requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]);\n const request = teeny_request_1.teenyRequest.defaults(requestDefaults);\n request(authenticatedReqOpts, (err, resp, body) => {\n util.handleResp(err, resp, body, (err, data) => {\n if (err) {\n dup.destroy(err);\n return;\n }\n dup.emit('response', resp);\n onComplete(data);\n });\n });\n },\n });\n }\n /**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted. 
This is used for rate limit\n * related errors as well as intermittent server errors.\n *\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\n shouldRetryRequest(err) {\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = e.reason;\n if (reason === 'rateLimitExceeded') {\n return true;\n }\n if (reason === 'userRateLimitExceeded') {\n return true;\n }\n if (reason && reason.includes('EAI_AGAIN')) {\n return true;\n }\n }\n }\n }\n return false;\n }\n /**\n * Get a function for making authenticated requests.\n *\n * @param {object} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {object=} config.credentials - Credentials object.\n * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false.\n * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false.\n * @param {string=} config.email - Account email address, required for PEM/P12 usage.\n * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3)\n * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile.\n * @param {array} config.scopes - Array of scopes required for the API.\n */\n makeAuthenticatedRequestFactory(config) {\n const googleAutoAuthConfig = { ...config };\n if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) {\n delete googleAutoAuthConfig.projectId;\n }\n let authClient;\n if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) {\n // Use an existing `GoogleAuth`\n authClient = googleAutoAuthConfig.authClient;\n }\n else {\n // Pass an `AuthClient` to `GoogleAuth`, if available\n const config = {\n ...googleAutoAuthConfig,\n authClient: googleAutoAuthConfig.authClient,\n };\n authClient = new google_auth_library_1.GoogleAuth(config);\n }\n function makeAuthenticatedRequest(reqOpts, optionsOrCallback) {\n let stream;\n let projectId;\n const reqConfig = { ...config };\n let activeRequest_;\n if (!optionsOrCallback) {\n stream = (0, duplexify_1.default)();\n reqConfig.stream = stream;\n }\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined;\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : undefined;\n async function setProjectId() {\n projectId = await authClient.getProjectId();\n }\n const onAuthenticated = async (err, authenticatedReqOpts) => {\n const authLibraryError = err;\n const autoAuthFailed = err &&\n typeof err.message === 'string' &&\n err.message.indexOf('Could not load the default credentials') > -1;\n if (autoAuthFailed) {\n // Even though authentication failed, the API might not actually\n // care.\n authenticatedReqOpts = reqOpts;\n }\n if (!err || autoAuthFailed) {\n try {\n // Try with existing `projectId` value\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n if (e instanceof projectify_1.MissingProjectIdError) {\n // A `projectId` was required, but we don't have one.\n try {\n // Attempt to get the `projectId`\n await setProjectId();\n authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId);\n err = null;\n }\n catch (e) {\n // Re-use the \"Could not load the default credentials error\" if\n // auto auth failed.\n err = err || e;\n }\n }\n else {\n // Some other error unrelated to missing `projectId`\n err = err || e;\n }\n }\n }\n if (err) {\n if (stream) {\n stream.destroy(err);\n }\n else {\n const fn = options && options.onAuthenticated\n ? options.onAuthenticated\n : callback;\n fn(err);\n }\n return;\n }\n if (options && options.onAuthenticated) {\n options.onAuthenticated(null, authenticatedReqOpts);\n }\n else {\n activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => {\n if (apiResponseError &&\n apiResponseError.code === 401 &&\n authLibraryError) {\n // Re-use the \"Could not load the default credentials error\" if\n // the API request failed due to missing credentials.\n apiResponseError = authLibraryError;\n }\n callback(apiResponseError, ...params);\n });\n }\n };\n const prepareRequest = async () => {\n try {\n const getProjectId = async () => {\n if (config.projectId &&\n config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) {\n // The user provided a project ID. We don't need to check with the\n // auth client, it could be incorrect.\n return config.projectId;\n }\n if (config.projectIdRequired === false) {\n // A projectId is not required. Return the default.\n return service_js_1.DEFAULT_PROJECT_ID_TOKEN;\n }\n return setProjectId();\n };\n const authorizeRequest = async () => {\n if (reqConfig.customEndpoint &&\n !reqConfig.useAuthWithCustomEndpoint) {\n // Using a custom API override. Do not use `google-auth-library` for\n // authentication. 
(ex: connecting to a local Datastore server)\n return reqOpts;\n }\n else {\n return authClient.authorizeRequest(reqOpts);\n }\n };\n const [_projectId, authorizedReqOpts] = await Promise.all([\n getProjectId(),\n authorizeRequest(),\n ]);\n if (_projectId) {\n projectId = _projectId;\n }\n return onAuthenticated(null, authorizedReqOpts);\n }\n catch (e) {\n return onAuthenticated(e);\n }\n };\n prepareRequest();\n if (stream) {\n return stream;\n }\n return {\n abort() {\n setImmediate(() => {\n if (activeRequest_) {\n activeRequest_.abort();\n activeRequest_ = null;\n }\n });\n },\n };\n }\n const mar = makeAuthenticatedRequest;\n mar.getCredentials = authClient.getCredentials.bind(authClient);\n mar.authClient = authClient;\n return mar;\n }\n /**\n * Make a request through the `retryRequest` module with built-in error\n * handling and exponential back off.\n *\n * @param {object} reqOpts - Request options in the format `request` expects.\n * @param {object=} config - Configuration object.\n * @param {boolean=} config.autoRetry - Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * (default: true)\n * @param {number=} config.maxRetries - Maximum number of automatic retries\n * attempted before returning the error. (default: 3)\n * @param {object=} config.request - HTTP module for request calls.\n * @param {function} callback - The callback function.\n */\n makeRequest(reqOpts, config, callback) {\n var _a, _b, _c, _d, _e;\n let autoRetryValue = AUTO_RETRY_DEFAULT;\n if (config.autoRetry !== undefined) {\n autoRetryValue = config.autoRetry;\n }\n else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) {\n autoRetryValue = config.retryOptions.autoRetry;\n }\n let maxRetryValue = MAX_RETRY_DEFAULT;\n if (config.maxRetries !== undefined) {\n maxRetryValue = config.maxRetries;\n }\n else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) {\n maxRetryValue = config.retryOptions.maxRetries;\n }\n requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]);\n const options = {\n request: teeny_request_1.teenyRequest.defaults(requestDefaults),\n retries: autoRetryValue !== false ? maxRetryValue : 0,\n noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0,\n shouldRetryFn(httpRespMessage) {\n var _a, _b;\n const err = util.parseHttpRespMessage(httpRespMessage).err;\n if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) {\n return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err));\n }\n return err && util.shouldRetryRequest(err);\n },\n maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay,\n retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier,\n totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout,\n };\n if (typeof reqOpts.maxRetries === 'number') {\n options.retries = reqOpts.maxRetries;\n options.noResponseRetries = reqOpts.maxRetries;\n }\n if (!config.stream) {\n return (0, retry_request_1.default)(reqOpts, options, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (err, response, body) => {\n util.handleResp(err, response, body, callback);\n });\n }\n const dup = config.stream;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let requestStream;\n const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET';\n if (isGetRequest) {\n requestStream = (0, retry_request_1.default)(reqOpts, options);\n dup.setReadable(requestStream);\n }\n else {\n // Streaming writable HTTP requests cannot be retried.\n requestStream = options.request(reqOpts);\n dup.setWritable(requestStream);\n }\n // Replay the Request events back to the stream.\n requestStream\n .on('error', dup.destroy.bind(dup))\n .on('response', dup.emit.bind(dup, 'response'))\n .on('complete', dup.emit.bind(dup, 'complete'));\n dup.abort = requestStream.abort;\n return dup;\n }\n /**\n * Decorate the options about to be made in a request.\n *\n * @param {object} reqOpts - The options to be passed to `request`.\n * @param {string} projectId - The project ID.\n * @return {object} reqOpts - The decorated reqOpts.\n */\n decorateRequest(reqOpts, projectId) {\n delete reqOpts.autoPaginate;\n delete reqOpts.autoPaginateVal;\n delete reqOpts.objectMode;\n if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') {\n delete reqOpts.qs.autoPaginate;\n delete reqOpts.qs.autoPaginateVal;\n reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId);\n }\n if (Array.isArray(reqOpts.multipart)) {\n reqOpts.multipart = reqOpts.multipart.map(part => {\n return (0, projectify_1.replaceProjectIdToken)(part, projectId);\n });\n }\n if (reqOpts.json !== null && typeof reqOpts.json === 'object') {\n delete reqOpts.json.autoPaginate;\n delete reqOpts.json.autoPaginateVal;\n reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId);\n }\n reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId);\n return reqOpts;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n isCustomType(unknown, module) {\n function getConstructorName(obj) {\n return obj.constructor && obj.constructor.name.toLowerCase();\n }\n const moduleNameParts = module.split('/');\n const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase();\n const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase();\n if (subModuleName && getConstructorName(unknown) !== subModuleName) {\n return false;\n }\n let walkingModule = unknown;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n if (getConstructorName(walkingModule) === parentModuleName) {\n return true;\n }\n walkingModule = walkingModule.parent;\n if (!walkingModule) {\n return false;\n }\n }\n }\n /**\n * Given two parameters, figure out if this is either:\n * - Just a callback function\n * - An options object, and then a callback function\n * @param optionsOrCallback An options object or callback.\n * @param cb A potentially undefined callback.\n */\n maybeOptionsOrCallback(optionsOrCallback, cb) {\n return typeof optionsOrCallback === 'function'\n ? 
[{}, optionsOrCallback]\n : [optionsOrCallback, cb];\n }\n _getDefaultHeaders(gcclGcsCmd) {\n const headers = {\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`,\n };\n if (gcclGcsCmd) {\n headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`;\n }\n return headers;\n }\n}\nexports.Util = Util;\n/**\n * Basic Passthrough Stream that records the number of bytes read\n * every time the cursor is moved.\n */\nclass ProgressStream extends stream_1.Transform {\n constructor() {\n super(...arguments);\n this.bytesRead = 0;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _transform(chunk, encoding, callback) {\n this.bytesRead += chunk.length;\n this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' });\n this.push(chunk);\n callback();\n }\n}\nconst util = new Util();\nexports.util = util;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Notification = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\n/**\n * The API-formatted resource description of the notification.\n *\n * Note: This is not guaranteed to be up-to-date when accessed. To get the\n * latest record, call the `getMetadata()` method.\n *\n * @name Notification#metadata\n * @type {object}\n */\n/**\n * A Notification object is created from your {@link Bucket} object using\n * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub\n * notifications.\n *\n * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage}\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket The bucket instance this notification is attached to.\n * @param {string} id The ID of the notification.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n *\n * const notification = myBucket.notification('1');\n * ```\n */\nclass Notification extends index_js_1.ServiceObject {\n constructor(bucket, id) {\n const requestQueryObject = {};\n const methods = {\n /**\n * Creates a notification subscription for the bucket.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert}\n * @method Notification#create\n *\n * @param {Topic|string} topic The Cloud PubSub topic to which this\n * subscription publishes. 
If the project ID is omitted, the current\n * project ID will be used.\n *\n * Acceptable formats are:\n * - `projects/grape-spaceship-123/topics/my-topic`\n *\n * - `my-topic`\n * @param {CreateNotificationRequest} [options] Metadata to set for\n * the notification.\n * @param {CreateNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a valid topic is not provided.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.create(function(err, notification, apiResponse) {\n * if (!err) {\n * // The notification was created successfully.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.create().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n create: true,\n /**\n * @typedef {array} DeleteNotificationResponse\n * @property {object} 0 The full API response.\n */\n /**\n * Permanently deletes a notification subscription.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {DeleteNotificationCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.delete(function(err, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.delete().then(function(data) {\n * const apiResponse = data[0];\n * });\n *\n * ```\n * @example include:samples/deleteNotification.js\n * region_tag:storage_delete_bucket_notification\n * Another example:\n */\n delete: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * Get a notification and its metadata if it exists.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * See {@link Bucket#createNotification} for create options.\n * @param {boolean} [options.autoCreate] Automatically create the object if\n * it does not exist. 
Default: `false`.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.get(function(err, notification, apiResponse) {\n * // `notification.metadata` has been populated.\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.get().then(function(data) {\n * const notification = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n get: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * Get the notification's metadata.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation}\n *\n * @param {object} [options] Configuration options.\n * @param {string} [options.userProject] The ID of the project which will be\n * billed for the request.\n * @param {GetNotificationMetadataCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.getMetadata(function(err, metadata, apiResponse) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.getMetadata().then(function(data) {\n * const metadata = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/getMetadataNotifications.js\n * region_tag:storage_print_pubsub_bucket_notification\n * Another example:\n */\n getMetadata: {\n reqOpts: {\n qs: requestQueryObject,\n },\n },\n /**\n * @typedef {array} NotificationExistsResponse\n * @property {boolean} 0 Whether the notification exists or not.\n */\n /**\n * @callback NotificationExistsCallback\n * @param {?Error} err Request error, if any.\n * @param {boolean} exists Whether the notification exists or not.\n */\n /**\n * Check if the notification exists.\n *\n * @method Notification#exists\n * @param {NotificationExistsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const myBucket = storage.bucket('my-bucket');\n * const notification = myBucket.notification('1');\n *\n * notification.exists(function(err, exists) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * notification.exists().then(function(data) {\n * const exists = data[0];\n * });\n * ```\n */\n exists: true,\n };\n super({\n parent: bucket,\n baseUrl: '/notificationConfigs',\n id: id.toString(),\n createMethod: bucket.createNotification.bind(bucket),\n methods,\n });\n }\n}\nexports.Notification = Notification;\n/*! 
Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Notification);\n","\"use strict\";\n// Copyright 2022 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nvar _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;\nconst abort_controller_1 = __importDefault(require(\"abort-controller\"));\nconst crypto_1 = require(\"crypto\");\nconst gaxios = __importStar(require(\"gaxios\"));\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst stream_1 = require(\"stream\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nconst uuid = __importStar(require(\"uuid\"));\nconst util_js_1 = require(\"./util.js\");\nconst util_js_2 = require(\"./nodejs-common/util.js\");\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\nconst NOT_FOUND_STATUS_CODE = 404;\nconst RESUMABLE_INCOMPLETE_STATUS_CODE = 308;\nconst DEFAULT_API_ENDPOINT_REGEX = /.*\\.googleapis\\.com/;\nconst packageJson = (0, package_json_helper_cjs_1.getPackageJSON)();\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\nclass Upload extends stream_1.Writable {\n constructor(cfg) {\n var _a;\n super(cfg);\n _Upload_instances.add(this);\n this.numBytesWritten = 0;\n this.numRetries = 0;\n this.currentInvocationId = {\n checkUploadStatus: uuid.v4(),\n chunk: uuid.v4(),\n uri: uuid.v4(),\n };\n /**\n * A cache of buffers written to this instance, ready for consuming\n */\n this.writeBuffers = [];\n this.numChunksReadInRequest = 0;\n /**\n * An array of buffers used for caching the most recent upload chunk.\n * We should not assume that the server received all bytes sent in the request.\n * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n */\n this.localWriteCache = [];\n this.localWriteCacheByteLength = 0;\n this.upstreamEnded = false;\n _Upload_gcclGcsCmd.set(this, void 0);\n cfg = cfg || {};\n if (!cfg.bucket || !cfg.file) {\n throw new Error('A bucket and file name are required');\n }\n if (cfg.offset && !cfg.uri) {\n throw new RangeError('Cannot provide an `offset` without providing a `uri`');\n }\n if (cfg.isPartialUpload && !cfg.chunkSize) {\n throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`');\n }\n cfg.authConfig = cfg.authConfig || {};\n cfg.authConfig.scopes = [\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ];\n this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig);\n this.apiEndpoint = 'https://storage.googleapis.com';\n if (cfg.apiEndpoint) {\n this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint);\n if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) {\n this.authClient = gaxios;\n }\n }\n this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`;\n this.bucket = cfg.bucket;\n const cacheKeyElements = [cfg.bucket, cfg.file];\n if (typeof cfg.generation === 'number') {\n cacheKeyElements.push(`${cfg.generation}`);\n }\n this.cacheKey = cacheKeyElements.join('/');\n this.customRequestOptions = cfg.customRequestOptions || {};\n this.file = cfg.file;\n this.generation = cfg.generation;\n this.kmsKeyName = cfg.kmsKeyName;\n this.metadata = cfg.metadata || {};\n this.offset = cfg.offset;\n this.origin = cfg.origin;\n this.params = cfg.params || {};\n this.userProject = cfg.userProject;\n this.chunkSize = cfg.chunkSize;\n this.retryOptions = cfg.retryOptions;\n this.isPartialUpload = 
(_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false;\n if (cfg.key) {\n const base64Key = Buffer.from(cfg.key).toString('base64');\n this.encryption = {\n key: base64Key,\n hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'),\n };\n }\n this.predefinedAcl = cfg.predefinedAcl;\n if (cfg.private)\n this.predefinedAcl = 'private';\n if (cfg.public)\n this.predefinedAcl = 'publicRead';\n const autoRetry = cfg.retryOptions.autoRetry;\n this.uriProvidedManually = !!cfg.uri;\n this.uri = cfg.uri;\n if (this.offset) {\n // we're resuming an incomplete upload\n this.numBytesWritten = this.offset;\n }\n this.numRetries = 0; // counter for number of retries currently executed\n if (!autoRetry) {\n cfg.retryOptions.maxRetries = 0;\n }\n this.timeOfFirstRequest = Date.now();\n const contentLength = cfg.metadata\n ? Number(cfg.metadata.contentLength)\n : NaN;\n this.contentLength = isNaN(contentLength) ? '*' : contentLength;\n __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], \"f\");\n this.once('writing', () => {\n if (this.uri) {\n this.continueUploading();\n }\n else {\n this.createURI(err => {\n if (err) {\n return this.destroy(err);\n }\n this.startUploading();\n return;\n });\n }\n });\n }\n /**\n * Prevent 'finish' event until the upload has succeeded.\n *\n * @param fireFinishEvent The finish callback\n */\n _final(fireFinishEvent = () => { }) {\n this.upstreamEnded = true;\n this.once('uploadFinished', fireFinishEvent);\n process.nextTick(() => {\n this.emit('upstreamFinished');\n // it's possible `_write` may not be called - namely for empty object uploads\n this.emit('writing');\n });\n }\n /**\n * Handles incoming data from upstream\n *\n * @param chunk The chunk to append to the buffer\n * @param encoding The encoding of the chunk\n * @param readCallback A callback for when the buffer has been read downstream\n */\n _write(chunk, encoding, readCallback = () => { }) {\n // Backwards-compatible event\n this.emit('writing');\n this.writeBuffers.push(typeof chunk === 'string' ? 
Buffer.from(chunk, encoding) : chunk);\n this.once('readFromChunkBuffer', readCallback);\n process.nextTick(() => this.emit('wroteToChunkBuffer'));\n }\n /**\n * Prepends the local buffer to write buffer and resets it.\n *\n * @param keepLastBytes number of bytes to keep from the end of the local buffer.\n */\n prependLocalBufferToUpstream(keepLastBytes) {\n // Typically, the upstream write buffers should be smaller than the local\n // cache, so we can save time by setting the local cache as the new\n // upstream write buffer array and appending the old array to it\n let initialBuffers = [];\n if (keepLastBytes) {\n // we only want the last X bytes\n let bytesKept = 0;\n while (keepLastBytes > bytesKept) {\n // load backwards because we want the last X bytes\n // note: `localWriteCacheByteLength` is reset below\n let buf = this.localWriteCache.pop();\n if (!buf)\n break;\n bytesKept += buf.byteLength;\n if (bytesKept > keepLastBytes) {\n // we have gone over the amount desired, let's keep the last X bytes\n // of this buffer\n const diff = bytesKept - keepLastBytes;\n buf = buf.subarray(diff);\n bytesKept -= diff;\n }\n initialBuffers.unshift(buf);\n }\n }\n else {\n // we're keeping all of the local cache, simply use it as the initial buffer\n initialBuffers = this.localWriteCache;\n }\n // Append the old upstream to the new\n const append = this.writeBuffers;\n this.writeBuffers = initialBuffers;\n for (const buf of append) {\n this.writeBuffers.push(buf);\n }\n // reset last buffers sent\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n }\n /**\n * Retrieves data from upstream's buffer.\n *\n * @param limit The maximum amount to return from the buffer.\n */\n *pullFromChunkBuffer(limit) {\n while (limit) {\n const buf = this.writeBuffers.shift();\n if (!buf)\n break;\n let bufToYield = buf;\n if (buf.byteLength > limit) {\n bufToYield = buf.subarray(0, limit);\n this.writeBuffers.unshift(buf.subarray(limit));\n limit = 0;\n }\n else {\n limit -= buf.byteLength;\n }\n yield bufToYield;\n // Notify upstream we've read from the buffer and we're able to consume\n // more. It can also potentially send more data down as we're currently\n // iterating.\n this.emit('readFromChunkBuffer');\n }\n }\n /**\n * A handler for determining if data is ready to be read from upstream.\n *\n * @returns If there will be more chunks to read in the future\n */\n async waitForNextChunk() {\n const willBeMoreChunks = await new Promise(resolve => {\n // There's data available - it should be digested\n if (this.writeBuffers.length) {\n return resolve(true);\n }\n // The upstream writable ended, we shouldn't expect any more data.\n if (this.upstreamEnded) {\n return resolve(false);\n }\n // Nothing immediate seems to be determined. 
We need to prepare some\n // listeners to determine next steps...\n const wroteToChunkBufferCallback = () => {\n removeListeners();\n return resolve(true);\n };\n const upstreamFinishedCallback = () => {\n removeListeners();\n // this should be the last chunk, if there's anything there\n if (this.writeBuffers.length)\n return resolve(true);\n return resolve(false);\n };\n // Remove listeners when we're ready to callback.\n const removeListeners = () => {\n this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback);\n this.removeListener('upstreamFinished', upstreamFinishedCallback);\n };\n // If there's data recently written it should be digested\n this.once('wroteToChunkBuffer', wroteToChunkBufferCallback);\n // If the upstream finishes let's see if there's anything to grab\n this.once('upstreamFinished', upstreamFinishedCallback);\n });\n return willBeMoreChunks;\n }\n /**\n * Reads data from upstream up to the provided `limit`.\n * Ends when the limit has reached or no data is expected to be pushed from upstream.\n *\n * @param limit The most amount of data this iterator should return. `Infinity` by default.\n */\n async *upstreamIterator(limit = Infinity) {\n // read from upstream chunk buffer\n while (limit && (await this.waitForNextChunk())) {\n // read until end or limit has been reached\n for (const chunk of this.pullFromChunkBuffer(limit)) {\n limit -= chunk.byteLength;\n yield chunk;\n }\n }\n }\n createURI(callback) {\n if (!callback) {\n return this.createURIAsync();\n }\n this.createURIAsync().then(r => callback(null, r), callback);\n }\n async createURIAsync() {\n const metadata = { ...this.metadata };\n const headers = {};\n // Delete content length and content type from metadata if they exist.\n // These are headers and should not be sent as part of the metadata.\n if (metadata.contentLength) {\n headers['X-Upload-Content-Length'] = metadata.contentLength.toString();\n delete metadata.contentLength;\n }\n if (metadata.contentType) {\n headers['X-Upload-Content-Type'] = metadata.contentType;\n delete metadata.contentType;\n }\n let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`;\n if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")) {\n googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")}`;\n }\n // Check if headers already exist before creating new ones\n const reqOpts = {\n method: 'POST',\n url: [this.baseURI, this.bucket, 'o'].join('/'),\n params: Object.assign({\n name: this.file,\n uploadType: 'resumable',\n }, this.params),\n data: metadata,\n headers: {\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': googAPIClient,\n ...headers,\n },\n };\n if (metadata.contentLength) {\n reqOpts.headers['X-Upload-Content-Length'] =\n metadata.contentLength.toString();\n }\n if (metadata.contentType) {\n reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType;\n }\n if (typeof this.generation !== 'undefined') {\n reqOpts.params.ifGenerationMatch = this.generation;\n }\n if (this.kmsKeyName) {\n reqOpts.params.kmsKeyName = this.kmsKeyName;\n }\n if (this.predefinedAcl) {\n reqOpts.params.predefinedAcl = this.predefinedAcl;\n }\n if (this.origin) {\n reqOpts.headers.Origin = this.origin;\n }\n const uri = await (0, async_retry_1.default)(async (bail) => {\n var _a, _b, _c;\n try {\n const res = await this.makeRequest(reqOpts);\n // We have successfully got a URI we 
can now create a new invocation id\n this.currentInvocationId.uri = uuid.v4();\n return res.headers.location;\n }\n catch (err) {\n const e = err;\n const apiError = {\n code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status,\n name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText,\n message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText,\n errors: [\n {\n reason: e.code,\n },\n ],\n };\n if (this.retryOptions.maxRetries > 0 &&\n this.retryOptions.retryableErrorFn(apiError)) {\n throw e;\n }\n else {\n return bail(e);\n }\n }\n }, {\n retries: this.retryOptions.maxRetries,\n factor: this.retryOptions.retryDelayMultiplier,\n maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds\n maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds\n });\n this.uri = uri;\n this.offset = 0;\n // emit the newly generated URI for future reuse, if necessary.\n this.emit('uri', uri);\n return uri;\n }\n async continueUploading() {\n var _a;\n (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset());\n return this.startUploading();\n }\n async startUploading() {\n const multiChunkMode = !!this.chunkSize;\n let responseReceived = false;\n this.numChunksReadInRequest = 0;\n if (!this.offset) {\n this.offset = 0;\n }\n // Check if the offset (server) is too far behind the current stream\n if (this.offset < this.numBytesWritten) {\n const delta = this.numBytesWritten - this.offset;\n const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`;\n this.emit('error', new RangeError(message));\n return;\n }\n // Check if we should 'fast-forward' to the relevant data to upload\n if (this.numBytesWritten < this.offset) {\n // 'fast-forward' to the byte where we need to upload.\n // only push data from the byte after the one we left off on\n const fastForwardBytes = this.offset - this.numBytesWritten;\n for await (const _chunk of this.upstreamIterator(fastForwardBytes)) {\n _chunk; // discard the data up until the point we want\n }\n this.numBytesWritten = this.offset;\n }\n let expectedUploadSize = undefined;\n // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available\n if (typeof this.contentLength === 'number') {\n expectedUploadSize = this.contentLength - this.numBytesWritten;\n }\n // `expectedUploadSize` should be no more than the `chunkSize`.\n // It's possible this is the last chunk request for a multiple\n // chunk upload, thus smaller than the chunk size.\n if (this.chunkSize) {\n expectedUploadSize = expectedUploadSize\n ? Math.min(this.chunkSize, expectedUploadSize)\n : this.chunkSize;\n }\n // A queue for the upstream data\n const upstreamQueue = this.upstreamIterator(expectedUploadSize);\n // The primary read stream for this request. 
This stream retrieves no more\n // than the exact requested amount from upstream.\n const requestStream = new stream_1.Readable({\n read: async () => {\n // Don't attempt to retrieve data upstream if we already have a response\n if (responseReceived)\n requestStream.push(null);\n const result = await upstreamQueue.next();\n if (result.value) {\n this.numChunksReadInRequest++;\n if (multiChunkMode) {\n // save ever buffer used in the request in multi-chunk mode\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_addLocalBufferCache).call(this, result.value);\n }\n else {\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_addLocalBufferCache).call(this, result.value);\n }\n this.numBytesWritten += result.value.byteLength;\n this.emit('progress', {\n bytesWritten: this.numBytesWritten,\n contentLength: this.contentLength,\n });\n requestStream.push(result.value);\n }\n if (result.done) {\n requestStream.push(null);\n }\n },\n });\n let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`;\n if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")) {\n googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")}`;\n }\n const headers = {\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': googAPIClient,\n };\n // If using multiple chunk upload, set appropriate header\n if (multiChunkMode) {\n // We need to know how much data is available upstream to set the `Content-Range` header.\n // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n for await (const chunk of this.upstreamIterator(expectedUploadSize)) {\n // This will conveniently track and keep the size of the buffers.\n // We will reach either the expected upload size or the remainder of the stream.\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_addLocalBufferCache).call(this, chunk);\n }\n // This is the sum from the `#addLocalBufferCache` calls\n const bytesToUpload = this.localWriteCacheByteLength;\n // Important: we want to know if the upstream has ended and the queue is empty before\n // unshifting data back into the queue. 
This way we will know if this is the last request or not.\n const isLastChunkOfUpload = !(await this.waitForNextChunk());\n // Important: put the data back in the queue for the actual upload\n this.prependLocalBufferToUpstream();\n let totalObjectSize = this.contentLength;\n if (typeof this.contentLength !== 'number' &&\n isLastChunkOfUpload &&\n !this.isPartialUpload) {\n // Let's let the server know this is the last chunk of the object since we didn't set it before.\n totalObjectSize = bytesToUpload + this.numBytesWritten;\n }\n // `- 1` as the ending byte is inclusive in the request.\n const endingByte = bytesToUpload + this.numBytesWritten - 1;\n // `Content-Length` for multiple chunk uploads is the size of the chunk,\n // not the overall object\n headers['Content-Length'] = bytesToUpload;\n headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`;\n }\n else {\n headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`;\n }\n const reqOpts = {\n method: 'PUT',\n url: this.uri,\n headers,\n body: requestStream,\n };\n try {\n const resp = await this.makeRequestStream(reqOpts);\n if (resp) {\n responseReceived = true;\n await this.responseHandler(resp);\n }\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n // Process the API response to look for errors that came in\n // the response body.\n async responseHandler(resp) {\n if (resp.data.error) {\n this.destroy(resp.data.error);\n return;\n }\n // At this point we can safely create a new id for the chunk\n this.currentInvocationId.chunk = uuid.v4();\n const moreDataToUpload = await this.waitForNextChunk();\n const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&\n resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&\n resp.headers.range &&\n moreDataToUpload;\n /**\n * This is true when we're expecting to upload more data in a future request,\n * yet the upstream for the upload session has been exhausted.\n */\n const shouldContinueUploadInAnotherRequest = this.isPartialUpload &&\n resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&\n !moreDataToUpload;\n if (shouldContinueWithNextMultiChunkRequest) {\n // Use the upper value in this header to determine where to start the next chunk.\n // We should not assume that the server received all bytes sent in the request.\n // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const range = resp.headers.range;\n this.offset = Number(range.split('-')[1]) + 1;\n // We should not assume that the server received all bytes sent in the request.\n // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload\n const missingBytes = this.numBytesWritten - this.offset;\n if (missingBytes) {\n // As multi-chunk uploads send one chunk per request and pulls one\n // chunk into the pipeline, prepending the missing bytes back should\n // be fine for the next request.\n this.prependLocalBufferToUpstream(missingBytes);\n this.numBytesWritten -= missingBytes;\n }\n else {\n // No bytes missing - no need to keep the local cache\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n }\n // continue uploading next chunk\n this.continueUploading();\n }\n else if (!this.isSuccessfulResponse(resp.status) &&\n !shouldContinueUploadInAnotherRequest) {\n const err = new Error('Upload failed');\n err.code = resp.status;\n 
err.name = 'Upload failed';\n if (resp === null || resp === void 0 ? void 0 : resp.data) {\n err.errors = [resp === null || resp === void 0 ? void 0 : resp.data];\n }\n this.destroy(err);\n }\n else {\n // no need to keep the cache\n __classPrivateFieldGet(this, _Upload_instances, \"m\", _Upload_resetLocalBuffersCache).call(this);\n if (resp && resp.data) {\n resp.data.size = Number(resp.data.size);\n }\n this.emit('metadata', resp.data);\n // Allow the object (Upload) to continue naturally so the user's\n // \"finish\" event fires.\n this.emit('uploadFinished');\n }\n }\n /**\n * Check the status of an existing resumable upload.\n *\n * @param cfg A configuration to use. `uri` is required.\n * @returns the current upload status\n */\n async checkUploadStatus(config = {}) {\n let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`;\n if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")) {\n googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, \"f\")}`;\n }\n const opts = {\n method: 'PUT',\n url: this.uri,\n headers: {\n 'Content-Length': 0,\n 'Content-Range': 'bytes */*',\n 'User-Agent': (0, util_js_1.getUserAgentString)(),\n 'x-goog-api-client': googAPIClient,\n },\n };\n try {\n const resp = await this.makeRequest(opts);\n // Successfully got the offset we can now create a new offset invocation id\n this.currentInvocationId.checkUploadStatus = uuid.v4();\n return resp;\n }\n catch (e) {\n if (config.retry === false ||\n !(e instanceof Error) ||\n !this.retryOptions.retryableErrorFn(e)) {\n throw e;\n }\n const retryDelay = this.getRetryDelay();\n if (retryDelay <= 0) {\n throw e;\n }\n await new Promise(res => setTimeout(res, retryDelay));\n return this.checkUploadStatus(config);\n }\n }\n async getAndSetOffset() {\n try {\n // we want to handle retries in this method.\n const resp = await this.checkUploadStatus({ retry: false });\n if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {\n if (typeof resp.headers.range === 'string') {\n this.offset = Number(resp.headers.range.split('-')[1]) + 1;\n return;\n }\n }\n this.offset = 0;\n }\n catch (e) {\n const err = e;\n if (this.retryOptions.retryableErrorFn(err)) {\n this.attemptDelayedRetry({\n status: NaN,\n data: err,\n });\n return;\n }\n this.destroy(err);\n }\n }\n async makeRequest(reqOpts) {\n if (this.encryption) {\n reqOpts.headers = reqOpts.headers || {};\n reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';\n reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString();\n reqOpts.headers['x-goog-encryption-key-sha256'] =\n this.encryption.hash.toString();\n }\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n // Let gaxios know we will handle a 308 error code ourselves.\n reqOpts.validateStatus = (status) => {\n return (this.isSuccessfulResponse(status) ||\n status === RESUMABLE_INCOMPLETE_STATUS_CODE);\n };\n const combinedReqOpts = {\n ...this.customRequestOptions,\n ...reqOpts,\n headers: {\n ...this.customRequestOptions.headers,\n ...reqOpts.headers,\n },\n };\n const res = await this.authClient.request(combinedReqOpts);\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n return res;\n }\n async makeRequestStream(reqOpts) {\n const controller = new abort_controller_1.default();\n const errorCallback = () => controller.abort();\n 
this.once('error', errorCallback);\n if (this.userProject) {\n reqOpts.params = reqOpts.params || {};\n reqOpts.params.userProject = this.userProject;\n }\n reqOpts.signal = controller.signal;\n reqOpts.validateStatus = () => true;\n const combinedReqOpts = {\n ...this.customRequestOptions,\n ...reqOpts,\n headers: {\n ...this.customRequestOptions.headers,\n ...reqOpts.headers,\n },\n };\n const res = await this.authClient.request(combinedReqOpts);\n const successfulRequest = this.onResponse(res);\n this.removeListener('error', errorCallback);\n return successfulRequest ? res : null;\n }\n /**\n * @return {bool} is the request good?\n */\n onResponse(resp) {\n if (resp.status !== 200 &&\n this.retryOptions.retryableErrorFn({\n code: resp.status,\n message: resp.statusText,\n name: resp.statusText,\n })) {\n this.attemptDelayedRetry(resp);\n return false;\n }\n this.emit('response', resp);\n return true;\n }\n /**\n * @param resp GaxiosResponse object from previous attempt\n */\n attemptDelayedRetry(resp) {\n if (this.numRetries < this.retryOptions.maxRetries) {\n if (resp.status === NOT_FOUND_STATUS_CODE &&\n this.numChunksReadInRequest === 0) {\n this.startUploading();\n }\n else {\n const retryDelay = this.getRetryDelay();\n if (retryDelay <= 0) {\n this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`));\n return;\n }\n // Unshift the local cache back in case it's needed for the next request.\n this.numBytesWritten -= this.localWriteCacheByteLength;\n this.prependLocalBufferToUpstream();\n // We don't know how much data has been received by the server.\n // `continueUploading` will recheck the offset via `getAndSetOffset`.\n // If `offset` < `numberBytesReceived` then we will raise a RangeError\n // as we've streamed too much data that has been missed - this should\n // not be the case for multi-chunk uploads as `lastChunkSent` is the\n // body of the entire request.\n this.offset = undefined;\n setTimeout(this.continueUploading.bind(this), retryDelay);\n }\n this.numRetries++;\n }\n else {\n this.destroy(new Error('Retry limit exceeded - ' + resp.data));\n }\n }\n /**\n * The amount of time to wait before retrying the request, in milliseconds.\n * If negative, do not retry.\n *\n * @returns the amount of time to wait, in milliseconds.\n */\n getRetryDelay() {\n const randomMs = Math.round(Math.random() * 1000);\n const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) *\n 1000 +\n randomMs;\n const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 -\n (Date.now() - this.timeOfFirstRequest);\n const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000;\n return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs);\n }\n /*\n * Prepare user-defined API endpoint for compatibility with our API.\n */\n sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Check if a given status code is 2xx\n *\n * @param status The status code to check\n * @returns if the status is 2xx\n */\n isSuccessfulResponse(status) {\n return status >= 200 && status < 300;\n }\n}\nexports.Upload = Upload;\n_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() {\n this.localWriteCache = [];\n this.localWriteCacheByteLength = 0;\n}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) {\n this.localWriteCache.push(buf);\n 
this.localWriteCacheByteLength += buf.byteLength;\n};\nfunction upload(cfg) {\n return new Upload(cfg);\n}\nexports.upload = upload;\nfunction createURI(cfg, callback) {\n const up = new Upload(cfg);\n if (!callback) {\n return up.createURI();\n }\n up.createURI().then(r => callback(null, r), callback);\n}\nexports.createURI = createURI;\n/**\n * Check the status of an existing resumable upload.\n *\n * @param cfg A configuration to use. `uri` is required.\n * @returns the current upload status\n */\nfunction checkUploadStatus(cfg) {\n const up = new Upload(cfg);\n return up.checkUploadStatus();\n}\nexports.checkUploadStatus = checkUploadStatus;\n","\"use strict\";\n// Copyright 2020 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0;\nconst crypto = __importStar(require(\"crypto\"));\nconst url = __importStar(require(\"url\"));\nconst storage_js_1 = require(\"./storage.js\");\nconst util_js_1 = require(\"./util.js\");\nvar SignerExceptionMessages;\n(function (SignerExceptionMessages) {\n SignerExceptionMessages[\"ACCESSIBLE_DATE_INVALID\"] = \"The accessible at date provided was invalid.\";\n SignerExceptionMessages[\"EXPIRATION_BEFORE_ACCESSIBLE_DATE\"] = \"An expiration date cannot be before accessible date.\";\n SignerExceptionMessages[\"X_GOOG_CONTENT_SHA256\"] = \"The header X-Goog-Content-SHA256 must be a hexadecimal string.\";\n})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {}));\n/*\n * Default signing version for getSignedUrl is 'v2'.\n */\nconst DEFAULT_SIGNING_VERSION = 'v2';\nconst SEVEN_DAYS = 7 * 24 * 60 * 60;\n/**\n * @const {string}\n * @private\n */\nexports.PATH_STYLED_HOST = 'https://storage.googleapis.com';\nclass URLSigner {\n constructor(authClient, bucket, file) {\n this.bucket = bucket;\n this.file = file;\n this.authClient = authClient;\n }\n getSignedUrl(cfg) {\n const expiresInSeconds = this.parseExpires(cfg.expires);\n const method = cfg.method;\n const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt);\n if (expiresInSeconds < accessibleAtInSeconds) {\n throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE);\n }\n let customHost;\n // Default style is `path`.\n const isVirtualHostedStyle = cfg.virtualHostedStyle || false;\n if (cfg.cname) {\n customHost = cfg.cname;\n }\n else if (isVirtualHostedStyle) {\n customHost = `https://${this.bucket.name}.storage.googleapis.com`;\n }\n const secondsToMilliseconds = 1000;\n const config = Object.assign({}, cfg, {\n method,\n expiration: expiresInSeconds,\n accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds),\n bucket: this.bucket.name,\n file: this.file ? (0, util_js_1.encodeURI)(this.file.name, false) : undefined,\n });\n if (customHost) {\n config.cname = customHost;\n }\n const version = cfg.version || DEFAULT_SIGNING_VERSION;\n let promise;\n if (version === 'v2') {\n promise = this.getSignedUrlV2(config);\n }\n else if (version === 'v4') {\n promise = this.getSignedUrlV4(config);\n }\n else {\n throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`);\n }\n return promise.then(query => {\n query = Object.assign(query, cfg.queryParams);\n const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n signedUrl.search = (0, util_js_1.qsStringify)(query);\n return signedUrl.href;\n });\n }\n getSignedUrlV2(config) {\n const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {});\n const resourcePath = this.getResourcePath(false, config.bucket, config.file);\n const blobToSign = [\n config.method,\n config.contentMd5 || '',\n config.contentType || '',\n config.expiration,\n canonicalHeadersString + resourcePath,\n ].join('\\n');\n const sign = async () => {\n const authClient = this.authClient;\n try {\n const signature = await authClient.sign(blobToSign);\n const credentials = await authClient.getCredentials();\n return {\n GoogleAccessId: credentials.client_email,\n Expires: config.expiration,\n Signature: signature,\n };\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n getSignedUrlV4(config) {\n config.accessibleAt = config.accessibleAt\n ? config.accessibleAt\n : new Date();\n const millisecondsToSeconds = 1.0 / 1000.0;\n const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds;\n // v4 limit expiration to be 7 days maximum\n if (expiresPeriodInSeconds > SEVEN_DAYS) {\n throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);\n }\n const extensionHeaders = Object.assign({}, config.extensionHeaders);\n const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST);\n extensionHeaders.host = fqdn.host;\n if (config.contentMd5) {\n extensionHeaders['content-md5'] = config.contentMd5;\n }\n if (config.contentType) {\n extensionHeaders['content-type'] = config.contentType;\n }\n let contentSha256;\n const sha256Header = extensionHeaders['x-goog-content-sha256'];\n if (sha256Header) {\n if (typeof sha256Header !== 'string' ||\n !/[A-Fa-f0-9]{40}/.test(sha256Header)) {\n throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256);\n }\n contentSha256 = sha256Header;\n }\n const signedHeaders = Object.keys(extensionHeaders)\n .map(header => header.toLowerCase())\n .sort()\n .join(';');\n const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders);\n const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt);\n const credentialScope = `${datestamp}/auto/storage/goog4_request`;\n const sign = async () => {\n const credentials = await this.authClient.getCredentials();\n const credential = `${credentials.client_email}/${credentialScope}`;\n const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true);\n const queryParams = {\n 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256',\n 'X-Goog-Credential': credential,\n 'X-Goog-Date': dateISO,\n 'X-Goog-Expires': expiresPeriodInSeconds.toString(10),\n 'X-Goog-SignedHeaders': signedHeaders,\n ...(config.queryParams || {}),\n };\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const canonicalQueryParams = this.getCanonicalQueryParams(queryParams);\n const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256);\n const hash = crypto\n .createHash('sha256')\n .update(canonicalRequest)\n .digest('hex');\n const blobToSign = [\n 'GOOG4-RSA-SHA256',\n dateISO,\n credentialScope,\n hash,\n ].join('\\n');\n try {\n const signature = await this.authClient.sign(blobToSign);\n const signatureHex = Buffer.from(signature, 'base64').toString('hex');\n const signedQuery = Object.assign({}, queryParams, {\n 'X-Goog-Signature': signatureHex,\n });\n return signedQuery;\n }\n catch (err) {\n const error = err;\n const signingErr = new SigningError(error.message);\n signingErr.stack = error.stack;\n throw signingErr;\n }\n };\n return sign();\n }\n /**\n * Create canonical headers for signing v4 url.\n *\n * The canonical headers for v4-signing a request demands header names are\n * first lowercased, followed by sorting the header names.\n * Then, construct the canonical headers part of the request:\n * + \":\" + Trim() + \"\\n\"\n * ..\n * + \":\" + Trim() + \"\\n\"\n *\n * @param headers\n * @private\n */\n getCanonicalHeaders(headers) {\n // Sort headers by their lowercased names\n const sortedHeaders = (0, util_js_1.objectEntries)(headers)\n // Convert header names to lowercase\n .map(([headerName, value]) => [\n headerName.toLowerCase(),\n value,\n ])\n .sort((a, b) => a[0].localeCompare(b[0]));\n return sortedHeaders\n .filter(([, value]) => value !== undefined)\n .map(([headerName, value]) => {\n // - Convert Array (multi-valued header) into string, delimited by\n // ',' (no space).\n // - Trim leading and trailing spaces.\n // - Convert sequential (2+) spaces into a single space\n const canonicalValue = `${value}`.trim().replace(/\\s{2,}/g, ' ');\n return `${headerName}:${canonicalValue}\\n`;\n })\n .join('');\n }\n getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) {\n return [\n method,\n path,\n query,\n headers,\n signedHeaders,\n contentSha256 || 'UNSIGNED-PAYLOAD',\n ].join('\\n');\n }\n getCanonicalQueryParams(query) {\n return (0, util_js_1.objectEntries)(query)\n .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)])\n .sort((a, b) => (a[0] < b[0] ? 
-1 : 1))\n .map(([key, value]) => `${key}=${value}`)\n .join('&');\n }\n getResourcePath(cname, bucket, file) {\n if (cname) {\n return '/' + (file || '');\n }\n else if (file) {\n return `/${bucket}/${file}`;\n }\n else {\n return `/${bucket}`;\n }\n }\n parseExpires(expires, current = new Date()) {\n const expiresInMSeconds = new Date(expires).valueOf();\n if (isNaN(expiresInMSeconds)) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID);\n }\n if (expiresInMSeconds < current.valueOf()) {\n throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST);\n }\n return Math.floor(expiresInMSeconds / 1000); // The API expects seconds.\n }\n parseAccessibleAt(accessibleAt) {\n const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf();\n if (isNaN(accessibleAtInMSeconds)) {\n throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID);\n }\n return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds.\n }\n}\nexports.URLSigner = URLSigner;\n/**\n * Custom error type for errors related to getting signed errors and policies.\n *\n * @private\n */\nclass SigningError extends Error {\n constructor() {\n super(...arguments);\n this.name = 'SigningError';\n }\n}\nexports.SigningError = SigningError;\n","\"use strict\";\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0;\nconst index_js_1 = require(\"./nodejs-common/index.js\");\nconst paginator_1 = require(\"@google-cloud/paginator\");\nconst promisify_1 = require(\"@google-cloud/promisify\");\nconst stream_1 = require(\"stream\");\nconst bucket_js_1 = require(\"./bucket.js\");\nconst channel_js_1 = require(\"./channel.js\");\nconst file_js_1 = require(\"./file.js\");\nconst util_js_1 = require(\"./util.js\");\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\nconst hmacKey_js_1 = require(\"./hmacKey.js\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nvar IdempotencyStrategy;\n(function (IdempotencyStrategy) {\n IdempotencyStrategy[IdempotencyStrategy[\"RetryAlways\"] = 0] = \"RetryAlways\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryConditional\"] = 1] = \"RetryConditional\";\n IdempotencyStrategy[IdempotencyStrategy[\"RetryNever\"] = 2] = \"RetryNever\";\n})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {}));\nvar ExceptionMessages;\n(function (ExceptionMessages) {\n ExceptionMessages[\"EXPIRATION_DATE_INVALID\"] = \"The expiration date provided was invalid.\";\n 
ExceptionMessages[\"EXPIRATION_DATE_PAST\"] = \"An expiration date cannot be in the past.\";\n})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {}));\nvar StorageExceptionMessages;\n(function (StorageExceptionMessages) {\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED\"] = \"A bucket name is needed to use Cloud Storage.\";\n StorageExceptionMessages[\"BUCKET_NAME_REQUIRED_CREATE\"] = \"A name is required to create a bucket.\";\n StorageExceptionMessages[\"HMAC_SERVICE_ACCOUNT\"] = \"The first argument must be a service account email to create an HMAC key.\";\n StorageExceptionMessages[\"HMAC_ACCESS_ID\"] = \"An access ID is needed to create an HmacKey object.\";\n})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {}));\nexports.PROTOCOL_REGEX = /^(\\w*):\\/\\//;\n/**\n * Default behavior: Automatically retry retriable server errors.\n *\n * @const {boolean}\n */\nexports.AUTO_RETRY_DEFAULT = true;\n/**\n * Default behavior: Only attempt to retry retriable errors 3 times.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DEFAULT = 3;\n/**\n * Default behavior: Wait twice as long as previous retry before retrying.\n *\n * @const {number}\n */\nexports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2;\n/**\n * Default behavior: If the operation doesn't succeed after 600 seconds,\n * stop retrying.\n *\n * @const {number}\n */\nexports.TOTAL_TIMEOUT_DEFAULT = 600;\n/**\n * Default behavior: Wait no more than 64 seconds between retries.\n *\n * @const {number}\n */\nexports.MAX_RETRY_DELAY_DEFAULT = 64;\n/**\n * Default behavior: Retry conditionally idempotent operations if correct preconditions are set.\n *\n * @const {enum}\n * @private\n */\nconst IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional;\n/**\n * Returns true if the API request should be retried, given the error that was\n * given the first time the request was attempted.\n * @const\n * @param {error} err - The API error to check if it is appropriate to retry.\n * @return {boolean} True if the API request should be retried, false otherwise.\n */\nconst RETRYABLE_ERR_FN_DEFAULT = function (err) {\n var _a;\n const isConnectionProblem = (reason) => {\n return (reason.includes('eai_again') || // DNS lookup error\n reason === 'econnreset' ||\n reason === 'unexpected connection closure' ||\n reason === 'epipe' ||\n reason === 'socket connection timeout');\n };\n if (err) {\n if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) {\n return true;\n }\n if (typeof err.code === 'string') {\n if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) {\n return true;\n }\n const reason = err.code.toLowerCase();\n if (isConnectionProblem(reason)) {\n return true;\n }\n }\n if (err.errors) {\n for (const e of err.errors) {\n const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase();\n if (reason && isConnectionProblem(reason)) {\n return true;\n }\n }\n }\n }\n return false;\n};\nexports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT;\n/*! Developer Documentation\n *\n * Invoke this method to create a new Storage object bound with pre-determined\n * configuration options. For each object that can be created (e.g., a bucket),\n * there is an equivalent static and instance method. While they are classes,\n * they can be instantiated without use of the `new` keyword.\n */\n/**\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. 
ACLs are the mechanism you use to share objects with other\n * users and allow other users to access your buckets and objects.\n *\n * This object provides constants to refer to the three permission levels that\n * can be granted to an entity:\n *\n * - `gcs.acl.OWNER_ROLE` - (\"OWNER\")\n * - `gcs.acl.READER_ROLE` - (\"READER\")\n * - `gcs.acl.WRITER_ROLE` - (\"WRITER\")\n *\n * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists}\n *\n * @name Storage#acl\n * @type {object}\n * @property {string} OWNER_ROLE\n * @property {string} READER_ROLE\n * @property {string} WRITER_ROLE\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n *\n * //-\n * // Make all of the files currently in a bucket publicly readable.\n * //-\n * const options = {\n * entity: 'allUsers',\n * role: storage.acl.READER_ROLE\n * };\n *\n * albums.acl.add(options, function(err, aclObject) {});\n *\n * //-\n * // Make any new objects added to a bucket publicly readable.\n * //-\n * albums.acl.default.add(options, function(err, aclObject) {});\n *\n * //-\n * // Grant a user ownership permissions to a bucket.\n * //-\n * albums.acl.add({\n * entity: 'user-useremail@example.com',\n * role: storage.acl.OWNER_ROLE\n * }, function(err, aclObject) {});\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * albums.acl.add(options).then(function(data) {\n * const aclObject = data[0];\n * const apiResponse = data[1];\n * });\n * ```\n */\n/**\n * Get {@link Bucket} objects for all of the buckets in your project as\n * a readable object stream.\n *\n * @method Storage#getBucketsStream\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @returns {ReadableStream} A readable stream that emits {@link Bucket}\n * instances.\n *\n * @example\n * ```\n * storage.getBucketsStream()\n * .on('error', console.error)\n * .on('data', function(bucket) {\n * // bucket is a Bucket object.\n * })\n * .on('end', function() {\n * // All buckets retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getBucketsStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n * Get {@link HmacKey} objects for all of the HMAC keys in the project in a\n * readable object stream.\n *\n * @method Storage#getHmacKeysStream\n * @param {GetHmacKeysOptions} [options] Configuration options.\n * @returns {ReadableStream} A readable stream that emits {@link HmacKey}\n * instances.\n *\n * @example\n * ```\n * storage.getHmacKeysStream()\n * .on('error', console.error)\n * .on('data', function(hmacKey) {\n * // hmacKey is an HmacKey object.\n * })\n * .on('end', function() {\n * // All HmacKey retrieved.\n * });\n *\n * //-\n * // If you anticipate many results, you can end a stream early to prevent\n * // unnecessary processing and API requests.\n * //-\n * storage.getHmacKeysStream()\n * .on('data', function(bucket) {\n * this.end();\n * });\n * ```\n */\n/**\n *
ACLs
\n * Cloud Storage uses access control lists (ACLs) to manage object and\n * bucket access. ACLs are the mechanism you use to share files with other users\n * and allow other users to access your buckets and files.\n *\n * To learn more about ACLs, read this overview on\n * {@link https://cloud.google.com/storage/docs/access-control| Access Control}.\n *\n * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview}\n * See {@link https://cloud.google.com/storage/docs/access-control| Access Control}\n *\n * @class\n */\nclass Storage extends index_js_1.Service {\n getBucketsStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n getHmacKeysStream() {\n // placeholder body, overwritten in constructor\n return new stream_1.Readable();\n }\n /**\n * @callback Crc32cGeneratorToStringCallback\n * A method returning the CRC32C as a base64-encoded string.\n *\n * @returns {string}\n *\n * @example\n * Hashing the string 'data' should return 'rth90Q=='\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.toString(); // 'rth90Q=='\n * ```\n **/\n /**\n * @callback Crc32cGeneratorValidateCallback\n * A method validating a base64-encoded CRC32C string.\n *\n * @param {string} [value] base64-encoded CRC32C string to validate\n * @returns {boolean}\n *\n * @example\n * Should return `true` if the value matches, `false` otherwise\n *\n * ```js\n * const buffer = Buffer.from('data');\n * crc32c.update(buffer);\n * crc32c.validate('DkjKuA=='); // false\n * crc32c.validate('rth90Q=='); // true\n * ```\n **/\n /**\n * @callback Crc32cGeneratorUpdateCallback\n * A method for passing `Buffer`s for CRC32C generation.\n *\n * @param {Buffer} [data] data to update CRC32C value with\n * @returns {undefined}\n *\n * @example\n * Hashing buffers from 'some ' and 'text\\n'\n *\n * ```js\n * const buffer1 = Buffer.from('some ');\n * crc32c.update(buffer1);\n *\n * const buffer2 = Buffer.from('text\\n');\n * crc32c.update(buffer2);\n *\n * crc32c.toString(); // 'DkjKuA=='\n * ```\n **/\n /**\n * @typedef {object} CRC32CValidator\n * @property {Crc32cGeneratorToStringCallback}\n * @property {Crc32cGeneratorValidateCallback}\n * @property {Crc32cGeneratorUpdateCallback}\n */\n /**\n * @callback Crc32cGeneratorCallback\n * @returns {CRC32CValidator}\n */\n /**\n * @typedef {object} StorageOptions\n * @property {string} [projectId] The project ID from the Google Developer's\n * Console, e.g. 'grape-spaceship-123'. We will also check the environment\n * variable `GCLOUD_PROJECT` for your project ID. If your app is running\n * in an environment which supports {@link\n * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application\n * Application Default Credentials}, your project ID will be detected\n * automatically.\n * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key\n * downloaded from the Google Developers Console. If you provide a path to\n * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and\n * .p12 require you to specify the `email` option as well.\n * @property {string} [email] Account email address. Required when using a .pem\n * or .p12 keyFilename.\n * @property {object} [credentials] Credentials object.\n * @property {string} [credentials.client_email]\n * @property {string} [credentials.private_key]\n * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors\n * will be retried with exponential delay between them dictated by the formula\n * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout\n * has been reached. Retries will only happen if autoRetry is set to true.\n * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the\n * response is related to rate limits or certain intermittent server\n * errors. We will exponentially backoff subsequent requests by default.\n * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to\n * increase the delay time between the completion of failed requests, and the\n * initiation of the subsequent retrying request.\n * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from\n * when the initial request is sent, after which an error will\n * be returned, regardless of the retrying attempts made meanwhile.\n * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests.\n * When this value is reached, ``retryDelayMultiplier`` will no longer be used to\n * increase delay time.\n * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries\n * attempted before returning the error.\n * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given\n * error should be retried and false otherwise.\n * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration\n * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways -\n * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional -\n * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never\n * retry a conditionally idempotent operation.\n * @property {string} [userAgent] The value to be prepended to the User-Agent\n * header in API requests.\n * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one.\n * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out.\n * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned.\n * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests.\n * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint.\n * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. 
Defaults to {@link CRC32C}\n */\n /**\n * Constructs the Storage client.\n *\n * @example\n * Create a client that uses Application Default Credentials\n * (ADC)\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * ```\n *\n * @example\n * Create a client with explicit credentials\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * keyFilename: '/path/to/keyfile.json'\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by value as a JavaScript object\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: {\n * type: 'service_account',\n * project_id: 'xxxxxxx',\n * private_key_id: 'xxxx',\n * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\\n-----END PRIVATE KEY-----\\n',\n * client_email: 'xxxx',\n * client_id: 'xxx',\n * auth_uri: 'https://accounts.google.com/o/oauth2/auth',\n * token_uri: 'https://oauth2.googleapis.com/token',\n * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs',\n * client_x509_cert_url: 'xxx',\n * }\n * });\n * ```\n *\n * @example\n * Create a client with credentials passed\n * by loading a JSON file directly from disk\n * ```\n * const storage = new Storage({\n * projectId: 'your-project-id',\n * credentials: require('/path/to-keyfile.json')\n * });\n * ```\n *\n * @example\n * Create a client with an `AuthClient` (e.g. `DownscopedClient`)\n * ```\n * const {DownscopedClient} = require('google-auth-library');\n * const authClient = new DownscopedClient({...});\n *\n * const storage = new Storage({authClient});\n * ```\n *\n * Additional samples:\n * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1\n * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js\n *\n * @param {StorageOptions} [options] Configuration options.\n */\n constructor(options = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p;\n let apiEndpoint = 'https://storage.googleapis.com';\n let customEndpoint = false;\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST;\n if (typeof EMULATOR_HOST === 'string') {\n apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST);\n customEndpoint = true;\n }\n if (options.apiEndpoint) {\n apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint);\n customEndpoint = true;\n }\n options = Object.assign({}, options, { apiEndpoint });\n // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.\n const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`;\n const config = {\n apiEndpoint: options.apiEndpoint,\n retryOptions: {\n autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined\n ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry\n : exports.AUTO_RETRY_DEFAULT,\n maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries)\n ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries\n : exports.MAX_RETRY_DEFAULT,\n retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier)\n ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier\n : exports.RETRY_DELAY_MULTIPLIER_DEFAULT,\n totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout)\n ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout\n : exports.TOTAL_TIMEOUT_DEFAULT,\n maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay)\n ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay\n : exports.MAX_RETRY_DELAY_DEFAULT,\n retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn)\n ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn\n : exports.RETRYABLE_ERR_FN_DEFAULT,\n idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined\n ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy\n : IDEMPOTENCY_STRATEGY_DEFAULT,\n },\n baseUrl,\n customEndpoint,\n useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint,\n projectIdRequired: false,\n scopes: [\n 'https://www.googleapis.com/auth/iam',\n 'https://www.googleapis.com/auth/cloud-platform',\n 'https://www.googleapis.com/auth/devstorage.full_control',\n ],\n packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(),\n };\n super(config, options);\n /**\n * Reference to {@link Storage.acl}.\n *\n * @name Storage#acl\n * @see Storage.acl\n */\n this.acl = Storage.acl;\n this.crc32cGenerator =\n options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;\n this.retryOptions = config.retryOptions;\n this.getBucketsStream = paginator_1.paginator.streamify('getBuckets');\n this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys');\n }\n static sanitizeEndpoint(url) {\n if (!exports.PROTOCOL_REGEX.test(url)) {\n url = `https://${url}`;\n }\n return url.replace(/\\/+$/, ''); // Remove trailing slashes\n }\n /**\n * Get a reference to a Cloud Storage bucket.\n *\n * @param {string} name Name of the bucket.\n * @param {object} [options] Configuration object.\n * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to\n * encrypt objects inserted into this bucket, if no encryption method is\n * specified.\n * @param {string} [options.userProject] User project to be billed for all\n * requests made from this Bucket object.\n * @returns {Bucket}\n * @see Bucket\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const albums = storage.bucket('albums');\n * const photos = storage.bucket('photos');\n * ```\n */\n bucket(name, options) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED);\n }\n return new bucket_js_1.Bucket(this, name, options);\n }\n /**\n * Reference a channel to receive notifications about changes to your bucket.\n *\n * @param {string} id The ID of the channel.\n * @param {string} resourceId The resource ID of the channel.\n * @returns {Channel}\n * @see Channel\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const channel = storage.channel('id', 'resource-id');\n * ```\n */\n channel(id, resourceId) {\n return new channel_js_1.Channel(this, id, resourceId);\n }\n /**\n * @typedef {array} CreateBucketResponse\n * @property {Bucket} 0 The new {@link Bucket}.\n * @property {object} 1 The full API response.\n */\n /**\n * @callback CreateBucketCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket} bucket The new {@link 
Bucket}.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Metadata to set for the bucket.\n *\n * @typedef {object} CreateBucketRequest\n * @property {boolean} [archive=false] Specify the storage class as Archive.\n * @property {object} [autoclass.enabled=false] Specify whether Autoclass is\n * enabled for the bucket.\n * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if\n * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE.\n * @property {boolean} [coldline=false] Specify the storage class as Coldline.\n * @property {Cors[]} [cors=[]] Specify the CORS configuration to use.\n * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [dra=false] Specify the storage class as Durable Reduced\n * Availability.\n * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket.\n * @property {string} [location] Specify the bucket's location. If specifying\n * a dual-region, the `customPlacementConfig` property should be set in conjunction.\n * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}.\n * @property {boolean} [multiRegional=false] Specify the storage class as\n * Multi-Regional.\n * @property {boolean} [nearline=false] Specify the storage class as Nearline.\n * @property {boolean} [regional=false] Specify the storage class as Regional.\n * @property {boolean} [requesterPays=false] **Early Access Testers Only**\n * Force the use of the User Project metadata field to assign operational\n * costs when an operation is made on a Bucket and its objects.\n * @property {string} [rpo] For dual-region buckets, controls whether turbo\n * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`).\n * @property {boolean} [standard=true] Specify the storage class as Standard.\n * @property {string} [storageClass] The new storage class. (`standard`,\n * `nearline`, `coldline`, or `archive`).\n * **Note:** The storage classes `multi_regional`, `regional`, and\n * `durable_reduced_availability` are now legacy and will be deprecated in\n * the future.\n * @property {Versioning} [versioning=undefined] Specify the versioning status.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * Create a bucket.\n *\n * Cloud Storage uses a flat namespace, so you can't create a bucket with\n * a name that is already in use. 
For more information, see\n * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation}\n * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}\n *\n * @param {string} name Name of the bucket to create.\n * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.\n * @param {CreateBucketCallback} [callback] Callback function.\n * @returns {Promise}\n * @throws {Error} If a name is not provided.\n * @see Bucket#create\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const callback = function(err, bucket, apiResponse) {\n * // `bucket` is a Bucket object.\n * };\n *\n * storage.createBucket('new-bucket', callback);\n *\n * //-\n * // Create a bucket in a specific location and region. See the \n * // Official JSON API docs for complete details on the `location`\n * option.\n * // \n * //-\n * const metadata = {\n * location: 'US-CENTRAL1',\n * regional: true\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Create a bucket with a retention policy of 6 months.\n * //-\n * const metadata = {\n * retentionPolicy: {\n * retentionPeriod: 15780000 // 6 months in seconds.\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // Enable versioning on a new bucket.\n * //-\n * const metadata = {\n * versioning: {\n * enabled: true\n * }\n * };\n *\n * storage.createBucket('new-bucket', metadata, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createBucket('new-bucket').then(function(data) {\n * const bucket = data[0];\n * const apiResponse = data[1];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_create_bucket\n * Another example:\n */\n createBucket(name, metadataOrCallback, callback) {\n if (!name) {\n throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE);\n }\n let metadata;\n if (!callback) {\n callback = metadataOrCallback;\n metadata = {};\n }\n else {\n metadata = metadataOrCallback;\n }\n const body = {\n ...metadata,\n name,\n };\n const storageClasses = {\n archive: 'ARCHIVE',\n coldline: 'COLDLINE',\n dra: 'DURABLE_REDUCED_AVAILABILITY',\n multiRegional: 'MULTI_REGIONAL',\n nearline: 'NEARLINE',\n regional: 'REGIONAL',\n standard: 'STANDARD',\n };\n const storageClassKeys = Object.keys(storageClasses);\n for (const storageClass of storageClassKeys) {\n if (body[storageClass]) {\n if (metadata.storageClass && metadata.storageClass !== storageClass) {\n throw new Error(`Both \\`${storageClass}\\` and \\`storageClass\\` were provided.`);\n }\n body.storageClass = storageClasses[storageClass];\n delete body[storageClass];\n }\n }\n if (body.requesterPays) {\n body.billing = {\n requesterPays: body.requesterPays,\n };\n delete body.requesterPays;\n }\n const query = {\n project: this.projectId,\n };\n if (body.userProject) {\n query.userProject = body.userProject;\n delete body.userProject;\n }\n if (body.enableObjectRetention) {\n query.enableObjectRetention = body.enableObjectRetention;\n delete body.enableObjectRetention;\n }\n this.request({\n method: 'POST',\n uri: '/b',\n qs: query,\n json: body,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const bucket = this.bucket(name);\n bucket.metadata = 
resp;\n callback(null, bucket, resp);\n });\n }\n /**\n * @typedef {object} CreateHmacKeyOptions\n * @property {string} [projectId] The project ID of the project that owns\n * the service account of the requested HMAC key. If not provided,\n * the project ID used to instantiate the Storage client will be used.\n * @property {string} [userProject] This parameter is currently ignored.\n */\n /**\n * @typedef {object} HmacKeyMetadata\n * @property {string} accessId The access id identifies which HMAC key was\n * used to sign a request when authenticating with HMAC.\n * @property {string} etag Used to perform a read-modify-write of the key.\n * @property {string} id The resource name of the HMAC key.\n * @property {string} projectId The project ID.\n * @property {string} serviceAccountEmail The service account's email this\n * HMAC key is created for.\n * @property {string} state The state of this HMAC key. One of \"ACTIVE\",\n * \"INACTIVE\" or \"DELETED\".\n * @property {string} timeCreated The creation time of the HMAC key in\n * RFC 3339 format.\n * @property {string} [updated] The time this HMAC key was last updated in\n * RFC 3339 format.\n */\n /**\n * @typedef {array} CreateHmacKeyResponse\n * @property {HmacKey} 0 The HmacKey instance created from API response.\n * @property {string} 1 The HMAC key's secret used to access the XML API.\n * @property {object} 3 The raw API response.\n */\n /**\n * @callback CreateHmacKeyCallback Callback function.\n * @param {?Error} err Request error, if any.\n * @param {HmacKey} hmacKey The HmacKey instance created from API response.\n * @param {string} secret The HMAC key's secret used to access the XML API.\n * @param {object} apiResponse The raw API response.\n */\n /**\n * Create an HMAC key associated with an service account to authenticate\n * requests to the Cloud Storage XML API.\n *\n * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation}\n *\n * @param {string} serviceAccountEmail The service account's email address\n * with which the HMAC key is created for.\n * @param {CreateHmacKeyCallback} [callback] Callback function.\n * @return {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('google-cloud/storage');\n * const storage = new Storage();\n *\n * // Replace with your service account's email address\n * const serviceAccountEmail =\n * 'my-service-account@appspot.gserviceaccount.com';\n *\n * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) {\n * if (!err) {\n * // Securely store the secret for use with the XML API.\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.createHmacKey(serviceAccountEmail)\n * .then((response) => {\n * const hmacKey = response[0];\n * const secret = response[1];\n * // Securely store the secret for use with the XML API.\n * });\n * ```\n */\n createHmacKey(serviceAccountEmail, optionsOrCb, cb) {\n if (typeof serviceAccountEmail !== 'string') {\n throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT);\n }\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options, { serviceAccountEmail });\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n method: 'POST',\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n maxRetries: 0, //explicitly set this value since this is a non-idempotent function\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n 
return;\n }\n const metadata = resp.metadata;\n const hmacKey = this.hmacKey(metadata.accessId, {\n projectId: metadata.projectId,\n });\n hmacKey.metadata = resp.metadata;\n callback(null, hmacKey, resp.secret, resp);\n });\n }\n /**\n * Query object for listing buckets.\n *\n * @typedef {object} GetBucketsRequest\n * @property {boolean} [autoPaginate=true] Have pagination handled\n * automatically.\n * @property {number} [maxApiCalls] Maximum number of API calls to make.\n * @property {number} [maxResults] Maximum number of items plus prefixes to\n * return per call.\n * Note: By default will handle pagination automatically\n * if more than 1 page worth of results are requested per call.\n * When `autoPaginate` is set to `false` the smaller of `maxResults`\n * or 1 page of results will be returned per call.\n * @property {string} [pageToken] A previously-returned page token\n * representing part of the larger set of results to view.\n * @property {string} [userProject] The ID of the project which will be billed\n * for the request.\n */\n /**\n * @typedef {array} GetBucketsResponse\n * @property {Bucket[]} 0 Array of {@link Bucket} instances.\n * @property {object} 1 nextQuery A query object to receive more results.\n * @property {object} 2 The full API response.\n */\n /**\n * @callback GetBucketsCallback\n * @param {?Error} err Request error, if any.\n * @param {Bucket[]} buckets Array of {@link Bucket} instances.\n * @param {object} nextQuery A query object to receive more results.\n * @param {object} apiResponse The full API response.\n */\n /**\n * Get Bucket objects for all of the buckets in your project.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation}\n *\n * @param {GetBucketsRequest} [query] Query object for listing buckets.\n * @param {GetBucketsCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * storage.getBuckets(function(err, buckets) {\n * if (!err) {\n * // buckets is an array of Bucket objects.\n * }\n * });\n *\n * //-\n * // To control how many API requests are made and page through the results\n * // manually, set `autoPaginate` to `false`.\n * //-\n * const callback = function(err, buckets, nextQuery, apiResponse) {\n * if (nextQuery) {\n * // More results exist.\n * storage.getBuckets(nextQuery, callback);\n * }\n *\n * // The `metadata` property is populated for you with the metadata at the\n * // time of fetching.\n * buckets[0].metadata;\n *\n * // However, in cases where you are concerned the metadata could have\n * // changed, use the `getMetadata` method.\n * buckets[0].getMetadata(function(err, metadata, apiResponse) {});\n * };\n *\n * storage.getBuckets({\n * autoPaginate: false\n * }, callback);\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getBuckets().then(function(data) {\n * const buckets = data[0];\n * });\n *\n * ```\n * @example include:samples/buckets.js\n * region_tag:storage_list_buckets\n * Another example:\n */\n getBuckets(optionsOrCallback, cb) {\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb);\n options.project = options.project || this.projectId;\n this.request({\n uri: '/b',\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? 
resp.items : [];\n const buckets = itemsArray.map((bucket) => {\n const bucketInstance = this.bucket(bucket.id);\n bucketInstance.metadata = bucket;\n return bucketInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, buckets, nextQuery, resp);\n });\n }\n getHmacKeys(optionsOrCb, cb) {\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb);\n const query = Object.assign({}, options);\n const projectId = query.projectId || this.projectId;\n delete query.projectId;\n this.request({\n uri: `/projects/${projectId}/hmacKeys`,\n qs: query,\n }, (err, resp) => {\n if (err) {\n callback(err, null, null, resp);\n return;\n }\n const itemsArray = resp.items ? resp.items : [];\n const hmacKeys = itemsArray.map((hmacKey) => {\n const hmacKeyInstance = this.hmacKey(hmacKey.accessId, {\n projectId: hmacKey.projectId,\n });\n hmacKeyInstance.metadata = hmacKey;\n return hmacKeyInstance;\n });\n const nextQuery = resp.nextPageToken\n ? Object.assign({}, options, { pageToken: resp.nextPageToken })\n : null;\n callback(null, hmacKeys, nextQuery, resp);\n });\n }\n /**\n * @typedef {array} GetServiceAccountResponse\n * @property {object} 0 The service account resource.\n * @property {object} 1 The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * @callback GetServiceAccountCallback\n * @param {?Error} err Request error, if any.\n * @param {object} serviceAccount The serviceAccount resource.\n * @param {string} serviceAccount.emailAddress The service account email\n * address.\n * @param {object} apiResponse The full\n * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}.\n */\n /**\n * Get the email address of this project's Google Cloud Storage service\n * account.\n *\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation}\n * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource}\n *\n * @param {object} [options] Configuration object.\n * @param {string} [options.userProject] User project to be billed for this\n * request.\n * @param {GetServiceAccountCallback} [callback] Callback function.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n *\n * storage.getServiceAccount(function(err, serviceAccount, apiResponse) {\n * if (!err) {\n * const serviceAccountEmail = serviceAccount.emailAddress;\n * }\n * });\n *\n * //-\n * // If the callback is omitted, we'll return a Promise.\n * //-\n * storage.getServiceAccount().then(function(data) {\n * const serviceAccountEmail = data[0].emailAddress;\n * const apiResponse = data[1];\n * });\n * ```\n */\n getServiceAccount(optionsOrCallback, cb) {\n const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb);\n this.request({\n uri: `/projects/${this.projectId}/serviceAccount`,\n qs: options,\n }, (err, resp) => {\n if (err) {\n callback(err, null, resp);\n return;\n }\n const camelCaseResponse = {};\n for (const prop in resp) {\n // eslint-disable-next-line no-prototype-builtins\n if (resp.hasOwnProperty(prop)) {\n const camelCaseProp = prop.replace(/_(\\w)/g, (_, match) => match.toUpperCase());\n camelCaseResponse[camelCaseProp] = resp[prop];\n }\n }\n 
callback(null, camelCaseResponse, resp);\n });\n }\n /**\n * Get a reference to an HmacKey object.\n * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to\n * retrieve and populate the metadata.\n *\n * To get a reference to an HMAC key that's not created for a service\n * account in the same project used to instantiate the Storage client,\n * supply the project's ID as `projectId` in the `options` argument.\n *\n * @param {string} accessId The HMAC key's access ID.\n * @param {HmacKeyOptions} options HmacKey constructor options.\n * @returns {HmacKey}\n * @see HmacKey\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const hmacKey = storage.hmacKey('ACCESS_ID');\n * ```\n */\n hmacKey(accessId, options) {\n if (!accessId) {\n throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID);\n }\n return new hmacKey_js_1.HmacKey(this, accessId, options);\n }\n}\nexports.Storage = Storage;\n/**\n * {@link Bucket} class.\n *\n * @name Storage.Bucket\n * @see Bucket\n * @type {Constructor}\n */\nStorage.Bucket = bucket_js_1.Bucket;\n/**\n * {@link Channel} class.\n *\n * @name Storage.Channel\n * @see Channel\n * @type {Constructor}\n */\nStorage.Channel = channel_js_1.Channel;\n/**\n * {@link File} class.\n *\n * @name Storage.File\n * @see File\n * @type {Constructor}\n */\nStorage.File = file_js_1.File;\n/**\n * {@link HmacKey} class.\n *\n * @name Storage.HmacKey\n * @see HmacKey\n * @type {Constructor}\n */\nStorage.HmacKey = hmacKey_js_1.HmacKey;\nStorage.acl = {\n OWNER_ROLE: 'OWNER',\n READER_ROLE: 'READER',\n WRITER_ROLE: 'WRITER',\n};\n/*! Developer Documentation\n *\n * These methods can be auto-paginated.\n */\npaginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']);\n/*! Developer Documentation\n *\n * All async methods (except for streams) will return a Promise in the event\n * that a callback is omitted.\n */\n(0, promisify_1.promisifyAll)(Storage, {\n exclude: ['bucket', 'channel', 'hmacKey'],\n});\n","\"use strict\";\n/*!\n * Copyright 2022 Google LLC. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TransferManager = exports.MultiPartUploadError = void 0;\nconst file_js_1 = require(\"./file.js\");\nconst p_limit_1 = __importDefault(require(\"p-limit\"));\nconst path = __importStar(require(\"path\"));\nconst fs_1 = require(\"fs\");\nconst crc32c_js_1 = require(\"./crc32c.js\");\nconst google_auth_library_1 = require(\"google-auth-library\");\nconst fast_xml_parser_1 = require(\"fast-xml-parser\");\nconst async_retry_1 = __importDefault(require(\"async-retry\"));\nconst crypto_1 = require(\"crypto\");\nconst util_js_1 = require(\"./nodejs-common/util.js\");\nconst util_js_2 = require(\"./util.js\");\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\nconst packageJson = (0, package_json_helper_cjs_1.getPackageJSON)();\n/**\n * Default number of concurrently executing promises to use when calling uploadManyFiles.\n *\n */\nconst DEFAULT_PARALLEL_UPLOAD_LIMIT = 5;\n/**\n * Default number of concurrently executing promises to use when calling downloadManyFiles.\n *\n */\nconst DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5;\n/**\n * Default number of concurrently executing promises to use when calling downloadFileInChunks.\n *\n */\nconst DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5;\n/**\n * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks.\n *\n */\nconst DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024;\n/**\n * The chunk size in bytes to use when calling downloadFileInChunks.\n *\n */\nconst DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024;\n/**\n * The chunk size in bytes to use when calling uploadFileInChunks.\n *\n */\nconst UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024;\n/**\n * Default number of concurrently executing promises to use when calling uploadFileInChunks.\n *\n */\nconst DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5;\nconst EMPTY_REGEX = '(?:)';\n/**\n * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header.\n * Example: `gccl-gcs-cmd/tm.upload_many`\n *\n * @see {@link GCCL_GCS_CMD}.\n * @see {@link GCCL_GCS_CMD_KEY}.\n */\nconst GCCL_GCS_CMD_FEATURE = {\n UPLOAD_MANY: 'tm.upload_many',\n 
DOWNLOAD_MANY: 'tm.download_many',\n UPLOAD_SHARDED: 'tm.upload_sharded',\n DOWNLOAD_SHARDED: 'tm.download_sharded',\n};\nconst defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => {\n return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap);\n};\nclass MultiPartUploadError extends Error {\n constructor(message, uploadId, partsMap) {\n super(message);\n this.uploadId = uploadId;\n this.partsMap = partsMap;\n }\n}\nexports.MultiPartUploadError = MultiPartUploadError;\n/**\n * Class representing an implementation of MPU in the XML API. This class is not meant for public usage.\n *\n * @private\n *\n */\nclass XMLMultiPartUploadHelper {\n constructor(bucket, fileName, uploadId, partsMap) {\n _XMLMultiPartUploadHelper_instances.add(this);\n this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth();\n this.uploadId = uploadId || '';\n this.bucket = bucket;\n this.fileName = fileName;\n this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`;\n this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' });\n this.xmlParser = new fast_xml_parser_1.XMLParser();\n this.partsMap = partsMap || new Map();\n this.retryOptions = {\n retries: this.bucket.storage.retryOptions.maxRetries,\n factor: this.bucket.storage.retryOptions.retryDelayMultiplier,\n maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000,\n maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000,\n };\n }\n /**\n * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id.\n *\n * @returns {Promise}\n */\n async initiateUpload(headers = {}) {\n const url = `${this.baseUrl}?uploads`;\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers),\n method: 'POST',\n url,\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n const parsedXML = this.xmlParser.parse(res.data);\n this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId;\n }\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n }\n }, this.retryOptions);\n }\n /**\n * Uploads the provided chunk of data to the XML API using the previously created upload id.\n *\n * @param {number} partNumber the sequence number of this chunk.\n * @param {Buffer} chunk the chunk of data to be uploaded.\n * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server\n * to validate the chunk was not corrupted.\n * @returns {Promise}\n */\n async uploadPart(partNumber, chunk, validation) {\n const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`;\n let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this);\n if (validation === 'md5') {\n const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64');\n headers = {\n 'Content-MD5': hash,\n };\n }\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n url,\n method: 'PUT',\n body: chunk,\n headers,\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n this.partsMap.set(partNumber, res.headers['etag']);\n 
}\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n }\n }, this.retryOptions);\n }\n /**\n * Sends the final request of the MPU to tell GCS the upload is now complete.\n *\n * @returns {Promise}\n */\n async completeUpload() {\n const url = `${this.baseUrl}?uploadId=${this.uploadId}`;\n const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0]));\n const parts = [];\n for (const entry of sortedMap.entries()) {\n parts.push({ PartNumber: entry[0], ETag: entry[1] });\n }\n const body = `${this.xmlBuilder.build(parts)}`;\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this),\n url,\n method: 'POST',\n body,\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n return res;\n }\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n return;\n }\n }, this.retryOptions);\n }\n /**\n * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail,\n * and future requests using the upload ID fail.\n *\n * @returns {Promise}\n */\n async abortUpload() {\n const url = `${this.baseUrl}?uploadId=${this.uploadId}`;\n return (0, async_retry_1.default)(async (bail) => {\n try {\n const res = await this.authClient.request({\n url,\n method: 'DELETE',\n });\n if (res.data && res.data.error) {\n throw res.data.error;\n }\n }\n catch (e) {\n __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, \"m\", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);\n return;\n }\n }, this.retryOptions);\n }\n}\n_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) {\n let headerFound = false;\n let userAgentFound = false;\n for (const [key, value] of Object.entries(headers)) {\n if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') {\n headerFound = true;\n // Prepend command feature to value, if not already there\n if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) {\n headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`;\n }\n }\n else if (key.toLocaleLowerCase().trim() === 'user-agent') {\n userAgentFound = true;\n }\n }\n // If the header isn't present, add it\n if (!headerFound) {\n headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`;\n }\n // If the User-Agent isn't present, add it\n if (!userAgentFound) {\n headers['User-Agent'] = (0, util_js_2.getUserAgentString)();\n }\n return headers;\n}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) {\n if (this.bucket.storage.retryOptions.autoRetry &&\n this.bucket.storage.retryOptions.retryableErrorFn(err)) {\n throw err;\n }\n else {\n bail(err);\n }\n};\n/**\n * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.\n *\n * @class\n * @hideconstructor\n *\n * @param {Bucket} bucket A {@link Bucket} instance\n *\n */\nclass TransferManager {\n constructor(bucket) {\n this.bucket = 
bucket;\n }\n /**\n * @typedef {object} UploadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when uploading the files.\n * @property {boolean} [skipIfExists] Do not upload the file if it already exists in\n * the bucket. This will set the precondition ifGenerationMatch = 0.\n * @property {string} [prefix] A prefix to append to all of the uploaded files.\n * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through\n * to each individual upload operation.\n *\n */\n /**\n * Upload multiple files in parallel to the bucket. This is a convenience method\n * that utilizes {@link Bucket#upload} to perform the upload.\n *\n * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name.\n * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list.\n * to be uploaded to the bucket\n * @param {UploadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Upload multiple files in parallel.\n * //-\n * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']);\n * // Your bucket now contains:\n * // - \"local/path/file1.txt\" (with the contents of '/local/path/file1.txt')\n * // - \"local/path/file2.txt\" (with the contents of '/local/path/file2.txt')\n * const response = await transferManager.uploadManyFiles('/local/directory');\n * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure.\n * ```\n *\n */\n async uploadManyFiles(filePathsOrDirectory, options = {}) {\n var _a;\n if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? 
void 0 : _a.preconditionOpts)) {\n options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0;\n }\n else if (options.skipIfExists &&\n options.passthroughOptions === undefined) {\n options.passthroughOptions = {\n preconditionOpts: {\n ifGenerationMatch: 0,\n },\n };\n }\n const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT);\n const promises = [];\n let allPaths = [];\n if (!Array.isArray(filePathsOrDirectory)) {\n for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) {\n allPaths.push(curPath);\n }\n }\n else {\n allPaths = filePathsOrDirectory;\n }\n for (const filePath of allPaths) {\n const stat = await fs_1.promises.lstat(filePath);\n if (stat.isDirectory()) {\n continue;\n }\n const passThroughOptionsCopy = {\n ...options.passthroughOptions,\n [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY,\n };\n passThroughOptionsCopy.destination = filePath\n .split(path.sep)\n .join(path.posix.sep);\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination);\n }\n promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadManyFilesOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the files.\n * @property {string} [prefix] A prefix to append to all of the downloaded files.\n * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files.\n * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through\n * to each individual download operation.\n *\n */\n /**\n * Download multiple files in parallel to the local filesystem. This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. 
If\n * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded.\n * @param {DownloadManyFilesOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download multiple files in parallel.\n * //-\n * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]);\n * // The following files have been downloaded:\n * // - \"file1.txt\" (with the contents from my-bucket.file1.txt)\n * // - \"file2.txt\" (with the contents from my-bucket.file2.txt)\n * const response = await transferManager.downloadManyFiles('test-folder');\n * // All files with GCS prefix of 'test-folder' have been downloaded.\n * ```\n *\n */\n async downloadManyFiles(filesOrFolder, options = {}) {\n const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT);\n const promises = [];\n let files = [];\n if (!Array.isArray(filesOrFolder)) {\n const directoryFiles = await this.bucket.getFiles({\n prefix: filesOrFolder,\n });\n files = directoryFiles[0];\n }\n else {\n files = filesOrFolder.map(curFile => {\n if (typeof curFile === 'string') {\n return this.bucket.file(curFile);\n }\n return curFile;\n });\n }\n const stripRegexString = options.stripPrefix\n ? `^${options.stripPrefix}`\n : EMPTY_REGEX;\n const regex = new RegExp(stripRegexString, 'g');\n for (const file of files) {\n const passThroughOptionsCopy = {\n ...options.passthroughOptions,\n [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY,\n };\n if (options.prefix) {\n passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name);\n }\n if (options.stripPrefix) {\n passThroughOptionsCopy.destination = file.name.replace(regex, '');\n }\n promises.push(limit(() => file.download(passThroughOptionsCopy)));\n }\n return Promise.all(promises);\n }\n /**\n * @typedef {object} DownloadFileInChunksOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when downloading the file.\n * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.\n * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete.\n * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory.\n *\n */\n /**\n * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method\n * that utilizes {@link File#download} to perform the download.\n *\n * @param {File | string} fileOrName {@link File} to download.\n * @param {DownloadFileInChunksOptions} [options] Configuration options.\n * @returns {Promise}\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Download a large file in chunks utilizing parallel operations.\n * //-\n * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt');\n * // Your local directory now contains:\n * // - \"large-file.txt\" (with the contents from my-bucket.large-file.txt)\n * ```\n *\n */\n async downloadFileInChunks(fileOrName, options = {}) {\n let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;\n let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT);\n const noReturnData = Boolean(options.noReturnData);\n const promises = [];\n const file = typeof fileOrName === 'string'\n ? this.bucket.file(fileOrName)\n : fileOrName;\n const fileInfo = await file.get();\n const size = parseInt(fileInfo[0].metadata.size.toString());\n // If the file size does not meet the threshold download it as a single chunk.\n if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) {\n limit = (0, p_limit_1.default)(1);\n chunkSize = size;\n }\n let start = 0;\n const filePath = options.destination || path.basename(file.name);\n const fileToWrite = await fs_1.promises.open(filePath, 'w');\n while (start < size) {\n const chunkStart = start;\n let chunkEnd = start + chunkSize - 1;\n chunkEnd = chunkEnd > size ? size : chunkEnd;\n promises.push(limit(async () => {\n const resp = await file.download({\n start: chunkStart,\n end: chunkEnd,\n [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED,\n });\n const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart);\n if (noReturnData)\n return;\n return result.buffer;\n }));\n start += chunkSize;\n }\n let chunks;\n try {\n chunks = await Promise.all(promises);\n }\n finally {\n await fileToWrite.close();\n }\n if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) {\n const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath);\n if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) {\n const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH);\n mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';\n throw mismatchError;\n }\n }\n if (noReturnData)\n return;\n return [Buffer.concat(chunks, size)];\n }\n /**\n * @typedef {object} UploadFileInChunksOptions\n * @property {number} [concurrencyLimit] The number of concurrently executing promises\n * to use when uploading the file.\n * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.\n * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path.\n * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. 
If not specified\n * defaults to the specified concurrency limit.\n * @property {string} [uploadId] If specified attempts to resume a previous upload.\n * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk\n * specified in partsMap\n * @property {object} [headers] headers to be sent when initiating the multipart upload.\n * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload}\n * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set,\n * failures will be automatically aborted.\n *\n */\n /**\n * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and\n * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to\n * resume the upload.\n *\n * @param {string} [filePath] The path of the file to be uploaded\n * @param {UploadFileInChunksOptions} [options] Configuration options.\n * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this.\n * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map.\n *\n * @example\n * ```\n * const {Storage} = require('@google-cloud/storage');\n * const storage = new Storage();\n * const bucket = storage.bucket('my-bucket');\n * const transferManager = new TransferManager(bucket);\n *\n * //-\n * // Upload a large file in chunks utilizing parallel operations.\n * //-\n * const response = await transferManager.uploadFileInChunks('large-file.txt');\n * // Your bucket now contains:\n * // - \"large-file.txt\"\n * ```\n *\n *\n */\n async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) {\n const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;\n const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT);\n const maxQueueSize = options.maxQueueSize ||\n options.concurrencyLimit ||\n DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT;\n const fileName = options.uploadName || path.basename(filePath);\n const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap);\n let partNumber = 1;\n let promises = [];\n try {\n if (options.uploadId === undefined) {\n await mpuHelper.initiateUpload(options.headers);\n }\n const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize;\n const readStream = (0, fs_1.createReadStream)(filePath, {\n highWaterMark: chunkSize,\n start: startOrResumptionByte,\n });\n // p-limit only limits the number of running promises. 
We do not want to hold an entire\n // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory.\n for await (const curChunk of readStream) {\n if (promises.length >= maxQueueSize) {\n await Promise.all(promises);\n promises = [];\n }\n promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation)));\n }\n await Promise.all(promises);\n return await mpuHelper.completeUpload();\n }\n catch (e) {\n if ((options.autoAbortFailure === undefined || options.autoAbortFailure) &&\n mpuHelper.uploadId) {\n try {\n await mpuHelper.abortUpload();\n return;\n }\n catch (e) {\n throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap);\n }\n }\n throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap);\n }\n }\n async *getPathsFromDirectory(directory) {\n const filesAndSubdirectories = await fs_1.promises.readdir(directory, {\n withFileTypes: true,\n });\n for (const curFileOrDirectory of filesAndSubdirectories) {\n const fullPath = path.join(directory, curFileOrDirectory.name);\n curFileOrDirectory.isDirectory()\n ? yield* this.getPathsFromDirectory(fullPath)\n : yield fullPath;\n }\n }\n}\nexports.TransferManager = TransferManager;\n","\"use strict\";\n\n// Copyright 2019 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\nvar __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = {\n enumerable: true,\n get: function () {\n return m[k];\n }\n };\n }\n Object.defineProperty(o, k2, desc);\n} : function (o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n});\nvar __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) {\n Object.defineProperty(o, \"default\", {\n enumerable: true,\n value: v\n });\n} : function (o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = this && this.__importStar || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.PassThroughShim = exports.getModuleFormat = exports.getDirName = exports.getUserAgentString = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0;\nconst path = __importStar(require(\"path\"));\nconst querystring = __importStar(require(\"querystring\"));\nconst stream_1 = require(\"stream\");\nconst url = __importStar(require(\"url\"));\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\n// @ts-ignore\nconst package_json_helper_cjs_1 = require(\"./package-json-helper.cjs\");\n// Done to avoid a problem with mangling of identifiers when using esModuleInterop\nconst fileURLToPath = url.fileURLToPath;\nconst isEsm = false;\nfunction normalize(optionsOrCallback, cb) {\n const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};\n const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;\n return {\n options,\n callback\n };\n}\nexports.normalize = normalize;\n/**\n * Flatten an object into an Array of arrays, [[key, value], ..].\n * Implements Object.entries() for Node.js <8\n * @internal\n */\nfunction objectEntries(obj) {\n return Object.keys(obj).map(key => [key, obj[key]]);\n}\nexports.objectEntries = objectEntries;\n/**\n * Encode `str` with encodeURIComponent, plus these\n * reserved characters: `! * ' ( )`.\n *\n * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent}\n *\n * @param {string} str The URI component to encode.\n * @return {string} The encoded string.\n */\nfunction fixedEncodeURIComponent(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase());\n}\nexports.fixedEncodeURIComponent = fixedEncodeURIComponent;\n/**\n * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent.\n *\n * Encode every byte except `A-Z a-Z 0-9 ~ - . _`.\n *\n * @param {string} uri The URI to encode.\n * @param [boolean=false] encodeSlash If `true`, the \"/\" character is not encoded.\n * @return {string} The encoded string.\n */\nfunction encodeURI(uri, encodeSlash) {\n // Split the string by `/`, and conditionally rejoin them with either\n // %2F if encodeSlash is `true`, or '/' if `false`.\n return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? 
'%2F' : '/');\n}\nexports.encodeURI = encodeURI;\n/**\n * Serialize an object to a URL query string using util.encodeURI(uri, true).\n * @param {string} url The object to serialize.\n * @return {string} Serialized string.\n */\nfunction qsStringify(qs) {\n return querystring.stringify(qs, '&', '=', {\n encodeURIComponent: component => encodeURI(component, true)\n });\n}\nexports.qsStringify = qsStringify;\nfunction objectKeyToLowercase(object) {\n const newObj = {};\n for (let key of Object.keys(object)) {\n const value = object[key];\n key = key.toLowerCase();\n newObj[key] = value;\n }\n return newObj;\n}\nexports.objectKeyToLowercase = objectKeyToLowercase;\n/**\n * JSON encode str, with unicode \\u+ representation.\n * @param {object} obj The object to encode.\n * @return {string} Serialized string.\n */\nfunction unicodeJSONStringify(obj) {\n return JSON.stringify(obj).replace(/[\\u0080-\\uFFFF]/g, char => '\\\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4));\n}\nexports.unicodeJSONStringify = unicodeJSONStringify;\n/**\n * Converts the given objects keys to snake_case\n * @param {object} obj object to convert keys to snake case.\n * @returns {object} object with keys converted to snake case.\n */\nfunction convertObjKeysToSnakeCase(obj) {\n if (obj instanceof Date || obj instanceof RegExp) {\n return obj;\n }\n if (Array.isArray(obj)) {\n return obj.map(convertObjKeysToSnakeCase);\n }\n if (obj instanceof Object) {\n return Object.keys(obj).reduce((acc, cur) => {\n const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => {\n return `_${p1.toLowerCase()}`;\n });\n acc[s] = convertObjKeysToSnakeCase(obj[cur]);\n return acc;\n }, Object());\n }\n return obj;\n}\nexports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase;\n/**\n * Formats the provided date object as a UTC ISO string.\n * @param {Date} dateTimeToFormat date object to be formatted.\n * @param {boolean} includeTime flag to include hours, minutes, seconds in output.\n * @param {string} dateDelimiter delimiter between date components.\n * @param {string} timeDelimiter delimiter between time components.\n * @returns {string} UTC ISO format of provided date obect.\n */\nfunction formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') {\n const year = dateTimeToFormat.getUTCFullYear();\n const month = dateTimeToFormat.getUTCMonth() + 1;\n const day = dateTimeToFormat.getUTCDate();\n const hour = dateTimeToFormat.getUTCHours();\n const minute = dateTimeToFormat.getUTCMinutes();\n const second = dateTimeToFormat.getUTCSeconds();\n let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`;\n if (includeTime) {\n resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`;\n }\n return resultString;\n}\nexports.formatAsUTCISO = formatAsUTCISO;\n/**\n * Examines the runtime environment and returns the appropriate tracking string.\n * @returns {string} metrics tracking string based on the current runtime environment.\n */\nfunction getRuntimeTrackingString() {\n if (\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n globalThis.Deno &&\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n globalThis.Deno.version &&\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // 
@ts-ignore\n globalThis.Deno.version.deno) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n return `gl-deno/${globalThis.Deno.version.deno}`;\n } else {\n return `gl-node/${process.versions.node}`;\n }\n}\nexports.getRuntimeTrackingString = getRuntimeTrackingString;\n/**\n * Looks at package.json and creates the user-agent string to be applied to request headers.\n * @returns {string} user agent string.\n */\nfunction getUserAgentString() {\n const pkg = (0, package_json_helper_cjs_1.getPackageJSON)();\n const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes.\n .replace('/', '-'); // For UA spec-compliance purposes.\n return hyphenatedPackageName + '/' + pkg.version;\n}\nexports.getUserAgentString = getUserAgentString;\nfunction getDirName() {\n let dirToUse = '';\n try {\n dirToUse = __dirname;\n } catch (e) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n dirToUse = __dirname;\n }\n return dirToUse;\n}\nexports.getDirName = getDirName;\nfunction getModuleFormat() {\n return isEsm ? 'ESM' : 'CJS';\n}\nexports.getModuleFormat = getModuleFormat;\nclass PassThroughShim extends stream_1.PassThrough {\n constructor() {\n super(...arguments);\n this.shouldEmitReading = true;\n this.shouldEmitWriting = true;\n }\n _read(size) {\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n super._read(size);\n }\n _write(chunk, encoding, callback) {\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n // Per the nodejs documention, callback must be invoked on the next tick\n process.nextTick(() => {\n super._write(chunk, encoding, callback);\n });\n }\n _final(callback) {\n // If the stream is empty (i.e. 
empty file) final will be invoked before _read / _write\n // and we should still emit the proper events.\n if (this.shouldEmitReading) {\n this.emit('reading');\n this.shouldEmitReading = false;\n }\n if (this.shouldEmitWriting) {\n this.emit('writing');\n this.shouldEmitWriting = false;\n }\n callback(null);\n }\n}\nexports.PassThroughShim = PassThroughShim;\n","// Copyright 2023 Google LLC\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/* eslint-disable node/no-missing-require */\n\nfunction getPackageJSON() {\n return require('../../../package.json');\n}\n\nexports.getPackageJSON = getPackageJSON;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(95);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/dist/post/licenses.txt b/dist/post/licenses.txt index ad7b961..fddbd30 100644 --- a/dist/post/licenses.txt +++ b/dist/post/licenses.txt @@ -47,6 +47,28 @@ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +@fastify/busboy +MIT +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. + @google-cloud/common Apache-2.0 @@ -1585,7 +1607,7 @@ MIT The MIT License (MIT) ===================== -Copyright © `<2022>` `Michael Mclaughlin` +Copyright © `<2023>` `Michael Mclaughlin` Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation @@ -2025,209 +2047,29 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -fast-text-encoding -Apache-2.0 - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. +fast-xml-parser +MIT +MIT License - Copyright {yyyy} {name of copyright owner} +Copyright (c) 2017 Amit Kumar Gupta - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: - http://www.apache.org/licenses/LICENSE-2.0 +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. gaxios @@ -2848,31 +2690,6 @@ Apache-2.0 limitations under the License. -google-p12-pem -MIT -The MIT License (MIT) - -Copyright (c) 2014 Ryan Seys - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
- - gtoken MIT The MIT License (MIT) @@ -2916,6 +2733,28 @@ MIT https-proxy-agent MIT +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. inherits ISC @@ -2937,31 +2776,6 @@ PERFORMANCE OF THIS SOFTWARE. -is-plain-object -MIT -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - is-stream MIT MIT License @@ -3041,25 +2855,6 @@ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TOR ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -lru-cache -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - mime MIT The MIT License (MIT) @@ -3190,341 +2985,6 @@ SOFTWARE. 
-node-forge -(BSD-3-Clause OR GPL-2.0) -You may use the Forge project under the terms of either the BSD License or the -GNU General Public License (GPL) Version 2. - -The BSD License is recommended for most projects. It is simple and easy to -understand and it places almost no restrictions on what you can do with the -Forge project. - -If the GPL suits your project better you are also free to use Forge under -that license. - -You don't have to do anything special to choose one license or the other and -you don't have to notify anyone which license you are using. You are free to -use this project in commercial projects as long as the copyright header is -left intact. - -If you are a commercial entity and use this set of libraries in your -commercial software then reasonable payment to Digital Bazaar, if you can -afford it, is not required but is expected and would be appreciated. If this -library saves you time, then it's saving you money. The cost of developing -the Forge software was on the order of several hundred hours and tens of -thousands of dollars. We are attempting to strike a balance between helping -the development community while not being taken advantage of by lucrative -commercial entities for our efforts. - -------------------------------------------------------------------------------- -New BSD License (3-clause) -Copyright (c) 2010, Digital Bazaar, Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Digital Bazaar, Inc. nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL DIGITAL BAZAAR BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -------------------------------------------------------------------------------- - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc. - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. 
This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. 
You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. 
You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. 
If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - - once ISC The ISC License @@ -3762,6 +3222,31 @@ IN THE SOFTWARE. +strnum +MIT +MIT License + +Copyright (c) 2021 Natural Intelligence + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + stubs MIT @@ -4012,6 +3497,31 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +undici +MIT +MIT License + +Copyright (c) Matteo Collina and Undici contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + universal-user-agent ISC # [ISC License](https://spdx.org/licenses/ISC) @@ -4124,25 +3634,6 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -yallist -ISC -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - yocto-queue MIT MIT License